author    Context Git Mirror Bot <phg42.2a@gmail.com>  2015-05-16 00:15:04 +0200
committer Context Git Mirror Bot <phg42.2a@gmail.com>  2015-05-16 00:15:04 +0200
commit    b55577d0998160c0174e250b542016ecd6ca9056 (patch)
tree      27093212d5ca3e6ffe4ae434c3ec094233ed37ba
parent    624cbb5da392e9403984dd1cf368c0d408b1c2a8 (diff)
download  context-b55577d0998160c0174e250b542016ecd6ca9056.tar.gz
-rw-r--r--COPYING350
-rw-r--r--README.rst14
-rw-r--r--context/data/scite/context/documents/scite-context-readme.pdfbin0 -> 221437 bytes
-rw-r--r--context/data/scite/context/documents/scite-context-readme.tex (renamed from context/data/scite/scite-context-readme.tex)242
-rw-r--r--context/data/scite/context/documents/scite-context-visual.pdf (renamed from context/data/scite/scite-context-visual.pdf)0
-rw-r--r--context/data/scite/context/documents/scite-context-visual.png (renamed from context/data/scite/scite-context-visual.png)bin77849 -> 77849 bytes
-rw-r--r--context/data/scite/context/lexers/data/scite-context-data-context.lua4
-rw-r--r--context/data/scite/context/lexers/data/scite-context-data-interfaces.lua10
-rw-r--r--context/data/scite/context/lexers/data/scite-context-data-metafun.lua4
-rw-r--r--context/data/scite/context/lexers/data/scite-context-data-metapost.lua9
-rw-r--r--context/data/scite/context/lexers/data/scite-context-data-tex.lua (renamed from context/data/scite/lexers/data/scite-context-data-tex.lua)4
-rw-r--r--context/data/scite/context/lexers/lexer.lua3
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-bibtex.lua195
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-cld.lua (renamed from context/data/scite/lexers/scite-context-lexer-cld.lua)11
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-cpp-web.lua23
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-cpp.lua188
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-dummy.lua35
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-lua-longstring.lua (renamed from context/data/scite/lexers/scite-context-lexer-lua-longstring.lua)19
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-lua.lua (renamed from context/data/scite/lexers/scite-context-lexer-lua.lua)261
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-mps.lua177
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-pdf-object.lua136
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-pdf-xref.lua43
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-pdf.lua204
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-tex-web.lua23
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-tex.lua (renamed from context/data/scite/lexers/scite-context-lexer-tex.lua)185
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-txt.lua (renamed from context/data/scite/lexers/scite-context-lexer-txt.lua)28
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-web-snippets.lua133
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-web.lua67
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-xml-cdata.lua (renamed from context/data/scite/lexers/scite-context-lexer-xml-cdata.lua)21
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-xml-comment.lua33
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-xml-script.lua33
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer-xml.lua (renamed from context/data/scite/lexers/scite-context-lexer-xml.lua)213
-rw-r--r--context/data/scite/context/lexers/scite-context-lexer.lua2177
-rw-r--r--context/data/scite/context/lexers/themes/scite-context-theme.lua150
-rw-r--r--context/data/scite/context/scite-context-data-context.properties206
-rw-r--r--context/data/scite/context/scite-context-data-interfaces.properties1812
-rw-r--r--context/data/scite/context/scite-context-data-metafun.properties59
-rw-r--r--context/data/scite/context/scite-context-data-metapost.properties127
-rw-r--r--context/data/scite/context/scite-context-data-tex.properties (renamed from context/data/scite/scite-context-data-tex.properties)226
-rw-r--r--context/data/scite/context/scite-context-external.properties (renamed from context/data/scite/scite-context-external.properties)56
-rw-r--r--context/data/scite/context/scite-context-internal.properties (renamed from context/data/scite/scite-context-internal.properties)10
-rw-r--r--context/data/scite/context/scite-context-user.properties15
-rw-r--r--context/data/scite/context/scite-context.properties (renamed from context/data/scite/scite-context.properties)56
-rw-r--r--context/data/scite/context/scite-ctx-context.properties (renamed from context/data/scite/scite-ctx-context.properties)0
-rw-r--r--context/data/scite/context/scite-ctx-example.properties (renamed from context/data/scite/scite-ctx-example.properties)0
-rw-r--r--context/data/scite/context/scite-ctx.lua (renamed from context/data/scite/scite-ctx.lua)29
-rw-r--r--context/data/scite/context/scite-ctx.properties (renamed from context/data/scite/scite-ctx.properties)25
-rw-r--r--context/data/scite/context/scite-metapost.properties (renamed from context/data/scite/scite-metapost.properties)2
-rw-r--r--context/data/scite/context/scite-pragma.properties (renamed from context/data/scite/scite-pragma.properties)7
-rw-r--r--context/data/scite/context/scite-tex.properties (renamed from context/data/scite/scite-tex.properties)2
-rw-r--r--context/data/scite/lexers/archive/scite-context-lexer-pre-3-3-1.lua1100
-rw-r--r--context/data/scite/lexers/data/scite-context-data-context.lua4
-rw-r--r--context/data/scite/lexers/data/scite-context-data-interfaces.lua10
-rw-r--r--context/data/scite/lexers/data/scite-context-data-metafun.lua4
-rw-r--r--context/data/scite/lexers/data/scite-context-data-metapost.lua7
-rw-r--r--context/data/scite/lexers/scite-context-lexer-mps.lua155
-rw-r--r--context/data/scite/lexers/scite-context-lexer-pdf-object.lua117
-rw-r--r--context/data/scite/lexers/scite-context-lexer-pdf-xref.lua51
-rw-r--r--context/data/scite/lexers/scite-context-lexer-pdf.lua80
-rw-r--r--context/data/scite/lexers/scite-context-lexer-web.lua155
-rw-r--r--context/data/scite/lexers/scite-context-lexer-xml-comment.lua42
-rw-r--r--context/data/scite/lexers/scite-context-lexer-xml-script.lua30
-rw-r--r--context/data/scite/lexers/scite-context-lexer.lua876
-rw-r--r--context/data/scite/lexers/themes/scite-context-theme-keep.lua233
-rw-r--r--context/data/scite/lexers/themes/scite-context-theme.lua226
-rw-r--r--context/data/scite/metapost.properties1
-rw-r--r--context/data/scite/scite-context-data-context.properties190
-rw-r--r--context/data/scite/scite-context-data-interfaces.properties1276
-rw-r--r--context/data/scite/scite-context-data-metafun.properties57
-rw-r--r--context/data/scite/scite-context-data-metapost.properties102
-rw-r--r--context/data/scite/scite-context-readme.pdfbin210827 -> 0 bytes
-rw-r--r--context/data/scite/scite-context-user.properties15
-rw-r--r--context/data/scite/scite-context-visual.tex52
-rw-r--r--context/data/scite/tex.properties1
-rw-r--r--context/data/texworks/configuration/smart-quotes-modes.txt~21
-rw-r--r--doc/context/document/general/manuals/mreadme.pdfbin85996 -> 0 bytes
-rw-r--r--doc/context/document/general/manuals/tiptrick.pdfbin30607 -> 0 bytes
-rw-r--r--doc/context/documents/general/manuals/epub-mkiv-demo.epubbin0 -> 10194 bytes
-rw-r--r--doc/context/documents/general/manuals/epub-mkiv-demo.pdfbin0 -> 19204 bytes
-rw-r--r--doc/context/documents/general/manuals/epub-mkiv.pdfbin0 -> 98659 bytes
-rw-r--r--doc/context/documents/general/manuals/mreadme.pdfbin0 -> 40702 bytes
-rw-r--r--doc/context/documents/general/manuals/swiglib-mkiv.pdfbin0 -> 169009 bytes
-rw-r--r--doc/context/documents/general/manuals/tiptrick.pdfbin0 -> 47182 bytes
-rw-r--r--doc/context/documents/general/manuals/tools-mkiv.pdfbin0 -> 374423 bytes
-rw-r--r--doc/context/documents/general/manuals/units-mkiv.pdfbin0 -> 163696 bytes
-rw-r--r--doc/context/documents/general/manuals/workflows-mkiv.pdfbin0 -> 94311 bytes
-rw-r--r--doc/context/documents/general/manuals/xtables-mkiv.pdfbin0 -> 136336 bytes
-rw-r--r--doc/context/manuals/allkind/mcommon.tex199
-rw-r--r--doc/context/manuals/allkind/mreadme.tex361
-rw-r--r--doc/context/scripts/mkii/ctxtools.man2
-rw-r--r--doc/context/scripts/mkii/imgtopdf.man2
-rw-r--r--doc/context/scripts/mkii/mptopdf.man4
-rw-r--r--doc/context/scripts/mkii/mptopdf.xml2
-rw-r--r--doc/context/scripts/mkii/pdftools.man2
-rw-r--r--doc/context/scripts/mkii/pstopdf.man2
-rw-r--r--doc/context/scripts/mkii/rlxtools.man2
-rw-r--r--doc/context/scripts/mkii/texexec.man2
-rw-r--r--doc/context/scripts/mkii/texmfstart.html2
-rw-r--r--doc/context/scripts/mkii/texmfstart.man8
-rw-r--r--doc/context/scripts/mkii/texmfstart.xml2
-rw-r--r--doc/context/scripts/mkii/textools.man2
-rw-r--r--doc/context/scripts/mkii/texutil.man2
-rw-r--r--doc/context/scripts/mkii/tmftools.man2
-rw-r--r--doc/context/scripts/mkii/xmltools.man2
-rw-r--r--doc/context/scripts/mkiv/context.html11
-rw-r--r--doc/context/scripts/mkiv/context.man17
-rw-r--r--doc/context/scripts/mkiv/context.xml35
-rw-r--r--doc/context/scripts/mkiv/luatools.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-babel.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-base.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-cache.man4
-rw-r--r--doc/context/scripts/mkiv/mtx-chars.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-check.html2
-rw-r--r--doc/context/scripts/mkiv/mtx-check.man4
-rw-r--r--doc/context/scripts/mkiv/mtx-check.xml2
-rw-r--r--doc/context/scripts/mkiv/mtx-colors.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-context.html11
-rw-r--r--doc/context/scripts/mkiv/mtx-context.man17
-rw-r--r--doc/context/scripts/mkiv/mtx-context.xml35
-rw-r--r--doc/context/scripts/mkiv/mtx-epub.html9
-rw-r--r--doc/context/scripts/mkiv/mtx-epub.man17
-rw-r--r--doc/context/scripts/mkiv/mtx-epub.xml7
-rw-r--r--doc/context/scripts/mkiv/mtx-fcd.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-flac.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-fonts.html2
-rw-r--r--doc/context/scripts/mkiv/mtx-fonts.man4
-rw-r--r--doc/context/scripts/mkiv/mtx-fonts.xml2
-rw-r--r--doc/context/scripts/mkiv/mtx-grep.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-interface.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-metapost.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-metatex.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-modules.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-package.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-patterns.html2
-rw-r--r--doc/context/scripts/mkiv/mtx-patterns.man5
-rw-r--r--doc/context/scripts/mkiv/mtx-patterns.xml2
-rw-r--r--doc/context/scripts/mkiv/mtx-pdf.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-plain.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-profile.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-rsync.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-scite.html2
-rw-r--r--doc/context/scripts/mkiv/mtx-scite.man8
-rw-r--r--doc/context/scripts/mkiv/mtx-scite.xml2
-rw-r--r--doc/context/scripts/mkiv/mtx-server.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-texworks.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-timing.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-tools.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-unzip.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-update.html4
-rw-r--r--doc/context/scripts/mkiv/mtx-update.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-update.xml2
-rw-r--r--doc/context/scripts/mkiv/mtx-watch.man2
-rw-r--r--doc/context/scripts/mkiv/mtx-youless.man2
-rw-r--r--doc/context/scripts/mkiv/mtxrun.html2
-rw-r--r--doc/context/scripts/mkiv/mtxrun.man8
-rw-r--r--doc/context/scripts/mkiv/mtxrun.xml2
-rw-r--r--doc/context/sources/general/manuals/epub/epub-mkiv-demo.tex43
-rw-r--r--doc/context/sources/general/manuals/epub/epub-mkiv.tex466
-rw-r--r--doc/context/sources/general/manuals/mcommon.tex210
-rw-r--r--doc/context/sources/general/manuals/readme/mreadme.tex372
-rw-r--r--doc/context/sources/general/manuals/swiglib/swiglib-mkiv.tex335
-rw-r--r--doc/context/sources/general/manuals/tiptrick/tiptrick.tex117
-rw-r--r--doc/context/sources/general/manuals/tiptrick/tiptrick.xml53
-rw-r--r--doc/context/sources/general/manuals/tools/tools-mkiv.tex501
-rw-r--r--doc/context/sources/general/manuals/units/units-mkiv.tex538
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-contents.tex13
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-graphics.tex157
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-injectors.tex86
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-introduction.tex25
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-mkiv.tex32
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-resources.tex156
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-setups.tex72
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-style.tex49
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-suspects.tex54
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-titlepage.tex37
-rw-r--r--doc/context/sources/general/manuals/workflows/workflows-xml.tex96
-rw-r--r--doc/context/sources/general/manuals/xtables/xtables-mkiv.tex1225
-rw-r--r--fonts/map/pdftex/context/mkiv-base.map182
-rw-r--r--metapost/context/base/metafun.mpiv5
-rw-r--r--metapost/context/base/mp-bare.mpiv93
-rw-r--r--metapost/context/base/mp-base.mpii19
-rw-r--r--metapost/context/base/mp-base.mpiv95
-rw-r--r--metapost/context/base/mp-chem.mpiv115
-rw-r--r--metapost/context/base/mp-form.mpiv2
-rw-r--r--metapost/context/base/mp-func.mpiv33
-rw-r--r--metapost/context/base/mp-grap.mpiv336
-rw-r--r--metapost/context/base/mp-luas.mpiv99
-rw-r--r--metapost/context/base/mp-mlib.mpiv571
-rw-r--r--metapost/context/base/mp-page.mpiv522
-rw-r--r--metapost/context/base/mp-tool.mpii2683
-rw-r--r--metapost/context/base/mp-tool.mpiv218
-rw-r--r--metapost/context/fonts/bidi-symbols.tex3
-rw-r--r--scripts/context/lua/mtx-bibtex.lua152
-rw-r--r--scripts/context/lua/mtx-check.lua43
-rw-r--r--scripts/context/lua/mtx-context.lua201
-rw-r--r--scripts/context/lua/mtx-context.xml35
-rw-r--r--scripts/context/lua/mtx-convert.lua2
-rw-r--r--scripts/context/lua/mtx-epub.lua879
-rw-r--r--scripts/context/lua/mtx-fcd.lua4
-rw-r--r--scripts/context/lua/mtx-flac.lua116
-rw-r--r--scripts/context/lua/mtx-fonts.lua16
-rw-r--r--scripts/context/lua/mtx-interface.lua8
-rw-r--r--scripts/context/lua/mtx-metapost.lua60
-rw-r--r--scripts/context/lua/mtx-mk-help.lua2
-rw-r--r--scripts/context/lua/mtx-patterns.lua58
-rw-r--r--scripts/context/lua/mtx-plain.lua8
-rw-r--r--scripts/context/lua/mtx-scite.lua55
-rw-r--r--scripts/context/lua/mtx-server.lua25
-rw-r--r--scripts/context/lua/mtx-update.lua15
-rw-r--r--scripts/context/lua/mtxrun.lua4449
-rw-r--r--scripts/context/ruby/texexec.rb8
-rw-r--r--scripts/context/stubs/install/first-setup.bat (renamed from scripts/context/stubs/mswin/first-setup.bat)0
-rw-r--r--scripts/context/stubs/install/first-setup.sh120
-rw-r--r--scripts/context/stubs/mswin/context.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/ctxtools.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/luatools.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/metatex.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/mptopdf.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/mtxrun.dllbin7680 -> 7680 bytes
-rw-r--r--scripts/context/stubs/mswin/mtxrun.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/mtxrun.lua4449
-rw-r--r--scripts/context/stubs/mswin/mtxrunjit.exebin0 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/mtxworks.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/pstopdf.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/texexec.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/mswin/texmfstart.exebin4608 -> 4608 bytes
-rw-r--r--scripts/context/stubs/setup/setuptex167
-rw-r--r--scripts/context/stubs/setup/setuptex.bat (renamed from scripts/context/stubs/mswin/setuptex.bat)0
-rw-r--r--scripts/context/stubs/setup/setuptex.csh164
-rw-r--r--scripts/context/stubs/source/mtxrun_dll.c142
-rw-r--r--scripts/context/stubs/source/readme.txt42
-rw-r--r--scripts/context/stubs/unix/contextjit5
-rw-r--r--scripts/context/stubs/unix/ctxtools2
-rw-r--r--scripts/context/stubs/unix/mptopdf2
-rw-r--r--scripts/context/stubs/unix/mtxrun4449
-rw-r--r--scripts/context/stubs/unix/mtxrunjit5
-rw-r--r--scripts/context/stubs/unix/pstopdf2
-rw-r--r--scripts/context/stubs/win64/context.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/contextjit.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/ctxtools.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/luatools.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/metatex.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/mptopdf.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/mtxrun.dllbin0 -> 18432 bytes
-rw-r--r--scripts/context/stubs/win64/mtxrun.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/mtxrun.lua19363
-rw-r--r--scripts/context/stubs/win64/mtxrunjit.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/mtxworks.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/pstopdf.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/texexec.exebin0 -> 15360 bytes
-rw-r--r--scripts/context/stubs/win64/texmfstart.exebin0 -> 15360 bytes
-rw-r--r--tex/context/base/anch-bar.mkiv14
-rw-r--r--tex/context/base/anch-bck.mkvi6
-rw-r--r--tex/context/base/anch-pgr.lua145
-rw-r--r--tex/context/base/anch-pgr.mkiv49
-rw-r--r--tex/context/base/anch-pos.lua489
-rw-r--r--tex/context/base/anch-pos.mkiv124
-rw-r--r--tex/context/base/anch-snc.mkiv4
-rw-r--r--tex/context/base/anch-tab.mkiv8
-rw-r--r--tex/context/base/attr-col.lua73
-rw-r--r--tex/context/base/attr-eff.lua41
-rw-r--r--tex/context/base/attr-eff.mkiv12
-rw-r--r--tex/context/base/attr-ini.lua115
-rw-r--r--tex/context/base/attr-ini.mkiv58
-rw-r--r--tex/context/base/attr-lay.lua88
-rw-r--r--tex/context/base/attr-lay.mkiv39
-rw-r--r--tex/context/base/attr-neg.lua12
-rw-r--r--tex/context/base/attr-neg.mkiv2
-rw-r--r--tex/context/base/back-exp.lua4112
-rw-r--r--tex/context/base/back-exp.mkiv163
-rw-r--r--tex/context/base/back-ini.lua25
-rw-r--r--tex/context/base/back-ini.mkiv23
-rw-r--r--tex/context/base/back-pdf.lua114
-rw-r--r--tex/context/base/back-pdf.mkiv276
-rw-r--r--tex/context/base/back-u3d.mkiv2
-rw-r--r--tex/context/base/bibl-bib.lua2
-rw-r--r--tex/context/base/bibl-bib.mkiv24
-rw-r--r--tex/context/base/bibl-tra.lua12
-rw-r--r--tex/context/base/bibl-tra.mkii3
-rw-r--r--tex/context/base/bibl-tra.mkiv68
-rw-r--r--tex/context/base/blob-ini.lua21
-rw-r--r--tex/context/base/blob-ini.mkiv8
-rw-r--r--tex/context/base/buff-imp-default.lua2
-rw-r--r--tex/context/base/buff-imp-lua.lua6
-rw-r--r--tex/context/base/buff-ini.lua350
-rw-r--r--tex/context/base/buff-ini.mkiv85
-rw-r--r--tex/context/base/buff-par.lua112
-rw-r--r--tex/context/base/buff-par.mkvi68
-rw-r--r--tex/context/base/buff-ver.lua193
-rw-r--r--tex/context/base/buff-ver.mkiv267
-rw-r--r--tex/context/base/bxml-apa.mkiv4
-rw-r--r--tex/context/base/catc-act.mkiv6
-rw-r--r--tex/context/base/catc-ctx.mkiv34
-rw-r--r--tex/context/base/catc-ini.lua4
-rw-r--r--tex/context/base/catc-ini.mkiv34
-rw-r--r--tex/context/base/catc-sym.mkiv8
-rw-r--r--tex/context/base/catc-xml.mkiv21
-rw-r--r--tex/context/base/char-act.mkiv7
-rw-r--r--tex/context/base/char-cjk.lua10
-rw-r--r--tex/context/base/char-def.lua45082
-rw-r--r--tex/context/base/char-enc.lua11
-rw-r--r--tex/context/base/char-ent.lua10
-rw-r--r--tex/context/base/char-fio.lua94
-rw-r--r--tex/context/base/char-ini.lua1080
-rw-r--r--tex/context/base/char-ini.mkiv57
-rw-r--r--tex/context/base/char-tex.lua662
-rw-r--r--tex/context/base/char-utf.lua815
-rw-r--r--tex/context/base/char-utf.mkiv17
-rw-r--r--tex/context/base/chem-ini.lua37
-rw-r--r--tex/context/base/chem-ini.mkiv2
-rw-r--r--tex/context/base/chem-str.lua122
-rw-r--r--tex/context/base/chem-str.mkiv92
-rw-r--r--tex/context/base/cldf-bas.lua24
-rw-r--r--tex/context/base/cldf-ini.lua871
-rw-r--r--tex/context/base/cldf-ini.mkiv9
-rw-r--r--tex/context/base/cldf-prs.lua94
-rw-r--r--tex/context/base/cldf-scn.lua163
-rw-r--r--tex/context/base/cldf-ver.lua22
-rw-r--r--tex/context/base/colo-ext.mkiv18
-rw-r--r--tex/context/base/colo-grp.mkiv2
-rw-r--r--tex/context/base/colo-imp-rgb.mkiv8
-rw-r--r--tex/context/base/colo-ini.lua475
-rw-r--r--tex/context/base/colo-ini.mkiv151
-rw-r--r--tex/context/base/colo-run.mkiv8
-rw-r--r--tex/context/base/colo-xwi.mkii7
-rw-r--r--tex/context/base/cont-fil.mkii2
-rw-r--r--tex/context/base/cont-fil.mkiv1
-rw-r--r--tex/context/base/cont-log.mkiv141
-rw-r--r--tex/context/base/cont-new.mkiv67
-rw-r--r--tex/context/base/cont-run.lua252
-rw-r--r--tex/context/base/cont-run.mkiv20
-rw-r--r--tex/context/base/cont-yes.mkiv154
-rw-r--r--tex/context/base/context-version.pdfbin4115 -> 4178 bytes
-rw-r--r--tex/context/base/context-version.pngbin40350 -> 38170 bytes
-rw-r--r--tex/context/base/context.mkiv78
-rw-r--r--tex/context/base/core-con.lua330
-rw-r--r--tex/context/base/core-con.mkiv175
-rw-r--r--tex/context/base/core-ctx.lua76
-rw-r--r--tex/context/base/core-ctx.mkiv14
-rw-r--r--tex/context/base/core-dat.lua58
-rw-r--r--tex/context/base/core-dat.mkiv38
-rw-r--r--tex/context/base/core-def.mkiv12
-rw-r--r--tex/context/base/core-env.lua287
-rw-r--r--tex/context/base/core-env.mkiv253
-rw-r--r--tex/context/base/core-ini.mkiv10
-rw-r--r--tex/context/base/core-sys.lua62
-rw-r--r--tex/context/base/core-sys.mkiv90
-rw-r--r--tex/context/base/core-two.lua54
-rw-r--r--tex/context/base/core-two.mkiv27
-rw-r--r--tex/context/base/core-uti.lua133
-rw-r--r--tex/context/base/core-uti.mkiv20
-rw-r--r--tex/context/base/data-aux.lua3
-rw-r--r--tex/context/base/data-crl.lua2
-rw-r--r--tex/context/base/data-env.lua19
-rw-r--r--tex/context/base/data-exp.lua357
-rw-r--r--tex/context/base/data-fil.lua28
-rw-r--r--tex/context/base/data-ini.lua152
-rw-r--r--tex/context/base/data-lst.lua26
-rw-r--r--tex/context/base/data-lua.lua23
-rw-r--r--tex/context/base/data-met.lua4
-rw-r--r--tex/context/base/data-pre.lua234
-rw-r--r--tex/context/base/data-res.lua813
-rw-r--r--tex/context/base/data-sch.lua18
-rw-r--r--tex/context/base/data-tex.lua36
-rw-r--r--tex/context/base/data-tmf.lua2
-rw-r--r--tex/context/base/data-tmp.lua36
-rw-r--r--tex/context/base/data-tre.lua222
-rw-r--r--tex/context/base/data-use.lua4
-rw-r--r--tex/context/base/data-vir.lua10
-rw-r--r--tex/context/base/data-zip.lua62
-rw-r--r--tex/context/base/enco-ini.mkiv257
-rw-r--r--tex/context/base/export-example.css443
-rw-r--r--tex/context/base/export-example.tex4
-rw-r--r--tex/context/base/file-ini.lua53
-rw-r--r--tex/context/base/file-ini.mkvi30
-rw-r--r--tex/context/base/file-job.lua633
-rw-r--r--tex/context/base/file-job.mkvi175
-rw-r--r--tex/context/base/file-lib.lua65
-rw-r--r--tex/context/base/file-mod.lua101
-rw-r--r--tex/context/base/file-mod.mkvi60
-rw-r--r--tex/context/base/file-res.lua51
-rw-r--r--tex/context/base/file-res.mkvi8
-rw-r--r--tex/context/base/file-syn.lua35
-rw-r--r--tex/context/base/file-syn.mkvi6
-rw-r--r--tex/context/base/font-afm.lua101
-rw-r--r--tex/context/base/font-agl.lua12
-rw-r--r--tex/context/base/font-chk.lua171
-rw-r--r--tex/context/base/font-chk.mkiv13
-rw-r--r--tex/context/base/font-cid.lua25
-rw-r--r--tex/context/base/font-col.lua83
-rw-r--r--tex/context/base/font-col.mkvi12
-rw-r--r--tex/context/base/font-con.lua116
-rw-r--r--tex/context/base/font-ctx.lua1468
-rw-r--r--tex/context/base/font-def.lua4
-rw-r--r--tex/context/base/font-enc.lua8
-rw-r--r--tex/context/base/font-enh.lua28
-rw-r--r--tex/context/base/font-ext.lua95
-rw-r--r--tex/context/base/font-fbk.lua41
-rw-r--r--tex/context/base/font-fea.mkvi36
-rw-r--r--tex/context/base/font-fil.mkvi100
-rw-r--r--tex/context/base/font-gds.lua65
-rw-r--r--tex/context/base/font-gds.mkvi4
-rw-r--r--tex/context/base/font-hsh.lua44
-rw-r--r--tex/context/base/font-ini.lua2
-rw-r--r--tex/context/base/font-ini.mkvi270
-rw-r--r--tex/context/base/font-inj.lua1055
-rw-r--r--tex/context/base/font-lib.mkvi30
-rw-r--r--tex/context/base/font-map.lua333
-rw-r--r--tex/context/base/font-mis.lua14
-rw-r--r--tex/context/base/font-nod.lua265
-rw-r--r--tex/context/base/font-odv.lua1220
-rw-r--r--tex/context/base/font-ota.lua427
-rw-r--r--tex/context/base/font-otb.lua218
-rw-r--r--tex/context/base/font-otc.lua16
-rw-r--r--tex/context/base/font-otd.lua109
-rw-r--r--tex/context/base/font-otf.lua711
-rw-r--r--tex/context/base/font-otn.lua1818
-rw-r--r--tex/context/base/font-otp.lua73
-rw-r--r--tex/context/base/font-ott.lua12
-rw-r--r--tex/context/base/font-otx.lua404
-rw-r--r--tex/context/base/font-pat.lua2
-rw-r--r--tex/context/base/font-pre.mkiv145
-rw-r--r--tex/context/base/font-run.mkiv10
-rw-r--r--tex/context/base/font-sel.lua172
-rw-r--r--tex/context/base/font-sel.mkvi178
-rw-r--r--tex/context/base/font-set.mkvi50
-rw-r--r--tex/context/base/font-sol.lua232
-rw-r--r--tex/context/base/font-sol.mkvi32
-rw-r--r--tex/context/base/font-sty.mkvi6
-rw-r--r--tex/context/base/font-sym.mkvi18
-rw-r--r--tex/context/base/font-syn.lua165
-rw-r--r--tex/context/base/font-tfm.lua12
-rw-r--r--tex/context/base/font-tra.mkiv18
-rw-r--r--tex/context/base/font-uni.mkiv10
-rw-r--r--tex/context/base/font-var.mkvi3
-rw-r--r--tex/context/base/grph-epd.lua9
-rw-r--r--tex/context/base/grph-epd.mkiv6
-rw-r--r--tex/context/base/grph-fig.mkiv6
-rw-r--r--tex/context/base/grph-fil.lua6
-rw-r--r--tex/context/base/grph-inc.lua717
-rw-r--r--tex/context/base/grph-inc.mkiv182
-rw-r--r--tex/context/base/grph-raw.lua15
-rw-r--r--tex/context/base/grph-raw.mkiv16
-rw-r--r--tex/context/base/grph-trf.mkiv16
-rw-r--r--tex/context/base/hand-ini.mkiv12
-rw-r--r--tex/context/base/java-imp-fld.mkiv33
-rw-r--r--tex/context/base/java-ini.lua52
-rw-r--r--tex/context/base/java-ini.mkiv20
-rw-r--r--tex/context/base/l-boolean.lua6
-rw-r--r--tex/context/base/l-dir.lua462
-rw-r--r--tex/context/base/l-file.lua193
-rw-r--r--tex/context/base/l-io.lua7
-rw-r--r--tex/context/base/l-lpeg.lua311
-rw-r--r--tex/context/base/l-lua.lua28
-rw-r--r--tex/context/base/l-math.lua4
-rw-r--r--tex/context/base/l-md5.lua64
-rw-r--r--tex/context/base/l-os.lua36
-rw-r--r--tex/context/base/l-sandbox.lua271
-rw-r--r--tex/context/base/l-string.lua7
-rw-r--r--tex/context/base/l-table.lua429
-rw-r--r--tex/context/base/l-unicode.lua710
-rw-r--r--tex/context/base/l-url.lua78
-rw-r--r--tex/context/base/lang-def.mkiv38
-rw-r--r--tex/context/base/lang-dis.lua203
-rw-r--r--tex/context/base/lang-hyp.lua1648
-rw-r--r--tex/context/base/lang-hyp.mkiv267
-rw-r--r--tex/context/base/lang-ini.lua121
-rw-r--r--tex/context/base/lang-ini.mkiv140
-rw-r--r--tex/context/base/lang-lab.lua49
-rw-r--r--tex/context/base/lang-lab.mkiv55
-rw-r--r--tex/context/base/lang-mis.mkiv229
-rw-r--r--tex/context/base/lang-rep.lua174
-rw-r--r--tex/context/base/lang-rep.mkiv75
-rw-r--r--tex/context/base/lang-txt.lua227
-rw-r--r--tex/context/base/lang-url.lua167
-rw-r--r--tex/context/base/lang-url.mkiv130
-rw-r--r--tex/context/base/lang-wrd.lua66
-rw-r--r--tex/context/base/lang-wrd.mkiv9
-rw-r--r--tex/context/base/lpdf-ano.lua960
-rw-r--r--tex/context/base/lpdf-col.lua138
-rw-r--r--tex/context/base/lpdf-epa.lua240
-rw-r--r--tex/context/base/lpdf-epd.lua809
-rw-r--r--tex/context/base/lpdf-fld.lua202
-rw-r--r--tex/context/base/lpdf-fmt.lua16
-rw-r--r--tex/context/base/lpdf-grp.lua6
-rw-r--r--tex/context/base/lpdf-ini.lua986
-rw-r--r--tex/context/base/lpdf-mis.lua238
-rw-r--r--tex/context/base/lpdf-mov.lua14
-rw-r--r--tex/context/base/lpdf-nod.lua157
-rw-r--r--tex/context/base/lpdf-pda.xml9
-rw-r--r--tex/context/base/lpdf-pdx.xml8
-rw-r--r--tex/context/base/lpdf-ren.lua124
-rw-r--r--tex/context/base/lpdf-swf.lua4
-rw-r--r--tex/context/base/lpdf-tag.lua597
-rw-r--r--tex/context/base/lpdf-u3d.lua15
-rw-r--r--tex/context/base/lpdf-wid.lua90
-rw-r--r--tex/context/base/lpdf-xmp.lua60
-rw-r--r--tex/context/base/luat-bas.mkiv3
-rw-r--r--tex/context/base/luat-cbk.lua2
-rw-r--r--tex/context/base/luat-cnf.lua85
-rw-r--r--tex/context/base/luat-cod.lua7
-rw-r--r--tex/context/base/luat-env.lua14
-rw-r--r--tex/context/base/luat-exe.lua143
-rw-r--r--tex/context/base/luat-fmt.lua4
-rw-r--r--tex/context/base/luat-ini.lua184
-rw-r--r--tex/context/base/luat-ini.mkiv198
-rw-r--r--tex/context/base/luat-iop.lua203
-rw-r--r--tex/context/base/luat-lib.mkiv6
-rw-r--r--tex/context/base/luat-mac.lua11
-rw-r--r--tex/context/base/luat-run.lua107
-rw-r--r--tex/context/base/luat-sto.lua122
-rw-r--r--tex/context/base/luat-usr.lua192
-rw-r--r--tex/context/base/luat-usr.mkiv126
-rw-r--r--tex/context/base/lxml-aux.lua330
-rw-r--r--tex/context/base/lxml-css.lua14
-rw-r--r--tex/context/base/lxml-ini.lua142
-rw-r--r--tex/context/base/lxml-ini.mkiv349
-rw-r--r--tex/context/base/lxml-lpt.lua74
-rw-r--r--tex/context/base/lxml-sor.mkiv5
-rw-r--r--tex/context/base/lxml-tab.lua186
-rw-r--r--tex/context/base/lxml-tex.lua614
-rw-r--r--tex/context/base/m-chart.lua89
-rw-r--r--tex/context/base/m-chart.mkvi10
-rw-r--r--tex/context/base/m-hemistich.mkiv10
-rw-r--r--tex/context/base/m-matrix.mkiv495
-rw-r--r--tex/context/base/m-morse.mkvi273
-rw-r--r--tex/context/base/m-oldbibtex.mkiv16
-rw-r--r--tex/context/base/m-oldfun.mkiv2
-rw-r--r--tex/context/base/m-oldnum.mkiv32
-rw-r--r--tex/context/base/m-pipemode.mkiv7
-rw-r--r--tex/context/base/m-pstricks.mkii2
-rw-r--r--tex/context/base/m-punk.mkiv30
-rw-r--r--tex/context/base/m-scite.mkiv275
-rw-r--r--tex/context/base/m-spreadsheet.lua4
-rw-r--r--tex/context/base/m-spreadsheet.mkiv6
-rw-r--r--tex/context/base/m-steps.lua21
-rw-r--r--tex/context/base/m-steps.mkvi2
-rw-r--r--tex/context/base/m-translate.mkiv31
-rw-r--r--tex/context/base/m-visual.mkiv8
-rw-r--r--tex/context/base/math-act.lua136
-rw-r--r--tex/context/base/math-ali.mkiv54
-rw-r--r--tex/context/base/math-arr.mkiv2
-rw-r--r--tex/context/base/math-def.mkiv4
-rw-r--r--tex/context/base/math-dir.lua48
-rw-r--r--tex/context/base/math-fbk.lua99
-rw-r--r--tex/context/base/math-fen.mkiv217
-rw-r--r--tex/context/base/math-frc.lua18
-rw-r--r--tex/context/base/math-frc.mkiv275
-rw-r--r--tex/context/base/math-ini.lua180
-rw-r--r--tex/context/base/math-ini.mkiv630
-rw-r--r--tex/context/base/math-int.mkiv2
-rw-r--r--tex/context/base/math-map.lua292
-rw-r--r--tex/context/base/math-noa.lua779
-rw-r--r--tex/context/base/math-rad.mkvi34
-rw-r--r--tex/context/base/math-ren.lua8
-rw-r--r--tex/context/base/math-stc.mkvi441
-rw-r--r--tex/context/base/math-tag.lua637
-rw-r--r--tex/context/base/math-vfu.lua46
-rw-r--r--tex/context/base/meta-fig.mkiv5
-rw-r--r--tex/context/base/meta-fnt.lua43
-rw-r--r--tex/context/base/meta-fnt.mkiv8
-rw-r--r--tex/context/base/meta-fun.lua4
-rw-r--r--tex/context/base/meta-imp-dum.mkiv3
-rw-r--r--tex/context/base/meta-imp-tab.mkiv73
-rw-r--r--tex/context/base/meta-imp-txt.mkiv9
-rw-r--r--tex/context/base/meta-ini.lua28
-rw-r--r--tex/context/base/meta-ini.mkiv346
-rw-r--r--tex/context/base/meta-pag.mkiv126
-rw-r--r--tex/context/base/meta-pdf.lua17
-rw-r--r--tex/context/base/meta-pdf.mkiv2
-rw-r--r--tex/context/base/meta-pdh.mkiv10
-rw-r--r--tex/context/base/meta-tex.lua47
-rw-r--r--tex/context/base/meta-tex.mkiv16
-rw-r--r--tex/context/base/mlib-ctx.lua232
-rw-r--r--tex/context/base/mlib-ctx.mkiv2
-rw-r--r--tex/context/base/mlib-int.lua153
-rw-r--r--tex/context/base/mlib-lua.lua383
-rw-r--r--tex/context/base/mlib-pdf.lua337
-rw-r--r--tex/context/base/mlib-pdf.mkiv59
-rw-r--r--tex/context/base/mlib-pps.lua510
-rw-r--r--tex/context/base/mlib-pps.mkiv58
-rw-r--r--tex/context/base/mlib-run.lua416
-rw-r--r--tex/context/base/mtx-context-arrange.tex25
-rw-r--r--tex/context/base/mtx-context-listing.tex29
-rw-r--r--tex/context/base/mtx-context-precache.tex161
-rw-r--r--tex/context/base/mult-aux.lua14
-rw-r--r--tex/context/base/mult-aux.mkiv439
-rw-r--r--tex/context/base/mult-chk.lua21
-rw-r--r--tex/context/base/mult-chk.mkiv7
-rw-r--r--tex/context/base/mult-de.mkii24
-rw-r--r--tex/context/base/mult-def.lua122
-rw-r--r--tex/context/base/mult-def.mkiv199
-rw-r--r--tex/context/base/mult-en.mkii22
-rw-r--r--tex/context/base/mult-fr.mkii26
-rw-r--r--tex/context/base/mult-fun.lua96
-rw-r--r--tex/context/base/mult-ini.lua78
-rw-r--r--tex/context/base/mult-ini.mkiv24
-rw-r--r--tex/context/base/mult-it.mkii26
-rw-r--r--tex/context/base/mult-low.lua93
-rw-r--r--tex/context/base/mult-mes.lua15
-rw-r--r--tex/context/base/mult-mps.lua67
-rw-r--r--tex/context/base/mult-nl.mkii36
-rw-r--r--tex/context/base/mult-pe.mkii26
-rw-r--r--tex/context/base/mult-prm.lua14
-rw-r--r--tex/context/base/mult-ro.mkii24
-rw-r--r--tex/context/base/mult-sys.mkiv57
-rw-r--r--tex/context/base/node-acc.lua130
-rw-r--r--tex/context/base/node-aux.lua422
-rw-r--r--tex/context/base/node-bck.lua129
-rw-r--r--tex/context/base/node-bck.mkiv14
-rw-r--r--tex/context/base/node-fin.lua338
-rw-r--r--tex/context/base/node-fin.mkiv10
-rw-r--r--tex/context/base/node-fnt.lua245
-rw-r--r--tex/context/base/node-ini.lua10
-rw-r--r--tex/context/base/node-ini.mkiv7
-rw-r--r--tex/context/base/node-inj.lua239
-rw-r--r--tex/context/base/node-ltp.lua1637
-rw-r--r--tex/context/base/node-met.lua55
-rw-r--r--tex/context/base/node-mig.lua97
-rw-r--r--tex/context/base/node-nut.lua790
-rw-r--r--tex/context/base/node-ppt.lua476
-rw-r--r--tex/context/base/node-pro.lua111
-rw-r--r--tex/context/base/node-ref.lua605
-rw-r--r--tex/context/base/node-res.lua618
-rw-r--r--tex/context/base/node-rul.lua174
-rw-r--r--tex/context/base/node-rul.mkiv88
-rw-r--r--tex/context/base/node-shp.lua144
-rw-r--r--tex/context/base/node-tex.lua27
-rw-r--r--tex/context/base/node-tra.lua514
-rw-r--r--tex/context/base/node-tsk.lua8
-rw-r--r--tex/context/base/node-tst.lua69
-rw-r--r--tex/context/base/node-typ.lua73
-rw-r--r--tex/context/base/pack-bck.mkvi9
-rw-r--r--tex/context/base/pack-box.mkiv115
-rw-r--r--tex/context/base/pack-com.mkiv28
-rw-r--r--tex/context/base/pack-fen.mkiv18
-rw-r--r--tex/context/base/pack-lyr.mkiv33
-rw-r--r--tex/context/base/pack-mis.mkvi3
-rw-r--r--tex/context/base/pack-mrl.mkiv16
-rw-r--r--tex/context/base/pack-obj.lua67
-rw-r--r--tex/context/base/pack-obj.mkiv15
-rw-r--r--tex/context/base/pack-pos.mkiv2
-rw-r--r--tex/context/base/pack-rul.lua131
-rw-r--r--tex/context/base/pack-rul.mkiv313
-rw-r--r--tex/context/base/page-app.mkiv3
-rw-r--r--tex/context/base/page-bck.mkiv56
-rw-r--r--tex/context/base/page-brk.mkiv261
-rw-r--r--tex/context/base/page-flt.lua181
-rw-r--r--tex/context/base/page-flt.mkiv30
-rw-r--r--tex/context/base/page-flw.mkiv7
-rw-r--r--tex/context/base/page-grd.mkiv35
-rw-r--r--tex/context/base/page-imp.mkiv16
-rw-r--r--tex/context/base/page-ini.mkiv13
-rw-r--r--tex/context/base/page-inj.lua29
-rw-r--r--tex/context/base/page-inj.mkvi20
-rw-r--r--tex/context/base/page-ins.lua54
-rw-r--r--tex/context/base/page-ins.mkiv22
-rw-r--r--tex/context/base/page-lay.mkiv67
-rw-r--r--tex/context/base/page-lin.lua298
-rw-r--r--tex/context/base/page-lin.mkiv573
-rw-r--r--tex/context/base/page-lin.mkvi590
-rw-r--r--tex/context/base/page-mak.mkvi64
-rw-r--r--tex/context/base/page-mbk.mkvi2
-rw-r--r--tex/context/base/page-mix.lua590
-rw-r--r--tex/context/base/page-mix.mkiv241
-rw-r--r--tex/context/base/page-mul.mkiv12
-rw-r--r--tex/context/base/page-one.mkiv15
-rw-r--r--tex/context/base/page-pst.lua41
-rw-r--r--tex/context/base/page-pst.mkiv7
-rw-r--r--tex/context/base/page-run.mkiv40
-rw-r--r--tex/context/base/page-sel.mkvi2
-rw-r--r--tex/context/base/page-set.mkiv14
-rw-r--r--tex/context/base/page-sid.mkiv142
-rw-r--r--tex/context/base/page-str.lua62
-rw-r--r--tex/context/base/page-str.mkiv28
-rw-r--r--tex/context/base/page-txt.mkvi31
-rw-r--r--tex/context/base/pdfr-def.mkii2
-rw-r--r--tex/context/base/phys-dim.lua125
-rw-r--r--tex/context/base/phys-dim.mkiv19
-rw-r--r--tex/context/base/ppchtex.mkiv38
-rw-r--r--tex/context/base/publ-aut.lua876
-rw-r--r--tex/context/base/publ-dat.lua1194
-rw-r--r--tex/context/base/publ-fnd.lua298
-rw-r--r--tex/context/base/publ-imp-apa.lua523
-rw-r--r--tex/context/base/publ-imp-apa.mkvi1465
-rw-r--r--tex/context/base/publ-imp-aps.lua479
-rw-r--r--tex/context/base/publ-imp-aps.mkvi1110
-rw-r--r--tex/context/base/publ-imp-author.mkvi303
-rw-r--r--tex/context/base/publ-imp-cite.mkvi281
-rw-r--r--tex/context/base/publ-imp-commands.mkvi15
-rw-r--r--tex/context/base/publ-imp-default.lua124
-rw-r--r--tex/context/base/publ-imp-default.mkvi482
-rw-r--r--tex/context/base/publ-imp-definitions.mkvi123
-rw-r--r--tex/context/base/publ-imp-list.mkvi96
-rw-r--r--tex/context/base/publ-imp-page.mkvi51
-rw-r--r--tex/context/base/publ-imp-replacements.lua23
-rw-r--r--tex/context/base/publ-imp-test.bib294
-rw-r--r--tex/context/base/publ-ini.lua3295
-rw-r--r--tex/context/base/publ-ini.mkiv1813
-rw-r--r--tex/context/base/publ-jrn.lua189
-rw-r--r--tex/context/base/publ-old.mkiv22
-rw-r--r--tex/context/base/publ-oth.lua154
-rw-r--r--tex/context/base/publ-reg.lua227
-rw-r--r--tex/context/base/publ-sor.lua377
-rw-r--r--tex/context/base/publ-tra.lua504
-rw-r--r--tex/context/base/publ-tra.mkiv87
-rw-r--r--tex/context/base/publ-usr.lua128
-rw-r--r--tex/context/base/publ-usr.mkiv2
-rw-r--r--tex/context/base/publ-xml.mkiv114
-rw-r--r--tex/context/base/regi-ini.lua234
-rw-r--r--tex/context/base/regi-ini.mkiv32
-rw-r--r--tex/context/base/regi-pdfdoc.lua26
-rw-r--r--tex/context/base/s-abr-01.tex9
-rw-r--r--tex/context/base/s-art-01.mkiv4
-rw-r--r--tex/context/base/s-figures-names.mkiv99
-rw-r--r--tex/context/base/s-fnt-10.mkiv4
-rw-r--r--tex/context/base/s-fnt-21.mkiv2
-rw-r--r--tex/context/base/s-fnt-24.mkiv4
-rw-r--r--tex/context/base/s-fonts-coverage.lua14
-rw-r--r--tex/context/base/s-fonts-ligatures.mkiv292
-rw-r--r--tex/context/base/s-fonts-shapes.lua89
-rw-r--r--tex/context/base/s-fonts-tables.lua9
-rw-r--r--tex/context/base/s-inf-01.mkvi12
-rw-r--r--tex/context/base/s-inf-03.mkiv29
-rw-r--r--tex/context/base/s-languages-hyphenation.lua18
-rw-r--r--tex/context/base/s-languages-hyphenation.mkiv6
-rw-r--r--tex/context/base/s-languages-system.lua63
-rw-r--r--tex/context/base/s-languages-system.mkiv2
-rw-r--r--tex/context/base/s-languages-words.lua32
-rw-r--r--tex/context/base/s-languages-words.mkiv22
-rw-r--r--tex/context/base/s-map-10.mkiv16
-rw-r--r--tex/context/base/s-math-characters.lua286
-rw-r--r--tex/context/base/s-math-characters.mkiv15
-rw-r--r--tex/context/base/s-math-coverage.lua223
-rw-r--r--tex/context/base/s-math-coverage.mkiv10
-rw-r--r--tex/context/base/s-math-repertoire.mkiv57
-rw-r--r--tex/context/base/s-pre-17.mkiv2
-rw-r--r--tex/context/base/s-references-show.mkiv132
-rw-r--r--tex/context/base/s-structure-sections.mkiv80
-rw-r--r--tex/context/base/s-syntax.mkii (renamed from tex/context/base/s-syn-01.tex)2
-rw-r--r--tex/context/base/s-syntax.mkiv96
-rw-r--r--tex/context/base/s-typesetting-kerning.mkiv209
-rw-r--r--tex/context/base/s-youless.mkiv11
-rw-r--r--tex/context/base/scrn-bar.mkvi2
-rw-r--r--tex/context/base/scrn-but.lua10
-rw-r--r--tex/context/base/scrn-but.mkvi30
-rw-r--r--tex/context/base/scrn-fld.lua163
-rw-r--r--tex/context/base/scrn-fld.mkvi139
-rw-r--r--tex/context/base/scrn-hlp.lua123
-rw-r--r--tex/context/base/scrn-hlp.mkvi18
-rw-r--r--tex/context/base/scrn-ini.lua19
-rw-r--r--tex/context/base/scrn-ini.mkvi18
-rw-r--r--tex/context/base/scrn-pag.lua37
-rw-r--r--tex/context/base/scrn-pag.mkvi77
-rw-r--r--tex/context/base/scrn-ref.lua18
-rw-r--r--tex/context/base/scrn-ref.mkvi14
-rw-r--r--tex/context/base/scrn-wid.lua211
-rw-r--r--tex/context/base/scrn-wid.mkvi195
-rw-r--r--tex/context/base/scrp-cjk.lua131
-rw-r--r--tex/context/base/scrp-eth.lua22
-rw-r--r--tex/context/base/scrp-ini.lua113
-rw-r--r--tex/context/base/scrp-ini.mkiv8
-rw-r--r--tex/context/base/sort-ini.lua286
-rw-r--r--tex/context/base/sort-lan.lua2
-rw-r--r--tex/context/base/spac-adj.lua8
-rw-r--r--tex/context/base/spac-adj.mkiv7
-rw-r--r--tex/context/base/spac-ali.lua52
-rw-r--r--tex/context/base/spac-ali.mkiv95
-rw-r--r--tex/context/base/spac-chr.lua131
-rw-r--r--tex/context/base/spac-chr.mkiv3
-rw-r--r--tex/context/base/spac-def.mkiv2
-rw-r--r--tex/context/base/spac-hor.lua23
-rw-r--r--tex/context/base/spac-hor.mkiv104
-rw-r--r--tex/context/base/spac-lin.mkiv10
-rw-r--r--tex/context/base/spac-pag.mkiv11
-rw-r--r--tex/context/base/spac-par.mkiv3
-rw-r--r--tex/context/base/spac-prf.mkiv31
-rw-r--r--tex/context/base/spac-ver.lua951
-rw-r--r--tex/context/base/spac-ver.mkiv251
-rw-r--r--tex/context/base/status-files.pdfbin24556 -> 24472 bytes
-rw-r--r--tex/context/base/status-lua.pdfbin228200 -> 251187 bytes
-rw-r--r--tex/context/base/status-mkiv.lua177
-rw-r--r--tex/context/base/strc-bkm.lua427
-rw-r--r--tex/context/base/strc-bkm.mkiv68
-rw-r--r--tex/context/base/strc-blk.lua31
-rw-r--r--tex/context/base/strc-blk.mkiv13
-rw-r--r--tex/context/base/strc-con.mkvi145
-rw-r--r--tex/context/base/strc-def.mkiv20
-rw-r--r--tex/context/base/strc-des.mkvi15
-rw-r--r--tex/context/base/strc-doc.lua329
-rw-r--r--tex/context/base/strc-doc.mkiv10
-rw-r--r--tex/context/base/strc-enu.mkvi11
-rw-r--r--tex/context/base/strc-flt.mkvi253
-rw-r--r--tex/context/base/strc-ini.lua84
-rw-r--r--tex/context/base/strc-ini.mkvi10
-rw-r--r--tex/context/base/strc-itm.lua75
-rw-r--r--tex/context/base/strc-itm.mkvi192
-rw-r--r--tex/context/base/strc-lab.mkiv31
-rw-r--r--tex/context/base/strc-lev.lua43
-rw-r--r--tex/context/base/strc-lev.mkvi12
-rw-r--r--tex/context/base/strc-lnt.mkvi8
-rw-r--r--tex/context/base/strc-lst.lua1004
-rw-r--r--tex/context/base/strc-lst.mkvi404
-rw-r--r--tex/context/base/strc-mar.lua64
-rw-r--r--tex/context/base/strc-mar.mkiv38
-rw-r--r--tex/context/base/strc-mat.lua48
-rw-r--r--tex/context/base/strc-mat.mkiv18
-rw-r--r--tex/context/base/strc-not.lua108
-rw-r--r--tex/context/base/strc-not.mkvi127
-rw-r--r--tex/context/base/strc-num.lua133
-rw-r--r--tex/context/base/strc-num.mkiv586
-rw-r--r--tex/context/base/strc-pag.lua109
-rw-r--r--tex/context/base/strc-pag.mkiv49
-rw-r--r--tex/context/base/strc-ref.lua1576
-rw-r--r--tex/context/base/strc-ref.mkvi644
-rw-r--r--tex/context/base/strc-reg.lua928
-rw-r--r--tex/context/base/strc-reg.mkiv469
-rw-r--r--tex/context/base/strc-ren.mkiv40
-rw-r--r--tex/context/base/strc-rsc.lua51
-rw-r--r--tex/context/base/strc-sbe.mkiv12
-rw-r--r--tex/context/base/strc-sec.mkiv272
-rw-r--r--tex/context/base/strc-syn.lua284
-rw-r--r--tex/context/base/strc-syn.mkiv628
-rw-r--r--tex/context/base/strc-tag.lua633
-rw-r--r--tex/context/base/strc-tag.mkiv439
-rw-r--r--tex/context/base/supp-box.lua353
-rw-r--r--tex/context/base/supp-box.mkiv141
-rw-r--r--tex/context/base/supp-mat.mkiv34
-rw-r--r--tex/context/base/supp-ran.lua94
-rw-r--r--tex/context/base/supp-ran.mkiv29
-rw-r--r--tex/context/base/symb-imp-cc.mkiv4
-rw-r--r--tex/context/base/symb-ini.lua29
-rw-r--r--tex/context/base/symb-ini.mkiv13
-rw-r--r--tex/context/base/syst-aux.lua200
-rw-r--r--tex/context/base/syst-aux.mkiv510
-rw-r--r--tex/context/base/syst-con.lua98
-rw-r--r--tex/context/base/syst-con.mkiv37
-rw-r--r--tex/context/base/syst-ini.mkiv76
-rw-r--r--tex/context/base/syst-lua.lua195
-rw-r--r--tex/context/base/syst-lua.mkiv62
-rw-r--r--tex/context/base/syst-rtp.mkiv2
-rw-r--r--tex/context/base/tabl-ltb.mkiv4
-rw-r--r--tex/context/base/tabl-ntb.mkii29
-rw-r--r--tex/context/base/tabl-ntb.mkiv690
-rw-r--r--tex/context/base/tabl-tab.mkiv15
-rw-r--r--tex/context/base/tabl-tbl.lua29
-rw-r--r--tex/context/base/tabl-tbl.mkiv119
-rw-r--r--tex/context/base/tabl-tsp.mkiv2
-rw-r--r--tex/context/base/tabl-xnt.mkvi2
-rw-r--r--tex/context/base/tabl-xtb.lua584
-rw-r--r--tex/context/base/tabl-xtb.mkvi179
-rw-r--r--tex/context/base/task-ini.lua60
-rw-r--r--tex/context/base/toks-ini.lua524
-rw-r--r--tex/context/base/toks-ini.mkiv52
-rw-r--r--tex/context/base/toks-map.lua70
-rw-r--r--tex/context/base/toks-map.mkiv63
-rw-r--r--tex/context/base/toks-scn.lua437
-rw-r--r--tex/context/base/toks-tra.lua298
-rw-r--r--tex/context/base/toks-tra.mkiv31
-rw-r--r--tex/context/base/trac-ctx.lua42
-rw-r--r--tex/context/base/trac-ctx.mkiv9
-rw-r--r--tex/context/base/trac-deb.lua180
-rw-r--r--tex/context/base/trac-deb.mkiv34
-rw-r--r--tex/context/base/trac-inf.lua40
-rw-r--r--tex/context/base/trac-jus.lua66
-rw-r--r--tex/context/base/trac-jus.mkiv2
-rw-r--r--tex/context/base/trac-lmx.lua9
-rw-r--r--tex/context/base/trac-log.lua383
-rw-r--r--tex/context/base/trac-par.lua39
-rw-r--r--tex/context/base/trac-pro.lua6
-rw-r--r--tex/context/base/trac-set.lua5
-rw-r--r--tex/context/base/trac-tex.lua77
-rw-r--r--tex/context/base/trac-tim.lua2
-rw-r--r--tex/context/base/trac-vis.lua431
-rw-r--r--tex/context/base/trac-vis.mkiv56
-rw-r--r--tex/context/base/type-imp-buy.mkiv136
-rw-r--r--tex/context/base/type-imp-cambria.mkiv8
-rw-r--r--tex/context/base/type-imp-dejavu.mkiv63
-rw-r--r--tex/context/base/type-imp-ebgaramond.mkiv45
-rw-r--r--tex/context/base/type-imp-ipaex.mkiv137
-rw-r--r--tex/context/base/type-imp-latinmodern.mkiv5
-rw-r--r--tex/context/base/type-imp-lato.mkiv56
-rw-r--r--tex/context/base/type-imp-texgyre.mkiv15
-rw-r--r--tex/context/base/type-ini.lua121
-rw-r--r--tex/context/base/type-ini.mkvi34
-rw-r--r--tex/context/base/type-run.mkiv4
-rw-r--r--tex/context/base/typo-bld.lua55
-rw-r--r--tex/context/base/typo-bld.mkiv14
-rw-r--r--tex/context/base/typo-brk.lua159
-rw-r--r--tex/context/base/typo-brk.mkiv28
-rw-r--r--tex/context/base/typo-cap.lua385
-rw-r--r--tex/context/base/typo-cap.mkiv11
-rw-r--r--tex/context/base/typo-chr.lua251
-rw-r--r--tex/context/base/typo-chr.mkiv82
-rw-r--r--tex/context/base/typo-cln.lua24
-rw-r--r--tex/context/base/typo-cln.mkiv2
-rw-r--r--tex/context/base/typo-del.mkiv250
-rw-r--r--tex/context/base/typo-dha.lua81
-rw-r--r--tex/context/base/typo-dig.lua71
-rw-r--r--tex/context/base/typo-dig.mkiv2
-rw-r--r--tex/context/base/typo-dir.lua112
-rw-r--r--tex/context/base/typo-dir.mkiv23
-rw-r--r--tex/context/base/typo-drp.lua405
-rw-r--r--tex/context/base/typo-drp.mkiv60
-rw-r--r--tex/context/base/typo-dua.lua97
-rw-r--r--tex/context/base/typo-dub.lua106
-rw-r--r--tex/context/base/typo-fln.lua117
-rw-r--r--tex/context/base/typo-fln.mkiv20
-rw-r--r--tex/context/base/typo-inj.lua94
-rw-r--r--tex/context/base/typo-inj.mkiv77
-rw-r--r--tex/context/base/typo-itc.lua457
-rw-r--r--tex/context/base/typo-itc.mkvi6
-rw-r--r--tex/context/base/typo-itm.mkiv4
-rw-r--r--tex/context/base/typo-krn.lua586
-rw-r--r--tex/context/base/typo-krn.mkiv11
-rw-r--r--tex/context/base/typo-lan.lua8
-rw-r--r--tex/context/base/typo-lan.mkiv2
-rw-r--r--tex/context/base/typo-lig.mkiv31
-rw-r--r--tex/context/base/typo-man.lua113
-rw-r--r--tex/context/base/typo-mar.lua297
-rw-r--r--tex/context/base/typo-mar.mkiv76
-rw-r--r--tex/context/base/typo-pag.lua99
-rw-r--r--tex/context/base/typo-par.mkiv29
-rw-r--r--tex/context/base/typo-prc.lua28
-rw-r--r--tex/context/base/typo-prc.mkvi15
-rw-r--r--tex/context/base/typo-rep.lua55
-rw-r--r--tex/context/base/typo-rep.mkiv2
-rw-r--r--tex/context/base/typo-spa.lua86
-rw-r--r--tex/context/base/typo-spa.mkiv18
-rw-r--r--tex/context/base/typo-sus.lua311
-rw-r--r--tex/context/base/typo-sus.mkiv51
-rw-r--r--tex/context/base/typo-tal.lua332
-rw-r--r--tex/context/base/typo-tal.mkiv61
-rw-r--r--tex/context/base/typo-txt.mkvi49
-rw-r--r--tex/context/base/typo-wrp.lua76
-rw-r--r--tex/context/base/typo-wrp.mkiv65
-rw-r--r--tex/context/base/unic-ini.lua14
-rw-r--r--tex/context/base/unic-ini.mkiv3
-rw-r--r--tex/context/base/util-deb.lua46
-rw-r--r--tex/context/base/util-dim.lua97
-rw-r--r--tex/context/base/util-env.lua42
-rw-r--r--tex/context/base/util-prs.lua158
-rw-r--r--tex/context/base/util-sbx.lua415
-rw-r--r--tex/context/base/util-sci.lua280
-rw-r--r--tex/context/base/util-seq.lua45
-rw-r--r--tex/context/base/util-sql-loggers.lua2
-rw-r--r--tex/context/base/util-sta.lua22
-rw-r--r--tex/context/base/util-str.lua354
-rw-r--r--tex/context/base/util-tab.lua107
-rw-r--r--tex/context/base/util-tpl.lua41
-rw-r--r--tex/context/base/x-asciimath.lua2279
-rw-r--r--tex/context/base/x-asciimath.mkiv421
-rw-r--r--tex/context/base/x-calcmath.lua7
-rw-r--r--tex/context/base/x-cals.lua7
-rw-r--r--tex/context/base/x-ct.lua2
-rw-r--r--tex/context/base/x-html.mkiv379
-rw-r--r--tex/context/base/x-math-svg.lua176
-rw-r--r--tex/context/base/x-mathml-basics.mkiv276
-rw-r--r--tex/context/base/x-mathml-html.mkiv40
-rw-r--r--tex/context/base/x-mathml.lua230
-rw-r--r--tex/context/base/x-mathml.mkiv433
-rw-r--r--tex/context/base/x-mathml.xsd6
-rw-r--r--tex/context/base/x-res-01.mkiv2
-rw-r--r--tex/context/base/x-set-11.mkiv123
-rw-r--r--tex/context/base/x-set-12.mkiv3
-rw-r--r--tex/context/base/x-xtag.mkiv2
-rw-r--r--tex/context/bib/bibl-apa-it.tex385
-rw-r--r--tex/context/bib/sample.bib1
-rw-r--r--tex/context/extra/showunic.tex130
-rw-r--r--tex/context/fonts/lm-math.lfg2
-rw-r--r--tex/context/fonts/lm.lfg15
-rw-r--r--tex/context/fonts/texgyre.lfg6
-rw-r--r--tex/context/fonts/treatments.lfg62
-rw-r--r--tex/context/foxet/fe-bryson.xml12
-rw-r--r--tex/context/foxet/fe-ward.xml8
-rw-r--r--tex/context/foxet/fe-zapf.xml14
-rw-r--r--tex/context/foxet/fo-0101.fo17
-rw-r--r--tex/context/foxet/fo-0102.fo25
-rw-r--r--tex/context/foxet/fo-0103.fo21
-rw-r--r--tex/context/foxet/fo-0201.fo22
-rw-r--r--tex/context/foxet/fo-0301.fo56
-rw-r--r--tex/context/foxet/fo-0601.fo29
-rw-r--r--tex/context/foxet/fo-0602.fo27
-rw-r--r--tex/context/foxet/fo-0603.fo26
-rw-r--r--tex/context/foxet/fo-0604.fo26
-rw-r--r--tex/context/foxet/fo-0611.fo21
-rw-r--r--tex/context/foxet/fo-0612.fo21
-rw-r--r--tex/context/foxet/fo-0613.fo21
-rw-r--r--tex/context/foxet/fo-0621.fo106
-rw-r--r--tex/context/foxet/fo-0641.fo25
-rw-r--r--tex/context/foxet/fo-0642.fo27
-rw-r--r--tex/context/foxet/fo-0643.fo27
-rw-r--r--tex/context/foxet/fo-0644.fo27
-rw-r--r--tex/context/foxet/fo-0650.fo26
-rw-r--r--tex/context/foxet/fo-0651.fo26
-rw-r--r--tex/context/foxet/fo-0701.fo39
-rw-r--r--tex/context/foxet/fo-0801.fo55
-rw-r--r--tex/context/foxet/fo-0901.fo58
-rw-r--r--tex/context/foxet/fo-0902.fo33
-rw-r--r--tex/context/foxet/fo-1001.fo63
-rw-r--r--tex/context/foxet/fo-1002.fo31
-rw-r--r--tex/context/foxet/fo-1003.fo31
-rw-r--r--tex/context/foxet/fo-1004.fo35
-rw-r--r--tex/context/foxet/fo-1101.fo63
-rw-r--r--tex/context/foxet/fo-1102.fo128
-rw-r--r--tex/context/foxet/fo-1103.fo85
-rw-r--r--tex/context/foxet/fo-1104.fo28
-rw-r--r--tex/context/foxet/fo-1201.fo40
-rw-r--r--tex/context/interface/cont-cs.xml1
-rw-r--r--tex/context/interface/cont-de.xml1
-rw-r--r--tex/context/interface/cont-en.xml1
-rw-r--r--tex/context/interface/cont-fr.xml1
-rw-r--r--tex/context/interface/cont-it.xml1
-rw-r--r--tex/context/interface/cont-nl.xml5
-rw-r--r--tex/context/interface/cont-pe.xml1
-rw-r--r--tex/context/interface/cont-ro.xml1
-rw-r--r--tex/context/interface/keys-cs.xml24
-rw-r--r--tex/context/interface/keys-de.xml24
-rw-r--r--tex/context/interface/keys-en.xml22
-rw-r--r--tex/context/interface/keys-fr.xml26
-rw-r--r--tex/context/interface/keys-it.xml26
-rw-r--r--tex/context/interface/keys-nl.xml36
-rw-r--r--tex/context/interface/keys-pe.xml26
-rw-r--r--tex/context/interface/keys-ro.xml24
-rw-r--r--tex/context/patterns/lang-it.lua9
-rw-r--r--tex/context/patterns/lang-it.pat7
-rw-r--r--tex/context/patterns/lang-it.rme3
-rw-r--r--tex/context/sample/cervantes-es.tex6
-rw-r--r--tex/context/sample/darwin.tex19
-rw-r--r--tex/context/sample/dawkins.tex6
-rw-r--r--tex/context/sample/douglas.tex8
-rw-r--r--tex/context/sample/quevedo-es.tex19
-rw-r--r--tex/context/sample/sample.tex80
-rw-r--r--tex/context/sample/samples.bib29
-rw-r--r--tex/context/sample/samples.tex78
-rw-r--r--tex/context/test/pdf-a1b-2005.mkiv6
-rw-r--r--tex/generic/context/luatex/luatex-basics-gen.lua39
-rw-r--r--tex/generic/context/luatex/luatex-basics-nod.lua123
-rw-r--r--tex/generic/context/luatex/luatex-fonts-cbk.lua117
-rw-r--r--tex/generic/context/luatex/luatex-fonts-enc.lua7
-rw-r--r--tex/generic/context/luatex/luatex-fonts-inj.lua1055
-rw-r--r--tex/generic/context/luatex/luatex-fonts-merged.lua3773
-rw-r--r--tex/generic/context/luatex/luatex-fonts-ota.lua459
-rw-r--r--tex/generic/context/luatex/luatex-fonts-otn.lua2893
-rw-r--r--tex/generic/context/luatex/luatex-fonts.lua21
-rw-r--r--tex/generic/context/luatex/luatex-math.tex79
-rw-r--r--tex/generic/context/luatex/luatex-mplib.tex1
-rw-r--r--tex/generic/context/luatex/luatex-plain.tex1
-rw-r--r--tex/generic/context/luatex/luatex-test.tex45
-rw-r--r--web2c/contextcnf.lua52
1050 files changed, 172103 insertions, 56516 deletions
diff --git a/COPYING b/COPYING
new file mode 100644
index 000000000..eb0e6c978
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,350 @@
+Context was released under the GNU General Public License, version 2,
+exactly [0]. The full license text [1] by the Free Software Foundation
+is reproduced below.
+
+[0] Taco posted an exhaustive treatment of Context licensing at
+ http://tex.stackexchange.com/a/12456/14066
+
+[1] http://www.gnu.org/licenses/gpl-2.0.txt
+
+------------------------------------------------------------------------
+
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/README.rst b/README.rst
new file mode 100644
index 000000000..3d716e98e
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,14 @@
+-----------------------------------------------------------------------
+ Context Mirror
+-----------------------------------------------------------------------
+
+This is a mirror repository of Context_, a document processing system
+built around Donald E. Knuth’s TeX_. Inconveniently, Context is
+supplied only as file archives and thus lacks version control. The
+mirror aims to provide a source-controlled repository to facilitate
+common operations like bisection. Still no commit messages, though,
+besides timestamps.
+
+.. _Context: http://www.pragma-ade.com
+.. _TeX: http://www-cs-faculty.stanford.edu/~uno/abcde.html
+
diff --git a/context/data/scite/context/documents/scite-context-readme.pdf b/context/data/scite/context/documents/scite-context-readme.pdf
new file mode 100644
index 000000000..2bd7d4216
--- /dev/null
+++ b/context/data/scite/context/documents/scite-context-readme.pdf
Binary files differ
diff --git a/context/data/scite/scite-context-readme.tex b/context/data/scite/context/documents/scite-context-readme.tex
index ef1475fa2..cbfc00a33 100644
--- a/context/data/scite/scite-context-readme.tex
+++ b/context/data/scite/context/documents/scite-context-readme.tex
@@ -191,60 +191,115 @@ You need to add this path to your local path definition. Installing \SCITE\ to
some known place has the advantage that you can move it around. There are no
special dependencies on the operating system.
+On \MSWINDOWS\ you can for instance install \SCITE\ in:
+
+\starttyping
+c:\data\system\scite
+\stoptyping
+
+and then end up with:
+
+\starttyping
+c:\data\system\scite\wscite
+\stoptyping
+
+and that is the path you need to add to your environment \type {PATH} variable.
+
+On \LINUX\ the files end up in:
+
+\starttyping
+/usr/bin
+/usr/share/scite
+\stoptyping
+
+The second path is where we will put more files.
+
+\subject{Installing \type {scintillua}}
+
Next you need to install the lpeg lexers. \footnote {Versions later than 2.11
will not run on \MSWINDOWS\ 2K. In that case you need to comment the external
-lexer import.} These can be fetched from:
+lexer import.} The library is part of the \type {textadept} editor by Mitchell
+(\hyphenatedurl {mitchell.att.foicica.com}), which is also based on scintilla.
+The archive can be fetched from:
\starttyping
http://foicica.com/scintillua/
\stoptyping
-On \MSWINDOWS\ you need to copy the \type {lexers} subfolder to the \type
-{wscite} folder. For \LINUX\ the place depends on the distribution and I just
-copy them in the same path as where the regular properties files live. \footnote
-{If you update, don't do so without testing first. Sometimes there are changes in
-\SCITE\ that influence the lexers in which case you have to wait till we have
-update them to suit those changes.}
+On \MSWINDOWS\ you need to copy the files to the \type {wscite} folder (so we end
+up with a \type {lexers} subfolder there). For \LINUX\ the place depends on the
+distribution, for instance \type {/usr/share/scite}; this is the place where the
+regular properties files live. \footnote {If you update, don't do so without
+testing first. Sometimes there are changes in \SCITE\ that influence the lexers
+in which case you have to wait until we have updated them to suit those changes.}
-For \UNIX, one can take a precompiled version as well. Here we might need to split
-the set of files into:
+So, on \MSWINDOWS\ you end up with:
\starttyping
-/usr/bin
-/usr/share/scite
+c:\data\system\scite\wscite\lexers
+\stoptyping
+
+And on \LINUX:
+
+\starttyping
+/usr/share/scite/lexers
\stoptyping
-The second path is hard coded in the binary and moving all files there probably works
-okay. Beware: if you're on a 64 bit system, you need to rename the 64 bit \type {so}
-library.
+Beware: if you're on a 64 bit system, you need to rename the 64 bit \type {so}
+library to one without a number. Unfortunately the 64 bit library is not always
+available, which can give surprises when the operating system gets updated. In such
+a case you should downgrade or use \type {wine} with the \MSWINDOWS\ binaries
+instead. After installation you need to restart \SCITE\ in order to see if things
+work out as expected.
-If you want to use \CONTEXT, you need to copy the relevant files from
+\subject{Installing the \CONTEXT\ lexers}
+
+When we started using this nice extension, we ran into issues and as a
+consequence shipped patched \LUA\ code. We also needed some more control, as we
+wanted to provide more features and complex nested lexers. Because the library
+\API\ changed a couple of times, we now have our own variant, which will be
+cleaned up over time to be more consistent with our other \LUA\ code (so that we
+can also use it in \CONTEXT\ as a variant verbatim lexer). We hope to be able to
+use the \type {scintillua} library, as it does the job.
+
+Anyway, if you want to use \CONTEXT, you need to copy the relevant files from
\starttyping
<texroot>/tex/texmf-context/context/data/scite
\stoptyping
-to the path were \SCITE\ keeps its property files (\type {*.properties}). There
-is a file called \type {SciteGlobal.properties}. At the end of that file (on
-\MSWINDOWS\ it is in the path where the Scite binary) you then add a line to the
-end:
+to the path where \SCITE\ keeps its property files (\type {*.properties}). This is
+the path we already mentioned. There should be a file called \type
+{SciteGlobal.properties} in that path.
+
+So, in the end, on \MSWINDOWS\ you get new files in:
\starttyping
-import scite-context-user
+c:\data\system\scite\wscite
+c:\data\system\scite\wscite\context
+c:\data\system\scite\wscite\context\lexer
+c:\data\system\scite\wscite\context\lexer\themes
+c:\data\system\scite\wscite\context\lexer\data
+c:\data\system\scite\wscite\context\documents
\stoptyping
-You need to restart \SCITE\ in order to see if things work out as expected.
-
-Disabling the external lexer in a recent \SCITE\ is somewhat tricky. In that case
-the end of that file looks like:
+while on \LINUX\ you get:
\starttyping
-imports.exclude=scite-context-external
-import *
-import scite-context-user
+/usr/share/scite
+/usr/share/scite/context
+/usr/share/scite/context/lexer
+/usr/share/scite/context/lexer/themes
+/usr/share/scite/context/lexer/data
+/usr/share/scite/context/documents
\stoptyping
-In any case you need to make sure that the user file is loaded last.
+At the end of the \type {SciteGlobal.properties} you need to add the following
+line:
+
+\starttyping
+import context/scite-context-user
+\stoptyping
After this, things should run as expected (given that \TEX\ runs at the console
as well).
@@ -266,102 +321,15 @@ The configuration file defaults to the Dejavu fonts. These free fonts are part o
the \CONTEXT\ suite (also known as the standalone distribution). Of course you
can fetch them from \type {http://dejavu-fonts.org} as well. You have to copy
them to where your operating system expects them. In the suite they are available
-in
+in:
\starttyping
<contextroot>/tex/texmf/fonts/truetype/public/dejavu
\stoptyping
-\subject{An alternative approach}
-
-If for some reason you prefer not to mess with property files in the main \SCITE\
-path, you can follow a different route and selectively copy files to places.
-
-The following files are needed for the lpeg based lexer:
-
-\starttyping
-lexers/scite-context-lexer.lua
-lexers/scite-context-lexer-tex.lua
-lexers/scite-context-lexer-mps.lua
-lexers/scite-context-lexer-lua.lua
-lexers/scite-context-lexer-cld.lua
-lexers/scite-context-lexer-txt.lua
-lexers/scite-context-lexer-xml*.lua
-lexers/scite-context-lexer-pdf*.lua
-
-lexers/context/data/scite-context-data-tex.lua
-lexers/context/data/scite-context-data-context.lua
-lexers/context/data/scite-context-data-interfaces.lua
-lexers/context/data/scite-context-data-metapost.lua
-lexers/context/data/scite-context-data-metafun.lua
-
-lexers/themes/scite-context-theme.lua
-\stoptyping
-
-The data files are needed because we cannot access property files from within the
-lexer. If we could open a file we could use the property files instead.
-
-These files go to the \type {lexers} subpath in your \SCITE\ installation.
-Normally this sits in the binary path. The following files provide some
-extensions. On \MSWINDOWS\ you can copy these files to the path where the \SCITE\
-binary lives.
-
-\starttyping
-scite-ctx.lua
-\stoptyping
-
-Because property files can only be loaded from the same path where the (user)
-file loads them you need to copy the following files to the same path where the
-loading is defined:
-
-\starttyping
-scite-context.properties
-scite-context-internal.properties
-scite-context-external.properties
-
-scite-pragma.properties
-
-scite-tex.properties
-scite-metapost.properties
-
-scite-context-data-tex.properties
-scite-context-data-context.properties
-scite-context-data-interfaces.properties
-scite-context-data-metapost.properties
-scite-context-data-metafun.properties
-
-scite-ctx.properties
-scite-ctx-context.properties
-scite-ctx-example.properties
-\stoptyping
-
-On \MSWINDOWS\ these go to:
-
-\starttyping
-c:/Users/YourName
-\stoptyping
-
-Next you need to add this to:
-
-\starttyping
-import scite-context
-import scite-context-internal
-import scite-context-external
-import scite-pragma
-\stoptyping
-
-to the file:
-
-\starttyping
-SciTEUser.properties
-\stoptyping
-
-Of course the pragma import is optional. You can comment either the internal or
-external variant but there is no reason not to keep them both.
-
\subject{Extensions}
-Just a quick not to some extensions. If you select a part of the text (normally
+Just a quick note on some extensions. If you select a part of the text (normally
you do this with the shift key pressed) and you hit \type {Shift-F11}, you get a
menu with some options. More (robust) ones will be provided at some point.
@@ -388,6 +356,27 @@ disable it). Wrong words are colored red, and words that might have a case
problem are colored orange. Recognized words are greyed and words with less than
three characters are ignored.
+A spell checking file has to be put in the \type {lexers/data} directory and
+looks as follows (e.g. \type {spell-uk.lua}):
+
+\starttyping
+return {
+ ["max"]=40,
+ ["min"]=3,
+ ["n"]=151493,
+ ["words"]={
+ ["aardvark"]="aardvark",
+ ["aardvarks"]="aardvarks",
+ ["aardwolf"]="aardwolf",
+ ["aardwolves"]="aardwolves",
+ ...
+ }
+}
+\stoptyping
+
+Each key is a word that gets checked against the given value (which can have
+uppercase characters). The word files are not distributed (but they might be at
+some point).
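+
+As an illustration only (the \type {check} helper below is not part of the
+distribution), the following \LUA\ snippet shows how such a table can be used:
+a word is looked up verbatim and in lowercased form, a miss marks it as
+unknown, and a hit with a different casing marks it as a potential case
+problem:
+
+\starttyping
+-- sketch: load a (complete) spell file and classify a word
+
+local spell = dofile("spell-uk.lua")
+
+local function check(word)
+    if #word < (spell.min or 3) then
+        return "ignored"      -- too short, not checked
+    end
+    local found = spell.words[word] or spell.words[word:lower()]
+    if not found then
+        return "unknown"      -- shown in red
+    elseif found ~= word then
+        return "case problem" -- shown in orange
+    else
+        return "known"        -- greyed
+    end
+end
+
+print(check("aardvark")) -- known
+print(check("Aardvark")) -- case problem
+print(check("aardvrak")) -- unknown
+\stoptyping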
+
In the case of internal lexers, the following file is needed:
\starttyping
@@ -451,8 +440,8 @@ releases.
\subject{The external lexers}
-These are the more advanced. They provide more detail and the \CONTEXT\ lexer
-also supports nested \METAPOST\ and \LUA. Currently there is no detailed
+These are the more advanced lexers. They provide more detail and the \CONTEXT\
+lexer also supports nested \METAPOST\ and \LUA. Currently there is no detailed
configuration but this might change once they are stable.
The external lexers operate on documents while the internal ones operate on
@@ -463,13 +452,6 @@ garbage collecting many small tables comes at a price. Of course in practice thi
probably gets unnoticed. \footnote {I wrote the code in 2011 on a more than 5
years old Dell M90 laptop, so I suppose that speed is less an issue now.}
-In principle the external lexers can be used with \type {textadept} which also
-uses \type {scintilla}. Actually, support for lpeg lexing originates in \type
-{textadept}. Currently \type {textadept} lacks a couple of features I like about
-\SCITE\ (for instance it has no realtime logpane) and it's also still changing.
-At some point the \CONTEXT\ distribution might ship with files for \type
-{textadept} as well.
-
The external lpeg lexers work okay with the \MSWINDOWS\ and \LINUX\ versions of
\SCITE, but unfortunately at the time of writing this, the \LUA\ library that is
needed is not available for the \MACOSX\ version of \SCITE. Also, due to the fact
@@ -480,7 +462,7 @@ In addition to \CONTEXT\ and \METAFUN\ lexing a \LUA\ lexer is also provided so
that we can handle \CONTEXT\ \LUA\ Document (\CLD) files too. There is also an
\XML\ lexer. This one also provides spell checking. The \PDF\ lexer tries to do a
good job on \PDF\ files, but it has some limitations. There is also a simple text
-file lexer that does spell checking.
+file lexer that does spell checking. Finally there is a lexer for \CWEB\ files.
Don't worry if you see an orange rectangle in your \TEX\ or \XML\ document. This
indicates that there is a special space character there, for instance \type
@@ -821,12 +803,18 @@ from the on|-|line help pages.
\NC \type{Ctrl+Right} \NC next word; \type{Shift} extends selection \NC \NR
\NC \type{Ctrl+/} \NC previous word part; \type{Shift} extends selection \NC \NR
\NC \type{Ctrl+\ } \NC next word part; \type{Shift} extends selection \NC \NR
+\ML
+\NC \type{F12 / Ctrl+F7} \NC check (or process) \NC \NR
+\NC \type{Ctrl+F12 / Ctrl+F7} \NC process (run) \NC \NR
+\NC \type{Alt+F12 / Ctrl+F7} \NC process (run) using the luajit vm (if applicable) \NC \NR
\LL
\stoptabulate
\stopbuffer
\getbuffer[keybindings]
+\page
+
\subject{Affiliation}
\starttabulate[|l|l|]
diff --git a/context/data/scite/scite-context-visual.pdf b/context/data/scite/context/documents/scite-context-visual.pdf
index 69d82eda6..69d82eda6 100644
--- a/context/data/scite/scite-context-visual.pdf
+++ b/context/data/scite/context/documents/scite-context-visual.pdf
diff --git a/context/data/scite/scite-context-visual.png b/context/data/scite/context/documents/scite-context-visual.png
index 7368a68f1..7368a68f1 100644
--- a/context/data/scite/scite-context-visual.png
+++ b/context/data/scite/context/documents/scite-context-visual.png
Binary files differ
diff --git a/context/data/scite/context/lexers/data/scite-context-data-context.lua b/context/data/scite/context/lexers/data/scite-context-data-context.lua
new file mode 100644
index 000000000..0fe56100b
--- /dev/null
+++ b/context/data/scite/context/lexers/data/scite-context-data-context.lua
@@ -0,0 +1,4 @@
+return {
+ ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "everystoptext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "checkedstrippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "reqno", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", 
"scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", 
"firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "pushmathstyle", "popmathstyle", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", 
"triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", "expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "nobreak", "allowbreak", "goodbreak" },
+}
\ No newline at end of file
diff --git a/context/data/scite/context/lexers/data/scite-context-data-interfaces.lua b/context/data/scite/context/lexers/data/scite-context-data-interfaces.lua
new file mode 100644
index 000000000..ce4c6c01c
--- /dev/null
+++ b/context/data/scite/context/lexers/data/scite-context-data-interfaces.lua
@@ -0,0 +1,10 @@
+return {
+ ["cs"]={ "CAP", "Cap", "Caps", "Character", "Characters", "Cisla", "KAP", "Kap", "Kaps", "MESIC", "MONTH", "Rimskecislice", "Romannumerals", "SLOVA", "SLOVO", "Slova", "Slovo", "VSEDNIDEN", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "Znak", "Znaky", "about", "adaptlayout", "aktualnicislonadpisu", "aktualnidatum", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "background", "barevnalista", "barva", "bilemisto", "blackrule", "blackrules", "blank", "bookmark", "bottomspace", "bublinkovanapoveda", "but", "button", "bydliste", "bypassblocks", "cap", "celkovypocetstran", "cernalinka", "cernelinky", "chapter", "character", "characters", "chem", "cisla", "cislonadpisu", "cislopodrovnice", "cislorovnice", "cislostrany", "citace", "citovat", "clip", "clonefield", "color", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "date", "datum", "decouplemarking", "decrementnumber", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definerule", "definesection", "definesectionblock", "definesorting", "definestartstop", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definetyping", "defineversion", "definuj", "definujakcent", "definujbarvu", "definujblok", "definujbloksekce", "definujbuffer", "definujfont", "definujformatodkazu", "definujhbox", "definujhlavnipole", "definujinterakcnimenu", "definujkombinovanyseznam", "definujkonverzi", "definujlogo", "definujnadpis", "definujobrazeksymbol", "definujodkaz", "definujodstavce", "definujopis", "definujoramovani", "definujoramovanytext", "definujpaletu", "definujplvouciobjekt", "definujpodpole", "definujpole", "definujpopis", "definujpopisek", "definujprekryv", "definujpreskok", "definujprikaz", "definujprofil", "definujprogram", "definujprostredizakladnihofontu", "definujrejstrik", "definujsablonutabulky", "definujsekci", "definujseznam", "definujseznamodkazu", "definujskupinubarev", "definujstartstop", "definujstyl", "definujstylfontu", "definujsymbol", "definujsynonumumfontu", "definujsynonyma", "definujtabelaci", "definujtext", "definujtrideni", "definujupravu", "definujvelikostpapiru", "definujverzi", 
"definujvycet", "definujvystup", "definujzakladnifont", "definujzasobnikpoli", "definujznaceni", "definujznak", "delkaseznamu", "description", "determineheadnumber", "determinelistcharacteristics", "disableinteractionmenu", "dodrzujprofil", "dodrzujverzi", "dodrzujverziprofilu", "dvoustrannypapir", "emptylines", "enumeration", "externalfigure", "externiobraz", "fakt", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footnote", "footnotetext", "forceblocks", "fraction", "framed", "framedtext", "from", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "graycolor", "grid", "hairline", "head", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "hlavnijazyk", "hlavniuroven", "hodnotabarvy", "hodnotasedi", "immediatebetweenlist", "immediatetolist", "in", "indentation", "indenting", "inframed", "ininner", "inleft", "inline", "inmargin", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", "inouter", "inright", "instalacejazyka", "installlanguage", "interactionbar", "interactionbuttons", "interakcnilista", "interakcnitlacitka", "interaktivnimenu", "item", "items", "its", "jazyk", "jdidolu", "jdina", "jdinabox", "jdinastranu", "jmeno", "kap", "keepblocks", "klonujpole", "komponenta", "konvertujcislo", "kopirujpole", "korekcebilehomista", "labeling", "labels", "labeltext", "language", "leftaligned", "leg", "listsymbol", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "mainlanguage", "maoramovani", "mapfontsize", "mar", "marginalnilinka", "marginalninadpis", "marginalnislovo", "marginalnitext", "marginrule", "margintext", "marking", "markversion", "matematika", "mathematics", "mazaramovani", "mediaeval", "meritko", "mesic", "mezera", "midaligned", "mirror", "month", "moveformula", "moveongrid", "movesidefloat", "mrizka", "nadpis", "nadruhyokraj", "nalevo", "nalevyokraj", "name", "naokraj", "napravo", "napravyokraj", "nastavbarvu", "nastavbarvy", "nastavbilamista", "nastavblok", "nastavbloksekce", "nastavbuffer", "nastavcernelinky", "nastavcislonadpisu", "nastavcislostrany", "nastavcislovani", "nastavcislovaniodstavcu", "nastavcislovaniradku", "nastavcislovanistran", "nastavcitaci", "nastavdefinicipoznamekpodcarou", "nastavdeleniplvoucichobjektu", "nastavdelitko", "nastavdolnitexty", "nastavexterniobrazy", "nastavhorejsek", "nastavhornitexty", "nastavinterakci", "nastavinterakcnilistu", "nastavinterakcnimenu", "nastavinterakcniobrazovku", "nastavjazyk", "nastavkapitalky", "nastavkombinovanyseznam", "nastavkomentar", "nastavkomentarstrany", "nastavlegendu", "nastavmarginalie", "nastavmarginalniblok", "nastavmarginalnilinky", "nastavmeziradkovoumezeru", "nastavnadpis", "nastavnadpisy", "nastavodkazovani", "nastavodsazovani", "nastavodstavce", "nastavopis", "nastavoramovanetexty", "nastavoramovani", "nastavorez", "nastavotoceni", "nastavpaletu", "nastavplvouciobjekt", "nastavplvouciobjekty", "nastavpodcislostrany", "nastavpodtrzeni", "nastavpole", "nastavpolozky", "nastavpopisek", "nastavpopisky", "nastavpopisy", "nastavpozadi", "nastavpoznamkypodcarou", "nastavprechodstrany", "nastavpreskok", "nastavprofily", "nastavprogramy", "nastavprostredizakladnihofontu", "nastavpublikace", "nastavradkovani", "nastavradky", "nastavrastr", "nastavrejstrik", "nastavrovnice", "nastavsadusymbolu", "nastavsekci", "nastavseznam", "nastavseznamodkazu", "nastavsirkucary", "nastavsloupce", "nastavspodek", "nastavspojeni", 
"nastavsynchronizaci", "nastavsynchronizacnilistu", "nastavsynonyma", "nastavsystem", "nastavtab", "nastavtabelaci", "nastavtabulky", "nastavtenkelinky", "nastavtext", "nastavtexthlavicky", "nastavtextovelinky", "nastavtextpopisku", "nastavtexttexty", "nastavtextyupati", "nastavtextyzahlavi", "nastavtlacitka", "nastavtoleranci", "nastavtrideni", "nastavtype", "nastavumisteniprotejsku", "nastavumistovani", "nastavupati", "nastavupravu", "nastavurl", "nastavusporadani", "nastavvelikostpapiru", "nastavverze", "nastavvsechnapole", "nastavvycty", "nastavvyplnovelinky", "nastavvyplnoveradky", "nastavvystup", "nastavvzhled", "nastavzahlavi", "nastavzakladnifont", "nastavzarovnani", "nastavznaceni", "nastavzuzeni", "nastrane", "navigating", "nejakyradek", "nekde", "nextsection", "neznamo", "nivy", "nizky", "nocap", "noheaderandfooterlines", "noindenting", "nokap", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "obrazovka", "odkaz", "odkaznadatum", "odkaznastranu", "odkaznatext", "odkazujici", "odsazenishora", "odsazenizleva", "odsazovani", "okr", "opakovat", "opis", "opissoubor", "oramovani", "oref", "orez", "otocit", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "oznaceni", "oznacverzi", "packed", "page", "pagedepth", "pageoffset", "pagereference", "paragraph", "parovastrana", "part", "periods", "pis", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placepagenumber", "placerawlist", "placereferencelist", "placeregister", "placerule", "placesidebyside", "placesubformula", "placetextvariable", "plnezneni", "pol", "pole", "polozka", "polozky", "popisky", "poppisek", "porovnejpaletu", "porovnejskupinubarev", "position", "positiontext", "pozadi", "pozice", "poznamka", "poznamkapodcarou", "pref", "prelozit", "premistinamrizku", "prepninazakladnifont", "preskoc", "prizpusobivepole", "prizpusobvzhled", "processblocks", "processpage", "produkt", "program", "projekt", "propojeneznaceni", "propojenydokument", "propojenyrejstrik", "prostredi", "publication", "publikace", "quotation", "quote", "ran", "ref", "reference", "referral", "referraldate", "register", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "resetznaceni", "rightaligned", "rimskecislice", "romannumerals", "rotate", "rozdelplvouciobjekt", "rozmer", "rozpojeneznaceni", "roztazene", "scale", "schovejbloky", "screen", "section", "sedabarva", "seeregister", "selectblocks", "selectpaper", "selectversion", "setnumber", "settextcontent", "settextvariable", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", 
"setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setuprule", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextposition", "setuptextrules", "setuptexttexts", "setuptextvariable", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "sirkalevehookraje", "sirkalevemarginalie", "sirkamarginalie", "sirkaokraje", "sirkapapiru", "sirkapravehookraje", "sirkapravemarginalie", "sirkasazby", "sirkaseznamu", "sirkatextu", "sirkatiskpapiru", "sloupec", "slovovpravo", "someline", "somewhere", "sort", "space", "splitfloat", "spodek", "stanovcharakteristickuseznamu", "stanovcislonadpisu", "startalignment", "startbackground", "startbarva", "startbuffer", "startcislovaniradku", "startcitace", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startdokument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "startglobalni", "starthiding", "startinteractionmenu", "startitemgroup", "startkodovani", "startkomponenta", "startkorekceradku", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokalni", "startlokalnipoznamkypodcarou", "startmakeup", "startmarginalniblok", "startmarginalnilinka", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startnezhustene", "startobraz", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startpozadi", "startprehled", "startprekryv", "startproduct", "startprodukt", "startprofil", "startprofile", "startproject", "startprojekt", "startprostredi", 
"startprotejsek", "startquotation", "startradek", "startradky", "startrastr", "startregister", "startsadasymbolu", "startsloupce", "startspojeni", "startsymbolset", "startsynchronizace", "startsynchronization", "starttable", "starttables", "starttabulate", "starttabulka", "starttabulky", "starttext", "starttextovalinka", "starttextrule", "starttyping", "startumistovani", "startunpacked", "startuprava", "startversion", "startverze", "startzarovnavani", "startzhustene", "startzuzeni", "stopalignment", "stopbackground", "stopbarva", "stopbuffer", "stopcislovaniradku", "stopcitace", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopdokument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobalni", "stophiding", "stopinteractionmenu", "stopitemgroup", "stopkodovani", "stopkomponenta", "stopkorekceradku", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokalni", "stoplokalnipoznamkypodcarou", "stopmakeup", "stopmarginalniblok", "stopmarginalnilinka", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopnezhustene", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stoppozadi", "stopprehled", "stopprekryv", "stopproduct", "stopprodukt", "stopprofil", "stopprofile", "stopproject", "stopprojekt", "stopprostredi", "stopprotejsek", "stopquotation", "stopradek", "stopradky", "stoprastr", "stopsloupce", "stopspojeni", "stopsymbolset", "stopsynchronizace", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptabulka", "stoptabulky", "stoptext", "stoptextovalinka", "stoptextrule", "stoptyping", "stopumistovani", "stopunpacked", "stopuprava", "stopversion", "stopverze", "stopzarovnavani", "stopzhustene", "stopzuzeni", "strana", "stretched", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "synchronizacnilista", "synchronizationbar", "synchronize", "synchronizovat", "synonym", "tab", "tecky", "tenkalinka", "tenkelinky", "testcolumn", "testpage", "tex", "texthlavicky", "textovalinka", "textpopisku", "textreference", "textrule", "textvariable", "thinrule", "thinrules", "title", "tlacitko", "tlacitkomenu", "tloustkacary", "tooltip", "translate", "tref", "tvrdamezera", "tvrdemezery", "txt", "typ", "type", "typebuffer", "typefile", "ukazbarvu", "ukazexterniobrazy", "ukazmrizku", "ukaznastaveni", "ukazpaletu", "ukazpodpery", "ukazpole", "ukazpostredizakladnihofontu", "ukazramecek", "ukazsadusymbolu", "ukazskupinubarev", "ukazupravu", "ukazvytisk", "ukazvzhled", "ukazzakladnifont", "umistikombinovanyseznam", "umistilegendu", "umistiloga", "umistilokalnipoznamkypodcarou", "umistinadsebe", "umistinamrizku", "umistipodrovnici", "umistipoznamkypodcarou", "umistirejstrik", "umistirovnici", "umistiseznam", "umistivedlesebe", "umistizalozky", "underbar", "underbars", "urcicharakteristikurejstriku", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "uzijJSscripts", "uzijURL", "uzijadresar", "uzijbloky", 
"uzijexternidokument", "uzijexterniobraz", "uzijexternisoubor", "uzijexternisoubory", "uzijexternizvuk", "uzijkodovani", "uzijmodul", "uzijmoduly", "uzijodkazy", "uzijprikazy", "uzijspeciality", "uzijsymbol", "uzijurl", "version", "verze", "vl", "vlasovalinka", "vlevo", "vpravo", "vradku", "vsedniden", "vyberbloky", "vyberpapir", "vyberverzi", "vyplnenytext", "vyplnovelinky", "vyplnovepole", "vyplnovyradek", "vyskahorejsku", "vyskapapiru", "vyskasazby", "vyskaseznamu", "vyskaspodku", "vyskatextu", "vyskatiskpapiru", "vyskaupati", "vyskazahlavi", "vysoky", "vyznam", "vzdalenosthorejsku", "vzdalenostlevehookraje", "vzdalenostlevemarginalie", "vzdalenostmarginalie", "vzdalenostokraje", "vzdalenostpravehookraje", "vzdalenostpravemarginalie", "vzdalenostspodku", "vzdalenostupati", "vzdalenostzahlavi", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister", "zablokujinterakcnimenu", "zachovejbloky", "zadnamezera", "zadnebilemisto", "zadnedalsibloky", "zadnedalsisoubory", "zadnehorniadolniradky", "zadneodsazovani", "zadnezahlaviaupati", "zadneznaceni", "zadnyrozmer", "zadnyseznam", "zadnytest", "zalozka", "zapisdorejstriku", "zapisdoseznamu", "zapisdoseznamuodkazu", "zapismeziseznam", "zaramovani", "zarovnanonastred", "zarovnanovlevo", "zarovnanovpravo", "zasobnikpoli", "zaznamovepole", "zhustene", "ziskejbuffer", "ziskejznaceni", "zlomek", "znaceni", "znak", "znaky", "zpracujbloky", "zpracujstranu", "zrcadlit", "zref", "zvysujicicislo" },
+ ["de"]={ "Buchstabe", "Buchstaben", "CAP", "Cap", "Caps", "Character", "Characters", "KAP", "Kap", "Kaps", "MONAT", "MONTH", "Roemischezahlen", "Romannumerals", "WEEKDAY", "WOCHENTAG", "WOERTER", "WORD", "WORDS", "WORT", "Woerter", "Word", "Words", "Wort", "Ziffern", "about", "abstandlinkerrand", "abstandoben", "abstandrechterrand", "abstandunten", "adaptlayout", "amgitterausrichten", "amgitterneuausrichten", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "aufseite", "ausfuellfeld", "ausfuelltext", "ausschnitt", "background", "bearbeitebloecke", "bearbeiteseite", "bedeutung", "behaltebloecke", "bei", "bemerkung", "benutzekodierung", "benutzespezielles", "benutzeverzeichnis", "beschrifteversion", "beschriftung", "bestimmekopfnummer", "bestimmelistencharakeristika", "bestimmeregistercharakteristika", "bildschirm", "blackrule", "blackrules", "blank", "blanko", "bookmark", "bottomspace", "breitelinkerrand", "breiterechterrand", "bruch", "buchstabe", "buchstaben", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "clip", "clonefield", "color", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "date", "datum", "decouplemarking", "decrementnumber", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definerule", "defineschriftsynonym", "definesection", "definesectionblock", "definesorting", "definestartstop", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definetyping", "defineversion", "definiereabbsymbol", "definiereabsaetze", "definiereabschnitt", "definiereabschnittsblock", "definiereakzent", "definierebefehl", "definierebeschreibung", "definierebeschreibungen", "definierebeschriftung", "definiereblanko", "definiereblock", "definierefarbe", "definierefarbengruppe", "definierefeld", "definierefeldstapel", "definierefliesstext", "definierefliesstextumgebung", "definieregleitobjekt", "definierehauptfeld", "definierehbox", "definiereinteraktionsmenue", "definierekonversion", "definierelabel", "definiereliste", "definierelogo", "definieren", "definierenummerierung", "definiereoverlay", "definierepalette", 
"definierepapierformat", "definiereprofil", "definiereprogramme", "definierepuffer", "definierereferenz", "definierereferenzformat", "definierereferenzliste", "definiereregister", "definiereschrift", "definiereschriftstil", "definieresortieren", "definierestartstop", "definierestil", "definieresubfeld", "definieresymbol", "definieresynonyme", "definieretabellenvorlage", "definieretabulator", "definieretext", "definieretippen", "definiereueberschrift", "definiereumbruch", "definiereumrahmt", "definiereumrahmtertext", "definiereversion", "definierezeichen", "definierezusammengestellteliste", "description", "determineheadnumber", "determinelistcharacteristics", "dimension", "disableinteractionmenu", "doppelseite", "doppelseitigespapier", "drehen", "duennelinie", "duennerumriss", "einezeile", "einziehen", "emptylines", "entknuepfebeschriftung", "enumeration", "externalfigure", "externeabbildung", "farbbalken", "farbe", "farbewert", "feld", "feldstapel", "festesspatium", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspaces", "folgeprofil", "folgeprofilversion", "folgeversion", "followprofile", "followprofileversion", "followversion", "footnote", "footnotetext", "forceblocks", "format", "formelnummer", "fraction", "framed", "framedtext", "from", "fussnote", "fusszeileabstand", "fusszeilenhoehe", "gefuelltesrechteck", "gefuelltezeile", "geg", "gesamtseitenanzahl", "gestreckt", "getbuffer", "getmarking", "getnumber", "gitter", "godown", "goto", "gotobox", "graufarbe", "grauwert", "graycolor", "grid", "haarlinie", "hairline", "hauptsprache", "head", "headnumber", "headsym", "headtext", "heutigesdatum", "heutigeskopfnummer", "hideblocks", "high", "hintergrund", "hl", "hoch", "hoeheoben", "hoeheunten", "holebeschriftung", "holepuffer", "imlinken", "imlinkenrand", "immaumrise", "immediatebetweenlist", "immediatetolist", "imrechten", "imrechtenrand", "imumriss", "in", "inaktiviereinteraktionsmenue", "inanderermarginale", "indentation", "indenting", "inframed", "ininner", "inleft", "inline", "inlinkermarginale", "inmargin", "inmarginalie", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", "inouter", "inrechtermarginale", "inright", "installieresprache", "installlanguage", "interactionbar", "interactionbuttons", "interaktionsbalken", "interaktionsknopfe", "interaktionsmenue", "inzeile", "irgendwo", "item", "items", "its", "kap", "keepblocks", "keindimension", "keinebeschriftung", "keinebloeckemehr", "keinedateienmehr", "keinekopfundfusszeilen", "keineliste", "keinspatium", "keintest", "keinzeilenobenundunten", "keinzwischenraum", "kleinerdurchschuss", "klonierefeld", "knopf", "komponente", "konvertierezahl", "kopf", "kopfniveau", "kopfnummer", "kopfweite", "kopfzeilenabstand", "kopfzeilenhoehe", "kopierefeld", "korrigierezwischenraum", "label", "labeling", "labels", "labeltext", "language", "leftaligned", "leg", "liniendicke", "linkemarginalafstand", "linkemarginalbreite", "linksbuendig", "listenbreite", "listenhoehe", "listenlaenge", "listsymbol", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "mainlanguage", "mapfontsize", "mar", "marginalafstand", "marginalbreite", "marginallinie", "marginaltext", "marginaltitel", "marginalwort", "marginrule", "margintext", "marking", "markversion", "mathematics", "mathematik", "maumrise", "mediaeval", "menueknopf", "midaligned", "mirror", "monat", "month", "moveformula", "moveongrid", "movesidefloat", "nachunten", "name", "navigating", "nextsection", 
"nichteinziehen", "nocap", "noheaderandfooterlines", "noindenting", "nokap", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notiz", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "nummererhoehen", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pageoffset", "pagereference", "papierbreite", "papierhoehe", "paragraph", "part", "passelayoutan", "passendfeld", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placepagenumber", "placerawlist", "placereferencelist", "placeregister", "placerule", "placesidebyside", "placesubformula", "placetextvariable", "platzierebookmarks", "platziereformel", "platzierefussnoten", "platzierelegende", "platziereliste", "platzierelogo", "platzierelokalefussnoten", "platzierenebeneinander", "platziereregister", "platziereuntereinander", "platziereunterformel", "platzierezusammengestellteliste", "pos", "position", "positiontext", "posten", "printpapierbreite", "printpapierhoehe", "processblocks", "processpage", "produkt", "program", "programm", "projekt", "publication", "publikation", "punkt", "quotation", "quote", "ran", "randabstand", "randbreite", "rechteck", "rechtecke", "rechtemarginalafstand", "rechtemarginalbreite", "rechtsbuendig", "ref", "reference", "referenz", "referieren", "referral", "referraldate", "register", "registrierefelder", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "rightaligned", "roemischezahlen", "romannumerals", "rotate", "ruecksetzten", "ruecksetztenbeschriftung", "rumpfweite", "satzbreite", "satzhoehe", "scale", "schreibezumregister", "schreibezurliste", "schreibezurreferenzliste", "schreibezwischenliste", "screen", "section", "seeregister", "seite", "seitenreferenz", "seitenummer", "selectblocks", "selectpaper", "selectversion", "setnumber", "settext", "settextcontent", "settextvariable", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", 
"setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setuprule", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextposition", "setuptextrules", "setuptexttexts", "setuptextvariable", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "space", "spalte", "spatium", "spiegeln", "splitfloat", "sprache", "startabbildung", "startalignment", "startausrichtung", "startbackground", "startbuffer", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startdokument", "startenger", "startenumeration", "startenvironment", "startfact", "startfarbe", "startfigure", "startfloattext", "startformula", "startframedtext", "startgegenueber", "startglobal", "startgrosserdurchschuss", "starthiding", "starthintergrund", "startinteractionmenu", "startitemgroup", "startkleinerdurchschuss", "startkodierung", "startkombination", "startkomponente", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokal", "startlokalefussnoten", "startmakeup", "startmarginalblock", "startmarginallinie", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositionieren", "startpositioning", "startpostponing", "startproduct", "startprodukt", "startprofil", "startprofile", "startproject", "startprojekt", "startquotation", "startraster", "startregister", "startspalten", "startsymbolset", "startsynchronisation", "startsynchronization", "starttabelle", "starttabellen", "starttable", "starttables", "starttabulate", "starttext", "starttextlinie", "starttextrule", "starttyping", "startueberblick", "startumbruch", "startumgebung", "startunpacked", "startversion", "startzeile", "startzeilen", "startzeilenkorrektur", "startzeilennumerierung", "startzitat", "stelleabsaetzeein", "stelleabsatznummerierungein", "stelleabschnittein", "stelleabschnittsblockein", "stelleanordnenein", "stelleaufzaehlungenein", "stelleausgabeein", "stelleausrichtungein", "stelleausschnittein", "stellebeschreibungein", "stellebeschriftungein", "stellebilderunterschriftein", "stellebildunterschriftein", "stellebindestrichein", "stelleblankoein", "stelleblockein", "stelledrehenein", "stelleduennerumrissein", "stelleeinziehenein", 
"stelleengerein", "stelleexterneabbildungenein", "stellefarbeein", "stellefarbenein", "stellefeldein", "stellefelderin", "stellefliesstextein", "stellefliesstextumgebungein", "stelleformelnein", "stellefussnotendefinitionein", "stellefussnotenein", "stellefusszeileein", "stellefusszeilentextein", "stellegefuelltesrechteckein", "stellegefuelltezeileein", "stellegegenueberplatzierenein", "stellegleitobjekteein", "stellegleitobjektein", "stellehintergruendeein", "stellehintergrundein", "stelleinmarginalieein", "stelleinteraktionein", "stelleinteraktionsbalkenein", "stelleinteraktionsbildschirmein", "stelleinteraktionsmenueein", "stelleknopfein", "stellekombinationein", "stellekommentarein", "stellekopfzahlein", "stellekopfzeileein", "stellekopfzeilentextein", "stellelabeltextein", "stellelayoutein", "stellelegendeein", "stellelinienbreiteein", "stellelisteein", "stellemarginalblockein", "stellemarginallinieein", "stellenobenein", "stellenummerierungein", "stellepaletteein", "stellepapierformatein", "stelleplatziegeteiltegleitobjekt", "stellepositionierenein", "stellepostenein", "stelleprofilein", "stelleprogrammein", "stellepublikationein", "stellepufferein", "stellerasterein", "stellerechteckein", "stellereferenzierenein", "stellereferenzlisteein", "stelleregisterein", "stelleseitenkommentarein", "stelleseitennummerein", "stelleseitennummeriernungein", "stelleseitenuebergangein", "stellesortierenein", "stellespaltenein", "stellespatiumein", "stellespracheein", "stellesymbolsetein", "stellesynchronisationein", "stellesynchronisationsbalkenein", "stellesynonymein", "stellesystemein", "stelletabein", "stelletabellenein", "stelletabulatorein", "stelletextein", "stelletextobenein", "stelletexttexteein", "stelletextumrissein", "stelletextuntenein", "stelletipein", "stelletippenein", "stelletoleranzein", "stelleueberschriftein", "stelleueberschriftenein", "stelleueberschrifttextein", "stelleumbruchein", "stelleumrahmtein", "stelleumrahmtetexteein", "stelleuntenein", "stelleunterseitennummerein", "stelleunterstreichenein", "stelleurlein", "stelleversalienein", "stelleversionein", "stellezeilenabstandein", "stellezeilenein", "stellezeilennumerierungein", "stellezitierenein", "stellezusammengestelltelisteein", "stellezwischenraumein", "stopalignment", "stopausrichtung", "stopbackground", "stopbuffer", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopdokument", "stopenger", "stopenumeration", "stopenvironment", "stopfact", "stopfarbe", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgegenueber", "stopglobal", "stopgrosserdurchschuss", "stophiding", "stophintergrund", "stopinteractionmenu", "stopitemgroup", "stopkleinerdurchschuss", "stopkodierung", "stopkombination", "stopkomponente", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokal", "stoplokalefussnoten", "stopmakeup", "stopmarginalblock", "stopmarginallinie", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositionieren", "stoppositioning", "stoppostponing", "stopproduct", "stopprodukt", "stopprofil", "stopprofile", "stopproject", "stopprojekt", "stopquotation", "stopraster", "stopspalten", "stopsymbolset", "stopsynchronisation", "stopsynchronization", "stoptabelle", "stoptabellen", "stoptable", "stoptables", 
"stoptabulate", "stoptext", "stoptextlinie", "stoptextrule", "stoptyping", "stopueberblick", "stopumbruch", "stopumgebung", "stopunpacked", "stopversion", "stopzeile", "stopzeilen", "stopzeilenkorrektur", "stopzeilennumerierung", "stopzitat", "stretched", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "synchronisationsbalken", "synchronisieren", "synchronizationbar", "synchronize", "synonym", "tab", "teilegleitobjekt", "testcolumn", "testpage", "tex", "textbreite", "texthoehe", "textlinie", "textreference", "textreferenz", "textrule", "textvariable", "thinrule", "thinrules", "tief", "tiho", "tip", "tippedatei", "tippen", "tippepuffer", "title", "tooltip", "translate", "txt", "typ", "type", "typebuffer", "typefile", "ueber", "ueberschrifttext", "uebersetzten", "umgebung", "umrahmt", "unbekant", "underbar", "underbars", "unterformelnummer", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "verbergebloecke", "vergleichefarbengruppe", "vergleichepalette", "verknuepfebeschriftung", "verknuepfedokument", "verknuepfregister", "version", "verweis", "verweisdatum", "verwendeJSscript", "verwendeURL", "verwendebefehl", "verwendebloecke", "verwendeexteresdokument", "verwendeexterneabbildung", "verwendeexternedatei", "verwendeexternedateien", "verwendeexternestonstueck", "verwendemodul", "verwendemodule", "verwendereferenzen", "verwendesymbole", "verwendeurl", "vl", "volleswort", "von", "waehlebloeckeaus", "waehlepapieraus", "waehleversionaus", "wechselezumfliesstext", "weekday", "whitespace", "wiederholen", "wochentag", "wohnort", "wordright", "wortrechts", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister", "zeigedruck", "zeigeeinstellungen", "zeigeexterneabbildungen", "zeigefarbe", "zeigefarbengruppe", "zeigefelder", "zeigefliesstext", "zeigefliesstextumgebung", "zeigegitter", "zeigelayout", "zeigepalette", "zeigerahmen", "zeigestruts", "zeigeumbruch", "zentriert", "ziffern", "zitat", "zitieren", "zu", "zurbox", "zurseite", "zwischenraum" },
+ ["en"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "about", "adaptlayout", "adding", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "background", "backspace", "blackrule", "blackrules", "blank", "bookmark", "bottomdistance", "bottomheight", "bottomspace", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "clip", "clonefield", "color", "colorbar", "colorvalue", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "component", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "date", "decouplemarking", "decrementnumber", "define", "defineaccent", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecharacter", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "definecommand", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontstyle", "definefontsynonym", "defineframed", "defineframedtext", "definehbox", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemainfield", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definerule", "definesection", "definesectionblock", "definesorting", "definestartstop", "definestyle", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetextbackground", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definetyping", "defineversion", "description", "determineheadnumber", "determinelistcharacteristics", "determineregistercharacteristics", "dimension", "disableinteractionmenu", "domicile", "donttest", "edgedistance", "edgewidth", "emptylines", "enumeration", "environment", "externalfigure", "fact", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspace", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footerdistance", "footerheight", "footnote", "footnotetext", "forceblocks", "formulanumber", "fraction", "framed", "framedtext", "from", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "gotopage", "graycolor", "greyvalue", "grid", "hairline", "head", "headerdistance", "headerheight", "headlevel", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "immediatebetweenlist", "immediatetolist", "in", "incrementnumber", "indentation", "indenting", "inframed", "infull", "ininner", "inleft", "inleftedge", "inleftmargin", "inline", "inmaframed", "inmargin", "inneredgedistance", "inneredgewidth", "innermargindistance", 
"innermarginwidth", "inothermargin", "inouter", "inright", "inrightedge", "inrightmargin", "installlanguage", "interactionbar", "interactionbuttons", "interactionmenu", "item", "items", "its", "keepblocks", "label", "labeling", "labels", "labeltext", "language", "leftaligned", "leftedgedistance", "leftedgewidth", "leftmargindistance", "leftmarginwidth", "leg", "linethickness", "listheight", "listlength", "listsymbol", "listwidth", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "maframed", "mainlanguage", "makeupheight", "makeupwidth", "mapfontsize", "mar", "margindistance", "marginrule", "margintext", "margintitle", "marginwidth", "marginword", "marking", "markversion", "mathematics", "mediaeval", "menubutton", "midaligned", "mirror", "month", "moveformula", "moveongrid", "movesidefloat", "name", "navigating", "nextsection", "nocap", "nodimension", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pagenumber", "pageoffset", "pagereference", "paperheight", "paperwidth", "paragraph", "part", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placepagenumber", "placerawlist", "placereferencelist", "placeregister", "placerule", "placesidebyside", "placesubformula", "placetextvariable", "position", "positiontext", "printpaperheight", "printpaperwidth", "processblocks", "processpage", "product", "program", "project", "publication", "quotation", "quote", "ran", "redo", "ref", "reference", "referral", "referraldate", "referring", "register", "remark", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "rightaligned", "rightedgedistance", "rightedgewidth", "rightmargindistance", "rightmarginwidth", "romannumerals", "rotate", "scale", "screen", "section", "seeregister", "selectblocks", "selectpaper", "selectversion", "setnumber", "settextcontent", "settextvariable", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulae", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", 
"setupinterlinespace2", "setupitemgroup", "setupitemizations", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagecomment", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setuprule", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextbackground", "setuptextposition", "setuptextrules", "setuptexttexts", "setuptextvariable", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "space", "splitfloat", "startalignment", "startbackground", "startbuffer", "startcoding", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframed", "startframedtext", "startglobal", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttext", "starttextbackground", "starttextrule", "starttyping", "startunpacked", "startversion", "stopalignment", "stopbackground", "stopbuffer", "stopcoding", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframed", "stopframedtext", "stopglobal", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopraster", 
"stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextbackground", "stoptextrule", "stoptyping", "stopunpacked", "stopversion", "stretched", "sub", "subformulanumber", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "symoffset", "synchronizationbar", "synchronize", "synonym", "tab", "testcolumn", "testpage", "tex", "textheight", "textreference", "textrule", "textvariable", "textwidth", "thinrule", "thinrules", "title", "tooltip", "topdistance", "topheight", "topspace", "totalnumberofpages", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "unitmeaning", "unknown", "useJSscripts", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usemodules", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "useurl", "version", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister" },
+ ["fr"]={ "CAP", "Cap", "Caps", "Caractere", "Caracteres", "Character", "Characters", "Chiffresromains", "JOURSEMAINE", "MOIS", "MONTH", "MOT", "MOTS", "Mot", "Mots", "Numeros", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "a", "about", "adaptedisposition", "adaptlayout", "affectenumero", "affectevariabletexte", "ajustechamp", "alaligne", "alapage", "aligneadroite", "aligneagauche", "aligneaumilieu", "appendix", "arg", "arriereplan", "at", "atleftmargin", "atpage", "atrightmargin", "background", "baha", "barrecouleur", "barreinteraction", "barresynchronisation", "bas", "blackrule", "blackrules", "blank", "bookmark", "bouton", "boutonmenu", "boutonsinteraction", "but", "button", "bypassblocks", "cacheblocs", "cap", "caractere", "caracteres", "champ", "changepolicebrute", "changepolicecorps", "chapter", "character", "characters", "chem", "chiffresromains", "citation", "citer", "clip", "clonechamp", "clonefield", "colonne", "color", "column", "comment", "commentaire", "comparecolorgroup", "comparegroupecouleur", "comparepalet", "comparepalette", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completenumeropage", "completeregister", "completeregistre", "composant", "composeenalinea", "concernant", "convertitnumero", "convertnumber", "copitchamp", "copyfield", "correctwhitespace", "corrigeespaceblanc", "couleur", "couleurgrise", "coupledocument", "coupledregister", "couplemarking", "couplemarquage", "couplepage", "couplepaper", "couplepapier", "coupleregister", "coupleregistre", "crlf", "currentdate", "currentheadnumber", "cutspace", "dactylographier", "dans", "dansautremarge", "dansborddroit", "dansbordgauche", "dansdroite", "dansgauche", "dansmarge", "dansmargedroite", "dansmargegauche", "date", "datecourante", "daterecommandation", "de", "decouplemarking", "decouplemarquage", "decrementenumero", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecombination", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelist", "definelogo", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definesection", "definesectionblock", "definesorting", "definestartstop", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetype", "definetypeface", "definetyping", "defineversion", "definicaractere", "definit", "definitaccent", "definitbloc", "definitblocsection", "definitbuffer", "definitcalque", "definitchamp", "definitchampprincipal", "definitcommande", "definitconversion", "definitcouleur", "definitdactylo", "definitdansmarge", "definitdemarrestoppe", "definitdescription", "definitdisposition", "definitenumeration", "definitenvironnementpolicecorps", "definitetiquette", "definitflottant", "definitformatreference", "definitgroupecouleur", "definithbox", "definitjeucolonne", "definitliste", 
"definitlisteimbriquee", "definitlistereference", "definitlogo", "definitmakeup", "definitmarquage", "definitmenuinteraction", "definitnotepdp", "definitpalette", "definitparagraphes", "definitpilechamp", "definitpolice", "definitpolicebrute", "definitpolicecorps", "definitpositiontexte", "definitprofil", "definitprogramme", "definitreference", "definitregistre", "definitregle", "definitrevetement", "definitsautdecolonne", "definitsautdepage", "definitsection", "definitsortie", "definitsouschamp", "definitstyle", "definitstylepolice", "definitsymbole", "definitsymbolefigure", "definitsynonymepolice", "definitsynonymes", "definittabulation", "definittaillepapier", "definittete", "definittexte", "definittrametableau", "definittri", "definittype", "definitvariabletexte", "definitversion", "definitvide", "demarrealignement", "demarrearriereplan", "demarreblocmarge", "demarrecitation", "demarreciter", "demarrecodage", "demarrecolonnes", "demarrecombinaison", "demarrecompoetroite", "demarrecomposant", "demarrecorrectionligne", "demarrecouleur", "demarredegroupe", "demarredocument", "demarreenvironement", "demarrefigure", "demarreglobal", "demarregroupe", "demarrejeucolonne", "demarrejeusymboles", "demarreligne", "demarreligneregleetexte", "demarrelignes", "demarrelocal", "demarremakeup", "demarremargereglee", "demarrenotespdplocales", "demarrenumerotationligne", "demarreopposition", "demarrepositionnement", "demarreproduit", "demarreprofil", "demarreprojet", "demarreraster", "demarrerevetement", "demarresynchronisation", "demarretableau", "demarretableaux", "demarretexte", "demarreversion", "demarrevuedensemble", "deplaceformule", "deplacesurgrille", "description", "determinecaracteristiqueliste", "determinecaracteristiquesregistre", "determineheadnumber", "determinelistcharacteristics", "determinenumerotete", "dimension", "disableinteractionmenu", "distancebord", "distanceborddroit", "distancebordgauche", "distanceentete", "distanceinf", "distancemarge", "distancemargedroite", "distancemargegauche", "distancepdp", "distancesup", "domicile", "echelle", "ecran", "ecritdansliste", "ecritdanslistereference", "ecritentreliste", "ecritregistre", "el", "element", "elements", "emptylines", "enumeration", "environement", "espace", "espaceblanc", "espacefixe", "espaceinf", "espacesfixes", "espacesup", "etiquette", "etiquettes", "etire", "externalfigure", "fait", "faitreference", "fichierdactylo", "field", "fieldstack", "figureexterne", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footnote", "footnotetext", "forceblocks", "forceblocs", "fraction", "framed", "framedtext", "from", "gardeblocs", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "graycolor", "grid", "grille", "groupe", "hairline", "haut", "hauteureditionpapier", "hauteurentete", "hauteurinf", "hauteurliste", "hauteurmakeup", "hauteurpapier", "hauteurpdp", "hauteursup", "hauteurtexte", "head", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "immediatebetweenlist", "immediatetolist", "in", "inconnu", "incrementenumero", "indentation", "indenting", "inframed", "infull", "inhibemenuinteraction", "ininner", "inleft", "inline", "inmargin", "inmframed", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", "inouter", "inright", "installelangue", "installlanguage", "interactionbar", "interactionbuttons", "item", "items", "its", "joursemaine", "keepblocks", "labeling", 
"labels", "labeltext", "labeltexte", "language", "langue", "langueprincipale", "largeurbord", "largeurborddroit", "largeurbordgauche", "largeureditionpapier", "largeurligne", "largeurliste", "largeurmakeup", "largeurmarge", "largeurmargedroite", "largeurmargegauche", "largeurpapier", "largeurtexte", "leftaligned", "leg", "ligneh", "lignenoire", "ligneregleetexte", "lignesnoires", "listesymbole", "listsymbol", "llongueurliste", "loadsorts", "loadsynonyms", "logchamp", "logfields", "lohi", "low", "mainlanguage", "mapfontsize", "mar", "margereglee", "marginrule", "margintext", "marking", "markversion", "marquage", "marquageversion", "marquepage", "mathematics", "mathematique", "mediaeval", "menuinteraction", "mframed", "midaligned", "mirror", "mois", "month", "montrecadre", "montrechamps", "montrecouleur", "montredisposition", "montreedition", "montreenvironnementpolicecorps", "montrefiguresexternes", "montregrille", "montregroupecouleur", "montrejeusymboles", "montremakeup", "montrepalette", "montrepolicecorps", "montrereglages", "montrestruts", "motdroit", "motmarge", "moveongrid", "movesidefloat", "name", "navigating", "nextsection", "niveautete", "nocap", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nombredesouspages", "nombretotaldepages", "nommacro", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notepdp", "notopandbottomlines", "nowhitespace", "numbers", "numeroformule", "numeropage", "numeros", "numerosousformule", "numerotete", "numerotetecourant", "obtientmarquage", "oriente", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pagedouble", "pageoffset", "pagereference", "paragraph", "part", "pasplusdeblocs", "pasplusdefichiers", "periodes", "periods", "pilechamp", "placebookmarks", "placecombinedlist", "placecoteacote", "placefloat", "placeflottant", "placefootnotes", "placeformula", "placeformule", "placelegend", "placelegende", "placelesunsaudessusdesautres", "placelist", "placeliste", "placelisteinmbriquee", "placelistereference", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placemarquespages", "placenotespdp", "placenotespdplocales", "placenumeropage", "placenumerotete", "placeongrid", "placeontopofeachother", "placerawlist", "placereferencelist", "placeregister", "placeregistre", "placeregle", "placesidebyside", "placesousformule", "placesubformula", "placesurgrille", "placetextetete", "placevariabletexte", "position", "positionnetexte", "prendbuffer", "processblocks", "processpage", "produit", "program", "programme", "projet", "publication", "qqpart", "quotation", "quote", "ran", "raz", "razmarquage", "raznumero", "recommandation", "ref", "refait", "reference", "referencepage", "referencetexte", "referral", "referraldate", "reflete", "register", "reglealignement", "reglearrangement", "reglearriereplan", "reglearriereplans", "reglebarreinteraction", "reglebarresynchronisation", "reglebloc", "regleblocmarge", "regleblocsection", "regleboutons", "reglebuffer", "reglecapitales", "reglechamp", "reglechamps", "regleclipping", "reglecolonnes", "reglecombinaisons", "reglecommentaire", "reglecommentairepage", "reglecompoetroite", "reglecomposeenalinea", "reglecouleur", "reglecouleurs", "regledactylo", "regledansmarge", "regledemarrestoppe", "regledescriptions", "regledisposition", "regleecraninteraction", "regleecrans", "regleelements", "regleencadre", "regleentete", 
"regleenumerations", "regleenvironnementpolicecorps", "regleepaisseurligne", "regleespaceblanc", "regleespacement", "regleespacementinterligne", "reglefiguresexternes", "regleflottant", "regleflottants", "regleformulaires", "regleformules", "reglegroupeselements", "regleinf", "regleinteraction", "regleintitule", "regleintitules", "reglejeucolonne", "reglejeusymboles", "reglelabeltexte", "reglelangue", "reglelegende", "reglelignes", "reglelignesnoires", "reglelignesreglestexte", "regleliste", "reglelisteimbriquee", "reglelistereference", "reglemakeup", "reglemargereglee", "reglemarquage", "reglemarquagehyphenation", "reglemenuinteraction", "reglenotepdp", "reglenumero", "reglenumeropage", "reglenumerotation", "reglenumerotationligne", "reglenumerotationpage", "reglenumerotationparagraphe", "reglenumerotete", "regleoriente", "reglepalette", "reglepapier", "regleparagraphes", "reglepdp", "regleplacementopposition", "reglepolicecorps", "reglepositionnement", "reglepositiontexte", "regleprofils", "regleprogrammes", "reglepublications", "reglereferencage", "regleregistre", "regleregle", "regleremplitligne", "regleremplitlignesreglees", "reglesection", "regleseparationflottant", "reglesortie", "reglesouslignage", "reglesousnumeropage", "reglestrut", "reglesup", "reglesynchronisation", "reglesynonymes", "reglesysteme", "regletab", "regletableaux", "regletabulation", "regletaillepapier", "regletete", "regletetes", "regletexte", "regletextesentete", "regletextesinf", "regletextespdp", "regletextessup", "regletextestexte", "regletextetete", "regletolerance", "regletraitsfins", "regletransitionspage", "regletri", "regletype", "regleurl", "reglevariabletexte", "regleversions", "remplitchamp", "remplitligne", "remplitlignesreglees", "remplittexte", "reservefloat", "reset", "resetmarking", "resettextcontent", "retourarriere", "rightaligned", "romannumerals", "rotate", "sansalinea", "sansdimension", "sansespace", "sansespaceblanc", "sanslignesenteteetpdp", "sanslignessupetinf", "sansliste", "sansmarquage", "sanstest", "sauteblocs", "scale", "screen", "section", "seeregister", "selectblocks", "selectionneblocs", "selectionnepapier", "selectionneversion", "selectpaper", "selectversion", "sensunite", "separeflottant", "settext", "settextcontent", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", 
"setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextrules", "setuptexttexts", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "sousnumeropage", "space", "splitfloat", "startalignment", "startbackground", "startbuffer", "startcolor", "startcolumnmakeup", "startcolumns", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttextrule", "starttyping", "startunpacked", "startversion", "stopalignment", "stopbackground", "stopbuffer", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcombination", "stopcomment", "stopcompoetroite", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppealignement", "stoppearriereplan", "stoppeblocmarge", "stoppecitation", "stoppecodage", "stoppecolonnes", "stoppecombinaison", "stoppecomposant", "stoppecorrectionligne", "stoppecouleur", "stoppedegroupe", "stoppedocument", "stoppeenvironement", "stoppeglobal", "stoppegroupe", "stoppejeucolonne", "stoppeligne", "stoppeligneregleetexte", "stoppelignes", "stoppelocal", "stoppemakeup", "stoppemargereglee", "stoppenotespdplocales", "stoppenumerotationligne", "stoppeopposition", "stoppepositionnement", "stoppeproduit", "stoppeprofil", "stoppeprojet", 
"stopperaster", "stopperevetement", "stoppesynchronisation", "stoppetableau", "stoppetableaux", "stoppetexte", "stoppeversion", "stoppevuedensemble", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptextrule", "stoptyping", "stopunpacked", "stopversion", "stretched", "sub", "subject", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestion", "suivantprofil", "suivantversion", "suivantversionprofil", "switchtobodyfont", "sym", "symbol", "symbole", "synchronise", "synchronizationbar", "synchronize", "synonym", "tab", "tapebuffer", "testcolumn", "testpage", "tete", "tex", "textemarge", "textenotepdp", "textetete", "textreference", "textrule", "thinrule", "thinrules", "title", "titremarge", "tooltip", "traduire", "traiteblocs", "traitepage", "traitfin", "traitsfins", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "uneligne", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "utiliseJSscripts", "utiliseURL", "utiliseblocs", "utilisechemin", "utilisecommandes", "utilisedocumentexterne", "utiliseencodage", "utilisefichierexterne", "utilisefichiersexternes", "utilisefigureexterne", "utilisemodule", "utilisemodules", "utilisepsiteaudioexterne", "utilisereferences", "utilisespecialites", "utilisesymboles", "utiliseurl", "va", "vaalaboite", "vaalapage", "vaenbas", "valeurcouleur", "valeurgris", "variabletexte", "version", "vide", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister" },
+ ["it"]={ "CAP", "Cap", "Caps", "Character", "Characters", "GIORNOSETTIMANA", "Lettera", "Lettere", "MESE", "MONTH", "Numeri", "Numeriromani", "PAROLA", "PAROLE", "Parola", "Parole", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "about", "accoppiacarta", "accoppiadocumento", "accoppiamarcatura", "accoppiapagina", "accoppiaregistro", "adaptlayout", "adattacampo", "adattalayout", "al", "allineacentro", "allineadestra", "allineasinistra", "altezzacarta", "altezzacartastampa", "altezzacima", "altezzaelenco", "altezzafondo", "altezzaintestazione", "altezzamakeup", "altezzapdp", "altezzatesto", "ambiente", "ampiezzabordo", "ampiezzabordodestro", "ampiezzabordosinistro", "ampiezzacarta", "ampiezzacartastampa", "ampiezzaelenco", "ampiezzamakeup", "ampiezzamargine", "ampiezzamarginedestro", "ampiezzamarginesinistro", "ampiezzatesto", "ap", "apagina", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "background", "barracolori", "barrainterazione", "barrasincronizzazione", "bastablocchi", "bastafile", "blackrule", "blackrules", "blank", "bookmark", "but", "button", "bypassblocks", "cambiaafontdeltesto", "campi", "camporiempimento", "cap", "capello", "chapter", "character", "characters", "chem", "chim", "circondato", "citazione", "clip", "clonacampo", "clonefield", "colonna", "color", "colore", "coloregrigio", "column", "comment", "commento", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "componenet", "confrontagruppocolori", "confrontatavolozza", "convertinumero", "convertnumber", "copiacampo", "copyfield", "correctwhitespace", "correggispaziobianco", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "da", "daqualcheparte", "data", "datadioggi", "datareferral", "date", "decouplemarking", "decrementnumber", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecolumnbreak", "definecombination", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelist", "definelogo", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definesection", "definesectionblock", "definesorting", "definestartstop", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetype", "definetypeface", "definetyping", "defineversion", "definisci", "definisciaccento", "definisciambientefontdeltesto", "definisciblocco", "definiscibloccosezione", "definiscibuffer", "definiscicampo", "definiscicampoprincipale", "definiscicapoversi", "definiscicarattere", "definiscicolore", "definiscicomando", "definisciconversione", "definiscidescrizione", "definiscidimensionicarta", "definiscielenco", 
"definiscielencocombinato", "definiscienumerazione", "definiscietichetta", "definiscifigurasimbolo", "definiscifont", "definiscifontdeltesto", "definiscifontgrezzo", "definisciformatoriferimento", "definiscigruppocolonne", "definiscigruppocolori", "definiscihbox", "definisciincorniciato", "definisciiniziatermina", "definiscilayout", "definiscilinea", "definiscilistariferimenti", "definiscilogo", "definiscimakeup", "definiscimarcatura", "definiscimenuinterazione", "definiscimodellotabella", "definiscioggettomobile", "definisciordinamento", "definiscioutput", "definisciposizionetesto", "definisciprofilo", "definisciprogramma", "definisciregistro", "definisciriferimento", "definiscirigovuoto", "definiscisezione", "definiscisimbolo", "definiscisinonimi", "definiscisinonimofont", "definiscisottocampo", "definiscisovrapposizione", "definiscistackcampi", "definiscistile", "definiscistilefont", "definiscitabulato", "definiscitavolozza", "definiscitesta", "definiscitesto", "definiscitestoincorniciato", "definiscitype", "definiscityping", "definiscivariabiletesto", "definisciversion", "description", "determinacaratteristicheregistro", "determinacarattersticheelenco", "determinanumerotesta", "determineheadnumber", "determinelistcharacteristics", "dimensione", "disabilitamenuinterazione", "disableinteractionmenu", "distanzabordo", "distanzabordodestro", "distanzabordosinistro", "distanzacima", "distanzafondo", "distanzaintestazione", "distanzamargine", "distanzamarginedestro", "distanzamarginesinistro", "distanzapdp", "domicilio", "el", "elaborablocchi", "elaborapagina", "elementi", "elemento", "emptylines", "enumeration", "etichetta", "etichette", "externalfigure", "fatto", "field", "fieldstack", "figuraesterna", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspaces", "followprofile", "followprofileversion", "followversion", "fondo", "footnote", "footnotetext", "forceblocks", "forzablocchi", "fraction", "framed", "framedtext", "frazione", "from", "getbuffer", "getmarking", "getnumber", "giornosettimana", "godown", "goto", "gotobox", "graycolor", "grid", "griglia", "hairline", "head", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "ignoto", "immediatebetweenlist", "immediatetolist", "impaccato", "impostaallineamento", "impostaambientefontdeltesto", "impostaampiezzariga", "impostabarrainterazione", "impostabarrasincronizzazione", "impostablocchimargine", "impostablocco", "impostabloccosezione", "impostabuffer", "impostacampi", "impostacampo", "impostacapoversi", "impostacaption", "impostacaptions", "impostacima", "impostaclippling", "impostacolonne", "impostacolore", "impostacolori", "impostacombinazioni", "impostacommento", "impostacommentopagina", "impostadefinizionenotepdp", "impostadescrizioni", "impostadimensionicarta", "impostaelementi", "impostaelencazioni", "impostaelenco", "impostaelencocombinato", "impostaenumerazioni", "impostafigureesterne", "impostafondo", "impostafontdeltesto", "impostaforms", "impostaformule", "impostagruppocolonne", "impostaincorniciato", "impostainiziatermina", "impostainmargine", "impostainstestazione", "impostainterazione", "impostainterlinea", "impostalayout", "impostalegenda", "impostalinea", "impostalineemargine", "impostalineenere", "impostalineeriempimento", "impostalineesottili", "impostalineetesto", "impostalingua", "impostalistariferimenti", "impostamaiuscole", "impostamakeup", "impostamarcatura", "impostamenuinterazione", "impostamenzione", "impostanotepdp", "impostanumerazione", "impostanumerazionecapoversi", 
"impostanumerazionepagina", "impostanumerazionerighe", "impostanumeropagina", "impostanumerosottopagina", "impostanumerotesta", "impostaoggettimobili", "impostaoggettomobile", "impostaordinamento", "impostaoutput", "impostaparranging", "impostapdp", "impostapiustretto", "impostaposizionamento", "impostaposizionamentoopposti", "impostaposizionetesto", "impostaprofili", "impostaprogrammi", "impostapubblicazioni", "impostapulsanti", "impostaregistro", "impostarientro", "impostariferimento", "impostarighe", "impostarigheriempimento", "impostarigovuoto", "impostarotazione", "impostaschermi", "impostaschermointerazione", "impostasegnosillabazione", "impostasetsimboli", "impostasezione", "impostasfondi", "impostasfondo", "impostasincronizzazione", "impostasinonimi", "impostasistema", "impostasottolinea", "impostaspaziatura", "impostaspaziobianco", "impostaspezzamentooggettomobile", "impostastrut", "impostatab", "impostatabelle", "impostatabulato", "impostatavolozza", "impostatesta", "impostateste", "impostatesticima", "impostatestifondo", "impostatestiincorniciati", "impostatestiintestazioni", "impostatestipdp", "impostatesto", "impostatestoetichette", "impostatestointestazioni", "impostatestotesti", "impostatolleranza", "impostatransizionepagina", "impostatype", "impostatyping", "impostaurl", "impostavariabiletesto", "impostaversioni", "in", "inaltromargine", "incorniciato", "incrementanumero", "indentation", "indenting", "indestra", "inframed", "ininner", "iniziaallineamento", "iniziaambiente", "iniziabloccomargine", "iniziacitazione", "iniziacodifica", "iniziacolonne", "iniziacolore", "iniziacombinazione", "iniziacomponente", "iniziacorrezioneriga", "iniziadocumento", "iniziafigura", "iniziaglobale", "iniziagruppocolonne", "iniziaimpaccato", "inizialineamargine", "inizialineatesto", "inizialocale", "iniziamakeup", "inizianotepdplocali", "inizianumerazionerighe", "iniziaopposto", "iniziaoverview", "iniziapiustretto", "iniziaposizionamento", "iniziaprodotto", "iniziaprofilo", "iniziaprogetto", "iniziaraster", "iniziariga", "iniziarighe", "iniziasetsimboli", "iniziasfondo", "iniziasincronizzazione", "iniziasovrapposizione", "iniziatabella", "iniziatabelle", "iniziatesto", "iniziaunpacked", "iniziaversione", "inlatodestro", "inlatosinistro", "inleft", "inline", "inmaframed", "inmargin", "inmargine", "inmarginedestro", "inmarginesinistro", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", "inouter", "inriga", "inright", "insinistra", "installalingua", "installlanguage", "interactionbar", "interactionbuttons", "intorno", "item", "items", "its", "keepblocks", "labeling", "labels", "labeltext", "language", "leftaligned", "leg", "lettera", "lettere", "lineamargine", "lineanera", "lineasottile", "lineatesto", "lineenere", "lineeriempimento", "lineesottili", "lingua", "linguaprincipale", "listsymbol", "livellotesta", "loadsorts", "loadsynonyms", "logcampi", "logfields", "lohi", "low", "lunghezzaelenco", "maframed", "mainlanguage", "mapfontsize", "mar", "marcatura", "marcaversione", "marginrule", "margintext", "marking", "markversion", "matematica", "mathematics", "mediaeval", "menuinterattivo", "menzione", "mese", "mettielenco", "mettielencocombinato", "mettifiancoafianco", "mettiformula", "mettiingriglia", "mettilegenda", "mettilinea", "mettiloghi", "mettinotepdp", "mettinotepdplocali", "mettinumeropagina", "mettiregistro", "mettisegnalibro", "mettisottoformula", "mettiunosullaltro", "mettivariabiletesto", "midaligned", "mirror", "month", 
"mostraambientefontdeltesto", "mostracampi", "mostracolore", "mostracornice", "mostrafiguresterne", "mostrafontdeltesto", "mostragriglia", "mostragruppocolori", "mostraimpostazioni", "mostralyout", "mostramakeup", "mostrasetsimboli", "mostrastampa", "mostrastruts", "mostratavolozza", "moveongrid", "movesidefloat", "name", "nascondiblocchi", "navigating", "nextsection", "nientedimensioni", "nienteelenco", "nientelineecimafondo", "nientelineintestazionepdp", "nientemarcatura", "nienterientro", "nientespazio", "nientespaziobianco", "nocap", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nome", "nomeunita", "nomoreblocks", "nomorefiles", "nop", "nospace", "nota", "notapdp", "note", "notest", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "numeri", "numeriromani", "numeroformula", "numeropagina", "numeropaginacompleto", "numerosottoformula", "numerotesta", "numerotestacorrente", "numerototaledipagine", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pageoffset", "pagereference", "pagina", "paragraph", "paroladestra", "parolainmargine", "part", "passaafontgrezzo", "ped", "pedap", "periods", "perlungo", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placerawlist", "placereferencelist", "placeregister", "placesidebyside", "placesubformula", "position", "posizionanumerotesta", "posizionatesto", "posizionatestotesta", "posizione", "prendibuffer", "prendimarcatura", "processblocks", "processpage", "prodotto", "progetto", "program", "programma", "pubblicazione", "publication", "pulsante", "pulsantemenu", "pulsantinterazione", "punti", "qualcheriga", "quotation", "quote", "ran", "ref", "reference", "referral", "referraldate", "referring", "register", "reimposta", "reimpostamarcatura", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "rientro", "rif", "rifai", "riferimento", "riferimentopagina", "riferimentotesto", "riflessione", "rigariempimento", "rightaligned", "rigovuoto", "romannumerals", "rotate", "ruota", "saltablocchi", "scala", "scale", "schermo", "screen", "scrividentroelenco", "scriviinelenco", "scriviinlistariferimenti", "scriviinregistro", "section", "seeregister", "segnalibro", "seguiprofilo", "seguiversione", "seguiversioneprofilo", "selectblocks", "selectpaper", "selectversion", "selezionablocchi", "selezionacarta", "selezionaversione", "separamarcatura", "setnumber", "settext", "settextcontent", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", 
"setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextrules", "setuptexttexts", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "setvariabiletesto", "sfondo", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "sim", "simbolo", "sincronizza", "someline", "somewhere", "sort", "space", "spazifissi", "spazio", "spaziobianco", "spaziocima", "spaziodietro", "spaziofisso", "spaziofondo", "spessoreriga", "spezzaoggettomobile", "splitfloat", "spostaagriglia", "spostaformula", "stackcampi", "startalignment", "startbackground", "startbuffer", "startcolor", "startcolumnmakeup", "startcolumns", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttextrule", "starttyping", "startunpacked", "startversion", "stirato", "stopalignment", "stopbackground", "stopbuffer", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", 
"stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptextrule", "stoptyping", "stopunpacked", "stopversion", "stretched", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "sym", "symbol", "synchronizationbar", "synchronize", "synonym", "tab", "terminaallineamento", "terminaambiente", "terminabloccomargine", "terminacitazione", "terminacodifica", "terminacolonne", "terminacolore", "terminacombinazione", "terminacomponente", "terminacorrezioneriga", "terminadocumento", "terminaglobale", "terminagruppocolonne", "terminaimpaccato", "terminalineamargine", "terminalineatesto", "terminalocale", "terminamakeup", "terminanotepdplocali", "terminanumerazionerighe", "terminaopposto", "terminaoverview", "terminapiustretto", "terminaposizionamento", "terminaprodotto", "terminaprofili", "terminaprogetto", "terminaraster", "terminariga", "terminarighe", "terminasfondo", "terminasincronizzazione", "terminasovrapposizione", "terminatabella", "terminatabelle", "terminatesto", "terminaunpacked", "terminaversioni", "testa", "testcolumn", "testoetichetta", "testoinmargine", "testoinstestazioni", "testonotapdp", "testoriempimento", "testpage", "tex", "textreference", "textrule", "thinrule", "thinrules", "tieniblocchi", "title", "titoloinmargine", "tooltip", "traduci", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "usaJSscripts", "usaURL", "usablocco", "usacartella", "usacodifica", "usacolonnasonoraesterna", "usacomandi", "usadocumentoesterno", "usafiguraesterna", "usafileesterni", "usafileesterno", "usamoduli", "usamodulo", "usariferimenti", "usasimboli", "usaspecialita", "usaurl", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "vaia", "vaiabox", "vaiapagina", "vaigiu", "valorecolore", "valoregrigio", "variabiletesto", "version", "versione", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister" },
+ ["nl"]={ "CAP", "Cap", "Caps", "Character", "Characters", "Cijfers", "KAP", "Kap", "Kaps", "Letter", "Letters", "MAAND", "MONTH", "Romannumerals", "Romeins", "WEEKDAG", "WEEKDAY", "WOORD", "WOORDEN", "WORD", "WORDS", "Woord", "Woorden", "Word", "Words", "aantalsubpaginas", "about", "achtergrond", "adaptlayout", "appendix", "arg", "at", "atpage", "background", "bepaalkopnummer", "bepaallijstkenmerken", "bepaalregisterkenmerken", "betekenis", "binnenmargeafstand", "binnenmargebreedte", "binnenrandafstand", "binnenrandbreedte", "blackrule", "blackrules", "blank", "blanko", "blokje", "blokjes", "blokkeerinteractiemenu", "bodemwit", "bookmark", "bovenafstand", "bovenhoogte", "breuk", "buitenmargeafstand", "buitenmargebreedte", "buitenrandafstand", "buitenrandbreedte", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "cijfers", "citaat", "citeer", "clip", "clonefield", "color", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "converteernummer", "convertnumber", "copieerveld", "copyfield", "correctwhitespace", "corrigeerwitruimte", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "date", "datum", "decouplemarking", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinteractionmenu", "definelabel", "definelist", "definelogo", "definemakeup", "definemarking", "defineoutput", "defineoverlay", "definepalet", "definepapersize", "defineparagraphs", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definesection", "definesectionblock", "definesorting", "definestartstop", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetype", "definetypeface", "definetyping", "defineversion", "definieer", "definieeraccent", "definieeralineas", "definieerbeeldmerk", "definieerblanko", "definieerblok", "definieerbuffer", "definieercombinatie", "definieercommando", "definieerconversie", "definieerfiguursymbool", "definieerfont", "definieerfontstijl", "definieerfontsynoniem", "definieerhbox", "definieerhoofdveld", "definieeringesprongentext", "definieerinmarge", "definieerinteractiemenu", "definieeritemgroep", "definieerkadertekst", "definieerkarakter", "definieerkleur", "definieerkleurgroep", "definieerkolomgroep", "definieerkolomovergang", "definieerkop", "definieerkorps", "definieerkorpsomgeving", "definieerlayer", "definieerlayout", "definieerletter", "definieerlijn", "definieerlijst", "definieermarkering", "definieeromlijnd", "definieeropmaak", "definieeroverlay", "definieerpaginaovergang", "definieerpalet", "definieerpapierformaat", "definieerplaats", "definieerplaatsblok", "definieerprofiel", "definieerprogramma", "definieerreferentie", "definieerreferentieformaat", "definieerreferentielijst", "definieerregister", "definieersamengesteldelijst", "definieersectie", 
"definieersectieblok", "definieersorteren", "definieerstartstop", "definieersubveld", "definieersymbool", "definieersynoniemen", "definieertabelvorm", "definieertabulatie", "definieertekst", "definieertekstachtergrond", "definieertekstpositie", "definieertekstvariabele", "definieertype", "definieertypen", "definieeruitvoer", "definieerveld", "definieerveldstapel", "definieerversie", "definieerwiskundeuitlijnen", "description", "determineheadnumber", "determinelistcharacteristics", "dimensie", "directnaarlijst", "directtussenlijst", "disableinteractionmenu", "doordefinieren", "doorlabelen", "doornummeren", "dunnelijn", "dunnelijnen", "eenregel", "emptylines", "enumeration", "ergens", "externalfigure", "externfiguur", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footnote", "footnotetext", "forceblocks", "forceerblokken", "formulenummer", "fraction", "framed", "framedtext", "from", "gebruikJSscripts", "gebruikURL", "gebruikXMLfilter", "gebruikblokken", "gebruikcommandos", "gebruikexterndocument", "gebruikexternefile", "gebruikexternefiles", "gebruikexternfiguur", "gebruikexterngeluidsfragment", "gebruikgebied", "gebruikmodule", "gebruikmodules", "gebruikreferenties", "gebruikspecials", "gebruiksymbolen", "gebruiktypescript", "gebruiktypescriptfile", "gebruikurl", "geenblokkenmeer", "geenbovenenonderregels", "geendimensie", "geenfilesmeer", "geenhoofdenvoetregels", "geenlijst", "geenmarkering", "geenspatie", "geentest", "geenwitruimte", "geg", "getbuffer", "getmarking", "godown", "goto", "gotobox", "graycolor", "grid", "grijskleur", "grijswaarde", "haalbuffer", "haalmarkering", "haalnummer", "haarlijn", "hairline", "handhaafblokken", "head", "headnumber", "headtext", "herhaal", "hideblocks", "high", "hl", "hoofdafstand", "hoofdhoogte", "hoofdtaal", "hoog", "huidigedatum", "huidigekopnummer", "in", "inanderemarge", "inbinnen", "inbuiten", "indentation", "indenting", "inframed", "ininner", "inleft", "inlijnd", "inline", "inlinker", "inlinkermarge", "inlinkerrand", "inmarge", "inmargin", "inothermargin", "inouter", "inrechter", "inrechtermarge", "inrechterrand", "inregel", "inright", "inspringen", "installeertaal", "installlanguage", "interactiebalk", "interactiebuttons", "interactiemenu", "interactionbar", "interactionbuttons", "invullijnen", "invulregel", "invultekst", "invulveld", "inwilijnd", "item", "items", "its", "kantlijn", "kap", "keepblocks", "kenmerk", "kenmerkdatum", "kentekstvariabeletoe", "kleur", "kleurenbalk", "kleurwaarde", "kloonveld", "kolom", "kop", "kopniveau", "kopnummer", "koppeldocument", "koppelmarkering", "koppelpagina", "koppelpapier", "koppelregister", "kopsym", "koptekst", "kopwit", "laag", "label", "labeling", "labels", "labeltekst", "labeltext", "laho", "language", "leftaligned", "leg", "legeregels", "letter", "letters", "lijndikte", "lijstbreedte", "lijsthoogte", "lijstlengte", "lijstsymbool", "linkermargeafstand", "linkermargebreedte", "linkerrandafstand", "linkerrandbreedte", "listsymbol", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "maand", "mainlanguage", "mapfontsize", "mar", "margeafstand", "margebreedte", "margetekst", "margetitel", "margewoord", "marginrule", "margintext", "markeer", "markeerversie", "marking", "markversion", "mathematics", "mediaeval", "menubutton", "midaligned", "mirror", "month", "moveongrid", "naam", "naar", "naarbox", "naarpagina", "name", "navigerend", "nextsection", "nietinspringen", "nocap", 
"noheaderandfooterlines", "noindenting", "nokap", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "noot", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numbers", "omgeving", "omlaag", "omlijnd", "onbekend", "onderafstand", "onderdeel", "onderhoogte", "ontkoppelmarkering", "op", "opelkaar", "oplinkermarge", "oppagina", "oprechtermarge", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagereference", "pagina", "paginadiepte", "paginanummer", "paginaoffset", "paginareferentie", "papierbreedte", "papierhoogte", "paragraph", "part", "paslayoutaan", "passeerblokken", "passendveld", "periods", "plaatsbeeldmerken", "plaatsbookmarks", "plaatsformule", "plaatskopnummer", "plaatskoptekst", "plaatslegenda", "plaatslijn", "plaatslijst", "plaatslijstmetsynoniemen", "plaatslokalevoetnoten", "plaatsnaastelkaar", "plaatsonderelkaar", "plaatsopgrid", "plaatspaginanummer", "plaatsplaatsblok", "plaatsreferentielijst", "plaatsregister", "plaatsruwelijst", "plaatssamengesteldelijst", "plaatssubformule", "plaatstekstvariabele", "plaatsvoetnoten", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placereferencelist", "placeregister", "placesidebyside", "placesubformula", "position", "positioneer", "positioneertekst", "printpapierbreedte", "printpapierhoogte", "processblocks", "processpage", "produkt", "program", "programma", "projekt", "publicatie", "publication", "punten", "quotation", "quote", "ran", "randafstand", "randbreedte", "rechtermargeafstand", "rechtermargebreedte", "rechterrandafstand", "rechterrandbreedte", "ref", "refereer", "reference", "referentie", "referral", "referraldate", "regellinks", "regelmidden", "regelrechts", "register", "registreervelden", "reservefloat", "reset", "resetmarkering", "resetmarking", "resetnummer", "resettekstinhoud", "resettextcontent", "rightaligned", "romannumerals", "romeins", "rooster", "rotate", "roteer", "rugwit", "scale", "schaal", "scherm", "schrijfnaarlijst", "schrijfnaarreferentielijst", "schrijfnaarregister", "schrijftussenlijst", "screen", "section", "seeregister", "selectblocks", "selecteerblokken", "selecteerpapier", "selecteerversie", "selectpaper", "selectversion", "setnummer", "settextcontent", "setupalign", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", 
"setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupnarrower", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextrules", "setuptexttexts", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "snijwit", "som", "someline", "somewhere", "sort", "space", "spatie", "spiegel", "splitfloat", "splitsplaatsblok", "startachtergrond", "startalignment", "startbackground", "startbuffer", "startcitaat", "startcodering", "startcolor", "startcolumns", "startcombinatie", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfiguur", "startfloattext", "startformula", "startframedtext", "startglobaal", "starthiding", "startinteractionmenu", "startitemgroup", "startkantlijn", "startkleur", "startkolomgroep", "startkolommen", "startkolomopmaak", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokaal", "startlokalevoetnoten", "startmakeup", "startmargeblok", "startmarginblock", "startmarginrule", "startnaast", "startnamemakeup", "startnarrower", "startomgeving", "startomlijnd", "startonderdeel", "startopelkaar", "startopmaak", "startopposite", "startoverlay", "startoverview", "startoverzicht", "startpacked", "startparagraph", "startpositioneren", "startpositioning", "startpostponing", "startproduct", "startprodukt", "startprofiel", "startprofile", "startproject", "startprojekt", "startquotation", "startraster", "startregel", "startregelcorrectie", "startregelnummeren", "startregels", "startregister", "startsmaller", "startsymbolset", "startsymboolset", "startsynchronisatie", "startsynchronization", "starttabel", "starttabellen", "starttable", "starttables", "starttabulate", "starttekst", "starttekstachtergrond", "starttekstlijn", "starttextrule", "starttyping", "startuitlijnen", "startunpacked", "startvanelkaar", "startversie", "startversion", "stelachtergrondenin", "stelachtergrondin", "stelalineasin", "stelantwoordgebiedin", "stelarrangerenin", "stelblankoin", "stelblokin", "stelblokjesin", "stelblokkopjein", "stelblokkopjesin", "stelbovenin", "stelboventekstenin", "stelbufferin", "stelbuttonsin", "stelciterenin", "stelclipin", "stelcombinatiesin", "stelcommentaarin", "steldoordefinierenin", "steldoornummerenin", "steldunnelijnenin", "stelexternefigurenin", "stelformulesin", 
"stelformulierenin", "stelhoofdin", "stelhoofdtekstenin", "stelingesprongentextin", "stelinmargein", "stelinspringenin", "stelinteractiebalkin", "stelinteractiein", "stelinteractiemenuin", "stelinteractieschermin", "stelinterliniein", "stelinvullijnenin", "stelinvulregelsin", "stelitemgroepin", "stelitemsin", "stelkadertekstenin", "stelkantlijnin", "stelkapitalenin", "stelkleurenin", "stelkleurin", "stelkolomgroepin", "stelkolomgroepregelsin", "stelkolomgroepstartin", "stelkolommenin", "stelkopin", "stelkopnummerin", "stelkoppeltekenin", "stelkoppenin", "stelkoptekstin", "stelkorpsin", "stelkorpsomgevingin", "stellabeltekstin", "stellayoutin", "stellegendain", "stellijndiktein", "stellijnin", "stellijstin", "stelmargeblokkenin", "stelmarkeringin", "stelnaastplaatsenin", "stelnummerenin", "stelnummerin", "stelomlijndin", "stelonderin", "stelonderstrepenin", "stelondertekstenin", "stelopmaakin", "stelopsommingenin", "stelpaginacommentaarin", "stelpaginanummerin", "stelpaginanummeringin", "stelpaginaovergangenin", "stelpaletin", "stelpapierformaatin", "stelpapierin", "stelparagraafnummerenin", "stelplaatsblokin", "stelplaatsblokkenin", "stelplaatsbloksplitsenin", "stelplaatsin", "stelpositionerenin", "stelprofielenin", "stelprogrammasin", "stelpublicatiesin", "stelrastersin", "stelreferentielijstin", "stelrefererenin", "stelregelnummerenin", "stelregelsin", "stelregisterin", "stelroterenin", "stelsamengesteldelijstin", "stelsectieblokin", "stelsectiein", "stelsmallerin", "stelsorterenin", "stelspatieringin", "stelstartstopin", "stelstrutin", "stelsubpaginanummerin", "stelsymboolsetin", "stelsynchronisatiebalkin", "stelsynchronisatiein", "stelsynoniemenin", "stelsysteemin", "steltaalin", "steltabellenin", "steltabin", "steltabulatiein", "steltekstachtergrondin", "steltekstin", "steltekstinhoudin", "steltekstlijnenin", "steltekstpositiein", "stelteksttekstenin", "steltekstvariabelein", "steltolerantiein", "steltypein", "steltypenin", "steluitlijnenin", "steluitvoerin", "stelurlin", "stelveldenin", "stelveldin", "stelversiesin", "stelvoetin", "stelvoetnootdefinitiein", "stelvoetnotenin", "stelvoettekstenin", "stelwiskundeuitlijnenin", "stelwitruimtein", "stopachtergrond", "stopalignment", "stopbackground", "stopbuffer", "stopcitaat", "stopcodering", "stopcolor", "stopcolumns", "stopcombinatie", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobaal", "stophiding", "stopinteractionmenu", "stopitemgroup", "stopkantlijn", "stopkleur", "stopkolomgroep", "stopkolommen", "stopkolomopmaak", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokaal", "stoplokalevoetnoten", "stopmakeup", "stopmargeblok", "stopmarginblock", "stopmarginrule", "stopnaast", "stopnamemakeup", "stopnarrower", "stopomgeving", "stopomlijnd", "stoponderdeel", "stopopelkaar", "stopopmaak", "stopopposite", "stopoverlay", "stopoverview", "stopoverzicht", "stoppacked", "stopparagraph", "stoppositioneren", "stoppositioning", "stoppostponing", "stopproduct", "stopprodukt", "stopprofiel", "stopprofile", "stopproject", "stopprojekt", "stopquotation", "stopraster", "stopregel", "stopregelcorrectie", "stopregelnummeren", "stopregels", "stopsmaller", "stopsymbolset", "stopsynchronisatie", "stopsynchronization", "stoptabel", "stoptabellen", "stoptable", "stoptables", "stoptabulate", 
"stoptekst", "stoptekstachtergrond", "stoptekstlijn", "stoptextrule", "stoptyping", "stopuitlijnen", "stopunpacked", "stopvanelkaar", "stopversie", "stopversion", "stretched", "sub", "subformulenummer", "subject", "subpaginanummer", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestie", "switchnaarkorps", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "symbool", "symoffset", "synchronisatiebalk", "synchroniseer", "synchronizationbar", "synchronize", "synonym", "taal", "tab", "tekstbreedte", "teksthoogte", "tekstlijn", "tekstreferentie", "tekstvariabele", "testkolom", "testpagina", "tex", "textreference", "textrule", "thinrule", "thinrules", "title", "toelichting", "tooltip", "toonexternefiguren", "toongrid", "tooninstellingen", "toonkader", "toonkleur", "toonkleurgroep", "toonkorps", "toonkorpsomgeving", "toonlayout", "toonopmaak", "toonpalet", "toonprint", "toonstruts", "toonsymboolset", "toonvelden", "totaalaantalpaginas", "translate", "txt", "typ", "type", "typebuffer", "typefile", "uit", "uitgerekt", "underbar", "underbars", "useURL", "useblocks", "usecodering", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "vastespatie", "vastespaties", "veld", "veldstapel", "verbergblokken", "vergelijkkleurgroep", "vergelijkpalet", "verhoognummer", "verlaagnummer", "verplaatsformule", "verplaatsopgrid", "verplaatszijblok", "versie", "version", "vertaal", "verwerkblokken", "verwerkpagina", "vl", "voetafstand", "voethoogte", "voetnoot", "voetnoottekst", "volgprofiel", "volgprofielversie", "volgversie", "volledigepaginanummer", "volledigregister", "voluit", "weekdag", "weekday", "whitespace", "wilijnd", "wiskunde", "witruimte", "woonplaats", "woordrechts", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister", "zetbreedte", "zethoogte" },
+ ["pe"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "about", "adaptlayout", "appendix", "at", "atpage", "background", "blackrule", "blackrules", "blank", "bookmark", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "clip", "clonefield", "color", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "date", "decouplemarking", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinteractionmenu", "definelabel", "definelist", "definelogo", "definemakeup", "definemarking", "defineoutput", "defineoverlay", "definepalet", "definepapersize", "defineparagraphs", "defineprofile", "defineprogram", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definesection", "definesectionblock", "definesorting", "definestartstop", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetype", "definetypeface", "definetyping", "defineversion", "description", "determineheadnumber", "determinelistcharacteristics", "disableinteractionmenu", "emptylines", "enumeration", "externalfigure", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footnote", "footnotetext", "forceblocks", "fraction", "framed", "framedtext", "from", "getbuffer", "getmarking", "godown", "goto", "gotobox", "graycolor", "grid", "hairline", "head", "headnumber", "headtext", "hideblocks", "high", "hl", "in", "indentation", "indenting", "inframed", "ininner", "inleft", "inline", "inmargin", "inmframed", "inothermargin", "inouter", "inright", "installlanguage", "interactionbar", "interactionbuttons", "item", "items", "its", "keepblocks", "labeling", "labels", "labeltext", "language", "leftaligned", "listsymbol", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "mainlanguage", "mapfontsize", "mar", "marginrule", "margintext", "marking", "markversion", "mathematics", "mediaeval", "mframed", "midaligned", "mirror", "month", "moveongrid", "name", "nextsection", "nocap", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numbers", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagereference", "paragraph", "part", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placereferencelist", "placeregister", 
"placesidebyside", "placesubformula", "position", "processblocks", "processpage", "program", "publication", "quotation", "quote", "ran", "ref", "reference", "referral", "referraldate", "register", "reservefloat", "reset", "resetmarking", "resettextcontent", "rightaligned", "romannumerals", "rotate", "scale", "screen", "section", "seeregister", "selectblocks", "selectpaper", "selectversion", "settextcontent", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupnarrower", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextrules", "setuptexttexts", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "space", "splitfloat", "startalignment", "startbackground", "startbuffer", "startcolor", "startcolumns", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", 
"startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttextrule", "starttyping", "startunpacked", "startversion", "stopalignment", "stopbackground", "stopbuffer", "stopcolor", "stopcolumns", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopraster", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptextrule", "stoptyping", "stopunpacked", "stopversion", "stretched", "sub", "subject", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "sym", "symbol", "synchronizationbar", "synchronize", "synonym", "tab", "tex", "textreference", "textrule", "thinrule", "thinrules", "title", "tooltip", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "useJSscripts", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "useurl", "version", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister", "آفست‌صفحه", "آیتم", "آیتمها", "آینه", "اجباربلوکها", "ارتفاع‌آرایش", "ارتفاع‌بالا", "ارتفاع‌برگ", "ارتفاع‌ته‌برگ", "ارتفاع‌خط", "ارتفاع‌سربرگ", "ارتفاع‌متن", "ارتفاع‌پایین", "از", "ازکارانداختن‌منوی‌پانل", "استفاده‌بلوکها", "استفاده‌دستخط‌تایپ", "استفاده‌رمزینه", "استفاده‌شکل‌خارجی", "استفاده‌فرمانها", "استفاده‌قطعه‌موزیک‌خارجی", "استفاده‌مدول", "استفاده‌مدولها", "استفاده‌مرجعها", "استفاده‌مسیر", "استفاده‌نمادها", "استفاده‌نوشتارخارجی", "استفاده‌ویژگیها", "استفاده‌پرونده‌خارجی", "استفاده‌پرونده‌دستخط‌تایپ", "استفاده‌پرونده‌های‌خارجی", "اعدادلاتین", "افزودن", "اما", "امتحان‌نکن", "انتخاب‌برگ", "انتخاب‌بلوکها", "انتخاب‌نسخه", "انتقال‌به‌توری", "انتقال‌فرمول", "انتقال‌کنار‌شناور", "انجام‌دوباره", "بارگذاری‌آرایش", "بارگذاری‌آیتمها", "بارگذاری‌ارجاع", "بارگذاری‌اندازه‌برگ", "بارگذاری‌باریکتر", "بارگذاری‌بافر", "بارگذاری‌بالا", "بارگذاری‌بخش", "بارگذاری‌بردباری", "بارگذاری‌برنامه‌ها", "بارگذاری‌برگ", "بارگذاری‌بست", "بارگذاری‌بلوک", "بارگذاری‌بلوکهای‌حاشیه", "بارگذاری‌بلوک‌بخش", "بارگذاری‌تایپ", "بارگذاری‌تایپ‌کردن", "بارگذاری‌تب", "بارگذاری‌ترتیب", "بارگذاری‌ترکیب‌ها", "بارگذاری‌تطابق", "بارگذاری‌تعریف‌پانوشت", "بارگذاری‌تنظیم", "بارگذاری‌تنظیم‌ریاضی", "بارگذاری‌ته‌برگ", "بارگذاری‌تورفتگی", "بارگذاری‌توضیح", "بارگذاری‌توضیح‌صفحه", "بارگذاری‌ثبت", "بارگذاری‌جانشانی", "بارگذاری‌جدولها", "بارگذاری‌جدول‌بندی", "بارگذاری‌خالی", "بارگذاری‌خروجی", "بارگذاری‌خط", "بارگذاری‌خطها", "بارگذاری‌خطهای‌حاشیه", "بارگذاری‌خطهای‌سیاه", "بارگذاری‌خطهای‌متن", "بارگذاری‌خطهای‌مجموعه‌ستون", 
"بارگذاری‌خطها‌ی‌نازک", "بارگذاری‌درج‌درخطها", "بارگذاری‌درج‌مخالف", "بارگذاری‌درون‌حاشیه", "بارگذاری‌دوران", "بارگذاری‌دکمه‌ها", "بارگذاری‌راهنما", "بارگذاری‌رنگ", "بارگذاری‌رنگها", "بارگذاری‌زبان", "بارگذاری‌ستونها", "بارگذاری‌سر", "بارگذاری‌سربرگ", "بارگذاری‌سرها", "بارگذاری‌سیستم", "بارگذاری‌شرح", "بارگذاری‌شرحها", "بارگذاری‌شروع‌مجموعه‌ستون", "بارگذاری‌شروع‌پایان", "بارگذاری‌شماره", "بارگذاری‌شماره‌زیرصفحه", "بارگذاری‌شماره‌سر", "بارگذاری‌شماره‌صفحه", "بارگذاری‌شماره‌گذاری", "بارگذاری‌شماره‌گذاریها", "بارگذاری‌شماره‌گذاری‌صفحه", "بارگذاری‌شماره‌گذاری‌پاراگراف", "بارگذاری‌شماره‌‌گذاری‌خط", "بارگذاری‌شناور", "بارگذاری‌شناورها", "بارگذاری‌شکافتن‌شناورها", "بارگذاری‌شکلهای‌خارجی", "بارگذاری‌طرح", "بارگذاری‌طرح‌بندی", "بارگذاری‌عرض‌خط", "بارگذاری‌فاصله‌بین‌خط", "بارگذاری‌فرمولها", "بارگذاری‌فضای‌سفید", "بارگذاری‌فضا‌گذاری", "بارگذاری‌قالبی", "بارگذاری‌قلم‌متن", "بارگذاری‌لوح", "بارگذاری‌لیست", "بارگذاری‌لیست‌ترکیبی", "بارگذاری‌لیست‌مرجع", "بارگذاری‌مترادفها", "بارگذاری‌متغیر‌متن", "بارگذاری‌متن", "بارگذاری‌متنهای‌بالا", "بارگذاری‌متن‌سر", "بارگذاری‌متن‌سربرگ", "بارگذاری‌متن‌قالبی", "بارگذاری‌متن‌متنها", "بارگذاری‌متن‌پانوشت", "بارگذاری‌متن‌پایین", "بارگذاری‌مجموعه‌ستون", "بارگذاری‌مجموعه‌نماد", "بارگذاری‌محیط‌قلم‌متن", "بارگذاری‌منوی‌پانل", "بارگذاری‌مکان‌متن", "بارگذاری‌مکان‌گذاری", "بارگذاری‌میدان", "بارگذاری‌میدانها", "بارگذاری‌میله‌تطابق", "بارگذاری‌میله‌زیر", "بارگذاری‌میله‌پانل", "بارگذاری‌نسخه‌ها", "بارگذاری‌نشانه‌شکستن", "بارگذاری‌نشانه‌گذاری", "بارگذاری‌نشرها", "بارگذاری‌نقل", "بارگذاری‌پاراگرافها", "بارگذاری‌پانل", "بارگذاری‌پانوشتها", "بارگذاری‌پایین", "بارگذاری‌پرده‌ها", "بارگذاری‌پرده‌پانل", "بارگذاری‌پروفایلها", "بارگذاری‌پرکردن‌خطها", "بارگذاری‌پس‌زمینه", "بارگذاری‌پس‌زمینه‌ها", "بارگذاری‌چیدن", "بارگذاری‌گذارصفحه", "بارگذاری‌گروههای‌آیتم", "بارگذاری‌گروه‌آیتم", "بازنشانی", "بازنشانی‌شماره", "بازنشانی‌متن", "بازنشانی‌نشانه‌گذاری", "باگذاری‌متن‌برچسب", "بدون‌بعد", "بدون‌بلوکهای‌بیشتر", "بدون‌تورفتگی", "بدون‌خط‌بالاوپایین", "بدون‌خط‌سروته‌برگ", "بدون‌فایلهای‌بیشتر", "بدون‌فضا", "بدون‌فضای‌سفید", "بدون‌لیست", "بدون‌نشانه‌گذاری", "برنامه", "بروبه", "بروبه‌جعبه", "بروبه‌صفحه", "بروپایین", "برچسب", "برچسبها", "بعد", "بلند", "بلوکهای‌پردازش", "بلوکها‌پنهان", "بنویس‌بین‌لیست", "بنویس‌درثبت", "بنویس‌درلیست‌مرجع", "بنویس‌در‌لیست", "تاریخ", "تاریخ‌جاری", "تاریخ‌رجوع", "تایپ", "تایپ‌بافر", "تایپ‌پرونده", "تب", "ترجمه", "تطابق", "تعریف", "تعریف‌آرایش", "تعریف‌آرم", "تعریف‌الگوی‌جدول", "تعریف‌اندازه‌برگ", "تعریف‌بافر", "تعریف‌بخش", "تعریف‌برنامه", "تعریف‌برچسب", "تعریف‌بلوک", "تعریف‌بلوک‌بخش", "تعریف‌تایپ", "تعریف‌تایپ‌کردن", "تعریف‌تبدیل", "تعریف‌ترتیب", "تعریف‌ترکیب", "تعریف‌تنظیم‌ریاضی", "تعریف‌توده‌میدان", "تعریف‌ثبت", "تعریف‌جانشانی", "تعریف‌جدول‌بندی", "تعریف‌جعبه‌‌افقی", "تعریف‌حرف", "تعریف‌خالی", "تعریف‌خروجی", "تعریف‌خط‌حائل", "تعریف‌درون‌حاشیه", "تعریف‌رنگ", "تعریف‌زیرمیدان", "تعریف‌سبک", "تعریف‌سبک‌قلم", "تعریف‌سر", "تعریف‌شرح", "تعریف‌شروع‌پایان", "تعریف‌شماره‌بندی", "تعریف‌شمایل‌مرجع", "تعریف‌شناور", "تعریف‌شکستن‌ستون", "تعریف‌شکست‌صفحه", "تعریف‌طرح‌بندی", "تعریف‌فرمان", "تعریف‌قالبی", "تعریف‌قلم", "تعریف‌قلم‌خام", "تعریف‌قلم‌متن", "تعریف‌لایه", "تعریف‌لهجه", "تعریف‌لوح", "تعریف‌لیست", "تعریف‌لیست‌ترکیبی", "تعریف‌لیست‌مرجع", "تعریف‌مترادفها", "تعریف‌مترادف‌قلم", "تعریف‌متغیرمتن", "تعریف‌متن", "تعریف‌متن‌قالبی", "تعریف‌مجموعه‌ستون", "تعریف‌محیط‌قلم‌بدنه", "تعریف‌مرجع", "تعریف‌منوی‌پانل", "تعریف‌مکان‌متن", "تعریف‌میدان", "تعریف‌میدان‌اصلی", "تعریف‌نسخه", "تعریف‌نشانه‌گذاری", "تعریف‌نماد", "تعریف‌نمادشکل", "تعریف‌پاراگرافها", "تعریف‌پروفایل", "تعریف‌پوشش", "تعریف‌گروه‌آیتم", 
"تعریف‌گروه‌رنگ", "تعیین‌شماره", "تعیین‌شماره‌سر", "تعیین‌متغیر‌متن", "تعیین‌محتوای‌متن", "تعیین‌مشخصات‌ثبت", "تعیین‌مشخصات‌لیست", "تغییربه‌قلم‌بدنه", "تغییربه‌قلم‌خام", "تنظیم‌راست", "تنظیم‌طرح‌بندی", "تنظیم‌وسط", "توجه", "تورفتگی", "توری", "تولید", "تک", "ثبت‌زوج", "ثبت‌کامل", "جداسازی‌نشانه‌گذاری", "حاش", "حرف", "حرفها", "حفظ‌بلوکها", "حقیقت", "خالی", "خطهای‌سیاه", "خطهای‌نازک", "خطها‌خالی", "خط‌حاشیه", "خط‌سیاه", "خط‌متن", "خط‌مو", "خط‌نازک", "خ‌ا", "خ‌ع", "در", "درج‌آرمها", "درج‌ثبت", "درج‌خط", "درج‌درخط", "درج‌درخطها", "درج‌درمتن", "درج‌درمیدان", "درج‌در‌بالای‌یکدیگر", "درج‌در‌توری", "درج‌راهنما", "درج‌زیرفرمول", "درج‌شماره‌سر", "درج‌شماره‌صفحه", "درج‌شناور", "درج‌فرمول", "درج‌لیست", "درج‌لیست‌خام", "درج‌لیست‌مختلط", "درج‌لیست‌مرجع", "درج‌متغیرمتن", "درج‌متن‌سر", "درج‌پانوشتها", "درج‌پانوشتهای‌موضعی", "درج‌چوب‌خط", "درج‌کنار‌به‌کنار", "درحاشیه", "درحاشیه‌دیگر", "درحاشیه‌راست", "درحاشیه‌چپ", "درخارجی", "درخط", "درداخلی", "درراست", "درصفحه", "درقالبی", "درلبه‌راست", "درلبه‌چپ", "درمورد", "درون", "درپر", "درچپ", "دریافت‌بافر", "دریافت‌شماره", "دریافت‌نشانه", "دوران", "دکمه", "دکمه‌منو", "دکمه‌پانل", "رج", "رجوع", "رنگ", "رنگ‌خاکستری", "روزهفته", "ریاضی", "زبان", "زبان‌اصلی", "ستون", "ستون‌امتحان", "سر", "سرپوش‌کوچک‌نه", "شروع‌آرایش", "شروع‌آرایش‌ستون", "شروع‌باریکتر", "شروع‌بازبینی", "شروع‌بلوک‌حاشیه", "شروع‌ترکیب", "شروع‌تصحیح‌خط", "شروع‌تطابق", "شروع‌تنظیم", "شروع‌تولید", "شروع‌جدول", "شروع‌جدولها", "شروع‌خط", "شروع‌خطها", "شروع‌خط‌حاشیه", "شروع‌خط‌متن", "شروع‌رنگ", "شروع‌ستونها", "شروع‌سراسری", "شروع‌شماره‌گذاری‌خط", "شروع‌شکل", "شروع‌غیر‌فشرده", "شروع‌فشرده", "شروع‌متن", "شروع‌مجموعه‌ستون", "شروع‌مجموعه‌نماد", "شروع‌محیط", "شروع‌مخالف", "شروع‌موضعی", "شروع‌مولفه", "شروع‌مکان‌گذاری", "شروع‌نسخه", "شروع‌نقل‌قول", "شروع‌نوشتار", "شروع‌پانوشتهای‌موضعی", "شروع‌پروفایل", "شروع‌پروژه", "شروع‌پس‌زمینه", "شروع‌پوشش", "شروع‌کد", "شماره‌افزایش", "شماره‌زیرصفحه", "شماره‌زیرفرمول", "شماره‌سر", "شماره‌سرجاری", "شماره‌صفحه", "شماره‌صفحه‌کامل", "شماره‌فرمول", "شماره‌مبدل", "شماره‌ها", "شماره‌کاهش", "شماره‌کل‌صفحه‌ها", "شکافتن‌شناور", "شکل‌خارجی", "صفحه", "صفحه‌تست", "صفحه‌زوج", "صفحه‌پردازش", "طول‌لیست", "عبوربلوکها", "عرض‌آرایش", "عرض‌برگ", "عرض‌حاشیه", "عرض‌حاشیه‌خارجی", "عرض‌حاشیه‌داخلی", "عرض‌حاشیه‌راست", "عرض‌حاشیه‌چپ", "عرض‌خط", "عرض‌لبه", "عرض‌لبه‌خارجی", "عرض‌لبه‌داخلی", "عرض‌لبه‌راست", "عرض‌لبه‌چپ", "عرض‌لیست", "عرض‌متن", "عمق‌صفحه", "عنوان‌حاشیه", "فاصله‌بالا", "فاصله‌ته‌برگ", "فاصله‌حاشیه", "فاصله‌حاشیه‌خارجی", "فاصله‌حاشیه‌داخلی", "فاصله‌حاشیه‌راست", "فاصله‌حاشیه‌چپ", "فاصله‌سربرگ", "فاصله‌لبه", "فاصله‌لبه‌خارجی", "فاصله‌لبه‌داخلی", "فاصله‌لبه‌راست", "فاصله‌لبه‌چپ", "فاصله‌پایین", "فاصله‌پشت", "فشرده", "فضا", "فضاهای‌ثابت", "فضای‌بالا", "فضای‌برش", "فضای‌ثابت", "فضای‌سفید", "فضای‌سفیدصحیح", "فضای‌پایین", "فوری‌به‌لیست", "فوری‌بین‌لیست", "قالبی", "لوح‌مقایسه", "ماه", "متغیر متن", "متن‌برچسب", "متن‌حاشیه", "متن‌سر", "متن‌پانوشت", "محیط", "مراجعه", "مرجع", "مرجع‌صفحه", "مرجع‌متن", "مرحله‌سر", "مسکن", "معنی‌واحد", "مقایسه‌گروه‌رنگ", "مقدارخاکستری", "مقداررنگ", "مقیاس", "منفی", "منوی‌پانل", "مولفه", "مکان", "مکان‌متن", "میدان", "میدانهای‌گزارش", "میدان‌شبیه‌سازی", "میدان‌پشته", "میدان‌کپی", "میله‌تطابق", "میله‌رنگ", "میله‌پانل", "ناشناس", "نام‌ماکرو", "نسخه", "نسخه‌نشانه", "نشانه‌گذاری", "نشانه‌گذاری‌زوج", "نشر", "نصب‌زبان", "نقطه‌ها", "نقل", "نقل‌قول", "نم", "نماد", "نمادسر", "نمادلیست", "نمایش‌آرایش", "نمایش‌بارگذاریها", "نمایش‌بستها", "نمایش‌توری", "نمایش‌رنگ", "نمایش‌شکلهای‌خارجی", "نمایش‌طرح‌بندی", "نمایش‌قالب", "نمایش‌قلم‌بدنه", "نمایش‌لوح", "نمایش‌مجموعه‌علامت", "نمایش‌محیط‌قلم‌بدنه", 
"نمایش‌میدانها", "نمایش‌چاپ", "نمایش‌گروه‌رنگ", "نوشتارزوج", "هدایت", "پا", "پابا", "پانوشت", "پایان‌آرایش", "پایان‌آرایش‌ستون", "پایان‌بازبینی", "پایان‌بلوک‌حاشیه", "پایان‌ترکیب", "پایان‌تصحیح‌خط", "پایان‌تطابق", "پایان‌تنظیم", "پایان‌تولید", "پایان‌جدول", "پایان‌جدولها", "پایان‌خط", "پایان‌خطها", "پایان‌خط‌حاشیه", "پایان‌خط‌متن", "پایان‌رنگ", "پایان‌ستونها", "پایان‌سراسری", "پایان‌شماره‌گذاری‌خط", "پایان‌غیرفشرده", "پایان‌فشرده", "پایان‌متن", "پایان‌مجموعه‌ستون", "پایان‌محیط", "پایان‌مخالف", "پایان‌موضعی", "پایان‌مولفه", "پایان‌مکان‌گذاری", "پایان‌نازکتر", "پایان‌نسخه", "پایان‌نقل‌قول", "پایان‌نوشتار", "پایان‌پانوشتهای‌موضعی", "پایان‌پروفایل", "پایان‌پروژه", "پایان‌پس‌زمینه", "پایان‌پوشش", "پایان‌کد", "پایین", "پرده", "پروژه", "پرکردن‌میدان", "پس‌زمینه", "پیروی‌نسخه", "پیروی‌نسخه‌پروفایل", "پیروی‌پروفایل", "چاپ‌ارتفاع‌برگ", "چاپ‌عرض‌برگ", "چوبخط", "چپ‌چین", "کاغذزوج", "کسر", "کشیده", "کلمه‌حاشیه", "کلمه‌راست", "گیره", "یادداشت", "یک‌جا", "یک‌خط" },
+ ["ro"]={ "CAP", "CUVANT", "CUVINTE", "Cap", "Caps", "Character", "Characters", "Cuvant", "Cuvinte", "KAP", "Kap", "Kaps", "LUNA", "Litera", "Litere", "MONTH", "Numere", "Numereromane", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "ZIDINSAPTAMANA", "about", "adapteazaaspect", "adaptlayout", "adubuffer", "adumarcaje", "afiseazaaspect", "afiseazacampuri", "afiseazaculoare", "afiseazafiguriexterne", "afiseazafonttext", "afiseazagrid", "afiseazagrupculoare", "afiseazamakeup", "afiseazamediufonttext", "afiseazapaleta", "afiseazarama", "afiseazasetari", "afiseazasetsimboluri", "afiseazastruts", "afiseazatiparire", "aliniat", "aliniatcentru", "aliniatdreapta", "aliniatstanga", "appendix", "arg", "ascundeblocuri", "at", "atleftmargin", "atpage", "atrightmargin", "background", "baraculoare", "barainteractiune", "barasincronizare", "blackrule", "blackrules", "blanc", "blank", "bookmark", "but", "butoaneinteractiune", "buton", "butonmeniu", "button", "bypassblocks", "camp", "campumplere", "cap", "chapter", "character", "characters", "chem", "citat", "clip", "cloneazacamp", "clonefield", "coloana", "color", "column", "comment", "comparagrupculoare", "comparapaleta", "comparecolorgroup", "comparepalet", "completeazanumarpagina", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "componenta", "convertestenumar", "convertnumber", "copiazacamp", "copyfield", "corecteazaspatiualb", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "culoare", "culoaregri", "cupleazadocument", "cupleazamarcaje", "cupleazaregistru", "currentdate", "currentheadnumber", "cutspace", "cuvantdreapta", "cuvantmarginal", "data", "datacurenta", "datareferit", "date", "decouplemarking", "decrementnumber", "decupleazamarcaje", "define", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontsynonym", "defineframed", "defineframedtext", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definesection", "definesectionblock", "definesorting", "definestartstop", "defineste", "definesteaccent", "definesteantet", "definesteblanc", "definestebloc", "definesteblocsectiune", "definestebuffer", "definestecamp", "definestecampprincipal", "definestecaracter", "definestecomanda", "definesteconversie", "definesteculoare", "definestedescriere", "definestedimensiunehartie", "definesteenumerare", "definesteeticheta", "definestefloat", "definestefont", "definestefontraw", "definestefonttext", "definesteformatreferinte", "definestegrupculori", "definestehbox", "definesteinconjurare", "definestelista", "definestelistacombinata", "definestelistareferinte", "definestelogo", 
"definestemakeup", "definestemarcaje", "definestemediulfonttext", "definestemeniuinteractiune", "definesteoutput", "definesteoverlay", "definestepaleta", "definesteparagraf", "definestepozitietext", "definesteprofil", "definesteprogram", "definestereferinte", "definesteregistru", "definesterigla", "definestesablontabel", "definestesectiune", "definestesimbol", "definestesimbolfigura", "definestesinonim", "definestesinonimfont", "definestesortare", "definestestartstop", "definestestil", "definestestilfont", "definestestivacampuri", "definestesubcamp", "definestetabulatori", "definestetext", "definestetexteinconjurate", "definestetextinconjurat", "definestetyping", "definestevariabilatext", "definesteversiune", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetype", "definetypeface", "definetyping", "defineversion", "description", "despre", "determinacaracteristicilelistei", "determinacaracteristiciregistru", "determinanumartitlu", "determineheadnumber", "determinelistcharacteristics", "dezactiveazameniuinteractiune", "dimensiune", "din", "disableinteractionmenu", "distantaantet", "distantacolt", "distantacoltdreapta", "distantacoltstanga", "distantajos", "distantamargine", "distantamarginedreapta", "distantamarginestanga", "distantasubsol", "distantasus", "domiciliu", "dute", "dutebox", "dutepagina", "ecran", "el", "element", "emptylines", "enumeration", "eticheta", "etichete", "externalfigure", "fact", "faraaliniat", "faradimensiune", "farafisiere", "faraliniiantetsisubsol", "faraliniisussijos", "faralista", "faramarcaje", "faraspatiu", "faraspatiualb", "field", "fieldstack", "figuraexterna", "fillinfield", "fillinline", "fillinrules", "fillintext", "firdepar", "fitfield", "fixedspaces", "followprofile", "followprofileversion", "followversion", "folosesteURL", "folosestebloc", "folosestecodificarea", "folosestecomenzi", "folosestedirector", "folosestedocumentextern", "folosestefiguraexterna", "folosestefisiereexterne", "folosestefisierextern", "folosestemodul", "folosestemodule", "folosestemuzicaexterna", "folosestereferinte", "folosestescriptJS", "folosestesimboluri", "folosestespeciale", "folosesteurl", "footnote", "footnotetext", "forceblocks", "forteazablocuri", "fractie", "fraction", "framed", "framedtext", "from", "fundal", "gatablocuri", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "graycolor", "grid", "grosimelinie", "hairline", "hartiedubla", "head", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "immediatebetweenlist", "immediatetolist", "impachetat", "impartefloat", "in", "inalt", "inaltamargine", "inaltimeantet", "inaltimehartie", "inaltimehartieimprimanta", "inaltimejos", "inaltimelista", "inaltimemakeup", "inaltimesubsol", "inaltimesus", "inaltimetext", "indentation", "indenting", "indreapta", "inframed", "ininner", "injos", "inleft", "inline", "inlinie", "inmaframed", "inmargin", "inmargineadreapta", "inmargineastanga", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", "inouter", "inparteadreapta", "inparteastanga", "inright", "instalarelimba", "installlanguage", "instanga", "interactionbar", "interactionbuttons", "intins", "item", "items", "its", "jos", "jossus", "kap", "keepblocks", "la", "labeling", "labels", "labeltext", "language", "lapagina", "latimecoltdreapta", "latimecoltstanga", "latimecolturi", "latimehartie", "latimehartieimprimanta", "latimelista", "latimemakeup", "latimemargine", "latimemarginedreapta", 
"latimemarginestanga", "latimetext", "leftaligned", "leg", "limba", "limbaprincipala", "liniemargine", "linieneagra", "liniesubtire", "linieumplere", "liniinegre", "liniisubtiri", "listsymbol", "litera", "litere", "loadsorts", "loadsynonyms", "logcampuri", "logfields", "lohi", "low", "luna", "lungimelista", "maframed", "mainlanguage", "mapfontsize", "mar", "marcaje", "marcheazaversiune", "marginal", "marginrule", "margintext", "marking", "markversion", "matematica", "mathematics", "mediaeval", "mediu", "meniuinteractiune", "midaligned", "minicitat", "mirror", "month", "moveformula", "moveongrid", "movesidefloat", "mutapegrid", "name", "navigating", "necunoscut", "nextsection", "niveltitlu", "nocap", "noheaderandfooterlines", "noindenting", "nokap", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "nota", "notasubsol", "note", "notopandbottomlines", "nowhitespace", "numarformula", "numarincrement", "numarpagina", "numarsubformula", "numartitlu", "numartitlucurent", "numartotalpagini", "numberofsubpages", "numbers", "nume", "numere", "numereromane", "numeunitate", "nutesta", "olinie", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pageoffset", "pagereference", "pagina", "paginadubla", "paragraph", "part", "pastreazablocuri", "pelung", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placerawlist", "placereferencelist", "placeregister", "placesidebyside", "placesubformula", "plaseazapegrid", "plaseazasemnecarte", "position", "potrivestecamp", "pozitie", "pozitietext", "proceseazabloc", "proceseazapagina", "processblocks", "processpage", "produs", "program", "proiect", "publicatie", "publication", "puncte", "punedeasuprafiecareia", "punefatainfata", "puneformula", "punelegenda", "punelista", "punelistacombinata", "punelogouri", "punenotesubsol", "punenotesubsollocale", "punenumarpagina", "puneregistru", "punerigla", "punesubformula", "punevariabilatext", "quotation", "quote", "ran", "ref", "refa", "reference", "referinta", "referintapagina", "referintatext", "referit", "referral", "referraldate", "referring", "reflexie", "register", "remarca", "reservefloat", "reset", "reseteazamarcaje", "resetmarking", "resetnumber", "resettextcontent", "rightaligned", "riglatext", "rigleumplere", "romannumerals", "rotate", "roteste", "saripesteblocuri", "scala", "scale", "screen", "scriebuffer", "scrieinlista", "scrieinlistareferinte", "scrieinregistru", "scrieintreliste", "section", "seeregister", "selectblocks", "selecteazablocuri", "selecteazahartie", "selecteazaversiune", "selectpaper", "selectversion", "semncarte", "setarebarasincronizare", "setareitemization", "setarelimba", "setareoutput", "setarepozitie", "setaresincronizare", "seteazaaliniat", "seteazaalinierea", "seteazaantet", "seteazaaranjareapag", "seteazaaspect", "seteazabarainteractiune", "seteazablanc", "seteazabloc", "seteazablocsectiune", "seteazablocurimarginale", "seteazabuffer", "seteazabutoane", "seteazacamp", "seteazacampuri", "seteazaclipping", "seteazacoloane", "seteazacombinari", "seteazacomentariu", "seteazacomentariupagina", "seteazaculoare", "seteazaculori", "seteazadefinireanotasubsol", "seteazadescriere", "seteazadimensiunihartie", 
"seteazaecrane", "seteazaecraninteractiune", "seteazaelemente", "seteazaenumerare", "seteazafiguriexterne", "seteazafloat", "seteazafloats", "seteazafonttext", "seteazaformulare", "seteazaformule", "seteazafundal", "seteazafundaluri", "seteazagrosimelinie", "seteazaimpartireafloat", "seteazainconjurat", "seteazaingust", "seteazainteractiunea", "seteazajos", "seteazalegenda", "seteazalegendele", "seteazaliniesilabe", "seteazaliniesubtire", "seteazalinii", "seteazaliniimargine", "seteazaliniinegre", "seteazaliniiumplere", "seteazalista", "seteazalistacombinata", "seteazalistareferinte", "seteazamajuscule", "seteazamakeup", "seteazamarcaje", "seteazamarginal", "seteazamediulfonttext", "seteazameniuinteractiune", "seteazaminicitat", "seteazanotasubsol", "seteazanumarpagina", "seteazanumarsubpagina", "seteazanumartitlu", "seteazanumerotare", "seteazanumerotarelinii", "seteazanumerotarepagina", "seteazanumerotareparagrafe", "seteazapaleta", "seteazaparagrafe", "seteazaplasareaopozita", "seteazapozitietext", "seteazaprofile", "seteazaprograme", "seteazapublicatii", "seteazareferinte", "seteazaregistru", "seteazarigla", "seteazarigletext", "seteazarigleumplere", "seteazarotare", "seteazasectiune", "seteazasimbol", "seteazasinonime", "seteazasistem", "seteazasortare", "seteazaspatiu", "seteazaspatiualb", "seteazaspatiuinterliniar", "seteazastrut", "seteazasublinie", "seteazasubsol", "seteazasus", "seteazatab", "seteazatabele", "seteazatabulatori", "seteazatext", "seteazatexteantet", "seteazatextejos", "seteazatextesubsol", "seteazatextesus", "seteazatextetext", "seteazatexteticheta", "seteazatexttitlu", "seteazatitlu", "seteazatitluri", "seteazatoleranta", "seteazatranzitiepagina", "seteazatype", "seteazatyping", "seteazaurl", "seteazavariabilatext", "seteazaversiuni", "setnumber", "settextcontent", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulas", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", 
"setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextrules", "setuptexttexts", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "setvariabilatext", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "sim", "simbol", "sincronizeaza", "someline", "somewhere", "sort", "space", "spatiifixate", "spatiu", "spatiualb", "spatiufixat", "spatiujos", "spatiuspate", "spatiusus", "splitfloat", "startalignment", "startaliniere", "startbackground", "startblocmarginal", "startbuffer", "startcitat", "startcodificare", "startcoloane", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombinare", "startcombination", "startcomment", "startcomponent", "startcomponenta", "startcorectielinie", "startculoare", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigura", "startfigure", "startfloattext", "startformula", "startframedtext", "startfundal", "startglobal", "starthiding", "startimpachetat", "startingust", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlinie", "startliniemargine", "startlinii", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startmediu", "startnamemakeup", "startnarrower", "startneimpachetat", "startnotesubsollocale", "startnumerotarelinii", "startopozit", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startpozitionare", "startproduct", "startprodus", "startprofil", "startprofile", "startproiect", "startproject", "startquotation", "startraster", "startregister", "startriglatext", "startsetsimboluri", "startsincronizare", "startsymbolset", "startsynchronization", "starttabel", "starttabele", "starttable", "starttables", "starttabulate", "starttext", "starttextrule", "starttyping", "startunpacked", "startversion", "startversiune", "stivacampuri", "stopalignment", "stopaliniere", "stopbackground", "stopblobal", "stopblocmarginal", "stopbuffer", "stopcitat", "stopcodificare", "stopcoloane", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombinare", "stopcombination", "stopcomment", "stopcomponent", "stopcomponenta", "stopcorectielinie", "stopculoare", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopfundal", "stophiding", "stopimpachetat", "stopingust", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplinie", "stopliniemargine", "stoplinii", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopmediu", "stopnamemakeup", "stopnarrower", 
"stopneimpachetat", "stopnotesubsollocale", "stopnumerotarelinii", "stopopozit", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stoppozitionare", "stopproduct", "stopprodus", "stopprofil", "stopprofile", "stopproiect", "stopproject", "stopquotation", "stopraster", "stopriglatext", "stopsincronizare", "stopsymbolset", "stopsynchronization", "stoptabel", "stoptabele", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextrule", "stoptyping", "stopunpacked", "stopversion", "stopversiune", "stretched", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "sym", "symbol", "synchronizationbar", "synchronize", "synonym", "tab", "testcolumn", "testpage", "tex", "texteticheta", "textmarginal", "textreference", "textrule", "texttitlu", "textumplere", "thinrule", "thinrules", "title", "titlu", "titlumarginal", "tooltip", "traduce", "translate", "trecilafontraw", "trecilafonttext", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "undeva", "urmeazaprofil", "urmeazaversiune", "urmeazaversiuneprofil", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "valoareculoare", "valoaregri", "variabilatext", "version", "versiune", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister", "zidinsaptamana" },
+}
\ No newline at end of file
diff --git a/context/data/scite/context/lexers/data/scite-context-data-metafun.lua b/context/data/scite/context/lexers/data/scite-context-data-metafun.lua
new file mode 100644
index 000000000..547b8c145
--- /dev/null
+++ b/context/data/scite/context/lexers/data/scite-context-data-metafun.lua
@@ -0,0 +1,4 @@
+return {
+ ["commands"]={ "transparency", "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh", "zmod", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "uptriangle", "downtriangle", "lefttriangle", "righttriangle", "triangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withcircularshade", "withlinearshade", "defineshade", "shaded", "shadedinto", "withshadecolors", "withshadedomain", "withshademethod", "withshadefactor", "withshadevector", "withshadecenter", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "figure", "register", "withmask", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "space", "crlf", "dquote", "percent", "SPACE", "CRLF", "DQUOTE", "PERCENT", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedfunction", "constructedpath", "constructedpairs", "straightfunction", "straightpath", "straightpairs", "curvedfunction", "curvedpath", "curvedpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "basiccolors", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable", "eofill", "eoclip", "area" },
+ ["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "metapostversion", "maxdimensions" },
+}
\ No newline at end of file
diff --git a/context/data/scite/context/lexers/data/scite-context-data-metapost.lua b/context/data/scite/context/lexers/data/scite-context-data-metapost.lua
new file mode 100644
index 000000000..f3ace2b6a
--- /dev/null
+++ b/context/data/scite/context/lexers/data/scite-context-data-metapost.lua
@@ -0,0 +1,9 @@
+return {
+ ["commands"]={ "upto", "downto", "beginfig", "endfig", "beginglyph", "endglyph", "rotatedaround", "reflectedabout", "arrowhead", "currentpen", "currentpicture", "cuttings", "defaultfont", "extra_beginfig", "extra_endfig", "ditto", "EOF", "down", "evenly", "fullcircle", "halfcircle", "identity", "in", "left", "pensquare", "penrazor", "penspec", "origin", "quartercircle", "right", "unitsquare", "up", "withdots", "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir", "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod", "round", "unitvector", "whatever", "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot", "loggingall", "interact", "tracingall", "tracingnone", "pickup", "undraw", "unfill", "unfilldraw", "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions", "incr", "label", "labels", "max", "min", "thelabel", "z", "beginchar", "blacker", "capsule_end", "change_width", "define_blacker_pixels", "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels", "define_horizontal_corrected_pixels", "define_pixels", "define_whole_blacker_pixels", "define_whole_pixels", "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels", "endchar", "extra_beginchar", "extra_endchar", "extra_setup", "font_coding_scheme", "clearxy", "clearit", "clearpen", "shipit", "font_extra_space", "exitunless", "relax", "hide", "gobble", "gobbled", "stop", "blankpicture", "counterclockwise", "tensepath", "takepower", "direction", "softjoin", "makelabel", "rotatedabout", "flex", "superellipse", "image", "nullpen", "savepen", "clearpen", "penpos", "penlabels", "range", "thru", "z", "laboff", "bye", "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background", "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", "triplet", "quadruplet" },
+ ["disabled"]={ "verbatimtex", "troffmode" },
+ ["internals"]={ "mitered", "rounded", "beveled", "butt", "squared", "eps", "epsilon", "infinity", "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius", "charscale", "pen_lft", "pen_rt", "pen_top", "pen_bot" },
+ ["metafont"]={ "autorounding", "beginchar", "blacker", "boundarychar", "capsule_def", "capsule_end", "change_width", "chardp", "chardx", "chardy", "charexists", "charext", "charht", "charic", "charlist", "charwd", "cull", "cullit", "currenttransform", "currentwindow", "define_blacker_pixels", "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels", "define_horizontal_corrected_pixels", "define_pixels", "define_whole_blacker_pixels", "define_whole_pixels", "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels", "designsize", "display", "displaying", "endchar", "extensible", "extra_beginchar", "extra_endchar", "extra_setup", "fillin", "font_coding_scheme", "font_extra_space", "font_identifier", "font_normal_shrink", "font_normal_space", "font_normal_stretch", "font_quad", "font_size", "font_slant", "font_x_height", "fontdimen", "fontmaking", "gfcorners", "granularity", "grayfont", "headerbyte", "hppp", "hround", "imagerules", "italcorr", "kern", "labelfont", "ligtable", "lowres_fix", "makebox", "makegrid", "maketicks", "mode_def", "mode_setup", "nodisplays", "notransforms", "numspecial", "o_correction", "openit", "openwindow", "pixels_per_inch", "proofing", "proofoffset", "proofrule", "proofrulethickness", "rulepen", "screenchars", "screenrule", "screenstrokes", "screen_cols", "screen_rows", "showit", "slantfont", "smode", "smoothing", "titlefont", "totalweight", "tracingedges", "tracingpens", "turningcheck", "unitpixel", "vppp", "vround", "xoffset", "yoffset" },
+ ["primitives"]={ "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing", "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", "tracingcommands", "tracingequations", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", "tracingspecs", "tracingstats", "tracingtitles", "truecorners", "warningcheck", "year", "false", "nullpicture", "pencircle", "true", "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", "hex", "infont", "intersectiontimes", "known", "length", "llcorner", "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", "postcontrol", "precontrol", "reverse", "rotated", "scaled", "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", "yypart", "zscaled", "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", "shipout", "show", "showdependencies", "showtoken", "showvariable", "special", "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", "interpath", "on", "off", "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", "tertiary", "primarydef", "secondarydef", "tertiarydef", "randomseed", "also", "contour", "doublepath", "withcolor", "withcmykcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within", "forsuffixes", "step", "until", "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", "fontmaking", "charexists", "cullit", "currenttransform", "gfcorners", "grayfont", "hround", "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", "displaying", "currentwindow", "screen_rows", "screen_cols", "pixels_per_inch", "cull", "display", "openwindow", "numspecial", "totalweight", "autorounding", "fillin", "proofing", "tracingpens", "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset", "chardy", "hppp", "tracingedges", "vppp", "extra_beginfig", "extra_endfig", "mpxbreak", "endinput", "message", "delimiters", "turningnumber", "errmessage", "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", "withprescript", "withpostscript", "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "prescriptpart", "postscriptpart", "rgbcolor", "cmykcolor", "colormodel", "graypart", "greypart", "greycolor", "graycolor", "dashpart", "penpart", "stroked", "filled", "textual", "clipped", "bounded", "pathpart", "expandafter", "minute", "hour", "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline", "fontpart", "fontsize", "glyph", "restoreclipcolor", "troffmode", "runscript", "maketext" },
+ ["shortcuts"]={ "..", "...", "--", "---", "&", "\\" },
+ ["tex"]={ "btex", "etex", "verbatimtex" },
+}
\ No newline at end of file
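A side note on how generated data modules like the two above are typically consumed (a minimal sketch under stated assumptions, not code from this commit): each file simply returns a table of keyword lists, so a lexer can load it and convert every list into a hash set for constant-time keyword lookups while highlighting. The file name, field names, and helper below are illustrative only.

-- illustrative sketch: load a generated data module and build keyword sets
local metafun = dofile("scite-context-data-metafun.lua") -- the file returns the table shown above

local function toset(list)
    local set = { }
    for i = 1, #list do
        set[list[i]] = true -- index by keyword for O(1) membership tests
    end
    return set
end

local commands = toset(metafun.commands or { })

print(commands["unitcircle"]) -- true
print(commands["nosuchword"]) -- nil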
diff --git a/context/data/scite/lexers/data/scite-context-data-tex.lua b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
index 7d710740c..90b64c512 100644
--- a/context/data/scite/lexers/data/scite-context-data-tex.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
@@ -1,9 +1,9 @@
return {
["aleph"]={ "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "Omegaminorversion", "Omegarevision", "Omegaversion", "boxdir", "pagebottomoffset", "pagerightoffset" },
["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "readline", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracingnesting", "tracingscantokens", "unexpanded", "unless", "widowpenalties" },
- ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "alignmark", "aligntab", "attribute", "attributedef", "catcodetable", "clearmarks", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "fontid", "formatname", "gleaders", "ifabsdim", "ifabsnum", "ifprimitive", "initcatcodetable", "latelua", "luaescapestring", "luastartup", "luatexdatestamp", "luatexrevision", "luatexversion", "mathstyle", "nokerns", "noligs", "outputbox", "pageleftoffset", "pagetopoffset", "postexhyphenchar", "posthyphenchar", "preexhyphenchar", "prehyphenchar", "primitive", "savecatcodetable", "scantextokens", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex" },
+ ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "alignmark", "aligntab", "attribute", "attributedef", "catcodetable", "clearmarks", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "fontid", "formatname", "gleaders", "ifabsdim", "ifabsnum", "ifprimitive", "initcatcodetable", "latelua", "luaescapestring", "luastartup", "luatexbanner", "luatexrevision", "luatexversion", "luafunction", "mathstyle", "nokerns", "noligs", "outputbox", "pageleftoffset", "pagetopoffset", "postexhyphenchar", "posthyphenchar", "preexhyphenchar", "prehyphenchar", "primitive", "savecatcodetable", "scantextokens", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", 
"suppressmathparerror", "synctex" },
["omega"]={ "OmegaVersion", "bodydir", "chardp", "charht", "charit", "charwd", "leftghost", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "mathdir", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "oradical", "pagedir", "pageheight", "pagewidth", "pardir", "rightghost", "textdir" },
["pdftex"]={ "efcode", "expanded", "ifincsname", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "leftmarginkern", "letterspacefont", "lpcode", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagecolordepth", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoptionpdfminorversion", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "pdfximagebbox", "quitvmode", "rightmarginkern", "rpcode", "tagcode" },
- ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "chardp", "charht", "charit", "charwd", "cleaders", "clearmarks", "closein", "closeout", 
"clubpenalties", "clubpenalty", "copy", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastskip", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luastartup", "luatexdatestamp", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathstyle", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", "multiply", "muskip", "muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "noligs", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", 
"pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagecolordepth", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoptionpdfminorversion", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "pdfximagebbox", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "quitvmode", "radical", "raise", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", 
"unhbox", "unhcopy", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year" },
+ ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathchardef", "Umathcharnum", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Uoverdelimiter", "Uradical", "Uroot", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "cleaders", "clearmarks", "closein", "closeout", "clubpenalties", "clubpenalty", "copy", 
"count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastskip", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luastartup", "luatexbanner", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathstyle", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", "multiply", "muskip", "muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "noligs", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", 
"pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagecolordepth", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoptionpdfminorversion", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "pdfximagebbox", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "quitvmode", "radical", "raise", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", "unhbox", "unhcopy", "unkern", "unless", "unpenalty", 
"unskip", "unvbox", "unvcopy", "uppercase", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year" },
["xetex"]={ "XeTeXversion" },
} \ No newline at end of file
diff --git a/context/data/scite/context/lexers/lexer.lua b/context/data/scite/context/lexers/lexer.lua
new file mode 100644
index 000000000..9582f6a76
--- /dev/null
+++ b/context/data/scite/context/lexers/lexer.lua
@@ -0,0 +1,3 @@
+-- this works ok:
+
+return require("scite-context-lexer")
diff --git a/context/data/scite/context/lexers/scite-context-lexer-bibtex.lua b/context/data/scite/context/lexers/scite-context-lexer-bibtex.lua
new file mode 100644
index 000000000..ebcd7cbc6
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-bibtex.lua
@@ -0,0 +1,195 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for bibtex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local global, string, table, lpeg = _G, string, table, lpeg
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+local type = type
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+local exact_match = lexer.exact_match
+
+local bibtexlexer = lexer.new("bib","scite-context-lexer-bibtex")
+local whitespace = bibtexlexer.whitespace
+
+ local escape, left, right = P("\\"), P('{'), P('}')
+
+ patterns.balanced = P {
+ [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+ }
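+ -- for illustration (example value, not from the source): with this grammar a braced
+ -- field value like {some {nested} braces and a \{ escaped one} is matched as a whole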
+
+-- taken from bibl-bib.lua
+
+local anything = patterns.anything
+local percent = P("%")
+local start = P("@")
+local comma = P(",")
+local hash = P("#")
+local escape = P("\\")
+local single = P("'")
+local double = P('"')
+local left = P('{')
+local right = P('}')
+local lineending = S("\n\r")
+local space = S(" \t\n\r\f")
+local spaces = space^1
+local equal = P("=")
+
+local keyword = (R("az","AZ","09") + S("@_:-"))^1
+----- s_quoted = ((escape*single) + spaces + (1-single))^0
+----- d_quoted = ((escape*double) + spaces + (1-double))^0
+local s_quoted = ((escape*single) + (1-single))^0
+local d_quoted = ((escape*double) + (1-double))^0
+
+local balanced = patterns.balanced
+
+local t_spacing = token(whitespace, space^1)
+local t_optionalws = token("default", space^1)^0
+
+local t_equal = token("operator",equal)
+local t_left = token("grouping",left)
+local t_right = token("grouping",right)
+local t_comma = token("operator",comma)
+local t_hash = token("operator",hash)
+
+local t_s_value = token("operator",single)
+ * token("text",s_quoted)
+ * token("operator",single)
+local t_d_value = token("operator",double)
+ * token("text",d_quoted)
+ * token("operator",double)
+local t_b_value = token("operator",left)
+ * token("text",balanced)
+ * token("operator",right)
+local t_r_value = token("text",keyword)
+
+local t_keyword = token("keyword",keyword)
+local t_key = token("command",keyword)
+local t_label = token("warning",keyword)
+
+local t_somevalue = t_s_value + t_d_value + t_b_value + t_r_value
+local t_value = t_somevalue
+ * ((t_optionalws * t_hash * t_optionalws) * t_somevalue)^0
+
+local t_assignment = t_optionalws
+ * t_key
+ * t_optionalws
+ * t_equal
+ * t_optionalws
+ * t_value
+
+local t_shortcut = t_keyword
+ * t_optionalws
+ * t_left
+ * t_optionalws
+ * (t_assignment * t_comma^0)^0
+ * t_optionalws
+ * t_right
+
+local t_definition = t_keyword
+ * t_optionalws
+ * t_left
+ * t_optionalws
+ * t_label
+ * t_optionalws
+ * t_comma
+ * (t_assignment * t_comma^0)^0
+ * t_optionalws
+ * t_right
+
+local t_comment = t_keyword
+ * t_optionalws
+ * t_left
+ * token("text",(1-t_right)^0)
+ * t_optionalws
+ * t_right
+
+local t_forget = token("comment",percent^1 * (1-lineending)^0)
+
+local t_rest = token("default",anything)
+
+-- this kind of lexing seems impossible as the size of the buffer passed to the lexer is not
+-- large enough .. but we can cheat and use this:
+--
+-- function OnOpen(filename) editor:Colourise(1,editor.TextLength) end -- or is it 0?
+
+-- somehow lexing fails in this more complex lexer when we insert something: there is no
+-- backtracking to whitespace when we have no embedded lexer, so we fake one ... this works
+-- to some extent but not in all cases (e.g. editing inside a line fails) .. maybe i need to
+-- patch the dll ... (better not)
+
+local dummylexer = lexer.load("scite-context-lexer-dummy","bib-dum")
+
+local dummystart = token("embedded",P("\001")) -- an unlikely to be used character
+local dummystop = token("embedded",P("\002")) -- an unlikely to be used character
+
+lexer.embed_lexer(bibtexlexer,dummylexer,dummystart,dummystop)
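+
+-- the \001 and \002 markers are not expected to occur in real bibtex input, so the
+-- embedded dummy lexer should never actually fire; its mere presence is what triggers
+-- the whitespace backtracking mentioned above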
+
+-- maybe we need to define each functional block as a lexer (some 4) so i'll do that if
+-- this issue persists ... maybe consider making a local lexer option (not load,
+-- just lexer.new or so) .. or maybe do the reverse, embed the main one in a dummy child
+
+bibtexlexer._rules = {
+ { "whitespace", t_spacing },
+ { "forget", t_forget },
+ { "shortcut", t_shortcut },
+ { "definition", t_definition },
+ { "comment", t_comment },
+ { "rest", t_rest },
+}
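+
+-- as an illustration (example entry, not taken from the source) the rules above roughly
+-- map onto a .bib file like this:
+--
+--   @string{acme = "ACME Press"}      % "shortcut"   : @keyword plus assignments
+--   @article{knuth1984,               % "definition" : @keyword, label, assignments
+--     author    = "Donald E. Knuth",  % key = quoted, braced or bare value
+--     publisher = acme # " Inc",      % values can be concatenated with #
+--   }
+--   @comment{anything in here}        % "comment"    : body lexed as plain text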
+
+-- local t_assignment = t_key
+-- * t_optionalws
+-- * t_equal
+-- * t_optionalws
+-- * t_value
+--
+-- local t_shortcut = t_keyword
+-- * t_optionalws
+-- * t_left
+--
+-- local t_definition = t_keyword
+-- * t_optionalws
+-- * t_left
+-- * t_optionalws
+-- * t_label
+-- * t_optionalws
+-- * t_comma
+--
+-- bibtexlexer._rules = {
+-- { "whitespace", t_spacing },
+-- { "assignment", t_assignment },
+-- { "definition", t_definition },
+-- { "shortcut", t_shortcut },
+-- { "right", t_right },
+-- { "comma", t_comma },
+-- { "forget", t_forget },
+-- { "comment", t_comment },
+-- { "rest", t_rest },
+-- }
+
+bibtexlexer._tokenstyles = context.styleset
+
+bibtexlexer._foldpattern = P("{") + P("}")
+
+bibtexlexer._foldsymbols = {
+ _patterns = {
+ "{",
+ "}",
+ },
+ ["grouping"] = {
+ ["{"] = 1,
+ ["}"] = -1,
+ },
+}
+
+return bibtexlexer
diff --git a/context/data/scite/lexers/scite-context-lexer-cld.lua b/context/data/scite/context/lexers/scite-context-lexer-cld.lua
index 1e30c18a2..3442a195c 100644
--- a/context/data/scite/lexers/scite-context-lexer-cld.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-cld.lua
@@ -6,13 +6,14 @@ local info = {
license = "see context related readme files",
}
-local lexer = lexer
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
-local cldlexer = { _NAME = "cld", _FILENAME = "scite-context-lexer-cld" }
-local whitespace = lexer.WHITESPACE -- maybe we need to fix this
-local context = lexer.context
+local cldlexer = lexer.new("cld","scite-context-lexer-cld")
+local lualexer = lexer.load("scite-context-lexer-lua")
-local lualexer = lexer.load('scite-context-lexer-lua')
+-- can probably be done nicer now, a bit of a hack
cldlexer._rules = lualexer._rules_cld
cldlexer._tokenstyles = lualexer._tokenstyles
diff --git a/context/data/scite/context/lexers/scite-context-lexer-cpp-web.lua b/context/data/scite/context/lexers/scite-context-lexer-cpp-web.lua
new file mode 100644
index 000000000..daa9221ba
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-cpp-web.lua
@@ -0,0 +1,23 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for cpp web",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local cppweblexer = lexer.new("cpp-web","scite-context-lexer-cpp")
+local cpplexer = lexer.load("scite-context-lexer-cpp")
+
+-- can probably be done nicer now, a bit of a hack
+
+cppweblexer._rules = cpplexer._rules_web
+cppweblexer._tokenstyles = cpplexer._tokenstyles
+cppweblexer._foldsymbols = cpplexer._foldsymbols
+cppweblexer._directives = cpplexer._directives
+
+return cppweblexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-cpp.lua b/context/data/scite/context/lexers/scite-context-lexer-cpp.lua
new file mode 100644
index 000000000..31180e6a5
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-cpp.lua
@@ -0,0 +1,188 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for cpp",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- looks like the original cpp lexer but web ready (so nothing special here yet)
+
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+local exact_match = lexer.exact_match
+
+local cpplexer = lexer.new("cpp","scite-context-lexer-cpp")
+local whitespace = cpplexer.whitespace
+
+local keywords = { -- copied from cpp.lua
+ -- c
+ "asm", "auto", "break", "case", "const", "continue", "default", "do", "else",
+ "extern", "false", "for", "goto", "if", "inline", "register", "return",
+ "sizeof", "static", "switch", "true", "typedef", "volatile", "while",
+ "restrict",
+ -- hm
+ "_Bool", "_Complex", "_Pragma", "_Imaginary",
+ -- c++.
+ "catch", "class", "const_cast", "delete", "dynamic_cast", "explicit",
+ "export", "friend", "mutable", "namespace", "new", "operator", "private",
+ "protected", "public", "signals", "slots", "reinterpret_cast",
+ "static_assert", "static_cast", "template", "this", "throw", "try", "typeid",
+ "typename", "using", "virtual"
+}
+
+local datatypes = { -- copied from cpp.lua
+ "bool", "char", "double", "enum", "float", "int", "long", "short", "signed",
+ "struct", "union", "unsigned", "void"
+}
+
+local macros = { -- copied from cpp.lua
+ "define", "elif", "else", "endif", "error", "if", "ifdef", "ifndef", "import",
+ "include", "line", "pragma", "undef", "using", "warning"
+}
+
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
+local restofline = patterns.restofline
+local startofline = patterns.startofline
+
+local squote = P("'")
+local dquote = P('"')
+local period = P(".")
+local escaped = P("\\") * P(1)
+local slashes = P("//")
+local begincomment = P("/*")
+local endcomment = P("*/")
+local percent = P("%")
+
+local hexadecimal = patterns.hexadecimal
+local decimal = patterns.decimal
+local float = patterns.float
+local integer = P("-")^-1 * (hexadecimal + decimal) -- also in patterns ?
+
+local spacing = token(whitespace, space^1)
+local rest = token("default", any)
+
+local shortcomment = token("comment", slashes * restofline^0)
+local longcomment = token("comment", begincomment * (1-endcomment)^0 * endcomment^-1)
+
+local shortstring = token("quote", dquote) -- can be shared
+ * token("string", (escaped + (1-dquote))^0)
+ * token("quote", dquote)
+ + token("quote", squote)
+ * token("string", (escaped + (1-squote))^0)
+ * token("quote", squote)
+
+local number = token("number", float + integer)
+
+local validword = R("AZ","az","__") * R("AZ","az","__","09")^0
+local identifier = token("default",validword)
+
+local operator = token("special", S("+-*/%^!=<>;:{}[]().&|?~"))
+
+----- optionalspace = spacing^0
+
+local p_keywords = exact_match(keywords )
+local p_datatypes = exact_match(datatypes)
+local p_macros = exact_match(macros)
+
+local keyword = token("keyword", p_keywords)
+local datatype = token("keyword", p_datatypes)
+local identifier = token("default", validword)
+
+local macro = token("data", #P("#") * startofline * P("#") * S("\t ")^0 * p_macros)
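+
+-- so a directive like "#include <stdio.h>" (assumed example) only gets the "data" style
+-- when the "#" really sits at the start of a line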
+
+cpplexer._rules = {
+ { "whitespace", spacing },
+ { "keyword", keyword },
+ { "type", datatype },
+ { "identifier", identifier },
+ { "string", shortstring },
+ { "longcomment", longcomment },
+ { "shortcomment", shortcomment },
+ { "number", number },
+ { "macro", macro },
+ { "operator", operator },
+ { "rest", rest },
+}
+
+local web = lexer.loadluafile("scite-context-lexer-web-snippets")
+
+if web then
+
+ lexer.inform("supporting web snippets in cpp lexer")
+
+ cpplexer._rules_web = {
+ { "whitespace", spacing },
+ { "keyword", keyword },
+ { "type", datatype },
+ { "identifier", identifier },
+ { "string", shortstring },
+ { "longcomment", longcomment },
+ { "shortcomment", shortcomment },
+ { "web", web.pattern },
+ { "number", number },
+ { "macro", macro },
+ { "operator", operator },
+ { "rest", rest },
+ }
+
+else
+
+ lexer.report("not supporting web snippets in cpp lexer")
+
+ cpplexer._rules_web = {
+ { "whitespace", spacing },
+ { "keyword", keyword },
+ { "type", datatype },
+ { "identifier", identifier },
+ { "string", shortstring },
+ { "longcomment", longcomment },
+ { "shortcomment", shortcomment },
+ { "number", number },
+ { "macro", macro },
+ { "operator", operator },
+ { "rest", rest },
+ }
+
+end
+
+cpplexer._tokenstyles = context.styleset
+
+cpplexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference
+
+cpplexer._foldsymbols = {
+ _patterns = {
+ "[{}]",
+ "/%*",
+ "%*/",
+ },
+ -- ["data"] = { -- macro
+ -- ["region"] = 1,
+ -- ["endregion"] = -1,
+ -- ["if"] = 1,
+ -- ["ifdef"] = 1,
+ -- ["ifndef"] = 1,
+ -- ["endif"] = -1,
+ -- },
+ ["special"] = { -- operator
+ ["{"] = 1,
+ ["}"] = -1,
+ },
+ ["comment"] = {
+ ["/*"] = 1,
+ ["*/"] = -1,
+ }
+}
+
+-- -- by indentation:
+
+cpplexer._foldpatterns = nil
+cpplexer._foldsymbols = nil
+
+return cpplexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-dummy.lua b/context/data/scite/context/lexers/scite-context-lexer-dummy.lua
new file mode 100644
index 000000000..9465561f3
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-dummy.lua
@@ -0,0 +1,35 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer that triggers whitespace backtracking",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- the lexer dll doesn't backtrack when there is no embedded lexer so
+-- we need to trigger that, for instance in the bibtex lexer, but even
+-- then lexing can still fail
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local dummylexer = lexer.new("dummy","scite-context-lexer-dummy")
+local whitespace = dummylexer.whitespace
+
+local space = patterns.space
+local nospace = (1-space)
+
+local t_spacing = token(whitespace, space ^1)
+local t_rest = token("default", nospace^1)
+
+dummylexer._rules = {
+ { "whitespace", t_spacing },
+ { "rest", t_rest },
+}
+
+dummylexer._tokenstyles = context.styleset
+
+return dummylexer
diff --git a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua b/context/data/scite/context/lexers/scite-context-lexer-lua-longstring.lua
index fdec301be..855adbe4e 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-lua-longstring.lua
@@ -1,20 +1,21 @@
local info = {
version = 1.002,
- comment = "scintilla lpeg lexer for lua",
+ comment = "scintilla lpeg lexer for lua longstrings",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
}
-local lexer = lexer
-local token = lexer.token
-local P = lpeg.P
-
-local stringlexer = { _NAME = "lua-longstring", _FILENAME = "scite-context-lexer-lua-longstring" }
-local whitespace = lexer.WHITESPACE
+local lexer = require("lexer") -- require("scite-context-lexer")
local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local stringlexer = lexer.new("lua-longstring","scite-context-lexer-lua-longstring")
+local whitespace = stringlexer.whitespace
-local space = lexer.space
+local space = patterns.space
local nospace = 1 - space
local p_spaces = token(whitespace, space ^1)
@@ -25,6 +26,6 @@ stringlexer._rules = {
{ "string", p_string },
}
-stringlexer._tokenstyles = lexer.context.styleset
+stringlexer._tokenstyles = context.styleset
return stringlexer
diff --git a/context/data/scite/lexers/scite-context-lexer-lua.lua b/context/data/scite/context/lexers/scite-context-lexer-lua.lua
index 4c276b1bb..9bee74845 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-lua.lua
@@ -6,58 +6,69 @@ local info = {
license = "see context related readme files",
}
--- todo: _G.print (keep _G colored)
-
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
+-- beware: all multiline is messy, so even if it's not a lexer, it should be an embedded lexer
+-- we probably could use a local whitespace variant but this is cleaner
-local lexer = lexer
-local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, lexer.exact_match, lexer.style_nothing
-local P, R, S, C, Cg, Cb, Cs, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt
+local P, R, S, C, Cmt, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cmt, lpeg.Cp
local match, find = string.match, string.find
local setmetatable = setmetatable
--- beware: all multiline is messy, so even if it's no lexer, it should be an embedded lexer
--- we probably could use a local whitespace variant but this is cleaner
-
-local lualexer = { _NAME = "lua", _FILENAME = "scite-context-lexer-lua" }
-local whitespace = lexer.WHITESPACE
+local lexer = require("lexer")
local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+local exact_match = lexer.exact_match
+local just_match = lexer.just_match
+
+local lualexer = lexer.new("lua","scite-context-lexer-lua")
+local whitespace = lualexer.whitespace
local stringlexer = lexer.load("scite-context-lexer-lua-longstring")
+local labellexer = lexer.load("scite-context-lexer-lua-labelstring")
-local directives = { } -- communication channel
+local directives = { } -- communication channel
-- this will be extended
+-- we could combine some in a hash that returns the class that then makes the token
+-- this can save time on large files
+
local keywords = {
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', -- 'goto',
- 'if', 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true',
- 'until', 'while',
+ "and", "break", "do", "else", "elseif", "end", "false", "for", "function", -- "goto",
+ "if", "in", "local", "nil", "not", "or", "repeat", "return", "then", "true",
+ "until", "while",
}
local functions = {
- 'assert', 'collectgarbage', 'dofile', 'error', 'getmetatable',
- 'ipairs', 'load', 'loadfile', 'module', 'next', 'pairs',
- 'pcall', 'print', 'rawequal', 'rawget', 'rawset', 'require',
- 'setmetatable', 'tonumber', 'tostring', 'type', 'unpack', 'xpcall', 'select',
+ "assert", "collectgarbage", "dofile", "error", "getmetatable",
+ "ipairs", "load", "loadfile", "module", "next", "pairs",
+ "pcall", "print", "rawequal", "rawget", "rawset", "require",
+ "setmetatable", "tonumber", "tostring", "type", "unpack", "xpcall", "select",
"string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
}
local constants = {
- '_G', '_VERSION', '_M', '...', '_ENV',
+ "_G", "_VERSION", "_M", "...", "_ENV",
-- here too
- '__add', '__call', '__concat', '__div', '__idiv', '__eq', '__gc', '__index',
- '__le', '__lt', '__metatable', '__mode', '__mul', '__newindex',
- '__pow', '__sub', '__tostring', '__unm', '__len',
- '__pairs', '__ipairs',
- 'NaN',
+ "__add", "__call", "__concat", "__div", "__idiv", "__eq", "__gc", "__index",
+ "__le", "__lt", "__metatable", "__mode", "__mul", "__newindex",
+ "__pow", "__sub", "__tostring", "__unm", "__len",
+ "__pairs", "__ipairs",
+ "NaN",
}
+-- local tokenmappings = { }
+--
+-- for i=1,#keywords do tokenmappings[keywords [i]] = "keyword" end
+-- for i=1,#functions do tokenmappings[functions[i]] = "function" end
+-- for i=1,#constants do tokenmappings[constants[i]] = "constant" end
+
local internals = { -- __
- 'add', 'call', 'concat', 'div', 'eq', 'gc', 'index',
- 'le', 'lt', 'metatable', 'mode', 'mul', 'newindex',
- 'pow', 'sub', 'tostring', 'unm', 'len',
+ "add", "call", "concat", "div", "eq", "gc", "index",
+ "le", "lt", "metatable", "mode", "mul", "newindex",
+ "pow", "sub", "tostring", "unm", "len",
}
local depricated = {
@@ -67,7 +78,9 @@ local depricated = {
}
local csnames = { -- todo: option
+ "commands",
"context",
+ "ctx",
"metafun",
"metapost",
}
@@ -81,14 +94,14 @@ local longonestart = P("[[")
local longonestop = P("]]")
local longonestring = (1-longonestop)^0
-local longtwostart = P('[') * Cmt(equals,setlevel) * P('[')
-local longtwostop = P(']') * equals * P(']')
+local longtwostart = P("[") * Cmt(equals,setlevel) * P("[")
+local longtwostop = P("]") * equals * P("]")
local sentinels = { } setmetatable(sentinels, { __index = function(t,k) local v = "]" .. k .. "]" t[k] = v return v end })
local longtwostring = P(function(input,index)
if level then
- -- local sentinel = ']' .. level .. ']'
+ -- local sentinel = "]" .. level .. "]"
local sentinel = sentinels[level]
local _, stop = find(input,sentinel,index,true)
return stop and stop + 1 - #sentinel or #input + 1
@@ -99,32 +112,33 @@ end)
local longtwostring_end = P(function(input,index)
if level then
- -- local sentinel = ']' .. level .. ']'
+ -- local sentinel = "]" .. level .. "]"
local sentinel = sentinels[level]
local _, stop = find(input,sentinel,index,true)
return stop and stop + 1 or #input + 1
end
end)
-local longcomment = Cmt(#('[[' + ('[' * C(equals) * '[')), function(input,index,level)
- -- local sentinel = ']' .. level .. ']'
+local longcomment = Cmt(#("[[" + ("[" * C(equals) * "[")), function(input,index,level)
+ -- local sentinel = "]" .. level .. "]"
local sentinel = sentinels[level]
local _, stop = find(input,sentinel,index,true)
return stop and stop + 1 or #input + 1
end)
-local space = lexer.space -- S(" \n\r\t\f\v")
-local any = lexer.any
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
+local eol = patterns.eol
local squote = P("'")
local dquote = P('"')
local escaped = P("\\") * P(1)
-local dashes = P('--')
+local dashes = P("--")
local spacing = token(whitespace, space^1)
local rest = token("default", any)
-local shortcomment = token("comment", dashes * lexer.nonnewline^0)
+local shortcomment = token("comment", dashes * (1-eol)^0)
local longcomment = token("comment", dashes * longcomment)
-- fails on very long string with \ at end of lines (needs embedded lexer)
@@ -149,41 +163,55 @@ local string = shortstring
lexer.embed_lexer(lualexer, stringlexer, token("quote",longtwostart), token("string",longtwostring_body) * token("quote",longtwostring_end))
-local integer = P("-")^-1 * (lexer.hex_num + lexer.dec_num)
-local number = token("number", lexer.float + integer)
+local integer = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
+local number = token("number", patterns.float + integer)
+ * (token("error",R("AZ","az","__")^1))^0
-- officially 127-255 are ok but not utf so useless
-local validword = R("AZ","az","__") * R("AZ","az","__","09")^0
+----- validword = R("AZ","az","__") * R("AZ","az","__","09")^0
local utf8character = P(1) * R("\128\191")^1
local validword = (R("AZ","az","__") + utf8character) * (R("AZ","az","__","09") + utf8character)^0
+local validsuffix = (R("AZ","az") + utf8character) * (R("AZ","az","__","09") + utf8character)^0
local identifier = token("default",validword)
----- operator = token("special", P('..') + P('~=') + S('+-*/%^#=<>;:,.{}[]()')) -- maybe split off {}[]()
----- operator = token("special", S('+-*/%^#=<>;:,{}[]()') + P('..') + P('.') + P('~=') ) -- maybe split off {}[]()
-local operator = token("special", S('+-*/%^#=<>;:,{}[]().') + P('~=') ) -- no ^1 because of nested lexers
+----- operator = token("special", S('+-*/%^#=<>;:,{}[]().') + P('~=') ) -- no ^1 because of nested lexers
+local operator = token("special", S('+-*/%^#=<>;:,{}[]().|~')) -- no ^1 because of nested lexers
local structure = token("special", S('{}[]()'))
local optionalspace = spacing^0
local hasargument = #S("{([")
+-- ideally this should be an embedded lexer ..
+
local gotokeyword = token("keyword", P("goto"))
* spacing
* token("grouping",validword)
local gotolabel = token("keyword", P("::"))
+ * (spacing + shortcomment)^0
* token("grouping",validword)
+ * (spacing + shortcomment)^0
* token("keyword", P("::"))
-local p_keywords = exact_match(keywords)
-local p_functions = exact_match(functions)
-local p_constants = exact_match(constants)
+----- p_keywords = exact_match(keywords)
+----- p_functions = exact_match(functions)
+----- p_constants = exact_match(constants)
+----- p_internals = P("__")
+----- * exact_match(internals)
+
+local p_finish = #(1-R("az","AZ","__"))
+local p_keywords = lexer.helpers.utfchartabletopattern(keywords) * p_finish -- exact_match(keywords)
+local p_functions = lexer.helpers.utfchartabletopattern(functions) * p_finish -- exact_match(functions)
+local p_constants = lexer.helpers.utfchartabletopattern(constants) * p_finish -- exact_match(constants)
local p_internals = P("__")
- * exact_match(internals)
-local p_csnames = exact_match(csnames)
+ * lexer.helpers.utfchartabletopattern(internals) * p_finish -- exact_match(internals)
+local p_csnames = lexer.helpers.utfchartabletopattern(csnames) * p_finish -- just_match(csnames)
local keyword = token("keyword", p_keywords)
local builtin = token("plain", p_functions)
local constant = token("data", p_constants)
@@ -191,8 +219,10 @@ local internal = token("data", p_internals)
local csname = token("user", p_csnames)
* (
optionalspace * hasargument
- + ( optionalspace * token("special", S(".:")) * optionalspace * token("user", validword) )^1
+ + ( optionalspace * token("special", S(".:")) * optionalspace * token("user", validword ) )^1
+ + token("user", P("_") * validsuffix)
)
+
local identifier = token("default", validword)
* ( optionalspace * token("special", S(".:")) * optionalspace * (
token("warning", p_keywords) +
@@ -200,22 +230,33 @@ local identifier = token("default", validword)
token("default", validword )
) )^0
+-- local t = { } for k, v in next, tokenmappings do t[#t+1] = k end t = table.concat(t)
+-- -- local experimental = (S(t)^1) / function(s) return tokenmappings[s] end * Cp()
+--
+-- local experimental = Cmt(S(t)^1, function(_,i,s)
+-- local t = tokenmappings[s]
+-- if t then
+-- return true, t, i
+-- end
+-- end)
+
lualexer._rules = {
- { 'whitespace', spacing },
- { 'keyword', keyword },
- -- { 'structure', structure },
- { 'function', builtin },
- { 'csname', csname },
- { 'constant', constant },
- { 'goto', gotokeyword },
- { 'identifier', identifier },
- { 'string', string },
- { 'number', number },
- { 'longcomment', longcomment },
- { 'shortcomment', shortcomment },
- { 'label', gotolabel },
- { 'operator', operator },
- { 'rest', rest },
+ { "whitespace", spacing },
+ { "keyword", keyword }, -- can be combined
+ -- { "structure", structure },
+ { "function", builtin }, -- can be combined
+ { "constant", constant }, -- can be combined
+ -- { "experimental", experimental }, -- works but better split
+ { "csname", csname },
+ { "goto", gotokeyword },
+ { "identifier", identifier },
+ { "string", string },
+ { "number", number },
+ { "longcomment", longcomment },
+ { "shortcomment", shortcomment },
+ { "label", gotolabel },
+ { "operator", operator },
+ { "rest", rest },
}
-- -- experiment
@@ -250,18 +291,18 @@ lualexer._rules = {
-- }
--
-- lualexer._rules = {
--- { 'whitespace', spacing },
--- { 'whatever', whatever },
--- { 'csname', csname },
--- { 'goto', gotokeyword },
--- { 'identifier', identifier },
--- { 'string', string },
--- { 'number', number },
--- { 'longcomment', longcomment },
--- { 'shortcomment', shortcomment },
--- { 'label', gotolabel },
--- { 'operator', operator },
--- { 'rest', rest },
+-- { "whitespace", spacing },
+-- { "whatever", whatever },
+-- { "csname", csname },
+-- { "goto", gotokeyword },
+-- { "identifier", identifier },
+-- { "string", string },
+-- { "number", number },
+-- { "longcomment", longcomment },
+-- { "shortcomment", shortcomment },
+-- { "label", gotolabel },
+-- { "operator", operator },
+-- { "rest", rest },
-- }
lualexer._tokenstyles = context.styleset
@@ -273,26 +314,26 @@ lualexer._foldpattern = (P("end") + P("if") + P("do") + P("function") + P("repea
lualexer._foldsymbols = {
_patterns = {
- '[a-z][a-z]+',
- '[{}%[%]]',
+ "[a-z][a-z]+",
+ "[{}%[%]]",
},
- ['keyword'] = { -- challenge: if=0 then=1 else=-1 elseif=-1
- ['if'] = 1, -- if .. [then|else] .. end
- ['do'] = 1, -- [while] do .. end
- ['function'] = 1, -- function .. end
- ['repeat'] = 1, -- repeat .. until
- ['until'] = -1,
- ['end'] = -1,
+ ["keyword"] = { -- challenge: if=0 then=1 else=-1 elseif=-1
+ ["if"] = 1, -- if .. [then|else] .. end
+ ["do"] = 1, -- [while] do .. end
+ ["function"] = 1, -- function .. end
+ ["repeat"] = 1, -- repeat .. until
+ ["until"] = -1,
+ ["end"] = -1,
},
- ['comment'] = {
- ['['] = 1, [']'] = -1,
+ ["comment"] = {
+ ["["] = 1, ["]"] = -1,
},
- -- ['quote'] = { -- confusing
- -- ['['] = 1, [']'] = -1,
+ -- ["quote"] = { -- confusing
+ -- ["["] = 1, ["]"] = -1,
-- },
- ['special'] = {
- -- ['('] = 1, [')'] = -1,
- ['{'] = 1, ['}'] = -1,
+ ["special"] = {
+ -- ["("] = 1, [")"] = -1,
+ ["{"] = 1, ["}"] = -1,
},
}
@@ -300,9 +341,9 @@ lualexer._foldsymbols = {
local cstoken = R("az","AZ","\127\255") + S("@!?_")
local texcsname = P("\\") * cstoken^1
-local commentline = P('%') * (1-S("\n\r"))^0
+local commentline = P("%") * (1-S("\n\r"))^0
-local texcomment = token('comment', Cmt(commentline, function() return directives.cld_inline end))
+local texcomment = token("comment", Cmt(commentline, function() return directives.cld_inline end))
local longthreestart = P("\\!!bs")
local longthreestop = P("\\!!es")
@@ -312,7 +353,7 @@ local texstring = token("quote", longthreestart)
* token("string", longthreestring)
* token("quote", longthreestop)
--- local texcommand = token("user", texcsname)
+----- texcommand = token("user", texcsname)
local texcommand = token("warning", texcsname)
-- local texstring = token("quote", longthreestart)
@@ -325,22 +366,22 @@ local texcommand = token("warning", texcsname)
lualexer._directives = directives
lualexer._rules_cld = {
- { 'whitespace', spacing },
- { 'texstring', texstring },
- { 'texcomment', texcomment },
- { 'texcommand', texcommand },
- -- { 'structure', structure },
- { 'keyword', keyword },
- { 'function', builtin },
- { 'csname', csname },
- { 'constant', constant },
- { 'identifier', identifier },
- { 'string', string },
- { 'longcomment', longcomment },
- { 'shortcomment', shortcomment }, -- should not be used inline so best signal it as comment (otherwise complex state till end of inline)
- { 'number', number },
- { 'operator', operator },
- { 'rest', rest },
+ { "whitespace", spacing },
+ { "texstring", texstring },
+ { "texcomment", texcomment },
+ { "texcommand", texcommand },
+ -- { "structure", structure },
+ { "keyword", keyword },
+ { "function", builtin },
+ { "csname", csname },
+ { "constant", constant },
+ { "identifier", identifier },
+ { "string", string },
+ { "longcomment", longcomment },
+ { "shortcomment", shortcomment }, -- should not be used inline so best signal it as comment (otherwise complex state till end of inline)
+ { "number", number },
+ { "operator", operator },
+ { "rest", rest },
}
return lualexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-mps.lua b/context/data/scite/context/lexers/scite-context-lexer-mps.lua
new file mode 100644
index 000000000..b87ea83cb
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-mps.lua
@@ -0,0 +1,177 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for metafun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local global, string, table, lpeg = _G, string, table, lpeg
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+local type = type
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+local exact_match = lexer.exact_match
+
+local metafunlexer = lexer.new("mps","scite-context-lexer-mps")
+local whitespace = metafunlexer.whitespace
+
+local metapostprimitives = { }
+local metapostinternals = { }
+local metapostshortcuts = { }
+local metapostcommands = { }
+
+local metafuninternals = { }
+local metafunshortcuts = { }
+local metafuncommands = { }
+
+local mergedshortcuts = { }
+local mergedinternals = { }
+
+do
+
+ local definitions = context.loaddefinitions("scite-context-data-metapost")
+
+ if definitions then
+ metapostprimitives = definitions.primitives or { }
+ metapostinternals = definitions.internals or { }
+ metapostshortcuts = definitions.shortcuts or { }
+ metapostcommands = definitions.commands or { }
+ end
+
+ local definitions = context.loaddefinitions("scite-context-data-metafun")
+
+ if definitions then
+ metafuninternals = definitions.internals or { }
+ metafunshortcuts = definitions.shortcuts or { }
+ metafuncommands = definitions.commands or { }
+ end
+
+ for i=1,#metapostshortcuts do
+ mergedshortcuts[#mergedshortcuts+1] = metapostshortcuts[i]
+ end
+ for i=1,#metafunshortcuts do
+ mergedshortcuts[#mergedshortcuts+1] = metafunshortcuts[i]
+ end
+
+ for i=1,#metapostinternals do
+ mergedinternals[#mergedinternals+1] = metapostinternals[i]
+ end
+ for i=1,#metafuninternals do
+ mergedinternals[#mergedinternals+1] = metafuninternals[i]
+ end
+
+end
+
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
+
+local dquote = P('"')
+local cstoken = patterns.idtoken
+local mptoken = patterns.alpha
+local leftbrace = P("{")
+local rightbrace = P("}")
+local number = patterns.real
+
+local cstokentex = R("az","AZ","\127\255") + S("@!?_")
+
+-- we could collapse as in tex
+
+local spacing = token(whitespace, space^1)
+local rest = token("default", any)
+local comment = token("comment", P("%") * (1-S("\n\r"))^0)
+local internal = token("reserved", exact_match(mergedshortcuts,false))
+local shortcut = token("data", exact_match(mergedinternals))
+local helper = token("command", exact_match(metafuncommands))
+local plain = token("plain", exact_match(metapostcommands))
+local quoted = token("quote", dquote)
+ * token("string", P(1-dquote)^0)
+ * token("quote", dquote)
+local texstuff = token("quote", P("btex ") + P("verbatimtex "))
+ * token("string", P(1-P(" etex"))^0)
+ * token("quote", P(" etex"))
+local primitive = token("primitive", exact_match(metapostprimitives))
+local identifier = token("default", cstoken^1)
+local number = token("number", number)
+local grouping = token("grouping", S("()[]{}")) -- can be an option
+local special = token("special", S("#()[]{}<>=:\"")) -- or else := <> etc split
+local texlike = token("warning", P("\\") * cstokentex^1)
+local extra = token("extra", P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
+
+local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
+local texlike = token("embedded", P("\\") * (P("MP") + P("mp")) * mptoken^1)
+ * spacing^0
+ * token("grouping", leftbrace)
+ * token("default", (nested + (1-rightbrace))^0 )
+ * token("grouping", rightbrace)
+ + token("warning", P("\\") * cstokentex^1)
+
+-- lua: we assume: lua ( "lua code" )
+
+local cldlexer = lexer.load("scite-context-lexer-cld","mps-cld")
+
+local startlua = P("lua") * space^0 * P('(') * space^0 * P('"')
+local stoplua = P('"') * space^0 * P(')')
+
+local startluacode = token("embedded", startlua)
+local stopluacode = #stoplua * token("embedded", stoplua)
+
+lexer.embed_lexer(metafunlexer, cldlexer, startluacode, stopluacode)
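+
+-- a minimal illustration (assumed input, not from the source): in a statement like
+--
+--   lua ( "context.metafunvariable" ) ;
+--
+-- everything between the quotes is handed over to the embedded cld (lua) lexer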
+
+metafunlexer._rules = {
+ { "whitespace", spacing },
+ { "comment", comment },
+ { "internal", internal },
+ { "shortcut", shortcut },
+ { "helper", helper },
+ { "plain", plain },
+ { "primitive", primitive },
+ { "texstuff", texstuff },
+ { "identifier", identifier },
+ { "number", number },
+ { "quoted", quoted },
+ -- { "grouping", grouping }, -- can be an option
+ { "special", special },
+ { "texlike", texlike },
+ { "extra", extra },
+ { "rest", rest },
+}
+
+metafunlexer._tokenstyles = context.styleset
+
+metafunlexer._foldpattern = patterns.lower^2 -- separate entry else interference
+
+metafunlexer._foldsymbols = {
+ _patterns = {
+ "[a-z][a-z]+",
+ },
+ ["plain"] = {
+ ["beginfig"] = 1,
+ ["endfig"] = -1,
+ ["beginglyph"] = 1,
+ ["endglyph"] = -1,
+ -- ["begingraph"] = 1,
+ -- ["endgraph"] = -1,
+ },
+ ["primitive"] = {
+ ["def"] = 1,
+ ["vardef"] = 1,
+ ["primarydef"] = 1,
+ ["secondarydef" ] = 1,
+ ["tertiarydef"] = 1,
+ ["enddef"] = -1,
+ ["if"] = 1,
+ ["fi"] = -1,
+ ["for"] = 1,
+ ["forever"] = 1,
+ ["endfor"] = -1,
+ }
+}
+
+-- if inspect then inspect(metafunlexer) end
+
+return metafunlexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-pdf-object.lua b/context/data/scite/context/lexers/scite-context-lexer-pdf-object.lua
new file mode 100644
index 000000000..1fb95838a
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-pdf-object.lua
@@ -0,0 +1,136 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for pdf objects",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- no longer used: nesting lexers with whitespace in start/stop is unreliable
+
+local P, R, S, C, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.V
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local pdfobjectlexer = lexer.new("pdfobj","scite-context-lexer-pdf-object")
+local whitespace = pdfobjectlexer.whitespace
+
+local space = patterns.space
+local spacing = patterns.spacing
+local nospacing = patterns.nospacing
+local anything = patterns.anything
+local newline = patterns.eol
+local real = patterns.real
+local cardinal = patterns.cardinal
+
+local lparent = P("(")
+local rparent = P(")")
+local langle = P("<")
+local rangle = P(">")
+local escape = P("\\")
+local unicodetrigger = P("feff")
+
+local nametoken = 1 - space - S("<>/[]()")
+local name = P("/") * nametoken^1
+
+local p_string = P { ( escape * anything + lparent * V(1) * rparent + (1 - rparent) )^0 }
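+
+-- so both (plain text) and (with (nested) parens and \) escapes) end up as one string body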
+
+local t_spacing = token(whitespace, spacing)
+local t_spaces = token(whitespace, spacing)^0
+local t_rest = token("default", nospacing) -- anything
+
+local p_stream = P("stream")
+local p_endstream = P("endstream")
+local p_obj = P("obj")
+local p_endobj = P("endobj")
+local p_reference = P("R")
+
+local p_objectnumber = patterns.cardinal
+local p_comment = P("%") * (1-S("\n\r"))^0
+
+local t_string = token("quote", lparent)
+ * token("string", p_string)
+ * token("quote", rparent)
+local t_unicode = token("quote", langle)
+ * token("plain", unicodetrigger)
+ * token("string", (1-rangle)^1)
+ * token("quote", rangle)
+local t_whatsit = token("quote", langle)
+ * token("string", (1-rangle)^1)
+ * token("quote", rangle)
+local t_keyword = token("command", name)
+local t_constant = token("constant", name)
+local t_number = token("number", real)
+-- t_reference = token("number", cardinal)
+-- * t_spacing
+-- * token("number", cardinal)
+local t_reserved = token("number", P("true") + P("false") + P("NULL"))
+local t_reference = token("warning", cardinal)
+ * t_spacing
+ * token("warning", cardinal)
+ * t_spacing
+ * token("keyword", p_reference)
+
+local t_comment = token("comment", p_comment)
+
+local t_openobject = token("warning", p_objectnumber * spacing)
+-- * t_spacing
+ * token("warning", p_objectnumber * spacing)
+-- * t_spacing
+ * token("keyword", p_obj)
+local t_closeobject = token("keyword", p_endobj)
+
+local t_opendictionary = token("grouping", P("<<"))
+local t_closedictionary = token("grouping", P(">>"))
+
+local t_openarray = token("grouping", P("["))
+local t_closearray = token("grouping", P("]"))
+
+-- todo: comment
+
+local t_stream = token("keyword", p_stream)
+-- * token("default", newline * (1-newline*p_endstream*newline)^1 * newline)
+-- * token("text", (1 - p_endstream)^1)
+ * (token("text", (1 - p_endstream-spacing)^1) + t_spacing)^1
+ * token("keyword", p_endstream)
+
+local t_dictionary = { "dictionary",
+ dictionary = t_opendictionary * (t_spaces * t_keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
+ array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
+ whatever = V("dictionary") + V("array") + t_constant + t_reference + t_string + t_unicode + t_number + t_reserved + t_whatsit,
+ }
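+
+-- for illustration (example object, not from the source): a dictionary like
+--
+--   << /Type /Page /Parent 3 0 R /MediaBox [ 0 0 595 842 ] >>
+--
+-- nests via the grammar above: /Type is a "command" key, /Page a "constant" value,
+-- "3 0 R" a reference and the array is handled by the V("array") branch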
+
+----- t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
+----- object = t_spaces * (V("dictionary") * t_spaces * t_stream^-1 + V("array") + V("number") + t_spaces) * t_spaces * t_closeobject,
+----- dictionary = t_opendictionary * (t_spaces * t_keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
+----- array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
+----- whatever = V("dictionary") + V("array") + t_constant + t_reference + t_string + t_unicode + t_number + t_reserved + t_whatsit,
+----- number = t_number,
+----- }
+
+local t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
+ dictionary = t_dictionary.dictionary,
+ array = t_dictionary.array,
+ whatever = t_dictionary.whatever,
+ object = t_openobject^-1 * t_spaces * (V("dictionary") * t_spaces * t_stream^-1 + V("array") + V("number") + t_spaces) * t_spaces * t_closeobject,
+ number = t_number,
+ }
+
+pdfobjectlexer._shared = {
+ dictionary = t_dictionary,
+ object = t_object,
+ stream = t_stream,
+}
+
+pdfobjectlexer._rules = {
+ { "whitespace", t_spacing }, -- in fact, here we don't want whitespace as it's top level lexer work
+ { "object", t_object },
+}
+
+pdfobjectlexer._tokenstyles = context.styleset
+
+return pdfobjectlexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-pdf-xref.lua b/context/data/scite/context/lexers/scite-context-lexer-pdf-xref.lua
new file mode 100644
index 000000000..7097c41a6
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-pdf-xref.lua
@@ -0,0 +1,43 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for pdf xref",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- no longer used: nesting lexers with whitespace in start/stop is unreliable
+
+local P, R = lpeg.P, lpeg.R
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local pdfxreflexer = lexer.new("pdfxref","scite-context-lexer-pdf-xref")
+local whitespace = pdfxreflexer.whitespace
+
+local spacing = patterns.spacing
+local cardinal = patterns.cardinal
+local alpha = patterns.alpha
+
+local t_spacing = token(whitespace, spacing)
+
+local p_xref = P("xref")
+local t_xref = token("keyword",p_xref)
+ * token("number", cardinal * spacing * cardinal * spacing)
+
+local t_number = token("number", cardinal * spacing * cardinal * spacing)
+ * token("keyword", alpha)
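+
+-- a typical xref section, for illustration only (not from the source):
+--
+--   xref
+--   0 6
+--   0000000003 65535 f
+--   0000000017 00000 n
+--
+-- t_xref eats the header plus the two counts, t_number eats each entry: two cardinals
+-- followed by the f/n flag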
+
+pdfxreflexer._rules = {
+ { "whitespace", t_spacing },
+ { "xref", t_xref },
+ { "number", t_number },
+}
+
+pdfxreflexer._tokenstyles = context.styleset
+
+return pdfxreflexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-pdf.lua b/context/data/scite/context/lexers/scite-context-lexer-pdf.lua
new file mode 100644
index 000000000..f8e4e7380
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-pdf.lua
@@ -0,0 +1,204 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for pdf",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- pdf is normally static .. i.e. not edited so we don't really
+-- need embedded lexers.
+
+local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local pdflexer = lexer.new("pdf","scite-context-lexer-pdf")
+local whitespace = pdflexer.whitespace
+
+----- pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object")
+----- pdfxreflexer = lexer.load("scite-context-lexer-pdf-xref")
+
+local anything = patterns.anything
+local space = patterns.space
+local spacing = patterns.spacing
+local nospacing = patterns.nospacing
+local anything = patterns.anything
+local restofline = patterns.restofline
+
+local t_whitespace = token(whitespace, spacing)
+local t_spacing = token("default", spacing)
+----- t_rest = token("default", nospacing)
+local t_rest = token("default", anything)
+
+local p_comment = P("%") * restofline
+local t_comment = token("comment", p_comment)
+
+-- whatever
+
+local space = patterns.space
+local spacing = patterns.spacing
+local nospacing = patterns.nospacing
+local anything = patterns.anything
+local newline = patterns.eol
+local real = patterns.real
+local cardinal = patterns.cardinal
+local alpha = patterns.alpha
+
+local lparent = P("(")
+local rparent = P(")")
+local langle = P("<")
+local rangle = P(">")
+local escape = P("\\")
+local unicodetrigger = P("feff")
+
+local nametoken = 1 - space - S("<>/[]()")
+local name = P("/") * nametoken^1
+
+local p_string = P { ( escape * anything + lparent * V(1) * rparent + (1 - rparent) )^0 }
+
+local t_spacing = token("default", spacing)
+local t_spaces = token("default", spacing)^0
+local t_rest = token("default", nospacing) -- anything
+
+local p_stream = P("stream")
+local p_endstream = P("endstream")
+local p_obj = P("obj")
+local p_endobj = P("endobj")
+local p_reference = P("R")
+
+local p_objectnumber = patterns.cardinal
+local p_comment = P("%") * (1-S("\n\r"))^0
+
+local t_string = token("quote", lparent)
+ * token("string", p_string)
+ * token("quote", rparent)
+local t_unicode = token("quote", langle)
+ * token("plain", unicodetrigger)
+ * token("string", (1-rangle)^1)
+ * token("quote", rangle)
+local t_whatsit = token("quote", langle)
+ * token("string", (1-rangle)^1)
+ * token("quote", rangle)
+local t_keyword = token("command", name)
+local t_constant = token("constant", name)
+local t_number = token("number", real)
+-- t_reference = token("number", cardinal)
+-- * t_spacing
+-- * token("number", cardinal)
+local t_reserved = token("number", P("true") + P("false") + P("NULL"))
+-- t_reference = token("warning", cardinal * spacing * cardinal * spacing)
+-- * token("keyword", p_reference)
+local t_reference = token("warning", cardinal)
+ * t_spacing
+ * token("warning", cardinal)
+ * t_spacing
+ * token("keyword", p_reference)
+
+local t_comment = token("comment", p_comment)
+
+local t_openobject = token("warning", p_objectnumber)
+ * t_spacing
+ * token("warning", p_objectnumber)
+ * t_spacing
+ * token("keyword", p_obj)
+-- t_openobject = token("warning", p_objectnumber * spacing)
+-- * token("warning", p_objectnumber * spacing)
+-- * token("keyword", p_obj)
+local t_closeobject = token("keyword", p_endobj)
+
+local t_opendictionary = token("grouping", P("<<"))
+local t_closedictionary = token("grouping", P(">>"))
+
+local t_openarray = token("grouping", P("["))
+local t_closearray = token("grouping", P("]"))
+
+local t_stream = token("keyword", p_stream)
+ * token("text", (1 - p_endstream)^1)
+ * token("keyword", p_endstream)
+
+local t_dictionary = { "dictionary",
+ dictionary = t_opendictionary * (t_spaces * t_keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
+ array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
+ whatever = V("dictionary") + V("array") + t_constant + t_reference + t_string + t_unicode + t_number + t_reserved + t_whatsit,
+ }
+
+local t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
+ dictionary = t_dictionary.dictionary,
+ array = t_dictionary.array,
+ whatever = t_dictionary.whatever,
+ object = t_openobject * t_spaces * (V("dictionary")^-1 * t_spaces * t_stream^-1 + V("array") + V("number") + t_spaces) * t_spaces * t_closeobject,
+ number = t_number,
+ }
+
+-- objects ... sometimes NUL characters play havoc ... and in xref we have
+-- issues with embedded lexers that have spaces in the start and stop
+-- conditions and this cannot be handled well either ... so, an imperfect
+-- solution ... but anyway, there is not that much that can end up in
+-- the root of the tree so we're sort of safe
+
+local p_trailer = P("trailer")
+local t_trailer = token("keyword", p_trailer)
+ * t_spacing
+ * t_dictionary
+-- t_trailer = token("keyword", p_trailer * spacing)
+-- * t_dictionary
+
+local p_startxref = P("startxref")
+local t_startxref = token("keyword", p_startxref)
+ * t_spacing
+ * token("number", cardinal)
+-- t_startxref = token("keyword", p_startxref * spacing)
+-- * token("number", cardinal)
+
+local p_xref = P("xref")
+local t_xref = token("keyword",p_xref)
+ * t_spacing
+ * token("number", cardinal)
+ * t_spacing
+ * token("number", cardinal)
+ * spacing
+-- t_xref = token("keyword",p_xref)
+-- * token("number", spacing * cardinal * spacing * cardinal * spacing)
+
+local t_number = token("number", cardinal)
+ * t_spacing
+ * token("number", cardinal)
+ * t_spacing
+ * token("keyword", S("fn"))
+-- t_number = token("number", cardinal * spacing * cardinal * spacing)
+-- * token("keyword", S("fn"))
+
+pdflexer._rules = {
+ { "whitespace", t_whitespace },
+ { "object", t_object },
+ { "comment", t_comment },
+ { "trailer", t_trailer },
+ { "startxref", t_startxref },
+ { "xref", t_xref },
+ { "number", t_number },
+ { "rest", t_rest },
+}
+
+pdflexer._tokenstyles = context.styleset
+
+-- lexer.inspect(pdflexer)
+
+-- collapser: obj endobj stream endstream
+
+pdflexer._foldpattern = p_obj + p_endobj + p_stream + p_endstream
+
+pdflexer._foldsymbols = {
+ ["keyword"] = {
+ ["obj"] = 1,
+ ["endobj"] = -1,
+ ["stream"] = 1,
+ ["endstream"] = -1,
+ },
+}
+
+return pdflexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-tex-web.lua b/context/data/scite/context/lexers/scite-context-lexer-tex-web.lua
new file mode 100644
index 000000000..5d8859c26
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-tex-web.lua
@@ -0,0 +1,23 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for tex web",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local texweblexer = lexer.new("tex-web","scite-context-lexer-tex")
+local texlexer = lexer.load("scite-context-lexer-tex")
+
+-- can probably be done nicer now, a bit of a hack
+
+texweblexer._rules = texlexer._rules_web
+texweblexer._tokenstyles = texlexer._tokenstyles
+texweblexer._foldsymbols = texlexer._foldsymbols
+texweblexer._directives = texlexer._directives
+
+return texweblexer
diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/context/lexers/scite-context-lexer-tex.lua
index a509fadab..ad73f4217 100644
--- a/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-tex.lua
@@ -24,33 +24,26 @@ local info = {
-- local interface = props["keywordclass.macros.context.en"]
-- local interface = lexer.get_property("keywordclass.macros.context.en","")
- -- it seems that whitespace triggers the lexer when embedding happens, but this
- -- is quite fragile due to duplicate styles .. lexer.WHITESPACE is a number
- -- (initially) ... _NAME vs filename (but we don't want to overwrite files)
-
- -- this lexer does not care about other macro packages (one can of course add a fake
- -- interface but it's not on the agenda)
-
]]--
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
local global, string, table, lpeg = _G, string, table, lpeg
-local token, exact_match = lexer.token, lexer.exact_match
local P, R, S, V, C, Cmt, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Cp, lpeg.Cc, lpeg.Ct
local type, next = type, next
local find, match, lower, upper = string.find, string.match, string.lower, string.upper
--- module(...)
-
-local contextlexer = { _NAME = "tex", _FILENAME = "scite-context-lexer-tex" }
-local whitespace = lexer.WHITESPACE
+local lexer = require("lexer")
local context = lexer.context
+local patterns = context.patterns
+local inform = context.inform
+
+local token = lexer.token
+local exact_match = lexer.exact_match
-local cldlexer = lexer.load('scite-context-lexer-cld')
------ cldlexer = lexer.load('scite-context-lexer-lua')
-local mpslexer = lexer.load('scite-context-lexer-mps')
+local contextlexer = lexer.new("tex","scite-context-lexer-tex")
+local whitespace = contextlexer.whitespace
+
+local cldlexer = lexer.load("scite-context-lexer-cld")
+local mpslexer = lexer.load("scite-context-lexer-mps")
local commands = { en = { } }
local primitives = { }
@@ -64,7 +57,9 @@ do -- todo: only once, store in global
local definitions = context.loaddefinitions("scite-context-data-interfaces")
if definitions then
+ local list = { }
for interface, list in next, definitions do
+ list[#list+1] = interface
local c = { }
for i=1,#list do
c[list[i]] = true
@@ -79,6 +74,7 @@ do -- todo: only once, store in global
end
commands[interface] = c
end
+ inform("context user interfaces '%s' supported",table.concat(list," "))
end
local definitions = context.loaddefinitions("scite-context-data-context")
@@ -146,13 +142,16 @@ local validminimum = 3
-- % language=uk
-local knownpreamble = Cmt(#P("% "), function(input,i,_) -- todo : utfbomb
+-- fails (empty loop message) ... latest lpeg issue?
+
+local knownpreamble = Cmt(P("% "), function(input,i,_) -- todo : utfbomb, was #P("% ")
if i < 10 then
validwords, validminimum = false, 3
- local s, e, word = find(input,'^(.+)[\n\r]',i) -- combine with match
+ local s, e, word = find(input,"^(.+)[\n\r]",i) -- combine with match
if word then
local interface = match(word,"interface=([a-z]+)")
- if interface then
+ if interface and #interface == 2 then
+ inform("enabling context user interface '%s'",interface)
currentcommands = commands[interface] or commands.en or { }
end
local language = match(word,"language=([a-z]+)")
@@ -170,7 +169,7 @@ end)
-- local helpers_hash = { } for i=1,#helpers do helpers_hash [helpers [i]] = true end
-- local primitives_hash = { } for i=1,#primitives do primitives_hash[primitives[i]] = true end
--- local specialword = Ct( P('\\') * Cmt( C(cstoken^1), function(input,i,s)
+-- local specialword = Ct( P("\\") * Cmt( C(cstoken^1), function(input,i,s)
-- if currentcommands[s] then
-- return true, "command", i
-- elseif constants_hash[s] then
@@ -184,7 +183,7 @@ end)
-- end
-- end) )
--- local specialword = P('\\') * Cmt( C(cstoken^1), function(input,i,s)
+-- local specialword = P("\\") * Cmt( C(cstoken^1), function(input,i,s)
-- if currentcommands[s] then
-- return true, { "command", i }
-- elseif constants_hash[s] then
@@ -202,11 +201,11 @@ end)
-- 10pt
-local commentline = P('%') * (1-S("\n\r"))^0
+local commentline = P("%") * (1-S("\n\r"))^0
local endline = S("\n\r")^1
-local space = lexer.space -- S(" \n\r\t\f\v")
-local any = lexer.any
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
local backslash = P("\\")
local hspace = S(" \t")
@@ -215,11 +214,17 @@ local p_rest = any
local p_preamble = knownpreamble
local p_comment = commentline
-local p_command = backslash * knowncommand
-local p_constant = backslash * exact_match(constants)
-local p_helper = backslash * exact_match(helpers)
-local p_primitive = backslash * exact_match(primitives)
-local p_ifprimitive = P('\\if') * cstoken^1
+----- p_command = backslash * knowncommand
+----- p_constant = backslash * exact_match(constants)
+----- p_helper = backslash * exact_match(helpers)
+----- p_primitive = backslash * exact_match(primitives)
+
+local p_command = backslash * lexer.helpers.utfchartabletopattern(currentcommands) * #(1-cstoken)
+local p_constant = backslash * lexer.helpers.utfchartabletopattern(constants) * #(1-cstoken)
+local p_helper = backslash * lexer.helpers.utfchartabletopattern(helpers) * #(1-cstoken)
+local p_primitive = backslash * lexer.helpers.utfchartabletopattern(primitives) * #(1-cstoken)
+
+local p_ifprimitive = P("\\if") * cstoken^1
local p_csname = backslash * (cstoken^1 + P(1))
local p_grouping = S("{$}")
local p_special = S("#()[]<>=\"")
@@ -299,24 +304,24 @@ local p_invisible = invisibles^1
local spacing = token(whitespace, p_spacing )
-local rest = token('default', p_rest )
-local preamble = token('preamble', p_preamble )
-local comment = token('comment', p_comment )
-local command = token('command', p_command )
-local constant = token('data', p_constant )
-local helper = token('plain', p_helper )
-local primitive = token('primitive', p_primitive )
-local ifprimitive = token('primitive', p_ifprimitive)
-local reserved = token('reserved', p_reserved )
-local csname = token('user', p_csname )
-local grouping = token('grouping', p_grouping )
-local number = token('number', p_number )
- * token('constant', p_unit )
-local special = token('special', p_special )
-local reserved = token('reserved', p_reserved ) -- reserved internal preproc
-local extra = token('extra', p_extra )
-local invisible = token('invisible', p_invisible )
-local text = token('default', p_text )
+local rest = token("default", p_rest )
+local preamble = token("preamble", p_preamble )
+local comment = token("comment", p_comment )
+local command = token("command", p_command )
+local constant = token("data", p_constant )
+local helper = token("plain", p_helper )
+local primitive = token("primitive", p_primitive )
+local ifprimitive = token("primitive", p_ifprimitive)
+local reserved = token("reserved", p_reserved )
+local csname = token("user", p_csname )
+local grouping = token("grouping", p_grouping )
+local number = token("number", p_number )
+ * token("constant", p_unit )
+local special = token("special", p_special )
+local reserved = token("reserved", p_reserved ) -- reserved internal preproc
+local extra = token("extra", p_extra )
+local invisible = token("invisible", p_invisible )
+local text = token("default", p_text )
local word = p_word
----- startluacode = token("grouping", P("\\startluacode"))
@@ -390,10 +395,11 @@ contextlexer._reset_parser = function()
end
local luaenvironment = P("lua") * (P("setups") + P("code") + P(true))
+ + P("ctxfunction") * (P("definition") + P(true))
local inlinelua = P("\\") * (
- P("ctx") * ( P("lua") + P("command") + P("late") * (P("lua") + P("command")) )
- + P("cld") * ( P("command") + P("context") )
+ P("ctx") * (P("lua") + P("command") + P("late") * (P("lua") + P("command")) + P("function"))
+ + P("cld") * (P("command") + P("context"))
+ P("luaexpr")
+ (P("direct") + P("late")) * P("lua")
)
@@ -410,6 +416,8 @@ local stoplua = P("\\stop") * Cmt(luaenvironment,stopdisplaylua)
local startluacode = token("embedded", startlua)
local stopluacode = #stoplua * token("embedded", stoplua)
+local luacall = P("clf_") * R("az","__","AZ")^1
+
local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("use") + P("reuse") ) * ("MPgraphic")
+ P("uniqueMPpagegraphic")
+ P("MPpositiongraphic")
@@ -420,6 +428,11 @@ local metafunenvironment = metafuncall -- ( P("use") + P("reusable") + P("un
local startmetafun = P("\\start") * metafunenvironment
local stopmetafun = P("\\stop") * metafunenvironment -- todo match start
+----- subsystem = token("embedded", P("\\xml") * R("az")^1 + (P("\\st") * (P("art") + P("op")) * P("xmlsetups")))
+local subsystemtags = P("xml") + P("btx") -- will be pluggable or maybe even a proper list of valid commands
+local subsystemmacro = P("\\") * (subsystemtags * R("az")^1 + (R("az")-subsystemtags)^1 * subsystemtags * R("az")^1)
+local subsystem = token("embedded", subsystemmacro)
+
local openargument = token("special", P("{"))
local closeargument = token("special", P("}"))
local argumentcontent = token("default",(1-P("}"))^0) -- maybe space needs a treatment
@@ -430,13 +443,11 @@ local startmetafuncode = token("embedded", startmetafun) * metafunargument
local stopmetafuncode = token("embedded", stopmetafun)
local callers = token("embedded", P("\\") * metafuncall) * metafunarguments
+ + token("embedded", P("\\") * luacall)
lexer.embed_lexer(contextlexer, cldlexer, startluacode, stopluacode)
lexer.embed_lexer(contextlexer, mpslexer, startmetafuncode, stopmetafuncode)
--- Watch the text grabber, after all, we're talking mostly of text (beware,
--- no punctuation here as it can be special. We might go for utf here.
-
contextlexer._rules = {
{ "whitespace", spacing },
{ "preamble", preamble },
@@ -444,11 +455,13 @@ contextlexer._rules = {
{ "text", text }, -- non words
{ "comment", comment },
{ "constant", constant },
+ -- { "subsystem", subsystem },
{ "callers", callers },
{ "helper", helper },
{ "command", command },
{ "primitive", primitive },
{ "ifprimitive", ifprimitive },
+ { "subsystem", subsystem },
{ "reserved", reserved },
{ "csname", csname },
-- { "whatever", specialword }, -- not yet, crashes
@@ -460,11 +473,61 @@ contextlexer._rules = {
{ "rest", rest },
}
-contextlexer._tokenstyles = context.styleset
--- contextlexer._tokenstyles = context.stylesetcopy() -- experiment
+-- Watch the text grabber, after all, we're talking mostly of text (beware,
+-- no punctuation here as it can be special). We might go for utf here.
+
+local web = lexer.loadluafile("scite-context-lexer-web-snippets")
+
+if web then
+
+ lexer.inform("supporting web snippets in tex lexer")
+
+ contextlexer._rules_web = {
+ { "whitespace", spacing },
+ { "text", text }, -- non words
+ { "comment", comment },
+ { "constant", constant },
+ { "callers", callers },
+ { "helper", helper },
+ { "command", command },
+ { "primitive", primitive },
+ { "ifprimitive", ifprimitive },
+ { "reserved", reserved },
+ { "csname", csname },
+ { "grouping", grouping },
+ { "special", special },
+ { "extra", extra },
+ { "invisible", invisible },
+ { "web", web.pattern },
+ { "rest", rest },
+ }
+
+else
+
+ lexer.report("not supporting web snippets in tex lexer")
+
+ contextlexer._rules_web = {
+ { "whitespace", spacing },
+ { "text", text }, -- non words
+ { "comment", comment },
+ { "constant", constant },
+ { "callers", callers },
+ { "helper", helper },
+ { "command", command },
+ { "primitive", primitive },
+ { "ifprimitive", ifprimitive },
+ { "reserved", reserved },
+ { "csname", csname },
+ { "grouping", grouping },
+ { "special", special },
+ { "extra", extra },
+ { "invisible", invisible },
+ { "rest", rest },
+ }
--- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { cldlexer._NAME..'_whitespace', lexer.style_whitespace }
--- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { mpslexer._NAME..'_whitespace', lexer.style_whitespace }
+end
+
+contextlexer._tokenstyles = context.styleset
local environment = {
["\\start"] = 1, ["\\stop"] = -1,
@@ -481,7 +544,7 @@ local group = {
contextlexer._foldpattern = P("\\" ) * (P("start") + P("stop")) + S("{}") -- separate entry else interference
-contextlexer._foldsymbols = { -- these need to be style references
+contextlexer._foldsymbols = { -- these need to be style references .. todo: multiple styles
_patterns = {
"\\start", "\\stop", -- regular environments
-- "\\begin", "\\end", -- (moveable) blocks
@@ -492,7 +555,11 @@ contextlexer._foldsymbols = { -- these need to be style references
["data"] = environment,
["user"] = environment,
["embedded"] = environment,
+ ["helper"] = environment,
+ ["plain"] = environment,
["grouping"] = group,
}
+-- context.inspect(contextlexer)
+
return contextlexer
diff --git a/context/data/scite/lexers/scite-context-lexer-txt.lua b/context/data/scite/context/lexers/scite-context-lexer-txt.lua
index fe062fb94..43eec2c35 100644
--- a/context/data/scite/lexers/scite-context-lexer-txt.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-txt.lua
@@ -6,22 +6,23 @@ local info = {
license = "see context related readme files",
}
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
-local token = lexer.token
-local P, S, Cmt, Cp, Ct = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp, lpeg.Ct
+local P, S, Cmt, Cp = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp
local find, match = string.find, string.match
-local textlexer = { _NAME = "txt", _FILENAME = "scite-context-lexer-txt" }
-local whitespace = lexer.WHITESPACE
+local lexer = require("lexer")
local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local textlexer = lexer.new("txt","scite-context-lexer-txt")
+local whitespace = textlexer.whitespace
-local space = lexer.space
-local any = lexer.any
+local space = patterns.space
+local any = patterns.any
+local wordtoken = patterns.wordtoken
+local wordpattern = patterns.wordpattern
-local wordtoken = context.patterns.wordtoken
-local wordpattern = context.patterns.wordpattern
local checkedword = context.checkedword
local styleofword = context.styleofword
local setwordlist = context.setwordlist
@@ -36,10 +37,10 @@ local validminimum = 3
-- [#!-%] language=uk
-local p_preamble = Cmt(#(S("#!-%") * P(" ")), function(input,i,_) -- todo: utf bomb
+local p_preamble = Cmt((S("#!-%") * P(" ")), function(input,i,_) -- todo: utf bomb no longer #
if i == 1 then -- < 10 then
validwords, validminimum = false, 3
- local s, e, line = find(input,'^[#!%-%%](.+)[\n\r]',i)
+ local s, e, line = find(input,"^[#!%-%%](.+)[\n\r]",i)
if line then
local language = match(line,"language=([a-z]+)")
if language then
@@ -54,7 +55,6 @@ local t_preamble =
token("preamble", p_preamble)
local t_word =
--- Ct( wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
local t_text =
diff --git a/context/data/scite/context/lexers/scite-context-lexer-web-snippets.lua b/context/data/scite/context/lexers/scite-context-lexer-web-snippets.lua
new file mode 100644
index 000000000..196a545bc
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-web-snippets.lua
@@ -0,0 +1,133 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for web snippets",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local P, R, S, C, Cg, Cb, Cs, Cmt, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt, lpeg.match
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local websnippets = { }
+
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
+local restofline = patterns.restofline
+local startofline = patterns.startofline
+
+local squote = P("'")
+local dquote = P('"')
+local period = P(".")
+
+local t_whitespace = token(whitespace, space^1)
+local t_spacing = token("default", space^1)
+local t_rest = token("default", any)
+
+-- the web subset
+
+local p_beginofweb = P("@")
+local p_endofweb = P("@>")
+
+-- @, @/ @| @# @+ @; @[ @]
+
+local p_directive_1 = p_beginofweb * S(",/|#+;[]")
+local t_directive_1 = token("label",p_directive_1)
+
+-- @.text @>(monospaced)
+-- @:text @>(macro driven)
+-- @= verbose@>
+-- @! underlined @>
+-- @t text @> (hbox)
+-- @q ignored @>
+
+local p_typeset = p_beginofweb * S(".:=!tq")
+local t_typeset = token("label",p_typeset) * token("warning",(1-p_endofweb)^1) * token("label",p_endofweb)
+
+-- @^index@>
+
+local p_index = p_beginofweb * P("^")
+local t_index = token("label",p_index) * token("function",(1-p_endofweb)^1) * token("label",p_endofweb)
+
+-- @f text renderclass
+
+local p_render = p_beginofweb * S("f")
+local t_render = token("label",p_render) * t_spacing * token("warning",(1-space)^1) * t_spacing * token("label",(1-space)^1)
+
+-- @s idem
+-- @p idem
+-- @& strip (spaces before)
+-- @h
+
+local p_directive_2 = p_beginofweb * S("sp&h")
+local t_directive_2 = token("label",p_directive_2)
+
+-- @< ... @> [=|+=|]
+-- @(foo@>
+
+local p_reference = p_beginofweb * S("<(")
+local t_reference = token("label",p_reference) * token("function",(1-p_endofweb)^1) * token("label",p_endofweb * (P("+=") + P("="))^-1)
+
+-- @'char' (ascii code)
+
+local p_character = p_beginofweb * S("'")
+local t_character = token("label",p_character) * token("reserved",(1-squote)^1) * token("label",squote)
+
+-- @l nonascii
+
+local p_nonascii = p_beginofweb * S("l")
+local t_nonascii = token("label",p_nonascii) * t_spacing * token("reserved",(1-space)^1)
+
+-- @x @y @z changefile
+-- @i webfile
+
+local p_filename = p_beginofweb * S("xyzi")
+local t_filename = token("label",p_filename) * t_spacing * token("reserved",(1-space)^1)
+
+-- @@ escape
+
+local p_escape = p_beginofweb * p_beginofweb
+local t_escape = token("text",p_escape)
+
+-- structure
+
+-- @* title.
+
+-- local p_section = p_beginofweb * P("*")^1
+-- local t_section = token("label",p_section) * t_spacing * token("function",(1-period)^1) * token("label",period)
+
+-- @ explanation
+
+-- local p_explanation = p_beginofweb
+-- local t_explanation = token("label",p_explanation) * t_spacing^1
+
+-- @d macro
+
+-- local p_macro = p_beginofweb * P("d")
+-- local t_macro = token("label",p_macro)
+
+-- @c code
+
+-- local p_code = p_beginofweb * P("c")
+-- local t_code = token("label",p_code)
+
+websnippets.pattern = P (
+ t_typeset
+ + t_index
+ + t_render
+ + t_reference
+ + t_filename
+ + t_directive_1
+ + t_directive_2
+ + t_character
+ + t_nonascii
+ + t_escape
+)
+
+
+return websnippets
diff --git a/context/data/scite/context/lexers/scite-context-lexer-web.lua b/context/data/scite/context/lexers/scite-context-lexer-web.lua
new file mode 100644
index 000000000..86ae76644
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-web.lua
@@ -0,0 +1,67 @@
+local info = {
+ version = 1.003,
+ comment = "scintilla lpeg lexer for web",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local P, R, S = lpeg.P, lpeg.R, lpeg.S
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+local exact_match = lexer.exact_match
+
+local weblexer = lexer.new("web","scite-context-lexer-web")
+local whitespace = weblexer.whitespace
+
+local space = patterns.space -- S(" \n\r\t\f\v")
+local any = patterns.any
+local restofline = patterns.restofline
+local startofline = patterns.startofline
+
+local period = P(".")
+local percent = P("%")
+
+local spacing = token(whitespace, space^1)
+local rest = token("default", any)
+
+local eop = P("@>")
+local eos = eop * P("+")^-1 * P("=")
+
+-- we can put some of the next in the web-snippets file
+-- is f okay here?
+
+local texcomment = token("comment", percent * restofline^0)
+
+local texpart = token("label",P("@")) * #spacing
+ + token("label",P("@") * P("*")^1) * token("function",(1-period)^1) * token("label",period)
+local midpart = token("label",P("@d")) * #spacing
+ + token("label",P("@f")) * #spacing
+local cpppart = token("label",P("@c")) * #spacing
+ + token("label",P("@p")) * #spacing
+ + token("label",P("@") * S("<(")) * token("function",(1-eop)^1) * token("label",eos)
+
+local anypart = P("@") * ( P("*")^1 + S("dfcp") + space^1 + S("<(") * (1-eop)^1 * eos )
+local limbo = 1 - anypart - percent
+
+local texlexer = lexer.load("scite-context-lexer-tex-web")
+local cpplexer = lexer.load("scite-context-lexer-cpp-web")
+
+lexer.embed_lexer(weblexer, texlexer, texpart + limbo, #anypart)
+lexer.embed_lexer(weblexer, cpplexer, cpppart + midpart, #anypart)
+
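+-- a rough, made-up sketch of the input this expects: some TeX limbo and
+-- section text first, then definition and code parts:
+--
+-- @* Intro. Some \TeX\ text.
+-- @d answer 42
+-- @c
+-- int answer_plus_one (void) { return answer + 1 ; }
+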
+local texcomment = token("comment", percent * restofline^0)
+
+weblexer._rules = {
+ { "whitespace", spacing },
+ { "texcomment", texcomment }, -- else issues with first tex section
+ { "rest", rest },
+}
+
+weblexer._tokenstyles = context.styleset
+
+return weblexer
diff --git a/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua b/context/data/scite/context/lexers/scite-context-lexer-xml-cdata.lua
index 97253e140..e6276da0d 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-xml-cdata.lua
@@ -6,23 +6,26 @@ local info = {
license = "see context related readme files",
}
-local lexer = lexer
-local token = lexer.token
local P = lpeg.P
-local xmlcdatalexer = { _NAME = "xml-cdata", _FILENAME = "scite-context-lexer-xml-cdata" }
-local whitespace = lexer.WHITESPACE -- triggers states
+local lexer = require("lexer")
local context = lexer.context
+local patterns = context.patterns
-local space = lexer.space
+local token = lexer.token
+
+local xmlcdatalexer = lexer.new("xml-cdata","scite-context-lexer-xml-cdata")
+local whitespace = xmlcdatalexer.whitespace
+
+local space = patterns.space
local nospace = 1 - space - P("]]>")
-local p_spaces = token(whitespace, space ^1)
-local p_cdata = token("comment", nospace^1)
+local t_spaces = token(whitespace, space ^1)
+local t_cdata = token("comment", nospace^1)
xmlcdatalexer._rules = {
- { "whitespace", p_spaces },
- { "cdata", p_cdata },
+ { "whitespace", t_spaces },
+ { "cdata", t_cdata },
}
xmlcdatalexer._tokenstyles = context.styleset
diff --git a/context/data/scite/context/lexers/scite-context-lexer-xml-comment.lua b/context/data/scite/context/lexers/scite-context-lexer-xml-comment.lua
new file mode 100644
index 000000000..b5b3fefe0
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-xml-comment.lua
@@ -0,0 +1,33 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for xml comments",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local P = lpeg.P
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local xmlcommentlexer = lexer.new("xml-comment","scite-context-lexer-xml-comment")
+local whitespace = xmlcommentlexer.whitespace
+
+local space = patterns.space
+local nospace = 1 - space - P("-->")
+
+local t_spaces = token(whitespace, space ^1)
+local t_comment = token("comment", nospace^1)
+
+xmlcommentlexer._rules = {
+ { "whitespace", t_spaces },
+ { "comment", t_comment },
+}
+
+xmlcommentlexer._tokenstyles = context.styleset
+
+return xmlcommentlexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer-xml-script.lua b/context/data/scite/context/lexers/scite-context-lexer-xml-script.lua
new file mode 100644
index 000000000..bbb938dc5
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer-xml-script.lua
@@ -0,0 +1,33 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for xml script",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local P = lpeg.P
+
+local lexer = require("lexer")
+local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+
+local xmlscriptlexer = lexer.new("xml-script","scite-context-lexer-xml-script")
+local whitespace = xmlscriptlexer.whitespace
+
+local space = patterns.space
+local nospace = 1 - space - (P("</") * P("script") + P("SCRIPT")) * P(">")
+
+local t_spaces = token(whitespace, space ^1)
+local t_script = token("default", nospace^1)
+
+xmlscriptlexer._rules = {
+ { "whitespace", t_spaces },
+ { "script", t_script },
+}
+
+xmlscriptlexer._tokenstyles = context.styleset
+
+return xmlscriptlexer
diff --git a/context/data/scite/lexers/scite-context-lexer-xml.lua b/context/data/scite/context/lexers/scite-context-lexer-xml.lua
index 241e22591..77c89b1d6 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-xml.lua
@@ -12,26 +12,28 @@ local info = {
-- todo: parse entities in attributes
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
local global, string, table, lpeg = _G, string, table, lpeg
-local token, exact_match = lexer.token, lexer.exact_match
-local P, R, S, V, C, Cmt, Ct, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Ct, lpeg.Cp
+local P, R, S, C, Cmt, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cmt, lpeg.Cp
local type = type
local match, find = string.match, string.find
-local xmllexer = { _NAME = "xml", _FILENAME = "scite-context-lexer-xml" }
-local whitespace = lexer.WHITESPACE -- triggers states
+local lexer = require("lexer")
local context = lexer.context
+local patterns = context.patterns
+
+local token = lexer.token
+local exact_match = lexer.exact_match
-local xmlcommentlexer = lexer.load("scite-context-lexer-xml-comment") -- indirect (some issue with the lexer framework)
-local xmlcdatalexer = lexer.load("scite-context-lexer-xml-cdata") -- indirect (some issue with the lexer framework)
-local xmlscriptlexer = lexer.load("scite-context-lexer-xml-script") -- indirect (some issue with the lexer framework)
-local lualexer = lexer.load("scite-context-lexer-lua") --
+local xmllexer = lexer.new("xml","scite-context-lexer-xml")
+local whitespace = xmllexer.whitespace
-local space = lexer.space -- S(" \t\n\r\v\f")
-local any = lexer.any -- P(1)
+local xmlcommentlexer = lexer.load("scite-context-lexer-xml-comment")
+local xmlcdatalexer = lexer.load("scite-context-lexer-xml-cdata")
+local xmlscriptlexer = lexer.load("scite-context-lexer-xml-script")
+local lualexer = lexer.load("scite-context-lexer-lua")
+
+local space = patterns.space
+local any = patterns.any
local dquote = P('"')
local squote = P("'")
@@ -40,7 +42,7 @@ local semicolon = P(";")
local equal = P("=")
local ampersand = P("&")
-local name = (R("az","AZ","09") + S('_-.'))^1
+local name = (R("az","AZ","09") + S("_-."))^1
local openbegin = P("<")
local openend = P("</")
local closebegin = P("/>") + P(">")
@@ -84,12 +86,12 @@ local validminimum = 3
--
-- <?context-directive editor language us ?>
-local p_preamble = Cmt(#P("<?xml "), function(input,i,_) -- todo: utf bomb
+local t_preamble = Cmt(P("<?xml "), function(input,i,_) -- todo: utf bomb, no longer #
if i < 200 then
validwords, validminimum = false, 3
local language = match(input,"^<%?xml[^>]*%?>%s*<%?context%-directive%s+editor%s+language%s+(..)%s+%?>")
-- if not language then
- -- language = match(input,'^<%?xml[^>]*language=[\"\'](..)[\"\'][^>]*%?>',i)
+ -- language = match(input,"^<%?xml[^>]*language=[\"\'](..)[\"\'][^>]*%?>",i)
-- end
if language then
validwords, validminimum = setwordlist(language)
@@ -98,24 +100,23 @@ local p_preamble = Cmt(#P("<?xml "), function(input,i,_) -- todo: utf bomb
return false
end)
-local p_word =
+local t_word =
-- Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
-local p_rest =
+local t_rest =
token("default", any)
-local p_text =
+local t_text =
token("default", (1-S("<>&")-space)^1)
-local p_spacing =
+local t_spacing =
token(whitespace, space^1)
--- token("whitespace", space^1)
-local p_optionalwhitespace =
- p_spacing^0
+local t_optionalwhitespace =
+ token("default", space^1)^0
-local p_localspacing =
+local t_localspacing =
token("default", space^1)
-- Because we want a differently colored open and close we need an embedded lexer (whitespace
@@ -123,22 +124,22 @@ local p_localspacing =
-- Even using different style keys is not robust as they can be shared. I'll fix the main
-- lexer code.
-local p_sstring =
+local t_sstring =
token("quote",dquote)
* token("string",(1-dquote)^0) -- different from context
* token("quote",dquote)
-local p_dstring =
+local t_dstring =
token("quote",squote)
* token("string",(1-squote)^0) -- different from context
* token("quote",squote)
--- local p_comment =
+-- local t_comment =
-- token("command",opencomment)
-- * token("comment",(1-closecomment)^0) -- different from context
-- * token("command",closecomment)
--- local p_cdata =
+-- local t_cdata =
-- token("command",opencdata)
-- * token("comment",(1-closecdata)^0) -- different from context
-- * token("command",closecdata)
@@ -156,74 +157,74 @@ local p_dstring =
-- <!ENTITY xxxx PUBLIC "yyyy" >
-- <!ENTITY xxxx "yyyy" >
-local p_docstr = p_dstring + p_sstring
+local t_docstr = t_dstring + t_sstring
-local p_docent = token("command",P("<!ENTITY"))
- * p_optionalwhitespace
+local t_docent = token("command",P("<!ENTITY"))
+ * t_optionalwhitespace
* token("keyword",name)
- * p_optionalwhitespace
+ * t_optionalwhitespace
* (
(
token("constant",P("SYSTEM"))
- * p_optionalwhitespace
- * p_docstr
- * p_optionalwhitespace
+ * t_optionalwhitespace
+ * t_docstr
+ * t_optionalwhitespace
* token("constant",P("NDATA"))
- * p_optionalwhitespace
+ * t_optionalwhitespace
* token("keyword",name)
) + (
token("constant",P("PUBLIC"))
- * p_optionalwhitespace
- * p_docstr
+ * t_optionalwhitespace
+ * t_docstr
) + (
- p_docstr
+ t_docstr
)
)
- * p_optionalwhitespace
+ * t_optionalwhitespace
* token("command",P(">"))
-local p_docele = token("command",P("<!ELEMENT"))
- * p_optionalwhitespace
+local t_docele = token("command",P("<!ELEMENT"))
+ * t_optionalwhitespace
* token("keyword",name)
- * p_optionalwhitespace
+ * t_optionalwhitespace
* token("command",P("("))
* (
- p_spacing
+ t_localspacing
+ token("constant",P("#CDATA") + P("#PCDATA") + P("ANY"))
+ token("text",P(","))
+ token("comment",(1-S(",)"))^1)
)^1
* token("command",P(")"))
- * p_optionalwhitespace
+ * t_optionalwhitespace
* token("command",P(">"))
-local p_docset = token("command",P("["))
- * p_optionalwhitespace
- * ((p_optionalwhitespace * (p_docent + p_docele))^1 + token("comment",(1-P("]"))^0))
- * p_optionalwhitespace
+local t_docset = token("command",P("["))
+ * t_optionalwhitespace
+ * ((t_optionalwhitespace * (t_docent + t_docele))^1 + token("comment",(1-P("]"))^0))
+ * t_optionalwhitespace
* token("command",P("]"))
-local p_doctype = token("command",P("<!DOCTYPE"))
- * p_optionalwhitespace
+local t_doctype = token("command",P("<!DOCTYPE"))
+ * t_optionalwhitespace
* token("keyword",name)
- * p_optionalwhitespace
+ * t_optionalwhitespace
* (
(
token("constant",P("PUBLIC"))
- * p_optionalwhitespace
- * p_docstr
- * p_optionalwhitespace
- * p_docstr
- * p_optionalwhitespace
+ * t_optionalwhitespace
+ * t_docstr
+ * t_optionalwhitespace
+ * t_docstr
+ * t_optionalwhitespace
) + (
token("constant",P("SYSTEM"))
- * p_optionalwhitespace
- * p_docstr
- * p_optionalwhitespace
+ * t_optionalwhitespace
+ * t_docstr
+ * t_optionalwhitespace
)
)^-1
- * p_docset^-1
- * p_optionalwhitespace
+ * t_docset^-1
+ * t_optionalwhitespace
* token("command",P(">"))
lexer.embed_lexer(xmllexer, lualexer, token("command", openlua), token("command", closelua))
@@ -231,7 +232,7 @@ lexer.embed_lexer(xmllexer, xmlcommentlexer, token("command", opencomment), toke
lexer.embed_lexer(xmllexer, xmlcdatalexer, token("command", opencdata), token("command", closecdata))
lexer.embed_lexer(xmllexer, xmlscriptlexer, token("command", openscript), token("command", closescript))
--- local p_name =
+-- local t_name =
-- token("plain",name)
-- * (
-- token("default",colon)
@@ -239,11 +240,11 @@ lexer.embed_lexer(xmllexer, xmlscriptlexer, token("command", openscript), toke
-- )
-- + token("keyword",name)
-local p_name = -- more robust
+local t_name = -- more robust
token("plain",name * colon)^-1
* token("keyword",name)
--- local p_key =
+-- local t_key =
-- token("plain",name)
-- * (
-- token("default",colon)
@@ -251,81 +252,82 @@ local p_name = -- more robust
-- )
-- + token("constant",name)
-local p_key =
+local t_key =
token("plain",name * colon)^-1
* token("constant",name)
-local p_attributes = (
- p_optionalwhitespace
- * p_key
- * p_optionalwhitespace
+local t_attributes = (
+ t_optionalwhitespace
+ * t_key
+ * t_optionalwhitespace
* token("plain",equal)
- * p_optionalwhitespace
- * (p_dstring + p_sstring)
- * p_optionalwhitespace
+ * t_optionalwhitespace
+ * (t_dstring + t_sstring)
+ * t_optionalwhitespace
)^0
-local p_open =
+local t_open =
token("keyword",openbegin)
* (
- p_name
- * p_optionalwhitespace
- * p_attributes
+ t_name
+ * t_optionalwhitespace
+ * t_attributes
* token("keyword",closebegin)
+
token("error",(1-closebegin)^1)
)
-local p_close =
+local t_close =
token("keyword",openend)
* (
- p_name
- * p_optionalwhitespace
+ t_name
+ * t_optionalwhitespace
* token("keyword",closeend)
+
token("error",(1-closeend)^1)
)
-local p_entity =
+local t_entity =
token("constant",entity)
-local p_instruction =
+local t_instruction =
token("command",openinstruction * P("xml"))
- * p_optionalwhitespace
- * p_attributes
- * p_optionalwhitespace
+ * t_optionalwhitespace
+ * t_attributes
+ * t_optionalwhitespace
* token("command",closeinstruction)
+ token("command",openinstruction * name)
* token("default",(1-closeinstruction)^1)
* token("command",closeinstruction)
-local p_invisible =
+local t_invisible =
token("invisible",invisibles^1)
--- local p_preamble =
--- token('preamble', p_preamble )
+-- local t_preamble =
+-- token("preamble", t_preamble )
xmllexer._rules = {
- { "whitespace", p_spacing },
- { "preamble", p_preamble },
- { "word", p_word },
- -- { "text", p_text },
- -- { "comment", p_comment },
- -- { "cdata", p_cdata },
- { "doctype", p_doctype },
- { "instruction", p_instruction },
- { "close", p_close },
- { "open", p_open },
- { "entity", p_entity },
- { "invisible", p_invisible },
- { "rest", p_rest },
+ { "whitespace", t_spacing },
+ { "preamble", t_preamble },
+ { "word", t_word },
+ -- { "text", t_text },
+ -- { "comment", t_comment },
+ -- { "cdata", t_cdata },
+ { "doctype", t_doctype },
+ { "instruction", t_instruction },
+ { "close", t_close },
+ { "open", t_open },
+ { "entity", t_entity },
+ { "invisible", t_invisible },
+ { "rest", t_rest },
}
xmllexer._tokenstyles = context.styleset
xmllexer._foldpattern = P("</") + P("<") + P("/>") -- separate entry else interference
++ P("<!--") + P("-->")
-xmllexer._foldsymbols = { -- somehow doesn't work yet
+xmllexer._foldsymbols = {
_patterns = {
"</",
"/>",
@@ -336,6 +338,13 @@ xmllexer._foldsymbols = { -- somehow doesn't work yet
["/>"] = -1,
["<"] = 1,
},
+ ["command"] = {
+ ["</"] = -1,
+ ["/>"] = -1,
+ ["<!--"] = 1,
+ ["-->"] = -1,
+ ["<"] = 1,
+ },
}
return xmllexer
diff --git a/context/data/scite/context/lexers/scite-context-lexer.lua b/context/data/scite/context/lexers/scite-context-lexer.lua
new file mode 100644
index 000000000..4d401ac7b
--- /dev/null
+++ b/context/data/scite/context/lexers/scite-context-lexer.lua
@@ -0,0 +1,2177 @@
+local info = {
+ version = 1.400,
+ comment = "basics for scintilla lpeg lexer for context/metafun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ comment = "contains copyrighted code from mitchell.att.foicica.com",
+
+}
+
+-- todo: hook into context resolver etc
+-- todo: only old api in lexers, rest in context subnamespace
+-- todo: make sure we can run in one state .. copies or shared?
+-- todo: auto-nesting
+
+if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
+
+local log = false
+local trace = false
+local detail = false
+local show = false -- nice for tracing (also for later)
+local collapse = false -- can save some 15% (maybe easier on scintilla)
+local inspect = false -- can save some 15% (maybe easier on scintilla)
+
+-- local log = true
+-- local trace = true
+
+-- GET GOING
+--
+-- You need to copy this file over lexer.lua. In principle other lexers could
+-- work too but not now. Maybe some day. All patterns will move into the patterns
+-- name space. I might do the same with styles. If you run an older version of
+-- SciTE you can take one of the archives. Pre 3.41 versions can just be copied
+-- to the right path, as there we still use part of the normal lexer.
+--
+-- REMARK
+--
+-- We started using lpeg lexing as soon as it became available. Because we had
+-- rather demanding files and also wanted to use nested lexers, we ended up with
+-- our own variant (more robust and faster). As a consequence successive versions
+-- had to be adapted to changes in the (still unstable) api. In addition to
+-- lexing we also have spell checking and such.
+--
+-- STATUS
+--
+-- todo: maybe use a special stripped version of the dll (stable api)
+-- todo: play with hotspot and other properties
+-- wish: access to all scite properties and in fact integrate in scite
+-- todo: add proper tracing and so .. not too hard as we can run on mtxrun
+-- todo: get rid of these lexers.STYLE_XX and lexers.XX (hide such details)
+--
+-- HISTORY
+--
+-- The fold and lex functions are copied and patched from original code by Mitchell
+-- (see lexer.lua). All errors are mine. The ability to use lpeg is a really nice
+-- addition and a brilliant move. The code is a byproduct of the (mainly Lua based)
+-- textadept (still a rapidly moving target) that unfortunately misses a realtime
+-- output pane. On the other hand, SciTE is somewhat crippled by the fact that we
+-- cannot pop in our own (language dependent) lexer into the output pane (somehow
+-- the errorlist lexer is hard coded into the editor). Hopefully that will change
+-- some day.
+--
+-- Starting with SciTE version 3.20 there is an issue with coloring. As we still
+-- lack a connection with SciTE itself (properties as well as printing to the log
+-- pane) we cannot trace this (on windows). As far as I can see, there are no
+-- fundamental changes in lexer.lua or LexLPeg.cxx so it must be in Scintilla
+-- itself. So for the moment I stick to 3.10. Indicators are: no lexing of 'next'
+-- and 'goto <label>' in the Lua lexer and no brace highlighting either. Interesting
+-- is that it does work ok in the cld lexer (so the Lua code is okay). Also the fact
+-- that char-def.lua lexes fast is a signal that the lexer quits somewhere halfway.
+-- Maybe there are some hard coded limitations on the number of styles and/or the
+-- length of names.
+--
+-- After checking 3.24 and adapting to the new lexer tables things are okay again.
+-- So, this version assumes 3.24 or higher. In 3.24 we have a different token
+-- result, i.e. no longer a { tag, pattern } but just two return values. I didn't
+-- check other changes but will do that when I run into issues. I had optimized
+-- these small tables by hashing which was more efficient but this is no longer
+-- needed. For the moment we keep some of that code around as I don't know what
+-- happens in future versions.
+--
+-- In 3.31 another major change took place: some helper constants (maybe they're no
+-- longer constants) and functions were moved into the lexer module's namespace but
+-- the functions are assigned to the Lua module afterward so we cannot alias them
+-- beforehand. We're probably getting close to a stable interface now. I've
+-- considered making a whole copy and patching the other functions too as we need an
+-- extra nesting model. However, I don't want to maintain too much. An unfortunate
+-- change in 3.03 is that a script can no longer be specified. This means that
+-- instead of loading the extensions via the properties file, we now need to load
+-- them in our own lexers, unless of course we replace lexer.lua completely (which
+-- adds another installation issue).
+--
+-- Another change has been that _LEXERHOME is no longer available. It looks like
+-- more and more functionality gets dropped so maybe at some point we need to ship
+-- our own dll/so files. For instance, I'd like to have access to the current
+-- filename and other scite properties. For instance, we could cache some info with
+-- each file, if only we had knowledge of what file we're dealing with.
+--
+-- For huge files folding can be pretty slow and I do have some large ones that I
+-- keep open all the time. Loading is normally no issue, unless one has remembered
+-- the status and the cursor is at the last line of a 200K line file. Optimizing the
+-- fold function brought down loading of char-def.lua from 14 sec => 8 sec.
+-- Replacing the word_match function and optimizing the lex function gained another
+-- 2+ seconds. A 6 second load is quite ok for me. The changed lexer table structure
+-- (no subtables) brings loading down to a few seconds.
+--
+-- When the lexer path is copied to the textadept lexer path, and the theme
+-- definition to theme path (as lexer.lua), the lexer works there as well. When I
+-- have time and motivation I will make a proper setup file to tune the look and feel a
+-- bit and associate suffixes with the context lexer. The textadept editor has a
+-- nice style tracing option but lacks the tabs for selecting files that scite has.
+-- It also has no integrated run that pipes to the log pane. Interesting is that the
+-- jit version of textadept crashes on lexing large files (and does not feel faster
+-- either; maybe a side effect of known limitations).
+--
+-- Function load(lexer_name) starts with _lexers.WHITESPACE = lexer_name ..
+-- '_whitespace' which means that we need to have it frozen at the moment we load
+-- another lexer. Because spacing is used to revert to a parent lexer we need to
+-- make sure that we load children as late as possible in order not to get the wrong
+-- whitespace trigger. This took me quite a while to figure out (not being that
+-- familiar with the internals). The lex and fold functions have been optimized. It
+-- is a pity that there is no proper print available. Another thing needed is a
+-- default style in our own theme style definition, as otherwise we get wrong nested
+-- lexers, especially if they are larger than a view. This is the hardest part of
+-- getting things right.
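+--
+-- A minimal sketch of the loading order that the individual lexers therefore
+-- follow (the names are just an example):
+--
+-- local parent = lexer.new("tex","scite-context-lexer-tex")
+-- local whitespace = parent.whitespace -- freeze the trigger name first
+-- local child = lexer.load("scite-context-lexer-mps") -- load children late
+-- lexer.embed_lexer(parent,child,starttoken,stoptoken)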
+--
+-- It's a pity that there is no scintillua library for the OSX version of scite.
+-- Even better would be to have the scintillua library as an integral part of scite as
+-- that way I could use OSX alongside windows and linux (depending on needs). Also
+-- nice would be to have a proper interface to scite then because currently the
+-- lexer is rather isolated and the lua version does not provide all standard
+-- libraries. It would also be good to have lpeg support in the regular scite lua
+-- extension (currently you need to pick it up from someplace else).
+--
+-- With 3.41 the interface changed again so it gets time to look into the C++ code
+-- and consider compiling and patching myself. Loading is more complicated not as
+-- the lexer gets loaded automatically so we have little control over extending the
+-- code now. After a few days trying all kind of solutions I decided to follow a
+-- different approach: drop in a complete replacement. This of course means that I
+-- need to keep track of even more changes (which for sure will happen) but at least
+-- I get rid of interferences. The api (lexing and configuration) is simply too
+-- unstable across versions. Maybe in a few years things will have stabilized. (Or maybe
+-- it's not really expected that one writes lexers at all.) A side effect is that I
+-- now no longer will use shipped lexers but just the built-in ones. Not that it
+-- matters much as the context lexers cover what I need (and I can always write
+-- more).
+--
+-- In fact, the transition to 3.41 was triggered by an unfortunate update of Ubuntu
+-- which left me with an incompatible SciTE and lexer library and updating was not
+-- possible due to the lack of 64 bit libraries. We'll see what the future brings.
+--
+-- Promising is that the library can now use another Lua instance, so maybe some
+-- day it will get properly integrated in SciTE and we can use more clever scripting.
+--
+-- In some lexers we use embedded ones even if we could do it directly. The reason is
+-- that when the end token is edited (e.g. -->), backtracking to the space before the
+-- begin token (e.g. <!--) results in applying the surrounding whitespace which in
+-- turn means that when the end token is edited right, backtracking doesn't go back.
+-- One solution (in the dll) would be to backtrack several space categories. After all,
+-- lexing is quite fast (applying the result is much slower).
+--
+-- For some reason the first blob of text tends to go wrong (pdf and web). It would be
+-- nice to have 'whole doc' initial lexing. Quite fishy as it makes it impossible to
+-- lex the first part well (for already opened documents) because only a partial
+-- text is passed.
+--
+-- So, maybe I should just write this from scratch (assuming more generic usage)
+-- because after all, the dll expects just tables, based on a string. I can then also
+-- do some more aggressive resource sharing (needed when used generic).
+--
+-- I think that nested lexers are still bugged (esp over longer ranges). It never was
+-- robust or maybe it's simply not meant for too complex cases. The 3.24 version was
+-- probably the best so far. The fact that styles bleed between lexers even if their
+-- states are isolated is an issue. Another issue is that zero characters in the
+-- text passed to the lexer can mess things up (pdf files have them in streams).
+--
+-- For more complex 'languages', like web or xml, we need to make sure that we use
+-- e.g. 'default' for spacing that makes up some construct. Ok, we then still have a
+-- backtracking issue but less.
+--
+-- TODO
+--
+-- I can make an export to context, but first I'll redo the code that makes the grammar,
+-- as we only seem to need
+--
+-- lexer._TOKENSTYLES : table
+-- lexer._CHILDREN : flag
+-- lexer._EXTRASTYLES : table
+-- lexer._GRAMMAR : flag
+--
+-- lexers.load : function
+-- lexers.lex : function
+--
+-- So, if we drop compatibility with other lex definitions, we can make things simpler.
+
+-- TRACING
+--
+-- The advantage is that we now can check more easily with regular Lua. We can also
+-- use wine and print to the console (somehow stdout is intercepted there). So, I've
+-- added a bit of tracing. Interesting is to notice that each document gets its own
+-- instance which has advantages but also means that when we are spellchecking we
+-- reload the word lists each time. (In the past I assumed a shared instance and took
+-- some precautions.)
+
+-- todo: make sure we don't overload context definitions when used in context
+
+local lpeg = require("lpeg")
+
+local global = _G
+local find, gmatch, match, lower, upper, gsub, sub, format = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub, string.sub, string.format
+local concat, sort = table.concat, table.sort
+local type, next, setmetatable, rawset, tonumber, tostring = type, next, setmetatable, rawset, tonumber, tostring
+local R, P, S, V, C, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local nesting = 0
+
+local function report(fmt,str,...)
+ if log then
+ if str then
+ fmt = format(fmt,str,...)
+ end
+ print(format("scite lpeg lexer > %s > %s",nesting == 0 and "-" or nesting,fmt))
+ end
+end
+
+local function inform(...)
+ if log and trace then
+ report(...)
+ end
+end
+
+inform("loading context lexer module (global table: %s)",tostring(global))
+
+if not package.searchpath then
+
+ -- Unfortunately the io library is only available when we end up
+ -- in this branch of code.
+
+ inform("using adapted function 'package.searchpath' (if used at all)")
+
+ function package.searchpath(name,path)
+ local tried = { }
+ for part in gmatch(path,"[^;]+") do
+ local filename = gsub(part,"%?",name)
+ local f = io.open(filename,"r")
+ if f then
+ inform("file found on path: %s",filename)
+ f:close()
+ return filename
+ end
+ tried[#tried + 1] = format("no file '%s'",filename)
+ end
+ -- added: also try the plain name on the current path .. for testing
+ local filename = name
+ local f = io.open(filename,"r")
+ if f then
+ inform("file found on current path: %s",filename)
+ f:close()
+ return filename
+ end
+ --
+ tried[#tried + 1] = format("no file '%s'",filename)
+ return nil, concat(tried,"\n")
+ end
+
+end
+
+local lexers = { }
+local context = { }
+local helpers = { }
+lexers.context = context
+lexers.helpers = helpers
+
+local patterns = { }
+context.patterns = patterns -- todo: lexers.patterns
+
+context.report = report
+context.inform = inform
+
+lexers.LEXERPATH = package.path -- can be multiple paths separated by ;
+lexers.LEXERPATH = "./?.lua" -- good enough, will be set anyway (was
+
+if resolvers then
+ -- todo: set LEXERPATH
+ -- todo: set report
+end
+
+local function sortedkeys(hash) -- simple version, good enough for here
+ local t, n = { }, 0
+ for k, v in next, hash do
+ t[#t+1] = k
+ local l = #tostring(k)
+ if l > n then
+ n = l
+ end
+ end
+ sort(t)
+ return t, n
+end
+
+helpers.sortedkeys = sortedkeys
+
+local usedlexers = { }
+local parent_lexer = nil
+
+-- The problem with styles is that there is some nasty interaction with scintilla
+-- and each version of lexer dll/so has a different issue. So, from now on we will
+-- just add them here. There is also a limit of some 30 styles. Maybe I should
+-- hash them in order to reuse.
+
+-- todo: work with proper hashes and analyze what styles are really used by a
+-- lexer
+
+local default = {
+ "nothing", "whitespace", "comment", "string", "number", "keyword",
+ "identifier", "operator", "error", "preprocessor", "constant", "variable",
+ "function", "type", "label", "embedded",
+ "quote", "special", "extra", "reserved", "okay", "warning",
+ "command", "internal", "preamble", "grouping", "primitive", "plain",
+ "user",
+ -- not used (yet) .. we cross the 32 boundary so had to patch the initializer, see (1)
+ "char", "class", "data", "definition", "invisible", "regex",
+ "standout", "tag",
+ "text",
+}
+
+local predefined = {
+ "default", "linenumber", "bracelight", "bracebad", "controlchar",
+ "indentguide", "calltip"
+}
+
+-- Bah ... ugly ... nicer would be a proper hash .. we now have properties
+-- as well as STYLE_* and some connection between them ... why .. ok, we
+-- could delay things but who cares. Anyway, at this moment the properties
+-- are still unknown.
+
+local function preparestyles(list)
+ local reverse = { }
+ for i=1,#list do
+ local k = list[i]
+ local K = upper(k)
+ local s = "style." .. k
+ lexers[K] = k -- is this used
+ lexers["STYLE_"..K] = "$(" .. k .. ")"
+ reverse[k] = true
+ end
+ return reverse
+end
+
+local defaultstyles = preparestyles(default)
+local predefinedstyles = preparestyles(predefined)
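+
+-- For instance, after these calls we end up with both an uppercase alias and
+-- a property reference for each default style name:
+--
+-- lexers.COMMENT = "comment"
+-- lexers.STYLE_COMMENT = "$(comment)"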
+
+-- These helpers are set afterwards so we delay their initialization ... there
+-- is no need to alias each time again and this way we can more easily adapt
+-- to updates.
+
+-- These keep changing (values, functions, tables ...) so we need to check these
+-- with each update. Some of them are set in the loader (the require 'lexer' is
+-- in fact not a real one as the lexer code is loaded in the dll). It's also not
+-- getting more efficient.
+
+-- FOLD_BASE = lexers.FOLD_BASE or SC_FOLDLEVELBASE
+-- FOLD_HEADER = lexers.FOLD_HEADER or SC_FOLDLEVELHEADERFLAG
+-- FOLD_BLANK = lexers.FOLD_BLANK or SC_FOLDLEVELWHITEFLAG
+-- get_style_at = lexers.get_style_at or GetStyleAt
+-- get_indent_amount = lexers.get_indent_amount or GetIndentAmount
+-- get_property = lexers.get_property or GetProperty
+-- get_fold_level = lexers.get_fold_level or GetFoldLevel
+
+-- It needs checking: do we have access to all properties now? I'll clean
+-- this up anyway as I want a simple clean and stable model.
+
+-- This is somewhat messy. The lexer dll provides some virtual fields:
+--
+-- + property
+-- + property_int
+-- + style_at
+-- + fold_level
+-- + indent_amount
+--
+-- but for some reasons not:
+--
+-- + property_expanded
+--
+-- As a consequence we need to define it here because otherwise the
+-- lexer will crash. The fuzzy thing is that we don't have to define
+-- the property and property_int tables but we do have to define the
+-- expanded beforehand. The folding properties are no longer interfaced
+-- so the interface to scite is now rather weak (only a few hard coded
+-- properties).
+
+local FOLD_BASE = 0
+local FOLD_HEADER = 0
+local FOLD_BLANK = 0
+
+local style_at = { }
+local indent_amount = { }
+local fold_level = { }
+
+local function check_main_properties()
+ if not lexers.property then
+ lexers.property = { }
+ end
+ if not lexers.property_int then
+ lexers.property_int = setmetatable({ }, {
+ __index = function(t,k)
+ -- why the tostring .. it relies on lua casting to a number when
+ -- doing a comparison
+ return tonumber(lexers.property[k]) or 0 -- tostring removed
+ end,
+ __newindex = function(t,k,v)
+ report("properties are read-only, '%s' is not changed",k)
+ end,
+ })
+ end
+end
+
+lexers.property_expanded = setmetatable({ }, {
+ __index = function(t,k)
+ -- better be safe for future changes .. what if at some point this is
+ -- made consistent in the dll ... we need to keep an eye on that
+ local property = lexers.property
+ if not property then
+ check_main_properties()
+ property = lexers.property
+ end
+ --
+ return gsub(property[k],"[$%%]%b()", function(k)
+ return t[sub(k,3,-2)]
+ end)
+ end,
+ __newindex = function(t,k,v)
+ report("properties are read-only, '%s' is not changed",k)
+ end,
+})
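+
+-- For instance (a made-up illustration), with
+--
+-- lexers.property["color.fore"] = "#000000"
+-- lexers.property["style.default"] = "fore:$(color.fore)"
+--
+-- asking for lexers.property_expanded["style.default"] resolves the embedded
+-- $(...) reference and returns "fore:#000000".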
+
+-- A downward compatible feature but obsolete:
+
+-- local function get_property(tag,default)
+-- return lexers.property_int[tag] or lexers.property[tag] or default
+-- end
+
+-- We still want our own properties (as it keeps changing so better play
+-- safe from now on):
+
+local function check_properties(lexer)
+ if lexer.properties then
+ return lexer
+ end
+ check_main_properties()
+ -- we use a proxy
+ local mainproperties = lexers.property
+ local properties = { }
+ local expanded = setmetatable({ }, {
+ __index = function(t,k)
+ return gsub(properties[k] or mainproperties[k],"[$%%]%b()", function(k)
+ return t[sub(k,3,-2)]
+ end)
+ end,
+ })
+ lexer.properties = setmetatable(properties, {
+ __index = mainproperties,
+ __call = function(t,k,default) -- expands
+ local v = expanded[k]
+ local t = type(default)
+ if t == "number" then
+ return tonumber(v) or default
+ elseif t == "boolean" then
+ return v == nil and default or v
+ else
+ return v or default
+ end
+ end,
+ })
+ return lexer
+end
+
+-- do
+-- lexers.property = { foo = 123, red = "R" }
+-- local a = check_properties({}) print("a.foo",a.properties.foo)
+-- a.properties.foo = "bar" print("a.foo",a.properties.foo)
+-- a.properties.foo = "bar:$(red)" print("a.foo",a.properties.foo) print("a.foo",a.properties("foo"))
+-- end
+
+local function set(value,default)
+ if value == 0 or value == false or value == "0" then
+ return false
+ elseif value == 1 or value == true or value == "1" then
+ return true
+ else
+ return default
+ end
+end
+
+local function check_context_properties()
+ local property = lexers.property -- let's hope that this stays
+ log = set(property["lexer.context.log"], log)
+ trace = set(property["lexer.context.trace"], trace)
+ detail = set(property["lexer.context.detail"], detail)
+ show = set(property["lexer.context.show"], show)
+ collapse = set(property["lexer.context.collapse"],collapse)
+ inspect = set(property["lexer.context.inspect"], inspect)
+end
+
+function context.registerproperties(p) -- global
+ check_main_properties()
+ local property = lexers.property -- let's hope that this stays
+ for k, v in next, p do
+ property[k] = v
+ end
+ check_context_properties()
+end
+
+context.properties = setmetatable({ }, {
+ __index = lexers.property,
+ __newindex = function(t,k,v)
+ check_main_properties()
+ lexers.property[k] = v
+ check_context_properties()
+ end,
+})
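+
+-- A small commented sketch: assigning through this proxy stores the value in the
+-- shared property table and re-reads the tracing switches (the keys are the same
+-- ones that the theme's property list sets).
+--
+-- context.properties["lexer.context.trace"] = "1" -- report loading and initialization
+-- context.properties["lexer.context.show"]  = "1" -- dump the lexed tokens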
+
+-- We want locals, so we set them delayed. Once.
+
+local function initialize()
+ FOLD_BASE = lexers.FOLD_BASE
+ FOLD_HEADER = lexers.FOLD_HEADER
+ FOLD_BLANK = lexers.FOLD_BLANK
+ --
+ style_at = lexers.style_at -- table
+ indent_amount = lexers.indent_amount -- table
+ fold_level = lexers.fold_level -- table
+ --
+ check_main_properties()
+ --
+ initialize = nil
+end
+
+-- Style handler.
+--
+-- The property table will be set later (after loading) by the library. The
+-- styleset is not needed any more as we predefine all styles as defaults
+-- anyway (too error-prone otherwise).
+
+local function toproperty(specification)
+ local serialized = { }
+ for key, value in next, specification do
+ if value == true then
+ serialized[#serialized+1] = key
+ elseif type(value) == "table" then
+ serialized[#serialized+1] = key .. ":" .. "#" .. value[1] .. value[2] .. value[3]
+ else
+ serialized[#serialized+1] = key .. ":" .. tostring(value)
+ end
+ end
+ return concat(serialized,",")
+end
+
+local function tostyles(styles)
+ local styleset = { }
+ local property = lexers.property or { }
+ for k, v in next, styles do
+ v = toproperty(v)
+ styleset[k] = v
+ property["style."..k] = v
+ end
+ return styleset
+end
+
+context.toproperty = toproperty
+context.tostyles = tostyles
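+
+-- A commented sketch of the serialization (colors are { R, G, B } hex triplets,
+-- as in the theme file); the key order depends on the hash traversal:
+--
+-- print(toproperty { fore = { '7F', '00', '00' }, bold = true, size = 14 })
+-- -- for instance: "fore:#7F0000,bold,size:14"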
+
+-- If we had one instance/state of Lua as well as all regular libraries
+-- preloaded, we could use the context base libraries. So, for now we go for a
+-- poor man's solution.
+
+function context.registerstyles(styles)
+ local styleset = tostyles(styles)
+ context.styles = styles
+ context.styleset = styleset
+ if trace then
+ if detail then
+ local t, n = sortedkeys(styleset)
+ local template = " %-" .. n .. "s : %s"
+ report("initializing styleset:")
+ for i=1,#t do
+ local k = t[i]
+ report(template,k,styleset[k])
+ end
+ else
+ report("initializing styleset")
+ end
+ end
+end
+
+-- Some spell checking related stuff. Unfortunately we cannot use a path set
+-- by property. This will get a hook for resolvers.
+
+local locations = {
+ "context/lexers", -- context lexers
+ "context/lexers/data", -- context lexers
+ "../lexers", -- original lexers
+ "../lexers/data", -- original lexers
+ ".", -- whatever
+ "./data", -- whatever
+}
+
+local function collect(name)
+ local root = gsub(lexers.LEXERPATH or ".","/.-lua$","") .. "/" -- this is a horrible hack
+ -- report("module '%s' locating '%s'",tostring(lexers),name)
+ for i=1,#locations do
+ local fullname = root .. locations[i] .. "/" .. name .. ".lua" -- so we can also check for .luc
+ if trace then
+ report("attempt to locate '%s'",fullname)
+ end
+ local okay, result = pcall(function () return dofile(fullname) end)
+ if okay then
+ return result, fullname
+ end
+ end
+end
+
+function context.loadluafile(name)
+ local data, fullname = collect(name)
+ if data then
+ if trace then
+ report("lua file '%s' has been loaded",fullname)
+ end
+ return data, fullname
+ end
+ report("unable to load lua file '%s'",name)
+end
+
+-- in fact we could share more as we probably process the data but then we need
+-- to have a more advanced helper
+
+local cache = { }
+
+function context.loaddefinitions(name)
+ local data = cache[name]
+ if data then
+ if trace then
+ report("reusing definitions '%s'",name)
+ end
+ return data
+ elseif trace and data == false then
+ report("definitions '%s' were not found",name)
+ end
+ local data, fullname = collect(name)
+ if not data then
+ report("unable to load definition file '%s'",name)
+ data = false
+ elseif trace then
+ report("definition file '%s' has been loaded",fullname)
+ if detail then
+ local t, n = sortedkeys(data)
+ local template = " %-" .. n .. "s : %s"
+ for i=1,#t do
+ local k = t[i]
+ local v = data[k]
+ if type(v) ~= "table" then
+ report(template,k,tostring(v))
+ elseif #v > 0 then
+ report(template,k,#v)
+ else
+ -- no need to show hash
+ end
+ end
+ end
+ end
+ cache[name] = data
+ return type(data) == "table" and data
+end
+
+function context.word_match(words,word_chars,case_insensitive)
+ local chars = "%w_" -- maybe just "" when word_chars
+ if word_chars then
+ chars = "^([" .. chars .. gsub(word_chars,"([%^%]%-])", "%%%1") .."]+)"
+ else
+ chars = "^([" .. chars .."]+)"
+ end
+ if case_insensitive then
+ local word_list = { }
+ for i=1,#words do
+ word_list[lower(words[i])] = true
+ end
+ return P(function(input, index)
+ local s, e, word = find(input,chars,index)
+ return word and word_list[lower(word)] and e + 1 or nil
+ end)
+ else
+ local word_list = { }
+ for i=1,#words do
+ word_list[words[i]] = true
+ end
+ return P(function(input, index)
+ local s, e, word = find(input,chars,index)
+ return word and word_list[word] and e + 1 or nil
+ end)
+ end
+end
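+
+-- A commented usage sketch (hypothetical word list): the returned pattern only
+-- succeeds when a whole word is in the list.
+--
+-- local p = context.word_match({ "begin", "end" }, nil, true)
+-- print(lpegmatch(p,"End"))     -- 4   (case insensitive hit)
+-- print(lpegmatch(p,"endless")) -- nil (not in the list)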
+
+-- Patterns are grouped in a separate namespace but the regular lexers expect
+-- shortcuts to be present in the lexers library. Maybe I'll incorporate some
+-- of l-lpeg later.
+
+do
+
+ local anything = P(1)
+ local idtoken = R("az","AZ","\127\255","__")
+ local digit = R("09")
+ local sign = S("+-")
+ local period = P(".")
+ local octdigit = R("07")
+ local hexdigit = R("09","AF","af")
+ local lower = R("az")
+ local upper = R("AZ")
+ local alpha = upper + lower
+ local space = S(" \n\r\t\f\v")
+ local eol = S("\r\n")
+ local backslash = P("\\")
+ local decimal = digit^1
+ local octal = P("0")
+ * octdigit^1
+ local hexadecimal = P("0") * S("xX")
+ * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1)
+ * (S("pP") * sign^-1 * hexdigit^1)^-1 -- *
+
+ patterns.idtoken = idtoken
+ patterns.digit = digit
+ patterns.sign = sign
+ patterns.period = period
+ patterns.octdigit = octdigit
+ patterns.hexdigit = hexdigit
+ patterns.ascii = R("\000\127") -- useless
+ patterns.extend = R("\000\255") -- useless
+ patterns.control = R("\000\031")
+ patterns.lower = lower
+ patterns.upper = upper
+ patterns.alpha = alpha
+ patterns.decimal = decimal
+ patterns.octal = octal
+ patterns.hexadecimal = hexadecimal
+ patterns.float = sign^-1
+ * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1)
+ * S("eE") * sign^-1 * digit^1 -- *
+ patterns.cardinal = decimal
+
+ patterns.signeddecimal = sign^-1 * decimal
+ patterns.signedoctal = sign^-1 * octal
+ patterns.signedhexadecimal = sign^-1 * hexadecimal
+ patterns.integer = sign^-1 * (hexadecimal + octal + decimal)
+ patterns.real =
+ sign^-1 * ( -- at most one
+ digit^1 * period * digit^0 -- 10.0 10.
+ + digit^0 * period * digit^1 -- 0.10 .10
+ + digit^1 -- 10
+ )
+
+ patterns.anything = anything
+ patterns.any = anything
+ patterns.restofline = (1-eol)^1
+ patterns.space = space
+ patterns.spacing = space^1
+ patterns.nospacing = (1-space)^1
+ patterns.eol = eol
+ patterns.newline = P("\r\n") + eol
+
+ local endof = S("\n\r\f")
+
+ patterns.startofline = P(function(input,index)
+ return (index == 1 or lpegmatch(endof,input,index-1)) and index
+ end)
+
+  -- These are the ones that other lexers expect. Maybe they should all go in
+  -- their own namespace with a compatibility layer. Or should I just remove them?
+
+ lexers.any = anything
+  lexers.ascii            = patterns.ascii
+  lexers.extend           = patterns.extend
+ lexers.alpha = alpha
+ lexers.digit = digit
+  lexers.alnum            = alpha + digit
+ lexers.lower = lower
+ lexers.upper = upper
+ lexers.xdigit = hexdigit
+  lexers.cntrl            = patterns.control
+ lexers.graph = R("!~")
+ lexers.print = R(" ~")
+ lexers.punct = R("!/", ":@", "[\'", "{~")
+ lexers.space = space
+ lexers.newline = S("\r\n\f")^1
+ lexers.nonnewline = 1 - lexers.newline
+ lexers.nonnewline_esc = 1 - (lexers.newline + '\\') + backslash * anything
+ lexers.dec_num = decimal
+ lexers.oct_num = octal
+ lexers.hex_num = hexadecimal
+  lexers.integer          = patterns.integer
+  lexers.float            = patterns.float
+ lexers.word = (alpha + "_") * (alpha + digit + "_")^0 -- weird, why digits
+
+end
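+
+-- A few commented checks of the shared patterns (just a sketch):
+--
+-- print(lpegmatch(patterns.float,   "-1.25e-3")) -- 9 (one past the match)
+-- print(lpegmatch(patterns.integer, "0x1F"))     -- 5
+-- print(lpegmatch(patterns.real,    ".5"))       -- 3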
+
+-- end of patterns
+
+function context.exact_match(words,word_chars,case_insensitive)
+ local characters = concat(words)
+ local pattern -- the concat catches _ etc
+ if word_chars == true or word_chars == false or word_chars == nil then
+ word_chars = ""
+ end
+ if type(word_chars) == "string" then
+ pattern = S(characters) + patterns.idtoken
+ if case_insensitive then
+ pattern = pattern + S(upper(characters)) + S(lower(characters))
+ end
+ if word_chars ~= "" then
+ pattern = pattern + S(word_chars)
+ end
+ elseif word_chars then
+ pattern = word_chars
+ end
+ if case_insensitive then
+ local list = { }
+ if #words == 0 then
+ for k, v in next, words do
+ list[lower(k)] = v
+ end
+ else
+ for i=1,#words do
+ list[lower(words[i])] = true
+ end
+ end
+    return Cmt(C(pattern^1), function(_,i,s) -- capture the matched word
+ return list[lower(s)] -- and i or nil
+ end)
+ else
+ local list = { }
+ if #words == 0 then
+ for k, v in next, words do
+ list[k] = v
+ end
+ else
+ for i=1,#words do
+ list[words[i]] = true
+ end
+ end
+    return Cmt(C(pattern^1), function(_,i,s) -- capture the matched word
+ return list[s] -- and i or nil
+ end)
+ end
+end
+
+function context.just_match(words)
+ local p = P(words[1])
+ for i=2,#words do
+ p = p + P(words[i])
+ end
+ return p
+end
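+
+-- A commented sketch (hypothetical keyword list): just_match simply chains the
+-- words into an ordered choice, so order matters.
+--
+-- local p = context.just_match { "fill", "filldraw" }
+-- print(lpegmatch(p,"filldraw")) -- 5: the shorter "fill" wins; exact_match
+--                                --    instead checks the whole token against the list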
+
+-- spell checking (we can only load lua files)
+--
+-- return {
+-- min = 3,
+-- max = 40,
+-- n = 12345,
+-- words = {
+-- ["someword"] = "someword",
+-- ["anotherword"] = "Anotherword",
+-- },
+-- }
+
+local lists = { }
+
+function context.setwordlist(tag,limit) -- returns hash (lowercase keys and original values)
+ if not tag or tag == "" then
+ return false, 3
+ end
+ local list = lists[tag]
+ if not list then
+ list = context.loaddefinitions("spell-" .. tag)
+ if not list or type(list) ~= "table" then
+ report("invalid spell checking list for '%s'",tag)
+ list = { words = false, min = 3 }
+ else
+ list.words = list.words or false
+ list.min = list.min or 3
+ end
+ lists[tag] = list
+ end
+ if trace then
+ report("enabling spell checking for '%s' with minimum '%s'",tag,list.min)
+ end
+ return list.words, list.min
+end
+
+patterns.wordtoken = R("az","AZ","\127\255")
+patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
+
+function context.checkedword(validwords,validminimum,s,i) -- ,limit
+ if not validwords then -- or #s < validminimum then
+ return true, "text", i -- true, "default", i
+ else
+ -- keys are lower
+ local word = validwords[s]
+ if word == s then
+ return true, "okay", i -- exact match
+ elseif word then
+ return true, "warning", i -- case issue
+ else
+ local word = validwords[lower(s)]
+ if word == s then
+ return true, "okay", i -- exact match
+ elseif word then
+ return true, "warning", i -- case issue
+ elseif upper(s) == s then
+ return true, "warning", i -- probably a logo or acronym
+ else
+ return true, "error", i
+ end
+ end
+ end
+end
+
+function context.styleofword(validwords,validminimum,s) -- ,limit
+ if not validwords or #s < validminimum then
+ return "text"
+ else
+ -- keys are lower
+ local word = validwords[s]
+ if word == s then
+ return "okay" -- exact match
+ elseif word then
+ return "warning" -- case issue
+ else
+ local word = validwords[lower(s)]
+ if word == s then
+ return "okay" -- exact match
+ elseif word then
+ return "warning" -- case issue
+ elseif upper(s) == s then
+ return "warning" -- probably a logo or acronym
+ else
+ return "error"
+ end
+ end
+ end
+end
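+
+-- A commented sketch with a hypothetical word list (as produced by setwordlist:
+-- lowercased keys, original casing as value):
+--
+-- local words = { context = "context", texlive = "TeXLive" }
+-- print(context.styleofword(words,3,"context"))  -- okay    (exact match)
+-- print(context.styleofword(words,3,"texlive"))  -- warning (case issue)
+-- print(context.styleofword(words,3,"contextt")) -- error   (unknown word)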
+
+-- overloaded functions
+
+local h_table, b_table, n_table = { }, { }, { } -- from the time small tables were used (optimization)
+
+setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEADER } t[level] = v return v end })
+setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
+setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })
+
+local newline = patterns.newline
+local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
+local p_nop = newline
+
+local folders = { }
+
+local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
+ local folder = folders[lexer]
+ if not folder then
+ --
+ local pattern, folds, text, start_pos, line_num, prev_level, current_level
+ --
+ local fold_symbols = lexer._foldsymbols
+ local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
+ --
+ if fold_pattern then
+ -- if no functions are found then we could have a faster one
+ fold_pattern = Cp() * C(fold_pattern) / function(s,match)
+ local symbols = fold_symbols[style_at[start_pos + s]]
+ if symbols then
+ local l = symbols[match]
+ if l then
+ current_level = current_level + l
+ end
+ end
+ end
+ local action_y = function()
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
+ end
+ prev_level = current_level
+ line_num = line_num + 1
+ end
+ local action_n = function()
+ folds[line_num] = prev_level + FOLD_BLANK
+ line_num = line_num + 1
+ end
+ pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0
+
+ else
+ -- the traditional one but a bit optimized
+ local fold_symbols_patterns = fold_symbols._patterns
+ local action_y = function(pos,line)
+ for j = 1, #fold_symbols_patterns do
+ for s, match in gmatch(line,fold_symbols_patterns[j]) do -- "()(" .. patterns[i] .. ")"
+ local symbols = fold_symbols[style_at[start_pos + pos + s - 1]]
+ local l = symbols and symbols[match]
+ local t = type(l)
+ if t == "number" then
+ current_level = current_level + l
+ elseif t == "function" then
+ current_level = current_level + l(text, pos, line, s, match)
+ end
+ end
+ end
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
+ end
+ prev_level = current_level
+ line_num = line_num + 1
+ end
+ local action_n = function()
+ folds[line_num] = prev_level + FOLD_BLANK
+ line_num = line_num + 1
+ end
+ pattern = (p_yes/action_y + p_nop/action_n)^0
+ end
+ --
+ local reset_parser = lexer._reset_parser
+ --
+ folder = function(_text_,_start_pos_,_start_line_,_start_level_)
+ if reset_parser then
+ reset_parser()
+ end
+ folds = { }
+ text = _text_
+ start_pos = _start_pos_
+ line_num = _start_line_
+ prev_level = _start_level_
+ current_level = prev_level
+ lpegmatch(pattern,text)
+ -- make folds collectable
+ local t = folds
+ folds = nil
+ return t
+ end
+ folders[lexer] = folder
+ end
+ return folder(text,start_pos,start_line,start_level,lexer)
+end
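+
+-- For reference, a commented sketch of what a lexer can provide (hypothetical
+-- symbols): either a _foldpattern lpeg (the faster context extension) or the
+-- traditional _foldsymbols table with per-style, per-symbol level increments.
+--
+-- lexer._foldpattern = S("{}")
+-- lexer._foldsymbols = {
+--     _patterns    = { "[{}]" },
+--     ["operator"] = { ["{"] = 1, ["}"] = -1 },
+-- }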
+
+local folds, current_line, prev_level
+
+local function action_y()
+ local current_level = FOLD_BASE + indent_amount[current_line]
+ if current_level > prev_level then -- next level
+ local i = current_line - 1
+ local f
+ while true do
+ f = folds[i]
+ if not f then
+ break
+ elseif f[2] == FOLD_BLANK then
+ i = i - 1
+ else
+ f[2] = FOLD_HEADER -- low indent
+ break
+ end
+ end
+ folds[current_line] = { current_level } -- high indent
+ elseif current_level < prev_level then -- prev level
+ local f = folds[current_line - 1]
+ if f then
+ f[1] = prev_level -- high indent
+ end
+ folds[current_line] = { current_level } -- low indent
+ else -- same level
+ folds[current_line] = { prev_level }
+ end
+ prev_level = current_level
+ current_line = current_line + 1
+end
+
+local function action_n()
+ folds[current_line] = { prev_level, FOLD_BLANK }
+ current_line = current_line + 1
+end
+
+local pattern = ( S("\t ")^0 * ( (1-patterns.eol)^1 / action_y + P(true) / action_n) * newline )^0
+
+local function fold_by_indentation(text,start_pos,start_line,start_level)
+ -- initialize
+ folds = { }
+ current_line = start_line
+ prev_level = start_level
+ -- define
+ -- -- not here .. pattern binds and local functions are not frozen
+ -- analyze
+ lpegmatch(pattern,text)
+ -- flatten
+ for line, level in next, folds do
+ folds[line] = level[1] + (level[2] or 0)
+ end
+ -- done, make folds collectable
+ local t = folds
+ folds = nil
+ return t
+end
+
+local function fold_by_line(text,start_pos,start_line,start_level)
+ local folds = { }
+ -- can also be lpeg'd
+ for _ in gmatch(text,".-\r?\n") do
+    folds[start_line] = n_table[start_level] -- { start_level } -- still tables? needs checking
+ start_line = start_line + 1
+ end
+ return folds
+end
+
+local threshold_by_lexer = 512 * 1024 -- we don't know the filesize yet
+local threshold_by_parsing = 512 * 1024 -- we don't know the filesize yet
+local threshold_by_indentation = 512 * 1024 -- we don't know the filesize yet
+local threshold_by_line = 512 * 1024 -- we don't know the filesize yet
+
+function context.fold(lexer,text,start_pos,start_line,start_level) -- hm, we had size thresholds .. where did they go
+ if text == "" then
+ return { }
+ end
+ if initialize then
+ initialize()
+ end
+ local fold_by_lexer = lexer._fold
+ local fold_by_symbols = lexer._foldsymbols
+ local filesize = 0 -- we don't know that
+ if fold_by_lexer then
+ if filesize <= threshold_by_lexer then
+ return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
+ end
+ elseif fold_by_symbols then -- and lexer.properties("fold.by.parsing",1) > 0 then
+ if filesize <= threshold_by_parsing then
+ return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
+ end
+ elseif lexer.properties("fold.by.indentation",1) > 0 then
+ if filesize <= threshold_by_indentation then
+ return fold_by_indentation(text,start_pos,start_line,start_level,lexer)
+ end
+ elseif lexer.properties("fold.by.line",1) > 0 then
+ if filesize <= threshold_by_line then
+ return fold_by_line(text,start_pos,start_line,start_level,lexer)
+ end
+ end
+ return { }
+end
+
+-- The following code is mostly unchanged:
+
+local function add_rule(lexer,id,rule) -- unchanged
+ if not lexer._RULES then
+ lexer._RULES = { }
+ lexer._RULEORDER = { }
+ end
+ lexer._RULES[id] = rule
+ lexer._RULEORDER[#lexer._RULEORDER + 1] = id
+end
+
+-- I finally figured out that adding more styles was an issue because of several
+-- reasons:
+--
+-- + in old versions there was a limit in the amount, so we overran the built-in
+-- hard coded scintilla range
+-- + then, the add_style function didn't check for already known ones, so again
+-- we had an overrun (with some magic that could be avoided)
+-- + then, when I messed with a new default set I realized that there is no check
+-- in initializing _TOKENSTYLES (here the inspect function helps)
+-- + of course it was mostly a side effect of passing all the used styles to the
+--   _tokenstyles instead of only the non-default ones, but such a thing should
+--   not matter (read: should be intercepted)
+--
+-- This finally removed a headache and was revealed by lots of tracing, which I
+-- should have built in way earlier.
+
+local function add_style(lexer,token_name,style) -- changed a bit around 3.41
+ -- We don't add styles that are already defined as this can overflow the
+ -- amount possible (in old versions of scintilla).
+ if defaultstyles[token_name] then
+ if trace and detail then
+ report("default style '%s' is ignored as extra style",token_name)
+ end
+ return
+ elseif predefinedstyles[token_name] then
+ if trace and detail then
+ report("predefined style '%s' is ignored as extra style",token_name)
+ end
+ return
+ else
+ if trace and detail then
+ report("adding extra style '%s' as '%s'",token_name,style)
+ end
+ end
+ -- This is unchanged. We skip the dangerous zone.
+ local num_styles = lexer._numstyles
+ if num_styles == 32 then
+ num_styles = num_styles + 8
+ end
+ if num_styles >= 255 then
+ report("there can't be more than %s styles",255)
+ end
+ lexer._TOKENSTYLES[token_name] = num_styles
+ lexer._EXTRASTYLES[token_name] = style
+ lexer._numstyles = num_styles + 1
+end
+
+local function check_styles(lexer)
+ -- Here we also use a check for the dangerous zone. That way we can have a
+ -- larger default set. The original code just assumes that #default is less
+ -- than the dangerous zone's start.
+ local numstyles = 0
+ local tokenstyles = { }
+ for i=1, #default do
+ if numstyles == 32 then
+ numstyles = numstyles + 8
+ end
+ tokenstyles[default[i]] = numstyles
+ numstyles = numstyles + 1
+ end
+ -- Unchanged.
+ for i=1, #predefined do
+ tokenstyles[predefined[i]] = i + 31
+ end
+ lexer._TOKENSTYLES = tokenstyles
+ lexer._numstyles = numstyles
+ lexer._EXTRASTYLES = { }
+ return lexer
+end
+
+-- At some point an 'any' append showed up in the original code ...
+-- but I see no need to catch that case ... better fix the specification.
+--
+-- hm, why are many of them joined twice?
+
+local function join_tokens(lexer) -- slightly different from the original (no 'any' append)
+ local patterns = lexer._RULES
+ local order = lexer._RULEORDER
+ -- report("lexer: %s, tokens: %s",lexer._NAME,table.concat(order," + "))
+ if patterns and order then
+ local token_rule = patterns[order[1]] -- normally whitespace
+ for i=2,#order do
+ token_rule = token_rule + patterns[order[i]]
+ end
+ if lexer._TYPE ~= "context" then
+ token_rule = token_rule + lexers.token(lexers.DEFAULT, patterns.any)
+ end
+ lexer._TOKENRULE = token_rule
+ return token_rule
+ else
+ return P(1)
+ end
+end
+
+local function add_lexer(grammar, lexer) -- mostly the same as the original
+ local token_rule = join_tokens(lexer)
+ local lexer_name = lexer._NAME
+ local children = lexer._CHILDREN
+ for i=1,#children do
+ local child = children[i]
+ if child._CHILDREN then
+ add_lexer(grammar, child)
+ end
+ local child_name = child._NAME
+ local rules = child._EMBEDDEDRULES[lexer_name]
+ local rules_token_rule = grammar["__" .. child_name] or rules.token_rule
+ local pattern = (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1
+ grammar[child_name] = pattern * V(lexer_name)
+ local embedded_child = "_" .. child_name
+ grammar[embedded_child] = rules.start_rule * pattern
+ token_rule = V(embedded_child) + token_rule
+ end
+ if trace then
+ report("adding lexer '%s' with %s children",lexer_name,#children)
+ end
+ grammar["__" .. lexer_name] = token_rule
+ grammar[lexer_name] = token_rule^0
+end
+
+local function build_grammar(lexer,initial_rule) -- same as the original
+ local children = lexer._CHILDREN
+ local lexer_name = lexer._NAME
+ if children then
+ if not initial_rule then
+ initial_rule = lexer_name
+ end
+ local grammar = { initial_rule }
+ add_lexer(grammar, lexer)
+ lexer._INITIALRULE = initial_rule
+ lexer._GRAMMAR = Ct(P(grammar))
+ if trace then
+      report("building grammar for '%s' with whitespace '%s' and %s children",lexer_name,lexer.whitespace or "?",#children)
+ end
+ else
+ lexer._GRAMMAR = Ct(join_tokens(lexer)^0)
+ if trace then
+ report("building grammar for '%s' with whitespace '%s'",lexer_name,lexer.whitespace or "?")
+ end
+ end
+end
+
+-- So far. We need these local functions in the next one.
+
+local lineparsers = { }
+
+local maxmatched = 100
+
+local function collapsed(t)
+ local lasttoken = nil
+ local lastindex = nil
+ for i=1,#t,2 do
+ local token = t[i]
+ local position = t[i+1]
+ if token == lasttoken then
+ t[lastindex] = position
+ elseif lastindex then
+ lastindex = lastindex + 1
+ t[lastindex] = token
+ lastindex = lastindex + 1
+ t[lastindex] = position
+ lasttoken = token
+ else
+ lastindex = i+1
+ lasttoken = token
+ end
+ end
+ for i=#t,lastindex+1,-1 do
+ t[i] = nil
+ end
+ return t
+end
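+
+-- A commented sketch: the token list alternates style names and end positions,
+-- and adjacent ranges with the same style are merged.
+--
+-- collapsed { "text", 5, "text", 9, "comment", 20 }
+-- -- becomes { "text", 9, "comment", 20 }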
+
+local function matched(lexer,grammar,text)
+ -- text = string.gsub(text,"\z","!")
+ local t = lpegmatch(grammar,text)
+ if trace then
+ if show then
+ report("output of lexer: %s (max %s entries)",lexer._NAME,maxmatched)
+ local s = lexer._TOKENSTYLES
+ local p = 1
+ for i=1,2*maxmatched,2 do
+ local n = i + 1
+ local ti = t[i]
+ local tn = t[n]
+ if ti then
+ local txt = sub(text,p,tn-1)
+ if txt then
+ txt = gsub(txt,"[%s]"," ")
+ else
+ txt = "!no text!"
+ end
+ report("%4i : %s > %s (%s) (%s)",n/2,ti,tn,s[ti] or "!unset!",txt)
+ p = tn
+ else
+ break
+ end
+ end
+ end
+ report("lexer results: %s, length: %s, ranges: %s",lexer._NAME,#text,#t/2)
+ if collapse then
+ t = collapsed(t)
+ report("lexer collapsed: %s, length: %s, ranges: %s",lexer._NAME,#text,#t/2)
+ end
+ elseif collapse then
+ t = collapsed(t)
+ end
+ return t
+end
+
+-- Todo: make nice generic lexer (extra argument with start/stop commands) for
+-- context itself.
+
+function context.lex(lexer,text,init_style)
+ -- local lexer = global._LEXER
+ local grammar = lexer._GRAMMAR
+ if initialize then
+ initialize()
+ end
+ if not grammar then
+ return { }
+ elseif lexer._LEXBYLINE then -- we could keep token
+ local tokens = { }
+ local offset = 0
+ local noftokens = 0
+ local lineparser = lineparsers[lexer]
+ if not lineparser then -- probably a cmt is more efficient
+ lineparser = C((1-newline)^0 * newline) / function(line)
+ local length = #line
+ local line_tokens = length > 0 and lpegmatch(grammar,line)
+ if line_tokens then
+ for i=1,#line_tokens,2 do
+ noftokens = noftokens + 1
+ tokens[noftokens] = line_tokens[i]
+ noftokens = noftokens + 1
+ tokens[noftokens] = line_tokens[i + 1] + offset
+ end
+ end
+ offset = offset + length
+ if noftokens > 0 and tokens[noftokens] ~= offset then
+ noftokens = noftokens + 1
+ tokens[noftokens] = "default"
+ noftokens = noftokens + 1
+ tokens[noftokens] = offset + 1
+ end
+ end
+ lineparser = lineparser^0
+ lineparsers[lexer] = lineparser
+ end
+ lpegmatch(lineparser,text)
+ return tokens
+ elseif lexer._CHILDREN then
+ local hash = lexer._HASH -- hm, was _hash
+ if not hash then
+ hash = { }
+ lexer._HASH = hash
+ end
+ grammar = hash[init_style]
+ if grammar then
+ lexer._GRAMMAR = grammar
+ -- lexer._GRAMMAR = lexer._GRAMMAR or grammar
+ else
+ for style, style_num in next, lexer._TOKENSTYLES do
+ if style_num == init_style then
+          -- the name of the lexer is filtered from the whitespace
+ -- specification .. weird code, should be a reverse hash
+ local lexer_name = match(style,"^(.+)_whitespace") or lexer._NAME
+ if lexer._INITIALRULE ~= lexer_name then
+ grammar = hash[lexer_name]
+ if not grammar then
+ build_grammar(lexer,lexer_name)
+ grammar = lexer._GRAMMAR
+ hash[lexer_name] = grammar
+ end
+ end
+ break
+ end
+ end
+ grammar = grammar or lexer._GRAMMAR
+ hash[init_style] = grammar
+ end
+ if trace then
+      report("lexing '%s' with initial style '%s' and %s children",lexer._NAME,init_style,#lexer._CHILDREN or 0)
+ end
+ return matched(lexer,grammar,text)
+ else
+ if trace then
+ report("lexing '%s' with initial style '%s'",lexer._NAME,init_style)
+ end
+ return matched(lexer,grammar,text)
+ end
+end
+
+-- hm, changed in 3.24 .. no longer small table but one table:
+
+function context.token(name, patt)
+ return patt * Cc(name) * Cp()
+end
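+
+-- A commented sketch: a rule pairs a pattern with a style name, and lexing then
+-- yields one flat table of alternating names and end positions.
+--
+-- local t_comment = context.token("comment", P("%") * (1-patterns.eol)^0)
+-- -- matching "% hi\n" produces the captures "comment", 5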
+
+-- The next ones were mostly unchanged (till now); we moved them here when 3.41
+-- became close to impossible to combine with (or overload) and a merge was
+-- the only solution. It makes later updates more painful but the update to
+-- 3.41 was already a bit of a nightmare anyway.
+
+-- Loading lexers is rather interwoven with what the dll/so sets and
+-- it changes over time. So, we need to keep an eye on changes. One
+-- problem that we always faced was the limitation on the length of
+-- lexer names (as they occasionally get appended/prepended to strings with
+-- a hard coded limit). So, we always used alternative names and now need
+-- to make sure these don't clash. As I no longer intend to use the shipped
+-- lexers I could strip away some of the code in the future, but keeping
+-- it as a reference makes sense.
+
+-- I spent quite some time figuring out why 3.41 didn't work or crashed, which
+-- is hard when no stdout is available and when the io library is absent. In
+-- the end one of the problems was in the _NAME setting: we set _NAME
+-- to e.g. 'tex' but load from a file with a longer name (which we do
+-- as we don't want to clash with existing files), so we ended up with
+-- lexers not being found.
+
+local whitespaces = { }
+
+local function push_whitespace(name)
+ table.insert(whitespaces,lexers.WHITESPACE or "whitespace")
+ lexers.WHITESPACE = name .. "_whitespace"
+end
+
+local function pop_whitespace()
+ lexers.WHITESPACE = table.remove(whitespaces) or "whitespace"
+end
+
+local function check_whitespace(lexer,name)
+ if lexer then
+ lexer.whitespace = (name or lexer.name or lexer._NAME) .. "_whitespace"
+ end
+end
+
+function context.new(name,filename)
+ local lexer = {
+ _TYPE = "context",
+ --
+ _NAME = name, -- used for token building
+    _FILENAME = filename, -- for diagnostic purposes
+ --
+ name = name,
+ filename = filename,
+ }
+ if trace then
+ report("initializing lexer tagged '%s' from file '%s'",name,filename or name)
+ end
+ check_whitespace(lexer)
+ check_styles(lexer)
+ check_properties(lexer)
+ return lexer
+end
+
+local function nolexer(name)
+ local lexer = {
+ _TYPE = "unset",
+ _NAME = name,
+ -- _rules = { },
+ }
+ check_styles(lexer)
+ check_whitespace(lexer)
+ check_properties(lexer)
+ return lexer
+end
+
+local function load_lexer(name,namespace)
+ if trace then
+ report("loading lexer file '%s'",name)
+ end
+ push_whitespace(namespace or name) -- for traditional lexers .. no alt_name yet
+ local lexer, fullname = context.loadluafile(name)
+ pop_whitespace()
+ if not lexer then
+ report("invalid lexer file '%s'",name)
+ elseif trace then
+ report("lexer file '%s' has been loaded",fullname)
+ end
+ if type(lexer) ~= "table" then
+ if trace then
+ report("lexer file '%s' gets a dummy lexer",name)
+ end
+ return nolexer(name)
+ end
+ if lexer._TYPE ~= "context" then
+ lexer._TYPE = "native"
+ check_styles(lexer)
+ check_whitespace(lexer,namespace or name)
+ check_properties(lexer)
+ end
+ if not lexer._NAME then
+ lexer._NAME = name -- so: filename
+ end
+ if name ~= namespace then
+ lexer._NAME = namespace
+ end
+ return lexer
+end
+
+-- tracing ...
+
+local function inspect_lexer(lexer,level)
+ -- If we had the regular libs available I could use the usual
+ -- helpers.
+ local parent = lexer._lexer
+ lexer._lexer = nil -- prevent endless recursion
+ local name = lexer._NAME
+ local function showstyles_1(tag,styles)
+ local numbers = { }
+ for k, v in next, styles do
+ numbers[v] = k
+ end
+ -- sort by number and make number hash too
+ local keys = sortedkeys(numbers)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = numbers[k]
+ report("[%s %s] %s %s = %s",level,name,tag,k,v)
+ end
+ end
+ local function showstyles_2(tag,styles)
+ local keys = sortedkeys(styles)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = styles[k]
+ report("[%s %s] %s %s = %s",level,name,tag,k,v)
+ end
+ end
+ local keys = sortedkeys(lexer)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = lexer[k]
+ report("[%s %s] root key : %s = %s",level,name,k,tostring(v))
+ end
+ showstyles_1("token style",lexer._TOKENSTYLES)
+ showstyles_2("extra style",lexer._EXTRASTYLES)
+ local children = lexer._CHILDREN
+ if children then
+ for i=1,#children do
+ inspect_lexer(children[i],level+1)
+ end
+ end
+ lexer._lexer = parent
+end
+
+function context.inspect(lexer)
+ inspect_lexer(lexer,0)
+end
+
+-- An optional second argument has been introduced so that one can embed a lexer
+-- more than once ... maybe something to look into (as now it's done by remembering
+-- the start sequence ... quite okay but maybe suboptimal ... anyway, never change
+-- a working solution).
+
+-- namespace can be automatic: if parent then use name of parent (chain)
+
+function context.loadlexer(filename,namespace)
+ nesting = nesting + 1
+ if not namespace then
+ namespace = filename
+ end
+ local lexer = usedlexers[namespace] -- we load by filename but the internal name can be short
+ if lexer then
+ if trace then
+ report("reusing lexer '%s'",namespace)
+ end
+ nesting = nesting - 1
+ return lexer
+ elseif trace then
+ report("loading lexer '%s'",namespace)
+ end
+ --
+ if initialize then
+ initialize()
+ end
+ --
+ parent_lexer = nil
+ --
+ lexer = load_lexer(filename,namespace) or nolexer(filename,namespace)
+ usedlexers[filename] = lexer
+ --
+ if not lexer._rules and not lexer._lexer then
+ lexer._lexer = parent_lexer
+ end
+ --
+ if lexer._lexer then
+ local _l = lexer._lexer
+ local _r = lexer._rules
+ local _s = lexer._tokenstyles
+ if not _l._tokenstyles then
+ _l._tokenstyles = { }
+ end
+ if _r then
+ local rules = _l._rules
+ local name = lexer.name
+ for i=1,#_r do
+ local rule = _r[i]
+ rules[#rules + 1] = {
+ name .. "_" .. rule[1],
+ rule[2],
+ }
+ end
+ end
+ if _s then
+ local tokenstyles = _l._tokenstyles
+ for token, style in next, _s do
+ tokenstyles[token] = style
+ end
+ end
+ lexer = _l
+ end
+ --
+ local _r = lexer._rules
+ if _r then
+ local _s = lexer._tokenstyles
+ if _s then
+ for token, style in next, _s do
+ add_style(lexer, token, style)
+ end
+ end
+ for i=1,#_r do
+ local rule = _r[i]
+ add_rule(lexer, rule[1], rule[2])
+ end
+ build_grammar(lexer)
+ end
+ --
+ add_style(lexer, lexer.whitespace, lexers.STYLE_WHITESPACE)
+ --
+ local foldsymbols = lexer._foldsymbols
+ if foldsymbols then
+ local patterns = foldsymbols._patterns
+ if patterns then
+ for i = 1, #patterns do
+ patterns[i] = "()(" .. patterns[i] .. ")"
+ end
+ end
+ end
+ --
+ lexer.lex = lexers.lex
+ lexer.fold = lexers.fold
+ --
+ nesting = nesting - 1
+ --
+ if inspect then
+ context.inspect(lexer)
+ end
+ --
+ return lexer
+end
+
+function context.embed_lexer(parent, child, start_rule, end_rule) -- mostly the same as the original
+ local embeddedrules = child._EMBEDDEDRULES
+ if not embeddedrules then
+ embeddedrules = { }
+ child._EMBEDDEDRULES = embeddedrules
+ end
+ if not child._RULES then
+ local rules = child._rules
+ if not rules then
+ report("child lexer '%s' has no rules",child._NAME or "unknown")
+ rules = { }
+ child._rules = rules
+ end
+ for i=1,#rules do
+ local rule = rules[i]
+ add_rule(child, rule[1], rule[2])
+ end
+ end
+ embeddedrules[parent._NAME] = {
+ ["start_rule"] = start_rule,
+ ["token_rule"] = join_tokens(child),
+ ["end_rule"] = end_rule
+ }
+ local children = parent._CHILDREN
+ if not children then
+ children = { }
+ parent._CHILDREN = children
+ end
+ children[#children + 1] = child
+ local tokenstyles = parent._tokenstyles
+ if not tokenstyles then
+ tokenstyles = { }
+ parent._tokenstyles = tokenstyles
+ end
+ local childname = child._NAME
+ local whitespace = childname .. "_whitespace"
+ tokenstyles[whitespace] = lexers.STYLE_WHITESPACE -- all these STYLE_THINGS will go .. just a proper hash
+ if trace then
+ report("using whitespace '%s' as trigger for '%s' with property '%s'",whitespace,childname,lexers.STYLE_WHITESPACE)
+ end
+ local childstyles = child._tokenstyles
+ if childstyles then
+ for token, style in next, childstyles do
+ tokenstyles[token] = style
+ end
+ end
+ child._lexer = parent
+ parent_lexer = parent
+end
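+
+-- A hypothetical usage sketch (lexer variables made up for the example): a child
+-- lexer gets embedded between explicit start and stop tokens, here MetaPost code
+-- inside a TeX document.
+--
+-- context.embed_lexer(texlexer, mpslexer,
+--     context.token("embedded", P("\\startMPcode")),
+--     context.token("embedded", P("\\stopMPcode"))
+-- )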
+
+-- we now move the adapted code to the lexers namespace
+
+lexers.new = context.new
+lexers.load = context.loadlexer
+------.loadlexer = context.loadlexer
+lexers.loadluafile = context.loadluafile
+lexers.embed_lexer = context.embed_lexer
+lexers.fold = context.fold
+lexers.lex = context.lex
+lexers.token = context.token
+lexers.word_match = context.word_match
+lexers.exact_match = context.exact_match
+lexers.just_match = context.just_match
+lexers.inspect = context.inspect
+lexers.report = context.report
+lexers.inform = context.inform
+
+-- helper .. alas ... the lexer's lua instance is rather crippled .. not even
+-- math is part of it
+
+do
+
+ local floor = math and math.floor
+ local char = string.char
+
+ if not floor then
+
+ floor = function(n)
+ return tonumber(format("%d",n))
+ end
+
+ math = math or { }
+
+ math.floor = floor
+
+ end
+
+ local function utfchar(n)
+ if n < 0x80 then
+ return char(n)
+ elseif n < 0x800 then
+ return char(
+ 0xC0 + floor(n/0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x10000 then
+ return char(
+ 0xE0 + floor(n/0x1000),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x40000 then
+ return char(
+ 0xF0 + floor(n/0x40000),
+ 0x80 + floor(n/0x1000),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ else
+ -- return char(
+ -- 0xF1 + floor(n/0x1000000),
+ -- 0x80 + floor(n/0x40000),
+ -- 0x80 + floor(n/0x1000),
+ -- 0x80 + (floor(n/0x40) % 0x40),
+ -- 0x80 + (n % 0x40)
+ -- )
+ return "?"
+ end
+ end
+
+ context.utfchar = utfchar
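+
+  -- a quick commented check: 0x00A0 (nbsp) encodes as two bytes, 0x2009 as three
+  --
+  -- print(#utfchar(0x00A0),#utfchar(0x2009)) -- 2  3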
+
+ -- -- the next one is good enough for use here but not perfect (see context for a
+ -- -- better one)
+ --
+ -- local function make(t)
+ -- local p
+ -- for k, v in next, t do
+ -- if not p then
+ -- if next(v) then
+ -- p = P(k) * make(v)
+ -- else
+ -- p = P(k)
+ -- end
+ -- else
+ -- if next(v) then
+ -- p = p + P(k) * make(v)
+ -- else
+ -- p = p + P(k)
+ -- end
+ -- end
+ -- end
+ -- return p
+ -- end
+ --
+ -- function lpeg.utfchartabletopattern(list)
+ -- local tree = { }
+ -- for i=1,#list do
+ -- local t = tree
+ -- for c in gmatch(list[i],".") do
+ -- if not t[c] then
+ -- t[c] = { }
+ -- end
+ -- t = t[c]
+ -- end
+ -- end
+ -- return make(tree)
+ -- end
+
+ helpers.utfcharpattern = P(1) * R("\128\191")^0 -- unchecked but fast
+
+ local p_false = P(false)
+ local p_true = P(true)
+
+ local function make(t)
+ local function making(t)
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
+ else
+ p = p + P(k) * making(v)
+ end
+ end
+ end
+ if t[""] then
+ p = p + p_true
+ end
+ return p
+ end
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
+ else
+ p = p + P(k) * making(v)
+ end
+ end
+ end
+ return p
+ end
+
+ local function collapse(t,x)
+ if type(t) ~= "table" then
+ return t, x
+ else
+ local n = next(t)
+ if n == nil then
+ return t, x
+ elseif next(t,n) == nil then
+ -- one entry
+ local k = n
+ local v = t[k]
+ if type(v) == "table" then
+ return collapse(v,x..k)
+ else
+ return v, x .. k
+ end
+ else
+ local tt = { }
+ for k, v in next, t do
+ local vv, kk = collapse(v,k)
+ tt[kk] = vv
+ end
+ return tt, x
+ end
+ end
+ end
+
+ function helpers.utfchartabletopattern(list)
+ local tree = { }
+ local n = #list
+ if n == 0 then
+ for s in next, list do
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
+ end
+ end
+ else
+ for i=1,n do
+ local s = list[i]
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
+ end
+ end
+ end
+ collapse(tree,"")
+ -- inspect(tree)
+ return make(tree)
+ end
+
+ patterns.invisibles = helpers.utfchartabletopattern {
+ utfchar(0x00A0), -- nbsp
+ utfchar(0x2000), -- enquad
+ utfchar(0x2001), -- emquad
+ utfchar(0x2002), -- enspace
+ utfchar(0x2003), -- emspace
+ utfchar(0x2004), -- threeperemspace
+ utfchar(0x2005), -- fourperemspace
+ utfchar(0x2006), -- sixperemspace
+ utfchar(0x2007), -- figurespace
+ utfchar(0x2008), -- punctuationspace
+ utfchar(0x2009), -- breakablethinspace
+ utfchar(0x200A), -- hairspace
+ utfchar(0x200B), -- zerowidthspace
+ utfchar(0x202F), -- narrownobreakspace
+ utfchar(0x205F), -- math thinspace
+ }
+
+ -- now we can make:
+
+ patterns.iwordtoken = patterns.wordtoken - patterns.invisibles
+ patterns.iwordpattern = patterns.iwordtoken^3
+
+end
+
+-- The following helpers are not used; they are partially replaced by other
+-- mechanisms, and when needed I'll first optimize them. I only made them somewhat
+-- more readable.
+
+function lexers.delimited_range(chars, single_line, no_escape, balanced) -- unchanged
+ local s = sub(chars,1,1)
+ local e = #chars == 2 and sub(chars,2,2) or s
+ local range
+ local b = balanced and s or ""
+ local n = single_line and "\n" or ""
+ if no_escape then
+ local invalid = S(e .. n .. b)
+ range = patterns.any - invalid
+ else
+ local invalid = S(e .. n .. b) + patterns.backslash
+ range = patterns.any - invalid + patterns.backslash * patterns.any
+ end
+ if balanced and s ~= e then
+ return P {
+ s * (range + V(1))^0 * e
+ }
+ else
+ return s * range^0 * P(e)^-1
+ end
+end
+
+function lexers.starts_line(patt) -- unchanged
+ return P ( function(input, index)
+ if index == 1 then
+ return index
+ end
+ local char = sub(input,index - 1,index - 1)
+ if char == "\n" or char == "\r" or char == "\f" then
+ return index
+ end
+ end ) * patt
+end
+
+function lexers.last_char_includes(s) -- unchanged
+ s = "[" .. gsub(s,"[-%%%[]", "%%%1") .. "]"
+ return P ( function(input, index)
+ if index == 1 then
+ return index
+ end
+ local i = index
+ while match(sub(input,i - 1,i - 1),"[ \t\r\n\f]") do
+ i = i - 1
+ end
+ if match(sub(input,i - 1,i - 1),s) then
+ return index
+ end
+ end)
+end
+
+function lexers.nested_pair(start_chars, end_chars) -- unchanged
+ local s = start_chars
+ local e = P(end_chars)^-1
+ return P {
+ s * (patterns.any - s - end_chars + V(1))^0 * e
+ }
+end
+
+local function prev_line_is_comment(prefix, text, pos, line, s) -- unchanged
+ local start = find(line,"%S")
+ if start < s and not find(line,prefix,start,true) then
+ return false
+ end
+ local p = pos - 1
+ if sub(text,p,p) == "\n" then
+ p = p - 1
+ if sub(text,p,p) == "\r" then
+ p = p - 1
+ end
+ if sub(text,p,p) ~= "\n" then
+ while p > 1 and sub(text,p - 1,p - 1) ~= "\n"
+ do p = p - 1
+ end
+ while find(sub(text,p,p),"^[\t ]$") do
+ p = p + 1
+ end
+ return sub(text,p,p + #prefix - 1) == prefix
+ end
+ end
+ return false
+end
+
+local function next_line_is_comment(prefix, text, pos, line, s)
+ local p = find(text,"\n",pos + s)
+ if p then
+ p = p + 1
+ while find(sub(text,p,p),"^[\t ]$") do
+ p = p + 1
+ end
+ return sub(text,p,p + #prefix - 1) == prefix
+ end
+ return false
+end
+
+function lexers.fold_line_comments(prefix)
+ local property_int = lexers.property_int
+ return function(text, pos, line, s)
+ if property_int["fold.line.comments"] == 0 then
+ return 0
+ end
+ if s > 1 and match(line,"^%s*()") < s then
+ return 0
+ end
+ local prev_line_comment = prev_line_is_comment(prefix, text, pos, line, s)
+ local next_line_comment = next_line_is_comment(prefix, text, pos, line, s)
+ if not prev_line_comment and next_line_comment then
+ return 1
+ end
+ if prev_line_comment and not next_line_comment then
+ return -1
+ end
+ return 0
+ end
+end
+
+-- done
+
+return lexers
diff --git a/context/data/scite/context/lexers/themes/scite-context-theme.lua b/context/data/scite/context/lexers/themes/scite-context-theme.lua
new file mode 100644
index 000000000..b0c63fe39
--- /dev/null
+++ b/context/data/scite/context/lexers/themes/scite-context-theme.lua
@@ -0,0 +1,150 @@
+local info = {
+ version = 1.002,
+ comment = "theme for scintilla lpeg lexer for context/metafun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- context_path = string.split(os.resultof("mtxrun --find-file context.mkiv"))[1] or ""
+
+-- What used to be proper Lua definitions are in 3.42 SciTE properties, although
+-- the integration is still only halfway done. Also, the indexed style specification is
+-- now a hash (which indeed makes more sense). However, the question is: am I
+-- going to rewrite the style bit? It anyway makes more sense to keep this file
+-- somewhat neutral as we no longer need to be compatible. However, we cannot be
+-- sure of helpers being present yet when this file is loaded, so we are somewhat
+-- crippled. On the other hand, I don't see other schemes being used with the
+-- context lexers.
+
+-- The next kludge is no longer needed which is good!
+--
+-- if GTK then -- WIN32 GTK OSX CURSES
+-- font_name = '!' .. font_name
+-- end
+
+-- I need to play with these, some work ok:
+--
+-- eolfilled noteolfilled
+-- characterset:u|l
+-- visible notvisible
+-- changeable notchangeable (this way we can protect styles, e.g. preamble?)
+-- hotspot nothotspot
+
+local font_name = 'Dejavu Sans Mono'
+local font_size = '14'
+
+local colors = {
+ red = { '7F', '00', '00' },
+ green = { '00', '7F', '00' },
+ blue = { '00', '00', '7F' },
+ cyan = { '00', '7F', '7F' },
+ magenta = { '7F', '00', '7F' },
+ yellow = { '7F', '7F', '00' },
+ orange = { 'B0', '7F', '00' },
+ --
+ white = { 'FF', 'FF', 'FF' },
+ light = { 'CF', 'CF', 'CF' },
+ grey = { '80', '80', '80' },
+ dark = { '4F', '4F', '4F' },
+ black = { '00', '00', '00' },
+ --
+ selection = { 'F7', 'F7', 'F7' },
+ logpanel = { 'E7', 'E7', 'E7' },
+ textpanel = { 'CF', 'CF', 'CF' },
+ linepanel = { 'A7', 'A7', 'A7' },
+ tippanel = { '44', '44', '44' },
+ --
+ right = { '00', '00', 'FF' },
+ wrong = { 'FF', '00', '00' },
+}
+
+local styles = {
+
+ ["whitespace"] = { },
+ ["default"] = { font = font_name, size = font_size, fore = colors.black, back = colors.textpanel },
+ ["default"] = { font = font_name, size = font_size, fore = colors.black },
+ ["number"] = { fore = colors.cyan },
+ ["comment"] = { fore = colors.yellow },
+ ["keyword"] = { fore = colors.blue, bold = true },
+ ["string"] = { fore = colors.magenta },
+ -- ["preproc"] = { fore = colors.yellow, bold = true },
+ ["error"] = { fore = colors.red },
+ ["label"] = { fore = colors.red, bold = true },
+
+ ["nothing"] = { },
+ ["class"] = { fore = colors.black, bold = true },
+ ["function"] = { fore = colors.black, bold = true },
+ ["constant"] = { fore = colors.cyan, bold = true },
+ ["operator"] = { fore = colors.blue },
+ ["regex"] = { fore = colors.magenta },
+ ["preprocessor"] = { fore = colors.yellow, bold = true },
+ ["tag"] = { fore = colors.cyan },
+ ["type"] = { fore = colors.blue },
+ ["variable"] = { fore = colors.black },
+ ["identifier"] = { },
+
+ ["linenumber"] = { back = colors.linepanel },
+ ["bracelight"] = { fore = colors.orange, bold = true },
+ ["bracebad"] = { fore = colors.orange, bold = true },
+ ["controlchar"] = { },
+ ["indentguide"] = { fore = colors.linepanel, back = colors.white },
+ ["calltip"] = { fore = colors.white, back = colors.tippanel },
+
+ ["invisible"] = { back = colors.orange },
+ ["quote"] = { fore = colors.blue, bold = true },
+ ["special"] = { fore = colors.blue },
+ ["extra"] = { fore = colors.yellow },
+ ["embedded"] = { fore = colors.black, bold = true },
+ ["char"] = { fore = colors.magenta },
+ ["reserved"] = { fore = colors.magenta, bold = true },
+ ["definition"] = { fore = colors.black, bold = true },
+ ["okay"] = { fore = colors.dark },
+ ["warning"] = { fore = colors.orange },
+ ["standout"] = { fore = colors.orange, bold = true },
+ ["command"] = { fore = colors.green, bold = true },
+ ["internal"] = { fore = colors.orange, bold = true },
+ ["preamble"] = { fore = colors.yellow },
+ ["grouping"] = { fore = colors.red },
+ ["primitive"] = { fore = colors.blue, bold = true },
+ ["plain"] = { fore = colors.dark, bold = true },
+ ["user"] = { fore = colors.green },
+ ["data"] = { fore = colors.cyan, bold = true },
+
+ -- equal to default:
+
+ ["text"] = { font = font_name, size = font_size, fore = colors.black, back = colors.textpanel },
+ ["text"] = { font = font_name, size = font_size, fore = colors.black },
+
+}
+
+local properties = {
+ ["fold.by.parsing"] = 1,
+ ["fold.by.indentation"] = 0,
+ ["fold.by.line"] = 0,
+ ["fold.line.comments"] = 0,
+ --
+ ["lexer.context.log"] = 1, -- log errors and warnings
+ ["lexer.context.trace"] = 0, -- show loading, initializations etc
+ ["lexer.context.detail"] = 0, -- show more detail when tracing
+ ["lexer.context.show"] = 0, -- show result of lexing
+ ["lexer.context.collapse"] = 0, -- make lexing results somewhat more efficient
+ ["lexer.context.inspect"] = 0, -- show some info about lexer (styles and so)
+ --
+-- ["lexer.context.log"] = 1, -- log errors and warnings
+-- ["lexer.context.trace"] = 1, -- show loading, initializations etc
+}
+
+local lexer = lexer or require("lexer")
+local context = lexer.context
+
+if context then
+ context.inform("loading context (style) properties")
+ if context.registerstyles then
+ context.registerstyles(styles)
+ end
+ if context.registerproperties then
+ context.registerproperties(properties)
+ end
+end
+
diff --git a/context/data/scite/context/scite-context-data-context.properties b/context/data/scite/context/scite-context-data-context.properties
new file mode 100644
index 000000000..717b08c80
--- /dev/null
+++ b/context/data/scite/context/scite-context-data-context.properties
@@ -0,0 +1,206 @@
+keywordclass.context.constants=\
+zerocount minusone minustwo plusone \
+plustwo plusthree plusfour plusfive plussix \
+plusseven pluseight plusnine plusten plussixteen \
+plushundred plusthousand plustenthousand plustwentythousand medcard \
+maxcard maxcardminusone zeropoint onepoint halfapoint \
+onebasepoint maxdimen scaledpoint thousandpoint points \
+halfpoint zeroskip zeromuskip onemuskip pluscxxvii \
+pluscxxviii pluscclv pluscclvi normalpagebox endoflinetoken \
+outputnewlinechar emptytoks empty undefined voidbox \
+emptybox emptyvbox emptyhbox bigskipamount medskipamount \
+smallskipamount fmtname fmtversion texengine texenginename \
+texengineversion luatexengine pdftexengine xetexengine unknownengine \
+activecatcode bgroup egroup endline conditionaltrue \
+conditionalfalse attributeunsetvalue uprotationangle rightrotationangle downrotationangle \
+leftrotationangle inicatcodes ctxcatcodes texcatcodes notcatcodes \
+txtcatcodes vrbcatcodes prtcatcodes nilcatcodes luacatcodes \
+tpacatcodes tpbcatcodes xmlcatcodes ctdcatcodes escapecatcode \
+begingroupcatcode endgroupcatcode mathshiftcatcode alignmentcatcode endoflinecatcode \
+parametercatcode superscriptcatcode subscriptcatcode ignorecatcode spacecatcode \
+lettercatcode othercatcode activecatcode commentcatcode invalidcatcode \
+tabasciicode newlineasciicode formfeedasciicode endoflineasciicode endoffileasciicode \
+spaceasciicode hashasciicode dollarasciicode commentasciicode ampersandasciicode \
+colonasciicode backslashasciicode circumflexasciicode underscoreasciicode leftbraceasciicode \
+barasciicode rightbraceasciicode tildeasciicode delasciicode lessthanasciicode \
+morethanasciicode doublecommentsignal atsignasciicode exclamationmarkasciicode questionmarkasciicode \
+doublequoteasciicode singlequoteasciicode forwardslashasciicode primeasciicode hyphenasciicode \
+activemathcharcode activetabtoken activeformfeedtoken activeendoflinetoken batchmodecode \
+nonstopmodecode scrollmodecode errorstopmodecode bottomlevelgroupcode simplegroupcode \
+hboxgroupcode adjustedhboxgroupcode vboxgroupcode vtopgroupcode aligngroupcode \
+noaligngroupcode outputgroupcode mathgroupcode discretionarygroupcode insertgroupcode \
+vcentergroupcode mathchoicegroupcode semisimplegroupcode mathshiftgroupcode mathleftgroupcode \
+vadjustgroupcode charnodecode hlistnodecode vlistnodecode rulenodecode \
+insertnodecode marknodecode adjustnodecode ligaturenodecode discretionarynodecode \
+whatsitnodecode mathnodecode gluenodecode kernnodecode penaltynodecode \
+unsetnodecode mathsnodecode charifcode catifcode numifcode \
+dimifcode oddifcode vmodeifcode hmodeifcode mmodeifcode \
+innerifcode voidifcode hboxifcode vboxifcode xifcode \
+eofifcode trueifcode falseifcode caseifcode definedifcode \
+csnameifcode fontcharifcode fontslantperpoint fontinterwordspace fontinterwordstretch \
+fontinterwordshrink fontexheight fontemwidth fontextraspace slantperpoint \
+interwordspace interwordstretch interwordshrink exheight emwidth \
+extraspace mathsupdisplay mathsupnormal mathsupcramped mathsubnormal \
+mathsubcombined mathaxisheight muquad startmode stopmode \
+startnotmode stopnotmode startmodeset stopmodeset doifmode \
+doifelsemode doifmodeelse doifnotmode startmodeset stopmodeset \
+startallmodes stopallmodes startnotallmodes stopnotallmodes doifallmodes \
+doifelseallmodes doifallmodeselse doifnotallmodes startenvironment stopenvironment \
+environment startcomponent stopcomponent component startproduct \
+stopproduct product startproject stopproject project \
+starttext stoptext startnotext stopnotext startdocument \
+stopdocument documentvariable setupdocument startmodule stopmodule \
+usemodule usetexmodule useluamodule setupmodule currentmoduleparameter \
+moduleparameter everystarttext everystoptext startTEXpage stopTEXpage \
+enablemode disablemode preventmode definemode globalenablemode \
+globaldisablemode globalpreventmode pushmode popmode typescriptone \
+typescripttwo typescriptthree mathsizesuffix mathordcode mathopcode \
+mathbincode mathrelcode mathopencode mathclosecode mathpunctcode \
+mathalphacode mathinnercode mathnothingcode mathlimopcode mathnolopcode \
+mathboxcode mathchoicecode mathaccentcode mathradicalcode constantnumber \
+constantnumberargument constantdimen constantdimenargument constantemptyargument continueifinputfile \
+luastringsep !!bs !!es lefttorightmark righttoleftmark \
+breakablethinspace nobreakspace nonbreakablespace narrownobreakspace zerowidthnobreakspace \
+ideographicspace ideographichalffillspace twoperemspace threeperemspace fourperemspace \
+fiveperemspace sixperemspace figurespace punctuationspace hairspace \
+zerowidthspace zerowidthnonjoiner zerowidthjoiner zwnj zwj \
+optionalspace asciispacechar
+
+keywordclass.context.helpers=\
+startsetups stopsetups startxmlsetups stopxmlsetups \
+startluasetups stopluasetups starttexsetups stoptexsetups startrawsetups \
+stoprawsetups startlocalsetups stoplocalsetups starttexdefinition stoptexdefinition \
+starttexcode stoptexcode startcontextcode stopcontextcode startcontextdefinitioncode \
+stopcontextdefinitioncode texdefinition doifelsesetups doifsetupselse doifsetups \
+doifnotsetups setup setups texsetup xmlsetup \
+luasetup directsetup fastsetup doifelsecommandhandler doifcommandhandlerelse \
+doifnotcommandhandler doifcommandhandler newmode setmode resetmode \
+newsystemmode setsystemmode resetsystemmode pushsystemmode popsystemmode \
+booleanmodevalue newcount newdimen newskip newmuskip \
+newbox newtoks newread newwrite newmarks \
+newinsert newattribute newif newlanguage newfamily \
+newfam newhelp then begcsname strippedcsname \
+checkedstrippedcsname firstargumentfalse firstargumenttrue secondargumentfalse secondargumenttrue \
+thirdargumentfalse thirdargumenttrue fourthargumentfalse fourthargumenttrue fifthargumentfalse \
+fifthsargumenttrue sixthargumentfalse sixtsargumenttrue doglobal dodoglobal \
+redoglobal resetglobal donothing dontcomplain forgetall \
+donetrue donefalse inlineordisplaymath indisplaymath forcedisplaymath \
+startforceddisplaymath stopforceddisplaymath reqno htdp unvoidbox \
+hfilll vfilll mathbox mathlimop mathnolop \
+mathnothing mathalpha currentcatcodetable defaultcatcodetable catcodetablename \
+newcatcodetable startcatcodetable stopcatcodetable startextendcatcodetable stopextendcatcodetable \
+pushcatcodetable popcatcodetable restorecatcodes setcatcodetable letcatcodecommand \
+defcatcodecommand uedcatcodecommand hglue vglue hfillneg \
+vfillneg hfilllneg vfilllneg ruledhss ruledhfil \
+ruledhfill ruledhfilneg ruledhfillneg normalhfillneg ruledvss \
+ruledvfil ruledvfill ruledvfilneg ruledvfillneg normalvfillneg \
+ruledhbox ruledvbox ruledvtop ruledvcenter ruledmbox \
+ruledhskip ruledvskip ruledkern ruledmskip ruledmkern \
+ruledhglue ruledvglue normalhglue normalvglue ruledpenalty \
+filledhboxb filledhboxr filledhboxg filledhboxc filledhboxm \
+filledhboxy filledhboxk scratchcounter globalscratchcounter scratchdimen \
+globalscratchdimen scratchskip globalscratchskip scratchmuskip globalscratchmuskip \
+scratchtoks globalscratchtoks scratchbox globalscratchbox normalbaselineskip \
+normallineskip normallineskiplimit availablehsize localhsize setlocalhsize \
+distributedhsize hsizefraction nextbox dowithnextbox dowithnextboxcs \
+dowithnextboxcontent dowithnextboxcontentcs scratchwidth scratchheight scratchdepth \
+scratchoffset scratchdistance scratchhsize scratchvsize scratchxoffset \
+scratchyoffset scratchhoffset scratchvoffset scratchxposition scratchyposition \
+scratchtopoffset scratchbottomoffset scratchleftoffset scratchrightoffset scratchcounterone \
+scratchcountertwo scratchcounterthree scratchdimenone scratchdimentwo scratchdimenthree \
+scratchskipone scratchskiptwo scratchskipthree scratchmuskipone scratchmuskiptwo \
+scratchmuskipthree scratchtoksone scratchtokstwo scratchtoksthree scratchboxone \
+scratchboxtwo scratchboxthree scratchnx scratchny scratchmx \
+scratchmy scratchunicode scratchleftskip scratchrightskip scratchtopskip \
+scratchbottomskip doif doifnot doifelse doifinset \
+doifnotinset doifelseinset doifinsetelse doifelsenextchar doifnextcharelse \
+doifelsenextoptional doifnextoptionalelse doifelsenextoptionalcs doifnextoptionalcselse doifelsefastoptionalcheck \
+doiffastoptionalcheckelse doifelsenextbgroup doifnextbgroupelse doifelsenextbgroupcs doifnextbgroupcselse \
+doifelsenextparenthesis doifnextparenthesiselse doifelseundefined doifundefinedelse doifelsedefined \
+doifdefinedelse doifundefined doifdefined doifelsevalue doifvalue \
+doifnotvalue doifnothing doifsomething doifelsenothing doifnothingelse \
+doifelsesomething doifsomethingelse doifvaluenothing doifvaluesomething doifelsevaluenothing \
+doifvaluenothingelse doifelsedimension doifdimensionelse doifelsenumber doifnumberelse \
+doifnumber doifnotnumber doifelsecommon doifcommonelse doifcommon \
+doifnotcommon doifinstring doifnotinstring doifelseinstring doifinstringelse \
+doifelseassignment doifassignmentelse docheckassignment tracingall tracingnone \
+loggingall removetoks appendtoks prependtoks appendtotoks \
+prependtotoks to endgraf endpar everyendpar \
+reseteverypar finishpar empty null space \
+quad enspace nbsp obeyspaces obeylines \
+obeyedspace obeyedline obeyedtab obeyedpage normalspace \
+executeifdefined singleexpandafter doubleexpandafter tripleexpandafter dontleavehmode \
+removelastspace removeunwantedspaces keepunwantedspaces wait writestatus \
+define defineexpandable redefine setmeasure setemeasure \
+setgmeasure setxmeasure definemeasure freezemeasure measure \
+measured installcorenamespace getvalue getuvalue setvalue \
+setevalue setgvalue setxvalue letvalue letgvalue \
+resetvalue undefinevalue ignorevalue setuvalue setuevalue \
+setugvalue setuxvalue globallet glet udef \
+ugdef uedef uxdef checked unique \
+getparameters geteparameters getgparameters getxparameters forgetparameters \
+copyparameters getdummyparameters dummyparameter directdummyparameter setdummyparameter \
+letdummyparameter usedummystyleandcolor usedummystyleparameter usedummycolorparameter processcommalist \
+processcommacommand quitcommalist quitprevcommalist processaction processallactions \
+processfirstactioninset processallactionsinset unexpanded expanded startexpanded \
+stopexpanded protected protect unprotect firstofoneargument \
+firstoftwoarguments secondoftwoarguments firstofthreearguments secondofthreearguments thirdofthreearguments \
+firstoffourarguments secondoffourarguments thirdoffourarguments fourthoffourarguments firstoffivearguments \
+secondoffivearguments thirdoffivearguments fourthoffivearguments fifthoffivearguments firstofsixarguments \
+secondofsixarguments thirdofsixarguments fourthofsixarguments fifthofsixarguments sixthofsixarguments \
+firstofoneunexpanded firstoftwounexpanded secondoftwounexpanded firstofthreeunexpanded secondofthreeunexpanded \
+thirdofthreeunexpanded gobbleoneargument gobbletwoarguments gobblethreearguments gobblefourarguments \
+gobblefivearguments gobblesixarguments gobblesevenarguments gobbleeightarguments gobbleninearguments \
+gobbletenarguments gobbleoneoptional gobbletwooptionals gobblethreeoptionals gobblefouroptionals \
+gobblefiveoptionals dorecurse doloop exitloop dostepwiserecurse \
+recurselevel recursedepth dofastloopcs dowith newconstant \
+setnewconstant setconstant setconstantvalue newconditional settrue \
+setfalse settruevalue setfalsevalue newmacro setnewmacro \
+newfraction newsignal dosingleempty dodoubleempty dotripleempty \
+doquadrupleempty doquintupleempty dosixtupleempty doseventupleempty dosingleargument \
+dodoubleargument dotripleargument doquadrupleargument doquintupleargument dosixtupleargument \
+doseventupleargument dosinglegroupempty dodoublegroupempty dotriplegroupempty doquadruplegroupempty \
+doquintuplegroupempty permitspacesbetweengroups dontpermitspacesbetweengroups nopdfcompression maximumpdfcompression \
+normalpdfcompression modulonumber dividenumber getfirstcharacter doifelsefirstchar \
+doiffirstcharelse startnointerference stopnointerference twodigits threedigits \
+leftorright offinterlineskip oninterlineskip nointerlineskip strut \
+halfstrut quarterstrut depthstrut setstrut strutbox \
+strutht strutdp strutwd struthtdp begstrut \
+endstrut lineheight ordordspacing ordopspacing ordbinspacing \
+ordrelspacing ordopenspacing ordclosespacing ordpunctspacing ordinnerspacing \
+opordspacing opopspacing opbinspacing oprelspacing opopenspacing \
+opclosespacing oppunctspacing opinnerspacing binordspacing binopspacing \
+binbinspacing binrelspacing binopenspacing binclosespacing binpunctspacing \
+bininnerspacing relordspacing relopspacing relbinspacing relrelspacing \
+relopenspacing relclosespacing relpunctspacing relinnerspacing openordspacing \
+openopspacing openbinspacing openrelspacing openopenspacing openclosespacing \
+openpunctspacing openinnerspacing closeordspacing closeopspacing closebinspacing \
+closerelspacing closeopenspacing closeclosespacing closepunctspacing closeinnerspacing \
+punctordspacing punctopspacing punctbinspacing punctrelspacing punctopenspacing \
+punctclosespacing punctpunctspacing punctinnerspacing innerordspacing inneropspacing \
+innerbinspacing innerrelspacing inneropenspacing innerclosespacing innerpunctspacing \
+innerinnerspacing normalreqno startimath stopimath normalstartimath \
+normalstopimath startdmath stopdmath normalstartdmath normalstopdmath \
+uncramped cramped triggermathstyle mathstylefont mathsmallstylefont \
+mathstyleface mathsmallstyleface mathstylecommand mathpalette mathstylehbox \
+mathstylevbox mathstylevcenter mathstylevcenteredhbox mathstylevcenteredvbox mathtext \
+setmathsmalltextbox setmathtextbox pushmathstyle popmathstyle triggerdisplaystyle \
+triggertextstyle triggerscriptstyle triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle \
+triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle \
+triggercrampedbigstyle luaexpr expelsedoif expdoif expdoifnot \
+expdoifelsecommon expdoifcommonelse expdoifelseinset expdoifinsetelse ctxdirectlua \
+ctxlatelua ctxsprint ctxwrite ctxcommand ctxdirectcommand \
+ctxlatecommand ctxreport ctxlua luacode lateluacode \
+directluacode registerctxluafile ctxloadluafile luaversion luamajorversion \
+luaminorversion ctxluacode luaconditional luaexpanded startluaparameterset \
+stopluaparameterset luaparameterset definenamedlua obeylualines obeyluatokens \
+startluacode stopluacode startlua stoplua startctxfunction \
+stopctxfunction ctxfunction startctxfunctiondefinition stopctxfunctiondefinition installctxfunction \
+carryoverpar assumelongusagecs Umathbotaccent righttolefthbox lefttorighthbox \
+righttoleftvbox lefttorightvbox righttoleftvtop lefttorightvtop rtlhbox \
+ltrhbox rtlvbox ltrvbox rtlvtop ltrvtop \
+autodirhbox autodirvbox autodirvtop lefttoright righttoleft \
+synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection lesshyphens morehyphens \
+nohyphens dohyphens Ucheckedstartdisplaymath Ucheckedstopdisplaymath nobreak \
+allowbreak goodbreak
+
diff --git a/context/data/scite/context/scite-context-data-interfaces.properties b/context/data/scite/context/scite-context-data-interfaces.properties
new file mode 100644
index 000000000..e75b5b92b
--- /dev/null
+++ b/context/data/scite/context/scite-context-data-interfaces.properties
@@ -0,0 +1,1812 @@
+keywordclass.context.cs=\
+CAP Cap Caps Character \
+Characters Cisla KAP Kap Kaps \
+MESIC MONTH Rimskecislice Romannumerals SLOVA \
+SLOVO Slova Slovo VSEDNIDEN WEEKDAY \
+WORD WORDS Word Words Znak \
+Znaky about adaptlayout aktualnicislonadpisu aktualnidatum \
+appendix arg at atleftmargin atpage \
+atrightmargin background barevnalista barva bilemisto \
+blackrule blackrules blank bookmark bottomspace \
+bublinkovanapoveda but button bydliste bypassblocks \
+cap celkovypocetstran cernalinka cernelinky chapter \
+character characters chem cisla cislonadpisu \
+cislopodrovnice cislorovnice cislostrany citace citovat \
+clip clonefield color column comment \
+comparecolorgroup comparepalet completecombinedlist completelistoffloats completelistofsorts \
+completelistofsynonyms completepagenumber completeregister convertnumber copyfield \
+correctwhitespace coupledocument coupledregister couplemarking couplepage \
+couplepaper coupleregister crlf currentdate currentheadnumber \
+cutspace date datum decouplemarking decrementnumber \
+define defineblank defineblock definebodyfont definebodyfontDEF \
+definebodyfontREF definebodyfontenvironment definebuffer definecolor definecolorgroup \
+definecolumnbreak definecolumnset definecombination definecombinedlist defineconversion \
+definedescription definedfont defineenumeration definefield definefieldstack \
+definefiguresymbol definefloat definefont definefontfeature definefonthandling \
+definefontsynonym defineframed defineframedtext definehead defineindentedtext \
+defineinmargin defineinteractionmenu defineitemgroup definelabel definelayer \
+definelayout definelist definelogo definemakeup definemarking \
+definemathalignment defineoutput defineoverlay definepagebreak definepalet \
+definepapersize defineparagraphs defineplacement defineprofile defineprogram \
+definerawfont definereference definereferenceformat definereferencelist defineregister \
+definerule definesection definesectionblock definesorting definestartstop \
+definesubfield definesymbol definesynonyms definetabletemplate definetabulate \
+definetext definetextposition definetextvariable definetype definetypeface \
+definetyping defineversion definuj definujakcent definujbarvu \
+definujblok definujbloksekce definujbuffer definujfont definujformatodkazu \
+definujhbox definujhlavnipole definujinterakcnimenu definujkombinovanyseznam definujkonverzi \
+definujlogo definujnadpis definujobrazeksymbol definujodkaz definujodstavce \
+definujopis definujoramovani definujoramovanytext definujpaletu definujplvouciobjekt \
+definujpodpole definujpole definujpopis definujpopisek definujprekryv \
+definujpreskok definujprikaz definujprofil definujprogram definujprostredizakladnihofontu \
+definujrejstrik definujsablonutabulky definujsekci definujseznam definujseznamodkazu \
+definujskupinubarev definujstartstop definujstyl definujstylfontu definujsymbol \
+definujsynonumumfontu definujsynonyma definujtabelaci definujtext definujtrideni \
+definujupravu definujvelikostpapiru definujverzi definujvycet definujvystup \
+definujzakladnifont definujzasobnikpoli definujznaceni definujznak delkaseznamu \
+description determineheadnumber determinelistcharacteristics disableinteractionmenu dodrzujprofil \
+dodrzujverzi dodrzujverziprofilu dvoustrannypapir emptylines enumeration \
+externalfigure externiobraz fakt field fieldstack \
+fillinfield fillinline fillinrules fillintext fitfield \
+fixedspaces followprofile followprofileversion followversion footnote \
+footnotetext forceblocks fraction framed framedtext \
+from getbuffer getmarking getnumber godown \
+goto gotobox graycolor grid hairline \
+head headnumber headsym headtext hideblocks \
+high hl hlavnijazyk hlavniuroven hodnotabarvy \
+hodnotasedi immediatebetweenlist immediatetolist in indentation \
+indenting inframed ininner inleft inline \
+inmargin inneredgedistance inneredgewidth innermargindistance innermarginwidth \
+inothermargin inouter inright instalacejazyka installlanguage \
+interactionbar interactionbuttons interakcnilista interakcnitlacitka interaktivnimenu \
+item items its jazyk jdidolu \
+jdina jdinabox jdinastranu jmeno kap \
+keepblocks klonujpole komponenta konvertujcislo kopirujpole \
+korekcebilehomista labeling labels labeltext language \
+leftaligned leg listsymbol loadsorts loadsynonyms \
+logfields lohi low mainlanguage maoramovani \
+mapfontsize mar marginalnilinka marginalninadpis marginalnislovo \
+marginalnitext marginrule margintext marking markversion \
+matematika mathematics mazaramovani mediaeval meritko \
+mesic mezera midaligned mirror month \
+moveformula moveongrid movesidefloat mrizka nadpis \
+nadruhyokraj nalevo nalevyokraj name naokraj \
+napravo napravyokraj nastavbarvu nastavbarvy nastavbilamista \
+nastavblok nastavbloksekce nastavbuffer nastavcernelinky nastavcislonadpisu \
+nastavcislostrany nastavcislovani nastavcislovaniodstavcu nastavcislovaniradku nastavcislovanistran \
+nastavcitaci nastavdefinicipoznamekpodcarou nastavdeleniplvoucichobjektu nastavdelitko nastavdolnitexty \
+nastavexterniobrazy nastavhorejsek nastavhornitexty nastavinterakci nastavinterakcnilistu \
+nastavinterakcnimenu nastavinterakcniobrazovku nastavjazyk nastavkapitalky nastavkombinovanyseznam \
+nastavkomentar nastavkomentarstrany nastavlegendu nastavmarginalie nastavmarginalniblok \
+nastavmarginalnilinky nastavmeziradkovoumezeru nastavnadpis nastavnadpisy nastavodkazovani \
+nastavodsazovani nastavodstavce nastavopis nastavoramovanetexty nastavoramovani \
+nastavorez nastavotoceni nastavpaletu nastavplvouciobjekt nastavplvouciobjekty \
+nastavpodcislostrany nastavpodtrzeni nastavpole nastavpolozky nastavpopisek \
+nastavpopisky nastavpopisy nastavpozadi nastavpoznamkypodcarou nastavprechodstrany \
+nastavpreskok nastavprofily nastavprogramy nastavprostredizakladnihofontu nastavpublikace \
+nastavradkovani nastavradky nastavrastr nastavrejstrik nastavrovnice \
+nastavsadusymbolu nastavsekci nastavseznam nastavseznamodkazu nastavsirkucary \
+nastavsloupce nastavspodek nastavspojeni nastavsynchronizaci nastavsynchronizacnilistu \
+nastavsynonyma nastavsystem nastavtab nastavtabelaci nastavtabulky \
+nastavtenkelinky nastavtext nastavtexthlavicky nastavtextovelinky nastavtextpopisku \
+nastavtexttexty nastavtextyupati nastavtextyzahlavi nastavtlacitka nastavtoleranci \
+nastavtrideni nastavtype nastavumisteniprotejsku nastavumistovani nastavupati \
+nastavupravu nastavurl nastavusporadani nastavvelikostpapiru nastavverze \
+nastavvsechnapole nastavvycty nastavvyplnovelinky nastavvyplnoveradky nastavvystup \
+nastavvzhled nastavzahlavi nastavzakladnifont nastavzarovnani nastavznaceni \
+nastavzuzeni nastrane navigating nejakyradek nekde \
+nextsection neznamo nivy nizky nocap \
+noheaderandfooterlines noindenting nokap nolist nomarking \
+nomoreblocks nomorefiles nop nospace note \
+notopandbottomlines nowhitespace numberofsubpages numbers obrazovka \
+odkaz odkaznadatum odkaznastranu odkaznatext odkazujici \
+odsazenishora odsazenizleva odsazovani okr opakovat \
+opis opissoubor oramovani oref orez \
+otocit outeredgedistance outeredgewidth outermargindistance outermarginwidth \
+overbar overbars overstrike overstrikes oznaceni \
+oznacverzi packed page pagedepth pageoffset \
+pagereference paragraph parovastrana part periods \
+pis placebookmarks placecombinedlist placefloat placefootnotes \
+placeformula placeheadnumber placeheadtext placelegend placelist \
+placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes placelogos \
+placeongrid placeontopofeachother placepagenumber placerawlist placereferencelist \
+placeregister placerule placesidebyside placesubformula placetextvariable \
+plnezneni pol pole polozka polozky \
+popisky poppisek porovnejpaletu porovnejskupinubarev position \
+positiontext pozadi pozice poznamka poznamkapodcarou \
+pref prelozit premistinamrizku prepninazakladnifont preskoc \
+prizpusobivepole prizpusobvzhled processblocks processpage produkt \
+program projekt propojeneznaceni propojenydokument propojenyrejstrik \
+prostredi publication publikace quotation quote \
+ran ref reference referral referraldate \
+register reservefloat reset resetmarking resetnumber \
+resettextcontent resetznaceni rightaligned rimskecislice romannumerals \
+rotate rozdelplvouciobjekt rozmer rozpojeneznaceni roztazene \
+scale schovejbloky screen section sedabarva \
+seeregister selectblocks selectpaper selectversion setnumber \
+settextcontent settextvariable setupalign setupanswerarea setuparranging \
+setupbackground setupbackgrounds setupblackrules setupblank setupblock \
+setupbodyfont setupbodyfontenvironment setupbottom setupbottomtexts setupbuffer \
+setupbuttons setupcapitals setupcaption setupcaptions setupclipping \
+setupcolor setupcolors setupcolumns setupcolumnset setupcolumnsetlines \
+setupcolumnsetstart setupcombinations setupcombinedlist setupcomment setupdescriptions \
+setupenumerations setupexternalfigures setupfield setupfields setupfillinlines \
+setupfillinrules setupfloat setupfloats setupfloatsplitting setupfonthandling \
+setupfontsynonym setupfooter setupfootertexts setupfootnotedefinition setupfootnotes \
+setupforms setupformulas setupframed setupframedtexts setuphead \
+setupheader setupheadertexts setupheadnumber setupheads setupheadtext \
+setuphyphenmark setupindentedtext setupindenting setupinmargin setupinteraction \
+setupinteractionbar setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 \
+setupitemgroup setupitems setuplabeltext setuplanguage setuplayout \
+setuplegend setuplinenumbering setuplines setuplinewidth setuplist \
+setuplistalternative setupmakeup setupmarginblocks setupmarginrules setupmarking \
+setupmathalignment setupnarrower setupnumber setupnumbering setupoppositeplacing \
+setupoutput setuppagenumber setuppagenumbering setuppagetransitions setuppalet \
+setuppaper setuppapersize setupparagraphnumbering setupparagraphs setupplacement \
+setuppositioning setupprofiles setupprograms setuppublications setupquote \
+setupreferencelist setupreferencing setupregister setuprotate setuprule \
+setupscreens setupsection setupsectionblock setupsorting setupspacing \
+setupstartstop setupstrut setupsubpagenumber setupsymbolset setupsynchronization \
+setupsynchronizationbar setupsynonyms setupsystem setuptab setuptables \
+setuptabulate setuptext setuptextposition setuptextrules setuptexttexts \
+setuptextvariable setupthinrules setuptolerance setuptop setuptoptexts \
+setuptype setuptyping setupunderbar setupurl setupversions \
+setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup \
+showexternalfigures showfields showframe showgrid showlayout \
+showmakeup showpalet showprint showsetups showstruts \
+showsymbolset sirkalevehookraje sirkalevemarginalie sirkamarginalie sirkaokraje \
+sirkapapiru sirkapravehookraje sirkapravemarginalie sirkasazby sirkaseznamu \
+sirkatextu sirkatiskpapiru sloupec slovovpravo someline \
+somewhere sort space splitfloat spodek \
+stanovcharakteristickuseznamu stanovcislonadpisu startalignment startbackground startbarva \
+startbuffer startcislovaniradku startcitace startcolor startcolumnmakeup \
+startcolumns startcolumnset startcombination startcomment startcomponent \
+startdescription startdocument startdokument startenumeration startenvironment \
+startfact startfigure startfloattext startformula startframedtext \
+startglobalni starthiding startinteractionmenu startitemgroup startkodovani \
+startkomponenta startkorekceradku startlegend startline startlinecorrection \
+startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
+startlokalni startlokalnipoznamkypodcarou startmakeup startmarginalniblok startmarginalnilinka \
+startmarginblock startmarginrule startnamemakeup startnarrower startnezhustene \
+startobraz startopposite startoverlay startoverview startpacked \
+startparagraph startpositioning startpostponing startpozadi startprehled \
+startprekryv startproduct startprodukt startprofil startprofile \
+startproject startprojekt startprostredi startprotejsek startquotation \
+startradek startradky startrastr startregister startsadasymbolu \
+startsloupce startspojeni startsymbolset startsynchronizace startsynchronization \
+starttable starttables starttabulate starttabulka starttabulky \
+starttext starttextovalinka starttextrule starttyping startumistovani \
+startunpacked startuprava startversion startverze startzarovnavani \
+startzhustene startzuzeni stopalignment stopbackground stopbarva \
+stopbuffer stopcislovaniradku stopcitace stopcolor stopcolumnmakeup \
+stopcolumns stopcolumnset stopcombination stopcomment stopcomponent \
+stopdescription stopdocument stopdokument stopenumeration stopenvironment \
+stopfact stopfigure stopfloattext stopformula stopframedtext \
+stopglobalni stophiding stopinteractionmenu stopitemgroup stopkodovani \
+stopkomponenta stopkorekceradku stoplegend stopline stoplinecorrection \
+stoplinenumbering stoplines stoplocal stoplocalenvironment stoplocalfootnotes \
+stoplokalni stoplokalnipoznamkypodcarou stopmakeup stopmarginalniblok stopmarginalnilinka \
+stopmarginblock stopmarginrule stopnamemakeup stopnarrower stopnezhustene \
+stopopposite stopoverlay stopoverview stoppacked stopparagraph \
+stoppositioning stoppostponing stoppozadi stopprehled stopprekryv \
+stopproduct stopprodukt stopprofil stopprofile stopproject \
+stopprojekt stopprostredi stopprotejsek stopquotation stopradek \
+stopradky stoprastr stopsloupce stopspojeni stopsymbolset \
+stopsynchronizace stopsynchronization stoptable stoptables stoptabulate \
+stoptabulka stoptabulky stoptext stoptextovalinka stoptextrule \
+stoptyping stopumistovani stopunpacked stopuprava stopversion \
+stopverze stopzarovnavani stopzhustene stopzuzeni strana \
+stretched sub subject subpagenumber subsection \
+subsubject subsubsection subsubsubject switchtobodyfont switchtorawfont \
+sym symbol synchronizacnilista synchronizationbar synchronize \
+synchronizovat synonym tab tecky tenkalinka \
+tenkelinky testcolumn testpage tex texthlavicky \
+textovalinka textpopisku textreference textrule textvariable \
+thinrule thinrules title tlacitko tlacitkomenu \
+tloustkacary tooltip translate tref tvrdamezera \
+tvrdemezery txt typ type typebuffer \
+typefile ukazbarvu ukazexterniobrazy ukazmrizku ukaznastaveni \
+ukazpaletu ukazpodpery ukazpole ukazpostredizakladnihofontu ukazramecek \
+ukazsadusymbolu ukazskupinubarev ukazupravu ukazvytisk ukazvzhled \
+ukazzakladnifont umistikombinovanyseznam umistilegendu umistiloga umistilokalnipoznamkypodcarou \
+umistinadsebe umistinamrizku umistipodrovnici umistipoznamkypodcarou umistirejstrik \
+umistirovnici umistiseznam umistivedlesebe umistizalozky underbar \
+underbars urcicharakteristikurejstriku useURL useXMLfilter useblocks \
+usecommands usedirectory useencoding useexternaldocument useexternalfigure \
+useexternalfile useexternalfiles useexternalsoundtrack usemodule usereferences \
+usespecials usesymbols usetypescript usetypescriptfile uzijJSscripts \
+uzijURL uzijadresar uzijbloky uzijexternidokument uzijexterniobraz \
+uzijexternisoubor uzijexternisoubory uzijexternizvuk uzijkodovani uzijmodul \
+uzijmoduly uzijodkazy uzijprikazy uzijspeciality uzijsymbol \
+uzijurl version verze vl vlasovalinka \
+vlevo vpravo vradku vsedniden vyberbloky \
+vyberpapir vyberverzi vyplnenytext vyplnovelinky vyplnovepole \
+vyplnovyradek vyskahorejsku vyskapapiru vyskasazby vyskaseznamu \
+vyskaspodku vyskatextu vyskatiskpapiru vyskaupati vyskazahlavi \
+vysoky vyznam vzdalenosthorejsku vzdalenostlevehookraje vzdalenostlevemarginalie \
+vzdalenostmarginalie vzdalenostokraje vzdalenostpravehookraje vzdalenostpravemarginalie vzdalenostspodku \
+vzdalenostupati vzdalenostzahlavi weekday whitespace wordright \
+writebetweenlist writetolist writetoreferencelist writetoregister zablokujinterakcnimenu \
+zachovejbloky zadnamezera zadnebilemisto zadnedalsibloky zadnedalsisoubory \
+zadnehorniadolniradky zadneodsazovani zadnezahlaviaupati zadneznaceni zadnyrozmer \
+zadnyseznam zadnytest zalozka zapisdorejstriku zapisdoseznamu \
+zapisdoseznamuodkazu zapismeziseznam zaramovani zarovnanonastred zarovnanovlevo \
+zarovnanovpravo zasobnikpoli zaznamovepole zhustene ziskejbuffer \
+ziskejznaceni zlomek znaceni znak znaky \
+zpracujbloky zpracujstranu zrcadlit zref zvysujicicislo
+
+keywordclass.context.de=\
+Buchstabe Buchstaben CAP Cap \
+Caps Character Characters KAP Kap \
+Kaps MONAT MONTH Roemischezahlen Romannumerals \
+WEEKDAY WOCHENTAG WOERTER WORD WORDS \
+WORT Woerter Word Words Wort \
+Ziffern about abstandlinkerrand abstandoben abstandrechterrand \
+abstandunten adaptlayout amgitterausrichten amgitterneuausrichten appendix \
+arg at atleftmargin atpage atrightmargin \
+aufseite ausfuellfeld ausfuelltext ausschnitt background \
+bearbeitebloecke bearbeiteseite bedeutung behaltebloecke bei \
+bemerkung benutzekodierung benutzespezielles benutzeverzeichnis beschrifteversion \
+beschriftung bestimmekopfnummer bestimmelistencharakeristika bestimmeregistercharakteristika bildschirm \
+blackrule blackrules blank blanko bookmark \
+bottomspace breitelinkerrand breiterechterrand bruch buchstabe \
+buchstaben but button bypassblocks cap \
+chapter character characters chem clip \
+clonefield color column comment comparecolorgroup \
+comparepalet completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms \
+completepagenumber completeregister convertnumber copyfield correctwhitespace \
+coupledocument coupledregister couplemarking couplepage couplepaper \
+coupleregister crlf currentdate currentheadnumber cutspace \
+date datum decouplemarking decrementnumber define \
+defineblank defineblock definebodyfont definebodyfontDEF definebodyfontREF \
+definebodyfontenvironment definebuffer definecolor definecolorgroup definecolumnbreak \
+definecolumnset definecombination definecombinedlist defineconversion definedescription \
+definedfont defineenumeration definefield definefieldstack definefiguresymbol \
+definefloat definefont definefontfeature definefonthandling definefontsynonym \
+defineframed defineframedtext definehead defineindentedtext defineinmargin \
+defineinteractionmenu defineitemgroup definelabel definelayer definelayout \
+definelist definelogo definemakeup definemarking definemathalignment \
+defineoutput defineoverlay definepagebreak definepalet definepapersize \
+defineparagraphs defineplacement defineprofile defineprogram definerawfont \
+definereference definereferenceformat definereferencelist defineregister definerule \
+defineschriftsynonym definesection definesectionblock definesorting definestartstop \
+definesubfield definesymbol definesynonyms definetabletemplate definetabulate \
+definetext definetextposition definetextvariable definetype definetypeface \
+definetyping defineversion definiereabbsymbol definiereabsaetze definiereabschnitt \
+definiereabschnittsblock definiereakzent definierebefehl definierebeschreibung definierebeschreibungen \
+definierebeschriftung definiereblanko definiereblock definierefarbe definierefarbengruppe \
+definierefeld definierefeldstapel definierefliesstext definierefliesstextumgebung definieregleitobjekt \
+definierehauptfeld definierehbox definiereinteraktionsmenue definierekonversion definierelabel \
+definiereliste definierelogo definieren definierenummerierung definiereoverlay \
+definierepalette definierepapierformat definiereprofil definiereprogramme definierepuffer \
+definierereferenz definierereferenzformat definierereferenzliste definiereregister definiereschrift \
+definiereschriftstil definieresortieren definierestartstop definierestil definieresubfeld \
+definieresymbol definieresynonyme definieretabellenvorlage definieretabulator definieretext \
+definieretippen definiereueberschrift definiereumbruch definiereumrahmt definiereumrahmtertext \
+definiereversion definierezeichen definierezusammengestellteliste description determineheadnumber \
+determinelistcharacteristics dimension disableinteractionmenu doppelseite doppelseitigespapier \
+drehen duennelinie duennerumriss einezeile einziehen \
+emptylines entknuepfebeschriftung enumeration externalfigure externeabbildung \
+farbbalken farbe farbewert feld feldstapel \
+festesspatium field fieldstack fillinfield fillinline \
+fillinrules fillintext fitfield fixedspaces folgeprofil \
+folgeprofilversion folgeversion followprofile followprofileversion followversion \
+footnote footnotetext forceblocks format formelnummer \
+fraction framed framedtext from fussnote \
+fusszeileabstand fusszeilenhoehe gefuelltesrechteck gefuelltezeile geg \
+gesamtseitenanzahl gestreckt getbuffer getmarking getnumber \
+gitter godown goto gotobox graufarbe \
+grauwert graycolor grid haarlinie hairline \
+hauptsprache head headnumber headsym headtext \
+heutigesdatum heutigeskopfnummer hideblocks high hintergrund \
+hl hoch hoeheoben hoeheunten holebeschriftung \
+holepuffer imlinken imlinkenrand immaumrise immediatebetweenlist \
+immediatetolist imrechten imrechtenrand imumriss in \
+inaktiviereinteraktionsmenue inanderermarginale indentation indenting inframed \
+ininner inleft inline inlinkermarginale inmargin \
+inmarginalie inneredgedistance inneredgewidth innermargindistance innermarginwidth \
+inothermargin inouter inrechtermarginale inright installieresprache \
+installlanguage interactionbar interactionbuttons interaktionsbalken interaktionsknopfe \
+interaktionsmenue inzeile irgendwo item items \
+its kap keepblocks keindimension keinebeschriftung \
+keinebloeckemehr keinedateienmehr keinekopfundfusszeilen keineliste keinspatium \
+keintest keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld \
+knopf komponente konvertierezahl kopf kopfniveau \
+kopfnummer kopfweite kopfzeilenabstand kopfzeilenhoehe kopierefeld \
+korrigierezwischenraum label labeling labels labeltext \
+language leftaligned leg liniendicke linkemarginalafstand \
+linkemarginalbreite linksbuendig listenbreite listenhoehe listenlaenge \
+listsymbol loadsorts loadsynonyms logfields lohi \
+low mainlanguage mapfontsize mar marginalafstand \
+marginalbreite marginallinie marginaltext marginaltitel marginalwort \
+marginrule margintext marking markversion mathematics \
+mathematik maumrise mediaeval menueknopf midaligned \
+mirror monat month moveformula moveongrid \
+movesidefloat nachunten name navigating nextsection \
+nichteinziehen nocap noheaderandfooterlines noindenting nokap \
+nolist nomarking nomoreblocks nomorefiles nop \
+nospace note notiz notopandbottomlines nowhitespace \
+numberofsubpages numbers nummererhoehen outeredgedistance outeredgewidth \
+outermargindistance outermarginwidth overbar overbars overstrike \
+overstrikes packed page pagedepth pageoffset \
+pagereference papierbreite papierhoehe paragraph part \
+passelayoutan passendfeld periods placebookmarks placecombinedlist \
+placefloat placefootnotes placeformula placeheadnumber placeheadtext \
+placelegend placelist placelistoffloats placelistofsorts placelistofsynonyms \
+placelocalfootnotes placelogos placeongrid placeontopofeachother placepagenumber \
+placerawlist placereferencelist placeregister placerule placesidebyside \
+placesubformula placetextvariable platzierebookmarks platziereformel platzierefussnoten \
+platzierelegende platziereliste platzierelogo platzierelokalefussnoten platzierenebeneinander \
+platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste pos \
+position positiontext posten printpapierbreite printpapierhoehe \
+processblocks processpage produkt program programm \
+projekt publication publikation punkt quotation \
+quote ran randabstand randbreite rechteck \
+rechtecke rechtemarginalafstand rechtemarginalbreite rechtsbuendig ref \
+reference referenz referieren referral referraldate \
+register registrierefelder reservefloat reset resetmarking \
+resetnumber resettextcontent rightaligned roemischezahlen romannumerals \
+rotate ruecksetzten ruecksetztenbeschriftung rumpfweite satzbreite \
+satzhoehe scale schreibezumregister schreibezurliste schreibezurreferenzliste \
+schreibezwischenliste screen section seeregister seite \
+seitenreferenz seitenummer selectblocks selectpaper selectversion \
+setnumber settext settextcontent settextvariable setupalign \
+setupanswerarea setuparranging setupbackground setupbackgrounds setupblackrules \
+setupblank setupblock setupbodyfont setupbodyfontenvironment setupbottom \
+setupbottomtexts setupbuffer setupbuttons setupcapitals setupcaption \
+setupcaptions setupclipping setupcolor setupcolors setupcolumns \
+setupcolumnset setupcolumnsetlines setupcolumnsetstart setupcombinations setupcombinedlist \
+setupcomment setupdescriptions setupenumerations setupexternalfigures setupfield \
+setupfields setupfillinlines setupfillinrules setupfloat setupfloats \
+setupfloatsplitting setupfonthandling setupfontsynonym setupfooter setupfootertexts \
+setupfootnotedefinition setupfootnotes setupforms setupformulas setupframed \
+setupframedtexts setuphead setupheader setupheadertexts setupheadnumber \
+setupheads setupheadtext setuphyphenmark setupindentedtext setupindenting \
+setupinmargin setupinteraction setupinteractionbar setupinteractionmenu setupinteractionscreen \
+setupinterlinespace setupinterlinespace2 setupitemgroup setupitems setuplabeltext \
+setuplanguage setuplayout setuplegend setuplinenumbering setuplines \
+setuplinewidth setuplist setuplistalternative setupmakeup setupmarginblocks \
+setupmarginrules setupmarking setupmathalignment setupnarrower setupnumber \
+setupnumbering setupoppositeplacing setupoutput setuppagenumber setuppagenumbering \
+setuppagetransitions setuppalet setuppaper setuppapersize setupparagraphnumbering \
+setupparagraphs setupplacement setuppositioning setupprofiles setupprograms \
+setuppublications setupquote setupreferencelist setupreferencing setupregister \
+setuprotate setuprule setupscreens setupsection setupsectionblock \
+setupsorting setupspacing setupstartstop setupstrut setupsubpagenumber \
+setupsymbolset setupsynchronization setupsynchronizationbar setupsynonyms setupsystem \
+setuptab setuptables setuptabulate setuptext setuptextposition \
+setuptextrules setuptexttexts setuptextvariable setupthinrules setuptolerance \
+setuptop setuptoptexts setuptype setuptyping setupunderbar \
+setupurl setupversions setupwhitespace showbodyfont showbodyfontenvironment \
+showcolor showcolorgroup showexternalfigures showfields showframe \
+showgrid showlayout showmakeup showpalet showprint \
+showsetups showstruts showsymbolset someline somewhere \
+sort space spalte spatium spiegeln \
+splitfloat sprache startabbildung startalignment startausrichtung \
+startbackground startbuffer startcolor startcolumnmakeup startcolumns \
+startcolumnset startcombination startcomment startcomponent startdescription \
+startdocument startdokument startenger startenumeration startenvironment \
+startfact startfarbe startfigure startfloattext startformula \
+startframedtext startgegenueber startglobal startgrosserdurchschuss starthiding \
+starthintergrund startinteractionmenu startitemgroup startkleinerdurchschuss startkodierung \
+startkombination startkomponente startlegend startline startlinecorrection \
+startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
+startlokal startlokalefussnoten startmakeup startmarginalblock startmarginallinie \
+startmarginblock startmarginrule startnamemakeup startnarrower startopposite \
+startoverlay startoverview startpacked startparagraph startpositionieren \
+startpositioning startpostponing startproduct startprodukt startprofil \
+startprofile startproject startprojekt startquotation startraster \
+startregister startspalten startsymbolset startsynchronisation startsynchronization \
+starttabelle starttabellen starttable starttables starttabulate \
+starttext starttextlinie starttextrule starttyping startueberblick \
+startumbruch startumgebung startunpacked startversion startzeile \
+startzeilen startzeilenkorrektur startzeilennumerierung startzitat stelleabsaetzeein \
+stelleabsatznummerierungein stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleaufzaehlungenein \
+stelleausgabeein stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein \
+stellebilderunterschriftein stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein \
+stelledrehenein stelleduennerumrissein stelleeinziehenein stelleengerein stelleexterneabbildungenein \
+stellefarbeein stellefarbenein stellefeldein stellefelderin stellefliesstextein \
+stellefliesstextumgebungein stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein \
+stellefusszeilentextein stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein \
+stellegleitobjektein stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein \
+stelleinteraktionsbalkenein stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein \
+stellekommentarein stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein \
+stellelayoutein stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein \
+stellemarginallinieein stellenobenein stellenummerierungein stellepaletteein stellepapierformatein \
+stelleplatziegeteiltegleitobjekt stellepositionierenein stellepostenein stelleprofilein stelleprogrammein \
+stellepublikationein stellepufferein stellerasterein stellerechteckein stellereferenzierenein \
+stellereferenzlisteein stelleregisterein stelleseitenkommentarein stelleseitennummerein stelleseitennummeriernungein \
+stelleseitenuebergangein stellesortierenein stellespaltenein stellespatiumein stellespracheein \
+stellesymbolsetein stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein \
+stelletabein stelletabellenein stelletabulatorein stelletextein stelletextobenein \
+stelletexttexteein stelletextumrissein stelletextuntenein stelletipein stelletippenein \
+stelletoleranzein stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein \
+stelleumrahmtein stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein \
+stelleurlein stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein \
+stellezeilennumerierungein stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment \
+stopausrichtung stopbackground stopbuffer stopcolor stopcolumnmakeup \
+stopcolumns stopcolumnset stopcombination stopcomment stopcomponent \
+stopdescription stopdocument stopdokument stopenger stopenumeration \
+stopenvironment stopfact stopfarbe stopfigure stopfloattext \
+stopformula stopframedtext stopgegenueber stopglobal stopgrosserdurchschuss \
+stophiding stophintergrund stopinteractionmenu stopitemgroup stopkleinerdurchschuss \
+stopkodierung stopkombination stopkomponente stoplegend stopline \
+stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
+stoplocalfootnotes stoplokal stoplokalefussnoten stopmakeup stopmarginalblock \
+stopmarginallinie stopmarginblock stopmarginrule stopnamemakeup stopnarrower \
+stopopposite stopoverlay stopoverview stoppacked stopparagraph \
+stoppositionieren stoppositioning stoppostponing stopproduct stopprodukt \
+stopprofil stopprofile stopproject stopprojekt stopquotation \
+stopraster stopspalten stopsymbolset stopsynchronisation stopsynchronization \
+stoptabelle stoptabellen stoptable stoptables stoptabulate \
+stoptext stoptextlinie stoptextrule stoptyping stopueberblick \
+stopumbruch stopumgebung stopunpacked stopversion stopzeile \
+stopzeilen stopzeilenkorrektur stopzeilennumerierung stopzitat stretched \
+sub subject subpagenumber subsection subsubject \
+subsubsection subsubsubject switchtobodyfont switchtorawfont sym \
+symbol synchronisationsbalken synchronisieren synchronizationbar synchronize \
+synonym tab teilegleitobjekt testcolumn testpage \
+tex textbreite texthoehe textlinie textreference \
+textreferenz textrule textvariable thinrule thinrules \
+tief tiho tip tippedatei tippen \
+tippepuffer title tooltip translate txt \
+typ type typebuffer typefile ueber \
+ueberschrifttext uebersetzten umgebung umrahmt unbekant \
+underbar underbars unterformelnummer useURL useXMLfilter \
+useblocks usecommands usedirectory useencoding useexternaldocument \
+useexternalfigure useexternalfile useexternalfiles useexternalsoundtrack usemodule \
+usereferences usespecials usesymbols usetypescript usetypescriptfile \
+verbergebloecke vergleichefarbengruppe vergleichepalette verknuepfebeschriftung verknuepfedokument \
+verknuepfregister version verweis verweisdatum verwendeJSscript \
+verwendeURL verwendebefehl verwendebloecke verwendeexteresdokument verwendeexterneabbildung \
+verwendeexternedatei verwendeexternedateien verwendeexternestonstueck verwendemodul verwendemodule \
+verwendereferenzen verwendesymbole verwendeurl vl volleswort \
+von waehlebloeckeaus waehlepapieraus waehleversionaus wechselezumfliesstext \
+weekday whitespace wiederholen wochentag wohnort \
+wordright wortrechts writebetweenlist writetolist writetoreferencelist \
+writetoregister zeigedruck zeigeeinstellungen zeigeexterneabbildungen zeigefarbe \
+zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung zeigegitter \
+zeigelayout zeigepalette zeigerahmen zeigestruts zeigeumbruch \
+zentriert ziffern zitat zitieren zu \
+zurbox zurseite zwischenraum
+
+keywordclass.context.en=\
+CAP Cap Caps Character \
+Characters MONTH Numbers Romannumerals WEEKDAY \
+WORD WORDS Word Words about \
+adaptlayout adding appendix arg at \
+atleftmargin atpage atrightmargin background backspace \
+blackrule blackrules blank bookmark bottomdistance \
+bottomheight bottomspace but button bypassblocks \
+cap chapter character characters chem \
+clip clonefield color colorbar colorvalue \
+column comment comparecolorgroup comparepalet completecombinedlist \
+completelistoffloats completelistofsorts completelistofsynonyms completepagenumber completeregister \
+component convertnumber copyfield correctwhitespace coupledocument \
+coupledregister couplemarking couplepage couplepaper coupleregister \
+crlf currentdate currentheadnumber cutspace date \
+decouplemarking decrementnumber define defineaccent defineblank \
+defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment \
+definebuffer definecharacter definecolor definecolorgroup definecolumnbreak \
+definecolumnset definecombination definecombinedlist definecommand defineconversion \
+definedescription definedfont defineenumeration definefield definefieldstack \
+definefiguresymbol definefloat definefont definefontfeature definefonthandling \
+definefontstyle definefontsynonym defineframed defineframedtext definehbox \
+definehead defineindentedtext defineinmargin defineinteractionmenu defineitemgroup \
+definelabel definelayer definelayout definelist definelogo \
+definemainfield definemakeup definemarking definemathalignment defineoutput \
+defineoverlay definepagebreak definepalet definepapersize defineparagraphs \
+defineplacement defineprofile defineprogram definerawfont definereference \
+definereferenceformat definereferencelist defineregister definerule definesection \
+definesectionblock definesorting definestartstop definestyle definesubfield \
+definesymbol definesynonyms definetabletemplate definetabulate definetext \
+definetextbackground definetextposition definetextvariable definetype definetypeface \
+definetyping defineversion description determineheadnumber determinelistcharacteristics \
+determineregistercharacteristics dimension disableinteractionmenu domicile donttest \
+edgedistance edgewidth emptylines enumeration environment \
+externalfigure fact field fieldstack fillinfield \
+fillinline fillinrules fillintext fitfield fixedspace \
+fixedspaces followprofile followprofileversion followversion footerdistance \
+footerheight footnote footnotetext forceblocks formulanumber \
+fraction framed framedtext from getbuffer \
+getmarking getnumber godown goto gotobox \
+gotopage graycolor greyvalue grid hairline \
+head headerdistance headerheight headlevel headnumber \
+headsym headtext hideblocks high hl \
+immediatebetweenlist immediatetolist in incrementnumber indentation \
+indenting inframed infull ininner inleft \
+inleftedge inleftmargin inline inmaframed inmargin \
+inneredgedistance inneredgewidth innermargindistance innermarginwidth inothermargin \
+inouter inright inrightedge inrightmargin installlanguage \
+interactionbar interactionbuttons interactionmenu item items \
+its keepblocks label labeling labels \
+labeltext language leftaligned leftedgedistance leftedgewidth \
+leftmargindistance leftmarginwidth leg linethickness listheight \
+listlength listsymbol listwidth loadsorts loadsynonyms \
+logfields lohi low maframed mainlanguage \
+makeupheight makeupwidth mapfontsize mar margindistance \
+marginrule margintext margintitle marginwidth marginword \
+marking markversion mathematics mediaeval menubutton \
+midaligned mirror month moveformula moveongrid \
+movesidefloat name navigating nextsection nocap \
+nodimension noheaderandfooterlines noindenting nolist nomarking \
+nomoreblocks nomorefiles nop nospace note \
+notopandbottomlines nowhitespace numberofsubpages numbers outeredgedistance \
+outeredgewidth outermargindistance outermarginwidth overbar overbars \
+overstrike overstrikes packed page pagedepth \
+pagenumber pageoffset pagereference paperheight paperwidth \
+paragraph part periods placebookmarks placecombinedlist \
+placefloat placefootnotes placeformula placeheadnumber placeheadtext \
+placelegend placelist placelistoffloats placelistofsorts placelistofsynonyms \
+placelocalfootnotes placelogos placeongrid placeontopofeachother placepagenumber \
+placerawlist placereferencelist placeregister placerule placesidebyside \
+placesubformula placetextvariable position positiontext printpaperheight \
+printpaperwidth processblocks processpage product program \
+project publication quotation quote ran \
+redo ref reference referral referraldate \
+referring register remark reservefloat reset \
+resetmarking resetnumber resettextcontent rightaligned rightedgedistance \
+rightedgewidth rightmargindistance rightmarginwidth romannumerals rotate \
+scale screen section seeregister selectblocks \
+selectpaper selectversion setnumber settextcontent settextvariable \
+setupalign setupanswerarea setuparranging setupbackground setupbackgrounds \
+setupblackrules setupblank setupblock setupbodyfont setupbodyfontenvironment \
+setupbottom setupbottomtexts setupbuffer setupbuttons setupcapitals \
+setupcaption setupcaptions setupclipping setupcolor setupcolors \
+setupcolumns setupcolumnset setupcolumnsetlines setupcolumnsetstart setupcombinations \
+setupcombinedlist setupcomment setupdescriptions setupenumerations setupexternalfigures \
+setupfield setupfields setupfillinlines setupfillinrules setupfloat \
+setupfloats setupfloatsplitting setupfonthandling setupfontsynonym setupfooter \
+setupfootertexts setupfootnotedefinition setupfootnotes setupforms setupformulae \
+setupformulas setupframed setupframedtexts setuphead setupheader \
+setupheadertexts setupheadnumber setupheads setupheadtext setuphyphenmark \
+setupindentedtext setupindenting setupinmargin setupinteraction setupinteractionbar \
+setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 setupitemgroup \
+setupitemizations setupitems setuplabeltext setuplanguage setuplayout \
+setuplegend setuplinenumbering setuplines setuplinewidth setuplist \
+setuplistalternative setupmakeup setupmarginblocks setupmarginrules setupmarking \
+setupmathalignment setupnarrower setupnumber setupnumbering setupoppositeplacing \
+setupoutput setuppagecomment setuppagenumber setuppagenumbering setuppagetransitions \
+setuppalet setuppaper setuppapersize setupparagraphnumbering setupparagraphs \
+setupplacement setuppositioning setupprofiles setupprograms setuppublications \
+setupquote setupreferencelist setupreferencing setupregister setuprotate \
+setuprule setupscreens setupsection setupsectionblock setupsorting \
+setupspacing setupstartstop setupstrut setupsubpagenumber setupsymbolset \
+setupsynchronization setupsynchronizationbar setupsynonyms setupsystem setuptab \
+setuptables setuptabulate setuptext setuptextbackground setuptextposition \
+setuptextrules setuptexttexts setuptextvariable setupthinrules setuptolerance \
+setuptop setuptoptexts setuptype setuptyping setupunderbar \
+setupurl setupversions setupwhitespace showbodyfont showbodyfontenvironment \
+showcolor showcolorgroup showexternalfigures showfields showframe \
+showgrid showlayout showmakeup showpalet showprint \
+showsetups showstruts showsymbolset someline somewhere \
+sort space splitfloat startalignment startbackground \
+startbuffer startcoding startcolor startcolumnmakeup startcolumns \
+startcolumnset startcombination startcomment startcomponent startdescription \
+startdocument startenumeration startenvironment startfact startfigure \
+startfloattext startformula startframed startframedtext startglobal \
+starthiding startinteractionmenu startitemgroup startlegend startline \
+startlinecorrection startlinenumbering startlines startlocal startlocalenvironment \
+startlocalfootnotes startmakeup startmarginblock startmarginrule startnamemakeup \
+startnarrower startopposite startoverlay startoverview startpacked \
+startparagraph startpositioning startpostponing startproduct startprofile \
+startproject startquotation startraster startregister startsymbolset \
+startsynchronization starttable starttables starttabulate starttext \
+starttextbackground starttextrule starttyping startunpacked startversion \
+stopalignment stopbackground stopbuffer stopcoding stopcolor \
+stopcolumnmakeup stopcolumns stopcolumnset stopcombination stopcomment \
+stopcomponent stopdescription stopdocument stopenumeration stopenvironment \
+stopfact stopfigure stopfloattext stopformula stopframed \
+stopframedtext stopglobal stophiding stopinteractionmenu stopitemgroup \
+stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
+stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \
+stopmarginrule stopnamemakeup stopnarrower stopopposite stopoverlay \
+stopoverview stoppacked stopparagraph stoppositioning stoppostponing \
+stopproduct stopprofile stopproject stopquotation stopraster \
+stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
+stoptext stoptextbackground stoptextrule stoptyping stopunpacked \
+stopversion stretched sub subformulanumber subject \
+subpagenumber subsection subsubject subsubsection subsubsubject \
+switchtobodyfont switchtorawfont sym symbol symoffset \
+synchronizationbar synchronize synonym tab testcolumn \
+testpage tex textheight textreference textrule \
+textvariable textwidth thinrule thinrules title \
+tooltip topdistance topheight topspace totalnumberofpages \
+translate txt typ type typebuffer \
+typefile underbar underbars unitmeaning unknown \
+useJSscripts useURL useXMLfilter useblocks usecommands \
+usedirectory useencoding useexternaldocument useexternalfigure useexternalfile \
+useexternalfiles useexternalsoundtrack usemodule usemodules usereferences \
+usespecials usesymbols usetypescript usetypescriptfile useurl \
+version vl weekday whitespace wordright \
+writebetweenlist writetolist writetoreferencelist writetoregister
+
+keywordclass.context.fr=\
+CAP Cap Caps Caractere \
+Caracteres Character Characters Chiffresromains JOURSEMAINE \
+MOIS MONTH MOT MOTS Mot \
+Mots Numeros Romannumerals WEEKDAY WORD \
+WORDS Word Words a about \
+adaptedisposition adaptlayout affectenumero affectevariabletexte ajustechamp \
+alaligne alapage aligneadroite aligneagauche aligneaumilieu \
+appendix arg arriereplan at atleftmargin \
+atpage atrightmargin background baha barrecouleur \
+barreinteraction barresynchronisation bas blackrule blackrules \
+blank bookmark bouton boutonmenu boutonsinteraction \
+but button bypassblocks cacheblocs cap \
+caractere caracteres champ changepolicebrute changepolicecorps \
+chapter character characters chem chiffresromains \
+citation citer clip clonechamp clonefield \
+colonne color column comment commentaire \
+comparecolorgroup comparegroupecouleur comparepalet comparepalette completecombinedlist \
+completelistoffloats completelistofsorts completelistofsynonyms completenumeropage completeregister \
+completeregistre composant composeenalinea concernant convertitnumero \
+convertnumber copitchamp copyfield correctwhitespace corrigeespaceblanc \
+couleur couleurgrise coupledocument coupledregister couplemarking \
+couplemarquage couplepage couplepaper couplepapier coupleregister \
+coupleregistre crlf currentdate currentheadnumber cutspace \
+dactylographier dans dansautremarge dansborddroit dansbordgauche \
+dansdroite dansgauche dansmarge dansmargedroite dansmargegauche \
+date datecourante daterecommandation de decouplemarking \
+decouplemarquage decrementenumero define defineblank defineblock \
+definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment definebuffer \
+definecolor definecolorgroup definecombination definecombinedlist defineconversion \
+definedescription definedfont defineenumeration definefield definefieldstack \
+definefiguresymbol definefloat definefont definefontfeature definefonthandling \
+definefontsynonym defineframed defineframedtext definehead defineindentedtext \
+defineinteractionmenu defineitemgroup definelabel definelist definelogo \
+definemakeup definemarking definemathalignment defineoutput defineoverlay \
+definepalet definepapersize defineparagraphs defineplacement defineprofile \
+defineprogram definereference definereferenceformat definereferencelist defineregister \
+definesection definesectionblock definesorting definestartstop definesubfield \
+definesymbol definesynonyms definetabletemplate definetabulate definetext \
+definetype definetypeface definetyping defineversion definicaractere \
+definit definitaccent definitbloc definitblocsection definitbuffer \
+definitcalque definitchamp definitchampprincipal definitcommande definitconversion \
+definitcouleur definitdactylo definitdansmarge definitdemarrestoppe definitdescription \
+definitdisposition definitenumeration definitenvironnementpolicecorps definitetiquette definitflottant \
+definitformatreference definitgroupecouleur definithbox definitjeucolonne definitliste \
+definitlisteimbriquee definitlistereference definitlogo definitmakeup definitmarquage \
+definitmenuinteraction definitnotepdp definitpalette definitparagraphes definitpilechamp \
+definitpolice definitpolicebrute definitpolicecorps definitpositiontexte definitprofil \
+definitprogramme definitreference definitregistre definitregle definitrevetement \
+definitsautdecolonne definitsautdepage definitsection definitsortie definitsouschamp \
+definitstyle definitstylepolice definitsymbole definitsymbolefigure definitsynonymepolice \
+definitsynonymes definittabulation definittaillepapier definittete definittexte \
+definittrametableau definittri definittype definitvariabletexte definitversion \
+definitvide demarrealignement demarrearriereplan demarreblocmarge demarrecitation \
+demarreciter demarrecodage demarrecolonnes demarrecombinaison demarrecompoetroite \
+demarrecomposant demarrecorrectionligne demarrecouleur demarredegroupe demarredocument \
+demarreenvironement demarrefigure demarreglobal demarregroupe demarrejeucolonne \
+demarrejeusymboles demarreligne demarreligneregleetexte demarrelignes demarrelocal \
+demarremakeup demarremargereglee demarrenotespdplocales demarrenumerotationligne demarreopposition \
+demarrepositionnement demarreproduit demarreprofil demarreprojet demarreraster \
+demarrerevetement demarresynchronisation demarretableau demarretableaux demarretexte \
+demarreversion demarrevuedensemble deplaceformule deplacesurgrille description \
+determinecaracteristiqueliste determinecaracteristiquesregistre determineheadnumber determinelistcharacteristics determinenumerotete \
+dimension disableinteractionmenu distancebord distanceborddroit distancebordgauche \
+distanceentete distanceinf distancemarge distancemargedroite distancemargegauche \
+distancepdp distancesup domicile echelle ecran \
+ecritdansliste ecritdanslistereference ecritentreliste ecritregistre el \
+element elements emptylines enumeration environement \
+espace espaceblanc espacefixe espaceinf espacesfixes \
+espacesup etiquette etiquettes etire externalfigure \
+fait faitreference fichierdactylo field fieldstack \
+figureexterne fillinfield fillinline fillinrules fillintext \
+fitfield fixedspaces followprofile followprofileversion followversion \
+footnote footnotetext forceblocks forceblocs fraction \
+framed framedtext from gardeblocs getbuffer \
+getmarking getnumber godown goto gotobox \
+graycolor grid grille groupe hairline \
+haut hauteureditionpapier hauteurentete hauteurinf hauteurliste \
+hauteurmakeup hauteurpapier hauteurpdp hauteursup hauteurtexte \
+head headnumber headsym headtext hideblocks \
+high hl immediatebetweenlist immediatetolist in \
+inconnu incrementenumero indentation indenting inframed \
+infull inhibemenuinteraction ininner inleft inline \
+inmargin inmframed inneredgedistance inneredgewidth innermargindistance \
+innermarginwidth inothermargin inouter inright installelangue \
+installlanguage interactionbar interactionbuttons item items \
+its joursemaine keepblocks labeling labels \
+labeltext labeltexte language langue langueprincipale \
+largeurbord largeurborddroit largeurbordgauche largeureditionpapier largeurligne \
+largeurliste largeurmakeup largeurmarge largeurmargedroite largeurmargegauche \
+largeurpapier largeurtexte leftaligned leg ligneh \
+lignenoire ligneregleetexte lignesnoires listesymbole listsymbol \
+llongueurliste loadsorts loadsynonyms logchamp logfields \
+lohi low mainlanguage mapfontsize mar \
+margereglee marginrule margintext marking markversion \
+marquage marquageversion marquepage mathematics mathematique \
+mediaeval menuinteraction mframed midaligned mirror \
+mois month montrecadre montrechamps montrecouleur \
+montredisposition montreedition montreenvironnementpolicecorps montrefiguresexternes montregrille \
+montregroupecouleur montrejeusymboles montremakeup montrepalette montrepolicecorps \
+montrereglages montrestruts motdroit motmarge moveongrid \
+movesidefloat name navigating nextsection niveautete \
+nocap noheaderandfooterlines noindenting nolist nomarking \
+nombredesouspages nombretotaldepages nommacro nomoreblocks nomorefiles \
+nop nospace note notepdp notopandbottomlines \
+nowhitespace numbers numeroformule numeropage numeros \
+numerosousformule numerotete numerotetecourant obtientmarquage oriente \
+outeredgedistance outeredgewidth outermargindistance outermarginwidth overbar \
+overbars overstrike overstrikes packed page \
+pagedepth pagedouble pageoffset pagereference paragraph \
+part pasplusdeblocs pasplusdefichiers periodes periods \
+pilechamp placebookmarks placecombinedlist placecoteacote placefloat \
+placeflottant placefootnotes placeformula placeformule placelegend \
+placelegende placelesunsaudessusdesautres placelist placeliste placelisteinmbriquee \
+placelistereference placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
+placelogos placemarquespages placenotespdp placenotespdplocales placenumeropage \
+placenumerotete placeongrid placeontopofeachother placerawlist placereferencelist \
+placeregister placeregistre placeregle placesidebyside placesousformule \
+placesubformula placesurgrille placetextetete placevariabletexte position \
+positionnetexte prendbuffer processblocks processpage produit \
+program programme projet publication qqpart \
+quotation quote ran raz razmarquage \
+raznumero recommandation ref refait reference \
+referencepage referencetexte referral referraldate reflete \
+register reglealignement reglearrangement reglearriereplan reglearriereplans \
+reglebarreinteraction reglebarresynchronisation reglebloc regleblocmarge regleblocsection \
+regleboutons reglebuffer reglecapitales reglechamp reglechamps \
+regleclipping reglecolonnes reglecombinaisons reglecommentaire reglecommentairepage \
+reglecompoetroite reglecomposeenalinea reglecouleur reglecouleurs regledactylo \
+regledansmarge regledemarrestoppe regledescriptions regledisposition regleecraninteraction \
+regleecrans regleelements regleencadre regleentete regleenumerations \
+regleenvironnementpolicecorps regleepaisseurligne regleespaceblanc regleespacement regleespacementinterligne \
+reglefiguresexternes regleflottant regleflottants regleformulaires regleformules \
+reglegroupeselements regleinf regleinteraction regleintitule regleintitules \
+reglejeucolonne reglejeusymboles reglelabeltexte reglelangue reglelegende \
+reglelignes reglelignesnoires reglelignesreglestexte regleliste reglelisteimbriquee \
+reglelistereference reglemakeup reglemargereglee reglemarquage reglemarquagehyphenation \
+reglemenuinteraction reglenotepdp reglenumero reglenumeropage reglenumerotation \
+reglenumerotationligne reglenumerotationpage reglenumerotationparagraphe reglenumerotete regleoriente \
+reglepalette reglepapier regleparagraphes reglepdp regleplacementopposition \
+reglepolicecorps reglepositionnement reglepositiontexte regleprofils regleprogrammes \
+reglepublications reglereferencage regleregistre regleregle regleremplitligne \
+regleremplitlignesreglees reglesection regleseparationflottant reglesortie reglesouslignage \
+reglesousnumeropage reglestrut reglesup reglesynchronisation reglesynonymes \
+reglesysteme regletab regletableaux regletabulation regletaillepapier \
+regletete regletetes regletexte regletextesentete regletextesinf \
+regletextespdp regletextessup regletextestexte regletextetete regletolerance \
+regletraitsfins regletransitionspage regletri regletype regleurl \
+reglevariabletexte regleversions remplitchamp remplitligne remplitlignesreglees \
+remplittexte reservefloat reset resetmarking resettextcontent \
+retourarriere rightaligned romannumerals rotate sansalinea \
+sansdimension sansespace sansespaceblanc sanslignesenteteetpdp sanslignessupetinf \
+sansliste sansmarquage sanstest sauteblocs scale \
+screen section seeregister selectblocks selectionneblocs \
+selectionnepapier selectionneversion selectpaper selectversion sensunite \
+separeflottant settext settextcontent setupalign setupanswerarea \
+setuparranging setupbackground setupbackgrounds setupblackrules setupblank \
+setupblock setupbodyfont setupbodyfontenvironment setupbottom setupbottomtexts \
+setupbuffer setupbuttons setupcapitals setupcaption setupcaptions \
+setupclipping setupcolor setupcolors setupcolumns setupcolumnsetlines \
+setupcolumnsetstart setupcombinations setupcombinedlist setupcomment setupdescriptions \
+setupenumerations setupexternalfigures setupfield setupfields setupfillinlines \
+setupfillinrules setupfloat setupfloats setupfloatsplitting setupfonthandling \
+setupfontsynonym setupfooter setupfootertexts setupfootnotedefinition setupfootnotes \
+setupforms setupformulas setupframed setupframedtexts setuphead \
+setupheader setupheadertexts setupheadnumber setupheads setupheadtext \
+setuphyphenmark setupindentedtext setupindenting setupinmargin setupinteraction \
+setupinteractionbar setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 \
+setupitemgroup setupitems setuplabeltext setuplanguage setuplayout \
+setuplegend setuplinenumbering setuplines setuplinewidth setuplist \
+setuplistalternative setupmakeup setupmarginblocks setupmarginrules setupmarking \
+setupmathalignment setupnarrower setupnumbering setupoppositeplacing setupoutput \
+setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper \
+setuppapersize setupparagraphnumbering setupparagraphs setupplacement setuppositioning \
+setupprofiles setupprograms setuppublications setupquote setupreferencelist \
+setupreferencing setupregister setuprotate setupscreens setupsection \
+setupsectionblock setupsorting setupspacing setupstrut setupsubpagenumber \
+setupsymbolset setupsynchronization setupsynchronizationbar setupsynonyms setupsystem \
+setuptab setuptables setuptabulate setuptext setuptextrules \
+setuptexttexts setupthinrules setuptolerance setuptop setuptoptexts \
+setuptype setuptyping setupunderbar setupurl setupversions \
+setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup \
+showexternalfigures showfields showframe showgrid showlayout \
+showmakeup showpalet showprint showsetups showstruts \
+showsymbolset someline somewhere sort sousnumeropage \
+space splitfloat startalignment startbackground startbuffer \
+startcolor startcolumnmakeup startcolumns startcombination startcomment \
+startcomponent startdescription startdocument startenumeration startenvironment \
+startfact startfigure startfloattext startformula startframedtext \
+starthiding startinteractionmenu startitemgroup startlegend startline \
+startlinecorrection startlinenumbering startlines startlocal startlocalenvironment \
+startlocalfootnotes startmakeup startmarginblock startmarginrule startnamemakeup \
+startnarrower startopposite startoverlay startoverview startpacked \
+startparagraph startpositioning startpostponing startproduct startprofile \
+startproject startquotation startregister startsymbolset startsynchronization \
+starttable starttables starttabulate starttextrule starttyping \
+startunpacked startversion stopalignment stopbackground stopbuffer \
+stopcolor stopcolumnmakeup stopcolumns stopcombination stopcomment \
+stopcompoetroite stopcomponent stopdescription stopdocument stopenumeration \
+stopenvironment stopfact stopfigure stopfloattext stopformula \
+stopframedtext stophiding stopinteractionmenu stopitemgroup stoplegend \
+stopline stoplinecorrection stoplinenumbering stoplines stoplocal \
+stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmarginrule \
+stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \
+stoppacked stopparagraph stoppealignement stoppearriereplan stoppeblocmarge \
+stoppecitation stoppecodage stoppecolonnes stoppecombinaison stoppecomposant \
+stoppecorrectionligne stoppecouleur stoppedegroupe stoppedocument stoppeenvironement \
+stoppeglobal stoppegroupe stoppejeucolonne stoppeligne stoppeligneregleetexte \
+stoppelignes stoppelocal stoppemakeup stoppemargereglee stoppenotespdplocales \
+stoppenumerotationligne stoppeopposition stoppepositionnement stoppeproduit stoppeprofil \
+stoppeprojet stopperaster stopperevetement stoppesynchronisation stoppetableau \
+stoppetableaux stoppetexte stoppeversion stoppevuedensemble stoppositioning \
+stoppostponing stopproduct stopprofile stopproject stopquotation \
+stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
+stoptextrule stoptyping stopunpacked stopversion stretched \
+sub subject subsection subsubject subsubsection \
+subsubsubject suggestion suivantprofil suivantversion suivantversionprofil \
+switchtobodyfont sym symbol symbole synchronise \
+synchronizationbar synchronize synonym tab tapebuffer \
+testcolumn testpage tete tex textemarge \
+textenotepdp textetete textreference textrule thinrule \
+thinrules title titremarge tooltip traduire \
+traiteblocs traitepage traitfin traitsfins translate \
+txt typ type typebuffer typefile \
+underbar underbars uneligne useURL useXMLfilter \
+useblocks usecommands usedirectory useencoding useexternaldocument \
+useexternalfigure useexternalfile useexternalfiles useexternalsoundtrack usemodule \
+usereferences usespecials usesymbols usetypescript usetypescriptfile \
+utiliseJSscripts utiliseURL utiliseblocs utilisechemin utilisecommandes \
+utilisedocumentexterne utiliseencodage utilisefichierexterne utilisefichiersexternes utilisefigureexterne \
+utilisemodule utilisemodules utilisepsiteaudioexterne utilisereferences utilisespecialites \
+utilisesymboles utiliseurl va vaalaboite vaalapage \
+vaenbas valeurcouleur valeurgris variabletexte version \
+vide vl weekday whitespace wordright \
+writebetweenlist writetolist writetoreferencelist writetoregister
+
+keywordclass.context.it=\
+CAP Cap Caps Character \
+Characters GIORNOSETTIMANA Lettera Lettere MESE \
+MONTH Numeri Numeriromani PAROLA PAROLE \
+Parola Parole Romannumerals WEEKDAY WORD \
+WORDS Word Words about accoppiacarta \
+accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adaptlayout \
+adattacampo adattalayout al allineacentro allineadestra \
+allineasinistra altezzacarta altezzacartastampa altezzacima altezzaelenco \
+altezzafondo altezzaintestazione altezzamakeup altezzapdp altezzatesto \
+ambiente ampiezzabordo ampiezzabordodestro ampiezzabordosinistro ampiezzacarta \
+ampiezzacartastampa ampiezzaelenco ampiezzamakeup ampiezzamargine ampiezzamarginedestro \
+ampiezzamarginesinistro ampiezzatesto ap apagina appendix \
+arg at atleftmargin atpage atrightmargin \
+background barracolori barrainterazione barrasincronizzazione bastablocchi \
+bastafile blackrule blackrules blank bookmark \
+but button bypassblocks cambiaafontdeltesto campi \
+camporiempimento cap capello chapter character \
+characters chem chim circondato citazione \
+clip clonacampo clonefield colonna color \
+colore coloregrigio column comment commento \
+comparecolorgroup comparepalet completecombinedlist completelistoffloats completelistofsorts \
+completelistofsynonyms completeregister componenet confrontagruppocolori confrontatavolozza \
+convertinumero convertnumber copiacampo copyfield correctwhitespace \
+correggispaziobianco coupledocument coupledregister couplemarking couplepage \
+couplepaper coupleregister crlf currentdate currentheadnumber \
+cutspace da daqualcheparte data datadioggi \
+datareferral date decouplemarking decrementnumber define \
+defineblank defineblock definebodyfont definebodyfontDEF definebodyfontREF \
+definebodyfontenvironment definebuffer definecolor definecolorgroup definecolumnbreak \
+definecombination definecombinedlist defineconversion definedescription definedfont \
+defineenumeration definefield definefieldstack definefiguresymbol definefloat \
+definefont definefontfeature definefonthandling definefontsynonym defineframed \
+defineframedtext definehead defineindentedtext defineinmargin defineinteractionmenu \
+defineitemgroup definelabel definelayer definelist definelogo \
+definemakeup definemarking definemathalignment defineoutput defineoverlay \
+definepagebreak definepalet definepapersize defineparagraphs defineplacement \
+defineprofile defineprogram definereference definereferenceformat definereferencelist \
+defineregister definesection definesectionblock definesorting definestartstop \
+definesubfield definesymbol definesynonyms definetabletemplate definetabulate \
+definetext definetype definetypeface definetyping defineversion \
+definisci definisciaccento definisciambientefontdeltesto definisciblocco definiscibloccosezione \
+definiscibuffer definiscicampo definiscicampoprincipale definiscicapoversi definiscicarattere \
+definiscicolore definiscicomando definisciconversione definiscidescrizione definiscidimensionicarta \
+definiscielenco definiscielencocombinato definiscienumerazione definiscietichetta definiscifigurasimbolo \
+definiscifont definiscifontdeltesto definiscifontgrezzo definisciformatoriferimento definiscigruppocolonne \
+definiscigruppocolori definiscihbox definisciincorniciato definisciiniziatermina definiscilayout \
+definiscilinea definiscilistariferimenti definiscilogo definiscimakeup definiscimarcatura \
+definiscimenuinterazione definiscimodellotabella definiscioggettomobile definisciordinamento definiscioutput \
+definisciposizionetesto definisciprofilo definisciprogramma definisciregistro definisciriferimento \
+definiscirigovuoto definiscisezione definiscisimbolo definiscisinonimi definiscisinonimofont \
+definiscisottocampo definiscisovrapposizione definiscistackcampi definiscistile definiscistilefont \
+definiscitabulato definiscitavolozza definiscitesta definiscitesto definiscitestoincorniciato \
+definiscitype definiscityping definiscivariabiletesto definisciversion description \
+determinacaratteristicheregistro determinacarattersticheelenco determinanumerotesta determineheadnumber determinelistcharacteristics \
+dimensione disabilitamenuinterazione disableinteractionmenu distanzabordo distanzabordodestro \
+distanzabordosinistro distanzacima distanzafondo distanzaintestazione distanzamargine \
+distanzamarginedestro distanzamarginesinistro distanzapdp domicilio el \
+elaborablocchi elaborapagina elementi elemento emptylines \
+enumeration etichetta etichette externalfigure fatto \
+field fieldstack figuraesterna fillinfield fillinline \
+fillinrules fillintext fitfield fixedspaces followprofile \
+followprofileversion followversion fondo footnote footnotetext \
+forceblocks forzablocchi fraction framed framedtext \
+frazione from getbuffer getmarking getnumber \
+giornosettimana godown goto gotobox graycolor \
+grid griglia hairline head headnumber \
+headsym headtext hideblocks high hl \
+ignoto immediatebetweenlist immediatetolist impaccato impostaallineamento \
+impostaambientefontdeltesto impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine \
+impostablocco impostabloccosezione impostabuffer impostacampi impostacampo \
+impostacapoversi impostacaption impostacaptions impostacima impostaclippling \
+impostacolonne impostacolore impostacolori impostacombinazioni impostacommento \
+impostacommentopagina impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi \
+impostaelencazioni impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne \
+impostafondo impostafontdeltesto impostaforms impostaformule impostagruppocolonne \
+impostaincorniciato impostainiziatermina impostainmargine impostainstestazione impostainterazione \
+impostainterlinea impostalayout impostalegenda impostalinea impostalineemargine \
+impostalineenere impostalineeriempimento impostalineesottili impostalineetesto impostalingua \
+impostalistariferimenti impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione \
+impostamenzione impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina \
+impostanumerazionerighe impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili \
+impostaoggettomobile impostaordinamento impostaoutput impostaparranging impostapdp \
+impostapiustretto impostaposizionamento impostaposizionamentoopposti impostaposizionetesto impostaprofili \
+impostaprogrammi impostapubblicazioni impostapulsanti impostaregistro impostarientro \
+impostariferimento impostarighe impostarigheriempimento impostarigovuoto impostarotazione \
+impostaschermi impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione \
+impostasfondi impostasfondo impostasincronizzazione impostasinonimi impostasistema \
+impostasottolinea impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut \
+impostatab impostatabelle impostatabulato impostatavolozza impostatesta \
+impostateste impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni \
+impostatestipdp impostatesto impostatestoetichette impostatestointestazioni impostatestotesti \
+impostatolleranza impostatransizionepagina impostatype impostatyping impostaurl \
+impostavariabiletesto impostaversioni in inaltromargine incorniciato \
+incrementanumero indentation indenting indestra inframed \
+ininner iniziaallineamento iniziaambiente iniziabloccomargine iniziacitazione \
+iniziacodifica iniziacolonne iniziacolore iniziacombinazione iniziacomponente \
+iniziacorrezioneriga iniziadocumento iniziafigura iniziaglobale iniziagruppocolonne \
+iniziaimpaccato inizialineamargine inizialineatesto inizialocale iniziamakeup \
+inizianotepdplocali inizianumerazionerighe iniziaopposto iniziaoverview iniziapiustretto \
+iniziaposizionamento iniziaprodotto iniziaprofilo iniziaprogetto iniziaraster \
+iniziariga iniziarighe iniziasetsimboli iniziasfondo iniziasincronizzazione \
+iniziasovrapposizione iniziatabella iniziatabelle iniziatesto iniziaunpacked \
+iniziaversione inlatodestro inlatosinistro inleft inline \
+inmaframed inmargin inmargine inmarginedestro inmarginesinistro \
+inneredgedistance inneredgewidth innermargindistance innermarginwidth inothermargin \
+inouter inriga inright insinistra installalingua \
+installlanguage interactionbar interactionbuttons intorno item \
+items its keepblocks labeling labels \
+labeltext language leftaligned leg lettera \
+lettere lineamargine lineanera lineasottile lineatesto \
+lineenere lineeriempimento lineesottili lingua linguaprincipale \
+listsymbol livellotesta loadsorts loadsynonyms logcampi \
+logfields lohi low lunghezzaelenco maframed \
+mainlanguage mapfontsize mar marcatura marcaversione \
+marginrule margintext marking markversion matematica \
+mathematics mediaeval menuinterattivo menzione mese \
+mettielenco mettielencocombinato mettifiancoafianco mettiformula mettiingriglia \
+mettilegenda mettilinea mettiloghi mettinotepdp mettinotepdplocali \
+mettinumeropagina mettiregistro mettisegnalibro mettisottoformula mettiunosullaltro \
+mettivariabiletesto midaligned mirror month mostraambientefontdeltesto \
+mostracampi mostracolore mostracornice mostrafiguresterne mostrafontdeltesto \
+mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup \
+mostrasetsimboli mostrastampa mostrastruts mostratavolozza moveongrid \
+movesidefloat name nascondiblocchi navigating nextsection \
+nientedimensioni nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura \
+nienterientro nientespazio nientespaziobianco nocap noheaderandfooterlines \
+noindenting nolist nomarking nome nomeunita \
+nomoreblocks nomorefiles nop nospace nota \
+notapdp note notest notopandbottomlines nowhitespace \
+numberofsubpages numbers numeri numeriromani numeroformula \
+numeropagina numeropaginacompleto numerosottoformula numerotesta numerotestacorrente \
+numerototaledipagine outeredgedistance outeredgewidth outermargindistance outermarginwidth \
+overbar overbars overstrike overstrikes packed \
+page pagedepth pageoffset pagereference pagina \
+paragraph paroladestra parolainmargine part passaafontgrezzo \
+ped pedap periods perlungo placebookmarks \
+placecombinedlist placefloat placefootnotes placeformula placelegend \
+placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
+placelogos placeongrid placeontopofeachother placerawlist placereferencelist \
+placeregister placesidebyside placesubformula position posizionanumerotesta \
+posizionatesto posizionatestotesta posizione prendibuffer prendimarcatura \
+processblocks processpage prodotto progetto program \
+programma pubblicazione publication pulsante pulsantemenu \
+pulsantinterazione punti qualcheriga quotation quote \
+ran ref reference referral referraldate \
+referring register reimposta reimpostamarcatura reservefloat \
+reset resetmarking resetnumber resettextcontent rientro \
+rif rifai riferimento riferimentopagina riferimentotesto \
+riflessione rigariempimento rightaligned rigovuoto romannumerals \
+rotate ruota saltablocchi scala scale \
+schermo screen scrividentroelenco scriviinelenco scriviinlistariferimenti \
+scriviinregistro section seeregister segnalibro seguiprofilo \
+seguiversione seguiversioneprofilo selectblocks selectpaper selectversion \
+selezionablocchi selezionacarta selezionaversione separamarcatura setnumber \
+settext settextcontent setupalign setupanswerarea setuparranging \
+setupbackground setupbackgrounds setupblackrules setupblank setupblock \
+setupbodyfont setupbodyfontenvironment setupbottom setupbottomtexts setupbuffer \
+setupbuttons setupcapitals setupcaption setupcaptions setupclipping \
+setupcolor setupcolors setupcolumns setupcolumnsetlines setupcolumnsetstart \
+setupcombinations setupcombinedlist setupcomment setupdescriptions setupenumerations \
+setupexternalfigures setupfield setupfields setupfillinlines setupfillinrules \
+setupfloat setupfloats setupfloatsplitting setupfonthandling setupfontsynonym \
+setupfooter setupfootertexts setupfootnotedefinition setupfootnotes setupforms \
+setupformulas setupframed setupframedtexts setuphead setupheader \
+setupheadertexts setupheadnumber setupheads setupheadtext setuphyphenmark \
+setupindentedtext setupindenting setupinmargin setupinteraction setupinteractionbar \
+setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 setupitemgroup \
+setupitems setuplabeltext setuplanguage setuplayout setuplegend \
+setuplinenumbering setuplines setuplinewidth setuplist setuplistalternative \
+setupmakeup setupmarginblocks setupmarginrules setupmarking setupmathalignment \
+setupnarrower setupnumbering setupoppositeplacing setupoutput setuppagenumber \
+setuppagenumbering setuppagetransitions setuppalet setuppaper setuppapersize \
+setupparagraphnumbering setupparagraphs setupplacement setuppositioning setupprofiles \
+setupprograms setuppublications setupquote setupreferencelist setupreferencing \
+setupregister setuprotate setupscreens setupsection setupsectionblock \
+setupsorting setupspacing setupstrut setupsubpagenumber setupsymbolset \
+setupsynchronization setupsynchronizationbar setupsynonyms setupsystem setuptab \
+setuptables setuptabulate setuptext setuptextrules setuptexttexts \
+setupthinrules setuptolerance setuptop setuptoptexts setuptype \
+setuptyping setupunderbar setupurl setupversions setupwhitespace \
+setvariabiletesto sfondo showbodyfont showbodyfontenvironment showcolor \
+showcolorgroup showexternalfigures showfields showframe showgrid \
+showlayout showmakeup showpalet showprint showsetups \
+showstruts showsymbolset sim simbolo sincronizza \
+someline somewhere sort space spazifissi \
+spazio spaziobianco spaziocima spaziodietro spaziofisso \
+spaziofondo spessoreriga spezzaoggettomobile splitfloat spostaagriglia \
+spostaformula stackcampi startalignment startbackground startbuffer \
+startcolor startcolumnmakeup startcolumns startcombination startcomment \
+startcomponent startdescription startdocument startenumeration startenvironment \
+startfact startfigure startfloattext startformula startframedtext \
+starthiding startinteractionmenu startitemgroup startlegend startline \
+startlinecorrection startlinenumbering startlines startlocal startlocalenvironment \
+startlocalfootnotes startmakeup startmarginblock startmarginrule startnamemakeup \
+startnarrower startopposite startoverlay startoverview startpacked \
+startparagraph startpositioning startpostponing startproduct startprofile \
+startproject startquotation startregister startsymbolset startsynchronization \
+starttable starttables starttabulate starttextrule starttyping \
+startunpacked startversion stirato stopalignment stopbackground \
+stopbuffer stopcolor stopcolumnmakeup stopcolumns stopcombination \
+stopcomment stopcomponent stopdescription stopdocument stopenumeration \
+stopenvironment stopfact stopfigure stopfloattext stopformula \
+stopframedtext stophiding stopinteractionmenu stopitemgroup stoplegend \
+stopline stoplinecorrection stoplinenumbering stoplines stoplocal \
+stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmarginrule \
+stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \
+stoppacked stopparagraph stoppositioning stoppostponing stopproduct \
+stopprofile stopproject stopquotation stopsymbolset stopsynchronization \
+stoptable stoptables stoptabulate stoptextrule stoptyping \
+stopunpacked stopversion stretched sub subject \
+subpagenumber subsection subsubject subsubsection subsubsubject \
+switchtobodyfont sym symbol synchronizationbar synchronize \
+synonym tab terminaallineamento terminaambiente terminabloccomargine \
+terminacitazione terminacodifica terminacolonne terminacolore terminacombinazione \
+terminacomponente terminacorrezioneriga terminadocumento terminaglobale terminagruppocolonne \
+terminaimpaccato terminalineamargine terminalineatesto terminalocale terminamakeup \
+terminanotepdplocali terminanumerazionerighe terminaopposto terminaoverview terminapiustretto \
+terminaposizionamento terminaprodotto terminaprofili terminaprogetto terminaraster \
+terminariga terminarighe terminasfondo terminasincronizzazione terminasovrapposizione \
+terminatabella terminatabelle terminatesto terminaunpacked terminaversioni \
+testa testcolumn testoetichetta testoinmargine testoinstestazioni \
+testonotapdp testoriempimento testpage tex textreference \
+textrule thinrule thinrules tieniblocchi title \
+titoloinmargine tooltip traduci translate txt \
+typ type typebuffer typefile underbar \
+underbars usaJSscripts usaURL usablocco usacartella \
+usacodifica usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna \
+usafileesterni usafileesterno usamoduli usamodulo usariferimenti \
+usasimboli usaspecialita usaurl useURL useXMLfilter \
+useblocks usecommands usedirectory useencoding useexternaldocument \
+useexternalfigure useexternalfile useexternalfiles useexternalsoundtrack usemodule \
+usereferences usespecials usesymbols usetypescript usetypescriptfile \
+vaia vaiabox vaiapagina vaigiu valorecolore \
+valoregrigio variabiletesto version versione vl \
+weekday whitespace wordright writebetweenlist writetolist \
+writetoreferencelist writetoregister
+
+keywordclass.context.nl=\
+CAP Cap Caps Character \
+Characters Cijfers KAP Kap Kaps \
+Letter Letters MAAND MONTH Romannumerals \
+Romeins WEEKDAG WEEKDAY WOORD WOORDEN \
+WORD WORDS Woord Woorden Word \
+Words aantalsubpaginas about achtergrond adaptlayout \
+appendix arg at atpage background \
+bepaalkopnummer bepaallijstkenmerken bepaalregisterkenmerken betekenis binnenmargeafstand \
+binnenmargebreedte binnenrandafstand binnenrandbreedte blackrule blackrules \
+blank blanko blokje blokjes blokkeerinteractiemenu \
+bodemwit bookmark bovenafstand bovenhoogte breuk \
+buitenmargeafstand buitenmargebreedte buitenrandafstand buitenrandbreedte but \
+button bypassblocks cap chapter character \
+characters chem cijfers citaat citeer \
+clip clonefield color column comment \
+comparecolorgroup comparepalet completecombinedlist completelistoffloats completelistofsorts \
+completelistofsynonyms completeregister converteernummer convertnumber copieerveld \
+copyfield correctwhitespace corrigeerwitruimte coupledocument coupledregister \
+couplemarking couplepage couplepaper coupleregister crlf \
+currentdate currentheadnumber date datum decouplemarking \
+define defineblank defineblock definebodyfont definebodyfontDEF \
+definebodyfontREF definebodyfontenvironment definebuffer definecolor definecolorgroup \
+definecombinedlist defineconversion definedescription definedfont defineenumeration \
+definefield definefieldstack definefiguresymbol definefloat definefont \
+definefontfeature definefonthandling definefontsynonym defineframed defineframedtext \
+definehead defineindentedtext defineinteractionmenu definelabel definelist \
+definelogo definemakeup definemarking defineoutput defineoverlay \
+definepalet definepapersize defineparagraphs defineprofile defineprogram \
+definerawfont definereference definereferenceformat definereferencelist defineregister \
+definesection definesectionblock definesorting definestartstop definesubfield \
+definesymbol definesynonyms definetabletemplate definetabulate definetext \
+definetype definetypeface definetyping defineversion definieer \
+definieeraccent definieeralineas definieerbeeldmerk definieerblanko definieerblok \
+definieerbuffer definieercombinatie definieercommando definieerconversie definieerfiguursymbool \
+definieerfont definieerfontstijl definieerfontsynoniem definieerhbox definieerhoofdveld \
+definieeringesprongentext definieerinmarge definieerinteractiemenu definieeritemgroep definieerkadertekst \
+definieerkarakter definieerkleur definieerkleurgroep definieerkolomgroep definieerkolomovergang \
+definieerkop definieerkorps definieerkorpsomgeving definieerlayer definieerlayout \
+definieerletter definieerlijn definieerlijst definieermarkering definieeromlijnd \
+definieeropmaak definieeroverlay definieerpaginaovergang definieerpalet definieerpapierformaat \
+definieerplaats definieerplaatsblok definieerprofiel definieerprogramma definieerreferentie \
+definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst definieersectie \
+definieersectieblok definieersorteren definieerstartstop definieersubveld definieersymbool \
+definieersynoniemen definieertabelvorm definieertabulatie definieertekst definieertekstachtergrond \
+definieertekstpositie definieertekstvariabele definieertype definieertypen definieeruitvoer \
+definieerveld definieerveldstapel definieerversie definieerwiskundeuitlijnen description \
+determineheadnumber determinelistcharacteristics dimensie directnaarlijst directtussenlijst \
+disableinteractionmenu doordefinieren doorlabelen doornummeren dunnelijn \
+dunnelijnen eenregel emptylines enumeration ergens \
+externalfigure externfiguur field fieldstack fillinfield \
+fillinline fillinrules fillintext fitfield fixedspaces \
+followprofile followprofileversion followversion footnote footnotetext \
+forceblocks forceerblokken formulenummer fraction framed \
+framedtext from gebruikJSscripts gebruikURL gebruikXMLfilter \
+gebruikblokken gebruikcommandos gebruikexterndocument gebruikexternefile gebruikexternefiles \
+gebruikexternfiguur gebruikexterngeluidsfragment gebruikgebied gebruikmodule gebruikmodules \
+gebruikreferenties gebruikspecials gebruiksymbolen gebruiktypescript gebruiktypescriptfile \
+gebruikurl geenblokkenmeer geenbovenenonderregels geendimensie geenfilesmeer \
+geenhoofdenvoetregels geenlijst geenmarkering geenspatie geentest \
+geenwitruimte geg getbuffer getmarking godown \
+goto gotobox graycolor grid grijskleur \
+grijswaarde haalbuffer haalmarkering haalnummer haarlijn \
+hairline handhaafblokken head headnumber headtext \
+herhaal hideblocks high hl hoofdafstand \
+hoofdhoogte hoofdtaal hoog huidigedatum huidigekopnummer \
+in inanderemarge inbinnen inbuiten indentation \
+indenting inframed ininner inleft inlijnd \
+inline inlinker inlinkermarge inlinkerrand inmarge \
+inmargin inothermargin inouter inrechter inrechtermarge \
+inrechterrand inregel inright inspringen installeertaal \
+installlanguage interactiebalk interactiebuttons interactiemenu interactionbar \
+interactionbuttons invullijnen invulregel invultekst invulveld \
+inwilijnd item items its kantlijn \
+kap keepblocks kenmerk kenmerkdatum kentekstvariabeletoe \
+kleur kleurenbalk kleurwaarde kloonveld kolom \
+kop kopniveau kopnummer koppeldocument koppelmarkering \
+koppelpagina koppelpapier koppelregister kopsym koptekst \
+kopwit laag label labeling labels \
+labeltekst labeltext laho language leftaligned \
+leg legeregels letter letters lijndikte \
+lijstbreedte lijsthoogte lijstlengte lijstsymbool linkermargeafstand \
+linkermargebreedte linkerrandafstand linkerrandbreedte listsymbol loadsorts \
+loadsynonyms logfields lohi low maand \
+mainlanguage mapfontsize mar margeafstand margebreedte \
+margetekst margetitel margewoord marginrule margintext \
+markeer markeerversie marking markversion mathematics \
+mediaeval menubutton midaligned mirror month \
+moveongrid naam naar naarbox naarpagina \
+name navigerend nextsection nietinspringen nocap \
+noheaderandfooterlines noindenting nokap nolist nomarking \
+nomoreblocks nomorefiles noot nop nospace \
+note notopandbottomlines nowhitespace numbers omgeving \
+omlaag omlijnd onbekend onderafstand onderdeel \
+onderhoogte ontkoppelmarkering op opelkaar oplinkermarge \
+oppagina oprechtermarge overbar overbars overstrike \
+overstrikes packed page pagereference pagina \
+paginadiepte paginanummer paginaoffset paginareferentie papierbreedte \
+papierhoogte paragraph part paslayoutaan passeerblokken \
+passendveld periods plaatsbeeldmerken plaatsbookmarks plaatsformule \
+plaatskopnummer plaatskoptekst plaatslegenda plaatslijn plaatslijst \
+plaatslijstmetsynoniemen plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid \
+plaatspaginanummer plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsruwelijst \
+plaatssamengesteldelijst plaatssubformule plaatstekstvariabele plaatsvoetnoten placebookmarks \
+placecombinedlist placefloat placefootnotes placeformula placelegend \
+placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
+placelogos placeongrid placeontopofeachother placereferencelist placeregister \
+placesidebyside placesubformula position positioneer positioneertekst \
+printpapierbreedte printpapierhoogte processblocks processpage produkt \
+program programma projekt publicatie publication \
+punten quotation quote ran randafstand \
+randbreedte rechtermargeafstand rechtermargebreedte rechterrandafstand rechterrandbreedte \
+ref refereer reference referentie referral \
+referraldate regellinks regelmidden regelrechts register \
+registreervelden reservefloat reset resetmarkering resetmarking \
+resetnummer resettekstinhoud resettextcontent rightaligned romannumerals \
+romeins rooster rotate roteer rugwit \
+scale schaal scherm schrijfnaarlijst schrijfnaarreferentielijst \
+schrijfnaarregister schrijftussenlijst screen section seeregister \
+selectblocks selecteerblokken selecteerpapier selecteerversie selectpaper \
+selectversion setnummer settextcontent setupalign setuparranging \
+setupbackground setupbackgrounds setupblackrules setupblank setupblock \
+setupbodyfont setupbodyfontenvironment setupbottom setupbottomtexts setupbuffer \
+setupbuttons setupcapitals setupcaption setupcaptions setupclipping \
+setupcolor setupcolors setupcolumns setupcombinations setupcombinedlist \
+setupcomment setupdescriptions setupenumerations setupexternalfigures setupfield \
+setupfields setupfillinlines setupfillinrules setupfloat setupfloats \
+setupfloatsplitting setupfonthandling setupfontsynonym setupfooter setupfootertexts \
+setupfootnotedefinition setupfootnotes setupforms setupformulas setupframed \
+setupframedtexts setuphead setupheader setupheadertexts setupheadnumber \
+setupheads setupheadtext setuphyphenmark setupindentedtext setupindenting \
+setupinmargin setupinteraction setupinteractionbar setupinteractionmenu setupinteractionscreen \
+setupinterlinespace setupinterlinespace2 setupitemgroup setupitems setuplabeltext \
+setuplanguage setuplayout setuplegend setuplinenumbering setuplines \
+setuplinewidth setuplist setuplistalternative setupmakeup setupmarginblocks \
+setupmarginrules setupmarking setupnarrower setupnumbering setupoppositeplacing \
+setupoutput setuppagenumber setuppagenumbering setuppagetransitions setuppalet \
+setuppaper setuppapersize setupparagraphnumbering setupparagraphs setuppositioning \
+setupprofiles setupprograms setuppublications setupquote setupreferencelist \
+setupreferencing setupregister setuprotate setupscreens setupsection \
+setupsectionblock setupsorting setupspacing setupstrut setupsubpagenumber \
+setupsymbolset setupsynchronization setupsynchronizationbar setupsynonyms setupsystem \
+setuptab setuptables setuptabulate setuptext setuptextrules \
+setuptexttexts setupthinrules setuptolerance setuptop setuptoptexts \
+setuptype setuptyping setupunderbar setupurl setupversions \
+setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup \
+showexternalfigures showfields showframe showgrid showlayout \
+showmakeup showpalet showprint showsetups showstruts \
+showsymbolset snijwit som someline somewhere \
+sort space spatie spiegel splitfloat \
+splitsplaatsblok startachtergrond startalignment startbackground startbuffer \
+startcitaat startcodering startcolor startcolumns startcombinatie \
+startcombination startcomment startcomponent startdescription startdocument \
+startenumeration startenvironment startfact startfigure startfiguur \
+startfloattext startformula startframedtext startglobaal starthiding \
+startinteractionmenu startitemgroup startkantlijn startkleur startkolomgroep \
+startkolommen startkolomopmaak startlegend startline startlinecorrection \
+startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
+startlokaal startlokalevoetnoten startmakeup startmargeblok startmarginblock \
+startmarginrule startnaast startnamemakeup startnarrower startomgeving \
+startomlijnd startonderdeel startopelkaar startopmaak startopposite \
+startoverlay startoverview startoverzicht startpacked startparagraph \
+startpositioneren startpositioning startpostponing startproduct startprodukt \
+startprofiel startprofile startproject startprojekt startquotation \
+startraster startregel startregelcorrectie startregelnummeren startregels \
+startregister startsmaller startsymbolset startsymboolset startsynchronisatie \
+startsynchronization starttabel starttabellen starttable starttables \
+starttabulate starttekst starttekstachtergrond starttekstlijn starttextrule \
+starttyping startuitlijnen startunpacked startvanelkaar startversie \
+startversion stelachtergrondenin stelachtergrondin stelalineasin stelantwoordgebiedin \
+stelarrangerenin stelblankoin stelblokin stelblokjesin stelblokkopjein \
+stelblokkopjesin stelbovenin stelboventekstenin stelbufferin stelbuttonsin \
+stelciterenin stelclipin stelcombinatiesin stelcommentaarin steldoordefinierenin \
+steldoornummerenin steldunnelijnenin stelexternefigurenin stelformulesin stelformulierenin \
+stelhoofdin stelhoofdtekstenin stelingesprongentextin stelinmargein stelinspringenin \
+stelinteractiebalkin stelinteractiein stelinteractiemenuin stelinteractieschermin stelinterliniein \
+stelinvullijnenin stelinvulregelsin stelitemgroepin stelitemsin stelkadertekstenin \
+stelkantlijnin stelkapitalenin stelkleurenin stelkleurin stelkolomgroepin \
+stelkolomgroepregelsin stelkolomgroepstartin stelkolommenin stelkopin stelkopnummerin \
+stelkoppeltekenin stelkoppenin stelkoptekstin stelkorpsin stelkorpsomgevingin \
+stellabeltekstin stellayoutin stellegendain stellijndiktein stellijnin \
+stellijstin stelmargeblokkenin stelmarkeringin stelnaastplaatsenin stelnummerenin \
+stelnummerin stelomlijndin stelonderin stelonderstrepenin stelondertekstenin \
+stelopmaakin stelopsommingenin stelpaginacommentaarin stelpaginanummerin stelpaginanummeringin \
+stelpaginaovergangenin stelpaletin stelpapierformaatin stelpapierin stelparagraafnummerenin \
+stelplaatsblokin stelplaatsblokkenin stelplaatsbloksplitsenin stelplaatsin stelpositionerenin \
+stelprofielenin stelprogrammasin stelpublicatiesin stelrastersin stelreferentielijstin \
+stelrefererenin stelregelnummerenin stelregelsin stelregisterin stelroterenin \
+stelsamengesteldelijstin stelsectieblokin stelsectiein stelsmallerin stelsorterenin \
+stelspatieringin stelstartstopin stelstrutin stelsubpaginanummerin stelsymboolsetin \
+stelsynchronisatiebalkin stelsynchronisatiein stelsynoniemenin stelsysteemin steltaalin \
+steltabellenin steltabin steltabulatiein steltekstachtergrondin steltekstin \
+steltekstinhoudin steltekstlijnenin steltekstpositiein stelteksttekstenin steltekstvariabelein \
+steltolerantiein steltypein steltypenin steluitlijnenin steluitvoerin \
+stelurlin stelveldenin stelveldin stelversiesin stelvoetin \
+stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin stelwiskundeuitlijnenin stelwitruimtein \
+stopachtergrond stopalignment stopbackground stopbuffer stopcitaat \
+stopcodering stopcolor stopcolumns stopcombinatie stopcombination \
+stopcomment stopcomponent stopdescription stopdocument stopenumeration \
+stopenvironment stopfact stopfigure stopfloattext stopformula \
+stopframedtext stopglobaal stophiding stopinteractionmenu stopitemgroup \
+stopkantlijn stopkleur stopkolomgroep stopkolommen stopkolomopmaak \
+stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
+stoplocal stoplocalenvironment stoplocalfootnotes stoplokaal stoplokalevoetnoten \
+stopmakeup stopmargeblok stopmarginblock stopmarginrule stopnaast \
+stopnamemakeup stopnarrower stopomgeving stopomlijnd stoponderdeel \
+stopopelkaar stopopmaak stopopposite stopoverlay stopoverview \
+stopoverzicht stoppacked stopparagraph stoppositioneren stoppositioning \
+stoppostponing stopproduct stopprodukt stopprofiel stopprofile \
+stopproject stopprojekt stopquotation stopraster stopregel \
+stopregelcorrectie stopregelnummeren stopregels stopsmaller stopsymbolset \
+stopsynchronisatie stopsynchronization stoptabel stoptabellen stoptable \
+stoptables stoptabulate stoptekst stoptekstachtergrond stoptekstlijn \
+stoptextrule stoptyping stopuitlijnen stopunpacked stopvanelkaar \
+stopversie stopversion stretched sub subformulenummer \
+subject subpaginanummer subsection subsubject subsubsection \
+subsubsubject suggestie switchnaarkorps switchtobodyfont switchtorawfont \
+sym symbol symbool symoffset synchronisatiebalk \
+synchroniseer synchronizationbar synchronize synonym taal \
+tab tekstbreedte teksthoogte tekstlijn tekstreferentie \
+tekstvariabele testkolom testpagina tex textreference \
+textrule thinrule thinrules title toelichting \
+tooltip toonexternefiguren toongrid tooninstellingen toonkader \
+toonkleur toonkleurgroep toonkorps toonkorpsomgeving toonlayout \
+toonopmaak toonpalet toonprint toonstruts toonsymboolset \
+toonvelden totaalaantalpaginas translate txt typ \
+type typebuffer typefile uit uitgerekt \
+underbar underbars useURL useblocks usecodering \
+usecommands usedirectory useencoding useexternaldocument useexternalfigure \
+useexternalfile useexternalfiles useexternalsoundtrack usemodule usereferences \
+usespecials usesymbols usetypescript usetypescriptfile vastespatie \
+vastespaties veld veldstapel verbergblokken vergelijkkleurgroep \
+vergelijkpalet verhoognummer verlaagnummer verplaatsformule verplaatsopgrid \
+verplaatszijblok versie version vertaal verwerkblokken \
+verwerkpagina vl voetafstand voethoogte voetnoot \
+voetnoottekst volgprofiel volgprofielversie volgversie volledigepaginanummer \
+volledigregister voluit weekdag weekday whitespace \
+wilijnd wiskunde witruimte woonplaats woordrechts \
+wordright writebetweenlist writetolist writetoreferencelist writetoregister \
+zetbreedte zethoogte
+
+keywordclass.context.pe=\
+CAP Cap Caps Character \
+Characters MONTH Numbers Romannumerals WEEKDAY \
+WORD WORDS Word Words about \
+adaptlayout appendix at atpage background \
+blackrule blackrules blank bookmark but \
+button bypassblocks cap chapter character \
+characters chem clip clonefield color \
+column comment comparecolorgroup comparepalet completecombinedlist \
+completelistoffloats completelistofsorts completelistofsynonyms completeregister convertnumber \
+copyfield correctwhitespace coupledocument coupledregister couplemarking \
+couplepage couplepaper coupleregister crlf currentdate \
+currentheadnumber date decouplemarking define defineblank \
+defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment \
+definebuffer definecolor definecolorgroup definecombinedlist defineconversion \
+definedescription definedfont defineenumeration definefield definefieldstack \
+definefiguresymbol definefloat definefont definefontfeature definefonthandling \
+definefontsynonym defineframed defineframedtext definehead defineindentedtext \
+defineinteractionmenu definelabel definelist definelogo definemakeup \
+definemarking defineoutput defineoverlay definepalet definepapersize \
+defineparagraphs defineprofile defineprogram definereference definereferenceformat \
+definereferencelist defineregister definesection definesectionblock definesorting \
+definestartstop definesubfield definesymbol definesynonyms definetabletemplate \
+definetabulate definetext definetype definetypeface definetyping \
+defineversion description determineheadnumber determinelistcharacteristics disableinteractionmenu \
+emptylines enumeration externalfigure field fieldstack \
+fillinfield fillinline fillinrules fillintext fitfield \
+fixedspaces followprofile followprofileversion followversion footnote \
+footnotetext forceblocks fraction framed framedtext \
+from getbuffer getmarking godown goto \
+gotobox graycolor grid hairline head \
+headnumber headtext hideblocks high hl \
+in indentation indenting inframed ininner \
+inleft inline inmargin inmframed inothermargin \
+inouter inright installlanguage interactionbar interactionbuttons \
+item items its keepblocks labeling \
+labels labeltext language leftaligned listsymbol \
+loadsorts loadsynonyms logfields lohi low \
+mainlanguage mapfontsize mar marginrule margintext \
+marking markversion mathematics mediaeval mframed \
+midaligned mirror month moveongrid name \
+nextsection nocap noheaderandfooterlines noindenting nolist \
+nomarking nomoreblocks nomorefiles nop nospace \
+note notopandbottomlines nowhitespace numbers overbar \
+overbars overstrike overstrikes packed page \
+pagereference paragraph part periods placebookmarks \
+placecombinedlist placefloat placefootnotes placeformula placelegend \
+placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
+placelogos placeongrid placeontopofeachother placereferencelist placeregister \
+placesidebyside placesubformula position processblocks processpage \
+program publication quotation quote ran \
+ref reference referral referraldate register \
+reservefloat reset resetmarking resettextcontent rightaligned \
+romannumerals rotate scale screen section \
+seeregister selectblocks selectpaper selectversion settextcontent \
+setupalign setupanswerarea setuparranging setupbackground setupbackgrounds \
+setupblackrules setupblank setupblock setupbodyfont setupbodyfontenvironment \
+setupbottom setupbottomtexts setupbuffer setupbuttons setupcapitals \
+setupcaption setupcaptions setupclipping setupcolor setupcolors \
+setupcolumns setupcombinations setupcombinedlist setupcomment setupdescriptions \
+setupenumerations setupexternalfigures setupfield setupfields setupfillinlines \
+setupfillinrules setupfloat setupfloats setupfloatsplitting setupfonthandling \
+setupfontsynonym setupfooter setupfootertexts setupfootnotedefinition setupfootnotes \
+setupforms setupformulas setupframed setupframedtexts setuphead \
+setupheader setupheadertexts setupheadnumber setupheads setupheadtext \
+setuphyphenmark setupindentedtext setupindenting setupinmargin setupinteraction \
+setupinteractionbar setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 \
+setupitemgroup setupitems setuplabeltext setuplanguage setuplayout \
+setuplegend setuplinenumbering setuplines setuplinewidth setuplist \
+setuplistalternative setupmakeup setupmarginblocks setupmarginrules setupmarking \
+setupnarrower setupnumbering setupoppositeplacing setupoutput setuppagenumber \
+setuppagenumbering setuppagetransitions setuppalet setuppaper setuppapersize \
+setupparagraphnumbering setupparagraphs setuppositioning setupprofiles setupprograms \
+setuppublications setupquote setupreferencelist setupreferencing setupregister \
+setuprotate setupscreens setupsection setupsectionblock setupsorting \
+setupspacing setupstrut setupsubpagenumber setupsymbolset setupsynchronization \
+setupsynchronizationbar setupsynonyms setupsystem setuptab setuptables \
+setuptabulate setuptext setuptextrules setuptexttexts setupthinrules \
+setuptolerance setuptop setuptoptexts setuptype setuptyping \
+setupunderbar setupurl setupversions setupwhitespace showbodyfont \
+showbodyfontenvironment showcolor showcolorgroup showexternalfigures showfields \
+showframe showgrid showlayout showmakeup showpalet \
+showprint showsetups showstruts showsymbolset someline \
+somewhere sort space splitfloat startalignment \
+startbackground startbuffer startcolor startcolumns startcombination \
+startcomment startcomponent startdescription startdocument startenumeration \
+startenvironment startfact startfigure startfloattext startformula \
+startframedtext starthiding startinteractionmenu startitemgroup startlegend \
+startline startlinecorrection startlinenumbering startlines startlocal \
+startlocalenvironment startlocalfootnotes startmakeup startmarginblock startmarginrule \
+startnamemakeup startnarrower startopposite startoverlay startoverview \
+startpacked startparagraph startpositioning startpostponing startproduct \
+startprofile startproject startquotation startraster startregister \
+startsymbolset startsynchronization starttable starttables starttabulate \
+starttextrule starttyping startunpacked startversion stopalignment \
+stopbackground stopbuffer stopcolor stopcolumns stopcombination \
+stopcomment stopcomponent stopdescription stopdocument stopenumeration \
+stopenvironment stopfact stopfigure stopfloattext stopformula \
+stopframedtext stophiding stopinteractionmenu stopitemgroup stoplegend \
+stopline stoplinecorrection stoplinenumbering stoplines stoplocal \
+stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock stopmarginrule \
+stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \
+stoppacked stopparagraph stoppositioning stoppostponing stopproduct \
+stopprofile stopproject stopquotation stopraster stopsymbolset \
+stopsynchronization stoptable stoptables stoptabulate stoptextrule \
+stoptyping stopunpacked stopversion stretched sub \
+subject subsection subsubject subsubsection subsubsubject \
+switchtobodyfont sym symbol synchronizationbar synchronize \
+synonym tab tex textreference textrule \
+thinrule thinrules title tooltip translate \
+txt typ type typebuffer typefile \
+underbar underbars useJSscripts useURL useXMLfilter \
+useblocks usecommands usedirectory useencoding useexternaldocument \
+useexternalfigure useexternalfile useexternalfiles useexternalsoundtrack usemodule \
+usereferences usespecials usesymbols usetypescript usetypescriptfile \
+useurl version vl weekday whitespace \
+wordright writebetweenlist writetolist writetoreferencelist writetoregister \
+آفست‌صفحه آیتم آیتمها آینه اجباربلوکها \
+ارتفاع‌آرایش ارتفاع‌بالا ارتفاع‌برگ ارتفاع‌ته‌برگ ارتفاع‌خط \
+ارتفاع‌سربرگ ارتفاع‌متن ارتفاع‌پایین از ازکارانداختن‌منوی‌پانل \
+استفاده‌بلوکها استفاده‌دستخط‌تایپ استفاده‌رمزینه استفاده‌شکل‌خارجی استفاده‌فرمانها \
+استفاده‌قطعه‌موزیک‌خارجی استفاده‌مدول استفاده‌مدولها استفاده‌مرجعها استفاده‌مسیر \
+استفاده‌نمادها استفاده‌نوشتارخارجی استفاده‌ویژگیها استفاده‌پرونده‌خارجی استفاده‌پرونده‌دستخط‌تایپ \
+استفاده‌پرونده‌های‌خارجی اعدادلاتین افزودن اما امتحان‌نکن \
+انتخاب‌برگ انتخاب‌بلوکها انتخاب‌نسخه انتقال‌به‌توری انتقال‌فرمول \
+انتقال‌کنار‌شناور انجام‌دوباره بارگذاری‌آرایش بارگذاری‌آیتمها بارگذاری‌ارجاع \
+بارگذاری‌اندازه‌برگ بارگذاری‌باریکتر بارگذاری‌بافر بارگذاری‌بالا بارگذاری‌بخش \
+بارگذاری‌بردباری بارگذاری‌برنامه‌ها بارگذاری‌برگ بارگذاری‌بست بارگذاری‌بلوک \
+بارگذاری‌بلوکهای‌حاشیه بارگذاری‌بلوک‌بخش بارگذاری‌تایپ بارگذاری‌تایپ‌کردن بارگذاری‌تب \
+بارگذاری‌ترتیب بارگذاری‌ترکیب‌ها بارگذاری‌تطابق بارگذاری‌تعریف‌پانوشت بارگذاری‌تنظیم \
+بارگذاری‌تنظیم‌ریاضی بارگذاری‌ته‌برگ بارگذاری‌تورفتگی بارگذاری‌توضیح بارگذاری‌توضیح‌صفحه \
+بارگذاری‌ثبت بارگذاری‌جانشانی بارگذاری‌جدولها بارگذاری‌جدول‌بندی بارگذاری‌خالی \
+بارگذاری‌خروجی بارگذاری‌خط بارگذاری‌خطها بارگذاری‌خطهای‌حاشیه بارگذاری‌خطهای‌سیاه \
+بارگذاری‌خطهای‌متن بارگذاری‌خطهای‌مجموعه‌ستون بارگذاری‌خطها‌ی‌نازک بارگذاری‌درج‌درخطها بارگذاری‌درج‌مخالف \
+بارگذاری‌درون‌حاشیه بارگذاری‌دوران بارگذاری‌دکمه‌ها بارگذاری‌راهنما بارگذاری‌رنگ \
+بارگذاری‌رنگها بارگذاری‌زبان بارگذاری‌ستونها بارگذاری‌سر بارگذاری‌سربرگ \
+بارگذاری‌سرها بارگذاری‌سیستم بارگذاری‌شرح بارگذاری‌شرحها بارگذاری‌شروع‌مجموعه‌ستون \
+بارگذاری‌شروع‌پایان بارگذاری‌شماره بارگذاری‌شماره‌زیرصفحه بارگذاری‌شماره‌سر بارگذاری‌شماره‌صفحه \
+بارگذاری‌شماره‌گذاری بارگذاری‌شماره‌گذاریها بارگذاری‌شماره‌گذاری‌صفحه بارگذاری‌شماره‌گذاری‌پاراگراف بارگذاری‌شماره‌‌گذاری‌خط \
+بارگذاری‌شناور بارگذاری‌شناورها بارگذاری‌شکافتن‌شناورها بارگذاری‌شکلهای‌خارجی بارگذاری‌طرح \
+بارگذاری‌طرح‌بندی بارگذاری‌عرض‌خط بارگذاری‌فاصله‌بین‌خط بارگذاری‌فرمولها بارگذاری‌فضای‌سفید \
+بارگذاری‌فضا‌گذاری بارگذاری‌قالبی بارگذاری‌قلم‌متن بارگذاری‌لوح بارگذاری‌لیست \
+بارگذاری‌لیست‌ترکیبی بارگذاری‌لیست‌مرجع بارگذاری‌مترادفها بارگذاری‌متغیر‌متن بارگذاری‌متن \
+بارگذاری‌متنهای‌بالا بارگذاری‌متن‌سر بارگذاری‌متن‌سربرگ بارگذاری‌متن‌قالبی بارگذاری‌متن‌متنها \
+بارگذاری‌متن‌پانوشت بارگذاری‌متن‌پایین بارگذاری‌مجموعه‌ستون بارگذاری‌مجموعه‌نماد بارگذاری‌محیط‌قلم‌متن \
+بارگذاری‌منوی‌پانل بارگذاری‌مکان‌متن بارگذاری‌مکان‌گذاری بارگذاری‌میدان بارگذاری‌میدانها \
+بارگذاری‌میله‌تطابق بارگذاری‌میله‌زیر بارگذاری‌میله‌پانل بارگذاری‌نسخه‌ها بارگذاری‌نشانه‌شکستن \
+بارگذاری‌نشانه‌گذاری بارگذاری‌نشرها بارگذاری‌نقل بارگذاری‌پاراگرافها بارگذاری‌پانل \
+بارگذاری‌پانوشتها بارگذاری‌پایین بارگذاری‌پرده‌ها بارگذاری‌پرده‌پانل بارگذاری‌پروفایلها \
+بارگذاری‌پرکردن‌خطها بارگذاری‌پس‌زمینه بارگذاری‌پس‌زمینه‌ها بارگذاری‌چیدن بارگذاری‌گذارصفحه \
+بارگذاری‌گروههای‌آیتم بارگذاری‌گروه‌آیتم بازنشانی بازنشانی‌شماره بازنشانی‌متن \
+بازنشانی‌نشانه‌گذاری باگذاری‌متن‌برچسب بدون‌بعد بدون‌بلوکهای‌بیشتر بدون‌تورفتگی \
+بدون‌خط‌بالاوپایین بدون‌خط‌سروته‌برگ بدون‌فایلهای‌بیشتر بدون‌فضا بدون‌فضای‌سفید \
+بدون‌لیست بدون‌نشانه‌گذاری برنامه بروبه بروبه‌جعبه \
+بروبه‌صفحه بروپایین برچسب برچسبها بعد \
+بلند بلوکهای‌پردازش بلوکها‌پنهان بنویس‌بین‌لیست بنویس‌درثبت \
+بنویس‌درلیست‌مرجع بنویس‌در‌لیست تاریخ تاریخ‌جاری تاریخ‌رجوع \
+تایپ تایپ‌بافر تایپ‌پرونده تب ترجمه \
+تطابق تعریف تعریف‌آرایش تعریف‌آرم تعریف‌الگوی‌جدول \
+تعریف‌اندازه‌برگ تعریف‌بافر تعریف‌بخش تعریف‌برنامه تعریف‌برچسب \
+تعریف‌بلوک تعریف‌بلوک‌بخش تعریف‌تایپ تعریف‌تایپ‌کردن تعریف‌تبدیل \
+تعریف‌ترتیب تعریف‌ترکیب تعریف‌تنظیم‌ریاضی تعریف‌توده‌میدان تعریف‌ثبت \
+تعریف‌جانشانی تعریف‌جدول‌بندی تعریف‌جعبه‌‌افقی تعریف‌حرف تعریف‌خالی \
+تعریف‌خروجی تعریف‌خط‌حائل تعریف‌درون‌حاشیه تعریف‌رنگ تعریف‌زیرمیدان \
+تعریف‌سبک تعریف‌سبک‌قلم تعریف‌سر تعریف‌شرح تعریف‌شروع‌پایان \
+تعریف‌شماره‌بندی تعریف‌شمایل‌مرجع تعریف‌شناور تعریف‌شکستن‌ستون تعریف‌شکست‌صفحه \
+تعریف‌طرح‌بندی تعریف‌فرمان تعریف‌قالبی تعریف‌قلم تعریف‌قلم‌خام \
+تعریف‌قلم‌متن تعریف‌لایه تعریف‌لهجه تعریف‌لوح تعریف‌لیست \
+تعریف‌لیست‌ترکیبی تعریف‌لیست‌مرجع تعریف‌مترادفها تعریف‌مترادف‌قلم تعریف‌متغیرمتن \
+تعریف‌متن تعریف‌متن‌قالبی تعریف‌مجموعه‌ستون تعریف‌محیط‌قلم‌بدنه تعریف‌مرجع \
+تعریف‌منوی‌پانل تعریف‌مکان‌متن تعریف‌میدان تعریف‌میدان‌اصلی تعریف‌نسخه \
+تعریف‌نشانه‌گذاری تعریف‌نماد تعریف‌نمادشکل تعریف‌پاراگرافها تعریف‌پروفایل \
+تعریف‌پوشش تعریف‌گروه‌آیتم تعریف‌گروه‌رنگ تعیین‌شماره تعیین‌شماره‌سر \
+تعیین‌متغیر‌متن تعیین‌محتوای‌متن تعیین‌مشخصات‌ثبت تعیین‌مشخصات‌لیست تغییربه‌قلم‌بدنه \
+تغییربه‌قلم‌خام تنظیم‌راست تنظیم‌طرح‌بندی تنظیم‌وسط توجه \
+تورفتگی توری تولید تک ثبت‌زوج \
+ثبت‌کامل جداسازی‌نشانه‌گذاری حاش حرف حرفها \
+حفظ‌بلوکها حقیقت خالی خطهای‌سیاه خطهای‌نازک \
+خطها‌خالی خط‌حاشیه خط‌سیاه خط‌متن خط‌مو \
+خط‌نازک خ‌ا خ‌ع در درج‌آرمها \
+درج‌ثبت درج‌خط درج‌درخط درج‌درخطها درج‌درمتن \
+درج‌درمیدان درج‌در‌بالای‌یکدیگر درج‌در‌توری درج‌راهنما درج‌زیرفرمول \
+درج‌شماره‌سر درج‌شماره‌صفحه درج‌شناور درج‌فرمول درج‌لیست \
+درج‌لیست‌خام درج‌لیست‌مختلط درج‌لیست‌مرجع درج‌متغیرمتن درج‌متن‌سر \
+درج‌پانوشتها درج‌پانوشتهای‌موضعی درج‌چوب‌خط درج‌کنار‌به‌کنار درحاشیه \
+درحاشیه‌دیگر درحاشیه‌راست درحاشیه‌چپ درخارجی درخط \
+درداخلی درراست درصفحه درقالبی درلبه‌راست \
+درلبه‌چپ درمورد درون درپر درچپ \
+دریافت‌بافر دریافت‌شماره دریافت‌نشانه دوران دکمه \
+دکمه‌منو دکمه‌پانل رج رجوع رنگ \
+رنگ‌خاکستری روزهفته ریاضی زبان زبان‌اصلی \
+ستون ستون‌امتحان سر سرپوش‌کوچک‌نه شروع‌آرایش \
+شروع‌آرایش‌ستون شروع‌باریکتر شروع‌بازبینی شروع‌بلوک‌حاشیه شروع‌ترکیب \
+شروع‌تصحیح‌خط شروع‌تطابق شروع‌تنظیم شروع‌تولید شروع‌جدول \
+شروع‌جدولها شروع‌خط شروع‌خطها شروع‌خط‌حاشیه شروع‌خط‌متن \
+شروع‌رنگ شروع‌ستونها شروع‌سراسری شروع‌شماره‌گذاری‌خط شروع‌شکل \
+شروع‌غیر‌فشرده شروع‌فشرده شروع‌متن شروع‌مجموعه‌ستون شروع‌مجموعه‌نماد \
+شروع‌محیط شروع‌مخالف شروع‌موضعی شروع‌مولفه شروع‌مکان‌گذاری \
+شروع‌نسخه شروع‌نقل‌قول شروع‌نوشتار شروع‌پانوشتهای‌موضعی شروع‌پروفایل \
+شروع‌پروژه شروع‌پس‌زمینه شروع‌پوشش شروع‌کد شماره‌افزایش \
+شماره‌زیرصفحه شماره‌زیرفرمول شماره‌سر شماره‌سرجاری شماره‌صفحه \
+شماره‌صفحه‌کامل شماره‌فرمول شماره‌مبدل شماره‌ها شماره‌کاهش \
+شماره‌کل‌صفحه‌ها شکافتن‌شناور شکل‌خارجی صفحه صفحه‌تست \
+صفحه‌زوج صفحه‌پردازش طول‌لیست عبوربلوکها عرض‌آرایش \
+عرض‌برگ عرض‌حاشیه عرض‌حاشیه‌خارجی عرض‌حاشیه‌داخلی عرض‌حاشیه‌راست \
+عرض‌حاشیه‌چپ عرض‌خط عرض‌لبه عرض‌لبه‌خارجی عرض‌لبه‌داخلی \
+عرض‌لبه‌راست عرض‌لبه‌چپ عرض‌لیست عرض‌متن عمق‌صفحه \
+عنوان‌حاشیه فاصله‌بالا فاصله‌ته‌برگ فاصله‌حاشیه فاصله‌حاشیه‌خارجی \
+فاصله‌حاشیه‌داخلی فاصله‌حاشیه‌راست فاصله‌حاشیه‌چپ فاصله‌سربرگ فاصله‌لبه \
+فاصله‌لبه‌خارجی فاصله‌لبه‌داخلی فاصله‌لبه‌راست فاصله‌لبه‌چپ فاصله‌پایین \
+فاصله‌پشت فشرده فضا فضاهای‌ثابت فضای‌بالا \
+فضای‌برش فضای‌ثابت فضای‌سفید فضای‌سفیدصحیح فضای‌پایین \
+فوری‌به‌لیست فوری‌بین‌لیست قالبی لوح‌مقایسه ماه \
+متغیر متن متن‌برچسب متن‌حاشیه متن‌سر متن‌پانوشت \
+محیط مراجعه مرجع مرجع‌صفحه مرجع‌متن \
+مرحله‌سر مسکن معنی‌واحد مقایسه‌گروه‌رنگ مقدارخاکستری \
+مقداررنگ مقیاس منفی منوی‌پانل مولفه \
+مکان مکان‌متن میدان میدانهای‌گزارش میدان‌شبیه‌سازی \
+میدان‌پشته میدان‌کپی میله‌تطابق میله‌رنگ میله‌پانل \
+ناشناس نام‌ماکرو نسخه نسخه‌نشانه نشانه‌گذاری \
+نشانه‌گذاری‌زوج نشر نصب‌زبان نقطه‌ها نقل \
+نقل‌قول نم نماد نمادسر نمادلیست \
+نمایش‌آرایش نمایش‌بارگذاریها نمایش‌بستها نمایش‌توری نمایش‌رنگ \
+نمایش‌شکلهای‌خارجی نمایش‌طرح‌بندی نمایش‌قالب نمایش‌قلم‌بدنه نمایش‌لوح \
+نمایش‌مجموعه‌علامت نمایش‌محیط‌قلم‌بدنه نمایش‌میدانها نمایش‌چاپ نمایش‌گروه‌رنگ \
+نوشتارزوج هدایت پا پابا پانوشت \
+پایان‌آرایش پایان‌آرایش‌ستون پایان‌بازبینی پایان‌بلوک‌حاشیه پایان‌ترکیب \
+پایان‌تصحیح‌خط پایان‌تطابق پایان‌تنظیم پایان‌تولید پایان‌جدول \
+پایان‌جدولها پایان‌خط پایان‌خطها پایان‌خط‌حاشیه پایان‌خط‌متن \
+پایان‌رنگ پایان‌ستونها پایان‌سراسری پایان‌شماره‌گذاری‌خط پایان‌غیرفشرده \
+پایان‌فشرده پایان‌متن پایان‌مجموعه‌ستون پایان‌محیط پایان‌مخالف \
+پایان‌موضعی پایان‌مولفه پایان‌مکان‌گذاری پایان‌نازکتر پایان‌نسخه \
+پایان‌نقل‌قول پایان‌نوشتار پایان‌پانوشتهای‌موضعی پایان‌پروفایل پایان‌پروژه \
+پایان‌پس‌زمینه پایان‌پوشش پایان‌کد پایین پرده \
+پروژه پرکردن‌میدان پس‌زمینه پیروی‌نسخه پیروی‌نسخه‌پروفایل \
+پیروی‌پروفایل چاپ‌ارتفاع‌برگ چاپ‌عرض‌برگ چوبخط چپ‌چین \
+کاغذزوج کسر کشیده کلمه‌حاشیه کلمه‌راست \
+گیره یادداشت یک‌جا یک‌خط
+
+keywordclass.context.ro=\
+CAP CUVANT CUVINTE Cap \
+Caps Character Characters Cuvant Cuvinte \
+KAP Kap Kaps LUNA Litera \
+Litere MONTH Numere Numereromane Romannumerals \
+WEEKDAY WORD WORDS Word Words \
+ZIDINSAPTAMANA about adapteazaaspect adaptlayout adubuffer \
+adumarcaje afiseazaaspect afiseazacampuri afiseazaculoare afiseazafiguriexterne \
+afiseazafonttext afiseazagrid afiseazagrupculoare afiseazamakeup afiseazamediufonttext \
+afiseazapaleta afiseazarama afiseazasetari afiseazasetsimboluri afiseazastruts \
+afiseazatiparire aliniat aliniatcentru aliniatdreapta aliniatstanga \
+appendix arg ascundeblocuri at atleftmargin \
+atpage atrightmargin background baraculoare barainteractiune \
+barasincronizare blackrule blackrules blanc blank \
+bookmark but butoaneinteractiune buton butonmeniu \
+button bypassblocks camp campumplere cap \
+chapter character characters chem citat \
+clip cloneazacamp clonefield coloana color \
+column comment comparagrupculoare comparapaleta comparecolorgroup \
+comparepalet completeazanumarpagina completecombinedlist completelistoffloats completelistofsorts \
+completelistofsynonyms completeregister componenta convertestenumar convertnumber \
+copiazacamp copyfield corecteazaspatiualb correctwhitespace coupledocument \
+coupledregister couplemarking couplepage couplepaper coupleregister \
+crlf culoare culoaregri cupleazadocument cupleazamarcaje \
+cupleazaregistru currentdate currentheadnumber cutspace cuvantdreapta \
+cuvantmarginal data datacurenta datareferit date \
+decouplemarking decrementnumber decupleazamarcaje define defineblank \
+defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment \
+definebuffer definecolor definecolorgroup definecolumnbreak definecolumnset \
+definecombination definecombinedlist defineconversion definedescription definedfont \
+defineenumeration definefield definefieldstack definefiguresymbol definefloat \
+definefont definefontfeature definefonthandling definefontsynonym defineframed \
+defineframedtext definehead defineindentedtext defineinmargin defineinteractionmenu \
+defineitemgroup definelabel definelayer definelayout definelist \
+definelogo definemakeup definemarking definemathalignment defineoutput \
+defineoverlay definepagebreak definepalet definepapersize defineparagraphs \
+defineplacement defineprofile defineprogram definereference definereferenceformat \
+definereferencelist defineregister definesection definesectionblock definesorting \
+definestartstop defineste definesteaccent definesteantet definesteblanc \
+definestebloc definesteblocsectiune definestebuffer definestecamp definestecampprincipal \
+definestecaracter definestecomanda definesteconversie definesteculoare definestedescriere \
+definestedimensiunehartie definesteenumerare definesteeticheta definestefloat definestefont \
+definestefontraw definestefonttext definesteformatreferinte definestegrupculori definestehbox \
+definesteinconjurare definestelista definestelistacombinata definestelistareferinte definestelogo \
+definestemakeup definestemarcaje definestemediulfonttext definestemeniuinteractiune definesteoutput \
+definesteoverlay definestepaleta definesteparagraf definestepozitietext definesteprofil \
+definesteprogram definestereferinte definesteregistru definesterigla definestesablontabel \
+definestesectiune definestesimbol definestesimbolfigura definestesinonim definestesinonimfont \
+definestesortare definestestartstop definestestil definestestilfont definestestivacampuri \
+definestesubcamp definestetabulatori definestetext definestetexteinconjurate definestetextinconjurat \
+definestetyping definestevariabilatext definesteversiune definesubfield definesymbol \
+definesynonyms definetabletemplate definetabulate definetext definetype \
+definetypeface definetyping defineversion description despre \
+determinacaracteristicilelistei determinacaracteristiciregistru determinanumartitlu determineheadnumber determinelistcharacteristics \
+dezactiveazameniuinteractiune dimensiune din disableinteractionmenu distantaantet \
+distantacolt distantacoltdreapta distantacoltstanga distantajos distantamargine \
+distantamarginedreapta distantamarginestanga distantasubsol distantasus domiciliu \
+dute dutebox dutepagina ecran el \
+element emptylines enumeration eticheta etichete \
+externalfigure fact faraaliniat faradimensiune farafisiere \
+faraliniiantetsisubsol faraliniisussijos faralista faramarcaje faraspatiu \
+faraspatiualb field fieldstack figuraexterna fillinfield \
+fillinline fillinrules fillintext firdepar fitfield \
+fixedspaces followprofile followprofileversion followversion folosesteURL \
+folosestebloc folosestecodificarea folosestecomenzi folosestedirector folosestedocumentextern \
+folosestefiguraexterna folosestefisiereexterne folosestefisierextern folosestemodul folosestemodule \
+folosestemuzicaexterna folosestereferinte folosestescriptJS folosestesimboluri folosestespeciale \
+folosesteurl footnote footnotetext forceblocks forteazablocuri \
+fractie fraction framed framedtext from \
+fundal gatablocuri getbuffer getmarking getnumber \
+godown goto gotobox graycolor grid \
+grosimelinie hairline hartiedubla head headnumber \
+headsym headtext hideblocks high hl \
+immediatebetweenlist immediatetolist impachetat impartefloat in \
+inalt inaltamargine inaltimeantet inaltimehartie inaltimehartieimprimanta \
+inaltimejos inaltimelista inaltimemakeup inaltimesubsol inaltimesus \
+inaltimetext indentation indenting indreapta inframed \
+ininner injos inleft inline inlinie \
+inmaframed inmargin inmargineadreapta inmargineastanga inneredgedistance \
+inneredgewidth innermargindistance innermarginwidth inothermargin inouter \
+inparteadreapta inparteastanga inright instalarelimba installlanguage \
+instanga interactionbar interactionbuttons intins item \
+items its jos jossus kap \
+keepblocks la labeling labels labeltext \
+language lapagina latimecoltdreapta latimecoltstanga latimecolturi \
+latimehartie latimehartieimprimanta latimelista latimemakeup latimemargine \
+latimemarginedreapta latimemarginestanga latimetext leftaligned leg \
+limba limbaprincipala liniemargine linieneagra liniesubtire \
+linieumplere liniinegre liniisubtiri listsymbol litera \
+litere loadsorts loadsynonyms logcampuri logfields \
+lohi low luna lungimelista maframed \
+mainlanguage mapfontsize mar marcaje marcheazaversiune \
+marginal marginrule margintext marking markversion \
+matematica mathematics mediaeval mediu meniuinteractiune \
+midaligned minicitat mirror month moveformula \
+moveongrid movesidefloat mutapegrid name navigating \
+necunoscut nextsection niveltitlu nocap noheaderandfooterlines \
+noindenting nokap nolist nomarking nomoreblocks \
+nomorefiles nop nospace nota notasubsol \
+note notopandbottomlines nowhitespace numarformula numarincrement \
+numarpagina numarsubformula numartitlu numartitlucurent numartotalpagini \
+numberofsubpages numbers nume numere numereromane \
+numeunitate nutesta olinie outeredgedistance outeredgewidth \
+outermargindistance outermarginwidth overbar overbars overstrike \
+overstrikes packed page pagedepth pageoffset \
+pagereference pagina paginadubla paragraph part \
+pastreazablocuri pelung periods placebookmarks placecombinedlist \
+placefloat placefootnotes placeformula placeheadnumber placeheadtext \
+placelegend placelist placelistoffloats placelistofsorts placelistofsynonyms \
+placelocalfootnotes placelogos placeongrid placeontopofeachother placerawlist \
+placereferencelist placeregister placesidebyside placesubformula plaseazapegrid \
+plaseazasemnecarte position potrivestecamp pozitie pozitietext \
+proceseazabloc proceseazapagina processblocks processpage produs \
+program proiect publicatie publication puncte \
+punedeasuprafiecareia punefatainfata puneformula punelegenda punelista \
+punelistacombinata punelogouri punenotesubsol punenotesubsollocale punenumarpagina \
+puneregistru punerigla punesubformula punevariabilatext quotation \
+quote ran ref refa reference \
+referinta referintapagina referintatext referit referral \
+referraldate referring reflexie register remarca \
+reservefloat reset reseteazamarcaje resetmarking resetnumber \
+resettextcontent rightaligned riglatext rigleumplere romannumerals \
+rotate roteste saripesteblocuri scala scale \
+screen scriebuffer scrieinlista scrieinlistareferinte scrieinregistru \
+scrieintreliste section seeregister selectblocks selecteazablocuri \
+selecteazahartie selecteazaversiune selectpaper selectversion semncarte \
+setarebarasincronizare setareitemization setarelimba setareoutput setarepozitie \
+setaresincronizare seteazaaliniat seteazaalinierea seteazaantet seteazaaranjareapag \
+seteazaaspect seteazabarainteractiune seteazablanc seteazabloc seteazablocsectiune \
+seteazablocurimarginale seteazabuffer seteazabutoane seteazacamp seteazacampuri \
+seteazaclipping seteazacoloane seteazacombinari seteazacomentariu seteazacomentariupagina \
+seteazaculoare seteazaculori seteazadefinireanotasubsol seteazadescriere seteazadimensiunihartie \
+seteazaecrane seteazaecraninteractiune seteazaelemente seteazaenumerare seteazafiguriexterne \
+seteazafloat seteazafloats seteazafonttext seteazaformulare seteazaformule \
+seteazafundal seteazafundaluri seteazagrosimelinie seteazaimpartireafloat seteazainconjurat \
+seteazaingust seteazainteractiunea seteazajos seteazalegenda seteazalegendele \
+seteazaliniesilabe seteazaliniesubtire seteazalinii seteazaliniimargine seteazaliniinegre \
+seteazaliniiumplere seteazalista seteazalistacombinata seteazalistareferinte seteazamajuscule \
+seteazamakeup seteazamarcaje seteazamarginal seteazamediulfonttext seteazameniuinteractiune \
+seteazaminicitat seteazanotasubsol seteazanumarpagina seteazanumarsubpagina seteazanumartitlu \
+seteazanumerotare seteazanumerotarelinii seteazanumerotarepagina seteazanumerotareparagrafe seteazapaleta \
+seteazaparagrafe seteazaplasareaopozita seteazapozitietext seteazaprofile seteazaprograme \
+seteazapublicatii seteazareferinte seteazaregistru seteazarigla seteazarigletext \
+seteazarigleumplere seteazarotare seteazasectiune seteazasimbol seteazasinonime \
+seteazasistem seteazasortare seteazaspatiu seteazaspatiualb seteazaspatiuinterliniar \
+seteazastrut seteazasublinie seteazasubsol seteazasus seteazatab \
+seteazatabele seteazatabulatori seteazatext seteazatexteantet seteazatextejos \
+seteazatextesubsol seteazatextesus seteazatextetext seteazatexteticheta seteazatexttitlu \
+seteazatitlu seteazatitluri seteazatoleranta seteazatranzitiepagina seteazatype \
+seteazatyping seteazaurl seteazavariabilatext seteazaversiuni setnumber \
+settextcontent setupalign setupanswerarea setuparranging setupbackground \
+setupbackgrounds setupblackrules setupblank setupblock setupbodyfont \
+setupbodyfontenvironment setupbottom setupbottomtexts setupbuffer setupbuttons \
+setupcapitals setupcaption setupcaptions setupclipping setupcolor \
+setupcolors setupcolumns setupcolumnset setupcolumnsetlines setupcolumnsetstart \
+setupcombinations setupcombinedlist setupcomment setupdescriptions setupenumerations \
+setupexternalfigures setupfield setupfields setupfillinlines setupfillinrules \
+setupfloat setupfloats setupfloatsplitting setupfonthandling setupfontsynonym \
+setupfooter setupfootertexts setupfootnotedefinition setupfootnotes setupforms \
+setupformulas setupframed setupframedtexts setuphead setupheader \
+setupheadertexts setupheadnumber setupheads setupheadtext setuphyphenmark \
+setupindentedtext setupindenting setupinmargin setupinteraction setupinteractionbar \
+setupinteractionmenu setupinteractionscreen setupinterlinespace setupinterlinespace2 setupitemgroup \
+setupitems setuplabeltext setuplanguage setuplayout setuplegend \
+setuplinenumbering setuplines setuplinewidth setuplist setuplistalternative \
+setupmakeup setupmarginblocks setupmarginrules setupmarking setupmathalignment \
+setupnarrower setupnumber setupnumbering setupoppositeplacing setupoutput \
+setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper \
+setuppapersize setupparagraphnumbering setupparagraphs setupplacement setuppositioning \
+setupprofiles setupprograms setuppublications setupquote setupreferencelist \
+setupreferencing setupregister setuprotate setupscreens setupsection \
+setupsectionblock setupsorting setupspacing setupstartstop setupstrut \
+setupsubpagenumber setupsymbolset setupsynchronization setupsynchronizationbar setupsynonyms \
+setupsystem setuptab setuptables setuptabulate setuptext \
+setuptextrules setuptexttexts setupthinrules setuptolerance setuptop \
+setuptoptexts setuptype setuptyping setupunderbar setupurl \
+setupversions setupwhitespace setvariabilatext showbodyfont showbodyfontenvironment \
+showcolor showcolorgroup showexternalfigures showfields showframe \
+showgrid showlayout showmakeup showpalet showprint \
+showsetups showstruts showsymbolset sim simbol \
+sincronizeaza someline somewhere sort space \
+spatiifixate spatiu spatiualb spatiufixat spatiujos \
+spatiuspate spatiusus splitfloat startalignment startaliniere \
+startbackground startblocmarginal startbuffer startcitat startcodificare \
+startcoloane startcolor startcolumnmakeup startcolumns startcolumnset \
+startcombinare startcombination startcomment startcomponent startcomponenta \
+startcorectielinie startculoare startdescription startdocument startenumeration \
+startenvironment startfact startfigura startfigure startfloattext \
+startformula startframedtext startfundal startglobal starthiding \
+startimpachetat startingust startinteractionmenu startitemgroup startlegend \
+startline startlinecorrection startlinenumbering startlines startlinie \
+startliniemargine startlinii startlocal startlocalenvironment startlocalfootnotes \
+startmakeup startmarginblock startmarginrule startmediu startnamemakeup \
+startnarrower startneimpachetat startnotesubsollocale startnumerotarelinii startopozit \
+startopposite startoverlay startoverview startpacked startparagraph \
+startpositioning startpostponing startpozitionare startproduct startprodus \
+startprofil startprofile startproiect startproject startquotation \
+startraster startregister startriglatext startsetsimboluri startsincronizare \
+startsymbolset startsynchronization starttabel starttabele starttable \
+starttables starttabulate starttext starttextrule starttyping \
+startunpacked startversion startversiune stivacampuri stopalignment \
+stopaliniere stopbackground stopblobal stopblocmarginal stopbuffer \
+stopcitat stopcodificare stopcoloane stopcolor stopcolumnmakeup \
+stopcolumns stopcolumnset stopcombinare stopcombination stopcomment \
+stopcomponent stopcomponenta stopcorectielinie stopculoare stopdescription \
+stopdocument stopenumeration stopenvironment stopfact stopfigure \
+stopfloattext stopformula stopframedtext stopfundal stophiding \
+stopimpachetat stopingust stopinteractionmenu stopitemgroup stoplegend \
+stopline stoplinecorrection stoplinenumbering stoplines stoplinie \
+stopliniemargine stoplinii stoplocal stoplocalenvironment stoplocalfootnotes \
+stopmakeup stopmarginblock stopmarginrule stopmediu stopnamemakeup \
+stopnarrower stopneimpachetat stopnotesubsollocale stopnumerotarelinii stopopozit \
+stopopposite stopoverlay stopoverview stoppacked stopparagraph \
+stoppositioning stoppostponing stoppozitionare stopproduct stopprodus \
+stopprofil stopprofile stopproiect stopproject stopquotation \
+stopraster stopriglatext stopsincronizare stopsymbolset stopsynchronization \
+stoptabel stoptabele stoptable stoptables stoptabulate \
+stoptext stoptextrule stoptyping stopunpacked stopversion \
+stopversiune stretched sub subject subpagenumber \
+subsection subsubject subsubsection subsubsubject switchtobodyfont \
+sym symbol synchronizationbar synchronize synonym \
+tab testcolumn testpage tex texteticheta \
+textmarginal textreference textrule texttitlu textumplere \
+thinrule thinrules title titlu titlumarginal \
+tooltip traduce translate trecilafontraw trecilafonttext \
+txt typ type typebuffer typefile \
+underbar underbars undeva urmeazaprofil urmeazaversiune \
+urmeazaversiuneprofil useURL useXMLfilter useblocks usecommands \
+usedirectory useencoding useexternaldocument useexternalfigure useexternalfile \
+useexternalfiles useexternalsoundtrack usemodule usereferences usespecials \
+usesymbols usetypescript usetypescriptfile valoareculoare valoaregri \
+variabilatext version versiune vl weekday \
+whitespace wordright writebetweenlist writetolist writetoreferencelist \
+writetoregister zidinsaptamana
+
diff --git a/context/data/scite/context/scite-context-data-metafun.properties b/context/data/scite/context/scite-context-data-metafun.properties
new file mode 100644
index 000000000..977ca9dfd
--- /dev/null
+++ b/context/data/scite/context/scite-context-data-metafun.properties
@@ -0,0 +1,59 @@
+keywordclass.metafun.commands=\
+transparency sqr log ln \
+exp inv pow pi radian \
+tand cotd sin cos tan \
+cot atan asin acos invsin \
+invcos invtan acosh asinh sinh \
+cosh zmod paired tripled unitcircle \
+fulldiamond unitdiamond fullsquare llcircle lrcircle \
+urcircle ulcircle tcircle bcircle lcircle \
+rcircle lltriangle lrtriangle urtriangle ultriangle \
+uptriangle downtriangle lefttriangle righttriangle triangle \
+smoothed cornered superellipsed randomized squeezed \
+enlonged shortened punked curved unspiked \
+simplified blownup stretched enlarged leftenlarged \
+topenlarged rightenlarged bottomenlarged crossed laddered \
+randomshifted interpolated paralleled cutends peepholed \
+llenlarged lrenlarged urenlarged ulenlarged llmoved \
+lrmoved urmoved ulmoved rightarrow leftarrow \
+centerarrow boundingbox innerboundingbox outerboundingbox pushboundingbox \
+popboundingbox bottomboundary leftboundary topboundary rightboundary \
+xsized ysized xysized sized xyscaled \
+intersection_point intersection_found penpoint bbwidth bbheight \
+withshade withcircularshade withlinearshade defineshade shaded \
+shadedinto withshadecolors withshadedomain withshademethod withshadefactor \
+withshadevector withshadecenter cmyk spotcolor multitonecolor \
+namedcolor drawfill undrawfill inverted uncolored \
+softened grayed greyed onlayer along \
+graphictext loadfigure externalfigure figure register \
+withmask bitmapimage colordecimals ddecimal dddecimal \
+ddddecimal textext thetextext rawtextext textextoffset \
+verbatim thelabel label autoalign transparent \
+withtransparency property properties withproperties asgroup \
+infont space crlf dquote percent \
+SPACE CRLF DQUOTE PERCENT grayscale \
+greyscale withgray withgrey colorpart readfile \
+clearxy unitvector center epsed anchored \
+originpath infinite break xstretched ystretched \
+snapped pathconnectors function constructedfunction constructedpath \
+constructedpairs straightfunction straightpath straightpairs curvedfunction \
+curvedpath curvedpairs evenly oddly condition \
+pushcurrentpicture popcurrentpicture arrowpath tensecircle roundedsquare \
+colortype whitecolor blackcolor basiccolors normalfill \
+normaldraw visualizepaths naturalizepaths drawboundary drawwholepath \
+visualizeddraw visualizedfill draworigin drawboundingbox drawpath \
+drawpoint drawpoints drawcontrolpoints drawcontrollines drawpointlabels \
+drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions \
+drawboundoptions drawpathoptions resetdrawoptions undashed decorated \
+redecorated undecorated passvariable passarrayvariable tostring \
+format formatted startpassingvariable stoppassingvariable eofill \
+eoclip area
+
+keywordclass.metafun.internals=\
+nocolormodel greycolormodel graycolormodel rgbcolormodel \
+cmykcolormodel shadefactor textextoffset normaltransparent multiplytransparent \
+screentransparent overlaytransparent softlighttransparent hardlighttransparent colordodgetransparent \
+colorburntransparent darkentransparent lightentransparent differencetransparent exclusiontransparent \
+huetransparent saturationtransparent colortransparent luminositytransparent metapostversion \
+maxdimensions
+
diff --git a/context/data/scite/context/scite-context-data-metapost.properties b/context/data/scite/context/scite-context-data-metapost.properties
new file mode 100644
index 000000000..59014623a
--- /dev/null
+++ b/context/data/scite/context/scite-context-data-metapost.properties
@@ -0,0 +1,127 @@
+keywordclass.metapost.commands=\
+upto downto beginfig endfig \
+beginglyph endglyph rotatedaround reflectedabout arrowhead \
+currentpen currentpicture cuttings defaultfont extra_beginfig \
+extra_endfig ditto EOF down evenly \
+fullcircle halfcircle identity in left \
+pensquare penrazor penspec origin quartercircle \
+right unitsquare up withdots abs \
+bbox ceiling center cutafter cutbefore \
+dir directionpoint div dotprod intersectionpoint \
+inverse mod round unitvector whatever \
+cutdraw draw drawarrow drawdblarrow fill \
+filldraw drawdot loggingall interact tracingall \
+tracingnone pickup undraw unfill unfilldraw \
+buildcycle dashpattern decr dotlabel dotlabels \
+drawoptions incr label labels max \
+min thelabel z beginchar blacker \
+capsule_end change_width define_blacker_pixels define_corrected_pixels define_good_x_pixels \
+define_good_y_pixels define_horizontal_corrected_pixels define_pixels define_whole_blacker_pixels define_whole_pixels \
+define_whole_vertical_blacker_pixels define_whole_vertical_pixels endchar extra_beginchar extra_endchar \
+extra_setup font_coding_scheme clearxy clearit clearpen \
+shipit font_extra_space exitunless relax hide \
+gobble gobbled stop blankpicture counterclockwise \
+tensepath takepower direction softjoin makelabel \
+rotatedabout flex superellipse image nullpen \
+savepen clearpen penpos penlabels range \
+thru z laboff bye red \
+green blue cyan magenta yellow \
+black white background mm pt \
+dd bp cm pc cc \
+in triplet quadruplet
+
+keywordclass.metapost.disabled=\
+verbatimtex troffmode
+
+keywordclass.metapost.internals=\
+mitered rounded beveled butt \
+squared eps epsilon infinity bboxmargin \
+ahlength ahangle labeloffset dotlabeldiam defaultpen \
+defaultscale join_radius charscale pen_lft pen_rt \
+pen_top pen_bot
+
+keywordclass.metapost.metafont=\
+autorounding beginchar blacker boundarychar \
+capsule_def capsule_end change_width chardp chardx \
+chardy charexists charext charht charic \
+charlist charwd cull cullit currenttransform \
+currentwindow define_blacker_pixels define_corrected_pixels define_good_x_pixels define_good_y_pixels \
+define_horizontal_corrected_pixels define_pixels define_whole_blacker_pixels define_whole_pixels define_whole_vertical_blacker_pixels \
+define_whole_vertical_pixels designsize display displaying endchar \
+extensible extra_beginchar extra_endchar extra_setup fillin \
+font_coding_scheme font_extra_space font_identifier font_normal_shrink font_normal_space \
+font_normal_stretch font_quad font_size font_slant font_x_height \
+fontdimen fontmaking gfcorners granularity grayfont \
+headerbyte hppp hround imagerules italcorr \
+kern labelfont ligtable lowres_fix makebox \
+makegrid maketicks mode_def mode_setup nodisplays \
+notransforms numspecial o_correction openit openwindow \
+pixels_per_inch proofing proofoffset proofrule proofrulethickness \
+rulepen screenchars screenrule screenstrokes screen_cols \
+screen_rows showit slantfont smode smoothing \
+titlefont totalweight tracingedges tracingpens turningcheck \
+unitpixel vppp vround xoffset yoffset
+
+keywordclass.metapost.primitives=\
+charcode day linecap linejoin \
+miterlimit month pausing prologues showstopping \
+time tracingcapsules tracingchoices mpprocset tracingcommands \
+tracingequations tracinglostchars tracingmacros tracingonline tracingoutput \
+tracingrestores tracingspecs tracingstats tracingtitles truecorners \
+warningcheck year false nullpicture pencircle \
+true and angle arclength arctime \
+ASCII boolean bot char color \
+cosd cycle decimal directiontime floor \
+fontsize hex infont intersectiontimes known \
+length llcorner lrcorner makepath makepen \
+mexp mlog normaldeviate not numeric \
+oct odd or path pair \
+pen penoffset picture point postcontrol \
+precontrol reverse rotated scaled shifted \
+sind slanted sqrt str string \
+subpath substring transform transformed ulcorner \
+uniformdeviate unknown urcorner xpart xscaled \
+xxpart xypart ypart yscaled yxpart \
+yypart zscaled addto clip input \
+interim let newinternal save setbounds \
+shipout show showdependencies showtoken showvariable \
+special begingroup endgroup of curl \
+tension and controls interpath on \
+off def vardef enddef expr \
+suffix text primary secondary tertiary \
+primarydef secondarydef tertiarydef randomseed also \
+contour doublepath withcolor withcmykcolor withpen \
+dashed if else elseif fi \
+for endfor forever exitif within \
+forsuffixes step until charlist extensible \
+fontdimen headerbyte kern ligtable boundarychar \
+chardp charext charht charic charwd \
+designsize fontmaking charexists cullit currenttransform \
+gfcorners grayfont hround imagerules lowres_fix \
+nodisplays notransforms openit displaying currentwindow \
+screen_rows screen_cols pixels_per_inch cull display \
+openwindow numspecial totalweight autorounding fillin \
+proofing tracingpens xoffset chardx granularity \
+smoothing turningcheck yoffset chardy hppp \
+tracingedges vppp extra_beginfig extra_endfig mpxbreak \
+endinput message delimiters turningnumber errmessage \
+readstring scantokens end outer inner \
+write to readfrom withprescript withpostscript \
+top bot lft rt ulft \
+urt llft lrt redpart greenpart \
+bluepart cyanpart magentapart yellowpart blackpart \
+prescriptpart postscriptpart rgbcolor cmykcolor colormodel \
+graypart greypart greycolor graycolor dashpart \
+penpart stroked filled textual clipped \
+bounded pathpart expandafter minute hour \
+outputformat outputtemplate filenametemplate fontmapfile fontmapline \
+fontpart fontsize glyph restoreclipcolor troffmode \
+runscript maketext
+
+keywordclass.metapost.shortcuts=\
+.. ... -- --- \
+& \
+
+keywordclass.metapost.tex=\
+btex etex verbatimtex
+
diff --git a/context/data/scite/scite-context-data-tex.properties b/context/data/scite/context/scite-context-data-tex.properties
index 195125433..95933ad53 100644
--- a/context/data/scite/scite-context-data-tex.properties
+++ b/context/data/scite/context/scite-context-data-tex.properties
@@ -49,11 +49,12 @@ Usubscript Usuperscript Uunderdelimiter alignmark aligntab \
attribute attributedef catcodetable clearmarks crampeddisplaystyle \
crampedscriptscriptstyle crampedscriptstyle crampedtextstyle fontid formatname \
gleaders ifabsdim ifabsnum ifprimitive initcatcodetable \
-latelua luaescapestring luastartup luatexdatestamp luatexrevision \
-luatexversion mathstyle nokerns noligs outputbox \
-pageleftoffset pagetopoffset postexhyphenchar posthyphenchar preexhyphenchar \
-prehyphenchar primitive savecatcodetable scantextokens suppressfontnotfounderror \
-suppressifcsnameerror suppresslongerror suppressoutererror synctex
+latelua luaescapestring luastartup luatexbanner luatexrevision \
+luatexversion luafunction mathstyle nokerns noligs \
+outputbox pageleftoffset pagetopoffset postexhyphenchar posthyphenchar \
+preexhyphenchar prehyphenchar primitive savecatcodetable scantextokens \
+suppressfontnotfounderror suppressifcsnameerror suppresslongerror suppressoutererror suppressmathparerror \
+synctex
keywordclass.tex.omega=\
OmegaVersion bodydir chardp charht \
@@ -124,114 +125,113 @@ attribute attributedef badness baselineskip batchmode \
begingroup belowdisplayshortskip belowdisplayskip binoppenalty bodydir \
botmark botmarks box boxdir boxmaxdepth \
brokenpenalty catcode catcodetable char chardef \
-chardp charht charit charwd cleaders \
-clearmarks closein closeout clubpenalties clubpenalty \
-copy count countdef cr crampeddisplaystyle \
-crampedscriptscriptstyle crampedscriptstyle crampedtextstyle crcr csname \
-currentgrouplevel currentgrouptype currentifbranch currentiflevel currentiftype \
-day deadcycles def defaulthyphenchar defaultskewchar \
-delcode delimiter delimiterfactor delimitershortfall detokenize \
-dimen dimendef dimexpr directlua discretionary \
-displayindent displaylimits displaystyle displaywidowpenalties displaywidowpenalty \
-displaywidth divide doublehyphendemerits dp dump \
-eTeXVersion eTeXminorversion eTeXrevision eTeXversion edef \
-efcode else emergencystretch end endcsname \
-endgroup endinput endlinechar eqno errhelp \
-errmessage errorcontextlines errorstopmode escapechar everycr \
-everydisplay everyeof everyhbox everyjob everymath \
-everypar everyvbox exhyphenchar exhyphenpenalty expandafter \
-expanded fam fi finalhyphendemerits firstmark \
-firstmarks floatingpenalty font fontchardp fontcharht \
-fontcharic fontcharwd fontdimen fontid fontname \
-formatname futurelet gdef gleaders global \
-globaldefs glueexpr glueshrink glueshrinkorder gluestretch \
-gluestretchorder gluetomu halign hangafter hangindent \
-hbadness hbox hfil hfill hfilneg \
-hfuzz hoffset holdinginserts hrule hsize \
-hskip hss ht hyphenation hyphenchar \
-hyphenpenalty if ifabsdim ifabsnum ifcase \
-ifcat ifcsname ifdefined ifdim ifeof \
-iffalse iffontchar ifhbox ifhmode ifincsname \
-ifinner ifmmode ifnum ifodd ifpdfabsdim \
-ifpdfabsnum ifpdfprimitive ifprimitive iftrue ifvbox \
-ifvmode ifvoid ifx ignorespaces immediate \
-indent initcatcodetable input inputlineno insert \
-insertpenalties interactionmode interlinepenalties interlinepenalty jobname \
-kern language lastbox lastkern lastlinefit \
-lastnodetype lastpenalty lastskip latelua lccode \
-leaders left leftghost lefthyphenmin leftmarginkern \
-leftskip leqno let letterspacefont limits \
-linepenalty lineskip lineskiplimit localbrokenpenalty localinterlinepenalty \
-localleftbox localrightbox long looseness lower \
-lowercase lpcode luaescapestring luastartup luatexdatestamp \
-luatexrevision luatexversion mag mark marks \
-mathaccent mathbin mathchar mathchardef mathchoice \
-mathclose mathcode mathdir mathinner mathop \
-mathopen mathord mathpunct mathrel mathstyle \
-mathsurround maxdeadcycles maxdepth meaning medmuskip \
-message middle mkern month moveleft \
-moveright mskip muexpr multiply muskip \
-muskipdef mutoglue newlinechar noalign noboundary \
-noexpand noindent nokerns noligs nolimits \
-nolocaldirs nolocalwhatsits nonscript nonstopmode nulldelimiterspace \
-nullfont number numexpr odelcode odelimiter \
-omathaccent omathchar omathchardef omathcode omit \
-openin openout or oradical outer \
-output outputbox outputpenalty over overfullrule \
-overline overwithdelims pagebottomoffset pagedepth pagedir \
-pagediscards pagefilllstretch pagefillstretch pagefilstretch pagegoal \
-pageheight pageleftoffset pagerightoffset pageshrink pagestretch \
-pagetopoffset pagetotal pagewidth par pardir \
-parfillskip parindent parshape parshapedimen parshapeindent \
-parshapelength parskip patterns pausing pdfadjustspacing \
-pdfannot pdfcatalog pdfcolorstack pdfcolorstackinit pdfcompresslevel \
-pdfcopyfont pdfcreationdate pdfdecimaldigits pdfdest pdfdestmargin \
-pdfdraftmode pdfeachlinedepth pdfeachlineheight pdfendlink pdfendthread \
-pdffirstlineheight pdffontattr pdffontexpand pdffontname pdffontobjnum \
-pdffontsize pdfgamma pdfgentounicode pdfglyphtounicode pdfhorigin \
-pdfignoreddimen pdfimageapplygamma pdfimagegamma pdfimagehicolor pdfimageresolution \
-pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel pdfinfo pdfinsertht \
-pdflastannot pdflastlinedepth pdflastlink pdflastobj pdflastxform \
-pdflastximage pdflastximagecolordepth pdflastximagepages pdflastxpos pdflastypos \
-pdflinkmargin pdfliteral pdfmapfile pdfmapline pdfminorversion \
-pdfnames pdfnoligatures pdfnormaldeviate pdfobj pdfobjcompresslevel \
-pdfoptionpdfminorversion pdfoutline pdfoutput pdfpageattr pdfpagebox \
-pdfpageheight pdfpageref pdfpageresources pdfpagesattr pdfpagewidth \
-pdfpkmode pdfpkresolution pdfprimitive pdfprotrudechars pdfpxdimen \
-pdfrandomseed pdfrefobj pdfrefxform pdfrefximage pdfreplacefont \
-pdfrestore pdfretval pdfsave pdfsavepos pdfsetmatrix \
-pdfsetrandomseed pdfstartlink pdfstartthread pdftexbanner pdftexrevision \
-pdftexversion pdfthread pdfthreadmargin pdftracingfonts pdftrailer \
-pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform pdfxformattr \
-pdfxformname pdfxformresources pdfximage pdfximagebbox penalty \
-postdisplaypenalty postexhyphenchar posthyphenchar predisplaydirection predisplaypenalty \
-predisplaysize preexhyphenchar prehyphenchar pretolerance prevdepth \
-prevgraf primitive protected quitvmode radical \
-raise read readline relax relpenalty \
-right rightghost righthyphenmin rightmarginkern rightskip \
-romannumeral rpcode savecatcodetable savinghyphcodes savingvdiscards \
-scantextokens scantokens scriptfont scriptscriptfont scriptscriptstyle \
-scriptspace scriptstyle scrollmode setbox setlanguage \
-sfcode shipout show showbox showboxbreadth \
-showboxdepth showgroups showifs showlists showthe \
-showtokens skewchar skip skipdef spacefactor \
-spaceskip span special splitbotmark splitbotmarks \
-splitdiscards splitfirstmark splitfirstmarks splitmaxdepth splittopskip \
-string suppressfontnotfounderror suppressifcsnameerror suppresslongerror suppressoutererror \
-synctex tabskip tagcode textdir textfont \
-textstyle the thickmuskip thinmuskip time \
-toks toksdef tolerance topmark topmarks \
-topskip tracingassigns tracingcommands tracinggroups tracingifs \
-tracinglostchars tracingmacros tracingnesting tracingonline tracingoutput \
-tracingpages tracingparagraphs tracingrestores tracingscantokens tracingstats \
-uccode uchyph underline unexpanded unhbox \
-unhcopy unkern unless unpenalty unskip \
-unvbox unvcopy uppercase vadjust valign \
-vbadness vbox vcenter vfil vfill \
-vfilneg vfuzz voffset vrule vsize \
-vskip vsplit vss vtop wd \
-widowpenalties widowpenalty write xdef xleaders \
-xspaceskip year
+cleaders clearmarks closein closeout clubpenalties \
+clubpenalty copy count countdef cr \
+crampeddisplaystyle crampedscriptscriptstyle crampedscriptstyle crampedtextstyle crcr \
+csname currentgrouplevel currentgrouptype currentifbranch currentiflevel \
+currentiftype day deadcycles def defaulthyphenchar \
+defaultskewchar delcode delimiter delimiterfactor delimitershortfall \
+detokenize dimen dimendef dimexpr directlua \
+discretionary displayindent displaylimits displaystyle displaywidowpenalties \
+displaywidowpenalty displaywidth divide doublehyphendemerits dp \
+dump eTeXVersion eTeXminorversion eTeXrevision eTeXversion \
+edef efcode else emergencystretch end \
+endcsname endgroup endinput endlinechar eqno \
+errhelp errmessage errorcontextlines errorstopmode escapechar \
+everycr everydisplay everyeof everyhbox everyjob \
+everymath everypar everyvbox exhyphenchar exhyphenpenalty \
+expandafter expanded fam fi finalhyphendemerits \
+firstmark firstmarks floatingpenalty font fontchardp \
+fontcharht fontcharic fontcharwd fontdimen fontid \
+fontname formatname futurelet gdef gleaders \
+global globaldefs glueexpr glueshrink glueshrinkorder \
+gluestretch gluestretchorder gluetomu halign hangafter \
+hangindent hbadness hbox hfil hfill \
+hfilneg hfuzz hoffset holdinginserts hrule \
+hsize hskip hss ht hyphenation \
+hyphenchar hyphenpenalty if ifabsdim ifabsnum \
+ifcase ifcat ifcsname ifdefined ifdim \
+ifeof iffalse iffontchar ifhbox ifhmode \
+ifincsname ifinner ifmmode ifnum ifodd \
+ifpdfabsdim ifpdfabsnum ifpdfprimitive ifprimitive iftrue \
+ifvbox ifvmode ifvoid ifx ignorespaces \
+immediate indent initcatcodetable input inputlineno \
+insert insertpenalties interactionmode interlinepenalties interlinepenalty \
+jobname kern language lastbox lastkern \
+lastlinefit lastnodetype lastpenalty lastskip latelua \
+lccode leaders left leftghost lefthyphenmin \
+leftmarginkern leftskip leqno let letterspacefont \
+limits linepenalty lineskip lineskiplimit localbrokenpenalty \
+localinterlinepenalty localleftbox localrightbox long looseness \
+lower lowercase lpcode luaescapestring luastartup \
+luatexbanner luatexrevision luatexversion mag mark \
+marks mathaccent mathbin mathchar mathchardef \
+mathchoice mathclose mathcode mathdir mathinner \
+mathop mathopen mathord mathpunct mathrel \
+mathstyle mathsurround maxdeadcycles maxdepth meaning \
+medmuskip message middle mkern month \
+moveleft moveright mskip muexpr multiply \
+muskip muskipdef mutoglue newlinechar noalign \
+noboundary noexpand noindent nokerns noligs \
+nolimits nolocaldirs nolocalwhatsits nonscript nonstopmode \
+nulldelimiterspace nullfont number numexpr odelcode \
+odelimiter omathaccent omathchar omathchardef omathcode \
+omit openin openout or oradical \
+outer output outputbox outputpenalty over \
+overfullrule overline overwithdelims pagebottomoffset pagedepth \
+pagedir pagediscards pagefilllstretch pagefillstretch pagefilstretch \
+pagegoal pageheight pageleftoffset pagerightoffset pageshrink \
+pagestretch pagetopoffset pagetotal pagewidth par \
+pardir parfillskip parindent parshape parshapedimen \
+parshapeindent parshapelength parskip patterns pausing \
+pdfadjustspacing pdfannot pdfcatalog pdfcolorstack pdfcolorstackinit \
+pdfcompresslevel pdfcopyfont pdfcreationdate pdfdecimaldigits pdfdest \
+pdfdestmargin pdfdraftmode pdfeachlinedepth pdfeachlineheight pdfendlink \
+pdfendthread pdffirstlineheight pdffontattr pdffontexpand pdffontname \
+pdffontobjnum pdffontsize pdfgamma pdfgentounicode pdfglyphtounicode \
+pdfhorigin pdfignoreddimen pdfimageapplygamma pdfimagegamma pdfimagehicolor \
+pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel pdfinfo \
+pdfinsertht pdflastannot pdflastlinedepth pdflastlink pdflastobj \
+pdflastxform pdflastximage pdflastximagecolordepth pdflastximagepages pdflastxpos \
+pdflastypos pdflinkmargin pdfliteral pdfmapfile pdfmapline \
+pdfminorversion pdfnames pdfnoligatures pdfnormaldeviate pdfobj \
+pdfobjcompresslevel pdfoptionpdfminorversion pdfoutline pdfoutput pdfpageattr \
+pdfpagebox pdfpageheight pdfpageref pdfpageresources pdfpagesattr \
+pdfpagewidth pdfpkmode pdfpkresolution pdfprimitive pdfprotrudechars \
+pdfpxdimen pdfrandomseed pdfrefobj pdfrefxform pdfrefximage \
+pdfreplacefont pdfrestore pdfretval pdfsave pdfsavepos \
+pdfsetmatrix pdfsetrandomseed pdfstartlink pdfstartthread pdftexbanner \
+pdftexrevision pdftexversion pdfthread pdfthreadmargin pdftracingfonts \
+pdftrailer pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform \
+pdfxformattr pdfxformname pdfxformresources pdfximage pdfximagebbox \
+penalty postdisplaypenalty postexhyphenchar posthyphenchar predisplaydirection \
+predisplaypenalty predisplaysize preexhyphenchar prehyphenchar pretolerance \
+prevdepth prevgraf primitive protected quitvmode \
+radical raise read readline relax \
+relpenalty right rightghost righthyphenmin rightmarginkern \
+rightskip romannumeral rpcode savecatcodetable savinghyphcodes \
+savingvdiscards scantextokens scantokens scriptfont scriptscriptfont \
+scriptscriptstyle scriptspace scriptstyle scrollmode setbox \
+setlanguage sfcode shipout show showbox \
+showboxbreadth showboxdepth showgroups showifs showlists \
+showthe showtokens skewchar skip skipdef \
+spacefactor spaceskip span special splitbotmark \
+splitbotmarks splitdiscards splitfirstmark splitfirstmarks splitmaxdepth \
+splittopskip string suppressfontnotfounderror suppressifcsnameerror suppresslongerror \
+suppressoutererror synctex tabskip tagcode textdir \
+textfont textstyle the thickmuskip thinmuskip \
+time toks toksdef tolerance topmark \
+topmarks topskip tracingassigns tracingcommands tracinggroups \
+tracingifs tracinglostchars tracingmacros tracingnesting tracingonline \
+tracingoutput tracingpages tracingparagraphs tracingrestores tracingscantokens \
+tracingstats uccode uchyph underline unexpanded \
+unhbox unhcopy unkern unless unpenalty \
+unskip unvbox unvcopy uppercase vadjust \
+valign vbadness vbox vcenter vfil \
+vfill vfilneg vfuzz voffset vrule \
+vsize vskip vsplit vss vtop \
+wd widowpenalties widowpenalty write xdef \
+xleaders xspaceskip year
keywordclass.tex.xetex=\
XeTeXversion
diff --git a/context/data/scite/scite-context-external.properties b/context/data/scite/context/scite-context-external.properties
index 5c7149341..b48f25525 100644
--- a/context/data/scite/scite-context-external.properties
+++ b/context/data/scite/context/scite-context-external.properties
@@ -1,36 +1,46 @@
# external lpeg lexers
-import $(SciteDefaultHome)/lexers/lpeg
+lexer.lpeg.home=$(SciteDefaultHome)/context/lexers
-lexer.lpeg.home=$(SciteDefaultHome)/lexers
+lexer.lpeg.color.theme=scite-context-theme
+# lexer.lpeg.color.theme=$(SciteDefaultHome)/context/lexers/themes/scite-context-theme.lua
-# # pre 3.03:
-#
-#~ lexer.lpeg.script=$(lexer.lpeg.home)/scite-context-lexer.lua
-#
-# # post 3.03:
-#
-lexer.lpeg.script=$(lexer.lpeg.home)/lexer.lua
-#
-# where we load the extensions in the lexers themselves.
-
-lexer.lpeg.color.theme=$(lexer.lpeg.home)/themes/scite-context-theme.lua
-
-# alas, only a few properties are passed (only indentation)
+# The lexer dll no longer interfaces to the following properties. It never had a full
+# interface, so maybe I'll make my own.
fold.by.parsing=1
fold.by.indentation=0
fold.by.line=0
+fold.line.comments=0
+
+# you can put the dll/so file in the <scitehome>/context/lexers path or keep it in
+# <scitehome>/lexers
if PLAT_WIN
- lexerpath.*.lpeg=$(lexer.lpeg.home)/LexLPeg.dll
+ lexerpath.*.lpeg=$(lexer.lpeg.home)/../../lexers/lexlpeg.dll
+# lexerpath.*.lpeg=$(lexer.lpeg.home)/lexers/lexlpeg.dll
if PLAT_GTK
- lexerpath.*.lpeg=$(lexer.lpeg.home)/liblexlpeg.so
+ lexerpath.*.lpeg=$(lexer.lpeg.home)/../../lexers/liblexlpeg.so
+# lexerpath.*.lpeg=$(lexer.lpeg.home)/lexers/liblexlpeg.so
+
+# the variable lexer.name is automatically set but I'm not sure what the following
+# one is supposed to do so we keep it around (same as in lpeg.properties, which we
+# don't load)
lexer.*.lpeg=lpeg
-file.patterns.cweb=*.h;*.c;*.w;*.hh;*.cc;*.ww;*.hpp;*.cpp;*.hxx;*.cxx;
+# in principle you can do the following, as we're mostly compatible with the
+# default lexers, but for a regular context setup the lexers built into scite are
+# just fine, so we really only need the dll/so
+#
+# import lexers/lpeg
+
+# patterns should be original (i.e. they should not clash with the built-in ones)
+
+file.patterns.cweb=*.w;*.ww;
+file.patterns.cpp=*.h;*.c;*.hh;*.cc;*.hpp;*.cpp;*.hxx;*.cxx;
+file.patterns.bib=*.bib
lexer.$(file.patterns.metapost)=lpeg_scite-context-lexer-mps
lexer.$(file.patterns.metafun)=lpeg_scite-context-lexer-mps
@@ -40,18 +50,19 @@ lexer.$(file.patterns.example)=lpeg_scite-context-lexer-xml
lexer.$(file.patterns.text)=lpeg_scite-context-lexer-txt
lexer.$(file.patterns.pdf)=lpeg_scite-context-lexer-pdf
lexer.$(file.patterns.cweb)=lpeg_scite-context-lexer-web
+lexer.$(file.patterns.cpp)=lpeg_scite-context-lexer-cpp
+lexer.$(file.patterns.bib)=lpeg_scite-context-lexer-bibtex
lexer.$(file.patterns.tex)=lpeg_scite-context-lexer-tex
lexer.$(file.patterns.xml)=lpeg_scite-context-lexer-xml
lexer.$(file.patterns.html)=lpeg_scite-context-lexer-xml
-lexer.$(file.patterns.cpp)=lpeg_scite-context-lexer-web
# It's a real pity that we cannot overload the errorlist lexer. That would
# make scite even more interesting. Add to that including lpeg and the lpeg
# lexer and thereby providing an interface to properties.
-# lexer.errorlist=lpeg_scite-context-lexer-txt
-# lexer.output=lpeg_scite-context-lexer-txt
+#~ lexer.errorlist=lpeg_scite-context-lexer-txt
+#~ lexer.output=lpeg_scite-context-lexer-txt
comment.block.lpeg_scite-context-lexer-tex=%
comment.block.at.line.start.lpeg_scite-context-lexer-tex=1
@@ -65,6 +76,9 @@ comment.block.at.line.start.lpeg_scite-context-lexer-lua=1
comment.block.lpeg_scite-context-lexer-cld=--
comment.block.at.line.start.lpeg_scite-context-lexer-cld=1
+comment.block.lpeg_scite-context-lexer-bibtex=%
+comment.block.at.line.start.lpeg_scite-context-lexer-bibtex=1
+
comment.block.lpeg_props=#
comment.block.at.line.start.lpeg_props=1
diff --git a/context/data/scite/scite-context-internal.properties b/context/data/scite/context/scite-context-internal.properties
index 130e64f1e..038381dc7 100644
--- a/context/data/scite/scite-context-internal.properties
+++ b/context/data/scite/context/scite-context-internal.properties
@@ -8,8 +8,8 @@
#
# % interface=none|metapost|mp|metafun
-import scite-context-data-metapost
-import scite-context-data-metafun
+import context/scite-context-data-metapost
+import context/scite-context-data-metafun
keywordclass.metapost.all=$(keywordclass.metapost.tex) $(keywordclass.metapost.plain) $(keywordclass.metapost.primitives)
keywordclass.metafun.all=$(keywordclass.metafun.constants) $(keywordclass.metafun.helpers)
@@ -44,9 +44,9 @@ comment.block.at.line.start.metapost=1
#
# % interface=all|nl|en|de|cz|it|ro|latex
-import scite-context-data-tex
-import scite-context-data-context
-import scite-context-data-interfaces
+import context/scite-context-data-tex
+import context/scite-context-data-context
+import context/scite-context-data-interfaces
word.characters.$(file.patterns.context)=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@!?_\\
diff --git a/context/data/scite/context/scite-context-user.properties b/context/data/scite/context/scite-context-user.properties
new file mode 100644
index 000000000..b6fc34282
--- /dev/null
+++ b/context/data/scite/context/scite-context-user.properties
@@ -0,0 +1,15 @@
+# this loads the basics
+
+import context/scite-context
+
+# internal lexing
+
+import context/scite-context-internal
+
+# external lexing (tex, mps, cld/lua, xml)
+
+import context/scite-context-external
+
+# this does some tuning
+
+import context/scite-pragma
diff --git a/context/data/scite/scite-context.properties b/context/data/scite/context/scite-context.properties
index caf230de7..f1deb9df5 100644
--- a/context/data/scite/scite-context.properties
+++ b/context/data/scite/context/scite-context.properties
@@ -66,7 +66,7 @@ open.suffix.$(file.patterns.context)=.tex
# Example : patterns
file.patterns.xml=
-file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.htm;*.html;*.xhtml*.ctx;*.export;
+file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.htm;*.html;*.xhtml;*.ctx;*.export;*.svg;*.xul
open.suffix.$(file.patterns.example)=.xml
filter.example=eXaMpLe|$(file.patterns.example)|
#~ lexer.$(file.patterns.example)=xml
@@ -115,9 +115,11 @@ name.metafun.console=$(name.context.console)
name.example.console=$(name.context.console)
name.context.mtxrun=mtxrun --autogenerate
+name.context.mtxrunjit=mtxrunjit --autogenerate
name.context.check=$(name.context.mtxrun) --script check
name.context.run=$(name.context.mtxrun) --script context $(name.flag.pdfopen)
+name.context.runjit=$(name.context.mtxrunjit) --script context $(name.flag.pdfopen)
name.context.texshow=$(name.context.mtxrun) texshow
name.context.purge=$(name.context.mtxrun) --context --purge --all
name.context.showcase=$(name.context.mtxrun) --launch showcase.pdf
@@ -158,24 +160,24 @@ xml.auto.close.tags=1
# extensions
-import scite-ctx
+import context/scite-ctx
# hard coded compile / build / go
-command.compile.$(file.patterns.context)=$(name.context.check) $(FileNameExt)
-command.compile.$(file.patterns.metafun)=
-command.compile.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
-command.compile.*.fo=$(name.example.xmlcheck) $(FileNameExt)
+command.build.$(file.patterns.context)=$(name.context.check) $(FileNameExt)
+command.build.$(file.patterns.metafun)=
+command.build.$(file.patterns.example)=$(name.example.xmlcheck) $(FileNameExt)
+command.build.*.fo=$(name.example.xmlcheck) $(FileNameExt)
-command.build.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
-command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
-command.build.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
-command.build.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
+command.compile.$(file.patterns.context)=$(name.context.run) $(FileNameExt)
+command.compile.$(file.patterns.metafun)=$(name.context.run) $(name.flag.pdfopen) $(FileNameExt)
+command.compile.$(file.patterns.example)=$(name.context.run) --forcexml $(FileNameExt)
+command.compile.*.fo=$(name.context.run) $(name.flag.pdfopen) --forcexml --use=foxet $(FileNameExt)
-command.build.subsystem.$(file.patterns.context)=1
-command.build.subsystem.$(file.patterns.metafun)=1
-command.build.subsystem.$(file.patterns.example)=1
-command.build.subsystem.*.fo=1
+command.compile.subsystem.$(file.patterns.context)=1
+command.compile.subsystem.$(file.patterns.metafun)=1
+command.compile.subsystem.$(file.patterns.example)=1
+command.compile.subsystem.*.fo=1
if PLAT_WIN
command.go.$(file.patterns.context)=$(FileName).pdf
@@ -218,6 +220,24 @@ command.1.subsystem.$(file.patterns.context)=1
command.1.subsystem.$(file.patterns.metafun)=1
command.1.subsystem.$(file.patterns.example)=1
+command.name.29.*=Run with jit
+command.subsystem.29.*=1
+command.29.$(file.patterns.context)=$(name.context.runjit) $(FileNameExt)
+command.29.$(file.patterns.metafun)=$(name.context.runjit) $(FileNameExt) --metapost
+command.29.$(file.patterns.example)=$(name.context.runjit) $(FileNameExt) --xml
+command.groupundo.29.*=yes
+command.save.before.29.*=2
+command.shortcut.29.*=Alt+F12
+
+#~ command.name.30.*=Run with jit
+#~ command.subsystem.30.*=1
+#~ command.30.$(file.patterns.context)=$(name.context.runjit) $(FileNameExt)
+#~ command.30.$(file.patterns.metafun)=$(name.context.runjit) $(FileNameExt) --metapost
+#~ command.30.$(file.patterns.example)=$(name.context.runjit) $(FileNameExt) --xml
+#~ command.groupundo.30.*=yes
+#~ command.save.before.30.*=2
+#~ command.shortcut.30.*=Alt+F7
+
# 2 : pdf viewing
command.name.2.$(file.patterns.context)=View PDF File with GhostScript
@@ -337,10 +357,9 @@ highlight.indentation.guides=1
# Editor: keys
user.shortcuts=\
-F12|IDM_COMPILE|\
-Ctrl+F12|IDM_BUILD|\
+F12|IDM_BUILD|\
+Ctrl+F12|IDM_COMPILE|\
Shift+F12|IDM_GO|\
-Alt+F12|IDM_STOPEXECUTE|\
os.x.home.end.keys=0
@@ -658,6 +677,9 @@ fold.margin.colour=#CCCCCC
# testing
+caret.width = 3
+selection.rectangular.switch.mouse = 1
+
#~ cache.layout=
find.command=mtxrun --script grep "$(find.what)" "$(find.files)"
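The hunks above swap the two hard wired actions, so F12 (IDM_BUILD) now runs the check script and Ctrl+F12 (IDM_COMPILE) runs a full process, and they add an Alt+F12 "Run with jit" tool in command slot 29. For readers unfamiliar with SciTE's numbered tool slots, a minimal sketch of how such a slot is wired follows; slot number 31, the label and the Ctrl+F9 shortcut are illustrative assumptions, not part of this change.

# hypothetical slot 31, reusing the purge command defined above as name.context.purge
command.name.31.$(file.patterns.context)=Purge Files
command.31.$(file.patterns.context)=$(name.context.purge)
command.subsystem.31.$(file.patterns.context)=1
command.save.before.31.$(file.patterns.context)=2
command.shortcut.31.$(file.patterns.context)=Ctrl+F9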
diff --git a/context/data/scite/scite-ctx-context.properties b/context/data/scite/context/scite-ctx-context.properties
index a1d5800e6..a1d5800e6 100644
--- a/context/data/scite/scite-ctx-context.properties
+++ b/context/data/scite/context/scite-ctx-context.properties
diff --git a/context/data/scite/scite-ctx-example.properties b/context/data/scite/context/scite-ctx-example.properties
index 78b2f2859..78b2f2859 100644
--- a/context/data/scite/scite-ctx-example.properties
+++ b/context/data/scite/context/scite-ctx-example.properties
diff --git a/context/data/scite/scite-ctx.lua b/context/data/scite/context/scite-ctx.lua
index 421e9cd89..a529b3ced 100644
--- a/context/data/scite/scite-ctx.lua
+++ b/context/data/scite/context/scite-ctx.lua
@@ -902,13 +902,16 @@ local textlists = { -- taken from sort-lan.lua
"U", "V", "W", "X", "Y", "Z",
},
fr = {
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z",
+ "a", "æ", "b", "c", "ç", "d", "e", "è", "é", "ê",
+ "f", "g", "h", "i", "j", "k", "l", "m", "n", "o",
+ "p", "q", "r", "s", "t", "u", "v", "w", "x", "y",
+ "z",
+
+ "A", "Æ", "B", "C", "Ç", "D", "E", "È", "É", "Ê",
+ "F", "G", "H", "I", "J", "K", "L", "M", "N", "O",
+ "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y",
+ "Z",
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z",
},
de = {
"a", "ä", "b", "c", "d", "e", "f", "g", "h", "i",
@@ -1169,12 +1172,12 @@ local textlists = { -- taken from sort-lan.lua
"T", "U", "Ú", "Ü", "V", "W", "X", "Y", "Z",
},
lt = { -- lithuanian
- "a", "ą", "b", "c", ch, "č", "d", "e", "ę", "ė",
+ "a", "ą", "b", "c", "ch", "č", "d", "e", "ę", "ė",
"f", "g", "h", "i", "į", "y", "j", "k", "l", "m",
"n", "o", "p", "r", "s", "š", "t", "u", "ų", "ū",
"v", "z", "ž",
- "A", "Ą", "B", "C", CH, "Č", "D", "E", "Ę", "Ė",
+ "A", "Ą", "B", "C", "CH", "Č", "D", "E", "Ę", "Ė",
"F", "G", "H", "I", "Į", "Y", "J", "K", "L", "M",
"N", "O", "P", "R", "S", "Š", "T", "U", "Ų", "Ū",
"V", "Z", "Ž",
@@ -1383,3 +1386,13 @@ function toggle_strip(name)
OnStrip = ignore_strip
end
end
+
+-- this way we get proper lexing for lexers that do more extensive
+-- parsing
+
+function OnOpen(filename)
+ -- print("opening: " .. filename .. " (size: " .. editor.TextLength .. ")")
+ editor:Colourise(0,editor.TextLength)
+end
+
+-- output.LexerLanguage = ""
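The OnOpen handler added above triggers one full Colourise pass when a file is opened, so lexers that do more extensive parsing see the whole buffer rather than only the visible part. A minimal variant sketch with a size guard follows; the 4 MB threshold is an assumption for illustration, not something this change introduces.

-- illustrative only: skip the eager full lex pass for very large buffers
local maxlexsize = 4 * 1024 * 1024
function OnOpen(filename)
    if editor.TextLength <= maxlexsize then
        editor:Colourise(0, editor.TextLength) -- lex the whole buffer once, up front
    end
end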
diff --git a/context/data/scite/scite-ctx.properties b/context/data/scite/context/scite-ctx.properties
index d56ae653d..874a381e3 100644
--- a/context/data/scite/scite-ctx.properties
+++ b/context/data/scite/context/scite-ctx.properties
@@ -12,7 +12,7 @@
# <?xml version='1.0' language='uk' ?>
ext.lua.auto.reload=1
-ext.lua.startup.script=$(SciteDefaultHome)/scite-ctx.lua
+ext.lua.startup.script=$(SciteDefaultHome)/context/scite-ctx.lua
#~ extension.$(file.patterns.context)=scite-ctx.lua
#~ extension.$(file.patterns.example)=scite-ctx.lua
@@ -69,14 +69,19 @@ ctx.spellcheck.wordsize.uk=4
ctx.spellcheck.wordsize.nl=4
ctx.helpinfo=\
- Shift + F11 pop up menu with ctx options|\
+ Shift + F11 pop up menu with ctx options|\
|\
- Ctrl + B check spelling|\
- Ctrl + M wrap text (auto indent)|\
- Ctrl + R reset spelling results|\
- Ctrl + I insert template|\
- Ctrl + E open log file|\
- Ctrl + + toggle strip
+ Ctrl + B check spelling|\
+ Ctrl + M wrap text (auto indent)|\
+ Ctrl + R reset spelling results|\
+ Ctrl + I insert template|\
+ Ctrl + E open log file|\
+ Ctrl + + toggle strip|\
+ |\
+ F7 / F12 check (or process)|\
+ Ctrl + F7 / F12 process|\
+ Alt + F7 / F12 process with jit|\
+      Shift + F7 / F12     launch
command.name.21.$(file.patterns.context)=CTX Action List
command.subsystem.21.$(file.patterns.context)=3
@@ -145,8 +150,8 @@ command.save.before.26.*=2
command.groupundo.26.*=yes
command.shortcut.26.*=Ctrl+E
-import scite-ctx-context
-import scite-ctx-example
+import context/scite-ctx-context
+import context/scite-ctx-example
ctx.template.scan=yes
ctx.template.rescan=no
diff --git a/context/data/scite/scite-metapost.properties b/context/data/scite/context/scite-metapost.properties
index e3ac25244..fc06dcaa2 100644
--- a/context/data/scite/scite-metapost.properties
+++ b/context/data/scite/context/scite-metapost.properties
@@ -69,7 +69,7 @@ lexer.metapost.comment.process=0
# Metapost: keywords
-import scite-context-data-metapost.properties
+import context/scite-context-data-metapost.properties
keywords.$(file.patterns.metapost)=$(keywordclass.metapost.all)
diff --git a/context/data/scite/scite-pragma.properties b/context/data/scite/context/scite-pragma.properties
index 7308f1fb6..2dea18bad 100644
--- a/context/data/scite/scite-pragma.properties
+++ b/context/data/scite/context/scite-pragma.properties
@@ -25,7 +25,9 @@ $(filter.metafun)\
$(filter.example)\
$(filter.lua)\
$(filter.text)\
-$(filter.pdf)
+$(filter.pdf)\
+$(filter.cweb)\
+$(filter.txt)
# Editor: menus
@@ -36,5 +38,4 @@ XML|xml||\
Lua|lua||\
Text|txt||\
PDF|pdf||\
-CWeb|web||\
-Text|txt||
+CWeb|cweb||
diff --git a/context/data/scite/scite-tex.properties b/context/data/scite/context/scite-tex.properties
index 6933971e2..7d271eaf1 100644
--- a/context/data/scite/scite-tex.properties
+++ b/context/data/scite/context/scite-tex.properties
@@ -89,7 +89,7 @@ lexer.tex.auto.if=1
# only the macros that make sense:
-import scite-context-data-tex.properties
+import context/scite-context-data-tex.properties
# collections
diff --git a/context/data/scite/lexers/archive/scite-context-lexer-pre-3-3-1.lua b/context/data/scite/lexers/archive/scite-context-lexer-pre-3-3-1.lua
deleted file mode 100644
index 7883177b4..000000000
--- a/context/data/scite/lexers/archive/scite-context-lexer-pre-3-3-1.lua
+++ /dev/null
@@ -1,1100 +0,0 @@
-local info = {
- version = 1.324,
- comment = "basics for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- comment = "contains copyrighted code from mitchell.att.foicica.com",
-
-}
-
--- todo: move all code here
--- todo: explore adapted dll ... properties + init
-
--- The fold and lex functions are copied and patched from original code by Mitchell (see
--- lexer.lua). All errors are mine.
---
--- Starting with SciTE version 3.20 there is an issue with coloring. As we still lack
--- a connection with scite itself (properties as well as printing to the log pane) we
--- cannot trace this (on windows). As far as I can see, there are no fundamental
--- changes in lexer.lua or LexLPeg.cxx so it must be in scintilla itself. So for the
--- moment I stick to 3.10. Indicators are: no lexing of 'next' and 'goto <label>' in the
--- Lua lexer and no brace highlighting either. Interesting is that it does work ok in
--- the cld lexer (so the Lua code is okay). Also the fact that char-def.lua lexes fast
--- is a signal that the lexer quits somewhere halfway.
---
--- After checking 3.24 and adapting to the new lexer tables things are okay again. So,
--- this version assumes 3.24 or higher. In 3.24 we have a different token result, i.e. no
--- longer a { tag, pattern } but just two return values. I didn't check other changes but
--- will do that when I run into issues.
---
--- I've considered making a whole copy and patch the other functions too as we need
--- an extra nesting model. However, I don't want to maintain too much. An unfortunate
--- change in 3.03 is that a script can no longer be specified. This means that instead
--- of loading the extensions via the properties file, we now need to load them in our
--- own lexers, unless of course we replace lexer.lua completely (which adds another
--- installation issue).
---
--- Another change has been that _LEXERHOME is no longer available. It looks like more and
--- more functionality gets dropped so maybe at some point we need to ship our own dll/so
--- files. For instance, I'd like to have access to the current filename and other scite
--- properties. For instance, we could cache some info with each file, if only we had
--- knowledge of what file we're dealing with.
---
--- For huge files folding can be pretty slow and I do have some large ones that I keep
--- open all the time. Loading is normally no issue, unless one has remembered the status
--- and the cursor is at the last line of a 200K line file. Optimizing the fold function
--- brought down loading of char-def.lua from 14 sec => 8 sec. Replacing the word_match
--- function and optimizing the lex function gained another 2+ seconds. A 6 second load
--- is quite ok for me. The changed lexer table structure (no subtables) brings loading
--- down to a few seconds.
---
--- When the lexer path is copied to the textadept lexer path, and the theme definition to
--- theme path (as lexer.lua), the lexer works there as well. When I have time and motive
--- I will make a proper setup file to tune the look and feel a bit and associate suffixes
--- with the context lexer. The textadept editor has a nice style tracing option but lacks
--- the tabs for selecting files that scite has. It also has no integrated run that pipes
--- to the log pane (I wonder if it could borrow code from the console2 project). Interesting
--- is that the jit version of textadept crashes on lexing large files (and does not feel
--- faster either).
---
--- Function load(lexer_name) starts with _M.WHITESPACE = lexer_name..'_whitespace' which
--- means that we need to have it frozen at the moment we load another lexer. Because spacing
--- is used to revert to a parent lexer we need to make sure that we load children as late
--- as possible in order not to get the wrong whitespace trigger. This took me quite a while
--- to figure out (not being that familiar with the internals). The lex and fold functions
--- have been optimized. It is a pity that there is no proper print available. Another thing
--- needed is a default style in our own theme style definition, as otherwise we get wrong
--- nested lexers, especially if they are larger than a view. This is the hardest part of
--- getting things right.
---
--- Eventually it might be safer to copy the other methods from lexer.lua here as well so
--- that we have no dependencies, apart from the c library (for which at some point the api
--- will be stable I hope).
---
--- It's a pity that there is no scintillua library for the OSX version of scite. Even
--- better would be to have the scintillua library as integral part of scite as that way I
--- could use OSX alongside windows and linux (depending on needs). Also nice would be to
--- have a proper interface to scite then because currently the lexer is rather isolated and the
--- lua version does not provide all standard libraries. It would also be good to have lpeg
--- support in the regular scite lua extension (currently you need to pick it up from someplace
--- else).
-
-local lpeg = require 'lpeg'
-
-local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
-local lpegmatch = lpeg.match
-local find, gmatch, match, lower, upper, gsub = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub
-local concat = table.concat
-local global = _G
-local type, next, setmetatable, rawset = type, next, setmetatable, rawset
-
-if lexer then
- -- in recent c++ code the lexername and loading is hard coded
-elseif _LEXERHOME then
- dofile(_LEXERHOME .. '/lexer.lua') -- pre 3.03 situation
-else
- dofile('lexer.lua') -- whatever
-end
-
-lexer.context = lexer.context or { }
-local context = lexer.context
-
-context.patterns = context.patterns or { }
-local patterns = context.patterns
-
-lexer._CONTEXTEXTENSIONS = true
-
-local locations = {
- -- lexer.context.path,
- "data", -- optional data directory
- "..", -- regular scite directory
-}
-
-local function collect(name)
--- local definitions = loadfile(name .. ".luc") or loadfile(name .. ".lua")
- local okay, definitions = pcall(function () return require(name) end)
- if okay then
- if type(definitions) == "function" then
- definitions = definitions()
- end
- if type(definitions) == "table" then
- return definitions
- end
- end
-end
-
-function context.loaddefinitions(name)
- for i=1,#locations do
- local data = collect(locations[i] .. "/" .. name)
- if data then
- return data
- end
- end
-end
-
--- maybe more efficient:
-
-function context.word_match(words,word_chars,case_insensitive)
- local chars = '%w_' -- maybe just "" when word_chars
- if word_chars then
- chars = '^([' .. chars .. gsub(word_chars,'([%^%]%-])', '%%%1') ..']+)'
- else
- chars = '^([' .. chars ..']+)'
- end
- if case_insensitive then
- local word_list = { }
- for i=1,#words do
- word_list[lower(words[i])] = true
- end
- return P(function(input, index)
- local s, e, word = find(input,chars,index)
- return word and word_list[lower(word)] and e + 1 or nil
- end)
- else
- local word_list = { }
- for i=1,#words do
- word_list[words[i]] = true
- end
- return P(function(input, index)
- local s, e, word = find(input,chars,index)
- return word and word_list[word] and e + 1 or nil
- end)
- end
-end
-
-local idtoken = R("az","AZ","\127\255","__")
-local digit = R("09")
-local sign = S("+-")
-local period = P(".")
-local space = S(" \n\r\t\f\v")
-
-patterns.idtoken = idtoken
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.period = period
-
-patterns.cardinal = digit^1
-patterns.integer = sign^-1 * digit^1
-
-patterns.real =
- sign^-1 * ( -- at most one
- digit^1 * period * digit^0 -- 10.0 10.
- + digit^0 * period * digit^1 -- 0.10 .10
- + digit^1 -- 10
- )
-
-patterns.restofline = (1-S("\n\r"))^1
-patterns.space = space
-patterns.spacing = space^1
-patterns.nospacing = (1-space)^1
-patterns.anything = P(1)
-
-local endof = S("\n\r\f")
-
-patterns.startofline = P(function(input,index)
- return (index == 1 or lpegmatch(endof,input,index-1)) and index
-end)
-
-function context.exact_match(words,word_chars,case_insensitive)
- local characters = concat(words)
- local pattern -- the concat catches _ etc
- if word_chars == true or word_chars == false or word_chars == nil then
- word_chars = ""
- end
- if type(word_chars) == "string" then
- pattern = S(characters) + idtoken
- if case_insensitive then
- pattern = pattern + S(upper(characters)) + S(lower(characters))
- end
- if word_chars ~= "" then
- pattern = pattern + S(word_chars)
- end
- elseif word_chars then
- pattern = word_chars
- end
- if case_insensitive then
- local list = { }
- for i=1,#words do
- list[lower(words[i])] = true
- end
- return Cmt(pattern^1, function(_,i,s)
- return list[lower(s)] -- and i or nil
- end)
- else
- local list = { }
- for i=1,#words do
- list[words[i]] = true
- end
- return Cmt(pattern^1, function(_,i,s)
- return list[s] -- and i or nil
- end)
- end
-end
-
--- spell checking (we can only load lua files)
---
--- return {
--- min = 3,
--- max = 40,
--- n = 12345,
--- words = {
--- ["someword"] = "someword",
--- ["anotherword"] = "Anotherword",
--- },
--- }
-
-local lists = { }
-
-function context.setwordlist(tag,limit) -- returns hash (lowercase keys and original values)
- if not tag or tag == "" then
- return false, 3
- end
- local list = lists[tag]
- if not list then
- list = context.loaddefinitions("spell-" .. tag)
- if not list or type(list) ~= "table" then
- list = { words = false, min = 3 }
- else
- list.words = list.words or false
- list.min = list.min or 3
- end
- lists[tag] = list
- end
- return list.words, list.min
-end
-
-patterns.wordtoken = R("az","AZ","\127\255")
-patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
-
--- -- pre 3.24:
---
--- function context.checkedword(validwords,validminimum,s,i) -- ,limit
--- if not validwords then -- or #s < validminimum then
--- return true, { "text", i } -- { "default", i }
--- else
--- -- keys are lower
--- local word = validwords[s]
--- if word == s then
--- return true, { "okay", i } -- exact match
--- elseif word then
--- return true, { "warning", i } -- case issue
--- else
--- local word = validwords[lower(s)]
--- if word == s then
--- return true, { "okay", i } -- exact match
--- elseif word then
--- return true, { "warning", i } -- case issue
--- elseif upper(s) == s then
--- return true, { "warning", i } -- probably a logo or acronym
--- else
--- return true, { "error", i }
--- end
--- end
--- end
--- end
-
-function context.checkedword(validwords,validminimum,s,i) -- ,limit
- if not validwords then -- or #s < validminimum then
- return true, "text", i -- { "default", i }
- else
- -- keys are lower
- local word = validwords[s]
- if word == s then
- return true, "okay", i -- exact match
- elseif word then
- return true, "warning", i -- case issue
- else
- local word = validwords[lower(s)]
- if word == s then
- return true, "okay", i -- exact match
- elseif word then
- return true, "warning", i -- case issue
- elseif upper(s) == s then
- return true, "warning", i -- probably a logo or acronym
- else
- return true, "error", i
- end
- end
- end
-end
-
-function context.styleofword(validwords,validminimum,s) -- ,limit
- if not validwords or #s < validminimum then
- return "text"
- else
- -- keys are lower
- local word = validwords[s]
- if word == s then
- return "okay" -- exact match
- elseif word then
- return "warning" -- case issue
- else
- local word = validwords[lower(s)]
- if word == s then
- return "okay" -- exact match
- elseif word then
- return "warning" -- case issue
- elseif upper(s) == s then
- return "warning" -- probably a logo or acronym
- else
- return "error"
- end
- end
- end
-end
-
--- overloaded functions
-
-local FOLD_BASE = SC_FOLDLEVELBASE
-local FOLD_HEADER = SC_FOLDLEVELHEADERFLAG
-local FOLD_BLANK = SC_FOLDLEVELWHITEFLAG
-
-local get_style_at = GetStyleAt
-local get_property = GetProperty
-local get_indent_amount = GetIndentAmount
-
-local h_table, b_table, n_table = { }, { }, { }
-
-setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEADER } t[level] = v return v end })
-setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
-setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })
-
--- -- todo: move the local functions outside (see below) .. old variant < 3.24
---
--- local newline = P("\r\n") + S("\r\n")
--- local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
--- local p_nop = newline
---
--- local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
--- local foldsymbols = lexer._foldsymbols
--- if not foldsymbols then
--- return { }
--- end
--- local patterns = foldsymbols._patterns
--- if not patterns then
--- return { }
--- end
--- local nofpatterns = #patterns
--- if nofpatterns == 0 then
--- return { }
--- end
--- local folds = { }
--- local line_num = start_line
--- local prev_level = start_level
--- local current_level = prev_level
--- local validmatches = foldsymbols._validmatches
--- if not validmatches then
--- validmatches = { }
--- for symbol, matches in next, foldsymbols do -- whatever = { start = 1, stop = -1 }
--- if not find(symbol,"^_") then -- brrr
--- for s, _ in next, matches do
--- validmatches[s] = true
--- end
--- end
--- end
--- foldsymbols._validmatches = validmatches
--- end
--- -- of course we could instead build a nice lpeg checker .. something for
--- -- a rainy day with a stack of new cd's at hand
--- local function action_y(pos,line)
--- for i=1,nofpatterns do
--- for s, m in gmatch(line,patterns[i]) do
--- if validmatches[m] then
--- local symbols = foldsymbols[get_style_at(start_pos + pos + s - 1)]
--- if symbols then
--- local action = symbols[m]
--- if action then
--- if type(action) == 'number' then -- we could store this in validmatches if there was only one symbol category
--- current_level = current_level + action
--- else
--- current_level = current_level + action(text,pos,line,s,m)
--- end
--- if current_level < FOLD_BASE then
--- current_level = FOLD_BASE
--- end
--- end
--- end
--- end
--- end
--- end
--- if current_level > prev_level then
--- folds[line_num] = h_table[prev_level] -- { prev_level, FOLD_HEADER }
--- else
--- folds[line_num] = n_table[prev_level] -- { prev_level }
--- end
--- prev_level = current_level
--- line_num = line_num + 1
--- end
--- local function action_n()
--- folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
--- line_num = line_num + 1
--- end
--- if lexer._reset_parser then
--- lexer._reset_parser()
--- end
--- local lpegpattern = (p_yes/action_y + p_nop/action_n)^0 -- not too efficient but indirect function calls are neither but
--- lpegmatch(lpegpattern,text) -- keys are not pressed that fast ... large files are slow anyway
--- return folds
--- end
-
--- The 3.24 variant; no longer subtable optimization is needed:
-
-local newline = P("\r\n") + S("\r\n")
-local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
-local p_nop = newline
-
-local folders = { }
-
-local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
- local folder = folders[lexer]
- if not folder then
- --
- local pattern, folds, text, start_pos, line_num, prev_level, current_level
- --
- local fold_symbols = lexer._foldsymbols
- local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
- --
- if fold_pattern then
- -- if no functions are found then we could have a faster one
-
- -- fold_pattern = Cp() * C(fold_pattern) * Carg(1) / function(s,match,pos)
- -- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
- -- local l = symbols and symbols[match]
- -- if l then
- -- local t = type(l)
- -- if t == 'number' then
- -- current_level = current_level + l
- -- elseif t == 'function' then
- -- current_level = current_level + l(text, pos, line, s, match)
- -- end
- -- end
- -- end
- -- fold_pattern = (fold_pattern + P(1))^0
- -- local action_y = function(pos,line)
- -- lpegmatch(fold_pattern,line,1,pos)
- -- folds[line_num] = prev_level
- -- if current_level > prev_level then
- -- folds[line_num] = prev_level + FOLD_HEADER
- -- end
- -- if current_level < FOLD_BASE then
- -- current_level = FOLD_BASE
- -- end
- -- prev_level = current_level
- -- line_num = line_num + 1
- -- end
- -- local action_n = function()
- -- folds[line_num] = prev_level + FOLD_BLANK
- -- line_num = line_num + 1
- -- end
- -- pattern = (p_yes/action_y + p_nop/action_n)^0
-
- fold_pattern = Cp() * C(fold_pattern) / function(s,match)
- local symbols = fold_symbols[get_style_at(start_pos + s)]
- if symbols then
- local l = symbols[match]
- if l then
- current_level = current_level + l
- end
- end
- end
- local action_y = function()
- folds[line_num] = prev_level
- if current_level > prev_level then
- folds[line_num] = prev_level + FOLD_HEADER
- end
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
- prev_level = current_level
- line_num = line_num + 1
- end
- local action_n = function()
- folds[line_num] = prev_level + FOLD_BLANK
- line_num = line_num + 1
- end
- pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0
-
- else
- -- the traditional one but a bit optimized
- local fold_symbols_patterns = fold_symbols._patterns
- local action_y = function(pos,line)
- for j = 1, #fold_symbols_patterns do
- for s, match in gmatch(line,fold_symbols_patterns[j]) do -- '()('..patterns[i]..')'
- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
- local l = symbols and symbols[match]
- local t = type(l)
- if t == 'number' then
- current_level = current_level + l
- elseif t == 'function' then
- current_level = current_level + l(text, pos, line, s, match)
- end
- end
- end
- folds[line_num] = prev_level
- if current_level > prev_level then
- folds[line_num] = prev_level + FOLD_HEADER
- end
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
- prev_level = current_level
- line_num = line_num + 1
- end
- local action_n = function()
- folds[line_num] = prev_level + FOLD_BLANK
- line_num = line_num + 1
- end
- pattern = (p_yes/action_y + p_nop/action_n)^0
- end
- --
- local reset_parser = lexer._reset_parser
- --
- folder = function(_text_,_start_pos_,_start_line_,_start_level_)
- if reset_parser then
- reset_parser()
- end
- folds = { }
- text = _text_
- start_pos = _start_pos_
- line_num = _start_line_
- prev_level = _start_level_
- current_level = prev_level
- lpegmatch(pattern,text)
--- return folds
-local t = folds
-folds = nil
-return t -- so folds can be collected
- end
- folders[lexer] = folder
- end
- return folder(text,start_pos,start_line,start_level,lexer)
-end
-
--- local function fold_by_indentation(text,start_pos,start_line,start_level)
--- local folds = { }
--- local current_line = start_line
--- local prev_level = start_level
--- for line in gmatch(text,'[\t ]*(.-)\r?\n') do
--- if line ~= "" then
--- local current_level = FOLD_BASE + get_indent_amount(current_line)
--- if current_level > prev_level then -- next level
--- local i = current_line - 1
--- while true do
--- local f = folds[i]
--- if f and f[2] == FOLD_BLANK then
--- i = i - 1
--- else
--- break
--- end
--- end
--- local f = folds[i]
--- if f then
--- f[2] = FOLD_HEADER
--- end -- low indent
--- folds[current_line] = n_table[current_level] -- { current_level } -- high indent
--- elseif current_level < prev_level then -- prev level
--- local f = folds[current_line - 1]
--- if f then
--- f[1] = prev_level -- high indent
--- end
--- folds[current_line] = n_table[current_level] -- { current_level } -- low indent
--- else -- same level
--- folds[current_line] = n_table[prev_level] -- { prev_level }
--- end
--- prev_level = current_level
--- else
--- folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
--- end
--- current_line = current_line + 1
--- end
--- return folds
--- end
-
--- local function fold_by_indentation(text,start_pos,start_line,start_level)
--- local folds = { }
--- local current_line = start_line
--- local prev_level = start_level
--- for line in gmatch(text,'[\t ]*(.-)\r?\n') do
--- if line ~= '' then
--- local current_level = FOLD_BASE + get_indent_amount(current_line)
--- if current_level > prev_level then -- next level
--- local i = current_line - 1
--- local f
--- while true do
--- f = folds[i]
--- if not f then
--- break
--- elseif f[2] == FOLD_BLANK then
--- i = i - 1
--- else
--- f[2] = FOLD_HEADER -- low indent
--- break
--- end
--- end
--- folds[current_line] = { current_level } -- high indent
--- elseif current_level < prev_level then -- prev level
--- local f = folds[current_line - 1]
--- if f then
--- f[1] = prev_level -- high indent
--- end
--- folds[current_line] = { current_level } -- low indent
--- else -- same level
--- folds[current_line] = { prev_level }
--- end
--- prev_level = current_level
--- else
--- folds[current_line] = { prev_level, FOLD_BLANK }
--- end
--- current_line = current_line + 1
--- end
--- for line, level in next, folds do
--- folds[line] = level[1] + (level[2] or 0)
--- end
--- return folds
--- end
-
-local folds, current_line, prev_level
-
-local function action_y()
- local current_level = FOLD_BASE + get_indent_amount(current_line)
- if current_level > prev_level then -- next level
- local i = current_line - 1
- local f
- while true do
- f = folds[i]
- if not f then
- break
- elseif f[2] == FOLD_BLANK then
- i = i - 1
- else
- f[2] = FOLD_HEADER -- low indent
- break
- end
- end
- folds[current_line] = { current_level } -- high indent
- elseif current_level < prev_level then -- prev level
- local f = folds[current_line - 1]
- if f then
- f[1] = prev_level -- high indent
- end
- folds[current_line] = { current_level } -- low indent
- else -- same level
- folds[current_line] = { prev_level }
- end
- prev_level = current_level
- current_line = current_line + 1
-end
-
-local function action_n()
- folds[current_line] = { prev_level, FOLD_BLANK }
- current_line = current_line + 1
-end
-
-local pattern = ( S("\t ")^0 * ( (1-S("\n\r"))^1 / action_y + P(true) / action_n) * newline )^0
-
-local function fold_by_indentation(text,start_pos,start_line,start_level)
- -- initialize
- folds = { }
- current_line = start_line
- prev_level = start_level
- -- define
- -- -- not here .. pattern binds and local functions are not frozen
- -- analyze
- lpegmatch(pattern,text)
- -- flatten
- for line, level in next, folds do
- folds[line] = level[1] + (level[2] or 0)
- end
- -- done
--- return folds
-local t = folds
-folds = nil
-return t -- so folds can be collected
-end
-
-local function fold_by_line(text,start_pos,start_line,start_level)
- local folds = { }
- -- can also be lpeg'd
- for _ in gmatch(text,".-\r?\n") do
- folds[start_line] = n_table[start_level] -- { start_level }
- start_line = start_line + 1
- end
- return folds
-end
-
-local threshold_by_lexer = 512 * 1024 -- we don't know the filesize yet
-local threshold_by_parsing = 512 * 1024 -- we don't know the filesize yet
-local threshold_by_indentation = 512 * 1024 -- we don't know the filesize yet
-local threshold_by_line = 512 * 1024 -- we don't know the filesize yet
-
-function context.fold(text,start_pos,start_line,start_level) -- hm, we had size thresholds .. where did they go
- if text == '' then
- return { }
- end
- local lexer = global._LEXER
- local fold_by_lexer = lexer._fold
- local fold_by_symbols = lexer._foldsymbols
- local filesize = 0 -- we don't know that
- if fold_by_lexer then
- if filesize <= threshold_by_lexer then
- return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
- end
- elseif fold_by_symbols then -- and get_property('fold.by.parsing',1) > 0 then
- if filesize <= threshold_by_parsing then
- return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
- end
- elseif get_property('fold.by.indentation',1) > 0 then
- if filesize <= threshold_by_indentation then
- return fold_by_indentation(text,start_pos,start_line,start_level,lexer)
- end
- elseif get_property('fold.by.line',1) > 0 then
- if filesize <= threshold_by_line then
- return fold_by_line(text,start_pos,start_line,start_level,lexer)
- end
- end
- return { }
-end
-
--- The following code is mostly unchanged:
-
-local function add_rule(lexer, id, rule)
- if not lexer._RULES then
- lexer._RULES = {}
- lexer._RULEORDER = {}
- end
- lexer._RULES[id] = rule
- lexer._RULEORDER[#lexer._RULEORDER + 1] = id
-end
-
-local function add_style(lexer, token_name, style)
- local len = lexer._STYLES.len
- if len == 32 then
- len = len + 8
- end
- if len >= 128 then
- print('Too many styles defined (128 MAX)')
- end
- lexer._TOKENS[token_name] = len
- lexer._STYLES[len] = style
- lexer._STYLES.len = len + 1
-end
-
-local function join_tokens(lexer)
- local patterns, order = lexer._RULES, lexer._RULEORDER
- local token_rule = patterns[order[1]]
- for i=2,#order do
- token_rule = token_rule + patterns[order[i]]
- end
- lexer._TOKENRULE = token_rule
- return lexer._TOKENRULE
-end
-
-local function add_lexer(grammar, lexer, token_rule)
- local token_rule = join_tokens(lexer)
- local lexer_name = lexer._NAME
- local children = lexer._CHILDREN
- for i=1,#children do
- local child = children[i]
- if child._CHILDREN then
- add_lexer(grammar, child)
- end
- local child_name = child._NAME
- local rules = child._EMBEDDEDRULES[lexer_name]
- local rules_token_rule = grammar['__'..child_name] or rules.token_rule
- grammar[child_name] = (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1 * V(lexer_name)
- local embedded_child = '_' .. child_name
- grammar[embedded_child] = rules.start_rule * (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1
- token_rule = V(embedded_child) + token_rule
- end
- grammar['__' .. lexer_name] = token_rule
- grammar[lexer_name] = token_rule^0
-end
-
-local function build_grammar(lexer, initial_rule)
- local children = lexer._CHILDREN
- if children then
- local lexer_name = lexer._NAME
- if not initial_rule then
- initial_rule = lexer_name
- end
- local grammar = { initial_rule }
- add_lexer(grammar, lexer)
- lexer._INITIALRULE = initial_rule
- lexer._GRAMMAR = Ct(P(grammar))
- else
- lexer._GRAMMAR = Ct(join_tokens(lexer)^0)
- end
-end
-
--- so far. We need these local functions in the next one.
---
--- Before 3.24 we had tokens[..] = { category, position }, now it's a two values.
-
-local lineparsers = { }
-
-function context.lex(text,init_style)
- local lexer = global._LEXER
- local grammar = lexer._GRAMMAR
- if not grammar then
- return { }
- elseif lexer._LEXBYLINE then -- we could keep token
- local tokens = { }
- local offset = 0
- local noftokens = 0
- -- -- pre 3.24
- --
- -- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
- -- local line_tokens = lpegmatch(grammar,line)
- -- if line_tokens then
- -- for i=1,#line_tokens do
- -- local token = line_tokens[i]
- -- token[2] = token[2] + offset
- -- noftokens = noftokens + 1
- -- tokens[noftokens] = token
- -- end
- -- end
- -- offset = offset + #line
- -- if noftokens > 0 and tokens[noftokens][2] ~= offset then
- -- noftokens = noftokens + 1
- -- tokens[noftokens] = { 'default', offset + 1 }
- -- end
- -- end
-
- -- for line in gmatch(text,'[^\r\n]*\r?\n?') do
- -- local line_tokens = lpegmatch(grammar,line)
- -- if line_tokens then
- -- for i=1,#line_tokens,2 do
- -- noftokens = noftokens + 1
- -- tokens[noftokens] = line_tokens[i]
- -- noftokens = noftokens + 1
- -- tokens[noftokens] = line_tokens[i + 1] + offset
- -- end
- -- end
- -- offset = offset + #line
- -- if noftokens > 0 and tokens[noftokens] ~= offset then
- -- noftokens = noftokens + 1
- -- tokens[noftokens] = 'default'
- -- noftokens = noftokens + 1
- -- tokens[noftokens] = offset + 1
- -- end
- -- end
-
- local lineparser = lineparsers[lexer]
- if not lineparser then -- probably a cmt is more efficient
- lineparser = C((1-newline)^0 * newline) / function(line)
- local length = #line
- local line_tokens = length > 0 and lpegmatch(grammar,line)
- if line_tokens then
- for i=1,#line_tokens,2 do
- noftokens = noftokens + 1
- tokens[noftokens] = line_tokens[i]
- noftokens = noftokens + 1
- tokens[noftokens] = line_tokens[i + 1] + offset
- end
- end
- offset = offset + length
- if noftokens > 0 and tokens[noftokens] ~= offset then
- noftokens = noftokens + 1
- tokens[noftokens] = 'default'
- noftokens = noftokens + 1
- tokens[noftokens] = offset + 1
- end
- end
- lineparser = lineparser^0
- lineparsers[lexer] = lineparser
- end
- lpegmatch(lineparser,text)
- return tokens
-
- elseif lexer._CHILDREN then
- -- as we cannot print, tracing is not possible ... this might change as we can as well
- -- generate them all in one go (sharing as much as possible)
- local hash = lexer._HASH -- hm, was _hash
- if not hash then
- hash = { }
- lexer._HASH = hash
- end
- grammar = hash[init_style]
- if grammar then
- lexer._GRAMMAR = grammar
- else
- for style, style_num in next, lexer._TOKENS do
- if style_num == init_style then
- -- the name of the lexers is filtered from the whitespace
- -- specification
- local lexer_name = match(style,'^(.+)_whitespace') or lexer._NAME
- if lexer._INITIALRULE ~= lexer_name then
- grammar = hash[lexer_name]
- if not grammar then
- build_grammar(lexer,lexer_name)
- grammar = lexer._GRAMMAR
- hash[lexer_name] = grammar
- end
- end
- break
- end
- end
- grammar = grammar or lexer._GRAMMAR
- hash[init_style] = grammar
- end
- return lpegmatch(grammar,text)
- else
- return lpegmatch(grammar,text)
- end
-end
-
--- todo: keywords: one lookup and multiple matches
-
--- function context.token(name, patt)
--- return Ct(patt * Cc(name) * Cp())
--- end
---
--- -- hm, changed in 3.24 .. no longer a table
-
-function context.token(name, patt)
- return patt * Cc(name) * Cp()
-end
-
-lexer.fold = context.fold
-lexer.lex = context.lex
-lexer.token = context.token
-lexer.exact_match = context.exact_match
-
--- helper .. alas ... the lexer's lua instance is rather crippled .. not even
--- math is part of it
-
-local floor = math and math.floor
-local char = string.char
-
-if not floor then
-
- floor = function(n)
- return tonumber(string.format("%d",n))
- end
-
- math = math or { }
-
- math.floor = floor
-
-end
-
-local function utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
-end
-
-context.utfchar = utfchar
-
--- a helper from l-lpeg:
-
-local gmatch = string.gmatch
-
-local function make(t)
- local p
- for k, v in next, t do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
- end
- end
- return p
-end
-
-function lpeg.utfchartabletopattern(list)
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
-end
-
--- patterns.invisibles =
--- P(utfchar(0x00A0)) -- nbsp
--- + P(utfchar(0x2000)) -- enquad
--- + P(utfchar(0x2001)) -- emquad
--- + P(utfchar(0x2002)) -- enspace
--- + P(utfchar(0x2003)) -- emspace
--- + P(utfchar(0x2004)) -- threeperemspace
--- + P(utfchar(0x2005)) -- fourperemspace
--- + P(utfchar(0x2006)) -- sixperemspace
--- + P(utfchar(0x2007)) -- figurespace
--- + P(utfchar(0x2008)) -- punctuationspace
--- + P(utfchar(0x2009)) -- breakablethinspace
--- + P(utfchar(0x200A)) -- hairspace
--- + P(utfchar(0x200B)) -- zerowidthspace
--- + P(utfchar(0x202F)) -- narrownobreakspace
--- + P(utfchar(0x205F)) -- math thinspace
-
-patterns.invisibles = lpeg.utfchartabletopattern {
- utfchar(0x00A0), -- nbsp
- utfchar(0x2000), -- enquad
- utfchar(0x2001), -- emquad
- utfchar(0x2002), -- enspace
- utfchar(0x2003), -- emspace
- utfchar(0x2004), -- threeperemspace
- utfchar(0x2005), -- fourperemspace
- utfchar(0x2006), -- sixperemspace
- utfchar(0x2007), -- figurespace
- utfchar(0x2008), -- punctuationspace
- utfchar(0x2009), -- breakablethinspace
- utfchar(0x200A), -- hairspace
- utfchar(0x200B), -- zerowidthspace
- utfchar(0x202F), -- narrownobreakspace
- utfchar(0x205F), -- math thinspace
-}
-
--- now we can make:
-
-patterns.iwordtoken = patterns.wordtoken - patterns.invisibles
-patterns.iwordpattern = patterns.iwordtoken^3
-
--- require("themes/scite-context-theme")
-
--- In order to deal with some bug in additional styles (I have no cue what is
--- wrong, but additional styles get ignored and clash somehow) I just copy the
--- original lexer code ... see original for comments.
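The archive lexer deleted above also carries two small helpers: utfchar, which encodes a code point as UTF-8 bytes, and lpeg.utfchartabletopattern, which folds a list of UTF-8 strings into one prefix sharing lpeg pattern (used for patterns.invisibles). A minimal usage sketch, assuming the definitions from the deleted file are in scope; the two code points come from its invisibles list.

local lpeg = require("lpeg")
-- with the archive lexer's helpers loaded:
local nbsp = lexer.context.utfchar(0x00A0)           -- "\194\160"
local zwsp = lexer.context.utfchar(0x200B)           -- "\226\128\139"
local invisible = lpeg.utfchartabletopattern { nbsp, zwsp }
print(lpeg.match(invisible, nbsp))                    -- 3: both bytes of the nbsp matched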
diff --git a/context/data/scite/lexers/data/scite-context-data-context.lua b/context/data/scite/lexers/data/scite-context-data-context.lua
deleted file mode 100644
index f167c82c1..000000000
--- a/context/data/scite/lexers/data/scite-context-data-context.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-return {
- ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "doifelsecommandhandler", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", 
"doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", 
"dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens" },
-}
\ No newline at end of file
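The deleted data file above just returns a Lua table with two keyword lists, constants and helpers; the collect and loaddefinitions functions in the archive lexer shown earlier load such files via require and accept either the table itself or a function returning it. A minimal consumption sketch, assuming the file is still reachable on package.path under its module name.

local ok, data = pcall(require, "scite-context-data-context")
if ok and type(data) == "function" then
    data = data() -- collect() also accepts a constructor function
end
if ok and type(data) == "table" then
    print(#data.constants, #data.helpers) -- sizes of the two keyword lists
end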
diff --git a/context/data/scite/lexers/data/scite-context-data-interfaces.lua b/context/data/scite/lexers/data/scite-context-data-interfaces.lua
deleted file mode 100644
index b2c09b62a..000000000
--- a/context/data/scite/lexers/data/scite-context-data-interfaces.lua
+++ /dev/null
@@ -1,10 +0,0 @@
-return {
- ["cs"]={ "CAP", "Cap", "Caps", "Cisla", "KAP", "Kap", "Kaps", "MESIC", "Rimskecislice", "SLOVA", "SLOVO", "Slova", "Slovo", "VSEDNIDEN", "Znak", "Znaky", "aktualnicislonadpisu", "aktualnidatum", "appendix", "arg", "atleftmargin", "atrightmargin", "barevnalista", "barva", "bilemisto", "bottomspace", "bublinkovanapoveda", "bydliste", "bypassblocks", "cap", "celkovypocetstran", "cernalinka", "cernelinky", "chapter", "chem", "cisla", "cislonadpisu", "cislopodrovnice", "cislorovnice", "cislostrany", "citace", "citovat", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "coupledregister", "crlf", "cutspace", "datum", "decrementnumber", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecolumnset", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definelayout", "definemathalignment", "definepagebreak", "defineplacement", "definerawfont", "definerule", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definuj", "definujakcent", "definujbarvu", "definujblok", "definujbloksekce", "definujbuffer", "definujfont", "definujformatodkazu", "definujhbox", "definujhlavnipole", "definujinterakcnimenu", "definujkombinovanyseznam", "definujkonverzi", "definujlogo", "definujnadpis", "definujobrazeksymbol", "definujodkaz", "definujodstavce", "definujopis", "definujoramovani", "definujoramovanytext", "definujpaletu", "definujplvouciobjekt", "definujpodpole", "definujpole", "definujpopis", "definujpopisek", "definujprekryv", "definujpreskok", "definujprikaz", "definujprofil", "definujprogram", "definujprostredizakladnihofontu", "definujrejstrik", "definujsablonutabulky", "definujsekci", "definujseznam", "definujseznamodkazu", "definujskupinubarev", "definujstartstop", "definujstyl", "definujstylfontu", "definujsymbol", "definujsynonumumfontu", "definujsynonyma", "definujtabelaci", "definujtext", "definujtrideni", "definujupravu", "definujvelikostpapiru", "definujverzi", "definujvycet", "definujvystup", "definujzakladnifont", "definujzasobnikpoli", "definujznaceni", "definujznak", "delkaseznamu", "description", "dodrzujprofil", "dodrzujverzi", "dodrzujverziprofilu", "dvoustrannypapir", "emptylines", "enumeration", "externiobraz", "fakt", "footnotetext", "forceblocks", "framedtext", "getnumber", "headsym", "hl", "hlavnijazyk", "hlavniuroven", "hodnotabarvy", "hodnotasedi", "immediatebetweenlist", "immediatetolist", "indentation", "ininner", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "instalacejazyka", "interakcnilista", "interakcnitlacitka", "interaktivnimenu", "jazyk", "jdidolu", "jdina", "jdinabox", "jdinastranu", "jmeno", "kap", "klonujpole", "komponenta", "konvertujcislo", "kopirujpole", "korekcebilehomista", "labeling", "leg", "listsymbol", "loadsorts", "loadsynonyms", "maoramovani", "mapfontsize", "marginalnilinka", "marginalninadpis", "marginalnislovo", "marginalnitext", "matematika", "mazaramovani", "mediaeval", "meritko", "mesic", "mezera", "moveformula", "movesidefloat", "mrizka", "nadpis", "nadruhyokraj", "nalevo", "nalevyokraj", "name", "naokraj", "napravo", "napravyokraj", "nastavbarvu", "nastavbarvy", "nastavbilamista", "nastavblok", "nastavbloksekce", "nastavbuffer", "nastavcernelinky", "nastavcislonadpisu", "nastavcislostrany", "nastavcislovani", "nastavcislovaniodstavcu", "nastavcislovaniradku", 
"nastavcislovanistran", "nastavcitaci", "nastavdefinicipoznamekpodcarou", "nastavdeleniplvoucichobjektu", "nastavdelitko", "nastavdolnitexty", "nastaveni", "nastavexterniobrazy", "nastavhorejsek", "nastavhornitexty", "nastavinterakci", "nastavinterakcnilistu", "nastavinterakcnimenu", "nastavinterakcniobrazovku", "nastavjazyk", "nastavkapitalky", "nastavkombinovanyseznam", "nastavkomentar", "nastavkomentarstrany", "nastavlegendu", "nastavmarginalie", "nastavmarginalniblok", "nastavmarginalnilinky", "nastavmeziradkovoumezeru", "nastavnadpis", "nastavnadpisy", "nastavodkazovani", "nastavodsazovani", "nastavodstavce", "nastavopis", "nastavoramovanetexty", "nastavoramovani", "nastavorez", "nastavotoceni", "nastavpaletu", "nastavplvouciobjekt", "nastavplvouciobjekty", "nastavpodcislostrany", "nastavpodtrzeni", "nastavpole", "nastavpolozky", "nastavpopisek", "nastavpopisky", "nastavpopisy", "nastavpozadi", "nastavpoznamkypodcarou", "nastavprechodstrany", "nastavpreskok", "nastavprofily", "nastavprogramy", "nastavprostredizakladnihofontu", "nastavpublikace", "nastavradkovani", "nastavradky", "nastavrastr", "nastavrejstrik", "nastavrovnice", "nastavsadusymbolu", "nastavsekci", "nastavseznam", "nastavseznamodkazu", "nastavsirkucary", "nastavsloupce", "nastavspodek", "nastavspojeni", "nastavsynchronizaci", "nastavsynchronizacnilistu", "nastavsynonyma", "nastavsystem", "nastavtab", "nastavtabelaci", "nastavtabulky", "nastavtenkelinky", "nastavtext", "nastavtexthlavicky", "nastavtextovelinky", "nastavtextpopisku", "nastavtexttexty", "nastavtextyupati", "nastavtextyzahlavi", "nastavtlacitka", "nastavtoleranci", "nastavtrideni", "nastavtype", "nastavumisteniprotejsku", "nastavumistovani", "nastavupati", "nastavupravu", "nastavurl", "nastavusporadani", "nastavvelikostpapiru", "nastavverze", "nastavvsechnapole", "nastavvycty", "nastavvyplnovelinky", "nastavvyplnoveradky", "nastavvystup", "nastavvzhled", "nastavzahlavi", "nastavzakladnifont", "nastavzarovnani", "nastavznaceni", "nastavzuzeni", "nastrane", "navigating", "nejakyradek", "nekde", "nextsection", "neznamo", "nivy", "nizky", "nocap", "nokap", "nop", "numberofsubpages", "obrazovka", "odkaz", "odkaznadatum", "odkaznastranu", "odkaznatext", "odkazujici", "odsazenishora", "odsazenizleva", "odsazovani", "okr", "opakovat", "opis", "opissoubor", "oramovani", "oref", "orez", "otocit", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "oznaceni", "oznacverzi", "pagedepth", "pageoffset", "paragraph", "parovastrana", "part", "pis", "placefloat", "placeheadnumber", "placeheadtext", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placepagenumber", "placerawlist", "placereferencelist", "placerule", "placetextvariable", "plnezneni", "pol", "pole", "polozka", "polozky", "popisky", "poppisek", "porovnejpaletu", "porovnejskupinubarev", "positiontext", "pozadi", "pozice", "poznamka", "poznamkapodcarou", "pref", "prelozit", "premistinamrizku", "prepninazakladnifont", "preskoc", "prizpusobivepole", "prizpusobvzhled", "produkt", "program", "projekt", "propojeneznaceni", "propojenydokument", "propojenyrejstrik", "prostredi", "publikace", "ran", "ref", "register", "reservefloat", "reset", "resetnumber", "resettextcontent", "resetznaceni", "rimskecislice", "rozdelplvouciobjekt", "rozmer", "rozpojeneznaceni", "roztazene", "schovejbloky", "section", "sedabarva", "seeregister", "setnumber", "settextcontent", "settextvariable", "setupanswerarea", "setupcolumnset", 
"setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupforms", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupnumber", "setuppaper", "setupplacement", "setuprule", "setupstartstop", "setupstrut", "setuptextposition", "setuptextvariable", "sirkalevehookraje", "sirkalevemarginalie", "sirkamarginalie", "sirkaokraje", "sirkapapiru", "sirkapravehookraje", "sirkapravemarginalie", "sirkasazby", "sirkaseznamu", "sirkatextu", "sirkatiskpapiru", "sloupec", "slovovpravo", "sort", "spodek", "stanovcharakteristickuseznamu", "stanovcislonadpisu", "startalignment", "startbarva", "startbuffer", "startcislovaniradku", "startcitace", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startdescription", "startdocument", "startdokument", "startenumeration", "startfakt", "startfigure", "startfloattext", "startformula", "startframedtext", "startglobalni", "starthiding", "startinteraktivnimenu", "startitemgroup", "startkodovani", "startkomponenta", "startkorekceradku", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokalni", "startlokalnipoznamkypodcarou", "startmakeup", "startmarginalniblok", "startmarginalnilinka", "startmarginblock", "startnamemakeup", "startnarrower", "startnezhustene", "startobraz", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startpozadi", "startprehled", "startprekryv", "startprodukt", "startprofil", "startprofile", "startprojekt", "startprostredi", "startprotejsek", "startradek", "startradky", "startrastr", "startregister", "startsadasymbolu", "startsloupce", "startspojeni", "startsymbolset", "startsynchronizace", "startsynchronization", "starttable", "starttables", "starttabulate", "starttabulka", "starttabulky", "starttext", "starttextovalinka", "starttyping", "startumistovani", "startunpacked", "startuprava", "startverze", "startzarovnavani", "startzhustene", "startzuzeni", "stopalignment", "stopbarva", "stopbuffer", "stopcislovaniradku", "stopcitace", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopdokument", "stopenumeration", "stopfakt", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobalni", "stophiding", "stopinteraktivnimenu", "stopitemgroup", "stopkodovani", "stopkomponenta", "stopkorekceradku", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokalni", "stoplokalnipoznamkypodcarou", "stopmakeup", "stopmarginalniblok", "stopmarginalnilinka", "stopmarginblock", "stopnamemakeup", "stopnarrower", "stopnezhustene", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stoppozadi", "stopprehled", "stopprekryv", "stopprodukt", "stopprofil", "stopprofile", "stopprojekt", "stopprostredi", "stopprotejsek", "stopradek", "stopradky", "stoprastr", "stopsloupce", "stopspojeni", "stopsymbolset", "stopsynchronizace", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptabulka", "stoptabulky", "stoptext", "stoptextovalinka", "stoptyping", "stopumistovani", "stopunpacked", "stopuprava", "stopverze", "stopzarovnavani", "stopzhustene", "stopzuzeni", "strana", "sub", "subject", "subpagenumber", "subsection", "subsubject", 
"subsubsection", "subsubsubject", "switchtorawfont", "sym", "symbol", "synchronizacnilista", "synchronizovat", "synonym", "tab", "tecky", "tenkalinka", "tenkelinky", "testcolumn", "testpage", "tex", "texthlavicky", "textovalinka", "textpopisku", "textvariable", "title", "tlacitko", "tlacitkomenu", "tloustkacary", "tref", "tvrdamezera", "tvrdemezery", "txt", "typebuffer", "ukazbarvu", "ukazexterniobrazy", "ukazmrizku", "ukaznastaveni", "ukazpaletu", "ukazpodpery", "ukazpole", "ukazpostredizakladnihofontu", "ukazramecek", "ukazsadusymbolu", "ukazskupinubarev", "ukazupravu", "ukazvytisk", "ukazvzhled", "ukazzakladnifont", "umistikombinovanyseznam", "umistilegendu", "umistiloga", "umistilokalnipoznamkypodcarou", "umistinadsebe", "umistinamrizku", "umistipodrovnici", "umistipoznamkypodcarou", "umistirejstrik", "umistirovnici", "umistiseznam", "umistivedlesebe", "umistizalozky", "underbar", "underbars", "urcicharakteristikurejstriku", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "uzijJSscripts", "uzijURL", "uzijadresar", "uzijbloky", "uzijexternidokument", "uzijexterniobraz", "uzijexternisoubor", "uzijexternisoubory", "uzijexternizvuk", "uzijkodovani", "uzijmodul", "uzijmoduly", "uzijodkazy", "uzijprikazy", "uzijspeciality", "uzijsymbol", "uzijurl", "verze", "vl", "vlasovalinka", "vlevo", "vpravo", "vradku", "vsedniden", "vyberbloky", "vyberpapir", "vyberverzi", "vyplnenytext", "vyplnovelinky", "vyplnovepole", "vyplnovyradek", "vyskahorejsku", "vyskapapiru", "vyskasazby", "vyskaseznamu", "vyskaspodku", "vyskatextu", "vyskatiskpapiru", "vyskaupati", "vyskazahlavi", "vysoky", "vyznam", "vzdalenosthorejsku", "vzdalenostlevehookraje", "vzdalenostlevemarginalie", "vzdalenostmarginalie", "vzdalenostokraje", "vzdalenostpravehookraje", "vzdalenostpravemarginalie", "vzdalenostspodku", "vzdalenostupati", "vzdalenostzahlavi", "zablokujinterakcnimenu", "zachovejbloky", "zadnamezera", "zadnebilemisto", "zadnedalsibloky", "zadnedalsisoubory", "zadnehorniadolniradky", "zadneodsazovani", "zadnezahlaviaupati", "zadneznaceni", "zadnyrozmer", "zadnyseznam", "zadnytest", "zalozka", "zapisdorejstriku", "zapisdoseznamu", "zapisdoseznamuodkazu", "zapismeziseznam", "zaramovani", "zarovnanonastred", "zarovnanovlevo", "zarovnanovpravo", "zasobnikpoli", "zaznamovepole", "zhustene", "ziskejbuffer", "ziskejznaceni", "zlomek", "znaceni", "znak", "znaky", "zpracujbloky", "zpracujstranu", "zrcadlit", "zref", "zvysujicicislo" },
- ["de"]={ "Buchstabe", "Buchstaben", "CAP", "Cap", "Caps", "KAP", "Kap", "Kaps", "MONAT", "Roemischezahlen", "WOCHENTAG", "WOERTER", "WORT", "Woerter", "Wort", "Ziffern", "abstandlinkerrand", "abstandoben", "abstandrechterrand", "abstandunten", "amgitterausrichten", "amgitterneuausrichten", "appendix", "arg", "atleftmargin", "atrightmargin", "aufseite", "ausfuellfeld", "ausfuelltext", "ausschnitt", "bearbeitebloecke", "bearbeiteseite", "bedeutung", "behaltebloecke", "bei", "bemerkung", "benutzekodierung", "benutzespezielles", "benutzeverzeichnis", "beschrifteversion", "beschriftung", "bestimmekopfnummer", "bestimmelistencharakeristika", "bestimmeregistercharakteristika", "bildschirm", "blanko", "bookmark", "bottomspace", "breitelinkerrand", "breiterechterrand", "bruch", "buchstabe", "buchstaben", "but", "bypassblocks", "cap", "chapter", "chem", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "coupledregister", "crlf", "cutspace", "datum", "decrementnumber", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecolumnset", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definelayout", "definemathalignment", "defineoutput", "definepagebreak", "defineplacement", "definerawfont", "definerule", "defineschriftsynonym", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definiereabbsymbol", "definiereabsaetze", "definiereabschnitt", "definiereabschnittsblock", "definiereakzent", "definierebefehl", "definierebeschreibung", "definierebeschreibungen", "definierebeschriftung", "definiereblanko", "definiereblock", "definierefarbe", "definierefarbengruppe", "definierefeld", "definierefeldstapel", "definierefliesstext", "definierefliesstextumgebung", "definieregleitobjekt", "definierehauptfeld", "definierehbox", "definiereinteraktionsmenue", "definierekonversion", "definierelabel", "definiereliste", "definierelogo", "definieren", "definierenummerierung", "definiereoverlay", "definierepalette", "definierepapierformat", "definiereprofil", "definiereprogramme", "definierepuffer", "definierereferenz", "definierereferenzformat", "definierereferenzliste", "definiereregister", "definiereschrift", "definiereschriftstil", "definieresortieren", "definierestartstop", "definierestil", "definieresubfeld", "definieresymbol", "definieresynonyme", "definieretabellenvorlage", "definieretabulator", "definieretext", "definieretippen", "definiereueberschrift", "definiereumbruch", "definiereumrahmt", "definiereumrahmtertext", "definiereversion", "definierezeichen", "definierezusammengestellteliste", "description", "dimension", "doppelseite", "doppelseitigespapier", "drehen", "duennelinie", "duennerumriss", "einezeile", "einstellungen", "einziehen", "emptylines", "entknuepfebeschriftung", "enumeration", "externeabbildung", "farbbalken", "farbe", "farbewert", "feld", "feldstapel", "festesspatium", "folgeprofil", "folgeprofilversion", "folgeversion", "footnotetext", "forceblocks", "format", "formelnummer", "framedtext", "fussnote", "fusszeileabstand", "fusszeilenhoehe", "gefuelltesrechteck", "gefuelltezeile", "geg", "gesamtseitenanzahl", "gestreckt", "getnumber", "gitter", "graufarbe", "grauwert", "haarlinie", "hauptsprache", "headsym", "heutigesdatum", "heutigeskopfnummer", "hintergrund", "hl", "hoch", "hoeheoben", "hoeheunten", "holebeschriftung", "holepuffer", "imlinken", 
"imlinkenrand", "immaumrise", "immediatebetweenlist", "immediatetolist", "imrechten", "imrechtenrand", "imumriss", "in", "inaktiviereinteraktionsmenue", "inanderermarginale", "indentation", "ininner", "inlinkermarginale", "inmarginalie", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "inrechtermarginale", "installieresprache", "interaktionsbalken", "interaktionsknopfe", "interaktionsmenue", "inzeile", "irgendwo", "its", "kap", "keindimension", "keinebeschriftung", "keinebloeckemehr", "keinedateienmehr", "keinekopfundfusszeilen", "keineliste", "keinspatium", "keintest", "keinzeilenobenundunten", "keinzwischenraum", "kleinerdurchschuss", "klonierefeld", "knopf", "komponente", "konvertierezahl", "kopf", "kopfniveau", "kopfnummer", "kopfweite", "kopfzeilenabstand", "kopfzeilenhoehe", "kopierefeld", "korrigierezwischenraum", "label", "labeling", "labels", "labeltext", "leg", "liniendicke", "linkemarginalafstand", "linkemarginalbreite", "linksbuendig", "listenbreite", "listenhoehe", "listenlaenge", "listsymbol", "loadsorts", "loadsynonyms", "mapfontsize", "mar", "marginalafstand", "marginalbreite", "marginallinie", "marginaltext", "marginaltitel", "marginalwort", "mathematik", "maumrise", "mediaeval", "menueknopf", "monat", "moveformula", "movesidefloat", "nachunten", "name", "navigating", "nextsection", "nichteinziehen", "nocap", "nokap", "nop", "notiz", "numberofsubpages", "nummererhoehen", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "pagedepth", "pageoffset", "papierbreite", "papierhoehe", "paragraph", "part", "passelayoutan", "passendfeld", "placefloat", "placeheadnumber", "placeheadtext", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placepagenumber", "placerawlist", "placereferencelist", "placerule", "placetextvariable", "platzierebookmarks", "platziereformel", "platzierefussnoten", "platzierelegende", "platziereliste", "platzierelogo", "platzierelokalefussnoten", "platzierenebeneinander", "platziereregister", "platziereuntereinander", "platziereunterformel", "platzierezusammengestellteliste", "pos", "position", "positiontext", "posten", "printpapierbreite", "printpapierhoehe", "produkt", "programm", "projekt", "publikation", "punkt", "ran", "randabstand", "randbreite", "rechteck", "rechtecke", "rechtemarginalafstand", "rechtemarginalbreite", "rechtsbuendig", "ref", "referenz", "referieren", "register", "registrierefelder", "reservefloat", "resetnumber", "resettextcontent", "roemischezahlen", "ruecksetzten", "ruecksetztenbeschriftung", "rumpfweite", "satzbreite", "satzhoehe", "schreibezumregister", "schreibezurliste", "schreibezurreferenzliste", "schreibezwischenliste", "section", "seeregister", "seite", "seitenreferenz", "seitenummer", "setnumber", "settext", "settextvariable", "setupanswerarea", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupforms", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupnumber", "setuppaper", "setupplacement", "setuprule", "setupstartstop", "setupstrut", "setuptextposition", "setuptextvariable", "showsymbolset", "sort", "spalte", "spatium", "spiegeln", "sprache", "startabbildung", "startalignment", "startausrichtung", "startbuffer", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startdescription", "startdocument", "startdokument", 
"startenger", "startenumeration", "startfarbe", "startfigure", "startfloattext", "startformula", "startframedtext", "startgeg", "startgegenueber", "startglobal", "startgrosserdurchschuss", "starthiding", "starthintergrund", "startinteraktionsmenue", "startitemgroup", "startkleinerdurchschuss", "startkodierung", "startkombination", "startkomponente", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokal", "startlokalefussnoten", "startmakeup", "startmarginalblock", "startmarginallinie", "startmarginblock", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositionieren", "startpositioning", "startpostponing", "startprodukt", "startprofil", "startprofile", "startprojekt", "startraster", "startregister", "startspalten", "startsymbolset", "startsynchronisation", "startsynchronization", "starttabelle", "starttabellen", "starttable", "starttables", "starttabulate", "starttext", "starttextlinie", "starttyping", "startueberblick", "startumbruch", "startumgebung", "startunpacked", "startversion", "startzeile", "startzeilen", "startzeilenkorrektur", "startzeilennumerierung", "startzitat", "stelleabsaetzeein", "stelleabsatznummerierungein", "stelleabschnittein", "stelleabschnittsblockein", "stelleanordnenein", "stelleaufzaehlungenein", "stelleausgabeein", "stelleausrichtungein", "stelleausschnittein", "stellebeschreibungein", "stellebeschriftungein", "stellebilderunterschriftein", "stellebildunterschriftein", "stellebindestrichein", "stelleblankoein", "stelleblockein", "stelledrehenein", "stelleduennerumrissein", "stelleeinziehenein", "stelleengerein", "stelleexterneabbildungenein", "stellefarbeein", "stellefarbenein", "stellefeldein", "stellefelderin", "stellefliesstextein", "stellefliesstextumgebungein", "stelleformelnein", "stellefussnotendefinitionein", "stellefussnotenein", "stellefusszeileein", "stellefusszeilentextein", "stellegefuelltesrechteckein", "stellegefuelltezeileein", "stellegegenueberplatzierenein", "stellegleitobjekteein", "stellegleitobjektein", "stellehintergruendeein", "stellehintergrundein", "stelleinmarginalieein", "stelleinteraktionein", "stelleinteraktionsbalkenein", "stelleinteraktionsbildschirmein", "stelleinteraktionsmenueein", "stelleknopfein", "stellekombinationein", "stellekommentarein", "stellekopfzahlein", "stellekopfzeileein", "stellekopfzeilentextein", "stellelabeltextein", "stellelayoutein", "stellelegendeein", "stellelinienbreiteein", "stellelisteein", "stellemarginalblockein", "stellemarginallinieein", "stellenobenein", "stellenummerierungein", "stellepaletteein", "stellepapierformatein", "stelleplatziegeteiltegleitobjekt", "stellepositionierenein", "stellepostenein", "stelleprofilein", "stelleprogrammein", "stellepublikationein", "stellepufferein", "stellerasterein", "stellerechteckein", "stellereferenzierenein", "stellereferenzlisteein", "stelleregisterein", "stelleseitenkommentarein", "stelleseitennummerein", "stelleseitennummeriernungein", "stelleseitenuebergangein", "stellesortierenein", "stellespaltenein", "stellespatiumein", "stellespracheein", "stellesymbolsetein", "stellesynchronisationein", "stellesynchronisationsbalkenein", "stellesynonymein", "stellesystemein", "stelletabein", "stelletabellenein", "stelletabulatorein", "stelletextein", "stelletextobenein", "stelletexttexteein", "stelletextumrissein", "stelletextuntenein", "stelletipein", "stelletippenein", "stelletoleranzein", 
"stelleueberschriftein", "stelleueberschriftenein", "stelleueberschrifttextein", "stelleumbruchein", "stelleumrahmtein", "stelleumrahmtetexteein", "stelleuntenein", "stelleunterseitennummerein", "stelleunterstreichenein", "stelleurlein", "stelleversalienein", "stelleversionein", "stellezeilenabstandein", "stellezeilenein", "stellezeilennumerierungein", "stellezitierenein", "stellezusammengestelltelisteein", "stellezwischenraumein", "stopalignment", "stopausrichtung", "stopbuffer", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopdokument", "stopenger", "stopenumeration", "stopfarbe", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgeg", "stopgegenueber", "stopglobal", "stopgrosserdurchschuss", "stophiding", "stophintergrund", "stopinteraktionsmenue", "stopitemgroup", "stopkleinerdurchschuss", "stopkodierung", "stopkombination", "stopkomponente", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokal", "stoplokalefussnoten", "stopmakeup", "stopmarginalblock", "stopmarginallinie", "stopmarginblock", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositionieren", "stoppositioning", "stoppostponing", "stopprodukt", "stopprofil", "stopprofile", "stopprojekt", "stopraster", "stopspalten", "stopsymbolset", "stopsynchronisation", "stopsynchronization", "stoptabelle", "stoptabellen", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextlinie", "stoptyping", "stopueberblick", "stopumbruch", "stopumgebung", "stopunpacked", "stopversion", "stopzeile", "stopzeilen", "stopzeilenkorrektur", "stopzeilennumerierung", "stopzitat", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtorawfont", "sym", "symbol", "synchronisationsbalken", "synchronisieren", "synonym", "tab", "teilegleitobjekt", "testcolumn", "testpage", "tex", "textbreite", "texthoehe", "textlinie", "textreferenz", "textvariable", "tief", "tiho", "tip", "tippedatei", "tippen", "tippepuffer", "title", "tooltip", "txt", "ueber", "ueberschrifttext", "uebersetzten", "umgebung", "umrahmt", "unbekant", "underbar", "underbars", "unterformelnummer", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "verbergebloecke", "vergleichefarbengruppe", "vergleichepalette", "verknuepfebeschriftung", "verknuepfedokument", "verknuepfregister", "version", "verweis", "verweisdatum", "verwendeJSscript", "verwendeURL", "verwendebefehl", "verwendebloecke", "verwendeexteresdokument", "verwendeexterneabbildung", "verwendeexternedatei", "verwendeexternedateien", "verwendeexternestonstueck", "verwendemodul", "verwendemodule", "verwendereferenzen", "verwendesymbole", "verwendeurl", "vl", "volleswort", "von", "waehlebloeckeaus", "waehlepapieraus", "waehleversionaus", "wechselezumfliesstext", "wiederholen", "wochentag", "wohnort", "wortrechts", "zeigedruck", "zeigeeinstellungen", "zeigeexterneabbildungen", "zeigefarbe", "zeigefarbengruppe", "zeigefelder", "zeigefliesstext", "zeigefliesstextumgebung", "zeigegitter", "zeigelayout", "zeigepalette", "zeigerahmen", "zeigestruts", "zeigeumbruch", "zentriert", "ziffern", "zitat", "zitieren", "zu", "zurbox", "zurseite", "zwischenraum" },
- ["en"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "about", "adaptlayout", "adding", "appendix", "arg", "at", "atleftmargin", "atpage", "atrightmargin", "background", "backspace", "blackrule", "blackrules", "blank", "bookmark", "bottomdistance", "bottomheight", "bottomspace", "but", "button", "bypassblocks", "cap", "chapter", "character", "characters", "chem", "clip", "clonefield", "color", "colorbar", "colorvalue", "column", "comment", "comparecolorgroup", "comparepalet", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completepagenumber", "completeregister", "component", "convertnumber", "copyfield", "correctwhitespace", "coupledocument", "coupledregister", "couplemarking", "couplepage", "couplepaper", "coupleregister", "crlf", "currentdate", "currentheadnumber", "cutspace", "date", "decouplemarking", "decrementnumber", "define", "defineaccent", "defineblank", "defineblock", "definebodyfont", "definebodyfontDEF", "definebodyfontREF", "definebodyfontenvironment", "definebuffer", "definecharacter", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecombination", "definecombinedlist", "definecommand", "defineconversion", "definedescription", "definedfont", "defineenumeration", "definefield", "definefieldstack", "definefiguresymbol", "definefloat", "definefont", "definefontfeature", "definefonthandling", "definefontstyle", "definefontsynonym", "defineframed", "defineframedtext", "definehbox", "definehead", "defineindentedtext", "defineinmargin", "defineinteractionmenu", "defineitemgroup", "definelabel", "definelayer", "definelayout", "definelist", "definelogo", "definemainfield", "definemakeup", "definemarking", "definemathalignment", "defineoutput", "defineoverlay", "definepagebreak", "definepalet", "definepapersize", "defineparagraphs", "defineplacement", "defineprofile", "defineprogram", "definerawfont", "definereference", "definereferenceformat", "definereferencelist", "defineregister", "definerule", "definesection", "definesectionblock", "definesorting", "definestartstop", "definestyle", "definesubfield", "definesymbol", "definesynonyms", "definetabletemplate", "definetabulate", "definetext", "definetextposition", "definetextvariable", "definetype", "definetypeface", "definetyping", "defineversion", "description", "determineheadnumber", "determinelistcharacteristics", "determineregistercharacteristics", "dimension", "disableinteractionmenu", "domicile", "donttest", "edgedistance", "edgewidth", "emptylines", "enumeration", "environment", "externalfigure", "fact", "field", "fieldstack", "fillinfield", "fillinline", "fillinrules", "fillintext", "fitfield", "fixedspace", "fixedspaces", "followprofile", "followprofileversion", "followversion", "footerdistance", "footerheight", "footnote", "footnotetext", "forceblocks", "formulanumber", "fraction", "framed", "framedtext", "from", "getbuffer", "getmarking", "getnumber", "godown", "goto", "gotobox", "gotopage", "graycolor", "greyvalue", "grid", "hairline", "head", "headerdistance", "headerheight", "headlevel", "headnumber", "headsym", "headtext", "hideblocks", "high", "hl", "immediatebetweenlist", "immediatetolist", "in", "incrementnumber", "indentation", "indenting", "inframed", "infull", "ininner", "inleft", "inleftedge", "inleftmargin", "inline", "inmaframed", "inmargin", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inothermargin", 
"inouter", "inright", "inrightedge", "inrightmargin", "installlanguage", "interactionbar", "interactionbuttons", "interactionmenu", "item", "items", "its", "keepblocks", "label", "labeling", "labels", "labeltext", "language", "leftaligned", "leftedgedistance", "leftedgewidth", "leftmargindistance", "leftmarginwidth", "leg", "linethickness", "listheight", "listlength", "listsymbol", "listwidth", "loadsorts", "loadsynonyms", "logfields", "lohi", "low", "maframed", "mainlanguage", "makeupheight", "makeupwidth", "mapfontsize", "mar", "margindistance", "marginrule", "margintext", "margintitle", "marginwidth", "marginword", "marking", "markversion", "mathematics", "mediaeval", "menubutton", "midaligned", "mirror", "month", "moveformula", "moveongrid", "movesidefloat", "name", "navigating", "nextsection", "nocap", "nodimension", "noheaderandfooterlines", "noindenting", "nolist", "nomarking", "nomoreblocks", "nomorefiles", "nop", "nospace", "note", "notopandbottomlines", "nowhitespace", "numberofsubpages", "numbers", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "packed", "page", "pagedepth", "pagenumber", "pageoffset", "pagereference", "paperheight", "paperwidth", "paragraph", "part", "periods", "placebookmarks", "placecombinedlist", "placefloat", "placefootnotes", "placeformula", "placeheadnumber", "placeheadtext", "placelegend", "placelist", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelocalfootnotes", "placelogos", "placeongrid", "placeontopofeachother", "placepagenumber", "placerawlist", "placereferencelist", "placeregister", "placerule", "placesidebyside", "placesubformula", "placetextvariable", "position", "positiontext", "printpaperheight", "printpaperwidth", "processblocks", "processpage", "product", "program", "project", "publication", "quotation", "quote", "ran", "redo", "ref", "reference", "referral", "referraldate", "referring", "register", "remark", "reservefloat", "reset", "resetmarking", "resetnumber", "resettextcontent", "rightaligned", "rightedgedistance", "rightedgewidth", "rightmargindistance", "rightmarginwidth", "romannumerals", "rotate", "scale", "screen", "section", "seeregister", "selectblocks", "selectpaper", "selectversion", "setnumber", "settextcontent", "settextvariable", "setupalign", "setupanswerarea", "setuparranging", "setupbackground", "setupbackgrounds", "setupblackrules", "setupblank", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbottom", "setupbottomtexts", "setupbuffer", "setupbuttons", "setupcapitals", "setupcaption", "setupcaptions", "setupclipping", "setupcolor", "setupcolors", "setupcolumns", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupcombinations", "setupcombinedlist", "setupcomment", "setupdescriptions", "setupenumerations", "setupexternalfigures", "setupfield", "setupfields", "setupfillinlines", "setupfillinrules", "setupfloat", "setupfloats", "setupfloatsplitting", "setupfonthandling", "setupfontsynonym", "setupfooter", "setupfootertexts", "setupfootnotedefinition", "setupfootnotes", "setupforms", "setupformulae", "setupframed", "setupframedtexts", "setuphead", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupinmargin", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupinterlinespace2", "setupitemgroup", "setupitemizations", 
"setupitems", "setuplabeltext", "setuplanguage", "setuplayout", "setuplegend", "setuplinenumbering", "setuplines", "setuplinewidth", "setuplist", "setuplistalternative", "setupmakeup", "setupmarginblocks", "setupmarginrules", "setupmarking", "setupmathalignment", "setupnarrower", "setupnumber", "setupnumbering", "setupoppositeplacing", "setupoutput", "setuppagecomment", "setuppagenumber", "setuppagenumbering", "setuppagetransitions", "setuppalet", "setuppaper", "setuppapersize", "setupparagraphnumbering", "setupparagraphs", "setupplacement", "setuppositioning", "setupprofiles", "setupprograms", "setuppublications", "setupquote", "setupreferencelist", "setupreferencing", "setupregister", "setuprotate", "setuprule", "setups", "setupscreens", "setupsection", "setupsectionblock", "setupsorting", "setupspacing", "setupstartstop", "setupstrut", "setupsubpagenumber", "setupsymbolset", "setupsynchronization", "setupsynchronizationbar", "setupsynonyms", "setupsystem", "setuptab", "setuptables", "setuptabulate", "setuptext", "setuptextposition", "setuptextrules", "setuptexttexts", "setuptextvariable", "setupthinrules", "setuptolerance", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunderbar", "setupurl", "setupversions", "setupwhitespace", "showbodyfont", "showbodyfontenvironment", "showcolor", "showcolorgroup", "showexternalfigures", "showfields", "showframe", "showgrid", "showlayout", "showmakeup", "showpalet", "showprint", "showsetups", "showstruts", "showsymbolset", "someline", "somewhere", "sort", "space", "splitfloat", "startalignment", "startbackground", "startbuffer", "startcoding", "startcolor", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombination", "startcomment", "startcomponent", "startdescription", "startdocument", "startenumeration", "startenvironment", "startfact", "startfigure", "startfloattext", "startformula", "startframedtext", "startglobal", "starthiding", "startinteractionmenu", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmarginrule", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startpacked", "startparagraph", "startpositioning", "startpostponing", "startproduct", "startprofile", "startproject", "startquotation", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttext", "starttextrule", "starttyping", "startunpacked", "startversion", "stopalignment", "stopbackground", "stopbuffer", "stopcoding", "stopcolor", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombination", "stopcomment", "stopcomponent", "stopdescription", "stopdocument", "stopenumeration", "stopenvironment", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopglobal", "stophiding", "stopinteractionmenu", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stoppacked", "stopparagraph", "stoppositioning", "stoppostponing", "stopproduct", "stopprofile", "stopproject", "stopquotation", "stopraster", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptextrule", "stoptyping", 
"stopunpacked", "stopversion", "stretched", "sub", "subformulanumber", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "switchtobodyfont", "switchtorawfont", "sym", "symbol", "symoffset", "synchronizationbar", "synchronize", "synonym", "tab", "testcolumn", "testpage", "tex", "textheight", "textreference", "textrule", "textvariable", "textwidth", "thinrule", "thinrules", "title", "tooltip", "topdistance", "topheight", "topspace", "totalnumberofpages", "translate", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "unitmeaning", "unknown", "useJSscripts", "useURL", "useXMLfilter", "useblocks", "usecommands", "usedirectory", "useencoding", "useexternaldocument", "useexternalfigure", "useexternalfile", "useexternalfiles", "useexternalsoundtrack", "usemodule", "usemodules", "usereferences", "usespecials", "usesymbols", "usetypescript", "usetypescriptfile", "useurl", "version", "vl", "weekday", "whitespace", "wordright", "writebetweenlist", "writetolist", "writetoreferencelist", "writetoregister" },
- ["fr"]={ "CAP", "Cap", "Caps", "Caractere", "Caracteres", "Chiffresromains", "JOURSEMAINE", "MOIS", "MOT", "MOTS", "Mot", "Mots", "Numeros", "a", "adaptedisposition", "affectenumero", "affectevariabletexte", "ajustechamp", "alaligne", "alapage", "aligneadroite", "aligneagauche", "aligneaumilieu", "appendix", "arg", "arriereplan", "atleftmargin", "atrightmargin", "baha", "barrecouleur", "barreinteraction", "barresynchronisation", "bas", "bouton", "boutonmenu", "boutonsinteraction", "but", "cacheblocs", "cap", "caractere", "caracteres", "champ", "changepolicebrute", "changepolicecorps", "chapter", "chem", "chiffresromains", "citation", "citer", "clip", "clonechamp", "colonne", "comment", "commentaire", "comparegroupecouleur", "comparepalette", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completenumeropage", "completeregistre", "composant", "composeenalinea", "concernant", "convertitnumero", "copitchamp", "corrigeespaceblanc", "couleur", "couleurgrise", "coupledocument", "coupledregister", "couplemarquage", "couplepapier", "coupleregistre", "crlf", "cutspace", "dactylographier", "dans", "dansautremarge", "dansborddroit", "dansbordgauche", "dansdroite", "dansgauche", "dansmarge", "dansmargedroite", "dansmargegauche", "date", "datecourante", "daterecommandation", "de", "decouplemarquage", "decrementenumero", "definebodyfontDEF", "definebodyfontREF", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineframed", "defineframedtext", "defineindentedtext", "defineitemgroup", "definemathalignment", "defineplacement", "definetypeface", "definicaractere", "definit", "definitaccent", "definitbloc", "definitblocsection", "definitbuffer", "definitcalque", "definitchamp", "definitchampprincipal", "definitcommande", "definitconversion", "definitcouleur", "definitdactylo", "definitdansmarge", "definitdemarrestoppe", "definitdescription", "definitdisposition", "definitenumeration", "definitenvironnementpolicecorps", "definitetiquette", "definitflottant", "definitformatreference", "definitgroupecouleur", "definithbox", "definitjeucolonne", "definitliste", "definitlisteimbriquee", "definitlistereference", "definitlogo", "definitmakeup", "definitmarquage", "definitmenuinteraction", "definitnotepdp", "definitpalette", "definitparagraphes", "definitpilechamp", "definitpolice", "definitpolicebrute", "definitpolicecorps", "definitpositiontexte", "definitprofil", "definitprogramme", "definitreference", "definitregistre", "definitregle", "definitrevetement", "definitsautdecolonne", "definitsautdepage", "definitsection", "definitsortie", "definitsouschamp", "definitstyle", "definitstylepolice", "definitsymbole", "definitsymbolefigure", "definitsynonymepolice", "definitsynonymes", "definittabulation", "definittaillepapier", "definittete", "definittexte", "definittrametableau", "definittri", "definittype", "definitvariabletexte", "definitversion", "definitvide", "demarrealignement", "demarrearriereplan", "demarreblocmarge", "demarrecitation", "demarreciter", "demarrecodage", "demarrecolonnes", "demarrecombinaison", "demarrecompoetroite", "demarrecomposant", "demarrecorrectionligne", "demarrecouleur", "demarredegroupe", "demarredocument", "demarreenvironement", "demarrefigure", "demarreglobal", "demarregroupe", "demarrejeucolonne", "demarrejeusymboles", "demarreligne", "demarreligneregleetexte", "demarrelignes", "demarrelocal", "demarremakeup", "demarremargereglee", "demarrenotespdplocales", "demarrenumerotationligne", 
"demarreopposition", "demarrepositionnement", "demarreproduit", "demarreprofil", "demarreprojet", "demarreraster", "demarrerevetement", "demarresynchronisation", "demarretableau", "demarretableaux", "demarretexte", "demarreversion", "demarrevuedensemble", "deplaceformule", "deplacesurgrille", "description", "determinecaracteristiqueliste", "determinecaracteristiquesregistre", "determinenumerotete", "dimension", "distancebord", "distanceborddroit", "distancebordgauche", "distanceentete", "distanceinf", "distancemarge", "distancemargedroite", "distancemargegauche", "distancepdp", "distancesup", "domicile", "echelle", "ecran", "ecritdansliste", "ecritdanslistereference", "ecritentreliste", "ecritregistre", "el", "element", "elements", "emptylines", "enumeration", "environement", "espace", "espaceblanc", "espacefixe", "espaceinf", "espacesfixes", "espacesup", "etiquette", "etiquettes", "etire", "fait", "faitreference", "fichierdactylo", "figureexterne", "forceblocs", "fraction", "framed", "framedtext", "gardeblocs", "getnumber", "grille", "groupe", "haut", "hauteureditionpapier", "hauteurentete", "hauteurinf", "hauteurliste", "hauteurmakeup", "hauteurpapier", "hauteurpdp", "hauteursup", "hauteurtexte", "headsym", "hl", "immediatebetweenlist", "immediatetolist", "inconnu", "incrementenumero", "indentation", "inframed", "infull", "inhibemenuinteraction", "ininner", "inmframed", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "installelangue", "joursemaine", "labeling", "labeltexte", "langue", "langueprincipale", "largeurbord", "largeurborddroit", "largeurbordgauche", "largeureditionpapier", "largeurligne", "largeurliste", "largeurmakeup", "largeurmarge", "largeurmargedroite", "largeurmargegauche", "largeurpapier", "largeurtexte", "leg", "ligneh", "lignenoire", "ligneregleetexte", "lignesnoires", "listesymbole", "llongueurliste", "loadsorts", "loadsynonyms", "logchamp", "mapfontsize", "mar", "margereglee", "marquage", "marquageversion", "marquepage", "mathematique", "mediaeval", "menuinteraction", "mframed", "mois", "montrecadre", "montrechamps", "montrecouleur", "montredisposition", "montreedition", "montreenvironnementpolicecorps", "montrefiguresexternes", "montregrille", "montregroupecouleur", "montrejeusymboles", "montremakeup", "montrepalette", "montrepolicecorps", "montrereglages", "montrestruts", "motdroit", "motmarge", "movesidefloat", "name", "navigating", "nextsection", "niveautete", "nocap", "nombredesouspages", "nombretotaldepages", "nommacro", "nop", "note", "notepdp", "numeroformule", "numeropage", "numeros", "numerosousformule", "numerotete", "numerotetecourant", "obtientmarquage", "oriente", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "page", "pagedepth", "pagedouble", "pageoffset", "paragraph", "part", "pasplusdeblocs", "pasplusdefichiers", "periodes", "pilechamp", "placecoteacote", "placeflottant", "placeformule", "placelegende", "placelesunsaudessusdesautres", "placeliste", "placelisteinmbriquee", "placelistereference", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placelogos", "placemarquespages", "placenotespdp", "placenotespdplocales", "placenumeropage", "placenumerotete", "placerawlist", "placeregistre", "placeregle", "placesousformule", "placesurgrille", "placetextetete", "placevariabletexte", "position", "positionnetexte", "prendbuffer", "produit", "programme", "projet", "publication", "qqpart", "ran", "raz", 
"razmarquage", "raznumero", "recommandation", "ref", "refait", "reference", "referencepage", "referencetexte", "reflete", "register", "reglages", "reglealignement", "reglearrangement", "reglearriereplan", "reglearriereplans", "reglebarreinteraction", "reglebarresynchronisation", "reglebloc", "regleblocmarge", "regleblocsection", "regleboutons", "reglebuffer", "reglecapitales", "reglechamp", "reglechamps", "regleclipping", "reglecolonnes", "reglecombinaisons", "reglecommentaire", "reglecommentairepage", "reglecompoetroite", "reglecomposeenalinea", "reglecouleur", "reglecouleurs", "regledactylo", "regledansmarge", "regledemarrestoppe", "regledescriptions", "regledisposition", "regleecraninteraction", "regleecrans", "regleelements", "regleencadre", "regleentete", "regleenumerations", "regleenvironnementpolicecorps", "regleepaisseurligne", "regleespaceblanc", "regleespacement", "regleespacementinterligne", "reglefiguresexternes", "regleflottant", "regleflottants", "regleformulaires", "regleformules", "reglegroupeselements", "regleinf", "regleinteraction", "regleintitule", "regleintitules", "reglejeucolonne", "reglejeusymboles", "reglelabeltexte", "reglelangue", "reglelegende", "reglelignes", "reglelignesnoires", "reglelignesreglestexte", "regleliste", "reglelisteimbriquee", "reglelistereference", "reglemakeup", "reglemargereglee", "reglemarquage", "reglemarquagehyphenation", "reglemenuinteraction", "reglenotepdp", "reglenumero", "reglenumeropage", "reglenumerotation", "reglenumerotationligne", "reglenumerotationpage", "reglenumerotationparagraphe", "reglenumerotete", "regleoriente", "reglepalette", "reglepapier", "regleparagraphes", "reglepdp", "regleplacementopposition", "reglepolicecorps", "reglepositionnement", "reglepositiontexte", "regleprofils", "regleprogrammes", "reglepublications", "reglereferencage", "regleregistre", "regleregle", "regleremplitligne", "regleremplitlignesreglees", "reglesection", "regleseparationflottant", "reglesortie", "reglesouslignage", "reglesousnumeropage", "reglestrut", "reglesup", "reglesynchronisation", "reglesynonymes", "reglesysteme", "regletab", "regletableaux", "regletabulation", "regletaillepapier", "regletete", "regletetes", "regletexte", "regletextesentete", "regletextesinf", "regletextespdp", "regletextessup", "regletextestexte", "regletextetete", "regletolerance", "regletraitsfins", "regletransitionspage", "regletri", "regletype", "regleurl", "reglevariabletexte", "regleversions", "remplitchamp", "remplitligne", "remplitlignesreglees", "remplittexte", "reservefloat", "resettextcontent", "retourarriere", "sansalinea", "sansdimension", "sansespace", "sansespaceblanc", "sanslignesenteteetpdp", "sanslignessupetinf", "sansliste", "sansmarquage", "sanstest", "sauteblocs", "section", "seeregister", "selectionneblocs", "selectionnepapier", "selectionneversion", "sensunite", "separeflottant", "settext", "setupanswerarea", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupframedtexts", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupplacement", "sort", "sousnumeropage", "startalignment", "startarriereplan", "startbuffer", "startcitation", "startcolumnmakeup", "startcolumns", "startcombination", "startcomment", "startcomposant", "startcouleur", "startdescription", "startdocument", "startenumeration", "startenvironement", "startfait", "startfigure", "startfloattext", "startformula", "startframedtext", "startgroupe", "starthiding", "startitemgroup", 
"startlegend", "startligneregleetexte", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmargereglee", "startmarginblock", "startmenuinteraction", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startproduit", "startprofile", "startprojet", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttyping", "startunpacked", "startversion", "stopalignment", "stoparriereplan", "stopbuffer", "stopcitation", "stopcolumnmakeup", "stopcolumns", "stopcombination", "stopcomment", "stopcompoetroite", "stopcomposant", "stopcouleur", "stopdescription", "stopdocument", "stopenumeration", "stopenvironement", "stopfait", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgroupe", "stophiding", "stopitemgroup", "stoplegend", "stopligneregleetexte", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmargereglee", "stopmarginblock", "stopmenuinteraction", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppealignement", "stoppearriereplan", "stoppeblocmarge", "stoppecitation", "stoppecodage", "stoppecolonnes", "stoppecombinaison", "stoppecomposant", "stoppecorrectionligne", "stoppecouleur", "stoppedegroupe", "stoppedocument", "stoppeenvironement", "stoppeglobal", "stoppegroupe", "stoppejeucolonne", "stoppeligne", "stoppeligneregleetexte", "stoppelignes", "stoppelocal", "stoppemakeup", "stoppemargereglee", "stoppenotespdplocales", "stoppenumerotationligne", "stoppeopposition", "stoppepositionnement", "stoppeproduit", "stoppeprofil", "stoppeprojet", "stopperaster", "stopperevetement", "stoppesynchronisation", "stoppetableau", "stoppetableaux", "stoppetexte", "stoppeversion", "stoppevuedensemble", "stoppositioning", "stoppostponing", "stopproduit", "stopprofile", "stopprojet", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptyping", "stopunpacked", "stopversion", "sub", "subject", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestion", "suivantprofil", "suivantversion", "suivantversionprofil", "sym", "symbole", "synchronise", "synonym", "tab", "tapebuffer", "testcolumn", "testpage", "tete", "tex", "textemarge", "textenotepdp", "textetete", "title", "titremarge", "traduire", "traiteblocs", "traitepage", "traitfin", "traitsfins", "txt", "typ", "underbar", "underbars", "uneligne", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "utiliseJSscripts", "utiliseURL", "utiliseblocs", "utilisechemin", "utilisecommandes", "utilisedocumentexterne", "utiliseencodage", "utilisefichierexterne", "utilisefichiersexternes", "utilisefigureexterne", "utilisemodule", "utilisemodules", "utilisepsiteaudioexterne", "utilisereferences", "utilisespecialites", "utilisesymboles", "utiliseurl", "va", "vaalaboite", "vaalapage", "vaenbas", "valeurcouleur", "valeurgris", "variabletexte", "version", "vide", "vl" },
- ["it"]={ "CAP", "Cap", "Caps", "GIORNOSETTIMANA", "Lettera", "Lettere", "MESE", "Numeri", "Numeriromani", "PAROLA", "PAROLE", "Parola", "Parole", "accoppiacarta", "accoppiadocumento", "accoppiamarcatura", "accoppiapagina", "accoppiaregistro", "adattacampo", "adattalayout", "al", "allineacentro", "allineadestra", "allineasinistra", "altezzacarta", "altezzacartastampa", "altezzacima", "altezzaelenco", "altezzafondo", "altezzaintestazione", "altezzamakeup", "altezzapdp", "altezzatesto", "ambiente", "ampiezzabordo", "ampiezzabordodestro", "ampiezzabordosinistro", "ampiezzacarta", "ampiezzacartastampa", "ampiezzaelenco", "ampiezzamakeup", "ampiezzamargine", "ampiezzamarginedestro", "ampiezzamarginesinistro", "ampiezzatesto", "ap", "apagina", "appendix", "arg", "atleftmargin", "atrightmargin", "barracolori", "barrainterazione", "barrasincronizzazione", "bastablocchi", "bastafile", "cambiaafontdeltesto", "campi", "camporiempimento", "cap", "capello", "chapter", "chim", "circondato", "citazione", "clip", "clonacampo", "colonna", "colore", "coloregrigio", "comment", "commento", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "componenet", "confrontagruppocolori", "confrontatavolozza", "convertinumero", "copiacampo", "correggispaziobianco", "coupledregister", "crlf", "cutspace", "da", "daqualcheparte", "data", "datadioggi", "datareferral", "decrementnumber", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definemathalignment", "definepagebreak", "defineplacement", "definetypeface", "definisci", "definisciaccento", "definisciambientefontdeltesto", "definisciblocco", "definiscibloccosezione", "definiscibuffer", "definiscicampo", "definiscicampoprincipale", "definiscicapoversi", "definiscicarattere", "definiscicolore", "definiscicomando", "definisciconversione", "definiscidescrizione", "definiscidimensionicarta", "definiscielenco", "definiscielencocombinato", "definiscienumerazione", "definiscietichetta", "definiscifigurasimbolo", "definiscifont", "definiscifontdeltesto", "definiscifontgrezzo", "definisciformatoriferimento", "definiscigruppocolonne", "definiscigruppocolori", "definiscihbox", "definisciincorniciato", "definisciiniziatermina", "definiscilayout", "definiscilinea", "definiscilistariferimenti", "definiscilogo", "definiscimakeup", "definiscimarcatura", "definiscimenuinterazione", "definiscimodellotabella", "definiscioggettomobile", "definisciordinamento", "definiscioutput", "definisciposizionetesto", "definisciprofilo", "definisciprogramma", "definisciregistro", "definisciriferimento", "definiscirigovuoto", "definiscisezione", "definiscisimbolo", "definiscisinonimi", "definiscisinonimofont", "definiscisottocampo", "definiscisovrapposizione", "definiscistackcampi", "definiscistile", "definiscistilefont", "definiscitabulato", "definiscitavolozza", "definiscitesta", "definiscitesto", "definiscitestoincorniciato", "definiscitype", "definiscityping", "definiscivariabiletesto", "definisciversion", "description", "determinacaratteristicheregistro", "determinacarattersticheelenco", "determinanumerotesta", "dimensione", "disabilitamenuinterazione", "distanzabordo", "distanzabordodestro", "distanzabordosinistro", "distanzacima", "distanzafondo", "distanzaintestazione", "distanzamargine", "distanzamarginedestro", "distanzamarginesinistro", "distanzapdp", "domicilio", 
"el", "elaborablocchi", "elaborapagina", "elementi", "elemento", "emptylines", "enumeration", "etichetta", "etichette", "fatto", "figuraesterna", "fondo", "forzablocchi", "framedtext", "frazione", "getnumber", "giornosettimana", "griglia", "headsym", "hl", "ignoto", "immediatebetweenlist", "immediatetolist", "impaccato", "impostaallineamento", "impostaambientefontdeltesto", "impostaampiezzariga", "impostabarrainterazione", "impostabarrasincronizzazione", "impostablocchimargine", "impostablocco", "impostabloccosezione", "impostabuffer", "impostacampi", "impostacampo", "impostacapoversi", "impostacaption", "impostacaptions", "impostacima", "impostaclippling", "impostacolonne", "impostacolore", "impostacolori", "impostacombinazioni", "impostacommento", "impostacommentopagina", "impostadefinizionenotepdp", "impostadescrizioni", "impostadimensionicarta", "impostaelementi", "impostaelencazioni", "impostaelenco", "impostaelencocombinato", "impostaenumerazioni", "impostafigureesterne", "impostafondo", "impostafontdeltesto", "impostaforms", "impostaformule", "impostagruppocolonne", "impostaincorniciato", "impostainiziatermina", "impostainmargine", "impostainstestazione", "impostainterazione", "impostainterlinea", "impostalayout", "impostalegenda", "impostalinea", "impostalineemargine", "impostalineenere", "impostalineeriempimento", "impostalineesottili", "impostalineetesto", "impostalingua", "impostalistariferimenti", "impostamaiuscole", "impostamakeup", "impostamarcatura", "impostamenuinterazione", "impostamenzione", "impostanotepdp", "impostanumerazione", "impostanumerazionecapoversi", "impostanumerazionepagina", "impostanumerazionerighe", "impostanumeropagina", "impostanumerosottopagina", "impostanumerotesta", "impostaoggettimobili", "impostaoggettomobile", "impostaordinamento", "impostaoutput", "impostaparranging", "impostapdp", "impostapiustretto", "impostaposizionamento", "impostaposizionamentoopposti", "impostaposizionetesto", "impostaprofili", "impostaprogrammi", "impostapubblicazioni", "impostapulsanti", "impostaregistro", "impostarientro", "impostariferimento", "impostarighe", "impostarigheriempimento", "impostarigovuoto", "impostarotazione", "impostaschermi", "impostaschermointerazione", "impostasegnosillabazione", "impostasetsimboli", "impostasezione", "impostasfondi", "impostasfondo", "impostasincronizzazione", "impostasinonimi", "impostasistema", "impostasottolinea", "impostaspaziatura", "impostaspaziobianco", "impostaspezzamentooggettomobile", "impostastrut", "impostatab", "impostatabelle", "impostatabulato", "impostatavolozza", "impostatesta", "impostateste", "impostatesticima", "impostatestifondo", "impostatestiincorniciati", "impostatestiintestazioni", "impostatestipdp", "impostatesto", "impostatestoetichette", "impostatestointestazioni", "impostatestotesti", "impostatolleranza", "impostatransizionepagina", "impostatype", "impostatyping", "impostaurl", "impostavariabiletesto", "impostaversioni", "impostazioni", "in", "inaltromargine", "incorniciato", "incrementanumero", "indentation", "indestra", "ininner", "iniziaallineamento", "iniziaambiente", "iniziabloccomargine", "iniziacitazione", "iniziacodifica", "iniziacolonne", "iniziacolore", "iniziacombinazione", "iniziacomponente", "iniziacorrezioneriga", "iniziadocumento", "iniziafigura", "iniziaglobale", "iniziagruppocolonne", "iniziaimpaccato", "inizialineamargine", "inizialineatesto", "inizialocale", "iniziamakeup", "inizianotepdplocali", "inizianumerazionerighe", "iniziaopposto", "iniziaoverview", "iniziapiustretto", 
"iniziaposizionamento", "iniziaprodotto", "iniziaprofilo", "iniziaprogetto", "iniziaraster", "iniziariga", "iniziarighe", "iniziasetsimboli", "iniziasfondo", "iniziasincronizzazione", "iniziasovrapposizione", "iniziatabella", "iniziatabelle", "iniziatesto", "iniziaunpacked", "iniziaversione", "inlatodestro", "inlatosinistro", "inmaframed", "inmargine", "inmarginedestro", "inmarginesinistro", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "inriga", "insinistra", "installalingua", "intorno", "labeling", "leg", "lettera", "lettere", "lineamargine", "lineanera", "lineasottile", "lineatesto", "lineenere", "lineeriempimento", "lineesottili", "lingua", "linguaprincipale", "listsymbol", "livellotesta", "loadsorts", "loadsynonyms", "logcampi", "lunghezzaelenco", "maframed", "mapfontsize", "mar", "marcatura", "marcaversione", "matematica", "mediaeval", "menuinterattivo", "menzione", "mese", "mettielenco", "mettielencocombinato", "mettifiancoafianco", "mettiformula", "mettiingriglia", "mettilegenda", "mettilinea", "mettiloghi", "mettinotepdp", "mettinotepdplocali", "mettinumeropagina", "mettiregistro", "mettisegnalibro", "mettisottoformula", "mettiunosullaltro", "mettivariabiletesto", "mostraambientefontdeltesto", "mostracampi", "mostracolore", "mostracornice", "mostrafiguresterne", "mostrafontdeltesto", "mostragriglia", "mostragruppocolori", "mostraimpostazioni", "mostralyout", "mostramakeup", "mostrasetsimboli", "mostrastampa", "mostrastruts", "mostratavolozza", "movesidefloat", "name", "nascondiblocchi", "navigating", "nextsection", "nientedimensioni", "nienteelenco", "nientelineecimafondo", "nientelineintestazionepdp", "nientemarcatura", "nienterientro", "nientespazio", "nientespaziobianco", "nocap", "nome", "nomeunita", "nop", "nota", "notapdp", "notest", "numberofsubpages", "numeri", "numeriromani", "numeroformula", "numeropagina", "numeropaginacompleto", "numerosottoformula", "numerotesta", "numerotestacorrente", "numerototaledipagine", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "pagedepth", "pageoffset", "pagina", "paragraph", "paroladestra", "parolainmargine", "part", "passaafontgrezzo", "ped", "pedap", "perlungo", "placefloat", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placerawlist", "placereferencelist", "posizionanumerotesta", "posizionatesto", "posizionatestotesta", "posizione", "prendibuffer", "prendimarcatura", "prodotto", "progetto", "programma", "pubblicazione", "pulsante", "pulsantemenu", "pulsantinterazione", "punti", "qualcheriga", "ran", "referral", "referring", "register", "reimposta", "reimpostamarcatura", "reservefloat", "resetnumber", "resettextcontent", "rientro", "rif", "rifai", "riferimento", "riferimentopagina", "riferimentotesto", "riflessione", "rigariempimento", "rigovuoto", "ruota", "saltablocchi", "scala", "schermo", "scrividentroelenco", "scriviinelenco", "scriviinlistariferimenti", "scriviinregistro", "section", "seeregister", "segnalibro", "seguiprofilo", "seguiversione", "seguiversioneprofilo", "selezionablocchi", "selezionacarta", "selezionaversione", "separamarcatura", "setnumber", "settext", "setupanswerarea", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setuppaper", "setupplacement", "setvariabiletesto", "sfondo", "sim", "simbolo", "sincronizza", 
"sort", "spazifissi", "spazio", "spaziobianco", "spaziocima", "spaziodietro", "spaziofisso", "spaziofondo", "spessoreriga", "spezzaoggettomobile", "spostaagriglia", "spostaformula", "stackcampi", "startalignment", "startambiente", "startbuffer", "startcitazione", "startcolore", "startcolumnmakeup", "startcolumns", "startcombination", "startcomment", "startcomponenet", "startdescription", "startdocument", "startenumeration", "startfatto", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startimpaccato", "startitemgroup", "startlegend", "startline", "startlineamargine", "startlineatesto", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmenuinterattivo", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startprodotto", "startprofile", "startprogetto", "startregister", "startsfondo", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttyping", "startunpacked", "startversione", "stirato", "stopalignment", "stopambiente", "stopbuffer", "stopcitazione", "stopcolore", "stopcolumnmakeup", "stopcolumns", "stopcombination", "stopcomment", "stopcomponenet", "stopdescription", "stopdocument", "stopenumeration", "stopfatto", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopimpaccato", "stopitemgroup", "stoplegend", "stopline", "stoplineamargine", "stoplineatesto", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmenuinterattivo", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stopprodotto", "stopprofile", "stopprogetto", "stopsfondo", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptyping", "stopunpacked", "stopversione", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "synonym", "tab", "terminaallineamento", "terminaambiente", "terminabloccomargine", "terminacitazione", "terminacodifica", "terminacolonne", "terminacolore", "terminacombinazione", "terminacomponente", "terminacorrezioneriga", "terminadocumento", "terminaglobale", "terminagruppocolonne", "terminaimpaccato", "terminalineamargine", "terminalineatesto", "terminalocale", "terminamakeup", "terminanotepdplocali", "terminanumerazionerighe", "terminaopposto", "terminaoverview", "terminapiustretto", "terminaposizionamento", "terminaprodotto", "terminaprofili", "terminaprogetto", "terminaraster", "terminariga", "terminarighe", "terminasfondo", "terminasincronizzazione", "terminasovrapposizione", "terminatabella", "terminatabelle", "terminatesto", "terminaunpacked", "terminaversioni", "testa", "testcolumn", "testoetichetta", "testoinmargine", "testoinstestazioni", "testonotapdp", "testoriempimento", "testpage", "tex", "tieniblocchi", "title", "titoloinmargine", "tooltip", "traduci", "txt", "typ", "type", "typebuffer", "typefile", "underbar", "underbars", "usaJSscripts", "usaURL", "usablocco", "usacartella", "usacodifica", "usacolonnasonoraesterna", "usacomandi", "usadocumentoesterno", "usafiguraesterna", "usafileesterni", "usafileesterno", "usamoduli", "usamodulo", "usariferimenti", "usasimboli", "usaspecialita", "usaurl", "useXMLfilter", "usedirectory", 
"usetypescript", "usetypescriptfile", "vaia", "vaiabox", "vaiapagina", "vaigiu", "valorecolore", "valoregrigio", "variabiletesto", "versione", "vl" },
- ["nl"]={ "CAP", "Cap", "Caps", "Cijfers", "KAP", "Kap", "Kaps", "Letter", "Letters", "MAAND", "Romeins", "WEEKDAG", "WOORD", "WOORDEN", "Woord", "Woorden", "aantalsubpaginas", "about", "achtergrond", "appendix", "arg", "bepaalkopnummer", "bepaallijstkenmerken", "bepaalregisterkenmerken", "betekenis", "binnenmargeafstand", "binnenmargebreedte", "binnenrandafstand", "binnenrandbreedte", "blanko", "blokje", "blokjes", "blokkeerinteractiemenu", "bodemwit", "bookmark", "bovenafstand", "bovenhoogte", "breuk", "buitenmargeafstand", "buitenmargebreedte", "buitenrandafstand", "buitenrandbreedte", "but", "button", "cap", "chapter", "chem", "cijfers", "citaat", "citeer", "clip", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "converteernummer", "copieerveld", "corrigeerwitruimte", "coupledregister", "crlf", "datum", "definebodyfontDEF", "definebodyfontREF", "definedfont", "definefontfeature", "definefonthandling", "definerawfont", "definetypeface", "definieer", "definieeraccent", "definieeralineas", "definieerbeeldmerk", "definieerblanko", "definieerblok", "definieerbuffer", "definieercombinatie", "definieercommando", "definieerconversie", "definieerfiguursymbool", "definieerfont", "definieerfontstijl", "definieerfontsynoniem", "definieerhbox", "definieerhoofdveld", "definieeringesprongentext", "definieerinmarge", "definieerinteractiemenu", "definieeritemgroep", "definieerkadertekst", "definieerkarakter", "definieerkleur", "definieerkleurgroep", "definieerkolomgroep", "definieerkolomovergang", "definieerkop", "definieerkorps", "definieerkorpsomgeving", "definieerlayer", "definieerlayout", "definieerletter", "definieerlijn", "definieerlijst", "definieermarkering", "definieeromlijnd", "definieeropmaak", "definieeroverlay", "definieerpaginaovergang", "definieerpalet", "definieerpapierformaat", "definieerplaats", "definieerplaatsblok", "definieerprofiel", "definieerprogramma", "definieerreferentie", "definieerreferentieformaat", "definieerreferentielijst", "definieerregister", "definieersamengesteldelijst", "definieersectie", "definieersectieblok", "definieersorteren", "definieerstartstop", "definieersubveld", "definieersymbool", "definieersynoniemen", "definieertabelvorm", "definieertabulatie", "definieertekst", "definieertekstpositie", "definieertekstvariabele", "definieertype", "definieertypen", "definieeruitvoer", "definieerveld", "definieerveldstapel", "definieerversie", "definieerwiskundeuitlijnen", "description", "dimensie", "directnaarlijst", "directtussenlijst", "doordefinieren", "doorlabelen", "doornummeren", "dunnelijn", "dunnelijnen", "eenregel", "enumeration", "ergens", "externfiguur", "forceerblokken", "formulenummer", "framedtext", "gebruikJSscripts", "gebruikURL", "gebruikXMLfilter", "gebruikblokken", "gebruikcommandos", "gebruikexterndocument", "gebruikexternefile", "gebruikexternefiles", "gebruikexternfiguur", "gebruikexterngeluidsfragment", "gebruikgebied", "gebruikmodule", "gebruikmodules", "gebruikreferenties", "gebruikspecials", "gebruiksymbolen", "gebruiktypescript", "gebruiktypescriptfile", "gebruikurl", "geenblokkenmeer", "geenbovenenonderregels", "geendimensie", "geenfilesmeer", "geenhoofdenvoetregels", "geenlijst", "geenmarkering", "geenspatie", "geentest", "geenwitruimte", "geg", "grijskleur", "grijswaarde", "haalbuffer", "haalmarkering", "haalnummer", "haarlijn", "handhaafblokken", "herhaal", "hl", "hoofdafstand", "hoofdhoogte", "hoofdtaal", "hoog", "huidigedatum", "huidigekopnummer", "in", "inanderemarge", 
"inbinnen", "inbuiten", "indentation", "inlijnd", "inlinker", "inlinkermarge", "inlinkerrand", "inmarge", "inrechter", "inrechtermarge", "inrechterrand", "inregel", "inspringen", "installeertaal", "instellingen", "interactiebalk", "interactiebuttons", "interactiemenu", "invullijnen", "invulregel", "invultekst", "invulveld", "inwilijnd", "items", "its", "kantlijn", "kap", "kenmerk", "kenmerkdatum", "kentekstvariabeletoe", "kleur", "kleurenbalk", "kleurwaarde", "kloonveld", "kolom", "kop", "kopniveau", "kopnummer", "koppeldocument", "koppelmarkering", "koppelpagina", "koppelpapier", "koppelregister", "kopsym", "koptekst", "kopwit", "laag", "label", "labeling", "labels", "labeltekst", "laho", "leg", "legeregels", "letter", "letters", "lijndikte", "lijstbreedte", "lijsthoogte", "lijstlengte", "lijstsymbool", "linkermargeafstand", "linkermargebreedte", "linkerrandafstand", "linkerrandbreedte", "loadsorts", "loadsynonyms", "maand", "mapfontsize", "mar", "margeafstand", "margebreedte", "margetekst", "margetitel", "margewoord", "markeer", "markeerversie", "mediaeval", "menubutton", "naam", "naar", "naarbox", "naarpagina", "name", "navigerend", "nextsection", "nietinspringen", "nocap", "nokap", "noot", "nop", "omgeving", "omlaag", "omlijnd", "onbekend", "onderafstand", "onderdeel", "onderhoogte", "ontkoppelmarkering", "op", "opelkaar", "oplinkermarge", "oppagina", "oprechtermarge", "overbar", "overbars", "overstrike", "overstrikes", "pagina", "paginadiepte", "paginanummer", "paginaoffset", "paginareferentie", "papierbreedte", "papierhoogte", "paragraph", "part", "paslayoutaan", "passeerblokken", "passendveld", "plaatsbeeldmerken", "plaatsbookmarks", "plaatsformule", "plaatskopnummer", "plaatskoptekst", "plaatslegenda", "plaatslijn", "plaatslijst", "plaatslokalevoetnoten", "plaatsnaastelkaar", "plaatsonderelkaar", "plaatsopgrid", "plaatspaginanummer", "plaatsplaatsblok", "plaatsreferentielijst", "plaatsregister", "plaatsruwelijst", "plaatssamengesteldelijst", "plaatssubformule", "plaatstekstvariabele", "plaatsvoetnoten", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "positioneer", "positioneertekst", "printpapierbreedte", "printpapierhoogte", "produkt", "programma", "projekt", "publicatie", "punten", "ran", "randafstand", "randbreedte", "rechtermargeafstand", "rechtermargebreedte", "rechterrandafstand", "rechterrandbreedte", "ref", "refereer", "referentie", "regellinks", "regelmidden", "regelrechts", "register", "registreervelden", "reservefloat", "reset", "resetmarkering", "resetnummer", "resettekstinhoud", "resettextcontent", "romeins", "rooster", "roteer", "rugwit", "schaal", "scherm", "schrijfnaarlijst", "schrijfnaarreferentielijst", "schrijfnaarregister", "schrijftussenlijst", "section", "seeregister", "selecteerblokken", "selecteerpapier", "selecteerversie", "setnummer", "setupfonthandling", "setupfontsynonym", "setupinterlinespace2", "setuplistalternative", "snijwit", "som", "sort", "spatie", "spiegel", "splitsplaatsblok", "startachtergrond", "startalignment", "startbuffer", "startcitaat", "startcodering", "startcolumns", "startcombinatie", "startcombination", "startcomment", "startdescription", "startdocument", "startenumeration", "startfigure", "startfiguur", "startfloattext", "startformula", "startframedtext", "startgeg", "startglobaal", "starthiding", "startinteractiemenu", "startitemgroup", "startkantlijn", "startkleur", "startkolomgroep", "startkolommen", "startkolomopmaak", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", 
"startlocal", "startlocalenvironment", "startlocalfootnotes", "startlokaal", "startlokalevoetnoten", "startmakeup", "startmargeblok", "startmarginblock", "startnaast", "startnamemakeup", "startnarrower", "startomgeving", "startonderdeel", "startopelkaar", "startopmaak", "startopposite", "startoverlay", "startoverview", "startoverzicht", "startparagraph", "startpositioneren", "startpositioning", "startpostponing", "startprodukt", "startprofiel", "startprofile", "startprojekt", "startraster", "startregel", "startregelcorrectie", "startregelnummeren", "startregels", "startregister", "startsmaller", "startsymbolset", "startsymboolset", "startsynchronisatie", "startsynchronization", "starttabel", "starttabellen", "starttable", "starttables", "starttabulate", "starttekst", "starttekstlijn", "starttyping", "startuitlijnen", "startunpacked", "startvanelkaar", "startversie", "stelachtergrondenin", "stelachtergrondin", "stelalineasin", "stelantwoordgebiedin", "stelarrangerenin", "stelblankoin", "stelblokin", "stelblokjesin", "stelblokkopjein", "stelblokkopjesin", "stelbovenin", "stelboventekstenin", "stelbufferin", "stelbuttonsin", "stelciterenin", "stelclipin", "stelcombinatiesin", "stelcommentaarin", "steldoordefinierenin", "steldoornummerenin", "steldunnelijnenin", "stelexternefigurenin", "stelformulesin", "stelformulierenin", "stelhoofdin", "stelhoofdtekstenin", "stelingesprongentextin", "stelinmargein", "stelinspringenin", "stelinteractiebalkin", "stelinteractiein", "stelinteractiemenuin", "stelinteractieschermin", "stelinterliniein", "stelinvullijnenin", "stelinvulregelsin", "stelitemgroepin", "stelitemsin", "stelkadertekstenin", "stelkantlijnin", "stelkapitalenin", "stelkleurenin", "stelkleurin", "stelkolomgroepin", "stelkolomgroepregelsin", "stelkolomgroepstartin", "stelkolommenin", "stelkopin", "stelkopnummerin", "stelkoppeltekenin", "stelkoppenin", "stelkoptekstin", "stelkorpsin", "stelkorpsomgevingin", "stellabeltekstin", "stellayoutin", "stellegendain", "stellijndiktein", "stellijnin", "stellijstin", "stelmargeblokkenin", "stelmarkeringin", "stelnaastplaatsenin", "stelnummerenin", "stelnummerin", "stelomlijndin", "stelonderin", "stelonderstrepenin", "stelondertekstenin", "stelopmaakin", "stelopsommingenin", "stelpaginacommentaarin", "stelpaginanummerin", "stelpaginanummeringin", "stelpaginaovergangenin", "stelpaletin", "stelpapierformaatin", "stelpapierin", "stelparagraafnummerenin", "stelplaatsblokin", "stelplaatsblokkenin", "stelplaatsbloksplitsenin", "stelplaatsin", "stelpositionerenin", "stelprofielenin", "stelprogrammasin", "stelpublicatiesin", "stelrastersin", "stelreferentielijstin", "stelrefererenin", "stelregelnummerenin", "stelregelsin", "stelregisterin", "stelroterenin", "stelsamengesteldelijstin", "stelsectieblokin", "stelsectiein", "stelsmallerin", "stelsorterenin", "stelspatieringin", "stelstartstopin", "stelstrutin", "stelsubpaginanummerin", "stelsymboolsetin", "stelsynchronisatiebalkin", "stelsynchronisatiein", "stelsynoniemenin", "stelsysteemin", "steltaalin", "steltabellenin", "steltabin", "steltabulatiein", "steltekstin", "steltekstinhoudin", "steltekstlijnenin", "steltekstpositiein", "stelteksttekstenin", "steltekstvariabelein", "steltolerantiein", "steltypein", "steltypenin", "steluitlijnenin", "steluitvoerin", "stelurlin", "stelveldenin", "stelveldin", "stelversiesin", "stelvoetin", "stelvoetnootdefinitiein", "stelvoetnotenin", "stelvoettekstenin", "stelwiskundeuitlijnenin", "stelwitruimtein", "stopachtergrond", "stopalignment", "stopbuffer", "stopcitaat", 
"stopcodering", "stopcolumns", "stopcombinatie", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopenumeration", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopgeg", "stopglobaal", "stophiding", "stopinteractiemenu", "stopitemgroup", "stopkantlijn", "stopkleur", "stopkolomgroep", "stopkolommen", "stopkolomopmaak", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stoplokaal", "stoplokalevoetnoten", "stopmakeup", "stopmargeblok", "stopmarginblock", "stopnaast", "stopnamemakeup", "stopnarrower", "stopomgeving", "stoponderdeel", "stopopelkaar", "stopopmaak", "stopopposite", "stopoverlay", "stopoverview", "stopoverzicht", "stopparagraph", "stoppositioneren", "stoppositioning", "stoppostponing", "stopprodukt", "stopprofiel", "stopprofile", "stopprojekt", "stopraster", "stopregel", "stopregelcorrectie", "stopregelnummeren", "stopregels", "stopsmaller", "stopsymbolset", "stopsynchronisatie", "stopsynchronization", "stoptabel", "stoptabellen", "stoptable", "stoptables", "stoptabulate", "stoptekst", "stoptekstlijn", "stoptyping", "stopuitlijnen", "stopunpacked", "stopvanelkaar", "stopversie", "sub", "subformulenummer", "subject", "subpaginanummer", "subsection", "subsubject", "subsubsection", "subsubsubject", "suggestie", "switchnaarkorps", "switchtorawfont", "sym", "symbool", "symoffset", "synchronisatiebalk", "synchroniseer", "synonym", "taal", "tab", "tekstbreedte", "teksthoogte", "tekstlijn", "tekstreferentie", "tekstvariabele", "testkolom", "testpagina", "tex", "title", "toelichting", "toonexternefiguren", "toongrid", "tooninstellingen", "toonkader", "toonkleur", "toonkleurgroep", "toonkorps", "toonkorpsomgeving", "toonlayout", "toonopmaak", "toonpalet", "toonprint", "toonstruts", "toonsymboolset", "toonvelden", "totaalaantalpaginas", "txt", "typ", "type", "typebuffer", "typefile", "uit", "uitgerekt", "underbar", "underbars", "usecodering", "usedirectory", "vastespatie", "vastespaties", "veld", "veldstapel", "verbergblokken", "vergelijkkleurgroep", "vergelijkpalet", "verhoognummer", "verlaagnummer", "verplaatsformule", "verplaatsopgrid", "verplaatszijblok", "versie", "vertaal", "verwerkblokken", "verwerkpagina", "vl", "voetafstand", "voethoogte", "voetnoot", "voetnoottekst", "volgprofiel", "volgprofielversie", "volgversie", "volledigepaginanummer", "volledigregister", "voluit", "weekdag", "wilijnd", "wiskunde", "witruimte", "woonplaats", "woordrechts", "zetbreedte", "zethoogte" },
- ["pe"]={ "CAP", "Cap", "Caps", "Character", "Characters", "MONTH", "Numbers", "Romannumerals", "WEEKDAY", "WORD", "WORDS", "Word", "Words", "appendix", "cap", "chapter", "chem", "comment", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "coupledregister", "crlf", "definebodyfontDEF", "definebodyfontREF", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "definetypeface", "description", "enumeration", "framedtext", "indentation", "inmframed", "its", "labeling", "loadsorts", "loadsynonyms", "mapfontsize", "mediaeval", "mframed", "name", "nextsection", "nocap", "overbar", "overbars", "overstrike", "overstrikes", "paragraph", "part", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "ran", "register", "reservefloat", "resettextcontent", "section", "seeregister", "setupanswerarea", "setupcapitals", "setupfonthandling", "setupfontsynonym", "setupindentedtext", "setupinterlinespace2", "setuplistalternative", "setupurl", "sort", "startalignment", "startbuffer", "startcolumns", "startcombination", "startcomment", "startdescription", "startdocument", "startenumeration", "startfigure", "startfloattext", "startformula", "startframedtext", "starthiding", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startnamemakeup", "startnarrower", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startprofile", "startraster", "startregister", "startsymbolset", "startsynchronization", "starttable", "starttables", "starttabulate", "starttyping", "startunpacked", "startتولید", "startحقیقت", "startخط‌حاشیه", "startخط‌متن", "startرنگ", "startفشرده", "startمحیط", "startمنوی‌پانل", "startمولفه", "startنسخه", "startنقل‌قول", "startپروژه", "startپس‌زمینه", "stopalignment", "stopbuffer", "stopcolumns", "stopcombination", "stopcomment", "stopdescription", "stopdocument", "stopenumeration", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stophiding", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopnamemakeup", "stopnarrower", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stopprofile", "stopraster", "stopsymbolset", "stopsynchronization", "stoptable", "stoptables", "stoptabulate", "stoptyping", "stopunpacked", "stopتولید", "stopحقیقت", "stopخط‌حاشیه", "stopخط‌متن", "stopرنگ", "stopفشرده", "stopمحیط", "stopمنوی‌پانل", "stopمولفه", "stopنسخه", "stopنقل‌قول", "stopپروژه", "stopپس‌زمینه", "sub", "subject", "subsection", "subsubject", "subsubsection", "subsubsubject", "synonym", "title", "tooltip", "txt", "typ", "underbar", "underbars", "useJSscripts", "useURL", "useXMLfilter", "usedirectory", "useurl", "آفست‌صفحه", "آیتم", "آیتمها", "آینه", "اجباربلوکها", "ارتفاع‌آرایش", "ارتفاع‌بالا", "ارتفاع‌برگ", "ارتفاع‌ته‌برگ", "ارتفاع‌خط", "ارتفاع‌سربرگ", "ارتفاع‌متن", "ارتفاع‌پایین", "از", "ازکارانداختن‌منوی‌پانل", "استفاده‌بلوکها", "استفاده‌دستخط‌تایپ", "استفاده‌رمزینه", "استفاده‌شکل‌خارجی", "استفاده‌فرمانها", "استفاده‌قطعه‌موزیک‌خارجی", "استفاده‌مدول", "استفاده‌مدولها", "استفاده‌مرجعها", "استفاده‌مسیر", "استفاده‌نمادها", "استفاده‌نوشتارخارجی", "استفاده‌ویژگیها", "استفاده‌پرونده‌خارجی", "استفاده‌پرونده‌دستخط‌تایپ", 
"استفاده‌پرونده‌های‌خارجی", "اعدادلاتین", "افزودن", "اما", "امتحان‌نکن", "انتخاب‌برگ", "انتخاب‌بلوکها", "انتخاب‌نسخه", "انتقال‌به‌توری", "انتقال‌فرمول", "انتقال‌کنار‌شناور", "انجام‌دوباره", "بارگذاریها", "بارگذاری‌آرایش", "بارگذاری‌آیتمها", "بارگذاری‌ارجاع", "بارگذاری‌اندازه‌برگ", "بارگذاری‌باریکتر", "بارگذاری‌بافر", "بارگذاری‌بالا", "بارگذاری‌بخش", "بارگذاری‌بردباری", "بارگذاری‌برنامه‌ها", "بارگذاری‌برگ", "بارگذاری‌بست", "بارگذاری‌بلوک", "بارگذاری‌بلوکهای‌حاشیه", "بارگذاری‌بلوک‌بخش", "بارگذاری‌تایپ", "بارگذاری‌تایپ‌کردن", "بارگذاری‌تب", "بارگذاری‌ترتیب", "بارگذاری‌ترکیب‌ها", "بارگذاری‌تطابق", "بارگذاری‌تعریف‌پانوشت", "بارگذاری‌تنظیم", "بارگذاری‌تنظیم‌ریاضی", "بارگذاری‌ته‌برگ", "بارگذاری‌تورفتگی", "بارگذاری‌توضیح", "بارگذاری‌توضیح‌صفحه", "بارگذاری‌ثبت", "بارگذاری‌جانشانی", "بارگذاری‌جدولها", "بارگذاری‌جدول‌بندی", "بارگذاری‌خالی", "بارگذاری‌خروجی", "بارگذاری‌خط", "بارگذاری‌خطها", "بارگذاری‌خطهای‌حاشیه", "بارگذاری‌خطهای‌سیاه", "بارگذاری‌خطهای‌متن", "بارگذاری‌خطهای‌مجموعه‌ستون", "بارگذاری‌خطها‌ی‌نازک", "بارگذاری‌درج‌درخطها", "بارگذاری‌درج‌مخالف", "بارگذاری‌درون‌حاشیه", "بارگذاری‌دوران", "بارگذاری‌دکمه‌ها", "بارگذاری‌راهنما", "بارگذاری‌رنگ", "بارگذاری‌رنگها", "بارگذاری‌زبان", "بارگذاری‌ستونها", "بارگذاری‌سر", "بارگذاری‌سربرگ", "بارگذاری‌سرها", "بارگذاری‌سیستم", "بارگذاری‌شرح", "بارگذاری‌شرحها", "بارگذاری‌شروع‌مجموعه‌ستون", "بارگذاری‌شروع‌پایان", "بارگذاری‌شماره", "بارگذاری‌شماره‌زیرصفحه", "بارگذاری‌شماره‌سر", "بارگذاری‌شماره‌صفحه", "بارگذاری‌شماره‌گذاری", "بارگذاری‌شماره‌گذاریها", "بارگذاری‌شماره‌گذاری‌صفحه", "بارگذاری‌شماره‌گذاری‌پاراگراف", "بارگذاری‌شماره‌‌گذاری‌خط", "بارگذاری‌شناور", "بارگذاری‌شناورها", "بارگذاری‌شکافتن‌شناورها", "بارگذاری‌شکلهای‌خارجی", "بارگذاری‌طرح", "بارگذاری‌طرح‌بندی", "بارگذاری‌عرض‌خط", "بارگذاری‌فاصله‌بین‌خط", "بارگذاری‌فرمولها", "بارگذاری‌فضای‌سفید", "بارگذاری‌فضا‌گذاری", "بارگذاری‌قالبی", "بارگذاری‌قلم‌متن", "بارگذاری‌لوح", "بارگذاری‌لیست", "بارگذاری‌لیست‌ترکیبی", "بارگذاری‌لیست‌مرجع", "بارگذاری‌مترادفها", "بارگذاری‌متغیر‌متن", "بارگذاری‌متن", "بارگذاری‌متنهای‌بالا", "بارگذاری‌متن‌سر", "بارگذاری‌متن‌سربرگ", "بارگذاری‌متن‌قالبی", "بارگذاری‌متن‌متنها", "بارگذاری‌متن‌پانوشت", "بارگذاری‌متن‌پایین", "بارگذاری‌مجموعه‌ستون", "بارگذاری‌مجموعه‌نماد", "بارگذاری‌محیط‌قلم‌متن", "بارگذاری‌منوی‌پانل", "بارگذاری‌مکان‌متن", "بارگذاری‌مکان‌گذاری", "بارگذاری‌میدان", "بارگذاری‌میدانها", "بارگذاری‌میله‌تطابق", "بارگذاری‌میله‌زیر", "بارگذاری‌میله‌پانل", "بارگذاری‌نسخه‌ها", "بارگذاری‌نشانه‌شکستن", "بارگذاری‌نشانه‌گذاری", "بارگذاری‌نشرها", "بارگذاری‌نقل", "بارگذاری‌پاراگرافها", "بارگذاری‌پانل", "بارگذاری‌پانوشتها", "بارگذاری‌پایین", "بارگذاری‌پرده‌ها", "بارگذاری‌پرده‌پانل", "بارگذاری‌پروفایلها", "بارگذاری‌پرکردن‌خطها", "بارگذاری‌پس‌زمینه", "بارگذاری‌پس‌زمینه‌ها", "بارگذاری‌چیدن", "بارگذاری‌گذارصفحه", "بارگذاری‌گروههای‌آیتم", "بارگذاری‌گروه‌آیتم", "بازنشانی", "بازنشانی‌شماره", "بازنشانی‌متن", "بازنشانی‌نشانه‌گذاری", "باگذاری‌متن‌برچسب", "بدون‌بعد", "بدون‌بلوکهای‌بیشتر", "بدون‌تورفتگی", "بدون‌خط‌بالاوپایین", "بدون‌خط‌سروته‌برگ", "بدون‌فایلهای‌بیشتر", "بدون‌فضا", "بدون‌فضای‌سفید", "بدون‌لیست", "بدون‌نشانه‌گذاری", "برنامه", "بروبه", "بروبه‌جعبه", "بروبه‌صفحه", "بروپایین", "برچسب", "برچسبها", "بعد", "بلند", "بلوکهای‌پردازش", "بلوکها‌پنهان", "بنویس‌بین‌لیست", "بنویس‌درثبت", "بنویس‌درلیست‌مرجع", "بنویس‌در‌لیست", "تاریخ", "تاریخ‌جاری", "تاریخ‌رجوع", "تایپ", "تایپ‌بافر", "تایپ‌پرونده", "تب", "ترجمه", "تطابق", "تعریف", "تعریف‌آرایش", "تعریف‌آرم", "تعریف‌الگوی‌جدول", "تعریف‌اندازه‌برگ", "تعریف‌بافر", "تعریف‌بخش", "تعریف‌برنامه", "تعریف‌برچسب", "تعریف‌بلوک", "تعریف‌بلوک‌بخش", "تعریف‌تایپ", 
"تعریف‌تایپ‌کردن", "تعریف‌تبدیل", "تعریف‌ترتیب", "تعریف‌ترکیب", "تعریف‌تنظیم‌ریاضی", "تعریف‌توده‌میدان", "تعریف‌ثبت", "تعریف‌جانشانی", "تعریف‌جدول‌بندی", "تعریف‌جعبه‌‌افقی", "تعریف‌حرف", "تعریف‌خالی", "تعریف‌خروجی", "تعریف‌خط‌حائل", "تعریف‌درون‌حاشیه", "تعریف‌رنگ", "تعریف‌زیرمیدان", "تعریف‌سبک", "تعریف‌سبک‌قلم", "تعریف‌سر", "تعریف‌شرح", "تعریف‌شروع‌پایان", "تعریف‌شماره‌بندی", "تعریف‌شمایل‌مرجع", "تعریف‌شناور", "تعریف‌شکستن‌ستون", "تعریف‌شکست‌صفحه", "تعریف‌طرح‌بندی", "تعریف‌فرمان", "تعریف‌قالبی", "تعریف‌قلم", "تعریف‌قلم‌خام", "تعریف‌قلم‌متن", "تعریف‌لایه", "تعریف‌لهجه", "تعریف‌لوح", "تعریف‌لیست", "تعریف‌لیست‌ترکیبی", "تعریف‌لیست‌مرجع", "تعریف‌مترادفها", "تعریف‌مترادف‌قلم", "تعریف‌متغیرمتن", "تعریف‌متن", "تعریف‌متن‌قالبی", "تعریف‌مجموعه‌ستون", "تعریف‌محیط‌قلم‌بدنه", "تعریف‌مرجع", "تعریف‌منوی‌پانل", "تعریف‌مکان‌متن", "تعریف‌میدان", "تعریف‌میدان‌اصلی", "تعریف‌نسخه", "تعریف‌نشانه‌گذاری", "تعریف‌نماد", "تعریف‌نمادشکل", "تعریف‌پاراگرافها", "تعریف‌پروفایل", "تعریف‌پوشش", "تعریف‌گروه‌آیتم", "تعریف‌گروه‌رنگ", "تعیین‌شماره", "تعیین‌شماره‌سر", "تعیین‌متغیر‌متن", "تعیین‌محتوای‌متن", "تعیین‌مشخصات‌ثبت", "تعیین‌مشخصات‌لیست", "تغییربه‌قلم‌بدنه", "تغییربه‌قلم‌خام", "تنظیم‌راست", "تنظیم‌طرح‌بندی", "تنظیم‌وسط", "توجه", "تورفتگی", "توری", "تولید", "تک", "ثبت‌زوج", "ثبت‌کامل", "جداسازی‌نشانه‌گذاری", "حاش", "حرف", "حرفها", "حفظ‌بلوکها", "حقیقت", "خالی", "خطهای‌سیاه", "خطهای‌نازک", "خطها‌خالی", "خط‌حاشیه", "خط‌سیاه", "خط‌متن", "خط‌مو", "خط‌نازک", "خ‌ا", "خ‌ع", "در", "درج‌آرمها", "درج‌ثبت", "درج‌خط", "درج‌درخط", "درج‌درخطها", "درج‌درمتن", "درج‌درمیدان", "درج‌در‌بالای‌یکدیگر", "درج‌در‌توری", "درج‌راهنما", "درج‌زیرفرمول", "درج‌شماره‌سر", "درج‌شماره‌صفحه", "درج‌شناور", "درج‌فرمول", "درج‌لیست", "درج‌لیست‌خام", "درج‌لیست‌مختلط", "درج‌لیست‌مرجع", "درج‌متغیرمتن", "درج‌متن‌سر", "درج‌پانوشتها", "درج‌پانوشتهای‌موضعی", "درج‌چوب‌خط", "درج‌کنار‌به‌کنار", "درحاشیه", "درحاشیه‌دیگر", "درحاشیه‌راست", "درحاشیه‌چپ", "درخارجی", "درخط", "درداخلی", "درراست", "درصفحه", "درقالبی", "درلبه‌راست", "درلبه‌چپ", "درمورد", "درون", "درپر", "درچپ", "دریافت‌بافر", "دریافت‌شماره", "دریافت‌نشانه", "دوران", "دکمه", "دکمه‌منو", "دکمه‌پانل", "رج", "رجوع", "رنگ", "رنگ‌خاکستری", "روزهفته", "ریاضی", "زبان", "زبان‌اصلی", "ستون", "ستون‌امتحان", "سر", "سرپوش‌کوچک‌نه", "شروع‌آرایش", "شروع‌آرایش‌ستون", "شروع‌باریکتر", "شروع‌بازبینی", "شروع‌بلوک‌حاشیه", "شروع‌ترکیب", "شروع‌تصحیح‌خط", "شروع‌تطابق", "شروع‌تنظیم", "شروع‌تولید", "شروع‌جدول", "شروع‌جدولها", "شروع‌خط", "شروع‌خطها", "شروع‌خط‌حاشیه", "شروع‌خط‌متن", "شروع‌رنگ", "شروع‌ستونها", "شروع‌سراسری", "شروع‌شماره‌گذاری‌خط", "شروع‌شکل", "شروع‌غیر‌فشرده", "شروع‌فشرده", "شروع‌متن", "شروع‌مجموعه‌ستون", "شروع‌مجموعه‌نماد", "شروع‌محیط", "شروع‌مخالف", "شروع‌موضعی", "شروع‌مولفه", "شروع‌مکان‌گذاری", "شروع‌نسخه", "شروع‌نقل‌قول", "شروع‌نوشتار", "شروع‌پانوشتهای‌موضعی", "شروع‌پروفایل", "شروع‌پروژه", "شروع‌پس‌زمینه", "شروع‌پوشش", "شروع‌کد", "شماره‌افزایش", "شماره‌زیرصفحه", "شماره‌زیرفرمول", "شماره‌سر", "شماره‌سرجاری", "شماره‌صفحه", "شماره‌صفحه‌کامل", "شماره‌فرمول", "شماره‌مبدل", "شماره‌ها", "شماره‌کاهش", "شماره‌کل‌صفحه‌ها", "شکافتن‌شناور", "شکل‌خارجی", "صفحه", "صفحه‌تست", "صفحه‌زوج", "صفحه‌پردازش", "طول‌لیست", "عبوربلوکها", "عرض‌آرایش", "عرض‌برگ", "عرض‌حاشیه", "عرض‌حاشیه‌خارجی", "عرض‌حاشیه‌داخلی", "عرض‌حاشیه‌راست", "عرض‌حاشیه‌چپ", "عرض‌خط", "عرض‌لبه", "عرض‌لبه‌خارجی", "عرض‌لبه‌داخلی", "عرض‌لبه‌راست", "عرض‌لبه‌چپ", "عرض‌لیست", "عرض‌متن", "عمق‌صفحه", "عنوان‌حاشیه", "فاصله‌بالا", "فاصله‌ته‌برگ", "فاصله‌حاشیه", "فاصله‌حاشیه‌خارجی", "فاصله‌حاشیه‌داخلی", "فاصله‌حاشیه‌راست", "فاصله‌حاشیه‌چپ", "فاصله‌سربرگ", "فاصله‌لبه", 
"فاصله‌لبه‌خارجی", "فاصله‌لبه‌داخلی", "فاصله‌لبه‌راست", "فاصله‌لبه‌چپ", "فاصله‌پایین", "فاصله‌پشت", "فشرده", "فضا", "فضاهای‌ثابت", "فضای‌بالا", "فضای‌برش", "فضای‌ثابت", "فضای‌سفید", "فضای‌سفیدصحیح", "فضای‌پایین", "فوری‌به‌لیست", "فوری‌بین‌لیست", "قالبی", "لوح‌مقایسه", "ماه", "متغیر متن", "متن‌برچسب", "متن‌حاشیه", "متن‌سر", "متن‌پانوشت", "محیط", "مراجعه", "مرجع", "مرجع‌صفحه", "مرجع‌متن", "مرحله‌سر", "مسکن", "معنی‌واحد", "مقایسه‌گروه‌رنگ", "مقدارخاکستری", "مقداررنگ", "مقیاس", "منفی", "منوی‌پانل", "مولفه", "مکان", "مکان‌متن", "میدان", "میدانهای‌گزارش", "میدان‌شبیه‌سازی", "میدان‌پشته", "میدان‌کپی", "میله‌تطابق", "میله‌رنگ", "میله‌پانل", "ناشناس", "نام‌ماکرو", "نسخه", "نسخه‌نشانه", "نشانه‌گذاری", "نشانه‌گذاری‌زوج", "نشر", "نصب‌زبان", "نقطه‌ها", "نقل", "نقل‌قول", "نم", "نماد", "نمادسر", "نمادلیست", "نمایش‌آرایش", "نمایش‌بارگذاریها", "نمایش‌بستها", "نمایش‌توری", "نمایش‌رنگ", "نمایش‌شکلهای‌خارجی", "نمایش‌طرح‌بندی", "نمایش‌قالب", "نمایش‌قلم‌بدنه", "نمایش‌لوح", "نمایش‌مجموعه‌علامت", "نمایش‌محیط‌قلم‌بدنه", "نمایش‌میدانها", "نمایش‌چاپ", "نمایش‌گروه‌رنگ", "نوشتارزوج", "هدایت", "پا", "پابا", "پانوشت", "پایان‌آرایش", "پایان‌آرایش‌ستون", "پایان‌بازبینی", "پایان‌بلوک‌حاشیه", "پایان‌ترکیب", "پایان‌تصحیح‌خط", "پایان‌تطابق", "پایان‌تنظیم", "پایان‌تولید", "پایان‌جدول", "پایان‌جدولها", "پایان‌خط", "پایان‌خطها", "پایان‌خط‌حاشیه", "پایان‌خط‌متن", "پایان‌رنگ", "پایان‌ستونها", "پایان‌سراسری", "پایان‌شماره‌گذاری‌خط", "پایان‌غیرفشرده", "پایان‌فشرده", "پایان‌متن", "پایان‌مجموعه‌ستون", "پایان‌محیط", "پایان‌مخالف", "پایان‌موضعی", "پایان‌مولفه", "پایان‌مکان‌گذاری", "پایان‌نازکتر", "پایان‌نسخه", "پایان‌نقل‌قول", "پایان‌نوشتار", "پایان‌پانوشتهای‌موضعی", "پایان‌پروفایل", "پایان‌پروژه", "پایان‌پس‌زمینه", "پایان‌پوشش", "پایان‌کد", "پایین", "پرده", "پروژه", "پرکردن‌میدان", "پس‌زمینه", "پیروی‌نسخه", "پیروی‌نسخه‌پروفایل", "پیروی‌پروفایل", "چاپ‌ارتفاع‌برگ", "چاپ‌عرض‌برگ", "چوبخط", "چپ‌چین", "کاغذزوج", "کسر", "کشیده", "کلمه‌حاشیه", "کلمه‌راست", "گیره", "یادداشت", "یک‌جا", "یک‌خط" },
- ["ro"]={ "CAP", "CUVANT", "CUVINTE", "Cap", "Caps", "Cuvant", "Cuvinte", "KAP", "Kap", "Kaps", "LUNA", "Litera", "Litere", "Numere", "Numereromane", "ZIDINSAPTAMANA", "adapteazaaspect", "adubuffer", "adumarcaje", "afiseazaaspect", "afiseazacampuri", "afiseazaculoare", "afiseazafiguriexterne", "afiseazafonttext", "afiseazagrid", "afiseazagrupculoare", "afiseazamakeup", "afiseazamediufonttext", "afiseazapaleta", "afiseazarama", "afiseazasetari", "afiseazasetsimboluri", "afiseazastruts", "afiseazatiparire", "aliniat", "aliniatcentru", "aliniatdreapta", "aliniatstanga", "appendix", "arg", "ascundeblocuri", "atleftmargin", "atrightmargin", "baraculoare", "barainteractiune", "barasincronizare", "blanc", "but", "butoaneinteractiune", "buton", "butonmeniu", "camp", "campumplere", "cap", "chapter", "chem", "citat", "clip", "cloneazacamp", "coloana", "comment", "comparagrupculoare", "comparapaleta", "completeazanumarpagina", "completecombinedlist", "completelistoffloats", "completelistofsorts", "completelistofsynonyms", "completeregister", "componenta", "convertestenumar", "copiazacamp", "corecteazaspatiualb", "coupledregister", "crlf", "culoare", "culoaregri", "cupleazadocument", "cupleazamarcaje", "cupleazaregistru", "cutspace", "cuvantdreapta", "cuvantmarginal", "data", "datacurenta", "datareferit", "decrementnumber", "decupleazamarcaje", "definebodyfontDEF", "definebodyfontREF", "definecolumnbreak", "definecolumnset", "definecombination", "definedfont", "definefontfeature", "definefonthandling", "defineindentedtext", "defineinmargin", "defineitemgroup", "definelayer", "definelayout", "definemathalignment", "definepagebreak", "defineplacement", "defineste", "definesteaccent", "definesteantet", "definesteblanc", "definestebloc", "definesteblocsectiune", "definestebuffer", "definestecamp", "definestecampprincipal", "definestecaracter", "definestecomanda", "definesteconversie", "definesteculoare", "definestedescriere", "definestedimensiunehartie", "definesteenumerare", "definesteeticheta", "definestefloat", "definestefont", "definestefontraw", "definestefonttext", "definesteformatreferinte", "definestegrupculori", "definestehbox", "definesteinconjurare", "definestelista", "definestelistacombinata", "definestelistareferinte", "definestelogo", "definestemakeup", "definestemarcaje", "definestemediulfonttext", "definestemeniuinteractiune", "definesteoutput", "definesteoverlay", "definestepaleta", "definesteparagraf", "definestepozitietext", "definesteprofil", "definesteprogram", "definestereferinte", "definesteregistru", "definesterigla", "definestesablontabel", "definestesectiune", "definestesimbol", "definestesimbolfigura", "definestesinonim", "definestesinonimfont", "definestesortare", "definestestartstop", "definestestil", "definestestilfont", "definestestivacampuri", "definestesubcamp", "definestetabulatori", "definestetext", "definestetexteinconjurate", "definestetextinconjurat", "definestetyping", "definestevariabilatext", "definesteversiune", "definetype", "definetypeface", "description", "despre", "determinacaracteristicilelistei", "determinacaracteristiciregistru", "determinanumartitlu", "dezactiveazameniuinteractiune", "dimensiune", "din", "distantaantet", "distantacolt", "distantacoltdreapta", "distantacoltstanga", "distantajos", "distantamargine", "distantamarginedreapta", "distantamarginestanga", "distantasubsol", "distantasus", "domiciliu", "dute", "dutebox", "dutepagina", "ecran", "el", "element", "emptylines", "enumeration", "eticheta", "etichete", "fact", "faraaliniat", 
"faradimensiune", "farafisiere", "faraliniiantetsisubsol", "faraliniisussijos", "faralista", "faramarcaje", "faraspatiu", "faraspatiualb", "figuraexterna", "firdepar", "folosesteURL", "folosestebloc", "folosestecodificarea", "folosestecomenzi", "folosestedirector", "folosestedocumentextern", "folosestefiguraexterna", "folosestefisiereexterne", "folosestefisierextern", "folosestemodul", "folosestemodule", "folosestemuzicaexterna", "folosestereferinte", "folosestescriptJS", "folosestesimboluri", "folosestespeciale", "folosesteurl", "footnotetext", "forteazablocuri", "fractie", "framed", "framedtext", "fundal", "gatablocuri", "getnumber", "grid", "grosimelinie", "hartiedubla", "headsym", "hl", "immediatebetweenlist", "immediatetolist", "impachetat", "impartefloat", "in", "inalt", "inaltamargine", "inaltimeantet", "inaltimehartie", "inaltimehartieimprimanta", "inaltimejos", "inaltimelista", "inaltimemakeup", "inaltimesubsol", "inaltimesus", "inaltimetext", "indentation", "indreapta", "inframed", "ininner", "injos", "inlinie", "inmaframed", "inmargineadreapta", "inmargineastanga", "inneredgedistance", "inneredgewidth", "innermargindistance", "innermarginwidth", "inouter", "inparteadreapta", "inparteastanga", "instalarelimba", "instanga", "intins", "jos", "jossus", "kap", "la", "labeling", "lapagina", "latimecoltdreapta", "latimecoltstanga", "latimecolturi", "latimehartie", "latimehartieimprimanta", "latimelista", "latimemakeup", "latimemargine", "latimemarginedreapta", "latimemarginestanga", "latimetext", "leg", "limba", "limbaprincipala", "liniemargine", "linieneagra", "liniesubtire", "linieumplere", "liniinegre", "liniisubtiri", "listsymbol", "litera", "litere", "loadsorts", "loadsynonyms", "logcampuri", "luna", "lungimelista", "maframed", "mapfontsize", "mar", "marcaje", "marcheazaversiune", "marginal", "matematica", "mediaeval", "mediu", "meniuinteractiune", "minicitat", "moveformula", "movesidefloat", "mutapegrid", "name", "navigating", "necunoscut", "nextsection", "niveltitlu", "nocap", "nokap", "nop", "nota", "notasubsol", "numarformula", "numarincrement", "numarpagina", "numarsubformula", "numartitlu", "numartitlucurent", "numartotalpagini", "numberofsubpages", "nume", "numere", "numereromane", "numeunitate", "nutesta", "olinie", "outeredgedistance", "outeredgewidth", "outermargindistance", "outermarginwidth", "overbar", "overbars", "overstrike", "overstrikes", "pagedepth", "pageoffset", "pagina", "paginadubla", "paragraph", "part", "pastreazablocuri", "pelung", "placefloat", "placeheadnumber", "placeheadtext", "placelistoffloats", "placelistofsorts", "placelistofsynonyms", "placerawlist", "placereferencelist", "plaseazapegrid", "plaseazasemnecarte", "potrivestecamp", "pozitie", "pozitietext", "proceseazabloc", "proceseazapagina", "produs", "program", "proiect", "publicatie", "puncte", "punedeasuprafiecareia", "punefatainfata", "puneformula", "punelegenda", "punelista", "punelistacombinata", "punelogouri", "punenotesubsol", "punenotesubsollocale", "punenumarpagina", "puneregistru", "punerigla", "punesubformula", "punevariabilatext", "ran", "ref", "refa", "referinta", "referintapagina", "referintatext", "referit", "referring", "reflexie", "register", "remarca", "reservefloat", "reset", "reseteazamarcaje", "resetnumber", "resettextcontent", "riglatext", "rigleumplere", "roteste", "saripesteblocuri", "scala", "scriebuffer", "scrieinlista", "scrieinlistareferinte", "scrieinregistru", "scrieintreliste", "section", "seeregister", "selecteazablocuri", "selecteazahartie", "selecteazaversiune", 
"semncarte", "setarebarasincronizare", "setareitemization", "setarelimba", "setareoutput", "setarepozitie", "setaresincronizare", "setari", "seteazaaliniat", "seteazaalinierea", "seteazaantet", "seteazaaranjareapag", "seteazaaspect", "seteazabarainteractiune", "seteazablanc", "seteazabloc", "seteazablocsectiune", "seteazablocurimarginale", "seteazabuffer", "seteazabutoane", "seteazacamp", "seteazacampuri", "seteazaclipping", "seteazacoloane", "seteazacombinari", "seteazacomentariu", "seteazacomentariupagina", "seteazaculoare", "seteazaculori", "seteazadefinireanotasubsol", "seteazadescriere", "seteazadimensiunihartie", "seteazaecrane", "seteazaecraninteractiune", "seteazaelemente", "seteazaenumerare", "seteazafiguriexterne", "seteazafloat", "seteazafloats", "seteazafonttext", "seteazaformulare", "seteazaformule", "seteazafundal", "seteazafundaluri", "seteazagrosimelinie", "seteazaimpartireafloat", "seteazainconjurat", "seteazaingust", "seteazainteractiunea", "seteazajos", "seteazalegenda", "seteazalegendele", "seteazaliniesilabe", "seteazaliniesubtire", "seteazalinii", "seteazaliniimargine", "seteazaliniinegre", "seteazaliniiumplere", "seteazalista", "seteazalistacombinata", "seteazalistareferinte", "seteazamajuscule", "seteazamakeup", "seteazamarcaje", "seteazamarginal", "seteazamediulfonttext", "seteazameniuinteractiune", "seteazaminicitat", "seteazanotasubsol", "seteazanumarpagina", "seteazanumarsubpagina", "seteazanumartitlu", "seteazanumerotare", "seteazanumerotarelinii", "seteazanumerotarepagina", "seteazanumerotareparagrafe", "seteazapaleta", "seteazaparagrafe", "seteazaplasareaopozita", "seteazapozitietext", "seteazaprofile", "seteazaprograme", "seteazapublicatii", "seteazareferinte", "seteazaregistru", "seteazarigla", "seteazarigletext", "seteazarigleumplere", "seteazarotare", "seteazasectiune", "seteazasimbol", "seteazasinonime", "seteazasistem", "seteazasortare", "seteazaspatiu", "seteazaspatiualb", "seteazaspatiuinterliniar", "seteazastrut", "seteazasublinie", "seteazasubsol", "seteazasus", "seteazatab", "seteazatabele", "seteazatabulatori", "seteazatext", "seteazatexteantet", "seteazatextejos", "seteazatextesubsol", "seteazatextesus", "seteazatextetext", "seteazatexteticheta", "seteazatexttitlu", "seteazatitlu", "seteazatitluri", "seteazatoleranta", "seteazatranzitiepagina", "seteazatype", "seteazatyping", "seteazaurl", "seteazavariabilatext", "seteazaversiuni", "setnumber", "settextcontent", "setupanswerarea", "setupcolumnset", "setupcolumnsetlines", "setupcolumnsetstart", "setupfonthandling", "setupfontsynonym", "setupindentedtext", "setupinterlinespace2", "setupitemgroup", "setuplistalternative", "setupmathalignment", "setupnumber", "setuppaper", "setupplacement", "setupstartstop", "setvariabilatext", "sim", "simbol", "sincronizeaza", "sort", "spatiifixate", "spatiu", "spatiualb", "spatiufixat", "spatiujos", "spatiuspate", "spatiusus", "startalignment", "startaliniere", "startblocmarginal", "startbuffer", "startcitat", "startcodificare", "startcoloane", "startcolumnmakeup", "startcolumns", "startcolumnset", "startcombinare", "startcombination", "startcomment", "startcomponenta", "startcorectielinie", "startculoare", "startdescription", "startdocument", "startenumeration", "startfact", "startfigura", "startfigure", "startfloattext", "startformula", "startframedtext", "startfundal", "startglobal", "starthiding", "startimpachetat", "startingust", "startitemgroup", "startlegend", "startline", "startlinecorrection", "startlinenumbering", "startlines", "startlinie", 
"startliniemargine", "startlinii", "startlocal", "startlocalenvironment", "startlocalfootnotes", "startmakeup", "startmarginblock", "startmediu", "startmeniuinteractiune", "startnamemakeup", "startnarrower", "startneimpachetat", "startnotesubsollocale", "startnumerotarelinii", "startopozit", "startopposite", "startoverlay", "startoverview", "startparagraph", "startpositioning", "startpostponing", "startpozitionare", "startprodus", "startprofil", "startprofile", "startproiect", "startraster", "startregister", "startriglatext", "startsetsimboluri", "startsincronizare", "startsymbolset", "startsynchronization", "starttabel", "starttabele", "starttable", "starttables", "starttabulate", "starttext", "starttyping", "startunpacked", "startversiune", "stivacampuri", "stopalignment", "stopaliniere", "stopblobal", "stopblocmarginal", "stopbuffer", "stopcitat", "stopcodificare", "stopcoloane", "stopcolumnmakeup", "stopcolumns", "stopcolumnset", "stopcombinare", "stopcombination", "stopcomment", "stopcomponenta", "stopcorectielinie", "stopculoare", "stopdescription", "stopdocument", "stopenumeration", "stopfact", "stopfigure", "stopfloattext", "stopformula", "stopframedtext", "stopfundal", "stophiding", "stopimpachetat", "stopingust", "stopitemgroup", "stoplegend", "stopline", "stoplinecorrection", "stoplinenumbering", "stoplines", "stoplinie", "stopliniemargine", "stoplinii", "stoplocal", "stoplocalenvironment", "stoplocalfootnotes", "stopmakeup", "stopmarginblock", "stopmediu", "stopmeniuinteractiune", "stopnamemakeup", "stopnarrower", "stopneimpachetat", "stopnotesubsollocale", "stopnumerotarelinii", "stopopozit", "stopopposite", "stopoverlay", "stopoverview", "stopparagraph", "stoppositioning", "stoppostponing", "stoppozitionare", "stopprodus", "stopprofil", "stopprofile", "stopproiect", "stopraster", "stopriglatext", "stopsincronizare", "stopsymbolset", "stopsynchronization", "stoptabel", "stoptabele", "stoptable", "stoptables", "stoptabulate", "stoptext", "stoptyping", "stopunpacked", "stopversiune", "sub", "subject", "subpagenumber", "subsection", "subsubject", "subsubsection", "subsubsubject", "synonym", "tab", "testcolumn", "testpage", "tex", "texteticheta", "textmarginal", "texttitlu", "textumplere", "title", "titlu", "titlumarginal", "tooltip", "traduce", "trecilafontraw", "trecilafonttext", "txt", "typ", "type", "typefile", "underbar", "underbars", "undeva", "urmeazaprofil", "urmeazaversiune", "urmeazaversiuneprofil", "useXMLfilter", "usedirectory", "usetypescript", "usetypescriptfile", "valoareculoare", "valoaregri", "variabilatext", "versiune", "vl", "zidinsaptamana" },
-} \ No newline at end of file
diff --git a/context/data/scite/lexers/data/scite-context-data-metafun.lua b/context/data/scite/lexers/data/scite-context-data-metafun.lua
deleted file mode 100644
index 1ca02de97..000000000
--- a/context/data/scite/lexers/data/scite-context-data-metafun.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-return {
- ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
- ["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "metapostversion", "maxdimensions" },
-} \ No newline at end of file
diff --git a/context/data/scite/lexers/data/scite-context-data-metapost.lua b/context/data/scite/lexers/data/scite-context-data-metapost.lua
deleted file mode 100644
index 766ea90da..000000000
--- a/context/data/scite/lexers/data/scite-context-data-metapost.lua
+++ /dev/null
@@ -1,7 +0,0 @@
-return {
- ["commands"]={ "beginfig", "endfig", "beginglyph", "endglyph", "charscale", "rotatedaround", "reflectedabout", "arrowhead", "currentpen", "currentpicture", "cuttings", "defaultfont", "extra_beginfig", "extra_endfig", "ditto", "EOF", "down", "evenly", "fullcircle", "halfcircle", "identity", "in", "left", "origin", "pensquare", "quartercircle", "right", "unitsquare", "up", "withdots", "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir", "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod", "round", "unitvector", "whatever", "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot", "loggingall", "interact", "tracingall", "tracingnone", "pickup", "undraw", "unfill", "unfilldraw", "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions", "incr", "label", "labels", "max", "min", "thelabel", "z", "beginchar", "blacker", "capsule_end", "change_width", "define_blacker_pixels", "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels", "define_horizontal_corrected_pixels", "define_pixels", "define_whole_blacker_pixels", "define_whole_pixels", "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels", "endchar", "extra_beginchar", "extra_endchar", "extra_setup", "font_coding_scheme", "clearxy", "clearit", "clearpen", "shipit", "font_extra_space", "exitunless", "relax", "hide", "gobble", "gobbled", "stop", "blankpicture", "counterclockwise", "tensepath", "takepower", "direction", "softjoin", "makelabel", "rotatedabout", "flex", "superellipse", "erase", "image", "nullpen", "savepen", "clearpen", "penpos", "penlabels", "range", "numtok", "thru", "z", "laboff", "bye", "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background", "graypart", "graycolor", "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", "triplet", "quadruplet" },
- ["internals"]={ "mitered", "rounded", "beveled", "butt", "squared", "eps", "epsilon", "infinity", "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius", "pen_lft", "pen_rt", "pen_top", "pen_bot" },
- ["primitives"]={ "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing", "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", "tracingcommands", "tracingequations", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", "tracingspecs", "tracingstats", "tracingtitles", "truecorners", "warningcheck", "year", "false", "nullpicture", "pencircle", "true", "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", "hex", "infont", "intersectiontimes", "known", "length", "llcorner", "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", "postcontrol", "precontrol", "reverse", "rotated", "scaled", "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", "yypart", "zscaled", "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", "shipout", "show", "showdependencies", "showtoken", "showvariable", "special", "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", "interpath", "on", "off", "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", "tertiary", "primarydef", "secondarydef", "tertiarydef", "randomseed", "also", "contour", "doublepath", "withcolor", "withcmykcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within", "forsuffixes", "downto", "upto", "step", "until", "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", "fontmaking", "charexists", "cullit", "currenttransform", "gfcorners", "grayfont", "hround", "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", "displaying", "currentwindow", "screen_rows", "screen_cols", "pixels_per_inch", "cull", "display", "openwindow", "numspecial", "totalweight", "autorounding", "fillin", "proofing", "tracingpens", "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset", "chardy", "hppp", "tracingedges", "vppp", "extra_beginfig", "extra_endfig", "mpxbreak", "endinput", "message", "delimiters", "turningnumber", "errmessage", "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", "withprescript", "withpostscript", "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart", "prescriptpart", "postscriptpart", "rgbcolor", "cmykcolor", "greycolor", "graycolor", "colormodel", "graypart", "dashpart", "penpart", "stroked", "filled", "textual", "clipped", "bounded", "pathpart", "expandafter", "minute", "hour", "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline", "fontpart", "fontsize", "glyph", "restoreclipcolor", "troffmode" },
- ["shortcuts"]={ "..", "...", "--", "---", "&" },
- ["tex"]={ "btex", "etex", "verbatimtex" },
-} \ No newline at end of file
diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua
deleted file mode 100644
index 96c5e9c3c..000000000
--- a/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ /dev/null
@@ -1,155 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
-local global, string, table, lpeg = _G, string, table, lpeg
-local token, exact_match = lexer.token, lexer.exact_match
-local P, R, S, V, C, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt
-local type = type
-
-local metafunlexer = { _NAME = "mps", _FILENAME = "scite-context-lexer-mps" }
-local whitespace = lexer.WHITESPACE
-local context = lexer.context
-
-local metapostprimitives = { }
-local metapostinternals = { }
-local metapostshortcuts = { }
-local metapostcommands = { }
-
-local metafuninternals = { }
-local metafunshortcuts = { }
-local metafuncommands = { }
-
-local mergedshortcuts = { }
-local mergedinternals = { }
-
-do
-
- local definitions = context.loaddefinitions("scite-context-data-metapost")
-
- if definitions then
- metapostprimitives = definitions.primitives or { }
- metapostinternals = definitions.internals or { }
- metapostshortcuts = definitions.shortcuts or { }
- metapostcommands = definitions.commands or { }
- end
-
- local definitions = context.loaddefinitions("scite-context-data-metafun")
-
- if definitions then
- metafuninternals = definitions.internals or { }
- metafunshortcuts = definitions.shortcuts or { }
- metafuncommands = definitions.commands or { }
- end
-
- for i=1,#metapostshortcuts do
- mergedshortcuts[#mergedshortcuts+1] = metapostshortcuts[i]
- end
- for i=1,#metafunshortcuts do
- mergedshortcuts[#mergedshortcuts+1] = metafunshortcuts[i]
- end
-
- for i=1,#metapostinternals do
- mergedinternals[#mergedinternals+1] = metapostinternals[i]
- end
- for i=1,#metafuninternals do
- mergedinternals[#mergedinternals+1] = metafuninternals[i]
- end
-
-end
-
-local space = lexer.space -- S(" \n\r\t\f\v")
-local any = lexer.any
-
-local dquote = P('"')
-local cstoken = R("az","AZ") + P("_")
-local mptoken = R("az","AZ")
-local leftbrace = P("{")
-local rightbrace = P("}")
-local number = context.patterns.real
-
-local cstokentex = R("az","AZ","\127\255") + S("@!?_")
-
--- we could collapse as in tex
-
-local spacing = token(whitespace, space^1)
-local rest = token('default', any)
-local comment = token('comment', P('%') * (1-S("\n\r"))^0)
-local internal = token('reserved', exact_match(mergedshortcuts,false))
-local shortcut = token('data', exact_match(mergedinternals))
-local helper = token('command', exact_match(metafuncommands))
-local plain = token('plain', exact_match(metapostcommands))
-local quoted = token('quote', dquote)
- * token('string', P(1-dquote)^0)
- * token('quote', dquote)
-local texstuff = token('quote', P("btex ") + P("verbatimtex "))
- * token('string', P(1-P(" etex"))^0)
- * token('quote', P(" etex"))
-local primitive = token('primitive', exact_match(metapostprimitives))
-local identifier = token('default', cstoken^1)
-local number = token('number', number)
-local grouping = token('grouping', S("()[]{}")) -- can be an option
-local special = token('special', S("#()[]{}<>=:\"")) -- or else := <> etc split
-local texlike = token('warning', P("\\") * cstokentex^1)
-local extra = token('extra', S("`~%^&_-+*/\'|\\"))
-
-local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
-local texlike = token('embedded', P("\\") * (P("MP") + P("mp")) * mptoken^1)
- * spacing^0
- * token('grouping', leftbrace)
- * token('rest', (nested + (1-rightbrace))^0 )
- * token('grouping', rightbrace)
- + token('warning', P("\\") * cstokentex^1)
-
-metafunlexer._rules = {
- { 'whitespace', spacing },
- { 'comment', comment },
- { 'internal', internal },
- { 'shortcut', shortcut },
- { 'helper', helper },
- { 'plain', plain },
- { 'primitive', primitive },
- { 'texstuff', texstuff },
- { 'identifier', identifier },
- { 'number', number },
- { 'quoted', quoted },
- -- { 'grouping', grouping }, -- can be an option
- { 'special', special },
- { 'texlike', texlike },
- { 'extra', extra },
- { 'rest', rest },
-}
-
-metafunlexer._tokenstyles = context.styleset
-
-metafunlexer._foldpattern = R("az")^2 -- separate entry else interference
-
-metafunlexer._foldsymbols = {
- _patterns = {
- '[a-z][a-z]+',
- },
- ["primitive"] = {
- ["beginfig"] = 1,
- ["endfig"] = -1,
- ["def"] = 1,
- ["vardef"] = 1,
- ["primarydef"] = 1,
- ["secondarydef" ] = 1,
- ["tertiarydef"] = 1,
- ["enddef"] = -1,
- ["if"] = 1,
- ["fi"] = -1,
- ["for"] = 1,
- ["forever"] = 1,
- ["endfor"] = -1,
- }
-}
-
-return metafunlexer
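
The _rules table above is order sensitive: join_tokens in scite-context-lexer.lua (further down in this diff) folds the rules into one ordered choice, so the specific rules (internal, shortcut, helper, plain, primitive) have to come before the catch-all identifier and rest rules. A minimal, self-contained sketch of why that order matters; the patterns and names here are made up for illustration:

    local lpeg = require("lpeg")
    local P, R, Cc = lpeg.P, lpeg.R, lpeg.Cc

    local keyword    = P("beginfig") * Cc("primitive")   -- specific rule
    local identifier = R("az")^1     * Cc("default")     -- catch-all rule

    local good = keyword + identifier   -- specific rule first, as in _rules above
    local bad  = identifier + keyword   -- catch-all first

    print(lpeg.match(good, "beginfig")) -- primitive
    print(lpeg.match(bad,  "beginfig")) -- default: the identifier swallowed the keyword
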
diff --git a/context/data/scite/lexers/scite-context-lexer-pdf-object.lua b/context/data/scite/lexers/scite-context-lexer-pdf-object.lua
deleted file mode 100644
index 6d0b6d8da..000000000
--- a/context/data/scite/lexers/scite-context-lexer-pdf-object.lua
+++ /dev/null
@@ -1,117 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for pdf",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = lexer
-local token = lexer.token
-local P, R, S, C, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.V
-
-local pdfobjectlexer = { _NAME = "pdf-object", _FILENAME = "scite-context-lexer-pdf-object" }
-local whitespace = lexer.WHITESPACE -- triggers states
-local context = lexer.context
-local patterns = context.patterns
-
-local space = lexer.space
-local somespace = space^1
-
-local newline = S("\n\r")
-local real = patterns.real
-local cardinal = patterns.cardinal
-
-local lparent = P("(")
-local rparent = P(")")
-local langle = P("<")
-local rangle = P(">")
-local escape = P("\\")
-local anything = P(1)
-local unicodetrigger = P("feff")
-
-local nametoken = 1 - space - S("<>/[]()")
-local name = P("/") * nametoken^1
-
-local p_string = P { ( escape * anything + lparent * V(1) * rparent + (1 - rparent) )^0 }
-
-local t_spacing = token(whitespace, space^1)
-local t_spaces = token(whitespace, space^1)^0
-
-local p_stream = P("stream")
-local p_endstream = P("endstream")
------ p_obj = P("obj")
-local p_endobj = P("endobj")
-local p_reference = P("R")
-
-local p_objectnumber = patterns.cardinal
-local p_comment = P('%') * (1-S("\n\r"))^0
-
-local string = token("quote", lparent)
- * token("string", p_string)
- * token("quote", rparent)
-local unicode = token("quote", langle)
- * token("plain", unicodetrigger)
- * token("string", (1-rangle)^1)
- * token("quote", rangle)
-local whatsit = token("quote", langle)
- * token("string", (1-rangle)^1)
- * token("quote", rangle)
-local keyword = token("command", name)
-local constant = token("constant", name)
-local number = token('number', real)
--- local reference = token("number", cardinal)
--- * t_spacing
--- * token("number", cardinal)
-local reserved = token("number", P("true") + P("false") + P("NULL"))
-local reference = token("warning", cardinal)
- * t_spacing
- * token("warning", cardinal)
- * t_spacing
- * token("keyword", p_reference)
-local t_comment = token("comment", p_comment)
-
--- t_openobject = token("number", p_objectnumber)
--- * t_spacing
--- * token("number", p_objectnumber)
--- * t_spacing
--- * token("keyword", p_obj)
-local t_closeobject = token("keyword", p_endobj)
-
-local t_opendictionary = token("grouping", P("<<"))
-local t_closedictionary = token("grouping", P(">>"))
-
-local t_openarray = token("grouping", P("["))
-local t_closearray = token("grouping", P("]"))
-
-local t_stream = token("keyword", p_stream)
--- * token("default", newline * (1-newline*p_endstream*newline)^1 * newline)
- * token("default", (1 - p_endstream)^1)
- * token("keyword", p_endstream)
-
-local t_dictionary = { "dictionary",
- dictionary = t_opendictionary * (t_spaces * keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
- array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
- whatever = V("dictionary") + V("array") + constant + reference + string + unicode + number + whatsit,
- }
-
-local t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
- object = t_spaces * (V("dictionary") * t_spaces * t_stream^-1 + V("array") + V("number") + t_spaces) * t_spaces * t_closeobject,
- dictionary = t_opendictionary * (t_spaces * keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
- array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
- number = number,
- whatever = V("dictionary") + V("array") + constant + reference + string + unicode + number + reserved + whatsit,
- }
-
-pdfobjectlexer._shared = {
- dictionary = t_dictionary,
-}
-
-pdfobjectlexer._rules = {
- { 'whitespace', t_spacing },
- { 'object', t_object },
-}
-
-pdfobjectlexer._tokenstyles = context.styleset
-
-return pdfobjectlexer
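
The heart of this lexer is the open grammar: t_dictionary and t_object are tables whose first entry names the start rule, and the V(...) references let dictionaries and arrays nest inside each other. A stripped down, runnable sketch of the same shape in plain lpeg, with the token styling replaced by plain captures; the sample object is made up:

    local lpeg = require("lpeg")
    local P, S, V, C, Ct = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Ct

    local space = S(" \n\r\t")^0
    local name  = C(P("/") * (1 - S(" \n\r\t<>/[]()"))^1)
    local num   = C(lpeg.R("09")^1)

    -- nested dictionaries and arrays, as in t_dictionary / t_object above
    local grammar = P { "dictionary",
        dictionary = P("<<") * space * (name * space * V("whatever") * space)^0 * P(">>"),
        array      = P("[")  * space * (V("whatever") * space)^0 * P("]"),
        whatever   = V("dictionary") + V("array") + name + num,
    }

    local parsed = lpeg.match(Ct(grammar), "<< /Type /Page /Kids [ 1 2 3 ] >>")
    print(table.concat(parsed, " "))  -- /Type /Page /Kids 1 2 3
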
diff --git a/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua b/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua
deleted file mode 100644
index f205e9130..000000000
--- a/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua
+++ /dev/null
@@ -1,51 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for pdf xref",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = lexer
-local token = lexer.token
-local P, R = lpeg.P, lpeg.R
-
--- xref
--- cardinal cardinal [character]
--- ..
--- %%EOF | startxref | trailer
-
-local pdfxreflexer = { _NAME = "pdf-xref", _FILENAME = "scite-context-lexer-pdf-xref" }
-local whitespace = lexer.WHITESPACE -- triggers states
-local context = lexer.context
-local patterns = context.patterns
-
-local pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object")
-
-local spacing = patterns.spacing
-
-local t_spacing = token(whitespace, spacing)
-
-local p_trailer = P("trailer")
-
-local t_number = token("number", R("09")^1)
- * t_spacing
- * token("number", R("09")^1)
- * t_spacing
- * (token("keyword", R("az","AZ")) * t_spacing)^-1
-
-local t_xref = t_number^1
-
--- local t_xref = token("default", (1-p_trailer)^1)
--- * token("keyword", p_trailer)
--- * t_spacing
--- * pdfobjectlexer._shared.dictionary
-
-pdfxreflexer._rules = {
- { 'whitespace', t_spacing },
- { 'xref', t_xref },
-}
-
-pdfxreflexer._tokenstyles = context.styleset
-
-return pdfxreflexer
diff --git a/context/data/scite/lexers/scite-context-lexer-pdf.lua b/context/data/scite/lexers/scite-context-lexer-pdf.lua
deleted file mode 100644
index 685fdb16e..000000000
--- a/context/data/scite/lexers/scite-context-lexer-pdf.lua
+++ /dev/null
@@ -1,80 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for pdf",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
-local token = lexer.token
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
-
-local pdflexer = { _NAME = "pdf", _FILENAME = "scite-context-lexer-pdf" }
-local whitespace = lexer.WHITESPACE -- triggers states
-
-local pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object")
-local pdfxreflexer = lexer.load("scite-context-lexer-pdf-xref")
-
-local context = lexer.context
-local patterns = context.patterns
-
-local space = patterns.space
-local spacing = patterns.spacing
-local nospacing = patterns.nospacing
-local anything = patterns.anything
-local restofline = patterns.restofline
-
-local t_spacing = token(whitespace, spacing)
-local t_rest = token("default", nospacing) -- anything
-
-local p_obj = P("obj")
-local p_endobj = P("endobj")
-local p_xref = P("xref")
-local p_startxref = P("startxref")
-local p_eof = P("%%EOF")
-local p_trailer = P("trailer")
-
-local p_objectnumber = patterns.cardinal
-local p_comment = P('%') * restofline
-
-local t_comment = token("comment", p_comment)
-local t_openobject = token("warning", p_objectnumber)
- * t_spacing
- * token("warning", p_objectnumber)
- * t_spacing
- * token("keyword", p_obj)
- * t_spacing^0
-local t_closeobject = token("keyword", p_endobj)
-
-- We could do clever xref parsing but why should we (i.e. we would have to check
-- for the xref body). As a pdf file is not edited, we can do without a nested
-- lexer anyway.
-
-local t_trailer = token("keyword", p_trailer)
- * t_spacing
- * pdfobjectlexer._shared.dictionary
-
-local t_openxref = token("plain", p_xref)
-local t_closexref = token("plain", p_startxref)
- + token("comment", p_eof)
- + t_trailer
-local t_startxref = token("plain", p_startxref)
- * t_spacing
- * token("number", R("09")^1)
-
-lexer.embed_lexer(pdflexer, pdfobjectlexer, t_openobject, t_closeobject)
-lexer.embed_lexer(pdflexer, pdfxreflexer, t_openxref, t_closexref)
-
-pdflexer._rules = {
- { 'whitespace', t_spacing },
- { 'comment', t_comment },
- { 'xref', t_startxref },
- { 'rest', t_rest },
-}
-
-pdflexer._tokenstyles = context.styleset
-
-return pdflexer
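
embed_lexer(parent, child, start_rule, end_rule) lets the child lexer take over between the two rules; add_lexer in scite-context-lexer.lua (later in this diff) compiles that to roughly start_rule * (-end_rule * child_token)^0 * end_rule^-1. A compact illustration of that wrapping in plain lpeg, not the real implementation; the object number and content are made up:

    local lpeg = require("lpeg")
    local P, C = lpeg.P, lpeg.C

    local start_rule  = P("1 0 obj")          -- like t_openobject above
    local end_rule    = P("endobj")           -- like t_closeobject above
    local child_token = C((1 - end_rule)^1)   -- stand-in for the child lexer's tokens

    local embedded = start_rule * child_token^0 * end_rule^-1

    print(lpeg.match(embedded, "1 0 obj << /Length 42 >> endobj"))
    -- prints " << /Length 42 >> ", the region handed to the embedded lexer
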
diff --git a/context/data/scite/lexers/scite-context-lexer-web.lua b/context/data/scite/lexers/scite-context-lexer-web.lua
deleted file mode 100644
index f59a3205d..000000000
--- a/context/data/scite/lexers/scite-context-lexer-web.lua
+++ /dev/null
@@ -1,155 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for w",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- this will be extended
-
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local lexer = lexer
-local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, lexer.exact_match, lexer.style_nothing
-local P, R, S, C, Cg, Cb, Cs, Cmt, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt, lpeg.match
-local setmetatable = setmetatable
-
-local weblexer = { _NAME = "web", _FILENAME = "scite-context-lexer-web" }
-local whitespace = lexer.WHITESPACE
-local context = lexer.context
-
-local keywords = { -- copied from cpp.lua
- -- c
- 'asm', 'auto', 'break', 'case', 'const', 'continue', 'default', 'do', 'else',
- 'extern', 'false', 'for', 'goto', 'if', 'inline', 'register', 'return',
- 'sizeof', 'static', 'switch', 'true', 'typedef', 'volatile', 'while',
- 'restrict',
- -- hm
- '_Bool', '_Complex', '_Pragma', '_Imaginary',
- -- c++.
- 'catch', 'class', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
- 'export', 'friend', 'mutable', 'namespace', 'new', 'operator', 'private',
- 'protected', 'public', 'signals', 'slots', 'reinterpret_cast',
- 'static_assert', 'static_cast', 'template', 'this', 'throw', 'try', 'typeid',
- 'typename', 'using', 'virtual'
-}
-
-local datatypes = { -- copied from cpp.lua
- 'bool', 'char', 'double', 'enum', 'float', 'int', 'long', 'short', 'signed',
- 'struct', 'union', 'unsigned', 'void'
-}
-
-local macros = { -- copied from cpp.lua
- 'define', 'elif', 'else', 'endif', 'error', 'if', 'ifdef', 'ifndef', 'import',
- 'include', 'line', 'pragma', 'undef', 'using', 'warning'
-}
-
-local space = lexer.space -- S(" \n\r\t\f\v")
-local any = lexer.any
-local patterns = context.patterns
-local restofline = patterns.restofline
-local startofline = patterns.startofline
-
-local squote = P("'")
-local dquote = P('"')
-local escaped = P("\\") * P(1)
-local slashes = P('//')
-local begincomment = P("/*")
-local endcomment = P("*/")
-local percent = P("%")
-
-local spacing = token(whitespace, space^1)
-local rest = token("default", any)
-
-local shortcomment = token("comment", slashes * restofline^0)
-local longcomment = token("comment", begincomment * (1-endcomment)^0 * endcomment^-1)
-local texcomment = token("comment", percent * restofline^0)
-
-local shortstring = token("quote", dquote) -- can be shared
- * token("string", (escaped + (1-dquote))^0)
- * token("quote", dquote)
- + token("quote", squote)
- * token("string", (escaped + (1-squote))^0)
- * token("quote", squote)
-
-local integer = P("-")^-1 * (lexer.hex_num + lexer.dec_num)
-local number = token("number", lexer.float + integer)
-
-local validword = R("AZ","az","__") * R("AZ","az","__","09")^0
-
-local identifier = token("default",validword)
-
-local operator = token("special", S('+-*/%^!=<>;:{}[]().&|?~'))
-
------ optionalspace = spacing^0
-
-local p_keywords = exact_match(keywords )
-local p_datatypes = exact_match(datatypes)
-local p_macros = exact_match(macros)
-
-local keyword = token("keyword", p_keywords)
-local datatype = token("keyword", p_datatypes)
-local identifier = token("default", validword)
-
-local macro = token("data", #P('#') * startofline * P('#') * S('\t ')^0 * p_macros)
-
-local beginweb = P("@")
-local endweb = P("@c")
-
-local webcomment = token("comment", #beginweb * startofline * beginweb * (1-endweb)^0 * endweb)
-
-local texlexer = lexer.load('scite-context-lexer-tex')
-
-lexer.embed_lexer(weblexer, texlexer, #beginweb * startofline * token("comment",beginweb), token("comment",endweb))
-
-weblexer._rules = {
- { 'whitespace', spacing },
- { 'keyword', keyword },
- { 'type', datatype },
- { 'identifier', identifier },
- { 'string', shortstring },
- -- { 'webcomment', webcomment },
- { 'texcomment', texcomment },
- { 'longcomment', longcomment },
- { 'shortcomment', shortcomment },
- { 'number', number },
- { 'macro', macro },
- { 'operator', operator },
- { 'rest', rest },
-}
-
-weblexer._tokenstyles = context.styleset
-
-weblexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference
-
-weblexer._foldsymbols = {
- _patterns = {
- '[{}]',
- '/%*',
- '%*/',
- },
- -- ["data"] = { -- macro
- -- ['region'] = 1,
- -- ['endregion'] = -1,
- -- ['if'] = 1,
- -- ['ifdef'] = 1,
- -- ['ifndef'] = 1,
- -- ['endif'] = -1,
- -- },
- ["special"] = { -- operator
- ['{'] = 1,
- ['}'] = -1,
- },
- ["comment"] = {
- ['/*'] = 1,
- ['*/'] = -1,
- }
-}
-
--- -- by indentation:
---
-weblexer._foldpatterns = nil
-weblexer._foldsymbols = nil
-
-return weblexer
diff --git a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
deleted file mode 100644
index 104310f94..000000000
--- a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
+++ /dev/null
@@ -1,42 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for xml comments",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = lexer
-local token = lexer.token
-local P = lpeg.P
-
-local xmlcommentlexer = { _NAME = "xml-comment", _FILENAME = "scite-context-lexer-xml-comment" }
-local whitespace = lexer.WHITESPACE
-local context = lexer.context
-
-local space = lexer.space
-local nospace = 1 - space - P("-->")
-
-local p_spaces = token(whitespace, space ^1)
-local p_comment = token("comment", nospace^1)
-
-xmlcommentlexer._rules = {
- { "whitespace", p_spaces },
- { "comment", p_comment },
-}
-
-xmlcommentlexer._tokenstyles = context.styleset
-
-xmlcommentlexer._foldpattern = P("<!--") + P("-->")
-
-xmlcommentlexer._foldsymbols = {
- _patterns = {
- "<%!%-%-", "%-%->", -- comments
- },
- ["comment"] = {
- ["<!--"] = 1,
- ["-->" ] = -1,
- }
-}
-
-return xmlcommentlexer
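
The _foldsymbols table is what fold_by_parsing in scite-context-lexer.lua (later in this diff) consumes: every hit of one of the _patterns is looked up under the style it was lexed with and the associated number is added to the fold level, so <!-- opens a fold and --> closes it again; _foldpattern is the ConTeXt extension that replaces the string patterns by a single lpeg for speed. A toy walk-through of just the level bookkeeping (the real folder also deals with FOLD_HEADER and FOLD_BLANK flags and uses get_style_at):

    local symbols = { ["<!--"] = 1, ["-->"] = -1 }
    local level   = 0
    for _, line in ipairs { "<!-- start", "some comment", "-->" } do
        for s in line:gmatch("<%!%-%-") do level = level + symbols[s] end
        for s in line:gmatch("%-%->")   do level = level + symbols[s] end
        print(line, level)  -- levels: 1, 1, 0
    end
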
diff --git a/context/data/scite/lexers/scite-context-lexer-xml-script.lua b/context/data/scite/lexers/scite-context-lexer-xml-script.lua
deleted file mode 100644
index fd1aae7f7..000000000
--- a/context/data/scite/lexers/scite-context-lexer-xml-script.lua
+++ /dev/null
@@ -1,30 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for xml cdata",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = lexer
-local token = lexer.token
-local P = lpeg.P
-
-local xmlscriptlexer = { _NAME = "xml-script", _FILENAME = "scite-context-lexer-xml-script" }
-local whitespace = lexer.WHITESPACE -- triggers states
-local context = lexer.context
-
-local space = lexer.space
-local nospace = 1 - space - P("</") * (P("script") + P("SCRIPT")) * P(">")
-
-local p_spaces = token(whitespace, space ^1)
-local p_cdata = token("default", nospace^1)
-
-xmlscriptlexer._rules = {
- { "whitespace", p_spaces },
- { "script", p_cdata },
-}
-
-xmlscriptlexer._tokenstyles = context.styleset
-
-return xmlscriptlexer
diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua
deleted file mode 100644
index 5c7f40e7d..000000000
--- a/context/data/scite/lexers/scite-context-lexer.lua
+++ /dev/null
@@ -1,876 +0,0 @@
-local info = {
- version = 1.324,
- comment = "basics for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- comment = "contains copyrighted code from mitchell.att.foicica.com",
-
-}
-
--- todo: move all code here
--- todo: explore adapted dll ... properties + init
--- todo: play with hotspot and other properties
-
--- wish: replace errorlist lexer (per language!)
--- wish: access to all scite properties
-
--- The fold and lex functions are copied and patched from original code by Mitchell (see
-- lexer.lua). All errors are mine. The ability to use lpeg is a really nice addition and a
--- brilliant move. The code is a byproduct of the (mainly Lua based) textadept (still a
--- rapidly moving target) that unfortunately misses a realtime output pane. On the other
--- hand, SciTE is somewhat crippled by the fact that we cannot pop in our own (language
--- dependent) lexer into the output pane (somehow the errorlist lexer is hard coded into
--- the editor). Hopefully that will change some day.
---
--- Starting with SciTE version 3.20 there is an issue with coloring. As we still lack
--- a connection with scite itself (properties as well as printing to the log pane) we
--- cannot trace this (on windows). As far as I can see, there are no fundamental
--- changes in lexer.lua or LexLPeg.cxx so it must be in scintilla itself. So for the
--- moment I stick to 3.10. Indicators are: no lexing of 'next' and 'goto <label>' in the
--- Lua lexer and no brace highlighting either. Interesting is that it does work ok in
--- the cld lexer (so the Lua code is okay). Also the fact that char-def.lua lexes fast
--- is a signal that the lexer quits somewhere halfway.
---
--- After checking 3.24 and adapting to the new lexer tables things are okay again. So,
--- this version assumes 3.24 or higher. In 3.24 we have a different token result, i.e. no
--- longer a { tag, pattern } but just two return values. I didn't check other changes but
--- will do that when I run into issues. I had optimized these small tables by hashing which
--- was more efficient but this is no longer needed.
---
--- In 3.3.1 another major change took place: some helper constants (maybe they're no
--- longer constants) and functions were moved into the lexer modules namespace but the
--- functions are assigned to the Lua module afterward so we cannot alias them beforehand.
--- We're probably getting close to a stable interface now.
---
--- I've considered making a whole copy and patch the other functions too as we need
--- an extra nesting model. However, I don't want to maintain too much. An unfortunate
--- change in 3.03 is that no longer a script can be specified. This means that instead
--- of loading the extensions via the properties file, we now need to load them in our
--- own lexers, unless of course we replace lexer.lua completely (which adds another
--- installation issue).
---
--- Another change has been that _LEXERHOME is no longer available. It looks like more and
--- more functionality gets dropped so maybe at some point we need to ship our own dll/so
--- files. For instance, I'd like to have access to the current filename and other scite
--- properties. For instance, we could cache some info with each file, if only we had
--- knowledge of what file we're dealing with.
---
--- For huge files folding can be pretty slow and I do have some large ones that I keep
-- open all the time. Loading is normally no issue, unless one has remembered the status
--- and the cursor is at the last line of a 200K line file. Optimizing the fold function
--- brought down loading of char-def.lua from 14 sec => 8 sec. Replacing the word_match
--- function and optimizing the lex function gained another 2+ seconds. A 6 second load
--- is quite ok for me. The changed lexer table structure (no subtables) brings loading
--- down to a few seconds.
---
--- When the lexer path is copied to the textadept lexer path, and the theme definition to
--- theme path (as lexer.lua), the lexer works there as well. When I have time and motive
--- I will make a proper setup file to tune the look and feel a bit and associate suffixes
--- with the context lexer. The textadept editor has a nice style tracing option but lacks
--- the tabs for selecting files that scite has. It also has no integrated run that pipes
--- to the log pane (I wonder if it could borrow code from the console2 project). Interesting
--- is that the jit version of textadept crashes on lexing large files (and does not feel
--- faster either).
---
--- Function load(lexer_name) starts with _M.WHITESPACE = lexer_name..'_whitespace' which
--- means that we need to have it frozen at the moment we load another lexer. Because spacing
--- is used to revert to a parent lexer we need to make sure that we load children as late
--- as possible in order not to get the wrong whitespace trigger. This took me quite a while
--- to figure out (not being that familiar with the internals). The lex and fold functions
-- have been optimized. It is a pity that there is no proper print available. Another thing
-- needed is a default style in our own theme style definition, as otherwise we get wrong
--- nested lexers, especially if they are larger than a view. This is the hardest part of
--- getting things right.
---
--- Eventually it might be safer to copy the other methods from lexer.lua here as well so
--- that we have no dependencies, apart from the c library (for which at some point the api
--- will be stable I hope).
---
--- It's a pity that there is no scintillua library for the OSX version of scite. Even
--- better would be to have the scintillua library as integral part of scite as that way I
--- could use OSX alongside windows and linux (depending on needs). Also nice would be to
--- have a proper interface to scite then because currently the lexer is rather isolated and the
--- lua version does not provide all standard libraries. It would also be good to have lpeg
--- support in the regular scite lua extension (currently you need to pick it up from someplace
--- else).
-
-local lpeg = require 'lpeg'
-
-local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
-local lpegmatch = lpeg.match
-local find, gmatch, match, lower, upper, gsub = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub
-local concat = table.concat
-local global = _G
-local type, next, setmetatable, rawset = type, next, setmetatable, rawset
-
--- less confusing as we also use lexer for the current lexer and local _M = lexer is just ugly
-
-local lexers = lexer or { } -- + fallback for syntax check
-
--- ok, let's also move helpers here (todo: all go here)
-
-local sign = S("+-")
-local digit = R("09")
-local octdigit = R("07")
-local hexdigit = R("09","AF","af")
-
-lexers.sign = sign
-lexers.digit = digit
-lexers.octdigit = octdigit
-lexers.hexdigit = hexdigit
-lexers.xdigit = hexdigit
-
-lexers.dec_num = digit^1
-lexers.oct_num = P("0")
- * octdigit^1
-lexers.hex_num = P("0") * S("xX")
- * (hexdigit^0 * '.' * hexdigit^1 + hexdigit^1 * '.' * hexdigit^0 + hexdigit^1)
- * (S("pP") * sign^-1 * hexdigit^1)^-1
-lexers.float = sign^-1
- * (digit^0 * '.' * digit^1 + digit^1 * '.' * digit^0 + digit^1)
- * S("eE") * sign^-1 * digit^1
-
-lexers.dec_int = sign^-1 * lexers.dec_num
-lexers.oct_int = sign^-1 * lexers.oct_num
-lexers.hex_int = sign^-1 * lexers.hex_num
-
--- these helpers are set afterwards so we delay their initialization ... there is no need to alias
--- each time again and this way we can more easily adapt to updates
-
-local get_style_at, get_indent_amount, get_property, get_fold_level, FOLD_BASE, FOLD_HEADER, FOLD_BLANK, initialize
-
-initialize = function()
- FOLD_BASE = lexers.FOLD_BASE or SC_FOLDLEVELBASE
- FOLD_HEADER = lexers.FOLD_HEADER or SC_FOLDLEVELHEADERFLAG
- FOLD_BLANK = lexers.FOLD_BLANK or SC_FOLDLEVELWHITEFLAG
- get_style_at = lexers.get_style_at or GetStyleAt
- get_indent_amount = lexers.get_indent_amount or GetIndentAmount
- get_property = lexers.get_property or GetProperty
- get_fold_level = lexers.get_fold_level or GetFoldLevel
- --
- initialize = nil
-end
-
--- we create our own extra namespace for extensions and helpers
-
-lexers.context = lexers.context or { }
-local context = lexers.context
-
-context.patterns = context.patterns or { }
-local patterns = context.patterns
-
-lexers._CONTEXTEXTENSIONS = true
-
-local locations = {
- -- lexers.context.path,
- "data", -- optional data directory
- "..", -- regular scite directory
-}
-
-local function collect(name)
--- local definitions = loadfile(name .. ".luc") or loadfile(name .. ".lua")
- local okay, definitions = pcall(function () return require(name) end)
- if okay then
- if type(definitions) == "function" then
- definitions = definitions()
- end
- if type(definitions) == "table" then
- return definitions
- end
- end
-end
-
-function context.loaddefinitions(name)
- for i=1,#locations do
- local data = collect(locations[i] .. "/" .. name)
- if data then
- return data
- end
- end
-end
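
loaddefinitions just requires a Lua file from the optional data directory or the regular scite directory and expects it to return a table. The metapost/metafun lexer earlier in this diff reads the fields primitives, internals, shortcuts and commands from such a file, so a data file is shaped roughly like this (a schematic sketch, not the content of the real generated scite-context-data-metapost.lua):

    -- data/scite-context-data-metapost.lua (schematic)
    return {
        primitives = { "def", "enddef", "if", "fi", "for", "endfor" },
        internals  = { "linecap", "linejoin", "miterlimit" },
        shortcuts  = { },
        commands   = { "drawarrow", "drawdblarrow", "drawdot" },
    }
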
-
-function context.word_match(words,word_chars,case_insensitive)
- local chars = '%w_' -- maybe just "" when word_chars
- if word_chars then
- chars = '^([' .. chars .. gsub(word_chars,'([%^%]%-])', '%%%1') ..']+)'
- else
- chars = '^([' .. chars ..']+)'
- end
- if case_insensitive then
- local word_list = { }
- for i=1,#words do
- word_list[lower(words[i])] = true
- end
- return P(function(input, index)
- local s, e, word = find(input,chars,index)
- return word and word_list[lower(word)] and e + 1 or nil
- end)
- else
- local word_list = { }
- for i=1,#words do
- word_list[words[i]] = true
- end
- return P(function(input, index)
- local s, e, word = find(input,chars,index)
- return word and word_list[word] and e + 1 or nil
- end)
- end
-end
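
word_match wraps a whole-word lookup (done with string.find) in a match time lpeg function and is the slower, string based counterpart of exact_match below. A usage sketch; the word list is made up:

    local keyword = context.word_match({ "draw", "fill", "clip" }, nil, true)

    print(lpeg.match(keyword, "Draw"))     -- 5: whole word found (case insensitive)
    print(lpeg.match(keyword, "drawing"))  -- nil: "drawing" as a whole is not listed
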
-
-local idtoken = R("az","AZ","\127\255","__")
-local digit = R("09")
-local sign = S("+-")
-local period = P(".")
-local space = S(" \n\r\t\f\v")
-
-patterns.idtoken = idtoken
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.period = period
-
-patterns.cardinal = digit^1
-patterns.integer = sign^-1 * digit^1
-
-patterns.real =
- sign^-1 * ( -- at most one
- digit^1 * period * digit^0 -- 10.0 10.
- + digit^0 * period * digit^1 -- 0.10 .10
- + digit^1 -- 10
- )
-
-patterns.restofline = (1-S("\n\r"))^1
-patterns.space = space
-patterns.spacing = space^1
-patterns.nospacing = (1-space)^1
-patterns.anything = P(1)
-
-local endof = S("\n\r\f")
-
-patterns.startofline = P(function(input,index)
- return (index == 1 or lpegmatch(endof,input,index-1)) and index
-end)
-
-function context.exact_match(words,word_chars,case_insensitive)
- local characters = concat(words)
- local pattern -- the concat catches _ etc
- if word_chars == true or word_chars == false or word_chars == nil then
- word_chars = ""
- end
- if type(word_chars) == "string" then
- pattern = S(characters) + idtoken
- if case_insensitive then
- pattern = pattern + S(upper(characters)) + S(lower(characters))
- end
- if word_chars ~= "" then
- pattern = pattern + S(word_chars)
- end
- elseif word_chars then
- pattern = word_chars
- end
- if case_insensitive then
- local list = { }
- for i=1,#words do
- list[lower(words[i])] = true
- end
- return Cmt(pattern^1, function(_,i,s)
- return list[lower(s)] -- and i or nil
- end)
- else
- local list = { }
- for i=1,#words do
- list[words[i]] = true
- end
- return Cmt(pattern^1, function(_,i,s)
- return list[s] -- and i or nil
- end)
- end
-end
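
exact_match first matches a run of characters that can occur in any of the listed words (plus idtoken) and then checks that run against a hash inside a Cmt, so recognizing a keyword costs one table lookup instead of one alternative per word. A behaviour sketch; the word list is made up:

    local match_tex = context.exact_match { "btex", "etex", "verbatimtex" }

    print(lpeg.match(match_tex, "verbatimtex"))  -- 12: the whole run is in the hash
    print(lpeg.match(match_tex, "verbatim"))     -- nil: a valid run, but not listed
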
-
--- spell checking (we can only load lua files)
---
--- return {
--- min = 3,
--- max = 40,
--- n = 12345,
--- words = {
--- ["someword"] = "someword",
--- ["anotherword"] = "Anotherword",
--- },
--- }
-
-local lists = { }
-
-function context.setwordlist(tag,limit) -- returns hash (lowercase keys and original values)
- if not tag or tag == "" then
- return false, 3
- end
- local list = lists[tag]
- if not list then
- list = context.loaddefinitions("spell-" .. tag)
- if not list or type(list) ~= "table" then
- list = { words = false, min = 3 }
- else
- list.words = list.words or false
- list.min = list.min or 3
- end
- lists[tag] = list
- end
- return list.words, list.min
-end
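
The comment above shows the file format that setwordlist expects: setwordlist("en") would load spell-en.lua from the data path, cache it, and return the words hash plus the minimal word length to check. The checkedword and styleofword helpers right below then map a candidate word to the okay, warning or error style. A usage sketch; the tag "en" and the word file are hypothetical:

    local validwords, validminimum = context.setwordlist("en")  -- loads data/spell-en.lua

    -- a word rule in a lexer calls this (typically from a Cmt) for every candidate:
    print(context.checkedword(validwords, validminimum, "someword", 10))
    -- true, "okay" | "warning" | "error" | "text", 10  (depending on the loaded list)
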
-
-patterns.wordtoken = R("az","AZ","\127\255")
-patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
-
-function context.checkedword(validwords,validminimum,s,i) -- ,limit
- if not validwords then -- or #s < validminimum then
- return true, "text", i -- true, "default", i
- else
- -- keys are lower
- local word = validwords[s]
- if word == s then
- return true, "okay", i -- exact match
- elseif word then
- return true, "warning", i -- case issue
- else
- local word = validwords[lower(s)]
- if word == s then
- return true, "okay", i -- exact match
- elseif word then
- return true, "warning", i -- case issue
- elseif upper(s) == s then
- return true, "warning", i -- probably a logo or acronym
- else
- return true, "error", i
- end
- end
- end
-end
-
-function context.styleofword(validwords,validminimum,s) -- ,limit
- if not validwords or #s < validminimum then
- return "text"
- else
- -- keys are lower
- local word = validwords[s]
- if word == s then
- return "okay" -- exact match
- elseif word then
- return "warning" -- case issue
- else
- local word = validwords[lower(s)]
- if word == s then
- return "okay" -- exact match
- elseif word then
- return "warning" -- case issue
- elseif upper(s) == s then
- return "warning" -- probably a logo or acronym
- else
- return "error"
- end
- end
- end
-end
-
--- overloaded functions
-
-local h_table, b_table, n_table = { }, { }, { } -- from the time small tables were used (optimization)
-
-setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEADER } t[level] = v return v end })
-setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
-setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })
-
-local newline = P("\r\n") + S("\r\n")
-local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
-local p_nop = newline
-
-local folders = { }
-
-local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
- local folder = folders[lexer]
- if not folder then
- --
- local pattern, folds, text, start_pos, line_num, prev_level, current_level
- --
- local fold_symbols = lexer._foldsymbols
- local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
- --
- if fold_pattern then
- -- if no functions are found then we could have a faster one
- fold_pattern = Cp() * C(fold_pattern) / function(s,match)
- local symbols = fold_symbols[get_style_at(start_pos + s)]
- if symbols then
- local l = symbols[match]
- if l then
- current_level = current_level + l
- end
- end
- end
- local action_y = function()
- folds[line_num] = prev_level
- if current_level > prev_level then
- folds[line_num] = prev_level + FOLD_HEADER
- end
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
- prev_level = current_level
- line_num = line_num + 1
- end
- local action_n = function()
- folds[line_num] = prev_level + FOLD_BLANK
- line_num = line_num + 1
- end
- pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0
-
- else
- -- the traditional one but a bit optimized
- local fold_symbols_patterns = fold_symbols._patterns
- local action_y = function(pos,line)
- for j = 1, #fold_symbols_patterns do
- for s, match in gmatch(line,fold_symbols_patterns[j]) do -- '()('..patterns[i]..')'
- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
- local l = symbols and symbols[match]
- local t = type(l)
- if t == 'number' then
- current_level = current_level + l
- elseif t == 'function' then
- current_level = current_level + l(text, pos, line, s, match)
- end
- end
- end
- folds[line_num] = prev_level
- if current_level > prev_level then
- folds[line_num] = prev_level + FOLD_HEADER
- end
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
- prev_level = current_level
- line_num = line_num + 1
- end
- local action_n = function()
- folds[line_num] = prev_level + FOLD_BLANK
- line_num = line_num + 1
- end
- pattern = (p_yes/action_y + p_nop/action_n)^0
- end
- --
- local reset_parser = lexer._reset_parser
- --
- folder = function(_text_,_start_pos_,_start_line_,_start_level_)
- if reset_parser then
- reset_parser()
- end
- folds = { }
- text = _text_
- start_pos = _start_pos_
- line_num = _start_line_
- prev_level = _start_level_
- current_level = prev_level
- lpegmatch(pattern,text)
- -- make folds collectable
- local t = folds
- folds = nil
- return t
- end
- folders[lexer] = folder
- end
- return folder(text,start_pos,start_line,start_level,lexer)
-end
-
-local folds, current_line, prev_level
-
-local function action_y()
- local current_level = FOLD_BASE + get_indent_amount(current_line)
- if current_level > prev_level then -- next level
- local i = current_line - 1
- local f
- while true do
- f = folds[i]
- if not f then
- break
- elseif f[2] == FOLD_BLANK then
- i = i - 1
- else
- f[2] = FOLD_HEADER -- low indent
- break
- end
- end
- folds[current_line] = { current_level } -- high indent
- elseif current_level < prev_level then -- prev level
- local f = folds[current_line - 1]
- if f then
- f[1] = prev_level -- high indent
- end
- folds[current_line] = { current_level } -- low indent
- else -- same level
- folds[current_line] = { prev_level }
- end
- prev_level = current_level
- current_line = current_line + 1
-end
-
-local function action_n()
- folds[current_line] = { prev_level, FOLD_BLANK }
- current_line = current_line + 1
-end
-
-local pattern = ( S("\t ")^0 * ( (1-S("\n\r"))^1 / action_y + P(true) / action_n) * newline )^0
-
-local function fold_by_indentation(text,start_pos,start_line,start_level)
- -- initialize
- folds = { }
- current_line = start_line
- prev_level = start_level
- -- define
- -- -- not here .. pattern binds and local functions are not frozen
- -- analyze
- lpegmatch(pattern,text)
- -- flatten
- for line, level in next, folds do
- folds[line] = level[1] + (level[2] or 0)
- end
- -- done, make folds collectable
- local t = folds
- folds = nil
- return t
-end
-
-local function fold_by_line(text,start_pos,start_line,start_level)
- local folds = { }
- -- can also be lpeg'd
- for _ in gmatch(text,".-\r?\n") do
- folds[start_line] = n_table[start_level] -- { start_level } -- stile tables ? needs checking
- start_line = start_line + 1
- end
- return folds
-end
-
-local threshold_by_lexer = 512 * 1024 -- we don't know the filesize yet
-local threshold_by_parsing = 512 * 1024 -- we don't know the filesize yet
-local threshold_by_indentation = 512 * 1024 -- we don't know the filesize yet
-local threshold_by_line = 512 * 1024 -- we don't know the filesize yet
-
-function context.fold(text,start_pos,start_line,start_level) -- hm, we had size thresholds .. where did they go
- if text == '' then
- return { }
- end
- if initialize then
- initialize()
- end
- local lexer = global._LEXER
- local fold_by_lexer = lexer._fold
- local fold_by_symbols = lexer._foldsymbols
- local filesize = 0 -- we don't know that
- if fold_by_lexer then
- if filesize <= threshold_by_lexer then
- return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
- end
- elseif fold_by_symbols then -- and get_property('fold.by.parsing',1) > 0 then
- if filesize <= threshold_by_parsing then
- return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
- end
- elseif get_property('fold.by.indentation',1) > 0 then
- if filesize <= threshold_by_indentation then
- return fold_by_indentation(text,start_pos,start_line,start_level,lexer)
- end
- elseif get_property('fold.by.line',1) > 0 then
- if filesize <= threshold_by_line then
- return fold_by_line(text,start_pos,start_line,start_level,lexer)
- end
- end
- return { }
-end
-
--- The following code is mostly unchanged:
-
-local function add_rule(lexer,id,rule)
- if not lexer._RULES then
- lexer._RULES = { }
- lexer._RULEORDER = { }
- end
- lexer._RULES[id] = rule
- lexer._RULEORDER[#lexer._RULEORDER + 1] = id
-end
-
-local function add_style(lexer,token_name,style)
- local len = lexer._STYLES.len
- if len == 32 then
- len = len + 8
- end
- if len >= 128 then
- print('Too many styles defined (128 MAX)')
- end
- lexer._TOKENS[token_name] = len
- lexer._STYLES[len] = style
- lexer._STYLES.len = len + 1
-end
-
-local function join_tokens(lexer)
- local patterns = lexer._RULES
- local order = lexer._RULEORDER
- local token_rule = patterns[order[1]]
- for i=2,#order do
- token_rule = token_rule + patterns[order[i]]
- end
- lexer._TOKENRULE = token_rule
- return token_rule
-end
-
-local function add_lexer(grammar, lexer, token_rule)
- local token_rule = join_tokens(lexer)
- local lexer_name = lexer._NAME
- local children = lexer._CHILDREN
- for i=1,#children do
- local child = children[i]
- if child._CHILDREN then
- add_lexer(grammar, child)
- end
- local child_name = child._NAME
- local rules = child._EMBEDDEDRULES[lexer_name]
- local rules_token_rule = grammar['__'..child_name] or rules.token_rule
- grammar[child_name] = (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1 * V(lexer_name)
- local embedded_child = '_' .. child_name
- grammar[embedded_child] = rules.start_rule * (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1
- token_rule = V(embedded_child) + token_rule
- end
- grammar['__' .. lexer_name] = token_rule
- grammar[lexer_name] = token_rule^0
-end
-
-local function build_grammar(lexer, initial_rule)
- local children = lexer._CHILDREN
- if children then
- local lexer_name = lexer._NAME
- if not initial_rule then
- initial_rule = lexer_name
- end
- local grammar = { initial_rule }
- add_lexer(grammar, lexer)
- lexer._INITIALRULE = initial_rule
- lexer._GRAMMAR = Ct(P(grammar))
- else
- lexer._GRAMMAR = Ct(join_tokens(lexer)^0)
- end
-end
-
--- so far. We need these local functions in the next one.
-
-local lineparsers = { }
-
-function context.lex(text,init_style)
- local lexer = global._LEXER
- local grammar = lexer._GRAMMAR
- if initialize then
- initialize()
- end
- if not grammar then
- return { }
- elseif lexer._LEXBYLINE then -- we could keep token
- local tokens = { }
- local offset = 0
- local noftokens = 0
- local lineparser = lineparsers[lexer]
- if not lineparser then -- probably a cmt is more efficient
- lineparser = C((1-newline)^0 * newline) / function(line)
- local length = #line
- local line_tokens = length > 0 and lpegmatch(grammar,line)
- if line_tokens then
- for i=1,#line_tokens,2 do
- noftokens = noftokens + 1
- tokens[noftokens] = line_tokens[i]
- noftokens = noftokens + 1
- tokens[noftokens] = line_tokens[i + 1] + offset
- end
- end
- offset = offset + length
- if noftokens > 0 and tokens[noftokens] ~= offset then
- noftokens = noftokens + 1
- tokens[noftokens] = 'default'
- noftokens = noftokens + 1
- tokens[noftokens] = offset + 1
- end
- end
- lineparser = lineparser^0
- lineparsers[lexer] = lineparser
- end
- lpegmatch(lineparser,text)
- return tokens
-
- elseif lexer._CHILDREN then
- -- as we cannot print, tracing is not possible ... this might change as we can as well
- -- generate them all in one go (sharing as much as possible)
- local hash = lexer._HASH -- hm, was _hash
- if not hash then
- hash = { }
- lexer._HASH = hash
- end
- grammar = hash[init_style]
- if grammar then
- lexer._GRAMMAR = grammar
- else
- for style, style_num in next, lexer._TOKENS do
- if style_num == init_style then
- -- the name of the lexers is filtered from the whitespace
- -- specification
- local lexer_name = match(style,'^(.+)_whitespace') or lexer._NAME
- if lexer._INITIALRULE ~= lexer_name then
- grammar = hash[lexer_name]
- if not grammar then
- build_grammar(lexer,lexer_name)
- grammar = lexer._GRAMMAR
- hash[lexer_name] = grammar
- end
- end
- break
- end
- end
- grammar = grammar or lexer._GRAMMAR
- hash[init_style] = grammar
- end
- return lpegmatch(grammar,text)
- else
- return lpegmatch(grammar,text)
- end
-end
-
--- todo: keywords: one lookup and multiple matches
-
--- function context.token(name, patt)
--- return Ct(patt * Cc(name) * Cp())
--- end
---
--- -- hm, changed in 3.24 .. no longer a table
-
-function context.token(name, patt)
- return patt * Cc(name) * Cp()
-end
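
Since 3.24 a token is just pattern * Cc(stylename) * Cp(), so a lex run produces a flat array of alternating style names and end positions (one past the last byte of each run); that is the shape context.lex above hands back to scintilla. A self-contained sketch with made-up rules:

    local lpeg = require("lpeg")
    local P, R, Cc, Cp, Ct = lpeg.P, lpeg.R, lpeg.Cc, lpeg.Cp, lpeg.Ct

    local function token(name, patt)   -- same shape as context.token above
        return patt * Cc(name) * Cp()
    end

    local keyword = token("keyword", P("draw"))
    local number  = token("number",  R("09")^1)
    local blank   = token("default", P(" ")^1)

    local grammar = Ct((keyword + number + blank)^0)

    print(table.concat(grammar:match("draw 42"), " "))
    -- keyword 5 default 6 number 8
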
-
-lexers.fold = context.fold
-lexers.lex = context.lex
-lexers.token = context.token
-lexers.exact_match = context.exact_match
-
--- helper .. alas ... the lexer's lua instance is rather crippled .. not even
--- math is part of it
-
-local floor = math and math.floor
-local char = string.char
-
-if not floor then
-
- floor = function(n)
- return tonumber(string.format("%d",n))
- end
-
- math = math or { }
-
- math.floor = floor
-
-end
-
-local function utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
-end
-
-context.utfchar = utfchar
-
--- a helper from l-lpeg:
-
-local gmatch = string.gmatch
-
-local function make(t)
- local p
- for k, v in next, t do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
- end
- end
- return p
-end
-
-function lpeg.utfchartabletopattern(list)
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
-end
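
utfchartabletopattern folds a list of (multibyte) strings into a prefix tree and turns that tree into a single ordered choice pattern; patterns.invisibles right below is built this way from the utfchar helper above. A usage sketch:

    local invisible = lpeg.utfchartabletopattern {
        utfchar(0x00A0),  -- nbsp, two bytes
        utfchar(0x200B),  -- zero width space, three bytes
    }

    print(lpeg.match(invisible, utfchar(0x200B)))  -- 4: consumed the three byte sequence
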
-
-patterns.invisibles = lpeg.utfchartabletopattern {
- utfchar(0x00A0), -- nbsp
- utfchar(0x2000), -- enquad
- utfchar(0x2001), -- emquad
- utfchar(0x2002), -- enspace
- utfchar(0x2003), -- emspace
- utfchar(0x2004), -- threeperemspace
- utfchar(0x2005), -- fourperemspace
- utfchar(0x2006), -- sixperemspace
- utfchar(0x2007), -- figurespace
- utfchar(0x2008), -- punctuationspace
- utfchar(0x2009), -- breakablethinspace
- utfchar(0x200A), -- hairspace
- utfchar(0x200B), -- zerowidthspace
- utfchar(0x202F), -- narrownobreakspace
- utfchar(0x205F), -- math thinspace
-}
-
--- now we can make:
-
-patterns.iwordtoken = patterns.wordtoken - patterns.invisibles
-patterns.iwordpattern = patterns.iwordtoken^3
-
--- require("themes/scite-context-theme")
-
-- In order to deal with some bug in additional styles (I have no clue what is
--- wrong, but additional styles get ignored and clash somehow) I just copy the
--- original lexer code ... see original for comments.
-
-return lexers
diff --git a/context/data/scite/lexers/themes/scite-context-theme-keep.lua b/context/data/scite/lexers/themes/scite-context-theme-keep.lua
deleted file mode 100644
index 7f9423d9a..000000000
--- a/context/data/scite/lexers/themes/scite-context-theme-keep.lua
+++ /dev/null
@@ -1,233 +0,0 @@
-local info = {
- version = 1.002,
- comment = "theme for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- context_path = string.split(os.resultof("mtxrun --find-file context.mkiv"))[1] or ""
--- global.trace("OEPS") -- how do we get access to the regular lua extensions
-
--- The regular styles set the main lexer styles table but we avoid that in order not
--- to end up with updating issues. We just use another table.
-
--- if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local context_path = "t:/sources" -- c:/data/tex-context/tex/texmf-context/tex/base
-local font_name = 'Dejavu Sans Mono'
-local font_size = 14
-
-if not WIN32 then
- font_name = '!' .. font_name
-end
-
-local color = lexer.color
-local style = lexer.style
-
-lexer.context = lexer.context or { }
-local context = lexer.context
-
-context.path = context_path
-
-colors = {
- red = color('7F', '00', '00'),
- green = color('00', '7F', '00'),
- blue = color('00', '00', '7F'),
- cyan = color('00', '7F', '7F'),
- magenta = color('7F', '00', '7F'),
- yellow = color('7F', '7F', '00'),
- orange = color('B0', '7F', '00'),
- --
- white = color('FF', 'FF', 'FF'),
- light = color('CF', 'CF', 'CF'),
- grey = color('80', '80', '80'),
- dark = color('4F', '4F', '4F'),
- black = color('00', '00', '00'),
- --
- selection = color('F7', 'F7', 'F7'),
- logpanel = color('E7', 'E7', 'E7'),
- textpanel = color('CF', 'CF', 'CF'),
- linepanel = color('A7', 'A7', 'A7'),
- tippanel = color('44', '44', '44'),
- --
- right = color('00', '00', 'FF'),
- wrong = color('FF', '00', '00'),
-}
-
-colors.teal = colors.cyan
-colors.purple = colors.magenta
-
--- to be set:
---
--- style_nothing
--- style_class
--- style_comment
--- style_constant
--- style_definition
--- style_error
--- style_function
--- style_keyword
--- style_number
--- style_operator
--- style_string
--- style_preproc
--- style_tag
--- style_type
--- style_variable
--- style_embedded
--- style_label
--- style_regex
--- style_identifier
---
--- style_line_number
--- style_bracelight
--- style_bracebad
--- style_controlchar
--- style_indentguide
--- style_calltip
-
-style_default = style {
- font = font_name,
- size = font_size,
- fore = colors.black,
- back = colors.textpanel,
-}
-
-style_nothing = style {
- -- empty
-}
-
-style_number = style { fore = colors.cyan }
-style_comment = style { fore = colors.yellow }
-style_string = style { fore = colors.magenta }
-style_keyword = style { fore = colors.blue, bold = true }
-
-style_quote = style { fore = colors.blue, bold = true }
-style_special = style { fore = colors.blue }
-style_extra = style { fore = colors.yellow }
-
-style_embedded = style { fore = colors.black, bold = true }
-
-style_char = style { fore = colors.magenta }
-style_reserved = style { fore = colors.magenta, bold = true }
-style_class = style { fore = colors.black, bold = true }
-style_constant = style { fore = colors.cyan, bold = true }
-style_definition = style { fore = colors.black, bold = true }
-style_okay = style { fore = colors.dark }
-style_error = style { fore = colors.red }
-style_warning = style { fore = colors.orange }
-style_invisible = style { back = colors.orange }
-style_function = style { fore = colors.black, bold = true }
-style_operator = style { fore = colors.blue }
-style_preproc = style { fore = colors.yellow, bold = true }
-style_tag = style { fore = colors.cyan }
-style_type = style { fore = colors.blue }
-style_variable = style { fore = colors.black }
-style_identifier = style_nothing
-
-style_standout = style { fore = colors.orange, bold = true }
-
-style_line_number = style { back = colors.linepanel }
-style_bracelight = style_standout
-style_bracebad = style_standout
-style_indentguide = style { fore = colors.linepanel, back = colors.white }
-style_calltip = style { fore = colors.white, back = colors.tippanel }
-style_controlchar = style_nothing
-
-style_label = style { fore = colors.red, bold = true } -- style { fore = colors.cyan, bold = true }
-style_regex = style_string
-
-style_command = style { fore = colors.green, bold = true }
-
--- only bold seems to work
-
-lexer.style_nothing = style_nothing
-lexer.style_class = style_class
-lexer.style_comment = style_comment
-lexer.style_constant = style_constant
-lexer.style_definition = style_definition
-lexer.style_error = style_error
-lexer.style_function = style_function
-lexer.style_keyword = style_keyword
-lexer.style_number = style_number
-lexer.style_operator = style_operator
-lexer.style_string = style_string
-lexer.style_preproc = style_preproc
-lexer.style_tag = style_tag
-lexer.style_type = style_type
-lexer.style_variable = style_variable
-lexer.style_embedded = style_embedded
-lexer.style_label = style_label
-lexer.style_regex = style_regex
-lexer.style_identifier = style_nothing
-
-local styles = { -- as we have globals we could do with less
-
- -- ["whitespace"] = style_whitespace, -- not to be set!
-
-["default"] = style_nothing,
-["number"] = style_number,
-["comment"] = style_comment,
-["keyword"] = style_keyword,
-["string"] = style_string,
-["preproc"] = style_preproc,
-
- ["reserved"] = style_reserved,
- ["internal"] = style_standout,
-
- ["command"] = style_command,
- ["preamble"] = style_comment,
- ["embedded"] = style_embedded,
- ["grouping"] = style { fore = colors.red },
-["label"] = style_label,
- ["primitive"] = style_keyword,
- ["plain"] = style { fore = colors.dark, bold = true },
- ["user"] = style { fore = colors.green },
- ["data"] = style_constant,
- ["special"] = style_special,
- ["extra"] = style_extra,
- ["quote"] = style_quote,
-
- ["okay"] = style_okay,
- ["warning"] = style_warning,
- ["invisible"] = style_invisible,
-["error"] = style_error,
-
-}
-
--- Old method (still available):
-
-local styleset = { }
-
-for k, v in next, styles do
- styleset[#styleset+1] = { k, v }
-end
-
-context.styles = styles
-context.styleset = styleset
-
--- We need to be sparse due to some limitation (and the number of built in styles
--- growing).
-
--- function context.newstyleset(list)
--- local t = { }
--- if list then
--- for i=1,#list do
--- t[list[i]] = true
--- end
--- end
--- return t
--- end
-
--- function context.usestyle(set,name)
--- set[name] = true
--- return name
--- end
-
--- function context.usestyleset(set)
--- local t = { }
--- for k, _ in next, set do
--- t[#t+1] = { k, styles[k] or styles.default }
--- end
--- end
diff --git a/context/data/scite/lexers/themes/scite-context-theme.lua b/context/data/scite/lexers/themes/scite-context-theme.lua
deleted file mode 100644
index 6e161b22f..000000000
--- a/context/data/scite/lexers/themes/scite-context-theme.lua
+++ /dev/null
@@ -1,226 +0,0 @@
-local info = {
- version = 1.002,
- comment = "theme for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- context_path = string.split(os.resultof("mtxrun --find-file context.mkiv"))[1] or ""
--- global.trace("OEPS") -- how do we get access to the regular lua extensions
-
--- The regular styles set the main lexer styles table but we avoid that in order not
--- to end up with updating issues. We just use another table.
-
-if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
-
-local context_path = "t:/sources" -- c:/data/tex-context/tex/texmf-context/tex/base
-local font_name = 'Dejavu Sans Mono'
-local font_size = 14
-
-if not WIN32 then
- font_name = '!' .. font_name
-end
-
-local color = lexer.color
-local style = lexer.style
-
-lexer.context = lexer.context or { }
-local context = lexer.context
-
-context.path = context_path
-
-local colors = {
- red = color('7F', '00', '00'),
- green = color('00', '7F', '00'),
- blue = color('00', '00', '7F'),
- cyan = color('00', '7F', '7F'),
- magenta = color('7F', '00', '7F'),
- yellow = color('7F', '7F', '00'),
- orange = color('B0', '7F', '00'),
- --
- white = color('FF', 'FF', 'FF'),
- light = color('CF', 'CF', 'CF'),
- grey = color('80', '80', '80'),
- dark = color('4F', '4F', '4F'),
- black = color('00', '00', '00'),
- --
- selection = color('F7', 'F7', 'F7'),
- logpanel = color('E7', 'E7', 'E7'),
- textpanel = color('CF', 'CF', 'CF'),
- linepanel = color('A7', 'A7', 'A7'),
- tippanel = color('44', '44', '44'),
- --
- right = color('00', '00', 'FF'),
- wrong = color('FF', '00', '00'),
-}
-
-colors.teal = colors.cyan
-colors.purple = colors.magenta
-
-lexer.colors = colors
-
--- defaults:
-
-local style_nothing = style { }
------ style_whitespace = style { }
-local style_comment = style { fore = colors.yellow }
-local style_string = style { fore = colors.magenta }
-local style_number = style { fore = colors.cyan }
-local style_keyword = style { fore = colors.blue, bold = true }
-local style_identifier = style_nothing
-local style_operator = style { fore = colors.blue }
-local style_error = style { fore = colors.red }
-local style_preproc = style { fore = colors.yellow, bold = true }
-local style_constant = style { fore = colors.cyan, bold = true }
-local style_variable = style { fore = colors.black }
-local style_function = style { fore = colors.black, bold = true }
-local style_class = style { fore = colors.black, bold = true }
-local style_type = style { fore = colors.blue }
-local style_label = style { fore = colors.red, bold = true }
-local style_regex = style { fore = colors.magenta }
-
--- reserved:
-
-local style_default = style { font = font_name, size = font_size, fore = colors.black, back = colors.textpanel }
-local style_text = style { font = font_name, size = font_size, fore = colors.black, back = colors.textpanel }
-local style_line_number = style { back = colors.linepanel }
-local style_bracelight = style { fore = colors.orange, bold = true }
-local style_bracebad = style { fore = colors.orange, bold = true }
-local style_indentguide = style { fore = colors.linepanel, back = colors.white }
-local style_calltip = style { fore = colors.white, back = colors.tippanel }
-local style_controlchar = style_nothing
-
--- extras:
-
-local style_quote = style { fore = colors.blue, bold = true }
-local style_special = style { fore = colors.blue }
-local style_extra = style { fore = colors.yellow }
-local style_embedded = style { fore = colors.black, bold = true }
------ style_char = style { fore = colors.magenta }
-local style_reserved = style { fore = colors.magenta, bold = true }
-local style_definition = style { fore = colors.black, bold = true }
-local style_okay = style { fore = colors.dark }
-local style_warning = style { fore = colors.orange }
-local style_invisible = style { back = colors.orange }
-local style_tag = style { fore = colors.cyan }
------ style_standout = style { fore = colors.orange, bold = true }
-local style_command = style { fore = colors.green, bold = true }
-local style_internal = style { fore = colors.orange, bold = true }
-
-local style_preamble = style { fore = colors.yellow }
-local style_grouping = style { fore = colors.red }
-local style_primitive = style { fore = colors.blue, bold = true }
-local style_plain = style { fore = colors.dark, bold = true }
-local style_user = style { fore = colors.green }
-local style_data = style { fore = colors.cyan, bold = true }
-
-
--- used by the generic lexer:
-
-lexer.style_nothing = style_nothing -- 0
------.whitespace = style_whitespace -- 1
-lexer.style_comment = style_comment -- 2
-lexer.style_string = style_string -- 3
-lexer.style_number = style_number -- 4
-lexer.style_keyword = style_keyword -- 5
-lexer.style_identifier = style_nothing -- 6
-lexer.style_operator = style_operator -- 7
-lexer.style_error = style_error -- 8
-lexer.style_preproc = style_preproc -- 9
-lexer.style_constant = style_constant -- 10
-lexer.style_variable = style_variable -- 11
-lexer.style_function = style_function -- 12
-lexer.style_class = style_class -- 13
-lexer.style_type = style_type -- 14
-lexer.style_label = style_label -- 15
-lexer.style_regex = style_regex -- 16
-
-lexer.style_default = style_default -- 32
-lexer.style_line_number = style_line_number -- 33
-lexer.style_bracelight = style_bracelight -- 34
-lexer.style_bracebad = style_bracebad -- 35
-lexer.style_indentguide = style_indentguide -- 36
-lexer.style_calltip = style_calltip -- 37
-lexer.style_controlchar = style_controlchar -- 38
-
-local styles = { -- as we have globals we could do with less
-
- -- ["whitespace"] = style_whitespace, -- not to be set!
- ["default"] = style_nothing, -- else no good backtracking to start-of-child
- -- ["number"] = style_number,
- -- ["comment"] = style_comment,
- -- ["keyword"] = style_keyword,
- -- ["string"] = style_string,
- -- ["preproc"] = style_preproc,
- -- ["error"] = style_error,
- -- ["label"] = style_label,
-
- ["invisible"] = style_invisible,
- ["quote"] = style_quote,
- ["special"] = style_special,
- ["extra"] = style_extra,
- ["embedded"] = style_embedded,
- -- ["char"] = style_char,
- ["reserved"] = style_reserved,
- -- ["definition"] = style_definition,
- ["okay"] = style_okay,
- ["warning"] = style_warning,
- -- ["standout"] = style_standout,
- ["command"] = style_command,
- ["internal"] = style_internal,
- ["preamble"] = style_preamble,
- ["grouping"] = style_grouping,
- ["primitive"] = style_primitive,
- ["plain"] = style_plain,
- ["user"] = style_user,
- ["data"] = style_data,
-
- ["text"] = style_text, -- style_default
-
-}
-
-local styleset = { }
-
-for k, v in next, styles do
- styleset[#styleset+1] = { k, v }
-end
-
-context.styles = styles
-context.styleset = styleset
-
-function context.stylesetcopy()
- local t = { }
- for i=1,#styleset do
- local s = styleset[i]
- t[i] = s
-        t[s[1]] = s[2] -- also index the copy by style name (new style ?)
- end
- t[#t+1] = { "whitespace", style_nothing }
-    t.whitespace = style_nothing -- new style ?
- return t
-end
-
--- We can be sparse if needed:
-
--- function context.newstyleset(list)
--- local t = { }
--- if list then
--- for i=1,#list do
--- t[list[i]] = true
--- end
--- end
--- return t
--- end
-
--- function context.usestyle(set,name)
--- set[name] = true
--- return name
--- end
-
--- function context.usestyleset(set)
--- local t = { }
--- for k, _ in next, set do
--- t[#t+1] = { k, styles[k] or styles.default }
--- end
--- end
diff --git a/context/data/scite/metapost.properties b/context/data/scite/metapost.properties
deleted file mode 100644
index fe89b65eb..000000000
--- a/context/data/scite/metapost.properties
+++ /dev/null
@@ -1 +0,0 @@
-import scite-metapost
diff --git a/context/data/scite/scite-context-data-context.properties b/context/data/scite/scite-context-data-context.properties
deleted file mode 100644
index 140b0d96b..000000000
--- a/context/data/scite/scite-context-data-context.properties
+++ /dev/null
@@ -1,190 +0,0 @@
-keywordclass.context.constants=\
-zerocount minusone minustwo plusone \
-plustwo plusthree plusfour plusfive plussix \
-plusseven pluseight plusnine plusten plussixteen \
-plushundred plusthousand plustenthousand plustwentythousand medcard \
-maxcard zeropoint onepoint halfapoint onebasepoint \
-maxdimen scaledpoint thousandpoint points halfpoint \
-zeroskip zeromuskip onemuskip pluscxxvii pluscxxviii \
-pluscclv pluscclvi normalpagebox endoflinetoken outputnewlinechar \
-emptytoks empty undefined voidbox emptybox \
-emptyvbox emptyhbox bigskipamount medskipamount smallskipamount \
-fmtname fmtversion texengine texenginename texengineversion \
-luatexengine pdftexengine xetexengine unknownengine etexversion \
-pdftexversion xetexversion xetexrevision activecatcode bgroup \
-egroup endline conditionaltrue conditionalfalse attributeunsetvalue \
-uprotationangle rightrotationangle downrotationangle leftrotationangle inicatcodes \
-ctxcatcodes texcatcodes notcatcodes txtcatcodes vrbcatcodes \
-prtcatcodes nilcatcodes luacatcodes tpacatcodes tpbcatcodes \
-xmlcatcodes escapecatcode begingroupcatcode endgroupcatcode mathshiftcatcode \
-alignmentcatcode endoflinecatcode parametercatcode superscriptcatcode subscriptcatcode \
-ignorecatcode spacecatcode lettercatcode othercatcode activecatcode \
-commentcatcode invalidcatcode tabasciicode newlineasciicode formfeedasciicode \
-endoflineasciicode endoffileasciicode spaceasciicode hashasciicode dollarasciicode \
-commentasciicode ampersandasciicode colonasciicode backslashasciicode circumflexasciicode \
-underscoreasciicode leftbraceasciicode barasciicode rightbraceasciicode tildeasciicode \
-delasciicode lessthanasciicode morethanasciicode doublecommentsignal atsignasciicode \
-exclamationmarkasciicode questionmarkasciicode doublequoteasciicode singlequoteasciicode forwardslashasciicode \
-primeasciicode activemathcharcode activetabtoken activeformfeedtoken activeendoflinetoken \
-batchmodecode nonstopmodecode scrollmodecode errorstopmodecode bottomlevelgroupcode \
-simplegroupcode hboxgroupcode adjustedhboxgroupcode vboxgroupcode vtopgroupcode \
-aligngroupcode noaligngroupcode outputgroupcode mathgroupcode discretionarygroupcode \
-insertgroupcode vcentergroupcode mathchoicegroupcode semisimplegroupcode mathshiftgroupcode \
-mathleftgroupcode vadjustgroupcode charnodecode hlistnodecode vlistnodecode \
-rulenodecode insertnodecode marknodecode adjustnodecode ligaturenodecode \
-discretionarynodecode whatsitnodecode mathnodecode gluenodecode kernnodecode \
-penaltynodecode unsetnodecode mathsnodecode charifcode catifcode \
-numifcode dimifcode oddifcode vmodeifcode hmodeifcode \
-mmodeifcode innerifcode voidifcode hboxifcode vboxifcode \
-xifcode eofifcode trueifcode falseifcode caseifcode \
-definedifcode csnameifcode fontcharifcode fontslantperpoint fontinterwordspace \
-fontinterwordstretch fontinterwordshrink fontexheight fontemwidth fontextraspace \
-slantperpoint interwordspace interwordstretch interwordshrink exheight \
-emwidth extraspace mathsupdisplay mathsupnormal mathsupcramped \
-mathsubnormal mathsubcombined mathaxisheight startmode stopmode \
-startnotmode stopnotmode startmodeset stopmodeset doifmode \
-doifmodeelse doifnotmode startallmodes stopallmodes startnotallmodes \
-stopnotallmodes doifallmodes doifallmodeselse doifnotallmodes startenvironment \
-stopenvironment environment startcomponent stopcomponent component \
-startproduct stopproduct product startproject stopproject \
-project starttext stoptext startnotext stopnotext \
-startdocument stopdocument documentvariable setupdocument startmodule \
-stopmodule usemodule usetexmodule useluamodule setupmodule \
-currentmoduleparameter moduleparameter startTEXpage stopTEXpage enablemode \
-disablemode preventmode globalenablemode globaldisablemode globalpreventmode \
-pushmode popmode typescriptone typescripttwo typescriptthree \
-mathsizesuffix mathordcode mathopcode mathbincode mathrelcode \
-mathopencode mathclosecode mathpunctcode mathalphacode mathinnercode \
-mathnothingcode mathlimopcode mathnolopcode mathboxcode mathchoicecode \
-mathaccentcode mathradicalcode constantnumber constantnumberargument constantdimen \
-constantdimenargument constantemptyargument continueifinputfile luastringsep !!bs \
-!!es lefttorightmark righttoleftmark breakablethinspace nobreakspace \
-narrownobreakspace zerowidthnobreakspace ideographicspace ideographichalffillspace twoperemspace \
-threeperemspace fourperemspace fiveperemspace sixperemspace figurespace \
-punctuationspace hairspace zerowidthspace zerowidthnonjoiner zerowidthjoiner \
-zwnj zwj
-
-keywordclass.context.helpers=\
-startsetups stopsetups startxmlsetups stopxmlsetups \
-startluasetups stopluasetups starttexsetups stoptexsetups startrawsetups \
-stoprawsetups startlocalsetups stoplocalsetups starttexdefinition stoptexdefinition \
-starttexcode stoptexcode startcontextcode stopcontextcode doifsetupselse \
-doifsetups doifnotsetups setup setups texsetup \
-xmlsetup luasetup directsetup doifelsecommandhandler doifnotcommandhandler \
-doifcommandhandler newmode setmode resetmode newsystemmode \
-setsystemmode resetsystemmode pushsystemmode popsystemmode booleanmodevalue \
-newcount newdimen newskip newmuskip newbox \
-newtoks newread newwrite newmarks newinsert \
-newattribute newif newlanguage newfamily newfam \
-newhelp then begcsname strippedcsname firstargumentfalse \
-firstargumenttrue secondargumentfalse secondargumenttrue thirdargumentfalse thirdargumenttrue \
-fourthargumentfalse fourthargumenttrue fifthargumentfalse fifthsargumenttrue sixthargumentfalse \
-sixtsargumenttrue doglobal dodoglobal redoglobal resetglobal \
-donothing dontcomplain forgetall donetrue donefalse \
-htdp unvoidbox hfilll vfilll mathbox \
-mathlimop mathnolop mathnothing mathalpha currentcatcodetable \
-defaultcatcodetable catcodetablename newcatcodetable startcatcodetable stopcatcodetable \
-startextendcatcodetable stopextendcatcodetable pushcatcodetable popcatcodetable restorecatcodes \
-setcatcodetable letcatcodecommand defcatcodecommand uedcatcodecommand hglue \
-vglue hfillneg vfillneg hfilllneg vfilllneg \
-ruledhss ruledhfil ruledhfill ruledhfilneg ruledhfillneg \
-normalhfillneg ruledvss ruledvfil ruledvfill ruledvfilneg \
-ruledvfillneg normalvfillneg ruledhbox ruledvbox ruledvtop \
-ruledvcenter ruledmbox ruledhskip ruledvskip ruledkern \
-ruledmskip ruledmkern ruledhglue ruledvglue normalhglue \
-normalvglue ruledpenalty filledhboxb filledhboxr filledhboxg \
-filledhboxc filledhboxm filledhboxy filledhboxk scratchcounter \
-globalscratchcounter scratchdimen globalscratchdimen scratchskip globalscratchskip \
-scratchmuskip globalscratchmuskip scratchtoks globalscratchtoks scratchbox \
-globalscratchbox normalbaselineskip normallineskip normallineskiplimit availablehsize \
-localhsize setlocalhsize nextbox dowithnextbox dowithnextboxcs \
-dowithnextboxcontent dowithnextboxcontentcs scratchwidth scratchheight scratchdepth \
-scratchoffset scratchdistance scratchhsize scratchvsize scratchxoffset \
-scratchyoffset scratchhoffset scratchvoffset scratchxposition scratchyposition \
-scratchtopoffset scratchbottomoffset scratchleftoffset scratchrightoffset scratchcounterone \
-scratchcountertwo scratchcounterthree scratchdimenone scratchdimentwo scratchdimenthree \
-scratchskipone scratchskiptwo scratchskipthree scratchmuskipone scratchmuskiptwo \
-scratchmuskipthree scratchtoksone scratchtokstwo scratchtoksthree scratchboxone \
-scratchboxtwo scratchboxthree scratchnx scratchny scratchmx \
-scratchmy scratchunicode scratchleftskip scratchrightskip scratchtopskip \
-scratchbottomskip doif doifnot doifelse doifinset \
-doifnotinset doifinsetelse doifnextcharelse doifnextoptionalelse doifnextbgroupelse \
-doifnextparenthesiselse doiffastoptionalcheckelse doifundefinedelse doifdefinedelse doifundefined \
-doifdefined doifelsevalue doifvalue doifnotvalue doifnothing \
-doifsomething doifelsenothing doifsomethingelse doifvaluenothing doifvaluesomething \
-doifelsevaluenothing doifdimensionelse doifnumberelse doifnumber doifnotnumber \
-doifcommonelse doifcommon doifnotcommon doifinstring doifnotinstring \
-doifinstringelse doifassignmentelse docheckassignment tracingall tracingnone \
-loggingall removetoks appendtoks prependtoks appendtotoks \
-prependtotoks to endgraf endpar everyendpar \
-reseteverypar finishpar empty null space \
-quad enspace obeyspaces obeylines obeyedspace \
-obeyedline normalspace executeifdefined singleexpandafter doubleexpandafter \
-tripleexpandafter dontleavehmode removelastspace removeunwantedspaces keepunwantedspaces \
-wait writestatus define defineexpandable redefine \
-setmeasure setemeasure setgmeasure setxmeasure definemeasure \
-freezemeasure measure measured installcorenamespace getvalue \
-getuvalue setvalue setevalue setgvalue setxvalue \
-letvalue letgvalue resetvalue undefinevalue ignorevalue \
-setuvalue setuevalue setugvalue setuxvalue globallet \
-glet udef ugdef uedef uxdef \
-checked unique getparameters geteparameters getgparameters \
-getxparameters forgetparameters copyparameters getdummyparameters dummyparameter \
-directdummyparameter setdummyparameter letdummyparameter usedummystyleandcolor usedummystyleparameter \
-usedummycolorparameter processcommalist processcommacommand quitcommalist quitprevcommalist \
-processaction processallactions processfirstactioninset processallactionsinset unexpanded \
-expanded startexpanded stopexpanded protected protect \
-unprotect firstofoneargument firstoftwoarguments secondoftwoarguments firstofthreearguments \
-secondofthreearguments thirdofthreearguments firstoffourarguments secondoffourarguments thirdoffourarguments \
-fourthoffourarguments firstoffivearguments secondoffivearguments thirdoffivearguments fourthoffivearguments \
-fifthoffivearguments firstofsixarguments secondofsixarguments thirdofsixarguments fourthofsixarguments \
-fifthofsixarguments sixthofsixarguments firstofoneunexpanded gobbleoneargument gobbletwoarguments \
-gobblethreearguments gobblefourarguments gobblefivearguments gobblesixarguments gobblesevenarguments \
-gobbleeightarguments gobbleninearguments gobbletenarguments gobbleoneoptional gobbletwooptionals \
-gobblethreeoptionals gobblefouroptionals gobblefiveoptionals dorecurse doloop \
-exitloop dostepwiserecurse recurselevel recursedepth dofastloopcs \
-dowith newconstant setnewconstant newconditional settrue \
-setfalse setconstant newmacro setnewmacro newfraction \
-newsignal dosingleempty dodoubleempty dotripleempty doquadrupleempty \
-doquintupleempty dosixtupleempty doseventupleempty dosingleargument dodoubleargument \
-dotripleargument doquadrupleargument doquintupleargument dosixtupleargument doseventupleargument \
-dosinglegroupempty dodoublegroupempty dotriplegroupempty doquadruplegroupempty doquintuplegroupempty \
-permitspacesbetweengroups dontpermitspacesbetweengroups nopdfcompression maximumpdfcompression normalpdfcompression \
-modulonumber dividenumber getfirstcharacter doiffirstcharelse startnointerference \
-stopnointerference twodigits threedigits leftorright strut \
-setstrut strutbox strutht strutdp strutwd \
-struthtdp begstrut endstrut lineheight ordordspacing \
-ordopspacing ordbinspacing ordrelspacing ordopenspacing ordclosespacing \
-ordpunctspacing ordinnerspacing opordspacing opopspacing opbinspacing \
-oprelspacing opopenspacing opclosespacing oppunctspacing opinnerspacing \
-binordspacing binopspacing binbinspacing binrelspacing binopenspacing \
-binclosespacing binpunctspacing bininnerspacing relordspacing relopspacing \
-relbinspacing relrelspacing relopenspacing relclosespacing relpunctspacing \
-relinnerspacing openordspacing openopspacing openbinspacing openrelspacing \
-openopenspacing openclosespacing openpunctspacing openinnerspacing closeordspacing \
-closeopspacing closebinspacing closerelspacing closeopenspacing closeclosespacing \
-closepunctspacing closeinnerspacing punctordspacing punctopspacing punctbinspacing \
-punctrelspacing punctopenspacing punctclosespacing punctpunctspacing punctinnerspacing \
-innerordspacing inneropspacing innerbinspacing innerrelspacing inneropenspacing \
-innerclosespacing innerpunctspacing innerinnerspacing normalreqno startimath \
-stopimath normalstartimath normalstopimath startdmath stopdmath \
-normalstartdmath normalstopdmath uncramped cramped triggermathstyle \
-mathstylefont mathsmallstylefont mathstyleface mathsmallstyleface mathstylecommand \
-mathpalette mathstylehbox mathstylevbox mathstylevcenter mathstylevcenteredhbox \
-mathstylevcenteredvbox mathtext setmathsmalltextbox setmathtextbox triggerdisplaystyle \
-triggertextstyle triggerscriptstyle triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle \
-triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle \
-triggercrampedbigstyle luaexpr expdoifelse expdoif expdoifnot \
-expdoifcommonelse expdoifinsetelse ctxdirectlua ctxlatelua ctxsprint \
-ctxwrite ctxcommand ctxdirectcommand ctxlatecommand ctxreport \
-ctxlua luacode lateluacode directluacode registerctxluafile \
-ctxloadluafile luaversion luamajorversion luaminorversion ctxluacode \
-luaconditional luaexpanded startluaparameterset stopluaparameterset luaparameterset \
-definenamedlua obeylualines obeyluatokens startluacode stopluacode \
-startlua stoplua carryoverpar assumelongusagecs Umathbotaccent \
-righttolefthbox lefttorighthbox righttoleftvbox lefttorightvbox righttoleftvtop \
-lefttorightvtop rtlhbox ltrhbox rtlvbox ltrvbox \
-rtlvtop ltrvtop autodirhbox autodirvbox autodirvtop \
-lefttoright righttoleft synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection \
-lesshyphens morehyphens nohyphens dohyphens
-
diff --git a/context/data/scite/scite-context-data-interfaces.properties b/context/data/scite/scite-context-data-interfaces.properties
deleted file mode 100644
index 9c2ca4623..000000000
--- a/context/data/scite/scite-context-data-interfaces.properties
+++ /dev/null
@@ -1,1276 +0,0 @@
-keywordclass.context.cs=\
-CAP Cap Caps Cisla \
-KAP Kap Kaps MESIC Rimskecislice \
-SLOVA SLOVO Slova Slovo VSEDNIDEN \
-Znak Znaky aktualnicislonadpisu aktualnidatum appendix \
-arg atleftmargin atrightmargin barevnalista barva \
-bilemisto bottomspace bublinkovanapoveda bydliste bypassblocks \
-cap celkovypocetstran cernalinka cernelinky chapter \
-chem cisla cislonadpisu cislopodrovnice cislorovnice \
-cislostrany citace citovat comment completecombinedlist \
-completelistoffloats completelistofsorts completelistofsynonyms completepagenumber completeregister \
-coupledregister crlf cutspace datum decrementnumber \
-definebodyfontDEF definebodyfontREF definecolumnbreak definecolumnset definecombination \
-definedfont definefontfeature definefonthandling defineindentedtext defineinmargin \
-defineitemgroup definelayer definelayout definemathalignment definepagebreak \
-defineplacement definerawfont definerule definetextposition definetextvariable \
-definetype definetypeface definuj definujakcent definujbarvu \
-definujblok definujbloksekce definujbuffer definujfont definujformatodkazu \
-definujhbox definujhlavnipole definujinterakcnimenu definujkombinovanyseznam definujkonverzi \
-definujlogo definujnadpis definujobrazeksymbol definujodkaz definujodstavce \
-definujopis definujoramovani definujoramovanytext definujpaletu definujplvouciobjekt \
-definujpodpole definujpole definujpopis definujpopisek definujprekryv \
-definujpreskok definujprikaz definujprofil definujprogram definujprostredizakladnihofontu \
-definujrejstrik definujsablonutabulky definujsekci definujseznam definujseznamodkazu \
-definujskupinubarev definujstartstop definujstyl definujstylfontu definujsymbol \
-definujsynonumumfontu definujsynonyma definujtabelaci definujtext definujtrideni \
-definujupravu definujvelikostpapiru definujverzi definujvycet definujvystup \
-definujzakladnifont definujzasobnikpoli definujznaceni definujznak delkaseznamu \
-description dodrzujprofil dodrzujverzi dodrzujverziprofilu dvoustrannypapir \
-emptylines enumeration externiobraz fakt footnotetext \
-forceblocks framedtext getnumber headsym hl \
-hlavnijazyk hlavniuroven hodnotabarvy hodnotasedi immediatebetweenlist \
-immediatetolist indentation ininner inneredgedistance inneredgewidth \
-innermargindistance innermarginwidth inouter instalacejazyka interakcnilista \
-interakcnitlacitka interaktivnimenu jazyk jdidolu jdina \
-jdinabox jdinastranu jmeno kap klonujpole \
-komponenta konvertujcislo kopirujpole korekcebilehomista labeling \
-leg listsymbol loadsorts loadsynonyms maoramovani \
-mapfontsize marginalnilinka marginalninadpis marginalnislovo marginalnitext \
-matematika mazaramovani mediaeval meritko mesic \
-mezera moveformula movesidefloat mrizka nadpis \
-nadruhyokraj nalevo nalevyokraj name naokraj \
-napravo napravyokraj nastavbarvu nastavbarvy nastavbilamista \
-nastavblok nastavbloksekce nastavbuffer nastavcernelinky nastavcislonadpisu \
-nastavcislostrany nastavcislovani nastavcislovaniodstavcu nastavcislovaniradku nastavcislovanistran \
-nastavcitaci nastavdefinicipoznamekpodcarou nastavdeleniplvoucichobjektu nastavdelitko nastavdolnitexty \
-nastaveni nastavexterniobrazy nastavhorejsek nastavhornitexty nastavinterakci \
-nastavinterakcnilistu nastavinterakcnimenu nastavinterakcniobrazovku nastavjazyk nastavkapitalky \
-nastavkombinovanyseznam nastavkomentar nastavkomentarstrany nastavlegendu nastavmarginalie \
-nastavmarginalniblok nastavmarginalnilinky nastavmeziradkovoumezeru nastavnadpis nastavnadpisy \
-nastavodkazovani nastavodsazovani nastavodstavce nastavopis nastavoramovanetexty \
-nastavoramovani nastavorez nastavotoceni nastavpaletu nastavplvouciobjekt \
-nastavplvouciobjekty nastavpodcislostrany nastavpodtrzeni nastavpole nastavpolozky \
-nastavpopisek nastavpopisky nastavpopisy nastavpozadi nastavpoznamkypodcarou \
-nastavprechodstrany nastavpreskok nastavprofily nastavprogramy nastavprostredizakladnihofontu \
-nastavpublikace nastavradkovani nastavradky nastavrastr nastavrejstrik \
-nastavrovnice nastavsadusymbolu nastavsekci nastavseznam nastavseznamodkazu \
-nastavsirkucary nastavsloupce nastavspodek nastavspojeni nastavsynchronizaci \
-nastavsynchronizacnilistu nastavsynonyma nastavsystem nastavtab nastavtabelaci \
-nastavtabulky nastavtenkelinky nastavtext nastavtexthlavicky nastavtextovelinky \
-nastavtextpopisku nastavtexttexty nastavtextyupati nastavtextyzahlavi nastavtlacitka \
-nastavtoleranci nastavtrideni nastavtype nastavumisteniprotejsku nastavumistovani \
-nastavupati nastavupravu nastavurl nastavusporadani nastavvelikostpapiru \
-nastavverze nastavvsechnapole nastavvycty nastavvyplnovelinky nastavvyplnoveradky \
-nastavvystup nastavvzhled nastavzahlavi nastavzakladnifont nastavzarovnani \
-nastavznaceni nastavzuzeni nastrane navigating nejakyradek \
-nekde nextsection neznamo nivy nizky \
-nocap nokap nop numberofsubpages obrazovka \
-odkaz odkaznadatum odkaznastranu odkaznatext odkazujici \
-odsazenishora odsazenizleva odsazovani okr opakovat \
-opis opissoubor oramovani oref orez \
-otocit outeredgedistance outeredgewidth outermargindistance outermarginwidth \
-overbar overbars overstrike overstrikes oznaceni \
-oznacverzi pagedepth pageoffset paragraph parovastrana \
-part pis placefloat placeheadnumber placeheadtext \
-placelistoffloats placelistofsorts placelistofsynonyms placepagenumber placerawlist \
-placereferencelist placerule placetextvariable plnezneni pol \
-pole polozka polozky popisky poppisek \
-porovnejpaletu porovnejskupinubarev positiontext pozadi pozice \
-poznamka poznamkapodcarou pref prelozit premistinamrizku \
-prepninazakladnifont preskoc prizpusobivepole prizpusobvzhled produkt \
-program projekt propojeneznaceni propojenydokument propojenyrejstrik \
-prostredi publikace ran ref register \
-reservefloat reset resetnumber resettextcontent resetznaceni \
-rimskecislice rozdelplvouciobjekt rozmer rozpojeneznaceni roztazene \
-schovejbloky section sedabarva seeregister setnumber \
-settextcontent settextvariable setupanswerarea setupcolumnset setupcolumnsetlines \
-setupcolumnsetstart setupfonthandling setupfontsynonym setupforms setupindentedtext \
-setupinterlinespace2 setupitemgroup setuplistalternative setupmathalignment setupnumber \
-setuppaper setupplacement setuprule setupstartstop setupstrut \
-setuptextposition setuptextvariable sirkalevehookraje sirkalevemarginalie sirkamarginalie \
-sirkaokraje sirkapapiru sirkapravehookraje sirkapravemarginalie sirkasazby \
-sirkaseznamu sirkatextu sirkatiskpapiru sloupec slovovpravo \
-sort spodek stanovcharakteristickuseznamu stanovcislonadpisu startalignment \
-startbarva startbuffer startcislovaniradku startcitace startcolumnmakeup \
-startcolumns startcolumnset startcombination startcomment startdescription \
-startdocument startdokument startenumeration startfakt startfigure \
-startfloattext startformula startframedtext startglobalni starthiding \
-startinteraktivnimenu startitemgroup startkodovani startkomponenta startkorekceradku \
-startlegend startline startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startlokalni startlokalnipoznamkypodcarou \
-startmakeup startmarginalniblok startmarginalnilinka startmarginblock startnamemakeup \
-startnarrower startnezhustene startobraz startopposite startoverlay \
-startoverview startparagraph startpositioning startpostponing startpozadi \
-startprehled startprekryv startprodukt startprofil startprofile \
-startprojekt startprostredi startprotejsek startradek startradky \
-startrastr startregister startsadasymbolu startsloupce startspojeni \
-startsymbolset startsynchronizace startsynchronization starttable starttables \
-starttabulate starttabulka starttabulky starttext starttextovalinka \
-starttyping startumistovani startunpacked startuprava startverze \
-startzarovnavani startzhustene startzuzeni stopalignment stopbarva \
-stopbuffer stopcislovaniradku stopcitace stopcolumnmakeup stopcolumns \
-stopcolumnset stopcombination stopcomment stopdescription stopdocument \
-stopdokument stopenumeration stopfakt stopfigure stopfloattext \
-stopformula stopframedtext stopglobalni stophiding stopinteraktivnimenu \
-stopitemgroup stopkodovani stopkomponenta stopkorekceradku stoplegend \
-stopline stoplinecorrection stoplinenumbering stoplines stoplocal \
-stoplocalenvironment stoplocalfootnotes stoplokalni stoplokalnipoznamkypodcarou stopmakeup \
-stopmarginalniblok stopmarginalnilinka stopmarginblock stopnamemakeup stopnarrower \
-stopnezhustene stopopposite stopoverlay stopoverview stopparagraph \
-stoppositioning stoppostponing stoppozadi stopprehled stopprekryv \
-stopprodukt stopprofil stopprofile stopprojekt stopprostredi \
-stopprotejsek stopradek stopradky stoprastr stopsloupce \
-stopspojeni stopsymbolset stopsynchronizace stopsynchronization stoptable \
-stoptables stoptabulate stoptabulka stoptabulky stoptext \
-stoptextovalinka stoptyping stopumistovani stopunpacked stopuprava \
-stopverze stopzarovnavani stopzhustene stopzuzeni strana \
-sub subject subpagenumber subsection subsubject \
-subsubsection subsubsubject switchtorawfont sym symbol \
-synchronizacnilista synchronizovat synonym tab tecky \
-tenkalinka tenkelinky testcolumn testpage tex \
-texthlavicky textovalinka textpopisku textvariable title \
-tlacitko tlacitkomenu tloustkacary tref tvrdamezera \
-tvrdemezery txt typebuffer ukazbarvu ukazexterniobrazy \
-ukazmrizku ukaznastaveni ukazpaletu ukazpodpery ukazpole \
-ukazpostredizakladnihofontu ukazramecek ukazsadusymbolu ukazskupinubarev ukazupravu \
-ukazvytisk ukazvzhled ukazzakladnifont umistikombinovanyseznam umistilegendu \
-umistiloga umistilokalnipoznamkypodcarou umistinadsebe umistinamrizku umistipodrovnici \
-umistipoznamkypodcarou umistirejstrik umistirovnici umistiseznam umistivedlesebe \
-umistizalozky underbar underbars urcicharakteristikurejstriku useXMLfilter \
-usedirectory usetypescript usetypescriptfile uzijJSscripts uzijURL \
-uzijadresar uzijbloky uzijexternidokument uzijexterniobraz uzijexternisoubor \
-uzijexternisoubory uzijexternizvuk uzijkodovani uzijmodul uzijmoduly \
-uzijodkazy uzijprikazy uzijspeciality uzijsymbol uzijurl \
-verze vl vlasovalinka vlevo vpravo \
-vradku vsedniden vyberbloky vyberpapir vyberverzi \
-vyplnenytext vyplnovelinky vyplnovepole vyplnovyradek vyskahorejsku \
-vyskapapiru vyskasazby vyskaseznamu vyskaspodku vyskatextu \
-vyskatiskpapiru vyskaupati vyskazahlavi vysoky vyznam \
-vzdalenosthorejsku vzdalenostlevehookraje vzdalenostlevemarginalie vzdalenostmarginalie vzdalenostokraje \
-vzdalenostpravehookraje vzdalenostpravemarginalie vzdalenostspodku vzdalenostupati vzdalenostzahlavi \
-zablokujinterakcnimenu zachovejbloky zadnamezera zadnebilemisto zadnedalsibloky \
-zadnedalsisoubory zadnehorniadolniradky zadneodsazovani zadnezahlaviaupati zadneznaceni \
-zadnyrozmer zadnyseznam zadnytest zalozka zapisdorejstriku \
-zapisdoseznamu zapisdoseznamuodkazu zapismeziseznam zaramovani zarovnanonastred \
-zarovnanovlevo zarovnanovpravo zasobnikpoli zaznamovepole zhustene \
-ziskejbuffer ziskejznaceni zlomek znaceni znak \
-znaky zpracujbloky zpracujstranu zrcadlit zref \
-zvysujicicislo
-
-keywordclass.context.de=\
-Buchstabe Buchstaben CAP Cap \
-Caps KAP Kap Kaps MONAT \
-Roemischezahlen WOCHENTAG WOERTER WORT Woerter \
-Wort Ziffern abstandlinkerrand abstandoben abstandrechterrand \
-abstandunten amgitterausrichten amgitterneuausrichten appendix arg \
-atleftmargin atrightmargin aufseite ausfuellfeld ausfuelltext \
-ausschnitt bearbeitebloecke bearbeiteseite bedeutung behaltebloecke \
-bei bemerkung benutzekodierung benutzespezielles benutzeverzeichnis \
-beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bestimmeregistercharakteristika \
-bildschirm blanko bookmark bottomspace breitelinkerrand \
-breiterechterrand bruch buchstabe buchstaben but \
-bypassblocks cap chapter chem comment \
-completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completepagenumber \
-completeregister coupledregister crlf cutspace datum \
-decrementnumber definebodyfontDEF definebodyfontREF definecolumnbreak definecolumnset \
-definecombination definedfont definefontfeature definefonthandling defineindentedtext \
-defineinmargin defineitemgroup definelayer definelayout definemathalignment \
-defineoutput definepagebreak defineplacement definerawfont definerule \
-defineschriftsynonym definetextposition definetextvariable definetype definetypeface \
-definiereabbsymbol definiereabsaetze definiereabschnitt definiereabschnittsblock definiereakzent \
-definierebefehl definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko \
-definiereblock definierefarbe definierefarbengruppe definierefeld definierefeldstapel \
-definierefliesstext definierefliesstextumgebung definieregleitobjekt definierehauptfeld definierehbox \
-definiereinteraktionsmenue definierekonversion definierelabel definiereliste definierelogo \
-definieren definierenummerierung definiereoverlay definierepalette definierepapierformat \
-definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat \
-definierereferenzliste definiereregister definiereschrift definiereschriftstil definieresortieren \
-definierestartstop definierestil definieresubfeld definieresymbol definieresynonyme \
-definieretabellenvorlage definieretabulator definieretext definieretippen definiereueberschrift \
-definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezeichen \
-definierezusammengestellteliste description dimension doppelseite doppelseitigespapier \
-drehen duennelinie duennerumriss einezeile einstellungen \
-einziehen emptylines entknuepfebeschriftung enumeration externeabbildung \
-farbbalken farbe farbewert feld feldstapel \
-festesspatium folgeprofil folgeprofilversion folgeversion footnotetext \
-forceblocks format formelnummer framedtext fussnote \
-fusszeileabstand fusszeilenhoehe gefuelltesrechteck gefuelltezeile geg \
-gesamtseitenanzahl gestreckt getnumber gitter graufarbe \
-grauwert haarlinie hauptsprache headsym heutigesdatum \
-heutigeskopfnummer hintergrund hl hoch hoeheoben \
-hoeheunten holebeschriftung holepuffer imlinken imlinkenrand \
-immaumrise immediatebetweenlist immediatetolist imrechten imrechtenrand \
-imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation \
-ininner inlinkermarginale inmarginalie inneredgedistance inneredgewidth \
-innermargindistance innermarginwidth inouter inrechtermarginale installieresprache \
-interaktionsbalken interaktionsknopfe interaktionsmenue inzeile irgendwo \
-its kap keindimension keinebeschriftung keinebloeckemehr \
-keinedateienmehr keinekopfundfusszeilen keineliste keinspatium keintest \
-keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld knopf \
-komponente konvertierezahl kopf kopfniveau kopfnummer \
-kopfweite kopfzeilenabstand kopfzeilenhoehe kopierefeld korrigierezwischenraum \
-label labeling labels labeltext leg \
-liniendicke linkemarginalafstand linkemarginalbreite linksbuendig listenbreite \
-listenhoehe listenlaenge listsymbol loadsorts loadsynonyms \
-mapfontsize mar marginalafstand marginalbreite marginallinie \
-marginaltext marginaltitel marginalwort mathematik maumrise \
-mediaeval menueknopf monat moveformula movesidefloat \
-nachunten name navigating nextsection nichteinziehen \
-nocap nokap nop notiz numberofsubpages \
-nummererhoehen outeredgedistance outeredgewidth outermargindistance outermarginwidth \
-overbar overbars overstrike overstrikes pagedepth \
-pageoffset papierbreite papierhoehe paragraph part \
-passelayoutan passendfeld placefloat placeheadnumber placeheadtext \
-placelistoffloats placelistofsorts placelistofsynonyms placepagenumber placerawlist \
-placereferencelist placerule placetextvariable platzierebookmarks platziereformel \
-platzierefussnoten platzierelegende platziereliste platzierelogo platzierelokalefussnoten \
-platzierenebeneinander platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste \
-pos position positiontext posten printpapierbreite \
-printpapierhoehe produkt programm projekt publikation \
-punkt ran randabstand randbreite rechteck \
-rechtecke rechtemarginalafstand rechtemarginalbreite rechtsbuendig ref \
-referenz referieren register registrierefelder reservefloat \
-resetnumber resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung \
-rumpfweite satzbreite satzhoehe schreibezumregister schreibezurliste \
-schreibezurreferenzliste schreibezwischenliste section seeregister seite \
-seitenreferenz seitenummer setnumber settext settextvariable \
-setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart setupfonthandling \
-setupfontsynonym setupforms setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
-setuprule setupstartstop setupstrut setuptextposition setuptextvariable \
-showsymbolset sort spalte spatium spiegeln \
-sprache startabbildung startalignment startausrichtung startbuffer \
-startcolumnmakeup startcolumns startcolumnset startcombination startcomment \
-startdescription startdocument startdokument startenger startenumeration \
-startfarbe startfigure startfloattext startformula startframedtext \
-startgeg startgegenueber startglobal startgrosserdurchschuss starthiding \
-starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkodierung \
-startkombination startkomponente startlegend startline startlinecorrection \
-startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
-startlokal startlokalefussnoten startmakeup startmarginalblock startmarginallinie \
-startmarginblock startnamemakeup startnarrower startopposite startoverlay \
-startoverview startparagraph startpositionieren startpositioning startpostponing \
-startprodukt startprofil startprofile startprojekt startraster \
-startregister startspalten startsymbolset startsynchronisation startsynchronization \
-starttabelle starttabellen starttable starttables starttabulate \
-starttext starttextlinie starttyping startueberblick startumbruch \
-startumgebung startunpacked startversion startzeile startzeilen \
-startzeilenkorrektur startzeilennumerierung startzitat stelleabsaetzeein stelleabsatznummerierungein \
-stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleaufzaehlungenein stelleausgabeein \
-stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein \
-stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein stelledrehenein \
-stelleduennerumrissein stelleeinziehenein stelleengerein stelleexterneabbildungenein stellefarbeein \
-stellefarbenein stellefeldein stellefelderin stellefliesstextein stellefliesstextumgebungein \
-stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein stellefusszeilentextein \
-stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein \
-stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein \
-stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein \
-stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein stellelayoutein \
-stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein stellemarginallinieein \
-stellenobenein stellenummerierungein stellepaletteein stellepapierformatein stelleplatziegeteiltegleitobjekt \
-stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein \
-stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein \
-stelleregisterein stelleseitenkommentarein stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein \
-stellesortierenein stellespaltenein stellespatiumein stellespracheein stellesymbolsetein \
-stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein stelletabein \
-stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein \
-stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein \
-stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein \
-stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein stelleurlein \
-stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein stellezeilennumerierungein \
-stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment stopausrichtung \
-stopbuffer stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
-stopcomment stopdescription stopdocument stopdokument stopenger \
-stopenumeration stopfarbe stopfigure stopfloattext stopformula \
-stopframedtext stopgeg stopgegenueber stopglobal stopgrosserdurchschuss \
-stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss \
-stopkodierung stopkombination stopkomponente stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
-stoplocalfootnotes stoplokal stoplokalefussnoten stopmakeup stopmarginalblock \
-stopmarginallinie stopmarginblock stopnamemakeup stopnarrower stopopposite \
-stopoverlay stopoverview stopparagraph stoppositionieren stoppositioning \
-stoppostponing stopprodukt stopprofil stopprofile stopprojekt \
-stopraster stopspalten stopsymbolset stopsynchronisation stopsynchronization \
-stoptabelle stoptabellen stoptable stoptables stoptabulate \
-stoptext stoptextlinie stoptyping stopueberblick stopumbruch \
-stopumgebung stopunpacked stopversion stopzeile stopzeilen \
-stopzeilenkorrektur stopzeilennumerierung stopzitat sub subject \
-subpagenumber subsection subsubject subsubsection subsubsubject \
-switchtorawfont sym symbol synchronisationsbalken synchronisieren \
-synonym tab teilegleitobjekt testcolumn testpage \
-tex textbreite texthoehe textlinie textreferenz \
-textvariable tief tiho tip tippedatei \
-tippen tippepuffer title tooltip txt \
-ueber ueberschrifttext uebersetzten umgebung umrahmt \
-unbekant underbar underbars unterformelnummer useXMLfilter \
-usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe \
-vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version \
-verweis verweisdatum verwendeJSscript verwendeURL verwendebefehl \
-verwendebloecke verwendeexteresdokument verwendeexterneabbildung verwendeexternedatei verwendeexternedateien \
-verwendeexternestonstueck verwendemodul verwendemodule verwendereferenzen verwendesymbole \
-verwendeurl vl volleswort von waehlebloeckeaus \
-waehlepapieraus waehleversionaus wechselezumfliesstext wiederholen wochentag \
-wohnort wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen \
-zeigefarbe zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung \
-zeigegitter zeigelayout zeigepalette zeigerahmen zeigestruts \
-zeigeumbruch zentriert ziffern zitat zitieren \
-zu zurbox zurseite zwischenraum
-
-keywordclass.context.en=\
-CAP Cap Caps Character \
-Characters MONTH Numbers Romannumerals WEEKDAY \
-WORD WORDS Word Words about \
-adaptlayout adding appendix arg at \
-atleftmargin atpage atrightmargin background backspace \
-blackrule blackrules blank bookmark bottomdistance \
-bottomheight bottomspace but button bypassblocks \
-cap chapter character characters chem \
-clip clonefield color colorbar colorvalue \
-column comment comparecolorgroup comparepalet completecombinedlist \
-completelistoffloats completelistofsorts completelistofsynonyms completepagenumber completeregister \
-component convertnumber copyfield correctwhitespace coupledocument \
-coupledregister couplemarking couplepage couplepaper coupleregister \
-crlf currentdate currentheadnumber cutspace date \
-decouplemarking decrementnumber define defineaccent defineblank \
-defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment \
-definebuffer definecharacter definecolor definecolorgroup definecolumnbreak \
-definecolumnset definecombination definecombinedlist definecommand defineconversion \
-definedescription definedfont defineenumeration definefield definefieldstack \
-definefiguresymbol definefloat definefont definefontfeature definefonthandling \
-definefontstyle definefontsynonym defineframed defineframedtext definehbox \
-definehead defineindentedtext defineinmargin defineinteractionmenu defineitemgroup \
-definelabel definelayer definelayout definelist definelogo \
-definemainfield definemakeup definemarking definemathalignment defineoutput \
-defineoverlay definepagebreak definepalet definepapersize defineparagraphs \
-defineplacement defineprofile defineprogram definerawfont definereference \
-definereferenceformat definereferencelist defineregister definerule definesection \
-definesectionblock definesorting definestartstop definestyle definesubfield \
-definesymbol definesynonyms definetabletemplate definetabulate definetext \
-definetextposition definetextvariable definetype definetypeface definetyping \
-defineversion description determineheadnumber determinelistcharacteristics determineregistercharacteristics \
-dimension disableinteractionmenu domicile donttest edgedistance \
-edgewidth emptylines enumeration environment externalfigure \
-fact field fieldstack fillinfield fillinline \
-fillinrules fillintext fitfield fixedspace fixedspaces \
-followprofile followprofileversion followversion footerdistance footerheight \
-footnote footnotetext forceblocks formulanumber fraction \
-framed framedtext from getbuffer getmarking \
-getnumber godown goto gotobox gotopage \
-graycolor greyvalue grid hairline head \
-headerdistance headerheight headlevel headnumber headsym \
-headtext hideblocks high hl immediatebetweenlist \
-immediatetolist in incrementnumber indentation indenting \
-inframed infull ininner inleft inleftedge \
-inleftmargin inline inmaframed inmargin inneredgedistance \
-inneredgewidth innermargindistance innermarginwidth inothermargin inouter \
-inright inrightedge inrightmargin installlanguage interactionbar \
-interactionbuttons interactionmenu item items its \
-keepblocks label labeling labels labeltext \
-language leftaligned leftedgedistance leftedgewidth leftmargindistance \
-leftmarginwidth leg linethickness listheight listlength \
-listsymbol listwidth loadsorts loadsynonyms logfields \
-lohi low maframed mainlanguage makeupheight \
-makeupwidth mapfontsize mar margindistance marginrule \
-margintext margintitle marginwidth marginword marking \
-markversion mathematics mediaeval menubutton midaligned \
-mirror month moveformula moveongrid movesidefloat \
-name navigating nextsection nocap nodimension \
-noheaderandfooterlines noindenting nolist nomarking nomoreblocks \
-nomorefiles nop nospace note notopandbottomlines \
-nowhitespace numberofsubpages numbers outeredgedistance outeredgewidth \
-outermargindistance outermarginwidth overbar overbars overstrike \
-overstrikes packed page pagedepth pagenumber \
-pageoffset pagereference paperheight paperwidth paragraph \
-part periods placebookmarks placecombinedlist placefloat \
-placefootnotes placeformula placeheadnumber placeheadtext placelegend \
-placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
-placelogos placeongrid placeontopofeachother placepagenumber placerawlist \
-placereferencelist placeregister placerule placesidebyside placesubformula \
-placetextvariable position positiontext printpaperheight printpaperwidth \
-processblocks processpage product program project \
-publication quotation quote ran redo \
-ref reference referral referraldate referring \
-register remark reservefloat reset resetmarking \
-resetnumber resettextcontent rightaligned rightedgedistance rightedgewidth \
-rightmargindistance rightmarginwidth romannumerals rotate scale \
-screen section seeregister selectblocks selectpaper \
-selectversion setnumber settextcontent settextvariable setupalign \
-setupanswerarea setuparranging setupbackground setupbackgrounds setupblackrules \
-setupblank setupblock setupbodyfont setupbodyfontenvironment setupbottom \
-setupbottomtexts setupbuffer setupbuttons setupcapitals setupcaption \
-setupcaptions setupclipping setupcolor setupcolors setupcolumns \
-setupcolumnset setupcolumnsetlines setupcolumnsetstart setupcombinations setupcombinedlist \
-setupcomment setupdescriptions setupenumerations setupexternalfigures setupfield \
-setupfields setupfillinlines setupfillinrules setupfloat setupfloats \
-setupfloatsplitting setupfonthandling setupfontsynonym setupfooter setupfootertexts \
-setupfootnotedefinition setupfootnotes setupforms setupformulae setupframed \
-setupframedtexts setuphead setupheader setupheadertexts setupheadnumber \
-setupheads setupheadtext setuphyphenmark setupindentedtext setupindenting \
-setupinmargin setupinteraction setupinteractionbar setupinteractionmenu setupinteractionscreen \
-setupinterlinespace setupinterlinespace2 setupitemgroup setupitemizations setupitems \
-setuplabeltext setuplanguage setuplayout setuplegend setuplinenumbering \
-setuplines setuplinewidth setuplist setuplistalternative setupmakeup \
-setupmarginblocks setupmarginrules setupmarking setupmathalignment setupnarrower \
-setupnumber setupnumbering setupoppositeplacing setupoutput setuppagecomment \
-setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper \
-setuppapersize setupparagraphnumbering setupparagraphs setupplacement setuppositioning \
-setupprofiles setupprograms setuppublications setupquote setupreferencelist \
-setupreferencing setupregister setuprotate setuprule setups \
-setupscreens setupsection setupsectionblock setupsorting setupspacing \
-setupstartstop setupstrut setupsubpagenumber setupsymbolset setupsynchronization \
-setupsynchronizationbar setupsynonyms setupsystem setuptab setuptables \
-setuptabulate setuptext setuptextposition setuptextrules setuptexttexts \
-setuptextvariable setupthinrules setuptolerance setuptop setuptoptexts \
-setuptype setuptyping setupunderbar setupurl setupversions \
-setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup \
-showexternalfigures showfields showframe showgrid showlayout \
-showmakeup showpalet showprint showsetups showstruts \
-showsymbolset someline somewhere sort space \
-splitfloat startalignment startbackground startbuffer startcoding \
-startcolor startcolumnmakeup startcolumns startcolumnset startcombination \
-startcomment startcomponent startdescription startdocument startenumeration \
-startenvironment startfact startfigure startfloattext startformula \
-startframedtext startglobal starthiding startinteractionmenu startitemgroup \
-startlegend startline startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startmarginrule startnamemakeup startnarrower startopposite startoverlay \
-startoverview startpacked startparagraph startpositioning startpostponing \
-startproduct startprofile startproject startquotation startraster \
-startregister startsymbolset startsynchronization starttable starttables \
-starttabulate starttext starttextrule starttyping startunpacked \
-startversion stopalignment stopbackground stopbuffer stopcoding \
-stopcolor stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
-stopcomment stopcomponent stopdescription stopdocument stopenumeration \
-stopenvironment stopfact stopfigure stopfloattext stopformula \
-stopframedtext stopglobal stophiding stopinteractionmenu stopitemgroup \
-stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
-stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \
-stopmarginrule stopnamemakeup stopnarrower stopopposite stopoverlay \
-stopoverview stoppacked stopparagraph stoppositioning stoppostponing \
-stopproduct stopprofile stopproject stopquotation stopraster \
-stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
-stoptext stoptextrule stoptyping stopunpacked stopversion \
-stretched sub subformulanumber subject subpagenumber \
-subsection subsubject subsubsection subsubsubject switchtobodyfont \
-switchtorawfont sym symbol symoffset synchronizationbar \
-synchronize synonym tab testcolumn testpage \
-tex textheight textreference textrule textvariable \
-textwidth thinrule thinrules title tooltip \
-topdistance topheight topspace totalnumberofpages translate \
-txt typ type typebuffer typefile \
-underbar underbars unitmeaning unknown useJSscripts \
-useURL useXMLfilter useblocks usecommands usedirectory \
-useencoding useexternaldocument useexternalfigure useexternalfile useexternalfiles \
-useexternalsoundtrack usemodule usemodules usereferences usespecials \
-usesymbols usetypescript usetypescriptfile useurl version \
-vl weekday whitespace wordright writebetweenlist \
-writetolist writetoreferencelist writetoregister
-
-keywordclass.context.fr=\
-CAP Cap Caps Caractere \
-Caracteres Chiffresromains JOURSEMAINE MOIS MOT \
-MOTS Mot Mots Numeros a \
-adaptedisposition affectenumero affectevariabletexte ajustechamp alaligne \
-alapage aligneadroite aligneagauche aligneaumilieu appendix \
-arg arriereplan atleftmargin atrightmargin baha \
-barrecouleur barreinteraction barresynchronisation bas bouton \
-boutonmenu boutonsinteraction but cacheblocs cap \
-caractere caracteres champ changepolicebrute changepolicecorps \
-chapter chem chiffresromains citation citer \
-clip clonechamp colonne comment commentaire \
-comparegroupecouleur comparepalette completecombinedlist completelistoffloats completelistofsorts \
-completelistofsynonyms completenumeropage completeregistre composant composeenalinea \
-concernant convertitnumero copitchamp corrigeespaceblanc couleur \
-couleurgrise coupledocument coupledregister couplemarquage couplepapier \
-coupleregistre crlf cutspace dactylographier dans \
-dansautremarge dansborddroit dansbordgauche dansdroite dansgauche \
-dansmarge dansmargedroite dansmargegauche date datecourante \
-daterecommandation de decouplemarquage decrementenumero definebodyfontDEF \
-definebodyfontREF definecombination definedfont definefontfeature definefonthandling \
-defineframed defineframedtext defineindentedtext defineitemgroup definemathalignment \
-defineplacement definetypeface definicaractere definit definitaccent \
-definitbloc definitblocsection definitbuffer definitcalque definitchamp \
-definitchampprincipal definitcommande definitconversion definitcouleur definitdactylo \
-definitdansmarge definitdemarrestoppe definitdescription definitdisposition definitenumeration \
-definitenvironnementpolicecorps definitetiquette definitflottant definitformatreference definitgroupecouleur \
-definithbox definitjeucolonne definitliste definitlisteimbriquee definitlistereference \
-definitlogo definitmakeup definitmarquage definitmenuinteraction definitnotepdp \
-definitpalette definitparagraphes definitpilechamp definitpolice definitpolicebrute \
-definitpolicecorps definitpositiontexte definitprofil definitprogramme definitreference \
-definitregistre definitregle definitrevetement definitsautdecolonne definitsautdepage \
-definitsection definitsortie definitsouschamp definitstyle definitstylepolice \
-definitsymbole definitsymbolefigure definitsynonymepolice definitsynonymes definittabulation \
-definittaillepapier definittete definittexte definittrametableau definittri \
-definittype definitvariabletexte definitversion definitvide demarrealignement \
-demarrearriereplan demarreblocmarge demarrecitation demarreciter demarrecodage \
-demarrecolonnes demarrecombinaison demarrecompoetroite demarrecomposant demarrecorrectionligne \
-demarrecouleur demarredegroupe demarredocument demarreenvironement demarrefigure \
-demarreglobal demarregroupe demarrejeucolonne demarrejeusymboles demarreligne \
-demarreligneregleetexte demarrelignes demarrelocal demarremakeup demarremargereglee \
-demarrenotespdplocales demarrenumerotationligne demarreopposition demarrepositionnement demarreproduit \
-demarreprofil demarreprojet demarreraster demarrerevetement demarresynchronisation \
-demarretableau demarretableaux demarretexte demarreversion demarrevuedensemble \
-deplaceformule deplacesurgrille description determinecaracteristiqueliste determinecaracteristiquesregistre \
-determinenumerotete dimension distancebord distanceborddroit distancebordgauche \
-distanceentete distanceinf distancemarge distancemargedroite distancemargegauche \
-distancepdp distancesup domicile echelle ecran \
-ecritdansliste ecritdanslistereference ecritentreliste ecritregistre el \
-element elements emptylines enumeration environement \
-espace espaceblanc espacefixe espaceinf espacesfixes \
-espacesup etiquette etiquettes etire fait \
-faitreference fichierdactylo figureexterne forceblocs fraction \
-framed framedtext gardeblocs getnumber grille \
-groupe haut hauteureditionpapier hauteurentete hauteurinf \
-hauteurliste hauteurmakeup hauteurpapier hauteurpdp hauteursup \
-hauteurtexte headsym hl immediatebetweenlist immediatetolist \
-inconnu incrementenumero indentation inframed infull \
-inhibemenuinteraction ininner inmframed inneredgedistance inneredgewidth \
-innermargindistance innermarginwidth inouter installelangue joursemaine \
-labeling labeltexte langue langueprincipale largeurbord \
-largeurborddroit largeurbordgauche largeureditionpapier largeurligne largeurliste \
-largeurmakeup largeurmarge largeurmargedroite largeurmargegauche largeurpapier \
-largeurtexte leg ligneh lignenoire ligneregleetexte \
-lignesnoires listesymbole llongueurliste loadsorts loadsynonyms \
-logchamp mapfontsize mar margereglee marquage \
-marquageversion marquepage mathematique mediaeval menuinteraction \
-mframed mois montrecadre montrechamps montrecouleur \
-montredisposition montreedition montreenvironnementpolicecorps montrefiguresexternes montregrille \
-montregroupecouleur montrejeusymboles montremakeup montrepalette montrepolicecorps \
-montrereglages montrestruts motdroit motmarge movesidefloat \
-name navigating nextsection niveautete nocap \
-nombredesouspages nombretotaldepages nommacro nop note \
-notepdp numeroformule numeropage numeros numerosousformule \
-numerotete numerotetecourant obtientmarquage oriente outeredgedistance \
-outeredgewidth outermargindistance outermarginwidth overbar overbars \
-overstrike overstrikes page pagedepth pagedouble \
-pageoffset paragraph part pasplusdeblocs pasplusdefichiers \
-periodes pilechamp placecoteacote placeflottant placeformule \
-placelegende placelesunsaudessusdesautres placeliste placelisteinmbriquee placelistereference \
-placelistoffloats placelistofsorts placelistofsynonyms placelogos placemarquespages \
-placenotespdp placenotespdplocales placenumeropage placenumerotete placerawlist \
-placeregistre placeregle placesousformule placesurgrille placetextetete \
-placevariabletexte position positionnetexte prendbuffer produit \
-programme projet publication qqpart ran \
-raz razmarquage raznumero recommandation ref \
-refait reference referencepage referencetexte reflete \
-register reglages reglealignement reglearrangement reglearriereplan \
-reglearriereplans reglebarreinteraction reglebarresynchronisation reglebloc regleblocmarge \
-regleblocsection regleboutons reglebuffer reglecapitales reglechamp \
-reglechamps regleclipping reglecolonnes reglecombinaisons reglecommentaire \
-reglecommentairepage reglecompoetroite reglecomposeenalinea reglecouleur reglecouleurs \
-regledactylo regledansmarge regledemarrestoppe regledescriptions regledisposition \
-regleecraninteraction regleecrans regleelements regleencadre regleentete \
-regleenumerations regleenvironnementpolicecorps regleepaisseurligne regleespaceblanc regleespacement \
-regleespacementinterligne reglefiguresexternes regleflottant regleflottants regleformulaires \
-regleformules reglegroupeselements regleinf regleinteraction regleintitule \
-regleintitules reglejeucolonne reglejeusymboles reglelabeltexte reglelangue \
-reglelegende reglelignes reglelignesnoires reglelignesreglestexte regleliste \
-reglelisteimbriquee reglelistereference reglemakeup reglemargereglee reglemarquage \
-reglemarquagehyphenation reglemenuinteraction reglenotepdp reglenumero reglenumeropage \
-reglenumerotation reglenumerotationligne reglenumerotationpage reglenumerotationparagraphe reglenumerotete \
-regleoriente reglepalette reglepapier regleparagraphes reglepdp \
-regleplacementopposition reglepolicecorps reglepositionnement reglepositiontexte regleprofils \
-regleprogrammes reglepublications reglereferencage regleregistre regleregle \
-regleremplitligne regleremplitlignesreglees reglesection regleseparationflottant reglesortie \
-reglesouslignage reglesousnumeropage reglestrut reglesup reglesynchronisation \
-reglesynonymes reglesysteme regletab regletableaux regletabulation \
-regletaillepapier regletete regletetes regletexte regletextesentete \
-regletextesinf regletextespdp regletextessup regletextestexte regletextetete \
-regletolerance regletraitsfins regletransitionspage regletri regletype \
-regleurl reglevariabletexte regleversions remplitchamp remplitligne \
-remplitlignesreglees remplittexte reservefloat resettextcontent retourarriere \
-sansalinea sansdimension sansespace sansespaceblanc sanslignesenteteetpdp \
-sanslignessupetinf sansliste sansmarquage sanstest sauteblocs \
-section seeregister selectionneblocs selectionnepapier selectionneversion \
-sensunite separeflottant settext setupanswerarea setupcolumnsetlines \
-setupcolumnsetstart setupfonthandling setupfontsynonym setupframedtexts setupindentedtext \
-setupinterlinespace2 setupitemgroup setuplistalternative setupmathalignment setupplacement \
-sort sousnumeropage startalignment startarriereplan startbuffer \
-startcitation startcolumnmakeup startcolumns startcombination startcomment \
-startcomposant startcouleur startdescription startdocument startenumeration \
-startenvironement startfait startfigure startfloattext startformula \
-startframedtext startgroupe starthiding startitemgroup startlegend \
-startligneregleetexte startline startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmargereglee \
-startmarginblock startmenuinteraction startnamemakeup startnarrower startopposite \
-startoverlay startoverview startparagraph startpositioning startpostponing \
-startproduit startprofile startprojet startregister startsymbolset \
-startsynchronization starttable starttables starttabulate starttyping \
-startunpacked startversion stopalignment stoparriereplan stopbuffer \
-stopcitation stopcolumnmakeup stopcolumns stopcombination stopcomment \
-stopcompoetroite stopcomposant stopcouleur stopdescription stopdocument \
-stopenumeration stopenvironement stopfait stopfigure stopfloattext \
-stopformula stopframedtext stopgroupe stophiding stopitemgroup \
-stoplegend stopligneregleetexte stopline stoplinecorrection stoplinenumbering \
-stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmargereglee stopmarginblock stopmenuinteraction stopnamemakeup stopnarrower \
-stopopposite stopoverlay stopoverview stopparagraph stoppealignement \
-stoppearriereplan stoppeblocmarge stoppecitation stoppecodage stoppecolonnes \
-stoppecombinaison stoppecomposant stoppecorrectionligne stoppecouleur stoppedegroupe \
-stoppedocument stoppeenvironement stoppeglobal stoppegroupe stoppejeucolonne \
-stoppeligne stoppeligneregleetexte stoppelignes stoppelocal stoppemakeup \
-stoppemargereglee stoppenotespdplocales stoppenumerotationligne stoppeopposition stoppepositionnement \
-stoppeproduit stoppeprofil stoppeprojet stopperaster stopperevetement \
-stoppesynchronisation stoppetableau stoppetableaux stoppetexte stoppeversion \
-stoppevuedensemble stoppositioning stoppostponing stopproduit stopprofile \
-stopprojet stopsymbolset stopsynchronization stoptable stoptables \
-stoptabulate stoptyping stopunpacked stopversion sub \
-subject subsection subsubject subsubsection subsubsubject \
-suggestion suivantprofil suivantversion suivantversionprofil sym \
-symbole synchronise synonym tab tapebuffer \
-testcolumn testpage tete tex textemarge \
-textenotepdp textetete title titremarge traduire \
-traiteblocs traitepage traitfin traitsfins txt \
-typ underbar underbars uneligne useXMLfilter \
-usedirectory usetypescript usetypescriptfile utiliseJSscripts utiliseURL \
-utiliseblocs utilisechemin utilisecommandes utilisedocumentexterne utiliseencodage \
-utilisefichierexterne utilisefichiersexternes utilisefigureexterne utilisemodule utilisemodules \
-utilisepsiteaudioexterne utilisereferences utilisespecialites utilisesymboles utiliseurl \
-va vaalaboite vaalapage vaenbas valeurcouleur \
-valeurgris variabletexte version vide vl
-
-keywordclass.context.it=\
-CAP Cap Caps GIORNOSETTIMANA \
-Lettera Lettere MESE Numeri Numeriromani \
-PAROLA PAROLE Parola Parole accoppiacarta \
-accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo \
-adattalayout al allineacentro allineadestra allineasinistra \
-altezzacarta altezzacartastampa altezzacima altezzaelenco altezzafondo \
-altezzaintestazione altezzamakeup altezzapdp altezzatesto ambiente \
-ampiezzabordo ampiezzabordodestro ampiezzabordosinistro ampiezzacarta ampiezzacartastampa \
-ampiezzaelenco ampiezzamakeup ampiezzamargine ampiezzamarginedestro ampiezzamarginesinistro \
-ampiezzatesto ap apagina appendix arg \
-atleftmargin atrightmargin barracolori barrainterazione barrasincronizzazione \
-bastablocchi bastafile cambiaafontdeltesto campi camporiempimento \
-cap capello chapter chim circondato \
-citazione clip clonacampo colonna colore \
-coloregrigio comment commento completecombinedlist completelistoffloats \
-completelistofsorts completelistofsynonyms completeregister componenet confrontagruppocolori \
-confrontatavolozza convertinumero copiacampo correggispaziobianco coupledregister \
-crlf cutspace da daqualcheparte data \
-datadioggi datareferral decrementnumber definebodyfontDEF definebodyfontREF \
-definecolumnbreak definecombination definedfont definefontfeature definefonthandling \
-defineindentedtext defineinmargin defineitemgroup definelayer definemathalignment \
-definepagebreak defineplacement definetypeface definisci definisciaccento \
-definisciambientefontdeltesto definisciblocco definiscibloccosezione definiscibuffer definiscicampo \
-definiscicampoprincipale definiscicapoversi definiscicarattere definiscicolore definiscicomando \
-definisciconversione definiscidescrizione definiscidimensionicarta definiscielenco definiscielencocombinato \
-definiscienumerazione definiscietichetta definiscifigurasimbolo definiscifont definiscifontdeltesto \
-definiscifontgrezzo definisciformatoriferimento definiscigruppocolonne definiscigruppocolori definiscihbox \
-definisciincorniciato definisciiniziatermina definiscilayout definiscilinea definiscilistariferimenti \
-definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimodellotabella \
-definiscioggettomobile definisciordinamento definiscioutput definisciposizionetesto definisciprofilo \
-definisciprogramma definisciregistro definisciriferimento definiscirigovuoto definiscisezione \
-definiscisimbolo definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione \
-definiscistackcampi definiscistile definiscistilefont definiscitabulato definiscitavolozza \
-definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping \
-definiscivariabiletesto definisciversion description determinacaratteristicheregistro determinacarattersticheelenco \
-determinanumerotesta dimensione disabilitamenuinterazione distanzabordo distanzabordodestro \
-distanzabordosinistro distanzacima distanzafondo distanzaintestazione distanzamargine \
-distanzamarginedestro distanzamarginesinistro distanzapdp domicilio el \
-elaborablocchi elaborapagina elementi elemento emptylines \
-enumeration etichetta etichette fatto figuraesterna \
-fondo forzablocchi framedtext frazione getnumber \
-giornosettimana griglia headsym hl ignoto \
-immediatebetweenlist immediatetolist impaccato impostaallineamento impostaambientefontdeltesto \
-impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco \
-impostabloccosezione impostabuffer impostacampi impostacampo impostacapoversi \
-impostacaption impostacaptions impostacima impostaclippling impostacolonne \
-impostacolore impostacolori impostacombinazioni impostacommento impostacommentopagina \
-impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelencazioni \
-impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo \
-impostafontdeltesto impostaforms impostaformule impostagruppocolonne impostaincorniciato \
-impostainiziatermina impostainmargine impostainstestazione impostainterazione impostainterlinea \
-impostalayout impostalegenda impostalinea impostalineemargine impostalineenere \
-impostalineeriempimento impostalineesottili impostalineetesto impostalingua impostalistariferimenti \
-impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione impostamenzione \
-impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina impostanumerazionerighe \
-impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile \
-impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto \
-impostaposizionamento impostaposizionamentoopposti impostaposizionetesto impostaprofili impostaprogrammi \
-impostapubblicazioni impostapulsanti impostaregistro impostarientro impostariferimento \
-impostarighe impostarigheriempimento impostarigovuoto impostarotazione impostaschermi \
-impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione impostasfondi \
-impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea \
-impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab \
-impostatabelle impostatabulato impostatavolozza impostatesta impostateste \
-impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni impostatestipdp \
-impostatesto impostatestoetichette impostatestointestazioni impostatestotesti impostatolleranza \
-impostatransizionepagina impostatype impostatyping impostaurl impostavariabiletesto \
-impostaversioni impostazioni in inaltromargine incorniciato \
-incrementanumero indentation indestra ininner iniziaallineamento \
-iniziaambiente iniziabloccomargine iniziacitazione iniziacodifica iniziacolonne \
-iniziacolore iniziacombinazione iniziacomponente iniziacorrezioneriga iniziadocumento \
-iniziafigura iniziaglobale iniziagruppocolonne iniziaimpaccato inizialineamargine \
-inizialineatesto inizialocale iniziamakeup inizianotepdplocali inizianumerazionerighe \
-iniziaopposto iniziaoverview iniziapiustretto iniziaposizionamento iniziaprodotto \
-iniziaprofilo iniziaprogetto iniziaraster iniziariga iniziarighe \
-iniziasetsimboli iniziasfondo iniziasincronizzazione iniziasovrapposizione iniziatabella \
-iniziatabelle iniziatesto iniziaunpacked iniziaversione inlatodestro \
-inlatosinistro inmaframed inmargine inmarginedestro inmarginesinistro \
-inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
-inriga insinistra installalingua intorno labeling \
-leg lettera lettere lineamargine lineanera \
-lineasottile lineatesto lineenere lineeriempimento lineesottili \
-lingua linguaprincipale listsymbol livellotesta loadsorts \
-loadsynonyms logcampi lunghezzaelenco maframed mapfontsize \
-mar marcatura marcaversione matematica mediaeval \
-menuinterattivo menzione mese mettielenco mettielencocombinato \
-mettifiancoafianco mettiformula mettiingriglia mettilegenda mettilinea \
-mettiloghi mettinotepdp mettinotepdplocali mettinumeropagina mettiregistro \
-mettisegnalibro mettisottoformula mettiunosullaltro mettivariabiletesto mostraambientefontdeltesto \
-mostracampi mostracolore mostracornice mostrafiguresterne mostrafontdeltesto \
-mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup \
-mostrasetsimboli mostrastampa mostrastruts mostratavolozza movesidefloat \
-name nascondiblocchi navigating nextsection nientedimensioni \
-nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura nienterientro \
-nientespazio nientespaziobianco nocap nome nomeunita \
-nop nota notapdp notest numberofsubpages \
-numeri numeriromani numeroformula numeropagina numeropaginacompleto \
-numerosottoformula numerotesta numerotestacorrente numerototaledipagine outeredgedistance \
-outeredgewidth outermargindistance outermarginwidth overbar overbars \
-overstrike overstrikes pagedepth pageoffset pagina \
-paragraph paroladestra parolainmargine part passaafontgrezzo \
-ped pedap perlungo placefloat placelistoffloats \
-placelistofsorts placelistofsynonyms placerawlist placereferencelist posizionanumerotesta \
-posizionatesto posizionatestotesta posizione prendibuffer prendimarcatura \
-prodotto progetto programma pubblicazione pulsante \
-pulsantemenu pulsantinterazione punti qualcheriga ran \
-referral referring register reimposta reimpostamarcatura \
-reservefloat resetnumber resettextcontent rientro rif \
-rifai riferimento riferimentopagina riferimentotesto riflessione \
-rigariempimento rigovuoto ruota saltablocchi scala \
-schermo scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro \
-section seeregister segnalibro seguiprofilo seguiversione \
-seguiversioneprofilo selezionablocchi selezionacarta selezionaversione separamarcatura \
-setnumber settext setupanswerarea setupcolumnsetlines setupcolumnsetstart \
-setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setuppaper setupplacement setvariabiletesto \
-sfondo sim simbolo sincronizza sort \
-spazifissi spazio spaziobianco spaziocima spaziodietro \
-spaziofisso spaziofondo spessoreriga spezzaoggettomobile spostaagriglia \
-spostaformula stackcampi startalignment startambiente startbuffer \
-startcitazione startcolore startcolumnmakeup startcolumns startcombination \
-startcomment startcomponenet startdescription startdocument startenumeration \
-startfatto startfigure startfloattext startformula startframedtext \
-starthiding startimpaccato startitemgroup startlegend startline \
-startlineamargine startlineatesto startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startmenuinterattivo startnamemakeup startnarrower startopposite startoverlay \
-startoverview startparagraph startpositioning startpostponing startprodotto \
-startprofile startprogetto startregister startsfondo startsymbolset \
-startsynchronization starttable starttables starttabulate starttyping \
-startunpacked startversione stirato stopalignment stopambiente \
-stopbuffer stopcitazione stopcolore stopcolumnmakeup stopcolumns \
-stopcombination stopcomment stopcomponenet stopdescription stopdocument \
-stopenumeration stopfatto stopfigure stopfloattext stopformula \
-stopframedtext stophiding stopimpaccato stopitemgroup stoplegend \
-stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering \
-stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmarginblock stopmenuinterattivo stopnamemakeup stopnarrower stopopposite \
-stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
-stopprodotto stopprofile stopprogetto stopsfondo stopsymbolset \
-stopsynchronization stoptable stoptables stoptabulate stoptyping \
-stopunpacked stopversione sub subject subpagenumber \
-subsection subsubject subsubsection subsubsubject synonym \
-tab terminaallineamento terminaambiente terminabloccomargine terminacitazione \
-terminacodifica terminacolonne terminacolore terminacombinazione terminacomponente \
-terminacorrezioneriga terminadocumento terminaglobale terminagruppocolonne terminaimpaccato \
-terminalineamargine terminalineatesto terminalocale terminamakeup terminanotepdplocali \
-terminanumerazionerighe terminaopposto terminaoverview terminapiustretto terminaposizionamento \
-terminaprodotto terminaprofili terminaprogetto terminaraster terminariga \
-terminarighe terminasfondo terminasincronizzazione terminasovrapposizione terminatabella \
-terminatabelle terminatesto terminaunpacked terminaversioni testa \
-testcolumn testoetichetta testoinmargine testoinstestazioni testonotapdp \
-testoriempimento testpage tex tieniblocchi title \
-titoloinmargine tooltip traduci txt typ \
-type typebuffer typefile underbar underbars \
-usaJSscripts usaURL usablocco usacartella usacodifica \
-usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna usafileesterni \
-usafileesterno usamoduli usamodulo usariferimenti usasimboli \
-usaspecialita usaurl useXMLfilter usedirectory usetypescript \
-usetypescriptfile vaia vaiabox vaiapagina vaigiu \
-valorecolore valoregrigio variabiletesto versione vl
-
-keywordclass.context.nl=\
-CAP Cap Caps Cijfers \
-KAP Kap Kaps Letter Letters \
-MAAND Romeins WEEKDAG WOORD WOORDEN \
-Woord Woorden aantalsubpaginas about achtergrond \
-appendix arg bepaalkopnummer bepaallijstkenmerken bepaalregisterkenmerken \
-betekenis binnenmargeafstand binnenmargebreedte binnenrandafstand binnenrandbreedte \
-blanko blokje blokjes blokkeerinteractiemenu bodemwit \
-bookmark bovenafstand bovenhoogte breuk buitenmargeafstand \
-buitenmargebreedte buitenrandafstand buitenrandbreedte but button \
-cap chapter chem cijfers citaat \
-citeer clip comment completecombinedlist completelistoffloats \
-completelistofsorts completelistofsynonyms converteernummer copieerveld corrigeerwitruimte \
-coupledregister crlf datum definebodyfontDEF definebodyfontREF \
-definedfont definefontfeature definefonthandling definerawfont definetypeface \
-definieer definieeraccent definieeralineas definieerbeeldmerk definieerblanko \
-definieerblok definieerbuffer definieercombinatie definieercommando definieerconversie \
-definieerfiguursymbool definieerfont definieerfontstijl definieerfontsynoniem definieerhbox \
-definieerhoofdveld definieeringesprongentext definieerinmarge definieerinteractiemenu definieeritemgroep \
-definieerkadertekst definieerkarakter definieerkleur definieerkleurgroep definieerkolomgroep \
-definieerkolomovergang definieerkop definieerkorps definieerkorpsomgeving definieerlayer \
-definieerlayout definieerletter definieerlijn definieerlijst definieermarkering \
-definieeromlijnd definieeropmaak definieeroverlay definieerpaginaovergang definieerpalet \
-definieerpapierformaat definieerplaats definieerplaatsblok definieerprofiel definieerprogramma \
-definieerreferentie definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst \
-definieersectie definieersectieblok definieersorteren definieerstartstop definieersubveld \
-definieersymbool definieersynoniemen definieertabelvorm definieertabulatie definieertekst \
-definieertekstpositie definieertekstvariabele definieertype definieertypen definieeruitvoer \
-definieerveld definieerveldstapel definieerversie definieerwiskundeuitlijnen description \
-dimensie directnaarlijst directtussenlijst doordefinieren doorlabelen \
-doornummeren dunnelijn dunnelijnen eenregel enumeration \
-ergens externfiguur forceerblokken formulenummer framedtext \
-gebruikJSscripts gebruikURL gebruikXMLfilter gebruikblokken gebruikcommandos \
-gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur gebruikexterngeluidsfragment \
-gebruikgebied gebruikmodule gebruikmodules gebruikreferenties gebruikspecials \
-gebruiksymbolen gebruiktypescript gebruiktypescriptfile gebruikurl geenblokkenmeer \
-geenbovenenonderregels geendimensie geenfilesmeer geenhoofdenvoetregels geenlijst \
-geenmarkering geenspatie geentest geenwitruimte geg \
-grijskleur grijswaarde haalbuffer haalmarkering haalnummer \
-haarlijn handhaafblokken herhaal hl hoofdafstand \
-hoofdhoogte hoofdtaal hoog huidigedatum huidigekopnummer \
-in inanderemarge inbinnen inbuiten indentation \
-inlijnd inlinker inlinkermarge inlinkerrand inmarge \
-inrechter inrechtermarge inrechterrand inregel inspringen \
-installeertaal instellingen interactiebalk interactiebuttons interactiemenu \
-invullijnen invulregel invultekst invulveld inwilijnd \
-items its kantlijn kap kenmerk \
-kenmerkdatum kentekstvariabeletoe kleur kleurenbalk kleurwaarde \
-kloonveld kolom kop kopniveau kopnummer \
-koppeldocument koppelmarkering koppelpagina koppelpapier koppelregister \
-kopsym koptekst kopwit laag label \
-labeling labels labeltekst laho leg \
-legeregels letter letters lijndikte lijstbreedte \
-lijsthoogte lijstlengte lijstsymbool linkermargeafstand linkermargebreedte \
-linkerrandafstand linkerrandbreedte loadsorts loadsynonyms maand \
-mapfontsize mar margeafstand margebreedte margetekst \
-margetitel margewoord markeer markeerversie mediaeval \
-menubutton naam naar naarbox naarpagina \
-name navigerend nextsection nietinspringen nocap \
-nokap noot nop omgeving omlaag \
-omlijnd onbekend onderafstand onderdeel onderhoogte \
-ontkoppelmarkering op opelkaar oplinkermarge oppagina \
-oprechtermarge overbar overbars overstrike overstrikes \
-pagina paginadiepte paginanummer paginaoffset paginareferentie \
-papierbreedte papierhoogte paragraph part paslayoutaan \
-passeerblokken passendveld plaatsbeeldmerken plaatsbookmarks plaatsformule \
-plaatskopnummer plaatskoptekst plaatslegenda plaatslijn plaatslijst \
-plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatspaginanummer \
-plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsruwelijst plaatssamengesteldelijst \
-plaatssubformule plaatstekstvariabele plaatsvoetnoten placelistoffloats placelistofsorts \
-placelistofsynonyms positioneer positioneertekst printpapierbreedte printpapierhoogte \
-produkt programma projekt publicatie punten \
-ran randafstand randbreedte rechtermargeafstand rechtermargebreedte \
-rechterrandafstand rechterrandbreedte ref refereer referentie \
-regellinks regelmidden regelrechts register registreervelden \
-reservefloat reset resetmarkering resetnummer resettekstinhoud \
-resettextcontent romeins rooster roteer rugwit \
-schaal scherm schrijfnaarlijst schrijfnaarreferentielijst schrijfnaarregister \
-schrijftussenlijst section seeregister selecteerblokken selecteerpapier \
-selecteerversie setnummer setupfonthandling setupfontsynonym setupinterlinespace2 \
-setuplistalternative snijwit som sort spatie \
-spiegel splitsplaatsblok startachtergrond startalignment startbuffer \
-startcitaat startcodering startcolumns startcombinatie startcombination \
-startcomment startdescription startdocument startenumeration startfigure \
-startfiguur startfloattext startformula startframedtext startgeg \
-startglobaal starthiding startinteractiemenu startitemgroup startkantlijn \
-startkleur startkolomgroep startkolommen startkolomopmaak startlegend \
-startline startlinecorrection startlinenumbering startlines startlocal \
-startlocalenvironment startlocalfootnotes startlokaal startlokalevoetnoten startmakeup \
-startmargeblok startmarginblock startnaast startnamemakeup startnarrower \
-startomgeving startonderdeel startopelkaar startopmaak startopposite \
-startoverlay startoverview startoverzicht startparagraph startpositioneren \
-startpositioning startpostponing startprodukt startprofiel startprofile \
-startprojekt startraster startregel startregelcorrectie startregelnummeren \
-startregels startregister startsmaller startsymbolset startsymboolset \
-startsynchronisatie startsynchronization starttabel starttabellen starttable \
-starttables starttabulate starttekst starttekstlijn starttyping \
-startuitlijnen startunpacked startvanelkaar startversie stelachtergrondenin \
-stelachtergrondin stelalineasin stelantwoordgebiedin stelarrangerenin stelblankoin \
-stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin \
-stelboventekstenin stelbufferin stelbuttonsin stelciterenin stelclipin \
-stelcombinatiesin stelcommentaarin steldoordefinierenin steldoornummerenin steldunnelijnenin \
-stelexternefigurenin stelformulesin stelformulierenin stelhoofdin stelhoofdtekstenin \
-stelingesprongentextin stelinmargein stelinspringenin stelinteractiebalkin stelinteractiein \
-stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin \
-stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin \
-stelkleurenin stelkleurin stelkolomgroepin stelkolomgroepregelsin stelkolomgroepstartin \
-stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin \
-stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin \
-stellegendain stellijndiktein stellijnin stellijstin stelmargeblokkenin \
-stelmarkeringin stelnaastplaatsenin stelnummerenin stelnummerin stelomlijndin \
-stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelopsommingenin \
-stelpaginacommentaarin stelpaginanummerin stelpaginanummeringin stelpaginaovergangenin stelpaletin \
-stelpapierformaatin stelpapierin stelparagraafnummerenin stelplaatsblokin stelplaatsblokkenin \
-stelplaatsbloksplitsenin stelplaatsin stelpositionerenin stelprofielenin stelprogrammasin \
-stelpublicatiesin stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin \
-stelregelsin stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin \
-stelsectiein stelsmallerin stelsorterenin stelspatieringin stelstartstopin \
-stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin stelsynchronisatiein \
-stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin \
-steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin steltekstpositiein \
-stelteksttekstenin steltekstvariabelein steltolerantiein steltypein steltypenin \
-steluitlijnenin steluitvoerin stelurlin stelveldenin stelveldin \
-stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin \
-stelwiskundeuitlijnenin stelwitruimtein stopachtergrond stopalignment stopbuffer \
-stopcitaat stopcodering stopcolumns stopcombinatie stopcombination \
-stopcomment stopdescription stopdocument stopenumeration stopfigure \
-stopfloattext stopformula stopframedtext stopgeg stopglobaal \
-stophiding stopinteractiemenu stopitemgroup stopkantlijn stopkleur \
-stopkolomgroep stopkolommen stopkolomopmaak stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
-stoplocalfootnotes stoplokaal stoplokalevoetnoten stopmakeup stopmargeblok \
-stopmarginblock stopnaast stopnamemakeup stopnarrower stopomgeving \
-stoponderdeel stopopelkaar stopopmaak stopopposite stopoverlay \
-stopoverview stopoverzicht stopparagraph stoppositioneren stoppositioning \
-stoppostponing stopprodukt stopprofiel stopprofile stopprojekt \
-stopraster stopregel stopregelcorrectie stopregelnummeren stopregels \
-stopsmaller stopsymbolset stopsynchronisatie stopsynchronization stoptabel \
-stoptabellen stoptable stoptables stoptabulate stoptekst \
-stoptekstlijn stoptyping stopuitlijnen stopunpacked stopvanelkaar \
-stopversie sub subformulenummer subject subpaginanummer \
-subsection subsubject subsubsection subsubsubject suggestie \
-switchnaarkorps switchtorawfont sym symbool symoffset \
-synchronisatiebalk synchroniseer synonym taal tab \
-tekstbreedte teksthoogte tekstlijn tekstreferentie tekstvariabele \
-testkolom testpagina tex title toelichting \
-toonexternefiguren toongrid tooninstellingen toonkader toonkleur \
-toonkleurgroep toonkorps toonkorpsomgeving toonlayout toonopmaak \
-toonpalet toonprint toonstruts toonsymboolset toonvelden \
-totaalaantalpaginas txt typ type typebuffer \
-typefile uit uitgerekt underbar underbars \
-usecodering usedirectory vastespatie vastespaties veld \
-veldstapel verbergblokken vergelijkkleurgroep vergelijkpalet verhoognummer \
-verlaagnummer verplaatsformule verplaatsopgrid verplaatszijblok versie \
-vertaal verwerkblokken verwerkpagina vl voetafstand \
-voethoogte voetnoot voetnoottekst volgprofiel volgprofielversie \
-volgversie volledigepaginanummer volledigregister voluit weekdag \
-wilijnd wiskunde witruimte woonplaats woordrechts \
-zetbreedte zethoogte
-
-keywordclass.context.pe=\
-CAP Cap Caps Character \
-Characters MONTH Numbers Romannumerals WEEKDAY \
-WORD WORDS Word Words appendix \
-cap chapter chem comment completecombinedlist \
-completelistoffloats completelistofsorts completelistofsynonyms coupledregister crlf \
-definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling \
-defineindentedtext definetypeface description enumeration framedtext \
-indentation inmframed its labeling loadsorts \
-loadsynonyms mapfontsize mediaeval mframed name \
-nextsection nocap overbar overbars overstrike \
-overstrikes paragraph part placelistoffloats placelistofsorts \
-placelistofsynonyms ran register reservefloat resettextcontent \
-section seeregister setupanswerarea setupcapitals setupfonthandling \
-setupfontsynonym setupindentedtext setupinterlinespace2 setuplistalternative setupurl \
-sort startalignment startbuffer startcolumns startcombination \
-startcomment startdescription startdocument startenumeration startfigure \
-startfloattext startformula startframedtext starthiding startitemgroup \
-startlegend startline startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startnamemakeup startnarrower startopposite startoverlay startoverview \
-startparagraph startpositioning startpostponing startprofile startraster \
-startregister startsymbolset startsynchronization starttable starttables \
-starttabulate starttyping startunpacked startتولید startحقیقت \
-startخط‌حاشیه startخط‌متن startرنگ startفشرده startمحیط \
-startمنوی‌پانل startمولفه startنسخه startنقل‌قول startپروژه \
-startپس‌زمینه stopalignment stopbuffer stopcolumns stopcombination \
-stopcomment stopdescription stopdocument stopenumeration stopfigure \
-stopfloattext stopformula stopframedtext stophiding stopitemgroup \
-stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
-stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \
-stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \
-stopparagraph stoppositioning stoppostponing stopprofile stopraster \
-stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
-stoptyping stopunpacked stopتولید stopحقیقت stopخط‌حاشیه \
-stopخط‌متن stopرنگ stopفشرده stopمحیط stopمنوی‌پانل \
-stopمولفه stopنسخه stopنقل‌قول stopپروژه stopپس‌زمینه \
-sub subject subsection subsubject subsubsection \
-subsubsubject synonym title tooltip txt \
-typ underbar underbars useJSscripts useURL \
-useXMLfilter usedirectory useurl آفست‌صفحه آیتم \
-آیتمها آینه اجباربلوکها ارتفاع‌آرایش ارتفاع‌بالا \
-ارتفاع‌برگ ارتفاع‌ته‌برگ ارتفاع‌خط ارتفاع‌سربرگ ارتفاع‌متن \
-ارتفاع‌پایین از ازکارانداختن‌منوی‌پانل استفاده‌بلوکها استفاده‌دستخط‌تایپ \
-استفاده‌رمزینه استفاده‌شکل‌خارجی استفاده‌فرمانها استفاده‌قطعه‌موزیک‌خارجی استفاده‌مدول \
-استفاده‌مدولها استفاده‌مرجعها استفاده‌مسیر استفاده‌نمادها استفاده‌نوشتارخارجی \
-استفاده‌ویژگیها استفاده‌پرونده‌خارجی استفاده‌پرونده‌دستخط‌تایپ استفاده‌پرونده‌های‌خارجی اعدادلاتین \
-افزودن اما امتحان‌نکن انتخاب‌برگ انتخاب‌بلوکها \
-انتخاب‌نسخه انتقال‌به‌توری انتقال‌فرمول انتقال‌کنار‌شناور انجام‌دوباره \
-بارگذاریها بارگذاری‌آرایش بارگذاری‌آیتمها بارگذاری‌ارجاع بارگذاری‌اندازه‌برگ \
-بارگذاری‌باریکتر بارگذاری‌بافر بارگذاری‌بالا بارگذاری‌بخش بارگذاری‌بردباری \
-بارگذاری‌برنامه‌ها بارگذاری‌برگ بارگذاری‌بست بارگذاری‌بلوک بارگذاری‌بلوکهای‌حاشیه \
-بارگذاری‌بلوک‌بخش بارگذاری‌تایپ بارگذاری‌تایپ‌کردن بارگذاری‌تب بارگذاری‌ترتیب \
-بارگذاری‌ترکیب‌ها بارگذاری‌تطابق بارگذاری‌تعریف‌پانوشت بارگذاری‌تنظیم بارگذاری‌تنظیم‌ریاضی \
-بارگذاری‌ته‌برگ بارگذاری‌تورفتگی بارگذاری‌توضیح بارگذاری‌توضیح‌صفحه بارگذاری‌ثبت \
-بارگذاری‌جانشانی بارگذاری‌جدولها بارگذاری‌جدول‌بندی بارگذاری‌خالی بارگذاری‌خروجی \
-بارگذاری‌خط بارگذاری‌خطها بارگذاری‌خطهای‌حاشیه بارگذاری‌خطهای‌سیاه بارگذاری‌خطهای‌متن \
-بارگذاری‌خطهای‌مجموعه‌ستون بارگذاری‌خطها‌ی‌نازک بارگذاری‌درج‌درخطها بارگذاری‌درج‌مخالف بارگذاری‌درون‌حاشیه \
-بارگذاری‌دوران بارگذاری‌دکمه‌ها بارگذاری‌راهنما بارگذاری‌رنگ بارگذاری‌رنگها \
-بارگذاری‌زبان بارگذاری‌ستونها بارگذاری‌سر بارگذاری‌سربرگ بارگذاری‌سرها \
-بارگذاری‌سیستم بارگذاری‌شرح بارگذاری‌شرحها بارگذاری‌شروع‌مجموعه‌ستون بارگذاری‌شروع‌پایان \
-بارگذاری‌شماره بارگذاری‌شماره‌زیرصفحه بارگذاری‌شماره‌سر بارگذاری‌شماره‌صفحه بارگذاری‌شماره‌گذاری \
-بارگذاری‌شماره‌گذاریها بارگذاری‌شماره‌گذاری‌صفحه بارگذاری‌شماره‌گذاری‌پاراگراف بارگذاری‌شماره‌‌گذاری‌خط بارگذاری‌شناور \
-بارگذاری‌شناورها بارگذاری‌شکافتن‌شناورها بارگذاری‌شکلهای‌خارجی بارگذاری‌طرح بارگذاری‌طرح‌بندی \
-بارگذاری‌عرض‌خط بارگذاری‌فاصله‌بین‌خط بارگذاری‌فرمولها بارگذاری‌فضای‌سفید بارگذاری‌فضا‌گذاری \
-بارگذاری‌قالبی بارگذاری‌قلم‌متن بارگذاری‌لوح بارگذاری‌لیست بارگذاری‌لیست‌ترکیبی \
-بارگذاری‌لیست‌مرجع بارگذاری‌مترادفها بارگذاری‌متغیر‌متن بارگذاری‌متن بارگذاری‌متنهای‌بالا \
-بارگذاری‌متن‌سر بارگذاری‌متن‌سربرگ بارگذاری‌متن‌قالبی بارگذاری‌متن‌متنها بارگذاری‌متن‌پانوشت \
-بارگذاری‌متن‌پایین بارگذاری‌مجموعه‌ستون بارگذاری‌مجموعه‌نماد بارگذاری‌محیط‌قلم‌متن بارگذاری‌منوی‌پانل \
-بارگذاری‌مکان‌متن بارگذاری‌مکان‌گذاری بارگذاری‌میدان بارگذاری‌میدانها بارگذاری‌میله‌تطابق \
-بارگذاری‌میله‌زیر بارگذاری‌میله‌پانل بارگذاری‌نسخه‌ها بارگذاری‌نشانه‌شکستن بارگذاری‌نشانه‌گذاری \
-بارگذاری‌نشرها بارگذاری‌نقل بارگذاری‌پاراگرافها بارگذاری‌پانل بارگذاری‌پانوشتها \
-بارگذاری‌پایین بارگذاری‌پرده‌ها بارگذاری‌پرده‌پانل بارگذاری‌پروفایلها بارگذاری‌پرکردن‌خطها \
-بارگذاری‌پس‌زمینه بارگذاری‌پس‌زمینه‌ها بارگذاری‌چیدن بارگذاری‌گذارصفحه بارگذاری‌گروههای‌آیتم \
-بارگذاری‌گروه‌آیتم بازنشانی بازنشانی‌شماره بازنشانی‌متن بازنشانی‌نشانه‌گذاری \
-باگذاری‌متن‌برچسب بدون‌بعد بدون‌بلوکهای‌بیشتر بدون‌تورفتگی بدون‌خط‌بالاوپایین \
-بدون‌خط‌سروته‌برگ بدون‌فایلهای‌بیشتر بدون‌فضا بدون‌فضای‌سفید بدون‌لیست \
-بدون‌نشانه‌گذاری برنامه بروبه بروبه‌جعبه بروبه‌صفحه \
-بروپایین برچسب برچسبها بعد بلند \
-بلوکهای‌پردازش بلوکها‌پنهان بنویس‌بین‌لیست بنویس‌درثبت بنویس‌درلیست‌مرجع \
-بنویس‌در‌لیست تاریخ تاریخ‌جاری تاریخ‌رجوع تایپ \
-تایپ‌بافر تایپ‌پرونده تب ترجمه تطابق \
-تعریف تعریف‌آرایش تعریف‌آرم تعریف‌الگوی‌جدول تعریف‌اندازه‌برگ \
-تعریف‌بافر تعریف‌بخش تعریف‌برنامه تعریف‌برچسب تعریف‌بلوک \
-تعریف‌بلوک‌بخش تعریف‌تایپ تعریف‌تایپ‌کردن تعریف‌تبدیل تعریف‌ترتیب \
-تعریف‌ترکیب تعریف‌تنظیم‌ریاضی تعریف‌توده‌میدان تعریف‌ثبت تعریف‌جانشانی \
-تعریف‌جدول‌بندی تعریف‌جعبه‌‌افقی تعریف‌حرف تعریف‌خالی تعریف‌خروجی \
-تعریف‌خط‌حائل تعریف‌درون‌حاشیه تعریف‌رنگ تعریف‌زیرمیدان تعریف‌سبک \
-تعریف‌سبک‌قلم تعریف‌سر تعریف‌شرح تعریف‌شروع‌پایان تعریف‌شماره‌بندی \
-تعریف‌شمایل‌مرجع تعریف‌شناور تعریف‌شکستن‌ستون تعریف‌شکست‌صفحه تعریف‌طرح‌بندی \
-تعریف‌فرمان تعریف‌قالبی تعریف‌قلم تعریف‌قلم‌خام تعریف‌قلم‌متن \
-تعریف‌لایه تعریف‌لهجه تعریف‌لوح تعریف‌لیست تعریف‌لیست‌ترکیبی \
-تعریف‌لیست‌مرجع تعریف‌مترادفها تعریف‌مترادف‌قلم تعریف‌متغیرمتن تعریف‌متن \
-تعریف‌متن‌قالبی تعریف‌مجموعه‌ستون تعریف‌محیط‌قلم‌بدنه تعریف‌مرجع تعریف‌منوی‌پانل \
-تعریف‌مکان‌متن تعریف‌میدان تعریف‌میدان‌اصلی تعریف‌نسخه تعریف‌نشانه‌گذاری \
-تعریف‌نماد تعریف‌نمادشکل تعریف‌پاراگرافها تعریف‌پروفایل تعریف‌پوشش \
-تعریف‌گروه‌آیتم تعریف‌گروه‌رنگ تعیین‌شماره تعیین‌شماره‌سر تعیین‌متغیر‌متن \
-تعیین‌محتوای‌متن تعیین‌مشخصات‌ثبت تعیین‌مشخصات‌لیست تغییربه‌قلم‌بدنه تغییربه‌قلم‌خام \
-تنظیم‌راست تنظیم‌طرح‌بندی تنظیم‌وسط توجه تورفتگی \
-توری تولید تک ثبت‌زوج ثبت‌کامل \
-جداسازی‌نشانه‌گذاری حاش حرف حرفها حفظ‌بلوکها \
-حقیقت خالی خطهای‌سیاه خطهای‌نازک خطها‌خالی \
-خط‌حاشیه خط‌سیاه خط‌متن خط‌مو خط‌نازک \
-خ‌ا خ‌ع در درج‌آرمها درج‌ثبت \
-درج‌خط درج‌درخط درج‌درخطها درج‌درمتن درج‌درمیدان \
-درج‌در‌بالای‌یکدیگر درج‌در‌توری درج‌راهنما درج‌زیرفرمول درج‌شماره‌سر \
-درج‌شماره‌صفحه درج‌شناور درج‌فرمول درج‌لیست درج‌لیست‌خام \
-درج‌لیست‌مختلط درج‌لیست‌مرجع درج‌متغیرمتن درج‌متن‌سر درج‌پانوشتها \
-درج‌پانوشتهای‌موضعی درج‌چوب‌خط درج‌کنار‌به‌کنار درحاشیه درحاشیه‌دیگر \
-درحاشیه‌راست درحاشیه‌چپ درخارجی درخط درداخلی \
-درراست درصفحه درقالبی درلبه‌راست درلبه‌چپ \
-درمورد درون درپر درچپ دریافت‌بافر \
-دریافت‌شماره دریافت‌نشانه دوران دکمه دکمه‌منو \
-دکمه‌پانل رج رجوع رنگ رنگ‌خاکستری \
-روزهفته ریاضی زبان زبان‌اصلی ستون \
-ستون‌امتحان سر سرپوش‌کوچک‌نه شروع‌آرایش شروع‌آرایش‌ستون \
-شروع‌باریکتر شروع‌بازبینی شروع‌بلوک‌حاشیه شروع‌ترکیب شروع‌تصحیح‌خط \
-شروع‌تطابق شروع‌تنظیم شروع‌تولید شروع‌جدول شروع‌جدولها \
-شروع‌خط شروع‌خطها شروع‌خط‌حاشیه شروع‌خط‌متن شروع‌رنگ \
-شروع‌ستونها شروع‌سراسری شروع‌شماره‌گذاری‌خط شروع‌شکل شروع‌غیر‌فشرده \
-شروع‌فشرده شروع‌متن شروع‌مجموعه‌ستون شروع‌مجموعه‌نماد شروع‌محیط \
-شروع‌مخالف شروع‌موضعی شروع‌مولفه شروع‌مکان‌گذاری شروع‌نسخه \
-شروع‌نقل‌قول شروع‌نوشتار شروع‌پانوشتهای‌موضعی شروع‌پروفایل شروع‌پروژه \
-شروع‌پس‌زمینه شروع‌پوشش شروع‌کد شماره‌افزایش شماره‌زیرصفحه \
-شماره‌زیرفرمول شماره‌سر شماره‌سرجاری شماره‌صفحه شماره‌صفحه‌کامل \
-شماره‌فرمول شماره‌مبدل شماره‌ها شماره‌کاهش شماره‌کل‌صفحه‌ها \
-شکافتن‌شناور شکل‌خارجی صفحه صفحه‌تست صفحه‌زوج \
-صفحه‌پردازش طول‌لیست عبوربلوکها عرض‌آرایش عرض‌برگ \
-عرض‌حاشیه عرض‌حاشیه‌خارجی عرض‌حاشیه‌داخلی عرض‌حاشیه‌راست عرض‌حاشیه‌چپ \
-عرض‌خط عرض‌لبه عرض‌لبه‌خارجی عرض‌لبه‌داخلی عرض‌لبه‌راست \
-عرض‌لبه‌چپ عرض‌لیست عرض‌متن عمق‌صفحه عنوان‌حاشیه \
-فاصله‌بالا فاصله‌ته‌برگ فاصله‌حاشیه فاصله‌حاشیه‌خارجی فاصله‌حاشیه‌داخلی \
-فاصله‌حاشیه‌راست فاصله‌حاشیه‌چپ فاصله‌سربرگ فاصله‌لبه فاصله‌لبه‌خارجی \
-فاصله‌لبه‌داخلی فاصله‌لبه‌راست فاصله‌لبه‌چپ فاصله‌پایین فاصله‌پشت \
-فشرده فضا فضاهای‌ثابت فضای‌بالا فضای‌برش \
-فضای‌ثابت فضای‌سفید فضای‌سفیدصحیح فضای‌پایین فوری‌به‌لیست \
-فوری‌بین‌لیست قالبی لوح‌مقایسه ماه متغیر متن \
-متن‌برچسب متن‌حاشیه متن‌سر متن‌پانوشت محیط \
-مراجعه مرجع مرجع‌صفحه مرجع‌متن مرحله‌سر \
-مسکن معنی‌واحد مقایسه‌گروه‌رنگ مقدارخاکستری مقداررنگ \
-مقیاس منفی منوی‌پانل مولفه مکان \
-مکان‌متن میدان میدانهای‌گزارش میدان‌شبیه‌سازی میدان‌پشته \
-میدان‌کپی میله‌تطابق میله‌رنگ میله‌پانل ناشناس \
-نام‌ماکرو نسخه نسخه‌نشانه نشانه‌گذاری نشانه‌گذاری‌زوج \
-نشر نصب‌زبان نقطه‌ها نقل نقل‌قول \
-نم نماد نمادسر نمادلیست نمایش‌آرایش \
-نمایش‌بارگذاریها نمایش‌بستها نمایش‌توری نمایش‌رنگ نمایش‌شکلهای‌خارجی \
-نمایش‌طرح‌بندی نمایش‌قالب نمایش‌قلم‌بدنه نمایش‌لوح نمایش‌مجموعه‌علامت \
-نمایش‌محیط‌قلم‌بدنه نمایش‌میدانها نمایش‌چاپ نمایش‌گروه‌رنگ نوشتارزوج \
-هدایت پا پابا پانوشت پایان‌آرایش \
-پایان‌آرایش‌ستون پایان‌بازبینی پایان‌بلوک‌حاشیه پایان‌ترکیب پایان‌تصحیح‌خط \
-پایان‌تطابق پایان‌تنظیم پایان‌تولید پایان‌جدول پایان‌جدولها \
-پایان‌خط پایان‌خطها پایان‌خط‌حاشیه پایان‌خط‌متن پایان‌رنگ \
-پایان‌ستونها پایان‌سراسری پایان‌شماره‌گذاری‌خط پایان‌غیرفشرده پایان‌فشرده \
-پایان‌متن پایان‌مجموعه‌ستون پایان‌محیط پایان‌مخالف پایان‌موضعی \
-پایان‌مولفه پایان‌مکان‌گذاری پایان‌نازکتر پایان‌نسخه پایان‌نقل‌قول \
-پایان‌نوشتار پایان‌پانوشتهای‌موضعی پایان‌پروفایل پایان‌پروژه پایان‌پس‌زمینه \
-پایان‌پوشش پایان‌کد پایین پرده پروژه \
-پرکردن‌میدان پس‌زمینه پیروی‌نسخه پیروی‌نسخه‌پروفایل پیروی‌پروفایل \
-چاپ‌ارتفاع‌برگ چاپ‌عرض‌برگ چوبخط چپ‌چین کاغذزوج \
-کسر کشیده کلمه‌حاشیه کلمه‌راست گیره \
-یادداشت یک‌جا یک‌خط
-
-keywordclass.context.ro=\
-CAP CUVANT CUVINTE Cap \
-Caps Cuvant Cuvinte KAP Kap \
-Kaps LUNA Litera Litere Numere \
-Numereromane ZIDINSAPTAMANA adapteazaaspect adubuffer adumarcaje \
-afiseazaaspect afiseazacampuri afiseazaculoare afiseazafiguriexterne afiseazafonttext \
-afiseazagrid afiseazagrupculoare afiseazamakeup afiseazamediufonttext afiseazapaleta \
-afiseazarama afiseazasetari afiseazasetsimboluri afiseazastruts afiseazatiparire \
-aliniat aliniatcentru aliniatdreapta aliniatstanga appendix \
-arg ascundeblocuri atleftmargin atrightmargin baraculoare \
-barainteractiune barasincronizare blanc but butoaneinteractiune \
-buton butonmeniu camp campumplere cap \
-chapter chem citat clip cloneazacamp \
-coloana comment comparagrupculoare comparapaleta completeazanumarpagina \
-completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completeregister \
-componenta convertestenumar copiazacamp corecteazaspatiualb coupledregister \
-crlf culoare culoaregri cupleazadocument cupleazamarcaje \
-cupleazaregistru cutspace cuvantdreapta cuvantmarginal data \
-datacurenta datareferit decrementnumber decupleazamarcaje definebodyfontDEF \
-definebodyfontREF definecolumnbreak definecolumnset definecombination definedfont \
-definefontfeature definefonthandling defineindentedtext defineinmargin defineitemgroup \
-definelayer definelayout definemathalignment definepagebreak defineplacement \
-defineste definesteaccent definesteantet definesteblanc definestebloc \
-definesteblocsectiune definestebuffer definestecamp definestecampprincipal definestecaracter \
-definestecomanda definesteconversie definesteculoare definestedescriere definestedimensiunehartie \
-definesteenumerare definesteeticheta definestefloat definestefont definestefontraw \
-definestefonttext definesteformatreferinte definestegrupculori definestehbox definesteinconjurare \
-definestelista definestelistacombinata definestelistareferinte definestelogo definestemakeup \
-definestemarcaje definestemediulfonttext definestemeniuinteractiune definesteoutput definesteoverlay \
-definestepaleta definesteparagraf definestepozitietext definesteprofil definesteprogram \
-definestereferinte definesteregistru definesterigla definestesablontabel definestesectiune \
-definestesimbol definestesimbolfigura definestesinonim definestesinonimfont definestesortare \
-definestestartstop definestestil definestestilfont definestestivacampuri definestesubcamp \
-definestetabulatori definestetext definestetexteinconjurate definestetextinconjurat definestetyping \
-definestevariabilatext definesteversiune definetype definetypeface description \
-despre determinacaracteristicilelistei determinacaracteristiciregistru determinanumartitlu dezactiveazameniuinteractiune \
-dimensiune din distantaantet distantacolt distantacoltdreapta \
-distantacoltstanga distantajos distantamargine distantamarginedreapta distantamarginestanga \
-distantasubsol distantasus domiciliu dute dutebox \
-dutepagina ecran el element emptylines \
-enumeration eticheta etichete fact faraaliniat \
-faradimensiune farafisiere faraliniiantetsisubsol faraliniisussijos faralista \
-faramarcaje faraspatiu faraspatiualb figuraexterna firdepar \
-folosesteURL folosestebloc folosestecodificarea folosestecomenzi folosestedirector \
-folosestedocumentextern folosestefiguraexterna folosestefisiereexterne folosestefisierextern folosestemodul \
-folosestemodule folosestemuzicaexterna folosestereferinte folosestescriptJS folosestesimboluri \
-folosestespeciale folosesteurl footnotetext forteazablocuri fractie \
-framed framedtext fundal gatablocuri getnumber \
-grid grosimelinie hartiedubla headsym hl \
-immediatebetweenlist immediatetolist impachetat impartefloat in \
-inalt inaltamargine inaltimeantet inaltimehartie inaltimehartieimprimanta \
-inaltimejos inaltimelista inaltimemakeup inaltimesubsol inaltimesus \
-inaltimetext indentation indreapta inframed ininner \
-injos inlinie inmaframed inmargineadreapta inmargineastanga \
-inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
-inparteadreapta inparteastanga instalarelimba instanga intins \
-jos jossus kap la labeling \
-lapagina latimecoltdreapta latimecoltstanga latimecolturi latimehartie \
-latimehartieimprimanta latimelista latimemakeup latimemargine latimemarginedreapta \
-latimemarginestanga latimetext leg limba limbaprincipala \
-liniemargine linieneagra liniesubtire linieumplere liniinegre \
-liniisubtiri listsymbol litera litere loadsorts \
-loadsynonyms logcampuri luna lungimelista maframed \
-mapfontsize mar marcaje marcheazaversiune marginal \
-matematica mediaeval mediu meniuinteractiune minicitat \
-moveformula movesidefloat mutapegrid name navigating \
-necunoscut nextsection niveltitlu nocap nokap \
-nop nota notasubsol numarformula numarincrement \
-numarpagina numarsubformula numartitlu numartitlucurent numartotalpagini \
-numberofsubpages nume numere numereromane numeunitate \
-nutesta olinie outeredgedistance outeredgewidth outermargindistance \
-outermarginwidth overbar overbars overstrike overstrikes \
-pagedepth pageoffset pagina paginadubla paragraph \
-part pastreazablocuri pelung placefloat placeheadnumber \
-placeheadtext placelistoffloats placelistofsorts placelistofsynonyms placerawlist \
-placereferencelist plaseazapegrid plaseazasemnecarte potrivestecamp pozitie \
-pozitietext proceseazabloc proceseazapagina produs program \
-proiect publicatie puncte punedeasuprafiecareia punefatainfata \
-puneformula punelegenda punelista punelistacombinata punelogouri \
-punenotesubsol punenotesubsollocale punenumarpagina puneregistru punerigla \
-punesubformula punevariabilatext ran ref refa \
-referinta referintapagina referintatext referit referring \
-reflexie register remarca reservefloat reset \
-reseteazamarcaje resetnumber resettextcontent riglatext rigleumplere \
-roteste saripesteblocuri scala scriebuffer scrieinlista \
-scrieinlistareferinte scrieinregistru scrieintreliste section seeregister \
-selecteazablocuri selecteazahartie selecteazaversiune semncarte setarebarasincronizare \
-setareitemization setarelimba setareoutput setarepozitie setaresincronizare \
-setari seteazaaliniat seteazaalinierea seteazaantet seteazaaranjareapag \
-seteazaaspect seteazabarainteractiune seteazablanc seteazabloc seteazablocsectiune \
-seteazablocurimarginale seteazabuffer seteazabutoane seteazacamp seteazacampuri \
-seteazaclipping seteazacoloane seteazacombinari seteazacomentariu seteazacomentariupagina \
-seteazaculoare seteazaculori seteazadefinireanotasubsol seteazadescriere seteazadimensiunihartie \
-seteazaecrane seteazaecraninteractiune seteazaelemente seteazaenumerare seteazafiguriexterne \
-seteazafloat seteazafloats seteazafonttext seteazaformulare seteazaformule \
-seteazafundal seteazafundaluri seteazagrosimelinie seteazaimpartireafloat seteazainconjurat \
-seteazaingust seteazainteractiunea seteazajos seteazalegenda seteazalegendele \
-seteazaliniesilabe seteazaliniesubtire seteazalinii seteazaliniimargine seteazaliniinegre \
-seteazaliniiumplere seteazalista seteazalistacombinata seteazalistareferinte seteazamajuscule \
-seteazamakeup seteazamarcaje seteazamarginal seteazamediulfonttext seteazameniuinteractiune \
-seteazaminicitat seteazanotasubsol seteazanumarpagina seteazanumarsubpagina seteazanumartitlu \
-seteazanumerotare seteazanumerotarelinii seteazanumerotarepagina seteazanumerotareparagrafe seteazapaleta \
-seteazaparagrafe seteazaplasareaopozita seteazapozitietext seteazaprofile seteazaprograme \
-seteazapublicatii seteazareferinte seteazaregistru seteazarigla seteazarigletext \
-seteazarigleumplere seteazarotare seteazasectiune seteazasimbol seteazasinonime \
-seteazasistem seteazasortare seteazaspatiu seteazaspatiualb seteazaspatiuinterliniar \
-seteazastrut seteazasublinie seteazasubsol seteazasus seteazatab \
-seteazatabele seteazatabulatori seteazatext seteazatexteantet seteazatextejos \
-seteazatextesubsol seteazatextesus seteazatextetext seteazatexteticheta seteazatexttitlu \
-seteazatitlu seteazatitluri seteazatoleranta seteazatranzitiepagina seteazatype \
-seteazatyping seteazaurl seteazavariabilatext seteazaversiuni setnumber \
-settextcontent setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart \
-setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
-setupstartstop setvariabilatext sim simbol sincronizeaza \
-sort spatiifixate spatiu spatiualb spatiufixat \
-spatiujos spatiuspate spatiusus startalignment startaliniere \
-startblocmarginal startbuffer startcitat startcodificare startcoloane \
-startcolumnmakeup startcolumns startcolumnset startcombinare startcombination \
-startcomment startcomponenta startcorectielinie startculoare startdescription \
-startdocument startenumeration startfact startfigura startfigure \
-startfloattext startformula startframedtext startfundal startglobal \
-starthiding startimpachetat startingust startitemgroup startlegend \
-startline startlinecorrection startlinenumbering startlines startlinie \
-startliniemargine startlinii startlocal startlocalenvironment startlocalfootnotes \
-startmakeup startmarginblock startmediu startmeniuinteractiune startnamemakeup \
-startnarrower startneimpachetat startnotesubsollocale startnumerotarelinii startopozit \
-startopposite startoverlay startoverview startparagraph startpositioning \
-startpostponing startpozitionare startprodus startprofil startprofile \
-startproiect startraster startregister startriglatext startsetsimboluri \
-startsincronizare startsymbolset startsynchronization starttabel starttabele \
-starttable starttables starttabulate starttext starttyping \
-startunpacked startversiune stivacampuri stopalignment stopaliniere \
-stopblobal stopblocmarginal stopbuffer stopcitat stopcodificare \
-stopcoloane stopcolumnmakeup stopcolumns stopcolumnset stopcombinare \
-stopcombination stopcomment stopcomponenta stopcorectielinie stopculoare \
-stopdescription stopdocument stopenumeration stopfact stopfigure \
-stopfloattext stopformula stopframedtext stopfundal stophiding \
-stopimpachetat stopingust stopitemgroup stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplinie stopliniemargine \
-stoplinii stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmarginblock stopmediu stopmeniuinteractiune stopnamemakeup stopnarrower \
-stopneimpachetat stopnotesubsollocale stopnumerotarelinii stopopozit stopopposite \
-stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
-stoppozitionare stopprodus stopprofil stopprofile stopproiect \
-stopraster stopriglatext stopsincronizare stopsymbolset stopsynchronization \
-stoptabel stoptabele stoptable stoptables stoptabulate \
-stoptext stoptyping stopunpacked stopversiune sub \
-subject subpagenumber subsection subsubject subsubsection \
-subsubsubject synonym tab testcolumn testpage \
-tex texteticheta textmarginal texttitlu textumplere \
-title titlu titlumarginal tooltip traduce \
-trecilafontraw trecilafonttext txt typ type \
-typefile underbar underbars undeva urmeazaprofil \
-urmeazaversiune urmeazaversiuneprofil useXMLfilter usedirectory usetypescript \
-usetypescriptfile valoareculoare valoaregri variabilatext versiune \
-vl zidinsaptamana
-
diff --git a/context/data/scite/scite-context-data-metafun.properties b/context/data/scite/scite-context-data-metafun.properties
deleted file mode 100644
index c0b080982..000000000
--- a/context/data/scite/scite-context-data-metafun.properties
+++ /dev/null
@@ -1,57 +0,0 @@
-keywordclass.metafun.commands=\
-sqr log ln exp \
-inv pow pi radian tand \
-cotd sin cos tan cot \
-atan asin acos invsin invcos \
-acosh asinh sinh cosh paired \
-tripled unitcircle fulldiamond unitdiamond fullsquare \
-llcircle lrcircle urcircle ulcircle tcircle \
-bcircle lcircle rcircle lltriangle lrtriangle \
-urtriangle ultriangle smoothed cornered superellipsed \
-randomized squeezed enlonged shortened punked \
-curved unspiked simplified blownup stretched \
-enlarged leftenlarged topenlarged rightenlarged bottomenlarged \
-crossed laddered randomshifted interpolated paralleled \
-cutends peepholed llenlarged lrenlarged urenlarged \
-ulenlarged llmoved lrmoved urmoved ulmoved \
-rightarrow leftarrow centerarrow boundingbox innerboundingbox \
-outerboundingbox pushboundingbox popboundingbox bottomboundary leftboundary \
-topboundary rightboundary xsized ysized xysized \
-sized xyscaled intersection_point intersection_found penpoint \
-bbwidth bbheight withshade withlinearshading withcircularshading \
-withfromshadecolor withtoshadecolor withshading shadedinto withcircularshade \
-withlinearshade cmyk spotcolor multitonecolor namedcolor \
-drawfill undrawfill inverted uncolored softened \
-grayed greyed onlayer along graphictext \
-loadfigure externalfigure withmask figure register \
-bitmapimage colordecimals ddecimal dddecimal ddddecimal \
-textext thetextext rawtextext textextoffset verbatim \
-thelabel label autoalign transparent withtransparency \
-property properties withproperties asgroup infont \
-set_linear_vector linear_shade define_linear_shade define_circular_linear_shade define_sampled_linear_shade \
-set_circular_vector circular_shade define_circular_shade define_circular_linear_shade define_sampled_circular_shade \
-space CRLF grayscale greyscale withgray \
-withgrey colorpart readfile clearxy unitvector \
-center epsed anchored originpath infinite \
-break xstretched ystretched snapped pathconnectors \
-function constructedpath constructedpairs punkedfunction curvedfunction \
-tightfunction punkedpath curvedpath tightpath punkedpairs \
-curvedpairs tightpairs evenly oddly condition \
-pushcurrentpicture popcurrentpicture arrowpath tensecircle roundedsquare \
-colortype whitecolor blackcolor normalfill normaldraw \
-visualizepaths naturalizepaths drawboundary drawwholepath visualizeddraw \
-visualizedfill draworigin drawboundingbox drawpath drawpoint \
-drawpoints drawcontrolpoints drawcontrollines drawpointlabels drawlineoptions \
-drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions drawboundoptions \
-drawpathoptions resetdrawoptions undashed decorated redecorated \
-undecorated passvariable passarrayvariable tostring format \
-formatted startpassingvariable stoppassingvariable
-
-keywordclass.metafun.internals=\
-nocolormodel greycolormodel graycolormodel rgbcolormodel \
-cmykcolormodel shadefactor textextoffset normaltransparent multiplytransparent \
-screentransparent overlaytransparent softlighttransparent hardlighttransparent colordodgetransparent \
-colorburntransparent darkentransparent lightentransparent differencetransparent exclusiontransparent \
-huetransparent saturationtransparent colortransparent luminositytransparent metapostversion \
-maxdimensions
-
diff --git a/context/data/scite/scite-context-data-metapost.properties b/context/data/scite/scite-context-data-metapost.properties
deleted file mode 100644
index 88ace57ca..000000000
--- a/context/data/scite/scite-context-data-metapost.properties
+++ /dev/null
@@ -1,102 +0,0 @@
-keywordclass.metapost.commands=\
-beginfig endfig beginglyph endglyph \
-charscale rotatedaround reflectedabout arrowhead currentpen \
-currentpicture cuttings defaultfont extra_beginfig extra_endfig \
-ditto EOF down evenly fullcircle \
-halfcircle identity in left origin \
-pensquare quartercircle right unitsquare up \
-withdots abs bbox ceiling center \
-cutafter cutbefore dir directionpoint div \
-dotprod intersectionpoint inverse mod round \
-unitvector whatever cutdraw draw drawarrow \
-drawdblarrow fill filldraw drawdot loggingall \
-interact tracingall tracingnone pickup undraw \
-unfill unfilldraw buildcycle dashpattern decr \
-dotlabel dotlabels drawoptions incr label \
-labels max min thelabel z \
-beginchar blacker capsule_end change_width define_blacker_pixels \
-define_corrected_pixels define_good_x_pixels define_good_y_pixels define_horizontal_corrected_pixels define_pixels \
-define_whole_blacker_pixels define_whole_pixels define_whole_vertical_blacker_pixels define_whole_vertical_pixels endchar \
-extra_beginchar extra_endchar extra_setup font_coding_scheme clearxy \
-clearit clearpen shipit font_extra_space exitunless \
-relax hide gobble gobbled stop \
-blankpicture counterclockwise tensepath takepower direction \
-softjoin makelabel rotatedabout flex superellipse \
-erase image nullpen savepen clearpen \
-penpos penlabels range numtok thru \
-z laboff bye red green \
-blue cyan magenta yellow black \
-white background graypart graycolor mm \
-pt dd bp cm pc \
-cc in triplet quadruplet
-
-keywordclass.metapost.internals=\
-mitered rounded beveled butt \
-squared eps epsilon infinity bboxmargin \
-ahlength ahangle labeloffset dotlabeldiam defaultpen \
-defaultscale join_radius pen_lft pen_rt pen_top \
-pen_bot
-
-keywordclass.metapost.primitives=\
-charcode day linecap linejoin \
-miterlimit month pausing prologues showstopping \
-time tracingcapsules tracingchoices mpprocset tracingcommands \
-tracingequations tracinglostchars tracingmacros tracingonline tracingoutput \
-tracingrestores tracingspecs tracingstats tracingtitles truecorners \
-warningcheck year false nullpicture pencircle \
-true and angle arclength arctime \
-ASCII boolean bot char color \
-cosd cycle decimal directiontime floor \
-fontsize hex infont intersectiontimes known \
-length llcorner lrcorner makepath makepen \
-mexp mlog normaldeviate not numeric \
-oct odd or path pair \
-pen penoffset picture point postcontrol \
-precontrol reverse rotated scaled shifted \
-sind slanted sqrt str string \
-subpath substring transform transformed ulcorner \
-uniformdeviate unknown urcorner xpart xscaled \
-xxpart xypart ypart yscaled yxpart \
-yypart zscaled addto clip input \
-interim let newinternal save setbounds \
-shipout show showdependencies showtoken showvariable \
-special begingroup endgroup of curl \
-tension and controls interpath on \
-off def vardef enddef expr \
-suffix text primary secondary tertiary \
-primarydef secondarydef tertiarydef randomseed also \
-contour doublepath withcolor withcmykcolor withpen \
-dashed if else elseif fi \
-for endfor forever exitif within \
-forsuffixes downto upto step until \
-charlist extensible fontdimen headerbyte kern \
-ligtable boundarychar chardp charext charht \
-charic charwd designsize fontmaking charexists \
-cullit currenttransform gfcorners grayfont hround \
-imagerules lowres_fix nodisplays notransforms openit \
-displaying currentwindow screen_rows screen_cols pixels_per_inch \
-cull display openwindow numspecial totalweight \
-autorounding fillin proofing tracingpens xoffset \
-chardx granularity smoothing turningcheck yoffset \
-chardy hppp tracingedges vppp extra_beginfig \
-extra_endfig mpxbreak endinput message delimiters \
-turningnumber errmessage readstring scantokens end \
-outer inner write to readfrom \
-withprescript withpostscript top bot lft \
-rt ulft urt llft lrt \
-redpart greenpart bluepart cyanpart magentapart \
-yellowpart blackpart greypart prescriptpart postscriptpart \
-rgbcolor cmykcolor greycolor graycolor colormodel \
-graypart dashpart penpart stroked filled \
-textual clipped bounded pathpart expandafter \
-minute hour outputformat outputtemplate filenametemplate \
-fontmapfile fontmapline fontpart fontsize glyph \
-restoreclipcolor troffmode
-
-keywordclass.metapost.shortcuts=\
-.. ... -- --- \
-&
-
-keywordclass.metapost.tex=\
-btex etex verbatimtex
-
diff --git a/context/data/scite/scite-context-readme.pdf b/context/data/scite/scite-context-readme.pdf
deleted file mode 100644
index b6a751a36..000000000
--- a/context/data/scite/scite-context-readme.pdf
+++ /dev/null
Binary files differ
diff --git a/context/data/scite/scite-context-user.properties b/context/data/scite/scite-context-user.properties
deleted file mode 100644
index 88e803031..000000000
--- a/context/data/scite/scite-context-user.properties
+++ /dev/null
@@ -1,15 +0,0 @@
-# this loads the basics
-
-import scite-context
-
-# internal lexing
-
-import scite-context-internal
-
-# external lexing (tex, mps, cld/lua, xml)
-
-import scite-context-external
-
-# this does some tuning
-
-import scite-pragma
diff --git a/context/data/scite/scite-context-visual.tex b/context/data/scite/scite-context-visual.tex
deleted file mode 100644
index 0a1b8bb71..000000000
--- a/context/data/scite/scite-context-visual.tex
+++ /dev/null
@@ -1,52 +0,0 @@
-% language=uk
-
-\usemodule[art-01]
-
-\defineframedtext
- [entry]
-
-\starttext
-
-\startchapter[title=Some fancy title]
-
- \startluacode
- local entries = { -- there can be more
- { text = "The third entry!" },
- { text = "The fourth entry!" },
- }
-
- for i=1,#entries do
- context.startentry()
- context(entries[i].text)
- context.stopentry()
- end
- \stopluacode
-
- This is just some text to demonstrate the realtime spellchecker
- in combination with the embedded lua and metapost lexers and
- inline as well as display \ctxlua{context("lua code")}.
-
- Non breakable spaces in for instance 10 mm and quads like here
- are shown as well.
-
- \startlinecorrection
- \startMPcode
- for i=1 upto 100 :
- draw fullcircle scaled (i*mm) ;
- endfor ;
- \stopMPcode
- \stoplinecorrection
-
- \iftrue
- \def\crap{some text} % who cares
- \else
- \def\crap{some crap} % about this
- \fi
-
- \blank[2*big]
-
- \crap
-
-\stopchapter
-
-\stoptext
diff --git a/context/data/scite/tex.properties b/context/data/scite/tex.properties
deleted file mode 100644
index 3fbad41cb..000000000
--- a/context/data/scite/tex.properties
+++ /dev/null
@@ -1 +0,0 @@
-import scite-tex
diff --git a/context/data/texworks/configuration/smart-quotes-modes.txt~ b/context/data/texworks/configuration/smart-quotes-modes.txt~
deleted file mode 100644
index c126b5145..000000000
--- a/context/data/texworks/configuration/smart-quotes-modes.txt~
+++ /dev/null
@@ -1,21 +0,0 @@
-# TeXworks: "Smart Quotes" modes
-
-# Each mode has a label (in [square brackets]), followed by lines specifying the
-# behavior for each quote keystroke. These lines have three fields:
-#
-# <keystroke> <open-form> <close-form>
-#
-# separated by whitespace.
-
-# [TeX ligatures]
-# ' ` '
-# " `` ''
-#
-
-[TeX commands]
-' \quote{ }
-" \quotation{ }
-
-# [Unicode characters]
-# ' ‘ ’
-# " “ ”
diff --git a/doc/context/document/general/manuals/mreadme.pdf b/doc/context/document/general/manuals/mreadme.pdf
deleted file mode 100644
index 338008015..000000000
--- a/doc/context/document/general/manuals/mreadme.pdf
+++ /dev/null
Binary files differ
diff --git a/doc/context/document/general/manuals/tiptrick.pdf b/doc/context/document/general/manuals/tiptrick.pdf
deleted file mode 100644
index 3ced360e1..000000000
--- a/doc/context/document/general/manuals/tiptrick.pdf
+++ /dev/null
Binary files differ
diff --git a/doc/context/documents/general/manuals/epub-mkiv-demo.epub b/doc/context/documents/general/manuals/epub-mkiv-demo.epub
new file mode 100644
index 000000000..2fc6200ff
--- /dev/null
+++ b/doc/context/documents/general/manuals/epub-mkiv-demo.epub
Binary files differ
diff --git a/doc/context/documents/general/manuals/epub-mkiv-demo.pdf b/doc/context/documents/general/manuals/epub-mkiv-demo.pdf
new file mode 100644
index 000000000..9a390cb92
--- /dev/null
+++ b/doc/context/documents/general/manuals/epub-mkiv-demo.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/epub-mkiv.pdf b/doc/context/documents/general/manuals/epub-mkiv.pdf
new file mode 100644
index 000000000..1045056cd
--- /dev/null
+++ b/doc/context/documents/general/manuals/epub-mkiv.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/mreadme.pdf b/doc/context/documents/general/manuals/mreadme.pdf
new file mode 100644
index 000000000..d27bbaedf
--- /dev/null
+++ b/doc/context/documents/general/manuals/mreadme.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/swiglib-mkiv.pdf b/doc/context/documents/general/manuals/swiglib-mkiv.pdf
new file mode 100644
index 000000000..3e146f9d1
--- /dev/null
+++ b/doc/context/documents/general/manuals/swiglib-mkiv.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/tiptrick.pdf b/doc/context/documents/general/manuals/tiptrick.pdf
new file mode 100644
index 000000000..85bfda893
--- /dev/null
+++ b/doc/context/documents/general/manuals/tiptrick.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/tools-mkiv.pdf b/doc/context/documents/general/manuals/tools-mkiv.pdf
new file mode 100644
index 000000000..563b35aac
--- /dev/null
+++ b/doc/context/documents/general/manuals/tools-mkiv.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/units-mkiv.pdf b/doc/context/documents/general/manuals/units-mkiv.pdf
new file mode 100644
index 000000000..f931d6d6b
--- /dev/null
+++ b/doc/context/documents/general/manuals/units-mkiv.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/workflows-mkiv.pdf b/doc/context/documents/general/manuals/workflows-mkiv.pdf
new file mode 100644
index 000000000..3a2caaede
--- /dev/null
+++ b/doc/context/documents/general/manuals/workflows-mkiv.pdf
Binary files differ
diff --git a/doc/context/documents/general/manuals/xtables-mkiv.pdf b/doc/context/documents/general/manuals/xtables-mkiv.pdf
new file mode 100644
index 000000000..d3550c0ee
--- /dev/null
+++ b/doc/context/documents/general/manuals/xtables-mkiv.pdf
Binary files differ
diff --git a/doc/context/manuals/allkind/mcommon.tex b/doc/context/manuals/allkind/mcommon.tex
deleted file mode 100644
index f0c22cff4..000000000
--- a/doc/context/manuals/allkind/mcommon.tex
+++ /dev/null
@@ -1,199 +0,0 @@
-% content=tex
-%
-% copyright=pragma-ade readme=readme.pdf licence=cc-by-nc-sa
-
-\startenvironment mcommon
-
-% modules
-
-\usemodule[abr-02]
-
-% layout
-
-\startmode[screen]
- \setuppapersize[S6][S6]
- \setupinteraction[state=start]
- \setupinteractionscreen[options=max]
-\stopmode
-
-\setuplayout
- [footer=0cm,
- width=middle,
- height=middle]
-
-% fonts
-
-\startmode[atpragma]
-
- \startMPenvironment[global]
- \usetypescriptfile[type-buy]
- \usetypescript[lucida][texnansi]
- \setupbodyfont[lucida,10pt]
- \stopMPenvironment
-
- \setupbodyfont[11pt]
-
-\stopmode
-
-\startnotmode[atpragma]
-
- \startMPenvironment[global]
- \usetypescript[palatino][ec]
- \setupbodyfont[palatino,10pt]
- \stopMPenvironment
-
- \setupbodyfont[11pt]
-
-\stopnotmode
-
-\definefont [BigFont] [SansBold at 60pt]
-\definefont [MedFont] [SansBold at 30pt]
-
-% colors
-
-\setupcolors
- [state=start]
-
-\definecolor [NopColor] [r=.6,g=.4,b=.5]
-\definecolor [AltColor] [r=.4,g=.6,b=.5]
-\definecolor [TheColor] [r=.4,g=.5,b=.6]
-\definecolor [TmpColor] [r=.6,g=.5,b=.4]
-
-\definecolor [red] [NopColor]
-\definecolor [green] [AltColor]
-\definecolor [blue] [TheColor]
-\definecolor [yellow][TmpColor]
-
-% spacing
-
-\setupwhitespace
- [big]
-
-\setuptolerance
- [verytolerant,stretch]
-
-% verbatim
-
-\setuptype
- [color=AltColor]
-
-\setuptyping
- [color=AltColor]
-
-% structure
-
-\setupitemize
- [each]
- [color=TheColor]
-
-\definedescription
- [switch]
- [headstyle=type,
- headcolor=TheColor,
- location=serried,
- width=broad]
-
-\defineenumeration
- [topic]
- [location=serried,
- width=broad,
- headstyle=,
- headcolor=TheColor,
- text=,
- left={[},
- right={]}]
-
-\setuphead
- [section]
- [style=\ss\bfb,
- color=TheColor]
-
-\setuplist
- [section]
- [alternative=c,
- color=TheColor,
- textcolor=black,
- pagecolor=black]
-
-% whatever
-
-\setupsystem
- [random=medium]
-
-\setupfloats
- [ntop=100]
-
-\setupinteraction
- [style=,
- color=NopColor,
- contrastcolor=NopColor]
-
-% tables and frames
-
-\setuptabulate
- [rulethickness=.5pt,
- rulecolor=AltColor]
-
-\setuptables
- [rulethickness=.5pt,
- rulecolor=AltColor]
-
-\setupframedtexts
- [rulethickness=.5pt,
- framecolor=TheColor,
- width=\textwidth]
-
-% quick reference things
-
-\usemodule[set-11] \loadsetups
-
-\setupframedtexts
- [setuptext]
- [rulethickness=.5pt,
- framecolor=AltColor]
-
-% basic titlepage and colofon, a bit old fashioned approach, but let's not
-% modernize everything now
-
-\def\TitlePage#1#2#3#4#5% number/name angle title author screen
- {\doifnumberelse{#1}
- {\ifcase#1
- \defineoverlay
- [logo]
- [\useMPgraphic{titlepage}{width=\overlaywidth,height=\overlayheight}]
- \else
- \startMPrun
- logo_type := #1 ; mpgraph := #1 ; input mp-cont ;
- \stopMPrun
- \defineoverlay
- [logo]
- [{\externalfigure
- [\MPrunfile{#1}]
- [width=\overlaywidth,height=\overlayheight]}]
- \fi}
- {\defineoverlay
- [logo]
- [\useMPgraphic{#1}{width=\overlaywidth,height=\overlayheight}]}
- \setupbackgrounds
- [page]
- [background=logo]
- \definecolor[Gray][s=#5]
- \startstandardmakeup
- \dontcomplain
- \BigFont \setupinterlinespace \vfill \setupalign[left] \let\\=\par
- \ifcase#2\relax
- \noindent\color[Gray]{#3}\par
- \else
- \noindent\rotate[rotation=#2]{\color[Gray]{#3}}\par
- \fi
- \stopstandardmakeup
- \setupbackgrounds
- [page]
- [background=]}
-
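-% A typical call, as used later in mreadme.tex
-% (arguments: number/name, angle, title, author, screen):
-%
-%   \TitlePage{titlepage}{90}{Read Me First}{}{1}
-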
-\def\ColofonPage
- {\startstandardmakeup
- \vfill \setups [pragma-colofon]
- \stopstandardmakeup}
-
-\stopenvironment
diff --git a/doc/context/manuals/allkind/mreadme.tex b/doc/context/manuals/allkind/mreadme.tex
deleted file mode 100644
index 22af40afe..000000000
--- a/doc/context/manuals/allkind/mreadme.tex
+++ /dev/null
@@ -1,361 +0,0 @@
-% interface=en output=pdftex language=uk
-%
-% copyright=pragma-ade readme=readme.pdf licence=cc-by-nc-sa
-
-\environment mcommon
-
-\useurl[gpl-simple] [http://creativecommons.org/licenses/GPL/2.0/]
-\useurl[gpl-legal] [http://creativecommons.org/licenses/GPL/2.0/legalcode]
-\useurl[byncsa-simple][http://creativecommons.org/licenses/by-nc-sa/2.5/]
-\useurl[byncsa-legal] [http://creativecommons.org/licenses/by-nc-sa/2.5/legalcode]
-
-\useurl[garden] [http://contextgarden.net]
-\useurl[contextlist] [http://www.ntg.nl/mailman/listinfo/ntg-context]
-\useurl[development] [http://www.ntg.nl/mailman/listinfo/dev-context]
-\useurl[announce] [http://www.ntg.nl/mailman/listinfo/ann-context]
-\useurl[collector] [http://context.literatesolutions.com]
-\useurl[pragma] [http://www.pragma-ade.com]
-\useurl[mirror] [http://context.aanhet.net]
-
-\setupinteraction[state=start]
-
-% copied from cont-log: readme_logo
-
-\startuseMPgraphic{titlepage}{width,height}
- numeric width ; width = \MPvar{width} ;
- numeric height ; height = \MPvar{height} ;
- numeric delta ; delta := width/10 ;
- numeric circle ; circle := 2.5delta ;
- color c ; c := (.2,.4,.6) ;
- path p, q, r ;
- p := unitsquare xscaled width yscaled height ;
- z1 = (delta,height-2delta) ;
- z2 = (width-delta,height-delta) ;
- z3 = (width/2-delta,2delta+circle) ;
- z4 = (x3,delta+circle/2) ;
- q := z1 { dir -15 } .. z2 &
- z2 { dir -105 } .. z3 &
- z3 { dir 135 } .. z1 &
- cycle ;
- r := fullcircle xscaled circle yscaled (.85circle) rotated 15 shifted z4 ;
- pickup pencircle scaled (delta/1.5) ;
- fill p withcolor .50c ;
- fill q withcolor .75c ;
- fill r withcolor .75c ;
- draw p withcolor c ;
- draw q withcolor c ;
- pickup pencircle scaled (delta/2) ;
- draw r withcolor c ;
- setbounds currentpicture to p ;
-\stopuseMPgraphic
-
-\starttext
-
-\TitlePage{titlepage}{90}{Read Me First}{}{1}
-
-\subject {Introduction}
-
-Which licence suits a \TEX\ like system best is a matter of
-taste. Personally we dislike any licence that needs more than a few
-pages of dense legal code to get the message across. A \TEX\
-related system like \CONTEXT\ is a hybrid of programs, scripts
-and|/|or macro code as well as documentation and sample code,
-including graphics. \TEX\ related systems also have a long||standing
-tradition of providing support structures for users. In order to
-make support feasible, a \TEX\ based system like \CONTEXT\ assumes
-a certain logic and structure in the way the related files are
-named and organized in a tree structure. Even a small change in one
-of the elements may make such a system behave differently than the
-manuals suggest. Swap a font, change some style defaults, leave out
-some pieces, and users may end up in confusion. A licence does not
-give a user any guarantees!
-
-In order to satisfy those responsible for distributing \CONTEXT,
-we need to choose a licence that makes them feel comfortable.
-Unfortunately we don't feel that comfortable with a licence that does
-not provide the guarantees that a system will not be adapted in
-such ways that the advertised behaviour changes. On the
-other hand, it is the responsibility of those distributing and
-extending the system to make sure that this does not happen.
-However, users should not automatically assume that what they get
-shipped is the same as the original, which is why we stress that
-support (from our side) will only be given on unaltered systems.
-
-First of all, what is \CONTEXT ? It's just a bunch of macros,
-written in \TEX\ and \METAPOST, meant for typesetting documents.
-The macros are accompanied by some scripts, written in \PERL\ (mainly
-the older scripts), \RUBY\ (the official ones) and \LUA\ (for
-embedded usage). The \CONTEXT\ distribution comes with a few fonts,
-files that help manage resources (e.g.\ map files), as well as
-patterns (based on official ones, so this is a derived work).
-
-The \CONTEXT\ distribution is packaged in a zip file organized in
-the \TDS\ structure.
-
-\starttabulate
-\NC \type {cont-tmf.zip} \NC The main distribution. \NC \NR
-\NC \type {cont-img.zip} \NC A few extra resources. \NC \NR
-\NC \type {cont-ext.zip} \NC Third party modules. \NC \NR
-\stoptabulate
-
-When we talk about \CONTEXT\ we also mean its graphical companion
-\METAFUN\ and \FOXET, an \XML\ related product. All these are
-included in the main distribution archive.
-
-The documentation can be downloaded from our website, one of its
-mirrors, or the \TEX\ collection as distributed by \TEX\ user groups.
-For some manuals, source code is available in a subversion
-repository. The archives are also kept on \CTAN.
-
-That said, what licence does apply? We need to distinguish between
-things that resemble a program on the one hand and documentation
-on the other hand. We (currently) use a different licence for
-each of them.
-
-\subject {The Code}
-
-The program code (i.e. anything not under the \type {/doc}
-subtree) is distributed under the
-
-\startnarrower
-\goto{Creative Commons GNU GPL}[url(gpl-simple)]
-\stopnarrower
-
-For practical purposes distributors may also choose the \LATEX\
-project licence, which is considered to be a bit more \TEX\
-friendly. (BSD||like licences, the Ruby Licence and the Apache
-Licence are all licences that apply well to \CONTEXT.)
-
-In practice, users may forget about the legal part, if only
-because I haven't even read (and understood) it completely myself,
-so let's stick to what Creative Commons makes of it:
-
-\startcolor[blue]
-The GNU General Public License is a Free Software license. Like
-any Free Software license, it grants to you the four following
-freedoms:
-
-\startitemize
-\item The freedom to run the program for any purpose.
-\item The freedom to study how the program works and adapt it to
- your needs.
-\item The freedom to redistribute copies so you can help your neighbour.
-\item The freedom to improve the program and release your improvements
- to the public, so that the whole community benefits.
-\stopitemize
-
-You may exercise the freedoms specified here provided that you
-comply with the express conditions of this license. The principal
-conditions are:
-
-You must conspicuously and appropriately publish on each copy
-distributed an appropriate copyright notice and disclaimer of
-warranty and keep intact all the notices that refer to this
-License and to the absence of any warranty; and give any other
-recipients of the Program a copy of the GNU General Public License
-along with the Program. Any translation of the GNU General Public
-License must be accompanied by the GNU General Public License.
-
-If you modify your copy or copies of the program or any portion of
-it, or develop a program based upon it, you may distribute the
-resulting work provided you do so under the GNU General Public
-License. Any translation of the GNU General Public License must be
-accompanied by the GNU General Public License.
-
-If you copy or distribute the program, you must accompany it with
-the complete corresponding machine-readable source code or with a
-written offer, valid for at least three years, to furnish the
-complete corresponding machine-readable source code.
-
-Any of these conditions can be waived if you get permission from
-the copyright holder.
-
-Your fair use and other rights are in no way affected by the above.
-\stopcolor
-
-\subject {Recommendations}
-
-Here are a few recommendations in case you want to distribute,
-extend or embed \CONTEXT\ in applications:
-
-\startitemize
-
-\item You can best leave the code base untouched. Most of
-\CONTEXT\ provides hooks and it's relatively easy to overload
-code. Leave the lower level system code untouched: changes may
-backfire when you update. Asking for more hooks is the best way to
-go.
-
-\item Put your own code in the right subpaths, i.e.\ modules
-approved by the development team under \type {.../third}, and
-styles and whatever else under \type {.../user}. This way your
-code will not interfere with existing code and updating will cause
-fewer problems. Keep in mind that \TEX\ systems have their own way
-and order in locating files, and the load order often matters.
-
-\item Don't copy styles and change a few lines, but load the base one
-and build|/|patch on top of that. In the end you may benefit from
-improvements to the base style.
-
-\item Be original. The whole idea behind \CONTEXT\ is that you can
-write your own styles. On the \CONTEXT\ mailing list as well as on
-the Wiki there are enough advanced users to help you make a start.
-
-\item Don't hesitate to submit bug reports and ask for
-extensions. It may even be that what you want is already present
-but as yet undocumented.
-
-\item If things don't work as expected, check to what extent your
-system matches the (more or less) standard. We provide so||called
-minimal \CONTEXT\ trees that can serve as a reference. Because
-\CONTEXT\ evolves, make sure your system is up to date.
-
-\item The scripts can best be called using \type {texmfstart}, as
-sketched after this list. This lessens dependencies on the location
-in the tree and ensures upward compatibility. It also prevents
-clashes with similarly named scripts.
-
-\item Some scripts depend on each other. Don't mess around with the
-existing functionality and names of the scripts and then feed them
-back into the standard distributions.
-
-\stopitemize
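-
-As a minimal sketch of calling the scripts through \type {texmfstart}
-(the file name is just a placeholder and the exact flags may differ
-per installation):
-
-\starttyping
-texmfstart texexec --pdf somefile.tex
-texmfstart ctxtools --updatecontext
-\stoptyping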
-
-\subject {Documents}
-
-The documentation is provided under another Creative Commons licence
-
-\startnarrower
-\goto{Attribution NonCommercial ShareAlike}[url(byncsa-simple)]
-\stopnarrower
-
-This one says:
-
-\startcolor[blue]
-You are free:
-
-\startitemize
-\item to copy, distribute, display, and perform the work
-\item to make derivative works
-\stopitemize
-
-{\sc Attribution:} You must attribute the work in the manner
-specified by the author or licensor.
-
-{\sc NonCommercial:} You may not use this work for commercial
-purposes.
-
-{\sc Share Alike:} If you alter, transform, or build upon this
-work, you may distribute the resulting work only under a license
-identical to this one.
-
-\startitemize
-\item For any reuse or distribution, you must make clear to others
- the license terms of this work.
-\item Any of these conditions can be waived if you get permission from
- the copyright holder.
-\stopitemize
-
-Your fair use and other rights are in no way affected by the above.
-\stopcolor
-
-The non||commercial part is mostly a safeguard. We don't mind if
-user groups distribute printed copies, publish (parts of) manuals
-and|/|or if authors use example code in manuals and books about
-\CONTEXT.
-
-If you distribute \CONTEXT\ and related software on electronic media
-as part of \TEX\ distributions (whether or not for money), you may
-also distribute the manuals and their sources in electronic form,
-preferably as provided by the maintainers of \CONTEXT.
-
-Keep in mind that logos and cover designs are not meant to be
-copied. We provide the source code for some manuals, but we don't
-always provide all graphics and other resources. For instance, in
-some manuals we use commercial fonts and you have to buy those
-yourself.
-
-We provide the typeset manuals at our website. Those are the official
-ones. We appreciate it if you do not distribute manuals compiled
-on your own system as substitutes. The manuals are a showcase for what
-\CONTEXT\ provides. Help us to assure the quality.
-
-\subject {More information}
-
-We're not going to fill \mathematics{n}~pages with legal stuff, so if
-you want to know more, you have to consult the web for the legalities
-mentioned. Here are a few starting points:
-
-\startlines
-\goto{\url[gpl-simple]}[url(gpl-simple)]
-\goto{\url[gpl-legal]}[url(gpl-legal)]
-\stoplines
-
-\startlines
-\goto{\url[byncsa-simple]}[url(byncsa-simple)]
-\goto{\url[byncsa-legal]}[url(byncsa-legal)]
-\stoplines
-
-\CONTEXT\ itself can be fetched from the main site or its primary mirror:
-
-\startlines
-\goto{\url[pragma]}[url(pragma)]
-\goto{\url[mirror]}[url(mirror)]
-\stoplines
-
-A starting point for support can be found at:
-
-\startlines
-\goto{\url[contextlist]}[url(contextlist)]
-\goto{\url[garden]}[url(garden)]
-\stoplines
-
-Bugs and feature requests can be registered at the collector:
-
-\startlines
-\goto{\url[collector]}[url(collector)]
-\stoplines
-
-Releases are announced at:
-
-\startlines
-\goto{\url[announce]}[url(announce)]
-\stoplines
-
-The developers can be met at:
-
-\startlines
-\goto{\url[development]}[url(development)]
-\stoplines
-
-\subject {Disclaimer}
-
-To play it safe we include a disclaimer here, taken from the BSD style
-licence. For some reason such a text is in capitals, so \unknown
-
-\start \sc \blue
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR \quotation {AS IS} AND ANY EXPRESS OR
-IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
-THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-\stop
-
-\unknown\ and don't bother discussing licence issues and related things
-with us for the mere sake of discussing licence stuff.
-
-\blank[2*big]
-
-\startlines
-Hans Hagen
-PRAGMA ADE
-Hasselt NL
-\stoplines
-
-\ColofonPage
-
-\stoptext
diff --git a/doc/context/scripts/mkii/ctxtools.man b/doc/context/scripts/mkii/ctxtools.man
index 3eb988c9c..38f10ef7c 100644
--- a/doc/context/scripts/mkii/ctxtools.man
+++ b/doc/context/scripts/mkii/ctxtools.man
@@ -1,4 +1,4 @@
-.TH "ctxtools" "1" "01-01-2014" "version 1.3.5" "CtxTools"
+.TH "ctxtools" "1" "01-01-2015" "version 1.3.5" "CtxTools"
.SH NAME
.B ctxtools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/imgtopdf.man b/doc/context/scripts/mkii/imgtopdf.man
index ade6ce4f3..864cf5a98 100644
--- a/doc/context/scripts/mkii/imgtopdf.man
+++ b/doc/context/scripts/mkii/imgtopdf.man
@@ -1,4 +1,4 @@
-.TH "imgtopdf" "1" "01-01-2014" "version 1.1.2" "ImgToPdf"
+.TH "imgtopdf" "1" "01-01-2015" "version 1.1.2" "ImgToPdf"
.SH NAME
.B imgtopdf
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/mptopdf.man b/doc/context/scripts/mkii/mptopdf.man
index 87cb566eb..30197a9f2 100644
--- a/doc/context/scripts/mkii/mptopdf.man
+++ b/doc/context/scripts/mkii/mptopdf.man
@@ -1,4 +1,4 @@
-.TH "mptopdf" "1" "01-01-2014" "version 1.4.1" "convert MetaPost to PDF"
+.TH "mptopdf" "1" "01-01-2015" "version 1.4.1" "convert MetaPost figures to PDF"
.SH NAME
.B mptopdf
.SH SYNOPSIS
@@ -8,7 +8,7 @@
.I FILENAMES
.B ]
.SH DESCRIPTION
-.B convert MetaPost to PDF
+.B convert MetaPost figures to PDF
.SH OPTIONS
.TP
.B --metafun
diff --git a/doc/context/scripts/mkii/mptopdf.xml b/doc/context/scripts/mkii/mptopdf.xml
index a3b06a231..28e67bcb3 100644
--- a/doc/context/scripts/mkii/mptopdf.xml
+++ b/doc/context/scripts/mkii/mptopdf.xml
@@ -2,7 +2,7 @@
<application>
<metadata>
<entry name="name">mptopdf</entry>
- <entry name="detail">convert MetaPost to PDF</entry>
+ <entry name="detail">convert MetaPost figures to PDF</entry>
<entry name="version">1.4.1</entry>
</metadata>
<flags>
diff --git a/doc/context/scripts/mkii/pdftools.man b/doc/context/scripts/mkii/pdftools.man
index ddbebe386..f63bf7e39 100644
--- a/doc/context/scripts/mkii/pdftools.man
+++ b/doc/context/scripts/mkii/pdftools.man
@@ -1,4 +1,4 @@
-.TH "pdftools" "1" "01-01-2014" "version 1.2.1" "PDFTools"
+.TH "pdftools" "1" "01-01-2015" "version 1.2.1" "PDFTools"
.SH NAME
.B pdftools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/pstopdf.man b/doc/context/scripts/mkii/pstopdf.man
index 9a8e5c637..8203e76cc 100644
--- a/doc/context/scripts/mkii/pstopdf.man
+++ b/doc/context/scripts/mkii/pstopdf.man
@@ -1,4 +1,4 @@
-.TH "pstopdf" "1" "01-01-2014" "version 2.0.1" "PStoPDF"
+.TH "pstopdf" "1" "01-01-2015" "version 2.0.1" "PStoPDF"
.SH NAME
.B pstopdf
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/rlxtools.man b/doc/context/scripts/mkii/rlxtools.man
index a42a80c38..58e7032d8 100644
--- a/doc/context/scripts/mkii/rlxtools.man
+++ b/doc/context/scripts/mkii/rlxtools.man
@@ -1,4 +1,4 @@
-.TH "rlxtools" "1" "01-01-2014" "version 1.0.1" "RlxTools"
+.TH "rlxtools" "1" "01-01-2015" "version 1.0.1" "RlxTools"
.SH NAME
.B rlxtools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/texexec.man b/doc/context/scripts/mkii/texexec.man
index cfefa9a74..841622bcd 100644
--- a/doc/context/scripts/mkii/texexec.man
+++ b/doc/context/scripts/mkii/texexec.man
@@ -1,4 +1,4 @@
-.TH "texexec" "1" "01-01-2014" "version 6.2.1" "TeXExec"
+.TH "texexec" "1" "01-01-2015" "version 6.2.1" "TeXExec"
.SH NAME
.B texexec
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/texmfstart.html b/doc/context/scripts/mkii/texmfstart.html
index 8d44012bf..ed149a0d0 100644
--- a/doc/context/scripts/mkii/texmfstart.html
+++ b/doc/context/scripts/mkii/texmfstart.html
@@ -40,6 +40,7 @@
<tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
<tr><th/><td/><td/></tr>
<tr><th>--script</th><td></td><td>run an mtx script (lua prefered method) (--noquotes), no script gives list</td></tr>
+ <tr><th>--evaluate</th><td></td><td>run code passed on the commandline (between quotes)</td></tr>
<tr><th>--execute</th><td></td><td>run a script or program (texmfstart method) (--noquotes)</td></tr>
<tr><th>--resolve</th><td></td><td>resolve prefixed arguments</td></tr>
<tr><th>--ctxlua</th><td></td><td>run internally (using preloaded libs)</td></tr>
@@ -62,6 +63,7 @@
<tr><th>--verbose</th><td></td><td>give a bit more info</td></tr>
<tr><th>--trackers</th><td>list</td><td>enable given trackers</td></tr>
<tr><th>--progname</th><td>str</td><td>format or backend</td></tr>
+ <tr><th>--systeminfo</th><td>str</td><td>show current operating system, processor, etc</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--edit</th><td></td><td>launch editor with found file</td></tr>
<tr><th>--launch</th><td></td><td>launch files like manuals, assumes os support (--all)</td></tr>
diff --git a/doc/context/scripts/mkii/texmfstart.man b/doc/context/scripts/mkii/texmfstart.man
index 77dc02dc5..8a3a75b64 100644
--- a/doc/context/scripts/mkii/texmfstart.man
+++ b/doc/context/scripts/mkii/texmfstart.man
@@ -1,4 +1,4 @@
-.TH "mtxrun" "1" "01-01-2014" "version 1.31" "ConTeXt TDS Runner Tool"
+.TH "mtxrun" "1" "01-01-2015" "version 1.31" "ConTeXt TDS Runner Tool"
.SH NAME
.B mtxrun
.SH SYNOPSIS
@@ -14,6 +14,9 @@
.B --script
run an mtx script (lua prefered method) (--noquotes), no script gives list
.TP
+.B --evaluate
+run code passed on the commandline (between quotes)
+.TP
.B --execute
run a script or program (texmfstart method) (--noquotes)
.TP
@@ -71,6 +74,9 @@ enable given trackers
.B --progname=str
format or backend
.TP
+.B --systeminfo=str
+show current operating system, processor, etc
+.TP
.B --edit
launch editor with found file
.TP
diff --git a/doc/context/scripts/mkii/texmfstart.xml b/doc/context/scripts/mkii/texmfstart.xml
index 249a9bb54..c7e719541 100644
--- a/doc/context/scripts/mkii/texmfstart.xml
+++ b/doc/context/scripts/mkii/texmfstart.xml
@@ -8,6 +8,7 @@
<category name="basic">
<subcategory>
<flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="evaluate"><short>run code passed on the commandline (between quotes)</short></flag>
<flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
<flag name="resolve"><short>resolve prefixed arguments</short></flag>
<flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
@@ -33,6 +34,7 @@
<flag name="verbose"><short>give a bit more info</short></flag>
<flag name="trackers" value="list"><short>enable given trackers</short></flag>
<flag name="progname" value="str"><short>format or backend</short></flag>
+ <flag name="systeminfo" value="str"><short>show current operating system, processor, etc</short></flag>
</subcategory>
<subcategory>
<flag name="edit"><short>launch editor with found file</short></flag>
diff --git a/doc/context/scripts/mkii/textools.man b/doc/context/scripts/mkii/textools.man
index ce26c5172..c256969fd 100644
--- a/doc/context/scripts/mkii/textools.man
+++ b/doc/context/scripts/mkii/textools.man
@@ -1,4 +1,4 @@
-.TH "textools" "1" "01-01-2014" "version 1.3.1" "TeXTools"
+.TH "textools" "1" "01-01-2015" "version 1.3.1" "TeXTools"
.SH NAME
.B textools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/texutil.man b/doc/context/scripts/mkii/texutil.man
index 0dab17baa..c92f51a6f 100644
--- a/doc/context/scripts/mkii/texutil.man
+++ b/doc/context/scripts/mkii/texutil.man
@@ -1,4 +1,4 @@
-.TH "texutil" "1" "01-01-2014" "version 9.1.0" "TeXUtil"
+.TH "texutil" "1" "01-01-2015" "version 9.1.0" "TeXUtil"
.SH NAME
.B texutil
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/tmftools.man b/doc/context/scripts/mkii/tmftools.man
index 980441719..780efb7eb 100644
--- a/doc/context/scripts/mkii/tmftools.man
+++ b/doc/context/scripts/mkii/tmftools.man
@@ -1,4 +1,4 @@
-.TH "tmftools" "1" "01-01-2014" "version 1.1.0" "TMFTools"
+.TH "tmftools" "1" "01-01-2015" "version 1.1.0" "TMFTools"
.SH NAME
.B tmftools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkii/xmltools.man b/doc/context/scripts/mkii/xmltools.man
index 2e61a1431..d43cc1a03 100644
--- a/doc/context/scripts/mkii/xmltools.man
+++ b/doc/context/scripts/mkii/xmltools.man
@@ -1,4 +1,4 @@
-.TH "xmltools" "1" "01-01-2014" "version 1.2.2" "XMLTools"
+.TH "xmltools" "1" "01-01-2015" "version 1.2.2" "XMLTools"
.SH NAME
.B xmltools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/context.html b/doc/context/scripts/mkiv/context.html
index d285af311..b409de6e7 100644
--- a/doc/context/scripts/mkiv/context.html
+++ b/doc/context/scripts/mkiv/context.html
@@ -14,7 +14,7 @@
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
- <title>ConTeXt Process Management 0.60</title>
+ <title>ConTeXt Process Management 0.61</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<style type="text/css">
body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
@@ -24,7 +24,7 @@
</head>
<body>
<div id="top"> <div id="top-one">
- <div id="top-two">ConTeXt Process Management 0.60 </div>
+ <div id="top-two">ConTeXt Process Management 0.61 </div>
</div>
</div>
<div id="bottom"> <div id="bottom-one">
@@ -75,6 +75,9 @@
<tr><th>--jiton</th><td></td><td>use luajittex with jit turned on (in most cases not faster, even slower)</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--once</th><td></td><td>only run once (no multipass data file is produced)</td></tr>
+ <tr><th>--runs</th><td></td><td>process at most this many times</td></tr>
+ <tr><th>--forcedruns</th><td></td><td>process this many times (permits for optimization trial runs)</td></tr>
+ <tr><th/><td/><td/></tr>
<tr><th>--batchmode</th><td></td><td>run without stopping and do not show messages on the console</td></tr>
<tr><th>--nonstopmode</th><td></td><td>run without stopping</td></tr>
<tr><th>--synctex</th><td></td><td>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</td></tr>
@@ -92,6 +95,8 @@
<tr><th>--update</th><td></td><td>update context from website (not to be confused with contextgarden)</td></tr>
<tr><th>--profile</th><td></td><td>profile job (use: mtxrun --script profile --analyze)</td></tr>
<tr><th>--timing</th><td></td><td>generate timing and statistics overview</td></tr>
+ <tr><th>--keeptuc</th><td></td><td>keep previous tuc files (jobname-tuc-[run].tmp)</td></tr>
+ <tr><th>--keeplog</th><td></td><td>keep previous log files (jobname-log-[run].tmp)</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--extra=name</th><td></td><td>process extra (mtx-context-... in distribution)</td></tr>
<tr><th>--extras</th><td></td><td>show extras</td></tr>
@@ -102,6 +107,8 @@
<tr><th>--mkii</th><td></td><td>process file with texexec</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--pipe</th><td></td><td>do not check for file and enter scroll mode (--dummyfile=whatever.tmp)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--sandbox</th><td></td><td>process file in a limited environment</td></tr>
</table>
<br/>
</div>
diff --git a/doc/context/scripts/mkiv/context.man b/doc/context/scripts/mkiv/context.man
index ea12beb06..0c2f607f2 100644
--- a/doc/context/scripts/mkiv/context.man
+++ b/doc/context/scripts/mkiv/context.man
@@ -1,4 +1,4 @@
-.TH "mtx-context" "1" "01-01-2014" "version 0.60" "ConTeXt Process Management"
+.TH "mtx-context" "1" "01-01-2015" "version 0.60" "ConTeXt Process Management"
.SH NAME
.B mtx-context
.SH SYNOPSIS
@@ -95,6 +95,12 @@ use luajittex with jit turned on (in most cases not faster, even slower)
.B --once
only run once (no multipass data file is produced)
.TP
+.B --runs
+process at most this many times
+.TP
+.B --forcedruns
+process this many times (permits for optimization trial runs)
+.TP
.B --batchmode
run without stopping and do not show messages on the console
.TP
@@ -135,6 +141,12 @@ profile job (use: mtxrun --script profile --analyze)
.B --timing
generate timing and statistics overview
.TP
+.B --keeptuc
+keep previous tuc files (jobname-tuc-[run].tmp)
+.TP
+.B --keeplog
+keep previous log files (jobname-log-[run].tmp)
+.TP
.B --extra=name
process extra (mtx-context-... in distribution)
.TP
@@ -153,6 +165,9 @@ process file with texexec
.TP
.B --pipe
do not check for file and enter scroll mode (--dummyfile=whatever.tmp)
+.TP
+.B --sandbox
+process file in a limited environment
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/context.xml b/doc/context/scripts/mkiv/context.xml
index a3812288f..c41093289 100644
--- a/doc/context/scripts/mkiv/context.xml
+++ b/doc/context/scripts/mkiv/context.xml
@@ -108,6 +108,14 @@
<flag name="once">
<short>only run once (no multipass data file is produced)</short>
</flag>
+ <flag name="runs">
+ <short>process at most this many times</short>
+ </flag>
+ <flag name="forcedruns">
+ <short>process this many times (permits for optimization trial runs)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
<flag name="batchmode">
<short>run without stopping and do not show messages on the console</short>
</flag>
@@ -117,7 +125,7 @@
<flag name="synctex">
<short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short>
</flag>
- </subcategory>
+ </subcategory>
<subcategory>
<flag name="generate">
<short>generate file database etc. (as luatools does)</short>
@@ -134,7 +142,7 @@
<short>assume given file present elsewhere</short>
</flag>
<flag name="nofile">
- <short>use dummy file as jobname</short>
+ <short>use dummy file as jobname</short>
</flag>
</subcategory>
</category>
@@ -144,16 +152,22 @@
<short>update context version number (also provide <ref name="expert"/>, optionally provide <ref name="basepath"/>)</short>
</flag>
<flag name="nostatistics">
- <short>omit runtime statistics at the end of the run</short>
+ <short>omit runtime statistics at the end of the run</short>
</flag>
<flag name="update">
- <short>update context from website (not to be confused with contextgarden)</short>
+ <short>update context from website (not to be confused with contextgarden)</short>
</flag>
- <flag name="profile">
- <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
+ <flag name="profile">
+ <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
</flag>
- <flag name="timing">
- <short>generate timing and statistics overview</short>
+ <flag name="timing">
+ <short>generate timing and statistics overview</short>
+ </flag>
+ <flag name="keeptuc">
+ <short>keep previous tuc files (jobname-tuc-[run].tmp)</short>
+ </flag>
+ <flag name="keeplog">
+ <short>keep previous log files (jobname-log-[run].tmp)</short>
</flag>
</subcategory>
<subcategory>
@@ -182,6 +196,11 @@
<short>do not check for file and enter scroll mode (<ref name="dummyfile"/>=whatever.tmp)</short>
</flag>
</subcategory>
+ <subcategory>
+ <flag name="sandbox">
+ <short>process file in a limited environment</short>
+ </flag>
+ </subcategory>
</category>
</flags>
</application>
diff --git a/doc/context/scripts/mkiv/luatools.man b/doc/context/scripts/mkiv/luatools.man
index 72dea5800..ecdd6aa32 100644
--- a/doc/context/scripts/mkiv/luatools.man
+++ b/doc/context/scripts/mkiv/luatools.man
@@ -1,4 +1,4 @@
-.TH "luatools" "1" "01-01-2014" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
+.TH "luatools" "1" "01-01-2015" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
.SH NAME
.B luatools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-babel.man b/doc/context/scripts/mkiv/mtx-babel.man
index 6f78cf43d..8477eb522 100644
--- a/doc/context/scripts/mkiv/mtx-babel.man
+++ b/doc/context/scripts/mkiv/mtx-babel.man
@@ -1,4 +1,4 @@
-.TH "mtx-babel" "1" "01-01-2014" "version 1.20" "Babel Input To UTF Conversion"
+.TH "mtx-babel" "1" "01-01-2015" "version 1.20" "Babel Input To UTF Conversion"
.SH NAME
.B mtx-babel
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-base.man b/doc/context/scripts/mkiv/mtx-base.man
index 84d67a3fe..08e13b09d 100644
--- a/doc/context/scripts/mkiv/mtx-base.man
+++ b/doc/context/scripts/mkiv/mtx-base.man
@@ -1,4 +1,4 @@
-.TH "mtx-base" "1" "01-01-2014" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
+.TH "mtx-base" "1" "01-01-2015" "version 1.35" "ConTeXt TDS Management Tool (aka luatools)"
.SH NAME
.B mtx-base
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-cache.man b/doc/context/scripts/mkiv/mtx-cache.man
index 02f8a2ec4..cc448a5c8 100644
--- a/doc/context/scripts/mkiv/mtx-cache.man
+++ b/doc/context/scripts/mkiv/mtx-cache.man
@@ -1,4 +1,4 @@
-.TH "mtx-cache" "1" "01-01-2014" "version 0.10" "ConTeXt &error; MetaTeX Cache Management"
+.TH "mtx-cache" "1" "01-01-2015" "version 0.10" "ConTeXt & MetaTeX Cache Management"
.SH NAME
.B mtx-cache
.SH SYNOPSIS
@@ -8,7 +8,7 @@
.I FILENAMES
.B ]
.SH DESCRIPTION
-.B ConTeXt &error; MetaTeX Cache Management
+.B ConTeXt & MetaTeX Cache Management
.SH OPTIONS
.TP
.B --purge
diff --git a/doc/context/scripts/mkiv/mtx-chars.man b/doc/context/scripts/mkiv/mtx-chars.man
index 749f6315e..5c9d79bb3 100644
--- a/doc/context/scripts/mkiv/mtx-chars.man
+++ b/doc/context/scripts/mkiv/mtx-chars.man
@@ -1,4 +1,4 @@
-.TH "mtx-chars" "1" "01-01-2014" "version 0.10" "MkII Character Table Generators"
+.TH "mtx-chars" "1" "01-01-2015" "version 0.10" "MkII Character Table Generators"
.SH NAME
.B mtx-chars
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-check.html b/doc/context/scripts/mkiv/mtx-check.html
index bf1f7da13..0ebbe750f 100644
--- a/doc/context/scripts/mkiv/mtx-check.html
+++ b/doc/context/scripts/mkiv/mtx-check.html
@@ -39,7 +39,7 @@
<table>
<tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
<tr><th/><td/><td/></tr>
- <tr><th>--convert</th><td></td><td>check tex file for errors</td></tr>
+ <tr><th>--check</th><td></td><td>check tex file for errors</td></tr>
</table>
<br/>
</div>
diff --git a/doc/context/scripts/mkiv/mtx-check.man b/doc/context/scripts/mkiv/mtx-check.man
index 3893081cb..ca18bc1a6 100644
--- a/doc/context/scripts/mkiv/mtx-check.man
+++ b/doc/context/scripts/mkiv/mtx-check.man
@@ -1,4 +1,4 @@
-.TH "mtx-check" "1" "01-01-2014" "version 0.10" "Basic ConTeXt Syntax Checking"
+.TH "mtx-check" "1" "01-01-2015" "version 0.10" "Basic ConTeXt Syntax Checking"
.SH NAME
.B mtx-check
.SH SYNOPSIS
@@ -11,7 +11,7 @@
.B Basic ConTeXt Syntax Checking
.SH OPTIONS
.TP
-.B --convert
+.B --check
check tex file for errors
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/mtx-check.xml b/doc/context/scripts/mkiv/mtx-check.xml
index a8dcb82fd..b1c516313 100644
--- a/doc/context/scripts/mkiv/mtx-check.xml
+++ b/doc/context/scripts/mkiv/mtx-check.xml
@@ -8,7 +8,7 @@
<flags>
<category name="basic">
<subcategory>
- <flag name="convert"><short>check tex file for errors</short></flag>
+ <flag name="check"><short>check tex file for errors</short></flag>
</subcategory>
</category>
</flags>
diff --git a/doc/context/scripts/mkiv/mtx-colors.man b/doc/context/scripts/mkiv/mtx-colors.man
index 68959ae74..a969e474c 100644
--- a/doc/context/scripts/mkiv/mtx-colors.man
+++ b/doc/context/scripts/mkiv/mtx-colors.man
@@ -1,4 +1,4 @@
-.TH "mtx-colors" "1" "01-01-2014" "version 0.10" "ConTeXt Color Management"
+.TH "mtx-colors" "1" "01-01-2015" "version 0.10" "ConTeXt Color Management"
.SH NAME
.B mtx-colors
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-context.html b/doc/context/scripts/mkiv/mtx-context.html
index d285af311..b409de6e7 100644
--- a/doc/context/scripts/mkiv/mtx-context.html
+++ b/doc/context/scripts/mkiv/mtx-context.html
@@ -14,7 +14,7 @@
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
- <title>ConTeXt Process Management 0.60</title>
+ <title>ConTeXt Process Management 0.61</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<style type="text/css">
body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
@@ -24,7 +24,7 @@
</head>
<body>
<div id="top"> <div id="top-one">
- <div id="top-two">ConTeXt Process Management 0.60 </div>
+ <div id="top-two">ConTeXt Process Management 0.61 </div>
</div>
</div>
<div id="bottom"> <div id="bottom-one">
@@ -75,6 +75,9 @@
<tr><th>--jiton</th><td></td><td>use luajittex with jit turned on (in most cases not faster, even slower)</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--once</th><td></td><td>only run once (no multipass data file is produced)</td></tr>
+ <tr><th>--runs</th><td></td><td>process at most this many times</td></tr>
+ <tr><th>--forcedruns</th><td></td><td>process this many times (permits for optimization trial runs)</td></tr>
+ <tr><th/><td/><td/></tr>
<tr><th>--batchmode</th><td></td><td>run without stopping and do not show messages on the console</td></tr>
<tr><th>--nonstopmode</th><td></td><td>run without stopping</td></tr>
<tr><th>--synctex</th><td></td><td>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</td></tr>
@@ -92,6 +95,8 @@
<tr><th>--update</th><td></td><td>update context from website (not to be confused with contextgarden)</td></tr>
<tr><th>--profile</th><td></td><td>profile job (use: mtxrun --script profile --analyze)</td></tr>
<tr><th>--timing</th><td></td><td>generate timing and statistics overview</td></tr>
+ <tr><th>--keeptuc</th><td></td><td>keep previous tuc files (jobname-tuc-[run].tmp)</td></tr>
+ <tr><th>--keeplog</th><td></td><td>keep previous log files (jobname-log-[run].tmp)</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--extra=name</th><td></td><td>process extra (mtx-context-... in distribution)</td></tr>
<tr><th>--extras</th><td></td><td>show extras</td></tr>
@@ -102,6 +107,8 @@
<tr><th>--mkii</th><td></td><td>process file with texexec</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--pipe</th><td></td><td>do not check for file and enter scroll mode (--dummyfile=whatever.tmp)</td></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--sandbox</th><td></td><td>process file in a limited environment</td></tr>
</table>
<br/>
</div>
diff --git a/doc/context/scripts/mkiv/mtx-context.man b/doc/context/scripts/mkiv/mtx-context.man
index ea12beb06..0c2f607f2 100644
--- a/doc/context/scripts/mkiv/mtx-context.man
+++ b/doc/context/scripts/mkiv/mtx-context.man
@@ -1,4 +1,4 @@
-.TH "mtx-context" "1" "01-01-2014" "version 0.60" "ConTeXt Process Management"
+.TH "mtx-context" "1" "01-01-2015" "version 0.60" "ConTeXt Process Management"
.SH NAME
.B mtx-context
.SH SYNOPSIS
@@ -95,6 +95,12 @@ use luajittex with jit turned on (in most cases not faster, even slower)
.B --once
only run once (no multipass data file is produced)
.TP
+.B --runs
+process at most this many times
+.TP
+.B --forcedruns
+process this many times (permits for optimization trial runs)
+.TP
.B --batchmode
run without stopping and do not show messages on the console
.TP
@@ -135,6 +141,12 @@ profile job (use: mtxrun --script profile --analyze)
.B --timing
generate timing and statistics overview
.TP
+.B --keeptuc
+keep previous tuc files (jobname-tuc-[run].tmp)
+.TP
+.B --keeplog
+keep previous log files (jobname-log-[run].tmp)
+.TP
.B --extra=name
process extra (mtx-context-... in distribution)
.TP
@@ -153,6 +165,9 @@ process file with texexec
.TP
.B --pipe
do not check for file and enter scroll mode (--dummyfile=whatever.tmp)
+.TP
+.B --sandbox
+process file in a limited environment
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/mtx-context.xml b/doc/context/scripts/mkiv/mtx-context.xml
index a3812288f..c41093289 100644
--- a/doc/context/scripts/mkiv/mtx-context.xml
+++ b/doc/context/scripts/mkiv/mtx-context.xml
@@ -108,6 +108,14 @@
<flag name="once">
<short>only run once (no multipass data file is produced)</short>
</flag>
+ <flag name="runs">
+ <short>process at most this many times</short>
+ </flag>
+ <flag name="forcedruns">
+    <short>process this many times (allows for optimization trial runs)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
<flag name="batchmode">
<short>run without stopping and do not show messages on the console</short>
</flag>
@@ -117,7 +125,7 @@
<flag name="synctex">
<short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short>
</flag>
- </subcategory>
+ </subcategory>
<subcategory>
<flag name="generate">
<short>generate file database etc. (as luatools does)</short>
@@ -134,7 +142,7 @@
<short>assume given file present elsewhere</short>
</flag>
<flag name="nofile">
- <short>use dummy file as jobname</short>
+ <short>use dummy file as jobname</short>
</flag>
</subcategory>
</category>
@@ -144,16 +152,22 @@
<short>update context version number (also provide <ref name="expert"/>, optionally provide <ref name="basepath"/>)</short>
</flag>
<flag name="nostatistics">
- <short>omit runtime statistics at the end of the run</short>
+ <short>omit runtime statistics at the end of the run</short>
</flag>
<flag name="update">
- <short>update context from website (not to be confused with contextgarden)</short>
+ <short>update context from website (not to be confused with contextgarden)</short>
</flag>
- <flag name="profile">
- <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
+ <flag name="profile">
+ <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
</flag>
- <flag name="timing">
- <short>generate timing and statistics overview</short>
+ <flag name="timing">
+ <short>generate timing and statistics overview</short>
+ </flag>
+ <flag name="keeptuc">
+ <short>keep previous tuc files (jobname-tuc-[run].tmp)</short>
+ </flag>
+ <flag name="keeplog">
+ <short>keep previous log files (jobname-log-[run].tmp)</short>
</flag>
</subcategory>
<subcategory>
@@ -182,6 +196,11 @@
<short>do not check for file and enter scroll mode (<ref name="dummyfile"/>=whatever.tmp)</short>
</flag>
</subcategory>
+ <subcategory>
+ <flag name="sandbox">
+ <short>process file in a limited environment</short>
+ </flag>
+ </subcategory>
</category>
</flags>
</application>
diff --git a/doc/context/scripts/mkiv/mtx-epub.html b/doc/context/scripts/mkiv/mtx-epub.html
index 45015a34e..9b63234be 100644
--- a/doc/context/scripts/mkiv/mtx-epub.html
+++ b/doc/context/scripts/mkiv/mtx-epub.html
@@ -14,7 +14,7 @@
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
- <title>ConTeXt EPUB Helpers 0.12</title>
+ <title>ConTeXt EPUB Helpers 1.10</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<style type="text/css">
body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
@@ -24,7 +24,7 @@
</head>
<body>
<div id="top"> <div id="top-one">
- <div id="top-two">ConTeXt EPUB Helpers 0.12 </div>
+ <div id="top-two">ConTeXt EPUB Helpers 1.10 </div>
</div>
</div>
<div id="bottom"> <div id="bottom-one">
@@ -40,6 +40,11 @@
<tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
<tr><th/><td/><td/></tr>
<tr><th>--make</th><td></td><td>create epub zip file</td></tr>
+ <tr><th>--purge</th><td></td><td>remove obsolete files</td></tr>
+ <tr><th>--rename</th><td></td><td>rename images to sane names</td></tr>
+ <tr><th>--svgmath</th><td></td><td>convert mathml to svg</td></tr>
+ <tr><th>--svgstyle</th><td></td><td>use given tex style for svg generation (overloads style in specification)</td></tr>
+ <tr><th>--all</th><td></td><td>assume: --purge --rename --svgmath (for fast testing)</td></tr>
</table>
<br/>
<h1>Example</h1>
diff --git a/doc/context/scripts/mkiv/mtx-epub.man b/doc/context/scripts/mkiv/mtx-epub.man
index 52bcd1d88..bb33c23df 100644
--- a/doc/context/scripts/mkiv/mtx-epub.man
+++ b/doc/context/scripts/mkiv/mtx-epub.man
@@ -1,4 +1,4 @@
-.TH "mtx-epub" "1" "01-01-2014" "version 0.12" "ConTeXt EPUB Helpers"
+.TH "mtx-epub" "1" "01-01-2015" "version 1.10" "ConTeXt EPUB Helpers"
.SH NAME
.B mtx-epub
.SH SYNOPSIS
@@ -13,6 +13,21 @@
.TP
.B --make
create epub zip file
+.TP
+.B --purge
+remove obsolete files
+.TP
+.B --rename
+rename images to sane names
+.TP
+.B --svgmath
+convert mathml to svg
+.TP
+.B --svgstyle
+use given tex style for svg generation (overloads style in specification)
+.TP
+.B --all
+assume: --purge --rename --svgmath (for fast testing)
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/mtx-epub.xml b/doc/context/scripts/mkiv/mtx-epub.xml
index 5ef5dc81b..4ef0f85e3 100644
--- a/doc/context/scripts/mkiv/mtx-epub.xml
+++ b/doc/context/scripts/mkiv/mtx-epub.xml
@@ -3,12 +3,17 @@
<metadata>
<entry name="name">mtx-epub</entry>
<entry name="detail">ConTeXt EPUB Helpers</entry>
- <entry name="version">0.12</entry>
+ <entry name="version">1.10</entry>
</metadata>
<flags>
<category name="basic">
<subcategory>
<flag name="make"><short>create epub zip file</short></flag>
+ <flag name="purge"><short>remove obsolete files</short></flag>
+ <flag name="rename"><short>rename images to sane names</short></flag>
+ <flag name="svgmath"><short>convert mathml to svg</short></flag>
+ <flag name="svgstyle"><short>use given tex style for svg generation (overloads style in specification)</short></flag>
+ <flag name="all"><short>assume: --purge --rename --svgmath (for fast testing)</short></flag>
</subcategory>
</category>
</flags>
diff --git a/doc/context/scripts/mkiv/mtx-fcd.man b/doc/context/scripts/mkiv/mtx-fcd.man
index 41d5120e0..311f90e4a 100644
--- a/doc/context/scripts/mkiv/mtx-fcd.man
+++ b/doc/context/scripts/mkiv/mtx-fcd.man
@@ -1,4 +1,4 @@
-.TH "mtx-fcd" "1" "01-01-2014" "version 1.00" "Fast Directory Change"
+.TH "mtx-fcd" "1" "01-01-2015" "version 1.00" "Fast Directory Change"
.SH NAME
.B mtx-fcd
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-flac.man b/doc/context/scripts/mkiv/mtx-flac.man
index 0d2c94308..4ab875899 100644
--- a/doc/context/scripts/mkiv/mtx-flac.man
+++ b/doc/context/scripts/mkiv/mtx-flac.man
@@ -1,4 +1,4 @@
-.TH "mtx-flac" "1" "01-01-2014" "version 0.10" "ConTeXt Flac Helpers"
+.TH "mtx-flac" "1" "01-01-2015" "version 0.10" "ConTeXt Flac Helpers"
.SH NAME
.B mtx-flac
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-fonts.html b/doc/context/scripts/mkiv/mtx-fonts.html
index 3ddc2fa8e..97a16c79a 100644
--- a/doc/context/scripts/mkiv/mtx-fonts.html
+++ b/doc/context/scripts/mkiv/mtx-fonts.html
@@ -53,7 +53,7 @@
<tr><th>--filter</th><td>list</td><td>key-value pairs</td></tr>
<tr><th>--all</th><td></td><td>show all found instances (combined with other flags)</td></tr>
<tr><th>--info</th><td></td><td>give more details</td></tr>
- <tr><th>--track</th><td>list</td><td>enable trackers</td></tr>
+ <tr><th>--trackers</th><td>list</td><td>enable trackers</td></tr>
<tr><th>--statistics</th><td></td><td>some info about the database</td></tr>
</table>
<br/>
diff --git a/doc/context/scripts/mkiv/mtx-fonts.man b/doc/context/scripts/mkiv/mtx-fonts.man
index 9136e30fe..c0806a32c 100644
--- a/doc/context/scripts/mkiv/mtx-fonts.man
+++ b/doc/context/scripts/mkiv/mtx-fonts.man
@@ -1,4 +1,4 @@
-.TH "mtx-fonts" "1" "01-01-2014" "version 0.21" "ConTeXt Font Database Management"
+.TH "mtx-fonts" "1" "01-01-2015" "version 0.21" "ConTeXt Font Database Management"
.SH NAME
.B mtx-fonts
.SH SYNOPSIS
@@ -44,7 +44,7 @@ show all found instances (combined with other flags)
.B --info
give more details
.TP
-.B --track=list
+.B --trackers=list
enable trackers
.TP
.B --statistics
diff --git a/doc/context/scripts/mkiv/mtx-fonts.xml b/doc/context/scripts/mkiv/mtx-fonts.xml
index b138a064a..c3c571ca8 100644
--- a/doc/context/scripts/mkiv/mtx-fonts.xml
+++ b/doc/context/scripts/mkiv/mtx-fonts.xml
@@ -25,7 +25,7 @@
<flag name="filter" value="list"><short>key-value pairs</short></flag>
<flag name="all"><short>show all found instances (combined with other flags)</short></flag>
<flag name="info"><short>give more details</short></flag>
- <flag name="track" value="list"><short>enable trackers</short></flag>
+ <flag name="trackers" value="list"><short>enable trackers</short></flag>
<flag name="statistics"><short>some info about the database</short></flag>
</subcategory>
</category>
diff --git a/doc/context/scripts/mkiv/mtx-grep.man b/doc/context/scripts/mkiv/mtx-grep.man
index 4bc7ea62e..6b85f541f 100644
--- a/doc/context/scripts/mkiv/mtx-grep.man
+++ b/doc/context/scripts/mkiv/mtx-grep.man
@@ -1,4 +1,4 @@
-.TH "mtx-grep" "1" "01-01-2014" "version 0.10" "Simple Grepper"
+.TH "mtx-grep" "1" "01-01-2015" "version 0.10" "Simple Grepper"
.SH NAME
.B mtx-grep
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-interface.man b/doc/context/scripts/mkiv/mtx-interface.man
index 862fa08bc..36bf8bc13 100644
--- a/doc/context/scripts/mkiv/mtx-interface.man
+++ b/doc/context/scripts/mkiv/mtx-interface.man
@@ -1,4 +1,4 @@
-.TH "mtx-interface" "1" "01-01-2014" "version 0.13" "ConTeXt Interface Related Goodies"
+.TH "mtx-interface" "1" "01-01-2015" "version 0.13" "ConTeXt Interface Related Goodies"
.SH NAME
.B mtx-interface
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-metapost.man b/doc/context/scripts/mkiv/mtx-metapost.man
index 0c60e33e2..37b1b7c9f 100644
--- a/doc/context/scripts/mkiv/mtx-metapost.man
+++ b/doc/context/scripts/mkiv/mtx-metapost.man
@@ -1,4 +1,4 @@
-.TH "mtx-metapost" "1" "01-01-2014" "version 0.10" "MetaPost to PDF processor"
+.TH "mtx-metapost" "1" "01-01-2015" "version 0.10" "MetaPost to PDF processor"
.SH NAME
.B mtx-metapost
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-metatex.man b/doc/context/scripts/mkiv/mtx-metatex.man
index a85268645..72635044f 100644
--- a/doc/context/scripts/mkiv/mtx-metatex.man
+++ b/doc/context/scripts/mkiv/mtx-metatex.man
@@ -1,4 +1,4 @@
-.TH "mtx-metatex" "1" "01-01-2014" "version 0.10" "MetaTeX Process Management"
+.TH "mtx-metatex" "1" "01-01-2015" "version 0.10" "MetaTeX Process Management"
.SH NAME
.B mtx-metatex
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-modules.man b/doc/context/scripts/mkiv/mtx-modules.man
index ebdc51c9e..a51a1b00e 100644
--- a/doc/context/scripts/mkiv/mtx-modules.man
+++ b/doc/context/scripts/mkiv/mtx-modules.man
@@ -1,4 +1,4 @@
-.TH "mtx-modules" "1" "01-01-2014" "version 1.00" "ConTeXt Module Documentation Generators"
+.TH "mtx-modules" "1" "01-01-2015" "version 1.00" "ConTeXt Module Documentation Generators"
.SH NAME
.B mtx-modules
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-package.man b/doc/context/scripts/mkiv/mtx-package.man
index 6df6cf0ca..83c140f35 100644
--- a/doc/context/scripts/mkiv/mtx-package.man
+++ b/doc/context/scripts/mkiv/mtx-package.man
@@ -1,4 +1,4 @@
-.TH "mtx-package" "1" "01-01-2014" "version 0.10" "Distribution Related Goodies"
+.TH "mtx-package" "1" "01-01-2015" "version 0.10" "Distribution Related Goodies"
.SH NAME
.B mtx-package
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-patterns.html b/doc/context/scripts/mkiv/mtx-patterns.html
index 07d5265c8..8aa0d8132 100644
--- a/doc/context/scripts/mkiv/mtx-patterns.html
+++ b/doc/context/scripts/mkiv/mtx-patterns.html
@@ -46,6 +46,7 @@
<tr><th>--specification</th><td></td><td>additional patterns: e.g.: =cy,hyph-cy,welsh</td></tr>
<tr><th>--compress</th><td></td><td>compress data</td></tr>
<tr><th>--words</th><td></td><td>update words in given file</td></tr>
+    <tr><th>--hyphenate</th><td></td><td>show hyphenated words</td></tr>
</table>
<br/>
<h1>Examples</h1>
@@ -53,6 +54,7 @@
<br/><tt>mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns</tt>
<br/><tt>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns</tt>
<br/><tt>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns</tt>
+<br/><tt>mtxrun --script pattern --hyphenate --language=nl --left=3 nogalwiedes inderdaad</tt>
<br/><br/> </div>
</div>
</body>
diff --git a/doc/context/scripts/mkiv/mtx-patterns.man b/doc/context/scripts/mkiv/mtx-patterns.man
index 3d6bf4d3c..e93c1aff0 100644
--- a/doc/context/scripts/mkiv/mtx-patterns.man
+++ b/doc/context/scripts/mkiv/mtx-patterns.man
@@ -1,4 +1,4 @@
-.TH "mtx-patterns" "1" "01-01-2014" "version 0.20" "ConTeXt Pattern File Management"
+.TH "mtx-patterns" "1" "01-01-2015" "version 0.20" "ConTeXt Pattern File Management"
.SH NAME
.B mtx-patterns
.SH SYNOPSIS
@@ -31,6 +31,9 @@ compress data
.TP
.B --words
update words in given file
+.TP
+.B --hyphenate
+show hyphenated words
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/mtx-patterns.xml b/doc/context/scripts/mkiv/mtx-patterns.xml
index 132fd4d20..86f3aa480 100644
--- a/doc/context/scripts/mkiv/mtx-patterns.xml
+++ b/doc/context/scripts/mkiv/mtx-patterns.xml
@@ -15,6 +15,7 @@
<flag name="specification"><short>additional patterns: e.g.: =cy,hyph-cy,welsh</short></flag>
<flag name="compress"><short>compress data</short></flag>
<flag name="words"><short>update words in given file</short></flag>
+    <flag name="hyphenate"><short>show hyphenated words</short></flag>
</subcategory>
</category>
</flags>
@@ -26,6 +27,7 @@
<example><command>mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns</command></example>
<example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns</command></example>
<example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns</command></example>
+ <example><command>mtxrun --script pattern --hyphenate --language=nl --left=3 nogalwiedes inderdaad</command></example>
</subcategory>
</category>
</examples>
diff --git a/doc/context/scripts/mkiv/mtx-pdf.man b/doc/context/scripts/mkiv/mtx-pdf.man
index 979148462..29b5ad420 100644
--- a/doc/context/scripts/mkiv/mtx-pdf.man
+++ b/doc/context/scripts/mkiv/mtx-pdf.man
@@ -1,4 +1,4 @@
-.TH "mtx-pdf" "1" "01-01-2014" "version 0.10" "ConTeXt PDF Helpers"
+.TH "mtx-pdf" "1" "01-01-2015" "version 0.10" "ConTeXt PDF Helpers"
.SH NAME
.B mtx-pdf
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-plain.man b/doc/context/scripts/mkiv/mtx-plain.man
index 717c9b6e1..c2e0e0bd9 100644
--- a/doc/context/scripts/mkiv/mtx-plain.man
+++ b/doc/context/scripts/mkiv/mtx-plain.man
@@ -1,4 +1,4 @@
-.TH "mtx-plain" "1" "01-01-2014" "version 1.00" "Plain TeX Runner"
+.TH "mtx-plain" "1" "01-01-2015" "version 1.00" "Plain TeX Runner"
.SH NAME
.B mtx-plain
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-profile.man b/doc/context/scripts/mkiv/mtx-profile.man
index 9b4671f13..6aa8e2087 100644
--- a/doc/context/scripts/mkiv/mtx-profile.man
+++ b/doc/context/scripts/mkiv/mtx-profile.man
@@ -1,4 +1,4 @@
-.TH "mtx-profile" "1" "01-01-2014" "version 1.00" "ConTeXt MkIV LuaTeX Profiler"
+.TH "mtx-profile" "1" "01-01-2015" "version 1.00" "ConTeXt MkIV LuaTeX Profiler"
.SH NAME
.B mtx-profile
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-rsync.man b/doc/context/scripts/mkiv/mtx-rsync.man
index 5119eae29..195b6b0f0 100644
--- a/doc/context/scripts/mkiv/mtx-rsync.man
+++ b/doc/context/scripts/mkiv/mtx-rsync.man
@@ -1,4 +1,4 @@
-.TH "mtx-rsync" "1" "01-01-2014" "version 0.10" "Rsync Helpers"
+.TH "mtx-rsync" "1" "01-01-2015" "version 0.10" "Rsync Helpers"
.SH NAME
.B mtx-rsync
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-scite.html b/doc/context/scripts/mkiv/mtx-scite.html
index c4dd157e0..24229db73 100644
--- a/doc/context/scripts/mkiv/mtx-scite.html
+++ b/doc/context/scripts/mkiv/mtx-scite.html
@@ -40,6 +40,8 @@
<tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
<tr><th/><td/><td/></tr>
<tr><th>--words</th><td></td><td>convert spell-*.txt into spell-*.lua</td></tr>
+ <tr><th>--tree</th><td></td><td>converts a tree into an html tree (--source --target --numbers)</td></tr>
+ <tr><th>--file</th><td></td><td>converts a file into an html tree (--source --target --numbers --lexer)</td></tr>
</table>
<br/>
</div>
diff --git a/doc/context/scripts/mkiv/mtx-scite.man b/doc/context/scripts/mkiv/mtx-scite.man
index ece69a9a6..cbaed08d1 100644
--- a/doc/context/scripts/mkiv/mtx-scite.man
+++ b/doc/context/scripts/mkiv/mtx-scite.man
@@ -1,4 +1,4 @@
-.TH "mtx-scite" "1" "01-01-2014" "version 1.00" "Scite Helper Script"
+.TH "mtx-scite" "1" "01-01-2015" "version 1.00" "Scite Helper Script"
.SH NAME
.B mtx-scite
.SH SYNOPSIS
@@ -13,6 +13,12 @@
.TP
.B --words
convert spell-*.txt into spell-*.lua
+.TP
+.B --tree
+converts a tree into an html tree (--source --target --numbers)
+.TP
+.B --file
+converts a file into an html tree (--source --target --numbers --lexer)
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/mtx-scite.xml b/doc/context/scripts/mkiv/mtx-scite.xml
index 87fe506dc..65ad8736a 100644
--- a/doc/context/scripts/mkiv/mtx-scite.xml
+++ b/doc/context/scripts/mkiv/mtx-scite.xml
@@ -9,6 +9,8 @@
<category name="basic">
<subcategory>
<flag name="words"><short>convert spell-*.txt into spell-*.lua</short></flag>
+ <flag name="tree"><short>converts a tree into an html tree (--source --target --numbers)</short></flag>
+ <flag name="file"><short>converts a file into an html tree (--source --target --numbers --lexer)</short></flag>
</subcategory>
</category>
</flags>
diff --git a/doc/context/scripts/mkiv/mtx-server.man b/doc/context/scripts/mkiv/mtx-server.man
index a43fd449c..f3d3ae6e4 100644
--- a/doc/context/scripts/mkiv/mtx-server.man
+++ b/doc/context/scripts/mkiv/mtx-server.man
@@ -1,4 +1,4 @@
-.TH "mtx-server" "1" "01-01-2014" "version 0.10" "Simple Webserver For Helpers"
+.TH "mtx-server" "1" "01-01-2015" "version 0.10" "Simple Webserver For Helpers"
.SH NAME
.B mtx-server
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-texworks.man b/doc/context/scripts/mkiv/mtx-texworks.man
index 21dd62fe4..b9cc2ed98 100644
--- a/doc/context/scripts/mkiv/mtx-texworks.man
+++ b/doc/context/scripts/mkiv/mtx-texworks.man
@@ -1,4 +1,4 @@
-.TH "mtx-texworks" "1" "01-01-2014" "version 1.00" "TeXworks Startup Script"
+.TH "mtx-texworks" "1" "01-01-2015" "version 1.00" "TeXworks Startup Script"
.SH NAME
.B mtx-texworks
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-timing.man b/doc/context/scripts/mkiv/mtx-timing.man
index 19aaca173..2f7584f3d 100644
--- a/doc/context/scripts/mkiv/mtx-timing.man
+++ b/doc/context/scripts/mkiv/mtx-timing.man
@@ -1,4 +1,4 @@
-.TH "mtx-timing" "1" "01-01-2014" "version 0.10" "ConTeXt Timing Tools"
+.TH "mtx-timing" "1" "01-01-2015" "version 0.10" "ConTeXt Timing Tools"
.SH NAME
.B mtx-timing
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-tools.man b/doc/context/scripts/mkiv/mtx-tools.man
index 45011d21d..408d467cd 100644
--- a/doc/context/scripts/mkiv/mtx-tools.man
+++ b/doc/context/scripts/mkiv/mtx-tools.man
@@ -1,4 +1,4 @@
-.TH "mtx-tools" "1" "01-01-2014" "version 1.01" "Some File Related Goodies"
+.TH "mtx-tools" "1" "01-01-2015" "version 1.01" "Some File Related Goodies"
.SH NAME
.B mtx-tools
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-unzip.man b/doc/context/scripts/mkiv/mtx-unzip.man
index ffddc1cf9..d9e2c8447 100644
--- a/doc/context/scripts/mkiv/mtx-unzip.man
+++ b/doc/context/scripts/mkiv/mtx-unzip.man
@@ -1,4 +1,4 @@
-.TH "mtx-unzip" "1" "01-01-2014" "version 0.10" "Simple Unzipper"
+.TH "mtx-unzip" "1" "01-01-2015" "version 0.10" "Simple Unzipper"
.SH NAME
.B mtx-unzip
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-update.html b/doc/context/scripts/mkiv/mtx-update.html
index bda0822e9..1cc6bd3af 100644
--- a/doc/context/scripts/mkiv/mtx-update.html
+++ b/doc/context/scripts/mkiv/mtx-update.html
@@ -14,7 +14,7 @@
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
- <title>ConTeXt Minimals Updater 1.01</title>
+ <title>ConTeXt Minimals Updater 1.02</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<style type="text/css">
body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
@@ -24,7 +24,7 @@
</head>
<body>
<div id="top"> <div id="top-one">
- <div id="top-two">ConTeXt Minimals Updater 1.01 </div>
+ <div id="top-two">ConTeXt Minimals Updater 1.02 </div>
</div>
</div>
<div id="bottom"> <div id="bottom-one">
diff --git a/doc/context/scripts/mkiv/mtx-update.man b/doc/context/scripts/mkiv/mtx-update.man
index 7766122fb..e27c2ab43 100644
--- a/doc/context/scripts/mkiv/mtx-update.man
+++ b/doc/context/scripts/mkiv/mtx-update.man
@@ -1,4 +1,4 @@
-.TH "mtx-update" "1" "01-01-2014" "version 1.01" "ConTeXt Minimals Updater"
+.TH "mtx-update" "1" "01-01-2015" "version 1.02" "ConTeXt Minimals Updater"
.SH NAME
.B mtx-update
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-update.xml b/doc/context/scripts/mkiv/mtx-update.xml
index 13c25ae13..c5d9205c3 100644
--- a/doc/context/scripts/mkiv/mtx-update.xml
+++ b/doc/context/scripts/mkiv/mtx-update.xml
@@ -3,7 +3,7 @@
<metadata>
<entry name="name">mtx-update</entry>
<entry name="detail">ConTeXt Minimals Updater</entry>
- <entry name="version">1.01</entry>
+ <entry name="version">1.02</entry>
</metadata>
<flags>
<category name="basic">
diff --git a/doc/context/scripts/mkiv/mtx-watch.man b/doc/context/scripts/mkiv/mtx-watch.man
index babd2fcca..d8fab3acd 100644
--- a/doc/context/scripts/mkiv/mtx-watch.man
+++ b/doc/context/scripts/mkiv/mtx-watch.man
@@ -1,4 +1,4 @@
-.TH "mtx-watch" "1" "01-01-2014" "version 1.00" "ConTeXt Request Watchdog"
+.TH "mtx-watch" "1" "01-01-2015" "version 1.00" "ConTeXt Request Watchdog"
.SH NAME
.B mtx-watch
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtx-youless.man b/doc/context/scripts/mkiv/mtx-youless.man
index c1e9594c1..2703ec34f 100644
--- a/doc/context/scripts/mkiv/mtx-youless.man
+++ b/doc/context/scripts/mkiv/mtx-youless.man
@@ -1,4 +1,4 @@
-.TH "mtx-youless" "1" "01-01-2014" "version 1.00" "youless Fetcher"
+.TH "mtx-youless" "1" "01-01-2015" "version 1.00" "youless Fetcher"
.SH NAME
.B mtx-youless
.SH SYNOPSIS
diff --git a/doc/context/scripts/mkiv/mtxrun.html b/doc/context/scripts/mkiv/mtxrun.html
index f2c0e69fa..6e4146899 100644
--- a/doc/context/scripts/mkiv/mtxrun.html
+++ b/doc/context/scripts/mkiv/mtxrun.html
@@ -40,6 +40,7 @@
<tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
<tr><th/><td/><td/></tr>
     <tr><th>--script</th><td></td><td>run an mtx script (lua preferred method) (--noquotes), no script gives list</td></tr>
+ <tr><th>--evaluate</th><td></td><td>run code passed on the commandline (between quotes)</td></tr>
<tr><th>--execute</th><td></td><td>run a script or program (texmfstart method) (--noquotes)</td></tr>
<tr><th>--resolve</th><td></td><td>resolve prefixed arguments</td></tr>
<tr><th>--ctxlua</th><td></td><td>run internally (using preloaded libs)</td></tr>
@@ -62,6 +63,7 @@
<tr><th>--verbose</th><td></td><td>give a bit more info</td></tr>
<tr><th>--trackers</th><td>list</td><td>enable given trackers</td></tr>
<tr><th>--progname</th><td>str</td><td>format or backend</td></tr>
+ <tr><th>--systeminfo</th><td>str</td><td>show current operating system, processor, etc</td></tr>
<tr><th/><td/><td/></tr>
<tr><th>--edit</th><td></td><td>launch editor with found file</td></tr>
<tr><th>--launch</th><td></td><td>launch files like manuals, assumes os support (--all)</td></tr>
diff --git a/doc/context/scripts/mkiv/mtxrun.man b/doc/context/scripts/mkiv/mtxrun.man
index 77dc02dc5..8a3a75b64 100644
--- a/doc/context/scripts/mkiv/mtxrun.man
+++ b/doc/context/scripts/mkiv/mtxrun.man
@@ -1,4 +1,4 @@
-.TH "mtxrun" "1" "01-01-2014" "version 1.31" "ConTeXt TDS Runner Tool"
+.TH "mtxrun" "1" "01-01-2015" "version 1.31" "ConTeXt TDS Runner Tool"
.SH NAME
.B mtxrun
.SH SYNOPSIS
@@ -14,6 +14,9 @@
.B --script
 run an mtx script (lua preferred method) (--noquotes), no script gives list
.TP
+.B --evaluate
+run code passed on the commandline (between quotes)
+.TP
.B --execute
run a script or program (texmfstart method) (--noquotes)
.TP
@@ -71,6 +74,9 @@ enable given trackers
.B --progname=str
format or backend
.TP
+.B --systeminfo=str
+show current operating system, processor, etc
+.TP
.B --edit
launch editor with found file
.TP
diff --git a/doc/context/scripts/mkiv/mtxrun.xml b/doc/context/scripts/mkiv/mtxrun.xml
index f1b1e01f4..138afc96e 100644
--- a/doc/context/scripts/mkiv/mtxrun.xml
+++ b/doc/context/scripts/mkiv/mtxrun.xml
@@ -9,6 +9,7 @@
<category name="basic">
<subcategory>
<flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="evaluate"><short>run code passed on the commandline (between quotes)</short></flag>
<flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
<flag name="resolve"><short>resolve prefixed arguments</short></flag>
<flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
@@ -34,6 +35,7 @@
<flag name="verbose"><short>give a bit more info</short></flag>
<flag name="trackers" value="list"><short>enable given trackers</short></flag>
<flag name="progname" value="str"><short>format or backend</short></flag>
+ <flag name="systeminfo" value="str"><short>show current operating system, processor, etc</short></flag>
</subcategory>
<subcategory>
<flag name="edit"><short>launch editor with found file</short></flag>
diff --git a/doc/context/sources/general/manuals/epub/epub-mkiv-demo.tex b/doc/context/sources/general/manuals/epub/epub-mkiv-demo.tex
new file mode 100644
index 000000000..b4b979874
--- /dev/null
+++ b/doc/context/sources/general/manuals/epub/epub-mkiv-demo.tex
@@ -0,0 +1,43 @@
+
+% \usemodule[luacalls]
+
+\usemodule[mathml]
+
+\setupexternalfigures
+ [location=default]
+
+\setupbackend
+ [export=yes]
+
+\setupexport
+ [svgstyle=mathtest-style,
+ hyphen=yes,
+ width=60em]
+
+\setupbodyfont[pagella]
+
+\definefloat[graphic][graphics][figure]
+
+% \environment [mathtest-style]
+
+\starttext
+
+ \startsection[title=First]
+
+ \startitemize
+ \startitem one \stopitem
+ \startitem two \stopitem
+ \stopitemize
+
+ \startplacefigure[title=First]
+ \externalfigure[cow.pdf]
+ \stopplacefigure
+
+ \startplacegraphic[title=Second]
+ \externalfigure[cow.pdf]
+ \stopplacegraphic
+
+ some math: \m{e=mc^2}
+
+ \stopsection
+\stoptext
diff --git a/doc/context/sources/general/manuals/epub/epub-mkiv.tex b/doc/context/sources/general/manuals/epub/epub-mkiv.tex
new file mode 100644
index 000000000..6fc4ed9d4
--- /dev/null
+++ b/doc/context/sources/general/manuals/epub/epub-mkiv.tex
@@ -0,0 +1,466 @@
+% language=uk
+
+% todo:
+%
+% metadata
+% properties
+% \dontleavehmode before hbox
+% cover page
+%
+% http://www.cnet.com/news/google-subtracts-mathml-from-chrome-and-anger-multiplies/
+
+% \usemodule[luacalls]
+
+\usemodule[art-01,abr-02]
+
+\definehighlight[notabene][style=bold]
+
+\definecolor[darkorange] [.70(green,red)]
+\definecolor[lightorange][.45(orange,white)]
+\definecolor[lesswhite] [.90(white)]
+
+\setuptyping[color=darkorange]
+\setuptype [color=darkorange]
+
+\starttext
+
+\startMPpage
+
+numeric w ; w := 21cm ;
+numeric h ; h := 29.7cm ;
+numeric ww ; ww := 9w/10 ;
+numeric oo ; oo := (w-ww) / 2 ;
+numeric hh ; hh := h/5.5 ;
+path p ; p := unitsquare xysized(w,h) ;
+
+color orange ; orange := \MPcolor{darkorange} ; % .7[green,red] ;
+
+fill p enlarged 2mm withcolor orange ;
+
+draw image (
+ draw anchored.top(
+ textext("\ttbf\setupinterlinespace[line=1.7ex]\framed[frame=off,align=middle,offset=0mm]{\smash{<div/>}\\\smash{<div >}\\\smash{</div>}}")
+ xsized w,
+ center topboundary p shifted (0,-12mm)) withcolor \MPcolor{lightorange} ; % 0.45[white,orange] ;
+ draw anchored.bot(
+ textext("\ssbf\setupinterlinespace[line=2.2ex]\framed[frame=off,align=middle]{exporting\\xml and epub\\from context}")
+ xsized w,
+ center bottomboundary p shifted (0,4mm)) withcolor \MPcolor {lesswhite} ; % 0.90white ;
+) ;
+
+setbounds currentpicture to p ;
+
+\stopMPpage
+
+\startsection[title=Introduction]
+
+There is a pretty long tradition of typesetting math with \TEX\ and it looks like
+this program will dominate for many more years. Even if we move to the web, the
+simple fact that support for \MATHML\ in some browsers is suboptimal will drive
+those who want a quality document to use \PDF\ instead.
+
+I'm writing this in 2014, at a time when \XML\ is widespread. The idea of \XML\ is
+that you code your data in a very structured way, so that it can be manipulated and
+(if needed) validated. Text has always been a target for \XML\ which is a follow|-|up
+to \SGML\ that was in use by publishers. Because \HTML\ is less structured (and also
+quite tolerant with respect to end tags) we prefer to use \XHTML\ but unfortunately
+support for that is less widespread.
+
+Interestingly, documents are probably among the more complex targets of the
+\XML\ format. The reason is that unless the author restricts him|/|herself or
+gets restricted by the publisher, tag abuse can happen. At \PRAGMA\ we mostly
+deal with education|-|related \XML\ and it's not always easy to come up with
+something that suits the specific needs of the educational concept behind a
+school method. Even if we start out nice and clean, eventually we end up with a
+polluted source, often with additional structure needed to satisfy the tools used
+for conversion.
+
+We have been supporting \XML\ from the day it showed up and most of our projects
+involve \XML\ in one way or the other. That doesn't mean that we don't use \TEX\
+for coding documents. This manual is for instance a regular \TEX\ document. In
+many ways a structured \TEX\ document is much more convenient to edit, especially
+if one wants to add a personal touch and do some local page make|-|up. On the other hand,
+diverting from standard structure commands makes the document less suitable for
+output other than \PDF. There is simply no final solution for coding a document,
+it's mostly a matter of taste.
+
+So we have a dilemma: if we want to have multiple output, frozen \PDF\ as well as
+less-controlled \HTML\ output, we can best code in \XML, but when we want to code
+comfortably we'd like to use \TEX. There are other ways, like Markdown, that can
+be converted to intermediate formats like \TEX, but that is only suitable for
+simple documents: the more advanced documents get, the more one has to escape
+from the boundaries of (any) document encoding, and then often \TEX\ is not a bad
+choice. There is a good reason why \TEX\ survived for so long.
+
+It is for this reason that in \CONTEXT\ \MKIV\ we can export the content in a
+reasonably structured way to \XML. Of course we assume a structured document. It
+started out as an experiment because it was relatively easy to implement, and it
+is now an integral component.
+
+\stopsection
+
+\startsection[title=The output]
+
+The regular output is an \XML\ file but as we have some more related data it gets
+organized in a tree. We also export a few variants. An example is given below:
+
+\starttyping
+./test-export
+./test-export/images
+./test-export/images/...
+./test-export/styles
+./test-export/styles/test-defaults.css
+./test-export/styles/test-images.css
+./test-export/styles/test-styles.css
+./test-export/styles/test-templates.css
+./test-export/test-raw.xml
+./test-export/test-raw.lua
+./test-export/test-tag.xhtml
+./test-export/test-div.xhtml
+\stoptyping
+
+Say that we have this input:
+
+\starttyping
+\setupbackend
+ [export=yes]
+
+\starttext
+ \startsection[title=First]
+ \startitemize
+ \startitem one \stopitem
+ \startitem two \stopitem
+ \stopitemize
+ \stopsection
+\stoptext
+\stoptyping
+
+The main export ends up in the \type {test-raw.xml} export file and looks like
+the following (we leave out the preamble and style references):
+
+\starttyping
+<document> <!-- with some attributes -->
+ <section detail="section" chain="section" level="3">
+ <sectionnumber>1</sectionnumber>
+ <sectiontitle>First</sectiontitle>
+ <sectioncontent>
+ <itemgroup detail="itemize" chain="itemize" symbol="1" level="1">
+ <item>
+ <itemtag><m:math ..><m:mo>•</m:mo></m:math></itemtag>
+ <itemcontent>one</itemcontent>
+ </item>
+ <item>
+ <itemtag><m:math ..><m:mo>•</m:mo></m:math></itemtag>
+ <itemcontent>two</itemcontent>
+ </item>
+ </itemgroup>
+ </sectioncontent>
+ </section>
+</document>
+\stoptyping
+
+This file refers to the stylesheets and therefore renders quite well in a browser
+like Firefox that can handle \XHTML\ with arbitrary tags.
+
+The \type {detail} attribute tells us what instance of the element is used.
+Normally the \type {chain} attribute is the same but it can have more values.
+For instance, if we have:
+
+\starttyping
+\definefloat[graphic][graphics][figure]
+
+.....
+
+\startplacefigure[title=First]
+ \externalfigure[cow.pdf]
+\stopplacefigure
+
+.....
+
+\startplacegraphic[title=Second]
+ \externalfigure[cow.pdf]
+\stopplacegraphic
+\stoptyping
+
+we get this:
+
+\starttyping
+<float detail="figure" chain="figure">
+ <floatcontent>...</floatcontent>
+ <floatcaption>...</floatcaption>
+</float>
+<float detail="graphic" chain="figure graphic">
+ <floatcontent>...</floatcontent>
+ <floatcaption>...</floatcaption>
+</float>
+\stoptyping
+
+This makes it possible to style specific categories of floats by using a
+(combination of) \type {detail} and|/|or \type {chain} as filters.
+
+The body of the \type {test-tag.xhtml} file looks similar but it is slightly more
+tuned for viewing. For instance, hyperlinks are converted to a form that \CSS\ and
+browsers like more. Keep in mind that the raw file can be the base for conversion
+to other formats, so that one stays closest to the original structure.
+
+The \type {test-div.xhtml} file is even more tuned for viewing in browsers as it
+completely does away with specific tags. We explicitly don't map onto native
+\HTML\ elements because that would make everything look messy and horrible, if only
+because there seldom is a relation between those elements and the original. One
+can always transform one of the export formats to pure \HTML\ tags if needed.
+
+\starttyping
+<body>
+ <div class="document">
+ <div class="section" id="aut-1">
+ <div class="sectionnumber">1</div>
+ <div class="sectiontitle">First</div>
+ <div class="sectioncontent">
+ <div class="itemgroup itemize symbol-1">
+ <div class="item">
+ <div class="itemtag"><m:math ...><m:mo>•</m:mo></m:math></div>
+ <div class="itemcontent">one</div>
+ </div>
+ <div class="item">
+ <div class="itemtag"><m:math ...><m:mo>•</m:mo></m:math></div>
+ <div class="itemcontent">two</div>
+ </div>
+ </div>
+ <div class="float figure">
+          <div class="floatcontent">...</div>
+          <div class="floatcaption">...</div>
+ </div>
+ <div class="float figure graphic">
+          <div class="floatcontent">...</div>
+          <div class="floatcaption">...</div>
+ </div>
+ </div>
+ </div>
+</body>
+\stoptyping
+
+The default \CSS\ file can deal with tags as well as classes. The file
+of additional styles contains definitions of so|-|called highlights. In the \CONTEXT\ source
+one is better off using explicitly named highlights instead of local font and color
+switches because these properties are then exported to the \CSS. The images style
+defines all images used. The templates file lists all the elements used and can
+be used as a starting point for additional \CSS\ styling.
+
+Keep in mind that the export is \notabene{not} meant as a one|-|to|-|one visual
+representation. It represents structure so that it can be converted to whatever
+you like.
+
+In order to get an export you must start your document with:
+
+\starttyping
+\setupbackend
+ [export=yes]
+\stoptyping
+
+So, we trigger a specific (extra) backend. In addition you can set up the export:
+
+\starttyping
+\setupexport
+ [svgstyle=test-basic-style.tex,
+ cssfile=test-extras.css,
+ hyphen=yes,
+ width=60em]
+\stoptyping
+
+The \type {hyphen} option will also export hyphenation information so that the
+text can be nicely justified. The \type {svgstyle} option can be used to specify
+a file where math is set up; normally this would only contain a \type{bodyfont} setup,
+and this option is only needed if you want to create an \EPUB\ file afterwards which
+has math represented as \SVG.
+
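+As an illustration, such a style file (the name \type {mathtest-style} is just
+an example) typically contains little more than a bodyfont setup:
+
+\starttyping
+% mathtest-style.tex
+\setupbodyfont[pagella]
+\stoptyping
+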
+The value of \type {cssfile} ends up as a style reference in the exported files.
+You can also pass a comma|-|separated list of names (between curly braces). These
+entries come after those of the automatically generated \CSS\ files so you need
+to be aware of default properties.
+
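+For instance, a minimal setup along these lines (the extra file names are only
+placeholders) appends two stylesheets after the generated ones:
+
+\starttyping
+\setupexport
+  [cssfile={test-extras.css,test-tweaks.css},
+   hyphen=yes,
+   width=60em]
+\stoptyping
+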
+\stopsection
+
+\startsection[title=Images]
+
+Inclusion of images is done in an indirect way. Each image gets an entry in a
+special image related stylesheet and then gets referred to by \type {id}. Some
+extra information is written to a status file so that the script that creates
+\EPUB\ files can deal with the right conversion, for instance from \PDF\ to \SVG.
+Because we can refer to specific pages in a \PDF\ file, this subsystem deals with
+that too. Images are expected to be in an \type {images} subdirectory and because in \CSS\
+the references are relative to the path where the stylesheet resides, we use
+\type {../images} instead. If you do some postprocessing on the files or relocate
+them you need to keep in mind that you might have to change these paths in the
+image|-|related \CSS\ file.
+
+\stopsection
+
+\startsection[title=Epub files]
+
+At the end of a run with exporting enabled you will get a message to the console that
+tells you how to generate an \EPUB\ file. For instance:
+
+\starttyping
+mtxrun --script epub --make --purge test
+\stoptyping
+
+This will create a tree with the following organization:
+
+\starttyping
+./test-epub
+./test-epub/META-INF
+./test-epub/META-INF/container.xml
+./test-epub/OEBPS
+./test-epub/OEBPS/content.opf
+./test-epub/OEBPS/toc.ncx
+./test-epub/OEBPS/nav.xhtml
+./test-epub/OEBPS/cover.xhtml
+./test-epub/OEBPS/test-div.xhtml
+./test-epub/OEBPS/images
+./test-epub/OEBPS/images/...
+./test-epub/styles
+./test-epub/styles/test-defaults.css
+./test-epub/styles/test-images.css
+./test-epub/styles/test-styles.css
+./test-epub/mimetype
+\stoptyping
+
+Images will be moved to this tree as well and if needed they will be converted,
+for instance into \SVG. Converted \PDF\ files can have a \typ {page-<number>} in
+their name when a specific page has been used.
+
+You can pass the option \type {--svgmath} in which case math will be converted to
+\SVG. The main reason for this feature is that we found out that \MATHML\ support
+in browsers is not currently as widespread as might be expected. The best bet is Firefox, which
+natively supports it. The Chrome browser had it for a while but it got dropped
+and math is now delegated to \JAVASCRIPT\ and friends. In Internet Explorer
+\MATHML\ should work (but I need to test that again).
+
+This conversion mechanism is
+kind of interesting: one enters \TEX\ math, then gets \MATHML\ in the export, and
+that gets rendered by \TEX\ again, but now as a standalone snippet that then gets
+converted to \SVG\ and embedded in the result.
+
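+So, assuming a job called \type {test} again, the whole chain can be triggered
+with something like:
+
+\starttyping
+mtxrun --script epub --make --purge --svgmath test
+\stoptyping
+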
+\stopsection
+
+\startsection[title=Styles]
+
+One can argue that we should use native \HTML\ elements but since we don't have a nice
+guaranteed|-|consistent mapping onto that, it makes no sense to do so. Instead, we
+rely on either explicit tags with details and chains or divisions with classes
+that combine the tag, detail and chain. The tagged variant has some more
+attributes and those that use a fixed set of values become classes in the
+division variant. Also, once we start going the (for instance) \type {H1}, \type
+{H2}, etc.\ route we're lost when we have more levels than that or use a
+different structure. If an \type {H3} can reflect several levels it makes no
+sense to use it. The same is true for other tags: if a list is not really a list
+then tagging it with \type {LI} is counterproductive. We're often dealing with
+very complex documents so basic \HTML\ tagging becomes rather meaningless.
+
+If you look at the division variant (this is used for \EPUB\ too) you will notice
+that there are no empty elements but \type {div} blocks with a comment as content.
+This is needed because otherwise they get ignored, which for instance makes table
+cells invisible.
+
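+A sketch of what such a placeholder can look like (the class name and comment
+text here are only illustrative):
+
+\starttyping
+<div class="itemcontent"><!-- empty --></div>
+\stoptyping
+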
+The relation between \type {detail} and \type {chain} (reflected in \type {class})
+can best be seen from the next example.
+
+\starttyping
+\definefloat[myfloata]
+\definefloat[myfloatb][myfloatbs][figure]
+\definefloat[myfloatc][myfloatcs][myfloatb]
+\stoptyping
+
+This creates two new float instances. The first inherits from the main float
+settings, but can have its own properties. The second example inherits from
+the \type {figure} so in fact it is part of a chain. The third one has a longer
+chain.
+
+\starttyping
+<float detail="myfloata">...</float>
+<float detail="myfloatb" chain="figure">...</float>
+<float detail="myfloatc" chain="figure myfloatb">...</float>
+\stoptyping
+
+In a \CSS\ style you can now configure tags, details, and chains as well as
+classes (we show only a few possibilities). Here, the class|-|based selector on
+the left of each pair matches the same element as the attribute|-|based selector
+on the right.
+
+\starttyping
+div.float.myfloata { } float[detail='myfloata'] { }
+div.float.myfloatb { } float[detail='myfloatb'] { }
+div.float.figure { } float[detail='figure'] { }
+div.float.figure.myfloatb { } float[chain~='figure'][detail='myfloatb'] { }
+div.myfloata { } *[detail='myfloata'] { }
+div.myfloatb { } *[detail='myfloatb'] { }
+div.figure { } *[chain~='figure'] { }
+div.figure.myfloatb { } *[chain~='figure'][detail='myfloatb'] { }
+\stoptyping
+
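+As a small sketch (the properties themselves are arbitrary), all figure floats
+in the division variant could be given a distinct look with:
+
+\starttyping
+div.float.figure {
+    margin     : 1em 0 ;
+    padding    : .5em ;
+    background : #EFEFEF ;
+}
+\stoptyping
+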
+The default styles cover some basics but if you're serious about the export
+or want to use \EPUB\ then it makes sense to overload some of it and|/|or
+provide additional styling. You can find plenty about \CSS\ and its options
+on the Internet.
+
+\stopsection
+
+\startsection[title=Coding]
+
+The default output reflects the structure present in the document. If that is not
+enough you can add your own structure, as in:
+
+\starttyping
+\startelement[question]
+Is this right?
+\stopelement
+\stoptyping
+
+You can also pass attributes:
+
+\starttyping
+\startelement[question][level=difficult]
+Is this right?
+\stopelement
+\stoptyping
+
+But these will be exported only when you also say:
+
+\starttyping
+\setupexport
+ [properties=yes]
+\stoptyping
+
+You can create a namespace. The following will generate attributes
+like \type {my-level}.
+
+\starttyping
+\setupexport
+ [properties=my-]
+\stoptyping
+
+In most cases it makes more sense to use highlights:
+
+\starttyping
+\definehighlight
+ [important]
+ [style=bold]
+\stoptyping
+
+This has the advantage that the style and color are exported to a special
+\CSS\ file.
+
+Headers, footers, and other content that is part of the page builder are not
+exported. If your document has cover pages you might want to hide them too. The
+same is true when you create special chapter title rendering with a side
+effect that content ends up in the page stream. If something shows up that you
+don't want, you can wrap it in an \type {ignore} element:
+
+\starttyping
+\startelement[ignore]
+Don't export this.
+\stopelement
+\stoptyping
+
+\stopsection
+
+\stoptext
diff --git a/doc/context/sources/general/manuals/mcommon.tex b/doc/context/sources/general/manuals/mcommon.tex
new file mode 100644
index 000000000..b6b6026e9
--- /dev/null
+++ b/doc/context/sources/general/manuals/mcommon.tex
@@ -0,0 +1,210 @@
+% content=tex
+%
+% copyright=pragma-ade readme=readme.pdf licence=cc-by-nc-sa
+
+\startenvironment mcommon
+
+% modules
+
+\usemodule[abr-02]
+
+% layout
+
+% \startmode[screen]
+% \setuppapersize[S6][S6]
+% \setupinteractionscreen[options=max]
+% \stopmode
+
+\setuplayout
+ [topspace=15mm,
+ header=15mm,
+ headerdistance=0mm,
+ footer=0cm,
+ width=middle,
+ height=middle]
+
+\setupinteraction
+ [state=start,
+ color=,
+ contrastcolor=,
+ style=]
+
+% fonts
+
+\definetypeface [mainface] [rm] [serif] [pagella] [default]
+\definetypeface [mainface] [ss] [sans] [heros] [default] % [rscale=1.1]
+\definetypeface [mainface] [tt] [mono] [heros] [default] % [rscale=1.1]
+\definetypeface [mainface] [mm] [math] [pagella] [default]
+
+\setupbodyfont[mainface,12pt]
+
+\definefont [BigFont] [SansBold*default at 60pt]
+\definefont [MedFont] [SansBold*default at 30pt]
+
+% colors (historically)
+
+\definecolor [NopColor] [r=.6,g=.4,b=.5]
+\definecolor [AltColor] [r=.4,g=.6,b=.5]
+\definecolor [TheColor] [r=.4,g=.5,b=.6]
+\definecolor [TmpColor] [r=.6,g=.5,b=.4]
+
+\definecolor [NopColor] [r=.40,g=.20,b=.20]
+\definecolor [AltColor] [r=.20,g=.40,b=.20]
+\definecolor [TheColor] [r=.20,g=.20,b=.40]
+\definecolor [TmpColor] [r=.40,g=.40,b=.20]
+
+\definecolor [red] [NopColor]
+\definecolor [green] [AltColor]
+\definecolor [blue] [TheColor]
+\definecolor [yellow][TmpColor]
+
+% spacing
+
+\setupwhitespace
+ [big]
+
+\setuptolerance
+ [verytolerant,stretch]
+
+% verbatim
+
+\setuptype
+ [color=AltColor]
+
+\setuptyping
+ [color=AltColor]
+
+% structure
+
+\setupitemize
+ [each]
+ [color=TheColor]
+
+\definedescription
+ [switch]
+ [headstyle=type,
+ headcolor=TheColor,
+ location=serried,
+ width=broad]
+
+\defineenumeration
+ [topic]
+ [location=serried,
+ width=broad,
+ headstyle=,
+ headcolor=TheColor,
+ text=,
+ left={[},
+ right={]}]
+
+\setuphead
+ [section]
+ [style=\bfb,
+ color=TheColor]
+
+\setuplist
+ [section]
+ [alternative=c,
+ color=TheColor,
+ textcolor=black,
+ pagecolor=black]
+
+% whatever
+
+\setupsystem
+ [random=medium]
+
+\setupfloats
+ [ntop=100]
+
+\setupinteraction
+ [style=,
+ color=NopColor,
+ contrastcolor=NopColor]
+
+% tables and frames
+
+\setuptabulate
+ [rulethickness=.5pt,
+ rulecolor=AltColor]
+
+\setuptables
+ [rulethickness=.5pt,
+ rulecolor=AltColor]
+
+\setupframedtexts
+ [rulethickness=.5pt,
+ framecolor=TheColor,
+ width=\textwidth]
+
+% quick reference things
+
+\usemodule[set-11] \loadsetups
+
+\setupframedtexts
+ [setuptext]
+ [rulethickness=.5pt,
+ framecolor=AltColor]
+
+% titlepage
+
+\startsetups titlepage
+ \defineoverlay
+ [logo]
+ [\useMPgraphic{titlepage}{width=\overlaywidth,height=\overlayheight}]
+ \setupbackgrounds
+ [page]
+ [background=logo]
+ \startstandardmakeup
+ \dontcomplain
+ \BigFont
+ \setupinterlinespace
+ \vfill
+ \setupalign[left]
+ \let\\=\par
+ \dontleavehmode
+ \rotate
+ [rotation=90]
+ {\color
+ [lightgray]
+ {\getvariable{document}{title}}}
+ \par
+ \stopstandardmakeup
+ \setupbackgrounds
+ [page]
+ [background=]
+\stopsetups
+
+\startsetups colofon
+ \blank[2*big]
+ \testpage[3]
+ \startpacked
+ \getvariable{document}{author}\par
+ \getvariable{document}{affiliation}\par
+ \getvariable{document}{location}\par
+ \stoppacked
+\stopsetups
+
+\setupdocument
+ [title=No Title,
+ before=\setups{titlepage},
+ after=\setups{colofon}]
+
+% urls
+
+\useurl[gpl-simple] [http://creativecommons.org/licenses/GPL/2.0/]
+\useurl[gpl-legal] [http://creativecommons.org/licenses/GPL/2.0/legalcode]
+\useurl[byncsa-simple][http://creativecommons.org/licenses/by-nc-sa/2.5/]
+\useurl[byncsa-legal] [http://creativecommons.org/licenses/by-nc-sa/2.5/legalcode]
+
+\useurl[garden] [http://contextgarden.net]
+\useurl[install] [http://wiki.contextgarden.net/ConTeXt_Standalone]
+\useurl[texlive] [http://www.tug.org/texlive/]
+\useurl[group] [http://group.contextgarden.net]
+\useurl[list] [http://www.ntg.nl/mailman/listinfo/ntg-context]
+\useurl[development] [http://www.ntg.nl/mailman/listinfo/dev-context]
+\useurl[announce] [http://www.ntg.nl/mailman/listinfo/ann-context]
+\useurl[collector] [http://tracker.luatex.org]
+\useurl[pragma] [http://www.pragma-ade.com]
+
+\stopenvironment
diff --git a/doc/context/sources/general/manuals/readme/mreadme.tex b/doc/context/sources/general/manuals/readme/mreadme.tex
new file mode 100644
index 000000000..b2af11bc4
--- /dev/null
+++ b/doc/context/sources/general/manuals/readme/mreadme.tex
@@ -0,0 +1,372 @@
+% interface=en engine=luatex language=uk
+%
+% copyright=pragma-ade readme=readme.pdf licence=cc-by-nc-sa
+
+\environment mcommon
+
+% copied from cont-log: readme_logo
+
+\startuseMPgraphic{titlepage}{width,height}
+ numeric width ; width = \MPvar{width} ;
+ numeric height ; height = \MPvar{height} ;
+ numeric delta ; delta := width/10 ;
+ numeric circle ; circle := 2.5delta ;
+ color c ; c := (.2,.4,.6) ;
+ path p, q, r ;
+ p := unitsquare xscaled width yscaled height ;
+ z1 = (delta,height-2delta) ;
+ z2 = (width-delta,height-delta) ;
+ z3 = (width/2-delta,2delta+circle) ;
+ z4 = (x3,delta+circle/2) ;
+ q := z1 { dir -15 } .. z2 & z2 { dir -105 } .. z3 & z3 { dir 135 } .. z1 & cycle ;
+ r := fullcircle xscaled circle yscaled (.85circle) rotated 15 shifted z4 ;
+ pickup pencircle scaled (delta/1.5) ;
+ fill p withcolor .50c ;
+ fill q withcolor .75c ;
+ fill r withcolor .75c ;
+ draw p withcolor c ;
+ draw q withcolor c ;
+ pickup pencircle scaled (delta/2) ;
+ draw r withcolor c ;
+ setbounds currentpicture to p ;
+\stopuseMPgraphic
+
+\startdocument
+ [title={Read Me First},
+ author={Hans Hagen},
+ affiliation={PRAGMA ADE},
+ location={Hasselt NL}]
+
+\startsubject[title={Introduction}]
+
+Which licence suits a \TEX\ like system best is a matter of taste. Personally
+we dislike any licence that needs more than a few pages of dense legal code to
+get the message across. A \TEX\ related system like \CONTEXT\ is a hybrid of
+programs, scripts and|/|or macro code as well as documentation and sample code,
+including graphics. \TEX\ related systems also have a long standing tradition of
+providing support structures for users. In order to make support feasible, a
+\TEX\ based system like \CONTEXT\ assumes a certain logic and structure in the
+way the related files are named and organized in a tree structure. Even a small
+change in one of the elements may let such a system behave differently than
+manuals suggest. Swap a font, change some style defaults, leave out some pieces,
+and users may end up in confusion. A licence does not give a user any guarantees!
+
+In order to satisfy those responsible for distributing \CONTEXT, we need to
+choose a licence that makes them feel comfortable. Unfortunately we don't feel
+that comfortable with a licence that does not guarantee that a
+system will not be adapted in ways that change the advertised behaviour. On
+the other hand, it is the responsibility of those distributing and extending the
+system to make sure that this does not happen. However, users should not
+automatically assume that what they get shipped is the same as the original,
+which is why we stress that support (from our side) will only be given on
+unaltered systems.
+
+First of all, what is \CONTEXT ? It's just a bunch of macros, written in \TEX\
+and \METAPOST, meant for typesetting documents. The macros are accompanied by
+some scripts, written in \PERL\ (mainly the older scripts), \RUBY\ (also older
+ones) and \LUA\ (the current fashion). The \CONTEXT\ distribution comes with a
+few fonts, files that help manage resources (e.g.\ map files needed for \MKII),
+as well as patterns (based on official ones, so this is a derived work).
+
+The \CONTEXT\ distribution is packaged in zip files organized in the \TDS\
+structure:
+
+\starttabulate[|lT|p|]
+\NC \type {cont-tmf.zip} \NC the main distribution that has all relevant files \NC \NR
+\NC \type {cont-tst.7z} \NC a bunch of test files that can also serve as examples \NC \NR
+\NC \type {cont-mpd.zip} \NC a \METAPOST\ to \PDF\ converter (not needed in \CONTEXT) \NC \NR
+\NC \type {cont-ppc.zip} \NC a macro package for typesetting chemistry (not needed in \CONTEXT) \NC \NR
+\NC \type {cont-sci.zip} \NC configuration files for using \CONTEXT\ in the \SCITE\ editor \NC \NR
+\stoptabulate
+
+There are two flavours of \CONTEXT: \MKII\ and \MKIV. The first one is frozen and
+will not be extended. It runs on top of \PDFTEX\ or \XETEX. The \MKIV\ version is
+actively developed and runs on top of \LUATEX\ (an engine that is developed
+alongside \CONTEXT\ but that can also be used for other macro packages).
+
+The documentation can be downloaded from our website or the Wiki. Some manuals
+ship with source code. We might ship more source code but only when the source is
+stable and clean and can serve as an example.
+
+That said, what licence does apply? We need to distinguish between things that
+resemble a program on the one hand and documentation on the other hand. We
+(currently) use a different licence for each of them.
+
+\stopsubject
+
+\startsubject[title={The Code}]
+
+The program code (i.e.\ anything not under the \type {/doc} subtree) is
+distributed under the
+
+\startnarrower
+\goto{Creative Commons GNU GPL}[url(gpl-simple)]
+\stopnarrower
+
+For practical purposes distributors may also choose the \LATEX\ project licence,
+which is considered to be a bit more \TEX\ friendly. (BSD||like licences, the
+Ruby Licence and the Apache Licence would all suit \CONTEXT\ well.)
+
+In practice, users may forget about the legal part, if only because I haven't
+even read (and understood) it completely myself, so let's stick to what Creative
+Commons makes of it:
+
+\startcolor[blue]
+The GNU General Public License is a Free Software license. Like any Free Software
+license, it grants to you the four following freedoms:
+
+\startitemize
+ \startitem
+ The freedom to run the program for any purpose.
+ \stopitem
+ \startitem
+ The freedom to study how the program works and adapt it to your needs.
+ \stopitem
+ \startitem
+ The freedom to redistribute copies so you can help your neighbour.
+ \stopitem
+ \startitem
+ The freedom to improve the program and release your improvements to the
+ public, so that the whole community benefits.
+ \stopitem
+\stopitemize
+
+You may exercise the freedoms specified here provided that you comply with the
+express conditions of this license. The principal conditions are:
+
+You must conspicuously and appropriately publish on each copy distributed an
+appropriate copyright notice and disclaimer of warranty and keep intact all the
+notices that refer to this License and to the absence of any warranty; and give
+any other recipients of the Program a copy of the GNU General Public License
+along with the Program. Any translation of the GNU General Public License must be
+accompanied by the GNU General Public License.
+
+If you modify your copy or copies of the program or any portion of it, or develop
+a program based upon it, you may distribute the resulting work provided you do so
+under the GNU General Public License. Any translation of the GNU General Public
+License must be accompanied by the GNU General Public License.
+
+If you copy or distribute the program, you must accompany it with the complete
+corresponding machine-readable source code or with a written offer, valid for at
+least three years, to furnish the complete corresponding machine-readable source
+code.
+
+Any of these conditions can be waived if you get permission from the copyright
+holder.
+
+Your fair use and other rights are in no way affected by the above.
+\stopcolor
+
+\stopsubject
+
+\startsubject[title={Recommendations}]
+
+Here are a few recommendations in case you want to distribute, extend or embed
+\CONTEXT\ in applications:
+
+\startitemize
+
+\startitem
+ You can best leave the code base untouched. Most of \CONTEXT\ provides hooks
+ and it's relatively easy to overload code. Leave the lower level system code
+ untouched: changes may backfire when you update. Asking for more hooks is the
+ best way to go.
+\stopitem
+
+\startitem
+ Put your own code in the right subpaths, i.e.\ modules approved by the
+ development team under \type {.../third}, and styles and whatever else under
+  \type {.../user} (see the sketch after this list). This way your code will
+  not interfere with existing code and updating will cause fewer problems. Keep
+  in mind that \TEX\ systems have their own way and order of locating files,
+  and the load order often matters.
+\stopitem
+
+\startitem
+ Don't copy styles and change a few lines, but load the base one and
+  build|/|patch on top of that. In the end you may benefit from improvements to
+ the base style.
+\stopitem
+
+\startitem
+ Be original. The whole idea behind \CONTEXT\ is that you can write your own
+ styles. On the \CONTEXT\ mailing list as well as on the Wiki there are enough
+ advanced users to help you make a start.
+\stopitem
+
+\startitem
+  Don't hesitate to submit bug reports and ask for extensions. It may even be
+  that what you want is already present but not yet documented.
+\stopitem
+
+\startitem
+  If things don't work as expected, check to what extent your system matches
+ the (more or less) standard. We provide so called minimal \CONTEXT\ trees
+ that can serve as a reference. Because \CONTEXT\ evolves, make sure your
+ system is up to date. The \CONTEXT\ garden provides ways to install and
+ update the standard distribution.
+\stopitem
+
+\startitem
+ The scripts can best be called using \type {mtxrun}. This lessens dependencies
+ on the location in the tree and ensures upward compatibility. It also prevents
+ clashes with similar scripts.
+\stopitem
+
+\startitem
+ Some scripts depend on each other. Don't mess around with the existing
+ functionality and names of the scripts and then feed them back into the
+ standard distributions.
+\stopitem
+
+\stopitemize
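+
+As an illustration of the second recommendation, a third party module and a
+personal style could end up in paths like these (the names are hypothetical,
+just to show the idea):
+
+\starttyping
+.../tex/context/third/mymodule/t-mymodule.mkiv
+.../tex/context/user/mystyle.tex
+\stoptyping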
+
+\stopsubject
+
+\startsubject[title={Documents}]
+
+The documentation is provided under another Creative Commons licence
+
+\startnarrower
+ \goto{Attribution NonCommercial ShareAlike}[url(byncsa-simple)]
+\stopnarrower
+
+This one says:
+
+\startcolor[blue]
+You are free:
+
+\startitemize
+ \startitem to copy, distribute, display, and perform the work \stopitem
+ \startitem to make derivative works \stopitem
+\stopitemize
+
+{\sc Attribution:} You must attribute the work in the manner specified by the
+author or licensor.
+
+{\sc NonCommercial:} You may not use this work for commercial purposes.
+
+{\sc Share Alike:} If you alter, transform, or build upon this work, you may
+distribute the resulting work only under a license identical to this one.
+
+\startitemize
+ \startitem
+ For any reuse or distribution, you must make clear to others the license
+ terms of this work.
+ \stopitem
+ \startitem
+ Any of these conditions can be waived if you get permission from the
+ copyright holder.
+ \stopitem
+\stopitemize
+
+Your fair use and other rights are in no way affected by the above.
+\stopcolor
+
+The non||commercial part is mostly a safeguard. We don't mind if user groups
+distribute printed copies, publish (parts of) manuals and|/|or if authors use
+example code in manuals and books about \CONTEXT.
+
+If you distribute \CONTEXT\ and related software on electronic media as part of
+\TEX\ distributions (whether or not for money), you may also distribute the
+manuals and their sources in electronic form, preferably as provided by the
+maintainers of \CONTEXT.
+
+Keep in mind that logos and cover designs are not meant to be copied. We provide
+the source code for some manuals, but we don't always provide all graphics and
+other resources. For instance, in some manuals we use commercial fonts and you
+have to buy those yourself.
+
+We provide the typeset manuals at our website. Those are the official ones. We
+appreciate it if you do not distribute manuals compiled on your own system as
+substitutes. The manuals are a showcase for what \CONTEXT\ provides. Help us to
+assure the quality.
+
+\stopsubject
+
+\startsubject[title={More information}]
+
+We're not going to fill \mathematics{n}~pages with legal stuff, so if you want to
+know more, you have to consult the web for the legalities mentioned. Here are a
+few starting points:
+
+\startlines
+\goto{\url[gpl-simple]}[url(gpl-simple)]
+\goto{\url[gpl-legal]}[url(gpl-legal)]
+\stoplines
+
+\startlines
+\goto{\url[byncsa-simple]}[url(byncsa-simple)]
+\goto{\url[byncsa-legal]}[url(byncsa-legal)]
+\stoplines
+
+\CONTEXT\ itself can be fetched from the main site or the garden:
+
+\startlines
+\goto{\url[pragma]}[url(pragma)]
+\goto{\url[install]}[url(install)]
+\stoplines
+
+These always ship the latest versions. Alternatively you can install the whole
+\TEX\ distribution, which is a yearly snapshot:
+
+\startlines
+\goto{\url[texlive]}[url(texlive)]
+\stoplines
+
+A starting point for support can be found at:
+
+\startlines
+\goto{\url[list]}[url(list)]
+\goto{\url[garden]}[url(garden)]
+\stoplines
+
+And of course there is the \CONTEXT\ group:
+
+\startlines
+\goto{\url[group]}[url(group)]
+\stoplines
+
+Bugs and feature requests can be registered at the collector:
+
+\startlines
+\goto{\url[collector]}[url(collector)]
+\stoplines
+
+Releases are announced at:
+
+\startlines
+\goto{\url[announce]}[url(announce)]
+\stoplines
+
+The developers can be met at:
+
+\startlines
+\goto{\url[development]}[url(development)]
+\stoplines
+
+\stopsubject
+
+\startsubject[title={Disclaimer}]
+
+To play safe we include a disclaimer here, taken from the BSD style licence. For
+some reason such a text is always in unreadable capitals, so \unknown
+
+\start \txx \blue
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR \quotation {AS IS} AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+\stop
+
+\unknown\ and don't bother discussing licence issues and related things with us
+for the mere sake of discussing licence stuff.
+
+\stopsubject
+
+\stopdocument
diff --git a/doc/context/sources/general/manuals/swiglib/swiglib-mkiv.tex b/doc/context/sources/general/manuals/swiglib/swiglib-mkiv.tex
new file mode 100644
index 000000000..fc7a269bb
--- /dev/null
+++ b/doc/context/sources/general/manuals/swiglib/swiglib-mkiv.tex
@@ -0,0 +1,335 @@
+% language=uk
+
+% author : Hans Hagen, PRAGMA ADE, NL
+% license : Creative Commons, Attribution-NonCommercial-ShareAlike 3.0 Unported
+
+\usemodule[art-01,abr-02]
+
+\definecolor
+ [maincolor]
+ [r=.4]
+
+\definecolor
+ [extracolor]
+ [g=.4]
+
+\setupbodyfont
+ [11pt]
+
+\setuptype
+ [color=maincolor]
+
+\setuptyping
+ [color=maincolor]
+
+\definefontsynonym
+ [TitlePageMono]
+ [file:lmmonoproplt10-bold*default]
+
+\setuphead
+ [color=maincolor]
+
+\usesymbols
+ [cc]
+
+\setupinteraction
+ [hidden]
+
+\loadfontgoodies[lm]
+
+\startdocument
+ [metadata:author=Hans Hagen,
+ metadata:title=SwigLib basics,
+ author=Hans Hagen,
+ affiliation=PRAGMA ADE,
+ location=Hasselt NL,
+ title=SwigLib basics,
+ support=www.contextgarden.net,
+ website=www.pragma-ade.nl]
+
+\startluasetups[swiglib]
+ for i=1,640 do
+ context.definedfont { string.formatters["TitlePageMono at %p"](65536*(10+math.random(5))) }
+ context("SwigLib ")
+ end
+ context.removeunwantedspaces()
+\stopluasetups
+
+\startMPpage
+
+StartPage ;
+
+ fill Page enlarged 1cm withcolor \MPcolor{extracolor} ;
+
+ draw textext("\framed[loffset=2pt,roffset=2pt,frame=off,width=\paperwidth,align={normal,paragraph,verytolerant,stretch}]{\luasetup{swiglib}}")
+ xysized (PaperWidth,PaperHeight)
+ shifted center Page
+ withcolor .8white ;
+
+ draw textext.ulft("\definedfont[TitlePageMono]basics")
+ xsized .75PaperWidth
+ shifted lrcorner Page
+ shifted (-1cm,2cm)
+ withcolor \MPcolor{maincolor} ;
+
+% draw textext.ulft("\definedfont[TitlePageMono]in context mkiv")
+% xsized .6PaperWidth
+% shifted lrcorner Page
+% shifted (-1cm,6cm)
+% withcolor \MPcolor{maincolor} ;
+
+StopPage ;
+
+\stopMPpage
+
+\dontcomplain
+
+\startsubject[title=Contents]
+
+\placelist[section][alternative=a]
+
+\stopsubject
+
+\startsection[title=Introduction]
+
+The \SWIGLIB\ project is related to \LUATEX\ and aims at adding portable library
+support to this \TEX\ engine without too much fixed binding. The project does not
+provide \LUA\ code, unless really needed, because it assumes that macro packages
+have different demands. It also fits in the spirit of \TEX\ and \LUA\ to minimize
+the core components.
+
+The technical setup is by Luigi Scarso and documentation about how to build the
+libraries is part of the \SWIGLIB\ repository. Testing happens with the help of
+the \CONTEXT\ (garden) infrastructure. This short document mostly deals with
+usage in \CONTEXT\ but also covers rather plain usage.
+
+\blank \start \em todo: reference to Luigi's manual \stop \blank
+
+\stopsection
+
+\startsection[title=Inside \CONTEXT]
+
+The recommended way to load a library in \CONTEXT\ is by using the
+\type {swiglib} function. This function lives in the global namespace.
+
+\starttyping
+local gm = swiglib("gmwand.core")
+\stoptyping
+
+After this call you have the functionality available in the \type {gm}
+namespace. This way of loading makes \CONTEXT\ aware that such a library
+has been loaded and it will report the loaded libraries as part of the
+statistics.
+
+If you want, you can use the more ignorant \type {require} instead but in
+that case you need to be more explicit.
+
+\starttyping
+local gm = require("swiglib.gmwand.core")
+\stoptyping
+
+Here is an example of using such a library (by Luigi):
+
+\startbuffer
+\startluacode
+local gm = swiglib("gmwand.core")
+local findfile = resolvers.findfile
+
+gm.InitializeMagick(".")
+
+local magick_wand = gm.NewMagickWand()
+local drawing_wand = gm.NewDrawingWand()
+local pixel_wand = gm.NewPixelWand();
+
+gm.MagickSetSize(magick_wand,800,600)
+gm.MagickReadImage(magick_wand,"xc:gray")
+
+gm.DrawPushGraphicContext(drawing_wand)
+
+gm.DrawSetFillColor(drawing_wand,pixel_wand)
+
+gm.DrawSetFont(drawing_wand,findfile("dejavuserifbold.ttf"))
+gm.DrawSetFontSize(drawing_wand,96)
+gm.DrawAnnotation(drawing_wand,200,200,"ConTeXt 1")
+
+gm.DrawSetFont(drawing_wand,findfile("texgyreschola-bold.otf"))
+gm.DrawSetFontSize(drawing_wand,78)
+gm.DrawAnnotation(drawing_wand,250,300,"ConTeXt 2")
+
+gm.DrawSetFont(drawing_wand,findfile("lmroman10-bold.otf"))
+gm.DrawSetFontSize(drawing_wand,48)
+gm.DrawAnnotation(drawing_wand,300,400,"ConTeXt 3")
+
+gm.DrawPopGraphicContext(drawing_wand)
+
+gm.MagickDrawImage(magick_wand,drawing_wand)
+
+gm.MagickWriteImages(magick_wand,"./swiglib-mkiv-gm-1.png",1)
+gm.MagickWriteImages(magick_wand,"./swiglib-mkiv-gm-1.jpg",1)
+gm.MagickWriteImages(magick_wand,"./swiglib-mkiv-gm-1.pdf",1)
+
+gm.DestroyDrawingWand(drawing_wand)
+gm.DestroyPixelWand(pixel_wand)
+gm.DestroyMagickWand(magick_wand)
+\stopluacode
+\stopbuffer
+
+\typebuffer \getbuffer
+
+In practice you will probably stay away from manipulating text this way, but it
+illustrates that you can use the regular \CONTEXT\ helpers to locate files.
+
+\startlinecorrection[big]
+ \startcombination[3*1]
+ {\externalfigure[swiglib-mkiv-gm-1.png][width=.3\textwidth]} {png}
+ {\externalfigure[swiglib-mkiv-gm-1.pdf][width=.3\textwidth]} {pdf}
+ {\externalfigure[swiglib-mkiv-gm-1.jpg][width=.3\textwidth]} {jpg}
+ \stopcombination
+\stoplinecorrection
+
+You'd better make sure to use unique filenames for such graphics. Of course a more
+clever mechanism would only run time consuming tasks once for each iteration of a
+document.
+
+\stopsection
+
+\startsection[title=Outside \CONTEXT]
+
+In the \CONTEXT\ distribution we ship some generic macros and code for usage in
+plain \TEX\ but there is no reason why they shouldn't work in other macro packages
+as well. A rather plain example is this:
+
+\starttyping
+\input luatex-swiglib.tex
+
+\directlua {
+ dofile("luatex-swiglib-test.lua")
+}
+
+\pdfximage {luatex-swiglib-test.jpg} \pdfrefximage\pdflastximage
+
+\end
+\stoptyping
+
+Assuming that you made the \type {luatex-plain} format, such a file can be processed using:
+
+\starttyping
+luatex --fmt=luatex-plain luatex-swiglib-test.tex
+\stoptyping
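+
+If the \type {luatex-plain} format is not present yet, it can be generated
+first. A minimal sketch (the same command is also listed in the tips that ship
+with \CONTEXT):
+
+\starttyping
+mtxrun --script plain --make
+\stoptyping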
+
+The loaded \LUA\ file \type {luatex-swiglib-test.lua} looks like this:
+
+\starttyping
+local gm = swiglib("gmwand.core")
+
+gm.InitializeMagick(".")
+
+local magick_wand = gm.NewMagickWand()
+local drawing_wand = gm.NewDrawingWand()
+
+gm.MagickSetSize(magick_wand,800,600)
+gm.MagickReadImage(magick_wand,"xc:red")
+gm.DrawPushGraphicContext(drawing_wand)
+gm.DrawSetFillColor(drawing_wand,gm.NewPixelWand())
+gm.DrawPopGraphicContext(drawing_wand)
+gm.MagickDrawImage(magick_wand,drawing_wand)
+gm.MagickWriteImages(magick_wand,"./luatex-swiglib-test.jpg",1)
+
+gm.DestroyDrawingWand(drawing_wand)
+gm.DestroyMagickWand(magick_wand)
+\stoptyping
+
+Instead of loading a library with the \type {swiglib} function, you can also
+use \type {require}:
+
+\starttyping
+local gm = require("swiglib.gmwand.core")
+\stoptyping
+
+Watch the explicit \type {swiglib} reference. Both methods are equivalent.
+
+\stopsection
+
+\startsection[title={The libraries}]
+
+Most libraries are small but some can be rather large and have additional files.
+This is why we keep them separated. On my system they are collected in the
+platform binary tree:
+
+\starttyping
+e:/tex-context/tex/texmf-mswin/bin/lib/luatex/lua/swiglib/gmwand
+e:/tex-context/tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql
+e:/tex-context/tex/texmf-mswin/bin/lib/luatex/lua/swiglib/....
+\stoptyping
+
+One can vary this scheme; for instance:
+
+\starttyping
+...tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/core.dll
+...tex/texmf-mswin/bin/lib/luajittex/lua/swiglib/mysql/core.dll
+...tex/texmf-mswin/bin/lib/luatex/context/lua/swiglib/mysql/core.dll
+\stoptyping
+
+are all valid. When versions are used you can provide an additional argument to the
+\type {swiglib} loader:
+
+\starttyping
+tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/5.6/core.dll
+\stoptyping
+
+This works with:
+
+\starttyping
+local mysql = swiglib("mysql.core","5.6")
+\stoptyping
+
+as well as:
+
+\starttyping
+local mysql = swiglib("mysql.core")
+\stoptyping
+
+It is hard to predict how operating systems look up libraries, especially for
+nested loads, but as long as the root of the \type {swiglib} path is known to the
+file search routine things will work out. We've kept the main conditions for
+success simple: the core
+library is called \type {core.dll} or \type {core.so}. Each library has an
+(automatically called) initialize function named \type {luaopen_core}. There is no
+reason why (sym)links from the \type {swiglib} path to someplace else shouldn't
+work.
+
+In \type {texmfcnf.lua} you will find an entry like:
+
+\starttyping
+CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$engine/context,$engine}/lua//"
+\stoptyping
+
+In practice this boils down to a search for \type {luatex} or \type {luajittex}
+specific libraries. When both binaries are compatible and there are no \type
+{luajittex} binaries, the regular \type {luatex} libraries will be used.
+
+The \type {swiglib} loader function mentioned in the previous sections loads
+libraries in a special way: it changes directory to the specific path and then
+loads the library in the usual way. After that it returns to the directory where
+it started out. When the library later needs additional libraries (and for
+instance graphicmagick needs a lot of them) it will first look on its own path
+(which is remembered).
+
+The \MKIV\ lookups are somewhat more robust in the sense that they first check
+for matches on engine specific paths. This comes in handy when the search
+patterns are too generic and would otherwise match on for instance \type {luajittex}
+while \type {luatex} is used.
+
+\stopsection
+
+\startsection[title=Colofon]
+
+\starttabulate[|B|p|]
+\NC author \NC \getvariable{document}{author}, \getvariable{document}{affiliation}, \getvariable{document}{location} \NC \NR
+\NC version \NC \currentdate \NC \NR
+\NC website \NC \getvariable{document}{website} \endash\ \getvariable{document}{support} \NC \NR
+\NC copyright \NC \symbol[cc][cc-by-sa-nc] \NC \NR
+\NC comment \NC the swiglib infrastructure is implemented by Luigi Scarso \NC \NR
+\stoptabulate
+
+\stopsection
+
+\stopdocument
diff --git a/doc/context/sources/general/manuals/tiptrick/tiptrick.tex b/doc/context/sources/general/manuals/tiptrick/tiptrick.tex
new file mode 100644
index 000000000..54a785134
--- /dev/null
+++ b/doc/context/sources/general/manuals/tiptrick/tiptrick.tex
@@ -0,0 +1,117 @@
+% interface=en
+%
+% author: Hans Hagen - Pragma ADE - www.pragma-ade.com
+
+\setupbodyfont
+ [dejavu]
+
+\setuppapersize
+ [HD-]
+
+\setuplayout
+ [location=middle,
+ header=0pt,
+ footer=0pt,
+ backspace=2.25cm,
+ topspace=2.25cm,
+ width=middle,
+ height=middle]
+
+\setupcolors
+ [state=start]
+
+\startreusableMPgraphic{page}
+ StartPage ;
+ picture p ; path s ;
+ fill Page withcolor .5red ;
+ draw Page withpen pensquare scaled 2cm withcolor .75[.5red,white] ;
+ s := (Field[Text][Text] enlarged .5cm) squeezed (.1cm,.15cm) ;
+ fill s withcolor .75[.5red,white] ;
+ if false :
+ p := image (graphictext "\ss TIP" withfillcolor .2white ;) ;
+ else :
+ p := textext.raw("\ss TIP") ;
+ setbounds p to (boundingbox p rightenlarged -0.025bbwidth(p)) ;
+ fi ;
+ p := p xysized(PaperWidth-1cm,PaperHeight-1cm) ;
+ p := p shifted .5(bbwidth(Page)-bbwidth(p),bbheight(Page)-bbheight(p)) ;
+ draw p withcolor .2white ;
+ clip p to s ;
+ draw p withcolor .875[.5red,white] ; ;
+ StopPage ;
+\stopreusableMPgraphic
+
+\defineoverlay
+ [page]
+ [\reuseMPgraphic{page}]
+
+\setupbackgrounds
+ [page]
+ [background=page,
+ state=repeat]
+
+\definecolor[red][r=.5]
+
+\setuphead
+ [chapter]
+ [style=\tfb,
+ before=,
+ after={\blank[line]}]
+
+\setupblank
+ [halfline]
+
+% xml interface
+
+\startxmlsetups xml:tips
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:tip
+ \startstandardmakeup
+ \startnamedsection[title][title=\xmlfirst{#1}{/title}]
+ \xmlall{#1}{/(remark|command)}
+ \vfill
+ \stopnamedsection
+ \stopstandardmakeup
+\stopxmlsetups
+
+\startxmlsetups xml:remark
+ \blank
+ \xmlflush{#1}
+ \blank
+\stopxmlsetups
+
+\definehighlight
+ [command]
+ [style=mono,
+ color=red,
+ command=no]
+
+\startxmlsetups xml:command
+ \blank
+ \starthighlight[command]
+ \xmlflush{#1}
+ \stophighlight
+ \blank
+\stopxmlsetups
+
+\startxmlsetups xml:reference
+ \vfill
+ \hfill\strut see:\space
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:initialize
+ \xmlsetsetup {#1} {
+ tips|tip|remark|command|reference
+ } {xml:*}
+\stopxmlsetups
+
+\xmlregisterdocumentsetup{main}{xml:initialize}
+
+\starttext
+
+ \xmlprocessfile{main}{tiptrick.xml}{}
+
+\stoptext
diff --git a/doc/context/sources/general/manuals/tiptrick/tiptrick.xml b/doc/context/sources/general/manuals/tiptrick/tiptrick.xml
new file mode 100644
index 000000000..8b4a30011
--- /dev/null
+++ b/doc/context/sources/general/manuals/tiptrick/tiptrick.xml
@@ -0,0 +1,53 @@
+<?xml version='1.0'?>
+
+<!-- author: Hans Hagen - Pragma ADE - www.pragma-ade.com -->
+
+<!-- feel free to submit more tips -->
+
+<tips xmlns="www.pragma-ade.com/schemas/tip.rng">
+
+ <tip>
+ <title>Generating Formats</title>
+ <remark>for all languages:</remark>
+ <command>context --make --all</command>
+ <remark>only english interface:</remark>
+ <command>context --make en</command>
+ <remark>for plain tex:</remark>
+ <command>mtxrun --script plain --make</command>
+ <!-- reference>mtexexec.pdf</reference -->
+ </tip>
+
+ <tip>
+ <title>Updating</title>
+ <remark>when installed from the wiki:</remark>
+ <command>..../first-setup(.cmd)</command>
+ <remark>when downloaded from the website:</remark>
+ <command>cd ..../tex/texmf-context</command>
+ <command>wget http://www.pragma-ade.com/context/current/cont-tmf.zip</command>
+ <command>unzip cont-tmf.zip</command>
+ <command>mtxrun --generate</command>
+ <!-- reference>minstall.pdf</reference -->
+ </tip>
+
+ <tip>
+ <title>Generating Command Lists</title>
+ <remark>quick reference document of english and dutch commands:</remark>
+ <command>context --interface=nl --global --result=setup-nl x-set-12.mkiv</command>
+ <command>context --interface=en --global --result=setup-en x-set-12.mkiv</command>
+ </tip>
+
+ <tip>
+ <title>Module Documentation</title>
+ <remark>pretty printed, annotated module documentation:</remark>
+ <command>mtxrun --script modules syst-aux.mkiv</command>
+ </tip>
+
+ <tip>
+ <title>Listings</title>
+ <remark>verbatim listings of (ascii) files:</remark>
+ <command>context --extra=listing --bodyfont=8pt --scite somefile.tex</command>
+ <command>context --extra=listing --bodyfont=8pt --scite somefile.lua</command>
+ <command>context --extra=listing --bodyfont=8pt --scite somefile.xml</command>
+ </tip>
+
+</tips>
diff --git a/doc/context/sources/general/manuals/tools/tools-mkiv.tex b/doc/context/sources/general/manuals/tools/tools-mkiv.tex
new file mode 100644
index 000000000..5f20e6985
--- /dev/null
+++ b/doc/context/sources/general/manuals/tools/tools-mkiv.tex
@@ -0,0 +1,501 @@
+% language=uk
+
+% author : Hans Hagen, PRAGMA ADE, NL
+% license : Creative Commons, Attribution-NonCommercial-ShareAlike 3.0 Unported
+
+\usemodule[abr-02]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ backspace=2cm,
+ topspace=1cm,
+ footer=0pt,
+ bottomspace=2cm]
+
+\definecolor
+ [DocumentColor]
+ [r=.5]
+
+\setuptype
+ [color=DocumentColor]
+
+\setuptyping
+ [color=DocumentColor]
+
+\usetypescript
+ [iwona]
+
+\setupbodyfont
+ [iwona]
+
+\setuphead
+ [chapter]
+ [style=\bfc,
+ color=DocumentColor]
+
+\setuphead
+ [section]
+ [style=\bfb,
+ color=DocumentColor]
+
+\setupinteraction
+ [hidden]
+
+\setupwhitespace
+ [big]
+
+\setupheadertexts
+ []
+
+\setupheadertexts
+ []
+ [{\DocumentColor \type {luatools mtxrun context}\quad\pagenumber}]
+
+\usesymbols[cc]
+
+\def\sTEXMFSTART{\type{texmfstart}}
+\def\sLUATOOLS {\type{luatools}}
+\def\sMTXRUN {\type{mtxrun}}
+\def\sCONTEXT {\type{context}}
+\def\sKPSEWHICH {\type{kpsewhich}}
+\def\sMKTEXLSR {\type{mktexlsr}}
+\def\sXSLTPROC {\type{xsltproc}}
+
+\usemodule[narrowtt]
+
+\startdocument
+ [metadata:author=Hans Hagen,
+ metadata:title={Tools: luatools, mtxrun, context},
+ author=Hans Hagen,
+ affiliation=PRAGMA ADE,
+ location=Hasselt NL,
+ title=Tools,
+ extra-1=luatools,
+ extra-2=mtxrun,
+ extra-3=context,
+ support=www.contextgarden.net,
+ website=www.pragma-ade.nl]
+
+\startMPpage
+ StartPage ;
+ picture p ; p := image (
+ for i=1 upto 21 :
+ for j=1 upto 30 :
+ drawarrow (fullcircle rotated uniformdeviate 360) scaled 10 shifted (i*15,j*15) ;
+ endfor ;
+ endfor ;
+ ) ;
+ p := p ysized (bbheight(Page)-4mm) ;
+ fill Page enlarged 2mm withcolor \MPcolor{DocumentColor} ;
+ draw p shifted (center Page - center p) withpen pencircle scaled 2 withcolor .5white ;
+ numeric dx ; dx := bbwidth(Page)/21 ;
+ numeric dy ; dy := bbheight(Page)/30 ;
+ p := textext("\tt\bf\white\getvariable{document}{extra-1}") xsized(14*dx) ;
+ p := p shifted (-lrcorner p) shifted lrcorner Page shifted (-1dx,8dy) ;
+ draw p ;
+ p := textext("\tt\bf\white\getvariable{document}{extra-2}") xsized(14*dx) ;
+ p := p shifted (-lrcorner p) shifted lrcorner Page shifted (-1dx,5dy) ;
+ draw p ;
+ p := textext("\tt\bf\white\getvariable{document}{extra-3}") xsized(14*dx) ;
+ p := p shifted (-lrcorner p) shifted lrcorner Page shifted (-1dx,2dy) ;
+ draw p ;
+ setbounds currentpicture to Page ;
+ StopPage
+\stopMPpage
+
+\startsubject[title=Contents]
+
+\placelist[section][alternative=a]
+
+\stopsubject
+
+\startsection[title={Remark}]
+
+This manual is work in progress. Feel free to submit additions or corrections.
+Before you start reading, it is good to know that the easiest way to get started
+with \CONTEXT\ is to download the standalone distribution
+from \type {contextgarden.net}. After that you only need to make sure that \type
+{luatex} is in your path. The main command you use is then \type {context} and
+normally it does all the magic it needs itself.
+
+\stopsection
+
+\startsection[title={Introduction}]
+
+Right from the start \CONTEXT\ came with programs that managed the process of
+\TEX-ing. Although you can perfectly well run \TEX\ directly, it is a fact that
+often multiple runs are needed and that registers need to be sorted.
+Therefore managing a job makes sense.
+
+First we had \TEXEXEC\ and \TEXUTIL, both written in \MODULA, and as
+this language was not supported on all platforms the programs were rewritten in
+\PERL. Following that a few more tools were shipped with \CONTEXT.
+
+When we moved on to \RUBY\ all the \PERL\ scripts were rewritten and when
+\CONTEXT\ \MKIV\ showed up, \LUA\ replaced \RUBY. As we use \LUATEX\ this means
+that currently the tools and the main program share the same language. For \MKII\
+scripts like \TEXEXEC\ will stay around but the idea is that there will be \LUA\
+alternatives for them as well.
+
+Because we shipped many scripts, and because the de facto standard \TEX\
+directory structure expects scripts to be in certain locations, we not only ship
+tools but also some more generic scripts that locate and run these tools.
+
+\stopsection
+
+\startsection[title={The location}]
+
+Normally you don't need to know so many details about where the scripts
+are located but here they are:
+
+\starttyping
+<texroot>/scripts/context/perl
+<texroot>/scripts/context/ruby
+<texroot>/scripts/context/lua
+<texroot>/scripts/context/stubs
+\stoptyping
+
+This hierarchy was actually introduced because \CONTEXT\ was shipped with a bunch
+of tools. As mentioned, we nowadays focus on \LUA\ but we keep a few of the older
+scripts around in the \PERL\ and \RUBY\ paths. Now, if you're only using \CONTEXT\
+\MKIV, and this is highly recommended, you can forget about all but the \LUA\
+scripts.
+
+\stopsection
+
+\startsection[title={The traditional finder}]
+
+When you run scripts multiple times, and in the case of \CONTEXT\ they are even
+run inside other scripts, you want to minimize the startup time. Unfortunately
+the traditional way to locate a script, using \sKPSEWHICH, is not that fast,
+especially in a setup with many large trees. Also, because not all tasks can be
+done with the traditional scripts (take format generation) we provided a runner
+that could deal with this: \sTEXMFSTART. As this script was also used in more
+complex workflows, it had several tasks:
+
+\startitemize[packed]
+\item locate scripts in the distribution and run them using the right
+ interpreter
+\item do this selectively, for instance identify the need for a run using
+ checksums for potentially changed files (handy for image conversion)
+\item pass information to child processes so that lookups are avoided
+\item choose a distribution among several installed versions (set the root
+ of the \TEX\ tree)
+\item change the working directory before running the script
+\item resolve paths and names on demand and launch programs with arguments
+      where names are expanded as controlled by prefixes (handy for
+      \TEX-unaware programs)
+\item locate and open documentation, mostly as part of the help systems in
+ editors, but also handy for seeing what configuration file is used
+\item act as a \KPSEWHICH\ server or client (only used in special cases,
+ and using its own database)
+\stopitemize
+
+Of course there were the usual more obscure and undocumented features as
+well. The idea was to use this runner as follows:
+
+\starttyping
+texmfstart texexec <further arguments>
+texmfstart --tree <rootoftree> texexec <further arguments>
+\stoptyping
+
+These are just two ways of calling this program. As \sTEXMFSTART\ can initialize
+the environment as well, it is basically the only script that has to be present
+in the binary path. This is quite comfortable as this avoids conflicts in names
+between the called scripts and other installed programs.
+
+Of course calls like the above can be wrapped in a shell script or batch file without
+penalty as long as \sTEXMFSTART\ itself is not wrapped in a caller script that
+applies other inefficient lookups. If you use the \CONTEXT\ minimals you can be
+sure that the most efficient method is chosen, but we've seen quite inefficient
+call chains elsewhere.
+
+In the \CONTEXT\ minimals this script has been replaced by the one we will
+discuss in the next section, \sMTXRUN, but a stub is still provided.
+
+\stopsection
+
+\startsection[title={The current finder}]
+
+In \MKIV\ we went a step further and completely abandoned the traditional lookup
+methods and do everything in \LUA. As we want a clear separation of
+functionality we have two main controlling scripts: \sMTXRUN\ and \sLUATOOLS. The
+last name may look somewhat confusing but it is just one in a series.
+\footnote {We have \type {ctxtools}, \type {exatools}, \type {mpstools}, \type
+{mtxtools}, \type {pdftools}, \type {rlxtools}, \type {runtools}, \type
+{textools}, \type {tmftools} and \type {xmltools}. Most of their functionality is
+already reimplemented.}
+
+In \MKIV\ the \sLUATOOLS\ program is nowadays seldom used. It's just a drop||in
+for \sKPSEWHICH\ plus a bit more. In that respect it's rather dumb in that it
+does not use the database, but clever at the same time because it can make one
+based on the little information available when it runs. It can also be used to
+generate format files, whether or not using \LUA\ stubs, but in practice this is not
+needed at all.
+
+For \CONTEXT\ users, the main invocation of this tool is when the \TEX\ tree is
+updated. For instance, after adding a font to the tree or after updating
+\CONTEXT, you need to run:
+
+\starttyping
+mtxrun --generate
+\stoptyping
+
+After that all tools will know where to find stuff and how to behave well within
+the tree. This is because they share the same code, mostly because they are
+started using \sMTXRUN. For instance, you process a file with:
+
+\starttyping
+mtxrun --script context <somefile>
+\stoptyping
+
+Because this happens often, there's also a shortcut:
+
+\starttyping
+context <somefile>
+\stoptyping
+
+But this does use \sMTXRUN\ as well. The help information of \sMTXRUN\ is rather
+minimalistic and if you have no clue what an option does, you probably never
+needed it anyway. Here we discuss a few options. We already saw that we can
+explicitly ask for a script:
+
+\starttyping
+mtxrun --script context <somefile>
+\stoptyping
+
+but
+
+\starttyping
+mtxrun context <somefile>
+\stoptyping
+
+also works. However, by using \type {--script} you limit the lookup to the valid
+\CONTEXT\ \MKIV\ scripts. In the \TEX\ tree these have names prefixed by \type
+{mtx-} and a lookup will also try the plural form. So, the next two lookups are
+equivalent:
+
+\starttyping
+mtxrun --script font
+mtxrun --script fonts
+\stoptyping
+
+Both will run \type {mtx-fonts.lua}. Actually, this is one of the scripts that
+you might need when your font database is somehow outdated and not updated
+automatically:
+
+\starttyping
+mtxrun --script fonts --reload --force
+\stoptyping
+
+Normally \sMTXRUN\ is all you need in order to run a script. However, there are a
+few more options:
+
+\ctxlua{os.execute("mtxrun > tools-mkiv-help.tmp")}
+
+\typefile[ntyping]{tools-mkiv-help.tmp}
+
+Don't worry, you only need those obscure features when you integrate \CONTEXT\ in
+for instance a web service or when you run large projects where runs and paths
+need special care.
+
+\stopsection
+
+\startsection[title={Updating}]
+
+There are two ways to update \CONTEXT\ \MKIV. When you manage your
+trees yourself or when you use for instance \TEXLIVE, you act as
+follows:
+
+\startitemize[packed]
+\item download the file cont-tmf.zip from \type {www.pragma-ade.com} or elsewhere
+\item unzip this file in a subtree, for instance \type {tex/texmf-local}
+\item run \type {mtxrun --generate}
+\item run \type {mtxrun --script font --reload}
+\item run \type {mtxrun --script context --make}
+\stopitemize
+
+Or shorter:
+
+\startitemize[packed]
+\item run \type {mtxrun --generate}
+\item run \type {mtxrun font --reload}
+\item run \type {context --make}
+\stopitemize
+
+Normally these commands are not even needed, but they are a nice test of whether
+your tree is still okay. To some extent \sCONTEXT\ is clever enough to decide if the
+databases need to be regenerated and|/|or a format needs to be remade and|/|or if
+a new font database is needed.
+
+Now, if you also want to run \MKII, you need to add:
+
+\startitemize[packed]
+\item run \type {mktexlsr}
+\item run \type {texexec --make}
+\stopitemize
+
+The question is, how to act when \sLUATOOLS\ and \sMTXRUN\ have been updated
+themselves? In that case, after unzipping the archive, you need to do the
+following:
+
+\startitemize[packed]
+\item run \type {luatools --selfupdate}
+\item run \type {mtxrun --selfupdate}
+\stopitemize
+
+For quite a while we shipped so called \CONTEXT\ minimals. These zip files
+contained only the resources and programs that made sense for running \CONTEXT.
+Nowadays the minimals are installed and synchronized via the internet. \footnote
+{This project was triggered by Mojca Miklavec who is also in charge of this bit of
+the \CONTEXT\ infrastructure. More information can be found at \type
+{contextgarden.net}.} You can just run the installer again and no additional
+commands are needed. In the console you will see several calls to \sMTXRUN\ and
+\sLUATOOLS\ fly by.
+
+\stopsection
+
+\startsection[title={The tools}]
+
+We only mention the tools here. The most important ones are \sCONTEXT\ and \type
+{fonts}. You can ask for a list of installed scripts with:
+
+\starttyping
+mtxrun --script
+\stoptyping
+
+On my machine this gives:
+
+\ctxlua{os.execute("mtxrun --script > tools-mkiv-help.tmp")}
+
+\typefile[ntyping]{tools-mkiv-help.tmp}
+
+The most important scripts are \type {mtx-fonts} and \type {mtx-context}. By
+default fonts are looked up by filename (the \type {file:} prefix before font
+names in \CONTEXT\ is the default). But you can also look up fonts by name (\type
+{name:}) or by specification (\type {spec:}). If you want to use these two
+methods, you need to generate a font database as mentioned in the previous
+section. You can also use the font tool to get information about the fonts
+installed on your system.
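+
+For instance, the following calls list the fonts known to the database (a
+sketch; running \type {mtxrun --script fonts} without arguments shows the
+options that your version actually supports):
+
+\starttyping
+mtxrun --script fonts --list --all
+mtxrun --script fonts --list --all --pattern=dejavu*
+\stoptyping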
+
+\stopsection
+
+\startsection[title={Running \CONTEXT}]
+
+The \sCONTEXT\ tool is what you will use most as it manages your
+run.
+
+\ctxlua{os.execute("context > tools-mkiv-help.tmp")}
+
+\typefile[ntyping]{tools-mkiv-help.tmp}
+
+There are a few expert options too:
+
+\ctxlua{os.execute("context --expert > tools-mkiv-help.tmp")}
+
+\typefile[ntyping]{tools-mkiv-help.tmp}
+
+You might as well forget about these unless you are one of the
+\CONTEXT\ developers.
+
+\stopsection
+
+\startsection[title={Prefixes}]
+
+A handy feature of \sMTXRUN\ (and, as with most features, an inheritance from
+\sTEXMFSTART) is that it will resolve prefixed arguments. This can be of help
+when you run programs that are unaware of the \TEX\ tree but nevertheless need to
+locate files in it.
+
+\ctxlua{os.execute("mtxrun --prefixes > tools-mkiv-help.tmp")}
+
+\typefile[ntyping]{tools-mkiv-help.tmp}
+
+An example is:
+
+\starttyping
+mtxrun --execute xsltproc file:whatever.xsl file:whatever.xml
+\stoptyping
+
+The call to \sXSLTPROC\ will get two arguments, being the complete paths to the
+files (given that they can be resolved). This permits you to organize the files in
+a similar way as \TEX\ files.
+
+\stopsection
+
+\startsection[title={Stubs}]
+
+As the tools are written in the \LUA\ language we need a \LUA\ interpreter and of
+course we use \LUATEX\ itself. On \UNIX\ we can copy \sLUATOOLS\ and \sMTXRUN\ to
+files in the binary path with the same name but without suffix. Starting them in
+another way is a waste of time, especially when \sKPSEWHICH\ is used to find
+them, something which is useless in \MKIV\ anyway. Just use these scripts
+directly as they are self contained.
+
+For \sCONTEXT\ and other scripts that we want convenient access to, stubs are
+needed, like:
+
+\starttyping
+#!/bin/sh
+mtxrun --script context "$@"
+\stoptyping
+
+This is also quite efficient as the \sCONTEXT\ script \type {mtx-context} is
+loaded in \sMTXRUN\ and uses the same database.
+
+On \WINDOWS\ you can copy the scripts as|-|is and associate the suffix with
+\LUATEX\ (or more precisely: \type {texlua}) but then all \LUA\ scripts will be
+run that way, which is not what you might want.
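+
+A batch file stub, mirroring the \UNIX\ one shown above, is possible as well (a
+minimal sketch; as explained below such stubs have their limitations):
+
+\starttyping
+@echo off
+mtxrun --script context %*
+\stoptyping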
+
+In \TEXLIVE\ stubs for starting scripts were introduced by Fabrice Popineau. Such
+a stub would start for instance \sTEXMFSTART, that is: it located the script
+(\PERL\ or \RUBY) in the \TEX\ tree and launched it with the right interpreter.
+Later we shipped pseudo binaries of \sTEXMFSTART: a \RUBY\ interpreter plus
+scripts wrapped into a self contained binary.
+
+For \MKIV\ we don't need such methods and started with simple batch files,
+similar to the \UNIX\ startup scripts. However, these have the disadvantage that
+they cannot be used in other batch files without using the \type {start} command.
+In \TEXLIVE\ this is taken care of by a small binary written by T.M.\ Trzeciak,
+so on \TEXLIVE\ 2009 we saw a call chain from \type {exe} to \type {cmd} to \type
+{lua}, which is somewhat messy.
+
+This is why we now use an adapted and stripped down version of that program that
+is tuned for \sMTXRUN, \sLUATOOLS\ and \sCONTEXT. So, we moved from the original
+\type {cmd} based approach to an \type {exe} one.
+
+\starttyping
+mtxrun.dll
+mtxrun.exe
+\stoptyping
+
+You can copy \type {mtxrun.exe} to for instance \type {context.exe} and it will
+still use \sMTXRUN\ for locating the right script. It also takes care of mapping
+\sTEXMFSTART\ to \sMTXRUN. So we've removed the intermediate \type {cmd} step,
+can not run the script as any program, and most of all, we're as efficient as can
+be.
+
+Of course this program is only meaningful for the \CONTEXT\ approach to tools.
+
+It may all sound more complex than it is but once it works users will not notice
+those details. Also, in practice not that much has changed in running the tools
+between \MKII\ and \MKIV\ as we've seen no reason to change the methods.
+
+\stopsection
+
+\startsubject[title={Colofon}]
+
+\starttabulate[|B|p|]
+ \NC author \NC \documentvariable{author},
+ \documentvariable{affiliation},
+ \documentvariable{location} \NC \NR
+ \NC version \NC \currentdate \NC \NR
+ \NC website \NC \documentvariable{website} \endash\
+ \documentvariable{support} \NC \NR
+ \NC copyright \NC \symbol[cc][cc-by-sa-nc] \NC \NR
+\stoptabulate
+
+\stopsubject
+
+\stopdocument
diff --git a/doc/context/sources/general/manuals/units/units-mkiv.tex b/doc/context/sources/general/manuals/units/units-mkiv.tex
new file mode 100644
index 000000000..0d91c67df
--- /dev/null
+++ b/doc/context/sources/general/manuals/units/units-mkiv.tex
@@ -0,0 +1,538 @@
+% language=uk
+
+\usemodule[art-01,abr-02,physics-units]
+
+\definecolor[red] [darkred]
+\definecolor[green] [darkgreen]
+\definecolor[blue] [darkblue]
+\definecolor[yellow] [darkyellow]
+\definecolor[magenta][darkmagenta]
+\definecolor[cyan] [darkcyan]
+
+\definecolor[maincolor] [darkcyan]
+\definecolor[extracolor][darkmagenta]
+
+\setupbodyfont
+ [10pt]
+
+\setuptyping
+ [color=extracolor]
+
+\setuptype
+ [color=extracolor] % darkyellow
+
+% \setupnumbering
+% [alternative=doublesided]
+
+\setupinteraction
+ [hidden]
+
+\setuphead
+ [section]
+ [color=maincolor]
+
+\usesymbols[cc]
+
+\startdocument
+ [metadata:author=Hans Hagen,
+ metadata:title=Units,
+ author=Hans Hagen,
+ affiliation=PRAGMA ADE,
+ location=Hasselt NL,
+ title=Units,
+ extra=ConTeXt MkIV,
+ support=www.contextgarden.net,
+ website=www.pragma-ade.nl]
+
+\unexpanded\def\UnitsHack#1#2%
+ {\setbox\scratchbox\hbox{\bf\strut#1#2/}% kerning and such
+ \hbox to \wd\scratchbox{\bf\strut#1\hss/\hss#2}}
+
+\startMPpage
+
+ StartPage ;
+ fill Page enlarged 2mm withcolor \MPcolor{darkcyan} ;
+ pickup pencircle scaled 2mm ;
+ picture p, q, r ;
+ p := textext("\ssbf\WORD{\documentvariable{title}}") xsized (bbheight Page - 2cm) rotated 90 ;
+ q := textext("\ssbf\WORD{\documentvariable{author}}") ysized 1cm ;
+ r := textext("\ssbf\WORD{\documentvariable{extra}}") xsized bbwidth q ;
+ draw anchored.rt (p, center rightboundary Page shifted (-1cm, 0mm)) withcolor white ;
+ draw anchored.lft(q, ulcorner Page shifted ( 1cm, -84mm)) withcolor white ; % \MPcolor{darkred} ;
+ draw anchored.lft(r, ulcorner Page shifted ( 1cm,-108mm)) withcolor white ; % \MPcolor{darkred} ;
+ StopPage ;
+
+\stopMPpage
+
+% \page[empty] \setuppagenumber[start=1]
+
+\startsubject[title={Contents}]
+
+\placelist[section][criterium=all,interaction=all]
+
+\stopsubject
+
+\startsection[title={Introduction}]
+
+In \CONTEXT\ \MKII\ there is a module that implements consistent
+typesetting of units (quantities and dimensions). In \MKIV\ this
+functionality is now part of the physics core modules. This is
+also one of the mechanisms that got a new user interface: instead
+of using commands we now parse text. Thanks to those users who
+provided input we're more complete now that in \MKII. You can browse
+the mailing list archive to get some sense of history.
+
+\stopsection
+
+\startsection[title={The main command}]
+
+The core command is \type {\unit}. The argument to this command gets
+parsed and converted into a properly typeset dimension. Normally there
+will be a quantity in front.
+
+\starttabulate
+\NC \type{10 meter} \NC \unit{10 meter} \NC \NR
+\NC \type{10 meter per second} \NC \unit{10 meter per second} \NC \NR
+\NC \type{10 square meter per second} \NC \unit{10 square meter per second} \NC \NR
+\stoptabulate
+
+The parser knows about special cases, like synonyms:
+
+\starttabulate
+\NC \type{10 degree celsius} \NC \unit{10 degree celsius} \NC \NR
+\NC \type{10 degrees celsius} \NC \unit{10 degrees celsius} \NC \NR
+\NC \type{10 celsius} \NC \unit{10 celsius} \NC \NR
+\stoptabulate
+
+The units can be rather complex, for example:
+
+\startbuffer
+\unit{30 kilo pascal square meter / second kelvin}
+\stopbuffer
+
+\typebuffer
+
+This comes out as: \ignorespaces\getbuffer\removeunwantedspaces. Depending
+on the unit at hand, recognition is quite flexible. The following variants
+all work out ok.
+
+\starttabulate
+\NC \type{10 kilogram} \NC \unit{10 kilogram} \NC \NR
+\NC \type{10 kilo gram} \NC \unit{10 kilo gram} \NC \NR
+\NC \type{10 k gram} \NC \unit{10 k gram} \NC \NR
+\NC \type{10 kilo g} \NC \unit{10 kilo g} \NC \NR
+\NC \type{10 k g} \NC \unit{10 k g} \NC \NR
+\NC \type{10 kg} \NC \unit{10 kg} \NC \NR
+\NC \type{10 kilog} \NC \unit{10 kilog} \NC \NR
+\NC \type{10 kgram} \NC \unit{10 kgram} \NC \NR
+\stoptabulate
+
+Of course being consistent makes sense, so normally you will stick to
+either short or long keywords.
+
+You can provide a qualifier that gets lowered and appended to
+the preceding unit.
+
+\startbuffer
+\unit{112 decibel (A)}
+\stopbuffer
+
+\typebuffer
+
+This gives: \ignorespaces\getbuffer\removeunwantedspaces. Combinations
+are also possible:
+
+\starttabulate
+\NC \type{5 watt per meter celsius} \NC \unit{5 watt per meter celsius} \NC \NR
+\NC \type{5 watt per meter degrees celsius} \NC \unit{5 watt per meter degrees celsius} \NC \NR
+\NC \type{5 watt per meter kelvin} \NC \unit{5 watt per meter kelvin} \NC \NR
+\NC \type{5 watt per meter per kelvin} \NC \unit{5 watt per meter per kelvin} \NC \NR
+\NC \type{10 arcminute} \NC \unit{10 arcminute} \NC \NR
+\NC \type{10 arcminute 20 arcsecond} \NC \unit{10 arcminute 20 arcsecond} \NC \NR
+\stoptabulate
+
+\stopsection
+
+\startsection[title={Extra units}]
+
+To some extent units can be tuned. You can for instance
+influence the spacing between a number and a unit:
+
+\startbuffer
+ \unit{35 kilogram per cubic meter}
+\setupunit[space=normal] \unit{35 kilogram per cubic meter}
+\setupunit[space=big] \unit{35 kilogram per cubic meter}
+\setupunit[space=medium] \unit{35 kilogram per cubic meter}
+\setupunit[space=small] \unit{35 kilogram per cubic meter}
+\setupunit[space=none] \unit{35 kilogram per cubic meter}
+\stopbuffer
+
+\typebuffer
+
+Of course no spacing looks rather bad:
+
+\startlines
+\getbuffer
+\stoplines
+
+Another parameter is \type {separator}. In order to demonstrate
+this we define two extra unit commands:
+
+\startbuffer
+\defineunit[sunit][separator=small]
+\defineunit[nunit][separator=none]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+We now have two more commands:
+
+\startbuffer
+\unit {35 kilogram cubic meter}
+\sunit{35 kilogram cubic meter}
+\nunit{35 kilogram cubic meter}
+\stopbuffer
+
+\typebuffer
+
+These three commands give different results:
+
+\startlines
+\getbuffer
+\stoplines
+
+Valid separators are \type {normal}, \type {big}, \type {medium},
+\type {small} and \type {none}. You can let units stand out by
+applying color or a specific style.
+
+\startbuffer
+\setupunit[style=\bi,color=maincolor]
+\unit{10 square meter per second}
+\stopbuffer
+
+\typebuffer
+
+Keep in mind that all defined units inherit from their parent
+definition unless they are set up themselves.
+
+\start \blank \getbuffer \blank \stop
+
+To some extent you can control rendering in text and math mode. As
+an example we define an extra instance.
+
+\startbuffer
+\defineunit[textunit][alternative=text]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+test \unit {10 cubic meter per second} test
+test \textunit{10 cubic meter per second} test
+test $\unit {10 cubic meter per second}$ test
+test $\textunit{10 cubic meter per second}$ test
+test 10 \unit {cubic meter per second} test
+test 10 \textunit{cubic meter per second} test
+test $10 \unit {cubic meter per second}$ test
+test $10 \textunit{cubic meter per second}$ test
+\stopbuffer
+
+\typebuffer
+
+\startlines
+\getbuffer
+\stoplines
+
+\stopsection
+
+\startsection[title={Labels}]
+
+The units, prefixes and operators are typeset using the label
+mechanism, which means that they adapt to the language and|/|or can be
+adapted. Instead of language specific labels you can also
+introduce mappings that don't relate to a language at all. As an
+example we define a bogus mapping.
+
+\startbuffer
+\setupunittext
+ [whatever]
+ [meter=retem,
+ second=dnoces]
+
+\setupprefixtext
+ [whatever]
+ [kilo=olik]
+
+\setupoperatortext
+ [whatever]
+ [solidus={ rep }]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+Such a mapping can be partial; the current language will
+be the default fallback, which itself falls back on the English
+language mapping.
+
+\startbuffer
+\unit{10 km/s}
+\unit{10 Kilo Meter/s}
+\unit{10 kilo Meter/s}
+\unit{10 Kilo m/s}
+\unit{10 k Meter/s}
+\stopbuffer
+
+\typebuffer
+
+When we typeset this we get the normal rendering:
+
+\startlines
+\getbuffer
+\stoplines
+
+However, when we change the language parameter, we get
+a different result:
+
+\startlines
+\setupunit[language=whatever]\getbuffer
+\stoplines
+
+The alternative rendering is set up as follows:
+
+\starttyping
+\setupunit[language=whatever]
+\stoptyping
+
+You can also decide to use a special instance of units:
+
+\starttyping
+\defineunit[wunit][language=whatever]
+\stoptyping
+
+This will define the \type {\wunit} command and leave the original
+\type {\unit} command untouched.
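+
+For instance, assuming the \type {whatever} mapping defined earlier in this
+section:
+
+\starttyping
+\unit {10 km/s} % the regular labels
+\wunit{10 km/s} % the whatever labels
+\stoptyping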
+
+\stopsection
+
+\startsection[title={Digits}]
+
+In addition to units we have digits. These can be used independently
+but the same functionality is also integrated in the unit commands.
+The main purpose of this functionality is formatting numbers in tables, of which
+we give an example below.
+
+\starttabulate[|l|r|]
+\NC \type{12,345.67 kilogram} \NC \unit{12,345.67 kilogram} \NR
+\NC \type{__,__1.23 kilogram} \NC \unit{__,__1.23 kilogram} \NR
+\NC \type{__,___.12 kilogram} \NC \unit{__,___.12 kilogram} \NR
+\NC \type{__,__1.== kilogram} \NC \unit{__,__1.== kilogram} \NR
+\NC \type{__,___:23 kilogram} \NC \unit{__,___:23 kilogram} \NR
+\stoptabulate
+
+The \type {_} character serves as a placeholder. There are some
+assumptions about how numbers are constructed. In principle the input
+assumes a comma to separate thousands and a period to separate the
+fraction.
+
+\getbuffer
+
+You can swap periods and commas in the output. In fact there are a
+few methods available. For instance we can separate the thousands
+with a small space instead of a symbol.
+
+\startbuffer
+\starttabulate[|c|r|r|]
+\HL
+\NC 0 \NC \setupunit[method=0]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=0]\unit{@@,@@0.10 kilogram} \NC \NR
+\NC 1 \NC \setupunit[method=1]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=1]\unit{@@,@@0.10 kilogram} \NC \NR
+\NC 2 \NC \setupunit[method=2]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=2]\unit{@@,@@0.10 kilogram} \NC \NR
+\NC 3 \NC \setupunit[method=3]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=3]\unit{@@,@@0.10 kilogram} \NC \NR
+\NC 4 \NC \setupunit[method=4]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=4]\unit{@@,@@0.10 kilogram} \NC \NR
+\NC 5 \NC \setupunit[method=5]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=5]\unit{@@,@@0.10 kilogram} \NC \NR
+\NC 6 \NC \setupunit[method=6]\unit{00,000.10 kilogram}
+ \NC \setupunit[method=6]\unit{@@,@@0.10 kilogram} \NC \NR
+\HL
+\stoptabulate
+\stopbuffer
+
+\typebuffer % [bodyfont=9pt]
+
+\getbuffer
+
+The digit modes can be summarized as:
+
+\startitemize[n,packed]
+\item periods/comma
+\item commas/period
+\item thinmuskips/comma
+\item thinmuskips/period
+\item thickmuskips/comma
+\item thickmuskips/period
+\stopitemize
+
+You can reverse the order of commas and periods in the input by
+setting the parameter \type {order} to \type {reverse}.
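+
+For instance, such a setup could look like this:
+
+\starttyping
+\setupunit[order=reverse]
+\stoptyping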
+
+The digit parser handles a bunch of special characters as
+well as different formats. We strongly suggest that you use
+the grouped call.
+
+\starttabulate[|l|l|l|]
+\NC \type{.} \NC , . \NC comma or period \NC \NR
+\NC \type{,} \NC , . \NC comma or period \NC \NR
+\NC \type{:} \NC \NC invisible period \NC \NR
+\NC \type{;} \NC \NC invisible comma \NC \NR
+\NC \type{_} \NC \NC invisible space \NC \NR
+\NC \type{/} \NC \NC invisible sign \NC \NR
+\NC \type{-} \NC $-$ \NC minus sign \NC \NR
+\NC \type{+} \NC $+$ \NC plus sign \NC \NR
+\NC \type{//} \NC \NC invisible high sign \NC \NR
+\NC \type{--} \NC $\negative$ \NC high minus sign \NC \NR
+\NC \type{++} \NC $\positive$ \NC high plus sign \NC \NR
+\NC \type{=} \NC $\zeroamount$ \NC zero padding \NC \NR
+\stoptabulate
+
+Let's give some examples:
+
+\starttabulate[|l|r|]
+\NC \type{1} \NC \ruledhbox{\strut\digits{1}} \NC \NR
+\NC \type{12} \NC \ruledhbox{\strut\digits{12}} \NC \NR
+\NC \type{12.34} \NC \ruledhbox{\strut\digits{12.34}} \NC \NR
+\NC \type{123,456} \NC \ruledhbox{\strut\digits{123,456}} \NC \NR
+\NC \type{123,456.78} \NC \ruledhbox{\strut\digits{123,456.78}} \NC \NR
+\NC \type{12,34} \NC \ruledhbox{\strut\digits{12,34}} \NC \NR
+\NC \type{.1234} \NC \ruledhbox{\strut\digits{.1234}} \NC \NR
+\NC \type{1234} \NC \ruledhbox{\strut\digits{1234}} \NC \NR
+\NC \type{123,456.78^9} \NC \ruledhbox{\strut\digits{123,456.78^9}} \NC \NR
+\NC \type{123,456.78e9} \NC \ruledhbox{\strut\digits{123,456.78e9}} \NC \NR
+\NC \type{/123,456.78e-9} \NC \ruledhbox{\strut\digits{/123,456.78e-9}} \NC \NR
+\NC \type{-123,456.78e-9} \NC \ruledhbox{\strut\digits{-123,456.78e-9}} \NC \NR
+\NC \type{+123,456.78e-9} \NC \ruledhbox{\strut\digits{+123,456.78e-9}} \NC \NR
+\NC \type{//123,456.78e-9} \NC \ruledhbox{\strut\digits{//123,456.78e-9}} \NC \NR
+\NC \type{--123,456.78e-9} \NC \ruledhbox{\strut\digits{--123,456.78e-9}} \NC \NR
+\NC \type{++123,456.78e-9} \NC \ruledhbox{\strut\digits{++123,456.78e-9}} \NC \NR
+\NC \type{___,___,123,456,789.00} \NC \ruledhbox{\strut\digits{___,___,123,456,789.00}} \NC \NR
+\NC \type{___,___,_12,345,678.==} \NC \ruledhbox{\strut\digits{___,___,_12,345,678.==}} \NC \NR
+\stoptabulate
+
+\stopsection
+
+\startsection[title={Adding units}]
+
+It is possible to add extra snippets. This is a two|-|step process:
+first a snippet is defined, next a proper label is set up. In the
+next example we define a couple of \TEX\ dimensions:
+
+\startbuffer
+\registerunit
+ [unit]
+ [point=point,
+ basepoint=basepoint,
+ scaledpoint=scaledpoint,
+ didot=didot,
+ cicero=cicero]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+Possible categories are: \type {prefix}, \type {unit}, \type {operator},
+\type {suffix}, \type {symbol} and \type {packaged}. Next we define labels:
+
+\startbuffer
+\setupunittext
+ [point=pt,
+ basepoint=bp,
+ scaledpoint=sp,
+ didot=dd,
+ cicero=cc]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+Now we can use these:
+
+\startbuffer
+\unit{10 point / second}
+\stopbuffer
+
+\typebuffer
+
+Of course you can wonder what this means.
+
+\blank \getbuffer \blank
+
+When no label is defined the long name is used:
+
+\startbuffer
+\registerunit
+ [unit]
+ [page=page]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+This is used as:
+
+\startbuffer
+\unit{10 point / page}
+\stopbuffer
+
+\typebuffer
+
+Which gives:
+
+\blank \getbuffer \blank
+
+\stopsection
+
+\startsection[title={Built|-|in keywords}]
+
+A given sequence of keywords is translated into a list of internal
+keywords. For instance \type {m}, \type {Meter} and \type {meter}
+all become \type {meter} and that one is used when resolving a
+label. In the next tables the right column mentions the internal
+keyword. The left column shows the Cased variant, but a lowercase
+one is built|-|in as well.
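+
+So, for example, the following three calls all resolve to the same internal
+keyword and therefore render identically:
+
+\starttyping
+\unit{10 m/s}
+\unit{10 Meter/s}
+\unit{10 meter/s}
+\stoptyping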
+
+The following prefixes are built|-|in:
+
+\showunits[prefixes]
+
+The following units are supported, including some combinations:
+
+\showunits[units]
+
+The amount of operators is small:
+
+\showunits[operators]
+
+There is also a small set of (named) suffixes:
+
+\showunits[suffixes]
+
+Some symbols get a special treatment:
+
+\showunits[symbols]
+
+These are also special:
+
+\showunits[packaged]
+
+\stopsection
+
+\startsection[title={Colophon}]
+
+\starttabulate[|B|p|]
+\NC author \NC \getvariable{document}{author}, \getvariable{document}{affiliation}, \getvariable{document}{location} \NC \NR
+\NC version \NC \currentdate \NC \NR
+\NC website \NC \getvariable{document}{website} \endash\ \getvariable{document}{support} \NC \NR
+\NC copyright \NC \symbol[cc][cc-by-sa-nc] \NC \NR
+\stoptabulate
+
+\stopsection
+
+\stopdocument
diff --git a/doc/context/sources/general/manuals/workflows/workflows-contents.tex b/doc/context/sources/general/manuals/workflows/workflows-contents.tex
new file mode 100644
index 000000000..a32f4737d
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-contents.tex
@@ -0,0 +1,13 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-contents
+
+\starttitle[title=Contents]
+
+ \placelist[chapter]
+
+\stoptitle
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-graphics.tex b/doc/context/sources/general/manuals/workflows/workflows-graphics.tex
new file mode 100644
index 000000000..55a8ad701
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-graphics.tex
@@ -0,0 +1,157 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-graphics
+
+\startchapter[title=Graphics]
+
+\startsection[title=Bad names]
+
+After many years of using \CONTEXT\ in workflows where large numbers of source files
+as well as graphics were involved we can safely say that it's hard for publishers to
+control the way these are named. This is probably due to the fact that in a
+click|-|and|-|point based desktop publishing workflow names don't matter as one stays on
+one machine, and names are only entered once (after that these names become abstractions and
+get cut and pasted). Proper consistent resource management is simply not part of the flow.
+
+This means that you get names like:
+
+\starttyping
+foo_Bar_01_03-a.EPS
+foo__Bar-01a_03.eps
+foo__Bar-01a_03.eps
+foo BarA 01-03.eps
+\stoptyping
+
+Especially when a non|-|proportional screen font is used multiple spaces can look
+like one. In fancy screen fonts upper and lowercase usage might get obscured. It
+really makes one wonder if copy|-|editing or adding labels to graphics isn't
+suffering from the same problem.
+
+Anyhow, as in an automated rendering workflow the rendering is often the last step you
+can imagine that when names get messed up it's that last step that gets blamed. It's not
+that hard to sanitize names of files on disk as well as in the files that refer to them,
+and we normally do that when we have complete control. This is not an option when all the resources
+are synchronized from elsewhere. In that case the only way out is signaling potential
+issues. Say that in the source file there is a reference:
+
+\starttyping
+foo_Bar_01_03-a.EPS
+\stoptyping
+
+and that the graphic on disk has the same name, but for some reason after an update
+has become:
+
+\starttyping
+foo-Bar_01_03-a.EPS
+\stoptyping
+
+The old image is probably still there so the update is not reflected in the final
+product. This is not that uncommon when you deal with tens of thousands of files,
+many editors and graphic designers, and no strict filename policy.
+
+For this we provide the following tracing option:
+
+\starttyping
+\enabletrackers[graphics.lognames]
+\stoptyping
+
+This will put information in the log file about included graphics, like:
+
+\starttyping
+system > graphics > start names
+
+used graphic > asked : cow.pdf
+used graphic > comment : not found
+used graphic > asked : t:/sources/cow.pdf
+used graphic > format : pdf
+used graphic > found : t:/sources/cow.pdf
+used graphic > used : t:/sources/cow.pdf
+
+system > graphics > stop names
+\stoptyping
+
+You can also add information to the file itself:
+
+\starttyping
+\usemodule[s-figures-names]
+\stoptyping
+
+Of course that has to be done at the end of the document. Bad names are reported
+and suitable action can be taken.
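+
+A minimal sketch of such a document (the figure name is just the bogus example
+from above):
+
+\starttyping
+\starttext
+    \externalfigure[foo_Bar_01_03-a.EPS]
+    % ... more text and graphics ...
+    \usemodule[s-figures-names]
+\stoptext
+\stoptyping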
+
+\stopsection
+
+\startsection[title=Downsampling]
+
+You can plug in your own converter, here is an example:
+
+\starttyping
+\startluacode
+
+figures.converters.jpg = figures.converters.jpg or { }
+
+figures.converters.jpg["lowresjpg.pdf"] =
+ function(oldname,newname,resolution)
+ figures.programs.run (
+ [[gm]],
+ [[convert -geometry %nx%x%ny% -compress JPEG "%old%" "%new%"]],
+ {
+ old = oldname, -- fills the %old% placeholder in the argument string
+ new = newname, -- fills the %new% placeholder in the argument string
+ nx = resolution or 300,
+ ny = resolution or 300,
+ }
+ )
+ end
+\stopluacode
+\stoptyping
+
+You can limit the search to a few types and set the resolution with:
+
+\starttyping
+\setupexternalfigures
+ [order={pdf,jpg},
+ resolution=100,
+ method=auto]
+\stoptyping
+
+And use it like:
+
+\starttyping
+\externalfigure[verybig.jpg][height=10cm]
+\stoptyping
+
+The second string passed to the \type {run} helper contains the arguments to the
+first one. The variables between percent signs get replaced by the values in
+the table passed as the third argument.
+
+\stopsection
+
+\startsection[title=Trackers]
+
+If you want a lot of info you can say:
+
+\starttyping
+\enabletrackers[figures.*]
+\stoptyping
+
+But you can be more specific. With \type {graphics.locating} you will get some
+insight into where files are looked for. The \type {graphics.inclusion} tracker
+gives some more info about the actual inclusion. The \type {graphics.bases} tracker is kind
+of special and only makes sense when you use the graphic database options. The
+\type {graphics.conversion} and related tracker \type {graphics.programs} show if
+and how conversion of images takes place.
+
+The \type {graphics.lognames} tracker will make sure that some extra information about
+used graphics is saved in the log file, while \type {graphics.usage} will produce
+a file \typ {<jobname>-figures-usage.lua} that contains information about found
+(or not found) images and the way they are used.
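+
+So, to only follow the lookup and the actual inclusion of graphics, something
+like this should do:
+
+\starttyping
+\enabletrackers[graphics.locating,graphics.inclusion]
+\stoptyping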
+
+\stopsection
+
+\stopchapter
+
+\stopcomponent
+
diff --git a/doc/context/sources/general/manuals/workflows/workflows-injectors.tex b/doc/context/sources/general/manuals/workflows/workflows-injectors.tex
new file mode 100644
index 000000000..d2f837d82
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-injectors.tex
@@ -0,0 +1,86 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-injectors
+
+\startchapter[title={Injectors}]
+
+When you have no control over the source but need to manually tweak some aspects
+of the typesetting, like an occasional page break or column switch, you can use
+the injector mechanism. This mechanism is part of list and register building but
+can also be used elsewhere.
+
+\startbuffer[one]
+\startmixedcolumns[balance=yes]
+ \dotestinjector{test}line 1 \par
+ \dotestinjector{test}line 2 \par
+ \dotestinjector{test}line 3 \par
+ \dotestinjector{test}line 4 \par
+ \dotestinjector{test}line 5
+\stopmixedcolumns
+\stopbuffer
+
+\startbuffer[two]
+\startmixedcolumns[balance=yes]
+ \dotestinjector{test}line 1 \par
+ \dotestinjector{test}line 2 \par
+ \dotestinjector{test}line 3 \par
+ \dotestinjector{test}line 4 \par
+ \dotestinjector{test}line 5
+\stopmixedcolumns
+\stopbuffer
+
+We have two buffers:
+
+\typebuffer[one]
+
+and
+
+\typebuffer[two]
+
+When typeset these come out as:
+
+\blank \startpacked \bf \getbuffer[one] \stoppacked \blank
+
+and
+
+\blank \startpacked \bf \getbuffer[two] \stoppacked \blank
+
+We can enable (and show) the injectors with:
+
+\startbuffer
+\doactivateinjector{test} \showinjector
+\stopbuffer
+
+\typebuffer \getbuffer
+
+Now we get:
+
+\blank \startpacked \bf \getbuffer[one] \stoppacked \blank
+
+and
+
+\blank \startpacked \bf \getbuffer[two] \stoppacked \blank
+
+The small numbers are injector points. These will of course change when we add
+more in|-|between. Let's add actions to some of the injection points:
+
+\startbuffer
+\setinjector[test][13][{\column}]
+\setinjector[test][17][{\column}]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+As expected we now get column breaks:
+
+\blank \startpacked \bf \getbuffer[one] \stoppacked \blank
+
+and
+
+\blank \startpacked \bf \getbuffer[two] \stoppacked \blank
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-introduction.tex b/doc/context/sources/general/manuals/workflows/workflows-introduction.tex
new file mode 100644
index 000000000..a88640b27
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-introduction.tex
@@ -0,0 +1,25 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-introduction
+
+\startchapter[title=Introduction]
+
+This manual contains some information about features that can help you to manage
+workflows or \CONTEXT\ related processes. Because we use \CONTEXT\ ourselves all
+that we need ends up in the distribution. When you discover something workflow
+related that is not yet covered here, you can tell me. I simply forget about all
+there is, especially if it's made for projects. Don't expect this manual to be
+complete or extensive; it's just a goodie.
+
+\startlines
+\documentvariable{author},
+\documentvariable{affiliation}
+\documentvariable{location}
+\currentdate[month,year]
+\stoplines
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-mkiv.tex b/doc/context/sources/general/manuals/workflows/workflows-mkiv.tex
new file mode 100644
index 000000000..3820e04fa
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-mkiv.tex
@@ -0,0 +1,32 @@
+\setupbackend[export=yes]
+
+\environment workflows-style
+
+\startdocument
+ [metadata:author=Hans Hagen,
+ metadata:title=Workflow support in context,
+ author=Hans Hagen,
+ affiliation=PRAGMA ADE,
+ location=Hasselt NL,
+ title=workflow,
+ extra=support in context,
+ support=www.contextgarden.net,
+ website=www.pragma-ade.nl]
+
+\component workflows-titlepage
+
+\startfrontmatter
+ \component workflows-contents
+ \component workflows-introduction
+\stopfrontmatter
+
+\startbodymatter
+ \component workflows-resources
+ \component workflows-graphics
+ \component workflows-suspects
+ \component workflows-injectors
+ \component workflows-xml
+ \component workflows-setups
+\stopbodymatter
+
+\stopdocument
diff --git a/doc/context/sources/general/manuals/workflows/workflows-resources.tex b/doc/context/sources/general/manuals/workflows/workflows-resources.tex
new file mode 100644
index 000000000..cbed64864
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-resources.tex
@@ -0,0 +1,156 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-resources
+
+\startchapter[title=Accessing resources]
+
+One of the benefits of \TEX\ is that you can use it in automated workflows
+where large quantities of data are involved. A document can consist of
+several files and normally also includes images. Of course there are styles
+involved too. At \PRAGMA\ we normally put styles and fonts in:
+
+\starttyping
+/data/site/context/tex/texmf-project/tex/context/user/<project>/...
+/data/site/context/tex/texmf-fonts/data/<foundry>/<collection>/...
+\stoptyping
+
+alongside
+
+\starttyping
+/data/framework/...
+\stoptyping
+
+where the job management services are put, while we put resources in:
+
+\starttyping
+/data/resources/...
+\stoptyping
+
+The processing happens in:
+
+\starttyping
+/data/work/<uuid user space>/
+\stoptyping
+
+Putting styles (and resources like logos and common images) and fonts (if the
+project has specific ones not present in the distribution) in the \TEX\ tree
+makes sense because that is where such files are normally searched. Of course you
+need to keep the distribution's file database up|-|to|-|date after adding files there.
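+
+A quick way to do that (assuming a standard installation where the \type
+{mtxrun} script is on the path) is to regenerate the file database:
+
+\starttyping
+mtxrun --generate
+\stoptyping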
+
+Processing has to happen isolated from other runs so there we use unique
+locations. The services responsible for running also deal with regular cleanup
+of these temporary files.
+
+Resources are somewhat special. They can be stable, i.e.\ change seldom, but more
+often they are updated or extended periodically (or even daily). We're not
+talking of a few files here but of thousands. In one project we have 20 thousand
+resources, that can be combined into arbitrary books, and in another one, each
+chapter alone is about 400 \XML\ and image files. That means we can have 5000
+files per book and as we have at least 20 books, we end up with 100K files. In
+the first case accessing the resources is easy because there is a well defined
+structure (under our control) so we know exactly where each file sits in the
+resource tree. In the 100K case there is a deeper structure which is in itself
+predictable but because many authors are involved the references to these files
+are somewhat unstable (and undefined). It is surprising to notice that publishers
+don't care about filenames (read: cannot control all the parties involved) which
+means that we have inconsistent use of mixed case in filenames, and spaces,
+underscores and dashes creeping in. Because typesetting for paper is always at
+the end of the pipeline (which nowadays is mostly driven by (limitations of) web
+products) we need to have a robust and flexible lookup mechanism. It's a side
+effect of the click and point culture: if objects are associated (filename in
+source file with file on the system) anything you key in will work, and
+consistency completely depends on the user. And bad things then happen when files
+are copied, renamed, etc. At that stage it is better to be tolerant than to try to
+get it fixed. \footnote {From what we normally receive we often conclude that
+copy|-|editing and image production companies don't impose any discipline or
+probably simply lack the tools and methods to control this. Some of our workflows
+had checkers and fixers, so that when we got 5000 new resources while only a few
+needed to be replaced we could filter the right ones. It was not uncommon to find
+duplicates for thousands of pictures: similar or older variants.}
+
+The image files are then referred to with names like:
+
+\starttyping
+foo.jpg
+bar/foo.jpg
+images/bar/foo.jpg
+images/foo.jpg
+\stoptyping
+
+The \XML\ files have names like:
+
+\starttyping
+b-c.xml
+a/b-c.jpg
+a/b/b-c.jpg
+a/b/c/b-c.jpg
+\stoptyping
+
+So it's sort of a mess, especially if you add arbitrary casing to this. Of course
+one can argue that a wrong (relative) location is asking for problems, but it's less
+of an issue here because each image has a unique name. We could flatten the resource
+tree but having tens of thousands of files in one directory is asking for
+problems when you want to manage them.
+
+The typesetting (and related services) run on virtual machines. The three
+directories:
+
+\starttyping
+/data/site
+/data/resources
+/data/work
+\stoptyping
+
+are all mounted as nfs shares on a network storage. For the styles (and binaries)
+this is no big deal as normally these files are cached, but the resources are
+another story. Scanning the complete (mounted) resource tree each run is no
+option so there we use a special mechanism in \CONTEXT\ for locating files.
+
+Already early in the development of \MKIV\ one of the locating mechanisms was
+the following:
+
+\starttyping
+tree:////data/resources/foo/**/drawing.jpg
+tree:////data/resources/foo/**/Drawing.jpg
+\stoptyping
+
+Here the tree is scanned once per run, which is normally quite okay when there
+are not that many files and when the files reside on the machine itself. For a
+higher performance approach using network shares we have a different
+mechanism. This time it looks like this:
+
+\starttyping
+dirlist:/data/resources/**/drawing.jpg
+dirlist:/data/resources/**/Drawing.jpg
+dirlist:/data/resources/**/just/some/place/drawing.jpg
+dirlist:/data/resources/**/images/drawing.jpg
+dirlist:/data/resources/**/images/drawing.jpg?option=fileonly
+dirfile:/data/resources/**/images/drawing.jpg
+\stoptyping
+
+The first two lookups are wildcard lookups. If there is a file with that name, it will be
+found. If there are more, the first hit is used. The third and fourth examples
+are more selective. Here the part after the \type {**} has to match too. So here
+we can deal with multiple files named \type {drawing.jpg}. The last two
+equivalent examples are more tolerant. If no explicit match is found, a lookup
+happens without being selective. The case of a name is ignored but when found, a
+name with the right case is used.
+
+You can hook a path into the resolver for source files, for example:
+
+\starttyping
+\usepath [dirfile://./resources/**]
+\setupexternalfigures[directory=dirfile://./resources/**]
+\stoptyping
+
+You need to make sure that file(name)s in that location don't override ones in
+the regular \TEX\ tree. These extra paths are only used for source file lookups
+so for instance font lookups are not affected.
+
+When you add, remove or move files in the tree, you need to remove the \type
+{dirlist.*} files in the root because these are used for locating files. A new
+file will be generated automatically. Don't forget this!
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-setups.tex b/doc/context/sources/general/manuals/workflows/workflows-setups.tex
new file mode 100644
index 000000000..e9d120f7b
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-setups.tex
@@ -0,0 +1,72 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-setups
+
+\startchapter[title={Setups}]
+
+Setups are a powerful way to organize styles. They are basically macros but live
+in their own namespace. One advantage is that spaces in a setup are ignored so
+you can code without bothering about spurious spaces. Here is a trick that you
+can use when one style contains directives for multiple products:
+
+\startbuffer
+\startsetups tex:whatever
+ \fastsetup{tex:whatever:\documentvariable{stylevariant}}
+\stopsetups
+
+\startsetups tex:whatever:foo
+ FOO
+\stopsetups
+
+\startsetups tex:whatever:bar
+ BAR
+\stopsetups
+\stopbuffer
+
+\typebuffer \getbuffer
+
+Here we define a main setup \type {tex:whatever} that gets expanded in one of two
+variants, controlled by a document variable.
+
+\startbuffer
+\setups{tex:whatever}
+
+\setupdocument
+ [stylevariant=foo]
+
+\setups{tex:whatever}
+
+\setupdocument
+ [stylevariant=bar]
+
+\setups{tex:whatever}
+\stopbuffer
+
+\typebuffer
+
+These lines result in:
+
+\getbuffer
+
+In a similar fashion you can define \XML\ setups that are used to render
+elements:
+
+\starttyping
+\startxmlsetups xml:whatever
+ \xmlsetup{#1}{xml:whatever:\documentvariable{stylevariant}}
+\stopxmlsetups
+
+\startxmlsetups xml:whatever:foo
+ FOO: \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:whatever:bar
+ BAR: \xmlflush{#1}
+\stopxmlsetups
+\stoptyping
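+
+A minimal sketch of how such setups could be hooked in (the \type {whatever}
+element and the \type {xml:mysetups} name are just examples):
+
+\starttyping
+\startxmlsetups xml:mysetups
+    \xmlsetsetup{#1}{whatever}{xml:*}
+\stopxmlsetups
+
+\xmlregistersetup{xml:mysetups}
+\stoptyping
+
+With \type {stylevariant=foo} a \type {whatever} element then comes out
+prefixed with \type {FOO:}.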
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-style.tex b/doc/context/sources/general/manuals/workflows/workflows-style.tex
new file mode 100644
index 000000000..f29129fcd
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-style.tex
@@ -0,0 +1,49 @@
+\startenvironment workflows-style
+
+\usemodule
+ [abr-03]
+
+\setupbodyfont
+ [bookman,11pt]
+
+\definecolor
+ [maincolor]
+ [s=.35]
+
+\setuplayout
+ [height=middle,
+ width=middle,
+ footer=0pt]
+
+\setupwhitespace
+ [big]
+
+\setuphead
+ [chapter]
+ [style=\bfc,
+ color=maincolor,
+ header=high]
+
+\setuphead
+ [section]
+ [style=\bfb,
+ color=maincolor]
+
+\setuptyping
+ [color=maincolor]
+
+\setuptype
+ [color=maincolor]
+
+\setupdocument
+ [metadata:author=<author>,
+ metadata:title=<title>,
+ author=<author>,
+ affiliation=<affiliation>,
+ location=<location>,
+ title=<title>,
+ extra=<extra>,
+ support=<support>,
+ website=<website>]
+
+\stopenvironment
diff --git a/doc/context/sources/general/manuals/workflows/workflows-suspects.tex b/doc/context/sources/general/manuals/workflows/workflows-suspects.tex
new file mode 100644
index 000000000..621fd6f59
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-suspects.tex
@@ -0,0 +1,54 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-suspects
+
+\startchapter[title={Suspects}]
+
+When many authors and editors are involved there is the danger of inconsistent
+spacing being applied. We're not only talking of the (often invisible in editing
+programs) nobreak spaces, but also spacing inside or outside quotations and
+before and after punctuation or around math.
+
+In \CONTEXT\ we have a built||in suspects checker that you can enable with the
+following command:
+
+\starttyping
+\enabletrackers[typesetters.suspects]
+\stoptyping
+
+The next table shows some identified suspects.
+
+\starttexdefinition ShowSample #1
+ \NC \type{#1}
+ \NC \enabletrackers[typesetters.suspects]#1\disabletrackers[typesetters.spacing]
+ \NC \NR
+\stoptexdefinition
+
+\starttabulate[|||][before=,after=]
+ \ShowSample{foo$x$}
+ \ShowSample{$x$bar}
+ \ShowSample{foo$x$bar}
+ \ShowSample{$f+o+o$:}
+ \ShowSample{;$f+o+o$}
+ \ShowSample{; bar}
+ \ShowSample{foo:bar}
+ \ShowSample{\quote{ foo }}
+ \ShowSample{\quote{bar }}
+ \ShowSample{\quote{ bar}}
+ \ShowSample{(foo )}
+ \ShowSample{\{foo \}}
+ \ShowSample{foo{\bf gnu}bar}
+ \ShowSample{foo{\it gnu}bar}
+ \ShowSample{foo$x^2$bar}
+ \ShowSample{foo\nobreakspace bar}
+\stoptabulate
+
+Of course this analysis is not perfect but we use it in final checking of complex
+documents that are generated automatically from \XML. \footnote {Think of math
+school books where each book is assembled from over a thousand files.}
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-titlepage.tex b/doc/context/sources/general/manuals/workflows/workflows-titlepage.tex
new file mode 100644
index 000000000..f184152b3
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-titlepage.tex
@@ -0,0 +1,37 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-titlepage
+
+\definefontfeature[LatinModernMonoVariable][default][liga=no]
+\definefont[LatinModernMonoVariable][LMTypewriterVarWd-Regular*LatinModernMonoVariable sa 1]
+
+\startMPpage
+
+ fill fullsquare xysized(PaperWidth,PaperHeight) withcolor .4white ;
+
+ draw image (
+ fill arrowhead fullcircle scaled .5 rotated 0 scaled 10 withcolor white ;
+ fill arrowhead fullcircle scaled .5 rotated 120 scaled 10 withcolor white ;
+ fill arrowhead fullcircle scaled .5 rotated 240 scaled 10 withcolor white ;
+ fill arrowhead fullcircle scaled .5 rotated 60 scaled 10 withcolor white ;
+ fill arrowhead fullcircle scaled .5 rotated 180 scaled 10 withcolor white ;
+ fill arrowhead fullcircle scaled .5 rotated 300 scaled 10 withcolor white ;
+ ) xsized (.9PaperWidth) shifted (0,.2PaperWidth) ;
+
+ draw image (
+ fill arrowhead fullcircle scaled .5 rotated 0 scaled 10 withcolor .4red withtransparency (1,.5) ;
+ fill arrowhead fullcircle scaled .5 rotated 120 scaled 10 withcolor .4green withtransparency (1,.5) ;
+ fill arrowhead fullcircle scaled .5 rotated 240 scaled 10 withcolor .4blue withtransparency (1,.5) ;
+ fill arrowhead fullcircle scaled .5 rotated 60 scaled 10 withcolor .4cyan withtransparency (1,.5) ;
+ fill arrowhead fullcircle scaled .5 rotated 180 scaled 10 withcolor .4magenta withtransparency (1,.5) ;
+ fill arrowhead fullcircle scaled .5 rotated 300 scaled 10 withcolor .4yellow withtransparency (1,.5) ;
+ ) xsized (.9PaperWidth) shifted (0,.2PaperWidth) ;
+
+ draw textext ("\LatinModernMonoVariable \documentvariable{title}") xsized (.9PaperWidth) shifted (0,-.425PaperWidth) withcolor white ;
+ draw textext ("\LatinModernMonoVariable \documentvariable{extra}") xsized (.9PaperWidth) shifted (0,-.575PaperWidth) withcolor white ;
+
+\stopMPpage
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/workflows/workflows-xml.tex b/doc/context/sources/general/manuals/workflows/workflows-xml.tex
new file mode 100644
index 000000000..0f29f5f55
--- /dev/null
+++ b/doc/context/sources/general/manuals/workflows/workflows-xml.tex
@@ -0,0 +1,96 @@
+% language=uk
+
+\environment workflows-style
+
+\startcomponent workflows-xml
+
+\startluacode
+ for i=1,10 do
+ local filename = string.formatters["temp-%02i.xml"](i)
+ local filedata = string.formatters["<?xml version='1.0'?><p>snippet %i</p>"](i)
+ io.savedata(filename,filedata)
+ end
+\stopluacode
+
+\startchapter[title={XML}]
+
+When you have an \XML\ project with many files involved, finding the right spot
+of something that went wrong can be a pain. In one of our project the production of
+some 50 books involves 60.000 \XML\ files and 20.000 images. Say that we have the
+following file:
+
+\startbuffer[demo]
+<?xml version='1.0'?>
+<document>
+ <include name="temp-01.xml"/> <include name="temp-02.xml"/>
+ <include name="temp-03.xml"/> <include name="temp-04.xml"/>
+ <include name="temp-05.xml"/> <include name="temp-06.xml"/>
+ <include name="temp-07.xml"/> <include name="temp-08.xml"/>
+ <include name="temp-09.xml"/> <include name="temp-10.xml"/>
+</document>
+\stopbuffer
+
+\typebuffer[demo]
+
+Before we process this file we will merge the content of the files defined
+as includes into it. When this happens the filename is automatically
+registered so it can be accessed later.
+
+\startbuffer
+\startxmlsetups xml:initialize
+ \xmlincludeoptions{#1}{include}{filename|name}{recurse,basename}
+ \xmlsetsetup{#1}{p|document}{xml:*}
+\stopxmlsetups
+
+\startxmlsetups xml:document
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:p
+ \inleftmargin{\infofont\xmlinclusion{#1}}
+ \xmlflush{#1}
+ \par
+\stopxmlsetups
+
+\xmlregistersetup{xml:initialize}
+
+\xmlprocessbuffer{main}{demo}{}
+\stopbuffer
+
+\typebuffer
+
+In this case we put the name of the file in the margin. Depending on when and how
+elements are flushed other solutions, like overlays, can be used.
+
+\startpacked
+\getbuffer
+\stoppacked
+
+At any moment you can see what the current node contains. The whole (merged)
+document is also available:
+
+\startbuffer
+\xmlshow{main}
+\stopbuffer
+
+\typebuffer
+
+A small font is used to typeset the (sub)tree:
+
+\blank \getbuffer \blank
+
+You can also save the tree:
+
+\startbuffer
+\xmlsave{main}{temp.xml}
+\stopbuffer
+
+\typebuffer \getbuffer
+
+This file looks like:
+
+\typefile{temp.xml}
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/xtables/xtables-mkiv.tex b/doc/context/sources/general/manuals/xtables/xtables-mkiv.tex
new file mode 100644
index 000000000..302f2880c
--- /dev/null
+++ b/doc/context/sources/general/manuals/xtables/xtables-mkiv.tex
@@ -0,0 +1,1225 @@
+% language=uk
+
+% author : Hans Hagen, PRAGMA ADE, NL
+% license : Creative Commons, Attribution-NonCommercial-ShareAlike 3.0 Unported
+
+\usemodule[art-01,abr-02]
+
+\definecolor[red] [darkred]
+\definecolor[green] [darkgreen]
+\definecolor[blue] [darkblue]
+\definecolor[yellow] [darkyellow]
+\definecolor[magenta][darkmagenta]
+\definecolor[cyan] [darkcyan]
+
+\setupexternalfigures
+ [location={local,default}]
+
+\setupbodyfont
+ [10pt]
+
+\setuptyping
+ [color=darkyellow]
+
+\setuptype
+ [color=darkcyan]
+
+% \setupnumbering
+% [alternative=doublesided]
+
+\setuphead
+ [section]
+ [color=darkmagenta]
+
+\setupinteraction
+ [hidden]
+
+\startdocument
+ [metadata:author=Hans Hagen,
+ metadata:title=Extreme Tables,
+ author=Hans Hagen,
+ affiliation=PRAGMA ADE,
+ location=Hasselt NL,
+ title=Extreme Tables,
+ extra=ConTeXt MkIV,
+ support=www.contextgarden.net,
+ website=www.pragma-ade.nl]
+
+\startMPpage
+
+ StartPage ;
+ fill Page enlarged 2mm withcolor magenta/4 ;
+ pickup pencircle scaled 2mm ;
+ numeric n ; n := bbheight Page ;
+ forever :
+ n := n / 1.5 ;
+ draw bottomboundary Page shifted (0, n) withcolor 2yellow/3 withtransparency (1,0.5) ;
+ draw topboundary Page shifted (0,-n) withcolor 2yellow/3 withtransparency (1,0.5) ;
+ exitif n < 2cm ;
+ endfor ;
+ numeric n ; n := bbheight Page ;
+ forever :
+ n := n / 1.5 ;
+ draw leftboundary Page shifted ( n,0) withcolor 2cyan/3 withtransparency (1,0.5) ;
+ draw rightboundary Page shifted (-n,0) withcolor 2cyan/3 withtransparency (1,0.5) ;
+ exitif n < 2cm ;
+ endfor ;
+ picture p, q, r ;
+ p := textext("\ssbf\WORD{\documentvariable{title}}") xsized (bbheight Page - 2cm) rotated 90 ;
+ q := textext("\ssbf\WORD{\documentvariable{author}}") ysized 1cm ;
+ r := textext("\ssbf\WORD{\documentvariable{extra}}") xsized bbwidth q ;
+ draw anchored.rt (p, center rightboundary Page shifted (-1cm,0cm)) withcolor white ;
+ draw anchored.bot(q, center bottomboundary Page shifted ( 1cm,4.4cm)) withcolor white ;
+ draw anchored.bot(r, center bottomboundary Page shifted ( 1cm,2.8cm)) withcolor white ;
+ StopPage ;
+
+\stopMPpage
+
+% \page[empty] \setuppagenumber[start=1]
+
+\startsubject[title={Contents}]
+
+\placelist[section][criterium=all,interaction=all]
+
+\stopsubject
+
+\startsection[title={Introduction}]
+
+This is a short introduction to yet another table mechanism built into \CONTEXT. It
+is a variant of the so called natural tables but it has a different
+configuration. Also, the implementation is completely different. The reason for
+writing it is that in one of our projects we had to write styles for documents
+that had tables spanning 30 or more pages and apart from memory constraints this
+is quite a challenge for the other mechanisms, if only because splitting them
+into successive floats is not possible due to limitations of \TEX. The extreme
+table mechanism can handle pretty large tables and split them too. As each cell
+is basically a \type {\framed} and as we need to do two passes over the table,
+this mechanism is not the fastest but it is some two times faster than the
+natural tables mechanism, and in most cases can be used instead.
+
+\stopsection
+
+\startsection[title={The structure}]
+
+The structure of the tables is summarized here. There can be the usual head, body
+and foot specifications and we also support an optional header for the following
+pages.
+
+\starttyping
+\definextable [tag] | [tag][parent]
+\setupxtable [settings] | [tag][settings]
+
+\startxtable[tag|settings]
+ \startxtablehead|next|body|foot[tag|settings]
+ \startxrowgroup[tag|settings]
+ \startxrow[settings]
+ \startxcellgroup[tag|settings]
+ \startxcell[settings] ... \stopxcell
+ \stopxcellgroup
+ \stopxrow
+ \stopxrowgroup
+ \stopxtablehead|next|body|foot
+\stopxtable
+\stoptyping
+
+Contrary to what you might expect, the definition command defines a new set of
+commands. You don't need to use this in order to set up a new settings
+environment. Settings and definitions can inherit so you can build a chain of
+parent|-|child settings. The grouping is nothing more than a switch to a new set
+of settings.
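+
+As a sketch, a row group can for instance be used to switch to another set of
+settings for a couple of rows at once (the color is just an example):
+
+\starttyping
+\startxtable
+  \startxrowgroup[foregroundcolor=red]
+    \startxrow
+      \startxcell one \stopxcell
+      \startxcell two \stopxcell
+    \stopxrow
+  \stopxrowgroup
+  \startxrow
+    \startxcell alpha \stopxcell
+    \startxcell beta \stopxcell
+  \stopxrow
+\stopxtable
+\stoptyping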
+
+\stopsection
+
+\startsection[title={Direct control}]
+
+A simple table with just frames is defined as follows:
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell one \stopxcell
+ \startxcell two \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+You can pass parameters for tuning the table:
+
+\startbuffer
+\startxtable[offset=1cm]
+ \startxrow
+ \startxcell one \stopxcell
+ \startxcell two \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+You can (insofar as they make sense) use the same settings as for the \type
+{\framed} command, as long as you keep in mind that messing with the frame
+related offsets can have side effects.
+
+\stopsection
+
+\startsection[title={Sets of settings}]
+
+Instead of directly passing settings you can use a predefined set. Of course you
+can also combine these methods.
+
+\startbuffer
+\definextable
+ [myxtable]
+
+\definextable
+ [myxtable:important]
+ [myxtable]
+
+\setupxtable
+ [myxtable]
+ [width=4cm,
+ foregroundcolor=red]
+
+\setupxtable
+ [myxtable:important]
+ [background=color,
+ backgroundcolor=red,
+ foregroundcolor=white]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+We can use these settings in a table. Although it is not really needed to define a
+set beforehand (i.e.\ you can just use the setup command) it is cleaner and more
+efficient too.
+
+\startbuffer
+\startxtable[myxtable]
+ \startxrow[foregroundcolor=green]
+ \startxcell one \stopxcell
+ \startxcell two \stopxcell
+ \startxcellgroup[foregroundcolor=blue]
+ \startxcell tree \stopxcell
+ \startxcell four \stopxcell
+ \stopxcellgroup
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcellgroup[myxtable:important]
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxcellgroup
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+The overhead of (chained) settings is quite acceptable and it helps to keep the
+source of the table uncluttered from specific settings.
+
+\stopsection
+
+\startsection[title={Defining}]
+
+If needed you can define your own encapsulating commands. The following example
+demonstrates this:
+
+\startbuffer
+\definextable[mytable]
+\stopbuffer
+
+\getbuffer \typebuffer
+
+We now can use the \type{mytable} wrapper:
+
+\startbuffer
+\startmytable[height=4cm,width=8cm,align={middle,lohi}]
+ \startxrow
+ \startxcell test \stopxcell
+ \stopxrow
+\stopmytable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+One drawback of using buffers is that they don't play well in macro definitions.
+In that case you need to use the following wrapper:
+
+\startbuffer
+\starttexdefinition MyTable #1#2#3#4
+ \startembeddedxtable
+ \startxrow
+ \startxcell #1 \stopxcell
+ \startxcell #2 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell #3 \stopxcell
+ \startxcell #4 \stopxcell
+ \stopxrow
+ \stopembeddedxtable
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer \getbuffer
+
+This macro is used as any other macro with arguments:
+
+\startbuffer
+\MyTable{one}{two}{three}{four}
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+\stopsection
+
+\startsection[title={Stretching}]
+
+If you don't give the width of a cell, the widest natural size will be taken.
+Otherwise the given width applies to the whole column.
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[width=1cm] one \stopxcell
+ \startxcell[width=2cm] two \stopxcell
+ \startxcell[width=3cm] tree \stopxcell
+ \startxcell[width=4cm] four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+You can let the cells stretch so that the whole width of the text area is taken.
+
+\startbuffer[one]
+\startxtable[option=stretch]
+ \startxrow
+ \startxcell[width=1cm] one \stopxcell
+ \startxcell[width=2cm] two \stopxcell
+ \startxcell[width=3cm] tree \stopxcell
+ \startxcell[width=4cm] four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer[one]
+
+The available left|-|over space is equally distributed among the cells.
+
+\startlinecorrection[blank] \getbuffer[one] \stoplinecorrection
+
+\startbuffer[two]
+\startxtable[option={stretch,width}]
+ \startxrow
+ \startxcell[width=1cm] one \stopxcell
+ \startxcell[width=2cm] two \stopxcell
+ \startxcell[width=3cm] tree \stopxcell
+ \startxcell[width=4cm] four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+An alternative is to distribute the space proportionally:
+
+\typebuffer[two]
+
+\startlinecorrection[blank] \getbuffer[two] \stoplinecorrection
+
+Just to stress the difference we show both alongside now:
+
+\startlinecorrection[blank]
+ \getbuffer[one]
+ \blank
+ \getbuffer[two]
+\stoplinecorrection
+
+You can specify the width for each cell but you need to keep in mind
+that that value is then used for the whole column:
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[width=1em] one \stopxcell
+ \startxcell[width=2em] two \stopxcell
+ \startxcell[width=3em] tree \stopxcell
+ \startxcell[width=4em] four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+You can enforce that larger columns win via the \type {option} parameter:
+
+\startbuffer
+\startxtable[option=max]
+ \startxrow
+ \startxcell[width=1em] one \stopxcell
+ \startxcell[width=2em] two \stopxcell
+ \startxcell[width=3em] tree \stopxcell
+ \startxcell[width=4em] four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+\stopsection
+
+\startsection[title={Spacing}]
+
+It is possible to separate the cells by horizontal and/or vertical space. As an
+example we create a setup.
+
+\startbuffer
+\setupxtable
+ [myztable]
+ [option=stretch,
+ foregroundcolor=blue,
+ columndistance=10pt,
+ leftmargindistance=20pt,
+ rightmargindistance=30pt]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+You can use the \type {textwidth} parameter to set a specific maximum width. We
+now apply the previous settings to an extreme table:
+
+\startbuffer
+\startxtable[myztable]
+ \startxrow
+ \startxcell[width=1cm] one \stopxcell
+ \startxcell[width=2cm,distance=5pt] two \stopxcell
+ \startxcell[width=3cm] tree \stopxcell
+ \startxcell[width=4cm] four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[width=1cm] alpha \stopxcell
+ \startxcell[width=2cm] beta \stopxcell
+ \startxcell[width=3cm] gamma \stopxcell
+ \startxcell[width=4cm] delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+As you can see here, we can still locally overload the settings but keep in mind
+that these apply to the whole column then, not to the specific cell.
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+Vertical spacing is (currently) set up differently, i.e.\ as an argument to the
+\type {\blank} command.
+
+\startbuffer
+\startxtable[spaceinbetween=medium]
+ \startxrow
+ \startxcell one \stopxcell
+ \startxcell two \stopxcell
+ \startxcell tree \stopxcell
+ \startxcell four \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \startxcell gamma \stopxcell
+ \startxcell delta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+Specifying spacing this way improves consistency with the rest of the document
+spacing.
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+\stopsection
+
+\startsection[title={Spanning}]
+
+Of course we can span cells horizontally as well as vertically. Future versions
+might provide more advanced options but the basics work okay.
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell one \stopxcell
+ \startxcell[nx=2] two + three \stopxcell
+ \startxcell four \stopxcell
+ \startxcell five \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[nx=3] alpha + beta + gamma \stopxcell
+ \startxcell[nx=2] delta + epsilon \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+This spans a few cells horizontally:
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+The next example gives a span in two directions:
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell alpha 1 \stopxcell
+ \startxcell beta 1 \stopxcell
+ \startxcell gamma 1 \stopxcell
+ \startxcell delta 1 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha 2 \stopxcell
+ \startxcell[nx=2,ny=2] whatever \stopxcell
+ \startxcell delta 2 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha 3 \stopxcell
+ \startxcell delta 3 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha 4 \stopxcell
+ \startxcell beta 4 \stopxcell
+ \startxcell gamma 4 \stopxcell
+ \startxcell delta 4 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+Of course, spanning is always a compromise but the best fit found by this
+mechanism takes natural width, given width and available space into account.
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+\stopsection
+
+\startsection[title={Partitioning}]
+
+You can partition a table as follows:
+
+\startbuffer
+\startxtable
+ \startxtablehead
+ \startxrow
+ \startxcell head one \stopxcell
+ \startxcell head two \stopxcell
+ \startxcell head tree \stopxcell
+ \startxcell head four \stopxcell
+ \stopxrow
+ \stopxtablehead
+ \startxtablenext
+ \startxrow
+ \startxcell next one \stopxcell
+ \startxcell next two \stopxcell
+ \startxcell next tree \stopxcell
+ \startxcell next four \stopxcell
+ \stopxrow
+ \stopxtablenext
+ \startxtablebody
+ \startxrow
+ \startxcell body one \stopxcell
+ \startxcell body two \stopxcell
+ \startxcell body tree \stopxcell
+ \startxcell body four \stopxcell
+ \stopxrow
+ \stopxtablebody
+ \startxtablefoot
+ \startxrow
+ \startxcell foot one \stopxcell
+ \startxcell foot two \stopxcell
+ \startxcell foot tree \stopxcell
+ \startxcell foot four \stopxcell
+ \stopxrow
+ \stopxtablefoot
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+There can be multiple such partitions and they are collected in head, next, body
+and foot groups. Normally the header ends up at the beginning and the footer at
+the end. When a table is split, the first page gets the header and the following
+pages the next one.
+
+You can let headers and footers be repeated by setting the \type {header}
+and|/|or \type {footer} parameters to \type {repeat}.
+
+\starttyping
+\setupxtable
+ [split=yes,
+ header=repeat,
+ footer=repeat]
+\stoptyping
+
+The table can be flushed in the running text but also in successive
+floats. Given that the table is in a buffer:
+
+\starttyping
+\placetable[here,split]{A big table.}{\getbuffer}
+\stoptyping
+
+When you specify \type {split} as \type {yes} the caption is taken into account
+when calculating the available space.
+
+There are actually three different split methods. The \type {yes} option works in
+text mode as well as in floats, but in text mode no headers and footers get
+repeated. If you want that feature in a text flush you have to set \type {split}
+to \type {repeat} as well.
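+
+So a table that also repeats its header and footer when flushed in the running
+text could be set up like this (a minimal sketch):
+
+\starttyping
+\startxtable[split=repeat,header=repeat,footer=repeat]
+    ...
+\stopxtable
+\stoptyping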
+
+You can keep rows together by passing a \type {samepage} directive. This
+parameter can get the values \type {before}, \type {after} and \type {both}.
+
+\starttyping
+\startxtable[split=yes]
+ \startxrow \startxcell \tttf .01. \stopxcell \stopxrow
+ \startxrow \startxcell \tttf .... \stopxcell \stopxrow
+ \startxrow \startxcell \tttf \red .21. \stopxcell \stopxrow
+ \startxrow[samepage=both] \startxcell \tttf \red .22. \stopxcell \stopxrow
+ \startxrow[samepage=both] \startxcell \tttf \red .23. \stopxcell \stopxrow
+ \startxrow \startxcell \tttf .... \stopxcell \stopxrow
+ \startxrow \startxcell \tttf .99. \stopxcell \stopxrow
+\stopxtable
+\stoptyping
+
+\stopsection
+
+\startsection[title={Options}]
+
+On average a table will come out okay but you need to keep in mind that when
+(complex) spans are used the results can be less than optimal. However, as
+normally one pays attention to creating tables, the amount of control provided
+often makes it possible to get what you want.
+
+In the following situations, the first cell width is determined by the span. It
+is possible to make a more clever analyzer but we need to keep in mind that in
+the same column there can be entries that span a different number of columns. Not
+only would that be inefficient but it would also be rather unpredictable unless
+you know exactly what happens deep down. The following two examples demonstrate
+default behaviour.
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[nx=3]
+ 1/2/3
+ \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[nx=3]
+ 1 / 2 / 3
+ \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+In practice you will set the width of the columns, as in:
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[nx=3]
+ 1/2/3
+ \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[width=\dimexpr\textwidth/3] 1 \stopxcell
+ \startxcell[width=\dimexpr\textwidth/3] 2 \stopxcell
+ \startxcell[width=\dimexpr\textwidth/3] 3 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+But, if you want, you can control the enforced width by setting an option:
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[nx=3,option=tight]
+ 1/2/3
+ \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[nx=3,option=tight]
+ 1 / 2 / 3
+ \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+There is also a global setting:
+
+\startbuffer
+\startxtable[option=tight]
+ \startxrow
+ \startxcell[nx=3]
+ 1/2/3
+ \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\stopsection
+
+\startsection[title={Nesting}]
+
+Extreme tables can be nested but you need to keep an eye on inheritance here as
+the inner table uses the settings from the encapsulating cell. The widths and
+heights of the inner table default to \type {fit}. We could cook up a more
+complex nesting model but this one is easy to follow.
+
+\startbuffer
+\startxtable
+ \startxrow
+ \startxcell[offset=0pt]
+ \startxtable[background=color,backgroundcolor=green,
+ foregroundcolor=white,offset=1ex]
+ \startxrow
+ \startxcell[width=1cm] one \stopxcell
+ \startxcell[width=2cm] two \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[width=3cm] alpha \stopxcell
+ \startxcell[width=4cm] beta \stopxcell
+ \stopxrow
+ \stopxtable
+ \stopxcell
+ \startxcell two \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell
+ \startxtable[background=color,backgroundcolor=red,
+ foregroundcolor=white]
+ \startxrow
+ \startxcell one \stopxcell
+ \startxcell two \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \stopxrow
+ \stopxtable
+ \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer
+
+Here we just manipulate the offset a bit.
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+\stopsection
+
+\startsection[title={Buffers}]
+
+When you don't want to clutter your document source too much buffers can be if
+help:
+
+\startbuffer
+\startbuffer[test]
+\startxtable
+ \startxrow
+ \startxcell[width=1cm] one \stopxcell
+ \startxcell[width=2cm] two \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell alpha \stopxcell
+ \startxcell beta \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+\stopbuffer
+
+\typebuffer \getbuffer
+
+One way of getting this table typeset is to say:
+
+\starttyping
+\getbuffer[test]
+\stoptyping
+
+Normally this is quite okay. However, internally extreme tables also become
+buffers. If you don't like the overhead of this double buffering you can use the
+following command:
+
+\starttyping
+\processxtablebuffer[test]
+\stoptyping
+
+This can save you some memory and runtime, but don't expect miracles. Also, this
+way of processing does not support nested tables (unless \type {{}} is used).
+
+\stopsection
+
+\startsection[title={XML}]
+
+The following example demonstrates that we can use this mechanism in \XML\ too.
+The example was provided by Thomas Schmitz. First we show what a table looks like
+in \XML:
+
+\startbuffer[test]
+<table>
+ <tablerow>
+ <tablecell>
+ One
+ </tablecell>
+ <tablecell>
+ Two
+ </tablecell>
+ </tablerow>
+ <tablerow>
+ <tablecell>
+ <b>Three</b>
+ </tablecell>
+ <tablecell>
+ Four
+ </tablecell>
+ </tablerow>
+</table>
+\stopbuffer
+
+\typebuffer[test]
+
+We need to map these elements to setups:
+
+\startbuffer
+\startxmlsetups xml:testsetups
+ \xmlsetsetup{main}{b|table|tablerow|tablecell}{xml:*}
+\stopxmlsetups
+
+\xmlregistersetup{xml:testsetups}
+\stopbuffer
+
+\typebuffer \getbuffer
+
+The setups themselves are rather simple as we don't capture any attributes.
+
+\startbuffer
+\startxmlsetups xml:b
+ \bold{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:table
+ \startembeddedxtable
+ \xmlflush{#1}
+ \stopembeddedxtable
+\stopxmlsetups
+
+\startxmlsetups xml:tablerow
+ \startxrow
+ \xmlflush{#1}
+ \stopxrow
+\stopxmlsetups
+
+\startxmlsetups xml:tablecell
+ \startxcell
+ \xmlflush{#1}
+ \stopxcell
+\stopxmlsetups
+\stopbuffer
+
+\typebuffer \getbuffer
+
+We now process the example. Of course it will also work for files.
+
+\startbuffer
+ \xmlprocessbuffer{main}{test}{}
+\stopbuffer
+
+\typebuffer
+
+The result is:
+
+\startlinecorrection[blank] \getbuffer \stoplinecorrection
+
+\stopsection
+
+\startsection[title={Natural tables}]
+
+For the impatient a small additional module is provided that remaps the natural
+table commands onto extreme tables:
+
+\startbuffer
+\usemodule[ntb-to-xtb]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+After that:
+
+\startbuffer
+\bTABLE
+ \bTR
+ \bTD[background=color,backgroundcolor=red] one \eTD
+ \bTD[width=2cm] two \eTD
+ \eTR
+ \bTR
+ \bTD[width=5cm] alpha \eTD
+ \bTD[background=color,backgroundcolor=yellow] beta \eTD
+ \eTR
+\eTABLE
+\stopbuffer
+
+\typebuffer
+
+Will come out as:
+
+\startlinecorrection[blank]
+\getbuffer
+\stoplinecorrection
+
+You can restore and remap the commands with the following helpers:
+
+\starttyping
+\restoreTABLEfromxtable
+\mapTABLEtoxtable
+\stoptyping
+
+Of course not all functionality of the natural tables maps onto similar
+functionality of extreme tables, but on average the result will look rather
+similar.
+
+\stopsection
+
+\startsection[title={Colophon}]
+
+\starttabulate[|B|p|]
+\NC author \NC \getvariable{document}{author}, \getvariable{document}{affiliation}, \getvariable{document}{location} \NC \NR
+\NC version \NC \currentdate \NC \NR
+\NC website \NC \getvariable{document}{website} \endash\ \getvariable{document}{support} \NC \NR
+\NC copyright \NC \symbol[cc][cc-by-sa-nc] \NC \NR
+\stoptabulate
+
+\stopsection
+
+\startsection[title={Examples}]
+
+On the following pages we show some examples of (experimental) features. For this
+we will use the usual quotes from Ward, Tufte and Davis etc.\ that you can find
+in the distribution.
+
+\page
+
+\startbuffer
+\startxtable[bodyfont=6pt]
+ \startxrow
+ \startxcell \input ward \stopxcell
+ \startxcell \input tufte \stopxcell
+ \startxcell \input davis \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+\startxtable[bodyfont=6pt,option=width]
+ \startxrow
+ \startxcell \input ward \stopxcell
+ \startxcell \input tufte \stopxcell
+ \startxcell \input davis \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\page
+
+\startbuffer
+\startxtable[bodyfont=6pt]
+ \startxrow
+ \startxcell \externalfigure[cow.pdf][width=3cm] \stopxcell
+ \startxcell \input tufte \stopxcell
+ \startxcell \input davis \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+\startxtable[bodyfont=6pt,option=width]
+ \startxrow
+ \startxcell \externalfigure[cow.pdf][width=3cm] \stopxcell
+ \startxcell \input tufte \stopxcell
+ \startxcell \input davis \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\page
+
+\startbuffer
+\startxtable[option=stretch]
+ \startxrow
+ \startxcell bla \stopxcell
+ \startxcell bla bla \stopxcell
+ \startxcell bla bla bla \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+\startxtable[option={stretch,width}]
+ \startxrow
+ \startxcell bla \stopxcell
+ \startxcell bla bla \stopxcell
+ \startxcell bla bla bla \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\page
+
+\startbuffer
+\setupxtable[suffix][align=middle,foregroundcolor=red]
+\setupxtable[blabla][foregroundstyle=slanted]
+\setupxtable[crap] [foregroundcolor=blue]
+\setupxtable[bold] [crap][foregroundstyle=bold]
+
+\startxtable % [frame=off]
+ \startxtablehead
+ \startxrow[bold]
+ \startxcell[suffix] head a \stopxcell
+ \startxcell[blabla] head b \stopxcell
+ \startxcell head c \stopxcell
+ \stopxrow
+ \stopxtablehead
+ \startxtablebody
+ \startxrow
+ \startxcell[suffix][ny=2] cell a 1 \stopxcell
+ \startxcell cell b 1 \stopxcell
+ \startxcell cell c 1 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell cell b 2 \stopxcell
+ \startxcell cell c 2 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[suffix] cell a 3 \stopxcell
+ \startxcell cell b 3 \stopxcell
+ \startxcell cell c 3 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[suffix] cell a 4 \stopxcell
+ \startxcell cell b 4 \stopxcell
+ \startxcell cell c 4 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell[suffix] cell a 5 \stopxcell
+ \startxcell cell b 5 \stopxcell
+ \startxcell cell c 5 \stopxcell
+ \stopxrow
+ \stopxtablebody
+\stopxtable
+\stopbuffer
+
+\typebuffer \start \getbuffer \stop
+
+\page
+
+\startbuffer
+\startxtable[option=stretch]
+ \startxrow
+ \startxcell[option=fixed] first cell \stopxcell
+ \startxcell 101 \stopxcell
+ \startxcell 102 \stopxcell
+ \startxcell 103 \stopxcell
+ \stopxrow
+ \startxrow
+ \startxcell 2\high{nd} cell \stopxcell
+ \startxcell a \stopxcell
+ \startxcell b \stopxcell
+ \startxcell c \stopxcell
+ \stopxrow
+\stopxtable
+\stopbuffer
+
+\typebuffer \start \getbuffer \stop
+
+\page
+
+\startbuffer[demo]
+\startxtable
+\startxrow
+ \startxcell[demo][nx=4] 1 / 2 / 3 / 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell[demo][nx=3] 1 / 2 / 3 \stopxcell
+ \startxcell 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell 1 \stopxcell
+ \startxcell[demo][nx=3] 2 / 3 / 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell[demo][nx=2] 1 / 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \startxcell 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell 1 \stopxcell
+ \startxcell[demo][nx=2] 2 / 3 \stopxcell
+ \startxcell 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell[demo][nx=2] 3 / 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell[demo][nx=2] 1 / 2 \stopxcell
+ \startxcell[demo][nx=2] 3 / 4 \stopxcell
+\stopxrow
+\startxrow
+ \startxcell 1 \stopxcell
+ \startxcell 2 \stopxcell
+ \startxcell 3 \stopxcell
+ \startxcell 4 \stopxcell
+\stopxrow
+\stopxtable
+\stopbuffer
+
+\startbuffer[tight]
+\setupxtable[demo][option=tight]
+\stopbuffer
+
+\startbuffer[normal]
+\setupxtable[demo][option=]
+\stopbuffer
+
+\typebuffer[demo]
+
+\page
+
+\typebuffer[tight] \start \getbuffer[tight,demo] \stop
+\typebuffer[normal] \start \getbuffer[normal,demo] \stop
+
+% \ruledhbox{\getbuffer[normal,demo]}
+
+\stopdocument
diff --git a/fonts/map/pdftex/context/mkiv-base.map b/fonts/map/pdftex/context/mkiv-base.map
index 482bf0894..fc2aa643b 100644
--- a/fonts/map/pdftex/context/mkiv-base.map
+++ b/fonts/map/pdftex/context/mkiv-base.map
@@ -135,110 +135,8 @@ eusm5 EUSM5 <eusm5.pfb
eusm7 EUSM7 <eusm7.pfb
eusm10 EUSM10 <eusm10.pfb
-% lm leftovers, used by inclusion
-
-cmb10 CMB10 <lm-rep-cmrm.enc <lmb10.pfb
-cmbx10 CMBX10 <lm-rep-cmrm.enc <lmbx10.pfb
-cmbx12 CMBX12 <lm-rep-cmrm.enc <lmbx12.pfb
-cmbx5 CMBX5 <lm-rep-cmrm.enc <lmbx5.pfb
-cmbx6 CMBX6 <lm-rep-cmrm.enc <lmbx6.pfb
-cmbx7 CMBX7 <lm-rep-cmrm.enc <lmbx7.pfb
-cmbx8 CMBX8 <lm-rep-cmrm.enc <lmbx8.pfb
-cmbx9 CMBX9 <lm-rep-cmrm.enc <lmbx9.pfb
-cmbxsl10 CMBXSL10 <lm-rep-cmrm.enc <lmbxo10.pfb
-cmbxti10 CMBXTI10 <lm-rep-cmit.enc <lmbxi10.pfb
-cmbxti12 CMBXTI12 <lm-rep-cmit.enc <lmbxti10.pfb
-cmbxti7 CMBXTI7 <lm-rep-cmit.enc <lmbxti10.pfb
-cmcsc10 CMCSC10 <lm-rep-cmsc.enc <lmcsc10.pfb
-cmcsc8 CMCSC8 <lm-rep-cmsc.enc <lmcsc10.pfb
-cmcsc9 CMCSC9 <lm-rep-cmsc.enc <lmcsc10.pfb
-cminch CMINCH <lm-rep-cmin.enc <lmssbx10.pfb
-cmitt10 CMITT10 <lm-rep-cmitt.enc <lmtti10.pfb
-cmitt12 CMITT12 <lm-rep-cmitt.enc <lmtti10.pfb
-cmitt9 CMITT9 <lm-rep-cmitt.enc <lmtti10.pfb
-cmr10 CMR10 <lm-rep-cmrm.enc <lmr10.pfb
-cmr12 CMR12 <lm-rep-cmrm.enc <lmr12.pfb
-cmr17 CMR17 <lm-rep-cmrm.enc <lmr17.pfb
-cmr5 CMR5 <lm-rep-cmsc.enc <lmr5.pfb
-cmr6 CMR6 <lm-rep-cmrm.enc <lmr6.pfb
-cmr7 CMR7 <lm-rep-cmrm.enc <lmr7.pfb
-cmr8 CMR8 <lm-rep-cmrm.enc <lmr8.pfb
-cmr9 CMR9 <lm-rep-cmrm.enc <lmr9.pfb
-cmsl10 CMSL10 <lm-rep-cmrm.enc <lmro10.pfb
-cmsl12 CMSL12 <lm-rep-cmrm.enc <lmro12.pfb
-cmsl6 CMSL6 <lm-rep-cmrm.enc <lmro8.pfb
-cmsl8 CMSL8 <lm-rep-cmrm.enc <lmro8.pfb
-cmsl9 CMSL9 <lm-rep-cmrm.enc <lmro9.pfb
-cmsltt10 CMSLTT10 <lm-rep-cmtt.enc <lmtto10.pfb
-cmsltt9 CMSLTT9 <lm-rep-cmtt.enc <lmtto10.pfb
-cmss10 CMS10 <lm-rep-cmrm.enc <lmss10.pfb
-cmss12 CMS12 <lm-rep-cmrm.enc <lmss12.pfb
-cmss17 CMS17 <lm-rep-cmrm.enc <lmss17.pfb
-cmss8 CMS8 <lm-rep-cmrm.enc <lmss8.pfb
-cmss9 CMS9 <lm-rep-cmrm.enc <lmss9.pfb
-cmssbx10 CMSSBX10 <lm-rep-cmrm.enc <lmssbx10.pfb
-cmssbxo10 CMSSBXO10 <lm-rep-cmrm.enc <lmssbo10.pfb
-cmssdc10 CMSSDC10 <lm-rep-cmrm.enc <lmssdc10.pfb
-cmssi10 CMSI10 <lm-rep-cmrm.enc <lmsso10.pfb
-cmssi12 CMSI12 <lm-rep-cmrm.enc <lmsso12.pfb
-cmssi17 CMSI17 <lm-rep-cmrm.enc <lmsso17.pfb
-cmssi8 CMSSI8 <lm-rep-cmrm.enc <lmsso8.pfb
-cmssi9 CMSSI9 <lm-rep-cmrm.enc <lmsso9.pfb
-cmssq8 CMSQ8 <lm-rep-cmrm.enc <lmssq8.pfb
-cmssqi8 CMSSQI8 <lm-rep-cmrm.enc <lmssqo8.pfb
-cmtcsc10 CMTCSC10 <lm-rep-cmtt.enc <lmtcsc10.pfb
-cmti10 CMTI10 <lm-rep-cmit.enc <lmri10.pfb
-cmti12 CMTI12 <lm-rep-cmit.enc <lmri12.pfb
-cmti7 CMTI7 <lm-rep-cmit.enc <lmri7.pfb
-cmti8 CMTI8 <lm-rep-cmit.enc <lmri8.pfb
-cmti9 CMTI9 <lm-rep-cmit.enc <lmri9.pfb
-cmtt10 CMTT10 <lm-rep-cmtt.enc <lmtt10.pfb
-cmtt12 CMTT12 <lm-rep-cmtt.enc <lmtt12.pfb
-cmtt8 CMTT8 <lm-rep-cmtt.enc <lmtt8.pfb
-cmtt9 CMTT9 <lm-rep-cmtt.enc <lmtt9.pfb
-cmvtt10 CMVTT10 <lm-rep-cmrm.enc <lmvtt10.pfb
-
-% math
-
-cmex10 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-cmex9 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-cmex8 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-cmex7 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-
-cmmi5 LMMathItalic5-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi5.pfb
-cmmi6 LMMathItalic6-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi6.pfb
-cmmi7 LMMathItalic7-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi7.pfb
-cmmi8 LMMathItalic8-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi8.pfb
-cmmi9 LMMathItalic9-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi9.pfb
-cmmi10 LMMathItalic10-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi10.pfb
-cmmi12 LMMathItalic12-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi12.pfb
-
-cmmib5 LMMathItalic5-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib5.pfb
-cmmib6 LMMathItalic6-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib6.pfb
-cmmib7 LMMathItalic7-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib7.pfb
-cmmib8 LMMathItalic8-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib8.pfb
-cmmib9 LMMathItalic9-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib9.pfb
-cmmib10 LMMathItalic10-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib10.pfb
-cmmib12 LMMathItalic12-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib12.pfb
-
-cmsy5 LMMathSymbols5-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy5.pfb
-cmsy6 LMMathSymbols6-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy6.pfb
-cmsy7 LMMathSymbols7-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy7.pfb
-cmsy8 LMMathSymbols8-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy8.pfb
-cmsy9 LMMathSymbols9-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy9.pfb
-cmsy10 LMMathSymbols10-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy10.pfb
-
-cmbsy5 LMMathSymbols5-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy5.pfb
-cmbsy6 LMMathSymbols6-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy6.pfb
-cmbsy7 LMMathSymbols7-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy7.pfb
-cmbsy8 LMMathSymbols8-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy8.pfb
-cmbsy9 LMMathSymbols9-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy9.pfb
-cmbsy10 LMMathSymbols10-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy10.pfb
-
% original
-cmdunh10 CMDUNH10 <cmdunh10.pfb
-
cmtex10 CMTEX10 <cmtex10.pfb
cmtex8 CMTEX8 <cmtex8.pfb
cmtex9 CMTEX9 <cmtex9.pfb
@@ -247,6 +145,80 @@ cmff10 CMFF10 <cmff10.pfb
cmfi10 CMFI10 <cmfi10.pfb
cmfib8 CMFIB8 <cmfib8.pfb
-% a weird one, not used in context
-
-cmu10 LMRoman10-Italic "-0.25 SlantFont" <lm-rep-cmit.enc <lmri10.pfb
+% the latest gust list
+
+cmb10 LMRomanDemi10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmb10.pfb
+cmbx10 LMRoman10-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx10.pfb
+cmbx5 LMRoman5-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx5.pfb
+cmbx6 LMRoman6-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx6.pfb
+cmbx7 LMRoman7-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx7.pfb
+cmbx8 LMRoman8-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx8.pfb
+cmbx9 LMRoman9-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx9.pfb
+cmbxsl10 LMRomanSlant10-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbxo10.pfb
+cmbxti10 LMRoman10-BoldItalic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmbxi10.pfb
+cmcsc10 LMRomanCaps10-Regular "enclmrepcmsc ReEncodeFont" <lm-rep-cmsc.enc <lmcsc10.pfb
+cmcscsl10 LMRomanCaps10-Oblique "enclmrepcmsc ReEncodeFont" <lm-rep-cmsc.enc <lmcsco10.pfb
+cmdunh10 LMRomanDunh10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmdunh10.pfb
+cminch LMSans10-Bold "enclmrepcmin ReEncodeFont" <lm-rep-cmin.enc <lmssbx10.pfb
+cmitt10 LMMono10-Italic "enclmrepcmitt ReEncodeFont" <lm-rep-cmitt.enc <lmtti10.pfb
+cmr10 LMRoman10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr10.pfb
+cmr12 LMRoman12-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr12.pfb
+cmr17 LMRoman17-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr17.pfb
+cmr5 LMRoman5-Regular "enclmrepcmsc ReEncodeFont" <lm-rep-cmsc.enc <lmr5.pfb
+cmr6 LMRoman6-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr6.pfb
+cmr7 LMRoman7-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr7.pfb
+cmr8 LMRoman8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr8.pfb
+cmr9 LMRoman9-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr9.pfb
+cmsl10 LMRomanSlant10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro10.pfb
+cmsl12 LMRomanSlant12-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro12.pfb
+cmsl8 LMRomanSlant8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro8.pfb
+cmsl9 LMRomanSlant9-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro9.pfb
+cmsltt10 LMMonoSlant10-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtto10.pfb
+cmss10 LMSans10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss10.pfb
+cmss12 LMSans12-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss12.pfb
+cmss17 LMSans17-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss17.pfb
+cmss8 LMSans8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss8.pfb
+cmss9 LMSans9-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss9.pfb
+cmssbx10 LMSans10-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssbx10.pfb
+cmssbxo10 LMSans10-BoldOblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssbo10.pfb
+cmssdc10 LMSansDemiCond10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssdc10.pfb
+cmssi10 LMSans10-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso10.pfb
+cmssi12 LMSans12-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso12.pfb
+cmssi17 LMSans17-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso17.pfb
+cmssi8 LMSans8-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso8.pfb
+cmssi9 LMSans9-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso9.pfb
+cmssq8 LMSansQuot8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssq8.pfb
+cmssqi8 LMSansQuot8-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssqo8.pfb
+cmtcsc10 LMMonoCaps10-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtcsc10.pfb
+cmti10 LMRoman10-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri10.pfb
+cmti12 LMRoman12-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri12.pfb
+cmti7 LMRoman7-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri7.pfb
+cmti8 LMRoman8-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri8.pfb
+cmti9 LMRoman9-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri9.pfb
+cmtt10 LMMono10-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt10.pfb
+cmtt12 LMMono12-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt12.pfb
+cmtt8 LMMono8-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt8.pfb
+cmtt9 LMMono9-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt9.pfb
+cmu10 LMRomanUnsl10-Regular "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmu10.pfb
+cmvtt10 LMMonoProp10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmvtt10.pfb
+
+cmbsy5 LMMathSymbols5-Bold "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy5.pfb
+cmbsy7 LMMathSymbols7-Bold "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy7.pfb
+cmbsy10 LMMathSymbols10-Bold "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy10.pfb
+cmsy5 LMMathSymbols5-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy5.pfb
+cmsy6 LMMathSymbols6-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy6.pfb
+cmsy7 LMMathSymbols7-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy7.pfb
+cmsy8 LMMathSymbols8-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy8.pfb
+cmsy9 LMMathSymbols9-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy9.pfb
+cmsy10 LMMathSymbols10-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy10.pfb
+cmex10 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
+cmmi5 LMMathItalic5-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi5.pfb
+cmmi6 LMMathItalic6-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi6.pfb
+cmmi7 LMMathItalic7-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi7.pfb
+cmmi8 LMMathItalic8-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi8.pfb
+cmmi9 LMMathItalic9-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi9.pfb
+cmmi10 LMMathItalic10-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi10.pfb
+cmmi12 LMMathItalic12-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi12.pfb
+cmmib5 LMMathItalic5-Bold "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib5.pfb
+cmmib7 LMMathItalic7-Bold "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib7.pfb
+cmmib10 LMMathItalic10-Bold "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib10.pfb
diff --git a/metapost/context/base/metafun.mpiv b/metapost/context/base/metafun.mpiv
index a113675e6..b1d4f32e7 100644
--- a/metapost/context/base/metafun.mpiv
+++ b/metapost/context/base/metafun.mpiv
@@ -15,10 +15,14 @@
%D prevent dependency problems and in the end even may use a patched version,
%D we prefer to use a copy.
+prologues := 0 ;
+mpprocset := 1 ;
+
input "mp-base.mpiv" ;
input "mp-tool.mpiv" ;
input "mp-mlib.mpiv" ;
% "mp-core.mpiv" ; % todo: namespace and cleanup
+input "mp-luas.mpiv" ; % experimental
input "mp-page.mpiv" ; % todo: namespace and cleanup
input "mp-butt.mpiv" ; % todo: namespace and cleanup
input "mp-shap.mpiv" ; % will be improved
@@ -46,6 +50,7 @@ let normalend = end ;
if known mplib :
def end = ; message "" ; message metafunversion ; message "" ; endinput ; enddef ;
+ def bye = ; message "" ; message metafunversion ; message "" ; endinput ; enddef ;
else :
def end = ; message "" ; message metafunversion ; message "" ; normalend ; enddef ;
fi ;
diff --git a/metapost/context/base/mp-bare.mpiv b/metapost/context/base/mp-bare.mpiv
new file mode 100644
index 000000000..c6194b1ee
--- /dev/null
+++ b/metapost/context/base/mp-bare.mpiv
@@ -0,0 +1,93 @@
+%D \module
+%D [ file=mp-bare.mpiv,
+%D version=2014.10.31,
+%D title=\CONTEXT\ \METAPOST\ graphics,
+%D subtitle=plain plugins,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
+%C details.
+
+if known context_bare : endinput ; fi ;
+boolean context_bare ; context_bare := true ;
+
+numeric mfun_tt_w[], mfun_tt_h[], mfun_tt_d[] ;
+numeric mfun_tt_n ; mfun_tt_n := 0 ;
+picture mfun_tt_p ; mfun_tt_p := nullpicture ;
+picture mfun_tt_o ; mfun_tt_o := nullpicture ;
+picture mfun_tt_c ; mfun_tt_c := nullpicture ;
+
+if unknown mfun_trial_run :
+ boolean mfun_trial_run ;
+ mfun_trial_run := false ;
+fi ;
+
+if unknown mfun_first_run :
+ boolean mfun_first_run ;
+ mfun_first_run := true ;
+fi ;
+
+def mfun_reset_tex_texts =
+ mfun_tt_n := 0 ;
+ mfun_tt_p := nullpicture ;
+ mfun_tt_o := nullpicture ; % redundant
+ mfun_tt_c := nullpicture ; % redundant
+enddef ;
+
+def mfun_flush_tex_texts =
+ addto currentpicture also mfun_tt_p
+enddef ;
+
+extra_beginfig := extra_beginfig & "mfun_reset_tex_texts ;" ;
+extra_endfig := "mfun_flush_tex_texts ; mfun_reset_tex_texts ; " & extra_endfig ;
+
+vardef colordecimals primary c =
+ if cmykcolor c :
+ decimal cyanpart c & ":" & decimal magentapart c & ":" & decimal yellowpart c & ":" & decimal blackpart c
+ elseif rgbcolor c :
+ decimal redpart c & ":" & decimal greenpart c & ":" & decimal bluepart c
+ else :
+ decimal c
+ fi
+enddef ;
+
+vardef rawtextext(expr str) = % todo: avoid currentpicture
+ if str = "" :
+ nullpicture
+ else :
+ mfun_tt_n := mfun_tt_n + 1 ;
+ mfun_tt_c := nullpicture ;
+ if mfun_trial_run :
+ mfun_tt_o := nullpicture ;
+ addto mfun_tt_o doublepath origin _op_ ; % save drawoptions
+ addto mfun_tt_c doublepath unitsquare
+ withprescript "tx_number=" & decimal mfun_tt_n
+ withprescript "tx_stage=trial"
+ withprescript "tx_color=" & colordecimals colorpart mfun_tt_o
+ withpostscript str ;
+ addto mfun_tt_p also mfun_tt_c ;
+ elseif known mfun_tt_d[mfun_tt_n] :
+ addto mfun_tt_c doublepath unitsquare
+ xscaled mfun_tt_w[mfun_tt_n]
+ yscaled (mfun_tt_h[mfun_tt_n] + mfun_tt_d[mfun_tt_n])
+ shifted (0,-mfun_tt_d[mfun_tt_n])
+ withprescript "tx_number=" & decimal mfun_tt_n
+ withprescript "tx_stage=final" ;
+ else :
+ addto mfun_tt_c doublepath unitsquare ; % unitpicture
+ fi ;
+ mfun_tt_c
+ fi
+enddef ;
+
+primarydef str infont name = % nasty hack
+ if name = "" :
+ rawtextext(str)
+ else :
+ rawtextext("\definedfont[" & name & "]" & str)
+ fi
+enddef ;
+
diff --git a/metapost/context/base/mp-base.mpii b/metapost/context/base/mp-base.mpii
index 0f8104447..7af4bc436 100644
--- a/metapost/context/base/mp-base.mpii
+++ b/metapost/context/base/mp-base.mpii
@@ -110,12 +110,15 @@ transform identity;
for z=origin,right,up: z transformed identity = z; endfor
% color constants
-color black, white, red, green, blue, background;
+color black, white, red, green, blue, cyan, magenta, yellow, background;
black = (0,0,0);
white = (1,1,1);
red = (1,0,0);
green = (0,1,0);
blue = (0,0,1);
+cyan = (0,1,1);
+magenta = (1,0,1);
+yellow = (1,1,0);
background = white; % The user can reset this
% color part selection for within
@@ -360,9 +363,17 @@ enddef;
def filldraw expr c =
addto currentpicture contour c withpen currentpen
_op_ enddef;
-def drawdot expr z =
- addto currentpicture contour makepath currentpen shifted z
- _op_ enddef;
+% def drawdot expr z =
+% addto currentpicture contour makepath currentpen shifted z
+% _op_ enddef;
+
+def drawdot expr p =
+ if pair p :
+ addto currentpicture doublepath p withpen currentpen _op_
+ else :
+ errmessage("drawdot only accepts a pair expression")
+ fi
+enddef ;
def unfill expr c = fill c withcolor background enddef;
def undraw expr p = draw p withcolor background enddef;
diff --git a/metapost/context/base/mp-base.mpiv b/metapost/context/base/mp-base.mpiv
index 2887cc462..28eb57fb8 100644
--- a/metapost/context/base/mp-base.mpiv
+++ b/metapost/context/base/mp-base.mpiv
@@ -195,7 +195,8 @@ yellow := (1,1,0) ;
background := white ; % obsolete
let graypart = greypart ;
-let graycolor = greycolor ;
+let greycolor = numeric ;
+let graycolor = numeric ;
% color part (will be overloaded)
@@ -323,7 +324,7 @@ primarydef w dotprod z =
enddef ;
primarydef x**y =
- if y=2 :
+ if y = 2 :
x*x
else :
takepower y of x
@@ -348,11 +349,46 @@ def takepower expr y of x =
endfor
fi
else :
- hide(errmessage "Undefined power: " & decimal x&"**"&decimal y)
+ hide(errmessage "Undefined power: " & decimal x & "**" & decimal y)
fi
fi
enddef ;
+% for big number systems:
+%
+% primarydef x**y =
+% if y = 1 :
+% x
+% elseif y = 2 :
+% x*x
+% elseif y = 3 :
+% x*x*x
+% else :
+% takepower y of x
+% fi
+% enddef ;
+%
+% vardef takepower expr y of x =
+% if (x=0) and (y>0) :
+% 0
+% else :
+% 1
+% if y = floor y :
+% if y >= 0 :
+% for n=1 upto y :
+% *x
+% endfor
+% else :
+% for n=-1 downto y :
+% /x
+% endfor
+% fi
+% else :
+% hide(errmessage "Undefined power: " & decimal x & "**" & decimal y)
+% fi
+% fi
+% enddef ;
+
vardef direction expr t of p =
postcontrol t of p - precontrol t of p
enddef ;
@@ -594,8 +630,36 @@ def filldraw expr c =
addto currentpicture contour c withpen currentpen _op_
enddef ;
-def drawdot expr z =
- addto currentpicture contour makepath currentpen shifted z _op_
+% def drawdot expr z =
+% addto currentpicture contour makepath currentpen shifted z _op_
+% enddef ;
+%
+% testcase DEK:
+%
+% for j=1 upto 9 :
+% pickup pencircle xscaled .4 yscaled .2 ;
+% drawdot (10j,0) withpen pencircle xscaled .5j yscaled .25j rotated 45 ;
+% pickup pencircle xscaled .5j yscaled .25j rotated 45 ;
+% drawdot (10j,10);
+% endfor ;
+%
+% or:
+%
+%\startMPpage
+%
+% def drawdot expr z =
+% addto currentpicture contour (makepath currentpen shifted z) _op_
+% enddef;
+%
+% drawdot origin shifted (0,-3cm) withpen pencircle scaled 2cm ;
+% pickup pencircle scaled 2cm ; drawdot origin withcolor red ;
+
+def drawdot expr p =
+ if pair p :
+ addto currentpicture doublepath p withpen currentpen _op_
+ else :
+ errmessage("drawdot only accepts a pair expression")
+ fi
enddef ;
def unfill expr c = fill c withcolor background enddef ;
@@ -651,10 +715,10 @@ enddef ;
def pen_pickup_ primary q =
currentpen := q ;
- pen_lft :=xpart penoffset down of currentpen ;
- pen_rt :=xpart penoffset up of currentpen ;
- pen_top :=ypart penoffset left of currentpen ;
- pen_bot :=ypart penoffset right of currentpen ;
+ pen_lft := xpart penoffset down of currentpen ;
+ pen_rt := xpart penoffset up of currentpen ;
+ pen_top := ypart penoffset left of currentpen ;
+ pen_bot := ypart penoffset right of currentpen ;
path currentpen_path ;
enddef ;
@@ -670,7 +734,8 @@ vardef savepen =
pen_count_
enddef ;
-def clearpen = currentpen:=nullpen;
+def clearpen =
+ currentpen := nullpen;
pen_lft := pen_rt := pen_top := pen_bot := 0 ;
path currentpen_path ;
enddef ;
@@ -801,7 +866,7 @@ vardef labels@#(text t) =
endfor
enddef ;
-% til lhere
+% till lhere
vardef dotlabels@#(text t) =
forsuffixes $=t:
@@ -817,17 +882,19 @@ vardef penlabels@#(text t) =
endfor
enddef ;
+% range 4 thru 10
+
def range expr x =
- numtok[x]
+ _numtok_[x]
enddef ;
-def numtok suffix x =
+def _numtok_ suffix x =
x
enddef ;
tertiarydef m thru n =
m for x=m+1 step 1 until n :
- , numtok[x]
+ , _numtok_[x]
endfor
enddef ;
diff --git a/metapost/context/base/mp-chem.mpiv b/metapost/context/base/mp-chem.mpiv
index 2addb0a73..b861d3f12 100644
--- a/metapost/context/base/mp-chem.mpiv
+++ b/metapost/context/base/mp-chem.mpiv
@@ -11,7 +11,8 @@
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
%C details.
-%D This module is incomplete and experimental.
+%D This module is incomplete and experimental. Okay, it's not that bad but we do need
+%D some disclaimer.
% either consistent setting or not
@@ -24,7 +25,7 @@ numeric
chem_text_min, chem_text_max,
chem_rotation, chem_adjacent, chem_stack_n,
chem_substituent, chem_substituent.lft, chem_substituent.rt,
- chem_setting_offset, chem_text_offset, chem_picture_offset,
+ chem_setting_offset, chem_text_offset,
chem_center_offset, chem_dbl_offset,
chem_bb_angle, chem_axis_rulethickness,
chem_setting_l, chem_setting_r, chem_setting_t, chem_setting_b,
@@ -72,19 +73,18 @@ chem_axis_rulethickness := 1pt ;
chem_emwidth := 10pt ; % EmWidth or \the\emwidth does not work...
chem_b_length := 3 chem_emwidth ;
chem_text_offset := -.3chem_emwidth ; % -.71chem_emwidth ; % 1/sqrt(2)
-chem_center_offset := .5 chem_emwidth ;
-chem_picture_offset := chem_emwidth ;
+chem_center_offset := .5chem_emwidth ;
chem_dbl_offset := .05 ;
chem_bb_angle := angle(1,2chem_dbl_offset) ;
chem_text_min := 0.75 ;
chem_text_max := 1.25 ;
-chem_dot_factor := 4 ; % *linewidth
+chem_dot_factor := 2 ; % *linewidth
chem_sb_pair := (0.25,0.75) ; %chem_sb_dash := dashpattern(off 0.25 on 0.5 off 0.25) ;
chem_sb_pair.m := (0.25,1 ) ; %chem_sb_dash.m := dashpattern(off 0.25 on 0.75) ;
chem_sb_pair.p := (0 ,0.75) ; %chem_sb_dash.p := dashpattern(on 0.75 off 0.25) ;
-chem_sb_pair.b := (0, 1 ) ; %chem_sb_dash.b := dashpattern(on 1) ;
+chem_sb_pair.b := (0 ,1 ) ; %chem_sb_dash.b := dashpattern(on 1) ;
-chem_bd_wedge := false ; % true is incorrect, but quite common...
+chem_bd_wedge := true ; % according to IUPAC 2005
def chem_reset =
chem_rotation := 0 ;
@@ -132,6 +132,8 @@ vardef chem_init_some (suffix $) (expr e) =
fi
if not chem_star[$] :
scaled (.5/(sind .5chem_num1))
+ % carbon-carbon benzene bond length
+ scaled (1.4/1.54)
fi ;
fi ;
@@ -149,7 +151,7 @@ vardef chem_init_some (suffix $) (expr e) =
chem_num2 := i ;
fi
endfor)
- scaled (2*(abs(point chem_num2+.5 of chem_b_path[$]) - chem_dbl_offset))
+ scaled (2*(abs(point chem_num2+.5 of chem_b_path[$]) - 2chem_dbl_offset))
fi ;
chem_r_path[$] :=
@@ -301,17 +303,20 @@ enddef ;
chem_init_all ; % WHY does this not work unless defined and then called?
-
% Like most often in ConTeXt, we will trap but then silently ignore mistaken use,
% unless of course the error be too harmful...
% \startchemical
-def chem_start_structure(expr i, l, r, t, b, rotation, unit, factor, offset, axis, rulethickness, axiscolor) =
+def chem_start_structure(expr i, l, r, t, b, rotation, unit, bond, scale, offset, axis, rulethickness, axiscolor) =
save chem_setting_l, chem_setting_r, chem_setting_t, chem_setting_b ;
- chem_emwidth := unit ;
- chem_b_length := factor * unit ;
+ chem_emwidth := unit ; % dynamically set for each structure.
+ chem_text_offset := -.3chem_emwidth ; % -.71chem_emwidth ; % 1/sqrt(2)
+ chem_center_offset := .5chem_emwidth ;
+ chem_b_length := chem_emwidth * bond * scale ;
+ % scale (normally 1) scales the structure but not the text.
+
if numeric l :
chem_setting_l := -l ;
fi
@@ -526,6 +531,17 @@ vardef chem_set (suffix $) =
% This is a fairly complicated optimization and ajustement. It took some
% thinking to get right, so beware!
+ % And then even more time fixing a bug of a rotation +- half the symmetry
+ % angle of a structure depending on the scale and/or the font size
+ % (through chem_b_length).
+
+ % first save the symmetry angle of the structure (as in chem_rot):
+ chem_num0 := if chem_stacked[$] : 3 else : 0 fi ;
+ chem_num9 := if chem_tetra[$] : 360 else :
+ abs(angle(point 0+chem_num0 of chem_b_path[$]) -
+ angle(point 1+chem_num0 of chem_b_path[$]))
+ fi ;
+
if (chem_adjacent<>0) and chem_star[P] and chem_star[$] :
% nop
chem_adjacent := 0 ;
@@ -578,7 +594,8 @@ vardef chem_set (suffix $) =
-((point (chem_adjacent-1) of chem_b_path[P]) chem_transformed(P)) ;
fi
% adjust the bond angles
- chem_rotation := (chem_rotation + angle(chem_pair1)-angle(chem_pair3)) mod 360 ;
+ chem_num4 := (angle(chem_pair1)-angle(chem_pair3)) zmod chem_num9 ;
+ chem_rotation := chem_rotation + chem_num4 ;
if not chem_star[$] :
chem_pair4 :=
if chem_star[P] :
@@ -663,7 +680,8 @@ vardef chem_set (suffix $) =
fi
endfor
if not chem_front[$] : % adjust rotation
- chem_rotation := (chem_rotation + angle(chem_pair1)-angle(chem_pair3)) mod 360 ;
+ chem_num4 := angle(chem_pair1)-angle(chem_pair3) ;
+ chem_rotation := (chem_rotation + chem_num4) mod 360 ;
fi ;
chem_t := identity chem_transformed($) ;
chem_pair4 := (point chem_num3 of chem_b_path[$]) transformed chem_t ;
@@ -671,6 +689,9 @@ vardef chem_set (suffix $) =
currentpicture := currentpicture shifted chem_pair4 ;
chem_origin := chem_origin shifted chem_pair4 ;
fi
+ if not chem_front[$] : % adjust rotation
+ chem_rotation := chem_rotation zmod chem_num9 ;
+ fi
fi
chem_substituent := 0 ;
fi ;
@@ -798,28 +819,39 @@ vardef chem_eb@# (suffix $) (expr f, t, r, c) = % EB
enddef ;
vardef chem_ad@# (suffix $) (expr f, t, r, c) = % AD
- if not chem_star[$] :
- chem_t := identity chem_transformed($) ;
- for i=f upto t :
- chem_drawarrow(
- ((subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
- paralleled 2chem_dbl_offset) transformed chem_t,
- r,c,) ;
- endfor
- fi
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_drawarrow(
+ (
+ (subpath
+ if chem_star[$] :
+ chem_sb_pair@# of chem_r_fragment($,i)
+ ) paralleled 5chem_dbl_offset
+ else :
+ (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$]
+ ) paralleled 2chem_dbl_offset
+ fi
+ ) transformed chem_t,
+ r,c,) ;
+ endfor
enddef ;
vardef chem_au@# (suffix $) (expr f, t, r, c) = % AU
- if not chem_star[$] :
- chem_t := identity chem_transformed($) ;
- for i=f upto t :
- chem_drawarrow(
- reverse(
- (subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
- paralleled 2chem_dbl_offset) transformed chem_t,
- r,c,) ;
- endfor
- fi
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_drawarrow(
+ ((reverse
+ subpath
+ if chem_star[$] :
+ chem_sb_pair@# of chem_r_fragment($,i)
+ ) paralleled -5chem_dbl_offset
+ else :
+ (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$]
+ ) paralleled -2chem_dbl_offset
+ fi
+ ) transformed chem_t,
+ r,c,) ;
+ endfor
enddef ;
vardef chem_es@# (suffix $) (expr f, t, r, c) = % ES
@@ -1004,6 +1036,15 @@ enddef ;
vardef chem_hb@# (suffix $) (expr f, t, r, c) = % HB
if chem_star[$] :
chem_rh@#($,f,t,r,c)
+ else :
+ chem_t := identity chem_transformed($) ;
+ for i=f upto t :
+ chem_draw(
+ (subpath (chem_sb_pair@# shifted (i-1,i-1)) of chem_b_path[$])
+ transformed chem_t,
+ chem_dot_factor*r,c,dashed withdots scaled ((.5chem_b_length/3)/5bp)) ;
+ % not symmetric - needs to be tweaked...
+ endfor
fi
enddef ;
@@ -1548,9 +1589,9 @@ vardef chem_rot (suffix $) (expr d, s) = % ROT
chem_rotation := 0
else :
chem_num0 := if chem_stacked[$] : 3 else : 0 fi ;
- chem_num1 := .5(angle(point d+chem_num0 of chem_b_path[$]) -
+ chem_num1 := .5(angle(point d+chem_num0 of chem_b_path[$]) -
angle(point d+chem_num0-1 of chem_b_path[$])) ;
- chem_rotation := (chem_rotation + s*chem_num1) mod 360 ;
+ chem_rotation := (chem_rotation + s*chem_num1) zmod 360 ;
fi
fi
enddef ;
@@ -1561,7 +1602,7 @@ vardef chem_mir (suffix $) (expr d, s) = % MIR
if not chem_front[$] :
if d=0 : % inversion
if chem_mirror=origin :
- chem_rotation := (chem_rotation + 180*s) mod 360 ;
+ chem_rotation := (chem_rotation + 180*s) zmod 360 ;
else :
chem_mirror := chem_mirror rotated 90 ;
fi
@@ -1577,7 +1618,7 @@ vardef chem_mir (suffix $) (expr d, s) = % MIR
chem_num0 := -360 - chem_num0 ;
fi
chem_num0 := chem_num0 * s ;
- chem_rotation := (chem_rotation + 2chem_num0) mod 360 ;
+ chem_rotation := (chem_rotation + 2chem_num0) zmod 360 ;
chem_mirror := origin ;
fi
fi
diff --git a/metapost/context/base/mp-form.mpiv b/metapost/context/base/mp-form.mpiv
index b58792e1a..88b15e097 100644
--- a/metapost/context/base/mp-form.mpiv
+++ b/metapost/context/base/mp-form.mpiv
@@ -27,4 +27,4 @@ string Fmfont_ ; Fmfont_ := "crap" ;
vardef mfun_format_number(expr fmt, i) =
"\ctxlua{metapost.formatnumber('" & fmt & "'," & if string i : i else : decimal i fi & ")}"
-enddef
+enddef ;
diff --git a/metapost/context/base/mp-func.mpiv b/metapost/context/base/mp-func.mpiv
index 58df711f2..b1b9d6d5d 100644
--- a/metapost/context/base/mp-func.mpiv
+++ b/metapost/context/base/mp-func.mpiv
@@ -23,30 +23,36 @@ mfun_pathconnectors[0] := "," ;
mfun_pathconnectors[1] := "--" ;
mfun_pathconnectors[2] := ".." ;
mfun_pathconnectors[3] := "..." ;
+mfun_pathconnectors[4] := "---" ;
def pathconnectors = mfun_pathconnectors enddef ;
vardef mfun_function (expr f) (expr u, t, b, e, s) =
save x ; numeric x ;
+ save c ; string c ; c := if string f : f else : mfun_pathconnectors[f] fi ;
for xx := b step s until e :
hide (x := xx ;)
if xx > b :
- scantokens(mfun_pathconnectors[f])
+ scantokens(c)
fi
(scantokens(u),scantokens(t))
endfor
enddef ;
-def function = mfun_function enddef ; % let doesn't work here
-def punkedfunction = mfun_function (1) enddef ;
-def curvedfunction = mfun_function (2) enddef ;
-def tightfunction = mfun_function (3) enddef ;
+def function = mfun_function enddef ; % let doesn't work here
+def constructedfunction = mfun_function enddef ;
+def straightfunction = mfun_function (1) enddef ;
+def curvedfunction = mfun_function (2) enddef ;
+
+% def punkedfunction = mfun_function (1) enddef ; % same as straightfunction
+% def tightfunction = mfun_function (3) enddef ; % same as curvedfunction
vardef mfun_constructedpath (expr f) (text t) =
save ok ; boolean ok ; ok := false ;
+ save c ; string c ; c := if string f : f else : mfun_pathconnectors[f] fi ;
for i=t :
if ok :
- scantokens(mfun_pathconnectors[f])
+ scantokens(c)
else :
ok := true ;
fi
@@ -55,24 +61,27 @@ vardef mfun_constructedpath (expr f) (text t) =
enddef ;
def constructedpath = mfun_constructedpath enddef ; % let doesn't work here
-def punkedpath = mfun_constructedpath (1) enddef ;
+def straightpath = mfun_constructedpath (1) enddef ;
def curvedpath = mfun_constructedpath (2) enddef ;
-def tightpath = mfun_constructedpath (3) enddef ;
+
+% def punkedpath = mfun_constructedpath (1) enddef ; % same as straightpath
+% def tightpath = mfun_constructedpath (3) enddef ; % same as curvedpath
vardef mfun_constructedpairs (expr f) (text p) =
save i ; i := -1 ;
+ save c ; string c ; c := if string f : f else : mfun_pathconnectors[f] fi ;
forever :
exitif unknown p[incr(i)] ;
if i>0 :
- scantokens(mfun_pathconnectors[f])
+ scantokens(c)
fi
p[i]
endfor
enddef ;
def constructedpairs = mfun_constructedpairs enddef ; % let doesn't work here
-def punkedpairs = mfun_constructedpairs (1) enddef ;
+def straightpairs = mfun_constructedpairs (1) enddef ;
def curvedpairs = mfun_constructedpairs (2) enddef ;
-def tightpairs = mfun_constructedpairs (3) enddef ;
-
+% def punkedpairs = mfun_constructedpairs (1) enddef ; % same as straightpairs
+% def tightpairs = mfun_constructedpairs (3) enddef ; % same as curvedpairs
diff --git a/metapost/context/base/mp-grap.mpiv b/metapost/context/base/mp-grap.mpiv
index 417bfbe69..4fd8ee5bd 100644
--- a/metapost/context/base/mp-grap.mpiv
+++ b/metapost/context/base/mp-grap.mpiv
@@ -17,7 +17,10 @@ boolean context_grap ; context_grap := true ;
% Below is a modified graph.mp
-if epsilon/4 = 0 : % numbersystem="scaled" : (not reliable...)
+show numbersystem, numberprecision ;
+
+%if epsilon/4 = 0 :
+if numbersystem <> "double" :
errmessage "The graph macros require the double precision number system." ;
endinput ;
fi
@@ -52,11 +55,11 @@ fi
% endgraph end of graph--the result is a picture
% option `plot <picture>' draws picture at each path knot, turns off pen
-% Gtemplate.<tickcmd> template paths for tick marks and grid lines
+% graph_template.<tickcmd> template paths for tick marks and grid lines
% graph_margin_fraction.low,
% graph_margin_fraction.high fractions determining margins when no setrange
-% Glmarks[], Gumarks, Gemarks loop text strings used by auto.<x or y>
-% Gmarks, Gminlog numeric parameters used by auto.<x or y>
+% graph_log_marks[], graph_lin_marks, graph_exp_marks loop text strings used by auto.<x or y>
+% graph_minimum_number_of_marks, graph_log_minimum numeric parameters used by auto.<x or y>
% Autoform is the format string used by autogrid
% Autoform_X, Autoform_Y if defined, are used instead
@@ -64,23 +67,27 @@ fi
% are of the form X_.<suffix>, Y_.<suffix>, or Z_.<suffix>, or they start
% with `graph_'
-% Depends on :
-input string.mp
+% Used to depend on :
+
+% input string.mp
% Private version of a few marith macros, fixed for double math...
-newinternal Mzero; Mzero := -16384; % Anything at least this small is treated as zero
-newinternal mlogten ; mlogten := mlog(10) ;
-newinternal singleinfinity ; singleinfinity := 2**128 ;
-newinternal doubleinfinity ; doubleinfinity := 2**1024 ;
-% Note that we get arithmetic overflows if we set to -doubleinfinity below.
-% (but "only on odd days"...)
+
+newinternal Mzero ; Mzero := -16384; % Anything at least this small is treated as zero
+newinternal mlogten ; mlogten := mlog(10) ;
+newinternal largestmantissa ; largestmantissa := 2**52 ; % internal double warningcheck
+newinternal singleinfinity ; singleinfinity := 2**128 ;
+newinternal doubleinfinity ; doubleinfinity := 2**1024 ;
+%Mzero := -largestmantissa ; % Note that we get arithmetic overflows if we set to -doubleinfinity
% Safely convert a number to mlog form, trapping zero.
+
vardef graph_mlog primary x =
if unknown x: whatever
elseif x=0: Mzero
else: mlog(abs x) fi
enddef ;
+
vardef graph_exp primary x =
if unknown x: whatever
elseif x<=Mzero: 0
@@ -89,21 +96,25 @@ enddef ;
% and add the following for utility/completeness
% (replacing the definitions in mp-tool.mpiv).
+
vardef logten primary x =
if unknown x: whatever
elseif x=0: Mzero
else: mlog(abs x)/mlog(10) fi
enddef ;
+
vardef ln primary x =
if unknown x: whatever
elseif x=0: Mzero
else: mlog(abs x)/256 fi
enddef ;
+
vardef exp primary x =
if unknown x: whatever
elseif x<= Mzero: 0
else: (mexp 256)**x fi
enddef ;
+
vardef powten primary x =
if unknown x: whatever
elseif x<= Mzero: 0
@@ -112,6 +123,7 @@ enddef ;
% Convert x from mlog form into a pair whose xpart gives a mantissa and whose
% ypart gives a power of ten.
+
vardef graph_Meform(expr x) =
if x<=Mzero : origin
else :
@@ -122,6 +134,7 @@ vardef graph_Meform(expr x) =
enddef ;
% Modified from above.
+
vardef graph_Feform(expr x) =
interim warningcheck :=0 ;
if x=0 : origin
@@ -146,6 +159,7 @@ def graph_suffix(suffix $) = % convert from x or y to X_ or Y_
enddef ;
% New :
+
save graph_background ; color graph_background ; % if defined, fill the frame.
save graph_close_file ; boolean graph_close_file ; graph_close_file = false ;
@@ -200,17 +214,20 @@ enddef ;
% user to alter the behavior of these macros.
% Not very modifiable : log, linear,
% graph_frame_pair_a, graph_frame_pair_b, graph_margin_pair
-% Modifiable : Gtemplate.suffix, Glmarks[], Gumarks, Gemarks, Gmarks,
-% Gminlog, Autoform
+% Modifiable : graph_template.suffix,
+% graph_log_marks[], graph_lin_marks, graph_exp_marks,
+% graph_minimum_number_of_marks,
+% graph_log_minimum, Autoform
newinternal log, linear ; % coordinate system codes
log :=1 ; linear :=2;
+
% note that mp-tool.mpiv defines log as log10.
%%%%%%%%%%%%%%%%%%%%%% Coordinates : setcoords, setrange %%%%%%%%%%%%%%%%%%%%%%
-% Graph-related usr input is `user graph coordinates' as specified by arguments
+% Graph-related user input is `user graph coordinates' as specified by arguments
% to setcoords.
% `Internal graph coordinates' are used for graph_current_graph, graph_current_bb, Z_.low, Z_.high.
% Their meaning depends on the appropriate component of Z_.graph_coordinate_type :
@@ -227,14 +244,15 @@ vardef graph_set_default_bounds = % Set default Z_.low, Z_.high
graph_margin_pair$ ;
endfor
enddef ;
+
pair graph_margin_pair.low, graph_margin_pair.high ;
graph_margin_pair.high = -graph_margin_pair.low = (.00002,.00002) ;
+% Set $, $$, $$$ so that shifting by $ then transforming by $$ and then $$$ maps
+% the essential bounding box of graph_current_graph into (0,0)..Z_.graph_dimensions.
+% The `essential bounding box' is either what Z_.low and Z_.high imply
+% or the result of ignoring pen widths in graph_current_graph.
-% Set $, $$, $$$ so that shifting by $ then transforming by $$ and then $$$
-% maps the essential bounding box of graph_current_graph into (0,0)..Z_.graph_dimensions. The
-% `essential bounding box' is either what Z_.low and Z_.high imply or the
-% result of ignoring pen widths in graph_current_graph.
vardef graph_remap(suffix $,$$,$$$) =
save p_ ;
graph_set_default_bounds ;
@@ -245,10 +263,10 @@ vardef graph_remap(suffix $,$$,$$$) =
(Z_.high+$) transformed $$ = p_ ;
p_ transformed $$$ = Z_.graph_dimensions ;
enddef ;
+
graph_margin_fraction.low=-.07 ; % bbox fraction for default range start
graph_margin_fraction.high=1.07 ; % bbox fraction for default range stop
-
def graph_with_pen_and_color(expr q) =
withpen penpart q withcolor
if colormodel q=1 :
@@ -268,7 +286,7 @@ enddef ;
% Pair o is the value of p that makes tp (0,0). This implements the trick
% whereby using 1 instead of 0 for the width or height or the setbounds path
% for a label picture suppresses shifting in x or y.
-%
+
%vardef graph_picture_conversion@#(expr q, o)(text tp) =
% save p ;
% if stroked q :
@@ -284,8 +302,9 @@ enddef ;
% addto @# also q shifted ((tp)-llcorner q) ;
% fi
%enddef ;
-%
+
% This new version makes gdraw clip the result to the window defined with setrange
+
vardef graph_picture_conversion@#(expr q, o)(text tp) =
save p ;
save do_clip, tp_clipped ; boolean do_clip ; do_clip := true ;
@@ -315,12 +334,11 @@ enddef ;
def graph_coordinate_multiplication(expr a,b) = (xpart a*xpart b, ypart a*ypart b) enddef ;
-
vardef graph_clear_bounds@# = numeric @#.low, @#.high ; enddef;
-
% Finalize anything drawn in the present coordinate system and set up a new
% system as requested
+
vardef setcoords(expr tx, ty) =
interim warningcheck :=0 ;
if length graph_current_graph>0 :
@@ -335,10 +353,10 @@ vardef setcoords(expr tx, ty) =
X_.graph_coordinate_type := tx ; Y_.graph_coordinate_type := ty;
enddef ;
-
% Set Z_.low and Z_.high to correspond to given range of user graph
% coordinates. The text argument should be a sequence of pairs and/or strings
% with 4 components in all.
+
vardef setrange(text t) =
interim warningcheck :=0 ;
save r_ ; r_=0;
@@ -353,8 +371,8 @@ vardef setrange(text t) =
endfor
enddef ;
-
% @# is X_ or Y_ ; l and h are numeric or string
+
vardef graph_set_bounds@#(expr l, h) =
graph_clear_bounds@# ;
if @#graph_coordinate_type>0 :
@@ -382,15 +400,12 @@ vardef graph_set_bounds@#(expr l, h) =
fi
enddef ;
-
-
-
-
%%%%%%%%%%%%%%%%%%%%%%%%% Converting path coordinates %%%%%%%%%%%%%%%%%%%%%%%%%
% Find the result of scanning path p and using macros tx and ty to adjust the
% x and y parts of each coordinate pair. Boolean parameter c tells whether to
% force the result to be polygonal.
+
vardef graph_scan_path(expr p, c)(suffix tx, ty) =
if (str tx="") and (str ty="") : p
else :
@@ -409,26 +424,29 @@ vardef graph_scan_path(expr p, c)(suffix tx, ty) =
if pair p : point 0 of fi r_
fi
enddef ;
-vardef graph_pair_adjust(expr p)(suffix tx, ty) = (tx xpart p, ty ypart p) enddef ;
+vardef graph_pair_adjust(expr p)(suffix tx, ty) = (tx xpart p, ty ypart p) enddef ;
% Convert path p from user graph coords to internal graph coords.
+
vardef graph_convert_user_path_to_internal primary p =
interim warningcheck :=0 ;
- graph_scan_path(p,
- (abs X_.graph_coordinate_type<>linear) or (abs Y_.graph_coordinate_type<>linear),
- if abs X_.graph_coordinate_type=log : graph_mlog fi,
- if abs Y_.graph_coordinate_type=log : graph_mlog fi)
- transformed (identity
- if X_.graph_coordinate_type<0 : xscaled -1 fi
- if Y_.graph_coordinate_type<0 : yscaled -1 fi)
+ if known p :
+ graph_scan_path(p,
+ (abs X_.graph_coordinate_type<>linear) or (abs Y_.graph_coordinate_type<>linear),
+ if abs X_.graph_coordinate_type=log : graph_mlog fi,
+ if abs Y_.graph_coordinate_type=log : graph_mlog fi)
+ transformed (identity
+ if X_.graph_coordinate_type<0 : xscaled -1 fi
+ if Y_.graph_coordinate_type<0 : yscaled -1 fi)
+ fi
enddef ;
-
% Convert label location t_ from user graph coords to internal graph coords.
% The label location should be a pair, or two numbers/strings. If t_ is empty
% or a single item of non-pair type, just return t_. Unknown coordinates
% produce unknown components in the result.
+
vardef graph_label_convert_user_to_internal(text t_) =
save n_ ; n_=0;
interim warningcheck :=0 ;
@@ -448,23 +466,56 @@ vardef graph_label_convert_user_to_internal(text t_) =
fi
enddef ;
-
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Reading data files %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Read a line from file f, extract whitespace-separated tokens ignoring any
% initial "%", and return true if at least one token is found. The tokens
% are stored in @#1, @#2, .. with "" in the last @#[] entry.
+
+% String manipulation routines for MetaPost
+% It is harmless to input this file more than once.
+
+vardef isdigit primary d =
+ ("0"<=d)and(d<="9")
+enddef ;
+
+% Number of initial characters of string s where `c <character>' is true
+
+vardef graph_cspan(expr s)(text c) =
+ 0
+ for i=1 upto length s:
+ exitunless c substring (i-1,i) of s;
+ + 1
+ endfor
+enddef ;
+
+% String s is composed of items separated by white space. Lop off the first
+% item and the surrounding white space and return just the item.
+
+vardef graph_loptok suffix s =
+ save t, k;
+ k = graph_cspan(s," ">=);
+ if k > 0 :
+ s := substring(k,infinity) of s ;
+ fi
+ k := graph_cspan(s," "<);
+ string t;
+ t = substring (0,k) of s;
+ s := substring (k,infinity) of s;
+ s := substring (graph_cspan(s," ">=),infinity) of s;
+ t
+enddef ;
+
vardef graph_read_line@#(expr f) =
save n_, s_ ; string s_;
s_ = readfrom f ;
string @#[] ;
if s_<>EOF :
@#0 := s_ ;
- @#1 := loptok s_ ;
+ @#1 := graph_loptok s_ ;
n_ = if @#1="%" : 0 else : 1 fi ;
forever :
- @#[incr n_] := loptok s_ ;
+ @#[incr n_] := graph_loptok s_ ;
exitif @#[n_]="" ;
endfor
@#1<>""
@@ -472,12 +523,13 @@ vardef graph_read_line@#(expr f) =
fi
enddef ;
-
% Execute c for each line of data read from file f, and stop at the first
% line with no data. Commands c can use line number i and tokens $1, $2, ...
+% and j is the number of fields.
+
def gdata(expr f)(suffix $)(text c) =
- boolean flag ;
- for i=1 upto infinity :
+ %boolean flag ; % not used?
+ for i=1 upto largestmantissa :
exitunless graph_read_line$(f) ;
c
endfor
@@ -486,8 +538,8 @@ def gdata(expr f)(suffix $)(text c) =
fi
enddef ;
-
% Read a path from file f. The path is terminated by blank line or EOF.
+
vardef graph_readpath(expr f) =
interim warningcheck :=0 ;
save s ;
@@ -497,9 +549,9 @@ vardef graph_readpath(expr f) =
)
enddef ;
-
% Append coordinates t to polygonal path @#. The coordinates can be numerics,
% strings, or a single pair.
+
vardef augment@#(text t) =
interim warningcheck := 0 ;
if not path begingroup @# endgroup :
@@ -513,12 +565,11 @@ vardef augment@#(text t) =
fi
enddef ;
-
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Drawing and filling %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Unknown pair components are set to 0 because glabel and gdotlabel understand
% unknown coordinates as `0 in absolute units'.
+
vardef graph_unknown_pair_bbox(expr p) =
interim warningcheck:=0 ;
if known p : addto graph_current_bb doublepath p ;
@@ -535,6 +586,7 @@ enddef ;
% Initiate a gdraw or gfill command. This must be done before scanning the
% argument, because that could invoke the `if known graph_plot_picture' test in a following
% plot option .
+
def graph_addto =
def graph_errorbar_text = enddef ;
color graph_foreground ;
@@ -542,8 +594,8 @@ def graph_addto =
graph_last_drawn := graph_plot_picture := nullpicture ; addto graph_last_drawn
enddef;
+% Handle the part of a gdraw command that uses path or data file p.
-% Handle the part of a Gdraw command that uses path or data file p.
def graph_draw expr p =
if string p : hide(graph_last_path := graph_readpath(p) ;)
graph_convert_user_path_to_internal graph_last_path
@@ -556,8 +608,8 @@ def graph_draw expr p =
withpen currentpen graph_withlist _op_
enddef ;
+% Handle the part of a gdraw command that uses path or data file p.
-% Handle the part of a Gdraw command that uses path or data file p.
def graph_fill expr p =
if string p : hide(graph_last_path := graph_readpath(p) --cycle ;)
graph_convert_user_path_to_internal graph_last_path
@@ -571,8 +623,8 @@ enddef ;
def gdraw = graph_addto doublepath graph_draw enddef ;
def gfill = graph_addto contour graph_fill enddef ;
-
% This is used in graph_draw and graph_fill to allow postprocessing graph_last_drawn
+
def graph_withlist text t_ = t_ ; graph_post_draw; enddef;
def witherrorbars(text t) text options =
@@ -584,6 +636,8 @@ def witherrorbars(text t) text options =
options
enddef ;
+% new feature: graph_errorbars
+
picture graph_errorbar_picture ; graph_errorbar_picture := image(draw (left--right) scaled .5 ;) ;
%picture graph_xbar_picture ; graph_xbar_picture := image(draw (down--up) scaled .5 ;) ;
%picture graph_ybar_picture ; graph_ybar_picture := image(draw (left--right) scaled .5 ;) ;
@@ -646,6 +700,7 @@ enddef ;
% Set graph_plot_picture so the postprocessing step will plot picture p at each path knot.
% Also select nullpen to suppress stroking.
+
def plot expr p =
if known graph_plot_picture :
withpen nullpen
@@ -657,20 +712,19 @@ def plot expr p =
enddef ;
% This hides a semicolon that could prematurely end graph_withlist's text argument
+
def graph_addto_currentpicture primary p = addto currentpicture also p ; enddef;
def graph_setbounds = setbounds currentpicture to enddef ;
-
-def gdrawarrow = graph_number_of_arrowheads :=1 ; gdraw enddef;
-def gdrawdblarrow = graph_number_of_arrowheads :=2 ; gdraw enddef;
-
+def gdrawarrow = graph_number_of_arrowheads := 1 ; gdraw enddef;
+def gdrawdblarrow = graph_number_of_arrowheads := 2 ; gdraw enddef;
% Post-process the filled or stroked picture graph_last_drawn as follows : (1) update
% the bounding box information ; (2) transfer it to graph_current_graph unless the pen has
% been set to nullpen to disable stroking ; (3) plot graph_plot_picture at each knot.
+
vardef graph_post_draw =
- save p ;
- path p ; p=pathpart graph_last_drawn;
+ save p ; path p ; p = pathpart graph_last_drawn ;
graph_unknown_pair_bbox(p) ;
if filled graph_last_drawn or not graph_is_null(penpart graph_last_drawn) :
addto graph_current_graph also graph_last_drawn ;
@@ -687,17 +741,23 @@ vardef graph_post_draw =
if graph_number_of_arrowheads>1 :
graph_draw_arrowhead(reverse p, graph_with_pen_and_color(graph_last_drawn)) ;
fi
- graph_number_of_arrowheads :=0 ;
+ graph_number_of_arrowheads := 0 ;
fi
enddef ;
-vardef graph_is_null(expr p) = (urcorner p=origin) and (llcorner p=origin) enddef ;
+vardef graph_is_null(expr p) = (urcorner p=origin) and (llcorner p=origin) enddef ;
vardef graph_draw_arrowhead(expr p)(text w) = % Draw arrowhead for path p, with list w
+ %save r ; r := angle(precontrol infinity of p shifted -point infinity of p) ;
addto graph_current_graph also
- image(filldraw arrowhead(
- graph_arrowhead_extent(precontrol infinity of p, point infinity of p)) w ;
- graph_setbounds point infinity of p..cycle) ;
+ image(fill arrowhead (graph_arrowhead_extent(precontrol infinity of p,point infinity of p)) w ;
+ draw arrowhead (graph_arrowhead_extent(precontrol infinity of p,point infinity of p)) w
+ undashed ;
+%if (r mod 90 <> 0) : % orientation can be wrong due to remapping
+% draw textext("\tfxx " & decimal r) shifted point infinity of p withcolor blue ;
+%fi
+ graph_setbounds point infinity of p..cycle ;
+ ) ; % rotatedabout(point infinity of p,-r) ;
enddef ;
vardef graph_arrowhead_extent(expr p, q) =
@@ -705,8 +765,6 @@ vardef graph_arrowhead_extent(expr p, q) =
q
enddef ;
-
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Drawing labels %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Argument c is a drawing command that needs an additional argument p that gives
@@ -714,6 +772,7 @@ enddef ;
% path. Unknown components of p cause the setbounds path to have width or height 1 instead of 0.
% Then graph_unknown_pair_bbox sets these components to 0 and graph_picture_conversion
% suppresses subsequent repositioning.
+
def graph_draw_label(expr p)(suffix $)(text c) =
save sdim_ ; pair sdim_;
sdim_ := (if unknown xpart p : 1+ fi 0, if unknown ypart p : 1+ fi 0) ;
@@ -722,14 +781,13 @@ def graph_draw_label(expr p)(suffix $)(text c) =
image(c(p) ; graph_setbounds p--p+sdim_--cycle) _op_
enddef ;
-
% Stash the result drawing command c in the graph_label table using with list w and
% an index based on angle mfun_laboff$.
+
vardef graph_stash_label(suffix $)(text c) text w =
graph_label[1.5+angle mfun_laboff$ /90] = image(c(origin) w) ;
enddef ;
-
def graph_label_location primary p =
if pair p : graph_draw_label(p)
elseif numeric p : graph_draw_label(point p of pathpart graph_last_drawn)
@@ -737,33 +795,31 @@ def graph_label_location primary p =
fi
enddef ;
-
% Place label p at user graph coords t using with list w. (t is a time, a pair
% or 2 numerics or strings).
+
vardef glabel@#(expr p)(text t) text w =
graph_label_location graph_label_convert_user_to_internal(t) (@#,label@#(p)) w ; enddef;
-
% Place label p at user graph coords t using with list w and draw a dot there.
% (t is a time, a pair, or 2 numerics or strings).
+
vardef gdotlabel@#(expr p)(text t) text w =
graph_label_location graph_label_convert_user_to_internal(t) (@#,dotlabel@#(p)) w ; enddef;
-
def OUT = enddef ; % location text for outside labels
-
-
%%%%%%%%%%%%%%%%%%%%%%%%%% Grid lines, ticks, etc. %%%%%%%%%%%%%%%%%%%%%%%%%%
% Grid lines and tick marks are transformed versions of the templates below.
% In the template paths, (0,0) is on the edge of the frame and inward is to
% the right.
-path Gtemplate.tick, Gtemplate.itick, Gtemplate.otick, Gtemplate.grid ;
-Gtemplate.tick = (-3.5bp,0)--(3.5bp,0) ;
-Gtemplate.itick = origin--(7bp,0) ;
-Gtemplate.otick = (-7bp,0)--origin ;
-Gtemplate.grid = origin--(1,0) ;
+
+path graph_template.tick, graph_template.itick, graph_template.otick, graph_template.grid ;
+graph_template.tick = (-3.5bp,0)--(3.5bp,0) ;
+graph_template.itick = origin--(7bp,0) ;
+graph_template.otick = (-7bp,0)--origin ;
+graph_template.grid = origin--(1,0) ;
vardef tick@#(expr f,u) text w = graph_tick_label(@#,@,false,f,u,w) ; enddef;
@@ -774,75 +830,82 @@ vardef otick@#(expr f,u) text w = graph_tick_label(@#,@,false,f,u,w) ; enddef;
vardef grid@#(expr f,u) text w = graph_tick_label(@#,@,true,f,u,w) ; enddef;
-% Produce a tick or grid mark for label suffix $, Gtemplate suffix $$,
-% coordinate value u, and with list w. Boolean c tells whether Gtemplate$$
+% Produce a tick or grid mark for label suffix $, graph_template suffix $$,
+% coordinate value u, and with list w. Boolean c tells whether graph_template$$
% needs scaling by X_.graph_dimensions or Y_.graph_dimensions,
% and f gives a format string or a label picture.
+
def graph_tick_label(suffix $,$$)(expr c, f, u)(text w) =
- graph_draw_label(graph_label_convert_user_to_internal(graph_generate_label_position($,u)),,draw graph_gridline_picture$($$,c,f,u,w) shifted)
+ graph_draw_label(graph_label_convert_user_to_internal(graph_generate_label_position($,u)),,
+ draw graph_gridline_picture$($$,c,f,u,w) shifted)
enddef ;
-
% Generate label positioning arguments appropriate for label suffix $ and
% coordinate u.
+
def graph_generate_label_position(suffix $)(expr u) =
- if xpart mfun_laboff.$=0 : u,whatever else : whatever,u fi
+ if pair u : u elseif xpart mfun_laboff.$=0 : u,whatever else : whatever,u fi
enddef ;
-
% Generate a picture of a grid line labeled with coordinate value u, picture
% or format string f, and with list w. Suffix @# is bot, top, lft, or rt,
-% suffix $ identifies entries in the Gtemplate table, and boolean c tells
-% whether to scale Gtemplate$.
+% suffix $ identifies entries in the graph_template table, and boolean c tells
+% whether to scale graph_template$.
+
vardef graph_gridline_picture@#(suffix $)(expr c, f, u)(text w) =
if unknown u : graph_error(u,"Label coordinate should be known") ; nullpicture
else :
save p ; path p;
interim warningcheck :=0 ;
graph_autogrid_needed :=false ;
- p = Gtemplate$ zscaled -mfun_laboff@#
- if c : Gxyscale fi
- shifted (((.5 + mfun_laboff@# dotprod (.5,.5)) * mfun_laboff@#) Gxyscale) ;
+ p = graph_template$ zscaled -mfun_laboff@#
+ if c : graph_xyscale fi
+ shifted (((.5 + mfun_laboff@# dotprod (.5,.5)) * mfun_laboff@#) graph_xyscale) ;
image(draw p w ;
label@#(if string f : format(f,u) else : f fi, point 0 of p))
fi
enddef ;
-def Gxyscale = xscaled X_.graph_dimensions yscaled Y_.graph_dimensions enddef ;
+def graph_xyscale = xscaled X_.graph_dimensions yscaled Y_.graph_dimensions enddef ;
% Draw the frame or the part corresponding to label suffix @# using with list w.
+
vardef frame@# text w =
graph_frame_needed :=false ;
picture p_ ;
p_ = image(draw
if str@#<>"" : subpath round(angle mfun_laboff@#*graph_frame_pair_a+graph_frame_pair_b) of fi
- unitsquare Gxyscale w) ;
+ unitsquare graph_xyscale w) ;
graph_draw_label((whatever,whatever),,draw p_ shifted) ;
enddef ;
-pair graph_frame_pair_a ; graph_frame_pair_a=(1,1)/90; % unitsquare subpath is linear in label angle
-pair graph_frame_pair_b ; graph_frame_pair_b=(.75,2.25);
-
-
+pair graph_frame_pair_a ; graph_frame_pair_a=(1,1)/90; % unitsquare subpath is linear in label angle
+pair graph_frame_pair_b ; graph_frame_pair_b=(.75,2.25);
%%%%%%%%%%%%%%%%%%%%%%%%%% Automatic grid selection %%%%%%%%%%%%%%%%%%%%%%%%%%
-string Glmarks[] ; % marking options per decade for logarithmic scales
-string Gumarks ; % mark spacing options per decade for linear scales
-string Gemarks ; % exponent spacing options for logarithmic scales
-newinternal Gmarks, Gminlog ;
-Gmarks := 4 ; % minimum number marks generated by auto.x or auto.y
-Gminlog := mlog 3 ; % revert to uniform marks when largest/smallest < this
+string graph_log_marks[] ; % marking options per decade for logarithmic scales
+string graph_lin_marks ; % mark spacing options per decade for linear scales
+string graph_exp_marks ; % exponent spacing options for logarithmic scales
+newinternal graph_minimum_number_of_marks, graph_log_minimum ;
+graph_minimum_number_of_marks := 4 ;        % minimum number of marks generated by auto.x or auto.y
+graph_log_minimum := mlog 3 ; % revert to uniform marks when largest/smallest < this
+
+def Gfor(text t) = for i=t endfor enddef ; % to shorten the mark templates below
-def Gfor(text t) = for i=t endfor enddef ; % to shorten the mark templates below
-Glmarks[1]="1,2,5" ;
-Glmarks[2]="1,1.5,2,3,4,5,7" ;
-Glmarks[3]="1Gfor(6upto10 :,i/5)Gfor(5upto10 :,i/2)Gfor(6upto9 :,i)" ;
-Glmarks[4]="1Gfor(11upto20 :,i/10)Gfor(11upto25 :,i/5)Gfor(11upto19 :,i/2)" ;
-Glmarks[5]="1Gfor(21upto40 :,i/20)Gfor(21upto50 :,i/10)Gfor(26upto49 :,i/5)" ;
-Gumarks="10,5,2" ; % start with 10 and go down; a final `,1' is appended
-Gemarks="20,10,5,2,1" ;
+graph_log_marks[1]="1,2,5" ;
+graph_log_marks[2]="1,1.5,2,3,4,5,7" ;
+graph_log_marks[3]="1Gfor(6upto10 :,i/5)Gfor(5upto10 :,i/2)Gfor(6upto9 :,i)" ;
+graph_log_marks[4]="1Gfor(11upto20 :,i/10)Gfor(11upto25 :,i/5)Gfor(11upto19 :,i/2)" ;
+graph_log_marks[5]="1Gfor(21upto40 :,i/20)Gfor(21upto50 :,i/10)Gfor(26upto49 :,i/5)" ;
+graph_lin_marks="10,5,2" ; % start with 10 and go down; a final `,1' is appended
+graph_exp_marks="20,10,5,2,1" ;
+Ten_to0 = 1 ;
+Ten_to1 = 10 ;
+Ten_to2 = 100 ;
+Ten_to3 = 1000 ;
+Ten_to4 = 10000 ;
% Determine the X_ or Y_ bounds on the range to be covered by automatic grid
% marks. Suffix @# is X_ or Y_. The result is log or linear to specify the
@@ -851,6 +914,7 @@ Gemarks="20,10,5,2,1" ;
% are upper and lower bounds in
% `modified exponential form'. In modified exponential form, (x,y) means
% (x/1000)*10^y, where 1000<=abs x<10000.
+
vardef graph_bounds@# =
interim warningcheck :=0 ;
save l, h ;
@@ -859,28 +923,29 @@ vardef graph_bounds@# =
if abs @#graph_coordinate_type=log :
graph_modified_lower := graph_Meform(l)+graph_modified_bias ;
graph_modified_higher := graph_Meform(h)+graph_modified_bias ;
- if h-l >= Gminlog : log else : linear fi
+ if h-l >= graph_log_minimum : log else : linear fi
else :
graph_modified_lower := graph_Feform(l)+graph_modified_bias ;
graph_modified_higher := graph_Feform(h)+graph_modified_bias ;
linear
fi
enddef ;
+
pair graph_modified_bias ; graph_modified_bias=(0,3);
pair graph_modified_lower, graph_modified_higher ;
+% Scan graph_log_marks[k] and evaluate tokens t for each m where l<=m<=h.
-% Scan Glmarks[k] and evaluate tokens t for each m where l<=m<=h.
def graph_scan_marks(expr k, l, h)(text t) =
- for m=scantokens Glmarks[k] :
+ for m=scantokens graph_log_marks[k] :
exitif m>h ;
if m>=l : t fi
endfor
enddef ;
-
-% Scan Gmark[k] and evaluate tokens t for each m and e where m*10^e belongs
+% Scan graph_log_marks[k] and evaluate tokens t for each m and e where m*10^e belongs
% between l and h (inclusive), where both l and h are in modified exponent form.
+
def graph_scan_mark(expr k, l, h)(text t) =
for e=ypart l upto ypart h :
graph_scan_marks(k, if e>ypart l : 1 else : xpart l/1000 fi,
@@ -888,27 +953,29 @@ def graph_scan_mark(expr k, l, h)(text t) =
endfor
enddef ;
-
% Select a k for which graph_scan_mark(k,...) gives enough marks.
+
vardef graph_select_mark =
save k ;
k = 0 ;
forever :
- exitif unknown Glmarks[k+1] ;
- exitif 0 graph_scan_mark(incr k, graph_modified_lower, graph_modified_higher, +1) >= Gmarks ;
+ exitif unknown graph_log_marks[k+1] ;
+ exitif 0 graph_scan_mark(incr k, graph_modified_lower, graph_modified_higher, +1)
+ >= graph_minimum_number_of_marks ;
endfor
k
enddef ;
-
-% Try to select an exponent spacing from Gemarks. If successful, set @# and
+% Try to select an exponent spacing from graph_exp_marks. If successful, set @# and
% return true
+
vardef graph_select_exponent_mark@# =
numeric @# ;
- for e=scantokens Gemarks :
+ for e=scantokens graph_exp_marks :
@# = e ;
exitif floor(ypart graph_modified_higher/e) -
- floor(graph_modified_exponent_ypart(graph_modified_lower)/e) >= Gmarks ;
+ floor(graph_modified_exponent_ypart(graph_modified_lower)/e)
+ >= graph_minimum_number_of_marks ;
numeric @# ;
endfor
known @#
@@ -916,17 +983,17 @@ enddef ;
vardef graph_modified_exponent_ypart(expr p) = ypart p if xpart p=1000 : -1 fi enddef ;
-
% Compute the mark spacing d between xpart graph_modified_lower and xpart graph_modified_higher.
+
vardef graph_tick_mark_spacing =
interim warningcheck :=0 ;
save m, n, d ;
- m = Gmarks ;
+ m = graph_minimum_number_of_marks ;
n = 1 for i=1 upto
(mlog(xpart graph_modified_higher-xpart graph_modified_lower) - mlog m)/mlogten :
*10 endfor ;
if n<=1000 :
- for x=scantokens Gumarks :
+ for x=scantokens graph_lin_marks :
d = n*x ;
exitif 0 graph_generate_numbers(d,+1)>=m ;
numeric d ;
@@ -935,25 +1002,24 @@ vardef graph_tick_mark_spacing =
if known d : d else : n fi
enddef ;
-
def graph_generate_numbers(expr d)(text t) =
for m = d*ceiling(xpart graph_modified_lower/d) step d until xpart graph_modified_higher :
t
endfor
enddef ;
-
% Evaluate tokens t for exponents e in multiples of d in the range determined
% by graph_modified_lower and graph_modified_higher.
+
def graph_generate_exponents(expr d)(text t) =
for e = d*floor(graph_modified_exponent_ypart(graph_modified_lower)/d+1)
step d until d*floor(ypart graph_modified_higher/d) : t
endfor
enddef ;
-
% Adjust graph_modified_lower and graph_modified_higher so their exponent parts match
% and they are in true exponent form ((x,y) means x*10^y). Return the new exponent.
+
vardef graph_match_exponents =
interim warningcheck := 0 ;
save e ;
@@ -966,10 +1032,10 @@ vardef graph_match_exponents =
e
enddef ;
-
% Assume e is an integer and either m=0 or 1<=abs(m)<10000. Find m*(10^e)
% and represent the result as a string if its absolute value would be at least
% 4096 or less than .1. It is OK to return 0 as a string or a numeric.
+
vardef graph_factor_and_exponent_to_string(expr m, e) =
if (e>3)or(e<-4) :
decimal m & "e" & decimal e
@@ -984,7 +1050,6 @@ vardef graph_factor_and_exponent_to_string(expr m, e) =
fi
enddef ;
-
def auto suffix $ =
hide(def graph_comma= hide(def graph_comma=,enddef) enddef)
if graph_bounds.graph_suffix($)=log :
@@ -1002,7 +1067,6 @@ def auto suffix $ =
fi
enddef ;
-
string Autoform ; Autoform = "%g";
%vardef autogrid(suffix tx, ty) text w =
@@ -1053,12 +1117,11 @@ vardef autogrid(suffix tx, ty) text w =
fi
enddef ;
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% endgraph %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def endgraph =
if graph_autogrid_needed : autogrid(otick.bot, otick.lft) ; fi
- if graph_frame_needed : frame ; fi
+ if graph_frame_needed : frame ; fi
setcoords(linear,linear) ;
interim truecorners :=1 ;
for b=bbox graph_finished_graph :
@@ -1075,14 +1138,9 @@ enddef ;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% We format in luatex (using \mathematics{}) ...
% we could pass via variables and save the escaping, as that is inefficient
-Ten_to0 = 1 ;
-Ten_to1 = 10 ;
-Ten_to2 = 100 ;
-Ten_to3 = 1000 ;
-Ten_to4 = 10000 ;
-
if unknown context_mlib :
vardef escaped_format(expr s) =
@@ -1108,6 +1166,8 @@ if unknown context_mlib :
fi ;
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
% A couple of extensions :
% Define a function plotsymbol() returning a picture : 10 different shapes,
diff --git a/metapost/context/base/mp-luas.mpiv b/metapost/context/base/mp-luas.mpiv
new file mode 100644
index 000000000..c919ba215
--- /dev/null
+++ b/metapost/context/base/mp-luas.mpiv
@@ -0,0 +1,99 @@
+%D \module
+%D [ file=mp-luas.mpiv,
+%D version=2014.04.14,
+%D title=\CONTEXT\ \METAPOST\ graphics,
+%D subtitle=\LUA,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+if known context_luas : endinput ; fi ;
+
+% When I prototyped the runscript primitive I was just thinking of a usage like
+% the original \directlua primitive in luatex: generate something and pipe
+% that back to metapost, and have access to some internals. Instead of compiling
+% the code at the metapost end, here we delegate that to the lua end. Only strings
+% get passed. Of course in the end the real usage went a bit beyond the intended
+% usage. So, in addition to the definitions here, there are and will be uses in
+% other metafun modules too. In retrospect I should have done this five
+% years earlier.
+
+boolean context_luas ; context_luas := true ;
+
+% First variant:
+%
+% let lua = runscript ;
+%
+% Second variant:
+%
+% vardef lua (text t) =
+% runscript(for s = t : s & endfor "")
+% enddef;
+%
+% Third variant:
+%
+% vardef lua (text t) =
+% runscript("" for s = t :
+% if string s :
+% & s
+% elseif numeric s :
+% & decimal s
+% elseif boolean s :
+% & if s : "true" else "false" fi
+% fi endfor)
+% enddef;
+%
+% Fourth variant:
+
+vardef mlib_luas_luacall(text t) =
+ runscript("" for s = t :
+ if string s :
+ & s
+ elseif numeric s :
+ & decimal s
+ elseif boolean s :
+            & if s : "true" else : "false" fi
+ fi endfor
+ )
+enddef ;
+
+vardef mlib_luas_lualist(expr c)(text t) =
+ save b ; boolean b ; b := false ;
+ runscript(c & "(" for s = t :
+ if b :
+ & ","
+ else :
+ hide(b := true)
+ fi
+ if string s :
+ & ditto & s & ditto
+ elseif numeric s :
+ & decimal s
+ elseif boolean s :
+            & if s : "true" else : "false" fi
+ fi endfor & ")"
+ )
+enddef ;
+
+def luacall = mlib_luas_luacall enddef ; % why no let
+
+vardef lualist@#(text t) = mlib_luas_lualist(str @#)(t) enddef ;
+
+string mlib_luas_s ; % saves save/restore
+
+vardef lua@#(text t) =
+ mlib_luas_s := str @# ;
+ if length(mlib_luas_s) > 0 :
+ mlib_luas_lualist(mlib_luas_s,t)
+ else :
+ mlib_luas_luacall(t)
+ fi
+enddef ;
+
+vardef MP@#(text t) =
+ mlib_luas_lualist("MP." & str @#,t)
+enddef ;
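+
+% A usage sketch (not part of the module itself): "mp.report" is a hypothetical
+% function at the lua end, used here only to show the calling conventions.
+%
+%   lua("print('hello from metapost')") ;   % one concatenated string, run as lua
+%   lua.mp.report("value", 123, true) ;     % becomes mp.report("value",123,true)
+%   MP.report("value", 123, true) ;         % becomes MP.report("value",123,true)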
diff --git a/metapost/context/base/mp-mlib.mpiv b/metapost/context/base/mp-mlib.mpiv
index 12840b28e..b19f47f1e 100644
--- a/metapost/context/base/mp-mlib.mpiv
+++ b/metapost/context/base/mp-mlib.mpiv
@@ -60,32 +60,70 @@ def namedcolor (expr n) =
withprescript "sp_name=" & n
enddef ;
+% def spotcolor(expr n, v) =
+% 1
+% withprescript "sp_type=spot"
+% withprescript "sp_name=" & n
+% withprescript "sp_value=" & (if numeric v : decimal v else : v fi)
+% enddef ;
+%
+% def multitonecolor(expr name, fractions, components, value) =
+% 1
+% withprescript "sp_type=multitone"
+% withprescript "sp_name=" & name
+% withprescript "sp_fractions=" & decimal fractions
+% withprescript "sp_components=" & components
+% withprescript "sp_value=" & value
+% enddef ;
+
def spotcolor(expr n, v) =
1
withprescript "sp_type=spot"
withprescript "sp_name=" & n
- withprescript "sp_value=" & v
+ withprescript "sp_value=" & colordecimals v
enddef ;
-def multitonecolor(expr name, fractions, components, value) =
+def multitonecolor(expr name)(text t) =
1
withprescript "sp_type=multitone"
- withprescript "sp_name=" & name
- withprescript "sp_fractions=" & decimal fractions
- withprescript "sp_components=" & components
- withprescript "sp_value=" & value
+ withprescript "sp_name=" & name
+ withprescript "sp_value=" & colordecimalslist(t)
enddef ;
-def transparent(expr alternative, transparency)(text c) =
+def transparent(expr a, t)(text c) = % use withtransparency instead
1 % this permits withcolor x intoshade y
- withprescript "tr_alternative=" & decimal transparency_alternative_to_number(alternative)
- withprescript "tr_transparency=" & decimal transparency
+ withprescript "tr_alternative=" & decimal transparency_alternative_to_number(a)
+ withprescript "tr_transparency=" & decimal t
withcolor c
enddef ;
-def withtransparency(expr alternative, transparency) =
- withprescript "tr_alternative=" & decimal transparency_alternative_to_number(alternative)
- withprescript "tr_transparency=" & decimal transparency
+% def withtransparency(expr a, t) =
+% withprescript "tr_alternative=" & decimal transparency_alternative_to_number(a)
+% withprescript "tr_transparency=" & decimal t
+% enddef ;
+
+let transparency = pair ;
+
+% def withtransparency expr t =
+% withprescript "tr_alternative=" & decimal transparency_alternative_to_number(xpart t)
+% withprescript "tr_transparency=" & decimal ypart t
+% enddef ;
+%
+% withtransparency (1,.5)
+% withtransparency ("normal",.5)
+
+def withtransparency (expr t) (text rest) =
+ if pair t :
+ withprescript "tr_alternative=" & decimal transparency_alternative_to_number(xpart t)
+ withprescript "tr_transparency=" & decimal ypart t
+ else :
+ mfun_with_transparency (transparency_alternative_to_number(t))
+ fi rest
+enddef ;
+
+def mfun_with_transparency (expr a) expr t =
+ withprescript "tr_alternative=" & decimal a
+ withprescript "tr_transparency=" & decimal t
enddef ;
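+
+% For example (a sketch using the call forms shown above) :
+%
+%   fill fullcircle scaled 2cm withcolor red withtransparency ("normal",.5) ;
+%   fill fullcircle scaled 2cm shifted (1cm,0) withcolor blue withtransparency (1,.5) ;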
def cmyk(expr c, m, y, k) = % provided for downward compability
@@ -96,7 +134,8 @@ enddef ;
newinternal textextoffset ; textextoffset := 0 ;
-numeric mfun_tt_w[], mfun_tt_h[], mfun_tt_d[] ; % we can consider using colors (less hash space)
+%%%%%%% mfun_tt_w[], mfun_tt_h[], mfun_tt_d[] ; % we can consider using colors (less hash space)
+color mfun_tt_b ;
numeric mfun_tt_n ; mfun_tt_n := 0 ;
picture mfun_tt_p ; mfun_tt_p := nullpicture ;
picture mfun_tt_o ; mfun_tt_o := nullpicture ;
@@ -135,44 +174,10 @@ extra_beginfig := extra_beginfig & "mfun_reset_tex_texts ;" ;
% flush twice: once in location in order to pick up e.g. color properties,
% and once at the end because we need to flush missing ones.
-% vardef rawtextext(expr str) =
-% if str = "" :
-% nullpicture
-% elseif mfun_trial_run :
-% mfun_tt_n := mfun_tt_n + 1 ;
-% mfun_tt_o := image(draw origin) ; % save drawoptions
-% addto mfun_tt_p doublepath unitsquare
-% withprescript "tx_number=" & decimal mfun_tt_n
-% withprescript "tx_stage=extra"
-% withpostscript str ;
-% image (
-% addto currentpicture doublepath unitsquare
-% withprescript "tx_number=" & decimal mfun_tt_n
-% withprescript "tx_stage=trial"
-% withprescript "tx_color=" & colordecimals colorpart mfun_tt_o
-% withpostscript str
-% ; )
-% else :
-% mfun_tt_n := mfun_tt_n + 1 ;
-% if known mfun_tt_d[mfun_tt_n] :
-% image (
-% addto currentpicture doublepath unitsquare
-% xscaled mfun_tt_w[mfun_tt_n]
-% yscaled (mfun_tt_h[mfun_tt_n] + mfun_tt_d[mfun_tt_n])
-% withprescript "tx_number=" & decimal mfun_tt_n
-% withprescript "tx_stage=final"
-% % withpostscript str ; % for tracing
-% ; ) shifted (0,-mfun_tt_d[mfun_tt_n])
-% else :
-% image (
-% addto currentpicture doublepath unitsquare
-% ; )
-% fi
-% fi
-% enddef ;
+% see mp-keep.mpiv for older code
-% vardef rawtextext(expr str) = % todo: avoid currentpicture
-% if str = "" :
+% vardef rawtextext(expr s) = % todo: avoid currentpicture
+% if s = "" :
% nullpicture
% else :
% mfun_tt_n := mfun_tt_n + 1 ;
@@ -180,15 +185,12 @@ extra_beginfig := extra_beginfig & "mfun_reset_tex_texts ;" ;
% if mfun_trial_run :
% mfun_tt_o := nullpicture ;
% addto mfun_tt_o doublepath origin _op_ ; % save drawoptions
-% addto mfun_tt_p doublepath unitsquare
-% withprescript "tx_number=" & decimal mfun_tt_n
-% withprescript "tx_stage=extra"
-% withpostscript str ;
% addto mfun_tt_c doublepath unitsquare
% withprescript "tx_number=" & decimal mfun_tt_n
% withprescript "tx_stage=trial"
% withprescript "tx_color=" & colordecimals colorpart mfun_tt_o
-% withpostscript str ;
+% withpostscript s ;
+% addto mfun_tt_p also mfun_tt_c ;
% elseif known mfun_tt_d[mfun_tt_n] :
% addto mfun_tt_c doublepath unitsquare
% xscaled mfun_tt_w[mfun_tt_n]
@@ -203,8 +205,8 @@ extra_beginfig := extra_beginfig & "mfun_reset_tex_texts ;" ;
% fi
% enddef ;
-vardef rawtextext(expr str) = % todo: avoid currentpicture
- if str = "" :
+vardef rawtextext(expr s) = % todo: avoid currentpicture
+ if s = "" :
nullpicture
else :
mfun_tt_n := mfun_tt_n + 1 ;
@@ -216,17 +218,16 @@ vardef rawtextext(expr str) = % todo: avoid currentpicture
withprescript "tx_number=" & decimal mfun_tt_n
withprescript "tx_stage=trial"
withprescript "tx_color=" & colordecimals colorpart mfun_tt_o
- withpostscript str ;
+ withpostscript s ;
addto mfun_tt_p also mfun_tt_c ;
- elseif known mfun_tt_d[mfun_tt_n] :
+ else :
+ mfun_tt_b := lua.mp.tt_dimensions(mfun_tt_n) ;
addto mfun_tt_c doublepath unitsquare
- xscaled mfun_tt_w[mfun_tt_n]
- yscaled (mfun_tt_h[mfun_tt_n] + mfun_tt_d[mfun_tt_n])
- shifted (0,-mfun_tt_d[mfun_tt_n])
+ xscaled redpart mfun_tt_b
+ yscaled (greenpart mfun_tt_b + bluepart mfun_tt_b)
+ shifted (0,- bluepart mfun_tt_b)
withprescript "tx_number=" & decimal mfun_tt_n
withprescript "tx_stage=final" ;
- else :
- addto mfun_tt_c doublepath unitsquare ; % unitpicture
fi ;
mfun_tt_c
fi
@@ -234,7 +235,10 @@ enddef ;
% More text
-defaultfont := "Mono" ; % was cmr10, could be lmmono10-regular, but is fed into context anyway
+defaultfont := "Mono" ;
+defaultscale := 1 ;
+
+extra_beginfig := extra_beginfig & "defaultscale:=1;" ;
vardef fontsize expr name =
save size ; numeric size ;
@@ -366,6 +370,8 @@ vardef thetextext@#(expr p,z) =
% interim labeloffset := textextoffset ;
if string p :
thetextext@#(rawtextext(p),z)
+ elseif numeric p :
+ thetextext@#(rawtextext(decimal p),z)
else :
p
if (mfun_labtype@# >= 10) :
@@ -401,19 +407,171 @@ enddef ;
let normalinfont = infont ;
-primarydef str infont name = % nasty hack
+primarydef s infont name = % nasty hack
if name = "" :
- textext(str)
+ textext(s)
else :
- textext("\definedfont[" & name & "]" & str)
+ textext("\definedfont[" & name & "]" & s)
fi
enddef ;
+% Helper
+
+string mfun_prescript_separator ; mfun_prescript_separator := char(13) ;
+
% Shades
-newinternal shadefactor ; shadefactor := 1 ;
-pair shadeoffset ; shadeoffset := origin ;
-boolean trace_shades ; trace_shades := false ;
+% for a while we had this:
+
+newinternal shadefactor ; shadefactor := 1 ; % currently obsolete
+pair shadeoffset ; shadeoffset := origin ; % currently obsolete
+boolean trace_shades ; trace_shades := false ; % still there
+
+% def withlinearshading (expr a, b) =
+% withprescript "sh_type=linear"
+% withprescript "sh_domain=0 1"
+% withprescript "sh_factor=" & decimal shadefactor
+% withprescript "sh_center_a=" & ddecimal (a shifted shadeoffset)
+% withprescript "sh_center_b=" & ddecimal (b shifted shadeoffset)
+% enddef ;
+%
+% def withcircularshading (expr a, b, ra, rb) =
+% withprescript "sh_type=circular"
+% withprescript "sh_domain=0 1"
+% withprescript "sh_factor=" & decimal shadefactor
+% withprescript "sh_center_a=" & ddecimal (a shifted shadeoffset)
+% withprescript "sh_center_b=" & ddecimal (b shifted shadeoffset)
+% withprescript "sh_radius_a=" & decimal ra
+% withprescript "sh_radius_b=" & decimal rb
+% enddef ;
+%
+% def withshading (expr how)(text rest) =
+% if how = "linear" :
+% withlinearshading(rest)
+% elseif how = "circular" :
+% withcircularshading(rest)
+% else :
+% % nothing
+% fi
+% enddef ;
+%
+% def withfromshadecolor expr t =
+% withprescript "sh_color=into"
+% withprescript "sh_color_a=" & colordecimals t
+% enddef ;
+
+% def withtoshadecolor expr t =
+% withprescript "sh_color=into"
+% withprescript "sh_color_b=" & colordecimals t
+% enddef ;
+
+% but this is nicer
+
+path mfun_shade_path ;
+
+primarydef p withshademethod m =
+ hide(mfun_shade_path := p ;)
+ p
+ withprescript "sh_domain=0 1"
+ withprescript "sh_color=into"
+ withprescript "sh_color_a=" & colordecimals white
+ withprescript "sh_color_b=" & colordecimals black
+ if m = "linear" :
+ withprescript "sh_type=linear"
+ withprescript "sh_factor=1"
+ withprescript "sh_center_a=" & ddecimal llcorner p
+ withprescript "sh_center_b=" & ddecimal urcorner p
+ else :
+ withprescript "sh_type=circular"
+ withprescript "sh_factor=1.2"
+ withprescript "sh_center_a=" & ddecimal center p
+ withprescript "sh_center_b=" & ddecimal center p
+ withprescript "sh_radius_a=" & decimal 0
+ withprescript "sh_radius_b=" & decimal ( max (
+ (xpart center p - xpart llcorner p) ++ (ypart center p - ypart llcorner p),
+ (xpart center p - xpart ulcorner p) ++ (ypart ulcorner p - ypart center p),
+ (xpart lrcorner p - xpart center p) ++ (ypart center p - ypart lrcorner p),
+ (xpart urcorner p - xpart center p) ++ (ypart urcorner p - ypart center p)
+ ) )
+ fi
+enddef ;
+
+def withshadevector expr a =
+ withprescript "sh_center_a=" & ddecimal (point xpart a of mfun_shade_path)
+ withprescript "sh_center_b=" & ddecimal (point ypart a of mfun_shade_path)
+enddef ;
+
+def withshadecenter expr a =
+ withprescript "sh_center_a=" & ddecimal (
+ center mfun_shade_path shifted (
+ xpart a * bbwidth (mfun_shade_path)/2,
+ ypart a * bbheight(mfun_shade_path)/2
+ )
+ )
+enddef ;
+
+def withshadedomain expr d =
+ withprescript "sh_domain=" & ddecimal d
+enddef ;
+
+def withshadefactor expr f =
+ withprescript "sh_factor=" & decimal f
+enddef ;
+
+def withshadecolors (expr a, b) =
+ withprescript "sh_color=into"
+ withprescript "sh_color_a=" & colordecimals a
+ withprescript "sh_color_b=" & colordecimals b
+enddef ;
+
+primarydef a shadedinto b = % withcolor red shadedinto green
+ 1 % does not work with transparency
+ withprescript "sh_color=into"
+ withprescript "sh_color_a=" & colordecimals a
+ withprescript "sh_color_b=" & colordecimals b
+enddef ;
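+
+% A usage sketch of the nicer interface (only macros defined above are used) :
+%
+%   fill fullcircle scaled 3cm
+%     withshademethod "circular"
+%     withshadecolors (white,blue) ;
+%
+%   % equivalently one can say : withcolor white shadedinto blue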
+
+primarydef p withshade sc =
+ p withprescript mfun_defined_cs_pre[sc]
+enddef ;
+
+def defineshade suffix s =
+ mfun_defineshade(str s)
+enddef ;
+
+def mfun_defineshade (expr s) text t =
+ expandafter def scantokens s = t enddef ;
+enddef ;
+
+def shaded text s =
+ s
+enddef ;
+
+% Old macros:
+
+def withcircularshade (expr a, b, ra, rb, ca, cb) =
+ withprescript "sh_type=circular"
+ withprescript "sh_domain=0 1"
+ withprescript "sh_factor=1"
+ withprescript "sh_color_a=" & colordecimals ca
+ withprescript "sh_color_b=" & colordecimals cb
+ withprescript "sh_center_a=" & ddecimal a % (a shifted shadeoffset)
+ withprescript "sh_center_b=" & ddecimal b % (b shifted shadeoffset)
+ withprescript "sh_radius_a=" & decimal ra
+ withprescript "sh_radius_b=" & decimal rb
+enddef ;
+
+def withlinearshade (expr a, b, ca, cb) =
+ withprescript "sh_type=linear"
+ withprescript "sh_domain=0 1"
+ withprescript "sh_factor=1"
+ withprescript "sh_color_a=" & colordecimals ca
+ withprescript "sh_color_b=" & colordecimals cb
+ withprescript "sh_center_a=" & ddecimal a % (a shifted shadeoffset)
+ withprescript "sh_center_b=" & ddecimal b % (b shifted shadeoffset)
+enddef ;
+
+% replaced (obsolete):
def set_linear_vector (suffix a,b)(expr p,n) =
if (n=1) : a := llcorner p ; b := urcorner p ;
@@ -428,7 +586,7 @@ def set_linear_vector (suffix a,b)(expr p,n) =
fi ;
enddef ;
-def set_circular_vector (suffix ab, r)(expr p,n) =
+def set_circular_vector (suffix ab,r)(expr p,n) =
if (n=1) : ab := llcorner p ;
elseif (n=2) : ab := lrcorner p ;
elseif (n=3) : ab := urcorner p ;
@@ -451,7 +609,7 @@ enddef ;
def linear_shade (expr p, n, ca, cb) =
begingroup ;
- save a, b, sh ; pair a, b ;
+ save a, b ; pair a, b ;
set_linear_vector(a,b)(p,n) ;
fill p withlinearshade(a,b,ca,cb) ;
if trace_shades :
@@ -460,43 +618,19 @@ def linear_shade (expr p, n, ca, cb) =
endgroup ;
enddef ;
-def withcircularshade (expr a, b, ra, rb, ca, cb) =
- withprescript "sh_type=circular"
- withprescript "sh_domain=0 1"
- withprescript "sh_factor=" & decimal shadefactor
- withprescript "sh_color_a=" & colordecimals ca
- withprescript "sh_color_b=" & colordecimals cb
- withprescript "sh_center_a=" & ddecimal (a shifted shadeoffset)
- withprescript "sh_center_b=" & ddecimal (b shifted shadeoffset)
- withprescript "sh_radius_a=" & decimal ra
- withprescript "sh_radius_b=" & decimal rb
-enddef ;
-
-def withlinearshade (expr a, b, ca, cb) =
- withprescript "sh_type=linear"
- withprescript "sh_domain=0 1"
- withprescript "sh_factor=" & decimal shadefactor
- withprescript "sh_color_a=" & colordecimals ca
- withprescript "sh_color_b=" & colordecimals cb
- withprescript "sh_center_a=" & ddecimal (a shifted shadeoffset)
- withprescript "sh_center_b=" & ddecimal (b shifted shadeoffset)
-enddef ;
-
string mfun_defined_cs_pre[] ; numeric mfun_defined_cs ; mfun_defined_cs := 0 ;
-string mfun_prescript_separator ; mfun_prescript_separator := char(13) ;
-
vardef define_circular_shade (expr a, b, ra, rb, ca, cb) =
mfun_defined_cs := mfun_defined_cs + 1 ;
mfun_defined_cs_pre[mfun_defined_cs] := "sh_type=circular"
- & mfun_prescript_separator & "sh_domain=0 1"
- & mfun_prescript_separator & "sh_factor=" & decimal shadefactor
- & mfun_prescript_separator & "sh_color_a=" & colordecimals ca
- & mfun_prescript_separator & "sh_color_b=" & colordecimals cb
- & mfun_prescript_separator & "sh_center_a=" & ddecimal (a shifted shadeoffset)
- & mfun_prescript_separator & "sh_center_b=" & ddecimal (b shifted shadeoffset)
- & mfun_prescript_separator & "sh_radius_a=" & decimal ra
- & mfun_prescript_separator & "sh_radius_b=" & decimal rb
+ & mfun_prescript_separator & "sh_domain=0 1"
+ & mfun_prescript_separator & "sh_factor=1"
+ & mfun_prescript_separator & "sh_color_a=" & colordecimals ca
+ & mfun_prescript_separator & "sh_color_b=" & colordecimals cb
+ & mfun_prescript_separator & "sh_center_a=" & ddecimal a % (a shifted shadeoffset)
+ & mfun_prescript_separator & "sh_center_b=" & ddecimal b % (b shifted shadeoffset)
+ & mfun_prescript_separator & "sh_radius_a=" & decimal ra
+ & mfun_prescript_separator & "sh_radius_b=" & decimal rb
;
mfun_defined_cs
enddef ;
@@ -504,52 +638,49 @@ enddef ;
vardef define_linear_shade (expr a, b, ca, cb) =
mfun_defined_cs := mfun_defined_cs + 1 ;
mfun_defined_cs_pre[mfun_defined_cs] := "sh_type=linear"
- & mfun_prescript_separator & "sh_domain=0 1"
- & mfun_prescript_separator & "sh_factor=" & decimal shadefactor
- & mfun_prescript_separator & "sh_color_a=" & colordecimals ca
- & mfun_prescript_separator & "sh_color_b=" & colordecimals cb
- & mfun_prescript_separator & "sh_center_a=" & ddecimal (a shifted shadeoffset)
- & mfun_prescript_separator & "sh_center_b=" & ddecimal (b shifted shadeoffset)
+ & mfun_prescript_separator & "sh_domain=0 1"
+ & mfun_prescript_separator & "sh_factor=1"
+ & mfun_prescript_separator & "sh_color_a=" & colordecimals ca
+ & mfun_prescript_separator & "sh_color_b=" & colordecimals cb
+ & mfun_prescript_separator & "sh_center_a=" & ddecimal a % (a shifted shadeoffset)
+ & mfun_prescript_separator & "sh_center_b=" & ddecimal b % (b shifted shadeoffset)
;
mfun_defined_cs
enddef ;
-primarydef p withshade sc =
- p withprescript mfun_defined_cs_pre[sc]
-enddef ;
-
-
-vardef define_sampled_linear_shade(expr a,b,n)(text t) =
- mfun_defined_cs := mfun_defined_cs + 1 ;
- mfun_defined_cs_pre[mfun_defined_cs] := "ssh_type=linear"
- & mfun_prescript_separator & "ssh_center_a=" & ddecimal (a shifted shadeoffset)
- & mfun_prescript_separator & "ssh_center_b=" & ddecimal (b shifted shadeoffset)
- & mfun_prescript_separator & "ssh_nofcolors=" & decimal n
- & mfun_prescript_separator & "ssh_domain=" & domstr
- & mfun_prescript_separator & "ssh_extend=" & extstr
- & mfun_prescript_separator & "ssh_colors=" & colstr
- & mfun_prescript_separator & "ssh_bounds=" & bndstr
- & mfun_prescript_separator & "ssh_ranges=" & ranstr
- ;
- mfun_defined_cs
-enddef ;
-
-vardef define_sampled_circular_shade(expr a,b,ra,rb,n)(text t) =
- mfun_defined_cs := mfun_defined_cs + 1 ;
- mfun_defined_cs_pre[mfun_defined_cs] := "ssh_type=circular"
- & mfun_prescript_separator & "ssh_center_a=" & ddecimal (a shifted shadeoffset)
- & mfun_prescript_separator & "ssh_radius_a=" & decimal ra
- & mfun_prescript_separator & "ssh_center_b=" & ddecimal (b shifted shadeoffset)
- & mfun_prescript_separator & "ssh_radius_b=" & decimal rb
- & mfun_prescript_separator & "ssh_nofcolors=" & decimal n
- & mfun_prescript_separator & "ssh_domain=" & domstr
- & mfun_prescript_separator & "ssh_extend=" & extstr
- & mfun_prescript_separator & "ssh_colors=" & colstr
- & mfun_prescript_separator & "ssh_bounds=" & bndstr
- & mfun_prescript_separator & "ssh_ranges=" & ranstr
- ;
- mfun_defined_cs
-enddef ;
+% I lost the example code that uses this:
+%
+% vardef define_sampled_linear_shade(expr a,b,n)(text t) =
+% mfun_defined_cs := mfun_defined_cs + 1 ;
+% mfun_defined_cs_pre[mfun_defined_cs] := "ssh_type=linear"
+% & mfun_prescript_separator & "ssh_center_a=" & ddecimal (a shifted shadeoffset)
+% & mfun_prescript_separator & "ssh_center_b=" & ddecimal (b shifted shadeoffset)
+% & mfun_prescript_separator & "ssh_nofcolors=" & decimal n
+% & mfun_prescript_separator & "ssh_domain=" & domstr
+% & mfun_prescript_separator & "ssh_extend=" & extstr
+% & mfun_prescript_separator & "ssh_colors=" & colstr
+% & mfun_prescript_separator & "ssh_bounds=" & bndstr
+% & mfun_prescript_separator & "ssh_ranges=" & ranstr
+% ;
+% mfun_defined_cs
+% enddef ;
+%
+% vardef define_sampled_circular_shade(expr a,b,ra,rb,n)(text t) =
+% mfun_defined_cs := mfun_defined_cs + 1 ;
+% mfun_defined_cs_pre[mfun_defined_cs] := "ssh_type=circular"
+% & mfun_prescript_separator & "ssh_center_a=" & ddecimal (a shifted shadeoffset)
+% & mfun_prescript_separator & "ssh_radius_a=" & decimal ra
+% & mfun_prescript_separator & "ssh_center_b=" & ddecimal (b shifted shadeoffset)
+% & mfun_prescript_separator & "ssh_radius_b=" & decimal rb
+% & mfun_prescript_separator & "ssh_nofcolors=" & decimal n
+% & mfun_prescript_separator & "ssh_domain=" & domstr
+% & mfun_prescript_separator & "ssh_extend=" & extstr
+% & mfun_prescript_separator & "ssh_colors=" & colstr
+% & mfun_prescript_separator & "ssh_bounds=" & bndstr
+% & mfun_prescript_separator & "ssh_ranges=" & ranstr
+% ;
+% mfun_defined_cs
+% enddef ;
% vardef predefined_linear_shade (expr p, n, ca, cb) =
% save a, b, sh ; pair a, b ;
@@ -564,53 +695,6 @@ enddef ;
% define_circular_shade(ab,ab,0,r,ca,cb)
% enddef ;
-% NEW EXPERIMENTAL CODE
-
-def withlinearshading (expr a, b) =
- withprescript "sh_type=linear"
- withprescript "sh_domain=0 1"
- withprescript "sh_factor=" & decimal shadefactor
- withprescript "sh_center_a=" & ddecimal (a shifted shadeoffset)
- withprescript "sh_center_b=" & ddecimal (b shifted shadeoffset)
-enddef ;
-
-def withcircularshading (expr a, b, ra, rb) =
- withprescript "sh_type=circular"
- withprescript "sh_domain=0 1"
- withprescript "sh_factor=" & decimal shadefactor
- withprescript "sh_center_a=" & ddecimal (a shifted shadeoffset)
- withprescript "sh_center_b=" & ddecimal (b shifted shadeoffset)
- withprescript "sh_radius_a=" & decimal ra
- withprescript "sh_radius_b=" & decimal rb
-enddef ;
-
-def withfromshadecolor expr t =
- withprescript "sh_color=into"
- withprescript "sh_color_a=" & colordecimals t
-enddef ;
-
-def withtoshadecolor expr t =
- withprescript "sh_color=into"
- withprescript "sh_color_b=" & colordecimals t
-enddef ;
-
-def withshading (expr how)(text rest) =
- if how = "linear" :
- withlinearshading(rest)
- elseif how = "circular" :
- withcircularshading(rest)
- else :
- % nothing
- fi
-enddef ;
-
-primarydef a shadedinto b =
- 1 % does not work with transparency
- withprescript "sh_color=into"
- withprescript "sh_color_a=" & colordecimals a
- withprescript "sh_color_b=" & colordecimals b
-enddef ;
-
% Layers
def onlayer primary name =
@@ -655,10 +739,10 @@ enddef ;
% Positions
-def register (expr label, width, height, offset) =
+def register (expr tag, width, height, offset) =
% draw image (
addto currentpicture doublepath unitsquare xscaled width yscaled height shifted offset
- withprescript "ps_label=" & label ;
+ withprescript "ps_label=" & tag ;
% ) ; % no transformations
enddef ;
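+
+% For instance (illustrative values only; the tag is whatever the position
+% mechanism at the tex end expects) :
+%
+%   register("somearea", 3cm, 1cm, origin) ;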
@@ -670,8 +754,8 @@ extra_endfig := extra_endfig & "mfun_reset_tex_texts ; " ;
% Bonus
-vardef verbatim(expr str) =
- ditto & "\detokenize{" & str & "}" & ditto
+vardef verbatim(expr s) =
+ ditto & "\detokenize{" & s & "}" & ditto
enddef ;
% New
@@ -747,7 +831,7 @@ primarydef t asgroup s = % s = isolated|knockout
endgroup
enddef ;
-% Also experimental
+% Also experimental ... needs to be made better ... so it can change!
string mfun_auto_align[] ;
@@ -781,6 +865,9 @@ enddef ;
% passvariable("boolean",false) ;
% passvariable("path",fullcircle scaled 1cm) ;
+% we could use the new lua interface but there is not that much gain i.e.
+% we still need to serialize
+
vardef mfun_point_to_string(expr p,i) =
decimal xpart (point i of p) & " " &
decimal ypart (point i of p) & " " &
@@ -821,6 +908,10 @@ vardef mfun_cmykcolor_to_string(expr c) =
decimal blackpart c
enddef ;
+vardef mfun_greycolor_to_string(expr n) =
+ decimal n
+enddef ;
+
vardef mfun_path_to_string(expr p) =
mfun_point_to_string(p,0) for i=1 upto length(p) : & " " & mfun_point_to_string(p,i) endfor
enddef ;
@@ -847,6 +938,7 @@ vardef tostring(expr value) =
elseif pair value : mfun_pair_to_string(value)
elseif rgbcolor value : mfun_rgbcolor_to_string(value)
elseif cmykcolor value : mfun_cmykcolor_to_string(value)
+ elseif greycolor value : mfun_greycolor_to_string(value)
elseif boolean value : mfun_boolean_to_string(value)
elseif path value : mfun_path_to_string(value)
elseif transform value : mfun_transform_to_string(value)
@@ -1042,23 +1134,80 @@ enddef ;
% moved here from mp-grap.mpiv
+% vardef escaped_format(expr s) =
+% "" for n=0 upto length(s) : &
+% if ASCII substring (n,n+1) of s = 37 :
+% "@"
+% else :
+% substring (n,n+1) of s
+% fi
+% endfor
+% enddef ;
+
+numeric mfun_esc_b ; % begin
+numeric mfun_esc_l ; % length
+string mfun_esc_s ; % character
+
+mfun_esc_s := "%" ; % or: char(37)
+
+% this one is the fastest when we have a match
+
+% vardef escaped_format(expr s) =
+% "" for n=0 upto length(s)-1 : &
+% % if ASCII substring (n,n+1) of s = 37 :
+% if substring (n,n+1) of s = mfun_esc_s :
+% "@"
+% else :
+% substring (n,n+1) of s
+% fi
+% endfor
+% enddef ;
+
+% this one wins when we have no match
+
vardef escaped_format(expr s) =
- "" for n=0 upto length(s) : &
- if ASCII substring (n,n+1) of s = 37 :
- "@"
- else :
- substring (n,n+1) of s
+ mfun_esc_b := 0 ;
+ mfun_esc_l := length(s) ;
+ for n=0 upto mfun_esc_l-1 :
+ % if ASCII substring (n,n+1) of s = 37 :
+ if substring (n,n+1) of s = mfun_esc_s :
+ if mfun_esc_b = 0 :
+ ""
+ fi
+ if n >= mfun_esc_b :
+ & (substring (mfun_esc_b,n) of s)
+ exitif numeric begingroup mfun_esc_b := n+1 endgroup ; % hide
+ fi
+ & "@"
fi
endfor
+ if mfun_esc_b = 0 :
+ s
+ % elseif mfun_esc_b > 0 :
+ elseif mfun_esc_b < mfun_esc_l :
+ & (substring (mfun_esc_b,mfun_esc_l) of s)
+ fi
enddef ;
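+
+% For example, escaped_format("%3.2f of %s") returns "@3.2f of @s" : every "%"
+% becomes "@" so the format string can safely be passed on to tex .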
-vardef strfmt(expr f, x) = % maybe use mfun_ namespace
- "\MPgraphformat{" & escaped_format(f) & "}{" & mfun_tagged_string(x) & "}"
-enddef ;
-
-vardef varfmt(expr f, x) = % maybe use mfun_ namespace
- "\MPformatted{" & escaped_format(f) & "}{" & mfun_tagged_string(x) & "}"
-enddef ;
+vardef strfmt(expr f, x) = "\MPgraphformat{" & escaped_format(f) & "}{" & mfun_tagged_string(x) & "}" enddef ;
+vardef varfmt(expr f, x) = "\MPformatted{" & escaped_format(f) & "}{" & mfun_tagged_string(x) & "}" enddef ;
vardef format (expr f, x) = textext(strfmt(f, x)) enddef ;
vardef formatted(expr f, x) = textext(varfmt(f, x)) enddef ;
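+
+% E.g. (sketch) : format typesets via \MPgraphformat, formatted via \MPformatted :
+%
+%   draw format("%g", 12.345) ;
+%   draw formatted("%g", 12.345) shifted (0,-1cm) ;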
+
+% could be this (something to discuss with alan as it involves graph):
+%
+% vardef format   (expr f,x) = lua.mp.graphformat(f,mfun_tagged_string(x)) enddef ;
+% vardef formatted(expr f,x) = lua.mp.format (f, x) enddef ;
+%
+% def strfmt = format enddef ; % old
+% def varfmt = formatted enddef ; % old
+
+% new
+
+def eofill text t = fill t withpostscript "evenodd" enddef ;
+%%% eoclip text t = clip t withpostscript "evenodd" enddef ; % no postscripts yet
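+
+% A sketch: a self-intersecting star filled with the even-odd rule keeps its
+% central pentagon open (a plain fill would paint it solid) :
+%
+%   path star ; star := for i=0 upto 4 : (up scaled 1cm) rotated (i*144) -- endfor cycle ;
+%   eofill star withcolor .5white ;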
+
+% def withrule expr r =
+%     if (r = "even-odd") or (r = "evenodd") : withpostscript "evenodd" fi
+% enddef ;
diff --git a/metapost/context/base/mp-page.mpiv b/metapost/context/base/mp-page.mpiv
index 9c538d42a..a6fa3fba3 100644
--- a/metapost/context/base/mp-page.mpiv
+++ b/metapost/context/base/mp-page.mpiv
@@ -11,73 +11,309 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module is rather preliminary and subjected to
-%D changes.
+%D This module is rather preliminary and subject to change.
if known context_page : endinput ; fi ;
boolean context_page ; context_page := true ;
+% def LoadPageState =
+% % now always set
+% enddef ;
+%
+% if unknown PageStateAvailable :
+% boolean PageStateAvailable ;
+% PageStateAvailable := false ;
+% fi ;
+%
+% if unknown OnRightPage :
+% boolean OnRightPage ;
+% OnRightPage := true ;
+% fi ;
+%
+% if unknown OnOddPage :
+% boolean OnOddPage ;
+% OnOddPage := true ;
+% fi ;
+%
+% if unknown InPageBody :
+% boolean InPageBody ;
+% InPageBody := false ;
+% fi ;
+%
+% string CurrentLayout ;
+%
+% CurrentLayout := "default" ;
+%
+% PageNumber := 0 ;
+% PaperHeight := 845.04684pt ;
+% PaperWidth := 597.50787pt ;
+% PrintPaperHeight := 845.04684pt ;
+% PrintPaperWidth := 597.50787pt ;
+% TopSpace := 71.12546pt ;
+% BottomSpace := 0.0pt ;
+% BackSpace := 71.13275pt ;
+% CutSpace := 0.0pt ;
+% MakeupHeight := 711.3191pt ;
+% MakeupWidth := 426.78743pt ;
+% TopHeight := 0.0pt ;
+% TopDistance := 0.0pt ;
+% HeaderHeight := 56.90294pt ;
+% HeaderDistance := 0.0pt ;
+% TextHeight := 597.51323pt ;
+% FooterDistance := 0.0pt ;
+% FooterHeight := 56.90294pt ;
+% BottomDistance := 0.0pt ;
+% BottomHeight := 0.0pt ;
+% LeftEdgeWidth := 0.0pt ;
+% LeftEdgeDistance := 0.0pt ;
+% LeftMarginWidth := 75.58197pt ;
+% LeftMarginDistance := 11.99829pt ;
+% TextWidth := 426.78743pt ;
+% RightMarginDistance := 11.99829pt ;
+% RightMarginWidth := 75.58197pt ;
+% RightEdgeDistance := 0.0pt ;
+% RightEdgeWidth := 0.0pt ;
+%
+% PageOffset := 0.0pt ;
+% PageDepth := 0.0pt ;
+%
+% LayoutColumns := 0 ;
+% LayoutColumnDistance:= 0.0pt ;
+% LayoutColumnWidth := 0.0pt ;
+%
+% LeftEdge := -4 ; Top := -40 ;
+% LeftEdgeSeparator := -3 ; TopSeparator := -30 ;
+% LeftMargin := -2 ; Header := -20 ;
+% LeftMarginSeparator := -1 ; HeaderSeparator := -10 ;
+% Text := 0 ; Text := 0 ;
+% RightMarginSeparator := +1 ; FooterSeparator := +10 ;
+% RightMargin := +2 ; Footer := +20 ;
+% RightEdgeSeparator := +3 ; BottomSeparator := +30 ;
+% RightEdge := +4 ; Bottom := +40 ;
+%
+% Margin := LeftMargin ; % obsolete
+% Edge := LeftEdge ; % obsolete
+% InnerMargin := RightMargin ; % obsolete
+% InnerEdge := RightEdge ; % obsolete
+% OuterMargin := LeftMargin ; % obsolete
+% OuterEdge := LeftEdge ; % obsolete
+%
+% InnerMarginWidth := 0pt ;
+% OuterMarginWidth := 0pt ;
+% InnerMarginDistance := 0pt ;
+% OuterMarginDistance := 0pt ;
+%
+% InnerEdgeWidth := 0pt ;
+% OuterEdgeWidth := 0pt ;
+% InnerEdgeDistance := 0pt ;
+% OuterEdgeDistance := 0pt ;
+%
+% % path Area[][] ;
+% % pair Location[][] ;
+% % path Field[][] ;
+%
+% % numeric Hstep[] ;
+% % numeric Hsize[] ;
+% % numeric Vstep[] ;
+% % numeric Vsize[] ;
+%
+% path Page ;
+%
+% numeric HorPos ;
+% numeric VerPos ;
+%
+% % for VerPos=Top step 10 until Bottom:
+% % for HorPos=LeftEdge step 1 until RightEdge:
+% % Area[HorPos][VerPos] := origin--cycle ;
+% % Area[VerPos][HorPos] := Area[HorPos][VerPos] ;
+% % Location[HorPos][VerPos] := origin ;
+% % Location[VerPos][HorPos] := Location[HorPos][VerPos] ;
+% % Field[HorPos][VerPos] := origin--cycle ;
+% % Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
+% % endfor ;
+% % endfor ;
+%
+% % def LoadPageState =
+% % scantokens "input mp-state.tmp" ;
+% % enddef ;
+%
+% numeric mfun_temp ;
+%
+% def SwapPageState =
+% if not OnRightPage :
+% BackSpace := PaperWidth-MakeupWidth-BackSpace ;
+% CutSpace := PaperWidth-MakeupWidth-CutSpace ;
+% mfun_temp := LeftMarginWidth ;
+% LeftMarginWidth := RightMarginWidth ;
+% RightMarginWidth := mfun_temp ;
+% mfun_temp := LeftMarginDistance ;
+% LeftMarginDistance := RightMarginDistance ;
+% RightMarginDistance := mfun_temp ;
+% mfun_temp := LeftEdgeWidth ;
+% LeftEdgeWidth := RightEdgeWidth ;
+% RightEdgeWidth := mfun_temp ;
+% mfun_temp := LeftEdgeDistance ;
+% LeftEdgeDistance := RightEdgeDistance ;
+% RightEdgeDistance := mfun_temp ;
+%
+% % these are now available as ..Width and ..Distance
+%
+% Margin := LeftMargin ;
+% Edge := LeftEdge ;
+% InnerMargin := RightMargin ;
+% InnerEdge := RightEdge ;
+% OuterMargin := LeftMargin ;
+% OuterEdge := LeftEdge ;
+% else :
+% Margin := RightMargin ;
+% Edge := RightEdge ;
+% InnerMargin := LeftMargin ;
+% InnerEdge := LeftEdge ;
+% OuterMargin := RightMargin ;
+% OuterEdge := RightEdge ;
+% fi ;
+% enddef ;
+
+% the new way:
+
+def LoadPageState =
+ % now always set
+enddef ;
+
if unknown PageStateAvailable :
boolean PageStateAvailable ;
PageStateAvailable := false ;
fi ;
-if unknown OnRightPage :
- boolean OnRightPage ;
- OnRightPage := true ;
-fi ;
+string CurrentLayout ; CurrentLayout := "default" ;
+
+vardef PaperHeight = lua.mp.PaperHeight () enddef ;
+vardef PaperWidth = lua.mp.PaperWidth () enddef ;
+vardef PrintPaperHeight = lua.mp.PrintPaperHeight () enddef ;
+vardef PrintPaperWidth = lua.mp.PrintPaperWidth () enddef ;
+vardef TopSpace = lua.mp.TopSpace () enddef ;
+vardef BottomSpace = lua.mp.BottomSpace () enddef ;
+vardef BackSpace = lua.mp.BackSpace () enddef ;
+vardef CutSpace = lua.mp.CutSpace () enddef ;
+vardef MakeupHeight = lua.mp.MakeupHeight () enddef ;
+vardef MakeupWidth = lua.mp.MakeupWidth () enddef ;
+vardef TopHeight = lua.mp.TopHeight () enddef ;
+vardef TopDistance = lua.mp.TopDistance () enddef ;
+vardef HeaderHeight = lua.mp.HeaderHeight () enddef ;
+vardef HeaderDistance = lua.mp.HeaderDistance () enddef ;
+vardef TextHeight = lua.mp.TextHeight () enddef ;
+vardef FooterDistance = lua.mp.FooterDistance () enddef ;
+vardef FooterHeight = lua.mp.FooterHeight () enddef ;
+vardef BottomDistance = lua.mp.BottomDistance () enddef ;
+vardef BottomHeight = lua.mp.BottomHeight () enddef ;
+vardef LeftEdgeWidth = lua.mp.LeftEdgeWidth () enddef ;
+vardef LeftEdgeDistance = lua.mp.LeftEdgeDistance () enddef ;
+vardef LeftMarginWidth = lua.mp.LeftMarginWidth () enddef ;
+vardef LeftMarginDistance = lua.mp.LeftMarginDistance () enddef ;
+vardef TextWidth = lua.mp.TextWidth () enddef ;
+vardef RightMarginDistance = lua.mp.RightMarginDistance () enddef ;
+vardef RightMarginWidth = lua.mp.RightMarginWidth () enddef ;
+vardef RightEdgeDistance = lua.mp.RightEdgeDistance () enddef ;
+vardef RightEdgeWidth = lua.mp.RightEdgeWidth () enddef ;
+vardef InnerMarginDistance = lua.mp.InnerMarginDistance () enddef ;
+vardef InnerMarginWidth = lua.mp.InnerMarginWidth () enddef ;
+vardef OuterMarginDistance = lua.mp.OuterMarginDistance () enddef ;
+vardef OuterMarginWidth = lua.mp.OuterMarginWidth () enddef ;
+vardef InnerEdgeDistance = lua.mp.InnerEdgeDistance () enddef ;
+vardef InnerEdgeWidth = lua.mp.InnerEdgeWidth () enddef ;
+vardef OuterEdgeDistance = lua.mp.OuterEdgeDistance () enddef ;
+vardef OuterEdgeWidth = lua.mp.OuterEdgeWidth () enddef ;
+vardef PageOffset = lua.mp.PageOffset () enddef ;
+vardef PageDepth = lua.mp.PageDepth () enddef ;
+vardef LayoutColumns = lua.mp.LayoutColumns () enddef ;
+vardef LayoutColumnDistance = lua.mp.LayoutColumnDistance() enddef ;
+vardef LayoutColumnWidth = lua.mp.LayoutColumnWidth () enddef ;
+
+vardef OnRightPage = lua.mp.OnRightPage () enddef ;
+vardef OnOddPage = lua.mp.OnOddPage () enddef ;
+vardef InPageBody = lua.mp.InPageBody () enddef ;
+
+vardef RealPageNumber = lua.mp.RealPageNumber () enddef ;
+vardef PageNumber = lua.mp.PageNumber () enddef ;
+vardef NOfPages = lua.mp.NOfPages () enddef ;
+vardef LastPageNumber = lua.mp.LastPageNumber () enddef ; % duplicates
+
+vardef CurrentColumn = lua.mp.CurrentColumn () enddef ;
+vardef NOfColumns = lua.mp.NOfColumns () enddef ;
+
+vardef BaseLineSkip = lua.mp.BaseLineSkip () enddef ;
+vardef LineHeight = lua.mp.LineHeight () enddef ;
+vardef BodyFontSize = lua.mp.BodyFontSize () enddef ;
+
+vardef TopSkip = lua.mp.TopSkip () enddef ;
+vardef StrutHeight = lua.mp.StrutHeight () enddef ;
+vardef StrutDepth = lua.mp.StrutDepth () enddef ;
+
+vardef CurrentWidth = lua.mp.CurrentWidth () enddef ;
+vardef CurrentHeight = lua.mp.CurrentHeight () enddef ;
+
+vardef HSize = lua.mp.HSize () enddef ; % duplicates
+vardef VSize = lua.mp.VSize () enddef ; % duplicates
+
+vardef EmWidth = lua.mp.EmWidth () enddef ;
+vardef ExHeight = lua.mp.ExHeight () enddef ;
+
+vardef PageFraction = lua.mp.PageFraction () enddef ;
+
+vardef SpineWidth = lua.mp.SpineWidth () enddef ;
+vardef PaperBleed = lua.mp.PaperBleed () enddef ;
+
+boolean mfun_swapped ;
-if unknown OnOddPage :
- boolean OnOddPage ;
- OnOddPage := true ;
-fi ;
+def SwapPageState =
+ mfun_swapped := true ; % eventually this will go !
+enddef ;
-if unknown InPageBody :
- boolean InPageBody ;
- InPageBody := false ;
-fi ;
+extra_beginfig := extra_beginfig & "mfun_swapped := false ;" ;
+
+vardef LeftMarginWidth = if mfun_swapped and not OnRightPage : lua.mp.RightMarginWidth () else : lua.mp.LeftMarginWidth () fi enddef ;
+vardef RightMarginWidth = if mfun_swapped and not OnRightPage : lua.mp.LeftMarginWidth () else : lua.mp.RightMarginWidth () fi enddef ;
+vardef LeftMarginDistance = if mfun_swapped and not OnRightPage : lua.mp.RightMarginDistance() else : lua.mp.LeftMarginDistance () fi enddef ;
+vardef RightMarginDistance = if mfun_swapped and not OnRightPage : lua.mp.LeftMarginDistance () else : lua.mp.RightMarginDistance() fi enddef ;
+
+vardef LeftEdgeWidth = if mfun_swapped and not OnRightPage : lua.mp.RightEdgeWidth () else : lua.mp.LeftEdgeWidth () fi enddef ;
+vardef RightEdgeWidth = if mfun_swapped and not OnRightPage : lua.mp.LeftEdgeWidth () else : lua.mp.RightEdgeWidth () fi enddef ;
+vardef LeftEdgeDistance = if mfun_swapped and not OnRightPage : lua.mp.RightEdgeDistance () else : lua.mp.LeftEdgeDistance () fi enddef ;
+vardef RightEdgeDistance = if mfun_swapped and not OnRightPage : lua.mp.LeftEdgeDistance () else : lua.mp.RightEdgeDistance () fi enddef ;
+
+vardef BackSpace = if mfun_swapped and not OnRightPage : PaperWidth - MakeupWidth - fi lua.mp.BackSpace() enddef ;
+vardef CutSpace = if mfun_swapped and not OnRightPage : PaperWidth - MakeupWidth - fi lua.mp.CutSpace () enddef ;
+
+% better use:
+
+vardef OuterMarginWidth = if not OnRightPage : lua.mp.LeftMarginWidth () else : lua.mp.RightMarginWidth () fi enddef ;
+vardef InnerMarginWidth = if not OnRightPage : lua.mp.RightMarginWidth () else : lua.mp.LeftMarginWidth () fi enddef ;
+vardef OuterMarginDistance = if not OnRightPage : lua.mp.LeftMarginDistance () else : lua.mp.RightMarginDistance() fi enddef ;
+vardef InnerMarginDistance = if not OnRightPage : lua.mp.RightMarginDistance() else : lua.mp.LeftMarginDistance () fi enddef ;
+
+vardef OuterEdgeWidth = if not OnRightPage : lua.mp.LeftEdgeWidth () else : lua.mp.RightEdgeWidth () fi enddef ;
+vardef InnerEdgeWidth = if not OnRightPage : lua.mp.RightEdgeWidth () else : lua.mp.LeftEdgeWidth () fi enddef ;
+vardef OuterEdgeDistance = if not OnRightPage : lua.mp.LeftEdgeDistance () else : lua.mp.RightEdgeDistance () fi enddef ;
+vardef InnerEdgeDistance   = if not OnRightPage : lua.mp.RightEdgeDistance () else : lua.mp.LeftEdgeDistance () fi enddef ;
-string CurrentLayout ;
-
-CurrentLayout := "default" ;
-
-PageNumber := 0 ;
-PaperHeight := 845.04684pt ;
-PaperWidth := 597.50787pt ;
-PrintPaperHeight := 845.04684pt ;
-PrintPaperWidth := 597.50787pt ;
-TopSpace := 71.12546pt ;
-BottomSpace := 0.0pt ;
-BackSpace := 71.13275pt ;
-CutSpace := 0.0pt ;
-MakeupHeight := 711.3191pt ;
-MakeupWidth := 426.78743pt ;
-TopHeight := 0.0pt ;
-TopDistance := 0.0pt ;
-HeaderHeight := 56.90294pt ;
-HeaderDistance := 0.0pt ;
-TextHeight := 597.51323pt ;
-FooterDistance := 0.0pt ;
-FooterHeight := 56.90294pt ;
-BottomDistance := 0.0pt ;
-BottomHeight := 0.0pt ;
-LeftEdgeWidth := 0.0pt ;
-LeftEdgeDistance := 0.0pt ;
-LeftMarginWidth := 75.58197pt ;
-LeftMarginDistance := 11.99829pt ;
-TextWidth := 426.78743pt ;
-RightMarginDistance := 11.99829pt ;
-RightMarginWidth := 75.58197pt ;
-RightEdgeDistance := 0.0pt ;
-RightEdgeWidth := 0.0pt ;
-
-PageOffset := 0.0pt ;
-PageDepth := 0.0pt ;
-
-LayoutColumns := 0 ;
-LayoutColumnDistance:= 0.0pt ;
-LayoutColumnWidth := 0.0pt ;
+vardef OuterSpaceWidth = if not OnRightPage : lua.mp.BackSpace () else : lua.mp.CutSpace () fi enddef ;
+vardef InnerSpaceWidth = if not OnRightPage : lua.mp.CutSpace () else : lua.mp.BackSpace () fi enddef ;
+
+% vardef CurrentLayout = lua.mp.CurrentLayout () enddef ;
+
+vardef OverlayWidth = lua.mp.OverlayWidth () enddef ;
+vardef OverlayHeight = lua.mp.OverlayHeight () enddef ;
+vardef OverlayDepth = lua.mp.OverlayDepth () enddef ;
+vardef OverlayLineWidth = lua.mp.OverlayLineWidth() enddef ;
+vardef OverlayOffset = lua.mp.OverlayOffset () enddef ;
+
+vardef defaultcolormodel = lua.mp.defaultcolormodel() enddef ;
+
+% def OverlayLineColor = lua.mp.OverlayLineColor() enddef ;
+% def OverlayColor = lua.mp.OverlayColor () enddef ;
+
+% Next we implement the page area model. First some constants.
LeftEdge := -4 ; Top := -40 ;
LeftEdgeSeparator := -3 ; TopSeparator := -30 ;
@@ -89,37 +325,28 @@ RightMargin := +2 ; Footer := +20 ;
RightEdgeSeparator := +3 ; BottomSeparator := +30 ;
RightEdge := +4 ; Bottom := +40 ;
-Margin := LeftMargin ; % obsolete
-Edge := LeftEdge ; % obsolete
-InnerMargin := RightMargin ; % obsolete
-InnerEdge := RightEdge ; % obsolete
-OuterMargin := LeftMargin ; % obsolete
-OuterEdge := LeftEdge ; % obsolete
+% Margin := LeftMargin ; % obsolete
+% Edge := LeftEdge ; % obsolete
+% InnerMargin := RightMargin ; % obsolete
+% InnerEdge := RightEdge ; % obsolete
+% OuterMargin := LeftMargin ; % obsolete
+% OuterEdge := LeftEdge ; % obsolete
-InnerMarginWidth := 0pt ;
-OuterMarginWidth := 0pt ;
-InnerMarginDistance := 0pt ;
-OuterMarginDistance := 0pt ;
+numeric HorPos ; HorPos := 0 ;
+numeric VerPos ; VerPos := 0 ;
-InnerEdgeWidth := 0pt ;
-OuterEdgeWidth := 0pt ;
-InnerEdgeDistance := 0pt ;
-OuterEdgeDistance := 0pt ;
+% We used to initialize these variables each (sub)run but at some point MP
+% became too slow for this. See later.
% path Area[][] ;
% pair Location[][] ;
% path Field[][] ;
-
+%
% numeric Hstep[] ;
% numeric Hsize[] ;
% numeric Vstep[] ;
% numeric Vsize[] ;
-
-path Page ;
-
-numeric HorPos ;
-numeric VerPos ;
-
+%
% for VerPos=Top step 10 until Bottom:
% for HorPos=LeftEdge step 1 until RightEdge:
% Area[HorPos][VerPos] := origin--cycle ;
@@ -130,46 +357,8 @@ numeric VerPos ;
% Field[VerPos][HorPos] := Field[HorPos][VerPos] ;
% endfor ;
% endfor ;
-
-% def LoadPageState =
-% scantokens "input mp-state.tmp" ;
-% enddef ;
-
-def SwapPageState =
- if not OnRightPage :
- BackSpace := PaperWidth-MakeupWidth-BackSpace ;
- CutSpace := PaperWidth-MakeupWidth-CutSpace ;
- i := LeftMarginWidth ;
- LeftMarginWidth := RightMarginWidth ;
- RightMarginWidth := i ;
- i := LeftMarginDistance ;
- LeftMarginDistance := RightMarginDistance ;
- RightMarginDistance := i ;
- i := LeftEdgeWidth ;
- LeftEdgeWidth := RightEdgeWidth ;
- RightEdgeWidth := i ;
- i := LeftEdgeDistance ;
- LeftEdgeDistance := RightEdgeDistance ;
- RightEdgeDistance := i ;
-
- % these are now available as ..Width and ..Distance
-
- Margin := LeftMargin ;
- Edge := LeftEdge ;
- InnerMargin := RightMargin ;
- InnerEdge := RightEdge ;
- OuterMargin := LeftMargin ;
- OuterEdge := LeftEdge ;
- else :
- Margin := RightMargin ;
- Edge := RightEdge ;
- InnerMargin := LeftMargin ;
- InnerEdge := LeftEdge ;
- OuterMargin := RightMargin ;
- OuterEdge := RightEdge ;
- fi ;
-enddef ;
-
+%
+%
% def SetPageAreas =
%
% numeric Vsize[], Hsize[], Vstep[], Hstep[] ;
@@ -338,29 +527,55 @@ def SetPageField =
endfor ;
enddef ;
-def SetPagePage =
- path Page ;
- Page := unitsquare xscaled PaperWidth yscaled PaperHeight ;
+def mfun_page_Area = hide(SetPageArea ;) Area enddef ;
+def mfun_page_Location = hide(SetPageLocation ;) Location enddef ;
+def mfun_page_Field = hide(SetPageField ;) Field enddef ;
+def mfun_page_Vsize = hide(SetPageVsize ;) Vsize enddef ;
+def mfun_page_Hsize = hide(SetPageHsize ;) Hsize enddef ;
+def mfun_page_Vstep = hide(SetPageVstep ;) Vstep enddef ;
+def mfun_page_Hstep = hide(SetPageHstep ;) Hstep enddef ;
+
+def SetAreaVariables =
+ let Area = mfun_page_Area ;
+ let Location = mfun_page_Location ;
+ let Field = mfun_page_Field ;
+ let Vsize = mfun_page_Vsize ;
+ let Hsize = mfun_page_Hsize ;
+ let Vstep = mfun_page_Vstep ;
+ let Hstep = mfun_page_Hstep ;
enddef ;
-def mfun_page_Area = hide(SetPageArea ;) Area enddef ;
-def mfun_page_Location = hide(SetPageLocation ;) Location enddef ;
-def mfun_page_Field = hide(SetPageField ;) Field enddef ;
-def mfun_page_Vsize = hide(SetPageVsize ;) Vsize enddef ;
-def mfun_page_Hsize = hide(SetPageHsize ;) Hsize enddef ;
-def mfun_page_Vstep = hide(SetPageVstep ;) Vstep enddef ;
-def mfun_page_Hstep = hide(SetPageHstep ;) Hstep enddef ;
-def mfun_page_Page = hide(SetPagePage ;) Page enddef ;
+% we should make Page not a path .. from now on don't assume this .. for a while we keep it
+
+vardef FrontPageWidth = PaperWidth enddef ;
+vardef BackPageWidth = PaperWidth enddef ;
+vardef CoverWidth = 2 * PaperWidth + SpineWidth enddef ;
+vardef CoverHeight = PaperHeight enddef ;
+
+vardef FrontPageHeight = PaperHeight enddef ;
+vardef BackPageHeight = PaperHeight enddef ;
+vardef SpineHeight = PaperHeight enddef ;
+
+def SetPagePage = path Page ; Page := unitsquare xscaled PaperWidth yscaled PaperHeight ; enddef ;
+def SetPageCoverPage = path CoverPage ; CoverPage := unitsquare xscaled CoverWidth yscaled CoverHeight ; enddef ;
+def SetPageSpine = path Spine ; Spine := unitsquare xscaled SpineWidth yscaled CoverHeight shifted (BackPageWidth,0) ; enddef ;
+def SetPageBackPage = path BackPage ; BackPage := unitsquare xscaled BackPageWidth yscaled CoverHeight ; enddef ;
+def SetPageFrontPage = path FrontPage ; FrontPage := unitsquare xscaled FrontPageWidth yscaled CoverHeight shifted (BackPageWidth+SpineWidth,0) ; enddef ;
+
+def mfun_page_Page = hide(SetPagePage ;) Page enddef ;
+def mfun_page_CoverPage = hide(SetPageCoverPage;) CoverPage enddef ;
+def mfun_page_Spine = hide(SetPageSpine ;) Spine enddef ;
+def mfun_page_BackPage = hide(SetPageBackPage ;) BackPage enddef ;
+def mfun_page_FrontPage = hide(SetPageFrontPage;) FrontPage enddef ;
def SetPageVariables =
- let Area = mfun_page_Area ;
- let Location = mfun_page_Location ;
- let Field = mfun_page_Field ;
- let Vsize = mfun_page_Vsize ;
- let Hsize = mfun_page_Hsize ;
- let Vstep = mfun_page_Vstep ;
- let Hstep = mfun_page_Hstep ;
- let Page = mfun_page_Page ;
+ SetAreaVariables ;
+ %
+ let Page = mfun_page_Page ;
+ let CoverPage = mfun_page_CoverPage ;
+ let Spine = mfun_page_Spine ;
+ let BackPage = mfun_page_BackPage ;
+ let FrontPage = mfun_page_FrontPage ;
enddef ;
SetPageVariables ;
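+
+% With these installed a cover graphic can address the new paths directly;
+% a sketch (the actual dimensions come from the lua end) :
+%
+%   fill CoverPage withcolor .9white ;
+%   fill Spine     withcolor .7white ;
+%   draw BackPage ; draw FrontPage ;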
@@ -393,45 +608,10 @@ enddef ;
def BoundCoverAreas =
% todo: add cropmarks
- bboxmargin := 0 ; setbounds currentpicture to Paper enlarged PaperBleed ;
-enddef ;
-
-def SetCoverAreas =
-
- if unknown SpineWidth :
- SpineWidth := 8mm ;
- fi ;
-
- if unknown PaperBleed :
- PaperBleed := 0 ;
- fi ;
-
- FrontPageWidth := PaperWidth ;
- BackPageWidth := PaperWidth ;
- PaperWidth := 2 * PaperWidth + SpineWidth ;
-
- FrontPageHeight := PaperHeight ;
- BackPageHeight := PaperHeight ;
- PaperHeight := PaperHeight ;
- SpineHeight := PaperHeight ;
-
- path Paper ; Paper := unitsquare xscaled PaperWidth yscaled PaperHeight ;
- path Spine ; Spine := unitsquare xscaled SpineWidth yscaled PaperHeight shifted (BackPageWidth,0);
- path BackPage ; BackPage := unitsquare xscaled BackPageWidth yscaled PaperHeight ;
- path FrontPage ; FrontPage := unitsquare xscaled FrontPageWidth yscaled PaperHeight shifted (BackPageWidth+SpineWidth,0) ;
-
+ bboxmargin := 0 ; setbounds currentpicture to CoverPage enlarged PaperBleed ;
enddef ;
-% def StartCover =
-% begingroup ;
-% if PageStateAvailable :
-% LoadPageState ;
-% % SwapPageState ;
-% fi ;
-% SetPageAreas ;
-% SetCoverAreas ;
-% BoundCoverAreas ;
-% enddef ;
+let SetCoverAreas = SetPageVariables ; % compatibility
def StartCover =
begingroup ;
diff --git a/metapost/context/base/mp-tool.mpii b/metapost/context/base/mp-tool.mpii
index f363f655e..a5bb345a1 100644
--- a/metapost/context/base/mp-tool.mpii
+++ b/metapost/context/base/mp-tool.mpii
@@ -5,17 +5,13 @@
%D subtitle=auxiliary macros,
%D author=Hans Hagen,
%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% a cleanup is needed, like using image and alike
-% use a few more "newinternal"'s
-
-%D This module is rather preliminary and subjected to
-%D changes.
+% def loadfile(expr name) = scantokens("input " & name & ";") enddef ;
if known context_tool : endinput ; fi ;
@@ -32,6 +28,8 @@ let @## = @# ;
if not known mpversion : string mpversion ; mpversion := "0.641" ; fi ;
+newinternal metapostversion ; metapostversion := scantokens(mpversion) ;
+
% vardef mpversiongt(expr s) =
% scantokens (mpversion & " > " & if numeric s : decimal s else : s fi)
% enddef ;
@@ -85,20 +83,14 @@ mpprocset := 1 ;
%
% protect ;
-%D By including this module, \METAPOST\ automatically writes a
-%D high resolution boundingbox to the \POSTSCRIPT\ file. This
-%D hack is due to John Hobby himself.
-
-% When somehow the first one gets no HiRes, then make sure
-% that the format matches the mem sizes in the config file.
-
-string space ; space = char 32 ;
+string space ; space := char 32 ;
+string CRLF ; CRLF := char 10 & char 13 ;
vardef ddecimal primary p =
- decimal xpart p & " " & decimal ypart p
+ decimal xpart p & " " & decimal ypart p
enddef ;
-%D Plain compatibility
+%D Plain compatibility:
string plain_compatibility_data ; plain_compatibility_data := "" ;
@@ -112,7 +104,7 @@ def stopplaincompatibility =
enddef ;
% is now built in
-
+%
% extra_endfig := extra_endfig
% & "special "
% & "("
@@ -124,6 +116,11 @@ enddef ;
% & "&ddecimal urcorner currentpicture"
% & ");";
+%D More neutral:
+
+let triplet = rgbcolor ;
+let quadruplet = cmykcolor ;
+
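+%D A small sketch, not in the patch: with these aliases color variables can be
+%D declared in a model neutral way (the names below are only illustrative).
+%D
+%D \starttyping
+%D triplet    somergb  ; somergb  := (1,0,0)   ; % an rgbcolor
+%D quadruplet somecmyk ; somecmyk := (0,1,1,0) ; % a cmykcolor
+%D \stoptyping
+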
%D Crap (experimental, not used):
def forcemultipass =
@@ -132,12 +129,28 @@ enddef ;
%D Colors:
-nocolormodel := 1 ;
-greycolormodel := 3 ;
-rgbcolormodel := 5 ;
-cmykcolormodel := 7 ;
+newinternal nocolormodel ; nocolormodel := 1 ;
+newinternal greycolormodel ; greycolormodel := 3 ;
+newinternal graycolormodel ; graycolormodel := 3 ;
+newinternal rgbcolormodel ; rgbcolormodel := 5 ;
+newinternal cmykcolormodel ; cmykcolormodel := 7 ;
let grayscale = numeric ;
+let greyscale = numeric ;
+
+vardef colorpart expr c =
+ if not picture c :
+ 0
+ elseif colormodel c = greycolormodel :
+ greypart c
+ elseif colormodel c = rgbcolormodel :
+ (redpart c,greenpart c,bluepart c)
+ elseif colormodel c = cmykcolormodel :
+ (cyanpart c,magentapart c,yellowpart c,blackpart c)
+ else :
+ 0 % black
+ fi
+enddef ;
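+
+%D A usage sketch, not part of the patch: colorpart reports the color of a
+%D picture component in whatever model that component uses (the picture below
+%D is only illustrative).
+%D
+%D \starttyping
+%D picture mypic ; mypic := image(draw origin withcolor (0,1,0)) ;
+%D for s within mypic :
+%D     show colorpart s ; % reports (0,1,0)
+%D endfor ;
+%D \stoptyping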
vardef colorlike(text c) text v = % colorlike(a) b, c, d ;
save _p_ ; picture _p_ ;
@@ -148,25 +161,19 @@ vardef colorlike(text c) text v = % colorlike(a) b, c, d ;
elseif (colormodel _p_ = rgbcolormodel) :
rgbcolor i ;
else :
- grayscale i ;
+ greycolor i ;
fi ;
endfor ;
enddef ;
-% if (unknown colormodel) :
-% def colormodel =
-% rgbcolormodel
-% enddef ;
-% fi ;
-
%D Also handy (when we flush colors):
vardef dddecimal primary c =
- decimal redpart c & " " & decimal greenpart c & " " & decimal bluepart c
+ decimal redpart c & " " & decimal greenpart c & " " & decimal bluepart c
enddef ;
vardef ddddecimal primary c =
- decimal cyanpart c & " " & decimal magentapart c & " " & decimal yellowpart c & " " & decimal blackpart c
+ decimal cyanpart c & " " & decimal magentapart c & " " & decimal yellowpart c & " " & decimal blackpart c
enddef ;
vardef colordecimals primary c =
@@ -193,39 +200,34 @@ enddef ;
%D we need some trickery when we have multiple files.
if unknown collapse_data :
- boolean collapse_data ; collapse_data := false ;
+ boolean collapse_data ;
+ collapse_data := false ;
fi ;
boolean savingdata ; savingdata := false ;
boolean savingdatadone ; savingdatadone := false ;
def savedata expr txt =
- if collapse_data :
- write txt to data_mpd_file ;
- else :
- write if savingdata : txt else :
- "\MPdata{" & decimal charcode & "}{" & txt & "}"
- fi
- & "%" to data_mpd_file ;
- fi ;
+ write if collapse_data :
+ txt
+ else :
+ if savingdata : txt else : "\MPdata{" & decimal charcode & "}{" & txt & "}" fi & "%"
+ fi to data_mpd_file ;
enddef ;
def startsavingdata =
- savingdata := true ;
- savingdatadone := true ;
- if collapse_data :
- write
- "\MPdata{" & decimal charcode & "}{%"
- to
- data_mpd_file ;
- fi ;
+ savingdata := true ;
+ savingdatadone := true ;
+ if collapse_data :
+ write "\MPdata{" & decimal charcode & "}{%" to data_mpd_file ;
+ fi ;
enddef ;
def stopsavingdata =
- if collapse_data :
- write "}%" to data_mpd_file ;
- fi ;
- savingdata := false ;
+ if collapse_data :
+ write "}%" to data_mpd_file ;
+ fi ;
+ savingdata := false ;
enddef ;
def finishsavingdata =
@@ -253,119 +255,130 @@ def newpair text v = forsuffixes i=v : save i ; pair i ; endfor ; endd
%D box, draw the graphics that may not count, and restore the
%D bounding box.
%D
-%D \starttypen
+%D \starttyping
%D push_boundingbox currentpicture;
%D pop_boundingbox currentpicture;
-%D \stoptypen
+%D \stoptyping
%D
%D The bounding box can be called with:
%D
-%D \starttypen
+%D \starttyping
%D boundingbox currentpicture
%D inner_boundingbox currentpicture
%D outer_boundingbox currentpicture
-%D \stoptypen
+%D \stoptyping
%D
%D Especially the latter one can be of use when we include
%D the graphic in a document that is clipped to the bounding
%D box. On such occasions one can use:
%D
-%D \starttypen
+%D \starttyping
%D set_outer_boundingbox currentpicture;
-%D \stoptypen
+%D \stoptyping
%D
%D Its counterpart is:
%D
-%D \starttypen
+%D \starttyping
%D set_inner_boundingbox p
-%D \stoptypen
+%D \stoptyping
-path pushed_boundingbox;
+path mfun_boundingbox_stack ;
+numeric mfun_boundingbox_stack_depth ;
-def push_boundingbox text p =
- pushed_boundingbox := boundingbox p;
-enddef;
+mfun_boundingbox_stack_depth := 0 ;
-def pop_boundingbox text p =
- setbounds p to pushed_boundingbox;
-enddef;
+def pushboundingbox text p =
+ mfun_boundingbox_stack_depth := mfun_boundingbox_stack_depth + 1 ;
+ mfun_boundingbox_stack[mfun_boundingbox_stack_depth] := boundingbox p ;
+enddef ;
+
+def popboundingbox text p =
+ setbounds p to mfun_boundingbox_stack[mfun_boundingbox_stack_depth] ;
+ mfun_boundingbox_stack[mfun_boundingbox_stack_depth] := origin ;
+ mfun_boundingbox_stack_depth := mfun_boundingbox_stack_depth - 1 ;
+enddef ;
+
+let push_boundingbox = pushboundingbox ; % downward compatible
+let pop_boundingbox = popboundingbox ; % downward compatible
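+
+%D A small sketch, not from the patch: unlike the old single slot macros the
+%D stack based variants can be nested; pushes and pops must of course match.
+%D
+%D \starttyping
+%D pushboundingbox currentpicture ;
+%D draw fullcircle scaled 10cm ;    % helper graphics that should not count
+%D popboundingbox currentpicture ;  % the original bounds are restored
+%D \stoptyping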
vardef boundingbox primary p =
- if (path p) or (picture p) :
- llcorner p -- lrcorner p -- urcorner p -- ulcorner p
- else :
- origin
- fi -- cycle
+ if (path p) or (picture p) :
+ llcorner p -- lrcorner p -- urcorner p -- ulcorner p
+ else :
+ origin
+ fi -- cycle
enddef;
-vardef inner_boundingbox primary p =
- top rt llcorner p --
- top lft lrcorner p --
- bot lft urcorner p --
- bot rt ulcorner p -- cycle
+vardef innerboundingbox primary p =
+ top rt llcorner p --
+ top lft lrcorner p --
+ bot lft urcorner p --
+ bot rt ulcorner p -- cycle
enddef;
-vardef outer_boundingbox primary p =
- bot lft llcorner p --
- bot rt lrcorner p --
- top rt urcorner p --
- top lft ulcorner p -- cycle
+vardef outerboundingbox primary p =
+ bot lft llcorner p --
+ bot rt lrcorner p --
+ top rt urcorner p --
+ top lft ulcorner p -- cycle
enddef;
-def innerboundingbox = inner_boundingbox enddef ;
-def outerboundingbox = outer_boundingbox enddef ;
+def inner_boundingbox = innerboundingbox enddef ;
+def outer_boundingbox = outerboundingbox enddef ;
-vardef set_inner_boundingbox text q =
- setbounds q to inner_boundingbox q;
+vardef set_inner_boundingbox text q = % obsolete
+ setbounds q to innerboundingbox q;
enddef;
-vardef set_outer_boundingbox text q =
- setbounds q to outer_boundingbox q;
+vardef set_outer_boundingbox text q = % obsolete
+ setbounds q to outerboundingbox q;
enddef;
-%D Some missing functions can be implemented rather
-%D straightforward:
-
-numeric Pi ; Pi := 3.1415926 ;
+%D Some missing functions can be implemented rather straightforwardly (thanks to
+%D Taco and others):
-vardef sqr primary x = (x*x) enddef ;
-vardef log primary x = (if x=0: 0 else: mlog(x)/mlog(10) fi) enddef ;
-vardef ln primary x = (if x=0: 0 else: mlog(x)/256 fi) enddef ;
-vardef exp primary x = ((mexp 256)**x) enddef ;
-vardef inv primary x = (if x=0: 0 else: x**-1 fi) enddef ;
+pi := 3.14159265358979323846 ; radian := 180/pi ; % 2pi*radian = 360 ;
-vardef pow (expr x,p) = (x**p) enddef ;
+% let +++ = ++ ;
-vardef asin primary x = (x+(x**3)/6+3(x**5)/40) enddef ;
-vardef acos primary x = (asin(-x)) enddef ;
-vardef atan primary x = (x-(x**3)/3+(x**5)/5-(x**7)/7) enddef ;
-vardef tand primary x = (sind(x)/cosd(x)) enddef ;
+numeric Pi ; Pi := pi ; % for some old compatibility reasons i guess
-%D Here are Taco Hoekwater's alternatives (but
-%D vardef'd and primaried).
+vardef sqr primary x = x*x enddef ;
+vardef log primary x = if x=0: 0 else: mlog(x)/mlog(10) fi enddef ;
+vardef ln primary x = if x=0: 0 else: mlog(x)/256 fi enddef ;
+vardef exp primary x = (mexp 256)**x enddef ;
+vardef inv primary x = if x=0: 0 else: x**-1 fi enddef ;
-pi := 3.1415926 ; radian := 180/pi ; % 2pi*radian = 360 ;
+vardef pow (expr x,p) = x**p enddef ;
-vardef tand primary x = (sind(x)/cosd(x)) enddef ;
-vardef cotd primary x = (cosd(x)/sind(x)) enddef ;
+vardef tand primary x = sind(x)/cosd(x) enddef ;
+vardef cotd primary x = cosd(x)/sind(x) enddef ;
-vardef sin primary x = (sind(x*radian)) enddef ;
-vardef cos primary x = (cosd(x*radian)) enddef ;
-vardef tan primary x = (sin(x)/cos(x)) enddef ;
-vardef cot primary x = (cos(x)/sin(x)) enddef ;
+vardef sin primary x = sind(x*radian) enddef ;
+vardef cos primary x = cosd(x*radian) enddef ;
+vardef tan primary x = sin(x)/cos(x) enddef ;
+vardef cot primary x = cos(x)/sin(x) enddef ;
-vardef asin primary x = angle((1+-+x,x)) enddef ;
-vardef acos primary x = angle((x,1+-+x)) enddef ;
+vardef asin primary x = angle((1+-+x,x)) enddef ;
+vardef acos primary x = angle((x,1+-+x)) enddef ;
+vardef atan primary x = angle(1,x) enddef ;
-vardef invsin primary x = ((asin(x))/radian) enddef ;
-vardef invcos primary x = ((acos(x))/radian) enddef ;
+vardef invsin primary x = (asin(x))/radian enddef ;
+vardef invcos primary x = (acos(x))/radian enddef ;
+vardef invtan primary x = (atan(x))/radian enddef ;
-vardef acosh primary x = ln(x+(x+-+1)) enddef ;
-vardef asinh primary x = ln(x+(x++1)) enddef ;
+vardef acosh primary x = ln(x+(x+-+1)) enddef ;
+vardef asinh primary x = ln(x+(x++1)) enddef ;
vardef sinh primary x = save xx ; xx = exp x ; (xx-1/xx)/2 enddef ;
vardef cosh primary x = save xx ; xx = exp x ; (xx+1/xx)/2 enddef ;
+%D Sometimes this is handy:
+
+def undashed =
+ dashed nullpicture
+enddef ;
+
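+%D A usage sketch, not part of the patch: undashed is meant to cancel a dash
+%D pattern that is passed along with other options (later options win).
+%D
+%D \starttyping
+%D draw fullcircle scaled 2cm dashed evenly undashed ; % ends up solid
+%D \stoptyping
+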
%D We provide two macros for drawing stripes across a shape.
%D The first method (with the n suffix) uses another method,
%D slower in calculation, but more efficient when drawn. The
@@ -374,101 +387,218 @@ vardef cosh primary x = save xx ; xx = exp x ; (xx+1/xx)/2 enddef ;
%D the second argument identifies the way the shape is to be
%D drawn.
%D
-%D \starttypen
+%D \starttyping
%D stripe_path_n
%D (dashed evenly withcolor blue)
%D (filldraw)
%D fullcircle xscaled 100 yscaled 40 shifted (50,50) withpen pencircle scaled 4;
-%D \stoptypen
+%D \stoptyping
%D
%D The a (or angle) alternative supports arbitrary angles and
%D is therefore more versatile.
%D
-%D \starttypen
+%D \starttyping
%D stripe_path_a
%D (withpen pencircle scaled 2 withcolor red)
%D (draw)
%D fullcircle xscaled 100 yscaled 40 withcolor blue;
-%D \stoptypen
+%D \stoptyping
+%D
+%D We have two alternatives, controlled by arguments or defaults (when arguments
+%D are zero).
+%D
+%D The newer and nicer interface is used as follows (triggered by a question from Mari):
+%D
+%D \starttyping
+%D draw image (draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green) numberstriped (1,10,3) withcolor red ;
+%D draw image (draw fullcircle scaled 3cm shifted (3cm,0cm) withcolor green) numberstriped (2,20,3) withcolor green ;
+%D draw image (draw fullcircle scaled 3cm shifted (3cm,3cm) withcolor green) numberstriped (3,10,5) withcolor blue ;
+%D draw image (draw fullcircle scaled 3cm shifted (0cm,3cm) withcolor green) numberstriped (4,20,5) withcolor yellow ;
+%D
+%D draw image (draw fullcircle scaled 3cm shifted (6cm,0cm) withcolor green) anglestriped (1,20,2) withcolor red ;
+%D draw image (draw fullcircle scaled 3cm shifted (9cm,0cm) withcolor green) anglestriped (2,40,2) withcolor green ;
+%D draw image (draw fullcircle scaled 3cm shifted (9cm,3cm) withcolor green) anglestriped (3,60,2) withcolor blue ;
+%D draw image (draw fullcircle scaled 3cm shifted (6cm,3cm) withcolor green) anglestriped (4,80,2) withcolor yellow ;
%D
-%D The first alternative obeys:
+%D draw image (
+%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
+%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
+%D ) shifted (9cm,0cm) numberstriped (1,10,3) withcolor red ;
+%D
+%D draw image (
+%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
+%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
+%D ) shifted (12cm,0cm) numberstriped (2,10,3) withcolor red ;
+%D
+%D draw image (
+%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
+%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
+%D ) shifted (9cm,5cm) numberstriped (3,10,3) withcolor red ;
+%D
+%D draw image (
+%D draw fullcircle scaled 3cm shifted (0cm,0cm) withcolor green withpen pencircle scaled 2mm ;
+%D draw fullcircle scaled 2cm shifted (0cm,1cm) withcolor blue withpen pencircle scaled 3mm ;
+%D ) shifted (12cm,5cm) numberstriped (4,10,3) withcolor red ;
+%D \stoptyping
stripe_n := 10;
stripe_slot := 3;
-
-%D When no pen dimensions are passed, the slot determines
-%D the spacing.
-%D
-%D The angle alternative is influenced by:
-
stripe_gap := 5;
stripe_angle := 45;
+def mfun_tool_striped_number_action text extra =
+ for i = 1/used_n step 1/used_n until 1 :
+ draw point (1+i) of bounds -- point (3-i) of bounds withpen pencircle scaled penwidth extra ;
+ endfor ;
+ for i = 0 step 1/used_n until 1 :
+ draw point (3+i) of bounds -- point (1-i) of bounds withpen pencircle scaled penwidth extra ;
+ endfor ;
+enddef ;
+
+def mfun_tool_striped_set_options(expr option) =
+ save isinner, swapped ;
+ boolean isinner, swapped ;
+ if option = 1 :
+ isinner := false ;
+ swapped := false ;
+ elseif option = 2 :
+ isinner := true ;
+ swapped := false ;
+ elseif option = 3 :
+ isinner := false ;
+ swapped := true ;
+ elseif option = 4 :
+ isinner := true ;
+ swapped := true ;
+ else :
+ isinner := false ;
+ swapped := false ;
+ fi ;
+enddef ;
+
+vardef mfun_tool_striped_number(expr option, p, s_n, s_slot) text extra =
+ image (
+ begingroup ;
+ save pattern, shape, bounds, penwidth, used_n, used_slot ;
+ picture pattern, shape ; path bounds ; numeric used_s, used_slot ;
+ mfun_tool_striped_set_options(option) ;
+ used_slot := if s_slot = 0 : stripe_slot else : s_slot fi ;
+ used_n := if s_n = 0 : stripe_n else : s_n fi ;
+ shape := image(draw p) ;
+ bounds := boundingbox shape ;
+ penwidth := min(ypart urcorner shape - ypart llcorner shape, xpart urcorner shape - xpart llcorner shape) / (used_slot * used_n) ;
+ pattern := image (
+ if isinner :
+ mfun_tool_striped_number_action extra ;
+ for s within shape :
+ if stroked s or filled s :
+ clip currentpicture to pathpart s ;
+ fi
+ endfor ;
+ else :
+ for s within shape :
+ if stroked s or filled s :
+ draw image (
+ mfun_tool_striped_number_action extra ;
+ clip currentpicture to pathpart s ;
+ ) ;
+ fi ;
+ endfor ;
+ fi ;
+ ) ;
+ if swapped :
+ addto currentpicture also shape ;
+ addto currentpicture also pattern ;
+ else :
+ addto currentpicture also pattern ;
+ addto currentpicture also shape ;
+ fi ;
+ endgroup ;
+ )
+enddef ;
+
+def mfun_tool_striped_angle_action text extra =
+ for i = minimum -.5used_gap step used_gap until maximum :
+ draw (minimum,i) -- (maximum,i) extra ;
+ endfor ;
+ currentpicture := currentpicture rotated used_angle ;
+enddef ;
+
+vardef mfun_tool_striped_angle(expr option, p, s_angle, s_gap) text extra =
+ image (
+ begingroup ;
+ save pattern, shape, mask, maximum, minimum, centrum, used_angle, used_gap ;
+ picture pattern, shape, mask ; numeric maximum, minimum ; pair centrum ; numeric used_angle, used_gap ;
+ mfun_tool_striped_set_options(option) ;
+ used_angle := if s_angle = 0 : stripe_angle else : s_angle fi ;
+ used_gap := if s_gap = 0 : stripe_gap else : s_gap fi ;
+ shape := image(draw p) ;
+ centrum := center shape ;
+ shape := shape shifted - centrum ;
+ mask := shape rotated used_angle ;
+ maximum := max (xpart llcorner mask, xpart urcorner mask, ypart llcorner mask, ypart urcorner mask) ;
+ minimum := min (xpart llcorner mask, xpart urcorner mask, ypart llcorner mask, ypart urcorner mask) ;
+ pattern := image (
+ if isinner :
+ mfun_tool_striped_angle_action extra ;
+ for s within shape :
+ if stroked s or filled s :
+ clip currentpicture to pathpart s ;
+ fi
+ endfor ;
+ else :
+ for s within shape :
+ if stroked s or filled s :
+ draw image (
+ mfun_tool_striped_angle_action extra ;
+ clip currentpicture to pathpart s ;
+ ) ;
+ fi ;
+ endfor ;
+ fi ;
+ ) ;
+ if swapped :
+ addto currentpicture also shape ;
+ addto currentpicture also pattern ;
+ else :
+ addto currentpicture also pattern ;
+ addto currentpicture also shape ;
+ fi ;
+ currentpicture := currentpicture shifted - centrum ;
+ endgroup ;
+ )
+enddef;
+
+newinternal striped_normal_inner ; striped_normal_inner := 1 ;
+newinternal striped_reverse_inner ; striped_reverse_inner := 2 ;
+newinternal striped_normal_outer ; striped_normal_outer := 3 ;
+newinternal striped_reverse_outer ; striped_reverse_outer := 4 ;
+
+secondarydef p anglestriped s =
+ mfun_tool_striped_angle(redpart s,p,greenpart s,bluepart s)
+enddef ;
+
+secondarydef p numberstriped s =
+ mfun_tool_striped_number(redpart s,p,greenpart s,bluepart s)
+enddef ;
+
+% for old times' sake:
+
def stripe_path_n (text s_spec) (text s_draw) expr s_path =
- do_stripe_path_n (s_spec) (s_draw) (s_path)
+ do_stripe_path_n (s_spec) (s_draw) (s_path)
enddef;
def do_stripe_path_n (text s_spec) (text s_draw) (expr s_path) text s_text =
- begingroup
- save curpic, newpic, bb, pp, ww;
- picture curpic, newpic;
- path bb, pp;
- pp := s_path;
- curpic := currentpicture;
- currentpicture := nullpicture;
- s_draw pp s_text;
- bb := boundingbox currentpicture;
- newpic := currentpicture;
- currentpicture := nullpicture;
- ww := min(ypart urcorner newpic - ypart llcorner newpic,
- xpart urcorner newpic - xpart llcorner newpic);
- ww := ww/(stripe_slot*stripe_n);
- for i=1/stripe_n step 1/stripe_n until 1:
- draw point (1+i) of bb -- point (3-i) of bb
- withpen pencircle scaled ww s_spec ;
- endfor;
- for i=0 step 1/stripe_n until 1:
- draw point (3+i) of bb -- point (1-i) of bb
- withpen pencircle scaled ww s_spec;
- endfor;
- clip currentpicture to pp;
- addto newpic also currentpicture;
- currentpicture := curpic;
- addto currentpicture also newpic;
- endgroup
-enddef;
+ draw image(s_draw s_path s_text) numberstriped(3,0,0) s_spec ;
+enddef ;
def stripe_path_a (text s_spec) (text s_draw) expr s_path =
- do_stripe_path_a (s_spec) (s_draw) (s_path)
+ do_stripe_path_a (s_spec) (s_draw) (s_path)
enddef;
def do_stripe_path_a (text s_spec) (text s_draw) (expr s_path) text s_text =
- begingroup
- save curpic, newpic, pp; picture curpic, newpic; path pp ;
- pp := s_path ;
- curpic := currentpicture;
- currentpicture := nullpicture;
- s_draw pp s_text ;
- def do_stripe_rotation (expr p) =
- (currentpicture rotatedaround(center p,stripe_angle))
- enddef ;
- s_max := max
- (xpart llcorner do_stripe_rotation(currentpicture),
- xpart urcorner do_stripe_rotation(currentpicture),
- ypart llcorner do_stripe_rotation(currentpicture),
- ypart urcorner do_stripe_rotation(currentpicture));
- newpic := currentpicture;
- currentpicture := nullpicture;
- for i=-s_max-.5stripe_gap step stripe_gap until s_max:
- draw (-s_max,i)--(s_max,i) s_spec;
- endfor;
- currentpicture := do_stripe_rotation(newpic);
- clip currentpicture to pp ;
- addto newpic also currentpicture;
- currentpicture := curpic;
- addto currentpicture also newpic;
- endgroup
-enddef;
+ draw image(s_draw s_path s_text) anglestriped(3,0,0) s_spec ;
+enddef ;
%D A few normalizing macros:
%D
@@ -507,46 +637,43 @@ enddef;
% natural_width := xpart urcorner currentpicture - xpart llcorner currentpicture;
% currentpicture := currentpicture scaled (the_width/natural_width) ;
-% TODO TODO TODO TODO, not yet ok
-
primarydef p xsized w =
- (p if (bbwidth (p)>0) and (w>0) : scaled (w/bbwidth (p)) fi)
+ (p if (bbwidth (p)>0) and (w>0) : scaled (w/bbwidth (p)) fi)
enddef ;
primarydef p ysized h =
- (p if (bbheight(p)>0) and (h>0) : scaled (h/bbheight(p)) fi)
+ (p if (bbheight(p)>0) and (h>0) : scaled (h/bbheight(p)) fi)
enddef ;
primarydef p xysized s =
- begingroup ;
+ begingroup
save wh, w, h ; pair wh ; numeric w, h ;
wh := paired (s) ; w := bbwidth(p) ; h := bbheight(p) ;
- (p if (w>0) and (h>0) :
- if xpart wh > 0 : xscaled (xpart wh/w) fi
- if ypart wh > 0 : yscaled (ypart wh/h) fi
- fi)
- endgroup
+ p
+ if (w>0) and (h>0) :
+ if xpart wh > 0 : xscaled (xpart wh/w) fi
+ if ypart wh > 0 : yscaled (ypart wh/h) fi
+ fi
+ endgroup
enddef ;
-primarydef p sized wh =
- (p xysized wh)
-enddef ;
+let sized = xysized ;
-def xscale_currentpicture(expr w) =
- currentpicture := currentpicture xsized w ;
+def xscale_currentpicture(expr w) = % obsolete
+ currentpicture := currentpicture xsized w ;
enddef;
-def yscale_currentpicture(expr h) =
- currentpicture := currentpicture ysized h ;
+def yscale_currentpicture(expr h) = % obsolete
+ currentpicture := currentpicture ysized h ;
enddef;
-def xyscale_currentpicture(expr w, h) =
- currentpicture := currentpicture xysized (w,h) ;
+def xyscale_currentpicture(expr w, h) = % obsolete
+ currentpicture := currentpicture xysized (w,h) ;
enddef;
-def scale_currentpicture(expr w, h) =
- currentpicture := currentpicture xsized w ;
- currentpicture := currentpicture ysized h ;
+def scale_currentpicture(expr w, h) = % obsolete
+ currentpicture := currentpicture xsized w ;
+ currentpicture := currentpicture ysized h ;
enddef;
%D A full circle is centered at the origin, while a unitsquare
@@ -562,28 +689,28 @@ unitcircle := fullcircle shifted urcorner fullcircle ;
path urcircle, ulcircle, llcircle, lrcircle ;
-urcircle := origin--(+.5,0)&(+.5,0){up} ..(0,+.5)&(0,+.5)--cycle ;
-ulcircle := origin--(0,+.5)&(0,+.5){left} ..(-.5,0)&(-.5,0)--cycle ;
-llcircle := origin--(-.5,0)&(-.5,0){down} ..(0,-.5)&(0,-.5)--cycle ;
-lrcircle := origin--(0,-.5)&(0,-.5){right}..(+.5,0)&(+.5,0)--cycle ;
+urcircle := origin -- (+.5,0) & (+.5,0){up} .. (0,+.5) & (0,+.5) -- cycle ;
+ulcircle := origin -- (0,+.5) & (0,+.5){left} .. (-.5,0) & (-.5,0) -- cycle ;
+llcircle := origin -- (-.5,0) & (-.5,0){down} .. (0,-.5) & (0,-.5) -- cycle ;
+lrcircle := origin -- (0,-.5) & (0,-.5){right} .. (+.5,0) & (+.5,0) -- cycle ;
path tcircle, bcircle, lcircle, rcircle ;
-tcircle = origin--(+.5,0)&(+.5,0){up} ..(0,+.5)..{down} (-.5,0)--cycle ;
-bcircle = origin--(-.5,0)&(-.5,0){down} ..(0,-.5)..{up} (+.5,0)--cycle ;
-lcircle = origin--(0,+.5)&(0,+.5){left} ..(-.5,0)..{right}(0,-.5)--cycle ;
-rcircle = origin--(0,-.5)&(0,-.5){right}..(+.5,0)..{left} (0,+.5)--cycle ;
+tcircle = origin -- (+.5,0) & (+.5,0) {up} .. (0,+.5) .. {down} (-.5,0) -- cycle ;
+bcircle = origin -- (-.5,0) & (-.5,0) {down} .. (0,-.5) .. {up} (+.5,0) -- cycle ;
+lcircle = origin -- (0,+.5) & (0,+.5) {left} .. (-.5,0) .. {right} (0,-.5) -- cycle ;
+rcircle = origin -- (0,-.5) & (0,-.5) {right} .. (+.5,0) .. {left} (0,+.5) -- cycle ;
-path urtriangle, ultriangle, lltriangle, lrtriangle ;
+path urtriangle, ultriangle, lltriangle, lrtriangle ; % watch out: it's contrary to what you expect and starts at the origin
-urtriangle := origin--(+.5,0)--(0,+.5)--cycle ;
-ultriangle := origin--(0,+.5)--(-.5,0)--cycle ;
-lltriangle := origin--(-.5,0)--(0,-.5)--cycle ;
-lrtriangle := origin--(0,-.5)--(+.5,0)--cycle ;
+urtriangle := origin -- (+.5,0) -- (0,+.5) -- cycle ;
+ultriangle := origin -- (0,+.5) -- (-.5,0) -- cycle ;
+lltriangle := origin -- (-.5,0) -- (0,-.5) -- cycle ;
+lrtriangle := origin -- (0,-.5) -- (+.5,0) -- cycle ;
path unitdiamond, fulldiamond ;
-unitdiamond := (.5,0)--(1,.5)--(.5,1)--(0,.5)--cycle ;
+unitdiamond := (.5,0) -- (1,.5) -- (.5,1) -- (0,.5) -- cycle ;
fulldiamond := unitdiamond shifted - center unitdiamond ;
%D More robust:
@@ -598,46 +725,49 @@ fulldiamond := unitdiamond shifted - center unitdiamond ;
%D Shorter
-primarydef p xyscaled q =
- begingroup ; save qq ; pair qq ; qq = paired(q) ;
- ( p
- if xpart qq<>0 : xscaled (xpart qq) fi
- if ypart qq<>0 : yscaled (ypart qq) fi )
- endgroup
+primarydef p xyscaled q = % secondarydef does not work out well
+ begingroup
+ save qq ; pair qq ;
+ qq = paired(q) ;
+ p
+ if xpart qq <> 0 : xscaled (xpart qq) fi
+ if ypart qq <> 0 : yscaled (ypart qq) fi
+ endgroup
enddef ;
-%D Experimental, see folder-3.tex.
+%D Some personal code that might move to another module
def set_grid(expr w, h, nx, ny) =
- boolean grid[][] ; boolean grid_full ;
- grid_w := w ;
- grid_h := h ;
- grid_nx := nx ;
- grid_ny := ny ;
- grid_x := round(w/grid_nx) ; % +.5) ;
- grid_y := round(h/grid_ny) ; % +.5) ;
- grid_left := (1+grid_x)*(1+grid_y) ;
- grid_full := false ;
- for i=0 upto grid_x:
- for j=0 upto grid_y:
- grid[i][j] := false ;
+ boolean grid[][] ; boolean grid_full ;
+ numeric grid_w, grid_h, grid_nx, grid_ny, grid_x, grid_y, grid_left ;
+ grid_w := w ;
+ grid_h := h ;
+ grid_nx := nx ;
+ grid_ny := ny ;
+ grid_x := round(w/grid_nx) ; % +.5) ;
+ grid_y := round(h/grid_ny) ; % +.5) ;
+ grid_left := (1+grid_x)*(1+grid_y) ;
+ grid_full := false ;
+ for i=0 upto grid_x :
+ for j=0 upto grid_y :
+ grid[i][j] := false ;
+ endfor ;
endfor ;
- endfor ;
enddef ;
vardef new_on_grid(expr _dx_, _dy_) =
- dx := _dx_ ;
- dy := _dy_ ;
- ddx := min(round(dx/grid_nx),grid_x) ; % +.5),grid_x) ;
- ddy := min(round(dy/grid_ny),grid_y) ; % +.5),grid_y) ;
- if not grid_full and not grid[ddx][ddy]:
- grid[ddx][ddy] := true ;
- grid_left := grid_left-1 ;
- grid_full := (grid_left=0) ;
- true
- else:
- false
- fi
+ dx := _dx_ ;
+ dy := _dy_ ;
+ ddx := min(round(dx/grid_nx),grid_x) ; % +.5),grid_x) ;
+ ddy := min(round(dy/grid_ny),grid_y) ; % +.5),grid_y) ;
+ if not grid_full and not grid[ddx][ddy] :
+ grid[ddx][ddy] := true ;
+ grid_left := grid_left-1 ;
+ grid_full := (grid_left=0) ;
+ true
+ else :
+ false
+ fi
enddef ;
%D usage: \type{innerpath peepholed outerpath}.
@@ -650,79 +780,71 @@ enddef ;
%D endfig;
secondarydef p peepholed q =
- begingroup ;
- save start ; pair start ; start := point 0 of p ;
- if xpart start >= xpart center p :
- if ypart start >= ypart center p :
- urcorner q -- ulcorner q -- llcorner q -- lrcorner q --
- reverse p -- lrcorner q -- cycle
- else :
- lrcorner q -- urcorner q -- ulcorner q -- llcorner q --
- reverse p -- llcorner q -- cycle
- fi
- else :
- if ypart start > ypart center p :
- ulcorner q -- llcorner q -- lrcorner q -- urcorner q --
- reverse p -- urcorner q -- cycle
+ begingroup
+ save start ; pair start ;
+ start := point 0 of p ;
+ if xpart start >= xpart center p :
+ if ypart start >= ypart center p :
+ urcorner q -- ulcorner q -- llcorner q -- lrcorner q --
+ reverse p -- lrcorner q -- cycle
+ else :
+ lrcorner q -- urcorner q -- ulcorner q -- llcorner q --
+ reverse p -- llcorner q -- cycle
+ fi
else :
- llcorner q -- lrcorner q -- urcorner q -- ulcorner q --
- reverse p -- ulcorner q -- cycle
+ if ypart start > ypart center p :
+ ulcorner q -- llcorner q -- lrcorner q -- urcorner q --
+ reverse p -- urcorner q -- cycle
+ else :
+ llcorner q -- lrcorner q -- urcorner q -- ulcorner q --
+ reverse p -- ulcorner q -- cycle
+ fi
fi
- fi
- endgroup
+ endgroup
enddef ;
boolean intersection_found ;
secondarydef p intersection_point q =
- begingroup
+ begingroup
save x_, y_ ;
(x_,y_) = p intersectiontimes q ;
if x_<0 :
- intersection_found := false ;
- center p % origin
+ intersection_found := false ;
+ center p % origin
else :
- intersection_found := true ;
- .5[point x_ of p, point y_ of q]
+ intersection_found := true ;
+ .5[point x_ of p, point y_ of q]
fi
- endgroup
+ endgroup
enddef ;
%D New, undocumented, experimental:
vardef tensecircle (expr width, height, offset) =
- ((-width/2,-height/2) ... (0,-height/2-offset) ...
- (+width/2,-height/2) ... (+width/2+offset,0) ...
- (+width/2,+height/2) ... (0,+height/2+offset) ...
- (-width/2,+height/2) ... (-width/2-offset,0) ... cycle)
+ (-width/2,-height/2) ... (0,-height/2-offset) ...
+ (+width/2,-height/2) ... (+width/2+offset,0) ...
+ (+width/2,+height/2) ... (0,+height/2+offset) ...
+ (-width/2,+height/2) ... (-width/2-offset,0) ... cycle
enddef ;
-%vardef tensecircle (expr width, height, offset) =
-% ((-width/2,-height/2)..(0,-height/2-offset)..(+width/2,-height/2) &
-% (+width/2,-height/2)..(+width/2+offset,0)..(+width/2,+height/2) &
-% (+width/2,+height/2)..(0,+height/2+offset)..(-width/2,+height/2) &
-% (-width/2,+height/2)..(-width/2-offset,0)..(-width/2,-height/2)..cycle)
-%enddef ;
-
vardef roundedsquare (expr width, height, offset) =
- ((offset,0)--(width-offset,0){right} ..
- (width,offset)--(width,height-offset){up} ..
- (width-offset,height)--(offset,height){left} ..
- (0,height-offset)--(0,offset){down} .. cycle)
+ (offset,0) -- (width-offset,0) {right} ..
+ (width,offset) -- (width,height-offset) {up} ..
+ (width-offset,height) -- (offset,height) {left} ..
+ (0,height-offset) -- (0,offset) {down} .. cycle
enddef ;
%D Some colors.
-color cyan ; cyan = (0,1,1) ;
-color magenta ; magenta = (1,0,1) ;
-color yellow ; yellow = (1,1,0) ;
-
def colortype(expr c) =
if cmykcolor c : cmykcolor elseif rgbcolor c : rgbcolor else : grayscale fi
enddef ;
+
vardef whitecolor(expr c) =
if cmykcolor c : (0,0,0,0) elseif rgbcolor c : (1,1,1) else : 1 fi
enddef ;
+
vardef blackcolor(expr c) =
if cmykcolor c : (0,0,0,1) elseif rgbcolor c : (0,0,0) else : 0 fi
enddef ;
@@ -730,112 +852,74 @@ enddef ;
%D Well, this is the dangerous and naive version:
def drawfill text t =
- fill t ;
- draw t ;
+ fill t ;
+ draw t ;
enddef;
%D This two step approach saves the path first, since it can
%D be a function. Attributes must not be randomized.
def drawfill expr c =
- path _c_ ; _c_ := c ;
- do_drawfill
+ path _c_ ; _c_ := c ;
+ mfun_do_drawfill
enddef ;
-def do_drawfill text t =
- draw _c_ t ;
- fill _c_ t ;
+def mfun_do_drawfill text t =
+ draw _c_ t ;
+ fill _c_ t ;
enddef;
def undrawfill expr c =
- drawfill c withcolor background
+ drawfill c withcolor background % rather useless
enddef ;
%D Moved from mp-char.mp
-vardef paired (expr d) =
- if pair d : d else : (d,d) fi
-enddef ;
-
-vardef tripled (expr d) =
- if color d : d else : (d,d,d) fi
-enddef ;
-
-primarydef p enlarged d =
- (p llmoved d -- p lrmoved d -- p urmoved d -- p ulmoved d -- cycle)
-enddef;
-
-primarydef p llenlarged d =
- (p llmoved d -- lrcorner p -- urcorner p -- ulcorner p -- cycle)
-enddef ;
-
-primarydef p lrenlarged d =
- (llcorner p -- p lrmoved d -- urcorner p -- ulcorner p -- cycle)
+vardef paired primary d =
+ if pair d : d else : (d,d) fi
enddef ;
-primarydef p urenlarged d =
- (llcorner p -- lrcorner p -- p urmoved d -- ulcorner p -- cycle)
+vardef tripled primary d =
+ if color d : d else : (d,d,d) fi
enddef ;
-primarydef p ulenlarged d =
- (llcorner p -- lrcorner p -- urcorner p -- p ulmoved d -- cycle)
-enddef ;
+% maybe secondaries:
-primarydef p llmoved d =
- ((llcorner p) shifted (-xpart paired(d),-ypart paired(d)))
-enddef ;
+primarydef p enlarged d = ( p llmoved d -- p lrmoved d -- p urmoved d -- p ulmoved d -- cycle ) enddef ;
+primarydef p llenlarged d = ( p llmoved d -- lrcorner p -- urcorner p -- ulcorner p -- cycle ) enddef ;
+primarydef p lrenlarged d = ( llcorner p -- p lrmoved d -- urcorner p -- ulcorner p -- cycle ) enddef ;
+primarydef p urenlarged d = ( llcorner p -- lrcorner p -- p urmoved d -- ulcorner p -- cycle ) enddef ;
+primarydef p ulenlarged d = ( llcorner p -- lrcorner p -- urcorner p -- p ulmoved d -- cycle ) enddef ;
-primarydef p lrmoved d =
- ((lrcorner p) shifted (+xpart paired(d),-ypart paired(d)))
-enddef ;
-
-primarydef p urmoved d =
- ((urcorner p) shifted (+xpart paired(d),+ypart paired(d)))
-enddef ;
-
-primarydef p ulmoved d =
- ((ulcorner p) shifted (-xpart paired(d),+ypart paired(d)))
-enddef ;
-
-primarydef p leftenlarged d =
- ((llcorner p) shifted (-d,0) -- lrcorner p --
- urcorner p -- (ulcorner p) shifted (-d,0) -- cycle)
-enddef ;
-
-primarydef p rightenlarged d =
- (llcorner p -- (lrcorner p) shifted (d,0) --
- (urcorner p) shifted (d,0) -- ulcorner p -- cycle)
-enddef ;
-
-primarydef p topenlarged d =
- (llcorner p -- lrcorner p --
- (urcorner p) shifted (0,d) -- (ulcorner p) shifted (0,d) -- cycle)
-enddef ;
+primarydef p llmoved d = ( (llcorner p) shifted (-xpart paired(d),-ypart paired(d)) ) enddef ;
+primarydef p lrmoved d = ( (lrcorner p) shifted (+xpart paired(d),-ypart paired(d)) ) enddef ;
+primarydef p urmoved d = ( (urcorner p) shifted (+xpart paired(d),+ypart paired(d)) ) enddef ;
+primarydef p ulmoved d = ( (ulcorner p) shifted (-xpart paired(d),+ypart paired(d)) ) enddef ;
-primarydef p bottomenlarged d =
- (llcorner p shifted (0,-d) -- lrcorner p shifted (0,-d) --
- urcorner p -- ulcorner p -- cycle)
-enddef ;
+primarydef p leftenlarged d = ( (llcorner p) shifted (-d,0) -- lrcorner p -- urcorner p -- (ulcorner p) shifted (-d,0) -- cycle ) enddef ;
+primarydef p rightenlarged d = ( llcorner p -- (lrcorner p) shifted (d,0) -- (urcorner p) shifted (d,0) -- ulcorner p -- cycle ) enddef ;
+primarydef p topenlarged d = ( llcorner p -- lrcorner p -- (urcorner p) shifted (0,d) -- (ulcorner p) shifted (0,d) -- cycle ) enddef ;
+primarydef p bottomenlarged d = ( llcorner p shifted (0,-d) -- lrcorner p shifted (0,-d) -- urcorner p -- ulcorner p -- cycle ) enddef ;
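+
+%D A quick sketch, not in the patch, of the enlarged family (the path is only
+%D illustrative):
+%D
+%D \starttyping
+%D path p ; p := fullcircle scaled 3cm ;
+%D draw p ;
+%D draw p enlarged 5pt dashed evenly ;  % bounding rectangle grown on all sides
+%D draw p topenlarged 10pt ;            % only the top edge moved up
+%D \stoptyping
+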
%D Handy for testing/debugging:
-primarydef p crossed d =
+primarydef p crossed d = (
if pair p :
- (p shifted (-d, 0) -- p --
- p shifted ( 0,-d) -- p --
- p shifted (+d, 0) -- p --
- p shifted ( 0,+d) -- p -- cycle)
+ p shifted (-d, 0) -- p --
+ p shifted ( 0,-d) -- p --
+ p shifted (+d, 0) -- p --
+ p shifted ( 0,+d) -- p -- cycle
else :
- (center p shifted (-d, 0) -- llcorner p --
- center p shifted ( 0,-d) -- lrcorner p --
- center p shifted (+d, 0) -- urcorner p --
- center p shifted ( 0,+d) -- ulcorner p -- cycle)
+ center p shifted (-d, 0) -- llcorner p --
+ center p shifted ( 0,-d) -- lrcorner p --
+ center p shifted (+d, 0) -- urcorner p --
+ center p shifted ( 0,+d) -- ulcorner p -- cycle
fi
-enddef ;
+) enddef ;
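+
+%D A debugging sketch, not part of the patch:
+%D
+%D \starttyping
+%D draw origin crossed 5pt ;                % a small cross at a point
+%D draw fullcircle scaled 2cm crossed 5pt ; % corners hooked up to the center
+%D \stoptyping
+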
%D Also handy (math ladders):
-vardef laddered expr p =
+vardef laddered primary p = % was expr
point 0 of p
for i=1 upto length(p) :
-- (xpart (point i of p), ypart (point (i-1) of p)) -- (point i of p)
@@ -849,182 +933,143 @@ enddef ;
% vardef topboundary primary p = (urcorner p -- ulcorner p) enddef ;
% vardef leftboundary primary p = (ulcorner p -- llcorner p) enddef ;
-vardef bottomboundary primary p =
- if pair p : p else : (llcorner p -- lrcorner p) fi
-enddef ;
-
-vardef rightboundary primary p =
- if pair p : p else : (lrcorner p -- urcorner p) fi
-enddef ;
-
-vardef topboundary primary p =
- if pair p : p else : (urcorner p -- ulcorner p) fi
-enddef ;
-
-vardef leftboundary primary p =
- if pair p : p else : (ulcorner p -- llcorner p) fi
-enddef ;
+vardef bottomboundary primary p = if pair p : p else : (llcorner p -- lrcorner p) fi enddef ;
+vardef rightboundary primary p = if pair p : p else : (lrcorner p -- urcorner p) fi enddef ;
+vardef topboundary primary p = if pair p : p else : (urcorner p -- ulcorner p) fi enddef ;
+vardef leftboundary primary p = if pair p : p else : (ulcorner p -- llcorner p) fi enddef ;
%D Nice too:
primarydef p superellipsed s =
- superellipse
- (.5[lrcorner p,urcorner p],
- .5[urcorner p,ulcorner p],
- .5[ulcorner p,llcorner p],
- .5[llcorner p,lrcorner p],
- s)
-enddef ;
+ superellipse (
+ .5[lrcorner p,urcorner p],
+ .5[urcorner p,ulcorner p],
+ .5[ulcorner p,llcorner p],
+ .5[llcorner p,lrcorner p],
+ s
+ )
+enddef ;
+
+primarydef p squeezed s = (
+ (llcorner p .. .5[llcorner p,lrcorner p] shifted ( 0, ypart paired(s)) .. lrcorner p) &
+ (lrcorner p .. .5[lrcorner p,urcorner p] shifted (-xpart paired(s), 0) .. urcorner p) &
+ (urcorner p .. .5[urcorner p,ulcorner p] shifted ( 0,-ypart paired(s)) .. ulcorner p) &
+ (ulcorner p .. .5[ulcorner p,llcorner p] shifted ( xpart paired(s), 0) .. llcorner p) & cycle
+) enddef ;
-primarydef p squeezed s =
- ((llcorner p .. .5[llcorner p,lrcorner p] shifted ( 0, ypart paired(s)) .. lrcorner p) &
- (lrcorner p .. .5[lrcorner p,urcorner p] shifted (-xpart paired(s), 0) .. urcorner p) &
- (urcorner p .. .5[urcorner p,ulcorner p] shifted ( 0,-ypart paired(s)) .. ulcorner p) &
- (ulcorner p .. .5[ulcorner p,llcorner p] shifted ( xpart paired(s), 0) .. llcorner p) & cycle)
+primarydef p randomshifted s =
+ begingroup ;
+ save ss ; pair ss ;
+ ss := paired(s) ;
+ p shifted (-.5xpart ss + uniformdeviate xpart ss,-.5ypart ss + uniformdeviate ypart ss)
+ endgroup
enddef ;
-primarydef p randomshifted s =
- begingroup ; save ss ; pair ss ; ss := paired(s) ;
- p shifted (-.5xpart ss + uniformdeviate xpart ss,
- -.5ypart ss + uniformdeviate ypart ss)
- endgroup
-enddef ;
-
-%primarydef p randomized s =
-% for i=0 upto length(p)-1 :
-% ((point i of p) randomshifted s) .. controls
-% ((postcontrol i of p) randomshifted s) and
-% ((precontrol (i+1) of p) randomshifted s) ..
-% endfor cycle
-%enddef ;
-
-primarydef p randomized s =
- (if path p :
- for i=0 upto length(p)-1 :
- ((point i of p) randomshifted s) .. controls
- ((postcontrol i of p) randomshifted s) and
- ((precontrol (i+1) of p) randomshifted s) ..
- endfor
- if cycle p :
- cycle
- else :
- ((point length(p) of p) randomshifted s)
- fi
- elseif pair p :
- p randomshifted s
- elseif cmykcolor p :
- if color s :
- (uniformdeviate cyanpart s * cyanpart p,
- uniformdeviate magentapart s * magentapart p,
- uniformdeviate yellowpart s * yellowpart p,
- uniformdeviate blackpart s * blackpart p)
- elseif pair s :
- ((xpart s + uniformdeviate (ypart s - xpart s)) * p)
- else :
- (uniformdeviate s * p)
- fi
- elseif rgbcolor p :
- if color s :
- (uniformdeviate redpart s * redpart p,
- uniformdeviate greenpart s * greenpart p,
- uniformdeviate bluepart s * bluepart p)
- elseif pair s :
- ((xpart s + uniformdeviate (ypart s - xpart s)) * p)
- else :
- (uniformdeviate s * p)
- fi
- elseif color p :
- if color s :
- (uniformdeviate graypart s * graypart p)
- elseif pair s :
- ((xpart s + uniformdeviate (ypart s - xpart s)) * p)
+primarydef p randomized s = (
+ if path p :
+ for i=0 upto length(p)-1 :
+ ((point i of p) randomshifted s) .. controls
+ ((postcontrol i of p) randomshifted s) and
+ ((precontrol (i+1) of p) randomshifted s) ..
+ endfor
+ if cycle p :
+ cycle
+ else :
+ ((point length(p) of p) randomshifted s)
+ fi
+ elseif pair p :
+ p randomshifted s
+ elseif cmykcolor p :
+ if color s :
+ ((uniformdeviate cyanpart s) * cyanpart p,
+ (uniformdeviate magentapart s) * magentapart p,
+ (uniformdeviate yellowpart s) * yellowpart p,
+ (uniformdeviate blackpart s) * blackpart p)
+ elseif pair s :
+ ((xpart s + (uniformdeviate (ypart s - xpart s))) * p)
+ else :
+ ((uniformdeviate s) * p)
+ fi
+ elseif rgbcolor p :
+ if color s :
+ ((uniformdeviate redpart s) * redpart p,
+ (uniformdeviate greenpart s) * greenpart p,
+ (uniformdeviate bluepart s) * bluepart p)
+ elseif pair s :
+ ((xpart s + (uniformdeviate (ypart s - xpart s))) * p)
+ else :
+ ((uniformdeviate s) * p)
+ fi
+ elseif color p :
+ if color s :
+ ((uniformdeviate greypart s) * greypart p)
+ elseif pair s :
+ ((xpart s + (uniformdeviate (ypart s - xpart s))) * p)
+ else :
+ ((uniformdeviate s) * p)
+ fi
else :
- (uniformdeviate s * p)
+ p + uniformdeviate s
fi
- else :
- p + uniformdeviate s
- fi)
-enddef ;
+) enddef ;
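+
+%D A sketch, not in the patch, of randomized applied to the supported operand
+%D types; the amounts are only illustrative:
+%D
+%D \starttyping
+%D draw fullcircle scaled 3cm randomized 5pt ;  % jittered path
+%D draw origin randomized (10pt,10pt) ;         % jittered point
+%D fill unitsquare scaled 1cm withcolor (1,0,0) randomized (.5,.5,.5) ; % jittered color
+%D \stoptyping
+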
%D Not perfect (alternative for interpath)
vardef interpolated(expr s, p, q) =
- save m ; m := max(length(p),length(q)) ;
- (if path p :
- for i=0 upto m-1 :
- s[point (i /m) along p,
- point (i /m) along q] .. controls
- s[postcontrol (i /m) along p,
- postcontrol (i /m) along q] and
- s[precontrol ((i+1)/m) along p,
- precontrol ((i+1)/m) along q] ..
- endfor
- if cycle p :
- cycle
- else :
- s[point infinity of p,
- point infinity of q]
- fi
- else :
- a[p,q]
- fi)
+ save m ; numeric m ;
+ m := max(length(p),length(q)) ;
+ if path p :
+ for i=0 upto m-1 :
+ s[point (i /m) along p,point (i /m) along q] .. controls
+ s[postcontrol (i /m) along p,postcontrol (i /m) along q] and
+ s[precontrol ((i+1)/m) along p,precontrol ((i+1)/m) along q] ..
+ endfor
+ if cycle p :
+ cycle
+ else :
+ s[point infinity of p,point infinity of q]
+ fi
+ else :
+ a[p,q]
+ fi
enddef ;
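
%D A sketch, not part of the patch; p and q are illustrative paths:
%D
%D \starttyping
%D path p, q ;
%D p := fullcircle scaled 3cm ;
%D q := unitsquare scaled 3cm shifted (-1.5cm,-1.5cm) ;
%D draw interpolated(.5, p, q) ; % a shape halfway between circle and square
%D \stoptyping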
%D Interesting too:
-% primarydef p parallel s =
-% begingroup ; save q, b ; path q ; numeric b ;
-% b := xpart (lrcorner p - llcorner p) ;
-% q := p if b>0 : scaled ((b+2s)/b) fi ;
-% (q shifted (center p-center q))
-% endgroup
-% enddef ;
-
-%primarydef p parallel s =
-% begingroup ; save q, w,h ; path q ; numeric w, h ;
-% w := bbwidth(p) ; h := bbheight(p) ;
-% q := p if (w>0) and (h>0) :
-% xyscaled ((w+2*xpart paired(s))/w,(h+2*ypart paired(s))/h) fi ;
-% (q shifted (center p-center q))
-% endgroup
-%enddef ;
-
-primarydef p paralleled d =
+primarydef p paralleled d = (
p shifted if d < 0 : - fi ((point abs(d) on (p rotatedaround(point 0 of p,90))) - point 0 of p)
-enddef ;
+) enddef ;
vardef punked primary p =
- (point 0 of p for i=1 upto length(p)-1 : -- point i of p endfor
- if cycle p : -- cycle else : -- point length(p) of p fi)
+ point 0 of p for i=1 upto length(p)-1 : -- point i of p endfor
+ if cycle p : -- cycle else : -- point length(p) of p fi
enddef ;
vardef curved primary p =
- (point 0 of p for i=1 upto length(p)-1 : .. point i of p endfor
- if cycle p : .. cycle else : .. point length(p) of p fi)
+ point 0 of p for i=1 upto length(p)-1 : .. point i of p endfor
+ if cycle p : .. cycle else : .. point length(p) of p fi
enddef ;
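
%D A small sketch, not from the patch: punked replaces the curved segments by
%D straight ones, curved does the opposite.
%D
%D \starttyping
%D draw punked (fullcircle scaled 3cm) ;               % an octagon like polygon
%D draw curved (unitsquare scaled 3cm) dashed evenly ; % a smoothed square
%D \stoptyping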
primarydef p blownup s =
- begingroup
- save _p_ ; path _p_ ; _p_ := p xysized
- (bbwidth (p)+2(xpart paired(s)),
- bbheight(p)+2(ypart paired(s))) ;
- (_p_ shifted (center p - center _p_))
- endgroup
+ begingroup
+ save _p_ ; path _p_ ;
+ _p_ := p xysized (bbwidth(p)+2(xpart paired(s)),bbheight(p)+2(ypart paired(s))) ;
+ (_p_ shifted (center p - center _p_))
+ endgroup
enddef ;
%D Rather fundamental.
% not yet ok
-def leftrightpath(expr p, l) = % used in s-pre-19
- save q, r, t, b ; path q, r ; pair t, b ;
- t := (ulcorner p -- urcorner p) intersection_point p ;
- b := (llcorner p -- lrcorner p) intersection_point p ;
- r := if xpart directionpoint t of p < 0 : reverse p else : p fi ; % r is needed, else problems when reverse is fed
- q := r cutbefore if l: t else: b fi ;
- q := q if xpart point 0 of r > 0 : &
- r fi cutafter if l: b else: t fi ;
- q
+vardef leftrightpath(expr p, l) = % used in s-pre-19
+ save q, r, t, b ; path q, r ; pair t, b ;
+ t := (ulcorner p -- urcorner p) intersection_point p ;
+ b := (llcorner p -- lrcorner p) intersection_point p ;
+ r := if xpart directionpoint t of p < 0 : reverse p else : p fi ; % r is needed, else problems when reverse is fed
+ q := r cutbefore if l: t else: b fi ;
+ q := q if xpart point 0 of r > 0 : & r fi cutafter if l: b else: t fi ;
+ q
enddef ;
vardef leftpath expr p = leftrightpath(p,true ) enddef ;
@@ -1033,10 +1078,10 @@ vardef rightpath expr p = leftrightpath(p,false) enddef ;
%D Drawoptions
def saveoptions =
- save _op_ ; def _op_ = enddef ;
+ save _op_ ; def _op_ = enddef ;
enddef ;
-%D Tracing.
+%D Tracing. (not yet in lexer)
let normaldraw = draw ;
let normalfill = fill ;
@@ -1046,7 +1091,6 @@ let normalfill = fill ;
def normalfill expr c = addto currentpicture contour c _op_ enddef ;
def normaldraw expr p = addto currentpicture if picture p: also p else: doublepath p withpen currentpen fi _op_ enddef ;
-
def drawlineoptions (text t) = def _lin_opt_ = t enddef ; enddef ;
def drawpointoptions (text t) = def _pnt_opt_ = t enddef ; enddef ;
def drawcontroloptions(text t) = def _ctr_opt_ = t enddef ; enddef ;
@@ -1056,13 +1100,13 @@ def drawboundoptions (text t) = def _bnd_opt_ = t enddef ; enddef ;
def drawpathoptions (text t) = def _pth_opt_ = t enddef ; enddef ;
def resetdrawoptions =
- drawlineoptions (withpen pencircle scaled 1pt withcolor .5white) ;
- drawpointoptions (withpen pencircle scaled 4pt withcolor black) ;
- drawcontroloptions(withpen pencircle scaled 2.5pt withcolor black) ;
- drawlabeloptions () ;
- draworiginoptions (withpen pencircle scaled 1pt withcolor .5white) ;
- drawboundoptions (dashed evenly _ori_opt_) ;
- drawpathoptions (withpen pencircle scaled 5pt withcolor .8white) ;
+ drawlineoptions (withpen pencircle scaled 1pt withcolor .5white) ;
+ drawpointoptions (withpen pencircle scaled 4pt withcolor black) ;
+ drawcontroloptions(withpen pencircle scaled 2.5pt withcolor black) ;
+ drawlabeloptions () ;
+ draworiginoptions (withpen pencircle scaled 1pt withcolor .5white) ;
+ drawboundoptions (dashed evenly _ori_opt_) ;
+ drawpathoptions (withpen pencircle scaled 5pt withcolor .8white) ;
enddef ;
resetdrawoptions ;
@@ -1070,96 +1114,95 @@ resetdrawoptions ;
%D Path.
def drawpath expr p =
- normaldraw p _pth_opt_
+ normaldraw p _pth_opt_
enddef ;
%D Arrow.
vardef drawarrowpath expr p =
- save autoarrows ; boolean autoarrows ; autoarrows := true ;
- drawarrow p _pth_opt_
+ save autoarrows ; boolean autoarrows ; autoarrows := true ;
+ drawarrow p _pth_opt_
enddef ;
-%def drawarrowpath expr p =
-% begingroup ;
-% save autoarrows ; boolean autoarrows ; autoarrows := true ;
-% save arrowpath ; path arrowpath ; arrowpath := p ;
-% _drawarrowpath_
-%enddef ;
+% def drawarrowpath expr p =
+% begingroup ;
+% save autoarrows ; boolean autoarrows ; autoarrows := true ;
+% save arrowpath ; path arrowpath ; arrowpath := p ;
+% _drawarrowpath_
+% enddef ;
%
-%def _drawarrowpath_ text t =
-% drawarrow arrowpath _pth_opt_ t ;
-% endgroup ;
-%enddef ;
+% def _drawarrowpath_ text t =
+% drawarrow arrowpath _pth_opt_ t ;
+% endgroup ;
+% enddef ;
def midarrowhead expr p =
- arrowhead p cutafter
- (point length(p cutafter point .5 along p)+ahlength on p)
+ arrowhead p cutafter (point length(p cutafter point .5 along p)+ahlength on p)
enddef ;
vardef arrowheadonpath (expr p, s) =
- save autoarrows ; boolean autoarrows ; autoarrows := true ;
- set_ahlength(scaled ahfactor) ; % added
- arrowhead p if s<1 : cutafter (point (s*arclength(p)+.5ahlength) on p) fi
+ save autoarrows ; boolean autoarrows ;
+ autoarrows := true ;
+ set_ahlength(scaled ahfactor) ; % added
+ arrowhead p if s<1 : cutafter (point (s*arclength(p)+.5ahlength) on p) fi
enddef ;
%D Points.
def drawpoint expr c =
- if string c :
- string _c_ ; _c_ := "(" & c & ")" ;
- dotlabel.urt(_c_, scantokens _c_) ;
- drawdot scantokens _c_
- else :
- dotlabel.urt("(" & decimal xpart c & "," & decimal ypart c & ")", c) ;
- drawdot c
- fi _pnt_opt_
+ if string c :
+ string _c_ ;
+ _c_ := "(" & c & ")" ;
+ dotlabel.urt(_c_, scantokens _c_) ;
+ drawdot scantokens _c_
+ else :
+ dotlabel.urt("(" & decimal xpart c & "," & decimal ypart c & ")", c) ;
+ drawdot c
+ fi _pnt_opt_
enddef ;
%D PathPoints.
-def drawpoints expr c = path _c_ ; _c_ := c ; do_drawpoints enddef ;
-def drawcontrolpoints expr c = path _c_ ; _c_ := c ; do_drawcontrolpoints enddef ;
-def drawcontrollines expr c = path _c_ ; _c_ := c ; do_drawcontrollines enddef ;
-def drawpointlabels expr c = path _c_ ; _c_ := c ; do_drawpointlabels enddef ;
+def drawpoints expr c = path _c_ ; _c_ := c ; mfun_draw_points enddef ;
+def drawcontrolpoints expr c = path _c_ ; _c_ := c ; mfun_draw_controlpoints enddef ;
+def drawcontrollines expr c = path _c_ ; _c_ := c ; mfun_draw_controllines enddef ;
+def drawpointlabels expr c = path _c_ ; _c_ := c ; mfun_draw_pointlabels enddef ;
-def do_drawpoints text t =
- for _i_=0 upto length(_c_) :
- normaldraw point _i_ of _c_ _pnt_opt_ t ;
- endfor ;
+def mfun_draw_points text t =
+ for _i_=0 upto length(_c_) :
+ normaldraw point _i_ of _c_ _pnt_opt_ t ;
+ endfor ;
enddef;
-def do_drawcontrolpoints text t =
- for _i_=0 upto length(_c_) :
- normaldraw precontrol _i_ of _c_ _ctr_opt_ t ;
- normaldraw postcontrol _i_ of _c_ _ctr_opt_ t ;
- endfor ;
+def mfun_draw_controlpoints text t =
+ for _i_=0 upto length(_c_) :
+ normaldraw precontrol _i_ of _c_ _ctr_opt_ t ;
+ normaldraw postcontrol _i_ of _c_ _ctr_opt_ t ;
+ endfor ;
enddef;
-def do_drawcontrollines text t =
- for _i_=0 upto length(_c_) :
- normaldraw point _i_ of _c_ -- precontrol _i_ of _c_ _lin_opt_ t ;
- normaldraw point _i_ of _c_ -- postcontrol _i_ of _c_ _lin_opt_ t ;
- endfor ;
+def mfun_draw_controllines text t =
+ for _i_=0 upto length(_c_) :
+ normaldraw point _i_ of _c_ -- precontrol _i_ of _c_ _lin_opt_ t ;
+ normaldraw point _i_ of _c_ -- postcontrol _i_ of _c_ _lin_opt_ t ;
+ endfor ;
enddef;
boolean swappointlabels ; swappointlabels := false ;
-def do_drawpointlabels text t =
- for _i_=0 upto length(_c_) :
- pair _u_ ; _u_ := unitvector(direction _i_ of _c_)
- rotated if swappointlabels : - fi 90 ;
- pair _p_ ; _p_ := (point _i_ of _c_) ;
- _u_ := 12 * defaultscale * _u_ ;
- normaldraw thelabel ( decimal _i_,
- _p_ shifted if cycle _c_ and (_i_=0) : - fi _u_ ) _lab_opt_ t ;
- endfor ;
+def mfun_draw_pointlabels text t =
+ for _i_=0 upto length(_c_) :
+ pair _u_ ; _u_ := unitvector(direction _i_ of _c_) rotated if swappointlabels : - fi 90 ;
+ pair _p_ ; _p_ := (point _i_ of _c_) ;
+ _u_ := 12 * defaultscale * _u_ ;
+ normaldraw thelabel ( decimal _i_, _p_ shifted if cycle _c_ and (_i_=0) : - fi _u_ ) _lab_opt_ t ;
+ endfor ;
enddef;
%D Bounding box.
def drawboundingbox expr p =
- normaldraw boundingbox p _bnd_opt_
+ normaldraw boundingbox p _bnd_opt_
enddef ;
%D Origin.
@@ -1167,10 +1210,8 @@ enddef ;
numeric originlength ; originlength := .5cm ;
def draworigin text t =
- normaldraw (origin shifted (0, originlength) --
- origin shifted (0,-originlength)) _ori_opt_ t ;
- normaldraw (origin shifted ( originlength,0) --
- origin shifted (-originlength,0)) _ori_opt_ t ;
+ normaldraw (origin shifted (0, originlength) -- origin shifted (0,-originlength)) _ori_opt_ t ;
+ normaldraw (origin shifted ( originlength,0) -- origin shifted (-originlength,0)) _ori_opt_ t ;
enddef;
%D Axis.
@@ -1178,101 +1219,108 @@ enddef;
numeric tickstep ; tickstep := 5mm ;
numeric ticklength ; ticklength := 2mm ;
-def drawxticks expr c = path _c_ ; _c_ := c ; do_drawxticks enddef ;
-def drawyticks expr c = path _c_ ; _c_ := c ; do_drawyticks enddef ;
-def drawticks expr c = path _c_ ; _c_ := c ; do_drawticks enddef ;
+def drawxticks expr c = path _c_ ; _c_ := c ; mfun_draw_xticks enddef ;
+def drawyticks expr c = path _c_ ; _c_ := c ; mfun_draw_yticks enddef ;
+def drawticks expr c = path _c_ ; _c_ := c ; mfun_draw_ticks enddef ;
% Adding eps prevents disappearance due to rounding errors.
-def do_drawxticks text t =
- for i=0 step -tickstep until xpart llcorner _c_ - eps :
- if (i<=xpart lrcorner _c_) :
- normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
- fi ;
- endfor ;
- for i=0 step tickstep until xpart lrcorner _c_ + eps :
- if (i>=xpart llcorner _c_) :
- normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
- fi ;
- endfor ;
- normaldraw (llcorner _c_ -- ulcorner _c_)
- shifted (-xpart llcorner _c_,0) _ori_opt_ t ;
+def mfun_draw_xticks text t =
+ for i=0 step -tickstep until xpart llcorner _c_ - eps :
+ if (i<=xpart lrcorner _c_) :
+ normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ for i=0 step tickstep until xpart lrcorner _c_ + eps :
+ if (i>=xpart llcorner _c_) :
+ normaldraw (i,-ticklength)--(i,ticklength) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ normaldraw (llcorner _c_ -- ulcorner _c_) shifted (-xpart llcorner _c_,0) _ori_opt_ t ;
enddef ;
-def do_drawyticks text t =
- for i=0 step -tickstep until ypart llcorner _c_ - eps :
- if (i<=ypart ulcorner _c_) :
- normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
- fi ;
- endfor ;
- for i=0 step tickstep until ypart ulcorner _c_ + eps :
- if (i>=ypart llcorner _c_) :
- normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
- fi ;
- endfor ;
- normaldraw (llcorner _c_ -- lrcorner _c_)
- shifted (0,-ypart llcorner _c_) _ori_opt_ t ;
+def mfun_draw_yticks text t =
+ for i=0 step -tickstep until ypart llcorner _c_ - eps :
+ if (i<=ypart ulcorner _c_) :
+ normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ for i=0 step tickstep until ypart ulcorner _c_ + eps :
+ if (i>=ypart llcorner _c_) :
+ normaldraw (-ticklength,i)--(ticklength,i) _ori_opt_ t ;
+ fi ;
+ endfor ;
+ normaldraw (llcorner _c_ -- lrcorner _c_) shifted (0,-ypart llcorner _c_) _ori_opt_ t ;
enddef ;
-def do_drawticks text t =
- drawxticks _c_ t ;
- drawyticks _c_ t ;
+def mfun_draw_ticks text t =
+ drawxticks _c_ t ;
+ drawyticks _c_ t ;
enddef ;
%D All of it except axis.
def drawwholepath expr p =
- draworigin ;
- drawpath p ;
- drawcontrollines p ;
- drawcontrolpoints p ;
- drawpoints p ;
- drawboundingbox p ;
- drawpointlabels p ;
+ draworigin ;
+ drawpath p ;
+ drawcontrollines p ;
+ drawcontrolpoints p ;
+ drawpoints p ;
+ drawboundingbox p ;
+ drawpointlabels p ;
enddef ;
%D Tracing.
def visualizeddraw expr c =
- if picture c : normaldraw c else : path _c_ ; _c_ := c ; do_visualizeddraw fi
+ if picture c : normaldraw c else : path _c_ ; _c_ := c ; do_visualizeddraw fi
enddef ;
def visualizedfill expr c =
- if picture c : normalfill c else : path _c_ ; _c_ := c ; do_visualizedfill fi
+ if picture c : normalfill c else : path _c_ ; _c_ := c ; do_visualizedfill fi
enddef ;
def do_visualizeddraw text t =
- draworigin ;
- drawpath _c_ t ;
- drawcontrollines _c_ ;
- drawcontrolpoints _c_ ;
- drawpoints _c_ ;
- drawboundingbox _c_ ;
- drawpointlabels _c_ ;
+ draworigin ;
+ drawpath _c_ t ;
+ drawcontrollines _c_ ;
+ drawcontrolpoints _c_ ;
+ drawpoints _c_ ;
+ drawboundingbox _c_ ;
+ drawpointlabels _c_ ;
enddef ;
def do_visualizedfill text t =
- if cycle _c_ : normalfill _c_ t fi ;
- draworigin ;
- drawcontrollines _c_ ;
- drawcontrolpoints _c_ ;
- drawpoints _c_ ;
- drawboundingbox _c_ ;
- drawpointlabels _c_ ;
+ if cycle _c_ : normalfill _c_ t fi ;
+ draworigin ;
+ drawcontrollines _c_ ;
+ drawcontrolpoints _c_ ;
+ drawpoints _c_ ;
+ drawboundingbox _c_ ;
+ drawpointlabels _c_ ;
enddef ;
def visualizepaths =
- let fill = visualizedfill ;
- let draw = visualizeddraw ;
+ let fill = visualizedfill ;
+ let draw = visualizeddraw ;
enddef ;
def naturalizepaths =
- let fill = normalfill ;
- let draw = normaldraw ;
+ let fill = normalfill ;
+ let draw = normaldraw ;
enddef ;
extra_endfig := extra_endfig & " naturalizepaths ; " ;
+%D Nice tracer:
+
+def drawboundary primary p =
+ draw p dashed evenly withcolor white ;
+ draw p dashed oddly withcolor black ;
+ draw (- llcorner p) withpen pencircle scaled 3 withcolor white ;
+ draw (- llcorner p) withpen pencircle scaled 1.5 withcolor black ;
+enddef ;
+
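+%D A usage sketch, not part of the patch:
+%D
+%D \starttyping
+%D fill fullcircle scaled 3cm withcolor .8white ;
+%D drawboundary boundingbox currentpicture ;
+%D \stoptyping
+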
%D Also handy:
extra_beginfig := extra_beginfig & " truecorners := 0 ; " ; % restores
@@ -1287,46 +1335,57 @@ boolean autoarrows ; autoarrows := false ;
numeric ahfactor ; ahfactor := 2.5 ;
def set_ahlength (text t) =
-% ahlength := (ahfactor*pen_size(_op_ t)) ; % _op_ added
-% problem: _op_ can contain color so a no-go, we could apply the transform
-% but i need to figure out the best way (fakepicture and take components).
- ahlength := (ahfactor*pen_size(t)) ;
+ % ahlength := (ahfactor*pen_size(_op_ t)) ; % _op_ added
+ % problem: _op_ can contain color so a no-go, we could apply the transform
+ % but i need to figure out the best way (fakepicture and take components).
+ ahlength := (ahfactor*pen_size(t)) ;
enddef ;
vardef pen_size (text t) =
- save p ; picture p ; p := nullpicture ;
- addto p doublepath (top origin -- bot origin) t ;
- (ypart urcorner p - ypart lrcorner p)
+ save p ; picture p ; p := nullpicture ;
+ addto p doublepath (top origin -- bot origin) t ;
+ (ypart urcorner p - ypart lrcorner p)
enddef ;
%D The next two macros are adapted versions of plain
%D \METAPOST\ definitions.
+vardef arrowpath expr p = % patch by Peter Rolf: supports squared pen and shifting (hh: maybe just use center of head as first)
+ (p cutafter makepath(pencircle scaled 2(ahlength*cosd(.5ahangle)) shifted point length p of p))
+enddef;
+
+% def _finarr text t =
+% if autoarrows : set_ahlength (t) fi ;
+% draw arrowpath _apth t ; % arrowpath added
+% filldraw arrowhead _apth t ;
+% enddef;
+
def _finarr text t =
- if autoarrows : set_ahlength (t) fi ;
- draw _apth t ;
- filldraw arrowhead _apth t ;
+ if autoarrows : set_ahlength (t) fi ;
+ draw arrowpath _apth t ; % arrowpath added
+ fill arrowhead _apth t ;
+ draw arrowhead _apth t ;
enddef;
-def _findarr text t =
- if autoarrows : set_ahlength (t) fi ;
- draw _apth t ;
- fill arrowhead _apth withpen currentpen t ;
- fill arrowhead reverse _apth withpen currentpen t ;
-enddef ;
+def _findarr text t =
+ if autoarrows : set_ahlength (t) fi ;
+ draw arrowpath _apth t ; % arrowpath added
+ fill arrowhead _apth t ;
+ draw arrowhead _apth t undashed ;
+enddef;
%D Handy too ......
vardef pointarrow (expr pat, loc, len, off) =
- save l, r, s, t ; path l, r ; numeric s ; pair t ;
- t := if pair loc : loc else : point loc along pat fi ;
- s := len/2 - off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
- r := pat cutbefore t ;
- r := (r cutafter point (arctime s of r) of r) ;
- s := len/2 + off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
- l := reverse (pat cutafter t) ;
- l := (reverse (l cutafter point (arctime s of l) of l)) ;
- (l..r)
+ save l, r, s, t ; path l, r ; numeric s ; pair t ;
+ t := if pair loc : loc else : point loc along pat fi ;
+ s := len/2 - off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
+ r := pat cutbefore t ;
+ r := (r cutafter point (arctime s of r) of r) ;
+ s := len/2 + off ; if s<=0 : s := 0 elseif s>len : s := len fi ;
+ l := reverse (pat cutafter t) ;
+ l := (reverse (l cutafter point (arctime s of l) of l)) ;
+ (l..r)
enddef ;
def rightarrow (expr pat,tim,len) = pointarrow(pat,tim,len,-len) enddef ;
@@ -1336,23 +1395,19 @@ def centerarrow (expr pat,tim,len) = pointarrow(pat,tim,len, 0) enddef ;
%D The \type {along} and \type {on} operators can be used
%D as follows:
%D
-%D \starttypen
+%D \starttyping
%D drawdot point .5 along somepath ;
%D drawdot point 3cm on somepath ;
-%D \stoptypen
+%D \stoptyping
%D
%D The number denotes a percentage (fraction).
primarydef pct along pat = % also negative
- (arctime (pct * (arclength pat)) of pat) of pat
+ (arctime (pct * (arclength pat)) of pat) of pat
enddef ;
-% primarydef len on pat =
-% (arctime len of pat) of pat
-% enddef ;
-
-primarydef len on pat =
- (arctime if len>0 : len else : (arclength(pat)+len) fi of pat) of pat
+primarydef len on pat = % no outer ( ) .. somehow fails
+ (arctime if len>=0 : len else : (arclength(pat)+len) fi of pat) of pat
enddef ;
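+
+% since "on" now also accepts negative lengths one can write (somepath is
+% just a placeholder name) :
+%
+% drawdot point  1cm on somepath ; % 1cm from the start
+% drawdot point -1cm on somepath ; % 1cm short of the end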
% this cuts off a piece from both ends
@@ -1365,69 +1420,55 @@ enddef ;
% enddef ;
tertiarydef pat cutends len =
- begingroup ; save tap ; path tap ;
- tap := pat cutbefore (point (xpart paired(len)) on pat) ;
- (tap cutafter (point -(ypart paired(len)) on tap))
- endgroup
+ begingroup
+ save tap ; path tap ;
+ tap := pat cutbefore (point (xpart paired(len)) on pat) ;
+ (tap cutafter (point -(ypart paired(len)) on tap))
+ endgroup
enddef ;
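+
+% usage sketch (the path name is made up) ; a pair cuts begin and end differently :
+%
+% draw somepath cutends 5pt ;
+% draw somepath cutends (5pt,10pt) ;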
%D To be documented.
path freesquare ;
-freesquare := ((-1,0)--(-1,-1)--(0,-1)--(+1,-1)--
- (+1,0)--(+1,+1)--(0,+1)--(-1,+1)--cycle) scaled .5 ;
+freesquare := (
+ (-1,0) -- (-1,-1) -- (0,-1) -- (+1,-1) --
+ (+1,0) -- (+1,+1) -- (0,+1) -- (-1,+1) -- cycle
+) scaled .5 ;
numeric freelabeloffset ; freelabeloffset := 3pt ;
numeric freedotlabelsize ; freedotlabelsize := 3pt ;
vardef thefreelabel (expr str, loc, ori) =
- save s, p, q, l ; picture s ; path p, q ; pair l ;
- interim labeloffset := freelabeloffset ;
- s := if string str : thelabel(str,loc) else : str shifted -center str shifted loc fi ;
- setbounds s to boundingbox s enlarged freelabeloffset ;
- p := fullcircle scaled (2*length(loc-ori)) shifted ori ;
- q := freesquare xyscaled (urcorner s - llcorner s) ;
-% l := point (xpart (p intersectiontimes (ori--loc))) of q ;
- l := point xpart (p intersectiontimes
- (ori--((1+eps)*arclength(ori--loc)*unitvector(loc-ori)))) of q ;
- setbounds s to boundingbox s enlarged -freelabeloffset ; % new
- %draw boundingbox s shifted -l withpen pencircle scaled .5pt withcolor red ;
- (s shifted -l)
-enddef ;
-
-% better?
-
-vardef thefreelabel (expr str, loc, ori) =
- save s, p, q, l ; picture s ; path p, q ; pair l ;
- interim labeloffset := freelabeloffset ;
- s := if string str : thelabel(str,loc) else : str shifted -center str shifted loc fi ;
- setbounds s to boundingbox s enlarged freelabeloffset ;
- p := fullcircle scaled (2*length(loc-ori)) shifted ori ;
- q := freesquare xyscaled (urcorner s - llcorner s) ;
- l := point xpart (p intersectiontimes (ori--loc shifted (loc-ori))) of q ;
- setbounds s to boundingbox s enlarged -freelabeloffset ; % new
- %draw boundingbox s shifted -l withpen pencircle scaled .5pt withcolor red ;
- (s shifted -l)
+ save s, p, q, l ; picture s ; path p, q ; pair l ;
+ interim labeloffset := freelabeloffset ;
+ s := if string str : thelabel(str,loc) else : str shifted -center str shifted loc fi ;
+ setbounds s to boundingbox s enlarged freelabeloffset ;
+ p := fullcircle scaled (2*length(loc-ori)) shifted ori ;
+ q := freesquare xyscaled (urcorner s - llcorner s) ;
+ l := point xpart (p intersectiontimes (ori--loc shifted (loc-ori))) of q ;
+ setbounds s to boundingbox s enlarged -freelabeloffset ; % new
+ % draw boundingbox s shifted -l withpen pencircle scaled .5pt withcolor red ;
+ (s shifted -l)
enddef ;
vardef freelabel (expr str, loc, ori) =
- draw thefreelabel(str,loc,ori) ;
+ draw thefreelabel(str,loc,ori) ;
enddef ;
vardef freedotlabel (expr str, loc, ori) =
- interim linecap:=rounded ;
- draw loc withpen pencircle scaled freedotlabelsize ;
- draw thefreelabel(str,loc,ori) ;
+ interim linecap := rounded ;
+ draw loc withpen pencircle scaled freedotlabelsize ;
+ draw thefreelabel(str,loc,ori) ;
enddef ;
-%D \starttypen
+%D \starttyping
%D drawarrow anglebetween(line_a,line_b,somelabel) ;
-%D \stoptypen
+%D \stoptyping
-% angleoffset ; angleoffset := 0pt ;
-numeric anglelength ; anglelength := 20pt ;
-numeric anglemethod ; anglemethod := 1 ;
+newinternal angleoffset ; angleoffset := 0pt ;
+newinternal anglelength ; anglelength := 20pt ;
+newinternal anglemethod ; anglemethod := 1 ;
% vardef anglebetween (expr a, b, str) = % path path string
% save pointa, pointb, common, middle, offset ;
@@ -1466,63 +1507,66 @@ numeric anglemethod ; anglemethod := 1 ;
% enddef ;
vardef anglebetween (expr a, b, str) = % path path string
- save pointa, pointb, common, middle, offset ;
- pair pointa, pointb, common, middle, offset ;
- save curve ; path curve ;
- save where ; numeric where ;
- if round point 0 of a = round point 0 of b :
- common := point 0 of a ;
- else :
- common := a intersectionpoint b ;
- fi ;
- pointa := point anglelength on a ;
- pointb := point anglelength on b ;
- where := turningnumber (common--pointa--pointb--cycle) ;
- middle := (reverse(common--pointa) rotatedaround (pointa,-where*90))
- intersection_point
- (reverse(common--pointb) rotatedaround (pointb, where*90)) ;
- if not intersection_found :
- middle := point .5 along
- ((reverse(common--pointa) rotatedaround (pointa,-where*90)) --
- ( (common--pointb) rotatedaround (pointb, where*90))) ;
- fi ;
- if anglemethod = 0 :
- curve := pointa{unitvector(middle-pointa)}.. pointb;
- middle := point .5 along curve ;
- curve := common ;
- elseif anglemethod = 1 :
- curve := pointa{unitvector(middle-pointa)}.. pointb;
- middle := point .5 along curve ;
- elseif anglemethod = 2 :
- middle := common rotatedaround(.5[pointa,pointb],180) ;
- curve := pointa--middle--pointb ;
- elseif anglemethod = 3 :
- curve := pointa--middle--pointb ;
- elseif anglemethod = 4 :
- curve := pointa..controls middle..pointb ;
- middle := point .5 along curve ;
- fi ;
- draw thefreelabel(str, middle, common) ; % withcolor black ;
- curve
+ save pointa, pointb, common, middle, offset ;
+ pair pointa, pointb, common, middle, offset ;
+ save curve ; path curve ;
+ save where ; numeric where ;
+ if round point 0 of a = round point 0 of b :
+ common := point 0 of a ;
+ else :
+ common := a intersectionpoint b ;
+ fi ;
+ pointa := point anglelength on a ;
+ pointb := point anglelength on b ;
+ where := turningnumber (common--pointa--pointb--cycle) ;
+ middle := (reverse(common--pointa) rotatedaround (pointa,-where*90))
+ intersection_point
+ (reverse(common--pointb) rotatedaround (pointb, where*90)) ;
+ if not intersection_found :
+ middle := point .5 along
+ ((reverse(common--pointa) rotatedaround (pointa,-where*90)) --
+ ( (common--pointb) rotatedaround (pointb, where*90))) ;
+ fi ;
+ if anglemethod = 0 :
+ curve := pointa{unitvector(middle-pointa)}.. pointb;
+ middle := point .5 along curve ;
+ curve := common ;
+ elseif anglemethod = 1 :
+ curve := pointa{unitvector(middle-pointa)}.. pointb;
+ middle := point .5 along curve ;
+ elseif anglemethod = 2 :
+ middle := common rotatedaround(.5[pointa,pointb],180) ;
+ curve := pointa--middle--pointb ;
+ elseif anglemethod = 3 :
+ curve := pointa--middle--pointb ;
+ elseif anglemethod = 4 :
+ curve := pointa..controls middle..pointb ;
+ middle := point .5 along curve ;
+ fi ;
+ draw thefreelabel(str, middle, common) ; % withcolor black ;
+ curve
enddef ;
% Stack
-picture currentpicturestack[] ;
-numeric currentpicturedepth ; currentpicturedepth := 0 ;
+picture mfun_current_picture_stack[] ;
+numeric mfun_current_picture_depth ;
+
+mfun_current_picture_depth := 0 ;
def pushcurrentpicture =
- currentpicturedepth := currentpicturedepth + 1 ;
- currentpicturestack[currentpicturedepth] := currentpicture ;
- currentpicture := nullpicture ;
+ mfun_current_picture_depth := mfun_current_picture_depth + 1 ;
+ mfun_current_picture_stack[mfun_current_picture_depth] := currentpicture ;
+ currentpicture := nullpicture ;
enddef ;
def popcurrentpicture text t = % optional text
- if currentpicturedepth > 0 :
- addto currentpicturestack[currentpicturedepth] also currentpicture t ;
- currentpicture := currentpicturestack[currentpicturedepth] ;
- currentpicturedepth := currentpicturedepth - 1 ;
- fi ;
+ if mfun_current_picture_depth > 0 :
+ addto mfun_current_picture_stack[mfun_current_picture_depth] also currentpicture t ;
+ currentpicture := mfun_current_picture_stack[mfun_current_picture_depth] ;
+ mfun_current_picture_stack[mfun_current_picture_depth] := nullpicture ;
+ mfun_current_picture_depth := mfun_current_picture_depth - 1 ;
+ fi ;
enddef ;
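+
+% typical use: build a piece in isolation and merge it back, optionally
+% transformed by the text argument of popcurrentpicture (just a sketch) :
+%
+% pushcurrentpicture ;
+%   fill fullcircle scaled 1cm withcolor .625red ;
+% popcurrentpicture shifted (2cm,0) ;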
%D colorcircle(size, red, green, blue) ;
@@ -1603,71 +1647,76 @@ enddef ;
% popcurrentpicture ;
% enddef ;
-vardef colorcircle (expr size, red, green, blue) =
- save r, g, b, c, m, y, w ; save radius ;
- path r, g, b, c, m, y, w ; numeric radius ;
+vardef colorcircle (expr size, red, green, blue) = % might move
+ save r, g, b, c, m, y, w ; save radius ;
+ path r, g, b, c, m, y, w ; numeric radius ;
- radius := 5cm ; pickup pencircle scaled (radius/25) ;
+ radius := 5cm ; pickup pencircle scaled (radius/25) ;
- transform t ; t := identity rotatedaround(origin,120) ;
+ transform t ; t := identity rotatedaround(origin,120) ;
- r := fullcircle rotated 90 scaled radius
- shifted (0,radius/4) rotatedaround(origin,135) ;
+ r := fullcircle rotated 90 scaled radius shifted (0,radius/4) rotatedaround(origin,135) ;
- b := r transformed t ; g := b transformed t ;
+ b := r transformed t ; g := b transformed t ;
- c := buildcycle(subpath(1,7) of g,subpath(1,7) of b) ;
- y := c transformed t ; m := y transformed t ;
+ c := buildcycle(subpath(1,7) of g,subpath(1,7) of b) ;
+ y := c transformed t ; m := y transformed t ;
- w := buildcycle(subpath(3,5) of r, subpath(3,5) of g,subpath(3,5) of b) ;
+ w := buildcycle(subpath(3,5) of r, subpath(3,5) of g,subpath(3,5) of b) ;
- pushcurrentpicture ;
+ pushcurrentpicture ;
- fill r withcolor red ;
- fill g withcolor green ;
- fill b withcolor blue ;
- fill c withcolor white-red ;
- fill m withcolor white-green ;
- fill y withcolor white-blue ;
- fill w withcolor white ;
+ fill r withcolor red ;
+ fill g withcolor green ;
+ fill b withcolor blue ;
+ fill c withcolor white - red ;
+ fill m withcolor white - green ;
+ fill y withcolor white - blue ;
+ fill w withcolor white ;
- for i = r,g,b,c,m,y : draw i withcolor .5white ; endfor ;
+ for i = r,g,b,c,m,y : draw i withcolor .5white ; endfor ;
- currentpicture := currentpicture xsized size ;
+ currentpicture := currentpicture xsized size ;
- popcurrentpicture ;
+ popcurrentpicture ;
enddef ;
% penpoint (i,2) of somepath -> inner / outer point
vardef penpoint expr pnt of p =
- save n, d ; numeric n, d ;
- (n,d) = if pair pnt : pnt else : (pnt,1) fi ;
- (point n of p shifted ((penoffset direction n of p of currentpen) scaled d))
+ save n, d ; numeric n, d ;
+ (n,d) = if pair pnt : pnt else : (pnt,1) fi ;
+ (point n of p shifted ((penoffset direction n of p of currentpen) scaled d))
enddef ;
% nice: currentpicture := inverted currentpicture ;
primarydef p uncolored c =
- if color p :
- c - p
- else :
- image
- (for i within p :
- addto currentpicture
- if stroked i or filled i :
- if filled i : contour else : doublepath fi pathpart i
- dashed dashpart i withpen penpart i
- else :
- also i
- fi
- withcolor c-(redpart i, greenpart i, bluepart i) ;
- endfor ; )
+ if color p :
+ c - p
+ else :
+ image (
+ for i within p :
+ addto currentpicture
+ if stroked i or filled i :
+ if filled i :
+ contour
+ else :
+ doublepath
+ fi
+ pathpart i
+ dashed dashpart i withpen penpart i
+ else :
+ also i
+ fi
+ withcolor c-(redpart i, greenpart i, bluepart i) ;
+ endfor ;
+ )
fi
enddef ;
vardef inverted primary p =
- (p uncolored white)
+ p uncolored white
enddef ;
% primarydef p softened c =
@@ -1692,45 +1741,54 @@ enddef ;
% enddef ;
primarydef p softened c =
- begingroup
- save cc ; color cc ; cc := tripled(c) ;
- if color p :
- (redpart cc * redpart p,
- greenpart cc * greenpart p,
- bluepart cc * bluepart p)
- else :
- image
- (for i within p :
- addto currentpicture
- if stroked i or filled i :
- if filled i : contour else : doublepath fi pathpart i
- dashed dashpart i withpen penpart i
- else :
- also i
- fi
- withcolor (redpart cc * redpart i,
- greenpart cc * greenpart i,
- bluepart cc * bluepart i) ;
- endfor ;)
- fi
- endgroup
+ begingroup
+ save cc ; color cc ; cc := tripled(c) ;
+ if color p :
+ (redpart cc * redpart p,greenpart cc * greenpart p, bluepart cc * bluepart p)
+ else :
+ image (
+ for i within p :
+ addto currentpicture
+ if stroked i or filled i :
+ if filled i :
+ contour
+ else :
+ doublepath
+ fi
+ pathpart i
+ dashed dashpart i withpen penpart i
+ else :
+ also i
+ fi
+ withcolor (redpart cc * redpart i, greenpart cc * greenpart i, bluepart cc * bluepart i) ;
+ endfor ;
+ )
+ fi
+ endgroup
enddef ;
vardef grayed primary p =
- if color p :
- tripled(.30redpart p+.59greenpart p+.11bluepart p)
- else :
- image
- (for i within p :
- addto currentpicture
- if stroked i or filled i :
- if filled i : contour else : doublepath fi pathpart i
- dashed dashpart i withpen penpart i
- else :
- also i
- fi
- withcolor tripled(.30redpart i+.59greenpart i+.11bluepart i) ;
- endfor ; )
+ if color p :
+ tripled(.30redpart p+.59greenpart p+.11bluepart p)
+ else :
+ image (
+ for i within p :
+ addto currentpicture
+ if stroked i or filled i :
+ if filled i :
+ contour
+ else :
+ doublepath
+ fi
+ pathpart i
+ dashed dashpart i
+ withpen penpart i
+ else :
+ also i
+ fi
+ withcolor tripled(.30redpart i+.59greenpart i+.11bluepart i) ;
+ endfor ;
+ )
fi
enddef ;
@@ -1758,10 +1816,10 @@ def condition primary b = if b : "true" else : "false" fi enddef ;
% undocumented
primarydef p stretched s =
- begingroup
- save pp ; path pp ; pp := p xyscaled s ;
- (pp shifted ((point 0 of p) - (point 0 of pp)))
- endgroup
+ begingroup
+ save pp ; path pp ; pp := p xyscaled s ;
+ (pp shifted ((point 0 of p) - (point 0 of pp)))
+ endgroup
enddef ;
% primarydef p enlonged len =
@@ -1833,40 +1891,40 @@ def yshifted expr dy = shifted(0,dy) enddef ;
%
def readfile (expr name) =
- begingroup ; save ok ; boolean ok ;
- if (readfrom (name) <> EOF) :
- ok := false ;
- elseif (readfrom (name) <> EOF) :
- ok := false ;
- else :
- ok := true ;
- fi ;
- if not ok :
- scantokens("input " & name & " ") ;
- fi ;
- closefrom (name) ;
- endgroup ;
+ begingroup ; save ok ; boolean ok ;
+ if (readfrom (name) <> EOF) :
+ ok := false ;
+ elseif (readfrom (name) <> EOF) :
+ ok := false ;
+ else :
+ ok := true ;
+ fi ;
+ if not ok :
+ scantokens("input " & name & " ") ;
+ fi ;
+ closefrom (name) ;
+ endgroup ;
enddef ;
% permits redefinition of end in macro
inner end ;
-% real fun
+% this will be redone (when needed) using scripts and backend handling
let normalwithcolor = withcolor ;
def remapcolors =
- def withcolor primary c = normalwithcolor remappedcolor(c) enddef ;
+ def withcolor primary c = normalwithcolor remappedcolor(c) enddef ;
enddef ;
def normalcolors =
- let withcolor = normalwithcolor ;
+ let withcolor = normalwithcolor ;
enddef ;
def resetcolormap =
- color color_map[][][] ;
- normalcolors ;
+ color color_map[][][] ;
+ normalcolors ;
enddef ;
resetcolormap ;
@@ -1882,15 +1940,15 @@ def g_color primary c = greenpart c enddef ;
def b_color primary c = bluepart c enddef ;
def remapcolor(expr old, new) =
- color_map[r_color old][g_color old][b_color old] := new ;
+ color_map[redpart old][greenpart old][bluepart old] := new ;
enddef ;
def remappedcolor(expr c) =
- if known color_map[r_color c][g_color c][b_color c] :
- color_map[r_color c][g_color c][b_color c]
- else :
- c
- fi
+ if known color_map[redpart c][greenpart c][bluepart c] :
+ color_map[redpart c][greenpart c][bluepart c]
+ else :
+ c
+ fi
enddef ;
% def refill suffix c = do_repath (1) (c) enddef ;
@@ -1930,11 +1988,11 @@ enddef ;
% Thanks to Jens-Uwe Morawski for pointing out that we need
% to treat bounded and clipped components as local pictures.
-def recolor suffix p = p := repathed (0,p) enddef ;
-def refill suffix p = p := repathed (1,p) enddef ;
-def redraw suffix p = p := repathed (2,p) enddef ;
-def retext suffix p = p := repathed (3,p) enddef ;
-def untext suffix p = p := repathed (4,p) enddef ;
+def recolor suffix p = p := repathed (0,p) enddef ;
+def refill suffix p = p := repathed (1,p) enddef ;
+def redraw suffix p = p := repathed (2,p) enddef ;
+def retext suffix p = p := repathed (3,p) enddef ;
+def untext suffix p = p := repathed (4,p) enddef ;
% primarydef p recolored t = repathed(0,p) t enddef ;
% primarydef p refilled t = repathed(1,p) t enddef ;
@@ -1997,69 +2055,80 @@ def reprocess suffix p = p := repathed (22,p) enddef ; % no attributes
% also 11 and 12
vardef repathed (expr mode, p) text t =
- begingroup ;
- if mode=0 : save withcolor ; remapcolors ; fi ;
- save _p_, _pp_, _ppp_, _f_, _b_, _t_ ;
- picture _p_, _pp_, _ppp_ ; color _f_ ; path _b_ ; transform _t_ ;
- _b_ := boundingbox p ; _p_ := nullpicture ;
- for i within p :
- _f_ := (redpart i, greenpart i, bluepart i) ;
- if bounded i :
- _pp_ := repathed(mode,i) t ;
- setbounds _pp_ to pathpart i ;
- addto _p_ also _pp_ ;
- elseif clipped i :
- _pp_ := repathed(mode,i) t ;
- clip _pp_ to pathpart i ;
- addto _p_ also _pp_ ;
- elseif stroked i :
- if mode=21 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")
- dashed dashpart i withpen penpart i
- withcolor _f_ ; ) ;
- elseif mode=22 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
- else :
- addto _p_ doublepath pathpart i
- dashed dashpart i withpen penpart i
- withcolor _f_ % (redpart i, greenpart i, bluepart i)
- if mode=2 : t fi ;
- fi ;
- elseif filled i :
- if mode=11 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")
- withcolor _f_ ; ) ;
- elseif mode=12 :
- _ppp_ := i ; % indirectness is needed
- addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
- else :
- addto _p_ contour pathpart i
- withcolor _f_
- if (mode=1) and (_f_<>refillbackground) : t fi ;
- fi ;
- elseif textual i : % textpart i <> "" :
- if mode <> 4 :
- % transform _t_ ;
- % (xpart _t_, xxpart _t_, xypart _t_) = (xpart i, xxpart i, xypart i) ;
- % (ypart _t_, yypart _t_, yxpart _t_) = (ypart i, yypart i, yxpart i) ;
- % addto _p_ also
- % textpart i infont fontpart i % todo : other font
- % transformed _t_
- % withpen penpart i
- % withcolor _f_
- % if mode=3 : t fi ;
- addto _p_ also i if mode=3 : t fi ;
- fi ;
- else :
- addto _p_ also i ;
+ begingroup ;
+ if mode = 0 :
+ save withcolor ;
+ remapcolors ;
fi ;
- endfor ;
- setbounds _p_ to _b_ ;
- _p_
- endgroup
+ save _p_, _pp_, _ppp_, _f_, _b_, _t_ ;
+ picture _p_, _pp_, _ppp_ ; color _f_ ; path _b_ ; transform _t_ ;
+ _b_ := boundingbox p ;
+ _p_ := nullpicture ;
+ for i within p :
+ _f_ := (redpart i, greenpart i, bluepart i) ;
+ if bounded i :
+ _pp_ := repathed(mode,i) t ;
+ setbounds _pp_ to pathpart i ;
+ addto _p_ also _pp_ ;
+ elseif clipped i :
+ _pp_ := repathed(mode,i) t ;
+ clip _pp_ to pathpart i ;
+ addto _p_ also _pp_ ;
+ elseif stroked i :
+ if mode=21 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")
+ dashed dashpart i withpen penpart i
+ withcolor _f_ ; ) ;
+ elseif mode=22 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
+ else :
+ addto _p_ doublepath pathpart i
+ dashed dashpart i withpen penpart i
+ withcolor _f_ % (redpart i, greenpart i, bluepart i)
+ if mode = 2 :
+ t
+ fi ;
+ fi ;
+ elseif filled i :
+ if mode=11 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")
+ withcolor _f_ ; ) ;
+ elseif mode=12 :
+ _ppp_ := i ; % indirectness is needed
+ addto _p_ also image(scantokens(t & " pathpart _ppp_")) ;
+ else :
+ addto _p_ contour pathpart i
+ withcolor _f_
+ if (mode=1) and (_f_<>refillbackground) :
+ t
+ fi ;
+ fi ;
+ elseif textual i : % textpart i <> "" :
+ if mode <> 4 :
+ % transform _t_ ;
+ % (xpart _t_, xxpart _t_, xypart _t_) = (xpart i, xxpart i, xypart i) ;
+ % (ypart _t_, yypart _t_, yxpart _t_) = (ypart i, yypart i, yxpart i) ;
+ % addto _p_ also
+ % textpart i infont fontpart i % todo : other font
+ % transformed _t_
+ % withpen penpart i
+ % withcolor _f_
+ % if mode=3 : t fi ;
+ addto _p_ also i
+ if mode=3 :
+ t
+ fi ;
+ fi ;
+ else :
+ addto _p_ also i ;
+ fi ;
+ endfor ;
+ setbounds _p_ to _b_ ;
+ _p_
+ endgroup
enddef ;
% After a question of Denis on how to erase a z variable, Jacko
@@ -2087,11 +2156,11 @@ enddef ;
% which i decided to simplify to:
def clearxy text s =
- if false for $ := s : or true endfor :
- forsuffixes $ := s : x$ := whatever ; y$ := whatever ; endfor ;
- else :
- save x, y ;
- fi
+ if false for $ := s : or true endfor :
+ forsuffixes $ := s : x$ := whatever ; y$ := whatever ; endfor ;
+ else :
+ save x, y ;
+ fi
enddef ;
% so now we can say: clearxy ; as well as clearxy 1, 2, 3 ;
@@ -2103,48 +2172,68 @@ enddef ;
% show x0 ; z0 = (30,30) ;
primarydef p smoothed d =
- (p llmoved (-xpart paired(d),0) -- p lrmoved (-xpart paired(d),0) {right} ..
- p lrmoved (0,-ypart paired(d)) -- p urmoved (0,-ypart paired(d)) {up} ..
- p urmoved (-xpart paired(d),0) -- p ulmoved (-xpart paired(d),0) {left} ..
- p ulmoved (0,-ypart paired(d)) -- p llmoved (0,-ypart paired(d)) {down} .. cycle)
+ (p llmoved (-xpart paired(d),0) -- p lrmoved (-xpart paired(d),0) {right} ..
+ p lrmoved (0,-ypart paired(d)) -- p urmoved (0,-ypart paired(d)) {up} ..
+ p urmoved (-xpart paired(d),0) -- p ulmoved (-xpart paired(d),0) {left} ..
+ p ulmoved (0,-ypart paired(d)) -- p llmoved (0,-ypart paired(d)) {down} .. cycle)
enddef ;
primarydef p cornered c =
- ((point 0 of p) shifted (c*(unitvector(point 1 of p - point 0 of p))) --
- for i=1 upto length(p) :
- (point i-1 of p) shifted (c*(unitvector(point i of p - point i-1 of p))) --
- (point i of p) shifted (c*(unitvector(point i-1 of p - point i of p))) ..
- controls point i of p ..
- endfor cycle)
+ ((point 0 of p) shifted (c*(unitvector(point 1 of p - point 0 of p))) --
+ for i=1 upto length(p) :
+ (point i-1 of p) shifted (c*(unitvector(point i of p - point i-1 of p))) --
+ (point i of p) shifted (c*(unitvector(point i-1 of p - point i of p))) ..
+ controls point i of p ..
+ endfor cycle)
enddef ;
% cmyk color support
vardef cmyk(expr c,m,y,k) =
- (1-c-k,1-m-k,1-y-k)
+ (1-c-k,1-m-k,1-y-k)
enddef ;
% handy
-vardef bbwidth (expr p) = % vardef width_of primary p =
- if known p :
- if path p or picture p :
- xpart (lrcorner p - llcorner p)
- else :
- 0
- fi
+% vardef bbwidth (expr p) = % vardef width_of primary p =
+% if known p :
+% if path p or picture p :
+% xpart (lrcorner p - llcorner p)
+% else :
+% 0
+% fi
+% else :
+% 0
+% fi
+% enddef ;
+
+vardef bbwidth primary p =
+ if unknown p :
+ 0
+ elseif path p or picture p :
+ xpart (lrcorner p - llcorner p)
else :
0
fi
enddef ;
-vardef bbheight (expr p) = % vardef heigth_of primary p =
- if known p :
- if path p or picture p :
- ypart (urcorner p - lrcorner p)
- else :
- 0
- fi
+% vardef bbheight (expr p) = % vardef heigth_of primary p =
+% if known p :
+% if path p or picture p :
+% ypart (urcorner p - lrcorner p)
+% else :
+% 0
+% fi
+% else :
+% 0
+% fi
+% enddef ;
+
+vardef bbheight primary p =
+ if unknown p :
+ 0
+ elseif path p or picture p :
+ ypart (urcorner p - lrcorner p)
else :
0
fi
@@ -2153,122 +2242,87 @@ enddef ;
color nocolor ; numeric noline ; % both unknown signals
def dowithpath (expr p, lw, lc, bc) =
- if known p :
- if known bc :
- fill p withcolor bc ;
- fi ;
- if known lw and known lc :
- draw p withpen pencircle scaled lw withcolor lc ;
- elseif known lw :
- draw p withpen pencircle scaled lw ;
- elseif known lc :
- draw p withcolor lc ;
+ if known p :
+ if known bc :
+ fill p withcolor bc ;
+ fi ;
+ if known lw and known lc :
+ draw p withpen pencircle scaled lw withcolor lc ;
+ elseif known lw :
+ draw p withpen pencircle scaled lw ;
+ elseif known lc :
+ draw p withcolor lc ;
+ fi ;
fi ;
- fi ;
enddef ;
% result from metafont discussion list (denisr/boguslawj)
-def ]] = ] ] enddef ; def ]]] = ] ] ] enddef ;
-def [[ = [ [ enddef ; def [[[ = [ [ [ enddef ;
+def [[ = [ [ enddef ; def [[[ = [ [ [ enddef ;
+def ]] = ] ] enddef ; def ]]] = ] ] ] enddef ;
-% not perfect, but useful since it removes redundant points.
+let == = = ;
-% vardef dostraightened(expr sign, p) =
-% if length(p)>2 : % was 1, but straight lines are ok
-% save pp ; path pp ;
-% pp := point 0 of p ;
-% for i=1 upto length(p)-1 :
-% if round(point i of p) <> round(point length(pp) of pp) :
-% pp := pp -- point i of p ;
-% fi ;
-% endfor ;
-% save n, ok ; numeric n ; boolean ok ;
-% n := length(pp) ; ok := false ;
-% if n>2 :
-% for i=0 upto n : % evt hier ook round
-% if unitvector(round(point i of pp -
-% point if i=0 : n else : i-1 fi of pp)) <>
-% sign * unitvector(round(point if i=n : 0 else : i+1 fi of pp -
-% point i of pp)) :
-% if ok : -- else : ok := true ; fi point i of pp
-% fi
-% endfor
-% if ok and (cycle p) : -- cycle fi
-% else :
-% pp
-% fi
-% else :
-% p
-% fi
-% enddef ;
+% added
-% vardef simplified expr p =
-% (reverse dostraightened(+1,dostraightened(+1,reverse p)))
-% enddef ;
+picture oddly ; % evenly already defined
-% vardef unspiked expr p =
-% (reverse dostraightened(-1,dostraightened(-1,reverse p)))
-% enddef ;
+evenly := dashpattern(on 3 off 3) ;
+oddly := dashpattern(off 3 on 3) ;
+
+% not perfect, but useful since it removes redundant points.
-% simplified : remove same points as well as redundant points
-% unspiked : remove same points as well as areas with zero distance
-
-vardef dostraightened(expr sign, p) =
- save _p_, _q_ ; path _p_, _q_ ;
- _p_ := p ;
- forever :
- _q_ := dodostraightened(sign, _p_) ;
- exitif length(_p_) = length(_q_) ;
- _p_ := _q_ ;
- endfor ;
- _q_
-enddef ;
-
-vardef dodostraightened(expr sign, p) =
- if length(p)>2 : % was 1, but straight lines are ok
- save pp ; path pp ;
- pp := point 0 of p ;
- for i=1 upto length(p)-1 :
- if round(point i of p) <> round(point length(pp) of pp) :
- pp := pp -- point i of p ;
- fi ;
+vardef mfun_straightened(expr sign, p) =
+ save _p_, _q_ ; path _p_, _q_ ;
+ _p_ := p ;
+ forever :
+ _q_ := mfun_do_straightened(sign, _p_) ;
+ exitif length(_p_) = length(_q_) ;
+ _p_ := _q_ ;
endfor ;
- save n, ok ; numeric n ; boolean ok ;
- n := length(pp) ; ok := false ;
- if n>2 :
- for i=0 upto n : % evt hier ook round
- if unitvector(round(point i of pp -
- point if i=0 : n else : i-1 fi of pp)) <>
- sign * unitvector(round(point if i=n : 0 else : i+1 fi of pp -
- point i of pp)) :
- if ok : -- else : ok := true ; fi point i of pp
+ _q_
+enddef ;
+
+vardef mfun_do_straightened(expr sign, p) =
+ if length(p)>2 : % was 1, but straight lines are ok
+ save pp ; path pp ;
+ pp := point 0 of p ;
+ for i=1 upto length(p)-1 :
+ if round(point i of p) <> round(point length(pp) of pp) :
+ pp := pp -- point i of p ;
+ fi ;
+ endfor ;
+ save n, ok ; numeric n ; boolean ok ;
+ n := length(pp) ; ok := false ;
+ if n>2 :
+ for i=0 upto n : % maybe also round here
+ if unitvector(round(point i of pp - point if i=0 : n else : i-1 fi of pp)) <>
+ sign * unitvector(round(point if i=n : 0 else : i+1 fi of pp - point i of pp)) :
+ if ok :
+ --
+ else :
+ ok := true ;
+ fi point i of pp
+ fi
+ endfor
+ if ok and (cycle p) :
+ -- cycle
+ fi
+ else :
+ pp
fi
- endfor
- if ok and (cycle p) : -- cycle fi
else :
- pp
+ p
fi
- else :
- p
- fi
enddef ;
-% vardef simplified expr p =
-% dostraightened(+1,p)
-% enddef ;
-
-% vardef unspiked expr p =
-% dostraightened(-1,p)
-% enddef ;
-
-vardef simplified expr p =
- (reverse dostraightened(+1,dostraightened(+1,reverse p)))
-enddef ;
+vardef simplified expr p = (
+ reverse mfun_straightened(+1,mfun_straightened(+1,reverse p))
+) enddef ;
-vardef unspiked expr p =
- (reverse dostraightened(-1,dostraightened(-1,reverse p)))
-enddef ;
+vardef unspiked expr p = (
+ reverse mfun_straightened(-1,mfun_straightened(-1,reverse p))
+) enddef ;
% path p ;
% p := (2cm,1cm) -- (2cm,1cm) -- (2cm,1cm) -- (3cm,1cm) --
@@ -2289,213 +2343,197 @@ enddef ;
path originpath ; originpath := origin -- cycle ;
vardef unitvector primary z =
- if abs z = abs origin : z else : z/abs z fi
+ if abs z = abs origin : z else : z/abs z fi
enddef;
% also new
-vardef anchored@#(expr p, z) =
- p shifted (z + (labxf@#*lrcorner p + labyf@#*ulcorner p
- + (1-labxf@#-labyf@#)*llcorner p))
-enddef ;
+% vardef anchored@#(expr p, z) = % maybe use the textext variant
+% p shifted (z + (labxf@#*lrcorner p + labyf@#*ulcorner p + (1-labxf@#-labyf@#)*llcorner p))
+% enddef ;
% epsed(1.2345)
vardef epsed (expr e) =
- e if e>0 : + eps elseif e<0 : - eps fi
+ e if e>0 : + eps elseif e<0 : - eps fi
enddef ;
% handy
def withgray primary g =
- withcolor (g,g,g)
+ withcolor (g,g,g)
enddef ;
% for metafun
-if unknown darkred : color darkred ; darkred := .625(1,0,0) fi ;
-if unknown darkyellow : color darkyellow ; darkyellow := .625(1,1,0) fi ;
-if unknown darkgray : color darkgray ; darkgray := .625(1,1,1) fi ;
-if unknown lightgray : color lightgray ; lightgray := .875(1,1,1) fi ;
+if unknown darkred : color darkred ; darkred := .625(1,0,0) fi ;
+if unknown darkgreen : color darkgreen ; darkgreen := .625(0,1,0) fi ;
+if unknown darkblue : color darkblue ; darkblue := .625(0,0,1) fi ;
+if unknown darkcyan : color darkcyan ; darkcyan := .625(0,1,1) fi ;
+if unknown darkmagenta : color darkmagenta ; darkmagenta := .625(1,0,1) fi ;
+if unknown darkyellow : color darkyellow ; darkyellow := .625(1,1,0) fi ;
+if unknown darkgray : color darkgray ; darkgray := .625(1,1,1) fi ;
+if unknown lightgray : color lightgray ; lightgray := .875(1,1,1) fi ;
% an improved plain mp macro
vardef center primary p =
- if pair p : p else : .5[llcorner p, urcorner p] fi
+ if pair p :
+ p
+ else :
+ .5[llcorner p, urcorner p]
+ fi
enddef;
% new, yet undocumented
vardef rangepath (expr p, d, a) =
- (if length p>0 :
- (d*unitvector(direction 0 of p) rotated a)
- shifted point 0 of p
- -- p --
- (d*unitvector(direction length(p) of p) rotated a)
- shifted point length(p) of p
- else :
- p
- fi)
+ if length p>0 :
+ (d*unitvector(direction 0 of p) rotated a) shifted point 0 of p
+ -- p --
+ (d*unitvector(direction length(p) of p) rotated a) shifted point length(p) of p
+ else :
+ p
+ fi
enddef ;
% under construction
-vardef straightpath(expr a, b, method) =
- if (method<1) or (method>6) :
- (a--b)
- elseif method = 1 :
- (a --
- if xpart a > xpart b :
- if ypart a > ypart b :
- (xpart b,ypart a) --
- elseif ypart a < ypart b :
- (xpart a,ypart b) --
- fi
- elseif xpart a < xpart b :
- if ypart a > ypart b :
- (xpart a,ypart b) --
- elseif ypart a < ypart b :
- (xpart b,ypart a) --
- fi
+vardef straightpath (expr a, b, method) =
+ if (method<1) or (method>6) :
+ (a--b)
+ elseif method = 1 :
+ (a --
+ if xpart a > xpart b :
+ if ypart a > ypart b :
+ (xpart b,ypart a) --
+ elseif ypart a < ypart b :
+ (xpart a,ypart b) --
+ fi
+ elseif xpart a < xpart b :
+ if ypart a > ypart b :
+ (xpart a,ypart b) --
+ elseif ypart a < ypart b :
+ (xpart b,ypart a) --
+ fi
+ fi
+ b)
+ elseif method = 3 :
+ (a --
+ if xpart a > xpart b :
+ (xpart b,ypart a) --
+ elseif xpart a < xpart b :
+ (xpart a,ypart b) --
+ fi
+ b)
+ elseif method = 5 :
+ (a --
+ if ypart a > ypart b :
+ (xpart b,ypart a) --
+ elseif ypart a < ypart b :
+ (xpart a,ypart b) --
+ fi
+ b)
+ else :
+ (reverse straightpath(b,a,method-1))
fi
- b)
- elseif method = 3 :
- (a --
- if xpart a > xpart b :
- (xpart b,ypart a) --
- elseif xpart a < xpart b :
- (xpart a,ypart b) --
- fi
- b)
- elseif method = 5 :
- (a --
- if ypart a > ypart b :
- (xpart b,ypart a) --
- elseif ypart a < ypart b :
- (xpart a,ypart b) --
- fi
- b)
- else :
- (reverse straightpath(b,a,method-1))
- fi
enddef ;
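+
+% usage sketch ; methods 1 upto 6 pick different axis aligned connections :
+%
+% draw straightpath((0,0),(3cm,2cm),1) ;
+% draw straightpath((0,0),(3cm,2cm),3) withcolor .5white ;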
% handy for myself
def addbackground text t =
- begingroup ; save p, b ; picture p ; path b ;
- b := boundingbox currentpicture ;
- p := currentpicture ; currentpicture := nullpicture ;
- fill b t ; setbounds currentpicture to b ; addto currentpicture also p ;
- endgroup ;
+ begingroup ;
+ save p, b ; picture p ; path b ;
+ b := boundingbox currentpicture ;
+ p := currentpicture ; currentpicture := nullpicture ;
+ fill b t ;
+ setbounds currentpicture to b ;
+ addto currentpicture also p ;
+ endgroup ;
enddef ;
% makes a (line) into an infinite one (handy for calculating
% intersection points)
vardef infinite expr p =
- (-infinity*unitvector(direction 0 of p)
+ (-infinity*unitvector(direction 0 of p)
shifted point 0 of p
- -- p --
- +infinity*unitvector(direction length(p) of p)
- shifted point length(p) of p)
+ -- p --
+ +infinity*unitvector(direction length(p) of p)
+ shifted point length(p) of p)
enddef ;
% obscure macros: create var from string and replace - and :
-% (needed for process color id's)
+% (needed for process color id's) .. will go away
-string _clean_ascii_[] ;
+string mfun_clean_ascii[] ;
def register_dirty_chars(expr str) =
for i = 0 upto length(str)-1 :
- _clean_ascii_[ASCII substring(i,i+1) of str] := "_" ;
+ mfun_clean_ascii[ASCII substring(i,i+1) of str] := "_" ;
endfor ;
enddef ;
register_dirty_chars("+-*/:;., ") ;
vardef cleanstring (expr s) =
- save ss ; string ss, si ; ss = "" ; save i ;
- for i=0 upto length(s) :
- si := substring(i,i+1) of s ;
- ss := ss & if known _clean_ascii_[ASCII si] : _clean_ascii_[ASCII si] else : si fi ;
- endfor ;
- ss
+ save ss ; string ss, si ; ss = "" ; save i ;
+ for i=0 upto length(s) :
+ si := substring(i,i+1) of s ;
+ ss := ss & if known mfun_clean_ascii[ASCII si] : mfun_clean_ascii[ASCII si] else : si fi ;
+ endfor ;
+ ss
enddef ;
vardef asciistring (expr s) =
- save ss ; string ss, si ; ss = "" ; save i ;
- for i=0 upto length(s) :
- si := substring(i,i+1) of s ;
- if (ASCII si >= ASCII "0") and (ASCII si <= ASCII "9") :
- ss := ss & char(scantokens(si) + ASCII "A") ;
- else :
- ss := ss & si ;
- fi ;
- endfor ;
- ss
-enddef ;
-
-vardef setunstringed (expr s, v) =
- scantokens(cleanstring(s)) := v ;
+ save ss ; string ss, si ; ss = "" ; save i ;
+ for i=0 upto length(s) :
+ si := substring(i,i+1) of s ;
+ if (ASCII si >= ASCII "0") and (ASCII si <= ASCII "9") :
+ ss := ss & char(scantokens(si) + ASCII "A") ;
+ else :
+ ss := ss & si ;
+ fi ;
+ endfor ;
+ ss
enddef ;
vardef setunstringed (expr s, v) =
- scantokens(cleanstring(s)) := v ;
+ scantokens(cleanstring(s)) := v ;
enddef ;
vardef getunstringed (expr s) =
- scantokens(cleanstring(s))
+ scantokens(cleanstring(s))
enddef ;
vardef unstringed (expr s) =
- expandafter known scantokens(cleanstring(s))
-enddef ;
-
-% new
-
-% vardef colorpart(expr i) =
-% (redpart i, greenpart i,bluepart i)
-% enddef ;
-
-vardef colorpart(expr c) =
- if colormodel c = 3 :
- graypart c
- elseif colormodel c = 5 :
- (redpart c,greenpart c,bluepart c)
- elseif colormodel c = 7 :
- (cyanpart c,magentapart c,yellowpart c,blackpart c)
- fi
+ expandafter known scantokens(cleanstring(s))
enddef ;
% for david arnold:
% showgrid(-5,10,1cm,-10,10,1cm);
-def showgrid (expr MinX, MaxX, DeltaX, MinY, MaxY, DeltaY)=
- begingroup
- save defaultfont, defaultscale, size ;
- string defaultfont ; defaultfont := "cmtt10"; % i.e. infofont
- numeric defaultscale ; defaultscale := 8pt / fontsize defaultfont;
- numeric size ; size := 2pt ;
+def showgrid (expr MinX, MaxX, DeltaX, MinY, MaxY, DeltaY) = % will move
+ begingroup
+ save size ; numeric size ; size := 2pt ;
for x=MinX upto MaxX :
- for y=MinY upto MaxY :
- draw (x*DeltaX, y*DeltaY)
- withpen pencircle scaled
- if (x mod 5 = 0) and (y mod 5 = 0) :
- 1.5size withcolor .50white
- else :
- size withcolor .75white
- fi ;
- endfor ;
+ for y=MinY upto MaxY :
+ draw (x*DeltaX, y*DeltaY) withpen pencircle scaled
+ if (x mod 5 = 0) and (y mod 5 = 0) :
+ 1.5size withcolor .50white
+ else :
+ size withcolor .75white
+ fi ;
+ endfor ;
endfor ;
for x=MinX upto MaxX:
- label.bot(decimal x, (x*DeltaX,-size));
+ label.bot(textext("\infofont " & decimal x), (x*DeltaX,-size)) ;
endfor ;
for y=MinY upto MaxY:
- label.lft(decimal y, (-size,y*DeltaY)) ;
+ label.lft(textext("\infofont " & decimal y), (-size,y*DeltaY)) ;
endfor ;
- endgroup
+ endgroup
enddef;
% new, handy for:
@@ -2525,26 +2563,24 @@ enddef;
%
% \useMPgraphic{map}{n=3}
-vardef phantom (text t) =
- picture _p_ ; _p_ := image(t) ; addto _p_ also currentpicture ;
- setbounds currentpicture to boundingbox _p_ ;
+vardef phantom (text t) = % to be checked
+ picture _p_ ;
+ _p_ := image(t) ;
+ addto _p_ also currentpicture ;
+ setbounds currentpicture to boundingbox _p_ ;
enddef ;
vardef c_phantom (expr b) (text t) =
- if b :
- picture _p_ ; _p_ := image(t) ; addto _p_ also currentpicture ;
- setbounds currentpicture to boundingbox _p_ ;
- else :
- t ;
- fi ;
+ if b :
+ picture _p_ ;
+ _p_ := image(t) ;
+ addto _p_ also currentpicture ;
+ setbounds currentpicture to boundingbox _p_ ;
+ else :
+ t ;
+ fi ;
enddef ;
-% mark paths (for external progs to split)
-
-% def somepath(expr p)
-% p
-% enddef ;
-
%D Handy:
def break =
@@ -2553,27 +2589,228 @@ enddef ;
%D New too:
-primarydef p xstretched w =
- (p if (bbwidth (p)>0) and (w>0) : xscaled (w/bbwidth (p)) fi)
-enddef ;
+primarydef p xstretched w = (
+ p if (bbwidth (p)>0) and (w>0) : xscaled (w/bbwidth (p)) fi
+) enddef ;
-primarydef p ystretched h =
- (p if (bbheight(p)>0) and (h>0) : yscaled (h/bbheight(p)) fi)
-enddef ;
+primarydef p ystretched h = (
+ p if (bbheight(p)>0) and (h>0) : yscaled (h/bbheight(p)) fi
+) enddef ;
primarydef p snapped s =
- hide ( if path p :
- forever :
- exitif (bbheight(p) <= s) and (bbwidth(p) <= s) ;
- p := p scaled (1/2) ;
- endfor ;
- elseif numeric p :
- forever :
- exitif p <= s ;
- p := p scaled (1/2) ;
+ hide (
+ if path p :
+ forever :
+ exitif (bbheight(p) <= s) and (bbwidth(p) <= s) ;
+ p := p scaled (1/2) ;
+ endfor ;
+ elseif numeric p :
+ forever :
+ exitif p <= s ;
+ p := p scaled (1/2) ;
+ endfor ;
+ fi ;
+ )
+ p
+enddef ;
+
+% vardef somecolor = (1,1,0,0) enddef ;
+
+% fill OverlayBox withcolor (rcomponent somecolor,gcomponent somecolor,bcomponent somecolor) ;
+% fill OverlayBox withcolor (ccomponent somecolor,mcomponent somecolor,ycomponent somecolor,bcomponent somecolor) ;
+
+% This could be standard mplib 2 behaviour:
+
+vardef rcomponent expr p = if rgbcolor p : redpart p elseif cmykcolor p : 1 - cyanpart p else : p fi enddef ;
+vardef gcomponent expr p = if rgbcolor p : greenpart p elseif cmykcolor p : 1 - magentapart p else : p fi enddef ;
+vardef bcomponent expr p = if rgbcolor p : bluepart p elseif cmykcolor p : 1 - yellowpart p else : p fi enddef ;
+vardef ccomponent expr p = if cmykcolor p : cyanpart p elseif rgbcolor p : 1 - redpart p else : p fi enddef ;
+vardef mcomponent expr p = if cmykcolor p : magentapart p elseif rgbcolor p : 1 - greenpart p else : p fi enddef ;
+vardef ycomponent expr p = if cmykcolor p : yellowpart p elseif rgbcolor p : 1 - bluepart p else : p fi enddef ;
+vardef bcomponent expr p = if cmykcolor p : blackpart p elseif rgbcolor p : 0 else : p fi enddef ;
+
+% draw image (...) ... ; % prescripts prepended to first, postscripts appended to last
+% draw decorated (...) ... ; % prescripts prepended to each, postscripts appended to each
+% draw redecorated (...) ... ; % prescripts assigned to each, postscripts assigned to each
+% draw undecorated (...) ... ; % following properties are ignored, existing properties are kept
+%
+% draw decorated (
+% draw fullcircle scaled 20cm withpen pencircle scaled 20mm withcolor red withtransparency (1,.40) ;
+% draw fullcircle scaled 15cm withpen pencircle scaled 15mm withcolor green withtransparency (1,.30) ;
+% draw fullcircle scaled 10cm withpen pencircle scaled 10mm withcolor blue withtransparency (1,.20) ;
+% )
+% withcolor blue
+% withtransparency (1,.125) % selectively applied
+% withpen pencircle scaled 10mm
+% ;
+
+% vardef image (text imagedata) = % already defined
+% save currentpicture ;
+% picture currentpicture ;
+% currentpicture := nullpicture ;
+% imagedata ;
+% currentpicture
+% enddef ;
+
+vardef undecorated (text imagedata) text decoration =
+ save currentpicture ;
+ picture currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ currentpicture
+enddef ;
+
+
+if metapostversion < 1.770 :
+
+ vardef decorated (text imagedata) text decoration =
+ save mfun_decorated_path, currentpicture ;
+ picture mfun_decorated_path, currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ mfun_decorated_path := currentpicture ;
+ currentpicture := nullpicture ;
+ for i within mfun_decorated_path :
+ addto currentpicture
+ if stroked i :
+ doublepath pathpart i
+ dashed dashpart i
+ withpen penpart i
+ withcolor colorpart i
+ decoration
+ elseif filled i :
+ contour pathpart i
+ withpen penpart i
+ withcolor colorpart i
+ decoration
+ elseif textual i :
+ also i
+ withcolor colorpart i
+ decoration
+ else :
+ also i
+ fi
+ ;
+ endfor ;
+ currentpicture
+ enddef ;
+
+else:
+
+ vardef decorated (text imagedata) text decoration =
+ save mfun_decorated_path, currentpicture ;
+ picture mfun_decorated_path, currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ mfun_decorated_path := currentpicture ;
+ currentpicture := nullpicture ;
+ for i within mfun_decorated_path :
+ addto currentpicture
+ if stroked i :
+ doublepath pathpart i
+ dashed dashpart i
+ withpen penpart i
+ withcolor colorpart i
+ withprescript prescriptpart i
+ withpostscript postscriptpart i
+ decoration
+ elseif filled i :
+ contour pathpart i
+ withpen penpart i
+ withcolor colorpart i
+ withprescript prescriptpart i
+ withpostscript postscriptpart i
+ decoration
+ elseif textual i :
+ also i
+ withcolor colorpart i
+ withprescript prescriptpart i
+ withpostscript postscriptpart i
+ decoration
+ else :
+ also i
+ fi
+ ;
+ endfor ;
+ currentpicture
+ enddef ;
+
+fi ;
+
+vardef redecorated (text imagedata) text decoration =
+ save mfun_decorated_path, currentpicture ;
+ picture mfun_decorated_path, currentpicture ;
+ currentpicture := nullpicture ;
+ imagedata ;
+ mfun_decorated_path := currentpicture ;
+ currentpicture := nullpicture ;
+ for i within mfun_decorated_path :
+ addto currentpicture
+ if stroked i :
+ doublepath pathpart i
+ dashed dashpart i
+ withpen penpart i
+ decoration
+ elseif filled i :
+ contour pathpart i
+ withpen penpart i
+ decoration
+ elseif textual i :
+ also i
+ decoration
+ else :
+ also i
+ fi
+ ;
endfor ;
- fi ; )
- p
+ currentpicture
enddef ;
+% path mfun_bleed_box ;
+
+% primarydef p bleeded d =
+% image (
+% mfun_bleed_box := boundingbox p ;
+% if pair d :
+% draw p xysized (bbwidth(p)+2*xpart d,bbheight(p)+2*ypart d) shifted -d ;
+% else :
+% draw p xysized (bbwidth(p)+2d,bbheight(p)+2d) shifted (-d,-d) ;
+% fi ;
+% setbounds currentpicture to mfun_bleed_box ;
+% )
+% enddef ;
+
+%D New helpers:
+
+def beginglyph(expr unicode, width, height, depth) =
+ beginfig(unicode) ; % the number is irrelevant
+ charcode := unicode ;
+ charwd := width ;
+ charht := height ;
+ chardp := depth ;
+enddef ;
+
+def endglyph =
+ setbounds currentpicture to (boundingbox unitsquare xscaled charwd yscaled (charht + chardp) shifted (0,-chardp)) ;
+ if known charscale :
+ currentpicture := currentpicture scaled charscale ;
+ fi ;
+ endfig ;
+enddef ;
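+
+% a minimal usage sketch ; the unicode and dimensions are made up values :
+%
+% beginglyph(65,10,10,0) ;
+%     fill fullcircle scaled 8 ;
+% endglyph ;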
+
+%D Dimensions have never been an issue as traditional MP can't make that large
+%D pictures, but with double mode we need a catch:
+
+newinternal maxdimensions ; maxdimensions := 14000 ;
+
+def mfun_apply_max_dimensions = % not a generic helper, we want to protect this one
+ if bbwidth currentpicture > maxdimensions :
+ currentpicture := currentpicture if bbheight currentpicture > bbwidth currentpicture : ysized else : xsized fi maxdimensions ;
+ elseif bbheight currentpicture > maxdimensions :
+ currentpicture := currentpicture ysized maxdimensions ;
+ fi ;
+enddef;
+
+extra_endfig := extra_endfig & "mfun_apply_max_dimensions ;" ;
+
let dump = relax ;
diff --git a/metapost/context/base/mp-tool.mpiv b/metapost/context/base/mp-tool.mpiv
index 672a051c2..e497e2f72 100644
--- a/metapost/context/base/mp-tool.mpiv
+++ b/metapost/context/base/mp-tool.mpiv
@@ -57,8 +57,15 @@ mpprocset := 1 ;
%
% protect ;
-string space ; space := char 32 ;
-string CRLF ; CRLF := char 10 & char 13 ;
+string space ; space := char 32 ;
+string percent ; percent := char 37 ;
+string crlf ; crlf := char 10 & char 13 ;
+string dquote ; dquote := char 34 ;
+
+let SPACE = space ;
+let CRLF = crlf ;
+let DQUOTE = dquote ;
+let PERCENT = percent ;
vardef ddecimal primary p =
decimal xpart p & " " & decimal ypart p
@@ -90,8 +97,8 @@ newinternal graycolormodel ; graycolormodel := 3 ;
newinternal rgbcolormodel ; rgbcolormodel := 5 ;
newinternal cmykcolormodel ; cmykcolormodel := 7 ;
-let grayscale = numeric ;
-let greyscale = numeric ;
+let grayscale = graycolor ;
+let greyscale = greycolor ;
vardef colorpart expr c =
if not picture c :
@@ -141,6 +148,39 @@ vardef colordecimals primary c =
fi
enddef ;
+vardef colordecimalslist(text t) =
+ save b ; boolean b ; b := false ;
+ for s=t :
+ if b : & " " & fi
+ colordecimals(s)
+ hide(b := true ;)
+ endfor
+enddef ;
+
+% vardef _ctx_color_spec_ primary c =
+% if cmykcolor c :
+% "c=" & decimal cyanpart c &
+% ",m=" & decimal magentapart c &
+% ",y=" & decimal yellowpart c &
+% ",k=" & decimal blackpart c
+% elseif rgbcolor c :
+% "r=" & decimal redpart c &
+% ",g=" & decimal greenpart c &
+% ",b=" & decimal bluepart c
+% else :
+% "s=" & decimal c
+% fi
+% enddef ;
+%
+% vardef _ctx_color_spec_list_(text t) =
+% save b ; boolean b ; b := false ;
+% for s=t :
+% if b : & " " & fi
+% _ctx_color_spec_(s)
+% hide(b := true ;)
+% endfor
+% enddef ;
+
%D We have standardized data file names:
def job_name =
@@ -152,7 +192,8 @@ def data_mpd_file =
enddef ;
%D Because \METAPOST\ has a hard coded limit of 4~datafiles,
-%D we need some trickery when we have multiple files.
+%D we need some trickery when we have multiple files. This will
+%D be redone (via \LUA).
if unknown collapse_data :
boolean collapse_data ;
@@ -289,10 +330,14 @@ vardef set_outer_boundingbox text q = % obsolete
setbounds q to outerboundingbox q;
enddef;
-%D Some missing functions can be implemented rather
-%D straightforward:
+%D Some missing functions can be implemented rather straightforward (thanks to
+%D Taco and others):
+
+% oldpi := 3.14159265358979323846 ; % from <math.h>
+pi := 3.14159265358979323846264338327950288419716939937510 ; % 50 digits
+radian := 180/pi ; % 2pi*radian = 360 ;
-numeric Pi ; Pi := 3.1415926 ;
+% let +++ = ++ ;
vardef sqr primary x = x*x enddef ;
vardef log primary x = if x=0: 0 else: mlog(x)/mlog(10) fi enddef ;
@@ -302,15 +347,6 @@ vardef inv primary x = if x=0: 0 else: x**-1 fi enddef ;
vardef pow (expr x,p) = x**p enddef ;
-vardef asin primary x = x+(x**3)/6+3(x**5)/40 enddef ;
-vardef acos primary x = asin(-x) enddef ;
-vardef atan primary x = x-(x**3)/3+(x**5)/5-(x**7)/7 enddef ;
-vardef tand primary x = sind(x)/cosd(x) enddef ;
-
-%D Here are Taco Hoekwater's alternatives (but vardef'd and primaried).
-
-pi := 3.1415926 ; radian := 180/pi ; % 2pi*radian = 360 ;
-
vardef tand primary x = sind(x)/cosd(x) enddef ;
vardef cotd primary x = cosd(x)/sind(x) enddef ;
@@ -321,9 +357,11 @@ vardef cot primary x = cos(x)/sin(x) enddef ;
vardef asin primary x = angle((1+-+x,x)) enddef ;
vardef acos primary x = angle((x,1+-+x)) enddef ;
+vardef atan primary x = angle(1,x) enddef ;
vardef invsin primary x = (asin(x))/radian enddef ;
vardef invcos primary x = (acos(x))/radian enddef ;
+vardef invtan primary x = (atan(x))/radian enddef ;
vardef acosh primary x = ln(x+(x+-+1)) enddef ;
vardef asinh primary x = ln(x+(x++1)) enddef ;
@@ -331,6 +369,11 @@ vardef asinh primary x = ln(x+(x++1)) enddef ;
vardef sinh primary x = save xx ; xx = exp x ; (xx-1/xx)/2 enddef ;
vardef cosh primary x = save xx ; xx = exp x ; (xx+1/xx)/2 enddef ;
+%D Like mod, but useful for angles; it returns (-.5d,+.5d] and is used
+%D in, for instance, mp-chem.
+
+primarydef a zmod b = (-((b/2 - a) mod b) + b/2) enddef ;
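+
+% for example (values worked out by hand, not from a test run) :
+%
+% show 170 zmod 360 ; % gives  170
+% show 350 zmod 360 ; % gives  -10
+% show 180 zmod 360 ; % gives  180 (the interval is right-inclusive)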
+
%D Sometimes this is handy:
def undashed =
@@ -631,6 +674,15 @@ ultriangle := origin -- (0,+.5) -- (-.5,0) -- cycle ;
lltriangle := origin -- (-.5,0) -- (0,-.5) -- cycle ;
lrtriangle := origin -- (0,-.5) -- (+.5,0) -- cycle ;
+path triangle, uptriangle, downtriangle, lefttriangle, righttriangle ;
+
+triangle := (1,0) -- (1,0) rotated 120 -- (1,0) rotated -120 -- cycle ;
+
+uptriangle := triangle rotated 90 ;
+downtriangle := triangle rotated -90 ;
+lefttriangle := triangle rotated 180 ;
+righttriangle := triangle ;
+
path unitdiamond, fulldiamond ;
unitdiamond := (.5,0) -- (1,.5) -- (.5,1) -- (0,.5) -- cycle ;
@@ -768,8 +820,8 @@ vardef whitecolor(expr c) =
if cmykcolor c : (0,0,0,0) elseif rgbcolor c : (1,1,1) else : 1 fi
enddef ;
-vardef blackcolor(expr c) =
- if cmykcolor c : (0,0,0,1) elseif rgbcolor c : (0,0,0) else : 0 fi
+vardef blackcolor expr c =
+ if cmykcolor c : (0,0,0,1) elseif rgbcolor c : (0,0,0) else : 0 fi
enddef ;
%D Well, this is the dangerous and naive version:
@@ -1223,7 +1275,7 @@ enddef ;
extra_endfig := extra_endfig & " naturalizepaths ; " ;
-%D Noce tracer:
+%D Nice tracer:
def drawboundary primary p =
draw p dashed evenly withcolor white ;
@@ -1318,7 +1370,7 @@ primarydef pct along pat = % also negative
enddef ;
primarydef len on pat = % no outer ( ) .. somehow fails
- (arctime if len>0 : len else : (arclength(pat)+len) fi of pat) of pat
+ (arctime if len>=0 : len else : (arclength(pat)+len) fi of pat) of pat
enddef ;
% this cuts off a piece from both ends
@@ -1539,9 +1591,13 @@ primarydef p softened c =
enddef ;
vardef grayed primary p =
- if color p :
+ if rgbcolor p :
tripled(.30redpart p+.59greenpart p+.11bluepart p)
- else :
+ elseif cmykcolor p :
+ tripled(.30*(1-cyanpart p)+.59*(1-magentapart p)+.11*(1-yellowpart p)+blackpart p)
+ elseif greycolor p :
+ p
+ elseif picture p :
image (
for i within p :
addto currentpicture
@@ -1557,12 +1613,24 @@ vardef grayed primary p =
else :
also i
fi
- withcolor tripled(.30redpart i+.59greenpart i+.11bluepart i) ;
+ if unknown colorpart i :
+ % nothing
+ elseif rgbcolor colorpart i :
+ withcolor tripled(.30redpart i+.59greenpart i+.11bluepart i) ;
+ elseif cmykcolor colorpart i :
+ withcolor tripled(.30*(1-cyanpart i)+.59*(1-magentapart i)+.11*(1-yellowpart i)+blackpart i) ;
+ else :
+ withcolor colorpart i ;
+ fi
endfor ;
)
- fi
+ else :
+ p
+ fi
enddef ;
+let greyed = grayed ;
+
% yes or no: "text" infont "cmr12" at 24pt ;
% let normalinfont = infont ;
@@ -2030,7 +2098,7 @@ enddef ;
% handy
def withgray primary g =
- withcolor (g,g,g)
+ withcolor g
enddef ;
% for metafun
@@ -2253,7 +2321,7 @@ enddef ;
%D Handy:
def break =
- exitif true fi ;
+ exitif true ; % fi
enddef ;
%D New too:
@@ -2266,23 +2334,31 @@ primarydef p ystretched h = (
p if (bbheight(p)>0) and (h>0) : yscaled (h/bbheight(p)) fi
) enddef ;
-primarydef p snapped s =
- hide (
- if path p :
- forever :
- exitif (bbheight(p) <= s) and (bbwidth(p) <= s) ;
- p := p scaled (1/2) ;
- endfor ;
- elseif numeric p :
- forever :
- exitif p <= s ;
- p := p scaled (1/2) ;
- endfor ;
- fi ;
- )
- p
+%D Newer:
+
+vardef area expr p =
+ % we could calculate the boundingbox once
+ (xpart llcorner boundingbox p,0) -- p --
+ (xpart lrcorner boundingbox p,0) -- cycle
enddef ;
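+
+% usage sketch: close a function like path onto the x axis so that it can be
+% filled (the path name is made up) :
+%
+% fill area somecurve withcolor .85white ;
+% draw somecurve ;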
+vardef basiccolors[] =
+ if @ = 0 :
+ white
+ else :
+ save n ; n := @ mod 7 ;
+ if n = 1 : red
+ elseif n = 2 : green
+ elseif n = 3 : blue
+ elseif n = 4 : cyan
+ elseif n = 5 : magenta
+ elseif n = 6 : yellow
+ else : black
+ fi
+ fi
+enddef ;
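+
+% usage sketch: cycle through a small set of primary colors by index :
+%
+% for i = 1 upto 5 :
+%     fill fullsquare scaled .5cm shifted (i*cm,0) withcolor basiccolors[i] ;
+% endfor ;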
+
+
% vardef somecolor = (1,1,0,0) enddef ;
% fill OverlayBox withcolor (rcomponent somecolor,gcomponent somecolor,bcomponent somecolor) ;
@@ -2329,7 +2405,6 @@ vardef undecorated (text imagedata) text decoration =
currentpicture
enddef ;
-
if metapostversion < 1.770 :
vardef decorated (text imagedata) text decoration =
@@ -2449,25 +2524,76 @@ enddef ;
% )
% enddef ;
+vardef mfun_snapped(expr p, s) =
+ if p < 0 : - ( - else : ( fi p div s) * s % the less tokens the better
+enddef ;
+
+vardef mfun_applied(expr p, s)(suffix a) =
+ if path p :
+ if pair s :
+ for i=0 upto length(p)-1 :
+ (a(xpart point i of p,xpart s),a(ypart point i of p,ypart s)) --
+ endfor
+ if cycle p :
+ cycle
+ else :
+ (a(xpart point length(p) of p,xpart s),a(ypart point length(p) of p,ypart s))
+ fi
+ else :
+ for i=0 upto length(p)-1 :
+ (a(xpart point i of p,s),a(ypart point i of p,s)) --
+ endfor
+ if cycle p :
+ cycle
+ else :
+ (a(xpart point length(p) of p,s),a(ypart point length(p) of p,s))
+ fi
+ fi
+ elseif pair p :
+ if pair s :
+ (a(xpart p,xpart s),a(ypart p,ypart s))
+ else :
+ (a(xpart p,s),a(ypart p,s))
+ fi
+ elseif cmykcolor p :
+ (a(cyanpart p,s),a(magentapart p,s),a(yellowpart p,s),a(blackpart p,s))
+ elseif rgbcolor p :
+ (a(redpart p,s),a(greenpart p,s),a(bluepart p,s))
+ elseif graycolor p :
+ a(p,s)
+ elseif numeric p :
+ a(p,s)
+ else :
+ p
+ fi
+enddef ;
+
+primarydef p snapped s =
+ mfun_applied(p,s)(mfun_snapped) % so we can play with variants
+enddef ;
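+
+% usage sketch (the path name is made up) ; works on paths, pairs, colors and
+% numerics :
+%
+% draw somepath snapped 10pt ;       % coordinates snapped to multiples of 10pt
+% draw somepath snapped (5pt,10pt) ; % different steps for x and y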
+
%D New helpers:
+newinternal charscale ; charscale := 1 ; % persistent so one needs to 'reset' it to 0 or 1
+
def beginglyph(expr unicode, width, height, depth) =
beginfig(unicode) ; % the number is irrelevant
charcode := unicode ;
charwd := width ;
charht := height ;
chardp := depth ;
+ % charscale := 1 ; % can be set for a whole font, so no reset here
enddef ;
def endglyph =
setbounds currentpicture to (boundingbox unitsquare xscaled charwd yscaled (charht + chardp) shifted (0,-chardp)) ;
- if known charscale :
+ if known charscale : if (charscale > 0) and (charscale <> 1) :
currentpicture := currentpicture scaled charscale ;
- fi ;
+ fi ; fi ;
endfig ;
enddef ;
-%D Dimensions have bever been an issue as traditional MP can't make that large
+%D Dimensions have never been an issue as traditional MP can't make that large
%D pictures, but with double mode we need a catch:
newinternal maxdimensions ; maxdimensions := 14000 ;
diff --git a/metapost/context/fonts/bidi-symbols.tex b/metapost/context/fonts/bidi-symbols.tex
index 800e0e4ea..ba659ccb7 100644
--- a/metapost/context/fonts/bidi-symbols.tex
+++ b/metapost/context/fonts/bidi-symbols.tex
@@ -1,4 +1,4 @@
-% \nopdfcompression
+\nopdfcompression
% At the ConTeXt 2013 meeting Taco suggested to add ActualText entries to the
% shapes. It took us a bit of experimenting and the current implementation of
@@ -30,3 +30,4 @@
\getbuffer \blank
\stoptext
+
diff --git a/scripts/context/lua/mtx-bibtex.lua b/scripts/context/lua/mtx-bibtex.lua
new file mode 100644
index 000000000..92036e3a5
--- /dev/null
+++ b/scripts/context/lua/mtx-bibtex.lua
@@ -0,0 +1,152 @@
+if not modules then modules = { } end modules ['mtx-bibtex'] = {
+ version = 1.002,
+ comment = "this script is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE",
+ license = "see context related readme files"
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-bibtex</entry>
+ <entry name="detail">bibtex helpers</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
+ <flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
+ <flag name="search"><short>seatch bibtex database(s)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
+ <example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
+ <example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
+ <example><command>mtxrun --script bibtex --search --list --pattern=match(author:foo) bar.bib</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
+]]
+
+local application = logs.application {
+ name = "mtx-bibtex",
+ banner = "bibtex helpers",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+
+require("util-seq")
+require("publ-dat")
+require("publ-fnd")
+
+scripts = scripts or { }
+scripts.bibtex = scripts.bibtex or { }
+
+function scripts.bibtex.toxml(files)
+ local instance = bibtex.new()
+ local target = "mtx-bibtex-output.xml"
+ for i=1,#files do
+ local filename = files[i]
+ local filetype = file.suffix(filename)
+ if filetype == "xml" then
+ target = filename
+ elseif filetype == "bib" then
+ bibtex.load { dataset = instance, filename = filename }
+ else
+ -- not supported
+ end
+ end
+ bibtex.converttoxml(instance,true)
+ instance.shortcuts = nil
+ instance.luadata = nil
+ xml.save(instance.xmldata,target)
+end
+
+function scripts.bibtex.tolua(files)
+ local instance = bibtex.new()
+ local target = "mtx-bibtex-output.lua"
+ for i=1,#files do
+ local filename = files[i]
+ local filetype = file.suffix(filename)
+ if filetype == "lua" then
+ target = filename
+ elseif filetype == "bib" then
+ bibtex.load { dataset = instance, filename = filename }
+
+ else
+ -- not supported
+ end
+ end
+ instance.shortcuts = nil
+ instance.xmldata = nil
+ bibtex.analyze(instance)
+ if environment.arguments.simple then
+ table.save(target,instance)
+ else
+ table.save(target,instance.luadata)
+ end
+end
+
+function scripts.bibtex.search(files,pattern,list)
+ if pattern then
+ local dataset = publications.datasets["whatever"]
+ for i=1,#files do
+ local filename = resolvers.findfile(files[i])
+ if filename and filename ~= "" then
+ publications.load { dataset = "whatever", filename = filename }
+ end
+ end
+ local found = publications.search(dataset,pattern)
+ local tags = table.sortedkeys(found)
+ if #tags == 0 then
+ report("no match")
+ elseif list then
+ report("%s matches:",#tags)
+ local result = { }
+ local luadata = dataset.luadata
+ for i=1,#tags do
+ local tag = tags[i]
+ local entry = luadata[tag]
+ result[i] = {
+ tag,
+ entry.year,
+ entry.author,
+ entry.title,
+ }
+ end
+ utilities.formatters.formatcolumns(result)
+ logs.newline()
+ for i=1,#result do
+ texio.write_nl(result[i])
+ end
+ logs.newline()
+ else
+ report("%s matches: % t",#tags,tags)
+ end
+ end
+end
+
+if environment.arguments.search then
+ scripts.bibtex.search(environment.files,environment.arguments.pattern,environment.arguments.list)
+elseif environment.arguments.toxml then
+ scripts.bibtex.toxml(environment.files)
+elseif environment.arguments.tolua then
+ scripts.bibtex.tolua(environment.files)
+elseif environment.arguments.exporthelp then
+ application.export(environment.arguments.exporthelp,environment.files[1])
+else
+ application.help()
+end
+
+-- scripts.bibtex.toxml { "tugboat.bib" }
+-- scripts.bibtex.tolua { "tugboat.bib" }
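
A minimal sketch of driving the new entry points from Lua, mirroring the commented calls above; the file names are placeholders and, as in the script itself, the publ-dat and publ-fnd modules are assumed to be loaded first:

    require("publ-dat")
    require("publ-fnd")

    scripts.bibtex.tolua  { "bibl-001.bib" }                   -- writes mtx-bibtex-output.lua
    scripts.bibtex.toxml  { "bibl-001.bib", "biblio.xml" }     -- an xml name in the list becomes the target
    scripts.bibtex.search({ "bar.bib" }, "match(author:foo)", true)
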
diff --git a/scripts/context/lua/mtx-check.lua b/scripts/context/lua/mtx-check.lua
index 9f52509ec..c456b4414 100644
--- a/scripts/context/lua/mtx-check.lua
+++ b/scripts/context/lua/mtx-check.lua
@@ -21,7 +21,7 @@ local helpinfo = [[
<flags>
<category name="basic">
<subcategory>
- <flag name="convert"><short>check tex file for errors</short></flag>
+ <flag name="check"><short>check tex file for errors</short></flag>
</subcategory>
</category>
</flags>
@@ -34,17 +34,17 @@ local application = logs.application {
helpinfo = helpinfo,
}
-local report = application.report
+local report = application.report
-scripts = scripts or { }
-scripts.checker = scripts.checker or { }
+scripts = scripts or { }
+scripts.checker = scripts.checker or { }
-local validator = { }
+local validator = { }
-validator.n = 1
-validator.errors = { }
-validator.trace = false
-validator.direct = false
+validator.n = 1
+validator.errors = { }
+validator.trace = false
+validator.direct = false
validator.printer = print
validator.tracer = print
@@ -68,27 +68,24 @@ local progress = function(position, data, kind)
end
end
-local i_m, d_m = P("$"), P("$$")
-local l_s, r_s = P("["), P("]")
-local l_g, r_g = P("{"), P("}")
+local i_m, d_m = P("$"), P("$$")
+local l_s, r_s = P("["), P("]")
+local l_g, r_g = P("{"), P("}")
-local okay = lpeg.P("{[}") + lpeg.P("{]}")
+local okay = lpeg.P("{[}") + lpeg.P("{]}")
-local esc = P("\\")
-local cr = P("\r")
-local lf = P("\n")
-local crlf = P("\r\n")
-local space = S(" \t\f\v")
-local newline = crlf + cr + lf
+local esc = P("\\")
+local space = S(" \t\f\v")
+local newline = lpeg.patterns.newline
-local line = newline / function() validator.n = validator.n + 1 end
+local line = newline / function() validator.n = validator.n + 1 end
local startluacode = P("\\startluacode")
local stopluacode = P("\\stopluacode")
-local somecode = startluacode * (1-stopluacode)^1 * stopluacode
+local somecode = startluacode * (1-stopluacode)^1 * stopluacode
-local stack = { }
+local stack = { }
local function push(p,s)
-- print("start",p,s)
@@ -117,7 +114,7 @@ local contextgrammar = P { "tokens",
["start"] = start,
["stop"] = stop,
["whatever"] = line + esc * 1 + C(P("%") * (1-line)^0),
- ["grouped"] = l_g * (V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + (1 - l_g - r_g))^0 * r_g,
+ ["grouped"] = l_g * (V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + line + (1 - l_g - r_g))^0 * r_g,
["setup"] = l_s * (okay + V("whatever") + V("grouped") + V("setup") + V("display") + V("inline") + (1 - l_s - r_s))^0 * r_s,
["display"] = d_m * (V("whatever") + V("grouped") + (1 - d_m))^0 * d_m,
["inline"] = i_m * (V("whatever") + V("grouped") + (1 - i_m))^0 * i_m,
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua
index 4c6672051..d624f6831 100644
--- a/scripts/context/lua/mtx-context.lua
+++ b/scripts/context/lua/mtx-context.lua
@@ -28,10 +28,13 @@ local fileaddsuffix = file.addsuffix
local filenewsuffix = file.replacesuffix
local removesuffix = file.removesuffix
local validfile = lfs.isfile
+local removefile = os.remove
+local renamefile = os.rename
+local formatters = string.formatters
local application = logs.application {
name = "mtx-context",
- banner = "ConTeXt Process Management 0.60",
+ banner = "ConTeXt Process Management 0.61",
-- helpinfo = helpinfo, -- table with { category_a = text_1, category_b = text_2 } or helpstring or xml_blob
helpinfo = "mtx-context.xml",
}
@@ -87,14 +90,17 @@ scripts.context = scripts.context or { }
-- for the moment here
-if getargument("jit") or getargument("jiton") then
+if jit then -- already luajittex
+ setargument("engine","luajittex")
+ setargument("jit",nil)
+elseif getargument("jit") or getargument("jiton") then -- relaunch luajittex
-- bonus shortcut, we assume than --jit also indicates the engine
-- although --jit and --engine=luajittex are independent
setargument("engine","luajittex")
end
-local engine_new = getargument("engine") or directives.value("system.engine")
-local engine_old = environment.ownbin
+local engine_new = file.nameonly(getargument("engine") or directives.value("system.engine"))
+local engine_old = file.nameonly(environment.ownbin)
local function restart(engine_old,engine_new)
local command = format("%s --luaonly %q %s --redirected",engine_new,environment.ownname,environment.reconstructcommandline())
@@ -253,8 +259,9 @@ end
-- multipass control
-local multipass_suffixes = { ".tuc" }
-local multipass_nofruns = 8 -- or 7 to test oscillation
+local multipass_suffixes = { ".tuc" }
+local multipass_nofruns = 9 -- better for tracing oscillation
+local multipass_forcedruns = false
local function multipass_hashfiles(jobname)
local hash = { }
@@ -275,11 +282,47 @@ local function multipass_changed(oldhash, newhash)
return false
end
-local function multipass_copyluafile(jobname)
+local f_tempfile = formatters["%s-%s-%02d.tmp"]
+
+local function backup(run,kind,filename)
+ if run == 1 then
+ for i=1,10 do
+ local tmpname = f_tempfile(jobname,kind,i)
+ if validfile(tmpname) then
+ removefile(tmpname)
+ report("removing %a",tmpname)
+ end
+ end
+ end
+ if validfile(filename) then
+ local tmpname = f_tempfile(jobname,kind,run or 1)
+ report("copying %a into %a",filename,tmpname)
+ file.copy(filename,tmpname)
+ else
+ report("no file %a, nothing kept",filename)
+ end
+end
+
+local function multipass_copyluafile(jobname,run)
local tuaname, tucname = jobname..".tua", jobname..".tuc"
if validfile(tuaname) then
- os.remove(tucname)
- os.rename(tuaname,tucname)
+ if run then
+ backup(run,"tuc",tucname)
+ report("copying %a into %a",tuaname,tucname)
+ report()
+ end
+ removefile(tucname)
+ renamefile(tuaname,tucname)
+ end
+end
+
+local function multipass_copylogfile(jobname,run)
+ local logname = jobname..".log"
+ if validfile(logname) then
+ if run then
+ backup(run,"log",logname)
+ report()
+ end
end
end
@@ -344,8 +387,8 @@ local function result_push_purge(oldbase,newbase)
for _, suffix in next, usedsuffixes.after do
local oldname = fileaddsuffix(oldbase,suffix)
local newname = fileaddsuffix(newbase,suffix)
- os.remove(newname)
- os.remove(oldname)
+ removefile(newname)
+ removefile(oldname)
end
end
@@ -354,10 +397,10 @@ local function result_push_keep(oldbase,newbase)
local oldname = fileaddsuffix(oldbase,suffix)
local newname = fileaddsuffix(newbase,suffix)
local tmpname = "keep-"..oldname
- os.remove(tmpname)
- os.rename(oldname,tmpname)
- os.remove(oldname)
- os.rename(newname,oldname)
+ removefile(tmpname)
+ renamefile(oldname,tmpname)
+ removefile(oldname)
+ renamefile(newname,oldname)
end
end
@@ -365,8 +408,8 @@ local function result_save_error(oldbase,newbase)
for _, suffix in next, usedsuffixes.keep do
local oldname = fileaddsuffix(oldbase,suffix)
local newname = fileaddsuffix(newbase,suffix)
- os.remove(newname) -- to be sure
- os.rename(oldname,newname)
+ removefile(newname) -- to be sure
+ renamefile(oldname,newname)
end
end
@@ -374,8 +417,8 @@ local function result_save_purge(oldbase,newbase)
for _, suffix in next, usedsuffixes.after do
local oldname = fileaddsuffix(oldbase,suffix)
local newname = fileaddsuffix(newbase,suffix)
- os.remove(newname) -- to be sure
- os.rename(oldname,newname)
+ removefile(newname) -- to be sure
+ renamefile(oldname,newname)
end
end
@@ -384,9 +427,9 @@ local function result_save_keep(oldbase,newbase)
local oldname = fileaddsuffix(oldbase,suffix)
local newname = fileaddsuffix(newbase,suffix)
local tmpname = "keep-"..oldname
- os.remove(newname)
- os.rename(oldname,newname)
- os.rename(tmpname,oldname)
+ removefile(newname)
+ renamefile(oldname,newname)
+ renamefile(tmpname,oldname)
end
end
@@ -536,15 +579,30 @@ function scripts.context.run(ctxdata,filename)
local a_profile = getargument("profile")
local a_batchmode = getargument("batchmode")
local a_nonstopmode = getargument("nonstopmode")
+ local a_scrollmode = getargument("scrollmode")
local a_once = getargument("once")
local a_synctex = getargument("synctex")
local a_backend = getargument("backend")
local a_arrange = getargument("arrange")
local a_noarrange = getargument("noarrange")
local a_jiton = getargument("jiton")
+ local a_jithash = getargument("jithash")
local a_texformat = getargument("texformat")
+ local a_keeptuc = getargument("keeptuc")
+ local a_keeplog = getargument("keeplog")
+
+ -- the following flag is not officially supported because I cannot foresee
+ -- side effects (so no bug reports please) .. we provide --sandbox that
+ -- does similar things but tries to ensure that context works as expected
+
+ local a_safer = getargument("safer")
+
+ if a_safer then
+ report("warning: using the luatex safer options, processing is not guaranteed")
+ end
+
--
- a_batchmode = (a_batchmode and "batchmode") or (a_nonstopmode and "nonstopmode") or nil
+ a_batchmode = (a_batchmode and "batchmode") or (a_nonstopmode and "nonstopmode") or (a_scrollmode and "scrollmode") or nil
a_synctex = check_synctex(a_synctex)
--
for i=1,#filelist do
@@ -578,13 +636,30 @@ function scripts.context.run(ctxdata,filename)
formatfile, scriptfile = resolvers.locateformat(formatname)
end
--
- a_jiton = (a_jiton or toboolean(analysis.jiton,true)) and true or nil
+ a_jiton = (a_jiton or toboolean(analysis.jiton,true)) and true or nil
+ a_jithash = validstring(a_jithash or analysis.jithash) or nil
--
if not formatfile or not scriptfile then
report("warning: no format found, forcing remake (source driven)")
scripts.context.make(formatname,a_engine)
formatfile, scriptfile = resolvers.locateformat(formatname)
end
+ --
+ local function combine(key)
+ local flag = validstring(environment[key])
+ local plus = analysis[key]
+ if flag and plus then
+ return plus .. "," .. flag -- flag wins
+ else
+ return flag or plus -- flag wins
+ end
+ end
+ local a_trackers = analysis.trackers
+ local a_experiments = analysis.experiments
+ local directives = combine("directives")
+ local trackers = combine("trackers")
+ local experiments = combine("experiments")
+ --
if formatfile and scriptfile then
local suffix = validstring(getargument("suffix"))
local resultname = validstring(getargument("result"))
@@ -632,9 +707,9 @@ function scripts.context.run(ctxdata,filename)
local maxnofruns = once and 1 or multipass_nofruns
--
local c_flags = {
- directives = validstring(environment.directives), -- gets passed via mtxrun
- trackers = validstring(environment.trackers), -- gets passed via mtxrun
- experiments = validstring(environment.experiments), -- gets passed via mtxrun
+ directives = directives, -- gets passed via mtxrun
+ trackers = trackers, -- gets passed via mtxrun
+ experiments = experiments, -- gets passed via mtxrun
--
result = validstring(resultname),
input = validstring(getargument("input") or filename), -- alternative input
@@ -655,18 +730,16 @@ function scripts.context.run(ctxdata,filename)
["interaction"] = a_batchmode,
["synctex"] = a_synctex,
["no-parse-first-line"] = true,
+ ["safer"] = a_safer,
-- ["no-mktex"] = true,
-- ["file-line-error-style"] = true,
["fmt"] = formatfile,
["lua"] = scriptfile,
["jobname"] = jobname,
["jiton"] = a_jiton,
+ ["jithash"] = a_jithash,
}
--
- if a_synctex then
- report("warning: synctex is enabled") -- can add upto 5% runtime
- end
- --
if not a_timing then
-- okay
elseif c_flags.usemodule then
@@ -683,6 +756,15 @@ function scripts.context.run(ctxdata,filename)
c_flags.directives = "system.profile"
end
--
+ if a_synctex then
+ report("warning: synctex is enabled") -- can add upto 5% runtime
+ if c_flags.directives then
+ c_flags.directives = format("system.synctex=%s,%s",a_synctex,c_flags.directives)
+ else
+ c_flags.directives = format("system.synctex=%s",a_synctex)
+ end
+ end
+ --
-- kindofrun: 1:first run, 2:successive run, 3:once, 4:last of maxruns
--
for currentrun=1,maxnofruns do
@@ -690,14 +772,16 @@ function scripts.context.run(ctxdata,filename)
c_flags.final = false
c_flags.kindofrun = (a_once and 3) or (currentrun==1 and 1) or (currentrun==maxnofruns and 4) or 2
c_flags.maxnofruns = maxnofruns
+ c_flags.forcedruns = multipass_forcedruns and multipass_forcedruns > 0 and multipass_forcedruns or nil
c_flags.currentrun = currentrun
c_flags.noarrange = a_noarrange or a_arrange or nil
--
local command = luatex_command(l_flags,c_flags,mainfile,a_engine)
--
- report("run %s: %s",i,command)
+ report("run %s: %s",currentrun,command)
print("") -- cleaner, else continuation on same line
local returncode, errorstring = os.spawn(command)
+ -- todo: remake format when no proper format is found
if not returncode then
report("fatal error: no return code, message: %s",errorstring or "?")
if resultname then
@@ -706,11 +790,17 @@ function scripts.context.run(ctxdata,filename)
os.exit(1)
break
elseif returncode == 0 then
- multipass_copyluafile(jobname)
- newhash = multipass_hashfiles(jobname)
- if multipass_changed(oldhash,newhash) then
- oldhash = newhash
- else
+ multipass_copyluafile(jobname,a_keeptuc and currentrun)
+ multipass_copylogfile(jobname,a_keeplog and currentrun)
+ if not multipass_forcedruns then
+ newhash = multipass_hashfiles(jobname)
+ if multipass_changed(oldhash,newhash) then
+ oldhash = newhash
+ else
+ break
+ end
+ elseif currentrun == multipass_forcedruns then
+ report("quitting after force %i runs",multipass_forcedruns)
break
end
else
@@ -773,6 +863,24 @@ function scripts.context.run(ctxdata,filename)
pdf_open(resultname or jobname,pdfview)
end
--
+ local epub = analysis.epub
+ if epub then
+ if type(epub) == "string" then
+ local t = settings_to_array(epub)
+ for i=1,#t do
+ t[i] = "--" .. gsub(t[i],"^%-*","")
+ end
+ epub = concat(t," ")
+ else
+ epub = "--make"
+ end
+ local command = "mtxrun --script epub " .. epub .. " " .. jobname
+ report()
+ report("making epub file: ",command)
+ report()
+ os.execute(command)
+ end
+ --
if a_timing then
report()
report("you can process (timing) statistics with:",jobname)
@@ -838,7 +946,7 @@ function scripts.context.pipe() -- still used?
scripts.context.purge_job(filename)
elseif getargument("purgeall") then
scripts.context.purge_job(filename,true)
- os.remove(filename)
+ removefile(filename)
end
else
if formatname then
@@ -1032,11 +1140,11 @@ local special_runfiles = {
local function purge_file(dfile,cfile)
if cfile and validfile(cfile) then
- if os.remove(dfile) then
+ if removefile(dfile) then
return filebasename(dfile)
end
elseif dfile then
- if os.remove(dfile) then
+ if removefile(dfile) then
return filebasename(dfile)
end
end
@@ -1130,8 +1238,8 @@ local function touch(path,name,versionpattern,kind,kindpattern)
end
if newdata ~= "" and (oldversion ~= newversion or oldkind ~= newkind or newdata ~= olddata) then
local backup = filenewsuffix(name,"tmp")
- os.remove(backup)
- os.rename(name,backup)
+ removefile(backup)
+ renamefile(name,backup)
io.savedata(name,newdata)
return name, oldversion, newversion, oldkind, newkind
end
@@ -1478,9 +1586,12 @@ do
end
if getargument("once") then
- multipass_nofruns = 1
-elseif getargument("runs") then
- multipass_nofruns = tonumber(getargument("runs")) or nil
+ multipass_nofruns = 1
+else
+ if getargument("runs") then
+ multipass_nofruns = tonumber(getargument("runs")) or nil
+ end
+ multipass_forcedruns = tonumber(getargument("forcedruns")) or nil
end
if getargument("run") then
diff --git a/scripts/context/lua/mtx-context.xml b/scripts/context/lua/mtx-context.xml
index a3812288f..c41093289 100644
--- a/scripts/context/lua/mtx-context.xml
+++ b/scripts/context/lua/mtx-context.xml
@@ -108,6 +108,14 @@
<flag name="once">
<short>only run once (no multipass data file is produced)</short>
</flag>
+ <flag name="runs">
+ <short>process at most this many times</short>
+ </flag>
+ <flag name="forcedruns">
+ <short>process this many times (allows for optimization trial runs)</short>
+ </flag>
+ </subcategory>
+ <subcategory>
<flag name="batchmode">
<short>run without stopping and do not show messages on the console</short>
</flag>
@@ -117,7 +125,7 @@
<flag name="synctex">
<short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short>
</flag>
- </subcategory>
+ </subcategory>
<subcategory>
<flag name="generate">
<short>generate file database etc. (as luatools does)</short>
@@ -134,7 +142,7 @@
<short>assume given file present elsewhere</short>
</flag>
<flag name="nofile">
- <short>use dummy file as jobname</short>
+ <short>use dummy file as jobname</short>
</flag>
</subcategory>
</category>
@@ -144,16 +152,22 @@
<short>update context version number (also provide <ref name="expert"/>, optionally provide <ref name="basepath"/>)</short>
</flag>
<flag name="nostatistics">
- <short>omit runtime statistics at the end of the run</short>
+ <short>omit runtime statistics at the end of the run</short>
</flag>
<flag name="update">
- <short>update context from website (not to be confused with contextgarden)</short>
+ <short>update context from website (not to be confused with contextgarden)</short>
</flag>
- <flag name="profile">
- <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
+ <flag name="profile">
+ <short>profile job (use: mtxrun <ref name="script"/> profile <ref name="analyze"/>)</short>
</flag>
- <flag name="timing">
- <short>generate timing and statistics overview</short>
+ <flag name="timing">
+ <short>generate timing and statistics overview</short>
+ </flag>
+ <flag name="keeptuc">
+ <short>keep previous tuc files (jobname-tuc-[run].tmp)</short>
+ </flag>
+ <flag name="keeplog">
+ <short>keep previous log files (jobname-log-[run].tmp)</short>
</flag>
</subcategory>
<subcategory>
@@ -182,6 +196,11 @@
<short>do not check for file and enter scroll mode (<ref name="dummyfile"/>=whatever.tmp)</short>
</flag>
</subcategory>
+ <subcategory>
+ <flag name="sandbox">
+ <short>process file in a limited environment</short>
+ </flag>
+ </subcategory>
</category>
</flags>
</application>
diff --git a/scripts/context/lua/mtx-convert.lua b/scripts/context/lua/mtx-convert.lua
index b76b3baaf..d5dba075a 100644
--- a/scripts/context/lua/mtx-convert.lua
+++ b/scripts/context/lua/mtx-convert.lua
@@ -48,7 +48,7 @@ local convert = scripts.convert
convert.converters = convert.converters or { }
local converters = convert.converters
-local gsprogram = os.type == "windows" and "gswin32c" or "gs"
+local gsprogram = (os.type == "windows" and (os.which("gswin64c.exe") or os.which("gswin32c.exe"))) or "gs"
local gstemplate_eps = "%s -q -sDEVICE=pdfwrite -dPDFSETTINGS=/prepress -dEPSCrop -dNOPAUSE -dSAFER -dNOCACHE -dBATCH -dAutoRotatePages=/None -dProcessColorModel=/DeviceCMYK -sOutputFile=%s %s -c quit"
local gstemplate_ps = "%s -q -sDEVICE=pdfwrite -dPDFSETTINGS=/prepress -dNOPAUSE -dSAFER -dNOCACHE -dBATCH -dAutoRotatePages=/None -dProcessColorModel=/DeviceCMYK -sOutputFile=%s %s -c quit"
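
The ghostscript change above prefers the 64 bit console binary on Windows (falling back to the 32 bit one, and to plain gs elsewhere); the selected name is then substituted into the command templates. A sketch of how such a command line is assembled, with made-up file names:

    local command = string.format(gstemplate_eps, gsprogram, "figure.pdf", "figure.eps")
    -- e.g. gs -q -sDEVICE=pdfwrite ... -sOutputFile=figure.pdf figure.eps -c quit
    os.execute(command)
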
diff --git a/scripts/context/lua/mtx-epub.lua b/scripts/context/lua/mtx-epub.lua
index 11f0a2024..956ce4931 100644
--- a/scripts/context/lua/mtx-epub.lua
+++ b/scripts/context/lua/mtx-epub.lua
@@ -18,8 +18,89 @@ if not modules then modules = { } end modules ['mtx-epub'] = {
-- first we need a decent strategy to export them. More information will be
-- available on the wiki.
-local format, gsub = string.format, string.gsub
-local concat = table.concat
+-- META-INF
+-- container.xml
+-- OEBPS
+-- content.opf
+-- toc.ncx
+-- images
+-- styles
+-- mimetype
+
+-- todo:
+--
+-- remove m_k_v_i prefixes
+-- remap fonts %mono% in css so that we can replace
+-- coverpage tests
+-- split up
+
+-- todo: automated cover page:
+--
+-- \startMPpage
+-- StartPage ;
+-- fill Page withcolor .5red ;
+-- numeric n ;
+-- for i=10 downto 1 :
+-- n := i * PaperWidth/40 ;
+-- draw
+-- lrcorner Page shifted (0,n)
+-- % -- lrcorner Page
+-- -- lrcorner Page shifted (-n,0)
+-- % -- cycle
+-- withpen pencircle scaled 1mm
+-- withcolor white ;
+-- endfor ;
+-- picture p ; p := image (
+-- draw
+-- anchored.top(
+-- textext.bot("\tttf Some Title")
+-- xsized .8PaperWidth
+-- ,center topboundary Page
+-- )
+-- withcolor white ;
+-- ) ;
+-- picture q ; q := image (
+-- draw
+-- anchored.top(
+-- textext.bot("\tttf An Author")
+-- xsized .4PaperWidth
+-- shifted (0,-PaperHeight/40)
+-- ,center bottomboundary p
+-- )
+-- withcolor white ;
+-- ) ;
+-- draw p ;
+-- draw q ;
+-- StopPage ;
+-- \stopMPpage
+
+local format, gsub, find = string.format, string.gsub, string.find
+local concat, sortedhash = table.concat, table.sortedhash
+
+local formatters = string.formatters
+local replacetemplate = utilities.templates.replace
+
+local addsuffix = file.addsuffix
+local nameonly = file.nameonly
+local basename = file.basename
+local pathpart = file.pathpart
+local joinfile = file.join
+local suffix = file.suffix
+local addsuffix = file.addsuffix
+local removesuffix = file.removesuffix
+local replacesuffix = file.replacesuffix
+
+local copyfile = file.copy
+local removefile = os.remove
+
+local needsupdating = file.needsupdating
+
+local isdir = lfs.isdir
+local isfile = lfs.isfile
+local mkdir = lfs.mkdir
+
+local pushdir = dir.push
+local popdir = dir.pop
local helpinfo = [[
<?xml version="1.0"?>
@@ -27,12 +108,17 @@ local helpinfo = [[
<metadata>
<entry name="name">mtx-epub</entry>
<entry name="detail">ConTeXt EPUB Helpers</entry>
- <entry name="version">0.12</entry>
+ <entry name="version">1.10</entry>
</metadata>
<flags>
<category name="basic">
<subcategory>
<flag name="make"><short>create epub zip file</short></flag>
+ <flag name="purge"><short>remove obsolete files</short></flag>
+ <flag name="rename"><short>rename images to sane names</short></flag>
+ <flag name="svgmath"><short>convert mathml to svg</short></flag>
+ <flag name="svgstyle"><short>use given tex style for svg generation (overloads style in specification)</short></flag>
+ <flag name="all"><short>assume: --purge --rename --svgmath (for fast testing)</short></flag>
</subcategory>
</category>
</flags>
@@ -49,10 +135,12 @@ local helpinfo = [[
local application = logs.application {
name = "mtx-epub",
- banner = "ConTeXt EPUB Helpers 0.12",
+ banner = "ConTeXt EPUB Helpers 1.10",
helpinfo = helpinfo,
}
+local report = application.report
+
-- script code
scripts = scripts or { }
@@ -60,91 +148,142 @@ scripts.epub = scripts.epub or { }
local mimetype = "application/epub+zip"
-local container = [[
+local t_container = [[
<?xml version="1.0" encoding="UTF-8"?>
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
<rootfiles>
- <rootfile full-path="OEBPS/%s" media-type="application/oebps-package+xml"/>
+ <rootfile full-path="OEBPS/%rootfile%" media-type="application/oebps-package+xml"/>
</rootfiles>
</container>
]]
-local package = [[
+-- urn:uuid:
+
+-- <dc:identifier id="%identifier%" opf:scheme="UUID">%uuid%</dc:identifier>
+
+local t_package = [[
<?xml version="1.0" encoding="UTF-8"?>
-<package version="2.0" xmlns="http://www.idpf.org/2007/opf" unique-identifier="%s">
+<package xmlns="http://www.idpf.org/2007/opf" unique-identifier="%identifier%" version="3.0">
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:opf="http://www.idpf.org/2007/opf">
- <dc:title>%s</dc:title>
- <dc:language>%s</dc:language>
- <dc:identifier id="%s" opf:scheme="UUID">urn:uuid:%s</dc:identifier>
- <dc:creator>%s</dc:creator>
- <dc:date>%s</dc:date>
- <meta name="cover" content="%s" />
+ <dc:title>%title%</dc:title>
+ <dc:language>%language%</dc:language>
+ <dc:identifier id="%identifier%">%uuid%</dc:identifier>
+ <dc:creator>%creator%</dc:creator>
+ <dc:date>%date%</dc:date>
+ <!--
+ <dc:subject>%subject%</dc:subject>
+ <dc:description>%description%</dc:description>
+ <dc:publisher>%publisher%</dc:publisher>
+ <dc:source>%source%</dc:source>
+ <dc:relation>%relation%</dc:relation>
+ <dc:coverage>%coverage%</dc:coverage>
+ <dc:rights>%rights%</dc:rights>
+ -->
+ <meta name="cover" content="%coverpage%" />
+ <meta name="generator" content="ConTeXt MkIV" />
+ <meta property="dcterms:modified">%date%</meta>
</metadata>
<manifest>
-%s
+%manifest%
</manifest>
<spine toc="ncx">
<itemref idref="cover-xhtml" />
- <itemref idref="%s" />
+ <itemref idref="%rootfile%" />
</spine>
</package>
]]
-local item = [[ <item id="%s" href="%s" media-type="%s"/>]]
-local toc = [[
-<?xml version="1.0"?>
+local t_item = [[ <item id="%id%" href="%filename%" media-type="%mime%" />]]
+local t_prop = [[ <item id="%id%" href="%filename%" media-type="%mime%" properties="%properties%" />]]
+
+-- <!DOCTYPE ncx PUBLIC "-//NISO//DTD ncx 2005-1//EN" "http://www.daisy.org/z3986/2005/ncx-2005-1.dtd">
+
+local t_toc = [[
+<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE ncx PUBLIC "-//NISO//DTD ncx 2005-1//EN" "http://www.daisy.org/z3986/2005/ncx-2005-1.dtd">
+<!-- this is no longer needed in epub 3.0+ -->
<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1">
<head>
- <meta name="dtb:uid" content="%s" />
+ <meta name="generator" content="ConTeXt MkIV" />
+ <meta name="dtb:uid" content="%identifier%" />
<meta name="dtb:depth" content="2" />
<meta name="dtb:totalPgeCount" content="0" />
<meta name="dtb:maxPageNumber" content="0" />
</head>
<docTitle>
- <text>%s</text>
+ <text>%title%</text>
</docTitle>
+ <docAuthor>
+ <text>%author%</text>
+ </docAuthor>
+
<navMap>
<navPoint id="np-1" playOrder="1">
<navLabel>
<text>start</text>
</navLabel>
- <content src="%s"/>
+ <content src="%root%"/>
</navPoint>
</navMap>
</ncx>
]]
-local coverxhtml = [[
+local t_navtoc = [[
<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:epub="http://www.idpf.org/2007/ops">
+ <head>
+ <meta charset="utf-8" />
+ <title>navtoc</title>
+ </head>
+ <body>
+ <div class="navtoc">
+ <!-- <nav epub:type="lot"> -->
+ <nav epub:type="toc" id="navtoc">
+ <ol>
+ <li><a href="%root%">document</a></li>
+ </ol>
+ </nav>
+ </div>
+ </body>
+</html>
+]]
+
+-- <html xmlns="http://www.w3.org/1999/xhtml">
+-- <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+
+local t_coverxhtml = [[
+<?xml version="1.0" encoding="UTF-8"?>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
- <title>cover.xhtml</title>
+ <meta charset="utf-8" />
+ <title>cover page</title>
</head>
<body>
- <div>
- <img src="%s" alt="The cover image" style="max-width: 100%%;" />
+ <div class="coverpage">
+ %content%
</div>
</body>
</html>
]]
+local t_coverimg = [[
+ <img src="%image%" alt="The cover image" style="max-width: 100%%;" />
+]]
+
-- We need to figure out what is permitted. Numbers only seem to give
-- problems in some applications as do names with dashes. Also the
-- optional toc is supposed to be there and although id's are by
@@ -154,12 +293,13 @@ local coverxhtml = [[
local function dumbid(filename)
-- return (string.gsub(os.uuid(),"%-%","")) -- to be tested
- return file.nameonly(filename) .. "-" .. file.suffix(filename)
+ return nameonly(filename) .. "-" .. suffix(filename)
end
local mimetypes = {
xhtml = "application/xhtml+xml",
xml = "application/xhtml+xml",
+ html = "application/html",
css = "text/css",
svg = "image/svg+xml",
png = "image/png",
@@ -175,204 +315,601 @@ local idmakers = {
default = function(filename) return dumbid(filename) end,
}
--- specification = {
--- name = "document",
--- identifier = "123",
--- root = "a.xhtml",
--- files = {
--- "a.xhtml",
--- "b.css",
--- "c.png",
--- }
--- }
-
-local function locateimages(oldname,newname,subpath)
+local function relocateimages(imagedata,oldname,newname,subpath,rename)
local data = io.loaddata(oldname)
- local images = { }
- local done = gsub(data,"(background%-image *: * url%()(.-)(%))", function(before,name,after)
- if subpath then
- name = file.join(subpath,name)
+ if data then
+ subpath = joinfile("..",subpath)
+ report("relocating images")
+ local n = 0
+ local done = gsub(data,[[(id=")(.-)(".-background%-image *: *url%()(.-)(%))]], function(s1,id,s2,name,s3)
+ local data = imagedata[id]
+ if data then
+ local newname = data.newname
+ if newname then
+ if subpath then
+ name = joinfile(subpath,basename(newname))
+ else
+ name = basename(newname)
+ end
+ -- name = url.addscheme(name)
+ end
+ if newname then
+ n = n + 1
+ if rename then
+ name = joinfile(subpath,addsuffix(id,suffix(name)))
+ end
+ return s1 .. id .. s2 .. name .. s3
+ end
+ end
+ end)
+ report("%s images relocated in %a",n,newname)
+ if newname then
+ io.savedata(newname,done)
end
- images[#images+1] = name
- return before .. name .. after
- end)
- if newname then
- io.savedata(done,newname)
end
return images
end
+function reportobsolete(oldfiles,newfiles,purge)
+
+ for i=1,#oldfiles do oldfiles[i] = gsub(oldfiles[i],"^[%./]+","") end
+ for i=1,#newfiles do newfiles[i] = gsub(newfiles[i],"^[%./]+","") end
+
+ local old = table.tohash(oldfiles)
+ local new = table.tohash(newfiles)
+ local done = false
+
+ for name in sortedhash(old) do
+ if not new[name] then
+ if not done then
+ report()
+ if purge then
+ report("removing obsolete files:")
+ else
+ report("obsolete files:")
+ end
+ report()
+ done = true
+ end
+ report(" %s",name)
+ if purge then
+ removefile(name)
+ end
+ end
+ end
+
+ if done then
+ report()
+ end
+
+ return done
+
+end
+
+
local zippers = {
{
name = "zip",
+ binary = "zip",
uncompressed = "zip %s -X -0 %s",
compressed = "zip %s -X -9 -r %s",
},
{
- name = "7zip (7z)",
+ name = "7z (7zip)",
+ binary = "7z",
uncompressed = "7z a -tzip -mx0 %s %s",
compressed = "7z a -tzip %s %s",
},
}
-function scripts.epub.make()
+function scripts.epub.make(purge,rename,svgmath,svgstyle)
+
+ -- one can enter a jobname or jobname-export but the simple jobname is
+ -- preferred
local filename = environment.files[1]
- if filename and filename ~= "" and type(filename) == "string" then
+ if not filename or filename == "" or type(filename) ~= "string" then
+ report("provide filename")
+ return
+ end
+
+ local specpath, specname, specfull
- filename = file.basename(filename)
- local specfile = file.replacesuffix(filename,"specification")
- local specification = lfs.isfile(specfile) and dofile(specfile) or { }
+ if isdir(filename) then
+ specpath = filename
+ specname = addsuffix(specpath,"lua")
+ specfull = joinfile(specpath,specname)
+ end
+
+ if not specfull or not isfile(specfull) then
+ specpath = filename .. "-export"
+ specname = addsuffix(filename .. "-pub","lua")
+ specfull = joinfile(specpath,specname)
+ end
+
+ if not specfull or not isfile(specfull) then
+ report("unknown specificaton file %a for %a",specfull or "?",filename)
+ return
+ end
--- inspect(specification)
+ local specification = dofile(specfull)
+
+ if not specification or not next(specification) then
+ report("invalid specificaton file %a",specfile)
+ return
+ end
+
+ report("using specification file %a",specfull)
+
+ -- images: { ... url = location ... }
+
+ local defaultcoverpage = "cover.xhtml"
+
+ local name = specification.name or nameonly(filename)
+ local identifier = specification.identifier or ""
+ local htmlfiles = specification.htmlfiles or { }
+ local styles = specification.styles or { }
+ local images = specification.images or { }
+ local htmlroot = specification.htmlroot or htmlfiles[1] or ""
+ local language = specification.language or "en"
+ local creator = specification.creator or "context mkiv"
+ local author = specification.author or "anonymous"
+ local title = specification.title or name
+ local subtitle = specification.subtitle or ""
+ local imagefile = specification.imagefile or ""
+ local imagepath = specification.imagepath or "images"
+ local stylepath = specification.stylepath or "styles"
+ local coverpage = specification.firstpage or defaultcoverpage
+
+ if type(svgstyle) == "string" and not svgstyle then
+ svgstyle = specification.svgstyle or ""
+ end
- local name = specification.name or file.removesuffix(filename)
- local identifier = specification.identifier or os.uuid(true)
- local files = specification.files or { file.addsuffix(filename,"xhtml") }
- local images = specification.images or { }
- local root = specification.root or files[1]
- local language = specification.language or "en"
- local creator = specification.author or "My Self"
- local title = specification.title or "My Title"
- local firstpage = specification.firstpage or ""
- local lastpage = specification.lastpage or ""
+ local obsolete = false
- -- identifier = gsub(identifier,"[^a-zA-z0-9]","")
+ if #htmlfiles == 0 then
+ report("no html files specified")
+ return
+ end
+ if htmlroot == "" then
+ report("no html root file specified")
+ return
+ end
+
+ if subtitle ~= "" then
+ title = format("%s, %s",title,subtitle)
+ end
+
+ local htmlsource = specpath
+ local imagesource = joinfile(specpath,imagepath)
+ local stylesource = joinfile(specpath,stylepath)
+
+ -- once we're here we can start moving files to the right spot; first we deal
+ -- with images
+
+ -- ["image-1"]={
+ -- height = "7.056cm",
+ -- name = "file:///t:/sources/cow.svg",
+ -- page = "1",
+ -- width = "9.701cm",
+ -- }
+
+ -- end of todo
+
+ local pdftosvg = os.which("mudraw") and formatters[ [[mudraw -o "%s" "%s" %s]] ]
+
+ local f_svgpage = formatters["%s-page-%s.svg"]
+ local f_svgname = formatters["%s.svg"]
+
+ local notupdated = 0
+ local updated = 0
+ local skipped = 0
+ local oldfiles = dir.glob(file.join(imagesource,"*"))
+ local newfiles = { }
+
+ if not pdftosvg then
+ report("the %a binary is not present","mudraw")
+ end
+
+ -- a coverpage file has to be in the root of the export tree
+
+ if not coverpage then
+ report("no cover page (image) defined")
+ elseif suffix(coverpage) ~= "xhtml" then
+ report("using cover page %a",coverpage)
+ local source = coverpage
+ local target = joinfile(htmlsource,coverpage)
+ htmlfiles[#htmlfiles+1 ] = coverpage
+ report("copying coverpage %a to %a",source,target)
+ copyfile(source,target)
+ elseif isfile(coverpage) then
+ report("using cover page image %a",coverpage)
+ images.cover = {
+ height = "100%",
+ width = "100%",
+ page = "1",
+ name = url.filename(coverpage),
+ used = coverpage,
+ }
+ local data = replacetemplate(t_coverxhtml, {
+ content = replacetemplate(t_coverimg, {
+ image = coverpage,
+ })
+ })
+ coverpage = defaultcoverpage
+ local target = joinfile(htmlsource,coverpage)
+ report("saving coverpage to %a",target)
+ io.savedata(target,data)
+ htmlfiles[#htmlfiles+1 ] = coverpage
+ else
+ report("cover page image %a is not present",coverpage)
+ coverpage = false
+ end
+
+ if not coverpage then
+ local data = replacetemplate(t_coverxhtml, {
+ content = "no cover page"
+ })
+ coverpage = defaultcoverpage
+ local target = joinfile(htmlsource,coverpage)
+ report("saving dummy coverpage to %a",target)
+ io.savedata(target,data)
+ htmlfiles[#htmlfiles+1 ] = coverpage
+ end
- if firstpage ~= "" then
- images[firstpage] = firstpage
+ for id, data in sortedhash(images) do
+ local name = url.filename(data.name)
+ local used = url.filename(data.used)
+ local base = basename(used)
+ local page = tonumber(data.page) or 1
+ -- todo : check timestamp and prefix, rename to image-*
+ if suffix(used) == "pdf" then
+ -- todo: pass svg name
+ if page > 1 then
+ name = f_svgpage(nameonly(name),page)
+ else
+ name = f_svgname(nameonly(name))
+ end
+ local source = used
+ local target = joinfile(imagesource,name)
+ if needsupdating(source,target) then
+ if pdftosvg then
+ local command = pdftosvg(target,source,page)
+ report("running command %a",command)
+ os.execute(command)
+ updated = updated + 1
+ else
+ skipped = skipped + 1
+ end
+ else
+ notupdated = notupdated + 1
+ end
+ newfiles[#newfiles+1] = target
+ else
+ name = basename(used)
+ local source = used
+ local target = joinfile(imagesource,name)
+ if needsupdating(source,target) then
+ report("copying %a to %a",source,target)
+ copyfile(source,target)
+ updated = updated + 1
+ else
+ notupdated = notupdated + 1
+ -- no message
+ end
+ newfiles[#newfiles+1] = target
end
- if lastpage ~= "" then
- images[lastpage] = lastpage
+ local target = newfiles[#newfiles]
+ if suffix(target) == "svg" and isfile(target) then
+ local data = io.loaddata(target)
+ if data then
+ local done = gsub(data,"<!(DOCTYPE.-)>","<!-- %1 -->",1)
+ if data ~= done then
+ report("doctype fixed in %a",target)
+ io.savedata(target,done)
+ end
+ end
end
+ data.newname = name -- without path
+ end
+
+ report("%s images checked, %s updated, %s kept, %s skipped",updated + notupdated + skipped,updated,notupdated,skipped)
- identifier = "BookId" -- weird requirement
-
- local epubname = name
- local epubpath = file.replacesuffix(name,"tree")
- local epubfile = file.replacesuffix(name,"epub")
- local epubroot = file.replacesuffix(name,"opf")
- local epubtoc = "toc.ncx"
- local epubcover = "cover.xhtml"
-
- application.report("creating paths in tree %s",epubpath)
- lfs.mkdir(epubpath)
- lfs.mkdir(file.join(epubpath,"META-INF"))
- lfs.mkdir(file.join(epubpath,"OEBPS"))
-
- local used = { }
-
- local function copyone(filename)
- local suffix = file.suffix(filename)
- local mime = mimetypes[suffix]
- if mime then
- local idmaker = idmakers[suffix] or idmakers.default
- local target = file.join(epubpath,"OEBPS",filename)
- file.copy(filename,target)
- application.report("copying %s to %s",filename,target)
- used[#used+1] = format(item,idmaker(filename),filename,mime)
+ if reportobsolete(oldfiles,newfiles,purge) then
+ obsolete = true
+ end
+
+ -- here we can decide not to make an epub
+
+ local uuid = format("urn:uuid:%s",os.uuid(true)) -- os.uuid()
+ local identifier = "bookid" -- for now
+
+ local epubname = removesuffix(name)
+ local epubpath = name .. "-epub"
+ local epubfile = replacesuffix(name,"epub")
+ local epubroot = replacesuffix(name,"opf")
+ local epubtoc = "toc.ncx"
+ local epubmimetypes = "mimetype"
+ local epubcontainer = "container.xml"
+ local epubnavigator = "nav.xhtml"
+
+ local metapath = "META-INF"
+ local datapath = "OEBPS"
+
+ local oldfiles = dir.glob(file.join(epubpath,"**/*"))
+ local newfiles = { }
+
+ report("creating paths in tree %a",epubpath)
+
+ if not isdir(epubpath) then
+ mkdir(epubpath)
+ end
+ if not isdir(epubpath) then
+ report("unable to create path %a",epubpath)
+ return
+ end
+
+ local metatarget = joinfile(epubpath,metapath)
+ local htmltarget = joinfile(epubpath,datapath)
+ local styletarget = joinfile(epubpath,datapath,stylepath)
+ local imagetarget = joinfile(epubpath,datapath,imagepath)
+
+ mkdir(metatarget)
+ mkdir(htmltarget)
+ mkdir(styletarget)
+ mkdir(imagetarget)
+
+ local used = { }
+ local notupdated = 0
+ local updated = 0
+
+ local oldimagespecification = joinfile(htmlsource,imagefile)
+ local newimagespecification = joinfile(htmltarget,imagefile)
+
+ report("removing %a",newimagespecification)
+ -- removefile(newimagespecification) -- because we update that one
+
+ local function registerone(path,filename,mathml)
+ local suffix = suffix(filename)
+ local mime = mimetypes[suffix]
+ if mime then
+ local idmaker = idmakers[suffix] or idmakers.default
+ local fullname = path and joinfile(path,filename) or filename
+ if mathml then
+ used[#used+1] = replacetemplate(t_prop, {
+ id = idmaker(filename),
+ filename = fullname,
+ mime = mime,
+ properties = "mathml",
+ } )
+ else
+ used[#used+1] = replacetemplate(t_item, {
+ id = idmaker(filename),
+ filename = fullname,
+ mime = mime,
+ } )
end
+ return true
+ end
+ end
+
+ local function registerandcopyfile(check,path,name,sourcepath,targetpath,newname,image)
+
+ if name == "" then
+ report("ignoring unknown image")
+ return
+ end
+
+ if newname then
+ newname = replacesuffix(newname,suffix(name))
+ else
+ newname = name
end
- copyone("cover.xhtml")
- copyone("toc.ncx")
+ local source = joinfile(sourcepath,name)
+ local target = joinfile(targetpath,newname)
+ local mathml = false
- local function copythem(files)
- for i=1,#files do
- local filename = files[i]
- if type(filename) == "string" then
- copyone(filename)
+ if suffix(source) == "xhtml" then
+ if find(io.loaddata(source),"MathML") then
+ mathml = true -- unbelievable: the property is only valid when there is mathml
+ end
+ else
+ report("checking image %a -> %a",source,target)
+ end
+ if registerone(path,newname,mathml) then
+ if not check or needsupdating(source,target) or mathml and svgmath then
+ report("copying %a to %a",source,target)
+ copyfile(source,target)
+ updated = updated + 1
+ else
+ notupdated = notupdated + 1
+ end
+ newfiles[#newfiles+1] = target
+ if mathml and svgmath then
+ report()
+ report("converting mathml into svg in %a",target)
+ report()
+ local status, total, unique = moduledata.svgmath.convert(target,svgstyle)
+ report()
+ if status then
+ report("%s formulas converted, %s are unique",total,unique)
+ else
+ report("warning: %a in %a",total,target)
end
+ report()
end
end
+ end
- copythem(files)
+ -- local nofdummies = 0
+ -- local dummyname = formatters["dummy-figure-%03i"]
+ -- local makedummy = formatters["context --extra=dummies --noconsole --once --result=%s"]
+ --
+ -- local function registerandcopydummy(targetpath,name)
+ -- nofdummies = nofdummies + 1
+ -- local newname = dummyname(nofdummies)
+ -- local target = joinfile(targetpath,newname)
+ -- if not isfile(target) then
+ -- pushdir(targetpath)
+ -- report("generating dummy %a for %a",newname,name or "unknown")
+ -- os.execute(makedummy(newname))
+ -- popdir()
+ -- end
+ -- return newname
+ -- end
+
+ for image, data in sortedhash(images) do
+ -- if data.used == "" then
+ -- data.newname = registerandcopydummy(imagetarget,data.name)
+ -- end
+ registerandcopyfile(true,imagepath,data.newname,imagesource,imagetarget,rename and image,true)
+ end
+ for i=1,#styles do
+ registerandcopyfile(false,stylepath,styles[i],stylesource,styletarget)
+ end
+ for i=1,#htmlfiles do
+ registerandcopyfile(false,false,htmlfiles[i],htmlsource,htmltarget)
+ end
- local theimages = { }
+ relocateimages(images,oldimagespecification,oldimagespecification,imagepath,rename)
+ relocateimages(images,oldimagespecification,newimagespecification,imagepath,rename)
- for k, v in table.sortedpairs(images) do
- theimages[#theimages+1] = k
- if not lfs.isfile(k) and file.suffix(k) == "svg" and file.suffix(v) == "pdf" then
- local command = format("inkscape --export-plain-svg=%s %s",k,v)
- application.report("running command '%s'\n\n",command)
- os.execute(command)
- end
+ report("%s files registered, %s updated, %s kept",updated + notupdated,updated,notupdated)
+
+ local function saveinfile(what,name,data)
+ report("saving %s in %a",what,name)
+ io.savedata(name,data)
+ newfiles[#newfiles+1] = name
+ end
+
+ used[#used+1] = replacetemplate(t_prop, {
+ id = "nav",
+ filename = epubnavigator,
+ properties = "nav",
+ mime = "application/xhtml+xml",
+ })
+
+ registerone(false,epubtoc)
+
+ saveinfile("navigation data",joinfile(htmltarget,epubnavigator),replacetemplate(t_navtoc, { -- version 3.0
+ root = htmlroot,
+ } ) )
+
+ saveinfile("used mimetypes",joinfile(epubpath,epubmimetypes),mimetype)
+
+ saveinfile("version 2.0 container",joinfile(metatarget,epubcontainer),replacetemplate(t_container, {
+ rootfile = epubroot
+ } ) )
+
+ local idmaker = idmakers[suffix(htmlroot)] or idmakers.default
+
+ saveinfile("package specification",joinfile(htmltarget,epubroot),replacetemplate(t_package, {
+ identifier = identifier,
+ title = title,
+ language = language,
+ uuid = uuid,
+ creator = creator,
+ date = os.date("!%Y-%m-%dT%H:%M:%SZ"),
+ coverpage = idmaker(coverpage),
+ manifest = concat(used,"\n"),
+ rootfile = idmaker(htmlroot)
+ } ) )
+
+ -- t_toc is replaced by t_navtoc in >= 3
+
+ saveinfile("table of contents",joinfile(htmltarget,epubtoc), replacetemplate(t_toc, {
+ identifier = uuid, -- identifier,
+ title = title,
+ author = author,
+ root = htmlroot,
+ } ) )
+
+ report("creating archive\n\n")
+
+ pushdir(epubpath)
+
+ removefile(epubfile)
+
+ local usedzipper = false
+
+ local function zipped(zipper)
+ local ok = os.execute(format(zipper.uncompressed,epubfile,epubmimetypes))
+ if ok == 0 then
+ os.execute(format(zipper.compressed,epubfile,metapath))
+ os.execute(format(zipper.compressed,epubfile,datapath))
+ usedzipper = zipper.name
+ return true
end
+ end
+
+ -- nice way
+
+ for i=1,#zippers do
+ if os.which(zippers[i].binary) and zipped(zippers[i]) then
+ break
+ end
+ end
- copythem(theimages)
-
- local idmaker = idmakers[file.suffix(root)] or idmakers.default
-
- container = format(container,
- epubroot
- )
- package = format(package,
- identifier,
- title,
- language,
- identifier,
- os.uuid(),
- creator,
- os.date("!%Y-%m-%dT%H:%M:%SZ"),
- idmaker(firstpage),
- concat(used,"\n"),
- idmaker(root)
- )
- toc = format(toc,
- identifier,
- title,
- root
- )
- coverxhtml = format(coverxhtml,
- firstpage
- )
-
- io.savedata(file.join(epubpath,"mimetype"),mimetype)
- io.savedata(file.join(epubpath,"META-INF","container.xml"),container)
- io.savedata(file.join(epubpath,"OEBPS",epubroot),package)
- io.savedata(file.join(epubpath,"OEBPS",epubtoc),toc)
- io.savedata(file.join(epubpath,"OEBPS",epubcover),coverxhtml)
-
- application.report("creating archive\n\n")
-
- lfs.chdir(epubpath)
- os.remove(epubfile)
-
- local done = false
+ -- trial and error
+ if not usedzipper then
for i=1,#zippers do
- local zipper = zippers[i]
- if os.execute(format(zipper.uncompressed,epubfile,"mimetype")) then
- os.execute(format(zipper.compressed,epubfile,"META-INF"))
- os.execute(format(zipper.compressed,epubfile,"OEBPS"))
- done = zipper.name
+ if zipped(zippers[i]) then
break
end
end
+ end
- lfs.chdir("..")
+ popdir()
- if done then
- application.report("epub archive made using %s: %s",done,file.join(epubpath,epubfile))
- else
- local list = { }
- for i=1,#zippers do
- list[#list+1] = zipper.name
- end
- application.report("no epub archive made, install one of: %s",concat(list," "))
+ if usedzipper then
+ local treefile = joinfile(epubpath,epubfile)
+ removefile(epubfile)
+ copyfile(treefile,epubfile)
+ if isfile(epubfile) then
+ removefile(treefile)
end
+ report("epub archive made using %s: %s",usedzipper,epubfile)
+ else
+ local list = { }
+ for i=1,#zippers do
+ list[#list+1] = zippers[i].name
+ end
+ report("no epub archive made, install one of: % | t",list)
+ end
+
+ if reportobsolete(oldfiles,newfiles,purge) then
+ obsolete = true
+ end
+ if obsolete and not purge then
+ report("use --purge to remove obsolete files")
end
end
--
-if environment.argument("make") then
- scripts.epub.make()
-elseif environment.argument("exporthelp") then
- application.export(environment.argument("exporthelp"),environment.files[1])
+local a_exporthelp = environment.argument("exporthelp")
+local a_make = environment.argument("make")
+local a_all = environment.argument("all")
+local a_purge = a_all or environment.argument("purge")
+local a_rename = a_all or environment.argument("rename")
+local a_svgmath = a_all or environment.argument("svgmath")
+local a_svgstyle = environment.argument("svgstyle")
+
+if a_make and a_svgmath then
+ require("x-math-svg")
+end
+
+if a_make then
+ scripts.epub.make(a_purge,a_rename,a_svgmath,a_svgstyle)
+elseif a_exporthelp then
+ application.export(a_exporthelp,environment.files[1])
else
application.help()
end
+
+-- java -jar d:\epubcheck\epubcheck-3.0.1.jar -v 3.0 -mode xhtml mkiv-publications.tree\mkiv-publications.epub
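
Most of the rewritten code above fills %key% templates with ConTeXt's utilities.templates.replace instead of positional string.format calls. A minimal sketch of that mechanism, using an assumed root file name:

    local replacetemplate = utilities.templates.replace -- from util-tpl
    local t = [[<rootfile full-path="OEBPS/%rootfile%" media-type="application/oebps-package+xml"/>]]
    print(replacetemplate(t, { rootfile = "mydocument.opf" }))
    -- <rootfile full-path="OEBPS/mydocument.opf" media-type="application/oebps-package+xml"/>
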
diff --git a/scripts/context/lua/mtx-fcd.lua b/scripts/context/lua/mtx-fcd.lua
index 2fcb9a2c7..76087cc37 100644
--- a/scripts/context/lua/mtx-fcd.lua
+++ b/scripts/context/lua/mtx-fcd.lua
@@ -247,7 +247,7 @@ end
local function fcd_find()
found = { }
- pattern = environment.files[1] or ""
+ pattern = lower(environment.files[1] or "")
if pattern ~= "" then
pattern = string.escapedpattern(pattern)
local paths = hash.paths
@@ -255,7 +255,7 @@ local function fcd_find()
local paths = paths[i][2]
for i=1,#paths do
local path = paths[i]
- if find(path,pattern) then
+ if find(lower(path),pattern) then
found[#found+1] = path
end
end
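
The fcd change above makes matching case insensitive by lowercasing both the pattern and the candidate paths before the find. The idiom in isolation, with assumed example strings:

    local pattern = string.escapedpattern(string.lower("TeX")) -- escapedpattern is ConTeXt's l-string helper
    print(string.find(string.lower("c:/data/texmf-context"), pattern) ~= nil) -- true
    print(string.find(string.lower("c:/data/fonts"), pattern) ~= nil)         -- false
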
diff --git a/scripts/context/lua/mtx-flac.lua b/scripts/context/lua/mtx-flac.lua
index 4e01abc99..116eeda34 100644
--- a/scripts/context/lua/mtx-flac.lua
+++ b/scripts/context/lua/mtx-flac.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['mtx-flac'] = {
local sub, match, byte, lower = string.sub, string.match, string.byte, string.lower
local readstring, readnumber = io.readstring, io.readnumber
-local concat, sortedpairs = table.concat, table.sortedpairs
+local concat, sortedpairs, sort, keys = table.concat, table.sortedpairs, table.sort, table.keys
local tonumber = tonumber
local tobitstring = number.tobitstring
local lpegmatch = lpeg.match
@@ -105,35 +105,62 @@ function flac.savecollection(pattern,filename)
local files = dir.glob(pattern)
flac.report("%s files found, analyzing files",#files)
local music = { }
- table.sort(files)
+ sort(files)
for i=1,#files do
- local data = flac.getmetadata(files[i])
+ local name = files[i]
+ local data = flac.getmetadata(name)
if data then
local tags = data.tags
local info = data.info
- local artist = tags.artist or "no-artist"
- local album = tags.album or "no-album"
- local albums = music[artist]
- if not albums then
- albums = { }
- music[artist] = albums
- end
- local albumx = albums[album]
- if not albumx then
- albumx = {
- year = tags.date,
- tracks = { },
+ if tags and info then
+ local artist = tags.artist or "no-artist"
+ local album = tags.album or "no-album"
+ local albums = music[artist]
+ if not albums then
+ albums = { }
+ music[artist] = albums
+ end
+ local albumx = albums[album]
+ if not albumx then
+ albumx = {
+ year = tags.date,
+ tracks = { },
+ }
+ albums[album] = albumx
+ end
+ albumx.tracks[tonumber(tags.tracknumber) or 0] = {
+ title = tags.title,
+ length = math.round((info.samples_in_stream/info.sample_rate_in_hz)),
}
- albums[album] = albumx
+ else
+ flac.report("unable to read file",name)
end
- albumx.tracks[tonumber(tags.tracknumber) or 0] = {
- title = tags.title,
- length = math.round((info.samples_in_stream/info.sample_rate_in_hz)),
- }
end
end
- -- inspect(music)
- local nofartists, nofalbums, noftracks, noferrors = 0, 0, 0, 0
+ --
+ local nofartists = 0
+ local nofalbums = 0
+ local noftracks = 0
+ local noferrors = 0
+ --
+ local allalbums
+ local function compare(a,b)
+ local ya = allalbums[a].year or 0
+ local yb = allalbums[b].year or 0
+ if ya == yb then
+ return a < b
+ else
+ return ya < yb
+ end
+ end
+ local function getlist(albums)
+ allalbums = albums
+ local list = keys(albums)
+ sort(list,compare)
+ return list
+ end
+ --
+ filename = file.addsuffix(filename,"xml")
local f = io.open(filename,"wb")
if f then
flac.report("saving data in file %q",filename)
@@ -144,15 +171,8 @@ function flac.savecollection(pattern,filename)
f:write("\t<artist>\n")
f:write("\t\t<name>",lpegmatch(p_escaped,artist),"</name>\n")
f:write("\t\t<albums>\n")
- local list = table.keys(albums)
- table.sort(list,function(a,b)
- local ya, yb = albums[a].year or 0, albums[b].year or 0
- if ya == yb then
- return a < b
- else
- return ya < yb
- end
- end)
+ local list = getlist(albums)
+ nofalbums = nofalbums + #list
for nofalbums=1,#list do
local album = list[nofalbums]
local data = albums[album]
@@ -180,6 +200,40 @@ function flac.savecollection(pattern,filename)
f:write("</collection>\n")
f:close()
flac.report("%s tracks of %s albums of %s artists saved in %q (%s errors)",noftracks,nofalbums,nofartists,filename,noferrors)
+ -- a secret option for alan braslau
+ if environment.argument("bibtex") then
+ filename = file.replacesuffix(filename,"bib")
+ local f = io.open(filename,"wb")
+ if f then
+ local n = 0
+ for artist, albums in sortedpairs(music) do
+ local list = getlist(albums)
+ for nofalbums=1,#list do
+ n = n + 1
+ local album = list[nofalbums]
+ local data = albums[album]
+ local tracks = data.tracks
+ f:write("@cd{entry-",n,",\n")
+ f:write("\tartist = {",artist,"},\n")
+ f:write("\ttitle = {",album or "no title","},\n")
+ f:write("\tyear = {",data.year or 0,"},\n")
+ f:write("\ttracks = {",#tracks,"},\n")
+ for i=1,#tracks do
+ local track = tracks[i]
+ if track then
+ noftracks = noftracks + 1
+ f:write("\ttrack:",i," = {",track.title,"},\n")
+ f:write("\tlength:",i," = {",track.length,"},\n")
+ end
+ end
+ f:write("}\n")
+ end
+ end
+ f:close()
+ flac.report("additional bibtex file generated: %s",filename)
+ end
+ end
+ --
else
flac.report("unable to save data in file %q",filename)
end
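
The refactored flac code above sorts an artist's albums by year and falls back to the album name when the years coincide. A self-contained sketch with made-up data:

    local allalbums = {
        ["Album B"] = { year = 1999 },
        ["Album A"] = { year = 2001 },
        ["Album C"] = { year = 1999 },
    }
    local list = table.keys(allalbums) -- ConTeXt's l-table helper
    table.sort(list, function(a,b)
        local ya = allalbums[a].year or 0
        local yb = allalbums[b].year or 0
        if ya == yb then return a < b else return ya < yb end
    end)
    -- list is now { "Album B", "Album C", "Album A" }
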
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index 4340cb357..694e6a649 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -38,7 +38,7 @@ local helpinfo = [[
<flag name="filter" value="list"><short>key-value pairs</short></flag>
<flag name="all"><short>show all found instances (combined with other flags)</short></flag>
<flag name="info"><short>give more details</short></flag>
- <flag name="track" value="list"><short>enable trackers</short></flag>
+ <flag name="trackers" value="list"><short>enable trackers</short></flag>
<flag name="statistics"><short>some info about the database</short></flag>
</subcategory>
</category>
@@ -413,8 +413,12 @@ function scripts.fonts.save()
local sub = givenfiles[2] or ""
local function save(savename,fontblob)
if fontblob then
+ if fontblob.validation_state and table.contains(fontblob.validation_state,"bad_ps_fontname") then
+ report("ignoring bad fontname for %a",name)
+ savename = file.nameonly(name) .. "-bad-ps-name"
+ end
savename = file.addsuffix(string.lower(savename),"lua")
- report("fontsave, saving data in %s",savename)
+ report("fontsave, saving data in %a",savename)
table.tofile(savename,fontloader.to_table(fontblob),"return")
fontloader.close(fontblob)
end
@@ -426,7 +430,7 @@ function scripts.fonts.save()
if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' or suffix == "dfont" then
local fontinfo = fontloader.info(filename)
if fontinfo then
- report("font: %s located as %s",name,filename)
+ report("font: %a located as %a",name,filename)
if #fontinfo > 0 then
for k=1,#fontinfo do
local v = fontinfo[k]
@@ -436,13 +440,13 @@ function scripts.fonts.save()
save(fontinfo.fullname,fontloader.open(filename))
end
else
- report("font: %s cannot be read",filename)
+ report("font: %a cannot be read",filename)
end
else
- report("font: %s not saved",filename)
+ report("font: %a not saved",filename)
end
else
- report("font: %s not found",name)
+ report("font: %a not found",name)
end
else
report("font: no name given")
diff --git a/scripts/context/lua/mtx-interface.lua b/scripts/context/lua/mtx-interface.lua
index 82cefd638..1640f0891 100644
--- a/scripts/context/lua/mtx-interface.lua
+++ b/scripts/context/lua/mtx-interface.lua
@@ -248,7 +248,7 @@ function scripts.interface.editor(editor,split,forcedinterfaces)
local mappings = { }
local environments = { }
local x = xml.load(keyfile)
- for e, d, k in xml.elements(x,"cd:command") do
+ for e, d, k in xml.elements(x,"/cd:interface/cd:commands/cd:command") do -- somehow this was variable
local at = d[k].at
local name, value = at.name, at.value
if name and value then
@@ -256,7 +256,7 @@ function scripts.interface.editor(editor,split,forcedinterfaces)
end
end
local x = xml.load(xmlfile)
- for e, d, k in xml.elements(x,"cd:command") do
+ for e, d, k in xml.elements(x,"/cd:interface/cd:command") do
local at = d[k].at
local name, type = at.name, at["type"]
if name and name ~= "" then
@@ -322,7 +322,7 @@ function scripts.interface.check()
if f then
f:write("\\starttext\n")
local x = xml.load(xmlfile)
- for e, d, k in xml.elements(x,"cd:command") do
+ for e, d, k in xml.elements(x,"/cd:interface/cd:command") do
local dk = d[k]
local at = dk.at
if at then
@@ -384,6 +384,7 @@ function scripts.interface.interfaces()
return a .. b .. c .. b
end)
end
+ -- we could just replace attributes
for language, _ in next, commands.setuplayout do
local texresult, xmlresult = { }, { }
texresult[#texresult+1] = format("%% this file is auto-generated, don't edit this file\n%%")
@@ -403,6 +404,7 @@ function scripts.interface.interfaces()
report("saving interface translations '%s'",xmlfilename)
if language ~= "en" and xmldata ~= "" then
local newdata = xmldata:gsub("(<cd:interface.*language=.)en(.)","%1"..language.."%2",1)
+-- newdata = replace(newdata, 'cd:command', 'name', interface.commands, interface.elements, language)
newdata = replace(newdata, 'cd:string', 'value', interface.commands, interface.elements, language)
newdata = replace(newdata, 'cd:variable' , 'value', interface.variables, nil, language)
newdata = replace(newdata, 'cd:parameter', 'name', interface.constants, nil, language)
diff --git a/scripts/context/lua/mtx-metapost.lua b/scripts/context/lua/mtx-metapost.lua
index 08daec978..6306125d4 100644
--- a/scripts/context/lua/mtx-metapost.lua
+++ b/scripts/context/lua/mtx-metapost.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['mtx-metapost'] = { -- this was m
license = "see context related readme files"
}
+-- todo: load map files
+
local helpinfo = [[
<?xml version="1.0"?>
<application>
@@ -60,24 +62,42 @@ local function assumes_latex(filename)
return find(d,"\\documentstyle") or find(d,"\\documentclass") or find(d,"\\begin{document}")
end
+local basemaps = "original-base.map,original-ams-base.map,original-ams-euler.map,original-public-lm.map"
+
+local wrapper = "\\starttext\n%s\n%s\\stoptext"
+local loadmap = "\\loadmapfile[%s]\n"
local template = "\\startTEXpage\n\\convertMPtoPDF{%s}{1}{1}\n\\stopTEXpage"
local texified = "\\starttext\n%s\n\\stoptext"
local splitter = "\\startTEXpage\\externalfigure[%s][page=%s]\\stopTEXpage"
local tempname = "mptopdf-temp.tex"
-local function do_convert(filename)
+local function do_mapfiles(mapfiles)
+ local maps = { }
+ for i=1,#mapfiles do
+ local mapfile = mapfiles[i]
+ application.report("using map file %a",mapfile)
+ maps[i] = format(loadmap,mapfile)
+ end
+ return table.concat(maps)
+end
+
+local function do_convert(filename,mapfiles)
if find(filename,".%d+$") or find(filename,"%.mps$") then
- io.savedata(tempname,format(template,filename))
+ local body = format(template,filename)
+ local maps = do_mapfiles(mapfiles)
+ io.savedata(tempname,format(wrapper,maps,body))
local resultname = format("%s-%s.pdf",file.nameonly(filename),file.suffix(filename))
local result = os.execute(format([[context --once --batch --purge --result=%s "%s"]],resultname,tempname))
return lfs.isfile(resultname) and resultname
end
end
-local function do_split(filename,numbers)
+local function do_split(filename,numbers,mapfiles)
local name = file.nameonly(filename)
+ local maps = do_mapfiles(mapfiles)
for i=1,#numbers do
- io.savedata(tempname,format(splitter,file.addsuffix(name,"pdf"),i))
+ local body = format(splitter,file.addsuffix(name,"pdf"),i)
+ io.savedata(tempname,format(wrapper,maps,body))
local resultname = format("%s-%s.pdf",name,numbers[i])
local result = os.execute(format([[context --once --batch --purge --result=%s "%s"]],resultname,tempname))
end
@@ -99,12 +119,12 @@ local function do_texify(str)
return format(texified,str), numbers
end
-local function do_convert_all(filename)
+local function do_convert_all(filename,mapfiles)
local results = dir.glob(file.nameonly(filename) .. ".*") -- reset
local report = { }
for i=1,#results do
local filename = results[i]
- local resultname = do_convert(filename)
+ local resultname = do_convert(filename,mapfiles)
if resultname then
report[#report+1] = { filename, resultname }
end
@@ -121,8 +141,8 @@ local function do_convert_all(filename)
end
end
-local function do_convert_one(filename)
- local resultname = do_convert(filename)
+local function do_convert_one(filename,mapfiles)
+ local resultname = do_convert(filename,mapfiles)
if resultname then
report("%s => %s", filename,resultname)
else
@@ -131,17 +151,13 @@ local function do_convert_one(filename)
end
function scripts.mptopdf.convertall()
- local rawmp = environment.arguments.rawmp or false
- local metafun = environment.arguments.metafun or false
- local latex = environment.arguments.latex or false
- local pattern = environment.arguments.pattern or false
- local split = environment.arguments.split or false
- local files
- if pattern then
- files = dir.glob(file.nameonly(filename))
- else
- files = environment.files
- end
+ local rawmp = environment.arguments.rawmp or false
+ local metafun = environment.arguments.metafun or false
+ local latex = environment.arguments.latex or false
+ local pattern = environment.arguments.pattern or false
+ local split = environment.arguments.split or false
+ local files = pattern and dir.glob(file.nameonly(filename)) or environment.files
+ local mapfiles = utilities.parsers.settings_to_array(environment.arguments.mapfiles or basemaps)
if #files > 0 then
for i=1,#files do
local filename = files[i]
@@ -168,16 +184,16 @@ function scripts.mptopdf.convertall()
local done = os.execute(command)
if done then
if convert then
- do_convert_all(filename)
+ do_convert_all(filename,mapfiles)
elseif split then
- do_split(filename,numbers)
+ do_split(filename,numbers,mapfiles)
-- already pdf, maybe optionally split
end
else
report("error while processing mp file '%s'", filename)
end
else
- do_convert_one(filename)
+ do_convert_one(filename,mapfiles)
end
end
else
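The new do_mapfiles helper simply turns the comma separated --mapfiles value (defaulting to basemaps) into a block of \loadmapfile calls that the wrapper template places in front of the generated body. A minimal sketch using two of the default map names:

    local loadmap  = "\\loadmapfile[%s]\n"
    local mapfiles = { "original-base.map", "original-public-lm.map" }
    local maps     = { }
    for i=1,#mapfiles do
        maps[i] = string.format(loadmap,mapfiles[i])
    end
    io.write(table.concat(maps))
    -- prints:
    --   \loadmapfile[original-base.map]
    --   \loadmapfile[original-public-lm.map]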
diff --git a/scripts/context/lua/mtx-mk-help.lua b/scripts/context/lua/mtx-mk-help.lua
index 794bbca37..083dbc3ec 100644
--- a/scripts/context/lua/mtx-mk-help.lua
+++ b/scripts/context/lua/mtx-mk-help.lua
@@ -403,7 +403,7 @@ local helpinfo = [[
<application>
<metadata>
<entry name="name">mptopdf</entry>
- <entry name="detail">convert MetaPost to PDF</entry>
+ <entry name="detail">convert MetaPost figures to PDF</entry>
<entry name="version">1.4.1</entry>
</metadata>
<flags>
diff --git a/scripts/context/lua/mtx-patterns.lua b/scripts/context/lua/mtx-patterns.lua
index 5e2b2d902..b7d41e2b2 100644
--- a/scripts/context/lua/mtx-patterns.lua
+++ b/scripts/context/lua/mtx-patterns.lua
@@ -29,6 +29,7 @@ local helpinfo = [[
<flag name="specification"><short>additional patterns: e.g.: =cy,hyph-cy,welsh</short></flag>
<flag name="compress"><short>compress data</short></flag>
<flag name="words"><short>update words in given file</short></flag>
+ <flag name="hyphenate"><short>show hypephenated words</short></flag>
</subcategory>
</category>
</flags>
@@ -40,6 +41,7 @@ local helpinfo = [[
<example><command>mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns</command></example>
<example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns</command></example>
<example><command>mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns</command></example>
+ <example><command>mtxrun --script pattern --hyphenate --language=nl --left=3 nogalwiedes inderdaad</command></example>
</subcategory>
</category>
</examples>
@@ -625,6 +627,49 @@ function scripts.patterns.words()
end
end
+-- mtxrun --script patterns --hyphenate --language=nl nogalwiedes --left=3
+--
+-- hyphenator |
+-- hyphenator | . n o g a l w i e d e s . . n o g a l w i e d e s .
+-- hyphenator | .0n4 0 4 0 0 0 0 0 0 0 0 0 0
+-- hyphenator | 0o0g0a4l0 0 4 0 0 4 0 0 0 0 0 0 0
+-- hyphenator | 1g0a0 0 4 1 0 4 0 0 0 0 0 0 0
+-- hyphenator | 0l1w0 0 4 1 0 4 1 0 0 0 0 0 0
+-- hyphenator | 4i0e0 0 4 1 0 4 1 4 0 0 0 0 0
+-- hyphenator | 0i0e3d0e0 0 4 1 0 4 1 4 0 3 0 0 0
+-- hyphenator | 0e1d0 0 4 1 0 4 1 4 0 3 0 0 0
+-- hyphenator | 1d0e0 0 4 1 0 4 1 4 0 3 0 0 0
+-- hyphenator | 0d0e2s0 0 4 1 0 4 1 4 0 3 0 2 0
+-- hyphenator | 4s0. 0 4 1 0 4 1 4 0 3 0 4 0
+-- hyphenator | .0n4o1g0a4l1w4i0e3d0e4s0. . n o-g a l-w i e-d e s .
+-- hyphenator |
+-- mtx-patterns | nl 3 3 : nogalwiedes : nogal-wie-des
+
+function scripts.patterns.hyphenate()
+ require("lang-hyp")
+ local traditional = languages.hyphenators.traditional
+ local left = tonumber(environment.arguments.left) or 3
+ local right = tonumber(environment.arguments.right) or 3
+ local language = environment.arguments.language or "us"
+ local dictionary = traditional.loadpatterns(language)
+ local words = environment.files
+ local specification = {
+ leftcharmin = left,
+ rightcharmin = right,
+ leftchar = false,
+ rightchar = false,
+ }
+ trackers.enable("hyphenator.steps")
+ for i=1,#words do
+ local word = words[i]
+ report("%s %s %s : %s : %s",
+ language, left, right,
+ word,
+ traditional.injecthyphens(dictionary,word,specification)
+ )
+ end
+end
+
if environment.argument("check") then
scripts.patterns.prepare()
scripts.patterns.check()
@@ -633,16 +678,21 @@ elseif environment.argument("convert") then
scripts.patterns.convert()
elseif environment.argument("words") then
scripts.patterns.words() -- for the moment here
+elseif environment.argument("hyphenate") then
+ scripts.patterns.hyphenate() -- for the moment here
elseif environment.argument("exporthelp") then
application.export(environment.argument("exporthelp"),environment.files[1])
else
application.help()
end
--- mtxrun --script pattern --check hyph-*.tex
--- mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns
--- mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns
--- mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns
+-- mtxrun --script pattern --check hyph-*.tex
+-- mtxrun --script pattern --check --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns
+-- mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/tex --destination=e:/tmp/patterns
+--
+-- use this call:
+--
+-- mtxrun --script pattern --convert --path=c:/data/develop/svn-hyphen/trunk/hyph-utf8/tex/generic/hyph-utf8/patterns/txt --destination=e:/tmp/patterns
-- copy /Y *.hyp e:\tex-context\tex\texmf-context\tex\context\patterns
-- copy /Y *.pat e:\tex-context\tex\texmf-context\tex\context\patterns
diff --git a/scripts/context/lua/mtx-plain.lua b/scripts/context/lua/mtx-plain.lua
index d10c21375..1076572fc 100644
--- a/scripts/context/lua/mtx-plain.lua
+++ b/scripts/context/lua/mtx-plain.lua
@@ -103,7 +103,11 @@ function scripts.plain.make(texengine,texformat)
end
function scripts.plain.run(texengine,texformat,filename)
- execute('%s --fmt=%s "%s"',texengine,file.removesuffix(texformat),filename)
+ local t = { }
+ for k, v in next, environment.arguments do
+ t[#t+1] = string.format("--mtx:%s=%s",k,v)
+ end
+ execute('%s --fmt=%s %s "%s"',texengine,file.removesuffix(texformat),table.concat(t," "),filename)
end
function scripts.plain.fonts()
@@ -114,7 +118,7 @@ local texformat = environment.arguments.texformat or environment.arguments.forma
local texengine = environment.arguments.texengine or environment.arguments.engine
if type(texengine) ~= "string" or texengine == "" then
- texengine = environment.arguments.jit and "luajittex" or"luatex"
+ texengine = (jit or environment.arguments.jit) and "luajittex" or "luatex"
end
if type(texformat) ~= "string" or texformat == "" then
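The change to scripts.plain.run forwards every given command line key/value pair to the engine as an --mtx:key=value option. A minimal sketch of the command string that gets built (the option names are made up; the sort is only there to make the sample output deterministic):

    local arguments = { interaction = "batchmode", jobname = "demo" }
    local t = { }
    for k, v in pairs(arguments) do
        t[#t+1] = string.format("--mtx:%s=%s",k,v)
    end
    table.sort(t)
    print(string.format('%s --fmt=%s %s "%s"',"luatex","plain",table.concat(t," "),"demo.tex"))
    -- luatex --fmt=plain --mtx:interaction=batchmode --mtx:jobname=demo "demo.tex"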
diff --git a/scripts/context/lua/mtx-scite.lua b/scripts/context/lua/mtx-scite.lua
index 972edbfe6..ae8c67387 100644
--- a/scripts/context/lua/mtx-scite.lua
+++ b/scripts/context/lua/mtx-scite.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['mtx-scite'] = {
license = "see context related readme files"
}
+-- mtxrun --script scite --tree --source=t:/texmf/tex/context --target=e:/tmp/context --numbers
+
local P, R, S, C, Ct, Cf, Cc, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.Cg
local lpegmatch = lpeg.match
local format, lower, gmatch = string.format, string.lower, string.gmatch
@@ -22,6 +24,8 @@ local helpinfo = [[
<category name="basic">
<subcategory>
<flag name="words"><short>convert spell-*.txt into spell-*.lua</short></flag>
+ <flag name="tree"><short>converts a tree into an html tree (--source --target --numbers)</short></flag>
+ <flag name="file"><short>converts a file into an html tree (--source --target --numbers --lexer)</short></flag>
</subcategory>
</category>
</flags>
@@ -36,6 +40,8 @@ local application = logs.application {
local report = application.report
+local scite = require("util-sci")
+
scripts = scripts or { }
scripts.scite = scripts.scite or { }
@@ -241,6 +247,51 @@ function scripts.scite.words()
report("you need to move the lua files to lexers/data")
end
+function scripts.scite.tree()
+ local source = environment.argument("source")
+ local target = environment.argument("target")
+ local numbers = environment.argument("numbers")
+ if not lfs.isdir(source) then
+ report("you need to pass a valid source path with --source")
+ return
+ end
+ if not lfs.isdir(target) then
+ report("you need to pass a valid target path with --target")
+ return
+ end
+ if source == target then
+ report("source and target paths must be different")
+ return
+ end
+ scite.converttree(source,target,numbers)
+end
+
+function scripts.scite.file()
+ local source = environment.argument("source")
+ local target = environment.argument("target")
+ local lexer = environment.argument("lexer")
+ local numbers = environment.argument("numbers")
+ if source then
+ local target = target or file.replacesuffix(source,"html")
+ if source == target then
+ report("the source file cannot be the same as the target")
+ else
+ scite.filetohtml(source,lexer,target,numbers)
+ end
+
+ else
+ for i=1,#environment.files do
+ local source = environment.files[i]
+ local target = file.replacesuffix(source,"html")
+ if source == target then
+ report("the source file cannot be the same as the target")
+ else
+ scite.filetohtml(source,nil,target,numbers)
+ end
+ end
+ end
+end
+
-- if environment.argument("start") then
-- scripts.scite.start(true)
-- elseif environment.argument("test") then
@@ -251,6 +302,10 @@ end
if environment.argument("words") then
scripts.scite.words()
+elseif environment.argument("tree") then
+ scripts.scite.tree()
+elseif environment.argument("file") then
+ scripts.scite.file()
elseif environment.argument("exporthelp") then
application.export(environment.argument("exporthelp"),environment.files[1])
else
diff --git a/scripts/context/lua/mtx-server.lua b/scripts/context/lua/mtx-server.lua
index 5466bfe80..dba07f1d5 100644
--- a/scripts/context/lua/mtx-server.lua
+++ b/scripts/context/lua/mtx-server.lua
@@ -278,6 +278,20 @@ handlers.html = handlers.htm
local indices = { "index.htm", "index.html" }
local portnumber = 31415 -- pi suits tex
+local newline = lpeg.patterns.newline
+local spacer = lpeg.patterns.spacer
+local whitespace = lpeg.patterns.whitespace
+local method = lpeg.P("GET")
+ + lpeg.P("POST")
+local identify = (1-method)^0
+ * lpeg.C(method)
+ * spacer^1
+ * lpeg.C((1-spacer)^1)
+ * spacer^1
+ * lpeg.P("HTTP/")
+ * (1-whitespace)^0
+ * lpeg.C(lpeg.P(1)^0)
+
function scripts.webserver.run(configuration)
-- check configuration
configuration.port = tonumber(configuration.port or os.getenv("MTX_SERVER_PORT") or portnumber) or portnumber
@@ -329,17 +343,24 @@ function scripts.webserver.run(configuration)
local from = client:getpeername()
report("request from: %s",tostring(from))
report("request data: %s",tostring(request))
- local fullurl = string.match(request,"GET (.+) HTTP/.*$") or "" -- todo: more clever / post
- if fullurl == "" then
+ -- local fullurl = string.match(request,"(GET) (.+) HTTP/.*$") or "" -- todo: more clever / post
+ -- if fullurl == "" then
+-- print("!!!!",request)
+ local method, fullurl, body = lpeg.match(identify,request)
+ if method == "" or fullurl == "" then
report("no url")
errormessage(client,configuration,404)
else
+
+ -- todo: method: POST
+
fullurl = url.unescapeget(fullurl)
report("requested url: %s",fullurl)
-- fullurl = socket.url.unescape(fullurl) -- happens later
local hashed = url.hashed(fullurl)
local query = url.query(hashed.query)
local filename = hashed.path -- hm, not query?
+ hashed.body = body
if script then
filename = script
report("forced script: %s",filename)
diff --git a/scripts/context/lua/mtx-update.lua b/scripts/context/lua/mtx-update.lua
index c7eb74395..daf4f5b16 100644
--- a/scripts/context/lua/mtx-update.lua
+++ b/scripts/context/lua/mtx-update.lua
@@ -11,13 +11,15 @@ if not modules then modules = { } end modules ['mtx-update'] = {
-- Together with Arthur Reutenauer she made sure that it worked well on all
-- platforms that matter.
+-- LuaTeX and LuajitTeX are now always installed together.
+
local helpinfo = [[
<?xml version="1.0"?>
<application>
<metadata>
<entry name="name">mtx-update</entry>
<entry name="detail">ConTeXt Minimals Updater</entry>
- <entry name="version">1.01</entry>
+ <entry name="version">1.02</entry>
</metadata>
<flags>
<category name="basic">
@@ -48,7 +50,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtx-update",
- banner = "ConTeXt Minimals Updater 1.01",
+ banner = "ConTeXt Minimals Updater 1.02",
helpinfo = helpinfo,
}
@@ -124,7 +126,7 @@ scripts.update.engines = {
["luatex"] = {
{ "fonts/new/", "texmf" },
{ "bin/luatex/<platform>/", "texmf-<platform>" },
- { "bin/luajittex/<platform>/","texmf-<platform>" },
+ -- { "bin/luajittex/<platform>/","texmf-<platform>" },
},
["xetex"] = {
{ "base/xetex/", "texmf" },
@@ -142,7 +144,7 @@ scripts.update.engines = {
{ "fonts/old/", "texmf" },
{ "base/xetex/", "texmf" },
{ "bin/luatex/<platform>/", "texmf-<platform>" },
- { "bin/luajittex/<platform>/","texmf-<platform>" },
+ -- { "bin/luajittex/<platform>/","texmf-<platform>" },
{ "bin/xetex/<platform>/", "texmf-<platform>" },
{ "bin/pdftex/<platform>/", "texmf-<platform>" },
},
@@ -181,6 +183,8 @@ scripts.update.platforms = {
["linux-64"] = "linux-64",
["linux64"] = "linux-64",
--
+ ["linux-armhf"] = "linux-armhf",
+ --
["freebsd"] = "freebsd",
--
["freebsd-amd64"] = "freebsd-amd64",
@@ -561,9 +565,8 @@ function scripts.update.make()
local formatlist = concat(table.fromhash(texformats), " ")
if formatlist ~= "" then
for engine in table.sortedhash(engines) do
- if engine == "luatex" then
+ if engine == "luatex" or engine == "luajittex" then
scripts.update.run(format('mtxrun --tree="%s" --script context --autogenerate --make',texroot))
- elseif engine == "luajittex" then
scripts.update.run(format('mtxrun --tree="%s" --script context --autogenerate --make --engine=luajittex',texroot))
else
scripts.update.run(format('mtxrun --tree="%s" --script texexec --make --all --%s %s',texroot,engine,formatlist))
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 0ff2d2897..edfeba8dd 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -56,7 +56,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lua"] = package.loaded["l-lua"] or true
--- original size: 3123, stripped down to: 1694
+-- original size: 3888, stripped down to: 2197
if not modules then modules={} end modules ['l-lua']={
version=1.001,
@@ -136,6 +136,16 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
end -- of closure
@@ -434,7 +444,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 29245, stripped down to: 15964
+-- original size: 36977, stripped down to: 20349
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -450,7 +460,9 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -469,7 +481,7 @@ local uppercase=R("AZ")
local underscore=P("_")
local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
-local newline=crlf+S("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
@@ -491,8 +503,10 @@ patterns.utfbom_32_le=utfbom_32_le
patterns.utfbom_16_be=utfbom_16_be
patterns.utfbom_16_le=utfbom_16_le
patterns.utfbom_8=utfbom_8
-patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
-patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
@@ -519,10 +533,24 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
patterns.lowercase=lowercase
patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
@@ -559,9 +587,12 @@ patterns.integer=sign^-1*digit^1
patterns.unsigned=digit^0*period*digit^1
patterns.float=sign^-1*patterns.unsigned
patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
patterns.number=patterns.float+patterns.integer
patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
patterns.oct=zero*octdigit^1
patterns.octal=patterns.oct
patterns.HEX=zero*P("X")*(digit+uppercase)^1
@@ -744,7 +775,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -760,7 +791,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -974,37 +1009,139 @@ function lpeg.append(list,pp,delayed,checked)
end
return p
end
+local p_false=P(false)
+local p_true=P(true)
local function make(t)
- local p
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
local keys=sortedkeys(t)
for i=1,#keys do
local k=keys[i]
- local v=t[k]
- if not p then
- if next(v) then
- p=P(k)*make(v)
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
else
- p=P(k)
+ p=p+P(k)*making(v)
end
- else
- if next(v) then
- p=p+P(k)*make(v)
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
else
- p=p+P(k)
+ return v,x..k
+ end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
end
+ return tt,x
end
end
- return p
end
function lpeg.utfchartabletopattern(list)
local tree={}
- for i=1,#list do
- local t=tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
end
- t=t[c]
end
end
return make(tree)
@@ -1044,6 +1181,65 @@ local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
local number=digit^1*(case_1+case_2)
local stripper=Cs((number+1)^0)
lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
end -- of closure
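The new byte/hex block above precomputes 256-entry lookup tables once and then converts whole strings with a single lpeg match (the md5 helpers further down reuse the same patterns). Expected behaviour once this module is loaded:

    print(string.toHEX("Tex"))        -- 546578
    print(string.tohex("Tex"))        -- 546578
    print(string.todec("Tex"))        -- 084101120
    print(string.tobytes("546578"))   -- Tex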
@@ -1071,7 +1267,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-string"] = package.loaded["l-string"] or true
--- original size: 5547, stripped down to: 2708
+-- original size: 5694, stripped down to: 2827
if not modules then modules={} end modules ['l-string']={
version=1.001,
@@ -1107,11 +1303,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1172,7 +1372,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-table"] = package.loaded["l-table"] or true
--- original size: 31113, stripped down to: 20256
+-- original size: 35724, stripped down to: 21525
if not modules then modules={} end modules ['l-table']={
version=1.001,
@@ -1205,7 +1405,7 @@ end
function table.keys(t)
if t then
local keys,k={},0
- for key,_ in next,t do
+ for key in next,t do
k=k+1
keys[k]=key
end
@@ -1215,32 +1415,52 @@ function table.keys(t)
end
end
local function compare(a,b)
- local ta,tb=type(a),type(b)
- if ta==tb then
- return a<b
- else
- return tostring(a)<tostring(b)
+ local ta=type(a)
+ if ta=="number" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ elseif tb=="string" then
+ return tostring(a)<b
+ end
+ elseif ta=="string" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ else
+ return a<tostring(b)
+ end
end
+ return tostring(a)<tostring(b)
end
local function sortedkeys(tab)
if tab then
local srt,category,s={},0,0
- for key,_ in next,tab do
+ for key in next,tab do
s=s+1
srt[s]=key
if category==3 then
+ elseif category==1 then
+ if type(key)~="string" then
+ category=3
+ end
+ elseif category==2 then
+ if type(key)~="number" then
+ category=3
+ end
else
local tkey=type(key)
if tkey=="string" then
- category=(category==2 and 3) or 1
+ category=1
elseif tkey=="number" then
- category=(category==1 and 3) or 2
+ category=2
else
category=3
end
end
end
- if category==0 or category==3 then
+ if s<2 then
+ elseif category==3 then
sort(srt,compare)
else
sort(srt)
@@ -1250,16 +1470,52 @@ local function sortedkeys(tab)
return {}
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
- for key,_ in next,tab do
+ for key in next,tab do
if key then
s=s+1
srt[s]=key
end
end
- sort(srt,cmp)
+ if s>1 then
+ sort(srt,cmp)
+ end
return srt
else
return {}
@@ -1268,13 +1524,15 @@ end
function table.allkeys(t)
local keys={}
for k,v in next,t do
- for k,v in next,v do
+ for k in next,v do
keys[k]=true
end
end
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
@@ -1285,19 +1543,21 @@ local function sortedhash(t,cmp)
else
s=sortedkeys(t)
end
- local n=0
local m=#s
- local function kv(s)
- if n<m then
- n=n+1
- local k=s[n]
- return k,t[k]
+ if m==1 then
+ return next,t
+ elseif m>0 then
+ local n=0
+ return function()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
end
end
- return kv,s
- else
- return nothing
end
+ return nothing
end
table.sortedhash=sortedhash
table.sortedpairs=sortedhash
@@ -1439,39 +1699,36 @@ function table.fromhash(t)
end
return hsh
end
-local noquotes,hexify,handle,reduce,compact,inline,functions
+local noquotes,hexify,handle,compact,inline,functions
local reserved=table.tohash {
'and','break','do','else','elseif','end','false','for','function','if',
'in','local','nil','not','or','repeat','return','then','true','until','while',
'NaN','goto',
}
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
end
- if n==#t then
- local tt,nt={},0
- for i=1,#t do
+ if n==nt then
+ local tt={}
+ for i=1,nt do
local v=t[i]
local tv=type(v)
if tv=="number" then
- nt=nt+1
if hexify then
- tt[nt]=format("0x%04X",v)
+ tt[i]=format("0x%X",v)
else
- tt[nt]=tostring(v)
+ tt[i]=tostring(v)
end
elseif tv=="string" then
- nt=nt+1
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
- tt=nil
- break
+ return nil
end
end
return tt
@@ -1490,7 +1747,7 @@ local function do_serialize(root,name,depth,level,indexed)
local tn=type(name)
if tn=="number" then
if hexify then
- handle(format("%s[0x%04X]={",depth,name))
+ handle(format("%s[0x%X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
@@ -1507,7 +1764,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first,last=nil,0
if compact then
last=#root
@@ -1525,22 +1782,19 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local tv,tk=type(v),type(k)
+ local tv=type(v)
+ local tk=type(k)
if compact and first and tk=="number" and k>=first and k<=last then
if tv=="number" then
if hexify then
- handle(format("%s 0x%04X,",depth,v))
+ handle(format("%s 0x%X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
+ handle(format("%s %q,",depth,v))
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
handle(format("%s {},",depth))
elseif inline then
local st=simple_table(v)
@@ -1570,64 +1824,48 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="number" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
else
handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
+ handle(format("%s %s=0x%X,",depth,k,v))
else
handle(format("%s %s=%s,",depth,k,v))
end
else
if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
+ handle(format("%s [%q]=0x%X,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
+ handle(format("%s [%q]=%q,",depth,k,v))
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
+ handle(format("%s [0x%X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
@@ -1643,7 +1881,7 @@ local function do_serialize(root,name,depth,level,indexed)
if st then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
@@ -1663,7 +1901,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
else
handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
@@ -1679,7 +1917,7 @@ local function do_serialize(root,name,depth,level,indexed)
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
else
handle(format("%s [%s]=load(%q),",depth,k,f))
end
@@ -1694,7 +1932,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
@@ -1718,7 +1956,6 @@ local function serialize(_handle,root,name,specification)
noquotes=specification.noquotes
hexify=specification.hexify
handle=_handle or specification.handle or print
- reduce=specification.reduce or false
functions=specification.functions
compact=specification.compact
inline=specification.inline and compact
@@ -1735,7 +1972,6 @@ local function serialize(_handle,root,name,specification)
noquotes=false
hexify=false
handle=_handle or print
- reduce=false
compact=true
inline=true
functions=true
@@ -1748,7 +1984,7 @@ local function serialize(_handle,root,name,specification)
end
elseif tname=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("[0x%X]={",name))
else
handle("["..name.."]={")
end
@@ -1766,7 +2002,7 @@ local function serialize(_handle,root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,"",0)
end
end
@@ -1895,14 +2131,25 @@ local function identical(a,b)
end
table.identical=identical
table.are_equal=are_equal
-function table.compact(t)
- if t then
- for k,v in next,t do
- if not next(v) then
- t[k]=nil
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
end
end
end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
end
function table.contains(t,v)
if t then
@@ -2000,15 +2247,17 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
function table.is_empty(t)
- return not t or not next(t)
+ return not t or next(t)==nil
end
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and next(t,next(t))==nil
end
function table.loweredkeys(t)
local l={}
@@ -2053,6 +2302,44 @@ function table.values(t,s)
return {}
end
end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n~=nil do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
end -- of closure
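The new table.filtered iterates only over the keys of a hash that match a given Lua string pattern, optionally in sorted order (with an optional comparator). A small usage sketch, assuming this l-table module is loaded:

    local t = { foo = 1, foobar = 2, bar = 3 }
    for k, v in table.filtered(t,"^foo",true) do
        print(k,v)
    end
    -- foo     1
    -- foobar  2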
@@ -2061,7 +2348,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-io"] = package.loaded["l-io"] or true
--- original size: 8817, stripped down to: 6340
+-- original size: 8643, stripped down to: 6232
if not modules then modules={} end modules ['l-io']={
version=1.001,
@@ -2075,7 +2362,7 @@ local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
local concat=table.concat
local floor=math.floor
local type=type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator="\\",";"
else
io.fileseparator,io.pathseparator="/",":"
@@ -2368,8 +2655,6 @@ function io.readstring(f,n,m)
local str=gsub(f:read(n),"\000","")
return str
end
-if not io.i_limiter then function io.i_limiter() end end
-if not io.o_limiter then function io.o_limiter() end end
end -- of closure
@@ -2596,7 +2881,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-os"] = package.loaded["l-os"] or true
--- original size: 16023, stripped down to: 9634
+-- original size: 15832, stripped down to: 9456
if not modules then modules={} end modules ['l-os']={
version=1.001,
@@ -2670,13 +2955,10 @@ if not os.__getenv__ then
setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
end
end
-local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
+local execute=os.execute
+local iopopen=io.popen
function os.resultof(command)
- local handle=io.popen(command,"r")
+ local handle=iopopen(command,"r")
if handle then
local result=handle:read("*all") or ""
handle:close()
@@ -2686,7 +2968,7 @@ function os.resultof(command)
end
end
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
+ if find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
else
io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
@@ -2705,7 +2987,7 @@ local launchers={
unix="$BROWSER %s &> /dev/null &",
}
function os.launch(str)
- os.execute(format(launchers[os.name] or launchers.unix,str))
+ execute(format(launchers[os.name] or launchers.unix,str))
end
if not os.times then
function os.times()
@@ -2746,7 +3028,7 @@ if platform~="" then
elseif os.type=="windows" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
+ if find(architecture,"AMD64",1,true) then
platform="win64"
else
platform="mswin"
@@ -2758,9 +3040,9 @@ elseif os.type=="windows" then
elseif name=="linux" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform="linux-64"
- elseif find(architecture,"ppc") then
+ elseif find(architecture,"ppc",1,true) then
platform="linux-ppc"
else
platform="linux"
@@ -2774,9 +3056,9 @@ elseif name=="macosx" then
local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
if architecture=="" then
platform="osx-intel"
- elseif find(architecture,"i386") then
+ elseif find(architecture,"i386",1,true) then
platform="osx-intel"
- elseif find(architecture,"x86_64") then
+ elseif find(architecture,"x86_64",1,true) then
platform="osx-64"
else
platform="osx-ppc"
@@ -2788,7 +3070,7 @@ elseif name=="macosx" then
elseif name=="sunos" then
function resolvers.platform(t,k)
local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
+ if find(architecture,"sparc",1,true) then
platform="solaris-sparc"
else
platform="solaris-intel"
@@ -2800,7 +3082,7 @@ elseif name=="sunos" then
elseif name=="freebsd" then
function resolvers.platform(t,k)
local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
+ if find(architecture,"amd64",1,true) then
platform="freebsd-amd64"
else
platform="freebsd"
@@ -2812,7 +3094,7 @@ elseif name=="freebsd" then
elseif name=="kfreebsd" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform="kfreebsd-amd64"
else
platform="kfreebsd-i386"
@@ -2829,8 +3111,9 @@ else
return platform
end
end
+os.newline=name=="windows" and "\013\010" or "\010"
function resolvers.bits(t,k)
- local bits=find(os.platform,"64") and 64 or 32
+ local bits=find(os.platform,"64",1,true) and 64 or 32
os.bits=bits
return bits
end
@@ -2980,7 +3263,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-file"] = package.loaded["l-file"] or true
--- original size: 18308, stripped down to: 9948
+-- original size: 20949, stripped down to: 9945
if not modules then modules={} end modules ['l-file']={
version=1.001,
@@ -2994,41 +3277,28 @@ local file=file
if not lfs then
lfs=optionalrequire("lfs")
end
-if not lfs then
- lfs={
- getcurrentdir=function()
- return "."
- end,
- attributes=function()
- return nil
- end,
- isfile=function(name)
- local f=io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir=function(name)
- print("you need to load lfs")
- return false
- end
- }
-elseif not lfs.isfile then
- local attributes=lfs.attributes
- function lfs.isdir(name)
- return attributes(name,"mode")=="directory"
- end
- function lfs.isfile(name)
- return attributes(name,"mode")=="file"
- end
-end
local insert,concat=table.insert,table.concat
local match,find,gmatch=string.match,string.find,string.gmatch
local lpegmatch=lpeg.match
local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
local checkedsplit=string.checkedsplit
local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local tricky=S("/\\")*P(-1)
+local attributes=lfs.attributes
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir,"lfs.isdir")
+end
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+end
+function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+end
local colon=P(":")
local period=P(".")
local periods=P("..")
@@ -3230,28 +3500,30 @@ local isroot=fwslash^1*-1
local hasroot=fwslash^1
local reslasher=lpeg.replacer(S("\\/"),"/")
local deslasher=lpeg.replacer(S("\\/")^1,"/")
-function file.join(...)
- local lst={... }
- local one=lst[1]
+function file.join(one,two,three,...)
+ if not two then
+ return one=="" and one or lpegmatch(stripper,one)
+ end
+ if one=="" then
+ return lpegmatch(stripper,three and concat({ two,three,... },"/") or two)
+ end
if lpegmatch(isnetwork,one) then
local one=lpegmatch(reslasher,one)
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return one..two
else
return one.."/"..two
end
elseif lpegmatch(isroot,one) then
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return two
else
return "/"..two
end
- elseif one=="" then
- return lpegmatch(stripper,concat(lst,"/",2))
else
- return lpegmatch(deslasher,concat(lst,"/"))
+ return lpegmatch(deslasher,concat({ one,two,three,... },"/"))
end
end
local drivespec=R("az","AZ")^1*colon
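The rewritten file.join handles the common one and two argument calls without building an intermediate table; the joining behaviour itself is unchanged. A usage sketch, assuming this l-file module is loaded:

    print(file.join("a","b","c.tex"))   -- a/b/c.tex
    print(file.join("/","usr","bin"))   -- /usr/bin
    print(file.join("a//b","c"))        -- a/b/c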
@@ -3425,7 +3697,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-md5"] = package.loaded["l-md5"] or true
--- original size: 3760, stripped down to: 2088
+-- original size: 3248, stripped down to: 2266
if not modules then modules={} end modules ['l-md5']={
version=1.001,
@@ -3443,14 +3715,20 @@ if not md5 then
}
end
local md5,file=md5,file
-local gsub,format,byte=string.gsub,string.format,string.byte
-local md5sum=md5.sum
-local function convert(str,fmt)
- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
+local gsub=string.gsub
+do
+ local patterns=lpeg and lpeg.patterns
+ if patterns then
+ local bytestoHEX=patterns.bytestoHEX
+ local bytestohex=patterns.bytestohex
+ local bytestodec=patterns.bytestodec
+ local lpegmatch=lpeg.match
+ local md5sum=md5.sum
+ if not md5.HEX then function md5.HEX(str) if str then return lpegmatch(bytestoHEX,md5sum(str)) end end end
+ if not md5.hex then function md5.hex(str) if str then return lpegmatch(bytestohex,md5sum(str)) end end end
+ if not md5.dec then function md5.dec(str) if str then return lpegmatch(bytestodec,md5sum(str)) end end end
+ end
+end
function file.needsupdating(oldname,newname,threshold)
local oldtime=lfs.attributes(oldname,"modification")
if oldtime then
@@ -3507,7 +3785,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-url"] = package.loaded["l-url"] or true
--- original size: 11993, stripped down to: 5584
+-- original size: 12531, stripped down to: 5721
if not modules then modules={} end modules ['l-url']={
version=1.001,
@@ -3534,7 +3812,7 @@ local hexdigit=R("09","AF","af")
local plus=P("+")
local nothing=Cc("")
local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
+local escaped=(plus/" ")+escapedchar
local noslash=P("/")/""
local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
@@ -3593,19 +3871,25 @@ local splitquery=Cf (Ct("")*P { "sequence",
pair=Cg(key*equal*value),
},rawset)
local function hashed(str)
- if str=="" then
+ if not str or str=="" then
return {
scheme="invalid",
original=str,
}
end
- local s=split(str)
- local rawscheme=s[1]
- local rawquery=s[4]
- local somescheme=rawscheme~=""
- local somequery=rawquery~=""
+ local detailed=split(str)
+ local rawscheme=""
+ local rawquery=""
+ local somescheme=false
+ local somequery=false
+ if detailed then
+ rawscheme=detailed[1]
+ rawquery=detailed[4]
+ somescheme=rawscheme~=""
+ somequery=rawquery~=""
+ end
if not somescheme and not somequery then
- s={
+ return {
scheme="file",
authority="",
path=str,
@@ -3615,28 +3899,28 @@ local function hashed(str)
noscheme=true,
filename=str,
}
- else
- local authority,path,filename=s[2],s[3]
- if authority=="" then
- filename=path
- elseif path=="" then
- filename=""
- else
- filename=authority.."/"..path
- end
- s={
- scheme=rawscheme,
- authority=authority,
- path=path,
- query=lpegmatch(unescaper,rawquery),
- queries=lpegmatch(splitquery,rawquery),
- fragment=s[5],
- original=str,
- noscheme=false,
- filename=filename,
- }
end
- return s
+ local authority=detailed[2]
+ local path=detailed[3]
+ local filename=nil
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
+ else
+ filename=authority.."/"..path
+ end
+ return {
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=detailed[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
end
url.split=split
url.hasscheme=hasscheme
@@ -3670,7 +3954,7 @@ function url.construct(hash)
end
return lpegmatch(escaper,concat(fullurl))
end
-local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
+local pattern=Cs(slash^-1/""*R("az","AZ")*((S(":|")/":")+P(":"))*slash*P(1)^0)
function url.filename(filename)
local spec=hashed(filename)
local path=spec.path
@@ -3718,7 +4002,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-dir"] = package.loaded["l-dir"] or true
--- original size: 14229, stripped down to: 8740
+-- original size: 16765, stripped down to: 11003
if not modules then modules={} end modules ['l-dir']={
version=1.001,
@@ -3728,7 +4012,7 @@ if not modules then modules={} end modules ['l-dir']={
license="see context related readme files"
}
local type,select=type,select
-local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local find,gmatch,match,gsub,sub=string.find,string.gmatch,string.match,string.gsub,string.sub
local concat,insert,remove,unpack=table.concat,table.insert,table.remove,table.unpack
local lpegmatch=lpeg.match
local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
@@ -3737,53 +4021,127 @@ local dir=dir
local lfs=lfs
local attributes=lfs.attributes
local walkdir=lfs.dir
-local isdir=lfs.isdir
-local isfile=lfs.isfile
+local isdir=lfs.isdir
+local isfile=lfs.isfile
local currentdir=lfs.currentdir
local chdir=lfs.chdir
-local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
-if not isdir then
- function isdir(name)
- local a=attributes(name)
- return a and a.mode=="directory"
+local mkdir=lfs.mkdir
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";",1,true)
+if onwindows then
+ local tricky=S("/\\")*P(-1)
+ isdir=function(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+ end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
end
lfs.isdir=isdir
-end
-if not isfile then
- function isfile(name)
- local a=attributes(name)
- return a and a.mode=="file"
+ lfs.isfile=isfile
+else
+ isdir=function(name)
+ return attributes(name,"mode")=="directory"
end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
+ end
+ lfs.isdir=isdir
lfs.isfile=isfile
end
function dir.current()
return (gsub(currentdir(),"\\","/"))
end
-local lfsisdir=isdir
-local function isdir(path)
- path=gsub(path,"[/\\]+$","")
- return lfsisdir(path)
+local function glob_pattern_function(path,patt,recurse,action)
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ action(full)
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
+ end
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_function(dirs[i],patt,recurse,action)
+ end
+ end
+ end
end
-lfs.isdir=isdir
-local function globpattern(path,patt,recurse,action)
- if path=="/" then
- path=path.."."
- elseif not find(path,"/$") then
- path=path..'/'
- end
- if isdir(path) then
- for name in walkdir(path) do
- local full=path..name
- local mode=attributes(full,'mode')
- if mode=='file' then
- if find(full,patt) then
- action(full)
+local function glob_pattern_table(path,patt,recurse,result)
+ if not result then
+ result={}
+ end
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ result[#result+1]=full
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- globpattern(full,patt,recurse,action)
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_table(dirs[i],patt,recurse,result)
end
end
end
+ return result
+end
+local function globpattern(path,patt,recurse,method)
+ local kind=type(method)
+ if patt and sub(patt,1,-3)==path then
+ patt=false
+ end
+ if kind=="function" then
+ return glob_pattern_function(path,patt,recurse,method)
+ elseif kind=="table" then
+ return glob_pattern_table(path,patt,recurse,method)
+ else
+ return glob_pattern_table(path,patt,recurse,{})
+ end
end
dir.globpattern=globpattern
local function collectpattern(path,patt,recurse,result)
@@ -3795,34 +4153,40 @@ local function collectpattern(path,patt,recurse,result)
ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
end
if ok and type(scanner)=="function" then
- if not find(path,"/$") then path=path..'/' end
+ if not find(path,"/$") then
+ path=path..'/'
+ end
for name in scanner,first do
- local full=path..name
- local attr=attributes(full)
- local mode=attr.mode
- if mode=='file' then
- if find(full,patt) then
+ if name=="." then
+ elseif name==".." then
+ else
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and mode=="directory" then
+ attr.list=collectpattern(full,patt,recurse)
result[name]=attr
end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- attr.list=collectpattern(full,patt,recurse)
- result[name]=attr
end
end
end
return result
end
dir.collectpattern=collectpattern
-local separator
-if onwindows then
+local separator,pattern
+if onwindows then
local slash=S("/\\")/"/"
- pattern=Ct {
+ pattern={
[1]=(Cs(P(".")+slash^1)+Cs(R("az","AZ")*P(":")*slash^0)+Cc("./"))*V(2)*V(3),
[2]=Cs(((1-S("*?/\\"))^0*slash)^0),
[3]=Cs(P(1)^0)
}
-else
- pattern=Ct {
+else
+ pattern={
[1]=(C(P(".")+P("/")^1)+Cc("./"))*V(2)*V(3),
[2]=C(((1-S("*?/"))^0*P("/"))^0),
[3]=C(P(1)^0)
@@ -3840,10 +4204,9 @@ local function glob(str,t)
elseif isfile(str) then
t(str)
else
- local split=lpegmatch(pattern,str)
- if split then
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
local start=root..path
local result=lpegmatch(filter,start..base)
globpattern(start,result,recurse,t)
@@ -3864,16 +4227,12 @@ local function glob(str,t)
return { str }
end
else
- local split=lpegmatch(pattern,str)
- if split then
- local t=t or {}
- local action=action or function(name) t[#t+1]=name end
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
local start=root..path
local result=lpegmatch(filter,start..base)
- globpattern(start,result,recurse,action)
- return t
+ return globpattern(start,result,recurse,t)
else
return {}
end
@@ -3913,16 +4272,26 @@ end
local make_indeed=true
if onwindows then
function dir.mkdirs(...)
- local str,pth="",""
- for i=1,select("#",...) do
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
+ local n=select("#",...)
+ local str
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
end
end
+ local pth=""
local drive=false
local first,middle,last=match(str,"^(//)(//*)(.*)$")
if first then
@@ -3957,21 +4326,30 @@ if onwindows then
pth=pth.."/"..s
end
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
return pth,(isdir(pth)==true)
end
else
function dir.mkdirs(...)
- local str,pth="",""
- for i=1,select("#",...) do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
+ local n=select("#",...)
+ local str,pth
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
+ end
end
end
end
@@ -3986,7 +4364,7 @@ else
pth=pth.."/"..s
end
if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
else
@@ -3994,7 +4372,7 @@ else
for s in gmatch(str,"[^/]+") do
pth=pth.."/"..s
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
end
@@ -4002,47 +4380,51 @@ else
end
end
dir.makedirs=dir.mkdirs
-if onwindows then
- function dir.expandname(str)
- local first,nothing,last=match(str,"^(//)(//*)(.*)$")
- if first then
- first=dir.current().."/"
- end
- if not first then
- first,last=match(str,"^(//)/*(.*)$")
- end
- if not first then
- first,last=match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d=currentdir()
- if chdir(first) then
- first=dir.current()
+do
+ local chdir=sandbox and sandbox.original(chdir) or chdir
+ if onwindows then
+ local xcurrentdir=dir.current
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=xcurrentdir().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=xcurrentdir()
+ end
+ chdir(d)
end
- chdir(d)
+ end
+ if not first then
+ first,last=xcurrentdir(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
end
end
- if not first then
- first,last=dir.current(),str
- end
- last=gsub(last,"//","/")
- last=gsub(last,"/%./","/")
- last=gsub(last,"^/*","")
- first=gsub(first,"/*$","")
- if last=="" or last=="." then
- return first
- else
- return first.."/"..last
- end
- end
-else
- function dir.expandname(str)
- if not find(str,"^/") then
- str=currentdir().."/"..str
+ else
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
+ end
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
end
- str=gsub(str,"//","/")
- str=gsub(str,"/%./","/")
- str=gsub(str,"(.)/%.$","%1")
- return str
end
end
file.expandname=dir.expandname
@@ -4085,7 +4467,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
--- original size: 1809, stripped down to: 1527
+-- original size: 1850, stripped down to: 1568
if not modules then modules={} end modules ['l-boolean']={
version=1.001,
@@ -4139,11 +4521,11 @@ function string.booleanstring(str)
return str=="yes" or str=="on" or str=="t"
end
end
-function string.is_boolean(str,default)
+function string.is_boolean(str,default,strict)
if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then
return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then
return false
end
end
@@ -4157,7 +4539,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 33473, stripped down to: 14938
+-- original size: 37388, stripped down to: 15817
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4173,7 +4555,9 @@ local type=type
local char,byte,format,sub,gmatch=string.char,string.byte,string.format,string.sub,string.gmatch
local concat=table.concat
local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local lpegmatch=lpeg.match
+local patterns=lpeg.patterns
+local tabletopattern=lpeg.utfchartabletopattern
local bytepairs=string.bytepairs
local finder=lpeg.finder
local replacer=lpeg.replacer
@@ -4182,7 +4566,7 @@ local utfgmatch=utf.gmatch
local p_utftype=patterns.utftype
local p_utfstricttype=patterns.utfstricttype
local p_utfoffset=patterns.utfoffset
-local p_utf8char=patterns.utf8char
+local p_utf8char=patterns.utf8character
local p_utf8byte=patterns.utf8byte
local p_utfbom=patterns.utfbom
local p_newline=patterns.newline
@@ -4321,6 +4705,7 @@ if not utf.sub then
local pattern_zero=Cmt(p_utf8char,slide_zero)^0
local pattern_one=Cmt(p_utf8char,slide_one )^0
local pattern_two=Cmt(p_utf8char,slide_two )^0
+ local pattern_first=C(patterns.utf8character)
function utf.sub(str,start,stop)
if not start then
return str
@@ -4362,7 +4747,9 @@ if not utf.sub then
end
end
end
- if start>stop then
+ if start==1 and stop==1 then
+ return lpegmatch(pattern_first,str) or ""
+ elseif start>stop then
return ""
elseif start>1 then
b,e,n,first,last=0,0,0,start-1,stop
@@ -4381,15 +4768,52 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping)
- local pattern=Cs((p_utf8char/mapping)^0)
- return function(str)
- if not str or str=="" then
- return ""
+function utf.remapper(mapping,option)
+ local variant=type(mapping)
+ if variant=="table" then
+ if option=="dynamic" then
+ local pattern=false
+ table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ if not pattern then
+ pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ end
+ return lpegmatch(pattern,str)
+ end
+ end
+ elseif option=="pattern" then
+ return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
else
- return lpegmatch(pattern,str)
+ local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+ end
+ elseif variant=="function" then
+ if option=="pattern" then
+ return Cs((p_utf8char/mapping+p_utf8char)^0)
+ else
+ local pattern=Cs((p_utf8char/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
end
- end,pattern
+ else
+ return function(str)
+ return str or ""
+ end
+ end
end
function utf.replacer(t)
local r=replacer(t,false,false,true)
@@ -4439,190 +4863,157 @@ function utf.magic(f)
end
local utf16_to_utf8_be,utf16_to_utf8_le
local utf32_to_utf8_be,utf32_to_utf8_le
-local utf_16_be_linesplitter=patterns.utfbom_16_be^-1*lpeg.tsplitat(patterns.utf_16_be_nl)
-local utf_16_le_linesplitter=patterns.utfbom_16_le^-1*lpeg.tsplitat(patterns.utf_16_le_nl)
-if bytepairs then
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*left+right
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+local utf_16_be_getbom=patterns.utfbom_16_be^-1
+local utf_16_le_getbom=patterns.utfbom_16_le^-1
+local utf_32_be_getbom=patterns.utfbom_32_be^-1
+local utf_32_le_getbom=patterns.utfbom_32_le^-1
+local utf_16_be_linesplitter=utf_16_be_getbom*lpeg.tsplitat(patterns.utf_16_be_nl)
+local utf_16_le_linesplitter=utf_16_le_getbom*lpeg.tsplitat(patterns.utf_16_le_nl)
+local utf_32_be_linesplitter=utf_32_be_getbom*lpeg.tsplitat(patterns.utf_32_be_nl)
+local utf_32_le_linesplitter=utf_32_le_getbom*lpeg.tsplitat(patterns.utf_32_le_nl)
+local more=0
+local p_utf16_to_utf8_be=C(1)*C(1)/function(left,right)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf16_to_utf8_le=C(1)*C(1)/function(right,left)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf32_to_utf8_be=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(a)+256*256*byte(b)+256*byte(c)+byte(d))
+end
+local p_utf32_to_utf8_le=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(d)+256*256*byte(c)+256*byte(b)+byte(a))
+end
+p_utf16_to_utf8_be=P(true)/function() more=0 end*utf_16_be_getbom*Cs(p_utf16_to_utf8_be^0)
+p_utf16_to_utf8_le=P(true)/function() more=0 end*utf_16_le_getbom*Cs(p_utf16_to_utf8_le^0)
+p_utf32_to_utf8_be=P(true)/function() more=0 end*utf_32_be_getbom*Cs(p_utf32_to_utf8_be^0)
+p_utf32_to_utf8_le=P(true)/function() more=0 end*utf_32_le_getbom*Cs(p_utf32_to_utf8_le^0)
+patterns.utf16_to_utf8_be=p_utf16_to_utf8_be
+patterns.utf16_to_utf8_le=p_utf16_to_utf8_le
+patterns.utf32_to_utf8_be=p_utf32_to_utf8_be
+patterns.utf32_to_utf8_le=p_utf32_to_utf8_le
+utf16_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_be,s)
+ else
+ return s
end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*right+left
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+end
+local utf16_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_be_linesplitter,t)
end
- utf32_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*256*256*a+256*256*b
- else
- r=r+1
- result[t]=utfchar(more+256*a+b)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_be,s)
end
- return t
end
- utf32_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*b+a
- else
- r=r+1
- result[t]=utfchar(more+256*256*256*b+256*256*a)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+ return t
+end
+utf16_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_le,s)
+ else
+ return s
end
-else
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if left=="\000" then
- r=r+1
- result[r]=utfchar(byte(right))
- elseif right then
- local now=256*byte(left)+byte(right)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
+end
+local utf16_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_le,s)
end
- return t
end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
+ return t
+end
+utf32_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_be,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_be,s)
end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if right=="\000" then
- r=r+1
- result[r]=utfchar(byte(left))
- elseif right then
- local now=256*byte(right)+byte(left)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+utf32_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_le,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_le,s)
end
- return t
end
- utf32_to_utf8_le=function() return {} end
- utf32_to_utf8_be=function() return {} end
+ return t
end
+utf.utf16_to_utf8_le_t=utf16_to_utf8_le_t
+utf.utf16_to_utf8_be_t=utf16_to_utf8_be_t
+utf.utf32_to_utf8_le_t=utf32_to_utf8_le_t
+utf.utf32_to_utf8_be_t=utf32_to_utf8_be_t
utf.utf16_to_utf8_le=utf16_to_utf8_le
utf.utf16_to_utf8_be=utf16_to_utf8_be
utf.utf32_to_utf8_le=utf32_to_utf8_le
utf.utf32_to_utf8_be=utf32_to_utf8_be
-function utf.utf8_to_utf8(t)
+function utf.utf8_to_utf8_t(t)
return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
end
-function utf.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8_t(t,endian)
+ return endian and utf16_to_utf8_be_t(t) or utf16_to_utf8_le_t(t) or t
end
-function utf.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8_t(t,endian)
+ return endian and utf32_to_utf8_be_t(t) or utf32_to_utf8_le_t(t) or t
end
-local function little(c)
- local b=byte(c)
+local function little(b)
if b<0x10000 then
return char(b%256,b/256)
else
@@ -4631,8 +5022,7 @@ local function little(c)
return char(b1%256,b1/256,b2%256,b2/256)
end
end
-local function big(c)
- local b=byte(c)
+local function big(b)
if b<0x10000 then
return char(b/256,b%256)
else
@@ -4641,27 +5031,29 @@ local function big(c)
return char(b1/256,b1%256,b2/256,b2%256)
end
end
-local _,l_remap=utf.remapper(little)
-local _,b_remap=utf.remapper(big)
-function utf.utf8_to_utf16_be(str,nobom)
+local l_remap=Cs((p_utf8byte/little+P(1)/"")^0)
+local b_remap=Cs((p_utf8byte/big+P(1)/"")^0)
+local function utf8_to_utf16_be(str,nobom)
if nobom then
return lpegmatch(b_remap,str)
else
return char(254,255)..lpegmatch(b_remap,str)
end
end
-function utf.utf8_to_utf16_le(str,nobom)
+local function utf8_to_utf16_le(str,nobom)
if nobom then
return lpegmatch(l_remap,str)
else
return char(255,254)..lpegmatch(l_remap,str)
end
end
+utf.utf8_to_utf16_be=utf8_to_utf16_be
+utf.utf8_to_utf16_le=utf8_to_utf16_le
function utf.utf8_to_utf16(str,littleendian,nobom)
if littleendian then
- return utf.utf8_to_utf16_le(str,nobom)
+ return utf8_to_utf16_le(str,nobom)
else
- return utf.utf8_to_utf16_be(str,nobom)
+ return utf8_to_utf16_be(str,nobom)
end
end
local pattern=Cs (
@@ -4677,16 +5069,16 @@ function utf.xstring(s)
return format("0x%05X",type(s)=="number" and s or utfbyte(s))
end
function utf.toeight(str)
- if not str then
+ if not str or str=="" then
return nil
end
local utftype=lpegmatch(p_utfstricttype,str)
if utftype=="utf-8" then
- return sub(str,4)
- elseif utftype=="utf-16-le" then
- return utf16_to_utf8_le(str)
+ return sub(str,4)
elseif utftype=="utf-16-be" then
- return utf16_to_utf8_ne(str)
+ return utf16_to_utf8_be(str)
+ elseif utftype=="utf-16-le" then
+ return utf16_to_utf8_le(str)
else
return str
end
@@ -4765,7 +5157,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-math"] = package.loaded["l-math"] or true
--- original size: 915, stripped down to: 836
+-- original size: 974, stripped down to: 890
if not modules then modules={} end modules ['l-math']={
version=1.001,
@@ -4775,6 +5167,9 @@ if not modules then modules={} end modules ['l-math']={
license="see context related readme files"
}
local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.ceiling then
+ math.ceiling=math.ceil
+end
if not math.round then
function math.round(x) return floor(x+0.5) end
end
@@ -4802,7 +5197,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 26857, stripped down to: 15062
+-- original size: 34503, stripped down to: 18933
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -4821,25 +5216,43 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=_LUAVERSION<5.2 and load or function(str)
- return load(dump(load(str),true))
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
end
if not number then number={} end
local stripper=patterns.stripzeros
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local spaceortab=patterns.spaceortab
local function points(n)
+ n=tonumber(n)
return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n=tonumber(n)
return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
end
number.points=points
number.basepoints=basepoints
-local rubish=patterns.spaceortab^0*patterns.newline
-local anyrubish=patterns.spaceortab+patterns.newline
+local rubish=spaceortab^0*newline
+local anyrubish=spaceortab+newline
local anything=patterns.anything
-local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local stripped=(spaceortab^1/"")*newline
local leading=rubish^0/""
-local trailing=(anyrubish^1*patterns.endofstring)/""
+local trailing=(anyrubish^1*endofstring)/""
local redundant=rubish^3/"\n"
local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
function strings.collapsecrlf(str)
@@ -4885,18 +5298,44 @@ local pattern=Carg(1)/function(t)
else
return ""
end
- end+patterns.newline*Cp()/function(position)
+ end+newline*Cp()/function(position)
extra,start=0,position
end+patterns.anything
)^1)
function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end })
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
end
+strings.striplong=strings.striplines
function strings.nice(str)
str=gsub(str,"[:%-+_]+"," ")
return str
@@ -4934,10 +5373,10 @@ string.tracedchars=tracedchars
strings.tracers=tracedchars
function string.tracedchar(b)
if type(b)=="number" then
- return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ return tracedchars[b] or (utfchar(b).." (U+"..format("%05X",b)..")")
else
local c=utfbyte(b)
- return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ return tracedchars[c] or (b.." (U+"..(c and format("%05X",c) or "?????")..")")
end
end
function number.signed(i)
@@ -4972,31 +5411,58 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
-local preamble=[[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-local formattednumber = number.formatted
-local sparseexponent = number.sparseexponent
-]]
local template=[[
%s
%s
return function(%s) return %s end
]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -5035,7 +5501,7 @@ local format_i=function(f)
if f and f~="" then
return format("format('%%%si',a%s)",f,n)
else
- return format("format('%%i',a%s)",n)
+ return format("format('%%i',a%s)",n)
end
end
local format_d=format_i
@@ -5047,6 +5513,14 @@ local format_f=function(f)
n=n+1
return format("format('%%%sf',a%s)",f,n)
end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -5261,7 +5735,7 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
@@ -5272,7 +5746,6 @@ local builder=Cs { "start",
+V("j")+V("J")
+V("m")+V("M")
+V("z")
-+V("*")
)+V("*")
)*(P(-1)+Carg(1))
)^0,
@@ -5281,6 +5754,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -5315,11 +5789,12 @@ local builder=Cs { "start",
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -5328,10 +5803,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -5343,10 +5818,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -5354,8 +5841,12 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
@@ -5364,9 +5855,28 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+local pattern=Cs((newline/os.newline+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
end -- of closure
@@ -5375,7 +5885,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 23952, stripped down to: 16092
+-- original size: 25338, stripped down to: 16247
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -5388,7 +5898,7 @@ utilities=utilities or {}
utilities.tables=utilities.tables or {}
local tables=utilities.tables
local format,gmatch,gsub,sub=string.format,string.gmatch,string.gsub,string.sub
-local concat,insert,remove=table.concat,table.insert,table.remove
+local concat,insert,remove,sort=table.concat,table.insert,table.remove,table.sort
local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
@@ -5396,27 +5906,29 @@ local sortedkeys,sortedpairs=table.sortedkeys,table.sortedpairs
local formatters=string.formatters
local utftoeight=utf.toeight
local splitter=lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast)
- local composed,shortcut,t=nil,nil,{}
+function utilities.tables.definetable(target,nofirst,nolast)
+ local composed,t=nil,{}
local snippets=lpegmatch(splitter,target)
for i=1,#snippets-(nolast and 1 or 0) do
local name=snippets[i]
if composed then
- composed=shortcut.."."..name
- shortcut=shortcut.."_"..name
- t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ composed=composed.."."..name
+ t[#t+1]=formatters["if not %s then %s = { } end"](composed,composed)
else
composed=name
- shortcut=name
if not nofirst then
t[#t+1]=formatters["%s = %s or { }"](composed,composed)
end
end
end
- if nolast then
- composed=shortcut.."."..snippets[#snippets]
+ if composed then
+ if nolast then
+ composed=composed.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+ else
+ return "",target
end
- return concat(t,"\n"),composed
end
function tables.definedtable(...)
local t=_G
@@ -5443,7 +5955,7 @@ function tables.accesstable(target,root)
end
function tables.migratetable(target,v,root)
local t=root or _G
- local names=string.split(target,".")
+ local names=lpegmatch(splitter,target)
for i=1,#names-1 do
local name=names[i]
t[name]=t[name] or {}
@@ -5463,6 +5975,15 @@ function tables.removevalue(t,value)
end
end
end
+function tables.replacevalue(t,oldvalue,newvalue)
+ if oldvalue and newvalue then
+ for i=1,#t do
+ if t[i]==oldvalue then
+ t[i]=newvalue
+ end
+ end
+ end
+end
function tables.insertbeforevalue(t,value,extra)
for i=1,#t do
if t[i]==extra then
@@ -5610,7 +6131,7 @@ local f_ordered_string=formatters["%q,"]
local f_ordered_number=formatters["%s,"]
local f_ordered_boolean=formatters["%l,"]
function table.fastserialize(t,prefix)
- local r={ prefix or "return" }
+ local r={ type(prefix)=="string" and prefix or "return" }
local m=1
local function fastserialize(t,outer)
local n=#t
@@ -5807,7 +6328,8 @@ function table.serialize(root,name,specification)
local t
local n=1
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
@@ -5815,19 +6337,17 @@ function table.serialize(root,name,specification)
return nil
end
end
- if n==#t then
+ if n==nt then
local tt={}
- local nt=0
- for i=1,#t do
+ for i=1,nt do
local v=t[i]
local tv=type(v)
- nt=nt+1
if tv=="number" then
- tt[nt]=v
+ tt[i]=v
elseif tv=="string" then
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
return nil
end
@@ -5856,7 +6376,7 @@ function table.serialize(root,name,specification)
end
depth=depth+1
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first=nil
local last=0
last=#root
@@ -5875,13 +6395,13 @@ function table.serialize(root,name,specification)
local v=root[k]
local tv=type(v)
local tk=type(k)
- if first and tk=="number" and k>=first and k<=last then
+ if first and tk=="number" and k<=last and k>=first then
if tv=="number" then
n=n+1 t[n]=f_val_num(depth,v)
elseif tv=="string" then
n=n+1 t[n]=f_val_str(depth,v)
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
n=n+1 t[n]=f_val_not(depth)
else
local st=simple_table(v)
@@ -5911,13 +6431,13 @@ function table.serialize(root,name,specification)
n=n+1 t[n]=f_key_boo_value_str(depth,k,v)
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
- n=n+1 t[n]=f_key_num_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_num_value_not(depth,k)
elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_str_value_not(depth,k)
elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_boo_value_not(depth,k)
end
else
local st=simple_table(v)
@@ -5969,7 +6489,7 @@ function table.serialize(root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,1,0)
end
end
@@ -6132,7 +6652,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-prs"] = package.loaded["util-prs"] or true
--- original size: 19604, stripped down to: 13998
+-- original size: 21780, stripped down to: 15121
if not modules then modules={} end modules ['util-prs']={
version=1.001,
@@ -6154,6 +6674,8 @@ local patterns=parsers.patterns or {}
parsers.patterns=patterns
local setmetatableindex=table.setmetatableindex
local sortedhash=table.sortedhash
+local sortedkeys=table.sortedkeys
+local tohash=table.tohash
local digit=R("09")
local space=P(' ')
local equal=P("=")
@@ -6203,9 +6725,7 @@ patterns.settings_to_hash_a=pattern_a_s
patterns.settings_to_hash_b=pattern_b_s
patterns.settings_to_hash_c=pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if type(str)=="table" then
- return set
- elseif how=="strict" then
+ if how=="strict" then
return (pattern_c/set)^1
elseif how=="tolerant" then
return (pattern_b/set)^1
@@ -6214,7 +6734,9 @@ function parsers.make_settings_to_hash_pattern(set,how)
end
end
function parsers.settings_to_hash(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6223,16 +6745,16 @@ function parsers.settings_to_hash(str,existing)
else
return str
end
- elseif str and str~="" then
+ else
hash=existing or {}
lpegmatch(pattern_a_s,str)
return hash
- else
- return {}
end
end
function parsers.settings_to_hash_tolerant(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6241,16 +6763,16 @@ function parsers.settings_to_hash_tolerant(str,existing)
else
return str
end
- elseif str and str~="" then
+ else
hash=existing or {}
lpegmatch(pattern_b_s,str)
return hash
- else
- return {}
end
end
function parsers.settings_to_hash_strict(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return nil
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6263,8 +6785,6 @@ function parsers.settings_to_hash_strict(str,existing)
hash=existing or {}
lpegmatch(pattern_c_s,str)
return next(hash) and hash
- else
- return nil
end
end
local separator=comma*space^0
@@ -6272,27 +6792,46 @@ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comm
local pattern=spaces*Ct(value*(separator*value)^0)
patterns.settings_to_array=pattern
function parsers.settings_to_array(str,strict)
- if type(str)=="table" then
- return str
- elseif not str or str=="" then
+ if not str or str=="" then
return {}
+ elseif type(str)=="table" then
+ return str
elseif strict then
- if find(str,"{") then
+ if find(str,"{",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
- elseif find(str,",") then
+ elseif find(str,",",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
end
-local separator=space^0*comma*space^0
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(comma+P(-1)))))^0)
-local withvalue=Carg(1)*value/function(f,s) return f(s) end
-local pattern_a=spaces*Ct(value*(separator*value)^0)
-local pattern_b=spaces*withvalue*(separator*withvalue)^0
+local cache_a={}
+local cache_b={}
+function parsers.groupedsplitat(symbol,withaction)
+ if not symbol then
+ symbol=","
+ end
+ local pattern=(withaction and cache_b or cache_a)[symbol]
+ if not pattern then
+ local symbols=S(symbol)
+ local separator=space^0*symbols*space^0
+ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(symbols+P(-1)))))^0)
+ if withaction then
+ local withvalue=Carg(1)*value/function(f,s) return f(s) end
+ pattern=spaces*withvalue*(separator*withvalue)^0
+ cache_b[symbol]=pattern
+ else
+ pattern=spaces*Ct(value*(separator*value)^0)
+ cache_a[symbol]=pattern
+ end
+ end
+ return pattern
+end
+local pattern_a=parsers.groupedsplitat(",",false)
+local pattern_b=parsers.groupedsplitat(",",true)
function parsers.stripped_settings_to_array(str)
if not str or str=="" then
return {}
@@ -6317,8 +6856,8 @@ function parsers.add_settings_to_array(t,str)
end
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t,tn,s={},0,table.sortedkeys(h)
- omit=omit and table.tohash(omit)
+ local t,tn,s={},0,sortedkeys(h)
+ omit=omit and tohash(omit)
for i=1,#s do
local key=s[i]
if not omit or not omit[key] then
@@ -6354,12 +6893,9 @@ function parsers.array_to_string(a,separator)
return ""
end
end
-function parsers.settings_to_set(str,t)
- t=t or {}
- for s in gmatch(str,"[^, ]+") do
- t[s]=true
- end
- return t
+local pattern=Cf(Ct("")*Cg(C((1-S(", "))^1)*S(", ")^0*Cc(true))^1,rawset)
+function utilities.parsers.settings_to_set(str,t)
+ return str and lpegmatch(pattern,str) or {}
end
function parsers.simple_hash_to_string(h,separator)
local t,tn={},0
@@ -6371,12 +6907,16 @@ function parsers.simple_hash_to_string(h,separator)
end
return concat(t,separator or ",")
end
-local str=C((1-whitespace-equal)^1)
+local str=Cs(lpegpatterns.unquoted)+C((1-whitespace-equal)^1)
local setting=Cf(Carg(1)*(whitespace^0*Cg(str*whitespace^0*(equal*whitespace^0*str+Cc(""))))^1,rawset)
local splitter=setting^1
function utilities.parsers.options_to_hash(str,target)
return str and lpegmatch(splitter,str,1,target or {}) or {}
end
+local splitter=lpeg.tsplitat(" ")
+function utilities.parsers.options_to_array(str)
+ return str and lpegmatch(splitter,str) or {}
+end
local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
local pattern_a=spaces*Ct(value*(separator*value)^0)
local function repeater(n,str)
@@ -6463,7 +7003,7 @@ function parsers.keq_to_hash(str)
end
local defaultspecification={ separator=",",quote='"' }
function parsers.csvsplitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator=specification.separator
local quotechar=specification.quote
local separator=S(separator~="" and separator or ",")
@@ -6487,7 +7027,7 @@ function parsers.csvsplitter(specification)
end
end
function parsers.rfc4180splitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator=specification.separator
local quotechar=P(specification.quote)
local dquotechar=quotechar*quotechar
@@ -6498,7 +7038,7 @@ function parsers.rfc4180splitter(specification)
local field=escaped+non_escaped+Cc("")
local record=Ct(field*(separator*field)^1)
local headerline=record*Cp()
- local wholeblob=Ct((newline^-1*record)^0)
+ local wholeblob=Ct((newline^(specification.strict and -1 or 1)*record)^0)
return function(data,getheader)
if getheader then
local header,position=lpegmatch(headerline,data)
@@ -6535,20 +7075,20 @@ function parsers.stepper(str,n,action)
lpegmatch(stepper,str,1,n,action or print)
end
end
-local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
-local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
patterns.unittotex=pattern
function parsers.unittotex(str,textmode)
return lpegmatch(textmode and pattern_text or pattern_math,str)
end
-local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+anything)^0)
function parsers.unittoxml(str)
return lpegmatch(pattern,str)
end
local cache={}
-local spaces=lpeg.patterns.space^0
+local spaces=lpegpatterns.space^0
local dummy=function() end
-table.setmetatableindex(cache,function(t,k)
+setmetatableindex(cache,function(t,k)
local separator=P(k)
local value=(1-separator)^0
local pattern=spaces*C(value)*separator^0*Cp()
@@ -6613,6 +7153,18 @@ function utilities.parsers.runtime(time)
local seconds=mod(time,60)
return days,hours,minutes,seconds
end
+local spacing=whitespace^0
+local apply=P("->")
+local method=C((1-apply)^1)
+local token=lbrace*C((1-rbrace)^1)*rbrace+C(anything^1)
+local pattern=spacing*(method*spacing*apply+Carg(1))*spacing*token
+function utilities.parsers.splitmethod(str,default)
+ if str then
+ return lpegmatch(pattern,str,1,default or false)
+ else
+ return default or false,""
+ end
+end
end -- of closure
@@ -6702,7 +7254,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-set"] = package.loaded["trac-set"] or true
--- original size: 12365, stripped down to: 8799
+-- original size: 12482, stripped down to: 8864
if not modules then modules={} end modules ['trac-set']={
version=1.001,
@@ -6730,7 +7282,7 @@ function setters.initialize(filename,name,values)
local data=setter.data
if data then
for key,newvalue in next,values do
- local newvalue=is_boolean(newvalue,newvalue)
+ local newvalue=is_boolean(newvalue,newvalue,true)
local functions=data[key]
if functions then
local oldvalue=functions.value
@@ -6784,7 +7336,7 @@ local function set(t,what,newvalue)
elseif not value then
value=false
else
- value=is_boolean(value,value)
+ value=is_boolean(value,value,true)
end
w=topattern(w,true,true)
for name,functions in next,data do
@@ -6923,6 +7475,7 @@ function setters.new(name)
report=function(...) setters.report (setter,...) end,
enable=function(...) enable (setter,...) end,
disable=function(...) disable (setter,...) end,
+ reset=function(...) reset (setter,...) end,
register=function(...) register(setter,...) end,
list=function(...) list (setter,...) end,
show=function(...) show (setter,...) end,
@@ -7014,7 +7567,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-log"] = package.loaded["trac-log"] or true
--- original size: 25391, stripped down to: 16561
+-- original size: 29359, stripped down to: 20483
if not modules then modules={} end modules ['trac-log']={
version=1.001,
@@ -7023,15 +7576,18 @@ if not modules then modules={} end modules ['trac-log']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
+local next,type,select,print=next,type,select,print
local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
local format,gmatch,find=string.format,string.gmatch,string.find
local concat,insert,remove=table.concat,table.insert,table.remove
local topattern=string.topattern
-local next,type,select=next,type,select
local utfchar=utf.char
+local datetime=os.date
+local openfile=io.open
local setmetatableindex=table.setmetatableindex
local formatters=string.formatters
local texgetcount=tex and tex.getcount
+local variant="default"
logs=logs or {}
local logs=logs
local moreinfo=[[
@@ -7041,32 +7597,122 @@ maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-utilities.strings.formatters.add (
+formatters.add (
formatters,"unichr",
[["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
)
-utilities.strings.formatters.add (
+formatters.add (
formatters,"chruni",
[[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
)
local function ignore() end
setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
local report,subreport,status,settarget,setformats,settranslations
-local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters
+local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters,newline
if tex and (tex.jobname or tex.formatname) then
- local valueiskey={ __index=function(t,k) t[k]=k return k end }
- local target="term and log"
+ local function useluawrites()
+ local texio_write_nl=texio.write_nl
+ local texio_write=texio.write
+ local io_write=io.write
+ write_nl=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write_nl("log",...)
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target=="log" then
+ texio_write_nl("log",...)
+ elseif target=="term" then
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target~="none" then
+ texio_write_nl("log",target,...)
+ texio_write_nl("term","")
+ io_write(target,...)
+ end
+ end
+ write=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write("log",...)
+ io_write(...)
+ elseif target=="log" then
+ texio_write("log",...)
+ elseif target=="term" then
+ io_write(...)
+ elseif target~="none" then
+ texio_write("log",target,...)
+ io_write(target,...)
+ end
+ end
+ texio.write=write
+ texio.write_nl=write_nl
+ useluawrites=ignore
+ end
+ local whereto="both"
+ local target=nil
+ local targets=nil
+ local formats=table.setmetatableindex("self")
+ local translations=table.setmetatableindex("self")
+ local report_yes,subreport_yes,direct_yes,subdirect_yes,status_yes
+ local report_nop,subreport_nop,direct_nop,subdirect_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="none",
+ log="none",
+ file="none",
+ console="term",
+ terminal="term",
+ both="term",
+ },
+ }
+ }
logs.flush=io.flush
- local formats={} setmetatable(formats,valueiskey)
- local translations={} setmetatable(translations,valueiskey)
writer=function(...)
write_nl(target,...)
end
newline=function()
write_nl(target,"\n")
end
- local report_yes=formatters["%-15s > %s\n"]
- local report_nop=formatters["%-15s >\n"]
report=function(a,b,c,...)
if c then
write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
@@ -7078,8 +7724,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local direct_yes=formatters["%-15s > %s"]
- local direct_nop=formatters["%-15s >"]
direct=function(a,b,c,...)
if c then
return direct_yes(translations[a],formatters[formats[b]](c,...))
@@ -7091,8 +7735,6 @@ if tex and (tex.jobname or tex.formatname) then
return ""
end
end
- local subreport_yes=formatters["%-15s > %s > %s\n"]
- local subreport_nop=formatters["%-15s > %s >\n"]
subreport=function(a,s,b,c,...)
if c then
write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
@@ -7104,8 +7746,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local subdirect_yes=formatters["%-15s > %s > %s"]
- local subdirect_nop=formatters["%-15s > %s >"]
subdirect=function(a,s,b,c,...)
if c then
return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
@@ -7117,8 +7757,6 @@ if tex and (tex.jobname or tex.formatname) then
return ""
end
end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
status=function(a,b,c,...)
if c then
write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
@@ -7130,16 +7768,13 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local targets={
- logfile="log",
- log="log",
- file="log",
- console="term",
- terminal="term",
- both="term and log",
- }
- settarget=function(whereto)
- target=targets[whereto or "both"] or targets.both
+ settarget=function(askedwhereto)
+ whereto=askedwhereto or whereto or "both"
+ target=targets[whereto]
+ if not target then
+ whereto="both"
+ target=targets[whereto]
+ end
if target=="term" or target=="term and log" then
logs.flush=io.flush
else
@@ -7168,21 +7803,74 @@ if tex and (tex.jobname or tex.formatname) then
writeline(target,f(...))
end
end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- direct_yes=f.direct_yes or direct_yes
- direct_nop=f.direct_nop or direct_nop
- subdirect_yes=f.subdirect_yes or subdirect_yes
- subdirect_nop=f.subdirect_nop or subdirect_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
+ setformatters=function(specification)
+ local t=nil
+ local f=nil
+ local d=variants.default
+ if not specification then
+ elseif type(specification)=="table" then
+ t=specification.targets
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ t=v.targets
+ f=v.formats
+ variant=specification
+ end
+ end
+ targets=t or d.targets
+ target=targets[whereto] or target
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ direct_yes=f.direct_yes
+ direct_nop=f.direct_nop
+ subdirect_yes=f.subdirect_yes
+ subdirect_nop=f.subdirect_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ if variant=="ansi" then
+ useluawrites()
+ end
+ settarget(whereto)
+ end
+ setformatters(variant)
setlogfile=ignore
settimedlog=ignore
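A minimal usage sketch (not part of the diff) for the setformatters rewritten above: it now takes either a variant name or a specification table, missing formats fall back to the default variant through setmetatableindex, and settarget is reapplied afterwards. The exported name on the logs table is not shown in this hunk, so the calls below use the local directly.

-- sketch: switch to the ansi variant defined above
setformatters("ansi")
-- or pass a partial table; anything not given inherits the default formats
setformatters {
  formats = {
    report_yes = formatters["%-20s > %s\n"], -- hypothetical wider label column
  },
}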
else
+ local report_yes,subreport_yes,status_yes
+ local report_nop,subreport_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ }
logs.flush=ignore
writer=function(s)
write_nl(s)
@@ -7190,8 +7878,6 @@ else
newline=function()
write_nl("\n")
end
- local report_yes=formatters["%-15s | %s"]
- local report_nop=formatters["%-15s |"]
report=function(a,b,c,...)
if c then
write_nl(report_yes(a,formatters[b](c,...)))
@@ -7203,8 +7889,6 @@ else
write_nl("")
end
end
- local subreport_yes=formatters["%-15s | %s | %s"]
- local subreport_nop=formatters["%-15s | %s |"]
subreport=function(a,sub,b,c,...)
if c then
write_nl(subreport_yes(a,sub,formatters[b](c,...)))
@@ -7216,8 +7900,6 @@ else
write_nl("")
end
end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
status=function(a,b,c,...)
if c then
write_nl(status_yes(a,formatters[b](c,...)))
@@ -7242,14 +7924,34 @@ else
writeline(f(s))
end
end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
+ setformatters=function(specification)
+ local f=nil
+ local d=variants.default
+ if specification then
+ if type(specification)=="table" then
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ f=v.formats
+ end
+ end
+ end
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ end
+ setformatters(variant)
setlogfile=function(name,keepopen)
if name and name~="" then
local localtime=os.localtime
@@ -7368,9 +8070,10 @@ local function setblocked(category,value)
v.state=value
end
else
- states=utilities.parsers.settings_to_hash(category)
+ states=utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
for c,_ in next,states do
- if data[c] then
+ local v=data[c]
+ if v then
v.state=value
else
c=topattern(c,true,true)
@@ -7501,13 +8204,13 @@ end
local simple=logs.reporter("comment")
logs.simple=simple
logs.simpleline=simple
-function logs.setprogram () end
-function logs.extendbanner() end
-function logs.reportlines () end
-function logs.reportbanner() end
-function logs.reportline () end
-function logs.simplelines () end
-function logs.help () end
+logs.setprogram=ignore
+logs.extendbanner=ignore
+logs.reportlines=ignore
+logs.reportbanner=ignore
+logs.reportline=ignore
+logs.simplelines=ignore
+logs.help=ignore
local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
local p_newline=lpeg.patterns.newline
local linewise=(
@@ -7584,10 +8287,11 @@ function logs.application(t)
end
return t
end
-function logs.system(whereto,process,jobname,category,...)
- local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+local f_syslog=formatters["%s %s => %s => %s => %s\r"]
+function logs.system(whereto,process,jobname,category,fmt,arg,...)
+ local message=f_syslog(datetime("%d/%m/%y %H:%m:%S"),process,jobname,category,arg==nil and fmt or format(fmt,arg,...))
for i=1,10 do
- local f=io.open(whereto,"a")
+ local f=openfile(whereto,"a")
if f then
f:write(message)
f:close()
@@ -7649,7 +8353,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
--- original size: 6501, stripped down to: 5156
+-- original size: 6704, stripped down to: 5343
if not modules then modules={} end modules ['trac-inf']={
version=1.001,
@@ -7659,7 +8363,7 @@ if not modules then modules={} end modules ['trac-inf']={
license="see context related readme files"
}
local type,tonumber,select=type,tonumber,select
-local format,lower=string.format,string.lower
+local format,lower,find=string.format,string.lower,string.find
local concat=table.concat
local clock=os.gettimeofday or os.clock
local setmetatableindex=table.setmetatableindex
@@ -7750,7 +8454,8 @@ function statistics.show()
if statistics.enable then
local register=statistics.register
register("used platform",function()
- return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
end)
register("luatex banner",function()
return lower(status.banner)
@@ -7763,14 +8468,23 @@ function statistics.show()
return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
end)
if jit then
- local status={ jit.status() }
- if status[1] then
- register("luajit status",function()
- return concat(status," ",2)
- end)
- end
- end
- register("current memory usage",statistics.memused)
+ local jitstatus={ jit.status() }
+ if jitstatus[1] then
+ register("luajit options",concat(jitstatus," ",2))
+ end
+ end
+ register("lua properties",function()
+ local list=status.list()
+ local hashchar=tonumber(list.luatex_hashchars)
+ local mask=lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask=="utf" and "τεχ" or "tex")
+ end)
register("runtime",statistics.runtime)
logs.newline()
for i=1,#statusinfo do
@@ -7812,15 +8526,6 @@ function statistics.tracefunction(base,tag,...)
statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
end
end
-commands=commands or {}
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
end -- of closure
@@ -7829,7 +8534,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
--- original size: 5773, stripped down to: 3453
+-- original size: 5829, stripped down to: 3501
if not modules then modules={} end modules ['trac-pro']={
version=1.001,
@@ -7846,14 +8551,16 @@ local namespaces=namespaces
local registered={}
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("reference to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("assignment to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("assignment to %a in protected namespace %a",k,name)
end
@@ -8104,7 +8811,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 3708, stripped down to: 2568
+-- original size: 3898, stripped down to: 2644
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -8184,20 +8891,22 @@ end
function debugger.disable()
debug.sethook()
end
-function traceback()
- local level=1
+local function showtraceback(rep)
+ local level=2
+ local reporter=rep or report
while true do
- local info=debug.getinfo(level,"Sl")
+ local info=getinfo(level,"Sl")
if not info then
break
elseif info.what=="C" then
- print(format("%3i : C function",level))
+ reporter("%2i : %s",level-1,"C function")
else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
end
level=level+1
end
end
+debugger.showtraceback=showtraceback
end -- of closure
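The new debugger.showtraceback above replaces the raw debug.traceback dumps used before: callers hand it a reporter and get one formatted line per stack frame, which is how trac-pro uses it further down. A small sketch (not part of the diff) with a hypothetical reporter category:

local report_demo = logs.reporter("demo") -- hypothetical category
if debugger.showtraceback then
  debugger.showtraceback(report_demo)     -- frames come out as "nn : source : line"
end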
@@ -8383,7 +9092,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
--- original size: 6251, stripped down to: 3488
+-- original size: 7100, stripped down to: 3978
if not modules then modules={} end modules ['util-tpl']={
version=1.001,
@@ -8425,7 +9134,7 @@ local sqlescape=lpeg.replacer {
{ "\r\n","\\n" },
{ "\r","\\n" },
}
-local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'"))
lpegpatterns.sqlescape=sqlescape
lpegpatterns.sqlquoted=sqlquoted
local luaescape=lpegpatterns.luaescape
@@ -8448,12 +9157,24 @@ local quotedescapers={
local luaescaper=escapers.lua
local quotedluaescaper=quotedescapers.lua
local function replacekeyunquoted(s,t,how,recurse)
- local escaper=how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
end
local function replacekeyquoted(s,t,how,recurse)
- local escaper=how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+local function replaceoptional(l,m,r,t,how,recurse)
+ local v=t[l]
+ return v and v~="" and lpegmatch(replacer,r,1,t,how or "lua",recurse or false) or ""
end
local single=P("%")
local double=P("%%")
@@ -8468,11 +9189,16 @@ local nolquoted=lquoted/''
local norquoted=rquoted/''
local nolquotedq=lquotedq/''
local norquotedq=rquotedq/''
-local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
-local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
-local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local noloptional=P("%?")/''
+local noroptional=P("?%")/''
+local nomoptional=P(":")/''
+local args=Carg(1)*Carg(2)*Carg(3)
+local key=nosingle*((C((1-nosingle )^1)*args)/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq )^1)*args)/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*args)/replacekeyunquoted)*norquoted
+local optional=noloptional*((C((1-nomoptional)^1)*nomoptional*C((1-noroptional)^1)*args)/replaceoptional)*noroptional
local any=P(1)
- replacer=Cs((unquoted+quoted+escape+key+any)^0)
+ replacer=Cs((unquoted+quoted+escape+optional+key+any)^0)
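Reading the grammar above, the added %?key: ... ?% form keeps the inner fragment (with the usual %key% substitution applied to it) only when mapping.key is present and non-empty, and drops it otherwise. A sketch (not part of the diff) against the utilities.templates.replace entry point; the expected output is inferred from the pattern, not taken from a test:

local str = "users%?name: where name = %name%?%"
print(utilities.templates.replace(str, { name = "hans" }))
-- expected: users where name = hans
print(utilities.templates.replace(str, { }))
-- expected: users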
local function replace(str,mapping,how,recurse)
if mapping and str then
return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
@@ -8511,7 +9237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-env"] = package.loaded["util-env"] or true
--- original size: 8807, stripped down to: 5085
+-- original size: 8022, stripped down to: 5038
if not modules then modules={} end modules ['util-env']={
version=1.001,
@@ -8522,7 +9248,7 @@ if not modules then modules={} end modules ['util-env']={
}
local allocate,mark=utilities.storage.allocate,utilities.storage.mark
local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
-local unquoted,quoted=string.unquoted,string.quoted
+local unquoted,quoted,optionalquoted=string.unquoted,string.quoted,string.optionalquoted
local concat,insert,remove=table.concat,table.insert,table.remove
environment=environment or {}
local environment=environment
@@ -8635,24 +9361,14 @@ function environment.splitarguments(separator)
return before,after
end
function environment.reconstructcommandline(arg,noquote)
+ local resolveprefix=resolvers.resolve
arg=arg or environment.originalarguments
if noquote and #arg==1 then
- local a=arg[1]
- a=resolvers.resolve(a)
- a=unquoted(a)
- return a
+ return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1])
elseif #arg>0 then
local result={}
for i=1,#arg do
- local a=arg[i]
- a=resolvers.resolve(a)
- a=unquoted(a)
- a=gsub(a,'"','\\"')
- if find(a," ") then
- result[#result+1]=quoted(a)
- else
- result[#result+1]=a
- end
+ result[i]=optionalquoted(resolveprefix and resolveprefix(arg[i]) or resolveprefix)
end
return concat(result," ")
else
@@ -8708,7 +9424,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["luat-env"] = package.loaded["luat-env"] or true
--- original size: 5930, stripped down to: 4235
+-- original size: 6174, stripped down to: 4141
if not modules then modules={} end modules ['luat-env']={
version=1.001,
@@ -8786,15 +9502,13 @@ function environment.luafilechunk(filename,silent)
filename=file.replacesuffix(filename,"lua")
local fullname=environment.luafile(filename)
if fullname and fullname~="" then
- local data=luautilities.loadedluacode(fullname,strippable,filename)
- if trace_locating then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
- if trace_locating then
+ if not silent then
report_lua("unknown file %a",filename)
end
return nil
@@ -8863,7 +9577,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 42447, stripped down to: 26589
+-- original size: 45683, stripped down to: 27866
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -8878,10 +9592,10 @@ if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
-local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
local utfchar=utf.char
-local lpegmatch=lpeg.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
xml.xmlns=xml.xmlns or {}
@@ -8976,8 +9690,10 @@ local function add_end(spacing,namespace,tag)
top=stack[#stack]
if #stack<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
elseif toclose.tg~=tag then
errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
end
dt=top.dt
dt[#dt+1]=toclose
@@ -8986,10 +9702,29 @@ local function add_end(spacing,namespace,tag)
end
end
local function add_text(text)
+ local n=#dt
if cleanup and #text>0 then
- dt[#dt+1]=cleanup(text)
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..cleanup(text)
+ else
+ dt[n+1]=cleanup(text)
+ end
+ else
+ dt[1]=cleanup(text)
+ end
else
- dt[#dt+1]=text
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..text
+ else
+ dt[n+1]=text
+ end
+ else
+ dt[1]=text
+ end
end
end
local function add_special(what,spacing,text)
@@ -9021,8 +9756,10 @@ local function attribute_specification_error(str)
end
return str
end
+local badentity="&error;"
+local badentity="&"
xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
}
@@ -9043,9 +9780,10 @@ local function fromdec(s)
return formatters["d:%s"](s),true
end
end
-local rest=(1-P(";"))^0
-local many=P(1)^0
-local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local p_rest=(1-P(";"))^0
+local p_many=P(1)^0
+local p_char=lpegpatterns.utf8character
+local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
local predefined_unified={
[38]="&amp;",
[42]="&quot;",
@@ -9071,7 +9809,9 @@ local privates_u={
local privates_p={}
local privates_n={
}
-local escaped=utf.remapper(privates_u)
+local escaped=utf.remapper(privates_u,"dynamic")
+local unprivatized=utf.remapper(privates_p,"dynamic")
+xml.unprivatized=unprivatized
local function unescaped(s)
local p=privates_n[s]
if not p then
@@ -9084,9 +9824,7 @@ local function unescaped(s)
end
return p
end
-local unprivatized=utf.remapper(privates_p)
xml.privatetoken=unescaped
-xml.unprivatized=unprivatized
xml.privatecodes=privates_n
local function handle_hex_entity(str)
local h=hcache[str]
@@ -9181,7 +9919,7 @@ local function handle_any_entity(str)
report_xml("keeping entity &%s;",str)
end
if str=="" then
- a="&error;"
+ a=badentity
else
a="&"..str..";"
end
@@ -9209,7 +9947,7 @@ local function handle_any_entity(str)
if trace_entities then
report_xml("invalid entity &%s;",str)
end
- a="&error;"
+ a=badentity
acache[str]=a
else
if trace_entities then
@@ -9222,8 +9960,14 @@ local function handle_any_entity(str)
return a
end
end
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
+local function handle_end_entity(str)
+ report_xml("error in entity, %a found without ending %a",str,";")
+ return str
+end
+local function handle_crap_error(chr)
+ report_xml("error in parsing, unexpected %a found ",chr)
+ add_text(chr)
+ return chr
end
local space=S(' \r\n\t')
local open=P('<')
@@ -9239,15 +9983,15 @@ local valid=R('az','AZ','09')+S('_-.')
local name_yes=C(valid^1)*colon*C(valid^1)
local name_nop=C(P(true))*C(valid^1)
local name=name_yes+name_nop
-local utfbom=lpeg.patterns.utfbom
+local utfbom=lpegpatterns.utfbom
local spacing=C(space^0)
-local anyentitycontent=(1-open-semicolon-space-close)^0
+local anyentitycontent=(1-open-semicolon-space-close-ampersand)^0
local hexentitycontent=R("AF","af","09")^0
local decentitycontent=R("09")^0
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
)+(anyentitycontent/handle_any_entity)
-local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
local somespace=space^1
@@ -9298,16 +10042,20 @@ local instruction=(spacing*begininstruction*someinstruction*endinstruction)/func
local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
+local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
+local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
local grammar_parsed_text=P { "preamble",
preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
- children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
local grammar_unparsed_text=P { "preamble",
preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
- children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
settings=settings or {}
@@ -9341,7 +10089,6 @@ local function _xmlconvert_(data,settings)
errorstr="empty xml file"
elseif utfize or resolve then
if lpegmatch(grammar_parsed_text,data) then
- errorstr=""
else
errorstr="invalid xml file - parsed text"
end
@@ -9357,6 +10104,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+setmetatable(result,mt)
+setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -9389,8 +10138,11 @@ local function _xmlconvert_(data,settings)
end
if errorstr and errorstr~="" then
result.error=true
+ else
+ errorstr=nil
end
result.statistics={
+ errormessage=errorstr,
entities={
decimals=dcache,
hexadecimals=hcache,
@@ -9404,7 +10156,7 @@ local function _xmlconvert_(data,settings)
reported_attribute_errors,mt,errorhandler=nil,nil,nil
return result
end
-function xmlconvert(data,settings)
+local function xmlconvert(data,settings)
local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
if ok then
return result
@@ -9496,14 +10248,17 @@ function xml.checkbom(root)
insert(dt,2,"\n" )
end
end
-local function verbose_element(e,handlers)
+local f_attribute=formatters['%s=%q']
+local function verbose_element(e,handlers,escape)
local handle=handlers.handle
local serialize=handlers.serialize
local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
local ats=eat and next(eat) and {}
if ats then
+ local n=0
for k,v in next,eat do
- ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ n=n+1
+ ats[n]=f_attribute(k,escaped(v))
end
end
if ern and trace_entities and ern~=ens then
@@ -9588,23 +10343,25 @@ local function verbose_document(e,handlers)
end
end
local function serialize(e,handlers,...)
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
- if not state==true then
- return state
+ if e then
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
end
end
local function xserialize(e,handlers)
@@ -9845,7 +10602,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48956, stripped down to: 30516
+-- original size: 48229, stripped down to: 30684
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -10230,8 +10987,8 @@ local lp_builtin=P (
P("ns")/"ll.ns"
)*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
-lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
-lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
local lp_fastpos=lp_fastpos_n+lp_fastpos_p
local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
@@ -10410,7 +11167,7 @@ local function nodesettostring(set,nodetest)
if not ns or ns=="" then ns="*" end
if not tg or tg=="" then tg="*" end
tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i]=(directive and tg) or format("not(%s)",tg)
+ t[#t+1]=(directive and tg) or format("not(%s)",tg)
end
if nodetest==false then
return format("not(%s)",concat(t,"|"))
@@ -10676,7 +11433,6 @@ expressions.print=function(...)
print(...)
return true
end
-expressions.contains=find
expressions.find=find
expressions.upper=upper
expressions.lower=lower
@@ -10698,6 +11454,9 @@ function expressions.contains(str,pattern)
end
return false
end
+function xml.expressions.idstring(str)
+ return type(str)=="string" and gsub(str,"^#","") or ""
+end
local function traverse(root,pattern,handle)
local collected=applylpath(root,pattern)
if collected then
@@ -10826,8 +11585,13 @@ function xml.elements(root,pattern,reverse)
local collected=applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c=#collected+1
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
return function()
if c>1 then
c=c-1
@@ -10837,7 +11601,7 @@ function xml.elements(root,pattern,reverse)
end
end
else
- local n,c=#collected,0
+ local c=0
return function()
if c<n then
c=c+1
@@ -10852,8 +11616,13 @@ function xml.collected(root,pattern,reverse)
local collected=applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c=#collected+1
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
return function()
if c>1 then
c=c-1
@@ -10861,7 +11630,7 @@ function xml.collected(root,pattern,reverse)
end
end
else
- local n,c=#collected,0
+ local c=0
return function()
if c<n then
c=c+1
@@ -10876,7 +11645,7 @@ function xml.inspect(collection,pattern)
report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
end
end
-local function split(e)
+local function split(e)
local dt=e.dt
if dt then
for i=1,#dt do
@@ -10975,7 +11744,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
--- original size: 23804, stripped down to: 16817
+-- original size: 28786, stripped down to: 20578
if not modules then modules={} end modules ['lxml-aux']={
version=1.001,
@@ -10985,16 +11754,19 @@ if not modules then modules={} end modules ['lxml-aux']={
license="see context related readme files"
}
local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local trace_inclusions=false trackers.register("lxml.inclusions",function(v) trace_inclusions=v end)
local report_xml=logs.reporter("xml")
local xml=xml
-local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlcopy,xmlname=xml.copy,xml.name
local xmlinheritedconvert=xml.inheritedconvert
local xmlapplylpath=xml.applylpath
local xmlfilter=xml.filter
-local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local type,next,setmetatable,getmetatable=type,next,setmetatable,getmetatable
local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local striplinepatterns=utilities.strings.striplinepatterns
local function report(what,pattern,c,e)
report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
@@ -11049,13 +11821,15 @@ end
function xml.each(root,pattern,handle,reverse)
local collected=xmlapplylpath(root,pattern)
if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
+ if handle then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
end
return collected
@@ -11111,6 +11885,7 @@ local function redo_ni(d)
end
end
end
+xml.reindex=redo_ni
local function xmltoelement(whatever,root)
if not whatever then
return nil
@@ -11162,8 +11937,16 @@ function xml.delete(root,pattern)
report('deleting',pattern,c,e)
end
local d=p.dt
- remove(d,e.ni)
- redo_ni(d)
+ local ni=e.ni
+ if ni<=#d then
+ if false then
+ p.dt[ni]=""
+ else
+ remove(d,ni)
+ redo_ni(d)
+ end
+ else
+ end
end
end
end
@@ -11283,28 +12066,40 @@ xml.insertafter=insert_element
xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
xml.injectafter=inject_element
xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
-local function include(xmldata,pattern,attribute,recursive,loaddata)
+local function include(xmldata,pattern,attribute,recursive,loaddata,level)
pattern=pattern or 'include'
loaddata=loaddata or io.loaddata
local collected=xmlapplylpath(xmldata,pattern)
if collected then
+ if not level then
+ level=1
+ end
for c=1,#collected do
local ek=collected[c]
local name=nil
local ekdt=ek.dt
local ekat=ek.at
- local epdt=ek.__p__.dt
+ local ekrt=ek.__p__
+ local epdt=ekrt.dt
if not attribute or attribute=="" then
name=(type(ekdt)=="table" and ekdt[1]) or ekdt
end
if not name then
for a in gmatch(attribute or "href","([^|]+)") do
name=ekat[a]
- if name then break end
+ if name then
+ break
+ end
+ end
+ end
+ local data=nil
+ if name and name~="" then
+ data=loaddata(name) or ""
+ if trace_inclusions then
+ report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
end
- local data=(name and name~="" and loaddata(name)) or ""
- if data=="" then
+ if not data or data=="" then
epdt[ek.ni]=""
elseif ekat["parse"]=="text" then
epdt[ek.ni]=xml.escaped(data)
@@ -11314,70 +12109,127 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
epdt[ek.ni]=""
else
if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
+ include(xi,pattern,attribute,recursive,loaddata,level+1)
+ end
+ local child=xml.body(xi)
+ child.__p__=ekrt
+ child.__f__=name
+ epdt[ek.ni]=child
+ local inclusions=xmldata.settings.inclusions
+ if inclusions then
+ inclusions[#inclusions+1]=name
+ else
+ xmldata.settings.inclusions={ name }
+ end
+ if child.er then
+ local badinclusions=xmldata.settings.badinclusions
+ if badinclusions then
+ badinclusions[#badinclusions+1]=name
+ else
+ xmldata.settings.badinclusions={ name }
+ end
end
- epdt[ek.ni]=xml.body(xi)
end
end
end
end
end
xml.include=include
+function xml.inclusion(e,default)
+ while e do
+ local f=e.__f__
+ if f then
+ return f
+ else
+ e=e.__p__
+ end
+ end
+ return default
+end
+local function getinclusions(key,e,sorted)
+ while e do
+ local settings=e.settings
+ if settings then
+ local inclusions=settings[key]
+ if inclusions then
+ inclusions=table.unique(inclusions)
+ if sorted then
+ table.sort(inclusions)
+ end
+ return inclusions
+ else
+ e=e.__p__
+ end
+ else
+ e=e.__p__
+ end
+ end
+end
+function xml.inclusions(e,sorted)
+ return getinclusions("inclusions",e,sorted)
+end
+function xml.badinclusions(e,sorted)
+ return getinclusions("badinclusions",e,sorted)
+end
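The include machinery above now tags every inserted subtree with the file it came from and collects the names in settings.inclusions (and settings.badinclusions for files that failed to parse). A sketch (not part of the diff) of reading that back after expansion, with a hypothetical file name:

local root = xml.convert(io.loaddata("main.xml") or "") -- hypothetical file
xml.include(root, "include", "href", true, io.loaddata)
local used = xml.inclusions(root, true) -- unique, sorted
if used then
  for i=1,#used do
    print("included file:", used[i])
  end
end
print("origin of body:", xml.inclusion(xml.body(root), "main.xml"))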
+local b_collapser=lpeg.patterns.b_collapser
+local m_collapser=lpeg.patterns.m_collapser
+local e_collapser=lpeg.patterns.e_collapser
+local b_stripper=lpeg.patterns.b_stripper
+local m_stripper=lpeg.patterns.m_stripper
+local e_stripper=lpeg.patterns.e_stripper
+local lpegmatch=lpeg.match
local function stripelement(e,nolines,anywhere)
local edt=e.dt
if edt then
- if anywhere then
- local t,n={},0
- for e=1,#edt do
+ local n=#edt
+ if n==0 then
+ return e
+ elseif anywhere then
+ local t={}
+ local m=0
+ for e=1,n do
local str=edt[e]
if type(str)~="string" then
- n=n+1
- t[n]=str
+ m=m+1
+ t[m]=str
elseif str~="" then
if nolines then
- str=gsub(str,"%s+"," ")
+ str=lpegmatch((n==1 and b_collapser) or (n==m and e_collapser) or m_collapser,str)
+ else
+ str=lpegmatch((n==1 and b_stripper) or (n==m and e_stripper) or m_stripper,str)
end
- str=gsub(str,"^%s*(.-)%s*$","%1")
if str~="" then
- n=n+1
- t[n]=str
+ m=m+1
+ t[m]=str
end
end
end
e.dt=t
else
- if #edt>0 then
- local str=edt[1]
- if type(str)~="string" then
- elseif str=="" then
+ local str=edt[1]
+ if type(str)=="string" then
+ if str~="" then
+ str=lpegmatch(nolines and b_collapser or b_stripper,str)
+ end
+ if str=="" then
remove(edt,1)
+ n=n-1
else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"^%s+","")
- if str=="" then
- remove(edt,1)
- else
- edt[1]=str
- end
+ edt[1]=str
end
end
- local nedt=#edt
- if nedt>0 then
- local str=edt[nedt]
- if type(str)~="string" then
- elseif str=="" then
- remove(edt)
- else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"%s+$","")
+ if n>0 then
+ str=edt[n]
+ if type(str)=="string" then
if str=="" then
remove(edt)
else
- edt[nedt]=str
+ str=lpegmatch(nolines and e_collapser or e_stripper,str)
+ if str=="" then
+ remove(edt)
+ else
+ edt[n]=str
+ end
end
end
end
@@ -11563,8 +12415,8 @@ function xml.finalizers.xml.cdata(collected)
end
return ""
end
-function xml.insertcomment(e,str,n)
- table.insert(e.dt,n or 1,{
+function xml.insertcomment(e,str,n)
+ insert(e.dt,n or 1,{
tg="@cm@",
ns="",
special=true,
@@ -11572,7 +12424,25 @@ function xml.insertcomment(e,str,n)
dt={ str },
})
end
-function xml.setcdata(e,str)
+function xml.insertcdata(e,str,n)
+ insert(e.dt,n or 1,{
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcomment(e,str,n)
+ e.dt={ {
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
+end
+function xml.setcdata(e,str)
e.dt={ {
tg="@cd@",
ns="",
@@ -11642,7 +12512,7 @@ local function recurse(e,action)
for i=1,#edt do
local str=edt[i]
if type(str)~="string" then
- recurse(str,action,recursive)
+ recurse(str,action)
elseif str~="" then
edt[i]=action(str)
end
@@ -11660,6 +12530,65 @@ function helpers.recursetext(collected,action,recursive)
end
end
end
+local specials={
+ ["@rt@"]="root",
+ ["@pi@"]="instruction",
+ ["@cm@"]="comment",
+ ["@dt@"]="declaration",
+ ["@cd@"]="cdata",
+}
+local function convert(x,strip,flat)
+ local ns=x.ns
+ local tg=x.tg
+ local at=x.at
+ local dt=x.dt
+ local node=flat and {
+ [0]=(not x.special and (ns~="" and ns..":"..tg or tg)) or nil,
+ } or {
+ _namespace=ns~="" and ns or nil,
+ _tag=not x.special and tg or nil,
+ _type=specials[tg] or "_element",
+ }
+ if at then
+ for k,v in next,at do
+ node[k]=v
+ end
+ end
+ local n=0
+ for i=1,#dt do
+ local di=dt[i]
+ if type(di)=="table" then
+ if flat and di.special then
+ else
+ di=convert(di,strip,flat)
+ if di then
+ n=n+1
+ node[n]=di
+ end
+ end
+ elseif strip then
+ di=lpegmatch(strip,di)
+ if di~="" then
+ n=n+1
+ node[n]=di
+ end
+ else
+ n=n+1
+ node[n]=di
+ end
+ end
+ if next(node) then
+ return node
+ end
+end
+function xml.totable(x,strip,flat)
+ if type(x)=="table" then
+ if strip then
+ strip=striplinepatterns[strip]
+ end
+ return convert(x,strip,flat)
+ end
+end
end -- of closure
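The convert/xml.totable pair added at the end of this closure turns a parsed tree into a plain Lua table: in flat mode the element name lands at index [0], attributes become string keys and children fill the numeric part, otherwise _tag, _namespace and _type carry that information. A small sketch (not part of the diff):

local x = xml.convert("<a one='1'><b>foo</b><b>bar</b></a>")
local t = xml.totable(x, false, true) -- no stripping, flat mode
-- t is expected to nest tables whose [0] holds the tag and whose numeric
-- entries hold text and child elements; table.serialize(t) shows the shape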
@@ -12216,7 +13145,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-ini"] = package.loaded["data-ini"] or true
--- original size: 7898, stripped down to: 5501
+-- original size: 11085, stripped down to: 7662
if not modules then modules={} end modules ['data-ini']={
version=1.001,
@@ -12225,14 +13154,15 @@ if not modules then modules={} end modules ['data-ini']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files",
}
+local next,type,getmetatable,rawset=next,type,getmetatable,rawset
local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
-local next,type=next,type
local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local ostype,osname,osuname,ossetenv,osgetenv=os.type,os.name,os.uname,os.setenv,os.getenv
+local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
local report_initialization=logs.reporter("resolvers","initialization")
-local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
resolvers=resolvers or {}
local resolvers=resolvers
texconfig.kpse_init=false
@@ -12360,15 +13290,108 @@ if not texroot or texroot=="" then
ossetenv('TEXROOT',texroot)
end
environment.texroot=file.collapsepath(texroot)
-if profiler then
+if type(profiler)=="table" and not jit then
directives.register("system.profile",function()
profiler.start("luatex-profile.log")
end)
end
-if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local resolved={}
+local abstract={}
+local dynamic={}
+function resolvers.resetresolve(str)
+ resolved,abstract={},{}
+end
+function resolvers.allprefixes(separator)
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
+ end
+ end
+ return all
+end
+local function _resolve_(method,target)
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+function resolvers.unresolve(str)
+ return abstract[str] or str
+end
+function resolvers.setdynamic(str)
+ dynamic[str]=true
+end
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local prefix=C(R("az")^2)*P(":")
+local target=C((1-S(" \"\';,"))^1)
+local notarget=(#S(";,")+P(-1))*Cc("")
+local p_resolve=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
+local p_simple=prefix*P(-1)
+local function resolve(str)
+ if type(str)=="table" then
+ local res={}
+ for i=1,#str do
+ res[i]=resolve(str[i])
+ end
+ return res
+ end
+ local res=resolved[str]
+ if res then
+ return res
+ end
+ local simple=lpegmatch(p_simple,str)
+ local action=prefixes[simple]
+ if action then
+ local res=action(res)
+ if not dynamic[simple] then
+ resolved[simple]=res
+ abstract[res]=simple
+ end
+ return res
+ end
+ res=lpegmatch(p_resolve,str)
+ resolved[str]=res
+ abstract[res]=str
+ return res
+end
+resolvers.resolve=resolve
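The prefix resolver added above dispatches "prefix:target" strings to handlers stored in resolvers.prefixes and caches the outcome in the resolved/abstract tables. A sketch (not part of the diff) with a made-up prefix; the built-in ones are registered elsewhere in the resolver code:

resolvers.prefixes["demo"] = function(target) -- hypothetical prefix
  return "/opt/demo/" .. target
end
print(resolvers.resolve("demo:fonts/test.lua"))
-- expected: /opt/demo/fonts/test.lua
resolvers.setdynamic("demo") -- bare "demo:" lookups are then not cached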
+if type(osuname)=="function" then
+ for k,v in next,osuname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
+ end
+ end
+end
+if ostype=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ table.setmetatablenewindex(prefixes,makepattern)
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
end
@@ -12378,7 +13401,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-exp"] = package.loaded["data-exp"] or true
--- original size: 15303, stripped down to: 9716
+-- original size: 17216, stripped down to: 10657
if not modules then modules={} end modules ['data-exp']={
version=1.001,
@@ -12392,12 +13415,16 @@ local concat,sort=table.concat,table.sort
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local Ct,Cs,Cc,Carg,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.P,lpeg.C,lpeg.S
local type,next=type,next
+local isdir=lfs.isdir
local ostype=os.type
-local collapsepath=file.collapsepath
+local collapsepath,joinpath,basename=file.collapsepath,file.join,file.basename
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_globbing=true trackers.register("resolvers.globbing",function(v) trace_globbing=v end)
local report_expansions=logs.reporter("resolvers","expansions")
+local report_globbing=logs.reporter("resolvers","globbing")
local resolvers=resolvers
+local resolveprefix=resolvers.resolve
local function f_both(a,b)
local t,n={},0
for sb in gmatch(b,"[^,]+") do
@@ -12487,35 +13514,27 @@ function resolvers.expandedpathfromlist(pathlist)
end
return newlist
end
-local cleanup=lpeg.replacer {
- { "!","" },
- { "\\","/" },
-}
-function resolvers.cleanpath(str)
- local doslashes=(P("\\")/"/"+1)^0
- local donegation=(P("!")/"" )^0
- local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
- if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return ""
- else
- return lpegmatch(cleanup,str)
+local usedhomedir=nil
+local donegation=(P("!")/"" )^0
+local doslashes=(P("\\")/"/"+1)^0
+local function expandedhome()
+ if not usedhomedir then
+ usedhomedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if usedhomedir=="~" or usedhomedir=="" or not isdir(usedhomedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent path using current path")
end
- end
- else
- local dohome=((P("~")+P("$HOME"))/homedir)^0
- local cleanup=Cs(donegation*dohome*doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
+ usedhomedir="."
end
end
- return resolvers.cleanpath(str)
+ return usedhomedir
end
-local expandhome=P("~")/"$HOME"
+local dohome=((P("~")+P("$HOME")+P("%HOME%"))/expandedhome)^0
+local cleanup=Cs(donegation*dohome*doslashes)
+resolvers.cleanpath=function(str)
+ return str and lpegmatch(cleanup,str) or ""
+end
+local expandhome=P("~")/"$HOME"
local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
local dostring=(expandhome+1 )^0
@@ -12567,46 +13586,67 @@ function resolvers.splitpath(str)
end
function resolvers.joinpath(str)
if type(str)=='table' then
- return file.joinpath(str)
+ return joinpath(str)
else
return str
end
end
local attributes,directory=lfs.attributes,lfs.dir
local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local lessweird=P(".")^1+lpeg.anywhere(S("~`#$%^&*:;\"\'||<>,?\n\r\t"))
local timer={}
local scanned={}
local nofscans=0
local scancache={}
-local function scan(files,spec,path,n,m,r)
- local full=(path=="" and spec) or (spec..path..'/')
+local fullcache={}
+local nofsharedscans=0
+local function scan(files,remap,spec,path,n,m,r,onlyone,tolerant)
+ local full=path=="" and spec or (spec..path..'/')
local dirs={}
local nofdirs=0
+ local pattern=tolerant and lessweird or weird
for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
+ if not lpegmatch(pattern,name) then
+ local mode=attributes(full..name,"mode")
+ if mode=="file" then
n=n+1
- local f=files[name]
- if f then
- if type(f)=='string' then
- files[name]={ f,path }
+ local lower=lower(name)
+ local paths=files[lower]
+ if paths then
+ if onlyone then
else
- f[#f+1]=path
+ if type(paths)=="string" then
+ files[lower]={ paths,path }
+ else
+ paths[#paths+1]=path
+ end
+ if name~=lower then
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
+ end
end
else
- files[name]=path
- local lower=lower(name)
+ files[lower]=path
if name~=lower then
- files["remap:"..lower]=name
- r=r+1
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
end
end
- elseif mode=='directory' then
+ elseif mode=="directory" then
m=m+1
nofdirs=nofdirs+1
if path~="" then
- dirs[nofdirs]=path..'/'..name
+ dirs[nofdirs]=path.."/"..name
else
dirs[nofdirs]=name
end
@@ -12616,107 +13656,69 @@ local function scan(files,spec,path,n,m,r)
if nofdirs>0 then
sort(dirs)
for i=1,nofdirs do
- files,n,m,r=scan(files,spec,dirs[i],n,m,r)
+ files,remap,n,m,r=scan(files,remap,spec,dirs[i],n,m,r,onlyonce,tolerant)
end
end
scancache[sub(full,1,-2)]=files
- return files,n,m,r
+ return files,remap,n,m,r
end
-local fullcache={}
-function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
+function resolvers.scanfiles(path,branch,usecache,onlyonce,tolerant)
+ local realpath=resolveprefix(path)
if usecache then
- local files=fullcache[realpath]
- if files then
+ local content=fullcache[realpath]
+ if content then
if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ report_expansions("using cached scan of path %a, branch %a",path,branch or path)
end
- return files
+ nofsharedscans=nofsharedscans+1
+ return content
end
end
+ statistics.starttiming(timer)
if trace_locating then
report_expansions("scanning path %a, branch %a",path,branch or path)
end
- local files,n,m,r=scan({},realpath..'/',"",0,0,0)
- files.__path__=path
- files.__files__=n
- files.__directories__=m
- files.__remappings__=r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1]=realpath
- fullcache[realpath]=files
- end
- nofscans=nofscans+1
- statistics.stoptiming(timer)
- return files
-end
-local function simplescan(files,spec,path)
- local full=(path=="" and spec) or (spec..path..'/')
- local dirs={}
- local nofdirs=0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
- if not files[name] then
- files[name]=path
- end
- elseif mode=='directory' then
- nofdirs=nofdirs+1
- if path~="" then
- dirs[nofdirs]=path..'/'..name
- else
- dirs[nofdirs]=name
- end
- end
- end
- end
- if nofdirs>0 then
- sort(dirs)
- for i=1,nofdirs do
- files=simplescan(files,spec,dirs[i])
- end
- end
- return files
-end
-local simplecache={}
-local nofsharedscans=0
-function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
- if usecache then
- local files=simplecache[realpath]
- if not files then
- files=scancache[realpath]
- if files then
- nofsharedscans=nofsharedscans+1
- end
+ local content
+ if isdir(realpath) then
+ local files,remap,n,m,r=scan({},{},realpath..'/',"",0,0,0,onlyonce,tolerant)
+ content={
+ metadata={
+ path=path,
+ files=n,
+ directories=m,
+ remappings=r,
+ },
+ files=files,
+ remap=remap,
+ }
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
- end
- return files
+ else
+ content={
+ metadata={
+ path=path,
+ files=0,
+ directories=0,
+ remappings=0,
+ },
+ files={},
+ remap={},
+ }
+ if trace_locating then
+ report_expansions("invalid path %a",realpath)
end
end
- if trace_locating then
- report_expansions("scanning path %a, branch %a",path,branch or path)
- end
- local files=simplescan({},realpath..'/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
if usecache then
scanned[#scanned+1]=realpath
- simplecache[realpath]=files
+ fullcache[realpath]=content
end
nofscans=nofscans+1
statistics.stoptiming(timer)
- return files
+ return content
+end
+function resolvers.simplescanfiles(path,branch,usecache)
+ return resolvers.scanfiles(path,branch,usecache,true,true)
end
function resolvers.scandata()
table.sort(scanned)
@@ -12727,6 +13729,52 @@ function resolvers.scandata()
paths=scanned,
}
end
+function resolvers.get_from_content(content,path,name)
+ if not content then
+ return
+ end
+ local files=content.files
+ if not files then
+ return
+ end
+ local remap=content.remap
+ if not remap then
+ return
+ end
+ if name then
+ local used=lower(name)
+ return path,remap[used] or used
+ else
+ local name=path
+ local used=lower(name)
+ local path=files[used]
+ if path then
+ return path,remap[used] or used
+ end
+ end
+end
+local nothing=function() end
+function resolvers.filtered_from_content(content,pattern)
+ if content and type(pattern)=="string" then
+ local pattern=lower(pattern)
+ local files=content.files
+ local remap=content.remap
+ if files and remap then
+ local n=next(files)
+ local function iterator()
+ while n do
+ local k=n
+ n=next(files,k)
+ if find(k,pattern) then
+ return files[k],remap and remap[k] or k
+ end
+ end
+ end
+ return iterator
+ end
+ end
+ return nothing
+end
end -- of closure
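The scanner rewrite above returns a structured content table (metadata plus lowercased files and a remap table), and the two new helpers provide the lookups. A short sketch (not part of the diff) with a hypothetical path:

local content = resolvers.scanfiles("/opt/texmf-local", nil, true) -- hypothetical tree
print(content.metadata.files, content.metadata.directories, content.metadata.remappings)
-- case-insensitive lookup: returns the sub path plus the original spelling
local path, name = resolvers.get_from_content(content, "MyStyle.tex")
-- iterate over entries whose lowercased name matches a lua pattern
for p, n in resolvers.filtered_from_content(content, "%.mkiv$") do
  print(p, n)
end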
@@ -12735,7 +13783,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 8769, stripped down to: 6490
+-- original size: 9216, stripped down to: 6798
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -12753,10 +13801,12 @@ local formats=allocate()
local suffixes=allocate()
local dangerous=allocate()
local suffixmap=allocate()
+local usertypes=allocate()
resolvers.formats=formats
resolvers.suffixes=suffixes
resolvers.dangerous=dangerous
resolvers.suffixmap=suffixmap
+resolvers.usertypes=usertypes
local luasuffixes=utilities.lua.suffixes
local relations=allocate {
core={
@@ -12824,11 +13874,13 @@ local relations=allocate {
names={ "mp" },
variable='MPINPUTS',
suffixes={ 'mp','mpvi','mpiv','mpii' },
+ usertype=true,
},
tex={
names={ "tex" },
variable='TEXINPUTS',
- suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ suffixes={ "tex","mkvi","mkiv","mkii","cld","lfg","xml" },
+ usertype=true,
},
icc={
names={ "icc","icc profile","icc profiles" },
@@ -12844,6 +13896,7 @@ local relations=allocate {
names={ "lua" },
variable='LUAINPUTS',
suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ usertype=true,
},
lib={
names={ "lib" },
@@ -12852,11 +13905,15 @@ local relations=allocate {
},
bib={
names={ 'bib' },
+ variable='BIBINPUTS',
suffixes={ 'bib' },
+ usertype=true,
},
bst={
names={ 'bst' },
+ variable='BSTINPUTS',
suffixes={ 'bst' },
+ usertype=true,
},
fontconfig={
names={ 'fontconfig','fontconfig file','fontconfig files' },
@@ -12938,8 +13995,9 @@ function resolvers.updaterelations()
for name,relation in next,categories do
local rn=relation.names
local rv=relation.variable
- local rs=relation.suffixes
if rn and rv then
+ local rs=relation.suffixes
+ local ru=relation.usertype
for i=1,#rn do
local rni=lower(gsub(rn[i]," ",""))
formats[rni]=rv
@@ -12951,8 +14009,9 @@ function resolvers.updaterelations()
end
end
end
- end
- if rs then
+ if ru then
+ usertypes[name]=true
+ end
end
end
end
@@ -13003,7 +14062,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
--- original size: 15532, stripped down to: 11648
+-- original size: 15618, stripped down to: 11629
if not modules then modules={} end modules ['data-tmp']={
version=1.100,
@@ -13013,7 +14072,7 @@ if not modules then modules={} end modules ['data-tmp']={
license="see context related readme files"
}
local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
-local concat,serialize,serializetofile=table.concat,table.serialize,table.tofile
+local concat=table.concat
local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
local formatters=string.formatters
@@ -13022,6 +14081,7 @@ local trace_cache=false trackers.register("resolvers.cache",function(v) trace_ca
local report_caches=logs.reporter("resolvers","caches")
local report_resolvers=logs.reporter("resolvers","caching")
local resolvers=resolvers
+local cleanpath=resolvers.cleanpath
local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
local compile=utilities.lua.compile
@@ -13043,7 +14103,7 @@ caches.relocate=false
caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
local writable,readables,usedreadables=nil,{},{}
local function identify()
- local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
if texmfcaches then
for k=1,#texmfcaches do
local cachepath=texmfcaches[k]
@@ -13281,15 +14341,11 @@ end
local saveoptions={ compact=true }
function caches.savedata(filepath,filename,data,raw)
local tmaname,tmcname=caches.setluanames(filepath,filename)
- local reduce,simplify=true,true
- if raw then
- reduce,simplify=false,false
- end
data.cache_uuid=os.uuid()
if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
+ file.savedata(tmaname,table.serialize(data,true,saveoptions))
else
- serializetofile(tmaname,data,true,saveoptions)
+ table.tofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -13297,10 +14353,12 @@ local content_state={}
function caches.contentstate()
return content_state or {}
end
-function caches.loadcontent(cachename,dataname)
- local name=caches.hashed(cachename)
- local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
+function caches.loadcontent(cachename,dataname,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
if blob then
local data=blob()
@@ -13332,10 +14390,12 @@ function caches.collapsecontent(content)
end
end
end
-function caches.savecontent(cachename,dataname,content)
- local name=caches.hashed(cachename)
- local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
+function caches.savecontent(cachename,dataname,content,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
local luaname=addsuffix(filename,luasuffixes.lua)
local lucname=addsuffix(filename,luasuffixes.luc)
if trace_locating then
@@ -13350,7 +14410,7 @@ function caches.savecontent(cachename,dataname,content)
content=content,
uuid=os.uuid(),
}
- local ok=io.savedata(luaname,serialize(data,true))
+ local ok=io.savedata(luaname,table.serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
@@ -13378,7 +14438,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-met"] = package.loaded["data-met"] or true
--- original size: 5453, stripped down to: 4007
+-- original size: 5347, stripped down to: 4015
if not modules then modules={} end modules ['data-met']={
version=1.100,
@@ -13406,8 +14466,8 @@ local function splitmethod(filename)
if type(filename)=="table" then
return filename
end
- filename=file.collapsepath(filename,".")
- if not find(filename,"://") then
+ filename=file.collapsepath(filename,".")
+ if not find(filename,"://",1,true) then
return { scheme="file",path=filename,original=filename,filename=filename }
end
local specification=url.hashed(filename)
@@ -13497,7 +14557,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 61799, stripped down to: 42957
+-- original size: 67003, stripped down to: 46291
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -13507,7 +14567,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -13516,27 +14576,38 @@ local formatters=string.formatters
local filedirname=file.dirname
local filebasename=file.basename
local suffixonly=file.suffixonly
+local addsuffix=file.addsuffix
+local removesuffix=file.removesuffix
local filejoin=file.join
local collapsepath=file.collapsepath
local joinpath=file.joinpath
+local is_qualified_path=file.is_qualified_path
local allocate=utilities.storage.allocate
local settings_to_array=utilities.parsers.settings_to_array
+local getcurrentdir=lfs.currentdir
+local isfile=lfs.isfile
+local isdir=lfs.isdir
local setmetatableindex=table.setmetatableindex
local luasuffixes=utilities.lua.suffixes
-local getcurrentdir=lfs.currentdir
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
-local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_locating=false trackers .register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers .register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers .register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_paths=false trackers .register("resolvers.paths",function(v) trace_paths=v end)
+local resolve_otherwise=true directives.register("resolvers.otherwise",function(v) resolve_otherwise=v end)
local report_resolving=logs.reporter("resolvers","resolving")
local resolvers=resolvers
local expandedpathfromlist=resolvers.expandedpathfromlist
local checkedvariable=resolvers.checkedvariable
local splitconfigurationpath=resolvers.splitconfigurationpath
local methodhandler=resolvers.methodhandler
+local filtered=resolvers.filtered_from_content
+local lookup=resolvers.get_from_content
+local cleanpath=resolvers.cleanpath
+local resolveprefix=resolvers.resolve
local initializesetter=utilities.setters.initialize
local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
-resolvers.cacheversion='1.0.1'
-resolvers.configbanner=''
+resolvers.cacheversion="1.100"
+resolvers.configbanner=""
resolvers.homedir=environment.homedir
resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
resolvers.luacnfname="texmfcnf.lua"
@@ -13555,6 +14626,7 @@ end
local unset_variable="unset"
local formats=resolvers.formats
local suffixes=resolvers.suffixes
+local usertypes=resolvers.usertypes
local dangerous=resolvers.dangerous
local suffixmap=resolvers.suffixmap
resolvers.defaultsuffixes={ "tex" }
@@ -13563,7 +14635,7 @@ local instance=resolvers.instance or nil
function resolvers.setenv(key,value,raw)
if instance then
instance.environment[key]=value
- ossetenv(key,raw and value or resolvers.resolve(value))
+ ossetenv(key,raw and value or resolveprefix(value))
end
end
local function getenv(key)
@@ -13577,7 +14649,7 @@ local function getenv(key)
end
resolvers.getenv=getenv
resolvers.env=getenv
-local function resolve(k)
+local function resolvevariable(k)
return instance.expansions[k]
end
local dollarstripper=lpeg.stripper("$")
@@ -13586,19 +14658,19 @@ local backslashswapper=lpeg.replacer("\\","/")
local somevariable=P("$")/""
local somekey=C(R("az","AZ","09","__","--")^1)
local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
-local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local variableexpander=Cs((somevariable*(somekey/resolvevariable)+somethingelse)^1 )
local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
local variablecleaner=Cs((cleaner+P(1))^0)
-local somevariable=R("az","AZ","09","__","--")^1/resolve
+local somevariable=R("az","AZ","09","__","--")^1/resolvevariable
local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
local variableresolver=Cs((variable+P(1))^0)
local function expandedvariable(var)
return lpegmatch(variableexpander,var) or var
end
-function resolvers.newinstance()
- if trace_locating then
+function resolvers.newinstance()
+ if trace_locating then
report_resolving("creating instance")
- end
+ end
local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
local newinstance={
environment=environment,
@@ -13611,6 +14683,7 @@ function resolvers.newinstance()
foundintrees=allocate(),
hashes=allocate(),
hashed=allocate(),
+ pathlists=false,
specification=allocate(),
lists=allocate(),
data=allocate(),
@@ -13623,6 +14696,7 @@ function resolvers.newinstance()
savelists=true,
pattern=nil,
force_suffixes=true,
+ pathstack={},
}
setmetatableindex(variables,function(t,k)
local v
@@ -13672,8 +14746,13 @@ function resolvers.reset()
end
local function reset_hashes()
instance.lists={}
+ instance.pathlists=false
instance.found={}
end
+local function reset_caches()
+ instance.lists={}
+ instance.pathlists=false
+end
local slash=P("/")
local pathexpressionpattern=Cs (
Cc("^")*(
@@ -13725,13 +14804,13 @@ local function identify_configuration_files()
for i=1,#cnfpaths do
local filepath=cnfpaths[i]
local filename=collapsepath(filejoin(filepath,luacnfname))
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
if trace_locating then
- local fullpath=gsub(resolvers.resolve(collapsepath(filepath)),"//","/")
- local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c")
+ local fullpath=gsub(resolveprefix(collapsepath(filepath)),"//","/")
+ local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c",1,true)
report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath)
end
- if lfs.isfile(realname) then
+ if isfile(realname) then
specification[#specification+1]=filename
if trace_locating then
report_resolving("found configuration file %a",realname)
@@ -13753,7 +14832,7 @@ local function load_configuration_files()
local filename=specification[i]
local pathname=filedirname(filename)
local filename=filejoin(pathname,luacnfname)
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
local blob=loadfile(realname)
if blob then
local setups=instance.setups
@@ -13761,7 +14840,7 @@ local function load_configuration_files()
local parent=data and data.parent
if parent then
local filename=filejoin(pathname,parent)
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
local blob=loadfile(realname)
if blob then
local parentdata=blob()
@@ -13786,7 +14865,7 @@ local function load_configuration_files()
elseif variables[k]==nil then
if trace_locating and not warning then
report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
+ k,resolveprefix(filename))
warning=true
end
variables[k]=v
@@ -13846,7 +14925,7 @@ local function locate_file_databases()
local stripped=lpegmatch(inhibitstripper,path)
if stripped~="" then
local runtime=stripped==path
- path=resolvers.cleanpath(path)
+ path=cleanpath(path)
local spec=resolvers.splitmethod(stripped)
if runtime and (spec.noscheme or spec.scheme=="file") then
stripped="tree:///"..stripped
@@ -13909,8 +14988,8 @@ function resolvers.renew(hashname)
report_resolving("identifying tree %a",hashname)
end
end
- local realpath=resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
+ local realpath=resolveprefix(hashname)
+ if isdir(realpath) then
if trace_locating then
report_resolving("using path %a",realpath)
end
@@ -14011,19 +15090,53 @@ end
function resolvers.unexpandedpath(str)
return joinpath(resolvers.unexpandedpathlist(str))
end
+function resolvers.pushpath(name)
+ local pathstack=instance.pathstack
+ local lastpath=pathstack[#pathstack]
+ local pluspath=filedirname(name)
+ if lastpath then
+ lastpath=collapsepath(filejoin(lastpath,pluspath))
+ else
+ lastpath=collapsepath(pluspath)
+ end
+ insert(pathstack,lastpath)
+ if trace_paths then
+ report_resolving("pushing path %a",lastpath)
+ end
+end
+function resolvers.poppath()
+ local pathstack=instance.pathstack
+ if trace_paths and #pathstack>0 then
+ report_resolving("popping path %a",pathstack[#pathstack])
+ end
+ remove(pathstack)
+end
+function resolvers.stackpath()
+ local pathstack=instance.pathstack
+ local currentpath=pathstack[#pathstack]
+ return currentpath~="" and currentpath or nil
+end
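-- usage sketch (hypothetical file names, for illustration): the path stack lets
-- relative names be resolved against the file that pulled them in, e.g.
--   resolvers.pushpath("manual/chapters/intro.tex") -- pushes "manual/chapters"
--   local here = resolvers.stackpath()              -- "manual/chapters", or nil when empty
--   resolvers.poppath()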
local done={}
function resolvers.resetextrapath()
local ep=instance.extra_paths
if not ep then
- ep,done={},{}
- instance.extra_paths=ep
+ done={}
+ instance.extra_paths={}
elseif #ep>0 then
- instance.lists,done={},{}
+ done={}
+ reset_caches()
end
end
function resolvers.registerextrapath(paths,subpaths)
- paths=settings_to_array(paths)
- subpaths=settings_to_array(subpaths)
+ if not subpaths or subpaths=="" then
+ if not paths or paths=="" then
+ return
+ elseif done[paths] then
+ return
+ end
+ end
+ local paths=settings_to_array(paths)
+ local subpaths=settings_to_array(subpaths)
local ep=instance.extra_paths or {}
local oldn=#ep
local newn=oldn
@@ -14038,7 +15151,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps=p.."/"..s
if not done[ps] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
+ ep[newn]=cleanpath(ps)
done[ps]=true
end
end
@@ -14048,7 +15161,7 @@ function resolvers.registerextrapath(paths,subpaths)
local p=paths[i]
if not done[p] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(p)
+ ep[newn]=cleanpath(p)
done[p]=true
end
end
@@ -14060,7 +15173,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps=ep[i].."/"..s
if not done[ps] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
+ ep[newn]=cleanpath(ps)
done[ps]=true
end
end
@@ -14069,52 +15182,70 @@ function resolvers.registerextrapath(paths,subpaths)
if newn>0 then
instance.extra_paths=ep
end
- if newn>oldn then
- instance.lists={}
+ if newn~=oldn then
+ reset_caches()
end
end
-local function made_list(instance,list)
- local ep=instance.extra_paths
- if not ep or #ep==0 then
- return list
+function resolvers.pushextrapath(path)
+ local paths=settings_to_array(path)
+ if instance.extra_stack then
+ insert(instance.extra_stack,1,paths)
else
- local done,new,newn={},{},0
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v]=true
- newn=newn+1
- new[newn]=v
- else
- break
- end
- end
- end
- for k=1,#ep do
- local v=ep[k]
+ instance.extra_stack={ paths }
+ end
+ reset_caches()
+end
+function resolvers.popextrapath()
+ if instance.extra_stack then
+ reset_caches()
+ return remove(instance.extra_stack,1)
+ end
+end
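-- usage sketch (hypothetical paths, for illustration): paths pushed on extra_stack
-- only enter a search list when made_list is asked for them, which the reworked
-- expandedpathlist does through its second argument, roughly
--   resolvers.pushextrapath("./images,./fonts")
--   local lst = resolvers.expandedpathlist("TEXINPUTS",true) -- includes the pushed paths
--   resolvers.popextrapath()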
+local function made_list(instance,list,extra_too)
+ local done={}
+ local new={}
+ local newn=0
+ local function add(p)
+ for k=1,#p do
+ local v=p[k]
if not done[v] then
done[v]=true
newn=newn+1
new[newn]=v
end
end
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- done[v]=true
- newn=newn+1
- new[newn]=v
+ end
+ for k=1,#list do
+ local v=list[k]
+ if done[v] then
+ elseif find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ else
+ break
+ end
+ end
+ if extra_too then
+ local es=instance.extra_stack
+ if es and #es>0 then
+ for k=1,#es do
+ add(es[k])
end
end
- return new
+ local ep=instance.extra_paths
+ if ep and #ep>0 then
+ add(ep)
+ end
end
+ add(list)
+ return new
end
function resolvers.cleanpathlist(str)
local t=resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i]=collapsepath(resolvers.cleanpath(t[i]))
+ t[i]=collapsepath(cleanpath(t[i]))
end
end
return t
@@ -14122,22 +15253,22 @@ end
function resolvers.expandpath(str)
return joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expandedpathlist(str)
+function resolvers.expandedpathlist(str,extra_too)
if not str then
return {}
- elseif instance.savelists then
+ elseif instance.savelists then
str=lpegmatch(dollarstripper,str)
local lists=instance.lists
local lst=lists[str]
if not lst then
- local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)),extra_too)
lst=expandedpathfromlist(l)
lists[str]=lst
end
return lst
else
local lst=resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ return made_list(instance,expandedpathfromlist(lst),extra_too)
end
end
function resolvers.expandedpathlistfromvariable(str)
@@ -14148,6 +15279,13 @@ end
function resolvers.expandpathfromvariable(str)
return joinpath(resolvers.expandedpathlistfromvariable(str))
end
+function resolvers.cleanedpathlist(v)
+ local t=resolvers.expandedpathlist(v)
+ for i=1,#t do
+ t[i]=resolvers.resolve(resolvers.cleanpath(t[i]))
+ end
+ return t
+end
function resolvers.expandbraces(str)
local ori=str
local pth=expandedpathfromlist(resolvers.splitpath(ori))
@@ -14164,7 +15302,7 @@ function resolvers.registerfilehash(name,content,someerror)
end
end
local function isreadable(name)
- local readable=lfs.isfile(name)
+ local readable=isfile(name)
if trace_detail then
if readable then
report_resolving("file %a is readable",name)
@@ -14174,70 +15312,57 @@ local function isreadable(name)
end
return readable
end
-local function collect_files(names)
- local filelist,noffiles={},0
+local function collect_files(names)
+ local filelist={}
+ local noffiles=0
+ local function check(hash,root,pathname,path,name)
+ if not pathname or find(path,pathname) then
+ local variant=hash.type
+ local search=filejoin(root,path,name)
+ local result=methodhandler('concatinators',variant,root,path,name)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ end
for k=1,#names do
- local fname=names[k]
+ local filename=names[k]
if trace_detail then
- report_resolving("checking name %a",fname)
+ report_resolving("checking name %a",filename)
end
- local bname=filebasename(fname)
- local dname=filedirname(fname)
- if dname=="" or find(dname,"^%.") then
- dname=false
+ local basename=filebasename(filename)
+ local pathname=filedirname(filename)
+ if pathname=="" or find(pathname,"^%.") then
+ pathname=false
else
- dname=gsub(dname,"%*",".*")
- dname="/"..dname.."$"
+ pathname=gsub(pathname,"%*",".*")
+ pathname="/"..pathname.."$"
end
local hashes=instance.hashes
for h=1,#hashes do
local hash=hashes[h]
- local blobpath=hash.name
- local files=blobpath and instance.files[blobpath]
- if files then
+ local hashname=hash.name
+ local content=hashname and instance.files[hashname]
+ if content then
if trace_detail then
- report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ report_resolving("deep checking %a, base %a, pattern %a",hashname,basename,pathname)
end
- local blobfile=files[bname]
- if not blobfile then
- local rname="remap:"..bname
- blobfile=files[rname]
- if blobfile then
- bname=files[rname]
- blobfile=files[bname]
- end
- end
- if blobfile then
- local blobroot=files.__path__ or blobpath
- if type(blobfile)=='string' then
- if not dname or find(blobfile,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,blobfile,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
+ local path,name=lookup(content,basename)
+ if path then
+ local metadata=content.metadata
+ local realroot=metadata and metadata.path or hashname
+ if type(path)=="string" then
+ check(hash,realroot,pathname,path,name)
else
- for kk=1,#blobfile do
- local vv=blobfile[kk]
- if not dname or find(vv,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,vv,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
+ for i=1,#path do
+ check(hash,realroot,pathname,path[i],name)
end
end
end
elseif trace_locating then
- report_resolving("no match in %a (%s)",blobpath,bname)
+ report_resolving("no match in %a (%s)",hashname,basename)
end
end
end
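-- illustrative sketch (hypothetical file and paths): collect_files now asks
-- resolvers.get_from_content (the local "lookup") for a base name and gets back
-- either a single path string or a table of paths, both handled by check(), e.g.
--   local path,name = lookup(content,"context.mkiv")
--   -- path == "tex/context/base"  or  path == { "tex/context/base","tex/context/patches" }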
@@ -14262,7 +15387,7 @@ end
local function can_be_dir(name)
local fakepaths=instance.fakepaths
if not fakepaths[name] then
- if lfs.isdir(name) then
+ if isdir(name) then
fakepaths[name]=1
else
fakepaths[name]=2
@@ -14278,10 +15403,11 @@ local function find_analyze(filename,askedformat,allresults)
if askedformat=="" then
if ext=="" or not suffixmap[ext] then
local defaultsuffixes=resolvers.defaultsuffixes
+ local formatofsuffix=resolvers.formatofsuffix
for i=1,#defaultsuffixes do
local forcedname=filename..'.'..defaultsuffixes[i]
wantedfiles[#wantedfiles+1]=forcedname
- filetype=resolvers.formatofsuffix(forcedname)
+ filetype=formatofsuffix(forcedname)
if trace_locating then
report_resolving("forcing filetype %a",filetype)
end
@@ -14317,18 +15443,18 @@ local function find_direct(filename,allresults)
end
end
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
+ if find(filename,'*',1,true) then
if trace_locating then
report_resolving("checking wildcard %a",filename)
end
- local method,result=resolvers.findwildcardfiles(filename)
+ local result=resolvers.findwildcardfiles(filename)
if result then
return "wildcard",result
end
end
end
local function find_qualified(filename,allresults,askedformat,alsostripped)
- if not file.is_qualified_path(filename) then
+ if not is_qualified_path(filename) then
return
end
if trace_locating then
@@ -14402,33 +15528,66 @@ local function check_subpath(fname)
return fname
end
end
-local function find_intree(filename,filetype,wantedfiles,allresults)
+local function makepathlist(list,filetype)
local typespec=resolvers.variableofformat(filetype)
- local pathlist=resolvers.expandedpathlist(typespec)
- local method="intree"
+ local pathlist=resolvers.expandedpathlist(typespec,filetype and usertypes[filetype])
+ local entry={}
if pathlist and #pathlist>0 then
- local filelist=collect_files(wantedfiles)
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local prescanned=find(path,'^!!')
+ local recursive=find(path,'//$')
+ local pathname=lpegmatch(inhibitstripper,path)
+ local expression=makepathexpression(pathname)
+ local barename=gsub(pathname,"/+$","")
+ barename=resolveprefix(barename)
+ local scheme=url.hasscheme(barename)
+ local schemename=gsub(barename,"%.%*$",'')
+ entry[k]={
+ path=path,
+ pathname=pathname,
+ prescanned=prescanned,
+ recursive=recursive,
+ expression=expression,
+ barename=barename,
+ scheme=scheme,
+ schemename=schemename,
+ }
+ end
+ entry.typespec=typespec
+ list[filetype]=entry
+ else
+ list[filetype]=false
+ end
+ return entry
+end
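-- sketch (for illustration): per-filetype search data is now built once by
-- makepathlist and cached through a metatable index, so repeated lookups reuse
-- the parsed entries until reset_caches() wipes them, roughly
--   instance.pathlists = setmetatableindex(allocate(),makepathlist)
--   local entries = instance.pathlists["tex"] -- built on first access, then cached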
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local pathlists=instance.pathlists
+ if not pathlists then
+ pathlists=setmetatableindex(allocate(),makepathlist)
+ instance.pathlists=pathlists
+ end
+ local pathlist=pathlists[filetype]
+ if pathlist then
+ local method="intree"
+ local filelist=collect_files(wantedfiles)
local dirlist={}
+ local result={}
if filelist then
for i=1,#filelist do
dirlist[i]=filedirname(filelist[i][3]).."/"
end
end
if trace_detail then
- report_resolving("checking filename %a",filename)
+ report_resolving("checking filename %a in tree",filename)
end
- local resolve=resolvers.resolve
- local result={}
for k=1,#pathlist do
- local path=pathlist[k]
- local pathname=lpegmatch(inhibitstripper,path)
- local doscan=path==pathname
- if not find (pathname,'//$') then
- doscan=false
- end
+ local entry=pathlist[k]
+ local path=entry.path
+ local pathname=entry.pathname
local done=false
if filelist then
- local expression=makepathexpression(pathname)
+ local expression=entry.expression
if trace_detail then
report_resolving("using pattern %a for path %a",expression,pathname)
end
@@ -14436,8 +15595,8 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
local fl=filelist[k]
local f=fl[2]
local d=dirlist[k]
- if find(d,expression) or find(resolve(d),expression) then
- result[#result+1]=resolve(fl[3])
+ if find(d,expression) or find(resolveprefix(d),expression) then
+ result[#result+1]=resolveprefix(fl[3])
done=true
if allresults then
if trace_detail then
@@ -14458,56 +15617,62 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
method="database"
else
method="filesystem"
- pathname=gsub(pathname,"/+$","")
- pathname=resolve(pathname)
- local scheme=url.hasscheme(pathname)
+ local scheme=entry.scheme
if not scheme or scheme=="file" then
- local pname=gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
+ local pname=entry.schemename
+ if not find(pname,"*",1,true) then
if can_be_dir(pname) then
- for k=1,#wantedfiles do
- local w=wantedfiles[k]
- local fname=check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
+ if not done and not entry.prescanned then
+ if trace_detail then
+ report_resolving("quick root scan for %a",pname)
end
- end
- if not done and doscan then
- local files=resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w=wantedfiles[k]
- local subpath=files[w]
- if not subpath or subpath=="" then
- elseif type(subpath)=="string" then
- local fname=check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
end
- else
- for i=1,#subpath do
- local sp=subpath[i]
- if sp=="" then
- else
- local fname=check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
+ end
+ end
+ if not done and entry.recursive then
+ if trace_detail then
+ report_resolving("scanning filesystem for %a",pname)
+ end
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
end
end
end
- end
- if done and not allresults then
- break
+ if done and not allresults then
+ break
+ end
end
end
end
@@ -14515,6 +15680,18 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
else
end
+ else
+ for k=1,#wantedfiles do
+ local pname=entry.barename
+ local fname=methodhandler('finders',pname.."/"..wantedfiles[k])
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
end
end
if done and not allresults then
@@ -14549,10 +15726,13 @@ local function find_otherwise(filename,filetype,wantedfiles,allresults)
local filelist=collect_files(wantedfiles)
local fl=filelist and filelist[1]
if fl then
- return "otherwise",{ resolvers.resolve(fl[3]) }
+ return "otherwise",{ resolveprefix(fl[3]) }
end
end
collect_instance_files=function(filename,askedformat,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
askedformat=askedformat or ""
filename=collapsepath(filename,".")
filename=gsub(filename,"^%./",getcurrentdir().."/")
@@ -14587,7 +15767,11 @@ collect_instance_files=function(filename,askedformat,allresults)
else
local method,result,stamp,filetype,wantedfiles
if instance.remember then
- stamp=formatters["%s--%s"](filename,askedformat)
+ if askedformat=="" then
+ stamp=formatters["%s::%s"](suffixonly(filename),filename)
+ else
+ stamp=formatters["%s::%s"](askedformat,filename)
+ end
result=stamp and instance.found[stamp]
if result then
if trace_locating then
@@ -14606,7 +15790,7 @@ collect_instance_files=function(filename,askedformat,allresults)
method,result=find_intree(filename,filetype,wantedfiles)
if not result then
method,result=find_onpath(filename,filetype,wantedfiles)
- if not result then
+ if resolve_otherwise and not result then
method,result=find_otherwise(filename,filetype,wantedfiles)
end
end
@@ -14622,7 +15806,7 @@ collect_instance_files=function(filename,askedformat,allresults)
end
if stamp then
if trace_locating then
- report_resolving("remembering file %a",filename)
+ report_resolving("remembering file %a using hash %a",filename,stamp)
end
instance.found[stamp]=result
end
@@ -14630,6 +15814,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -14649,39 +15836,30 @@ function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
- local bname,result=filebasename(filename),{}
+ local base=filebasename(filename)
+ local result={}
local hashes=instance.hashes
- local noffound=0
+ local function okay(hash,path,name)
+ local found=methodhandler('concatinators',hash.type,hash.name,path,name)
+ if found and found~="" then
+ result[#result+1]=resolveprefix(found)
+ return not allresults
+ end
+ end
for k=1,#hashes do
local hash=hashes[k]
- local files=instance.files[hash.name] or {}
- local blist=files[bname]
- if not blist then
- local rname="remap:"..bname
- blist=files[rname]
- if blist then
- bname=files[rname]
- blist=files[bname]
- end
- end
- if blist then
- if type(blist)=='string' then
- local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then
- break
- end
+ local content=instance.files[hash.name]
+ if content then
+ local path,name=lookup(content,base)
+ if not path then
+ elseif type(path)=="string" then
+ if okay(hash,path,name) then
+ return result
end
else
- for kk=1,#blist do
- local vv=blist[kk]
- local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then break end
+ for i=1,#path do
+ if okay(hash,path[i],name) then
+ return result
end
end
end
@@ -14695,64 +15873,80 @@ end
function resolvers.findgivenfile(filename)
return findgivenfiles(filename,false)[1] or ""
end
-local function doit(path,blist,bname,tag,variant,result,allresults)
- local done=false
- if blist and variant then
- local resolve=resolvers.resolve
- if type(blist)=='string' then
- if find(lower(blist),path) then
- local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- end
- else
- for kk=1,#blist do
- local vv=blist[kk]
- if find(lower(vv),path) then
- local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- if not allresults then break end
- end
- end
- end
- end
- return done
-end
local makewildcard=Cs(
(P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
)
function resolvers.wildcardpattern(pattern)
return lpegmatch(makewildcard,pattern) or pattern
end
-local function findwildcardfiles(filename,allresults,result)
- result=result or {}
+local function findwildcardfiles(filename,allresults,result)
+ local result=result or {}
local base=filebasename(filename)
local dirn=filedirname(filename)
local path=lower(lpegmatch(makewildcard,dirn) or dirn)
local name=lower(lpegmatch(makewildcard,base) or base)
- local files,done=instance.files,false
- if find(name,"%*") then
+ local files=instance.files
+ if find(name,"*",1,true) then
local hashes=instance.hashes
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
for k=1,#hashes do
local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- for kk,hh in next,files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ for found,base in filtered(files[hashname],name) do
+ if type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
end
end
end
end
else
+ local function okayokay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
local hashes=instance.hashes
for k=1,#hashes do
local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ local found,base=lookup(files[hashname],base)
+ if not found then
+ elseif type(found)=='string' then
+ if okayokay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okayokay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
+ end
+ end
end
end
return result
@@ -14825,7 +16019,7 @@ end
function resolvers.dowithpath(name,func)
local pathlist=resolvers.expandedpathlist(name)
for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
+ func("^"..cleanpath(pathlist[i]))
end
end
function resolvers.dowithvariable(name,func)
@@ -14833,23 +16027,23 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
local engine=environment.ownmain or "luatex"
- local barename=file.removesuffix(name)
- local fullname=file.addsuffix(barename,"fmt")
+ local barename=removesuffix(name)
+ local fullname=addsuffix(barename,"fmt")
local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
if fmtname=="" then
fmtname=resolvers.findfile(fullname)
- fmtname=resolvers.cleanpath(fmtname)
+ fmtname=cleanpath(fmtname)
end
if fmtname~="" then
- local barename=file.removesuffix(fmtname)
- local luaname=file.addsuffix(barename,luasuffixes.lua)
- local lucname=file.addsuffix(barename,luasuffixes.luc)
- local luiname=file.addsuffix(barename,luasuffixes.lui)
- if lfs.isfile(luiname) then
+ local barename=removesuffix(fmtname)
+ local luaname=addsuffix(barename,luasuffixes.lua)
+ local lucname=addsuffix(barename,luasuffixes.luc)
+ local luiname=addsuffix(barename,luasuffixes.lui)
+ if isfile(luiname) then
return barename,luiname
- elseif lfs.isfile(lucname) then
+ elseif isfile(lucname) then
return barename,lucname
- elseif lfs.isfile(luaname) then
+ elseif isfile(luaname) then
return barename,luaname
end
end
@@ -14871,29 +16065,24 @@ function resolvers.dowithfilesintree(pattern,handle,before,after)
local hash=hashes[i]
local blobtype=hash.type
local blobpath=hash.name
- if blobpath then
+ if blobtype and blobpath then
+ local total=0
+ local checked=0
+ local done=0
if before then
before(blobtype,blobpath,pattern)
end
- local files=instance.files[blobpath]
- local total,checked,done=0,0,0
- if files then
- for k,v in table.sortedhash(files) do
- total=total+1
- if find(k,"^remap:") then
- elseif find(k,pattern) then
- if type(v)=="string" then
- checked=checked+1
- if handle(blobtype,blobpath,v,k) then
- done=done+1
- end
- else
- checked=checked+#v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done=done+1
- end
- end
+ for path,name in filtered(instance.files[blobpath],pattern) do
+ if type(path)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,path,name) then
+ done=done+1
+ end
+ else
+ checked=checked+#path
+ for i=1,#path do
+ if handle(blobtype,blobpath,path[i],name) then
+ done=done+1
end
end
end
@@ -14904,8 +16093,8 @@ function resolvers.dowithfilesintree(pattern,handle,before,after)
end
end
end
-resolvers.obsolete=resolvers.obsolete or {}
-local obsolete=resolvers.obsolete
+local obsolete=resolvers.obsolete or {}
+resolvers.obsolete=obsolete
resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
@@ -14916,7 +16105,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-pre"] = package.loaded["data-pre"] or true
--- original size: 6643, stripped down to: 4401
+-- original size: 3950, stripped down to: 2935
if not modules then modules={} end modules ['data-pre']={
version=1.001,
@@ -14926,44 +16115,51 @@ if not modules then modules={} end modules ['data-pre']={
license="see context related readme files"
}
local resolvers=resolvers
-local prefixes=utilities.storage.allocate()
-resolvers.prefixes=prefixes
-local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local prefixes=resolvers.prefixes
+local cleanpath=resolvers.cleanpath
+local findgivenfile=resolvers.findgivenfile
+local expansion=resolvers.expansion
local getenv=resolvers.getenv
-local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
-local joinpath,basename,dirname=file.join,file.basename,file.dirname
-local getmetatable,rawset,type=getmetatable,rawset,type
+local basename=file.basename
+local dirname=file.dirname
+local joinpath=file.join
+local isfile=lfs.isfile
prefixes.environment=function(str)
return cleanpath(expansion(str))
end
-prefixes.relative=function(str,n)
- if io.exists(str) then
- elseif io.exists("./"..str) then
- str="./"..str
- else
- local p="../"
- for i=1,n or 2 do
- if io.exists(p..str) then
- str=p..str
- break
- else
- p=p.."../"
+local function relative(str,n)
+ if not isfile(str) then
+ local pstr="./"..str
+ if isfile(pstr) then
+ str=pstr
+ else
+ local p="../"
+ for i=1,n or 2 do
+ local pstr=p..str
+ if isfile(pstr) then
+ str=pstr
+ break
+ else
+ p=p.."../"
+ end
end
end
end
return cleanpath(str)
end
+local function locate(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(fullname~="" and fullname or str)
+end
+prefixes.relative=relative
+prefixes.locate=locate
prefixes.auto=function(str)
- local fullname=prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname=prefixes.locate(str)
+ local fullname=relative(str)
+ if not isfile(fullname) then
+ fullname=locate(str)
end
return fullname
end
-prefixes.locate=function(str)
- local fullname=findgivenfile(str) or ""
- return cleanpath((fullname~="" and fullname) or str)
-end
prefixes.filename=function(str)
local fullname=findgivenfile(str) or ""
return cleanpath(basename((fullname~="" and fullname) or str))
@@ -14984,6 +16180,13 @@ end
prefixes.home=function(str)
return cleanpath(joinpath(getenv('HOME'),str))
end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
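-- usage sketch (hypothetical file names): the short aliases map onto the handlers
-- defined above, so a prefixed specification resolves the same way as its long form, e.g.
--   resolvers.resolve("rel:images/logo.pdf")  -- relative(): tries ./ and ../ levels
--   resolvers.resolve("loc:context.mkiv")     -- locate(): findgivenfile, then cleanpath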
local function toppath()
local inputstack=resolvers.inputstack
if not inputstack then
@@ -14996,98 +16199,22 @@ local function toppath()
return pathname
end
end
-resolvers.toppath=toppath
-prefixes.toppath=function(str)
- return cleanpath(joinpath(toppath(),str))
-end
-prefixes.env=prefixes.environment
-prefixes.rel=prefixes.relative
-prefixes.loc=prefixes.locate
-prefixes.kpse=prefixes.locate
-prefixes.full=prefixes.locate
-prefixes.file=prefixes.filename
-prefixes.path=prefixes.pathname
-function resolvers.allprefixes(separator)
- local all=table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i]=all[i]..":"
- end
- end
- return all
-end
-local function _resolve_(method,target)
- local action=prefixes[method]
- if action then
- return action(target)
- else
- return method..":"..target
- end
-end
-local resolved,abstract={},{}
-function resolvers.resetresolve(str)
- resolved,abstract={},{}
-end
-local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
-local prefix=C(R("az")^2)*P(":")
-local target=C((1-S(" \"\';,"))^1)
-local notarget=(#S(";,")+P(-1))*Cc("")
-local pattern=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
-local function resolve(str)
- if type(str)=="table" then
- local t={}
- for i=1,#str do
- t[i]=resolve(str[i])
- end
- return t
+local function jobpath()
+ local path=resolvers.stackpath()
+ if not path or path=="" then
+ return "."
else
- local res=resolved[str]
- if not res then
- res=lpegmatch(pattern,str)
- resolved[str]=res
- abstract[res]=str
- end
- return res
- end
-end
-local function unresolve(str)
- return abstract[str] or str
-end
-resolvers.resolve=resolve
-resolvers.unresolve=unresolve
-if type(os.uname)=="function" then
- for k,v in next,os.uname() do
- if not prefixes[k] then
- prefixes[k]=function() return v end
- end
- end
-end
-if os.type=="unix" then
- local pattern
- local function makepattern(t,k,v)
- if t then
- rawset(t,k,v)
- end
- local colon=P(":")
- for k,v in table.sortedpairs(prefixes) do
- if p then
- p=P(k)+p
- else
- p=P(k)
- end
- end
- pattern=Cs((p*colon+colon/";"+P(1))^0)
- end
- makepattern()
- getmetatable(prefixes).__newindex=makepattern
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-else
- function resolvers.repath(str)
- return str
+ return path
end
end
+resolvers.toppath=toppath
+resolvers.jobpath=jobpath
+prefixes.toppath=function(str) return cleanpath(joinpath(toppath(),str)) end
+prefixes.jobpath=function(str) return cleanpath(joinpath(jobpath(),str)) end
+resolvers.setdynamic("toppath")
+resolvers.setdynamic("jobpath")
+prefixes.jobfile=prefixes.jobpath
+resolvers.setdynamic("jobfile")
end -- of closure
@@ -15149,7 +16276,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-fil"] = package.loaded["data-fil"] or true
--- original size: 3801, stripped down to: 3231
+-- original size: 3863, stripped down to: 3310
if not modules then modules={} end modules ['data-fil']={
version=1.001,
@@ -15161,30 +16288,31 @@ if not modules then modules={} end modules ['data-fil']={
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local report_files=logs.reporter("resolvers","files")
local resolvers=resolvers
+local resolveprefix=resolvers.resolve
local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name=specification.filename
- local realname=resolvers.resolve(name)
+ local filename=specification.filename
+ local realname=resolveprefix(filename)
if realname and realname~='' and lfs.isdir(realname) then
if trace_locating then
- report_files("file locator %a found as %a",name,realname)
+ report_files("file locator %a found as %a",filename,realname)
end
- resolvers.appendhash('file',name,true)
+ resolvers.appendhash('file',filename,true)
elseif trace_locating then
- report_files("file locator %a not found",name)
+ report_files("file locator %a not found",filename)
end
end
function hashers.file(specification)
- local name=specification.filename
- local content=caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local pathname=specification.filename
+ local content=caches.loadcontent(pathname,'files')
+ resolvers.registerfilehash(pathname,content,content==nil)
end
function generators.file(specification)
- local path=specification.filename
- local content=resolvers.scanfiles(path,false,true)
- resolvers.registerfilehash(path,content,true)
+ local pathname=specification.filename
+ local content=resolvers.scanfiles(pathname,false,true)
+ resolvers.registerfilehash(pathname,content,true)
end
concatinators.file=file.join
function finders.file(specification,filetype)
@@ -15375,7 +16503,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-use"] = package.loaded["data-use"] or true
--- original size: 3913, stripped down to: 2998
+-- original size: 3899, stripped down to: 2984
if not modules then modules={} end modules ['data-use']={
version=1.001,
@@ -15421,7 +16549,7 @@ end
statistics.register("used config file",function() return caches.configfiles() end)
statistics.register("used cache path",function() return caches.usedpaths() end)
function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.list().banner
+ local enginebanner=status.banner
if formatbanner and enginebanner and sourcefile then
local luvname=file.replacesuffix(texname,"luv")
local luvdata={
@@ -15434,7 +16562,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile)
end
end
function statistics.checkfmtstatus(texname)
- local enginebanner=status.list().banner
+ local enginebanner=status.banner
if enginebanner and texname then
local luvname=file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
@@ -15466,7 +16594,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-zip"] = package.loaded["data-zip"] or true
--- original size: 8489, stripped down to: 6757
+-- original size: 8772, stripped down to: 6841
if not modules then modules={} end modules ['data-zip']={
version=1.001,
@@ -15485,16 +16613,6 @@ zip.archives=zip.archives or {}
local archives=zip.archives
zip.registeredfiles=zip.registeredfiles or {}
local registeredfiles=zip.registeredfiles
-local limited=false
-directives.register("system.inputmode",function(v)
- if not limited then
- local i_limiter=io.i_limiter(v)
- if i_limiter then
- zip.open=i_limiter.protect(zip.open)
- limited=true
- end
- end
-end)
local function validzip(str)
if not find(str,"^zip://") then
return "zip:///"..str
@@ -15509,7 +16627,7 @@ function zip.openarchive(name)
local arch=archives[name]
if not arch then
local full=resolvers.findfile(name) or ""
- arch=(full~="" and zip.open(full)) or false
+ arch=full~="" and zip.open(full) or false
archives[name]=arch
end
return arch
@@ -15668,31 +16786,42 @@ function resolvers.usezipfile(archive)
end
end
function resolvers.registerzipfile(z,tree)
- local files,filter={},""
- if tree=="" then
- filter="^(.+)/(.-)$"
- else
- filter=format("^%s/(.+)/(.-)$",tree)
- end
+ local names={}
+ local files={}
+ local remap={}
+ local n=0
+ local filter=tree=="" and "^(.+)/(.-)$" or format("^%s/(.+)/(.-)$",tree)
+ local register=resolvers.registerfile
if trace_locating then
report_zip("registering: using filter %a",filter)
end
- local register,n=resolvers.registerfile,0
for i in z:files() do
- local path,name=match(i.filename,filter)
- if path then
- if name and name~='' then
- register(files,name,path)
- n=n+1
- else
+ local filename=i.filename
+ local path,name=match(filename,filter)
+ if not path then
+ n=n+1
+ register(names,filename,"")
+ local usedname=lower(filename)
+ files[usedname]=""
+ if usedname~=filename then
+ remap[usedname]=filename
end
- else
- register(files,i.filename,'')
+ elseif name and name~="" then
n=n+1
+ register(names,name,path)
+ local usedname=lower(name)
+ files[usedname]=path
+ if usedname~=name then
+ remap[usedname]=name
+ end
+ else
end
end
report_zip("registering: %s files registered",n)
- return files
+ return {
+ files=files,
+ remap=remap,
+ }
end
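-- sketch of the returned content table (hypothetical entries): lowercased names map
-- to their path, with the original casing kept in remap so lookups stay case-insensitive
--   { files = { ["readme.txt"] = "doc" }, remap = { ["readme.txt"] = "README.txt" } }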
@@ -15702,7 +16831,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tre"] = package.loaded["data-tre"] or true
--- original size: 2508, stripped down to: 2074
+-- original size: 8479, stripped down to: 5580
if not modules then modules={} end modules ['data-tre']={
version=1.001,
@@ -15711,42 +16840,64 @@ if not modules then modules={} end modules ['data-tre']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local find,gsub,format=string.find,string.gsub,string.format
+local find,gsub,lower=string.find,string.gsub,string.lower
+local basename,dirname,joinname=file.basename,file.dirname,file .join
+local globdir,isdir,isfile=dir.glob,lfs.isdir,lfs.isfile
+local P,lpegmatch=lpeg.P,lpeg.match
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local report_trees=logs.reporter("resolvers","trees")
local resolvers=resolvers
-local done,found,notfound={},{},resolvers.finders.notfound
-function resolvers.finders.tree(specification)
+local resolveprefix=resolvers.resolve
+local notfound=resolvers.finders.notfound
+local lookup=resolvers.get_from_content
+local collectors={}
+local found={}
+function resolvers.finders.tree(specification)
local spec=specification.filename
- local fnd=found[spec]
- if fnd==nil then
+ local okay=found[spec]
+ if okay==nil then
if spec~="" then
- local path,name=file.dirname(spec),file.basename(spec)
- if path=="" then path="." end
- local hash=done[path]
- if not hash then
- local pattern=path.."/*"
- hash=dir.glob(pattern)
- done[path]=hash
+ local path=dirname(spec)
+ local name=basename(spec)
+ if path=="" then
+ path="."
+ end
+ local names=collectors[path]
+ if not names then
+ local pattern=find(path,"/%*+$") and path or (path.."/*")
+ names=globdir(pattern)
+ collectors[path]=names
end
local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
- for k=1,#hash do
- local v=hash[k]
- if find(v,pattern) then
- found[spec]=v
- return v
+ for i=1,#names do
+ local fullname=names[i]
+ if find(fullname,pattern) then
+ found[spec]=fullname
+ return fullname
+ end
+ end
+ local pattern=lower(pattern)
+ for i=1,#names do
+ local fullname=lower(names[i])
+ if find(fullname,pattern) then
+ if isfile(fullname) then
+ found[spec]=fullname
+ return fullname
+ else
+ break
+ end
end
end
end
- fnd=notfound()
- found[spec]=fnd
+ okay=notfound()
+ found[spec]=okay
end
- return fnd
+ return okay
end
function resolvers.locators.tree(specification)
local name=specification.filename
- local realname=resolvers.resolve(name)
- if realname and realname~='' and lfs.isdir(realname) then
+ local realname=resolveprefix(name)
+ if realname and realname~='' and isdir(realname) then
if trace_locating then
report_trees("locator %a found",realname)
end
@@ -15757,16 +16908,110 @@ function resolvers.locators.tree(specification)
end
function resolvers.hashers.tree(specification)
local name=specification.filename
- if trace_locating then
- report_trees("analysing %a",name)
- end
+ report_trees("analyzing %a",name)
resolvers.methodhandler("hashers",name)
resolvers.generators.file(specification)
end
-resolvers.concatinators.tree=resolvers.concatinators.file
-resolvers.generators.tree=resolvers.generators.file
-resolvers.openers.tree=resolvers.openers.file
-resolvers.loaders.tree=resolvers.loaders.file
+local collectors={}
+local splitter=lpeg.splitat("/**/")
+local stripper=lpeg.replacer { [P("/")*P("*")^1*P(-1)]="" }
+table.setmetatableindex(collectors,function(t,k)
+ local rootname=lpegmatch(stripper,k)
+ local dataname=joinname(rootname,"dirlist")
+ local content=caches.loadcontent(dataname,"files",dataname)
+ if not content then
+ content=resolvers.scanfiles(rootname,nil,nil,false,true)
+ caches.savecontent(dataname,"files",content,dataname)
+ end
+ t[k]=content
+ return content
+end)
+local function checked(root,p,n)
+ if p then
+ if type(p)=="table" then
+ for i=1,#p do
+ local fullname=joinname(root,p[i],n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ else
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ return notfound()
+end
+local function resolve(specification)
+ local filename=specification.filename
+ if filename~="" then
+ local root,rest=lpegmatch(splitter,filename)
+ if root and rest then
+ local path,name=dirname(rest),basename(rest)
+ if name~=rest then
+ local content=collectors[root]
+ local p,n=lookup(content,name)
+ if not p then
+ return notfound()
+ end
+ local pattern=".*/"..path.."$"
+ local istable=type(p)=="table"
+ if istable then
+ for i=1,#p do
+ local pi=p[i]
+ if pi==path or find(pi,pattern) then
+ local fullname=joinname(root,pi,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ elseif p==path or find(p,pattern) then
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ local queries=specification.queries
+ if queries and queries.option=="fileonly" then
+ return checked(root,p,n)
+ else
+ return notfound()
+ end
+ end
+ end
+ local path,name=dirname(filename),basename(filename)
+ local root=lpegmatch(stripper,path)
+ local content=collectors[path]
+ local p,n=lookup(content,name)
+ if p then
+ return checked(root,p,n)
+ end
+ end
+ return notfound()
+end
+resolvers.finders .dirlist=resolve
+resolvers.locators .dirlist=resolvers.locators .tree
+resolvers.hashers .dirlist=resolvers.hashers .tree
+resolvers.generators.dirlist=resolvers.generators.file
+resolvers.openers .dirlist=resolvers.openers .file
+resolvers.loaders .dirlist=resolvers.loaders .file
+function resolvers.finders.dirfile(specification)
+ local queries=specification.queries
+ if queries then
+ queries.option="fileonly"
+ else
+ specification.queries={ option="fileonly" }
+ end
+ return resolve(specification)
+end
+resolvers.locators .dirfile=resolvers.locators .dirlist
+resolvers.hashers .dirfile=resolvers.hashers .dirlist
+resolvers.generators.dirfile=resolvers.generators.dirlist
+resolvers.openers .dirfile=resolvers.openers .dirlist
+resolvers.loaders .dirfile=resolvers.loaders .dirlist
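-- illustrative note (hypothetical path): a dirlist/dirfile specification splits its
-- filename on the literal "/**/" marker; for "/opt/fonts/**/ornaments/ding.otf" the
-- cached file list is built for "/opt/fonts" and "ding.otf" is looked up with its
-- path constrained to end in "ornaments", falling back to a plain on-disk file check
-- when the query asks for "fileonly".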
end -- of closure
@@ -15775,7 +17020,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6202, stripped down to: 5149
+-- original size: 6569, stripped down to: 5304
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -15801,8 +17046,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -15818,8 +17068,8 @@ function resolvers.schemes.cleanname(specification)
end
local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
local function runcurl(name,cachename)
- local command="curl --silent --create-dirs --output "..cachename.." "..name
- os.spawn(command)
+ local command="curl --silent --insecure --create-dirs --output "..cachename.." "..name
+ os.execute(command)
end
local function fetch(specification)
local original=specification.original
@@ -15951,7 +17201,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-lua"] = package.loaded["data-lua"] or true
--- original size: 4237, stripped down to: 3177
+-- original size: 4313, stripped down to: 3227
if not modules then modules={} end modules ['data-lua']={
version=1.001,
@@ -15960,7 +17210,7 @@ if not modules then modules={} end modules ['data-lua']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local resolvers,package=resolvers,package
+local package,lpeg=package,lpeg
local gsub=string.gsub
local concat=table.concat
local addsuffix=file.addsuffix
@@ -15971,9 +17221,11 @@ local luaformats={ 'TEXINPUTS','LUAINPUTS' }
local libformats={ 'CLUAINPUTS' }
local helpers=package.helpers or {}
local methods=helpers.methods or {}
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+helpers.report=logs.reporter("resolvers","libraries")
trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
trackers.register("resolvers.locating",function(v) helpers.trace=v end)
-helpers.report=logs.reporter("resolvers","libraries")
helpers.sequence={
"already loaded",
"preload table",
@@ -15988,7 +17240,7 @@ helpers.sequence={
}
local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
function helpers.cleanpath(path)
- return resolvers.resolve(lpegmatch(pattern,path))
+ return resolveprefix(lpegmatch(pattern,path))
end
local loadedaslib=helpers.loadedaslib
local getextraluapaths=package.extraluapaths
@@ -16058,7 +17310,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-aux"] = package.loaded["data-aux"] or true
--- original size: 2394, stripped down to: 2005
+-- original size: 2431, stripped down to: 1996
if not modules then modules={} end modules ['data-aux']={
version=1.001,
@@ -16072,8 +17324,8 @@ local type,next=type,next
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local resolvers=resolvers
local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="scripts/context/lua"
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="context/lua"
newname=file.addsuffix(newname,"lua")
local oldscript=resolvers.cleanpath(oldname)
if trace_locating then
@@ -16125,7 +17377,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
--- original size: 2600, stripped down to: 1627
+-- original size: 2601, stripped down to: 1627
if not modules then modules={} end modules ['data-tmf']={
version=1.001,
@@ -16181,7 +17433,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-lst"] = package.loaded["data-lst"] or true
--- original size: 2654, stripped down to: 2301
+-- original size: 2734, stripped down to: 2354
if not modules then modules={} end modules ['data-lst']={
version=1.001,
@@ -16190,10 +17442,13 @@ if not modules then modules={} end modules ['data-lst']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local rawget,type,next=rawget,type,next
+local find,concat,upper=string.find,table.concat,string.upper
local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
-resolvers.listers=resolvers.listers or {}
local resolvers=resolvers
+local listers=resolvers.listers or {}
+resolvers.listers=listers
+local resolveprefix=resolvers.resolve
local report_lists=logs.reporter("resolvers","lists")
local function tabstr(str)
if type(str)=='table' then
@@ -16202,7 +17457,7 @@ local function tabstr(str)
return str
end
end
-function resolvers.listers.variables(pattern)
+function listers.variables(pattern)
local instance=resolvers.instance
local environment=instance.environment
local variables=instance.variables
@@ -16223,10 +17478,10 @@ function resolvers.listers.variables(pattern)
for key,value in sortedpairs(configured) do
if key~="" and (pattern=="" or find(upper(key),pattern)) then
report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset")
end
end
instance.environment=fastcopy(env)
@@ -16234,15 +17489,15 @@ function resolvers.listers.variables(pattern)
instance.expansions=fastcopy(exp)
end
local report_resolved=logs.reporter("system","resolved")
-function resolvers.listers.configurations()
+function listers.configurations()
local configurations=resolvers.instance.specification
for i=1,#configurations do
- report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ report_resolved("file : %s",resolveprefix(configurations[i]))
end
report_resolved("")
local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
for i=1,#list do
- local li=resolvers.resolve(list[i])
+ local li=resolveprefix(list[i])
if lfs.isdir(li) then
report_resolved("path - %s",li)
else
@@ -16547,7 +17802,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
--- original size: 5951, stripped down to: 4922
+-- original size: 5955, stripped down to: 4926
if not modules then modules={} end modules ['luat-fmt']={
version=1.001,
@@ -16635,7 +17890,7 @@ function environment.make_format(name)
end
local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
report_format("running command: %s\n",command)
- os.spawn(command)
+ os.execute(command)
local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
local mp=dir.glob(pattern)
if mp then
@@ -16670,7 +17925,7 @@ function environment.run_format(name,data,more)
else
local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
report_format("running command: %s",command)
- os.spawn(command)
+ os.execute(command)
end
end
end
@@ -16681,8 +17936,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 685064
--- stripped bytes : 242353
+-- original bytes : 745618
+-- stripped bytes : 269191
-- end library merge
@@ -16781,17 +18036,18 @@ local ownlibs = { -- order can be made better
}
+-- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
+-- c:/data/develop/context/sources/data-tmf.lua
+
local ownlist = {
- '.',
- ownpath ,
- ownpath .. "/../sources", -- HH's development path
+ -- '.',
+ -- ownpath ,
+ owntree .. "/../../../../context/sources", -- HH's development path
owntree .. "/../../texmf-local/tex/context/base",
owntree .. "/../../texmf-context/tex/context/base",
- owntree .. "/../../texmf-dist/tex/context/base",
owntree .. "/../../texmf/tex/context/base",
owntree .. "/../../../texmf-local/tex/context/base",
owntree .. "/../../../texmf-context/tex/context/base",
- owntree .. "/../../../texmf-dist/tex/context/base",
owntree .. "/../../../texmf/tex/context/base",
}
@@ -16907,6 +18163,7 @@ local helpinfo = [[
<category name="basic">
<subcategory>
<flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="evaluate"><short>run code passed on the commandline (between quotes)</short></flag>
<flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
<flag name="resolve"><short>resolve prefixed arguments</short></flag>
<flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
@@ -16932,6 +18189,7 @@ local helpinfo = [[
<flag name="verbose"><short>give a bit more info</short></flag>
<flag name="trackers" value="list"><short>enable given trackers</short></flag>
<flag name="progname" value="str"><short>format or backend</short></flag>
+ <flag name="systeminfo" value="str"><short>show current operating system, processor, etc</short></flag>
</subcategory>
<subcategory>
<flag name="edit"><short>launch editor with found file</short></flag>
@@ -17561,6 +18819,39 @@ function runners.associate(filename)
os.launch(filename)
end
+function runners.evaluate(code,filename) -- for Luigi
+ if code == "loop" then
+ while true do
+ io.write("> ")
+ local code = io.read()
+ if code ~= "" then
+ local temp = string.match(code,"^= (.*)$")
+ if temp then
+ code = "print("..temp..")"
+ end
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("! " .. (message or code).."\n")
+ else
+ io.write(compiled())
+ end
+ end
+ end
+ else
+ if type(code) ~= "string" or code == "" then
+ code = filename
+ end
+ if code ~= "" then
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("invalid lua code: " .. (message or code))
+ return
+ end
+ io.write(compiled())
+ end
+ end
+end
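
For orientation: the --evaluate flag declared in the help info above lands in this function. Below is a minimal plain-Lua sketch of what a one-shot call boils down to, with the loop mode's "= expr" shorthand folded in for illustration; it is an editorial sketch, not part of the commit.

    -- reduced sketch of runners.evaluate for a single snippet
    local code = "= 2^10"                        -- "= expr" is shorthand for print(expr)
    local expr = string.match(code, "^= (.*)$")
    if expr then
        code = "print(" .. expr .. ")"
    end
    local compiled, message = loadstring(code)   -- 'load' on Lua 5.2 and later
    if type(compiled) ~= "function" then
        io.write("invalid lua code: " .. (message or code) .. "\n")
    else
        compiled()                               -- prints the value of the expression
    end

From the shell this corresponds to something like mtxrun --evaluate="print(2^10)", or mtxrun --evaluate=loop for the interactive prompt handled by the while loop above.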
+
function runners.gethelp(filename)
local url = environment.argument("url")
if url and url ~= "" then
@@ -17572,6 +18863,15 @@ function runners.gethelp(filename)
end
end
+function runners.systeminfo()
+ report("architecture : %s",os.platform or "<unset>")
+ report("operating system : %s",os.name or "<unset>")
+ report("file architecture : %s",os.type or "<unset>")
+ report("binary path : %s",os.selfdir or "<unset>")
+ report("binary suffix : %s",os.binsuffix or "<unset>")
+ report("library suffix : %s",os.libsuffix or "<unset>")
+end
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -17687,16 +18987,13 @@ end
if e_argument("ansi") then
- local formatters = string.formatters
+ logs.setformatters("ansi")
- logs.setformatters {
- report_yes = formatters["%-15s | %s"],
- report_nop = formatters["%-15s |"],
- subreport_yes = formatters["%-15s | %s | %s"],
- subreport_nop = formatters["%-15s | %s |"],
- status_yes = formatters["%-15s : %s\n"],
- status_nop = formatters["%-15s :\n"],
- }
+ local script = e_argument("script") or e_argument("scripts")
+
+ if type(script) == "string" then
+ logs.writer("]0;"..script.."") -- for Alan to test
+ end
end
@@ -17715,14 +19012,26 @@ if e_argument("script") or e_argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
+elseif e_argument("evaluate") then
+
+ runners.evaluate(e_argument("evaluate"),filename)
+
elseif e_argument("selfmerge") then
-- embed used libraries
runners.loadbase()
local found = locate_libs()
+
if found then
- utilities.merger.selfmerge(own.name,own.libs,{ found })
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfmerge(mtxrun,own.libs,{ found })
+ application.report("runner updated on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
+ application.report("runner updated on relative path: %s",own.name)
+ end
end
elseif e_argument("selfclean") then
@@ -17730,7 +19039,15 @@ elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
- utilities.merger.selfclean(own.name)
+
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfclean(mtxrun)
+ application.report("runner cleaned on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfclean(own.name)
+ application.report("runner cleaned on relative path: %s",own.name)
+ end
elseif e_argument("selfupdate") then
@@ -17972,6 +19289,8 @@ elseif e_argument("version") then
application.version()
+ application.report("source path",environment.ownbin)
+
elseif e_argument("directives") then
directives.show()
@@ -17989,6 +19308,10 @@ elseif e_argument("exporthelp") then
runners.loadbase()
application.export(e_argument("exporthelp"),filename)
+elseif e_argument("systeminfo") then
+
+ runners.systeminfo()
+
elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
diff --git a/scripts/context/ruby/texexec.rb b/scripts/context/ruby/texexec.rb
index c673cb46b..7f8298c09 100644
--- a/scripts/context/ruby/texexec.rb
+++ b/scripts/context/ruby/texexec.rb
@@ -685,22 +685,22 @@ end
# so far for compatibility, will move to tex
-@@extrastringvars = [
+extrastringvars = [
'pages', 'background', 'backspace', 'topspace', 'boxtype', 'tempdir','bannerheight',
'printformat', 'method', 'scale', 'selection',
'combination', 'textwidth', 'addempty', 'logfile',
'startline', 'endline', 'startcolumn', 'endcolumn', 'scale'
]
-@@extrabooleanvars = [
+extrabooleanvars = [
'centerpage', 'noduplex', 'color', 'pretty',
'fullscreen', 'screensaver', 'markings'
]
if job = TEX.new(logger) then
- job.setextrastringvars(@@extrastringvars)
- job.setextrabooleanvars(@@extrabooleanvars)
+ job.setextrastringvars(extrastringvars)
+ job.setextrabooleanvars(extrabooleanvars)
job.booleanvars.each do |k|
commandline.registerflag(k)
diff --git a/scripts/context/stubs/mswin/first-setup.bat b/scripts/context/stubs/install/first-setup.bat
index f06ad0e6b..f06ad0e6b 100644
--- a/scripts/context/stubs/mswin/first-setup.bat
+++ b/scripts/context/stubs/install/first-setup.bat
diff --git a/scripts/context/stubs/install/first-setup.sh b/scripts/context/stubs/install/first-setup.sh
new file mode 100644
index 000000000..9249fd2e0
--- /dev/null
+++ b/scripts/context/stubs/install/first-setup.sh
@@ -0,0 +1,120 @@
+#!/bin/sh
+
+# Takes the same arguments as mtx-update
+
+# you may change this if you want ...
+CONTEXTROOT="$PWD/tex"
+
+# suggested by Tobias Florek to check for ruby & rsync
+if [ ! -x "`which rsync`" ]; then
+ echo "You need to install rsync first."
+ exit 1
+fi
+if [ ! -x "`which ruby`" ]; then
+ echo "You might want to install Ruby first if you want to use pdfTeX or XeTeX."
+fi
+
+system=`uname -s`
+cpu=`uname -m`
+
+case "$system" in
+ # linux
+ Linux)
+ case "$cpu" in
+ i*86) platform="linux" ;;
+ x86_64|ia64) platform="linux-64" ;;
+ # a little bit of cheating with ppc64 (won't work on Gentoo)
+ ppc|ppc64) platform="linux-ppc" ;;
+ # we currently support just mipsel, but Debian is lying (reports mips64)
+ # we need more hacks to fix the situation, this is just a temporary solution
+ mips|mips64|mipsel|mips64el) platform="linux-mipsel" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # Mac OS X
+ Darwin)
+ case "$cpu" in
+ i*86) platform="osx-intel" ;;
+ x86_64) platform="osx-64" ;;
+ ppc*|powerpc|power*|Power*) platform="osx-ppc" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # FreeBSD
+ FreeBSD|freebsd)
+ case "$cpu" in
+ i*86) platform="freebsd" ;;
+ x86_64) platform="freebsd" ;; # no special binaries are available yet
+ amd64) platform="freebsd-amd64" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # kFreeBSD (debian)
+ GNU/kFreeBSD)
+ case "$cpu" in
+ i*86) platform="kfreebsd-i386" ;;
+ x86_64|amd64) platform="kfreebsd-amd64" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # cygwin
+ CYGWIN*)
+ case "$cpu" in
+ i*86) platform="cygwin" ;;
+ x86_64|ia64) platform="cygwin-64" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # SunOS/Solaris
+ SunOS)
+ case "$cpu" in
+ sparc) platform="solaris-sparc" ;;
+ i86pc) platform="solaris-intel" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ *) platform="unknown"
+esac
+
+# temporary patch for 64-bit Leopard with 32-bit kernel
+if test "$platform" = "osx-intel"; then
+ # if running Snow Leopard or later
+ # better: /usr/bin/sw_vers -productVersion
+ if test `uname -r|cut -f1 -d"."` -ge 10 ; then
+ # if working on 64-bit hardware
+ if test `sysctl -n hw.cpu64bit_capable` = 1; then
+ # snowleopard32=TRUE
+ platform="osx-64"
+ fi
+ fi
+fi
+
+if test "$platform" = "unknown" ; then
+ echo "Error: your system \"$system $cpu\" is not supported yet."
+ echo "Please report to the ConTeXt mailing-list (ntg-context@ntg.nl)"
+ exit
+fi
+
+# if you want to enforce some specific platform
+# (when 'uname' doesn't agree with true architecture), uncomment and modify next line:
+# platform=linux
+
+# download or rsync the latest scripts first
+rsync -rlptv rsync://contextgarden.net/minimals/setup/$platform/bin .
+
+# download or update the distribution
+# you may remove the --context=beta switch if you want to use "current"
+# you can use --engine=luatex if you want just mkiv
+env PATH="$PWD/bin:$CONTEXTROOT/texmf-$platform/bin:$PATH" \
+mtxrun --script ./bin/mtx-update.lua --force --update --make --context=beta --platform=$platform --texroot="$CONTEXTROOT" $@
+
+echo
+echo "When you want to use context, you need to initialize the tree by typing:"
+echo
+echo " . $CONTEXTROOT/setuptex"
+echo
+echo "in your shell or add"
+echo " \"$CONTEXTROOT/texmf-$platform/bin\""
+echo "to PATH variable if you want to set it permanently."
+echo "This can usually be done in .bashrc, .bash_profile"
+echo "(or whatever file is used to initialize your shell)."
+echo
+
+if [ ! -x "`which ruby`" ]; then
+ echo "You might want to install Ruby first if you want to use pdfTeX or XeTeX."
+ echo
+fi
diff --git a/scripts/context/stubs/mswin/context.exe b/scripts/context/stubs/mswin/context.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/context.exe
+++ b/scripts/context/stubs/mswin/context.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/ctxtools.exe b/scripts/context/stubs/mswin/ctxtools.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/ctxtools.exe
+++ b/scripts/context/stubs/mswin/ctxtools.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/luatools.exe b/scripts/context/stubs/mswin/luatools.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/luatools.exe
+++ b/scripts/context/stubs/mswin/luatools.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/metatex.exe b/scripts/context/stubs/mswin/metatex.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/metatex.exe
+++ b/scripts/context/stubs/mswin/metatex.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mptopdf.exe b/scripts/context/stubs/mswin/mptopdf.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/mptopdf.exe
+++ b/scripts/context/stubs/mswin/mptopdf.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.dll b/scripts/context/stubs/mswin/mtxrun.dll
index 5a79e1bad..3c4481c31 100644
--- a/scripts/context/stubs/mswin/mtxrun.dll
+++ b/scripts/context/stubs/mswin/mtxrun.dll
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.exe b/scripts/context/stubs/mswin/mtxrun.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/mtxrun.exe
+++ b/scripts/context/stubs/mswin/mtxrun.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 0ff2d2897..edfeba8dd 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -56,7 +56,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lua"] = package.loaded["l-lua"] or true
--- original size: 3123, stripped down to: 1694
+-- original size: 3888, stripped down to: 2197
if not modules then modules={} end modules ['l-lua']={
version=1.001,
@@ -136,6 +136,16 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
end -- of closure
@@ -434,7 +444,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 29245, stripped down to: 15964
+-- original size: 36977, stripped down to: 20349
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -450,7 +460,9 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -469,7 +481,7 @@ local uppercase=R("AZ")
local underscore=P("_")
local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
-local newline=crlf+S("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
@@ -491,8 +503,10 @@ patterns.utfbom_32_le=utfbom_32_le
patterns.utfbom_16_be=utfbom_16_be
patterns.utfbom_16_le=utfbom_16_le
patterns.utfbom_8=utfbom_8
-patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
-patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
@@ -519,10 +533,24 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
patterns.lowercase=lowercase
patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
@@ -559,9 +587,12 @@ patterns.integer=sign^-1*digit^1
patterns.unsigned=digit^0*period*digit^1
patterns.float=sign^-1*patterns.unsigned
patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
patterns.number=patterns.float+patterns.integer
patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
patterns.oct=zero*octdigit^1
patterns.octal=patterns.oct
patterns.HEX=zero*P("X")*(digit+uppercase)^1
@@ -744,7 +775,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -760,7 +791,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -974,37 +1009,139 @@ function lpeg.append(list,pp,delayed,checked)
end
return p
end
+local p_false=P(false)
+local p_true=P(true)
local function make(t)
- local p
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
local keys=sortedkeys(t)
for i=1,#keys do
local k=keys[i]
- local v=t[k]
- if not p then
- if next(v) then
- p=P(k)*make(v)
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
else
- p=P(k)
+ p=p+P(k)*making(v)
end
- else
- if next(v) then
- p=p+P(k)*make(v)
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
else
- p=p+P(k)
+ return v,x..k
+ end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
end
+ return tt,x
end
end
- return p
end
function lpeg.utfchartabletopattern(list)
local tree={}
- for i=1,#list do
- local t=tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
end
- t=t[c]
end
end
return make(tree)
@@ -1044,6 +1181,65 @@ local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
local number=digit^1*(case_1+case_2)
local stripper=Cs((number+1)^0)
lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
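
The string.toHEX/tohex/todec/tobytes helpers just above run a string through the new byte-conversion tables; a quick sanity check, assuming the merged library code is loaded (as it is inside mtxrun):

    print(string.tohex("abc"))       -- 616263
    print(string.toHEX("\255\000"))  -- FF00
    print(string.todec("abc"))       -- 097098099  (three decimal digits per byte)
    print(string.tobytes("616263"))  -- abc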
end -- of closure
@@ -1071,7 +1267,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-string"] = package.loaded["l-string"] or true
--- original size: 5547, stripped down to: 2708
+-- original size: 5694, stripped down to: 2827
if not modules then modules={} end modules ['l-string']={
version=1.001,
@@ -1107,11 +1303,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
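
string.fullstrip complements string.strip: the stripper pattern only eats the spacer class at the edges, while the new fullstripper uses the whitespace class, which (as the names suggest) also covers line ends. A small editorial sketch under that assumption, with the merged library loaded:

    local s = "\t padded line \n"
    local a = string.strip(s)        -- "padded line \n"  (edge spaces/tabs go, the newline stays)
    local b = string.fullstrip(s)    -- "padded line"     (all surrounding whitespace goes)
    print(#a, #b)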
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1172,7 +1372,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-table"] = package.loaded["l-table"] or true
--- original size: 31113, stripped down to: 20256
+-- original size: 35724, stripped down to: 21525
if not modules then modules={} end modules ['l-table']={
version=1.001,
@@ -1205,7 +1405,7 @@ end
function table.keys(t)
if t then
local keys,k={},0
- for key,_ in next,t do
+ for key in next,t do
k=k+1
keys[k]=key
end
@@ -1215,32 +1415,52 @@ function table.keys(t)
end
end
local function compare(a,b)
- local ta,tb=type(a),type(b)
- if ta==tb then
- return a<b
- else
- return tostring(a)<tostring(b)
+ local ta=type(a)
+ if ta=="number" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ elseif tb=="string" then
+ return tostring(a)<b
+ end
+ elseif ta=="string" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ else
+ return a<tostring(b)
+ end
end
+ return tostring(a)<tostring(b)
end
local function sortedkeys(tab)
if tab then
local srt,category,s={},0,0
- for key,_ in next,tab do
+ for key in next,tab do
s=s+1
srt[s]=key
if category==3 then
+ elseif category==1 then
+ if type(key)~="string" then
+ category=3
+ end
+ elseif category==2 then
+ if type(key)~="number" then
+ category=3
+ end
else
local tkey=type(key)
if tkey=="string" then
- category=(category==2 and 3) or 1
+ category=1
elseif tkey=="number" then
- category=(category==1 and 3) or 2
+ category=2
else
category=3
end
end
end
- if category==0 or category==3 then
+ if s<2 then
+ elseif category==3 then
sort(srt,compare)
else
sort(srt)
@@ -1250,16 +1470,52 @@ local function sortedkeys(tab)
return {}
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
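
The two new helpers split a mixed table by key type before sorting; a usage sketch with a made-up table (they are exported as table.sortedhashonly and table.sortedindexonly a few lines further down):

    local t = { alpha = 1, beta = 2, [10] = 3, [2] = 4 }
    print(table.concat(table.sortedhashonly(t),  ","))   -- alpha,beta  (string keys only, sorted)
    print(table.concat(table.sortedindexonly(t), ","))   -- 2,10        (numeric keys only, sorted)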
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
- for key,_ in next,tab do
+ for key in next,tab do
if key then
s=s+1
srt[s]=key
end
end
- sort(srt,cmp)
+ if s>1 then
+ sort(srt,cmp)
+ end
return srt
else
return {}
@@ -1268,13 +1524,15 @@ end
function table.allkeys(t)
local keys={}
for k,v in next,t do
- for k,v in next,v do
+ for k in next,v do
keys[k]=true
end
end
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
@@ -1285,19 +1543,21 @@ local function sortedhash(t,cmp)
else
s=sortedkeys(t)
end
- local n=0
local m=#s
- local function kv(s)
- if n<m then
- n=n+1
- local k=s[n]
- return k,t[k]
+ if m==1 then
+ return next,t
+ elseif m>0 then
+ local n=0
+ return function()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
end
end
- return kv,s
- else
- return nothing
end
+ return nothing
end
table.sortedhash=sortedhash
table.sortedpairs=sortedhash
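
The rewritten sortedhash keeps the old calling convention (table.sortedpairs stays an alias) but now returns next directly for single-entry tables and a lighter closure otherwise. A usage sketch with a made-up table:

    local scores = { charlie = 3, alpha = 1, bravo = 2 }
    for k, v in table.sortedhash(scores) do
        print(k, v)    -- alpha 1, bravo 2, charlie 3: deterministic, sorted key order
    end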
@@ -1439,39 +1699,36 @@ function table.fromhash(t)
end
return hsh
end
-local noquotes,hexify,handle,reduce,compact,inline,functions
+local noquotes,hexify,handle,compact,inline,functions
local reserved=table.tohash {
'and','break','do','else','elseif','end','false','for','function','if',
'in','local','nil','not','or','repeat','return','then','true','until','while',
'NaN','goto',
}
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
end
- if n==#t then
- local tt,nt={},0
- for i=1,#t do
+ if n==nt then
+ local tt={}
+ for i=1,nt do
local v=t[i]
local tv=type(v)
if tv=="number" then
- nt=nt+1
if hexify then
- tt[nt]=format("0x%04X",v)
+ tt[i]=format("0x%X",v)
else
- tt[nt]=tostring(v)
+ tt[i]=tostring(v)
end
elseif tv=="string" then
- nt=nt+1
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
- tt=nil
- break
+ return nil
end
end
return tt
@@ -1490,7 +1747,7 @@ local function do_serialize(root,name,depth,level,indexed)
local tn=type(name)
if tn=="number" then
if hexify then
- handle(format("%s[0x%04X]={",depth,name))
+ handle(format("%s[0x%X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
@@ -1507,7 +1764,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first,last=nil,0
if compact then
last=#root
@@ -1525,22 +1782,19 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local tv,tk=type(v),type(k)
+ local tv=type(v)
+ local tk=type(k)
if compact and first and tk=="number" and k>=first and k<=last then
if tv=="number" then
if hexify then
- handle(format("%s 0x%04X,",depth,v))
+ handle(format("%s 0x%X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
+ handle(format("%s %q,",depth,v))
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
handle(format("%s {},",depth))
elseif inline then
local st=simple_table(v)
@@ -1570,64 +1824,48 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="number" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
else
handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
+ handle(format("%s %s=0x%X,",depth,k,v))
else
handle(format("%s %s=%s,",depth,k,v))
end
else
if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
+ handle(format("%s [%q]=0x%X,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
+ handle(format("%s [%q]=%q,",depth,k,v))
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
+ handle(format("%s [0x%X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
@@ -1643,7 +1881,7 @@ local function do_serialize(root,name,depth,level,indexed)
if st then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
@@ -1663,7 +1901,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
else
handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
@@ -1679,7 +1917,7 @@ local function do_serialize(root,name,depth,level,indexed)
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
else
handle(format("%s [%s]=load(%q),",depth,k,f))
end
@@ -1694,7 +1932,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
@@ -1718,7 +1956,6 @@ local function serialize(_handle,root,name,specification)
noquotes=specification.noquotes
hexify=specification.hexify
handle=_handle or specification.handle or print
- reduce=specification.reduce or false
functions=specification.functions
compact=specification.compact
inline=specification.inline and compact
@@ -1735,7 +1972,6 @@ local function serialize(_handle,root,name,specification)
noquotes=false
hexify=false
handle=_handle or print
- reduce=false
compact=true
inline=true
functions=true
@@ -1748,7 +1984,7 @@ local function serialize(_handle,root,name,specification)
end
elseif tname=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("[0x%X]={",name))
else
handle("["..name.."]={")
end
@@ -1766,7 +2002,7 @@ local function serialize(_handle,root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,"",0)
end
end
@@ -1895,14 +2131,25 @@ local function identical(a,b)
end
table.identical=identical
table.are_equal=are_equal
-function table.compact(t)
- if t then
- for k,v in next,t do
- if not next(v) then
- t[k]=nil
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
end
end
end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
end
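
table.compact is now a thin wrapper around the new table.sparse, which prunes empty strings and false values (and, optionally, subtables that end up empty). A sketch with a made-up table:

    local t = { name = "demo", junk = "", flag = false, sub = { a = "", b = false } }
    local s = table.sparse(t, true)    -- { name = "demo" }           : the emptied 'sub' is dropped too
    local c = table.compact(t)         -- { name = "demo", sub = {} } : emptied subtables are kept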
function table.contains(t,v)
if t then
@@ -2000,15 +2247,17 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
function table.is_empty(t)
- return not t or not next(t)
+ return not t or next(t)==nil
end
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and next(t,next(t))==nil
end
function table.loweredkeys(t)
local l={}
@@ -2053,6 +2302,44 @@ function table.values(t,s)
return {}
end
end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n~=nil do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
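
table.filtered is a new iterator that only yields keys matching a Lua string pattern, optionally in sorted order. A usage sketch with a made-up table:

    local cfg = { texroot = "/opt/tex", texformat = "cont-en", verbose = true }
    for k, v in table.filtered(cfg, "^tex", true) do
        print(k, v)    -- texformat cont-en, then texroot /opt/tex (only keys matching "^tex")
    end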
end -- of closure
@@ -2061,7 +2348,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-io"] = package.loaded["l-io"] or true
--- original size: 8817, stripped down to: 6340
+-- original size: 8643, stripped down to: 6232
if not modules then modules={} end modules ['l-io']={
version=1.001,
@@ -2075,7 +2362,7 @@ local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
local concat=table.concat
local floor=math.floor
local type=type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator="\\",";"
else
io.fileseparator,io.pathseparator="/",":"
@@ -2368,8 +2655,6 @@ function io.readstring(f,n,m)
local str=gsub(f:read(n),"\000","")
return str
end
-if not io.i_limiter then function io.i_limiter() end end
-if not io.o_limiter then function io.o_limiter() end end
end -- of closure
@@ -2596,7 +2881,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-os"] = package.loaded["l-os"] or true
--- original size: 16023, stripped down to: 9634
+-- original size: 15832, stripped down to: 9456
if not modules then modules={} end modules ['l-os']={
version=1.001,
@@ -2670,13 +2955,10 @@ if not os.__getenv__ then
setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
end
end
-local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
+local execute=os.execute
+local iopopen=io.popen
function os.resultof(command)
- local handle=io.popen(command,"r")
+ local handle=iopopen(command,"r")
if handle then
local result=handle:read("*all") or ""
handle:close()
@@ -2686,7 +2968,7 @@ function os.resultof(command)
end
end
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
+ if find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
else
io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
@@ -2705,7 +2987,7 @@ local launchers={
unix="$BROWSER %s &> /dev/null &",
}
function os.launch(str)
- os.execute(format(launchers[os.name] or launchers.unix,str))
+ execute(format(launchers[os.name] or launchers.unix,str))
end
if not os.times then
function os.times()
@@ -2746,7 +3028,7 @@ if platform~="" then
elseif os.type=="windows" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
+ if find(architecture,"AMD64",1,true) then
platform="win64"
else
platform="mswin"
@@ -2758,9 +3040,9 @@ elseif os.type=="windows" then
elseif name=="linux" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform="linux-64"
- elseif find(architecture,"ppc") then
+ elseif find(architecture,"ppc",1,true) then
platform="linux-ppc"
else
platform="linux"
@@ -2774,9 +3056,9 @@ elseif name=="macosx" then
local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
if architecture=="" then
platform="osx-intel"
- elseif find(architecture,"i386") then
+ elseif find(architecture,"i386",1,true) then
platform="osx-intel"
- elseif find(architecture,"x86_64") then
+ elseif find(architecture,"x86_64",1,true) then
platform="osx-64"
else
platform="osx-ppc"
@@ -2788,7 +3070,7 @@ elseif name=="macosx" then
elseif name=="sunos" then
function resolvers.platform(t,k)
local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
+ if find(architecture,"sparc",1,true) then
platform="solaris-sparc"
else
platform="solaris-intel"
@@ -2800,7 +3082,7 @@ elseif name=="sunos" then
elseif name=="freebsd" then
function resolvers.platform(t,k)
local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
+ if find(architecture,"amd64",1,true) then
platform="freebsd-amd64"
else
platform="freebsd"
@@ -2812,7 +3094,7 @@ elseif name=="freebsd" then
elseif name=="kfreebsd" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform="kfreebsd-amd64"
else
platform="kfreebsd-i386"
@@ -2829,8 +3111,9 @@ else
return platform
end
end
+os.newline=name=="windows" and "\013\010" or "\010"
function resolvers.bits(t,k)
- local bits=find(os.platform,"64") and 64 or 32
+ local bits=find(os.platform,"64",1,true) and 64 or 32
os.bits=bits
return bits
end
@@ -2980,7 +3263,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-file"] = package.loaded["l-file"] or true
--- original size: 18308, stripped down to: 9948
+-- original size: 20949, stripped down to: 9945
if not modules then modules={} end modules ['l-file']={
version=1.001,
@@ -2994,41 +3277,28 @@ local file=file
if not lfs then
lfs=optionalrequire("lfs")
end
-if not lfs then
- lfs={
- getcurrentdir=function()
- return "."
- end,
- attributes=function()
- return nil
- end,
- isfile=function(name)
- local f=io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir=function(name)
- print("you need to load lfs")
- return false
- end
- }
-elseif not lfs.isfile then
- local attributes=lfs.attributes
- function lfs.isdir(name)
- return attributes(name,"mode")=="directory"
- end
- function lfs.isfile(name)
- return attributes(name,"mode")=="file"
- end
-end
local insert,concat=table.insert,table.concat
local match,find,gmatch=string.match,string.find,string.gmatch
local lpegmatch=lpeg.match
local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
local checkedsplit=string.checkedsplit
local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local tricky=S("/\\")*P(-1)
+local attributes=lfs.attributes
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir,"lfs.isdir")
+end
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+end
+function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+end
local colon=P(":")
local period=P(".")
local periods=P("..")
@@ -3230,28 +3500,30 @@ local isroot=fwslash^1*-1
local hasroot=fwslash^1
local reslasher=lpeg.replacer(S("\\/"),"/")
local deslasher=lpeg.replacer(S("\\/")^1,"/")
-function file.join(...)
- local lst={... }
- local one=lst[1]
+function file.join(one,two,three,...)
+ if not two then
+ return one=="" and one or lpegmatch(stripper,one)
+ end
+ if one=="" then
+ return lpegmatch(stripper,three and concat({ two,three,... },"/") or two)
+ end
if lpegmatch(isnetwork,one) then
local one=lpegmatch(reslasher,one)
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return one..two
else
return one.."/"..two
end
elseif lpegmatch(isroot,one) then
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return two
else
return "/"..two
end
- elseif one=="" then
- return lpegmatch(stripper,concat(lst,"/",2))
else
- return lpegmatch(deslasher,concat(lst,"/"))
+ return lpegmatch(deslasher,concat({ one,two,three,... },"/"))
end
end
local drivespec=R("az","AZ")^1*colon
@@ -3425,7 +3697,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-md5"] = package.loaded["l-md5"] or true
--- original size: 3760, stripped down to: 2088
+-- original size: 3248, stripped down to: 2266
if not modules then modules={} end modules ['l-md5']={
version=1.001,
@@ -3443,14 +3715,20 @@ if not md5 then
}
end
local md5,file=md5,file
-local gsub,format,byte=string.gsub,string.format,string.byte
-local md5sum=md5.sum
-local function convert(str,fmt)
- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
+local gsub=string.gsub
+do
+ local patterns=lpeg and lpeg.patterns
+ if patterns then
+ local bytestoHEX=patterns.bytestoHEX
+ local bytestohex=patterns.bytestohex
+ local bytestodec=patterns.bytestodec
+ local lpegmatch=lpeg.match
+ local md5sum=md5.sum
+ if not md5.HEX then function md5.HEX(str) if str then return lpegmatch(bytestoHEX,md5sum(str)) end end end
+ if not md5.hex then function md5.hex(str) if str then return lpegmatch(bytestohex,md5sum(str)) end end end
+ if not md5.dec then function md5.dec(str) if str then return lpegmatch(bytestodec,md5sum(str)) end end end
+ end
+end
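
md5.HEX, md5.hex and md5.dec are now built on the byte-conversion patterns added in l-lpeg above instead of per-character gsub calls. A quick check against the classic RFC 1321 test vector; this needs a host that provides md5.sum, as LuaTeX/mtxrun does:

    print(md5.hex("abc"))   -- 900150983cd24fb0d6963f7d28e17f72
    print(md5.HEX("abc"))   -- 900150983CD24FB0D6963F7D28E17F72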
function file.needsupdating(oldname,newname,threshold)
local oldtime=lfs.attributes(oldname,"modification")
if oldtime then
@@ -3507,7 +3785,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-url"] = package.loaded["l-url"] or true
--- original size: 11993, stripped down to: 5584
+-- original size: 12531, stripped down to: 5721
if not modules then modules={} end modules ['l-url']={
version=1.001,
@@ -3534,7 +3812,7 @@ local hexdigit=R("09","AF","af")
local plus=P("+")
local nothing=Cc("")
local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
+local escaped=(plus/" ")+escapedchar
local noslash=P("/")/""
local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
@@ -3593,19 +3871,25 @@ local splitquery=Cf (Ct("")*P { "sequence",
pair=Cg(key*equal*value),
},rawset)
local function hashed(str)
- if str=="" then
+ if not str or str=="" then
return {
scheme="invalid",
original=str,
}
end
- local s=split(str)
- local rawscheme=s[1]
- local rawquery=s[4]
- local somescheme=rawscheme~=""
- local somequery=rawquery~=""
+ local detailed=split(str)
+ local rawscheme=""
+ local rawquery=""
+ local somescheme=false
+ local somequery=false
+ if detailed then
+ rawscheme=detailed[1]
+ rawquery=detailed[4]
+ somescheme=rawscheme~=""
+ somequery=rawquery~=""
+ end
if not somescheme and not somequery then
- s={
+ return {
scheme="file",
authority="",
path=str,
@@ -3615,28 +3899,28 @@ local function hashed(str)
noscheme=true,
filename=str,
}
- else
- local authority,path,filename=s[2],s[3]
- if authority=="" then
- filename=path
- elseif path=="" then
- filename=""
- else
- filename=authority.."/"..path
- end
- s={
- scheme=rawscheme,
- authority=authority,
- path=path,
- query=lpegmatch(unescaper,rawquery),
- queries=lpegmatch(splitquery,rawquery),
- fragment=s[5],
- original=str,
- noscheme=false,
- filename=filename,
- }
end
- return s
+ local authority=detailed[2]
+ local path=detailed[3]
+ local filename=nil
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
+ else
+ filename=authority.."/"..path
+ end
+ return {
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=detailed[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
end
url.split=split
url.hasscheme=hasscheme
@@ -3670,7 +3954,7 @@ function url.construct(hash)
end
return lpegmatch(escaper,concat(fullurl))
end
-local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
+local pattern=Cs(slash^-1/""*R("az","AZ")*((S(":|")/":")+P(":"))*slash*P(1)^0)
function url.filename(filename)
local spec=hashed(filename)
local path=spec.path
@@ -3718,7 +4002,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-dir"] = package.loaded["l-dir"] or true
--- original size: 14229, stripped down to: 8740
+-- original size: 16765, stripped down to: 11003
if not modules then modules={} end modules ['l-dir']={
version=1.001,
@@ -3728,7 +4012,7 @@ if not modules then modules={} end modules ['l-dir']={
license="see context related readme files"
}
local type,select=type,select
-local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local find,gmatch,match,gsub,sub=string.find,string.gmatch,string.match,string.gsub,string.sub
local concat,insert,remove,unpack=table.concat,table.insert,table.remove,table.unpack
local lpegmatch=lpeg.match
local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
@@ -3737,53 +4021,127 @@ local dir=dir
local lfs=lfs
local attributes=lfs.attributes
local walkdir=lfs.dir
-local isdir=lfs.isdir
-local isfile=lfs.isfile
+local isdir=lfs.isdir
+local isfile=lfs.isfile
local currentdir=lfs.currentdir
local chdir=lfs.chdir
-local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
-if not isdir then
- function isdir(name)
- local a=attributes(name)
- return a and a.mode=="directory"
+local mkdir=lfs.mkdir
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";",1,true)
+if onwindows then
+ local tricky=S("/\\")*P(-1)
+ isdir=function(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+ end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
end
lfs.isdir=isdir
-end
-if not isfile then
- function isfile(name)
- local a=attributes(name)
- return a and a.mode=="file"
+ lfs.isfile=isfile
+else
+ isdir=function(name)
+ return attributes(name,"mode")=="directory"
end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
+ end
+ lfs.isdir=isdir
lfs.isfile=isfile
end
function dir.current()
return (gsub(currentdir(),"\\","/"))
end
-local lfsisdir=isdir
-local function isdir(path)
- path=gsub(path,"[/\\]+$","")
- return lfsisdir(path)
+local function glob_pattern_function(path,patt,recurse,action)
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ action(full)
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
+ end
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_function(dirs[i],patt,recurse,action)
+ end
+ end
+ end
end
-lfs.isdir=isdir
-local function globpattern(path,patt,recurse,action)
- if path=="/" then
- path=path.."."
- elseif not find(path,"/$") then
- path=path..'/'
- end
- if isdir(path) then
- for name in walkdir(path) do
- local full=path..name
- local mode=attributes(full,'mode')
- if mode=='file' then
- if find(full,patt) then
- action(full)
+local function glob_pattern_table(path,patt,recurse,result)
+ if not result then
+ result={}
+ end
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ result[#result+1]=full
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- globpattern(full,patt,recurse,action)
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_table(dirs[i],patt,recurse,result)
end
end
end
+ return result
+end
+local function globpattern(path,patt,recurse,method)
+ local kind=type(method)
+ if patt and sub(patt,1,-3)==path then
+ patt=false
+ end
+ if kind=="function" then
+ return glob_pattern_function(path,patt,recurse,method)
+ elseif kind=="table" then
+ return glob_pattern_table(path,patt,recurse,method)
+ else
+ return glob_pattern_table(path,patt,recurse,{})
+ end
end
dir.globpattern=globpattern
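
dir.globpattern now accepts a callback function, a result table, or nothing (in which case it returns a fresh table), and skips "." and ".." up front. A usage sketch; the path and pattern are made up:

    -- collect all Lua files below a tree (recursive), two equivalent call styles
    local found = dir.globpattern("/opt/texmf", "%.lua$", true)       -- returns a table of full names
    dir.globpattern("/opt/texmf", "%.lua$", true, function(fullname)  -- or drive a callback
        print(fullname)
    end)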
local function collectpattern(path,patt,recurse,result)
@@ -3795,34 +4153,40 @@ local function collectpattern(path,patt,recurse,result)
ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
end
if ok and type(scanner)=="function" then
- if not find(path,"/$") then path=path..'/' end
+ if not find(path,"/$") then
+ path=path..'/'
+ end
for name in scanner,first do
- local full=path..name
- local attr=attributes(full)
- local mode=attr.mode
- if mode=='file' then
- if find(full,patt) then
+ if name=="." then
+ elseif name==".." then
+ else
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and mode=="directory" then
+ attr.list=collectpattern(full,patt,recurse)
result[name]=attr
end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- attr.list=collectpattern(full,patt,recurse)
- result[name]=attr
end
end
end
return result
end
dir.collectpattern=collectpattern
-local separator
-if onwindows then
+local separator,pattern
+if onwindows then
local slash=S("/\\")/"/"
- pattern=Ct {
+ pattern={
[1]=(Cs(P(".")+slash^1)+Cs(R("az","AZ")*P(":")*slash^0)+Cc("./"))*V(2)*V(3),
[2]=Cs(((1-S("*?/\\"))^0*slash)^0),
[3]=Cs(P(1)^0)
}
-else
- pattern=Ct {
+else
+ pattern={
[1]=(C(P(".")+P("/")^1)+Cc("./"))*V(2)*V(3),
[2]=C(((1-S("*?/"))^0*P("/"))^0),
[3]=C(P(1)^0)
@@ -3840,10 +4204,9 @@ local function glob(str,t)
elseif isfile(str) then
t(str)
else
- local split=lpegmatch(pattern,str)
- if split then
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
local start=root..path
local result=lpegmatch(filter,start..base)
globpattern(start,result,recurse,t)
@@ -3864,16 +4227,12 @@ local function glob(str,t)
return { str }
end
else
- local split=lpegmatch(pattern,str)
- if split then
- local t=t or {}
- local action=action or function(name) t[#t+1]=name end
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
local start=root..path
local result=lpegmatch(filter,start..base)
- globpattern(start,result,recurse,action)
- return t
+ return globpattern(start,result,recurse,t)
else
return {}
end
@@ -3913,16 +4272,26 @@ end
local make_indeed=true
if onwindows then
function dir.mkdirs(...)
- local str,pth="",""
- for i=1,select("#",...) do
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
+ local n=select("#",...)
+ local str
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
end
end
+ local pth=""
local drive=false
local first,middle,last=match(str,"^(//)(//*)(.*)$")
if first then
@@ -3957,21 +4326,30 @@ if onwindows then
pth=pth.."/"..s
end
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
return pth,(isdir(pth)==true)
end
else
function dir.mkdirs(...)
- local str,pth="",""
- for i=1,select("#",...) do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
+ local n=select("#",...)
+ local str,pth
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
+ end
end
end
end
@@ -3986,7 +4364,7 @@ else
pth=pth.."/"..s
end
if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
else
@@ -3994,7 +4372,7 @@ else
for s in gmatch(str,"[^/]+") do
pth=pth.."/"..s
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
end
@@ -4002,47 +4380,51 @@ else
end
end
dir.makedirs=dir.mkdirs
-if onwindows then
- function dir.expandname(str)
- local first,nothing,last=match(str,"^(//)(//*)(.*)$")
- if first then
- first=dir.current().."/"
- end
- if not first then
- first,last=match(str,"^(//)/*(.*)$")
- end
- if not first then
- first,last=match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d=currentdir()
- if chdir(first) then
- first=dir.current()
+do
+ local chdir=sandbox and sandbox.original(chdir) or chdir
+ if onwindows then
+ local xcurrentdir=dir.current
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=xcurrentdir().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=xcurrentdir()
+ end
+ chdir(d)
end
- chdir(d)
+ end
+ if not first then
+ first,last=xcurrentdir(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
end
end
- if not first then
- first,last=dir.current(),str
- end
- last=gsub(last,"//","/")
- last=gsub(last,"/%./","/")
- last=gsub(last,"^/*","")
- first=gsub(first,"/*$","")
- if last=="" or last=="." then
- return first
- else
- return first.."/"..last
- end
- end
-else
- function dir.expandname(str)
- if not find(str,"^/") then
- str=currentdir().."/"..str
+ else
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
+ end
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
end
- str=gsub(str,"//","/")
- str=gsub(str,"/%./","/")
- str=gsub(str,"(.)/%.$","%1")
- return str
end
end
file.expandname=dir.expandname
@@ -4085,7 +4467,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
--- original size: 1809, stripped down to: 1527
+-- original size: 1850, stripped down to: 1568
if not modules then modules={} end modules ['l-boolean']={
version=1.001,
@@ -4139,11 +4521,11 @@ function string.booleanstring(str)
return str=="yes" or str=="on" or str=="t"
end
end
-function string.is_boolean(str,default)
+function string.is_boolean(str,default,strict)
if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then
return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then
return false
end
end
@@ -4157,7 +4539,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 33473, stripped down to: 14938
+-- original size: 37388, stripped down to: 15817
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4173,7 +4555,9 @@ local type=type
local char,byte,format,sub,gmatch=string.char,string.byte,string.format,string.sub,string.gmatch
local concat=table.concat
local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local lpegmatch=lpeg.match
+local patterns=lpeg.patterns
+local tabletopattern=lpeg.utfchartabletopattern
local bytepairs=string.bytepairs
local finder=lpeg.finder
local replacer=lpeg.replacer
@@ -4182,7 +4566,7 @@ local utfgmatch=utf.gmatch
local p_utftype=patterns.utftype
local p_utfstricttype=patterns.utfstricttype
local p_utfoffset=patterns.utfoffset
-local p_utf8char=patterns.utf8char
+local p_utf8char=patterns.utf8character
local p_utf8byte=patterns.utf8byte
local p_utfbom=patterns.utfbom
local p_newline=patterns.newline
@@ -4321,6 +4705,7 @@ if not utf.sub then
local pattern_zero=Cmt(p_utf8char,slide_zero)^0
local pattern_one=Cmt(p_utf8char,slide_one )^0
local pattern_two=Cmt(p_utf8char,slide_two )^0
+ local pattern_first=C(patterns.utf8character)
function utf.sub(str,start,stop)
if not start then
return str
@@ -4362,7 +4747,9 @@ if not utf.sub then
end
end
end
- if start>stop then
+ if start==1 and stop==1 then
+ return lpegmatch(pattern_first,str) or ""
+ elseif start>stop then
return ""
elseif start>1 then
b,e,n,first,last=0,0,0,start-1,stop
@@ -4381,15 +4768,52 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping)
- local pattern=Cs((p_utf8char/mapping)^0)
- return function(str)
- if not str or str=="" then
- return ""
+function utf.remapper(mapping,option)
+ local variant=type(mapping)
+ if variant=="table" then
+ if option=="dynamic" then
+ local pattern=false
+ table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ if not pattern then
+ pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ end
+ return lpegmatch(pattern,str)
+ end
+ end
+ elseif option=="pattern" then
+ return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
else
- return lpegmatch(pattern,str)
+ local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+ end
+ elseif variant=="function" then
+ if option=="pattern" then
+ return Cs((p_utf8char/mapping+p_utf8char)^0)
+ else
+ local pattern=Cs((p_utf8char/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
end
- end,pattern
+ else
+ return function(str)
+ return str or ""
+ end
+ end
end
function utf.replacer(t)
local r=replacer(t,false,false,true)
@@ -4439,190 +4863,157 @@ function utf.magic(f)
end
local utf16_to_utf8_be,utf16_to_utf8_le
local utf32_to_utf8_be,utf32_to_utf8_le
-local utf_16_be_linesplitter=patterns.utfbom_16_be^-1*lpeg.tsplitat(patterns.utf_16_be_nl)
-local utf_16_le_linesplitter=patterns.utfbom_16_le^-1*lpeg.tsplitat(patterns.utf_16_le_nl)
-if bytepairs then
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*left+right
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+local utf_16_be_getbom=patterns.utfbom_16_be^-1
+local utf_16_le_getbom=patterns.utfbom_16_le^-1
+local utf_32_be_getbom=patterns.utfbom_32_be^-1
+local utf_32_le_getbom=patterns.utfbom_32_le^-1
+local utf_16_be_linesplitter=utf_16_be_getbom*lpeg.tsplitat(patterns.utf_16_be_nl)
+local utf_16_le_linesplitter=utf_16_le_getbom*lpeg.tsplitat(patterns.utf_16_le_nl)
+local utf_32_be_linesplitter=utf_32_be_getbom*lpeg.tsplitat(patterns.utf_32_be_nl)
+local utf_32_le_linesplitter=utf_32_le_getbom*lpeg.tsplitat(patterns.utf_32_le_nl)
+local more=0
+local p_utf16_to_utf8_be=C(1)*C(1)/function(left,right)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf16_to_utf8_le=C(1)*C(1)/function(right,left)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf32_to_utf8_be=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(a)+256*256*byte(b)+256*byte(c)+byte(d))
+end
+local p_utf32_to_utf8_le=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(d)+256*256*byte(c)+256*byte(b)+byte(a))
+end
+p_utf16_to_utf8_be=P(true)/function() more=0 end*utf_16_be_getbom*Cs(p_utf16_to_utf8_be^0)
+p_utf16_to_utf8_le=P(true)/function() more=0 end*utf_16_le_getbom*Cs(p_utf16_to_utf8_le^0)
+p_utf32_to_utf8_be=P(true)/function() more=0 end*utf_32_be_getbom*Cs(p_utf32_to_utf8_be^0)
+p_utf32_to_utf8_le=P(true)/function() more=0 end*utf_32_le_getbom*Cs(p_utf32_to_utf8_le^0)
+patterns.utf16_to_utf8_be=p_utf16_to_utf8_be
+patterns.utf16_to_utf8_le=p_utf16_to_utf8_le
+patterns.utf32_to_utf8_be=p_utf32_to_utf8_be
+patterns.utf32_to_utf8_le=p_utf32_to_utf8_le
+utf16_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_be,s)
+ else
+ return s
end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*right+left
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+end
+local utf16_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_be_linesplitter,t)
end
- utf32_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*256*256*a+256*256*b
- else
- r=r+1
- result[t]=utfchar(more+256*a+b)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_be,s)
end
- return t
end
- utf32_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*b+a
- else
- r=r+1
- result[t]=utfchar(more+256*256*256*b+256*256*a)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+ return t
+end
+utf16_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_le,s)
+ else
+ return s
end
-else
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if left=="\000" then
- r=r+1
- result[r]=utfchar(byte(right))
- elseif right then
- local now=256*byte(left)+byte(right)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
+end
+local utf16_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_le,s)
end
- return t
end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
+ return t
+end
+utf32_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_be,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_be,s)
end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if right=="\000" then
- r=r+1
- result[r]=utfchar(byte(left))
- elseif right then
- local now=256*byte(right)+byte(left)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+utf32_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_le,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_le,s)
end
- return t
end
- utf32_to_utf8_le=function() return {} end
- utf32_to_utf8_be=function() return {} end
+ return t
end
+utf.utf16_to_utf8_le_t=utf16_to_utf8_le_t
+utf.utf16_to_utf8_be_t=utf16_to_utf8_be_t
+utf.utf32_to_utf8_le_t=utf32_to_utf8_le_t
+utf.utf32_to_utf8_be_t=utf32_to_utf8_be_t
utf.utf16_to_utf8_le=utf16_to_utf8_le
utf.utf16_to_utf8_be=utf16_to_utf8_be
utf.utf32_to_utf8_le=utf32_to_utf8_le
utf.utf32_to_utf8_be=utf32_to_utf8_be
-function utf.utf8_to_utf8(t)
+function utf.utf8_to_utf8_t(t)
return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
end
-function utf.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8_t(t,endian)
+ return endian and utf16_to_utf8_be_t(t) or utf16_to_utf8_le_t(t) or t
end
-function utf.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8_t(t,endian)
+ return endian and utf32_to_utf8_be_t(t) or utf32_to_utf8_le_t(t) or t
end
-local function little(c)
- local b=byte(c)
+local function little(b)
if b<0x10000 then
return char(b%256,b/256)
else
@@ -4631,8 +5022,7 @@ local function little(c)
return char(b1%256,b1/256,b2%256,b2/256)
end
end
-local function big(c)
- local b=byte(c)
+local function big(b)
if b<0x10000 then
return char(b/256,b%256)
else
@@ -4641,27 +5031,29 @@ local function big(c)
return char(b1/256,b1%256,b2/256,b2%256)
end
end
-local _,l_remap=utf.remapper(little)
-local _,b_remap=utf.remapper(big)
-function utf.utf8_to_utf16_be(str,nobom)
+local l_remap=Cs((p_utf8byte/little+P(1)/"")^0)
+local b_remap=Cs((p_utf8byte/big+P(1)/"")^0)
+local function utf8_to_utf16_be(str,nobom)
if nobom then
return lpegmatch(b_remap,str)
else
return char(254,255)..lpegmatch(b_remap,str)
end
end
-function utf.utf8_to_utf16_le(str,nobom)
+local function utf8_to_utf16_le(str,nobom)
if nobom then
return lpegmatch(l_remap,str)
else
return char(255,254)..lpegmatch(l_remap,str)
end
end
+utf.utf8_to_utf16_be=utf8_to_utf16_be
+utf.utf8_to_utf16_le=utf8_to_utf16_le
function utf.utf8_to_utf16(str,littleendian,nobom)
if littleendian then
- return utf.utf8_to_utf16_le(str,nobom)
+ return utf8_to_utf16_le(str,nobom)
else
- return utf.utf8_to_utf16_be(str,nobom)
+ return utf8_to_utf16_be(str,nobom)
end
end
local pattern=Cs (
@@ -4677,16 +5069,16 @@ function utf.xstring(s)
return format("0x%05X",type(s)=="number" and s or utfbyte(s))
end
function utf.toeight(str)
- if not str then
+ if not str or str=="" then
return nil
end
local utftype=lpegmatch(p_utfstricttype,str)
if utftype=="utf-8" then
- return sub(str,4)
- elseif utftype=="utf-16-le" then
- return utf16_to_utf8_le(str)
+ return sub(str,4)
elseif utftype=="utf-16-be" then
- return utf16_to_utf8_ne(str)
+ return utf16_to_utf8_be(str)
+ elseif utftype=="utf-16-le" then
+ return utf16_to_utf8_le(str)
else
return str
end
@@ -4765,7 +5157,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-math"] = package.loaded["l-math"] or true
--- original size: 915, stripped down to: 836
+-- original size: 974, stripped down to: 890
if not modules then modules={} end modules ['l-math']={
version=1.001,
@@ -4775,6 +5167,9 @@ if not modules then modules={} end modules ['l-math']={
license="see context related readme files"
}
local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.ceiling then
+ math.ceiling=math.ceil
+end
if not math.round then
function math.round(x) return floor(x+0.5) end
end
@@ -4802,7 +5197,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 26857, stripped down to: 15062
+-- original size: 34503, stripped down to: 18933
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -4821,25 +5216,43 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=_LUAVERSION<5.2 and load or function(str)
- return load(dump(load(str),true))
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
end
if not number then number={} end
local stripper=patterns.stripzeros
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local spaceortab=patterns.spaceortab
local function points(n)
+ n=tonumber(n)
return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n=tonumber(n)
return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
end
number.points=points
number.basepoints=basepoints
-local rubish=patterns.spaceortab^0*patterns.newline
-local anyrubish=patterns.spaceortab+patterns.newline
+local rubish=spaceortab^0*newline
+local anyrubish=spaceortab+newline
local anything=patterns.anything
-local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local stripped=(spaceortab^1/"")*newline
local leading=rubish^0/""
-local trailing=(anyrubish^1*patterns.endofstring)/""
+local trailing=(anyrubish^1*endofstring)/""
local redundant=rubish^3/"\n"
local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
function strings.collapsecrlf(str)
@@ -4885,18 +5298,44 @@ local pattern=Carg(1)/function(t)
else
return ""
end
- end+patterns.newline*Cp()/function(position)
+ end+newline*Cp()/function(position)
extra,start=0,position
end+patterns.anything
)^1)
function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end })
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
end
+strings.striplong=strings.striplines
function strings.nice(str)
str=gsub(str,"[:%-+_]+"," ")
return str
@@ -4934,10 +5373,10 @@ string.tracedchars=tracedchars
strings.tracers=tracedchars
function string.tracedchar(b)
if type(b)=="number" then
- return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ return tracedchars[b] or (utfchar(b).." (U+"..format("%05X",b)..")")
else
local c=utfbyte(b)
- return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ return tracedchars[c] or (b.." (U+"..(c and format("%05X",c) or "?????")..")")
end
end
function number.signed(i)
@@ -4972,31 +5411,58 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
-local preamble=[[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-local formattednumber = number.formatted
-local sparseexponent = number.sparseexponent
-]]
local template=[[
%s
%s
return function(%s) return %s end
]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -5035,7 +5501,7 @@ local format_i=function(f)
if f and f~="" then
return format("format('%%%si',a%s)",f,n)
else
- return format("format('%%i',a%s)",n)
+ return format("format('%%i',a%s)",n)
end
end
local format_d=format_i
@@ -5047,6 +5513,14 @@ local format_f=function(f)
n=n+1
return format("format('%%%sf',a%s)",f,n)
end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -5261,7 +5735,7 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
@@ -5272,7 +5746,6 @@ local builder=Cs { "start",
+V("j")+V("J")
+V("m")+V("M")
+V("z")
-+V("*")
)+V("*")
)*(P(-1)+Carg(1))
)^0,
@@ -5281,6 +5754,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -5315,11 +5789,12 @@ local builder=Cs { "start",
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -5328,10 +5803,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -5343,10 +5818,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -5354,8 +5841,12 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
@@ -5364,9 +5855,28 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+local pattern=Cs((newline/os.newline+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
end -- of closure
@@ -5375,7 +5885,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 23952, stripped down to: 16092
+-- original size: 25338, stripped down to: 16247
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -5388,7 +5898,7 @@ utilities=utilities or {}
utilities.tables=utilities.tables or {}
local tables=utilities.tables
local format,gmatch,gsub,sub=string.format,string.gmatch,string.gsub,string.sub
-local concat,insert,remove=table.concat,table.insert,table.remove
+local concat,insert,remove,sort=table.concat,table.insert,table.remove,table.sort
local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
@@ -5396,27 +5906,29 @@ local sortedkeys,sortedpairs=table.sortedkeys,table.sortedpairs
local formatters=string.formatters
local utftoeight=utf.toeight
local splitter=lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast)
- local composed,shortcut,t=nil,nil,{}
+function utilities.tables.definetable(target,nofirst,nolast)
+ local composed,t=nil,{}
local snippets=lpegmatch(splitter,target)
for i=1,#snippets-(nolast and 1 or 0) do
local name=snippets[i]
if composed then
- composed=shortcut.."."..name
- shortcut=shortcut.."_"..name
- t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ composed=composed.."."..name
+ t[#t+1]=formatters["if not %s then %s = { } end"](composed,composed)
else
composed=name
- shortcut=name
if not nofirst then
t[#t+1]=formatters["%s = %s or { }"](composed,composed)
end
end
end
- if nolast then
- composed=shortcut.."."..snippets[#snippets]
+ if composed then
+ if nolast then
+ composed=composed.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+ else
+ return "",target
end
- return concat(t,"\n"),composed
end
function tables.definedtable(...)
local t=_G
@@ -5443,7 +5955,7 @@ function tables.accesstable(target,root)
end
function tables.migratetable(target,v,root)
local t=root or _G
- local names=string.split(target,".")
+ local names=lpegmatch(splitter,target)
for i=1,#names-1 do
local name=names[i]
t[name]=t[name] or {}
@@ -5463,6 +5975,15 @@ function tables.removevalue(t,value)
end
end
end
+function tables.replacevalue(t,oldvalue,newvalue)
+ if oldvalue and newvalue then
+ for i=1,#t do
+ if t[i]==oldvalue then
+ t[i]=newvalue
+ end
+ end
+ end
+end
function tables.insertbeforevalue(t,value,extra)
for i=1,#t do
if t[i]==extra then
@@ -5610,7 +6131,7 @@ local f_ordered_string=formatters["%q,"]
local f_ordered_number=formatters["%s,"]
local f_ordered_boolean=formatters["%l,"]
function table.fastserialize(t,prefix)
- local r={ prefix or "return" }
+ local r={ type(prefix)=="string" and prefix or "return" }
local m=1
local function fastserialize(t,outer)
local n=#t
@@ -5807,7 +6328,8 @@ function table.serialize(root,name,specification)
local t
local n=1
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
@@ -5815,19 +6337,17 @@ function table.serialize(root,name,specification)
return nil
end
end
- if n==#t then
+ if n==nt then
local tt={}
- local nt=0
- for i=1,#t do
+ for i=1,nt do
local v=t[i]
local tv=type(v)
- nt=nt+1
if tv=="number" then
- tt[nt]=v
+ tt[i]=v
elseif tv=="string" then
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
return nil
end
@@ -5856,7 +6376,7 @@ function table.serialize(root,name,specification)
end
depth=depth+1
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first=nil
local last=0
last=#root
@@ -5875,13 +6395,13 @@ function table.serialize(root,name,specification)
local v=root[k]
local tv=type(v)
local tk=type(k)
- if first and tk=="number" and k>=first and k<=last then
+ if first and tk=="number" and k<=last and k>=first then
if tv=="number" then
n=n+1 t[n]=f_val_num(depth,v)
elseif tv=="string" then
n=n+1 t[n]=f_val_str(depth,v)
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
n=n+1 t[n]=f_val_not(depth)
else
local st=simple_table(v)
@@ -5911,13 +6431,13 @@ function table.serialize(root,name,specification)
n=n+1 t[n]=f_key_boo_value_str(depth,k,v)
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
- n=n+1 t[n]=f_key_num_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_num_value_not(depth,k)
elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_str_value_not(depth,k)
elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_boo_value_not(depth,k)
end
else
local st=simple_table(v)
@@ -5969,7 +6489,7 @@ function table.serialize(root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,1,0)
end
end
@@ -6132,7 +6652,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-prs"] = package.loaded["util-prs"] or true
--- original size: 19604, stripped down to: 13998
+-- original size: 21780, stripped down to: 15121
if not modules then modules={} end modules ['util-prs']={
version=1.001,
@@ -6154,6 +6674,8 @@ local patterns=parsers.patterns or {}
parsers.patterns=patterns
local setmetatableindex=table.setmetatableindex
local sortedhash=table.sortedhash
+local sortedkeys=table.sortedkeys
+local tohash=table.tohash
local digit=R("09")
local space=P(' ')
local equal=P("=")
@@ -6203,9 +6725,7 @@ patterns.settings_to_hash_a=pattern_a_s
patterns.settings_to_hash_b=pattern_b_s
patterns.settings_to_hash_c=pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if type(str)=="table" then
- return set
- elseif how=="strict" then
+ if how=="strict" then
return (pattern_c/set)^1
elseif how=="tolerant" then
return (pattern_b/set)^1
@@ -6214,7 +6734,9 @@ function parsers.make_settings_to_hash_pattern(set,how)
end
end
function parsers.settings_to_hash(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6223,16 +6745,16 @@ function parsers.settings_to_hash(str,existing)
else
return str
end
- elseif str and str~="" then
+ else
hash=existing or {}
lpegmatch(pattern_a_s,str)
return hash
- else
- return {}
end
end
function parsers.settings_to_hash_tolerant(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6241,16 +6763,16 @@ function parsers.settings_to_hash_tolerant(str,existing)
else
return str
end
- elseif str and str~="" then
+ else
hash=existing or {}
lpegmatch(pattern_b_s,str)
return hash
- else
- return {}
end
end
function parsers.settings_to_hash_strict(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return nil
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6263,8 +6785,6 @@ function parsers.settings_to_hash_strict(str,existing)
hash=existing or {}
lpegmatch(pattern_c_s,str)
return next(hash) and hash
- else
- return nil
end
end
local separator=comma*space^0
@@ -6272,27 +6792,46 @@ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comm
local pattern=spaces*Ct(value*(separator*value)^0)
patterns.settings_to_array=pattern
function parsers.settings_to_array(str,strict)
- if type(str)=="table" then
- return str
- elseif not str or str=="" then
+ if not str or str=="" then
return {}
+ elseif type(str)=="table" then
+ return str
elseif strict then
- if find(str,"{") then
+ if find(str,"{",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
- elseif find(str,",") then
+ elseif find(str,",",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
end
-local separator=space^0*comma*space^0
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(comma+P(-1)))))^0)
-local withvalue=Carg(1)*value/function(f,s) return f(s) end
-local pattern_a=spaces*Ct(value*(separator*value)^0)
-local pattern_b=spaces*withvalue*(separator*withvalue)^0
+local cache_a={}
+local cache_b={}
+function parsers.groupedsplitat(symbol,withaction)
+ if not symbol then
+ symbol=","
+ end
+ local pattern=(withaction and cache_b or cache_a)[symbol]
+ if not pattern then
+ local symbols=S(symbol)
+ local separator=space^0*symbols*space^0
+ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(symbols+P(-1)))))^0)
+ if withaction then
+ local withvalue=Carg(1)*value/function(f,s) return f(s) end
+ pattern=spaces*withvalue*(separator*withvalue)^0
+ cache_b[symbol]=pattern
+ else
+ pattern=spaces*Ct(value*(separator*value)^0)
+ cache_a[symbol]=pattern
+ end
+ end
+ return pattern
+end
+local pattern_a=parsers.groupedsplitat(",",false)
+local pattern_b=parsers.groupedsplitat(",",true)
function parsers.stripped_settings_to_array(str)
if not str or str=="" then
return {}
@@ -6317,8 +6856,8 @@ function parsers.add_settings_to_array(t,str)
end
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t,tn,s={},0,table.sortedkeys(h)
- omit=omit and table.tohash(omit)
+ local t,tn,s={},0,sortedkeys(h)
+ omit=omit and tohash(omit)
for i=1,#s do
local key=s[i]
if not omit or not omit[key] then
@@ -6354,12 +6893,9 @@ function parsers.array_to_string(a,separator)
return ""
end
end
-function parsers.settings_to_set(str,t)
- t=t or {}
- for s in gmatch(str,"[^, ]+") do
- t[s]=true
- end
- return t
+local pattern=Cf(Ct("")*Cg(C((1-S(", "))^1)*S(", ")^0*Cc(true))^1,rawset)
+function utilities.parsers.settings_to_set(str,t)
+ return str and lpegmatch(pattern,str) or {}
end
function parsers.simple_hash_to_string(h,separator)
local t,tn={},0
@@ -6371,12 +6907,16 @@ function parsers.simple_hash_to_string(h,separator)
end
return concat(t,separator or ",")
end
-local str=C((1-whitespace-equal)^1)
+local str=Cs(lpegpatterns.unquoted)+C((1-whitespace-equal)^1)
local setting=Cf(Carg(1)*(whitespace^0*Cg(str*whitespace^0*(equal*whitespace^0*str+Cc(""))))^1,rawset)
local splitter=setting^1
function utilities.parsers.options_to_hash(str,target)
return str and lpegmatch(splitter,str,1,target or {}) or {}
end
+local splitter=lpeg.tsplitat(" ")
+function utilities.parsers.options_to_array(str)
+ return str and lpegmatch(splitter,str) or {}
+end
local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
local pattern_a=spaces*Ct(value*(separator*value)^0)
local function repeater(n,str)
@@ -6463,7 +7003,7 @@ function parsers.keq_to_hash(str)
end
local defaultspecification={ separator=",",quote='"' }
function parsers.csvsplitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator=specification.separator
local quotechar=specification.quote
local separator=S(separator~="" and separator or ",")
@@ -6487,7 +7027,7 @@ function parsers.csvsplitter(specification)
end
end
function parsers.rfc4180splitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator=specification.separator
local quotechar=P(specification.quote)
local dquotechar=quotechar*quotechar
@@ -6498,7 +7038,7 @@ function parsers.rfc4180splitter(specification)
local field=escaped+non_escaped+Cc("")
local record=Ct(field*(separator*field)^1)
local headerline=record*Cp()
- local wholeblob=Ct((newline^-1*record)^0)
+ local wholeblob=Ct((newline^(specification.strict and -1 or 1)*record)^0)
return function(data,getheader)
if getheader then
local header,position=lpegmatch(headerline,data)
@@ -6535,20 +7075,20 @@ function parsers.stepper(str,n,action)
lpegmatch(stepper,str,1,n,action or print)
end
end
-local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
-local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
patterns.unittotex=pattern
function parsers.unittotex(str,textmode)
return lpegmatch(textmode and pattern_text or pattern_math,str)
end
-local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+anything)^0)
function parsers.unittoxml(str)
return lpegmatch(pattern,str)
end
local cache={}
-local spaces=lpeg.patterns.space^0
+local spaces=lpegpatterns.space^0
local dummy=function() end
-table.setmetatableindex(cache,function(t,k)
+setmetatableindex(cache,function(t,k)
local separator=P(k)
local value=(1-separator)^0
local pattern=spaces*C(value)*separator^0*Cp()
@@ -6613,6 +7153,18 @@ function utilities.parsers.runtime(time)
local seconds=mod(time,60)
return days,hours,minutes,seconds
end
+local spacing=whitespace^0
+local apply=P("->")
+local method=C((1-apply)^1)
+local token=lbrace*C((1-rbrace)^1)*rbrace+C(anything^1)
+local pattern=spacing*(method*spacing*apply+Carg(1))*spacing*token
+function utilities.parsers.splitmethod(str,default)
+ if str then
+ return lpegmatch(pattern,str,1,default or false)
+ else
+ return default or false,""
+ end
+end
end -- of closure
@@ -6702,7 +7254,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-set"] = package.loaded["trac-set"] or true
--- original size: 12365, stripped down to: 8799
+-- original size: 12482, stripped down to: 8864
if not modules then modules={} end modules ['trac-set']={
version=1.001,
@@ -6730,7 +7282,7 @@ function setters.initialize(filename,name,values)
local data=setter.data
if data then
for key,newvalue in next,values do
- local newvalue=is_boolean(newvalue,newvalue)
+ local newvalue=is_boolean(newvalue,newvalue,true)
local functions=data[key]
if functions then
local oldvalue=functions.value
@@ -6784,7 +7336,7 @@ local function set(t,what,newvalue)
elseif not value then
value=false
else
- value=is_boolean(value,value)
+ value=is_boolean(value,value,true)
end
w=topattern(w,true,true)
for name,functions in next,data do
@@ -6923,6 +7475,7 @@ function setters.new(name)
report=function(...) setters.report (setter,...) end,
enable=function(...) enable (setter,...) end,
disable=function(...) disable (setter,...) end,
+ reset=function(...) reset (setter,...) end,
register=function(...) register(setter,...) end,
list=function(...) list (setter,...) end,
show=function(...) show (setter,...) end,
@@ -7014,7 +7567,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-log"] = package.loaded["trac-log"] or true
--- original size: 25391, stripped down to: 16561
+-- original size: 29359, stripped down to: 20483
if not modules then modules={} end modules ['trac-log']={
version=1.001,
@@ -7023,15 +7576,18 @@ if not modules then modules={} end modules ['trac-log']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
+local next,type,select,print=next,type,select,print
local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
local format,gmatch,find=string.format,string.gmatch,string.find
local concat,insert,remove=table.concat,table.insert,table.remove
local topattern=string.topattern
-local next,type,select=next,type,select
local utfchar=utf.char
+local datetime=os.date
+local openfile=io.open
local setmetatableindex=table.setmetatableindex
local formatters=string.formatters
local texgetcount=tex and tex.getcount
+local variant="default"
logs=logs or {}
local logs=logs
local moreinfo=[[
@@ -7041,32 +7597,122 @@ maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-utilities.strings.formatters.add (
+formatters.add (
formatters,"unichr",
[["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
)
-utilities.strings.formatters.add (
+formatters.add (
formatters,"chruni",
[[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
)
local function ignore() end
setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
local report,subreport,status,settarget,setformats,settranslations
-local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters
+local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters,newline
if tex and (tex.jobname or tex.formatname) then
- local valueiskey={ __index=function(t,k) t[k]=k return k end }
- local target="term and log"
+ local function useluawrites()
+ local texio_write_nl=texio.write_nl
+ local texio_write=texio.write
+ local io_write=io.write
+ write_nl=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write_nl("log",...)
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target=="log" then
+ texio_write_nl("log",...)
+ elseif target=="term" then
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target~="none" then
+ texio_write_nl("log",target,...)
+ texio_write_nl("term","")
+ io_write(target,...)
+ end
+ end
+ write=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write("log",...)
+ io_write(...)
+ elseif target=="log" then
+ texio_write("log",...)
+ elseif target=="term" then
+ io_write(...)
+ elseif target~="none" then
+ texio_write("log",target,...)
+ io_write(target,...)
+ end
+ end
+ texio.write=write
+ texio.write_nl=write_nl
+ useluawrites=ignore
+ end
+ local whereto="both"
+ local target=nil
+ local targets=nil
+ local formats=table.setmetatableindex("self")
+ local translations=table.setmetatableindex("self")
+ local report_yes,subreport_yes,direct_yes,subdirect_yes,status_yes
+ local report_nop,subreport_nop,direct_nop,subdirect_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="none",
+ log="none",
+ file="none",
+ console="term",
+ terminal="term",
+ both="term",
+ },
+ }
+ }
logs.flush=io.flush
- local formats={} setmetatable(formats,valueiskey)
- local translations={} setmetatable(translations,valueiskey)
writer=function(...)
write_nl(target,...)
end
newline=function()
write_nl(target,"\n")
end
- local report_yes=formatters["%-15s > %s\n"]
- local report_nop=formatters["%-15s >\n"]
report=function(a,b,c,...)
if c then
write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
@@ -7078,8 +7724,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local direct_yes=formatters["%-15s > %s"]
- local direct_nop=formatters["%-15s >"]
direct=function(a,b,c,...)
if c then
return direct_yes(translations[a],formatters[formats[b]](c,...))
@@ -7091,8 +7735,6 @@ if tex and (tex.jobname or tex.formatname) then
return ""
end
end
- local subreport_yes=formatters["%-15s > %s > %s\n"]
- local subreport_nop=formatters["%-15s > %s >\n"]
subreport=function(a,s,b,c,...)
if c then
write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
@@ -7104,8 +7746,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local subdirect_yes=formatters["%-15s > %s > %s"]
- local subdirect_nop=formatters["%-15s > %s >"]
subdirect=function(a,s,b,c,...)
if c then
return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
@@ -7117,8 +7757,6 @@ if tex and (tex.jobname or tex.formatname) then
return ""
end
end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
status=function(a,b,c,...)
if c then
write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
@@ -7130,16 +7768,13 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local targets={
- logfile="log",
- log="log",
- file="log",
- console="term",
- terminal="term",
- both="term and log",
- }
- settarget=function(whereto)
- target=targets[whereto or "both"] or targets.both
+ settarget=function(askedwhereto)
+ whereto=askedwhereto or whereto or "both"
+ target=targets[whereto]
+ if not target then
+ whereto="both"
+ target=targets[whereto]
+ end
if target=="term" or target=="term and log" then
logs.flush=io.flush
else
@@ -7168,21 +7803,74 @@ if tex and (tex.jobname or tex.formatname) then
writeline(target,f(...))
end
end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- direct_yes=f.direct_yes or direct_yes
- direct_nop=f.direct_nop or direct_nop
- subdirect_yes=f.subdirect_yes or subdirect_yes
- subdirect_nop=f.subdirect_nop or subdirect_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
+ setformatters=function(specification)
+ local t=nil
+ local f=nil
+ local d=variants.default
+ if not specification then
+ elseif type(specification)=="table" then
+ t=specification.targets
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ t=v.targets
+ f=v.formats
+ variant=specification
+ end
+ end
+ targets=t or d.targets
+ target=targets[whereto] or target
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ direct_yes=f.direct_yes
+ direct_nop=f.direct_nop
+ subdirect_yes=f.subdirect_yes
+ subdirect_nop=f.subdirect_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ if variant=="ansi" then
+ useluawrites()
+ end
+ settarget(whereto)
+ end
+ setformatters(variant)
setlogfile=ignore
settimedlog=ignore
else
+ local report_yes,subreport_yes,status_yes
+ local report_nop,subreport_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ }
logs.flush=ignore
writer=function(s)
write_nl(s)
@@ -7190,8 +7878,6 @@ else
newline=function()
write_nl("\n")
end
- local report_yes=formatters["%-15s | %s"]
- local report_nop=formatters["%-15s |"]
report=function(a,b,c,...)
if c then
write_nl(report_yes(a,formatters[b](c,...)))
@@ -7203,8 +7889,6 @@ else
write_nl("")
end
end
- local subreport_yes=formatters["%-15s | %s | %s"]
- local subreport_nop=formatters["%-15s | %s |"]
subreport=function(a,sub,b,c,...)
if c then
write_nl(subreport_yes(a,sub,formatters[b](c,...)))
@@ -7216,8 +7900,6 @@ else
write_nl("")
end
end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
status=function(a,b,c,...)
if c then
write_nl(status_yes(a,formatters[b](c,...)))
@@ -7242,14 +7924,34 @@ else
writeline(f(s))
end
end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
+ setformatters=function(specification)
+ local f=nil
+ local d=variants.default
+ if specification then
+ if type(specification)=="table" then
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ f=v.formats
+ end
+ end
+ end
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ end
+ setformatters(variant)
setlogfile=function(name,keepopen)
if name and name~="" then
local localtime=os.localtime
@@ -7368,9 +8070,10 @@ local function setblocked(category,value)
v.state=value
end
else
- states=utilities.parsers.settings_to_hash(category)
+ states=utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
for c,_ in next,states do
- if data[c] then
+ local v=data[c]
+ if v then
v.state=value
else
c=topattern(c,true,true)
@@ -7501,13 +8204,13 @@ end
local simple=logs.reporter("comment")
logs.simple=simple
logs.simpleline=simple
-function logs.setprogram () end
-function logs.extendbanner() end
-function logs.reportlines () end
-function logs.reportbanner() end
-function logs.reportline () end
-function logs.simplelines () end
-function logs.help () end
+logs.setprogram=ignore
+logs.extendbanner=ignore
+logs.reportlines=ignore
+logs.reportbanner=ignore
+logs.reportline=ignore
+logs.simplelines=ignore
+logs.help=ignore
local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
local p_newline=lpeg.patterns.newline
local linewise=(
@@ -7584,10 +8287,11 @@ function logs.application(t)
end
return t
end
-function logs.system(whereto,process,jobname,category,...)
- local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+local f_syslog=formatters["%s %s => %s => %s => %s\r"]
+function logs.system(whereto,process,jobname,category,fmt,arg,...)
+ local message=f_syslog(datetime("%d/%m/%y %H:%m:%S"),process,jobname,category,arg==nil and fmt or format(fmt,arg,...))
for i=1,10 do
- local f=io.open(whereto,"a")
+ local f=openfile(whereto,"a")
if f then
f:write(message)
f:close()
@@ -7649,7 +8353,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
--- original size: 6501, stripped down to: 5156
+-- original size: 6704, stripped down to: 5343
if not modules then modules={} end modules ['trac-inf']={
version=1.001,
@@ -7659,7 +8363,7 @@ if not modules then modules={} end modules ['trac-inf']={
license="see context related readme files"
}
local type,tonumber,select=type,tonumber,select
-local format,lower=string.format,string.lower
+local format,lower,find=string.format,string.lower,string.find
local concat=table.concat
local clock=os.gettimeofday or os.clock
local setmetatableindex=table.setmetatableindex
@@ -7750,7 +8454,8 @@ function statistics.show()
if statistics.enable then
local register=statistics.register
register("used platform",function()
- return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
end)
register("luatex banner",function()
return lower(status.banner)
@@ -7763,14 +8468,23 @@ function statistics.show()
return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
end)
if jit then
- local status={ jit.status() }
- if status[1] then
- register("luajit status",function()
- return concat(status," ",2)
- end)
- end
- end
- register("current memory usage",statistics.memused)
+ local jitstatus={ jit.status() }
+ if jitstatus[1] then
+ register("luajit options",concat(jitstatus," ",2))
+ end
+ end
+ register("lua properties",function()
+ local list=status.list()
+ local hashchar=tonumber(list.luatex_hashchars)
+ local mask=lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask=="utf" and "τεχ" or "tex")
+ end)
register("runtime",statistics.runtime)
logs.newline()
for i=1,#statusinfo do
@@ -7812,15 +8526,6 @@ function statistics.tracefunction(base,tag,...)
statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
end
end
-commands=commands or {}
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
end -- of closure
@@ -7829,7 +8534,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
--- original size: 5773, stripped down to: 3453
+-- original size: 5829, stripped down to: 3501
if not modules then modules={} end modules ['trac-pro']={
version=1.001,
@@ -7846,14 +8551,16 @@ local namespaces=namespaces
local registered={}
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("reference to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("assignment to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("assignment to %a in protected namespace %a",k,name)
end
@@ -8104,7 +8811,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 3708, stripped down to: 2568
+-- original size: 3898, stripped down to: 2644
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -8184,20 +8891,22 @@ end
function debugger.disable()
debug.sethook()
end
-function traceback()
- local level=1
+local function showtraceback(rep)
+ local level=2
+ local reporter=rep or report
while true do
- local info=debug.getinfo(level,"Sl")
+ local info=getinfo(level,"Sl")
if not info then
break
elseif info.what=="C" then
- print(format("%3i : C function",level))
+ reporter("%2i : %s",level-1,"C function")
else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
end
level=level+1
end
end
+debugger.showtraceback=showtraceback
end -- of closure
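A minimal usage sketch for the reworked traceback above (illustrative only; the reporter name and the error message are invented, and utilities.debugger is assumed to be the table this closure populates):

local debugger = utilities.debugger
local report_debug = logs.reporter("system","debug")
local ok, message = pcall(function() error("boom") end)
if not ok then
  report_debug("caught: %s", message)
  -- each stack frame goes through the given reporter; without one the module's own report is used
  debugger.showtraceback(report_debug)
end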
@@ -8383,7 +9092,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
--- original size: 6251, stripped down to: 3488
+-- original size: 7100, stripped down to: 3978
if not modules then modules={} end modules ['util-tpl']={
version=1.001,
@@ -8425,7 +9134,7 @@ local sqlescape=lpeg.replacer {
{ "\r\n","\\n" },
{ "\r","\\n" },
}
-local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'"))
lpegpatterns.sqlescape=sqlescape
lpegpatterns.sqlquoted=sqlquoted
local luaescape=lpegpatterns.luaescape
@@ -8448,12 +9157,24 @@ local quotedescapers={
local luaescaper=escapers.lua
local quotedluaescaper=quotedescapers.lua
local function replacekeyunquoted(s,t,how,recurse)
- local escaper=how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
end
local function replacekeyquoted(s,t,how,recurse)
- local escaper=how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+local function replaceoptional(l,m,r,t,how,recurse)
+ local v=t[l]
+ return v and v~="" and lpegmatch(replacer,r,1,t,how or "lua",recurse or false) or ""
end
local single=P("%")
local double=P("%%")
@@ -8468,11 +9189,16 @@ local nolquoted=lquoted/''
local norquoted=rquoted/''
local nolquotedq=lquotedq/''
local norquotedq=rquotedq/''
-local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
-local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
-local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local noloptional=P("%?")/''
+local noroptional=P("?%")/''
+local nomoptional=P(":")/''
+local args=Carg(1)*Carg(2)*Carg(3)
+local key=nosingle*((C((1-nosingle )^1)*args)/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq )^1)*args)/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*args)/replacekeyunquoted)*norquoted
+local optional=noloptional*((C((1-nomoptional)^1)*nomoptional*C((1-noroptional)^1)*args)/replaceoptional)*noroptional
local any=P(1)
- replacer=Cs((unquoted+quoted+escape+key+any)^0)
+ replacer=Cs((unquoted+quoted+escape+optional+key+any)^0)
local function replace(str,mapping,how,recurse)
if mapping and str then
return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
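The %?...?% branch just added makes part of a template conditional on its key being present and non-empty. A rough sketch of the intended use (assuming, as in the regular util-tpl module, that this replacer ends up exposed as utilities.templates.replace; the query string is made up):

local replace = utilities.templates.replace
local template = "select * from persons %?name: where name = '%name%' ?%"
print(replace(template, { name = "floyd" }))  -- the where clause is kept and %name% filled in
print(replace(template, {}))                  -- the whole %?...?% fragment collapses to nothing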
@@ -8511,7 +9237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-env"] = package.loaded["util-env"] or true
--- original size: 8807, stripped down to: 5085
+-- original size: 8022, stripped down to: 5038
if not modules then modules={} end modules ['util-env']={
version=1.001,
@@ -8522,7 +9248,7 @@ if not modules then modules={} end modules ['util-env']={
}
local allocate,mark=utilities.storage.allocate,utilities.storage.mark
local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
-local unquoted,quoted=string.unquoted,string.quoted
+local unquoted,quoted,optionalquoted=string.unquoted,string.quoted,string.optionalquoted
local concat,insert,remove=table.concat,table.insert,table.remove
environment=environment or {}
local environment=environment
@@ -8635,24 +9361,14 @@ function environment.splitarguments(separator)
return before,after
end
function environment.reconstructcommandline(arg,noquote)
+ local resolveprefix=resolvers.resolve
arg=arg or environment.originalarguments
if noquote and #arg==1 then
- local a=arg[1]
- a=resolvers.resolve(a)
- a=unquoted(a)
- return a
+ return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1])
elseif #arg>0 then
local result={}
for i=1,#arg do
- local a=arg[i]
- a=resolvers.resolve(a)
- a=unquoted(a)
- a=gsub(a,'"','\\"')
- if find(a," ") then
- result[#result+1]=quoted(a)
- else
- result[#result+1]=a
- end
+ result[i]=optionalquoted(resolveprefix and resolveprefix(arg[i]) or resolveprefix)
end
return concat(result," ")
else
@@ -8708,7 +9424,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["luat-env"] = package.loaded["luat-env"] or true
--- original size: 5930, stripped down to: 4235
+-- original size: 6174, stripped down to: 4141
if not modules then modules={} end modules ['luat-env']={
version=1.001,
@@ -8786,15 +9502,13 @@ function environment.luafilechunk(filename,silent)
filename=file.replacesuffix(filename,"lua")
local fullname=environment.luafile(filename)
if fullname and fullname~="" then
- local data=luautilities.loadedluacode(fullname,strippable,filename)
- if trace_locating then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
- if trace_locating then
+ if not silent then
report_lua("unknown file %a",filename)
end
return nil
@@ -8863,7 +9577,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 42447, stripped down to: 26589
+-- original size: 45683, stripped down to: 27866
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -8878,10 +9592,10 @@ if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
-local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
local utfchar=utf.char
-local lpegmatch=lpeg.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
xml.xmlns=xml.xmlns or {}
@@ -8976,8 +9690,10 @@ local function add_end(spacing,namespace,tag)
top=stack[#stack]
if #stack<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
elseif toclose.tg~=tag then
errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
end
dt=top.dt
dt[#dt+1]=toclose
@@ -8986,10 +9702,29 @@ local function add_end(spacing,namespace,tag)
end
end
local function add_text(text)
+ local n=#dt
if cleanup and #text>0 then
- dt[#dt+1]=cleanup(text)
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..cleanup(text)
+ else
+ dt[n+1]=cleanup(text)
+ end
+ else
+ dt[1]=cleanup(text)
+ end
else
- dt[#dt+1]=text
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..text
+ else
+ dt[n+1]=text
+ end
+ else
+ dt[1]=text
+ end
end
end
local function add_special(what,spacing,text)
@@ -9021,8 +9756,10 @@ local function attribute_specification_error(str)
end
return str
end
+local badentity="&error;"
+local badentity="&"
xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
}
@@ -9043,9 +9780,10 @@ local function fromdec(s)
return formatters["d:%s"](s),true
end
end
-local rest=(1-P(";"))^0
-local many=P(1)^0
-local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local p_rest=(1-P(";"))^0
+local p_many=P(1)^0
+local p_char=lpegpatterns.utf8character
+local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
local predefined_unified={
[38]="&amp;",
[42]="&quot;",
@@ -9071,7 +9809,9 @@ local privates_u={
local privates_p={}
local privates_n={
}
-local escaped=utf.remapper(privates_u)
+local escaped=utf.remapper(privates_u,"dynamic")
+local unprivatized=utf.remapper(privates_p,"dynamic")
+xml.unprivatized=unprivatized
local function unescaped(s)
local p=privates_n[s]
if not p then
@@ -9084,9 +9824,7 @@ local function unescaped(s)
end
return p
end
-local unprivatized=utf.remapper(privates_p)
xml.privatetoken=unescaped
-xml.unprivatized=unprivatized
xml.privatecodes=privates_n
local function handle_hex_entity(str)
local h=hcache[str]
@@ -9181,7 +9919,7 @@ local function handle_any_entity(str)
report_xml("keeping entity &%s;",str)
end
if str=="" then
- a="&error;"
+ a=badentity
else
a="&"..str..";"
end
@@ -9209,7 +9947,7 @@ local function handle_any_entity(str)
if trace_entities then
report_xml("invalid entity &%s;",str)
end
- a="&error;"
+ a=badentity
acache[str]=a
else
if trace_entities then
@@ -9222,8 +9960,14 @@ local function handle_any_entity(str)
return a
end
end
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
+local function handle_end_entity(str)
+ report_xml("error in entity, %a found without ending %a",str,";")
+ return str
+end
+local function handle_crap_error(chr)
+ report_xml("error in parsing, unexpected %a found ",chr)
+ add_text(chr)
+ return chr
end
local space=S(' \r\n\t')
local open=P('<')
@@ -9239,15 +9983,15 @@ local valid=R('az','AZ','09')+S('_-.')
local name_yes=C(valid^1)*colon*C(valid^1)
local name_nop=C(P(true))*C(valid^1)
local name=name_yes+name_nop
-local utfbom=lpeg.patterns.utfbom
+local utfbom=lpegpatterns.utfbom
local spacing=C(space^0)
-local anyentitycontent=(1-open-semicolon-space-close)^0
+local anyentitycontent=(1-open-semicolon-space-close-ampersand)^0
local hexentitycontent=R("AF","af","09")^0
local decentitycontent=R("09")^0
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
)+(anyentitycontent/handle_any_entity)
-local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
local somespace=space^1
@@ -9298,16 +10042,20 @@ local instruction=(spacing*begininstruction*someinstruction*endinstruction)/func
local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
+local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
+local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
local grammar_parsed_text=P { "preamble",
preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
- children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
local grammar_unparsed_text=P { "preamble",
preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
- children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
settings=settings or {}
@@ -9341,7 +10089,6 @@ local function _xmlconvert_(data,settings)
errorstr="empty xml file"
elseif utfize or resolve then
if lpegmatch(grammar_parsed_text,data) then
- errorstr=""
else
errorstr="invalid xml file - parsed text"
end
@@ -9357,6 +10104,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+setmetatable(result,mt)
+setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -9389,8 +10138,11 @@ local function _xmlconvert_(data,settings)
end
if errorstr and errorstr~="" then
result.error=true
+ else
+ errorstr=nil
end
result.statistics={
+ errormessage=errorstr,
entities={
decimals=dcache,
hexadecimals=hcache,
@@ -9404,7 +10156,7 @@ local function _xmlconvert_(data,settings)
reported_attribute_errors,mt,errorhandler=nil,nil,nil
return result
end
-function xmlconvert(data,settings)
+local function xmlconvert(data,settings)
local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
if ok then
return result
@@ -9496,14 +10248,17 @@ function xml.checkbom(root)
insert(dt,2,"\n" )
end
end
-local function verbose_element(e,handlers)
+local f_attribute=formatters['%s=%q']
+local function verbose_element(e,handlers,escape)
local handle=handlers.handle
local serialize=handlers.serialize
local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
local ats=eat and next(eat) and {}
if ats then
+ local n=0
for k,v in next,eat do
- ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ n=n+1
+ ats[n]=f_attribute(k,escaped(v))
end
end
if ern and trace_entities and ern~=ens then
@@ -9588,23 +10343,25 @@ local function verbose_document(e,handlers)
end
end
local function serialize(e,handlers,...)
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
- if not state==true then
- return state
+ if e then
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
end
end
local function xserialize(e,handlers)
@@ -9845,7 +10602,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48956, stripped down to: 30516
+-- original size: 48229, stripped down to: 30684
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -10230,8 +10987,8 @@ local lp_builtin=P (
P("ns")/"ll.ns"
)*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
-lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
-lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
local lp_fastpos=lp_fastpos_n+lp_fastpos_p
local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
@@ -10410,7 +11167,7 @@ local function nodesettostring(set,nodetest)
if not ns or ns=="" then ns="*" end
if not tg or tg=="" then tg="*" end
tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i]=(directive and tg) or format("not(%s)",tg)
+ t[#t+1]=(directive and tg) or format("not(%s)",tg)
end
if nodetest==false then
return format("not(%s)",concat(t,"|"))
@@ -10676,7 +11433,6 @@ expressions.print=function(...)
print(...)
return true
end
-expressions.contains=find
expressions.find=find
expressions.upper=upper
expressions.lower=lower
@@ -10698,6 +11454,9 @@ function expressions.contains(str,pattern)
end
return false
end
+function xml.expressions.idstring(str)
+ return type(str)=="string" and gsub(str,"^#","") or ""
+end
local function traverse(root,pattern,handle)
local collected=applylpath(root,pattern)
if collected then
@@ -10826,8 +11585,13 @@ function xml.elements(root,pattern,reverse)
local collected=applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c=#collected+1
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
return function()
if c>1 then
c=c-1
@@ -10837,7 +11601,7 @@ function xml.elements(root,pattern,reverse)
end
end
else
- local n,c=#collected,0
+ local c=0
return function()
if c<n then
c=c+1
@@ -10852,8 +11616,13 @@ function xml.collected(root,pattern,reverse)
local collected=applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c=#collected+1
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
return function()
if c>1 then
c=c-1
@@ -10861,7 +11630,7 @@ function xml.collected(root,pattern,reverse)
end
end
else
- local n,c=#collected,0
+ local c=0
return function()
if c<n then
c=c+1
@@ -10876,7 +11645,7 @@ function xml.inspect(collection,pattern)
report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
end
end
-local function split(e)
+local function split(e)
local dt=e.dt
if dt then
for i=1,#dt do
@@ -10975,7 +11744,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
--- original size: 23804, stripped down to: 16817
+-- original size: 28786, stripped down to: 20578
if not modules then modules={} end modules ['lxml-aux']={
version=1.001,
@@ -10985,16 +11754,19 @@ if not modules then modules={} end modules ['lxml-aux']={
license="see context related readme files"
}
local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local trace_inclusions=false trackers.register("lxml.inclusions",function(v) trace_inclusions=v end)
local report_xml=logs.reporter("xml")
local xml=xml
-local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlcopy,xmlname=xml.copy,xml.name
local xmlinheritedconvert=xml.inheritedconvert
local xmlapplylpath=xml.applylpath
local xmlfilter=xml.filter
-local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local type,next,setmetatable,getmetatable=type,next,setmetatable,getmetatable
local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local striplinepatterns=utilities.strings.striplinepatterns
local function report(what,pattern,c,e)
report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
@@ -11049,13 +11821,15 @@ end
function xml.each(root,pattern,handle,reverse)
local collected=xmlapplylpath(root,pattern)
if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
+ if handle then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
end
return collected
@@ -11111,6 +11885,7 @@ local function redo_ni(d)
end
end
end
+xml.reindex=redo_ni
local function xmltoelement(whatever,root)
if not whatever then
return nil
@@ -11162,8 +11937,16 @@ function xml.delete(root,pattern)
report('deleting',pattern,c,e)
end
local d=p.dt
- remove(d,e.ni)
- redo_ni(d)
+ local ni=e.ni
+ if ni<=#d then
+ if false then
+ p.dt[ni]=""
+ else
+ remove(d,ni)
+ redo_ni(d)
+ end
+ else
+ end
end
end
end
@@ -11283,28 +12066,40 @@ xml.insertafter=insert_element
xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
xml.injectafter=inject_element
xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
-local function include(xmldata,pattern,attribute,recursive,loaddata)
+local function include(xmldata,pattern,attribute,recursive,loaddata,level)
pattern=pattern or 'include'
loaddata=loaddata or io.loaddata
local collected=xmlapplylpath(xmldata,pattern)
if collected then
+ if not level then
+ level=1
+ end
for c=1,#collected do
local ek=collected[c]
local name=nil
local ekdt=ek.dt
local ekat=ek.at
- local epdt=ek.__p__.dt
+ local ekrt=ek.__p__
+ local epdt=ekrt.dt
if not attribute or attribute=="" then
name=(type(ekdt)=="table" and ekdt[1]) or ekdt
end
if not name then
for a in gmatch(attribute or "href","([^|]+)") do
name=ekat[a]
- if name then break end
+ if name then
+ break
+ end
+ end
+ end
+ local data=nil
+ if name and name~="" then
+ data=loaddata(name) or ""
+ if trace_inclusions then
+ report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
end
- local data=(name and name~="" and loaddata(name)) or ""
- if data=="" then
+ if not data or data=="" then
epdt[ek.ni]=""
elseif ekat["parse"]=="text" then
epdt[ek.ni]=xml.escaped(data)
@@ -11314,70 +12109,127 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
epdt[ek.ni]=""
else
if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
+ include(xi,pattern,attribute,recursive,loaddata,level+1)
+ end
+ local child=xml.body(xi)
+ child.__p__=ekrt
+ child.__f__=name
+ epdt[ek.ni]=child
+ local inclusions=xmldata.settings.inclusions
+ if inclusions then
+ inclusions[#inclusions+1]=name
+ else
+ xmldata.settings.inclusions={ name }
+ end
+ if child.er then
+ local badinclusions=xmldata.settings.badinclusions
+ if badinclusions then
+ badinclusions[#badinclusions+1]=name
+ else
+ xmldata.settings.badinclusions={ name }
+ end
end
- epdt[ek.ni]=xml.body(xi)
end
end
end
end
end
xml.include=include
+function xml.inclusion(e,default)
+ while e do
+ local f=e.__f__
+ if f then
+ return f
+ else
+ e=e.__p__
+ end
+ end
+ return default
+end
+local function getinclusions(key,e,sorted)
+ while e do
+ local settings=e.settings
+ if settings then
+ local inclusions=settings[key]
+ if inclusions then
+ inclusions=table.unique(inclusions)
+ if sorted then
+ table.sort(inclusions)
+ end
+ return inclusions
+ else
+ e=e.__p__
+ end
+ else
+ e=e.__p__
+ end
+ end
+end
+function xml.inclusions(e,sorted)
+ return getinclusions("inclusions",e,sorted)
+end
+function xml.badinclusions(e,sorted)
+ return getinclusions("badinclusions",e,sorted)
+end
+local b_collapser=lpeg.patterns.b_collapser
+local m_collapser=lpeg.patterns.m_collapser
+local e_collapser=lpeg.patterns.e_collapser
+local b_stripper=lpeg.patterns.b_stripper
+local m_stripper=lpeg.patterns.m_stripper
+local e_stripper=lpeg.patterns.e_stripper
+local lpegmatch=lpeg.match
local function stripelement(e,nolines,anywhere)
local edt=e.dt
if edt then
- if anywhere then
- local t,n={},0
- for e=1,#edt do
+ local n=#edt
+ if n==0 then
+ return e
+ elseif anywhere then
+ local t={}
+ local m=0
+ for e=1,n do
local str=edt[e]
if type(str)~="string" then
- n=n+1
- t[n]=str
+ m=m+1
+ t[m]=str
elseif str~="" then
if nolines then
- str=gsub(str,"%s+"," ")
+ str=lpegmatch((n==1 and b_collapser) or (n==m and e_collapser) or m_collapser,str)
+ else
+ str=lpegmatch((n==1 and b_stripper) or (n==m and e_stripper) or m_stripper,str)
end
- str=gsub(str,"^%s*(.-)%s*$","%1")
if str~="" then
- n=n+1
- t[n]=str
+ m=m+1
+ t[m]=str
end
end
end
e.dt=t
else
- if #edt>0 then
- local str=edt[1]
- if type(str)~="string" then
- elseif str=="" then
+ local str=edt[1]
+ if type(str)=="string" then
+ if str~="" then
+ str=lpegmatch(nolines and b_collapser or b_stripper,str)
+ end
+ if str=="" then
remove(edt,1)
+ n=n-1
else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"^%s+","")
- if str=="" then
- remove(edt,1)
- else
- edt[1]=str
- end
+ edt[1]=str
end
end
- local nedt=#edt
- if nedt>0 then
- local str=edt[nedt]
- if type(str)~="string" then
- elseif str=="" then
- remove(edt)
- else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"%s+$","")
+ if n>0 then
+ str=edt[n]
+ if type(str)=="string" then
if str=="" then
remove(edt)
else
- edt[nedt]=str
+ str=lpegmatch(nolines and e_collapser or e_stripper,str)
+ if str=="" then
+ remove(edt)
+ else
+ edt[n]=str
+ end
end
end
end
@@ -11563,8 +12415,8 @@ function xml.finalizers.xml.cdata(collected)
end
return ""
end
-function xml.insertcomment(e,str,n)
- table.insert(e.dt,n or 1,{
+function xml.insertcomment(e,str,n)
+ insert(e.dt,n or 1,{
tg="@cm@",
ns="",
special=true,
@@ -11572,7 +12424,25 @@ function xml.insertcomment(e,str,n)
dt={ str },
})
end
-function xml.setcdata(e,str)
+function xml.insertcdata(e,str,n)
+ insert(e.dt,n or 1,{
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcomment(e,str,n)
+ e.dt={ {
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
+end
+function xml.setcdata(e,str)
e.dt={ {
tg="@cd@",
ns="",
@@ -11642,7 +12512,7 @@ local function recurse(e,action)
for i=1,#edt do
local str=edt[i]
if type(str)~="string" then
- recurse(str,action,recursive)
+ recurse(str,action)
elseif str~="" then
edt[i]=action(str)
end
@@ -11660,6 +12530,65 @@ function helpers.recursetext(collected,action,recursive)
end
end
end
+local specials={
+ ["@rt@"]="root",
+ ["@pi@"]="instruction",
+ ["@cm@"]="comment",
+ ["@dt@"]="declaration",
+ ["@cd@"]="cdata",
+}
+local function convert(x,strip,flat)
+ local ns=x.ns
+ local tg=x.tg
+ local at=x.at
+ local dt=x.dt
+ local node=flat and {
+ [0]=(not x.special and (ns~="" and ns..":"..tg or tg)) or nil,
+ } or {
+ _namespace=ns~="" and ns or nil,
+ _tag=not x.special and tg or nil,
+ _type=specials[tg] or "_element",
+ }
+ if at then
+ for k,v in next,at do
+ node[k]=v
+ end
+ end
+ local n=0
+ for i=1,#dt do
+ local di=dt[i]
+ if type(di)=="table" then
+ if flat and di.special then
+ else
+ di=convert(di,strip,flat)
+ if di then
+ n=n+1
+ node[n]=di
+ end
+ end
+ elseif strip then
+ di=lpegmatch(strip,di)
+ if di~="" then
+ n=n+1
+ node[n]=di
+ end
+ else
+ n=n+1
+ node[n]=di
+ end
+ end
+ if next(node) then
+ return node
+ end
+end
+function xml.totable(x,strip,flat)
+ if type(x)=="table" then
+ if strip then
+ strip=striplinepatterns[strip]
+ end
+ return convert(x,strip,flat)
+ end
+end
end -- of closure
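A small sketch of the xml.totable helper defined above (the sample document is invented; xml.convert comes from the lxml-tab closure earlier in this file):

local root = xml.convert("<doc><item n='1'>first</item><item n='2'>second</item></doc>")
-- nested form: every element becomes a table with _tag/_type/_namespace plus its attributes,
-- and its children (elements and strings) at the numeric indices
local nested = xml.totable(root)
-- flat form: the tag sits at index 0 and special nodes (comments, instructions) are skipped
local flat = xml.totable(root, false, true)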
@@ -12216,7 +13145,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-ini"] = package.loaded["data-ini"] or true
--- original size: 7898, stripped down to: 5501
+-- original size: 11085, stripped down to: 7662
if not modules then modules={} end modules ['data-ini']={
version=1.001,
@@ -12225,14 +13154,15 @@ if not modules then modules={} end modules ['data-ini']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files",
}
+local next,type,getmetatable,rawset=next,type,getmetatable,rawset
local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
-local next,type=next,type
local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local ostype,osname,osuname,ossetenv,osgetenv=os.type,os.name,os.uname,os.setenv,os.getenv
+local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
local report_initialization=logs.reporter("resolvers","initialization")
-local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
resolvers=resolvers or {}
local resolvers=resolvers
texconfig.kpse_init=false
@@ -12360,15 +13290,108 @@ if not texroot or texroot=="" then
ossetenv('TEXROOT',texroot)
end
environment.texroot=file.collapsepath(texroot)
-if profiler then
+if type(profiler)=="table" and not jit then
directives.register("system.profile",function()
profiler.start("luatex-profile.log")
end)
end
-if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local resolved={}
+local abstract={}
+local dynamic={}
+function resolvers.resetresolve(str)
+ resolved,abstract={},{}
+end
+function resolvers.allprefixes(separator)
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
+ end
+ end
+ return all
+end
+local function _resolve_(method,target)
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+function resolvers.unresolve(str)
+ return abstract[str] or str
+end
+function resolvers.setdynamic(str)
+ dynamic[str]=true
+end
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local prefix=C(R("az")^2)*P(":")
+local target=C((1-S(" \"\';,"))^1)
+local notarget=(#S(";,")+P(-1))*Cc("")
+local p_resolve=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
+local p_simple=prefix*P(-1)
+local function resolve(str)
+ if type(str)=="table" then
+ local res={}
+ for i=1,#str do
+ res[i]=resolve(str[i])
+ end
+ return res
+ end
+ local res=resolved[str]
+ if res then
+ return res
+ end
+ local simple=lpegmatch(p_simple,str)
+ local action=prefixes[simple]
+ if action then
+ local res=action(res)
+ if not dynamic[simple] then
+ resolved[simple]=res
+ abstract[res]=simple
+ end
+ return res
+ end
+ res=lpegmatch(p_resolve,str)
+ resolved[str]=res
+ abstract[res]=str
+ return res
+end
+resolvers.resolve=resolve
+if type(osuname)=="function" then
+ for k,v in next,osuname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
+ end
+ end
+end
+if ostype=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ table.setmetatablenewindex(prefixes,makepattern)
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
end
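The prefix mechanism above expands registered "name:" prefixes inside strings and caches the result (unless a prefix is marked dynamic). An illustrative sketch with a made-up prefix:

-- hypothetical prefix; the usual ones (home:, env:, ...) are registered elsewhere in the resolver code
resolvers.prefixes["selfdir"] = function(target) return "/opt/context/bin/" .. target end
print(resolvers.resolve("selfdir:mtxrun.lua"))              -- /opt/context/bin/mtxrun.lua
print(resolvers.unresolve("/opt/context/bin/mtxrun.lua"))   -- back to the abstract form once cached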
@@ -12378,7 +13401,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-exp"] = package.loaded["data-exp"] or true
--- original size: 15303, stripped down to: 9716
+-- original size: 17216, stripped down to: 10657
if not modules then modules={} end modules ['data-exp']={
version=1.001,
@@ -12392,12 +13415,16 @@ local concat,sort=table.concat,table.sort
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local Ct,Cs,Cc,Carg,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.P,lpeg.C,lpeg.S
local type,next=type,next
+local isdir=lfs.isdir
local ostype=os.type
-local collapsepath=file.collapsepath
+local collapsepath,joinpath,basename=file.collapsepath,file.join,file.basename
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_globbing=true trackers.register("resolvers.globbing",function(v) trace_globbing=v end)
local report_expansions=logs.reporter("resolvers","expansions")
+local report_globbing=logs.reporter("resolvers","globbing")
local resolvers=resolvers
+local resolveprefix=resolvers.resolve
local function f_both(a,b)
local t,n={},0
for sb in gmatch(b,"[^,]+") do
@@ -12487,35 +13514,27 @@ function resolvers.expandedpathfromlist(pathlist)
end
return newlist
end
-local cleanup=lpeg.replacer {
- { "!","" },
- { "\\","/" },
-}
-function resolvers.cleanpath(str)
- local doslashes=(P("\\")/"/"+1)^0
- local donegation=(P("!")/"" )^0
- local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
- if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return ""
- else
- return lpegmatch(cleanup,str)
+local usedhomedir=nil
+local donegation=(P("!")/"" )^0
+local doslashes=(P("\\")/"/"+1)^0
+local function expandedhome()
+ if not usedhomedir then
+ usedhomedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if usedhomedir=="~" or usedhomedir=="" or not isdir(usedhomedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent path using current path")
end
- end
- else
- local dohome=((P("~")+P("$HOME"))/homedir)^0
- local cleanup=Cs(donegation*dohome*doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
+ usedhomedir="."
end
end
- return resolvers.cleanpath(str)
+ return usedhomedir
end
-local expandhome=P("~")/"$HOME"
+local dohome=((P("~")+P("$HOME")+P("%HOME%"))/expandedhome)^0
+local cleanup=Cs(donegation*dohome*doslashes)
+resolvers.cleanpath=function(str)
+ return str and lpegmatch(cleanup,str) or ""
+end
+local expandhome=P("~")/"$HOME"
local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
local dostring=(expandhome+1 )^0
@@ -12567,46 +13586,67 @@ function resolvers.splitpath(str)
end
function resolvers.joinpath(str)
if type(str)=='table' then
- return file.joinpath(str)
+ return joinpath(str)
else
return str
end
end
local attributes,directory=lfs.attributes,lfs.dir
local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local lessweird=P(".")^1+lpeg.anywhere(S("~`#$%^&*:;\"\'||<>,?\n\r\t"))
local timer={}
local scanned={}
local nofscans=0
local scancache={}
-local function scan(files,spec,path,n,m,r)
- local full=(path=="" and spec) or (spec..path..'/')
+local fullcache={}
+local nofsharedscans=0
+local function scan(files,remap,spec,path,n,m,r,onlyone,tolerant)
+ local full=path=="" and spec or (spec..path..'/')
local dirs={}
local nofdirs=0
+ local pattern=tolerant and lessweird or weird
for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
+ if not lpegmatch(pattern,name) then
+ local mode=attributes(full..name,"mode")
+ if mode=="file" then
n=n+1
- local f=files[name]
- if f then
- if type(f)=='string' then
- files[name]={ f,path }
+ local lower=lower(name)
+ local paths=files[lower]
+ if paths then
+ if onlyone then
else
- f[#f+1]=path
+ if type(paths)=="string" then
+ files[lower]={ paths,path }
+ else
+ paths[#paths+1]=path
+ end
+ if name~=lower then
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
+ end
end
else
- files[name]=path
- local lower=lower(name)
+ files[lower]=path
if name~=lower then
- files["remap:"..lower]=name
- r=r+1
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
end
end
- elseif mode=='directory' then
+ elseif mode=="directory" then
m=m+1
nofdirs=nofdirs+1
if path~="" then
- dirs[nofdirs]=path..'/'..name
+ dirs[nofdirs]=path.."/"..name
else
dirs[nofdirs]=name
end
@@ -12616,107 +13656,69 @@ local function scan(files,spec,path,n,m,r)
if nofdirs>0 then
sort(dirs)
for i=1,nofdirs do
- files,n,m,r=scan(files,spec,dirs[i],n,m,r)
+ files,remap,n,m,r=scan(files,remap,spec,dirs[i],n,m,r,onlyonce,tolerant)
end
end
scancache[sub(full,1,-2)]=files
- return files,n,m,r
+ return files,remap,n,m,r
end
-local fullcache={}
-function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
+function resolvers.scanfiles(path,branch,usecache,onlyonce,tolerant)
+ local realpath=resolveprefix(path)
if usecache then
- local files=fullcache[realpath]
- if files then
+ local content=fullcache[realpath]
+ if content then
if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ report_expansions("using cached scan of path %a, branch %a",path,branch or path)
end
- return files
+ nofsharedscans=nofsharedscans+1
+ return content
end
end
+ statistics.starttiming(timer)
if trace_locating then
report_expansions("scanning path %a, branch %a",path,branch or path)
end
- local files,n,m,r=scan({},realpath..'/',"",0,0,0)
- files.__path__=path
- files.__files__=n
- files.__directories__=m
- files.__remappings__=r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1]=realpath
- fullcache[realpath]=files
- end
- nofscans=nofscans+1
- statistics.stoptiming(timer)
- return files
-end
-local function simplescan(files,spec,path)
- local full=(path=="" and spec) or (spec..path..'/')
- local dirs={}
- local nofdirs=0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
- if not files[name] then
- files[name]=path
- end
- elseif mode=='directory' then
- nofdirs=nofdirs+1
- if path~="" then
- dirs[nofdirs]=path..'/'..name
- else
- dirs[nofdirs]=name
- end
- end
- end
- end
- if nofdirs>0 then
- sort(dirs)
- for i=1,nofdirs do
- files=simplescan(files,spec,dirs[i])
- end
- end
- return files
-end
-local simplecache={}
-local nofsharedscans=0
-function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
- if usecache then
- local files=simplecache[realpath]
- if not files then
- files=scancache[realpath]
- if files then
- nofsharedscans=nofsharedscans+1
- end
+ local content
+ if isdir(realpath) then
+ local files,remap,n,m,r=scan({},{},realpath..'/',"",0,0,0,onlyonce,tolerant)
+ content={
+ metadata={
+ path=path,
+ files=n,
+ directories=m,
+ remappings=r,
+ },
+ files=files,
+ remap=remap,
+ }
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
- end
- return files
+ else
+ content={
+ metadata={
+ path=path,
+ files=0,
+ directories=0,
+ remappings=0,
+ },
+ files={},
+ remap={},
+ }
+ if trace_locating then
+ report_expansions("invalid path %a",realpath)
end
end
- if trace_locating then
- report_expansions("scanning path %a, branch %a",path,branch or path)
- end
- local files=simplescan({},realpath..'/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
if usecache then
scanned[#scanned+1]=realpath
- simplecache[realpath]=files
+ fullcache[realpath]=content
end
nofscans=nofscans+1
statistics.stoptiming(timer)
- return files
+ return content
+end
+function resolvers.simplescanfiles(path,branch,usecache)
+ return resolvers.scanfiles(path,branch,usecache,true,true)
end
function resolvers.scandata()
table.sort(scanned)
@@ -12727,6 +13729,52 @@ function resolvers.scandata()
paths=scanned,
}
end
+function resolvers.get_from_content(content,path,name)
+ if not content then
+ return
+ end
+ local files=content.files
+ if not files then
+ return
+ end
+ local remap=content.remap
+ if not remap then
+ return
+ end
+ if name then
+ local used=lower(name)
+ return path,remap[used] or used
+ else
+ local name=path
+ local used=lower(name)
+ local path=files[used]
+ if path then
+ return path,remap[used] or used
+ end
+ end
+end
+local nothing=function() end
+function resolvers.filtered_from_content(content,pattern)
+ if content and type(pattern)=="string" then
+ local pattern=lower(pattern)
+ local files=content.files
+ local remap=content.remap
+ if files and remap then
+ local n=next(files)
+ local function iterator()
+ while n do
+ local k=n
+ n=next(files,k)
+ if find(k,pattern) then
+ return files[k],remap and remap[k] or k
+ end
+ end
+ end
+ return iterator
+ end
+ end
+ return nothing
+end
end -- of closure
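With the rewrite above, resolvers.scanfiles returns a content table (metadata plus a lowercased files map and a remap table for mixed-case names), and the two new helpers are the access points. An illustrative sketch (the path and file names are made up):

local content = resolvers.scanfiles("/opt/texmf-local", nil, true)  -- full scan, cached
print(content.metadata.files, content.metadata.directories)
-- case-insensitive lookup; returns the stored (sub)path and the original spelling of the name
local path, name = resolvers.get_from_content(content, "MyStyle.sty")
-- iterate over all entries whose lowercased name matches a lua pattern
for path, name in resolvers.filtered_from_content(content, "%.sty$") do
  print(path, name)
end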
@@ -12735,7 +13783,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 8769, stripped down to: 6490
+-- original size: 9216, stripped down to: 6798
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -12753,10 +13801,12 @@ local formats=allocate()
local suffixes=allocate()
local dangerous=allocate()
local suffixmap=allocate()
+local usertypes=allocate()
resolvers.formats=formats
resolvers.suffixes=suffixes
resolvers.dangerous=dangerous
resolvers.suffixmap=suffixmap
+resolvers.usertypes=usertypes
local luasuffixes=utilities.lua.suffixes
local relations=allocate {
core={
@@ -12824,11 +13874,13 @@ local relations=allocate {
names={ "mp" },
variable='MPINPUTS',
suffixes={ 'mp','mpvi','mpiv','mpii' },
+ usertype=true,
},
tex={
names={ "tex" },
variable='TEXINPUTS',
- suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ suffixes={ "tex","mkvi","mkiv","mkii","cld","lfg","xml" },
+ usertype=true,
},
icc={
names={ "icc","icc profile","icc profiles" },
@@ -12844,6 +13896,7 @@ local relations=allocate {
names={ "lua" },
variable='LUAINPUTS',
suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ usertype=true,
},
lib={
names={ "lib" },
@@ -12852,11 +13905,15 @@ local relations=allocate {
},
bib={
names={ 'bib' },
+ variable='BIBINPUTS',
suffixes={ 'bib' },
+ usertype=true,
},
bst={
names={ 'bst' },
+ variable='BSTINPUTS',
suffixes={ 'bst' },
+ usertype=true,
},
fontconfig={
names={ 'fontconfig','fontconfig file','fontconfig files' },
@@ -12938,8 +13995,9 @@ function resolvers.updaterelations()
for name,relation in next,categories do
local rn=relation.names
local rv=relation.variable
- local rs=relation.suffixes
if rn and rv then
+ local rs=relation.suffixes
+ local ru=relation.usertype
for i=1,#rn do
local rni=lower(gsub(rn[i]," ",""))
formats[rni]=rv
@@ -12951,8 +14009,9 @@ function resolvers.updaterelations()
end
end
end
- end
- if rs then
+ if ru then
+ usertypes[name]=true
+ end
end
end
end
@@ -13003,7 +14062,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
--- original size: 15532, stripped down to: 11648
+-- original size: 15618, stripped down to: 11629
if not modules then modules={} end modules ['data-tmp']={
version=1.100,
@@ -13013,7 +14072,7 @@ if not modules then modules={} end modules ['data-tmp']={
license="see context related readme files"
}
local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
-local concat,serialize,serializetofile=table.concat,table.serialize,table.tofile
+local concat=table.concat
local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
local formatters=string.formatters
@@ -13022,6 +14081,7 @@ local trace_cache=false trackers.register("resolvers.cache",function(v) trace_ca
local report_caches=logs.reporter("resolvers","caches")
local report_resolvers=logs.reporter("resolvers","caching")
local resolvers=resolvers
+local cleanpath=resolvers.cleanpath
local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
local compile=utilities.lua.compile
@@ -13043,7 +14103,7 @@ caches.relocate=false
caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
local writable,readables,usedreadables=nil,{},{}
local function identify()
- local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
if texmfcaches then
for k=1,#texmfcaches do
local cachepath=texmfcaches[k]
@@ -13281,15 +14341,11 @@ end
local saveoptions={ compact=true }
function caches.savedata(filepath,filename,data,raw)
local tmaname,tmcname=caches.setluanames(filepath,filename)
- local reduce,simplify=true,true
- if raw then
- reduce,simplify=false,false
- end
data.cache_uuid=os.uuid()
if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
+ file.savedata(tmaname,table.serialize(data,true,saveoptions))
else
- serializetofile(tmaname,data,true,saveoptions)
+ table.tofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -13297,10 +14353,12 @@ local content_state={}
function caches.contentstate()
return content_state or {}
end
-function caches.loadcontent(cachename,dataname)
- local name=caches.hashed(cachename)
- local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
+function caches.loadcontent(cachename,dataname,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
if blob then
local data=blob()
@@ -13332,10 +14390,12 @@ function caches.collapsecontent(content)
end
end
end
-function caches.savecontent(cachename,dataname,content)
- local name=caches.hashed(cachename)
- local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
+function caches.savecontent(cachename,dataname,content,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
local luaname=addsuffix(filename,luasuffixes.lua)
local lucname=addsuffix(filename,luasuffixes.luc)
if trace_locating then
@@ -13350,7 +14410,7 @@ function caches.savecontent(cachename,dataname,content)
content=content,
uuid=os.uuid(),
}
- local ok=io.savedata(luaname,serialize(data,true))
+ local ok=io.savedata(luaname,table.serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
@@ -13378,7 +14438,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-met"] = package.loaded["data-met"] or true
--- original size: 5453, stripped down to: 4007
+-- original size: 5347, stripped down to: 4015
if not modules then modules={} end modules ['data-met']={
version=1.100,
@@ -13406,8 +14466,8 @@ local function splitmethod(filename)
if type(filename)=="table" then
return filename
end
- filename=file.collapsepath(filename,".")
- if not find(filename,"://") then
+ filename=file.collapsepath(filename,".")
+ if not find(filename,"://",1,true) then
return { scheme="file",path=filename,original=filename,filename=filename }
end
local specification=url.hashed(filename)
@@ -13497,7 +14557,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 61799, stripped down to: 42957
+-- original size: 67003, stripped down to: 46291
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -13507,7 +14567,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -13516,27 +14576,38 @@ local formatters=string.formatters
local filedirname=file.dirname
local filebasename=file.basename
local suffixonly=file.suffixonly
+local addsuffix=file.addsuffix
+local removesuffix=file.removesuffix
local filejoin=file.join
local collapsepath=file.collapsepath
local joinpath=file.joinpath
+local is_qualified_path=file.is_qualified_path
local allocate=utilities.storage.allocate
local settings_to_array=utilities.parsers.settings_to_array
+local getcurrentdir=lfs.currentdir
+local isfile=lfs.isfile
+local isdir=lfs.isdir
local setmetatableindex=table.setmetatableindex
local luasuffixes=utilities.lua.suffixes
-local getcurrentdir=lfs.currentdir
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
-local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_locating=false trackers .register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers .register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers .register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_paths=false trackers .register("resolvers.paths",function(v) trace_paths=v end)
+local resolve_otherwise=true directives.register("resolvers.otherwise",function(v) resolve_otherwise=v end)
local report_resolving=logs.reporter("resolvers","resolving")
local resolvers=resolvers
local expandedpathfromlist=resolvers.expandedpathfromlist
local checkedvariable=resolvers.checkedvariable
local splitconfigurationpath=resolvers.splitconfigurationpath
local methodhandler=resolvers.methodhandler
+local filtered=resolvers.filtered_from_content
+local lookup=resolvers.get_from_content
+local cleanpath=resolvers.cleanpath
+local resolveprefix=resolvers.resolve
local initializesetter=utilities.setters.initialize
local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
-resolvers.cacheversion='1.0.1'
-resolvers.configbanner=''
+resolvers.cacheversion="1.100"
+resolvers.configbanner=""
resolvers.homedir=environment.homedir
resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
resolvers.luacnfname="texmfcnf.lua"
@@ -13555,6 +14626,7 @@ end
local unset_variable="unset"
local formats=resolvers.formats
local suffixes=resolvers.suffixes
+local usertypes=resolvers.usertypes
local dangerous=resolvers.dangerous
local suffixmap=resolvers.suffixmap
resolvers.defaultsuffixes={ "tex" }
@@ -13563,7 +14635,7 @@ local instance=resolvers.instance or nil
function resolvers.setenv(key,value,raw)
if instance then
instance.environment[key]=value
- ossetenv(key,raw and value or resolvers.resolve(value))
+ ossetenv(key,raw and value or resolveprefix(value))
end
end
local function getenv(key)
@@ -13577,7 +14649,7 @@ local function getenv(key)
end
resolvers.getenv=getenv
resolvers.env=getenv
-local function resolve(k)
+local function resolvevariable(k)
return instance.expansions[k]
end
local dollarstripper=lpeg.stripper("$")
@@ -13586,19 +14658,19 @@ local backslashswapper=lpeg.replacer("\\","/")
local somevariable=P("$")/""
local somekey=C(R("az","AZ","09","__","--")^1)
local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
-local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local variableexpander=Cs((somevariable*(somekey/resolvevariable)+somethingelse)^1 )
local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
local variablecleaner=Cs((cleaner+P(1))^0)
-local somevariable=R("az","AZ","09","__","--")^1/resolve
+local somevariable=R("az","AZ","09","__","--")^1/resolvevariable
local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
local variableresolver=Cs((variable+P(1))^0)
local function expandedvariable(var)
return lpegmatch(variableexpander,var) or var
end
-function resolvers.newinstance()
- if trace_locating then
+function resolvers.newinstance()
+ if trace_locating then
report_resolving("creating instance")
- end
+ end
local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
local newinstance={
environment=environment,
@@ -13611,6 +14683,7 @@ function resolvers.newinstance()
foundintrees=allocate(),
hashes=allocate(),
hashed=allocate(),
+ pathlists=false,
specification=allocate(),
lists=allocate(),
data=allocate(),
@@ -13623,6 +14696,7 @@ function resolvers.newinstance()
savelists=true,
pattern=nil,
force_suffixes=true,
+ pathstack={},
}
setmetatableindex(variables,function(t,k)
local v
@@ -13672,8 +14746,13 @@ function resolvers.reset()
end
local function reset_hashes()
instance.lists={}
+ instance.pathlists=false
instance.found={}
end
+local function reset_caches()
+ instance.lists={}
+ instance.pathlists=false
+end
local slash=P("/")
local pathexpressionpattern=Cs (
Cc("^")*(
@@ -13725,13 +14804,13 @@ local function identify_configuration_files()
for i=1,#cnfpaths do
local filepath=cnfpaths[i]
local filename=collapsepath(filejoin(filepath,luacnfname))
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
if trace_locating then
- local fullpath=gsub(resolvers.resolve(collapsepath(filepath)),"//","/")
- local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c")
+ local fullpath=gsub(resolveprefix(collapsepath(filepath)),"//","/")
+ local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c",1,true)
report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath)
end
- if lfs.isfile(realname) then
+ if isfile(realname) then
specification[#specification+1]=filename
if trace_locating then
report_resolving("found configuration file %a",realname)
@@ -13753,7 +14832,7 @@ local function load_configuration_files()
local filename=specification[i]
local pathname=filedirname(filename)
local filename=filejoin(pathname,luacnfname)
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
local blob=loadfile(realname)
if blob then
local setups=instance.setups
@@ -13761,7 +14840,7 @@ local function load_configuration_files()
local parent=data and data.parent
if parent then
local filename=filejoin(pathname,parent)
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
local blob=loadfile(realname)
if blob then
local parentdata=blob()
@@ -13786,7 +14865,7 @@ local function load_configuration_files()
elseif variables[k]==nil then
if trace_locating and not warning then
report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
+ k,resolveprefix(filename))
warning=true
end
variables[k]=v
@@ -13846,7 +14925,7 @@ local function locate_file_databases()
local stripped=lpegmatch(inhibitstripper,path)
if stripped~="" then
local runtime=stripped==path
- path=resolvers.cleanpath(path)
+ path=cleanpath(path)
local spec=resolvers.splitmethod(stripped)
if runtime and (spec.noscheme or spec.scheme=="file") then
stripped="tree:///"..stripped
@@ -13909,8 +14988,8 @@ function resolvers.renew(hashname)
report_resolving("identifying tree %a",hashname)
end
end
- local realpath=resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
+ local realpath=resolveprefix(hashname)
+ if isdir(realpath) then
if trace_locating then
report_resolving("using path %a",realpath)
end
@@ -14011,19 +15090,53 @@ end
function resolvers.unexpandedpath(str)
return joinpath(resolvers.unexpandedpathlist(str))
end
+function resolvers.pushpath(name)
+ local pathstack=instance.pathstack
+ local lastpath=pathstack[#pathstack]
+ local pluspath=filedirname(name)
+ if lastpath then
+ lastpath=collapsepath(filejoin(lastpath,pluspath))
+ else
+ lastpath=collapsepath(pluspath)
+ end
+ insert(pathstack,lastpath)
+ if trace_paths then
+ report_resolving("pushing path %a",lastpath)
+ end
+end
+function resolvers.poppath()
+ local pathstack=instance.pathstack
+ if trace_paths and #pathstack>0 then
+ report_resolving("popping path %a",pathstack[#pathstack])
+ end
+ remove(pathstack)
+end
+function resolvers.stackpath()
+ local pathstack=instance.pathstack
+ local currentpath=pathstack[#pathstack]
+ return currentpath~="" and currentpath or nil
+end
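-- a minimal standalone sketch (not part of the patch, helper names invented for
-- illustration) of the pathstack idea behind pushpath/poppath/stackpath above: every
-- pushed name adds its directory part relative to the previous top, so nested relative
-- includes resolve against their includer
local stack={ }
local function push(name)
 local dir=name:match("^(.*)/[^/]+$") or "" -- crude stand-in for filedirname
 local last=stack[#stack]
 stack[#stack+1]=last and last~="" and (last.."/"..dir) or dir
end
local function pop() stack[#stack]=nil end
push("chapters/one.tex") -- top is now "chapters"
push("extra/two.tex")    -- top is now "chapters/extra"
pop()                    -- top is "chapters" again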
local done={}
function resolvers.resetextrapath()
local ep=instance.extra_paths
if not ep then
- ep,done={},{}
- instance.extra_paths=ep
+ done={}
+ instance.extra_paths={}
elseif #ep>0 then
- instance.lists,done={},{}
+ done={}
+ reset_caches()
end
end
function resolvers.registerextrapath(paths,subpaths)
- paths=settings_to_array(paths)
- subpaths=settings_to_array(subpaths)
+ if not subpaths or subpaths=="" then
+  if not paths or paths=="" then
+ return
+ elseif done[paths] then
+ return
+ end
+ end
+ local paths=settings_to_array(paths)
+ local subpaths=settings_to_array(subpaths)
local ep=instance.extra_paths or {}
local oldn=#ep
local newn=oldn
@@ -14038,7 +15151,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps=p.."/"..s
if not done[ps] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
+ ep[newn]=cleanpath(ps)
done[ps]=true
end
end
@@ -14048,7 +15161,7 @@ function resolvers.registerextrapath(paths,subpaths)
local p=paths[i]
if not done[p] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(p)
+ ep[newn]=cleanpath(p)
done[p]=true
end
end
@@ -14060,7 +15173,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps=ep[i].."/"..s
if not done[ps] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
+ ep[newn]=cleanpath(ps)
done[ps]=true
end
end
@@ -14069,52 +15182,70 @@ function resolvers.registerextrapath(paths,subpaths)
if newn>0 then
instance.extra_paths=ep
end
- if newn>oldn then
- instance.lists={}
+ if newn~=oldn then
+ reset_caches()
end
end
-local function made_list(instance,list)
- local ep=instance.extra_paths
- if not ep or #ep==0 then
- return list
+function resolvers.pushextrapath(path)
+ local paths=settings_to_array(path)
+ if instance.extra_stack then
+ insert(instance.extra_stack,1,paths)
else
- local done,new,newn={},{},0
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v]=true
- newn=newn+1
- new[newn]=v
- else
- break
- end
- end
- end
- for k=1,#ep do
- local v=ep[k]
+ instance.extra_stack={ paths }
+ end
+ reset_caches()
+end
+function resolvers.popextrapath()
+ if instance.extra_stack then
+ reset_caches()
+ return remove(instance.extra_stack,1)
+ end
+end
+local function made_list(instance,list,extra_too)
+ local done={}
+ local new={}
+ local newn=0
+ local function add(p)
+ for k=1,#p do
+ local v=p[k]
if not done[v] then
done[v]=true
newn=newn+1
new[newn]=v
end
end
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- done[v]=true
- newn=newn+1
- new[newn]=v
+ end
+ for k=1,#list do
+ local v=list[k]
+ if done[v] then
+ elseif find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ else
+ break
+ end
+ end
+ if extra_too then
+ local es=instance.extra_stack
+ if es and #es>0 then
+ for k=1,#es do
+ add(es[k])
end
end
- return new
+ local ep=instance.extra_paths
+ if ep and #ep>0 then
+ add(ep)
+ end
end
+ add(list)
+ return new
end
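-- an illustrative, hedged sketch (function and sample values made up) of the ordering
-- made_list produces: leading "." or "/" entries of the incoming list stay in front,
-- then, when asked for, the pushed and registered extra paths, then the rest, all
-- deduplicated
local function merged(list,extra)
 local done,new={},{}
 local function add(t)
  for i=1,#t do
   local v=t[i]
   if not done[v] then done[v]=true new[#new+1]=v end
  end
 end
 for i=1,#list do
  local v=list[i]
  if v=="." or v=="/" then add { v } else break end
 end
 add(extra)
 add(list)
 return new
end
-- merged({ ".", "texmf/tex", "texmf-local/tex" }, { "/project/styles" })
--   --> { ".", "/project/styles", "texmf/tex", "texmf-local/tex" }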
function resolvers.cleanpathlist(str)
local t=resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i]=collapsepath(resolvers.cleanpath(t[i]))
+ t[i]=collapsepath(cleanpath(t[i]))
end
end
return t
@@ -14122,22 +15253,22 @@ end
function resolvers.expandpath(str)
return joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expandedpathlist(str)
+function resolvers.expandedpathlist(str,extra_too)
if not str then
return {}
- elseif instance.savelists then
+ elseif instance.savelists then
str=lpegmatch(dollarstripper,str)
local lists=instance.lists
local lst=lists[str]
if not lst then
- local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)),extra_too)
lst=expandedpathfromlist(l)
lists[str]=lst
end
return lst
else
local lst=resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ return made_list(instance,expandedpathfromlist(lst),extra_too)
end
end
function resolvers.expandedpathlistfromvariable(str)
@@ -14148,6 +15279,13 @@ end
function resolvers.expandpathfromvariable(str)
return joinpath(resolvers.expandedpathlistfromvariable(str))
end
+function resolvers.cleanedpathlist(v)
+ local t=resolvers.expandedpathlist(v)
+ for i=1,#t do
+ t[i]=resolvers.resolve(resolvers.cleanpath(t[i]))
+ end
+ return t
+end
function resolvers.expandbraces(str)
local ori=str
local pth=expandedpathfromlist(resolvers.splitpath(ori))
@@ -14164,7 +15302,7 @@ function resolvers.registerfilehash(name,content,someerror)
end
end
local function isreadable(name)
- local readable=lfs.isfile(name)
+ local readable=isfile(name)
if trace_detail then
if readable then
report_resolving("file %a is readable",name)
@@ -14174,70 +15312,57 @@ local function isreadable(name)
end
return readable
end
-local function collect_files(names)
- local filelist,noffiles={},0
+local function collect_files(names)
+ local filelist={}
+ local noffiles=0
+ local function check(hash,root,pathname,path,name)
+ if not pathname or find(path,pathname) then
+ local variant=hash.type
+ local search=filejoin(root,path,name)
+ local result=methodhandler('concatinators',variant,root,path,name)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ end
for k=1,#names do
- local fname=names[k]
+ local filename=names[k]
if trace_detail then
- report_resolving("checking name %a",fname)
+ report_resolving("checking name %a",filename)
end
- local bname=filebasename(fname)
- local dname=filedirname(fname)
- if dname=="" or find(dname,"^%.") then
- dname=false
+ local basename=filebasename(filename)
+ local pathname=filedirname(filename)
+ if pathname=="" or find(pathname,"^%.") then
+ pathname=false
else
- dname=gsub(dname,"%*",".*")
- dname="/"..dname.."$"
+ pathname=gsub(pathname,"%*",".*")
+ pathname="/"..pathname.."$"
end
local hashes=instance.hashes
for h=1,#hashes do
local hash=hashes[h]
- local blobpath=hash.name
- local files=blobpath and instance.files[blobpath]
- if files then
+ local hashname=hash.name
+ local content=hashname and instance.files[hashname]
+ if content then
if trace_detail then
- report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ report_resolving("deep checking %a, base %a, pattern %a",hashname,basename,pathname)
end
- local blobfile=files[bname]
- if not blobfile then
- local rname="remap:"..bname
- blobfile=files[rname]
- if blobfile then
- bname=files[rname]
- blobfile=files[bname]
- end
- end
- if blobfile then
- local blobroot=files.__path__ or blobpath
- if type(blobfile)=='string' then
- if not dname or find(blobfile,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,blobfile,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
+ local path,name=lookup(content,basename)
+ if path then
+ local metadata=content.metadata
+ local realroot=metadata and metadata.path or hashname
+ if type(path)=="string" then
+ check(hash,realroot,pathname,path,name)
else
- for kk=1,#blobfile do
- local vv=blobfile[kk]
- if not dname or find(vv,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,vv,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
+ for i=1,#path do
+ check(hash,realroot,pathname,path[i],name)
end
end
end
elseif trace_locating then
- report_resolving("no match in %a (%s)",blobpath,bname)
+ report_resolving("no match in %a (%s)",hashname,basename)
end
end
end
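-- the "lookup" used above is resolvers.get_from_content; as far as can be told from its
-- use here it returns the stored path for a basename (a string, or a table of paths when
-- the name occurs in several places) plus the possibly remapped file name, roughly:
--   local path,name=lookup(content,"context.mkiv")
--   if type(path)=="table" then ... several hits ... end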
@@ -14262,7 +15387,7 @@ end
local function can_be_dir(name)
local fakepaths=instance.fakepaths
if not fakepaths[name] then
- if lfs.isdir(name) then
+ if isdir(name) then
fakepaths[name]=1
else
fakepaths[name]=2
@@ -14278,10 +15403,11 @@ local function find_analyze(filename,askedformat,allresults)
if askedformat=="" then
if ext=="" or not suffixmap[ext] then
local defaultsuffixes=resolvers.defaultsuffixes
+ local formatofsuffix=resolvers.formatofsuffix
for i=1,#defaultsuffixes do
local forcedname=filename..'.'..defaultsuffixes[i]
wantedfiles[#wantedfiles+1]=forcedname
- filetype=resolvers.formatofsuffix(forcedname)
+ filetype=formatofsuffix(forcedname)
if trace_locating then
report_resolving("forcing filetype %a",filetype)
end
@@ -14317,18 +15443,18 @@ local function find_direct(filename,allresults)
end
end
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
+ if find(filename,'*',1,true) then
if trace_locating then
report_resolving("checking wildcard %a",filename)
end
- local method,result=resolvers.findwildcardfiles(filename)
+ local result=resolvers.findwildcardfiles(filename)
if result then
return "wildcard",result
end
end
end
local function find_qualified(filename,allresults,askedformat,alsostripped)
- if not file.is_qualified_path(filename) then
+ if not is_qualified_path(filename) then
return
end
if trace_locating then
@@ -14402,33 +15528,66 @@ local function check_subpath(fname)
return fname
end
end
-local function find_intree(filename,filetype,wantedfiles,allresults)
+local function makepathlist(list,filetype)
local typespec=resolvers.variableofformat(filetype)
- local pathlist=resolvers.expandedpathlist(typespec)
- local method="intree"
+ local pathlist=resolvers.expandedpathlist(typespec,filetype and usertypes[filetype])
+ local entry={}
if pathlist and #pathlist>0 then
- local filelist=collect_files(wantedfiles)
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local prescanned=find(path,'^!!')
+   local recursive=find(path,'//$')
+ local pathname=lpegmatch(inhibitstripper,path)
+ local expression=makepathexpression(pathname)
+ local barename=gsub(pathname,"/+$","")
+ barename=resolveprefix(barename)
+ local scheme=url.hasscheme(barename)
+ local schemename=gsub(barename,"%.%*$",'')
+ entry[k]={
+ path=path,
+ pathname=pathname,
+ prescanned=prescanned,
+ recursive=recursive,
+ expression=expression,
+ barename=barename,
+ scheme=scheme,
+ schemename=schemename,
+ }
+ end
+ entry.typespec=typespec
+ list[filetype]=entry
+ else
+ list[filetype]=false
+ end
+ return entry
+end
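-- a minimal plain-Lua sketch (an assumption for illustration) of the lazy cache pattern
-- find_intree uses below: instance.pathlists is filled per filetype on first access via
-- makepathlist and flushed again by reset_caches
local pathlists=setmetatable({ },{ __index=function(t,filetype)
 local entry={ typespec=filetype } -- stand-in for what makepathlist builds
 t[filetype]=entry
 return entry
end })
local a=pathlists.tex -- computed on first access
local b=pathlists.tex -- cached: the same table as a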
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local pathlists=instance.pathlists
+ if not pathlists then
+ pathlists=setmetatableindex(allocate(),makepathlist)
+ instance.pathlists=pathlists
+ end
+ local pathlist=pathlists[filetype]
+ if pathlist then
+ local method="intree"
+ local filelist=collect_files(wantedfiles)
local dirlist={}
+ local result={}
if filelist then
for i=1,#filelist do
dirlist[i]=filedirname(filelist[i][3]).."/"
end
end
if trace_detail then
- report_resolving("checking filename %a",filename)
+ report_resolving("checking filename %a in tree",filename)
end
- local resolve=resolvers.resolve
- local result={}
for k=1,#pathlist do
- local path=pathlist[k]
- local pathname=lpegmatch(inhibitstripper,path)
- local doscan=path==pathname
- if not find (pathname,'//$') then
- doscan=false
- end
+ local entry=pathlist[k]
+ local path=entry.path
+ local pathname=entry.pathname
local done=false
if filelist then
- local expression=makepathexpression(pathname)
+ local expression=entry.expression
if trace_detail then
report_resolving("using pattern %a for path %a",expression,pathname)
end
@@ -14436,8 +15595,8 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
local fl=filelist[k]
local f=fl[2]
local d=dirlist[k]
- if find(d,expression) or find(resolve(d),expression) then
- result[#result+1]=resolve(fl[3])
+ if find(d,expression) or find(resolveprefix(d),expression) then
+ result[#result+1]=resolveprefix(fl[3])
done=true
if allresults then
if trace_detail then
@@ -14458,56 +15617,62 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
method="database"
else
method="filesystem"
- pathname=gsub(pathname,"/+$","")
- pathname=resolve(pathname)
- local scheme=url.hasscheme(pathname)
+ local scheme=entry.scheme
if not scheme or scheme=="file" then
- local pname=gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
+ local pname=entry.schemename
+ if not find(pname,"*",1,true) then
if can_be_dir(pname) then
- for k=1,#wantedfiles do
- local w=wantedfiles[k]
- local fname=check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
+ if not done and not entry.prescanned then
+ if trace_detail then
+ report_resolving("quick root scan for %a",pname)
end
- end
- if not done and doscan then
- local files=resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w=wantedfiles[k]
- local subpath=files[w]
- if not subpath or subpath=="" then
- elseif type(subpath)=="string" then
- local fname=check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
end
- else
- for i=1,#subpath do
- local sp=subpath[i]
- if sp=="" then
- else
- local fname=check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
+ end
+ end
+ if not done and entry.recursive then
+ if trace_detail then
+ report_resolving("scanning filesystem for %a",pname)
+ end
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
end
end
end
- end
- if done and not allresults then
- break
+ if done and not allresults then
+ break
+ end
end
end
end
@@ -14515,6 +15680,18 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
else
end
+ else
+ for k=1,#wantedfiles do
+ local pname=entry.barename
+ local fname=methodhandler('finders',pname.."/"..wantedfiles[k])
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
end
end
if done and not allresults then
@@ -14549,10 +15726,13 @@ local function find_otherwise(filename,filetype,wantedfiles,allresults)
local filelist=collect_files(wantedfiles)
local fl=filelist and filelist[1]
if fl then
- return "otherwise",{ resolvers.resolve(fl[3]) }
+ return "otherwise",{ resolveprefix(fl[3]) }
end
end
collect_instance_files=function(filename,askedformat,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
askedformat=askedformat or ""
filename=collapsepath(filename,".")
filename=gsub(filename,"^%./",getcurrentdir().."/")
@@ -14587,7 +15767,11 @@ collect_instance_files=function(filename,askedformat,allresults)
else
local method,result,stamp,filetype,wantedfiles
if instance.remember then
- stamp=formatters["%s--%s"](filename,askedformat)
+ if askedformat=="" then
+ stamp=formatters["%s::%s"](suffixonly(filename),filename)
+ else
+ stamp=formatters["%s::%s"](askedformat,filename)
+ end
result=stamp and instance.found[stamp]
if result then
if trace_locating then
@@ -14606,7 +15790,7 @@ collect_instance_files=function(filename,askedformat,allresults)
method,result=find_intree(filename,filetype,wantedfiles)
if not result then
method,result=find_onpath(filename,filetype,wantedfiles)
- if not result then
+ if resolve_otherwise and not result then
method,result=find_otherwise(filename,filetype,wantedfiles)
end
end
@@ -14622,7 +15806,7 @@ collect_instance_files=function(filename,askedformat,allresults)
end
if stamp then
if trace_locating then
- report_resolving("remembering file %a",filename)
+ report_resolving("remembering file %a using hash %a",filename,stamp)
end
instance.found[stamp]=result
end
@@ -14630,6 +15814,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -14649,39 +15836,30 @@ function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
- local bname,result=filebasename(filename),{}
+ local base=filebasename(filename)
+ local result={}
local hashes=instance.hashes
- local noffound=0
+ local function okay(hash,path,name)
+ local found=methodhandler('concatinators',hash.type,hash.name,path,name)
+ if found and found~="" then
+ result[#result+1]=resolveprefix(found)
+ return not allresults
+ end
+ end
for k=1,#hashes do
local hash=hashes[k]
- local files=instance.files[hash.name] or {}
- local blist=files[bname]
- if not blist then
- local rname="remap:"..bname
- blist=files[rname]
- if blist then
- bname=files[rname]
- blist=files[bname]
- end
- end
- if blist then
- if type(blist)=='string' then
- local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then
- break
- end
+ local content=instance.files[hash.name]
+ if content then
+ local path,name=lookup(content,base)
+ if not path then
+ elseif type(path)=="string" then
+ if okay(hash,path,name) then
+ return result
end
else
- for kk=1,#blist do
- local vv=blist[kk]
- local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then break end
+ for i=1,#path do
+ if okay(hash,path[i],name) then
+ return result
end
end
end
@@ -14695,64 +15873,80 @@ end
function resolvers.findgivenfile(filename)
return findgivenfiles(filename,false)[1] or ""
end
-local function doit(path,blist,bname,tag,variant,result,allresults)
- local done=false
- if blist and variant then
- local resolve=resolvers.resolve
- if type(blist)=='string' then
- if find(lower(blist),path) then
- local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- end
- else
- for kk=1,#blist do
- local vv=blist[kk]
- if find(lower(vv),path) then
- local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- if not allresults then break end
- end
- end
- end
- end
- return done
-end
local makewildcard=Cs(
(P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
)
function resolvers.wildcardpattern(pattern)
return lpegmatch(makewildcard,pattern) or pattern
end
-local function findwildcardfiles(filename,allresults,result)
- result=result or {}
+local function findwildcardfiles(filename,allresults,result)
+ local result=result or {}
local base=filebasename(filename)
local dirn=filedirname(filename)
local path=lower(lpegmatch(makewildcard,dirn) or dirn)
local name=lower(lpegmatch(makewildcard,base) or base)
- local files,done=instance.files,false
- if find(name,"%*") then
+ local files=instance.files
+ if find(name,"*",1,true) then
local hashes=instance.hashes
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
for k=1,#hashes do
local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- for kk,hh in next,files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ for found,base in filtered(files[hashname],name) do
+ if type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
end
end
end
end
else
+  local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
local hashes=instance.hashes
for k=1,#hashes do
local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+    local found,base=lookup(files[hashname],base)
+ if not found then
+ elseif type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
+ end
+ end
end
end
return result
@@ -14825,7 +16019,7 @@ end
function resolvers.dowithpath(name,func)
local pathlist=resolvers.expandedpathlist(name)
for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
+ func("^"..cleanpath(pathlist[i]))
end
end
function resolvers.dowithvariable(name,func)
@@ -14833,23 +16027,23 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
local engine=environment.ownmain or "luatex"
- local barename=file.removesuffix(name)
- local fullname=file.addsuffix(barename,"fmt")
+ local barename=removesuffix(name)
+ local fullname=addsuffix(barename,"fmt")
local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
if fmtname=="" then
fmtname=resolvers.findfile(fullname)
- fmtname=resolvers.cleanpath(fmtname)
+ fmtname=cleanpath(fmtname)
end
if fmtname~="" then
- local barename=file.removesuffix(fmtname)
- local luaname=file.addsuffix(barename,luasuffixes.lua)
- local lucname=file.addsuffix(barename,luasuffixes.luc)
- local luiname=file.addsuffix(barename,luasuffixes.lui)
- if lfs.isfile(luiname) then
+ local barename=removesuffix(fmtname)
+ local luaname=addsuffix(barename,luasuffixes.lua)
+ local lucname=addsuffix(barename,luasuffixes.luc)
+ local luiname=addsuffix(barename,luasuffixes.lui)
+ if isfile(luiname) then
return barename,luiname
- elseif lfs.isfile(lucname) then
+ elseif isfile(lucname) then
return barename,lucname
- elseif lfs.isfile(luaname) then
+ elseif isfile(luaname) then
return barename,luaname
end
end
@@ -14871,29 +16065,24 @@ function resolvers.dowithfilesintree(pattern,handle,before,after)
local hash=hashes[i]
local blobtype=hash.type
local blobpath=hash.name
- if blobpath then
+ if blobtype and blobpath then
+ local total=0
+ local checked=0
+ local done=0
if before then
before(blobtype,blobpath,pattern)
end
- local files=instance.files[blobpath]
- local total,checked,done=0,0,0
- if files then
- for k,v in table.sortedhash(files) do
- total=total+1
- if find(k,"^remap:") then
- elseif find(k,pattern) then
- if type(v)=="string" then
- checked=checked+1
- if handle(blobtype,blobpath,v,k) then
- done=done+1
- end
- else
- checked=checked+#v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done=done+1
- end
- end
+ for path,name in filtered(instance.files[blobpath],pattern) do
+ if type(path)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,path,name) then
+ done=done+1
+ end
+ else
+ checked=checked+#path
+ for i=1,#path do
+ if handle(blobtype,blobpath,path[i],name) then
+ done=done+1
end
end
end
@@ -14904,8 +16093,8 @@ function resolvers.dowithfilesintree(pattern,handle,before,after)
end
end
end
-resolvers.obsolete=resolvers.obsolete or {}
-local obsolete=resolvers.obsolete
+local obsolete=resolvers.obsolete or {}
+resolvers.obsolete=obsolete
resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
@@ -14916,7 +16105,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-pre"] = package.loaded["data-pre"] or true
--- original size: 6643, stripped down to: 4401
+-- original size: 3950, stripped down to: 2935
if not modules then modules={} end modules ['data-pre']={
version=1.001,
@@ -14926,44 +16115,51 @@ if not modules then modules={} end modules ['data-pre']={
license="see context related readme files"
}
local resolvers=resolvers
-local prefixes=utilities.storage.allocate()
-resolvers.prefixes=prefixes
-local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local prefixes=resolvers.prefixes
+local cleanpath=resolvers.cleanpath
+local findgivenfile=resolvers.findgivenfile
+local expansion=resolvers.expansion
local getenv=resolvers.getenv
-local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
-local joinpath,basename,dirname=file.join,file.basename,file.dirname
-local getmetatable,rawset,type=getmetatable,rawset,type
+local basename=file.basename
+local dirname=file.dirname
+local joinpath=file.join
+local isfile=lfs.isfile
prefixes.environment=function(str)
return cleanpath(expansion(str))
end
-prefixes.relative=function(str,n)
- if io.exists(str) then
- elseif io.exists("./"..str) then
- str="./"..str
- else
- local p="../"
- for i=1,n or 2 do
- if io.exists(p..str) then
- str=p..str
- break
- else
- p=p.."../"
+local function relative(str,n)
+ if not isfile(str) then
+ local pstr="./"..str
+ if isfile(pstr) then
+ str=pstr
+ else
+ local p="../"
+ for i=1,n or 2 do
+ local pstr=p..str
+ if isfile(pstr) then
+ str=pstr
+ break
+ else
+ p=p.."../"
+ end
end
end
end
return cleanpath(str)
end
+local function locate(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(fullname~="" and fullname or str)
+end
+prefixes.relative=relative
+prefixes.locate=locate
prefixes.auto=function(str)
- local fullname=prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname=prefixes.locate(str)
+ local fullname=relative(str)
+ if not isfile(fullname) then
+ fullname=locate(str)
end
return fullname
end
-prefixes.locate=function(str)
- local fullname=findgivenfile(str) or ""
- return cleanpath((fullname~="" and fullname) or str)
-end
prefixes.filename=function(str)
local fullname=findgivenfile(str) or ""
return cleanpath(basename((fullname~="" and fullname) or str))
@@ -14984,6 +16180,13 @@ end
prefixes.home=function(str)
return cleanpath(joinpath(getenv('HOME'),str))
end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
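-- a hedged usage sketch (file name made up): a "prefix:" in a name is replaced by the
-- result of the matching handler, so for instance
--   resolvers.resolve("home:texmf/tex/context/user/whatever.tex")
-- is expected to call prefixes.home("texmf/tex/context/user/whatever.tex"), which joins
-- the rest with the HOME environment variable and cleans the path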
local function toppath()
local inputstack=resolvers.inputstack
if not inputstack then
@@ -14996,98 +16199,22 @@ local function toppath()
return pathname
end
end
-resolvers.toppath=toppath
-prefixes.toppath=function(str)
- return cleanpath(joinpath(toppath(),str))
-end
-prefixes.env=prefixes.environment
-prefixes.rel=prefixes.relative
-prefixes.loc=prefixes.locate
-prefixes.kpse=prefixes.locate
-prefixes.full=prefixes.locate
-prefixes.file=prefixes.filename
-prefixes.path=prefixes.pathname
-function resolvers.allprefixes(separator)
- local all=table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i]=all[i]..":"
- end
- end
- return all
-end
-local function _resolve_(method,target)
- local action=prefixes[method]
- if action then
- return action(target)
- else
- return method..":"..target
- end
-end
-local resolved,abstract={},{}
-function resolvers.resetresolve(str)
- resolved,abstract={},{}
-end
-local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
-local prefix=C(R("az")^2)*P(":")
-local target=C((1-S(" \"\';,"))^1)
-local notarget=(#S(";,")+P(-1))*Cc("")
-local pattern=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
-local function resolve(str)
- if type(str)=="table" then
- local t={}
- for i=1,#str do
- t[i]=resolve(str[i])
- end
- return t
+local function jobpath()
+ local path=resolvers.stackpath()
+ if not path or path=="" then
+ return "."
else
- local res=resolved[str]
- if not res then
- res=lpegmatch(pattern,str)
- resolved[str]=res
- abstract[res]=str
- end
- return res
- end
-end
-local function unresolve(str)
- return abstract[str] or str
-end
-resolvers.resolve=resolve
-resolvers.unresolve=unresolve
-if type(os.uname)=="function" then
- for k,v in next,os.uname() do
- if not prefixes[k] then
- prefixes[k]=function() return v end
- end
- end
-end
-if os.type=="unix" then
- local pattern
- local function makepattern(t,k,v)
- if t then
- rawset(t,k,v)
- end
- local colon=P(":")
- for k,v in table.sortedpairs(prefixes) do
- if p then
- p=P(k)+p
- else
- p=P(k)
- end
- end
- pattern=Cs((p*colon+colon/";"+P(1))^0)
- end
- makepattern()
- getmetatable(prefixes).__newindex=makepattern
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-else
- function resolvers.repath(str)
- return str
+ return path
end
end
+resolvers.toppath=toppath
+resolvers.jobpath=jobpath
+prefixes.toppath=function(str) return cleanpath(joinpath(toppath(),str)) end
+prefixes.jobpath=function(str) return cleanpath(joinpath(jobpath(),str)) end
+resolvers.setdynamic("toppath")
+resolvers.setdynamic("jobpath")
+prefixes.jobfile=prefixes.jobpath
+resolvers.setdynamic("jobfile")
end -- of closure
@@ -15149,7 +16276,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-fil"] = package.loaded["data-fil"] or true
--- original size: 3801, stripped down to: 3231
+-- original size: 3863, stripped down to: 3310
if not modules then modules={} end modules ['data-fil']={
version=1.001,
@@ -15161,30 +16288,31 @@ if not modules then modules={} end modules ['data-fil']={
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local report_files=logs.reporter("resolvers","files")
local resolvers=resolvers
+local resolveprefix=resolvers.resolve
local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name=specification.filename
- local realname=resolvers.resolve(name)
+ local filename=specification.filename
+ local realname=resolveprefix(filename)
if realname and realname~='' and lfs.isdir(realname) then
if trace_locating then
- report_files("file locator %a found as %a",name,realname)
+ report_files("file locator %a found as %a",filename,realname)
end
- resolvers.appendhash('file',name,true)
+ resolvers.appendhash('file',filename,true)
elseif trace_locating then
- report_files("file locator %a not found",name)
+ report_files("file locator %a not found",filename)
end
end
function hashers.file(specification)
- local name=specification.filename
- local content=caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local pathname=specification.filename
+ local content=caches.loadcontent(pathname,'files')
+ resolvers.registerfilehash(pathname,content,content==nil)
end
function generators.file(specification)
- local path=specification.filename
- local content=resolvers.scanfiles(path,false,true)
- resolvers.registerfilehash(path,content,true)
+ local pathname=specification.filename
+ local content=resolvers.scanfiles(pathname,false,true)
+ resolvers.registerfilehash(pathname,content,true)
end
concatinators.file=file.join
function finders.file(specification,filetype)
@@ -15375,7 +16503,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-use"] = package.loaded["data-use"] or true
--- original size: 3913, stripped down to: 2998
+-- original size: 3899, stripped down to: 2984
if not modules then modules={} end modules ['data-use']={
version=1.001,
@@ -15421,7 +16549,7 @@ end
statistics.register("used config file",function() return caches.configfiles() end)
statistics.register("used cache path",function() return caches.usedpaths() end)
function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.list().banner
+ local enginebanner=status.banner
if formatbanner and enginebanner and sourcefile then
local luvname=file.replacesuffix(texname,"luv")
local luvdata={
@@ -15434,7 +16562,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile)
end
end
function statistics.checkfmtstatus(texname)
- local enginebanner=status.list().banner
+ local enginebanner=status.banner
if enginebanner and texname then
local luvname=file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
@@ -15466,7 +16594,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-zip"] = package.loaded["data-zip"] or true
--- original size: 8489, stripped down to: 6757
+-- original size: 8772, stripped down to: 6841
if not modules then modules={} end modules ['data-zip']={
version=1.001,
@@ -15485,16 +16613,6 @@ zip.archives=zip.archives or {}
local archives=zip.archives
zip.registeredfiles=zip.registeredfiles or {}
local registeredfiles=zip.registeredfiles
-local limited=false
-directives.register("system.inputmode",function(v)
- if not limited then
- local i_limiter=io.i_limiter(v)
- if i_limiter then
- zip.open=i_limiter.protect(zip.open)
- limited=true
- end
- end
-end)
local function validzip(str)
if not find(str,"^zip://") then
return "zip:///"..str
@@ -15509,7 +16627,7 @@ function zip.openarchive(name)
local arch=archives[name]
if not arch then
local full=resolvers.findfile(name) or ""
- arch=(full~="" and zip.open(full)) or false
+ arch=full~="" and zip.open(full) or false
archives[name]=arch
end
return arch
@@ -15668,31 +16786,42 @@ function resolvers.usezipfile(archive)
end
end
function resolvers.registerzipfile(z,tree)
- local files,filter={},""
- if tree=="" then
- filter="^(.+)/(.-)$"
- else
- filter=format("^%s/(.+)/(.-)$",tree)
- end
+ local names={}
+ local files={}
+ local remap={}
+ local n=0
+ local filter=tree=="" and "^(.+)/(.-)$" or format("^%s/(.+)/(.-)$",tree)
+ local register=resolvers.registerfile
if trace_locating then
report_zip("registering: using filter %a",filter)
end
- local register,n=resolvers.registerfile,0
for i in z:files() do
- local path,name=match(i.filename,filter)
- if path then
- if name and name~='' then
- register(files,name,path)
- n=n+1
- else
+ local filename=i.filename
+ local path,name=match(filename,filter)
+ if not path then
+ n=n+1
+ register(names,filename,"")
+ local usedname=lower(filename)
+ files[usedname]=""
+ if usedname~=filename then
+ remap[usedname]=filename
end
- else
- register(files,i.filename,'')
+ elseif name and name~="" then
n=n+1
+ register(names,name,path)
+ local usedname=lower(name)
+ files[usedname]=path
+ if usedname~=name then
+ remap[usedname]=name
+ end
+ else
end
end
report_zip("registering: %s files registered",n)
- return files
+ return {
+ files=files,
+ remap=remap,
+ }
end
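-- an illustrative sketch (sample data made up) of the table registerzipfile now returns:
-- lookups go through the lowercased "files" map and "remap" recovers the original
-- mixed-case name
local content={
 files={ ["readme.txt"]="doc/path" },
 remap={ ["readme.txt"]="README.txt" },
}
local function realname(name)
 local used=string.lower(name)
 return content.remap[used] or used,content.files[used]
end
print(realname("README.txt")) --> README.txt  doc/path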
@@ -15702,7 +16831,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tre"] = package.loaded["data-tre"] or true
--- original size: 2508, stripped down to: 2074
+-- original size: 8479, stripped down to: 5580
if not modules then modules={} end modules ['data-tre']={
version=1.001,
@@ -15711,42 +16840,64 @@ if not modules then modules={} end modules ['data-tre']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local find,gsub,format=string.find,string.gsub,string.format
+local find,gsub,lower=string.find,string.gsub,string.lower
+local basename,dirname,joinname=file.basename,file.dirname,file .join
+local globdir,isdir,isfile=dir.glob,lfs.isdir,lfs.isfile
+local P,lpegmatch=lpeg.P,lpeg.match
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local report_trees=logs.reporter("resolvers","trees")
local resolvers=resolvers
-local done,found,notfound={},{},resolvers.finders.notfound
-function resolvers.finders.tree(specification)
+local resolveprefix=resolvers.resolve
+local notfound=resolvers.finders.notfound
+local lookup=resolvers.get_from_content
+local collectors={}
+local found={}
+function resolvers.finders.tree(specification)
local spec=specification.filename
- local fnd=found[spec]
- if fnd==nil then
+ local okay=found[spec]
+ if okay==nil then
if spec~="" then
- local path,name=file.dirname(spec),file.basename(spec)
- if path=="" then path="." end
- local hash=done[path]
- if not hash then
- local pattern=path.."/*"
- hash=dir.glob(pattern)
- done[path]=hash
+ local path=dirname(spec)
+ local name=basename(spec)
+ if path=="" then
+ path="."
+ end
+ local names=collectors[path]
+ if not names then
+ local pattern=find(path,"/%*+$") and path or (path.."/*")
+ names=globdir(pattern)
+ collectors[path]=names
end
local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
- for k=1,#hash do
- local v=hash[k]
- if find(v,pattern) then
- found[spec]=v
- return v
+ for i=1,#names do
+ local fullname=names[i]
+ if find(fullname,pattern) then
+ found[spec]=fullname
+ return fullname
+ end
+ end
+ local pattern=lower(pattern)
+ for i=1,#names do
+ local fullname=lower(names[i])
+ if find(fullname,pattern) then
+ if isfile(fullname) then
+ found[spec]=fullname
+ return fullname
+ else
+ break
+ end
end
end
end
- fnd=notfound()
- found[spec]=fnd
+ okay=notfound()
+ found[spec]=okay
end
- return fnd
+ return okay
end
function resolvers.locators.tree(specification)
local name=specification.filename
- local realname=resolvers.resolve(name)
- if realname and realname~='' and lfs.isdir(realname) then
+ local realname=resolveprefix(name)
+ if realname and realname~='' and isdir(realname) then
if trace_locating then
report_trees("locator %a found",realname)
end
@@ -15757,16 +16908,110 @@ function resolvers.locators.tree(specification)
end
function resolvers.hashers.tree(specification)
local name=specification.filename
- if trace_locating then
- report_trees("analysing %a",name)
- end
+ report_trees("analyzing %a",name)
resolvers.methodhandler("hashers",name)
resolvers.generators.file(specification)
end
-resolvers.concatinators.tree=resolvers.concatinators.file
-resolvers.generators.tree=resolvers.generators.file
-resolvers.openers.tree=resolvers.openers.file
-resolvers.loaders.tree=resolvers.loaders.file
+local collectors={}
+local splitter=lpeg.splitat("/**/")
+local stripper=lpeg.replacer { [P("/")*P("*")^1*P(-1)]="" }
+table.setmetatableindex(collectors,function(t,k)
+ local rootname=lpegmatch(stripper,k)
+ local dataname=joinname(rootname,"dirlist")
+ local content=caches.loadcontent(dataname,"files",dataname)
+ if not content then
+ content=resolvers.scanfiles(rootname,nil,nil,false,true)
+ caches.savecontent(dataname,"files",content,dataname)
+ end
+ t[k]=content
+ return content
+end)
+local function checked(root,p,n)
+ if p then
+ if type(p)=="table" then
+ for i=1,#p do
+ local fullname=joinname(root,p[i],n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ else
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ return notfound()
+end
+local function resolve(specification)
+ local filename=specification.filename
+ if filename~="" then
+ local root,rest=lpegmatch(splitter,filename)
+ if root and rest then
+ local path,name=dirname(rest),basename(rest)
+ if name~=rest then
+ local content=collectors[root]
+ local p,n=lookup(content,name)
+ if not p then
+ return notfound()
+ end
+ local pattern=".*/"..path.."$"
+ local istable=type(p)=="table"
+ if istable then
+ for i=1,#p do
+ local pi=p[i]
+ if pi==path or find(pi,pattern) then
+ local fullname=joinname(root,pi,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ elseif p==path or find(p,pattern) then
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ local queries=specification.queries
+ if queries and queries.option=="fileonly" then
+ return checked(root,p,n)
+ else
+ return notfound()
+ end
+ end
+ end
+ local path,name=dirname(filename),basename(filename)
+ local root=lpegmatch(stripper,path)
+ local content=collectors[path]
+ local p,n=lookup(content,name)
+ if p then
+ return checked(root,p,n)
+ end
+ end
+ return notfound()
+end
+resolvers.finders .dirlist=resolve
+resolvers.locators .dirlist=resolvers.locators .tree
+resolvers.hashers .dirlist=resolvers.hashers .tree
+resolvers.generators.dirlist=resolvers.generators.file
+resolvers.openers .dirlist=resolvers.openers .file
+resolvers.loaders .dirlist=resolvers.loaders .file
+function resolvers.finders.dirfile(specification)
+ local queries=specification.queries
+ if queries then
+ queries.option="fileonly"
+ else
+ specification.queries={ option="fileonly" }
+ end
+ return resolve(specification)
+end
+resolvers.locators .dirfile=resolvers.locators .dirlist
+resolvers.hashers .dirfile=resolvers.hashers .dirlist
+resolvers.generators.dirfile=resolvers.generators.dirlist
+resolvers.openers .dirfile=resolvers.openers .dirlist
+resolvers.loaders .dirfile=resolvers.loaders .dirlist
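-- a hedged reading (assumption, not stated in the patch): the dirlist/dirfile finders
-- above look a file up in a cached scan of a directory tree; the "/**/" marker separates
-- the scanned root from the wanted subpath and name, so a lookup is presumably phrased
-- along the lines of
--   resolvers.findfile("dirfile:///data/project/fonts/**/somefont.otf")
-- with "dirfile" settling for an existing file while "dirlist" also matches the given
-- subpath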
end -- of closure
@@ -15775,7 +17020,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6202, stripped down to: 5149
+-- original size: 6569, stripped down to: 5304
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -15801,8 +17046,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -15818,8 +17068,8 @@ function resolvers.schemes.cleanname(specification)
end
local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
local function runcurl(name,cachename)
- local command="curl --silent --create-dirs --output "..cachename.." "..name
- os.spawn(command)
+ local command="curl --silent --insecure --create-dirs --output "..cachename.." "..name
+ os.execute(command)
end
local function fetch(specification)
local original=specification.original
@@ -15951,7 +17201,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-lua"] = package.loaded["data-lua"] or true
--- original size: 4237, stripped down to: 3177
+-- original size: 4313, stripped down to: 3227
if not modules then modules={} end modules ['data-lua']={
version=1.001,
@@ -15960,7 +17210,7 @@ if not modules then modules={} end modules ['data-lua']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local resolvers,package=resolvers,package
+local package,lpeg=package,lpeg
local gsub=string.gsub
local concat=table.concat
local addsuffix=file.addsuffix
@@ -15971,9 +17221,11 @@ local luaformats={ 'TEXINPUTS','LUAINPUTS' }
local libformats={ 'CLUAINPUTS' }
local helpers=package.helpers or {}
local methods=helpers.methods or {}
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+helpers.report=logs.reporter("resolvers","libraries")
trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
trackers.register("resolvers.locating",function(v) helpers.trace=v end)
-helpers.report=logs.reporter("resolvers","libraries")
helpers.sequence={
"already loaded",
"preload table",
@@ -15988,7 +17240,7 @@ helpers.sequence={
}
local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
function helpers.cleanpath(path)
- return resolvers.resolve(lpegmatch(pattern,path))
+ return resolveprefix(lpegmatch(pattern,path))
end
local loadedaslib=helpers.loadedaslib
local getextraluapaths=package.extraluapaths
@@ -16058,7 +17310,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-aux"] = package.loaded["data-aux"] or true
--- original size: 2394, stripped down to: 2005
+-- original size: 2431, stripped down to: 1996
if not modules then modules={} end modules ['data-aux']={
version=1.001,
@@ -16072,8 +17324,8 @@ local type,next=type,next
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local resolvers=resolvers
local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="scripts/context/lua"
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="context/lua"
newname=file.addsuffix(newname,"lua")
local oldscript=resolvers.cleanpath(oldname)
if trace_locating then
@@ -16125,7 +17377,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
--- original size: 2600, stripped down to: 1627
+-- original size: 2601, stripped down to: 1627
if not modules then modules={} end modules ['data-tmf']={
version=1.001,
@@ -16181,7 +17433,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-lst"] = package.loaded["data-lst"] or true
--- original size: 2654, stripped down to: 2301
+-- original size: 2734, stripped down to: 2354
if not modules then modules={} end modules ['data-lst']={
version=1.001,
@@ -16190,10 +17442,13 @@ if not modules then modules={} end modules ['data-lst']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local rawget,type,next=rawget,type,next
+local find,concat,upper=string.find,table.concat,string.upper
local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
-resolvers.listers=resolvers.listers or {}
local resolvers=resolvers
+local listers=resolvers.listers or {}
+resolvers.listers=listers
+local resolveprefix=resolvers.resolve
local report_lists=logs.reporter("resolvers","lists")
local function tabstr(str)
if type(str)=='table' then
@@ -16202,7 +17457,7 @@ local function tabstr(str)
return str
end
end
-function resolvers.listers.variables(pattern)
+function listers.variables(pattern)
local instance=resolvers.instance
local environment=instance.environment
local variables=instance.variables
@@ -16223,10 +17478,10 @@ function resolvers.listers.variables(pattern)
for key,value in sortedpairs(configured) do
if key~="" and (pattern=="" or find(upper(key),pattern)) then
report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset")
end
end
instance.environment=fastcopy(env)
@@ -16234,15 +17489,15 @@ function resolvers.listers.variables(pattern)
instance.expansions=fastcopy(exp)
end
local report_resolved=logs.reporter("system","resolved")
-function resolvers.listers.configurations()
+function listers.configurations()
local configurations=resolvers.instance.specification
for i=1,#configurations do
- report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ report_resolved("file : %s",resolveprefix(configurations[i]))
end
report_resolved("")
local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
for i=1,#list do
- local li=resolvers.resolve(list[i])
+ local li=resolveprefix(list[i])
if lfs.isdir(li) then
report_resolved("path - %s",li)
else
@@ -16547,7 +17802,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
--- original size: 5951, stripped down to: 4922
+-- original size: 5955, stripped down to: 4926
if not modules then modules={} end modules ['luat-fmt']={
version=1.001,
@@ -16635,7 +17890,7 @@ function environment.make_format(name)
end
local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
report_format("running command: %s\n",command)
- os.spawn(command)
+ os.execute(command)
local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
local mp=dir.glob(pattern)
if mp then
@@ -16670,7 +17925,7 @@ function environment.run_format(name,data,more)
else
local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
report_format("running command: %s",command)
- os.spawn(command)
+ os.execute(command)
end
end
end
@@ -16681,8 +17936,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 685064
--- stripped bytes : 242353
+-- original bytes : 745618
+-- stripped bytes : 269191
-- end library merge
@@ -16781,17 +18036,18 @@ local ownlibs = { -- order can be made better
}
+-- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
+-- c:/data/develop/context/sources/data-tmf.lua
+
local ownlist = {
- '.',
- ownpath ,
- ownpath .. "/../sources", -- HH's development path
+ -- '.',
+ -- ownpath ,
+ owntree .. "/../../../../context/sources", -- HH's development path
owntree .. "/../../texmf-local/tex/context/base",
owntree .. "/../../texmf-context/tex/context/base",
- owntree .. "/../../texmf-dist/tex/context/base",
owntree .. "/../../texmf/tex/context/base",
owntree .. "/../../../texmf-local/tex/context/base",
owntree .. "/../../../texmf-context/tex/context/base",
- owntree .. "/../../../texmf-dist/tex/context/base",
owntree .. "/../../../texmf/tex/context/base",
}
@@ -16907,6 +18163,7 @@ local helpinfo = [[
<category name="basic">
<subcategory>
<flag name="script"><short>run an mtx script (lua prefered method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="evaluate"><short>run code passed on the commandline (between quotes)</short></flag>
<flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
<flag name="resolve"><short>resolve prefixed arguments</short></flag>
<flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
@@ -16932,6 +18189,7 @@ local helpinfo = [[
<flag name="verbose"><short>give a bit more info</short></flag>
<flag name="trackers" value="list"><short>enable given trackers</short></flag>
<flag name="progname" value="str"><short>format or backend</short></flag>
+ <flag name="systeminfo" value="str"><short>show current operating system, processor, etc</short></flag>
</subcategory>
<subcategory>
<flag name="edit"><short>launch editor with found file</short></flag>
@@ -17561,6 +18819,39 @@ function runners.associate(filename)
os.launch(filename)
end
+function runners.evaluate(code,filename) -- for Luigi
+ if code == "loop" then
+ while true do
+ io.write("> ")
+ local code = io.read()
+ if code ~= "" then
+ local temp = string.match(code,"^= (.*)$")
+ if temp then
+ code = "print("..temp..")"
+ end
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("! " .. (message or code).."\n")
+ else
+ io.write(compiled())
+ end
+ end
+ end
+ else
+ if type(code) ~= "string" or code == "" then
+ code = filename
+ end
+ if code ~= "" then
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("invalid lua code: " .. (message or code))
+ return
+ end
+ io.write(compiled())
+ end
+ end
+end
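+
+-- Illustrative use of the new --evaluate flag (editorial sketch, not part of the
+-- patched sources); the exact "--evaluate=..." option syntax is assumed from the
+-- helpinfo entry above:
+--
+--   mtxrun --evaluate="print(2^10)"   -- run a one-liner passed on the command line
+--   mtxrun --evaluate=loop            -- start the small read/eval loop
+--   > = 1 + 2                         -- "= expression" is rewritten to print(1 + 2)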
+
function runners.gethelp(filename)
local url = environment.argument("url")
if url and url ~= "" then
@@ -17572,6 +18863,15 @@ function runners.gethelp(filename)
end
end
+function runners.systeminfo()
+ report("architecture : %s",os.platform or "<unset>")
+ report("operating system : %s",os.name or "<unset>")
+ report("file architecture : %s",os.type or "<unset>")
+ report("binary path : %s",os.selfdir or "<unset>")
+ report("binary suffix : %s",os.binsuffix or "<unset>")
+ report("library suffix : %s",os.libsuffix or "<unset>")
+end
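+
+-- Illustrative call (editorial sketch, not part of the patched sources):
+--
+--   mtxrun --systeminfo    -- dispatches to runners.systeminfo() and reports the
+--                          -- os.* fields queried above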
+
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -17687,16 +18987,13 @@ end
if e_argument("ansi") then
- local formatters = string.formatters
+ logs.setformatters("ansi")
- logs.setformatters {
- report_yes = formatters["%-15s | %s"],
- report_nop = formatters["%-15s |"],
- subreport_yes = formatters["%-15s | %s | %s"],
- subreport_nop = formatters["%-15s | %s |"],
- status_yes = formatters["%-15s : %s\n"],
- status_nop = formatters["%-15s :\n"],
- }
+ local script = e_argument("script") or e_argument("scripts")
+
+ if type(script) == "string" then
+ logs.writer("]0;"..script.."") -- for Alan to test
+ end
end
@@ -17715,14 +19012,26 @@ if e_argument("script") or e_argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
+elseif e_argument("evaluate") then
+
+ runners.evaluate(e_argument("evaluate"),filename)
+
elseif e_argument("selfmerge") then
-- embed used libraries
runners.loadbase()
local found = locate_libs()
+
if found then
- utilities.merger.selfmerge(own.name,own.libs,{ found })
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfmerge(mtxrun,own.libs,{ found })
+ application.report("runner updated on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
+ application.report("runner updated on relative path: %s",own.name)
+ end
end
elseif e_argument("selfclean") then
@@ -17730,7 +19039,15 @@ elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
- utilities.merger.selfclean(own.name)
+
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfclean(mtxrun)
+ application.report("runner cleaned on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfclean(own.name)
+ application.report("runner cleaned on relative path: %s",own.name)
+ end
elseif e_argument("selfupdate") then
@@ -17972,6 +19289,8 @@ elseif e_argument("version") then
application.version()
+ application.report("source path",environment.ownbin)
+
elseif e_argument("directives") then
directives.show()
@@ -17989,6 +19308,10 @@ elseif e_argument("exporthelp") then
runners.loadbase()
application.export(e_argument("exporthelp"),filename)
+elseif e_argument("systeminfo") then
+
+ runners.systeminfo()
+
elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
diff --git a/scripts/context/stubs/mswin/mtxrunjit.exe b/scripts/context/stubs/mswin/mtxrunjit.exe
new file mode 100644
index 000000000..0e7882cf9
--- /dev/null
+++ b/scripts/context/stubs/mswin/mtxrunjit.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/mtxworks.exe b/scripts/context/stubs/mswin/mtxworks.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/mtxworks.exe
+++ b/scripts/context/stubs/mswin/mtxworks.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/pstopdf.exe b/scripts/context/stubs/mswin/pstopdf.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/pstopdf.exe
+++ b/scripts/context/stubs/mswin/pstopdf.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/texexec.exe b/scripts/context/stubs/mswin/texexec.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/texexec.exe
+++ b/scripts/context/stubs/mswin/texexec.exe
Binary files differ
diff --git a/scripts/context/stubs/mswin/texmfstart.exe b/scripts/context/stubs/mswin/texmfstart.exe
index faae5caa7..0e7882cf9 100644
--- a/scripts/context/stubs/mswin/texmfstart.exe
+++ b/scripts/context/stubs/mswin/texmfstart.exe
Binary files differ
diff --git a/scripts/context/stubs/setup/setuptex b/scripts/context/stubs/setup/setuptex
new file mode 100644
index 000000000..d41e36707
--- /dev/null
+++ b/scripts/context/stubs/setup/setuptex
@@ -0,0 +1,167 @@
+# Example setup file for ConTeXt distribution
+#
+# Author: Hans Hagen
+# Patches: Arthur R. & Mojca M.
+#
+# Usage:
+# . setuptex [texroot]
+#
+# On the first run also execute:
+# mktexlsr
+# texexec --make --alone
+
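+# A hypothetical first-run session illustrating the usage above (the install path
+# /opt/context is only an example, not part of this file):
+#
+#   cd /opt/context
+#   . setuptex              # or: . setuptex /opt/context
+#   mktexlsr
+#   texexec --make --alone
+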
+#
+# PLATFORM
+#
+
+# we will try to guess the platform first
+# (needs to be kept in sync with first-setup.sh and mtxrun)
+# if yours is missing, let us know
+
+system=`uname -s`
+cpu=`uname -m`
+
+case "$system" in
+ # linux
+ Linux)
+ case "$cpu" in
+ i*86) platform="linux" ;;
+ x86_64|ia64) platform="linux-64" ;;
+ # a little bit of cheating with ppc64 (won't work on Gentoo)
+ ppc|ppc64) platform="linux-ppc" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # Mac OS X
+ Darwin)
+ case "$cpu" in
+ i*86) platform="osx-intel" ;;
+ x86_64) platform="osx-64" ;;
+ ppc*|powerpc|power*|Power*) platform="osx-ppc" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # FreeBSD
+ FreeBSD|freebsd)
+ case "$cpu" in
+ i*86) platform="freebsd" ;;
+ x86_64) platform="freebsd" ;;
+ amd64) platform="freebsd-amd64" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # kFreeBSD (Debian)
+ GNU/kFreeBSD)
+ case "$cpu" in
+ i*86) platform="kfreebsd-i386" ;;
+ x86_64|amd64) platform="kfreebsd-amd64" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # cygwin
+ CYGWIN)
+ case "$cpu" in
+ i*86) platform="cygwin" ;;
+ x86_64|ia64) platform="cygwin-64" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ # SunOS/Solaris
+ SunOS)
+ case "$cpu" in
+ sparc) platform="solaris-sparc" ;;
+ i86pc) platform="solaris-intel" ;;
+ *) platform="unknown" ;;
+ esac ;;
+ *) platform="unknown"
+esac
+
+# temporary fix for Snow Leopard
+if test "$platform" = "osx-intel"; then
+ # running Snow Leopard or later
+ if test `uname -r|cut -f1 -d"."` -ge 10 ; then
+ # working on 64-bit hardware
+ if test `sysctl -n hw.cpu64bit_capable` = 1; then
+ platform="osx-64"
+ fi
+ fi
+fi
+
+if test "$platform" = "unknown" ; then
+ echo "Error: your system \"$system $cpu\" is not supported yet."
+ echo "Please report to the ConTeXt mailing-list (ntg-context@ntg.nl)"
+fi
+
+#
+# PATH
+#
+
+# this resolves to the path of the setuptex script
+# We use $0 to determine the path to the script, except for:
+# * bash where $0 always is bash; here we use BASH_SOURCE
+# * ksh93 where we use ${.sh.file}
+# Thanks to Vasile Gaburici and Alessandro Perucchi for reporting this
+# * http://www.ntg.nl/pipermail/ntg-context/2008/033953.html
+# * http://www.ntg.nl/pipermail/ntg-context/2012/068658.html
+if [ z"$BASH_SOURCE" != z ]; then
+ SCRIPTPATH="$BASH_SOURCE"
+elif [ z"$KSH_VERSION" != z ]; then
+ SCRIPTPATH="${.sh.file}"
+else
+ SCRIPTPATH="$0"
+fi
+
+OWNPATH=$(cd -P -- "$(dirname -- "$SCRIPTPATH")" && pwd -P)
+
+# but one can also call
+# . setuptex path-to-tree
+
+TEXROOT=""
+# first check if any path has been provided as an argument, and try to use that one
+if [ $# -ne 0 ] ; then
+ # TODO: resolve any errors
+ ARGPATH=$(cd -P -- "$(dirname -- "$1")" && pwd -P) && ARGPATH=$ARGPATH/$(basename -- "$1")
+ if test -f "$ARGPATH/texmf/tex/plain/base/plain.tex" ; then
+ if [ -d "$ARGPATH/texmf-$platform/bin" ]; then
+ TEXROOT="$ARGPATH"
+ else
+ echo "Binaries for platform '$platform' are missing."
+ echo "(There is no folder \"$ARGPATH/texmf-$platform/bin\")"
+ fi
+ else
+ echo "The argument \"$ARGPATH\" is not a valid TEXROOT path."
+ echo "(There is no file \"$ARGPATH/texmf/tex/plain/base/plain.tex\")"
+
+ if [ -f "$OWNPATH/texmf/tex/plain/base/plain.tex" ]; then
+ TEXROOT="$OWNPATH"
+ fi
+ fi
+else
+ if [ -f "$OWNPATH/texmf/tex/plain/base/plain.tex" ]; then
+ if [ -d "$OWNPATH/texmf-$platform/bin" ]; then
+ TEXROOT="$OWNPATH"
+ else
+ echo "Binaries for platform '$platform' are missing."
+ echo "(There is no folder \"$OWNPATH/texmf-$platform/bin\")"
+ fi
+ else
+ echo "\"$OWNPATH\" is not a valid TEXROOT path."
+ echo "(There is no file \"$OWNPATH/texmf/tex/plain/base/plain.tex\")"
+ fi
+fi
+
+if [ "$TEXROOT" != "" ]; then
+ # for Alan Braslau's server :)
+ if [ "x$PS1" != "x" ] ; then
+ echo "Setting \"$TEXROOT\" as ConTeXt root."
+ fi
+
+# ConTeXt binaries have to be added to PATH
+TEXMFOS=$TEXROOT/texmf-$platform
+export PATH=$TEXMFOS/bin:$PATH
+
+# unset variables that won't be used later
+unset platform cpu system OWNPATH SCRIPTPATH ARGPATH TEXMFOS
+
+# not sure why this would be needed
+# export CTXMINIMAL=yes
+
+else
+ echo "provide a proper tex root (like '. setuptex /something/tex')" ;
+fi
+
diff --git a/scripts/context/stubs/mswin/setuptex.bat b/scripts/context/stubs/setup/setuptex.bat
index b61fd4494..b61fd4494 100644
--- a/scripts/context/stubs/mswin/setuptex.bat
+++ b/scripts/context/stubs/setup/setuptex.bat
diff --git a/scripts/context/stubs/setup/setuptex.csh b/scripts/context/stubs/setup/setuptex.csh
new file mode 100644
index 000000000..c1160675f
--- /dev/null
+++ b/scripts/context/stubs/setup/setuptex.csh
@@ -0,0 +1,164 @@
+# Example setup file for ConTeXt distribution
+#
+# Author: Hans Hagen
+# Patches: Arthur R. & Mojca M.
+# (t)csh version: Alan B.
+#
+# Usage :
+# source setuptex.csh [texroot]
+#
+# On the first run also execute:
+# mktexlsr
+# texexec --make --alone
+
+echo "We are considering removing setuptex.csh in case that nobody uses it."
+echo "If you still use this file please drop us some mail at"
+echo " gardeners (at) contextgarden (dot) net"
+echo "If we don't get any response, we will delete it in near future."
+
+#
+# PLATFORM
+#
+
+# we will try to guess the platform first
+# (needs to be kept in sync with first-setup.sh and mtxrun)
+# if yours is missing, let us know
+
+set system=`uname -s`
+set cpu=`uname -m`
+
+switch ( $system )
+ # linux
+ case Linux:
+ switch ( $cpu )
+ case i*86:
+ set platform="linux"
+ breaksw
+ case x86_64:
+ case ia64:
+ set platform="linux-64"
+ breaksw
+ case ppc:
+ case ppc64:
+ set platform="linux-ppc"
+ breaksw
+ default:
+ set platform="unknown"
+ endsw
+ breaksw
+ # Mac OS X
+ case Darwin:
+ switch ( $cpu )
+ case i*86:
+ set platform="osx-intel"
+ breaksw
+ case x86_64:
+ set platform="osx-64"
+ breaksw
+ case ppc*:
+ case powerpc:
+ case power*:
+ case Power*:
+ set platform="osx-ppc"
+ breaksw
+ default:
+ set platform="unknown"
+ endsw
+ breaksw
+ # FreeBSD
+ case FreeBSD:
+ case freebsd:
+ switch ( $cpu )
+ case i*86:
+ set platform="freebsd"
+ breaksw
+ case x86_64:
+ set platform="freebsd"
+ breaksw
+ case amd64:
+ set platform="freebsd-amd64"
+ breaksw
+ default:
+ set platform="unknown"
+ endsw
+ breaksw
+ # cygwin
+ case CYGWIN:
+ switch ( $cpu )
+ case i*86:
+ set platform="cygwin"
+ breaksw
+ case x86_64:
+ case ia64:
+ set platform="cygwin-64"
+ breaksw
+ default:
+ set platform="unknown"
+ endsw
+ breaksw
+ # SunOS/Solaris
+ case SunOS:
+ switch ( $cpu )
+ case sparc:
+ set platform="solaris-sparc"
+ breaksw
+ case i86pc:
+ set platform="solaris-intel"
+ default:
+ set platform="unknown"
+ endsw
+ breaksw
+ # Other
+ default:
+ set platform="unknown"
+endsw
+
+if ( $platform == "unknown" ) then
+ echo Error: your system \"$system $cpu\" is not supported yet.
+ echo Please report to the ConTeXt mailing-list (ntg-context@ntg.nl).
+endif
+
+#
+# PATH
+#
+
+# this resolves to the path of the setuptex script
+# We use $0 to determine the path to the script, except for bash and (t)csh where $0
+# always is bash or (t)csh.
+
+# but one can also call
+# . setuptex path-to-tex-tree
+
+# first check if any path has been provided as an argument, and try to use that one
+if ( $# > 0 ) then
+ setenv TEXROOT $1
+else
+ # $_ should be `history -h 1` but doesn't seem to work...
+ set cmd=`history -h 1`
+ if ( $cmd[2]:h == $cmd[2]:t ) then
+ setenv TEXROOT $cwd
+ else
+ setenv TEXROOT $cmd[2]:h
+ endif
+ unset cmd
+endif
+cd $TEXROOT; setenv TEXROOT $cwd; cd -
+
+if ( -f "$TEXROOT/texmf/tex/plain/base/plain.tex" ) then
+ echo Setting \"$TEXROOT\" as TEXROOT.
+else
+ echo \"$TEXROOT\" is not a valid TEXROOT path.
+ echo There is no file \"$TEXROOT/texmf/tex/plain/base/plain.tex\".
+ echo Please provide a proper tex root (like \"source setuptex /path/tex\")
+ unsetenv TEXROOT
+ exit
+endif
+
+unsetenv TEXINPUTS MPINPUTS MFINPUTS
+
+# ConTeXt binaries have to be added to PATH
+setenv TEXMFOS $TEXROOT/texmf-$platform
+setenv PATH $TEXMFOS/bin:$PATH
+# TODO: we could set OSFONTDIR on Mac for example
+
+# setenv CTXMINIMAL yes
diff --git a/scripts/context/stubs/source/mtxrun_dll.c b/scripts/context/stubs/source/mtxrun_dll.c
index 400ed6778..fc2e260f5 100644
--- a/scripts/context/stubs/source/mtxrun_dll.c
+++ b/scripts/context/stubs/source/mtxrun_dll.c
@@ -55,7 +55,6 @@
return 1; \
}
-char texlua_name[] = "texlua"; // just a bare name, luatex strips the rest anyway
static char cmdline[MAX_CMD];
static char dirpath[MAX_PATH];
static char progname[MAX_PATH];
@@ -70,12 +69,12 @@ int main( int argc, char *argv[] )
__declspec(dllexport) int dllrunscript( int argc, char *argv[] )
#endif
{
- char *s, *luatexfname, *argstr, **lua_argv;
+ char *binary, *s, *luatexfname, *argstr, **lua_argv;
int k, quoted, lua_argc;
int passprogname = 0;
+ unsigned char is_jit=0;
// directory of this module/executable
-
HMODULE module_handle = GetModuleHandle( "mtxrun.dll" );
// if ( module_handle == NULL ) exe path will be used, which is OK too
k = (int) GetModuleFileName( module_handle, dirpath, MAX_PATH );
@@ -86,13 +85,20 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
*(++s) = '\0'; //remove file name, leave trailing backslash
// program name
-
k = strlen(argv[0]);
while ( k && (argv[0][k-1] != '/') && (argv[0][k-1] != '\\') ) k--;
strcpy(progname, &argv[0][k]);
s = progname;
if ( s = strrchr(s, '.') ) *s = '\0'; // remove file extension part
+ /* check "jit" : strlen("jit") = 3 */
+ if (strncmp(progname + strlen(progname) - 3, "jit", 3) == 0) {
+ is_jit = 1;
+ progname[strlen(progname) - 3]='\0';
+ }
+ else
+ is_jit = 0;
+
// script path
strcpy( scriptpath, dirpath );
@@ -114,44 +120,110 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( scriptpath, "mtxrun.lua" );
passprogname = 1;
}
-
if ( GetFileAttributes(scriptpath) == INVALID_FILE_ATTRIBUTES )
DIE( "file not found: %s\n", scriptpath );
- // find texlua.exe
-
- if ( !SearchPath(
- getenv( "PATH" ), // path to search (optional)
- "texlua.exe", // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- )
- if ( !SearchPath(
- dirpath, // path to search (optional)
- "texlua.exe", // file name to search
- NULL, // file extension to add (optional)
- MAX_PATH, // output buffer size
- luatexpath, // output buffer pointer
- &luatexfname ) // pointer to a file part in the output buffer (optional)
- )
- DIE( "unable to locate texlua.exe on the search path" );
+ // find luatex.exe /luajittex.exe
+ if ( SearchPath(
+ dirpath, // was getenv( "PATH" ), // path to search (optional)
+ (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ ) {
+ binary = (is_jit ? "luajittex.exe":"luatex.exe");
+ } else if ( SearchPath(
+ dirpath, // was getenv( "PATH" ), // path to search (optional)
+ (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ ) {
+ binary = (is_jit ? "texluajit.exe":"texlua.exe");
+ } else if ( SearchPath(
+ getenv("PATH"), // was dirpath, // path to search (optional)
+ (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ ) {
+ binary = (is_jit ? "luajittex.exe":"luatex.exe");
+ } else if ( SearchPath(
+ getenv("PATH") , // was dirpath, // path to search (optional)
+ (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search
+ NULL, // file extension to add (optional)
+ MAX_PATH, // output buffer size
+ luatexpath, // output buffer pointer
+ &luatexfname ) // pointer to a file part in the output buffer (optional)
+ ) {
+ binary = (is_jit ? "texluajit.exe":"texlua.exe");
+ }else {
+ DIE( "unable to locate texlua.exe on the search path" );
+ }
+
+ /* if ( SearchPath( */
+ /* dirpath, // was getenv( "PATH" ), // path to search (optional) */
+ /* (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search */
+ /* NULL, // file extension to add (optional) */
+ /* MAX_PATH, // output buffer size */
+ /* luatexpath, // output buffer pointer */
+ /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
+ /* ) { */
+ /* binary = (is_jit ? "luajittex.exe":"luatex.exe"); */
+ /* }else if ( SearchPath( */
+ /* getenv("PATH"), // was dirpath, // path to search (optional) */
+ /* (is_jit ? "luajittex.exe":"luatex.exe"), // file name to search */
+ /* NULL, // file extension to add (optional) */
+ /* MAX_PATH, // output buffer size */
+ /* luatexpath, // output buffer pointer */
+ /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
+ /* ) { */
+ /* binary = (is_jit ? "luajittex.exe":"luatex.exe"); */
+ /* }else if ( SearchPath( */
+ /* dirpath, // was getenv( "PATH" ), // path to search (optional) */
+ /* (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search */
+ /* NULL, // file extension to add (optional) */
+ /* MAX_PATH, // output buffer size */
+ /* luatexpath, // output buffer pointer */
+ /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
+ /* ) { */
+ /* binary = (is_jit ? "texluajit.exe":"texlua.exe"); */
+ /* }else if ( SearchPath( */
+ /* getenv("PATH") , // was dirpath, // path to search (optional) */
+ /* (is_jit ? "texluajit.exe":"texlua.exe"), // file name to search */
+ /* NULL, // file extension to add (optional) */
+ /* MAX_PATH, // output buffer size */
+ /* luatexpath, // output buffer pointer */
+ /* &luatexfname ) // pointer to a file part in the output buffer (optional) */
+ /* ) { */
+ /* binary = (is_jit ? "texluajit.exe":"texlua.exe"); */
+ /* }else { */
+ /* DIE( "unable to locate texlua.exe on the search path" ); */
+ /* } */
+
- // link directly with luatex.dll if available in texlua's dir
- strcpy( luatexfname, "luatex.dll" );
+
+ // link directly with luatex.dll if available in texlua's dir
+ strcpy( luatexfname, (is_jit ? "luajittex.dll":"luatex.dll") );
if ( dllluatex = LoadLibrary(luatexpath) )
{
- mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, "dllluatexmain" );
+ mainlikeproc dllluatexmain = (mainlikeproc) GetProcAddress( dllluatex, (is_jit ? "dllluajittexmain": "dllluatexmain" ));
if ( dllluatexmain == NULL )
- DIE( "unable to locate dllluatexmain procedure in luatex.dll" );
+ if (is_jit)
+ DIE( "unable to locate dllluatexmain procedure in luajittex.dll" )
+ else
+ DIE( "unable to locate dllluatexmain procedure in luatex.dll" );
// set up argument list for texlua script
- lua_argv = (char **)malloc( (argc + 4) * sizeof(char *) );
+ lua_argv = (char **)malloc( (argc + 5) * sizeof(char *) );
if ( lua_argv == NULL ) DIE( "out of memory\n" );
- lua_argv[lua_argc=0] = texlua_name;
+ lua_argv[lua_argc=0] = luatexfname;
+ lua_argv[++lua_argc] = "--luaonly";
lua_argv[++lua_argc] = scriptpath; // script to execute
if (passprogname) {
lua_argv[++lua_argc] = "--script";
@@ -162,15 +234,15 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
// call texlua interpreter
// dllluatexmain never returns, but we pretend that it does
-
+
k = dllluatexmain( lua_argc, lua_argv );
if (lua_argv) free( lua_argv );
return k;
}
-
// we are still here, so no luatex.dll; spawn texlua.exe instead
- strcpy( luatexfname, "texlua.exe" );
+ strcpy( luatexfname,binary);
+ strcpy( cmdline, " --luaonly " );
strcpy( cmdline, "\"" );
strcat( cmdline, luatexpath );
strcat( cmdline, "\" \"" );
@@ -180,7 +252,6 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
strcat( cmdline, " --script " );
strcat( cmdline, progname );
}
-
argstr = GetCommandLine(); // get the command line of this process
if ( argstr == NULL ) DIE( "unable to retrieve the command line string\n" );
@@ -209,7 +280,6 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
si.hStdOutput = GetStdHandle( STD_OUTPUT_HANDLE );
si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
ZeroMemory( &pi, sizeof(pi) );
-
if( !CreateProcess(
NULL, // module name (uses command line if NULL)
cmdline, // command line
@@ -222,7 +292,6 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
&si, // STARTUPINFO structure
&pi ) // PROCESS_INFORMATION structure
) DIE( "command execution failed: %s\n", cmdline );
-
DWORD ret = 0;
CloseHandle( pi.hThread ); // thread handle is not needed
if ( WaitForSingleObject( pi.hProcess, INFINITE ) == WAIT_OBJECT_0 ) {
@@ -232,7 +301,6 @@ __declspec(dllexport) int dllrunscript( int argc, char *argv[] )
CloseHandle( pi.hProcess );
// propagate exit code from the child process
-
return ret;
}
diff --git a/scripts/context/stubs/source/readme.txt b/scripts/context/stubs/source/readme.txt
index 354d85b09..72892ee2f 100644
--- a/scripts/context/stubs/source/readme.txt
+++ b/scripts/context/stubs/source/readme.txt
@@ -1,36 +1,40 @@
Copyright:
-The originally 'runscript' program was written by in 2009 by
-T.M.Trzeciak and is public domain. This derived mtxrun program
-is an adapted version by Hans Hagen.
+The original 'runscript' program was written in 2009 by T.M.Trzeciak and is public
+domain. This derived mtxrun program is an adapted version by Hans Hagen and
+Luigi Scarso.
Comment:
-In ConTeXt MkIV we have two core scripts: luatools.lua and
-mtxrun.lua where the second one is used to launch other scripts.
+In ConTeXt MkIV we have two core scripts: luatools.lua and mtxrun.lua, of which the
+second is used to launch other scripts. The mtxrun.exe program calls luatex.exe.
+
Normally a user will use a call like:
-mtxrun --script font --reload
+ mtxrun --script font --reload
+
+Here mtxrun is a Lua script. In order to avoid the use of a cmd file on Windows, this
+runner starts texlua directly. In TeXlive a runner is added for each cmd file, but
+we don't want that overhead (and extra files). By using an exe we can call these
+scripts in batch files without the need to use 'call'.
+
+The mtxrun.exe file can be copied to a mtxrunjit.exe file, in which case luajittex.exe
+is called.
-Here mtxrun is a lua script. In order to avoid the usage of a cmd
-file on windows this runner will start texlua directly. In TeXlive
-a runner is added for each cmd file but we don't want that overhead
-(and extra files). By using an exe we can call these scripts in
-batch files without the need for using call.
+ mtxrunjit --script font --reload
-We also don't want to use other runners, like those that use kpse
-to locate the script as this is exactly what mtxrun itself is doing
-already. Therefore the runscript program is adapted to a more direct
-approach suitable for mtxrun.
+We also don't want to use other runners, like those that use kpse to locate the script,
+as this is exactly what mtxrun itself already does. Therefore the runscript program
+is adapted to a more direct approach suitable for mtxrun.
Compilation:
with gcc (size optimized):
-gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
-gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun
+ gcc -Os -s -shared -o mtxrun.dll mtxrun_dll.c
+ gcc -Os -s -o mtxrun.exe mtxrun_exe.c -L./ -lmtxrun
with tcc (ver. 0.9.24), extra small size
-tcc -shared -o runscript.dll runscript_dll.c
-tcc -o runscript.exe runscript_exe.c runscript.def
+ tcc -shared -o runscript.dll runscript_dll.c
+ tcc -o runscript.exe runscript_exe.c runscript.def
diff --git a/scripts/context/stubs/unix/contextjit b/scripts/context/stubs/unix/contextjit
new file mode 100644
index 000000000..5ac1947c7
--- /dev/null
+++ b/scripts/context/stubs/unix/contextjit
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+luajittex --luaonly $(dirname $0)/mtxrun --script context "$@"
+
+# luajittex --luaonly ${0%contextjit}mtxrun --script context "$@"
diff --git a/scripts/context/stubs/unix/ctxtools b/scripts/context/stubs/unix/ctxtools
deleted file mode 100644
index 2e6bd4afa..000000000
--- a/scripts/context/stubs/unix/ctxtools
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-mtxrun --script ctxtools "$@"
diff --git a/scripts/context/stubs/unix/mptopdf b/scripts/context/stubs/unix/mptopdf
deleted file mode 100644
index 147333740..000000000
--- a/scripts/context/stubs/unix/mptopdf
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-mtxrun --script mptopdf "$@"
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 0ff2d2897..edfeba8dd 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -56,7 +56,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lua"] = package.loaded["l-lua"] or true
--- original size: 3123, stripped down to: 1694
+-- original size: 3888, stripped down to: 2197
if not modules then modules={} end modules ['l-lua']={
version=1.001,
@@ -136,6 +136,16 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
end -- of closure
@@ -434,7 +444,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 29245, stripped down to: 15964
+-- original size: 36977, stripped down to: 20349
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -450,7 +460,9 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -469,7 +481,7 @@ local uppercase=R("AZ")
local underscore=P("_")
local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
-local newline=crlf+S("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
@@ -491,8 +503,10 @@ patterns.utfbom_32_le=utfbom_32_le
patterns.utfbom_16_be=utfbom_16_be
patterns.utfbom_16_le=utfbom_16_le
patterns.utfbom_8=utfbom_8
-patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
-patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
@@ -519,10 +533,24 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
patterns.lowercase=lowercase
patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
@@ -559,9 +587,12 @@ patterns.integer=sign^-1*digit^1
patterns.unsigned=digit^0*period*digit^1
patterns.float=sign^-1*patterns.unsigned
patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
patterns.number=patterns.float+patterns.integer
patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
patterns.oct=zero*octdigit^1
patterns.octal=patterns.oct
patterns.HEX=zero*P("X")*(digit+uppercase)^1
@@ -744,7 +775,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -760,7 +791,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -974,37 +1009,139 @@ function lpeg.append(list,pp,delayed,checked)
end
return p
end
+local p_false=P(false)
+local p_true=P(true)
local function make(t)
- local p
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
local keys=sortedkeys(t)
for i=1,#keys do
local k=keys[i]
- local v=t[k]
- if not p then
- if next(v) then
- p=P(k)*make(v)
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
else
- p=P(k)
+ p=p+P(k)*making(v)
end
- else
- if next(v) then
- p=p+P(k)*make(v)
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
else
- p=p+P(k)
+ return v,x..k
+ end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
end
+ return tt,x
end
end
- return p
end
function lpeg.utfchartabletopattern(list)
local tree={}
- for i=1,#list do
- local t=tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
end
- t=t[c]
end
end
return make(tree)
@@ -1044,6 +1181,65 @@ local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
local number=digit^1*(case_1+case_2)
local stripper=Cs((number+1)^0)
lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
end -- of closure
@@ -1071,7 +1267,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-string"] = package.loaded["l-string"] or true
--- original size: 5547, stripped down to: 2708
+-- original size: 5694, stripped down to: 2827
if not modules then modules={} end modules ['l-string']={
version=1.001,
@@ -1107,11 +1303,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -1172,7 +1372,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-table"] = package.loaded["l-table"] or true
--- original size: 31113, stripped down to: 20256
+-- original size: 35724, stripped down to: 21525
if not modules then modules={} end modules ['l-table']={
version=1.001,
@@ -1205,7 +1405,7 @@ end
function table.keys(t)
if t then
local keys,k={},0
- for key,_ in next,t do
+ for key in next,t do
k=k+1
keys[k]=key
end
@@ -1215,32 +1415,52 @@ function table.keys(t)
end
end
local function compare(a,b)
- local ta,tb=type(a),type(b)
- if ta==tb then
- return a<b
- else
- return tostring(a)<tostring(b)
+ local ta=type(a)
+ if ta=="number" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ elseif tb=="string" then
+ return tostring(a)<b
+ end
+ elseif ta=="string" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ else
+ return a<tostring(b)
+ end
end
+ return tostring(a)<tostring(b)
end
local function sortedkeys(tab)
if tab then
local srt,category,s={},0,0
- for key,_ in next,tab do
+ for key in next,tab do
s=s+1
srt[s]=key
if category==3 then
+ elseif category==1 then
+ if type(key)~="string" then
+ category=3
+ end
+ elseif category==2 then
+ if type(key)~="number" then
+ category=3
+ end
else
local tkey=type(key)
if tkey=="string" then
- category=(category==2 and 3) or 1
+ category=1
elseif tkey=="number" then
- category=(category==1 and 3) or 2
+ category=2
else
category=3
end
end
end
- if category==0 or category==3 then
+ if s<2 then
+ elseif category==3 then
sort(srt,compare)
else
sort(srt)
@@ -1250,16 +1470,52 @@ local function sortedkeys(tab)
return {}
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
- for key,_ in next,tab do
+ for key in next,tab do
if key then
s=s+1
srt[s]=key
end
end
- sort(srt,cmp)
+ if s>1 then
+ sort(srt,cmp)
+ end
return srt
else
return {}
@@ -1268,13 +1524,15 @@ end
function table.allkeys(t)
local keys={}
for k,v in next,t do
- for k,v in next,v do
+ for k in next,v do
keys[k]=true
end
end
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
@@ -1285,19 +1543,21 @@ local function sortedhash(t,cmp)
else
s=sortedkeys(t)
end
- local n=0
local m=#s
- local function kv(s)
- if n<m then
- n=n+1
- local k=s[n]
- return k,t[k]
+ if m==1 then
+ return next,t
+ elseif m>0 then
+ local n=0
+ return function()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
end
end
- return kv,s
- else
- return nothing
end
+ return nothing
end
table.sortedhash=sortedhash
table.sortedpairs=sortedhash
@@ -1439,39 +1699,36 @@ function table.fromhash(t)
end
return hsh
end
-local noquotes,hexify,handle,reduce,compact,inline,functions
+local noquotes,hexify,handle,compact,inline,functions
local reserved=table.tohash {
'and','break','do','else','elseif','end','false','for','function','if',
'in','local','nil','not','or','repeat','return','then','true','until','while',
'NaN','goto',
}
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
end
- if n==#t then
- local tt,nt={},0
- for i=1,#t do
+ if n==nt then
+ local tt={}
+ for i=1,nt do
local v=t[i]
local tv=type(v)
if tv=="number" then
- nt=nt+1
if hexify then
- tt[nt]=format("0x%04X",v)
+ tt[i]=format("0x%X",v)
else
- tt[nt]=tostring(v)
+ tt[i]=tostring(v)
end
elseif tv=="string" then
- nt=nt+1
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
- tt=nil
- break
+ return nil
end
end
return tt
@@ -1490,7 +1747,7 @@ local function do_serialize(root,name,depth,level,indexed)
local tn=type(name)
if tn=="number" then
if hexify then
- handle(format("%s[0x%04X]={",depth,name))
+ handle(format("%s[0x%X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
@@ -1507,7 +1764,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first,last=nil,0
if compact then
last=#root
@@ -1525,22 +1782,19 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local tv,tk=type(v),type(k)
+ local tv=type(v)
+ local tk=type(k)
if compact and first and tk=="number" and k>=first and k<=last then
if tv=="number" then
if hexify then
- handle(format("%s 0x%04X,",depth,v))
+ handle(format("%s 0x%X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
+ handle(format("%s %q,",depth,v))
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
handle(format("%s {},",depth))
elseif inline then
local st=simple_table(v)
@@ -1570,64 +1824,48 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="number" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
else
handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
+ handle(format("%s %s=0x%X,",depth,k,v))
else
handle(format("%s %s=%s,",depth,k,v))
end
else
if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
+ handle(format("%s [%q]=0x%X,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
+ handle(format("%s [%q]=%q,",depth,k,v))
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
+ handle(format("%s [0x%X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
@@ -1643,7 +1881,7 @@ local function do_serialize(root,name,depth,level,indexed)
if st then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
@@ -1663,7 +1901,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
else
handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
@@ -1679,7 +1917,7 @@ local function do_serialize(root,name,depth,level,indexed)
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
else
handle(format("%s [%s]=load(%q),",depth,k,f))
end
@@ -1694,7 +1932,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
@@ -1718,7 +1956,6 @@ local function serialize(_handle,root,name,specification)
noquotes=specification.noquotes
hexify=specification.hexify
handle=_handle or specification.handle or print
- reduce=specification.reduce or false
functions=specification.functions
compact=specification.compact
inline=specification.inline and compact
@@ -1735,7 +1972,6 @@ local function serialize(_handle,root,name,specification)
noquotes=false
hexify=false
handle=_handle or print
- reduce=false
compact=true
inline=true
functions=true
@@ -1748,7 +1984,7 @@ local function serialize(_handle,root,name,specification)
end
elseif tname=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("[0x%X]={",name))
else
handle("["..name.."]={")
end
@@ -1766,7 +2002,7 @@ local function serialize(_handle,root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,"",0)
end
end
@@ -1895,14 +2131,25 @@ local function identical(a,b)
end
table.identical=identical
table.are_equal=are_equal
-function table.compact(t)
- if t then
- for k,v in next,t do
- if not next(v) then
- t[k]=nil
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
end
end
end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
end
function table.contains(t,v)
if t then
@@ -2000,15 +2247,17 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
function table.is_empty(t)
- return not t or not next(t)
+ return not t or next(t)==nil
end
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and next(t,next(t))==nil
end
function table.loweredkeys(t)
local l={}
@@ -2053,6 +2302,44 @@ function table.values(t,s)
return {}
end
end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n~=nil do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
end -- of closure
@@ -2061,7 +2348,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-io"] = package.loaded["l-io"] or true
--- original size: 8817, stripped down to: 6340
+-- original size: 8643, stripped down to: 6232
if not modules then modules={} end modules ['l-io']={
version=1.001,
@@ -2075,7 +2362,7 @@ local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
local concat=table.concat
local floor=math.floor
local type=type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator="\\",";"
else
io.fileseparator,io.pathseparator="/",":"
@@ -2368,8 +2655,6 @@ function io.readstring(f,n,m)
local str=gsub(f:read(n),"\000","")
return str
end
-if not io.i_limiter then function io.i_limiter() end end
-if not io.o_limiter then function io.o_limiter() end end
end -- of closure
@@ -2596,7 +2881,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-os"] = package.loaded["l-os"] or true
--- original size: 16023, stripped down to: 9634
+-- original size: 15832, stripped down to: 9456
if not modules then modules={} end modules ['l-os']={
version=1.001,
@@ -2670,13 +2955,10 @@ if not os.__getenv__ then
setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
end
end
-local execute,spawn,exec,iopopen,ioflush=os.execute,os.spawn or os.execute,os.exec or os.execute,io.popen,io.flush
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
+local execute=os.execute
+local iopopen=io.popen
function os.resultof(command)
- local handle=io.popen(command,"r")
+ local handle=iopopen(command,"r")
if handle then
local result=handle:read("*all") or ""
handle:close()
@@ -2686,7 +2968,7 @@ function os.resultof(command)
end
end
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
+ if find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
else
io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
@@ -2705,7 +2987,7 @@ local launchers={
unix="$BROWSER %s &> /dev/null &",
}
function os.launch(str)
- os.execute(format(launchers[os.name] or launchers.unix,str))
+ execute(format(launchers[os.name] or launchers.unix,str))
end
if not os.times then
function os.times()
@@ -2746,7 +3028,7 @@ if platform~="" then
elseif os.type=="windows" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
+ if find(architecture,"AMD64",1,true) then
platform="win64"
else
platform="mswin"
@@ -2758,9 +3040,9 @@ elseif os.type=="windows" then
elseif name=="linux" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform="linux-64"
- elseif find(architecture,"ppc") then
+ elseif find(architecture,"ppc",1,true) then
platform="linux-ppc"
else
platform="linux"
@@ -2774,9 +3056,9 @@ elseif name=="macosx" then
local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
if architecture=="" then
platform="osx-intel"
- elseif find(architecture,"i386") then
+ elseif find(architecture,"i386",1,true) then
platform="osx-intel"
- elseif find(architecture,"x86_64") then
+ elseif find(architecture,"x86_64",1,true) then
platform="osx-64"
else
platform="osx-ppc"
@@ -2788,7 +3070,7 @@ elseif name=="macosx" then
elseif name=="sunos" then
function resolvers.platform(t,k)
local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
+ if find(architecture,"sparc",1,true) then
platform="solaris-sparc"
else
platform="solaris-intel"
@@ -2800,7 +3082,7 @@ elseif name=="sunos" then
elseif name=="freebsd" then
function resolvers.platform(t,k)
local platform,architecture="",os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
+ if find(architecture,"amd64",1,true) then
platform="freebsd-amd64"
else
platform="freebsd"
@@ -2812,7 +3094,7 @@ elseif name=="freebsd" then
elseif name=="kfreebsd" then
function resolvers.platform(t,k)
local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform="kfreebsd-amd64"
else
platform="kfreebsd-i386"
@@ -2829,8 +3111,9 @@ else
return platform
end
end
+os.newline=name=="windows" and "\013\010" or "\010"
function resolvers.bits(t,k)
- local bits=find(os.platform,"64") and 64 or 32
+ local bits=find(os.platform,"64",1,true) and 64 or 32
os.bits=bits
return bits
end
@@ -2980,7 +3263,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-file"] = package.loaded["l-file"] or true
--- original size: 18308, stripped down to: 9948
+-- original size: 20949, stripped down to: 9945
if not modules then modules={} end modules ['l-file']={
version=1.001,
@@ -2994,41 +3277,28 @@ local file=file
if not lfs then
lfs=optionalrequire("lfs")
end
-if not lfs then
- lfs={
- getcurrentdir=function()
- return "."
- end,
- attributes=function()
- return nil
- end,
- isfile=function(name)
- local f=io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir=function(name)
- print("you need to load lfs")
- return false
- end
- }
-elseif not lfs.isfile then
- local attributes=lfs.attributes
- function lfs.isdir(name)
- return attributes(name,"mode")=="directory"
- end
- function lfs.isfile(name)
- return attributes(name,"mode")=="file"
- end
-end
local insert,concat=table.insert,table.concat
local match,find,gmatch=string.match,string.find,string.gmatch
local lpegmatch=lpeg.match
local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
local checkedsplit=string.checkedsplit
local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local tricky=S("/\\")*P(-1)
+local attributes=lfs.attributes
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir,"lfs.isdir")
+end
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+end
+function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+end
local colon=P(":")
local period=P(".")
local periods=P("..")
@@ -3230,28 +3500,30 @@ local isroot=fwslash^1*-1
local hasroot=fwslash^1
local reslasher=lpeg.replacer(S("\\/"),"/")
local deslasher=lpeg.replacer(S("\\/")^1,"/")
-function file.join(...)
- local lst={... }
- local one=lst[1]
+function file.join(one,two,three,...)
+ if not two then
+ return one=="" and one or lpegmatch(stripper,one)
+ end
+ if one=="" then
+ return lpegmatch(stripper,three and concat({ two,three,... },"/") or two)
+ end
if lpegmatch(isnetwork,one) then
local one=lpegmatch(reslasher,one)
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return one..two
else
return one.."/"..two
end
elseif lpegmatch(isroot,one) then
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return two
else
return "/"..two
end
- elseif one=="" then
- return lpegmatch(stripper,concat(lst,"/",2))
else
- return lpegmatch(deslasher,concat(lst,"/"))
+ return lpegmatch(deslasher,concat({ one,two,three,... },"/"))
end
end
local drivespec=R("az","AZ")^1*colon
@@ -3425,7 +3697,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-md5"] = package.loaded["l-md5"] or true
--- original size: 3760, stripped down to: 2088
+-- original size: 3248, stripped down to: 2266
if not modules then modules={} end modules ['l-md5']={
version=1.001,
@@ -3443,14 +3715,20 @@ if not md5 then
}
end
local md5,file=md5,file
-local gsub,format,byte=string.gsub,string.format,string.byte
-local md5sum=md5.sum
-local function convert(str,fmt)
- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
+local gsub=string.gsub
+do
+ local patterns=lpeg and lpeg.patterns
+ if patterns then
+ local bytestoHEX=patterns.bytestoHEX
+ local bytestohex=patterns.bytestohex
+ local bytestodec=patterns.bytestodec
+ local lpegmatch=lpeg.match
+ local md5sum=md5.sum
+ if not md5.HEX then function md5.HEX(str) if str then return lpegmatch(bytestoHEX,md5sum(str)) end end end
+ if not md5.hex then function md5.hex(str) if str then return lpegmatch(bytestohex,md5sum(str)) end end end
+ if not md5.dec then function md5.dec(str) if str then return lpegmatch(bytestodec,md5sum(str)) end end end
+ end
+end
function file.needsupdating(oldname,newname,threshold)
local oldtime=lfs.attributes(oldname,"modification")
if oldtime then
@@ -3507,7 +3785,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-url"] = package.loaded["l-url"] or true
--- original size: 11993, stripped down to: 5584
+-- original size: 12531, stripped down to: 5721
if not modules then modules={} end modules ['l-url']={
version=1.001,
@@ -3534,7 +3812,7 @@ local hexdigit=R("09","AF","af")
local plus=P("+")
local nothing=Cc("")
local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
-local escaped=(plus/" ")+escapedchar
+local escaped=(plus/" ")+escapedchar
local noslash=P("/")/""
local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
@@ -3593,19 +3871,25 @@ local splitquery=Cf (Ct("")*P { "sequence",
pair=Cg(key*equal*value),
},rawset)
local function hashed(str)
- if str=="" then
+ if not str or str=="" then
return {
scheme="invalid",
original=str,
}
end
- local s=split(str)
- local rawscheme=s[1]
- local rawquery=s[4]
- local somescheme=rawscheme~=""
- local somequery=rawquery~=""
+ local detailed=split(str)
+ local rawscheme=""
+ local rawquery=""
+ local somescheme=false
+ local somequery=false
+ if detailed then
+ rawscheme=detailed[1]
+ rawquery=detailed[4]
+ somescheme=rawscheme~=""
+ somequery=rawquery~=""
+ end
if not somescheme and not somequery then
- s={
+ return {
scheme="file",
authority="",
path=str,
@@ -3615,28 +3899,28 @@ local function hashed(str)
noscheme=true,
filename=str,
}
- else
- local authority,path,filename=s[2],s[3]
- if authority=="" then
- filename=path
- elseif path=="" then
- filename=""
- else
- filename=authority.."/"..path
- end
- s={
- scheme=rawscheme,
- authority=authority,
- path=path,
- query=lpegmatch(unescaper,rawquery),
- queries=lpegmatch(splitquery,rawquery),
- fragment=s[5],
- original=str,
- noscheme=false,
- filename=filename,
- }
end
- return s
+ local authority=detailed[2]
+ local path=detailed[3]
+ local filename=nil
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
+ else
+ filename=authority.."/"..path
+ end
+ return {
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=detailed[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
end
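-- Usage sketch (illustrative only; the url is an assumed example):
local u = url.hashed("http://www.pragma-ade.com/ctx/demo.pdf?mode=draft#top")
-- u.scheme is "http", u.authority "www.pragma-ade.com", u.fragment "top" and
-- u.queries.mode "draft"; a bare name like url.hashed("demo.tex") now safely
-- yields scheme "file" with noscheme set to true, also for nil input.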
url.split=split
url.hasscheme=hasscheme
@@ -3670,7 +3954,7 @@ function url.construct(hash)
end
return lpegmatch(escaper,concat(fullurl))
end
-local pattern=Cs(noslash*R("az","AZ")*(S(":|")/":")*noslash*P(1)^0)
+local pattern=Cs(slash^-1/""*R("az","AZ")*((S(":|")/":")+P(":"))*slash*P(1)^0)
function url.filename(filename)
local spec=hashed(filename)
local path=spec.path
@@ -3718,7 +4002,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-dir"] = package.loaded["l-dir"] or true
--- original size: 14229, stripped down to: 8740
+-- original size: 16765, stripped down to: 11003
if not modules then modules={} end modules ['l-dir']={
version=1.001,
@@ -3728,7 +4012,7 @@ if not modules then modules={} end modules ['l-dir']={
license="see context related readme files"
}
local type,select=type,select
-local find,gmatch,match,gsub=string.find,string.gmatch,string.match,string.gsub
+local find,gmatch,match,gsub,sub=string.find,string.gmatch,string.match,string.gsub,string.sub
local concat,insert,remove,unpack=table.concat,table.insert,table.remove,table.unpack
local lpegmatch=lpeg.match
local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
@@ -3737,53 +4021,127 @@ local dir=dir
local lfs=lfs
local attributes=lfs.attributes
local walkdir=lfs.dir
-local isdir=lfs.isdir
-local isfile=lfs.isfile
+local isdir=lfs.isdir
+local isfile=lfs.isfile
local currentdir=lfs.currentdir
local chdir=lfs.chdir
-local onwindows=os.type=="windows" or find(os.getenv("PATH"),";")
-if not isdir then
- function isdir(name)
- local a=attributes(name)
- return a and a.mode=="directory"
+local mkdir=lfs.mkdir
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";",1,true)
+if onwindows then
+ local tricky=S("/\\")*P(-1)
+ isdir=function(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+ end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
end
lfs.isdir=isdir
-end
-if not isfile then
- function isfile(name)
- local a=attributes(name)
- return a and a.mode=="file"
+ lfs.isfile=isfile
+else
+ isdir=function(name)
+ return attributes(name,"mode")=="directory"
end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
+ end
+ lfs.isdir=isdir
lfs.isfile=isfile
end
function dir.current()
return (gsub(currentdir(),"\\","/"))
end
-local lfsisdir=isdir
-local function isdir(path)
- path=gsub(path,"[/\\]+$","")
- return lfsisdir(path)
+local function glob_pattern_function(path,patt,recurse,action)
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ action(full)
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
+ end
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_function(dirs[i],patt,recurse,action)
+ end
+ end
+ end
end
-lfs.isdir=isdir
-local function globpattern(path,patt,recurse,action)
- if path=="/" then
- path=path.."."
- elseif not find(path,"/$") then
- path=path..'/'
- end
- if isdir(path) then
- for name in walkdir(path) do
- local full=path..name
- local mode=attributes(full,'mode')
- if mode=='file' then
- if find(full,patt) then
- action(full)
+local function glob_pattern_table(path,patt,recurse,result)
+ if not result then
+ result={}
+ end
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ result[#result+1]=full
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- globpattern(full,patt,recurse,action)
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_table(dirs[i],patt,recurse,result)
end
end
end
+ return result
+end
+local function globpattern(path,patt,recurse,method)
+ local kind=type(method)
+ if patt and sub(patt,1,-3)==path then
+ patt=false
+ end
+ if kind=="function" then
+ return glob_pattern_function(path,patt,recurse,method)
+ elseif kind=="table" then
+ return glob_pattern_table(path,patt,recurse,method)
+ else
+ return glob_pattern_table(path,patt,recurse,{})
+ end
end
dir.globpattern=globpattern
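-- Usage sketch (illustrative only): the new dispatcher accepts a function, a
-- table or nothing; patt is a lua string pattern matched against full names.
local found = dir.globpattern("tex/context","%.mkiv$",true) -- collected in a table
dir.globpattern("tex/context","%.mkiv$",true,function(name) -- or streamed per file
  print(name)
end)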
local function collectpattern(path,patt,recurse,result)
@@ -3795,34 +4153,40 @@ local function collectpattern(path,patt,recurse,result)
ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
end
if ok and type(scanner)=="function" then
- if not find(path,"/$") then path=path..'/' end
+ if not find(path,"/$") then
+ path=path..'/'
+ end
for name in scanner,first do
- local full=path..name
- local attr=attributes(full)
- local mode=attr.mode
- if mode=='file' then
- if find(full,patt) then
+ if name=="." then
+ elseif name==".." then
+ else
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and mode=="directory" then
+ attr.list=collectpattern(full,patt,recurse)
result[name]=attr
end
- elseif recurse and (mode=="directory") and (name~='.') and (name~="..") then
- attr.list=collectpattern(full,patt,recurse)
- result[name]=attr
end
end
end
return result
end
dir.collectpattern=collectpattern
-local separator
-if onwindows then
+local separator,pattern
+if onwindows then
local slash=S("/\\")/"/"
- pattern=Ct {
+ pattern={
[1]=(Cs(P(".")+slash^1)+Cs(R("az","AZ")*P(":")*slash^0)+Cc("./"))*V(2)*V(3),
[2]=Cs(((1-S("*?/\\"))^0*slash)^0),
[3]=Cs(P(1)^0)
}
-else
- pattern=Ct {
+else
+ pattern={
[1]=(C(P(".")+P("/")^1)+Cc("./"))*V(2)*V(3),
[2]=C(((1-S("*?/"))^0*P("/"))^0),
[3]=C(P(1)^0)
@@ -3840,10 +4204,9 @@ local function glob(str,t)
elseif isfile(str) then
t(str)
else
- local split=lpegmatch(pattern,str)
- if split then
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
local start=root..path
local result=lpegmatch(filter,start..base)
globpattern(start,result,recurse,t)
@@ -3864,16 +4227,12 @@ local function glob(str,t)
return { str }
end
else
- local split=lpegmatch(pattern,str)
- if split then
- local t=t or {}
- local action=action or function(name) t[#t+1]=name end
- local root,path,base=split[1],split[2],split[3]
- local recurse=find(base,"%*%*")
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
local start=root..path
local result=lpegmatch(filter,start..base)
- globpattern(start,result,recurse,action)
- return t
+ return globpattern(start,result,recurse,t)
else
return {}
end
@@ -3913,16 +4272,26 @@ end
local make_indeed=true
if onwindows then
function dir.mkdirs(...)
- local str,pth="",""
- for i=1,select("#",...) do
- local s=select(i,...)
- if s=="" then
- elseif str=="" then
- str=s
- else
- str=str.."/"..s
+ local n=select("#",...)
+ local str
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
end
end
+ local pth=""
local drive=false
local first,middle,last=match(str,"^(//)(//*)(.*)$")
if first then
@@ -3957,21 +4326,30 @@ if onwindows then
pth=pth.."/"..s
end
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
return pth,(isdir(pth)==true)
end
else
function dir.mkdirs(...)
- local str,pth="",""
- for i=1,select("#",...) do
- local s=select(i,...)
- if s and s~="" then
- if str~="" then
- str=str.."/"..s
- else
- str=s
+ local n=select("#",...)
+ local str,pth
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
+ end
end
end
end
@@ -3986,7 +4364,7 @@ else
pth=pth.."/"..s
end
if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
else
@@ -3994,7 +4372,7 @@ else
for s in gmatch(str,"[^/]+") do
pth=pth.."/"..s
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
end
@@ -4002,47 +4380,51 @@ else
end
end
dir.makedirs=dir.mkdirs
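-- Usage sketch (illustrative only; the target path is assumed): intermediate
-- directories are created one by one, and a single already existing argument
-- now returns early.
local pth, ok = dir.mkdirs("/tmp","context","cache")
-- pth is "/tmp/context/cache"; ok reports whether the leaf directory exists afterwards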
-if onwindows then
- function dir.expandname(str)
- local first,nothing,last=match(str,"^(//)(//*)(.*)$")
- if first then
- first=dir.current().."/"
- end
- if not first then
- first,last=match(str,"^(//)/*(.*)$")
- end
- if not first then
- first,last=match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d=currentdir()
- if chdir(first) then
- first=dir.current()
+do
+ local chdir=sandbox and sandbox.original(chdir) or chdir
+ if onwindows then
+ local xcurrentdir=dir.current
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=xcurrentdir().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=xcurrentdir()
+ end
+ chdir(d)
end
- chdir(d)
+ end
+ if not first then
+ first,last=xcurrentdir(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
end
end
- if not first then
- first,last=dir.current(),str
- end
- last=gsub(last,"//","/")
- last=gsub(last,"/%./","/")
- last=gsub(last,"^/*","")
- first=gsub(first,"/*$","")
- if last=="" or last=="." then
- return first
- else
- return first.."/"..last
- end
- end
-else
- function dir.expandname(str)
- if not find(str,"^/") then
- str=currentdir().."/"..str
+ else
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
+ end
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
end
- str=gsub(str,"//","/")
- str=gsub(str,"/%./","/")
- str=gsub(str,"(.)/%.$","%1")
- return str
end
end
file.expandname=dir.expandname
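-- Usage sketch (illustrative only), assuming a unix-like system with current
-- directory /home/user: relative names get the current directory prefixed and
-- "//" as well as "/./" collapse.
local e1 = dir.expandname("foo/./bar") -- "/home/user/foo/bar"
local e2 = dir.expandname("/tmp//x")   -- "/tmp/x"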
@@ -4085,7 +4467,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
--- original size: 1809, stripped down to: 1527
+-- original size: 1850, stripped down to: 1568
if not modules then modules={} end modules ['l-boolean']={
version=1.001,
@@ -4139,11 +4521,11 @@ function string.booleanstring(str)
return str=="yes" or str=="on" or str=="t"
end
end
-function string.is_boolean(str,default)
+function string.is_boolean(str,default,strict)
if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then
return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then
return false
end
end
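-- Usage sketch (illustrative only) of the new strict flag; when nothing matches,
-- the given default is presumably returned as before.
local a = string.is_boolean("yes")        -- true
local b = string.is_boolean("0")          -- false
local c = string.is_boolean("0",nil,true) -- nil: "0"/"1" no longer count in strict mode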
@@ -4157,7 +4539,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 33473, stripped down to: 14938
+-- original size: 37388, stripped down to: 15817
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4173,7 +4555,9 @@ local type=type
local char,byte,format,sub,gmatch=string.char,string.byte,string.format,string.sub,string.gmatch
local concat=table.concat
local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
-local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local lpegmatch=lpeg.match
+local patterns=lpeg.patterns
+local tabletopattern=lpeg.utfchartabletopattern
local bytepairs=string.bytepairs
local finder=lpeg.finder
local replacer=lpeg.replacer
@@ -4182,7 +4566,7 @@ local utfgmatch=utf.gmatch
local p_utftype=patterns.utftype
local p_utfstricttype=patterns.utfstricttype
local p_utfoffset=patterns.utfoffset
-local p_utf8char=patterns.utf8char
+local p_utf8char=patterns.utf8character
local p_utf8byte=patterns.utf8byte
local p_utfbom=patterns.utfbom
local p_newline=patterns.newline
@@ -4321,6 +4705,7 @@ if not utf.sub then
local pattern_zero=Cmt(p_utf8char,slide_zero)^0
local pattern_one=Cmt(p_utf8char,slide_one )^0
local pattern_two=Cmt(p_utf8char,slide_two )^0
+ local pattern_first=C(patterns.utf8character)
function utf.sub(str,start,stop)
if not start then
return str
@@ -4362,7 +4747,9 @@ if not utf.sub then
end
end
end
- if start>stop then
+ if start==1 and stop==1 then
+ return lpegmatch(pattern_first,str) or ""
+ elseif start>stop then
return ""
elseif start>1 then
b,e,n,first,last=0,0,0,start-1,stop
@@ -4381,15 +4768,52 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping)
- local pattern=Cs((p_utf8char/mapping)^0)
- return function(str)
- if not str or str=="" then
- return ""
+function utf.remapper(mapping,option)
+ local variant=type(mapping)
+ if variant=="table" then
+ if option=="dynamic" then
+ local pattern=false
+ table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ if not pattern then
+ pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ end
+ return lpegmatch(pattern,str)
+ end
+ end
+ elseif option=="pattern" then
+ return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
else
- return lpegmatch(pattern,str)
+ local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+ end
+ elseif variant=="function" then
+ if option=="pattern" then
+ return Cs((p_utf8char/mapping+p_utf8char)^0)
+ else
+ local pattern=Cs((p_utf8char/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
end
- end,pattern
+ else
+ return function(str)
+ return str or ""
+ end
+ end
end
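-- Usage sketch (illustrative only; the mapping table is assumed): a table based
-- remapper replaces listed characters and passes other utf characters through.
local remap = utf.remapper { ["á"] = "a", ["é"] = "e" }
print(remap("fiancée")) -- "fiancee"
-- utf.remapper(mapping,"dynamic") rebuilds its pattern when the table grows,
-- and utf.remapper(mapping,"pattern") returns just the lpeg pattern.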
function utf.replacer(t)
local r=replacer(t,false,false,true)
@@ -4439,190 +4863,157 @@ function utf.magic(f)
end
local utf16_to_utf8_be,utf16_to_utf8_le
local utf32_to_utf8_be,utf32_to_utf8_le
-local utf_16_be_linesplitter=patterns.utfbom_16_be^-1*lpeg.tsplitat(patterns.utf_16_be_nl)
-local utf_16_le_linesplitter=patterns.utfbom_16_le^-1*lpeg.tsplitat(patterns.utf_16_le_nl)
-if bytepairs then
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*left+right
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+local utf_16_be_getbom=patterns.utfbom_16_be^-1
+local utf_16_le_getbom=patterns.utfbom_16_le^-1
+local utf_32_be_getbom=patterns.utfbom_32_be^-1
+local utf_32_le_getbom=patterns.utfbom_32_le^-1
+local utf_16_be_linesplitter=utf_16_be_getbom*lpeg.tsplitat(patterns.utf_16_be_nl)
+local utf_16_le_linesplitter=utf_16_le_getbom*lpeg.tsplitat(patterns.utf_16_le_nl)
+local utf_32_be_linesplitter=utf_32_be_getbom*lpeg.tsplitat(patterns.utf_32_be_nl)
+local utf_32_le_linesplitter=utf_32_le_getbom*lpeg.tsplitat(patterns.utf_32_le_nl)
+local more=0
+local p_utf16_to_utf8_be=C(1)*C(1)/function(left,right)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf16_to_utf8_le=C(1)*C(1)/function(right,left)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf32_to_utf8_be=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(a)+256*256*byte(b)+256*byte(c)+byte(d))
+end
+local p_utf32_to_utf8_le=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(d)+256*256*byte(c)+256*byte(b)+byte(a))
+end
+p_utf16_to_utf8_be=P(true)/function() more=0 end*utf_16_be_getbom*Cs(p_utf16_to_utf8_be^0)
+p_utf16_to_utf8_le=P(true)/function() more=0 end*utf_16_le_getbom*Cs(p_utf16_to_utf8_le^0)
+p_utf32_to_utf8_be=P(true)/function() more=0 end*utf_32_be_getbom*Cs(p_utf32_to_utf8_be^0)
+p_utf32_to_utf8_le=P(true)/function() more=0 end*utf_32_le_getbom*Cs(p_utf32_to_utf8_le^0)
+patterns.utf16_to_utf8_be=p_utf16_to_utf8_be
+patterns.utf16_to_utf8_le=p_utf16_to_utf8_le
+patterns.utf32_to_utf8_be=p_utf32_to_utf8_be
+patterns.utf32_to_utf8_le=p_utf32_to_utf8_le
+utf16_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_be,s)
+ else
+ return s
end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in bytepairs(t[i]) do
- if right then
- local now=256*right+left
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+end
+local utf16_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_be_linesplitter,t)
end
- utf32_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*256*256*a+256*256*b
- else
- r=r+1
- result[t]=utfchar(more+256*a+b)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_be,s)
end
- return t
end
- utf32_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utflinesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,-1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more<0 then
- more=256*b+a
- else
- r=r+1
- result[t]=utfchar(more+256*256*256*b+256*256*a)
- more=-1
- end
- else
- break
- end
- end
- t[i]=concat(result,"",1,r)
- end
- return t
+ return t
+end
+utf16_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_le,s)
+ else
+ return s
end
-else
- utf16_to_utf8_be=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_be_linesplitter,t)
- end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if left=="\000" then
- r=r+1
- result[r]=utfchar(byte(right))
- elseif right then
- local now=256*byte(left)+byte(right)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
+end
+local utf16_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_le,s)
end
- return t
end
- utf16_to_utf8_le=function(t)
- if type(t)=="string" then
- t=lpegmatch(utf_16_le_linesplitter,t)
+ return t
+end
+utf32_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_be,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_be,s)
end
- local result={}
- for i=1,#t do
- local r,more=0,0
- for left,right in gmatch(t[i],"(.)(.)") do
- if right=="\000" then
- r=r+1
- result[r]=utfchar(byte(left))
- elseif right then
- local now=256*byte(right)+byte(left)
- if more>0 then
- now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
- more=0
- r=r+1
- result[r]=utfchar(now)
- elseif now>=0xD800 and now<=0xDBFF then
- more=now
- else
- r=r+1
- result[r]=utfchar(now)
- end
- end
- end
- t[i]=concat(result,"",1,r)
+ end
+ return t
+end
+utf32_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_le,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_le,s)
end
- return t
end
- utf32_to_utf8_le=function() return {} end
- utf32_to_utf8_be=function() return {} end
+ return t
end
+utf.utf16_to_utf8_le_t=utf16_to_utf8_le_t
+utf.utf16_to_utf8_be_t=utf16_to_utf8_be_t
+utf.utf32_to_utf8_le_t=utf32_to_utf8_le_t
+utf.utf32_to_utf8_be_t=utf32_to_utf8_be_t
utf.utf16_to_utf8_le=utf16_to_utf8_le
utf.utf16_to_utf8_be=utf16_to_utf8_be
utf.utf32_to_utf8_le=utf32_to_utf8_le
utf.utf32_to_utf8_be=utf32_to_utf8_be
-function utf.utf8_to_utf8(t)
+function utf.utf8_to_utf8_t(t)
return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
end
-function utf.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8_t(t,endian)
+ return endian and utf16_to_utf8_be_t(t) or utf16_to_utf8_le_t(t) or t
end
-function utf.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8_t(t,endian)
+ return endian and utf32_to_utf8_be_t(t) or utf32_to_utf8_le_t(t) or t
end
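-- Usage sketch (illustrative only): the plain converters now take and return
-- strings, while the new *_t variants keep the old table-of-lines behaviour.
local s8 = utf.utf16_to_utf8_be("\000H\000i")        -- "Hi"
local t8 = utf.utf16_to_utf8_be_t({ "\000H\000i" })  -- { "Hi" }, converted in place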
-local function little(c)
- local b=byte(c)
+local function little(b)
if b<0x10000 then
return char(b%256,b/256)
else
@@ -4631,8 +5022,7 @@ local function little(c)
return char(b1%256,b1/256,b2%256,b2/256)
end
end
-local function big(c)
- local b=byte(c)
+local function big(b)
if b<0x10000 then
return char(b/256,b%256)
else
@@ -4641,27 +5031,29 @@ local function big(c)
return char(b1/256,b1%256,b2/256,b2%256)
end
end
-local _,l_remap=utf.remapper(little)
-local _,b_remap=utf.remapper(big)
-function utf.utf8_to_utf16_be(str,nobom)
+local l_remap=Cs((p_utf8byte/little+P(1)/"")^0)
+local b_remap=Cs((p_utf8byte/big+P(1)/"")^0)
+local function utf8_to_utf16_be(str,nobom)
if nobom then
return lpegmatch(b_remap,str)
else
return char(254,255)..lpegmatch(b_remap,str)
end
end
-function utf.utf8_to_utf16_le(str,nobom)
+local function utf8_to_utf16_le(str,nobom)
if nobom then
return lpegmatch(l_remap,str)
else
return char(255,254)..lpegmatch(l_remap,str)
end
end
+utf.utf8_to_utf16_be=utf8_to_utf16_be
+utf.utf8_to_utf16_le=utf8_to_utf16_le
function utf.utf8_to_utf16(str,littleendian,nobom)
if littleendian then
- return utf.utf8_to_utf16_le(str,nobom)
+ return utf8_to_utf16_le(str,nobom)
else
- return utf.utf8_to_utf16_be(str,nobom)
+ return utf8_to_utf16_be(str,nobom)
end
end
local pattern=Cs (
@@ -4677,16 +5069,16 @@ function utf.xstring(s)
return format("0x%05X",type(s)=="number" and s or utfbyte(s))
end
function utf.toeight(str)
- if not str then
+ if not str or str=="" then
return nil
end
local utftype=lpegmatch(p_utfstricttype,str)
if utftype=="utf-8" then
- return sub(str,4)
- elseif utftype=="utf-16-le" then
- return utf16_to_utf8_le(str)
+ return sub(str,4)
elseif utftype=="utf-16-be" then
- return utf16_to_utf8_ne(str)
+ return utf16_to_utf8_be(str)
+ elseif utftype=="utf-16-le" then
+ return utf16_to_utf8_le(str)
else
return str
end
@@ -4765,7 +5157,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-math"] = package.loaded["l-math"] or true
--- original size: 915, stripped down to: 836
+-- original size: 974, stripped down to: 890
if not modules then modules={} end modules ['l-math']={
version=1.001,
@@ -4775,6 +5167,9 @@ if not modules then modules={} end modules ['l-math']={
license="see context related readme files"
}
local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.ceiling then
+ math.ceiling=math.ceil
+end
if not math.round then
function math.round(x) return floor(x+0.5) end
end
@@ -4802,7 +5197,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 26857, stripped down to: 15062
+-- original size: 34503, stripped down to: 18933
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -4821,25 +5216,43 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=_LUAVERSION<5.2 and load or function(str)
- return load(dump(load(str),true))
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
end
if not number then number={} end
local stripper=patterns.stripzeros
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local spaceortab=patterns.spaceortab
local function points(n)
+ n=tonumber(n)
return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n=tonumber(n)
return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
end
number.points=points
number.basepoints=basepoints
-local rubish=patterns.spaceortab^0*patterns.newline
-local anyrubish=patterns.spaceortab+patterns.newline
+local rubish=spaceortab^0*newline
+local anyrubish=spaceortab+newline
local anything=patterns.anything
-local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local stripped=(spaceortab^1/"")*newline
local leading=rubish^0/""
-local trailing=(anyrubish^1*patterns.endofstring)/""
+local trailing=(anyrubish^1*endofstring)/""
local redundant=rubish^3/"\n"
local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
function strings.collapsecrlf(str)
@@ -4885,18 +5298,44 @@ local pattern=Carg(1)/function(t)
else
return ""
end
- end+patterns.newline*Cp()/function(position)
+ end+newline*Cp()/function(position)
extra,start=0,position
end+patterns.anything
)^1)
function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end })
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
end
+strings.striplong=strings.striplines
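-- Usage sketch (illustrative only): striplines selects one of the precompiled
-- patterns by name; unknown names fall back to "prune and collapse".
local s = strings.striplines("  one  \n\n\n  two  ","prune and no empty")
-- s is expected to be "one\ntwo": surrounding spaces pruned, empty lines dropped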
function strings.nice(str)
str=gsub(str,"[:%-+_]+"," ")
return str
@@ -4934,10 +5373,10 @@ string.tracedchars=tracedchars
strings.tracers=tracedchars
function string.tracedchar(b)
if type(b)=="number" then
- return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ return tracedchars[b] or (utfchar(b).." (U+"..format("%05X",b)..")")
else
local c=utfbyte(b)
- return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ return tracedchars[c] or (b.." (U+"..(c and format("%05X",c) or "?????")..")")
end
end
function number.signed(i)
@@ -4972,31 +5411,58 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
-local preamble=[[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-local formattednumber = number.formatted
-local sparseexponent = number.sparseexponent
-]]
local template=[[
%s
%s
return function(%s) return %s end
]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -5035,7 +5501,7 @@ local format_i=function(f)
if f and f~="" then
return format("format('%%%si',a%s)",f,n)
else
- return format("format('%%i',a%s)",n)
+ return format("format('%%i',a%s)",n)
end
end
local format_d=format_i
@@ -5047,6 +5513,14 @@ local format_f=function(f)
n=n+1
return format("format('%%%sf',a%s)",f,n)
end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -5261,7 +5735,7 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
@@ -5272,7 +5746,6 @@ local builder=Cs { "start",
+V("j")+V("J")
+V("m")+V("M")
+V("z")
-+V("*")
)+V("*")
)*(P(-1)+Carg(1))
)^0,
@@ -5281,6 +5754,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -5315,11 +5789,12 @@ local builder=Cs { "start",
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -5328,10 +5803,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -5343,10 +5818,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
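-- Usage sketch (illustrative only) of the new %F directive: integral values drop
-- their fractional part, other values honour the given precision.
local f = string.formatters["%.3F"]
-- f(2)    -> "2"
-- f(2.25) -> "2.250"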
@@ -5354,8 +5841,12 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
@@ -5364,9 +5855,28 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+local pattern=Cs((newline/os.newline+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
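-- Usage sketch (illustrative only) of the two new string helpers:
local q1 = string.optionalquoted('--path=c:/some dir') -- '"--path=c:/some dir"' (a space triggers quoting)
local q2 = string.optionalquoted('oeps')               -- 'oeps' (left untouched)
local r  = string.replacenewlines("one\ntwo")          -- newlines become os.newline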
end -- of closure
@@ -5375,7 +5885,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 23952, stripped down to: 16092
+-- original size: 25338, stripped down to: 16247
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -5388,7 +5898,7 @@ utilities=utilities or {}
utilities.tables=utilities.tables or {}
local tables=utilities.tables
local format,gmatch,gsub,sub=string.format,string.gmatch,string.gsub,string.sub
-local concat,insert,remove=table.concat,table.insert,table.remove
+local concat,insert,remove,sort=table.concat,table.insert,table.remove,table.sort
local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
@@ -5396,27 +5906,29 @@ local sortedkeys,sortedpairs=table.sortedkeys,table.sortedpairs
local formatters=string.formatters
local utftoeight=utf.toeight
local splitter=lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast)
- local composed,shortcut,t=nil,nil,{}
+function utilities.tables.definetable(target,nofirst,nolast)
+ local composed,t=nil,{}
local snippets=lpegmatch(splitter,target)
for i=1,#snippets-(nolast and 1 or 0) do
local name=snippets[i]
if composed then
- composed=shortcut.."."..name
- shortcut=shortcut.."_"..name
- t[#t+1]=formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ composed=composed.."."..name
+ t[#t+1]=formatters["if not %s then %s = { } end"](composed,composed)
else
composed=name
- shortcut=name
if not nofirst then
t[#t+1]=formatters["%s = %s or { }"](composed,composed)
end
end
end
- if nolast then
- composed=shortcut.."."..snippets[#snippets]
+ if composed then
+ if nolast then
+ composed=composed.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+ else
+ return "",target
end
- return concat(t,"\n"),composed
end
function tables.definedtable(...)
local t=_G
@@ -5443,7 +5955,7 @@ function tables.accesstable(target,root)
end
function tables.migratetable(target,v,root)
local t=root or _G
- local names=string.split(target,".")
+ local names=lpegmatch(splitter,target)
for i=1,#names-1 do
local name=names[i]
t[name]=t[name] or {}
@@ -5463,6 +5975,15 @@ function tables.removevalue(t,value)
end
end
end
+function tables.replacevalue(t,oldvalue,newvalue)
+ if oldvalue and newvalue then
+ for i=1,#t do
+ if t[i]==oldvalue then
+ t[i]=newvalue
+ end
+ end
+ end
+end
function tables.insertbeforevalue(t,value,extra)
for i=1,#t do
if t[i]==extra then
@@ -5610,7 +6131,7 @@ local f_ordered_string=formatters["%q,"]
local f_ordered_number=formatters["%s,"]
local f_ordered_boolean=formatters["%l,"]
function table.fastserialize(t,prefix)
- local r={ prefix or "return" }
+ local r={ type(prefix)=="string" and prefix or "return" }
local m=1
local function fastserialize(t,outer)
local n=#t
@@ -5807,7 +6328,8 @@ function table.serialize(root,name,specification)
local t
local n=1
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
@@ -5815,19 +6337,17 @@ function table.serialize(root,name,specification)
return nil
end
end
- if n==#t then
+ if n==nt then
local tt={}
- local nt=0
- for i=1,#t do
+ for i=1,nt do
local v=t[i]
local tv=type(v)
- nt=nt+1
if tv=="number" then
- tt[nt]=v
+ tt[i]=v
elseif tv=="string" then
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
return nil
end
@@ -5856,7 +6376,7 @@ function table.serialize(root,name,specification)
end
depth=depth+1
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first=nil
local last=0
last=#root
@@ -5875,13 +6395,13 @@ function table.serialize(root,name,specification)
local v=root[k]
local tv=type(v)
local tk=type(k)
- if first and tk=="number" and k>=first and k<=last then
+ if first and tk=="number" and k<=last and k>=first then
if tv=="number" then
n=n+1 t[n]=f_val_num(depth,v)
elseif tv=="string" then
n=n+1 t[n]=f_val_str(depth,v)
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
n=n+1 t[n]=f_val_not(depth)
else
local st=simple_table(v)
@@ -5911,13 +6431,13 @@ function table.serialize(root,name,specification)
n=n+1 t[n]=f_key_boo_value_str(depth,k,v)
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
- n=n+1 t[n]=f_key_num_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_num_value_not(depth,k)
elseif tk=="string" then
- n=n+1 t[n]=f_key_str_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_str_value_not(depth,k)
elseif tk=="boolean" then
- n=n+1 t[n]=f_key_boo_value_not(depth,k,v)
+ n=n+1 t[n]=f_key_boo_value_not(depth,k)
end
else
local st=simple_table(v)
@@ -5969,7 +6489,7 @@ function table.serialize(root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,1,0)
end
end
@@ -6132,7 +6652,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-prs"] = package.loaded["util-prs"] or true
--- original size: 19604, stripped down to: 13998
+-- original size: 21780, stripped down to: 15121
if not modules then modules={} end modules ['util-prs']={
version=1.001,
@@ -6154,6 +6674,8 @@ local patterns=parsers.patterns or {}
parsers.patterns=patterns
local setmetatableindex=table.setmetatableindex
local sortedhash=table.sortedhash
+local sortedkeys=table.sortedkeys
+local tohash=table.tohash
local digit=R("09")
local space=P(' ')
local equal=P("=")
@@ -6203,9 +6725,7 @@ patterns.settings_to_hash_a=pattern_a_s
patterns.settings_to_hash_b=pattern_b_s
patterns.settings_to_hash_c=pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if type(str)=="table" then
- return set
- elseif how=="strict" then
+ if how=="strict" then
return (pattern_c/set)^1
elseif how=="tolerant" then
return (pattern_b/set)^1
@@ -6214,7 +6734,9 @@ function parsers.make_settings_to_hash_pattern(set,how)
end
end
function parsers.settings_to_hash(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6223,16 +6745,16 @@ function parsers.settings_to_hash(str,existing)
else
return str
end
- elseif str and str~="" then
+ else
hash=existing or {}
lpegmatch(pattern_a_s,str)
return hash
- else
- return {}
end
end
function parsers.settings_to_hash_tolerant(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6241,16 +6763,16 @@ function parsers.settings_to_hash_tolerant(str,existing)
else
return str
end
- elseif str and str~="" then
+ else
hash=existing or {}
lpegmatch(pattern_b_s,str)
return hash
- else
- return {}
end
end
function parsers.settings_to_hash_strict(str,existing)
- if type(str)=="table" then
+ if not str or str=="" then
+ return nil
+ elseif type(str)=="table" then
if existing then
for k,v in next,str do
existing[k]=v
@@ -6263,8 +6785,6 @@ function parsers.settings_to_hash_strict(str,existing)
hash=existing or {}
lpegmatch(pattern_c_s,str)
return next(hash) and hash
- else
- return nil
end
end
local separator=comma*space^0
@@ -6272,27 +6792,46 @@ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comm
local pattern=spaces*Ct(value*(separator*value)^0)
patterns.settings_to_array=pattern
function parsers.settings_to_array(str,strict)
- if type(str)=="table" then
- return str
- elseif not str or str=="" then
+ if not str or str=="" then
return {}
+ elseif type(str)=="table" then
+ return str
elseif strict then
- if find(str,"{") then
+ if find(str,"{",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
- elseif find(str,",") then
+ elseif find(str,",",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
end
-local separator=space^0*comma*space^0
-local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(comma+P(-1)))))^0)
-local withvalue=Carg(1)*value/function(f,s) return f(s) end
-local pattern_a=spaces*Ct(value*(separator*value)^0)
-local pattern_b=spaces*withvalue*(separator*withvalue)^0
+local cache_a={}
+local cache_b={}
+function parsers.groupedsplitat(symbol,withaction)
+ if not symbol then
+ symbol=","
+ end
+ local pattern=(withaction and cache_b or cache_a)[symbol]
+ if not pattern then
+ local symbols=S(symbol)
+ local separator=space^0*symbols*space^0
+ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(symbols+P(-1)))))^0)
+ if withaction then
+ local withvalue=Carg(1)*value/function(f,s) return f(s) end
+ pattern=spaces*withvalue*(separator*withvalue)^0
+ cache_b[symbol]=pattern
+ else
+ pattern=spaces*Ct(value*(separator*value)^0)
+ cache_a[symbol]=pattern
+ end
+ end
+ return pattern
+end
+local pattern_a=parsers.groupedsplitat(",",false)
+local pattern_b=parsers.groupedsplitat(",",true)
function parsers.stripped_settings_to_array(str)
if not str or str=="" then
return {}
@@ -6317,8 +6856,8 @@ function parsers.add_settings_to_array(t,str)
end
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t,tn,s={},0,table.sortedkeys(h)
- omit=omit and table.tohash(omit)
+ local t,tn,s={},0,sortedkeys(h)
+ omit=omit and tohash(omit)
for i=1,#s do
local key=s[i]
if not omit or not omit[key] then
@@ -6354,12 +6893,9 @@ function parsers.array_to_string(a,separator)
return ""
end
end
-function parsers.settings_to_set(str,t)
- t=t or {}
- for s in gmatch(str,"[^, ]+") do
- t[s]=true
- end
- return t
+local pattern=Cf(Ct("")*Cg(C((1-S(", "))^1)*S(", ")^0*Cc(true))^1,rawset)
+function utilities.parsers.settings_to_set(str,t)
+ return str and lpegmatch(pattern,str) or {}
end
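-- Usage sketch (illustrative only): the gmatch loop became one lpeg pattern; the
-- optional target argument is no longer filled in, a fresh set is returned.
local set = utilities.parsers.settings_to_set("alpha, beta gamma")
-- set is { alpha = true, beta = true, gamma = true }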
function parsers.simple_hash_to_string(h,separator)
local t,tn={},0
@@ -6371,12 +6907,16 @@ function parsers.simple_hash_to_string(h,separator)
end
return concat(t,separator or ",")
end
-local str=C((1-whitespace-equal)^1)
+local str=Cs(lpegpatterns.unquoted)+C((1-whitespace-equal)^1)
local setting=Cf(Carg(1)*(whitespace^0*Cg(str*whitespace^0*(equal*whitespace^0*str+Cc(""))))^1,rawset)
local splitter=setting^1
function utilities.parsers.options_to_hash(str,target)
return str and lpegmatch(splitter,str,1,target or {}) or {}
end
+local splitter=lpeg.tsplitat(" ")
+function utilities.parsers.options_to_array(str)
+ return str and lpegmatch(splitter,str) or {}
+end
local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
local pattern_a=spaces*Ct(value*(separator*value)^0)
local function repeater(n,str)
@@ -6463,7 +7003,7 @@ function parsers.keq_to_hash(str)
end
local defaultspecification={ separator=",",quote='"' }
function parsers.csvsplitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator=specification.separator
local quotechar=specification.quote
local separator=S(separator~="" and separator or ",")
@@ -6487,7 +7027,7 @@ function parsers.csvsplitter(specification)
end
end
function parsers.rfc4180splitter(specification)
- specification=specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator=specification.separator
local quotechar=P(specification.quote)
local dquotechar=quotechar*quotechar
@@ -6498,7 +7038,7 @@ function parsers.rfc4180splitter(specification)
local field=escaped+non_escaped+Cc("")
local record=Ct(field*(separator*field)^1)
local headerline=record*Cp()
- local wholeblob=Ct((newline^-1*record)^0)
+ local wholeblob=Ct((newline^(specification.strict and -1 or 1)*record)^0)
return function(data,getheader)
if getheader then
local header,position=lpegmatch(headerline,data)
@@ -6535,20 +7075,20 @@ function parsers.stepper(str,n,action)
lpegmatch(stepper,str,1,n,action or print)
end
end
-local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
-local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+P(1))^0)
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
patterns.unittotex=pattern
function parsers.unittotex(str,textmode)
return lpegmatch(textmode and pattern_text or pattern_math,str)
end
-local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+P(1))^0)
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+anything)^0)
function parsers.unittoxml(str)
return lpegmatch(pattern,str)
end
local cache={}
-local spaces=lpeg.patterns.space^0
+local spaces=lpegpatterns.space^0
local dummy=function() end
-table.setmetatableindex(cache,function(t,k)
+setmetatableindex(cache,function(t,k)
local separator=P(k)
local value=(1-separator)^0
local pattern=spaces*C(value)*separator^0*Cp()
@@ -6613,6 +7153,18 @@ function utilities.parsers.runtime(time)
local seconds=mod(time,60)
return days,hours,minutes,seconds
end
+local spacing=whitespace^0
+local apply=P("->")
+local method=C((1-apply)^1)
+local token=lbrace*C((1-rbrace)^1)*rbrace+C(anything^1)
+local pattern=spacing*(method*spacing*apply+Carg(1))*spacing*token
+function utilities.parsers.splitmethod(str,default)
+ if str then
+ return lpegmatch(pattern,str,1,default or false)
+ else
+ return default or false,""
+ end
+end
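-- Usage sketch (illustrative only; the strings are assumed examples):
local m1, r1 = utilities.parsers.splitmethod("resolve->whatever.tex") -- "resolve", "whatever.tex"
local m2, r2 = utilities.parsers.splitmethod("whatever.tex","file")   -- "file", "whatever.tex"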
end -- of closure
@@ -6702,7 +7254,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-set"] = package.loaded["trac-set"] or true
--- original size: 12365, stripped down to: 8799
+-- original size: 12482, stripped down to: 8864
if not modules then modules={} end modules ['trac-set']={
version=1.001,
@@ -6730,7 +7282,7 @@ function setters.initialize(filename,name,values)
local data=setter.data
if data then
for key,newvalue in next,values do
- local newvalue=is_boolean(newvalue,newvalue)
+ local newvalue=is_boolean(newvalue,newvalue,true)
local functions=data[key]
if functions then
local oldvalue=functions.value
@@ -6784,7 +7336,7 @@ local function set(t,what,newvalue)
elseif not value then
value=false
else
- value=is_boolean(value,value)
+ value=is_boolean(value,value,true)
end
w=topattern(w,true,true)
for name,functions in next,data do
@@ -6923,6 +7475,7 @@ function setters.new(name)
report=function(...) setters.report (setter,...) end,
enable=function(...) enable (setter,...) end,
disable=function(...) disable (setter,...) end,
+ reset=function(...) reset (setter,...) end,
register=function(...) register(setter,...) end,
list=function(...) list (setter,...) end,
show=function(...) show (setter,...) end,
@@ -7014,7 +7567,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-log"] = package.loaded["trac-log"] or true
--- original size: 25391, stripped down to: 16561
+-- original size: 29359, stripped down to: 20483
if not modules then modules={} end modules ['trac-log']={
version=1.001,
@@ -7023,15 +7576,18 @@ if not modules then modules={} end modules ['trac-log']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
+local next,type,select,print=next,type,select,print
local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
local format,gmatch,find=string.format,string.gmatch,string.find
local concat,insert,remove=table.concat,table.insert,table.remove
local topattern=string.topattern
-local next,type,select=next,type,select
local utfchar=utf.char
+local datetime=os.date
+local openfile=io.open
local setmetatableindex=table.setmetatableindex
local formatters=string.formatters
local texgetcount=tex and tex.getcount
+local variant="default"
logs=logs or {}
local logs=logs
local moreinfo=[[
@@ -7041,32 +7597,122 @@ maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
wiki : http://contextgarden.net
]]
-utilities.strings.formatters.add (
+formatters.add (
formatters,"unichr",
[["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
)
-utilities.strings.formatters.add (
+formatters.add (
formatters,"chruni",
[[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
)
local function ignore() end
setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
local report,subreport,status,settarget,setformats,settranslations
-local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters
+local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters,newline
if tex and (tex.jobname or tex.formatname) then
- local valueiskey={ __index=function(t,k) t[k]=k return k end }
- local target="term and log"
+ local function useluawrites()
+ local texio_write_nl=texio.write_nl
+ local texio_write=texio.write
+ local io_write=io.write
+ write_nl=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write_nl("log",...)
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target=="log" then
+ texio_write_nl("log",...)
+ elseif target=="term" then
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target~="none" then
+ texio_write_nl("log",target,...)
+ texio_write_nl("term","")
+ io_write(target,...)
+ end
+ end
+ write=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write("log",...)
+ io_write(...)
+ elseif target=="log" then
+ texio_write("log",...)
+ elseif target=="term" then
+ io_write(...)
+ elseif target~="none" then
+ texio_write("log",target,...)
+ io_write(target,...)
+ end
+ end
+ texio.write=write
+ texio.write_nl=write_nl
+ useluawrites=ignore
+ end
+ local whereto="both"
+ local target=nil
+ local targets=nil
+ local formats=table.setmetatableindex("self")
+ local translations=table.setmetatableindex("self")
+ local report_yes,subreport_yes,direct_yes,subdirect_yes,status_yes
+ local report_nop,subreport_nop,direct_nop,subdirect_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="none",
+ log="none",
+ file="none",
+ console="term",
+ terminal="term",
+ both="term",
+ },
+ }
+ }
logs.flush=io.flush
- local formats={} setmetatable(formats,valueiskey)
- local translations={} setmetatable(translations,valueiskey)
writer=function(...)
write_nl(target,...)
end
newline=function()
write_nl(target,"\n")
end
- local report_yes=formatters["%-15s > %s\n"]
- local report_nop=formatters["%-15s >\n"]
report=function(a,b,c,...)
if c then
write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
@@ -7078,8 +7724,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local direct_yes=formatters["%-15s > %s"]
- local direct_nop=formatters["%-15s >"]
direct=function(a,b,c,...)
if c then
return direct_yes(translations[a],formatters[formats[b]](c,...))
@@ -7091,8 +7735,6 @@ if tex and (tex.jobname or tex.formatname) then
return ""
end
end
- local subreport_yes=formatters["%-15s > %s > %s\n"]
- local subreport_nop=formatters["%-15s > %s >\n"]
subreport=function(a,s,b,c,...)
if c then
write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
@@ -7104,8 +7746,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local subdirect_yes=formatters["%-15s > %s > %s"]
- local subdirect_nop=formatters["%-15s > %s >"]
subdirect=function(a,s,b,c,...)
if c then
return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
@@ -7117,8 +7757,6 @@ if tex and (tex.jobname or tex.formatname) then
return ""
end
end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
status=function(a,b,c,...)
if c then
write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
@@ -7130,16 +7768,13 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
end
- local targets={
- logfile="log",
- log="log",
- file="log",
- console="term",
- terminal="term",
- both="term and log",
- }
- settarget=function(whereto)
- target=targets[whereto or "both"] or targets.both
+ settarget=function(askedwhereto)
+ whereto=askedwhereto or whereto or "both"
+ target=targets[whereto]
+ if not target then
+ whereto="both"
+ target=targets[whereto]
+ end
if target=="term" or target=="term and log" then
logs.flush=io.flush
else
@@ -7168,21 +7803,74 @@ if tex and (tex.jobname or tex.formatname) then
writeline(target,f(...))
end
end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- direct_yes=f.direct_yes or direct_yes
- direct_nop=f.direct_nop or direct_nop
- subdirect_yes=f.subdirect_yes or subdirect_yes
- subdirect_nop=f.subdirect_nop or subdirect_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
+ setformatters=function(specification)
+ local t=nil
+ local f=nil
+ local d=variants.default
+ if not specification then
+ elseif type(specification)=="table" then
+ t=specification.targets
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ t=v.targets
+ f=v.formats
+ variant=specification
+ end
+ end
+ targets=t or d.targets
+ target=targets[whereto] or target
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ direct_yes=f.direct_yes
+ direct_nop=f.direct_nop
+ subdirect_yes=f.subdirect_yes
+ subdirect_nop=f.subdirect_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ if variant=="ansi" then
+ useluawrites()
+ end
+ settarget(whereto)
+ end
+ setformatters(variant)
setlogfile=ignore
settimedlog=ignore
else
+ local report_yes,subreport_yes,status_yes
+ local report_nop,subreport_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ }
logs.flush=ignore
writer=function(s)
write_nl(s)
@@ -7190,8 +7878,6 @@ else
newline=function()
write_nl("\n")
end
- local report_yes=formatters["%-15s | %s"]
- local report_nop=formatters["%-15s |"]
report=function(a,b,c,...)
if c then
write_nl(report_yes(a,formatters[b](c,...)))
@@ -7203,8 +7889,6 @@ else
write_nl("")
end
end
- local subreport_yes=formatters["%-15s | %s | %s"]
- local subreport_nop=formatters["%-15s | %s |"]
subreport=function(a,sub,b,c,...)
if c then
write_nl(subreport_yes(a,sub,formatters[b](c,...)))
@@ -7216,8 +7900,6 @@ else
write_nl("")
end
end
- local status_yes=formatters["%-15s : %s\n"]
- local status_nop=formatters["%-15s :\n"]
status=function(a,b,c,...)
if c then
write_nl(status_yes(a,formatters[b](c,...)))
@@ -7242,14 +7924,34 @@ else
writeline(f(s))
end
end
- setformatters=function(f)
- report_yes=f.report_yes or report_yes
- report_nop=f.report_nop or report_nop
- subreport_yes=f.subreport_yes or subreport_yes
- subreport_nop=f.subreport_nop or subreport_nop
- status_yes=f.status_yes or status_yes
- status_nop=f.status_nop or status_nop
- end
+ setformatters=function(specification)
+ local f=nil
+ local d=variants.default
+ if specification then
+ if type(specification)=="table" then
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ f=v.formats
+ end
+ end
+ end
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ end
+ setformatters(variant)
setlogfile=function(name,keepopen)
if name and name~="" then
local localtime=os.localtime
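Note: the reworked setformatters above accepts either a table or the name of a predefined variant ("default" or "ansi") and completes whatever is missing from the default variant through a metatable index. A minimal standalone sketch of that fallback pattern in plain Lua (illustrative names, not the ConTeXt API):

local default = {
  report_yes = "%-15s > %s\n",
  report_nop = "%-15s >\n",
}

-- complete a partial specification from the defaults via __index
local function completed(spec)
  return setmetatable(spec or {}, { __index = default })
end

local f = completed { report_yes = "%-15s | %s\n" }
print(f.report_yes)  -- overridden entry
print(f.report_nop)  -- inherited from the default variant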
@@ -7368,9 +8070,10 @@ local function setblocked(category,value)
v.state=value
end
else
- states=utilities.parsers.settings_to_hash(category)
+ states=utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
for c,_ in next,states do
- if data[c] then
+ local v=data[c]
+ if v then
v.state=value
else
c=topattern(c,true,true)
@@ -7501,13 +8204,13 @@ end
local simple=logs.reporter("comment")
logs.simple=simple
logs.simpleline=simple
-function logs.setprogram () end
-function logs.extendbanner() end
-function logs.reportlines () end
-function logs.reportbanner() end
-function logs.reportline () end
-function logs.simplelines () end
-function logs.help () end
+logs.setprogram=ignore
+logs.extendbanner=ignore
+logs.reportlines=ignore
+logs.reportbanner=ignore
+logs.reportline=ignore
+logs.simplelines=ignore
+logs.help=ignore
local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
local p_newline=lpeg.patterns.newline
local linewise=(
@@ -7584,10 +8287,11 @@ function logs.application(t)
end
return t
end
-function logs.system(whereto,process,jobname,category,...)
- local message=formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+local f_syslog=formatters["%s %s => %s => %s => %s\r"]
+function logs.system(whereto,process,jobname,category,fmt,arg,...)
+ local message=f_syslog(datetime("%d/%m/%y %H:%m:%S"),process,jobname,category,arg==nil and fmt or format(fmt,arg,...))
for i=1,10 do
- local f=io.open(whereto,"a")
+ local f=openfile(whereto,"a")
if f then
f:write(message)
f:close()
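Note: logs.system now hoists the formatter out of the function and only calls string.format when arguments actually follow the format string. A minimal sketch of that idiom (the helper name is illustrative):

local format = string.format

local function compose(fmt, arg, ...)
  -- with no trailing arguments the string is passed through untouched,
  -- so stray percent signs in plain messages are not interpreted
  return arg == nil and fmt or format(fmt, arg, ...)
end

print(compose("progress: 50% done"))      -- no formatting applied
print(compose("%s => %s", "job", "step")) -- formatted as usual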
@@ -7649,7 +8353,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
--- original size: 6501, stripped down to: 5156
+-- original size: 6704, stripped down to: 5343
if not modules then modules={} end modules ['trac-inf']={
version=1.001,
@@ -7659,7 +8363,7 @@ if not modules then modules={} end modules ['trac-inf']={
license="see context related readme files"
}
local type,tonumber,select=type,tonumber,select
-local format,lower=string.format,string.lower
+local format,lower,find=string.format,string.lower,string.find
local concat=table.concat
local clock=os.gettimeofday or os.clock
local setmetatableindex=table.setmetatableindex
@@ -7750,7 +8454,8 @@ function statistics.show()
if statistics.enable then
local register=statistics.register
register("used platform",function()
- return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
end)
register("luatex banner",function()
return lower(status.banner)
@@ -7763,14 +8468,23 @@ function statistics.show()
return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
end)
if jit then
- local status={ jit.status() }
- if status[1] then
- register("luajit status",function()
- return concat(status," ",2)
- end)
- end
- end
- register("current memory usage",statistics.memused)
+ local jitstatus={ jit.status() }
+ if jitstatus[1] then
+ register("luajit options",concat(jitstatus," ",2))
+ end
+ end
+ register("lua properties",function()
+ local list=status.list()
+ local hashchar=tonumber(list.luatex_hashchars)
+ local mask=lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask=="utf" and "τεχ" or "tex")
+ end)
register("runtime",statistics.runtime)
logs.newline()
for i=1,#statusinfo do
@@ -7812,15 +8526,6 @@ function statistics.tracefunction(base,tag,...)
statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
end
end
-commands=commands or {}
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
end -- of closure
@@ -7829,7 +8534,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
--- original size: 5773, stripped down to: 3453
+-- original size: 5829, stripped down to: 3501
if not modules then modules={} end modules ['trac-pro']={
version=1.001,
@@ -7846,14 +8551,16 @@ local namespaces=namespaces
local registered={}
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("reference to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("reference to %a in protected namespace %a",k,name)
end
end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("assignment to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("assignment to %a in protected namespace %a",k,name)
end
@@ -8104,7 +8811,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 3708, stripped down to: 2568
+-- original size: 3898, stripped down to: 2644
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -8184,20 +8891,22 @@ end
function debugger.disable()
debug.sethook()
end
-function traceback()
- local level=1
+local function showtraceback(rep)
+ local level=2
+ local reporter=rep or report
while true do
- local info=debug.getinfo(level,"Sl")
+ local info=getinfo(level,"Sl")
if not info then
break
elseif info.what=="C" then
- print(format("%3i : C function",level))
+ reporter("%2i : %s",level-1,"C function")
else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
end
level=level+1
end
end
+debugger.showtraceback=showtraceback
end -- of closure
@@ -8383,7 +9092,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
--- original size: 6251, stripped down to: 3488
+-- original size: 7100, stripped down to: 3978
if not modules then modules={} end modules ['util-tpl']={
version=1.001,
@@ -8425,7 +9134,7 @@ local sqlescape=lpeg.replacer {
{ "\r\n","\\n" },
{ "\r","\\n" },
}
-local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'"))
+local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'"))
lpegpatterns.sqlescape=sqlescape
lpegpatterns.sqlquoted=sqlquoted
local luaescape=lpegpatterns.luaescape
@@ -8448,12 +9157,24 @@ local quotedescapers={
local luaescaper=escapers.lua
local quotedluaescaper=quotedescapers.lua
local function replacekeyunquoted(s,t,how,recurse)
- local escaper=how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
end
local function replacekeyquoted(s,t,how,recurse)
- local escaper=how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+local function replaceoptional(l,m,r,t,how,recurse)
+ local v=t[l]
+ return v and v~="" and lpegmatch(replacer,r,1,t,how or "lua",recurse or false) or ""
end
local single=P("%")
local double=P("%%")
@@ -8468,11 +9189,16 @@ local nolquoted=lquoted/''
local norquoted=rquoted/''
local nolquotedq=lquotedq/''
local norquotedq=rquotedq/''
-local key=nosingle*((C((1-nosingle )^1)*Carg(1)*Carg(2)*Carg(3))/replacekey )*nosingle
-local quoted=nolquotedq*((C((1-norquotedq)^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyquoted )*norquotedq
-local unquoted=nolquoted*((C((1-norquoted )^1)*Carg(1)*Carg(2)*Carg(3))/replacekeyunquoted)*norquoted
+local noloptional=P("%?")/''
+local noroptional=P("?%")/''
+local nomoptional=P(":")/''
+local args=Carg(1)*Carg(2)*Carg(3)
+local key=nosingle*((C((1-nosingle )^1)*args)/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq )^1)*args)/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*args)/replacekeyunquoted)*norquoted
+local optional=noloptional*((C((1-nomoptional)^1)*nomoptional*C((1-noroptional)^1)*args)/replaceoptional)*noroptional
local any=P(1)
- replacer=Cs((unquoted+quoted+escape+key+any)^0)
+ replacer=Cs((unquoted+quoted+escape+optional+key+any)^0)
local function replace(str,mapping,how,recurse)
if mapping and str then
return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
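Note: the new replaceoptional rule adds conditional template segments of the form %?key: body ?% that only survive when the mapping holds a non-empty value for key (the syntax is read off the lpeg grammar above). A toy gsub-based illustration of the idea, not the ConTeXt lpeg implementation:

local function replaceoptional(template, mapping)
  -- keep or drop each %?key: body ?% block depending on mapping[key]
  template = template:gsub("%%%?(.-):(.-)%?%%", function(key, body)
    local v = mapping[key]
    if v and v ~= "" then
      return (body:gsub("%%(.-)%%", function(k) return mapping[k] or "" end))
    else
      return ""
    end
  end)
  -- then expand the remaining plain %key% placeholders
  return (template:gsub("%%(.-)%%", function(k) return mapping[k] or "" end))
end

print(replaceoptional("select * from t %?name: where name = %name% ?%", { name = "foo" })) -- keeps the where clause
print(replaceoptional("select * from t %?name: where name = %name% ?%", { }))              -- drops the whole block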
@@ -8511,7 +9237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-env"] = package.loaded["util-env"] or true
--- original size: 8807, stripped down to: 5085
+-- original size: 8022, stripped down to: 5038
if not modules then modules={} end modules ['util-env']={
version=1.001,
@@ -8522,7 +9248,7 @@ if not modules then modules={} end modules ['util-env']={
}
local allocate,mark=utilities.storage.allocate,utilities.storage.mark
local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
-local unquoted,quoted=string.unquoted,string.quoted
+local unquoted,quoted,optionalquoted=string.unquoted,string.quoted,string.optionalquoted
local concat,insert,remove=table.concat,table.insert,table.remove
environment=environment or {}
local environment=environment
@@ -8635,24 +9361,14 @@ function environment.splitarguments(separator)
return before,after
end
function environment.reconstructcommandline(arg,noquote)
+ local resolveprefix=resolvers.resolve
arg=arg or environment.originalarguments
if noquote and #arg==1 then
- local a=arg[1]
- a=resolvers.resolve(a)
- a=unquoted(a)
- return a
+ return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1])
elseif #arg>0 then
local result={}
for i=1,#arg do
- local a=arg[i]
- a=resolvers.resolve(a)
- a=unquoted(a)
- a=gsub(a,'"','\\"')
- if find(a," ") then
- result[#result+1]=quoted(a)
- else
- result[#result+1]=a
- end
+ result[i]=optionalquoted(resolveprefix and resolveprefix(arg[i]) or resolveprefix)
end
return concat(result," ")
else
@@ -8708,7 +9424,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["luat-env"] = package.loaded["luat-env"] or true
--- original size: 5930, stripped down to: 4235
+-- original size: 6174, stripped down to: 4141
if not modules then modules={} end modules ['luat-env']={
version=1.001,
@@ -8786,15 +9502,13 @@ function environment.luafilechunk(filename,silent)
filename=file.replacesuffix(filename,"lua")
local fullname=environment.luafile(filename)
if fullname and fullname~="" then
- local data=luautilities.loadedluacode(fullname,strippable,filename)
- if trace_locating then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
- if trace_locating then
+ if not silent then
report_lua("unknown file %a",filename)
end
return nil
@@ -8863,7 +9577,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 42447, stripped down to: 26589
+-- original size: 45683, stripped down to: 27866
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -8878,10 +9592,10 @@ if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
-local type,next,setmetatable,getmetatable,tonumber=type,next,setmetatable,getmetatable,tonumber
+local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
local utfchar=utf.char
-local lpegmatch=lpeg.match
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
xml.xmlns=xml.xmlns or {}
@@ -8976,8 +9690,10 @@ local function add_end(spacing,namespace,tag)
top=stack[#stack]
if #stack<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
elseif toclose.tg~=tag then
errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
end
dt=top.dt
dt[#dt+1]=toclose
@@ -8986,10 +9702,29 @@ local function add_end(spacing,namespace,tag)
end
end
local function add_text(text)
+ local n=#dt
if cleanup and #text>0 then
- dt[#dt+1]=cleanup(text)
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..cleanup(text)
+ else
+ dt[n+1]=cleanup(text)
+ end
+ else
+ dt[1]=cleanup(text)
+ end
else
- dt[#dt+1]=text
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..text
+ else
+ dt[n+1]=text
+ end
+ else
+ dt[1]=text
+ end
end
end
local function add_special(what,spacing,text)
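Note: add_text now concatenates onto a trailing string node instead of always appending, so text split around entities (and the stray-character recovery added further down) ends up as a single node. The behaviour in isolation, ignoring the cleanup callback:

local dt = {}

local function add_text(text)
  local n = #dt
  if n > 0 and type(dt[n]) == "string" then
    dt[n] = dt[n] .. text   -- merge with the previous text node
  else
    dt[n + 1] = text        -- start a new node
  end
end

add_text("alpha ")
add_text("&amp;")   -- a piece produced separately, e.g. by entity handling
add_text(" omega")
print(#dt, dt[1])   -- 1   alpha &amp; omega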
@@ -9021,8 +9756,10 @@ local function attribute_specification_error(str)
end
return str
end
+local badentity="&error;"
+local badentity="&"
xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and "&error;" or formatters["&%s;"](str) end,
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
}
@@ -9043,9 +9780,10 @@ local function fromdec(s)
return formatters["d:%s"](s),true
end
end
-local rest=(1-P(";"))^0
-local many=P(1)^0
-local parsedentity=P("&")*(P("#x")*(rest/fromhex)+P("#")*(rest/fromdec))*P(";")*P(-1)+(P("#x")*(many/fromhex)+P("#")*(many/fromdec))
+local p_rest=(1-P(";"))^0
+local p_many=P(1)^0
+local p_char=lpegpatterns.utf8character
+local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
local predefined_unified={
[38]="&amp;",
[42]="&quot;",
@@ -9071,7 +9809,9 @@ local privates_u={
local privates_p={}
local privates_n={
}
-local escaped=utf.remapper(privates_u)
+local escaped=utf.remapper(privates_u,"dynamic")
+local unprivatized=utf.remapper(privates_p,"dynamic")
+xml.unprivatized=unprivatized
local function unescaped(s)
local p=privates_n[s]
if not p then
@@ -9084,9 +9824,7 @@ local function unescaped(s)
end
return p
end
-local unprivatized=utf.remapper(privates_p)
xml.privatetoken=unescaped
-xml.unprivatized=unprivatized
xml.privatecodes=privates_n
local function handle_hex_entity(str)
local h=hcache[str]
@@ -9181,7 +9919,7 @@ local function handle_any_entity(str)
report_xml("keeping entity &%s;",str)
end
if str=="" then
- a="&error;"
+ a=badentity
else
a="&"..str..";"
end
@@ -9209,7 +9947,7 @@ local function handle_any_entity(str)
if trace_entities then
report_xml("invalid entity &%s;",str)
end
- a="&error;"
+ a=badentity
acache[str]=a
else
if trace_entities then
@@ -9222,8 +9960,14 @@ local function handle_any_entity(str)
return a
end
end
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
+local function handle_end_entity(str)
+ report_xml("error in entity, %a found without ending %a",str,";")
+ return str
+end
+local function handle_crap_error(chr)
+ report_xml("error in parsing, unexpected %a found ",chr)
+ add_text(chr)
+ return chr
end
local space=S(' \r\n\t')
local open=P('<')
@@ -9239,15 +9983,15 @@ local valid=R('az','AZ','09')+S('_-.')
local name_yes=C(valid^1)*colon*C(valid^1)
local name_nop=C(P(true))*C(valid^1)
local name=name_yes+name_nop
-local utfbom=lpeg.patterns.utfbom
+local utfbom=lpegpatterns.utfbom
local spacing=C(space^0)
-local anyentitycontent=(1-open-semicolon-space-close)^0
+local anyentitycontent=(1-open-semicolon-space-close-ampersand)^0
local hexentitycontent=R("AF","af","09")^0
local decentitycontent=R("09")^0
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
)+(anyentitycontent/handle_any_entity)
-local entity=ampersand/""*parsedentity*((semicolon/"")+#(P(1)/handle_end_entity))
+local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
local somespace=space^1
@@ -9298,16 +10042,20 @@ local instruction=(spacing*begininstruction*someinstruction*endinstruction)/func
local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
+local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
+local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
local grammar_parsed_text=P { "preamble",
preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
- children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
local grammar_unparsed_text=P { "preamble",
preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
- children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
settings=settings or {}
@@ -9341,7 +10089,6 @@ local function _xmlconvert_(data,settings)
errorstr="empty xml file"
elseif utfize or resolve then
if lpegmatch(grammar_parsed_text,data) then
- errorstr=""
else
errorstr="invalid xml file - parsed text"
end
@@ -9357,6 +10104,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+setmetatable(result,mt)
+setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -9389,8 +10138,11 @@ local function _xmlconvert_(data,settings)
end
if errorstr and errorstr~="" then
result.error=true
+ else
+ errorstr=nil
end
result.statistics={
+ errormessage=errorstr,
entities={
decimals=dcache,
hexadecimals=hcache,
@@ -9404,7 +10156,7 @@ local function _xmlconvert_(data,settings)
reported_attribute_errors,mt,errorhandler=nil,nil,nil
return result
end
-function xmlconvert(data,settings)
+local function xmlconvert(data,settings)
local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
if ok then
return result
@@ -9496,14 +10248,17 @@ function xml.checkbom(root)
insert(dt,2,"\n" )
end
end
-local function verbose_element(e,handlers)
+local f_attribute=formatters['%s=%q']
+local function verbose_element(e,handlers,escape)
local handle=handlers.handle
local serialize=handlers.serialize
local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
local ats=eat and next(eat) and {}
if ats then
+ local n=0
for k,v in next,eat do
- ats[#ats+1]=formatters['%s=%q'](k,escaped(v))
+ n=n+1
+ ats[n]=f_attribute(k,escaped(v))
end
end
if ern and trace_entities and ern~=ens then
@@ -9588,23 +10343,25 @@ local function verbose_document(e,handlers)
end
end
local function serialize(e,handlers,...)
- local initialize=handlers.initialize
- local finalize=handlers.finalize
- local functions=handlers.functions
- if initialize then
- local state=initialize(...)
- if not state==true then
- return state
+ if e then
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state==true then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
end
- end
- local etg=e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
- if finalize then
- return finalize()
end
end
local function xserialize(e,handlers)
@@ -9845,7 +10602,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 48956, stripped down to: 30516
+-- original size: 48229, stripped down to: 30684
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -10230,8 +10987,8 @@ local lp_builtin=P (
P("ns")/"ll.ns"
)*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
-lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
-lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
local lp_fastpos=lp_fastpos_n+lp_fastpos_p
local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
@@ -10410,7 +11167,7 @@ local function nodesettostring(set,nodetest)
if not ns or ns=="" then ns="*" end
if not tg or tg=="" then tg="*" end
tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i]=(directive and tg) or format("not(%s)",tg)
+ t[#t+1]=(directive and tg) or format("not(%s)",tg)
end
if nodetest==false then
return format("not(%s)",concat(t,"|"))
@@ -10676,7 +11433,6 @@ expressions.print=function(...)
print(...)
return true
end
-expressions.contains=find
expressions.find=find
expressions.upper=upper
expressions.lower=lower
@@ -10698,6 +11454,9 @@ function expressions.contains(str,pattern)
end
return false
end
+function xml.expressions.idstring(str)
+ return type(str)=="string" and gsub(str,"^#","") or ""
+end
local function traverse(root,pattern,handle)
local collected=applylpath(root,pattern)
if collected then
@@ -10826,8 +11585,13 @@ function xml.elements(root,pattern,reverse)
local collected=applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c=#collected+1
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
return function()
if c>1 then
c=c-1
@@ -10837,7 +11601,7 @@ function xml.elements(root,pattern,reverse)
end
end
else
- local n,c=#collected,0
+ local c=0
return function()
if c<n then
c=c+1
@@ -10852,8 +11616,13 @@ function xml.collected(root,pattern,reverse)
local collected=applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c=#collected+1
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
return function()
if c>1 then
c=c-1
@@ -10861,7 +11630,7 @@ function xml.collected(root,pattern,reverse)
end
end
else
- local n,c=#collected,0
+ local c=0
return function()
if c<n then
c=c+1
@@ -10876,7 +11645,7 @@ function xml.inspect(collection,pattern)
report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
end
end
-local function split(e)
+local function split(e)
local dt=e.dt
if dt then
for i=1,#dt do
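Note: xml.elements and xml.collected now compute the collection size once and short-circuit empty results to a dummy iterator. The closure pattern, reduced to plain Lua:

local function dummy() end

local function iterate(collected, reverse)
  if not collected then return dummy end
  local n = #collected
  if n == 0 then return dummy end
  if reverse then
    local c = n + 1
    return function()
      if c > 1 then c = c - 1 return collected[c] end
    end
  else
    local c = 0
    return function()
      if c < n then c = c + 1 return collected[c] end
    end
  end
end

for e in iterate({ "a", "b", "c" }, true) do
  print(e) -- c, b, a
end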
@@ -10975,7 +11744,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
--- original size: 23804, stripped down to: 16817
+-- original size: 28786, stripped down to: 20578
if not modules then modules={} end modules ['lxml-aux']={
version=1.001,
@@ -10985,16 +11754,19 @@ if not modules then modules={} end modules ['lxml-aux']={
license="see context related readme files"
}
local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local trace_inclusions=false trackers.register("lxml.inclusions",function(v) trace_inclusions=v end)
local report_xml=logs.reporter("xml")
local xml=xml
-local xmlconvert,xmlcopy,xmlname=xml.convert,xml.copy,xml.name
+local xmlcopy,xmlname=xml.copy,xml.name
local xmlinheritedconvert=xml.inheritedconvert
local xmlapplylpath=xml.applylpath
local xmlfilter=xml.filter
-local type,setmetatable,getmetatable=type,setmetatable,getmetatable
+local type,next,setmetatable,getmetatable=type,next,setmetatable,getmetatable
local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local striplinepatterns=utilities.strings.striplinepatterns
local function report(what,pattern,c,e)
report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
@@ -11049,13 +11821,15 @@ end
function xml.each(root,pattern,handle,reverse)
local collected=xmlapplylpath(root,pattern)
if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
+ if handle then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
end
return collected
@@ -11111,6 +11885,7 @@ local function redo_ni(d)
end
end
end
+xml.reindex=redo_ni
local function xmltoelement(whatever,root)
if not whatever then
return nil
@@ -11162,8 +11937,16 @@ function xml.delete(root,pattern)
report('deleting',pattern,c,e)
end
local d=p.dt
- remove(d,e.ni)
- redo_ni(d)
+ local ni=e.ni
+ if ni<=#d then
+ if false then
+ p.dt[ni]=""
+ else
+ remove(d,ni)
+ redo_ni(d)
+ end
+ else
+ end
end
end
end
@@ -11283,28 +12066,40 @@ xml.insertafter=insert_element
xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
xml.injectafter=inject_element
xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
-local function include(xmldata,pattern,attribute,recursive,loaddata)
+local function include(xmldata,pattern,attribute,recursive,loaddata,level)
pattern=pattern or 'include'
loaddata=loaddata or io.loaddata
local collected=xmlapplylpath(xmldata,pattern)
if collected then
+ if not level then
+ level=1
+ end
for c=1,#collected do
local ek=collected[c]
local name=nil
local ekdt=ek.dt
local ekat=ek.at
- local epdt=ek.__p__.dt
+ local ekrt=ek.__p__
+ local epdt=ekrt.dt
if not attribute or attribute=="" then
name=(type(ekdt)=="table" and ekdt[1]) or ekdt
end
if not name then
for a in gmatch(attribute or "href","([^|]+)") do
name=ekat[a]
- if name then break end
+ if name then
+ break
+ end
+ end
+ end
+ local data=nil
+ if name and name~="" then
+ data=loaddata(name) or ""
+ if trace_inclusions then
+ report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
end
- local data=(name and name~="" and loaddata(name)) or ""
- if data=="" then
+ if not data or data=="" then
epdt[ek.ni]=""
elseif ekat["parse"]=="text" then
epdt[ek.ni]=xml.escaped(data)
@@ -11314,70 +12109,127 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
epdt[ek.ni]=""
else
if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
+ include(xi,pattern,attribute,recursive,loaddata,level+1)
+ end
+ local child=xml.body(xi)
+ child.__p__=ekrt
+ child.__f__=name
+ epdt[ek.ni]=child
+ local inclusions=xmldata.settings.inclusions
+ if inclusions then
+ inclusions[#inclusions+1]=name
+ else
+ xmldata.settings.inclusions={ name }
+ end
+ if child.er then
+ local badinclusions=xmldata.settings.badinclusions
+ if badinclusions then
+ badinclusions[#badinclusions+1]=name
+ else
+ xmldata.settings.badinclusions={ name }
+ end
end
- epdt[ek.ni]=xml.body(xi)
end
end
end
end
end
xml.include=include
+function xml.inclusion(e,default)
+ while e do
+ local f=e.__f__
+ if f then
+ return f
+ else
+ e=e.__p__
+ end
+ end
+ return default
+end
+local function getinclusions(key,e,sorted)
+ while e do
+ local settings=e.settings
+ if settings then
+ local inclusions=settings[key]
+ if inclusions then
+ inclusions=table.unique(inclusions)
+ if sorted then
+ table.sort(inclusions)
+ end
+ return inclusions
+ else
+ e=e.__p__
+ end
+ else
+ e=e.__p__
+ end
+ end
+end
+function xml.inclusions(e,sorted)
+ return getinclusions("inclusions",e,sorted)
+end
+function xml.badinclusions(e,sorted)
+ return getinclusions("badinclusions",e,sorted)
+end
+local b_collapser=lpeg.patterns.b_collapser
+local m_collapser=lpeg.patterns.m_collapser
+local e_collapser=lpeg.patterns.e_collapser
+local b_stripper=lpeg.patterns.b_stripper
+local m_stripper=lpeg.patterns.m_stripper
+local e_stripper=lpeg.patterns.e_stripper
+local lpegmatch=lpeg.match
local function stripelement(e,nolines,anywhere)
local edt=e.dt
if edt then
- if anywhere then
- local t,n={},0
- for e=1,#edt do
+ local n=#edt
+ if n==0 then
+ return e
+ elseif anywhere then
+ local t={}
+ local m=0
+ for e=1,n do
local str=edt[e]
if type(str)~="string" then
- n=n+1
- t[n]=str
+ m=m+1
+ t[m]=str
elseif str~="" then
if nolines then
- str=gsub(str,"%s+"," ")
+ str=lpegmatch((n==1 and b_collapser) or (n==m and e_collapser) or m_collapser,str)
+ else
+ str=lpegmatch((n==1 and b_stripper) or (n==m and e_stripper) or m_stripper,str)
end
- str=gsub(str,"^%s*(.-)%s*$","%1")
if str~="" then
- n=n+1
- t[n]=str
+ m=m+1
+ t[m]=str
end
end
end
e.dt=t
else
- if #edt>0 then
- local str=edt[1]
- if type(str)~="string" then
- elseif str=="" then
+ local str=edt[1]
+ if type(str)=="string" then
+ if str~="" then
+ str=lpegmatch(nolines and b_collapser or b_stripper,str)
+ end
+ if str=="" then
remove(edt,1)
+ n=n-1
else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"^%s+","")
- if str=="" then
- remove(edt,1)
- else
- edt[1]=str
- end
+ edt[1]=str
end
end
- local nedt=#edt
- if nedt>0 then
- local str=edt[nedt]
- if type(str)~="string" then
- elseif str=="" then
- remove(edt)
- else
- if nolines then
- str=gsub(str,"%s+"," ")
- end
- str=gsub(str,"%s+$","")
+ if n>0 then
+ str=edt[n]
+ if type(str)=="string" then
if str=="" then
remove(edt)
else
- edt[nedt]=str
+ str=lpegmatch(nolines and e_collapser or e_stripper,str)
+ if str=="" then
+ remove(edt)
+ else
+ edt[n]=str
+ end
end
end
end
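Note: xml.inclusion and getinclusions (earlier in this hunk) both walk up the __p__ parent chain until the requested information is found. A minimal standalone version of that upward walk (names are illustrative):

local function findupwards(e, key, default)
  while e do
    local v = e[key]
    if v ~= nil then
      return v
    end
    e = e.__p__   -- defer to the parent node
  end
  return default
end

local parent = { __f__ = "chapter-one.xml" }  -- set by include() on the spliced-in child
local child  = { __p__ = parent }
print(findupwards(child, "__f__", "unknown")) -- chapter-one.xml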
@@ -11563,8 +12415,8 @@ function xml.finalizers.xml.cdata(collected)
end
return ""
end
-function xml.insertcomment(e,str,n)
- table.insert(e.dt,n or 1,{
+function xml.insertcomment(e,str,n)
+ insert(e.dt,n or 1,{
tg="@cm@",
ns="",
special=true,
@@ -11572,7 +12424,25 @@ function xml.insertcomment(e,str,n)
dt={ str },
})
end
-function xml.setcdata(e,str)
+function xml.insertcdata(e,str,n)
+ insert(e.dt,n or 1,{
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcomment(e,str,n)
+ e.dt={ {
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
+end
+function xml.setcdata(e,str)
e.dt={ {
tg="@cd@",
ns="",
@@ -11642,7 +12512,7 @@ local function recurse(e,action)
for i=1,#edt do
local str=edt[i]
if type(str)~="string" then
- recurse(str,action,recursive)
+ recurse(str,action)
elseif str~="" then
edt[i]=action(str)
end
@@ -11660,6 +12530,65 @@ function helpers.recursetext(collected,action,recursive)
end
end
end
+local specials={
+ ["@rt@"]="root",
+ ["@pi@"]="instruction",
+ ["@cm@"]="comment",
+ ["@dt@"]="declaration",
+ ["@cd@"]="cdata",
+}
+local function convert(x,strip,flat)
+ local ns=x.ns
+ local tg=x.tg
+ local at=x.at
+ local dt=x.dt
+ local node=flat and {
+ [0]=(not x.special and (ns~="" and ns..":"..tg or tg)) or nil,
+ } or {
+ _namespace=ns~="" and ns or nil,
+ _tag=not x.special and tg or nil,
+ _type=specials[tg] or "_element",
+ }
+ if at then
+ for k,v in next,at do
+ node[k]=v
+ end
+ end
+ local n=0
+ for i=1,#dt do
+ local di=dt[i]
+ if type(di)=="table" then
+ if flat and di.special then
+ else
+ di=convert(di,strip,flat)
+ if di then
+ n=n+1
+ node[n]=di
+ end
+ end
+ elseif strip then
+ di=lpegmatch(strip,di)
+ if di~="" then
+ n=n+1
+ node[n]=di
+ end
+ else
+ n=n+1
+ node[n]=di
+ end
+ end
+ if next(node) then
+ return node
+ end
+end
+function xml.totable(x,strip,flat)
+ if type(x)=="table" then
+ if strip then
+ strip=striplinepatterns[strip]
+ end
+ return convert(x,strip,flat)
+ end
+end
end -- of closure
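Note: the new xml.totable converts a parsed tree into plain Lua tables, either flat (tag at index zero) or structured (_tag/_namespace/_type fields). Inferred from convert above and not produced by actually running the code, a fragment like <doc n="1">hello</doc> would roughly map to:

-- hand-written approximation of the two output shapes
local flat = {
  [0] = "doc",   -- element tag (namespace-qualified when a namespace is set)
  n   = "1",     -- attributes are merged in as string keys
  "hello",       -- child text and child elements fill the array part
}

local structured = {
  _tag  = "doc",
  _type = "_element",
  n     = "1",
  "hello",
}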
@@ -12216,7 +13145,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-ini"] = package.loaded["data-ini"] or true
--- original size: 7898, stripped down to: 5501
+-- original size: 11085, stripped down to: 7662
if not modules then modules={} end modules ['data-ini']={
version=1.001,
@@ -12225,14 +13154,15 @@ if not modules then modules={} end modules ['data-ini']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files",
}
+local next,type,getmetatable,rawset=next,type,getmetatable,rawset
local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
-local next,type=next,type
local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local ostype,osname,osuname,ossetenv,osgetenv=os.type,os.name,os.uname,os.setenv,os.getenv
+local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
local report_initialization=logs.reporter("resolvers","initialization")
-local ostype,osname,ossetenv,osgetenv=os.type,os.name,os.setenv,os.getenv
resolvers=resolvers or {}
local resolvers=resolvers
texconfig.kpse_init=false
@@ -12360,15 +13290,108 @@ if not texroot or texroot=="" then
ossetenv('TEXROOT',texroot)
end
environment.texroot=file.collapsepath(texroot)
-if profiler then
+if type(profiler)=="table" and not jit then
directives.register("system.profile",function()
profiler.start("luatex-profile.log")
end)
end
-if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local resolved={}
+local abstract={}
+local dynamic={}
+function resolvers.resetresolve(str)
+ resolved,abstract={},{}
+end
+function resolvers.allprefixes(separator)
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
+ end
+ end
+ return all
+end
+local function _resolve_(method,target)
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+function resolvers.unresolve(str)
+ return abstract[str] or str
+end
+function resolvers.setdynamic(str)
+ dynamic[str]=true
+end
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local prefix=C(R("az")^2)*P(":")
+local target=C((1-S(" \"\';,"))^1)
+local notarget=(#S(";,")+P(-1))*Cc("")
+local p_resolve=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
+local p_simple=prefix*P(-1)
+local function resolve(str)
+ if type(str)=="table" then
+ local res={}
+ for i=1,#str do
+ res[i]=resolve(str[i])
+ end
+ return res
+ end
+ local res=resolved[str]
+ if res then
+ return res
+ end
+ local simple=lpegmatch(p_simple,str)
+ local action=prefixes[simple]
+ if action then
+ local res=action(res)
+ if not dynamic[simple] then
+ resolved[simple]=res
+ abstract[res]=simple
+ end
+ return res
+ end
+ res=lpegmatch(p_resolve,str)
+ resolved[str]=res
+ abstract[res]=str
+ return res
+end
+resolvers.resolve=resolve
+if type(osuname)=="function" then
+ for k,v in next,osuname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
+ end
+ end
+end
+if ostype=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ table.setmetatablenewindex(prefixes,makepattern)
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
end
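Note: resolvers.resolve now expands prefix:target strings through a registered prefixes table and memoizes the result unless the prefix was marked dynamic. A toy, lpeg-free rendition of the same idea (paths and prefix names are made up):

local prefixes = { home = function(target) return "/home/user/" .. target end }
local dynamic  = { }
local resolved = { }   -- cache of already expanded strings

local function resolve(str)
  local hit = resolved[str]
  if hit then
    return hit
  end
  local res = str:gsub("(%a+):([^ \"';,]+)", function(prefix, target)
    local action = prefixes[prefix]
    return action and action(target) or (prefix .. ":" .. target)
  end)
  local prefix = str:match("^(%a+):")
  if not (prefix and dynamic[prefix]) then
    resolved[str] = res
  end
  return res
end

print(resolve("home:texmf/tex"))  -- /home/user/texmf/tex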
@@ -12378,7 +13401,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-exp"] = package.loaded["data-exp"] or true
--- original size: 15303, stripped down to: 9716
+-- original size: 17216, stripped down to: 10657
if not modules then modules={} end modules ['data-exp']={
version=1.001,
@@ -12392,12 +13415,16 @@ local concat,sort=table.concat,table.sort
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local Ct,Cs,Cc,Carg,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.P,lpeg.C,lpeg.S
local type,next=type,next
+local isdir=lfs.isdir
local ostype=os.type
-local collapsepath=file.collapsepath
+local collapsepath,joinpath,basename=file.collapsepath,file.join,file.basename
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_globbing=true trackers.register("resolvers.globbing",function(v) trace_globbing=v end)
local report_expansions=logs.reporter("resolvers","expansions")
+local report_globbing=logs.reporter("resolvers","globbing")
local resolvers=resolvers
+local resolveprefix=resolvers.resolve
local function f_both(a,b)
local t,n={},0
for sb in gmatch(b,"[^,]+") do
@@ -12487,35 +13514,27 @@ function resolvers.expandedpathfromlist(pathlist)
end
return newlist
end
-local cleanup=lpeg.replacer {
- { "!","" },
- { "\\","/" },
-}
-function resolvers.cleanpath(str)
- local doslashes=(P("\\")/"/"+1)^0
- local donegation=(P("!")/"" )^0
- local homedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
- if homedir=="~" or homedir=="" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return ""
- else
- return lpegmatch(cleanup,str)
+local usedhomedir=nil
+local donegation=(P("!")/"" )^0
+local doslashes=(P("\\")/"/"+1)^0
+local function expandedhome()
+ if not usedhomedir then
+ usedhomedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if usedhomedir=="~" or usedhomedir=="" or not isdir(usedhomedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent path using current path")
end
- end
- else
- local dohome=((P("~")+P("$HOME"))/homedir)^0
- local cleanup=Cs(donegation*dohome*doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
+ usedhomedir="."
end
end
- return resolvers.cleanpath(str)
+ return usedhomedir
end
-local expandhome=P("~")/"$HOME"
+local dohome=((P("~")+P("$HOME")+P("%HOME%"))/expandedhome)^0
+local cleanup=Cs(donegation*dohome*doslashes)
+resolvers.cleanpath=function(str)
+ return str and lpegmatch(cleanup,str) or ""
+end
+local expandhome=P("~")/"$HOME"
local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
local dostring=(expandhome+1 )^0
@@ -12567,46 +13586,67 @@ function resolvers.splitpath(str)
end
function resolvers.joinpath(str)
if type(str)=='table' then
- return file.joinpath(str)
+ return joinpath(str)
else
return str
end
end
local attributes,directory=lfs.attributes,lfs.dir
local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local lessweird=P(".")^1+lpeg.anywhere(S("~`#$%^&*:;\"\'||<>,?\n\r\t"))
local timer={}
local scanned={}
local nofscans=0
local scancache={}
-local function scan(files,spec,path,n,m,r)
- local full=(path=="" and spec) or (spec..path..'/')
+local fullcache={}
+local nofsharedscans=0
+local function scan(files,remap,spec,path,n,m,r,onlyone,tolerant)
+ local full=path=="" and spec or (spec..path..'/')
local dirs={}
local nofdirs=0
+ local pattern=tolerant and lessweird or weird
for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
+ if not lpegmatch(pattern,name) then
+ local mode=attributes(full..name,"mode")
+ if mode=="file" then
n=n+1
- local f=files[name]
- if f then
- if type(f)=='string' then
- files[name]={ f,path }
+ local lower=lower(name)
+ local paths=files[lower]
+ if paths then
+ if onlyone then
else
- f[#f+1]=path
+ if type(paths)=="string" then
+ files[lower]={ paths,path }
+ else
+ paths[#paths+1]=path
+ end
+ if name~=lower then
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
+ end
end
else
- files[name]=path
- local lower=lower(name)
+ files[lower]=path
if name~=lower then
- files["remap:"..lower]=name
- r=r+1
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
end
end
- elseif mode=='directory' then
+ elseif mode=="directory" then
m=m+1
nofdirs=nofdirs+1
if path~="" then
- dirs[nofdirs]=path..'/'..name
+ dirs[nofdirs]=path.."/"..name
else
dirs[nofdirs]=name
end
@@ -12616,107 +13656,69 @@ local function scan(files,spec,path,n,m,r)
if nofdirs>0 then
sort(dirs)
for i=1,nofdirs do
- files,n,m,r=scan(files,spec,dirs[i],n,m,r)
+ files,remap,n,m,r=scan(files,remap,spec,dirs[i],n,m,r,onlyonce,tolerant)
end
end
scancache[sub(full,1,-2)]=files
- return files,n,m,r
+ return files,remap,n,m,r
end
-local fullcache={}
-function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
+function resolvers.scanfiles(path,branch,usecache,onlyonce,tolerant)
+ local realpath=resolveprefix(path)
if usecache then
- local files=fullcache[realpath]
- if files then
+ local content=fullcache[realpath]
+ if content then
if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ report_expansions("using cached scan of path %a, branch %a",path,branch or path)
end
- return files
+ nofsharedscans=nofsharedscans+1
+ return content
end
end
+ statistics.starttiming(timer)
if trace_locating then
report_expansions("scanning path %a, branch %a",path,branch or path)
end
- local files,n,m,r=scan({},realpath..'/',"",0,0,0)
- files.__path__=path
- files.__files__=n
- files.__directories__=m
- files.__remappings__=r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
- if usecache then
- scanned[#scanned+1]=realpath
- fullcache[realpath]=files
- end
- nofscans=nofscans+1
- statistics.stoptiming(timer)
- return files
-end
-local function simplescan(files,spec,path)
- local full=(path=="" and spec) or (spec..path..'/')
- local dirs={}
- local nofdirs=0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode=attributes(full..name,'mode')
- if mode=='file' then
- if not files[name] then
- files[name]=path
- end
- elseif mode=='directory' then
- nofdirs=nofdirs+1
- if path~="" then
- dirs[nofdirs]=path..'/'..name
- else
- dirs[nofdirs]=name
- end
- end
- end
- end
- if nofdirs>0 then
- sort(dirs)
- for i=1,nofdirs do
- files=simplescan(files,spec,dirs[i])
- end
- end
- return files
-end
-local simplecache={}
-local nofsharedscans=0
-function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath=resolvers.resolve(path)
- if usecache then
- local files=simplecache[realpath]
- if not files then
- files=scancache[realpath]
- if files then
- nofsharedscans=nofsharedscans+1
- end
+ local content
+ if isdir(realpath) then
+ local files,remap,n,m,r=scan({},{},realpath..'/',"",0,0,0,onlyonce,tolerant)
+ content={
+ metadata={
+ path=path,
+ files=n,
+ directories=m,
+ remappings=r,
+ },
+ files=files,
+ remap=remap,
+ }
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
- end
- return files
+ else
+ content={
+ metadata={
+ path=path,
+ files=0,
+ directories=0,
+ remappings=0,
+ },
+ files={},
+ remap={},
+ }
+ if trace_locating then
+ report_expansions("invalid path %a",realpath)
end
end
- if trace_locating then
- report_expansions("scanning path %a, branch %a",path,branch or path)
- end
- local files=simplescan({},realpath..'/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
if usecache then
scanned[#scanned+1]=realpath
- simplecache[realpath]=files
+ fullcache[realpath]=content
end
nofscans=nofscans+1
statistics.stoptiming(timer)
- return files
+ return content
+end
+function resolvers.simplescanfiles(path,branch,usecache)
+ return resolvers.scanfiles(path,branch,usecache,true,true)
end
function resolvers.scandata()
table.sort(scanned)
@@ -12727,6 +13729,52 @@ function resolvers.scandata()
paths=scanned,
}
end
+function resolvers.get_from_content(content,path,name)
+ if not content then
+ return
+ end
+ local files=content.files
+ if not files then
+ return
+ end
+ local remap=content.remap
+ if not remap then
+ return
+ end
+ if name then
+ local used=lower(name)
+ return path,remap[used] or used
+ else
+ local name=path
+ local used=lower(name)
+ local path=files[used]
+ if path then
+ return path,remap[used] or used
+ end
+ end
+end
+local nothing=function() end
+function resolvers.filtered_from_content(content,pattern)
+ if content and type(pattern)=="string" then
+ local pattern=lower(pattern)
+ local files=content.files
+ local remap=content.remap
+ if files and remap then
+ local n=next(files)
+ local function iterator()
+ while n do
+ local k=n
+ n=next(files,k)
+ if find(k,pattern) then
+ return files[k],remap and remap[k] or k
+ end
+ end
+ end
+ return iterator
+ end
+ end
+ return nothing
+end
end -- of closure
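Note: resolvers.scanfiles now returns a single content table (metadata plus a lowercase file map and a case remap), and get_from_content restores the original casing on lookup. A hand-built example of the layout and a simplified lookup, no real directory scan involved:

local content = {
  metadata = { path = "texmf-local", files = 2, directories = 1, remappings = 1 },
  files    = { ["readme.txt"] = "doc", ["mycls.cls"] = "tex/latex" },  -- lowercased names
  remap    = { ["mycls.cls"] = "MyCls.cls" },                          -- original casing
}

local function lookup(content, name)
  local used = name:lower()
  local path = content.files[used]
  if path then
    return path, content.remap[used] or used
  end
end

print(lookup(content, "MyCls.cls"))  -- tex/latex   MyCls.cls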
@@ -12735,7 +13783,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 8769, stripped down to: 6490
+-- original size: 9216, stripped down to: 6798
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -12753,10 +13801,12 @@ local formats=allocate()
local suffixes=allocate()
local dangerous=allocate()
local suffixmap=allocate()
+local usertypes=allocate()
resolvers.formats=formats
resolvers.suffixes=suffixes
resolvers.dangerous=dangerous
resolvers.suffixmap=suffixmap
+resolvers.usertypes=usertypes
local luasuffixes=utilities.lua.suffixes
local relations=allocate {
core={
@@ -12824,11 +13874,13 @@ local relations=allocate {
names={ "mp" },
variable='MPINPUTS',
suffixes={ 'mp','mpvi','mpiv','mpii' },
+ usertype=true,
},
tex={
names={ "tex" },
variable='TEXINPUTS',
- suffixes={ 'tex',"mkvi","mkiv","mkii" },
+ suffixes={ "tex","mkvi","mkiv","mkii","cld","lfg","xml" },
+ usertype=true,
},
icc={
names={ "icc","icc profile","icc profiles" },
@@ -12844,6 +13896,7 @@ local relations=allocate {
names={ "lua" },
variable='LUAINPUTS',
suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ usertype=true,
},
lib={
names={ "lib" },
@@ -12852,11 +13905,15 @@ local relations=allocate {
},
bib={
names={ 'bib' },
+ variable='BIBINPUTS',
suffixes={ 'bib' },
+ usertype=true,
},
bst={
names={ 'bst' },
+ variable='BSTINPUTS',
suffixes={ 'bst' },
+ usertype=true,
},
fontconfig={
names={ 'fontconfig','fontconfig file','fontconfig files' },
@@ -12938,8 +13995,9 @@ function resolvers.updaterelations()
for name,relation in next,categories do
local rn=relation.names
local rv=relation.variable
- local rs=relation.suffixes
if rn and rv then
+ local rs=relation.suffixes
+ local ru=relation.usertype
for i=1,#rn do
local rni=lower(gsub(rn[i]," ",""))
formats[rni]=rv
@@ -12951,8 +14009,9 @@ function resolvers.updaterelations()
end
end
end
- end
- if rs then
+ if ru then
+ usertypes[name]=true
+ end
end
end
end
@@ -13003,7 +14062,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
--- original size: 15532, stripped down to: 11648
+-- original size: 15618, stripped down to: 11629
if not modules then modules={} end modules ['data-tmp']={
version=1.100,
@@ -13013,7 +14072,7 @@ if not modules then modules={} end modules ['data-tmp']={
license="see context related readme files"
}
local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
-local concat,serialize,serializetofile=table.concat,table.serialize,table.tofile
+local concat=table.concat
local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
local formatters=string.formatters
@@ -13022,6 +14081,7 @@ local trace_cache=false trackers.register("resolvers.cache",function(v) trace_ca
local report_caches=logs.reporter("resolvers","caches")
local report_resolvers=logs.reporter("resolvers","caching")
local resolvers=resolvers
+local cleanpath=resolvers.cleanpath
local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
local compile=utilities.lua.compile
@@ -13043,7 +14103,7 @@ caches.relocate=false
caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
local writable,readables,usedreadables=nil,{},{}
local function identify()
- local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
if texmfcaches then
for k=1,#texmfcaches do
local cachepath=texmfcaches[k]
@@ -13281,15 +14341,11 @@ end
local saveoptions={ compact=true }
function caches.savedata(filepath,filename,data,raw)
local tmaname,tmcname=caches.setluanames(filepath,filename)
- local reduce,simplify=true,true
- if raw then
- reduce,simplify=false,false
- end
data.cache_uuid=os.uuid()
if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
+ file.savedata(tmaname,table.serialize(data,true,saveoptions))
else
- serializetofile(tmaname,data,true,saveoptions)
+ table.tofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -13297,10 +14353,12 @@ local content_state={}
function caches.contentstate()
return content_state or {}
end
-function caches.loadcontent(cachename,dataname)
- local name=caches.hashed(cachename)
- local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
+function caches.loadcontent(cachename,dataname,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
if blob then
local data=blob()
@@ -13332,10 +14390,12 @@ function caches.collapsecontent(content)
end
end
end
-function caches.savecontent(cachename,dataname,content)
- local name=caches.hashed(cachename)
- local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename=file.join(path,name)
+function caches.savecontent(cachename,dataname,content,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
local luaname=addsuffix(filename,luasuffixes.lua)
local lucname=addsuffix(filename,luasuffixes.luc)
if trace_locating then
@@ -13350,7 +14410,7 @@ function caches.savecontent(cachename,dataname,content)
content=content,
uuid=os.uuid(),
}
- local ok=io.savedata(luaname,serialize(data,true))
+ local ok=io.savedata(luaname,table.serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
@@ -13378,7 +14438,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-met"] = package.loaded["data-met"] or true
--- original size: 5453, stripped down to: 4007
+-- original size: 5347, stripped down to: 4015
if not modules then modules={} end modules ['data-met']={
version=1.100,
@@ -13406,8 +14466,8 @@ local function splitmethod(filename)
if type(filename)=="table" then
return filename
end
- filename=file.collapsepath(filename,".")
- if not find(filename,"://") then
+ filename=file.collapsepath(filename,".")
+ if not find(filename,"://",1,true) then
return { scheme="file",path=filename,original=filename,filename=filename }
end
local specification=url.hashed(filename)
@@ -13497,7 +14557,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 61799, stripped down to: 42957
+-- original size: 67003, stripped down to: 46291
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -13507,7 +14567,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,sortedkeys=table.concat,table.insert,table.sortedkeys
+local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -13516,27 +14576,38 @@ local formatters=string.formatters
local filedirname=file.dirname
local filebasename=file.basename
local suffixonly=file.suffixonly
+local addsuffix=file.addsuffix
+local removesuffix=file.removesuffix
local filejoin=file.join
local collapsepath=file.collapsepath
local joinpath=file.joinpath
+local is_qualified_path=file.is_qualified_path
local allocate=utilities.storage.allocate
local settings_to_array=utilities.parsers.settings_to_array
+local getcurrentdir=lfs.currentdir
+local isfile=lfs.isfile
+local isdir=lfs.isdir
local setmetatableindex=table.setmetatableindex
local luasuffixes=utilities.lua.suffixes
-local getcurrentdir=lfs.currentdir
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
-local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_locating=false trackers .register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers .register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers .register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_paths=false trackers .register("resolvers.paths",function(v) trace_paths=v end)
+local resolve_otherwise=true directives.register("resolvers.otherwise",function(v) resolve_otherwise=v end)
local report_resolving=logs.reporter("resolvers","resolving")
local resolvers=resolvers
local expandedpathfromlist=resolvers.expandedpathfromlist
local checkedvariable=resolvers.checkedvariable
local splitconfigurationpath=resolvers.splitconfigurationpath
local methodhandler=resolvers.methodhandler
+local filtered=resolvers.filtered_from_content
+local lookup=resolvers.get_from_content
+local cleanpath=resolvers.cleanpath
+local resolveprefix=resolvers.resolve
local initializesetter=utilities.setters.initialize
local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
-resolvers.cacheversion='1.0.1'
-resolvers.configbanner=''
+resolvers.cacheversion="1.100"
+resolvers.configbanner=""
resolvers.homedir=environment.homedir
resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
resolvers.luacnfname="texmfcnf.lua"
@@ -13555,6 +14626,7 @@ end
local unset_variable="unset"
local formats=resolvers.formats
local suffixes=resolvers.suffixes
+local usertypes=resolvers.usertypes
local dangerous=resolvers.dangerous
local suffixmap=resolvers.suffixmap
resolvers.defaultsuffixes={ "tex" }
@@ -13563,7 +14635,7 @@ local instance=resolvers.instance or nil
function resolvers.setenv(key,value,raw)
if instance then
instance.environment[key]=value
- ossetenv(key,raw and value or resolvers.resolve(value))
+ ossetenv(key,raw and value or resolveprefix(value))
end
end
local function getenv(key)
@@ -13577,7 +14649,7 @@ local function getenv(key)
end
resolvers.getenv=getenv
resolvers.env=getenv
-local function resolve(k)
+local function resolvevariable(k)
return instance.expansions[k]
end
local dollarstripper=lpeg.stripper("$")
@@ -13586,19 +14658,19 @@ local backslashswapper=lpeg.replacer("\\","/")
local somevariable=P("$")/""
local somekey=C(R("az","AZ","09","__","--")^1)
local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
-local variableexpander=Cs((somevariable*(somekey/resolve)+somethingelse)^1 )
+local variableexpander=Cs((somevariable*(somekey/resolvevariable)+somethingelse)^1 )
local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
local variablecleaner=Cs((cleaner+P(1))^0)
-local somevariable=R("az","AZ","09","__","--")^1/resolve
+local somevariable=R("az","AZ","09","__","--")^1/resolvevariable
local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
local variableresolver=Cs((variable+P(1))^0)
local function expandedvariable(var)
return lpegmatch(variableexpander,var) or var
end
-function resolvers.newinstance()
- if trace_locating then
+function resolvers.newinstance()
+ if trace_locating then
report_resolving("creating instance")
- end
+ end
local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
local newinstance={
environment=environment,
@@ -13611,6 +14683,7 @@ function resolvers.newinstance()
foundintrees=allocate(),
hashes=allocate(),
hashed=allocate(),
+ pathlists=false,
specification=allocate(),
lists=allocate(),
data=allocate(),
@@ -13623,6 +14696,7 @@ function resolvers.newinstance()
savelists=true,
pattern=nil,
force_suffixes=true,
+ pathstack={},
}
setmetatableindex(variables,function(t,k)
local v
@@ -13672,8 +14746,13 @@ function resolvers.reset()
end
local function reset_hashes()
instance.lists={}
+ instance.pathlists=false
instance.found={}
end
+local function reset_caches()
+ instance.lists={}
+ instance.pathlists=false
+end
local slash=P("/")
local pathexpressionpattern=Cs (
Cc("^")*(
@@ -13725,13 +14804,13 @@ local function identify_configuration_files()
for i=1,#cnfpaths do
local filepath=cnfpaths[i]
local filename=collapsepath(filejoin(filepath,luacnfname))
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
if trace_locating then
- local fullpath=gsub(resolvers.resolve(collapsepath(filepath)),"//","/")
- local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c")
+ local fullpath=gsub(resolveprefix(collapsepath(filepath)),"//","/")
+ local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c",1,true)
report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath)
end
- if lfs.isfile(realname) then
+ if isfile(realname) then
specification[#specification+1]=filename
if trace_locating then
report_resolving("found configuration file %a",realname)
@@ -13753,7 +14832,7 @@ local function load_configuration_files()
local filename=specification[i]
local pathname=filedirname(filename)
local filename=filejoin(pathname,luacnfname)
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
local blob=loadfile(realname)
if blob then
local setups=instance.setups
@@ -13761,7 +14840,7 @@ local function load_configuration_files()
local parent=data and data.parent
if parent then
local filename=filejoin(pathname,parent)
- local realname=resolvers.resolve(filename)
+ local realname=resolveprefix(filename)
local blob=loadfile(realname)
if blob then
local parentdata=blob()
@@ -13786,7 +14865,7 @@ local function load_configuration_files()
elseif variables[k]==nil then
if trace_locating and not warning then
report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
+ k,resolveprefix(filename))
warning=true
end
variables[k]=v
@@ -13846,7 +14925,7 @@ local function locate_file_databases()
local stripped=lpegmatch(inhibitstripper,path)
if stripped~="" then
local runtime=stripped==path
- path=resolvers.cleanpath(path)
+ path=cleanpath(path)
local spec=resolvers.splitmethod(stripped)
if runtime and (spec.noscheme or spec.scheme=="file") then
stripped="tree:///"..stripped
@@ -13909,8 +14988,8 @@ function resolvers.renew(hashname)
report_resolving("identifying tree %a",hashname)
end
end
- local realpath=resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
+ local realpath=resolveprefix(hashname)
+ if isdir(realpath) then
if trace_locating then
report_resolving("using path %a",realpath)
end
@@ -14011,19 +15090,53 @@ end
function resolvers.unexpandedpath(str)
return joinpath(resolvers.unexpandedpathlist(str))
end
+function resolvers.pushpath(name)
+ local pathstack=instance.pathstack
+ local lastpath=pathstack[#pathstack]
+ local pluspath=filedirname(name)
+ if lastpath then
+ lastpath=collapsepath(filejoin(lastpath,pluspath))
+ else
+ lastpath=collapsepath(pluspath)
+ end
+ insert(pathstack,lastpath)
+ if trace_paths then
+ report_resolving("pushing path %a",lastpath)
+ end
+end
+function resolvers.poppath()
+ local pathstack=instance.pathstack
+ if trace_paths and #pathstack>0 then
+ report_resolving("popping path %a",pathstack[#pathstack])
+ end
+ remove(pathstack)
+end
+function resolvers.stackpath()
+ local pathstack=instance.pathstack
+ local currentpath=pathstack[#pathstack]
+ return currentpath~="" and currentpath or nil
+end
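-- Sketch of the new path stack (file names are hypothetical): each push adds
-- the directory of the given name, resolved relative to the previous top.
resolvers.pushpath("manual/chapters/intro.tex") -- stack top: "manual/chapters"
resolvers.pushpath("../figures/cow.pdf")        -- stack top: "manual/figures"
print(resolvers.stackpath())                    -- "manual/figures"
resolvers.poppath()
resolvers.poppath()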
local done={}
function resolvers.resetextrapath()
local ep=instance.extra_paths
if not ep then
- ep,done={},{}
- instance.extra_paths=ep
+ done={}
+ instance.extra_paths={}
elseif #ep>0 then
- instance.lists,done={},{}
+ done={}
+ reset_caches()
end
end
function resolvers.registerextrapath(paths,subpaths)
- paths=settings_to_array(paths)
- subpaths=settings_to_array(subpaths)
+ if not subpaths or subpaths=="" then
+ if not paths or paths=="" then
+ return
+ elseif done[paths] then
+ return
+ end
+ end
+ local paths=settings_to_array(paths)
+ local subpaths=settings_to_array(subpaths)
local ep=instance.extra_paths or {}
local oldn=#ep
local newn=oldn
@@ -14038,7 +15151,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps=p.."/"..s
if not done[ps] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
+ ep[newn]=cleanpath(ps)
done[ps]=true
end
end
@@ -14048,7 +15161,7 @@ function resolvers.registerextrapath(paths,subpaths)
local p=paths[i]
if not done[p] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(p)
+ ep[newn]=cleanpath(p)
done[p]=true
end
end
@@ -14060,7 +15173,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps=ep[i].."/"..s
if not done[ps] then
newn=newn+1
- ep[newn]=resolvers.cleanpath(ps)
+ ep[newn]=cleanpath(ps)
done[ps]=true
end
end
@@ -14069,52 +15182,70 @@ function resolvers.registerextrapath(paths,subpaths)
if newn>0 then
instance.extra_paths=ep
end
- if newn>oldn then
- instance.lists={}
+ if newn~=oldn then
+ reset_caches()
end
end
-local function made_list(instance,list)
- local ep=instance.extra_paths
- if not ep or #ep==0 then
- return list
+function resolvers.pushextrapath(path)
+ local paths=settings_to_array(path)
+ if instance.extra_stack then
+ insert(instance.extra_stack,1,paths)
else
- local done,new,newn={},{},0
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v]=true
- newn=newn+1
- new[newn]=v
- else
- break
- end
- end
- end
- for k=1,#ep do
- local v=ep[k]
+ instance.extra_stack={ paths }
+ end
+ reset_caches()
+end
+function resolvers.popextrapath()
+ if instance.extra_stack then
+ reset_caches()
+ return remove(instance.extra_stack,1)
+ end
+end
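-- Sketch: a temporary extra search path, merged in ahead of the regular
-- expanded paths for user file types and removed again afterwards (the
-- path and file name are hypothetical).
resolvers.pushextrapath("./styles")
local found = resolvers.findfile("myproject-style.tex")
resolvers.popextrapath()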
+local function made_list(instance,list,extra_too)
+ local done={}
+ local new={}
+ local newn=0
+ local function add(p)
+ for k=1,#p do
+ local v=p[k]
if not done[v] then
done[v]=true
newn=newn+1
new[newn]=v
end
end
- for k=1,#list do
- local v=list[k]
- if not done[v] then
- done[v]=true
- newn=newn+1
- new[newn]=v
+ end
+ for k=1,#list do
+ local v=list[k]
+ if done[v] then
+ elseif find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ else
+ break
+ end
+ end
+ if extra_too then
+ local es=instance.extra_stack
+ if es and #es>0 then
+ for k=1,#es do
+ add(es[k])
end
end
- return new
+ local ep=instance.extra_paths
+ if ep and #ep>0 then
+ add(ep)
+ end
end
+ add(list)
+ return new
end
function resolvers.cleanpathlist(str)
local t=resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i]=collapsepath(resolvers.cleanpath(t[i]))
+ t[i]=collapsepath(cleanpath(t[i]))
end
end
return t
@@ -14122,22 +15253,22 @@ end
function resolvers.expandpath(str)
return joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expandedpathlist(str)
+function resolvers.expandedpathlist(str,extra_too)
if not str then
return {}
- elseif instance.savelists then
+ elseif instance.savelists then
str=lpegmatch(dollarstripper,str)
local lists=instance.lists
local lst=lists[str]
if not lst then
- local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)),extra_too)
lst=expandedpathfromlist(l)
lists[str]=lst
end
return lst
else
local lst=resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ return made_list(instance,expandedpathfromlist(lst),extra_too)
end
end
function resolvers.expandedpathlistfromvariable(str)
@@ -14148,6 +15279,13 @@ end
function resolvers.expandpathfromvariable(str)
return joinpath(resolvers.expandedpathlistfromvariable(str))
end
+function resolvers.cleanedpathlist(v)
+ local t=resolvers.expandedpathlist(v)
+ for i=1,#t do
+ t[i]=resolvers.resolve(resolvers.cleanpath(t[i]))
+ end
+ return t
+end
function resolvers.expandbraces(str)
local ori=str
local pth=expandedpathfromlist(resolvers.splitpath(ori))
@@ -14164,7 +15302,7 @@ function resolvers.registerfilehash(name,content,someerror)
end
end
local function isreadable(name)
- local readable=lfs.isfile(name)
+ local readable=isfile(name)
if trace_detail then
if readable then
report_resolving("file %a is readable",name)
@@ -14174,70 +15312,57 @@ local function isreadable(name)
end
return readable
end
-local function collect_files(names)
- local filelist,noffiles={},0
+local function collect_files(names)
+ local filelist={}
+ local noffiles=0
+ local function check(hash,root,pathname,path,name)
+ if not pathname or find(path,pathname) then
+ local variant=hash.type
+ local search=filejoin(root,path,name)
+ local result=methodhandler('concatinators',variant,root,path,name)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ end
for k=1,#names do
- local fname=names[k]
+ local filename=names[k]
if trace_detail then
- report_resolving("checking name %a",fname)
+ report_resolving("checking name %a",filename)
end
- local bname=filebasename(fname)
- local dname=filedirname(fname)
- if dname=="" or find(dname,"^%.") then
- dname=false
+ local basename=filebasename(filename)
+ local pathname=filedirname(filename)
+ if pathname=="" or find(pathname,"^%.") then
+ pathname=false
else
- dname=gsub(dname,"%*",".*")
- dname="/"..dname.."$"
+ pathname=gsub(pathname,"%*",".*")
+ pathname="/"..pathname.."$"
end
local hashes=instance.hashes
for h=1,#hashes do
local hash=hashes[h]
- local blobpath=hash.name
- local files=blobpath and instance.files[blobpath]
- if files then
+ local hashname=hash.name
+ local content=hashname and instance.files[hashname]
+ if content then
if trace_detail then
- report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ report_resolving("deep checking %a, base %a, pattern %a",hashname,basename,pathname)
end
- local blobfile=files[bname]
- if not blobfile then
- local rname="remap:"..bname
- blobfile=files[rname]
- if blobfile then
- bname=files[rname]
- blobfile=files[bname]
- end
- end
- if blobfile then
- local blobroot=files.__path__ or blobpath
- if type(blobfile)=='string' then
- if not dname or find(blobfile,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,blobfile,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
+ local path,name=lookup(content,basename)
+ if path then
+ local metadata=content.metadata
+ local realroot=metadata and metadata.path or hashname
+ if type(path)=="string" then
+ check(hash,realroot,pathname,path,name)
else
- for kk=1,#blobfile do
- local vv=blobfile[kk]
- if not dname or find(vv,dname) then
- local variant=hash.type
- local search=filejoin(blobroot,vv,bname)
- local result=methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles=noffiles+1
- filelist[noffiles]={ variant,search,result }
- end
+ for i=1,#path do
+ check(hash,realroot,pathname,path[i],name)
end
end
end
elseif trace_locating then
- report_resolving("no match in %a (%s)",blobpath,bname)
+ report_resolving("no match in %a (%s)",hashname,basename)
end
end
end
@@ -14262,7 +15387,7 @@ end
local function can_be_dir(name)
local fakepaths=instance.fakepaths
if not fakepaths[name] then
- if lfs.isdir(name) then
+ if isdir(name) then
fakepaths[name]=1
else
fakepaths[name]=2
@@ -14278,10 +15403,11 @@ local function find_analyze(filename,askedformat,allresults)
if askedformat=="" then
if ext=="" or not suffixmap[ext] then
local defaultsuffixes=resolvers.defaultsuffixes
+ local formatofsuffix=resolvers.formatofsuffix
for i=1,#defaultsuffixes do
local forcedname=filename..'.'..defaultsuffixes[i]
wantedfiles[#wantedfiles+1]=forcedname
- filetype=resolvers.formatofsuffix(forcedname)
+ filetype=formatofsuffix(forcedname)
if trace_locating then
report_resolving("forcing filetype %a",filetype)
end
@@ -14317,18 +15443,18 @@ local function find_direct(filename,allresults)
end
end
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
+ if find(filename,'*',1,true) then
if trace_locating then
report_resolving("checking wildcard %a",filename)
end
- local method,result=resolvers.findwildcardfiles(filename)
+ local result=resolvers.findwildcardfiles(filename)
if result then
return "wildcard",result
end
end
end
local function find_qualified(filename,allresults,askedformat,alsostripped)
- if not file.is_qualified_path(filename) then
+ if not is_qualified_path(filename) then
return
end
if trace_locating then
@@ -14402,33 +15528,66 @@ local function check_subpath(fname)
return fname
end
end
-local function find_intree(filename,filetype,wantedfiles,allresults)
+local function makepathlist(list,filetype)
local typespec=resolvers.variableofformat(filetype)
- local pathlist=resolvers.expandedpathlist(typespec)
- local method="intree"
+ local pathlist=resolvers.expandedpathlist(typespec,filetype and usertypes[filetype])
+ local entry={}
if pathlist and #pathlist>0 then
- local filelist=collect_files(wantedfiles)
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local prescanned=find(path,'^!!')
+ local recursive=find(path,'//$')
+ local pathname=lpegmatch(inhibitstripper,path)
+ local expression=makepathexpression(pathname)
+ local barename=gsub(pathname,"/+$","")
+ barename=resolveprefix(barename)
+ local scheme=url.hasscheme(barename)
+ local schemename=gsub(barename,"%.%*$",'')
+ entry[k]={
+ path=path,
+ pathname=pathname,
+ prescanned=prescanned,
+ recursive=recursive,
+ expression=expression,
+ barename=barename,
+ scheme=scheme,
+ schemename=schemename,
+ }
+ end
+ entry.typespec=typespec
+ list[filetype]=entry
+ else
+ list[filetype]=false
+ end
+ return entry
+end
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local pathlists=instance.pathlists
+ if not pathlists then
+ pathlists=setmetatableindex(allocate(),makepathlist)
+ instance.pathlists=pathlists
+ end
+ local pathlist=pathlists[filetype]
+ if pathlist then
+ local method="intree"
+ local filelist=collect_files(wantedfiles)
local dirlist={}
+ local result={}
if filelist then
for i=1,#filelist do
dirlist[i]=filedirname(filelist[i][3]).."/"
end
end
if trace_detail then
- report_resolving("checking filename %a",filename)
+ report_resolving("checking filename %a in tree",filename)
end
- local resolve=resolvers.resolve
- local result={}
for k=1,#pathlist do
- local path=pathlist[k]
- local pathname=lpegmatch(inhibitstripper,path)
- local doscan=path==pathname
- if not find (pathname,'//$') then
- doscan=false
- end
+ local entry=pathlist[k]
+ local path=entry.path
+ local pathname=entry.pathname
local done=false
if filelist then
- local expression=makepathexpression(pathname)
+ local expression=entry.expression
if trace_detail then
report_resolving("using pattern %a for path %a",expression,pathname)
end
@@ -14436,8 +15595,8 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
local fl=filelist[k]
local f=fl[2]
local d=dirlist[k]
- if find(d,expression) or find(resolve(d),expression) then
- result[#result+1]=resolve(fl[3])
+ if find(d,expression) or find(resolveprefix(d),expression) then
+ result[#result+1]=resolveprefix(fl[3])
done=true
if allresults then
if trace_detail then
@@ -14458,56 +15617,62 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
method="database"
else
method="filesystem"
- pathname=gsub(pathname,"/+$","")
- pathname=resolve(pathname)
- local scheme=url.hasscheme(pathname)
+ local scheme=entry.scheme
if not scheme or scheme=="file" then
- local pname=gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
+ local pname=entry.schemename
+ if not find(pname,"*",1,true) then
if can_be_dir(pname) then
- for k=1,#wantedfiles do
- local w=wantedfiles[k]
- local fname=check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
+ if not done and not entry.prescanned then
+ if trace_detail then
+ report_resolving("quick root scan for %a",pname)
end
- end
- if not done and doscan then
- local files=resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w=wantedfiles[k]
- local subpath=files[w]
- if not subpath or subpath=="" then
- elseif type(subpath)=="string" then
- local fname=check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
- end
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
end
- else
- for i=1,#subpath do
- local sp=subpath[i]
- if sp=="" then
- else
- local fname=check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1]=fname
- done=true
- if not allresults then
- break
+ end
+ end
+ if not done and entry.recursive then
+ if trace_detail then
+ report_resolving("scanning filesystem for %a",pname)
+ end
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
end
end
end
- end
- if done and not allresults then
- break
+ if done and not allresults then
+ break
+ end
end
end
end
@@ -14515,6 +15680,18 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
end
else
end
+ else
+ for k=1,#wantedfiles do
+ local pname=entry.barename
+ local fname=methodhandler('finders',pname.."/"..wantedfiles[k])
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
end
end
if done and not allresults then
@@ -14549,10 +15726,13 @@ local function find_otherwise(filename,filetype,wantedfiles,allresults)
local filelist=collect_files(wantedfiles)
local fl=filelist and filelist[1]
if fl then
- return "otherwise",{ resolvers.resolve(fl[3]) }
+ return "otherwise",{ resolveprefix(fl[3]) }
end
end
collect_instance_files=function(filename,askedformat,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
askedformat=askedformat or ""
filename=collapsepath(filename,".")
filename=gsub(filename,"^%./",getcurrentdir().."/")
@@ -14587,7 +15767,11 @@ collect_instance_files=function(filename,askedformat,allresults)
else
local method,result,stamp,filetype,wantedfiles
if instance.remember then
- stamp=formatters["%s--%s"](filename,askedformat)
+ if askedformat=="" then
+ stamp=formatters["%s::%s"](suffixonly(filename),filename)
+ else
+ stamp=formatters["%s::%s"](askedformat,filename)
+ end
result=stamp and instance.found[stamp]
if result then
if trace_locating then
@@ -14606,7 +15790,7 @@ collect_instance_files=function(filename,askedformat,allresults)
method,result=find_intree(filename,filetype,wantedfiles)
if not result then
method,result=find_onpath(filename,filetype,wantedfiles)
- if not result then
+ if resolve_otherwise and not result then
method,result=find_otherwise(filename,filetype,wantedfiles)
end
end
@@ -14622,7 +15806,7 @@ collect_instance_files=function(filename,askedformat,allresults)
end
if stamp then
if trace_locating then
- report_resolving("remembering file %a",filename)
+ report_resolving("remembering file %a using hash %a",filename,stamp)
end
instance.found[stamp]=result
end
@@ -14630,6 +15814,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -14649,39 +15836,30 @@ function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
- local bname,result=filebasename(filename),{}
+ local base=filebasename(filename)
+ local result={}
local hashes=instance.hashes
- local noffound=0
+ local function okay(hash,path,name)
+ local found=methodhandler('concatinators',hash.type,hash.name,path,name)
+ if found and found~="" then
+ result[#result+1]=resolveprefix(found)
+ return not allresults
+ end
+ end
for k=1,#hashes do
local hash=hashes[k]
- local files=instance.files[hash.name] or {}
- local blist=files[bname]
- if not blist then
- local rname="remap:"..bname
- blist=files[rname]
- if blist then
- bname=files[rname]
- blist=files[bname]
- end
- end
- if blist then
- if type(blist)=='string' then
- local found=methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then
- break
- end
+ local content=instance.files[hash.name]
+ if content then
+ local path,name=lookup(content,base)
+ if not path then
+ elseif type(path)=="string" then
+ if okay(hash,path,name) then
+ return result
end
else
- for kk=1,#blist do
- local vv=blist[kk]
- local found=methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found~="" then
- noffound=noffound+1
- result[noffound]=resolvers.resolve(found)
- if not allresults then break end
+ for i=1,#path do
+ if okay(hash,path[i],name) then
+ return result
end
end
end
@@ -14695,64 +15873,80 @@ end
function resolvers.findgivenfile(filename)
return findgivenfiles(filename,false)[1] or ""
end
-local function doit(path,blist,bname,tag,variant,result,allresults)
- local done=false
- if blist and variant then
- local resolve=resolvers.resolve
- if type(blist)=='string' then
- if find(lower(blist),path) then
- local full=methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- end
- else
- for kk=1,#blist do
- local vv=blist[kk]
- if find(lower(vv),path) then
- local full=methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1]=resolve(full)
- done=true
- if not allresults then break end
- end
- end
- end
- end
- return done
-end
local makewildcard=Cs(
(P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
)
function resolvers.wildcardpattern(pattern)
return lpegmatch(makewildcard,pattern) or pattern
end
-local function findwildcardfiles(filename,allresults,result)
- result=result or {}
+local function findwildcardfiles(filename,allresults,result)
+ local result=result or {}
local base=filebasename(filename)
local dirn=filedirname(filename)
local path=lower(lpegmatch(makewildcard,dirn) or dirn)
local name=lower(lpegmatch(makewildcard,base) or base)
- local files,done=instance.files,false
- if find(name,"%*") then
+ local files=instance.files
+ if find(name,"*",1,true) then
local hashes=instance.hashes
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
for k=1,#hashes do
local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- for kk,hh in next,files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ for found,base in filtered(files[hashname],name) do
+ if type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
end
end
end
end
else
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
local hashes=instance.hashes
for k=1,#hashes do
local hash=hashes[k]
- local hashname,hashtype=hash.name,hash.type
- if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done=true end
- if done and not allresults then break end
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ local found,base=lookup(files[hashname],base)
+ if not found then
+ elseif type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
+ end
+ end
end
end
return result
@@ -14825,7 +16019,7 @@ end
function resolvers.dowithpath(name,func)
local pathlist=resolvers.expandedpathlist(name)
for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
+ func("^"..cleanpath(pathlist[i]))
end
end
function resolvers.dowithvariable(name,func)
@@ -14833,23 +16027,23 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
local engine=environment.ownmain or "luatex"
- local barename=file.removesuffix(name)
- local fullname=file.addsuffix(barename,"fmt")
+ local barename=removesuffix(name)
+ local fullname=addsuffix(barename,"fmt")
local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
if fmtname=="" then
fmtname=resolvers.findfile(fullname)
- fmtname=resolvers.cleanpath(fmtname)
+ fmtname=cleanpath(fmtname)
end
if fmtname~="" then
- local barename=file.removesuffix(fmtname)
- local luaname=file.addsuffix(barename,luasuffixes.lua)
- local lucname=file.addsuffix(barename,luasuffixes.luc)
- local luiname=file.addsuffix(barename,luasuffixes.lui)
- if lfs.isfile(luiname) then
+ local barename=removesuffix(fmtname)
+ local luaname=addsuffix(barename,luasuffixes.lua)
+ local lucname=addsuffix(barename,luasuffixes.luc)
+ local luiname=addsuffix(barename,luasuffixes.lui)
+ if isfile(luiname) then
return barename,luiname
- elseif lfs.isfile(lucname) then
+ elseif isfile(lucname) then
return barename,lucname
- elseif lfs.isfile(luaname) then
+ elseif isfile(luaname) then
return barename,luaname
end
end
@@ -14871,29 +16065,24 @@ function resolvers.dowithfilesintree(pattern,handle,before,after)
local hash=hashes[i]
local blobtype=hash.type
local blobpath=hash.name
- if blobpath then
+ if blobtype and blobpath then
+ local total=0
+ local checked=0
+ local done=0
if before then
before(blobtype,blobpath,pattern)
end
- local files=instance.files[blobpath]
- local total,checked,done=0,0,0
- if files then
- for k,v in table.sortedhash(files) do
- total=total+1
- if find(k,"^remap:") then
- elseif find(k,pattern) then
- if type(v)=="string" then
- checked=checked+1
- if handle(blobtype,blobpath,v,k) then
- done=done+1
- end
- else
- checked=checked+#v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done=done+1
- end
- end
+ for path,name in filtered(instance.files[blobpath],pattern) do
+ if type(path)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,path,name) then
+ done=done+1
+ end
+ else
+ checked=checked+#path
+ for i=1,#path do
+ if handle(blobtype,blobpath,path[i],name) then
+ done=done+1
end
end
end
@@ -14904,8 +16093,8 @@ function resolvers.dowithfilesintree(pattern,handle,before,after)
end
end
end
-resolvers.obsolete=resolvers.obsolete or {}
-local obsolete=resolvers.obsolete
+local obsolete=resolvers.obsolete or {}
+resolvers.obsolete=obsolete
resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
@@ -14916,7 +16105,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-pre"] = package.loaded["data-pre"] or true
--- original size: 6643, stripped down to: 4401
+-- original size: 3950, stripped down to: 2935
if not modules then modules={} end modules ['data-pre']={
version=1.001,
@@ -14926,44 +16115,51 @@ if not modules then modules={} end modules ['data-pre']={
license="see context related readme files"
}
local resolvers=resolvers
-local prefixes=utilities.storage.allocate()
-resolvers.prefixes=prefixes
-local cleanpath,findgivenfile,expansion=resolvers.cleanpath,resolvers.findgivenfile,resolvers.expansion
+local prefixes=resolvers.prefixes
+local cleanpath=resolvers.cleanpath
+local findgivenfile=resolvers.findgivenfile
+local expansion=resolvers.expansion
local getenv=resolvers.getenv
-local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
-local joinpath,basename,dirname=file.join,file.basename,file.dirname
-local getmetatable,rawset,type=getmetatable,rawset,type
+local basename=file.basename
+local dirname=file.dirname
+local joinpath=file.join
+local isfile=lfs.isfile
prefixes.environment=function(str)
return cleanpath(expansion(str))
end
-prefixes.relative=function(str,n)
- if io.exists(str) then
- elseif io.exists("./"..str) then
- str="./"..str
- else
- local p="../"
- for i=1,n or 2 do
- if io.exists(p..str) then
- str=p..str
- break
- else
- p=p.."../"
+local function relative(str,n)
+ if not isfile(str) then
+ local pstr="./"..str
+ if isfile(pstr) then
+ str=pstr
+ else
+ local p="../"
+ for i=1,n or 2 do
+ local pstr=p..str
+ if isfile(pstr) then
+ str=pstr
+ break
+ else
+ p=p.."../"
+ end
end
end
end
return cleanpath(str)
end
+local function locate(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(fullname~="" and fullname or str)
+end
+prefixes.relative=relative
+prefixes.locate=locate
prefixes.auto=function(str)
- local fullname=prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname=prefixes.locate(str)
+ local fullname=relative(str)
+ if not isfile(fullname) then
+ fullname=locate(str)
end
return fullname
end
-prefixes.locate=function(str)
- local fullname=findgivenfile(str) or ""
- return cleanpath((fullname~="" and fullname) or str)
-end
prefixes.filename=function(str)
local fullname=findgivenfile(str) or ""
return cleanpath(basename((fullname~="" and fullname) or str))
@@ -14984,6 +16180,13 @@ end
prefixes.home=function(str)
return cleanpath(joinpath(getenv('HOME'),str))
end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
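-- Sketch: the prefixes above kick in when a string of the form "prefix:target"
-- goes through resolvers.resolve (paths are hypothetical):
print(resolvers.resolve("home:texmf/tex/context")) -- $HOME/texmf/tex/context, cleaned
print(resolvers.resolve("rel:images/cow.pdf"))     -- ./images/cow.pdf when found nearby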
local function toppath()
local inputstack=resolvers.inputstack
if not inputstack then
@@ -14996,98 +16199,22 @@ local function toppath()
return pathname
end
end
-resolvers.toppath=toppath
-prefixes.toppath=function(str)
- return cleanpath(joinpath(toppath(),str))
-end
-prefixes.env=prefixes.environment
-prefixes.rel=prefixes.relative
-prefixes.loc=prefixes.locate
-prefixes.kpse=prefixes.locate
-prefixes.full=prefixes.locate
-prefixes.file=prefixes.filename
-prefixes.path=prefixes.pathname
-function resolvers.allprefixes(separator)
- local all=table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i]=all[i]..":"
- end
- end
- return all
-end
-local function _resolve_(method,target)
- local action=prefixes[method]
- if action then
- return action(target)
- else
- return method..":"..target
- end
-end
-local resolved,abstract={},{}
-function resolvers.resetresolve(str)
- resolved,abstract={},{}
-end
-local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
-local prefix=C(R("az")^2)*P(":")
-local target=C((1-S(" \"\';,"))^1)
-local notarget=(#S(";,")+P(-1))*Cc("")
-local pattern=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
-local function resolve(str)
- if type(str)=="table" then
- local t={}
- for i=1,#str do
- t[i]=resolve(str[i])
- end
- return t
+local function jobpath()
+ local path=resolvers.stackpath()
+ if not path or path=="" then
+ return "."
else
- local res=resolved[str]
- if not res then
- res=lpegmatch(pattern,str)
- resolved[str]=res
- abstract[res]=str
- end
- return res
- end
-end
-local function unresolve(str)
- return abstract[str] or str
-end
-resolvers.resolve=resolve
-resolvers.unresolve=unresolve
-if type(os.uname)=="function" then
- for k,v in next,os.uname() do
- if not prefixes[k] then
- prefixes[k]=function() return v end
- end
- end
-end
-if os.type=="unix" then
- local pattern
- local function makepattern(t,k,v)
- if t then
- rawset(t,k,v)
- end
- local colon=P(":")
- for k,v in table.sortedpairs(prefixes) do
- if p then
- p=P(k)+p
- else
- p=P(k)
- end
- end
- pattern=Cs((p*colon+colon/";"+P(1))^0)
- end
- makepattern()
- getmetatable(prefixes).__newindex=makepattern
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
-else
- function resolvers.repath(str)
- return str
+ return path
end
end
+resolvers.toppath=toppath
+resolvers.jobpath=jobpath
+prefixes.toppath=function(str) return cleanpath(joinpath(toppath(),str)) end
+prefixes.jobpath=function(str) return cleanpath(joinpath(jobpath(),str)) end
+resolvers.setdynamic("toppath")
+resolvers.setdynamic("jobpath")
+prefixes.jobfile=prefixes.jobpath
+resolvers.setdynamic("jobfile")
end -- of closure
@@ -15149,7 +16276,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-fil"] = package.loaded["data-fil"] or true
--- original size: 3801, stripped down to: 3231
+-- original size: 3863, stripped down to: 3310
if not modules then modules={} end modules ['data-fil']={
version=1.001,
@@ -15161,30 +16288,31 @@ if not modules then modules={} end modules ['data-fil']={
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local report_files=logs.reporter("resolvers","files")
local resolvers=resolvers
+local resolveprefix=resolvers.resolve
local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name=specification.filename
- local realname=resolvers.resolve(name)
+ local filename=specification.filename
+ local realname=resolveprefix(filename)
if realname and realname~='' and lfs.isdir(realname) then
if trace_locating then
- report_files("file locator %a found as %a",name,realname)
+ report_files("file locator %a found as %a",filename,realname)
end
- resolvers.appendhash('file',name,true)
+ resolvers.appendhash('file',filename,true)
elseif trace_locating then
- report_files("file locator %a not found",name)
+ report_files("file locator %a not found",filename)
end
end
function hashers.file(specification)
- local name=specification.filename
- local content=caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local pathname=specification.filename
+ local content=caches.loadcontent(pathname,'files')
+ resolvers.registerfilehash(pathname,content,content==nil)
end
function generators.file(specification)
- local path=specification.filename
- local content=resolvers.scanfiles(path,false,true)
- resolvers.registerfilehash(path,content,true)
+ local pathname=specification.filename
+ local content=resolvers.scanfiles(pathname,false,true)
+ resolvers.registerfilehash(pathname,content,true)
end
concatinators.file=file.join
function finders.file(specification,filetype)
@@ -15375,7 +16503,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-use"] = package.loaded["data-use"] or true
--- original size: 3913, stripped down to: 2998
+-- original size: 3899, stripped down to: 2984
if not modules then modules={} end modules ['data-use']={
version=1.001,
@@ -15421,7 +16549,7 @@ end
statistics.register("used config file",function() return caches.configfiles() end)
statistics.register("used cache path",function() return caches.usedpaths() end)
function statistics.savefmtstatus(texname,formatbanner,sourcefile)
- local enginebanner=status.list().banner
+ local enginebanner=status.banner
if formatbanner and enginebanner and sourcefile then
local luvname=file.replacesuffix(texname,"luv")
local luvdata={
@@ -15434,7 +16562,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile)
end
end
function statistics.checkfmtstatus(texname)
- local enginebanner=status.list().banner
+ local enginebanner=status.banner
if enginebanner and texname then
local luvname=file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
@@ -15466,7 +16594,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-zip"] = package.loaded["data-zip"] or true
--- original size: 8489, stripped down to: 6757
+-- original size: 8772, stripped down to: 6841
if not modules then modules={} end modules ['data-zip']={
version=1.001,
@@ -15485,16 +16613,6 @@ zip.archives=zip.archives or {}
local archives=zip.archives
zip.registeredfiles=zip.registeredfiles or {}
local registeredfiles=zip.registeredfiles
-local limited=false
-directives.register("system.inputmode",function(v)
- if not limited then
- local i_limiter=io.i_limiter(v)
- if i_limiter then
- zip.open=i_limiter.protect(zip.open)
- limited=true
- end
- end
-end)
local function validzip(str)
if not find(str,"^zip://") then
return "zip:///"..str
@@ -15509,7 +16627,7 @@ function zip.openarchive(name)
local arch=archives[name]
if not arch then
local full=resolvers.findfile(name) or ""
- arch=(full~="" and zip.open(full)) or false
+ arch=full~="" and zip.open(full) or false
archives[name]=arch
end
return arch
@@ -15668,31 +16786,42 @@ function resolvers.usezipfile(archive)
end
end
function resolvers.registerzipfile(z,tree)
- local files,filter={},""
- if tree=="" then
- filter="^(.+)/(.-)$"
- else
- filter=format("^%s/(.+)/(.-)$",tree)
- end
+ local names={}
+ local files={}
+ local remap={}
+ local n=0
+ local filter=tree=="" and "^(.+)/(.-)$" or format("^%s/(.+)/(.-)$",tree)
+ local register=resolvers.registerfile
if trace_locating then
report_zip("registering: using filter %a",filter)
end
- local register,n=resolvers.registerfile,0
for i in z:files() do
- local path,name=match(i.filename,filter)
- if path then
- if name and name~='' then
- register(files,name,path)
- n=n+1
- else
+ local filename=i.filename
+ local path,name=match(filename,filter)
+ if not path then
+ n=n+1
+ register(names,filename,"")
+ local usedname=lower(filename)
+ files[usedname]=""
+ if usedname~=filename then
+ remap[usedname]=filename
end
- else
- register(files,i.filename,'')
+ elseif name and name~="" then
n=n+1
+ register(names,name,path)
+ local usedname=lower(name)
+ files[usedname]=path
+ if usedname~=name then
+ remap[usedname]=name
+ end
+ else
end
end
report_zip("registering: %s files registered",n)
- return files
+ return {
+ files=files,
+ remap=remap,
+ }
end
@@ -15702,7 +16831,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tre"] = package.loaded["data-tre"] or true
--- original size: 2508, stripped down to: 2074
+-- original size: 8479, stripped down to: 5580
if not modules then modules={} end modules ['data-tre']={
version=1.001,
@@ -15711,42 +16840,64 @@ if not modules then modules={} end modules ['data-tre']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local find,gsub,format=string.find,string.gsub,string.format
+local find,gsub,lower=string.find,string.gsub,string.lower
+local basename,dirname,joinname=file.basename,file.dirname,file .join
+local globdir,isdir,isfile=dir.glob,lfs.isdir,lfs.isfile
+local P,lpegmatch=lpeg.P,lpeg.match
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local report_trees=logs.reporter("resolvers","trees")
local resolvers=resolvers
-local done,found,notfound={},{},resolvers.finders.notfound
-function resolvers.finders.tree(specification)
+local resolveprefix=resolvers.resolve
+local notfound=resolvers.finders.notfound
+local lookup=resolvers.get_from_content
+local collectors={}
+local found={}
+function resolvers.finders.tree(specification)
local spec=specification.filename
- local fnd=found[spec]
- if fnd==nil then
+ local okay=found[spec]
+ if okay==nil then
if spec~="" then
- local path,name=file.dirname(spec),file.basename(spec)
- if path=="" then path="." end
- local hash=done[path]
- if not hash then
- local pattern=path.."/*"
- hash=dir.glob(pattern)
- done[path]=hash
+ local path=dirname(spec)
+ local name=basename(spec)
+ if path=="" then
+ path="."
+ end
+ local names=collectors[path]
+ if not names then
+ local pattern=find(path,"/%*+$") and path or (path.."/*")
+ names=globdir(pattern)
+ collectors[path]=names
end
local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
- for k=1,#hash do
- local v=hash[k]
- if find(v,pattern) then
- found[spec]=v
- return v
+ for i=1,#names do
+ local fullname=names[i]
+ if find(fullname,pattern) then
+ found[spec]=fullname
+ return fullname
+ end
+ end
+ local pattern=lower(pattern)
+ for i=1,#names do
+ local fullname=lower(names[i])
+ if find(fullname,pattern) then
+ if isfile(fullname) then
+ found[spec]=fullname
+ return fullname
+ else
+ break
+ end
end
end
end
- fnd=notfound()
- found[spec]=fnd
+ okay=notfound()
+ found[spec]=okay
end
- return fnd
+ return okay
end
function resolvers.locators.tree(specification)
local name=specification.filename
- local realname=resolvers.resolve(name)
- if realname and realname~='' and lfs.isdir(realname) then
+ local realname=resolveprefix(name)
+ if realname and realname~='' and isdir(realname) then
if trace_locating then
report_trees("locator %a found",realname)
end
@@ -15757,16 +16908,110 @@ function resolvers.locators.tree(specification)
end
function resolvers.hashers.tree(specification)
local name=specification.filename
- if trace_locating then
- report_trees("analysing %a",name)
- end
+ report_trees("analyzing %a",name)
resolvers.methodhandler("hashers",name)
resolvers.generators.file(specification)
end
-resolvers.concatinators.tree=resolvers.concatinators.file
-resolvers.generators.tree=resolvers.generators.file
-resolvers.openers.tree=resolvers.openers.file
-resolvers.loaders.tree=resolvers.loaders.file
+local collectors={}
+local splitter=lpeg.splitat("/**/")
+local stripper=lpeg.replacer { [P("/")*P("*")^1*P(-1)]="" }
+table.setmetatableindex(collectors,function(t,k)
+ local rootname=lpegmatch(stripper,k)
+ local dataname=joinname(rootname,"dirlist")
+ local content=caches.loadcontent(dataname,"files",dataname)
+ if not content then
+ content=resolvers.scanfiles(rootname,nil,nil,false,true)
+ caches.savecontent(dataname,"files",content,dataname)
+ end
+ t[k]=content
+ return content
+end)
+local function checked(root,p,n)
+ if p then
+ if type(p)=="table" then
+ for i=1,#p do
+ local fullname=joinname(root,p[i],n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ else
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ return notfound()
+end
+local function resolve(specification)
+ local filename=specification.filename
+ if filename~="" then
+ local root,rest=lpegmatch(splitter,filename)
+ if root and rest then
+ local path,name=dirname(rest),basename(rest)
+ if name~=rest then
+ local content=collectors[root]
+ local p,n=lookup(content,name)
+ if not p then
+ return notfound()
+ end
+ local pattern=".*/"..path.."$"
+ local istable=type(p)=="table"
+ if istable then
+ for i=1,#p do
+ local pi=p[i]
+ if pi==path or find(pi,pattern) then
+ local fullname=joinname(root,pi,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ elseif p==path or find(p,pattern) then
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ local queries=specification.queries
+ if queries and queries.option=="fileonly" then
+ return checked(root,p,n)
+ else
+ return notfound()
+ end
+ end
+ end
+ local path,name=dirname(filename),basename(filename)
+ local root=lpegmatch(stripper,path)
+ local content=collectors[path]
+ local p,n=lookup(content,name)
+ if p then
+ return checked(root,p,n)
+ end
+ end
+ return notfound()
+end
+resolvers.finders .dirlist=resolve
+resolvers.locators .dirlist=resolvers.locators .tree
+resolvers.hashers .dirlist=resolvers.hashers .tree
+resolvers.generators.dirlist=resolvers.generators.file
+resolvers.openers .dirlist=resolvers.openers .file
+resolvers.loaders .dirlist=resolvers.loaders .file
+function resolvers.finders.dirfile(specification)
+ local queries=specification.queries
+ if queries then
+ queries.option="fileonly"
+ else
+ specification.queries={ option="fileonly" }
+ end
+ return resolve(specification)
+end
+resolvers.locators .dirfile=resolvers.locators .dirlist
+resolvers.hashers .dirfile=resolvers.hashers .dirlist
+resolvers.generators.dirfile=resolvers.generators.dirlist
+resolvers.openers .dirfile=resolvers.openers .dirlist
+resolvers.loaders .dirfile=resolvers.loaders .dirlist
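-- Sketch of a dirfile lookup (the tree path is hypothetical): the filename may
-- contain a "/**/" marker separating the scanned root from the wanted file, and
-- the "fileonly" option accepts a match in any cached subdirectory.
local fullname = resolvers.finders.dirfile {
  filename = "/data/projects/manual/**/chapters/one.tex",
}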
end -- of closure
@@ -15775,7 +17020,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6202, stripped down to: 5149
+-- original size: 6569, stripped down to: 5304
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -15801,8 +17046,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -15818,8 +17068,8 @@ function resolvers.schemes.cleanname(specification)
end
local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
local function runcurl(name,cachename)
- local command="curl --silent --create-dirs --output "..cachename.." "..name
- os.spawn(command)
+ local command="curl --silent --insecure --create-dirs --output "..cachename.." "..name
+ os.execute(command)
end
local function fetch(specification)
local original=specification.original
@@ -15951,7 +17201,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-lua"] = package.loaded["data-lua"] or true
--- original size: 4237, stripped down to: 3177
+-- original size: 4313, stripped down to: 3227
if not modules then modules={} end modules ['data-lua']={
version=1.001,
@@ -15960,7 +17210,7 @@ if not modules then modules={} end modules ['data-lua']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local resolvers,package=resolvers,package
+local package,lpeg=package,lpeg
local gsub=string.gsub
local concat=table.concat
local addsuffix=file.addsuffix
@@ -15971,9 +17221,11 @@ local luaformats={ 'TEXINPUTS','LUAINPUTS' }
local libformats={ 'CLUAINPUTS' }
local helpers=package.helpers or {}
local methods=helpers.methods or {}
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+helpers.report=logs.reporter("resolvers","libraries")
trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
trackers.register("resolvers.locating",function(v) helpers.trace=v end)
-helpers.report=logs.reporter("resolvers","libraries")
helpers.sequence={
"already loaded",
"preload table",
@@ -15988,7 +17240,7 @@ helpers.sequence={
}
local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
function helpers.cleanpath(path)
- return resolvers.resolve(lpegmatch(pattern,path))
+ return resolveprefix(lpegmatch(pattern,path))
end
local loadedaslib=helpers.loadedaslib
local getextraluapaths=package.extraluapaths
@@ -16058,7 +17310,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-aux"] = package.loaded["data-aux"] or true
--- original size: 2394, stripped down to: 2005
+-- original size: 2431, stripped down to: 1996
if not modules then modules={} end modules ['data-aux']={
version=1.001,
@@ -16072,8 +17324,8 @@ local type,next=type,next
local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
local resolvers=resolvers
local report_scripts=logs.reporter("resolvers","scripts")
-function resolvers.updatescript(oldname,newname)
- local scriptpath="scripts/context/lua"
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="context/lua"
newname=file.addsuffix(newname,"lua")
local oldscript=resolvers.cleanpath(oldname)
if trace_locating then
@@ -16125,7 +17377,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
--- original size: 2600, stripped down to: 1627
+-- original size: 2601, stripped down to: 1627
if not modules then modules={} end modules ['data-tmf']={
version=1.001,
@@ -16181,7 +17433,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-lst"] = package.loaded["data-lst"] or true
--- original size: 2654, stripped down to: 2301
+-- original size: 2734, stripped down to: 2354
if not modules then modules={} end modules ['data-lst']={
version=1.001,
@@ -16190,10 +17442,13 @@ if not modules then modules={} end modules ['data-lst']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local find,concat,upper,format=string.find,table.concat,string.upper,string.format
+local rawget,type,next=rawget,type,next
+local find,concat,upper=string.find,table.concat,string.upper
local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
-resolvers.listers=resolvers.listers or {}
local resolvers=resolvers
+local listers=resolvers.listers or {}
+resolvers.listers=listers
+local resolveprefix=resolvers.resolve
local report_lists=logs.reporter("resolvers","lists")
local function tabstr(str)
if type(str)=='table' then
@@ -16202,7 +17457,7 @@ local function tabstr(str)
return str
end
end
-function resolvers.listers.variables(pattern)
+function listers.variables(pattern)
local instance=resolvers.instance
local environment=instance.environment
local variables=instance.variables
@@ -16223,10 +17478,10 @@ function resolvers.listers.variables(pattern)
for key,value in sortedpairs(configured) do
if key~="" and (pattern=="" or find(upper(key),pattern)) then
report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset")
end
end
instance.environment=fastcopy(env)
@@ -16234,15 +17489,15 @@ function resolvers.listers.variables(pattern)
instance.expansions=fastcopy(exp)
end
local report_resolved=logs.reporter("system","resolved")
-function resolvers.listers.configurations()
+function listers.configurations()
local configurations=resolvers.instance.specification
for i=1,#configurations do
- report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ report_resolved("file : %s",resolveprefix(configurations[i]))
end
report_resolved("")
local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
for i=1,#list do
- local li=resolvers.resolve(list[i])
+ local li=resolveprefix(list[i])
if lfs.isdir(li) then
report_resolved("path - %s",li)
else
@@ -16547,7 +17802,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
--- original size: 5951, stripped down to: 4922
+-- original size: 5955, stripped down to: 4926
if not modules then modules={} end modules ['luat-fmt']={
version=1.001,
@@ -16635,7 +17890,7 @@ function environment.make_format(name)
end
local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
report_format("running command: %s\n",command)
- os.spawn(command)
+ os.execute(command)
local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
local mp=dir.glob(pattern)
if mp then
@@ -16670,7 +17925,7 @@ function environment.run_format(name,data,more)
else
local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
report_format("running command: %s",command)
- os.spawn(command)
+ os.execute(command)
end
end
end
@@ -16681,8 +17936,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 685064
--- stripped bytes : 242353
+-- original bytes : 745618
+-- stripped bytes : 269191
-- end library merge
@@ -16781,17 +18036,18 @@ local ownlibs = { -- order can be made better
}
+-- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
+-- c:/data/develop/context/sources/data-tmf.lua
+
local ownlist = {
- '.',
- ownpath ,
- ownpath .. "/../sources", -- HH's development path
+ -- '.',
+ -- ownpath ,
+ owntree .. "/../../../../context/sources", -- HH's development path
owntree .. "/../../texmf-local/tex/context/base",
owntree .. "/../../texmf-context/tex/context/base",
- owntree .. "/../../texmf-dist/tex/context/base",
owntree .. "/../../texmf/tex/context/base",
owntree .. "/../../../texmf-local/tex/context/base",
owntree .. "/../../../texmf-context/tex/context/base",
- owntree .. "/../../../texmf-dist/tex/context/base",
owntree .. "/../../../texmf/tex/context/base",
}
@@ -16907,6 +18163,7 @@ local helpinfo = [[
<category name="basic">
<subcategory>
<flag name="script"><short>run an mtx script (lua preferred method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="evaluate"><short>run code passed on the commandline (between quotes)</short></flag>
<flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
<flag name="resolve"><short>resolve prefixed arguments</short></flag>
<flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
@@ -16932,6 +18189,7 @@ local helpinfo = [[
<flag name="verbose"><short>give a bit more info</short></flag>
<flag name="trackers" value="list"><short>enable given trackers</short></flag>
<flag name="progname" value="str"><short>format or backend</short></flag>
+ <flag name="systeminfo" value="str"><short>show current operating system, processor, etc</short></flag>
</subcategory>
<subcategory>
<flag name="edit"><short>launch editor with found file</short></flag>
@@ -17561,6 +18819,39 @@ function runners.associate(filename)
os.launch(filename)
end
+function runners.evaluate(code,filename) -- for Luigi
+ if code == "loop" then
+ while true do
+ io.write("> ")
+ local code = io.read()
+ if code ~= "" then
+ local temp = string.match(code,"^= (.*)$")
+ if temp then
+ code = "print("..temp..")"
+ end
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("! " .. (message or code).."\n")
+ else
+ io.write(compiled())
+ end
+ end
+ end
+ else
+ if type(code) ~= "string" or code == "" then
+ code = filename
+ end
+ if code ~= "" then
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("invalid lua code: " .. (message or code))
+ return
+ end
+ io.write(compiled())
+ end
+ end
+end
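+-- Usage sketch (hedged, mirroring the --evaluate flag documented in the
+-- help info above): passing a snippet runs it once, passing the literal
+-- word "loop" starts the small read-eval loop implemented above, where an
+-- input line starting with "= " is wrapped in print().
+--
+--   mtxrun --evaluate="print(os.platform)"
+--   mtxrun --evaluate=loop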
+
function runners.gethelp(filename)
local url = environment.argument("url")
if url and url ~= "" then
@@ -17572,6 +18863,15 @@ function runners.gethelp(filename)
end
end
+function runners.systeminfo()
+ report("architecture : %s",os.platform or "<unset>")
+ report("operating system : %s",os.name or "<unset>")
+ report("file architecture : %s",os.type or "<unset>")
+ report("binary path : %s",os.selfdir or "<unset>")
+ report("binary suffix : %s",os.binsuffix or "<unset>")
+ report("library suffix : %s",os.libsuffix or "<unset>")
+end
+
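+-- Usage sketch (assumption, matching the --systeminfo flag added to the
+-- help info): "mtxrun --systeminfo" simply dumps the os.* fields queried
+-- above (platform, name, type, selfdir, binsuffix, libsuffix), each
+-- reported as "<label> : <value>" or "<unset>" when unavailable.
+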
-- this is a bit dirty ... first we store the first filename and next we
-- split the arguments so that we only see the ones meant for this script
-- ... later we will use the second half
@@ -17687,16 +18987,13 @@ end
if e_argument("ansi") then
- local formatters = string.formatters
+ logs.setformatters("ansi")
- logs.setformatters {
- report_yes = formatters["%-15s | %s"],
- report_nop = formatters["%-15s |"],
- subreport_yes = formatters["%-15s | %s | %s"],
- subreport_nop = formatters["%-15s | %s |"],
- status_yes = formatters["%-15s : %s\n"],
- status_nop = formatters["%-15s :\n"],
- }
+ local script = e_argument("script") or e_argument("scripts")
+
+ if type(script) == "string" then
+        logs.writer("\027]0;"..script.."\007") -- for Alan to test
+ end
end
@@ -17715,14 +19012,26 @@ if e_argument("script") or e_argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
+elseif e_argument("evaluate") then
+
+ runners.evaluate(e_argument("evaluate"),filename)
+
elseif e_argument("selfmerge") then
-- embed used libraries
runners.loadbase()
local found = locate_libs()
+
if found then
- utilities.merger.selfmerge(own.name,own.libs,{ found })
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfmerge(mtxrun,own.libs,{ found })
+ application.report("runner updated on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
+ application.report("runner updated on relative path: %s",own.name)
+ end
end
elseif e_argument("selfclean") then
@@ -17730,7 +19039,15 @@ elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
- utilities.merger.selfclean(own.name)
+
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfclean(mtxrun)
+ application.report("runner cleaned on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfclean(own.name)
+ application.report("runner cleaned on relative path: %s",own.name)
+ end
elseif e_argument("selfupdate") then
@@ -17972,6 +19289,8 @@ elseif e_argument("version") then
application.version()
+    application.report("source path: %s",environment.ownbin)
+
elseif e_argument("directives") then
directives.show()
@@ -17989,6 +19308,10 @@ elseif e_argument("exporthelp") then
runners.loadbase()
application.export(e_argument("exporthelp"),filename)
+elseif e_argument("systeminfo") then
+
+ runners.systeminfo()
+
elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
diff --git a/scripts/context/stubs/unix/mtxrunjit b/scripts/context/stubs/unix/mtxrunjit
new file mode 100644
index 000000000..117105aa5
--- /dev/null
+++ b/scripts/context/stubs/unix/mtxrunjit
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+luajittex --luaonly $(dirname $0)/mtxrun "$@"
+
+# luajittex --luaonly ${0%jit} "$@"
diff --git a/scripts/context/stubs/unix/pstopdf b/scripts/context/stubs/unix/pstopdf
deleted file mode 100644
index 116f5f4a3..000000000
--- a/scripts/context/stubs/unix/pstopdf
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-mtxrun --script pstopdf "$@"
diff --git a/scripts/context/stubs/win64/context.exe b/scripts/context/stubs/win64/context.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/context.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/contextjit.exe b/scripts/context/stubs/win64/contextjit.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/contextjit.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/ctxtools.exe b/scripts/context/stubs/win64/ctxtools.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/ctxtools.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/luatools.exe b/scripts/context/stubs/win64/luatools.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/luatools.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/metatex.exe b/scripts/context/stubs/win64/metatex.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/metatex.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/mptopdf.exe b/scripts/context/stubs/win64/mptopdf.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/mptopdf.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxrun.dll b/scripts/context/stubs/win64/mtxrun.dll
new file mode 100644
index 000000000..910502735
--- /dev/null
+++ b/scripts/context/stubs/win64/mtxrun.dll
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxrun.exe b/scripts/context/stubs/win64/mtxrun.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/mtxrun.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
new file mode 100644
index 000000000..edfeba8dd
--- /dev/null
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -0,0 +1,19363 @@
+#!/usr/bin/env texlua
+
+-- for k, v in next, _G.string do
+-- local tv = type(v)
+-- if tv == "table" then
+-- for kk, vv in next, v do
+-- print(k,kk,vv)
+-- end
+-- else
+-- print(tv,k,v)
+-- end
+-- end
+
+if not modules then modules = { } end modules ['mtxrun'] = {
+ version = 1.001,
+ comment = "runner, lua replacement for texmfstart.rb",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- one can make a stub:
+--
+-- #!/bin/sh
+-- env LUATEXDIR=/....../texmf/scripts/context/lua luatex --luaonly mtxrun.lua "$@"
+
+-- filename : mtxrun.lua
+-- comment : companion to context.tex
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+-- This script is based on texmfstart.rb but does not use kpsewhich to
+-- locate files. Although kpse is a library, it never opened up its
+-- interface to other programs (especially scripting languages), so we do
+-- the lookups ourselves. The Lua variant evolved out of an experimental
+-- Ruby one. Interestingly, using a scripting language instead of C does
+-- not carry a speed penalty; the Lua variant is actually more efficient,
+-- especially when multiple calls to kpsewhich are involved. The Lua
+-- library also gives far more control.
+
+-- to be done / considered
+--
+-- support for --exec or make it default
+-- support for jar files (or maybe not, never used, too messy)
+-- support for $RUBYINPUTS cum suis (if still needed)
+-- remember for subruns: _CTX_K_V_#{original}_
+-- remember for subruns: _CTX_K_S_#{original}_
+-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
+
+-- begin library merge
+
+
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-lua"] = package.loaded["l-lua"] or true
+
+-- original size: 3888, stripped down to: 2197
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
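+-- Note (added for clarity, not part of the original): the wrappers above
+-- flush pending io output before an external program is spawned or executed,
+-- so that anything still buffered by the parent process shows up before the
+-- child's output instead of after it.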
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-package"] = package.loaded["l-package"] or true
+
+-- original size: 10587, stripped down to: 7815
+
+if not modules then modules={} end modules ['l-package']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type=type
+local gsub,format,find=string.gsub,string.format,string.find
+local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
+local package=package
+local searchers=package.searchers or package.loaders
+local filejoin=file and file.join or function(path,name) return path.."/"..name end
+local isreadable=file and file.is_readable or function(name) local f=io.open(name) if f then f:close() return true end end
+local addsuffix=file and file.addsuffix or function(name,suffix) return name.."."..suffix end
+local function cleanpath(path)
+ return path
+end
+local pattern=Cs((((1-S("\\/"))^0*(S("\\/")^1/"/"))^0*(P(".")^1/"/"+P(1))^1)*-1)
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+local offset=luarocks and 1 or 0
+local helpers=package.helpers or {
+ cleanpath=cleanpath,
+ lualibfile=lualibfile,
+ trace=false,
+ report=function(...) print(format(...)) end,
+ builtin={
+ ["preload table"]=searchers[1+offset],
+ ["path specification"]=searchers[2+offset],
+ ["cpath specification"]=searchers[3+offset],
+ ["all in one fallback"]=searchers[4+offset],
+ },
+ methods={},
+ sequence={
+ "already loaded",
+ "preload table",
+ "qualified path",
+ "lua extra list",
+ "lib extra list",
+ "path specification",
+ "cpath specification",
+ "all in one fallback",
+ "not loaded",
+ }
+}
+package.helpers=helpers
+local methods=helpers.methods
+local builtin=helpers.builtin
+local extraluapaths={}
+local extralibpaths={}
+local luapaths=nil
+local libpaths=nil
+local oldluapath=nil
+local oldlibpath=nil
+local nofextralua=-1
+local nofextralib=-1
+local nofpathlua=-1
+local nofpathlib=-1
+local function listpaths(what,paths)
+ local nofpaths=#paths
+ if nofpaths>0 then
+ for i=1,nofpaths do
+ helpers.report("using %s path %i: %s",what,i,paths[i])
+ end
+ else
+ helpers.report("no %s paths defined",what)
+ end
+ return nofpaths
+end
+local function getextraluapaths()
+ if helpers.trace and #extraluapaths~=nofextralua then
+ nofextralua=listpaths("extra lua",extraluapaths)
+ end
+ return extraluapaths
+end
+local function getextralibpaths()
+ if helpers.trace and #extralibpaths~=nofextralib then
+ nofextralib=listpaths("extra lib",extralibpaths)
+ end
+ return extralibpaths
+end
+local function getluapaths()
+ local luapath=package.path or ""
+ if oldluapath~=luapath then
+ luapaths=file.splitpath(luapath,";")
+ oldluapath=luapath
+ nofpathlua=-1
+ end
+ if helpers.trace and #luapaths~=nofpathlua then
+ nofpathlua=listpaths("builtin lua",luapaths)
+ end
+ return luapaths
+end
+local function getlibpaths()
+ local libpath=package.cpath or ""
+ if oldlibpath~=libpath then
+ libpaths=file.splitpath(libpath,";")
+ oldlibpath=libpath
+ nofpathlib=-1
+ end
+ if helpers.trace and #libpaths~=nofpathlib then
+ nofpathlib=listpaths("builtin lib",libpaths)
+ end
+ return libpaths
+end
+package.luapaths=getluapaths
+package.libpaths=getlibpaths
+package.extraluapaths=getextraluapaths
+package.extralibpaths=getextralibpaths
+local hashes={
+ lua={},
+ lib={},
+}
+local function registerpath(tag,what,target,...)
+ local pathlist={... }
+ local cleanpath=helpers.cleanpath
+ local trace=helpers.trace
+ local report=helpers.report
+ local hash=hashes[what]
+ local function add(path)
+ local path=cleanpath(path)
+ if not hash[path] then
+ target[#target+1]=path
+ hash[path]=true
+ if trace then
+ report("registered %s path %s: %s",tag,#target,path)
+ end
+ else
+ if trace then
+ report("duplicate %s path: %s",tag,path)
+ end
+ end
+ end
+ for p=1,#pathlist do
+ local path=pathlist[p]
+ if type(path)=="table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+ return target
+end
+helpers.registerpath=registerpath
+function package.extraluapath(...)
+ registerpath("extra lua","lua",extraluapaths,...)
+end
+function package.extralibpath(...)
+ registerpath("extra lib","lib",extralibpaths,...)
+end
+local function loadedaslib(resolved,rawname)
+ local base=gsub(rawname,"%.","_")
+ local init="luaopen_"..gsub(base,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+helpers.loadedaslib=loadedaslib
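+-- Example (hypothetical names, added for clarity): loadedaslib("/tmp/foo/bar.dll","foo.bar")
+-- ends up calling package.loadlib("/tmp/foo/bar.dll","luaopen_foo_bar"),
+-- i.e. the init function follows the usual luaopen_ convention with dots
+-- replaced by underscores.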
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace=helpers.trace
+ for p=1,#paths do
+ local path=paths[p]
+ local resolved=filejoin(path,name)
+ if trace then
+ helpers.report("%s path, identifying '%s' on '%s'",what,name,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ helpers.report("%s path, '%s' found on '%s'",what,name,resolved)
+ end
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
+ end
+ end
+ end
+end
+helpers.loadedbypath=loadedbypath
+local function loadedbyname(name,rawname)
+ if find(name,"^/") or find(name,"^[a-zA-Z]:/") then
+ local trace=helpers.trace
+ if trace then
+ helpers.report("qualified name, identifying '%s'",name)
+ end
+ if isreadable(name) then
+ if trace then
+ helpers.report("qualified name, '%s' found",name)
+ end
+ return loadfile(name)
+ end
+ end
+end
+helpers.loadedbyname=loadedbyname
+methods["already loaded"]=function(name)
+ return package.loaded[name]
+end
+methods["preload table"]=function(name)
+ return builtin["preload table"](name)
+end
+methods["qualified path"]=function(name)
+ return loadedbyname(addsuffix(lualibfile(name),"lua"),name)
+end
+methods["lua extra list"]=function(name)
+ return loadedbypath(addsuffix(lualibfile(name),"lua"),name,getextraluapaths(),false,"lua")
+end
+methods["lib extra list"]=function(name)
+ return loadedbypath(addsuffix(lualibfile(name),os.libsuffix),name,getextralibpaths(),true,"lib")
+end
+methods["path specification"]=function(name)
+ getluapaths()
+ return builtin["path specification"](name)
+end
+methods["cpath specification"]=function(name)
+ getlibpaths()
+ return builtin["cpath specification"](name)
+end
+methods["all in one fallback"]=function(name)
+ return builtin["all in one fallback"](name)
+end
+methods["not loaded"]=function(name)
+ if helpers.trace then
+ helpers.report("unable to locate '%s'",name or "?")
+ end
+ return nil
+end
+local level=0
+local used={}
+helpers.traceused=false
+function helpers.loaded(name)
+ local sequence=helpers.sequence
+ level=level+1
+ for i=1,#sequence do
+ local method=sequence[i]
+ if helpers.trace then
+ helpers.report("%s, level '%s', method '%s', name '%s'","locating",level,method,name)
+ end
+ local result,rest=methods[method](name)
+ if type(result)=="function" then
+ if helpers.trace then
+ helpers.report("%s, level '%s', method '%s', name '%s'","found",level,method,name)
+ end
+ if helpers.traceused then
+ used[#used+1]={ level=level,name=name }
+ end
+ level=level-1
+ return result,rest
+ end
+ end
+ level=level-1
+ return nil
+end
+function helpers.showused()
+ local n=#used
+ if n>0 then
+ helpers.report("%s libraries loaded:",n)
+ helpers.report()
+ for i=1,n do
+ local u=used[i]
+ helpers.report("%i %a",u.level,u.name)
+ end
+ helpers.report()
+ end
+end
+function helpers.unload(name)
+ if helpers.trace then
+ if package.loaded[name] then
+ helpers.report("unloading, name '%s', %s",name,"done")
+ else
+ helpers.report("unloading, name '%s', %s",name,"not loaded")
+ end
+ end
+ package.loaded[name]=nil
+end
+table.insert(searchers,1,helpers.loaded)
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
+
+-- original size: 36977, stripped down to: 20349
+
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local sign=S('+-')
+local zero=P('0')
+local digit=R('09')
+local octdigit=R("07")
+local lowercase=R("az")
+local uppercase=R("AZ")
+local underscore=P("_")
+local hexdigit=digit+lowercase+uppercase
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local period=P(".")
+local comma=P(",")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\254\255')
+local utfbom_16_le=P('\255\254')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfstricttype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utfbom_32_be=utfbom_32_be
+patterns.utfbom_32_le=utfbom_32_le
+patterns.utfbom_16_be=utfbom_16_be
+patterns.utfbom_16_le=utfbom_16_le
+patterns.utfbom_8=utfbom_8
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfstricttype=utfstricttype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
+patterns.stripper=stripper
+patterns.fullstripper=fullstripper
+patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
+patterns.lowercase=lowercase
+patterns.uppercase=uppercase
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=comma
+patterns.commaspacer=comma*spacer^0
+patterns.period=period
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=underscore
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.digit=digit
+patterns.octdigit=octdigit
+patterns.hexdigit=hexdigit
+patterns.sign=sign
+patterns.cardinal=digit^1
+patterns.integer=sign^-1*digit^1
+patterns.unsigned=digit^0*period*digit^1
+patterns.float=sign^-1*patterns.unsigned
+patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
+patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
+patterns.oct=zero*octdigit^1
+patterns.octal=patterns.oct
+patterns.HEX=zero*P("X")*(digit+uppercase)^1
+patterns.hex=zero*P("x")*(digit+lowercase)^1
+patterns.hexadecimal=zero*S("xX")*hexdigit^1
+patterns.hexafloat=sign^-1*zero*S("xX")*(hexdigit^0*period*hexdigit^1+hexdigit^1*period*hexdigit^0+hexdigit^1)*(S("pP")*sign^-1*hexdigit^1)^-1
+patterns.decafloat=sign^-1*(digit^0*period*digit^1+digit^1*period*digit^0+digit^1)*S("eE")*sign^-1*digit^1
+patterns.propername=(uppercase+lowercase+underscore)*(uppercase+lowercase+underscore+digit)^0*endofstring
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*((patterns.quoted+nonwhitespace^1+whitespace^1/""*(P(-1)+Cc(" ")))^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
+end
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
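+-- Quick illustration (not in the original source): both entry points share
+-- the cached splitter patterns, e.g.
+--
+--   lpeg.split(",","a,b,c")    --> { "a", "b", "c" }
+--   string.split("a,,c",",")   --> { "a", "", "c" }
+--   string.split("anything")   --> { "anything" }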
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
+end
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
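+-- Quick illustration (not in the original source):
+--
+--   local p = lpeg.replacer("l","L")
+--   lpegmatch(p,"hello")                                    --> "heLLo"
+--   lpegmatch(lpeg.replacer{{"o","0"},{"e","3"}},"hello")   --> "h3ll0"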
+function lpeg.finder(lst,makefunction,isutf)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
+ else
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=(1-pattern)^0*pattern*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+local splitters_s,splitters_p={},{}
+function lpeg.beforesuffix(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=C((1-pattern)^0)*pattern*endofstring
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.afterprefix(separator)
+ local splitter=splitters_p[separator]
+ if not splitter then
+ local pattern=P(separator)
+ splitter=pattern*C(anything^0)
+ splitters_p[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+end
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
+end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+ new[i]=old[i]
+ end
+ return new
+end
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
+end
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
+ end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
+end
+local p_false=P(false)
+local p_true=P(true)
+local function make(t)
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
+ else
+ return v,x..k
+ end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
+ end
+ return tt,x
+ end
+ end
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
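+-- Quick illustration (not in the original source):
+--
+--   string.tohex("abc")        --> "616263"
+--   string.toHEX("\255\000")   --> "FF00"
+--   string.tobytes("414243")   --> "ABC"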
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-function"] = package.loaded["l-function"] or true
+
+-- original size: 361, stripped down to: 322
+
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-string"] = package.loaded["l-string"] or true
+
+-- original size: 5694, stripped down to: 2827
+
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
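+-- Quick illustration (not in the original source): topattern turns a simple
+-- shell-like mask into a lua string pattern,
+--
+--   string.topattern("foo*.lua")            --> "foo.*%.lua"
+--   string.topattern("foo.lua",false,true)  --> "^foo%.lua$"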
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-table"] = package.loaded["l-table"] or true
+
+-- original size: 35724, stripped down to: 21525
+
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
+function table.strip(tab)
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
+ end
+ end
+ return lst
+end
+function table.keys(t)
+ if t then
+ local keys,k={},0
+ for key in next,t do
+ k=k+1
+ keys[k]=key
+ end
+ return keys
+ else
+ return {}
+ end
+end
+local function compare(a,b)
+ local ta=type(a)
+ if ta=="number" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ elseif tb=="string" then
+ return tostring(a)<b
+ end
+ elseif ta=="string" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ else
+ return a<tostring(b)
+ end
+ end
+ return tostring(a)<tostring(b)
+end
+local function sortedkeys(tab)
+ if tab then
+ local srt,category,s={},0,0
+ for key in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ elseif category==1 then
+ if type(key)~="string" then
+ category=3
+ end
+ elseif category==2 then
+ if type(key)~="number" then
+ category=3
+ end
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=1
+ elseif tkey=="number" then
+ category=2
+ else
+ category=3
+ end
+ end
+ end
+ if s<2 then
+ elseif category==3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt,cmp)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k in next,v do
+ keys[k]=true
+ end
+ end
+ return sortedkeys(keys)
+end
+table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
+table.sortedhashkeys=sortedhashkeys
+local function nothing() end
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local m=#s
+ if m==1 then
+ return next,t
+ elseif m>0 then
+ local n=0
+ return function()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ end
+ end
+ end
+ return nothing
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
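+-- Quick illustration (not in the original source): iteration in sorted key
+-- order, optionally with a custom comparer passed as second argument,
+--
+--   for k, v in table.sortedhash { beta = 2, alpha = 1 } do
+--     print(k,v) -- alpha 1, then beta 2
+--   end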
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.merged(...)
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
+ end
+ end
+ return t
+end
+function table.imerged(...)
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
+ else
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
+end
+function table.tohash(t,value)
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
+ end
+ end
+ return h
+end
+function table.fromhash(t)
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
+ 'NaN','goto',
+}
+local function simple_table(t)
+ local nt=#t
+ if nt>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==nt then
+ local tt={}
+ for i=1,nt do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ if hexify then
+ tt[i]=format("0x%X",v)
+ else
+ tt[i]=tostring(v)
+ end
+ elseif tv=="string" then
+ tt[i]=format("%q",v)
+ elseif tv=="boolean" then
+ tt[i]=v and "true" or "false"
+ else
+ return nil
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+local propername=patterns.propername
+local function dummy() end
+local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,name and "true" or "false"))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root)~=nil then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local tv=type(v)
+ local tk=type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if tv=="number" then
+ if hexify then
+ handle(format("%s 0x%X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif tv=="string" then
+ handle(format("%s %q,",depth,v))
+ elseif tv=="table" then
+ if next(v)==nil then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif tv=="boolean" then
+ handle(format("%s %s,",depth,v and "true" or "false"))
+ elseif tv=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif tv=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
+ else
+ handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ elseif tv=="table" then
+ if next(v)==nil then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,k and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif tv=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false"))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v and "true" or "false"))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v and "true" or "false"))
+ end
+ elseif tv=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
+ end
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%X]={",name))
+ else
+ handle("["..name.."]={")
+ end
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root)~=nil then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
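+-- Usage sketch (comments only): table.serialize returns a string with a Lua
+-- representation of the given table; the name argument becomes the prefix.
+--
+--   print(table.serialize({ 1, 2, x = "y" }, "t"))
+--   -- t={
+--   --  1,
+--   --  2,
+--   --  ["x"]="y",
+--   -- }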
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ end
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+table.identical=identical
+table.are_equal=are_equal
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
+end
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
+ end
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
+end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
+ end
+ return t
+ end
+end
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+function table.is_empty(t)
+ return not t or next(t)==nil
+end
+function table.has_one_entry(t)
+ return t and next(t,next(t))==nil
+end
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
+end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
+end
+function table.values(t,s)
+ if t then
+ local values,keys,v={},{},0
+ for key,value in next,t do
+ if not keys[value] then
+ v=v+1
+ values[v]=value
+ keys[value]=key
+ end
+ end
+ if s then
+ sort(values)
+ end
+ return values
+ else
+ return {}
+ end
+end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n~=nil do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
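+-- Usage sketch (comments only): table.filtered iterates the entries whose keys
+-- match a Lua string pattern; passing true as the third argument sorts the keys
+-- first.
+--
+--   local t = { foo = 1, bar = 2, foobar = 3 }
+--   for k, v in table.filtered(t, "^foo") do
+--     print(k, v)   -- the foo and foobar entries only
+--   end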
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-io"] = package.loaded["l-io"] or true
+
+-- original size: 8643, stripped down to: 6232
+
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";",1,true) then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
+end
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ local step
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
+ else
+ step=floor(size/(1024*1024))*1024*1024/8
+ end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
+end
+function io.noflines(f)
+ if type(f)=="string" then
+ local f=io.open(f)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
+end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
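+-- Usage sketch (comments only, the filename is hypothetical): io.readnumber
+-- reads big endian unsigned integers of 1, 2, 3, 4, 8 or 12 bytes (negative
+-- sizes read little endian); a third argument first seeks to that offset.
+--
+--   local f = io.open("data.bin", "rb")
+--   local version = io.readnumber(f, 4)      -- 4 bytes at the current position
+--   local count   = io.readnumber(f, 16, 2)  -- seek to offset 16, read 2 bytes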
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-number"] = package.loaded["l-number"] or true
+
+-- original size: 4939, stripped down to: 2830
+
+if not modules then modules={} end modules ['l-number']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tostring,tonumber=tostring,tonumber
+local format,floor,match,rep=string.format,math.floor,string.match,string.rep
+local concat,insert=table.concat,table.insert
+local lpegmatch=lpeg.match
+number=number or {}
+local number=number
+if bit32 then
+ local btest,bor=bit32.btest,bit32.bor
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ number.hasbit=btest
+ number.setbit=bor
+ function number.setbit(x,p)
+ return btest(x,p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return btest(x,p) and x-p or x
+ end
+else
+ function number.bit(p)
+ return 2^(p-1)
+ end
+ function number.hasbit(x,p)
+ return x%(p+p)>=p
+ end
+ function number.setbit(x,p)
+ return (x%(p+p)>=p) and x or x+p
+ end
+ function number.clearbit(x,p)
+ return (x%(p+p)>=p) and x-p or x
+ end
+end
+if bit32 then
+ local bextract=bit32.extract
+ local t={
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ "0","0","0","0","0","0","0","0",
+ }
+ function number.tobitstring(b,m)
+ local n=32
+ for i=0,31 do
+ local v=bextract(b,i)
+ local k=32-i
+ if v==1 then
+ n=k
+ t[k]="1"
+ else
+ t[k]="0"
+ end
+ end
+ if m then
+ m=33-m*8
+ if m<1 then
+ m=1
+ end
+ return concat(t,"",m)
+ elseif n<8 then
+ return concat(t)
+ elseif n<16 then
+ return concat(t,"",9)
+ elseif n<24 then
+ return concat(t,"",17)
+ else
+ return concat(t,"",25)
+ end
+ end
+else
+ function number.tobitstring(n,m)
+ if n>0 then
+ local t={}
+ while n>0 do
+ insert(t,1,n%2>0 and 1 or 0)
+ n=floor(n/2)
+ end
+ local nn=8-#t%8
+ if nn>0 and nn<8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m=m*8-#t
+ if m>0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+ return rep("00000000",m)
+ else
+ return "00000000"
+ end
+ end
+end
+function number.valid(str,default)
+ return tonumber(str) or default or nil
+end
+function number.toevenhex(n)
+ local s=format("%X",n)
+ if #s%2==0 then
+ return s
+ else
+ return "0"..s
+ end
+end
+local one=lpeg.C(1-lpeg.S('')/tonumber)^1
+function number.toset(n)
+ return lpegmatch(one,tostring(n))
+end
+local function bits(n,i,...)
+ if n>0 then
+ local m=n%2
+ local n=floor(n/2)
+ if m>0 then
+ return bits(n,i+1,i,...)
+ else
+ return bits(n,i+1,...)
+ end
+ else
+ return...
+ end
+end
+function number.bits(n)
+ return { bits(n,1) }
+end
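+-- Usage sketch (comments only): tobitstring renders a number as binary, padded
+-- to whole bytes; bits returns the 1-based positions of the set bits.
+--
+--   number.tobitstring(5)     -- "00000101"
+--   number.tobitstring(5, 2)  -- "0000000000000101"
+--   number.bits(5)            -- { 3, 1 }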
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-set"] = package.loaded["l-set"] or true
+
+-- original size: 1923, stripped down to: 1133
+
+if not modules then modules={} end modules ['l-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+set=set or {}
+local nums={}
+local tabs={}
+local concat=table.concat
+local next,type=next,type
+set.create=table.tohash
+function set.tonumber(t)
+ if next(t) then
+ local s=""
+ for k,v in next,t do
+ if v then
+ s=s.." "..k
+ end
+ end
+ local n=nums[s]
+ if not n then
+ n=#tabs+1
+ tabs[n]=t
+ nums[s]=n
+ end
+ return n
+ else
+ return 0
+ end
+end
+function set.totable(n)
+ if n==0 then
+ return {}
+ else
+ return tabs[n] or {}
+ end
+end
+function set.tolist(n)
+ if n==0 or not tabs[n] then
+ return ""
+ else
+ local t,n={},0
+ for k,v in next,tabs[n] do
+ if v then
+ n=n+1
+ t[n]=k
+ end
+ end
+ return concat(t," ")
+ end
+end
+function set.contains(n,s)
+ if type(n)=="table" then
+ return n[s]
+ elseif n==0 then
+ return false
+ else
+ local t=tabs[n]
+ return t and t[s]
+ end
+end
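+-- Usage sketch (comments only): a set is registered once and afterwards
+-- referenced by the small number that set.tonumber returns.
+--
+--   local n = set.tonumber { a = true, b = true }
+--   set.contains(n, "a")   -- true
+--   set.tolist(n)          -- "a b" (order not guaranteed)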
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-os"] = package.loaded["l-os"] or true
+
+-- original size: 15832, stripped down to: 9456
+
+if not modules then modules={} end modules ['l-os']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local os=os
+local date,time=os.date,os.time
+local find,format,gsub,upper,gmatch=string.find,string.format,string.gsub,string.upper,string.gmatch
+local concat=table.concat
+local random,ceil,randomseed=math.random,math.ceil,math.randomseed
+local rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring=rawget,rawset,type,getmetatable,setmetatable,tonumber,tostring
+math.initialseed=tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+randomseed(math.initialseed)
+if not os.__getenv__ then
+ os.__getenv__=os.getenv
+ os.__setenv__=os.setenv
+ if os.env then
+ local osgetenv=os.getenv
+ local ossetenv=os.setenv
+ local osenv=os.env local _=osenv.PATH
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ if type(v)=="table" then
+ v=concat(v,";")
+ end
+ ossetenv(K,v)
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ else
+ local ossetenv=os.setenv
+ local osgetenv=os.getenv
+ local osenv={}
+ function os.setenv(k,v)
+ if v==nil then
+ v=""
+ end
+ local K=upper(k)
+ osenv[K]=v
+ end
+ function os.getenv(k)
+ local K=upper(k)
+ local v=osenv[K] or osgetenv(K) or osgetenv(k)
+ if v=="" then
+ return nil
+ else
+ return v
+ end
+ end
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+ os.env={}
+ setmetatable(os.env,{ __index=__index,__newindex=__newindex } )
+ end
+end
+local execute=os.execute
+local iopopen=io.popen
+function os.resultof(command)
+ local handle=iopopen(command,"r")
+ if handle then
+ local result=handle:read("*all") or ""
+ handle:close()
+ return result
+ else
+ return ""
+ end
+end
+if not io.fileseparator then
+ if find(os.getenv("PATH"),";",1,true) then
+ io.fileseparator,io.pathseparator,os.type="\\",";",os.type or "mswin"
+ else
+ io.fileseparator,io.pathseparator,os.type="/",":",os.type or "unix"
+ end
+end
+os.type=os.type or (io.pathseparator==";" and "windows") or "unix"
+os.name=os.name or (os.type=="windows" and "mswin" ) or "linux"
+if os.type=="windows" then
+ os.libsuffix,os.binsuffix,os.binsuffixes='dll','exe',{ 'exe','cmd','bat' }
+else
+ os.libsuffix,os.binsuffix,os.binsuffixes='so','',{ '' }
+end
+local launchers={
+ windows="start %s",
+ macosx="open %s",
+ unix="$BROWSER %s &> /dev/null &",
+}
+function os.launch(str)
+ execute(format(launchers[os.name] or launchers.unix,str))
+end
+if not os.times then
+ function os.times()
+ return {
+ utime=os.gettimeofday(),
+ stime=0,
+ cutime=0,
+ cstime=0,
+ }
+ end
+end
+local gettimeofday=os.gettimeofday or os.clock
+os.gettimeofday=gettimeofday
+local startuptime=gettimeofday()
+function os.runtime()
+ return gettimeofday()-startuptime
+end
+local resolvers=os.resolvers or {}
+os.resolvers=resolvers
+setmetatable(os,{ __index=function(t,k)
+ local r=resolvers[k]
+ return r and r(t,k) or nil
+end })
+local name,platform=os.name or "linux",os.getenv("MTX_PLATFORM") or ""
+local function guess()
+ local architecture=os.resultof("uname -m") or ""
+ if architecture~="" then
+ return architecture
+ end
+ architecture=os.getenv("HOSTTYPE") or ""
+ if architecture~="" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+if platform~="" then
+ os.platform=platform
+elseif os.type=="windows" then
+ function resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64",1,true) then
+ platform="win64"
+ else
+ platform="mswin"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="linux" then
+ function resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64",1,true) then
+ platform="linux-64"
+ elseif find(architecture,"ppc",1,true) then
+ platform="linux-ppc"
+ else
+ platform="linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="macosx" then
+ function resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("echo $HOSTTYPE") or ""
+ if architecture=="" then
+ platform="osx-intel"
+ elseif find(architecture,"i386",1,true) then
+ platform="osx-intel"
+ elseif find(architecture,"x86_64",1,true) then
+ platform="osx-64"
+ else
+ platform="osx-ppc"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="sunos" then
+ function resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"sparc",1,true) then
+ platform="solaris-sparc"
+ else
+ platform="solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="freebsd" then
+ function resolvers.platform(t,k)
+ local platform,architecture="",os.resultof("uname -m") or ""
+ if find(architecture,"amd64",1,true) then
+ platform="freebsd-amd64"
+ else
+ platform="freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+elseif name=="kfreebsd" then
+ function resolvers.platform(t,k)
+ local platform,architecture="",os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64",1,true) then
+ platform="kfreebsd-amd64"
+ else
+ platform="kfreebsd-i386"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+else
+ function resolvers.platform(t,k)
+ local platform="linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform=platform
+ return platform
+ end
+end
+os.newline=name=="windows" and "\013\010" or "\010"
+function resolvers.bits(t,k)
+ local bits=find(os.platform,"64",1,true) and 64 or 32
+ os.bits=bits
+ return bits
+end
+local t={ 8,9,"a","b" }
+function os.uuid()
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
+end
+local d
+function os.timezone(delta)
+ d=d or tonumber(tonumber(date("%H")-date("!%H")))
+ if delta then
+ if d>0 then
+ return format("+%02i:00",d)
+ else
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
+ end
+end
+local timeformat=format("%%s%s",os.timezone(true))
+local dateformat="!%Y-%m-%d %H:%M:%S"
+local lasttime=nil
+local lastdate=nil
+function os.fulltime(t,default)
+ t=t and tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=time()
+ end
+ if t~=lasttime then
+ lasttime=t
+ lastdate=format(timeformat,date(dateformat))
+ end
+ return lastdate
+end
+local dateformat="%Y-%m-%d %H:%M:%S"
+local lasttime=nil
+local lastdate=nil
+function os.localtime(t,default)
+ t=t and tonumber(t) or 0
+ if t>0 then
+ elseif default then
+ return default
+ else
+ t=time()
+ end
+ if t~=lasttime then
+ lasttime=t
+ lastdate=date(dateformat,t)
+ end
+ return lastdate
+end
+function os.converttime(t,default)
+ local t=tonumber(t)
+ if t and t>0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
+local memory={}
+local function which(filename)
+ local fullname=memory[filename]
+ if fullname==nil then
+ local suffix=file.suffix(filename)
+ local suffixes=suffix=="" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ local df=file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs=file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname=dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname=false
+ end
+ memory[filename]=fullname
+ end
+ return fullname
+end
+os.which=which
+os.where=which
+function os.today()
+ return date("!*t")
+end
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S")
+end
+if not os.sleep then
+ local socket=socket
+ function os.sleep(n)
+ if not socket then
+ socket=require("socket")
+ end
+ socket.sleep(n)
+ end
+end
+local function isleapyear(year)
+ return (year%400==0) or ((year%100~=0) and (year%4==0))
+end
+os.isleapyear=isleapyear
+local days={ 31,28,31,30,31,30,31,31,30,31,30,31 }
+local function nofdays(year,month)
+ if not month then
+ return isleapyear(year) and 366 or 365
+ else
+ return month==2 and isleapyear(year) and 29 or days[month]
+ end
+end
+os.nofdays=nofdays
+function os.weekday(day,month,year)
+ return date("%w",time { year=year,month=month,day=day })+1
+end
+function os.validdate(year,month,day)
+ if month<1 then
+ month=1
+ elseif month>12 then
+ month=12
+ end
+ if day<1 then
+ day=1
+ else
+ local max=nofdays(year,month)
+ if day>max then
+ day=max
+ end
+ end
+ return year,month,day
+end
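+-- Usage sketch (comments only): the date helpers clamp invalid input rather
+-- than raising an error.
+--
+--   os.isleapyear(2000)        -- true
+--   os.nofdays(2015, 2)        -- 28
+--   os.validdate(2015, 2, 31)  -- 2015, 2, 28 (day clamped to the month)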
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-file"] = package.loaded["l-file"] or true
+
+-- original size: 20949, stripped down to: 9945
+
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+local insert,concat=table.insert,table.concat
+local match,find,gmatch=string.match,string.find,string.gmatch
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local tricky=S("/\\")*P(-1)
+local attributes=lfs.attributes
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir,"lfs.isdir")
+end
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+end
+function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+end
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+local pattern=(noslashes^0*slashes)^0*noperiod^1*((period*C(noperiod^1))^1)*-1+Cc("")
+local function suffixesonly(name)
+ if name then
+ return lpegmatch(pattern,name)
+ else
+ return ""
+ end
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.suffixesonly=suffixesonly
+file.suffixes=suffixesonly
+file.dirname=pathpart
+file.extname=suffixonly
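+-- Usage sketch (comments only): the split helpers accept forward as well as
+-- backward slashes.
+--
+--   file.pathpart("a/b/c.lua")    -- "a/b"
+--   file.basename("a/b/c.lua")    -- "c.lua"
+--   file.nameonly("a/b/c.lua")    -- "c"
+--   file.suffixonly("a/b/c.lua")  -- "lua"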
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ if str then
+ return lpegmatch(pattern_d,str)
+ else
+ return "",str
+ end
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
+ return filename
+ end
+ end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename).."."..suffix
+ end
+end
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
+end
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+function file.is_writable(name)
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local someslash=S("\\/")
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=someslash*someslash*(1-someslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local reslasher=lpeg.replacer(S("\\/"),"/")
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(one,two,three,...)
+ if not two then
+ return one=="" and one or lpegmatch(stripper,one)
+ end
+ if one=="" then
+ return lpegmatch(stripper,three and concat({ two,three,... },"/") or two)
+ end
+ if lpegmatch(isnetwork,one) then
+ local one=lpegmatch(reslasher,one)
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
+ if lpegmatch(hasroot,two) then
+ return one..two
+ else
+ return one.."/"..two
+ end
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ else
+ return lpegmatch(deslasher,concat({ one,two,three,... },"/"))
+ end
+end
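+-- Usage sketch (comments only): file.join collapses duplicate slashes and
+-- keeps network and root prefixes intact.
+--
+--   file.join("a", "b", "c.lua")    -- "a/b/c.lua"
+--   file.join("/", "etc", "fstab")  -- "/etc/fstab"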
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local mswindrive=Cs(drivespec*(bwslash/"/"+fwslash)^0)
+local mswinuncpath=(bwslash+fwslash)*(bwslash+fwslash)*Cc("//")
+local splitstarter=(mswindrive+mswinuncpath+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor==true and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ newelements=concat(newelements,'/')
+ if anchor=="." and find(str,"^%./") then
+ return "./"..newelements
+ else
+ return newelements
+ end
+ end
+end
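+-- Usage sketch (comments only): collapsepath resolves "." and ".." segments;
+-- with anchor == true the path is first made absolute against the current
+-- directory.
+--
+--   file.collapsepath("a/b/../c")  -- "a/c"
+--   file.collapsepath("a/./b")     -- "a/b"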
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
+ end
+ end
+end
+file.readdata=io.loaddata
+file.savedata=io.savedata
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename)~=nil
+end
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename)~=nil
+end
+function file.strip(name,dir)
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
+end
+function lfs.mkdirs(path)
+ local full=""
+ for sub in gmatch(path,"(/*[^\\/]+)") do
+ full=full..sub
+ lfs.mkdir(full)
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-gzip"] = package.loaded["l-gzip"] or true
+
+-- original size: 1211, stripped down to: 1002
+
+if not modules then modules={} end modules ['l-gzip']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if not gzip then
+ return
+end
+local suffix,suffixes=file.suffix,file.suffixes
+function gzip.load(filename)
+ local f=io.open(filename,"rb")
+ if not f then
+ elseif suffix(filename)=="gz" then
+ f:close()
+ local g=gzip.open(filename,"rb")
+ if g then
+ local str=g:read("*all")
+ g:close()
+ return str
+ end
+ else
+ local str=f:read("*all")
+ f:close()
+ return str
+ end
+end
+function gzip.save(filename,data)
+ if suffix(filename)~="gz" then
+ filename=filename..".gz"
+ end
+ local f=io.open(filename,"wb")
+ if f then
+ local s=zlib.compress(data or "",9,nil,15+16)
+ f:write(s)
+ f:close()
+ return #s
+ end
+end
+function gzip.suffix(filename)
+ local suffix,extra=suffixes(filename)
+ local gzipped=extra=="gz"
+ return suffix,gzipped
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-md5"] = package.loaded["l-md5"] or true
+
+-- original size: 3248, stripped down to: 2266
+
+if not modules then modules={} end modules ['l-md5']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if not md5 then
+ md5=optionalrequire("md5")
+end
+if not md5 then
+ md5={
+ sum=function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa=function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+local md5,file=md5,file
+local gsub=string.gsub
+do
+ local patterns=lpeg and lpeg.patterns
+ if patterns then
+ local bytestoHEX=patterns.bytestoHEX
+ local bytestohex=patterns.bytestohex
+ local bytestodec=patterns.bytestodec
+ local lpegmatch=lpeg.match
+ local md5sum=md5.sum
+ if not md5.HEX then function md5.HEX(str) if str then return lpegmatch(bytestoHEX,md5sum(str)) end end end
+ if not md5.hex then function md5.hex(str) if str then return lpegmatch(bytestohex,md5sum(str)) end end end
+ if not md5.dec then function md5.dec(str) if str then return lpegmatch(bytestodec,md5sum(str)) end end end
+ end
+end
+function file.needsupdating(oldname,newname,threshold)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime=lfs.attributes(newname,"modification")
+ if not newtime then
+ return true
+ elseif newtime>=oldtime then
+ return false
+ elseif oldtime-newtime<(threshold or 1) then
+ return false
+ else
+ return true
+ end
+ else
+ return false
+ end
+end
+file.needs_updating=file.needsupdating
+function file.syncmtimes(oldname,newname)
+ local oldtime=lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
+ end
+end
+function file.checksum(name)
+ if md5 then
+ local data=io.loaddata(name)
+ if data then
+ return md5.HEX(data)
+ end
+ end
+ return nil
+end
+function file.loadchecksum(name)
+ if md5 then
+ local data=io.loaddata(name..".md5")
+ return data and (gsub(data,"%s",""))
+ end
+ return nil
+end
+function file.savechecksum(name,checksum)
+ if not checksum then checksum=file.checksum(name) end
+ if checksum then
+ io.savedata(name..".md5",checksum)
+ return checksum
+ end
+ return nil
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-url"] = package.loaded["l-url"] or true
+
+-- original size: 12531, stripped down to: 5721
+
+if not modules then modules={} end modules ['l-url']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local char,format,byte=string.char,string.format,string.byte
+local concat=table.concat
+local tonumber,type=tonumber,type
+local P,C,R,S,Cs,Cc,Ct,Cf,Cg,V=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Cf,lpeg.Cg,lpeg.V
+local lpegmatch,lpegpatterns,replacer=lpeg.match,lpeg.patterns,lpeg.replacer
+url=url or {}
+local url=url
+local tochar=function(s) return char(tonumber(s,16)) end
+local colon=P(":")
+local qmark=P("?")
+local hash=P("#")
+local slash=P("/")
+local percent=P("%")
+local endofstring=P(-1)
+local hexdigit=R("09","AF","af")
+local plus=P("+")
+local nothing=Cc("")
+local escapedchar=(percent*C(hexdigit*hexdigit))/tochar
+local escaped=(plus/" ")+escapedchar
+local noslash=P("/")/""
+local schemestr=Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr=Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr=Cs((escaped+(1- qmark-hash))^0)
+local querystr=Cs(((1- hash))^0)
+local fragmentstr=Cs((escaped+(1- endofstring))^0)
+local scheme=schemestr*colon+nothing
+local authority=slash*slash*authoritystr+nothing
+local path=slash*pathstr+nothing
+local query=qmark*querystr+nothing
+local fragment=hash*fragmentstr+nothing
+local validurl=scheme*authority*path*query*fragment
+local parser=Ct(validurl)
+lpegpatterns.url=validurl
+lpegpatterns.urlsplitter=parser
+local escapes={}
+setmetatable(escapes,{ __index=function(t,k)
+ local v=format("%%%02X",byte(k))
+ t[k]=v
+ return v
+end })
+local escaper=Cs((R("09","AZ","az")^1+P(" ")/"%%20"+S("-./_")^1+P(1)/escapes)^0)
+local unescaper=Cs((escapedchar+1)^0)
+local getcleaner=Cs((P("+++")/"%%2B"+P("+")/"%%20"+P(1))^1)
+lpegpatterns.urlunescaped=escapedchar
+lpegpatterns.urlescaper=escaper
+lpegpatterns.urlunescaper=unescaper
+lpegpatterns.urlgetcleaner=getcleaner
+function url.unescapeget(str)
+ return lpegmatch(getcleaner,str)
+end
+local function split(str)
+ return (type(str)=="string" and lpegmatch(parser,str)) or str
+end
+local isscheme=schemestr*colon*slash*slash
+local function hasscheme(str)
+ if str then
+ local scheme=lpegmatch(isscheme,str)
+ return scheme~="" and scheme or false
+ else
+ return false
+ end
+end
+local rootletter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=P(".")^0*P("/")+rootletter*P(":")+rootletter^1*separator+rootletter^1*P("/")
+local rootbased=P("/")+rootletter*P(":")
+local barswapper=replacer("|",":")
+local backslashswapper=replacer("\\","/")
+local equal=P("=")
+local amp=P("&")
+local key=Cs(((escapedchar+1)-equal )^0)
+local value=Cs(((escapedchar+1)-amp -endofstring)^0)
+local splitquery=Cf (Ct("")*P { "sequence",
+ sequence=V("pair")*(amp*V("pair"))^0,
+ pair=Cg(key*equal*value),
+},rawset)
+local function hashed(str)
+ if not str or str=="" then
+ return {
+ scheme="invalid",
+ original=str,
+ }
+ end
+ local detailed=split(str)
+ local rawscheme=""
+ local rawquery=""
+ local somescheme=false
+ local somequery=false
+ if detailed then
+ rawscheme=detailed[1]
+ rawquery=detailed[4]
+ somescheme=rawscheme~=""
+ somequery=rawquery~=""
+ end
+ if not somescheme and not somequery then
+ return {
+ scheme="file",
+ authority="",
+ path=str,
+ query="",
+ fragment="",
+ original=str,
+ noscheme=true,
+ filename=str,
+ }
+ end
+ local authority=detailed[2]
+ local path=detailed[3]
+ local filename=nil
+ if authority=="" then
+ filename=path
+ elseif path=="" then
+ filename=""
+ else
+ filename=authority.."/"..path
+ end
+ return {
+ scheme=rawscheme,
+ authority=authority,
+ path=path,
+ query=lpegmatch(unescaper,rawquery),
+ queries=lpegmatch(splitquery,rawquery),
+ fragment=detailed[5],
+ original=str,
+ noscheme=false,
+ filename=filename,
+ }
+end
+url.split=split
+url.hasscheme=hasscheme
+url.hashed=hashed
+function url.addscheme(str,scheme)
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///"..str
+ else
+ return scheme..":///"..str
+ end
+end
+function url.construct(hash)
+ local fullurl,f={},0
+ local scheme,authority,path,query,fragment=hash.scheme,hash.authority,hash.path,hash.query,hash.fragment
+ if scheme and scheme~="" then
+ f=f+1;fullurl[f]=scheme.."://"
+ end
+ if authority and authority~="" then
+ f=f+1;fullurl[f]=authority
+ end
+ if path and path~="" then
+ f=f+1;fullurl[f]="/"..path
+ end
+ if query and query~="" then
+ f=f+1;fullurl[f]="?"..query
+ end
+ if fragment and fragment~="" then
+ f=f+1;fullurl[f]="#"..fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
+end
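+-- Usage sketch (comments only): url.hashed splits a url into its components
+-- and url.construct puts such a table back together.
+--
+--   local u = url.hashed("http://example.com/a/b.html?x=1#top")
+--   -- u.scheme    : "http"
+--   -- u.authority : "example.com"
+--   -- u.path      : "a/b.html"
+--   -- u.query     : "x=1"
+--   -- u.fragment  : "top"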
+local pattern=Cs(slash^-1/""*R("az","AZ")*((S(":|")/":")+P(":"))*slash*P(1)^0)
+function url.filename(filename)
+ local spec=hashed(filename)
+ local path=spec.path
+ return (spec.scheme=="file" and path and lpegmatch(pattern,path)) or filename
+end
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+url.escape=escapestring
+function url.query(str)
+ if type(str)=="string" then
+ return lpegmatch(splitquery,str) or ""
+ else
+ return str
+ end
+end
+function url.toquery(data)
+ local td=type(data)
+ if td=="string" then
+ return #data>0 and escapestring(data) or nil
+ elseif td=="table" then
+ if next(data) then
+ local t={}
+ for k,v in next,data do
+ t[#t+1]=format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ end
+end
+local pattern=Cs(noslash^0*(1-noslash*P(-1))^0)
+function url.barepath(path)
+ if not path or path=="" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-dir"] = package.loaded["l-dir"] or true
+
+-- original size: 16765, stripped down to: 11003
+
+if not modules then modules={} end modules ['l-dir']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,select=type,select
+local find,gmatch,match,gsub,sub=string.find,string.gmatch,string.match,string.gsub,string.sub
+local concat,insert,remove,unpack=table.concat,table.insert,table.remove,table.unpack
+local lpegmatch=lpeg.match
+local P,S,R,C,Cc,Cs,Ct,Cv,V=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Cv,lpeg.V
+dir=dir or {}
+local dir=dir
+local lfs=lfs
+local attributes=lfs.attributes
+local walkdir=lfs.dir
+local isdir=lfs.isdir
+local isfile=lfs.isfile
+local currentdir=lfs.currentdir
+local chdir=lfs.chdir
+local mkdir=lfs.mkdir
+local onwindows=os.type=="windows" or find(os.getenv("PATH"),";",1,true)
+if onwindows then
+ local tricky=S("/\\")*P(-1)
+ isdir=function(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+ end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
+ end
+ lfs.isdir=isdir
+ lfs.isfile=isfile
+else
+ isdir=function(name)
+ return attributes(name,"mode")=="directory"
+ end
+ isfile=function(name)
+ return attributes(name,"mode")=="file"
+ end
+ lfs.isdir=isdir
+ lfs.isfile=isfile
+end
+function dir.current()
+ return (gsub(currentdir(),"\\","/"))
+end
+local function glob_pattern_function(path,patt,recurse,action)
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ action(full)
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
+ end
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_function(dirs[i],patt,recurse,action)
+ end
+ end
+ end
+end
+local function glob_pattern_table(path,patt,recurse,result)
+ if not result then
+ result={}
+ end
+ if isdir(path) then
+ local usedpath
+ if path=="/" then
+ usedpath="/."
+ elseif not find(path,"/$") then
+ usedpath=path.."/."
+ path=path.."/"
+ else
+ usedpath=path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name~="." and name~=".." then
+ local full=path..name
+ local mode=attributes(full,'mode')
+ if mode=='file' then
+ if not patt or find(full,patt) then
+ result[#result+1]=full
+ end
+ elseif recurse and mode=="directory" then
+ if not dirs then
+ dirs={ full }
+ else
+ dirs[#dirs+1]=full
+ end
+ end
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_table(dirs[i],patt,recurse,result)
+ end
+ end
+ end
+ return result
+end
+local function globpattern(path,patt,recurse,method)
+ local kind=type(method)
+ if patt and sub(patt,1,-3)==path then
+ patt=false
+ end
+ if kind=="function" then
+ return glob_pattern_function(path,patt,recurse,method)
+ elseif kind=="table" then
+ return glob_pattern_table(path,patt,recurse,method)
+ else
+ return glob_pattern_table(path,patt,recurse,{})
+ end
+end
+dir.globpattern=globpattern
+local function collectpattern(path,patt,recurse,result)
+ local ok,scanner,first
+ result=result or {}
+ if path=="/" then
+ ok,scanner,first=xpcall(function() return walkdir(path..".") end,function() end)
+ else
+ ok,scanner,first=xpcall(function() return walkdir(path) end,function() end)
+ end
+ if ok and type(scanner)=="function" then
+ if not find(path,"/$") then
+ path=path..'/'
+ end
+ for name in scanner,first do
+ if name=="." then
+ elseif name==".." then
+ else
+ local full=path..name
+ local attr=attributes(full)
+ local mode=attr.mode
+ if mode=='file' then
+ if find(full,patt) then
+ result[name]=attr
+ end
+ elseif recurse and mode=="directory" then
+ attr.list=collectpattern(full,patt,recurse)
+ result[name]=attr
+ end
+ end
+ end
+ end
+ return result
+end
+dir.collectpattern=collectpattern
+local separator,pattern
+if onwindows then
+ local slash=S("/\\")/"/"
+ pattern={
+ [1]=(Cs(P(".")+slash^1)+Cs(R("az","AZ")*P(":")*slash^0)+Cc("./"))*V(2)*V(3),
+ [2]=Cs(((1-S("*?/\\"))^0*slash)^0),
+ [3]=Cs(P(1)^0)
+ }
+else
+ pattern={
+ [1]=(C(P(".")+P("/")^1)+Cc("./"))*V(2)*V(3),
+ [2]=C(((1-S("*?/"))^0*P("/"))^0),
+ [3]=C(P(1)^0)
+ }
+end
+local filter=Cs ((
+ P("**")/".*"+P("*")/"[^/]*"+P("?")/"[^/]"+P(".")/"%%."+P("+")/"%%+"+P("-")/"%%-"+P(1)
+)^0 )
+local function glob(str,t)
+ if type(t)=="function" then
+ if type(str)=="table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
+ else
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str)=="table" then
+ local t=t or {}
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1]=str
+ return t
+ else
+ return { str }
+ end
+ else
+ local root,path,base=lpegmatch(pattern,str)
+ if root and path and base then
+ local recurse=find(base,"**",1,true)
+ local start=root..path
+ local result=lpegmatch(filter,start..base)
+ return globpattern(start,result,recurse,t)
+ else
+ return {}
+ end
+ end
+ end
+end
+dir.glob=glob
+local function globfiles(path,recurse,func,files)
+ if type(func)=="string" then
+ local s=func
+ func=function(name) return find(name,s) end
+ end
+ files=files or {}
+ local noffiles=#files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ else
+ local mode=attributes(name,'mode')
+ if mode=="directory" then
+ if recurse then
+ globfiles(path.."/"..name,recurse,func,files)
+ end
+ elseif mode=="file" then
+ if not func or func(name) then
+ noffiles=noffiles+1
+ files[noffiles]=path.."/"..name
+ end
+ end
+ end
+ end
+ return files
+end
+dir.globfiles=globfiles
+function dir.ls(pattern)
+ return concat(glob(pattern),"\n")
+end
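+-- mkdirs glues its arguments into a path and creates missing intermediate
+-- directories; the windows variant also handles drive letters and unc
+-- prefixes; it returns the path plus a boolean success flag.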
+local make_indeed=true
+if onwindows then
+ function dir.mkdirs(...)
+ local n=select("#",...)
+ local str
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s=="" then
+ elseif str=="" then
+ str=s
+ else
+ str=str.."/"..s
+ end
+ end
+ end
+ local pth=""
+ local drive=false
+ local first,middle,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ else
+ first,last=match(str,"^(//)/*(.-)$")
+ if first then
+ middle,last=match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth="//"..middle
+ else
+ pth="//"..last
+ last=""
+ end
+ else
+ first,middle,last=match(str,"^([a-zA-Z]:)(/*)(.-)$")
+ if first then
+ pth,drive=first..middle,true
+ else
+ middle,last=match(str,"^(/*)(.-)$")
+ if not middle then
+ last=str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth=="" then
+ pth=s
+ elseif drive then
+ pth,drive=pth..s,false
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not isdir(pth) then
+ mkdir(pth)
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
+else
+ function dir.mkdirs(...)
+ local n=select("#",...)
+ local str,pth
+ if n==1 then
+ str=select(1,...)
+ if isdir(str) then
+ return str,true
+ end
+ else
+ str=""
+ for i=1,n do
+ local s=select(i,...)
+ if s and s~="" then
+ if str~="" then
+ str=str.."/"..s
+ else
+ str=s
+ end
+ end
+ end
+ end
+ str=gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth="/"
+ for s in gmatch(str,"[^/]+") do
+ local first=(pth=="/")
+ if first then
+ pth=pth..s
+ else
+ pth=pth.."/"..s
+ end
+ if make_indeed and not first and not isdir(pth) then
+ mkdir(pth)
+ end
+ end
+ else
+ pth="."
+ for s in gmatch(str,"[^/]+") do
+ pth=pth.."/"..s
+ if make_indeed and not isdir(pth) then
+ mkdir(pth)
+ end
+ end
+ end
+ return pth,(isdir(pth)==true)
+ end
+end
+dir.makedirs=dir.mkdirs
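+-- expandname turns a (relative) name into an absolute one; on windows a drive
+-- prefix without a slash is resolved by temporarily changing to that drive.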
+do
+ local chdir=sandbox and sandbox.original(chdir) or chdir
+ if onwindows then
+ local xcurrentdir=dir.current
+ function dir.expandname(str)
+ local first,nothing,last=match(str,"^(//)(//*)(.*)$")
+ if first then
+ first=xcurrentdir().."/"
+ end
+ if not first then
+ first,last=match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first,last=match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d=currentdir()
+ if chdir(first) then
+ first=xcurrentdir()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first,last=xcurrentdir(),str
+ end
+ last=gsub(last,"//","/")
+ last=gsub(last,"/%./","/")
+ last=gsub(last,"^/*","")
+ first=gsub(first,"/*$","")
+ if last=="" or last=="." then
+ return first
+ else
+ return first.."/"..last
+ end
+ end
+ else
+ function dir.expandname(str)
+ if not find(str,"^/") then
+ str=currentdir().."/"..str
+ end
+ str=gsub(str,"//","/")
+ str=gsub(str,"/%./","/")
+ str=gsub(str,"(.)/%.$","%1")
+ return str
+ end
+ end
+end
+file.expandname=dir.expandname
+local stack={}
+function dir.push(newdir)
+ insert(stack,currentdir())
+ if newdir and newdir~="" then
+ chdir(newdir)
+ end
+end
+function dir.pop()
+ local d=remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
+end
+local function found(...)
+ for i=1,select("#",...) do
+ local path=select(i,...)
+ local kind=type(path)
+ if kind=="string" then
+ if isdir(path) then
+ return path
+ end
+ elseif kind=="table" then
+ local path=found(unpack(path))
+ if path then
+ return path
+ end
+ end
+ end
+end
+dir.found=found
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-boolean"] = package.loaded["l-boolean"] or true
+
+-- original size: 1850, stripped down to: 1568
+
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
+end
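+-- toboolean also accepts booleans and numbers; in tolerant mode strings like
+-- "yes", "on" and "t" as well as positive numbers count as true.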
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+function string.is_boolean(str,default,strict)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then
+ return false
+ end
+ end
+ return default
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
+
+-- original size: 37388, stripped down to: 15817
+
+if not modules then modules={} end modules ['l-unicode']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utf=utf or (unicode and unicode.utf8) or {}
+utf.characters=utf.characters or string.utfcharacters
+utf.values=utf.values or string.utfvalues
+local type=type
+local char,byte,format,sub,gmatch=string.char,string.byte,string.format,string.sub,string.gmatch
+local concat=table.concat
+local P,C,R,Cs,Ct,Cmt,Cc,Carg,Cp=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Ct,lpeg.Cmt,lpeg.Cc,lpeg.Carg,lpeg.Cp
+local lpegmatch=lpeg.match
+local patterns=lpeg.patterns
+local tabletopattern=lpeg.utfchartabletopattern
+local bytepairs=string.bytepairs
+local finder=lpeg.finder
+local replacer=lpeg.replacer
+local utfvalues=utf.values
+local utfgmatch=utf.gmatch
+local p_utftype=patterns.utftype
+local p_utfstricttype=patterns.utfstricttype
+local p_utfoffset=patterns.utfoffset
+local p_utf8char=patterns.utf8character
+local p_utf8byte=patterns.utf8byte
+local p_utfbom=patterns.utfbom
+local p_newline=patterns.newline
+local p_whitespace=patterns.whitespace
+if not unicode then
+ unicode={ utf=utf }
+end
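+-- fallback utf.char: encode a code point as a one to four byte utf-8 sequence.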
+if not utf.char then
+ local floor,char=math.floor,string.char
+ function utf.char(n)
+ if n<0x80 then
+ return char(n)
+ elseif n<0x800 then
+ return char(
+ 0xC0+floor(n/0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x10000 then
+ return char(
+ 0xE0+floor(n/0x1000),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ elseif n<0x200000 then
+ return char(
+ 0xF0+floor(n/0x40000),
+ 0x80+(floor(n/0x1000)%0x40),
+ 0x80+(floor(n/0x40)%0x40),
+ 0x80+(n%0x40)
+ )
+ else
+ return ""
+ end
+ end
+end
+if not utf.byte then
+ local utf8byte=patterns.utf8byte
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+end
+local utfchar,utfbyte=utf.char,utf.byte
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+local toentities=Cs (
+ (
+ patterns.utf8one+(
+ patterns.utf8two+patterns.utf8three+patterns.utf8four
+ )/function(s) local b=utfbyte(s) if b<127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+patterns.toentities=toentities
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
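+-- string.toutf converts utf-16 input (endianness detected from the byte order
+-- mark) into utf-8.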
+local one=P(1)
+local two=C(1)*C(1)
+local four=C(R(utfchar(0xD8),utfchar(0xFF)))*C(1)*C(1)*C(1)
+local pattern=P("\254\255")*Cs((
+ four/function(a,b,c,d)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(a,b)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )+P("\255\254")*Cs((
+ four/function(b,a,d,c)
+ local ab=0xFF*byte(a)+byte(b)
+ local cd=0xFF*byte(c)+byte(d)
+ return utfchar((ab-0xD800)*0x400+(cd-0xDC00)+0x10000)
+ end+two/function(b,a)
+ return utfchar(byte(a)*256+byte(b))
+ end+one
+ )^1 )
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s
+end
+local validatedutf=Cs (
+ (
+ patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four+P(1)/"�"
+ )^0
+)
+patterns.validatedutf=validatedutf
+function utf.is_valid(str)
+ return type(str)=="string" and lpegmatch(validatedutf,str) or false
+end
+if not utf.len then
+ local n,f=0,1
+ local utfcharcounter=patterns.utfbom^-1*Cmt (
+ Cc(1)*patterns.utf8one^1+Cc(2)*patterns.utf8two^1+Cc(3)*patterns.utf8three^1+Cc(4)*patterns.utf8four^1,
+ function(_,t,d)
+ n=n+(t-f)/d
+ f=t
+ return true
+ end
+ )^0
+ function utf.len(str)
+ n,f=0,1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+end
+utf.length=utf.len
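+-- utf.sub counts in characters, not bytes: match time captures slide over the
+-- string and record the byte offsets of the requested start and stop positions.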
+if not utf.sub then
+ local utflength=utf.length
+ local b,e,n,first,last=0,0,0,0,0
+ local function slide_zero(s,p)
+ n=n+1
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_one(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ end
+ if n>=last then
+ e=p-1
+ else
+ return p
+ end
+ end
+ local function slide_two(s,p)
+ n=n+1
+ if n==first then
+ b=p
+ else
+ return true
+ end
+ end
+ local pattern_zero=Cmt(p_utf8char,slide_zero)^0
+ local pattern_one=Cmt(p_utf8char,slide_one )^0
+ local pattern_two=Cmt(p_utf8char,slide_two )^0
+ local pattern_first=C(patterns.utf8character)
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start==0 then
+ start=1
+ end
+ if not stop then
+ if start<0 then
+ local l=utflength(str)
+ start=l+start
+ else
+ start=start-1
+ end
+ b,n,first=0,0,start
+ lpegmatch(pattern_two,str)
+ if n>=first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start<0 or stop<0 then
+ local l=utf.length(str)
+ if start<0 then
+ start=l+start
+ if start<=0 then
+ start=1
+ else
+ start=start+1
+ end
+ end
+ if stop<0 then
+ stop=l+stop
+ if stop==0 then
+ stop=1
+ else
+ stop=stop+1
+ end
+ end
+ end
+ if start==1 and stop==1 then
+ return lpegmatch(pattern_first,str) or ""
+ elseif start>stop then
+ return ""
+ elseif start>1 then
+ b,e,n,first,last=0,0,0,start-1,stop
+ lpegmatch(pattern_one,str)
+ if n>=first and e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ else
+ b,e,n,last=1,0,0,stop
+ lpegmatch(pattern_zero,str)
+ if e==0 then
+ e=#str
+ end
+ return sub(str,b,e)
+ end
+ end
+end
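+-- utf.remapper builds a replacement function (or an lpeg pattern) from a
+-- mapping table or function; the "dynamic" option rebuilds the pattern when
+-- the table is extended afterwards.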
+function utf.remapper(mapping,option)
+ local variant=type(mapping)
+ if variant=="table" then
+ if option=="dynamic" then
+ local pattern=false
+ table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ if not pattern then
+ pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ end
+ return lpegmatch(pattern,str)
+ end
+ end
+ elseif option=="pattern" then
+ return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ else
+ local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+ end
+ elseif variant=="function" then
+ if option=="pattern" then
+ return Cs((p_utf8char/mapping+p_utf8char)^0)
+ else
+ local pattern=Cs((p_utf8char/mapping+p_utf8char)^0)
+ return function(str)
+ if not str or str=="" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end,pattern
+ end
+ else
+ return function(str)
+ return str or ""
+ end
+ end
+end
+function utf.replacer(t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+function utf.subtituter(t)
+ local f=finder (t)
+ local r=replacer(t,false,false,true)
+ return function(str)
+ local i=lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i>#str then
+ return str
+ else
+ return lpegmatch(r,str)
+ end
+ end
+end
+local utflinesplitter=p_utfbom^-1*lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows=p_utfbom^-1*Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws=p_utfbom^-1*Ct((p_whitespace^1+C(p_utf8char))^0)
+local utfcharsplitter_raw=Ct(C(p_utf8char)^0)
+patterns.utflinesplitter=utflinesplitter
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+function utf.split(str,ignorewhitespace)
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+function utf.totable(str)
+ return lpegmatch(utfcharsplitter_raw,str)
+end
+function utf.magic(f)
+ local str=f:read(4) or ""
+ local off=lpegmatch(p_utfoffset,str)
+ if off<4 then
+ f:seek('set',off)
+ end
+ return lpegmatch(p_utftype,str)
+end
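+-- the converters below go from utf-16 and utf-32 (big and little endian) to
+-- utf-8; the _t variants also accept a string and then return a table of lines.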
+local utf16_to_utf8_be,utf16_to_utf8_le
+local utf32_to_utf8_be,utf32_to_utf8_le
+local utf_16_be_getbom=patterns.utfbom_16_be^-1
+local utf_16_le_getbom=patterns.utfbom_16_le^-1
+local utf_32_be_getbom=patterns.utfbom_32_be^-1
+local utf_32_le_getbom=patterns.utfbom_32_le^-1
+local utf_16_be_linesplitter=utf_16_be_getbom*lpeg.tsplitat(patterns.utf_16_be_nl)
+local utf_16_le_linesplitter=utf_16_le_getbom*lpeg.tsplitat(patterns.utf_16_le_nl)
+local utf_32_be_linesplitter=utf_32_be_getbom*lpeg.tsplitat(patterns.utf_32_be_nl)
+local utf_32_le_linesplitter=utf_32_le_getbom*lpeg.tsplitat(patterns.utf_32_le_nl)
+local more=0
+local p_utf16_to_utf8_be=C(1)*C(1)/function(left,right)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf16_to_utf8_le=C(1)*C(1)/function(right,left)
+ local now=256*byte(left)+byte(right)
+ if more>0 then
+ now=(more-0xD800)*0x400+(now-0xDC00)+0x10000
+ more=0
+ return utfchar(now)
+ elseif now>=0xD800 and now<=0xDBFF then
+ more=now
+ return ""
+ else
+ return utfchar(now)
+ end
+end
+local p_utf32_to_utf8_be=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(a)+256*256*byte(b)+256*byte(c)+byte(d))
+end
+local p_utf32_to_utf8_le=C(1)*C(1)*C(1)*C(1)/function(a,b,c,d)
+ return utfchar(256*256*256*byte(d)+256*256*byte(c)+256*byte(b)+byte(a))
+end
+p_utf16_to_utf8_be=P(true)/function() more=0 end*utf_16_be_getbom*Cs(p_utf16_to_utf8_be^0)
+p_utf16_to_utf8_le=P(true)/function() more=0 end*utf_16_le_getbom*Cs(p_utf16_to_utf8_le^0)
+p_utf32_to_utf8_be=P(true)/function() more=0 end*utf_32_be_getbom*Cs(p_utf32_to_utf8_be^0)
+p_utf32_to_utf8_le=P(true)/function() more=0 end*utf_32_le_getbom*Cs(p_utf32_to_utf8_le^0)
+patterns.utf16_to_utf8_be=p_utf16_to_utf8_be
+patterns.utf16_to_utf8_le=p_utf16_to_utf8_le
+patterns.utf32_to_utf8_be=p_utf32_to_utf8_be
+patterns.utf32_to_utf8_le=p_utf32_to_utf8_le
+utf16_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_be,s)
+ else
+ return s
+ end
+end
+local utf16_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_be,s)
+ end
+ end
+ return t
+end
+utf16_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf16_to_utf8_le,s)
+ else
+ return s
+ end
+end
+local utf16_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_16_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf16_to_utf8_le,s)
+ end
+ end
+ return t
+end
+utf32_to_utf8_be=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_be,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_be_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_be,s)
+ end
+ end
+ return t
+end
+utf32_to_utf8_le=function(s)
+ if s and s~="" then
+ return lpegmatch(p_utf32_to_utf8_le,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_le_t=function(t)
+ if not t then
+ return nil
+ elseif type(t)=="string" then
+ t=lpegmatch(utf_32_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s=t[i]
+ if s~="" then
+ t[i]=lpegmatch(p_utf32_to_utf8_le,s)
+ end
+ end
+ return t
+end
+utf.utf16_to_utf8_le_t=utf16_to_utf8_le_t
+utf.utf16_to_utf8_be_t=utf16_to_utf8_be_t
+utf.utf32_to_utf8_le_t=utf32_to_utf8_le_t
+utf.utf32_to_utf8_be_t=utf32_to_utf8_be_t
+utf.utf16_to_utf8_le=utf16_to_utf8_le
+utf.utf16_to_utf8_be=utf16_to_utf8_be
+utf.utf32_to_utf8_le=utf32_to_utf8_le
+utf.utf32_to_utf8_be=utf32_to_utf8_be
+function utf.utf8_to_utf8_t(t)
+ return type(t)=="string" and lpegmatch(utflinesplitter,t) or t
+end
+function utf.utf16_to_utf8_t(t,endian)
+ return endian and utf16_to_utf8_be_t(t) or utf16_to_utf8_le_t(t) or t
+end
+function utf.utf32_to_utf8_t(t,endian)
+ return endian and utf32_to_utf8_be_t(t) or utf32_to_utf8_le_t(t) or t
+end
+local function little(b)
+ if b<0x10000 then
+ return char(b%256,b/256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
+end
+local function big(b)
+ if b<0x10000 then
+ return char(b/256,b%256)
+ else
+ b=b-0x10000
+ local b1,b2=b/1024+0xD800,b%1024+0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+local l_remap=Cs((p_utf8byte/little+P(1)/"")^0)
+local b_remap=Cs((p_utf8byte/big+P(1)/"")^0)
+local function utf8_to_utf16_be(str,nobom)
+ if nobom then
+ return lpegmatch(b_remap,str)
+ else
+ return char(254,255)..lpegmatch(b_remap,str)
+ end
+end
+local function utf8_to_utf16_le(str,nobom)
+ if nobom then
+ return lpegmatch(l_remap,str)
+ else
+ return char(255,254)..lpegmatch(l_remap,str)
+ end
+end
+utf.utf8_to_utf16_be=utf8_to_utf16_be
+utf.utf8_to_utf16_le=utf8_to_utf16_le
+function utf.utf8_to_utf16(str,littleendian,nobom)
+ if littleendian then
+ return utf8_to_utf16_le(str,nobom)
+ else
+ return utf8_to_utf16_be(str,nobom)
+ end
+end
+local pattern=Cs (
+ (p_utf8byte/function(unicode ) return format("0x%04X",unicode) end)*(p_utf8byte*Carg(1)/function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
+end
+function utf.ustring(s)
+ return format("U+%05X",type(s)=="number" and s or utfbyte(s))
+end
+function utf.xstring(s)
+ return format("0x%05X",type(s)=="number" and s or utfbyte(s))
+end
+function utf.toeight(str)
+ if not str or str=="" then
+ return nil
+ end
+ local utftype=lpegmatch(p_utfstricttype,str)
+ if utftype=="utf-8" then
+ return sub(str,4)
+ elseif utftype=="utf-16-be" then
+ return utf16_to_utf8_be(str)
+ elseif utftype=="utf-16-le" then
+ return utf16_to_utf8_le(str)
+ else
+ return str
+ end
+end
+local p_nany=p_utf8char/""
+if utfgmatch then
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local n=0
+ for _ in utfgmatch(str,what) do
+ n=n+1
+ end
+ return n
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+else
+ local cache={}
+ function utf.count(str,what)
+ if type(what)=="string" then
+ local p=cache[what]
+ if not p then
+ p=Cs((P(what)/" "+p_nany)^0)
+ cache[what]=p
+ end
+ return #lpegmatch(p,str)
+ else
+ return #lpegmatch(Cs((P(what)/" "+p_nany)^0),str)
+ end
+ end
+end
+if not utf.characters then
+ function utf.characters(str)
+ return gmatch(str,".[\128-\191]*")
+ end
+ string.utfcharacters=utf.characters
+end
+if not utf.values then
+ local find=string.find
+ local dummy=function()
+ end
+ function utf.values(str)
+ local n=#str
+ if n==0 then
+ return dummy
+ elseif n==1 then
+ return function() return utfbyte(str) end
+ else
+ local p=1
+ return function()
+ local b,e=find(str,".[\128-\191]*",p)
+ if b then
+ p=e+1
+ return utfbyte(sub(str,b,e))
+ end
+ end
+ end
+ end
+ string.utfvalues=utf.values
+end
+function utf.chrlen(u)
+ return
+ (u<0x80 and 1) or
+ (u<0xE0 and 2) or
+ (u<0xF0 and 3) or
+ (u<0xF8 and 4) or
+ (u<0xFC and 5) or
+ (u<0xFE and 6) or 0
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["l-math"] = package.loaded["l-math"] or true
+
+-- original size: 974, stripped down to: 890
+
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.ceiling then
+ math.ceiling=math.ceil
+end
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
+end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+if not math.mod then
+ function math.mod(n,m) return n%m end
+end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-str"] = package.loaded["util-str"] or true
+
+-- original size: 34503, stripped down to: 18933
+
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local spaceortab=patterns.spaceortab
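+-- points and basepoints render a count of scaled points (65536 sp per point)
+-- as a "...pt" or "...bp" dimension with redundant zeros stripped.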
+local function points(n)
+ n=tonumber(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ n=tonumber(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=spaceortab^0*newline
+local anyrubish=spaceortab+newline
+local anything=patterns.anything
+local stripped=(spaceortab^1/"")*newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
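+-- newrepeater returns a table that lazily caches repeated strings, so that
+-- nspaces[n] yields n spaces without recomputing them.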
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end })
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
+end
+strings.striplong=strings.striplines
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format("%05X",b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..(c and format("%05X",c) or "?????")..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local zero=P("0")^1/""
+local plus=P("+")/""
+local minus=P("-")
+local separator=S(".")
+local digit=R("09")
+local trailing=zero^1*#S("eE")
+local exponent=(S("eE")*(plus+Cs((minus*zero^0*P(-1))/"")+minus)*zero^0*(P(-1)*Cc("0")+P(1)^1))
+local pattern_a=Cs(minus^0*digit^1*(separator/""*trailing+separator*(trailing+digit)^0)*exponent)
+local pattern_b=Cs((exponent+P(1))^0)
+function number.sparseexponent(f,n)
+ if not n then
+ n=f
+ f="%e"
+ end
+ local tn=type(n)
+ if tn=="string" then
+ local m=tonumber(n)
+ if m then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,m))
+ end
+ elseif tn=="number" then
+ return lpegmatch((f=="%e" or f=="%E") and pattern_a or pattern_b,format(f,n))
+ end
+ return tostring(n)
+end
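+-- the formatter mechanism below compiles a template string into a dedicated
+-- lua function: every %-directive is translated by one of the format_* helpers
+-- into a snippet of lua code, the snippets are glued together and wrapped in
+-- the return function template, and the compiled result is cached per format
+-- string.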
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=P("{")*C((1-P("}"))^0)*P("}")+C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("format('%%i',a%s)",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_j=function(f)
+ n=n+1
+ return format("sparseexponent('%%%se',a%s)",f,n)
+end
+local format_J=function(f)
+ n=n+1
+ return format("sparseexponent('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local digit=patterns.digit
+local period=patterns.period
+local three=digit*digit*digit
+local splitter=Cs (
+ (((1-(three^1*period))^1+C(three))*(Carg(1)*three)^1+C((1-period)^1))*(P(1)/""*Carg(2))*C(2)
+)
+patterns.formattednumber=splitter
+function number.formatted(n,sep1,sep2)
+ local s=type(n)=="string" and n or format("%0.2f",n)
+ if sep1==true then
+ return lpegmatch(splitter,s,1,".",",")
+ elseif sep1=="." then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ",")
+ elseif sep1=="," then
+ return lpegmatch(splitter,s,1,sep1,sep2 or ".")
+ else
+ return lpegmatch(splitter,s,1,sep1 or ",",sep2 or ".")
+ end
+end
+local format_m=function(f)
+ n=n+1
+ if not f or f=="" then
+ f=","
+ end
+ return format([[formattednumber(a%s,%q,".")]],n,f)
+end
+local format_M=function(f)
+ n=n+1
+ if not f or f=="" then
+ f="."
+ end
+ return format([[formattednumber(a%s,%q,",")]],n,f)
+end
+local format_z=function(f)
+ n=n+(tonumber(f) or 1)
+ return "''"
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("w")
++V("W")
++V("a")
++V("A")
++V("j")+V("J")
++V("m")+V("M")
++V("z")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_any*P("l"))/format_l,
+ ["L"]=(prefix_any*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["j"]=(prefix_any*P("j"))/format_j,
+ ["J"]=(prefix_any*P("J"))/format_J,
+ ["m"]=(prefix_tab*P("m"))/format_m,
+ ["M"]=(prefix_tab*P("M"))/format_M,
+ ["z"]=(prefix_any*P("z"))/format_z,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
+local function make(t,str)
+ local f
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p,t._environment_)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
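+-- formatters.add registers an extension: a %!name! directive in a template
+-- expands to the given snippet, and the extra preamble (or environment table)
+-- is made available to the compiled function.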
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if type(preamble)=="string" then
+ t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
+ end
+ end
+end
+strings.formatters.add=add
+patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
+patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+local pattern=Cs((newline/os.newline+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-tab"] = package.loaded["util-tab"] or true
+
+-- original size: 25338, stripped down to: 16247
+
+if not modules then modules={} end modules ['util-tab']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.tables=utilities.tables or {}
+local tables=utilities.tables
+local format,gmatch,gsub,sub=string.format,string.gmatch,string.gsub,string.sub
+local concat,insert,remove,sort=table.concat,table.insert,table.remove,table.sort
+local setmetatable,getmetatable,tonumber,tostring=setmetatable,getmetatable,tonumber,tostring
+local type,next,rawset,tonumber,tostring,load,select=type,next,rawset,tonumber,tostring,load,select
+local lpegmatch,P,Cs,Cc=lpeg.match,lpeg.P,lpeg.Cs,lpeg.Cc
+local sortedkeys,sortedpairs=table.sortedkeys,table.sortedpairs
+local formatters=string.formatters
+local utftoeight=utf.toeight
+local splitter=lpeg.tsplitat(".")
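+-- definetable returns a chunk of lua code that creates the nested tables
+-- needed for a dotted name, plus the resolved name itself.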
+function utilities.tables.definetable(target,nofirst,nolast)
+ local composed,t=nil,{}
+ local snippets=lpegmatch(splitter,target)
+ for i=1,#snippets-(nolast and 1 or 0) do
+ local name=snippets[i]
+ if composed then
+ composed=composed.."."..name
+ t[#t+1]=formatters["if not %s then %s = { } end"](composed,composed)
+ else
+ composed=name
+ if not nofirst then
+ t[#t+1]=formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if composed then
+ if nolast then
+ composed=composed.."."..snippets[#snippets]
+ end
+ return concat(t,"\n"),composed
+ else
+ return "",target
+ end
+end
+function tables.definedtable(...)
+ local t=_G
+ for i=1,select("#",...) do
+ local li=select(i,...)
+ local tl=t[li]
+ if not tl then
+ tl={}
+ t[li]=tl
+ end
+ t=tl
+ end
+ return t
+end
+function tables.accesstable(target,root)
+ local t=root or _G
+ for name in gmatch(target,"([^%.]+)") do
+ t=t[name]
+ if not t then
+ return
+ end
+ end
+ return t
+end
+function tables.migratetable(target,v,root)
+ local t=root or _G
+ local names=lpegmatch(splitter,target)
+ for i=1,#names-1 do
+ local name=names[i]
+ t[name]=t[name] or {}
+ t=t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]]=v
+end
+function tables.removevalue(t,value)
+ if value then
+ for i=1,#t do
+ if t[i]==value then
+ remove(t,i)
+ end
+ end
+ end
+end
+function tables.replacevalue(t,oldvalue,newvalue)
+ if oldvalue and newvalue then
+ for i=1,#t do
+ if t[i]==oldvalue then
+ t[i]=newvalue
+ end
+ end
+ end
+end
+function tables.insertbeforevalue(t,value,extra)
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i,extra)
+ return
+ end
+ end
+ insert(t,1,extra)
+end
+function tables.insertaftervalue(t,value,extra)
+ for i=1,#t do
+ if t[i]==extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i]==value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
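+-- tocsv turns an array of records into csv text; fields come from
+-- specification.fields (a table of keys) or, when absent, from the sorted keys
+-- of the first record, and values are quoted with embedded quotes doubled.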
+local escape=Cs(Cc('"')*((P('"')/'""'+P(1))^0)*Cc('"'))
+function table.tocsv(t,specification)
+ if t and #t>0 then
+ local result={}
+ local r={}
+ specification=specification or {}
+ local fields=specification.fields
+ if type(fields)~="table" then
+ fields=sortedkeys(t[1])
+ end
+ local separator=specification.separator or ","
+ if specification.preamble==true then
+ for f=1,#fields do
+ r[f]=lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1]=concat(r,separator)
+ end
+ for i=1,#t do
+ local ti=t[i]
+ for f=1,#fields do
+ local field=ti[fields[f]]
+ if type(field)=="string" then
+ r[f]=lpegmatch(escape,field)
+ else
+ r[f]=tostring(field)
+ end
+ end
+ result[#result+1]=concat(r,separator)
+ end
+ return concat(result,"\n")
+ else
+ return ""
+ end
+end
+local nspaces=utilities.strings.newrepeater(" ")
+local function toxml(t,d,result,step)
+ for k,v in sortedpairs(t) do
+ local s=nspaces[d]
+ local tk=type(k)
+ local tv=type(v)
+ if tv=="table" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</entry>"](s,k)
+ else
+ result[#result+1]=formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1]=formatters["%s</%s>"](s,k)
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk=="number" then
+ result[#result+1]=formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
+ else
+ result[#result+1]=formatters["%s<%s>%S</%s>"](s,k,v,k)
+ end
+ end
+end
+function table.toxml(t,specification)
+ specification=specification or {}
+ local name=specification.name
+ local noroot=name==false
+ local result=(specification.nobanner or noroot) and {} or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent=specification.indent or 0
+ local spaces=specification.spaces or 1
+ if noroot then
+ toxml(t,indent,result,spaces)
+ else
+ toxml({ [name or "data"]=t },indent,result,spaces)
+ end
+ return concat(result,"\n")
+end
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule)~="table" then
+ protect=true
+ capsule={}
+ end
+ for key,value in next,core do
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
+ os.exit()
+ else
+ capsule[key]=value
+ end
+ end
+ if protect then
+ for key,value in next,core do
+ core[key]=nil
+ end
+ setmetatable(core,{
+ __index=capsule,
+ __newindex=function(t,key,value)
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a' in %a"]("overload",key,core))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+local f_hashed_string=formatters["[%q]=%q,"]
+local f_hashed_number=formatters["[%q]=%s,"]
+local f_hashed_boolean=formatters["[%q]=%l,"]
+local f_hashed_table=formatters["[%q]="]
+local f_indexed_string=formatters["[%s]=%q,"]
+local f_indexed_number=formatters["[%s]=%s,"]
+local f_indexed_boolean=formatters["[%s]=%l,"]
+local f_indexed_table=formatters["[%s]="]
+local f_ordered_string=formatters["%q,"]
+local f_ordered_number=formatters["%s,"]
+local f_ordered_boolean=formatters["%l,"]
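+-- fastserialize favours speed over layout: it emits a loadable "return { ... }"
+-- string without indentation, writing the array part first and the hashed part
+-- after it.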
+function table.fastserialize(t,prefix)
+ local r={ type(prefix)=="string" and prefix or "return" }
+ local m=1
+ local function fastserialize(t,outer)
+ local n=#t
+ m=m+1
+ r[m]="{"
+ if n>0 then
+ for i=0,n do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="string" then
+ m=m+1 r[m]=f_ordered_string(v)
+ elseif tv=="number" then
+ m=m+1 r[m]=f_ordered_number(v)
+ elseif tv=="table" then
+ fastserialize(v)
+ elseif tv=="boolean" then
+ m=m+1 r[m]=f_ordered_boolean(v)
+ end
+ end
+ end
+ for k,v in next,t do
+ local tk=type(k)
+ if tk=="number" then
+ if k>n or k<0 then
+ local tv=type(v)
+ if tv=="string" then
+ m=m+1 r[m]=f_indexed_string(k,v)
+ elseif tv=="number" then
+ m=m+1 r[m]=f_indexed_number(k,v)
+ elseif tv=="table" then
+ m=m+1 r[m]=f_indexed_table(k)
+ fastserialize(v)
+ elseif tv=="boolean" then
+ m=m+1 r[m]=f_indexed_boolean(k,v)
+ end
+ end
+ else
+ local tv=type(v)
+ if tv=="string" then
+ m=m+1 r[m]=f_hashed_string(k,v)
+ elseif tv=="number" then
+ m=m+1 r[m]=f_hashed_number(k,v)
+ elseif tv=="table" then
+ m=m+1 r[m]=f_hashed_table(k)
+ fastserialize(v)
+ elseif tv=="boolean" then
+ m=m+1 r[m]=f_hashed_boolean(k,v)
+ end
+ end
+ end
+ m=m+1
+ if outer then
+ r[m]="}"
+ else
+ r[m]="},"
+ end
+ return r
+ end
+ return concat(fastserialize(t,true))
+end
+function table.deserialize(str)
+ if not str or str=="" then
+ return
+ end
+ local code=load(str)
+ if not code then
+ return
+ end
+ code=code()
+ if not code then
+ return
+ end
+ return code
+end
+function table.load(filename,loader)
+ if filename then
+ local t=(loader or io.loaddata)(filename)
+ if t and t~="" then
+ local t=utftoeight(t)
+ t=load(t)
+ if type(t)=="function" then
+ t=t()
+ if type(t)=="table" then
+ return t
+ end
+ end
+ end
+ end
+end
+function table.save(filename,t,n,...)
+ io.savedata(filename,table.serialize(t,n==nil and true or n,...))
+end
+local f_key_value=formatters["%s=%q"]
+local f_add_table=formatters[" {%t},\n"]
+local f_return_table=formatters["return {\n%t}"]
+local function slowdrop(t)
+ local r={}
+ local l={}
+ for i=1,#t do
+ local ti=t[i]
+ local j=0
+ for k,v in next,ti do
+ j=j+1
+ l[j]=f_key_value(k,v)
+ end
+ r[i]=f_add_table(l)
+ end
+ return f_return_table(r)
+end
+local function fastdrop(t)
+ local r={ "return {\n" }
+ local m=1
+ for i=1,#t do
+ local ti=t[i]
+ m=m+1 r[m]=" {"
+ for k,v in next,ti do
+ m=m+1 r[m]=f_key_value(k,v)
+ end
+ m=m+1 r[m]="},\n"
+ end
+ m=m+1
+ r[m]="}"
+ return concat(r)
+end
+function table.drop(t,slow)
+ if #t==0 then
+ return "return { }"
+ elseif slow==true then
+ return slowdrop(t)
+ else
+ return fastdrop(t)
+ end
+end
+function table.autokey(t,k)
+ local v={}
+ t[k]=v
+ return v
+end
+local selfmapper={ __index=function(t,k) t[k]=k return k end }
+function table.twowaymapper(t)
+ if not t then
+ t={}
+ else
+ for i=0,#t do
+ local ti=t[i]
+ if ti then
+ local i=tostring(i)
+ t[i]=ti
+ t[ti]=i
+ end
+ end
+ t[""]=t[0] or ""
+ end
+ setmetatable(t,selfmapper)
+ return t
+end
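+-- the formatters below drive table.serialize; %w expands to the current
+-- indentation, %q quotes strings and %l writes booleans as true/false.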
+local f_start_key_idx=formatters["%w{"]
+local f_start_key_num=formatters["%w[%s]={"]
+local f_start_key_str=formatters["%w[%q]={"]
+local f_start_key_boo=formatters["%w[%l]={"]
+local f_start_key_nop=formatters["%w{"]
+local f_stop=formatters["%w},"]
+local f_key_num_value_num=formatters["%w[%s]=%s,"]
+local f_key_str_value_num=formatters["%w[%q]=%s,"]
+local f_key_boo_value_num=formatters["%w[%l]=%s,"]
+local f_key_num_value_str=formatters["%w[%s]=%q,"]
+local f_key_str_value_str=formatters["%w[%q]=%q,"]
+local f_key_boo_value_str=formatters["%w[%l]=%q,"]
+local f_key_num_value_boo=formatters["%w[%s]=%l,"]
+local f_key_str_value_boo=formatters["%w[%q]=%l,"]
+local f_key_boo_value_boo=formatters["%w[%l]=%l,"]
+local f_key_num_value_not=formatters["%w[%s]={},"]
+local f_key_str_value_not=formatters["%w[%q]={},"]
+local f_key_boo_value_not=formatters["%w[%l]={},"]
+local f_key_num_value_seq=formatters["%w[%s]={ %, t },"]
+local f_key_str_value_seq=formatters["%w[%q]={ %, t },"]
+local f_key_boo_value_seq=formatters["%w[%l]={ %, t },"]
+local f_val_num=formatters["%w%s,"]
+local f_val_str=formatters["%w%q,"]
+local f_val_boo=formatters["%w%l,"]
+local f_val_not=formatters["%w{},"]
+local f_val_seq=formatters["%w{ %, t },"]
+local f_table_return=formatters["return {"]
+local f_table_name=formatters["%s={"]
+local f_table_direct=formatters["{"]
+local f_table_entry=formatters["[%q]={"]
+local f_table_finish=formatters["}"]
+local spaces=utilities.strings.newrepeater(" ")
+local serialize=table.serialize
+function table.serialize(root,name,specification)
+ if type(specification)=="table" then
+ return serialize(root,name,specification)
+ end
+ local t
+ local n=1
+ local function simple_table(t)
+ local nt=#t
+ if nt>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ if type(v)=="table" then
+ return nil
+ end
+ end
+ if n==nt then
+ local tt={}
+ for i=1,nt do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ tt[i]=v
+ elseif tv=="string" then
+ tt[i]=format("%q",v)
+ elseif tv=="boolean" then
+ tt[i]=v and "true" or "false"
+ else
+ return nil
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+ end
+ local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ n=n+1
+ if indexed then
+ t[n]=f_start_key_idx(depth)
+ else
+ local tn=type(name)
+ if tn=="number" then
+ t[n]=f_start_key_num(depth,name)
+ elseif tn=="string" then
+ t[n]=f_start_key_str(depth,name)
+ elseif tn=="boolean" then
+ t[n]=f_start_key_boo(depth,name)
+ else
+ t[n]=f_start_key_nop(depth)
+ end
+ end
+ depth=depth+1
+ end
+ if root and next(root)~=nil then
+ local first=nil
+ local last=0
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local tv=type(v)
+ local tk=type(k)
+ if first and tk=="number" and k<=last and k>=first then
+ if tv=="number" then
+ n=n+1 t[n]=f_val_num(depth,v)
+ elseif tv=="string" then
+ n=n+1 t[n]=f_val_str(depth,v)
+ elseif tv=="table" then
+ if next(v)==nil then
+ n=n+1 t[n]=f_val_not(depth)
+ else
+ local st=simple_table(v)
+ if st then
+ n=n+1 t[n]=f_val_seq(depth,st)
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ end
+ elseif tv=="boolean" then
+ n=n+1 t[n]=f_val_boo(depth,v)
+ end
+ elseif tv=="number" then
+ if tk=="number" then
+ n=n+1 t[n]=f_key_num_value_num(depth,k,v)
+ elseif tk=="string" then
+ n=n+1 t[n]=f_key_str_value_num(depth,k,v)
+ elseif tk=="boolean" then
+ n=n+1 t[n]=f_key_boo_value_num(depth,k,v)
+ end
+ elseif tv=="string" then
+ if tk=="number" then
+ n=n+1 t[n]=f_key_num_value_str(depth,k,v)
+ elseif tk=="string" then
+ n=n+1 t[n]=f_key_str_value_str(depth,k,v)
+ elseif tk=="boolean" then
+ n=n+1 t[n]=f_key_boo_value_str(depth,k,v)
+ end
+ elseif tv=="table" then
+ if next(v)==nil then
+ if tk=="number" then
+ n=n+1 t[n]=f_key_num_value_not(depth,k)
+ elseif tk=="string" then
+ n=n+1 t[n]=f_key_str_value_not(depth,k)
+ elseif tk=="boolean" then
+ n=n+1 t[n]=f_key_boo_value_not(depth,k)
+ end
+ else
+ local st=simple_table(v)
+ if not st then
+ do_serialize(v,k,depth,level+1)
+ elseif tk=="number" then
+ n=n+1 t[n]=f_key_num_value_seq(depth,k,st)
+ elseif tk=="string" then
+ n=n+1 t[n]=f_key_str_value_seq(depth,k,st)
+ elseif tk=="boolean" then
+ n=n+1 t[n]=f_key_boo_value_seq(depth,k,st)
+ end
+ end
+ elseif tv=="boolean" then
+ if tk=="number" then
+ n=n+1 t[n]=f_key_num_value_boo(depth,k,v)
+ elseif tk=="string" then
+ n=n+1 t[n]=f_key_str_value_boo(depth,k,v)
+ elseif tk=="boolean" then
+ n=n+1 t[n]=f_key_boo_value_boo(depth,k,v)
+ end
+ end
+ end
+ end
+ if level>0 then
+ n=n+1 t[n]=f_stop(depth-1)
+ end
+ end
+ local tname=type(name)
+ if tname=="string" then
+ if name=="return" then
+ t={ f_table_return() }
+ else
+ t={ f_table_name(name) }
+ end
+ elseif tname=="number" then
+ t={ f_table_entry(name) }
+ elseif tname=="boolean" then
+ if name then
+ t={ f_table_return() }
+ else
+ t={ f_table_direct() }
+ end
+ else
+ t={ f_table_name("t") }
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root)~=nil then
+ do_serialize(root,name,1,0)
+ end
+ end
+ n=n+1
+ t[n]=f_table_finish()
+ return concat(t,"\n")
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-sto"] = package.loaded["util-sto"] or true
+
+-- original size: 4172, stripped down to: 2953
+
+if not modules then modules={} end modules ['util-sto']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local setmetatable,getmetatable,type=setmetatable,getmetatable,type
+utilities=utilities or {}
+utilities.storage=utilities.storage or {}
+local storage=utilities.storage
+function storage.mark(t)
+ if not t then
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
+ end
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
+end
+function storage.allocate(t)
+ t=t or {}
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m.__storage__=true
+ return t
+end
+function storage.marked(t)
+ local m=getmetatable(t)
+ return m and m.__storage__
+end
+function storage.checked(t)
+ if not t then
+ print("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
+ end
+ return t
+end
+function storage.setinitializer(data,initialize)
+ local m=getmetatable(data) or {}
+ m.__index=function(data,k)
+ m.__index=nil
+ initialize()
+ return data[k]
+ end
+ setmetatable(data,m)
+end
+local keyisvalue={ __index=function(t,k)
+ t[k]=k
+ return k
+end }
+function storage.sparse(t)
+ t=t or {}
+ setmetatable(t,keyisvalue)
+ return t
+end
+local function f_empty () return "" end
+local function f_self (t,k) t[k]=k return k end
+local function f_table (t,k) local v={} t[k]=v return v end
+local function f_number(t,k) t[k]=0 return 0 end
+local function f_ignore() end
+local f_index={
+ ["empty"]=f_empty,
+ ["self"]=f_self,
+ ["table"]=f_table,
+ ["number"]=f_number,
+}
+local t_index={
+ ["empty"]={ __index=f_empty },
+ ["self"]={ __index=f_self },
+ ["table"]={ __index=f_table },
+ ["number"]={ __index=f_number },
+}
+function table.setmetatableindex(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__index=f_index[f] or f
+ else
+ setmetatable(t,t_index[f] or { __index=f })
+ end
+ return t
+end
+local f_index={
+ ["ignore"]=f_ignore,
+}
+local t_index={
+ ["ignore"]={ __newindex=f_ignore },
+}
+function table.setmetatablenewindex(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__newindex=f_index[f] or f
+ else
+ setmetatable(t,t_index[f] or { __newindex=f })
+ end
+ return t
+end
+function table.setmetatablecall(t,f)
+ if type(t)~="table" then
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if m then
+ m.__call=f
+ else
+ setmetatable(t,{ __call=f })
+ end
+ return t
+end
+function table.setmetatablekey(t,key,value)
+ local m=getmetatable(t)
+ if not m then
+ m={}
+ setmetatable(t,m)
+ end
+ m[key]=value
+ return t
+end
+function table.getmetatablekey(t,key,value)
+ local m=getmetatable(t)
+ return m and m[key]
+end
+
+
+end -- of closure
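-- A minimal usage sketch of the storage and metatable helpers defined above;
-- the table names and values are hypothetical.
local cache = table.setmetatableindex({}, "table")   -- missing keys yield fresh subtables
cache.fonts[#cache.fonts+1] = "dejavu"                -- cache.fonts was created on first access
local counts = table.setmetatableindex({}, "number") -- missing keys read as 0
counts.pages = counts.pages + 1
local settings = utilities.storage.allocate { mode = "draft" }
print(utilities.storage.marked(settings))             -- true: the table carries __storage__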
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-prs"] = package.loaded["util-prs"] or true
+
+-- original size: 21780, stripped down to: 15121
+
+if not modules then modules={} end modules ['util-prs']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lpeg,table,string=lpeg,table,string
+local P,R,V,S,C,Ct,Cs,Carg,Cc,Cg,Cf,Cp=lpeg.P,lpeg.R,lpeg.V,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.Carg,lpeg.Cc,lpeg.Cg,lpeg.Cf,lpeg.Cp
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local concat,gmatch,find=table.concat,string.gmatch,string.find
+local tostring,type,next,rawset=tostring,type,next,rawset
+local mod,div=math.mod,math.div
+utilities=utilities or {}
+local parsers=utilities.parsers or {}
+utilities.parsers=parsers
+local patterns=parsers.patterns or {}
+parsers.patterns=patterns
+local setmetatableindex=table.setmetatableindex
+local sortedhash=table.sortedhash
+local sortedkeys=table.sortedkeys
+local tohash=table.tohash
+local digit=R("09")
+local space=P(' ')
+local equal=P("=")
+local comma=P(",")
+local lbrace=P("{")
+local rbrace=P("}")
+local lparent=P("(")
+local rparent=P(")")
+local period=S(".")
+local punctuation=S(".,:;")
+local spacer=lpegpatterns.spacer
+local whitespace=lpegpatterns.whitespace
+local newline=lpegpatterns.newline
+local anything=lpegpatterns.anything
+local endofstring=lpegpatterns.endofstring
+local nobrace=1-(lbrace+rbrace )
+local noparent=1-(lparent+rparent)
+local escape,left,right=P("\\"),P('{'),P('}')
+lpegpatterns.balanced=P {
+ [1]=((escape*(left+right))+(1-(left+right))+V(2))^0,
+ [2]=left*V(1)*right
+}
+local nestedbraces=P { lbrace*(nobrace+V(1))^0*rbrace }
+local nestedparents=P { lparent*(noparent+V(1))^0*rparent }
+local spaces=space^0
+local argument=Cs((lbrace/"")*((nobrace+nestedbraces)^0)*(rbrace/""))
+local content=(1-endofstring)^0
+lpegpatterns.nestedbraces=nestedbraces
+lpegpatterns.nestedparents=nestedparents
+lpegpatterns.nested=nestedbraces
+lpegpatterns.argument=argument
+lpegpatterns.content=content
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local key=C((1-equal-comma)^1)
+local pattern_a=(space+comma)^0*(key*equal*value+key*C(""))
+local pattern_c=(space+comma)^0*(key*equal*value)
+local key=C((1-space-equal-comma)^1)
+local pattern_b=spaces*comma^0*spaces*(key*((spaces*equal*spaces*value)+C("")))
+local hash={}
+local function set(key,value)
+ hash[key]=value
+end
+local pattern_a_s=(pattern_a/set)^1
+local pattern_b_s=(pattern_b/set)^1
+local pattern_c_s=(pattern_c/set)^1
+patterns.settings_to_hash_a=pattern_a_s
+patterns.settings_to_hash_b=pattern_b_s
+patterns.settings_to_hash_c=pattern_c_s
+function parsers.make_settings_to_hash_pattern(set,how)
+ if how=="strict" then
+ return (pattern_c/set)^1
+ elseif how=="tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
+end
+function parsers.settings_to_hash(str,existing)
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
+ if existing then
+ for k,v in next,str do
+ existing[k]=v
+ end
+   return existing
+ else
+ return str
+ end
+ else
+ hash=existing or {}
+ lpegmatch(pattern_a_s,str)
+ return hash
+ end
+end
+function parsers.settings_to_hash_tolerant(str,existing)
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
+ if existing then
+ for k,v in next,str do
+ existing[k]=v
+ end
+   return existing
+ else
+ return str
+ end
+ else
+ hash=existing or {}
+ lpegmatch(pattern_b_s,str)
+ return hash
+ end
+end
+function parsers.settings_to_hash_strict(str,existing)
+ if not str or str=="" then
+ return nil
+ elseif type(str)=="table" then
+ if existing then
+ for k,v in next,str do
+ existing[k]=v
+ end
+   return existing
+ else
+ return str
+ end
+ elseif str and str~="" then
+ hash=existing or {}
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ end
+end
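-- A minimal usage sketch of the settings parsers defined above; the setting
-- strings are hypothetical.
local h = utilities.parsers.settings_to_hash("color=red,width=3cm,frame=")
-- h == { color = "red", width = "3cm", frame = "" }
local t = utilities.parsers.settings_to_hash_tolerant("color = red, width = 3cm")
-- the tolerant variant also accepts spaces around '=' and ','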
+local separator=comma*space^0
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-comma))^0)
+local pattern=spaces*Ct(value*(separator*value)^0)
+patterns.settings_to_array=pattern
+function parsers.settings_to_array(str,strict)
+ if not str or str=="" then
+ return {}
+ elseif type(str)=="table" then
+ return str
+ elseif strict then
+ if find(str,"{",1,true) then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
+ elseif find(str,",",1,true) then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
+end
+local cache_a={}
+local cache_b={}
+function parsers.groupedsplitat(symbol,withaction)
+ if not symbol then
+ symbol=","
+ end
+ local pattern=(withaction and cache_b or cache_a)[symbol]
+ if not pattern then
+ local symbols=S(symbol)
+ local separator=space^0*symbols*space^0
+ local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C((nestedbraces+(1-(space^0*(symbols+P(-1)))))^0)
+ if withaction then
+ local withvalue=Carg(1)*value/function(f,s) return f(s) end
+ pattern=spaces*withvalue*(separator*withvalue)^0
+ cache_b[symbol]=pattern
+ else
+ pattern=spaces*Ct(value*(separator*value)^0)
+ cache_a[symbol]=pattern
+ end
+ end
+ return pattern
+end
+local pattern_a=parsers.groupedsplitat(",",false)
+local pattern_b=parsers.groupedsplitat(",",true)
+function parsers.stripped_settings_to_array(str)
+ if not str or str=="" then
+ return {}
+ else
+ return lpegmatch(pattern_a,str)
+ end
+end
+function parsers.process_stripped_settings(str,action)
+ if not str or str=="" then
+ return {}
+ else
+ return lpegmatch(pattern_b,str,1,action)
+ end
+end
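-- A minimal usage sketch of the grouped splitters defined above; the input
-- strings are hypothetical.
local t = utilities.parsers.stripped_settings_to_array("alpha, beta, {gamma, delta}")
-- t == { "alpha", "beta", "gamma, delta" }: braces group, so the inner comma survives
utilities.parsers.process_stripped_settings("one, two", print)
-- calls print("one") and print("two")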
+local function set(t,v)
+ t[#t+1]=v
+end
+local value=P(Carg(1)*value)/set
+local pattern=value*(separator*value)^0*Carg(1)
+function parsers.add_settings_to_array(t,str)
+ return lpegmatch(pattern,str,nil,t)
+end
+function parsers.hash_to_string(h,separator,yes,no,strict,omit)
+ if h then
+ local t,tn,s={},0,sortedkeys(h)
+ omit=omit and tohash(omit)
+ for i=1,#s do
+ local key=s[i]
+ if not omit or not omit[key] then
+ local value=h[key]
+ if type(value)=="boolean" then
+ if yes and no then
+ if value then
+ tn=tn+1
+ t[tn]=key..'='..yes
+ elseif not strict then
+ tn=tn+1
+ t[tn]=key..'='..no
+ end
+ elseif value or not strict then
+ tn=tn+1
+ t[tn]=key..'='..tostring(value)
+ end
+ else
+ tn=tn+1
+ t[tn]=key..'='..value
+ end
+ end
+ end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
+end
+function parsers.array_to_string(a,separator)
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+local pattern=Cf(Ct("")*Cg(C((1-S(", "))^1)*S(", ")^0*Cc(true))^1,rawset)
+function utilities.parsers.settings_to_set(str,t)
+ return str and lpegmatch(pattern,str) or {}
+end
+function parsers.simple_hash_to_string(h,separator)
+ local t,tn={},0
+ for k,v in sortedhash(h) do
+ if v then
+ tn=tn+1
+ t[tn]=k
+ end
+ end
+ return concat(t,separator or ",")
+end
+local str=Cs(lpegpatterns.unquoted)+C((1-whitespace-equal)^1)
+local setting=Cf(Carg(1)*(whitespace^0*Cg(str*whitespace^0*(equal*whitespace^0*str+Cc(""))))^1,rawset)
+local splitter=setting^1
+function utilities.parsers.options_to_hash(str,target)
+ return str and lpegmatch(splitter,str,1,target or {}) or {}
+end
+local splitter=lpeg.tsplitat(" ")
+function utilities.parsers.options_to_array(str)
+ return str and lpegmatch(splitter,str) or {}
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+C(digit^1*lparent*(noparent+nestedparents)^1*rparent)+C((nestedbraces+(1-comma))^1)
+local pattern_a=spaces*Ct(value*(separator*value)^0)
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s=lpegmatch(pattern_a,str)
+ if n==1 then
+ return unpack(s)
+ else
+ local t,tn={},0
+ for i=1,n do
+ for j=1,#s do
+ tn=tn+1
+ t[tn]=s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+local value=P(lbrace*C((nobrace+nestedbraces)^0)*rbrace)+(C(digit^1)/tonumber*lparent*Cs((noparent+nestedparents)^1)*rparent)/repeater+C((nestedbraces+(1-comma))^1)
+local pattern_b=spaces*Ct(value*(separator*value)^0)
+function parsers.settings_to_array_with_repeat(str,expand)
+ if expand then
+ return lpegmatch(pattern_b,str) or {}
+ else
+ return lpegmatch(pattern_a,str) or {}
+ end
+end
+local value=lbrace*C((nobrace+nestedbraces)^0)*rbrace
+local pattern=Ct((space+value)^0)
+function parsers.arguments_to_table(str)
+ return lpegmatch(pattern,str)
+end
+function parsers.getparameters(self,class,parentclass,settings)
+ local sc=self[class]
+ if not sc then
+ sc={}
+ self[class]=sc
+ if parentclass then
+ local sp=self[parentclass]
+ if not sp then
+ sp={}
+ self[parentclass]=sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
+end
+function parsers.listitem(str)
+ return gmatch(str,"[^, ]+")
+end
+local pattern=Cs { "start",
+ start=V("one")+V("two")+V("three"),
+ rest=(Cc(",")*V("thousand"))^0*(P(".")+endofstring)*anything^0,
+ thousand=digit*digit*digit,
+ one=digit*V("rest"),
+ two=digit*digit*V("rest"),
+ three=V("thousand")*V("rest"),
+}
+lpegpatterns.splitthousands=pattern
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+local optionalwhitespace=whitespace^0
+lpegpatterns.words=Ct((Cs((1-punctuation-whitespace)^1)+anything)^1)
+lpegpatterns.sentences=Ct((optionalwhitespace*Cs((1-period)^0*period))^1)
+lpegpatterns.paragraphs=Ct((optionalwhitespace*Cs((whitespace^1*endofstring/""+1-(spacer^0*newline*newline))^1))^1)
+local dquote=P('"')
+local equal=P('=')
+local escape=P('\\')
+local separator=S(' ,')
+local key=C((1-equal)^1)
+local value=dquote*C((1-dquote-escape*dquote)^0)*dquote
+local pattern=Cf(Ct("")*(Cg(key*equal*value)*separator^0)^1,rawset)^0*P(-1)
+function parsers.keq_to_hash(str)
+ if str and str~="" then
+ return lpegmatch(pattern,str)
+ else
+ return {}
+ end
+end
+local defaultspecification={ separator=",",quote='"' }
+function parsers.csvsplitter(specification)
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local whatever=C((1-separator-newline)^0)
+ if quotechar and quotechar~="" then
+ local quotedata=nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar=P(chr)
+ local quoteword=quotechar*C((1-quotechar)^0)*quotechar
+ if quotedata then
+ quotedata=quotedata+quoteword
+ else
+ quotedata=quoteword
+ end
+ end
+ whatever=quotedata+whatever
+ end
+ local parser=Ct((Ct(whatever*(separator*whatever)^0)*S("\n\r")^1)^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
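-- A minimal usage sketch of parsers.csvsplitter defined above; the data string is hypothetical.
local split = utilities.parsers.csvsplitter { separator = ";", quote = '"' }
local rows  = split('aap;"noot;mies"\n1;2\n')
-- rows == { { "aap", "noot;mies" }, { "1", "2" } }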
+function parsers.rfc4180splitter(specification)
+ specification=specification and setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator=specification.separator
+ local quotechar=P(specification.quote)
+ local dquotechar=quotechar*quotechar/specification.quote
+ local separator=S(separator~="" and separator or ",")
+ local escaped=quotechar*Cs((dquotechar+(1-quotechar))^0)*quotechar
+ local non_escaped=C((1-quotechar-newline-separator)^1)
+ local field=escaped+non_escaped+Cc("")
+ local record=Ct(field*(separator*field)^1)
+ local headerline=record*Cp()
+ local wholeblob=Ct((newline^(specification.strict and -1 or 1)*record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header,position=lpegmatch(headerline,data)
+ local data=lpegmatch(wholeblob,data,position)
+ return data,header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
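-- A minimal usage sketch of parsers.rfc4180splitter defined above; the csv content is hypothetical.
local split = utilities.parsers.rfc4180splitter()    -- defaults: separator "," and quote '"'
local data, header = split('name,value\n"aap, noot",42\nmies,43\n', true)
-- header == { "name", "value" }
-- data   == { { "aap, noot", "42" }, { "mies", "43" } }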
+local function ranger(first,last,n,action)
+ if not first then
+ elseif last==true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+local cardinal=lpegpatterns.cardinal/tonumber
+local spacers=lpegpatterns.spacer^0
+local endofstring=lpegpatterns.endofstring
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1
+local stepper=spacers*(C(cardinal)*(spacers*S(":-")*spacers*(C(cardinal)+(P("*")+endofstring)*Cc(true) )+Cc(false) )*Carg(1)*Carg(2)/ranger*S(", ")^0 )^1*endofstring
+function parsers.stepper(str,n,action)
+ if type(n)=="function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+local pattern_math=Cs((P("%")/"\\percent "+P("^")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
+local pattern_text=Cs((P("%")/"\\percent "+(P("^")/"\\high")*Cc("{")*lpegpatterns.integer*Cc("}")+anything)^0)
+patterns.unittotex=pattern_math
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+local pattern=Cs((P("^")/"<sup>"*lpegpatterns.integer*Cc("</sup>")+anything)^0)
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
+local cache={}
+local spaces=lpegpatterns.space^0
+local dummy=function() end
+setmetatableindex(cache,function(t,k)
+ local separator=P(k)
+ local value=(1-separator)^0
+ local pattern=spaces*C(value)*separator^0*Cp()
+ t[k]=pattern
+ return pattern
+end)
+local commalistiterator=cache[","]
+function utilities.parsers.iterator(str,separator)
+ local n=#str
+ if n==0 then
+ return dummy
+ else
+ local pattern=separator and cache[separator] or commalistiterator
+ local p=1
+ return function()
+ if p<=n then
+ local s,e=lpegmatch(pattern,str,p)
+ if e then
+ p=e
+ return s
+ end
+ end
+ end
+ end
+end
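-- A minimal usage sketch of utilities.parsers.iterator defined above; the list string is hypothetical.
for word in utilities.parsers.iterator("alpha, beta, gamma") do
 print(word) -- "alpha", then "beta", then "gamma"
end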
+local function initialize(t,name)
+ local source=t[name]
+ if source then
+ local result={}
+ for k,v in next,t[name] do
+ result[k]=v
+ end
+ return result
+ else
+ return {}
+ end
+end
+local function fetch(t,name)
+ return t[name] or {}
+end
+local function process(result,more)
+ for k,v in next,more do
+ result[k]=v
+ end
+ return result
+end
+local name=C((1-S(", "))^1)
+local parser=(Carg(1)*name/initialize)*(S(", ")^1*(Carg(1)*name/fetch))^0
+local merge=Cf(parser,process)
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
+end
+function utilities.parsers.runtime(time)
+ if not time then
+ time=os.runtime()
+ end
+ local days=div(time,24*60*60)
+ time=mod(time,24*60*60)
+ local hours=div(time,60*60)
+ time=mod(time,60*60)
+ local minutes=div(time,60)
+ local seconds=mod(time,60)
+ return days,hours,minutes,seconds
+end
+local spacing=whitespace^0
+local apply=P("->")
+local method=C((1-apply)^1)
+local token=lbrace*C((1-rbrace)^1)*rbrace+C(anything^1)
+local pattern=spacing*(method*spacing*apply+Carg(1))*spacing*token
+function utilities.parsers.splitmethod(str,default)
+ if str then
+ return lpegmatch(pattern,str,1,default or false)
+ else
+ return default or false,""
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-fmt"] = package.loaded["util-fmt"] or true
+
+-- original size: 2274, stripped down to: 1781
+
+if not modules then modules={} end modules ['util-fmt']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.formatters=utilities.formatters or {}
+local formatters=utilities.formatters
+local concat,format=table.concat,string.format
+local tostring,type=tostring,type
+local strip=string.strip
+local lpegmatch=lpeg.match
+local stripper=lpeg.patterns.stripzeros
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
+end
+function formatters.formatcolumns(result,between)
+ if result and #result>0 then
+ between=between or " "
+ local widths,numbers={},{}
+ local first=result[1]
+ local n=#first
+ for i=1,n do
+ widths[i]=0
+ end
+ for i=1,#result do
+ local r=result[i]
+ for j=1,n do
+ local rj=r[j]
+ local tj=type(rj)
+ if tj=="number" then
+ numbers[j]=true
+ end
+ if tj~="string" then
+ rj=tostring(rj)
+ r[j]=rj
+ end
+ local w=#rj
+ if w>widths[j] then
+ widths[j]=w
+ end
+ end
+ end
+ for i=1,n do
+ local w=widths[i]
+ if numbers[i] then
+ if w>80 then
+ widths[i]="%s"..between
+ else
+ widths[i]="%0"..w.."i"..between
+ end
+ else
+ if w>80 then
+ widths[i]="%s"..between
+ elseif w>0 then
+ widths[i]="%-"..w.."s"..between
+ else
+ widths[i]="%s"
+ end
+ end
+ end
+ local template=strip(concat(widths))
+ for i=1,#result do
+ local str=format(template,unpack(result[i]))
+ result[i]=strip(str)
+ end
+ end
+ return result
+end
+
+
+end -- of closure
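-- A minimal usage sketch of formatters.formatcolumns defined above; the rows are hypothetical.
local rows = {
 { "alpha", 1, "first"  },
 { "beta",  2, "second" },
}
utilities.formatters.formatcolumns(rows) -- pads every column to its widest entry, in place
print(rows[1]) -- "alpha 1 first"
print(rows[2]) -- "beta  2 second"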
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-set"] = package.loaded["trac-set"] or true
+
+-- original size: 12482, stripped down to: 8864
+
+if not modules then modules={} end modules ['trac-set']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring=type,next,tostring
+local concat=table.concat
+local format,find,lower,gsub,topattern=string.format,string.find,string.lower,string.gsub,string.topattern
+local is_boolean=string.is_boolean
+local settings_to_hash=utilities.parsers.settings_to_hash
+local allocate=utilities.storage.allocate
+utilities=utilities or {}
+local utilities=utilities
+local setters=utilities.setters or {}
+utilities.setters=setters
+local data={}
+local trace_initialize=false
+function setters.initialize(filename,name,values)
+ local setter=data[name]
+ if setter then
+  local frozen=true
+ local data=setter.data
+ if data then
+ for key,newvalue in next,values do
+ local newvalue=is_boolean(newvalue,newvalue,true)
+ local functions=data[key]
+ if functions then
+ local oldvalue=functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
+ end
+ elseif #functions>0 and not oldvalue then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
+ end
+ for i=1,#functions do
+ functions[i](newvalue)
+ end
+ functions.value=newvalue
+ functions.frozen=functions.frozen or frozen
+ else
+ if trace_initialize then
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
+ end
+ end
+ else
+ functions={ default=newvalue,frozen=frozen }
+ data[key]=functions
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
+ end
+ end
+ end
+ return true
+ end
+ end
+end
+local function set(t,what,newvalue)
+ local data=t.data
+ if not data.frozen then
+ local done=t.done
+ if type(what)=="string" then
+ what=settings_to_hash(what)
+ end
+ if type(what)~="table" then
+ return
+ end
+ if not done then
+ done={}
+ t.done=done
+ end
+ for w,value in next,what do
+ if value=="" then
+ value=newvalue
+ elseif not value then
+ value=false
+ else
+ value=is_boolean(value,value,true)
+ end
+ w=topattern(w,true,true)
+ for name,functions in next,data do
+ if done[name] then
+ elseif find(name,w) then
+ done[name]=true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value=value
+ end
+ end
+ end
+ end
+end
+local function reset(t)
+ local data=t.data
+ if not data.frozen then
+ for name,functions in next,data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value=false
+ end
+ end
+end
+local function enable(t,what)
+ set(t,what,true)
+end
+local function disable(t,what)
+ local data=t.data
+ if not what or what=="" then
+ t.done={}
+ reset(t)
+ else
+ set(t,what,false)
+ end
+end
+function setters.register(t,what,...)
+ local data=t.data
+ what=lower(what)
+ local functions=data[what]
+ if not functions then
+ functions={}
+ data[what]=functions
+ if trace_initialize then
+ t.report("defining %a",what)
+ end
+ end
+ local default=functions.default
+ for i=1,select("#",...) do
+ local fnc=select(i,...)
+ local typ=type(fnc)
+ if typ=="string" then
+ if trace_initialize then
+ t.report("coupling %a to %a",what,fnc)
+ end
+ local s=fnc
+ fnc=function(value) set(t,s,value) end
+ elseif typ~="function" then
+ fnc=nil
+ end
+ if fnc then
+ functions[#functions+1]=fnc
+ local value=functions.value or default
+ if value~=nil then
+ fnc(value)
+ functions.value=value
+ end
+ end
+ end
+ return false
+end
+function setters.enable(t,what)
+ local e=t.enable
+ t.enable,t.done=enable,{}
+ enable(t,what)
+ t.enable,t.done=e,{}
+end
+function setters.disable(t,what)
+ local e=t.disable
+ t.disable,t.done=disable,{}
+ disable(t,what)
+ t.disable,t.done=e,{}
+end
+function setters.reset(t)
+ t.done={}
+ reset(t)
+end
+function setters.list(t)
+ local list=table.sortedkeys(t.data)
+ local user,system={},{}
+ for l=1,#list do
+ local what=list[l]
+ if find(what,"^%*") then
+ system[#system+1]=what
+ else
+ user[#user+1]=what
+ end
+ end
+ return user,system
+end
+function setters.show(t)
+ local category=t.name
+ local list=setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name=list[k]
+ local functions=t.data[name]
+ if functions then
+ local value,default,modules=functions.value,functions.default,#functions
+ value=value==nil and "unset" or tostring(value)
+ default=default==nil and "unset" or tostring(default)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+local enable,disable,register,list,show=setters.enable,setters.disable,setters.register,setters.list,setters.show
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+local function default(setter,name)
+ local d=setter.data[name]
+ return d and d.default
+end
+local function value(setter,name)
+ local d=setter.data[name]
+ return d and (d.value or d.default)
+end
+function setters.new(name)
+ local setter
+ setter={
+ data=allocate(),
+ name=name,
+ report=function(...) setters.report (setter,...) end,
+ enable=function(...) enable (setter,...) end,
+ disable=function(...) disable (setter,...) end,
+ reset=function(...) reset (setter,...) end,
+ register=function(...) register(setter,...) end,
+ list=function(...) list (setter,...) end,
+ show=function(...) show (setter,...) end,
+ default=function(...) return default (setter,...) end,
+ value=function(...) return value (setter,...) end,
+ }
+ data[name]=setter
+ return setter
+end
+trackers=setters.new("trackers")
+directives=setters.new("directives")
+experiments=setters.new("experiments")
+local t_enable,t_disable=trackers .enable,trackers .disable
+local d_enable,d_disable=directives .enable,directives .disable
+local e_enable,e_disable=experiments.enable,experiments.disable
+local trace_directives=false trackers.register("system.directives",function(v) trace_directives=v end)
+local trace_experiments=false trackers.register("system.experiments",function(v) trace_experiments=v end)
+function directives.enable(...)
+ if trace_directives then
+ directives.report("enabling: % t",{...})
+ end
+ d_enable(...)
+end
+function directives.disable(...)
+ if trace_directives then
+ directives.report("disabling: % t",{...})
+ end
+ d_disable(...)
+end
+function experiments.enable(...)
+ if trace_experiments then
+ experiments.report("enabling: % t",{...})
+ end
+ e_enable(...)
+end
+function experiments.disable(...)
+ if trace_experiments then
+ experiments.report("disabling: % t",{...})
+ end
+ e_disable(...)
+end
+directives.register("system.nostatistics",function(v)
+ if statistics then
+ statistics.enable=not v
+ else
+ end
+end)
+directives.register("system.nolibraries",function(v)
+ if libraries then
+ libraries=nil
+ else
+ end
+end)
+if environment then
+ local engineflags=environment.engineflags
+ if engineflags then
+ local list=engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
+ end
+ local list=engineflags["c:directives"] or engineflags["directives"]
+ if type(list)=="string" then
+ setters.initialize("commandline flags","directives",settings_to_hash(list))
+ end
+ end
+end
+if texconfig then
+ local function set(k,v)
+ v=tonumber(v)
+ if v then
+ texconfig[k]=v
+ end
+ end
+ directives.register("luatex.expanddepth",function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra",function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize",function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen",function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline",function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings",function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize",function(v) set("param_size",v) end)
+ directives.register("luatex.savesize",function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize",function(v) set("stack_size",v) end)
+end
+
+
+end -- of closure
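-- A minimal usage sketch of the setter mechanism defined above; the tracker and
-- directive names are hypothetical.
local trace_example = false
trackers.register("example.detail", function(v) trace_example = v end)
trackers.enable("example.detail")       -- trace_example is now true
directives.register("example.mode", function(v) print("mode:", v) end)
directives.enable("example.mode=draft") -- the registered function receives "draft"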
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-log"] = package.loaded["trac-log"] or true
+
+-- original size: 29359, stripped down to: 20483
+
+if not modules then modules={} end modules ['trac-log']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type,select,print=next,type,select,print
+local write_nl,write=texio and texio.write_nl or print,texio and texio.write or io.write
+local format,gmatch,find=string.format,string.gmatch,string.find
+local concat,insert,remove=table.concat,table.insert,table.remove
+local topattern=string.topattern
+local utfchar=utf.char
+local datetime=os.date
+local openfile=io.open
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local texgetcount=tex and tex.getcount
+local variant="default"
+logs=logs or {}
+local logs=logs
+local moreinfo=[[
+More information about ConTeXt and the tools that come with it can be found at:
+]].."\n"..[[
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+formatters.add (
+ formatters,"unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+formatters.add (
+ formatters,"chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
+local function ignore() end
+setmetatableindex(logs,function(t,k) t[k]=ignore;return ignore end)
+local report,subreport,status,settarget,setformats,settranslations
+local direct,subdirect,writer,pushtarget,poptarget,setlogfile,settimedlog,setprocessor,setformatters,newline
+if tex and (tex.jobname or tex.formatname) then
+ local function useluawrites()
+ local texio_write_nl=texio.write_nl
+ local texio_write=texio.write
+ local io_write=io.write
+ write_nl=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write_nl("log",...)
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target=="log" then
+ texio_write_nl("log",...)
+ elseif target=="term" then
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target~="none" then
+ texio_write_nl("log",target,...)
+ texio_write_nl("term","")
+ io_write(target,...)
+ end
+ end
+ write=function(target,...)
+ if not io_write then
+ io_write=io.write
+ end
+ if target=="term and log" then
+ texio_write("log",...)
+ io_write(...)
+ elseif target=="log" then
+ texio_write("log",...)
+ elseif target=="term" then
+ io_write(...)
+ elseif target~="none" then
+ texio_write("log",target,...)
+ io_write(target,...)
+ end
+ end
+ texio.write=write
+ texio.write_nl=write_nl
+ useluawrites=ignore
+ end
+ local whereto="both"
+ local target=nil
+ local targets=nil
+ local formats=table.setmetatableindex("self")
+ local translations=table.setmetatableindex("self")
+ local report_yes,subreport_yes,direct_yes,subdirect_yes,status_yes
+ local report_nop,subreport_nop,direct_nop,subdirect_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="log",
+ log="log",
+ file="log",
+ console="term",
+ terminal="term",
+ both="term and log",
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s > %s\n"],
+ report_nop=formatters["%-15s >\n"],
+ direct_yes=formatters["%-15s > %s"],
+ direct_nop=formatters["%-15s >"],
+ subreport_yes=formatters["%-15s > %s > %s\n"],
+ subreport_nop=formatters["%-15s > %s >\n"],
+ subdirect_yes=formatters["%-15s > %s > %s"],
+ subdirect_nop=formatters["%-15s > %s >"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ targets={
+ logfile="none",
+ log="none",
+ file="none",
+ console="term",
+ terminal="term",
+ both="term",
+ },
+ }
+ }
+ logs.flush=io.flush
+ writer=function(...)
+ write_nl(target,...)
+ end
+ newline=function()
+ write_nl(target,"\n")
+ end
+ report=function(a,b,c,...)
+ if c then
+ write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,report_yes(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,report_nop(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ direct=function(a,b,c,...)
+ if c then
+ return direct_yes(translations[a],formatters[formats[b]](c,...))
+ elseif b then
+ return direct_yes(translations[a],formats[b])
+ elseif a then
+ return direct_nop(translations[a])
+ else
+ return ""
+ end
+ end
+ subreport=function(a,s,b,c,...)
+ if c then
+ write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,subreport_yes(translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,subreport_nop(translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ subdirect=function(a,s,b,c,...)
+ if c then
+ return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
+ elseif b then
+ return subdirect_yes(translations[a],translations[s],formats[b])
+ elseif a then
+ return subdirect_nop(translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+ status=function(a,b,c,...)
+ if c then
+ write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,status_yes(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,status_nop(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+ settarget=function(askedwhereto)
+ whereto=askedwhereto or whereto or "both"
+ target=targets[whereto]
+ if not target then
+ whereto="both"
+ target=targets[whereto]
+ end
+ if target=="term" or target=="term and log" then
+ logs.flush=io.flush
+ else
+ logs.flush=ignore
+ end
+ end
+ local stack={}
+ pushtarget=function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+ poptarget=function()
+ if #stack>0 then
+ settarget(remove(stack))
+ end
+ end
+ setformats=function(f)
+ formats=f
+ end
+ settranslations=function(t)
+ translations=t
+ end
+ setprocessor=function(f)
+ local writeline=write_nl
+ write_nl=function(target,...)
+ writeline(target,f(...))
+ end
+ end
+ setformatters=function(specification)
+ local t=nil
+ local f=nil
+ local d=variants.default
+ if not specification then
+ elseif type(specification)=="table" then
+ t=specification.targets
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ t=v.targets
+ f=v.formats
+ variant=specification
+ end
+ end
+ targets=t or d.targets
+ target=targets[whereto] or target
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ direct_yes=f.direct_yes
+ direct_nop=f.direct_nop
+ subdirect_yes=f.subdirect_yes
+ subdirect_nop=f.subdirect_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ if variant=="ansi" then
+ useluawrites()
+ end
+ settarget(whereto)
+ end
+ setformatters(variant)
+ setlogfile=ignore
+ settimedlog=ignore
+else
+ local report_yes,subreport_yes,status_yes
+ local report_nop,subreport_nop,status_nop
+ local variants={
+ default={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ ansi={
+ formats={
+ report_yes=formatters["%-15s | %s"],
+ report_nop=formatters["%-15s |"],
+ subreport_yes=formatters["%-15s | %s | %s"],
+ subreport_nop=formatters["%-15s | %s |"],
+ status_yes=formatters["%-15s : %s\n"],
+ status_nop=formatters["%-15s :\n"],
+ },
+ },
+ }
+ logs.flush=ignore
+ writer=function(s)
+ write_nl(s)
+ end
+ newline=function()
+ write_nl("\n")
+ end
+ report=function(a,b,c,...)
+ if c then
+ write_nl(report_yes(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(report_yes(a,b))
+ elseif a then
+ write_nl(report_nop(a))
+ else
+ write_nl("")
+ end
+ end
+ subreport=function(a,sub,b,c,...)
+ if c then
+ write_nl(subreport_yes(a,sub,formatters[b](c,...)))
+ elseif b then
+ write_nl(subreport_yes(a,sub,b))
+ elseif a then
+ write_nl(subreport_nop(a,sub))
+ else
+ write_nl("")
+ end
+ end
+ status=function(a,b,c,...)
+ if c then
+ write_nl(status_yes(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(status_yes(a,b))
+ elseif a then
+ write_nl(status_nop(a))
+ else
+ write_nl("\n")
+ end
+ end
+ direct=ignore
+ subdirect=ignore
+ settarget=ignore
+ pushtarget=ignore
+ poptarget=ignore
+ setformats=ignore
+ settranslations=ignore
+ setprocessor=function(f)
+ local writeline=write_nl
+ write_nl=function(s)
+ writeline(f(s))
+ end
+ end
+ setformatters=function(specification)
+ local f=nil
+ local d=variants.default
+ if specification then
+ if type(specification)=="table" then
+ f=specification.formats or specification
+ else
+ local v=variants[specification]
+ if v then
+ f=v.formats
+ end
+ end
+ end
+ if f then
+ d=d.formats
+ else
+ f=d.formats
+ d=f
+ end
+ setmetatableindex(f,d)
+ report_yes=f.report_yes
+ report_nop=f.report_nop
+ subreport_yes=f.subreport_yes
+ subreport_nop=f.subreport_nop
+ status_yes=f.status_yes
+ status_nop=f.status_nop
+ end
+ setformatters(variant)
+ setlogfile=function(name,keepopen)
+ if name and name~="" then
+ local localtime=os.localtime
+ local writeline=write_nl
+ if keepopen then
+ local f=io.open(name,"ab")
+ write_nl=function(s)
+ writeline(s)
+ f:write(localtime()," | ",s,"\n")
+ end
+ else
+ write_nl=function(s)
+ writeline(s)
+ local f=io.open(name,"ab")
+ f:write(localtime()," | ",s,"\n")
+ f:close()
+ end
+ end
+ end
+ setlogfile=ignore
+ end
+ settimedlog=function()
+ local localtime=os.localtime
+ local writeline=write_nl
+ write_nl=function(s)
+ writeline(localtime().." | "..s)
+ end
+ settimedlog=ignore
+ end
+end
+logs.report=report
+logs.subreport=subreport
+logs.status=status
+logs.settarget=settarget
+logs.pushtarget=pushtarget
+logs.poptarget=poptarget
+logs.setformats=setformats
+logs.settranslations=settranslations
+logs.setlogfile=setlogfile
+logs.settimedlog=settimedlog
+logs.setprocessor=setprocessor
+logs.setformatters=setformatters
+logs.direct=direct
+logs.subdirect=subdirect
+logs.writer=writer
+logs.newline=newline
+local data,states={},nil
+function logs.reporter(category,subcategory)
+ local logger=data[category]
+ if not logger then
+ local state=false
+ if states==true then
+ state=true
+ elseif type(states)=="table" then
+ for c,_ in next,states do
+ if find(category,c) then
+ state=true
+ break
+ end
+ end
+ end
+ logger={
+ reporters={},
+ state=state,
+ }
+ data[category]=logger
+ end
+ local reporter=logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter=function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
+ end
+ end
+ logger.reporters[subcategory]=reporter
+ else
+ local tag=category
+ reporter=function(...)
+ if not logger.state then
+ report(category,...)
+ end
+ end
+ logger.reporters.default=reporter
+ end
+ end
+ return reporter
+end
+logs.new=logs.reporter
+local ctxreport=logs.writer
+function logs.setmessenger(m)
+ ctxreport=m
+end
+function logs.messenger(category,subcategory)
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
+ end
+ end
+end
+local function setblocked(category,value)
+ if category==true then
+ category,value="*",true
+ elseif category==false then
+ category,value="*",false
+ elseif value==nil then
+ value=true
+ end
+ if category=="*" then
+ states=value
+ for k,v in next,data do
+ v.state=value
+ end
+ else
+ states=utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
+ for c,_ in next,states do
+ local v=data[c]
+ if v then
+ v.state=value
+ else
+ c=topattern(c,true,true)
+ for k,v in next,data do
+ if find(k,c) then
+ v.state=value
+ end
+ end
+ end
+ end
+ end
+end
+function logs.disable(category,value)
+ setblocked(category,value==nil and true or value)
+end
+function logs.enable(category)
+ setblocked(category,false)
+end
+function logs.categories()
+ return table.sortedkeys(data)
+end
+function logs.show()
+ local n,c,s,max=0,0,0,0
+ for category,v in table.sortedpairs(data) do
+ n=n+1
+ local state=v.state
+ local reporters=v.reporters
+ local nc=#category
+ if nc>c then
+ c=nc
+ end
+ for subcategory,_ in next,reporters do
+ local ns=#subcategory
+   if ns>s then
+ s=ns
+ end
+ local m=nc+ns
+ if m>max then
+ max=m
+ end
+ end
+ local subcategories=concat(table.sortedkeys(reporters),", ")
+ if state==true then
+ state="disabled"
+ elseif state==false then
+ state="enabled"
+ else
+ state="unknown"
+ end
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+end
+local delayed_reporters={}
+setmetatableindex(delayed_reporters,function(t,k)
+ local v=logs.reporter(k.name)
+ t[k]=v
+ return v
+end)
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+directives.register("logs.blocked",function(v)
+ setblocked(v,true)
+end)
+directives.register("logs.target",function(v)
+ settarget(v)
+end)
+local report_pages=logs.reporter("pages")
+local real,user,sub
+function logs.start_page_number()
+ real=texgetcount("realpageno")
+ user=texgetcount("userpageno")
+ sub=texgetcount("subpageno")
+end
+local timing=false
+local starttime=nil
+local lasttime=nil
+trackers.register("pages.timing",function(v)
+ starttime=os.clock()
+ timing=true
+end)
+function logs.stop_page_number()
+ if timing then
+ local elapsed,average
+ local stoptime=os.clock()
+ if not lasttime or real<2 then
+ elapsed=stoptime
+ average=stoptime
+ starttime=stoptime
+ else
+ elapsed=stoptime-lasttime
+ average=(stoptime-starttime)/(real-1)
+ end
+ lasttime=stoptime
+ if real<=0 then
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user<=0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
+ else
+ if real<=0 then
+ report_pages("flushing page")
+ elseif user<=0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub<=0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ end
+ end
+ logs.flush()
+end
+local report_files=logs.reporter("files")
+local nesting=0
+local verbose=false
+local hasscheme=url.hasscheme
+function logs.show_open(name)
+end
+function logs.show_close(name)
+end
+function logs.show_load(name)
+end
+local simple=logs.reporter("comment")
+logs.simple=simple
+logs.simpleline=simple
+logs.setprogram=ignore
+logs.extendbanner=ignore
+logs.reportlines=ignore
+logs.reportbanner=ignore
+logs.reportline=ignore
+logs.simplelines=ignore
+logs.help=ignore
+local Carg,C,lpegmatch=lpeg.Carg,lpeg.C,lpeg.match
+local p_newline=lpeg.patterns.newline
+local linewise=(
+ Carg(1)*C((1-p_newline)^1)/function(t,s) t.report(s) end+Carg(1)*p_newline^2/function(t) t.report() end+p_newline
+)^1
+local function reportlines(t,str)
+ if str then
+ lpegmatch(linewise,str,1,t)
+ end
+end
+local function reportbanner(t)
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
+end
+local function reportversion(t)
+ local banner=t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+local function reporthelp(t,...)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo)=="table" then
+  local n=select("#",...)
+  for i=1,n do
+   reportlines(t,t.helpinfo[select(i,...)])
+   if i<n then
+ t.report()
+ end
+ end
+ end
+end
+local function reportinfo(t)
+ t.report()
+ reportlines(t,t.moreinfo)
+end
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+local reporters={
+ lines=reportlines,
+ banner=reportbanner,
+ version=reportversion,
+ help=reporthelp,
+ info=reportinfo,
+ export=reportexport,
+}
+local exporters={
+}
+logs.reporters=reporters
+logs.exporters=exporters
+function logs.application(t)
+ t.name=t.name or "unknown"
+ t.banner=t.banner
+ t.moreinfo=moreinfo
+ t.report=logs.reporter(t.name)
+ t.help=function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export=function(...)
+ reporters.export(t,...)
+ end
+ t.identify=function()
+ reporters.banner(t)
+ end
+ t.version=function()
+ reporters.version(t)
+ end
+ return t
+end
+local f_syslog=formatters["%s %s => %s => %s => %s\r"]
+function logs.system(whereto,process,jobname,category,fmt,arg,...)
+ local message=f_syslog(datetime("%d/%m/%y %H:%M:%S"),process,jobname,category,arg==nil and fmt or format(fmt,arg,...))
+ for i=1,10 do
+ local f=openfile(whereto,"a")
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+   os.sleep(0.1)
+ end
+ end
+end
+local report_system=logs.reporter("system","logs")
+function logs.obsolete(old,new)
+ local o=loadstring("return "..new)()
+ if type(o)=="function" then
+ return function(...)
+ report_system("function %a is obsolete, use %a",old,new)
+ loadstring(old.."="..new.." return "..old)()(...)
+ end
+ elseif type(o)=="table" then
+ local t,m={},{}
+ m.__index=function(t,k)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ return o[k]
+ end
+ m.__newindex=function(t,k,v)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index,m.__newindex=o,o
+ o[k]=v
+ end
+ if libraries then
+ libraries.obsolete[old]=t
+ end
+ setmetatable(t,m)
+ return t
+ end
+end
+if utilities then
+ utilities.report=report_system
+end
+if tex and tex.error then
+ function logs.texerrormessage(...)
+ tex.error(format(...),{})
+ end
+else
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
+end
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+if package.helpers.report then
+ package.helpers.report=logs.reporter("package loader")
+end
+
+
+end -- of closure
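-- A minimal usage sketch of the logging layer defined above; the category and
-- message texts are hypothetical.
local report_demo = logs.reporter("demo")
local report_sub  = logs.reporter("demo", "fonts")
report_demo("loading %a", "somefile.tex") -- output is prefixed with the category "demo"
report_sub("found %s glyphs", 123)        -- prefixed with category and subcategory
logs.disable("demo")                      -- silences both reporters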
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-inf"] = package.loaded["trac-inf"] or true
+
+-- original size: 6704, stripped down to: 5343
+
+if not modules then modules={} end modules ['trac-inf']={
+ version=1.001,
+ comment="companion to trac-inf.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber,select=type,tonumber,select
+local format,lower,find=string.format,string.lower,string.find
+local concat=table.concat
+local clock=os.gettimeofday or os.clock
+local setmetatableindex=table.setmetatableindex
+local serialize=table.serialize
+local formatters=string.formatters
+statistics=statistics or {}
+local statistics=statistics
+statistics.enable=true
+statistics.threshold=0.01
+local statusinfo,n,registered,timers={},0,{},{}
+setmetatableindex(timers,function(t,k)
+ local v={ timing=0,loadtime=0 }
+ t[k]=v
+ return v
+end)
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+local function resettiming(instance)
+ timers[instance or "notimer"]={ timing=0,loadtime=0 }
+end
+local function starttiming(instance)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing or 0
+ if it==0 then
+ timer.starttime=clock()
+ if not timer.loadtime then
+ timer.loadtime=0
+ end
+ end
+ timer.timing=it+1
+end
+local function stoptiming(instance)
+ local timer=timers[instance or "notimer"]
+ local it=timer.timing
+ if it>1 then
+ timer.timing=it-1
+ else
+ local starttime=timer.starttime
+ if starttime then
+ local stoptime=clock()
+ local loadtime=stoptime-starttime
+ timer.stoptime=stoptime
+ timer.loadtime=timer.loadtime+loadtime
+ timer.timing=0
+ return loadtime
+ end
+ end
+ return 0
+end
+local function elapsed(instance)
+ if type(instance)=="number" then
+ return instance or 0
+ else
+ local timer=timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+local function elapsedtime(instance)
+ return format("%0.3f",elapsed(instance))
+end
+local function elapsedindeed(instance)
+ return elapsed(instance)>statistics.threshold
+end
+local function elapsedseconds(instance,rest)
+ if elapsedindeed(instance) then
+ return format("%0.3f seconds %s",elapsed(instance),rest or "")
+ end
+end
+statistics.hastiming=hastiming
+statistics.resettiming=resettiming
+statistics.starttiming=starttiming
+statistics.stoptiming=stoptiming
+statistics.elapsed=elapsed
+statistics.elapsedtime=elapsedtime
+statistics.elapsedindeed=elapsedindeed
+statistics.elapsedseconds=elapsedseconds
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc)=="function" then
+ local rt=registered[tag] or (#statusinfo+1)
+ statusinfo[rt]={ tag,fnc }
+ registered[tag]=rt
+ if #tag>n then n=#tag end
+ end
+end
+local report=logs.reporter("mkiv lua stats")
+function statistics.show()
+ if statistics.enable then
+ local register=statistics.register
+ register("used platform",function()
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown",environment.texos or "unknown")
+ end)
+ register("luatex banner",function()
+ return lower(status.banner)
+ end)
+ register("control sequences",function()
+ return format("%s of %s + %s",status.cs_count,status.hash_size,status.hash_extra)
+ end)
+ register("callbacks",function()
+ local total,indirect=status.callbacks or 0,status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total",total-indirect,indirect,total)
+ end)
+ if jit then
+ local jitstatus={ jit.status() }
+ if jitstatus[1] then
+ register("luajit options",concat(jitstatus," ",2))
+ end
+ end
+ register("lua properties",function()
+ local list=status.list()
+ local hashchar=tonumber(list.luatex_hashchars)
+ local mask=lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask=="utf" and "τεχ" or "tex")
+ end)
+ register("runtime",statistics.runtime)
+ logs.newline()
+ for i=1,#statusinfo do
+ local s=statusinfo[i]
+ local r=s[2]()
+ if r then
+ report("%s: %s",s[1],r)
+ end
+ end
+ statistics.enable=false
+ end
+end
+function statistics.memused()
+ local round=math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000),round(status.luastate_bytes/1000000))
+end
+starttiming(statistics)
+function statistics.formatruntime(runtime)
+ return format("%s seconds",runtime)
+end
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+local report=logs.reporter("system")
+function statistics.timed(action)
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s seconds",elapsedtime("run"))
+end
+function statistics.tracefunction(base,tag,...)
+ for i=1,select("#",...) do
+ local name=select(i,...)
+ local stat={}
+ local func=base[name]
+ setmetatableindex(stat,function(t,k) t[k]=0 return 0 end)
+ base[name]=function(n,k,v) stat[k]=stat[k]+1 return func(n,k,v) end
+ statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
+ end
+end
+
+
+end -- of closure
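-- A minimal usage sketch of the timing helpers defined above; the instance name is hypothetical.
statistics.starttiming("demo")
for i = 1, 1e6 do end -- some work
statistics.stoptiming("demo")
print(statistics.elapsedtime("demo")) -- elapsed time formatted as "%0.3f"
statistics.register("demo run", function()
 return statistics.elapsedseconds("demo") -- reported later by statistics.show()
end)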
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-pro"] = package.loaded["trac-pro"] or true
+
+-- original size: 5829, stripped down to: 3501
+
+if not modules then modules={} end modules ['trac-pro']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local getmetatable,setmetatable,rawset,type=getmetatable,setmetatable,rawset,type
+local trace_namespaces=false trackers.register("system.namespaces",function(v) trace_namespaces=v end)
+local report_system=logs.reporter("system","protection")
+namespaces=namespaces or {}
+local namespaces=namespaces
+local registered={}
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
+ else
+ report_system("reference to %a in protected namespace %a",k,name)
+ end
+end
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
+ else
+ report_system("assignment to %a in protected namespace %a",k,name)
+ end
+end
+local function register(name)
+ local data=name=="global" and _G or _G[name]
+ if not data then
+ return
+ end
+ registered[name]=data
+ local m=getmetatable(data)
+ if not m then
+ m={}
+ setmetatable(data,m)
+ end
+ local index,newindex={},{}
+ m.__saved__index=m.__index
+ m.__no__index=function(t,k)
+ if not index[k] then
+ index[k]=true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex=m.__newindex
+ m.__no__newindex=function(t,k,v)
+ if not newindex[k] then
+ newindex[k]=true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth=0
+end
+local function private(name)
+ local data=registered[name]
+ if not data then
+ data=_G[name]
+ if not data then
+ data={}
+ _G[name]=data
+ end
+ register(name)
+ end
+ return data
+end
+local function protect(name)
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>0 then
+ m.__protection__depth=pd+1
+ else
+  m.__saved__index,m.__saved__newindex=m.__index,m.__newindex
+ m.__index,m.__newindex=m.__no__index,m.__no__newindex
+ m.__protection__depth=1
+ end
+end
+local function unprotect(name)
+ local data=registered[name]
+ if not data then
+ return
+ end
+ local m=getmetatable(data)
+ local pd=m.__protection__depth
+ if pd>1 then
+ m.__protection__depth=pd-1
+ else
+ m.__index,m.__newindex=m.__saved__index,m.__saved__newindex
+ m.__protection__depth=0
+ end
+end
+local function protectall()
+ for name,_ in next,registered do
+ if name~="global" then
+ protect(name)
+ end
+ end
+end
+local function unprotectall()
+ for name,_ in next,registered do
+ if name~="global" then
+ unprotect(name)
+ end
+ end
+end
+namespaces.register=register
+namespaces.private=private
+namespaces.protect=protect
+namespaces.unprotect=unprotect
+namespaces.protectall=protectall
+namespaces.unprotectall=unprotectall
+namespaces.private("namespaces") registered={} register("global")
+directives.register("system.protect",function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+directives.register("system.checkglobals",function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
+
+
+end -- of closure
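-- A minimal usage sketch of the namespace protection defined above; the
-- namespace name is hypothetical.
demo = { answer = 42 } -- a global table to guard
namespaces.register("demo")
namespaces.protect("demo")
local x = demo.missing -- reported as a reference to an unknown key, returns nil
demo.other = 1         -- reported as an assignment, but still stored via rawset
namespaces.unprotect("demo")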
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lua"] = package.loaded["util-lua"] or true
+
+-- original size: 4982, stripped down to: 3511
+
+if not modules then modules={} end modules ['util-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment="the strip code is written by Peter Cawley",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rep,sub,byte,dump,format=string.rep,string.sub,string.byte,string.dump,string.format
+local load,loadfile,type=load,loadfile,type
+utilities=utilities or {}
+utilities.lua=utilities.lua or {}
+local luautilities=utilities.lua
+local report_lua=logs.reporter("system","lua")
+local tracestripping=false
+local forcestupidcompile=true
+luautilities.stripcode=true
+luautilities.alwaysstripcode=false
+luautilities.nofstrippedchunks=0
+luautilities.nofstrippedbytes=0
+local strippedchunks={}
+luautilities.strippedchunks=strippedchunks
+luautilities.suffixes={
+ tma="tma",
+ tmc=jit and "tmb" or "tmc",
+ lua="lua",
+ luc=jit and "lub" or "luc",
+ lui="lui",
+ luv="luv",
+ luj="luj",
+ tua="tua",
+ tuc="tuc",
+}
+local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
+ end
+ strippedchunks[#strippedchunks+1]=name
+ luautilities.nofstrippedchunks=luautilities.nofstrippedchunks+1
+end
+local function stupidcompile(luafile,lucfile,strip)
+ local code=io.loaddata(luafile)
+ if code and code~="" then
+ code=load(code)
+ if code then
+ code=dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code~="" then
+    register(luafile)
+ io.savedata(lucfile,code)
+ return true,0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false,0
+end
+function luautilities.loadedluacode(fullname,forcestrip,name)
+ name=name or fullname
+ local code,message
+ if environment.loadpreprocessedfile then
+ code,message=environment.loadpreprocessedfile(fullname)
+ else
+ code,message=loadfile(fullname)
+ end
+ if code then
+ code()
+ else
+ report_lua("loading of file %a failed:\n\t%s",fullname,message or "no message")
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip)=="function" then
+ forcestrip=forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+end
+function luautilities.strippedloadstring(code,forcestrip,name)
+ local code,message=load(code)
+ if not code then
+ report_lua("loading of file %a failed:\n\t%s",name,message or "no message")
+ end
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)),0
+ else
+ return code,0
+ end
+end
+function luautilities.compile(luafile,lucfile,cleanup,strip,fallback)
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done=stupidcompile(luafile,lucfile,strip~=false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup==true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+end
+function luautilities.loadstripped(...)
+ local l=load(...)
+ if l then
+ return load(dump(l,true))
+ end
+end
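+-- A minimal usage sketch; "foo.lua" and "foo.luc" are placeholder file names:
+--
+--   -- compile foo.lua into stripped bytecode, keep the source file:
+--   luautilities.compile("foo.lua","foo.luc",false,true)
+--   -- load and strip a chunk in memory:
+--   local f = luautilities.loadstripped("return function(x) return x*x end")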
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-deb"] = package.loaded["util-deb"] or true
+
+-- original size: 3898, stripped down to: 2644
+
+if not modules then modules={} end modules ['util-deb']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local debug=require "debug"
+local getinfo=debug.getinfo
+local type,next,tostring=type,next,tostring
+local format,find=string.format,string.find
+local is_boolean=string.is_boolean
+utilities=utilities or {}
+local debugger=utilities.debugger or {}
+utilities.debugger=debugger
+local counters={}
+local names={}
+local report=logs.reporter("debugger")
+local function hook()
+ local f=getinfo(2)
+ if f then
+ local n="unknown"
+ if f.what=="C" then
+ n=f.name or '<anonymous>'
+ if not names[n] then
+ names[n]=format("%42s",n)
+ end
+ else
+ n=f.name or f.namewhat or f.what
+ if not n or n=="" then
+ n="?"
+ end
+ if not names[n] then
+ names[n]=format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
+ end
+ counters[n]=(counters[n] or 0)+1
+ end
+end
+function debugger.showstats(printer,threshold)
+ printer=printer or report
+ threshold=threshold or 0
+ local total,grandtotal,functions=0,0,0
+ local dataset={}
+ for name,count in next,counters do
+ dataset[#dataset+1]={ name,count }
+ end
+ table.sort(dataset,function(a,b) return a[2]==b[2] and b[1]>a[1] or a[2]>b[2] end)
+ for i=1,#dataset do
+ local d=dataset[i]
+ local name=d[1]
+ local count=d[2]
+ if count>threshold and not find(name,"for generator") then
+ printer(format("%8i %s\n",count,names[name]))
+ total=total+count
+ end
+ grandtotal=grandtotal+count
+ functions=functions+1
+ end
+ printer("\n")
+ printer(format("functions : % 10i\n",functions))
+ printer(format("total : % 10i\n",total))
+ printer(format("grand total: % 10i\n",grandtotal))
+ printer(format("threshold : % 10i\n",threshold))
+end
+function debugger.savestats(filename,threshold)
+ local f=io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+function debugger.disable()
+ debug.sethook()
+end
+local function showtraceback(rep)
+ local level=2
+ local reporter=rep or report
+ while true do
+ local info=getinfo(level,"Sl")
+ if not info then
+ break
+ elseif info.what=="C" then
+ reporter("%2i : %s",level-1,"C function")
+ else
+ reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
+ end
+ level=level+1
+ end
+end
+debugger.showtraceback=showtraceback
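+-- A minimal usage sketch of the call-count profiler defined above:
+--
+--   debugger.enable()             -- install the "call" hook
+--   -- ... run the code to be measured ...
+--   debugger.disable()
+--   debugger.showstats(print,10)  -- report functions called more than 10 times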
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-mrg"] = package.loaded["util-mrg"] or true
+
+-- original size: 7757, stripped down to: 6015
+
+if not modules then modules={} end modules ['util-mrg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gsub,format=string.gsub,string.format
+local concat=table.concat
+local type,next=type,next
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt,Cb,Cg=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt,lpeg.Cb,lpeg.Cg
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+utilities=utilities or {}
+local merger=utilities.merger or {}
+utilities.merger=merger
+merger.strip_comment=true
+local report=logs.reporter("system","merge")
+utilities.report=report
+local m_begin_merge="begin library merge"
+local m_end_merge="end library merge"
+local m_begin_closure="do -- create closure to overcome 200 locals limit"
+local m_end_closure="end -- of closure"
+local m_pattern="%c+".."%-%-%s+"..m_begin_merge.."%c+(.-)%c+".."%-%-%s+"..m_end_merge.."%c+"
+local m_format="\n\n-- "..m_begin_merge.."\n%s\n".."-- "..m_end_merge.."\n\n"
+local m_faked="-- ".."created merged file".."\n\n".."-- "..m_begin_merge.."\n\n".."-- "..m_end_merge.."\n\n"
+local m_report=[[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+local m_preloaded=[[package.loaded[%q] = package.loaded[%q] or true]]
+local function self_fake()
+ return m_faked
+end
+local function self_nothing()
+ return ""
+end
+local function self_load(name)
+ local data=io.loaddata(name) or ""
+ if data=="" then
+ report("unknown file %a",name)
+ else
+ report("inserting file %a",name)
+ end
+ return data or ""
+end
+local space=patterns.space
+local eol=patterns.newline
+local equals=P("=")^0
+local open=P("[")*Cg(equals,"init")*P("[")*P("\n")^-1
+local close=P("]")*C(equals)*P("]")
+local closeeq=Cmt(close*Cb("init"),function(s,i,a,b) return a==b end)
+local longstring=open*(1-closeeq)^0*close
+local quoted=patterns.quoted
+local digit=patterns.digit
+local emptyline=space^0*eol
+local operator1=P("<=")+P(">=")+P("~=")+P("..")+S("/^<>=*+%%")
+local operator2=S("*+/")
+local operator3=S("-")
+local operator4=P("..")
+local separator=S(",;")
+local ignore=(P("]")*space^1*P("=")*space^1*P("]"))/"]=["+(P("=")*space^1*P("{"))/"={"+(P("(")*space^1)/"("+(P("{")*(space+eol)^1*P("}"))/"{}"
+local strings=quoted
+local longcmt=(emptyline^0*P("--")*longstring*emptyline^0)/""
+local longstr=longstring
+local comment=emptyline^0*P("--")*P("-")^0*(1-eol)^0*emptyline^1/"\n"
+local optionalspaces=space^0/""
+local mandatespaces=space^1/""
+local optionalspacing=(eol+space)^0/""
+local mandatespacing=(eol+space)^1/""
+local pack=digit*space^1*operator4*optionalspacing+optionalspacing*operator1*optionalspacing+optionalspacing*operator2*optionalspaces+mandatespacing*operator3*mandatespaces+optionalspaces*separator*optionalspaces
+local lines=emptyline^2/"\n"
+local spaces=(space*space)/" "
+local compact=Cs ((
+ ignore+strings+longcmt+longstr+comment+pack+lines+spaces+1
+)^1 )
+local strip=Cs((emptyline^2/"\n"+1)^0)
+local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+local function self_compact(data)
+ local delta=0
+ if merger.strip_comment then
+ local before=#data
+ data=lpegmatch(compact,data)
+ data=lpegmatch(strip,data)
+ local after=#data
+ delta=before-after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data=format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data,delta
+end
+local function self_save(name,data)
+ if data~="" then
+ io.savedata(name,data)
+ report("saving %s with size %s",name,#data)
+ end
+end
+local function self_swap(data,code)
+ return data~="" and (gsub(data,m_pattern,function() return format(m_format,code) end,1)) or ""
+end
+local function self_libs(libs,list)
+ local result,f,frozen,foundpath={},nil,false,nil
+ result[#result+1]="\n"
+ if type(libs)=='string' then libs={ libs } end
+ if type(list)=='string' then list={ list } end
+ for i=1,#libs do
+ local lib=libs[i]
+ for j=1,#list do
+ local pth=gsub(list[j],"\\","/")
+ report("checking library path %a",pth)
+ local name=pth.."/"..lib
+ if lfs.isfile(name) then
+ foundpath=pth
+ end
+ end
+ if foundpath then break end
+ end
+ if foundpath then
+ report("using library path %a",foundpath)
+ local right,wrong,original,stripped={},{},0,0
+ for i=1,#libs do
+ local lib=libs[i]
+ local fullname=foundpath.."/"..lib
+ if lfs.isfile(fullname) then
+ report("using library %a",fullname)
+ local preloaded=file.nameonly(lib)
+ local data=io.loaddata(fullname,true)
+ original=original+#data
+ local data,delta=self_compact(data)
+ right[#right+1]=lib
+ result[#result+1]=m_begin_closure
+ result[#result+1]=format(m_preloaded,preloaded,preloaded)
+ result[#result+1]=data
+ result[#result+1]=m_end_closure
+ stripped=stripped+delta
+ else
+ report("skipping library %a",fullname)
+ wrong[#wrong+1]=lib
+ end
+ end
+ right=#right>0 and concat(right," ") or "-"
+ wrong=#wrong>0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1]=format(m_report,right,wrong,original,stripped)
+ else
+ report("no valid library path found")
+ end
+ return concat(result,"\n\n")
+end
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
+end
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
+end
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
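+-- A minimal usage sketch; the file and path names below are placeholders:
+--
+--   merger.selfmerge("mtxrun.lua",
+--     { "l-lpeg.lua","l-table.lua" },  -- libraries to embed
+--     { "tex/context/base" })          -- paths searched for them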
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-tpl"] = package.loaded["util-tpl"] or true
+
+-- original size: 7100, stripped down to: 3978
+
+if not modules then modules={} end modules ['util-tpl']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities.templates=utilities.templates or {}
+local templates=utilities.templates
+local trace_template=false trackers.register("templates.trace",function(v) trace_template=v end)
+local report_template=logs.reporter("template")
+local tostring=tostring
+local format,sub,byte=string.format,string.sub,string.byte
+local P,C,R,Cs,Cc,Carg,lpegmatch,lpegpatterns=lpeg.P,lpeg.C,lpeg.R,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.match,lpeg.patterns
+local replacer
+local function replacekey(k,t,how,recursive)
+ local v=t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v=tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+local sqlescape=lpeg.replacer {
+ { "'","''" },
+ { "\\","\\\\" },
+ { "\r\n","\\n" },
+ { "\r","\\n" },
+}
+local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'"))
+lpegpatterns.sqlescape=sqlescape
+lpegpatterns.sqlquoted=sqlquoted
+local luaescape=lpegpatterns.luaescape
+local escapers={
+ lua=function(s)
+ return lpegmatch(luaescape,s)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+local quotedescapers={
+ lua=function(s)
+ return format("%q",s)
+ end,
+ sql=function(s)
+ return lpegmatch(sqlquoted,s)
+ end,
+}
+local luaescaper=escapers.lua
+local quotedluaescaper=quotedescapers.lua
+local function replacekeyunquoted(s,t,how,recurse)
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+local function replacekeyquoted(s,t,how,recurse)
+ if how==false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper=how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+local function replaceoptional(l,m,r,t,how,recurse)
+ local v=t[l]
+ return v and v~="" and lpegmatch(replacer,r,1,t,how or "lua",recurse or false) or ""
+end
+local single=P("%")
+local double=P("%%")
+local lquoted=P("%[")
+local rquoted=P("]%")
+local lquotedq=P("%(")
+local rquotedq=P(")%")
+local escape=double/'%%'
+local nosingle=single/''
+local nodouble=double/''
+local nolquoted=lquoted/''
+local norquoted=rquoted/''
+local nolquotedq=lquotedq/''
+local norquotedq=rquotedq/''
+local noloptional=P("%?")/''
+local noroptional=P("?%")/''
+local nomoptional=P(":")/''
+local args=Carg(1)*Carg(2)*Carg(3)
+local key=nosingle*((C((1-nosingle )^1)*args)/replacekey )*nosingle
+local quoted=nolquotedq*((C((1-norquotedq )^1)*args)/replacekeyquoted )*norquotedq
+local unquoted=nolquoted*((C((1-norquoted )^1)*args)/replacekeyunquoted)*norquoted
+local optional=noloptional*((C((1-nomoptional)^1)*nomoptional*C((1-noroptional)^1)*args)/replaceoptional)*noroptional
+local any=P(1)
+ replacer=Cs((unquoted+quoted+escape+optional+key+any)^0)
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+templates.replace=replace
+function templates.replacer(str,how,recurse)
+ return function(mapping)
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ end
+end
+function templates.load(filename,mapping,how,recurse)
+ local data=io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping=t
+ end
+ for k,v in next,t do
+ t[k]=replace(v,mapping,how,recurse)
+ end
+ return t
+end
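+-- A minimal usage sketch of the %key% template syntax handled above:
+--
+--   templates.replace("hello %name%",    { name="world" }) -- hello world
+--   templates.replace("value = %(name)%",{ name="world" }) -- value = "world"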
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-env"] = package.loaded["util-env"] or true
+
+-- original size: 8022, stripped down to: 5038
+
+if not modules then modules={} end modules ['util-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate,mark=utilities.storage.allocate,utilities.storage.mark
+local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
+local unquoted,quoted,optionalquoted=string.unquoted,string.quoted,string.optionalquoted
+local concat,insert,remove=table.concat,table.insert,table.remove
+environment=environment or {}
+local environment=environment
+os.setlocale(nil,nil)
+function os.setlocale()
+end
+local validengines=allocate {
+ ["luatex"]=true,
+ ["luajittex"]=true,
+}
+local basicengines=allocate {
+ ["luatex"]="luatex",
+ ["texlua"]="luatex",
+ ["texluac"]="luatex",
+ ["luajittex"]="luajittex",
+ ["texluajit"]="luajittex",
+}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
+environment.validengines=validengines
+environment.basicengines=basicengines
+if not arg then
+ environment.used_as_library=true
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
+ end
+ local originalzero=file.basename(arg[0])
+ local specialmapping={ luatools="base" }
+ if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
+ arg[0]=specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+end
+environment.arguments=allocate()
+environment.files=allocate()
+environment.sortedflags=nil
+function environment.initializearguments(arg)
+ local arguments,files={},{}
+ environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
+ for index=1,#arg do
+ local argument=arg[index]
+ if index>0 then
+ local flag,value=match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=unquoted(value or "")
+ else
+ flag=match(argument,"^%-+(.+)")
+ if flag then
+ flag=gsub(flag,"^c:","")
+ arguments[flag]=true
+ else
+ files[#files+1]=argument
+ end
+ end
+ end
+ end
+ environment.ownname=file.reslash(environment.ownname or arg[0] or 'unknown.lua')
+end
+function environment.setargument(name,value)
+ environment.arguments[name]=value
+end
+function environment.getargument(name,partial)
+ local arguments,sortedflags=environment.arguments,environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags=allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k]="^"..sortedflags[k]
+ end
+ environment.sortedflags=sortedflags
+ end
+ for k=1,#sortedflags do
+ local v=sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
+environment.argument=environment.getargument
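+-- A minimal usage sketch of the flag parsing above:
+--
+--   environment.initializearguments { "--mode=draft","--verbose","input.tex" }
+--   environment.getargument("mode")    -- "draft"
+--   environment.getargument("verbose") -- true
+--   environment.files[1]               -- "input.tex"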
+function environment.splitarguments(separator)
+ local done,before,after=false,{},{}
+ local originalarguments=environment.originalarguments
+ for k=1,#originalarguments do
+ local v=originalarguments[k]
+ if not done and v==separator then
+ done=true
+ elseif done then
+ after[#after+1]=v
+ else
+ before[#before+1]=v
+ end
+ end
+ return before,after
+end
+function environment.reconstructcommandline(arg,noquote)
+ local resolveprefix=resolvers.resolve
+ arg=arg or environment.originalarguments
+ if noquote and #arg==1 then
+ return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1])
+ elseif #arg>0 then
+ local result={}
+ for i=1,#arg do
+ result[i]=optionalquoted(resolveprefix and resolveprefix(arg[i]) or arg[i])
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
+end
+function environment.relativepath(path,root)
+ if not path then
+ path=""
+ end
+ if not file.is_rootbased_path(path) then
+ if not root then
+ root=file.pathpart(environment.ownscript or environment.ownname or ".")
+ end
+ if root=="" then
+ root="."
+ end
+ path=root.."/"..path
+ end
+ return file.collapsepath(path,true)
+end
+if arg then
+ local newarg,instring={},false
+ for index=1,#arg do
+ local argument=arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1]=gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring=true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg]=newarg[#newarg].." "..gsub(argument,"\"$","")
+ instring=false
+ elseif instring then
+ newarg[#newarg]=newarg[#newarg].." "..argument
+ else
+ newarg[#newarg+1]=argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i]=arg[i]
+ end
+ environment.initializearguments(newarg)
+ environment.originalarguments=mark(newarg)
+ environment.rawarguments=mark(arg)
+ arg={}
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["luat-env"] = package.loaded["luat-env"] or true
+
+-- original size: 6174, stripped down to: 4141
+
+if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rawset,rawget,loadfile,assert=rawset,rawget,loadfile,assert
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+local texgettoks=tex and tex.gettoks
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=texgettoks and texgettoks("contextversiontoks")
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="kind" then
+ local kind=texgettoks and texgettoks("contextkindtoks")
+ if kind and kind~="" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
+function environment.texfile(filename)
+ return resolvers.findfile(filename,'tex')
+end
+function environment.luafile(filename)
+ local resolved=resolvers.findfile(filename,'tex') or ""
+ if resolved~="" then
+ return resolved
+ end
+ resolved=resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved~="" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
+end
+local stripindeed=false directives.register("system.compile.strip",function(v) stripindeed=v end)
+local function strippable(filename)
+ if stripindeed then
+ local modu=modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
+end
+function environment.luafilechunk(filename,silent)
+ filename=file.replacesuffix(filename,"lua")
+ local fullname=environment.luafile(filename)
+ if fullname and fullname~="" then
+ local data=luautilities.loadedluacode(fullname,strippable,filename)
+ if not silent then
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+ end
+ return data
+ else
+ if not silent then
+ report_lua("unknown file %a",filename)
+ end
+ return nil
+ end
+end
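+-- A minimal usage sketch; "my-settings" is a placeholder module name that the
+-- resolvers would have to locate as my-settings.lua:
+--
+--   local chunk = environment.luafilechunk("my-settings")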
+function environment.loadluafile(filename,version)
+ local lucname,luaname,chunk
+ local basename=file.removesuffix(filename)
+ if basename==filename then
+ luaname=file.addsuffix(basename,luasuffixes.lua)
+ lucname=file.addsuffix(basename,luasuffixes.luc)
+ else
+ luaname=basename
+ lucname=nil
+ end
+ local fullname=(lucname and environment.luafile(lucname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ local v=version
+ if modules and modules[filename] then
+ v=modules[filename].version
+ elseif versions and versions[filename] then
+ v=versions[filename]
+ end
+ if v==version then
+ return true
+ else
+ if trace_locating then
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname=(luaname and environment.luafile(luaname)) or ""
+ if fullname~="" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk=loadfile(fullname)
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
+
+-- original size: 45683, stripped down to: 27866
+
+if not modules then modules={} end modules ['lxml-tab']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
+if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
+xml=xml or {}
+local xml=xml
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
+local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local utfchar=utf.char
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
+local formatters=string.formatters
+xml.xmlns=xml.xmlns or {}
+local check=P(false)
+local parse=check
+function xml.registerns(namespace,pattern)
+ check=check+C(P(lower(pattern)))/namespace
+ parse=P { P(check)+1*V(1) }
+end
+function xml.checkns(namespace,url)
+ local ns=lpegmatch(parse,lower(url))
+ if ns and namespace~=ns then
+ xml.xmlns[namespace]=ns
+ end
+end
+function xml.resolvens(url)
+ return lpegmatch(parse,lower(url)) or ""
+end
+local nsremap,resolvens=xml.xmlns,xml.resolvens
+local stack={}
+local top={}
+local dt={}
+local at={}
+local xmlns={}
+local errorstr=nil
+local entities={}
+local strip=false
+local cleanup=false
+local utfize=false
+local resolve_predefined=false
+local unify_predefined=false
+local dcache={}
+local hcache={}
+local acache={}
+local mt={}
+local function initialize_mt(root)
+ mt={ __index=root }
+end
+function xml.setproperty(root,k,v)
+ getmetatable(root).__index[k]=v
+end
+function xml.checkerror(top,toclose)
+ return ""
+end
+local function add_attribute(namespace,tag,value)
+ if cleanup and #value>0 then
+ value=cleanup(value)
+ end
+ if tag=="xmlns" then
+ xmlns[#xmlns+1]=resolvens(value)
+ at[tag]=value
+ elseif namespace=="" then
+ at[tag]=value
+ elseif namespace=="xmlns" then
+ xml.checkns(tag,value)
+ at["xmlns:"..tag]=value
+ else
+ at[namespace..":"..tag]=value
+ end
+end
+local function add_empty(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top=stack[#stack]
+ dt=top.dt
+ local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
+ dt[#dt+1]=t
+ setmetatable(t,mt)
+ if at.xmlns then
+ remove(xmlns)
+ end
+ at={}
+end
+local function add_begin(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ setmetatable(top,mt)
+ dt=top.dt
+ stack[#stack+1]=top
+ at={}
+end
+local function add_end(spacing,namespace,tag)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ local toclose=remove(stack)
+ top=stack[#stack]
+ if #stack<1 then
+ errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
+ elseif toclose.tg~=tag then
+ errorstr=formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
+ end
+ dt=top.dt
+ dt[#dt+1]=toclose
+ if toclose.at.xmlns then
+ remove(xmlns)
+ end
+end
+local function add_text(text)
+ local n=#dt
+ if cleanup and #text>0 then
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..cleanup(text)
+ else
+ dt[n+1]=cleanup(text)
+ end
+ else
+ dt[1]=cleanup(text)
+ end
+ else
+ if n>0 then
+ local s=dt[n]
+ if type(s)=="string" then
+ dt[n]=s..text
+ else
+ dt[n+1]=text
+ end
+ else
+ dt[1]=text
+ end
+ end
+end
+local function add_special(what,spacing,text)
+ if #spacing>0 then
+ dt[#dt+1]=spacing
+ end
+ if strip and (what=="@cm@" or what=="@dt@") then
+ else
+ dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ end
+end
+local function set_message(txt)
+ errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
+end
+local reported_attribute_errors={}
+local function attribute_value_error(str)
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute value %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+local function attribute_specification_error(str)
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute specification %a",str)
+ reported_attribute_errors[str]=true
+ at._error_=str
+ end
+ return str
+end
+local badentity="&error;"
+local badentity="&"
+xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
+}
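+-- These placeholders are the hooks used when an entity cannot be resolved; a
+-- caller may override them, for instance:
+--
+--   xml.placeholders.unknown_any_entity = function(str) return "["..str.."]" end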
+local placeholders=xml.placeholders
+local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
+end
+local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s),true
+ end
+end
+local p_rest=(1-P(";"))^0
+local p_many=P(1)^0
+local p_char=lpegpatterns.utf8character
+local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
+local predefined_unified={
+ [38]="&amp;",
+ [34]="&quot;",
+ [39]="&apos;",
+ [60]="&lt;",
+ [62]="&gt;",
+}
+local predefined_simplified={
+ [38]="&",amp="&",
+ [34]='"',quot='"',
+ [39]="'",apos="'",
+ [60]="<",lt="<",
+ [62]=">",gt=">",
+}
+local nofprivates=0xF0000
+local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
+}
+local privates_p={}
+local privates_n={
+}
+local escaped=utf.remapper(privates_u,"dynamic")
+local unprivatized=utf.remapper(privates_p,"dynamic")
+xml.unprivatized=unprivatized
+local function unescaped(s)
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ end
+ return p
+end
+xml.privatetoken=unescaped
+xml.privatecodes=privates_n
+local function handle_hex_entity(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
+ end
+ hcache[str]=h
+ end
+ return h
+end
+local function handle_dec_entity(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
+ end
+ dcache[str]=d
+ end
+ return d
+end
+xml.parsedentitylpeg=parsedentity
+local function handle_any_entity(str)
+ if resolve then
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to predefined %a",str,a)
+ end
+ else
+ if type(resolve)=="function" then
+ a=resolve(str) or entities[str]
+ else
+ a=entities[str]
+ end
+ if a then
+ if type(a)=="function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str=="" then
+ a=badentity
+ else
+ a="&"..str..";"
+ end
+ end
+ end
+ end
+ acache[str]=a
+ elseif trace_entities then
+ if not acache[str] then
+ report_xml("converting entity &%s; to %a",str,a)
+ acache[str]=a
+ end
+ end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
+ end
+end
+local function handle_end_entity(str)
+ report_xml("error in entity, %a found without ending %a",str,";")
+ return str
+end
+local function handle_crap_error(chr)
+ report_xml("error in parsing, unexpected %a found ",chr)
+ add_text(chr)
+ return chr
+end
+local space=S(' \r\n\t')
+local open=P('<')
+local close=P('>')
+local squote=S("'")
+local dquote=S('"')
+local equal=P('=')
+local slash=P('/')
+local colon=P(':')
+local semicolon=P(';')
+local ampersand=P('&')
+local valid=R('az','AZ','09')+S('_-.')
+local name_yes=C(valid^1)*colon*C(valid^1)
+local name_nop=C(P(true))*C(valid^1)
+local name=name_yes+name_nop
+local utfbom=lpegpatterns.utfbom
+local spacing=C(space^0)
+local anyentitycontent=(1-open-semicolon-space-close-ampersand)^0
+local hexentitycontent=R("AF","af","09")^0
+local decentitycontent=R("09")^0
+local parsedentity=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity)
+local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
+local text_unparsed=C((1-open)^1)
+local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local somespace=space^1
+local optionalspace=space^0
+local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
+local endofattributes=slash*close+close
+local whatever=space*name*optionalspace*equal
+local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
+local attributevalue=value+wrongvalue
+local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
+local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
+local parsedtext=text_parsed/add_text
+local unparsedtext=text_unparsed/add_text
+local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
+local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
+local beginelement=(spacing*open*name*attributes*optionalspace*close)/add_begin
+local endelement=(spacing*open*slash*name*optionalspace*close)/add_end
+local begincomment=open*P("!--")
+local endcomment=P("--")*close
+local begininstruction=open*P("?")
+local endinstruction=P("?")*close
+local begincdata=open*P("![CDATA[")
+local endcdata=P("]]")*close
+local someinstruction=C((1-endinstruction)^0)
+local somecomment=C((1-endcomment )^0)
+local somecdata=C((1-endcdata )^0)
+local function normalentity(k,v ) entities[k]=v end
+local function systementity(k,v,n) entities[k]=v end
+local function publicentity(k,v,n) entities[k]=v end
+local begindoctype=open*P("!DOCTYPE")
+local enddoctype=close
+local beginset=P("[")
+local endset=P("]")
+local doctypename=C((1-somespace-close)^0)
+local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
+local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local normalentitytype=(doctypename*somespace*value)/normalentity
+local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
+local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local definitiondoctype=doctypename*somespace*doctypeset
+local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
+local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
+local simpledoctype=(1-close)^1
+local somedoctype=C((somespace*(publicdoctype+systemdoctype+definitiondoctype+simpledoctype)*optionalspace)^0)
+local instruction=(spacing*begininstruction*someinstruction*endinstruction)/function(...) add_special("@pi@",...) end
+local comment=(spacing*begincomment*somecomment*endcomment )/function(...) add_special("@cm@",...) end
+local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special("@cd@",...) end
+local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
+local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
+local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
+local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
+local trailer=space^0*(text_unparsed/set_message)^0
+local grammar_parsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
+}
+local grammar_unparsed_text=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+ parent=beginelement*V("children")^0*endelement,
+ children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
+}
+local function _xmlconvert_(data,settings)
+ settings=settings or {}
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ stack,top,at,xmlns,errorstr={},{},{},{},nil
+ acache,hcache,dcache={},{},{}
+ reported_attribute_errors={}
+ if settings.parent_root then
+ mt=getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
+ stack[#stack+1]=top
+ top.dt={}
+ dt=top.dt
+ if not data or data=="" then
+ errorstr="empty xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ else
+ errorstr="invalid xml file - parsed text"
+ end
+ elseif type(data)=="string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr=""
+ else
+ errorstr="invalid xml file - unparsed text"
+ end
+ else
+ errorstr="invalid xml file - no text at all"
+ end
+ local result
+ if errorstr and errorstr~="" then
+ result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
+ setmetatable(result,mt)
+ setmetatable(result.dt[1],mt)
+ setmetatable(stack,mt)
+ local errorhandler=settings.error_handler
+ if errorhandler==false then
+ else
+ errorhandler=errorhandler or xml.errorhandler
+ if errorhandler then
+ local currentresource=settings.currentresource
+ if currentresource and currentresource~="" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
+ end
+ end
+ else
+ result=stack[1]
+ end
+ if not settings.no_root then
+ result={ special=true,ns="",tg='@rt@',dt=result.dt,at={},entities=entities,settings=settings }
+ setmetatable(result,mt)
+ local rdt=result.dt
+ for k=1,#rdt do
+ local v=rdt[k]
+ if type(v)=="table" and not v.special then
+ result.ri=k
+ v.__p__=result
+ break
+ end
+ end
+ end
+ if errorstr and errorstr~="" then
+ result.error=true
+ else
+ errorstr=nil
+ end
+ result.statistics={
+ errormessage=errorstr,
+ entities={
+ decimals=dcache,
+ hexadecimals=hcache,
+ names=acache,
+ }
+ }
+ strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
+ unify_predefined,cleanup,entities=nil,nil,nil
+ stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
+ acache,hcache,dcache=nil,nil,nil
+ reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ return result
+end
+local function xmlconvert(data,settings)
+ local ok,result=pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("",settings)
+ end
+end
+xml.convert=xmlconvert
+function xml.inheritedconvert(data,xmldata)
+ local settings=xmldata.settings
+ if settings then
+ settings.parent_root=xmldata
+ end
+ local xc=xmlconvert(data,settings)
+ return xc
+end
+function xml.is_valid(root)
+ return root and root.dt and root.dt[1] and type(root.dt[1])=="table" and not root.dt[1].er
+end
+function xml.package(tag,attributes,data)
+ local ns,tg=match(tag,"^(.-):?([^:]+)$")
+ local t={ ns=ns,tg=tg,dt=data or "",at=attributes or {} }
+ setmetatable(t,mt)
+ return t
+end
+function xml.is_valid(root)
+ return root and not root.error
+end
+xml.errorhandler=report_xml
+function xml.load(filename,settings)
+ local data=""
+ if type(filename)=="string" then
+ local f=io.open(filename,'r')
+ if f then
+ data=f:read("*all")
+ f:close()
+ end
+ elseif filename then
+ data=filename:read("*all")
+ end
+ if settings then
+ settings.currentresource=filename
+ local result=xmlconvert(data,settings)
+ settings.currentresource=nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource=filename })
+ end
+end
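+-- A minimal usage sketch of the converter and loader above:
+--
+--   local root = xml.convert("<doc><p>hello</p></doc>")
+--   print(xml.tostring(root))            -- serializes via the handlers below
+--   local tree = xml.load("somefile.xml") -- "somefile.xml" is a placeholder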
+local no_root={ no_root=true }
+function xml.toxml(data)
+ if type(data)=="string" then
+ local root={ xmlconvert(data,no_root) }
+ return (#root>1 and root) or root[1]
+ else
+ return data
+ end
+end
+local function copy(old,tables)
+ if old then
+ tables=tables or {}
+ local new={}
+ if not tables[old] then
+ tables[old]=new
+ end
+ for k,v in next,old do
+ new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
+ end
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ return new
+ else
+ return {}
+ end
+end
+xml.copy=copy
+function xml.checkbom(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" and find(v.dt[1],"xml.*version=") then
+ return
+ end
+ end
+ insert(dt,1,{ special=true,ns="",tg="@pi@",dt={ "xml version='1.0' standalone='yes'" } } )
+ insert(dt,2,"\n" )
+ end
+end
+local f_attribute=formatters['%s=%q']
+local function verbose_element(e,handlers,escape)
+ local handle=handlers.handle
+ local serialize=handlers.serialize
+ local ens,etg,eat,edt,ern=e.ns,e.tg,e.at,e.dt,e.rn
+ local ats=eat and next(eat) and {}
+ if ats then
+ local n=0
+ for k,v in next,eat do
+ n=n+1
+ ats[n]=f_attribute(k,escaped(v))
+ end
+ end
+ if ern and trace_entities and ern~=ens then
+ ens=ern
+ end
+ if ens~="" then
+ if edt and #edt>0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
+ else
+ serialize(e,handlers)
+ end
+ end
+ handle("</",ens,":",etg,">")
+ else
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
+ end
+ end
+ else
+ if edt and #edt>0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
+ end
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="string" then
+ handle(escaped(e))
+ else
+ serialize(e,handlers)
+ end
+ end
+ handle("</",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",etg,"/>")
+ end
+ end
+ end
+end
+local function verbose_pi(e,handlers)
+ handlers.handle("<?",e.dt[1],"?>")
+end
+local function verbose_comment(e,handlers)
+ handlers.handle("<!--",e.dt[1],"-->")
+end
+local function verbose_cdata(e,handlers)
+ handlers.handle("<![CDATA[",e.dt[1],"]]>")
+end
+local function verbose_doctype(e,handlers)
+ handlers.handle("<!DOCTYPE ",e.dt[1],">")
+end
+local function verbose_root(e,handlers)
+ handlers.serialize(e.dt,handlers)
+end
+local function verbose_text(e,handlers)
+ handlers.handle(escaped(e))
+end
+local function verbose_document(e,handlers)
+ local serialize=handlers.serialize
+ local functions=handlers.functions
+ for i=1,#e do
+ local ei=e[i]
+ if type(ei)=="string" then
+ functions["@tx@"](ei,handlers)
+ else
+ serialize(ei,handlers)
+ end
+ end
+end
+local function serialize(e,handlers,...)
+ if e then
+ local initialize=handlers.initialize
+ local finalize=handlers.finalize
+ local functions=handlers.functions
+ if initialize then
+ local state=initialize(...)
+ if not state then
+ return state
+ end
+ end
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+ if finalize then
+ return finalize()
+ end
+ end
+end
+local function xserialize(e,handlers)
+ local functions=handlers.functions
+ local etg=e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
+local handlers={}
+local function newhandlers(settings)
+ local t=table.copy(handlers[settings and settings.parent or "verbose"] or {})
+ if settings then
+ for k,v in next,settings do
+ if type(v)=="table" then
+ local tk=t[k] if not tk then tk={} t[k]=tk end
+ for kk,vv in next,v do
+ tk[kk]=vv
+ end
+ else
+ t[k]=v
+ end
+ end
+ if settings.name then
+ handlers[settings.name]=t
+ end
+ end
+ utilities.storage.mark(t)
+ return t
+end
+local nofunction=function() end
+function xml.sethandlersfunction(handler,name,fnc)
+ handler.functions[name]=fnc or nofunction
+end
+function xml.gethandlersfunction(handler,name)
+ return handler.functions[name]
+end
+function xml.gethandlers(name)
+ return handlers[name]
+end
+newhandlers {
+ name="verbose",
+ initialize=false,
+ finalize=false,
+ serialize=xserialize,
+ handle=print,
+ functions={
+ ["@dc@"]=verbose_document,
+ ["@dt@"]=verbose_doctype,
+ ["@rt@"]=verbose_root,
+ ["@el@"]=verbose_element,
+ ["@pi@"]=verbose_pi,
+ ["@cm@"]=verbose_comment,
+ ["@cd@"]=verbose_cdata,
+ ["@tx@"]=verbose_text,
+ }
+}
+local result
+local xmlfilehandler=newhandlers {
+ name="file",
+ initialize=function(name)
+ result=io.open(name,"wb")
+ return result
+ end,
+ finalize=function()
+ result:close()
+ return true
+ end,
+ handle=function(...)
+ result:write(...)
+ end,
+}
+function xml.save(root,name)
+ serialize(root,xmlfilehandler,name)
+end
+local result
+local xmlstringhandler=newhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+}
+local function xmltostring(root)
+ if not root then
+ return ""
+ elseif type(root)=="string" then
+ return root
+ else
+ return serialize(root,xmlstringhandler) or ""
+ end
+end
+local function __tostring(root)
+ return (root and xmltostring(root)) or ""
+end
+initialize_mt=function(root)
+ mt={ __tostring=__tostring,__index=root }
+end
+xml.defaulthandlers=handlers
+xml.newhandlers=newhandlers
+xml.serialize=serialize
+xml.tostring=xmltostring
+local function xmlstring(e,handle)
+ if not handle or (e.special and e.tg~="@rt@") then
+ elseif e.tg then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ xmlstring(edt[i],handle)
+ end
+ end
+ else
+ handle(e)
+ end
+end
+xml.string=xmlstring
+function xml.settings(e)
+ while e do
+ local s=e.settings
+ if s then
+ return s
+ else
+ e=e.__p__
+ end
+ end
+ return nil
+end
+function xml.root(e)
+ local r=e
+ while e do
+ e=e.__p__
+ if e then
+ r=e
+ end
+ end
+ return r
+end
+function xml.parent(root)
+ return root.__p__
+end
+function xml.body(root)
+ return root.ri and root.dt[root.ri] or root
+end
+function xml.name(root)
+ if not root then
+ return ""
+ end
+ local ns=root.ns
+ local tg=root.tg
+ if ns=="" then
+ return tg
+ else
+ return ns..":"..tg
+ end
+end
+function xml.erase(dt,k)
+ if dt then
+ if k then
+ dt[k]=""
+ else for k=1,#dt do
+ dt[k]={ "" }
+ end end
+ end
+end
+function xml.assign(dt,k,root)
+ if dt and k then
+ dt[k]=type(root)=="table" and xml.body(root) or root
+ return dt[k]
+ else
+ return xml.body(root)
+ end
+end
+function xml.tocdata(e,wrapper)
+ local whatever=type(e)=="table" and xmltostring(e.dt) or e or ""
+ if wrapper then
+ whatever=formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
+ end
+ local t={ special=true,ns="",tg="@cd@",at={},rn="",dt={ whatever },__p__=e }
+ setmetatable(t,getmetatable(e))
+ e.dt={ t }
+end
+function xml.makestandalone(root)
+ if root.ri then
+ local dt=root.dt
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="table" and v.special and v.tg=="@pi@" then
+ local txt=v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1]=txt.." standalone='yes'"
+ break
+ end
+ end
+ end
+ end
+ return root
+end
+function xml.kind(e)
+ local dt=e and e.dt
+ if dt then
+ local n=#dt
+ if n==1 then
+ local d=dt[1]
+ if d.special then
+ local tg=d.tg
+ if tg=="@cd@" then
+ return "cdata"
+ elseif tg=="@cm@" then
+ return "comment"
+ elseif tg=="@pi@" then
+ return "instruction"
+ elseif tg=="@dt@" then
+ return "declaration"
+ end
+ elseif type(d)=="string" then
+ return "text"
+ end
+ return "element"
+ elseif n>0 then
+ return "mixed"
+ end
+ end
+ return "empty"
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
+
+-- original size: 48229, stripped down to: 30684
+
+if not modules then modules={} end modules ['lxml-lpt']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat,remove,insert=table.concat,table.remove,table.insert
+local type,next,tonumber,tostring,setmetatable,load,select=type,next,tonumber,tostring,setmetatable,load,select
+local format,upper,lower,gmatch,gsub,find,rep=string.format,string.upper,string.lower,string.gmatch,string.gsub,string.find,string.rep
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local trace_lpath=false if trackers then trackers.register("xml.path",function(v) trace_lpath=v end) end
+local trace_lparse=false if trackers then trackers.register("xml.parse",function(v) trace_lparse=v end) end
+local trace_lprofile=false if trackers then trackers.register("xml.profile",function(v) trace_lpath=v trace_lparse=v trace_lprofile=v end) end
+local report_lpath=logs.reporter("xml","lpath")
+local xml=xml
+local lpathcalls=0 function xml.lpathcalls () return lpathcalls end
+local lpathcached=0 function xml.lpathcached() return lpathcached end
+xml.functions=xml.functions or {}
+local functions=xml.functions
+xml.expressions=xml.expressions or {}
+local expressions=xml.expressions
+xml.finalizers=xml.finalizers or {}
+local finalizers=xml.finalizers
+xml.specialhandler=xml.specialhandler or {}
+local specialhandler=xml.specialhandler
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+finalizers.xml=finalizers.xml or {}
+finalizers.tex=finalizers.tex or {}
+local function fallback (t,name)
+ local fn=finalizers[name]
+ if fn then
+ t[name]=fn
+ else
+ report_lpath("unknown sub finalizer %a",name)
+ fn=function() end
+ end
+ return fn
+end
+setmetatableindex(finalizers.xml,fallback)
+setmetatableindex(finalizers.tex,fallback)
+xml.defaultprotocol="xml"
+local apply_axis={}
+apply_axis['root']=function(list)
+ local collected={}
+ for l=1,#list do
+ local ll=list[l]
+ local rt=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ rt=ll
+ end
+ end
+ collected[l]=rt
+ end
+ return collected
+end
+apply_axis['self']=function(list)
+ return list
+end
+apply_axis['child']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local dt=ll.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ end
+ end
+ ll.en=en
+ end
+ end
+ return collected
+end
+local function collect(list,collected,c)
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ c=collect(list[l],collected,c)
+ end
+ return collected
+end
+local function collect(list,collected,c)
+ local dt=list.dt
+ if dt then
+ local en=0
+ for k=1,#dt do
+ local dk=dt[k]
+ if dk.tg then
+ c=c+1
+ collected[c]=dk
+ dk.ni=k
+ en=en+1
+ dk.ei=en
+ c=collect(dk,collected,c)
+ end
+ end
+ list.en=en
+ end
+ return c
+end
+apply_axis['descendant-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ if ll.special~=true then
+ c=c+1
+ collected[c]=ll
+ end
+ c=collect(ll,collected,c)
+ end
+ return collected
+end
+apply_axis['ancestor']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['ancestor-or-self']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ c=c+1
+ collected[c]=ll
+ while ll do
+ ll=ll.__p__
+ if ll then
+ c=c+1
+ collected[c]=ll
+ end
+ end
+ end
+ return collected
+end
+apply_axis['parent']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local pl=list[l].__p__
+ if pl then
+ c=c+1
+ collected[c]=pl
+ end
+ end
+ return collected
+end
+apply_axis['attribute']=function(list)
+ return {}
+end
+apply_axis['namespace']=function(list)
+ return {}
+end
+apply_axis['following']=function(list)
+ return {}
+end
+apply_axis['preceding']=function(list)
+ return {}
+end
+apply_axis['following-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni+1,#d do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['preceding-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=1,ll.ni-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['reverse-sibling']=function(list)
+ local collected,c={},0
+ for l=1,#list do
+ local ll=list[l]
+ local p=ll.__p__
+ local d=p.dt
+ for i=ll.ni-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ c=c+1
+ collected[c]=di
+ end
+ end
+ end
+ return collected
+end
+apply_axis['auto-descendant-or-self']=apply_axis['descendant-or-self']
+apply_axis['auto-descendant']=apply_axis['descendant']
+apply_axis['auto-child']=apply_axis['child']
+apply_axis['auto-self']=apply_axis['self']
+apply_axis['initial-child']=apply_axis['child']
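+-- The axis functions above mirror a subset of the XPath axes; apply_nodes and
+-- apply_expression below filter the collected nodes by tag, namespace and
+-- predicate.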
+local function apply_nodes(list,directive,nodes)
+ local maxn=#nodes
+ if maxn==3 then
+ local nns,ntg=nodes[2],nodes[3]
+ if not nns and not ntg then
+ if directive then
+ return list
+ else
+ return {}
+ end
+ else
+ local collected,c,m,p={},0,0,nil
+ if not nns then
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ if directive then
+ if ntg==ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif ntg~=ltg then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ elseif not ntg then
+ for l=1,#list do
+ local ll=list[l]
+ local lns=ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns==nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif lns~=nns then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ else
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=ltg==ntg and lns==nns
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ end
+ return collected
+ end
+ else
+ local collected,c,m,p={},0,0,nil
+ for l=1,#list do
+ local ll=list[l]
+ local ltg=ll.tg
+ if ltg then
+ local lns=ll.rn or ll.ns
+ local ok=false
+ for n=1,maxn,3 do
+ local nns,ntg=nodes[n+1],nodes[n+2]
+ ok=(not ntg or ltg==ntg) and (not nns or lns==nns)
+ if ok then
+ break
+ end
+ end
+ if directive then
+ if ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ elseif not ok then
+ local llp=ll.__p__;if llp~=p then p,m=llp,1 else m=m+1 end
+ c=c+1
+ collected[c],ll.mi=ll,m
+ end
+ end
+ end
+ return collected
+ end
+end
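+-- apply_expression evaluates the compiled predicate for every node in the list; expressions.quit sets quit_expression to abort the scan early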
+local quit_expression=false
+local function apply_expression(list,expression,order)
+ local collected,c={},0
+ quit_expression=false
+ for l=1,#list do
+ local ll=list[l]
+ if expression(list,ll,l,order) then
+ c=c+1
+ collected[c]=ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
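+-- the lpeg patterns below translate lpath expression syntax into lua source that is evaluated against (list,ll,l,order)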
+local P,V,C,Cs,Cc,Ct,R,S,Cg,Cb=lpeg.P,lpeg.V,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.R,lpeg.S,lpeg.Cg,lpeg.Cb
+local spaces=S(" \n\r\t\f")^0
+local lp_space=S(" \n\r\t\f")
+local lp_any=P(1)
+local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
+local lp_doequal=P("=")/"=="
+local lp_or=P("|")/" or "
+local lp_and=P("&")/" and "
+local lp_builtin=P (
+ P("text")/"(ll.dt[1] or '')"+
+ P("content")/"ll.dt"+
+ P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
+ P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
+ P("ns")/"ll.ns"
+ )*((spaces*P("(")*spaces*P(")"))/"")
+local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
+local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
+local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
+local lp_fastpos=lp_fastpos_n+lp_fastpos_p
+local lp_reserved=C("and")+C("or")+C("not")+C("div")+C("mod")+C("true")+C("false")
+local lp_lua_function=Cs((R("az","AZ","__")^1*(P(".")*R("az","AZ","__")^1)^1)*("("))/"%0"
+local lp_function=C(R("az","AZ","__")^1)*P("(")/function(t)
+ if expressions[t] then
+ return "expr."..t.."("
+ else
+ return "expr.error("
+ end
+end
+local lparent=P("(")
+local rparent=P(")")
+local noparent=1-(lparent+rparent)
+local nested=P{lparent*(noparent+V(1))^0*rparent}
+local value=P(lparent*C((noparent+nested)^0)*rparent)
+local lp_child=Cc("expr.child(ll,'")*R("az","AZ","--","__")^1*Cc("')")
+local lp_number=S("+-")*R("09")^1
+local lp_string=Cc("'")*R("az","AZ","--","__")^1*Cc("'")
+local lp_content=(P("'")*(1-P("'"))^0*P("'")+P('"')*(1-P('"'))^0*P('"'))
+local cleaner
+local lp_special=(C(P("name")+P("text")+P("tag")+P("count")+P("child")))*value/function(t,s)
+ if expressions[t] then
+ s=s and s~="" and lpegmatch(cleaner,s)
+ if s and s~="" then
+ return "expr."..t.."(ll,"..s..")"
+ else
+ return "expr."..t.."(ll)"
+ end
+ else
+ return "expr.error("..t..")"
+ end
+end
+local content=lp_builtin+lp_attribute+lp_special+lp_noequal+lp_doequal+lp_or+lp_and+lp_reserved+lp_lua_function+lp_function+lp_content+
+ lp_child+lp_any
+local converter=Cs (
+ lp_fastpos+(P { lparent*(V(1))^0*rparent+content } )^0
+)
+cleaner=Cs ((
+ lp_reserved+lp_number+lp_string+1 )^1 )
+local template_e=[[
+ local expr = xml.expressions
+ return function(list,ll,l,order)
+ return %s
+ end
+]]
+local template_f_y=[[
+ local finalizer = xml.finalizers['%s']['%s']
+ return function(collection)
+ return finalizer(collection,%s)
+ end
+]]
+local template_f_n=[[
+ return xml.finalizers['%s']['%s']
+]]
+local register_self={ kind="axis",axis="self" }
+local register_parent={ kind="axis",axis="parent" }
+local register_descendant={ kind="axis",axis="descendant" }
+local register_child={ kind="axis",axis="child" }
+local register_descendant_or_self={ kind="axis",axis="descendant-or-self" }
+local register_root={ kind="axis",axis="root" }
+local register_ancestor={ kind="axis",axis="ancestor" }
+local register_ancestor_or_self={ kind="axis",axis="ancestor-or-self" }
+local register_attribute={ kind="axis",axis="attribute" }
+local register_namespace={ kind="axis",axis="namespace" }
+local register_following={ kind="axis",axis="following" }
+local register_following_sibling={ kind="axis",axis="following-sibling" }
+local register_preceding={ kind="axis",axis="preceding" }
+local register_preceding_sibling={ kind="axis",axis="preceding-sibling" }
+local register_reverse_sibling={ kind="axis",axis="reverse-sibling" }
+local register_auto_descendant_or_self={ kind="axis",axis="auto-descendant-or-self" }
+local register_auto_descendant={ kind="axis",axis="auto-descendant" }
+local register_auto_self={ kind="axis",axis="auto-self" }
+local register_auto_child={ kind="axis",axis="auto-child" }
+local register_initial_child={ kind="axis",axis="initial-child" }
+local register_all_nodes={ kind="nodes",nodetest=true,nodes={ true,false,false } }
+local skip={}
+local function errorrunner_e(str,cnv)
+ if not skip[str] then
+ report_lpath("error in expression: %s => %s",str,cnv)
+ skip[str]=cnv or str
+ end
+ return false
+end
+local function errorrunner_f(str,arg)
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
+ return false
+end
+local function register_nodes(nodetest,nodes)
+ return { kind="nodes",nodetest=nodetest,nodes=nodes }
+end
+local function register_expression(expression)
+ local converted=lpegmatch(converter,expression)
+ local runner=load(format(template_e,converted))
+ runner=(runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind="expression",expression=expression,converted=converted,evaluator=runner }
+end
+local function register_finalizer(protocol,name,arguments)
+ local runner
+ if arguments and arguments~="" then
+ runner=load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ else
+ runner=load(format(template_f_n,protocol or xml.defaultprotocol,name))
+ end
+ runner=(runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind="finalizer",name=name,arguments=arguments,finalizer=runner }
+end
+local expression=P { "ex",
+ ex="["*C((V("sq")+V("dq")+(1-S("[]"))+V("ex"))^0)*"]",
+ sq="'"*(1-S("'"))^0*"'",
+ dq='"'*(1-S('"'))^0*'"',
+}
+local arguments=P { "ar",
+ ar="("*Cs((V("sq")+V("dq")+V("nq")+P(1-P(")")))^0)*")",
+ nq=((1-S("),'\""))^1)/function(s) return format("%q",s) end,
+ sq=P("'")*(1-P("'"))^0*P("'"),
+ dq=P('"')*(1-P('"'))^0*P('"'),
+}
+local function register_error(str)
+ return { kind="error",error=format("unparsed: %s",str) }
+end
+local special_1=P("*")*Cc(register_auto_descendant)*Cc(register_all_nodes)
+local special_2=P("/")*Cc(register_auto_self)
+local special_3=P("")*Cc(register_auto_self)
+local no_nextcolon=P(-1)+#(1-P(":"))
+local no_nextlparent=P(-1)+#(1-P("("))
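+-- pathparser converts an lpath pattern into a sequence of axis, nodes, expression and finalizer steps that the appliers run in order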
+local pathparser=Ct { "patterns",
+ patterns=spaces*V("protocol")*spaces*(
+ (V("special")*spaces*P(-1) )+(V("initial")*spaces*V("step")*spaces*(P("/")*spaces*V("step")*spaces)^0 )
+ ),
+ protocol=Cg(V("letters"),"protocol")*P("://")+Cg(Cc(nil),"protocol"),
+ step=((V("shortcuts")+P("/")+V("axis"))*spaces*V("nodes")^0+V("error"))*spaces*V("expressions")^0*spaces*V("finalizer")^0,
+ axis=V("descendant")+V("child")+V("parent")+V("self")+V("root")+V("ancestor")+V("descendant_or_self")+V("following_sibling")+V("following")+V("reverse_sibling")+V("preceding_sibling")+V("preceding")+V("ancestor_or_self")+#(1-P(-1))*Cc(register_auto_child),
+ special=special_1+special_2+special_3,
+ initial=(P("/")*spaces*Cc(register_initial_child))^-1,
+ error=(P(1)^1)/register_error,
+ shortcuts_a=V("s_descendant_or_self")+V("s_descendant")+V("s_child")+V("s_parent")+V("s_self")+V("s_root")+V("s_ancestor"),
+ shortcuts=V("shortcuts_a")*(spaces*"/"*spaces*V("shortcuts_a"))^0,
+ s_descendant_or_self=(P("***/")+P("/"))*Cc(register_descendant_or_self),
+ s_descendant=P("**")*Cc(register_descendant),
+ s_child=P("*")*no_nextcolon*Cc(register_child ),
+ s_parent=P("..")*Cc(register_parent ),
+ s_self=P("." )*Cc(register_self ),
+ s_root=P("^^")*Cc(register_root ),
+ s_ancestor=P("^")*Cc(register_ancestor ),
+ descendant=P("descendant::")*Cc(register_descendant ),
+ child=P("child::")*Cc(register_child ),
+ parent=P("parent::")*Cc(register_parent ),
+ self=P("self::")*Cc(register_self ),
+ root=P('root::')*Cc(register_root ),
+ ancestor=P('ancestor::')*Cc(register_ancestor ),
+ descendant_or_self=P('descendant-or-self::')*Cc(register_descendant_or_self ),
+ ancestor_or_self=P('ancestor-or-self::')*Cc(register_ancestor_or_self ),
+ following=P('following::')*Cc(register_following ),
+ following_sibling=P('following-sibling::')*Cc(register_following_sibling ),
+ preceding=P('preceding::')*Cc(register_preceding ),
+ preceding_sibling=P('preceding-sibling::')*Cc(register_preceding_sibling ),
+ reverse_sibling=P('reverse-sibling::')*Cc(register_reverse_sibling ),
+ nodes=(V("nodefunction")*spaces*P("(")*V("nodeset")*P(")")+V("nodetest")*V("nodeset"))/register_nodes,
+ expressions=expression/register_expression,
+ letters=R("az")^1,
+ name=(1-S("/[]()|:*!"))^1,
+ negate=P("!")*Cc(false),
+ nodefunction=V("negate")+P("not")*Cc(false)+Cc(true),
+ nodetest=V("negate")+Cc(true),
+ nodename=(V("negate")+Cc(true))*spaces*((V("wildnodename")*P(":")*V("wildnodename"))+(Cc(false)*V("wildnodename"))),
+ wildnodename=(C(V("name"))+P("*")*Cc(false))*no_nextlparent,
+ nodeset=spaces*Ct(V("nodename")*(spaces*P("|")*spaces*V("nodename"))^0)*spaces,
+ finalizer=(Cb("protocol")*P("/")^-1*C(V("name"))*arguments*P(-1))/register_finalizer,
+}
+xmlpatterns.pathparser=pathparser
+local cache={}
+local function nodesettostring(set,nodetest)
+ local t={}
+ for i=1,#set,3 do
+ local directive,ns,tg=set[i],set[i+1],set[i+2]
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ tg=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[#t+1]=(directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest==false then
+ return format("not(%s)",concat(t,"|"))
+ else
+ return concat(t,"|")
+ end
+end
+local function tagstostring(list)
+ if #list==0 then
+ return "no elements"
+ else
+ local t={}
+ for i=1,#list do
+ local li=list[i]
+ local ns,tg=li.ns,li.tg
+ if not ns or ns=="" then ns="*" end
+ if not tg or tg=="" then tg="*" end
+ t[i]=(tg=="@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
+xml.nodesettostring=nodesettostring
+local lpath
+local lshowoptions={ functions=false }
+local function lshow(parsed)
+ if type(parsed)=="string" then
+ parsed=lpath(parsed)
+ end
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
+end
+xml.lshow=lshow
+local function add_comment(p,str)
+ local pc=p.comment
+ if not pc then
+ p.comment={ str }
+ else
+ pc[#pc+1]=str
+ end
+end
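+-- lpath compiles and caches patterns; a leading auto-child is replaced by auto-descendant and redundant initial or trailing wildcard steps are removed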
+lpath=function (pattern)
+ lpathcalls=lpathcalls+1
+ if type(pattern)=="table" then
+ return pattern
+ else
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcached=lpathcached+1
+ else
+ parsed=lpegmatch(pathparser,pattern)
+ if parsed then
+ parsed.pattern=pattern
+ local np=#parsed
+ if np==0 then
+ parsed={ pattern=pattern,register_self,state="parsing error" }
+ report_lpath("parsing error in pattern: %s",pattern)
+ lshow(parsed)
+ else
+ local pi=parsed[1]
+ if pi.axis=="auto-child" then
+ if false then
+ add_comment(parsed,"auto-child replaced by auto-descendant-or-self")
+ parsed[1]=register_auto_descendant_or_self
+ else
+ add_comment(parsed,"auto-child replaced by auto-descendant")
+ parsed[1]=register_auto_descendant
+ end
+ elseif pi.axis=="initial-child" and np>1 and parsed[2].axis then
+ add_comment(parsed,"initial-child removed")
+ remove(parsed,1)
+ end
+ local np=#parsed
+ if np>1 then
+ local pnp=parsed[np]
+ if pnp.kind=="nodes" and pnp.nodetest==true then
+ local nodes=pnp.nodes
+ if nodes[1]==true and nodes[2]==false and nodes[3]==false then
+ add_comment(parsed,"redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
+ end
+ else
+ parsed={ pattern=pattern }
+ end
+ cache[pattern]=parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
+ end
+ end
+ return parsed
+ end
+end
+xml.lpath=lpath
+local profiled={} xml.profiled=profiled
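+-- the three appliers below run the parsed steps over a collection: profiled_apply keeps counters, traced_apply reports every step, normal_apply does neither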
+local function profiled_apply(list,parsed,nofparsed,order)
+ local p=profiled[parsed.pattern]
+ if p then
+ p.tested=p.tested+1
+ else
+ p={ tested=1,matched=0,finalized=0 }
+ profiled[parsed.pattern]=p
+ end
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ p.matched=p.matched+1
+ p.finalized=p.finalized+1
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ p.finalized=p.finalized+1
+ return collected
+ end
+ return nil
+ end
+ end
+ if collected then
+ p.matched=p.matched+1
+ end
+ return collected
+end
+local function traced_apply(list,parsed,nofparsed,order)
+ if trace_lparse then
+ lshow(parsed)
+ end
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ collected=apply_axis[pi.axis](collected)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind=="finalizer" then
+ collected=pi.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected==0 then
+ local pn=i<nofparsed and parsed[nofparsed]
+ if pn and pn.kind=="finalizer" then
+ collected=pn.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected)=="table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
+ end
+ end
+ return collected
+end
+local function normal_apply(list,parsed,nofparsed,order)
+ local collected=list
+ for i=1,nofparsed do
+ local pi=parsed[i]
+ local kind=pi.kind
+ if kind=="axis" then
+ local axis=pi.axis
+ if axis~="self" then
+ collected=apply_axis[axis](collected)
+ end
+ elseif kind=="nodes" then
+ collected=apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind=="expression" then
+ collected=apply_expression(collected,pi.evaluator,order)
+ elseif kind=="finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected==0 then
+ local pf=i<nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected)
+ end
+ return nil
+ end
+ end
+ return collected
+end
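+-- applylpath wraps the root in a one element list and dispatches to the normal, profiled or traced applier depending on the trace flags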
+local function applylpath(list,pattern)
+ if not list then
+ return
+ end
+ local parsed=cache[pattern]
+ if parsed then
+ lpathcalls=lpathcalls+1
+ lpathcached=lpathcached+1
+ elseif type(pattern)=="table" then
+ lpathcalls=lpathcalls+1
+ parsed=pattern
+ else
+ parsed=lpath(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed=#parsed
+ if nofparsed==0 then
+ return
+ end
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
+ elseif trace_lprofile then
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
+ else
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
+ end
+end
+xml.applylpath=applylpath
+function xml.filter(root,pattern)
+ return applylpath(root,pattern)
+end
+expressions.child=function(e,pattern)
+ return applylpath(e,pattern)
+end
+expressions.count=function(e,pattern)
+ local collected=applylpath(e,pattern)
+ return pattern and (collected and #collected) or 0
+end
+expressions.oneof=function(s,...)
+ for i=1,select("#",...) do
+ if s==select(i,...) then
+ return true
+ end
+ end
+ return false
+end
+expressions.error=function(str)
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
+ return false
+end
+expressions.undefined=function(s)
+ return s==nil
+end
+expressions.quit=function(s)
+ if s or s==nil then
+ quit_expression=true
+ end
+ return true
+end
+expressions.print=function(...)
+ print(...)
+ return true
+end
+expressions.find=find
+expressions.upper=upper
+expressions.lower=lower
+expressions.number=tonumber
+expressions.boolean=toboolean
+function expressions.contains(str,pattern)
+ local t=type(str)
+ if t=="string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t=="table" then
+ for i=1,#str do
+ local d=str[i]
+ if type(d)=="string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+function xml.expressions.idstring(str)
+ return type(str)=="string" and gsub(str,"^#","") or ""
+end
+local function traverse(root,pattern,handle)
+ local collected=applylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local r=e.__p__
+ handle(r,r.dt,e.ni)
+ end
+ end
+end
+local function selection(root,pattern,handle)
+ local collected=applylpath(root,pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ else
+ return collected
+ end
+ end
+end
+xml.traverse=traverse
+xml.selection=selection
+local function dofunction(collected,fnc,...)
+ if collected then
+ local f=functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c],...)
+ end
+ else
+ report_lpath("unknown function %a",fnc)
+ end
+ end
+end
+finalizers.xml["function"]=dofunction
+finalizers.tex["function"]=dofunction
+expressions.text=function(e,n)
+ local rdt=e.__p__.dt
+ return rdt and rdt[n] or ""
+end
+expressions.name=function(e,n)
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=type(e)=="table" and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ if found then
+ local ns,tg=found.rn or found.ns or "",found.tg
+ if ns~="" then
+ return ns..":"..tg
+ else
+ return tg
+ end
+ else
+ return ""
+ end
+end
+expressions.tag=function(e,n)
+ if not e then
+ return ""
+ else
+ local found=false
+ n=tonumber(n) or 0
+ if n==0 then
+ found=(type(e)=="table") and e
+ elseif n<0 then
+ local d,k=e.__p__.dt,e.ni
+ for i=k-1,1,-1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==-1 then
+ found=di
+ break
+ else
+ n=n+1
+ end
+ end
+ end
+ else
+ local d,k=e.__p__.dt,e.ni
+ for i=k+1,#d,1 do
+ local di=d[i]
+ if type(di)=="table" then
+ if n==1 then
+ found=di
+ break
+ else
+ n=n-1
+ end
+ end
+ end
+ end
+ return (found and found.tg) or ""
+ end
+end
+local dummy=function() end
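+-- xml.elements and xml.collected return iterators over the matched set (optionally reversed); xml.elements also yields the parent and the child index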
+function xml.elements(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
+ return function()
+ if c>1 then
+ c=c-1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ else
+ local c=0
+ return function()
+ if c<n then
+ c=c+1
+ local e=collected[c]
+ local r=e.__p__
+ return r,r.dt,e.ni
+ end
+ end
+ end
+end
+function xml.collected(root,pattern,reverse)
+ local collected=applylpath(root,pattern)
+ if not collected then
+ return dummy
+ end
+ local n=#collected
+ if n==0 then
+ return dummy
+ end
+ if reverse then
+ local c=n+1
+ return function()
+ if c>1 then
+ c=c-1
+ return collected[c]
+ end
+ end
+ else
+ local c=0
+ return function()
+ if c<n then
+ c=c+1
+ return collected[c]
+ end
+ end
+ end
+end
+function xml.inspect(collection,pattern)
+ pattern=pattern or "."
+ for e in xml.collected(collection,pattern or ".") do
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+local function split(e)
+ local dt=e.dt
+ if dt then
+ for i=1,#dt do
+ local dti=dt[i]
+ if type(dti)=="string" then
+    dti=gsub(dti,"^[\n\r]*(.-)[\n\r]*$","%1")
+ dti=gsub(dti,"[\n\r]+","\n\n")
+ dt[i]=dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-mis"] = package.loaded["lxml-mis"] or true
+
+-- original size: 3684, stripped down to: 1957
+
+if not modules then modules={} end modules ['lxml-mis']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local xml,lpeg,string=xml,lpeg,string
+local concat=table.concat
+local type,next,tonumber,tostring,setmetatable,loadstring=type,next,tonumber,tostring,setmetatable,loadstring
+local format,gsub,match,rep=string.format,string.gsub,string.match,string.rep
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local P,S,R,C,V,Cc,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.Cc,lpeg.Cs
+lpegpatterns.xml=lpegpatterns.xml or {}
+local xmlpatterns=lpegpatterns.xml
+local function xmlgsub(t,old,new)
+ local dt=t.dt
+ if dt then
+ for k=1,#dt do
+ local v=dt[k]
+ if type(v)=="string" then
+ dt[k]=gsub(v,old,new)
+ else
+ xmlgsub(v,old,new)
+ end
+ end
+ end
+end
+function xml.stripleadingspaces(dk,d,k)
+ if d and k then
+ local dkm=d[k-1]
+ if dkm and type(dkm)=="string" then
+ local s=match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
+ end
+end
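+-- escaped and unescaped convert between plain text and the basic xml entities, cleansed removes markup altogether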
+local normal=(1-S("<&>"))^0
+local special=P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"
+local escaped=Cs(normal*(special*normal)^0)
+local normal=(1-S"&")^0
+local special=P("&lt;")/"<"+P("&gt;")/">"+P("&amp;")/"&"
+local unescaped=Cs(normal*(special*normal)^0)
+local cleansed=Cs(((P("<")*(1-P(">"))^0*P(">"))/""+1)^0)
+xmlpatterns.escaped=escaped
+xmlpatterns.unescaped=unescaped
+xmlpatterns.cleansed=cleansed
+function xml.escaped (str) return lpegmatch(escaped,str) end
+function xml.unescaped(str) return lpegmatch(unescaped,str) end
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
+function xml.fillin(root,pattern,str,check)
+ local e=xml.first(root,pattern)
+ if e then
+ local n=#e.dt
+ if not check or n==0 or (n==1 and e.dt[1]=="") then
+ e.dt={ str }
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-aux"] = package.loaded["lxml-aux"] or true
+
+-- original size: 28786, stripped down to: 20578
+
+if not modules then modules={} end modules ['lxml-aux']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_manipulations=false trackers.register("lxml.manipulations",function(v) trace_manipulations=v end)
+local trace_inclusions=false trackers.register("lxml.inclusions",function(v) trace_inclusions=v end)
+local report_xml=logs.reporter("xml")
+local xml=xml
+local xmlcopy,xmlname=xml.copy,xml.name
+local xmlinheritedconvert=xml.inheritedconvert
+local xmlapplylpath=xml.applylpath
+local xmlfilter=xml.filter
+local type,next,setmetatable,getmetatable=type,next,setmetatable,getmetatable
+local insert,remove,fastcopy,concat=table.insert,table.remove,table.fastcopy,table.concat
+local gmatch,gsub,format,find,strip=string.gmatch,string.gsub,string.format,string.find,string.strip
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local striplinepatterns=utilities.strings.striplinepatterns
+local function report(what,pattern,c,e)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+end
+local function withelements(e,handle,depth)
+ if e and handle then
+ local edt=e.dt
+ if edt then
+ depth=depth or 0
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
+ end
+ end
+ end
+ end
+end
+xml.withelements=withelements
+function xml.withelement(e,n,handle)
+ if e and n~=0 and handle then
+ local edt=e.dt
+ if edt then
+ if n>0 then
+ for i=1,#edt do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==1 then
+ handle(ei)
+ return
+ else
+ n=n-1
+ end
+ end
+ end
+ elseif n<0 then
+ for i=#edt,1,-1 do
+ local ei=edt[i]
+ if type(ei)=="table" then
+ if n==-1 then
+ handle(ei)
+ return
+ else
+ n=n+1
+ end
+ end
+ end
+ end
+ end
+ end
+end
+function xml.each(root,pattern,handle,reverse)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ if handle then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ end
+ end
+ return collected
+ end
+end
+function xml.processattributes(root,pattern,handle)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
+ end
+ end
+ return collected
+end
+function xml.collect(root,pattern)
+ return xmlapplylpath(root,pattern)
+end
+function xml.collecttexts(root,pattern,flatten)
+ local collected=xmlapplylpath(root,pattern)
+ if collected and flatten then
+ local xmltostring=xml.tostring
+ for c=1,#collected do
+ collected[c]=xmltostring(collected[c].dt)
+ end
+ end
+ return collected or {}
+end
+function xml.collect_tags(root,pattern,nonamespace)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ local t,n={},0
+ for c=1,#collected do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace then
+ t[n]=tg
+ elseif ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
+ end
+ return t
+ end
+end
+local no_root={ no_root=true }
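+-- redo_ni renumbers the ni (position) fields after elements have been inserted or removed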
+local function redo_ni(d)
+ for k=1,#d do
+ local dk=d[k]
+ if type(dk)=="table" then
+ dk.ni=k
+ end
+ end
+end
+xml.reindex=redo_ni
+local function xmltoelement(whatever,root)
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever)=="string" then
+ element=xmlinheritedconvert(whatever,root)
+ else
+ element=whatever
+ end
+ if element.error then
+ return whatever
+ end
+ if element then
+ end
+ return element
+end
+xml.toelement=xmltoelement
+local function copiedelement(element,newparent)
+ if type(element)=="string" then
+ return element
+ else
+ element=xmlcopy(element).dt
+ if newparent and type(element)=="table" then
+ element.__p__=newparent
+ end
+ return element
+ end
+end
+function xml.delete(root,pattern)
+ if not pattern or pattern=="" then
+ local p=root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d=p.dt
+ remove(d,root.ni)
+ redo_ni(d)
+ end
+ else
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d=p.dt
+ local ni=e.ni
+ if ni<=#d then
+ if false then
+ p.dt[ni]=""
+ else
+ remove(d,ni)
+ redo_ni(d)
+ end
+ else
+ end
+ end
+ end
+ end
+ end
+end
+function xml.replace(root,pattern,whatever)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local p=e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
+ end
+ local d=p.dt
+ d[e.ni]=copiedelement(element,p)
+ redo_ni(d)
+ end
+ end
+ end
+end
+local function wrap(e,wrapper)
+ local t={
+ rn=e.rn,
+ tg=e.tg,
+ ns=e.ns,
+ at=e.at,
+ dt=e.dt,
+ __p__=e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn=wrapper.rn or e.rn or ""
+ e.tg=wrapper.tg or e.tg or ""
+ e.ns=wrapper.ns or e.ns or ""
+ e.at=fastcopy(wrapper.at)
+ e.dt={ t }
+end
+function xml.wrap(root,pattern,whatever)
+ if whatever then
+ local wrapper=xmltoelement(whatever,root)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
+ end
+ wrap(e,wrapper)
+ end
+ end
+ else
+ wrap(root,xmltoelement(pattern))
+ end
+end
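+-- inject merges new content into the matched element's own dt, insert adds it as a sibling before or after the match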
+local function inject_element(root,pattern,whatever,prepend)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function inject_e(e)
+ local r=e.__p__
+ local d,k,rri=r.dt,e.ni,r.ri
+ local edt=(rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be,af
+ local cp=copiedelement(element,e)
+ if prepend then
+ be,af=cp,edt
+ else
+ be,af=edt,cp
+ end
+ local bn=#be
+ for i=1,#af do
+ bn=bn+1
+ be[bn]=af[i]
+ end
+ if rri then
+ r.dt[rri].dt=be
+ else
+ d[k].dt=be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ elseif collected.tg then
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
+ end
+ end
+end
+local function insert_element(root,pattern,whatever,before)
+ local element=root and xmltoelement(whatever,root)
+ local collected=element and xmlapplylpath(root,pattern)
+ local function insert_e(e)
+ local r=e.__p__
+ local d,k=r.dt,e.ni
+ if not before then
+ k=k+1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ elseif collected.tg then
+ insert_e(collected)
+ else
+ for c=1,#collected do
+ insert_e(collected[c])
+ end
+ end
+end
+xml.insert_element=insert_element
+xml.insertafter=insert_element
+xml.insertbefore=function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter=inject_element
+xml.injectbefore=function(r,p,e) inject_element(r,p,e,true) end
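+-- include replaces matched (by default 'include') elements by the converted content of the referenced file, optionally recursing into the loaded tree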
+local function include(xmldata,pattern,attribute,recursive,loaddata,level)
+ pattern=pattern or 'include'
+ loaddata=loaddata or io.loaddata
+ local collected=xmlapplylpath(xmldata,pattern)
+ if collected then
+ if not level then
+ level=1
+ end
+ for c=1,#collected do
+ local ek=collected[c]
+ local name=nil
+ local ekdt=ek.dt
+ local ekat=ek.at
+ local ekrt=ek.__p__
+ local epdt=ekrt.dt
+ if not attribute or attribute=="" then
+ name=(type(ekdt)=="table" and ekdt[1]) or ekdt
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name=ekat[a]
+ if name then
+ break
+ end
+ end
+ end
+ local data=nil
+ if name and name~="" then
+ data=loaddata(name) or ""
+ if trace_inclusions then
+ report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
+ end
+ end
+ if not data or data=="" then
+ epdt[ek.ni]=""
+ elseif ekat["parse"]=="text" then
+ epdt[ek.ni]=xml.escaped(data)
+ else
+ local xi=xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni]=""
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata,level+1)
+ end
+ local child=xml.body(xi)
+ child.__p__=ekrt
+ child.__f__=name
+ epdt[ek.ni]=child
+ local inclusions=xmldata.settings.inclusions
+ if inclusions then
+ inclusions[#inclusions+1]=name
+ else
+ xmldata.settings.inclusions={ name }
+ end
+ if child.er then
+ local badinclusions=xmldata.settings.badinclusions
+ if badinclusions then
+ badinclusions[#badinclusions+1]=name
+ else
+ xmldata.settings.badinclusions={ name }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+xml.include=include
+function xml.inclusion(e,default)
+ while e do
+ local f=e.__f__
+ if f then
+ return f
+ else
+ e=e.__p__
+ end
+ end
+ return default
+end
+local function getinclusions(key,e,sorted)
+ while e do
+ local settings=e.settings
+ if settings then
+ local inclusions=settings[key]
+ if inclusions then
+ inclusions=table.unique(inclusions)
+ if sorted then
+ table.sort(inclusions)
+ end
+ return inclusions
+ else
+ e=e.__p__
+ end
+ else
+ e=e.__p__
+ end
+ end
+end
+function xml.inclusions(e,sorted)
+ return getinclusions("inclusions",e,sorted)
+end
+function xml.badinclusions(e,sorted)
+ return getinclusions("badinclusions",e,sorted)
+end
+local b_collapser=lpeg.patterns.b_collapser
+local m_collapser=lpeg.patterns.m_collapser
+local e_collapser=lpeg.patterns.e_collapser
+local b_stripper=lpeg.patterns.b_stripper
+local m_stripper=lpeg.patterns.m_stripper
+local e_stripper=lpeg.patterns.e_stripper
+local lpegmatch=lpeg.match
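+-- stripelement strips (or with nolines collapses) whitespace in an element's string children, either only at the edges or, with anywhere, in all of them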
+local function stripelement(e,nolines,anywhere)
+ local edt=e.dt
+ if edt then
+ local n=#edt
+ if n==0 then
+ return e
+ elseif anywhere then
+ local t={}
+ local m=0
+ for e=1,n do
+ local str=edt[e]
+ if type(str)~="string" then
+ m=m+1
+ t[m]=str
+ elseif str~="" then
+ if nolines then
+ str=lpegmatch((n==1 and b_collapser) or (n==m and e_collapser) or m_collapser,str)
+ else
+ str=lpegmatch((n==1 and b_stripper) or (n==m and e_stripper) or m_stripper,str)
+ end
+ if str~="" then
+ m=m+1
+ t[m]=str
+ end
+ end
+ end
+ e.dt=t
+ else
+ local str=edt[1]
+ if type(str)=="string" then
+ if str~="" then
+ str=lpegmatch(nolines and b_collapser or b_stripper,str)
+ end
+ if str=="" then
+ remove(edt,1)
+ n=n-1
+ else
+ edt[1]=str
+ end
+ end
+ if n>0 then
+ str=edt[n]
+ if type(str)=="string" then
+ if str=="" then
+ remove(edt)
+ else
+ str=lpegmatch(nolines and e_collapser or e_stripper,str)
+ if str=="" then
+ remove(edt)
+ else
+ edt[n]=str
+ end
+ end
+ end
+ end
+ end
+ end
+ return e
+end
+xml.stripelement=stripelement
+function xml.strip(root,pattern,nolines,anywhere)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
+ end
+ end
+end
+local function renamespace(root,oldspace,newspace)
+ local ndt=#root.dt
+ for i=1,ndt or 0 do
+ local e=root[i]
+ if type(e)=="table" then
+ if e.ns==oldspace then
+ e.ns=newspace
+ if e.rn then
+ e.rn=newspace
+ end
+ end
+ local edt=e.dt
+ if edt then
+ renamespace(edt,oldspace,newspace)
+ end
+ end
+ end
+end
+xml.renamespace=renamespace
+function xml.remaptag(root,pattern,newtg)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg=newtg
+ end
+ end
+end
+function xml.remapnamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns=newns
+ end
+ end
+end
+function xml.checknamespace(root,pattern,newns)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ if (not e.rn or e.rn=="") and e.ns=="" then
+ e.rn=newns
+ end
+ end
+ end
+end
+function xml.remapname(root,pattern,newtg,newns,newrn)
+ local collected=xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ e.tg,e.ns,e.rn=newtg,newns,newrn
+ end
+ end
+end
+function xml.cdatatotext(e)
+ local dt=e.dt
+ if #dt==1 then
+ local first=dt[1]
+ if first.tg=="@cd@" then
+ e.dt=first.dt
+ end
+ else
+ end
+end
+function xml.texttocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(dt)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+function xml.elementtocdata(e)
+ local dt=e.dt
+ local s=xml.tostring(e)
+ e.tg="@cd@"
+ e.special=true
+ e.ns=""
+ e.rn=""
+ e.dt={ s }
+ e.at=nil
+end
+xml.builtinentities=table.tohash { "amp","quot","apos","lt","gt" }
+local entities=characters and characters.entities or nil
+local builtinentities=xml.builtinentities
+function xml.addentitiesdoctype(root,option)
+ if not entities then
+ require("char-ent")
+ entities=characters.entities
+ end
+ if entities and root and root.tg=="@rt@" and root.statistics then
+ local list={}
+ local hexify=option=="hexadecimal"
+ for k,v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e=entities[k]
+ if not e then
+ e=format("[%s]",k)
+ elseif hexify then
+ e=format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1]=format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt=root.dt
+ local n=dt[1].tg=="@pi@" and 2 or 1
+ if #list>0 then
+ insert(dt,n,{ "\n" })
+ insert(dt,n,{
+ tg="@dt@",
+ dt={ format("Something [\n%s\n] ",concat(list)) },
+ ns="",
+ special=true,
+ })
+ insert(dt,n,{ "\n\n" })
+ else
+ end
+ end
+end
+xml.all=xml.each
+xml.insert=xml.insertafter
+xml.inject=xml.injectafter
+xml.after=xml.insertafter
+xml.before=xml.insertbefore
+xml.process=xml.each
+xml.obsolete=xml.obsolete or {}
+local obsolete=xml.obsolete
+xml.strip_whitespace=xml.strip obsolete.strip_whitespace=xml.strip
+xml.collect_elements=xml.collect obsolete.collect_elements=xml.collect
+xml.delete_element=xml.delete obsolete.delete_element=xml.delete
+xml.replace_element=xml.replace obsolete.replace_element=xml.replace
+xml.each_element=xml.each obsolete.each_element=xml.each
+xml.process_elements=xml.process obsolete.process_elements=xml.process
+xml.insert_element_after=xml.insertafter obsolete.insert_element_after=xml.insertafter
+xml.insert_element_before=xml.insertbefore obsolete.insert_element_before=xml.insertbefore
+xml.inject_element_after=xml.injectafter obsolete.inject_element_after=xml.injectafter
+xml.inject_element_before=xml.injectbefore obsolete.inject_element_before=xml.injectbefore
+xml.process_attributes=xml.processattributes obsolete.process_attributes=xml.processattributes
+xml.collect_texts=xml.collecttexts obsolete.collect_texts=xml.collecttexts
+xml.inject_element=xml.inject obsolete.inject_element=xml.inject
+xml.remap_tag=xml.remaptag obsolete.remap_tag=xml.remaptag
+xml.remap_name=xml.remapname obsolete.remap_name=xml.remapname
+xml.remap_namespace=xml.remapnamespace obsolete.remap_namespace=xml.remapnamespace
+function xml.cdata(e)
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e=collected[1]
+ if e then
+ local dt=e.dt
+ if dt and #dt==1 then
+ local first=dt[1]
+ return first.tg=="@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+function xml.insertcomment(e,str,n)
+ insert(e.dt,n or 1,{
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.insertcdata(e,str,n)
+ insert(e.dt,n or 1,{
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ })
+end
+function xml.setcomment(e,str,n)
+ e.dt={ {
+ tg="@cm@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
+end
+function xml.setcdata(e,str)
+ e.dt={ {
+ tg="@cd@",
+ ns="",
+ special=true,
+ at={},
+ dt={ str },
+ } }
+end
+function xml.separate(x,pattern)
+ local collected=xmlapplylpath(x,pattern)
+ if collected then
+ for c=1,#collected do
+ local e=collected[c]
+ local d=e.dt
+ if d==x then
+ report_xml("warning: xml.separate changes root")
+ x=d
+ end
+ local t,n={ "\n" },1
+ local i,nd=1,#d
+ while i<=nd do
+ while i<=nd do
+ local di=d[i]
+ if type(di)=="string" then
+ if di=="\n" or find(di,"^%s+$") then
+ i=i+1
+ else
+ d[i]=strip(di)
+ break
+ end
+ else
+ break
+ end
+ end
+ if i>nd then
+ break
+ end
+ t[n+1]="\n"
+ t[n+2]=d[i]
+ t[n+3]="\n"
+ n=n+3
+ i=i+1
+ end
+ t[n+1]="\n"
+ setmetatable(t,getmetatable(d))
+ e.dt=t
+ end
+ end
+ return x
+end
+local helpers=xml.helpers or {}
+xml.helpers=helpers
+local function normal(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)=="string" and str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+local function recurse(e,action)
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local str=edt[i]
+ if type(str)~="string" then
+ recurse(str,action)
+ elseif str~="" then
+ edt[i]=action(str)
+ end
+ end
+ end
+end
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
+end
+local specials={
+ ["@rt@"]="root",
+ ["@pi@"]="instruction",
+ ["@cm@"]="comment",
+ ["@dt@"]="declaration",
+ ["@cd@"]="cdata",
+}
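+-- convert turns an element into a plain lua table, either flat ([0] holds the name) or with _tag, _namespace and _type fields, optionally stripping strings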
+local function convert(x,strip,flat)
+ local ns=x.ns
+ local tg=x.tg
+ local at=x.at
+ local dt=x.dt
+ local node=flat and {
+ [0]=(not x.special and (ns~="" and ns..":"..tg or tg)) or nil,
+ } or {
+ _namespace=ns~="" and ns or nil,
+ _tag=not x.special and tg or nil,
+ _type=specials[tg] or "_element",
+ }
+ if at then
+ for k,v in next,at do
+ node[k]=v
+ end
+ end
+ local n=0
+ for i=1,#dt do
+ local di=dt[i]
+ if type(di)=="table" then
+ if flat and di.special then
+ else
+ di=convert(di,strip,flat)
+ if di then
+ n=n+1
+ node[n]=di
+ end
+ end
+ elseif strip then
+ di=lpegmatch(strip,di)
+ if di~="" then
+ n=n+1
+ node[n]=di
+ end
+ else
+ n=n+1
+ node[n]=di
+ end
+ end
+ if next(node) then
+ return node
+ end
+end
+function xml.totable(x,strip,flat)
+ if type(x)=="table" then
+ if strip then
+ strip=striplinepatterns[strip]
+ end
+ return convert(x,strip,flat)
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["lxml-xml"] = package.loaded["lxml-xml"] or true
+
+-- original size: 10274, stripped down to: 7538
+
+if not modules then modules={} end modules ['lxml-xml']={
+ version=1.001,
+ comment="this module is the basis for the lxml-* ones",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local find,lower,upper=string.find,string.lower,string.upper
+local xml=xml
+local finalizers=xml.finalizers.xml
+local xmlfilter=xml.filter
+local xmltostring=xml.tostring
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmlnewhandlers=xml.newhandlers
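+-- the helpers below implement the finalizers that can close an lpath expression (first, last, count, text, attribute, ...)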
+local function first(collected)
+ return collected and collected[1]
+end
+local function last(collected)
+ return collected and collected[#collected]
+end
+local function all(collected)
+ return collected
+end
+local reverse=table.reversed
+local function attribute(collected,name)
+ if collected and #collected>0 then
+ local at=collected[1].at
+ return at and at[name]
+ end
+end
+local function att(id,name)
+ local at=id.at
+ return at and at[name]
+end
+local function count(collected)
+ return collected and #collected or 0
+end
+local function position(collected,n)
+ if not collected then
+ return 0
+ end
+ local nc=#collected
+ if nc==0 then
+ return 0
+ end
+ n=tonumber(n) or 0
+ if n<0 then
+ return collected[nc+n+1]
+ elseif n>0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
+end
+local function match(collected)
+ return collected and #collected>0 and collected[1].mi or 0
+end
+local function index(collected)
+ return collected and #collected>0 and collected[1].ni or 0
+end
+local function attributes(collected,arguments)
+ if collected and #collected>0 then
+ local at=collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at
+ end
+ end
+end
+local function chainattribute(collected,arguments)
+ if collected and #collected>0 then
+ local e=collected[1]
+ while e do
+ local at=e.at
+ if at then
+ local a=at[arguments]
+ if a then
+ return a
+ end
+ else
+ break
+ end
+ e=e.__p__
+ end
+ end
+ return ""
+end
+local function raw(collected)
+ if collected and #collected>0 then
+ local e=collected[1] or collected
+ return e and xmltostring(e) or ""
+ else
+ return ""
+ end
+end
+local result -- shared by the string handler below
+local xmltexthandler=xmlnewhandlers {
+ name="string",
+ initialize=function()
+ result={}
+ return result
+ end,
+ finalize=function()
+ return concat(result)
+ end,
+ handle=function(...)
+ result[#result+1]=concat {... }
+ end,
+ escape=false,
+}
+local function xmltotext(root)
+ local dt=root.dt
+ if not dt then
+ return ""
+ end
+ local nt=#dt
+ if nt==0 then
+ return ""
+ elseif nt==1 and type(dt[1])=="string" then
+ return dt[1]
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+local function text(collected)
+ if collected then
+ local e=collected[1] or collected
+ return e and xmltotext(e) or ""
+ else
+ return ""
+ end
+end
+local function texts(collected)
+ if not collected then
+ return {}
+ end
+ local nc=#collected
+ if nc==0 then
+ return {}
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ if e and e.dt then
+ n=n+1
+ t[n]=e.dt
+ end
+ end
+ return t
+end
+local function tag(collected,n)
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ return c and c.tg
+end
+local function name(collected,n)
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local c
+ if n==0 or not n then
+ c=collected[1]
+ elseif n>1 then
+ c=collected[n]
+ else
+ c=collected[nc-n+1]
+ end
+ if not c then
+ elseif c.ns=="" then
+ return c.tg
+ else
+ return c.ns..":"..c.tg
+ end
+end
+local function tags(collected,nonamespace)
+ if not collected then
+ return
+ end
+ local nc=#collected
+ if nc==0 then
+ return
+ end
+ local t,n={},0
+ for c=1,nc do
+ local e=collected[c]
+ local ns,tg=e.ns,e.tg
+ n=n+1
+ if nonamespace or ns=="" then
+ t[n]=tg
+ else
+ t[n]=ns..":"..tg
+ end
+ end
+ return t
+end
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc=#collected
+ if nc==0 then
+ return true
+ end
+ for c=1,nc do
+ local e=collected[c]
+ if e then
+ local edt=e.dt
+ if edt then
+ local n=#edt
+ if n==1 then
+ local edk=edt[1]
+ local typ=type(edk)
+ if typ=="table" then
+ return false
+ elseif edk~="" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
+ return false
+ end
+ elseif n>1 then
+ return false
+ end
+ end
+ end
+ end
+ return true
+end
+finalizers.first=first
+finalizers.last=last
+finalizers.all=all
+finalizers.reverse=reverse
+finalizers.elements=all
+finalizers.default=all
+finalizers.attribute=attribute
+finalizers.att=att
+finalizers.count=count
+finalizers.position=position
+finalizers.match=match
+finalizers.index=index
+finalizers.attributes=attributes
+finalizers.chainattribute=chainattribute
+finalizers.text=text
+finalizers.texts=texts
+finalizers.tag=tag
+finalizers.name=name
+finalizers.tags=tags
+finalizers.empty=empty
+function xml.first(id,pattern)
+ return first(xmlfilter(id,pattern))
+end
+function xml.last(id,pattern)
+ return last(xmlfilter(id,pattern))
+end
+function xml.count(id,pattern)
+ return count(xmlfilter(id,pattern))
+end
+function xml.attribute(id,pattern,a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
+end
+function xml.raw(id,pattern)
+ if pattern then
+ return raw(xmlfilter(id,pattern))
+ else
+ return raw(id)
+ end
+end
+function xml.text(id,pattern)
+ if pattern then
+ local collected=xmlfilter(id,pattern)
+ return collected and #collected>0 and xmltotext(collected[1]) or ""
+ elseif id then
+ return xmltotext(id) or ""
+ else
+ return ""
+ end
+end
+xml.content=text
+function xml.position(id,pattern,n)
+ return position(xmlfilter(id,pattern),n)
+end
+function xml.match(id,pattern)
+ return match(xmlfilter(id,pattern))
+end
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
+end
+xml.all=xml.filter
+xml.index=xml.position
+xml.found=xml.filter
+local function totable(x)
+ local t={}
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg]=xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+xml.table=totable
+finalizers.table=totable
+local function textonly(e,t)
+ if e then
+ local edt=e.dt
+ if edt then
+ for i=1,#edt do
+ local e=edt[i]
+ if type(e)=="table" then
+ textonly(e,t)
+ else
+ t[#t+1]=e
+ end
+ end
+ end
+ end
+ return t
+end
+function xml.textonly(e)
+ return concat(textonly(e,{}))
+end
+function finalizers.lowerall(collected)
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=lower(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[lower(k)]=v
+ end
+ e.at=t
+ end
+ end
+ end
+end
+function finalizers.upperall(collected)
+ for c=1,#collected do
+ local e=collected[c]
+ if not e.special then
+ e.tg=upper(e.tg)
+ local eat=e.at
+ if eat then
+ local t={}
+ for k,v in next,eat do
+ t[upper(k)]=v
+ end
+ e.at=t
+ end
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["trac-xml"] = package.loaded["trac-xml"] or true
+
+-- original size: 6351, stripped down to: 4919
+
+if not modules then modules={} end modules ['trac-xml']={
+ version=1.001,
+ comment="companion to trac-log.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local formatters=string.formatters
+local reporters=logs.reporters
+local xmlserialize=xml.serialize
+local xmlcollected=xml.collected
+local xmltext=xml.text
+local xmlfirst=xml.first
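+-- showhelp renders the xml formatted helpinfo (flags, examples, comments) through the specification's report function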
+local function showhelp(specification,...)
+ local root=xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs=xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref",function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories=select("#",...)==0 and true or table.tohash {... }
+ local nofcategories=xml.count(root,"/application/flags/category")
+ local report=specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname=category.at.name or ""
+ if wantedcategories==true or wantedcategories[categoryname] then
+ if nofcategories>1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name=flag.at.name
+ local value=flag.at.value
+ local short=xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title=xmltext(xmlfirst(category,"/title"))
+ if title and title~="" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command=xmltext(xmlfirst(example,"/command"))
+ local comment=xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment=xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+local reporthelp=reporters.help
+local exporthelp=reporters.export
+local function xmlfound(t)
+ local helpinfo=t.helpinfo
+ if type(helpinfo)=="table" then
+ return false
+ end
+ if type(helpinfo)~="string" then
+ helpinfo="Warning: no helpinfo found."
+ t.helpinfo=helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript=environment.ownscript
+ local helpdata=false
+ if ownscript then
+ local helpfile=file.join(file.pathpart(ownscript),helpinfo)
+ helpdata=io.loaddata(helpfile)
+ if helpdata=="" then
+ helpdata=false
+ end
+ end
+ if not helpdata then
+ local helpfile=resolvers.findfile(helpinfo,"tex")
+ helpdata=helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata~="" then
+ helpinfo=helpdata
+ else
+ helpinfo=formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo=helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods=="" then
+ methods=environment.arguments["exporthelp"]
+ end
+ if not filename or filename=="" then
+ filename=environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters=logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods=="all" then
+ methods=table.keys(exporters)
+ elseif type(methods)=="string" then
+ methods=utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename)~="string" or filename=="" then
+ filename=false
+ elseif file.pathpart(filename)=="" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method=methods[i]
+ local exporter=exporters[method]
+ if exporter then
+ local result=exporter(t,method)
+ if result and result~="" then
+ if filename then
+ local fullname=file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-ini"] = package.loaded["data-ini"] or true
+
+-- original size: 11085, stripped down to: 7662
+
+if not modules then modules={} end modules ['data-ini']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local next,type,getmetatable,rawset=next,type,getmetatable,rawset
+local gsub,find,gmatch,char=string.gsub,string.find,string.gmatch,string.char
+local filedirname,filebasename,filejoin=file.dirname,file.basename,file.join
+local ostype,osname,osuname,ossetenv,osgetenv=os.type,os.name,os.uname,os.setenv,os.getenv
+local P,S,R,C,Cs,Cc,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.match
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers.register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local report_initialization=logs.reporter("resolvers","initialization")
+resolvers=resolvers or {}
+local resolvers=resolvers
+texconfig.kpse_init=false
+texconfig.shell_escape='t'
+if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then
+ local default_texmfcnf=kpse.default_texmfcnf()
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTODIR","selfautodir:")
+ default_texmfcnf=gsub(default_texmfcnf,"$SELFAUTOPARENT","selfautoparent:")
+ default_texmfcnf=gsub(default_texmfcnf,"$HOME","home:")
+ environment.default_texmfcnf=default_texmfcnf
+end
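+-- kpse is replaced by a trap table: any accidental access aborts the run, the original library stays available as kpse.original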
+kpse={ original=kpse }
+setmetatable(kpse,{
+ __index=function(kp,name)
+ report_initialization("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
+ end
+} )
+do
+ local osfontdir=osgetenv("OSFONTDIR")
+ if osfontdir and osfontdir~="" then
+ elseif osname=="windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname=="macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
+end
+do
+ local homedir=osgetenv(ostype=="windows" and 'USERPROFILE' or 'HOME') or ''
+ if not homedir or homedir=="" then
+ homedir=char(127)
+ end
+ homedir=file.collapsepath(homedir)
+ ossetenv("HOME",homedir)
+ ossetenv("USERPROFILE",homedir)
+ environment.homedir=homedir
+end
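+-- determine the path of the binary we run from, if needed by scanning PATH and following symlinks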
+do
+ local args=environment.originalarguments or arg
+ if not environment.ownmain then
+ environment.ownmain=status and string.match(string.lower(status.banner),"this is ([%a]+)") or "luatex"
+ end
+ local ownbin=environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath=environment.ownpath or os.selfdir
+ ownbin=file.collapsepath(ownbin)
+ ownpath=file.collapsepath(ownpath)
+ if not ownpath or ownpath=="" or ownpath=="unset" then
+ ownpath=args[-1] or arg[-1]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath=="" then
+ ownpath=args[-0] or arg[-0]
+ ownpath=ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary=ownbin
+ if not ownpath or ownpath=="" then
+ ownpath=ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath=="" then
+ if os.binsuffix~="" then
+ binary=file.replacesuffix(binary,os.binsuffix)
+ end
+ local path=osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b=filejoin(p,binary)
+ if lfs.isfile(b) then
+ local olddir=lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp=lfs.currentdir()
+ if trace_locating and p~=pp then
+ report_initialization("following symlink %a to %a",p,pp)
+ end
+ ownpath=pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_initialization("unable to check path %a",p)
+ end
+ ownpath=p
+ end
+ break
+ end
+ end
+ end
+ end
+ if not ownpath or ownpath=="" then
+ ownpath="."
+ report_initialization("forcing fallback to ownpath %a",ownpath)
+ elseif trace_locating then
+ report_initialization("using ownpath %a",ownpath)
+ end
+ end
+ environment.ownbin=ownbin
+ environment.ownpath=ownpath
+end
+resolvers.ownpath=environment.ownpath
+function resolvers.getownpath()
+ return environment.ownpath
+end
+do
+ local ownpath=environment.ownpath or dir.current()
+ if ownpath then
+ ossetenv('SELFAUTOLOC',file.collapsepath(ownpath))
+ ossetenv('SELFAUTODIR',file.collapsepath(ownpath.."/.."))
+ ossetenv('SELFAUTOPARENT',file.collapsepath(ownpath.."/../.."))
+ else
+ report_initialization("error: unable to locate ownpath")
+ os.exit()
+ end
+end
+local texos=environment.texos or osgetenv("TEXOS")
+local texmfos=environment.texmfos or osgetenv('SELFAUTODIR')
+if not texos or texos=="" then
+ texos=file.basename(texmfos)
+end
+ossetenv('TEXMFOS',texmfos)
+ossetenv('TEXOS',texos)
+ossetenv('SELFAUTOSYSTEM',os.platform)
+environment.texos=texos
+environment.texmfos=texmfos
+local texroot=environment.texroot or osgetenv("TEXROOT")
+if not texroot or texroot=="" then
+ texroot=osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+environment.texroot=file.collapsepath(texroot)
+if type(profiler)=="table" and not jit then
+ directives.register("system.profile",function()
+ profiler.start("luatex-profile.log")
+ end)
+end
+local prefixes=utilities.storage.allocate()
+resolvers.prefixes=prefixes
+local resolved={}
+local abstract={}
+local dynamic={}
+function resolvers.resetresolve(str)
+ resolved,abstract={},{}
+end
+function resolvers.allprefixes(separator)
+ local all=table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i]=all[i]..":"
+ end
+ end
+ return all
+end
+local function _resolve_(method,target)
+ local action=prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method..":"..target
+ end
+end
+function resolvers.unresolve(str)
+ return abstract[str] or str
+end
+function resolvers.setdynamic(str)
+ dynamic[str]=true
+end
+local pattern=Cs((C(R("az")^2)*P(":")*C((1-S(" \"\';,"))^1)/_resolve_+P(1))^0)
+local prefix=C(R("az")^2)*P(":")
+local target=C((1-S(" \"\';,"))^1)
+local notarget=(#S(";,")+P(-1))*Cc("")
+local p_resolve=Cs(((prefix*(target+notarget))/_resolve_+P(1))^0)
+local p_simple=prefix*P(-1)
+local function resolve(str)
+ if type(str)=="table" then
+ local res={}
+ for i=1,#str do
+ res[i]=resolve(str[i])
+ end
+ return res
+ end
+ local res=resolved[str]
+ if res then
+ return res
+ end
+ local simple=lpegmatch(p_simple,str)
+ local action=prefixes[simple]
+ if action then
+ local res=action(res)
+ if not dynamic[simple] then
+ resolved[simple]=res
+ abstract[res]=simple
+ end
+ return res
+ end
+ res=lpegmatch(p_resolve,str)
+ resolved[str]=res
+ abstract[res]=str
+ return res
+end
+resolvers.resolve=resolve
+if type(osuname)=="function" then
+ for k,v in next,osuname() do
+ if not prefixes[k] then
+ prefixes[k]=function() return v end
+ end
+ end
+end
+if ostype=="unix" then
+ local pattern
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+ local colon=P(":")
+ for k,v in table.sortedpairs(prefixes) do
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ pattern=Cs((p*colon+colon/";"+P(1))^0)
+ end
+ makepattern()
+ table.setmetatablenewindex(prefixes,makepattern)
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+else
+ function resolvers.repath(str)
+ return str
+ end
+end
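+-- Illustrative sketch (editorial, not part of the original module): strings of
+-- the form "<prefix>:<target>" are mapped through handlers registered in
+-- resolvers.prefixes; unknown prefixes are passed through unchanged. Assuming a
+-- hypothetical "demo" prefix:
+--
+--   resolvers.prefixes.demo = function(target) return "/tmp/" .. (target or "") end
+--   print(resolvers.resolve("demo:fonts/test.otf")) -- expected "/tmp/fonts/test.otf"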
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-exp"] = package.loaded["data-exp"] or true
+
+-- original size: 17216, stripped down to: 10657
+
+if not modules then modules={} end modules ['data-exp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local format,find,gmatch,lower,char,sub=string.format,string.find,string.gmatch,string.lower,string.char,string.sub
+local concat,sort=table.concat,table.sort
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local Ct,Cs,Cc,Carg,P,C,S=lpeg.Ct,lpeg.Cs,lpeg.Cc,lpeg.Carg,lpeg.P,lpeg.C,lpeg.S
+local type,next=type,next
+local isdir=lfs.isdir
+local ostype=os.type
+local collapsepath,joinpath,basename=file.collapsepath,file.join,file.basename
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_expansions=false trackers.register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_globbing=true trackers.register("resolvers.globbing",function(v) trace_globbing=v end)
+local report_expansions=logs.reporter("resolvers","expansions")
+local report_globbing=logs.reporter("resolvers","globbing")
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+local function f_both(a,b)
+ local t,n={},0
+ for sb in gmatch(b,"[^,]+") do
+ for sa in gmatch(a,"[^,]+") do
+ n=n+1;t[n]=sa..sb
+ end
+ end
+ return concat(t,",")
+end
+local comma=P(",")
+local nocomma=(1-comma)^1
+local docomma=comma^1/","
+local before=Cs((nocomma*Carg(1)+docomma)^0)
+local after=Cs((Carg(1)*nocomma+docomma)^0)
+local both=Cs(((C(nocomma)*Carg(1))/function(a,b) return lpegmatch(before,b,1,a) end+docomma)^0)
+local function f_first (a,b) return lpegmatch(after,b,1,a) end
+local function f_second(a,b) return lpegmatch(before,a,1,b) end
+local function f_both (a,b) return lpegmatch(both,b,1,a) end
+local left=P("{")
+local right=P("}")
+local var=P((1-S("{}" ))^0)
+local set=P((1-S("{},"))^0)
+local other=P(1)
+local l_first=Cs((Cc("{")*(C(set)*left*C(var)*right/f_first)*Cc("}")+other )^0 )
+local l_second=Cs((Cc("{")*(left*C(var)*right*C(set)/f_second)*Cc("}")+other )^0 )
+local l_both=Cs((Cc("{")*(left*C(var)*right*left*C(var)*right/f_both)*Cc("}")+other )^0 )
+local l_rest=Cs((left*var*(left/"")*var*(right/"")*var*right+other )^0 )
+local stripper_1=lpeg.stripper ("{}@")
+local replacer_1=lpeg.replacer { { ",}",",@}" },{ "{,","{@," },}
+local function splitpathexpr(str,newlist,validate)
+ if trace_expansions then
+ report_expansions("expanding variable %a",str)
+ end
+ local t,ok,done=newlist or {},false,false
+ local n=#t
+ str=lpegmatch(replacer_1,str)
+ repeat
+ local old=str
+ repeat
+ local old=str
+ str=lpegmatch(l_first,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_second,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_both,str)
+ until old==str
+ repeat
+ local old=str
+ str=lpegmatch(l_rest,str)
+ until old==str
+ until old==str
+ str=lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s=validate(s)
+ if s then
+ n=n+1
+ t[n]=s
+ end
+ end
+ else
+ for s in gmatch(str,"[^,]+") do
+ n=n+1
+ t[n]=s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_expansions("% 4i: %s",k,t[k])
+ end
+ end
+ return t
+end
+local function validate(s)
+ s=collapsepath(s)
+ return s~="" and not find(s,"^!*unset/*$") and s
+end
+resolvers.validatedpath=validate
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist={}
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
+ end
+ return newlist
+end
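+-- Illustrative sketch (editorial): splitpathexpr performs kpathsea style brace
+-- expansion on comma separated specifications, so something like
+--
+--   local list = resolvers.expandedpathfromlist { "foo{1,2}bar" }
+--
+-- is expected to yield { "foo1bar", "foo2bar" }, with entries matching the
+-- "unset" pattern dropped by the validate helper.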
+local usedhomedir=nil
+local donegation=(P("!")/"" )^0
+local doslashes=(P("\\")/"/"+1)^0
+local function expandedhome()
+ if not usedhomedir then
+ usedhomedir=lpegmatch(Cs(donegation*doslashes),environment.homedir or "")
+ if usedhomedir=="~" or usedhomedir=="" or not isdir(usedhomedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent path using current path")
+ end
+ usedhomedir="."
+ end
+ end
+ return usedhomedir
+end
+local dohome=((P("~")+P("$HOME")+P("%HOME%"))/expandedhome)^0
+local cleanup=Cs(donegation*dohome*doslashes)
+resolvers.cleanpath=function(str)
+ return str and lpegmatch(cleanup,str) or ""
+end
+local expandhome=P("~")/"$HOME"
+local dodouble=P('"')/""*(expandhome+(1-P('"')))^0*P('"')/""
+local dosingle=P("'")/""*(expandhome+(1-P("'")))^0*P("'")/""
+local dostring=(expandhome+1 )^0
+local stripper=Cs(
+ lpegpatterns.unspacer*(dosingle+dodouble+dostring)*lpegpatterns.unspacer
+)
+function resolvers.checkedvariable(str)
+ return type(str)=="string" and lpegmatch(stripper,str) or str
+end
+local cache={}
+local splitter=lpeg.tsplitat(";")
+local backslashswapper=lpeg.replacer("\\","/")
+local function splitconfigurationpath(str)
+ if str then
+ local found=cache[str]
+ if not found then
+ if str=="" then
+ found={}
+ else
+ local split=lpegmatch(splitter,lpegmatch(backslashswapper,str))
+ found={}
+ local noffound=0
+ for i=1,#split do
+ local s=split[i]
+ if not find(s,"^{*unset}*") then
+ noffound=noffound+1
+ found[noffound]=s
+ end
+ end
+ if trace_expansions then
+ report_expansions("splitting path specification %a",str)
+ for k=1,noffound do
+ report_expansions("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str]=found
+ end
+ end
+ return found
+ end
+end
+resolvers.splitconfigurationpath=splitconfigurationpath
+function resolvers.splitpath(str)
+ if type(str)=='table' then
+ return str
+ else
+ return splitconfigurationpath(str)
+ end
+end
+function resolvers.joinpath(str)
+ if type(str)=='table' then
+ return joinpath(str)
+ else
+ return str
+ end
+end
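+-- Illustrative sketch (editorial): configuration paths are split on semicolons
+-- with backslashes normalized, so
+--
+--   resolvers.splitpath("c:\\texmf;/usr/share/texmf")
+--
+-- is expected to return { "c:/texmf", "/usr/share/texmf" }; results are cached.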
+local attributes,directory=lfs.attributes,lfs.dir
+local weird=P(".")^1+lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local lessweird=P(".")^1+lpeg.anywhere(S("~`#$%^&*:;\"\'||<>,?\n\r\t"))
+local timer={}
+local scanned={}
+local nofscans=0
+local scancache={}
+local fullcache={}
+local nofsharedscans=0
+local function scan(files,remap,spec,path,n,m,r,onlyone,tolerant)
+ local full=path=="" and spec or (spec..path..'/')
+ local dirs={}
+ local nofdirs=0
+ local pattern=tolerant and lessweird or weird
+ for name in directory(full) do
+ if not lpegmatch(pattern,name) then
+ local mode=attributes(full..name,"mode")
+ if mode=="file" then
+ n=n+1
+ local lower=lower(name)
+ local paths=files[lower]
+ if paths then
+ if onlyone then
+ else
+ if type(paths)=="string" then
+ files[lower]={ paths,path }
+ else
+ paths[#paths+1]=path
+ end
+ if name~=lower then
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
+ end
+ end
+ else
+ files[lower]=path
+ if name~=lower then
+ local rl=remap[lower]
+ if not rl then
+ remap[lower]=name
+ r=r+1
+ elseif trace_globbing and rl~=name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
+ end
+ end
+ elseif mode=="directory" then
+ m=m+1
+ nofdirs=nofdirs+1
+ if path~="" then
+ dirs[nofdirs]=path.."/"..name
+ else
+ dirs[nofdirs]=name
+ end
+ end
+ end
+ end
+ if nofdirs>0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files,remap,n,m,r=scan(files,remap,spec,dirs[i],n,m,r,onlyone,tolerant)
+ end
+ end
+ scancache[sub(full,1,-2)]=files
+ return files,remap,n,m,r
+end
+function resolvers.scanfiles(path,branch,usecache,onlyonce,tolerant)
+ local realpath=resolveprefix(path)
+ if usecache then
+ local content=fullcache[realpath]
+ if content then
+ if trace_locating then
+ report_expansions("using cached scan of path %a, branch %a",path,branch or path)
+ end
+ nofsharedscans=nofsharedscans+1
+ return content
+ end
+ end
+ statistics.starttiming(timer)
+ if trace_locating then
+ report_expansions("scanning path %a, branch %a",path,branch or path)
+ end
+ local content
+ if isdir(realpath) then
+ local files,remap,n,m,r=scan({},{},realpath..'/',"",0,0,0,onlyonce,tolerant)
+ content={
+ metadata={
+ path=path,
+ files=n,
+ directories=m,
+ remappings=r,
+ },
+ files=files,
+ remap=remap,
+ }
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ else
+ content={
+ metadata={
+ path=path,
+ files=0,
+ directories=0,
+ remappings=0,
+ },
+ files={},
+ remap={},
+ }
+ if trace_locating then
+ report_expansions("invalid path %a",realpath)
+ end
+ end
+ if usecache then
+ scanned[#scanned+1]=realpath
+ fullcache[realpath]=content
+ end
+ nofscans=nofscans+1
+ statistics.stoptiming(timer)
+ return content
+end
+function resolvers.simplescanfiles(path,branch,usecache)
+ return resolvers.scanfiles(path,branch,usecache,true,true)
+end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n=nofscans,
+ shared=nofsharedscans,
+ time=statistics.elapsedtime(timer),
+ paths=scanned,
+ }
+end
+function resolvers.get_from_content(content,path,name)
+ if not content then
+ return
+ end
+ local files=content.files
+ if not files then
+ return
+ end
+ local remap=content.remap
+ if not remap then
+ return
+ end
+ if name then
+ local used=lower(name)
+ return path,remap[used] or used
+ else
+ local name=path
+ local used=lower(name)
+ local path=files[used]
+ if path then
+ return path,remap[used] or used
+ end
+ end
+end
+local nothing=function() end
+function resolvers.filtered_from_content(content,pattern)
+ if content and type(pattern)=="string" then
+ local pattern=lower(pattern)
+ local files=content.files
+ local remap=content.remap
+ if files and remap then
+ local n=next(files)
+ local function iterator()
+ while n do
+ local k=n
+ n=next(files,k)
+ if find(k,pattern) then
+ return files[k],remap and remap[k] or k
+ end
+ end
+ end
+ return iterator
+ end
+ end
+ return nothing
+end
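+-- Illustrative sketch (editorial): a scanned tree can be queried with the two
+-- accessors above, for instance (hypothetical path):
+--
+--   local content = resolvers.scanfiles("/usr/share/texmf")
+--   local where, name = resolvers.get_from_content(content,"context.mkiv")
+--   for where, name in resolvers.filtered_from_content(content,"%.mkiv$") do
+--     print(where,name)
+--   end
+--
+-- get_from_content returns the directory (or list of directories) plus the
+-- filename in its original casing taken from the remap table.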
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-env"] = package.loaded["data-env"] or true
+
+-- original size: 9216, stripped down to: 6798
+
+if not modules then modules={} end modules ['data-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local lower,gsub=string.lower,string.gsub
+local resolvers=resolvers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+local suffixonly=file.suffixonly
+local formats=allocate()
+local suffixes=allocate()
+local dangerous=allocate()
+local suffixmap=allocate()
+local usertypes=allocate()
+resolvers.formats=formats
+resolvers.suffixes=suffixes
+resolvers.dangerous=dangerous
+resolvers.suffixmap=suffixmap
+resolvers.usertypes=usertypes
+local luasuffixes=utilities.lua.suffixes
+local relations=allocate {
+ core={
+ ofm={
+ names={ "ofm","omega font metric","omega font metrics" },
+ variable='OFMFONTS',
+ suffixes={ 'ofm','tfm' },
+ },
+ ovf={
+ names={ "ovf","omega virtual font","omega virtual fonts" },
+ variable='OVFFONTS',
+ suffixes={ 'ovf','vf' },
+ },
+ tfm={
+ names={ "tfm","tex font metric","tex font metrics" },
+ variable='TFMFONTS',
+ suffixes={ 'tfm' },
+ },
+ vf={
+ names={ "vf","virtual font","virtual fonts" },
+ variable='VFFONTS',
+ suffixes={ 'vf' },
+ },
+ otf={
+ names={ "otf","opentype","opentype font","opentype fonts"},
+ variable='OPENTYPEFONTS',
+ suffixes={ 'otf' },
+ },
+ ttf={
+ names={ "ttf","truetype","truetype font","truetype fonts","truetype collection","truetype collections","truetype dictionary","truetype dictionaries" },
+ variable='TTFONTS',
+ suffixes={ 'ttf','ttc','dfont' },
+ },
+ afm={
+ names={ "afm","adobe font metric","adobe font metrics" },
+ variable="AFMFONTS",
+ suffixes={ "afm" },
+ },
+ pfb={
+ names={ "pfb","type1","type 1","type1 font","type 1 font","type1 fonts","type 1 fonts" },
+ variable='T1FONTS',
+ suffixes={ 'pfb','pfa' },
+ },
+ fea={
+ names={ "fea","font feature","font features","font feature file","font feature files" },
+ variable='FONTFEATURES',
+ suffixes={ 'fea' },
+ },
+ cid={
+ names={ "cid","cid map","cid maps","cid file","cid files" },
+ variable='FONTCIDMAPS',
+ suffixes={ 'cid','cidmap' },
+ },
+ fmt={
+ names={ "fmt","format","tex format" },
+ variable='TEXFORMATS',
+ suffixes={ 'fmt' },
+ },
+ mem={
+ names={ 'mem',"metapost format" },
+ variable='MPMEMS',
+ suffixes={ 'mem' },
+ },
+ mp={
+ names={ "mp" },
+ variable='MPINPUTS',
+ suffixes={ 'mp','mpvi','mpiv','mpii' },
+ usertype=true,
+ },
+ tex={
+ names={ "tex" },
+ variable='TEXINPUTS',
+ suffixes={ "tex","mkvi","mkiv","mkii","cld","lfg","xml" },
+ usertype=true,
+ },
+ icc={
+ names={ "icc","icc profile","icc profiles" },
+ variable='ICCPROFILES',
+ suffixes={ 'icc' },
+ },
+ texmfscripts={
+ names={ "texmfscript","texmfscripts","script","scripts" },
+ variable='TEXMFSCRIPTS',
+ suffixes={ 'lua','rb','pl','py' },
+ },
+ lua={
+ names={ "lua" },
+ variable='LUAINPUTS',
+ suffixes={ luasuffixes.lua,luasuffixes.luc,luasuffixes.tma,luasuffixes.tmc },
+ usertype=true,
+ },
+ lib={
+ names={ "lib" },
+ variable='CLUAINPUTS',
+ suffixes=os.libsuffix and { os.libsuffix } or { 'dll','so' },
+ },
+ bib={
+ names={ 'bib' },
+ variable='BIBINPUTS',
+ suffixes={ 'bib' },
+ usertype=true,
+ },
+ bst={
+ names={ 'bst' },
+ variable='BSTINPUTS',
+ suffixes={ 'bst' },
+ usertype=true,
+ },
+ fontconfig={
+ names={ 'fontconfig','fontconfig file','fontconfig files' },
+ variable='FONTCONFIG_PATH',
+ },
+ },
+ obsolete={
+ enc={
+ names={ "enc","enc files","enc file","encoding files","encoding file" },
+ variable='ENCFONTS',
+ suffixes={ 'enc' },
+ },
+ map={
+ names={ "map","map files","map file" },
+ variable='TEXFONTMAPS',
+ suffixes={ 'map' },
+ },
+ lig={
+ names={ "lig files","lig file","ligature file","ligature files" },
+ variable='LIGFONTS',
+ suffixes={ 'lig' },
+ },
+ opl={
+ names={ "opl" },
+ variable='OPLFONTS',
+ suffixes={ 'opl' },
+ },
+ ovp={
+ names={ "ovp" },
+ variable='OVPFONTS',
+ suffixes={ 'ovp' },
+ },
+ },
+ kpse={
+ base={
+ names={ 'base',"metafont format" },
+ variable='MFBASES',
+ suffixes={ 'base','bas' },
+ },
+ cmap={
+ names={ 'cmap','cmap files','cmap file' },
+ variable='CMAPFONTS',
+ suffixes={ 'cmap' },
+ },
+ cnf={
+ names={ 'cnf' },
+ suffixes={ 'cnf' },
+ },
+ web={
+ names={ 'web' },
+ suffixes={ 'web','ch' }
+ },
+ cweb={
+ names={ 'cweb' },
+ suffixes={ 'w','web','ch' },
+ },
+ gf={
+ names={ 'gf' },
+ suffixes={ '<resolution>gf' },
+ },
+ mf={
+ names={ 'mf' },
+ variable='MFINPUTS',
+ suffixes={ 'mf' },
+ },
+ mft={
+ names={ 'mft' },
+ suffixes={ 'mft' },
+ },
+ pk={
+ names={ 'pk' },
+ suffixes={ '<resolution>pk' },
+ },
+ },
+}
+resolvers.relations=relations
+function resolvers.updaterelations()
+ for category,categories in next,relations do
+ for name,relation in next,categories do
+ local rn=relation.names
+ local rv=relation.variable
+ if rn and rv then
+ local rs=relation.suffixes
+ local ru=relation.usertype
+ for i=1,#rn do
+ local rni=lower(gsub(rn[i]," ",""))
+ formats[rni]=rv
+ if rs then
+ suffixes[rni]=rs
+ for i=1,#rs do
+ local rsi=rs[i]
+ suffixmap[rsi]=rni
+ end
+ end
+ end
+ if ru then
+ usertypes[name]=true
+ end
+ end
+ end
+ end
+end
+resolvers.updaterelations()
+local function simplified(t,k)
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
+end
+setmetatableindex(formats,simplified)
+setmetatableindex(suffixes,simplified)
+setmetatableindex(suffixmap,simplified)
+function resolvers.suffixofformat(str)
+ local s=suffixes[str]
+ return s and s[1] or ""
+end
+function resolvers.suffixesofformat(str)
+ return suffixes[str] or {}
+end
+for name,format in next,formats do
+ dangerous[name]=true
+end
+dangerous.tex=nil
+function resolvers.formatofvariable(str)
+ return formats[str] or ''
+end
+function resolvers.formatofsuffix(str)
+ return suffixmap[suffixonly(str)] or 'tex'
+end
+function resolvers.variableofformat(str)
+ return formats[str] or ''
+end
+function resolvers.variableofformatorsuffix(str)
+ local v=formats[str]
+ if v then
+ return v
+ end
+ v=suffixmap[suffixonly(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
+end
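+-- Illustrative sketch (editorial): updaterelations flattens the relations table
+-- into the formats/suffixes/suffixmap lookups, matching names case and space
+-- insensitively, e.g.
+--
+--   resolvers.variableofformat("opentype fonts") -- expected "OPENTYPEFONTS"
+--   resolvers.formatofsuffix("setup.mkiv")       -- expected "tex"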
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-tmp"] = package.loaded["data-tmp"] or true
+
+-- original size: 15618, stripped down to: 11629
+
+if not modules then modules={} end modules ['data-tmp']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,concat=string.format,string.lower,string.gsub,table.concat
+local concat=table.concat
+local mkdirs,isdir,isfile=dir.mkdirs,lfs.isdir,lfs.isfile
+local addsuffix,is_writable,is_readable=file.addsuffix,file.is_writable,file.is_readable
+local formatters=string.formatters
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local report_caches=logs.reporter("resolvers","caches")
+local report_resolvers=logs.reporter("resolvers","caching")
+local resolvers=resolvers
+local cleanpath=resolvers.cleanpath
+local directive_cleanup=false directives.register("system.compile.cleanup",function(v) directive_cleanup=v end)
+local directive_strip=false directives.register("system.compile.strip",function(v) directive_strip=v end)
+local compile=utilities.lua.compile
+function utilities.lua.compile(luafile,lucfile,cleanup,strip)
+ if cleanup==nil then cleanup=directive_cleanup end
+ if strip==nil then strip=directive_strip end
+ return compile(luafile,lucfile,cleanup,strip)
+end
+caches=caches or {}
+local caches=caches
+local luasuffixes=utilities.lua.suffixes
+caches.base=caches.base or "luatex-cache"
+caches.more=caches.more or "context"
+caches.direct=false
+caches.tree=false
+caches.force=true
+caches.ask=false
+caches.relocate=false
+caches.defaults={ "TMPDIR","TEMPDIR","TMP","TEMP","HOME","HOMEPATH" }
+local writable,readables,usedreadables=nil,{},{}
+local function identify()
+ local texmfcaches=resolvers.cleanpathlist("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ cachepath=file.collapsepath(cachepath)
+ local valid=isdir(cachepath)
+ if valid then
+ if is_readable(cachepath) then
+ readables[#readables+1]=cachepath
+ if not writable and is_writable(cachepath) then
+ writable=cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent=file.dirname(cachepath)
+ if is_writable(cacheparent) and true then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath),"no",{ "yes","no" })=="yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and is_writable(cachepath) then
+ report_caches("path %a created",cachepath)
+ writable=cachepath
+ readables[#readables+1]=cachepath
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local texmfcaches=caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath=texmfcaches[k]
+ cachepath=resolvers.expansion(cachepath)
+ if cachepath~="" then
+ cachepath=resolvers.resolve(cachepath)
+ cachepath=resolvers.cleanpath(cachepath)
+ local valid=isdir(cachepath)
+ if valid and is_readable(cachepath) then
+ if not writable and is_writable(cachepath) then
+ readables[#readables+1]=cachepath
+ writable=cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ if not writable then
+ report_caches("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables==0 then
+ report_caches("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ writable=dir.expandname(resolvers.cleanpath(writable))
+ local base,more,tree=caches.base,caches.more,caches.tree or caches.treehash()
+ if tree then
+ caches.tree=tree
+ writable=mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more,tree)
+ end
+ else
+ writable=mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i]=file.join(readables[i],base,more)
+ end
+ end
+ if trace_cache then
+ for i=1,#readables do
+ report_caches("using readable path %a (order %s)",readables[i],i)
+ end
+ report_caches("using writable path %a",writable)
+ end
+ identify=function()
+ return writable,readables
+ end
+ return writable,readables
+end
+function caches.usedpaths(separator)
+ local writable,readables=identify()
+ if #readables>1 then
+ local result={}
+ local done={}
+ for i=1,#readables do
+ local readable=readables[i]
+ if readable==writable then
+ done[readable]=true
+ result[#result+1]=formatters["readable+writable: %a"](readable)
+ elseif usedreadables[i] then
+ done[readable]=true
+ result[#result+1]=formatters["readable: %a"](readable)
+ end
+ end
+ if not done[writable] then
+ result[#result+1]=formatters["writable: %a"](writable)
+ end
+ return concat(result,separator or " | ")
+ else
+ return writable or "?"
+ end
+end
+function caches.configfiles()
+ return concat(resolvers.instance.specification,";")
+end
+function caches.hashed(tree)
+ tree=gsub(tree,"[\\/]+$","")
+ tree=lower(tree)
+ local hash=md5.hex(tree)
+ if trace_cache or trace_locating then
+ report_caches("hashing tree %a, hash %a",tree,hash)
+ end
+ return hash
+end
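+-- Illustrative sketch (editorial): cache tree names are md5 hashes of the
+-- lowercased specification with trailing slashes removed, e.g.
+--
+--   caches.hashed("/usr/local/texmf-dist/") -- md5.hex("/usr/local/texmf-dist")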
+function caches.treehash()
+ local tree=caches.configfiles()
+ if not tree or tree=="" then
+ return false
+ else
+ return caches.hashed(tree)
+ end
+end
+local r_cache,w_cache={},{}
+local function getreadablepaths(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=r_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done={}
+ for i=1,#readables do
+ done[i]=file.join(readables[i],...)
+ end
+ else
+ done=readables
+ end
+ r_cache[hash]=done
+ end
+ return done
+end
+local function getwritablepath(...)
+ local tags={... }
+ local hash=concat(tags,"/")
+ local done=w_cache[hash]
+ if not done then
+ local writable,readables=identify()
+ if #tags>0 then
+ done=mkdirs(writable,...)
+ else
+ done=writable
+ end
+ w_cache[hash]=done
+ end
+ return done
+end
+caches.getreadablepaths=getreadablepaths
+caches.getwritablepath=getwritablepath
+function caches.getfirstreadablefile(filename,...)
+ local rd=getreadablepaths(...)
+ for i=1,#rd do
+ local path=rd[i]
+ local fullname=file.join(path,filename)
+ if is_readable(fullname) then
+ usedreadables[i]=true
+ return fullname,path
+ end
+ end
+ return caches.setfirstwritablefile(filename,...)
+end
+function caches.getfirstreadablefile_TEST_ME_FIRST(filename,...)
+ local fullname,path=caches.setfirstwritablefile(filename,...)
+ if is_readable(fullname) then
+ return fullname,path
+ end
+ local rd=getreadablepaths(...)
+ for i=1,#rd do
+ local path=rd[i]
+ local fullname=file.join(path,filename)
+ if is_readable(fullname) then
+ usedreadables[i]=true
+ return fullname,path
+ end
+ end
+ return fullname,path
+end
+function caches.setfirstwritablefile(filename,...)
+ local wr=getwritablepath(...)
+ local fullname=file.join(wr,filename)
+ return fullname,wr
+end
+function caches.define(category,subcategory)
+ return function()
+ return getwritablepath(category,subcategory)
+ end
+end
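+-- Illustrative sketch (editorial): caches.define returns a closure that resolves
+-- (and creates) a writable cache subpath on first use, e.g. with a hypothetical
+-- category:
+--
+--   local getfontpath = caches.define("fonts","data")
+--   local path = getfontpath() -- something like <writable>/.../fonts/data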
+function caches.setluanames(path,name)
+ return format("%s/%s.%s",path,name,luasuffixes.tma),format("%s/%s.%s",path,name,luasuffixes.tmc)
+end
+function caches.loaddata(readables,name)
+ if type(readables)=="string" then
+ readables={ readables }
+ end
+ for i=1,#readables do
+ local path=readables[i]
+ local tmaname,tmcname=caches.setluanames(path,name)
+ local loader=false
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader and isfile(tmaname) then
+ utilities.lua.compile(tmaname,tmcname)
+ if isfile(tmcname) then
+ loader=loadfile(tmcname)
+ end
+ if not loader then
+ loader=loadfile(tmaname)
+ end
+ end
+ if loader then
+ loader=loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
+end
+function caches.is_writable(filepath,filename)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ return is_writable(tmaname)
+end
+local saveoptions={ compact=true }
+function caches.savedata(filepath,filename,data,raw)
+ local tmaname,tmcname=caches.setluanames(filepath,filename)
+ data.cache_uuid=os.uuid()
+ if caches.direct then
+ file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ else
+ table.tofile(tmaname,data,true,saveoptions)
+ end
+ utilities.lua.compile(tmaname,tmcname)
+end
+local content_state={}
+function caches.contentstate()
+ return content_state or {}
+end
+function caches.loadcontent(cachename,dataname,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
+ local blob=loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
+ if blob then
+ local data=blob()
+ if data and data.content then
+ if data.type==dataname then
+ if data.version==resolvers.cacheversion then
+ content_state[#content_state+1]=data.uuid
+ if trace_locating then
+ report_resolvers("loading %a for %a from %a",dataname,cachename,filename)
+ end
+ return data.content
+ else
+ report_resolvers("skipping %a for %a from %a (version mismatch)",dataname,cachename,filename)
+ end
+ else
+ report_resolvers("skipping %a for %a from %a (datatype mismatch)",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (no content)",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping %a for %a from %a (invalid file)",dataname,cachename,filename)
+ end
+end
+function caches.collapsecontent(content)
+ for k,v in next,content do
+ if type(v)=="table" and #v==1 then
+ content[k]=v[1]
+ end
+ end
+end
+function caches.savecontent(cachename,dataname,content,filename)
+ if not filename then
+ local name=caches.hashed(cachename)
+ local full,path=caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename=file.join(path,name)
+ end
+ local luaname=addsuffix(filename,luasuffixes.lua)
+ local lucname=addsuffix(filename,luasuffixes.luc)
+ if trace_locating then
+ report_resolvers("preparing %a for %a",dataname,cachename)
+ end
+ local data={
+ type=dataname,
+ root=cachename,
+ version=resolvers.cacheversion,
+ date=os.date("%Y-%m-%d"),
+ time=os.date("%H:%M:%S"),
+ content=content,
+ uuid=os.uuid(),
+ }
+ local ok=io.savedata(luaname,table.serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
+ end
+ if utilities.lua.compile(luaname,lucname) then
+ if trace_locating then
+ report_resolvers("%a compiled to %a",dataname,lucname)
+ end
+ return true
+ else
+ if trace_locating then
+ report_resolvers("compiling failed for %a, deleting file %a",dataname,lucname)
+ end
+ os.remove(lucname)
+ end
+ elseif trace_locating then
+ report_resolvers("unable to save %a in %a (access error)",dataname,luaname)
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-met"] = package.loaded["data-met"] or true
+
+-- original size: 5347, stripped down to: 4015
+
+if not modules then modules={} end modules ['data-met']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,format=string.find,string.format
+local sequenced=table.sequenced
+local addurlscheme,urlhashed=url.addscheme,url.hashed
+local getcurrentdir=lfs.currentdir
+local trace_locating=false
+local trace_methods=false
+trackers.register("resolvers.locating",function(v) trace_methods=v end)
+trackers.register("resolvers.methods",function(v) trace_methods=v end)
+local report_methods=logs.reporter("resolvers","methods")
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registered={}
+local function splitmethod(filename)
+ if not filename then
+ return { scheme="unknown",original=filename }
+ end
+ if type(filename)=="table" then
+ return filename
+ end
+ filename=file.collapsepath(filename,".")
+ if not find(filename,"://",1,true) then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ end
+ local specification=url.hashed(filename)
+ if not specification.scheme or specification.scheme=="" then
+ return { scheme="file",path=filename,original=filename,filename=filename }
+ else
+ return specification
+ end
+end
+resolvers.splitmethod=splitmethod
+local function methodhandler(what,first,...)
+ local method=registered[what]
+ if method then
+ local how,namespace=method.how,method.namespace
+ if how=="uri" or how=="url" then
+ local specification=splitmethod(first)
+ local scheme=specification.scheme
+ local resolver=namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"default",first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, handler %a, argument %a",what,how,"unset")
+ end
+ end
+ elseif how=="tag" then
+ local resolver=namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver=namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"default")
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_methods("resolving, method %a, how %a, tag %a",what,how,"unset")
+ end
+ end
+ end
+ else
+ report_methods("resolving, invalid method %a")
+ end
+end
+resolvers.methodhandler=methodhandler
+function resolvers.registermethod(name,namespace,how)
+ registered[name]={ how=how or "tag",namespace=namespace }
+ namespace["byscheme"]=function(scheme,filename,...)
+ if scheme=="file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
+ end
+ end
+end
+local concatinators=allocate { notfound=file.join }
+local locators=allocate { notfound=function() end }
+local hashers=allocate { notfound=function() end }
+local generators=allocate { notfound=function() end }
+resolvers.concatinators=concatinators
+resolvers.locators=locators
+resolvers.hashers=hashers
+resolvers.generators=generators
+local registermethod=resolvers.registermethod
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators",locators,"uri")
+registermethod("hashers",hashers,"uri")
+registermethod("generators",generators,"uri")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-res"] = package.loaded["data-res"] or true
+
+-- original size: 67003, stripped down to: 46291
+
+if not modules then modules={} end modules ['data-res']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
+local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
+local next,type,rawget=next,type,rawget
+local os=os
+local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
+local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
+local formatters=string.formatters
+local filedirname=file.dirname
+local filebasename=file.basename
+local suffixonly=file.suffixonly
+local addsuffix=file.addsuffix
+local removesuffix=file.removesuffix
+local filejoin=file.join
+local collapsepath=file.collapsepath
+local joinpath=file.joinpath
+local is_qualified_path=file.is_qualified_path
+local allocate=utilities.storage.allocate
+local settings_to_array=utilities.parsers.settings_to_array
+local getcurrentdir=lfs.currentdir
+local isfile=lfs.isfile
+local isdir=lfs.isdir
+local setmetatableindex=table.setmetatableindex
+local luasuffixes=utilities.lua.suffixes
+local trace_locating=false trackers .register("resolvers.locating",function(v) trace_locating=v end)
+local trace_detail=false trackers .register("resolvers.details",function(v) trace_detail=v end)
+local trace_expansions=false trackers .register("resolvers.expansions",function(v) trace_expansions=v end)
+local trace_paths=false trackers .register("resolvers.paths",function(v) trace_paths=v end)
+local resolve_otherwise=true directives.register("resolvers.otherwise",function(v) resolve_otherwise=v end)
+local report_resolving=logs.reporter("resolvers","resolving")
+local resolvers=resolvers
+local expandedpathfromlist=resolvers.expandedpathfromlist
+local checkedvariable=resolvers.checkedvariable
+local splitconfigurationpath=resolvers.splitconfigurationpath
+local methodhandler=resolvers.methodhandler
+local filtered=resolvers.filtered_from_content
+local lookup=resolvers.get_from_content
+local cleanpath=resolvers.cleanpath
+local resolveprefix=resolvers.resolve
+local initializesetter=utilities.setters.initialize
+local ostype,osname,osenv,ossetenv,osgetenv=os.type,os.name,os.env,os.setenv,os.getenv
+resolvers.cacheversion="1.100"
+resolvers.configbanner=""
+resolvers.homedir=environment.homedir
+resolvers.criticalvars=allocate { "SELFAUTOLOC","SELFAUTODIR","SELFAUTOPARENT","TEXMFCNF","TEXMF","TEXOS" }
+resolvers.luacnfname="texmfcnf.lua"
+resolvers.luacnfstate="unknown"
+if environment.default_texmfcnf then
+ resolvers.luacnfspec="home:texmf/web2c;"..environment.default_texmfcnf
+else
+ resolvers.luacnfspec=concat ({
+ "home:texmf/web2c",
+ "selfautoparent:/texmf-local/web2c",
+ "selfautoparent:/texmf-context/web2c",
+ "selfautoparent:/texmf-dist/web2c",
+ "selfautoparent:/texmf/web2c",
+ },";")
+end
+local unset_variable="unset"
+local formats=resolvers.formats
+local suffixes=resolvers.suffixes
+local usertypes=resolvers.usertypes
+local dangerous=resolvers.dangerous
+local suffixmap=resolvers.suffixmap
+resolvers.defaultsuffixes={ "tex" }
+resolvers.instance=resolvers.instance or nil
+local instance=resolvers.instance or nil
+function resolvers.setenv(key,value,raw)
+ if instance then
+ instance.environment[key]=value
+ ossetenv(key,raw and value or resolveprefix(value))
+ end
+end
+local function getenv(key)
+ local value=rawget(instance.environment,key)
+ if value and value~="" then
+ return value
+ else
+ local e=osgetenv(key)
+ return e~=nil and e~="" and checkedvariable(e) or ""
+ end
+end
+resolvers.getenv=getenv
+resolvers.env=getenv
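+-- Illustrative sketch (editorial): once an instance exists, resolvers.getenv
+-- consults the instance environment before the real environment, so
+--
+--   resolvers.setenv("TEXMFPROJECT","/data/project") -- hypothetical variable
+--   resolvers.getenv("TEXMFPROJECT")                 -- returns "/data/project"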
+local function resolvevariable(k)
+ return instance.expansions[k]
+end
+local dollarstripper=lpeg.stripper("$")
+local inhibitstripper=P("!")^0*Cs(P(1)^0)
+local backslashswapper=lpeg.replacer("\\","/")
+local somevariable=P("$")/""
+local somekey=C(R("az","AZ","09","__","--")^1)
+local somethingelse=P(";")*((1-S("!{}/\\"))^1*P(";")/"")+P(";")*(P(";")/"")+P(1)
+local variableexpander=Cs((somevariable*(somekey/resolvevariable)+somethingelse)^1 )
+local cleaner=P("\\")/"/"+P(";")*S("!{}/\\")^0*P(";")^1/";"
+local variablecleaner=Cs((cleaner+P(1))^0)
+local somevariable=R("az","AZ","09","__","--")^1/resolvevariable
+local variable=(P("$")/"")*(somevariable+(P("{")/"")*somevariable*(P("}")/""))
+local variableresolver=Cs((variable+P(1))^0)
+local function expandedvariable(var)
+ return lpegmatch(variableexpander,var) or var
+end
+function resolvers.newinstance()
+ if trace_locating then
+ report_resolving("creating instance")
+ end
+ local environment,variables,expansions,order=allocate(),allocate(),allocate(),allocate()
+ local newinstance={
+ environment=environment,
+ variables=variables,
+ expansions=expansions,
+ order=order,
+ files=allocate(),
+ setups=allocate(),
+ found=allocate(),
+ foundintrees=allocate(),
+ hashes=allocate(),
+ hashed=allocate(),
+ pathlists=false,
+ specification=allocate(),
+ lists=allocate(),
+ data=allocate(),
+ fakepaths=allocate(),
+ remember=true,
+ diskcache=true,
+ renewcache=false,
+ renewtree=false,
+ loaderror=false,
+ savelists=true,
+ pattern=nil,
+ force_suffixes=true,
+ pathstack={},
+ }
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v=order[i][k]
+ if v~=nil then
+ t[k]=v
+ return v
+ end
+ end
+ if v==nil then
+ v=""
+ end
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(environment,function(t,k)
+ local v=osgetenv(k)
+ if v==nil then
+ v=variables[k]
+ end
+ if v~=nil then
+ v=checkedvariable(v) or ""
+ end
+ v=resolvers.repath(v)
+ t[k]=v
+ return v
+ end)
+ setmetatableindex(expansions,function(t,k)
+ local v=environment[k]
+ if type(v)=="string" then
+ v=lpegmatch(variableresolver,v)
+ v=lpegmatch(variablecleaner,v)
+ end
+ t[k]=v
+ return v
+ end)
+ return newinstance
+end
+function resolvers.setinstance(someinstance)
+ instance=someinstance
+ resolvers.instance=someinstance
+ return someinstance
+end
+function resolvers.reset()
+ return resolvers.setinstance(resolvers.newinstance())
+end
+local function reset_hashes()
+ instance.lists={}
+ instance.pathlists=false
+ instance.found={}
+end
+local function reset_caches()
+ instance.lists={}
+ instance.pathlists=false
+end
+local slash=P("/")
+local pathexpressionpattern=Cs (
+ Cc("^")*(
+ Cc("%")*S(".-")+slash^2*P(-1)/"/.*"
++slash^2/"/"+(1-slash)*P(-1)*Cc("/")+P(1)
+ )^1*Cc("$")
+)
+local cache={}
+local function makepathexpression(str)
+ if str=="." then
+ return "^%./$"
+ else
+ local c=cache[str]
+ if not c then
+ c=lpegmatch(pathexpressionpattern,str)
+ cache[str]=c
+ end
+ return c
+ end
+end
+local function reportcriticalvariables(cnfspec)
+ if trace_locating then
+ for i=1,#resolvers.criticalvars do
+ local k=resolvers.criticalvars[i]
+ local v=resolvers.getenv(k) or "unknown"
+ report_resolving("variable %a set to %a",k,v)
+ end
+ report_resolving()
+ if cnfspec then
+ report_resolving("using configuration specification %a",type(cnfspec)=="table" and concat(cnfspec,",") or cnfspec)
+ end
+ report_resolving()
+ end
+ reportcriticalvariables=function() end
+end
+local function identify_configuration_files()
+ local specification=instance.specification
+ if #specification==0 then
+ local cnfspec=getenv("TEXMFCNF")
+ if cnfspec=="" then
+ cnfspec=resolvers.luacnfspec
+ resolvers.luacnfstate="default"
+ else
+ resolvers.luacnfstate="environment"
+ end
+ reportcriticalvariables(cnfspec)
+ local cnfpaths=expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname=resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filepath=cnfpaths[i]
+ local filename=collapsepath(filejoin(filepath,luacnfname))
+ local realname=resolveprefix(filename)
+ if trace_locating then
+ local fullpath=gsub(resolveprefix(collapsepath(filepath)),"//","/")
+ local weirdpath=find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c",1,true)
+ report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath)
+ end
+ if isfile(realname) then
+ specification[#specification+1]=filename
+ if trace_locating then
+ report_resolving("found configuration file %a",realname)
+ end
+ end
+ end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
+end
+local function load_configuration_files()
+ local specification=instance.specification
+ if #specification>0 then
+ local luacnfname=resolvers.luacnfname
+ for i=1,#specification do
+ local filename=specification[i]
+ local pathname=filedirname(filename)
+ local filename=filejoin(pathname,luacnfname)
+ local realname=resolveprefix(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local setups=instance.setups
+ local data=blob()
+ local parent=data and data.parent
+ if parent then
+ local filename=filejoin(pathname,parent)
+ local realname=resolveprefix(filename)
+ local blob=loadfile(realname)
+ if blob then
+ local parentdata=blob()
+ if parentdata then
+ report_resolving("loading configuration file %a",filename)
+ data=table.merged(parentdata,data)
+ end
+ end
+ end
+ data=data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file %a",filename)
+ report_resolving()
+ end
+ local variables=data.variables or {}
+ local warning=false
+ for k,v in next,data do
+ local variant=type(v)
+ if variant=="table" then
+ initializesetter(filename,k,v)
+ elseif variables[k]==nil then
+ if trace_locating and not warning then
+ report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
+ k,resolveprefix(filename))
+ warning=true
+ end
+ variables[k]=v
+ end
+ end
+ setups[pathname]=variables
+ if resolvers.luacnfstate=="default" then
+ local cnfspec=variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ resolvers.setenv("TEXMFCNF",cnfspec)
+ instance.specification={}
+ identify_configuration_files()
+ load_configuration_files()
+ resolvers.luacnfstate="configuration"
+ break
+ end
+ end
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file %a (no content)",filename)
+ end
+ setups[pathname]={}
+ instance.loaderror=true
+ end
+ elseif trace_locating then
+ report_resolving("skipping configuration file %a (no valid format)",filename)
+ end
+ instance.order[#instance.order+1]=instance.setups[pathname]
+ if instance.loaderror then
+ break
+ end
+ end
+ elseif trace_locating then
+ report_resolving("warning: no lua configuration files found")
+ end
+end
+local function load_file_databases()
+ instance.loaderror,instance.files=false,allocate()
+ if not instance.renewcache then
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ resolvers.hashers.byscheme(hash.type,hash.name)
+ if instance.loaderror then break end
+ end
+ end
+end
+local function locate_file_databases()
+ local texmfpaths=resolvers.expandedpathlist("TEXMF")
+ if #texmfpaths>0 then
+ for i=1,#texmfpaths do
+ local path=collapsepath(texmfpaths[i])
+ path=gsub(path,"/+$","")
+ local stripped=lpegmatch(inhibitstripper,path)
+ if stripped~="" then
+ local runtime=stripped==path
+ path=cleanpath(path)
+ local spec=resolvers.splitmethod(stripped)
+ if runtime and (spec.noscheme or spec.scheme=="file") then
+ stripped="tree:///"..stripped
+ elseif spec.scheme=="cache" or spec.scheme=="file" then
+ stripped=spec.path
+ end
+ if trace_locating then
+ if runtime then
+ report_resolving("locating list of %a (runtime) (%s)",path,stripped)
+ else
+ report_resolving("locating list of %a (cached)",path)
+ end
+ end
+ methodhandler('locators',stripped)
+ end
+ end
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
+ end
+end
+local function generate_file_databases()
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ methodhandler('generators',hash.name)
+ end
+ if trace_locating then
+ report_resolving()
+ end
+end
+local function save_file_databases()
+ for i=1,#instance.hashes do
+ local hash=instance.hashes[i]
+ local cachename=hash.name
+ if hash.cache then
+ local content=instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",cachename)
+ end
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree %a",cachename)
+ end
+ end
+end
+function resolvers.renew(hashname)
+ if hashname and hashname~="" then
+ local expanded=resolvers.expansion(hashname) or ""
+ if expanded~="" then
+ if trace_locating then
+ report_resolving("identifying tree %a from %a",expanded,hashname)
+ end
+ hashname=expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree %a",hashname)
+ end
+ end
+ local realpath=resolveprefix(hashname)
+ if isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path %a",realpath)
+ end
+ methodhandler('generators',hashname)
+ local content=instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree %a",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ else
+ report_resolving("invalid path %a",realpath)
+ end
+ end
+end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
+ end
+ end
+end
+function resolvers.appendhash(type,name,cache)
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a appended",name)
+ end
+ insert(instance.hashes,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.prependhash(type,name,cache)
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash %a prepended",name)
+ end
+ insert(instance.hashes,1,{ type=type,name=name,cache=cache } )
+ instance.hashed[name]=cache
+ end
+end
+function resolvers.extendtexmfvariable(specification)
+ local t=resolvers.splitpath(getenv("TEXMF"))
+ insert(t,1,specification)
+ local newspec=concat(t,",")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"]=newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"]=newspec
+ else
+ end
+ reset_hashes()
+end
+function resolvers.splitexpansions()
+ local ie=instance.expansions
+ for k,v in next,ie do
+ local t,tn,h,p={},0,{},splitconfigurationpath(v)
+ for kk=1,#p do
+ local vv=p[kk]
+ if vv~="" and not h[vv] then
+ tn=tn+1
+ t[tn]=vv
+ h[vv]=true
+ end
+ end
+ if #t>1 then
+ ie[k]=t
+ else
+ ie[k]=t[1]
+ end
+ end
+end
+function resolvers.datastate()
+ return caches.contentstate()
+end
+function resolvers.variable(name)
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.variables[name]
+ return result~=nil and result or ""
+end
+function resolvers.expansion(name)
+ local name=name and lpegmatch(dollarstripper,name)
+ local result=name and instance.expansions[name]
+ return result~=nil and result or ""
+end
+function resolvers.unexpandedpathlist(str)
+ local pth=resolvers.variable(str)
+ local lst=resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
+end
+function resolvers.unexpandedpath(str)
+ return joinpath(resolvers.unexpandedpathlist(str))
+end
+function resolvers.pushpath(name)
+ local pathstack=instance.pathstack
+ local lastpath=pathstack[#pathstack]
+ local pluspath=filedirname(name)
+ if lastpath then
+ lastpath=collapsepath(filejoin(lastpath,pluspath))
+ else
+ lastpath=collapsepath(pluspath)
+ end
+ insert(pathstack,lastpath)
+ if trace_paths then
+ report_resolving("pushing path %a",lastpath)
+ end
+end
+function resolvers.poppath()
+ local pathstack=instance.pathstack
+ if trace_paths and #pathstack>0 then
+ report_resolving("popping path %a",pathstack[#pathstack])
+ end
+ remove(pathstack)
+end
+function resolvers.stackpath()
+ local pathstack=instance.pathstack
+ local currentpath=pathstack[#pathstack]
+ return currentpath~="" and currentpath or nil
+end
+local done={}
+function resolvers.resetextrapath()
+ local ep=instance.extra_paths
+ if not ep then
+ done={}
+ instance.extra_paths={}
+ elseif #ep>0 then
+ done={}
+ reset_caches()
+ end
+end
+function resolvers.registerextrapath(paths,subpaths)
+ if not subpaths or subpaths=="" then
+ if not paths or path=="" then
+ return
+ elseif done[paths] then
+ return
+ end
+ end
+ local paths=settings_to_array(paths)
+ local subpaths=settings_to_array(subpaths)
+ local ep=instance.extra_paths or {}
+ local oldn=#ep
+ local newn=oldn
+ local nofpaths=#paths
+ local nofsubpaths=#subpaths
+ if nofpaths>0 then
+ if nofsubpaths>0 then
+ for i=1,nofpaths do
+ local p=paths[i]
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=p.."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ else
+ for i=1,nofpaths do
+ local p=paths[i]
+ if not done[p] then
+ newn=newn+1
+ ep[newn]=cleanpath(p)
+ done[p]=true
+ end
+ end
+ end
+ elseif nofsubpaths>0 then
+ for i=1,oldn do
+ for j=1,nofsubpaths do
+ local s=subpaths[j]
+ local ps=ep[i].."/"..s
+ if not done[ps] then
+ newn=newn+1
+ ep[newn]=cleanpath(ps)
+ done[ps]=true
+ end
+ end
+ end
+ end
+ if newn>0 then
+ instance.extra_paths=ep
+ end
+ if newn~=oldn then
+ reset_caches()
+ end
+end
+function resolvers.pushextrapath(path)
+ local paths=settings_to_array(path)
+ if instance.extra_stack then
+ insert(instance.extra_stack,1,paths)
+ else
+ instance.extra_stack={ paths }
+ end
+ reset_caches()
+end
+function resolvers.popextrapath()
+ if instance.extra_stack then
+ reset_caches()
+ return remove(instance.extra_stack,1)
+ end
+end
+local function made_list(instance,list,extra_too)
+ local done={}
+ local new={}
+ local newn=0
+ local function add(p)
+ for k=1,#p do
+ local v=p[k]
+ if not done[v] then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ end
+ end
+ end
+ for k=1,#list do
+ local v=list[k]
+ if done[v] then
+ elseif find(v,"^[%.%/]$") then
+ done[v]=true
+ newn=newn+1
+ new[newn]=v
+ else
+ break
+ end
+ end
+ if extra_too then
+ local es=instance.extra_stack
+ if es and #es>0 then
+ for k=1,#es do
+ add(es[k])
+ end
+ end
+ local ep=instance.extra_paths
+ if ep and #ep>0 then
+ add(ep)
+ end
+ end
+ add(list)
+ return new
+end
+function resolvers.cleanpathlist(str)
+ local t=resolvers.expandedpathlist(str)
+ if t then
+ for i=1,#t do
+ t[i]=collapsepath(cleanpath(t[i]))
+ end
+ end
+ return t
+end
+function resolvers.expandpath(str)
+ return joinpath(resolvers.expandedpathlist(str))
+end
+function resolvers.expandedpathlist(str,extra_too)
+ if not str then
+ return {}
+ elseif instance.savelists then
+ str=lpegmatch(dollarstripper,str)
+ local lists=instance.lists
+ local lst=lists[str]
+ if not lst then
+ local l=made_list(instance,resolvers.splitpath(resolvers.expansion(str)),extra_too)
+ lst=expandedpathfromlist(l)
+ lists[str]=lst
+ end
+ return lst
+ else
+ local lst=resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst),extra_too)
+ end
+end
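+-- Illustrative sketch (editorial): expandedpathlist is the usual entry point for
+-- turning a variable into concrete search paths, e.g.
+--
+--   local paths = resolvers.expandedpathlist("TEXMF")
+--   for i=1,#paths do print(paths[i]) end
+--
+-- results are cached in instance.lists when savelists is enabled.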
+function resolvers.expandedpathlistfromvariable(str)
+ str=lpegmatch(dollarstripper,str)
+ local tmp=resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp~="" and tmp or str)
+end
+function resolvers.expandpathfromvariable(str)
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
+end
+function resolvers.cleanedpathlist(v)
+ local t=resolvers.expandedpathlist(v)
+ for i=1,#t do
+ t[i]=resolvers.resolve(resolvers.cleanpath(t[i]))
+ end
+ return t
+end
+function resolvers.expandbraces(str)
+ local ori=str
+ local pth=expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
+end
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name]=content
+ else
+ instance.files[name]={}
+ if someerror==true then
+ instance.loaderror=someerror
+ end
+ end
+end
+local function isreadable(name)
+ local readable=isfile(name)
+ if trace_detail then
+ if readable then
+ report_resolving("file %a is readable",name)
+ else
+ report_resolving("file %a is not readable",name)
+ end
+ end
+ return readable
+end
+local function collect_files(names)
+ local filelist={}
+ local noffiles=0
+ local function check(hash,root,pathname,path,name)
+ if not pathname or find(path,pathname) then
+ local variant=hash.type
+ local search=filejoin(root,path,name)
+ local result=methodhandler('concatinators',variant,root,path,name)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles=noffiles+1
+ filelist[noffiles]={ variant,search,result }
+ end
+ end
+ for k=1,#names do
+ local filename=names[k]
+ if trace_detail then
+ report_resolving("checking name %a",filename)
+ end
+ local basename=filebasename(filename)
+ local pathname=filedirname(filename)
+ if pathname=="" or find(pathname,"^%.") then
+ pathname=false
+ else
+ pathname=gsub(pathname,"%*",".*")
+ pathname="/"..pathname.."$"
+ end
+ local hashes=instance.hashes
+ for h=1,#hashes do
+ local hash=hashes[h]
+ local hashname=hash.name
+ local content=hashname and instance.files[hashname]
+ if content then
+ if trace_detail then
+ report_resolving("deep checking %a, base %a, pattern %a",hashname,basename,pathname)
+ end
+ local path,name=lookup(content,basename)
+ if path then
+ local metadata=content.metadata
+ local realroot=metadata and metadata.path or hashname
+ if type(path)=="string" then
+ check(hash,realroot,pathname,path,name)
+ else
+ for i=1,#path do
+ check(hash,realroot,pathname,path[i],name)
+ end
+ end
+ end
+ elseif trace_locating then
+ report_resolving("no match in %a (%s)",hashname,basename)
+ end
+ end
+ end
+ return noffiles>0 and filelist or nil
+end
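+-- registerintrees records successful lookups in instance.foundintrees; repeated direct
+-- hits for the same found name are filtered out via the fit cache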
+local fit={}
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees=instance.foundintrees
+ if usedmethod=="direct" and filename==foundname and fit[foundname] then
+ else
+ local t={
+ filename=filename,
+ format=format~="" and format or nil,
+ filetype=filetype~="" and filetype or nil,
+ usedmethod=usedmethod,
+ foundname=foundname,
+ }
+ fit[foundname]=t
+ foundintrees[#foundintrees+1]=t
+ end
+end
+local function can_be_dir(name)
+ local fakepaths=instance.fakepaths
+ if not fakepaths[name] then
+ if isdir(name) then
+ fakepaths[name]=1
+ else
+ fakepaths[name]=2
+ end
+ end
+ return fakepaths[name]==1
+end
+local preparetreepattern=Cs((P(".")/"%%."+P("-")/"%%-"+P(1))^0*Cc("$"))
+local collect_instance_files
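+-- find_analyze determines the filetype and the list of candidate filenames, adding
+-- default or format specific suffixes when the given name has no known suffix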
+local function find_analyze(filename,askedformat,allresults)
+ local filetype,wantedfiles,ext='',{},suffixonly(filename)
+ wantedfiles[#wantedfiles+1]=filename
+ if askedformat=="" then
+ if ext=="" or not suffixmap[ext] then
+ local defaultsuffixes=resolvers.defaultsuffixes
+ local formatofsuffix=resolvers.formatofsuffix
+ for i=1,#defaultsuffixes do
+ local forcedname=filename..'.'..defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1]=forcedname
+ filetype=formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype %a",filetype)
+ end
+ end
+ else
+ filetype=resolvers.formatofsuffix(filename)
+ if trace_locating then
+ report_resolving("using suffix based filetype %a",filetype)
+ end
+ end
+ else
+ if ext=="" or not suffixmap[ext] then
+ local format_suffixes=suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1]=filename.."."..format_suffixes[i]
+ end
+ end
+ end
+ filetype=askedformat
+ if trace_locating then
+ report_resolving("using given filetype %a",filetype)
+ end
+ end
+ return filetype,wantedfiles
+end
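+-- the find_* helpers below are tried in order by collect_instance_files: direct,
+-- wildcard, qualified, in tree, on path and otherwise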
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file %a found directly",filename)
+ end
+ return "direct",{ filename }
+ end
+end
+local function find_wildcard(filename,allresults)
+ if find(filename,'*',1,true) then
+ if trace_locating then
+ report_resolving("checking wildcard %a",filename)
+ end
+ local result=resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard",result
+ end
+ end
+end
+local function find_qualified(filename,allresults,askedformat,alsostripped)
+ if not is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name %a",filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file %a found",filename)
+ end
+ return "qualified",{ filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file %a",filename)
+ end
+ local forcedname,suffix="",suffixonly(filename)
+ if suffix=="" then
+ local format_suffixes=askedformat=="" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s=format_suffixes[i]
+ forcedname=filename.."."..s
+ if isreadable(forcedname) then
+ if trace_locating then
+ report_resolving("no suffix, forcing format filetype %a",s)
+ end
+ return "qualified",{ forcedname }
+ end
+ end
+ end
+ end
+ if alsostripped and suffix and suffix~="" then
+ local basename=filebasename(filename)
+ local pattern=lpegmatch(preparetreepattern,filename)
+ local savedformat=askedformat
+ local format=savedformat or ""
+ if format=="" then
+ askedformat=resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat="othertextfiles"
+ end
+ if basename~=filename then
+ local resolved=collect_instance_files(basename,askedformat,allresults)
+ if #resolved==0 then
+ local lowered=lower(basename)
+ if filename~=lowered then
+ resolved=collect_instance_files(lowered,askedformat,allresults)
+ end
+ end
+ resolvers.format=savedformat
+ if #resolved>0 then
+ local result={}
+ for r=1,#resolved do
+ local rr=resolved[r]
+ if find(rr,pattern) then
+ result[#result+1]=rr
+ end
+ end
+ if #result>0 then
+ return "qualified",result
+ end
+ end
+ end
+ end
+end
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found %a by deep scanning",fname)
+ end
+ return fname
+ end
+end
+local function makepathlist(list,filetype)
+ local typespec=resolvers.variableofformat(filetype)
+ local pathlist=resolvers.expandedpathlist(typespec,filetype and usertypes[filetype])
+ local entry={}
+ if pathlist and #pathlist>0 then
+ for k=1,#pathlist do
+ local path=pathlist[k]
+ local prescanned=find(path,'^!!')
+ local recursive=find(path,'//$')
+ local pathname=lpegmatch(inhibitstripper,path)
+ local expression=makepathexpression(pathname)
+ local barename=gsub(pathname,"/+$","")
+ barename=resolveprefix(barename)
+ local scheme=url.hasscheme(barename)
+ local schemename=gsub(barename,"%.%*$",'')
+ entry[k]={
+ path=path,
+ pathname=pathname,
+ prescanned=prescanned,
+ recursive=recursive,
+ expression=expression,
+ barename=barename,
+ scheme=scheme,
+ schemename=schemename,
+ }
+ end
+ entry.typespec=typespec
+ list[filetype]=entry
+ else
+ list[filetype]=false
+ end
+ return entry
+end
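+-- find_intree matches the wanted files against the expanded path list of the given
+-- filetype, first via the file database and otherwise by scanning the filesystem (or
+-- by the scheme specific finder)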
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local pathlists=instance.pathlists
+ if not pathlists then
+ pathlists=setmetatableindex(allocate(),makepathlist)
+ instance.pathlists=pathlists
+ end
+ local pathlist=pathlists[filetype]
+ if pathlist then
+ local method="intree"
+ local filelist=collect_files(wantedfiles)
+ local dirlist={}
+ local result={}
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i]=filedirname(filelist[i][3]).."/"
+ end
+ end
+ if trace_detail then
+ report_resolving("checking filename %a in tree",filename)
+ end
+ for k=1,#pathlist do
+ local entry=pathlist[k]
+ local path=entry.path
+ local pathname=entry.pathname
+ local done=false
+ if filelist then
+ local expression=entry.expression
+ if trace_detail then
+ report_resolving("using pattern %a for path %a",expression,pathname)
+ end
+ for k=1,#filelist do
+ local fl=filelist[k]
+ local f=fl[2]
+ local d=dirlist[k]
+ if find(d,expression) or find(resolveprefix(d),expression) then
+ result[#result+1]=resolveprefix(fl[3])
+ done=true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, continue scanning",expression,f,d)
+ end
+ else
+ if trace_detail then
+ report_resolving("match to %a in hash for file %a and path %a, quit scanning",expression,f,d)
+ end
+ break
+ end
+ elseif trace_detail then
+ report_resolving("no match to %a in hash for file %a and path %a",expression,f,d)
+ end
+ end
+ end
+ if done then
+ method="database"
+ else
+ method="filesystem"
+ local scheme=entry.scheme
+ if not scheme or scheme=="file" then
+ local pname=entry.schemename
+ if not find(pname,"*",1,true) then
+ if can_be_dir(pname) then
+ if not done and not entry.prescanned then
+ if trace_detail then
+ report_resolving("quick root scan for %a",pname)
+ end
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local fname=check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and entry.recursive then
+ if trace_detail then
+ report_resolving("scanning filesystem for %a",pname)
+ end
+ local files=resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w=wantedfiles[k]
+ local subpath=files[w]
+ if not subpath or subpath=="" then
+ elseif type(subpath)=="string" then
+ local fname=check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp=subpath[i]
+ if sp=="" then
+ else
+ local fname=check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
+ end
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ else
+ for k=1,#wantedfiles do
+ local pname=entry.barename
+ local fname=methodhandler('finders',pname.."/"..wantedfiles[k])
+ if fname then
+ result[#result+1]=fname
+ done=true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
+ end
+ end
+ if #result>0 then
+ return method,result
+ end
+ end
+end
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename %a, filetype %a, wanted files %a",filename,filetype,concat(wantedfiles," | "))
+ end
+ local result={}
+ for k=1,#wantedfiles do
+ local fname=wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename=fname
+ result[#result+1]=filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
+ end
+ if #result>0 then
+ return "onpath",result
+ end
+end
+local function find_otherwise(filename,filetype,wantedfiles,allresults)
+ local filelist=collect_files(wantedfiles)
+ local fl=filelist and filelist[1]
+ if fl then
+ return "otherwise",{ resolveprefix(fl[3]) }
+ end
+end
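+-- collect_instance_files runs the finder cascade; when allresults is false the first
+-- match wins and is cached in instance.found (when remembering is enabled)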
+collect_instance_files=function(filename,askedformat,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
+ askedformat=askedformat or ""
+ filename=collapsepath(filename,".")
+ filename=gsub(filename,"^%./",getcurrentdir().."/")
+ if allresults then
+ local filetype,wantedfiles=find_analyze(filename,askedformat)
+ local results={
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true,askedformat) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result,status,done={},{},{}
+ for k,r in next,results do
+ local method,list=r[1],r[2]
+ if method and list then
+ for i=1,#list do
+ local c=collapsepath(list[i])
+ if not done[c] then
+ result[#result+1]=c
+ done[c]=true
+ end
+ status[#status+1]=formatters["%-10s: %s"](method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result,status
+ else
+ local method,result,stamp,filetype,wantedfiles
+ if instance.remember then
+ if askedformat=="" then
+ stamp=formatters["%s::%s"](suffixonly(filename),filename)
+ else
+ stamp=formatters["%s::%s"](askedformat,filename)
+ end
+ result=stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file %a",filename)
+ end
+ return result
+ end
+ end
+ method,result=find_direct(filename)
+ if not result then
+ method,result=find_wildcard(filename)
+ if not result then
+ method,result=find_qualified(filename,false,askedformat)
+ if not result then
+ filetype,wantedfiles=find_analyze(filename,askedformat)
+ method,result=find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method,result=find_onpath(filename,filetype,wantedfiles)
+ if resolve_otherwise and not result then
+ method,result=find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result>0 then
+ local foundname=collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result={ foundname }
+ else
+ result={}
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file %a using hash %a",filename,stamp)
+ end
+ instance.found[stamp]=result
+ end
+ return result
+ end
+end
+local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
+ local result,status=collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result==0 then
+ local lowered=lower(filename)
+ if filename~=lowered then
+ result,status=collect_instance_files(lowered,filetype or "",allresults)
+ end
+ end
+ return result or {},status
+end
+function resolvers.findfiles(filename,filetype)
+ return findfiles(filename,filetype,true)
+end
+function resolvers.findfile(filename,filetype)
+ return findfiles(filename,filetype,false)[1] or ""
+end
+function resolvers.findpath(filename,filetype)
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
+end
+local function findgivenfiles(filename,allresults)
+ local base=filebasename(filename)
+ local result={}
+ local hashes=instance.hashes
+ local function okay(hash,path,name)
+ local found=methodhandler('concatinators',hash.type,hash.name,path,name)
+ if found and found~="" then
+ result[#result+1]=resolveprefix(found)
+ return not allresults
+ end
+ end
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local content=instance.files[hash.name]
+ if content then
+ local path,name=lookup(content,base)
+ if not path then
+ elseif type(path)=="string" then
+ if okay(hash,path,name) then
+ return result
+ end
+ else
+ for i=1,#path do
+ if okay(hash,path[i],name) then
+ return result
+ end
+ end
+ end
+ end
+ end
+ return result
+end
+function resolvers.findgivenfiles(filename)
+ return findgivenfiles(filename,true)
+end
+function resolvers.findgivenfile(filename)
+ return findgivenfiles(filename,false)[1] or ""
+end
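+-- makewildcard converts a wildcard specification into a lua pattern: "*" becomes ".*",
+-- "?" becomes ".", backslashes become slashes and magic characters are escaped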
+local makewildcard=Cs(
+ (P("^")^0*P("/")*P(-1)+P(-1))/".*"+(P("^")^0*P("/")/"")^0*(P("*")/".*"+P("-")/"%%-"+P(".")/"%%."+P("?")/"."+P("\\")/"/"+P(1))^0
+)
+function resolvers.wildcardpattern(pattern)
+ return lpegmatch(makewildcard,pattern) or pattern
+end
+local function findwildcardfiles(filename,allresults,result)
+ local result=result or {}
+ local base=filebasename(filename)
+ local dirn=filedirname(filename)
+ local path=lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name=lower(lpegmatch(makewildcard,base) or base)
+ local files=instance.files
+ if find(name,"*",1,true) then
+ local hashes=instance.hashes
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ for found,base in filtered(files[hashname],name) do
+ if type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
+ end
+ end
+ end
+ end
+ else
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full=methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full~="" then
+ result[#result+1]=resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
+ local hashes=instance.hashes
+ for k=1,#hashes do
+ local hash=hashes[k]
+ local hashname=hash.name
+ local hashtype=hash.type
+ if hashname and hashtype then
+ local found,base=lookup(files[hashname],base)
+ if not found then
+ elseif type(found)=='string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
+ end
+ end
+ end
+ end
+ return result
+end
+function resolvers.findwildcardfiles(filename,result)
+ return findwildcardfiles(filename,true,result)
+end
+function resolvers.findwildcardfile(filename)
+ return findwildcardfiles(filename,false)[1] or ""
+end
+function resolvers.automount()
+end
+function resolvers.load(option)
+ statistics.starttiming(instance)
+ identify_configuration_files()
+ load_configuration_files()
+ if option~="nofiles" then
+ load_databases()
+ resolvers.automount()
+ end
+ statistics.stoptiming(instance)
+ local files=instance.files
+ return files and next(files) and true
+end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+local function report(str)
+ if trace_locating then
+ report_resolving(str)
+ else
+ print(str)
+ end
+end
+function resolvers.dowithfilesandreport(command,files,...)
+ if files and #files>0 then
+ if trace_locating then
+ report('')
+ end
+ if type(files)=="string" then
+ files={ files }
+ end
+ for f=1,#files do
+ local file=files[f]
+ local result=command(file,...)
+ if type(result)=='string' then
+ report(result)
+ else
+ for i=1,#result do
+ report(result[i])
+ end
+ end
+ end
+ end
+end
+function resolvers.showpath(str)
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+end
+function resolvers.registerfile(files,name,path)
+ if files[name] then
+ if type(files[name])=='string' then
+ files[name]={ files[name],path }
+ else
+ files[name][#files[name]+1]=path
+ end
+ else
+ files[name]=path
+ end
+end
+function resolvers.dowithpath(name,func)
+ local pathlist=resolvers.expandedpathlist(name)
+ for i=1,#pathlist do
+ func("^"..cleanpath(pathlist[i]))
+ end
+end
+function resolvers.dowithvariable(name,func)
+ func(expandedvariable(name))
+end
+function resolvers.locateformat(name)
+ local engine=environment.ownmain or "luatex"
+ local barename=removesuffix(name)
+ local fullname=addsuffix(barename,"fmt")
+ local fmtname=caches.getfirstreadablefile(fullname,"formats",engine) or ""
+ if fmtname=="" then
+ fmtname=resolvers.findfile(fullname)
+ fmtname=cleanpath(fmtname)
+ end
+ if fmtname~="" then
+ local barename=removesuffix(fmtname)
+ local luaname=addsuffix(barename,luasuffixes.lua)
+ local lucname=addsuffix(barename,luasuffixes.luc)
+ local luiname=addsuffix(barename,luasuffixes.lui)
+ if isfile(luiname) then
+ return barename,luiname
+ elseif isfile(lucname) then
+ return barename,lucname
+ elseif isfile(luaname) then
+ return barename,luaname
+ end
+ end
+ return nil,nil
+end
+function resolvers.booleanvariable(str,default)
+ local b=resolvers.expansion(str)
+ if b=="" then
+ return default
+ else
+ b=toboolean(b)
+ return (b==nil and default) or b
+ end
+end
+function resolvers.dowithfilesintree(pattern,handle,before,after)
+ local instance=resolvers.instance
+ local hashes=instance.hashes
+ for i=1,#hashes do
+ local hash=hashes[i]
+ local blobtype=hash.type
+ local blobpath=hash.name
+ if blobtype and blobpath then
+ local total=0
+ local checked=0
+ local done=0
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ for path,name in filtered(instance.files[blobpath],pattern) do
+ if type(path)=="string" then
+ checked=checked+1
+ if handle(blobtype,blobpath,path,name) then
+ done=done+1
+ end
+ else
+ checked=checked+#path
+ for i=1,#path do
+ if handle(blobtype,blobpath,path[i],name) then
+ done=done+1
+ end
+ end
+ end
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
+ end
+ end
+end
+local obsolete=resolvers.obsolete or {}
+resolvers.obsolete=obsolete
+resolvers.find_file=resolvers.findfile obsolete.find_file=resolvers.findfile
+resolvers.find_files=resolvers.findfiles obsolete.find_files=resolvers.findfiles
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-pre"] = package.loaded["data-pre"] or true
+
+-- original size: 3950, stripped down to: 2935
+
+if not modules then modules={} end modules ['data-pre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local prefixes=resolvers.prefixes
+local cleanpath=resolvers.cleanpath
+local findgivenfile=resolvers.findgivenfile
+local expansion=resolvers.expansion
+local getenv=resolvers.getenv
+local basename=file.basename
+local dirname=file.dirname
+local joinpath=file.join
+local isfile=lfs.isfile
+prefixes.environment=function(str)
+ return cleanpath(expansion(str))
+end
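+-- the relative: prefix resolves a name against the current directory and, failing that,
+-- a couple of parent directories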
+local function relative(str,n)
+ if not isfile(str) then
+ local pstr="./"..str
+ if isfile(pstr) then
+ str=pstr
+ else
+ local p="../"
+ for i=1,n or 2 do
+ local pstr=p..str
+ if isfile(pstr) then
+ str=pstr
+ break
+ else
+ p=p.."../"
+ end
+ end
+ end
+ end
+ return cleanpath(str)
+end
+local function locate(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(fullname~="" and fullname or str)
+end
+prefixes.relative=relative
+prefixes.locate=locate
+prefixes.auto=function(str)
+ local fullname=relative(str)
+ if not isfile(fullname) then
+ fullname=locate(str)
+ end
+ return fullname
+end
+prefixes.filename=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(basename((fullname~="" and fullname) or str))
+end
+prefixes.pathname=function(str)
+ local fullname=findgivenfile(str) or ""
+ return cleanpath(dirname((fullname~="" and fullname) or str))
+end
+prefixes.selfautoloc=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOLOC'),str))
+end
+prefixes.selfautoparent=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTOPARENT'),str))
+end
+prefixes.selfautodir=function(str)
+ return cleanpath(joinpath(getenv('SELFAUTODIR'),str))
+end
+prefixes.home=function(str)
+ return cleanpath(joinpath(getenv('HOME'),str))
+end
+prefixes.env=prefixes.environment
+prefixes.rel=prefixes.relative
+prefixes.loc=prefixes.locate
+prefixes.kpse=prefixes.locate
+prefixes.full=prefixes.locate
+prefixes.file=prefixes.filename
+prefixes.path=prefixes.pathname
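+-- toppath is the directory of the file currently on top of the input stack, jobpath the
+-- stack path of the job; both fall back to "."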
+local function toppath()
+ local inputstack=resolvers.inputstack
+ if not inputstack then
+ return "."
+ end
+ local pathname=dirname(inputstack[#inputstack] or "")
+ if pathname=="" then
+ return "."
+ else
+ return pathname
+ end
+end
+local function jobpath()
+ local path=resolvers.stackpath()
+ if not path or path=="" then
+ return "."
+ else
+ return path
+ end
+end
+resolvers.toppath=toppath
+resolvers.jobpath=jobpath
+prefixes.toppath=function(str) return cleanpath(joinpath(toppath(),str)) end
+prefixes.jobpath=function(str) return cleanpath(joinpath(jobpath(),str)) end
+resolvers.setdynamic("toppath")
+resolvers.setdynamic("jobpath")
+prefixes.jobfile=prefixes.jobpath
+resolvers.setdynamic("jobfile")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-inp"] = package.loaded["data-inp"] or true
+
+-- original size: 910, stripped down to: 823
+
+if not modules then modules={} end modules ['data-inp']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local methodhandler=resolvers.methodhandler
+local registermethod=resolvers.registermethod
+local finders=allocate { helpers={},notfound=function() end }
+local openers=allocate { helpers={},notfound=function() end }
+local loaders=allocate { helpers={},notfound=function() return false,nil,0 end }
+registermethod("finders",finders,"uri")
+registermethod("openers",openers,"uri")
+registermethod("loaders",loaders,"uri")
+resolvers.finders=finders
+resolvers.openers=openers
+resolvers.loaders=loaders
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-out"] = package.loaded["data-out"] or true
+
+-- original size: 530, stripped down to: 475
+
+if not modules then modules={} end modules ['data-out']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local resolvers=resolvers
+local registermethod=resolvers.registermethod
+local savers=allocate { helpers={} }
+resolvers.savers=savers
+registermethod("savers",savers,"uri")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-fil"] = package.loaded["data-fil"] or true
+
+-- original size: 3863, stripped down to: 3310
+
+if not modules then modules={} end modules ['data-fil']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_files=logs.reporter("resolvers","files")
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+local finders,openers,loaders,savers=resolvers.finders,resolvers.openers,resolvers.loaders,resolvers.savers
+local locators,hashers,generators,concatinators=resolvers.locators,resolvers.hashers,resolvers.generators,resolvers.concatinators
+local checkgarbage=utilities.garbagecollector and utilities.garbagecollector.check
+function locators.file(specification)
+ local filename=specification.filename
+ local realname=resolveprefix(filename)
+ if realname and realname~='' and lfs.isdir(realname) then
+ if trace_locating then
+ report_files("file locator %a found as %a",filename,realname)
+ end
+ resolvers.appendhash('file',filename,true)
+ elseif trace_locating then
+ report_files("file locator %a not found",filename)
+ end
+end
+function hashers.file(specification)
+ local pathname=specification.filename
+ local content=caches.loadcontent(pathname,'files')
+ resolvers.registerfilehash(pathname,content,content==nil)
+end
+function generators.file(specification)
+ local pathname=specification.filename
+ local content=resolvers.scanfiles(pathname,false,true)
+ resolvers.registerfilehash(pathname,content,true)
+end
+concatinators.file=file.join
+function finders.file(specification,filetype)
+ local filename=specification.filename
+ local foundname=resolvers.findfile(filename,filetype)
+ if foundname and foundname~="" then
+ if trace_locating then
+ report_files("file finder: %a found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_files("file finder: %a not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader=function() return f:read () end,
+ close=function() logs.show_close(filename) return f:close() end,
+ }
+end
+function openers.file(specification,filetype)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"r")
+ if f then
+ if trace_locating then
+ report_files("file opener: %a opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_files("file opener: %a not found",filename)
+ end
+ return openers.notfound()
+end
+function loaders.file(specification,filetype)
+ local filename=specification.filename
+ if filename and filename~="" then
+ local f=io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_files("file loader: %a loaded",filename)
+ end
+ local s=f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true,s,#s
+ end
+ end
+ end
+ if trace_locating then
+ report_files("file loader: %a not found",filename)
+ end
+ return loaders.notfound()
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-con"] = package.loaded["data-con"] or true
+
+-- original size: 5010, stripped down to: 3588
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w\128-\255]+","-"))
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-use"] = package.loaded["data-use"] or true
+
+-- original size: 3899, stripped down to: 2984
+
+if not modules then modules={} end modules ['data-use']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub,find=string.format,string.lower,string.gsub,string.find
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_mounts=logs.reporter("resolvers","mounts")
+local resolvers=resolvers
+resolvers.automounted=resolvers.automounted or {}
+function resolvers.automount(usecache)
+ local mountpaths=resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
+ if (not mountpaths or #mountpaths==0) and usecache then
+ mountpaths=caches.getreadablepaths("mount")
+ end
+ if mountpaths and #mountpaths>0 then
+ statistics.starttiming(resolvers.instance)
+ for k=1,#mountpaths do
+ local root=mountpaths[k]
+ local f=io.open(root.."/url.tmi")
+ if f then
+ for line in f:lines() do
+ if line then
+ if find(line,"^[%%#%-]") then
+ elseif find(line,"^zip://") then
+ if trace_locating then
+ report_mounts("mounting %a",line)
+ end
+ table.insert(resolvers.automounted,line)
+ resolvers.usezipfile(line)
+ end
+ end
+ end
+ f:close()
+ end
+ end
+ statistics.stoptiming(resolvers.instance)
+ end
+end
+statistics.register("used config file",function() return caches.configfiles() end)
+statistics.register("used cache path",function() return caches.usedpaths() end)
+function statistics.savefmtstatus(texname,formatbanner,sourcefile)
+ local enginebanner=status.banner
+ if formatbanner and enginebanner and sourcefile then
+ local luvname=file.replacesuffix(texname,"luv")
+ local luvdata={
+ enginebanner=enginebanner,
+ formatbanner=formatbanner,
+ sourcehash=md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
+ sourcefile=sourcefile,
+ }
+ io.savedata(luvname,table.serialize(luvdata,true))
+ end
+end
+function statistics.checkfmtstatus(texname)
+ local enginebanner=status.banner
+ if enginebanner and texname then
+ local luvname=file.replacesuffix(texname,"luv")
+ if lfs.isfile(luvname) then
+ local luv=dofile(luvname)
+ if luv and luv.sourcefile then
+ local sourcehash=md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
+ local luvbanner=luv.enginebanner or "?"
+ if luvbanner~=enginebanner then
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
+ end
+ local luvhash=luv.sourcehash or "?"
+ if luvhash~=sourcehash then
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
+ end
+ else
+ return "invalid status file"
+ end
+ else
+ return "missing status file"
+ end
+ end
+ return true
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-zip"] = package.loaded["data-zip"] or true
+
+-- original size: 8772, stripped down to: 6841
+
+if not modules then modules={} end modules ['data-zip']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,find,match,lower=string.format,string.find,string.match,string.lower
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_zip=logs.reporter("resolvers","zip")
+local resolvers=resolvers
+zip=zip or {}
+local zip=zip
+zip.archives=zip.archives or {}
+local archives=zip.archives
+zip.registeredfiles=zip.registeredfiles or {}
+local registeredfiles=zip.registeredfiles
+local function validzip(str)
+ if not find(str,"^zip://") then
+ return "zip:///"..str
+ else
+ return str
+ end
+end
+function zip.openarchive(name)
+ if not name or name=="" then
+ return nil
+ else
+ local arch=archives[name]
+ if not arch then
+ local full=resolvers.findfile(name) or ""
+ arch=full~="" and zip.open(full) or false
+ archives[name]=arch
+ end
+ return arch
+ end
+end
+function zip.closearchive(name)
+ if name and name~="" and archives[name] then
+ zip.close(archives[name])
+ archives[name]=nil
+ end
+end
+function resolvers.locators.zip(specification)
+ local archive=specification.filename
+ local zipfile=archive and archive~="" and zip.openarchive(archive)
+ if trace_locating then
+ if zipfile then
+ report_zip("locator: archive %a found",archive)
+ else
+ report_zip("locator: archive %a not found",archive)
+ end
+ end
+end
+function resolvers.hashers.zip(specification)
+ local archive=specification.filename
+ if trace_locating then
+ report_zip("loading file %a",archive)
+ end
+ resolvers.usezipfile(specification.original)
+end
+function resolvers.concatinators.zip(zipfile,path,name)
+ if not path or path=="" then
+ return format('%s?name=%s',zipfile,name)
+ else
+ return format('%s?name=%s/%s',zipfile,path,name)
+ end
+end
+function resolvers.finders.zip(specification)
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("finder: archive %a found",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ dfile=zfile:close()
+ if trace_locating then
+ report_zip("finder: file %a found",queryname)
+ end
+ return specification.original
+ elseif trace_locating then
+ report_zip("finder: file %a not found",queryname)
+ end
+ elseif trace_locating then
+ report_zip("finder: unknown archive %a",archive)
+ end
+ end
+ end
+ if trace_locating then
+ report_zip("finder: %a not found",original)
+ end
+ return resolvers.finders.notfound()
+end
+function resolvers.openers.zip(specification)
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("opener; archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ if trace_locating then
+ report_zip("opener: file %a found",queryname)
+ end
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
+ elseif trace_locating then
+ report_zip("opener: file %a not found",queryname)
+ end
+ elseif trace_locating then
+ report_zip("opener: unknown archive %a",archive)
+ end
+ end
+ end
+ if trace_locating then
+ report_zip("opener: %a not found",original)
+ end
+ return resolvers.openers.notfound()
+end
+function resolvers.loaders.zip(specification)
+ local original=specification.original
+ local archive=specification.filename
+ if archive then
+ local query=url.query(specification.query)
+ local queryname=query.name
+ if queryname then
+ local zfile=zip.openarchive(archive)
+ if zfile then
+ if trace_locating then
+ report_zip("loader: archive %a opened",archive)
+ end
+ local dfile=zfile:open(queryname)
+ if dfile then
+ logs.show_load(original)
+ if trace_locating then
+ report_zip("loader; file %a loaded",original)
+ end
+ local s=dfile:read("*all")
+ dfile:close()
+ return true,s,#s
+ elseif trace_locating then
+ report_zip("loader: file %a not found",queryname)
+ end
+ elseif trace_locating then
+ report_zip("loader; unknown archive %a",archive)
+ end
+ end
+ end
+ if trace_locating then
+ report_zip("loader: %a not found",original)
+ end
+ return resolvers.loaders.notfound()
+end
+function resolvers.usezipfile(archive)
+ local specification=resolvers.splitmethod(archive)
+ local archive=specification.filename
+ if archive and not registeredfiles[archive] then
+ local z=zip.openarchive(archive)
+ if z then
+ local instance=resolvers.instance
+ local tree=url.query(specification.query).tree or ""
+ if trace_locating then
+ report_zip("registering: archive %a",archive)
+ end
+ statistics.starttiming(instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive)
+ registeredfiles[archive]=z
+ instance.files[archive]=resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(instance)
+ elseif trace_locating then
+ report_zip("registering: unknown archive %a",archive)
+ end
+ elseif trace_locating then
+ report_zip("registering: archive %a not found",archive)
+ end
+end
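+-- registerzipfile builds the files table (plus a lowercase remap) for all entries in the
+-- archive, optionally restricted to the given tree prefix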
+function resolvers.registerzipfile(z,tree)
+ local names={}
+ local files={}
+ local remap={}
+ local n=0
+ local filter=tree=="" and "^(.+)/(.-)$" or format("^%s/(.+)/(.-)$",tree)
+ local register=resolvers.registerfile
+ if trace_locating then
+ report_zip("registering: using filter %a",filter)
+ end
+ for i in z:files() do
+ local filename=i.filename
+ local path,name=match(filename,filter)
+ if not path then
+ n=n+1
+ register(names,filename,"")
+ local usedname=lower(filename)
+ files[usedname]=""
+ if usedname~=filename then
+ remap[usedname]=filename
+ end
+ elseif name and name~="" then
+ n=n+1
+ register(names,name,path)
+ local usedname=lower(name)
+ files[usedname]=path
+ if usedname~=name then
+ remap[usedname]=name
+ end
+ else
+ end
+ end
+ report_zip("registering: %s files registered",n)
+ return {
+ files=files,
+ remap=remap,
+ }
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-tre"] = package.loaded["data-tre"] or true
+
+-- original size: 8479, stripped down to: 5580
+
+if not modules then modules={} end modules ['data-tre']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find,gsub,lower=string.find,string.gsub,string.lower
+local basename,dirname,joinname=file.basename,file.dirname,file.join
+local globdir,isdir,isfile=dir.glob,lfs.isdir,lfs.isfile
+local P,lpegmatch=lpeg.P,lpeg.match
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_trees=logs.reporter("resolvers","trees")
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+local notfound=resolvers.finders.notfound
+local lookup=resolvers.get_from_content
+local collectors={}
+local found={}
+function resolvers.finders.tree(specification)
+ local spec=specification.filename
+ local okay=found[spec]
+ if okay==nil then
+ if spec~="" then
+ local path=dirname(spec)
+ local name=basename(spec)
+ if path=="" then
+ path="."
+ end
+ local names=collectors[path]
+ if not names then
+ local pattern=find(path,"/%*+$") and path or (path.."/*")
+ names=globdir(pattern)
+ collectors[path]=names
+ end
+ local pattern="/"..gsub(name,"([%.%-%+])","%%%1").."$"
+ for i=1,#names do
+ local fullname=names[i]
+ if find(fullname,pattern) then
+ found[spec]=fullname
+ return fullname
+ end
+ end
+ local pattern=lower(pattern)
+ for i=1,#names do
+ local fullname=lower(names[i])
+ if find(fullname,pattern) then
+ if isfile(fullname) then
+ found[spec]=fullname
+ return fullname
+ else
+ break
+ end
+ end
+ end
+ end
+ okay=notfound()
+ found[spec]=okay
+ end
+ return okay
+end
+function resolvers.locators.tree(specification)
+ local name=specification.filename
+ local realname=resolveprefix(name)
+ if realname and realname~='' and isdir(realname) then
+ if trace_locating then
+ report_trees("locator %a found",realname)
+ end
+ resolvers.appendhash('tree',name,false)
+ elseif trace_locating then
+ report_trees("locator %a not found",name)
+ end
+end
+function resolvers.hashers.tree(specification)
+ local name=specification.filename
+ report_trees("analyzing %a",name)
+ resolvers.methodhandler("hashers",name)
+ resolvers.generators.file(specification)
+end
+local collectors={}
+local splitter=lpeg.splitat("/**/")
+local stripper=lpeg.replacer { [P("/")*P("*")^1*P(-1)]="" }
+table.setmetatableindex(collectors,function(t,k)
+ local rootname=lpegmatch(stripper,k)
+ local dataname=joinname(rootname,"dirlist")
+ local content=caches.loadcontent(dataname,"files",dataname)
+ if not content then
+ content=resolvers.scanfiles(rootname,nil,nil,false,true)
+ caches.savecontent(dataname,"files",content,dataname)
+ end
+ t[k]=content
+ return content
+end)
+local function checked(root,p,n)
+ if p then
+ if type(p)=="table" then
+ for i=1,#p do
+ local fullname=joinname(root,p[i],n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ else
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ return notfound()
+end
+local function resolve(specification)
+ local filename=specification.filename
+ if filename~="" then
+ local root,rest=lpegmatch(splitter,filename)
+ if root and rest then
+ local path,name=dirname(rest),basename(rest)
+ if name~=rest then
+ local content=collectors[root]
+ local p,n=lookup(content,name)
+ if not p then
+ return notfound()
+ end
+ local pattern=".*/"..path.."$"
+ local istable=type(p)=="table"
+ if istable then
+ for i=1,#p do
+ local pi=p[i]
+ if pi==path or find(pi,pattern) then
+ local fullname=joinname(root,pi,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ end
+ elseif p==path or find(p,pattern) then
+ local fullname=joinname(root,p,n)
+ if isfile(fullname) then
+ return fullname
+ end
+ end
+ local queries=specification.queries
+ if queries and queries.option=="fileonly" then
+ return checked(root,p,n)
+ else
+ return notfound()
+ end
+ end
+ end
+ local path,name=dirname(filename),basename(filename)
+ local root=lpegmatch(stripper,path)
+ local content=collectors[path]
+ local p,n=lookup(content,name)
+ if p then
+ return checked(root,p,n)
+ end
+ end
+ return notfound()
+end
+resolvers.finders .dirlist=resolve
+resolvers.locators .dirlist=resolvers.locators .tree
+resolvers.hashers .dirlist=resolvers.hashers .tree
+resolvers.generators.dirlist=resolvers.generators.file
+resolvers.openers .dirlist=resolvers.openers .file
+resolvers.loaders .dirlist=resolvers.loaders .file
+function resolvers.finders.dirfile(specification)
+ local queries=specification.queries
+ if queries then
+ queries.option="fileonly"
+ else
+ specification.queries={ option="fileonly" }
+ end
+ return resolve(specification)
+end
+resolvers.locators .dirfile=resolvers.locators .dirlist
+resolvers.hashers .dirfile=resolvers.hashers .dirlist
+resolvers.generators.dirfile=resolvers.generators.dirlist
+resolvers.openers .dirfile=resolvers.openers .dirlist
+resolvers.loaders .dirfile=resolvers.loaders .dirlist
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-sch"] = package.loaded["data-sch"] or true
+
+-- original size: 6569, stripped down to: 5304
+
+if not modules then modules={} end modules ['data-sch']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local load=load
+local gsub,concat,format=string.gsub,table.concat,string.format
+local finders,openers,loaders=resolvers.finders,resolvers.openers,resolvers.loaders
+local trace_schemes=false trackers.register("resolvers.schemes",function(v) trace_schemes=v end)
+local report_schemes=logs.reporter("resolvers","schemes")
+local http=require("socket.http")
+local ltn12=require("ltn12")
+local resolvers=resolvers
+local schemes=resolvers.schemes or {}
+resolvers.schemes=schemes
+local cleaners={}
+schemes.cleaners=cleaners
+local threshold=24*60*60
+directives.register("schemes.threshold",function(v) threshold=tonumber(v) or threshold end)
+function cleaners.none(specification)
+ return specification.original
+end
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
+end
+function cleaners.md5(specification)
+ return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
+end
+local cleaner=cleaners.strip
+directives.register("schemes.cleanmethod",function(v) cleaner=cleaners[v] or cleaners.strip end)
+function resolvers.schemes.cleanname(specification)
+ local hash=cleaner(specification)
+ if trace_schemes then
+ report_schemes("hashing %a to %a",specification.original,hash)
+ end
+ return hash
+end
+local cached,loaded,reused,thresholds,handlers={},{},{},{},{}
+local function runcurl(name,cachename)
+ local command="curl --silent --insecure --create-dirs --output "..cachename.." "..name
+ os.execute(command)
+end
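+-- fetch stores a remote file in the schemes cache and only refetches when the cached copy
+-- is older than the (per scheme) threshold; schemes without a handler fall back to curl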
+local function fetch(specification)
+ local original=specification.original
+ local scheme=specification.scheme
+ local cleanname=schemes.cleanname(specification)
+ local cachename=caches.setfirstwritablefile(cleanname,"schemes")
+ if not cached[original] then
+ statistics.starttiming(schemes)
+ if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification)>(thresholds[scheme] or threshold)) then
+ cached[original]=cachename
+ local handler=handlers[scheme]
+ if handler then
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"built-in")
+ end
+ logs.flush()
+ handler(specification,cachename)
+ else
+ if trace_schemes then
+ report_schemes("fetching %a, protocol %a, method %a",original,scheme,"curl")
+ end
+ logs.flush()
+ runcurl(original,cachename)
+ end
+ end
+ if io.exists(cachename) then
+ cached[original]=cachename
+ if trace_schemes then
+ report_schemes("using cached %a, protocol %a, cachename %a",original,scheme,cachename)
+ end
+ else
+ cached[original]=""
+ if trace_schemes then
+ report_schemes("using missing %a, protocol %a",original,scheme)
+ end
+ end
+ loaded[scheme]=loaded[scheme]+1
+ statistics.stoptiming(schemes)
+ else
+ if trace_schemes then
+ report_schemes("reusing %a, protocol %a",original,scheme)
+ end
+ reused[scheme]=reused[scheme]+1
+ end
+ return cached[original]
+end
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
+end
+local opener=openers.file
+local loader=loaders.file
+local function install(scheme,handler,newthreshold)
+ handlers [scheme]=handler
+ loaded [scheme]=0
+ reused [scheme]=0
+ finders [scheme]=finder
+ openers [scheme]=opener
+ loaders [scheme]=loader
+ thresholds[scheme]=newthreshold or threshold
+end
+schemes.install=install
+local function http_handler(specification,cachename)
+ local tempname=cachename..".tmp"
+ local f=io.open(tempname,"wb")
+ local status,message=http.request {
+ url=specification.original,
+ sink=ltn12.sink.file(f)
+ }
+ if not status then
+ os.remove(tempname)
+ else
+ os.remove(cachename)
+ os.rename(tempname,cachename)
+ end
+ return cachename
+end
+install('http',http_handler)
+install('https')
+install('ftp')
+statistics.register("scheme handling time",function()
+ local l,r,nl,nr={},{},0,0
+ for k,v in table.sortedhash(loaded) do
+ if v>0 then
+ nl=nl+1
+ l[nl]=k..":"..v
+ end
+ end
+ for k,v in table.sortedhash(reused) do
+ if v>0 then
+ nr=nr+1
+ r[nr]=k..":"..v
+ end
+ end
+ local n=nl+nr
+ if n>0 then
+ l=nl>0 and concat(l) or "none"
+ r=nr>0 and concat(r) or "none"
+ return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s",
+ statistics.elapsedtime(schemes),n,threshold,l,r)
+ else
+ return nil
+ end
+end)
+local httprequest=http.request
+local toquery=url.toquery
+local function fetchstring(url,data)
+ local q=data and toquery(data)
+ if q then
+ url=url.."?"..q
+ end
+ local reply=httprequest(url)
+ return reply
+end
+schemes.fetchstring=fetchstring
+function schemes.fetchtable(url,data)
+ local reply=fetchstring(url,data)
+ if reply then
+ local s=load("return "..reply)
+ if s then
+ return s()
+ end
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-lua"] = package.loaded["data-lua"] or true
+
+-- original size: 4313, stripped down to: 3227
+
+if not modules then modules={} end modules ['data-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local package,lpeg=package,lpeg
+local gsub=string.gsub
+local concat=table.concat
+local addsuffix=file.addsuffix
+local P,S,Cs,lpegmatch=lpeg.P,lpeg.S,lpeg.Cs,lpeg.match
+local luasuffixes={ 'tex','lua' }
+local libsuffixes={ 'lib' }
+local luaformats={ 'TEXINPUTS','LUAINPUTS' }
+local libformats={ 'CLUAINPUTS' }
+local helpers=package.helpers or {}
+local methods=helpers.methods or {}
+local resolvers=resolvers
+local resolveprefix=resolvers.resolve
+helpers.report=logs.reporter("resolvers","libraries")
+trackers.register("resolvers.libraries",function(v) helpers.trace=v end)
+trackers.register("resolvers.locating",function(v) helpers.trace=v end)
+helpers.sequence={
+ "already loaded",
+ "preload table",
+ "lua variable format",
+ "lib variable format",
+ "lua extra list",
+ "lib extra list",
+ "path specification",
+ "cpath specification",
+ "all in one fallback",
+ "not loaded",
+}
+local pattern=Cs(P("!")^0/""*(P("/")*P(-1)/"/"+P("/")^1/"/"+1)^0)
+function helpers.cleanpath(path)
+ return resolveprefix(lpegmatch(pattern,path))
+end
+local loadedaslib=helpers.loadedaslib
+local getextraluapaths=package.extraluapaths
+local getextralibpaths=package.extralibpaths
+local registerpath=helpers.registerpath
+local lualibfile=helpers.lualibfile
+local luaformatpaths
+local libformatpaths
+local function getluaformatpaths()
+ if not luaformatpaths then
+ luaformatpaths={}
+ for i=1,#luaformats do
+ registerpath("lua format","lua",luaformatpaths,resolvers.expandedpathlistfromvariable(luaformats[i]))
+ end
+ end
+ return luaformatpaths
+end
+local function getlibformatpaths()
+ if not libformatpaths then
+ libformatpaths={}
+ for i=1,#libformats do
+ registerpath("lib format","lib",libformatpaths,resolvers.expandedpathlistfromvariable(libformats[i]))
+ end
+ end
+ return libformatpaths
+end
+local function loadedbyformat(name,rawname,suffixes,islib,what)
+ local trace=helpers.trace
+ local report=helpers.report
+ for i=1,#suffixes do
+ local format=suffixes[i]
+ local resolved=resolvers.findfile(name,format) or ""
+ if trace then
+ report("%s format, identifying %a using format %a",what,name,format)
+ end
+ if resolved~="" then
+ if trace then
+ report("%s format, %a found on %a",what,name,resolved)
+ end
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
+ end
+ end
+ end
+end
+helpers.loadedbyformat=loadedbyformat
+methods["lua variable format"]=function(name)
+ if helpers.trace then
+ helpers.report("%s format, checking %s paths","lua",#getluaformatpaths())
+ end
+ return loadedbyformat(addsuffix(lualibfile(name),"lua"),name,luasuffixes,false,"lua")
+end
+methods["lib variable format"]=function(name)
+ if helpers.trace then
+ helpers.report("%s format, checking %s paths","lib",#getlibformatpaths())
+ end
+ return loadedbyformat(addsuffix(lualibfile(name),os.libsuffix),name,libsuffixes,true,"lib")
+end
+resolvers.loadlualib=require
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-aux"] = package.loaded["data-aux"] or true
+
+-- original size: 2431, stripped down to: 1996
+
+if not modules then modules={} end modules ['data-aux']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local find=string.find
+local type,next=type,next
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local resolvers=resolvers
+local report_scripts=logs.reporter("resolvers","scripts")
+function resolvers.updatescript(oldname,newname)
+ local scriptpath="context/lua"
+ newname=file.addsuffix(newname,"lua")
+ local oldscript=resolvers.cleanpath(oldname)
+ if trace_locating then
+ report_scripts("to be replaced old script %a",oldscript)
+ end
+ local newscripts=resolvers.findfiles(newname) or {}
+ if #newscripts==0 then
+ if trace_locating then
+ report_scripts("unable to locate new script")
+ end
+ else
+ for i=1,#newscripts do
+ local newscript=resolvers.cleanpath(newscripts[i])
+ if trace_locating then
+ report_scripts("checking new script %a",newscript)
+ end
+ if oldscript==newscript then
+ if trace_locating then
+ report_scripts("old and new script are the same")
+ end
+ elseif not find(newscript,scriptpath) then
+ if trace_locating then
+ report_scripts("new script should come from %a",scriptpath)
+ end
+ elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
+ if trace_locating then
+ report_scripts("invalid new script name")
+ end
+ else
+ local newdata=io.loaddata(newscript)
+ if newdata then
+ if trace_locating then
+ report_scripts("old script content replaced by new content")
+ end
+ io.savedata(oldscript,newdata)
+ break
+ elseif trace_locating then
+ report_scripts("unable to load new script")
+ end
+ end
+ end
+ end
+end
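+-- Illustrative usage sketch (added comment, not part of the original code): this is
+-- what the --selfupdate branch at the end of this script effectively does:
+--
+--   resolvers.updatescript(own.name,"mtxrun") -- replace the running copy by the tds one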
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-tmf"] = package.loaded["data-tmf"] or true
+
+-- original size: 2601, stripped down to: 1627
+
+if not modules then modules={} end modules ['data-tmf']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local resolvers=resolvers
+local report_tds=logs.reporter("resolvers","tds")
+function resolvers.load_tree(tree,resolve)
+ if type(tree)=="string" and tree~="" then
+ local getenv,setenv=resolvers.getenv,resolvers.setenv
+ local texos="texmf-"..os.platform
+ local oldroot=environment.texroot
+ local newroot=file.collapsepath(tree)
+ local newtree=file.join(newroot,texos)
+ local newpath=file.join(newtree,"bin")
+ if not lfs.isdir(newtree) then
+ report_tds("no %a under tree %a",texos,tree)
+ os.exit()
+ end
+ if not lfs.isdir(newpath) then
+ report_tds("no '%s/bin' under tree %a",texos,tree)
+ os.exit()
+ end
+ local texmfos=newtree
+ environment.texroot=newroot
+ environment.texos=texos
+ environment.texmfos=texmfos
+ if resolve then
+ resolvers.luacnfspec=resolvers.resolve(resolvers.luacnfspec)
+ end
+ setenv('SELFAUTOPARENT',newroot)
+ setenv('SELFAUTODIR',newtree)
+ setenv('SELFAUTOLOC',newpath)
+ setenv('TEXROOT',newroot)
+ setenv('TEXOS',texos)
+ setenv('TEXMFOS',texmfos)
+ setenv('TEXMFCNF',resolvers.luacnfspec,true)
+ setenv('PATH',newpath..io.pathseparator..getenv('PATH'))
+ report_tds("changing from root %a to %a",oldroot,newroot)
+ report_tds("prepending %a to PATH",newpath)
+ report_tds("setting TEXMFCNF to %a",resolvers.luacnfspec)
+ report_tds()
+ end
+end
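+-- Illustrative usage sketch (added comment, not part of the original code; the path is
+-- hypothetical): switch to another texmf tree and resolve TEXMFCNF in one go:
+--
+--   resolvers.load_tree("/opt/context/tex",true)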
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["data-lst"] = package.loaded["data-lst"] or true
+
+-- original size: 2734, stripped down to: 2354
+
+if not modules then modules={} end modules ['data-lst']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local rawget,type,next=rawget,type,next
+local find,concat,upper=string.find,table.concat,string.upper
+local fastcopy,sortedpairs=table.fastcopy,table.sortedpairs
+local resolvers=resolvers
+local listers=resolvers.listers or {}
+resolvers.listers=listers
+local resolveprefix=resolvers.resolve
+local report_lists=logs.reporter("resolvers","lists")
+local function tabstr(str)
+ if type(str)=='table' then
+ return concat(str," | ")
+ else
+ return str
+ end
+end
+function listers.variables(pattern)
+ local instance=resolvers.instance
+ local environment=instance.environment
+ local variables=instance.variables
+ local expansions=instance.expansions
+ local pattern=upper(pattern or "")
+ local configured={}
+ local order=instance.order
+ for i=1,#order do
+ for k,v in next,order[i] do
+ if v~=nil and configured[k]==nil then
+ configured[k]=v
+ end
+ end
+ end
+ local env=fastcopy(environment)
+ local var=fastcopy(variables)
+ local exp=fastcopy(expansions)
+ for key,value in sortedpairs(configured) do
+ if key~="" and (pattern=="" or find(upper(key),pattern)) then
+ report_lists(key)
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset")
+ end
+ end
+ instance.environment=fastcopy(env)
+ instance.variables=fastcopy(var)
+ instance.expansions=fastcopy(exp)
+end
+local report_resolved=logs.reporter("system","resolved")
+function listers.configurations()
+ local configurations=resolvers.instance.specification
+ for i=1,#configurations do
+ report_resolved("file : %s",resolveprefix(configurations[i]))
+ end
+ report_resolved("")
+ local list=resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+ for i=1,#list do
+ local li=resolveprefix(list[i])
+ if lfs.isdir(li) then
+ report_resolved("path - %s",li)
+ else
+ report_resolved("path + %s",li)
+ end
+ end
+end
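+-- Illustrative usage sketch (added comment, not part of the original code; the pattern
+-- is just an example): these listers back the --variables and --configurations flags:
+--
+--   resolvers.load("nofiles")
+--   resolvers.listers.variables("TEXMF")
+--   resolvers.listers.configurations()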
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["util-lib"] = package.loaded["util-lib"] or true
+
+-- original size: 11549, stripped down to: 5905
+
+if not modules then modules={} end modules ['util-lib']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gsub,find=string.gsub,string.find
+local pathpart,nameonly,joinfile=file.pathpart,file.nameonly,file.join
+local findfile,findfiles=resolvers and resolvers.findfile,resolvers and resolvers.findfiles
+local loaded=package.loaded
+local report_swiglib=logs.reporter("swiglib")
+local trace_swiglib=false trackers.register("resolvers.swiglib",function(v) trace_swiglib=v end)
+local done=false
+local function requireswiglib(required,version)
+ local trace_swiglib=trace_swiglib or package.helpers.trace
+ local library=loaded[required]
+ if library==nil then
+ if trace_swiglib then
+ report_swiglib("requiring library %a with version %a",required,version or "any")
+ end
+ local required_full=gsub(required,"%.","/")
+ local required_path=pathpart(required_full)
+ local required_base=nameonly(required_full)
+ local required_name=required_base.."."..os.libsuffix
+ local version=type(version)=="string" and version~="" and version or false
+ local engine=environment.ownmain or false
+ if trace_swiglib and not done then
+ local list=resolvers.expandedpathlistfromvariable("lib")
+ for i=1,#list do
+ report_swiglib("tds path %i: %s",i,list[i])
+ end
+ end
+ local function found(locate,asked_library,how,...)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a",how,asked_library)
+ end
+ return locate(asked_library,...)
+ end
+ local function check(locate,...)
+ local found=nil
+ if version then
+ local asked_library=joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found=locate(asked_library,...)
+ end
+ if not found or found=="" then
+ local asked_library=joinfile(required_path,required_name)
+ if trace_swiglib then
+     report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found=locate(asked_library,...)
+ end
+ return found and found~="" and found or false
+ end
+ local function attempt(checkpattern)
+ if trace_swiglib then
+ report_swiglib("checking tds lib paths strictly")
+ end
+ local found=findfile and check(findfile,"lib")
+ if found and (not checkpattern or find(found,checkpattern)) then
+ return found
+ end
+ if trace_swiglib then
+ report_swiglib("checking tds lib paths with wildcard")
+ end
+ local asked_library=joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list=findfiles(asked_library,"lib",true)
+ if list and #list>0 then
+ table.sort(list)
+ local found=list[#list]
+ if found and (not checkpattern or find(found,checkpattern)) then
+ return found
+ end
+ end
+ if trace_swiglib then
+ report_swiglib("checking lib paths")
+ end
+ package.extralibpath(environment.ownpath)
+ local paths=package.libpaths()
+ for i=1,#paths do
+ local found=check(lfs.isfile)
+ if found and (not checkpattern or find(found,checkpattern)) then
+ return found
+ end
+ end
+ return false
+ end
+ local found_library=nil
+ if engine then
+ if trace_swiglib then
+    report_swiglib("attempt 1, engine %a",engine)
+ end
+ found_library=attempt("/"..engine.."/")
+ if not found_library then
+ if trace_swiglib then
+     report_swiglib("attempt 2, no engine")
+ end
+ found_library=attempt()
+ end
+ else
+ found_library=attempt()
+ end
+ if not found_library then
+ if trace_swiglib then
+ report_swiglib("not found: %a",required)
+ end
+ library=false
+ else
+ local path=pathpart(found_library)
+ local base=nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ local message=nil
+ local opener="luaopen_"..required_base
+ library,message=package.loadlib(found_library,opener)
+ local libtype=type(library)
+ if libtype=="function" then
+ library=library()
+ else
+ report_swiglib("load error: %a returns %a, message %a, library %a",opener,libtype,(string.gsub(message or "no message","[%s]+$","")),found_library or "no library")
+ library=false
+ end
+ dir.pop()
+ end
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required]=library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+local savedrequire=require
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
+local swiglibs={}
+local initializer="core"
+function swiglib(name,version)
+ local library=swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ if trace_swiglib then
+ report_swiglib("loading %a",name)
+ end
+  local fullname
+  if not find(name,"%."..initializer.."$") then
+   fullname="swiglib."..name.."."..initializer
+  else
+   fullname="swiglib."..name
+  end
+ library=requireswiglib(fullname,version)
+ swiglibs[name]=library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
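+-- Illustrative usage sketch (added comment, not part of the original code; the library
+-- name is hypothetical): swig bound libraries load via the swiglib helper or the
+-- overloaded require:
+--
+--   local mylib = swiglib("mylib.core")
+--   local mylib = require("swiglib.mylib.core")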
+statistics.register("used swiglibs",function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["luat-sta"] = package.loaded["luat-sta"] or true
+
+-- original size: 5703, stripped down to: 2507
+
+if not modules then modules={} end modules ['luat-sta']={
+ version=1.001,
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local gmatch,match=string.gmatch,string.match
+local type=type
+states=states or {}
+local states=states
+states.data=states.data or {}
+local data=states.data
+states.hash=states.hash or {}
+local hash=states.hash
+states.tag=states.tag or ""
+states.filename=states.filename or ""
+function states.save(filename,tag)
+ tag=tag or states.tag
+ filename=file.addsuffix(filename or states.filename,'lus')
+ io.savedata(filename,
+ "-- generator : luat-sta.lua\n".."-- state tag : "..tag.."\n\n"..table.serialize(data[tag or states.tag] or {},true)
+ )
+end
+function states.load(filename,tag)
+ states.filename=filename
+ states.tag=tag or "whatever"
+ states.filename=file.addsuffix(states.filename,'lus')
+ data[states.tag],hash[states.tag]=(io.exists(filename) and dofile(filename)) or {},{}
+end
+local function set_by_tag(tag,key,value,default,persistent)
+ local d,h=data[tag],hash[tag]
+ if d then
+ if type(d)=="table" then
+ local dkey,hkey=key,key
+ local pre,post=match(key,"(.+)%.([^%.]+)$")
+ if pre and post then
+ for k in gmatch(pre,"[^%.]+") do
+ local dk=d[k]
+ if not dk then
+ dk={}
+ d[k]=dk
+ elseif type(dk)=="string" then
+ break
+ end
+ d=dk
+ end
+ dkey,hkey=post,key
+ end
+ if value==nil then
+ value=default
+ elseif value==false then
+ elseif persistent then
+ value=value or d[dkey] or default
+ else
+ value=value or default
+ end
+ d[dkey],h[hkey]=value,value
+ elseif type(d)=="string" then
+ data[tag],hash[tag]=value,value
+ end
+ end
+end
+local function get_by_tag(tag,key,default)
+ local h=hash[tag]
+ if h and h[key] then
+ return h[key]
+ else
+ local d=data[tag]
+ if d then
+ for k in gmatch(key,"[^%.]+") do
+ local dk=d[k]
+ if dk~=nil then
+ d=dk
+ else
+ return default
+ end
+ end
+ if d==false then
+ return false
+ else
+ return d or default
+ end
+ end
+ end
+end
+states.set_by_tag=set_by_tag
+states.get_by_tag=get_by_tag
+function states.set(key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
+end
+function states.get(key,default)
+ return get_by_tag(states.tag,key,default)
+end
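+-- Illustrative usage sketch (added comment, not part of the original code; keys are
+-- hypothetical): after states.load has bound a tag, dotted keys expand into nested
+-- tables:
+--
+--   states.set("paper.size","a4") -- stores data[states.tag].paper.size
+--   states.get("paper.size")      -- returns "a4" (or the default when unset)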
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+package.loaded["luat-fmt"] = package.loaded["luat-fmt"] or true
+
+-- original size: 5955, stripped down to: 4926
+
+if not modules then modules={} end modules ['luat-fmt']={
+ version=1.001,
+ comment="companion to mtxrun",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format=string.format
+local concat=table.concat
+local quoted=string.quoted
+local luasuffixes=utilities.lua.suffixes
+local report_format=logs.reporter("resolvers","formats")
+local function primaryflags()
+ local trackers=environment.argument("trackers")
+ local directives=environment.argument("directives")
+ local flags={}
+ if trackers and trackers~="" then
+  flags[#flags+1]="--trackers="..quoted(trackers)
+ end
+ if directives and directives~="" then
+  flags[#flags+1]="--directives="..quoted(directives)
+ end
+ if environment.argument("jit") then
+  flags[#flags+1]="--jiton"
+ end
+ return concat(flags," ")
+end
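+-- Illustrative sketch (added comment, not part of the original code): with both
+-- options given on the command line the collected flags look like
+--
+--   --trackers="resolvers.locating" --directives="logs.target=file"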
+function environment.make_format(name)
+ local engine=environment.ownmain or "luatex"
+ local olddir=dir.current()
+ local path=caches.getwritablepath("formats",engine) or ""
+ if path~="" then
+ lfs.chdir(path)
+ end
+ report_format("using format path %a",dir.current())
+ local texsourcename=file.addsuffix(name,"mkiv")
+ local fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename=="" then
+ texsourcename=file.addsuffix(name,"tex")
+ fulltexsourcename=resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename=="" then
+ report_format("no tex source file with name %a (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file %a",fulltexsourcename)
+ end
+ local texsourcepath=dir.expandname(file.dirname(fulltexsourcename))
+ local specificationname=file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname=="" then
+ specificationname=file.join(texsourcepath,"context.lus")
+ fullspecificationname=resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname=="" then
+ report_format("unknown stub specification %a",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath=file.dirname(fullspecificationname)
+ local usedluastub=nil
+ local usedlualibs=dofile(fullspecificationname)
+ if type(usedlualibs)=="string" then
+ usedluastub=file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs)=="table" then
+ report_format("using stub specification %a",fullspecificationname)
+ local texbasename=file.basename(name)
+ local luastubname=file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname=file.addsuffix(texbasename,luasuffixes.luc)
+ report_format("creating initialization file %a",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file %a",lucstubname)
+ usedluastub=lucstubname
+ else
+ report_format("using uncompiled initialization file %a",luastubname)
+ usedluastub=luastubname
+ end
+ else
+ report_format("invalid stub specification %a",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local command=format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform=="unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.execute(command)
+ local pattern=file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ local mp=dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name=mp[i]
+ report_format("removing related mplib format %a",file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
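+-- Illustrative sketch (added comment, not part of the original code; names depend on
+-- the installation): the generated ini run resembles
+--
+--   luatex --ini --lua=cont-en.luc cont-en.mkiv \dump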
+function environment.run_format(name,data,more)
+ if name and name~="" then
+ local engine=environment.ownmain or "luatex"
+ local barename=file.removesuffix(name)
+ local fmtname=caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
+ if fmtname=="" then
+ fmtname=resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname=resolvers.cleanpath(fmtname)
+ if fmtname=="" then
+ report_format("no format with name %a",name)
+ else
+ local barename=file.removesuffix(name)
+ local luaname=file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname=file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
+ else
+ local command=format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more~="" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.execute(command)
+ end
+ end
+ end
+end
+
+
+end -- of closure
+
+-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
+-- skipped libraries : -
+-- original bytes : 745618
+-- stripped bytes : 269191
+
+-- end library merge
+
+-- We need this hack till luatex is fixed.
+--
+-- for k,v in pairs(arg) do print(k,v) end
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
+end
+
+-- End of hack.
+
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
+local concat = table.concat
+
+local ownname = environment and environment.ownname or arg[0] or 'mtxrun.lua'
+local ownpath = gsub(match(ownname,"^(.+)[\\/].-$") or ".","\\","/")
+local owntree = environment and environment.ownpath or ownpath
+
+local ownlibs = { -- order can be made better
+
+ 'l-lua.lua',
+ 'l-package.lua',
+ 'l-lpeg.lua',
+ 'l-function.lua',
+ 'l-string.lua',
+ 'l-table.lua',
+ 'l-io.lua',
+ 'l-number.lua',
+ 'l-set.lua',
+ 'l-os.lua',
+ 'l-file.lua',
+ 'l-gzip.lua',
+ 'l-md5.lua',
+ 'l-url.lua',
+ 'l-dir.lua',
+ 'l-boolean.lua',
+ 'l-unicode.lua',
+ 'l-math.lua',
+
+ 'util-str.lua', -- code might move to l-string
+ 'util-tab.lua',
+ 'util-sto.lua',
+ 'util-prs.lua',
+ 'util-fmt.lua',
+
+ 'trac-set.lua',
+ 'trac-log.lua',
+ 'trac-inf.lua', -- was before trac-set
+ 'trac-pro.lua', -- not really needed
+ 'util-lua.lua', -- indeed here?
+ 'util-deb.lua',
+
+ 'util-mrg.lua',
+ 'util-tpl.lua',
+
+ 'util-env.lua',
+ 'luat-env.lua', -- can come before inf (as in mkiv)
+
+ 'lxml-tab.lua',
+ 'lxml-lpt.lua',
+ -- 'lxml-ent.lua',
+ 'lxml-mis.lua',
+ 'lxml-aux.lua',
+ 'lxml-xml.lua',
+
+ 'trac-xml.lua',
+
+ 'data-ini.lua',
+ 'data-exp.lua',
+ 'data-env.lua',
+ 'data-tmp.lua',
+ 'data-met.lua',
+ 'data-res.lua',
+ 'data-pre.lua',
+ 'data-inp.lua',
+ 'data-out.lua',
+ 'data-fil.lua',
+ 'data-con.lua',
+ 'data-use.lua',
+-- 'data-tex.lua',
+-- 'data-bin.lua',
+ 'data-zip.lua',
+ 'data-tre.lua',
+ 'data-sch.lua',
+ 'data-lua.lua',
+ 'data-aux.lua', -- updater
+ 'data-tmf.lua',
+ 'data-lst.lua',
+
+ 'util-lib.lua', -- swiglib
+
+ 'luat-sta.lua',
+ 'luat-fmt.lua',
+
+}
+
+-- c:/data/develop/tex-context/tex/texmf-win64/bin/../../texmf-context/tex/context/base/data-tmf.lua
+-- c:/data/develop/context/sources/data-tmf.lua
+
+local ownlist = {
+ -- '.',
+ -- ownpath ,
+ owntree .. "/../../../../context/sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+
+if ownpath == "." then table.remove(ownlist,1) end
+
+own = {
+ name = ownname,
+ path = ownpath,
+ tree = owntree,
+ list = ownlist,
+ libs = ownlibs,
+}
+
+local function locate_libs()
+ for l=1,#ownlibs do
+ local lib = ownlibs[l]
+ for p =1,#ownlist do
+ local pth = ownlist[p]
+ local filename = pth .. "/" .. lib
+ local found = lfs.isfile(filename)
+ if found then
+ package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
+ return pth
+ end
+ end
+ end
+end
+
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for l=1,#ownlibs do
+ local filename = found .. "/" .. ownlibs[l]
+ local codeblob = loadfile(filename)
+ if codeblob then
+ codeblob()
+ end
+ end
+ else
+ resolvers = nil
+ end
+end
+
+if not resolvers then
+ load_libs()
+end
+
+if not resolvers then
+ print("")
+ print("Mtxrun is unable to start up due to lack of libraries. You may")
+ print("try to run 'lua mtxrun.lua --selfmerge' in the path where this")
+ print("script is located (normally under ..../scripts/context/lua) which")
+ print("will make this script library independent.")
+ os.exit()
+end
+
+-- verbosity
+
+----- e_verbose = environment.arguments["verbose"]
+
+local e_verbose = false
+
+-- some common flags (also passed through environment)
+
+local e_silent = environment.argument("silent")
+local e_noconsole = environment.argument("noconsole")
+
+local e_trackers = environment.argument("trackers")
+local e_directives = environment.argument("directives")
+local e_experiments = environment.argument("experiments")
+
+if e_silent == true then
+ e_silent = "*"
+end
+
+if type(e_silent) == "string" then
+ if type(e_directives) == "string" then
+ e_directives = format("%s,logs.blocked={%s}",e_directives,e_silent)
+ else
+ e_directives = format("logs.blocked={%s}",e_silent)
+ end
+end
+
+if e_noconsole then
+ if type(e_directives) == "string" then
+ e_directives = format("%s,logs.target=file",e_directives)
+ else
+ e_directives = format("logs.target=file")
+ end
+end
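+-- Illustrative sketch (added comment, not part of the original code): for instance
+-- "--silent=lua --noconsole" ends up as the directive string
+--
+--   logs.blocked={lua},logs.target=file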
+
+if e_trackers then trackers .enable(e_trackers) end
+if e_directives then directives .enable(e_directives) end
+if e_experiments then experiments.enable(e_experiments) end
+
+if not environment.trackers then environment.trackers = e_trackers end
+if not environment.directives then environment.directives = e_directives end
+if not environment.experiments then environment.experiments = e_experiments end
+
+--
+
+local instance = resolvers.reset()
+
+local helpinfo = [[
+<?xml version="1.0" ?>
+<application>
+ <metadata>
+ <entry name="name">mtxrun</entry>
+ <entry name="detail">ConTeXt TDS Runner Tool</entry>
+ <entry name="version">1.31</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+    <flag name="script"><short>run an mtx script (lua preferred method) (<ref name="noquotes"/>), no script gives list</short></flag>
+ <flag name="evaluate"><short>run code passed on the commandline (between quotes)</short></flag>
+ <flag name="execute"><short>run a script or program (texmfstart method) (<ref name="noquotes"/>)</short></flag>
+ <flag name="resolve"><short>resolve prefixed arguments</short></flag>
+ <flag name="ctxlua"><short>run internally (using preloaded libs)</short></flag>
+ <flag name="internal"><short>run script using built in libraries (same as <ref name="ctxlua"/>)</short></flag>
+ <flag name="locate"><short>locate given filename in database (default) or system (<ref name="first"/> <ref name="all"/> <ref name="detail"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="autotree"><short>use texmf tree cf. env texmfstart_tree or texmfstarttree</short></flag>
+ <flag name="tree" value="pathtotree"><short>use given texmf tree (default file: setuptex.tmf)</short></flag>
+ <flag name="environment" value="name"><short>use given (tmf) environment file</short></flag>
+ <flag name="path" value="runpath"><short>go to given path before execution</short></flag>
+    <flag name="ifchanged" value="filename"><short>only execute when given file has changed (md5 checksum)</short></flag>
+ <flag name="iftouched" value="old,new"><short>only execute when given file has changed (time stamp)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="makestubs"><short>create stubs for (context related) scripts</short></flag>
+ <flag name="removestubs"><short>remove stubs (context related) scripts</short></flag>
+    <flag name="stubpath" value="binpath"><short>paths where stubs will be written</short></flag>
+ <flag name="windows"><short>create windows (mswin) stubs</short></flag>
+ <flag name="unix"><short>create unix (linux) stubs</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="verbose"><short>give a bit more info</short></flag>
+ <flag name="trackers" value="list"><short>enable given trackers</short></flag>
+ <flag name="progname" value="str"><short>format or backend</short></flag>
+ <flag name="systeminfo" value="str"><short>show current operating system, processor, etc</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="edit"><short>launch editor with found file</short></flag>
+ <flag name="launch"><short>launch files like manuals, assumes os support (<ref name="all"/>)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="timedrun"><short>run a script and time its run</short></flag>
+ <flag name="autogenerate"><short>regenerate databases if needed (handy when used to run context in an editor)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="usekpse"><short>use kpse as fallback (when no mkiv and cache installed, often slower)</short></flag>
+ <flag name="forcekpse"><short>force using kpse (handy when no mkiv and cache installed but less functionality)</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="prefixes"><short>show supported prefixes</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="generate"><short>generate file database</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="variables"><short>show configuration variables</short></flag>
+ <flag name="configurations"><short>show configuration order</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="directives"><short>show (known) directives</short></flag>
+ <flag name="trackers"><short>show (known) trackers</short></flag>
+ <flag name="experiments"><short>show (known) experiments</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="expand-braces"><short>expand complex variable</short></flag>
+ <flag name="expand-path"><short>expand variable (resolve paths)</short></flag>
+ <flag name="expand-var"><short>expand variable (resolve references)</short></flag>
+ <flag name="show-path"><short>show path expansion of ...</short></flag>
+ <flag name="var-value"><short>report value of variable</short></flag>
+ <flag name="find-file"><short>report file location</short></flag>
+ <flag name="find-path"><short>report path of file</short></flag>
+ </subcategory>
+ <subcategory>
+ <flag name="pattern" value="string"><short>filter variables</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+</application>
+]]
+
+local application = logs.application {
+ name = "mtxrun",
+ banner = "ConTeXt TDS Runner Tool 1.31",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+
+messages = messages or { } -- for the moment
+
+runners = runners or { } -- global (might become local)
+
+runners.applications = {
+ ["lua"] = "luatex --luaonly",
+ ["luc"] = "luatex --luaonly",
+ ["pl"] = "perl",
+ ["py"] = "python",
+ ["rb"] = "ruby",
+}
+
+runners.suffixes = {
+ 'rb', 'lua', 'py', 'pl'
+}
+
+runners.registered = {
+ texexec = { 'texexec.rb', false }, -- context mkii runner (only tool not to be luafied)
+ texutil = { 'texutil.rb', true }, -- old perl based index sorter for mkii (old versions need it)
+ texfont = { 'texfont.pl', true }, -- perl script that makes mkii font metric files
+ texfind = { 'texfind.pl', false }, -- perltk based tex searching tool, mostly used at pragma
+ texshow = { 'texshow.pl', false }, -- perltk based context help system, will be luafied
+ -- texwork = { 'texwork.pl', false }, -- perltk based editing environment, only used at pragma
+ makempy = { 'makempy.pl', true },
+ mptopdf = { 'mptopdf.pl', true },
+ pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced)
+ -- examplex = { 'examplex.rb', false },
+ concheck = { 'concheck.rb', false },
+ runtools = { 'runtools.rb', true },
+ textools = { 'textools.rb', true },
+ tmftools = { 'tmftools.rb', true },
+ ctxtools = { 'ctxtools.rb', true },
+ rlxtools = { 'rlxtools.rb', true },
+ pdftools = { 'pdftools.rb', true },
+ mpstools = { 'mpstools.rb', true },
+ -- exatools = { 'exatools.rb', true },
+ xmltools = { 'xmltools.rb', true },
+ -- luatools = { 'luatools.lua', true },
+ mtxtools = { 'mtxtools.rb', true },
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
+}
+
+runners.launchers = {
+ windows = { },
+ unix = { },
+}
+
+-- like runners.libpath("framework"): looks on script's subpath
+
+function runners.libpath(...)
+ package.prepend_libpath(file.dirname(environment.ownscript),...)
+ package.prepend_libpath(file.dirname(environment.ownname) ,...)
+end
+
+function runners.prepare()
+ local checkname = environment.argument("ifchanged")
+ if type(checkname) == "string" and checkname ~= "" then
+ local oldchecksum = file.loadchecksum(checkname)
+ local newchecksum = file.checksum(checkname)
+ if oldchecksum == newchecksum then
+ if e_verbose then
+ report("file '%s' is unchanged",checkname)
+ end
+ return "skip"
+ elseif e_verbose then
+ report("file '%s' is changed, processing started",checkname)
+ end
+ file.savechecksum(checkname)
+ end
+ local touchname = environment.argument("iftouched")
+ if type(touchname) == "string" and touchname ~= "" then
+ local oldname, newname = string.splitup(touchname, ",")
+ if oldname and newname and oldname ~= "" and newname ~= "" then
+ if not file.needs_updating(oldname,newname) then
+ if e_verbose then
+ report("file '%s' and '%s' have same age",oldname,newname)
+ end
+ return "skip"
+ elseif e_verbose then
+ report("file '%s' is older than '%s'",oldname,newname)
+ end
+ end
+ end
+ local runpath = environment.argument("path")
+ if type(runpath) == "string" and not lfs.chdir(runpath) then
+ report("unable to change to path '%s'",runpath)
+ return "error"
+ end
+ runners.prepare = function() end
+ return "run"
+end
+
+function runners.execute_script(fullname,internal,nosplit)
+ local noquote = environment.argument("noquotes")
+ if fullname and fullname ~= "" then
+ local state = runners.prepare()
+ if state == 'error' then
+ return false
+ elseif state == 'skip' then
+ return true
+ elseif state == "run" then
+ local path, name, suffix = file.splitname(fullname)
+ local result = ""
+ if path ~= "" then
+ result = fullname
+ elseif name then
+ name = gsub(name,"^int[%a]*:",function()
+ internal = true
+ return ""
+ end )
+ name = gsub(name,"^script:","")
+ if suffix == "" and runners.registered[name] and runners.registered[name][1] then
+ name = runners.registered[name][1]
+ suffix = file.suffix(name)
+ end
+ if suffix == "" then
+ -- loop over known suffixes
+ for _,s in pairs(runners.suffixes) do
+ result = resolvers.findfile(name .. "." .. s, 'texmfscripts')
+ if result ~= "" then
+ break
+ end
+ end
+ elseif runners.applications[suffix] then
+ result = resolvers.findfile(name, 'texmfscripts')
+ else
+ -- maybe look on path
+ result = resolvers.findfile(name, 'other text files')
+ end
+ end
+ if result and result ~= "" then
+                if not nosplit then
+ local before, after = environment.splitarguments(fullname) -- already done
+ environment.arguments_before, environment.arguments_after = before, after
+ end
+ if internal then
+ arg = { } for _,v in pairs(environment.arguments_after) do arg[#arg+1] = v end
+ environment.ownscript = result
+ dofile(result)
+ else
+local texmfcnf = resolvers.getenv("TEXMFCNF")
+if not texmfcnf or texmfcnf == "" then
+ texmfcnf = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.resolve(resolvers.luacnfspec)))
+ resolvers.setenv("TEXMFCNF",table.concat(texmfcnf,";")) -- for running texexec etc (after tl change to texmf-dist)
+end
+ local binary = runners.applications[file.suffix(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
+ if binary and binary ~= "" then
+ result = binary .. " " .. result
+ end
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ if e_verbose then
+ report()
+ report("executing: %s",command)
+ report()
+ report()
+ io.flush()
+ end
+ -- no os.exec because otherwise we get the wrong return value
+ local code = os.execute(command) -- maybe spawn
+ if code == 0 then
+ return true
+ else
+ if binary then
+ binary = file.addsuffix(binary,os.binsuffix)
+ for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ if lfs.isfile(file.join(p,binary)) then
+ return false
+ end
+ end
+ report()
+ report("This script needs '%s' which seems not to be installed.",binary)
+ report()
+ end
+ return false
+ end
+ end
+ end
+ end
+ end
+ return false
+end
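+-- Illustrative sketch (added comment, not part of the original code; the path is
+-- shortened): for a registered perl script the constructed command looks like
+--
+--   perl <texmf>/scripts/context/perl/texfind.pl <remaining arguments>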
+
+function runners.execute_program(fullname)
+ local noquote = environment.argument("noquotes")
+ if fullname and fullname ~= "" then
+ local state = runners.prepare()
+ if state == 'error' then
+ return false
+ elseif state == 'skip' then
+ return true
+ elseif state == "run" then
+ local before, after = environment.splitarguments(fullname)
+ for k=1,#after do after[k] = resolvers.resolve(after[k]) end
+ environment.initializearguments(after)
+ fullname = gsub(fullname,"^bin:","")
+ local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
+ report()
+ report("executing: %s",command)
+ report()
+ report()
+ io.flush()
+ local code = os.exec(command) -- (fullname,unpack(after)) does not work / maybe spawn
+ return code == 0
+ end
+ end
+ return false
+end
+
+-- the --usekpse flag will fallback (not default) on kpse (hm, we can better update mtx-stubs)
+
+local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
+local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
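+-- Illustrative sketch (added comment, not part of the original code): the expanded
+-- unix stub for the registered texexec runner reads
+--
+--   #!/bin/sh
+--   mtxrun --usekpse --execute texexec.rb "$@"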
+
+function runners.handle_stubs(create)
+    local stubpath = environment.argument('stubpath') or '.' -- 'auto' no longer supported
+ local windows = environment.argument('windows') or environment.argument('mswin') or false
+ local unix = environment.argument('unix') or environment.argument('linux') or false
+ if not windows and not unix then
+ if os.platform == "unix" then
+ unix = true
+ else
+ windows = true
+ end
+ end
+ for _,v in pairs(runners.registered) do
+ local name, doit = v[1], v[2]
+ if doit then
+ local base = gsub(file.basename(name), "%.(.-)$", "")
+ if create then
+ if windows then
+ io.savedata(file.join(stubpath,base..".bat"),format(windows_stub,name))
+ report("windows stub for '%s' created",base)
+ end
+ if unix then
+ io.savedata(file.join(stubpath,base),format(unix_stub,name))
+ report("unix stub for '%s' created",base)
+ end
+ else
+ if windows and (os.remove(file.join(stubpath,base..'.bat')) or os.remove(file.join(stubpath,base..'.cmd'))) then
+ report("windows stub for '%s' removed", base)
+ end
+ if unix and (os.remove(file.join(stubpath,base)) or os.remove(file.join(stubpath,base..'.sh'))) then
+ report("unix stub for '%s' removed",base)
+ end
+ end
+ end
+ end
+end
+
+function runners.resolve_string(filename)
+ if filename and filename ~= "" then
+ runners.report_location(resolvers.resolve(filename))
+ end
+end
+
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
+ if filename and filename ~= "" then
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
+ end
+end
+
+function runners.locate_platform()
+ runners.report_location(os.platform)
+end
+
+function runners.report_location(result)
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
+ end
+ else
+ io.write(result)
+ end
+end
+
+function runners.edit_script(filename) -- we assume that gvim is present on most systems (todo: also in cnf file)
+ local editor = os.getenv("MTXRUN_EDITOR") or os.getenv("TEXMFSTART_EDITOR") or os.getenv("EDITOR") or 'gvim'
+ local rest = resolvers.resolve(filename)
+ if rest ~= "" then
+ local command = editor .. " " .. rest
+ if e_verbose then
+ report()
+ report("starting editor: %s",command)
+ report()
+ report()
+ end
+ os.launch(command)
+ end
+end
+
+function runners.save_script_session(filename, list)
+ local t = { }
+ for i=1,#list do
+ local key = list[i]
+ t[key] = environment.arguments[key]
+ end
+ io.savedata(filename,table.serialize(t,true))
+end
+
+function runners.load_script_session(filename)
+ if lfs.isfile(filename) then
+ local t = io.loaddata(filename)
+ if t then
+ t = loadstring(t)
+ if t then t = t() end
+ for key, value in pairs(t) do
+ environment.arguments[key] = value
+ end
+ end
+ end
+end
+
+function resolvers.launch(str)
+ -- maybe we also need to test on mtxrun.launcher.suffix environment
+ -- variable or on windows consult the assoc and ftype vars and such
+ local launchers = runners.launchers[os.platform] if launchers then
+ local suffix = file.suffix(str) if suffix then
+ local runner = launchers[suffix] if runner then
+ str = runner .. " " .. str
+ end
+ end
+ end
+ os.launch(str)
+end
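+-- Illustrative sketch (added comment, not part of the original code; the entry is
+-- hypothetical): a per platform launcher maps a suffix onto a helper program:
+--
+--   runners.launchers.unix.pdf = "xdg-open"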
+
+function runners.launch_file(filename)
+ trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
+ local pattern = environment.arguments["pattern"]
+ if not pattern or pattern == "" then
+ pattern = filename
+ end
+ if not pattern or pattern == "" then
+ report("provide name or --pattern=")
+ else
+ local t = resolvers.findfiles(pattern,nil,allresults)
+ if not t or #t == 0 then
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
+ end
+ if not t or #t == 0 then
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
+ end
+ if t and #t > 0 then
+ if allresults then
+ for _, v in pairs(t) do
+ report("launching %s", v)
+ resolvers.launch(v)
+ end
+ else
+ report("launching %s", t[1])
+ resolvers.launch(t[1])
+ end
+ else
+ report("no match for %s", pattern)
+ end
+ end
+end
+
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
+function runners.find_mtx_script(filename)
+ local function found(name)
+ local path = file.dirname(name)
+ if path and path ~= "" then
+ return false
+ else
+ local fullname = own and own.path and file.join(own.path,name)
+ return io.exists(fullname) and fullname
+ end
+ end
+ filename = file.addsuffix(filename,"lua")
+ local basename = file.removesuffix(file.basename(filename))
+ local suffix = file.suffix(filename)
+ -- qualified path, raw name
+ local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- current path, raw name
+ fullname = "./" .. filename
+ fullname = io.exists(fullname) and fullname
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- mtx- prefix checking
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ end
+ -- context namespace, just <filename>
+ fullname = resolvers.findfile(filename)
+ return fullname
+end
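+-- Illustrative sketch (added comment, not part of the original code): for
+-- "mtxrun --script fonts" the lookup tries, roughly in this order,
+--
+--   ./fonts.lua, mtx-fonts.lua, mtx-fontss.lua, mtx-font.lua, mtx-t-fonts.lua, ... , fonts.lua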
+
+function runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local passedon = { ... }
+ for i=#passedon,1,-1 do
+ local pi = passedon[i]
+ if pi then
+ table.insert(arguments,1,pi)
+ end
+ end
+end
+
+function runners.execute_ctx_script(filename,...)
+ runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local fullname = runners.find_mtx_script(filename) or ""
+ if file.suffix(fullname) == "cld" then
+ -- handy in editors where we force --autopdf
+ report("running cld script: %s",filename)
+ table.insert(arguments,1,fullname)
+ table.insert(arguments,"--autopdf")
+ fullname = runners.find_mtx_script("context") or ""
+ end
+ -- retry after generate but only if --autogenerate
+ if fullname == "" and environment.argument("autogenerate") then -- might become the default
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ --
+ fullname = runners.find_mtx_script(filename) or ""
+ end
+ -- that should do it
+ if fullname ~= "" then
+ local state = runners.prepare()
+ if state == 'error' then
+ return false
+ elseif state == 'skip' then
+ return true
+ elseif state == "run" then
+ -- load and save ... kind of undocumented
+ arg = { } for _,v in pairs(arguments) do arg[#arg+1] = resolvers.resolve(v) end
+ environment.initializearguments(arg)
+ local loadname = environment.arguments['load']
+ if loadname then
+ if type(loadname) ~= "string" then loadname = file.basename(fullname) end
+ loadname = file.replacesuffix(loadname,"cfg")
+ runners.load_script_session(loadname)
+ end
+ filename = environment.files[1]
+ if e_verbose then
+ report("using script: %s\n",fullname)
+ end
+ environment.ownscript = fullname
+ dofile(fullname)
+ local savename = environment.arguments['save']
+ if savename then
+ local save_list = runners.save_list
+ if save_list and next(save_list) then
+ if type(savename) ~= "string" then savename = file.basename(fullname) end
+ savename = file.replacesuffix(savename,"cfg")
+ runners.save_script_session(savename,save_list)
+ end
+ end
+ return true
+ end
+ else
+ if filename == "" or filename == "help" then
+ local context = resolvers.findfile("mtx-context.lua")
+ trackers.enable("resolvers.locating")
+ if context ~= "" then
+ local result = dir.glob((gsub(context,"mtx%-context","mtx-*"))) -- () needed
+ local valid = { }
+ table.sort(result)
+ for i=1,#result do
+ local scriptname = result[i]
+ local scriptbase = match(scriptname,".*mtx%-([^%-]-)%.lua")
+ if scriptbase then
+ local data = io.loaddata(scriptname)
+local application = match(data,"local application.-=.-(%{.-%})")
+if application then
+ application = loadstring("return " .. application)
+ if application then
+ application = application()
+ local banner = application.banner
+ if banner then
+ local description, version = match(banner,"^(.-) ([%d.]+)$")
+ if description then
+ valid[#valid+1] = { scriptbase, version, description }
+ else
+ valid[#valid+1] = { scriptbase, "", banner }
+ end
+ end
+ end
+end
+ end
+ end
+ if #valid > 0 then
+ application.identify()
+ report("no script name given, known scripts:")
+ report()
+ for k=1,#valid do
+ local v = valid[k]
+ report("%-12s %4s %s",v[1],v[2],v[3])
+ end
+ end
+ else
+ report("no script name given")
+ end
+ else
+ filename = file.addsuffix(filename,"lua")
+ if file.is_qualified_path(filename) then
+ report("unknown script '%s'",filename)
+ else
+ report("unknown script '%s' or 'mtx-%s'",filename,filename)
+ end
+ end
+ return false
+ end
+end
+
+function runners.prefixes()
+ application.identify()
+ report()
+ report(concat(resolvers.allprefixes(true)," "))
+end
+
+function runners.timedrun(filename) -- just for me
+ if filename and filename ~= "" then
+ runners.timed(function() os.execute(filename) end)
+ end
+end
+
+function runners.timed(action)
+ statistics.timed(action)
+end
+
+function runners.associate(filename)
+ os.launch(filename)
+end
+
+function runners.evaluate(code,filename) -- for Luigi
+ if code == "loop" then
+ while true do
+ io.write("> ")
+ local code = io.read()
+ if code ~= "" then
+ local temp = string.match(code,"^= (.*)$")
+ if temp then
+ code = "print("..temp..")"
+ end
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("! " .. (message or code).."\n")
+ else
+ io.write(compiled())
+ end
+ end
+ end
+ else
+ if type(code) ~= "string" or code == "" then
+ code = filename
+ end
+ if code ~= "" then
+ local compiled, message = loadstring(code)
+ if type(compiled) ~= "function" then
+ io.write("invalid lua code: " .. (message or code))
+ return
+ end
+ io.write(compiled())
+ end
+ end
+end
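+-- Illustrative sketch (added comment, not part of the original code): small chunks can
+-- be run directly, and in loop mode a leading "= " wraps the expression in print():
+--
+--   mtxrun --evaluate="print(1+2)"   -- writes 3
+--   mtxrun --evaluate=loop           -- then type:  = 1+2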
+
+function runners.gethelp(filename)
+ local url = environment.argument("url")
+ if url and url ~= "" then
+ local command = string.gsub(environment.argument("command") or "unknown","^%s*\\*(.-)%s*$","%1")
+ url = utilities.templates.replace(url,{ command = command })
+ os.launch(url)
+ else
+ report("no --url given")
+ end
+end
+
+function runners.systeminfo()
+ report("architecture : %s",os.platform or "<unset>")
+ report("operating system : %s",os.name or "<unset>")
+ report("file architecture : %s",os.type or "<unset>")
+ report("binary path : %s",os.selfdir or "<unset>")
+ report("binary suffix : %s",os.binsuffix or "<unset>")
+ report("library suffix : %s",os.libsuffix or "<unset>")
+end
+
+-- this is a bit dirty ... first we store the first filename and next we
+-- split the arguments so that we only see the ones meant for this script
+-- ... later we will use the second half
+
+local filename = environment.files[1] or ""
+local ok = true
+
+local before, after = environment.splitarguments(filename)
+environment.arguments_before, environment.arguments_after = before, after
+environment.initializearguments(before)
+
+instance.lsrmode = environment.argument("lsr") or false
+
+e_verbose = environment.arguments["verbose"] -- delayed till here (we need the ones before script)
+
+if e_verbose then
+ trackers.enable("resolvers.locating")
+end
+
+-- maybe the unset has to go to this level
+
+local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
+
+local e_argument = environment.argument
+
+if e_argument("timedlog") then
+ logs.settimedlog()
+end
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
+
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
+
+ os.setenv("engine","")
+ os.setenv("progname","")
+
+ local remapper = {
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ pfb = "type1 fonts",
+ other = "other text files",
+ }
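+    -- Illustrative sketch (added comment, not part of the original code; the font name
+    -- is hypothetical): the remapper translates suffixes into kpse file types, so
+    --
+    --   resolvers.findfile("texgyrepagella-regular.otf","otf")
+    --
+    -- asks kpse for an "opentype fonts" match when falling back.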
+
+ local progname = e_argument("progname") or 'context'
+
+ local function kpse_initialized()
+ texconfig.kpse_init = true
+ local t = os.clock()
+ local k = kpse.original.new("luatex",progname)
+ local dummy = k:find_file("mtxrun.lua") -- so that we're initialized
+ report("kpse fallback with progname '%s' initialized in %s seconds",progname,os.clock()-t)
+ kpse_initialized = function() return k end
+ return k
+ end
+
+ local findfile = resolvers.findfile
+ local showpath = resolvers.showpath
+
+ if e_argument("forcekpse") then
+
+ function resolvers.findfile(name,kind)
+ return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
+ end
+ function resolvers.showpath(name)
+ return (kpse_initialized():show_path(name)) or ""
+ end
+
+ elseif e_argument("usekpse") or is_mkii_stub then
+
+ resolvers.load()
+
+ function resolvers.findfile(name,kind)
+ local found = findfile(name,kind) or ""
+ if found ~= "" then
+ return found
+ else
+ return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
+ end
+ end
+ function resolvers.showpath(name)
+ local found = showpath(name) or ""
+ if found ~= "" then
+ return found
+ else
+ return (kpse_initialized():show_path(name)) or ""
+ end
+ end
+
+ end
+
+ function runners.loadbase()
+ end
+
+else
+
+ function runners.loadbase(...)
+ if not resolvers.load(...) then
+ report("forcing cache reload")
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ if not resolvers.load(...) then
+ report("the resolver databases are not present or outdated")
+ end
+ end
+ end
+
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
+
+end
+
+-- joke .. reminds me of messing with gigi terminals
+
+if e_argument("ansi") then
+
+ logs.setformatters("ansi")
+
+ local script = e_argument("script") or e_argument("scripts")
+
+ if type(script) == "string" then
+        logs.writer("\027]0;"..script.."\007") -- for Alan to test
+ end
+
+end
+
+if e_argument("script") or e_argument("scripts") then
+
+ -- run a script by loading it (using libs), pass args
+
+ if e_argument("nofiledatabase") then
+ -- handy for mtx-update
+ else
+ runners.loadbase()
+ end
+ if is_mkii_stub then
+ ok = runners.execute_script(filename,false,true)
+ else
+ ok = runners.execute_ctx_script(filename)
+ end
+
+elseif e_argument("evaluate") then
+
+ runners.evaluate(e_argument("evaluate"),filename)
+
+elseif e_argument("selfmerge") then
+
+ -- embed used libraries
+
+ runners.loadbase()
+ local found = locate_libs()
+
+ if found then
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfmerge(mtxrun,own.libs,{ found })
+ application.report("runner updated on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
+ application.report("runner updated on relative path: %s",own.name)
+ end
+ end
+
+elseif e_argument("selfclean") then
+
+ -- remove embedded libraries
+
+ runners.loadbase()
+
+ local mtxrun = resolvers.findfile("mtxrun.lua") -- includes local name
+ if lfs.isfile(mtxrun) then
+ utilities.merger.selfclean(mtxrun)
+ application.report("runner cleaned on resolved path: %s",mtxrun)
+ else
+ utilities.merger.selfclean(own.name)
+ application.report("runner cleaned on relative path: %s",own.name)
+ end
+
+elseif e_argument("selfupdate") then
+
+ runners.loadbase()
+ trackers.enable("resolvers.locating")
+ resolvers.updatescript(own.name,"mtxrun")
+
+elseif e_argument("ctxlua") or e_argument("internal") then
+
+ -- run a script by loading it (using libs)
+
+ runners.loadbase()
+ ok = runners.execute_script(filename,true)
+
+elseif e_argument("execute") then
+
+ -- execute script
+
+ runners.loadbase()
+ ok = runners.execute_script(filename)
+
+elseif e_argument("direct") then
+
+ -- equals bin:
+
+ runners.loadbase()
+ ok = runners.execute_program(filename)
+
+elseif e_argument("edit") then
+
+ -- edit file
+
+ runners.loadbase()
+ runners.edit_script(filename)
+
+elseif e_argument("launch") then
+
+ runners.loadbase()
+ runners.launch_file(filename)
+
+elseif e_argument("associate") then
+
+ runners.associate(filename)
+
+elseif e_argument("gethelp") then
+
+ runners.gethelp()
+
+elseif e_argument("makestubs") then
+
+    -- make stubs (deprecated)
+
+ runners.handle_stubs(true)
+
+elseif e_argument("removestubs") then
+
+    -- remove stubs (deprecated)
+
+ runners.loadbase()
+ runners.handle_stubs(false)
+
+elseif e_argument("resolve") then
+
+ -- resolve string
+
+ runners.loadbase()
+ runners.resolve_string(filename)
+
+elseif e_argument("locate") then
+
+ -- locate file (only database)
+
+ runners.loadbase()
+ runners.locate_file(filename)
+
+elseif e_argument("platform") or e_argument("show-platform") then
+
+ -- locate platform
+
+ runners.loadbase()
+ runners.locate_platform()
+
+elseif e_argument("prefixes") then
+
+ runners.loadbase()
+ runners.prefixes()
+
+elseif e_argument("timedrun") then
+
+    -- run a script and time it
+
+ runners.loadbase()
+ runners.timedrun(filename)
+
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.variables(e_argument("pattern"))
+
+elseif e_argument("configurations") or e_argument("show-configurations") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations()
+
+elseif e_argument("find-file") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
+
+ resolvers.load()
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
+ if not e_pattern then
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
+ elseif type(e_pattern) == "string" then
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
+ end
+
+elseif e_argument("find-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
+
+ resolvers.load()
+ local path = resolvers.findpath(filename, instance.my_format)
+ if e_verbose then
+ report(path)
+ else
+ print(path)
+ end
+
+elseif e_argument("expand-braces") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
+
+elseif e_argument("expand-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
+
+elseif e_argument("expand-var") or e_argument("expand-variable") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
+
+elseif e_argument("show-path") or e_argument("path-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
+
+elseif e_argument("var-value") or e_argument("show-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.variable,environment.files)
+
+elseif e_argument("format-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
+
+ resolvers.load()
+ report(caches.getwritablepath("format"))
+
+elseif e_argument("pattern") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
+
+elseif e_argument("generate") then
+
+ -- luatools
+
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
+
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
+
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(filename)
+
+elseif e_argument("run") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--run",filename)
+
+elseif e_argument("fmt") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--fmt",filename)
+
+elseif e_argument("help") and filename=='base' then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--help")
+
+elseif e_argument("version") then
+
+ application.version()
+
+ application.report("source path",environment.ownbin)
+
+elseif e_argument("directives") then
+
+ directives.show()
+
+elseif e_argument("trackers") then
+
+ trackers.show()
+
+elseif e_argument("experiments") then
+
+ experiments.show()
+
+elseif e_argument("exporthelp") then
+
+ runners.loadbase()
+ application.export(e_argument("exporthelp"),filename)
+
+elseif e_argument("systeminfo") then
+
+ runners.systeminfo()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
+
+ application.help()
+
+elseif find(filename,"^bin:") then
+
+ runners.loadbase()
+ ok = runners.execute_program(filename)
+
+elseif is_mkii_stub then
+
+ -- execute mkii script
+
+ runners.loadbase()
+ ok = runners.execute_script(filename,false,true)
+
+elseif false then
+
+ runners.loadbase()
+ ok = runners.execute_ctx_script(filename)
+ if not ok then
+ ok = runners.execute_script(filename)
+ end
+
+elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to load mtx-base
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations()
+
+else
+ runners.loadbase()
+ runners.execute_ctx_script("mtx-base",filename)
+
+end
+
+if e_verbose then
+ report()
+ report("elapsed lua time: %0.3f seconds",os.runtime())
+end
+
+if os.type ~= "windows" then
+ texio.write("\n") -- is this still valid?
+end
+
+if ok == false then ok = 1 elseif ok == true or ok == nil then ok = 0 end
+
+-- os.exit(ok,true) -- true forces a cleanup in 5.2+
+
+os.exit(ok) -- true forces a cleanup in 5.2+ but reports a wrong number then
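
The --find-file branch above switches between resolvers.findfiles (all matches, with --all) and resolvers.findfile (first match) and then walks either the remaining file arguments or a --pattern string. A minimal standalone sketch of that selection logic, with made-up finder functions standing in for the real resolvers:

-- standalone illustration of the --find-file dispatch above; the finder
-- functions are stand-ins, not the real ConTeXt resolvers
local function findfile(name)
    return "/texmf/" .. name                        -- first match only
end

local function findfiles(name)
    return { "/texmf/" .. name, "/local/" .. name } -- all matches
end

local function locate(names,all)
    local finder = all and findfiles or findfile    -- same selection as in the script
    for i=1,#names do
        local found = finder(names[i])
        if type(found) == "table" then
            print(names[i],table.concat(found," "))
        else
            print(names[i],found)
        end
    end
end

locate({ "context.mkiv" },false) -- cf. mtxrun --find-file context.mkiv
locate({ "context.mkiv" },true)  -- cf. mtxrun --find-file --all context.mkiv
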
diff --git a/scripts/context/stubs/win64/mtxrunjit.exe b/scripts/context/stubs/win64/mtxrunjit.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/mtxrunjit.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/mtxworks.exe b/scripts/context/stubs/win64/mtxworks.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/mtxworks.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/pstopdf.exe b/scripts/context/stubs/win64/pstopdf.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/pstopdf.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/texexec.exe b/scripts/context/stubs/win64/texexec.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/texexec.exe
Binary files differ
diff --git a/scripts/context/stubs/win64/texmfstart.exe b/scripts/context/stubs/win64/texmfstart.exe
new file mode 100644
index 000000000..93290a6e0
--- /dev/null
+++ b/scripts/context/stubs/win64/texmfstart.exe
Binary files differ
diff --git a/tex/context/base/anch-bar.mkiv b/tex/context/base/anch-bar.mkiv
index c7c6190be..b5df21a07 100644
--- a/tex/context/base/anch-bar.mkiv
+++ b/tex/context/base/anch-bar.mkiv
@@ -19,7 +19,7 @@
%D functionality from \type {core-pos}.
%D
%D \starttyping
-%D \definesidebar[whow][rulecolor=green,distance=]
+%D \definesidebar[whow][rulecolor=green,distance=0pt]
%D
%D \input tufte \par
%D \startsidebar
@@ -81,9 +81,6 @@
\let\setupsidebars\setupsidebar
\unexpanded\def\startsidebar
- {\dosingleempty\anch_sidebars_start}
-
-\unexpanded\def\startsidebar
{\dodoubleempty\anch_sidebars_start}
\def\anch_sidebars_start[#1][#2]%
@@ -92,7 +89,7 @@
\advance\c_anch_sidebars_level\plusone
\global\advance\c_anch_sidebars_n\plusone
\c_anch_sidebars_current\c_anch_sidebars_n\relax % relax needed
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\edef\currentsidebar{\the\c_anch_sidebars_level}%
\checksidebarparent
\setupcurrentsidebar[#1]}
@@ -123,7 +120,7 @@
\d_anch_sidebars_distance\dimexpr\scratchdimen+\numexpr\m_level-\plusone\relax\dimexpr\scratchdistance\relax\relax
\fi
\fi
- \startpositionoverlay{text-1}%
+ \startpositionoverlay{\v!text-1}%
\normalexpanded{\setMPpositiongraphicrange % maybe expand in definition
{b:sidebar:\the\c_anch_sidebars_n}%
{e:sidebar:\the\c_anch_sidebars_n}%
@@ -211,15 +208,12 @@
\unexpanded\def\startmarginrule
{\dosingleempty\anch_marginrules_start}
-\unexpanded\def\startmarginrule
- {\dosingleempty\anch_marginrules_start}
-
\def\anch_marginrules_start[#1]% pretty inefficient checking
{\edef\m_anch_marginrules_kind{#1}%
\ifx\m_anch_marginrules_kind\empty
\anch_sidebars_start[\v!margin][]%
\else
- \doifassignmentelse\m_anch_marginrules_kind
+ \doifelseassignment\m_anch_marginrules_kind
{\anch_sidebars_start[\v!margin][#1]}%
{\anch_marginrules_check{#1}%
\anch_sidebars_start[\v!margin:#1][\c!level=#1]}%
diff --git a/tex/context/base/anch-bck.mkvi b/tex/context/base/anch-bck.mkvi
index cccf14ee4..273cf0159 100644
--- a/tex/context/base/anch-bck.mkvi
+++ b/tex/context/base/anch-bck.mkvi
@@ -20,10 +20,6 @@
\unprotect
-% we can flush 5 in one call (saved 4 lua calls) .. brrr wself .. will change
-
-\def\MPposset#1{\ctxcommand{MPposset("#1")}} % will go
-
% This might be overloaded later on:
% \defineoverlay[\v!text-2][\positionoverlay{\v!text-2}]
@@ -50,7 +46,7 @@
% tricky: we need to catch newly set! otherwise an old run can have positions
\unexpanded\def\anch_backgrounds_text_initialize
- {\ctxcommand{doifelsepositionsused()}\enableparpositions\donothing
+ {\doifelsepositionsused\enableparpositions\donothing
\global\let\anch_backgrounds_text_initialize\relax}
\appendtoks
diff --git a/tex/context/base/anch-pgr.lua b/tex/context/base/anch-pgr.lua
index c7f56a92b..b5c2ae628 100644
--- a/tex/context/base/anch-pgr.lua
+++ b/tex/context/base/anch-pgr.lua
@@ -8,8 +8,6 @@ if not modules then modules = { } end modules ['anch-pgr'] = {
-- todo: we need to clean up lists (of previous pages)
-local commands, context = commands, context
-
local format = string.format
local abs = math.abs
local concat, sort = table.concat, table.sort
@@ -19,6 +17,11 @@ local lpegmatch = lpeg.match
local jobpositions = job.positions
local formatters = string.formatters
+local commands = commands
+local context = context
+
+local implement = interfaces.implement
+
local report_graphics = logs.reporter("graphics")
local f_b_tag = formatters["b:%s"]
@@ -59,7 +62,7 @@ local function add(t,x,y,last,direction)
if x == lx and y == ly then
-- quick skip
elseif n == 1 then
--- if abs(lx-x) <= eps or abs(ly-y) <= eps then
+ -- if abs(lx-x) <= eps or abs(ly-y) <= eps then
if abs(lx-x) > eps or abs(ly-y) > eps then
t[n+1] = { x, y }
end
@@ -67,10 +70,9 @@ local function add(t,x,y,last,direction)
local tm = t[n-1]
local px = tm[1]
local py = tm[2]
-if (direction == "down" and y > ly) or (direction == "up" and y < ly) then
- -- move back from too much hang
-else
- if abs(lx-px) <= eps and abs(lx-x) <= eps then
+ if (direction == "down" and y > ly) or (direction == "up" and y < ly) then
+ -- move back from too much hang
+ elseif abs(lx-px) <= eps and abs(lx-x) <= eps then
if abs(ly-y) > eps then
tn[2] = y
end
@@ -81,7 +83,6 @@ else
elseif not last then
t[n+1] = { x, y }
end
-end
end
end
end
@@ -430,7 +431,6 @@ local function calculatemultipar(tag,obeyhang)
end
-- Obeying intermediate changes of left/rightskip makes no sense as it will
-- look bad, so we only look at the begin situation.
- --
local bn = b.n
if bn then
local bp = collected[f_p_tag(bn)]
@@ -555,7 +555,7 @@ f_template_b = formatters[f_template_b]
f_template_c = formatters[f_template_c]
f_template_d = formatters[f_template_d]
-function backgrounds.fetchmultipar(n,anchor,page,obeyhang)
+local function fetchmultipar(n,anchor,page,obeyhang)
local data = pbg[n]
if not data then
data = calculatemultipar(n,obeyhang)
@@ -599,17 +599,25 @@ function backgrounds.fetchmultipar(n,anchor,page,obeyhang)
return f_template_a(0,"origin",0,0,0)
end
+backgrounds.fetchmultipar = fetchmultipar
+
backgrounds.point = f_point
backgrounds.pair = f_pair
backgrounds.path = f_path
-function commands.fetchmultipar(n,anchor,page)
- context(backgrounds.fetchmultipar(n,anchor,page))
-end
+-- n anchor page
-function commands.fetchmultishape(n,anchor,page)
- context(backgrounds.fetchmultipar(n,anchor,page,true))
-end
+implement {
+ name = "fetchmultipar",
+ actions = { fetchmultipar, context },
+ arguments = { "string", "string", "integer" }
+}
+
+implement {
+ name = "fetchmultishape",
+ actions = { fetchmultipar, context },
+ arguments = { "string", "string", "integer", true }
+}
local f_template_a = [[
path posboxes[], posregions[] ;
@@ -628,67 +636,62 @@ posregions[%s] := (%p,%p)--(%p,%p)--(%p,%p)--(%p,%p)--cycle ;
f_template_a = formatters[f_template_a]
f_template_b = formatters[f_template_b]
-function commands.fetchposboxes(tags,anchor,page) -- no caching (yet) / todo: anchor, page
- local collected = jobpositions.collected
- if type(tags) == "string" then
- tags = utilities.parsers.settings_to_array(tags)
- end
- local list, nofboxes = { }, 0
- for i=1,#tags do
- local tag= tags[i]
- local c = collected[tag]
- if c then
- local r = c.r
- if r then
- r = collected[r]
+implement {
+ name = "fetchposboxes",
+ arguments = { "string", "string", "integer" },
+ actions = function(tags,anchor,page) -- no caching (yet) / todo: anchor, page
+ local collected = jobpositions.collected
+ if type(tags) == "string" then
+ tags = utilities.parsers.settings_to_array(tags)
+ end
+ local list, nofboxes = { }, 0
+ for i=1,#tags do
+ local tag= tags[i]
+ local c = collected[tag]
+ if c then
+ local r = c.r
if r then
- local rx, ry, rw, rh, rd = r.x, r.y, r.w, r.h, r.d
- local cx = c.x - rx
- local cy = c.y - ry
- local cw = cx + c.w
- local ch = cy + c.h
- local cd = cy - c.d
- nofboxes = nofboxes + 1
- list[nofboxes] = f_template_b(
- nofboxes,c.p,
- nofboxes,cx,ch,cw,ch,cw,cd,cx,cd,
- nofboxes,0,rh,rw,rh,rw,rd,0,rd
- )
+ r = collected[r]
+ if r then
+ local rx, ry, rw, rh, rd = r.x, r.y, r.w, r.h, r.d
+ local cx = c.x - rx
+ local cy = c.y - ry
+ local cw = cx + c.w
+ local ch = cy + c.h
+ local cd = cy - c.d
+ nofboxes = nofboxes + 1
+ list[nofboxes] = f_template_b(
+ nofboxes,c.p,
+ nofboxes,cx,ch,cw,ch,cw,cd,cx,cd,
+ nofboxes,0,rh,rw,rh,rw,rd,0,rd
+ )
+ end
end
+ else
+ print("\n missing",tag)
end
- else
- print("\n missing",tag)
end
+ context(f_template_a(nofboxes,list))
end
- context(f_template_a(nofboxes,list))
-end
+}
local doifelse = commands.doifelse
-function commands.doifelsemultipar(n,page,obeyhang)
- local data = pbg[n]
- if not data then
- data = calculatemultipar(n,obeyhang)
- pbg[n] = data
- end
- if page then
- doifelse(data and data[page] and true)
- else
- doifelse(data and next(data) and true)
- end
-end
-
-function commands.doifelserangeonpage(first,last,page)
- local collected = jobpositions.collected
- local f = collected[first]
- if not f or f.p == true then
- doifelse(false)
- return
- end
- local l = collected[last]
- if not l or l.p == true then
- doifelse(false)
- return
+implement {
+ name = "doifelserangeonpage",
+ arguments = { "string", "string", "integer" },
+ actions = function(first,last,page)
+ local collected = jobpositions.collected
+ local f = collected[first]
+ if not f or f.p == true then
+ doifelse(false)
+ return
+ end
+ local l = collected[last]
+ if not l or l.p == true then
+ doifelse(false)
+ return
+ end
+ doifelse(page >= f.p and page <= l.p)
end
- doifelse(page >= f.p and page <= l.p)
-end
+}
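
The changes to anch-pgr.lua replace ad hoc commands.* entry points with declarative interfaces.implement registrations: each entry couples a name, an argument specification, and a chain of actions, where a trailing context action pipes the result back to the TeX end (the real registration also produces the \clf_... commands used in the .mkiv files below). A reduced standalone sketch of the idea, with mock implement and context helpers rather than the ConTeXt originals:

-- mocked-up sketch of the implement{} registration pattern; 'implement'
-- and 'context' are simplified stand-ins, not the ConTeXt originals
local registered = { }

local function context(s)
    print("to tex: " .. tostring(s)) -- the real context() writes back to TeX
end

local function implement(t)
    registered[t.name] = function(...)
        local result = t.actions[1](...)  -- the first action does the work
        for i=2,#t.actions do
            result = t.actions[i](result) -- later actions (here: context) consume the result
        end
        return result
    end
end

local function fetchmultipar(n,anchor,page)
    return "path data for " .. n .. " on page " .. page
end

implement {
    name      = "fetchmultipar",
    actions   = { fetchmultipar, context },
    arguments = { "string", "string", "integer" }, -- consumed by the argument scanner in the real system
}

registered.fetchmultipar("sidebar:1","text",2)
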
diff --git a/tex/context/base/anch-pgr.mkiv b/tex/context/base/anch-pgr.mkiv
index c18a1b669..56ff656d9 100644
--- a/tex/context/base/anch-pgr.mkiv
+++ b/tex/context/base/anch-pgr.mkiv
@@ -43,13 +43,15 @@
\expandafter\gobbleoneargument
\fi}
-\unexpanded\def\doifpositionactionelse#1%
+\unexpanded\def\doifelsepositionaction#1%
{\ifcsname\??positionaction#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifpositionactionelse\doifelsepositionaction
+
\unexpanded\def\dopositionaction#1%
{\edef\currentpositionaction{#1}%
\ifcsname\??positionaction\currentpositionaction\endcsname
@@ -57,7 +59,7 @@
\fi}
\def\anch_positions_action_indeed
- {\doifpositionelse\currentpositionaction
+ {\doifelseposition\currentpositionaction
\anch_positions_action_indeed_yes
\anch_positions_action_indeed_nop}
@@ -127,23 +129,26 @@
%D the head and tail anchors from this one. We set these
%D anchors before and after each page.
+\newdimen\c_anch_page_width
+\newdimen\c_anch_page_height
+
\unexpanded\def\anch_positions_register_page#1% this one is flushed first ! ... can't we avoid this one
{\ifpositioning\ifcase\realpageno\or
- \ifdim\printpaperheight=\paperheight
- \ifdim\printpaperwidth=\paperwidth
- % not needed,
+ \ifdim\c_anch_page_height=\paperheight
+ \ifdim\c_anch_page_width=\paperwidth
+ % no change
\else
- \anch_positions_register_page_indeed{#1}%
+ \c_anch_page_width \paperwidth
+ \c_anch_page_height\paperheight
+ \anch_make_page_box{#1}% \ifvbox#1\setbox#1\hbox{\box#1}\fi
\fi
\else
- \anch_positions_register_page_indeed{#1}%
+ \c_anch_page_width \paperwidth
+ \c_anch_page_height\paperheight
+ \anch_make_page_box{#1}% \ifvbox#1\setbox#1\hbox{\box#1}\fi
\fi
\fi\fi}
-\def\anch_positions_register_page_indeed#1% maybe like text
- {\ifvbox#1\setbox#1\hbox{\box#1}\fi
- \anch_make_page_box{#1}}
-
\unexpanded\def\anch_positions_place_anchors
{\ifpositioning
\anch_positions_place_anchors_yes
@@ -251,17 +256,17 @@
\def\MPoverlayanchor#1{\MPpos\MPanchorid}
\def\anch_positions_overlay_compose
- {\vbox to \overlayheight
+ {\vbox to \d_overlay_height
{%\writestatus{!!!}{\currentpositionoverlay/\MPanchoridentifier/\MPanchornumber}%
\edef\MPanchorid{\currentpositionoverlay::\MPanchoridentifier:\MPanchornumber}% realpageno
% \edef\MPanchor##1{\MPpos\MPanchorid}%
\let\MPanchor\MPoverlayanchor % no need to fetch it already, seldom used
\the\everyinsertpositionaction
\copyposition{\currentpositionoverlay::\MPanchoridentifier}\MPanchorid
- \setbox\scratchbox\hbox to \overlaywidth{\dopositionaction{\currentpositionoverlay::\MPanchoridentifier}\hss}%
- \ht\scratchbox\overlayheight
+ \setbox\scratchbox\hbox to \d_overlay_width{\dopositionaction{\currentpositionoverlay::\MPanchoridentifier}\hss}%
+ \ht\scratchbox\d_overlay_height
\dp\scratchbox\zeropoint
- \ctxcommand{markregionbox(\number\scratchbox,"\MPanchorid")}% needs an hbox
+ \anch_mark_tagged_box\scratchbox\MPanchorid % needs an hbox
\box\scratchbox
\vfill}}
@@ -284,13 +289,13 @@
\endgroup}
\def\anch_positions_region_overlay_compose
- {\vbox to \overlayheight
+ {\vbox to \d_overlay_height
{\let\MPanchorid\currentpositionregion
\let\MPanchor\MPoverlayanchor % no need to fetch it already, seldom used
\the\everyinsertpositionaction
\copyposition{\currentpositionoverlay::\MPanchoridentifier}\MPanchorid
- \setbox\scratchbox\hbox to \overlaywidth{\dopositionaction{\currentpositionoverlay::\MPanchoridentifier}\hss}%
- \ht\scratchbox\overlayheight
+ \setbox\scratchbox\hbox to \d_overlay_width{\dopositionaction{\currentpositionoverlay::\MPanchoridentifier}\hss}%
+ \ht\scratchbox\d_overlay_height
\dp\scratchbox\zeropoint
\box\scratchbox
\vfill}}
@@ -475,7 +480,7 @@
{\handlepositionaction\anch_positions_meta_graphic_handle_range_indeed\with{#1}{#2}{#3}{#4}\on{#2}}
\def\anch_positions_meta_graphic_insert_range#1#2#3#4% pos pos tag setups
- {\ctxcommand{doifelserangeonpage("#1","#2",\number\realpageno)}%
+ {\clf_doifelserangeonpage{#1}{#2}\realpageno
{\def\currentposition{#1}%
\MPpositiongraphic{#3}{#4}}%
{}}
@@ -497,8 +502,8 @@
% Helpers:
-\def\MPgetposboxes #1#2{\ctxcommand{fetchposboxes("#1","#2",\the\realpageno)}}
-\def\MPgetmultipars #1#2{\ctxcommand{fetchmultipar("#1","#2",\the\realpageno)}}
-\def\MPgetmultishapes#1#2{\ctxcommand{fetchmultishape("#1","#2",\the\realpageno)}}
+\def\MPgetposboxes #1#2{\clf_fetchposboxes {#1}{#2}\realpageno}
+\def\MPgetmultipars #1#2{\clf_fetchmultipar {#1}{#2}\realpageno}
+\def\MPgetmultishapes#1#2{\clf_fetchmultishape{#1}{#2}\realpageno}
\protect \endinput
diff --git a/tex/context/base/anch-pos.lua b/tex/context/base/anch-pos.lua
index 9cc9fb128..77e36e85e 100644
--- a/tex/context/base/anch-pos.lua
+++ b/tex/context/base/anch-pos.lua
@@ -14,11 +14,13 @@ more efficient.</p>
-- plus (extra) is obsolete but we will keep it for a while
+-- context(new_latelua_node(f_enhance(tag)))
+-- =>
+-- context.lateluafunction(function() f_enhance(tag) end)
+
-- maybe replace texsp by our own converter (stay at the lua end)
-- eventually mp will have large numbers so we can use sp there too
-local commands, context = commands, context
-
local tostring, next, rawget, setmetatable = tostring, next, rawget, setmetatable
local sort = table.sort
local format, gmatch, match = string.format, string.gmatch, string.match
@@ -26,19 +28,41 @@ local rawget = rawget
local lpegmatch = lpeg.match
local insert, remove = table.insert, table.remove
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-local texsp = tex.sp
------ texsp = string.todimen -- because we cache this is much faster but no rounding
+local scanners = tokens.scanners
+local scanstring = scanners.string
+local scaninteger = scanners.integer
+local scandimen = scanners.dimen
+
+local compilescanner = tokens.compile
+local scanners = interfaces.scanners
+
+local commands = commands
+local context = context
+
+local tex = tex
local texgetcount = tex.getcount
-local texgetbox = tex.getbox
local texsetcount = tex.setcount
local texget = tex.get
+local texsp = tex.sp
+----- texsp = string.todimen -- because we cache this is much faster but no rounding
local pdf = pdf -- h and v are variables
local setmetatableindex = table.setmetatableindex
-local new_latelua = nodes.pool.latelua
-local find_tail = node.slide
+
+local nuts = nodes.nuts
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local getbox = nuts.getbox
+local getskip = nuts.getskip
+
+local find_tail = nuts.tail
+
+local new_latelua = nuts.pool.latelua
+local new_latelua_node = nodes.pool.latelua
local variables = interfaces.variables
local v_text = variables.text
@@ -104,11 +128,28 @@ local nofusedregions = 0
local nofmissingregions = 0
local nofregular = 0
+jobpositions.used = false
+
-- todo: register subsets and count them indepently
local function initializer()
tobesaved = jobpositions.tobesaved
collected = jobpositions.collected
+ -- add sparse regions
+ local pages = structures.pages.collected
+ if pages then
+ local last = nil
+ for p=1,#pages do
+ local region = "page:" .. p
+ local data = collected[region]
+ if data then
+ last = data
+ last.p = nil -- no need for a page
+ elseif last then
+ collected[region] = last
+ end
+ end
+ end
-- enhance regions with paragraphs
for tag, data in next, collected do
local region = data.r
@@ -148,6 +189,7 @@ local function initializer()
end
-- so, we can be sparse and don't need 'or 0' code
end
+ jobpositions.used = next(collected)
end
job.register('job.positions.collected', tobesaved, initializer)
@@ -164,9 +206,12 @@ local nofpages = nil
-- beware ... we're not sparse here as lua will reserve slots for the nilled
+local getpos = function() getpos = backends.codeinjections.getpos return getpos () end
+local gethpos = function() gethpos = backends.codeinjections.gethpos return gethpos() end
+local getvpos = function() getvpos = backends.codeinjections.getvpos return getvpos() end
+
local function setdim(name,w,h,d,extra) -- will be used when we move to sp allover
- local x = pdf.h
- local y = pdf.v
+ local x, y = getpos()
if x == 0 then x = nil end
if y == 0 then y = nil end
if w == 0 then w = nil end
@@ -216,10 +261,13 @@ local function enhance(data)
data.r = region
end
if data.x == true then
- data.x = pdf.h
- end
- if data.y == true then
- data.y = pdf.v
+ if data.y == true then
+ data.x, data.y = getpos()
+ else
+ data.x = gethpos()
+ end
+ elseif data.y == true then
+ data.y = getvpos()
end
if data.p == true then
data.p = texgetcount("realpageno")
@@ -239,9 +287,15 @@ local function enhance(data)
return data
end
-local function set(name,index,val)
+-- analyze some files (with lots of margindata) and then, when only one key is used, optionally
+-- use that one instead of a table (so, a 3rd / 4th argument: key, e.g. "x")
+
+local function set(name,index,val) -- ,key
local data = enhance(val or index)
if val then
+-- if data[key] and not next(next(data)) then
+-- data = data[key]
+-- end
container = tobesaved[name]
if not container then
tobesaved[name] = {
@@ -269,7 +323,26 @@ jobpositions.setall = setall
jobpositions.set = set
jobpositions.get = get
-commands.setpos = setall
+-- scanners.setpos = setall
+
+-- trackers.enable("tokens.compi*")
+
+-- something weird: the compiler fails us here
+
+scanners.dosaveposition = compilescanner {
+ actions = setall, -- name p x y
+ arguments = { "string", "integer", "dimen", "dimen" }
+}
+
+scanners.dosavepositionwhd = compilescanner { -- somehow fails
+ actions = setall, -- name p x y w h d
+ arguments = { "string", "integer", "dimen", "dimen", "dimen", "dimen", "dimen" }
+}
+
+scanners.dosavepositionplus = compilescanner {
+ actions = setall, -- name p x y w h d extra
+ arguments = { "string", "integer", "dimen", "dimen", "dimen", "dimen", "dimen", "string" }
+}
-- will become private table (could also become attribute driven but too nasty
-- as attributes can bleed e.g. in margin stuff)
@@ -279,7 +352,7 @@ commands.setpos = setall
function jobpositions.b_col(tag)
tobesaved[tag] = {
r = true,
- x = pdf.h,
+ x = gethpos(),
w = 0,
}
insert(columns,tag)
@@ -291,25 +364,33 @@ function jobpositions.e_col(tag)
if not t then
-- something's wrong
else
- t.w = pdf.h - t.x
+ t.w = gethpos() - t.x
t.r = region
end
remove(columns)
column = columns[#columns]
end
-function commands.bcolumn(tag,register) -- name will change
+scanners.bposcolumn = function() -- tag
+ local tag = scanstring()
insert(columns,tag)
column = tag
- if register then
- context(new_latelua(f_b_column(tag)))
- end
end
-function commands.ecolumn(register) -- name will change
- if register then
- context(new_latelua(f_e_column()))
- end
+scanners.bposcolumnregistered = function() -- tag
+ local tag = scanstring()
+ insert(columns,tag)
+ column = tag
+ context(new_latelua_node(f_b_column(tag)))
+end
+
+scanners.eposcolumn = function()
+ remove(columns)
+ column = columns[#columns]
+end
+
+scanners.eposcolumnregistered = function()
+ context(new_latelua_node(f_e_column()))
remove(columns)
column = columns[#columns]
end
@@ -318,8 +399,7 @@ end
function jobpositions.b_region(tag)
local last = tobesaved[tag]
- last.x = pdf.h
- last.y = pdf.v
+ last.x, last.y = getpos()
last.p = texgetcount("realpageno")
insert(regions,tag)
region = tag
@@ -327,75 +407,85 @@ end
function jobpositions.e_region(correct)
local last = tobesaved[region]
+ local v = getvpos()
if correct then
- last.h = last.y - pdf.v
+ last.h = last.y - v
end
- last.y = pdf.v
+ last.y = v
remove(regions)
region = regions[#regions]
end
-function jobpositions.markregionbox(n,tag,correct)
+local function setregionbox(n,tag)
if not tag or tag == "" then
nofregions = nofregions + 1
tag = f_region(nofregions)
end
- local box = texgetbox(n)
- local w = box.width
- local h = box.height
- local d = box.depth
+ local box = getbox(n)
+ local w = getfield(box,"width")
+ local h = getfield(box,"height")
+ local d = getfield(box,"depth")
tobesaved[tag] = {
p = true,
x = true,
- y = pdf.v, -- true,
+ y = getvpos(), -- true,
w = w ~= 0 and w or nil,
h = h ~= 0 and h or nil,
d = d ~= 0 and d or nil,
}
+ return tag, box
+end
+
+local function markregionbox(n,tag,correct)
+ local tag, box = setregionbox(n,tag)
local push = new_latelua(f_b_region(tag))
local pop = new_latelua(f_e_region(tostring(correct))) -- todo: check if tostring is needed with formatter
-- maybe we should construct a hbox first (needs experimenting) so that we can avoid some at the tex end
- local head = box.list
+ local head = getlist(box)
if head then
local tail = find_tail(head)
- head.prev = push
- push.next = head
- pop .prev = tail
- tail.next = pop
+ setfield(head,"prev",push)
+ setfield(push,"next",head)
+ setfield(pop,"prev",tail)
+ setfield(tail,"next",pop)
else -- we can have a simple push/pop
- push.next = pop
- pop.prev = push
+ setfield(push,"next",pop)
+ setfield(pop,"prev",push)
end
- box.list = push
+ setfield(box,"list",push)
end
+jobpositions.markregionbox = markregionbox
+jobpositions.setregionbox = setregionbox
+
function jobpositions.enhance(name)
enhance(tobesaved[name])
end
-function commands.pos(name,t)
- tobesaved[name] = t
- context(new_latelua(f_enhance(name)))
-end
+-- scanners.pos = function(name,t) -- name t
+-- local name = scanstring()
+-- tobesaved[name] = scanstring()
+-- context(new_latelua_node(f_enhance(name)))
+-- end
local nofparagraphs = 0
-function commands.parpos() -- todo: relate to localpar (so this is an intermediate variant)
+scanners.parpos = function() -- todo: relate to localpar (so this is an intermediate variant)
nofparagraphs = nofparagraphs + 1
texsetcount("global","c_anch_positions_paragraph",nofparagraphs)
- local strutbox = texgetbox("strutbox")
+ local strutbox = getbox("strutbox")
local t = {
p = true,
c = true,
r = true,
x = true,
y = true,
- h = strutbox.height,
- d = strutbox.depth,
+ h = getfield(strutbox,"height"),
+ d = getfield(strutbox,"depth"),
hs = texget("hsize"),
}
- local leftskip = texget("leftskip").width
- local rightskip = texget("rightskip").width
+ local leftskip = getfield(getskip("leftskip"),"width")
+ local rightskip = getfield(getskip("rightskip"),"width")
local hangindent = texget("hangindent")
local hangafter = texget("hangafter")
local parindent = texget("parindent")
@@ -420,10 +510,11 @@ function commands.parpos() -- todo: relate to localpar (so this is an intermedia
end
local tag = f_p_tag(nofparagraphs)
tobesaved[tag] = t
- context(new_latelua(f_enhance(tag)))
+ context(new_latelua_node(f_enhance(tag)))
end
-function commands.posxy(name) -- can node.write be used here?
+scanners.dosetposition = function() -- name
+ local name = scanstring()
tobesaved[name] = {
p = true,
c = column,
@@ -432,53 +523,73 @@ function commands.posxy(name) -- can node.write be used here?
y = true,
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua(f_enhance(name)))
+ context(new_latelua_node(f_enhance(name)))
end
-function commands.poswhd(name,w,h,d)
+scanners.dosetpositionwhd = function() -- name w h d extra
+ local name = scanstring()
tobesaved[name] = {
p = true,
c = column,
r = true,
x = true,
y = true,
- w = w,
- h = h,
- d = d,
+ w = scandimen(),
+ h = scandimen(),
+ d = scandimen(),
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua(f_enhance(name)))
+ context(new_latelua_node(f_enhance(name)))
end
-function commands.posplus(name,w,h,d,extra)
+scanners.dosetpositionbox = function() -- name box
+ local name = scanstring()
+ local box = getbox(scaninteger())
tobesaved[name] = {
p = true,
c = column,
r = true,
x = true,
y = true,
- w = w,
- h = h,
- d = d,
+ w = getfield(box,"width"),
+ h = getfield(box,"height"),
+ d = getfield(box,"depth"),
n = nofparagraphs > 0 and nofparagraphs or nil,
- e = extra,
}
- context(new_latelua(f_enhance(name)))
+ context(new_latelua_node(f_enhance(name)))
+end
+
+scanners.dosetpositionplus = function() -- name w h d extra
+ local name = scanstring()
+ tobesaved[name] = {
+ p = true,
+ c = column,
+ r = true,
+ x = true,
+ y = true,
+ w = scandimen(),
+ h = scandimen(),
+ d = scandimen(),
+ n = nofparagraphs > 0 and nofparagraphs or nil,
+ e = scanstring(),
+ }
+ context(new_latelua_node(f_enhance(name)))
end
-function commands.posstrut(name,w,h,d)
- local strutbox = texgetbox("strutbox")
+scanners.dosetpositionstrut = function() -- name
+ local name = scanstring()
+ local strutbox = getbox("strutbox")
tobesaved[name] = {
p = true,
c = column,
r = true,
x = true,
y = true,
- h = strutbox.height,
- d = strutbox.depth,
+ h = getfield(strutbox,"height"),
+ d = getfield(strutbox,"depth"),
n = nofparagraphs > 0 and nofparagraphs or nil,
}
- context(new_latelua(f_enhance(name)))
+ context(new_latelua_node(f_enhance(name)))
end
function jobpositions.getreserved(tag,n)
@@ -512,7 +623,17 @@ end
function jobpositions.region(id)
local jpi = collected[id]
- return jpi and jpi.r or false
+ if jpi then
+ local r = jpi.r
+ if r then
+ return r
+ end
+ local p = jpi.p
+ if p then
+ return "page:" .. p
+ end
+ end
+ return false
end
function jobpositions.column(id)
@@ -718,11 +839,23 @@ jobpositions.onsamepage = onsamepage
-- interface
-commands.replacepospxywhd = jobpositions.replace
-commands.copyposition = jobpositions.copy
+scanners.replacepospxywhd = function() -- name page x y w h d
+ collected[scanstring()] = {
+ p = scaninteger(),
+ x = scandimen(),
+ y = scandimen(),
+ w = scandimen(),
+ h = scandimen(),
+ d = scandimen(),
+ }
+end
-function commands.MPp(id)
- local jpi = collected[id]
+scanners.copyposition = function() -- target source
+ collected[scanstring()] = collected[scanstring()]
+end
+
+scanners.MPp = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local p = jpi.p
if p and p ~= true then
@@ -733,70 +866,70 @@ function commands.MPp(id)
context('0')
end
-function commands.MPx(id)
- local jpi = collected[id]
+scanners.MPx = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local x = jpi.x
if x and x ~= true and x ~= 0 then
- context("%.5fpt",x*pt)
+ context("%.5Fpt",x*pt)
return
end
end
context('0pt')
end
-function commands.MPy(id)
- local jpi = collected[id]
+scanners.MPy = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local y = jpi.y
if y and y ~= true and y ~= 0 then
- context("%.5fpt",y*pt)
+ context("%.5Fpt",y*pt)
return
end
end
context('0pt')
end
-function commands.MPw(id)
- local jpi = collected[id]
+scanners.MPw = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local w = jpi.w
if w and w ~= 0 then
- context("%.5fpt",w*pt)
+ context("%.5Fpt",w*pt)
return
end
end
context('0pt')
end
-function commands.MPh(id)
- local jpi = collected[id]
+scanners.MPh = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local h = jpi.h
if h and h ~= 0 then
- context("%.5fpt",h*pt)
+ context("%.5Fpt",h*pt)
return
end
end
context('0pt')
end
-function commands.MPd(id)
- local jpi = collected[id]
+scanners.MPd = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local d = jpi.d
if d and d ~= 0 then
- context("%.5fpt",d*pt)
+ context("%.5Fpt",d*pt)
return
end
end
context('0pt')
end
-function commands.MPxy(id)
- local jpi = collected[id]
+scanners.MPxy = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
- context('(%.5fpt,%.5fpt)',
+ context('(%.5Fpt,%.5Fpt)',
jpi.x*pt,
jpi.y*pt
)
@@ -805,10 +938,10 @@ function commands.MPxy(id)
end
end
-function commands.MPll(id)
- local jpi = collected[id]
+scanners.MPll = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
- context('(%.5fpt,%.5fpt)',
+ context('(%.5Fpt,%.5Fpt)',
jpi.x *pt,
(jpi.y-jpi.d)*pt
)
@@ -817,10 +950,10 @@ function commands.MPll(id)
end
end
-function commands.MPlr(id)
- local jpi = collected[id]
+scanners.MPlr = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
- context('(%.5fpt,%.5fpt)',
+ context('(%.5Fpt,%.5Fpt)',
(jpi.x + jpi.w)*pt,
(jpi.y - jpi.d)*pt
)
@@ -829,10 +962,10 @@ function commands.MPlr(id)
end
end
-function commands.MPur(id)
- local jpi = collected[id]
+scanners.MPur = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
- context('(%.5fpt,%.5fpt)',
+ context('(%.5Fpt,%.5Fpt)',
(jpi.x + jpi.w)*pt,
(jpi.y + jpi.h)*pt
)
@@ -841,10 +974,10 @@ function commands.MPur(id)
end
end
-function commands.MPul(id)
- local jpi = collected[id]
+scanners.MPul = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
- context('(%.5fpt,%.5fpt)',
+ context('(%.5Fpt,%.5Fpt)',
jpi.x *pt,
(jpi.y + jpi.h)*pt
)
@@ -858,7 +991,7 @@ local function MPpos(id)
if jpi then
local p = jpi.p
if p then
- context("%s,%.5fpt,%.5fpt,%.5fpt,%.5fpt,%.5fpt",
+ context("%s,%.5Fpt,%.5Fpt,%.5Fpt,%.5Fpt,%.5Fpt",
p,
jpi.x*pt,
jpi.y*pt,
@@ -872,10 +1005,12 @@ local function MPpos(id)
context('0,0,0,0,0,0') -- for mp only
end
-commands.MPpos = MPpos
+scanners.MPpos = function() -- name
+ MPpos(scanstring())
+end
-function commands.MPn(id)
- local jpi = collected[id]
+scanners.MPn = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local n = jpi.n
if n then
@@ -886,25 +1021,28 @@ function commands.MPn(id)
context(0)
end
-function commands.MPc(id)
- local jpi = collected[id]
+scanners.MPc = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local c = jpi.c
- if c and p ~= true then
+ if c and c ~= true then
context(c)
return
end
end
- context(c) -- number
+ context('0') -- okay ?
end
-function commands.MPr(id)
- local jpi = collected[id]
+scanners.MPr = function() -- name
+ local jpi = collected[scanstring()]
if jpi then
local r = jpi.r
- if r and p ~= true then
+ if r and r ~= true then
context(r)
- return
+ end
+ local p = jpi.p
+ if p then
+ context("page:" .. p)
end
end
end
@@ -916,7 +1054,7 @@ local function MPpardata(n)
t = collected[tag]
end
if t then
- context("%.5fpt,%.5fpt,%.5fpt,%.5fpt,%s,%.5fpt",
+ context("%.5Fpt,%.5Fpt,%.5Fpt,%.5Fpt,%s,%.5Fpt",
t.hs*pt,
t.ls*pt,
t.rs*pt,
@@ -929,29 +1067,32 @@ local function MPpardata(n)
end
end
-commands.MPpardata = MPpardata
+scanners.MPpardata = function() -- name
+ MPpardata(scanstring())
+end
-function commands.MPposset(id) -- special helper, used in backgrounds
- local b = f_b_tag(id)
- local e = f_e_tag(id)
- local w = f_w_tag(id)
+scanners.MPposset = function() -- name (special helper, used in backgrounds)
+ local name = scanstring()
+ local b = f_b_tag(name)
+ local e = f_e_tag(name)
+ local w = f_w_tag(name)
local p = f_p_tag(jobpositions.n(b))
MPpos(b) context(",") MPpos(e) context(",") MPpos(w) context(",") MPpos(p) context(",") MPpardata(p)
end
-function commands.MPls(id)
- local t = collected[id]
- if t then
- context("%.5fpt",t.ls*pt)
+scanners.MPls = function() -- name
+ local jpi = collected[scanstring()]
+ if jpi then
+ context("%.5Fpt",jpi.ls*pt)
else
context("0pt")
end
end
-function commands.MPrs(id)
- local t = collected[id]
- if t then
- context("%.5fpt",t.rs*pt)
+scanners.MPrs = function() -- name
+ local jpi = collected[scanstring()]
+ if jpi then
+ context("%.5Fpt",jpi.rs*pt)
else
context("0pt")
end
@@ -959,8 +1100,10 @@ end
local splitter = lpeg.tsplitat(",")
-function commands.MPplus(id,n,default)
- local jpi = collected[id]
+scanners.MPplus = function() -- name n default
+ local jpi = collected[scanstring()]
+ local n = scaninteger()
+ local default = scanstring()
if jpi then
local e = jpi.e
if e then
@@ -976,59 +1119,79 @@ function commands.MPplus(id,n,default)
context(default)
end
-function commands.MPrest(id,default)
- local jpi = collected[id]
+scanners.MPrest = function() -- name default
+ local jpi = collected[scanstring()]
+ local default = scanstring()
context(jpi and jpi.e or default)
end
-function commands.MPxywhd(id)
- local t = collected[id]
- if t then
- context("%.5fpt,%.5fpt,%.5fpt,%.5fpt,%.5fpt",
- t.x*pt,
- t.y*pt,
- t.w*pt,
- t.h*pt,
- t.d*pt
+scanners.MPxywhd = function() -- name
+ local jpi = collected[scanstring()]
+ if jpi then
+ context("%.5Fpt,%.5Fpt,%.5Fpt,%.5Fpt,%.5Fpt",
+ jpi.x*pt,
+ jpi.y*pt,
+ jpi.w*pt,
+ jpi.h*pt,
+ jpi.d*pt
)
else
context("0,0,0,0,0") -- for mp only
end
end
-local doif, doifelse = commands.doif, commands.doifelse
+local doif = commands.doif
+local doifelse = commands.doifelse
-function commands.doifpositionelse(name)
- doifelse(collected[name])
+scanners.doifelseposition = function() -- name
+ doifelse(collected[scanstring()])
end
-function commands.doifposition(name)
- doif(collected[name])
+scanners.doifposition = function() -- name
+ doif(collected[scanstring()])
end
-function commands.doifpositiononpage(name,page) -- probably always realpageno
- local c = collected[name]
- doifelse(c and c.p == page)
+scanners.doifelsepositiononpage = function() -- name page -- probably always realpageno
+ local c = collected[scanstring()]
+ local p = scaninteger()
+ doifelse(c and c.p == p)
end
-function commands.doifoverlappingelse(one,two,overlappingmargin)
- doifelse(overlapping(one,two,overlappingmargin))
+scanners.doifelseoverlapping = function() -- one two
+ doifelse(overlapping(scanstring(),scanstring()))
end
-function commands.doifpositionsonsamepageelse(list,page)
- doifelse(onsamepage(list))
+scanners.doifelsepositionsonsamepage = function() -- list
+ doifelse(onsamepage(scanstring()))
end
-function commands.doifpositionsonthispageelse(list)
- doifelse(onsamepage(list,tostring(texgetcount("realpageno"))))
+scanners.doifelsepositionsonthispage = function() -- list
+ doifelse(onsamepage(scanstring(),tostring(texgetcount("realpageno"))))
end
-function commands.doifelsepositionsused()
+scanners.doifelsepositionsused = function()
doifelse(next(collected))
end
-commands.markcolumnbox = jobpositions.markcolumnbox
-commands.markregionbox = jobpositions.markregionbox
+scanners.markregionbox = function() -- box
+ markregionbox(scaninteger())
+end
+
+scanners.setregionbox = function() -- box
+ setregionbox(scaninteger())
+end
+
+scanners.markregionboxtagged = function() -- box tag
+ markregionbox(scaninteger(),scanstring())
+end
+
+scanners.setregionboxtagged = function() -- box tag
+ setregionbox(scaninteger(),scanstring())
+end
+
+scanners.markregionboxcorrected = function() -- box tag
+ markregionbox(scaninteger(),scanstring(),true)
+end
-- statistics (at least for the moment, when testing)
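
Most of the rewrite above follows one pattern: a commands.whatever(arguments) function becomes an interfaces.scanners entry that takes no Lua arguments and instead pulls its own input from the TeX stream with scanstring, scaninteger and scandimen, which is why the matching .mkiv code can say \clf_dosetposition{...} without assembling a Lua call string. A standalone sketch of that calling convention, with a fake token stream standing in for tokens.scanners:

-- mocked-up illustration of the scanner based calling convention; the
-- scan* helpers read from a plain table here, not from the TeX input
local stream = { "here:1", 2, 655360, 1310720 } -- name, page number, dimensions in scaled points
local cursor = 0

local function scanstring()  cursor = cursor + 1 return tostring(stream[cursor]) end
local function scaninteger() cursor = cursor + 1 return math.floor(stream[cursor]) end
local function scandimen()   cursor = cursor + 1 return stream[cursor] end

local tobesaved = { }
local scanners  = { }

scanners.dosaveposition = function() -- no Lua arguments: everything is scanned
    local name = scanstring()
    tobesaved[name] = {
        p = scaninteger(),
        x = scandimen(),
        y = scandimen(),
    }
end

scanners.dosaveposition()
print(tobesaved["here:1"].p, tobesaved["here:1"].x, tobesaved["here:1"].y) -- 2  655360  1310720
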
diff --git a/tex/context/base/anch-pos.mkiv b/tex/context/base/anch-pos.mkiv
index 7ecaa296e..5d9c2fd34 100644
--- a/tex/context/base/anch-pos.mkiv
+++ b/tex/context/base/anch-pos.mkiv
@@ -32,15 +32,15 @@
%D \dosetpositionplus {identifier} {width} {height} {depth} {list}
%D \stoptyping
-\def\dosaveposition #1#2#3#4{\ctxcommand{setpos("#1",\number#2,\number\dimexpr#3,\number\dimexpr#4)}}
-\def\dosavepositionwhd #1#2#3#4#5#6#7{\ctxcommand{setpos("#1",\number#2,\number\dimexpr#3,\number\dimexpr#4,\number\dimexpr#5,\number\dimexpr#6,\number\dimexpr#7)}}
-\def\dosavepositionplus#1#2#3#4#5#6#7#8{\ctxcommand{setpos("#1",\number#2,\number\dimexpr#3,\number\dimexpr#4,\number\dimexpr#5,\number\dimexpr#6,\number\dimexpr#7,"#8")}}
+\def\dosaveposition #1#2#3#4{\clf_dosaveposition {#1}#2 #3 #4\relax}
+\def\dosavepositionwhd #1#2#3#4#5#6#7{\clf_dosavepositionwhd {#1}#2 #3 #4 #5 #6 #7\relax}
+\def\dosavepositionplus#1#2#3#4#5#6#7#8{\clf_dosavepositionplus{#1}#2 #3 #4 #5 #6 #7{#8}}
-\def\dosetposition #1{\ctxcommand{posxy("#1")}}
-\def\dosetpositionwhd #1#2#3#4{\ctxcommand{poswhd("#1",\number\dimexpr#2,\number\dimexpr#3,\number\dimexpr#4)}}
-\def\dosetpositionplus#1#2#3#4#5{\ctxcommand{posplus("#1",\number\dimexpr#2,\number\dimexpr#3,\number\dimexpr#4,"#5")}}
-\def\dosetpositionbox #1#2{\ctxcommand{poswhd("#1",\number\wd#2,\number\ht#2,\number\dp#2)}}
-\def\dosetpositionstrut #1{\ctxcommand{posstrut("#1")}}
+\def\dosetposition #1{\clf_dosetposition {#1}} % {} expands
+\def\dosetpositionwhd #1#2#3#4{\clf_dosetpositionwhd {#1}#2 #3 #4\relax}
+\def\dosetpositionplus#1#2#3#4#5{\clf_dosetpositionplus {#1}#2 #3 #4{#5}}
+\def\dosetpositionbox #1#2{\clf_dosetpositionbox {#1}#2\relax}
+\def\dosetpositionstrut #1{\clf_dosetpositionstrut{#1}}
\newbox\b_anch_position
\newif \ifpositioning % sort of public
@@ -48,7 +48,7 @@
%D Sometimes we want to trick the position handler a bit:
\def\replacepospxywhd#1#2#3#4#5#6#7% when used we can better make a helper
- {\ctxcommand{replacepospxywhd('#1',\number#2,\number\dimexpr#3,\number\dimexpr#4,\number\dimexpr#5,\number\dimexpr#6,\number\dimexpr#7)}}
+ {\clf_replacepospxywhd{#1}#2 #3 #4 #5 #6 #7\relax}
%D \macros
%D {MPp, MPx, MPy, MPw, MPh, MPd, MPxy, MPll, MPlr, MPur, MPul, MPpos, MPanchor}
@@ -56,28 +56,35 @@
%D Access to the positional information is provided by macros with short names
%S that are clearly meant for \METAPOST\ but nowadays also used for other purposes.
-\def\MPp #1{\ctxcommand{MPp("#1")}} \let\MPpage \MPp
-\def\MPr #1{\ctxcommand{MPr("#1")}} \let\MPregion \MPr
-\def\MPc #1{\ctxcommand{MPc("#1")}} \let\MPcolumn \MPc
-\def\MPn #1{\ctxcommand{MPn("#1")}} \let\MPparagraph\MPn
-\def\MPx #1{\ctxcommand{MPx("#1")}}
-\def\MPy #1{\ctxcommand{MPy("#1")}}
-\def\MPw #1{\ctxcommand{MPw("#1")}} % first we need to replace \MPwidth etc
-\def\MPh #1{\ctxcommand{MPh("#1")}}
-\def\MPd #1{\ctxcommand{MPd("#1")}}
-\def\MPxy #1{\ctxcommand{MPxy("#1")}}
-\def\MPll #1{\ctxcommand{MPll("#1")}}
-\def\MPlr #1{\ctxcommand{MPlr("#1")}}
-\def\MPur #1{\ctxcommand{MPur("#1")}}
-\def\MPul #1{\ctxcommand{MPul("#1")}}
-\def\MPpos #1{\ctxcommand{MPpos("#1")}} \let\MPanchor\MPpos % overloaded locally when needed
-\def\MPe #1{\ctxcommand{MPe("#1")}}
-
-\def\MPls #1{\ctxcommand{MPls("#1")}} \let\MPleftskip\MPls % compatible feature
-\def\MPrs #1{\ctxcommand{MPrs("#1")}} \let\MPrightkip\MPrs % compatible feature
-
-\def\MPpardata#1{\ctxcommand{MPpardata("#1")}}
-\def\MPxywhd #1{\ctxcommand{MPxywhd("#1")}}
+\def\MPp #1{\clf_MPp {#1}}
+\def\MPr #1{\clf_MPr {#1}}
+\def\MPc #1{\clf_MPc {#1}}
+\def\MPn #1{\clf_MPn {#1}}
+\def\MPx #1{\clf_MPx {#1}}
+\def\MPy #1{\clf_MPy {#1}}
+\def\MPw #1{\clf_MPw {#1}}
+\def\MPh #1{\clf_MPh {#1}}
+\def\MPd #1{\clf_MPd {#1}}
+\def\MPxy #1{\clf_MPxy {#1}}
+\def\MPll #1{\clf_MPll {#1}}
+\def\MPlr #1{\clf_MPlr {#1}}
+\def\MPur #1{\clf_MPur {#1}}
+\def\MPul #1{\clf_MPul {#1}}
+\def\MPpos #1{\clf_MPpos {#1}}
+\def\MPls #1{\clf_MPls {#1}}
+\def\MPrs #1{\clf_MPrs {#1}}
+\def\MPpardata#1{\clf_MPpardata{#1}}
+\def\MPxywhd #1{\clf_MPxywhd {#1}}
+\def\MPposset #1{\clf_MPposset {#1}}
+
+\let\MPpage \MPp
+\let\MPregion \MPr
+\let\MPcolumn \MPc
+\let\MPparagraph\MPn
+
+\let\MPanchor \MPpos % overloaded locally when needed
+\let\MPleftskip \MPls % compatible feature
+\let\MPrightkip \MPrs % compatible feature
%D \macros
%D {MPplus, MPrest, MPv, MPvv}
@@ -98,8 +105,8 @@
%D
%D The extra parameters are not treated.
-\def\MPplus#1#2#3{\ctxcommand{MPplus("#1",#2,"#3")}} \let\MPv \MPplus
-\def\MPrest #1#2{\ctxcommand{MPrest("#1","#2")}} \let\MPvv\MPrest
+\def\MPplus#1#2#3{\clf_MPplus{#1}#2{#3}} \let\MPv \MPplus
+\def\MPrest #1#2{\clf_MPrest{#1}{#2}} \let\MPvv\MPrest
%D There are two low level positioning macros. Both store the position as well
%D as execute an action associated with that position.
@@ -248,9 +255,11 @@
\newcount\c_anch_column % will be delegated to lua
\newcount\c_anch_text % will be delegated to lua
+% beware we need to pass \somethingexpanded or { }
+
\unexpanded\def\anch_mark_column_box#1%
{\global\advance\c_anch_column\plusone
- \ctxcommand{markregionbox(\number#1,"columnarea:\the\c_anch_column")}} % extra height
+ \clf_markregionboxtagged#1{columnarea:\the\c_anch_column}} % extra height
\unexpanded\def\anch_mark_region_box
{\iftrialtypesetting
@@ -262,24 +271,27 @@
\fi\fi}
\unexpanded\def\anch_mark_region_box_indeed#1%
- {\ctxcommand{markregionbox(\number#1)}}
+ {\clf_markregionbox#1\relax}
\unexpanded\def\anch_mark_flow_box#1% will be extended / renamed
{\hbox\bgroup
\global\advance\c_anch_text\plusone
- \ctxcommand{markregionbox(\number#1,"textarea:\the\c_anch_text")}%
+ \clf_markregionboxtagged#1{textarea:\the\c_anch_text}%
\box#1%
\egroup}
+\unexpanded\def\anch_mark_tagged_box#1#2%
+ {\clf_markregionboxtagged#1{#2}}
+
\unexpanded\def\anch_mark_flow_only#1% will be extended / renamed
{\global\advance\c_anch_text\plusone
- \ctxcommand{markregionbox(\number#1,"textarea:\the\c_anch_text",true)}}
+ \clf_markregionboxcorrected#1{textarea:\the\c_anch_text}}
\unexpanded\def\anch_make_page_box#1% maybe like text
- {\ctxcommand{markregionbox(\number#1,"\pageanchor")}} % needs an hbox
+ {\clf_setregionboxtagged#1{page:\the\realpageno}}
\unexpanded\def\anch_mark_text_box#1%
- {\ctxcommand{markregionbox(\number#1,"\textanchor")}} % needs an hbox
+ {\clf_markregionboxtagged#1{text:\the\realpageno}} % needs an hbox
%D We can copy a position with:
%D
@@ -289,7 +301,8 @@
%D
%D Again, this is a global operation.
-\def\copyposition#1#2{\ctxcommand{copyposition('#1','#2')}}
+\unexpanded\def\copyposition#1#2%
+ {\clf_copyposition{#1}{#2}}
%D The fact that handling positions is a two pass operation, is one of the
%D reasons why we need to be able to test for existence, using:
@@ -298,8 +311,12 @@
%D \doifpositionelse {identifier} {found action} {not found action}
%D \stoptyping
-\def\doifpositionelse#1{\ctxcommand{doifpositionelse('#1')}}
-\def\doifposition #1{\ctxcommand{doifposition('#1')}}
+\unexpanded\def\doifposition #1{\clf_doifposition {#1}}
+\unexpanded\def\doifelseposition #1{\clf_doifelseposition {#1}}
+\unexpanded\def\doifelsepositiononpage#1#2{\clf_doifelsepositiononpage{#1}#2\relax}
+
+\let\doifpositionelse \doifelseposition
+\let\doifpositiononpageelse\doifelsepositiononpage
%D \macros
%D {xypos}
@@ -315,8 +332,8 @@
%D \NC \type {\epos} \NC e: \NC end point in a line \NC \NR
%D \stoptabulate
%D
-%D Each macro takes an identifier as argument, and the \type
-%D {\hpos} and \type {\vpos} also expect box content.
+%D Each macro takes an identifier as argument, and the \type {\hpos} and
+%D \type {\vpos} also expect box content.
\let\xypos\setpositiononly
@@ -352,7 +369,7 @@
\fi}
\def\anch_positions_register_par_options_normal
- {\dontleavehmode\ctxcommand{parpos()}}
+ {\dontleavehmode\clf_parpos}
\def\anch_positions_register_par_options_traced
{\anch_positions_register_par_options_normal
@@ -407,7 +424,9 @@
%D {action when not overlapping}
%D \stoptyping
-\def\doifoverlappingelse#1#2{\ctxcommand{doifoverlappingelse("#1","#2")}}
+\unexpanded\def\doifelseoverlapping#1#2{\clf_doifelseoverlapping{#1}{#2}}
+
+\let\doifoverlappingelse\doifelseoverlapping
%D \macros
%D {doifpositionsonsamepageelse,
@@ -425,7 +444,16 @@
%D {action when not on this page}
%D \stoptyping
-\def\doifpositionsonsamepageelse#1{\ctxcommand{doifpositionsonsamepageelse("#1")}}
-\def\doifpositionsonthispageelse#1{\ctxcommand{doifpositionsonthispageelse("#1")}}
+\unexpanded\def\doifelsepositionsonsamepage#1{\clf_doifelsepositionsonsamepage{#1}}
+\unexpanded\def\doifelsepositionsonthispage#1{\clf_doifelsepositionsonthispage{#1}}
+
+\let\doifpositionsonsamepageelse\doifelsepositionsonsamepage
+\let\doifpositionsonthispageelse\doifelsepositionsonthispage
+
+%D Moved here:
+
+\unexpanded\def\doifelsepositionsused{\clf_doifelsepositionsused}
+
+\let\doifpositionsusedelse\doifelsepositionsused
\protect \endinput
diff --git a/tex/context/base/anch-snc.mkiv b/tex/context/base/anch-snc.mkiv
index 77ebc3e0e..27769fbf9 100644
--- a/tex/context/base/anch-snc.mkiv
+++ b/tex/context/base/anch-snc.mkiv
@@ -51,6 +51,8 @@
\def\doifelselastsyncposition#1#2%
{\doifelse{\lastsyncclass\lastsyncposition}{#1#2}}
+\let\doiflastsyncpositionelse\doifelselastsyncposition
+
\def\dodosyncposition#1#2#3%
{\letgvalue{\s!reset:\s!syncpos:#1}\relax
\letgvalue{\s!preset:\s!syncpos:#1}\relax
@@ -74,7 +76,7 @@
\!!counta\zerocount
\!!countc\zerocount
\doloop
- {\doifpositionelse{\s!syncpos:#1:\recurselevel}
+ {\doifelseposition{\s!syncpos:#1:\recurselevel}
{\!!dimenb\MPy{\s!syncpos:#1:\recurselevel}\relax
\!!countb\MPp{\s!syncpos:#1:\recurselevel}\relax
\ifnum\!!countb=\!!counta % same page
diff --git a/tex/context/base/anch-tab.mkiv b/tex/context/base/anch-tab.mkiv
index a70f63e24..da735b49d 100644
--- a/tex/context/base/anch-tab.mkiv
+++ b/tex/context/base/anch-tab.mkiv
@@ -43,16 +43,16 @@
\fi}
\unexpanded\def\tabl_tabulate_hook_b_first
- {\ctxcommand{bcolumn("tabulate:\the\c_anch_tabs:\the\c_tabl_tabulate_column",true)}}
+ {\clf_bposcolumnregistered{tabulate:\the\c_anch_tabs:\the\c_tabl_tabulate_column}}
\unexpanded\def\tabl_tabulate_hook_b_next
- {\ctxcommand{bcolumn("tabulate:\the\c_anch_tabs:\the\c_tabl_tabulate_column")}}
+ {\clf_bposcolumn{tabulate:\the\c_anch_tabs:\the\c_tabl_tabulate_column}}
\unexpanded\def\tabl_tabulate_hook_e_first
- {\ctxcommand{ecolumn(true)}}
+ {\clf_eposcolumnregistered}
\unexpanded\def\tabl_tabulate_hook_e_next
- {\ctxcommand{ecolumn()}}
+ {\clf_eposcolumn}
% \appendtoks \registerparoptions \to \everypar
diff --git a/tex/context/base/attr-col.lua b/tex/context/base/attr-col.lua
index 7c6b7909b..b5fac9c2d 100644
--- a/tex/context/base/attr-col.lua
+++ b/tex/context/base/attr-col.lua
@@ -17,9 +17,14 @@ local format = string.format
local concat = table.concat
local min, max, floor = math.min, math.max, math.floor
-local attributes, nodes, utilities, logs, backends, storage = attributes, nodes, utilities, logs, backends, storage
-local commands, context, interfaces = commands, context, interfaces
-local tex = tex
+local attributes = attributes
+local nodes = nodes
+local utilities = utilities
+local logs = logs
+local backends = backends
+local storage = storage
+local context = context
+local tex = tex
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -43,6 +48,9 @@ local unsetvalue = attributes.unsetvalue
local registerstorage = storage.register
local formatters = string.formatters
+local interfaces = interfaces
+local implement = interfaces.implement
+
-- We can distinguish between rules and glyphs but it's not worth the trouble. A
-- first implementation did that and while it saves a bit for glyphs and rules, it
-- costs more resources for transparencies. So why bother.
@@ -119,15 +127,25 @@ local models = {
}
local function rgbtocmyk(r,g,b) -- we could reduce
- return 1-r, 1-g, 1-b, 0
+ if not r then
+ return 0, 0, 0
+ else
+ return 1-r, 1-g, 1-b, 0
+ end
end
local function cmyktorgb(c,m,y,k)
- return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
+ if not c then
+ return 0, 0, 0, 1
+ else
+ return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
+ end
end
local function rgbtogray(r,g,b)
- if colors.weightgray then
+ if not r then
+ return 0
+ elseif colors.weightgray then
return .30*r + .59*g + .11*b
else
return r/3 + g/3 + b/3
@@ -246,7 +264,11 @@ end
--~ return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p }
--~ end
+local p_split = lpeg.tsplitat(",")
+local lpegmatch = lpeg.match
+
function colors.spot(parent,f,d,p)
+ -- inspect(parent) inspect(f) inspect(d) inspect(p)
if type(p) == "number" then
local n = list[numbers.color][parent] -- hard coded ref to color number
if n then
@@ -261,6 +283,33 @@ function colors.spot(parent,f,d,p)
end
else
-- todo, multitone (maybe p should be a table)
+ local ps = lpegmatch(p_split,p)
+ local ds = lpegmatch(p_split,d)
+ local c, m, y, k = 0, 0, 0, 0
+ local done = false
+ for i=1,#ps do
+ local p = tonumber(ps[i])
+ local d = ds[i]
+ if p and d then
+ local n = list[numbers.color][d] -- hard coded ref to color number
+ if n then
+ local v = values[n]
+ if v then
+ c = c + p*v[6]
+ m = m + p*v[7]
+ y = y + p*v[8]
+ k = k + p*v[8]
+ done = true
+ end
+ end
+ end
+ end
+ if done then
+ local r, g, b = cmyktorgb(c,m,y,k)
+ local s = cmyktogray(c,m,y,k)
+ local f = tonumber(f)
+ return { 5, s, r, g, b, c, m, y, k, parent, f, d, p }
+ end
end
return { 5, .5, .5, .5, .5, 0, 0, 0, .5, parent, f, d, p }
end
@@ -529,10 +578,10 @@ end
-- interface
-commands.enablecolor = colors.enable
-commands.enabletransparency = transparencies.enable
-commands.enablecolorintents = colorintents.enable
+implement { name = "enablecolor", onlyonce = true, actions = colors.enable }
+implement { name = "enabletransparency", onlyonce = true, actions = transparencies.enable }
+implement { name = "enablecolorintents", onlyonce = true, actions = colorintents.enable }
-function commands.registercolor (...) context(colors .register(...)) end
-function commands.registertransparency(...) context(transparencies.register(...)) end
-function commands.registercolorintent (...) context(colorintents .register(...)) end
+--------- { name = "registercolor", actions = { colors .register, context }, arguments = "string" }
+--------- { name = "registertransparency", actions = { transparencies.register, context }, arguments = { ... } }
+implement { name = "registercolorintent", actions = { colorintents .register, context }, arguments = { ... } }
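
The color converters now guard against missing components, and the new multitone branch in colors.spot builds a combined cmyk value by adding the fraction-weighted components of the parent colorants before converting back to rgb and gray. The underlying formulas are unchanged; exercised standalone they behave like this (weightgray corresponds to the .30/.59/.11 weighting above):

-- standalone check of the rgb/cmyk/gray conversions used in attr-col.lua
local min = math.min
local weightgray = true

local function rgbtocmyk(r,g,b)
    return 1-r, 1-g, 1-b, 0
end

local function cmyktorgb(c,m,y,k)
    return 1.0 - min(1.0,c+k), 1.0 - min(1.0,m+k), 1.0 - min(1.0,y+k)
end

local function rgbtogray(r,g,b)
    if weightgray then
        return .30*r + .59*g + .11*b
    else
        return r/3 + g/3 + b/3
    end
end

print(rgbtocmyk(1,0,0))   -- pure red expressed as cmyk
print(cmyktorgb(0,1,1,0)) -- and converted back to rgb
print(rgbtogray(1,0,0))   -- weighted gray value of red (0.3)
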
diff --git a/tex/context/base/attr-eff.lua b/tex/context/base/attr-eff.lua
index b187b64c7..ff41e12de 100644
--- a/tex/context/base/attr-eff.lua
+++ b/tex/context/base/attr-eff.lua
@@ -6,7 +6,6 @@ if not modules then modules = { } end modules ['attr-eff'] = {
license = "see context related readme files"
}
-local commands, interfaces = commands, interfaces
local attributes, nodes, backends, utilities = attributes, nodes, backends, utilities
local tex = tex
@@ -18,6 +17,9 @@ local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
+local interfaces = interfaces
+local implement = interfaces.implement
+
local variables = interfaces.variables
local v_normal = variables.normal
@@ -91,8 +93,13 @@ local function register(specification)
return n
end
+local enabled = false
+
local function enable()
- tasks.enableaction("shipouts","attributes.effects.handler")
+ if not enabled then
+ tasks.enableaction("shipouts","attributes.effects.handler")
+ enabled = true
+ end
end
effects.register = register
@@ -100,12 +107,28 @@ effects.enable = enable
-- interface
-local enabled = false
+implement {
+ name = "seteffect",
+ actions = function(specification)
+ if not enabled then
+ enable()
+ end
+ texsetattribute(a_effect,register(specification))
+ end,
+ arguments = {
+ {
+ { "alternative", "string" },
+ { "stretch", "integer" },
+ { "rulethickness", "dimen" }
+ }
+ }
+}
-function commands.triggereffect(specification)
- if not enabled then
- enable()
- enabled = true
+implement {
+ name = "reseteffect",
+ actions = function()
+ if enabled then
+ texsetattribute(a_effect,register())
+ end
end
- texsetattribute(a_effect,register(specification))
-end
+}
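
In attr-eff.lua the enabled flag now lives inside enable() itself, so the shipout handler is hooked in at most once no matter how many effects get started; the new seteffect implementation simply calls enable() and sets the attribute. The enable-once idiom on its own, as a runnable sketch with a stand-in for tasks.enableaction:

-- the enable-once idiom from attr-eff.lua, with a stand-in action registry
local hooked = { }

local function enableaction(phase,name) -- stand-in for tasks.enableaction
    hooked[#hooked+1] = phase .. ":" .. name
end

local enabled = false

local function enable()
    if not enabled then
        enableaction("shipouts","attributes.effects.handler")
        enabled = true
    end
end

enable()
enable() -- the second call is a no-op
print(#hooked) -- 1
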
diff --git a/tex/context/base/attr-eff.mkiv b/tex/context/base/attr-eff.mkiv
index 43f575a7a..bec8687ea 100644
--- a/tex/context/base/attr-eff.mkiv
+++ b/tex/context/base/attr-eff.mkiv
@@ -36,14 +36,14 @@
\to \everydefineeffect
\unexpanded\def\starteffect[#1]%
- {\ctxcommand{triggereffect{
- alternative = "\namedeffectparameter{#1}\c!alternative",
- stretch = \number\namedeffectparameter{#1}\c!stretch,
- rulethickness = \number\dimexpr\namedeffectparameter{#1}\c!rulethickness\relax
- }}}
+ {\clf_seteffect
+ alternative {\namedeffectparameter{#1}\c!alternative}
+ stretch \numexpr\namedeffectparameter{#1}\c!stretch\relax
+ rulethickness \dimexpr\namedeffectparameter{#1}\c!rulethickness\relax
+ \relax}
\unexpanded\def\stopeffect % can be special
- {\ctxcommand{triggereffect()}} % v!normal 0 0
+ {\clf_reseteffect} % v!normal 0 0
\unexpanded\def\effect[#1]%
{\groupedcommand{\starteffect[#1]}{\stopeffect}}
diff --git a/tex/context/base/attr-ini.lua b/tex/context/base/attr-ini.lua
index ad4081681..df7404d11 100644
--- a/tex/context/base/attr-ini.lua
+++ b/tex/context/base/attr-ini.lua
@@ -6,8 +6,6 @@ if not modules then modules = { } end modules ['attr-ini'] = {
license = "see context related readme files"
}
-local commands, context, nodes, storage = commands, context, nodes, storage
-
local next, type = next, type
--[[ldx--
@@ -15,6 +13,13 @@ local next, type = next, type
symbolic names later on.</p>
--ldx]]--
+local nodes = nodes
+local context = context
+local storage = storage
+local commands = commands
+
+local implement = interfaces.implement
+
attributes = attributes or { }
local attributes = attributes
@@ -38,13 +43,13 @@ storage.register("attributes/names", names, "attributes.names")
storage.register("attributes/numbers", numbers, "attributes.numbers")
storage.register("attributes/list", list, "attributes.list")
-function attributes.define(name,number) -- at the tex end
- if not numbers[name] then
- numbers[name] = number
- names[number] = name
- list[number] = { }
- end
-end
+-- function attributes.define(name,number) -- at the tex end
+-- if not numbers[name] then
+-- numbers[name] = number
+-- names[number] = name
+-- list[number] = { }
+-- end
+-- end
--[[ldx--
<p>We reserve this one as we really want it to be always set (faster).</p>
@@ -53,38 +58,19 @@ end
names[0], numbers["fontdynamic"] = "fontdynamic", 0
--[[ldx--
-<p>We can use the attributes in the range 127-255 (outside user space). These
-are only used when no attribute is set at the \TEX\ end which normally
-happens in <l n='context'/>.</p>
+<p>Private attributes are used by the system and public ones are for users. We use dedicated
+ranges of numbers for them. Of course at the <l n='context'/> end a private attribute can be
+accessible too, so a private attribute can have a public appearance.</p>
--ldx]]--
-sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 127
-
--- to be considered (so that we can use an array access):
---
--- local private = { } attributes.private = private
---
--- setmetatable(private, {
--- __index = function(t,name)
--- local number = sharedstorage.attributes_last_private
--- if number < 1023 then -- texgetcount("minallocatedattribute") - 1
--- number = number + 1
--- sharedstorage.attributes_last_private = number
--- end
--- numbers[name], names[number], list[number] = number, name, { }
--- private[name] = number
--- return number
--- end,
--- __call = function(t,name)
--- return t[name]
--- end
--- } )
+sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 127 -- very private (can become 15)
+sharedstorage.attributes_last_public = sharedstorage.attributes_last_public or 1024 -- less private
function attributes.private(name) -- at the lua end (hidden from user)
local number = numbers[name]
if not number then
local last = sharedstorage.attributes_last_private
- if last < 1023 then -- texgetcount("minallocatedattribute") - 1
+ if last < 1023 then
last = last + 1
sharedstorage.attributes_last_private = last
else
@@ -97,6 +83,29 @@ function attributes.private(name) -- at the lua end (hidden from user)
return number
end
+function attributes.public(name) -- at the lua end (hidden from user)
+ local number = numbers[name]
+ if not number then
+ local last = sharedstorage.attributes_last_public
+ if last < 65535 then
+ last = last + 1
+ sharedstorage.attributes_last_public = last
+ else
+ report_attribute("no more room for public attributes")
+ os.exit()
+ end
+ number = last
+ numbers[name], names[number], list[number] = number, name, { }
+ end
+ return number
+end
+
+attributes.system = attributes.private
+
+function attributes.define(name,category)
+ return (attributes[category or "public"] or attributes["public"])(name)
+end
+
-- tracers
local report_attribute = logs.reporter("attributes")
@@ -122,20 +131,11 @@ function attributes.ofnode(n)
showlist(n,n.attr)
end
--- interface
-
-commands.defineattribute = attributes.define
-commands.showattributes = attributes.showcurrent
-
-function commands.getprivateattribute(name)
- context(attributes.private(name))
-end
-
-- rather special
local store = { }
-function commands.savecurrentattributes(name)
+function attributes.save(name)
name = name or ""
local n = node.current_attr()
n = n and n.next
@@ -150,7 +150,7 @@ function commands.savecurrentattributes(name)
}
end
-function commands.restorecurrentattributes(name)
+function attributes.restore(name)
name = name or ""
local t = store[name]
if t then
@@ -168,3 +168,28 @@ function commands.restorecurrentattributes(name)
end
-- store[name] = nil
end
+
+implement {
+ name = "defineattribute",
+ arguments = { "string", "string" },
+ actions = { attributes.define, context }
+}
+
+-- interface
+
+implement {
+ name = "showattributes",
+ actions = attributes.showcurrent
+}
+
+implement {
+ name = "savecurrentattributes",
+ arguments = "string",
+ actions = attributes.save
+}
+
+implement {
+ name = "restorecurrentattributes",
+ arguments = "string",
+ actions = attributes.restore
+}
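With the changes above, attribute allocation is handled entirely at the Lua end:
attributes.define picks the private allocator (numbers 128 up to 1023) or the
public one (numbers 1025 up to 65535) depending on a category string and returns
the allocated number. A hedged usage sketch, with made-up attribute names, not
part of the patch:

    local a_system = attributes.define("demostate","private") -- system range, 128 .. 1023
    local a_user   = attributes.define("demomarker")          -- defaults to "public", 1025 .. 65535

    -- both calls are idempotent: defining the same name again returns the
    -- number that was allocated the first time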
diff --git a/tex/context/base/attr-ini.mkiv b/tex/context/base/attr-ini.mkiv
index 9dfa7baae..c6b798d18 100644
--- a/tex/context/base/attr-ini.mkiv
+++ b/tex/context/base/attr-ini.mkiv
@@ -40,31 +40,31 @@
\newtoks \attributesresetlist
-\ifdefined \s!global \else \def\s!global{global} \fi % for metatex % or hard check later
-\ifdefined \s!public \else \def\s!public{public} \fi % for metatex % or hard check later
-
-\unexpanded\def\defineattribute
- {\dodoubleempty\attr_basics_define}
-
-\def\attr_basics_define[#1][#2]% alternatively we can let lua do the housekeeping
- {\expandafter\newattribute\csname\??attributecount#1\endcsname
- \expandafter\newconstant \csname\??attributeid#1\endcsname
- \csname\??attributeid#1\endcsname\c_syst_last_allocated_attribute
- \ctxcommand{defineattribute("#1",\number\c_syst_last_allocated_attribute)}%
- \doifnotinset\s!global{#2}{\appendetoks\csname\??attributecount#1\endcsname\attributeunsetvalue\to\attributesresetlist}%
- \doifinset \s!public{#2}{\expandafter\let\csname#1attribute\expandafter\endcsname\csname\??attributeid#1\endcsname}}
-
-\unexpanded\def\definesystemattribute
- {\dodoubleempty\attr_basics_define_system}
-
-\def\attr_basics_define_system[#1][#2]% alternatively we can let lua do the housekeeping
- {\scratchcounter\ctxcommand{getprivateattribute("#1")}\relax
- \expandafter\attributedef\csname\??attributecount#1\endcsname\scratchcounter
- \expandafter\newconstant \csname\??attributeid#1\endcsname
- \csname\??attributeid#1\endcsname\scratchcounter
- %\writestatus\m!system{defining system attribute #1 with number \number\scratchcounter}%
- \doifnotinset\s!global{#2}{\appendetoks\csname\??attributecount#1\endcsname\attributeunsetvalue\to\attributesresetlist}%
- \doifinset \s!public{#2}{\expandafter\let\csname#1attribute\expandafter\endcsname\csname\??attributeid#1\endcsname}}
+\ifdefined \s!global \else \def\s!global {global} \fi % for metatex % or hard check later
+\ifdefined \s!public \else \def\s!public {public} \fi % for metatex % or hard check later
+\ifdefined \s!private \else \def\s!private {private} \fi % for metatex % or hard check later
+\ifdefined \s!attribute \else \def\s!attribute{attribute} \fi % for metatex % or hard check later
+
+\unexpanded\def\defineattribute {\dodoubleempty\attr_basics_define}
+\unexpanded\def\definesystemattribute{\dodoubleempty\attr_basics_define_system}
+
+\def\attr_basics_define {\attr_basics_define_indeed\s!public}
+\def\attr_basics_define_system{\attr_basics_define_indeed\s!private}
+
+\def\attr_basics_define_indeed#1[#2][#3]%
+ {\scratchcounter\clf_defineattribute{#2}{#1}\relax
+ %\writestatus\m!system{defining #1 attribute #2 with number \number\scratchcounter}%
+ \expandafter\attributedef\csname\??attributecount#2\endcsname\scratchcounter
+ \expandafter\newconstant \csname\??attributeid#2\endcsname
+ \csname\??attributeid#2\endcsname\scratchcounter
+ % some attributes are always global
+ \doifnotinset\s!global{#3}{\appendetoks\csname\??attributecount#2\endcsname\attributeunsetvalue\to\attributesresetlist}%
+ % here public means 'visible' so it's not to be confused with 'public' at the lua end
+ \doifinset \s!public{#3}{\expandafter\let\csname#2\s!attribute\expandafter\endcsname\csname\??attributeid#2\endcsname}}
+
+\unexpanded\def\newattribute#1%
+ {\attr_basics_define_indeed\s!public[\strippedcsname#1][]%
+ \expandafter\let\expandafter#1\csname\??attributeid\strippedcsname#1\endcsname}
% expandable so we can \edef them for speed
@@ -79,13 +79,13 @@
%D Rather special.
-\unexpanded\def\savecurrentattributes #1{\ctxcommand{savecurrentattributes ("#1")}}
-\unexpanded\def\restorecurrentattributes#1{\ctxcommand{restorecurrentattributes("#1")}}
+\unexpanded\def\savecurrentattributes #1{\clf_savecurrentattributes {#1}}
+\unexpanded\def\restorecurrentattributes#1{\clf_restorecurrentattributes{#1}}
%D For the moment we put this here (later it will move to where it's used):
\definesystemattribute [state]
-\definesystemattribute [color] [public]
+\definesystemattribute [color] [public] % global
\definesystemattribute [colormodel] [public,global]
\definesystemattribute [skip]
\definesystemattribute [penalty]
@@ -105,6 +105,6 @@
\definesystemattribute [checkedbreak] [public]
\definesystemattribute [vboxtohboxseparator] [public]
-\unexpanded\def\showattributes{\ctxcommand{showattributes()}}
+\unexpanded\def\showattributes{\clf_showattributes}
\protect \endinput
diff --git a/tex/context/base/attr-lay.lua b/tex/context/base/attr-lay.lua
index 176af1a2c..0d43979c8 100644
--- a/tex/context/base/attr-lay.lua
+++ b/tex/context/base/attr-lay.lua
@@ -15,9 +15,17 @@ if not modules then modules = { } end modules ['attr-lay'] = {
local type = type
local insert, remove = table.insert, table.remove
-local attributes, nodes, utilities, logs, backends = attributes, nodes, utilities, logs, backends
-local commands, context, interfaces = commands, context, interfaces
-local tex = tex
+local attributes = attributes
+local nodes = nodes
+local utilities = utilities
+local logs = logs
+local backends = backends
+
+local context = context
+local interfaces = interfaces
+local tex = tex
+
+local implement = interfaces.implement
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -38,6 +46,8 @@ local viewerlayers = attributes.viewerlayers
local variables = interfaces.variables
local v_local = variables["local"]
local v_global = variables["global"]
+local v_start = variables["start"]
+local v_yes = variables["yes"]
local a_viewerlayer = attributes.private("viewerlayer")
@@ -80,15 +90,11 @@ local layerstacker = utilities.stacker.new("layers") -- experiment
layerstacker.mode = "stack"
layerstacker.unset = attributes.unsetvalue
+viewerlayers.resolve_reset = layerstacker.resolve_reset
viewerlayers.resolve_begin = layerstacker.resolve_begin
viewerlayers.resolve_step = layerstacker.resolve_step
viewerlayers.resolve_end = layerstacker.resolve_end
-function commands.cleanuplayers()
- layerstacker.clean()
- -- todo
-end
-
-- stacked
local function startlayer(...) startlayer = nodeinjections.startlayer return startlayer(...) end
@@ -239,15 +245,63 @@ function viewerlayers.define(settings)
end
end
-commands.defineviewerlayer = viewerlayers.define
-commands.startviewerlayer = viewerlayers.start
-commands.stopviewerlayer = viewerlayers.stop
-
-function commands.definedviewerlayer(settings)
- viewerlayers.define(settings)
- context(register(settings.tag,true)) -- true forces a use
+function viewerlayers.definedlayoutcomponent(tag)
+ viewerlayers.define {
+ tag = tag,
+ title = utilities.strings.nice(tag),
+ visible = v_start,
+ editable = v_yes,
+ printable = v_yes,
+ }
+ return register(tag,true) -- true forces a use
end
-function commands.registeredviewerlayer(name)
- context(register(name,true)) -- true forces a use
+function viewerlayers.cleanup()
+ layerstacker.clean()
+ -- todo
end
+
+implement {
+ name = "cleanuplayers",
+ actions = viewerlayers.cleanup
+}
+
+implement {
+ name = "defineviewerlayer",
+ actions = viewerlayers.define,
+ arguments = {
+ {
+ { "tag" },
+ { "title" },
+ { "visible" },
+ { "editable" },
+ { "export" },
+ { "printable" },
+ { "scope" },
+ },
+ true
+ }
+}
+
+implement {
+ name = "definedlayoutcomponent",
+ actions = { viewerlayers.definedlayoutcomponent, context },
+ arguments = "string"
+}
+
+implement {
+ name = "startviewerlayer",
+ actions = viewerlayers.start,
+ arguments = "string",
+}
+
+implement {
+ name = "stopviewerlayer",
+ actions = viewerlayers.stop
+}
+
+implement {
+ name = "registeredviewerlayer",
+ actions = { register, context },
+ arguments = { "string", true } -- true forces a use
+}
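Two argument styles show up in the registrations above: a nested table of key
names, which makes the call from the TeX end keyword-like, and plain entries such
as "string" or the literal true, which are scanned or passed through as constants.
A small sketch of the second style, mirroring the registeredviewerlayer case above
(the name and wrapper function are hypothetical, not part of the patch):

    local function demoregister(tag,force)
        -- force is always the constant true from the argument list below
        return register(tag,force)
    end

    interfaces.implement {
        name      = "demoregisteredlayer",
        arguments = { "string", true },        -- one scanned string plus the constant true
        actions   = { demoregister, context }, -- the return value is piped back to TeX
    }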
diff --git a/tex/context/base/attr-lay.mkiv b/tex/context/base/attr-lay.mkiv
index d4aae3060..14ef4a6d4 100644
--- a/tex/context/base/attr-lay.mkiv
+++ b/tex/context/base/attr-lay.mkiv
@@ -33,27 +33,31 @@
\setupviewerlayer
[\c!state=\v!start,
\c!title=,
+ \c!export=\v!yes, % exportable is ugly
\c!printable=\v!yes,
\c!scope=\v!local, % maybe global but needs checking with layout
\c!method=\v!none]
\appendtoks
- \ctxcommand{defineviewerlayer{
- tag = "\currentviewerlayer",
- title = "\viewerlayerparameter\c!title",
- visible = "\viewerlayerparameter\c!state",
- editable = "\v!yes",
- printable = "\viewerlayerparameter\c!printable",
- scope = "\viewerlayerparameter\c!scope"
- }}%
+ \clf_defineviewerlayer
+ tag {\currentviewerlayer}%
+ title {\viewerlayerparameter\c!title}%
+ visible {\viewerlayerparameter\c!state}%
+ editable {\v!yes}%
+ export {\viewerlayerparameter\c!export}%
+ printable {\viewerlayerparameter\c!printable}%
+ scope {\viewerlayerparameter\c!scope}%
+ \relax
\doif{\viewerlayerparameter\c!method}\v!command
{\setuxvalue{\e!start#1}{\startviewerlayer[\currentviewerlayer]}%
\setuxvalue{\e!stop #1}{\stopviewerlayer}}%
\to \everydefineviewerlayer
-\unexpanded\def\startviewerlayer[#1]{\ctxcommand{startviewerlayer("#1")}} % not grouped
-\unexpanded\def\stopviewerlayer {\ctxcommand{stopviewerlayer()}} % not grouped
-\unexpanded\def\viewerlayer [#1]{\groupedcommand{\startviewerlayer[#1]}{\stopviewerlayer}} % grouped
+\unexpanded\def\startviewerlayer[#1]{\clf_startviewerlayer{#1}}% not grouped
+\unexpanded\def\stopviewerlayer {\clf_stopviewerlayer} % not grouped
+\unexpanded\def\viewerlayer [#1]{\groupedcommand
+ {\clf_startviewerlayer{#1}}% grouped
+ {\clf_stopviewerlayer}} % grouped
% some day we will keep this at the lua end as the info is only needed there
@@ -70,13 +74,7 @@
\installcorenamespace{layoutcomponentattribute}
\def\attr_layoutcomponent_initialize#1%
- {\edef\layoutcomponentboxattribute{\ctxcommand{definedviewerlayer{%
- tag = "#1",
- title = utilities.strings.nice("#1"), % only here as in steps we have step:<number>
- visible = "\v!start",
- editable = "\v!yes",
- printable = "\v!yes"
- }}}%
+ {\edef\layoutcomponentboxattribute{\clf_definedlayoutcomponent{#1}}%
\edef\layoutcomponentboxattribute{attr \viewerlayerattribute \layoutcomponentboxattribute\relax}%
\expandafter\glet\csname\??layoutcomponentattribute#1\endcsname\layoutcomponentboxattribute}
@@ -94,12 +92,11 @@
\let\layoutcomponentboxattribute \empty
\unexpanded\def\showlayoutcomponents
- {%\ctxlua{attributes.viewerlayers.enable()}% automatic
- \let\setlayoutcomponentattribute \attr_layoutcomponent_set
+ {\let\setlayoutcomponentattribute \attr_layoutcomponent_set
\let\resetlayoutcomponentattribute\attr_layoutcomponent_reset}
\unexpanded\def\attr_layoutcomponent_cleanup
- {\ctxcommand{cleanuplayers()}}
+ {\clf_cleanuplayers}
\appendtoks
\attr_layoutcomponent_cleanup
diff --git a/tex/context/base/attr-neg.lua b/tex/context/base/attr-neg.lua
index c32cec956..1347c3d1a 100644
--- a/tex/context/base/attr-neg.lua
+++ b/tex/context/base/attr-neg.lua
@@ -18,7 +18,7 @@ local tex = tex
local states = attributes.states
local tasks = nodes.tasks
local nodeinjections = backends.nodeinjections
-local settexattribute = tex.setattribute
+local texsetattribute = tex.setattribute
local variables = interfaces.variables
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
@@ -89,10 +89,16 @@ negatives.enable = enable
local enabled = false
-function commands.triggernegative(stamp)
+function negatives.set(stamp)
if not enabled then
enable()
enabled = true
end
- settexattribute(a_negative,register(stamp))
+ texsetattribute(a_negative,register(stamp))
end
+
+interfaces.implement {
+ name = "setnegative",
+ actions = negatives.set,
+ arguments = "string",
+}
diff --git a/tex/context/base/attr-neg.mkiv b/tex/context/base/attr-neg.mkiv
index 102b220ba..ff1f52a96 100644
--- a/tex/context/base/attr-neg.mkiv
+++ b/tex/context/base/attr-neg.mkiv
@@ -25,6 +25,6 @@
\unexpanded\def\startpositive{\attr_trigger_negative\v!positive}
\unexpanded\def\stoppositive {\attr_trigger_negative\v!negative}
-\def\attr_trigger_negative#1{\ctxcommand{triggernegative('#1')}}
+\def\attr_trigger_negative#1{\clf_setnegative{#1}}
\protect \endinput
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 18a339247..33b6aa1e8 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -6,7 +6,14 @@ if not modules then modules = { } end modules ['back-exp'] = {
license = "see context related readme files"
}
--- beware: we run out of the 200 local limit
+-- Todo: share properties more with tagged pdf (or the reverse)
+
+-- Because we run into the 200 local limit we use quite some do .. end wrappers .. not always
+-- that nice but it has to be.
+
+-- Experiments demonstrated that mapping to <div> and classes is messy because we have to
+-- package attributes (some 30) into one set of (space separated but prefixed) classes,
+-- which only makes things worse .. so if you want something else, use xslt to get there.
-- language -> only mainlanguage, local languages should happen through start/stoplanguage
-- tocs/registers -> maybe add a stripper (i.e. just don't flush entries in final tree)
@@ -23,21 +30,29 @@ if not modules then modules = { } end modules ['back-exp'] = {
-- todo: move critical formatters out of functions
-- todo: delay loading (apart from basic tag stuff)
-local next, type = next, type
-local format, concat, sub, gsub = string.format, table.concat, string.sub, string.gsub
+-- problem : too many local variables
+
+-- check setting __i__
+
+local next, type, tonumber = next, type, tonumber
+local concat, sub, gsub = table.concat, string.sub, string.gsub
local validstring = string.valid
local lpegmatch = lpeg.match
local utfchar, utfvalues = utf.char, utf.values
local insert, remove = table.insert, table.remove
-local fromunicode16 = fonts.mappings.fromunicode16
local sortedhash = table.sortedhash
local formatters = string.formatters
+local todimen = number.todimen
+local replacetemplate = utilities.templates.replace
local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end)
local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end)
+
local less_state = false directives.register("export.lessstate", function(v) less_state = v end)
local show_comment = true directives.register("export.comment", function(v) show_comment = v end)
+show_comment = false -- figure out why this breaks comments
+
-- maybe we will also support these:
--
-- local css_hyphens = false directives.register("export.css.hyphens", function(v) css_hyphens = v end)
@@ -49,7 +64,13 @@ local report_export = logs.reporter("backend","export")
local nodes = nodes
local attributes = attributes
+
local variables = interfaces.variables
+local v_yes = variables.yes
+local v_no = variables.no
+local v_hidden = variables.hidden
+
+local implement = interfaces.implement
local settings_to_array = utilities.parsers.settings_to_array
@@ -85,36 +106,55 @@ local line_code = listcodes.line
local texgetcount = tex.getcount
-local a_characters = attributes.private('characters')
-local a_exportstatus = attributes.private('exportstatus')
+local privateattribute = attributes.private
+local a_characters = privateattribute('characters')
+local a_exportstatus = privateattribute('exportstatus')
+local a_tagged = privateattribute('tagged')
+local a_taggedpar = privateattribute("taggedpar")
+local a_image = privateattribute('image')
+local a_reference = privateattribute('reference')
+local a_textblock = privateattribute("textblock")
-local a_tagged = attributes.private('tagged')
-local a_taggedpar = attributes.private("taggedpar")
-local a_image = attributes.private('image')
-local a_reference = attributes.private('reference')
+local nuts = nodes.nuts
+local tonut = nuts.tonut
-local a_textblock = attributes.private("textblock")
+local getnext = nuts.getnext
+local getsubtype = nuts.getsubtype
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+local getid = nuts.getid
+local getfield = nuts.getfield
+local getattr = nuts.getattr
-local traverse_id = node.traverse_id
-local traverse_nodes = node.traverse
-local slide_nodelist = node.slide
-local locate_node = nodes.locate
+local setattr = nuts.setattr
+
+local traverse_id = nuts.traverse_id
+local traverse_nodes = nuts.traverse
local references = structures.references
local structurestags = structures.tags
local taglist = structurestags.taglist
+local specifications = structurestags.specifications
local properties = structurestags.properties
-local userdata = structurestags.userdata -- might be combines with taglist
-local tagdata = structurestags.data
-local tagmetadata = structurestags.metadata
-local detailedtag = structurestags.detailedtag
+local locatedtag = structurestags.locatedtag
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
+local characterdata = characters.data
+local overloads = fonts.mappings.overloads
+
-- todo: more locals (and optimize)
-local exportversion = "0.30"
+local exportversion = "0.34"
+local mathmlns = "http://www.w3.org/1998/Math/MathML"
+local contextns = "http://www.contextgarden.net/context/export" -- whatever suits
+local cssnamespaceurl = "@namespace context url('%namespace%') ;"
+local cssnamespace = "context|"
+----- cssnamespacenop = "/* no namespace */"
+
+local usecssnamespace = false
local nofcurrentcontent = 0 -- so we don't free (less garbage collection)
local currentcontent = { }
@@ -125,14 +165,15 @@ local currentparagraph = nil
local noftextblocks = 0
-local attributehash = { } -- to be considered: set the values at the tex end
local hyphencode = 0xAD
local hyphen = utfchar(0xAD) -- todo: also emdash etc
-local colonsplitter = lpeg.splitat(":")
-local dashsplitter = lpeg.splitat("-")
+local tagsplitter = structurestags.patterns.splitter
+----- colonsplitter = lpeg.splitat(":")
+----- dashsplitter = lpeg.splitat("-")
local threshold = 65536
local indexing = false
local keephyphens = false
+local exportproperties = false
local finetuning = { }
@@ -140,6 +181,8 @@ local treestack = { }
local nesting = { }
local currentdepth = 0
+local wrapups = { }
+
local tree = { data = { }, fulltag = "root" } -- root
local treeroot = tree
local treehash = { }
@@ -155,20 +198,18 @@ local somespace = { [0x20] = true, [" "] = true } -- for testing
local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
local attribentities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;", ['"'] = "quot;" }
-local entityremapper = utf.remapper(entities)
+local p_entity = lpeg.replacer(entities) -- was: entityremapper = utf.remapper(entities)
+local p_attribute = lpeg.replacer(attribentities)
+local p_stripper = lpeg.patterns.stripper
+local p_escaped = lpeg.patterns.xml.escaped
-local alignmapping = {
- flushright = "right",
- middle = "center",
- flushleft = "left",
-}
+local f_tagid = formatters["%s-%04i"]
-local numbertoallign = {
- [0] = "justify", ["0"] = "justify", [variables.normal ] = "justify",
- [1] = "right", ["1"] = "right", [variables.flushright] = "right",
- [2] = "center", ["2"] = "center", [variables.middle ] = "center",
- [3] = "left", ["3"] = "left", [variables.flushleft ] = "left",
-}
+-- local alignmapping = {
+-- flushright = "right",
+-- middle = "center",
+-- flushleft = "left",
+-- }
local defaultnature = "mixed" -- "inline"
@@ -180,10 +221,14 @@ setmetatableindex(used, function(t,k)
end
end)
+local f_entity = formatters["&#x%X;"]
+local f_attribute = formatters[" %s=%q"]
+local f_property = formatters[" %s%s=%q"]
+
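Side note (illustrative, not part of the patch): string.formatters compiles a
format string once and hands back a function, which is why these f_* shortcuts
are hoisted to file level instead of indexing formatters inside hot loops:

    local formatters  = string.formatters

    local f_entity    = formatters["&#x%X;"]
    local f_attribute = formatters[" %s=%q"]

    print(f_entity(0x00A0))               -- &#xA0;
    print(f_attribute("file","demo.tex")) -- file="demo.tex" (with a leading space)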
setmetatableindex(specialspaces, function(t,k)
local v = utfchar(k)
t[k] = v
- entities[v] = formatters["&#x%X;"](k)
+ entities[v] = f_entity(k)
somespace[k] = true
somespace[v] = true
return v
@@ -195,30 +240,35 @@ local namespaced = {
}
local namespaces = {
- msubsup = "m",
- msub = "m",
- msup = "m",
- mn = "m",
- mi = "m",
- ms = "m",
- mo = "m",
- mtext = "m",
- mrow = "m",
- mfrac = "m",
- mroot = "m",
- msqrt = "m",
- munderover = "m",
- munder = "m",
- mover = "m",
- merror = "m",
- math = "m",
- mrow = "m",
- mtable = "m",
- mtr = "m",
- mtd = "m",
- mfenced = "m",
- maction = "m",
- mspace = "m",
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mrow = "m",
+ mtable = "m",
+ mtr = "m",
+ mtd = "m",
+ mfenced = "m",
+ maction = "m",
+ mspace = "m",
+ -- only when testing
+ mstacker = "m",
+ mstackertop = "m",
+ mstackermid = "m",
+ mstackerbot = "m",
}
setmetatableindex(namespaced, function(t,k)
@@ -232,171 +282,231 @@ end)
local function attribute(key,value)
if value and value ~= "" then
- return formatters[' %s="%s"'](key,gsub(value,".",attribentities))
+ return f_attribute(key,lpegmatch(p_attribute,value))
else
return ""
end
end
--- local P, C, Cc = lpeg.P, lpeg.C, lpeg.Cc
---
--- local dash, colon = P("-"), P(":")
---
--- local precolon, predash, rest = P((1-colon)^1), P((1-dash )^1), P(1)^1
---
--- local tagsplitter = C(precolon) * colon * C(predash) * dash * C(rest) +
--- C(predash) * dash * Cc(nil) * C(rest)
+local function setattribute(di,key,value,escaped)
+ if value and value ~= "" then
+ local a = di.attributes
+ if escaped then
+ value = lpegmatch(p_escaped,value)
+ end
+ if not a then
+ di.attributes = { [key] = value }
+ else
+ a[key] = value
+ end
+ end
+end
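Illustrative only (not part of the patch, values made up): the helper above is
what the extras.* handlers further down use to collect attributes on an export
tree node, roughly like this:

    local di = { }                                -- a fresh element node
    setattribute(di,"location","demo-intro",true) -- run through the xml escaper first
    setattribute(di,"packed","yes")
    -- di.attributes is now { location = "demo-intro", packed = "yes" }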
-local listdata = { }
+local listdata = { } -- this has to be done otherwise: each element can just point back to ...
-local function hashlistdata()
+function wrapups.hashlistdata()
local c = structures.lists.collected
for i=1,#c do
local ci = c[i]
local tag = ci.references.tag
if tag then
local m = ci.metadata
- listdata[m.kind .. ":" .. m.name .. "-" .. tag] = ci
+-- listdata[m.kind .. ":" .. m.name .. "-" .. tag] = ci
+ listdata[m.kind .. ">" .. tag] = ci
end
end
end
-local spaces = utilities.strings.newrepeater(" ",-1)
-
-function structurestags.setattributehash(fulltag,key,value) -- public hash
- if type(fulltag) == "number" then
- fulltag = taglist[fulltag]
- if fulltag then
- fulltag = fulltag[#fulltag]
- end
- end
- if fulltag then
- local ah = attributehash[fulltag] -- could be metatable magic
- if not ah then
- ah = { }
- attributehash[fulltag] = ah
- end
- ah[key] = value
+function structurestags.setattributehash(attr,key,value) -- public hash
+ local specification = taglist[attr]
+ if specification then
+ specification[key] = value
+ else
+ -- some kind of error
end
end
+local usedstyles = { }
--- experiment: styles and images
---
--- officially we should convert to bp but we round anyway
+local namespacetemplate = [[
+/* %what% for file %filename% */
-local usedstyles = { }
+%cssnamespaceurl%
+]]
+
+do
+
+ -- experiment: styles and images
+ --
+ -- officially we should convert to bp but we round anyway
--- /* padding : ; */
--- /* text-justify : inter-word ; */
+ -- /* padding : ; */
+ -- /* text-justify : inter-word ; */
+ -- /* text-align : justify ; */
local documenttemplate = [[
-document {
- font-size : %s !important ;
- max-width : %s !important ;
- text-align : %s !important ;
- hyphens : %s !important ;
+document, %namespace%div.document {
+ font-size : %size% !important ;
+ max-width : %width% !important ;
+    text-align   : %align% !important ;
+ hyphens : %hyphens% !important ;
}
]]
local styletemplate = [[
-%s[detail='%s'] {
- font-style : %s ;
- font-variant : %s ;
- font-weight : %s ;
- font-family : %s ;
- color : %s ;
+%element%[detail="%detail%"], %namespace%div.%element%.%detail% {
+ display : inline ;
+ font-style : %style% ;
+ font-variant : %variant% ;
+ font-weight : %weight% ;
+ font-family : %family% ;
+ color : %color% ;
}]]
-local function allusedstyles(xmlfile)
- local result = { format("/* styles for file %s */",xmlfile) }
- --
- local bodyfont = finetuning.bodyfont
- local width = finetuning.width
- local hyphen = finetuning.hyphen
- local align = finetuning.align
- --
- if not bodyfont or bodyfont == "" then
- bodyfont = "12pt"
- elseif type(bodyfont) == "number" then
- bodyfont = number.todimen(bodyfont,"pt","%ipt") or "12pt"
- end
- if not width or width == "" then
- width = "50em"
- elseif type(width) == "number" then
- width = number.todimen(width,"pt","%ipt") or "50em"
- end
- if hyphen == variables.yes then
- hyphen = "manual"
- else
- hyphen = "inherited"
- end
- if align then
- align = numbertoallign[align]
- end
- if not align then
- align = hyphens and "justify" or "inherited"
+ local numbertoallign = {
+ [0] = "justify", ["0"] = "justify", [variables.normal ] = "justify",
+ [1] = "right", ["1"] = "right", [variables.flushright] = "right",
+ [2] = "center", ["2"] = "center", [variables.middle ] = "center",
+ [3] = "left", ["3"] = "left", [variables.flushleft ] = "left",
+ }
+
+ function wrapups.allusedstyles(basename)
+ local result = { replacetemplate(namespacetemplate, {
+ what = "styles",
+ filename = basename,
+ namespace = contextns,
+ -- cssnamespaceurl = usecssnamespace and cssnamespaceurl or cssnamespacenop,
+ cssnamespaceurl = cssnamespaceurl,
+ }) }
+ --
+ local bodyfont = finetuning.bodyfont
+ local width = finetuning.width
+ local hyphen = finetuning.hyphen
+ local align = finetuning.align
+ --
+ if type(bodyfont) == "number" then
+ bodyfont = todimen(bodyfont)
+ else
+ bodyfont = "12pt"
+ end
+ if type(width) == "number" then
+ width = todimen(width) or "50em"
+ else
+ width = "50em"
+ end
+ if hyphen == v_yes then
+ hyphen = "manual"
+ else
+ hyphen = "inherited"
+ end
+ if align then
+ align = numbertoallign[align]
+ end
+ if not align then
+ align = hyphen and "justify" or "inherited"
+ end
+ --
+ result[#result+1] = replacetemplate(documenttemplate,{
+ size = bodyfont,
+ width = width,
+ align = align,
+ hyphens = hyphen
+ })
+ --
+ local colorspecification = xml.css.colorspecification
+ local fontspecification = xml.css.fontspecification
+ for element, details in sortedhash(usedstyles) do
+ for detail, data in sortedhash(details) do
+ local s = fontspecification(data.style)
+ local c = colorspecification(data.color)
+ detail = gsub(detail,"[^A-Za-z0-9]+","-")
+ result[#result+1] = replacetemplate(styletemplate,{
+ namespace = usecssnamespace and cssnamespace or "",
+ element = element,
+ detail = detail,
+ style = s.style or "inherit",
+ variant = s.variant or "inherit",
+ weight = s.weight or "inherit",
+ family = s.family or "inherit",
+ color = c or "inherit",
+ })
+ end
+ end
+ return concat(result,"\n\n")
end
- --
- result[#result+1] = format(documenttemplate,bodyfont,width,align,hyphen)
- --
- local colorspecification = xml.css.colorspecification
- local fontspecification = xml.css.fontspecification
- for element, details in sortedhash(usedstyles) do
- for detail, data in sortedhash(details) do
- local s = fontspecification(data.style)
- local c = colorspecification(data.color)
- result[#result+1] = formatters[styletemplate](element,detail,
- s.style or "inherit",
- s.variant or "inherit",
- s.weight or "inherit",
- s.family or "inherit",
- c or "inherit")
- end
- end
- return concat(result,"\n\n")
+
end
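The %key% placeholders in the templates above are filled in with
utilities.templates.replace (the replacetemplate local defined earlier). A hedged
sketch with a made-up template, not part of the patch:

    local replacetemplate = utilities.templates.replace

    local demotemplate = [[
    div.%element% {
        color : %color% ;
    }]]

    local css = replacetemplate(demotemplate, {
        element = "highlight",
        color   = "inherit",
    })
    -- css is the template text with %element% and %color% substituted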
local usedimages = { }
+do
+
local imagetemplate = [[
-%s[id="%s"] {
+%element%[id="%id%"], %namespace%div.%element%[id="%id%"] {
display : block ;
- background-image : url(%s) ;
+ background-image : url('%url%') ;
background-size : 100%% auto ;
background-repeat : no-repeat ;
- width : %s ;
- height : %s ;
+ width : %width% ;
+ height : %height% ;
}]]
-local function allusedimages(xmlfile)
- local result = { format("/* images for file %s */",xmlfile) }
- for element, details in sortedhash(usedimages) do
- for detail, data in sortedhash(details) do
- local name = data.name
- if file.suffix(name) == "pdf" then
- -- temp hack .. we will have a remapper
- name = file.replacesuffix(name,"svg")
- end
- result[#result+1] = formatters[imagetemplate](element,detail,name,data.width,data.height)
- end
- end
- return concat(result,"\n\n")
-end
+ local f_svgname = formatters["%s.svg"]
+ local f_svgpage = formatters["%s-page-%s.svg"]
+ local collected = { }
-local function uniqueusedimages()
- local unique = { }
- for element, details in next, usedimages do
- for detail, data in next, details do
- local name = data.name
- if file.suffix(name) == "pdf" then
- unique[file.replacesuffix(name,"svg")] = name
+ local function usedname(name,page)
+ if file.suffix(name) == "pdf" then
+ -- temp hack .. we will have a remapper
+ if page and page > 1 then
+ name = f_svgpage(file.nameonly(name),page)
else
- unique[name] = name
+ name = f_svgname(file.nameonly(name))
end
end
+ local scheme = url.hasscheme(name)
+ if not scheme or scheme == "file" then
+ -- or can we just use the name ?
+ return file.join("../images",file.basename(url.filename(name)))
+ else
+ return name
+ end
+ end
+
+ function wrapups.allusedimages(basename)
+ local result = { replacetemplate(namespacetemplate, {
+ what = "images",
+ filename = basename,
+ namespace = contextns,
+ -- cssnamespaceurl = usecssnamespace and cssnamespaceurl or "",
+ cssnamespaceurl = cssnamespaceurl,
+ }) }
+ for element, details in sortedhash(usedimages) do
+ for detail, data in sortedhash(details) do
+ local name = data.name
+ local page = tonumber(data.page) or 1
+ local spec = {
+ element = element,
+ id = data.id,
+ name = name,
+ page = page,
+ url = usedname(name,page),
+ width = data.width,
+ height = data.height,
+ used = data.used,
+ namespace = usecssnamespace and cssnamespace or "",
+ }
+ result[#result+1] = replacetemplate(imagetemplate,spec)
+ collected[detail] = spec
+ end
+ end
+ return concat(result,"\n\n")
+ end
+
+ function wrapups.uniqueusedimages() -- todo: combine these two
+ return collected
end
- return unique
+
end
--
@@ -407,13 +517,14 @@ properties.vspace = { export = "break", nature = "display" }
local function makebreaklist(list)
nofbreaks = nofbreaks + 1
local t = { }
- if list then
+ local l = list and list.taglist
+ if l then
for i=1,#list do
- t[i] = list[i]
+ t[i] = l[i]
end
end
- t[#t+1] = "break-" .. nofbreaks -- maybe no number
- return t
+ t[#t+1] = "break>" .. nofbreaks -- maybe no number or 0
+ return { taglist = t }
end
local breakattributes = {
@@ -424,7 +535,7 @@ local function makebreaknode(attributes) -- maybe no fulltag
nofbreaks = nofbreaks + 1
return {
tg = "break",
- fulltag = "break-" .. nofbreaks,
+ fulltag = "break>" .. nofbreaks,
n = nofbreaks,
element = "break",
nature = "display",
@@ -435,1217 +546,1764 @@ local function makebreaknode(attributes) -- maybe no fulltag
}
end
-local fields = { "title", "subtitle", "author", "keywords" }
+local function ignorebreaks(di,element,n,fulltag)
+ local data = di.data
+ for i=1,#data do
+ local d = data[i]
+ if d.content == " " then
+ d.content = ""
+ end
+ end
+end
-local function checkdocument(root)
- local data = root.data
- if data then
- for i=1,#data do
- local di = data[i]
- if di.content then
- -- ok
- elseif di.tg == "ignore" then
- di.element = ""
- checkdocument(di)
- else
- -- can't happen
- end
+local function ignorespaces(di,element,n,fulltag)
+ local data = di.data
+ for i=1,#data do
+ local d = data[i]
+ local c = d.content
+ if type(c) == "string" then
+ d.content = lpegmatch(p_stripper,c)
end
end
end
-function extras.document(result,element,detail,n,fulltag,di)
- result[#result+1] = format(" language=%q",languagenames[texgetcount("mainlanguagenumber")])
- if not less_state then
- result[#result+1] = format(" file=%q",tex.jobname)
- result[#result+1] = format(" date=%q",os.date())
- result[#result+1] = format(" context=%q",environment.version)
- result[#result+1] = format(" version=%q",exportversion)
- result[#result+1] = format(" xmlns:m=%q","http://www.w3.org/1998/Math/MathML")
- local identity = interactions.general.getidentity()
- for i=1,#fields do
- local key = fields[i]
- local value = identity[key]
- if value and value ~= "" then
- result[#result+1] = formatters[" %s=%q"](key,value)
+do
+
+ local fields = { "title", "subtitle", "author", "keywords" }
+
+ local function checkdocument(root)
+ local data = root.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ local tg = di.tg
+ if tg == "noexport" then
+ local s = specifications[di.fulltag]
+ local u = s and s.userdata
+ if u then
+ local comment = u.comment
+ if comment then
+ di.element = "comment"
+ di.data = { { content = comment } }
+ u.comment = nil
+ else
+ data[i] = false
+ end
+ else
+ data[i] = false
+ end
+ elseif di.content then
+ -- okay
+ elseif tg == "ignore" then
+ di.element = ""
+ checkdocument(di)
+ else
+ checkdocument(di) -- new, else no noexport handling
+ end
end
end
end
- checkdocument(di)
-end
-local itemgroups = { }
+ function extras.document(di,element,n,fulltag)
+ setattribute(di,"language",languagenames[texgetcount("mainlanguagenumber")])
+ if not less_state then
+ setattribute(di,"file",tex.jobname)
+ setattribute(di,"date",os.date())
+ setattribute(di,"context",environment.version)
+ setattribute(di,"version",exportversion)
+ setattribute(di,"xmlns:m",mathmlns)
+ local identity = interactions.general.getidentity()
+ for i=1,#fields do
+ local key = fields[i]
+ local value = identity[key]
+ if value and value ~= "" then
+ setattribute(di,key,value)
+ end
+ end
+ end
+ checkdocument(di)
+ end
-function structurestags.setitemgroup(current,packed,symbol)
- itemgroups[detailedtag("itemgroup",current)] = {
- packed = packed,
- symbol = symbol,
- }
end
-function extras.itemgroup(result,element,detail,n,fulltag,di)
- local hash = itemgroups[fulltag]
- if hash then
- local v = hash.packed
- if v then
- result[#result+1] = " packed='yes'"
+do
+
+ local itemgroups = { }
+
+ function structurestags.setitemgroup(packed,level,symbol)
+ itemgroups[locatedtag("itemgroup")] = {
+ packed = packed,
+ symbol = symbol,
+ level = level,
+ }
+ end
+
+ function structurestags.setitem(kind)
+ itemgroups[locatedtag("item")] = {
+ kind = kind,
+ }
+ end
+
+ function extras.itemgroup(di,element,n,fulltag)
+ local hash = itemgroups[fulltag]
+ if hash then
+ setattribute(di,"packed",hash.packed and "yes" or nil)
+ setattribute(di,"symbol",hash.symbol)
+ setattribute(di,"level",hash.level)
end
- local v = hash.symbol
- if v then
- result[#result+1] = attribute("symbol",v)
+ end
+
+ function extras.item(di,element,n,fulltag)
+ local hash = itemgroups[fulltag]
+ if hash then
+ local kind = hash.kind
+ if kind and kind ~= "" then
+ setattribute(di,"kind",kind)
+ end
end
end
+
end
-local synonyms = { }
+do
-function structurestags.setsynonym(current,tag)
- synonyms[detailedtag("synonym",current)] = tag
-end
+ local synonyms = { }
+ local sortings = { }
-function extras.synonym(result,element,detail,n,fulltag,di)
- local tag = synonyms[fulltag]
- if tag then
- result[#result+1] = formatters[" tag='%s'"](tag)
+ function structurestags.setsynonym(tag)
+ synonyms[locatedtag("synonym")] = tag
end
-end
-local sortings = { }
+ function extras.synonym(di,element,n,fulltag)
+ local tag = synonyms[fulltag]
+ if tag then
+ setattribute(di,"tag",tag)
+ end
+ end
-function structurestags.setsorting(current,tag)
- sortings[detailedtag("sorting",current)] = tag
-end
+ function structurestags.setsorting(tag)
+ sortings[locatedtag("sorting")] = tag
+ end
-function extras.sorting(result,element,detail,n,fulltag,di)
- local tag = sortings[fulltag]
- if tag then
- result[#result+1] = formatters[" tag='%s'"](tag)
+ function extras.sorting(di,element,n,fulltag)
+ local tag = sortings[fulltag]
+ if tag then
+ setattribute(di,"tag",tag)
+ end
end
+
end
-usedstyles.highlight = { }
+do
-function structurestags.sethighlight(current,style,color) -- we assume global styles
- usedstyles.highlight[current] = {
- style = style, -- xml.css.fontspecification(style),
- color = color, -- xml.css.colorspec(color),
- }
-end
+ local highlight = { }
+ usedstyles.highlight = highlight
-local descriptions = { }
-local symbols = { }
-local linked = { }
+ local strippedtag = structurestags.strip -- we assume global styles
-function structurestags.setdescription(tag,n)
- local nd = structures.notes.get(tag,n) -- todo: use listdata instead
- if nd then
- local references = nd.references
- descriptions[references and references.internal] = detailedtag("description",tag)
+ function structurestags.sethighlight(style,color)
+ highlight[strippedtag(locatedtag("highlight"))] = {
+ style = style, -- xml.css.fontspecification(style),
+ color = color, -- xml.css.colorspec(color),
+ }
end
+
end
-function structurestags.setdescriptionsymbol(tag,n)
- local nd = structures.notes.get(tag,n) -- todo: use listdata instead
- if nd then
- local references = nd.references
- symbols[references and references.internal] = detailedtag("descriptionsymbol",tag)
+do
+
+ -- is this referencing still needed?
+
+ local descriptions = { }
+ local symbols = { }
+ local linked = { }
+
+ function structurestags.setdescription(tag,n) -- needs checking (is tag needed)
+ -- we can also use the internals hash or list
+ local nd = structures.notes.get(tag,n)
+ if nd then
+ local references = nd.references
+ descriptions[references and references.internal] = locatedtag("description")
+ end
end
-end
-function finalizers.descriptions(tree)
- local n = 0
- for id, tag in next, descriptions do
- local sym = symbols[id]
- if sym then
- n = n + 1
- linked[tag] = n
- linked[sym] = n
+ function structurestags.setdescriptionsymbol(tag,n) -- needs checking (is tag needed)
+ local nd = structures.notes.get(tag,n) -- todo: use listdata instead
+ if nd then
+ local references = nd.references
+ symbols[references and references.internal] = locatedtag("descriptionsymbol")
end
end
-end
-function extras.description(result,element,detail,n,fulltag,di)
- local id = linked[fulltag]
- if id then
- result[#result+1] = formatters[" insert='%s'"](id) -- maybe just fulltag
+ function finalizers.descriptions(tree)
+ local n = 0
+ for id, tag in next, descriptions do
+ local sym = symbols[id]
+ if sym then
+ n = n + 1
+ linked[tag] = n
+ linked[sym] = n
+ end
+ end
end
-end
-function extras.descriptionsymbol(result,element,detail,n,fulltag,di)
- local id = linked[fulltag]
- if id then
- result[#result+1] = formatters[" insert='%s'"](id)
+ function extras.description(di,element,n,fulltag)
+ local id = linked[fulltag]
+ if id then
+ setattribute(di,"insert",id)
+ end
end
-end
-usedimages.image = { }
+ function extras.descriptionsymbol(di,element,n,fulltag)
+ local id = linked[fulltag]
+ if id then
+ setattribute(di,"insert",id)
+ end
+ end
-function structurestags.setfigure(name,page,width,height)
- usedimages.image[detailedtag("image")] = {
- name = name,
- page = page,
- width = number.todimen(width,"cm","%0.3fcm"),
- height = number.todimen(height,"cm","%0.3fcm"),
- }
end
-function extras.image(result,element,detail,n,fulltag,di)
- local data = usedimages.image[fulltag]
- if data then
- result[#result+1] = attribute("name",data.name)
- if tonumber(data.page) > 1 then
- result[#result+1] = formatters[" page='%s'"](data.page)
+-- -- todo: ignore breaks
+--
+-- function extras.verbatimline(di,element,n,fulltag)
+-- inspect(di)
+-- end
+
+do
+
+ local image = { }
+ usedimages.image = image
+
+ local f_id = formatters["%s-%s"]
+
+ function structurestags.setfigure(name,used,page,width,height)
+ local fulltag = locatedtag("image")
+ local spec = specifications[fulltag]
+ local page = tonumber(page)
+ image[fulltag] = {
+ id = f_id(spec.tagname,spec.tagindex),
+ name = name,
+ used = used,
+ page = page and page > 1 and page or nil,
+ width = todimen(width, "cm","%0.3F%s"),
+ height = todimen(height,"cm","%0.3F%s"),
+ }
+ end
+
+ function extras.image(di,element,n,fulltag)
+ local data = image[fulltag]
+ if data then
+ setattribute(di,"name",data.name)
+ setattribute(di,"page",data.page)
+ setattribute(di,"id",data.id)
+ setattribute(di,"width",data.width)
+ setattribute(di,"height",data.height)
end
- result[#result+1] = formatters[" id='%s' width='%s' height='%s'"](fulltag,data.width,data.height)
end
+
end
-local combinations = { }
+do
-function structurestags.setcombination(nx,ny)
- combinations[detailedtag("combination")] = {
- nx = nx,
- ny = ny,
- }
-end
+ local combinations = { }
+
+ function structurestags.setcombination(nx,ny)
+ combinations[locatedtag("combination")] = {
+ nx = nx,
+ ny = ny,
+ }
+ end
-function extras.combination(result,element,detail,n,fulltag,di)
- local data = combinations[fulltag]
- if data then
- result[#result+1] = formatters[" nx='%s' ny='%s'"](data.nx,data.ny)
+ function extras.combination(di,element,n,fulltag)
+ local data = combinations[fulltag]
+ if data then
+ setattribute(di,"nx",data.nx)
+ setattribute(di,"ny",data.ny)
+ end
end
+
end
-- quite some code deals with exporting references --
+-- links:
+--
+-- url :
+-- file :
+-- internal : automatic location
+-- location : named reference
+
+-- references:
+--
+-- implicit : automatic reference
+-- explicit : named reference
+
local evaluators = { }
local specials = { }
+local explicits = { }
-evaluators.inner = function(result,var)
+evaluators.inner = function(di,var)
local inner = var.inner
if inner then
- result[#result+1] = attribute("location",inner)
+ setattribute(di,"location",inner,true)
end
end
-evaluators.outer = function(result,var)
+evaluators.outer = function(di,var)
local file, url = references.checkedfileorurl(var.outer,var.outer)
if url then
- result[#result+1] = attribute("url",url)
+ setattribute(di,"url",url,true)
elseif file then
- result[#result+1] = attribute("file",file)
+ setattribute(di,"file",file,true)
end
end
-evaluators["outer with inner"] = function(result,var)
+evaluators["outer with inner"] = function(di,var)
local file = references.checkedfile(var.f)
if file then
- result[#result+1] = attribute("file",file)
+ setattribute(di,"file",file,true)
end
local inner = var.inner
if inner then
- result[#result+1] = attribute("location",inner)
+ setattribute(di,"inner",inner,true)
end
end
-evaluators.special = function(result,var)
+evaluators.special = function(di,var)
local handler = specials[var.special]
if handler then
- handler(result,var)
+ handler(di,var)
end
end
-evaluators["special outer with operation"] = evaluators.special
-evaluators["special operation"] = evaluators.special
-evaluators["special operation with arguments"] = evaluators.special
+local referencehash = { }
-function specials.url(result,var)
- local url = references.checkedurl(var.operation)
- if url then
- result[#result+1] = attribute("url",url)
- end
-end
+do
-function specials.file(result,var)
- local file = references.checkedfile(var.operation)
- if file then
- result[#result+1] = attribute("file",file)
+ evaluators["special outer with operation"] = evaluators.special
+ evaluators["special operation"] = evaluators.special
+ evaluators["special operation with arguments"] = evaluators.special
+
+ function specials.url(di,var)
+ local url = references.checkedurl(var.operation)
+ if url and url ~= "" then
+ setattribute(di,"url",url,true)
+ end
end
-end
-function specials.fileorurl(result,var)
- local file, url = references.checkedfileorurl(var.operation,var.operation)
- if url then
- result[#result+1] = attribute("url",url)
- elseif file then
- result[#result+1] = attribute("file",file)
+ function specials.file(di,var)
+ local file = references.checkedfile(var.operation)
+ if file and file ~= "" then
+ setattribute(di,"file",file,true)
+ end
end
-end
-function specials.internal(result,var)
- local internal = references.checkedurl(var.operation)
- if internal then
- result[#result+1] = formatters[" location='aut:%s'"](internal)
+ function specials.fileorurl(di,var)
+ local file, url = references.checkedfileorurl(var.operation,var.operation)
+ if url and url ~= "" then
+ setattribute(di,"url",url,true)
+ elseif file and file ~= "" then
+ setattribute(di,"file",file,true)
+ end
end
-end
-local referencehash = { }
+ function specials.internal(di,var)
+ local internal = references.checkedurl(var.operation)
+ if internal then
+ setattribute(di,"location",internal)
+ end
+ end
-local function adddestination(result,references) -- todo: specials -> exporters and then concat
- if references then
- local reference = references.reference
- if reference and reference ~= "" then
- local prefix = references.prefix
- if prefix and prefix ~= "" then
- result[#result+1] = formatters[" prefix='%s'"](prefix)
- end
- result[#result+1] = formatters[" destination='%s'"](reference)
- for i=1,#references do
- local r = references[i]
- local e = evaluators[r.kind]
- if e then
- e(result,r)
+ local function adddestination(di,references) -- todo: specials -> exporters and then concat
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local prefix = references.prefix
+ if prefix and prefix ~= "" then
+ setattribute(di,"prefix",prefix,true)
+ end
+ setattribute(di,"destination",reference,true)
+ for i=1,#references do
+ local r = references[i]
+ local e = evaluators[r.kind]
+ if e then
+ e(di,r)
+ end
end
end
end
end
-end
-local function addreference(result,references)
- if references then
- local reference = references.reference
- if reference and reference ~= "" then
- local prefix = references.prefix
- if prefix and prefix ~= "" then
- result[#result+1] = formatters[" prefix='%s'"](prefix)
+ function extras.addimplicit(di,references)
+ if references then
+ local internal = references.internal
+ if internal then
+ setattribute(di,"implicit",internal)
end
- result[#result+1] = formatters[" reference='%s'"](reference)
end
- local internal = references.internal
- if internal and internal ~= "" then
- result[#result+1] = formatters[" location='aut:%s'"](internal)
+ end
+
+ function extras.addinternal(di,references)
+ if references then
+ local internal = references.internal
+ if internal then
+ setattribute(di,"internal",internal)
+ end
end
end
-end
-function extras.link(result,element,detail,n,fulltag,di)
- -- for instance in lists a link has nested elements and no own text
- local reference = referencehash[fulltag]
- if reference then
- adddestination(result,structures.references.get(reference))
- return true
- else
- local data = di.data
- if data then
- for i=1,#data do
- local di = data[i]
- if di then
- local fulltag = di.fulltag
- if fulltag and extras.link(result,element,detail,n,fulltag,di) then
- return true
+ local p_firstpart = lpeg.Cs((1-lpeg.P(","))^0)
+
+ local function addreference(di,references)
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local prefix = references.prefix
+ if prefix and prefix ~= "" then
+ setattribute(di,"prefix",prefix)
+ end
+ setattribute(di,"reference",reference,true)
+ setattribute(di,"explicit",lpegmatch(p_firstpart,reference),true)
+ end
+ local internal = references.internal
+ if internal and internal ~= "" then
+ setattribute(di,"implicit",internal)
+ end
+ end
+ end
+
+ local function link(di,element,n,fulltag)
+ -- for instance in lists a link has nested elements and no own text
+ local reference = referencehash[fulltag]
+ if reference then
+ adddestination(di,structures.references.get(reference))
+ return true
+ else
+ local data = di.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ if di then
+ local fulltag = di.fulltag
+ if fulltag and link(di,element,n,fulltag) then
+ return true
+ end
end
end
end
end
end
+
+ extras.adddestination = adddestination
+ extras.addreference = addreference
+
+ extras.link = link
+
end
-- no settings, as these are obscure ones
-local automathrows = true directives.register("backend.export.math.autorows", function(v) automathrows = v end)
-local automathapply = true directives.register("backend.export.math.autoapply", function(v) automathapply = v end)
-local automathnumber = true directives.register("backend.export.math.autonumber", function(v) automathnumber = v end)
-local automathstrip = true directives.register("backend.export.math.autostrip", function(v) automathstrip = v end)
-
-local functions = mathematics.categories.functions
-
-local function collapse(di,i,data,ndata,detail,element)
- local collapsing = di.data
- if data then
- di.element = element
- di.detail = nil
- i = i + 1
- while i <= ndata do
- local dn = data[i]
- if dn.detail == detail then
- collapsing[#collapsing+1] = dn.data[1]
- dn.skip = "ignore"
- i = i + 1
- else
- break
+do
+
+ local automathrows = true directives.register("export.math.autorows", function(v) automathrows = v end)
+ local automathapply = true directives.register("export.math.autoapply", function(v) automathapply = v end)
+ local automathnumber = true directives.register("export.math.autonumber", function(v) automathnumber = v end)
+ local automathstrip = true directives.register("export.math.autostrip", function(v) automathstrip = v end)
+
+ local functions = mathematics.categories.functions
+
+ local function collapse(di,i,data,ndata,detail,element)
+ local collapsing = di.data
+ if data then
+ di.element = element
+ di.detail = nil
+ i = i + 1
+ while i <= ndata do
+ local dn = data[i]
+ if dn.detail == detail then
+ collapsing[#collapsing+1] = dn.data[1]
+ dn.skip = "ignore"
+ i = i + 1
+ else
+ break
+ end
end
end
+ return i
end
- return i
-end
-local function collapse_mn(di,i,data,ndata)
- local collapsing = di.data
- if data then
- i = i + 1
- while i <= ndata do
- local dn = data[i]
- local tg = dn.tg
- if tg == "mn" then
- collapsing[#collapsing+1] = dn.data[1]
- dn.skip = "ignore"
- i = i + 1
- elseif tg == "mo" then
- local d = dn.data[1]
- if d == "." then
- collapsing[#collapsing+1] = d
+ local function collapse_mn(di,i,data,ndata)
+ -- this is tricky ... we need to make sure that we wrap in mrows if we want
+ -- to bypass this one
+ local collapsing = di.data
+ if data then
+ i = i + 1
+ while i <= ndata do
+ local dn = data[i]
+ local tg = dn.tg
+ if tg == "mn" then
+ collapsing[#collapsing+1] = dn.data[1]
dn.skip = "ignore"
i = i + 1
+ elseif tg == "mo" then
+ local d = dn.data[1]
+ if d == "." then
+ collapsing[#collapsing+1] = d
+ dn.skip = "ignore"
+ i = i + 1
+ else
+ break
+ end
else
break
end
- else
- break
end
end
+ return i
end
- return i
-end
--- maybe delay __i__ till we need it
+ -- maybe delay __i__ till we need it
-local apply_function = {
- {
- element = "mo",
- -- comment = "apply function",
- -- data = { utfchar(0x2061) },
- data = { "&#x2061;" },
- nature = "mixed",
+ local apply_function = {
+ {
+ element = "mo",
+ -- comment = "apply function",
+ -- data = { utfchar(0x2061) },
+ data = { "&#x2061;" },
+ nature = "mixed",
+ }
}
-}
-local functioncontent = { }
+ local functioncontent = { }
-setmetatableindex(functioncontent,function(t,k)
- local v = { { content = k } }
- t[k] = v
- return v
-end)
+ setmetatableindex(functioncontent,function(t,k)
+ local v = { { content = k } }
+ t[k] = v
+ return v
+ end)
+
+ local dummy_nucleus = {
+ element = "mtext",
+ data = { content = "" },
+ nature = "inline",
+ comment = "dummy nucleus",
+ fulltag = "mtext>0"
+ }
-local function checkmath(root) -- we can provide utf.toentities as an option
- local data = root.data
- if data then
- local ndata = #data
- local roottg = root.tg
- if roottg == "msubsup" then
- local nucleus, superscript, subscript
- for i=1,ndata do
- local di = data[i]
- if not di then
- -- weird
- elseif di.content then
- -- text
- elseif not nucleus then
- nucleus = i
- elseif not superscript then
- superscript = i
- elseif not subscript then
- subscript = i
- else
- -- error
+ local function accentchar(d)
+ for i=1,3 do
+ d = d.data
+ if not d then
+ return
+ end
+ d = d[1]
+ if not d then
+ return
+ end
+ local tg = d.tg
+ if tg == "mover" then
+ local s = specifications[d.fulltag]
+ local t = s.top
+ if t then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(t)
+ d.data = { d1 }
+ return d
+ end
+ elseif tg == "munder" then
+ local s = specifications[d.fulltag]
+ local b = s.bottom
+ if b then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(b)
+ d.data = { d1 }
+ return d
end
end
- if superscript and subscript then
- local sup, sub = data[superscript], data[subscript]
- data[superscript], data[subscript] = sub, sup
- -- sub.__o__, sup.__o__ = subscript, superscript
- sub.__i__, sup.__i__ = superscript, subscript
- end
- elseif roottg == "mfenced" then
- local new, n = { }, 0
- local attributes = { }
- root.attributes = attributes
- for i=1,ndata do
- local di = data[i]
- if not di then
- -- weird
- elseif di.content then
- n = n + 1
- new[n] = di
- else
- local tg = di.tg
- if tg == "mleft" then
- attributes.left = tostring(di.data[1].data[1].content)
- elseif tg == "mmiddle" then
- attributes.middle = tostring(di.data[1].data[1].content)
- elseif tg == "mright" then
- attributes.right = tostring(di.data[1].data[1].content)
+ end
+ end
+
+ local no_mrow = {
+ mrow = true,
+ mfenced = true,
+ mfrac = true,
+ mroot = true,
+ msqrt = true,
+ mi = true,
+ mo = true,
+ mn = true,
+ }
+
+ local function checkmath(root) -- we can provide utf.toentities as an option
+ local data = root.data
+ if data then
+ local ndata = #data
+ local roottg = root.tg
+ if roottg == "msubsup" then
+ local nucleus, superscript, subscript
+ for i=1,ndata do
+ local di = data[i]
+ if not di then
+ -- weird
+ elseif di.content then
+ -- text
+ elseif not nucleus then
+ nucleus = i
+ elseif not superscript then
+ superscript = i
+ elseif not subscript then
+ subscript = i
else
- n = n + 1
- di.__i__ = n
- new[n] = di
+ -- error
end
end
+ if superscript and subscript then
+ local sup, sub = data[superscript], data[subscript]
+ data[superscript], data[subscript] = sub, sup
+ -- sub.__o__, sup.__o__ = subscript, superscript
+ sub.__i__, sup.__i__ = superscript, subscript
+ end
+-- elseif roottg == "msup" or roottg == "msub" then
+-- -- m$^2$
+-- if ndata == 1 then
+-- local d = data[1]
+-- data[2] = d
+-- d.__i__ = 2
+-- data[1] = dummy_nucleus
+-- end
+ elseif roottg == "mfenced" then
+ local s = specifications[root.fulltag]
+ local l, m, r = s.left, s.middle, s.right
+ if l then
+ l = utfchar(l)
+ end
+ if m then
+ local t = { }
+ for i=1,#m do
+ t[i] = utfchar(m[i])
+ end
+ m = concat(t)
+ end
+ if r then
+ r = utfchar(r)
+ end
+ root.attributes = {
+ open = l,
+ separators = m,
+ close = r,
+ }
end
- root.data = new
- ndata = n
- end
- if ndata == 0 then
- return
- elseif ndata == 1 then
- local d = data[1]
- if not d then
+ if ndata == 0 then
return
- elseif d.content then
- return
- elseif #root.data == 1 then
- local tg = d.tg
- if automathrows and roottg == "mrow" then
- -- maybe just always ! check spec first
- if tg == "mrow" or tg == "mfenced" or tg == "mfrac" or tg == "mroot" or tg == "msqrt"then
- root.skip = "comment"
- elseif tg == "mo" then
- root.skip = "comment"
- end
- elseif roottg == "mo" then
- if tg == "mo" then
- root.skip = "comment"
+ elseif ndata == 1 then
+ local d = data[1]
+ if not d then
+ return
+ elseif d.content then
+ return
+ elseif #root.data == 1 then
+ local tg = d.tg
+ if automathrows and roottg == "mrow" then
+ -- maybe just always ! check spec first
+ if no_mrow[tg] then
+ root.skip = "comment"
+ end
+ elseif roottg == "mo" then
+ if tg == "mo" then
+ root.skip = "comment"
+ end
end
end
end
- end
- local i = 1
- while i <= ndata do -- -- -- TOO MUCH NESTED CHECKING -- -- --
- local di = data[i]
- if di and not di.content then
- local tg = di.tg
- local detail = di.detail
- if tg == "math" then
- -- di.element = "mrow" -- when properties
- di.skip = "comment"
- checkmath(di)
- i = i + 1
- elseif tg == "mover" or tg == "munder" or tg == "munderover" then
- if detail == "accent" then
- di.attributes = { accent = "true" }
- di.detail = nil
- end
- checkmath(di)
- i = i + 1
- elseif tg == "mroot" then
- if #di.data == 1 then
- -- else firefox complains
- di.element = "msqrt"
- end
- checkmath(di)
- i = i + 1
- elseif tg == "break" then
- di.skip = "comment"
- i = i + 1
- elseif tg == "mrow" and detail then
- di.detail = nil
- checkmath(di)
- di = {
- element = "maction",
- nature = "display",
- attributes = { actiontype = detail },
- data = { di },
- n = 0,
- }
- data[i] = di
- i = i + 1
- elseif detail then
- -- no checkmath(di) here
- local category = tonumber(detail) or 0
- if category == 1 then -- mo
- i = collapse(di,i,data,ndata,detail,"mo")
- elseif category == 2 then -- mi
- i = collapse(di,i,data,ndata,detail,"mi")
- elseif category == 3 then -- mn
- i = collapse(di,i,data,ndata,detail,"mn")
- elseif category == 4 then -- ms
- i = collapse(di,i,data,ndata,detail,"ms")
- elseif category >= 1000 then
- local apply = category >= 2000
- if apply then
- category = category - 1000
+ local i = 1
+ while i <= ndata do -- -- -- TOO MUCH NESTED CHECKING -- -- --
+ local di = data[i]
+ if di and not di.content then
+ local tg = di.tg
+ local detail = di.detail -- still referenced by the mrow/maction and collapse branches below
+ if tg == "math" then
+ -- di.element = "mrow" -- when properties
+ di.skip = "comment"
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mover" then
+ local s = specifications[di.fulltag]
+ if s.accent then
+ local t = s.top
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.topfixed
+ if t then
+ -- mover
+ d[1].data[1].content = utfchar(t)
+ di.data = { d[2], d[1] }
+ end
+ else
+ -- can't happen
end
- if tg == "mi" then -- function
- if roottg == "mrow" then
- root.skip = "comment"
- root.element = "function"
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munder" then
+ local s = specifications[di.fulltag]
+ if s.accent then
+ local b = s.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.bottomfixed
+ if b then
+ -- munder
+ d[2].data[1].content = utfchar(b)
end
- i = collapse(di,i,data,ndata,detail,"mi")
- local tag = functions[category]
- if tag then
- di.data = functioncontent[tag]
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munderover" then
+ local s = specifications[di.fulltag]
+ if s.accent then
+ local t = s.top
+ local b = s.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ -- todo: accentunder = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ accentunder = "true",
+ }
+ -- todo: p.topfixed
+ -- todo: p.bottomfixed
+ if t and b then
+ -- munderover
+ d[1].data[1].content = utfchar(t)
+ d[3].data[1].content = utfchar(b)
+ di.data = { d[2], d[3], d[1] }
+ else
+ -- can't happen
end
- if apply then
- di.after = apply_function
- elseif automathapply then -- make function
- local following
- if i <= ndata then
- -- normally not the case
- following = data[i]
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mstacker" then
+ local d = di.data
+ local d1 = d[1]
+ local d2 = d[2]
+ local d3 = d[3]
+ local t1 = d1 and d1.tg
+ local t2 = d2 and d2.tg
+ local t3 = d3 and d3.tg
+ local m = nil -- d1.data[1]
+ local t = nil
+ local b = nil
+ -- only accent when top / bot have stretch
+ -- normally we flush [base under over] which is better for tagged pdf
+ if t1 == "mstackermid" then
+ m = accentchar(d1) -- or m
+ if t2 == "mstackertop" then
+ if t3 == "mstackerbot" then
+ t = accentchar(d2)
+ b = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d3.data[1], t or d2.data[1] }
else
- local parent = di.__p__ -- == root
- if parent.tg == "mrow" then
- parent = parent.__p__
- end
- local index = parent.__i__
- following = parent.data[index+1]
+ t = accentchar(d2)
+ di.element = "mover"
+ di.data = { m or d1.data[1], t or d2.data[1] }
+ end
+ elseif t2 == "mstackerbot" then
+ if t3 == "mstackertop" then
+ b = accentchar(d2)
+ t = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d2.data[1], t or d3.data[1] } -- base, under, over
+ else
+ b = accentchar(d2)
+ di.element = "munder"
+ di.data = { m or d1.data[1], b or d2.data[1] }
end
- if following then
- local tg = following.tg
- if tg == "mrow" or tg == "mfenced" then -- we need to figure out the right condition
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ if t or b then
+ di.attributes = {
+ accent = t and "true" or nil,
+ accentunder = b and "true" or nil,
+ }
+ di.detail = nil
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mroot" then
+ local data = di.data
+ local size = #data
+ if size == 1 then
+ -- else firefox complains ... code in math-tag (for pdf tagging)
+ di.element = "msqrt"
+ elseif size == 2 then
+ data[1], data[2] = data[2], data[1]
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "break" then
+ di.skip = "comment"
+ i = i + 1
+ elseif tg == "mtext" then
+ -- this is only needed for unboxed mtexts ... all kinds of special
+ -- tex border cases and optimizations ... trial and error
+ local data = di.data
+ if #data > 1 then
+ for i=1,#data do
+ local di = data[i]
+ local content = di.content
+ if content then
+ data[i] = {
+ element = "mtext",
+ nature = "inline",
+ data = { di },
+ n = 0,
+ }
+ elseif di.tg == "math" then
+ local di = di.data[1]
+ data[i] = di
+ checkmath(di)
+ end
+ end
+ di.element = "mrow"
+ -- di.tg = "mrow"
+ -- di.nature = "inline"
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mrow" and detail then -- hm, falls through
+ di.detail = nil
+ checkmath(di)
+ di = {
+ element = "maction",
+ nature = "display",
+ attributes = { actiontype = detail },
+ data = { di },
+ n = 0,
+ }
+ data[i] = di
+ i = i + 1
+ else
+ local category = di.mathcategory
+ if category then
+ -- no checkmath(di) here
+ if category == 1 then -- mo
+ i = collapse(di,i,data,ndata,detail,"mo")
+ elseif category == 2 then -- mi
+ i = collapse(di,i,data,ndata,detail,"mi")
+ elseif category == 3 then -- mn
+ i = collapse(di,i,data,ndata,detail,"mn")
+ elseif category == 4 then -- ms
+ i = collapse(di,i,data,ndata,detail,"ms")
+ elseif category >= 1000 then
+ local apply = category >= 2000
+ if apply then
+ category = category - 1000
+ end
+ if tg == "mi" then -- function
+ if roottg == "mrow" then
+ root.skip = "comment"
+ root.element = "function"
+ end
+ i = collapse(di,i,data,ndata,detail,"mi")
+ local tag = functions[category]
+ if tag then
+ di.data = functioncontent[tag]
+ end
+ if apply then
di.after = apply_function
+ elseif automathapply then -- make function
+ local following
+ if i <= ndata then
+ -- normally not the case
+ following = data[i]
+ else
+ local parent = di.__p__ -- == root
+ if parent.tg == "mrow" then
+ parent = parent.__p__
+ end
+ local index = parent.__i__
+ following = parent.data[index+1]
+ end
+ if following then
+ local tg = following.tg
+ if tg == "mrow" or tg == "mfenced" then -- we need to figure out the right condition
+ di.after = apply_function
+ end
+ end
end
+ else -- some problem
+ checkmath(di)
+ i = i + 1
end
+ else
+ checkmath(di)
+ i = i + 1
end
- else -- some problem
+ elseif automathnumber and tg == "mn" then
+ checkmath(di)
+ i = collapse_mn(di,i,data,ndata)
+ else
checkmath(di)
i = i + 1
end
- else
- checkmath(di)
- i = i + 1
end
- elseif automathnumber and tg == "mn" then
- checkmath(di)
- i = collapse_mn(di,i,data,ndata)
- else
- checkmath(di)
+ else -- can be string or boolean
+ if parenttg ~= "mtext" and di == " " then
+ data[i] = false
+ end
i = i + 1
end
- else -- can be string or boolean
- if parenttg ~= "mtext" and di == " " then
- data[i] = false
- end
- i = i + 1
end
end
end
-end
-function stripmath(di)
- if not di then
- --
- elseif di.content then
- return di
- else
- local tg = di.tg
- if tg == "mtext" or tg == "ms" then
+ local function stripmath(di)
+ if not di then
+ --
+ elseif di.content then
return di
else
- local data = di.data
- local ndata = #data
- local n = 0
- for i=1,ndata do
- local di = data[i]
- if di and not di.content then
- di = stripmath(di)
- end
- if di then
- local content = di.content
- if not content then
- n = n + 1
- di.__i__ = n
- data[n] = di
- elseif content == " " or content == "" then
- -- skip
- else
- n = n + 1
- data[n] = di
+ local tg = di.tg
+ if tg == "mtext" or tg == "ms" then
+ return di
+ else
+ local data = di.data
+ local ndata = #data
+ local n = 0
+ for i=1,ndata do
+ local d = data[i]
+ if d and not d.content then
+ d = stripmath(d)
+ end
+ if d then
+ local content = d.content
+ if not content then
+ n = n + 1
+ d.__i__ = n
+ data[n] = d
+ elseif content == " " or content == "" then
+ if di.tg == "mspace" then
+ -- we append or prepend a space to a preceding or following mtext
+ local parent = di.__p__
+ local index = di.__i__ -- == i
+ local data = parent.data
+ if index > 1 then
+ local d = data[index-1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[#dd]
+ local dc = dn.content
+ if dc then
+ dn.content = dc .. content
+ end
+ end
+ elseif index < ndata then
+ local d = data[index+1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[1]
+ local dc = dn.content
+ if dc then
+ dn.content = content .. dc
+ end
+ end
+ end
+ end
+ else
+ n = n + 1
+ data[n] = d
+ end
end
end
+ for i=ndata,n+1,-1 do
+ data[i] = nil
+ end
+ if #data > 0 then
+ return di
+ end
+-- end
end
- for i=ndata,n+1,-1 do
- data[i] = nil
- end
- if #data > 0 then
- return di
- end
+ -- could be integrated but is messy then
+-- while roottg == "mrow" and #data == 1 do
+-- data = data[1]
+-- for k, v in next, data do
+-- root[k] = v
+-- end
+-- roottg = data.tg
+-- end
+ end
+ end
+
+ function checks.math(di)
+ local specification = specifications[di.fulltag]
+ local mode = specification and specification.mode == "display" and "block" or "inline"
+ di.attributes = {
+ ["display"] = mode,
+ ["xmlns:m"] = mathmlns,
+ }
+ -- can be option if needed:
+ if mode == "inline" then
+ -- di.nature = "mixed" -- else spacing problem (maybe inline)
+ di.nature = "inline" -- we need to catch x$X$x and x $X$ x
+ else
+ di.nature = "display"
end
+ if automathstrip then
+ stripmath(di)
+ end
+ checkmath(di)
end
-end
-
-function checks.math(di)
- local hash = attributehash[di.fulltag]
- local mode = (hash and hash.mode) == "display" and "block" or "inline"
- di.attributes = {
- display = mode
- }
- -- can be option if needed:
- if mode == "inline" then
- di.nature = "mixed" -- else spacing problem (maybe inline)
- else
- di.nature = "display"
- end
- if automathstrip then
- stripmath(di)
- end
- checkmath(di)
-end
-local a, z, A, Z = 0x61, 0x7A, 0x41, 0x5A
+ local a, z, A, Z = 0x61, 0x7A, 0x41, 0x5A
-function extras.mi(result,element,detail,n,fulltag,di) -- check with content
- local str = di.data[1].content
- if str and sub(str,1,1) ~= "&" then -- hack but good enough (maybe gsub op eerste)
- for v in utfvalues(str) do
- if (v >= a and v <= z) or (v >= A and v <= Z) then
- local a = di.attributes
- if a then
- a.mathvariant = "normal"
- else
- di.attributes = { mathvariant = "normal" }
+ function extras.mi(di,element,n,fulltag) -- check with content
+ local str = di.data[1].content
+ if str and sub(str,1,1) ~= "&" then -- hack but good enough (maybe gsub on the first one)
+ for v in utfvalues(str) do
+ if (v >= a and v <= z) or (v >= A and v <= Z) then
+ local a = di.attributes
+ if a then
+ a.mathvariant = "normal"
+ else
+ di.attributes = { mathvariant = "normal" }
+ end
end
end
end
end
-end
-function extras.section(result,element,detail,n,fulltag,di)
- local data = listdata[fulltag]
- if data then
- addreference(result,data.references)
- return true
- else
+ function extras.msub(di,element,n,fulltag)
+ -- m$^2$
local data = di.data
- if data then
- for i=1,#data do
- local di = data[i]
- if di then
- local ft = di.fulltag
- if ft and extras.section(result,element,detail,n,ft,di) then
- return true
- end
- end
- end
+ if #data == 1 then
+ local d = data[1]
+ data[2] = d
+ d.__i__ = 2
+ data[1] = dummy_nucleus
end
end
+
+ extras.msup = extras.msub
+
end
-function extras.float(result,element,detail,n,fulltag,di)
- local data = listdata[fulltag]
- if data then
- addreference(result,data.references)
- return true
- else
- local data = di.data
+do
+
+ local registered = structures.sections.registered
+
+ local function resolve(di,element,n,fulltag)
+ local data = listdata[fulltag]
if data then
- for i=1,#data do
- local di = data[i]
- if di and extras.section(result,element,detail,n,di.fulltag,di) then
- return true
+ extras.addreference(di,data.references)
+ return true
+ else
+ local data = di.data
+ if data then
+ for i=1,#data do
+ local di = data[i]
+ if di then
+ local ft = di.fulltag
+ if ft and resolve(di,element,n,ft) then
+ return true
+ end
+ end
end
end
end
end
-end
-local tabledata = { }
-
-function structurestags.settablecell(rows,columns,align)
- if align > 0 or rows > 1 or columns > 1 then
- tabledata[detailedtag("tablecell")] = {
- rows = rows,
- columns = columns,
- align = align,
- }
+ function extras.section(di,element,n,fulltag)
+ local r = registered[specifications[fulltag].detail]
+ if r then
+ setattribute(di,"level",r.level)
+ end
+ resolve(di,element,n,fulltag)
end
-end
-function extras.tablecell(result,element,detail,n,fulltag,di)
- local hash = tabledata[fulltag]
- if hash then
- local v = hash.columns
- if v and v > 1 then
- result[#result+1] = formatters[" columns='%s'"](v)
- end
- local v = hash.rows
- if v and v > 1 then
- result[#result+1] = formatters[" rows='%s'"](v)
+ extras.float = resolve
+
+ -- todo: internal is already hashed
+
+ function structurestags.setlist(n)
+ local data = structures.lists.getresult(n)
+ if data then
+ referencehash[locatedtag("listitem")] = data
end
- local v = hash.align
- if not v or v == 0 then
- -- normal
- elseif v == 1 then -- use numbertoalign here
- result[#result+1] = " align='flushright'"
- elseif v == 2 then
- result[#result+1] = " align='middle'"
- elseif v == 3 then
- result[#result+1] = " align='flushleft'"
+ end
+
+ function extras.listitem(di,element,n,fulltag)
+ local data = referencehash[fulltag]
+ if data then
+ extras.addinternal(di,data.references)
+ return true
end
end
+
end
-local tabulatedata = { }
+do
-function structurestags.settabulatecell(align)
- if align > 0 then
- tabulatedata[detailedtag("tabulatecell")] = {
- align = align,
- }
- end
-end
+ -- todo: internal is already hashed
-function extras.tabulate(result,element,detail,n,fulltag,di)
- local data = di.data
- for i=1,#data do
- local di = data[i]
- if di.tg == "tabulaterow" then
- local did = di.data
- local content = false
- for i=1,#did do
- local d = did[i].data
- if d and #d > 0 and d[1].content then
- content = true
- break
- end
- end
- if not content then
- di.element = "" -- or simply remove
- end
+ function structurestags.setregister(tag,n) -- check if tag is needed
+ local data = structures.registers.get(tag,n)
+ if data then
+ referencehash[locatedtag("registerlocation")] = data
end
end
-end
-function extras.tabulatecell(result,element,detail,n,fulltag,di)
- local hash = tabulatedata[fulltag]
- if hash then
- local v = hash.align
- if not v or v == 0 then
- -- normal
- elseif v == 1 then
- result[#result+1] = " align='flushleft'"
- elseif v == 2 then
- result[#result+1] = " align='flushright'"
- elseif v == 3 then
- result[#result+1] = " align='middle'"
+ function extras.registerlocation(di,element,n,fulltag)
+ local data = referencehash[fulltag]
+ if data then
+ extras.addinternal(di,data.references)
+ return true
end
end
-end
--- flusher
+ extras.registerpages = ignorebreaks
+ extras.registerseparator = ignorespaces
-local linedone = false -- can go ... we strip newlines anyway
-local inlinedepth = 0
+end
--- todo: #result -> nofresult
+do
-local function emptytag(result,element,nature,depth,di) -- currently only break but at some point
- local a = di.attributes -- we might add detail etc
- if a then -- happens seldom
- if linedone then
- result[#result+1] = formatters["%w<%s"](depth,namespaced[element])
- else
- result[#result+1] = formatters["\n%w<%s"](depth,namespaced[element])
- end
- for k, v in next, a do
- result[#result+1] = formatters[" %s=%q"](k,v)
- end
- result[#result+1] = "/>\n"
- else
- if linedone then
- result[#result+1] = formatters["%w<%s/>\n"](depth,namespaced[element])
- else
- result[#result+1] = formatters["\n%w<%s/>\n"](depth,namespaced[element])
+ local tabledata = { }
+
+ local function hascontent(data)
+ for i=1,#data do
+ local di = data[i]
+ if not di then
+ --
+ elseif di.content then
+ return true
+ else
+ local d = di.data
+ if d and #d > 0 and hascontent(d) then
+ return true
+ end
+ end
end
end
- linedone = false
-end
-local function begintag(result,element,nature,depth,di,skip)
- -- if needed we can use a local result with xresult
- local detail = di.detail
- local n = di.n
- local fulltag = di.fulltag
- local comment = di.comment
- if nature == "inline" then
- linedone = false
- inlinedepth = inlinedepth + 1
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->"](comment)
- end
- elseif nature == "mixed" then
- if inlinedepth > 0 then
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->"](comment)
- end
- elseif linedone then
- result[#result+1] = spaces[depth]
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->"](comment)
- end
- else
- result[#result+1] = formatters["\n%w"](depth)
- linedone = false
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->\n%w"](comment,depth)
- end
+ function structurestags.settablecell(rows,columns,align)
+ if align > 0 or rows > 1 or columns > 1 then
+ tabledata[locatedtag("tablecell")] = {
+ rows = rows,
+ columns = columns,
+ align = align,
+ }
end
- inlinedepth = inlinedepth + 1
- else
- if inlinedepth > 0 then
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->"](comment)
+ end
+
+ function extras.tablecell(di,element,n,fulltag)
+ local hash = tabledata[fulltag]
+ if hash then
+ local columns = hash.columns
+ if columns and columns > 1 then
+ setattribute(di,"columns",columns)
end
- elseif linedone then
- result[#result+1] = spaces[depth]
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->"](comment)
+ local rows = hash.rows
+ if rows and rows > 1 then
+ setattribute(di,"rows",rows)
end
- else
- result[#result+1] = formatters["\n%w"](depth) -- can introduced extra line in mixed+mixed (filtered later on)
- linedone = false
- if show_comment and comment then
- result[#result+1] = formatters["<!-- %s -->\n%w"](comment,depth)
+ local align = hash.align
+ if not align or align == 0 then
+ -- normal
+ elseif align == 1 then -- use numbertoalign here
+ setattribute(di,"align","flushright")
+ elseif align == 2 then
+ setattribute(di,"align","middle")
+ elseif align == 3 then
+ setattribute(di,"align","flushleft")
end
end
end
- if skip == "comment" then
- if show_comment then
- result[#result+1] = formatters["<!-- begin %s -->"](namespaced[element])
- end
- elseif skip then
- -- ignore
- else
- result[#result+1] = formatters["<%s"](namespaced[element])
- if detail then
- result[#result+1] = formatters[" detail=%q"](detail)
- end
- if indexing and n then
- result[#result+1] = formatters[" n=%q"](n)
- end
- local extra = extras[element]
- if extra then
- extra(result,element,detail,n,fulltag,di)
+
+ local tabulatedata = { }
+
+ function structurestags.settabulatecell(align)
+ if align > 0 then
+ tabulatedata[locatedtag("tabulatecell")] = {
+ align = align,
+ }
end
- local u = userdata[fulltag]
- if u then
- for k, v in next, u do
- result[#result+1] = formatters[" %s=%q"](k,v)
+ end
+
+ function extras.tabulate(di,element,n,fulltag)
+ local data = di.data
+ for i=1,#data do
+ local di = data[i]
+ if di.tg == "tabulaterow" and not hascontent(di.data) then
+ di.element = "" -- or simply remove
end
end
- local a = di.attributes
- if a then
- for k, v in next, a do
- result[#result+1] = formatters[" %s=%q"](k,v)
+ end
+
+ function extras.tabulatecell(di,element,n,fulltag)
+ local hash = tabulatedata[fulltag]
+ if hash then
+ local align = hash.align
+ if not align or align == 0 then
+ -- normal
+ elseif align == 1 then
+ setattribute(di,"align","flushleft")
+ elseif align == 2 then
+ setattribute(di,"align","flushright")
+ elseif align == 3 then
+ setattribute(di,"align","middle")
end
end
- result[#result+1] = ">"
end
- if inlinedepth > 0 then
- elseif nature == "display" then
- result[#result+1] = "\n"
- linedone = true
- end
- used[element][detail or ""] = nature -- for template css
- local metadata = tagmetadata[fulltag]
- if metadata then
- if not linedone then
- result[#result+1] = "\n"
- linedone = true
- end
- result[#result+1] = formatters["%w<metadata>\n"](depth)
- for k, v in table.sortedpairs(metadata) do
- v = entityremapper(v)
- result[#result+1] = formatters["%w<metavariable name=%q>%s</metavariable>\n"](depth+1,k,v)
+
+end
+
+-- flusher
+
+do
+
+ local f_detail = formatters[' detail="%s"']
+ local f_chain = formatters[' chain="%s"']
+ local f_index = formatters[' n="%s"']
+ local f_spacing = formatters['<c n="%s">%s</c>']
+
+ local f_empty_inline = formatters["<%s/>"]
+ local f_empty_mixed = formatters["%w<%s/>\n"]
+ local f_empty_display = formatters["\n%w<%s/>\n"]
+ local f_empty_inline_attr = formatters["<%s%s/>"]
+ local f_empty_mixed_attr = formatters["%w<%s%s/>"]
+ local f_empty_display_attr = formatters["\n%w<%s%s/>\n"]
+
+ local f_begin_inline = formatters["<%s>"]
+ local f_begin_mixed = formatters["%w<%s>"]
+ local f_begin_display = formatters["\n%w<%s>\n"]
+ local f_begin_inline_attr = formatters["<%s%s>"]
+ local f_begin_mixed_attr = formatters["%w<%s%s>"]
+ local f_begin_display_attr = formatters["\n%w<%s%s>\n"]
+
+ local f_end_inline = formatters["</%s>"]
+ local f_end_mixed = formatters["</%s>\n"]
+ local f_end_display = formatters["%w</%s>\n"]
+
+ local f_begin_inline_comment = formatters["<!-- %s --><%s>"]
+ local f_begin_mixed_comment = formatters["%w<!-- %s --><%s>"]
+ local f_begin_display_comment = formatters["\n%w<!-- %s -->\n%w<%s>\n"]
+ local f_begin_inline_attr_comment = formatters["<!-- %s --><%s%s>"]
+ local f_begin_mixed_attr_comment = formatters["%w<!-- %s --><%s%s>"]
+ local f_begin_display_attr_comment = formatters["\n%w<!-- %s -->\n%w<%s%s>\n"]
+
+ local f_comment_begin_inline = formatters["<!-- begin %s -->"]
+ local f_comment_begin_mixed = formatters["%w<!-- begin %s -->"]
+ local f_comment_begin_display = formatters["\n%w<!-- begin %s -->\n"]
+
+ local f_comment_end_inline = formatters["<!-- end %s -->"]
+ local f_comment_end_mixed = formatters["<!-- end %s -->\n"]
+ local f_comment_end_display = formatters["%w<!-- end %s -->\n"]
+
+ local f_metadata_begin = formatters["\n%w<metadata>\n"]
+ local f_metadata = formatters["%w<metavariable name=%q>%s</metavariable>\n"]
+ local f_metadata_end = formatters["%w</metadata>\n"]
+
+ --- we could share the r tables ... but it's fast enough anyway
+
+ local function attributes(a)
+ local r = { } -- can be shared
+ local n = 0
+ for k, v in next, a do
+ n = n + 1
+ r[n] = f_attribute(k,v) -- lpegmatch(p_escaped,v)
end
- result[#result+1] = formatters["%w</metadata>\n"](depth)
+ return concat(r,"",1,n)
end
-end
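+
+ -- turns an attribute table into a single string of key="value" pairs (via
+ -- the f_attribute formatter defined elsewhere in this file), so that e.g.
+ -- { accent = "true" } presumably yields ' accent="true"', ready to splice
+ -- into a start tag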
-local function endtag(result,element,nature,depth,skip)
- if nature == "display" then
- if inlinedepth == 0 then
- if not linedone then
- result[#result+1] = "\n"
- end
- if skip == "comment" then
- if show_comment then
- result[#result+1] = formatters["%w<!-- end %s -->\n"](depth,namespaced[element])
- end
- elseif skip then
- -- ignore
+ local depth = 0
+ local inline = 0
+
+ local function bpar(result)
+ result[#result+1] = "\n<p>"
+ end
+ local function epar(result)
+ result[#result+1] = "</p>\n"
+ end
+
+ local function emptytag(result,embedded,element,nature,di) -- currently only break but at some point
+ local a = di.attributes -- we might add detail etc
+ if a then -- happens seldom
+ if nature == "display" then
+ result[#result+1] = f_empty_display_attr(depth,namespaced[element],attributes(a))
+ elseif nature == "mixed" then
+ result[#result+1] = f_empty_mixed_attr(depth,namespaced[element],attributes(a))
else
- result[#result+1] = formatters["%w</%s>\n"](depth,namespaced[element])
+ result[#result+1] = f_empty_inline_attr(namespaced[element],attributes(a))
end
- linedone = true
else
- if skip == "comment" then
- if show_comment then
- result[#result+1] = formatters["<!-- end %s -->"](namespaced[element])
- end
- elseif skip then
- -- ignore
+ if nature == "display" then
+ result[#result+1] = f_empty_display(depth,namespaced[element])
+ elseif nature == "mixed" then
+ result[#result+1] = f_empty_mixed(depth,namespaced[element])
else
- result[#result+1] = formatters["</%s>"](namespaced[element])
+ result[#result+1] = f_empty_inline(namespaced[element])
end
end
- else
- inlinedepth = inlinedepth - 1
+ end
+
+ local function begintag(result,embedded,element,nature,di,skip)
+ local index = di.n
+ local fulltag = di.fulltag
+ local specification = specifications[fulltag] or { } -- we can have a dummy
+ local comment = di.comment
+ local detail = specification.detail
if skip == "comment" then
if show_comment then
- result[#result+1] = formatters["<!-- end %s -->"](namespaced[element])
+ if nature == "inline" or inline > 0 then
+ result[#result+1] = f_comment_begin_inline(namespaced[element])
+ inline = inline + 1
+ elseif nature == "mixed" then
+ result[#result+1] = f_comment_begin_mixed(depth,namespaced[element])
+ depth = depth + 1
+ inline = 1
+ else
+ result[#result+1] = f_comment_begin_display(depth,namespaced[element])
+ depth = depth + 1
+ end
end
elseif skip then
-- ignore
else
- result[#result+1] = formatters["</%s>"](namespaced[element])
- end
- linedone = false
- end
-end
-local function flushtree(result,data,nature,depth)
- depth = depth + 1
- local nofdata = #data
- for i=1,nofdata do
- local di = data[i]
- if not di then -- hm, di can be string
- -- whatever
- elseif di.content then
- -- already has breaks
- local content = entityremapper(di.content)
- if i == nofdata and sub(content,-1) == "\n" then -- move check
- -- can be an end of line in par but can also be the last line
- if trace_spacing then
- result[#result+1] = formatters["<c n='%s'>%s</c>"](di.parnumber or 0,sub(content,1,-2))
+ -- if embedded then
+ -- if element == "math" then
+ -- embedded[f_tagid(element,index)] = #result+1
+ -- end
+ -- end
+
+ local n = 0
+ local r = { } -- delay this
+ if detail then
+ detail = gsub(detail,"[^A-Za-z0-9]+","-")
+ specification.detail = detail -- we use it later on for the div
+ n = n + 1
+ r[n] = f_detail(detail)
+ end
+ local parents = specification.parents
+ if parents then
+ parents = gsub(parents,"[^A-Za-z0-9 ]+","-")
+ specification.parents = parents -- we use it later on for the div
+ n = n + 1
+ r[n] = f_chain(parents)
+ end
+ if indexing and index then
+ n = n + 1
+ r[n] = f_index(index)
+ end
+ local extra = extras[element]
+ if extra then
+ extra(di,element,index,fulltag)
+ end
+ if exportproperties then
+ local p = specification.userdata
+ if not p then
+ -- skip
+ elseif exportproperties == v_yes then
+ for k, v in next, p do
+ n = n + 1
+ r[n] = f_attribute(k,v)
+ end
else
- result[#result+1] = sub(content,1,-2)
+ for k, v in next, p do
+ n = n + 1
+ r[n] = f_property(exportproperties,k,v)
+ end
end
- result[#result+1] = " "
- else
- if trace_spacing then
- result[#result+1] = formatters["<c n='%s'>%s</c>"](di.parnumber or 0,content)
- else
- result[#result+1] = content
+ end
+ local a = di.attributes
+ if a then
+ for k, v in next, a do
+ n = n + 1
+ r[n] = f_attribute(k,v)
end
end
- linedone = false
- elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
- local element = di.element
- if not element then
- -- skip
- elseif element == "break" then -- or element == "pagebreak"
- emptytag(result,element,nature,depth,di)
- elseif element == "" or di.skip == "ignore" then
- -- skip
- else
- if di.before then
- flushtree(result,di.before,nature,depth)
+ if n == 0 then
+ if nature == "inline" or inline > 0 then
+ if show_comment and comment then
+ result[#result+1] = f_begin_inline_comment(comment,namespaced[element])
+ else
+ result[#result+1] = f_begin_inline(namespaced[element])
+ end
+ inline = inline + 1
+ elseif nature == "mixed" then
+ if show_comment and comment then
+ result[#result+1] = f_begin_mixed_comment(depth,comment,namespaced[element])
+ else
+ result[#result+1] = f_begin_mixed(depth,namespaced[element])
+ end
+ depth = depth + 1
+ inline = 1
+ else
+ if show_comment and comment then
+ result[#result+1] = f_begin_display_comment(depth,comment,depth,namespaced[element])
+ else
+ result[#result+1] = f_begin_display(depth,namespaced[element])
+ end
+ depth = depth + 1
end
- local natu = di.nature
- local skip = di.skip
- if di.breaknode then
- emptytag(result,"break","display",depth,di)
+ else
+ r = concat(r,"",1,n)
+ if nature == "inline" or inline > 0 then
+ if show_comment and comment then
+ result[#result+1] = f_begin_inline_attr_comment(comment,namespaced[element],r)
+ else
+ result[#result+1] = f_begin_inline_attr(namespaced[element],r)
+ end
+ inline = inline + 1
+ elseif nature == "mixed" then
+ if show_comment and comment then
+ result[#result+1] = f_begin_mixed_attr_comment(depth,comment,namespaced[element],r)
+ else
+ result[#result+1] = f_begin_mixed_attr(depth,namespaced[element],r)
+ end
+ depth = depth + 1
+ inline = 1
+ else
+ if show_comment and comment then
+ result[#result+1] = f_begin_display_attr_comment(depth,comment,depth,namespaced[element],r)
+ else
+ result[#result+1] = f_begin_display_attr(depth,namespaced[element],r)
+ end
+ depth = depth + 1
end
- begintag(result,element,natu,depth,di,skip)
- flushtree(result,di.data,natu,depth)
- -- if sub(result[#result],-1) == " " and natu ~= "inline" then
- -- result[#result] = sub(result[#result],1,-2)
- -- end
- endtag(result,element,natu,depth,skip)
- if di.after then
- flushtree(result,di.after,nature,depth)
+ end
+ end
+ used[element][detail or ""] = { nature, specification.parents } -- for template css
+ -- also in last else ?
+ local metadata = specification.metadata
+ if metadata then
+ result[#result+1] = f_metadata_begin(depth)
+ for k, v in table.sortedpairs(metadata) do
+ result[#result+1] = f_metadata(depth+1,k,lpegmatch(p_entity,v))
+ end
+ result[#result+1] = f_metadata_end(depth)
+ end
+ end
+
+ local function endtag(result,embedded,element,nature,di,skip)
+ if skip == "comment" then
+ if show_comment then
+ if nature == "display" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_comment_end_display(depth,namespaced[element])
+ inline = 0
+ elseif nature == "mixed" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_comment_end_mixed(namespaced[element])
+ inline = 0
+ else
+ inline = inline - 1
+ result[#result+1] = f_comment_end_inline(namespaced[element])
end
end
+ elseif skip then
+ -- ignore
+ else
+ if nature == "display" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_end_display(depth,namespaced[element])
+ inline = 0
+ elseif nature == "mixed" and (inline == 0 or inline == 1) then
+ depth = depth - 1
+ result[#result+1] = f_end_mixed(namespaced[element])
+ inline = 0
+ else
+ inline = inline - 1
+ result[#result+1] = f_end_inline(namespaced[element])
+ end
+
+ -- if embedded then
+ -- if element == "math" then
+ -- local id = f_tagid(element,di.n) -- index)
+ -- local tx = concat(result,"",embedded[id],#result)
+ -- embedded[id] = "<?xml version='1.0' standalone='yes'?>" .. "\n" .. tx
+ -- end
+ -- end
end
end
-end
-local function breaktree(tree,parent,parentelement) -- also removes double breaks
- local data = tree.data
- if data then
+ local function flushtree(result,embedded,data,nature)
local nofdata = #data
- local prevelement
- local prevnature
- local prevparnumber
- local newdata = { }
- local nofnewdata = 0
for i=1,nofdata do
local di = data[i]
- if not di then
- -- skip
- elseif di.content then
- local parnumber = di.parnumber
- if prevnature == "inline" and prevparnumber and prevparnumber ~= parnumber then
- nofnewdata = nofnewdata + 1
- if trace_spacing then
- newdata[nofnewdata] = makebreaknode { type = "a", p = prevparnumber, n = parnumber }
+ if not di then -- hm, di can be string
+ -- whatever
+ else
+ local content = di.content
+ -- also optimize for content == "" : trace that first
+ if content then
+ -- already has breaks
+ local content = lpegmatch(p_entity,content)
+ if i == nofdata and sub(content,-1) == "\n" then -- move check
+ -- can be an end of line in par but can also be the last line
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,sub(content,1,-2))
+ else
+ result[#result+1] = sub(content,1,-2)
+ end
+ result[#result+1] = " "
+ else
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,content)
+ else
+ result[#result+1] = content
+ end
+ end
+ elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
+ local element = di.element
+ if not element then
+ -- skip
+ elseif element == "break" then -- or element == "pagebreak"
+ emptytag(result,embedded,element,nature,di)
+ elseif element == "" or di.skip == "ignore" then
+ -- skip
else
- newdata[nofnewdata] = makebreaknode()
+ if di.before then
+ flushtree(result,embedded,di.before,nature)
+ end
+ local natu = di.nature
+ local skip = di.skip
+ if di.breaknode then
+ emptytag(result,embedded,"break","display",di)
+ end
+ begintag(result,embedded,element,natu,di,skip)
+ flushtree(result,embedded,di.data,natu)
+ endtag(result,embedded,element,natu,di,skip)
+ if di.after then
+ flushtree(result,embedded,di.after,nature)
+ end
end
end
- prevelement = nil
- prevnature = "inline"
- prevparnumber = parnumber
- nofnewdata = nofnewdata + 1
- newdata[nofnewdata] = di
- elseif not di.collapsed then
- local element = di.element
- if element == "break" then -- or element == "pagebreak"
- if prevelement == "break" then
- di.element = ""
- end
- prevelement = element
- prevnature = "display"
- elseif element == "" or di.skip == "ignore" then
+ end
+ end
+ end
+
+ local function breaktree(tree,parent,parentelement) -- also removes double breaks
+ local data = tree.data
+ if data then
+ local nofdata = #data
+ local prevelement
+ local prevnature
+ local prevparnumber
+ local newdata = { }
+ local nofnewdata = 0
+ for i=1,nofdata do
+ local di = data[i]
+ if not di then
-- skip
+ elseif di.content then
+ local parnumber = di.parnumber
+ if prevnature == "inline" and prevparnumber and prevparnumber ~= parnumber then
+ nofnewdata = nofnewdata + 1
+ if trace_spacing then
+ newdata[nofnewdata] = makebreaknode { type = "a", p = prevparnumber, n = parnumber }
+ else
+ newdata[nofnewdata] = makebreaknode()
+ end
+ end
+ prevelement = nil
+ prevnature = "inline"
+ prevparnumber = parnumber
+ nofnewdata = nofnewdata + 1
+ newdata[nofnewdata] = di
+ elseif not di.collapsed then
+ local element = di.element
+ if element == "break" then -- or element == "pagebreak"
+ if prevelement == "break" then
+ di.element = ""
+ end
+ prevelement = element
+ prevnature = "display"
+ elseif element == "" or di.skip == "ignore" then
+ -- skip
+ else
+ local nature = di.nature
+ local parnumber = di.parnumber
+ if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then
+ nofnewdata = nofnewdata + 1
+ if trace_spacing then
+ newdata[nofnewdata] = makebreaknode { type = "b", p = prevparnumber, n = parnumber }
+ else
+ newdata[nofnewdata] = makebreaknode()
+ end
+ end
+ prevnature = nature
+ prevparnumber = parnumber
+ prevelement = element
+ breaktree(di,tree,element)
+ end
+ nofnewdata = nofnewdata + 1
+ newdata[nofnewdata] = di
else
local nature = di.nature
local parnumber = di.parnumber
if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then
nofnewdata = nofnewdata + 1
if trace_spacing then
- newdata[nofnewdata] = makebreaknode { type = "b", p = prevparnumber, n = parnumber }
+ newdata[nofnewdata] = makebreaknode { type = "c", p = prevparnumber, n = parnumber }
else
newdata[nofnewdata] = makebreaknode()
end
end
prevnature = nature
prevparnumber = parnumber
- prevelement = element
- breaktree(di,tree,element)
- end
- nofnewdata = nofnewdata + 1
- newdata[nofnewdata] = di
- else
- local nature = di.nature
- local parnumber = di.parnumber
- if prevnature == "inline" and nature == "inline" and prevparnumber and prevparnumber ~= parnumber then
nofnewdata = nofnewdata + 1
- if trace_spacing then
- newdata[nofnewdata] = makebreaknode { type = "c", p = prevparnumber, n = parnumber }
- else
- newdata[nofnewdata] = makebreaknode()
- end
+ newdata[nofnewdata] = di
end
- prevnature = nature
- prevparnumber = parnumber
- nofnewdata = nofnewdata + 1
- newdata[nofnewdata] = di
end
- end
- tree.data = newdata
- end
-end
-
--- also tabulaterow reconstruction .. maybe better as a checker
--- i.e cell attribute
-
-local function collapsetree()
- for tag, trees in next, treehash do
- local d = trees[1].data
- if d then
- local nd = #d
- if nd > 0 then
- for i=2,#trees do
- local currenttree = trees[i]
- local currentdata = currenttree.data
- local currentpar = currenttree.parnumber
- local previouspar = trees[i-1].parnumber
- currenttree.collapsed = true
- -- is the next ok?
- if previouspar == 0 or not (di and di.content) then
- previouspar = nil -- no need anyway so no further testing needed
- end
- for j=1,#currentdata do
- local cd = currentdata[j]
- if not cd or cd == "" then
- -- skip
- elseif cd.content then
- if not currentpar then
- -- add space ?
- elseif not previouspar then
- -- add space ?
- elseif currentpar ~= previouspar then
- nd = nd + 1
- if trace_spacing then
- d[nd] = makebreaknode { type = "d", p = previouspar, n = currentpar }
- else
- d[nd] = makebreaknode()
+ tree.data = newdata
+ end
+ end
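+
+ -- breaktree injects explicit break nodes wherever two successive inline
+ -- chunks belong to different paragraphs and blanks doubled breaks, so
+ -- paragraph boundaries survive the otherwise flat node list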
+
+ -- also tabulaterow reconstruction .. maybe better as a checker
+ -- i.e. cell attribute
+
+ local function collapsetree()
+ for tag, trees in next, treehash do
+ local d = trees[1].data
+ if d then
+ local nd = #d
+ if nd > 0 then
+ for i=2,#trees do
+ local currenttree = trees[i]
+ local currentdata = currenttree.data
+ local currentpar = currenttree.parnumber
+ local previouspar = trees[i-1].parnumber
+ currenttree.collapsed = true
+ -- is the next ok?
+ if previouspar == 0 or not (di and di.content) then
+ previouspar = nil -- no need anyway so no further testing needed
+ end
+ for j=1,#currentdata do
+ local cd = currentdata[j]
+ if not cd or cd == "" then
+ -- skip
+ elseif cd.content then
+ if not currentpar then
+ -- add space ?
+ elseif not previouspar then
+ -- add space ?
+ elseif currentpar ~= previouspar then
+ nd = nd + 1
+ if trace_spacing then
+ d[nd] = makebreaknode { type = "d", p = previouspar, n = currentpar }
+ else
+ d[nd] = makebreaknode()
+ end
end
+ previouspar = currentpar
+ nd = nd + 1
+ d[nd] = cd
+ else
+ nd = nd + 1
+ d[nd] = cd
end
- previouspar = currentpar
- nd = nd + 1
- d[nd] = cd
- else
- nd = nd + 1
- d[nd] = cd
+ currentdata[j] = false
end
- currentdata[j] = false
end
end
end
end
end
-end
-local function finalizetree(tree)
- for _, finalizer in next, finalizers do
- finalizer(tree)
+ local function finalizetree(tree)
+ for _, finalizer in next, finalizers do
+ finalizer(tree)
+ end
end
-end
-local function indextree(tree)
- local data = tree.data
- if data then
- local n, new = 0, { }
- for i=1,#data do
- local d = data[i]
- if not d then
- -- skip
- elseif d.content then
- n = n + 1
- new[n] = d
- elseif not d.collapsed then
- n = n + 1
- d.__i__ = n
- d.__p__ = tree
- indextree(d)
- new[n] = d
+ local function indextree(tree)
+ local data = tree.data
+ if data then
+ local n, new = 0, { }
+ for i=1,#data do
+ local d = data[i]
+ if not d then
+ -- skip
+ elseif d.content then
+ n = n + 1
+ new[n] = d
+ elseif not d.collapsed then
+ n = n + 1
+ d.__i__ = n
+ d.__p__ = tree
+ indextree(d)
+ new[n] = d
+ end
end
+ tree.data = new
end
- tree.data = new
end
-end
-local function checktree(tree)
- local data = tree.data
- if data then
- for i=1,#data do
- local d = data[i]
- if type(d) == "table" then
- local check = checks[d.tg]
- if check then
- check(d)
+ local function checktree(tree)
+ local data = tree.data
+ if data then
+ for i=1,#data do
+ local d = data[i]
+ if type(d) == "table" then
+ local check = checks[d.tg]
+ if check then
+ check(d)
+ end
+ checktree(d)
end
- checktree(d)
end
end
end
+
+ wrapups.flushtree = flushtree
+ wrapups.breaktree = breaktree
+ wrapups.collapsetree = collapsetree
+ wrapups.finalizetree = finalizetree
+ wrapups.indextree = indextree
+ wrapups.checktree = checktree
+
end
-- collector code
local function push(fulltag,depth)
- local tag, n = lpegmatch(dashsplitter,fulltag)
- local tg, detail = lpegmatch(colonsplitter,tag)
- local element, nature
- if detail then
- local pd = properties[tag]
- local pt = properties[tg]
- element = pd and pd.export or pt and pt.export or tg
- nature = pd and pd.nature or pt and pt.nature or defaultnature
+ local tg, n, detail
+ local specification = specifications[fulltag]
+ if specification then
+ tg = specification.tagname
+ n = specification.tagindex
+ detail = specification.detail
else
- local p = properties[tg]
- element = p and p.export or tg
- nature = p and p.nature or "inline"
+ -- a break (more efficient if we don't store those in specifications)
+ tg, n = lpegmatch(tagsplitter,fulltag)
+ n = tonumber(n) -- to tonumber in tagsplitter
end
+ local p = properties[tg]
+ local element = p and p.export or tg
+ local nature = p and p.nature or "inline" -- defaultnature
local treedata = tree.data
- local t = {
+ local t = { -- maybe we can use the tag table
tg = tg,
fulltag = fulltag,
detail = detail,
- n = tonumber(n), -- more efficient
+ n = n, -- already a number
element = element,
nature = nature,
data = { },
@@ -1658,9 +2316,9 @@ local function push(fulltag,depth)
treestack[currentdepth] = tree
if trace_export then
if detail and detail ~= "" then
- report_export("%w<%s trigger=%a paragraph=%a index=%a detail=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata,detail)
+ report_export("%w<%s trigger=%q n=%q paragraph=%q index=%q detail=%q>",currentdepth-1,tg,n,currentattribute or 0,currentparagraph or 0,#treedata,detail)
else
- report_export("%w<%s trigger=%a paragraph=%a index=%a>",currentdepth-1,fulltag,currentattribute or 0,currentparagraph or 0,#treedata)
+ report_export("%w<%s trigger=%q n=%q paragraph=%q index=%q>",currentdepth-1,tg,n,currentattribute or 0,currentparagraph or 0,#treedata)
end
end
tree = t
@@ -1677,15 +2335,19 @@ local function push(fulltag,depth)
end
local function pop()
- local top = nesting[currentdepth]
- tree = treestack[currentdepth]
- currentdepth = currentdepth - 1
- if trace_export then
- if top then
- report_export("%w</%s>",currentdepth,top)
- else
- report_export("</%s>",top)
+ if currentdepth > 0 then
+ local top = nesting[currentdepth]
+ tree = treestack[currentdepth]
+ currentdepth = currentdepth - 1
+ if trace_export then
+ if top then
+ report_export("%w</%s>",currentdepth,top)
+ else
+ report_export("</%s>",top)
+ end
end
+ else
+ report_export("%w<!-- too many pops -->",currentdepth)
end
end
@@ -1700,63 +2362,70 @@ local function continueexport()
end
local function pushentry(current)
- if current then
- if restart then
- continueexport()
- restart = false
- end
- local newdepth = #current
- local olddepth = currentdepth
- if trace_export then
- report_export("%w<!-- moving from depth %s to %s (%s) -->",currentdepth,olddepth,newdepth,current[newdepth])
+ if not current then
+ -- bad news
+ return
+ end
+ current = current.taglist
+ if not current then
+ -- even worse news
+ return
+ end
+ if restart then
+ continueexport()
+ restart = false
+ end
+ local newdepth = #current
+ local olddepth = currentdepth
+ if trace_export then
+ report_export("%w<!-- moving from depth %s to %s (%s) -->",currentdepth,olddepth,newdepth,current[newdepth])
+ end
+ if olddepth <= 0 then
+ for i=1,newdepth do
+ push(current[i],i)
end
- if olddepth <= 0 then
- for i=1,newdepth do
- push(current[i],i)
+ else
+ local difference
+ if olddepth < newdepth then
+ for i=1,olddepth do
+ if current[i] ~= nesting[i] then
+ difference = i
+ break
+ end
end
else
- local difference
- if olddepth < newdepth then
- for i=1,olddepth do
- if current[i] ~= nesting[i] then
- difference = i
- break
- end
- end
- else
- for i=1,newdepth do
- if current[i] ~= nesting[i] then
- difference = i
- break
- end
+ for i=1,newdepth do
+ if current[i] ~= nesting[i] then
+ difference = i
+ break
end
end
- if difference then
- for i=olddepth,difference,-1 do
- pop()
- end
- for i=difference,newdepth do
- push(current[i],i)
- end
- elseif newdepth > olddepth then
- for i=olddepth+1,newdepth do
- push(current[i],i)
- end
- elseif newdepth < olddepth then
- for i=olddepth,newdepth,-1 do
- pop()
- end
- elseif trace_export then
- report_export("%w<!-- staying at depth %s (%s) -->",currentdepth,newdepth,nesting[newdepth] or "?")
+ end
+ if difference then
+ for i=olddepth,difference,-1 do
+ pop()
+ end
+ for i=difference,newdepth do
+ push(current[i],i)
end
+ elseif newdepth > olddepth then
+ for i=olddepth+1,newdepth do
+ push(current[i],i)
+ end
+ elseif newdepth < olddepth then
+ for i=olddepth,newdepth,-1 do
+ pop()
+ end
+ elseif trace_export then
+ report_export("%w<!-- staying at depth %s (%s) -->",currentdepth,newdepth,nesting[newdepth] or "?")
end
- return olddepth, newdepth
end
+ return olddepth, newdepth
end
-local function pushcontent(currentparagraph,newparagraph)
+local function pushcontent(oldparagraph,newparagraph)
if nofcurrentcontent > 0 then
- if currentparagraph then
+ if oldparagraph then
if currentcontent[nofcurrentcontent] == "\n" then
if trace_export then
report_export("%w<!-- removing newline -->",currentdepth)
@@ -1766,35 +2435,37 @@ local function pushcontent(currentparagraph,newparagraph)
end
local content = concat(currentcontent,"",1,nofcurrentcontent)
if content == "" then
- -- omit; when currentparagraph we could push, remove spaces, pop
- elseif somespace[content] and currentparagraph then
- -- omit; when currentparagraph we could push, remove spaces, pop
+ -- omit; when oldparagraph we could push, remove spaces, pop
+ elseif somespace[content] and oldparagraph then
+ -- omit; when oldparagraph we could push, remove spaces, pop
else
local olddepth, newdepth
local list = taglist[currentattribute]
if list then
olddepth, newdepth = pushentry(list)
end
- local td = tree.data
- local nd = #td
- td[nd+1] = { parnumber = currentparagraph, content = content }
- if trace_export then
- report_export("%w<!-- start content with length %s -->",currentdepth,#content)
- report_export("%w%s",currentdepth,(gsub(content,"\n","\\n")))
- report_export("%w<!-- stop content -->",currentdepth)
- end
- if olddepth then
- for i=newdepth-1,olddepth,-1 do
- pop()
+ if tree then
+ local td = tree.data
+ local nd = #td
+ td[nd+1] = { parnumber = oldparagraph or currentparagraph, content = content }
+ if trace_export then
+ report_export("%w<!-- start content with length %s -->",currentdepth,#content)
+ report_export("%w%s",currentdepth,(gsub(content,"\n","\\n")))
+ report_export("%w<!-- stop content -->",currentdepth)
+ end
+ if olddepth then
+ for i=newdepth-1,olddepth,-1 do
+ pop()
+ end
end
end
end
nofcurrentcontent = 0
end
- if currentparagraph then
+ if oldparagraph then
pushentry(makebreaklist(currentnesting))
if trace_export then
- report_export("%w<!-- break added betweep paragraph %a and %a -->",currentdepth,currentparagraph,newparagraph)
+ report_export("%w<!-- break added between paragraph %a and %a -->",currentdepth,oldparagraph,newparagraph)
end
end
end
@@ -1823,28 +2494,28 @@ end
-- whatsit_code localpar_code
-local function collectresults(head,list) -- is last used (we also have currentattribute)
+local function collectresults(head,list,pat,pap) -- is last used (we also have currentattribute)
local p
for n in traverse_nodes(head) do
- local id = n.id -- 14: image, 8: literal (mp)
+ local id = getid(n) -- 14: image, 8: literal (mp)
if id == glyph_code then
- local at = n[a_tagged]
+ local at = getattr(n,a_tagged) or pat
if not at then
-- we need to tag the pagebody stuff as being valid skippable
--
-- report_export("skipping character: %C (no attribute)",n.char)
else
-- we could add tonunicodes for ligatures (todo)
- local components = n.components
- if components then -- we loose data
- collectresults(components,nil)
+ local components = getfield(n,"components")
+ local c = getchar(n)
+ if components and (not characterdata[c] or overloads[c]) then -- we lose data
+ collectresults(components,nil,at) -- this assumes that components have the same attribute as the glyph ... we should be more tolerant (see math)
else
- local c = n.char
if last ~= at then
local tl = taglist[at]
pushcontent()
currentnesting = tl
- currentparagraph = n[a_taggedpar]
+ currentparagraph = getattr(n,a_taggedpar) or pap
currentattribute = at
last = at
pushentry(currentnesting)
@@ -1853,13 +2524,17 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
-- We need to intercept this here; maybe I will also move this
-- to a regular setter at the tex end.
- local r = n[a_reference]
+ local r = getattr(n,a_reference)
if r then
- referencehash[tl[#tl]] = r -- fulltag
+ local t = tl.taglist
+ referencehash[t[#t]] = r -- fulltag
end
--
elseif last then
- local ap = n[a_taggedpar]
+ -- we can consider tagging the pars (lines) in the parbuilder but then we lose some
+ -- information unless we inject a special node (but even then we can run into nesting
+ -- issues)
+ local ap = getattr(n,a_taggedpar) or pap
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -1874,7 +2549,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
report_export("%w<!-- processing glyph %C tagged %a) -->",currentdepth,c,at)
end
end
- local s = n[a_exportstatus]
+ local s = getattr(n,a_exportstatus)
if s then
c = s
end
@@ -1883,7 +2558,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
report_export("%w<!-- skipping last glyph -->",currentdepth)
end
elseif c == 0x20 then
- local a = n[a_characters]
+ local a = getattr(n,a_characters)
nofcurrentcontent = nofcurrentcontent + 1
if a then
if trace_export then
@@ -1894,45 +2569,57 @@ local function collectresults(head,list) -- is last used (we also have currentat
currentcontent[nofcurrentcontent] = " "
end
else
- local fc = fontchar[n.font]
+ local fc = fontchar[getfont(n)]
if fc then
fc = fc and fc[c]
if fc then
- local u = fc.tounicode
- if u and u ~= "" then
+ local u = fc.unicode
+ if not u then
nofcurrentcontent = nofcurrentcontent + 1
- currentcontent[nofcurrentcontent] = utfchar(fromunicode16(u))
+ currentcontent[nofcurrentcontent] = utfchar(c)
+ elseif type(u) == "table" then
+ for i=1,#u do
+ nofcurrentcontent = nofcurrentcontent + 1
+ currentcontent[nofcurrentcontent] = utfchar(u[i])
+ end
else
nofcurrentcontent = nofcurrentcontent + 1
- currentcontent[nofcurrentcontent] = utfchar(c)
+ currentcontent[nofcurrentcontent] = utfchar(u)
end
- else -- weird, happens in hz (we really need to get rid of the pseudo fonts)
+ elseif c > 0 then
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = utfchar(c)
+ else
+ -- we can have -1 as side effect of an explicit hyphen (unless we expand)
end
- else
+ elseif c > 0 then
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = utfchar(c)
+ else
+ -- we can have -1 as side effect of an explicit hyphen (unless we expand)
end
end
end
end
elseif id == disc_code then -- probably too late
if keephyphens then
- local pre = n.pre
- if pre and not pre.next and pre.id == glyph_code and pre.char == hyphencode then
+ local pre = getfield(n,"pre")
+ if pre and not getnext(pre) and getid(pre) == glyph_code and getchar(pre) == hyphencode then
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = hyphen
end
end
- collectresults(n.replace,nil)
+ local replace = getfield(n,"replace")
+ if replace then
+ collectresults(replace,nil)
+ end
elseif id == glue_code then
-- we need to distinguish between hskips and vskips
- local ca = n[a_characters]
+ local ca = getattr(n,a_characters)
if ca == 0 then
-- skip this one ... already converted special character (node-acc)
elseif ca then
- local a = n[a_tagged]
+ local a = getattr(n,a_tagged) or pat
if a then
local c = specialspaces[ca]
if last ~= a then
@@ -1942,13 +2629,13 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
pushcontent()
currentnesting = tl
- currentparagraph = n[a_taggedpar]
+ currentparagraph = getattr(n,a_taggedpar) or pap
currentattribute = a
last = a
pushentry(currentnesting)
-- no reference check (see above)
elseif last then
- local ap = n[a_taggedpar]
+ local ap = getattr(n,a_taggedpar) or pap
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -1969,11 +2656,12 @@ local function collectresults(head,list) -- is last used (we also have currentat
currentcontent[nofcurrentcontent] = c
end
else
- local subtype = n.subtype
+ local subtype = getsubtype(n)
if subtype == userskip_code then
- if n.spec.width > threshold then
+ local spec = getfield(n,"spec")
+ if getfield(spec,"width") > threshold then
if last and not somespace[currentcontent[nofcurrentcontent]] then
- local a = n[a_tagged]
+ local a = getattr(n,a_tagged) or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 5a -->",currentdepth)
@@ -2000,7 +2688,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
elseif subtype == spaceskip_code or subtype == xspaceskip_code then
if not somespace[currentcontent[nofcurrentcontent]] then
- local a = n[a_tagged]
+ local a = getattr(n,a_tagged) or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 7 (stay in element) -->",currentdepth)
@@ -2029,7 +2717,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
nofcurrentcontent = nofcurrentcontent - 1
end
elseif not somespace[r] then
- local a = n[a_tagged]
+ local a = getattr(n,a_tagged) or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 1 (end of line, stay in element) -->",currentdepth)
@@ -2057,9 +2745,9 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
end
elseif id == hlist_code or id == vlist_code then
- local ai = n[a_image]
+ local ai = getattr(n,a_image)
if ai then
- local at = n[a_tagged]
+ local at = getattr(n,a_tagged) or pat
if nofcurrentcontent > 0 then
pushcontent()
pushentry(currentnesting) -- ??
@@ -2072,18 +2760,22 @@ local function collectresults(head,list) -- is last used (we also have currentat
currentparagraph = nil
else
-- we need to determine an end-of-line
- collectresults(n.list,n)
+ local list = getlist(n)
+ if list then
+ local at = getattr(n,a_tagged) or pat
+ collectresults(list,n,at)
+ end
end
elseif id == kern_code then
- local kern = n.kern
+ local kern = getfield(n,"kern")
if kern > 0 then
local limit = threshold
- if p and p.id == glyph_code then
- limit = fontquads[p.font] / 4
+ if p and getid(p) == glyph_code then
+ limit = fontquads[getfont(p)] / 4
end
if kern > limit then
if last and not somespace[currentcontent[nofcurrentcontent]] then
- local a = n[a_tagged]
+ local a = getattr(n,a_tagged) or pat
if a == last then
if not somespace[currentcontent[nofcurrentcontent]] then
if trace_export then
@@ -2123,7 +2815,20 @@ function nodes.handlers.export(head) -- hooks into the page builder
end
-- continueexport()
restart = true
- collectresults(head)
+
+-- local function f(head,depth,pat)
+-- for n in node.traverse(head) do
+-- local a = n[a_tagged] or pat
+-- local t = taglist[a]
+-- print(depth,n,a,t and table.concat(t," "))
+-- if n.id == hlist_code or n.id == vlist_code and n.list then
+-- f(n.list,depth+1,a)
+-- end
+-- end
+-- end
+-- f(head,1)
+
+ collectresults(tonut(head))
if trace_export then
report_export("%w<!-- stop flushing page -->",currentdepth)
end
@@ -2133,280 +2838,819 @@ end
function builders.paragraphs.tag(head)
noftextblocks = noftextblocks + 1
- for n in traverse_id(hlist_code,head) do
- local subtype = n.subtype
+ for n in traverse_id(hlist_code,tonut(head)) do
+ local subtype = getsubtype(n)
if subtype == line_code then
- n[a_textblock] = noftextblocks
+ setattr(n,a_textblock,noftextblocks)
elseif subtype == glue_code or subtype == kern_code then
- n[a_textblock] = 0
+ setattr(n,a_textblock,0)
end
end
return false
end
--- encoding='utf-8'
+do
local xmlpreamble = [[
-<?xml version='1.0' encoding='UTF-8' standalone='yes' ?>
+<?xml version="1.0" encoding="UTF-8" standalone="%standalone%" ?>
+
+<!--
-<!-- input filename : %- 17s -->
-<!-- processing date : %- 17s -->
-<!-- context version : %- 17s -->
-<!-- exporter version : %- 17s -->
+ input filename : %filename%
+ processing date : %date%
+ context version : %contextversion%
+ exporter version : %exportversion%
+
+-->
]]
-local function wholepreamble()
- return format(xmlpreamble,tex.jobname,os.date(),environment.version,exportversion)
-end
+ local flushtree = wrapups.flushtree
+
+ local function wholepreamble(standalone)
+ return replacetemplate(xmlpreamble, {
+ standalone = standalone and "yes" or "no",
+ filename = tex.jobname,
+ date = os.date(),
+ contextversion = environment.version,
+ exportversion = exportversion,
+ })
+ end
local csspreamble = [[
-<?xml-stylesheet type="text/css" href="%s"?>
+<?xml-stylesheet type="text/css" href="%filename%" ?>
]]
-local function allusedstylesheets(xmlfile,cssfiles,files)
- local result = { }
- for i=1,#cssfiles do
- local cssfile = cssfiles[i]
- if type(cssfile) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then
- cssfile = file.replacesuffix(xmlfile,"css")
- else
- cssfile = file.addsuffix(cssfile,"css")
+local cssheadlink = [[
+<link type="text/css" rel="stylesheet" href="%filename%" />
+]]
+
+ local function allusedstylesheets(cssfiles,files,path)
+ local done = { }
+ local result = { }
+ local extras = { }
+ for i=1,#cssfiles do
+ local cssfile = cssfiles[i]
+ if type(cssfile) ~= "string" then
+ -- error
+ elseif cssfile == "export-example.css" then
+ -- ignore
+ elseif not done[cssfile] then
+ cssfile = file.join(path,cssfile)
+ report_export("adding css reference '%s'",cssfile)
+ files[#files+1] = cssfile
+ result[#result+1] = replacetemplate(csspreamble, { filename = cssfile })
+ extras[#extras+1] = replacetemplate(cssheadlink, { filename = cssfile })
+ done[cssfile] = true
+ end
end
- files[#files+1] = cssfile
- report_export("adding css reference '%s'",cssfile)
- result[#result+1] = format(csspreamble,cssfile)
+ return concat(result), concat(extras)
end
- return concat(result)
-end
-local e_template = [[
-%s {
- display: %s ;
+local elementtemplate = [[
+/* element="%element%" detail="%detail%" chain="%chain%" */
+
+%element%, %namespace%div.%element% {
+ display: %display% ;
}]]
-local d_template = [[
-%s[detail=%s] {
- display: %s ;
+local detailtemplate = [[
+/* element="%element%" detail="%detail%" chain="%chain%" */
+
+%element%[detail=%detail%], %namespace%div.%element%.%detail% {
+ display: %display% ;
}]]
-local displaymapping = {
- inline = "inline",
- display = "block",
- mixed = "inline",
-}
+-- <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN" "http://www.w3.org/2002/04/xhtml-math-svg/xhtml-math-svg.dtd" >
+
+local htmltemplate = [[
+%preamble%
+
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:math="http://www.w3.org/1998/Math/MathML">
+
+ <head>
+
+ <meta charset="utf-8"/>
+
+ <title>%title%</title>
+
+%style%
+
+ </head>
+ <body>
+ <div xmlns="http://www.pragma-ade.com/context/export">
+
+<div class="warning">Rendering can be suboptimal because there is no default/fallback css loaded.</div>
+
+%body%
-local function allusedelements(xmlfile)
- local result = { format("/* template for file %s */",xmlfile) }
- for element, details in sortedhash(used) do
- result[#result+1] = format("/* category: %s */",element)
- for detail, nature in sortedhash(details) do
- local d = displaymapping[nature or "display"] or "block"
- if detail == "" then
- result[#result+1] = formatters[e_template](element,d)
+ </div>
+ </body>
+</html>
+]]
+
+ local displaymapping = {
+ inline = "inline",
+ display = "block",
+ mixed = "inline",
+ }
+
+ local function allusedelements(basename)
+ local result = { replacetemplate(namespacetemplate, {
+ what = "template",
+ filename = basename,
+ namespace = contextns,
+ -- cssnamespaceurl = usecssnamespace and cssnamespaceurl or "",
+ cssnamespaceurl = cssnamespaceurl,
+ }) }
+ for element, details in sortedhash(used) do
+ if namespaces[element] then
+ -- skip math
else
- result[#result+1] = formatters[d_template](element,detail,d)
+ for detail, what in sortedhash(details) do
+ local nature = what[1] or "display"
+ local chain = what[2]
+ local display = displaymapping[nature] or "block"
+ if detail == "" then
+ result[#result+1] = replacetemplate(elementtemplate, {
+ element = element,
+ display = display,
+ chain = chain,
+ namespace = usecssnamespace and namespace or "",
+ })
+ else
+ result[#result+1] = replacetemplate(detailtemplate, {
+ element = element,
+ display = display,
+ detail = detail,
+ chain = chain,
+ namespace = usecssnamespace and cssnamespace or "",
+ })
+ end
+ end
end
end
+ return concat(result,"\n\n")
+ end
+
+ local function allcontent(tree,embed)
+ local result = { }
+ local embedded = embed and { }
+ flushtree(result,embedded,tree.data,"display") -- we need to collect images
+ result = concat(result)
+ -- no need to lpeg .. fast enough
+ result = gsub(result,"\n *\n","\n")
+ result = gsub(result,"\n +([^< ])","\n%1")
+ return result, embedded
+ end
+
+ -- local xhtmlpreamble = [[
+ -- <!DOCTYPE html PUBLIC
+ -- "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN"
+ -- "http://www.w3.org/2002/04/xhtml-math-svg/xhtml-math-svg.dtd"
+ -- >
+ -- ]]
+
+ local function cleanxhtmltree(xmltree)
+ if xmltree then
+ local implicits = { }
+ local explicits = { }
+ local overloads = { }
+ for e in xml.collected(xmltree,"*") do
+ local at = e.at
+ if at then
+ local explicit = at.explicit
+ local implicit = at.implicit
+ if explicit then
+ if not explicits[explicit] then
+ explicits[explicit] = true
+ at.id = explicit
+ if implicit then
+ overloads[implicit] = explicit
+ end
+ end
+ else
+ if implicit and not implicits[implicit] then
+ implicits[implicit] = true
+ at.id = "aut:" .. implicit
+ end
+ end
+ end
+ end
+ for e in xml.collected(xmltree,"*") do
+ local at = e.at
+ if at then
+ local internal = at.internal
+ local location = at.location
+ if internal then
+ if location then
+ local explicit = overloads[location]
+ if explicit then
+ at.href = "#" .. explicit
+ else
+ at.href = "#aut:" .. internal
+ end
+ else
+ at.href = "#aut:" .. internal
+ end
+ else
+ if location then
+ at.href = "#" .. location
+ else
+ local url = at.url
+ if url then
+ at.href = url
+ else
+ local file = at.file
+ if file then
+ at.href = file
+ end
+ end
+ end
+ end
+ end
+ end
+ return xmltree
+ else
+ return xml.convert('<?xml version="1.0"?>\n<error>invalid xhtml tree</error>')
+ end
end
- return concat(result,"\n\n")
-end
-local function allcontent(tree)
- local result = { }
- flushtree(result,tree.data,"display",0) -- we need to collect images
- result = concat(result)
- result = gsub(result,"\n *\n","\n")
- result = gsub(result,"\n +([^< ])","\n%1")
- return result
-end
+ -- maybe the reverse: be explicit about what is permitted
--- local xhtmlpreamble = [[
--- <!DOCTYPE html PUBLIC
--- "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN"
--- "http://www.w3.org/2002/04/xhtml-math-svg/xhtml-math-svg.dtd"
--- >
--- ]]
-
-local function cleanxhtmltree(xmltree)
- if xmltree then
- local xmlwrap = xml.wrap
- for e in xml.collected(xmltree,"/document") do
- e.at["xmlns:xhtml"] = "http://www.w3.org/1999/xhtml"
- break
- end
- -- todo: inject xhtmlpreamble (xmlns should have be enough)
- local wrapper = { tg = "a", ns = "xhtml", at = { href = "unknown" } }
- for e in xml.collected(xmltree,"link") do
- local at = e.at
- local href
- if at.location then
- href = "#" .. gsub(at.location,":","_")
- elseif at.url then
- href = at.url
- elseif at.file then
- href = at.file
- end
- if href then
- wrapper.at.href = href
- xmlwrap(e,wrapper)
- end
- end
- local wrapper = { tg = "a", ns = "xhtml", at = { name = "unknown" } }
- for e in xml.collected(xmltree,"!link[@location]") do
- local location = e.at.location
- if location then
- wrapper.at.name = gsub(location,":","_")
- xmlwrap(e,wrapper)
- end
- end
- return xmltree
- else
- return xml.convert("<?xml version='1.0'?>\n<error>invalid xhtml tree</error>")
- end
-end
+ local private = {
+ destination = true,
+ prefix = true,
+ reference = true,
+ --
+ id = true,
+ href = true,
+ --
+ implicit = true,
+ explicit = true,
+ --
+ url = true,
+ file = true,
+ internal = true,
+ location = true,
+ --
+ name = true, -- image name
+ used = true, -- image name
+ page = true, -- image name
+ width = true,
+ height = true,
+ --
+ }
-local cssfile, xhtmlfile = nil, nil
+ local addclicks = true
+ local f_onclick = formatters[ [[location.href='%s']] ]
-directives.register("backend.export.css", function(v) cssfile = v end)
-directives.register("backend.export.xhtml",function(v) xhtmlfile = v end)
+ local p_cleanid = lpeg.replacer { [":"] = "-" }
+ local p_cleanhref = lpeg.Cs(lpeg.P("#") * p_cleanid)
-local function stopexport(v)
- starttiming(treehash)
- --
- finishexport()
- --
- collapsetree(tree)
- indextree(tree)
- checktree(tree)
- breaktree(tree)
- finalizetree(tree)
- --
- hashlistdata()
- --
- if type(v) ~= "string" or v == variables.yes or v == "" then
- v = tex.jobname
- end
- local basename = file.basename(v)
- local xmlfile = file.addsuffix(basename,"export")
- --
- local imagefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-images","css")
- local stylefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-styles","css")
- local templatefilename = file.replacesuffix(xmlfile,"template")
- local specificationfilename = file.replacesuffix(xmlfile,"specification")
- --
- if xhtml and not cssfile then
- cssfile = true
- end
- local cssfiles = { }
- if cssfile then
- if cssfile == true then
- cssfiles = { "export-example.css" }
+ local p_splitter = lpeg.Ct ( (
+ lpeg.Carg(1) * lpeg.C((1-lpeg.P(" "))^1) / function(d,s) if not d[s] then d[s] = true return s end end
+ * lpeg.P(" ")^0 )^1 )
+
+
+ local classes = table.setmetatableindex(function(t,k)
+ local v = concat(lpegmatch(p_splitter,k,1,{})," ")
+ t[k] = v
+ return v
+ end)
+
+ local function makeclass(tg,at)
+ local detail = at.detail
+ local chain = at.chain
+ local result
+ at.detail = nil
+ at.chain = nil
+ if detail and detail ~= "" then
+ if chain and chain ~= "" then
+ if chain ~= detail then
+ result = { classes[tg .. " " .. chain .. " " .. detail] } -- we need to remove duplicates
+ elseif tg ~= detail then
+ result = { tg, detail }
+ else
+ result = { tg }
+ end
+ elseif tg ~= detail then
+ result = { tg, detail }
+ else
+ result = { tg }
+ end
+ elseif chain and chain ~= "" then
+ if tg ~= chain then
+ result = { tg, chain }
+ else
+ result = { tg }
+ end
else
- cssfiles = settings_to_array(cssfile or "")
+ result = { tg }
end
- insert(cssfiles,1,imagefilename)
- insert(cssfiles,1,stylefilename)
+ for k, v in next, at do
+ if not private[k] then
+ result[#result+1] = k .. "-" .. v
+ end
+ end
+ return concat(result, " ")
end
- cssfiles = table.unique(cssfiles)
- --
- local result = allcontent(tree) -- also does some housekeeping and data collecting
- --
- local files = {
- }
- local results = concat {
- wholepreamble(),
- allusedstylesheets(xmlfile,cssfiles,files), -- ads to files
- result,
- }
- --
- files = table.unique(files)
- --
- report_export("saving xml data in %a",xmlfile)
- io.savedata(xmlfile,results)
- --
- report_export("saving css image definitions in %a",imagefilename)
- io.savedata(imagefilename,allusedimages(xmlfile))
- --
- report_export("saving css style definitions in %a",stylefilename)
- io.savedata(stylefilename,allusedstyles(xmlfile))
- --
- report_export("saving css template in %a",templatefilename)
- io.savedata(templatefilename,allusedelements(xmlfile))
- --
- if xhtmlfile then
- if type(v) ~= "string" or xhtmlfile == true or xhtmlfile == variables.yes or xhtmlfile == "" or xhtmlfile == xmlfile then
- xhtmlfile = file.replacesuffix(xmlfile,"xhtml")
- else
- xhtmlfile = file.addsuffix(xhtmlfile,"xhtml")
+
+ local function remap(specification,source,target)
+ local comment = nil -- share comments
+ for c in xml.collected(source,"*") do
+ if not c.special then
+ local tg = c.tg
+ local ns = c.ns
+ if ns == "m" then
+ if false then -- yes or no
+ c.ns = ""
+ c.at["xmlns:m"] = nil
+ end
+ -- elseif tg == "a" then
+ -- c.ns = ""
+ else
+ -- if tg == "tabulatecell" or tg == "tablecell" then
+ local dt = c.dt
+ local nt = #dt
+ if nt == 0 or (nt == 1 and dt[1] == "") then
+ if comment then
+ c.dt = comment
+ else
+ xml.setcomment(c,"empty")
+ comment = c.dt
+ end
+ end
+ -- end
+ local at = c.at
+ local class = nil
+ if tg == "document" then
+ at.href = nil
+ at.detail = nil
+ at.chain = nil
+ else
+ class = makeclass(tg,at)
+ end
+ local id = at.id
+ local href = at.href
+ if id then
+ id = lpegmatch(p_cleanid, id) or id
+ if href then
+ href = lpegmatch(p_cleanhref,href) or href
+ c.at = {
+ class = class,
+ id = id,
+ href = href,
+ onclick = addclicks and f_onclick(href) or nil,
+ }
+ else
+ c.at = {
+ class = class,
+ id = id,
+ }
+ end
+ else
+ if href then
+ href = lpegmatch(p_cleanhref,href) or href
+ c.at = {
+ class = class,
+ href = href,
+ onclick = addclicks and f_onclick(href) or nil,
+ }
+ else
+ c.at = {
+ class = class,
+ }
+ end
+ end
+ c.tg = "div"
+ end
+ end
+ end
+ end
+
+ -- local cssfile = nil directives.register("backend.export.css", function(v) cssfile = v end)
+
+ local addsuffix = file.addsuffix
+ local joinfile = file.join
+
+ local embedfile = false directives.register("export.embed",function(v) embedfile = v end)
+ local embedmath = false
+
+ local function stopexport(v)
+
+ starttiming(treehash)
+ --
+ finishexport()
+ --
+ report_export("")
+ report_export("exporting xml, xhtml and html files")
+ report_export("")
+ --
+ wrapups.collapsetree(tree)
+ wrapups.indextree(tree)
+ wrapups.checktree(tree)
+ wrapups.breaktree(tree)
+ wrapups.finalizetree(tree)
+ --
+ wrapups.hashlistdata()
+ --
+ if type(v) ~= "string" or v == v_yes or v == "" then
+ v = tex.jobname
+ end
+
+ -- we use a dedicated subpath:
+ --
+ -- ./jobname-export
+ -- ./jobname-export/images
+ -- ./jobname-export/styles
+ -- ./jobname-export/jobname-export.xml
+ -- ./jobname-export/jobname-export.xhtml
+ -- ./jobname-export/jobname-export.html
+ -- ./jobname-export/jobname-specification.lua
+ -- ./jobname-export/styles/jobname-defaults.css
+ -- ./jobname-export/styles/jobname-styles.css
+ -- ./jobname-export/styles/jobname-images.css
+ -- ./jobname-export/styles/jobname-templates.css
+
+ local basename = file.basename(v)
+ local corename = file.removesuffix(basename)
+ local basepath = basename .. "-export"
+ local imagepath = joinfile(basepath,"images")
+ local stylepath = joinfile(basepath,"styles")
+
+ local function validpath(what,pathname)
+ if lfs.isdir(pathname) then
+ report_export("using existing %s path %a",what,pathname)
+ return pathname
+ end
+ lfs.mkdir(pathname)
+ if lfs.isdir(pathname) then
+ report_export("using created %s path %a",what,basepath)
+ return pathname
+ else
+ report_export("unable to create %s path %a",what,basepath)
+ return false
+ end
end
- files[#files+1] = xhtmlfile
- report_export("saving xhtml variant in %a",xhtmlfile)
- local xmltree = cleanxhtmltree(xml.convert(results))
- xml.save(xmltree,xhtmlfile)
+
+ if not (validpath("export",basepath) and validpath("images",imagepath) and validpath("styles",stylepath)) then
+ return
+ end
+
+ -- we're now on the dedicated export subpath so names cannot clash
+
+ local xmlfilebase = addsuffix(basename .. "-raw","xml" )
+ local xhtmlfilebase = addsuffix(basename .. "-tag","xhtml")
+ local htmlfilebase = addsuffix(basename .. "-div","xhtml")
+ local specificationfilebase = addsuffix(basename .. "-pub","lua" )
+
+ local xmlfilename = joinfile(basepath, xmlfilebase )
+ local xhtmlfilename = joinfile(basepath, xhtmlfilebase )
+ local htmlfilename = joinfile(basepath, htmlfilebase )
+ local specificationfilename = joinfile(basepath, specificationfilebase)
+ --
+ local defaultfilebase = addsuffix(basename .. "-defaults", "css")
+ local imagefilebase = addsuffix(basename .. "-images", "css")
+ local stylefilebase = addsuffix(basename .. "-styles", "css")
+ local templatefilebase = addsuffix(basename .. "-templates","css")
+ --
+ local defaultfilename = joinfile(stylepath,defaultfilebase )
+ local imagefilename = joinfile(stylepath,imagefilebase )
+ local stylefilename = joinfile(stylepath,stylefilebase )
+ local templatefilename = joinfile(stylepath,templatefilebase)
+
+ local cssfile = finetuning.cssfile
+
+ -- we keep track of all used files
+
+ local files = {
+ }
+
+ -- we always load the defaults and optionally extra css files; we also copy the example
+ -- css file so that we always have the latest version
+
+ local cssfiles = {
+ defaultfilebase,
+ imagefilebase,
+ stylefilebase,
+ }
+
+ local examplefilename = resolvers.find_file("export-example.css")
+ if examplefilename then
+ local data = io.loaddata(examplefilename)
+ if not data or data == "" then
+ data = "/* missing css file */"
+ elseif not usecssnamespace then
+ data = gsub(data,cssnamespace,"")
+ end
+ io.savedata(defaultfilename,data)
+ end
+
+ if cssfile then
+ local list = table.unique(settings_to_array(cssfile))
+ for i=1,#list do
+ local source = addsuffix(list[i],"css")
+ local target = joinfile(stylepath,file.basename(source))
+ cssfiles[#cssfiles+1] = source
+ if not lfs.isfile(source) then
+ source = joinfile("../",source)
+ end
+ if lfs.isfile(source) then
+ report_export("copying %s",source)
+ file.copy(source,target)
+ end
+ end
+ end
+
+ local x_styles, h_styles = allusedstylesheets(cssfiles,files,"styles")
+
+ -- at this point we're ready for the content; the collector also does some
+ -- housekeeping and data collecting; at this point we still have an xml
+ -- representation that uses verbose element names and carries information in
+ -- attributes
+
+
+ local data = tree.data
+ for i=1,#data do
+ if data[i].tg ~= "document" then
+ data[i] = { }
+ end
+ end
+
+ local result, embedded = allcontent(tree,embedmath) -- embedfile is for testing
+
+ local attach = backends.nodeinjections.attachfile
+
+ if embedfile and attach then
+ -- only for testing
+ attach {
+ data = concat{ wholepreamble(true), result },
+ name = file.basename(xmlfilename),
+ registered = "export",
+ title = "raw xml export",
+ method = v_hidden,
+ mimetype = "application/mathml+xml",
+ }
+ end
+ -- if embedmath and attach then
+ -- local refs = { }
+ -- for k, v in sortedhash(embedded) do
+ -- attach {
+ -- data = v,
+ -- file = file.basename(k),
+ -- name = file.addsuffix(k,"xml"),
+ -- registered = k,
+ -- reference = k,
+ -- title = "xml export snippet: " .. k,
+ -- method = v_hidden,
+ -- mimetype = "application/mathml+xml",
+ -- }
+ -- refs[k] = 0
+ -- end
+ -- end
+
+ result = concat {
+ wholepreamble(true),
+ x_styles, -- adds to files
+ result,
+ }
+
+ cssfiles = table.unique(cssfiles)
+
+ -- we're now ready for saving the result in the xml file
+
+ report_export("saving xml data in %a",xmlfilename)
+ io.savedata(xmlfilename,result)
+
+ report_export("saving css image definitions in %a",imagefilename)
+ io.savedata(imagefilename,wrapups.allusedimages(basename))
+
+ report_export("saving css style definitions in %a",stylefilename)
+ io.savedata(stylefilename,wrapups.allusedstyles(basename))
+
+ report_export("saving css template in %a",templatefilename)
+ io.savedata(templatefilename,allusedelements(basename))
+
+ -- additionally we save an xhtml file; for that we load the file as xml tree
+
+ report_export("saving xhtml variant in %a",xhtmlfilename)
+
+ local xmltree = cleanxhtmltree(xml.convert(result))
+
+ xml.save(xmltree,xhtmlfilename)
+
+ -- now we save a specification file that can be used for generating an epub file
+
-- looking at identity is somewhat redundant as we also inherit from interaction
-- at the tex end
+
local identity = interactions.general.getidentity()
+
local specification = {
- name = file.removesuffix(v),
- identifier = os.uuid(),
- images = uniqueusedimages(),
- root = xhtmlfile,
- files = files,
- language = languagenames[texgetcount("mainlanguagenumber")],
- title = validstring(finetuning.title) or validstring(identity.title),
- subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
- author = validstring(finetuning.author) or validstring(identity.author),
- firstpage = validstring(finetuning.firstpage),
- lastpage = validstring(finetuning.lastpage),
+ name = file.removesuffix(v),
+ identifier = os.uuid(),
+ images = wrapups.uniqueusedimages(),
+ imagefile = joinfile("styles",imagefilebase),
+ imagepath = "images",
+ stylepath = "styles",
+ xmlfiles = { xmlfilebase },
+ xhtmlfiles = { xhtmlfilebase },
+ htmlfiles = { htmlfilebase },
+ styles = cssfiles,
+ htmlroot = htmlfilebase,
+ language = languagenames[texgetcount("mainlanguagenumber")],
+ title = validstring(finetuning.title) or validstring(identity.title),
+ subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
+ author = validstring(finetuning.author) or validstring(identity.author),
+ firstpage = validstring(finetuning.firstpage),
+ lastpage = validstring(finetuning.lastpage),
}
- report_export("saving specification in %a (mtxrun --script epub --make %s)",specificationfilename,specificationfilename)
+
+ report_export("saving specification in %a",specificationfilename)
+
io.savedata(specificationfilename,table.serialize(specification,true))
- end
- stoptiming(treehash)
-end
-local appendaction = nodes.tasks.appendaction
-local enableaction = nodes.tasks.enableaction
+ -- the html export for epub is different in the sense that it uses div's instead of
+ -- specific tags
-function commands.setupexport(t)
- table.merge(finetuning,t)
- keephyphens = finetuning.hyphen == variables.yes
-end
+ report_export("saving div based alternative in %a",htmlfilename)
+
+ remap(specification,xmltree)
-local function startexport(v)
- if v and not exporting then
- report_export("enabling export to xml")
- -- not yet known in task-ini
- appendaction("shipouts","normalizers", "nodes.handlers.export")
- -- enableaction("shipouts","nodes.handlers.export")
- enableaction("shipouts","nodes.handlers.accessibility")
- enableaction("math", "noads.handlers.tags")
- -- appendaction("finalizers","lists","builders.paragraphs.tag")
- -- enableaction("finalizers","builders.paragraphs.tag")
- luatex.registerstopactions(function() stopexport(v) end)
- exporting = true
+ local title = specification.title
+
+ if not title or title == "" then
+ title = "no title" -- believe it or not, a <title/> can prevent viewing in browsers
+ end
+
+ local variables = {
+ style = h_styles,
+ body = xml.tostring(xml.first(xmltree,"/div")),
+ preamble = wholepreamble(false),
+ title = title,
+ }
+
+ io.savedata(htmlfilename,replacetemplate(htmltemplate,variables,"xml"))
+
+ -- finally we report how an epub file can be made (using the specification)
+
+ report_export("")
+ report_export('create epub with: mtxrun --script epub --make "%s" [--purge --rename --svgmath]',file.nameonly(basename))
+ report_export("")
+
+ stoptiming(treehash)
end
-end
-directives.register("backend.export",startexport) -- maybe .name
+ local appendaction = nodes.tasks.appendaction
+ local enableaction = nodes.tasks.enableaction
-statistics.register("xml exporting time", function()
- if exporting then
- return format("%s seconds, version %s", statistics.elapsedtime(treehash),exportversion)
+ function structurestags.setupexport(t)
+ table.merge(finetuning,t)
+ keephyphens = finetuning.hyphen == v_yes
+ exportproperties = finetuning.properties
+ if exportproperties == v_no then
+ exportproperties = false
+ end
end
-end)
+
+ local function startexport(v)
+ if v and not exporting then
+ report_export("enabling export to xml")
+ -- not yet known in task-ini
+ appendaction("shipouts","normalizers", "nodes.handlers.export")
+ -- enableaction("shipouts","nodes.handlers.export")
+ enableaction("shipouts","nodes.handlers.accessibility")
+ enableaction("math", "noads.handlers.tags")
+ -- appendaction("finalizers","lists","builders.paragraphs.tag")
+ -- enableaction("finalizers","builders.paragraphs.tag")
+ luatex.registerstopactions(structurestags.finishexport)
+ exporting = v
+ end
+ end
+
+ function structurestags.finishexport()
+ if exporting then
+ stopexport(exporting)
+ exporting = false
+ end
+ end
+
+ directives.register("backend.export",startexport) -- maybe .name
+
+ statistics.register("xml exporting time", function()
+ if exporting then
+ return string.format("%s seconds, version %s", statistics.elapsedtime(treehash),exportversion)
+ end
+ end)
+
+end
-- These are called at the tex end:
-commands.settagitemgroup = structurestags.setitemgroup
-commands.settagsynonym = structurestags.setsynonym
-commands.settagsorting = structurestags.setsorting
-commands.settagdescription = structurestags.setdescription
-commands.settagdescriptionsymbol = structurestags.setdescriptionsymbol
-commands.settaghighlight = structurestags.sethighlight
-commands.settagfigure = structurestags.setfigure
-commands.settagcombination = structurestags.setcombination
-commands.settagtablecell = structurestags.settablecell
-commands.settagtabulatecell = structurestags.settabulatecell
+implement {
+ name = "setupexport",
+ actions = structurestags.setupexport,
+ arguments = {
+ {
+ { "align" },
+ { "bodyfont", "dimen" },
+ { "width", "dimen" },
+ { "properties" },
+ { "hyphen" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "firstpage" },
+ { "lastpage" },
+ { "svgstyle" },
+ { "cssfile" },
+ }
+ }
+}
+
+implement {
+ name = "finishexport",
+ actions = structurestags.finishexport,
+}
+
+implement {
+ name = "settagitemgroup",
+ actions = structurestags.setitemgroup,
+ arguments = { "boolean", "integer", "string" }
+}
+
+implement {
+ name = "settagitem",
+ actions = structurestags.setitem,
+ arguments = "string"
+}
+
+implement {
+ name = "settagsynonym",
+ actions = structurestags.setsynonym,
+ arguments = "string"
+}
+
+implement {
+ name = "settagsorting",
+ actions = structurestags.setsorting,
+ arguments = "string"
+}
+
+implement {
+ name = "settagdescription",
+ actions = structurestags.setdescription,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "settagdescriptionsymbol",
+ actions = structurestags.setdescriptionsymbol,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "settaghighlight",
+ actions = structurestags.sethighlight,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "settagfigure",
+ actions = structurestags.setfigure,
+ arguments = { "string", "string", "string", "dimen", "dimen" }
+}
+
+implement {
+ name = "settagcombination",
+ actions = structurestags.setcombination,
+ arguments = { "integer", "integer" }
+}
+
+implement {
+ name = "settagtablecell",
+ actions = structurestags.settablecell,
+ arguments = { "integer", "integer", "integer" }
+}
+
+implement {
+ name = "settagtabulatecell",
+ actions = structurestags.settabulatecell,
+ arguments = "integer"
+}
+
+implement {
+ name = "settagregister",
+ actions = structurestags.setregister,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "settaglist",
+ actions = structurestags.setlist,
+ arguments = "integer"
+}
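
A side note on the makeclass/classes code in the hunk above: the classes table memoizes a deduplicated version of a space separated class string via p_splitter, so a chain such as "item item itemcontent" collapses to "item itemcontent" before it ends up in a class attribute. The following is a minimal standalone sketch of that idea in plain Lua; the names splitter and uniqueclass are mine and it only assumes the lpeg module is available:

    local lpeg = require("lpeg")
    local P, C, Ct, Carg = lpeg.P, lpeg.C, lpeg.Ct, lpeg.Carg

    -- split on spaces and keep only the first occurrence of every word;
    -- duplicates produce no capture so they simply vanish from the result
    local splitter = Ct ( (
        Carg(1) * C((1 - P(" "))^1) / function(seen,word)
            if not seen[word] then
                seen[word] = true
                return word
            end
        end
      * P(" ")^0 )^1 )

    local function uniqueclass(s)
        return table.concat(lpeg.match(splitter,s,1,{})," ")
    end

    print(uniqueclass("item item itemcontent")) -- item itemcontent
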
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index c7696d383..a4ebe38aa 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -38,8 +38,8 @@
\doubleexpandafter\back_export_set_element_tag_b
\fi\fi}
-\def\back_export_set_element_tag_a[#1][#2][#3]{\taggedctxcommand{settagproperty("#1","#2","#3")}}
-\def\back_export_set_element_tag_b[#1][#2][#3]{\taggedctxcommand{settagproperty("#1","export","#2")}}
+\def\back_export_set_element_tag_a[#1][#2][#3]{\clf_settagproperty{#1}{#2}{#3}}
+\def\back_export_set_element_tag_b[#1][#2][#3]{\clf_settagproperty{#1}{export}{#2}}
% todo: no need for calls when trialtypesetting
@@ -53,12 +53,25 @@
\newcount\tagparcounter
\let\dotagsetparcounter\relax
+\let\doresetparcounter \relax
\appendtoks
+ \doresetparcounter
+\to \everyflushatnextpar
+
+% \appendtoks
+% \dotagsetparcounter
+% \to \everypar
+
+\prependtoks
\dotagsetparcounter
\to \everypar
-\appendtoks
+% \appendtoks
+% \dotagsetparcounter
+% \to \neverypar
+
+\prependtoks
\dotagsetparcounter
\to \neverypar
@@ -67,54 +80,117 @@
\to \everytabulatepar % tricky, maybe this should be neverypar
\appendtoks
- \unexpanded\def\dotagTABLEcell {\taggedctxcommand{settagtablecell(\number\tablecellrows,\number\tablecellcolumns,\number\raggedstatus)}}%
- \unexpanded\def\dotagTABLEsignal{\char\zerocount}% brrr, we need to tag empty cells (unless we start numbering)
+ \unexpanded\def\dotagTABLEcell
+ {\iftrialtypesetting\else
+ \clf_settagtablecell
+ \numexpr\tablecellrows\relax
+ \numexpr\tablecellcolumns\relax
+ \numexpr\raggedstatus\relax
+ \fi}%
+ % brrr, we need to tag empty cells (unless we start numbering)
+ \unexpanded\def\dotagTABLEsignal
+ {\char\zerocount}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagtabulatecell {\taggedctxcommand{settagtabulatecell(\number\c_tabl_tabulate_align)}}%
- \unexpanded\def\dotagtabulatesignal{\dontleavehmode\char\zerocount\ignorespaces}%
+ \unexpanded\def\dotagtabulatecell
+ {\iftrialtypesetting\else\clf_settagtabulatecell\c_tabl_tabulate_align\fi}%
+ \unexpanded\def\dotagtabulatesignal
+ {\dontleavehmode\char\zerocount\ignorespaces}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagsynonym{\taggedctxcommand{settagsynonym("\currentsynonym","\currentsynonymtag")}}%
+ \unexpanded\def\dotagsynonym
+ {\iftrialtypesetting\else\clf_settagsynonym{\currentsynonymtag}\fi}%
+\to \everyenableelements
+
+\appendtoks
+ \unexpanded\def\dotagsorting
+ {\iftrialtypesetting\else\clf_settagsorting{\currentsortingtag}\fi}%
\to \everyenableelements
\appendtoks % frozen and assumed global per highlight class
- \unexpanded\def\dotaghighlight{\taggedctxcommand{settaghighlight("\currenthighlight",\!!bs\highlightparameter\c!style\!!es,\number\attribute\colorattribute)}}%
+ \unexpanded\def\dotaghighlight
+ {\iftrialtypesetting\else
+ \clf_settaghighlight{\highlightparameter\c!style}\attribute\colorattribute\relax
+ \fi}%
\to \everyenableelements
\appendtoks % we can have differently scaled images
- \unexpanded\def\dotagfigure{\taggedctxcommand{settagfigure("\figurefileoriginal","\figurefilepage",\number\dimexpr\figurewidth,\number\dimexpr\figureheight)}}%
+ \unexpanded\def\dotagfigure
+ {\iftrialtypesetting\else
+ \clf_settagfigure
+ {\figurefileoriginal}%
+ {\figurefullname}%
+ {\figurefilepage}%
+ \dimexpr\figurewidth\relax
+ \dimexpr\figureheight\relax
+ \fi}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagcombination{\taggedctxcommand{settagcombination(\combinationparameter\c!nx,\combinationparameter\c!ny)}}%
- %\unexpanded\def\dotagcombination{\taggedctxcommand{settagcombination(\number\horcombination,\number\totcombination)}}%
+ \unexpanded\def\dotagcombination
+ {\iftrialtypesetting\else
+ \clf_settagcombination
+ \numexpr\combinationparameter\c!nx\relax
+ \numexpr\combinationparameter\c!ny\relax
+ \fi}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagsorting{\taggedctxcommand{settagsorting("\currentsorting","\currentsortingtag")}}%
+ \unexpanded\def\dotagsetparcounter
+ {\global\advance\tagparcounter\plusone\attribute\taggedparattribute\tagparcounter}%
+ \unexpanded\def\doresetparcounter
+ {\attribute\taggedparattribute\attributeunsetvalue}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagsetparcounter{\global\advance\tagparcounter\plusone\attribute\taggedparattribute\tagparcounter}%
+ \unexpanded\def\dotagsetitemgroup
+ {\iftrialtypesetting\else
+ \clf_settagitemgroup
+ \ifconditional\c_strc_itemgroups_pack true\else false\fi\space
+ \numexpr\currentitemlevel\relax
+ {\currentitemgroupsymbol}%
+ \fi}%
+ \unexpanded\def\dotagsetitem#1%
+ {\iftrialtypesetting\else
+ \clf_settagitem{#1}%
+ \fi}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagsetitemgroup{\taggedctxcommand{settagitemgroup("\currentitemgroup",\ifconditional\c_strc_itemgroups_pack true\else false\fi,"\currentitemgroupsymbol")}}%
+ \unexpanded\def\dotagsetdescription
+ {\iftrialtypesetting\else
+ \clf_settagdescription{\currentdescription}\currentdescriptionnumberentry\relax
+ \fi}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagsetdescription{\taggedctxcommand{settagdescription("\currentdescription",\currentdescriptionnumberentry)}}%
+ \unexpanded\def\dotagsetnotesymbol
+ {\iftrialtypesetting\else
+ \clf_settagdescriptionsymbol{\currentnote}\currentnotenumber\relax
+ \fi}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\dotagsetnotesymbol{\taggedctxcommand{settagdescriptionsymbol("\currentnote",\currentnotenumber)}}%
+ \unexpanded\def\dotagregisterlocation
+ {\iftrialtypesetting\else
+ \clf_settagregister{\currentregister}\currentregisternumber\relax
+ \fi}%
\to \everyenableelements
\appendtoks
- \unexpanded\def\doverbatimspace{\char32\relax}% will be done permanently
+ \unexpanded\def\dotaglistlocation
+ {\iftrialtypesetting\else
+ \clf_settaglist\currentlistindex\relax
+ \fi}%
+\to \everyenableelements
+
+\appendtoks
+ \let\specialfixedspace \explicitfixedspace
+ \let\specialobeyedspace \explicitobeyedspace
+ \let\specialstretchedspace\explicitstretchedspace
+ \let\specialcontrolspace \explicitcontrolspace
\to \everyenableelements
% The action: \setupbackend[export=yes] % or filename
@@ -137,36 +213,51 @@
\c!author={\directinteractionparameter\c!author},
% \c!firstpage=, % imagename
% \c!lastpage=, % imagename
- \c!hyphen=\v!no]
+ \c!alternative=, % html, div
+ \c!properties=\v!no, % no: ignore, yes: as attribute, otherwise: use as prefix
+ \c!hyphen=\v!no,
+ \c!svgstyle=,
+ \c!cssfile=]
\def\dosynchronizeexport
{\let\currentexport\empty
- \ctxcommand{setupexport{
- align = "\exportparameter\c!align",
- bodyfont = \number\dimexpr\exportparameter\c!bodyfont,
- width = \number\dimexpr\exportparameter\c!width,
- hyphen = "\exportparameter\c!hyphen",
- title = \!!bs\exportparameter\c!title\!!es,
- subtitle = \!!bs\exportparameter\c!subtitle\!!es,
- author = \!!bs\exportparameter\c!author\!!es,
- firstpage = "\exportparameter\c!firstpage",
- lastpage = "\exportparameter\c!lastpage",
- }}}
+ \clf_setupexport
+ align {\exportparameter\c!align}%
+ bodyfont \dimexpr\exportparameter\c!bodyfont\relax
+ width \dimexpr\exportparameter\c!width\relax
+ properties {\exportparameter\c!properties}%
+ hyphen {\exportparameter\c!hyphen}%
+ title {\exportparameter\c!title}%
+ subtitle {\exportparameter\c!subtitle}%
+ author {\exportparameter\c!author}%
+ firstpage {\exportparameter\c!firstpage}%
+ lastpage {\exportparameter\c!lastpage}%
+ svgstyle {\exportparameter\c!svgstyle}%
+ cssfile {\exportparameter\c!cssfile}%
+ \relax}
\appendtoks
\doifsomething{\backendparameter\c!export}\dosynchronizeexport
\to \everystarttext
+% better (before pdf gets closed, so we can embed), but it needs testing:
+
+\appendtoks
+ \clf_finishexport
+\to \everystoptext
+
\appendtoks
\doifsomething{\backendparameter\c!export}\dosynchronizeexport % in case it is done inside \starttext
\to \everysetupdocument
-\appendtoks
- \doifsomething{\backendparameter\c!xhtml}
- {\enabledirectives[backend.export.xhtml=\backendparameter\c!xhtml]}%
- \doifsomething{\backendparameter\c!css}
- {\enabledirectives[backend.export.css={\backendparameter\c!css}]}%
-\to \everysetupbackend
+% \appendtoks
+% \doifsomething{\backendparameter\c!xhtml}
+% {\enabledirectives[backend.export.xhtml=\backendparameter\c!xhtml]}%
+% \doifsomething{\backendparameter\c!css}
+% {\enabledirectives[backend.export.css={\backendparameter\c!css}]}%
+% \doifsomething{\backendparameter\c!alternative}
+% {\enabledirectives[backend.export.alternative={\backendparameter\c!alternative}]}%
+% \to \everysetupbackend
\appendtoks
\doifelsenothing{\backendparameter\c!export}
diff --git a/tex/context/base/back-ini.lua b/tex/context/base/back-ini.lua
index 6f58b3262..e8af4d9d9 100644
--- a/tex/context/base/back-ini.lua
+++ b/tex/context/base/back-ini.lua
@@ -6,6 +6,13 @@ if not modules then modules = { } end modules ['back-ini'] = {
license = "see context related readme files"
}
+-- -- how to create a shortcut:
+--
+-- local function something(...)
+-- something = backends.codeinjections.something
+-- return something(...)
+-- end
+
local next, type = next, type
local format = string.format
@@ -95,3 +102,21 @@ tables.vfspecials = allocate {
startslant = comment,
stopslant = comment,
}
+
+-- we'd better have this return something (defaults)
+
+function codeinjections.getpos () return 0, 0 end
+function codeinjections.gethpos () return 0 end
+function codeinjections.getvpos () return 0 end
+function codeinjections.hasmatrix() return false end
+function codeinjections.getmatrix() return 1, 0, 0, 1, 0, 0 end
+
+-- can best be here
+
+interfaces.implement {
+ name = "setrealspaces",
+ arguments = "string",
+ actions = function(v)
+ nodes.tasks.setaction("shipouts","nodes.handlers.accessibility",v == interfaces.variables.yes)
+ end
+}
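
The "how to create a shortcut" comment near the top of this hunk describes a lazy-binding idiom that is easy to misread: the wrapper replaces itself with the real codeinjections function the first time it is called, so later calls skip the lookup. A self-contained sketch in plain Lua, using a dummy backends table and a made-up something function rather than anything from back-ini.lua:

    local backends = { codeinjections = { } }

    function backends.codeinjections.something(x)
        return x * 2
    end

    -- the first call resolves the real implementation and rebinds the local;
    -- subsequent calls through the local go straight to codeinjections.something
    local function something(...)
        something = backends.codeinjections.something
        return something(...)
    end

    print(something(21)) -- 42
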
diff --git a/tex/context/base/back-ini.mkiv b/tex/context/base/back-ini.mkiv
index fc8759c14..2b0f054ee 100644
--- a/tex/context/base/back-ini.mkiv
+++ b/tex/context/base/back-ini.mkiv
@@ -23,8 +23,9 @@
\unprotect
-\ifdefined\everybackendshipout \else \newtoks\everybackendshipout \fi
-\ifdefined\everylastbackendshipout \else \newtoks\everylastbackendshipout \fi
+\ifdefined\everybackendshipout \else \newtoks\everybackendshipout \fi
+\ifdefined\everylastbackendshipout \else \newtoks\everylastbackendshipout \fi
+\ifdefined\everybackendlastinshipout \else \newtoks\everybackendlastinshipout \fi % e.g. finalize via latelua
%D Right from the start \CONTEXT\ had a backend system based on
%D runtime pluggable code. As most backend issues involved specials
@@ -50,8 +51,6 @@
%D Not everything here makes sense and the content of this file will
%D definitely change.
-\newcount\backendtransformlevel
-
\let \dostartrotation \gobbleoneargument
\let \dostoprotation \donothing
\let \dostartscaling \gobbletwoarguments
@@ -126,9 +125,9 @@
%D From now on, mapfile loading is also a special; we assume the
%D more or less standard dvips syntax.
-\let \doresetmapfilelist \donothing
-\let \doloadmapfile \gobbletwoarguments % + - = | filename
-\let \doloadmapline \gobbletwoarguments % + - = | fileline
+%let \doresetmapfilelist \donothing
+%let \doloadmapfile \gobbletwoarguments % + - = | filename
+%let \doloadmapline \gobbletwoarguments % + - = | fileline
%D \macros
%D {jobsuffix}
@@ -153,4 +152,14 @@
\back_job_set_suffix{pdf} % default
+% \setupbackend[space=yes] % replace spacing in (pdf) file
+
+\appendtoks
+ \clf_setrealspaces{\backendparameter\c!space}%
+\to \everysetupbackend
+
+%D For older styles:
+
+\let\setupoutput\gobbleoneoptional
+
\protect \endinput
diff --git a/tex/context/base/back-pdf.lua b/tex/context/base/back-pdf.lua
index f8a5dab6f..7f0b1acc7 100644
--- a/tex/context/base/back-pdf.lua
+++ b/tex/context/base/back-pdf.lua
@@ -6,8 +6,24 @@ if not modules then modules = { } end modules ['back-pdf'] = {
license = "see context related readme files"
}
+-- we could do \pdfmatrix sx <> sy <> etc
+
+local tonumber = tonumber
+local sind, cosd = math.sind, math.cosd
+local insert, remove = table.insert, table.remove
+
local codeinjections = backends.pdf.codeinjections
+local context = context
+
+local scanners = tokens.scanners
+local scanstring = scanners.string
+local scannumber = scanners.number
+local scaninteger = scanners.integer
+local scankeyword = scanners.keyword
+
+local scanners = interfaces.scanners
+
local outputfilename
function codeinjections.getoutputfilename()
@@ -19,15 +35,11 @@ end
backends.install("pdf")
-local context = context
-
-local sind, cosd = math.sind, math.cosd
-local insert, remove = table.insert, table.remove
+local f_matrix = string.formatters["%F %F %F %F"] -- 0.8 is default
-local f_matrix = string.formatters["%0.8f %0.8f %0.8f %0.8f"]
-
-function commands.pdfrotation(a)
+scanners.pdfrotation = function() -- a
-- todo: check for 1 and 0 and flush sparse
+ local a = scannumber()
local s, c = sind(a), cosd(a)
context(f_matrix(c,s,-s,c))
end
@@ -38,27 +50,36 @@ end
--
-- we could also do the save restore wrapping here + colorhack
+local pdfsave = nodes.pool.pdfsave
+local pdfrestore = nodes.pool.pdfrestore
local pdfsetmatrix = nodes.pool.pdfsetmatrix
-local stack = { }
-local function popmatrix()
- local top = remove(stack)
- if top then
- context(pdfsetmatrix(unpack(top)))
- end
-end
+local stack = { }
+local restore = true -- false
-function commands.pdfstartrotation(a)
+scanners.pdfstartrotation = function() -- a
+ local a = scannumber()
if a == 0 then
insert(stack,false)
else
local s, c = sind(a), cosd(a)
+ context(pdfsave())
context(pdfsetmatrix(c,s,-s,c))
- insert(stack,{ c, -s, s, c })
+ insert(stack,restore and { c, -s, s, c } or true)
end
end
-function commands.pdfstartscaling(sx,sy)
+scanners.pdfstartscaling = function() -- sx sy
+ local sx, sy = 0, 0
+ while true do
+ if scankeyword("sx") then
+ sx = scannumber()
+ elseif scankeyword("sy") then
+ sy = scannumber()
+ else
+ break
+ end
+ end
if sx == 1 and sy == 1 then
insert(stack,false)
else
@@ -68,25 +89,62 @@ function commands.pdfstartscaling(sx,sy)
if sy == 0 then
sy = 0.0001
end
+ context(pdfsave())
context(pdfsetmatrix(sx,0,0,sy))
- insert(stack,{ 1/sx, 0, 0, 1/sy })
+ insert(stack,restore and { 1/sx, 0, 0, 1/sy } or true)
end
end
-function commands.pdfstartmirroring()
- context(pdfsetmatrix(-1,0,0,1))
-end
-
-function commands.pdfstartmatrix(sx,rx,ry,sy) -- tx, ty
+scanners.pdfstartmatrix = function() -- sx rx ry sy -- tx, ty
+ local sx, rx, ry, sy = 0, 0, 0, 0
+ while true do
+ if scankeyword("sx") then
+ sx = scannumber()
+ elseif scankeyword("sy") then
+ sy = scannumber()
+ elseif scankeyword("rx") then
+ rx = scannumber()
+ elseif scankeyword("ry") then
+ ry = scannumber()
+ else
+ break
+ end
+ end
if sx == 1 and rx == 0 and ry == 0 and sy == 1 then
insert(stack,false)
else
+ context(pdfsave())
context(pdfsetmatrix(sx,rx,ry,sy))
- insert(stack,{ -sx, -rx, -ry, -sy })
+ insert(stack,restore and { -sx, -rx, -ry, -sy } or true)
+ end
+end
+
+local function pdfstopsomething()
+ local top = remove(stack)
+ if top == false then
+ -- not wrapped
+ elseif top == true then
+ context(pdfrestore())
+ elseif top then
+ context(pdfsetmatrix(unpack(top)))
+ context(pdfrestore())
+ else
+ -- nesting error
end
end
-commands.pdfstoprotation = popmatrix
-commands.pdfstopscaling = popmatrix
-commands.pdfstopmirroring = commands.pdfstartmirroring
-commands.pdfstopmatrix = popmatrix
+scanners.pdfstoprotation = pdfstopsomething
+scanners.pdfstopscaling = pdfstopsomething
+scanners.pdfstopmatrix = pdfstopsomething
+
+scanners.pdfstartmirroring = function()
+ context(pdfsetmatrix(-1,0,0,1))
+end
+
+scanners.pdfstopmirroring = scanners.pdfstartmirroring
+
+scanners.registerbackendsymbol = function()
+ backends.codeinjections.registersymbol(scanstring(),scaninteger())
+end
+
+-- todo : clipping
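
One detail of the matrix scanners above that is worth spelling out: pdfstartrotation emits the matrix (c, s, -s, c) and, when restoring via a matrix rather than plain q/Q, keeps (c, -s, s, c) on the stack, which is exactly the inverse rotation, so replaying it cancels the transform. A small plain Lua check of that claim; multiply is my own helper and the degree-based sind/cosd from ConTeXt are replaced by math.sin/math.cos with an explicit conversion:

    -- 2x2 matrices stored row-wise as { a, b, c, d } meaning [ a b ; c d ]
    local function multiply(m,n)
        return {
            m[1]*n[1] + m[2]*n[3], m[1]*n[2] + m[2]*n[4],
            m[3]*n[1] + m[4]*n[3], m[3]*n[2] + m[4]*n[4],
        }
    end

    local a    = 30
    local s, c = math.sin(math.rad(a)), math.cos(math.rad(a))

    local forward  = { c, s, -s, c } -- what pdfsetmatrix(c,s,-s,c) emits
    local backward = { c, -s, s, c } -- what the stack remembers for the restore

    -- the product is the identity matrix (up to floating point rounding)
    print(table.concat(multiply(forward,backward),", "))
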
diff --git a/tex/context/base/back-pdf.mkiv b/tex/context/base/back-pdf.mkiv
index 948a14138..38de4bbfd 100644
--- a/tex/context/base/back-pdf.mkiv
+++ b/tex/context/base/back-pdf.mkiv
@@ -18,8 +18,8 @@
\registerctxluafile{lpdf-nod}{1.001}
\registerctxluafile{lpdf-col}{1.000}
\registerctxluafile{lpdf-xmp}{1.001}
-\registerctxluafile{lpdf-mis}{1.001}
\registerctxluafile{lpdf-ano}{1.001}
+\registerctxluafile{lpdf-mis}{1.001}
\registerctxluafile{lpdf-ren}{1.001}
\registerctxluafile{lpdf-grp}{1.001}
\registerctxluafile{lpdf-wid}{1.001}
@@ -41,17 +41,31 @@
%D
%D Here we initialize some internal quantities. We also protect them.
-\pdfoutput 1 \let\pdfoutput \undefined \newcount\pdfoutput \pdfoutput 1
-\pdfhorigin 1 true in \let\pdfhorigin\undefined \newdimen\pdfhorigin \pdfhorigin 1 true in
-\pdfvorigin 1 true in \let\pdfvorigin\undefined \newdimen\pdfvorigin \pdfvorigin 1 true in
+\pdfoutput \plusone \let\pdfoutput \undefined \newcount\pdfoutput \pdfoutput \plusone
+
+%D These are already set:
+
+\pdfhorigin 1 true in \let\pdfhorigin\undefined \newdimen\pdfhorigin \pdfhorigin 1 true in
+\pdfvorigin \pdfhorigin \let\pdfvorigin\undefined \newdimen\pdfvorigin \pdfvorigin \pdfhorigin
-% most of these will be protected as well
+%D These too and most of them will be protected as well:
+\pdfminorversion \plusseven
+\pdfgentounicode \plusone \let\pdfgentounicode \undefined \newcount\pdfgentounicode
+\pdfinclusioncopyfonts \plusone \let\pdfinclusioncopyfonts \undefined \newcount\pdfinclusioncopyfonts
+\pdfinclusionerrorlevel \zerocount \let\pdfinclusionerrorlevel\undefined \newcount\pdfinclusionerrorlevel
+\pdfdecimaldigits \plusten \let\pdfdecimaldigits \undefined \newcount\pdfdecimaldigits
\pdfimageresolution 300
\pdfpkresolution 600
-\pdfdecimaldigits 10
-\pdfinclusionerrorlevel 0
-\pdfminorversion 6 % use setupbackend for changes
+
+%D Let's block these (we could share a dummy):
+
+\let\pdfcatalog \relax \newtoks\pdfcatalog
+\let\pdfinfo \relax \newtoks\pdfinfo
+\let\pdfnames \relax \newtoks\pdfnames
+\let\pdfpageresources\relax \newtoks\pdfpageresources
+\let\pdfpageattr \relax \newtoks\pdfpageattr
+\let\pdfpagesattr \relax \newtoks\pdfpagesattr
%D This one can be consulted by users although the suffix is also
%D a system mode.
@@ -65,20 +79,20 @@
\appendtoks
\doifsomething{\backendparameter{xmpfile}}
- {\ctxcommand{setxmpfile("\backendparameter{xmpfile}")}}%
+ {\clf_setxmpfile{\backendparameter{xmpfile}}}%
\to \everysetupbackend
% \doifsomething{\backendparameter\c!format} .. at the lua end
\appendtoks
- \ctxcommand{setformat {
- format = "\backendparameter\c!format",
- level = "\backendparameter\c!level",
- option = "\backendparameter\c!option",
- profile = "\backendparameter\c!profile",
- intent = "\backendparameter\c!intent",
- file = "\backendparameter\c!file",
- }}%
+ \clf_setformat
+ format {\backendparameter\c!format}%
+ level {\backendparameter\c!level}%
+ option {\backendparameter\c!option}%
+ profile {\backendparameter\c!profile}%
+ intent {\backendparameter\c!intent}%
+ file {\backendparameter\c!file}%
+ \relax
\to \everysetupbackend
%D For the moment we keep these.
@@ -88,30 +102,22 @@
%D These are the only official methods to add stuff to the resources.
-\unexpanded\def\pdfbackendsetcatalog #1#2{\ctxlua{lpdf.addtocatalog ("#1",\!!bs#2\!!es)}}
-\unexpanded\def\pdfbackendsetinfo #1#2{\ctxlua{lpdf.addtoinfo ("#1",\!!bs#2\!!es)}}
-\unexpanded\def\pdfbackendsetname #1#2{\ctxlua{lpdf.addtonames ("#1",\!!bs#2\!!es)}}
-
-\unexpanded\def\pdfbackendsetpageattribute #1#2{\ctxlua{lpdf.addtopageattributes ("#1",\!!bs#2\!!es)}}
-\unexpanded\def\pdfbackendsetpagesattribute#1#2{\ctxlua{lpdf.addtopagesattributes("#1",\!!bs#2\!!es)}}
-\unexpanded\def\pdfbackendsetpageresource #1#2{\ctxlua{lpdf.addtopageresources ("#1",\!!bs#2\!!es)}}
+\unexpanded\def\pdfbackendsetcatalog #1#2{\clf_lpdf_addtocatalog {#1}{#2}}
+\unexpanded\def\pdfbackendsetinfo #1#2{\clf_lpdf_addtoinfo {#1}{#2}}
+\unexpanded\def\pdfbackendsetname #1#2{\clf_lpdf_addtonames {#1}{#2}}
-\unexpanded\def\pdfbackendsetextgstate #1#2{\ctxlua{lpdf.adddocumentextgstate ("#1",lpdf.verbose(\!!bs#2\!!es))}}
-\unexpanded\def\pdfbackendsetcolorspace #1#2{\ctxlua{lpdf.adddocumentcolorspace("#1",lpdf.verbose(\!!bs#2\!!es))}}
-\unexpanded\def\pdfbackendsetpattern #1#2{\ctxlua{lpdf.adddocumentpattern ("#1",lpdf.verbose(\!!bs#2\!!es))}}
-\unexpanded\def\pdfbackendsetshade #1#2{\ctxlua{lpdf.adddocumentshade ("#1",lpdf.verbose(\!!bs#2\!!es))}}
+\unexpanded\def\pdfbackendsetpageattribute #1#2{\clf_lpdf_addtopageattributes {#1}{#2}}
+\unexpanded\def\pdfbackendsetpagesattribute#1#2{\clf_lpdf_addtopagesattributes {#1}{#2}}
+\unexpanded\def\pdfbackendsetpageresource #1#2{\clf_lpdf_addtopageresources {#1}{#2}}
- \def\pdfbackendcurrentresources {\cldcontext{lpdf.collectedresources()}}
- \def\pdfcolor #1{\ctxlua{lpdf.pdfcolor(\thecolorattribute{#1})}} \let\PDFcolor\pdfcolor
+\unexpanded\def\pdfbackendsetextgstate #1#2{\clf_lpdf_adddocumentextgstate {#1}{#2}}
+\unexpanded\def\pdfbackendsetcolorspace #1#2{\clf_lpdf_adddocumentcolorspace{#1}{#2}}
+\unexpanded\def\pdfbackendsetpattern #1#2{\clf_lpdf_adddocumentpattern {#1}{#2}}
+\unexpanded\def\pdfbackendsetshade #1#2{\clf_lpdf_adddocumentshade {#1}{#2}}
-%D Let's block these:
-
-\let\pdfcatalog \relax \newtoks\pdfcatalog
-\let\pdfinfo \relax \newtoks\pdfinfo
-\let\pdfnames \relax \newtoks\pdfnames
-\let\pdfpageresources\relax \newtoks\pdfpageresources
-\let\pdfpageattr \relax \newtoks\pdfpageattr
-\let\pdfpagesattr \relax \newtoks\pdfpagesattr
+ \def\pdfbackendcurrentresources {\clf_lpdf_collectedresources}
+ \def\pdfcolor #1{\clf_lpdf_color\numexpr\thecolorattribute{#1}\relax}
+ \let\PDFcolor\pdfcolor
%D An example of usage is:
@@ -120,99 +126,85 @@
\pdfbackendsetinfo{ConTeXt.Time} {\number\normalyear.\twodigits\normalmonth.\twodigits\normalday\space \twodigits\currenthour:\twodigits\currentminute}%
\pdfbackendsetinfo{ConTeXt.Jobname}{\jobname}%
\pdfbackendsetinfo{ConTeXt.Url} {www.pragma-ade.com}%
+ \pdfbackendsetinfo{ConTeXt.Support}{contextgarden.net}%
\to \everylastbackendshipout
%D Transformations. Some day we will use primitives (once they're fixed).
+
% todo: inject at the lua end cq. deal with #5 and #6 too
% % % rotation % % %
-\unexpanded\def\dostartrotation#1% grouped
- {\advance\backendtransformlevel\plusone
- \forcecolorhack
- \pdfsave
- \pdfsetmatrix{\ctxcommand{pdfrotation(#1)}}}
-
-\unexpanded\def\dostoprotation
- {\pdfrestore
- \forcecolorhack
- \advance\backendtransformlevel\minusone}
-
% \unexpanded\def\dostartrotation#1% grouped
% {\forcecolorhack
-% \advance\backendtransformlevel\plusone
-% \ctxcommand{pdfstartrotation(#1)}}
+% \pdfsave
+% \pdfsetmatrix{\clf_pdfrotation#1}}
% \unexpanded\def\dostoprotation
-% {\ctxcommand{pdfstoprotation()}%
-% \advance\backendtransformlevel\minusone}
+% {\pdfrestore
+% \forcecolorhack}
-% % % scaling % % %
+\unexpanded\def\dostartrotation#1%
+ {\forcecolorhack
+ \clf_pdfstartrotation#1\relax}
-\unexpanded\def\dostartscaling#1#2% the test is needed because acrobat is bugged!
- {\advance\backendtransformlevel\plusone
- \forcecolorhack % maybe use signal instead
- \pdfsave
- \pdfsetmatrix
- {\ifdim#1\points=\zeropoint.0001\else#1\fi\space 0 0
- \ifdim#2\points=\zeropoint.0001\else#2\fi\space}}% 0 0
+\unexpanded\def\dostoprotation
+ {\clf_pdfstoprotation}
-\unexpanded\def\dostopscaling
- {\pdfrestore
- \forcecolorhack
- \advance\backendtransformlevel\minusone}
+% % % scaling % % %
% \unexpanded\def\dostartscaling#1#2% the test is needed because acrobat is bugged!
-% {\forcecolorhack
-% \advance\backendtransformlevel\plusone
-% \ctxcommand{pdfstartscaling(#1,#2)}}
+% {\forcecolorhack % maybe use signal instead
+% \pdfsave
+% \pdfsetmatrix
+% {\ifdim#1\points=\zeropoint.0001\else#1\fi\space 0 0
+% \ifdim#2\points=\zeropoint.0001\else#2\fi\space}}% 0 0
% \unexpanded\def\dostopscaling
-% {\ctxcommand{pdfstopscaling()}%
-% \advance\backendtransformlevel\minusone}
+% {\pdfrestore
+% \forcecolorhack}
-% % % mirroring % % %
+\unexpanded\def\dostartscaling#1#2%
+ {\forcecolorhack
+ \clf_pdfstartscaling sx #1 sy #2\relax}
-\unexpanded\def\dostartmirroring
- {\advance\backendtransformlevel\plusone
- \forcecolorhack
- \pdfsave
- \pdfsetmatrix{-1 0 0 1}} % 0 0
+\unexpanded\def\dostopscaling
+ {\clf_pdfstopscaling}
-\unexpanded\def\dostopmirroring
- {\pdfrestore
- \forcecolorhack
- \advance\backendtransformlevel\minusone}
+% % % mirroring % % %
% \unexpanded\def\dostartmirroring
-% {\advance\backendtransformlevel\plusone
-% \ctxcommand{pdfstartmirroring()}}
+% {\forcecolorhack
+% \pdfsave
+% \pdfsetmatrix{-1 0 0 1}} % 0 0
% \unexpanded\def\dostopmirroring
-% {\ctxcommand{pdfstopmirroring()}%
-% \advance\backendtransformlevel\minusone}
+% {\pdfrestore
+% \forcecolorhack}
+
+\unexpanded\def\dostartmirroring
+ {\clf_pdfstartmirroring}
+
+\unexpanded\def\dostopmirroring
+ {\clf_pdfstopmirroring}
% % % transform % % %
-\unexpanded\def\dotransformnextbox#1#2#3#4#5#6% sx rx ry sy tx ty (will change) / basepoints !
- {\advance\backendtransformlevel\plusone
- % fixing ht/dp/wd should happen elsewhere
- \dowithnextbox{\dodotransformnextbox{#5}{#6}{#1 #2 #3 #4}}}
-
-\unexpanded\def\dodotransformnextbox#1#2#3%
- {%\forcecolorhack
- \hbox
- {\kern#1\onebasepoint
- \raise#2\onebasepoint\hbox
- {\pdfsave
- \pdfsetmatrix{#3}% 0 0 (no #5 #6 yet)
- \box\nextbox
- \pdfrestore
- \advance\backendtransformlevel\minusone}}}
+% \unexpanded\def\dotransformnextbox#1#2#3#4#5#6% sx rx ry sy tx ty (will change) / basepoints !
+% {\dowithnextbox{\dodotransformnextbox{#5}{#6}{#1 #2 #3 #4}}}
+
+% \unexpanded\def\dodotransformnextbox#1#2#3%
+% {\hbox
+% {\kern#1\onebasepoint
+% \raise#2\onebasepoint\hbox
+% {\pdfsave
+% \pdfsetmatrix{#3}% 0 0 (no #5 #6 yet)
+% \box\nextbox
+% \pdfrestore
+% }}}
% \unexpanded\def\dotransformnextbox#1#2#3#4#5#6% sx rx ry sy tx ty (will change) / basepoints !
-% {\advance\backendtransformlevel\plusone
-% % fixing ht/dp/wd should happen elsewhere
+% {% fixing ht/dp/wd should happen elsewhere
% \dowithnextbox{\dodotransformnextbox{#1}{#2}{#3}{#4}{#5}{#6}}}
% \unexpanded\def\dodotransformnextbox#1#2#3#4#5#6%
@@ -220,17 +212,67 @@
% {\kern #5\onebasepoint
% \raise#6\onebasepoint
% \hbox
-% {\ctxcommand{pdfstartmatrix(#1,#2,#3,#4)}%
+% {\clf_pdfstartmatrix sx #1 rx #2 ry #3 sy #4\relax
% \box\nextbox
-% \ctxcommand{pdfstopmatrix()}%
-% \advance\backendtransformlevel\minusone}}}
+% \clf_pdfstopmatrix}}}
+
+\unexpanded\def\dotransformnextbox#1#2#3#4#5#6% sx rx ry sy tx ty (will change) / basepoints !
+ {\dowithnextbox{\dodotransformnextbox{#1}{#2}{#3}{#4}{#5}{#6}}}
+
+\unexpanded\def\dodotransformnextbox#1#2#3#4#5#6%
+ {\hbox
+ {\kern #5\onebasepoint
+ \raise#6\onebasepoint
+ \hbox
+ {\clf_pdfstartmatrix sx #1 rx #2 ry #3 sy #4\relax
+ \box\nextbox
+ \clf_pdfstopmatrix}}}
+
+% somehow the shift is not happening .. bug in luatex?
+%
+% \unexpanded\def\dodotransformnextbox#1#2#3#4#5#6%
+% {\ctxcommand{pdftransformbox(\number\nextbox,#1,#2,#3,#4,\number\dimexpr#5\onebasepoint,\number\dimexpr#6\onebasepoint)}%
+% \box\nextbox}
+%
+% \startluacode
+% function commands.pdftransformbox(box,sx,rx,ry,sy,tx,ty)
+% if sx == 1 and rx == 0 and ry == 0 and sy == 1 then
+% if tx == 0 and ty == 0 then
+% local b = nodes.hpack(nodes.concat {
+% nodes.pool.kern(tx),
+% nodes.takebox(box),
+% })
+% b.shift = -ty
+% tex.setbox(box,b)
+% else
+% -- no need to transform
+% end
+% else
+% local b = nodes.hpack(nodes.concat {
+% nodes.pool.kern(tx),
+% nodes.pool.pdfsave(),
+% nodes.pool.pdfsetmatrix(sx,rx,ry,sy),
+% nodes.takebox(box),
+% nodes.pool.pdfsetmatrix(-sx,-rx,-ry,-sy),
+% nodes.pool.pdfrestore(),
+% })
+% b.shift = -ty
+% tex.setbox(box,b)
+% end
+% end
+% \stopluacode
% % % clipping % % %
\unexpanded\def\dostartclipping#1#2#3% we can move this to lua and only set a box here
{\PointsToBigPoints{#2}\width
\PointsToBigPoints{#3}\height
- \meta_grab_clip_path{#1}\width\height{0 0 m \width\space 0 l \width \height l 0 \height l}%
+ \meta_grab_clip_path{#1}\width\height{%
+ 0 0 m %
+ \width\space 0 l %
+ \width\space \height\space l %
+ 0 \height\space l%
+ }%
\pdfliteral{q 0 w \MPclippath\space W n}}
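The reworked \dostartclipping spells out the rectangle with explicit \space tokens, presumably because TeX swallows the space after a control word, so \width \height would otherwise expand to two numbers run together in the emitted path. A minimal Lua sketch of what the final literal looks like for a 100bp by 50bp box (the helper name is made up here, and the MetaPost grabbing step is ignored):

  -- hypothetical helper, not the ConTeXt backend: format the rectangular
  -- clip path and wrap it in the usual q ... W n clipping sequence
  local function clipliteral(width,height) -- dimensions in big points
      local path = string.format("0 0 m %s 0 l %s %s l 0 %s l",width,width,height,height)
      return "q 0 w " .. path .. " W n"
  end

  print(clipliteral(100,50)) -- q 0 w 0 0 m 100 0 l 100 50 l 0 50 l W n

Without the separators the adjacent coordinates would be glued into one token, which a PDF reader cannot split.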
\unexpanded\def\dostopclipping
@@ -238,7 +280,7 @@
%D The following will move to the backend \LUA\ code:
-\appendtoks \ctxlua{backends.codeinjections.finalizepage ()}\to \everybackendshipout % is immediate
+%appendtoks \ctxlua{backends.codeinjections.finalizepage ()}\to \everybackendshipout % is immediate
%appendtoks \ctxlua{backends.codeinjections.finalizedocument()}\to \everylastbackendshipout % is immediate
%D Temporary hack, will be removed or improved or default.
@@ -270,13 +312,13 @@
{\the\pdfbackendeveryxform
\finalizeobjectbox\objectbox
\immediate\pdfxform resources {\pdfbackendcurrentresources}\objectbox
- \dosetobjectreference{#1}{#2}{\the\pdflastxform}}
+ \dosetobjectreference{#1}{#2}\pdflastxform}
\let\m_back_object_reference\empty
-\def\doinsertobject#1#2%
+\unexpanded\def\doinsertobject#1#2%
{\begingroup
- \doifobjectreferencefoundelse{#1}{#2}
+ \doifelseobjectreferencefound{#1}{#2}
{\dogetobjectreference{#1}{#2}\m_back_object_reference
\pdfrefxform\m_back_object_reference}%
\donothing
@@ -284,12 +326,12 @@
\let\lastpredefinedsymbol\empty % some day we can do more at the lua end
-\def\predefinesymbol[#1]%
+\unexpanded\def\predefinesymbol[#1]%
{\begingroup
\xdef\lastpredefinedsymbol{#1}%
\settightobject{SYM}{#1}\hbox{\symbol[#1]}% to be checked ... maybe only fitting
\dogetobjectreference{SYM}{#1}\m_back_object_reference
- \ctxlua{backends.codeinjections.registersymbol("#1",\m_back_object_reference)}%
+ \clf_registerbackendsymbol{#1}\m_back_object_reference\relax
\endgroup}
% for the moment here
@@ -527,7 +569,7 @@
\xmin \space \ymin \space \xmin \space \yymin\space y
\or % 28
\fi
- \ifnum\mode>8\space
+ \ifnum\mode>\pluseight\space
S
\else
\ifnum\dostroke=\plusone S \fi
@@ -541,13 +583,15 @@
\box\scratchbox
\egroup}
-\unexpanded\def\pdfactualtext#1#2% not interfaced
- {\pdfliteral direct{/Span <</ActualText \ctxlua{tex.write(lpdf.tosixteen("#2"))} >> BDC}%
+\unexpanded\def\pdfbackendactualtext#1#2% not interfaced
+ {\clf_startactualtext{#2}%
#1%
- \pdfliteral direct{EMC}}
+ \clf_stopactualtext}
+
+\let\pdfactualtext\pdfbackendactualtext
% \starttext
-% text \pdfactualtext{Meier}{Müller} text
+% text \pdfbackendactualtext{Meier}{Müller} text
% \stoptext
\protect \endinput
diff --git a/tex/context/base/back-u3d.mkiv b/tex/context/base/back-u3d.mkiv
index 89d26ee41..297dd5545 100644
--- a/tex/context/base/back-u3d.mkiv
+++ b/tex/context/base/back-u3d.mkiv
@@ -127,7 +127,7 @@
\startluaparameterset[u3d:myset:display:3]
toolbar = true,
tree = false,
- preview = 'area.png'
+ preview = 'axes.png'
\stopluaparameterset
\startluaparameterset[u3d:myset:display:4]
diff --git a/tex/context/base/bibl-bib.lua b/tex/context/base/bibl-bib.lua
index 65ca1f9e1..baeb3d2f9 100644
--- a/tex/context/base/bibl-bib.lua
+++ b/tex/context/base/bibl-bib.lua
@@ -105,7 +105,7 @@ local spacing = space^0
local equal = P("=")
local collapsed = (space^1)/ " "
-local function add(a,b) if b then return a..b else return a end end
+----- function add(a,b) if b then return a..b else return a end end
local keyword = C((R("az","AZ","09") + S("@_:-"))^1) -- C((1-space)^1)
local s_quoted = ((escape*single) + collapsed + (1-single))^0
diff --git a/tex/context/base/bibl-bib.mkiv b/tex/context/base/bibl-bib.mkiv
index d9010294d..80d04099b 100644
--- a/tex/context/base/bibl-bib.mkiv
+++ b/tex/context/base/bibl-bib.mkiv
@@ -320,7 +320,7 @@
% [\c!sorttype=\v!cite,
% \c!sort=no]
-% \long\unexpanded\def\startpublication#1\stoppublication
+% \unexpanded\def\startpublication#1\stoppublication
% {\blank
% todo
% \blank}
@@ -628,7 +628,7 @@
\strc_lists_place_current
{\currentbibtexsession}
{\currentbibtexcriterium}
- {\namedlistparameter\currentbibtexsession\c!number}%
+ {}%
{\namedlistparameter\currentbibtexsession\c!extras}%
{\namedlistparameter\currentbibtexsession\c!order}%
\ctxlua{bibtex.hacks.flush("\bibtexpublicationsparameter\c!sorttype")}%
@@ -648,7 +648,7 @@
\fi\fi}
\def\dotypesetbibtexpublication
- {\doifbibreferencefoundelse\currentbibtexsessiontag
+ {\doifelsebibreferencefound\currentbibtexsessiontag
{\global\advance\bibtexcounter\plusone
\ctxlua{bibtex.hacks.registerplaced("\currentbibtexsessiontag")}%
\let\currentlist\currentbibtexsession
@@ -680,13 +680,13 @@
\unexpanded\def\bibtexcitation[#1]%
{\edef\currentbibtexsession{#1}%
- \strictdoifnextoptionalelse\dobibtexcitation\dobibtexref}
+ \strictdoifelsenextoptional\dobibtexcitation\dobibtexref}
\def\dobibtexref#1%
{\dodobibtexcitation[#1][]}
\def\dobibtexcitation[#1]%
- {\strictdoifnextoptionalelse{\dodobibtexcitation[#1]}{\dodobibtexcitation[#1][]}}
+ {\strictdoifelsenextoptional{\dodobibtexcitation[#1]}{\dodobibtexcitation[#1][]}}
\def\dodobibtexcitation[#1][#2]%
{\dontleavehmode
@@ -702,7 +702,7 @@
\def\dowhatevercitation#1#2%
{\processcommalist[#2]\dobibtexcitationindeed
\setupinteraction[\c!style=]% use flag instead
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\getparameters[\??pb\??pb][\c!alternative=,\c!extras=,#1]%
\edef\currentbibtexvariant{\@@pb@@pbalternative}%
\ifx\currentbibtexvariant\empty
@@ -808,7 +808,7 @@
% helpers
-\def\doifbibtexinteractionelse
+\unexpanded\def\doifelsebibtexinteraction
{\iflocation
\edef\temp{\bibtexcitationparameter\c!interaction}%
\ifx\temp\v!stop
@@ -820,6 +820,8 @@
\@EA\secondoftwoarguments
\fi}
+\let\doifbibtexinteractionelse\doifelsebibtexinteraction
+
% variants
% todo: lastsep here
@@ -837,23 +839,23 @@
\fi}
\def\inbibtexlink#1#2%
- {\doifreferencefoundelse{\bibtexrefprefix#1}
+ {\doifelsereferencefound{\bibtexrefprefix#1}
{\goto{#2}[\bibtexrefprefix#1]}
{!#1!}}
\def\dobibtexgotolink#1#2%
- {\doifreferencefoundelse{\bibtexrefprefix#1}
+ {\doifelsereferencefound{\bibtexrefprefix#1}
{\goto{#2}[\bibtexrefprefix#1]}
{!#1!}}
\def\dobibattexlink#1#2%
- {\doifreferencefoundelse{\bibtexrefprefix#1}
+ {\doifelsereferencefound{\bibtexrefprefix#1}
{\at{#2}[\bibtexrefprefix#1]}
{!#1!}}
\def\dobibtexurllink#1#2%
{\expanded{\useURL[bibtex:url:#1][#2]}%
- \doifbibtexinteractionelse
+ \doifelsebibtexinteraction
{\goto{\url[bibtex:url:#1]}[url(bibtex:url:#1)]}
{\url[bibtex:url:#1]}}
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 82f8dc2aa..223554b4d 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -10,7 +10,7 @@ if not modules then modules = { } end modules ['bibl-tra'] = {
-- temporary hack, needed for transition
-if not punlications then
+if not publications then
local hacks = utilities.storage.allocate()
@@ -55,11 +55,11 @@ local ordered = { }
local shorts = { }
local mode = 0
-local template = utilities.strings.striplong([[
- \citation{*}
- \bibstyle{cont-%s}
- \bibdata{%s}
-]])
+local template = [[
+\citation{*}
+\bibstyle{cont-%s}
+\bibdata{%s}
+]]
local bibtexbin = environment.arguments.mlbibtex and "mlbibcontext" or "bibtex"
diff --git a/tex/context/base/bibl-tra.mkii b/tex/context/base/bibl-tra.mkii
index a9ce392a2..868ca0fc6 100644
--- a/tex/context/base/bibl-tra.mkii
+++ b/tex/context/base/bibl-tra.mkii
@@ -642,8 +642,7 @@
%D If you want to write an extension to the styles, you might
%D as well define some of these commands yourself.
%D
-%D The argument liust has been reordered here, and the meanings
-%D are:
+%D The argument list has been reordered here, and the meanings are:
%D
%D {\obeylines\parskip0pt
%D \type{#1} firstnames
diff --git a/tex/context/base/bibl-tra.mkiv b/tex/context/base/bibl-tra.mkiv
index 06af7e11d..4c9e83fdf 100644
--- a/tex/context/base/bibl-tra.mkiv
+++ b/tex/context/base/bibl-tra.mkiv
@@ -450,8 +450,8 @@
\unexpanded\def\bibinsertmonth#1#2#3%
{\bibdoifelse\@@pb@month
- {#1\doifnumberelse\@@pb@month
- {\doifconversiondefinedelse\@@pbmonthconversion
+ {#1\doifelsenumber\@@pb@month
+ {\doifelseconversiondefined\@@pbmonthconversion
{\convertnumber\@@pbmonthconversion\@@pb@month}{\@@pb@month}}%
{\@@pb@month}#2}%
{#3}}
@@ -686,10 +686,10 @@
\unexpanded\def\dostartpublication[#1]%
{\begingroup
- \doifassignmentelse{#1}%
+ \doifelseassignment{#1}%
{\getparameters[\??pb][k=\s!unknown,t=article,n=,s=,a=,y=,o=,u=,#1]}%
{\getparameters[\??pb][k=#1,t=article,n=,s=,a=,y=,o=,u=]}%
- \ctxlua{bibtex.hacks.register("\@@pbk","\@@pbs")}%
+ \ctxlua{bibtex.hacks.register(\!!bs\@@pbk\!!es,\!!bs\@@pbs\!!es)}%
\catcode\commentasciicode\othercatcode
\dodostartpublication}
@@ -717,12 +717,14 @@
\def\bibgetvaru#1{\csname pbd:#1\endcsname\pluseight}
\def\bibgetvard#1{\csname pbd:#1\endcsname\plusnine }
-\unexpanded\def\doifbibreferencefoundelse#1%
+\unexpanded\def\doifelsebibreferencefound#1%
{\preloadbiblist
- \doifdefinedelse{pbd:#1}
+ \doifelsedefined{pbd:#1}
\firstoftwoarguments
{\showmessage\m!publications{5}{#1,\the\inputlineno}\secondoftwoarguments}}
+\let\doifbibreferencefoundelse\doifelsebibreferencefound
+
%D \macros{bib@crossref}
%D
%D \type{\crossref} is used in database files to point to another
@@ -765,7 +767,7 @@
\strc_lists_place_current
{pubs}%
{\listparameter\c!criterium}%
- {\listparameter\c!number}%
+ {}%
{\listparameter\c!extras}%
{\listparameter\c!order}}%
\ctxlua{bibtex.hacks.flush("\@@pbsorttype")}%
@@ -878,7 +880,7 @@
% we'll define proper handlers later
\unexpanded\def\doplacepublicationindeed#1%
- {\doifbibreferencefoundelse{#1}
+ {\doifelsebibreferencefound{#1}
{\global\advance\bibtexcounter\plusone
\def\currentpublicationkey{#1}%
\ctxlua{bibtex.hacks.registerplaced("#1")}%
@@ -932,13 +934,13 @@
%D in included data from the \type{.bib} file).
\unexpanded\def\cite
- {\strictdoifnextoptionalelse\dodocite\dobibref}
+ {\strictdoifelsenextoptional\dodocite\dobibref}
\unexpanded\def\dobibref#1%
{\docite[#1][]}
\unexpanded\def\dodocite[#1]%
- {\strictdoifnextoptionalelse{\docite[#1]}{\docite[#1][]}}
+ {\strictdoifelsenextoptional{\docite[#1]}{\docite[#1][]}}
\unexpanded\def\docite[#1][#2]%
{\begingroup
@@ -953,7 +955,7 @@
\unexpanded\def\dowhatevercite#1#2%
{\processcommalist[#2]\docitation
\setupinteraction[\c!style=]%
- \doifassignmentelse
+ \doifelseassignment
{#1}%
{\getparameters[LO][\c!alternative=,\c!extras=,#1]%
\edef\@@currentalternative{\LOalternative}%
@@ -1007,7 +1009,7 @@
\unexpanded\def\getcitedata#1[#2]#3[#4]#5to#6%
{\bgroup
\dofetchapublication{#4}%
- \doifdefinedelse{\??pb @bib#2}%
+ \doifelsedefined{\??pb @bib#2}%
{\xdef#6{\getvalue{\??pb @bib#2}}}%
{\xdef#6{\getvalue{\??pb @#2}}}%
\egroup}
@@ -1139,7 +1141,7 @@
%D but it will do for now.
\unexpanded\def\docurrentbibauthor#1,#2%
- {\doifemptyelse{#2}
+ {\doifelseempty{#2}
{\def\currentbibauthor{#1\bibalternative{otherstext}}}
{\expandafter\ifx\csname\??pv\@@currentalternative authoretallimit\endcsname\relax
\edef\currentbibauthor{#1\bibalternative{andtext}#2}%
@@ -1154,7 +1156,7 @@
%D \type{\doglobal }
\unexpanded\def\robustaddtocommalist#1#2% {item} \cs
- {\robustdoifinsetelse{#1}#2\resetglobal
+ {\robustdoifelseinset{#1}#2\resetglobal
{\dodoglobal\xdef#2{\ifx#2\empty\else#2,\fi#1}}}
%D \macros{donormalbibauthoryear}
@@ -1165,7 +1167,7 @@
\unexpanded\def\donormalbibauthoryear#1%
{\def\myauthor{Xxxxxxxxxx}%
\def\myyear{0000}%
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\def\myauthor{{\bibgetvara{#1}}}%
\def\myyear {\bibgetvary{#1}}}%
{}%
@@ -1181,7 +1183,7 @@
\unexpanded\def\docompressbibauthoryear#1%
{\def\myauthor{Xxxxxxxxxx}%
\def\myyear {0000}%
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\xdef\myauthor{\bibgetvara{#1}}%
\xdef\myyear {\bibgetvary{#1}}}
{}%
@@ -1202,7 +1204,7 @@
%D information in the correct item of \type{\thebibyears}
\unexpanded\def\checkifmyauthoralreadyexists
- {\doifemptyelsevalue{thebibauthors}
+ {\doifelseemptyvalue{thebibauthors}
{\global\bibitemwanted \plusone
\global\bibitemcounter\plusone
\xdef\thebibauthors{{\myauthor}}}
@@ -1250,9 +1252,9 @@
%D Beware, we can have cites without reference match.
-\unexpanded\def\gotobiblink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
-\unexpanded\def\atbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
-\unexpanded\def\inbiblink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
+\unexpanded\def\gotobiblink#1[#2]{\doifelsereferencefound{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
+\unexpanded\def\atbiblink [#1]{\doifelsereferencefound{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
+\unexpanded\def\inbiblink [#1]{\doifelsereferencefound{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
%D \macros{bibauthoryearref,bibauthoryearsref,bibauthorref,bibyearref}
%D
@@ -1260,7 +1262,7 @@
%D \type{\ixbibauthoryearref} stores the data in the macros
%D \type{\currentbibauthor} and \type{\currentbibyear}.
-\unexpanded\def\doifbibinteractionelse
+\unexpanded\def\doifelsebibinteraction
{\iflocation
\edef\test{\bibalternative\c!interaction}%
\ifx\test\v!stop
@@ -1272,8 +1274,10 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifbibinteractionelse\doifelsebibinteraction
+
\unexpanded\def\bibmaybeinteractive#1#2%
- {\doifbibinteractionelse{\gotobiblink{#2}[#1]}{#2}}
+ {\doifelsebibinteraction{\gotobiblink{#2}[#1]}{#2}}
\unexpanded\def\bibauthoryearref[#1]%
{\ixbibauthoryear{#1}%
@@ -1338,7 +1342,7 @@
\unexpanded\def\dobibshortref#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\gotobiblink{\bibgetvars{#1}}[#1]}
{}}
@@ -1349,7 +1353,7 @@
\unexpanded\def\dobibserialref#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\gotobiblink{\bibgetvarn{#1}}[#1]}
{}}
@@ -1363,7 +1367,7 @@
\gotobiblink{#1}[#1]}
\unexpanded\def\bibgotoDOI#1#2%
- {\doifbibinteractionelse
+ {\doifelsebibinteraction
{\useURL[bibfooDoi#1][#2]%
\useURL[bibfoo#1][http://dx.doi.org/#2]%
\goto{\url[bibfooDoi#1]}[url(bibfoo#1)]}
@@ -1376,7 +1380,7 @@
\unexpanded\def\dobibdoiref#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\expanded{\bibgotoDOI{#1}{\bibgetvaro{#1}}}}
{}}
@@ -1386,13 +1390,13 @@
\bibalternative\v!right}
\unexpanded\def\bibgotoURL#1#2%
- {\doifbibinteractionelse
+ {\doifelsebibinteraction
{\useURL[bibfoo#1][#2]\goto{\url[bibfoo#1]}[url(bibfoo#1)]}
{\hyphenatedurl{#2}}}
\unexpanded\def\dobiburlref#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\expanded{\bibgotoURL{#1}{\bibgetvaru{#1}}}}
{}}
@@ -1403,7 +1407,7 @@
\unexpanded\def\dobibtyperef#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\gotobiblink{\bibgetvart{#1}}[#1]}
{}}
@@ -1414,7 +1418,7 @@
\unexpanded\def\dobibpageref#1%
{\bibinsertrefsep
- \doifbibinteractionelse
+ \doifelsebibinteraction
{\atbiblink[#1]}
{{\referencingfalse\at[#1]}}}
@@ -1425,7 +1429,7 @@
\unexpanded\def\dobibdata#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\dotypesetapublication{#1}}
{}}
@@ -1476,7 +1480,7 @@
\unexpanded\def\dobibauthornumref#1%
{\bibinsertrefsep
- \doifbibreferencefoundelse{#1}
+ \doifelsebibreferencefound{#1}
{\begingroup
\cite[\c!left=,\c!right=,\c!alternative=\v!author][#1]%
\bibalternative\c!inbetween
diff --git a/tex/context/base/blob-ini.lua b/tex/context/base/blob-ini.lua
index 32fac7662..b837250ce 100644
--- a/tex/context/base/blob-ini.lua
+++ b/tex/context/base/blob-ini.lua
@@ -45,6 +45,8 @@ local tohpackfast = typesetters.tohpackfast
local tovpack = typesetters.tovpack
local tovpackfast = typesetters.tovpackfast
+local implement = interfaces.implement
+
blobs = blobs or { }
-- provide copies here (nicer for manuals)
@@ -195,18 +197,7 @@ blobs.strht = strht
blobs.strdp = strdp
blobs.strhd = strhd
-function commands.strwd(str) context(strwd(str)) end
-function commands.strht(str) context(strht(str)) end
-function commands.strdp(str) context(strdp(str)) end
-function commands.strhd(str) context(strhd(str)) end
-
--- less efficient:
---
--- function commands.widthof(str)
--- local b = blobs.new()
--- blobs.append(b,str)
--- blobs.pack(b)
--- local w = blobs.dimensions(b)
--- context(number.todimen(w))
--- blobs.dispose(b)
--- end
+implement { name = "strwd", arguments = "string", actions = { strwd, context } }
+implement { name = "strht", arguments = "string", actions = { strht, context } }
+implement { name = "strdp", arguments = "string", actions = { strdp, context } }
+implement { name = "strhd", arguments = "string", actions = { strhd, context } }
diff --git a/tex/context/base/blob-ini.mkiv b/tex/context/base/blob-ini.mkiv
index 1dfb766f4..64582c0db 100644
--- a/tex/context/base/blob-ini.mkiv
+++ b/tex/context/base/blob-ini.mkiv
@@ -27,10 +27,10 @@
% this one takes simple (utf) strings
-\def\wdofstring#1{\dimexpr\ctxcommand{strwd(\!!bs#1\!!es)}\scaledpoint\relax}
-\def\htofstring#1{\dimexpr\ctxcommand{strht(\!!bs#1\!!es)}\scaledpoint\relax}
-\def\dpofstring#1{\dimexpr\ctxcommand{strdp(\!!bs#1\!!es)}\scaledpoint\relax}
-\def\hdofstring#1{\dimexpr\ctxcommand{strhd(\!!bs#1\!!es)}\scaledpoint\relax}
+\def\wdofstring#1{\dimexpr\clf_strwd{#1}\scaledpoint\relax}
+\def\htofstring#1{\dimexpr\clf_strht{#1}\scaledpoint\relax}
+\def\dpofstring#1{\dimexpr\clf_strdp{#1}\scaledpoint\relax}
+\def\hdofstring#1{\dimexpr\clf_strhd{#1}\scaledpoint\relax}
\def\widthofstring {\the\wdofstring}
\def\heightofstring {\the\htofstring}
diff --git a/tex/context/base/buff-imp-default.lua b/tex/context/base/buff-imp-default.lua
index 72a49d625..79f01c47d 100644
--- a/tex/context/base/buff-imp-default.lua
+++ b/tex/context/base/buff-imp-default.lua
@@ -20,7 +20,7 @@ local grammar = { "visualizer",
beginline = makepattern(handler,"beginline",patterns.beginline),
newline = makepattern(handler,"newline", patterns.newline),
space = makepattern(handler,"space", patterns.space),
- default = makepattern(handler,"default", patterns.utf8char),
+ default = makepattern(handler,"default", patterns.utf8character),
content = makepattern(handler,"default", patterns.somecontent), -- not too efficient
-- handy
diff --git a/tex/context/base/buff-imp-lua.lua b/tex/context/base/buff-imp-lua.lua
index 04e79afba..e873a34d1 100644
--- a/tex/context/base/buff-imp-lua.lua
+++ b/tex/context/base/buff-imp-lua.lua
@@ -139,7 +139,7 @@ local comment = P("--")
local name = (patterns.letter + patterns.underscore)
* (patterns.letter + patterns.underscore + patterns.digit)^0
local boundary = S('()[]{}')
-local special = S("-+/*^%=#") + P("..")
+local special = S("-+/*^%=#~|<>") + P("..")
-- The following longstring parser is taken from Roberto's documentation
-- that can be found at http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html.
@@ -159,11 +159,11 @@ end
local grammar = visualizers.newgrammar("default", { "visualizer",
sstring =
makepattern(handler,"quote",patterns.dquote)
- * (V("whitespace") + makepattern(handler,"string",1-patterns.dquote))^0 -- patterns.nodquote
+ * (V("whitespace") + makepattern(handler,"string",(1-patterns.dquote-V("whitespace"))^1))^0 -- patterns.nodquote
* makepattern(handler,"quote",patterns.dquote),
dstring =
makepattern(handler,"quote",patterns.squote)
- * (V("whitespace") + makepattern(handler,"string",1-patterns.squote))^0 -- patterns.nosquote
+ * (V("whitespace") + makepattern(handler,"string",(1-patterns.squote-V("whitespace"))^1))^0 -- patterns.nosquote
* makepattern(handler,"quote",patterns.squote),
longstring =
longstring / long,
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index 08416c9ad..dafe2ad0e 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -9,12 +9,14 @@ if not modules then modules = { } end modules ['buff-ini'] = {
local concat = table.concat
local type, next, load = type, next, load
local sub, format = string.sub, string.format
-local splitlines, validstring = string.splitlines, string.valid
+local splitlines, validstring, replacenewlines = string.splitlines, string.valid, string.replacenewlines
local P, Cs, patterns, lpegmatch = lpeg.P, lpeg.Cs, lpeg.patterns, lpeg.match
+local utfchar = utf.char
+local totable = string.totable
-local trace_run = false trackers .register("buffers.run", function(v) trace_run = v end)
-local trace_grab = false trackers .register("buffers.grab", function(v) trace_grab = v end)
-local trace_visualize = false trackers .register("buffers.visualize", function(v) trace_visualize = v end)
+local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end)
+local trace_grab = false trackers.register("buffers.grab", function(v) trace_grab = v end)
+local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end)
local report_buffers = logs.reporter("buffers","usage")
local report_typeset = logs.reporter("buffers","typeset")
@@ -23,6 +25,21 @@ local report_grabbing = logs.reporter("buffers","grabbing")
local context = context
local commands = commands
+local implement = interfaces.implement
+
+local scanners = tokens.scanners
+local scanstring = scanners.string
+local scaninteger = scanners.integer
+local scanboolean = scanners.boolean
+local scancode = scanners.code
+local scantoken = scanners.token
+
+local getters = tokens.getters
+local gettoken = getters.token
+
+local compilescanner = tokens.compile
+local scanners = interfaces.scanners
+
local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
local formatters = string.formatters
@@ -33,6 +50,8 @@ local registertempfile = luatex.registertempfile
local v_yes = variables.yes
+local p_whitespace = patterns.whitespace
+
local catcodenumbers = catcodes.numbers
local ctxcatcodes = catcodenumbers.ctxcatcodes
@@ -138,42 +157,19 @@ local function collectcontent(name,separator) -- no print
t[n] = c
end
end
- return concat(t,separator or "\n") -- was \r
+ -- the default separator was \r, then \n and is now os.newline because buffers
+ -- can be loaded in other applications
+ return concat(t,separator or os.newline)
end
end
local function loadcontent(name) -- no print
- local names = getnames(name)
- local nnames = #names
- local ok = false
- if nnames == 0 then
- ok = load(getcontent("")) -- default buffer
- elseif nnames == 1 then
- ok = load(getcontent(names[1]))
- else
- -- lua 5.2 chunked load
- local i = 0
- ok = load(function()
- while true do
- i = i + 1
- if i > nnames then
- return nil
- end
- local c = getcontent(names[i])
- if c == "" then
- -- would trigger end of load
- else
- return c
- end
- end
- end)
- end
+ local content = collectcontent(name,"\n") -- tex likes \n
+ local ok, err = load(content)
if ok then
return ok()
- elseif nnames == 0 then
- report_buffers("invalid lua code in default buffer")
else
- report_buffers("invalid lua code in buffer %a",concat(names,","))
+ report_buffers("invalid lua code in buffer %a: %s",name,err or "unknown error")
end
end
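The chunked Lua 5.2 loader is replaced by a single load of the collected content, with the compiler error reported when it fails. A standalone sketch of that pattern (loadchunk is a made-up name; load accepts a string in Lua 5.2 and later, which is what LuaTeX provides):

  -- compile the collected buffer content in one go and report the error
  -- message when compilation fails, as the rewritten loadcontent does
  local function loadchunk(name,content)
      local ok, err = load(content,"buffer:" .. name)
      if ok then
          return ok()
      else
          print(string.format("invalid lua code in buffer %q: %s",name,err or "unknown error"))
      end
  end

  loadchunk("demo","print('hello from a buffer')") -- runs the chunk
  loadchunk("broken","print(")                     -- reports the compile error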
@@ -190,17 +186,28 @@ buffers.loadcontent = loadcontent
-- the context interface
-commands.erasebuffer = erase
-commands.assignbuffer = assign
+implement {
+ name = "assignbuffer",
+ actions = assign,
+ arguments = { "string", "string", "integer" }
+}
+
+implement {
+ name = "erasebuffer",
+ actions = erase,
+ arguments = "string"
+}
local anything = patterns.anything
local alwaysmatched = patterns.alwaysmatched
+local utf8character = patterns.utf8character
local function countnesting(b,e)
local n
local g = P(b) / function() n = n + 1 end
+ P(e) / function() n = n - 1 end
- + anything
+ -- + anything
+ + utf8character
local p = alwaysmatched / function() n = 0 end
* g^0
* alwaysmatched / function() return n end
@@ -219,7 +226,7 @@ local continue = false
-- An \n is unlikely to show up as \r is the endlinechar but \n is more generic
-- for us.
--- This fits the way we fetch verbatim: the indentatio before the sentinel
+-- This fits the way we fetch verbatim: the indentation before the sentinel
-- determines the stripping.
-- str = [[
@@ -257,13 +264,13 @@ local continue = false
-- how about tabs
-local getmargin = (Cs(P(" ")^1)*P(-1)+1)^1
+local getmargin = (Cs(P(" ")^1)*P(-1)+1)^1 -- 1 or utf8character
local eol = patterns.eol
local whatever = (P(1)-eol)^0 * eol^1
local strippers = { }
-local function undent(str) -- new version, needs testing
+local function undent(str) -- new version, needs testing: todo: not always needed, like in xtables
local margin = lpegmatch(getmargin,str)
if type(margin) ~= "string" then
return str
@@ -280,51 +287,162 @@ local function undent(str) -- new version, needs testing
return lpegmatch(stripper,str) or str
end
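As the comments above note, the indentation found in front of the stop sentinel decides how much leading space is removed from every line. A simplified illustration of that idea (the getmargin pattern is taken from above; the gsub-based stripping is only for this sketch, the real undent builds and caches lpeg strippers):

  local lpeg = require("lpeg") -- preloaded in luatex, required here for standalone runs
  local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match

  -- the margin is the run of spaces that ends the grabbed text, i.e. the
  -- indentation in front of the stop sentinel
  local getmargin = (Cs(P(" ")^1) * P(-1) + 1)^1

  local function undent(str)
      local margin = lpegmatch(getmargin,str)
      if type(margin) ~= "string" or margin == "" then
          return str
      end
      return (str:gsub("\n" .. margin,"\n"):gsub("^" .. margin,""))
  end

  print(undent("  print(1)\n    print(2)\n  "))
  -- print(1)
  --   print(2)

The relative indentation of the second line survives because only the common margin is stripped.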
-function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes) -- maybe move \\ to call
- local dn = getcontent(name)
- if dn == "" then
- nesting = 0
- continue = false
- end
- if trace_grab then
- if #bufferdata > 30 then
- report_grabbing("%s => |%s..%s|",name,sub(bufferdata,1,10),sub(bufferdata,-10,#bufferdata))
+buffers.undent = undent
+
+-- function commands.grabbuffer(name,begintag,endtag,bufferdata,catcodes,doundent) -- maybe move \\ to call
+-- local dn = getcontent(name)
+-- if dn == "" then
+-- nesting = 0
+-- continue = false
+-- end
+-- if trace_grab then
+-- if #bufferdata > 30 then
+-- report_grabbing("%s => |%s..%s|",name,sub(bufferdata,1,10),sub(bufferdata,-10,#bufferdata))
+-- else
+-- report_grabbing("%s => |%s|",name,bufferdata)
+-- end
+-- end
+-- local counter = counters[begintag]
+-- if not counter then
+-- counter = countnesting(begintag,endtag)
+-- counters[begintag] = counter
+-- end
+-- nesting = nesting + lpegmatch(counter,bufferdata)
+-- local more = nesting > 0
+-- if more then
+-- dn = dn .. sub(bufferdata,2,-1) .. endtag
+-- nesting = nesting - 1
+-- continue = true
+-- else
+-- if continue then
+-- dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic
+-- elseif dn == "" then
+-- dn = sub(bufferdata,2,-2)
+-- else
+-- dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
+-- end
+-- local last = sub(dn,-1)
+-- if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
+-- dn = sub(dn,1,-2)
+-- end
+-- if doundent or (autoundent and doundent == nil) then
+-- dn = undent(dn)
+-- end
+-- end
+-- assign(name,dn,catcodes)
+-- commands.doifelse(more)
+-- end
+
+function tokens.pickup(start,stop)
+ local stoplist = totable(stop)
+ local stoplength = #stoplist
+ local stoplast = stoplist[stoplength]
+ local startlist = totable(start)
+ local startlength = #startlist
+ local startlast = startlist[startlength]
+ local list = { }
+ local size = 0
+ local depth = 0
+ while true do -- or use depth
+ local char = scancode()
+ if char then
+ char = utfchar(char)
+ size = size + 1
+ list[size] = char
+ if char == stoplast and size >= stoplength then
+ local done = true
+ local last = size
+ for i=stoplength,1,-1 do
+ if stoplist[i] ~= list[last] then
+ done = false
+ break
+ end
+ last = last - 1
+ end
+ if done then
+ if depth > 0 then
+ depth = depth - 1
+ else
+ break
+ end
+ char = false -- trick: let's skip the next (start) test
+ end
+ end
+ if char == startlast and size >= startlength then
+ local done = true
+ local last = size
+ for i=startlength,1,-1 do
+ if startlist[i] ~= list[last] then
+ done = false
+ break
+ end
+ last = last - 1
+ end
+ if done then
+ depth = depth + 1
+ end
+ end
else
- report_grabbing("%s => |%s|",name,bufferdata)
+ -- local t = scantoken()
+ local t = gettoken()
+ if t then
+ -- we're skipping leading stuff, like obeyedlines and relaxes
+ else
+ break
+ end
end
end
- local counter = counters[begintag]
- if not counter then
- counter = countnesting(begintag,endtag)
- counters[begintag] = counter
- end
- nesting = nesting + lpegmatch(counter,bufferdata)
- local more = nesting > 0
- if more then
- dn = dn .. sub(bufferdata,2,-1) .. endtag
- nesting = nesting - 1
- continue = true
- else
- if continue then
- dn = dn .. sub(bufferdata,2,-2) -- no \r, \n is more generic
- elseif dn == "" then
- dn = sub(bufferdata,2,-2)
+ local start = 1
+ local stop = size-stoplength-1
+ for i=start,stop do
+ if lpegmatch(p_whitespace,list[i]) then
+ start = i + 1
else
- dn = dn .. "\n" .. sub(bufferdata,2,-2) -- no \r, \n is more generic
- end
- local last = sub(dn,-1)
- if last == "\n" or last == "\r" then -- \n is unlikely as \r is the endlinechar
- dn = sub(dn,1,-2)
+ break
end
- if autoundent then
- dn = undent(dn)
+ end
+ for i=stop,start,-1 do
+ if lpegmatch(p_whitespace,list[i]) then
+ stop = i - 1
+ else
+ break
end
end
- assign(name,dn,catcodes)
- commands.doifelse(more)
+ if start <= stop then
+ return concat(list,"",start,stop)
+ else
+ return ""
+ end
+end
+
+-- function buffers.pickup(name,start,stop,finish,catcodes,doundent)
+-- local data = tokens.pickup(start,stop)
+-- if doundent or (autoundent and doundent == nil) then
+-- data = buffers.undent(data)
+-- end
+-- buffers.assign(name,data,catcodes)
+-- context(finish)
+-- end
+
+-- commands.pickupbuffer = buffers.pickup
+
+scanners.pickupbuffer = function()
+ local name = scanstring()
+ local start = scanstring()
+ local stop = scanstring()
+ local finish = scanstring()
+ local catcodes = scaninteger()
+ local doundent = scanboolean()
+ local data = tokens.pickup(start,stop)
+ if doundent or (autoundent and doundent == nil) then
+ data = buffers.undent(data)
+ end
+ buffers.assign(name,data,catcodes)
+ -- context[finish]()
+ context(finish)
end
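The pickupbuffer scanner above relies on tokens.pickup, which collects characters from the token stream while counting how deeply the start sequence is nested, so that only the matching stop sequence ends the grab. The same bookkeeping on a plain string, as a toy model (the real function reads its input with scancode and gettoken and also trims surrounding whitespace):

  -- toy model of the nesting logic in tokens.pickup: scan characters, raise
  -- the depth on every embedded start tag, stop at the matching stop tag
  local function pickup(str,start,stop)
      local depth = 0
      local i = 1
      while i <= #str do
          if str:sub(i,i + #stop - 1) == stop then
              if depth > 0 then
                  depth = depth - 1
                  i = i + #stop
              else
                  return str:sub(1,i-1)
              end
          elseif str:sub(i,i + #start - 1) == start then
              depth = depth + 1
              i = i + #start
          else
              i = i + 1
          end
      end
      return str
  end

  print(pickup([[one \startbuffer two \stopbuffer three \stopbuffer rest]],
      [[\startbuffer]],[[\stopbuffer]]))
  -- one \startbuffer two \stopbuffer three  (plus a trailing space, which the real code strips)

This is what lets buffers contain nested \startbuffer ... \stopbuffer pairs without ending the outer grab too early.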
-function commands.savebuffer(list,name,prefix) -- name is optional
+local function savebuffer(list,name,prefix) -- name is optional
if not list or list == "" then
list = name
end
@@ -338,50 +456,21 @@ function commands.savebuffer(list,name,prefix) -- name is optional
if prefix == v_yes then
name = addsuffix(tex.jobname .. "-" .. name,"tmp")
end
- io.savedata(name,content)
+ io.savedata(name,replacenewlines(content))
end
--- local files = { }
--- local last = 0
---
--- function commands.runbuffer(name,encapsulate) -- we used to compare the saved file with content
--- local names = getnames(name)
--- local filename = files[name]
--- local tobedone = not istypeset(names)
--- if tobedone or not filename then
--- last = last + 1
--- filename = formatters["%s-typeset-buffer-%03i"](tex.jobname,last)
--- files[name] = filename
--- end
--- if tobedone then
--- if trace_run then
--- report_typeset("changes in %a, processing forced",name)
--- end
--- local filename = addsuffix(filename,"tmp")
--- local content = collectcontent(names,nil) or ""
--- if content == "" then
--- content = "empty buffer"
--- end
--- if encapsulate then
--- content = formatters["\\starttext\n%s\n\\stoptext\n"](content)
--- end
--- io.savedata(filename,content)
--- local command = formatters["context %s %s"](jit and "--jit" or "",filename)
--- report_typeset("running: %s\n",command)
--- os.execute(command)
--- markastypeset(names)
--- elseif trace_run then
--- report_typeset("no changes in %a, not processed",name)
--- end
--- context(replacesuffix(filename,"pdf"))
--- end
+implement {
+ name = "savebuffer",
+ actions = savebuffer,
+ arguments = { "string", "string", "string" }
+}
-- we can consider adding a size to avoid unlikely clashes
local oldhashes = nil
local newhashes = nil
-function commands.runbuffer(name,encapsulate)
+local function runbuffer(name,encapsulate)
if not oldhashes then
oldhashes = job.datasets.getdata("typeset buffers","hashes") or { }
for hash, n in next, oldhashes do
@@ -427,10 +516,10 @@ function commands.runbuffer(name,encapsulate)
registertempfile(filename)
registertempfile(resultname,nil,true)
--
- context(resultname)
+ return resultname
end
-function commands.getbuffer(name)
+local function getbuffer(name)
local str = getcontent(name)
if str ~= "" then
-- characters.showstring(str)
@@ -438,11 +527,11 @@ function commands.getbuffer(name)
end
end
-function commands.getbuffermkvi(name) -- rather direct !
+local function getbuffermkvi(name) -- rather direct !
context.viafile(resolvers.macros.preprocessed(getcontent(name)),formatters["buffer.%s.mkiv"](validstring(name,"noname")))
end
-function commands.gettexbuffer(name)
+local function gettexbuffer(name)
local buffer = name and cache[name]
if buffer and buffer.data ~= "" then
context.pushcatcodetable()
@@ -457,20 +546,29 @@ function commands.gettexbuffer(name)
end
end
-commands.getbufferctxlua = loadcontent
+implement { name = "getbufferctxlua", actions = loadcontent, arguments = "string" }
+implement { name = "getbuffer", actions = getbuffer, arguments = "string" }
+implement { name = "getbuffermkvi", actions = getbuffermkvi, arguments = "string" }
+implement { name = "gettexbuffer", actions = gettexbuffer, arguments = "string" }
-function commands.doifelsebuffer(name)
- commands.doifelse(exists(name))
-end
+implement {
+ name = "runbuffer",
+ actions = { runbuffer, context },
+ arguments = { "string", true }
+}
+
+implement {
+ name = "doifelsebuffer",
+ actions = { exists, commands.doifelse },
+ arguments = "string"
+}
-- This is only used for mp buffers and is a kludge. Don't change the
-- texprint into texsprint as it fails because "p<nl>enddef" becomes
-- "penddef" then.
--- function commands.feedback(names)
--- texprint(ctxcatcodes,splitlines(collectcontent(names)))
--- end
-
-function commands.feedback(names) -- bad name, maybe rename to injectbuffercontent
- context.printlines(collectcontent(names))
-end
+implement {
+ name = "feedback", -- bad name, maybe rename to injectbuffercontent
+ actions = { collectcontent, context.printlines },
+ arguments = "string"
+}
diff --git a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv
index 4ea3042b6..c10680085 100644
--- a/tex/context/base/buff-ini.mkiv
+++ b/tex/context/base/buff-ini.mkiv
@@ -23,13 +23,15 @@
\let\currentbuffer\empty
\def\doifelsebuffer#1%
- {\ctxcommand{doifelsebuffer("#1")}}
+ {\clf_doifelsebuffer{#1}}
+
+\let\doifbufferelse\doifelsebuffer
\unexpanded\def\resetbuffer
{\dosingleempty\buff_reset}
\def\buff_reset[#1]%
- {\ctxcommand{erasebuffer("#1")}}
+ {\clf_erasebuffer{#1}}
\setuvalue{\e!start\v!buffer}%
{\begingroup % (3)
@@ -39,8 +41,11 @@
\def\buff_start[#1][#2]%
{\buff_start_indeed{}{#1}{\e!start\v!buffer}{\e!stop\v!buffer}}
-\def\buff_start_indeed#1#2#3#4% \donothing needed !
- {\normalexpanded{\buff_pickup{#2}{#3}{#4}{}{\buff_stop{#4}}}}
+% \def\buff_start_indeed#1#2#3#4#5% \donothing needed ! #5=undent)
+% {\normalexpanded{\buff_pickup{#2}{#3}{#4}{}{\buff_stop{#4}}\plusone}}
+
+\def\buff_start_indeed#1#2#3#4%
+ {\normalexpanded{\buff_pickup{#2}{#3}{#4}{}{\buff_stop{#4}}\plusone}}
\unexpanded\def\grabbufferdata % was: \dostartbuffer
{\begingroup % (4)
@@ -64,35 +69,70 @@
\let\buff_finish\relax
\let\buff_gobble\relax
-\unexpanded\def\buff_pickup#1#2#3#4#5% name, startsequence, stopsequence, before, after
+% \unexpanded\def\buff_pickup#1#2#3#4#5#6% name, startsequence, stopsequence, before, after, undent
+% {\begingroup % (1)
+% #4%
+% \begingroup % (2)
+% \edef\catcodetableofbuffer{\number\catcodetable}%
+% \clf_erasebuffer{#1}%
+% \setcatcodetable\vrbcatcodes
+% \def\buff_finish
+% {\endgroup % (1)
+% \endgroup % (2)
+% #5}%
+% \def\buff_gobble##1#3% is detokenize needed? TEST
+% %{\ctxcommand{grabbuffer("#1","#2","#3",\!!bs\detokenize{##1}\!!es)} % space ?
+% {\ctxcommand{grabbuffer("#1","#2","#3",\!!bs>##1\!!es,\catcodetableofbuffer,\ifnum#6=\plusone true\else false\fi)}% space ?
+% \buff_gobble
+% \buff_finish}%
+% \buff_gobble}
+
+% % \def\startgrab
+% % {\begingroup
+% % \setcatcodetable\vrbcatcodes
+% % \ctxlua{tokens.pickup("\\startgrab","\\stopgrab") context("\\endgroup")}}
+% %
+% % \def\stopgrab
+% % {}
+
+\unexpanded\def\buff_pickup#1#2#3#4#5#6% name, startsequence, stopsequence, before, after, undent
{\begingroup % (1)
#4%
\begingroup % (2)
- \edef\catcodetableofbuffer{\number\catcodetable}%
- \ctxcommand{erasebuffer("#1")}%
+ \scratchcounter\catcodetable
+ \clf_erasebuffer{#1}%
\setcatcodetable\vrbcatcodes
- \def\buff_finish
+ \unexpanded\def\dofinishpickupbuffer
{\endgroup % (1)
\endgroup % (2)
#5}%
- \def\buff_gobble##1#3% is detokenize needed? TEST
- %{\ctxcommand{grabbuffer("#1","#2","#3",\!!bs\detokenize{##1}\!!es)} % space ?
- {\ctxcommand{grabbuffer("#1","#2","#3",\!!bs>##1\!!es,\catcodetableofbuffer)}% space ?
- \buff_gobble
- \buff_finish}%
- \buff_gobble}
+ % todo: we need to skip the first lineending which is an active character
+ % but sometimes we have something different ... this is a side effect of
+ % checking for optional arguments i.e. the next token is already tokenized
+ % and for that reason we have the \relax as well as the \string
+ \clf_pickupbuffer
+ {#1}%
+ {#2}%
+ {#3}%
+ {\string\dofinishpickupbuffer}%
+ \scratchcounter
+ \ifnum#6=\plusone\s!true\else\s!false\fi
+ % \relax}
+ \expandafter\relax\string}
\unexpanded\def\buff_stop#1%
{\endgroup % (3 & 4 & 5 & 6)
\getvalue{#1}}
+% \installctxfunction\dopickupbuffer{commands.dopickupbuffer}
+
\unexpanded\def\setbuffer
{\dosingleempty\buff_set}
\let\endbuffer\relax
\def\buff_set[#1]#2\endbuffer % seldom used so we just pass #2
- {\ctxcommand{assignbuffer("#1",\!!bs\detokenize{#2}\!!es,\number\catcodetable)}}
+ {\clf_assignbuffer{#1}{\detokenize{#2}}\catcodetable\relax}
% beware, never adapt the global buffer settings, actually we might introduce
% a broken parent chain for this purpose but on the other hand it's not that
@@ -141,7 +181,7 @@
\namedbufferparameter{#1}\c!after}
\unexpanded\def\buff_get_stored_indeed#1%
- {\ctxcommand{getbuffer("#1")}}
+ {\clf_getbuffer{#1}}
\unexpanded\def\getdefinedbuffer[#1]%
{\buff_get_stored{#1}{\thedefinedbuffer{#1}}}%
@@ -155,7 +195,10 @@
{\processcommalist[#1]\buff_get_stored_inline_indeed}}
\unexpanded\def\buff_get_stored_inline_indeed#1%
- {\ignorespaces\ctxcommand{getbuffer("#1")}\removeunwantedspaces}
+ {\ignorespaces\clf_getbuffer{#1}\removeunwantedspaces}
+
+\def\rawbuffer#1% expandable
+ {\clf_getbuffer{#1}}
\definebuffer
[\v!hiding]
@@ -197,10 +240,10 @@
\def\buff_save[#1][#2]%
{\begingroup
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setupcurrentsavebuffer[#1]}%
{\setupcurrentsavebuffer[\c!list={#1},\c!file=#2]}%
- \ctxcommand{savebuffer("\directsavebufferparameter\c!list","\directsavebufferparameter\c!file","\directsavebufferparameter\c!prefix")}%
+ \clf_savebuffer{\directsavebufferparameter\c!list}{\directsavebufferparameter\c!file}{\directsavebufferparameter\c!prefix}%
\endgroup}
%D Experimental: no expansion of commands in buffer!
@@ -218,8 +261,8 @@
\unexpanded\def\mkvibuffer {\dosingleempty\buff_mkvi}
% what was: \mkvibufferraw
-\def\buff_ctxlua[#1]{\ctxcommand{getbufferctxlua("#1")}}
-\def\buff_mkvi [#1]{\ctxcommand{getbuffermkvi("#1")}}
+\def\buff_ctxlua[#1]{\clf_getbufferctxlua{#1}}
+\def\buff_mkvi [#1]{\clf_getbuffermkvi {#1}}
% used elsewhere
diff --git a/tex/context/base/buff-par.lua b/tex/context/base/buff-par.lua
index 2c1cd40e9..58ea9ab9d 100644
--- a/tex/context/base/buff-par.lua
+++ b/tex/context/base/buff-par.lua
@@ -6,22 +6,24 @@ if not modules then modules = { } end modules ['buff-par'] = {
license = "see context related readme files"
}
-local context, commands = context, commands
-
local insert, remove, find, gmatch = table.insert, table.remove, string.find, string.gmatch
-local strip, format = string.strip, string.format
+local fullstrip, formatters = string.fullstrip, string.formatters
local trace_parallel = false trackers.register("buffers.parallel", function(v) trace_parallel = v end)
local report_parallel = logs.reporter("buffers","parallel")
local variables = interfaces.variables
+local v_all = variables.all
local parallel = buffers.parallel or { }
buffers.parallel = parallel
local settings_to_array = utilities.parsers.settings_to_array
+local context = context
+local implement = interfaces.implement
+
local data = { }
function parallel.define(category,tags)
@@ -40,7 +42,7 @@ function parallel.define(category,tags)
end
function parallel.reset(category,tags)
- if not tags or tags == "" or tags == variables.all then
+ if not tags or tags == "" or tags == v_all then
tags = table.keys(entries)
else
tags = settings_to_array(tags)
@@ -62,13 +64,18 @@ function parallel.next(category)
end
end
-function parallel.save(category,tag,content)
+function parallel.save(category,tag,content,frombuffer)
+ if frombuffer then
+ content = buffers.raw(content)
+ end
local dc = data[category]
if not dc then
+ report_parallel("unknown category %a",category)
return
end
local entries = dc.entries[tag]
if not entries then
+ report_parallel("unknown entry %a",tag)
return
end
local lines = entries.lines
@@ -93,13 +100,14 @@ function parallel.save(category,tag,content)
if trace_parallel and label ~= "" then
report_parallel("reference found of category %a, tag %a, label %a",category,tag,label)
end
+ line.content = fullstrip(content)
line.label = label
- line.content = strip(content)
end
else
- line.content = strip(content)
+ line.content = fullstrip(content)
line.label = ""
end
+ -- print("[["..line.content.."]]")
end
function parallel.hassomecontent(category,tags)
@@ -108,7 +116,7 @@ function parallel.hassomecontent(category,tags)
return false
end
local entries = dc.entries
- if not tags or tags == "" or tags == variables.all then
+ if not tags or tags == "" or tags == v_all then
tags = table.keys(entries)
else
tags = utilities.parsers.settings_to_array(tags)
@@ -126,22 +134,25 @@ function parallel.hassomecontent(category,tags)
return false
end
-local save = resolvers.savers.byscheme
+local ctx_doflushparallel = context.doflushparallel
+local f_content = formatters["\\input{%s}"]
+local save_byscheme = resolvers.savers.byscheme
function parallel.place(category,tags,options)
local dc = data[category]
if not dc then
return
end
- local entries = dc.entries
- local tags = utilities.parsers.settings_to_array(tags)
- local options = utilities.parsers.settings_to_hash(options)
- local start, n, criterium = options.start, options.n, options.criterium
- start, n = start and tonumber(start), n and tonumber(n)
- local max = 1
+ local entries = dc.entries
+ local tags = utilities.parsers.settings_to_array(tags)
+ local options = utilities.parsers.settings_to_hash(options) -- options can be hash too
+ local start = tonumber(options.start)
+ local n = tonumber(options.n)
+ local criterium = options.criterium
+ local max = 1
if n then
max = n
- elseif criterium == variables.all then
+ elseif criterium == v_all then
max = 0
for t=1,#tags do
local tag = tags[t]
@@ -156,15 +167,17 @@ function parallel.place(category,tags,options)
local tag = tags[t]
local entry = entries[tag]
if entry then
- local lines = entry.lines
- local number = entry.number + 1
- entry.number = number
- local line = remove(lines,1)
- if line and line.content then
- local content = format("\\input{%s}",save("virtual","parallel",line.content))
- context.doflushparallel(tag,1,number,line.label,content)
+ local lines = entry.lines
+ local number = entry.number + 1
+ entry.number = number
+ local line = remove(lines,1)
+ local content = line and line.content
+ local label = line and line.label or ""
+ if content then
+ local virtual = save_byscheme("virtual","parallel",content)
+ ctx_doflushparallel(tag,1,number,label,f_content(virtual))
else
- context.doflushparallel(tag,0,number,"","")
+ ctx_doflushparallel(tag,0,number,"","")
end
end
end
@@ -173,12 +186,47 @@ end
-- interface
-commands.defineparallel = parallel.define
-commands.nextparallel = parallel.next
-commands.saveparallel = parallel.save
-commands.placeparallel = parallel.place
-commands.resetparallel = parallel.reset
+implement {
+ name = "defineparallel",
+ actions = parallel.define,
+ arguments = { "string", "string" }
+}
-function commands.doifelseparallel(category,tags)
- commands.doifelse(parallel.hassomecontent(category,tags))
-end
+implement {
+ name = "nextparallel",
+ actions = parallel.next,
+ arguments = "string"
+}
+
+implement {
+ name = "saveparallel",
+ actions = parallel.save,
+ arguments = { "string", "string", "string", true },
+}
+
+implement {
+ name = "placeparallel",
+ actions = parallel.place,
+ arguments = {
+ "string",
+ "string",
+ {
+ { "start" },
+ { "n" },
+ { "criterium" },
+ { "setups" },
+ }
+ }
+}
+
+implement {
+ name = "resetparallel",
+ actions = parallel.reset,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "doifelseparallel",
+ actions = { parallel.hassomecontent, commands.doifelse } ,
+ arguments = { "string", "string" }
+}
diff --git a/tex/context/base/buff-par.mkvi b/tex/context/base/buff-par.mkvi
index 404fa8ef3..5af04ba7c 100644
--- a/tex/context/base/buff-par.mkvi
+++ b/tex/context/base/buff-par.mkvi
@@ -39,6 +39,7 @@
\unprotect
\installcorenamespace{parallel}
+\installcorenamespace{parallelhandler}
\installcommandhandler \??parallel {parallel} \??parallel
@@ -46,32 +47,55 @@
[\c!setups=parallel:place:default]
\let\buff_parallel_define_saved\defineparallel
+\let\buff_parallel_setup_saved \setupparallel
-\unexpanded\def\defineparallel
- {\dodoubleargument\buff_parallel_define}
+\unexpanded\def\defineparallel{\dodoubleargument\buff_parallel_define}
+\unexpanded\def\setupparallel {\dotripleargument\buff_parallel_setup}
\def\buff_parallel_define[#name][#instances]%
- {\buff_parallel_define_saved[#name]
- \ctxcommand{defineparallel("#name","#instances")}%
- \processcommalist[#instances]\buff_parallel_define_instance
+ {\buff_parallel_define_saved[#name][\c!list={#instances}]% list is internal
+ \clf_defineparallel{#name}{#instances}%
+ \expandafter\newtoks\csname\??parallelhandler#name\endcsname
+ \processcommacommand[#instances]{\buff_parallel_define_instance{#name}}%
\setuevalue{\e!start#name}{\buff_parallel_start{#name}}%
\setuevalue{\e!stop #name}{\buff_parallel_stop}}
-\def\buff_parallel_define_instance#instance%
- {\normalexpanded{\buff_parallel_define_saved[#instance][\currentparallel]}%
+\let\buff_process_parallel_instance\relax
+
+\def\buff_process_parallel_instances
+ {\the\csname\??parallelhandler\currentparallel\endcsname}
+
+\def\buff_parallel_define_instance#name#instance%
+ {\normalexpanded{\buff_parallel_define_saved[#instance][#name]}%
\definebuffer[#instance]%
+ %\edef\t_buff_parallel{\csname\??parallelhandler#name\endcsname}%
+ \expandafter\let\expandafter\t_buff_parallel\csname\??parallelhandler#name\endcsname
+ \appendtoks
+ \edef\currentparallelinstance{#instance}%
+ \buff_process_parallel_instance\relax
+ \to \t_buff_parallel
\setuevalue{\e!stop#instance}{\buff_parallel_save{#instance}}}
+\def\buff_parallel_setup[#name][#instances][#setups]%
+ {\processcommalist[#instances]{\buff_parallel_setup_instance{#name}{#setups}}}
+
+\def\buff_parallel_setup_instance#name#setups#instance%
+ {\buff_parallel_setup_saved[#name:#instance][#setups]}
+
\unexpanded\def\buff_parallel_start#name%
{\pushmacro\currentparallel
\edef\currentparallel{#name}%
- \ctxcommand{nextparallel("\currentparallel")}}
+ \clf_nextparallel{\currentparallel}}
\unexpanded\def\buff_parallel_stop
{\popmacro\currentparallel}
\unexpanded\def\buff_parallel_save#instance% defined should also be possible at the lua end
- {\ctxcommand{saveparallel("\currentparallel","#instance",buffers.raw("\thedefinedbuffer{#instance}"))}}
+ {\clf_saveparallel
+ {\currentparallel}%
+ {#instance}%
+ {\thedefinedbuffer{#instance}}%
+ \relax}
\unexpanded\def\placeparallel
{\dotripleempty\buff_parallel_place}
@@ -79,7 +103,17 @@
\def\buff_parallel_place[#name][#instance][#settings]%
{\begingroup
\edef\currentparallel{#name}%
- \ctxcommand{placeparallel("\currentparallel","#instance","#settings")}% -- todo: pass options as k/v
+ \setupcurrentparallel[#settings]%
+ \clf_placeparallel
+ {\currentparallel}%
+ {#instance}%
+ {%
+ % setups {\parallelparameter\c!setups}%
+ start {\parallelparameter\c!start}%
+ n {\parallelparameter\c!n}%
+ criterium {\parallelparameter\c!criterium}%
+ }%
+ \relax
\endgroup}
\def\doflushparallel#instance#status#line#label#content% called at lua end
@@ -88,7 +122,7 @@
\def\currentparallelnumber {#status}%
\def\currentparallelline {#line}%
\def\currentparallellabel {#label}%
- \def\currentparallelcontent {#content}%
+ \def\currentparallelcontent {#content}% can be kept at the lua end .. no need to use a virtual file
\ifcase#status\relax
\expandafter\buff_parallel_flush_nop
\or
@@ -100,22 +134,24 @@
{}
\def\buff_parallel_flush_yes
- {\directsetup{\namedparallelparameter\currentparallelinstance\c!setups}}
+ {\directsetup{\namedparallelparameter{\currentparallel:\currentparallelinstance}\c!setups}}
\unexpanded\def\doifelseparallel#name#instance%
- {\ctxcommand{doifelseparallel("#name","#instance")}}
+ {\clf_doifelseparallel{#name}{#instance}}
+
+\let\doifparallelelse\doifelseparallel
\unexpanded\def\resetparallel
{\dodoubleempty\buff_parallel_reset}
\def\buff_parallel_reset[#name][#instance]%
- {\ctxcommand{resetparallel("#name","#instance"))}}
+ {\clf_resetparallel{#name}{#instance}}
\startsetups parallel:place:default
\hangafter\plusone
- \hangindent4em
+ \hangindent4\emwidth
\dontleavehmode
- \hbox to 3em \bgroup
+ \hbox to 3\emwidth \bgroup
\hss
\bf
\doifsomething \currentparallellabel {
diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua
index 3300ac6cb..0168c551d 100644
--- a/tex/context/base/buff-ver.lua
+++ b/tex/context/base/buff-ver.lua
@@ -31,6 +31,7 @@ visualizers.specifications = specifications
local context = context
local commands = commands
+local implement = interfaces.implement
local tabtospace = utilities.strings.tabtospace
local variables = interfaces.variables
@@ -43,73 +44,74 @@ local v_auto = variables.auto
local v_yes = variables.yes
local v_last = variables.last
local v_all = variables.all
+local v_absolute = variables.absolute
-- beware, all macros have an argument:
-local doinlineverbatimnewline = context.doinlineverbatimnewline
-local doinlineverbatimbeginline = context.doinlineverbatimbeginline
-local doinlineverbatimemptyline = context.doinlineverbatimemptyline
-local doinlineverbatimstart = context.doinlineverbatimstart
-local doinlineverbatimstop = context.doinlineverbatimstop
+local ctx_inlineverbatimnewline = context.doinlineverbatimnewline
+local ctx_inlineverbatimbeginline = context.doinlineverbatimbeginline
+local ctx_inlineverbatimemptyline = context.doinlineverbatimemptyline
+local ctx_inlineverbatimstart = context.doinlineverbatimstart
+local ctx_inlineverbatimstop = context.doinlineverbatimstop
-local dodisplayverbatiminitialize = context.dodisplayverbatiminitialize -- the number of arguments might change over time
-local dodisplayverbatimnewline = context.dodisplayverbatimnewline
-local dodisplayverbatimbeginline = context.dodisplayverbatimbeginline
-local dodisplayverbatimemptyline = context.dodisplayverbatimemptyline
-local dodisplayverbatimstart = context.dodisplayverbatimstart
-local dodisplayverbatimstop = context.dodisplayverbatimstop
+local ctx_displayverbatiminitialize = context.dodisplayverbatiminitialize -- the number of arguments might change over time
+local ctx_displayverbatimnewline = context.dodisplayverbatimnewline
+local ctx_displayverbatimbeginline = context.dodisplayverbatimbeginline
+local ctx_displayverbatimemptyline = context.dodisplayverbatimemptyline
+local ctx_displayverbatimstart = context.dodisplayverbatimstart
+local ctx_displayverbatimstop = context.dodisplayverbatimstop
-local verbatim = context.verbatim
-local doverbatimspace = context.doverbatimspace
+local ctx_verbatim = context.verbatim
+local ctx_verbatimspace = context.doverbatimspace
local CargOne = Carg(1)
local function f_emptyline(s,settings)
if settings and settings.nature == "inline" then
- doinlineverbatimemptyline()
+ ctx_inlineverbatimemptyline()
else
- dodisplayverbatimemptyline()
+ ctx_displayverbatimemptyline()
end
end
local function f_beginline(s,settings)
if settings and settings.nature == "inline" then
- doinlineverbatimbeginline()
+ ctx_inlineverbatimbeginline()
else
- dodisplayverbatimbeginline()
+ ctx_displayverbatimbeginline()
end
end
local function f_newline(s,settings)
if settings and settings.nature == "inline" then
- doinlineverbatimnewline()
+ ctx_inlineverbatimnewline()
else
- dodisplayverbatimnewline()
+ ctx_displayverbatimnewline()
end
end
local function f_start(s,settings)
if settings and settings.nature == "inline" then
- doinlineverbatimstart()
+ ctx_inlineverbatimstart()
else
- dodisplayverbatimstart()
+ ctx_displayverbatimstart()
end
end
local function f_stop(s,settings)
if settings and settings.nature == "inline" then
- doinlineverbatimstop()
+ ctx_inlineverbatimstop()
else
- dodisplayverbatimstop()
+ ctx_displayverbatimstop()
end
end
local function f_default(s) -- (s,settings)
- verbatim(s)
+ ctx_verbatim(s)
end
local function f_space() -- (s,settings)
- doverbatimspace()
+ ctx_verbatimspace()
end
local function f_signal() -- (s,settings)
@@ -200,7 +202,7 @@ local function getvisualizer(method,nature)
end
end
-local fallback = context.verbatim
+local ctx_fallback = ctx_verbatim
local function makepattern(visualizer,replacement,pattern)
if not pattern then
@@ -208,9 +210,9 @@ local function makepattern(visualizer,replacement,pattern)
return patterns.alwaystrue
else
if type(visualizer) == "table" and type(replacement) == "string" then
- replacement = visualizer[replacement] or fallback
+ replacement = visualizer[replacement] or ctx_fallback
else
- replacement = fallback
+ replacement = ctx_fallback
end
return (C(pattern) * CargOne) / replacement
end
@@ -319,6 +321,10 @@ function visualizers.register(name,specification)
return specification
end
+function visualizers.getspecification(name)
+ return specifications[lower(name)]
+end
+
local escapepatterns = allocate()
visualizers.escapepatterns = escapepatterns
@@ -506,7 +512,7 @@ local function visualize(content,settings) -- maybe also method in settings
if trace_visualize then
report_visualizers("visualize using method %a",method)
end
- fallback(content,1,settings)
+ ctx_fallback(content,1,settings)
end
end
end
@@ -623,14 +629,16 @@ end
local function getrange(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere
local noflines = #lines
- local first, last = first or 1, last or noflines
+ local first = first or 1
+ local last = last or noflines
if last < 0 then
last = noflines + last
end
- local range = settings.range
local what = settings_to_array(range)
- local r_first, r_last = what[1], what[2]
- local f, l = tonumber(r_first), tonumber(r_last)
+ local r_first = what[1]
+ local r_last = what[2]
+ local f = tonumber(r_first)
+ local l = tonumber(r_last)
if r_first then
if f then
if f > first then
@@ -687,6 +695,7 @@ local function filter(lines,settings) -- todo: inline or display in settings
end
local line, n = 0, 0
local first, last, m = getstrip(lines)
+ local range = settings.range
if range then
first, last = getrange(lines,first,last,range)
first, last = getstrip(lines,first,last)
@@ -698,20 +707,12 @@ end
local getlines = buffers.getlines
--- interface
-
-function commands.doifelsevisualizer(name)
- commands.doifelse(specifications[lower(name)])
-end
-
-commands.loadvisualizer = visualizers.load
-
-- local decodecomment = resolvers.macros.decodecomment -- experiment
-function commands.typebuffer(settings)
+local function typebuffer(settings)
local lines = getlines(settings.name)
if lines then
- dodisplayverbatiminitialize(#lines)
+ ctx_displayverbatiminitialize(#lines)
local content, m = filter(lines,settings)
if content and content ~= "" then
-- content = decodecomment(content)
@@ -721,7 +722,7 @@ function commands.typebuffer(settings)
end
end
-function commands.processbuffer(settings)
+local function processbuffer(settings)
local lines = getlines(settings.name)
if lines then
local content, m = filter(lines,settings)
@@ -738,21 +739,30 @@ end
-- match but slower when there is no match. But anyway, we need a more clever
-- parser so we use lpeg.
--
--- [[\text ]] [[\text{}]] [[\text \text ]] [[\text \\ \text ]]
+-- [[\text ]] [[\text{}]] [[\foo\bar .tex]] [[\text \text ]] [[\text \\ \text ]]
--
-- needed in e.g. tabulate (manuals)
-local compact_all = Cs((P("\\") * ((1-S("\\ "))^1) * (P(" ")/"") * (P(-1) + S("[{")) + 1)^0)
-local compact_last = Cs((P(" ")^1 * P(-1)/"" + 1)^0)
+local fences = S([[[{]])
+local symbols = S([[!#"$%&'*()+,-./:;<=>?@[]^_`{|}~]])
+local space = S([[ ]])
+local backslash = S([[\]])
+local nospace = space^1/""
+local endstring = P(-1)
+
+local compactors = {
+ [v_all] = Cs((backslash * (1-backslash-space)^1 * nospace * (endstring + fences) + 1)^0),
+ [v_absolute] = Cs((backslash * (1-symbols -space)^1 * nospace * (symbols+backslash) + 1) ^0),
+ [v_last] = Cs((space^1 * endstring/"" + 1)^0),
+}
-function commands.typestring(settings)
+local function typestring(settings)
local content = settings.data
if content and content ~= "" then
- local compact = settings.compact
- if compact == v_all then
- content = lpegmatch(compact_all,content)
- elseif compact == v_last then
- content = lpegmatch(compact_last,content)
+ local compact = settings.compact
+ local compactor = compact and compactors[compact]
+ if compactor then
+ content = lpegmatch(compactor,content)
end
-- content = decodecomment(content)
-- content = dotabs(content,settings)
@@ -760,7 +770,7 @@ function commands.typestring(settings)
end
end
-function commands.typefile(settings)
+local function typefile(settings)
local filename = settings.name
local foundname = resolvers.findtexfile(filename)
if foundname and foundname ~= "" then
@@ -782,3 +792,78 @@ function commands.typefile(settings)
end
end
end
+
+implement {
+ name = "type",
+ actions = typestring,
+ arguments = {
+ {
+ { "data" },
+ { "tab" },
+ { "method" },
+ { "compact" },
+ { "nature" },
+ { "escape" },
+ }
+ }
+}
+
+implement {
+ name = "processbuffer",
+ actions = processbuffer,
+ arguments = {
+ {
+ { "name" },
+ { "strip" },
+ { "tab" },
+ { "method" },
+ { "nature" },
+ }
+ }
+}
+
+implement {
+ name = "typebuffer",
+ actions = typebuffer,
+ arguments = {
+ {
+ { "name" },
+ { "strip" },
+ { "range" },
+ { "regime" },
+ { "tab" },
+ { "method" },
+ { "escape" },
+ { "nature" },
+ }
+ }
+}
+
+implement {
+ name = "typefile",
+ actions = typefile,
+ arguments = {
+ {
+ { "name" },
+ { "strip" },
+ { "range" },
+ { "regime" },
+ { "tab" },
+ { "method" },
+ { "escape" },
+ { "nature" },
+ }
+ }
+}
+
+implement {
+ name = "doifelsevisualizer",
+ actions = { visualizers.getspecification, commands.doifelse },
+ arguments = "string"
+}
+
+implement {
+ name = "loadvisualizer",
+ actions = visualizers.load,
+ arguments = "string"
+}
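A minimal standalone sketch (not part of the patch, assuming only stock lpeg outside ConTeXt) of how the v_all entry in the compactors table above strips the space that ends a control word when a fence or the end of the string follows, while other spaces are kept:

local lpeg = require("lpeg")
local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match

local space     = P(" ")
local backslash = P("\\")
local fences    = S("[{")
local endstring = P(-1)
local nospace   = space^1 / ""

-- same shape as the v_all compactor in the hunk above
local compact_all = Cs((backslash * (1 - backslash - space)^1 * nospace * (endstring + fences) + 1)^0)

print(lpegmatch(compact_all, "\\text {foo}"))   -- \text{foo}
print(lpegmatch(compact_all, "\\text \\text ")) -- \text \text   (only the trailing space is dropped)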
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index bdde5df9d..67f861ba0 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -19,6 +19,8 @@
\unprotect
+\startcontextdefinitioncode
+
\definesystemattribute[verbatimline][public]
\appendtoksonce
@@ -43,8 +45,6 @@
{\spaceskip\fontcharwd\font`x\relax
\xspaceskip\spaceskip}
-\unexpanded\def\specialcontrolspace{\hskip\zeropoint\fastcontrolspace\hskip\zeropoint}
-
\setvalue{\??typinglines\v!no }{\buff_verbatim_ignore_hyphens}
\setvalue{\??typinglines\v!normal }{\buff_verbatim_ignore_hyphens}
\setvalue{\??typinglines\v!yes }{\buff_verbatim_obey_breakpoints}
@@ -73,26 +73,30 @@
\unexpanded\def\buff_verbatim_ignore_hyphens
{\language\minusone} % tricky as this affects the pagebuilder
-\def\buff_verbatim_initialize_breaks % order matters
+\def\buff_verbatim_initialize_type_one
+ {\let\obeylines\ignorelines
+ \edef\p_buff_option{\typeparameter\c!option}%
+ \ifx\p_buff_option\v!none
+ \usetypestyleandcolor\c!style\c!color
+ \else\ifx\p_buff_option\empty
+ \usetypestyleandcolor\c!style\c!color
+ \else
+ \usetypestyleparameter\c!style % no color
+ \fi\fi
+ \setcatcodetable\vrbcatcodes}
+
+\def\buff_verbatim_initialize_type_two
{\spaceskip.5\emwidth\relax
\let\obeyedspace\specialobeyedspace
\let\controlspace\specialcontrolspace
\edef\p_buff_lines{\typeparameter\c!lines}%
- \ifcsname\??typinglines\p_buff_lines\endcsname % sets \obeyedspace, \controlspace,
+ \ifcsname\??typinglines\p_buff_lines\endcsname
\csname\??typinglines\p_buff_lines\endcsname
\fi
\edef\p_buff_space{\typeparameter\c!space}%
- \ifcsname\??typingspace\p_buff_space\endcsname % sets \obeyedspace
+ \ifcsname\??typingspace\p_buff_space\endcsname
\csname\??typingspace\p_buff_space\endcsname
- \fi}
-
-\def\buff_verbatim_initialize_type_one
- {\let\obeylines\ignorelines
- \usetypestyleandcolor\c!style\c!color
- \setcatcodetable\vrbcatcodes}
-
-\def\buff_verbatim_initialize_type_two
- {\buff_verbatim_initialize_breaks
+ \fi
\relax\the\everyinitializeverbatim\relax}
\unexpanded\def\doinitializeverbatim % for use elsewhere .. temp hack (see lxml-ini)
@@ -102,10 +106,11 @@
\let\buff_verbatim_set_line_margin\relax
\def\buff_verbatim_set_line_margin_indeed
- {\hskip\doifoddpageelse{\typingparameter\c!oddmargin}{\typingparameter\c!evenmargin}\relax}
+ {\hskip\doifelseoddpage{\typingparameter\c!oddmargin}{\typingparameter\c!evenmargin}\relax}
-\def\buff_verbatim_check_margins
- {\scratchskip\typingparameter\c!oddmargin\relax
+\def\buff_verbatim_initialize_typing_one
+ {\usebodyfontparameter\typingparameter
+ \scratchskip\typingparameter\c!oddmargin\relax
\ifzeropt\scratchskip \else
\let\buff_verbatim_set_line_margin\buff_verbatim_set_line_margin_indeed
\fi
@@ -115,16 +120,29 @@
\fi
\ifx\buff_verbatim_set_line_margin\relax
\doadaptleftskip{\typingparameter\c!margin}%
- \fi}
-
-\def\buff_verbatim_initialize_typing_one
- {\switchtobodyfont[\typingparameter\c!bodyfont]% can be low level call
- \buff_verbatim_check_margins
- \usetypingstyleandcolor\c!style\c!color
- \doifsomething{\typingparameter\c!align}{\setupalign[\typingparameter\c!align]}}
+ \fi
+ \edef\p_buff_option{\typingparameter\c!option}%
+ \ifx\p_buff_option\v!none
+ \usetypingstyleandcolor\c!style\c!color
+ \else\ifx\p_buff_option\empty
+ \usetypingstyleandcolor\c!style\c!color
+ \else
+ \usetypingstyleparameter\c!style % no color !
+ \fi\fi
+ \usealignparameter\typingparameter}
\def\buff_verbatim_initialize_typing_two
- {\buff_verbatim_initialize_breaks
+ {\spaceskip.5\emwidth\relax
+ \let\obeyedspace\specialobeyedspace
+ \let\controlspace\specialcontrolspace
+ \edef\p_buff_lines{\typingparameter\c!lines}%
+ \ifcsname\??typinglines\p_buff_lines\endcsname
+ \csname\??typinglines\p_buff_lines\endcsname
+ \fi
+ \edef\p_buff_space{\typingparameter\c!space}%
+ \ifcsname\??typingspace\p_buff_space\endcsname
+ \csname\??typingspace\p_buff_space\endcsname
+ \fi
\relax\the\everyinitializeverbatim\relax}
%D \macros
@@ -169,7 +187,11 @@
\appendtoks
\setuevalue{\e!start\currenttyping}{\buff_verbatim_typing_start{\currenttyping}}%
\setuevalue{\e!stop \currenttyping}{\buff_verbatim_typing_stop {\currenttyping}}%
- \normalexpanded{\definelinenumbering[\currenttyping]}%
+ \ifx\currenttypingparent\empty
+ \normalexpanded{\definelinenumbering[\currenttyping]}%
+ \else
+ \normalexpanded{\definelinenumbering[\currenttyping][\currenttypingparent]}%
+ \fi
\to \everydefinetyping
\appendtoks
@@ -177,7 +199,12 @@
\to \everysetuptyping
\def\buff_verbatim_initialize_visualizer#1%
- {\ifproductionrun\ctxcommand{loadvisualizer("#1")}\fi}
+ {\ifproductionrun\clf_loadvisualizer{#1}\fi}
+
+\unexpanded\def\doifelsevisualizer#1%
+ {\clf_doifelsevisualizer{#1}}
+
+\let\doifvisualizerelse\doifelsevisualizer
%D Verbatim command are very sensitive to argument processing,
%D which is a direct result of the \CATCODES\ being fixed at
@@ -261,7 +288,7 @@
{\dontleavehmode
\bgroup
\edef\currenttype{#1}%
- \doifnextoptionalelse\buff_verbatim_type_yes\buff_verbatim_type_nop}
+ \doifelsenextoptionalcs\buff_verbatim_type_yes\buff_verbatim_type_nop}
\def\buff_verbatim_type_yes[#1]%
{\setupcurrenttype[#1]%
@@ -277,7 +304,7 @@
\edef\currenttype{#1}%
\lettypeparameter\c!lines\v!hyphenated
\let\specialobeyedspace\specialstretchedspace
- \doifnextoptionalelse\buff_verbatim_type_yes\buff_verbatim_type_nop}
+ \doifelsenextoptionalcs\buff_verbatim_type_yes\buff_verbatim_type_nop}
\def\buff_verbatim_type_one
{\ifx\next\bgroup
@@ -316,33 +343,47 @@
\def\buff_verbatim_type_normal#1%
{\buff_verbatim_initialize_type_two
- \dostarttagged\t!verbatim\currenttype
- \ctxcommand{typestring{
- data = \!!bs\detokenize{#1}\!!es,
- tab = "\typeparameter\c!tab",
- method = "\typeparameter\c!option",
- nature = "inline",
- compact = "\typeparameter\c!compact", % none | all | last (all needed in tabulate etc for manuals)
- }}%
+ \dostarttaggedchained\t!verbatim\currenttype\??type
+ \clf_type
+ data {\detokenize{#1}}%
+ tab {\typeparameter\c!tab}%
+ method {\p_buff_option}%
+ compact {\typeparameter\c!compact}% % none | all | last (all needed in tabulate etc for manuals)
+ escape {\typeparameter\c!escape}% % new but rather useless imo (escaping in general is not used much)
+ % nature {inline}% is default
+ \relax
\dostoptagged
\buff_verbatim_right_of_type
\egroup}
\def\buff_verbatim_type_nested#1%
{\buff_verbatim_initialize_type_two
- \dostarttagged\t!verbatim\currenttype
- \ctxcommand{typestring{
- data = \!!bs\detokenize{#1}\!!es,
- tab = "\typeparameter\c!tab",
- method = "nested", % we force a special visualizer
- option = "\typeparameter\c!option", % extra visualizer (maybe: nested,\typeparameter\c!option)
- nature = "inline",
- }}%
+ \dostarttaggedchained\t!verbatim\currenttype\??type
+ \clf_type
+ data {\detokenize{#1}}%
+ tab {\typeparameter\c!tab}%
+ method {\p_buff_option}% % extra visualizer (maybe: nested,\typeparameter\c!option)
+ escape {\typeparameter\c!escape}% % new but rather useless imo (escaping in general is not used much)
+ % nature {inline}% is default
+ method {nested}%
+ \relax
\dostoptagged
\buff_verbatim_right_of_type
\egroup
\gobbleoneargument} % grab last >
+%D The \type {compact} option can come in handy in the case of inline argument passing
+%D
+%D \starttyping
+%D \definetype[TeXcodeA][option=TEX]
+%D \definetype[TeXcodeB][option=TEX,compact=all]
+%D \definetype[TeXcodeC][option=TEX,compact=absolute]
+%D
+%D \def\argA#1{\TeXcodeA{{#1}}}
+%D \def\argB#1{\TeXcodeB{{#1}}}
+%D \def\argC#1{\TeXcodeC{{#1}}}
+%D \stoptyping
+
%D \macros
%D {obeyhyphens,obeybreakpoints}
%D
@@ -353,15 +394,24 @@
%D works all right, but a decent hyphenation support of
%D \type{\tt} text will be implemented soon.
-\unexpanded\def\specialfixedspace {\kern\interwordspace\relax}
-\unexpanded\def\specialobeyedspace {\hskip\interwordspace\relax} % better than spaceskip
-\unexpanded\def\specialstretchedspace{\hskip.5\interwordspace\s!plus.125\interwordspace\relax} % \interwordstretch can be zero
-\unexpanded\def\specialcontrolspace {\hskip\zeropoint\hbox{\normalcontrolspace}\hskip\zeropoint\relax}
+\unexpanded\def\specialfixedspace {\kern\interwordspace\relax}
+\unexpanded\def\specialobeyedspace {\hskip\interwordspace\relax} % better than spaceskip
+\unexpanded\def\specialstretchedspace {\hskip.5\interwordspace\s!plus.125\interwordspace\relax} % \interwordstretch can be zero
+\unexpanded\def\specialcontrolspace {\normalcontrolspace\allowbreak} % uses fallback
+
+\unexpanded\def\explicitfixedspace {\asciispacechar}
+\unexpanded\def\explicitobeyedspace {\asciispacechar\allowbreak}
+\unexpanded\def\explicitstretchedspace{\asciispacechar\hskip\zeropoint\s!plus.125\interwordspace\relax}%
+\unexpanded\def\explicitcontrolspace {\optionalcontrolspace\allowbreak} % uses asciispace
+
+\appendtoks
+ \unexpanded\def\obeyedspace{\hskip\zeropoint\asciispacechar\hskip\zeropoint}%
+\to \everyenableelements
\unexpanded\def\obeyhyphens
{\let\obeyedspace \specialobeyedspace % maybe \specialstretchedspace
\let\controlspace\specialcontrolspace
- \spaceskip.25em\relax} % hm a bit of stretch !
+ \spaceskip.25\emwidth\relax} % hm a bit of stretch !
\unexpanded\def\obeybreakpoints
{\ignorehyphens
@@ -414,9 +464,15 @@
\let\normaltextmat\mat
\let\normaltextdis\dis
-\unexpanded\def\astype{\bgroup\usetypestyleandcolor\c!style\c!color\let\nexttoken}
+\unexpanded\def\astype
+ {\bgroup\usetypestyleandcolor\c!style\c!color\let\nexttoken}
-\unexpanded\def\asciistr#1{\dontleavehmode{\verbatimfont\detokenize{#1}}} % use in some old styles
+\unexpanded\def\asciistr#1% used in some old styles
+ {\dontleavehmode\begingroup
+ \dostarttagged\t!verbatim\empty
+ \usetypestyleandcolor\c!style\c!color\detokenize{#1}%
+ \dostoptagged
+ \endgroup}
%D The basic display verbatim commands are defined in an indirect way. As we
%D will see, they are a specific case of a more general mechanism.
@@ -456,7 +512,7 @@
\def\buff_verbatim_typing_start_yes[#1]%
{\typingparameter\c!before
\startpacked[\v!blank]
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setupcurrenttyping[#1]}
{\doifinset\v!continue{#1}{\lettypingparameter\c!continue\v!yes}}%
\buff_verbatim_setup_line_numbering
@@ -465,22 +521,22 @@
\normalexpanded{\buff_verbatim_type_block{\e!start\currenttyping}{\e!stop\currenttyping}}}
\unexpanded\def\buff_verbatim_type_block#1#2%
- {\buff_pickup{_typing_}{#1}{#2}{}{\buff_verbatim_type_block_verbatim_indeed{#1}{#2}}} % was dowithbuffer
+ {\buff_pickup{_typing_}{#1}{#2}{}{\buff_verbatim_type_block_verbatim_indeed{#1}{#2}}\plusone} % was dowithbuffer
\def\buff_verbatim_type_block_verbatim_indeed#1#2%
{\buff_verbatim_initialize_typing_two
- \dostarttagged\t!verbatimblock\currenttyping
+ \dostarttaggedchained\t!verbatimblock\currenttyping\??typing
\beginofverbatimlines
\dostarttagged\t!verbatimlines\empty
- \ctxcommand{typebuffer {
- name = "_typing_",
- strip = "\typingparameter\c!strip",
- range = "\typingparameter\c!range",
- tab = "\typingparameter\c!tab",
- method = "\typingparameter\c!option",
- escape = \!!bs\typingparameter\c!escape\!!es,
- nature = "display",
- }}%
+ \clf_typebuffer
+ name {_typing_}%
+ strip {\typingparameter\c!strip}%
+ range {\typingparameter\c!range}%
+ tab {\typingparameter\c!tab}%
+ method {\p_buff_option}%
+ escape {\typingparameter\c!escape}%
+ nature {display}%
+ \relax
\dostoptagged
\endofverbatimlines
\dostoptagged
@@ -556,7 +612,7 @@
\setuptyping[#1][#2]%
\buff_verbatim_type_file_checked{#1}{#3}%
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setuptyping[\v!file][#1]
\buff_verbatim_type_file_checked\v!file{#3}}
{\buff_verbatim_type_file_checked{#1}{#3}}%
@@ -581,6 +637,8 @@
\expandafter\firstoftwoarguments
\fi}
+\let\doiftypingfileelse\doifelsetypingfile
+
\def\buff_verbatim_type_file_indeed#1#2% category name
{\edef\currenttyping{#1}%
\typingparameter\c!before
@@ -588,18 +646,18 @@
\buff_verbatim_setup_line_numbering
\buff_verbatim_initialize_typing_one
\buff_verbatim_initialize_typing_two
- \dostarttagged\t!verbatimblock\currenttyping
+ \dostarttaggedchained\t!verbatimblock\currenttyping\??typing
\beginofverbatimlines
\dostarttagged\t!verbatimlines\empty
- \ctxcommand{typefile {
- name = "#2",
- strip = "\typingparameter\c!strip",
- range = "\typingparameter\c!range",
- regime = "\currentregime",
- tab = "\typingparameter\c!tab",
- method = "\typingparameter\c!option",
- nature = "display",
- }}%
+ \clf_typefile
+ name {#2}%
+ strip {\typingparameter\c!strip}%
+ range {\typingparameter\c!range}%
+ regime {\currentregime}%
+ tab {\typingparameter\c!tab}%
+ method {\p_buff_option}%
+ nature {display}%
+ \relax
\dostoptagged
\endofverbatimlines
\dostoptagged
@@ -696,8 +754,11 @@
\definetyping[\v!typing]
-\setuptyping[\v!file] [\s!parent=\??typing\v!typing] % we don't want \start..\stop overload
-\setuptyping[\v!buffer][\s!parent=\??typing\v!file] % we don't want \start..\stop overload
+\setuptyping [\v!file] [\s!parent=\??typing \v!typing] % we don't want \start..\stop overload
+\setuplinenumbering[\v!file] [\s!parent=\??linenumbering\v!typing]
+
+\setuptyping [\v!buffer][\s!parent=\??typing \v!file] % we don't want \start..\stop overload
+\setuplinenumbering[\v!buffer][\s!parent=\??linenumbering\v!file]
%D The setups for inline verbatim default to:
@@ -734,7 +795,7 @@
\setuptyping[\v!buffer][#2]%
\processcommalist[#1]{\buff_verbatim_type_buffer_indeed\v!buffer}% [name] [settings]
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setuptyping[\v!buffer][#1]%
\buff_verbatim_type_buffer_indeed\v!buffer\empty}% [settings]
{\processcommalist[#1]{\buff_verbatim_type_buffer_indeed\v!buffer}}% [name]
@@ -758,20 +819,22 @@
\buff_verbatim_setup_line_numbering
\buff_verbatim_initialize_typing_one
\buff_verbatim_initialize_typing_two
+ \dostarttaggedchained\t!verbatimblock{#1}\??typing
\beginofverbatimlines
- \dostarttagged\t!verbatimblock{#1}%
- \ctxcommand{typebuffer {
- name = "#2",
- strip = "\typingparameter\c!strip",
- range = "\typingparameter\c!range",
- regime = "\currentregime",
- tab = "\typingparameter\c!tab",
- method = "\typingparameter\c!option",
- escape = \!!bs\typingparameter\c!escape\!!es,
- nature = "display",
- }}%
+ \dostarttagged\t!verbatimlines\empty
+ \clf_typebuffer
+ name {#2}%
+ strip {\typingparameter\c!strip}%
+ range {\typingparameter\c!range}%
+ regime {\currentregime}%
+ tab {\typingparameter\c!tab}%
+ method {\p_buff_option}%
+ escape {\typingparameter\c!escape}%
+ % nature {display}%
+ \relax
\dostoptagged
\endofverbatimlines
+ \dostoptagged
\stoppacked
\typingparameter\c!after}
@@ -792,7 +855,7 @@
\setuptyping[\v!buffer][#2]%
\processcommalist[#1]{\buff_verbatim_process_indeed\v!buffer}% [name] [settings]
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setuptyping[\v!buffer][#1]%
\buff_verbatim_process_indeed\v!buffer\empty}% [settings]
{\processcommalist[#1]{\buff_verbatim_process_indeed\v!buffer}}% [name]
@@ -803,13 +866,13 @@
\def\buff_verbatim_process_indeed#1#2%
{\edef\currenttyping{#1}%
- \ctxcommand{processbuffer {
- name = "#2",
- strip = "\typingparameter\c!strip",
- tab = "\typingparameter\c!tab",
- method = "\typingparameter\c!option",
- nature = "direct",
- }}}
+ \clf_processbuffer
+ name {#2}%
+ strip {\typingparameter\c!strip}%
+ tab {\typingparameter\c!tab}%
+ % method {\p_buff_option}%
+ nature {direct}%
+ \relax}
% so far for obsolete
@@ -824,8 +887,10 @@
\fi
\noindent
\buff_verbatim_set_line_margin
- \the\everyline\strut
- \dostarttagged\t!verbatimline\empty}
+ \the\everyline % maybe also after starttagged
+ \strut % after starttagged, else break !
+ \dostarttagged\t!verbatimline\empty
+ }
\unexpanded\def\buff_verbatim_end_of_line
{\dostoptagged
@@ -850,7 +915,7 @@
\newcount \c_buff_verbatim_current
\newconditional\c_buff_optimize_linebreaks
-\def\doverbatimspace {\obeyedspace}
+ \def\doverbatimspace {\obeyedspace}
\unexpanded\def\doinlineverbatimstart {}
\unexpanded\def\doinlineverbatimstop {}
@@ -908,4 +973,6 @@
\def\tex #1{\letterbackslash#1}%
\to \everysimplifycommands
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/bxml-apa.mkiv b/tex/context/base/bxml-apa.mkiv
index 7665d829c..6513a71c2 100644
--- a/tex/context/base/bxml-apa.mkiv
+++ b/tex/context/base/bxml-apa.mkiv
@@ -342,7 +342,7 @@
\bibxmlsetup{bibtex:apa:common:author-editors-crossref-year}
\bibxmldoif {title} {
\bgroup\it\bibxmlflush{title}\/\egroup
- \doifmodeelse {bibtex:apa:edited-book} {
+ \doifelsemode {bibtex:apa:edited-book} {
\bibxmldoifelse {volume} {
\bibtexspace Number\nonbreakablespace\bibxmlflush{volume}
\bibxmldoifelse {series} {
@@ -396,7 +396,7 @@
\bibxmldoifelse {title} {
\bgroup\it\bibxmlflush{title}\/\egroup
} {
- \doifmodeelse {bibtex:apa:edited-book} {
+ \doifelsemode {bibtex:apa:edited-book} {
\bibxmldoifelse {volume} {
\bibtexspace number\nonbreakablespace\bibxmlflush{volume}
\bibxmldoifelse {series} {
diff --git a/tex/context/base/catc-act.mkiv b/tex/context/base/catc-act.mkiv
index df228c203..8c7935d4d 100644
--- a/tex/context/base/catc-act.mkiv
+++ b/tex/context/base/catc-act.mkiv
@@ -39,10 +39,10 @@
\setnewconstant\c_syst_catcodes_hack\tildeasciicode % also defined in catc-ini.mkiv
-\normalprotected\def\defineactivecharacter #1#2 #3% uses \doifnumberelse which is not yet defined
+\normalprotected\def\defineactivecharacter #1#2 #3% uses \doifelsenumber which is not yet defined
{\c_syst_catcodes_c\uccode\c_syst_catcodes_hack
- \if#1"\uccode\c_syst_catcodes_hack\expandafter\doifnumberelse\expandafter{\string#1#2}\empty #1#2\else
- \uccode\c_syst_catcodes_hack\expandafter\doifnumberelse\expandafter{\string#1#2}\empty`#1#2\fi
+ \if#1"\uccode\c_syst_catcodes_hack\expandafter\doifelsenumber\expandafter{\string#1#2}\empty #1#2\else
+ \uccode\c_syst_catcodes_hack\expandafter\doifelsenumber\expandafter{\string#1#2}\empty`#1#2\fi
\catcode\uccode\c_syst_catcodes_hack\activecatcode
\uppercase{\def\m_syst_catcodes_temp{~}}% brrr
\uccode\c_syst_catcodes_hack\c_syst_catcodes_c
diff --git a/tex/context/base/catc-ctx.mkiv b/tex/context/base/catc-ctx.mkiv
index ddade7f52..5af8a5035 100644
--- a/tex/context/base/catc-ctx.mkiv
+++ b/tex/context/base/catc-ctx.mkiv
@@ -142,4 +142,38 @@
\normalprotected\def\stopcontextcode
{\popcatcodetable}
+% not visible, only for special cases
+
+\newcatcodetable \ctdcatcodes % context definitions
+
+\startcatcodetable \ctdcatcodes
+ \catcode\tabasciicode \ignorecatcode
+ \catcode\endoflineasciicode \ignorecatcode
+ \catcode\formfeedasciicode \ignorecatcode
+ \catcode\spaceasciicode \ignorecatcode
+ \catcode\endoffileasciicode \ignorecatcode
+ \catcode\circumflexasciicode \superscriptcatcode % candidate
+ \catcode\underscoreasciicode \lettercatcode
+ \catcode\ampersandasciicode \alignmentcatcode
+% \catcode\colonasciicode \lettercatcode % candidate
+ \catcode\backslashasciicode \escapecatcode
+ \catcode\leftbraceasciicode \begingroupcatcode
+ \catcode\rightbraceasciicode \endgroupcatcode
+ \catcode\dollarasciicode \mathshiftcatcode
+ \catcode\hashasciicode \parametercatcode
+ \catcode\commentasciicode \commentcatcode
+ \catcode\atsignasciicode \lettercatcode
+ \catcode\exclamationmarkasciicode\lettercatcode
+ \catcode\questionmarkasciicode \lettercatcode
+ \catcode\tildeasciicode \activecatcode
+ \catcode\barasciicode \activecatcode
+\stopcatcodetable
+
+\normalprotected\def\startcontextdefinitioncode
+ {\pushcatcodetable
+ \catcodetable\ctdcatcodes}
+
+\normalprotected\def\stopcontextdefinitioncode
+ {\popcatcodetable}
+
\endinput
diff --git a/tex/context/base/catc-ini.lua b/tex/context/base/catc-ini.lua
index d4f9b65af..9241f5a1b 100644
--- a/tex/context/base/catc-ini.lua
+++ b/tex/context/base/catc-ini.lua
@@ -39,3 +39,7 @@ end
table.setmetatableindex(numbers,function(t,k) if type(k) == "number" then t[k] = k return k end end)
table.setmetatableindex(names, function(t,k) if type(k) == "string" then t[k] = k return k end end)
+
+commands.registercatcodetable = catcodes.register
+--------.definecatcodetable = characters.define -- not yet defined
+--------.setcharactercodes = characters.setcodes -- not yet defined
diff --git a/tex/context/base/catc-ini.mkiv b/tex/context/base/catc-ini.mkiv
index 791ce31c4..bb1e47faa 100644
--- a/tex/context/base/catc-ini.mkiv
+++ b/tex/context/base/catc-ini.mkiv
@@ -54,6 +54,7 @@
\setnewconstant\ampersandasciicode 38
\setnewconstant\singlequoteasciicode 39 % '
\setnewconstant\primeasciicode 39 % '
+\setnewconstant\hyphenasciicode 45
\setnewconstant\forwardslashasciicode 47 % /
\setnewconstant\colonasciicode 58
\setnewconstant\lessthanasciicode 60 % < used as alternative verbatim {
@@ -87,16 +88,25 @@
\xdef\outputnewlinechar{^^J}%
\endgroup}
-%D We predefine some prefixes ahead of syst-aux and mult-sys. We reserve 8 slots
-%D for catcodes.
+%D We predefine some prefixes ahead of syst-aux and mult-sys.
-\def\??catcodelet {1>>} % let : \let
-\def\??catcodedef {2>>} % def : \def
-\def\??catcodeued {3>>} % ued : \unexpanded\def
-\def\??catcodeget {4>>} % \meaning
+% We reserve 8 slots for catcodes.
+%
+% \def\??catcodelet {1>>} % let : \let
+% \def\??catcodedef {2>>} % def : \def
+% \def\??catcodeued {3>>} % ued : \unexpanded\def
+% \def\??catcodeget {4>>} % \meaning
+%
+% \def\??catcodetablet{5>>}
+% \def\??catcodetablen{6>>}
+
+\installsystemnamespace {catcodelet} % let : \let
+\installsystemnamespace {catcodedef} % def : \def
+\installsystemnamespace {catcodeued} % ued : \unexpanded\def
+\installsystemnamespace {catcodeget} % \meaning
-\def\??catcodetablet{5>>}
-\def\??catcodetablen{6>>}
+\installsystemnamespace {catcodetablet}
+\installsystemnamespace {catcodetablen}
\newcount\c_syst_catcodes_n \c_syst_catcodes_n\zerocount % 0 = signal, so advance before allocate
\newcount\c_syst_catcodes_a
@@ -108,7 +118,7 @@
\expandafter\xdef\csname\??catcodetablen\number\c_syst_catcodes_n\endcsname{\string#1}% logging
\newconstant#1%
#1\c_syst_catcodes_n
- \ctxlua{catcodes.register("\expandafter\gobbleoneargument\string#1",\number#1)}}
+ \ctxcommand{registercatcodetable("\expandafter\gobbleoneargument\string#1",\number#1)}}
\newtoks \everysetdefaultcatcodes
@@ -119,7 +129,7 @@
\catcode\commentasciicode \othercatcode
\catcode\delasciicode \othercatcode}
-\long\normalprotected\def\startcatcodetable#1#2\stopcatcodetable
+\normalprotected\def\startcatcodetable#1#2\stopcatcodetable
{\begingroup
\catcodetable\inicatcodes
\the\everysetdefaultcatcodes
@@ -129,7 +139,7 @@
\let\stopcatcodetable\relax
-\long\normalprotected\def\startextendcatcodetable#1#2\stopextendcatcodetable
+\normalprotected\def\startextendcatcodetable#1#2\stopextendcatcodetable
{\begingroup
\catcodetable#1\relax
\globaldefs\plusone
@@ -146,7 +156,7 @@
% ==
%
-% \long\normalprotected\def\startextendcatcodetable#1#2\stopextendcatcodetable
+% \normalprotected\def\startextendcatcodetable#1#2\stopextendcatcodetable
% {\bgroup
% \scratchcounter\the\catcodetable
% \catcodetable #1 #2
diff --git a/tex/context/base/catc-sym.mkiv b/tex/context/base/catc-sym.mkiv
index 425a5393c..71e02f31d 100644
--- a/tex/context/base/catc-sym.mkiv
+++ b/tex/context/base/catc-sym.mkiv
@@ -177,12 +177,12 @@
%D
%D \getbuffer[c]
-\long\def\rescan#1{\scantokens{#1\ignorespaces}}
-\long\def\rescanwithsetup#1#2{\begingroup\directsetup{#1}\scantokens{#2\ignorespaces}\endgroup}
+\def\rescan #1{\scantokens{#1\ignorespaces}}
+\def\rescanwithsetup#1#2{\begingroup\directsetup{#1}\scantokens{#2\ignorespaces}\endgroup}
\ifx\scantextokens\undefined \else
- \long\def\rescan#1{\scantextokens{#1}}
- \long\def\rescanwithsetup#1#2{\begingroup\directsetup{#1}\scantextokens{#2}\endgroup}
+ \def\rescan #1{\scantextokens{#1}}
+ \def\rescanwithsetup#1#2{\begingroup\directsetup{#1}\scantextokens{#2}\endgroup}
\fi
\endinput
diff --git a/tex/context/base/catc-xml.mkiv b/tex/context/base/catc-xml.mkiv
index 5e7df11f5..a23a2fe0a 100644
--- a/tex/context/base/catc-xml.mkiv
+++ b/tex/context/base/catc-xml.mkiv
@@ -114,20 +114,11 @@
%D We register the catcodetables at the \LUA\ end where some further
%D initializations take place.
-\ctxlua {
- characters.define(
- { % letter catcodes
- \number\xmlcatcodesn,
- \number\xmlcatcodese,
- \number\xmlcatcodesr,
- },
- { % activate catcodes
- \number\xmlcatcodesn,
- \number\xmlcatcodese,
- \number\xmlcatcodesr,
- }
- )
- catcodes.register("xmlcatcodes",\number\xmlcatcodes)
-}
+\ctxcommand{definecatcodetable(
+ {\number\xmlcatcodesn,\number\xmlcatcodese,\number\xmlcatcodesr},% letter catcodes
+ {\number\xmlcatcodesn,\number\xmlcatcodese,\number\xmlcatcodesr} % activate catcodes
+)}
+
+\ctxcommand{registercatcodetable("xmlcatcodes",\number\xmlcatcodes)}
\endinput
diff --git a/tex/context/base/char-act.mkiv b/tex/context/base/char-act.mkiv
index 011c29d07..7d7268c8b 100644
--- a/tex/context/base/char-act.mkiv
+++ b/tex/context/base/char-act.mkiv
@@ -24,6 +24,8 @@
%D \NEWLINE\ and \NEWPAGE\ active and assigning them
%D \type{\obeysomething}, but first we set some default values.
+% These are expandable!
+
\def\obeyedspace {\space}
\def\obeyedtab {\obeyedspace}
\def\obeyedline {\par}
@@ -36,7 +38,10 @@
%D spaces (control spaces) we only have to adapt the definition
%D of \type{\obeyedspace} to:
-\unexpanded\def\controlspace{\hbox{\char32}} % rather tex, we need the unicode value
+\chardef\asciispacechar\spaceasciicode % a real space character
+
+\unexpanded\def\naturalspace{\asciispacechar}
+\unexpanded\def\controlspace{\hbox{\asciispacechar}} % rather tex, we need the unicode value
\unexpanded\def\normalspaces{\catcode\spaceasciicode\spacecatcode}
\bgroup
diff --git a/tex/context/base/char-cjk.lua b/tex/context/base/char-cjk.lua
index 3d7de1423..30f618896 100644
--- a/tex/context/base/char-cjk.lua
+++ b/tex/context/base/char-cjk.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['char-cjk'] = {
local setmetatable = setmetatable
local insert = table.insert
local floor = math.floor
-local format = string.format
+local formatters = string.formatters
local utfchar = utf.char
local ranges = characters.ranges
@@ -85,8 +85,7 @@ local tail_consonants = { [0] =
-- local lead_consonant = floor( index / NCount)
-- local medial_vowel = floor((index % NCount) / TCount)
-- local tail_consonant = index % TCount
--- return format(
--- "HANGUL SYLLABLE %s%s%s",
+-- return formatters["HANGUL SYLLABLE %s%s%s"](
-- lead_consonants[lead_consonant],
-- medial_vowels [medial_vowel ],
-- tail_consonants[tail_consonant]
@@ -100,8 +99,7 @@ local function description(unicode)
local lead_consonant = floor( index / (21 * 28))
local medial_vowel = floor((index % (21 * 28)) / 28)
local tail_consonant = index % 28
- return format(
- "HANGUL SYLLABLE %s%s%s",
+ return formatters["HANGUL SYLLABLE %s%s%s"](
lead_consonants[lead_consonant],
medial_vowels [medial_vowel ],
tail_consonants[tail_consonant]
@@ -363,3 +361,5 @@ insert(ranges, hangul_syllable_range)
insert(ranges, cjk_ideograph_range)
insert(ranges, cjk_ideograph_extension_a_range)
insert(ranges, cjk_ideograph_extension_b_range)
+
+-- Japanese
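A small standalone check (not part of the patch) of the index arithmetic used by the description() function in the hunk above; the lead/medial/tail indices for the first and last hangul syllables come out as expected:

local floor = math.floor

local function decompose(unicode)
    local index          = unicode - 0xAC00
    local lead_consonant = floor( index / (21 * 28))
    local medial_vowel   = floor((index % (21 * 28)) / 28)
    local tail_consonant = index % 28
    return lead_consonant, medial_vowel, tail_consonant
end

print(decompose(0xAC00)) -- 0  0  0   (G + A + no tail: HANGUL SYLLABLE GA)
print(decompose(0xD7A3)) -- 18 20 27  (the last syllable in the range)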
diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua
index 9642d1736..90ac4609d 100644
--- a/tex/context/base/char-def.lua
+++ b/tex/context/base/char-def.lua
@@ -21,12 +21,12 @@ the trouble. Some additional data is kept in other files.
characters = characters or { }
characters.data={
- [0x0000]={
+ [0x0]={
category="cc",
description="NULL",
direction="bn",
linebreak="cm",
- unicodeslot=0x0000,
+ unicodeslot=0x0,
},
{
adobename="controlSTX",
@@ -34,7 +34,7 @@ characters.data={
description="START OF HEADING",
direction="bn",
linebreak="cm",
- unicodeslot=0x0001,
+ unicodeslot=0x1,
},
{
adobename="controlSOT",
@@ -42,7 +42,7 @@ characters.data={
description="START OF TEXT",
direction="bn",
linebreak="cm",
- unicodeslot=0x0002,
+ unicodeslot=0x2,
},
{
adobename="controlETX",
@@ -50,7 +50,7 @@ characters.data={
description="END OF TEXT",
direction="bn",
linebreak="cm",
- unicodeslot=0x0003,
+ unicodeslot=0x3,
},
{
adobename="controlEOT",
@@ -58,7 +58,7 @@ characters.data={
description="END OF TRANSMISSION",
direction="bn",
linebreak="cm",
- unicodeslot=0x0004,
+ unicodeslot=0x4,
},
{
adobename="controlENQ",
@@ -66,7 +66,7 @@ characters.data={
description="ENQUIRY",
direction="bn",
linebreak="cm",
- unicodeslot=0x0005,
+ unicodeslot=0x5,
},
{
adobename="controlACK",
@@ -74,7 +74,7 @@ characters.data={
description="ACKNOWLEDGE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0006,
+ unicodeslot=0x6,
},
{
adobename="controlBEL",
@@ -82,7 +82,7 @@ characters.data={
description="BELL",
direction="bn",
linebreak="cm",
- unicodeslot=0x0007,
+ unicodeslot=0x7,
},
{
adobename="controlBS",
@@ -90,7 +90,7 @@ characters.data={
description="BACKSPACE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0008,
+ unicodeslot=0x8,
},
{
adobename="controlHT",
@@ -98,7 +98,7 @@ characters.data={
description="CHARACTER TABULATION",
direction="s",
linebreak="ba",
- unicodeslot=0x0009,
+ unicodeslot=0x9,
},
{
adobename="controlLF",
@@ -106,7 +106,7 @@ characters.data={
description="LINE FEED (LF)",
direction="b",
linebreak="lf",
- unicodeslot=0x000A,
+ unicodeslot=0xA,
},
{
adobename="controlVT",
@@ -114,7 +114,7 @@ characters.data={
description="LINE TABULATION",
direction="s",
linebreak="bk",
- unicodeslot=0x000B,
+ unicodeslot=0xB,
},
{
adobename="controlFF",
@@ -122,7 +122,7 @@ characters.data={
description="FORM FEED (FF)",
direction="ws",
linebreak="bk",
- unicodeslot=0x000C,
+ unicodeslot=0xC,
},
{
adobename="controlCR",
@@ -130,7 +130,7 @@ characters.data={
description="CARRIAGE RETURN (CR)",
direction="b",
linebreak="cr",
- unicodeslot=0x000D,
+ unicodeslot=0xD,
},
{
adobename="controlSO",
@@ -138,7 +138,7 @@ characters.data={
description="SHIFT OUT",
direction="bn",
linebreak="cm",
- unicodeslot=0x000E,
+ unicodeslot=0xE,
},
{
adobename="controlSI",
@@ -146,7 +146,7 @@ characters.data={
description="SHIFT IN",
direction="bn",
linebreak="cm",
- unicodeslot=0x000F,
+ unicodeslot=0xF,
},
{
adobename="controlDLE",
@@ -154,7 +154,7 @@ characters.data={
description="DATA LINK ESCAPE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0010,
+ unicodeslot=0x10,
},
{
adobename="controlDC1",
@@ -162,7 +162,7 @@ characters.data={
description="DEVICE CONTROL ONE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0011,
+ unicodeslot=0x11,
},
{
adobename="controlDC2",
@@ -170,7 +170,7 @@ characters.data={
description="DEVICE CONTROL TWO",
direction="bn",
linebreak="cm",
- unicodeslot=0x0012,
+ unicodeslot=0x12,
},
{
adobename="controlDC3",
@@ -178,7 +178,7 @@ characters.data={
description="DEVICE CONTROL THREE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0013,
+ unicodeslot=0x13,
},
{
adobename="controlDC4",
@@ -186,7 +186,7 @@ characters.data={
description="DEVICE CONTROL FOUR",
direction="bn",
linebreak="cm",
- unicodeslot=0x0014,
+ unicodeslot=0x14,
},
{
adobename="controlNAK",
@@ -194,7 +194,7 @@ characters.data={
description="NEGATIVE ACKNOWLEDGE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0015,
+ unicodeslot=0x15,
},
{
adobename="controlSYN",
@@ -202,7 +202,7 @@ characters.data={
description="SYNCHRONOUS IDLE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0016,
+ unicodeslot=0x16,
},
{
adobename="controlETB",
@@ -210,7 +210,7 @@ characters.data={
description="END OF TRANSMISSION BLOCK",
direction="bn",
linebreak="cm",
- unicodeslot=0x0017,
+ unicodeslot=0x17,
},
{
adobename="controlCAN",
@@ -218,7 +218,7 @@ characters.data={
description="CANCEL",
direction="bn",
linebreak="cm",
- unicodeslot=0x0018,
+ unicodeslot=0x18,
},
{
adobename="controlEM",
@@ -226,7 +226,7 @@ characters.data={
description="END OF MEDIUM",
direction="bn",
linebreak="cm",
- unicodeslot=0x0019,
+ unicodeslot=0x19,
},
{
adobename="controlSUB",
@@ -234,7 +234,7 @@ characters.data={
description="SUBSTITUTE",
direction="bn",
linebreak="cm",
- unicodeslot=0x001A,
+ unicodeslot=0x1A,
},
{
adobename="controlESC",
@@ -242,7 +242,7 @@ characters.data={
description="ESCAPE",
direction="bn",
linebreak="cm",
- unicodeslot=0x001B,
+ unicodeslot=0x1B,
},
{
adobename="controlFS",
@@ -250,7 +250,7 @@ characters.data={
description="INFORMATION SEPARATOR FOUR",
direction="b",
linebreak="cm",
- unicodeslot=0x001C,
+ unicodeslot=0x1C,
},
{
adobename="controlGS",
@@ -258,7 +258,7 @@ characters.data={
description="INFORMATION SEPARATOR THREE",
direction="b",
linebreak="cm",
- unicodeslot=0x001D,
+ unicodeslot=0x1D,
},
{
adobename="controlRS",
@@ -266,7 +266,7 @@ characters.data={
description="INFORMATION SEPARATOR TWO",
direction="b",
linebreak="cm",
- unicodeslot=0x001E,
+ unicodeslot=0x1E,
},
{
adobename="controlUS",
@@ -274,7 +274,7 @@ characters.data={
description="INFORMATION SEPARATOR ONE",
direction="s",
linebreak="cm",
- unicodeslot=0x001F,
+ unicodeslot=0x1F,
},
{
adobename="space",
@@ -283,7 +283,7 @@ characters.data={
description="SPACE",
direction="ws",
linebreak="sp",
- unicodeslot=0x0020,
+ unicodeslot=0x20,
},
{
adobename="exclam",
@@ -293,7 +293,7 @@ characters.data={
direction="on",
linebreak="ex",
mathclass="close",
- unicodeslot=0x0021,
+ unicodeslot=0x21,
},
{
adobename="quotedbl",
@@ -304,16 +304,19 @@ characters.data={
direction="on",
linebreak="qu",
mathclass="default",
- unicodeslot=0x0022,
+ unicodeslot=0x22,
},
{
adobename="numbersign",
category="po",
cjkwd="na",
+ contextname="texthash",
description="NUMBER SIGN",
direction="et",
linebreak="al",
- unicodeslot=0x0023,
+ mathclass="binary",
+ mathname="mathhash",
+ unicodeslot=0x23,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -327,26 +330,33 @@ characters.data={
description="DOLLAR SIGN",
direction="et",
linebreak="pr",
- unicodeslot=0x0024,
+ mathclass="binary",
+ mathname="mathdollar",
+ unicodeslot=0x24,
},
{
adobename="percent",
category="po",
cjkwd="na",
- contextname="percent",
+ contextname="textpercent",
description="PERCENT SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x0025,
+ mathclass="binary",
+ mathname="mathpercent",
+ unicodeslot=0x25,
},
{
adobename="ampersand",
category="po",
cjkwd="na",
+ contextname="textampersand",
description="AMPERSAND",
direction="on",
linebreak="al",
- unicodeslot=0x0026,
+ mathclass="binary",
+ mathname="mathampersand",
+ unicodeslot=0x26,
},
{
adobename="quotesingle",
@@ -357,7 +367,7 @@ characters.data={
direction="on",
linebreak="qu",
mathclass="default",
- unicodeslot=0x0027,
+ unicodeslot=0x27,
},
{
adobename="parenleft",
@@ -368,9 +378,9 @@ characters.data={
linebreak="op",
mathclass="open",
mathname="lparent",
- mirror=0x0029,
+ mirror=0x29,
textclass="open",
- unicodeslot=0x0028,
+ unicodeslot=0x28,
},
{
adobename="parenright",
@@ -381,9 +391,9 @@ characters.data={
linebreak="cp",
mathclass="close",
mathname="rparent",
- mirror=0x0028,
+ mirror=0x28,
textclass="close",
- unicodeslot=0x0029,
+ unicodeslot=0x29,
},
{
adobename="asterisk",
@@ -395,7 +405,7 @@ characters.data={
mathclass="binary",
mathname="ast",
mathsymbol=0x2217,
- unicodeslot=0x002A,
+ unicodeslot=0x2A,
},
{
adobename="plus",
@@ -405,7 +415,7 @@ characters.data={
direction="es",
linebreak="pr",
mathclass="binary",
- unicodeslot=0x002B,
+ unicodeslot=0x2B,
},
{
adobename="comma",
@@ -416,7 +426,7 @@ characters.data={
direction="cs",
linebreak="is",
mathclass="punctuation",
- unicodeslot=0x002C,
+ unicodeslot=0x2C,
},
{
adobename="hyphen",
@@ -428,7 +438,7 @@ characters.data={
mathextensible="h",
mathfiller="relfill",
mathsymbol=0x2212,
- unicodeslot=0x002D,
+ unicodeslot=0x2D,
},
{
adobename="period",
@@ -446,7 +456,7 @@ characters.data={
name="ldotp",
},
},
- unicodeslot=0x002E,
+ unicodeslot=0x2E,
},
{
adobename="slash",
@@ -459,16 +469,12 @@ characters.data={
mathspec={
{
class="middle",
- -- unicode=0x2044,
- -- unicode=0x2215,
},
{
class="ordinary",
- -- unicode=0x2044,
- -- unicode=0x2215,
},
},
- unicodeslot=0x002F,
+ unicodeslot=0x2F,
},
{
adobename="zero",
@@ -478,7 +484,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0030,
+ unicodeslot=0x30,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -492,7 +498,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0031,
+ unicodeslot=0x31,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -506,7 +512,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0032,
+ unicodeslot=0x32,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -520,7 +526,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0033,
+ unicodeslot=0x33,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -534,7 +540,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0034,
+ unicodeslot=0x34,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -548,7 +554,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0035,
+ unicodeslot=0x35,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -562,7 +568,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0036,
+ unicodeslot=0x36,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -576,7 +582,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0037,
+ unicodeslot=0x37,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -590,7 +596,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0038,
+ unicodeslot=0x38,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -604,7 +610,7 @@ characters.data={
direction="en",
linebreak="nu",
mathclass="number",
- unicodeslot=0x0039,
+ unicodeslot=0x39,
variants={
[0xFE0E]="text style",
[0xFE0F]="emoji style",
@@ -619,7 +625,7 @@ characters.data={
direction="cs",
linebreak="is",
mathclass="relation",
- unicodeslot=0x003A,
+ unicodeslot=0x3A,
},
{
adobename="semicolon",
@@ -629,7 +635,7 @@ characters.data={
direction="on",
linebreak="is",
mathclass="punctuation",
- unicodeslot=0x003B,
+ unicodeslot=0x3B,
},
{
adobename="less",
@@ -640,9 +646,9 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="lt",
- mirror=0x003E,
+ mirror=0x3E,
textclass="open",
- unicodeslot=0x003C,
+ unicodeslot=0x3C,
},
{
adobename="equal",
@@ -664,7 +670,7 @@ characters.data={
name="Relbar",
},
},
- unicodeslot=0x003D,
+ unicodeslot=0x3D,
},
{
adobename="greater",
@@ -675,9 +681,9 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="gt",
- mirror=0x003C,
+ mirror=0x3C,
textclass="close",
- unicodeslot=0x003E,
+ unicodeslot=0x3E,
},
{
adobename="question",
@@ -687,7 +693,7 @@ characters.data={
direction="on",
linebreak="ex",
mathclass="close",
- unicodeslot=0x003F,
+ unicodeslot=0x3F,
},
{
adobename="at",
@@ -697,7 +703,7 @@ characters.data={
description="COMMERCIAL AT",
direction="on",
linebreak="al",
- unicodeslot=0x0040,
+ unicodeslot=0x40,
},
{
adobename="A",
@@ -705,10 +711,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER A",
direction="l",
- lccode=0x0061,
+ lccode=0x61,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0041,
+ unicodeslot=0x41,
},
{
adobename="B",
@@ -716,10 +722,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER B",
direction="l",
- lccode=0x0062,
+ lccode=0x62,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0042,
+ unicodeslot=0x42,
},
{
adobename="C",
@@ -727,10 +733,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER C",
direction="l",
- lccode=0x0063,
+ lccode=0x63,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0043,
+ unicodeslot=0x43,
},
{
adobename="D",
@@ -738,10 +744,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER D",
direction="l",
- lccode=0x0064,
+ lccode=0x64,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0044,
+ unicodeslot=0x44,
},
{
adobename="E",
@@ -749,10 +755,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER E",
direction="l",
- lccode=0x0065,
+ lccode=0x65,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0045,
+ unicodeslot=0x45,
},
{
adobename="F",
@@ -760,10 +766,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER F",
direction="l",
- lccode=0x0066,
+ lccode=0x66,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0046,
+ unicodeslot=0x46,
},
{
adobename="G",
@@ -771,10 +777,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER G",
direction="l",
- lccode=0x0067,
+ lccode=0x67,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0047,
+ unicodeslot=0x47,
},
{
adobename="H",
@@ -782,10 +788,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER H",
direction="l",
- lccode=0x0068,
+ lccode=0x68,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0048,
+ unicodeslot=0x48,
},
{
adobename="I",
@@ -794,10 +800,10 @@ characters.data={
contextname="dotlessI",
description="LATIN CAPITAL LETTER I",
direction="l",
- lccode=0x0069,
+ lccode=0x69,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0049,
+ unicodeslot=0x49,
},
{
adobename="J",
@@ -806,10 +812,10 @@ characters.data={
contextname="dotlessJ",
description="LATIN CAPITAL LETTER J",
direction="l",
- lccode=0x006A,
+ lccode=0x6A,
linebreak="al",
mathclass="variable",
- unicodeslot=0x004A,
+ unicodeslot=0x4A,
},
{
adobename="K",
@@ -817,10 +823,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER K",
direction="l",
- lccode=0x006B,
+ lccode=0x6B,
linebreak="al",
mathclass="variable",
- unicodeslot=0x004B,
+ unicodeslot=0x4B,
},
{
adobename="L",
@@ -828,10 +834,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER L",
direction="l",
- lccode=0x006C,
+ lccode=0x6C,
linebreak="al",
mathclass="variable",
- unicodeslot=0x004C,
+ unicodeslot=0x4C,
},
{
adobename="M",
@@ -839,10 +845,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER M",
direction="l",
- lccode=0x006D,
+ lccode=0x6D,
linebreak="al",
mathclass="variable",
- unicodeslot=0x004D,
+ unicodeslot=0x4D,
},
{
adobename="N",
@@ -850,10 +856,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER N",
direction="l",
- lccode=0x006E,
+ lccode=0x6E,
linebreak="al",
mathclass="variable",
- unicodeslot=0x004E,
+ unicodeslot=0x4E,
},
{
adobename="O",
@@ -861,10 +867,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER O",
direction="l",
- lccode=0x006F,
+ lccode=0x6F,
linebreak="al",
mathclass="variable",
- unicodeslot=0x004F,
+ unicodeslot=0x4F,
},
{
adobename="P",
@@ -872,10 +878,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER P",
direction="l",
- lccode=0x0070,
+ lccode=0x70,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0050,
+ unicodeslot=0x50,
},
{
adobename="Q",
@@ -883,10 +889,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER Q",
direction="l",
- lccode=0x0071,
+ lccode=0x71,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0051,
+ unicodeslot=0x51,
},
{
adobename="R",
@@ -894,10 +900,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER R",
direction="l",
- lccode=0x0072,
+ lccode=0x72,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0052,
+ unicodeslot=0x52,
},
{
adobename="S",
@@ -905,10 +911,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER S",
direction="l",
- lccode=0x0073,
+ lccode=0x73,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0053,
+ unicodeslot=0x53,
},
{
adobename="T",
@@ -916,10 +922,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER T",
direction="l",
- lccode=0x0074,
+ lccode=0x74,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0054,
+ unicodeslot=0x54,
},
{
adobename="U",
@@ -927,10 +933,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER U",
direction="l",
- lccode=0x0075,
+ lccode=0x75,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0055,
+ unicodeslot=0x55,
},
{
adobename="V",
@@ -938,10 +944,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER V",
direction="l",
- lccode=0x0076,
+ lccode=0x76,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0056,
+ unicodeslot=0x56,
},
{
adobename="W",
@@ -949,10 +955,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER W",
direction="l",
- lccode=0x0077,
+ lccode=0x77,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0057,
+ unicodeslot=0x57,
},
{
adobename="X",
@@ -960,10 +966,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER X",
direction="l",
- lccode=0x0078,
+ lccode=0x78,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0058,
+ unicodeslot=0x58,
},
{
adobename="Y",
@@ -971,10 +977,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER Y",
direction="l",
- lccode=0x0079,
+ lccode=0x79,
linebreak="al",
mathclass="variable",
- unicodeslot=0x0059,
+ unicodeslot=0x59,
},
{
adobename="Z",
@@ -982,10 +988,10 @@ characters.data={
cjkwd="na",
description="LATIN CAPITAL LETTER Z",
direction="l",
- lccode=0x007A,
+ lccode=0x7A,
linebreak="al",
mathclass="variable",
- unicodeslot=0x005A,
+ unicodeslot=0x5A,
},
{
adobename="bracketleft",
@@ -996,9 +1002,9 @@ characters.data={
linebreak="op",
mathclass="open",
mathname="lbracket",
- mirror=0x005D,
+ mirror=0x5D,
textclass="open",
- unicodeslot=0x005B,
+ unicodeslot=0x5B,
},
{
adobename="backslash",
@@ -1010,7 +1016,7 @@ characters.data={
linebreak="pr",
mathclass="nothing",
mathname="backslash",
- unicodeslot=0x005C,
+ unicodeslot=0x5C,
},
{
adobename="bracketright",
@@ -1021,9 +1027,9 @@ characters.data={
linebreak="cp",
mathclass="close",
mathname="rbracket",
- mirror=0x005B,
+ mirror=0x5B,
textclass="close",
- unicodeslot=0x005D,
+ unicodeslot=0x5D,
},
{
adobename="asciicircum",
@@ -1034,7 +1040,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="topaccent",
- unicodeslot=0x005E,
+ mathname="Hat",
+ unicodeslot=0x5E,
},
{
adobename="underscore",
@@ -1044,7 +1051,7 @@ characters.data={
description="LOW LINE",
direction="on",
linebreak="al",
- unicodeslot=0x005F,
+ unicodeslot=0x5F,
},
{
adobename="grave",
@@ -1056,7 +1063,7 @@ characters.data={
linebreak="al",
mathclass="topaccent",
mathname="grave",
- unicodeslot=0x0060,
+ unicodeslot=0x60,
},
{
adobename="a",
@@ -1066,8 +1073,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0041,
- unicodeslot=0x0061,
+ uccode=0x41,
+ unicodeslot=0x61,
},
{
adobename="b",
@@ -1077,8 +1084,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0042,
- unicodeslot=0x0062,
+ uccode=0x42,
+ unicodeslot=0x62,
},
{
adobename="c",
@@ -1088,8 +1095,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0043,
- unicodeslot=0x0063,
+ uccode=0x43,
+ unicodeslot=0x63,
},
{
adobename="d",
@@ -1099,8 +1106,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0044,
- unicodeslot=0x0064,
+ uccode=0x44,
+ unicodeslot=0x64,
},
{
adobename="e",
@@ -1110,8 +1117,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0045,
- unicodeslot=0x0065,
+ uccode=0x45,
+ unicodeslot=0x65,
},
{
adobename="f",
@@ -1121,8 +1128,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0046,
- unicodeslot=0x0066,
+ uccode=0x46,
+ unicodeslot=0x66,
},
{
adobename="g",
@@ -1132,8 +1139,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0047,
- unicodeslot=0x0067,
+ uccode=0x47,
+ unicodeslot=0x67,
},
{
adobename="h",
@@ -1143,8 +1150,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0048,
- unicodeslot=0x0068,
+ uccode=0x48,
+ unicodeslot=0x68,
},
{
adobename="i",
@@ -1155,8 +1162,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0049,
- unicodeslot=0x0069,
+ uccode=0x49,
+ unicodeslot=0x69,
},
{
adobename="j",
@@ -1166,8 +1173,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x004A,
- unicodeslot=0x006A,
+ uccode=0x4A,
+ unicodeslot=0x6A,
},
{
adobename="k",
@@ -1177,8 +1184,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x004B,
- unicodeslot=0x006B,
+ uccode=0x4B,
+ unicodeslot=0x6B,
},
{
adobename="l",
@@ -1188,8 +1195,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x004C,
- unicodeslot=0x006C,
+ uccode=0x4C,
+ unicodeslot=0x6C,
},
{
adobename="m",
@@ -1199,8 +1206,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x004D,
- unicodeslot=0x006D,
+ uccode=0x4D,
+ unicodeslot=0x6D,
},
{
adobename="n",
@@ -1210,8 +1217,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x004E,
- unicodeslot=0x006E,
+ uccode=0x4E,
+ unicodeslot=0x6E,
},
{
adobename="o",
@@ -1221,8 +1228,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x004F,
- unicodeslot=0x006F,
+ uccode=0x4F,
+ unicodeslot=0x6F,
},
{
adobename="p",
@@ -1232,8 +1239,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0050,
- unicodeslot=0x0070,
+ uccode=0x50,
+ unicodeslot=0x70,
},
{
adobename="q",
@@ -1243,8 +1250,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0051,
- unicodeslot=0x0071,
+ uccode=0x51,
+ unicodeslot=0x71,
},
{
adobename="r",
@@ -1254,8 +1261,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0052,
- unicodeslot=0x0072,
+ uccode=0x52,
+ unicodeslot=0x72,
},
{
adobename="s",
@@ -1265,8 +1272,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0053,
- unicodeslot=0x0073,
+ uccode=0x53,
+ unicodeslot=0x73,
},
{
adobename="t",
@@ -1276,8 +1283,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0054,
- unicodeslot=0x0074,
+ uccode=0x54,
+ unicodeslot=0x74,
},
{
adobename="u",
@@ -1287,8 +1294,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0055,
- unicodeslot=0x0075,
+ uccode=0x55,
+ unicodeslot=0x75,
},
{
adobename="v",
@@ -1298,8 +1305,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0056,
- unicodeslot=0x0076,
+ uccode=0x56,
+ unicodeslot=0x76,
},
{
adobename="w",
@@ -1309,8 +1316,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0057,
- unicodeslot=0x0077,
+ uccode=0x57,
+ unicodeslot=0x77,
},
{
adobename="x",
@@ -1320,8 +1327,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0058,
- unicodeslot=0x0078,
+ uccode=0x58,
+ unicodeslot=0x78,
},
{
adobename="y",
@@ -1331,8 +1338,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x0059,
- unicodeslot=0x0079,
+ uccode=0x59,
+ unicodeslot=0x79,
},
{
adobename="z",
@@ -1342,8 +1349,8 @@ characters.data={
direction="l",
linebreak="al",
mathclass="variable",
- uccode=0x005A,
- unicodeslot=0x007A,
+ uccode=0x5A,
+ unicodeslot=0x7A,
},
{
adobename="braceleft",
@@ -1355,9 +1362,9 @@ characters.data={
linebreak="op",
mathclass="open",
mathname="lbrace",
- mirror=0x007D,
+ mirror=0x7D,
textclass="open",
- unicodeslot=0x007B,
+ unicodeslot=0x7B,
},
{
adobename="verticalbar",
@@ -1394,7 +1401,7 @@ characters.data={
name="singleverticalbar",
},
},
- unicodeslot=0x007C,
+ unicodeslot=0x7C,
},
{
adobename="braceright",
@@ -1406,9 +1413,9 @@ characters.data={
linebreak="cl",
mathclass="close",
mathname="rbrace",
- mirror=0x007B,
+ mirror=0x7B,
textclass="close",
- unicodeslot=0x007D,
+ unicodeslot=0x7D,
},
{
adobename="asciitilde",
@@ -1418,7 +1425,7 @@ characters.data={
description="TILDE",
direction="on",
linebreak="al",
- unicodeslot=0x007E,
+ unicodeslot=0x7E,
},
{
adobename="controlDEL",
@@ -1426,231 +1433,231 @@ characters.data={
description="DELETE",
direction="bn",
linebreak="cm",
- unicodeslot=0x007F,
+ unicodeslot=0x7F,
},
{
category="cc",
description="NONE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0080,
+ unicodeslot=0x80,
},
{
category="cc",
description="NONE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0081,
+ unicodeslot=0x81,
},
{
category="cc",
description="BREAK PERMITTED HERE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0082,
+ unicodeslot=0x82,
},
{
category="cc",
description="NO BREAK HERE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0083,
+ unicodeslot=0x83,
},
{
category="cc",
description="NONE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0084,
+ unicodeslot=0x84,
},
{
category="cc",
description="NEXT LINE (NEL)",
direction="b",
linebreak="nl",
- unicodeslot=0x0085,
+ unicodeslot=0x85,
},
{
category="cc",
description="START OF SELECTED AREA",
direction="bn",
linebreak="cm",
- unicodeslot=0x0086,
+ unicodeslot=0x86,
},
{
category="cc",
description="END OF SELECTED AREA",
direction="bn",
linebreak="cm",
- unicodeslot=0x0087,
+ unicodeslot=0x87,
},
{
category="cc",
description="CHARACTER TABULATION SET",
direction="bn",
linebreak="cm",
- unicodeslot=0x0088,
+ unicodeslot=0x88,
},
{
category="cc",
description="CHARACTER TABULATION WITH JUSTIFICATION",
direction="bn",
linebreak="cm",
- unicodeslot=0x0089,
+ unicodeslot=0x89,
},
{
category="cc",
description="LINE TABULATION SET",
direction="bn",
linebreak="cm",
- unicodeslot=0x008A,
+ unicodeslot=0x8A,
},
{
category="cc",
description="PARTIAL LINE FORWARD",
direction="bn",
linebreak="cm",
- unicodeslot=0x008B,
+ unicodeslot=0x8B,
},
{
category="cc",
description="PARTIAL LINE BACKWARD",
direction="bn",
linebreak="cm",
- unicodeslot=0x008C,
+ unicodeslot=0x8C,
},
{
category="cc",
description="REVERSE LINE FEED",
direction="bn",
linebreak="cm",
- unicodeslot=0x008D,
+ unicodeslot=0x8D,
},
{
category="cc",
description="SINGLE SHIFT TWO",
direction="bn",
linebreak="cm",
- unicodeslot=0x008E,
+ unicodeslot=0x8E,
},
{
category="cc",
description="SINGLE SHIFT THREE",
direction="bn",
linebreak="cm",
- unicodeslot=0x008F,
+ unicodeslot=0x8F,
},
{
category="cc",
description="DEVICE CONTROL STRING",
direction="bn",
linebreak="cm",
- unicodeslot=0x0090,
+ unicodeslot=0x90,
},
{
category="cc",
description="PRIVATE USE ONE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0091,
+ unicodeslot=0x91,
},
{
category="cc",
description="PRIVATE USE TWO",
direction="bn",
linebreak="cm",
- unicodeslot=0x0092,
+ unicodeslot=0x92,
},
{
category="cc",
description="SET TRANSMIT STATE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0093,
+ unicodeslot=0x93,
},
{
category="cc",
description="CANCEL CHARACTER",
direction="bn",
linebreak="cm",
- unicodeslot=0x0094,
+ unicodeslot=0x94,
},
{
category="cc",
description="MESSAGE WAITING",
direction="bn",
linebreak="cm",
- unicodeslot=0x0095,
+ unicodeslot=0x95,
},
{
category="cc",
description="START OF GUARDED AREA",
direction="bn",
linebreak="cm",
- unicodeslot=0x0096,
+ unicodeslot=0x96,
},
{
category="cc",
description="END OF GUARDED AREA",
direction="bn",
linebreak="cm",
- unicodeslot=0x0097,
+ unicodeslot=0x97,
},
{
category="cc",
description="START OF STRING",
direction="bn",
linebreak="cm",
- unicodeslot=0x0098,
+ unicodeslot=0x98,
},
{
category="cc",
description="NONE",
direction="bn",
linebreak="cm",
- unicodeslot=0x0099,
+ unicodeslot=0x99,
},
{
category="cc",
description="SINGLE CHARACTER INTRODUCER",
direction="bn",
linebreak="cm",
- unicodeslot=0x009A,
+ unicodeslot=0x9A,
},
{
category="cc",
description="CONTROL SEQUENCE INTRODUCER",
direction="bn",
linebreak="cm",
- unicodeslot=0x009B,
+ unicodeslot=0x9B,
},
{
category="cc",
description="STRING TERMINATOR",
direction="bn",
linebreak="cm",
- unicodeslot=0x009C,
+ unicodeslot=0x9C,
},
{
category="cc",
description="OPERATING SYSTEM COMMAND",
direction="bn",
linebreak="cm",
- unicodeslot=0x009D,
+ unicodeslot=0x9D,
},
{
category="cc",
description="PRIVACY MESSAGE",
direction="bn",
linebreak="cm",
- unicodeslot=0x009E,
+ unicodeslot=0x9E,
},
{
category="cc",
description="APPLICATION PROGRAM COMMAND",
direction="bn",
linebreak="cm",
- unicodeslot=0x009F,
+ unicodeslot=0x9F,
},
{
adobename="nonbreakingspace",
@@ -1659,8 +1666,8 @@ characters.data={
description="NO-BREAK SPACE",
direction="cs",
linebreak="gl",
- specials={ "nobreak", 0x0020 },
- unicodeslot=0x00A0,
+ specials={ "nobreak", 0x20 },
+ unicodeslot=0xA0,
},
{
adobename="exclamdown",
@@ -1670,7 +1677,7 @@ characters.data={
description="INVERTED EXCLAMATION MARK",
direction="on",
linebreak="op",
- unicodeslot=0x00A1,
+ unicodeslot=0xA1,
},
{
adobename="cent",
@@ -1680,7 +1687,7 @@ characters.data={
description="CENT SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x00A2,
+ unicodeslot=0xA2,
},
{
adobename="sterling",
@@ -1690,7 +1697,7 @@ characters.data={
description="POUND SIGN",
direction="et",
linebreak="pr",
- unicodeslot=0x00A3,
+ unicodeslot=0xA3,
},
{
adobename="currency",
@@ -1700,7 +1707,7 @@ characters.data={
description="CURRENCY SIGN",
direction="et",
linebreak="pr",
- unicodeslot=0x00A4,
+ unicodeslot=0xA4,
},
{
adobename="yen",
@@ -1712,7 +1719,7 @@ characters.data={
linebreak="pr",
mathclass="nothing",
mathname="yen",
- unicodeslot=0x00A5,
+ unicodeslot=0xA5,
},
{
adobename="brokenbar",
@@ -1722,7 +1729,7 @@ characters.data={
description="BROKEN BAR",
direction="on",
linebreak="al",
- unicodeslot=0x00A6,
+ unicodeslot=0xA6,
},
{
adobename="section",
@@ -1734,7 +1741,7 @@ characters.data={
linebreak="ai",
mathclass="box",
mathname="S",
- unicodeslot=0x00A7,
+ unicodeslot=0xA7,
},
{
adobename="dieresis",
@@ -1746,8 +1753,8 @@ characters.data={
linebreak="ai",
mathclass="topaccent",
mathname="ddot",
- specials={ "compat", 0x0020, 0x0308 },
- unicodeslot=0x00A8,
+ specials={ "compat", 0x20, 0x308 },
+ unicodeslot=0xA8,
},
{
adobename="copyright",
@@ -1756,7 +1763,7 @@ characters.data={
description="COPYRIGHT SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x00A9,
+ unicodeslot=0xA9,
},
{
adobename="ordfeminine",
@@ -1766,8 +1773,8 @@ characters.data={
description="FEMININE ORDINAL INDICATOR",
direction="l",
linebreak="ai",
- specials={ "super", 0x0061 },
- unicodeslot=0x00AA,
+ specials={ "super", 0x61 },
+ unicodeslot=0xAA,
},
{
adobename="guillemotleft",
@@ -1776,9 +1783,9 @@ characters.data={
description="LEFT-POINTING DOUBLE ANGLE QUOTATION MARK",
direction="on",
linebreak="qu",
- mirror=0x00BB,
+ mirror=0xBB,
textclass="open",
- unicodeslot=0x00AB,
+ unicodeslot=0xAB,
},
{
adobename="logicalnot",
@@ -1798,7 +1805,7 @@ characters.data={
name="neg",
},
},
- unicodeslot=0x00AC,
+ unicodeslot=0xAC,
},
{
adobename="softhyphen",
@@ -1808,7 +1815,7 @@ characters.data={
description="SOFT HYPHEN",
direction="bn",
linebreak="ba",
- unicodeslot=0x00AD,
+ unicodeslot=0xAD,
},
{
adobename="registered",
@@ -1818,7 +1825,7 @@ characters.data={
description="REGISTERED SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x00AE,
+ unicodeslot=0xAE,
},
{
adobename="macron",
@@ -1829,9 +1836,11 @@ characters.data={
direction="on",
linebreak="al",
mathclass="topaccent",
+ mathfiller="barfill",
+ mathmleq=0x203E,
mathname="bar",
- specials={ "compat", 0x0020, 0x0304 },
- unicodeslot=0x00AF,
+ specials={ "compat", 0x20, 0x304 },
+ unicodeslot=0xAF,
},
{
adobename="degree",
@@ -1841,7 +1850,7 @@ characters.data={
description="DEGREE SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x00B0,
+ unicodeslot=0xB0,
},
{
adobename="plusminus",
@@ -1853,7 +1862,7 @@ characters.data={
linebreak="pr",
mathclass="binary",
mathname="pm",
- unicodeslot=0x00B1,
+ unicodeslot=0xB1,
},
{
adobename="twosuperior",
@@ -1863,8 +1872,8 @@ characters.data={
description="SUPERSCRIPT TWO",
direction="en",
linebreak="ai",
- specials={ "super", 0x0032 },
- unicodeslot=0x00B2,
+ specials={ "super", 0x32 },
+ unicodeslot=0xB2,
},
{
adobename="threesuperior",
@@ -1874,8 +1883,8 @@ characters.data={
description="SUPERSCRIPT THREE",
direction="en",
linebreak="ai",
- specials={ "super", 0x0033 },
- unicodeslot=0x00B3,
+ specials={ "super", 0x33 },
+ unicodeslot=0xB3,
},
{
adobename="acute",
@@ -1887,8 +1896,8 @@ characters.data={
linebreak="bb",
mathclass="topaccent",
mathname="acute",
- specials={ "compat", 0x0020, 0x0301 },
- unicodeslot=0x00B4,
+ specials={ "compat", 0x20, 0x301 },
+ unicodeslot=0xB4,
},
{
adobename="mu1",
@@ -1897,9 +1906,9 @@ characters.data={
description="MICRO SIGN",
direction="l",
linebreak="al",
- specials={ "compat", 0x03BC },
- uccode=0x039C,
- unicodeslot=0x00B5,
+ specials={ "compat", 0x3BC },
+ uccode=0x39C,
+ unicodeslot=0xB5,
},
{
adobename="paragraph",
@@ -1911,7 +1920,7 @@ characters.data={
linebreak="ai",
mathclass="box",
mathname="P",
- unicodeslot=0x00B6,
+ unicodeslot=0xB6,
},
{
adobename="periodcentered",
@@ -1923,7 +1932,7 @@ characters.data={
linebreak="ai",
mathclass="binary",
mathname="centerdot",
- unicodeslot=0x00B7,
+ unicodeslot=0xB7,
},
{
adobename="cedilla",
@@ -1933,8 +1942,8 @@ characters.data={
description="CEDILLA",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0020, 0x0327 },
- unicodeslot=0x00B8,
+ specials={ "compat", 0x20, 0x327 },
+ unicodeslot=0xB8,
},
{
adobename="onesuperior",
@@ -1944,8 +1953,8 @@ characters.data={
description="SUPERSCRIPT ONE",
direction="en",
linebreak="ai",
- specials={ "super", 0x0031 },
- unicodeslot=0x00B9,
+ specials={ "super", 0x31 },
+ unicodeslot=0xB9,
},
{
adobename="ordmasculine",
@@ -1955,8 +1964,8 @@ characters.data={
description="MASCULINE ORDINAL INDICATOR",
direction="l",
linebreak="ai",
- specials={ "super", 0x006F },
- unicodeslot=0x00BA,
+ specials={ "super", 0x6F },
+ unicodeslot=0xBA,
},
{
adobename="guillemotright",
@@ -1965,9 +1974,9 @@ characters.data={
description="RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK",
direction="on",
linebreak="qu",
- mirror=0x00AB,
+ mirror=0xAB,
textclass="close",
- unicodeslot=0x00BB,
+ unicodeslot=0xBB,
},
{
adobename="onequarter",
@@ -1977,8 +1986,8 @@ characters.data={
description="VULGAR FRACTION ONE QUARTER",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0031, 0x2044, 0x0034 },
- unicodeslot=0x00BC,
+ specials={ "fraction", 0x31, 0x2044, 0x34 },
+ unicodeslot=0xBC,
},
{
adobename="onehalf",
@@ -1988,8 +1997,8 @@ characters.data={
description="VULGAR FRACTION ONE HALF",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0031, 0x2044, 0x0032 },
- unicodeslot=0x00BD,
+ specials={ "fraction", 0x31, 0x2044, 0x32 },
+ unicodeslot=0xBD,
},
{
adobename="threequarters",
@@ -1999,8 +2008,8 @@ characters.data={
description="VULGAR FRACTION THREE QUARTERS",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0033, 0x2044, 0x0034 },
- unicodeslot=0x00BE,
+ specials={ "fraction", 0x33, 0x2044, 0x34 },
+ unicodeslot=0xBE,
},
{
adobename="questiondown",
@@ -2010,7 +2019,7 @@ characters.data={
description="INVERTED QUESTION MARK",
direction="on",
linebreak="op",
- unicodeslot=0x00BF,
+ unicodeslot=0xBF,
},
{
adobename="Agrave",
@@ -2018,11 +2027,11 @@ characters.data={
contextname="Agrave",
description="LATIN CAPITAL LETTER A WITH GRAVE",
direction="l",
- lccode=0x00E0,
+ lccode=0xE0,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0300 },
- unicodeslot=0x00C0,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x300 },
+ unicodeslot=0xC0,
},
{
adobename="Aacute",
@@ -2030,11 +2039,11 @@ characters.data={
contextname="Aacute",
description="LATIN CAPITAL LETTER A WITH ACUTE",
direction="l",
- lccode=0x00E1,
+ lccode=0xE1,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0301 },
- unicodeslot=0x00C1,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x301 },
+ unicodeslot=0xC1,
},
{
adobename="Acircumflex",
@@ -2042,11 +2051,11 @@ characters.data={
contextname="Acircumflex",
description="LATIN CAPITAL LETTER A WITH CIRCUMFLEX",
direction="l",
- lccode=0x00E2,
+ lccode=0xE2,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0302 },
- unicodeslot=0x00C2,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x302 },
+ unicodeslot=0xC2,
},
{
adobename="Atilde",
@@ -2054,11 +2063,11 @@ characters.data={
contextname="Atilde",
description="LATIN CAPITAL LETTER A WITH TILDE",
direction="l",
- lccode=0x00E3,
+ lccode=0xE3,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0303 },
- unicodeslot=0x00C3,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x303 },
+ unicodeslot=0xC3,
},
{
adobename="Adieresis",
@@ -2066,11 +2075,11 @@ characters.data={
contextname="Adiaeresis",
description="LATIN CAPITAL LETTER A WITH DIAERESIS",
direction="l",
- lccode=0x00E4,
+ lccode=0xE4,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0308 },
- unicodeslot=0x00C4,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x308 },
+ unicodeslot=0xC4,
},
{
adobename="Aring",
@@ -2078,11 +2087,11 @@ characters.data={
contextname="Aring",
description="LATIN CAPITAL LETTER A WITH RING ABOVE",
direction="l",
- lccode=0x00E5,
+ lccode=0xE5,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x030A },
- unicodeslot=0x00C5,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x30A },
+ unicodeslot=0xC5,
},
{
adobename="AE",
@@ -2091,10 +2100,10 @@ characters.data={
contextname="AEligature",
description="LATIN CAPITAL LETTER AE",
direction="l",
- lccode=0x00E6,
+ lccode=0xE6,
linebreak="al",
- shcode={ 0x0041, 0x0045 },
- unicodeslot=0x00C6,
+ shcode={ 0x41, 0x45 },
+ unicodeslot=0xC6,
},
{
adobename="Ccedilla",
@@ -2102,11 +2111,11 @@ characters.data={
contextname="Ccedilla",
description="LATIN CAPITAL LETTER C WITH CEDILLA",
direction="l",
- lccode=0x00E7,
+ lccode=0xE7,
linebreak="al",
- shcode=0x0043,
- specials={ "char", 0x0043, 0x0327 },
- unicodeslot=0x00C7,
+ shcode=0x43,
+ specials={ "char", 0x43, 0x327 },
+ unicodeslot=0xC7,
},
{
adobename="Egrave",
@@ -2114,11 +2123,11 @@ characters.data={
contextname="Egrave",
description="LATIN CAPITAL LETTER E WITH GRAVE",
direction="l",
- lccode=0x00E8,
+ lccode=0xE8,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0300 },
- unicodeslot=0x00C8,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x300 },
+ unicodeslot=0xC8,
},
{
adobename="Eacute",
@@ -2126,11 +2135,11 @@ characters.data={
contextname="Eacute",
description="LATIN CAPITAL LETTER E WITH ACUTE",
direction="l",
- lccode=0x00E9,
+ lccode=0xE9,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0301 },
- unicodeslot=0x00C9,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x301 },
+ unicodeslot=0xC9,
},
{
adobename="Ecircumflex",
@@ -2138,11 +2147,11 @@ characters.data={
contextname="Ecircumflex",
description="LATIN CAPITAL LETTER E WITH CIRCUMFLEX",
direction="l",
- lccode=0x00EA,
+ lccode=0xEA,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0302 },
- unicodeslot=0x00CA,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x302 },
+ unicodeslot=0xCA,
},
{
adobename="Edieresis",
@@ -2150,11 +2159,11 @@ characters.data={
contextname="Ediaeresis",
description="LATIN CAPITAL LETTER E WITH DIAERESIS",
direction="l",
- lccode=0x00EB,
+ lccode=0xEB,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0308 },
- unicodeslot=0x00CB,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x308 },
+ unicodeslot=0xCB,
},
{
adobename="Igrave",
@@ -2162,11 +2171,11 @@ characters.data={
contextname="Igrave",
description="LATIN CAPITAL LETTER I WITH GRAVE",
direction="l",
- lccode=0x00EC,
+ lccode=0xEC,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0300 },
- unicodeslot=0x00CC,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x300 },
+ unicodeslot=0xCC,
},
{
adobename="Iacute",
@@ -2174,11 +2183,11 @@ characters.data={
contextname="Iacute",
description="LATIN CAPITAL LETTER I WITH ACUTE",
direction="l",
- lccode=0x00ED,
+ lccode=0xED,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0301 },
- unicodeslot=0x00CD,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x301 },
+ unicodeslot=0xCD,
},
{
adobename="Icircumflex",
@@ -2186,11 +2195,11 @@ characters.data={
contextname="Icircumflex",
description="LATIN CAPITAL LETTER I WITH CIRCUMFLEX",
direction="l",
- lccode=0x00EE,
+ lccode=0xEE,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0302 },
- unicodeslot=0x00CE,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x302 },
+ unicodeslot=0xCE,
},
{
adobename="Idieresis",
@@ -2198,11 +2207,11 @@ characters.data={
contextname="Idiaeresis",
description="LATIN CAPITAL LETTER I WITH DIAERESIS",
direction="l",
- lccode=0x00EF,
+ lccode=0xEF,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0308 },
- unicodeslot=0x00CF,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x308 },
+ unicodeslot=0xCF,
},
{
adobename="Eth",
@@ -2211,9 +2220,9 @@ characters.data={
contextname="Eth",
description="LATIN CAPITAL LETTER ETH",
direction="l",
- lccode=0x00F0,
+ lccode=0xF0,
linebreak="al",
- unicodeslot=0x00D0,
+ unicodeslot=0xD0,
},
{
adobename="Ntilde",
@@ -2221,11 +2230,11 @@ characters.data={
contextname="Ntilde",
description="LATIN CAPITAL LETTER N WITH TILDE",
direction="l",
- lccode=0x00F1,
+ lccode=0xF1,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0303 },
- unicodeslot=0x00D1,
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x303 },
+ unicodeslot=0xD1,
},
{
adobename="Ograve",
@@ -2233,11 +2242,11 @@ characters.data={
contextname="Ograve",
description="LATIN CAPITAL LETTER O WITH GRAVE",
direction="l",
- lccode=0x00F2,
+ lccode=0xF2,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0300 },
- unicodeslot=0x00D2,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x300 },
+ unicodeslot=0xD2,
},
{
adobename="Oacute",
@@ -2245,11 +2254,11 @@ characters.data={
contextname="Oacute",
description="LATIN CAPITAL LETTER O WITH ACUTE",
direction="l",
- lccode=0x00F3,
+ lccode=0xF3,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0301 },
- unicodeslot=0x00D3,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x301 },
+ unicodeslot=0xD3,
},
{
adobename="Ocircumflex",
@@ -2257,11 +2266,11 @@ characters.data={
contextname="Ocircumflex",
description="LATIN CAPITAL LETTER O WITH CIRCUMFLEX",
direction="l",
- lccode=0x00F4,
+ lccode=0xF4,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0302 },
- unicodeslot=0x00D4,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x302 },
+ unicodeslot=0xD4,
},
{
adobename="Otilde",
@@ -2269,11 +2278,11 @@ characters.data={
contextname="Otilde",
description="LATIN CAPITAL LETTER O WITH TILDE",
direction="l",
- lccode=0x00F5,
+ lccode=0xF5,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0303 },
- unicodeslot=0x00D5,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x303 },
+ unicodeslot=0xD5,
},
{
adobename="Odieresis",
@@ -2281,11 +2290,11 @@ characters.data={
contextname="Odiaeresis",
description="LATIN CAPITAL LETTER O WITH DIAERESIS",
direction="l",
- lccode=0x00F6,
+ lccode=0xF6,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0308 },
- unicodeslot=0x00D6,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x308 },
+ unicodeslot=0xD6,
},
{
adobename="multiply",
@@ -2297,7 +2306,7 @@ characters.data={
linebreak="ai",
mathclass="binary",
mathname="times",
- unicodeslot=0x00D7,
+ unicodeslot=0xD7,
},
{
adobename="Oslash",
@@ -2306,10 +2315,10 @@ characters.data={
contextname="Ostroke",
description="LATIN CAPITAL LETTER O WITH STROKE",
direction="l",
- lccode=0x00F8,
+ lccode=0xF8,
linebreak="al",
- shcode=0x004F,
- unicodeslot=0x00D8,
+ shcode=0x4F,
+ unicodeslot=0xD8,
},
{
adobename="Ugrave",
@@ -2317,11 +2326,11 @@ characters.data={
contextname="Ugrave",
description="LATIN CAPITAL LETTER U WITH GRAVE",
direction="l",
- lccode=0x00F9,
+ lccode=0xF9,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0300 },
- unicodeslot=0x00D9,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x300 },
+ unicodeslot=0xD9,
},
{
adobename="Uacute",
@@ -2329,11 +2338,11 @@ characters.data={
contextname="Uacute",
description="LATIN CAPITAL LETTER U WITH ACUTE",
direction="l",
- lccode=0x00FA,
+ lccode=0xFA,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0301 },
- unicodeslot=0x00DA,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x301 },
+ unicodeslot=0xDA,
},
{
adobename="Ucircumflex",
@@ -2341,11 +2350,11 @@ characters.data={
contextname="Ucircumflex",
description="LATIN CAPITAL LETTER U WITH CIRCUMFLEX",
direction="l",
- lccode=0x00FB,
+ lccode=0xFB,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0302 },
- unicodeslot=0x00DB,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x302 },
+ unicodeslot=0xDB,
},
{
adobename="Udieresis",
@@ -2353,11 +2362,11 @@ characters.data={
contextname="Udiaeresis",
description="LATIN CAPITAL LETTER U WITH DIAERESIS",
direction="l",
- lccode=0x00FC,
+ lccode=0xFC,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0308 },
- unicodeslot=0x00DC,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x308 },
+ unicodeslot=0xDC,
},
{
adobename="Yacute",
@@ -2365,11 +2374,11 @@ characters.data={
contextname="Yacute",
description="LATIN CAPITAL LETTER Y WITH ACUTE",
direction="l",
- lccode=0x00FD,
+ lccode=0xFD,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0301 },
- unicodeslot=0x00DD,
+ shcode=0x59,
+ specials={ "char", 0x59, 0x301 },
+ unicodeslot=0xDD,
},
{
adobename="Thorn",
@@ -2378,9 +2387,9 @@ characters.data={
contextname="Thorn",
description="LATIN CAPITAL LETTER THORN",
direction="l",
- lccode=0x00FE,
+ lccode=0xFE,
linebreak="al",
- unicodeslot=0x00DE,
+ unicodeslot=0xDE,
},
{
adobename="germandbls",
@@ -2390,8 +2399,9 @@ characters.data={
description="LATIN SMALL LETTER SHARP S",
direction="l",
linebreak="al",
- uccode={ 0x0053, 0x0053 },
- unicodeslot=0x00DF,
+ shcode={ 0x73, 0x73 },
+ uccode={ 0x53, 0x53 },
+ unicodeslot=0xDF,
},
{
adobename="agrave",
@@ -2401,10 +2411,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0300 },
- uccode=0x00C0,
- unicodeslot=0x00E0,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x300 },
+ uccode=0xC0,
+ unicodeslot=0xE0,
},
{
adobename="aacute",
@@ -2414,10 +2424,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0301 },
- uccode=0x00C1,
- unicodeslot=0x00E1,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x301 },
+ uccode=0xC1,
+ unicodeslot=0xE1,
},
{
adobename="acircumflex",
@@ -2426,10 +2436,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0302 },
- uccode=0x00C2,
- unicodeslot=0x00E2,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x302 },
+ uccode=0xC2,
+ unicodeslot=0xE2,
},
{
adobename="atilde",
@@ -2438,10 +2448,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0303 },
- uccode=0x00C3,
- unicodeslot=0x00E3,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x303 },
+ uccode=0xC3,
+ unicodeslot=0xE3,
},
{
adobename="adieresis",
@@ -2450,10 +2460,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0308 },
- uccode=0x00C4,
- unicodeslot=0x00E4,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x308 },
+ uccode=0xC4,
+ unicodeslot=0xE4,
},
{
adobename="aring",
@@ -2462,10 +2472,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH RING ABOVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x030A },
- uccode=0x00C5,
- unicodeslot=0x00E5,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x30A },
+ uccode=0xC5,
+ unicodeslot=0xE5,
},
{
adobename="ae",
@@ -2475,9 +2485,9 @@ characters.data={
description="LATIN SMALL LETTER AE",
direction="l",
linebreak="al",
- shcode={ 0x0061, 0x0065 },
- uccode=0x00C6,
- unicodeslot=0x00E6,
+ shcode={ 0x61, 0x65 },
+ uccode=0xC6,
+ unicodeslot=0xE6,
},
{
adobename="ccedilla",
@@ -2486,10 +2496,10 @@ characters.data={
description="LATIN SMALL LETTER C WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0063,
- specials={ "char", 0x0063, 0x0327 },
- uccode=0x00C7,
- unicodeslot=0x00E7,
+ shcode=0x63,
+ specials={ "char", 0x63, 0x327 },
+ uccode=0xC7,
+ unicodeslot=0xE7,
},
{
adobename="egrave",
@@ -2499,10 +2509,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0300 },
- uccode=0x00C8,
- unicodeslot=0x00E8,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x300 },
+ uccode=0xC8,
+ unicodeslot=0xE8,
},
{
adobename="eacute",
@@ -2512,10 +2522,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0301 },
- uccode=0x00C9,
- unicodeslot=0x00E9,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x301 },
+ uccode=0xC9,
+ unicodeslot=0xE9,
},
{
adobename="ecircumflex",
@@ -2525,10 +2535,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0302 },
- uccode=0x00CA,
- unicodeslot=0x00EA,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x302 },
+ uccode=0xCA,
+ unicodeslot=0xEA,
},
{
adobename="edieresis",
@@ -2537,10 +2547,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0308 },
- uccode=0x00CB,
- unicodeslot=0x00EB,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x308 },
+ uccode=0xCB,
+ unicodeslot=0xEB,
},
{
adobename="igrave",
@@ -2550,10 +2560,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0300 },
- uccode=0x00CC,
- unicodeslot=0x00EC,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x300 },
+ uccode=0xCC,
+ unicodeslot=0xEC,
},
{
adobename="iacute",
@@ -2563,10 +2573,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0301 },
- uccode=0x00CD,
- unicodeslot=0x00ED,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x301 },
+ uccode=0xCD,
+ unicodeslot=0xED,
},
{
adobename="icircumflex",
@@ -2575,10 +2585,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0302 },
- uccode=0x00CE,
- unicodeslot=0x00EE,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x302 },
+ uccode=0xCE,
+ unicodeslot=0xEE,
},
{
adobename="idieresis",
@@ -2587,10 +2597,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0308 },
- uccode=0x00CF,
- unicodeslot=0x00EF,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x308 },
+ uccode=0xCF,
+ unicodeslot=0xEF,
},
{
adobename="eth",
@@ -2602,8 +2612,8 @@ characters.data={
linebreak="al",
mathclass="ordinary",
mathname="eth",
- uccode=0x00D0,
- unicodeslot=0x00F0,
+ uccode=0xD0,
+ unicodeslot=0xF0,
},
{
adobename="ntilde",
@@ -2612,10 +2622,10 @@ characters.data={
description="LATIN SMALL LETTER N WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0303 },
- uccode=0x00D1,
- unicodeslot=0x00F1,
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x303 },
+ uccode=0xD1,
+ unicodeslot=0xF1,
},
{
adobename="ograve",
@@ -2625,10 +2635,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0300 },
- uccode=0x00D2,
- unicodeslot=0x00F2,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x300 },
+ uccode=0xD2,
+ unicodeslot=0xF2,
},
{
adobename="oacute",
@@ -2638,10 +2648,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0301 },
- uccode=0x00D3,
- unicodeslot=0x00F3,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x301 },
+ uccode=0xD3,
+ unicodeslot=0xF3,
},
{
adobename="ocircumflex",
@@ -2650,10 +2660,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0302 },
- uccode=0x00D4,
- unicodeslot=0x00F4,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x302 },
+ uccode=0xD4,
+ unicodeslot=0xF4,
},
{
adobename="otilde",
@@ -2662,10 +2672,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0303 },
- uccode=0x00D5,
- unicodeslot=0x00F5,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x303 },
+ uccode=0xD5,
+ unicodeslot=0xF5,
},
{
adobename="odieresis",
@@ -2674,10 +2684,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0308 },
- uccode=0x00D6,
- unicodeslot=0x00F6,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x308 },
+ uccode=0xD6,
+ unicodeslot=0xF6,
},
{
adobename="divide",
@@ -2689,7 +2699,7 @@ characters.data={
linebreak="ai",
mathclass="binary",
mathname="div",
- unicodeslot=0x00F7,
+ unicodeslot=0xF7,
},
{
adobename="oslash",
@@ -2699,9 +2709,9 @@ characters.data={
description="LATIN SMALL LETTER O WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x006F,
- uccode=0x00D8,
- unicodeslot=0x00F8,
+ shcode=0x6F,
+ uccode=0xD8,
+ unicodeslot=0xF8,
},
{
adobename="ugrave",
@@ -2711,10 +2721,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0300 },
- uccode=0x00D9,
- unicodeslot=0x00F9,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x300 },
+ uccode=0xD9,
+ unicodeslot=0xF9,
},
{
adobename="uacute",
@@ -2724,10 +2734,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0301 },
- uccode=0x00DA,
- unicodeslot=0x00FA,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x301 },
+ uccode=0xDA,
+ unicodeslot=0xFA,
},
{
adobename="ucircumflex",
@@ -2736,10 +2746,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0302 },
- uccode=0x00DB,
- unicodeslot=0x00FB,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x302 },
+ uccode=0xDB,
+ unicodeslot=0xFB,
},
{
adobename="udieresis",
@@ -2749,10 +2759,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0308 },
- uccode=0x00DC,
- unicodeslot=0x00FC,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x308 },
+ uccode=0xDC,
+ unicodeslot=0xFC,
},
{
adobename="yacute",
@@ -2761,10 +2771,10 @@ characters.data={
description="LATIN SMALL LETTER Y WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0301 },
- uccode=0x00DD,
- unicodeslot=0x00FD,
+ shcode=0x79,
+ specials={ "char", 0x79, 0x301 },
+ uccode=0xDD,
+ unicodeslot=0xFD,
},
{
adobename="thorn",
@@ -2774,8 +2784,8 @@ characters.data={
description="LATIN SMALL LETTER THORN",
direction="l",
linebreak="al",
- uccode=0x00DE,
- unicodeslot=0x00FE,
+ uccode=0xDE,
+ unicodeslot=0xFE,
},
{
adobename="ydieresis",
@@ -2784,10 +2794,10 @@ characters.data={
description="LATIN SMALL LETTER Y WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0308 },
- uccode=0x0178,
- unicodeslot=0x00FF,
+ shcode=0x79,
+ specials={ "char", 0x79, 0x308 },
+ uccode=0x178,
+ unicodeslot=0xFF,
},
{
adobename="Amacron",
@@ -2795,11 +2805,11 @@ characters.data={
contextname="Amacron",
description="LATIN CAPITAL LETTER A WITH MACRON",
direction="l",
- lccode=0x0101,
+ lccode=0x101,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0304 },
- unicodeslot=0x0100,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x304 },
+ unicodeslot=0x100,
},
{
adobename="amacron",
@@ -2809,10 +2819,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0304 },
- uccode=0x0100,
- unicodeslot=0x0101,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x304 },
+ uccode=0x100,
+ unicodeslot=0x101,
},
{
adobename="Abreve",
@@ -2820,11 +2830,11 @@ characters.data={
contextname="Abreve",
description="LATIN CAPITAL LETTER A WITH BREVE",
direction="l",
- lccode=0x0103,
+ lccode=0x103,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0306 },
- unicodeslot=0x0102,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x306 },
+ unicodeslot=0x102,
},
{
adobename="abreve",
@@ -2833,10 +2843,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0306 },
- uccode=0x0102,
- unicodeslot=0x0103,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x306 },
+ uccode=0x102,
+ unicodeslot=0x103,
},
{
adobename="Aogonek",
@@ -2844,11 +2854,11 @@ characters.data={
contextname="Aogonek",
description="LATIN CAPITAL LETTER A WITH OGONEK",
direction="l",
- lccode=0x0105,
+ lccode=0x105,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0328 },
- unicodeslot=0x0104,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x328 },
+ unicodeslot=0x104,
},
{
adobename="aogonek",
@@ -2857,10 +2867,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH OGONEK",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0328 },
- uccode=0x0104,
- unicodeslot=0x0105,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x328 },
+ uccode=0x104,
+ unicodeslot=0x105,
},
{
adobename="Cacute",
@@ -2868,11 +2878,11 @@ characters.data={
contextname="Cacute",
description="LATIN CAPITAL LETTER C WITH ACUTE",
direction="l",
- lccode=0x0107,
+ lccode=0x107,
linebreak="al",
- shcode=0x0043,
- specials={ "char", 0x0043, 0x0301 },
- unicodeslot=0x0106,
+ shcode=0x43,
+ specials={ "char", 0x43, 0x301 },
+ unicodeslot=0x106,
},
{
adobename="cacute",
@@ -2881,10 +2891,10 @@ characters.data={
description="LATIN SMALL LETTER C WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0063,
- specials={ "char", 0x0063, 0x0301 },
- uccode=0x0106,
- unicodeslot=0x0107,
+ shcode=0x63,
+ specials={ "char", 0x63, 0x301 },
+ uccode=0x106,
+ unicodeslot=0x107,
},
{
adobename="Ccircumflex",
@@ -2892,11 +2902,11 @@ characters.data={
contextname="Ccircumflex",
description="LATIN CAPITAL LETTER C WITH CIRCUMFLEX",
direction="l",
- lccode=0x0109,
+ lccode=0x109,
linebreak="al",
- shcode=0x0043,
- specials={ "char", 0x0043, 0x0302 },
- unicodeslot=0x0108,
+ shcode=0x43,
+ specials={ "char", 0x43, 0x302 },
+ unicodeslot=0x108,
},
{
adobename="ccircumflex",
@@ -2905,10 +2915,10 @@ characters.data={
description="LATIN SMALL LETTER C WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0063,
- specials={ "char", 0x0063, 0x0302 },
- uccode=0x0108,
- unicodeslot=0x0109,
+ shcode=0x63,
+ specials={ "char", 0x63, 0x302 },
+ uccode=0x108,
+ unicodeslot=0x109,
},
{
adobename="Cdotaccent",
@@ -2916,11 +2926,11 @@ characters.data={
contextname="Cdotaccent",
description="LATIN CAPITAL LETTER C WITH DOT ABOVE",
direction="l",
- lccode=0x010B,
+ lccode=0x10B,
linebreak="al",
- shcode=0x0043,
- specials={ "char", 0x0043, 0x0307 },
- unicodeslot=0x010A,
+ shcode=0x43,
+ specials={ "char", 0x43, 0x307 },
+ unicodeslot=0x10A,
},
{
adobename="cdotaccent",
@@ -2929,10 +2939,10 @@ characters.data={
description="LATIN SMALL LETTER C WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0063,
- specials={ "char", 0x0063, 0x0307 },
- uccode=0x010A,
- unicodeslot=0x010B,
+ shcode=0x63,
+ specials={ "char", 0x63, 0x307 },
+ uccode=0x10A,
+ unicodeslot=0x10B,
},
{
adobename="Ccaron",
@@ -2940,11 +2950,11 @@ characters.data={
contextname="Ccaron",
description="LATIN CAPITAL LETTER C WITH CARON",
direction="l",
- lccode=0x010D,
+ lccode=0x10D,
linebreak="al",
- shcode=0x0043,
- specials={ "char", 0x0043, 0x030C },
- unicodeslot=0x010C,
+ shcode=0x43,
+ specials={ "char", 0x43, 0x30C },
+ unicodeslot=0x10C,
},
{
adobename="ccaron",
@@ -2953,10 +2963,10 @@ characters.data={
description="LATIN SMALL LETTER C WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0063,
- specials={ "char", 0x0063, 0x030C },
- uccode=0x010C,
- unicodeslot=0x010D,
+ shcode=0x63,
+ specials={ "char", 0x63, 0x30C },
+ uccode=0x10C,
+ unicodeslot=0x10D,
},
{
adobename="Dcaron",
@@ -2964,11 +2974,11 @@ characters.data={
contextname="Dcaron",
description="LATIN CAPITAL LETTER D WITH CARON",
direction="l",
- lccode=0x010F,
+ lccode=0x10F,
linebreak="al",
- shcode=0x0044,
- specials={ "char", 0x0044, 0x030C },
- unicodeslot=0x010E,
+ shcode=0x44,
+ specials={ "char", 0x44, 0x30C },
+ unicodeslot=0x10E,
},
{
adobename="dcaron",
@@ -2977,10 +2987,10 @@ characters.data={
description="LATIN SMALL LETTER D WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0064,
- specials={ "char", 0x0064, 0x030C },
- uccode=0x010E,
- unicodeslot=0x010F,
+ shcode=0x64,
+ specials={ "char", 0x64, 0x30C },
+ uccode=0x10E,
+ unicodeslot=0x10F,
},
{
adobename="Dslash",
@@ -2988,10 +2998,10 @@ characters.data={
contextname="Dstroke",
description="LATIN CAPITAL LETTER D WITH STROKE",
direction="l",
- lccode=0x0111,
+ lccode=0x111,
linebreak="al",
- shcode=0x0044,
- unicodeslot=0x0110,
+ shcode=0x44,
+ unicodeslot=0x110,
},
{
adobename="dmacron",
@@ -3001,9 +3011,9 @@ characters.data={
description="LATIN SMALL LETTER D WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0064,
- uccode=0x0110,
- unicodeslot=0x0111,
+ shcode=0x64,
+ uccode=0x110,
+ unicodeslot=0x111,
},
{
adobename="Emacron",
@@ -3011,11 +3021,11 @@ characters.data={
contextname="Emacron",
description="LATIN CAPITAL LETTER E WITH MACRON",
direction="l",
- lccode=0x0113,
+ lccode=0x113,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0304 },
- unicodeslot=0x0112,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x304 },
+ unicodeslot=0x112,
},
{
adobename="emacron",
@@ -3025,10 +3035,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0304 },
- uccode=0x0112,
- unicodeslot=0x0113,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x304 },
+ uccode=0x112,
+ unicodeslot=0x113,
},
{
adobename="Ebreve",
@@ -3036,11 +3046,11 @@ characters.data={
contextname="Ebreve",
description="LATIN CAPITAL LETTER E WITH BREVE",
direction="l",
- lccode=0x0115,
+ lccode=0x115,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0306 },
- unicodeslot=0x0114,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x306 },
+ unicodeslot=0x114,
},
{
adobename="ebreve",
@@ -3049,10 +3059,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0306 },
- uccode=0x0114,
- unicodeslot=0x0115,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x306 },
+ uccode=0x114,
+ unicodeslot=0x115,
},
{
adobename="Edotaccent",
@@ -3060,11 +3070,11 @@ characters.data={
contextname="Edotaccent",
description="LATIN CAPITAL LETTER E WITH DOT ABOVE",
direction="l",
- lccode=0x0117,
+ lccode=0x117,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0307 },
- unicodeslot=0x0116,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x307 },
+ unicodeslot=0x116,
},
{
adobename="edotaccent",
@@ -3073,10 +3083,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0307 },
- uccode=0x0116,
- unicodeslot=0x0117,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x307 },
+ uccode=0x116,
+ unicodeslot=0x117,
},
{
adobename="Eogonek",
@@ -3084,11 +3094,11 @@ characters.data={
contextname="Eogonek",
description="LATIN CAPITAL LETTER E WITH OGONEK",
direction="l",
- lccode=0x0119,
+ lccode=0x119,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0328 },
- unicodeslot=0x0118,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x328 },
+ unicodeslot=0x118,
},
{
adobename="eogonek",
@@ -3097,10 +3107,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH OGONEK",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0328 },
- uccode=0x0118,
- unicodeslot=0x0119,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x328 },
+ uccode=0x118,
+ unicodeslot=0x119,
},
{
adobename="Ecaron",
@@ -3108,11 +3118,11 @@ characters.data={
contextname="Ecaron",
description="LATIN CAPITAL LETTER E WITH CARON",
direction="l",
- lccode=0x011B,
+ lccode=0x11B,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x030C },
- unicodeslot=0x011A,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x30C },
+ unicodeslot=0x11A,
},
{
adobename="ecaron",
@@ -3122,10 +3132,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x030C },
- uccode=0x011A,
- unicodeslot=0x011B,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x30C },
+ uccode=0x11A,
+ unicodeslot=0x11B,
},
{
adobename="Gcircumflex",
@@ -3133,11 +3143,11 @@ characters.data={
contextname="Gcircumflex",
description="LATIN CAPITAL LETTER G WITH CIRCUMFLEX",
direction="l",
- lccode=0x011D,
+ lccode=0x11D,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x0302 },
- unicodeslot=0x011C,
+ shcode=0x47,
+ specials={ "char", 0x47, 0x302 },
+ unicodeslot=0x11C,
},
{
adobename="gcircumflex",
@@ -3146,10 +3156,10 @@ characters.data={
description="LATIN SMALL LETTER G WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x0302 },
- uccode=0x011C,
- unicodeslot=0x011D,
+ shcode=0x67,
+ specials={ "char", 0x67, 0x302 },
+ uccode=0x11C,
+ unicodeslot=0x11D,
},
{
adobename="Gbreve",
@@ -3157,11 +3167,11 @@ characters.data={
contextname="Gbreve",
description="LATIN CAPITAL LETTER G WITH BREVE",
direction="l",
- lccode=0x011F,
+ lccode=0x11F,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x0306 },
- unicodeslot=0x011E,
+ shcode=0x47,
+ specials={ "char", 0x47, 0x306 },
+ unicodeslot=0x11E,
},
{
adobename="gbreve",
@@ -3170,10 +3180,10 @@ characters.data={
description="LATIN SMALL LETTER G WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x0306 },
- uccode=0x011E,
- unicodeslot=0x011F,
+ shcode=0x67,
+ specials={ "char", 0x67, 0x306 },
+ uccode=0x11E,
+ unicodeslot=0x11F,
},
{
adobename="Gdotaccent",
@@ -3181,11 +3191,11 @@ characters.data={
contextname="Gdotaccent",
description="LATIN CAPITAL LETTER G WITH DOT ABOVE",
direction="l",
- lccode=0x0121,
+ lccode=0x121,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x0307 },
- unicodeslot=0x0120,
+ shcode=0x47,
+ specials={ "char", 0x47, 0x307 },
+ unicodeslot=0x120,
},
{
adobename="gdotaccent",
@@ -3194,10 +3204,10 @@ characters.data={
description="LATIN SMALL LETTER G WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x0307 },
- uccode=0x0120,
- unicodeslot=0x0121,
+ shcode=0x67,
+ specials={ "char", 0x67, 0x307 },
+ uccode=0x120,
+ unicodeslot=0x121,
},
{
adobename="Gcommaaccent",
@@ -3205,11 +3215,11 @@ characters.data={
contextname="Gcommaaccent",
description="LATIN CAPITAL LETTER G WITH CEDILLA",
direction="l",
- lccode=0x0123,
+ lccode=0x123,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x0327 },
- unicodeslot=0x0122,
+ shcode=0x47,
+ specials={ "char", 0x47, 0x327 },
+ unicodeslot=0x122,
},
{
adobename="gcommaaccent",
@@ -3218,10 +3228,10 @@ characters.data={
description="LATIN SMALL LETTER G WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x0327 },
- uccode=0x0122,
- unicodeslot=0x0123,
+ shcode=0x67,
+ specials={ "char", 0x67, 0x327 },
+ uccode=0x122,
+ unicodeslot=0x123,
},
{
adobename="Hcircumflex",
@@ -3229,11 +3239,11 @@ characters.data={
contextname="Hcircumflex",
description="LATIN CAPITAL LETTER H WITH CIRCUMFLEX",
direction="l",
- lccode=0x0125,
+ lccode=0x125,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x0302 },
- unicodeslot=0x0124,
+ shcode=0x48,
+ specials={ "char", 0x48, 0x302 },
+ unicodeslot=0x124,
},
{
adobename="hcircumflex",
@@ -3242,10 +3252,10 @@ characters.data={
description="LATIN SMALL LETTER H WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x0302 },
- uccode=0x0124,
- unicodeslot=0x0125,
+ shcode=0x68,
+ specials={ "char", 0x68, 0x302 },
+ uccode=0x124,
+ unicodeslot=0x125,
},
{
adobename="Hbar",
@@ -3254,10 +3264,10 @@ characters.data={
contextname="Hstroke",
description="LATIN CAPITAL LETTER H WITH STROKE",
direction="l",
- lccode=0x0127,
+ lccode=0x127,
linebreak="al",
- shcode=0x0048,
- unicodeslot=0x0126,
+ shcode=0x48,
+ unicodeslot=0x126,
},
{
adobename="hbar",
@@ -3267,9 +3277,9 @@ characters.data={
description="LATIN SMALL LETTER H WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0068,
- uccode=0x0126,
- unicodeslot=0x0127,
+ shcode=0x68,
+ uccode=0x126,
+ unicodeslot=0x127,
},
{
adobename="Itilde",
@@ -3277,11 +3287,11 @@ characters.data={
contextname="Itilde",
description="LATIN CAPITAL LETTER I WITH TILDE",
direction="l",
- lccode=0x0129,
+ lccode=0x129,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0303 },
- unicodeslot=0x0128,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x303 },
+ unicodeslot=0x128,
},
{
adobename="itilde",
@@ -3290,10 +3300,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0303 },
- uccode=0x0128,
- unicodeslot=0x0129,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x303 },
+ uccode=0x128,
+ unicodeslot=0x129,
},
{
adobename="Imacron",
@@ -3301,11 +3311,11 @@ characters.data={
contextname="Imacron",
description="LATIN CAPITAL LETTER I WITH MACRON",
direction="l",
- lccode=0x012B,
+ lccode=0x12B,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0304 },
- unicodeslot=0x012A,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x304 },
+ unicodeslot=0x12A,
},
{
adobename="imacron",
@@ -3315,10 +3325,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0304 },
- uccode=0x012A,
- unicodeslot=0x012B,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x304 },
+ uccode=0x12A,
+ unicodeslot=0x12B,
},
{
adobename="Ibreve",
@@ -3326,11 +3336,11 @@ characters.data={
contextname="Ibreve",
description="LATIN CAPITAL LETTER I WITH BREVE",
direction="l",
- lccode=0x012D,
+ lccode=0x12D,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0306 },
- unicodeslot=0x012C,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x306 },
+ unicodeslot=0x12C,
},
{
adobename="ibreve",
@@ -3339,10 +3349,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0306 },
- uccode=0x012C,
- unicodeslot=0x012D,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x306 },
+ uccode=0x12C,
+ unicodeslot=0x12D,
},
{
adobename="Iogonek",
@@ -3350,11 +3360,11 @@ characters.data={
contextname="Iogonek",
description="LATIN CAPITAL LETTER I WITH OGONEK",
direction="l",
- lccode=0x012F,
+ lccode=0x12F,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0328 },
- unicodeslot=0x012E,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x328 },
+ unicodeslot=0x12E,
},
{
adobename="iogonek",
@@ -3363,10 +3373,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH OGONEK",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0328 },
- uccode=0x012E,
- unicodeslot=0x012F,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x328 },
+ uccode=0x12E,
+ unicodeslot=0x12F,
},
{
adobename="Idotaccent",
@@ -3374,11 +3384,11 @@ characters.data={
contextname="Idotaccent",
description="LATIN CAPITAL LETTER I WITH DOT ABOVE",
direction="l",
- lccode=0x0069,
+ lccode=0x69,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0307 },
- unicodeslot=0x0130,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x307 },
+ unicodeslot=0x130,
},
{
adobename="dotlessi",
@@ -3388,8 +3398,8 @@ characters.data={
description="LATIN SMALL LETTER DOTLESS I",
direction="l",
linebreak="al",
- uccode=0x0049,
- unicodeslot=0x0131,
+ uccode=0x49,
+ unicodeslot=0x131,
},
{
adobename="IJ",
@@ -3398,11 +3408,11 @@ characters.data={
contextname="IJligature",
description="LATIN CAPITAL LIGATURE IJ",
direction="l",
- lccode=0x0133,
+ lccode=0x133,
linebreak="al",
- shcode={ 0x0049, 0x004A },
- specials={ "compat", 0x0049, 0x004A },
- unicodeslot=0x0132,
+ shcode={ 0x49, 0x4A },
+ specials={ "compat", 0x49, 0x4A },
+ unicodeslot=0x132,
},
{
adobename="ij",
@@ -3412,10 +3422,10 @@ characters.data={
description="LATIN SMALL LIGATURE IJ",
direction="l",
linebreak="al",
- shcode={ 0x0069, 0x006A },
- specials={ "compat", 0x0069, 0x006A },
- uccode=0x0132,
- unicodeslot=0x0133,
+ shcode={ 0x69, 0x6A },
+ specials={ "compat", 0x69, 0x6A },
+ uccode=0x132,
+ unicodeslot=0x133,
},
{
adobename="Jcircumflex",
@@ -3423,11 +3433,11 @@ characters.data={
contextname="Jcircumflex",
description="LATIN CAPITAL LETTER J WITH CIRCUMFLEX",
direction="l",
- lccode=0x0135,
+ lccode=0x135,
linebreak="al",
- shcode=0x004A,
- specials={ "char", 0x004A, 0x0302 },
- unicodeslot=0x0134,
+ shcode=0x4A,
+ specials={ "char", 0x4A, 0x302 },
+ unicodeslot=0x134,
},
{
adobename="jcircumflex",
@@ -3436,10 +3446,10 @@ characters.data={
description="LATIN SMALL LETTER J WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x006A,
- specials={ "char", 0x006A, 0x0302 },
- uccode=0x0134,
- unicodeslot=0x0135,
+ shcode=0x6A,
+ specials={ "char", 0x6A, 0x302 },
+ uccode=0x134,
+ unicodeslot=0x135,
},
{
adobename="Kcommaaccent",
@@ -3447,11 +3457,11 @@ characters.data={
contextname="Kcommaaccent",
description="LATIN CAPITAL LETTER K WITH CEDILLA",
direction="l",
- lccode=0x0137,
+ lccode=0x137,
linebreak="al",
- shcode=0x004B,
- specials={ "char", 0x004B, 0x0327 },
- unicodeslot=0x0136,
+ shcode=0x4B,
+ specials={ "char", 0x4B, 0x327 },
+ unicodeslot=0x136,
},
{
adobename="kcommaaccent",
@@ -3460,10 +3470,10 @@ characters.data={
description="LATIN SMALL LETTER K WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x006B,
- specials={ "char", 0x006B, 0x0327 },
- uccode=0x0136,
- unicodeslot=0x0137,
+ shcode=0x6B,
+ specials={ "char", 0x6B, 0x327 },
+ uccode=0x136,
+ unicodeslot=0x137,
},
{
adobename="kgreenlandic",
@@ -3473,7 +3483,7 @@ characters.data={
description="LATIN SMALL LETTER KRA",
direction="l",
linebreak="al",
- unicodeslot=0x0138,
+ unicodeslot=0x138,
},
{
adobename="Lacute",
@@ -3481,11 +3491,11 @@ characters.data={
contextname="Lacute",
description="LATIN CAPITAL LETTER L WITH ACUTE",
direction="l",
- lccode=0x013A,
+ lccode=0x13A,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x004C, 0x0301 },
- unicodeslot=0x0139,
+ shcode=0x4C,
+ specials={ "char", 0x4C, 0x301 },
+ unicodeslot=0x139,
},
{
adobename="lacute",
@@ -3494,10 +3504,10 @@ characters.data={
description="LATIN SMALL LETTER L WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x006C, 0x0301 },
- uccode=0x0139,
- unicodeslot=0x013A,
+ shcode=0x6C,
+ specials={ "char", 0x6C, 0x301 },
+ uccode=0x139,
+ unicodeslot=0x13A,
},
{
adobename="Lcommaaccent",
@@ -3505,11 +3515,11 @@ characters.data={
contextname="Lcommaaccent",
description="LATIN CAPITAL LETTER L WITH CEDILLA",
direction="l",
- lccode=0x013C,
+ lccode=0x13C,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x004C, 0x0327 },
- unicodeslot=0x013B,
+ shcode=0x4C,
+ specials={ "char", 0x4C, 0x327 },
+ unicodeslot=0x13B,
},
{
adobename="lcommaaccent",
@@ -3518,10 +3528,10 @@ characters.data={
description="LATIN SMALL LETTER L WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x006C, 0x0327 },
- uccode=0x013B,
- unicodeslot=0x013C,
+ shcode=0x6C,
+ specials={ "char", 0x6C, 0x327 },
+ uccode=0x13B,
+ unicodeslot=0x13C,
},
{
adobename="Lcaron",
@@ -3529,11 +3539,11 @@ characters.data={
contextname="Lcaron",
description="LATIN CAPITAL LETTER L WITH CARON",
direction="l",
- lccode=0x013E,
+ lccode=0x13E,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x004C, 0x030C },
- unicodeslot=0x013D,
+ shcode=0x4C,
+ specials={ "char", 0x4C, 0x30C },
+ unicodeslot=0x13D,
},
{
adobename="lcaron",
@@ -3542,10 +3552,10 @@ characters.data={
description="LATIN SMALL LETTER L WITH CARON",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x006C, 0x030C },
- uccode=0x013D,
- unicodeslot=0x013E,
+ shcode=0x6C,
+ specials={ "char", 0x6C, 0x30C },
+ uccode=0x13D,
+ unicodeslot=0x13E,
},
{
adobename="Ldotaccent",
@@ -3554,11 +3564,11 @@ characters.data={
contextname="Ldotmiddle",
description="LATIN CAPITAL LETTER L WITH MIDDLE DOT",
direction="l",
- lccode=0x0140,
+ lccode=0x140,
linebreak="al",
- shcode=0x004C,
- specials={ "compat", 0x004C, 0x00B7 },
- unicodeslot=0x013F,
+ shcode=0x4C,
+ specials={ "compat", 0x4C, 0xB7 },
+ unicodeslot=0x13F,
},
{
adobename="ldotaccent",
@@ -3568,10 +3578,10 @@ characters.data={
description="LATIN SMALL LETTER L WITH MIDDLE DOT",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "compat", 0x006C, 0x00B7 },
- uccode=0x013F,
- unicodeslot=0x0140,
+ shcode=0x6C,
+ specials={ "compat", 0x6C, 0xB7 },
+ uccode=0x13F,
+ unicodeslot=0x140,
},
{
adobename="Lslash",
@@ -3580,10 +3590,10 @@ characters.data={
contextname="Lstroke",
description="LATIN CAPITAL LETTER L WITH STROKE",
direction="l",
- lccode=0x0142,
+ lccode=0x142,
linebreak="al",
- shcode=0x004C,
- unicodeslot=0x0141,
+ shcode=0x4C,
+ unicodeslot=0x141,
},
{
adobename="lslash",
@@ -3593,9 +3603,9 @@ characters.data={
description="LATIN SMALL LETTER L WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x006C,
- uccode=0x0141,
- unicodeslot=0x0142,
+ shcode=0x6C,
+ uccode=0x141,
+ unicodeslot=0x142,
},
{
adobename="Nacute",
@@ -3603,11 +3613,11 @@ characters.data={
contextname="Nacute",
description="LATIN CAPITAL LETTER N WITH ACUTE",
direction="l",
- lccode=0x0144,
+ lccode=0x144,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0301 },
- unicodeslot=0x0143,
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x301 },
+ unicodeslot=0x143,
},
{
adobename="nacute",
@@ -3617,10 +3627,10 @@ characters.data={
description="LATIN SMALL LETTER N WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0301 },
- uccode=0x0143,
- unicodeslot=0x0144,
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x301 },
+ uccode=0x143,
+ unicodeslot=0x144,
},
{
adobename="Ncommaaccent",
@@ -3628,11 +3638,11 @@ characters.data={
contextname="Ncommaaccent",
description="LATIN CAPITAL LETTER N WITH CEDILLA",
direction="l",
- lccode=0x0146,
+ lccode=0x146,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0327 },
- unicodeslot=0x0145,
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x327 },
+ unicodeslot=0x145,
},
{
adobename="ncommaaccent",
@@ -3641,10 +3651,10 @@ characters.data={
description="LATIN SMALL LETTER N WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0327 },
- uccode=0x0145,
- unicodeslot=0x0146,
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x327 },
+ uccode=0x145,
+ unicodeslot=0x146,
},
{
adobename="Ncaron",
@@ -3652,11 +3662,11 @@ characters.data={
contextname="Ncaron",
description="LATIN CAPITAL LETTER N WITH CARON",
direction="l",
- lccode=0x0148,
+ lccode=0x148,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x030C },
- unicodeslot=0x0147,
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x30C },
+ unicodeslot=0x147,
},
{
adobename="ncaron",
@@ -3666,10 +3676,10 @@ characters.data={
description="LATIN SMALL LETTER N WITH CARON",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x030C },
- uccode=0x0147,
- unicodeslot=0x0148,
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x30C },
+ uccode=0x147,
+ unicodeslot=0x148,
},
{
adobename="quoterightn",
@@ -3679,8 +3689,8 @@ characters.data={
description="LATIN SMALL LETTER N PRECEDED BY APOSTROPHE",
direction="l",
linebreak="al",
- specials={ "compat", 0x02BC, 0x006E },
- unicodeslot=0x0149,
+ specials={ "compat", 0x2BC, 0x6E },
+ unicodeslot=0x149,
},
{
adobename="Eng",
@@ -3689,9 +3699,9 @@ characters.data={
contextname="Neng",
description="LATIN CAPITAL LETTER ENG",
direction="l",
- lccode=0x014B,
+ lccode=0x14B,
linebreak="al",
- unicodeslot=0x014A,
+ unicodeslot=0x14A,
},
{
adobename="eng",
@@ -3701,8 +3711,8 @@ characters.data={
description="LATIN SMALL LETTER ENG",
direction="l",
linebreak="al",
- uccode=0x014A,
- unicodeslot=0x014B,
+ uccode=0x14A,
+ unicodeslot=0x14B,
},
{
adobename="Omacron",
@@ -3710,11 +3720,11 @@ characters.data={
contextname="Omacron",
description="LATIN CAPITAL LETTER O WITH MACRON",
direction="l",
- lccode=0x014D,
+ lccode=0x14D,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0304 },
- unicodeslot=0x014C,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x304 },
+ unicodeslot=0x14C,
},
{
adobename="omacron",
@@ -3724,10 +3734,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0304 },
- uccode=0x014C,
- unicodeslot=0x014D,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x304 },
+ uccode=0x14C,
+ unicodeslot=0x14D,
},
{
adobename="Obreve",
@@ -3735,11 +3745,11 @@ characters.data={
contextname="Obreve",
description="LATIN CAPITAL LETTER O WITH BREVE",
direction="l",
- lccode=0x014F,
+ lccode=0x14F,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0306 },
- unicodeslot=0x014E,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x306 },
+ unicodeslot=0x14E,
},
{
adobename="obreve",
@@ -3748,10 +3758,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0306 },
- uccode=0x014E,
- unicodeslot=0x014F,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x306 },
+ uccode=0x14E,
+ unicodeslot=0x14F,
},
{
adobename="Ohungarumlaut",
@@ -3759,11 +3769,11 @@ characters.data={
contextname="Ohungarumlaut",
description="LATIN CAPITAL LETTER O WITH DOUBLE ACUTE",
direction="l",
- lccode=0x0151,
+ lccode=0x151,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x030B },
- unicodeslot=0x0150,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x30B },
+ unicodeslot=0x150,
},
{
adobename="ohungarumlaut",
@@ -3772,10 +3782,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH DOUBLE ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x030B },
- uccode=0x0150,
- unicodeslot=0x0151,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x30B },
+ uccode=0x150,
+ unicodeslot=0x151,
},
{
adobename="OE",
@@ -3784,10 +3794,10 @@ characters.data={
contextname="OEligature",
description="LATIN CAPITAL LIGATURE OE",
direction="l",
- lccode=0x0153,
+ lccode=0x153,
linebreak="al",
- shcode={ 0x004F, 0x0045 },
- unicodeslot=0x0152,
+ shcode={ 0x4F, 0x45 },
+ unicodeslot=0x152,
},
{
adobename="oe",
@@ -3797,9 +3807,9 @@ characters.data={
description="LATIN SMALL LIGATURE OE",
direction="l",
linebreak="al",
- shcode={ 0x006F, 0x0065 },
- uccode=0x0152,
- unicodeslot=0x0153,
+ shcode={ 0x6F, 0x65 },
+ uccode=0x152,
+ unicodeslot=0x153,
},
{
adobename="Racute",
@@ -3807,11 +3817,11 @@ characters.data={
contextname="Racute",
description="LATIN CAPITAL LETTER R WITH ACUTE",
direction="l",
- lccode=0x0155,
+ lccode=0x155,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x0301 },
- unicodeslot=0x0154,
+ shcode=0x52,
+ specials={ "char", 0x52, 0x301 },
+ unicodeslot=0x154,
},
{
adobename="racute",
@@ -3820,10 +3830,10 @@ characters.data={
description="LATIN SMALL LETTER R WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x0301 },
- uccode=0x0154,
- unicodeslot=0x0155,
+ shcode=0x72,
+ specials={ "char", 0x72, 0x301 },
+ uccode=0x154,
+ unicodeslot=0x155,
},
{
adobename="Rcommaaccent",
@@ -3831,11 +3841,11 @@ characters.data={
contextname="Rcommaaccent",
description="LATIN CAPITAL LETTER R WITH CEDILLA",
direction="l",
- lccode=0x0157,
+ lccode=0x157,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x0327 },
- unicodeslot=0x0156,
+ shcode=0x52,
+ specials={ "char", 0x52, 0x327 },
+ unicodeslot=0x156,
},
{
adobename="rcommaaccent",
@@ -3844,10 +3854,10 @@ characters.data={
description="LATIN SMALL LETTER R WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x0327 },
- uccode=0x0156,
- unicodeslot=0x0157,
+ shcode=0x72,
+ specials={ "char", 0x72, 0x327 },
+ uccode=0x156,
+ unicodeslot=0x157,
},
{
adobename="Rcaron",
@@ -3855,11 +3865,11 @@ characters.data={
contextname="Rcaron",
description="LATIN CAPITAL LETTER R WITH CARON",
direction="l",
- lccode=0x0159,
+ lccode=0x159,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x030C },
- unicodeslot=0x0158,
+ shcode=0x52,
+ specials={ "char", 0x52, 0x30C },
+ unicodeslot=0x158,
},
{
adobename="rcaron",
@@ -3868,10 +3878,10 @@ characters.data={
description="LATIN SMALL LETTER R WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x030C },
- uccode=0x0158,
- unicodeslot=0x0159,
+ shcode=0x72,
+ specials={ "char", 0x72, 0x30C },
+ uccode=0x158,
+ unicodeslot=0x159,
},
{
adobename="Sacute",
@@ -3879,11 +3889,11 @@ characters.data={
contextname="Sacute",
description="LATIN CAPITAL LETTER S WITH ACUTE",
direction="l",
- lccode=0x015B,
+ lccode=0x15B,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x0301 },
- unicodeslot=0x015A,
+ shcode=0x53,
+ specials={ "char", 0x53, 0x301 },
+ unicodeslot=0x15A,
},
{
adobename="sacute",
@@ -3892,10 +3902,10 @@ characters.data={
description="LATIN SMALL LETTER S WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x0301 },
- uccode=0x015A,
- unicodeslot=0x015B,
+ shcode=0x73,
+ specials={ "char", 0x73, 0x301 },
+ uccode=0x15A,
+ unicodeslot=0x15B,
},
{
adobename="Scircumflex",
@@ -3903,11 +3913,11 @@ characters.data={
contextname="Scircumflex",
description="LATIN CAPITAL LETTER S WITH CIRCUMFLEX",
direction="l",
- lccode=0x015D,
+ lccode=0x15D,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x0302 },
- unicodeslot=0x015C,
+ shcode=0x53,
+ specials={ "char", 0x53, 0x302 },
+ unicodeslot=0x15C,
},
{
adobename="scircumflex",
@@ -3916,10 +3926,10 @@ characters.data={
description="LATIN SMALL LETTER S WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x0302 },
- uccode=0x015C,
- unicodeslot=0x015D,
+ shcode=0x73,
+ specials={ "char", 0x73, 0x302 },
+ uccode=0x15C,
+ unicodeslot=0x15D,
},
{
adobename="Scedilla",
@@ -3927,11 +3937,11 @@ characters.data={
contextname="Scedilla",
description="LATIN CAPITAL LETTER S WITH CEDILLA",
direction="l",
- lccode=0x015F,
+ lccode=0x15F,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x0327 },
- unicodeslot=0x015E,
+ shcode=0x53,
+ specials={ "char", 0x53, 0x327 },
+ unicodeslot=0x15E,
},
{
adobename="scedilla",
@@ -3940,10 +3950,10 @@ characters.data={
description="LATIN SMALL LETTER S WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x0327 },
- uccode=0x015E,
- unicodeslot=0x015F,
+ shcode=0x73,
+ specials={ "char", 0x73, 0x327 },
+ uccode=0x15E,
+ unicodeslot=0x15F,
},
{
adobename="Scaron",
@@ -3951,11 +3961,11 @@ characters.data={
contextname="Scaron",
description="LATIN CAPITAL LETTER S WITH CARON",
direction="l",
- lccode=0x0161,
+ lccode=0x161,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x030C },
- unicodeslot=0x0160,
+ shcode=0x53,
+ specials={ "char", 0x53, 0x30C },
+ unicodeslot=0x160,
},
{
adobename="scaron",
@@ -3964,10 +3974,10 @@ characters.data={
description="LATIN SMALL LETTER S WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x030C },
- uccode=0x0160,
- unicodeslot=0x0161,
+ shcode=0x73,
+ specials={ "char", 0x73, 0x30C },
+ uccode=0x160,
+ unicodeslot=0x161,
},
{
adobename="Tcommaaccent",
@@ -3975,11 +3985,11 @@ characters.data={
contextname="Tcedilla",
description="LATIN CAPITAL LETTER T WITH CEDILLA",
direction="l",
- lccode=0x0163,
+ lccode=0x163,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x0327 },
- unicodeslot=0x0162,
+ shcode=0x54,
+ specials={ "char", 0x54, 0x327 },
+ unicodeslot=0x162,
},
{
adobename="tcommaaccent",
@@ -3988,10 +3998,10 @@ characters.data={
description="LATIN SMALL LETTER T WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x0327 },
- uccode=0x0162,
- unicodeslot=0x0163,
+ shcode=0x74,
+ specials={ "char", 0x74, 0x327 },
+ uccode=0x162,
+ unicodeslot=0x163,
},
{
adobename="Tcaron",
@@ -3999,11 +4009,11 @@ characters.data={
contextname="Tcaron",
description="LATIN CAPITAL LETTER T WITH CARON",
direction="l",
- lccode=0x0165,
+ lccode=0x165,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x030C },
- unicodeslot=0x0164,
+ shcode=0x54,
+ specials={ "char", 0x54, 0x30C },
+ unicodeslot=0x164,
},
{
adobename="tcaron",
@@ -4012,10 +4022,10 @@ characters.data={
description="LATIN SMALL LETTER T WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x030C },
- uccode=0x0164,
- unicodeslot=0x0165,
+ shcode=0x74,
+ specials={ "char", 0x74, 0x30C },
+ uccode=0x164,
+ unicodeslot=0x165,
},
{
adobename="Tbar",
@@ -4024,10 +4034,10 @@ characters.data={
contextname="Tstroke",
description="LATIN CAPITAL LETTER T WITH STROKE",
direction="l",
- lccode=0x0167,
+ lccode=0x167,
linebreak="al",
- shcode=0x0054,
- unicodeslot=0x0166,
+ shcode=0x54,
+ unicodeslot=0x166,
},
{
adobename="tbar",
@@ -4037,9 +4047,9 @@ characters.data={
description="LATIN SMALL LETTER T WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0074,
- uccode=0x0166,
- unicodeslot=0x0167,
+ shcode=0x74,
+ uccode=0x166,
+ unicodeslot=0x167,
},
{
adobename="Utilde",
@@ -4047,11 +4057,11 @@ characters.data={
contextname="Utilde",
description="LATIN CAPITAL LETTER U WITH TILDE",
direction="l",
- lccode=0x0169,
+ lccode=0x169,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0303 },
- unicodeslot=0x0168,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x303 },
+ unicodeslot=0x168,
},
{
adobename="utilde",
@@ -4060,10 +4070,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0303 },
- uccode=0x0168,
- unicodeslot=0x0169,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x303 },
+ uccode=0x168,
+ unicodeslot=0x169,
},
{
adobename="Umacron",
@@ -4071,11 +4081,11 @@ characters.data={
contextname="Umacron",
description="LATIN CAPITAL LETTER U WITH MACRON",
direction="l",
- lccode=0x016B,
+ lccode=0x16B,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0304 },
- unicodeslot=0x016A,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x304 },
+ unicodeslot=0x16A,
},
{
adobename="umacron",
@@ -4085,10 +4095,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0304 },
- uccode=0x016A,
- unicodeslot=0x016B,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x304 },
+ uccode=0x16A,
+ unicodeslot=0x16B,
},
{
adobename="Ubreve",
@@ -4096,11 +4106,11 @@ characters.data={
contextname="Ubreve",
description="LATIN CAPITAL LETTER U WITH BREVE",
direction="l",
- lccode=0x016D,
+ lccode=0x16D,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0306 },
- unicodeslot=0x016C,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x306 },
+ unicodeslot=0x16C,
},
{
adobename="ubreve",
@@ -4109,10 +4119,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0306 },
- uccode=0x016C,
- unicodeslot=0x016D,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x306 },
+ uccode=0x16C,
+ unicodeslot=0x16D,
},
{
adobename="Uring",
@@ -4120,11 +4130,11 @@ characters.data={
contextname="Uring",
description="LATIN CAPITAL LETTER U WITH RING ABOVE",
direction="l",
- lccode=0x016F,
+ lccode=0x16F,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x030A },
- unicodeslot=0x016E,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x30A },
+ unicodeslot=0x16E,
},
{
adobename="uring",
@@ -4133,10 +4143,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH RING ABOVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x030A },
- uccode=0x016E,
- unicodeslot=0x016F,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x30A },
+ uccode=0x16E,
+ unicodeslot=0x16F,
},
{
adobename="Uhungarumlaut",
@@ -4144,11 +4154,11 @@ characters.data={
contextname="Uhungarumlaut",
description="LATIN CAPITAL LETTER U WITH DOUBLE ACUTE",
direction="l",
- lccode=0x0171,
+ lccode=0x171,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x030B },
- unicodeslot=0x0170,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x30B },
+ unicodeslot=0x170,
},
{
adobename="uhungarumlaut",
@@ -4157,10 +4167,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DOUBLE ACUTE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x030B },
- uccode=0x0170,
- unicodeslot=0x0171,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x30B },
+ uccode=0x170,
+ unicodeslot=0x171,
},
{
adobename="Uogonek",
@@ -4168,11 +4178,11 @@ characters.data={
contextname="Uogonek",
description="LATIN CAPITAL LETTER U WITH OGONEK",
direction="l",
- lccode=0x0173,
+ lccode=0x173,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0328 },
- unicodeslot=0x0172,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x328 },
+ unicodeslot=0x172,
},
{
adobename="uogonek",
@@ -4181,10 +4191,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH OGONEK",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0328 },
- uccode=0x0172,
- unicodeslot=0x0173,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x328 },
+ uccode=0x172,
+ unicodeslot=0x173,
},
{
adobename="Wcircumflex",
@@ -4192,11 +4202,11 @@ characters.data={
contextname="Wcircumflex",
description="LATIN CAPITAL LETTER W WITH CIRCUMFLEX",
direction="l",
- lccode=0x0175,
+ lccode=0x175,
linebreak="al",
- shcode=0x0057,
- specials={ "char", 0x0057, 0x0302 },
- unicodeslot=0x0174,
+ shcode=0x57,
+ specials={ "char", 0x57, 0x302 },
+ unicodeslot=0x174,
},
{
adobename="wcircumflex",
@@ -4205,10 +4215,10 @@ characters.data={
description="LATIN SMALL LETTER W WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x0302 },
- uccode=0x0174,
- unicodeslot=0x0175,
+ shcode=0x77,
+ specials={ "char", 0x77, 0x302 },
+ uccode=0x174,
+ unicodeslot=0x175,
},
{
adobename="Ycircumflex",
@@ -4216,11 +4226,11 @@ characters.data={
contextname="Ycircumflex",
description="LATIN CAPITAL LETTER Y WITH CIRCUMFLEX",
direction="l",
- lccode=0x0177,
+ lccode=0x177,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0302 },
- unicodeslot=0x0176,
+ shcode=0x59,
+ specials={ "char", 0x59, 0x302 },
+ unicodeslot=0x176,
},
{
adobename="ycircumflex",
@@ -4229,10 +4239,10 @@ characters.data={
description="LATIN SMALL LETTER Y WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0302 },
- uccode=0x0176,
- unicodeslot=0x0177,
+ shcode=0x79,
+ specials={ "char", 0x79, 0x302 },
+ uccode=0x176,
+ unicodeslot=0x177,
},
{
adobename="Ydieresis",
@@ -4240,11 +4250,11 @@ characters.data={
contextname="Ydiaeresis",
description="LATIN CAPITAL LETTER Y WITH DIAERESIS",
direction="l",
- lccode=0x00FF,
+ lccode=0xFF,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0308 },
- unicodeslot=0x0178,
+ shcode=0x59,
+ specials={ "char", 0x59, 0x308 },
+ unicodeslot=0x178,
},
{
adobename="Zacute",
@@ -4252,11 +4262,11 @@ characters.data={
contextname="Zacute",
description="LATIN CAPITAL LETTER Z WITH ACUTE",
direction="l",
- lccode=0x017A,
+ lccode=0x17A,
linebreak="al",
- shcode=0x005A,
- specials={ "char", 0x005A, 0x0301 },
- unicodeslot=0x0179,
+ shcode=0x5A,
+ specials={ "char", 0x5A, 0x301 },
+ unicodeslot=0x179,
},
{
adobename="zacute",
@@ -4265,10 +4275,10 @@ characters.data={
description="LATIN SMALL LETTER Z WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x007A,
- specials={ "char", 0x007A, 0x0301 },
- uccode=0x0179,
- unicodeslot=0x017A,
+ shcode=0x7A,
+ specials={ "char", 0x7A, 0x301 },
+ uccode=0x179,
+ unicodeslot=0x17A,
},
{
adobename="Zdotaccent",
@@ -4276,11 +4286,11 @@ characters.data={
contextname="Zdotaccent",
description="LATIN CAPITAL LETTER Z WITH DOT ABOVE",
direction="l",
- lccode=0x017C,
+ lccode=0x17C,
linebreak="al",
- shcode=0x005A,
- specials={ "char", 0x005A, 0x0307 },
- unicodeslot=0x017B,
+ shcode=0x5A,
+ specials={ "char", 0x5A, 0x307 },
+ unicodeslot=0x17B,
},
{
adobename="zdotaccent",
@@ -4289,10 +4299,10 @@ characters.data={
description="LATIN SMALL LETTER Z WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x007A,
- specials={ "char", 0x007A, 0x0307 },
- uccode=0x017B,
- unicodeslot=0x017C,
+ shcode=0x7A,
+ specials={ "char", 0x7A, 0x307 },
+ uccode=0x17B,
+ unicodeslot=0x17C,
},
{
adobename="Zcaron",
@@ -4300,11 +4310,11 @@ characters.data={
contextname="Zcaron",
description="LATIN CAPITAL LETTER Z WITH CARON",
direction="l",
- lccode=0x017E,
+ lccode=0x17E,
linebreak="al",
- shcode=0x005A,
- specials={ "char", 0x005A, 0x030C },
- unicodeslot=0x017D,
+ shcode=0x5A,
+ specials={ "char", 0x5A, 0x30C },
+ unicodeslot=0x17D,
},
{
adobename="zcaron",
@@ -4313,10 +4323,10 @@ characters.data={
description="LATIN SMALL LETTER Z WITH CARON",
direction="l",
linebreak="al",
- shcode=0x007A,
- specials={ "char", 0x007A, 0x030C },
- uccode=0x017D,
- unicodeslot=0x017E,
+ shcode=0x7A,
+ specials={ "char", 0x7A, 0x30C },
+ uccode=0x17D,
+ unicodeslot=0x17E,
},
{
adobename="slong",
@@ -4325,9 +4335,9 @@ characters.data={
description="LATIN SMALL LETTER LONG S",
direction="l",
linebreak="al",
- specials={ "compat", 0x0073 },
- uccode=0x0053,
- unicodeslot=0x017F,
+ specials={ "compat", 0x73 },
+ uccode=0x53,
+ unicodeslot=0x17F,
},
{
adobename="bstroke",
@@ -4336,9 +4346,9 @@ characters.data={
description="LATIN SMALL LETTER B WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0062,
- uccode=0x0243,
- unicodeslot=0x0180,
+ shcode=0x62,
+ uccode=0x243,
+ unicodeslot=0x180,
},
{
adobename="Bhook",
@@ -4346,20 +4356,20 @@ characters.data={
contextname="Bhook",
description="LATIN CAPITAL LETTER B WITH HOOK",
direction="l",
- lccode=0x0253,
+ lccode=0x253,
linebreak="al",
- shcode=0x0042,
- unicodeslot=0x0181,
+ shcode=0x42,
+ unicodeslot=0x181,
},
{
adobename="Btopbar",
category="lu",
description="LATIN CAPITAL LETTER B WITH TOPBAR",
direction="l",
- lccode=0x0183,
+ lccode=0x183,
linebreak="al",
- shcode=0x0042,
- unicodeslot=0x0182,
+ shcode=0x42,
+ unicodeslot=0x182,
},
{
adobename="btopbar",
@@ -4367,18 +4377,18 @@ characters.data={
description="LATIN SMALL LETTER B WITH TOPBAR",
direction="l",
linebreak="al",
- shcode=0x0062,
- uccode=0x0182,
- unicodeslot=0x0183,
+ shcode=0x62,
+ uccode=0x182,
+ unicodeslot=0x183,
},
{
adobename="Tonesix",
category="lu",
description="LATIN CAPITAL LETTER TONE SIX",
direction="l",
- lccode=0x0185,
+ lccode=0x185,
linebreak="al",
- unicodeslot=0x0184,
+ unicodeslot=0x184,
},
{
adobename="tonesix",
@@ -4386,17 +4396,17 @@ characters.data={
description="LATIN SMALL LETTER TONE SIX",
direction="l",
linebreak="al",
- uccode=0x0184,
- unicodeslot=0x0185,
+ uccode=0x184,
+ unicodeslot=0x185,
},
{
adobename="Oopen",
category="lu",
description="LATIN CAPITAL LETTER OPEN O",
direction="l",
- lccode=0x0254,
+ lccode=0x254,
linebreak="al",
- unicodeslot=0x0186,
+ unicodeslot=0x186,
},
{
adobename="Chook",
@@ -4404,10 +4414,10 @@ characters.data={
contextname="Chook",
description="LATIN CAPITAL LETTER C WITH HOOK",
direction="l",
- lccode=0x0188,
+ lccode=0x188,
linebreak="al",
- shcode=0x0043,
- unicodeslot=0x0187,
+ shcode=0x43,
+ unicodeslot=0x187,
},
{
adobename="chook",
@@ -4416,9 +4426,9 @@ characters.data={
description="LATIN SMALL LETTER C WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0063,
- uccode=0x0187,
- unicodeslot=0x0188,
+ shcode=0x63,
+ uccode=0x187,
+ unicodeslot=0x188,
},
{
adobename="Dafrican",
@@ -4426,9 +4436,9 @@ characters.data={
contextname="Dafrican",
description="LATIN CAPITAL LETTER AFRICAN D",
direction="l",
- lccode=0x0256,
+ lccode=0x256,
linebreak="al",
- unicodeslot=0x0189,
+ unicodeslot=0x189,
},
{
adobename="Dhook",
@@ -4436,20 +4446,20 @@ characters.data={
contextname="Dhook",
description="LATIN CAPITAL LETTER D WITH HOOK",
direction="l",
- lccode=0x0257,
+ lccode=0x257,
linebreak="al",
- shcode=0x0044,
- unicodeslot=0x018A,
+ shcode=0x44,
+ unicodeslot=0x18A,
},
{
adobename="Dtopbar",
category="lu",
description="LATIN CAPITAL LETTER D WITH TOPBAR",
direction="l",
- lccode=0x018C,
+ lccode=0x18C,
linebreak="al",
- shcode=0x0044,
- unicodeslot=0x018B,
+ shcode=0x44,
+ unicodeslot=0x18B,
},
{
adobename="dtopbar",
@@ -4457,9 +4467,9 @@ characters.data={
description="LATIN SMALL LETTER D WITH TOPBAR",
direction="l",
linebreak="al",
- shcode=0x0064,
- uccode=0x018B,
- unicodeslot=0x018C,
+ shcode=0x64,
+ uccode=0x18B,
+ unicodeslot=0x18C,
},
{
adobename="deltaturned",
@@ -4467,16 +4477,16 @@ characters.data={
description="LATIN SMALL LETTER TURNED DELTA",
direction="l",
linebreak="al",
- unicodeslot=0x018D,
+ unicodeslot=0x18D,
},
{
adobename="Ereversed",
category="lu",
description="LATIN CAPITAL LETTER REVERSED E",
direction="l",
- lccode=0x01DD,
+ lccode=0x1DD,
linebreak="al",
- unicodeslot=0x018E,
+ unicodeslot=0x18E,
},
{
adobename="Schwa",
@@ -4484,18 +4494,18 @@ characters.data={
contextname="Schwa",
description="LATIN CAPITAL LETTER SCHWA",
direction="l",
- lccode=0x0259,
+ lccode=0x259,
linebreak="al",
- unicodeslot=0x018F,
+ unicodeslot=0x18F,
},
{
adobename="Eopen",
category="lu",
description="LATIN CAPITAL LETTER OPEN E",
direction="l",
- lccode=0x025B,
+ lccode=0x25B,
linebreak="al",
- unicodeslot=0x0190,
+ unicodeslot=0x190,
},
{
adobename="Fhook",
@@ -4503,10 +4513,10 @@ characters.data={
contextname="Fhook",
description="LATIN CAPITAL LETTER F WITH HOOK",
direction="l",
- lccode=0x0192,
+ lccode=0x192,
linebreak="al",
- shcode=0x0046,
- unicodeslot=0x0191,
+ shcode=0x46,
+ unicodeslot=0x191,
},
{
adobename="florin",
@@ -4515,9 +4525,9 @@ characters.data={
description="LATIN SMALL LETTER F WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0066,
- uccode=0x0191,
- unicodeslot=0x0192,
+ shcode=0x66,
+ uccode=0x191,
+ unicodeslot=0x192,
},
{
adobename="Ghook",
@@ -4525,19 +4535,19 @@ characters.data={
contextname="Ghook",
description="LATIN CAPITAL LETTER G WITH HOOK",
direction="l",
- lccode=0x0260,
+ lccode=0x260,
linebreak="al",
- shcode=0x0047,
- unicodeslot=0x0193,
+ shcode=0x47,
+ unicodeslot=0x193,
},
{
adobename="Gammaafrican",
category="lu",
description="LATIN CAPITAL LETTER GAMMA",
direction="l",
- lccode=0x0263,
+ lccode=0x263,
linebreak="al",
- unicodeslot=0x0194,
+ unicodeslot=0x194,
},
{
adobename="hv",
@@ -4545,17 +4555,17 @@ characters.data={
description="LATIN SMALL LETTER HV",
direction="l",
linebreak="al",
- uccode=0x01F6,
- unicodeslot=0x0195,
+ uccode=0x1F6,
+ unicodeslot=0x195,
},
{
adobename="Iotaafrican",
category="lu",
description="LATIN CAPITAL LETTER IOTA",
direction="l",
- lccode=0x0269,
+ lccode=0x269,
linebreak="al",
- unicodeslot=0x0196,
+ unicodeslot=0x196,
},
{
adobename="Istroke",
@@ -4563,10 +4573,10 @@ characters.data={
contextname="Istroke",
description="LATIN CAPITAL LETTER I WITH STROKE",
direction="l",
- lccode=0x0268,
+ lccode=0x268,
linebreak="al",
- shcode=0x0049,
- unicodeslot=0x0197,
+ shcode=0x49,
+ unicodeslot=0x197,
},
{
adobename="Khook",
@@ -4574,10 +4584,10 @@ characters.data={
contextname="Khook",
description="LATIN CAPITAL LETTER K WITH HOOK",
direction="l",
- lccode=0x0199,
+ lccode=0x199,
linebreak="al",
- shcode=0x004B,
- unicodeslot=0x0198,
+ shcode=0x4B,
+ unicodeslot=0x198,
},
{
adobename="khook",
@@ -4586,9 +4596,9 @@ characters.data={
description="LATIN SMALL LETTER K WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x006B,
- uccode=0x0198,
- unicodeslot=0x0199,
+ shcode=0x6B,
+ uccode=0x198,
+ unicodeslot=0x199,
},
{
adobename="lbar",
@@ -4597,9 +4607,9 @@ characters.data={
description="LATIN SMALL LETTER L WITH BAR",
direction="l",
linebreak="al",
- shcode=0x006C,
- uccode=0x023D,
- unicodeslot=0x019A,
+ shcode=0x6C,
+ uccode=0x23D,
+ unicodeslot=0x19A,
},
{
adobename="lambdastroke",
@@ -4607,26 +4617,28 @@ characters.data={
description="LATIN SMALL LETTER LAMBDA WITH STROKE",
direction="l",
linebreak="al",
- unicodeslot=0x019B,
+ mathclass="variable",
+ mathname="lambdabar",
+ unicodeslot=0x19B,
},
{
adobename="Mturned",
category="lu",
description="LATIN CAPITAL LETTER TURNED M",
direction="l",
- lccode=0x026F,
+ lccode=0x26F,
linebreak="al",
- unicodeslot=0x019C,
+ unicodeslot=0x19C,
},
{
adobename="Nhookleft",
category="lu",
description="LATIN CAPITAL LETTER N WITH LEFT HOOK",
direction="l",
- lccode=0x0272,
+ lccode=0x272,
linebreak="al",
- shcode=0x004E,
- unicodeslot=0x019D,
+ shcode=0x4E,
+ unicodeslot=0x19D,
},
{
adobename="nlegrightlong",
@@ -4634,19 +4646,19 @@ characters.data={
description="LATIN SMALL LETTER N WITH LONG RIGHT LEG",
direction="l",
linebreak="al",
- shcode=0x006E,
- uccode=0x0220,
- unicodeslot=0x019E,
+ shcode=0x6E,
+ uccode=0x220,
+ unicodeslot=0x19E,
},
{
adobename="Ocenteredtilde",
category="lu",
description="LATIN CAPITAL LETTER O WITH MIDDLE TILDE",
direction="l",
- lccode=0x0275,
+ lccode=0x275,
linebreak="al",
- shcode=0x004F,
- unicodeslot=0x019F,
+ shcode=0x4F,
+ unicodeslot=0x19F,
},
{
adobename="Ohorn",
@@ -4654,11 +4666,11 @@ characters.data={
contextname="Ohorn",
description="LATIN CAPITAL LETTER O WITH HORN",
direction="l",
- lccode=0x01A1,
+ lccode=0x1A1,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x031B },
- unicodeslot=0x01A0,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x31B },
+ unicodeslot=0x1A0,
},
{
adobename="ohorn",
@@ -4667,19 +4679,19 @@ characters.data={
description="LATIN SMALL LETTER O WITH HORN",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x031B },
- uccode=0x01A0,
- unicodeslot=0x01A1,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x31B },
+ uccode=0x1A0,
+ unicodeslot=0x1A1,
},
{
adobename="Oi",
category="lu",
description="LATIN CAPITAL LETTER OI",
direction="l",
- lccode=0x01A3,
+ lccode=0x1A3,
linebreak="al",
- unicodeslot=0x01A2,
+ unicodeslot=0x1A2,
},
{
adobename="oi",
@@ -4687,8 +4699,8 @@ characters.data={
description="LATIN SMALL LETTER OI",
direction="l",
linebreak="al",
- uccode=0x01A2,
- unicodeslot=0x01A3,
+ uccode=0x1A2,
+ unicodeslot=0x1A3,
},
{
adobename="Phook",
@@ -4696,10 +4708,10 @@ characters.data={
contextname="Phook",
description="LATIN CAPITAL LETTER P WITH HOOK",
direction="l",
- lccode=0x01A5,
+ lccode=0x1A5,
linebreak="al",
- shcode=0x0050,
- unicodeslot=0x01A4,
+ shcode=0x50,
+ unicodeslot=0x1A4,
},
{
adobename="phook",
@@ -4708,27 +4720,27 @@ characters.data={
description="LATIN SMALL LETTER P WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0070,
- uccode=0x01A4,
- unicodeslot=0x01A5,
+ shcode=0x70,
+ uccode=0x1A4,
+ unicodeslot=0x1A5,
},
{
adobename="yr",
category="lu",
description="LATIN LETTER YR",
direction="l",
- lccode=0x0280,
+ lccode=0x280,
linebreak="al",
- unicodeslot=0x01A6,
+ unicodeslot=0x1A6,
},
{
adobename="Tonetwo",
category="lu",
description="LATIN CAPITAL LETTER TONE TWO",
direction="l",
- lccode=0x01A8,
+ lccode=0x1A8,
linebreak="al",
- unicodeslot=0x01A7,
+ unicodeslot=0x1A7,
},
{
adobename="tonetwo",
@@ -4736,17 +4748,17 @@ characters.data={
description="LATIN SMALL LETTER TONE TWO",
direction="l",
linebreak="al",
- uccode=0x01A7,
- unicodeslot=0x01A8,
+ uccode=0x1A7,
+ unicodeslot=0x1A8,
},
{
adobename="Esh",
category="lu",
description="LATIN CAPITAL LETTER ESH",
direction="l",
- lccode=0x0283,
+ lccode=0x283,
linebreak="al",
- unicodeslot=0x01A9,
+ unicodeslot=0x1A9,
},
{
adobename="eshreversedloop",
@@ -4754,7 +4766,7 @@ characters.data={
description="LATIN LETTER REVERSED ESH LOOP",
direction="l",
linebreak="al",
- unicodeslot=0x01AA,
+ unicodeslot=0x1AA,
},
{
adobename="tpalatalhook",
@@ -4762,8 +4774,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0074,
- unicodeslot=0x01AB,
+ shcode=0x74,
+ unicodeslot=0x1AB,
},
{
adobename="Thook",
@@ -4771,10 +4783,10 @@ characters.data={
contextname="Thook",
description="LATIN CAPITAL LETTER T WITH HOOK",
direction="l",
- lccode=0x01AD,
+ lccode=0x1AD,
linebreak="al",
- shcode=0x0054,
- unicodeslot=0x01AC,
+ shcode=0x54,
+ unicodeslot=0x1AC,
},
{
adobename="thook",
@@ -4783,19 +4795,19 @@ characters.data={
description="LATIN SMALL LETTER T WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0074,
- uccode=0x01AC,
- unicodeslot=0x01AD,
+ shcode=0x74,
+ uccode=0x1AC,
+ unicodeslot=0x1AD,
},
{
adobename="Tretroflexhook",
category="lu",
description="LATIN CAPITAL LETTER T WITH RETROFLEX HOOK",
direction="l",
- lccode=0x0288,
+ lccode=0x288,
linebreak="al",
- shcode=0x0054,
- unicodeslot=0x01AE,
+ shcode=0x54,
+ unicodeslot=0x1AE,
},
{
adobename="Uhorn",
@@ -4803,11 +4815,11 @@ characters.data={
contextname="Uhorn",
description="LATIN CAPITAL LETTER U WITH HORN",
direction="l",
- lccode=0x01B0,
+ lccode=0x1B0,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x031B },
- unicodeslot=0x01AF,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x31B },
+ unicodeslot=0x1AF,
},
{
adobename="uhorn",
@@ -4816,19 +4828,19 @@ characters.data={
description="LATIN SMALL LETTER U WITH HORN",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x031B },
- uccode=0x01AF,
- unicodeslot=0x01B0,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x31B },
+ uccode=0x1AF,
+ unicodeslot=0x1B0,
},
{
adobename="Upsilonafrican",
category="lu",
description="LATIN CAPITAL LETTER UPSILON",
direction="l",
- lccode=0x028A,
+ lccode=0x28A,
linebreak="al",
- unicodeslot=0x01B1,
+ unicodeslot=0x1B1,
},
{
adobename="Vhook",
@@ -4836,10 +4848,10 @@ characters.data={
contextname="Uhook",
description="LATIN CAPITAL LETTER V WITH HOOK",
direction="l",
- lccode=0x028B,
+ lccode=0x28B,
linebreak="al",
- shcode=0x0056,
- unicodeslot=0x01B2,
+ shcode=0x56,
+ unicodeslot=0x1B2,
},
{
adobename="Yhook",
@@ -4847,10 +4859,10 @@ characters.data={
contextname="Yhook",
description="LATIN CAPITAL LETTER Y WITH HOOK",
direction="l",
- lccode=0x01B4,
+ lccode=0x1B4,
linebreak="al",
- shcode=0x0059,
- unicodeslot=0x01B3,
+ shcode=0x59,
+ unicodeslot=0x1B3,
},
{
adobename="yhook",
@@ -4859,9 +4871,9 @@ characters.data={
description="LATIN SMALL LETTER Y WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0079,
- uccode=0x01B3,
- unicodeslot=0x01B4,
+ shcode=0x79,
+ uccode=0x1B3,
+ unicodeslot=0x1B4,
},
{
adobename="Zstroke",
@@ -4869,10 +4881,10 @@ characters.data={
contextname="Zstroke",
description="LATIN CAPITAL LETTER Z WITH STROKE",
direction="l",
- lccode=0x01B6,
+ lccode=0x1B6,
linebreak="al",
- shcode=0x005A,
- unicodeslot=0x01B5,
+ shcode=0x5A,
+ unicodeslot=0x1B5,
},
{
adobename="zstroke",
@@ -4881,27 +4893,27 @@ characters.data={
description="LATIN SMALL LETTER Z WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x007A,
- uccode=0x01B5,
- unicodeslot=0x01B6,
+ shcode=0x7A,
+ uccode=0x1B5,
+ unicodeslot=0x1B6,
},
{
adobename="Ezh",
category="lu",
description="LATIN CAPITAL LETTER EZH",
direction="l",
- lccode=0x0292,
+ lccode=0x292,
linebreak="al",
- unicodeslot=0x01B7,
+ unicodeslot=0x1B7,
},
{
adobename="Ezhreversed",
category="lu",
description="LATIN CAPITAL LETTER EZH REVERSED",
direction="l",
- lccode=0x01B9,
+ lccode=0x1B9,
linebreak="al",
- unicodeslot=0x01B8,
+ unicodeslot=0x1B8,
},
{
adobename="ezhreversed",
@@ -4909,8 +4921,8 @@ characters.data={
description="LATIN SMALL LETTER EZH REVERSED",
direction="l",
linebreak="al",
- uccode=0x01B8,
- unicodeslot=0x01B9,
+ uccode=0x1B8,
+ unicodeslot=0x1B9,
},
{
adobename="ezhtail",
@@ -4918,8 +4930,8 @@ characters.data={
description="LATIN SMALL LETTER EZH WITH TAIL",
direction="l",
linebreak="al",
- shcode=0x0292,
- unicodeslot=0x01BA,
+ shcode=0x292,
+ unicodeslot=0x1BA,
},
{
adobename="twostroke",
@@ -4927,16 +4939,16 @@ characters.data={
description="LATIN LETTER TWO WITH STROKE",
direction="l",
linebreak="al",
- unicodeslot=0x01BB,
+ unicodeslot=0x1BB,
},
{
adobename="Tonefive",
category="lu",
description="LATIN CAPITAL LETTER TONE FIVE",
direction="l",
- lccode=0x01BD,
+ lccode=0x1BD,
linebreak="al",
- unicodeslot=0x01BC,
+ unicodeslot=0x1BC,
},
{
adobename="tonefive",
@@ -4944,8 +4956,8 @@ characters.data={
description="LATIN SMALL LETTER TONE FIVE",
direction="l",
linebreak="al",
- uccode=0x01BC,
- unicodeslot=0x01BD,
+ uccode=0x1BC,
+ unicodeslot=0x1BD,
},
{
adobename="glottalinvertedstroke",
@@ -4953,7 +4965,7 @@ characters.data={
description="LATIN LETTER INVERTED GLOTTAL STOP WITH STROKE",
direction="l",
linebreak="al",
- unicodeslot=0x01BE,
+ unicodeslot=0x1BE,
},
{
adobename="wynn",
@@ -4961,8 +4973,8 @@ characters.data={
description="LATIN LETTER WYNN",
direction="l",
linebreak="al",
- uccode=0x01F7,
- unicodeslot=0x01BF,
+ uccode=0x1F7,
+ unicodeslot=0x1BF,
},
{
adobename="clickdental",
@@ -4970,7 +4982,7 @@ characters.data={
description="LATIN LETTER DENTAL CLICK",
direction="l",
linebreak="al",
- unicodeslot=0x01C0,
+ unicodeslot=0x1C0,
},
{
adobename="clicklateral",
@@ -4978,7 +4990,7 @@ characters.data={
description="LATIN LETTER LATERAL CLICK",
direction="l",
linebreak="al",
- unicodeslot=0x01C1,
+ unicodeslot=0x1C1,
},
{
adobename="clickalveolar",
@@ -4986,7 +4998,7 @@ characters.data={
description="LATIN LETTER ALVEOLAR CLICK",
direction="l",
linebreak="al",
- unicodeslot=0x01C2,
+ unicodeslot=0x1C2,
},
{
adobename="clickretroflex",
@@ -4994,7 +5006,7 @@ characters.data={
description="LATIN LETTER RETROFLEX CLICK",
direction="l",
linebreak="al",
- unicodeslot=0x01C3,
+ unicodeslot=0x1C3,
},
{
adobename="DZcaron",
@@ -5002,12 +5014,12 @@ characters.data={
contextname="DZcaronligature",
description="LATIN CAPITAL LETTER DZ WITH CARON",
direction="l",
- lccode=0x01C6,
+ lccode=0x1C6,
linebreak="al",
- shcode=0x01F1,
- specials={ "compat", 0x0044, 0x017D },
- uccode=0x01C5,
- unicodeslot=0x01C4,
+ shcode=0x1F1,
+ specials={ "compat", 0x44, 0x17D },
+ uccode=0x1C5,
+ unicodeslot=0x1C4,
},
{
adobename="Dzcaron",
@@ -5015,12 +5027,12 @@ characters.data={
contextname="Dzcaronligature",
description="LATIN CAPITAL LETTER D WITH SMALL LETTER Z WITH CARON",
direction="l",
- lccode=0x01C6,
+ lccode=0x1C6,
linebreak="al",
- shcode=0x01F2,
- specials={ "compat", 0x0044, 0x017E },
- uccode=0x01C4,
- unicodeslot=0x01C5,
+ shcode=0x1F2,
+ specials={ "compat", 0x44, 0x17E },
+ uccode=0x1C4,
+ unicodeslot=0x1C5,
},
{
adobename="dzcaron",
@@ -5029,10 +5041,10 @@ characters.data={
description="LATIN SMALL LETTER DZ WITH CARON",
direction="l",
linebreak="al",
- shcode=0x01F3,
- specials={ "compat", 0x0064, 0x017E },
- uccode=0x01C4,
- unicodeslot=0x01C6,
+ shcode=0x1F3,
+ specials={ "compat", 0x64, 0x17E },
+ uccode=0x1C4,
+ unicodeslot=0x1C6,
},
{
adobename="LJ",
@@ -5040,12 +5052,12 @@ characters.data={
contextname="LJligature",
description="LATIN CAPITAL LETTER LJ",
direction="l",
- lccode=0x01C9,
+ lccode=0x1C9,
linebreak="al",
- shcode={ 0x004C, 0x004A },
- specials={ "compat", 0x004C, 0x004A },
- uccode=0x01C8,
- unicodeslot=0x01C7,
+ shcode={ 0x4C, 0x4A },
+ specials={ "compat", 0x4C, 0x4A },
+ uccode=0x1C8,
+ unicodeslot=0x1C7,
},
{
adobename="Lj",
@@ -5053,12 +5065,12 @@ characters.data={
contextname="Ljligature",
description="LATIN CAPITAL LETTER L WITH SMALL LETTER J",
direction="l",
- lccode=0x01C9,
+ lccode=0x1C9,
linebreak="al",
- shcode={ 0x004C, 0x006A },
- specials={ "compat", 0x004C, 0x006A },
- uccode=0x01C7,
- unicodeslot=0x01C8,
+ shcode={ 0x4C, 0x6A },
+ specials={ "compat", 0x4C, 0x6A },
+ uccode=0x1C7,
+ unicodeslot=0x1C8,
},
{
adobename="lj",
@@ -5067,10 +5079,10 @@ characters.data={
description="LATIN SMALL LETTER LJ",
direction="l",
linebreak="al",
- shcode={ 0x006C, 0x006A },
- specials={ "compat", 0x006C, 0x006A },
- uccode=0x01C7,
- unicodeslot=0x01C9,
+ shcode={ 0x6C, 0x6A },
+ specials={ "compat", 0x6C, 0x6A },
+ uccode=0x1C7,
+ unicodeslot=0x1C9,
},
{
adobename="NJ",
@@ -5078,12 +5090,12 @@ characters.data={
contextname="NJligature",
description="LATIN CAPITAL LETTER NJ",
direction="l",
- lccode=0x01CC,
+ lccode=0x1CC,
linebreak="al",
- shcode={ 0x004E, 0x004A },
- specials={ "compat", 0x004E, 0x004A },
- uccode=0x01CB,
- unicodeslot=0x01CA,
+ shcode={ 0x4E, 0x4A },
+ specials={ "compat", 0x4E, 0x4A },
+ uccode=0x1CB,
+ unicodeslot=0x1CA,
},
{
adobename="Nj",
@@ -5091,12 +5103,12 @@ characters.data={
contextname="Njligature",
description="LATIN CAPITAL LETTER N WITH SMALL LETTER J",
direction="l",
- lccode=0x01CC,
+ lccode=0x1CC,
linebreak="al",
- shcode=0x004E,
- specials={ "compat", 0x004E, 0x006A },
- uccode=0x01CA,
- unicodeslot=0x01CB,
+ shcode=0x4E,
+ specials={ "compat", 0x4E, 0x6A },
+ uccode=0x1CA,
+ unicodeslot=0x1CB,
},
{
adobename="nj",
@@ -5105,10 +5117,10 @@ characters.data={
description="LATIN SMALL LETTER NJ",
direction="l",
linebreak="al",
- shcode={ 0x006E, 0x006A },
- specials={ "compat", 0x006E, 0x006A },
- uccode=0x01CA,
- unicodeslot=0x01CC,
+ shcode={ 0x6E, 0x6A },
+ specials={ "compat", 0x6E, 0x6A },
+ uccode=0x1CA,
+ unicodeslot=0x1CC,
},
{
adobename="Acaron",
@@ -5116,11 +5128,11 @@ characters.data={
contextname="Acaron",
description="LATIN CAPITAL LETTER A WITH CARON",
direction="l",
- lccode=0x01CE,
+ lccode=0x1CE,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x030C },
- unicodeslot=0x01CD,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x30C },
+ unicodeslot=0x1CD,
},
{
adobename="acaron",
@@ -5130,10 +5142,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x030C },
- uccode=0x01CD,
- unicodeslot=0x01CE,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x30C },
+ uccode=0x1CD,
+ unicodeslot=0x1CE,
},
{
adobename="Icaron",
@@ -5141,11 +5153,11 @@ characters.data={
contextname="Icaron",
description="LATIN CAPITAL LETTER I WITH CARON",
direction="l",
- lccode=0x01D0,
+ lccode=0x1D0,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x030C },
- unicodeslot=0x01CF,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x30C },
+ unicodeslot=0x1CF,
},
{
adobename="icaron",
@@ -5155,10 +5167,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x030C },
- uccode=0x01CF,
- unicodeslot=0x01D0,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x30C },
+ uccode=0x1CF,
+ unicodeslot=0x1D0,
},
{
adobename="Ocaron",
@@ -5166,11 +5178,11 @@ characters.data={
contextname="Ocaron",
description="LATIN CAPITAL LETTER O WITH CARON",
direction="l",
- lccode=0x01D2,
+ lccode=0x1D2,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x030C },
- unicodeslot=0x01D1,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x30C },
+ unicodeslot=0x1D1,
},
{
adobename="ocaron",
@@ -5180,10 +5192,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH CARON",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x030C },
- uccode=0x01D1,
- unicodeslot=0x01D2,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x30C },
+ uccode=0x1D1,
+ unicodeslot=0x1D2,
},
{
adobename="Ucaron",
@@ -5191,11 +5203,11 @@ characters.data={
contextname="Ucaron",
description="LATIN CAPITAL LETTER U WITH CARON",
direction="l",
- lccode=0x01D4,
+ lccode=0x1D4,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x030C },
- unicodeslot=0x01D3,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x30C },
+ unicodeslot=0x1D3,
},
{
adobename="ucaron",
@@ -5205,10 +5217,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x030C },
- uccode=0x01D3,
- unicodeslot=0x01D4,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x30C },
+ uccode=0x1D3,
+ unicodeslot=0x1D4,
},
{
adobename="Udieresismacron",
@@ -5216,11 +5228,11 @@ characters.data={
contextname="Udiaeresismacron",
description="LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON",
direction="l",
- lccode=0x01D6,
+ lccode=0x1D6,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x00DC, 0x0304 },
- unicodeslot=0x01D5,
+ shcode=0x55,
+ specials={ "char", 0xDC, 0x304 },
+ unicodeslot=0x1D5,
},
{
adobename="udieresismacron",
@@ -5230,10 +5242,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DIAERESIS AND MACRON",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x00FC, 0x0304 },
- uccode=0x01D5,
- unicodeslot=0x01D6,
+ shcode=0x75,
+ specials={ "char", 0xFC, 0x304 },
+ uccode=0x1D5,
+ unicodeslot=0x1D6,
},
{
adobename="Udieresisacute",
@@ -5241,11 +5253,11 @@ characters.data={
contextname="Udiaeresisacute",
description="LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE",
direction="l",
- lccode=0x01D8,
+ lccode=0x1D8,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x00DC, 0x0301 },
- unicodeslot=0x01D7,
+ shcode=0x55,
+ specials={ "char", 0xDC, 0x301 },
+ unicodeslot=0x1D7,
},
{
adobename="udieresisacute",
@@ -5255,10 +5267,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x00FC, 0x0301 },
- uccode=0x01D7,
- unicodeslot=0x01D8,
+ shcode=0x75,
+ specials={ "char", 0xFC, 0x301 },
+ uccode=0x1D7,
+ unicodeslot=0x1D8,
},
{
adobename="Udieresiscaron",
@@ -5266,11 +5278,11 @@ characters.data={
contextname="Udiaeresiscaron",
description="LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON",
direction="l",
- lccode=0x01DA,
+ lccode=0x1DA,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x00DC, 0x030C },
- unicodeslot=0x01D9,
+ shcode=0x55,
+ specials={ "char", 0xDC, 0x30C },
+ unicodeslot=0x1D9,
},
{
adobename="udieresiscaron",
@@ -5280,10 +5292,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DIAERESIS AND CARON",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x00FC, 0x030C },
- uccode=0x01D9,
- unicodeslot=0x01DA,
+ shcode=0x75,
+ specials={ "char", 0xFC, 0x30C },
+ uccode=0x1D9,
+ unicodeslot=0x1DA,
},
{
adobename="Udieresisgrave",
@@ -5291,11 +5303,11 @@ characters.data={
contextname="Udiaeresisgrave",
description="LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE",
direction="l",
- lccode=0x01DC,
+ lccode=0x1DC,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x00DC, 0x0300 },
- unicodeslot=0x01DB,
+ shcode=0x55,
+ specials={ "char", 0xDC, 0x300 },
+ unicodeslot=0x1DB,
},
{
adobename="udieresisgrave",
@@ -5305,10 +5317,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x00FC, 0x0300 },
- uccode=0x01DB,
- unicodeslot=0x01DC,
+ shcode=0x75,
+ specials={ "char", 0xFC, 0x300 },
+ uccode=0x1DB,
+ unicodeslot=0x1DC,
},
{
adobename="eturned",
@@ -5316,8 +5328,8 @@ characters.data={
description="LATIN SMALL LETTER TURNED E",
direction="l",
linebreak="al",
- uccode=0x018E,
- unicodeslot=0x01DD,
+ uccode=0x18E,
+ unicodeslot=0x1DD,
},
{
adobename="Adieresismacron",
@@ -5325,11 +5337,11 @@ characters.data={
contextname="Adiaeresismacron",
description="LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON",
direction="l",
- lccode=0x01DF,
+ lccode=0x1DF,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x00C4, 0x0304 },
- unicodeslot=0x01DE,
+ shcode=0x41,
+ specials={ "char", 0xC4, 0x304 },
+ unicodeslot=0x1DE,
},
{
adobename="adieresismacron",
@@ -5338,10 +5350,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH DIAERESIS AND MACRON",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x00E4, 0x0304 },
- uccode=0x01DE,
- unicodeslot=0x01DF,
+ shcode=0x61,
+ specials={ "char", 0xE4, 0x304 },
+ uccode=0x1DE,
+ unicodeslot=0x1DF,
},
{
adobename="Adotmacron",
@@ -5349,11 +5361,11 @@ characters.data={
contextname="Adotaccentmacron",
description="LATIN CAPITAL LETTER A WITH DOT ABOVE AND MACRON",
direction="l",
- lccode=0x01E1,
+ lccode=0x1E1,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0226, 0x0304 },
- unicodeslot=0x01E0,
+ shcode=0x41,
+ specials={ "char", 0x226, 0x304 },
+ unicodeslot=0x1E0,
},
{
adobename="adotmacron",
@@ -5362,10 +5374,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH DOT ABOVE AND MACRON",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0227, 0x0304 },
- uccode=0x01E0,
- unicodeslot=0x01E1,
+ shcode=0x61,
+ specials={ "char", 0x227, 0x304 },
+ uccode=0x1E0,
+ unicodeslot=0x1E1,
},
{
adobename="AEmacron",
@@ -5373,11 +5385,11 @@ characters.data={
contextname="AEmacron",
description="LATIN CAPITAL LETTER AE WITH MACRON",
direction="l",
- lccode=0x01E3,
+ lccode=0x1E3,
linebreak="al",
- shcode=0x00C6,
- specials={ "char", 0x00C6, 0x0304 },
- unicodeslot=0x01E2,
+ shcode=0xC6,
+ specials={ "char", 0xC6, 0x304 },
+ unicodeslot=0x1E2,
},
{
adobename="aemacron",
@@ -5386,10 +5398,10 @@ characters.data={
description="LATIN SMALL LETTER AE WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x00E6,
- specials={ "char", 0x00E6, 0x0304 },
- uccode=0x01E2,
- unicodeslot=0x01E3,
+ shcode=0xE6,
+ specials={ "char", 0xE6, 0x304 },
+ uccode=0x1E2,
+ unicodeslot=0x1E3,
},
{
adobename="Gstroke",
@@ -5397,10 +5409,10 @@ characters.data={
contextname="Gstroke",
description="LATIN CAPITAL LETTER G WITH STROKE",
direction="l",
- lccode=0x01E5,
+ lccode=0x1E5,
linebreak="al",
- shcode=0x0047,
- unicodeslot=0x01E4,
+ shcode=0x47,
+ unicodeslot=0x1E4,
},
{
adobename="gstroke",
@@ -5409,9 +5421,9 @@ characters.data={
description="LATIN SMALL LETTER G WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0067,
- uccode=0x01E4,
- unicodeslot=0x01E5,
+ shcode=0x67,
+ uccode=0x1E4,
+ unicodeslot=0x1E5,
},
{
adobename="Gcaron",
@@ -5419,11 +5431,11 @@ characters.data={
contextname="Gcaron",
description="LATIN CAPITAL LETTER G WITH CARON",
direction="l",
- lccode=0x01E7,
+ lccode=0x1E7,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x030C },
- unicodeslot=0x01E6,
+ shcode=0x47,
+ specials={ "char", 0x47, 0x30C },
+ unicodeslot=0x1E6,
},
{
adobename="gcaron",
@@ -5432,10 +5444,10 @@ characters.data={
description="LATIN SMALL LETTER G WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x030C },
- uccode=0x01E6,
- unicodeslot=0x01E7,
+ shcode=0x67,
+ specials={ "char", 0x67, 0x30C },
+ uccode=0x1E6,
+ unicodeslot=0x1E7,
},
{
adobename="Kcaron",
@@ -5443,11 +5455,11 @@ characters.data={
contextname="Kcaron",
description="LATIN CAPITAL LETTER K WITH CARON",
direction="l",
- lccode=0x01E9,
+ lccode=0x1E9,
linebreak="al",
- shcode=0x004B,
- specials={ "char", 0x004B, 0x030C },
- unicodeslot=0x01E8,
+ shcode=0x4B,
+ specials={ "char", 0x4B, 0x30C },
+ unicodeslot=0x1E8,
},
{
adobename="kcaron",
@@ -5456,10 +5468,10 @@ characters.data={
description="LATIN SMALL LETTER K WITH CARON",
direction="l",
linebreak="al",
- shcode=0x006B,
- specials={ "char", 0x006B, 0x030C },
- uccode=0x01E8,
- unicodeslot=0x01E9,
+ shcode=0x6B,
+ specials={ "char", 0x6B, 0x30C },
+ uccode=0x1E8,
+ unicodeslot=0x1E9,
},
{
adobename="Oogonek",
@@ -5467,11 +5479,11 @@ characters.data={
contextname="Oogonek",
description="LATIN CAPITAL LETTER O WITH OGONEK",
direction="l",
- lccode=0x01EB,
+ lccode=0x1EB,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0328 },
- unicodeslot=0x01EA,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x328 },
+ unicodeslot=0x1EA,
},
{
adobename="oogonek",
@@ -5480,10 +5492,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH OGONEK",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0328 },
- uccode=0x01EA,
- unicodeslot=0x01EB,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x328 },
+ uccode=0x1EA,
+ unicodeslot=0x1EB,
},
{
adobename="Oogonekmacron",
@@ -5491,11 +5503,11 @@ characters.data={
contextname="Oogonekmacron",
description="LATIN CAPITAL LETTER O WITH OGONEK AND MACRON",
direction="l",
- lccode=0x01ED,
+ lccode=0x1ED,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x01EA, 0x0304 },
- unicodeslot=0x01EC,
+ shcode=0x4F,
+ specials={ "char", 0x1EA, 0x304 },
+ unicodeslot=0x1EC,
},
{
adobename="oogonekmacron",
@@ -5504,21 +5516,21 @@ characters.data={
description="LATIN SMALL LETTER O WITH OGONEK AND MACRON",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x01EB, 0x0304 },
- uccode=0x01EC,
- unicodeslot=0x01ED,
+ shcode=0x6F,
+ specials={ "char", 0x1EB, 0x304 },
+ uccode=0x1EC,
+ unicodeslot=0x1ED,
},
{
adobename="Ezhcaron",
category="lu",
description="LATIN CAPITAL LETTER EZH WITH CARON",
direction="l",
- lccode=0x01EF,
+ lccode=0x1EF,
linebreak="al",
- shcode=0x01B7,
- specials={ "char", 0x01B7, 0x030C },
- unicodeslot=0x01EE,
+ shcode=0x1B7,
+ specials={ "char", 0x1B7, 0x30C },
+ unicodeslot=0x1EE,
},
{
adobename="ezhcaron",
@@ -5526,10 +5538,10 @@ characters.data={
description="LATIN SMALL LETTER EZH WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0292,
- specials={ "char", 0x0292, 0x030C },
- uccode=0x01EE,
- unicodeslot=0x01EF,
+ shcode=0x292,
+ specials={ "char", 0x292, 0x30C },
+ uccode=0x1EE,
+ unicodeslot=0x1EF,
},
{
adobename="jcaron",
@@ -5538,9 +5550,9 @@ characters.data={
description="LATIN SMALL LETTER J WITH CARON",
direction="l",
linebreak="al",
- shcode=0x006A,
- specials={ "char", 0x006A, 0x030C },
- unicodeslot=0x01F0,
+ shcode=0x6A,
+ specials={ "char", 0x6A, 0x30C },
+ unicodeslot=0x1F0,
},
{
adobename="DZ",
@@ -5548,11 +5560,11 @@ characters.data={
contextname="DZligature",
description="LATIN CAPITAL LETTER DZ",
direction="l",
- lccode=0x01F3,
+ lccode=0x1F3,
linebreak="al",
- specials={ "compat", 0x0044, 0x005A },
- uccode=0x01F2,
- unicodeslot=0x01F1,
+ specials={ "compat", 0x44, 0x5A },
+ uccode=0x1F2,
+ unicodeslot=0x1F1,
},
{
adobename="Dz",
@@ -5560,12 +5572,12 @@ characters.data={
contextname="Dzligature",
description="LATIN CAPITAL LETTER D WITH SMALL LETTER Z",
direction="l",
- lccode=0x01F3,
+ lccode=0x1F3,
linebreak="al",
- shcode=0x0044,
- specials={ "compat", 0x0044, 0x007A },
- uccode=0x01F1,
- unicodeslot=0x01F2,
+ shcode=0x44,
+ specials={ "compat", 0x44, 0x7A },
+ uccode=0x1F1,
+ unicodeslot=0x1F2,
},
{
adobename="dz",
@@ -5574,9 +5586,9 @@ characters.data={
description="LATIN SMALL LETTER DZ",
direction="l",
linebreak="al",
- specials={ "compat", 0x0064, 0x007A },
- uccode=0x01F1,
- unicodeslot=0x01F3,
+ specials={ "compat", 0x64, 0x7A },
+ uccode=0x1F1,
+ unicodeslot=0x1F3,
},
{
adobename="Gacute",
@@ -5584,11 +5596,11 @@ characters.data={
contextname="Gacute",
description="LATIN CAPITAL LETTER G WITH ACUTE",
direction="l",
- lccode=0x01F5,
+ lccode=0x1F5,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x0301 },
- unicodeslot=0x01F4,
+ shcode=0x47,
+ specials={ "char", 0x47, 0x301 },
+ unicodeslot=0x1F4,
},
{
adobename="gacute",
@@ -5597,37 +5609,37 @@ characters.data={
description="LATIN SMALL LETTER G WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x0301 },
- uccode=0x01F4,
- unicodeslot=0x01F5,
+ shcode=0x67,
+ specials={ "char", 0x67, 0x301 },
+ uccode=0x1F4,
+ unicodeslot=0x1F5,
},
{
category="lu",
description="LATIN CAPITAL LETTER HWAIR",
direction="l",
- lccode=0x0195,
+ lccode=0x195,
linebreak="al",
- unicodeslot=0x01F6,
+ unicodeslot=0x1F6,
},
{
category="lu",
description="LATIN CAPITAL LETTER WYNN",
direction="l",
- lccode=0x01BF,
+ lccode=0x1BF,
linebreak="al",
- unicodeslot=0x01F7,
+ unicodeslot=0x1F7,
},
{
category="lu",
contextname="Ngrave",
description="LATIN CAPITAL LETTER N WITH GRAVE",
direction="l",
- lccode=0x01F9,
+ lccode=0x1F9,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0300 },
- unicodeslot=0x01F8,
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x300 },
+ unicodeslot=0x1F8,
},
{
category="ll",
@@ -5635,10 +5647,10 @@ characters.data={
description="LATIN SMALL LETTER N WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0300 },
- uccode=0x01F8,
- unicodeslot=0x01F9,
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x300 },
+ uccode=0x1F8,
+ unicodeslot=0x1F9,
},
{
adobename="Aringacute",
@@ -5646,11 +5658,11 @@ characters.data={
contextname="Aringacute",
description="LATIN CAPITAL LETTER A WITH RING ABOVE AND ACUTE",
direction="l",
- lccode=0x01FB,
+ lccode=0x1FB,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x00C5, 0x0301 },
- unicodeslot=0x01FA,
+ shcode=0x41,
+ specials={ "char", 0xC5, 0x301 },
+ unicodeslot=0x1FA,
},
{
adobename="aringacute",
@@ -5659,10 +5671,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x00E5, 0x0301 },
- uccode=0x01FA,
- unicodeslot=0x01FB,
+ shcode=0x61,
+ specials={ "char", 0xE5, 0x301 },
+ uccode=0x1FA,
+ unicodeslot=0x1FB,
},
{
adobename="AEacute",
@@ -5670,11 +5682,11 @@ characters.data={
contextname="AEacute",
description="LATIN CAPITAL LETTER AE WITH ACUTE",
direction="l",
- lccode=0x01FD,
+ lccode=0x1FD,
linebreak="al",
- shcode=0x00C6,
- specials={ "char", 0x00C6, 0x0301 },
- unicodeslot=0x01FC,
+ shcode=0xC6,
+ specials={ "char", 0xC6, 0x301 },
+ unicodeslot=0x1FC,
},
{
adobename="aeacute",
@@ -5683,10 +5695,10 @@ characters.data={
description="LATIN SMALL LETTER AE WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x00E6,
- specials={ "char", 0x00E6, 0x0301 },
- uccode=0x01FC,
- unicodeslot=0x01FD,
+ shcode=0xE6,
+ specials={ "char", 0xE6, 0x301 },
+ uccode=0x1FC,
+ unicodeslot=0x1FD,
},
{
adobename="Ostrokeacute",
@@ -5694,11 +5706,11 @@ characters.data={
contextname="Ostrokeacute",
description="LATIN CAPITAL LETTER O WITH STROKE AND ACUTE",
direction="l",
- lccode=0x01FF,
+ lccode=0x1FF,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D8, 0x0301 },
- unicodeslot=0x01FE,
+ shcode=0x4F,
+ specials={ "char", 0xD8, 0x301 },
+ unicodeslot=0x1FE,
},
{
adobename="ostrokeacute",
@@ -5707,10 +5719,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH STROKE AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F8, 0x0301 },
- uccode=0x01FE,
- unicodeslot=0x01FF,
+ shcode=0x6F,
+ specials={ "char", 0xF8, 0x301 },
+ uccode=0x1FE,
+ unicodeslot=0x1FF,
},
{
adobename="Adblgrave",
@@ -5718,11 +5730,11 @@ characters.data={
contextname="Adoublegrave",
description="LATIN CAPITAL LETTER A WITH DOUBLE GRAVE",
direction="l",
- lccode=0x0201,
+ lccode=0x201,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x030F },
- unicodeslot=0x0200,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x30F },
+ unicodeslot=0x200,
},
{
adobename="adblgrave",
@@ -5731,10 +5743,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH DOUBLE GRAVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x030F },
- uccode=0x0200,
- unicodeslot=0x0201,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x30F },
+ uccode=0x200,
+ unicodeslot=0x201,
},
{
adobename="Ainvertedbreve",
@@ -5742,11 +5754,11 @@ characters.data={
contextname="Ainvertedbreve",
description="LATIN CAPITAL LETTER A WITH INVERTED BREVE",
direction="l",
- lccode=0x0203,
+ lccode=0x203,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0311 },
- unicodeslot=0x0202,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x311 },
+ unicodeslot=0x202,
},
{
adobename="ainvertedbreve",
@@ -5755,10 +5767,10 @@ characters.data={
description="LATIN SMALL LETTER A WITH INVERTED BREVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0311 },
- uccode=0x0202,
- unicodeslot=0x0203,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x311 },
+ uccode=0x202,
+ unicodeslot=0x203,
},
{
adobename="Edblgrave",
@@ -5766,11 +5778,11 @@ characters.data={
contextname="Edoublegrave",
description="LATIN CAPITAL LETTER E WITH DOUBLE GRAVE",
direction="l",
- lccode=0x0205,
+ lccode=0x205,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x030F },
- unicodeslot=0x0204,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x30F },
+ unicodeslot=0x204,
},
{
adobename="edblgrave",
@@ -5779,10 +5791,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH DOUBLE GRAVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x030F },
- uccode=0x0204,
- unicodeslot=0x0205,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x30F },
+ uccode=0x204,
+ unicodeslot=0x205,
},
{
adobename="Einvertedbreve",
@@ -5790,11 +5802,11 @@ characters.data={
contextname="Einvertedbreve",
description="LATIN CAPITAL LETTER E WITH INVERTED BREVE",
direction="l",
- lccode=0x0207,
+ lccode=0x207,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0311 },
- unicodeslot=0x0206,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x311 },
+ unicodeslot=0x206,
},
{
adobename="einvertedbreve",
@@ -5803,10 +5815,10 @@ characters.data={
description="LATIN SMALL LETTER E WITH INVERTED BREVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0311 },
- uccode=0x0206,
- unicodeslot=0x0207,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x311 },
+ uccode=0x206,
+ unicodeslot=0x207,
},
{
adobename="Idblgrave",
@@ -5814,11 +5826,11 @@ characters.data={
contextname="Idoublegrave",
description="LATIN CAPITAL LETTER I WITH DOUBLE GRAVE",
direction="l",
- lccode=0x0209,
+ lccode=0x209,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x030F },
- unicodeslot=0x0208,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x30F },
+ unicodeslot=0x208,
},
{
adobename="idblgrave",
@@ -5827,10 +5839,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH DOUBLE GRAVE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x030F },
- uccode=0x0208,
- unicodeslot=0x0209,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x30F },
+ uccode=0x208,
+ unicodeslot=0x209,
},
{
adobename="Iinvertedbreve",
@@ -5838,11 +5850,11 @@ characters.data={
contextname="Iinvertedbreve",
description="LATIN CAPITAL LETTER I WITH INVERTED BREVE",
direction="l",
- lccode=0x020B,
+ lccode=0x20B,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0311 },
- unicodeslot=0x020A,
+ shcode=0x49,
+ specials={ "char", 0x49, 0x311 },
+ unicodeslot=0x20A,
},
{
adobename="iinvertedbreve",
@@ -5851,10 +5863,10 @@ characters.data={
description="LATIN SMALL LETTER I WITH INVERTED BREVE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0311 },
- uccode=0x020A,
- unicodeslot=0x020B,
+ shcode=0x69,
+ specials={ "char", 0x69, 0x311 },
+ uccode=0x20A,
+ unicodeslot=0x20B,
},
{
adobename="Odblgrave",
@@ -5862,11 +5874,11 @@ characters.data={
contextname="Odoublegrave",
description="LATIN CAPITAL LETTER O WITH DOUBLE GRAVE",
direction="l",
- lccode=0x020D,
+ lccode=0x20D,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x030F },
- unicodeslot=0x020C,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x30F },
+ unicodeslot=0x20C,
},
{
adobename="odblgrave",
@@ -5875,10 +5887,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH DOUBLE GRAVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x030F },
- uccode=0x020C,
- unicodeslot=0x020D,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x30F },
+ uccode=0x20C,
+ unicodeslot=0x20D,
},
{
adobename="Oinvertedbreve",
@@ -5886,11 +5898,11 @@ characters.data={
contextname="Oinvertedbreve",
description="LATIN CAPITAL LETTER O WITH INVERTED BREVE",
direction="l",
- lccode=0x020F,
+ lccode=0x20F,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0311 },
- unicodeslot=0x020E,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x311 },
+ unicodeslot=0x20E,
},
{
adobename="oinvertedbreve",
@@ -5899,10 +5911,10 @@ characters.data={
description="LATIN SMALL LETTER O WITH INVERTED BREVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0311 },
- uccode=0x020E,
- unicodeslot=0x020F,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x311 },
+ uccode=0x20E,
+ unicodeslot=0x20F,
},
{
adobename="Rdblgrave",
@@ -5910,11 +5922,11 @@ characters.data={
contextname="Rdoublegrave",
description="LATIN CAPITAL LETTER R WITH DOUBLE GRAVE",
direction="l",
- lccode=0x0211,
+ lccode=0x211,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x030F },
- unicodeslot=0x0210,
+ shcode=0x52,
+ specials={ "char", 0x52, 0x30F },
+ unicodeslot=0x210,
},
{
adobename="rdblgrave",
@@ -5923,10 +5935,10 @@ characters.data={
description="LATIN SMALL LETTER R WITH DOUBLE GRAVE",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x030F },
- uccode=0x0210,
- unicodeslot=0x0211,
+ shcode=0x72,
+ specials={ "char", 0x72, 0x30F },
+ uccode=0x210,
+ unicodeslot=0x211,
},
{
adobename="Rinvertedbreve",
@@ -5934,11 +5946,11 @@ characters.data={
contextname="Rinvertedbreve",
description="LATIN CAPITAL LETTER R WITH INVERTED BREVE",
direction="l",
- lccode=0x0213,
+ lccode=0x213,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x0311 },
- unicodeslot=0x0212,
+ shcode=0x52,
+ specials={ "char", 0x52, 0x311 },
+ unicodeslot=0x212,
},
{
adobename="rinvertedbreve",
@@ -5947,10 +5959,10 @@ characters.data={
description="LATIN SMALL LETTER R WITH INVERTED BREVE",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x0311 },
- uccode=0x0212,
- unicodeslot=0x0213,
+ shcode=0x72,
+ specials={ "char", 0x72, 0x311 },
+ uccode=0x212,
+ unicodeslot=0x213,
},
{
adobename="Udblgrave",
@@ -5958,11 +5970,11 @@ characters.data={
contextname="Udoublegrave",
description="LATIN CAPITAL LETTER U WITH DOUBLE GRAVE",
direction="l",
- lccode=0x0215,
+ lccode=0x215,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x030F },
- unicodeslot=0x0214,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x30F },
+ unicodeslot=0x214,
},
{
adobename="udblgrave",
@@ -5971,10 +5983,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH DOUBLE GRAVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x030F },
- uccode=0x0214,
- unicodeslot=0x0215,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x30F },
+ uccode=0x214,
+ unicodeslot=0x215,
},
{
adobename="Uinvertedbreve",
@@ -5982,11 +5994,11 @@ characters.data={
contextname="Uinvertedbreve",
description="LATIN CAPITAL LETTER U WITH INVERTED BREVE",
direction="l",
- lccode=0x0217,
+ lccode=0x217,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0311 },
- unicodeslot=0x0216,
+ shcode=0x55,
+ specials={ "char", 0x55, 0x311 },
+ unicodeslot=0x216,
},
{
adobename="uinvertedbreve",
@@ -5995,10 +6007,10 @@ characters.data={
description="LATIN SMALL LETTER U WITH INVERTED BREVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0311 },
- uccode=0x0216,
- unicodeslot=0x0217,
+ shcode=0x75,
+ specials={ "char", 0x75, 0x311 },
+ uccode=0x216,
+ unicodeslot=0x217,
},
{
adobename="Scommaaccent",
@@ -6006,11 +6018,11 @@ characters.data={
contextname="Scommaaccent",
description="LATIN CAPITAL LETTER S WITH COMMA BELOW",
direction="l",
- lccode=0x0219,
+ lccode=0x219,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x0326 },
- unicodeslot=0x0218,
+ shcode=0x53,
+ specials={ "char", 0x53, 0x326 },
+ unicodeslot=0x218,
},
{
adobename="scommaaccent",
@@ -6019,21 +6031,21 @@ characters.data={
description="LATIN SMALL LETTER S WITH COMMA BELOW",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x0326 },
- uccode=0x0218,
- unicodeslot=0x0219,
+ shcode=0x73,
+ specials={ "char", 0x73, 0x326 },
+ uccode=0x218,
+ unicodeslot=0x219,
},
{
category="lu",
contextname="Tcommaaccent",
description="LATIN CAPITAL LETTER T WITH COMMA BELOW",
direction="l",
- lccode=0x021B,
+ lccode=0x21B,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x0326 },
- unicodeslot=0x021A,
+ shcode=0x54,
+ specials={ "char", 0x54, 0x326 },
+ unicodeslot=0x21A,
},
{
category="ll",
@@ -6041,37 +6053,37 @@ characters.data={
description="LATIN SMALL LETTER T WITH COMMA BELOW",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x0326 },
- uccode=0x021A,
- unicodeslot=0x021B,
+ shcode=0x74,
+ specials={ "char", 0x74, 0x326 },
+ uccode=0x21A,
+ unicodeslot=0x21B,
},
{
category="lu",
description="LATIN CAPITAL LETTER YOGH",
direction="l",
- lccode=0x021D,
+ lccode=0x21D,
linebreak="al",
- unicodeslot=0x021C,
+ unicodeslot=0x21C,
},
{
category="ll",
description="LATIN SMALL LETTER YOGH",
direction="l",
linebreak="al",
- uccode=0x021C,
- unicodeslot=0x021D,
+ uccode=0x21C,
+ unicodeslot=0x21D,
},
{
category="lu",
contextname="Hcaron",
description="LATIN CAPITAL LETTER H WITH CARON",
direction="l",
- lccode=0x021F,
+ lccode=0x21F,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x030C },
- unicodeslot=0x021E,
+ shcode=0x48,
+ specials={ "char", 0x48, 0x30C },
+ unicodeslot=0x21E,
},
{
category="ll",
@@ -6079,19 +6091,19 @@ characters.data={
description="LATIN SMALL LETTER H WITH CARON",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x030C },
- uccode=0x021E,
- unicodeslot=0x021F,
+ shcode=0x68,
+ specials={ "char", 0x68, 0x30C },
+ uccode=0x21E,
+ unicodeslot=0x21F,
},
{
category="lu",
description="LATIN CAPITAL LETTER N WITH LONG RIGHT LEG",
direction="l",
- lccode=0x019E,
+ lccode=0x19E,
linebreak="al",
- shcode=0x004E,
- unicodeslot=0x0220,
+ shcode=0x4E,
+ unicodeslot=0x220,
},
{
category="ll",
@@ -6099,34 +6111,34 @@ characters.data={
description="LATIN SMALL LETTER D WITH CURL",
direction="l",
linebreak="al",
- shcode=0x0064,
- unicodeslot=0x0221,
+ shcode=0x64,
+ unicodeslot=0x221,
},
{
category="lu",
description="LATIN CAPITAL LETTER OU",
direction="l",
- lccode=0x0223,
+ lccode=0x223,
linebreak="al",
- unicodeslot=0x0222,
+ unicodeslot=0x222,
},
{
category="ll",
description="LATIN SMALL LETTER OU",
direction="l",
linebreak="al",
- uccode=0x0222,
- unicodeslot=0x0223,
+ uccode=0x222,
+ unicodeslot=0x223,
},
{
category="lu",
contextname="Zhook",
description="LATIN CAPITAL LETTER Z WITH HOOK",
direction="l",
- lccode=0x0225,
+ lccode=0x225,
linebreak="al",
- shcode=0x005A,
- unicodeslot=0x0224,
+ shcode=0x5A,
+ unicodeslot=0x224,
},
{
category="ll",
@@ -6134,20 +6146,20 @@ characters.data={
description="LATIN SMALL LETTER Z WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x007A,
- uccode=0x0224,
- unicodeslot=0x0225,
+ shcode=0x7A,
+ uccode=0x224,
+ unicodeslot=0x225,
},
{
category="lu",
contextname="Adotaccent",
description="LATIN CAPITAL LETTER A WITH DOT ABOVE",
direction="l",
- lccode=0x0227,
+ lccode=0x227,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0307 },
- unicodeslot=0x0226,
+ shcode=0x41,
+ specials={ "char", 0x41, 0x307 },
+ unicodeslot=0x226,
},
{
category="ll",
@@ -6155,21 +6167,21 @@ characters.data={
description="LATIN SMALL LETTER A WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0307 },
- uccode=0x0226,
- unicodeslot=0x0227,
+ shcode=0x61,
+ specials={ "char", 0x61, 0x307 },
+ uccode=0x226,
+ unicodeslot=0x227,
},
{
category="lu",
contextname="Ecedilla",
description="LATIN CAPITAL LETTER E WITH CEDILLA",
direction="l",
- lccode=0x0229,
+ lccode=0x229,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0327 },
- unicodeslot=0x0228,
+ shcode=0x45,
+ specials={ "char", 0x45, 0x327 },
+ unicodeslot=0x228,
},
{
category="ll",
@@ -6177,21 +6189,21 @@ characters.data={
description="LATIN SMALL LETTER E WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0327 },
- uccode=0x0228,
- unicodeslot=0x0229,
+ shcode=0x65,
+ specials={ "char", 0x65, 0x327 },
+ uccode=0x228,
+ unicodeslot=0x229,
},
{
category="lu",
contextname="Odiaeresismacron",
description="LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON",
direction="l",
- lccode=0x022B,
+ lccode=0x22B,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D6, 0x0304 },
- unicodeslot=0x022A,
+ shcode=0x4F,
+ specials={ "char", 0xD6, 0x304 },
+ unicodeslot=0x22A,
},
{
category="ll",
@@ -6199,21 +6211,21 @@ characters.data={
description="LATIN SMALL LETTER O WITH DIAERESIS AND MACRON",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F6, 0x0304 },
- uccode=0x022A,
- unicodeslot=0x022B,
+ shcode=0x6F,
+ specials={ "char", 0xF6, 0x304 },
+ uccode=0x22A,
+ unicodeslot=0x22B,
},
{
category="lu",
contextname="Otildemacron",
description="LATIN CAPITAL LETTER O WITH TILDE AND MACRON",
direction="l",
- lccode=0x022D,
+ lccode=0x22D,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D5, 0x0304 },
- unicodeslot=0x022C,
+ shcode=0x4F,
+ specials={ "char", 0xD5, 0x304 },
+ unicodeslot=0x22C,
},
{
category="ll",
@@ -6221,21 +6233,21 @@ characters.data={
description="LATIN SMALL LETTER O WITH TILDE AND MACRON",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F5, 0x0304 },
- uccode=0x022C,
- unicodeslot=0x022D,
+ shcode=0x6F,
+ specials={ "char", 0xF5, 0x304 },
+ uccode=0x22C,
+ unicodeslot=0x22D,
},
{
category="lu",
contextname="Odotaccent",
description="LATIN CAPITAL LETTER O WITH DOT ABOVE",
direction="l",
- lccode=0x022F,
+ lccode=0x22F,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0307 },
- unicodeslot=0x022E,
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x307 },
+ unicodeslot=0x22E,
},
{
category="ll",
@@ -6243,21 +6255,21 @@ characters.data={
description="LATIN SMALL LETTER O WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0307 },
- uccode=0x022E,
- unicodeslot=0x022F,
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x307 },
+ uccode=0x22E,
+ unicodeslot=0x22F,
},
{
category="lu",
contextname="Odotaccentmacron",
description="LATIN CAPITAL LETTER O WITH DOT ABOVE AND MACRON",
direction="l",
- lccode=0x0231,
+ lccode=0x231,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x022E, 0x0304 },
- unicodeslot=0x0230,
+ shcode=0x4F,
+ specials={ "char", 0x22E, 0x304 },
+ unicodeslot=0x230,
},
{
category="ll",
@@ -6265,21 +6277,21 @@ characters.data={
description="LATIN SMALL LETTER O WITH DOT ABOVE AND MACRON",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x022F, 0x0304 },
- uccode=0x0230,
- unicodeslot=0x0231,
+ shcode=0x6F,
+ specials={ "char", 0x22F, 0x304 },
+ uccode=0x230,
+ unicodeslot=0x231,
},
{
category="lu",
contextname="Ymacron",
description="LATIN CAPITAL LETTER Y WITH MACRON",
direction="l",
- lccode=0x0233,
+ lccode=0x233,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0304 },
- unicodeslot=0x0232,
+ shcode=0x59,
+ specials={ "char", 0x59, 0x304 },
+ unicodeslot=0x232,
},
{
category="ll",
@@ -6287,10 +6299,10 @@ characters.data={
description="LATIN SMALL LETTER Y WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0304 },
- uccode=0x0232,
- unicodeslot=0x0233,
+ shcode=0x79,
+ specials={ "char", 0x79, 0x304 },
+ uccode=0x232,
+ unicodeslot=0x233,
},
{
category="ll",
@@ -6298,8 +6310,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH CURL",
direction="l",
linebreak="al",
- shcode=0x006C,
- unicodeslot=0x0234,
+ shcode=0x6C,
+ unicodeslot=0x234,
},
{
category="ll",
@@ -6307,8 +6319,8 @@ characters.data={
description="LATIN SMALL LETTER N WITH CURL",
direction="l",
linebreak="al",
- shcode=0x006E,
- unicodeslot=0x0235,
+ shcode=0x6E,
+ unicodeslot=0x235,
},
{
category="ll",
@@ -6316,8 +6328,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH CURL",
direction="l",
linebreak="al",
- shcode=0x0074,
- unicodeslot=0x0236,
+ shcode=0x74,
+ unicodeslot=0x236,
},
{
category="ll",
@@ -6325,21 +6337,21 @@ characters.data={
description="LATIN SMALL LETTER DOTLESS J",
direction="l",
linebreak="al",
- unicodeslot=0x0237,
+ unicodeslot=0x237,
},
{
category="ll",
description="LATIN SMALL LETTER DB DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x0238,
+ unicodeslot=0x238,
},
{
category="ll",
description="LATIN SMALL LETTER QP DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x0239,
+ unicodeslot=0x239,
},
{
category="lu",
@@ -6348,18 +6360,18 @@ characters.data={
direction="l",
lccode=0x2C65,
linebreak="al",
- shcode=0x0041,
- unicodeslot=0x023A,
+ shcode=0x41,
+ unicodeslot=0x23A,
},
{
category="lu",
contextname="Cstroke",
description="LATIN CAPITAL LETTER C WITH STROKE",
direction="l",
- lccode=0x023C,
+ lccode=0x23C,
linebreak="al",
- shcode=0x0043,
- unicodeslot=0x023B,
+ shcode=0x43,
+ unicodeslot=0x23B,
},
{
category="ll",
@@ -6367,19 +6379,19 @@ characters.data={
description="LATIN SMALL LETTER C WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0063,
- uccode=0x023B,
- unicodeslot=0x023C,
+ shcode=0x63,
+ uccode=0x23B,
+ unicodeslot=0x23C,
},
{
category="lu",
contextname="Lbar",
description="LATIN CAPITAL LETTER L WITH BAR",
direction="l",
- lccode=0x019A,
+ lccode=0x19A,
linebreak="al",
- shcode=0x004C,
- unicodeslot=0x023D,
+ shcode=0x4C,
+ unicodeslot=0x23D,
},
{
category="lu",
@@ -6387,154 +6399,154 @@ characters.data={
direction="l",
lccode=0x2C66,
linebreak="al",
- shcode=0x0054,
- unicodeslot=0x023E,
+ shcode=0x54,
+ unicodeslot=0x23E,
},
{
category="ll",
description="LATIN SMALL LETTER S WITH SWASH TAIL",
direction="l",
linebreak="al",
- shcode=0x0073,
- unicodeslot=0x023F,
+ shcode=0x73,
+ unicodeslot=0x23F,
},
{
category="ll",
description="LATIN SMALL LETTER Z WITH SWASH TAIL",
direction="l",
linebreak="al",
- shcode=0x007A,
- unicodeslot=0x0240,
+ shcode=0x7A,
+ unicodeslot=0x240,
},
{
category="lu",
description="LATIN CAPITAL LETTER GLOTTAL STOP",
direction="l",
- lccode=0x0242,
+ lccode=0x242,
linebreak="al",
- unicodeslot=0x0241,
+ unicodeslot=0x241,
},
{
category="ll",
description="LATIN SMALL LETTER GLOTTAL STOP",
direction="l",
linebreak="al",
- uccode=0x0241,
- unicodeslot=0x0242,
+ uccode=0x241,
+ unicodeslot=0x242,
},
{
category="lu",
description="LATIN CAPITAL LETTER B WITH STROKE",
direction="l",
- lccode=0x0180,
+ lccode=0x180,
linebreak="al",
- shcode=0x0042,
- unicodeslot=0x0243,
+ shcode=0x42,
+ unicodeslot=0x243,
},
{
category="lu",
description="LATIN CAPITAL LETTER U BAR",
direction="l",
- lccode=0x0289,
+ lccode=0x289,
linebreak="al",
- unicodeslot=0x0244,
+ unicodeslot=0x244,
},
{
category="lu",
description="LATIN CAPITAL LETTER TURNED V",
direction="l",
- lccode=0x028C,
+ lccode=0x28C,
linebreak="al",
- unicodeslot=0x0245,
+ unicodeslot=0x245,
},
{
category="lu",
description="LATIN CAPITAL LETTER E WITH STROKE",
direction="l",
- lccode=0x0247,
+ lccode=0x247,
linebreak="al",
- shcode=0x0045,
- unicodeslot=0x0246,
+ shcode=0x45,
+ unicodeslot=0x246,
},
{
category="ll",
description="LATIN SMALL LETTER E WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0065,
- uccode=0x0246,
- unicodeslot=0x0247,
+ shcode=0x65,
+ uccode=0x246,
+ unicodeslot=0x247,
},
{
category="lu",
description="LATIN CAPITAL LETTER J WITH STROKE",
direction="l",
- lccode=0x0249,
+ lccode=0x249,
linebreak="al",
- shcode=0x004A,
- unicodeslot=0x0248,
+ shcode=0x4A,
+ unicodeslot=0x248,
},
{
category="ll",
description="LATIN SMALL LETTER J WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x006A,
- uccode=0x0248,
- unicodeslot=0x0249,
+ shcode=0x6A,
+ uccode=0x248,
+ unicodeslot=0x249,
},
{
category="lu",
description="LATIN CAPITAL LETTER SMALL Q WITH HOOK TAIL",
direction="l",
- lccode=0x024B,
+ lccode=0x24B,
linebreak="al",
- unicodeslot=0x024A,
+ unicodeslot=0x24A,
},
{
category="ll",
description="LATIN SMALL LETTER Q WITH HOOK TAIL",
direction="l",
linebreak="al",
- shcode=0x0071,
- uccode=0x024A,
- unicodeslot=0x024B,
+ shcode=0x71,
+ uccode=0x24A,
+ unicodeslot=0x24B,
},
{
category="lu",
description="LATIN CAPITAL LETTER R WITH STROKE",
direction="l",
- lccode=0x024D,
+ lccode=0x24D,
linebreak="al",
- shcode=0x0052,
- unicodeslot=0x024C,
+ shcode=0x52,
+ unicodeslot=0x24C,
},
{
category="ll",
description="LATIN SMALL LETTER R WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0072,
- uccode=0x024C,
- unicodeslot=0x024D,
+ shcode=0x72,
+ uccode=0x24C,
+ unicodeslot=0x24D,
},
{
category="lu",
description="LATIN CAPITAL LETTER Y WITH STROKE",
direction="l",
- lccode=0x024F,
+ lccode=0x24F,
linebreak="al",
- shcode=0x0059,
- unicodeslot=0x024E,
+ shcode=0x59,
+ unicodeslot=0x24E,
},
{
category="ll",
description="LATIN SMALL LETTER Y WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0079,
- uccode=0x024E,
- unicodeslot=0x024F,
+ shcode=0x79,
+ uccode=0x24E,
+ unicodeslot=0x24F,
},
{
adobename="aturned",
@@ -6542,7 +6554,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED A",
direction="l",
linebreak="al",
- unicodeslot=0x0250,
+ unicodeslot=0x250,
},
{
adobename="ascript",
@@ -6551,7 +6563,7 @@ characters.data={
description="LATIN SMALL LETTER ALPHA",
direction="l",
linebreak="al",
- unicodeslot=0x0251,
+ unicodeslot=0x251,
},
{
adobename="ascriptturned",
@@ -6559,7 +6571,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED ALPHA",
direction="l",
linebreak="al",
- unicodeslot=0x0252,
+ unicodeslot=0x252,
},
{
adobename="bhook",
@@ -6568,9 +6580,9 @@ characters.data={
description="LATIN SMALL LETTER B WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0062,
- uccode=0x0181,
- unicodeslot=0x0253,
+ shcode=0x62,
+ uccode=0x181,
+ unicodeslot=0x253,
},
{
adobename="oopen",
@@ -6578,8 +6590,8 @@ characters.data={
description="LATIN SMALL LETTER OPEN O",
direction="l",
linebreak="al",
- uccode=0x0186,
- unicodeslot=0x0254,
+ uccode=0x186,
+ unicodeslot=0x254,
},
{
adobename="ccurl",
@@ -6588,8 +6600,8 @@ characters.data={
description="LATIN SMALL LETTER C WITH CURL",
direction="l",
linebreak="al",
- shcode=0x0063,
- unicodeslot=0x0255,
+ shcode=0x63,
+ unicodeslot=0x255,
},
{
adobename="dtail",
@@ -6598,9 +6610,9 @@ characters.data={
description="LATIN SMALL LETTER D WITH TAIL",
direction="l",
linebreak="al",
- shcode=0x0064,
- uccode=0x0189,
- unicodeslot=0x0256,
+ shcode=0x64,
+ uccode=0x189,
+ unicodeslot=0x256,
},
{
adobename="dhook",
@@ -6609,9 +6621,9 @@ characters.data={
description="LATIN SMALL LETTER D WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0064,
- uccode=0x018A,
- unicodeslot=0x0257,
+ shcode=0x64,
+ uccode=0x18A,
+ unicodeslot=0x257,
},
{
adobename="ereversed",
@@ -6619,7 +6631,7 @@ characters.data={
description="LATIN SMALL LETTER REVERSED E",
direction="l",
linebreak="al",
- unicodeslot=0x0258,
+ unicodeslot=0x258,
},
{
adobename="schwa",
@@ -6628,8 +6640,8 @@ characters.data={
description="LATIN SMALL LETTER SCHWA",
direction="l",
linebreak="al",
- uccode=0x018F,
- unicodeslot=0x0259,
+ uccode=0x18F,
+ unicodeslot=0x259,
},
{
adobename="schwahook",
@@ -6638,8 +6650,8 @@ characters.data={
description="LATIN SMALL LETTER SCHWA WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0259,
- unicodeslot=0x025A,
+ shcode=0x259,
+ unicodeslot=0x25A,
},
{
adobename="eopen",
@@ -6647,8 +6659,8 @@ characters.data={
description="LATIN SMALL LETTER OPEN E",
direction="l",
linebreak="al",
- uccode=0x0190,
- unicodeslot=0x025B,
+ uccode=0x190,
+ unicodeslot=0x25B,
},
{
adobename="eopenreversed",
@@ -6656,7 +6668,7 @@ characters.data={
description="LATIN SMALL LETTER REVERSED OPEN E",
direction="l",
linebreak="al",
- unicodeslot=0x025C,
+ unicodeslot=0x25C,
},
{
adobename="eopenreversedhook",
@@ -6664,7 +6676,7 @@ characters.data={
description="LATIN SMALL LETTER REVERSED OPEN E WITH HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x025D,
+ unicodeslot=0x25D,
},
{
adobename="eopenreversedclosed",
@@ -6672,7 +6684,7 @@ characters.data={
description="LATIN SMALL LETTER CLOSED REVERSED OPEN E",
direction="l",
linebreak="al",
- unicodeslot=0x025E,
+ unicodeslot=0x25E,
},
{
adobename="jdotlessstroke",
@@ -6681,7 +6693,7 @@ characters.data={
description="LATIN SMALL LETTER DOTLESS J WITH STROKE",
direction="l",
linebreak="al",
- unicodeslot=0x025F,
+ unicodeslot=0x25F,
},
{
adobename="ghook",
@@ -6689,9 +6701,9 @@ characters.data={
description="LATIN SMALL LETTER G WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0067,
- uccode=0x0193,
- unicodeslot=0x0260,
+ shcode=0x67,
+ uccode=0x193,
+ unicodeslot=0x260,
},
{
adobename="gscript",
@@ -6700,14 +6712,14 @@ characters.data={
description="LATIN SMALL LETTER SCRIPT G",
direction="l",
linebreak="al",
- unicodeslot=0x0261,
+ unicodeslot=0x261,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL G",
direction="l",
linebreak="al",
- unicodeslot=0x0262,
+ unicodeslot=0x262,
},
{
adobename="gammalatinsmall",
@@ -6715,8 +6727,8 @@ characters.data={
description="LATIN SMALL LETTER GAMMA",
direction="l",
linebreak="al",
- uccode=0x0194,
- unicodeslot=0x0263,
+ uccode=0x194,
+ unicodeslot=0x263,
},
{
adobename="ramshorn",
@@ -6724,7 +6736,7 @@ characters.data={
description="LATIN SMALL LETTER RAMS HORN",
direction="l",
linebreak="al",
- unicodeslot=0x0264,
+ unicodeslot=0x264,
},
{
adobename="hturned",
@@ -6732,7 +6744,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED H",
direction="l",
linebreak="al",
- unicodeslot=0x0265,
+ unicodeslot=0x265,
},
{
adobename="hhook",
@@ -6740,8 +6752,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0068,
- unicodeslot=0x0266,
+ shcode=0x68,
+ unicodeslot=0x266,
},
{
adobename="henghook",
@@ -6749,7 +6761,7 @@ characters.data={
description="LATIN SMALL LETTER HENG WITH HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x0267,
+ unicodeslot=0x267,
},
{
adobename="istroke",
@@ -6757,9 +6769,9 @@ characters.data={
description="LATIN SMALL LETTER I WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0069,
- uccode=0x0197,
- unicodeslot=0x0268,
+ shcode=0x69,
+ uccode=0x197,
+ unicodeslot=0x268,
},
{
adobename="iotalatin",
@@ -6767,15 +6779,15 @@ characters.data={
description="LATIN SMALL LETTER IOTA",
direction="l",
linebreak="al",
- uccode=0x0196,
- unicodeslot=0x0269,
+ uccode=0x196,
+ unicodeslot=0x269,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL I",
direction="l",
linebreak="al",
- unicodeslot=0x026A,
+ unicodeslot=0x26A,
},
{
adobename="lmiddletilde",
@@ -6783,9 +6795,9 @@ characters.data={
description="LATIN SMALL LETTER L WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x006C,
+ shcode=0x6C,
uccode=0x2C62,
- unicodeslot=0x026B,
+ unicodeslot=0x26B,
},
{
adobename="lbelt",
@@ -6793,8 +6805,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH BELT",
direction="l",
linebreak="al",
- shcode=0x006C,
- unicodeslot=0x026C,
+ shcode=0x6C,
+ unicodeslot=0x26C,
},
{
adobename="lhookretroflex",
@@ -6802,8 +6814,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x006C,
- unicodeslot=0x026D,
+ shcode=0x6C,
+ unicodeslot=0x26D,
},
{
adobename="lezh",
@@ -6811,7 +6823,7 @@ characters.data={
description="LATIN SMALL LETTER LEZH",
direction="l",
linebreak="al",
- unicodeslot=0x026E,
+ unicodeslot=0x26E,
},
{
adobename="mturned",
@@ -6819,8 +6831,8 @@ characters.data={
description="LATIN SMALL LETTER TURNED M",
direction="l",
linebreak="al",
- uccode=0x019C,
- unicodeslot=0x026F,
+ uccode=0x19C,
+ unicodeslot=0x26F,
},
{
adobename="mlonglegturned",
@@ -6828,7 +6840,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED M WITH LONG LEG",
direction="l",
linebreak="al",
- unicodeslot=0x0270,
+ unicodeslot=0x270,
},
{
adobename="mhook",
@@ -6836,8 +6848,8 @@ characters.data={
description="LATIN SMALL LETTER M WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x006D,
- unicodeslot=0x0271,
+ shcode=0x6D,
+ unicodeslot=0x271,
},
{
adobename="nhookleft",
@@ -6845,9 +6857,9 @@ characters.data={
description="LATIN SMALL LETTER N WITH LEFT HOOK",
direction="l",
linebreak="al",
- shcode=0x006E,
- uccode=0x019D,
- unicodeslot=0x0272,
+ shcode=0x6E,
+ uccode=0x19D,
+ unicodeslot=0x272,
},
{
adobename="nhookretroflex",
@@ -6855,15 +6867,15 @@ characters.data={
description="LATIN SMALL LETTER N WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x006E,
- unicodeslot=0x0273,
+ shcode=0x6E,
+ unicodeslot=0x273,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL N",
direction="l",
linebreak="al",
- unicodeslot=0x0274,
+ unicodeslot=0x274,
},
{
adobename="obarred",
@@ -6871,15 +6883,15 @@ characters.data={
description="LATIN SMALL LETTER BARRED O",
direction="l",
linebreak="al",
- uccode=0x019F,
- unicodeslot=0x0275,
+ uccode=0x19F,
+ unicodeslot=0x275,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL OE",
direction="l",
linebreak="al",
- unicodeslot=0x0276,
+ unicodeslot=0x276,
},
{
adobename="omegalatinclosed",
@@ -6887,7 +6899,7 @@ characters.data={
description="LATIN SMALL LETTER CLOSED OMEGA",
direction="l",
linebreak="al",
- unicodeslot=0x0277,
+ unicodeslot=0x277,
},
{
adobename="philatin",
@@ -6895,7 +6907,7 @@ characters.data={
description="LATIN SMALL LETTER PHI",
direction="l",
linebreak="al",
- unicodeslot=0x0278,
+ unicodeslot=0x278,
},
{
adobename="rturned",
@@ -6903,7 +6915,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED R",
direction="l",
linebreak="al",
- unicodeslot=0x0279,
+ unicodeslot=0x279,
},
{
adobename="rlonglegturned",
@@ -6911,7 +6923,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED R WITH LONG LEG",
direction="l",
linebreak="al",
- unicodeslot=0x027A,
+ unicodeslot=0x27A,
},
{
adobename="rhookturned",
@@ -6919,7 +6931,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED R WITH HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x027B,
+ unicodeslot=0x27B,
},
{
adobename="rlongleg",
@@ -6927,8 +6939,8 @@ characters.data={
description="LATIN SMALL LETTER R WITH LONG LEG",
direction="l",
linebreak="al",
- shcode=0x0072,
- unicodeslot=0x027C,
+ shcode=0x72,
+ unicodeslot=0x27C,
},
{
adobename="rhook",
@@ -6936,9 +6948,9 @@ characters.data={
description="LATIN SMALL LETTER R WITH TAIL",
direction="l",
linebreak="al",
- shcode=0x0072,
+ shcode=0x72,
uccode=0x2C64,
- unicodeslot=0x027D,
+ unicodeslot=0x27D,
},
{
adobename="rfishhook",
@@ -6946,8 +6958,8 @@ characters.data={
description="LATIN SMALL LETTER R WITH FISHHOOK",
direction="l",
linebreak="al",
- shcode=0x0072,
- unicodeslot=0x027E,
+ shcode=0x72,
+ unicodeslot=0x27E,
},
{
adobename="rfishhookreversed",
@@ -6955,15 +6967,15 @@ characters.data={
description="LATIN SMALL LETTER REVERSED R WITH FISHHOOK",
direction="l",
linebreak="al",
- unicodeslot=0x027F,
+ unicodeslot=0x27F,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL R",
direction="l",
linebreak="al",
- uccode=0x01A6,
- unicodeslot=0x0280,
+ uccode=0x1A6,
+ unicodeslot=0x280,
},
{
adobename="Rsmallinverted",
@@ -6971,7 +6983,7 @@ characters.data={
description="LATIN LETTER SMALL CAPITAL INVERTED R",
direction="l",
linebreak="al",
- unicodeslot=0x0281,
+ unicodeslot=0x281,
},
{
adobename="shook",
@@ -6979,8 +6991,8 @@ characters.data={
description="LATIN SMALL LETTER S WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0073,
- unicodeslot=0x0282,
+ shcode=0x73,
+ unicodeslot=0x282,
},
{
adobename="esh",
@@ -6988,8 +7000,8 @@ characters.data={
description="LATIN SMALL LETTER ESH",
direction="l",
linebreak="al",
- uccode=0x01A9,
- unicodeslot=0x0283,
+ uccode=0x1A9,
+ unicodeslot=0x283,
},
{
adobename="dotlessjstrokehook",
@@ -6997,7 +7009,7 @@ characters.data={
description="LATIN SMALL LETTER DOTLESS J WITH STROKE AND HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x0284,
+ unicodeslot=0x284,
},
{
adobename="eshsquatreversed",
@@ -7005,7 +7017,7 @@ characters.data={
description="LATIN SMALL LETTER SQUAT REVERSED ESH",
direction="l",
linebreak="al",
- unicodeslot=0x0285,
+ unicodeslot=0x285,
},
{
adobename="eshcurl",
@@ -7013,8 +7025,8 @@ characters.data={
description="LATIN SMALL LETTER ESH WITH CURL",
direction="l",
linebreak="al",
- shcode=0x0283,
- unicodeslot=0x0286,
+ shcode=0x283,
+ unicodeslot=0x286,
},
{
adobename="tturned",
@@ -7022,7 +7034,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED T",
direction="l",
linebreak="al",
- unicodeslot=0x0287,
+ unicodeslot=0x287,
},
{
adobename="tretroflexhook",
@@ -7030,9 +7042,9 @@ characters.data={
description="LATIN SMALL LETTER T WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0074,
- uccode=0x01AE,
- unicodeslot=0x0288,
+ shcode=0x74,
+ uccode=0x1AE,
+ unicodeslot=0x288,
},
{
adobename="ubar",
@@ -7040,8 +7052,8 @@ characters.data={
description="LATIN SMALL LETTER U BAR",
direction="l",
linebreak="al",
- uccode=0x0244,
- unicodeslot=0x0289,
+ uccode=0x244,
+ unicodeslot=0x289,
},
{
adobename="upsilonlatin",
@@ -7049,8 +7061,8 @@ characters.data={
description="LATIN SMALL LETTER UPSILON",
direction="l",
linebreak="al",
- uccode=0x01B1,
- unicodeslot=0x028A,
+ uccode=0x1B1,
+ unicodeslot=0x28A,
},
{
adobename="vhook",
@@ -7058,9 +7070,9 @@ characters.data={
description="LATIN SMALL LETTER V WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0076,
- uccode=0x01B2,
- unicodeslot=0x028B,
+ shcode=0x76,
+ uccode=0x1B2,
+ unicodeslot=0x28B,
},
{
adobename="vturned",
@@ -7068,8 +7080,8 @@ characters.data={
description="LATIN SMALL LETTER TURNED V",
direction="l",
linebreak="al",
- uccode=0x0245,
- unicodeslot=0x028C,
+ uccode=0x245,
+ unicodeslot=0x28C,
},
{
adobename="wturned",
@@ -7077,7 +7089,7 @@ characters.data={
description="LATIN SMALL LETTER TURNED W",
direction="l",
linebreak="al",
- unicodeslot=0x028D,
+ unicodeslot=0x28D,
},
{
adobename="yturned",
@@ -7085,14 +7097,14 @@ characters.data={
description="LATIN SMALL LETTER TURNED Y",
direction="l",
linebreak="al",
- unicodeslot=0x028E,
+ unicodeslot=0x28E,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL Y",
direction="l",
linebreak="al",
- unicodeslot=0x028F,
+ unicodeslot=0x28F,
},
{
adobename="zretroflexhook",
@@ -7100,8 +7112,8 @@ characters.data={
description="LATIN SMALL LETTER Z WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x007A,
- unicodeslot=0x0290,
+ shcode=0x7A,
+ unicodeslot=0x290,
},
{
adobename="zcurl",
@@ -7109,8 +7121,8 @@ characters.data={
description="LATIN SMALL LETTER Z WITH CURL",
direction="l",
linebreak="al",
- shcode=0x007A,
- unicodeslot=0x0291,
+ shcode=0x7A,
+ unicodeslot=0x291,
},
{
adobename="ezh",
@@ -7118,8 +7130,8 @@ characters.data={
description="LATIN SMALL LETTER EZH",
direction="l",
linebreak="al",
- uccode=0x01B7,
- unicodeslot=0x0292,
+ uccode=0x1B7,
+ unicodeslot=0x292,
},
{
adobename="ezhcurl",
@@ -7127,8 +7139,8 @@ characters.data={
description="LATIN SMALL LETTER EZH WITH CURL",
direction="l",
linebreak="al",
- shcode=0x0292,
- unicodeslot=0x0293,
+ shcode=0x292,
+ unicodeslot=0x293,
},
{
adobename="glottalstop",
@@ -7136,7 +7148,7 @@ characters.data={
description="LATIN LETTER GLOTTAL STOP",
direction="l",
linebreak="al",
- unicodeslot=0x0294,
+ unicodeslot=0x294,
},
{
adobename="glottalstopreversed",
@@ -7144,7 +7156,7 @@ characters.data={
description="LATIN LETTER PHARYNGEAL VOICED FRICATIVE",
direction="l",
linebreak="al",
- unicodeslot=0x0295,
+ unicodeslot=0x295,
},
{
adobename="glottalstopinverted",
@@ -7152,7 +7164,7 @@ characters.data={
description="LATIN LETTER INVERTED GLOTTAL STOP",
direction="l",
linebreak="al",
- unicodeslot=0x0296,
+ unicodeslot=0x296,
},
{
adobename="cstretched",
@@ -7160,7 +7172,7 @@ characters.data={
description="LATIN LETTER STRETCHED C",
direction="l",
linebreak="al",
- unicodeslot=0x0297,
+ unicodeslot=0x297,
},
{
adobename="bilabialclick",
@@ -7168,14 +7180,14 @@ characters.data={
description="LATIN LETTER BILABIAL CLICK",
direction="l",
linebreak="al",
- unicodeslot=0x0298,
+ unicodeslot=0x298,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL B",
direction="l",
linebreak="al",
- unicodeslot=0x0299,
+ unicodeslot=0x299,
},
{
adobename="eopenclosed",
@@ -7183,7 +7195,7 @@ characters.data={
description="LATIN SMALL LETTER CLOSED OPEN E",
direction="l",
linebreak="al",
- unicodeslot=0x029A,
+ unicodeslot=0x29A,
},
{
adobename="Gsmallhook",
@@ -7191,14 +7203,14 @@ characters.data={
description="LATIN LETTER SMALL CAPITAL G WITH HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x029B,
+ unicodeslot=0x29B,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL H",
direction="l",
linebreak="al",
- unicodeslot=0x029C,
+ unicodeslot=0x29C,
},
{
adobename="jcrossedtail",
@@ -7206,8 +7218,8 @@ characters.data={
description="LATIN SMALL LETTER J WITH CROSSED-TAIL",
direction="l",
linebreak="al",
- shcode=0x006A,
- unicodeslot=0x029D,
+ shcode=0x6A,
+ unicodeslot=0x29D,
},
{
adobename="kturned",
@@ -7215,14 +7227,14 @@ characters.data={
description="LATIN SMALL LETTER TURNED K",
direction="l",
linebreak="al",
- unicodeslot=0x029E,
+ unicodeslot=0x29E,
},
{
category="ll",
description="LATIN LETTER SMALL CAPITAL L",
direction="l",
linebreak="al",
- unicodeslot=0x029F,
+ unicodeslot=0x29F,
},
{
adobename="qhook",
@@ -7230,8 +7242,8 @@ characters.data={
description="LATIN SMALL LETTER Q WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0071,
- unicodeslot=0x02A0,
+ shcode=0x71,
+ unicodeslot=0x2A0,
},
{
adobename="glottalstopstroke",
@@ -7239,7 +7251,7 @@ characters.data={
description="LATIN LETTER GLOTTAL STOP WITH STROKE",
direction="l",
linebreak="al",
- unicodeslot=0x02A1,
+ unicodeslot=0x2A1,
},
{
adobename="glottalstopstrokereversed",
@@ -7247,7 +7259,7 @@ characters.data={
description="LATIN LETTER REVERSED GLOTTAL STOP WITH STROKE",
direction="l",
linebreak="al",
- unicodeslot=0x02A2,
+ unicodeslot=0x2A2,
},
{
adobename="dzaltone",
@@ -7255,7 +7267,7 @@ characters.data={
description="LATIN SMALL LETTER DZ DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02A3,
+ unicodeslot=0x2A3,
},
{
adobename="dezh",
@@ -7263,7 +7275,7 @@ characters.data={
description="LATIN SMALL LETTER DEZH DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02A4,
+ unicodeslot=0x2A4,
},
{
adobename="dzcurl",
@@ -7271,7 +7283,7 @@ characters.data={
description="LATIN SMALL LETTER DZ DIGRAPH WITH CURL",
direction="l",
linebreak="al",
- unicodeslot=0x02A5,
+ unicodeslot=0x2A5,
},
{
adobename="ts",
@@ -7279,7 +7291,7 @@ characters.data={
description="LATIN SMALL LETTER TS DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02A6,
+ unicodeslot=0x2A6,
},
{
adobename="tesh",
@@ -7287,7 +7299,7 @@ characters.data={
description="LATIN SMALL LETTER TESH DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02A7,
+ unicodeslot=0x2A7,
},
{
adobename="tccurl",
@@ -7295,56 +7307,56 @@ characters.data={
description="LATIN SMALL LETTER TC DIGRAPH WITH CURL",
direction="l",
linebreak="al",
- unicodeslot=0x02A8,
+ unicodeslot=0x2A8,
},
{
category="ll",
description="LATIN SMALL LETTER FENG DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02A9,
+ unicodeslot=0x2A9,
},
{
category="ll",
description="LATIN SMALL LETTER LS DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02AA,
+ unicodeslot=0x2AA,
},
{
category="ll",
description="LATIN SMALL LETTER LZ DIGRAPH",
direction="l",
linebreak="al",
- unicodeslot=0x02AB,
+ unicodeslot=0x2AB,
},
{
category="ll",
description="LATIN LETTER BILABIAL PERCUSSIVE",
direction="l",
linebreak="al",
- unicodeslot=0x02AC,
+ unicodeslot=0x2AC,
},
{
category="ll",
description="LATIN LETTER BIDENTAL PERCUSSIVE",
direction="l",
linebreak="al",
- unicodeslot=0x02AD,
+ unicodeslot=0x2AD,
},
{
category="ll",
description="LATIN SMALL LETTER TURNED H WITH FISHHOOK",
direction="l",
linebreak="al",
- unicodeslot=0x02AE,
+ unicodeslot=0x2AE,
},
{
category="ll",
description="LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL",
direction="l",
linebreak="al",
- unicodeslot=0x02AF,
+ unicodeslot=0x2AF,
},
{
adobename="hsuperior",
@@ -7352,8 +7364,8 @@ characters.data={
description="MODIFIER LETTER SMALL H",
direction="l",
linebreak="al",
- specials={ "super", 0x0068 },
- unicodeslot=0x02B0,
+ specials={ "super", 0x68 },
+ unicodeslot=0x2B0,
},
{
adobename="hhooksuperior",
@@ -7361,8 +7373,8 @@ characters.data={
description="MODIFIER LETTER SMALL H WITH HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x0266 },
- unicodeslot=0x02B1,
+ specials={ "super", 0x266 },
+ unicodeslot=0x2B1,
},
{
adobename="jsuperior",
@@ -7370,16 +7382,16 @@ characters.data={
description="MODIFIER LETTER SMALL J",
direction="l",
linebreak="al",
- specials={ "super", 0x006A },
- unicodeslot=0x02B2,
+ specials={ "super", 0x6A },
+ unicodeslot=0x2B2,
},
{
category="lm",
description="MODIFIER LETTER SMALL R",
direction="l",
linebreak="al",
- specials={ "super", 0x0072 },
- unicodeslot=0x02B3,
+ specials={ "super", 0x72 },
+ unicodeslot=0x2B3,
},
{
adobename="rturnedsuperior",
@@ -7387,8 +7399,8 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED R",
direction="l",
linebreak="al",
- specials={ "super", 0x0279 },
- unicodeslot=0x02B4,
+ specials={ "super", 0x279 },
+ unicodeslot=0x2B4,
},
{
adobename="rhookturnedsuperior",
@@ -7396,8 +7408,8 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED R WITH HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x027B },
- unicodeslot=0x02B5,
+ specials={ "super", 0x27B },
+ unicodeslot=0x2B5,
},
{
adobename="Rsmallinvertedsuperior",
@@ -7405,8 +7417,8 @@ characters.data={
description="MODIFIER LETTER SMALL CAPITAL INVERTED R",
direction="l",
linebreak="al",
- specials={ "super", 0x0281 },
- unicodeslot=0x02B6,
+ specials={ "super", 0x281 },
+ unicodeslot=0x2B6,
},
{
adobename="wsuperior",
@@ -7414,8 +7426,8 @@ characters.data={
description="MODIFIER LETTER SMALL W",
direction="l",
linebreak="al",
- specials={ "super", 0x0077 },
- unicodeslot=0x02B7,
+ specials={ "super", 0x77 },
+ unicodeslot=0x2B7,
},
{
adobename="ysuperior",
@@ -7423,8 +7435,8 @@ characters.data={
description="MODIFIER LETTER SMALL Y",
direction="l",
linebreak="al",
- specials={ "super", 0x0079 },
- unicodeslot=0x02B8,
+ specials={ "super", 0x79 },
+ unicodeslot=0x2B8,
},
{
adobename="primemod",
@@ -7432,7 +7444,7 @@ characters.data={
description="MODIFIER LETTER PRIME",
direction="on",
linebreak="al",
- unicodeslot=0x02B9,
+ unicodeslot=0x2B9,
},
{
adobename="dblprimemod",
@@ -7440,7 +7452,7 @@ characters.data={
description="MODIFIER LETTER DOUBLE PRIME",
direction="on",
linebreak="al",
- unicodeslot=0x02BA,
+ unicodeslot=0x2BA,
},
{
adobename="commaturnedmod",
@@ -7448,7 +7460,7 @@ characters.data={
description="MODIFIER LETTER TURNED COMMA",
direction="l",
linebreak="al",
- unicodeslot=0x02BB,
+ unicodeslot=0x2BB,
},
{
adobename="apostrophemod",
@@ -7456,7 +7468,7 @@ characters.data={
description="MODIFIER LETTER APOSTROPHE",
direction="l",
linebreak="al",
- unicodeslot=0x02BC,
+ unicodeslot=0x2BC,
},
{
adobename="commareversedmod",
@@ -7464,7 +7476,7 @@ characters.data={
description="MODIFIER LETTER REVERSED COMMA",
direction="l",
linebreak="al",
- unicodeslot=0x02BD,
+ unicodeslot=0x2BD,
},
{
adobename="ringhalfright",
@@ -7472,7 +7484,7 @@ characters.data={
description="MODIFIER LETTER RIGHT HALF RING",
direction="l",
linebreak="al",
- unicodeslot=0x02BE,
+ unicodeslot=0x2BE,
},
{
adobename="ringhalfleft",
@@ -7480,7 +7492,7 @@ characters.data={
description="MODIFIER LETTER LEFT HALF RING",
direction="l",
linebreak="al",
- unicodeslot=0x02BF,
+ unicodeslot=0x2BF,
},
{
adobename="glottalstopmod",
@@ -7488,7 +7500,7 @@ characters.data={
description="MODIFIER LETTER GLOTTAL STOP",
direction="l",
linebreak="al",
- unicodeslot=0x02C0,
+ unicodeslot=0x2C0,
},
{
adobename="glottalstopreversedmod",
@@ -7496,7 +7508,7 @@ characters.data={
description="MODIFIER LETTER REVERSED GLOTTAL STOP",
direction="l",
linebreak="al",
- unicodeslot=0x02C1,
+ unicodeslot=0x2C1,
},
{
adobename="arrowheadleftmod",
@@ -7504,7 +7516,7 @@ characters.data={
description="MODIFIER LETTER LEFT ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02C2,
+ unicodeslot=0x2C2,
},
{
adobename="arrowheadrightmod",
@@ -7512,7 +7524,7 @@ characters.data={
description="MODIFIER LETTER RIGHT ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02C3,
+ unicodeslot=0x2C3,
},
{
adobename="arrowheadupmod",
@@ -7521,7 +7533,7 @@ characters.data={
description="MODIFIER LETTER UP ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02C4,
+ unicodeslot=0x2C4,
},
{
adobename="arrowheaddownmod",
@@ -7529,7 +7541,7 @@ characters.data={
description="MODIFIER LETTER DOWN ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02C5,
+ unicodeslot=0x2C5,
},
{
adobename="circumflex",
@@ -7541,8 +7553,8 @@ characters.data={
linebreak="al",
mathclass="topaccent",
mathname="hat",
- specials={ "compat", 0x0020, 0x0302 },
- unicodeslot=0x02C6,
+ specials={ "compat", 0x20, 0x302 },
+ unicodeslot=0x2C6,
},
{
adobename="caron",
@@ -7555,8 +7567,8 @@ characters.data={
linebreak="ai",
mathclass="topaccent",
mathname="check",
- specials={ "compat", 0x0020, 0x030C },
- unicodeslot=0x02C7,
+ specials={ "compat", 0x20, 0x30C },
+ unicodeslot=0x2C7,
},
{
adobename="verticallinemod",
@@ -7564,7 +7576,7 @@ characters.data={
description="MODIFIER LETTER VERTICAL LINE",
direction="on",
linebreak="bb",
- unicodeslot=0x02C8,
+ unicodeslot=0x2C8,
},
{
adobename="firsttonechinese",
@@ -7573,7 +7585,7 @@ characters.data={
description="MODIFIER LETTER MACRON",
direction="on",
linebreak="ai",
- unicodeslot=0x02C9,
+ unicodeslot=0x2C9,
},
{
adobename="secondtonechinese",
@@ -7582,7 +7594,7 @@ characters.data={
description="MODIFIER LETTER ACUTE ACCENT",
direction="on",
linebreak="ai",
- unicodeslot=0x02CA,
+ unicodeslot=0x2CA,
},
{
adobename="fourthtonechinese",
@@ -7591,7 +7603,7 @@ characters.data={
description="MODIFIER LETTER GRAVE ACCENT",
direction="on",
linebreak="ai",
- unicodeslot=0x02CB,
+ unicodeslot=0x2CB,
},
{
adobename="verticallinelowmod",
@@ -7599,7 +7611,7 @@ characters.data={
description="MODIFIER LETTER LOW VERTICAL LINE",
direction="on",
linebreak="bb",
- unicodeslot=0x02CC,
+ unicodeslot=0x2CC,
},
{
adobename="macronlowmod",
@@ -7608,7 +7620,7 @@ characters.data={
description="MODIFIER LETTER LOW MACRON",
direction="on",
linebreak="ai",
- unicodeslot=0x02CD,
+ unicodeslot=0x2CD,
},
{
adobename="gravelowmod",
@@ -7616,7 +7628,7 @@ characters.data={
description="MODIFIER LETTER LOW GRAVE ACCENT",
direction="on",
linebreak="al",
- unicodeslot=0x02CE,
+ unicodeslot=0x2CE,
},
{
adobename="acutelowmod",
@@ -7624,7 +7636,7 @@ characters.data={
description="MODIFIER LETTER LOW ACUTE ACCENT",
direction="on",
linebreak="al",
- unicodeslot=0x02CF,
+ unicodeslot=0x2CF,
},
{
adobename="colontriangularmod",
@@ -7633,7 +7645,7 @@ characters.data={
description="MODIFIER LETTER TRIANGULAR COLON",
direction="l",
linebreak="ai",
- unicodeslot=0x02D0,
+ unicodeslot=0x2D0,
},
{
adobename="colontriangularhalfmod",
@@ -7641,7 +7653,7 @@ characters.data={
description="MODIFIER LETTER HALF TRIANGULAR COLON",
direction="l",
linebreak="al",
- unicodeslot=0x02D1,
+ unicodeslot=0x2D1,
},
{
adobename="ringhalfrightcentered",
@@ -7649,7 +7661,7 @@ characters.data={
description="MODIFIER LETTER CENTRED RIGHT HALF RING",
direction="on",
linebreak="al",
- unicodeslot=0x02D2,
+ unicodeslot=0x2D2,
},
{
adobename="ringhalfleftcentered",
@@ -7657,7 +7669,7 @@ characters.data={
description="MODIFIER LETTER CENTRED LEFT HALF RING",
direction="on",
linebreak="al",
- unicodeslot=0x02D3,
+ unicodeslot=0x2D3,
},
{
adobename="uptackmod",
@@ -7665,7 +7677,7 @@ characters.data={
description="MODIFIER LETTER UP TACK",
direction="on",
linebreak="al",
- unicodeslot=0x02D4,
+ unicodeslot=0x2D4,
},
{
adobename="downtackmod",
@@ -7673,7 +7685,7 @@ characters.data={
description="MODIFIER LETTER DOWN TACK",
direction="on",
linebreak="al",
- unicodeslot=0x02D5,
+ unicodeslot=0x2D5,
},
{
adobename="plusmod",
@@ -7681,7 +7693,7 @@ characters.data={
description="MODIFIER LETTER PLUS SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x02D6,
+ unicodeslot=0x2D6,
},
{
adobename="minusmod",
@@ -7689,7 +7701,7 @@ characters.data={
description="MODIFIER LETTER MINUS SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x02D7,
+ unicodeslot=0x2D7,
},
{
adobename="breve",
@@ -7701,8 +7713,8 @@ characters.data={
linebreak="ai",
mathclass="topaccent",
mathname="breve",
- specials={ "compat", 0x0020, 0x0306 },
- unicodeslot=0x02D8,
+ specials={ "compat", 0x20, 0x306 },
+ unicodeslot=0x2D8,
},
{
adobename="dotaccent",
@@ -7714,8 +7726,8 @@ characters.data={
linebreak="ai",
mathclass="topaccent",
mathname="dot",
- specials={ "compat", 0x0020, 0x0307 },
- unicodeslot=0x02D9,
+ specials={ "compat", 0x20, 0x307 },
+ unicodeslot=0x2D9,
},
{
adobename="ring",
@@ -7727,8 +7739,8 @@ characters.data={
linebreak="ai",
mathclass="topaccent",
mathname="mathring",
- specials={ "compat", 0x0020, 0x030A },
- unicodeslot=0x02DA,
+ specials={ "compat", 0x20, 0x30A },
+ unicodeslot=0x2DA,
},
{
adobename="ogonek",
@@ -7738,8 +7750,8 @@ characters.data={
description="OGONEK",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0020, 0x0328 },
- unicodeslot=0x02DB,
+ specials={ "compat", 0x20, 0x328 },
+ unicodeslot=0x2DB,
},
{
adobename="tilde",
@@ -7750,8 +7762,8 @@ characters.data={
linebreak="al",
mathclass="topaccent",
mathname="tilde",
- specials={ "compat", 0x0020, 0x0303 },
- unicodeslot=0x02DC,
+ specials={ "compat", 0x20, 0x303 },
+ unicodeslot=0x2DC,
},
{
adobename="hungarumlaut",
@@ -7761,8 +7773,8 @@ characters.data={
description="DOUBLE ACUTE ACCENT",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0020, 0x030B },
- unicodeslot=0x02DD,
+ specials={ "compat", 0x20, 0x30B },
+ unicodeslot=0x2DD,
},
{
adobename="rhotichookmod",
@@ -7770,7 +7782,7 @@ characters.data={
description="MODIFIER LETTER RHOTIC HOOK",
direction="on",
linebreak="al",
- unicodeslot=0x02DE,
+ unicodeslot=0x2DE,
},
{
category="sk",
@@ -7778,7 +7790,7 @@ characters.data={
description="MODIFIER LETTER CROSS ACCENT",
direction="on",
linebreak="bb",
- unicodeslot=0x02DF,
+ unicodeslot=0x2DF,
},
{
adobename="gammasuperior",
@@ -7786,24 +7798,24 @@ characters.data={
description="MODIFIER LETTER SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "super", 0x0263 },
- unicodeslot=0x02E0,
+ specials={ "super", 0x263 },
+ unicodeslot=0x2E0,
},
{
category="lm",
description="MODIFIER LETTER SMALL L",
direction="l",
linebreak="al",
- specials={ "super", 0x006C },
- unicodeslot=0x02E1,
+ specials={ "super", 0x6C },
+ unicodeslot=0x2E1,
},
{
category="lm",
description="MODIFIER LETTER SMALL S",
direction="l",
linebreak="al",
- specials={ "super", 0x0073 },
- unicodeslot=0x02E2,
+ specials={ "super", 0x73 },
+ unicodeslot=0x2E2,
},
{
adobename="xsuperior",
@@ -7811,8 +7823,8 @@ characters.data={
description="MODIFIER LETTER SMALL X",
direction="l",
linebreak="al",
- specials={ "super", 0x0078 },
- unicodeslot=0x02E3,
+ specials={ "super", 0x78 },
+ unicodeslot=0x2E3,
},
{
adobename="glottalstopreversedsuperior",
@@ -7820,8 +7832,8 @@ characters.data={
description="MODIFIER LETTER SMALL REVERSED GLOTTAL STOP",
direction="l",
linebreak="al",
- specials={ "super", 0x0295 },
- unicodeslot=0x02E4,
+ specials={ "super", 0x295 },
+ unicodeslot=0x2E4,
},
{
adobename="tonebarextrahighmod",
@@ -7829,7 +7841,7 @@ characters.data={
description="MODIFIER LETTER EXTRA-HIGH TONE BAR",
direction="on",
linebreak="al",
- unicodeslot=0x02E5,
+ unicodeslot=0x2E5,
},
{
adobename="tonebarhighmod",
@@ -7837,7 +7849,7 @@ characters.data={
description="MODIFIER LETTER HIGH TONE BAR",
direction="on",
linebreak="al",
- unicodeslot=0x02E6,
+ unicodeslot=0x2E6,
},
{
adobename="tonebarmidmod",
@@ -7845,7 +7857,7 @@ characters.data={
description="MODIFIER LETTER MID TONE BAR",
direction="on",
linebreak="al",
- unicodeslot=0x02E7,
+ unicodeslot=0x2E7,
},
{
adobename="tonebarlowmod",
@@ -7853,7 +7865,7 @@ characters.data={
description="MODIFIER LETTER LOW TONE BAR",
direction="on",
linebreak="al",
- unicodeslot=0x02E8,
+ unicodeslot=0x2E8,
},
{
adobename="tonebarextralowmod",
@@ -7861,877 +7873,956 @@ characters.data={
description="MODIFIER LETTER EXTRA-LOW TONE BAR",
direction="on",
linebreak="al",
- unicodeslot=0x02E9,
+ unicodeslot=0x2E9,
},
{
category="sk",
description="MODIFIER LETTER YIN DEPARTING TONE MARK",
direction="on",
linebreak="al",
- unicodeslot=0x02EA,
+ unicodeslot=0x2EA,
},
{
category="sk",
description="MODIFIER LETTER YANG DEPARTING TONE MARK",
direction="on",
linebreak="al",
- unicodeslot=0x02EB,
+ unicodeslot=0x2EB,
},
{
category="sk",
description="MODIFIER LETTER VOICING",
direction="on",
linebreak="al",
- unicodeslot=0x02EC,
+ unicodeslot=0x2EC,
},
{
category="sk",
description="MODIFIER LETTER UNASPIRATED",
direction="on",
linebreak="al",
- unicodeslot=0x02ED,
+ unicodeslot=0x2ED,
},
{
category="lm",
description="MODIFIER LETTER DOUBLE APOSTROPHE",
direction="l",
linebreak="al",
- unicodeslot=0x02EE,
+ unicodeslot=0x2EE,
},
{
category="sk",
description="MODIFIER LETTER LOW DOWN ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02EF,
+ unicodeslot=0x2EF,
},
{
category="sk",
description="MODIFIER LETTER LOW UP ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02F0,
+ unicodeslot=0x2F0,
},
{
category="sk",
description="MODIFIER LETTER LOW LEFT ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02F1,
+ unicodeslot=0x2F1,
},
{
category="sk",
description="MODIFIER LETTER LOW RIGHT ARROWHEAD",
direction="on",
linebreak="al",
- unicodeslot=0x02F2,
+ unicodeslot=0x2F2,
},
{
category="sk",
description="MODIFIER LETTER LOW RING",
direction="on",
linebreak="al",
- unicodeslot=0x02F3,
+ unicodeslot=0x2F3,
},
{
category="sk",
description="MODIFIER LETTER MIDDLE GRAVE ACCENT",
direction="on",
linebreak="al",
- unicodeslot=0x02F4,
+ unicodeslot=0x2F4,
},
{
category="sk",
description="MODIFIER LETTER MIDDLE DOUBLE GRAVE ACCENT",
direction="on",
linebreak="al",
- unicodeslot=0x02F5,
+ unicodeslot=0x2F5,
},
{
category="sk",
description="MODIFIER LETTER MIDDLE DOUBLE ACUTE ACCENT",
direction="on",
linebreak="al",
- unicodeslot=0x02F6,
+ unicodeslot=0x2F6,
},
{
category="sk",
description="MODIFIER LETTER LOW TILDE",
direction="on",
linebreak="al",
- unicodeslot=0x02F7,
+ unicodeslot=0x2F7,
},
{
category="sk",
description="MODIFIER LETTER RAISED COLON",
direction="on",
linebreak="al",
- unicodeslot=0x02F8,
+ unicodeslot=0x2F8,
},
{
category="sk",
description="MODIFIER LETTER BEGIN HIGH TONE",
direction="on",
linebreak="al",
- unicodeslot=0x02F9,
+ unicodeslot=0x2F9,
},
{
category="sk",
description="MODIFIER LETTER END HIGH TONE",
direction="on",
linebreak="al",
- unicodeslot=0x02FA,
+ unicodeslot=0x2FA,
},
{
category="sk",
description="MODIFIER LETTER BEGIN LOW TONE",
direction="on",
linebreak="al",
- unicodeslot=0x02FB,
+ unicodeslot=0x2FB,
},
{
category="sk",
description="MODIFIER LETTER END LOW TONE",
direction="on",
linebreak="al",
- unicodeslot=0x02FC,
+ unicodeslot=0x2FC,
},
{
category="sk",
description="MODIFIER LETTER SHELF",
direction="on",
linebreak="al",
- unicodeslot=0x02FD,
+ unicodeslot=0x2FD,
},
{
category="sk",
description="MODIFIER LETTER OPEN SHELF",
direction="on",
linebreak="al",
- unicodeslot=0x02FE,
+ unicodeslot=0x2FE,
},
{
category="sk",
description="MODIFIER LETTER LOW LEFT ARROW",
direction="on",
linebreak="al",
- unicodeslot=0x02FF,
+ unicodeslot=0x2FF,
},
{
adobename="gravecomb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING GRAVE ACCENT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0300,
+ unicodeslot=0x300,
},
{
adobename="acutecomb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING ACUTE ACCENT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0301,
+ unicodeslot=0x301,
},
{
adobename="circumflexcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING CIRCUMFLEX ACCENT",
direction="nsm",
linebreak="cm",
mathclass="topaccent",
mathname="widehat",
mathstretch="h",
- unicodeslot=0x0302,
+ unicodeslot=0x302,
},
{
adobename="tildecomb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING TILDE",
direction="nsm",
linebreak="cm",
mathclass="topaccent",
mathname="widetilde",
mathstretch="h",
- unicodeslot=0x0303,
+ unicodeslot=0x303,
},
{
adobename="macroncmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING MACRON",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0304,
+ unicodeslot=0x304,
},
{
adobename="overlinecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING OVERLINE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0305,
+ unicodeslot=0x305,
},
{
adobename="brevecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING BREVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0306,
+ unicodeslot=0x306,
},
{
adobename="dotaccentcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING DOT ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0307,
+ unicodeslot=0x307,
},
{
adobename="dieresiscmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING DIAERESIS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0308,
+ unicodeslot=0x308,
},
{
adobename="hookcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING HOOK ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0309,
+ unicodeslot=0x309,
},
{
adobename="ringcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING RING ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x030A,
+ unicodeslot=0x30A,
},
{
adobename="hungarumlautcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING DOUBLE ACUTE ACCENT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x030B,
+ unicodeslot=0x30B,
},
{
adobename="caroncmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING CARON",
direction="nsm",
linebreak="cm",
- unicodeslot=0x030C,
+ unicodeslot=0x30C,
},
{
adobename="verticallineabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING VERTICAL LINE ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x030D,
+ unicodeslot=0x30D,
},
{
adobename="dblverticallineabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING DOUBLE VERTICAL LINE ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x030E,
+ unicodeslot=0x30E,
},
{
adobename="dblgravecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING DOUBLE GRAVE ACCENT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x030F,
+ unicodeslot=0x30F,
},
{
adobename="candrabinducmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING CANDRABINDU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0310,
+ unicodeslot=0x310,
},
{
adobename="breveinvertedcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING INVERTED BREVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0311,
+ unicodeslot=0x311,
},
{
adobename="commaturnedabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING TURNED COMMA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0312,
+ unicodeslot=0x312,
},
{
adobename="commaabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING COMMA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0313,
+ unicodeslot=0x313,
},
{
adobename="commareversedabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING REVERSED COMMA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0314,
+ unicodeslot=0x314,
},
{
adobename="commaaboverightcmb",
category="mn",
cjkwd="a",
+ combining=0xE8,
description="COMBINING COMMA ABOVE RIGHT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0315,
+ unicodeslot=0x315,
},
{
adobename="gravebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING GRAVE ACCENT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0316,
+ unicodeslot=0x316,
},
{
adobename="acutebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING ACUTE ACCENT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0317,
+ unicodeslot=0x317,
},
{
adobename="lefttackbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING LEFT TACK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0318,
+ unicodeslot=0x318,
},
{
adobename="righttackbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING RIGHT TACK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0319,
+ unicodeslot=0x319,
},
{
adobename="leftangleabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE8,
description="COMBINING LEFT ANGLE ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x031A,
+ unicodeslot=0x31A,
},
{
adobename="horncmb",
category="mn",
cjkwd="a",
+ combining=0xD8,
description="COMBINING HORN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x031B,
+ unicodeslot=0x31B,
},
{
adobename="ringhalfleftbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING LEFT HALF RING BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x031C,
+ unicodeslot=0x31C,
},
{
adobename="uptackbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING UP TACK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x031D,
+ unicodeslot=0x31D,
},
{
adobename="downtackbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING DOWN TACK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x031E,
+ unicodeslot=0x31E,
},
{
adobename="plusbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING PLUS SIGN BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x031F,
+ unicodeslot=0x31F,
},
{
adobename="minusbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING MINUS SIGN BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0320,
+ unicodeslot=0x320,
},
{
adobename="hookpalatalizedbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xCA,
description="COMBINING PALATALIZED HOOK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0321,
+ unicodeslot=0x321,
},
{
adobename="hookretroflexbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xCA,
description="COMBINING RETROFLEX HOOK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0322,
+ unicodeslot=0x322,
},
{
adobename="dotbelowcomb",
category="mn",
cjkwd="a",
+ combining=0xDC,
contextname="textbottomdot",
description="COMBINING DOT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0323,
+ unicodeslot=0x323,
},
{
adobename="dieresisbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING DIAERESIS BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0324,
+ unicodeslot=0x324,
},
{
adobename="ringbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING RING BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0325,
+ unicodeslot=0x325,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
contextname="textbottomcomma",
description="COMBINING COMMA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0326,
+ unicodeslot=0x326,
},
{
adobename="cedillacmb",
category="mn",
cjkwd="a",
+ combining=0xCA,
description="COMBINING CEDILLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0327,
+ unicodeslot=0x327,
},
{
adobename="ogonekcmb",
category="mn",
cjkwd="a",
+ combining=0xCA,
description="COMBINING OGONEK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0328,
+ unicodeslot=0x328,
},
{
adobename="verticallinebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING VERTICAL LINE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0329,
+ unicodeslot=0x329,
},
{
adobename="bridgebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING BRIDGE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x032A,
+ unicodeslot=0x32A,
},
{
adobename="dblarchinvertedbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING INVERTED DOUBLE ARCH BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x032B,
+ unicodeslot=0x32B,
},
{
adobename="caronbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING CARON BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x032C,
+ unicodeslot=0x32C,
},
{
adobename="circumflexbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING CIRCUMFLEX ACCENT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x032D,
+ unicodeslot=0x32D,
},
{
adobename="brevebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING BREVE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x032E,
+ unicodeslot=0x32E,
},
{
adobename="breveinvertedbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING INVERTED BREVE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x032F,
+ unicodeslot=0x32F,
},
{
adobename="tildebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING TILDE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0330,
+ unicodeslot=0x330,
},
{
adobename="macronbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING MACRON BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0331,
+ unicodeslot=0x331,
},
{
adobename="lowlinecmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING LOW LINE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0332,
+ unicodeslot=0x332,
},
{
adobename="dbllowlinecmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING DOUBLE LOW LINE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0333,
+ unicodeslot=0x333,
},
{
adobename="tildeoverlaycmb",
category="mn",
cjkwd="a",
+ combining=0x1,
description="COMBINING TILDE OVERLAY",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0334,
+ unicodeslot=0x334,
},
{
adobename="strokeshortoverlaycmb",
category="mn",
cjkwd="a",
+ combining=0x1,
description="COMBINING SHORT STROKE OVERLAY",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0335,
+ unicodeslot=0x335,
},
{
adobename="strokelongoverlaycmb",
category="mn",
cjkwd="a",
+ combining=0x1,
description="COMBINING LONG STROKE OVERLAY",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0336,
+ unicodeslot=0x336,
},
{
adobename="solidusshortoverlaycmb",
category="mn",
cjkwd="a",
+ combining=0x1,
description="COMBINING SHORT SOLIDUS OVERLAY",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0337,
+ unicodeslot=0x337,
},
{
adobename="soliduslongoverlaycmb",
category="mn",
cjkwd="a",
+ combining=0x1,
description="COMBINING LONG SOLIDUS OVERLAY",
direction="nsm",
linebreak="cm",
mathclass="relation",
mathname="not",
- unicodeslot=0x0338,
+ unicodeslot=0x338,
},
{
adobename="ringhalfrightbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING RIGHT HALF RING BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0339,
+ unicodeslot=0x339,
},
{
adobename="bridgeinvertedbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING INVERTED BRIDGE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x033A,
+ unicodeslot=0x33A,
},
{
adobename="squarebelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING SQUARE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x033B,
+ unicodeslot=0x33B,
},
{
adobename="seagullbelowcmb",
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING SEAGULL BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x033C,
+ unicodeslot=0x33C,
},
{
adobename="xabovecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING X ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x033D,
+ unicodeslot=0x33D,
},
{
adobename="tildeverticalcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING VERTICAL TILDE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x033E,
+ unicodeslot=0x33E,
},
{
adobename="dbloverlinecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING DOUBLE OVERLINE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x033F,
+ unicodeslot=0x33F,
},
{
adobename="gravetonecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING GRAVE TONE MARK",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0300 },
- unicodeslot=0x0340,
+ specials={ "char", 0x300 },
+ unicodeslot=0x340,
},
{
adobename="acutetonecmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING ACUTE TONE MARK",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0301 },
- unicodeslot=0x0341,
+ specials={ "char", 0x301 },
+ unicodeslot=0x341,
},
{
adobename="perispomenigreekcmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING GREEK PERISPOMENI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0342,
+ unicodeslot=0x342,
},
{
adobename="koroniscmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING GREEK KORONIS",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0313 },
- unicodeslot=0x0343,
+ specials={ "char", 0x313 },
+ unicodeslot=0x343,
},
{
adobename="dialytikatonoscmb",
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING GREEK DIALYTIKA TONOS",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0308, 0x0301 },
- unicodeslot=0x0344,
+ specials={ "char", 0x308, 0x301 },
+ unicodeslot=0x344,
},
{
adobename="ypogegrammenigreekcmb",
category="mn",
cjkwd="a",
+ combining=0xF0,
description="COMBINING GREEK YPOGEGRAMMENI",
direction="nsm",
linebreak="cm",
- uccode=0x0399,
- unicodeslot=0x0345,
+ uccode=0x399,
+ unicodeslot=0x345,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING BRIDGE ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0346,
+ unicodeslot=0x346,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING EQUALS SIGN BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0347,
+ unicodeslot=0x347,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING DOUBLE VERTICAL LINE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0348,
+ unicodeslot=0x348,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING LEFT ANGLE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0349,
+ unicodeslot=0x349,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING NOT TILDE ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x034A,
+ unicodeslot=0x34A,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING HOMOTHETIC ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x034B,
+ unicodeslot=0x34B,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING ALMOST EQUAL TO ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x034C,
+ unicodeslot=0x34C,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING LEFT RIGHT ARROW BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x034D,
+ unicodeslot=0x34D,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING UPWARDS ARROW BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x034E,
+ unicodeslot=0x34E,
},
{
category="mn",
@@ -8739,293 +8830,325 @@ characters.data={
description="COMBINING GRAPHEME JOINER",
direction="nsm",
linebreak="gl",
- unicodeslot=0x034F,
+ unicodeslot=0x34F,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING RIGHT ARROWHEAD ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0350,
+ unicodeslot=0x350,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LEFT HALF RING ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0351,
+ unicodeslot=0x351,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING FERMATA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0352,
+ unicodeslot=0x352,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING X BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0353,
+ unicodeslot=0x353,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING LEFT ARROWHEAD BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0354,
+ unicodeslot=0x354,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING RIGHT ARROWHEAD BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0355,
+ unicodeslot=0x355,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING RIGHT ARROWHEAD AND UP ARROWHEAD BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0356,
+ unicodeslot=0x356,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING RIGHT HALF RING ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0357,
+ unicodeslot=0x357,
},
{
category="mn",
cjkwd="a",
+ combining=0xE8,
description="COMBINING DOT ABOVE RIGHT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0358,
+ unicodeslot=0x358,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING ASTERISK BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0359,
+ unicodeslot=0x359,
},
{
category="mn",
cjkwd="a",
+ combining=0xDC,
description="COMBINING DOUBLE RING BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x035A,
+ unicodeslot=0x35A,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING ZIGZAG ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x035B,
+ unicodeslot=0x35B,
},
{
category="mn",
cjkwd="a",
+ combining=0xE9,
description="COMBINING DOUBLE BREVE BELOW",
direction="nsm",
linebreak="gl",
- unicodeslot=0x035C,
+ unicodeslot=0x35C,
},
{
category="mn",
cjkwd="a",
+ combining=0xEA,
description="COMBINING DOUBLE BREVE",
direction="nsm",
linebreak="gl",
- unicodeslot=0x035D,
+ unicodeslot=0x35D,
},
{
category="mn",
cjkwd="a",
+ combining=0xEA,
description="COMBINING DOUBLE MACRON",
direction="nsm",
linebreak="gl",
- unicodeslot=0x035E,
+ unicodeslot=0x35E,
},
{
category="mn",
cjkwd="a",
+ combining=0xE9,
description="COMBINING DOUBLE MACRON BELOW",
direction="nsm",
linebreak="gl",
- unicodeslot=0x035F,
+ unicodeslot=0x35F,
},
{
adobename="tildedoublecmb",
category="mn",
cjkwd="a",
+ combining=0xEA,
description="COMBINING DOUBLE TILDE",
direction="nsm",
linebreak="gl",
- unicodeslot=0x0360,
+ unicodeslot=0x360,
},
{
adobename="breveinverteddoublecmb",
category="mn",
cjkwd="a",
+ combining=0xEA,
description="COMBINING DOUBLE INVERTED BREVE",
direction="nsm",
linebreak="gl",
- unicodeslot=0x0361,
+ unicodeslot=0x361,
},
{
category="mn",
cjkwd="a",
+ combining=0xE9,
description="COMBINING DOUBLE RIGHTWARDS ARROW BELOW",
direction="nsm",
linebreak="gl",
- unicodeslot=0x0362,
+ unicodeslot=0x362,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0363,
+ unicodeslot=0x363,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0364,
+ unicodeslot=0x364,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0365,
+ unicodeslot=0x365,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER O",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0366,
+ unicodeslot=0x366,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0367,
+ unicodeslot=0x367,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER C",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0368,
+ unicodeslot=0x368,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER D",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0369,
+ unicodeslot=0x369,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER H",
direction="nsm",
linebreak="cm",
- unicodeslot=0x036A,
+ unicodeslot=0x36A,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER M",
direction="nsm",
linebreak="cm",
- unicodeslot=0x036B,
+ unicodeslot=0x36B,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER R",
direction="nsm",
linebreak="cm",
- unicodeslot=0x036C,
+ unicodeslot=0x36C,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER T",
direction="nsm",
linebreak="cm",
- unicodeslot=0x036D,
+ unicodeslot=0x36D,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER V",
direction="nsm",
linebreak="cm",
- unicodeslot=0x036E,
+ unicodeslot=0x36E,
},
{
category="mn",
cjkwd="a",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER X",
direction="nsm",
linebreak="cm",
- unicodeslot=0x036F,
+ unicodeslot=0x36F,
},
{
category="lu",
description="GREEK CAPITAL LETTER HETA",
direction="l",
linebreak="al",
- unicodeslot=0x0370,
+ unicodeslot=0x370,
},
{
category="ll",
description="GREEK SMALL LETTER HETA",
direction="l",
linebreak="al",
- unicodeslot=0x0371,
+ unicodeslot=0x371,
},
{
category="lu",
description="GREEK CAPITAL LETTER ARCHAIC SAMPI",
direction="l",
linebreak="al",
- unicodeslot=0x0372,
+ unicodeslot=0x372,
},
{
category="ll",
description="GREEK SMALL LETTER ARCHAIC SAMPI",
direction="l",
linebreak="al",
- unicodeslot=0x0373,
+ unicodeslot=0x373,
},
{
adobename="numeralsigngreek",
@@ -9033,8 +9156,8 @@ characters.data={
description="GREEK NUMERAL SIGN",
direction="on",
linebreak="al",
- specials={ "char", 0x02B9 },
- unicodeslot=0x0374,
+ specials={ "char", 0x2B9 },
+ unicodeslot=0x374,
},
{
adobename="numeralsignlowergreek",
@@ -9042,584 +9165,591 @@ characters.data={
description="GREEK LOWER NUMERAL SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0375,
+ unicodeslot=0x375,
},
{
category="lu",
description="GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA",
direction="l",
linebreak="al",
- unicodeslot=0x0376,
+ unicodeslot=0x376,
},
{
category="ll",
description="GREEK SMALL LETTER PAMPHYLIAN DIGAMMA",
direction="l",
linebreak="al",
- unicodeslot=0x0377,
+ unicodeslot=0x377,
},
- [0x037A]={
+ [0x37A]={
adobename="ypogegrammeni",
category="lm",
description="GREEK YPOGEGRAMMENI",
direction="l",
linebreak="al",
- specials={ "compat", 0x0020, 0x0345 },
- unicodeslot=0x037A,
+ specials={ "compat", 0x20, 0x345 },
+ unicodeslot=0x37A,
},
- [0x037B]={
+ [0x37B]={
category="ll",
description="GREEK SMALL REVERSED LUNATE SIGMA SYMBOL",
direction="l",
linebreak="al",
- uccode=0x03FD,
- unicodeslot=0x037B,
+ uccode=0x3FD,
+ unicodeslot=0x37B,
},
- [0x037C]={
+ [0x37C]={
category="ll",
description="GREEK SMALL DOTTED LUNATE SIGMA SYMBOL",
direction="l",
linebreak="al",
- uccode=0x03FE,
- unicodeslot=0x037C,
+ uccode=0x3FE,
+ unicodeslot=0x37C,
},
- [0x037D]={
+ [0x37D]={
category="ll",
description="GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL",
direction="l",
linebreak="al",
- uccode=0x03FF,
- unicodeslot=0x037D,
+ uccode=0x3FF,
+ unicodeslot=0x37D,
},
- [0x037E]={
+ [0x37E]={
adobename="questiongreek",
category="po",
description="GREEK QUESTION MARK",
direction="on",
linebreak="is",
- specials={ "char", 0x003B },
- unicodeslot=0x037E,
+ specials={ "char", 0x3B },
+ unicodeslot=0x37E,
},
- [0x0384]={
+ [0x37F]={
+ category="lu",
+ description="GREEK CAPITAL LETTER YOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x37F,
+ },
+ [0x384]={
adobename="tonos",
category="sk",
contextname="greektonos",
description="GREEK TONOS",
direction="on",
linebreak="al",
- specials={ "compat", 0x0020, 0x0301 },
- unicodeslot=0x0384,
+ specials={ "compat", 0x20, 0x301 },
+ unicodeslot=0x384,
},
- [0x0385]={
+ [0x385]={
adobename="dieresistonos",
category="sk",
contextname="greekdialytikatonos",
description="GREEK DIALYTIKA TONOS",
direction="on",
linebreak="al",
- specials={ "char", 0x00A8, 0x0301 },
- unicodeslot=0x0385,
+ specials={ "char", 0xA8, 0x301 },
+ unicodeslot=0x385,
},
- [0x0386]={
+ [0x386]={
adobename="Alphatonos",
category="lu",
contextname="greekAlphatonos",
description="GREEK CAPITAL LETTER ALPHA WITH TONOS",
direction="l",
- lccode=0x03AC,
+ lccode=0x3AC,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0301 },
- unicodeslot=0x0386,
+ shcode=0x391,
+ specials={ "char", 0x391, 0x301 },
+ unicodeslot=0x386,
},
- [0x0387]={
+ [0x387]={
adobename="anoteleia",
category="po",
description="GREEK ANO TELEIA",
direction="on",
linebreak="al",
- specials={ "char", 0x00B7 },
- unicodeslot=0x0387,
+ specials={ "char", 0xB7 },
+ unicodeslot=0x387,
},
- [0x0388]={
+ [0x388]={
adobename="Epsilontonos",
category="lu",
contextname="greekEpsilontonos",
description="GREEK CAPITAL LETTER EPSILON WITH TONOS",
direction="l",
- lccode=0x03AD,
+ lccode=0x3AD,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x0395, 0x0301 },
- unicodeslot=0x0388,
+ shcode=0x395,
+ specials={ "char", 0x395, 0x301 },
+ unicodeslot=0x388,
},
- [0x0389]={
+ [0x389]={
adobename="Etatonos",
category="lu",
contextname="greekEtatonos",
description="GREEK CAPITAL LETTER ETA WITH TONOS",
direction="l",
- lccode=0x03AE,
+ lccode=0x3AE,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x0397, 0x0301 },
- unicodeslot=0x0389,
+ shcode=0x397,
+ specials={ "char", 0x397, 0x301 },
+ unicodeslot=0x389,
},
- [0x038A]={
+ [0x38A]={
adobename="Iotatonos",
category="lu",
contextname="greekIotatonos",
description="GREEK CAPITAL LETTER IOTA WITH TONOS",
direction="l",
- lccode=0x03AF,
+ lccode=0x3AF,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0301 },
- unicodeslot=0x038A,
+ shcode=0x399,
+ specials={ "char", 0x399, 0x301 },
+ unicodeslot=0x38A,
},
- [0x038C]={
+ [0x38C]={
adobename="Omicrontonos",
category="lu",
contextname="greekOmicrontonos",
description="GREEK CAPITAL LETTER OMICRON WITH TONOS",
direction="l",
- lccode=0x03CC,
+ lccode=0x3CC,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x039F, 0x0301 },
- unicodeslot=0x038C,
+ shcode=0x39F,
+ specials={ "char", 0x39F, 0x301 },
+ unicodeslot=0x38C,
},
- [0x038E]={
+ [0x38E]={
adobename="Upsilontonos",
category="lu",
contextname="greekUpsilontonos",
description="GREEK CAPITAL LETTER UPSILON WITH TONOS",
direction="l",
- lccode=0x03CD,
+ lccode=0x3CD,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x03A5, 0x0301 },
- unicodeslot=0x038E,
+ shcode=0x3A5,
+ specials={ "char", 0x3A5, 0x301 },
+ unicodeslot=0x38E,
},
- [0x038F]={
+ [0x38F]={
adobename="Omegatonos",
category="lu",
contextname="greekOmegatonos",
description="GREEK CAPITAL LETTER OMEGA WITH TONOS",
direction="l",
- lccode=0x03CE,
+ lccode=0x3CE,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x03A9, 0x0301 },
- unicodeslot=0x038F,
+ shcode=0x3A9,
+ specials={ "char", 0x3A9, 0x301 },
+ unicodeslot=0x38F,
},
- [0x0390]={
+ [0x390]={
adobename="iotadieresistonos",
category="ll",
contextname="greekiotadialytikatonos",
description="GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03CA, 0x0301 },
- unicodeslot=0x0390,
+ shcode=0x3B9,
+ specials={ "char", 0x3CA, 0x301 },
+ unicodeslot=0x390,
},
- [0x0391]={
+ [0x391]={
adobename="Alpha",
category="lu",
cjkwd="a",
contextname="greekAlpha",
description="GREEK CAPITAL LETTER ALPHA",
direction="l",
- lccode=0x03B1,
+ lccode=0x3B1,
linebreak="al",
mathclass="variable",
mathname="Alpha",
- unicodeslot=0x0391,
+ unicodeslot=0x391,
},
- [0x0392]={
+ [0x392]={
adobename="Beta",
category="lu",
cjkwd="a",
contextname="greekBeta",
description="GREEK CAPITAL LETTER BETA",
direction="l",
- lccode=0x03B2,
+ lccode=0x3B2,
linebreak="al",
mathclass="variable",
mathname="Beta",
- unicodeslot=0x0392,
+ unicodeslot=0x392,
},
- [0x0393]={
+ [0x393]={
adobename="Gamma",
category="lu",
cjkwd="a",
contextname="greekGamma",
description="GREEK CAPITAL LETTER GAMMA",
direction="l",
- lccode=0x03B3,
+ lccode=0x3B3,
linebreak="al",
mathclass="variable",
mathname="Gamma",
- unicodeslot=0x0393,
+ unicodeslot=0x393,
},
- [0x0394]={
+ [0x394]={
adobename="Deltagreek",
category="lu",
cjkwd="a",
contextname="greekDelta",
description="GREEK CAPITAL LETTER DELTA",
direction="l",
- lccode=0x03B4,
+ lccode=0x3B4,
linebreak="al",
mathclass="variable",
mathname="Delta",
- unicodeslot=0x0394,
+ unicodeslot=0x394,
},
- [0x0395]={
+ [0x395]={
adobename="Epsilon",
category="lu",
cjkwd="a",
contextname="greekEpsilon",
description="GREEK CAPITAL LETTER EPSILON",
direction="l",
- lccode=0x03B5,
+ lccode=0x3B5,
linebreak="al",
mathclass="variable",
mathname="Epsilon",
- unicodeslot=0x0395,
+ unicodeslot=0x395,
},
- [0x0396]={
+ [0x396]={
adobename="Zeta",
category="lu",
cjkwd="a",
contextname="greekZeta",
description="GREEK CAPITAL LETTER ZETA",
direction="l",
- lccode=0x03B6,
+ lccode=0x3B6,
linebreak="al",
mathclass="variable",
mathname="Zeta",
- unicodeslot=0x0396,
+ unicodeslot=0x396,
},
- [0x0397]={
+ [0x397]={
adobename="Eta",
category="lu",
cjkwd="a",
contextname="greekEta",
description="GREEK CAPITAL LETTER ETA",
direction="l",
- lccode=0x03B7,
+ lccode=0x3B7,
linebreak="al",
mathclass="variable",
mathname="Eta",
- unicodeslot=0x0397,
+ unicodeslot=0x397,
},
- [0x0398]={
+ [0x398]={
adobename="Theta",
category="lu",
cjkwd="a",
contextname="greekTheta",
description="GREEK CAPITAL LETTER THETA",
direction="l",
- lccode=0x03B8,
+ lccode=0x3B8,
linebreak="al",
mathclass="variable",
mathname="Theta",
- unicodeslot=0x0398,
+ unicodeslot=0x398,
},
- [0x0399]={
+ [0x399]={
adobename="Iota",
category="lu",
cjkwd="a",
contextname="greekIota",
description="GREEK CAPITAL LETTER IOTA",
direction="l",
- lccode=0x03B9,
+ lccode=0x3B9,
linebreak="al",
mathclass="variable",
mathname="Iota",
- unicodeslot=0x0399,
+ unicodeslot=0x399,
},
- [0x039A]={
+ [0x39A]={
adobename="Kappa",
category="lu",
cjkwd="a",
contextname="greekKappa",
description="GREEK CAPITAL LETTER KAPPA",
direction="l",
- lccode=0x03BA,
+ lccode=0x3BA,
linebreak="al",
mathclass="variable",
mathname="Kappa",
- unicodeslot=0x039A,
+ unicodeslot=0x39A,
},
- [0x039B]={
+ [0x39B]={
adobename="Lambda",
category="lu",
cjkwd="a",
contextname="greekLambda",
description="GREEK CAPITAL LETTER LAMDA",
direction="l",
- lccode=0x03BB,
+ lccode=0x3BB,
linebreak="al",
mathclass="variable",
mathname="Lambda",
- unicodeslot=0x039B,
+ unicodeslot=0x39B,
},
- [0x039C]={
+ [0x39C]={
adobename="Mu",
category="lu",
cjkwd="a",
contextname="greekMu",
description="GREEK CAPITAL LETTER MU",
direction="l",
- lccode=0x03BC,
+ lccode=0x3BC,
linebreak="al",
mathclass="variable",
mathname="Mu",
- unicodeslot=0x039C,
+ unicodeslot=0x39C,
},
- [0x039D]={
+ [0x39D]={
adobename="Nu",
category="lu",
cjkwd="a",
contextname="greekNu",
description="GREEK CAPITAL LETTER NU",
direction="l",
- lccode=0x03BD,
+ lccode=0x3BD,
linebreak="al",
mathclass="variable",
mathname="Nu",
- unicodeslot=0x039D,
+ unicodeslot=0x39D,
},
- [0x039E]={
+ [0x39E]={
adobename="Xi",
category="lu",
cjkwd="a",
contextname="greekXi",
description="GREEK CAPITAL LETTER XI",
direction="l",
- lccode=0x03BE,
+ lccode=0x3BE,
linebreak="al",
mathclass="variable",
mathname="Xi",
- unicodeslot=0x039E,
+ unicodeslot=0x39E,
},
- [0x039F]={
+ [0x39F]={
adobename="Omicron",
category="lu",
cjkwd="a",
contextname="greekOmicron",
description="GREEK CAPITAL LETTER OMICRON",
direction="l",
- lccode=0x03BF,
+ lccode=0x3BF,
linebreak="al",
mathclass="variable",
mathname="Omicron",
- unicodeslot=0x039F,
+ unicodeslot=0x39F,
},
- [0x03A0]={
+ [0x3A0]={
adobename="Pi",
category="lu",
cjkwd="a",
contextname="greekPi",
description="GREEK CAPITAL LETTER PI",
direction="l",
- lccode=0x03C0,
+ lccode=0x3C0,
linebreak="al",
mathclass="variable",
mathname="Pi",
- unicodeslot=0x03A0,
+ unicodeslot=0x3A0,
},
- [0x03A1]={
+ [0x3A1]={
adobename="Rho",
category="lu",
cjkwd="a",
contextname="greekRho",
description="GREEK CAPITAL LETTER RHO",
direction="l",
- lccode=0x03C1,
+ lccode=0x3C1,
linebreak="al",
mathclass="variable",
mathname="Rho",
- unicodeslot=0x03A1,
+ unicodeslot=0x3A1,
},
- [0x03A3]={
+ [0x3A3]={
adobename="Sigma",
category="lu",
cjkwd="a",
contextname="greekSigma",
description="GREEK CAPITAL LETTER SIGMA",
direction="l",
- lccode=0x03C3,
+ lccode=0x3C3,
linebreak="al",
mathclass="variable",
mathname="Sigma",
- unicodeslot=0x03A3,
+ unicodeslot=0x3A3,
},
- [0x03A4]={
+ [0x3A4]={
adobename="Tau",
category="lu",
cjkwd="a",
contextname="greekTau",
description="GREEK CAPITAL LETTER TAU",
direction="l",
- lccode=0x03C4,
+ lccode=0x3C4,
linebreak="al",
mathclass="variable",
mathname="Tau",
- unicodeslot=0x03A4,
+ unicodeslot=0x3A4,
},
- [0x03A5]={
+ [0x3A5]={
adobename="Upsilon",
category="lu",
cjkwd="a",
contextname="greekUpsilon",
description="GREEK CAPITAL LETTER UPSILON",
direction="l",
- lccode=0x03C5,
+ lccode=0x3C5,
linebreak="al",
mathclass="variable",
mathname="Upsilon",
- unicodeslot=0x03A5,
+ unicodeslot=0x3A5,
},
- [0x03A6]={
+ [0x3A6]={
adobename="Phi",
category="lu",
cjkwd="a",
contextname="greekPhi",
description="GREEK CAPITAL LETTER PHI",
direction="l",
- lccode=0x03C6,
+ lccode=0x3C6,
linebreak="al",
mathclass="variable",
mathname="Phi",
- unicodeslot=0x03A6,
+ unicodeslot=0x3A6,
},
- [0x03A7]={
+ [0x3A7]={
adobename="Chi",
category="lu",
cjkwd="a",
contextname="greekChi",
description="GREEK CAPITAL LETTER CHI",
direction="l",
- lccode=0x03C7,
+ lccode=0x3C7,
linebreak="al",
mathclass="variable",
mathname="Chi",
- unicodeslot=0x03A7,
+ unicodeslot=0x3A7,
},
- [0x03A8]={
+ [0x3A8]={
adobename="Psi",
category="lu",
cjkwd="a",
contextname="greekPsi",
description="GREEK CAPITAL LETTER PSI",
direction="l",
- lccode=0x03C8,
+ lccode=0x3C8,
linebreak="al",
mathclass="variable",
mathname="Psi",
- unicodeslot=0x03A8,
+ unicodeslot=0x3A8,
},
- [0x03A9]={
+ [0x3A9]={
adobename="Omegagreek",
category="lu",
cjkwd="a",
contextname="greekOmega",
description="GREEK CAPITAL LETTER OMEGA",
direction="l",
- lccode=0x03C9,
+ lccode=0x3C9,
linebreak="al",
mathclass="variable",
mathname="Omega",
- unicodeslot=0x03A9,
+ unicodeslot=0x3A9,
},
- [0x03AA]={
+ [0x3AA]={
adobename="Iotadieresis",
category="lu",
contextname="greekIotadialytika",
description="GREEK CAPITAL LETTER IOTA WITH DIALYTIKA",
direction="l",
- lccode=0x03CA,
+ lccode=0x3CA,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0308 },
- unicodeslot=0x03AA,
+ shcode=0x399,
+ specials={ "char", 0x399, 0x308 },
+ unicodeslot=0x3AA,
},
- [0x03AB]={
+ [0x3AB]={
adobename="Upsilondieresis",
category="lu",
contextname="greekUpsilondialytika",
description="GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA",
direction="l",
- lccode=0x03CB,
+ lccode=0x3CB,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x03A5, 0x0308 },
- unicodeslot=0x03AB,
+ shcode=0x3A5,
+ specials={ "char", 0x3A5, 0x308 },
+ unicodeslot=0x3AB,
},
- [0x03AC]={
+ [0x3AC]={
adobename="alphatonos",
category="ll",
contextname="greekalphatonos",
description="GREEK SMALL LETTER ALPHA WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0301 },
- uccode=0x0386,
- unicodeslot=0x03AC,
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x301 },
+ uccode=0x386,
+ unicodeslot=0x3AC,
},
- [0x03AD]={
+ [0x3AD]={
adobename="epsilontonos",
category="ll",
contextname="greekepsilontonos",
description="GREEK SMALL LETTER EPSILON WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x03B5, 0x0301 },
- uccode=0x0388,
- unicodeslot=0x03AD,
+ shcode=0x3B5,
+ specials={ "char", 0x3B5, 0x301 },
+ uccode=0x388,
+ unicodeslot=0x3AD,
},
- [0x03AE]={
+ [0x3AE]={
adobename="etatonos",
category="ll",
contextname="greeketatonos",
description="GREEK SMALL LETTER ETA WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03B7, 0x0301 },
- uccode=0x0389,
- unicodeslot=0x03AE,
+ shcode=0x3B7,
+ specials={ "char", 0x3B7, 0x301 },
+ uccode=0x389,
+ unicodeslot=0x3AE,
},
- [0x03AF]={
+ [0x3AF]={
adobename="iotatonos",
category="ll",
contextname="greekiotatonos",
description="GREEK SMALL LETTER IOTA WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0301 },
- uccode=0x038A,
- unicodeslot=0x03AF,
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x301 },
+ uccode=0x38A,
+ unicodeslot=0x3AF,
},
- [0x03B0]={
+ [0x3B0]={
adobename="upsilondieresistonos",
category="ll",
contextname="greekupsilondialytikatonos",
description="GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03CB, 0x0301 },
- unicodeslot=0x03B0,
+ shcode=0x3C5,
+ specials={ "char", 0x3CB, 0x301 },
+ unicodeslot=0x3B0,
},
- [0x03B1]={
+ [0x3B1]={
adobename="alpha",
category="ll",
cjkwd="a",
@@ -9629,10 +9759,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="alpha",
- uccode=0x0391,
- unicodeslot=0x03B1,
+ uccode=0x391,
+ unicodeslot=0x3B1,
},
- [0x03B2]={
+ [0x3B2]={
adobename="beta",
category="ll",
cjkwd="a",
@@ -9642,10 +9772,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="beta",
- uccode=0x0392,
- unicodeslot=0x03B2,
+ uccode=0x392,
+ unicodeslot=0x3B2,
},
- [0x03B3]={
+ [0x3B3]={
adobename="gamma",
category="ll",
cjkwd="a",
@@ -9655,10 +9785,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="gamma",
- uccode=0x0393,
- unicodeslot=0x03B3,
+ uccode=0x393,
+ unicodeslot=0x3B3,
},
- [0x03B4]={
+ [0x3B4]={
adobename="delta",
category="ll",
cjkwd="a",
@@ -9668,10 +9798,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="delta",
- uccode=0x0394,
- unicodeslot=0x03B4,
+ uccode=0x394,
+ unicodeslot=0x3B4,
},
- [0x03B5]={
+ [0x3B5]={
adobename="epsilon",
category="ll",
cjkwd="a",
@@ -9681,10 +9811,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="varepsilon",
- uccode=0x0395,
- unicodeslot=0x03B5,
+ uccode=0x395,
+ unicodeslot=0x3B5,
},
- [0x03B6]={
+ [0x3B6]={
adobename="zeta",
category="ll",
cjkwd="a",
@@ -9694,10 +9824,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="zeta",
- uccode=0x0396,
- unicodeslot=0x03B6,
+ uccode=0x396,
+ unicodeslot=0x3B6,
},
- [0x03B7]={
+ [0x3B7]={
adobename="eta",
category="ll",
cjkwd="a",
@@ -9707,10 +9837,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="eta",
- uccode=0x0397,
- unicodeslot=0x03B7,
+ uccode=0x397,
+ unicodeslot=0x3B7,
},
- [0x03B8]={
+ [0x3B8]={
adobename="theta",
category="ll",
cjkwd="a",
@@ -9720,10 +9850,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="theta",
- uccode=0x0398,
- unicodeslot=0x03B8,
+ uccode=0x398,
+ unicodeslot=0x3B8,
},
- [0x03B9]={
+ [0x3B9]={
adobename="iota",
category="ll",
cjkwd="a",
@@ -9733,10 +9863,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="iota",
- uccode=0x0399,
- unicodeslot=0x03B9,
+ uccode=0x399,
+ unicodeslot=0x3B9,
},
- [0x03BA]={
+ [0x3BA]={
adobename="kappa",
category="ll",
cjkwd="a",
@@ -9746,10 +9876,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="kappa",
- uccode=0x039A,
- unicodeslot=0x03BA,
+ uccode=0x39A,
+ unicodeslot=0x3BA,
},
- [0x03BB]={
+ [0x3BB]={
adobename="lambda",
category="ll",
cjkwd="a",
@@ -9759,10 +9889,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="lambda",
- uccode=0x039B,
- unicodeslot=0x03BB,
+ uccode=0x39B,
+ unicodeslot=0x3BB,
},
- [0x03BC]={
+ [0x3BC]={
adobename="mugreek",
category="ll",
cjkwd="a",
@@ -9772,10 +9902,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="mu",
- uccode=0x039C,
- unicodeslot=0x03BC,
+ uccode=0x39C,
+ unicodeslot=0x3BC,
},
- [0x03BD]={
+ [0x3BD]={
adobename="nu",
category="ll",
cjkwd="a",
@@ -9785,10 +9915,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="nu",
- uccode=0x039D,
- unicodeslot=0x03BD,
+ uccode=0x39D,
+ unicodeslot=0x3BD,
},
- [0x03BE]={
+ [0x3BE]={
adobename="xi",
category="ll",
cjkwd="a",
@@ -9798,10 +9928,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="xi",
- uccode=0x039E,
- unicodeslot=0x03BE,
+ uccode=0x39E,
+ unicodeslot=0x3BE,
},
- [0x03BF]={
+ [0x3BF]={
adobename="omicron",
category="ll",
cjkwd="a",
@@ -9811,10 +9941,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="omicron",
- uccode=0x039F,
- unicodeslot=0x03BF,
+ uccode=0x39F,
+ unicodeslot=0x3BF,
},
- [0x03C0]={
+ [0x3C0]={
adobename="pi",
category="ll",
cjkwd="a",
@@ -9824,10 +9954,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="pi",
- uccode=0x03A0,
- unicodeslot=0x03C0,
+ uccode=0x3A0,
+ unicodeslot=0x3C0,
},
- [0x03C1]={
+ [0x3C1]={
adobename="rho",
category="ll",
cjkwd="a",
@@ -9837,10 +9967,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="rho",
- uccode=0x03A1,
- unicodeslot=0x03C1,
+ uccode=0x3A1,
+ unicodeslot=0x3C1,
},
- [0x03C2]={
+ [0x3C2]={
adobename="sigmafinal",
category="ll",
contextname="greekfinalsigma",
@@ -9849,10 +9979,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="varsigma",
- uccode=0x03A3,
- unicodeslot=0x03C2,
+ uccode=0x3A3,
+ unicodeslot=0x3C2,
},
- [0x03C3]={
+ [0x3C3]={
adobename="sigma",
category="ll",
cjkwd="a",
@@ -9862,10 +9992,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="sigma",
- uccode=0x03A3,
- unicodeslot=0x03C3,
+ uccode=0x3A3,
+ unicodeslot=0x3C3,
},
- [0x03C4]={
+ [0x3C4]={
adobename="tau",
category="ll",
cjkwd="a",
@@ -9875,10 +10005,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="tau",
- uccode=0x03A4,
- unicodeslot=0x03C4,
+ uccode=0x3A4,
+ unicodeslot=0x3C4,
},
- [0x03C5]={
+ [0x3C5]={
adobename="upsilon",
category="ll",
cjkwd="a",
@@ -9888,10 +10018,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="upsilon",
- uccode=0x03A5,
- unicodeslot=0x03C5,
+ uccode=0x3A5,
+ unicodeslot=0x3C5,
},
- [0x03C6]={
+ [0x3C6]={
adobename="phi",
category="ll",
cjkwd="a",
@@ -9901,10 +10031,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="varphi",
- uccode=0x03A6,
- unicodeslot=0x03C6,
+ uccode=0x3A6,
+ unicodeslot=0x3C6,
},
- [0x03C7]={
+ [0x3C7]={
adobename="chi",
category="ll",
cjkwd="a",
@@ -9914,10 +10044,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="chi",
- uccode=0x03A7,
- unicodeslot=0x03C7,
+ uccode=0x3A7,
+ unicodeslot=0x3C7,
},
- [0x03C8]={
+ [0x3C8]={
adobename="psi",
category="ll",
cjkwd="a",
@@ -9927,10 +10057,10 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="psi",
- uccode=0x03A8,
- unicodeslot=0x03C8,
+ uccode=0x3A8,
+ unicodeslot=0x3C8,
},
- [0x03C9]={
+ [0x3C9]={
adobename="omega",
category="ll",
cjkwd="a",
@@ -9940,87 +10070,87 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="omega",
- uccode=0x03A9,
- unicodeslot=0x03C9,
+ uccode=0x3A9,
+ unicodeslot=0x3C9,
},
- [0x03CA]={
+ [0x3CA]={
adobename="iotadieresis",
category="ll",
contextname="greekiotadialytika",
description="GREEK SMALL LETTER IOTA WITH DIALYTIKA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0308 },
- uccode=0x03AA,
- unicodeslot=0x03CA,
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x308 },
+ uccode=0x3AA,
+ unicodeslot=0x3CA,
},
- [0x03CB]={
+ [0x3CB]={
adobename="upsilondieresis",
category="ll",
contextname="greekupsilondiaeresis",
description="GREEK SMALL LETTER UPSILON WITH DIALYTIKA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0308 },
- uccode=0x03AB,
- unicodeslot=0x03CB,
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x308 },
+ uccode=0x3AB,
+ unicodeslot=0x3CB,
},
- [0x03CC]={
+ [0x3CC]={
adobename="omicrontonos",
category="ll",
contextname="greekomicrontonos",
description="GREEK SMALL LETTER OMICRON WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x03BF, 0x0301 },
- uccode=0x038C,
- unicodeslot=0x03CC,
+ shcode=0x3BF,
+ specials={ "char", 0x3BF, 0x301 },
+ uccode=0x38C,
+ unicodeslot=0x3CC,
},
- [0x03CD]={
+ [0x3CD]={
adobename="upsilontonos",
category="ll",
contextname="greekupsilontonos",
description="GREEK SMALL LETTER UPSILON WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0301 },
- uccode=0x038E,
- unicodeslot=0x03CD,
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x301 },
+ uccode=0x38E,
+ unicodeslot=0x3CD,
},
- [0x03CE]={
+ [0x3CE]={
adobename="omegatonos",
category="ll",
contextname="greekomegatonos",
description="GREEK SMALL LETTER OMEGA WITH TONOS",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03C9, 0x0301 },
- uccode=0x038F,
- unicodeslot=0x03CE,
+ shcode=0x3C9,
+ specials={ "char", 0x3C9, 0x301 },
+ uccode=0x38F,
+ unicodeslot=0x3CE,
},
- [0x03CF]={
+ [0x3CF]={
category="lu",
description="GREEK CAPITAL KAI SYMBOL",
direction="l",
linebreak="al",
- unicodeslot=0x03CF,
+ unicodeslot=0x3CF,
},
- [0x03D0]={
+ [0x3D0]={
adobename="betasymbolgreek",
category="ll",
description="GREEK BETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "compat", 0x03B2 },
- uccode=0x0392,
- unicodeslot=0x03D0,
+ specials={ "compat", 0x3B2 },
+ uccode=0x392,
+ unicodeslot=0x3D0,
},
- [0x03D1]={
+ [0x3D1]={
adobename="thetasymbolgreek",
category="ll",
contextname="greekthetaalt",
@@ -10029,38 +10159,38 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="varTheta",
- specials={ "compat", 0x03B8 },
- uccode=0x0398,
- unicodeslot=0x03D1,
+ specials={ "compat", 0x3B8 },
+ uccode=0x398,
+ unicodeslot=0x3D1,
},
- [0x03D2]={
+ [0x3D2]={
adobename="Upsilonhooksymbol",
category="lu",
description="GREEK UPSILON WITH HOOK SYMBOL",
direction="l",
linebreak="al",
- specials={ "compat", 0x03A5 },
- unicodeslot=0x03D2,
+ specials={ "compat", 0x3A5 },
+ unicodeslot=0x3D2,
},
- [0x03D3]={
+ [0x3D3]={
adobename="Upsilonacutehooksymbolgreek",
category="lu",
description="GREEK UPSILON WITH ACUTE AND HOOK SYMBOL",
direction="l",
linebreak="al",
- specials={ "char", 0x03D2, 0x0301 },
- unicodeslot=0x03D3,
+ specials={ "char", 0x3D2, 0x301 },
+ unicodeslot=0x3D3,
},
- [0x03D4]={
+ [0x3D4]={
adobename="Upsilondieresishooksymbolgreek",
category="lu",
description="GREEK UPSILON WITH DIAERESIS AND HOOK SYMBOL",
direction="l",
linebreak="al",
- specials={ "char", 0x03D2, 0x0308 },
- unicodeslot=0x03D4,
+ specials={ "char", 0x3D2, 0x308 },
+ unicodeslot=0x3D4,
},
- [0x03D5]={
+ [0x3D5]={
adobename="phisymbolgreek",
category="ll",
contextname="greekphialt",
@@ -10069,11 +10199,11 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="phi",
- specials={ "compat", 0x03C6 },
- uccode=0x03A6,
- unicodeslot=0x03D5,
+ specials={ "compat", 0x3C6 },
+ uccode=0x3A6,
+ unicodeslot=0x3D5,
},
- [0x03D6]={
+ [0x3D6]={
adobename="pisymbolgreek",
category="ll",
contextname="greekpialt",
@@ -10082,235 +10212,235 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="varpi",
- specials={ "compat", 0x03C0 },
- uccode=0x03A0,
- unicodeslot=0x03D6,
+ specials={ "compat", 0x3C0 },
+ uccode=0x3A0,
+ unicodeslot=0x3D6,
},
- [0x03D7]={
+ [0x3D7]={
category="ll",
description="GREEK KAI SYMBOL",
direction="l",
linebreak="al",
- unicodeslot=0x03D7,
+ unicodeslot=0x3D7,
},
- [0x03D8]={
+ [0x3D8]={
category="lu",
description="GREEK LETTER ARCHAIC KOPPA",
direction="l",
- lccode=0x03D9,
+ lccode=0x3D9,
linebreak="al",
- unicodeslot=0x03D8,
+ unicodeslot=0x3D8,
},
- [0x03D9]={
+ [0x3D9]={
category="ll",
contextname="greekkoppa",
description="GREEK SMALL LETTER ARCHAIC KOPPA",
direction="l",
linebreak="al",
- uccode=0x03D8,
- unicodeslot=0x03D9,
+ uccode=0x3D8,
+ unicodeslot=0x3D9,
},
- [0x03DA]={
+ [0x3DA]={
adobename="Stigmagreek",
category="lu",
description="GREEK LETTER STIGMA",
direction="l",
- lccode=0x03DB,
+ lccode=0x3DB,
linebreak="al",
- unicodeslot=0x03DA,
+ unicodeslot=0x3DA,
},
- [0x03DB]={
+ [0x3DB]={
category="ll",
contextname="greekstigma",
description="GREEK SMALL LETTER STIGMA",
direction="l",
linebreak="al",
- uccode=0x03DA,
- unicodeslot=0x03DB,
+ uccode=0x3DA,
+ unicodeslot=0x3DB,
},
- [0x03DC]={
+ [0x3DC]={
adobename="Digammagreek",
category="lu",
description="GREEK LETTER DIGAMMA",
direction="l",
- lccode=0x03DD,
+ lccode=0x3DD,
linebreak="al",
mathclass="variable",
mathname="digamma",
- unicodeslot=0x03DC,
+ unicodeslot=0x3DC,
},
- [0x03DD]={
+ [0x3DD]={
category="ll",
contextname="greekdigamma",
description="GREEK SMALL LETTER DIGAMMA",
direction="l",
linebreak="al",
- uccode=0x03DC,
- unicodeslot=0x03DD,
+ uccode=0x3DC,
+ unicodeslot=0x3DD,
},
- [0x03DE]={
+ [0x3DE]={
adobename="Koppagreek",
category="lu",
description="GREEK LETTER KOPPA",
direction="l",
- lccode=0x03DF,
+ lccode=0x3DF,
linebreak="al",
- unicodeslot=0x03DE,
+ unicodeslot=0x3DE,
},
- [0x03DF]={
+ [0x3DF]={
category="ll",
contextname="greeknumkoppa",
description="GREEK SMALL LETTER KOPPA",
direction="l",
linebreak="al",
- uccode=0x03DE,
- unicodeslot=0x03DF,
+ uccode=0x3DE,
+ unicodeslot=0x3DF,
},
- [0x03E0]={
+ [0x3E0]={
adobename="Sampigreek",
category="lu",
description="GREEK LETTER SAMPI",
direction="l",
- lccode=0x03E1,
+ lccode=0x3E1,
linebreak="al",
- unicodeslot=0x03E0,
+ unicodeslot=0x3E0,
},
- [0x03E1]={
+ [0x3E1]={
category="ll",
contextname="greeksampi",
description="GREEK SMALL LETTER SAMPI",
direction="l",
linebreak="al",
- uccode=0x03E0,
- unicodeslot=0x03E1,
+ uccode=0x3E0,
+ unicodeslot=0x3E1,
},
- [0x03E2]={
+ [0x3E2]={
adobename="Sheicoptic",
category="lu",
description="COPTIC CAPITAL LETTER SHEI",
direction="l",
- lccode=0x03E3,
+ lccode=0x3E3,
linebreak="al",
- unicodeslot=0x03E2,
+ unicodeslot=0x3E2,
},
- [0x03E3]={
+ [0x3E3]={
adobename="sheicoptic",
category="ll",
description="COPTIC SMALL LETTER SHEI",
direction="l",
linebreak="al",
- uccode=0x03E2,
- unicodeslot=0x03E3,
+ uccode=0x3E2,
+ unicodeslot=0x3E3,
},
- [0x03E4]={
+ [0x3E4]={
adobename="Feicoptic",
category="lu",
description="COPTIC CAPITAL LETTER FEI",
direction="l",
- lccode=0x03E5,
+ lccode=0x3E5,
linebreak="al",
- unicodeslot=0x03E4,
+ unicodeslot=0x3E4,
},
- [0x03E5]={
+ [0x3E5]={
adobename="feicoptic",
category="ll",
description="COPTIC SMALL LETTER FEI",
direction="l",
linebreak="al",
- uccode=0x03E4,
- unicodeslot=0x03E5,
+ uccode=0x3E4,
+ unicodeslot=0x3E5,
},
- [0x03E6]={
+ [0x3E6]={
adobename="Kheicoptic",
category="lu",
description="COPTIC CAPITAL LETTER KHEI",
direction="l",
- lccode=0x03E7,
+ lccode=0x3E7,
linebreak="al",
- unicodeslot=0x03E6,
+ unicodeslot=0x3E6,
},
- [0x03E7]={
+ [0x3E7]={
adobename="kheicoptic",
category="ll",
description="COPTIC SMALL LETTER KHEI",
direction="l",
linebreak="al",
- uccode=0x03E6,
- unicodeslot=0x03E7,
+ uccode=0x3E6,
+ unicodeslot=0x3E7,
},
- [0x03E8]={
+ [0x3E8]={
adobename="Horicoptic",
category="lu",
description="COPTIC CAPITAL LETTER HORI",
direction="l",
- lccode=0x03E9,
+ lccode=0x3E9,
linebreak="al",
- unicodeslot=0x03E8,
+ unicodeslot=0x3E8,
},
- [0x03E9]={
+ [0x3E9]={
adobename="horicoptic",
category="ll",
description="COPTIC SMALL LETTER HORI",
direction="l",
linebreak="al",
- uccode=0x03E8,
- unicodeslot=0x03E9,
+ uccode=0x3E8,
+ unicodeslot=0x3E9,
},
- [0x03EA]={
+ [0x3EA]={
adobename="Gangiacoptic",
category="lu",
description="COPTIC CAPITAL LETTER GANGIA",
direction="l",
- lccode=0x03EB,
+ lccode=0x3EB,
linebreak="al",
- unicodeslot=0x03EA,
+ unicodeslot=0x3EA,
},
- [0x03EB]={
+ [0x3EB]={
adobename="gangiacoptic",
category="ll",
description="COPTIC SMALL LETTER GANGIA",
direction="l",
linebreak="al",
- uccode=0x03EA,
- unicodeslot=0x03EB,
+ uccode=0x3EA,
+ unicodeslot=0x3EB,
},
- [0x03EC]={
+ [0x3EC]={
adobename="Shimacoptic",
category="lu",
description="COPTIC CAPITAL LETTER SHIMA",
direction="l",
- lccode=0x03ED,
+ lccode=0x3ED,
linebreak="al",
- unicodeslot=0x03EC,
+ unicodeslot=0x3EC,
},
- [0x03ED]={
+ [0x3ED]={
adobename="shimacoptic",
category="ll",
description="COPTIC SMALL LETTER SHIMA",
direction="l",
linebreak="al",
- uccode=0x03EC,
- unicodeslot=0x03ED,
+ uccode=0x3EC,
+ unicodeslot=0x3ED,
},
- [0x03EE]={
+ [0x3EE]={
adobename="Deicoptic",
category="lu",
description="COPTIC CAPITAL LETTER DEI",
direction="l",
- lccode=0x03EF,
+ lccode=0x3EF,
linebreak="al",
- unicodeslot=0x03EE,
+ unicodeslot=0x3EE,
},
- [0x03EF]={
+ [0x3EF]={
adobename="deicoptic",
category="ll",
description="COPTIC SMALL LETTER DEI",
direction="l",
linebreak="al",
- uccode=0x03EE,
- unicodeslot=0x03EF,
+ uccode=0x3EE,
+ unicodeslot=0x3EF,
},
- [0x03F0]={
+ [0x3F0]={
adobename="kappasymbolgreek",
category="ll",
description="GREEK KAPPA SYMBOL",
@@ -10318,50 +10448,50 @@ characters.data={
linebreak="al",
mathclass="ordinary",
mathname="varkappa",
- specials={ "compat", 0x03BA },
- uccode=0x039A,
- unicodeslot=0x03F0,
+ specials={ "compat", 0x3BA },
+ uccode=0x39A,
+ unicodeslot=0x3F0,
},
- [0x03F1]={
+ [0x3F1]={
adobename="rhosymbolgreek",
category="ll",
contextname="greekrhoalt",
description="GREEK RHO SYMBOL",
direction="l",
linebreak="al",
- specials={ "compat", 0x03C1 },
- uccode=0x03A1,
- unicodeslot=0x03F1,
+ specials={ "compat", 0x3C1 },
+ uccode=0x3A1,
+ unicodeslot=0x3F1,
},
- [0x03F2]={
+ [0x3F2]={
adobename="sigmalunatesymbolgreek",
category="ll",
contextname="greeksigmalunate",
description="GREEK LUNATE SIGMA SYMBOL",
direction="l",
linebreak="al",
- specials={ "compat", 0x03C2 },
- uccode=0x03F9,
- unicodeslot=0x03F2,
+ specials={ "compat", 0x3C2 },
+ uccode=0x3F9,
+ unicodeslot=0x3F2,
},
- [0x03F3]={
+ [0x3F3]={
adobename="yotgreek",
category="ll",
description="GREEK LETTER YOT",
direction="l",
linebreak="al",
- unicodeslot=0x03F3,
+ unicodeslot=0x3F3,
},
- [0x03F4]={
+ [0x3F4]={
category="lu",
description="GREEK CAPITAL THETA SYMBOL",
direction="l",
- lccode=0x03B8,
+ lccode=0x3B8,
linebreak="al",
- specials={ "compat", 0x0398 },
- unicodeslot=0x03F4,
+ specials={ "compat", 0x398 },
+ unicodeslot=0x3F4,
},
- [0x03F5]={
+ [0x3F5]={
category="ll",
contextname="greekepsilonalt",
description="GREEK LUNATE EPSILON SYMBOL",
@@ -10369,614 +10499,614 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="epsilon",
- specials={ "compat", 0x03B5 },
- uccode=0x0395,
- unicodeslot=0x03F5,
+ specials={ "compat", 0x3B5 },
+ uccode=0x395,
+ unicodeslot=0x3F5,
},
- [0x03F6]={
+ [0x3F6]={
category="sm",
description="GREEK REVERSED LUNATE EPSILON SYMBOL",
direction="on",
linebreak="al",
mathclass="variable",
mathname="backepsilon",
- unicodeslot=0x03F6,
+ unicodeslot=0x3F6,
},
- [0x03F7]={
+ [0x3F7]={
category="lu",
description="GREEK CAPITAL LETTER SHO",
direction="l",
- lccode=0x03F8,
+ lccode=0x3F8,
linebreak="al",
- unicodeslot=0x03F7,
+ unicodeslot=0x3F7,
},
- [0x03F8]={
+ [0x3F8]={
category="ll",
description="GREEK SMALL LETTER SHO",
direction="l",
linebreak="al",
- uccode=0x03F7,
- unicodeslot=0x03F8,
+ uccode=0x3F7,
+ unicodeslot=0x3F8,
},
- [0x03F9]={
+ [0x3F9]={
category="lu",
contextname="greekSigmalunate",
description="GREEK CAPITAL LUNATE SIGMA SYMBOL",
direction="l",
- lccode=0x03F2,
+ lccode=0x3F2,
linebreak="al",
- specials={ "compat", 0x03A3 },
- unicodeslot=0x03F9,
+ specials={ "compat", 0x3A3 },
+ unicodeslot=0x3F9,
},
- [0x03FA]={
+ [0x3FA]={
category="lu",
description="GREEK CAPITAL LETTER SAN",
direction="l",
- lccode=0x03FB,
+ lccode=0x3FB,
linebreak="al",
- unicodeslot=0x03FA,
+ unicodeslot=0x3FA,
},
- [0x03FB]={
+ [0x3FB]={
category="ll",
description="GREEK SMALL LETTER SAN",
direction="l",
linebreak="al",
- uccode=0x03FA,
- unicodeslot=0x03FB,
+ uccode=0x3FA,
+ unicodeslot=0x3FB,
},
- [0x03FC]={
+ [0x3FC]={
category="ll",
description="GREEK RHO WITH STROKE SYMBOL",
direction="l",
linebreak="al",
- unicodeslot=0x03FC,
+ unicodeslot=0x3FC,
},
- [0x03FD]={
+ [0x3FD]={
category="lu",
description="GREEK CAPITAL REVERSED LUNATE SIGMA SYMBOL",
direction="l",
- lccode=0x037B,
+ lccode=0x37B,
linebreak="al",
- unicodeslot=0x03FD,
+ unicodeslot=0x3FD,
},
- [0x03FE]={
+ [0x3FE]={
category="lu",
description="GREEK CAPITAL DOTTED LUNATE SIGMA SYMBOL",
direction="l",
- lccode=0x037C,
+ lccode=0x37C,
linebreak="al",
- unicodeslot=0x03FE,
+ unicodeslot=0x3FE,
},
- [0x03FF]={
+ [0x3FF]={
category="lu",
description="GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL",
direction="l",
- lccode=0x037D,
+ lccode=0x37D,
linebreak="al",
- unicodeslot=0x03FF,
+ unicodeslot=0x3FF,
},
- [0x0400]={
+ [0x400]={
category="lu",
contextname="cyrillicEgrave",
description="CYRILLIC CAPITAL LETTER IE WITH GRAVE",
direction="l",
- lccode=0x0450,
+ lccode=0x450,
linebreak="al",
- shcode=0x0415,
- specials={ "char", 0x0415, 0x0300 },
- unicodeslot=0x0400,
+ shcode=0x415,
+ specials={ "char", 0x415, 0x300 },
+ unicodeslot=0x400,
},
- [0x0401]={
+ [0x401]={
adobename="afii10023",
category="lu",
cjkwd="a",
contextname="cyrillicYO",
description="CYRILLIC CAPITAL LETTER IO",
direction="l",
- lccode=0x0451,
+ lccode=0x451,
linebreak="al",
- specials={ "char", 0x0415, 0x0308 },
- unicodeslot=0x0401,
+ specials={ "char", 0x415, 0x308 },
+ unicodeslot=0x401,
},
- [0x0402]={
+ [0x402]={
adobename="afii10051",
category="lu",
contextname="cyrillicDJE",
description="CYRILLIC CAPITAL LETTER DJE",
direction="l",
- lccode=0x0452,
+ lccode=0x452,
linebreak="al",
- unicodeslot=0x0402,
+ unicodeslot=0x402,
},
- [0x0403]={
+ [0x403]={
adobename="afii10052",
category="lu",
contextname="cyrillicGJE",
description="CYRILLIC CAPITAL LETTER GJE",
direction="l",
- lccode=0x0453,
+ lccode=0x453,
linebreak="al",
- specials={ "char", 0x0413, 0x0301 },
- unicodeslot=0x0403,
+ specials={ "char", 0x413, 0x301 },
+ unicodeslot=0x403,
},
- [0x0404]={
+ [0x404]={
adobename="afii10053",
category="lu",
contextname="cyrillicIE",
description="CYRILLIC CAPITAL LETTER UKRAINIAN IE",
direction="l",
- lccode=0x0454,
+ lccode=0x454,
linebreak="al",
- unicodeslot=0x0404,
+ unicodeslot=0x404,
},
- [0x0405]={
+ [0x405]={
adobename="afii10054",
category="lu",
contextname="cyrillicDZE",
description="CYRILLIC CAPITAL LETTER DZE",
direction="l",
- lccode=0x0455,
+ lccode=0x455,
linebreak="al",
- unicodeslot=0x0405,
+ unicodeslot=0x405,
},
- [0x0406]={
+ [0x406]={
adobename="afii10055",
category="lu",
contextname="cyrillicII",
description="CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I",
direction="l",
- lccode=0x0456,
+ lccode=0x456,
linebreak="al",
- unicodeslot=0x0406,
+ unicodeslot=0x406,
},
- [0x0407]={
+ [0x407]={
adobename="afii10056",
category="lu",
contextname="cyrillicYI",
description="CYRILLIC CAPITAL LETTER YI",
direction="l",
- lccode=0x0457,
+ lccode=0x457,
linebreak="al",
- specials={ "char", 0x0406, 0x0308 },
- unicodeslot=0x0407,
+ specials={ "char", 0x406, 0x308 },
+ unicodeslot=0x407,
},
- [0x0408]={
+ [0x408]={
adobename="afii10057",
category="lu",
contextname="cyrillicJE",
description="CYRILLIC CAPITAL LETTER JE",
direction="l",
- lccode=0x0458,
+ lccode=0x458,
linebreak="al",
- unicodeslot=0x0408,
+ unicodeslot=0x408,
},
- [0x0409]={
+ [0x409]={
adobename="afii10058",
category="lu",
contextname="cyrillicLJE",
description="CYRILLIC CAPITAL LETTER LJE",
direction="l",
- lccode=0x0459,
+ lccode=0x459,
linebreak="al",
- unicodeslot=0x0409,
+ unicodeslot=0x409,
},
- [0x040A]={
+ [0x40A]={
adobename="afii10059",
category="lu",
contextname="cyrillicNJE",
description="CYRILLIC CAPITAL LETTER NJE",
direction="l",
- lccode=0x045A,
+ lccode=0x45A,
linebreak="al",
- unicodeslot=0x040A,
+ unicodeslot=0x40A,
},
- [0x040B]={
+ [0x40B]={
adobename="afii10060",
category="lu",
contextname="cyrillicTSHE",
description="CYRILLIC CAPITAL LETTER TSHE",
direction="l",
- lccode=0x045B,
+ lccode=0x45B,
linebreak="al",
- unicodeslot=0x040B,
+ unicodeslot=0x40B,
},
- [0x040C]={
+ [0x40C]={
adobename="afii10061",
category="lu",
contextname="cyrillicKJE",
description="CYRILLIC CAPITAL LETTER KJE",
direction="l",
- lccode=0x045C,
+ lccode=0x45C,
linebreak="al",
- specials={ "char", 0x041A, 0x0301 },
- unicodeslot=0x040C,
+ specials={ "char", 0x41A, 0x301 },
+ unicodeslot=0x40C,
},
- [0x040D]={
+ [0x40D]={
category="lu",
contextname="cyrillicIgrave",
description="CYRILLIC CAPITAL LETTER I WITH GRAVE",
direction="l",
- lccode=0x045D,
+ lccode=0x45D,
linebreak="al",
- shcode=0x0418,
- specials={ "char", 0x0418, 0x0300 },
- unicodeslot=0x040D,
+ shcode=0x418,
+ specials={ "char", 0x418, 0x300 },
+ unicodeslot=0x40D,
},
- [0x040E]={
+ [0x40E]={
adobename="afii10062",
category="lu",
contextname="cyrillicUSHRT",
description="CYRILLIC CAPITAL LETTER SHORT U",
direction="l",
- lccode=0x045E,
+ lccode=0x45E,
linebreak="al",
- specials={ "char", 0x0423, 0x0306 },
- unicodeslot=0x040E,
+ specials={ "char", 0x423, 0x306 },
+ unicodeslot=0x40E,
},
- [0x040F]={
+ [0x40F]={
adobename="afii10145",
category="lu",
contextname="cyrillicDZHE",
description="CYRILLIC CAPITAL LETTER DZHE",
direction="l",
- lccode=0x045F,
+ lccode=0x45F,
linebreak="al",
- unicodeslot=0x040F,
+ unicodeslot=0x40F,
},
- [0x0410]={
+ [0x410]={
adobename="afii10017",
category="lu",
cjkwd="a",
contextname="cyrillicA",
description="CYRILLIC CAPITAL LETTER A",
direction="l",
- lccode=0x0430,
+ lccode=0x430,
linebreak="al",
- unicodeslot=0x0410,
+ unicodeslot=0x410,
},
- [0x0411]={
+ [0x411]={
adobename="afii10018",
category="lu",
cjkwd="a",
contextname="cyrillicB",
description="CYRILLIC CAPITAL LETTER BE",
direction="l",
- lccode=0x0431,
+ lccode=0x431,
linebreak="al",
- unicodeslot=0x0411,
+ unicodeslot=0x411,
},
- [0x0412]={
+ [0x412]={
adobename="afii10019",
category="lu",
cjkwd="a",
contextname="cyrillicV",
description="CYRILLIC CAPITAL LETTER VE",
direction="l",
- lccode=0x0432,
+ lccode=0x432,
linebreak="al",
- unicodeslot=0x0412,
+ unicodeslot=0x412,
},
- [0x0413]={
+ [0x413]={
adobename="afii10020",
category="lu",
cjkwd="a",
contextname="cyrillicG",
description="CYRILLIC CAPITAL LETTER GHE",
direction="l",
- lccode=0x0433,
+ lccode=0x433,
linebreak="al",
- unicodeslot=0x0413,
+ unicodeslot=0x413,
},
- [0x0414]={
+ [0x414]={
adobename="afii10021",
category="lu",
cjkwd="a",
contextname="cyrillicD",
description="CYRILLIC CAPITAL LETTER DE",
direction="l",
- lccode=0x0434,
+ lccode=0x434,
linebreak="al",
- unicodeslot=0x0414,
+ unicodeslot=0x414,
},
- [0x0415]={
+ [0x415]={
adobename="afii10022",
category="lu",
cjkwd="a",
contextname="cyrillicE",
description="CYRILLIC CAPITAL LETTER IE",
direction="l",
- lccode=0x0435,
+ lccode=0x435,
linebreak="al",
- unicodeslot=0x0415,
+ unicodeslot=0x415,
},
- [0x0416]={
+ [0x416]={
adobename="afii10024",
category="lu",
cjkwd="a",
contextname="cyrillicZH",
description="CYRILLIC CAPITAL LETTER ZHE",
direction="l",
- lccode=0x0436,
+ lccode=0x436,
linebreak="al",
- unicodeslot=0x0416,
+ unicodeslot=0x416,
},
- [0x0417]={
+ [0x417]={
adobename="afii10025",
category="lu",
cjkwd="a",
contextname="cyrillicZ",
description="CYRILLIC CAPITAL LETTER ZE",
direction="l",
- lccode=0x0437,
+ lccode=0x437,
linebreak="al",
- unicodeslot=0x0417,
+ unicodeslot=0x417,
},
- [0x0418]={
+ [0x418]={
adobename="afii10026",
category="lu",
cjkwd="a",
contextname="cyrillicI",
description="CYRILLIC CAPITAL LETTER I",
direction="l",
- lccode=0x0438,
+ lccode=0x438,
linebreak="al",
- unicodeslot=0x0418,
+ unicodeslot=0x418,
},
- [0x0419]={
+ [0x419]={
adobename="afii10027",
category="lu",
cjkwd="a",
contextname="cyrillicISHRT",
description="CYRILLIC CAPITAL LETTER SHORT I",
direction="l",
- lccode=0x0439,
+ lccode=0x439,
linebreak="al",
- specials={ "char", 0x0418, 0x0306 },
- unicodeslot=0x0419,
+ specials={ "char", 0x418, 0x306 },
+ unicodeslot=0x419,
},
- [0x041A]={
+ [0x41A]={
adobename="afii10028",
category="lu",
cjkwd="a",
contextname="cyrillicK",
description="CYRILLIC CAPITAL LETTER KA",
direction="l",
- lccode=0x043A,
+ lccode=0x43A,
linebreak="al",
- unicodeslot=0x041A,
+ unicodeslot=0x41A,
},
- [0x041B]={
+ [0x41B]={
adobename="afii10029",
category="lu",
cjkwd="a",
contextname="cyrillicL",
description="CYRILLIC CAPITAL LETTER EL",
direction="l",
- lccode=0x043B,
+ lccode=0x43B,
linebreak="al",
- unicodeslot=0x041B,
+ unicodeslot=0x41B,
},
- [0x041C]={
+ [0x41C]={
adobename="afii10030",
category="lu",
cjkwd="a",
contextname="cyrillicM",
description="CYRILLIC CAPITAL LETTER EM",
direction="l",
- lccode=0x043C,
+ lccode=0x43C,
linebreak="al",
- unicodeslot=0x041C,
+ unicodeslot=0x41C,
},
- [0x041D]={
+ [0x41D]={
adobename="afii10031",
category="lu",
cjkwd="a",
contextname="cyrillicN",
description="CYRILLIC CAPITAL LETTER EN",
direction="l",
- lccode=0x043D,
+ lccode=0x43D,
linebreak="al",
- unicodeslot=0x041D,
+ unicodeslot=0x41D,
},
- [0x041E]={
+ [0x41E]={
adobename="afii10032",
category="lu",
cjkwd="a",
contextname="cyrillicO",
description="CYRILLIC CAPITAL LETTER O",
direction="l",
- lccode=0x043E,
+ lccode=0x43E,
linebreak="al",
- unicodeslot=0x041E,
+ unicodeslot=0x41E,
},
- [0x041F]={
+ [0x41F]={
adobename="afii10033",
category="lu",
cjkwd="a",
contextname="cyrillicP",
description="CYRILLIC CAPITAL LETTER PE",
direction="l",
- lccode=0x043F,
+ lccode=0x43F,
linebreak="al",
- unicodeslot=0x041F,
+ unicodeslot=0x41F,
},
- [0x0420]={
+ [0x420]={
adobename="afii10034",
category="lu",
cjkwd="a",
contextname="cyrillicR",
description="CYRILLIC CAPITAL LETTER ER",
direction="l",
- lccode=0x0440,
+ lccode=0x440,
linebreak="al",
- unicodeslot=0x0420,
+ unicodeslot=0x420,
},
- [0x0421]={
+ [0x421]={
adobename="afii10035",
category="lu",
cjkwd="a",
contextname="cyrillicS",
description="CYRILLIC CAPITAL LETTER ES",
direction="l",
- lccode=0x0441,
+ lccode=0x441,
linebreak="al",
- unicodeslot=0x0421,
+ unicodeslot=0x421,
},
- [0x0422]={
+ [0x422]={
adobename="afii10036",
category="lu",
cjkwd="a",
contextname="cyrillicT",
description="CYRILLIC CAPITAL LETTER TE",
direction="l",
- lccode=0x0442,
+ lccode=0x442,
linebreak="al",
- unicodeslot=0x0422,
+ unicodeslot=0x422,
},
- [0x0423]={
+ [0x423]={
adobename="afii10037",
category="lu",
cjkwd="a",
contextname="cyrillicU",
description="CYRILLIC CAPITAL LETTER U",
direction="l",
- lccode=0x0443,
+ lccode=0x443,
linebreak="al",
- unicodeslot=0x0423,
+ unicodeslot=0x423,
},
- [0x0424]={
+ [0x424]={
adobename="afii10038",
category="lu",
cjkwd="a",
contextname="cyrillicF",
description="CYRILLIC CAPITAL LETTER EF",
direction="l",
- lccode=0x0444,
+ lccode=0x444,
linebreak="al",
- unicodeslot=0x0424,
+ unicodeslot=0x424,
},
- [0x0425]={
+ [0x425]={
adobename="afii10039",
category="lu",
cjkwd="a",
contextname="cyrillicH",
description="CYRILLIC CAPITAL LETTER HA",
direction="l",
- lccode=0x0445,
+ lccode=0x445,
linebreak="al",
- unicodeslot=0x0425,
+ unicodeslot=0x425,
},
- [0x0426]={
+ [0x426]={
adobename="afii10040",
category="lu",
cjkwd="a",
contextname="cyrillicC",
description="CYRILLIC CAPITAL LETTER TSE",
direction="l",
- lccode=0x0446,
+ lccode=0x446,
linebreak="al",
- unicodeslot=0x0426,
+ unicodeslot=0x426,
},
- [0x0427]={
+ [0x427]={
adobename="afii10041",
category="lu",
cjkwd="a",
contextname="cyrillicCH",
description="CYRILLIC CAPITAL LETTER CHE",
direction="l",
- lccode=0x0447,
+ lccode=0x447,
linebreak="al",
- unicodeslot=0x0427,
+ unicodeslot=0x427,
},
- [0x0428]={
+ [0x428]={
adobename="afii10042",
category="lu",
cjkwd="a",
contextname="cyrillicSH",
description="CYRILLIC CAPITAL LETTER SHA",
direction="l",
- lccode=0x0448,
+ lccode=0x448,
linebreak="al",
- unicodeslot=0x0428,
+ unicodeslot=0x428,
},
- [0x0429]={
+ [0x429]={
adobename="afii10043",
category="lu",
cjkwd="a",
contextname="cyrillicSHCH",
description="CYRILLIC CAPITAL LETTER SHCHA",
direction="l",
- lccode=0x0449,
+ lccode=0x449,
linebreak="al",
- unicodeslot=0x0429,
+ unicodeslot=0x429,
},
- [0x042A]={
+ [0x42A]={
adobename="afii10044",
category="lu",
cjkwd="a",
contextname="cyrillicHRDSN",
description="CYRILLIC CAPITAL LETTER HARD SIGN",
direction="l",
- lccode=0x044A,
+ lccode=0x44A,
linebreak="al",
- unicodeslot=0x042A,
+ unicodeslot=0x42A,
},
- [0x042B]={
+ [0x42B]={
adobename="afii10045",
category="lu",
cjkwd="a",
contextname="cyrillicERY",
description="CYRILLIC CAPITAL LETTER YERU",
direction="l",
- lccode=0x044B,
+ lccode=0x44B,
linebreak="al",
- unicodeslot=0x042B,
+ unicodeslot=0x42B,
},
- [0x042C]={
+ [0x42C]={
adobename="afii10046",
category="lu",
cjkwd="a",
contextname="cyrillicSFTSN",
description="CYRILLIC CAPITAL LETTER SOFT SIGN",
direction="l",
- lccode=0x044C,
+ lccode=0x44C,
linebreak="al",
- unicodeslot=0x042C,
+ unicodeslot=0x42C,
},
- [0x042D]={
+ [0x42D]={
adobename="afii10047",
category="lu",
cjkwd="a",
contextname="cyrillicEREV",
description="CYRILLIC CAPITAL LETTER E",
direction="l",
- lccode=0x044D,
+ lccode=0x44D,
linebreak="al",
- unicodeslot=0x042D,
+ unicodeslot=0x42D,
},
- [0x042E]={
+ [0x42E]={
adobename="afii10048",
category="lu",
cjkwd="a",
contextname="cyrillicYU",
description="CYRILLIC CAPITAL LETTER YU",
direction="l",
- lccode=0x044E,
+ lccode=0x44E,
linebreak="al",
- unicodeslot=0x042E,
+ unicodeslot=0x42E,
},
- [0x042F]={
+ [0x42F]={
adobename="afii10049",
category="lu",
cjkwd="a",
contextname="cyrillicYA",
description="CYRILLIC CAPITAL LETTER YA",
direction="l",
- lccode=0x044F,
+ lccode=0x44F,
linebreak="al",
- unicodeslot=0x042F,
+ unicodeslot=0x42F,
},
- [0x0430]={
+ [0x430]={
adobename="afii10065",
category="ll",
cjkwd="a",
@@ -10984,10 +11114,10 @@ characters.data={
description="CYRILLIC SMALL LETTER A",
direction="l",
linebreak="al",
- uccode=0x0410,
- unicodeslot=0x0430,
+ uccode=0x410,
+ unicodeslot=0x430,
},
- [0x0431]={
+ [0x431]={
adobename="becyrillic",
category="ll",
cjkwd="a",
@@ -10995,10 +11125,10 @@ characters.data={
description="CYRILLIC SMALL LETTER BE",
direction="l",
linebreak="al",
- uccode=0x0411,
- unicodeslot=0x0431,
+ uccode=0x411,
+ unicodeslot=0x431,
},
- [0x0432]={
+ [0x432]={
adobename="vecyrillic",
category="ll",
cjkwd="a",
@@ -11006,10 +11136,10 @@ characters.data={
description="CYRILLIC SMALL LETTER VE",
direction="l",
linebreak="al",
- uccode=0x0412,
- unicodeslot=0x0432,
+ uccode=0x412,
+ unicodeslot=0x432,
},
- [0x0433]={
+ [0x433]={
adobename="gecyrillic",
category="ll",
cjkwd="a",
@@ -11017,10 +11147,10 @@ characters.data={
description="CYRILLIC SMALL LETTER GHE",
direction="l",
linebreak="al",
- uccode=0x0413,
- unicodeslot=0x0433,
+ uccode=0x413,
+ unicodeslot=0x433,
},
- [0x0434]={
+ [0x434]={
adobename="decyrillic",
category="ll",
cjkwd="a",
@@ -11028,10 +11158,10 @@ characters.data={
description="CYRILLIC SMALL LETTER DE",
direction="l",
linebreak="al",
- uccode=0x0414,
- unicodeslot=0x0434,
+ uccode=0x414,
+ unicodeslot=0x434,
},
- [0x0435]={
+ [0x435]={
adobename="iecyrillic",
category="ll",
cjkwd="a",
@@ -11039,10 +11169,10 @@ characters.data={
description="CYRILLIC SMALL LETTER IE",
direction="l",
linebreak="al",
- uccode=0x0415,
- unicodeslot=0x0435,
+ uccode=0x415,
+ unicodeslot=0x435,
},
- [0x0436]={
+ [0x436]={
adobename="zhecyrillic",
category="ll",
cjkwd="a",
@@ -11050,10 +11180,10 @@ characters.data={
description="CYRILLIC SMALL LETTER ZHE",
direction="l",
linebreak="al",
- uccode=0x0416,
- unicodeslot=0x0436,
+ uccode=0x416,
+ unicodeslot=0x436,
},
- [0x0437]={
+ [0x437]={
adobename="zecyrillic",
category="ll",
cjkwd="a",
@@ -11061,10 +11191,10 @@ characters.data={
description="CYRILLIC SMALL LETTER ZE",
direction="l",
linebreak="al",
- uccode=0x0417,
- unicodeslot=0x0437,
+ uccode=0x417,
+ unicodeslot=0x437,
},
- [0x0438]={
+ [0x438]={
adobename="iicyrillic",
category="ll",
cjkwd="a",
@@ -11072,10 +11202,10 @@ characters.data={
description="CYRILLIC SMALL LETTER I",
direction="l",
linebreak="al",
- uccode=0x0418,
- unicodeslot=0x0438,
+ uccode=0x418,
+ unicodeslot=0x438,
},
- [0x0439]={
+ [0x439]={
adobename="iishortcyrillic",
category="ll",
cjkwd="a",
@@ -11083,11 +11213,11 @@ characters.data={
description="CYRILLIC SMALL LETTER SHORT I",
direction="l",
linebreak="al",
- specials={ "char", 0x0438, 0x0306 },
- uccode=0x0419,
- unicodeslot=0x0439,
+ specials={ "char", 0x438, 0x306 },
+ uccode=0x419,
+ unicodeslot=0x439,
},
- [0x043A]={
+ [0x43A]={
adobename="kacyrillic",
category="ll",
cjkwd="a",
@@ -11095,10 +11225,10 @@ characters.data={
description="CYRILLIC SMALL LETTER KA",
direction="l",
linebreak="al",
- uccode=0x041A,
- unicodeslot=0x043A,
+ uccode=0x41A,
+ unicodeslot=0x43A,
},
- [0x043B]={
+ [0x43B]={
adobename="elcyrillic",
category="ll",
cjkwd="a",
@@ -11106,10 +11236,10 @@ characters.data={
description="CYRILLIC SMALL LETTER EL",
direction="l",
linebreak="al",
- uccode=0x041B,
- unicodeslot=0x043B,
+ uccode=0x41B,
+ unicodeslot=0x43B,
},
- [0x043C]={
+ [0x43C]={
adobename="emcyrillic",
category="ll",
cjkwd="a",
@@ -11117,10 +11247,10 @@ characters.data={
description="CYRILLIC SMALL LETTER EM",
direction="l",
linebreak="al",
- uccode=0x041C,
- unicodeslot=0x043C,
+ uccode=0x41C,
+ unicodeslot=0x43C,
},
- [0x043D]={
+ [0x43D]={
adobename="encyrillic",
category="ll",
cjkwd="a",
@@ -11128,10 +11258,10 @@ characters.data={
description="CYRILLIC SMALL LETTER EN",
direction="l",
linebreak="al",
- uccode=0x041D,
- unicodeslot=0x043D,
+ uccode=0x41D,
+ unicodeslot=0x43D,
},
- [0x043E]={
+ [0x43E]={
adobename="ocyrillic",
category="ll",
cjkwd="a",
@@ -11139,10 +11269,10 @@ characters.data={
description="CYRILLIC SMALL LETTER O",
direction="l",
linebreak="al",
- uccode=0x041E,
- unicodeslot=0x043E,
+ uccode=0x41E,
+ unicodeslot=0x43E,
},
- [0x043F]={
+ [0x43F]={
adobename="pecyrillic",
category="ll",
cjkwd="a",
@@ -11150,10 +11280,10 @@ characters.data={
description="CYRILLIC SMALL LETTER PE",
direction="l",
linebreak="al",
- uccode=0x041F,
- unicodeslot=0x043F,
+ uccode=0x41F,
+ unicodeslot=0x43F,
},
- [0x0440]={
+ [0x440]={
adobename="ercyrillic",
category="ll",
cjkwd="a",
@@ -11161,10 +11291,10 @@ characters.data={
description="CYRILLIC SMALL LETTER ER",
direction="l",
linebreak="al",
- uccode=0x0420,
- unicodeslot=0x0440,
+ uccode=0x420,
+ unicodeslot=0x440,
},
- [0x0441]={
+ [0x441]={
adobename="escyrillic",
category="ll",
cjkwd="a",
@@ -11172,10 +11302,10 @@ characters.data={
description="CYRILLIC SMALL LETTER ES",
direction="l",
linebreak="al",
- uccode=0x0421,
- unicodeslot=0x0441,
+ uccode=0x421,
+ unicodeslot=0x441,
},
- [0x0442]={
+ [0x442]={
adobename="tecyrillic",
category="ll",
cjkwd="a",
@@ -11183,10 +11313,10 @@ characters.data={
description="CYRILLIC SMALL LETTER TE",
direction="l",
linebreak="al",
- uccode=0x0422,
- unicodeslot=0x0442,
+ uccode=0x422,
+ unicodeslot=0x442,
},
- [0x0443]={
+ [0x443]={
adobename="ucyrillic",
category="ll",
cjkwd="a",
@@ -11194,10 +11324,10 @@ characters.data={
description="CYRILLIC SMALL LETTER U",
direction="l",
linebreak="al",
- uccode=0x0423,
- unicodeslot=0x0443,
+ uccode=0x423,
+ unicodeslot=0x443,
},
- [0x0444]={
+ [0x444]={
adobename="efcyrillic",
category="ll",
cjkwd="a",
@@ -11205,10 +11335,10 @@ characters.data={
description="CYRILLIC SMALL LETTER EF",
direction="l",
linebreak="al",
- uccode=0x0424,
- unicodeslot=0x0444,
+ uccode=0x424,
+ unicodeslot=0x444,
},
- [0x0445]={
+ [0x445]={
adobename="khacyrillic",
category="ll",
cjkwd="a",
@@ -11216,10 +11346,10 @@ characters.data={
description="CYRILLIC SMALL LETTER HA",
direction="l",
linebreak="al",
- uccode=0x0425,
- unicodeslot=0x0445,
+ uccode=0x425,
+ unicodeslot=0x445,
},
- [0x0446]={
+ [0x446]={
adobename="tsecyrillic",
category="ll",
cjkwd="a",
@@ -11227,10 +11357,10 @@ characters.data={
description="CYRILLIC SMALL LETTER TSE",
direction="l",
linebreak="al",
- uccode=0x0426,
- unicodeslot=0x0446,
+ uccode=0x426,
+ unicodeslot=0x446,
},
- [0x0447]={
+ [0x447]={
adobename="checyrillic",
category="ll",
cjkwd="a",
@@ -11238,10 +11368,10 @@ characters.data={
description="CYRILLIC SMALL LETTER CHE",
direction="l",
linebreak="al",
- uccode=0x0427,
- unicodeslot=0x0447,
+ uccode=0x427,
+ unicodeslot=0x447,
},
- [0x0448]={
+ [0x448]={
adobename="shacyrillic",
category="ll",
cjkwd="a",
@@ -11249,10 +11379,10 @@ characters.data={
description="CYRILLIC SMALL LETTER SHA",
direction="l",
linebreak="al",
- uccode=0x0428,
- unicodeslot=0x0448,
+ uccode=0x428,
+ unicodeslot=0x448,
},
- [0x0449]={
+ [0x449]={
adobename="shchacyrillic",
category="ll",
cjkwd="a",
@@ -11260,10 +11390,10 @@ characters.data={
description="CYRILLIC SMALL LETTER SHCHA",
direction="l",
linebreak="al",
- uccode=0x0429,
- unicodeslot=0x0449,
+ uccode=0x429,
+ unicodeslot=0x449,
},
- [0x044A]={
+ [0x44A]={
adobename="hardsigncyrillic",
category="ll",
cjkwd="a",
@@ -11271,10 +11401,10 @@ characters.data={
description="CYRILLIC SMALL LETTER HARD SIGN",
direction="l",
linebreak="al",
- uccode=0x042A,
- unicodeslot=0x044A,
+ uccode=0x42A,
+ unicodeslot=0x44A,
},
- [0x044B]={
+ [0x44B]={
adobename="yericyrillic",
category="ll",
cjkwd="a",
@@ -11282,10 +11412,10 @@ characters.data={
description="CYRILLIC SMALL LETTER YERU",
direction="l",
linebreak="al",
- uccode=0x042B,
- unicodeslot=0x044B,
+ uccode=0x42B,
+ unicodeslot=0x44B,
},
- [0x044C]={
+ [0x44C]={
adobename="softsigncyrillic",
category="ll",
cjkwd="a",
@@ -11293,10 +11423,10 @@ characters.data={
description="CYRILLIC SMALL LETTER SOFT SIGN",
direction="l",
linebreak="al",
- uccode=0x042C,
- unicodeslot=0x044C,
+ uccode=0x42C,
+ unicodeslot=0x44C,
},
- [0x044D]={
+ [0x44D]={
adobename="ereversedcyrillic",
category="ll",
cjkwd="a",
@@ -11304,10 +11434,10 @@ characters.data={
description="CYRILLIC SMALL LETTER E",
direction="l",
linebreak="al",
- uccode=0x042D,
- unicodeslot=0x044D,
+ uccode=0x42D,
+ unicodeslot=0x44D,
},
- [0x044E]={
+ [0x44E]={
adobename="iucyrillic",
category="ll",
cjkwd="a",
@@ -11315,10 +11445,10 @@ characters.data={
description="CYRILLIC SMALL LETTER YU",
direction="l",
linebreak="al",
- uccode=0x042E,
- unicodeslot=0x044E,
+ uccode=0x42E,
+ unicodeslot=0x44E,
},
- [0x044F]={
+ [0x44F]={
adobename="iacyrillic",
category="ll",
cjkwd="a",
@@ -11326,21 +11456,21 @@ characters.data={
description="CYRILLIC SMALL LETTER YA",
direction="l",
linebreak="al",
- uccode=0x042F,
- unicodeslot=0x044F,
+ uccode=0x42F,
+ unicodeslot=0x44F,
},
- [0x0450]={
+ [0x450]={
category="ll",
contextname="cyrillicegrave",
description="CYRILLIC SMALL LETTER IE WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0435,
- specials={ "char", 0x0435, 0x0300 },
- uccode=0x0400,
- unicodeslot=0x0450,
+ shcode=0x435,
+ specials={ "char", 0x435, 0x300 },
+ uccode=0x400,
+ unicodeslot=0x450,
},
- [0x0451]={
+ [0x451]={
adobename="iocyrillic",
category="ll",
cjkwd="a",
@@ -11348,17808 +11478,18319 @@ characters.data={
description="CYRILLIC SMALL LETTER IO",
direction="l",
linebreak="al",
- specials={ "char", 0x0435, 0x0308 },
- uccode=0x0401,
- unicodeslot=0x0451,
+ specials={ "char", 0x435, 0x308 },
+ uccode=0x401,
+ unicodeslot=0x451,
},
- [0x0452]={
+ [0x452]={
adobename="djecyrillic",
category="ll",
contextname="cyrillicdje",
description="CYRILLIC SMALL LETTER DJE",
direction="l",
linebreak="al",
- uccode=0x0402,
- unicodeslot=0x0452,
+ uccode=0x402,
+ unicodeslot=0x452,
},
- [0x0453]={
+ [0x453]={
adobename="gjecyrillic",
category="ll",
contextname="cyrillicgje",
description="CYRILLIC SMALL LETTER GJE",
direction="l",
linebreak="al",
- specials={ "char", 0x0433, 0x0301 },
- uccode=0x0403,
- unicodeslot=0x0453,
+ specials={ "char", 0x433, 0x301 },
+ uccode=0x403,
+ unicodeslot=0x453,
},
- [0x0454]={
+ [0x454]={
adobename="ecyrillic",
category="ll",
contextname="cyrillicie",
description="CYRILLIC SMALL LETTER UKRAINIAN IE",
direction="l",
linebreak="al",
- uccode=0x0404,
- unicodeslot=0x0454,
+ uccode=0x404,
+ unicodeslot=0x454,
},
- [0x0455]={
+ [0x455]={
adobename="dzecyrillic",
category="ll",
contextname="cyrillicdze",
description="CYRILLIC SMALL LETTER DZE",
direction="l",
linebreak="al",
- uccode=0x0405,
- unicodeslot=0x0455,
+ uccode=0x405,
+ unicodeslot=0x455,
},
- [0x0456]={
+ [0x456]={
adobename="icyrillic",
category="ll",
contextname="cyrillicii",
description="CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I",
direction="l",
linebreak="al",
- uccode=0x0406,
- unicodeslot=0x0456,
+ uccode=0x406,
+ unicodeslot=0x456,
},
- [0x0457]={
+ [0x457]={
adobename="yicyrillic",
category="ll",
contextname="cyrillicyi",
description="CYRILLIC SMALL LETTER YI",
direction="l",
linebreak="al",
- specials={ "char", 0x0456, 0x0308 },
- uccode=0x0407,
- unicodeslot=0x0457,
+ specials={ "char", 0x456, 0x308 },
+ uccode=0x407,
+ unicodeslot=0x457,
},
- [0x0458]={
+ [0x458]={
adobename="jecyrillic",
category="ll",
contextname="cyrillicje",
description="CYRILLIC SMALL LETTER JE",
direction="l",
linebreak="al",
- uccode=0x0408,
- unicodeslot=0x0458,
+ uccode=0x408,
+ unicodeslot=0x458,
},
- [0x0459]={
+ [0x459]={
adobename="ljecyrillic",
category="ll",
contextname="cyrilliclje",
description="CYRILLIC SMALL LETTER LJE",
direction="l",
linebreak="al",
- uccode=0x0409,
- unicodeslot=0x0459,
+ uccode=0x409,
+ unicodeslot=0x459,
},
- [0x045A]={
+ [0x45A]={
adobename="njecyrillic",
category="ll",
contextname="cyrillicnje",
description="CYRILLIC SMALL LETTER NJE",
direction="l",
linebreak="al",
- uccode=0x040A,
- unicodeslot=0x045A,
+ uccode=0x40A,
+ unicodeslot=0x45A,
},
- [0x045B]={
+ [0x45B]={
adobename="tshecyrillic",
category="ll",
contextname="cyrillictshe",
description="CYRILLIC SMALL LETTER TSHE",
direction="l",
linebreak="al",
- uccode=0x040B,
- unicodeslot=0x045B,
+ uccode=0x40B,
+ unicodeslot=0x45B,
},
- [0x045C]={
+ [0x45C]={
adobename="kjecyrillic",
category="ll",
contextname="cyrillickje",
description="CYRILLIC SMALL LETTER KJE",
direction="l",
linebreak="al",
- specials={ "char", 0x043A, 0x0301 },
- uccode=0x040C,
- unicodeslot=0x045C,
+ specials={ "char", 0x43A, 0x301 },
+ uccode=0x40C,
+ unicodeslot=0x45C,
},
- [0x045D]={
+ [0x45D]={
category="ll",
contextname="cyrillicigrave",
description="CYRILLIC SMALL LETTER I WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0438,
- specials={ "char", 0x0438, 0x0300 },
- uccode=0x040D,
- unicodeslot=0x045D,
+ shcode=0x438,
+ specials={ "char", 0x438, 0x300 },
+ uccode=0x40D,
+ unicodeslot=0x45D,
},
- [0x045E]={
+ [0x45E]={
adobename="ushortcyrillic",
category="ll",
contextname="cyrillicushrt",
description="CYRILLIC SMALL LETTER SHORT U",
direction="l",
linebreak="al",
- specials={ "char", 0x0443, 0x0306 },
- uccode=0x040E,
- unicodeslot=0x045E,
+ specials={ "char", 0x443, 0x306 },
+ uccode=0x40E,
+ unicodeslot=0x45E,
},
- [0x045F]={
+ [0x45F]={
adobename="dzhecyrillic",
category="ll",
contextname="cyrillicdzhe",
description="CYRILLIC SMALL LETTER DZHE",
direction="l",
linebreak="al",
- uccode=0x040F,
- unicodeslot=0x045F,
+ uccode=0x40F,
+ unicodeslot=0x45F,
},
- [0x0460]={
+ [0x460]={
adobename="Omegacyrillic",
category="lu",
contextname="cyrillicOMEGA",
description="CYRILLIC CAPITAL LETTER OMEGA",
direction="l",
- lccode=0x0461,
+ lccode=0x461,
linebreak="al",
- unicodeslot=0x0460,
+ unicodeslot=0x460,
},
- [0x0461]={
+ [0x461]={
adobename="omegacyrillic",
category="ll",
contextname="cyrillicomega",
description="CYRILLIC SMALL LETTER OMEGA",
direction="l",
linebreak="al",
- uccode=0x0460,
- unicodeslot=0x0461,
+ uccode=0x460,
+ unicodeslot=0x461,
},
- [0x0462]={
+ [0x462]={
adobename="afii10146",
category="lu",
contextname="cyrillicYAT",
description="CYRILLIC CAPITAL LETTER YAT",
direction="l",
- lccode=0x0463,
+ lccode=0x463,
linebreak="al",
- unicodeslot=0x0462,
+ unicodeslot=0x462,
},
- [0x0463]={
+ [0x463]={
adobename="yatcyrillic",
category="ll",
contextname="cyrillicyat",
description="CYRILLIC SMALL LETTER YAT",
direction="l",
linebreak="al",
- uccode=0x0462,
- unicodeslot=0x0463,
+ uccode=0x462,
+ unicodeslot=0x463,
},
- [0x0464]={
+ [0x464]={
adobename="Eiotifiedcyrillic",
category="lu",
contextname="cyrillicEiotified",
description="CYRILLIC CAPITAL LETTER IOTIFIED E",
direction="l",
- lccode=0x0465,
+ lccode=0x465,
linebreak="al",
- unicodeslot=0x0464,
+ unicodeslot=0x464,
},
- [0x0465]={
+ [0x465]={
adobename="eiotifiedcyrillic",
category="ll",
contextname="cyrilliceiotified",
description="CYRILLIC SMALL LETTER IOTIFIED E",
direction="l",
linebreak="al",
- uccode=0x0464,
- unicodeslot=0x0465,
+ uccode=0x464,
+ unicodeslot=0x465,
},
- [0x0466]={
+ [0x466]={
adobename="Yuslittlecyrillic",
category="lu",
contextname="cyrillicLITTLEYUS",
description="CYRILLIC CAPITAL LETTER LITTLE YUS",
direction="l",
- lccode=0x0467,
+ lccode=0x467,
linebreak="al",
- unicodeslot=0x0466,
+ unicodeslot=0x466,
},
- [0x0467]={
+ [0x467]={
adobename="yuslittlecyrillic",
category="ll",
contextname="cyrilliclittleyus",
description="CYRILLIC SMALL LETTER LITTLE YUS",
direction="l",
linebreak="al",
- uccode=0x0466,
- unicodeslot=0x0467,
+ uccode=0x466,
+ unicodeslot=0x467,
},
- [0x0468]={
+ [0x468]={
adobename="Yuslittleiotifiedcyrillic",
category="lu",
contextname="cyrillicLITTLEYUSiotified",
description="CYRILLIC CAPITAL LETTER IOTIFIED LITTLE YUS",
direction="l",
- lccode=0x0469,
+ lccode=0x469,
linebreak="al",
- unicodeslot=0x0468,
+ unicodeslot=0x468,
},
- [0x0469]={
+ [0x469]={
adobename="yuslittleiotifiedcyrillic",
category="ll",
contextname="cyrilliclittleyusiotified",
description="CYRILLIC SMALL LETTER IOTIFIED LITTLE YUS",
direction="l",
linebreak="al",
- uccode=0x0468,
- unicodeslot=0x0469,
+ uccode=0x468,
+ unicodeslot=0x469,
},
- [0x046A]={
+ [0x46A]={
adobename="Yusbigcyrillic",
category="lu",
contextname="cyrillicBIGYUS",
description="CYRILLIC CAPITAL LETTER BIG YUS",
direction="l",
- lccode=0x046B,
+ lccode=0x46B,
linebreak="al",
- unicodeslot=0x046A,
+ unicodeslot=0x46A,
},
- [0x046B]={
+ [0x46B]={
adobename="yusbigcyrillic",
category="ll",
contextname="cyrillicbigyus",
description="CYRILLIC SMALL LETTER BIG YUS",
direction="l",
linebreak="al",
- uccode=0x046A,
- unicodeslot=0x046B,
+ uccode=0x46A,
+ unicodeslot=0x46B,
},
- [0x046C]={
+ [0x46C]={
adobename="Yusbigiotifiedcyrillic",
category="lu",
contextname="cyrillicBIGYUSiotified",
description="CYRILLIC CAPITAL LETTER IOTIFIED BIG YUS",
direction="l",
- lccode=0x046D,
+ lccode=0x46D,
linebreak="al",
- unicodeslot=0x046C,
+ unicodeslot=0x46C,
},
- [0x046D]={
+ [0x46D]={
adobename="yusbigiotifiedcyrillic",
category="ll",
contextname="cyrillicbigyusiotified",
description="CYRILLIC SMALL LETTER IOTIFIED BIG YUS",
direction="l",
linebreak="al",
- uccode=0x046C,
- unicodeslot=0x046D,
+ uccode=0x46C,
+ unicodeslot=0x46D,
},
- [0x046E]={
+ [0x46E]={
adobename="Ksicyrillic",
category="lu",
contextname="cyrillicKSI",
description="CYRILLIC CAPITAL LETTER KSI",
direction="l",
- lccode=0x046F,
+ lccode=0x46F,
linebreak="al",
- unicodeslot=0x046E,
+ unicodeslot=0x46E,
},
- [0x046F]={
+ [0x46F]={
adobename="ksicyrillic",
category="ll",
contextname="cyrillicksi",
description="CYRILLIC SMALL LETTER KSI",
direction="l",
linebreak="al",
- uccode=0x046E,
- unicodeslot=0x046F,
+ uccode=0x46E,
+ unicodeslot=0x46F,
},
- [0x0470]={
+ [0x470]={
adobename="Psicyrillic",
category="lu",
contextname="cyrillicPSI",
description="CYRILLIC CAPITAL LETTER PSI",
direction="l",
- lccode=0x0471,
+ lccode=0x471,
linebreak="al",
- unicodeslot=0x0470,
+ unicodeslot=0x470,
},
- [0x0471]={
+ [0x471]={
adobename="psicyrillic",
category="ll",
contextname="cyrillicpsi",
description="CYRILLIC SMALL LETTER PSI",
direction="l",
linebreak="al",
- uccode=0x0470,
- unicodeslot=0x0471,
+ uccode=0x470,
+ unicodeslot=0x471,
},
- [0x0472]={
+ [0x472]={
adobename="afii10147",
category="lu",
contextname="cyrillicFITA",
description="CYRILLIC CAPITAL LETTER FITA",
direction="l",
- lccode=0x0473,
+ lccode=0x473,
linebreak="al",
- unicodeslot=0x0472,
+ unicodeslot=0x472,
},
- [0x0473]={
+ [0x473]={
adobename="fitacyrillic",
category="ll",
contextname="cyrillicfita",
description="CYRILLIC SMALL LETTER FITA",
direction="l",
linebreak="al",
- uccode=0x0472,
- unicodeslot=0x0473,
+ uccode=0x472,
+ unicodeslot=0x473,
},
- [0x0474]={
+ [0x474]={
adobename="afii10148",
category="lu",
contextname="cyrillicIZHITSA",
description="CYRILLIC CAPITAL LETTER IZHITSA",
direction="l",
- lccode=0x0475,
+ lccode=0x475,
linebreak="al",
- unicodeslot=0x0474,
+ unicodeslot=0x474,
},
- [0x0475]={
+ [0x475]={
adobename="izhitsacyrillic",
category="ll",
contextname="cyrillicizhitsa",
description="CYRILLIC SMALL LETTER IZHITSA",
direction="l",
linebreak="al",
- uccode=0x0474,
- unicodeslot=0x0475,
+ uccode=0x474,
+ unicodeslot=0x475,
},
- [0x0476]={
+ [0x476]={
adobename="Izhitsadblgravecyrillic",
category="lu",
contextname="cyrillicIZHITSAdoublegrave",
description="CYRILLIC CAPITAL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT",
direction="l",
- lccode=0x0477,
+ lccode=0x477,
linebreak="al",
- shcode=0x0474,
- specials={ "char", 0x0474, 0x030F },
- unicodeslot=0x0476,
+ shcode=0x474,
+ specials={ "char", 0x474, 0x30F },
+ unicodeslot=0x476,
},
- [0x0477]={
+ [0x477]={
adobename="izhitsadblgravecyrillic",
category="ll",
contextname="cyrillicizhitsadoublegrave",
description="CYRILLIC SMALL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT",
direction="l",
linebreak="al",
- shcode=0x0475,
- specials={ "char", 0x0475, 0x030F },
- uccode=0x0476,
- unicodeslot=0x0477,
+ shcode=0x475,
+ specials={ "char", 0x475, 0x30F },
+ uccode=0x476,
+ unicodeslot=0x477,
},
- [0x0478]={
+ [0x478]={
adobename="Ukcyrillic",
category="lu",
contextname="cyrillicUK",
description="CYRILLIC CAPITAL LETTER UK",
direction="l",
- lccode=0x0479,
+ lccode=0x479,
linebreak="al",
- unicodeslot=0x0478,
+ unicodeslot=0x478,
},
- [0x0479]={
+ [0x479]={
adobename="ukcyrillic",
category="ll",
contextname="cyrillicuk",
description="CYRILLIC SMALL LETTER UK",
direction="l",
linebreak="al",
- uccode=0x0478,
- unicodeslot=0x0479,
+ uccode=0x478,
+ unicodeslot=0x479,
},
- [0x047A]={
+ [0x47A]={
adobename="Omegaroundcyrillic",
category="lu",
contextname="cyrillicOMEGAround",
description="CYRILLIC CAPITAL LETTER ROUND OMEGA",
direction="l",
- lccode=0x047B,
+ lccode=0x47B,
linebreak="al",
- unicodeslot=0x047A,
+ unicodeslot=0x47A,
},
- [0x047B]={
+ [0x47B]={
adobename="omegaroundcyrillic",
category="ll",
contextname="cyrillicomegaround",
description="CYRILLIC SMALL LETTER ROUND OMEGA",
direction="l",
linebreak="al",
- uccode=0x047A,
- unicodeslot=0x047B,
+ uccode=0x47A,
+ unicodeslot=0x47B,
},
- [0x047C]={
+ [0x47C]={
adobename="Omegatitlocyrillic",
category="lu",
contextname="cyrillicOMEGAtitlo",
description="CYRILLIC CAPITAL LETTER OMEGA WITH TITLO",
direction="l",
- lccode=0x047D,
+ lccode=0x47D,
linebreak="al",
- shcode=0x0460,
- unicodeslot=0x047C,
+ shcode=0x460,
+ unicodeslot=0x47C,
},
- [0x047D]={
+ [0x47D]={
adobename="omegatitlocyrillic",
category="ll",
contextname="cyrillicomegatitlo",
description="CYRILLIC SMALL LETTER OMEGA WITH TITLO",
direction="l",
linebreak="al",
- shcode=0x0461,
- uccode=0x047C,
- unicodeslot=0x047D,
+ shcode=0x461,
+ uccode=0x47C,
+ unicodeslot=0x47D,
},
- [0x047E]={
+ [0x47E]={
adobename="Otcyrillic",
category="lu",
contextname="cyrillicOT",
description="CYRILLIC CAPITAL LETTER OT",
direction="l",
- lccode=0x047F,
+ lccode=0x47F,
linebreak="al",
- unicodeslot=0x047E,
+ unicodeslot=0x47E,
},
- [0x047F]={
+ [0x47F]={
adobename="otcyrillic",
category="ll",
contextname="cyrillicot",
description="CYRILLIC SMALL LETTER OT",
direction="l",
linebreak="al",
- uccode=0x047E,
- unicodeslot=0x047F,
+ uccode=0x47E,
+ unicodeslot=0x47F,
},
- [0x0480]={
+ [0x480]={
adobename="Koppacyrillic",
category="lu",
contextname="cyrillicKOPPA",
description="CYRILLIC CAPITAL LETTER KOPPA",
direction="l",
- lccode=0x0481,
+ lccode=0x481,
linebreak="al",
- unicodeslot=0x0480,
+ unicodeslot=0x480,
},
- [0x0481]={
+ [0x481]={
adobename="koppacyrillic",
category="ll",
contextname="cyrillickoppa",
description="CYRILLIC SMALL LETTER KOPPA",
direction="l",
linebreak="al",
- uccode=0x0480,
- unicodeslot=0x0481,
+ uccode=0x480,
+ unicodeslot=0x481,
},
- [0x0482]={
+ [0x482]={
adobename="thousandcyrillic",
category="so",
description="CYRILLIC THOUSANDS SIGN",
direction="l",
linebreak="al",
- unicodeslot=0x0482,
+ unicodeslot=0x482,
},
- [0x0483]={
+ [0x483]={
adobename="titlocyrilliccmb",
category="mn",
+ combining=0xE6,
contextname="cyrillicTITLO",
description="COMBINING CYRILLIC TITLO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0483,
+ unicodeslot=0x483,
},
- [0x0484]={
+ [0x484]={
adobename="palatalizationcyrilliccmb",
category="mn",
+ combining=0xE6,
contextname="cyrillicPALATALIZATION",
description="COMBINING CYRILLIC PALATALIZATION",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0484,
+ unicodeslot=0x484,
},
- [0x0485]={
+ [0x485]={
adobename="dasiapneumatacyrilliccmb",
category="mn",
+ combining=0xE6,
contextname="cyrillicDASIAPNEUMATA",
description="COMBINING CYRILLIC DASIA PNEUMATA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0485,
+ unicodeslot=0x485,
},
- [0x0486]={
+ [0x486]={
adobename="psilipneumatacyrilliccmb",
category="mn",
+ combining=0xE6,
contextname="cyrillicPSILIPNEUMATA",
description="COMBINING CYRILLIC PSILI PNEUMATA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0486,
+ unicodeslot=0x486,
},
- [0x0487]={
+ [0x487]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC POKRYTIE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0487,
+ unicodeslot=0x487,
},
- [0x0488]={
+ [0x488]={
category="me",
description="COMBINING CYRILLIC HUNDRED THOUSANDS SIGN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0488,
+ unicodeslot=0x488,
},
- [0x0489]={
+ [0x489]={
category="me",
description="COMBINING CYRILLIC MILLIONS SIGN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0489,
+ unicodeslot=0x489,
},
- [0x048A]={
+ [0x48A]={
category="lu",
contextname="cyrillicISHRTtail",
description="CYRILLIC CAPITAL LETTER SHORT I WITH TAIL",
direction="l",
- lccode=0x048B,
+ lccode=0x48B,
linebreak="al",
- unicodeslot=0x048A,
+ unicodeslot=0x48A,
},
- [0x048B]={
+ [0x48B]={
category="ll",
contextname="cyrillicishrttail",
description="CYRILLIC SMALL LETTER SHORT I WITH TAIL",
direction="l",
linebreak="al",
- uccode=0x048A,
- unicodeslot=0x048B,
+ uccode=0x48A,
+ unicodeslot=0x48B,
},
- [0x048C]={
+ [0x48C]={
category="lu",
contextname="cyrillicSEMISOFT",
description="CYRILLIC CAPITAL LETTER SEMISOFT SIGN",
direction="l",
- lccode=0x048D,
+ lccode=0x48D,
linebreak="al",
- unicodeslot=0x048C,
+ unicodeslot=0x48C,
},
- [0x048D]={
+ [0x48D]={
category="ll",
contextname="cyrillicsemisoft",
description="CYRILLIC SMALL LETTER SEMISOFT SIGN",
direction="l",
linebreak="al",
- uccode=0x048C,
- unicodeslot=0x048D,
+ uccode=0x48C,
+ unicodeslot=0x48D,
},
- [0x048E]={
+ [0x48E]={
category="lu",
contextname="cyrillicERtick",
description="CYRILLIC CAPITAL LETTER ER WITH TICK",
direction="l",
- lccode=0x048F,
+ lccode=0x48F,
linebreak="al",
- shcode=0x0420,
- unicodeslot=0x048E,
+ shcode=0x420,
+ unicodeslot=0x48E,
},
- [0x048F]={
+ [0x48F]={
category="ll",
contextname="cyrillicertick",
description="CYRILLIC SMALL LETTER ER WITH TICK",
direction="l",
linebreak="al",
- shcode=0x0440,
- uccode=0x048E,
- unicodeslot=0x048F,
+ shcode=0x440,
+ uccode=0x48E,
+ unicodeslot=0x48F,
},
- [0x0490]={
+ [0x490]={
adobename="afii10050",
category="lu",
contextname="cyrillicGHEupturn",
description="CYRILLIC CAPITAL LETTER GHE WITH UPTURN",
direction="l",
- lccode=0x0491,
+ lccode=0x491,
linebreak="al",
- shcode=0x0413,
- unicodeslot=0x0490,
+ shcode=0x413,
+ unicodeslot=0x490,
},
- [0x0491]={
+ [0x491]={
adobename="gheupturncyrillic",
category="ll",
contextname="cyrillicgheupturn",
description="CYRILLIC SMALL LETTER GHE WITH UPTURN",
direction="l",
linebreak="al",
- shcode=0x0433,
- uccode=0x0490,
- unicodeslot=0x0491,
+ shcode=0x433,
+ uccode=0x490,
+ unicodeslot=0x491,
},
- [0x0492]={
+ [0x492]={
adobename="Ghestrokecyrillic",
category="lu",
contextname="cyrillicGHEstroke",
description="CYRILLIC CAPITAL LETTER GHE WITH STROKE",
direction="l",
- lccode=0x0493,
+ lccode=0x493,
linebreak="al",
- shcode=0x0413,
- unicodeslot=0x0492,
+ shcode=0x413,
+ unicodeslot=0x492,
},
- [0x0493]={
+ [0x493]={
adobename="ghestrokecyrillic",
category="ll",
contextname="cyrillicghestroke",
description="CYRILLIC SMALL LETTER GHE WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0433,
- uccode=0x0492,
- unicodeslot=0x0493,
+ shcode=0x433,
+ uccode=0x492,
+ unicodeslot=0x493,
},
- [0x0494]={
+ [0x494]={
adobename="Ghemiddlehookcyrillic",
category="lu",
contextname="cyrillicGHEmidhook",
description="CYRILLIC CAPITAL LETTER GHE WITH MIDDLE HOOK",
direction="l",
- lccode=0x0495,
+ lccode=0x495,
linebreak="al",
- shcode=0x0413,
- unicodeslot=0x0494,
+ shcode=0x413,
+ unicodeslot=0x494,
},
- [0x0495]={
+ [0x495]={
adobename="ghemiddlehookcyrillic",
category="ll",
contextname="cyrillicghemidhook",
description="CYRILLIC SMALL LETTER GHE WITH MIDDLE HOOK",
direction="l",
linebreak="al",
- shcode=0x0433,
- uccode=0x0494,
- unicodeslot=0x0495,
+ shcode=0x433,
+ uccode=0x494,
+ unicodeslot=0x495,
},
- [0x0496]={
+ [0x496]={
adobename="Zhedescendercyrillic",
category="lu",
contextname="cyrillicZHEdescender",
description="CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER",
direction="l",
- lccode=0x0497,
+ lccode=0x497,
linebreak="al",
- shcode=0x0416,
- unicodeslot=0x0496,
+ shcode=0x416,
+ unicodeslot=0x496,
},
- [0x0497]={
+ [0x497]={
adobename="zhedescendercyrillic",
category="ll",
contextname="cyrilliczhedescender",
description="CYRILLIC SMALL LETTER ZHE WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0436,
- uccode=0x0496,
- unicodeslot=0x0497,
+ shcode=0x436,
+ uccode=0x496,
+ unicodeslot=0x497,
},
- [0x0498]={
+ [0x498]={
adobename="Zedescendercyrillic",
category="lu",
contextname="cyrillicZDSC",
description="CYRILLIC CAPITAL LETTER ZE WITH DESCENDER",
direction="l",
- lccode=0x0499,
+ lccode=0x499,
linebreak="al",
- shcode=0x0417,
- unicodeslot=0x0498,
+ shcode=0x417,
+ unicodeslot=0x498,
},
- [0x0499]={
+ [0x499]={
adobename="zedescendercyrillic",
category="ll",
contextname="cyrilliczdsc",
description="CYRILLIC SMALL LETTER ZE WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0437,
- uccode=0x0498,
- unicodeslot=0x0499,
+ shcode=0x437,
+ uccode=0x498,
+ unicodeslot=0x499,
},
- [0x049A]={
+ [0x49A]={
adobename="Kadescendercyrillic",
category="lu",
contextname="cyrillicKADC",
description="CYRILLIC CAPITAL LETTER KA WITH DESCENDER",
direction="l",
- lccode=0x049B,
+ lccode=0x49B,
linebreak="al",
- shcode=0x041A,
- unicodeslot=0x049A,
+ shcode=0x41A,
+ unicodeslot=0x49A,
},
- [0x049B]={
+ [0x49B]={
adobename="kadescendercyrillic",
category="ll",
contextname="cyrillickadc",
description="CYRILLIC SMALL LETTER KA WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x043A,
- uccode=0x049A,
- unicodeslot=0x049B,
+ shcode=0x43A,
+ uccode=0x49A,
+ unicodeslot=0x49B,
},
- [0x049C]={
+ [0x49C]={
adobename="Kaverticalstrokecyrillic",
category="lu",
contextname="cyrillicKAvertstroke",
description="CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE",
direction="l",
- lccode=0x049D,
+ lccode=0x49D,
linebreak="al",
- shcode=0x041A,
- unicodeslot=0x049C,
+ shcode=0x41A,
+ unicodeslot=0x49C,
},
- [0x049D]={
+ [0x49D]={
adobename="kaverticalstrokecyrillic",
category="ll",
contextname="cyrillickavertstroke",
description="CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE",
direction="l",
linebreak="al",
- shcode=0x043A,
- uccode=0x049C,
- unicodeslot=0x049D,
+ shcode=0x43A,
+ uccode=0x49C,
+ unicodeslot=0x49D,
},
- [0x049E]={
+ [0x49E]={
adobename="Kastrokecyrillic",
category="lu",
contextname="cyrillicKAstroke",
description="CYRILLIC CAPITAL LETTER KA WITH STROKE",
direction="l",
- lccode=0x049F,
+ lccode=0x49F,
linebreak="al",
- shcode=0x041A,
- unicodeslot=0x049E,
+ shcode=0x41A,
+ unicodeslot=0x49E,
},
- [0x049F]={
+ [0x49F]={
adobename="kastrokecyrillic",
category="ll",
contextname="cyrillickastroke",
description="CYRILLIC SMALL LETTER KA WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x043A,
- uccode=0x049E,
- unicodeslot=0x049F,
+ shcode=0x43A,
+ uccode=0x49E,
+ unicodeslot=0x49F,
},
- [0x04A0]={
+ [0x4A0]={
adobename="Kabashkircyrillic",
category="lu",
contextname="cyrillicKAbashkir",
description="CYRILLIC CAPITAL LETTER BASHKIR KA",
direction="l",
- lccode=0x04A1,
+ lccode=0x4A1,
linebreak="al",
- unicodeslot=0x04A0,
+ unicodeslot=0x4A0,
},
- [0x04A1]={
+ [0x4A1]={
adobename="kabashkircyrillic",
category="ll",
contextname="cyrillickabashkir",
description="CYRILLIC SMALL LETTER BASHKIR KA",
direction="l",
linebreak="al",
- uccode=0x04A0,
- unicodeslot=0x04A1,
+ uccode=0x4A0,
+ unicodeslot=0x4A1,
},
- [0x04A2]={
+ [0x4A2]={
adobename="Endescendercyrillic",
category="lu",
contextname="cyrillicENDC",
description="CYRILLIC CAPITAL LETTER EN WITH DESCENDER",
direction="l",
- lccode=0x04A3,
+ lccode=0x4A3,
linebreak="al",
- shcode=0x041D,
- unicodeslot=0x04A2,
+ shcode=0x41D,
+ unicodeslot=0x4A2,
},
- [0x04A3]={
+ [0x4A3]={
adobename="endescendercyrillic",
category="ll",
contextname="cyrillicendc",
description="CYRILLIC SMALL LETTER EN WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x043D,
- uccode=0x04A2,
- unicodeslot=0x04A3,
+ shcode=0x43D,
+ uccode=0x4A2,
+ unicodeslot=0x4A3,
},
- [0x04A4]={
+ [0x4A4]={
adobename="Enghecyrillic",
category="lu",
contextname="cyrillicENGHE",
description="CYRILLIC CAPITAL LIGATURE EN GHE",
direction="l",
- lccode=0x04A5,
+ lccode=0x4A5,
linebreak="al",
- unicodeslot=0x04A4,
+ unicodeslot=0x4A4,
},
- [0x04A5]={
+ [0x4A5]={
adobename="enghecyrillic",
category="ll",
contextname="cyrillicenghe",
description="CYRILLIC SMALL LIGATURE EN GHE",
direction="l",
linebreak="al",
- uccode=0x04A4,
- unicodeslot=0x04A5,
+ uccode=0x4A4,
+ unicodeslot=0x4A5,
},
- [0x04A6]={
+ [0x4A6]={
adobename="Pemiddlehookcyrillic",
category="lu",
contextname="cyrillicPEmidhook",
description="CYRILLIC CAPITAL LETTER PE WITH MIDDLE HOOK",
direction="l",
- lccode=0x04A7,
+ lccode=0x4A7,
linebreak="al",
- shcode=0x041F,
- unicodeslot=0x04A6,
+ shcode=0x41F,
+ unicodeslot=0x4A6,
},
- [0x04A7]={
+ [0x4A7]={
adobename="pemiddlehookcyrillic",
category="ll",
contextname="cyrillicpemidhook",
description="CYRILLIC SMALL LETTER PE WITH MIDDLE HOOK",
direction="l",
linebreak="al",
- shcode=0x043F,
- uccode=0x04A6,
- unicodeslot=0x04A7,
+ shcode=0x43F,
+ uccode=0x4A6,
+ unicodeslot=0x4A7,
},
- [0x04A8]={
+ [0x4A8]={
adobename="Haabkhasiancyrillic",
category="lu",
contextname="cyrillicHA",
description="CYRILLIC CAPITAL LETTER ABKHASIAN HA",
direction="l",
- lccode=0x04A9,
+ lccode=0x4A9,
linebreak="al",
- unicodeslot=0x04A8,
+ unicodeslot=0x4A8,
},
- [0x04A9]={
+ [0x4A9]={
adobename="haabkhasiancyrillic",
category="ll",
contextname="cyrillicha",
description="CYRILLIC SMALL LETTER ABKHASIAN HA",
direction="l",
linebreak="al",
- uccode=0x04A8,
- unicodeslot=0x04A9,
+ uccode=0x4A8,
+ unicodeslot=0x4A9,
},
- [0x04AA]={
+ [0x4AA]={
adobename="Esdescendercyrillic",
category="lu",
contextname="cyrillicSDSC",
description="CYRILLIC CAPITAL LETTER ES WITH DESCENDER",
direction="l",
- lccode=0x04AB,
+ lccode=0x4AB,
linebreak="al",
- shcode=0x0421,
- unicodeslot=0x04AA,
+ shcode=0x421,
+ unicodeslot=0x4AA,
},
- [0x04AB]={
+ [0x4AB]={
adobename="esdescendercyrillic",
category="ll",
contextname="cyrillicsdsc",
description="CYRILLIC SMALL LETTER ES WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0441,
- uccode=0x04AA,
- unicodeslot=0x04AB,
+ shcode=0x441,
+ uccode=0x4AA,
+ unicodeslot=0x4AB,
},
- [0x04AC]={
+ [0x4AC]={
adobename="Tedescendercyrillic",
category="lu",
contextname="cyrillicTEDC",
description="CYRILLIC CAPITAL LETTER TE WITH DESCENDER",
direction="l",
- lccode=0x04AD,
+ lccode=0x4AD,
linebreak="al",
- shcode=0x0422,
- unicodeslot=0x04AC,
+ shcode=0x422,
+ unicodeslot=0x4AC,
},
- [0x04AD]={
+ [0x4AD]={
adobename="tedescendercyrillic",
category="ll",
contextname="cyrillictedc",
description="CYRILLIC SMALL LETTER TE WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0442,
- uccode=0x04AC,
- unicodeslot=0x04AD,
+ shcode=0x442,
+ uccode=0x4AC,
+ unicodeslot=0x4AD,
},
- [0x04AE]={
+ [0x4AE]={
adobename="Ustraightcyrillic",
category="lu",
contextname="cyrillicYstr",
description="CYRILLIC CAPITAL LETTER STRAIGHT U",
direction="l",
- lccode=0x04AF,
+ lccode=0x4AF,
linebreak="al",
- unicodeslot=0x04AE,
+ unicodeslot=0x4AE,
},
- [0x04AF]={
+ [0x4AF]={
adobename="ustraightcyrillic",
category="ll",
contextname="cyrillicystr",
description="CYRILLIC SMALL LETTER STRAIGHT U",
direction="l",
linebreak="al",
- uccode=0x04AE,
- unicodeslot=0x04AF,
+ uccode=0x4AE,
+ unicodeslot=0x4AF,
},
- [0x04B0]={
+ [0x4B0]={
adobename="Ustraightstrokecyrillic",
category="lu",
contextname="cyrillicYstrstroke",
description="CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE",
direction="l",
- lccode=0x04B1,
+ lccode=0x4B1,
linebreak="al",
- unicodeslot=0x04B0,
+ unicodeslot=0x4B0,
},
- [0x04B1]={
+ [0x4B1]={
adobename="ustraightstrokecyrillic",
category="ll",
contextname="cyrillicystrstroke",
description="CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE",
direction="l",
linebreak="al",
- uccode=0x04B0,
- unicodeslot=0x04B1,
+ uccode=0x4B0,
+ unicodeslot=0x4B1,
},
- [0x04B2]={
+ [0x4B2]={
adobename="Hadescendercyrillic",
category="lu",
contextname="cyrillicHADC",
description="CYRILLIC CAPITAL LETTER HA WITH DESCENDER",
direction="l",
- lccode=0x04B3,
+ lccode=0x4B3,
linebreak="al",
- shcode=0x0425,
- unicodeslot=0x04B2,
+ shcode=0x425,
+ unicodeslot=0x4B2,
},
- [0x04B3]={
+ [0x4B3]={
adobename="hadescendercyrillic",
category="ll",
contextname="cyrillichadc",
description="CYRILLIC SMALL LETTER HA WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0445,
- uccode=0x04B2,
- unicodeslot=0x04B3,
+ shcode=0x445,
+ uccode=0x4B2,
+ unicodeslot=0x4B3,
},
- [0x04B4]={
+ [0x4B4]={
adobename="Tetsecyrillic",
category="lu",
contextname="cyrillicTETSE",
description="CYRILLIC CAPITAL LIGATURE TE TSE",
direction="l",
- lccode=0x04B5,
+ lccode=0x4B5,
linebreak="al",
- unicodeslot=0x04B4,
+ unicodeslot=0x4B4,
},
- [0x04B5]={
+ [0x4B5]={
adobename="tetsecyrillic",
category="ll",
contextname="cyrillictetse",
description="CYRILLIC SMALL LIGATURE TE TSE",
direction="l",
linebreak="al",
- uccode=0x04B4,
- unicodeslot=0x04B5,
+ uccode=0x4B4,
+ unicodeslot=0x4B5,
},
- [0x04B6]={
+ [0x4B6]={
adobename="Chedescendercyrillic",
category="lu",
contextname="cyrillicCHEDC",
description="CYRILLIC CAPITAL LETTER CHE WITH DESCENDER",
direction="l",
- lccode=0x04B7,
+ lccode=0x4B7,
linebreak="al",
- shcode=0x0427,
- unicodeslot=0x04B6,
+ shcode=0x427,
+ unicodeslot=0x4B6,
},
- [0x04B7]={
+ [0x4B7]={
adobename="chedescendercyrillic",
category="ll",
contextname="cyrillicchedc",
description="CYRILLIC SMALL LETTER CHE WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0447,
- uccode=0x04B6,
- unicodeslot=0x04B7,
+ shcode=0x447,
+ uccode=0x4B6,
+ unicodeslot=0x4B7,
},
- [0x04B8]={
+ [0x4B8]={
adobename="Cheverticalstrokecyrillic",
category="lu",
contextname="cyrillicCHEvertstroke",
description="CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE",
direction="l",
- lccode=0x04B9,
+ lccode=0x4B9,
linebreak="al",
- shcode=0x0427,
- unicodeslot=0x04B8,
+ shcode=0x427,
+ unicodeslot=0x4B8,
},
- [0x04B9]={
+ [0x4B9]={
adobename="cheverticalstrokecyrillic",
category="ll",
contextname="cyrillicchevertstroke",
description="CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE",
direction="l",
linebreak="al",
- shcode=0x0447,
- uccode=0x04B8,
- unicodeslot=0x04B9,
+ shcode=0x447,
+ uccode=0x4B8,
+ unicodeslot=0x4B9,
},
- [0x04BA]={
+ [0x4BA]={
adobename="Shhacyrillic",
category="lu",
contextname="cyrillicSHHA",
description="CYRILLIC CAPITAL LETTER SHHA",
direction="l",
- lccode=0x04BB,
+ lccode=0x4BB,
linebreak="al",
- unicodeslot=0x04BA,
+ unicodeslot=0x4BA,
},
- [0x04BB]={
+ [0x4BB]={
adobename="shhacyrillic",
category="ll",
contextname="cyrillicshha",
description="CYRILLIC SMALL LETTER SHHA",
direction="l",
linebreak="al",
- uccode=0x04BA,
- unicodeslot=0x04BB,
+ uccode=0x4BA,
+ unicodeslot=0x4BB,
},
- [0x04BC]={
+ [0x4BC]={
adobename="Cheabkhasiancyrillic",
category="lu",
contextname="cyrillicCHEabkhasian",
description="CYRILLIC CAPITAL LETTER ABKHASIAN CHE",
direction="l",
- lccode=0x04BD,
+ lccode=0x4BD,
linebreak="al",
- unicodeslot=0x04BC,
+ unicodeslot=0x4BC,
},
- [0x04BD]={
+ [0x4BD]={
adobename="cheabkhasiancyrillic",
category="ll",
contextname="cyrilliccheabkhasian",
description="CYRILLIC SMALL LETTER ABKHASIAN CHE",
direction="l",
linebreak="al",
- uccode=0x04BC,
- unicodeslot=0x04BD,
+ uccode=0x4BC,
+ unicodeslot=0x4BD,
},
- [0x04BE]={
+ [0x4BE]={
adobename="Chedescenderabkhasiancyrillic",
category="lu",
contextname="cyrillicCHEDCabkhasian",
description="CYRILLIC CAPITAL LETTER ABKHASIAN CHE WITH DESCENDER",
direction="l",
- lccode=0x04BF,
+ lccode=0x4BF,
linebreak="al",
- unicodeslot=0x04BE,
+ unicodeslot=0x4BE,
},
- [0x04BF]={
+ [0x4BF]={
adobename="chedescenderabkhasiancyrillic",
category="ll",
contextname="cyrillicchedcabkhasian",
description="CYRILLIC SMALL LETTER ABKHASIAN CHE WITH DESCENDER",
direction="l",
linebreak="al",
- uccode=0x04BE,
- unicodeslot=0x04BF,
+ uccode=0x4BE,
+ unicodeslot=0x4BF,
},
- [0x04C0]={
+ [0x4C0]={
adobename="palochkacyrillic",
category="lu",
contextname="cyrillicPALOCHKA",
description="CYRILLIC LETTER PALOCHKA",
direction="l",
- lccode=0x04CF,
+ lccode=0x4CF,
linebreak="al",
- unicodeslot=0x04C0,
+ unicodeslot=0x4C0,
},
- [0x04C1]={
+ [0x4C1]={
adobename="Zhebrevecyrillic",
category="lu",
contextname="cyrillicZHEbreve",
description="CYRILLIC CAPITAL LETTER ZHE WITH BREVE",
direction="l",
- lccode=0x04C2,
+ lccode=0x4C2,
linebreak="al",
- shcode=0x0416,
- specials={ "char", 0x0416, 0x0306 },
- unicodeslot=0x04C1,
+ shcode=0x416,
+ specials={ "char", 0x416, 0x306 },
+ unicodeslot=0x4C1,
},
- [0x04C2]={
+ [0x4C2]={
adobename="zhebrevecyrillic",
category="ll",
contextname="cyrilliczhebreve",
description="CYRILLIC SMALL LETTER ZHE WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0436,
- specials={ "char", 0x0436, 0x0306 },
- uccode=0x04C1,
- unicodeslot=0x04C2,
+ shcode=0x436,
+ specials={ "char", 0x436, 0x306 },
+ uccode=0x4C1,
+ unicodeslot=0x4C2,
},
- [0x04C3]={
+ [0x4C3]={
adobename="Kahookcyrillic",
category="lu",
contextname="cyrillicKAhook",
description="CYRILLIC CAPITAL LETTER KA WITH HOOK",
direction="l",
- lccode=0x04C4,
+ lccode=0x4C4,
linebreak="al",
- shcode=0x041A,
- unicodeslot=0x04C3,
+ shcode=0x41A,
+ unicodeslot=0x4C3,
},
- [0x04C4]={
+ [0x4C4]={
adobename="kahookcyrillic",
category="ll",
contextname="cyrillickahook",
description="CYRILLIC SMALL LETTER KA WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x043A,
- uccode=0x04C3,
- unicodeslot=0x04C4,
+ shcode=0x43A,
+ uccode=0x4C3,
+ unicodeslot=0x4C4,
},
- [0x04C5]={
+ [0x4C5]={
category="lu",
contextname="cyrillicELtail",
description="CYRILLIC CAPITAL LETTER EL WITH TAIL",
direction="l",
- lccode=0x04C6,
+ lccode=0x4C6,
linebreak="al",
- shcode=0x041B,
- unicodeslot=0x04C5,
+ shcode=0x41B,
+ unicodeslot=0x4C5,
},
- [0x04C6]={
+ [0x4C6]={
category="ll",
contextname="cyrilliceltail",
description="CYRILLIC SMALL LETTER EL WITH TAIL",
direction="l",
linebreak="al",
- shcode=0x043B,
- uccode=0x04C5,
- unicodeslot=0x04C6,
+ shcode=0x43B,
+ uccode=0x4C5,
+ unicodeslot=0x4C6,
},
- [0x04C7]={
+ [0x4C7]={
adobename="Enhookcyrillic",
category="lu",
contextname="cyrillicENhook",
description="CYRILLIC CAPITAL LETTER EN WITH HOOK",
direction="l",
- lccode=0x04C8,
+ lccode=0x4C8,
linebreak="al",
- shcode=0x041D,
- unicodeslot=0x04C7,
+ shcode=0x41D,
+ unicodeslot=0x4C7,
},
- [0x04C8]={
+ [0x4C8]={
adobename="enhookcyrillic",
category="ll",
contextname="cyrillicenhook",
description="CYRILLIC SMALL LETTER EN WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x043D,
- uccode=0x04C7,
- unicodeslot=0x04C8,
+ shcode=0x43D,
+ uccode=0x4C7,
+ unicodeslot=0x4C8,
},
- [0x04C9]={
+ [0x4C9]={
category="lu",
contextname="cyrillicENtail",
description="CYRILLIC CAPITAL LETTER EN WITH TAIL",
direction="l",
- lccode=0x04CA,
+ lccode=0x4CA,
linebreak="al",
- shcode=0x041D,
- unicodeslot=0x04C9,
+ shcode=0x41D,
+ unicodeslot=0x4C9,
},
- [0x04CA]={
+ [0x4CA]={
category="ll",
contextname="cyrillicentail",
description="CYRILLIC SMALL LETTER EN WITH TAIL",
direction="l",
linebreak="al",
- shcode=0x043D,
- uccode=0x04C9,
- unicodeslot=0x04CA,
+ shcode=0x43D,
+ uccode=0x4C9,
+ unicodeslot=0x4CA,
},
- [0x04CB]={
+ [0x4CB]={
adobename="Chekhakassiancyrillic",
category="lu",
contextname="cyrillicCHEkhakassian",
description="CYRILLIC CAPITAL LETTER KHAKASSIAN CHE",
direction="l",
- lccode=0x04CC,
+ lccode=0x4CC,
linebreak="al",
- unicodeslot=0x04CB,
+ unicodeslot=0x4CB,
},
- [0x04CC]={
+ [0x4CC]={
adobename="chekhakassiancyrillic",
category="ll",
contextname="cyrillicchekhakassian",
description="CYRILLIC SMALL LETTER KHAKASSIAN CHE",
direction="l",
linebreak="al",
- uccode=0x04CB,
- unicodeslot=0x04CC,
+ uccode=0x4CB,
+ unicodeslot=0x4CC,
},
- [0x04CD]={
+ [0x4CD]={
category="lu",
contextname="cyrillicEMtail",
description="CYRILLIC CAPITAL LETTER EM WITH TAIL",
direction="l",
- lccode=0x04CE,
+ lccode=0x4CE,
linebreak="al",
- shcode=0x041C,
- unicodeslot=0x04CD,
+ shcode=0x41C,
+ unicodeslot=0x4CD,
},
- [0x04CE]={
+ [0x4CE]={
category="ll",
contextname="cyrillicemtail",
description="CYRILLIC SMALL LETTER EM WITH TAIL",
direction="l",
linebreak="al",
- shcode=0x043C,
- uccode=0x04CD,
- unicodeslot=0x04CE,
+ shcode=0x43C,
+ uccode=0x4CD,
+ unicodeslot=0x4CE,
},
- [0x04CF]={
+ [0x4CF]={
category="ll",
description="CYRILLIC SMALL LETTER PALOCHKA",
direction="l",
linebreak="al",
- uccode=0x04C0,
- unicodeslot=0x04CF,
+ uccode=0x4C0,
+ unicodeslot=0x4CF,
},
- [0x04D0]={
+ [0x4D0]={
adobename="Abrevecyrillic",
category="lu",
contextname="cyrillicAbreve",
description="CYRILLIC CAPITAL LETTER A WITH BREVE",
direction="l",
- lccode=0x04D1,
+ lccode=0x4D1,
linebreak="al",
- shcode=0x0410,
- specials={ "char", 0x0410, 0x0306 },
- unicodeslot=0x04D0,
+ shcode=0x410,
+ specials={ "char", 0x410, 0x306 },
+ unicodeslot=0x4D0,
},
- [0x04D1]={
+ [0x4D1]={
adobename="abrevecyrillic",
category="ll",
contextname="cyrillicabreve",
description="CYRILLIC SMALL LETTER A WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0430,
- specials={ "char", 0x0430, 0x0306 },
- uccode=0x04D0,
- unicodeslot=0x04D1,
+ shcode=0x430,
+ specials={ "char", 0x430, 0x306 },
+ uccode=0x4D0,
+ unicodeslot=0x4D1,
},
- [0x04D2]={
+ [0x4D2]={
adobename="Adieresiscyrillic",
category="lu",
contextname="cyrillicAdiaeresis",
description="CYRILLIC CAPITAL LETTER A WITH DIAERESIS",
direction="l",
- lccode=0x04D3,
+ lccode=0x4D3,
linebreak="al",
- shcode=0x0410,
- specials={ "char", 0x0410, 0x0308 },
- unicodeslot=0x04D2,
+ shcode=0x410,
+ specials={ "char", 0x410, 0x308 },
+ unicodeslot=0x4D2,
},
- [0x04D3]={
+ [0x4D3]={
adobename="adieresiscyrillic",
category="ll",
contextname="cyrillicadiaeresis",
description="CYRILLIC SMALL LETTER A WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0430,
- specials={ "char", 0x0430, 0x0308 },
- uccode=0x04D2,
- unicodeslot=0x04D3,
+ shcode=0x430,
+ specials={ "char", 0x430, 0x308 },
+ uccode=0x4D2,
+ unicodeslot=0x4D3,
},
- [0x04D4]={
+ [0x4D4]={
adobename="Aiecyrillic",
category="lu",
contextname="cyrillicAE",
description="CYRILLIC CAPITAL LIGATURE A IE",
direction="l",
- lccode=0x04D5,
+ lccode=0x4D5,
linebreak="al",
- unicodeslot=0x04D4,
+ unicodeslot=0x4D4,
},
- [0x04D5]={
+ [0x4D5]={
adobename="aiecyrillic",
category="ll",
contextname="cyrillicae",
description="CYRILLIC SMALL LIGATURE A IE",
direction="l",
linebreak="al",
- uccode=0x04D4,
- unicodeslot=0x04D5,
+ uccode=0x4D4,
+ unicodeslot=0x4D5,
},
- [0x04D6]={
+ [0x4D6]={
adobename="Iebrevecyrillic",
category="lu",
contextname="cyrillicEbreve",
description="CYRILLIC CAPITAL LETTER IE WITH BREVE",
direction="l",
- lccode=0x04D7,
+ lccode=0x4D7,
linebreak="al",
- shcode=0x0415,
- specials={ "char", 0x0415, 0x0306 },
- unicodeslot=0x04D6,
+ shcode=0x415,
+ specials={ "char", 0x415, 0x306 },
+ unicodeslot=0x4D6,
},
- [0x04D7]={
+ [0x4D7]={
adobename="iebrevecyrillic",
category="ll",
contextname="cyrillicebreve",
description="CYRILLIC SMALL LETTER IE WITH BREVE",
direction="l",
linebreak="al",
- shcode=0x0435,
- specials={ "char", 0x0435, 0x0306 },
- uccode=0x04D6,
- unicodeslot=0x04D7,
+ shcode=0x435,
+ specials={ "char", 0x435, 0x306 },
+ uccode=0x4D6,
+ unicodeslot=0x4D7,
},
- [0x04D8]={
+ [0x4D8]={
adobename="Schwacyrillic",
category="lu",
contextname="cyrillicSCHWA",
description="CYRILLIC CAPITAL LETTER SCHWA",
direction="l",
- lccode=0x04D9,
+ lccode=0x4D9,
linebreak="al",
- unicodeslot=0x04D8,
+ unicodeslot=0x4D8,
},
- [0x04D9]={
+ [0x4D9]={
adobename="schwacyrillic",
category="ll",
contextname="cyrillicschwa",
description="CYRILLIC SMALL LETTER SCHWA",
direction="l",
linebreak="al",
- uccode=0x04D8,
- unicodeslot=0x04D9,
+ uccode=0x4D8,
+ unicodeslot=0x4D9,
},
- [0x04DA]={
+ [0x4DA]={
adobename="Schwadieresiscyrillic",
category="lu",
contextname="cyrillicSCHWAdiaeresis",
description="CYRILLIC CAPITAL LETTER SCHWA WITH DIAERESIS",
direction="l",
- lccode=0x04DB,
+ lccode=0x4DB,
linebreak="al",
- shcode=0x04D8,
- specials={ "char", 0x04D8, 0x0308 },
- unicodeslot=0x04DA,
+ shcode=0x4D8,
+ specials={ "char", 0x4D8, 0x308 },
+ unicodeslot=0x4DA,
},
- [0x04DB]={
+ [0x4DB]={
adobename="schwadieresiscyrillic",
category="ll",
contextname="cyrillicschwadiaeresis",
description="CYRILLIC SMALL LETTER SCHWA WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x04D9,
- specials={ "char", 0x04D9, 0x0308 },
- uccode=0x04DA,
- unicodeslot=0x04DB,
+ shcode=0x4D9,
+ specials={ "char", 0x4D9, 0x308 },
+ uccode=0x4DA,
+ unicodeslot=0x4DB,
},
- [0x04DC]={
+ [0x4DC]={
adobename="Zhedieresiscyrillic",
category="lu",
contextname="cyrillicZHEdiaeresis",
description="CYRILLIC CAPITAL LETTER ZHE WITH DIAERESIS",
direction="l",
- lccode=0x04DD,
+ lccode=0x4DD,
linebreak="al",
- shcode=0x0416,
- specials={ "char", 0x0416, 0x0308 },
- unicodeslot=0x04DC,
+ shcode=0x416,
+ specials={ "char", 0x416, 0x308 },
+ unicodeslot=0x4DC,
},
- [0x04DD]={
+ [0x4DD]={
adobename="zhedieresiscyrillic",
category="ll",
contextname="cyrilliczhediaeresis",
description="CYRILLIC SMALL LETTER ZHE WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0436,
- specials={ "char", 0x0436, 0x0308 },
- uccode=0x04DC,
- unicodeslot=0x04DD,
+ shcode=0x436,
+ specials={ "char", 0x436, 0x308 },
+ uccode=0x4DC,
+ unicodeslot=0x4DD,
},
- [0x04DE]={
+ [0x4DE]={
adobename="Zedieresiscyrillic",
category="lu",
contextname="cyrillicZEdiaeresis",
description="CYRILLIC CAPITAL LETTER ZE WITH DIAERESIS",
direction="l",
- lccode=0x04DF,
+ lccode=0x4DF,
linebreak="al",
- shcode=0x0417,
- specials={ "char", 0x0417, 0x0308 },
- unicodeslot=0x04DE,
+ shcode=0x417,
+ specials={ "char", 0x417, 0x308 },
+ unicodeslot=0x4DE,
},
- [0x04DF]={
+ [0x4DF]={
adobename="zedieresiscyrillic",
category="ll",
contextname="cyrilliczediaeresis",
description="CYRILLIC SMALL LETTER ZE WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0437,
- specials={ "char", 0x0437, 0x0308 },
- uccode=0x04DE,
- unicodeslot=0x04DF,
+ shcode=0x437,
+ specials={ "char", 0x437, 0x308 },
+ uccode=0x4DE,
+ unicodeslot=0x4DF,
},
- [0x04E0]={
+ [0x4E0]={
adobename="Dzeabkhasiancyrillic",
category="lu",
contextname="cyrillicDZEabkhasian",
description="CYRILLIC CAPITAL LETTER ABKHASIAN DZE",
direction="l",
- lccode=0x04E1,
+ lccode=0x4E1,
linebreak="al",
- unicodeslot=0x04E0,
+ unicodeslot=0x4E0,
},
- [0x04E1]={
+ [0x4E1]={
adobename="dzeabkhasiancyrillic",
category="ll",
contextname="cyrillicdzeabkhasian",
description="CYRILLIC SMALL LETTER ABKHASIAN DZE",
direction="l",
linebreak="al",
- uccode=0x04E0,
- unicodeslot=0x04E1,
+ uccode=0x4E0,
+ unicodeslot=0x4E1,
},
- [0x04E2]={
+ [0x4E2]={
adobename="Imacroncyrillic",
category="lu",
contextname="cyrillicImacron",
description="CYRILLIC CAPITAL LETTER I WITH MACRON",
direction="l",
- lccode=0x04E3,
+ lccode=0x4E3,
linebreak="al",
- shcode=0x0418,
- specials={ "char", 0x0418, 0x0304 },
- unicodeslot=0x04E2,
+ shcode=0x418,
+ specials={ "char", 0x418, 0x304 },
+ unicodeslot=0x4E2,
},
- [0x04E3]={
+ [0x4E3]={
adobename="imacroncyrillic",
category="ll",
contextname="cyrillicimacron",
description="CYRILLIC SMALL LETTER I WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0438,
- specials={ "char", 0x0438, 0x0304 },
- uccode=0x04E2,
- unicodeslot=0x04E3,
+ shcode=0x438,
+ specials={ "char", 0x438, 0x304 },
+ uccode=0x4E2,
+ unicodeslot=0x4E3,
},
- [0x04E4]={
+ [0x4E4]={
adobename="Idieresiscyrillic",
category="lu",
contextname="cyrillicIdiaeresis",
description="CYRILLIC CAPITAL LETTER I WITH DIAERESIS",
direction="l",
- lccode=0x04E5,
+ lccode=0x4E5,
linebreak="al",
- shcode=0x0418,
- specials={ "char", 0x0418, 0x0308 },
- unicodeslot=0x04E4,
+ shcode=0x418,
+ specials={ "char", 0x418, 0x308 },
+ unicodeslot=0x4E4,
},
- [0x04E5]={
+ [0x4E5]={
adobename="idieresiscyrillic",
category="ll",
contextname="cyrillicidiaeresis",
description="CYRILLIC SMALL LETTER I WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0438,
- specials={ "char", 0x0438, 0x0308 },
- uccode=0x04E4,
- unicodeslot=0x04E5,
+ shcode=0x438,
+ specials={ "char", 0x438, 0x308 },
+ uccode=0x4E4,
+ unicodeslot=0x4E5,
},
- [0x04E6]={
+ [0x4E6]={
adobename="Odieresiscyrillic",
category="lu",
contextname="cyrillicOdiaeresis",
description="CYRILLIC CAPITAL LETTER O WITH DIAERESIS",
direction="l",
- lccode=0x04E7,
+ lccode=0x4E7,
linebreak="al",
- shcode=0x041E,
- specials={ "char", 0x041E, 0x0308 },
- unicodeslot=0x04E6,
+ shcode=0x41E,
+ specials={ "char", 0x41E, 0x308 },
+ unicodeslot=0x4E6,
},
- [0x04E7]={
+ [0x4E7]={
adobename="odieresiscyrillic",
category="ll",
contextname="cyrillicodiaeresis",
description="CYRILLIC SMALL LETTER O WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x043E,
- specials={ "char", 0x043E, 0x0308 },
- uccode=0x04E6,
- unicodeslot=0x04E7,
+ shcode=0x43E,
+ specials={ "char", 0x43E, 0x308 },
+ uccode=0x4E6,
+ unicodeslot=0x4E7,
},
- [0x04E8]={
+ [0x4E8]={
adobename="Obarredcyrillic",
category="lu",
contextname="cyrillicObarred",
description="CYRILLIC CAPITAL LETTER BARRED O",
direction="l",
- lccode=0x04E9,
+ lccode=0x4E9,
linebreak="al",
- unicodeslot=0x04E8,
+ unicodeslot=0x4E8,
},
- [0x04E9]={
+ [0x4E9]={
adobename="obarredcyrillic",
category="ll",
contextname="cyrillicobarred",
description="CYRILLIC SMALL LETTER BARRED O",
direction="l",
linebreak="al",
- uccode=0x04E8,
- unicodeslot=0x04E9,
+ uccode=0x4E8,
+ unicodeslot=0x4E9,
},
- [0x04EA]={
+ [0x4EA]={
adobename="Obarreddieresiscyrillic",
category="lu",
contextname="cyrillicObarreddiaeresis",
description="CYRILLIC CAPITAL LETTER BARRED O WITH DIAERESIS",
direction="l",
- lccode=0x04EB,
+ lccode=0x4EB,
linebreak="al",
- specials={ "char", 0x04E8, 0x0308 },
- unicodeslot=0x04EA,
+ specials={ "char", 0x4E8, 0x308 },
+ unicodeslot=0x4EA,
},
- [0x04EB]={
+ [0x4EB]={
adobename="obarreddieresiscyrillic",
category="ll",
contextname="cyrillicobarreddiaeresis",
description="CYRILLIC SMALL LETTER BARRED O WITH DIAERESIS",
direction="l",
linebreak="al",
- specials={ "char", 0x04E9, 0x0308 },
- uccode=0x04EA,
- unicodeslot=0x04EB,
+ specials={ "char", 0x4E9, 0x308 },
+ uccode=0x4EA,
+ unicodeslot=0x4EB,
},
- [0x04EC]={
+ [0x4EC]={
category="lu",
contextname="cyrillicEdiaeresis",
description="CYRILLIC CAPITAL LETTER E WITH DIAERESIS",
direction="l",
- lccode=0x04ED,
+ lccode=0x4ED,
linebreak="al",
- shcode=0x042D,
- specials={ "char", 0x042D, 0x0308 },
- unicodeslot=0x04EC,
+ shcode=0x42D,
+ specials={ "char", 0x42D, 0x308 },
+ unicodeslot=0x4EC,
},
- [0x04ED]={
+ [0x4ED]={
category="ll",
contextname="cyrillicediaeresis",
description="CYRILLIC SMALL LETTER E WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x044D,
- specials={ "char", 0x044D, 0x0308 },
- uccode=0x04EC,
- unicodeslot=0x04ED,
+ shcode=0x44D,
+ specials={ "char", 0x44D, 0x308 },
+ uccode=0x4EC,
+ unicodeslot=0x4ED,
},
- [0x04EE]={
+ [0x4EE]={
adobename="Umacroncyrillic",
category="lu",
contextname="cyrillicUmacron",
description="CYRILLIC CAPITAL LETTER U WITH MACRON",
direction="l",
- lccode=0x04EF,
+ lccode=0x4EF,
linebreak="al",
- shcode=0x0423,
- specials={ "char", 0x0423, 0x0304 },
- unicodeslot=0x04EE,
+ shcode=0x423,
+ specials={ "char", 0x423, 0x304 },
+ unicodeslot=0x4EE,
},
- [0x04EF]={
+ [0x4EF]={
adobename="umacroncyrillic",
category="ll",
contextname="cyrillicumacron",
description="CYRILLIC SMALL LETTER U WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0443,
- specials={ "char", 0x0443, 0x0304 },
- uccode=0x04EE,
- unicodeslot=0x04EF,
+ shcode=0x443,
+ specials={ "char", 0x443, 0x304 },
+ uccode=0x4EE,
+ unicodeslot=0x4EF,
},
- [0x04F0]={
+ [0x4F0]={
adobename="Udieresiscyrillic",
category="lu",
contextname="cyrillicUdiaeresis",
description="CYRILLIC CAPITAL LETTER U WITH DIAERESIS",
direction="l",
- lccode=0x04F1,
+ lccode=0x4F1,
linebreak="al",
- shcode=0x0423,
- specials={ "char", 0x0423, 0x0308 },
- unicodeslot=0x04F0,
+ shcode=0x423,
+ specials={ "char", 0x423, 0x308 },
+ unicodeslot=0x4F0,
},
- [0x04F1]={
+ [0x4F1]={
adobename="udieresiscyrillic",
category="ll",
contextname="cyrillicudiaeresis",
description="CYRILLIC SMALL LETTER U WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0443,
- specials={ "char", 0x0443, 0x0308 },
- uccode=0x04F0,
- unicodeslot=0x04F1,
+ shcode=0x443,
+ specials={ "char", 0x443, 0x308 },
+ uccode=0x4F0,
+ unicodeslot=0x4F1,
},
- [0x04F2]={
+ [0x4F2]={
adobename="Uhungarumlautcyrillic",
category="lu",
contextname="cyrillicUdoubleacute",
description="CYRILLIC CAPITAL LETTER U WITH DOUBLE ACUTE",
direction="l",
- lccode=0x04F3,
+ lccode=0x4F3,
linebreak="al",
- shcode=0x0423,
- specials={ "char", 0x0423, 0x030B },
- unicodeslot=0x04F2,
+ shcode=0x423,
+ specials={ "char", 0x423, 0x30B },
+ unicodeslot=0x4F2,
},
- [0x04F3]={
+ [0x4F3]={
adobename="uhungarumlautcyrillic",
category="ll",
contextname="cyrillicudoubleacute",
description="CYRILLIC SMALL LETTER U WITH DOUBLE ACUTE",
direction="l",
linebreak="al",
- shcode=0x0443,
- specials={ "char", 0x0443, 0x030B },
- uccode=0x04F2,
- unicodeslot=0x04F3,
+ shcode=0x443,
+ specials={ "char", 0x443, 0x30B },
+ uccode=0x4F2,
+ unicodeslot=0x4F3,
},
- [0x04F4]={
+ [0x4F4]={
adobename="Chedieresiscyrillic",
category="lu",
contextname="cyrillicCHEdiaeresis",
description="CYRILLIC CAPITAL LETTER CHE WITH DIAERESIS",
direction="l",
- lccode=0x04F5,
+ lccode=0x4F5,
linebreak="al",
- shcode=0x0427,
- specials={ "char", 0x0427, 0x0308 },
- unicodeslot=0x04F4,
+ shcode=0x427,
+ specials={ "char", 0x427, 0x308 },
+ unicodeslot=0x4F4,
},
- [0x04F5]={
+ [0x4F5]={
adobename="chedieresiscyrillic",
category="ll",
contextname="cyrillicchediaeresis",
description="CYRILLIC SMALL LETTER CHE WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0447,
- specials={ "char", 0x0447, 0x0308 },
- uccode=0x04F4,
- unicodeslot=0x04F5,
+ shcode=0x447,
+ specials={ "char", 0x447, 0x308 },
+ uccode=0x4F4,
+ unicodeslot=0x4F5,
},
- [0x04F6]={
+ [0x4F6]={
category="lu",
description="CYRILLIC CAPITAL LETTER GHE WITH DESCENDER",
direction="l",
- lccode=0x04F7,
+ lccode=0x4F7,
linebreak="al",
- shcode=0x0413,
- unicodeslot=0x04F6,
+ shcode=0x413,
+ unicodeslot=0x4F6,
},
- [0x04F7]={
+ [0x4F7]={
category="ll",
description="CYRILLIC SMALL LETTER GHE WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0433,
- uccode=0x04F6,
- unicodeslot=0x04F7,
+ shcode=0x433,
+ uccode=0x4F6,
+ unicodeslot=0x4F7,
},
- [0x04F8]={
+ [0x4F8]={
adobename="Yerudieresiscyrillic",
category="lu",
contextname="cyrillicYERUdiaeresis",
description="CYRILLIC CAPITAL LETTER YERU WITH DIAERESIS",
direction="l",
- lccode=0x04F9,
+ lccode=0x4F9,
linebreak="al",
- shcode=0x042B,
- specials={ "char", 0x042B, 0x0308 },
- unicodeslot=0x04F8,
+ shcode=0x42B,
+ specials={ "char", 0x42B, 0x308 },
+ unicodeslot=0x4F8,
},
- [0x04F9]={
+ [0x4F9]={
adobename="yerudieresiscyrillic",
category="ll",
contextname="cyrillicyerudiaeresis",
description="CYRILLIC SMALL LETTER YERU WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x044B,
- specials={ "char", 0x044B, 0x0308 },
- uccode=0x04F8,
- unicodeslot=0x04F9,
+ shcode=0x44B,
+ specials={ "char", 0x44B, 0x308 },
+ uccode=0x4F8,
+ unicodeslot=0x4F9,
},
- [0x04FA]={
+ [0x4FA]={
category="lu",
description="CYRILLIC CAPITAL LETTER GHE WITH STROKE AND HOOK",
direction="l",
- lccode=0x04FB,
+ lccode=0x4FB,
linebreak="al",
- shcode=0x0413,
- unicodeslot=0x04FA,
+ shcode=0x413,
+ unicodeslot=0x4FA,
},
- [0x04FB]={
+ [0x4FB]={
category="ll",
description="CYRILLIC SMALL LETTER GHE WITH STROKE AND HOOK",
direction="l",
linebreak="al",
- shcode=0x0433,
- uccode=0x04FA,
- unicodeslot=0x04FB,
+ shcode=0x433,
+ uccode=0x4FA,
+ unicodeslot=0x4FB,
},
- [0x04FC]={
+ [0x4FC]={
category="lu",
description="CYRILLIC CAPITAL LETTER HA WITH HOOK",
direction="l",
- lccode=0x04FD,
+ lccode=0x4FD,
linebreak="al",
- shcode=0x0425,
- unicodeslot=0x04FC,
+ shcode=0x425,
+ unicodeslot=0x4FC,
},
- [0x04FD]={
+ [0x4FD]={
category="ll",
description="CYRILLIC SMALL LETTER HA WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x0445,
- uccode=0x04FC,
- unicodeslot=0x04FD,
+ shcode=0x445,
+ uccode=0x4FC,
+ unicodeslot=0x4FD,
},
- [0x04FE]={
+ [0x4FE]={
category="lu",
description="CYRILLIC CAPITAL LETTER HA WITH STROKE",
direction="l",
- lccode=0x04FF,
+ lccode=0x4FF,
linebreak="al",
- shcode=0x0425,
- unicodeslot=0x04FE,
+ shcode=0x425,
+ unicodeslot=0x4FE,
},
- [0x04FF]={
+ [0x4FF]={
category="ll",
description="CYRILLIC SMALL LETTER HA WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0445,
- uccode=0x04FE,
- unicodeslot=0x04FF,
+ shcode=0x445,
+ uccode=0x4FE,
+ unicodeslot=0x4FF,
},
- [0x0500]={
+ [0x500]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI DE",
direction="l",
- lccode=0x0501,
+ lccode=0x501,
linebreak="al",
- unicodeslot=0x0500,
+ unicodeslot=0x500,
},
- [0x0501]={
+ [0x501]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI DE",
direction="l",
linebreak="al",
- uccode=0x0500,
- unicodeslot=0x0501,
+ uccode=0x500,
+ unicodeslot=0x501,
},
- [0x0502]={
+ [0x502]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI DJE",
direction="l",
- lccode=0x0503,
+ lccode=0x503,
linebreak="al",
- unicodeslot=0x0502,
+ unicodeslot=0x502,
},
- [0x0503]={
+ [0x503]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI DJE",
direction="l",
linebreak="al",
- uccode=0x0502,
- unicodeslot=0x0503,
+ uccode=0x502,
+ unicodeslot=0x503,
},
- [0x0504]={
+ [0x504]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI ZJE",
direction="l",
- lccode=0x0505,
+ lccode=0x505,
linebreak="al",
- unicodeslot=0x0504,
+ unicodeslot=0x504,
},
- [0x0505]={
+ [0x505]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI ZJE",
direction="l",
linebreak="al",
- uccode=0x0504,
- unicodeslot=0x0505,
+ uccode=0x504,
+ unicodeslot=0x505,
},
- [0x0506]={
+ [0x506]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI DZJE",
direction="l",
- lccode=0x0507,
+ lccode=0x507,
linebreak="al",
- unicodeslot=0x0506,
+ unicodeslot=0x506,
},
- [0x0507]={
+ [0x507]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI DZJE",
direction="l",
linebreak="al",
- uccode=0x0506,
- unicodeslot=0x0507,
+ uccode=0x506,
+ unicodeslot=0x507,
},
- [0x0508]={
+ [0x508]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI LJE",
direction="l",
- lccode=0x0509,
+ lccode=0x509,
linebreak="al",
- unicodeslot=0x0508,
+ unicodeslot=0x508,
},
- [0x0509]={
+ [0x509]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI LJE",
direction="l",
linebreak="al",
- uccode=0x0508,
- unicodeslot=0x0509,
+ uccode=0x508,
+ unicodeslot=0x509,
},
- [0x050A]={
+ [0x50A]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI NJE",
direction="l",
- lccode=0x050B,
+ lccode=0x50B,
linebreak="al",
- unicodeslot=0x050A,
+ unicodeslot=0x50A,
},
- [0x050B]={
+ [0x50B]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI NJE",
direction="l",
linebreak="al",
- uccode=0x050A,
- unicodeslot=0x050B,
+ uccode=0x50A,
+ unicodeslot=0x50B,
},
- [0x050C]={
+ [0x50C]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI SJE",
direction="l",
- lccode=0x050D,
+ lccode=0x50D,
linebreak="al",
- unicodeslot=0x050C,
+ unicodeslot=0x50C,
},
- [0x050D]={
+ [0x50D]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI SJE",
direction="l",
linebreak="al",
- uccode=0x050C,
- unicodeslot=0x050D,
+ uccode=0x50C,
+ unicodeslot=0x50D,
},
- [0x050E]={
+ [0x50E]={
category="lu",
description="CYRILLIC CAPITAL LETTER KOMI TJE",
direction="l",
- lccode=0x050F,
+ lccode=0x50F,
linebreak="al",
- unicodeslot=0x050E,
+ unicodeslot=0x50E,
},
- [0x050F]={
+ [0x50F]={
category="ll",
description="CYRILLIC SMALL LETTER KOMI TJE",
direction="l",
linebreak="al",
- uccode=0x050E,
- unicodeslot=0x050F,
+ uccode=0x50E,
+ unicodeslot=0x50F,
},
- [0x0510]={
+ [0x510]={
category="lu",
description="CYRILLIC CAPITAL LETTER REVERSED ZE",
direction="l",
- lccode=0x0511,
+ lccode=0x511,
linebreak="al",
- unicodeslot=0x0510,
+ unicodeslot=0x510,
},
- [0x0511]={
+ [0x511]={
category="ll",
description="CYRILLIC SMALL LETTER REVERSED ZE",
direction="l",
linebreak="al",
- uccode=0x0510,
- unicodeslot=0x0511,
+ uccode=0x510,
+ unicodeslot=0x511,
},
- [0x0512]={
+ [0x512]={
category="lu",
description="CYRILLIC CAPITAL LETTER EL WITH HOOK",
direction="l",
- lccode=0x0513,
+ lccode=0x513,
linebreak="al",
- shcode=0x041B,
- unicodeslot=0x0512,
+ shcode=0x41B,
+ unicodeslot=0x512,
},
- [0x0513]={
+ [0x513]={
category="ll",
description="CYRILLIC SMALL LETTER EL WITH HOOK",
direction="l",
linebreak="al",
- shcode=0x043B,
- uccode=0x0512,
- unicodeslot=0x0513,
+ shcode=0x43B,
+ uccode=0x512,
+ unicodeslot=0x513,
},
- [0x0514]={
+ [0x514]={
category="lu",
description="CYRILLIC CAPITAL LETTER LHA",
direction="l",
linebreak="al",
- unicodeslot=0x0514,
+ unicodeslot=0x514,
},
- [0x0515]={
+ [0x515]={
category="ll",
description="CYRILLIC SMALL LETTER LHA",
direction="l",
linebreak="al",
- unicodeslot=0x0515,
+ unicodeslot=0x515,
},
- [0x0516]={
+ [0x516]={
category="lu",
description="CYRILLIC CAPITAL LETTER RHA",
direction="l",
linebreak="al",
- unicodeslot=0x0516,
+ unicodeslot=0x516,
},
- [0x0517]={
+ [0x517]={
category="ll",
description="CYRILLIC SMALL LETTER RHA",
direction="l",
linebreak="al",
- unicodeslot=0x0517,
+ unicodeslot=0x517,
},
- [0x0518]={
+ [0x518]={
category="lu",
description="CYRILLIC CAPITAL LETTER YAE",
direction="l",
linebreak="al",
- unicodeslot=0x0518,
+ unicodeslot=0x518,
},
- [0x0519]={
+ [0x519]={
category="ll",
description="CYRILLIC SMALL LETTER YAE",
direction="l",
linebreak="al",
- unicodeslot=0x0519,
+ unicodeslot=0x519,
},
- [0x051A]={
+ [0x51A]={
category="lu",
description="CYRILLIC CAPITAL LETTER QA",
direction="l",
linebreak="al",
- unicodeslot=0x051A,
+ unicodeslot=0x51A,
},
- [0x051B]={
+ [0x51B]={
category="ll",
description="CYRILLIC SMALL LETTER QA",
direction="l",
linebreak="al",
- unicodeslot=0x051B,
+ unicodeslot=0x51B,
},
- [0x051C]={
+ [0x51C]={
category="lu",
description="CYRILLIC CAPITAL LETTER WE",
direction="l",
linebreak="al",
- unicodeslot=0x051C,
+ unicodeslot=0x51C,
},
- [0x051D]={
+ [0x51D]={
category="ll",
description="CYRILLIC SMALL LETTER WE",
direction="l",
linebreak="al",
- unicodeslot=0x051D,
+ unicodeslot=0x51D,
},
- [0x051E]={
+ [0x51E]={
category="lu",
description="CYRILLIC CAPITAL LETTER ALEUT KA",
direction="l",
linebreak="al",
- unicodeslot=0x051E,
+ unicodeslot=0x51E,
},
- [0x051F]={
+ [0x51F]={
category="ll",
description="CYRILLIC SMALL LETTER ALEUT KA",
direction="l",
linebreak="al",
- unicodeslot=0x051F,
+ unicodeslot=0x51F,
},
- [0x0520]={
+ [0x520]={
category="lu",
description="CYRILLIC CAPITAL LETTER EL WITH MIDDLE HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x0520,
+ unicodeslot=0x520,
},
- [0x0521]={
+ [0x521]={
category="ll",
description="CYRILLIC SMALL LETTER EL WITH MIDDLE HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x0521,
+ unicodeslot=0x521,
},
- [0x0522]={
+ [0x522]={
category="lu",
description="CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x0522,
+ unicodeslot=0x522,
},
- [0x0523]={
+ [0x523]={
category="ll",
description="CYRILLIC SMALL LETTER EN WITH MIDDLE HOOK",
direction="l",
linebreak="al",
- unicodeslot=0x0523,
+ unicodeslot=0x523,
},
- [0x0524]={
+ [0x524]={
category="lu",
description="CYRILLIC CAPITAL LETTER PE WITH DESCENDER",
direction="l",
linebreak="al",
- unicodeslot=0x0524,
+ unicodeslot=0x524,
},
- [0x0525]={
+ [0x525]={
category="ll",
description="CYRILLIC SMALL LETTER PE WITH DESCENDER",
direction="l",
linebreak="al",
- unicodeslot=0x0525,
+ unicodeslot=0x525,
},
- [0x0526]={
+ [0x526]={
category="lu",
description="CYRILLIC CAPITAL LETTER SHHA WITH DESCENDER",
direction="l",
linebreak="al",
- unicodeslot=0x0526,
+ unicodeslot=0x526,
},
- [0x0527]={
+ [0x527]={
category="ll",
description="CYRILLIC SMALL LETTER SHHA WITH DESCENDER",
direction="l",
linebreak="al",
- unicodeslot=0x0527,
+ unicodeslot=0x527,
+ },
+ [0x528]={
+ category="lu",
+ description="CYRILLIC CAPITAL LETTER EN WITH LEFT HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x528,
+ },
+ [0x529]={
+ category="ll",
+ description="CYRILLIC SMALL LETTER EN WITH LEFT HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x529,
+ },
+ [0x52A]={
+ category="lu",
+ description="CYRILLIC CAPITAL LETTER DZZHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x52A,
+ },
+ [0x52B]={
+ category="ll",
+ description="CYRILLIC SMALL LETTER DZZHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x52B,
+ },
+ [0x52C]={
+ category="lu",
+ description="CYRILLIC CAPITAL LETTER DCHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x52C,
+ },
+ [0x52D]={
+ category="ll",
+ description="CYRILLIC SMALL LETTER DCHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x52D,
},
- [0x0531]={
+ [0x52E]={
+ category="lu",
+ description="CYRILLIC CAPITAL LETTER EL WITH DESCENDER",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x52E,
+ },
+ [0x52F]={
+ category="ll",
+ description="CYRILLIC SMALL LETTER EL WITH DESCENDER",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x52F,
+ },
+ [0x531]={
adobename="Aybarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER AYB",
direction="l",
- lccode=0x0561,
+ lccode=0x561,
linebreak="al",
- unicodeslot=0x0531,
+ unicodeslot=0x531,
},
- [0x0532]={
+ [0x532]={
adobename="Benarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER BEN",
direction="l",
- lccode=0x0562,
+ lccode=0x562,
linebreak="al",
- unicodeslot=0x0532,
+ unicodeslot=0x532,
},
- [0x0533]={
+ [0x533]={
adobename="Gimarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER GIM",
direction="l",
- lccode=0x0563,
+ lccode=0x563,
linebreak="al",
- unicodeslot=0x0533,
+ unicodeslot=0x533,
},
- [0x0534]={
+ [0x534]={
adobename="Daarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER DA",
direction="l",
- lccode=0x0564,
+ lccode=0x564,
linebreak="al",
- unicodeslot=0x0534,
+ unicodeslot=0x534,
},
- [0x0535]={
+ [0x535]={
adobename="Echarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER ECH",
direction="l",
- lccode=0x0565,
+ lccode=0x565,
linebreak="al",
- unicodeslot=0x0535,
+ unicodeslot=0x535,
},
- [0x0536]={
+ [0x536]={
adobename="Zaarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER ZA",
direction="l",
- lccode=0x0566,
+ lccode=0x566,
linebreak="al",
- unicodeslot=0x0536,
+ unicodeslot=0x536,
},
- [0x0537]={
+ [0x537]={
adobename="Eharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER EH",
direction="l",
- lccode=0x0567,
+ lccode=0x567,
linebreak="al",
- unicodeslot=0x0537,
+ unicodeslot=0x537,
},
- [0x0538]={
+ [0x538]={
adobename="Etarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER ET",
direction="l",
- lccode=0x0568,
+ lccode=0x568,
linebreak="al",
- unicodeslot=0x0538,
+ unicodeslot=0x538,
},
- [0x0539]={
+ [0x539]={
adobename="Toarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER TO",
direction="l",
- lccode=0x0569,
+ lccode=0x569,
linebreak="al",
- unicodeslot=0x0539,
+ unicodeslot=0x539,
},
- [0x053A]={
+ [0x53A]={
adobename="Zhearmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER ZHE",
direction="l",
- lccode=0x056A,
+ lccode=0x56A,
linebreak="al",
- unicodeslot=0x053A,
+ unicodeslot=0x53A,
},
- [0x053B]={
+ [0x53B]={
adobename="Iniarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER INI",
direction="l",
- lccode=0x056B,
+ lccode=0x56B,
linebreak="al",
- unicodeslot=0x053B,
+ unicodeslot=0x53B,
},
- [0x053C]={
+ [0x53C]={
adobename="Liwnarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER LIWN",
direction="l",
- lccode=0x056C,
+ lccode=0x56C,
linebreak="al",
- unicodeslot=0x053C,
+ unicodeslot=0x53C,
},
- [0x053D]={
+ [0x53D]={
adobename="Xeharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER XEH",
direction="l",
- lccode=0x056D,
+ lccode=0x56D,
linebreak="al",
- unicodeslot=0x053D,
+ unicodeslot=0x53D,
},
- [0x053E]={
+ [0x53E]={
adobename="Caarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER CA",
direction="l",
- lccode=0x056E,
+ lccode=0x56E,
linebreak="al",
- unicodeslot=0x053E,
+ unicodeslot=0x53E,
},
- [0x053F]={
+ [0x53F]={
adobename="Kenarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER KEN",
direction="l",
- lccode=0x056F,
+ lccode=0x56F,
linebreak="al",
- unicodeslot=0x053F,
+ unicodeslot=0x53F,
},
- [0x0540]={
+ [0x540]={
adobename="Hoarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER HO",
direction="l",
- lccode=0x0570,
+ lccode=0x570,
linebreak="al",
- unicodeslot=0x0540,
+ unicodeslot=0x540,
},
- [0x0541]={
+ [0x541]={
adobename="Jaarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER JA",
direction="l",
- lccode=0x0571,
+ lccode=0x571,
linebreak="al",
- unicodeslot=0x0541,
+ unicodeslot=0x541,
},
- [0x0542]={
+ [0x542]={
adobename="Ghadarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER GHAD",
direction="l",
- lccode=0x0572,
+ lccode=0x572,
linebreak="al",
- unicodeslot=0x0542,
+ unicodeslot=0x542,
},
- [0x0543]={
+ [0x543]={
adobename="Cheharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER CHEH",
direction="l",
- lccode=0x0573,
+ lccode=0x573,
linebreak="al",
- unicodeslot=0x0543,
+ unicodeslot=0x543,
},
- [0x0544]={
+ [0x544]={
adobename="Menarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER MEN",
direction="l",
- lccode=0x0574,
+ lccode=0x574,
linebreak="al",
- unicodeslot=0x0544,
+ unicodeslot=0x544,
},
- [0x0545]={
+ [0x545]={
adobename="Yiarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER YI",
direction="l",
- lccode=0x0575,
+ lccode=0x575,
linebreak="al",
- unicodeslot=0x0545,
+ unicodeslot=0x545,
},
- [0x0546]={
+ [0x546]={
adobename="Nowarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER NOW",
direction="l",
- lccode=0x0576,
+ lccode=0x576,
linebreak="al",
- unicodeslot=0x0546,
+ unicodeslot=0x546,
},
- [0x0547]={
+ [0x547]={
adobename="Shaarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER SHA",
direction="l",
- lccode=0x0577,
+ lccode=0x577,
linebreak="al",
- unicodeslot=0x0547,
+ unicodeslot=0x547,
},
- [0x0548]={
+ [0x548]={
adobename="Voarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER VO",
direction="l",
- lccode=0x0578,
+ lccode=0x578,
linebreak="al",
- unicodeslot=0x0548,
+ unicodeslot=0x548,
},
- [0x0549]={
+ [0x549]={
adobename="Chaarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER CHA",
direction="l",
- lccode=0x0579,
+ lccode=0x579,
linebreak="al",
- unicodeslot=0x0549,
+ unicodeslot=0x549,
},
- [0x054A]={
+ [0x54A]={
adobename="Peharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER PEH",
direction="l",
- lccode=0x057A,
+ lccode=0x57A,
linebreak="al",
- unicodeslot=0x054A,
+ unicodeslot=0x54A,
},
- [0x054B]={
+ [0x54B]={
adobename="Jheharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER JHEH",
direction="l",
- lccode=0x057B,
+ lccode=0x57B,
linebreak="al",
- unicodeslot=0x054B,
+ unicodeslot=0x54B,
},
- [0x054C]={
+ [0x54C]={
adobename="Raarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER RA",
direction="l",
- lccode=0x057C,
+ lccode=0x57C,
linebreak="al",
- unicodeslot=0x054C,
+ unicodeslot=0x54C,
},
- [0x054D]={
+ [0x54D]={
adobename="Seharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER SEH",
direction="l",
- lccode=0x057D,
+ lccode=0x57D,
linebreak="al",
- unicodeslot=0x054D,
+ unicodeslot=0x54D,
},
- [0x054E]={
+ [0x54E]={
adobename="Vewarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER VEW",
direction="l",
- lccode=0x057E,
+ lccode=0x57E,
linebreak="al",
- unicodeslot=0x054E,
+ unicodeslot=0x54E,
},
- [0x054F]={
+ [0x54F]={
adobename="Tiwnarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER TIWN",
direction="l",
- lccode=0x057F,
+ lccode=0x57F,
linebreak="al",
- unicodeslot=0x054F,
+ unicodeslot=0x54F,
},
- [0x0550]={
+ [0x550]={
adobename="Reharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER REH",
direction="l",
- lccode=0x0580,
+ lccode=0x580,
linebreak="al",
- unicodeslot=0x0550,
+ unicodeslot=0x550,
},
- [0x0551]={
+ [0x551]={
adobename="Coarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER CO",
direction="l",
- lccode=0x0581,
+ lccode=0x581,
linebreak="al",
- unicodeslot=0x0551,
+ unicodeslot=0x551,
},
- [0x0552]={
+ [0x552]={
adobename="Yiwnarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER YIWN",
direction="l",
- lccode=0x0582,
+ lccode=0x582,
linebreak="al",
- unicodeslot=0x0552,
+ unicodeslot=0x552,
},
- [0x0553]={
+ [0x553]={
adobename="Piwrarmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER PIWR",
direction="l",
- lccode=0x0583,
+ lccode=0x583,
linebreak="al",
- unicodeslot=0x0553,
+ unicodeslot=0x553,
},
- [0x0554]={
+ [0x554]={
adobename="Keharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER KEH",
direction="l",
- lccode=0x0584,
+ lccode=0x584,
linebreak="al",
- unicodeslot=0x0554,
+ unicodeslot=0x554,
},
- [0x0555]={
+ [0x555]={
adobename="Oharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER OH",
direction="l",
- lccode=0x0585,
+ lccode=0x585,
linebreak="al",
- unicodeslot=0x0555,
+ unicodeslot=0x555,
},
- [0x0556]={
+ [0x556]={
adobename="Feharmenian",
category="lu",
description="ARMENIAN CAPITAL LETTER FEH",
direction="l",
- lccode=0x0586,
+ lccode=0x586,
linebreak="al",
- unicodeslot=0x0556,
+ unicodeslot=0x556,
},
- [0x0559]={
+ [0x559]={
adobename="ringhalfleftarmenian",
category="lm",
description="ARMENIAN MODIFIER LETTER LEFT HALF RING",
direction="l",
linebreak="al",
- unicodeslot=0x0559,
+ unicodeslot=0x559,
},
- [0x055A]={
+ [0x55A]={
adobename="apostrophearmenian",
category="po",
description="ARMENIAN APOSTROPHE",
direction="l",
linebreak="al",
- unicodeslot=0x055A,
+ unicodeslot=0x55A,
},
- [0x055B]={
+ [0x55B]={
adobename="emphasismarkarmenian",
category="po",
description="ARMENIAN EMPHASIS MARK",
direction="l",
linebreak="al",
- unicodeslot=0x055B,
+ unicodeslot=0x55B,
},
- [0x055C]={
+ [0x55C]={
adobename="exclamarmenian",
category="po",
description="ARMENIAN EXCLAMATION MARK",
direction="l",
linebreak="al",
- unicodeslot=0x055C,
+ unicodeslot=0x55C,
},
- [0x055D]={
+ [0x55D]={
adobename="commaarmenian",
category="po",
description="ARMENIAN COMMA",
direction="l",
linebreak="al",
- unicodeslot=0x055D,
+ unicodeslot=0x55D,
},
- [0x055E]={
+ [0x55E]={
adobename="questionarmenian",
category="po",
description="ARMENIAN QUESTION MARK",
direction="l",
linebreak="al",
- unicodeslot=0x055E,
+ unicodeslot=0x55E,
},
- [0x055F]={
+ [0x55F]={
adobename="abbreviationmarkarmenian",
category="po",
description="ARMENIAN ABBREVIATION MARK",
direction="l",
linebreak="al",
- unicodeslot=0x055F,
+ unicodeslot=0x55F,
},
- [0x0561]={
+ [0x561]={
adobename="aybarmenian",
category="ll",
description="ARMENIAN SMALL LETTER AYB",
direction="l",
linebreak="al",
- uccode=0x0531,
- unicodeslot=0x0561,
+ uccode=0x531,
+ unicodeslot=0x561,
},
- [0x0562]={
+ [0x562]={
adobename="benarmenian",
category="ll",
description="ARMENIAN SMALL LETTER BEN",
direction="l",
linebreak="al",
- uccode=0x0532,
- unicodeslot=0x0562,
+ uccode=0x532,
+ unicodeslot=0x562,
},
- [0x0563]={
+ [0x563]={
adobename="gimarmenian",
category="ll",
description="ARMENIAN SMALL LETTER GIM",
direction="l",
linebreak="al",
- uccode=0x0533,
- unicodeslot=0x0563,
+ uccode=0x533,
+ unicodeslot=0x563,
},
- [0x0564]={
+ [0x564]={
adobename="daarmenian",
category="ll",
description="ARMENIAN SMALL LETTER DA",
direction="l",
linebreak="al",
- uccode=0x0534,
- unicodeslot=0x0564,
+ uccode=0x534,
+ unicodeslot=0x564,
},
- [0x0565]={
+ [0x565]={
adobename="echarmenian",
category="ll",
description="ARMENIAN SMALL LETTER ECH",
direction="l",
linebreak="al",
- uccode=0x0535,
- unicodeslot=0x0565,
+ uccode=0x535,
+ unicodeslot=0x565,
},
- [0x0566]={
+ [0x566]={
adobename="zaarmenian",
category="ll",
description="ARMENIAN SMALL LETTER ZA",
direction="l",
linebreak="al",
- uccode=0x0536,
- unicodeslot=0x0566,
+ uccode=0x536,
+ unicodeslot=0x566,
},
- [0x0567]={
+ [0x567]={
adobename="eharmenian",
category="ll",
description="ARMENIAN SMALL LETTER EH",
direction="l",
linebreak="al",
- uccode=0x0537,
- unicodeslot=0x0567,
+ uccode=0x537,
+ unicodeslot=0x567,
},
- [0x0568]={
+ [0x568]={
adobename="etarmenian",
category="ll",
description="ARMENIAN SMALL LETTER ET",
direction="l",
linebreak="al",
- uccode=0x0538,
- unicodeslot=0x0568,
+ uccode=0x538,
+ unicodeslot=0x568,
},
- [0x0569]={
+ [0x569]={
adobename="toarmenian",
category="ll",
description="ARMENIAN SMALL LETTER TO",
direction="l",
linebreak="al",
- uccode=0x0539,
- unicodeslot=0x0569,
+ uccode=0x539,
+ unicodeslot=0x569,
},
- [0x056A]={
+ [0x56A]={
adobename="zhearmenian",
category="ll",
description="ARMENIAN SMALL LETTER ZHE",
direction="l",
linebreak="al",
- uccode=0x053A,
- unicodeslot=0x056A,
+ uccode=0x53A,
+ unicodeslot=0x56A,
},
- [0x056B]={
+ [0x56B]={
adobename="iniarmenian",
category="ll",
description="ARMENIAN SMALL LETTER INI",
direction="l",
linebreak="al",
- uccode=0x053B,
- unicodeslot=0x056B,
+ uccode=0x53B,
+ unicodeslot=0x56B,
},
- [0x056C]={
+ [0x56C]={
adobename="liwnarmenian",
category="ll",
description="ARMENIAN SMALL LETTER LIWN",
direction="l",
linebreak="al",
- uccode=0x053C,
- unicodeslot=0x056C,
+ uccode=0x53C,
+ unicodeslot=0x56C,
},
- [0x056D]={
+ [0x56D]={
adobename="xeharmenian",
category="ll",
description="ARMENIAN SMALL LETTER XEH",
direction="l",
linebreak="al",
- uccode=0x053D,
- unicodeslot=0x056D,
+ uccode=0x53D,
+ unicodeslot=0x56D,
},
- [0x056E]={
+ [0x56E]={
adobename="caarmenian",
category="ll",
description="ARMENIAN SMALL LETTER CA",
direction="l",
linebreak="al",
- uccode=0x053E,
- unicodeslot=0x056E,
+ uccode=0x53E,
+ unicodeslot=0x56E,
},
- [0x056F]={
+ [0x56F]={
adobename="kenarmenian",
category="ll",
description="ARMENIAN SMALL LETTER KEN",
direction="l",
linebreak="al",
- uccode=0x053F,
- unicodeslot=0x056F,
+ uccode=0x53F,
+ unicodeslot=0x56F,
},
- [0x0570]={
+ [0x570]={
adobename="hoarmenian",
category="ll",
description="ARMENIAN SMALL LETTER HO",
direction="l",
linebreak="al",
- uccode=0x0540,
- unicodeslot=0x0570,
+ uccode=0x540,
+ unicodeslot=0x570,
},
- [0x0571]={
+ [0x571]={
adobename="jaarmenian",
category="ll",
description="ARMENIAN SMALL LETTER JA",
direction="l",
linebreak="al",
- uccode=0x0541,
- unicodeslot=0x0571,
+ uccode=0x541,
+ unicodeslot=0x571,
},
- [0x0572]={
+ [0x572]={
adobename="ghadarmenian",
category="ll",
description="ARMENIAN SMALL LETTER GHAD",
direction="l",
linebreak="al",
- uccode=0x0542,
- unicodeslot=0x0572,
+ uccode=0x542,
+ unicodeslot=0x572,
},
- [0x0573]={
+ [0x573]={
adobename="cheharmenian",
category="ll",
description="ARMENIAN SMALL LETTER CHEH",
direction="l",
linebreak="al",
- uccode=0x0543,
- unicodeslot=0x0573,
+ uccode=0x543,
+ unicodeslot=0x573,
},
- [0x0574]={
+ [0x574]={
adobename="menarmenian",
category="ll",
description="ARMENIAN SMALL LETTER MEN",
direction="l",
linebreak="al",
- uccode=0x0544,
- unicodeslot=0x0574,
+ uccode=0x544,
+ unicodeslot=0x574,
},
- [0x0575]={
+ [0x575]={
adobename="yiarmenian",
category="ll",
description="ARMENIAN SMALL LETTER YI",
direction="l",
linebreak="al",
- uccode=0x0545,
- unicodeslot=0x0575,
+ uccode=0x545,
+ unicodeslot=0x575,
},
- [0x0576]={
+ [0x576]={
adobename="nowarmenian",
category="ll",
description="ARMENIAN SMALL LETTER NOW",
direction="l",
linebreak="al",
- uccode=0x0546,
- unicodeslot=0x0576,
+ uccode=0x546,
+ unicodeslot=0x576,
},
- [0x0577]={
+ [0x577]={
adobename="shaarmenian",
category="ll",
description="ARMENIAN SMALL LETTER SHA",
direction="l",
linebreak="al",
- uccode=0x0547,
- unicodeslot=0x0577,
+ uccode=0x547,
+ unicodeslot=0x577,
},
- [0x0578]={
+ [0x578]={
adobename="voarmenian",
category="ll",
description="ARMENIAN SMALL LETTER VO",
direction="l",
linebreak="al",
- uccode=0x0548,
- unicodeslot=0x0578,
+ uccode=0x548,
+ unicodeslot=0x578,
},
- [0x0579]={
+ [0x579]={
adobename="chaarmenian",
category="ll",
description="ARMENIAN SMALL LETTER CHA",
direction="l",
linebreak="al",
- uccode=0x0549,
- unicodeslot=0x0579,
+ uccode=0x549,
+ unicodeslot=0x579,
},
- [0x057A]={
+ [0x57A]={
adobename="peharmenian",
category="ll",
description="ARMENIAN SMALL LETTER PEH",
direction="l",
linebreak="al",
- uccode=0x054A,
- unicodeslot=0x057A,
+ uccode=0x54A,
+ unicodeslot=0x57A,
},
- [0x057B]={
+ [0x57B]={
adobename="jheharmenian",
category="ll",
description="ARMENIAN SMALL LETTER JHEH",
direction="l",
linebreak="al",
- uccode=0x054B,
- unicodeslot=0x057B,
+ uccode=0x54B,
+ unicodeslot=0x57B,
},
- [0x057C]={
+ [0x57C]={
adobename="raarmenian",
category="ll",
description="ARMENIAN SMALL LETTER RA",
direction="l",
linebreak="al",
- uccode=0x054C,
- unicodeslot=0x057C,
+ uccode=0x54C,
+ unicodeslot=0x57C,
},
- [0x057D]={
+ [0x57D]={
adobename="seharmenian",
category="ll",
description="ARMENIAN SMALL LETTER SEH",
direction="l",
linebreak="al",
- uccode=0x054D,
- unicodeslot=0x057D,
+ uccode=0x54D,
+ unicodeslot=0x57D,
},
- [0x057E]={
+ [0x57E]={
adobename="vewarmenian",
category="ll",
description="ARMENIAN SMALL LETTER VEW",
direction="l",
linebreak="al",
- uccode=0x054E,
- unicodeslot=0x057E,
+ uccode=0x54E,
+ unicodeslot=0x57E,
},
- [0x057F]={
+ [0x57F]={
adobename="tiwnarmenian",
category="ll",
description="ARMENIAN SMALL LETTER TIWN",
direction="l",
linebreak="al",
- uccode=0x054F,
- unicodeslot=0x057F,
+ uccode=0x54F,
+ unicodeslot=0x57F,
},
- [0x0580]={
+ [0x580]={
adobename="reharmenian",
category="ll",
description="ARMENIAN SMALL LETTER REH",
direction="l",
linebreak="al",
- uccode=0x0550,
- unicodeslot=0x0580,
+ uccode=0x550,
+ unicodeslot=0x580,
},
- [0x0581]={
+ [0x581]={
adobename="coarmenian",
category="ll",
description="ARMENIAN SMALL LETTER CO",
direction="l",
linebreak="al",
- uccode=0x0551,
- unicodeslot=0x0581,
+ uccode=0x551,
+ unicodeslot=0x581,
},
- [0x0582]={
+ [0x582]={
adobename="yiwnarmenian",
category="ll",
description="ARMENIAN SMALL LETTER YIWN",
direction="l",
linebreak="al",
- uccode=0x0552,
- unicodeslot=0x0582,
+ uccode=0x552,
+ unicodeslot=0x582,
},
- [0x0583]={
+ [0x583]={
adobename="piwrarmenian",
category="ll",
description="ARMENIAN SMALL LETTER PIWR",
direction="l",
linebreak="al",
- uccode=0x0553,
- unicodeslot=0x0583,
+ uccode=0x553,
+ unicodeslot=0x583,
},
- [0x0584]={
+ [0x584]={
adobename="keharmenian",
category="ll",
description="ARMENIAN SMALL LETTER KEH",
direction="l",
linebreak="al",
- uccode=0x0554,
- unicodeslot=0x0584,
+ uccode=0x554,
+ unicodeslot=0x584,
},
- [0x0585]={
+ [0x585]={
adobename="oharmenian",
category="ll",
description="ARMENIAN SMALL LETTER OH",
direction="l",
linebreak="al",
- uccode=0x0555,
- unicodeslot=0x0585,
+ uccode=0x555,
+ unicodeslot=0x585,
},
- [0x0586]={
+ [0x586]={
adobename="feharmenian",
category="ll",
description="ARMENIAN SMALL LETTER FEH",
direction="l",
linebreak="al",
- uccode=0x0556,
- unicodeslot=0x0586,
+ uccode=0x556,
+ unicodeslot=0x586,
},
- [0x0587]={
+ [0x587]={
adobename="echyiwnarmenian",
category="ll",
description="ARMENIAN SMALL LIGATURE ECH YIWN",
direction="l",
linebreak="al",
- specials={ "compat", 0x0565, 0x0582 },
- unicodeslot=0x0587,
+ specials={ "compat", 0x565, 0x582 },
+ unicodeslot=0x587,
},
- [0x0589]={
+ [0x589]={
adobename="periodarmenian",
category="po",
description="ARMENIAN FULL STOP",
direction="l",
linebreak="is",
- unicodeslot=0x0589,
+ unicodeslot=0x589,
},
- [0x058A]={
+ [0x58A]={
category="pd",
description="ARMENIAN HYPHEN",
direction="on",
linebreak="ba",
- unicodeslot=0x058A,
+ unicodeslot=0x58A,
+ },
+ [0x58D]={
+ category="so",
+ description="RIGHT-FACING ARMENIAN ETERNITY SIGN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x58D,
},
- [0x058F]={
+ [0x58E]={
+ category="so",
+ description="LEFT-FACING ARMENIAN ETERNITY SIGN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x58E,
+ },
+ [0x58F]={
category="sc",
description="ARMENIAN DRAM SIGN",
direction="et",
linebreak="pr",
- unicodeslot=0x058F,
+ unicodeslot=0x58F,
},
- [0x0591]={
+ [0x591]={
adobename="etnahtalefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT ETNAHTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0591,
+ unicodeslot=0x591,
},
- [0x0592]={
+ [0x592]={
adobename="segoltahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT SEGOL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0592,
+ unicodeslot=0x592,
},
- [0x0593]={
+ [0x593]={
adobename="shalshelethebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT SHALSHELET",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0593,
+ unicodeslot=0x593,
},
- [0x0594]={
+ [0x594]={
adobename="zaqefqatanhebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT ZAQEF QATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0594,
+ unicodeslot=0x594,
},
- [0x0595]={
+ [0x595]={
adobename="zaqefgadolhebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT ZAQEF GADOL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0595,
+ unicodeslot=0x595,
},
- [0x0596]={
+ [0x596]={
adobename="tipehalefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT TIPEHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0596,
+ unicodeslot=0x596,
},
- [0x0597]={
+ [0x597]={
adobename="reviamugrashhebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT REVIA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0597,
+ unicodeslot=0x597,
},
- [0x0598]={
+ [0x598]={
adobename="zarqahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT ZARQA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0598,
+ unicodeslot=0x598,
},
- [0x0599]={
+ [0x599]={
adobename="pashtahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT PASHTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0599,
+ unicodeslot=0x599,
},
- [0x059A]={
+ [0x59A]={
adobename="yetivhebrew",
category="mn",
+ combining=0xDE,
description="HEBREW ACCENT YETIV",
direction="nsm",
linebreak="cm",
- unicodeslot=0x059A,
+ unicodeslot=0x59A,
},
- [0x059B]={
+ [0x59B]={
adobename="tevirlefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT TEVIR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x059B,
+ unicodeslot=0x59B,
},
- [0x059C]={
+ [0x59C]={
adobename="gereshaccenthebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT GERESH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x059C,
+ unicodeslot=0x59C,
},
- [0x059D]={
+ [0x59D]={
adobename="gereshmuqdamhebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT GERESH MUQDAM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x059D,
+ unicodeslot=0x59D,
},
- [0x059E]={
+ [0x59E]={
adobename="gershayimaccenthebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT GERSHAYIM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x059E,
+ unicodeslot=0x59E,
},
- [0x059F]={
+ [0x59F]={
adobename="qarneyparahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT QARNEY PARA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x059F,
+ unicodeslot=0x59F,
},
- [0x05A0]={
+ [0x5A0]={
adobename="telishagedolahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT TELISHA GEDOLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A0,
+ unicodeslot=0x5A0,
},
- [0x05A1]={
+ [0x5A1]={
adobename="pazerhebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT PAZER",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A1,
+ unicodeslot=0x5A1,
},
- [0x05A2]={
+ [0x5A2]={
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT ATNAH HAFUKH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A2,
+ unicodeslot=0x5A2,
},
- [0x05A3]={
+ [0x5A3]={
adobename="munahlefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT MUNAH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A3,
+ unicodeslot=0x5A3,
},
- [0x05A4]={
+ [0x5A4]={
adobename="mahapakhlefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT MAHAPAKH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A4,
+ unicodeslot=0x5A4,
},
- [0x05A5]={
+ [0x5A5]={
adobename="merkhalefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT MERKHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A5,
+ unicodeslot=0x5A5,
},
- [0x05A6]={
+ [0x5A6]={
adobename="merkhakefulalefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT MERKHA KEFULA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A6,
+ unicodeslot=0x5A6,
},
- [0x05A7]={
+ [0x5A7]={
adobename="dargalefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT DARGA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A7,
+ unicodeslot=0x5A7,
},
- [0x05A8]={
+ [0x5A8]={
adobename="qadmahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT QADMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A8,
+ unicodeslot=0x5A8,
},
- [0x05A9]={
+ [0x5A9]={
adobename="telishaqetanahebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT TELISHA QETANA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05A9,
+ unicodeslot=0x5A9,
},
- [0x05AA]={
+ [0x5AA]={
adobename="yerahbenyomolefthebrew",
category="mn",
+ combining=0xDC,
description="HEBREW ACCENT YERAH BEN YOMO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05AA,
+ unicodeslot=0x5AA,
},
- [0x05AB]={
+ [0x5AB]={
adobename="olehebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT OLE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05AB,
+ unicodeslot=0x5AB,
},
- [0x05AC]={
+ [0x5AC]={
adobename="iluyhebrew",
category="mn",
+ combining=0xE6,
description="HEBREW ACCENT ILUY",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05AC,
+ unicodeslot=0x5AC,
},
- [0x05AD]={
+ [0x5AD]={
adobename="dehihebrew",
category="mn",
+ combining=0xDE,
description="HEBREW ACCENT DEHI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05AD,
+ unicodeslot=0x5AD,
},
- [0x05AE]={
+ [0x5AE]={
adobename="zinorhebrew",
category="mn",
+ combining=0xE4,
description="HEBREW ACCENT ZINOR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05AE,
+ unicodeslot=0x5AE,
},
- [0x05AF]={
+ [0x5AF]={
adobename="masoracirclehebrew",
category="mn",
+ combining=0xE6,
description="HEBREW MARK MASORA CIRCLE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05AF,
+ unicodeslot=0x5AF,
},
- [0x05B0]={
+ [0x5B0]={
adobename="shevawidehebrew",
category="mn",
+ combining=0xA,
description="HEBREW POINT SHEVA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B0,
+ unicodeslot=0x5B0,
},
- [0x05B1]={
+ [0x5B1]={
adobename="hatafsegolwidehebrew",
category="mn",
+ combining=0xB,
description="HEBREW POINT HATAF SEGOL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B1,
+ unicodeslot=0x5B1,
},
- [0x05B2]={
+ [0x5B2]={
adobename="hatafpatahwidehebrew",
category="mn",
+ combining=0xC,
description="HEBREW POINT HATAF PATAH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B2,
+ unicodeslot=0x5B2,
},
- [0x05B3]={
+ [0x5B3]={
adobename="hatafqamatswidehebrew",
category="mn",
+ combining=0xD,
description="HEBREW POINT HATAF QAMATS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B3,
+ unicodeslot=0x5B3,
},
- [0x05B4]={
+ [0x5B4]={
adobename="hiriqwidehebrew",
category="mn",
+ combining=0xE,
description="HEBREW POINT HIRIQ",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B4,
+ unicodeslot=0x5B4,
},
- [0x05B5]={
+ [0x5B5]={
adobename="tserewidehebrew",
category="mn",
+ combining=0xF,
description="HEBREW POINT TSERE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B5,
+ unicodeslot=0x5B5,
},
- [0x05B6]={
+ [0x5B6]={
adobename="segolwidehebrew",
category="mn",
+ combining=0x10,
description="HEBREW POINT SEGOL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B6,
+ unicodeslot=0x5B6,
},
- [0x05B7]={
+ [0x5B7]={
adobename="patahwidehebrew",
category="mn",
+ combining=0x11,
description="HEBREW POINT PATAH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B7,
+ unicodeslot=0x5B7,
},
- [0x05B8]={
+ [0x5B8]={
adobename="qamatswidehebrew",
category="mn",
+ combining=0x12,
description="HEBREW POINT QAMATS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B8,
+ unicodeslot=0x5B8,
},
- [0x05B9]={
+ [0x5B9]={
adobename="holamwidehebrew",
category="mn",
+ combining=0x13,
description="HEBREW POINT HOLAM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05B9,
+ unicodeslot=0x5B9,
},
- [0x05BA]={
+ [0x5BA]={
category="mn",
+ combining=0x13,
description="HEBREW POINT HOLAM HASER FOR VAV",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05BA,
+ unicodeslot=0x5BA,
},
- [0x05BB]={
+ [0x5BB]={
adobename="qubutswidehebrew",
category="mn",
+ combining=0x14,
description="HEBREW POINT QUBUTS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05BB,
+ unicodeslot=0x5BB,
},
- [0x05BC]={
+ [0x5BC]={
adobename="dageshhebrew",
category="mn",
+ combining=0x15,
description="HEBREW POINT DAGESH OR MAPIQ",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05BC,
+ unicodeslot=0x5BC,
},
- [0x05BD]={
+ [0x5BD]={
adobename="siluqlefthebrew",
category="mn",
+ combining=0x16,
description="HEBREW POINT METEG",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05BD,
+ unicodeslot=0x5BD,
},
- [0x05BE]={
+ [0x5BE]={
adobename="maqafhebrew",
category="po",
description="HEBREW PUNCTUATION MAQAF",
direction="r",
linebreak="ba",
- unicodeslot=0x05BE,
+ unicodeslot=0x5BE,
},
- [0x05BF]={
+ [0x5BF]={
adobename="rafehebrew",
category="mn",
+ combining=0x17,
description="HEBREW POINT RAFE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05BF,
+ unicodeslot=0x5BF,
},
- [0x05C0]={
+ [0x5C0]={
adobename="paseqhebrew",
category="po",
description="HEBREW PUNCTUATION PASEQ",
direction="r",
linebreak="al",
- unicodeslot=0x05C0,
+ unicodeslot=0x5C0,
},
- [0x05C1]={
+ [0x5C1]={
adobename="shindothebrew",
category="mn",
+ combining=0x18,
description="HEBREW POINT SHIN DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05C1,
+ unicodeslot=0x5C1,
},
- [0x05C2]={
+ [0x5C2]={
adobename="sindothebrew",
category="mn",
+ combining=0x19,
description="HEBREW POINT SIN DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05C2,
+ unicodeslot=0x5C2,
},
- [0x05C3]={
+ [0x5C3]={
adobename="sofpasuqhebrew",
category="po",
description="HEBREW PUNCTUATION SOF PASUQ",
direction="r",
linebreak="al",
- unicodeslot=0x05C3,
+ unicodeslot=0x5C3,
},
- [0x05C4]={
+ [0x5C4]={
adobename="upperdothebrew",
category="mn",
+ combining=0xE6,
description="HEBREW MARK UPPER DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05C4,
+ unicodeslot=0x5C4,
},
- [0x05C5]={
+ [0x5C5]={
category="mn",
+ combining=0xDC,
description="HEBREW MARK LOWER DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05C5,
+ unicodeslot=0x5C5,
},
- [0x05C6]={
+ [0x5C6]={
category="po",
description="HEBREW PUNCTUATION NUN HAFUKHA",
direction="r",
linebreak="ex",
- unicodeslot=0x05C6,
+ unicodeslot=0x5C6,
},
- [0x05C7]={
+ [0x5C7]={
category="mn",
+ combining=0x12,
description="HEBREW POINT QAMATS QATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x05C7,
+ unicodeslot=0x5C7,
},
- [0x05D0]={
+ [0x5D0]={
adobename="alefhebrew",
category="lo",
contextname="hebrewAlef",
description="HEBREW LETTER ALEF",
direction="r",
linebreak="hl",
- unicodeslot=0x05D0,
+ unicodeslot=0x5D0,
},
- [0x05D1]={
+ [0x5D1]={
adobename="bethebrew",
category="lo",
contextname="hebrewBet",
description="HEBREW LETTER BET",
direction="r",
linebreak="hl",
- unicodeslot=0x05D1,
+ unicodeslot=0x5D1,
},
- [0x05D2]={
+ [0x5D2]={
adobename="gimelhebrew",
category="lo",
contextname="hebrewGimel",
description="HEBREW LETTER GIMEL",
direction="r",
linebreak="hl",
- unicodeslot=0x05D2,
+ unicodeslot=0x5D2,
},
- [0x05D3]={
+ [0x5D3]={
adobename="dalettserehebrew",
category="lo",
contextname="hebrewDalet",
description="HEBREW LETTER DALET",
direction="r",
linebreak="hl",
- unicodeslot=0x05D3,
+ unicodeslot=0x5D3,
},
- [0x05D4]={
+ [0x5D4]={
adobename="hehebrew",
category="lo",
contextname="hebrewHe",
description="HEBREW LETTER HE",
direction="r",
linebreak="hl",
- unicodeslot=0x05D4,
+ unicodeslot=0x5D4,
},
- [0x05D5]={
+ [0x5D5]={
adobename="vavhebrew",
category="lo",
contextname="hebrewVav",
description="HEBREW LETTER VAV",
direction="r",
linebreak="hl",
- unicodeslot=0x05D5,
+ unicodeslot=0x5D5,
},
- [0x05D6]={
+ [0x5D6]={
adobename="zayinhebrew",
category="lo",
contextname="hebrewZayin",
description="HEBREW LETTER ZAYIN",
direction="r",
linebreak="hl",
- unicodeslot=0x05D6,
+ unicodeslot=0x5D6,
},
- [0x05D7]={
+ [0x5D7]={
adobename="hethebrew",
category="lo",
contextname="hebrewHet",
description="HEBREW LETTER HET",
direction="r",
linebreak="hl",
- unicodeslot=0x05D7,
+ unicodeslot=0x5D7,
},
- [0x05D8]={
+ [0x5D8]={
adobename="tethebrew",
category="lo",
contextname="hebrewTet",
description="HEBREW LETTER TET",
direction="r",
linebreak="hl",
- unicodeslot=0x05D8,
+ unicodeslot=0x5D8,
},
- [0x05D9]={
+ [0x5D9]={
adobename="yodhebrew",
category="lo",
contextname="hebrewYod",
description="HEBREW LETTER YOD",
direction="r",
linebreak="hl",
- unicodeslot=0x05D9,
+ unicodeslot=0x5D9,
},
- [0x05DA]={
+ [0x5DA]={
adobename="finalkafshevahebrew",
category="lo",
contextname="hebrewKaffinal",
description="HEBREW LETTER FINAL KAF",
direction="r",
linebreak="hl",
- unicodeslot=0x05DA,
+ unicodeslot=0x5DA,
},
- [0x05DB]={
+ [0x5DB]={
adobename="kafhebrew",
category="lo",
contextname="hebrewKaf",
description="HEBREW LETTER KAF",
direction="r",
linebreak="hl",
- unicodeslot=0x05DB,
+ unicodeslot=0x5DB,
},
- [0x05DC]={
+ [0x5DC]={
adobename="lamedholamhebrew",
category="lo",
contextname="hebrewLamed",
description="HEBREW LETTER LAMED",
direction="r",
linebreak="hl",
- unicodeslot=0x05DC,
+ unicodeslot=0x5DC,
},
- [0x05DD]={
+ [0x5DD]={
adobename="finalmemhebrew",
category="lo",
contextname="hebrewMemfinal",
description="HEBREW LETTER FINAL MEM",
direction="r",
linebreak="hl",
- unicodeslot=0x05DD,
+ unicodeslot=0x5DD,
},
- [0x05DE]={
+ [0x5DE]={
adobename="memhebrew",
category="lo",
contextname="hebrewMem",
description="HEBREW LETTER MEM",
direction="r",
linebreak="hl",
- unicodeslot=0x05DE,
+ unicodeslot=0x5DE,
},
- [0x05DF]={
+ [0x5DF]={
adobename="finalnunhebrew",
category="lo",
contextname="hebrewNunfinal",
description="HEBREW LETTER FINAL NUN",
direction="r",
linebreak="hl",
- unicodeslot=0x05DF,
+ unicodeslot=0x5DF,
},
- [0x05E0]={
+ [0x5E0]={
adobename="nunhebrew",
category="lo",
contextname="hebrewNun",
description="HEBREW LETTER NUN",
direction="r",
linebreak="hl",
- unicodeslot=0x05E0,
+ unicodeslot=0x5E0,
},
- [0x05E1]={
+ [0x5E1]={
adobename="samekhhebrew",
category="lo",
contextname="hebrewSamekh",
description="HEBREW LETTER SAMEKH",
direction="r",
linebreak="hl",
- unicodeslot=0x05E1,
+ unicodeslot=0x5E1,
},
- [0x05E2]={
+ [0x5E2]={
adobename="ayinhebrew",
category="lo",
contextname="hebrewAyin",
description="HEBREW LETTER AYIN",
direction="r",
linebreak="hl",
- unicodeslot=0x05E2,
+ unicodeslot=0x5E2,
},
- [0x05E3]={
+ [0x5E3]={
adobename="finalpehebrew",
category="lo",
contextname="hebrewPefinal",
description="HEBREW LETTER FINAL PE",
direction="r",
linebreak="hl",
- unicodeslot=0x05E3,
+ unicodeslot=0x5E3,
},
- [0x05E4]={
+ [0x5E4]={
adobename="pehebrew",
category="lo",
contextname="hebrewPe",
description="HEBREW LETTER PE",
direction="r",
linebreak="hl",
- unicodeslot=0x05E4,
+ unicodeslot=0x5E4,
},
- [0x05E5]={
+ [0x5E5]={
adobename="finaltsadihebrew",
category="lo",
contextname="hebrewTsadifinal",
description="HEBREW LETTER FINAL TSADI",
direction="r",
linebreak="hl",
- unicodeslot=0x05E5,
+ unicodeslot=0x5E5,
},
- [0x05E6]={
+ [0x5E6]={
adobename="tsadihebrew",
category="lo",
contextname="hebrewTsadi",
description="HEBREW LETTER TSADI",
direction="r",
linebreak="hl",
- unicodeslot=0x05E6,
+ unicodeslot=0x5E6,
},
- [0x05E7]={
+ [0x5E7]={
adobename="qoftserehebrew",
category="lo",
contextname="hebrewQof",
description="HEBREW LETTER QOF",
direction="r",
linebreak="hl",
- unicodeslot=0x05E7,
+ unicodeslot=0x5E7,
},
- [0x05E8]={
+ [0x5E8]={
adobename="reshtserehebrew",
category="lo",
contextname="hebrewResh",
description="HEBREW LETTER RESH",
direction="r",
linebreak="hl",
- unicodeslot=0x05E8,
+ unicodeslot=0x5E8,
},
- [0x05E9]={
+ [0x5E9]={
adobename="shinhebrew",
category="lo",
contextname="hebrewShin",
description="HEBREW LETTER SHIN",
direction="r",
linebreak="hl",
- unicodeslot=0x05E9,
+ unicodeslot=0x5E9,
},
- [0x05EA]={
+ [0x5EA]={
adobename="tavhebrew",
category="lo",
contextname="hebrewTav",
description="HEBREW LETTER TAV",
direction="r",
linebreak="hl",
- unicodeslot=0x05EA,
+ unicodeslot=0x5EA,
},
- [0x05F0]={
+ [0x5F0]={
adobename="vavvavhebrew",
category="lo",
description="HEBREW LIGATURE YIDDISH DOUBLE VAV",
direction="r",
linebreak="hl",
- unicodeslot=0x05F0,
+ unicodeslot=0x5F0,
},
- [0x05F1]={
+ [0x5F1]={
adobename="vavyodhebrew",
category="lo",
description="HEBREW LIGATURE YIDDISH VAV YOD",
direction="r",
linebreak="hl",
- unicodeslot=0x05F1,
+ unicodeslot=0x5F1,
},
- [0x05F2]={
+ [0x5F2]={
adobename="yodyodhebrew",
category="lo",
description="HEBREW LIGATURE YIDDISH DOUBLE YOD",
direction="r",
linebreak="hl",
- unicodeslot=0x05F2,
+ unicodeslot=0x5F2,
},
- [0x05F3]={
+ [0x5F3]={
adobename="gereshhebrew",
category="po",
description="HEBREW PUNCTUATION GERESH",
direction="r",
linebreak="al",
- unicodeslot=0x05F3,
+ unicodeslot=0x5F3,
},
- [0x05F4]={
+ [0x5F4]={
adobename="gershayimhebrew",
category="po",
description="HEBREW PUNCTUATION GERSHAYIM",
direction="r",
linebreak="al",
- unicodeslot=0x05F4,
+ unicodeslot=0x5F4,
},
- [0x0600]={
+ [0x600]={
arabic="u",
category="cf",
description="ARABIC NUMBER SIGN",
direction="an",
linebreak="al",
- unicodeslot=0x0600,
+ unicodeslot=0x600,
visible="yes",
},
- [0x0601]={
+ [0x601]={
arabic="u",
category="cf",
description="ARABIC SIGN SANAH",
direction="an",
linebreak="al",
- unicodeslot=0x0601,
+ unicodeslot=0x601,
visible="yes",
},
- [0x0602]={
+ [0x602]={
arabic="u",
category="cf",
description="ARABIC FOOTNOTE MARKER",
direction="an",
linebreak="al",
- unicodeslot=0x0602,
+ unicodeslot=0x602,
visible="yes",
},
- [0x0603]={
+ [0x603]={
arabic="u",
category="cf",
description="ARABIC SIGN SAFHA",
direction="an",
linebreak="al",
- unicodeslot=0x0603,
+ unicodeslot=0x603,
visible="yes",
},
- [0x0604]={
+ [0x604]={
arabic="u",
category="cf",
description="ARABIC SIGN SAMVAT",
direction="an",
linebreak="al",
- unicodeslot=0x0604,
+ unicodeslot=0x604,
+ },
+ [0x605]={
+ arabic="u",
+ category="cf",
+ description="ARABIC NUMBER MARK ABOVE",
+ direction="an",
+ linebreak="al",
+ unicodeslot=0x605,
},
- [0x0606]={
+ [0x606]={
category="sm",
description="ARABIC-INDIC CUBE ROOT",
direction="on",
linebreak="al",
- unicodeslot=0x0606,
+ unicodeslot=0x606,
},
- [0x0607]={
+ [0x607]={
category="sm",
description="ARABIC-INDIC FOURTH ROOT",
direction="on",
linebreak="al",
- unicodeslot=0x0607,
+ unicodeslot=0x607,
},
- [0x0608]={
+ [0x608]={
arabic="u",
category="sm",
description="ARABIC RAY",
direction="al",
linebreak="al",
- unicodeslot=0x0608,
+ unicodeslot=0x608,
},
- [0x0609]={
+ [0x609]={
category="po",
description="ARABIC-INDIC PER MILLE SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x0609,
+ unicodeslot=0x609,
},
- [0x060A]={
+ [0x60A]={
category="po",
description="ARABIC-INDIC PER TEN THOUSAND SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x060A,
+ unicodeslot=0x60A,
},
- [0x060B]={
+ [0x60B]={
arabic="u",
category="sc",
description="AFGHANI SIGN",
direction="al",
linebreak="po",
- unicodeslot=0x060B,
+ unicodeslot=0x60B,
},
- [0x060C]={
+ [0x60C]={
adobename="commaarabic",
category="po",
description="ARABIC COMMA",
direction="cs",
linebreak="is",
- unicodeslot=0x060C,
+ unicodeslot=0x60C,
},
- [0x060D]={
+ [0x60D]={
category="po",
description="ARABIC DATE SEPARATOR",
direction="al",
linebreak="is",
- unicodeslot=0x060D,
+ unicodeslot=0x60D,
},
- [0x060E]={
+ [0x60E]={
category="so",
description="ARABIC POETIC VERSE SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x060E,
+ unicodeslot=0x60E,
},
- [0x060F]={
+ [0x60F]={
category="so",
description="ARABIC SIGN MISRA",
direction="on",
linebreak="al",
- unicodeslot=0x060F,
+ unicodeslot=0x60F,
},
- [0x0610]={
+ [0x610]={
category="mn",
+ combining=0xE6,
description="ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0610,
+ unicodeslot=0x610,
},
- [0x0611]={
+ [0x611]={
category="mn",
+ combining=0xE6,
description="ARABIC SIGN ALAYHE ASSALLAM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0611,
+ unicodeslot=0x611,
},
- [0x0612]={
+ [0x612]={
category="mn",
+ combining=0xE6,
description="ARABIC SIGN RAHMATULLAH ALAYHE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0612,
+ unicodeslot=0x612,
},
- [0x0613]={
+ [0x613]={
category="mn",
+ combining=0xE6,
description="ARABIC SIGN RADI ALLAHOU ANHU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0613,
+ unicodeslot=0x613,
},
- [0x0614]={
+ [0x614]={
category="mn",
+ combining=0xE6,
description="ARABIC SIGN TAKHALLUS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0614,
+ unicodeslot=0x614,
},
- [0x0615]={
+ [0x615]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH TAH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0615,
+ unicodeslot=0x615,
},
- [0x0616]={
+ [0x616]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH LIGATURE ALEF WITH LAM WITH YEH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0616,
+ unicodeslot=0x616,
},
- [0x0617]={
+ [0x617]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH ZAIN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0617,
+ unicodeslot=0x617,
},
- [0x0618]={
+ [0x618]={
category="mn",
+ combining=0x1E,
description="ARABIC SMALL FATHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0618,
+ unicodeslot=0x618,
},
- [0x0619]={
+ [0x619]={
category="mn",
+ combining=0x1F,
description="ARABIC SMALL DAMMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0619,
+ unicodeslot=0x619,
},
- [0x061A]={
+ [0x61A]={
category="mn",
+ combining=0x20,
description="ARABIC SMALL KASRA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x061A,
+ unicodeslot=0x61A,
},
- [0x061B]={
+ [0x61B]={
adobename="semicolonarabic",
category="po",
description="ARABIC SEMICOLON",
direction="al",
linebreak="ex",
- unicodeslot=0x061B,
+ unicodeslot=0x61B,
},
- [0x061E]={
+ [0x61C]={
+ category="cf",
+ description="ARABIC LETTER MARK",
+ direction="al",
+ linebreak="cm",
+ unicodeslot=0x61C,
+ },
+ [0x61E]={
category="po",
description="ARABIC TRIPLE DOT PUNCTUATION MARK",
direction="al",
linebreak="ex",
- unicodeslot=0x061E,
+ unicodeslot=0x61E,
},
- [0x061F]={
+ [0x61F]={
adobename="questionarabic",
category="po",
description="ARABIC QUESTION MARK",
direction="al",
linebreak="ex",
- unicodeslot=0x061F,
+ unicodeslot=0x61F,
},
- [0x0620]={
+ [0x620]={
arabic="d",
category="lo",
description="ARABIC LETTER KASHMIRI YEH",
direction="al",
linebreak="al",
- unicodeslot=0x0620,
+ unicodeslot=0x620,
},
- [0x0621]={
+ [0x621]={
adobename="hamzasukunarabic",
arabic="u",
category="lo",
description="ARABIC LETTER HAMZA",
direction="al",
linebreak="al",
- unicodeslot=0x0621,
+ unicodeslot=0x621,
},
- [0x0622]={
+ [0x622]={
adobename="alefmaddaabovearabic",
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH MADDA ABOVE",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "char", 0x0627, 0x0653 },
- unicodeslot=0x0622,
+ shcode=0x627,
+ specials={ "char", 0x627, 0x653 },
+ unicodeslot=0x622,
},
- [0x0623]={
+ [0x623]={
adobename="alefhamzaabovearabic",
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "char", 0x0627, 0x0654 },
- unicodeslot=0x0623,
+ shcode=0x627,
+ specials={ "char", 0x627, 0x654 },
+ unicodeslot=0x623,
},
- [0x0624]={
+ [0x624]={
adobename="wawhamzaabovearabic",
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x0648,
- specials={ "char", 0x0648, 0x0654 },
- unicodeslot=0x0624,
+ shcode=0x648,
+ specials={ "char", 0x648, 0x654 },
+ unicodeslot=0x624,
},
- [0x0625]={
+ [0x625]={
adobename="alefhamzabelowarabic",
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH HAMZA BELOW",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "char", 0x0627, 0x0655 },
- unicodeslot=0x0625,
+ shcode=0x627,
+ specials={ "char", 0x627, 0x655 },
+ unicodeslot=0x625,
},
- [0x0626]={
+ [0x626]={
adobename="yehhamzaabovearabic",
arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x064A,
- specials={ "char", 0x064A, 0x0654 },
- unicodeslot=0x0626,
+ shcode=0x64A,
+ specials={ "char", 0x64A, 0x654 },
+ unicodeslot=0x626,
},
- [0x0627]={
+ [0x627]={
adobename="alefarabic",
arabic="r",
category="lo",
description="ARABIC LETTER ALEF",
direction="al",
linebreak="al",
- unicodeslot=0x0627,
+ unicodeslot=0x627,
},
- [0x0628]={
+ [0x628]={
adobename="beharabic",
arabic="d",
category="lo",
description="ARABIC LETTER BEH",
direction="al",
linebreak="al",
- unicodeslot=0x0628,
+ unicodeslot=0x628,
},
- [0x0629]={
+ [0x629]={
adobename="tehmarbutaarabic",
arabic="r",
category="lo",
description="ARABIC LETTER TEH MARBUTA",
direction="al",
linebreak="al",
- unicodeslot=0x0629,
+ unicodeslot=0x629,
},
- [0x062A]={
+ [0x62A]={
adobename="teharabic",
arabic="d",
category="lo",
description="ARABIC LETTER TEH",
direction="al",
linebreak="al",
- unicodeslot=0x062A,
+ unicodeslot=0x62A,
},
- [0x062B]={
+ [0x62B]={
adobename="theharabic",
arabic="d",
category="lo",
description="ARABIC LETTER THEH",
direction="al",
linebreak="al",
- unicodeslot=0x062B,
+ unicodeslot=0x62B,
},
- [0x062C]={
+ [0x62C]={
adobename="jeemarabic",
arabic="d",
category="lo",
description="ARABIC LETTER JEEM",
direction="al",
linebreak="al",
- unicodeslot=0x062C,
+ unicodeslot=0x62C,
},
- [0x062D]={
+ [0x62D]={
adobename="haharabic",
arabic="d",
category="lo",
description="ARABIC LETTER HAH",
direction="al",
linebreak="al",
- unicodeslot=0x062D,
+ unicodeslot=0x62D,
},
- [0x062E]={
+ [0x62E]={
adobename="khaharabic",
arabic="d",
category="lo",
description="ARABIC LETTER KHAH",
direction="al",
linebreak="al",
- unicodeslot=0x062E,
+ unicodeslot=0x62E,
},
- [0x062F]={
+ [0x62F]={
adobename="dalarabic",
arabic="r",
category="lo",
description="ARABIC LETTER DAL",
direction="al",
linebreak="al",
- unicodeslot=0x062F,
+ unicodeslot=0x62F,
},
- [0x0630]={
+ [0x630]={
adobename="thalarabic",
arabic="r",
category="lo",
description="ARABIC LETTER THAL",
direction="al",
linebreak="al",
- unicodeslot=0x0630,
+ unicodeslot=0x630,
},
- [0x0631]={
+ [0x631]={
adobename="rehyehaleflamarabic",
arabic="r",
category="lo",
description="ARABIC LETTER REH",
direction="al",
linebreak="al",
- unicodeslot=0x0631,
+ unicodeslot=0x631,
},
- [0x0632]={
+ [0x632]={
adobename="zainarabic",
arabic="r",
category="lo",
description="ARABIC LETTER ZAIN",
direction="al",
linebreak="al",
- unicodeslot=0x0632,
+ unicodeslot=0x632,
},
- [0x0633]={
+ [0x633]={
adobename="seenarabic",
arabic="d",
category="lo",
description="ARABIC LETTER SEEN",
direction="al",
linebreak="al",
- unicodeslot=0x0633,
+ unicodeslot=0x633,
},
- [0x0634]={
+ [0x634]={
adobename="sheenarabic",
arabic="d",
category="lo",
description="ARABIC LETTER SHEEN",
direction="al",
linebreak="al",
- unicodeslot=0x0634,
+ unicodeslot=0x634,
},
- [0x0635]={
+ [0x635]={
adobename="sadarabic",
arabic="d",
category="lo",
description="ARABIC LETTER SAD",
direction="al",
linebreak="al",
- unicodeslot=0x0635,
+ unicodeslot=0x635,
},
- [0x0636]={
+ [0x636]={
adobename="dadarabic",
arabic="d",
category="lo",
description="ARABIC LETTER DAD",
direction="al",
linebreak="al",
- unicodeslot=0x0636,
+ unicodeslot=0x636,
},
- [0x0637]={
+ [0x637]={
adobename="taharabic",
arabic="d",
category="lo",
description="ARABIC LETTER TAH",
direction="al",
linebreak="al",
- unicodeslot=0x0637,
+ unicodeslot=0x637,
},
- [0x0638]={
+ [0x638]={
adobename="zaharabic",
arabic="d",
category="lo",
description="ARABIC LETTER ZAH",
direction="al",
linebreak="al",
- unicodeslot=0x0638,
+ unicodeslot=0x638,
},
- [0x0639]={
+ [0x639]={
adobename="ainarabic",
arabic="d",
category="lo",
description="ARABIC LETTER AIN",
direction="al",
linebreak="al",
- unicodeslot=0x0639,
+ unicodeslot=0x639,
},
- [0x063A]={
+ [0x63A]={
adobename="ghainarabic",
arabic="d",
category="lo",
description="ARABIC LETTER GHAIN",
direction="al",
linebreak="al",
- unicodeslot=0x063A,
+ unicodeslot=0x63A,
},
- [0x063B]={
+ [0x63B]={
arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x063B,
+ unicodeslot=0x63B,
},
- [0x063C]={
+ [0x63C]={
arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH THREE DOTS BELOW",
direction="al",
linebreak="al",
- unicodeslot=0x063C,
+ unicodeslot=0x63C,
},
- [0x063D]={
+ [0x63D]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH INVERTED V",
direction="al",
linebreak="al",
- unicodeslot=0x063D,
+ unicodeslot=0x63D,
},
- [0x063E]={
+ [0x63E]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x063E,
+ unicodeslot=0x63E,
},
- [0x063F]={
+ [0x63F]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x063F,
+ unicodeslot=0x63F,
},
- [0x0640]={
+ [0x640]={
adobename="tatweelarabic",
arabic="c",
category="lm",
description="ARABIC TATWEEL",
direction="al",
linebreak="al",
- unicodeslot=0x0640,
+ unicodeslot=0x640,
},
- [0x0641]={
+ [0x641]={
adobename="feharabic",
arabic="d",
category="lo",
description="ARABIC LETTER FEH",
direction="al",
linebreak="al",
- unicodeslot=0x0641,
+ unicodeslot=0x641,
},
- [0x0642]={
+ [0x642]={
adobename="qafarabic",
arabic="d",
category="lo",
description="ARABIC LETTER QAF",
direction="al",
linebreak="al",
- unicodeslot=0x0642,
+ unicodeslot=0x642,
},
- [0x0643]={
+ [0x643]={
adobename="kafarabic",
arabic="d",
category="lo",
description="ARABIC LETTER KAF",
direction="al",
linebreak="al",
- unicodeslot=0x0643,
+ unicodeslot=0x643,
},
- [0x0644]={
+ [0x644]={
adobename="lamarabic",
arabic="d",
category="lo",
description="ARABIC LETTER LAM",
direction="al",
linebreak="al",
- unicodeslot=0x0644,
+ unicodeslot=0x644,
},
- [0x0645]={
+ [0x645]={
adobename="meemarabic",
arabic="d",
category="lo",
description="ARABIC LETTER MEEM",
direction="al",
linebreak="al",
- unicodeslot=0x0645,
+ unicodeslot=0x645,
},
- [0x0646]={
+ [0x646]={
adobename="noonarabic",
arabic="d",
category="lo",
description="ARABIC LETTER NOON",
direction="al",
linebreak="al",
- unicodeslot=0x0646,
+ unicodeslot=0x646,
},
- [0x0647]={
+ [0x647]={
adobename="heharabic",
arabic="d",
category="lo",
description="ARABIC LETTER HEH",
direction="al",
linebreak="al",
- unicodeslot=0x0647,
+ unicodeslot=0x647,
},
- [0x0648]={
+ [0x648]={
adobename="wawarabic",
arabic="r",
category="lo",
description="ARABIC LETTER WAW",
direction="al",
linebreak="al",
- unicodeslot=0x0648,
+ unicodeslot=0x648,
},
- [0x0649]={
+ [0x649]={
adobename="alefmaksuraarabic",
arabic="d",
category="lo",
description="ARABIC LETTER ALEF MAKSURA",
direction="al",
linebreak="al",
- unicodeslot=0x0649,
+ unicodeslot=0x649,
},
- [0x064A]={
+ [0x64A]={
adobename="yeharabic",
arabic="d",
category="lo",
description="ARABIC LETTER YEH",
direction="al",
linebreak="al",
- unicodeslot=0x064A,
+ unicodeslot=0x64A,
},
- [0x064B]={
+ [0x64B]={
adobename="fathatanarabic",
category="mn",
+ combining=0x1B,
description="ARABIC FATHATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x064B,
+ unicodeslot=0x64B,
},
- [0x064C]={
+ [0x64C]={
adobename="dammatanarabic",
category="mn",
+ combining=0x1C,
description="ARABIC DAMMATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x064C,
+ unicodeslot=0x64C,
},
- [0x064D]={
+ [0x64D]={
adobename="kasratanarabic",
category="mn",
+ combining=0x1D,
description="ARABIC KASRATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x064D,
+ unicodeslot=0x64D,
},
- [0x064E]={
+ [0x64E]={
adobename="fathalowarabic",
category="mn",
+ combining=0x1E,
description="ARABIC FATHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x064E,
+ unicodeslot=0x64E,
},
- [0x064F]={
+ [0x64F]={
adobename="dammalowarabic",
category="mn",
+ combining=0x1F,
description="ARABIC DAMMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x064F,
+ unicodeslot=0x64F,
},
- [0x0650]={
+ [0x650]={
adobename="kasraarabic",
category="mn",
+ combining=0x20,
description="ARABIC KASRA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0650,
+ unicodeslot=0x650,
},
- [0x0651]={
+ [0x651]={
adobename="shaddafathatanarabic",
category="mn",
+ combining=0x21,
description="ARABIC SHADDA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0651,
+ unicodeslot=0x651,
},
- [0x0652]={
+ [0x652]={
adobename="sukunarabic",
category="mn",
+ combining=0x22,
description="ARABIC SUKUN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0652,
+ unicodeslot=0x652,
},
- [0x0653]={
+ [0x653]={
category="mn",
+ combining=0xE6,
description="ARABIC MADDAH ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0653,
+ unicodeslot=0x653,
},
- [0x0654]={
+ [0x654]={
category="mn",
+ combining=0xE6,
description="ARABIC HAMZA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0654,
+ unicodeslot=0x654,
},
- [0x0655]={
+ [0x655]={
category="mn",
+ combining=0xDC,
description="ARABIC HAMZA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0655,
+ unicodeslot=0x655,
},
- [0x0656]={
+ [0x656]={
category="mn",
+ combining=0xDC,
description="ARABIC SUBSCRIPT ALEF",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0656,
+ unicodeslot=0x656,
},
- [0x0657]={
+ [0x657]={
category="mn",
+ combining=0xE6,
description="ARABIC INVERTED DAMMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0657,
+ unicodeslot=0x657,
},
- [0x0658]={
+ [0x658]={
category="mn",
+ combining=0xE6,
description="ARABIC MARK NOON GHUNNA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0658,
+ unicodeslot=0x658,
},
- [0x0659]={
+ [0x659]={
category="mn",
+ combining=0xE6,
description="ARABIC ZWARAKAY",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0659,
+ unicodeslot=0x659,
},
- [0x065A]={
+ [0x65A]={
category="mn",
+ combining=0xE6,
description="ARABIC VOWEL SIGN SMALL V ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x065A,
+ unicodeslot=0x65A,
},
- [0x065B]={
+ [0x65B]={
category="mn",
+ combining=0xE6,
description="ARABIC VOWEL SIGN INVERTED SMALL V ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x065B,
+ unicodeslot=0x65B,
},
- [0x065C]={
+ [0x65C]={
category="mn",
+ combining=0xDC,
description="ARABIC VOWEL SIGN DOT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x065C,
+ unicodeslot=0x65C,
},
- [0x065D]={
+ [0x65D]={
category="mn",
+ combining=0xE6,
description="ARABIC REVERSED DAMMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x065D,
+ unicodeslot=0x65D,
},
- [0x065E]={
+ [0x65E]={
category="mn",
+ combining=0xE6,
description="ARABIC FATHA WITH TWO DOTS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x065E,
+ unicodeslot=0x65E,
},
- [0x065F]={
+ [0x65F]={
category="mn",
+ combining=0xDC,
description="ARABIC WAVY HAMZA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x065F,
+ unicodeslot=0x65F,
},
- [0x0660]={
+ [0x660]={
adobename="zerohackarabic",
category="nd",
description="ARABIC-INDIC DIGIT ZERO",
direction="an",
linebreak="nu",
- unicodeslot=0x0660,
+ unicodeslot=0x660,
},
- [0x0661]={
+ [0x661]={
adobename="onehackarabic",
category="nd",
description="ARABIC-INDIC DIGIT ONE",
direction="an",
linebreak="nu",
- unicodeslot=0x0661,
+ unicodeslot=0x661,
},
- [0x0662]={
+ [0x662]={
adobename="twohackarabic",
category="nd",
description="ARABIC-INDIC DIGIT TWO",
direction="an",
linebreak="nu",
- unicodeslot=0x0662,
+ unicodeslot=0x662,
},
- [0x0663]={
+ [0x663]={
adobename="threehackarabic",
category="nd",
description="ARABIC-INDIC DIGIT THREE",
direction="an",
linebreak="nu",
- unicodeslot=0x0663,
+ unicodeslot=0x663,
},
- [0x0664]={
+ [0x664]={
adobename="fourhackarabic",
category="nd",
description="ARABIC-INDIC DIGIT FOUR",
direction="an",
linebreak="nu",
- unicodeslot=0x0664,
+ unicodeslot=0x664,
},
- [0x0665]={
+ [0x665]={
adobename="fivehackarabic",
category="nd",
description="ARABIC-INDIC DIGIT FIVE",
direction="an",
linebreak="nu",
- unicodeslot=0x0665,
+ unicodeslot=0x665,
},
- [0x0666]={
+ [0x666]={
adobename="sixhackarabic",
category="nd",
description="ARABIC-INDIC DIGIT SIX",
direction="an",
linebreak="nu",
- unicodeslot=0x0666,
+ unicodeslot=0x666,
},
- [0x0667]={
+ [0x667]={
adobename="sevenhackarabic",
category="nd",
description="ARABIC-INDIC DIGIT SEVEN",
direction="an",
linebreak="nu",
- unicodeslot=0x0667,
+ unicodeslot=0x667,
},
- [0x0668]={
+ [0x668]={
adobename="eighthackarabic",
category="nd",
description="ARABIC-INDIC DIGIT EIGHT",
direction="an",
linebreak="nu",
- unicodeslot=0x0668,
+ unicodeslot=0x668,
},
- [0x0669]={
+ [0x669]={
adobename="ninehackarabic",
category="nd",
description="ARABIC-INDIC DIGIT NINE",
direction="an",
linebreak="nu",
- unicodeslot=0x0669,
+ unicodeslot=0x669,
},
- [0x066A]={
+ [0x66A]={
adobename="percentarabic",
category="po",
description="ARABIC PERCENT SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x066A,
+ unicodeslot=0x66A,
},
- [0x066B]={
+ [0x66B]={
adobename="decimalseparatorpersian",
category="po",
description="ARABIC DECIMAL SEPARATOR",
direction="an",
linebreak="nu",
- unicodeslot=0x066B,
+ unicodeslot=0x66B,
},
- [0x066C]={
+ [0x66C]={
adobename="thousandsseparatorpersian",
category="po",
description="ARABIC THOUSANDS SEPARATOR",
direction="an",
linebreak="nu",
- unicodeslot=0x066C,
+ unicodeslot=0x66C,
},
- [0x066D]={
+ [0x66D]={
adobename="asteriskarabic",
category="po",
description="ARABIC FIVE POINTED STAR",
direction="al",
linebreak="al",
- unicodeslot=0x066D,
+ unicodeslot=0x66D,
},
- [0x066E]={
+ [0x66E]={
arabic="d",
category="lo",
description="ARABIC LETTER DOTLESS BEH",
direction="al",
linebreak="al",
- unicodeslot=0x066E,
+ unicodeslot=0x66E,
},
- [0x066F]={
+ [0x66F]={
arabic="d",
category="lo",
description="ARABIC LETTER DOTLESS QAF",
direction="al",
linebreak="al",
- unicodeslot=0x066F,
+ unicodeslot=0x66F,
},
- [0x0670]={
+ [0x670]={
category="mn",
+ combining=0x23,
description="ARABIC LETTER SUPERSCRIPT ALEF",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0670,
+ unicodeslot=0x670,
},
- [0x0671]={
+ [0x671]={
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WASLA",
direction="al",
linebreak="al",
- unicodeslot=0x0671,
+ unicodeslot=0x671,
},
- [0x0672]={
+ [0x672]={
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH WAVY HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x0627,
- unicodeslot=0x0672,
+ shcode=0x627,
+ unicodeslot=0x672,
},
- [0x0673]={
+ [0x673]={
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH WAVY HAMZA BELOW",
direction="al",
linebreak="al",
- shcode=0x0627,
- unicodeslot=0x0673,
+ shcode=0x627,
+ unicodeslot=0x673,
},
- [0x0674]={
+ [0x674]={
arabic="u",
category="lo",
description="ARABIC LETTER HIGH HAMZA",
direction="al",
linebreak="al",
- unicodeslot=0x0674,
+ unicodeslot=0x674,
},
- [0x0675]={
+ [0x675]={
arabic="r",
category="lo",
description="ARABIC LETTER HIGH HAMZA ALEF",
direction="al",
linebreak="al",
- specials={ "compat", 0x0627, 0x0674 },
- unicodeslot=0x0675,
+ specials={ "compat", 0x627, 0x674 },
+ unicodeslot=0x675,
},
- [0x0676]={
+ [0x676]={
arabic="r",
category="lo",
description="ARABIC LETTER HIGH HAMZA WAW",
direction="al",
linebreak="al",
- specials={ "compat", 0x0648, 0x0674 },
- unicodeslot=0x0676,
+ specials={ "compat", 0x648, 0x674 },
+ unicodeslot=0x676,
},
- [0x0677]={
+ [0x677]={
arabic="r",
category="lo",
description="ARABIC LETTER U WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x06C7,
- specials={ "compat", 0x06C7, 0x0674 },
- unicodeslot=0x0677,
+ shcode=0x6C7,
+ specials={ "compat", 0x6C7, 0x674 },
+ unicodeslot=0x677,
},
- [0x0678]={
+ [0x678]={
arabic="d",
category="lo",
description="ARABIC LETTER HIGH HAMZA YEH",
direction="al",
linebreak="al",
- specials={ "compat", 0x064A, 0x0674 },
- unicodeslot=0x0678,
+ specials={ "compat", 0x64A, 0x674 },
+ unicodeslot=0x678,
},
- [0x0679]={
+ [0x679]={
adobename="tteharabic",
arabic="d",
category="lo",
description="ARABIC LETTER TTEH",
direction="al",
linebreak="al",
- unicodeslot=0x0679,
+ unicodeslot=0x679,
},
- [0x067A]={
+ [0x67A]={
arabic="d",
category="lo",
description="ARABIC LETTER TTEHEH",
direction="al",
linebreak="al",
- unicodeslot=0x067A,
+ unicodeslot=0x67A,
},
- [0x067B]={
+ [0x67B]={
arabic="d",
category="lo",
description="ARABIC LETTER BEEH",
direction="al",
linebreak="al",
- unicodeslot=0x067B,
+ unicodeslot=0x67B,
},
- [0x067C]={
+ [0x67C]={
arabic="d",
category="lo",
description="ARABIC LETTER TEH WITH RING",
direction="al",
linebreak="al",
- shcode=0x062A,
- unicodeslot=0x067C,
+ shcode=0x62A,
+ unicodeslot=0x67C,
},
- [0x067D]={
+ [0x67D]={
arabic="d",
category="lo",
description="ARABIC LETTER TEH WITH THREE DOTS ABOVE DOWNWARDS",
direction="al",
linebreak="al",
- shcode=0x062A,
- unicodeslot=0x067D,
+ shcode=0x62A,
+ unicodeslot=0x67D,
},
- [0x067E]={
+ [0x67E]={
adobename="peharabic",
arabic="d",
category="lo",
description="ARABIC LETTER PEH",
direction="al",
linebreak="al",
- unicodeslot=0x067E,
+ unicodeslot=0x67E,
},
- [0x067F]={
+ [0x67F]={
arabic="d",
category="lo",
description="ARABIC LETTER TEHEH",
direction="al",
linebreak="al",
- unicodeslot=0x067F,
+ unicodeslot=0x67F,
},
- [0x0680]={
+ [0x680]={
arabic="d",
category="lo",
description="ARABIC LETTER BEHEH",
direction="al",
linebreak="al",
- unicodeslot=0x0680,
+ unicodeslot=0x680,
},
- [0x0681]={
+ [0x681]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x062D,
- unicodeslot=0x0681,
+ shcode=0x62D,
+ unicodeslot=0x681,
},
- [0x0682]={
+ [0x682]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH TWO DOTS VERTICAL ABOVE",
direction="al",
linebreak="al",
- shcode=0x062D,
- unicodeslot=0x0682,
+ shcode=0x62D,
+ unicodeslot=0x682,
},
- [0x0683]={
+ [0x683]={
arabic="d",
category="lo",
description="ARABIC LETTER NYEH",
direction="al",
linebreak="al",
- unicodeslot=0x0683,
+ unicodeslot=0x683,
},
- [0x0684]={
+ [0x684]={
arabic="d",
category="lo",
description="ARABIC LETTER DYEH",
direction="al",
linebreak="al",
- unicodeslot=0x0684,
+ unicodeslot=0x684,
},
- [0x0685]={
+ [0x685]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x062D,
- unicodeslot=0x0685,
+ shcode=0x62D,
+ unicodeslot=0x685,
},
- [0x0686]={
+ [0x686]={
adobename="tcheharabic",
arabic="d",
category="lo",
description="ARABIC LETTER TCHEH",
direction="al",
linebreak="al",
- unicodeslot=0x0686,
+ unicodeslot=0x686,
},
- [0x0687]={
+ [0x687]={
arabic="d",
category="lo",
description="ARABIC LETTER TCHEHEH",
direction="al",
linebreak="al",
- unicodeslot=0x0687,
+ unicodeslot=0x687,
},
- [0x0688]={
+ [0x688]={
adobename="ddalarabic",
arabic="r",
category="lo",
description="ARABIC LETTER DDAL",
direction="al",
linebreak="al",
- unicodeslot=0x0688,
+ unicodeslot=0x688,
},
- [0x0689]={
+ [0x689]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH RING",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x0689,
+ shcode=0x62F,
+ unicodeslot=0x689,
},
- [0x068A]={
+ [0x68A]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x068A,
+ shcode=0x62F,
+ unicodeslot=0x68A,
},
- [0x068B]={
+ [0x68B]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH DOT BELOW AND SMALL TAH",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x068B,
+ shcode=0x62F,
+ unicodeslot=0x68B,
},
- [0x068C]={
+ [0x68C]={
arabic="r",
category="lo",
description="ARABIC LETTER DAHAL",
direction="al",
linebreak="al",
- unicodeslot=0x068C,
+ unicodeslot=0x68C,
},
- [0x068D]={
+ [0x68D]={
arabic="r",
category="lo",
description="ARABIC LETTER DDAHAL",
direction="al",
linebreak="al",
- unicodeslot=0x068D,
+ unicodeslot=0x68D,
},
- [0x068E]={
+ [0x68E]={
arabic="r",
category="lo",
description="ARABIC LETTER DUL",
direction="al",
linebreak="al",
- unicodeslot=0x068E,
+ unicodeslot=0x68E,
},
- [0x068F]={
+ [0x68F]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH THREE DOTS ABOVE DOWNWARDS",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x068F,
+ shcode=0x62F,
+ unicodeslot=0x68F,
},
- [0x0690]={
+ [0x690]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH FOUR DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x0690,
+ shcode=0x62F,
+ unicodeslot=0x690,
},
- [0x0691]={
+ [0x691]={
adobename="rreharabic",
arabic="r",
category="lo",
description="ARABIC LETTER RREH",
direction="al",
linebreak="al",
- unicodeslot=0x0691,
+ unicodeslot=0x691,
},
- [0x0692]={
+ [0x692]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH SMALL V",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0692,
+ shcode=0x631,
+ unicodeslot=0x692,
},
- [0x0693]={
+ [0x693]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH RING",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0693,
+ shcode=0x631,
+ unicodeslot=0x693,
},
- [0x0694]={
+ [0x694]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0694,
+ shcode=0x631,
+ unicodeslot=0x694,
},
- [0x0695]={
+ [0x695]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH SMALL V BELOW",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0695,
+ shcode=0x631,
+ unicodeslot=0x695,
},
- [0x0696]={
+ [0x696]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH DOT BELOW AND DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0696,
+ shcode=0x631,
+ unicodeslot=0x696,
},
- [0x0697]={
+ [0x697]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0697,
+ shcode=0x631,
+ unicodeslot=0x697,
},
- [0x0698]={
+ [0x698]={
adobename="jeharabic",
arabic="r",
category="lo",
description="ARABIC LETTER JEH",
direction="al",
linebreak="al",
- unicodeslot=0x0698,
+ unicodeslot=0x698,
},
- [0x0699]={
+ [0x699]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH FOUR DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x0699,
+ shcode=0x631,
+ unicodeslot=0x699,
},
- [0x069A]={
+ [0x69A]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH DOT BELOW AND DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0633,
- unicodeslot=0x069A,
+ shcode=0x633,
+ unicodeslot=0x69A,
},
- [0x069B]={
+ [0x69B]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH THREE DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0633,
- unicodeslot=0x069B,
+ shcode=0x633,
+ unicodeslot=0x69B,
},
- [0x069C]={
+ [0x69C]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH THREE DOTS BELOW AND THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0633,
- unicodeslot=0x069C,
+ shcode=0x633,
+ unicodeslot=0x69C,
},
- [0x069D]={
+ [0x69D]={
arabic="d",
category="lo",
description="ARABIC LETTER SAD WITH TWO DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0635,
- unicodeslot=0x069D,
+ shcode=0x635,
+ unicodeslot=0x69D,
},
- [0x069E]={
+ [0x69E]={
arabic="d",
category="lo",
description="ARABIC LETTER SAD WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0635,
- unicodeslot=0x069E,
+ shcode=0x635,
+ unicodeslot=0x69E,
},
- [0x069F]={
+ [0x69F]={
arabic="d",
category="lo",
description="ARABIC LETTER TAH WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0637,
- unicodeslot=0x069F,
+ shcode=0x637,
+ unicodeslot=0x69F,
},
- [0x06A0]={
+ [0x6A0]={
arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0639,
- unicodeslot=0x06A0,
+ shcode=0x639,
+ unicodeslot=0x6A0,
},
- [0x06A1]={
+ [0x6A1]={
arabic="d",
category="lo",
description="ARABIC LETTER DOTLESS FEH",
direction="al",
linebreak="al",
- unicodeslot=0x06A1,
+ unicodeslot=0x6A1,
},
- [0x06A2]={
+ [0x6A2]={
arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH DOT MOVED BELOW",
direction="al",
linebreak="al",
- shcode=0x0641,
- unicodeslot=0x06A2,
+ shcode=0x641,
+ unicodeslot=0x6A2,
},
- [0x06A3]={
+ [0x6A3]={
arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x0641,
- unicodeslot=0x06A3,
+ shcode=0x641,
+ unicodeslot=0x6A3,
},
- [0x06A4]={
+ [0x6A4]={
adobename="veharabic",
arabic="d",
category="lo",
description="ARABIC LETTER VEH",
direction="al",
linebreak="al",
- unicodeslot=0x06A4,
+ unicodeslot=0x6A4,
},
- [0x06A5]={
+ [0x6A5]={
arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH THREE DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0641,
- unicodeslot=0x06A5,
+ shcode=0x641,
+ unicodeslot=0x6A5,
},
- [0x06A6]={
+ [0x6A6]={
arabic="d",
category="lo",
description="ARABIC LETTER PEHEH",
direction="al",
linebreak="al",
- unicodeslot=0x06A6,
+ unicodeslot=0x6A6,
},
- [0x06A7]={
+ [0x6A7]={
arabic="d",
category="lo",
description="ARABIC LETTER QAF WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0642,
- unicodeslot=0x06A7,
+ shcode=0x642,
+ unicodeslot=0x6A7,
},
- [0x06A8]={
+ [0x6A8]={
arabic="d",
category="lo",
description="ARABIC LETTER QAF WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0642,
- unicodeslot=0x06A8,
+ shcode=0x642,
+ unicodeslot=0x6A8,
},
- [0x06A9]={
+ [0x6A9]={
arabic="d",
category="lo",
description="ARABIC LETTER KEHEH",
direction="al",
linebreak="al",
- unicodeslot=0x06A9,
+ unicodeslot=0x6A9,
},
- [0x06AA]={
+ [0x6AA]={
arabic="d",
category="lo",
description="ARABIC LETTER SWASH KAF",
direction="al",
linebreak="al",
- unicodeslot=0x06AA,
+ unicodeslot=0x6AA,
},
- [0x06AB]={
+ [0x6AB]={
arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH RING",
direction="al",
linebreak="al",
- shcode=0x0643,
- unicodeslot=0x06AB,
+ shcode=0x643,
+ unicodeslot=0x6AB,
},
- [0x06AC]={
+ [0x6AC]={
arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0643,
- unicodeslot=0x06AC,
+ shcode=0x643,
+ unicodeslot=0x6AC,
},
- [0x06AD]={
+ [0x6AD]={
arabic="d",
category="lo",
description="ARABIC LETTER NG",
direction="al",
linebreak="al",
- unicodeslot=0x06AD,
+ unicodeslot=0x6AD,
},
- [0x06AE]={
+ [0x6AE]={
arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH THREE DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0643,
- unicodeslot=0x06AE,
+ shcode=0x643,
+ unicodeslot=0x6AE,
},
- [0x06AF]={
+ [0x6AF]={
adobename="gafarabic",
arabic="d",
category="lo",
description="ARABIC LETTER GAF",
direction="al",
linebreak="al",
- unicodeslot=0x06AF,
+ unicodeslot=0x6AF,
},
- [0x06B0]={
+ [0x6B0]={
arabic="d",
category="lo",
description="ARABIC LETTER GAF WITH RING",
direction="al",
linebreak="al",
- shcode=0x06AF,
- unicodeslot=0x06B0,
+ shcode=0x6AF,
+ unicodeslot=0x6B0,
},
- [0x06B1]={
+ [0x6B1]={
arabic="d",
category="lo",
description="ARABIC LETTER NGOEH",
direction="al",
linebreak="al",
- unicodeslot=0x06B1,
+ unicodeslot=0x6B1,
},
- [0x06B2]={
+ [0x6B2]={
arabic="d",
category="lo",
description="ARABIC LETTER GAF WITH TWO DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x06AF,
- unicodeslot=0x06B2,
+ shcode=0x6AF,
+ unicodeslot=0x6B2,
},
- [0x06B3]={
+ [0x6B3]={
arabic="d",
category="lo",
description="ARABIC LETTER GUEH",
direction="al",
linebreak="al",
- unicodeslot=0x06B3,
+ unicodeslot=0x6B3,
},
- [0x06B4]={
+ [0x6B4]={
arabic="d",
category="lo",
description="ARABIC LETTER GAF WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x06AF,
- unicodeslot=0x06B4,
+ shcode=0x6AF,
+ unicodeslot=0x6B4,
},
- [0x06B5]={
+ [0x6B5]={
arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH SMALL V",
direction="al",
linebreak="al",
- shcode=0x0644,
- unicodeslot=0x06B5,
+ shcode=0x644,
+ unicodeslot=0x6B5,
},
- [0x06B6]={
+ [0x6B6]={
arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0644,
- unicodeslot=0x06B6,
+ shcode=0x644,
+ unicodeslot=0x6B6,
},
- [0x06B7]={
+ [0x6B7]={
arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0644,
- unicodeslot=0x06B7,
+ shcode=0x644,
+ unicodeslot=0x6B7,
},
- [0x06B8]={
+ [0x6B8]={
arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH THREE DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0644,
- unicodeslot=0x06B8,
+ shcode=0x644,
+ unicodeslot=0x6B8,
},
- [0x06B9]={
+ [0x6B9]={
arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x0646,
- unicodeslot=0x06B9,
+ shcode=0x646,
+ unicodeslot=0x6B9,
},
- [0x06BA]={
+ [0x6BA]={
adobename="noonghunnaarabic",
arabic="d",
category="lo",
description="ARABIC LETTER NOON GHUNNA",
direction="al",
linebreak="al",
- unicodeslot=0x06BA,
+ unicodeslot=0x6BA,
},
- [0x06BB]={
+ [0x6BB]={
arabic="d",
category="lo",
description="ARABIC LETTER RNOON",
direction="al",
linebreak="al",
- unicodeslot=0x06BB,
+ unicodeslot=0x6BB,
},
- [0x06BC]={
+ [0x6BC]={
arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH RING",
direction="al",
linebreak="al",
- shcode=0x0646,
- unicodeslot=0x06BC,
+ shcode=0x646,
+ unicodeslot=0x6BC,
},
- [0x06BD]={
+ [0x6BD]={
arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0646,
- unicodeslot=0x06BD,
+ shcode=0x646,
+ unicodeslot=0x6BD,
},
- [0x06BE]={
+ [0x6BE]={
arabic="d",
category="lo",
description="ARABIC LETTER HEH DOACHASHMEE",
direction="al",
linebreak="al",
- unicodeslot=0x06BE,
+ unicodeslot=0x6BE,
},
- [0x06BF]={
+ [0x6BF]={
arabic="d",
category="lo",
description="ARABIC LETTER TCHEH WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0686,
- unicodeslot=0x06BF,
+ shcode=0x686,
+ unicodeslot=0x6BF,
},
- [0x06C0]={
+ [0x6C0]={
arabic="r",
category="lo",
description="ARABIC LETTER HEH WITH YEH ABOVE",
direction="al",
linebreak="al",
- shcode=0x0647,
- specials={ "char", 0x06D5, 0x0654 },
- unicodeslot=0x06C0,
+ shcode=0x647,
+ specials={ "char", 0x6D5, 0x654 },
+ unicodeslot=0x6C0,
},
- [0x06C1]={
+ [0x6C1]={
adobename="hehaltonearabic",
arabic="d",
category="lo",
description="ARABIC LETTER HEH GOAL",
direction="al",
linebreak="al",
- unicodeslot=0x06C1,
+ unicodeslot=0x6C1,
},
- [0x06C2]={
+ [0x6C2]={
arabic="d",
category="lo",
description="ARABIC LETTER HEH GOAL WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- specials={ "char", 0x06C1, 0x0654 },
- unicodeslot=0x06C2,
+ specials={ "char", 0x6C1, 0x654 },
+ unicodeslot=0x6C2,
},
- [0x06C3]={
+ [0x6C3]={
arabic="r",
category="lo",
description="ARABIC LETTER TEH MARBUTA GOAL",
direction="al",
linebreak="al",
- unicodeslot=0x06C3,
+ unicodeslot=0x6C3,
},
- [0x06C4]={
+ [0x6C4]={
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH RING",
direction="al",
linebreak="al",
- shcode=0x0648,
- unicodeslot=0x06C4,
+ shcode=0x648,
+ unicodeslot=0x6C4,
},
- [0x06C5]={
+ [0x6C5]={
arabic="r",
category="lo",
description="ARABIC LETTER KIRGHIZ OE",
direction="al",
linebreak="al",
- unicodeslot=0x06C5,
+ unicodeslot=0x6C5,
},
- [0x06C6]={
+ [0x6C6]={
arabic="r",
category="lo",
description="ARABIC LETTER OE",
direction="al",
linebreak="al",
- unicodeslot=0x06C6,
+ unicodeslot=0x6C6,
},
- [0x06C7]={
+ [0x6C7]={
arabic="r",
category="lo",
description="ARABIC LETTER U",
direction="al",
linebreak="al",
- unicodeslot=0x06C7,
+ unicodeslot=0x6C7,
},
- [0x06C8]={
+ [0x6C8]={
arabic="r",
category="lo",
description="ARABIC LETTER YU",
direction="al",
linebreak="al",
- unicodeslot=0x06C8,
+ unicodeslot=0x6C8,
},
- [0x06C9]={
+ [0x6C9]={
arabic="r",
category="lo",
description="ARABIC LETTER KIRGHIZ YU",
direction="al",
linebreak="al",
- unicodeslot=0x06C9,
+ unicodeslot=0x6C9,
},
- [0x06CA]={
+ [0x6CA]={
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0648,
- unicodeslot=0x06CA,
+ shcode=0x648,
+ unicodeslot=0x6CA,
},
- [0x06CB]={
+ [0x6CB]={
arabic="r",
category="lo",
description="ARABIC LETTER VE",
direction="al",
linebreak="al",
- unicodeslot=0x06CB,
+ unicodeslot=0x6CB,
},
- [0x06CC]={
+ [0x6CC]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH",
direction="al",
linebreak="al",
- unicodeslot=0x06CC,
+ unicodeslot=0x6CC,
},
- [0x06CD]={
+ [0x6CD]={
arabic="r",
category="lo",
description="ARABIC LETTER YEH WITH TAIL",
direction="al",
linebreak="al",
- shcode=0x064A,
- unicodeslot=0x06CD,
+ shcode=0x64A,
+ unicodeslot=0x6CD,
},
- [0x06CE]={
+ [0x6CE]={
arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH SMALL V",
direction="al",
linebreak="al",
- shcode=0x064A,
- unicodeslot=0x06CE,
+ shcode=0x64A,
+ unicodeslot=0x6CE,
},
- [0x06CF]={
+ [0x6CF]={
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0648,
- unicodeslot=0x06CF,
+ shcode=0x648,
+ unicodeslot=0x6CF,
},
- [0x06D0]={
+ [0x6D0]={
arabic="d",
category="lo",
description="ARABIC LETTER E",
direction="al",
linebreak="al",
- unicodeslot=0x06D0,
+ unicodeslot=0x6D0,
},
- [0x06D1]={
+ [0x6D1]={
adobename="yehthreedotsbelowarabic",
arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH THREE DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x064A,
- unicodeslot=0x06D1,
+ shcode=0x64A,
+ unicodeslot=0x6D1,
},
- [0x06D2]={
+ [0x6D2]={
adobename="yehbarreearabic",
arabic="r",
category="lo",
description="ARABIC LETTER YEH BARREE",
direction="al",
linebreak="al",
- unicodeslot=0x06D2,
+ unicodeslot=0x6D2,
},
- [0x06D3]={
+ [0x6D3]={
arabic="r",
category="lo",
description="ARABIC LETTER YEH BARREE WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- specials={ "char", 0x06D2, 0x0654 },
- unicodeslot=0x06D3,
+ specials={ "char", 0x6D2, 0x654 },
+ unicodeslot=0x6D3,
},
- [0x06D4]={
+ [0x6D4]={
category="po",
description="ARABIC FULL STOP",
direction="al",
linebreak="ex",
- unicodeslot=0x06D4,
+ unicodeslot=0x6D4,
},
- [0x06D5]={
+ [0x6D5]={
adobename="afii57534",
arabic="r",
category="lo",
description="ARABIC LETTER AE",
direction="al",
linebreak="al",
- unicodeslot=0x06D5,
+ unicodeslot=0x6D5,
},
- [0x06D6]={
+ [0x6D6]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06D6,
+ unicodeslot=0x6D6,
},
- [0x06D7]={
+ [0x6D7]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH LIGATURE QAF WITH LAM WITH ALEF MAKSURA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06D7,
+ unicodeslot=0x6D7,
},
- [0x06D8]={
+ [0x6D8]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH MEEM INITIAL FORM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06D8,
+ unicodeslot=0x6D8,
},
- [0x06D9]={
+ [0x6D9]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH LAM ALEF",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06D9,
+ unicodeslot=0x6D9,
},
- [0x06DA]={
+ [0x6DA]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH JEEM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06DA,
+ unicodeslot=0x6DA,
},
- [0x06DB]={
+ [0x6DB]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH THREE DOTS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06DB,
+ unicodeslot=0x6DB,
},
- [0x06DC]={
+ [0x6DC]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH SEEN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06DC,
+ unicodeslot=0x6DC,
},
- [0x06DD]={
+ [0x6DD]={
arabic="u",
category="cf",
description="ARABIC END OF AYAH",
direction="an",
linebreak="al",
- unicodeslot=0x06DD,
+ unicodeslot=0x6DD,
visible="yes",
},
- [0x06DE]={
+ [0x6DE]={
category="me",
description="ARABIC START OF RUB EL HIZB",
direction="on",
linebreak="al",
- unicodeslot=0x06DE,
+ unicodeslot=0x6DE,
},
- [0x06DF]={
+ [0x6DF]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH ROUNDED ZERO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06DF,
+ unicodeslot=0x6DF,
},
- [0x06E0]={
+ [0x6E0]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH UPRIGHT RECTANGULAR ZERO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E0,
+ unicodeslot=0x6E0,
},
- [0x06E1]={
+ [0x6E1]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH DOTLESS HEAD OF KHAH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E1,
+ unicodeslot=0x6E1,
},
- [0x06E2]={
+ [0x6E2]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH MEEM ISOLATED FORM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E2,
+ unicodeslot=0x6E2,
},
- [0x06E3]={
+ [0x6E3]={
category="mn",
+ combining=0xDC,
description="ARABIC SMALL LOW SEEN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E3,
+ unicodeslot=0x6E3,
},
- [0x06E4]={
+ [0x6E4]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH MADDA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E4,
+ unicodeslot=0x6E4,
},
- [0x06E5]={
+ [0x6E5]={
category="lm",
description="ARABIC SMALL WAW",
direction="al",
linebreak="al",
- unicodeslot=0x06E5,
+ unicodeslot=0x6E5,
},
- [0x06E6]={
+ [0x6E6]={
category="lm",
description="ARABIC SMALL YEH",
direction="al",
linebreak="al",
- unicodeslot=0x06E6,
+ unicodeslot=0x6E6,
},
- [0x06E7]={
+ [0x6E7]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH YEH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E7,
+ unicodeslot=0x6E7,
},
- [0x06E8]={
+ [0x6E8]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH NOON",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06E8,
+ unicodeslot=0x6E8,
},
- [0x06E9]={
+ [0x6E9]={
category="so",
description="ARABIC PLACE OF SAJDAH",
direction="on",
linebreak="al",
- unicodeslot=0x06E9,
+ unicodeslot=0x6E9,
},
- [0x06EA]={
+ [0x6EA]={
category="mn",
+ combining=0xDC,
description="ARABIC EMPTY CENTRE LOW STOP",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06EA,
+ unicodeslot=0x6EA,
},
- [0x06EB]={
+ [0x6EB]={
category="mn",
+ combining=0xE6,
description="ARABIC EMPTY CENTRE HIGH STOP",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06EB,
+ unicodeslot=0x6EB,
},
- [0x06EC]={
+ [0x6EC]={
category="mn",
+ combining=0xE6,
description="ARABIC ROUNDED HIGH STOP WITH FILLED CENTRE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06EC,
+ unicodeslot=0x6EC,
},
- [0x06ED]={
+ [0x6ED]={
category="mn",
+ combining=0xDC,
description="ARABIC SMALL LOW MEEM",
direction="nsm",
linebreak="cm",
- unicodeslot=0x06ED,
+ unicodeslot=0x6ED,
},
- [0x06EE]={
+ [0x6EE]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH INVERTED V",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x06EE,
+ shcode=0x62F,
+ unicodeslot=0x6EE,
},
- [0x06EF]={
+ [0x6EF]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH INVERTED V",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x06EF,
+ shcode=0x631,
+ unicodeslot=0x6EF,
},
- [0x06F0]={
+ [0x6F0]={
adobename="zeropersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT ZERO",
direction="en",
linebreak="nu",
- unicodeslot=0x06F0,
+ unicodeslot=0x6F0,
},
- [0x06F1]={
+ [0x6F1]={
adobename="onepersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT ONE",
direction="en",
linebreak="nu",
- unicodeslot=0x06F1,
+ unicodeslot=0x6F1,
},
- [0x06F2]={
+ [0x6F2]={
adobename="twopersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT TWO",
direction="en",
linebreak="nu",
- unicodeslot=0x06F2,
+ unicodeslot=0x6F2,
},
- [0x06F3]={
+ [0x6F3]={
adobename="threepersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT THREE",
direction="en",
linebreak="nu",
- unicodeslot=0x06F3,
+ unicodeslot=0x6F3,
},
- [0x06F4]={
+ [0x6F4]={
adobename="fourpersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT FOUR",
direction="en",
linebreak="nu",
- unicodeslot=0x06F4,
+ unicodeslot=0x6F4,
},
- [0x06F5]={
+ [0x6F5]={
adobename="fivepersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT FIVE",
direction="en",
linebreak="nu",
- unicodeslot=0x06F5,
+ unicodeslot=0x6F5,
},
- [0x06F6]={
+ [0x6F6]={
adobename="sixpersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT SIX",
direction="en",
linebreak="nu",
- unicodeslot=0x06F6,
+ unicodeslot=0x6F6,
},
- [0x06F7]={
+ [0x6F7]={
adobename="sevenpersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT SEVEN",
direction="en",
linebreak="nu",
- unicodeslot=0x06F7,
+ unicodeslot=0x6F7,
},
- [0x06F8]={
+ [0x6F8]={
adobename="eightpersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT EIGHT",
direction="en",
linebreak="nu",
- unicodeslot=0x06F8,
+ unicodeslot=0x6F8,
},
- [0x06F9]={
+ [0x6F9]={
adobename="ninepersian",
category="nd",
description="EXTENDED ARABIC-INDIC DIGIT NINE",
direction="en",
linebreak="nu",
- unicodeslot=0x06F9,
+ unicodeslot=0x6F9,
},
- [0x06FA]={
+ [0x6FA]={
arabic="d",
category="lo",
description="ARABIC LETTER SHEEN WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x0634,
- unicodeslot=0x06FA,
+ shcode=0x634,
+ unicodeslot=0x6FA,
},
- [0x06FB]={
+ [0x6FB]={
arabic="d",
category="lo",
description="ARABIC LETTER DAD WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x0636,
- unicodeslot=0x06FB,
+ shcode=0x636,
+ unicodeslot=0x6FB,
},
- [0x06FC]={
+ [0x6FC]={
arabic="d",
category="lo",
description="ARABIC LETTER GHAIN WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x063A,
- unicodeslot=0x06FC,
+ shcode=0x63A,
+ unicodeslot=0x6FC,
},
- [0x06FD]={
+ [0x6FD]={
category="so",
description="ARABIC SIGN SINDHI AMPERSAND",
direction="al",
linebreak="al",
- unicodeslot=0x06FD,
+ unicodeslot=0x6FD,
},
- [0x06FE]={
+ [0x6FE]={
category="so",
description="ARABIC SIGN SINDHI POSTPOSITION MEN",
direction="al",
linebreak="al",
- unicodeslot=0x06FE,
+ unicodeslot=0x6FE,
},
- [0x06FF]={
+ [0x6FF]={
arabic="d",
category="lo",
description="ARABIC LETTER HEH WITH INVERTED V",
direction="al",
linebreak="al",
- shcode=0x0647,
- unicodeslot=0x06FF,
+ shcode=0x647,
+ unicodeslot=0x6FF,
},
- [0x0700]={
+ [0x700]={
category="po",
description="SYRIAC END OF PARAGRAPH",
direction="al",
linebreak="al",
- unicodeslot=0x0700,
+ unicodeslot=0x700,
},
- [0x0701]={
+ [0x701]={
category="po",
description="SYRIAC SUPRALINEAR FULL STOP",
direction="al",
linebreak="al",
- unicodeslot=0x0701,
+ unicodeslot=0x701,
},
- [0x0702]={
+ [0x702]={
category="po",
description="SYRIAC SUBLINEAR FULL STOP",
direction="al",
linebreak="al",
- unicodeslot=0x0702,
+ unicodeslot=0x702,
},
- [0x0703]={
+ [0x703]={
category="po",
description="SYRIAC SUPRALINEAR COLON",
direction="al",
linebreak="al",
- unicodeslot=0x0703,
+ unicodeslot=0x703,
},
- [0x0704]={
+ [0x704]={
category="po",
description="SYRIAC SUBLINEAR COLON",
direction="al",
linebreak="al",
- unicodeslot=0x0704,
+ unicodeslot=0x704,
},
- [0x0705]={
+ [0x705]={
category="po",
description="SYRIAC HORIZONTAL COLON",
direction="al",
linebreak="al",
- unicodeslot=0x0705,
+ unicodeslot=0x705,
},
- [0x0706]={
+ [0x706]={
category="po",
description="SYRIAC COLON SKEWED LEFT",
direction="al",
linebreak="al",
- unicodeslot=0x0706,
+ unicodeslot=0x706,
},
- [0x0707]={
+ [0x707]={
category="po",
description="SYRIAC COLON SKEWED RIGHT",
direction="al",
linebreak="al",
- unicodeslot=0x0707,
+ unicodeslot=0x707,
},
- [0x0708]={
+ [0x708]={
category="po",
description="SYRIAC SUPRALINEAR COLON SKEWED LEFT",
direction="al",
linebreak="al",
- unicodeslot=0x0708,
+ unicodeslot=0x708,
},
- [0x0709]={
+ [0x709]={
category="po",
description="SYRIAC SUBLINEAR COLON SKEWED RIGHT",
direction="al",
linebreak="al",
- unicodeslot=0x0709,
+ unicodeslot=0x709,
},
- [0x070A]={
+ [0x70A]={
category="po",
description="SYRIAC CONTRACTION",
direction="al",
linebreak="al",
- unicodeslot=0x070A,
+ unicodeslot=0x70A,
},
- [0x070B]={
+ [0x70B]={
category="po",
description="SYRIAC HARKLEAN OBELUS",
direction="al",
linebreak="al",
- unicodeslot=0x070B,
+ unicodeslot=0x70B,
},
- [0x070C]={
+ [0x70C]={
category="po",
description="SYRIAC HARKLEAN METOBELUS",
direction="al",
linebreak="al",
- unicodeslot=0x070C,
+ unicodeslot=0x70C,
},
- [0x070D]={
+ [0x70D]={
category="po",
description="SYRIAC HARKLEAN ASTERISCUS",
direction="al",
linebreak="al",
- unicodeslot=0x070D,
+ unicodeslot=0x70D,
},
- [0x070F]={
+ [0x70F]={
category="cf",
description="SYRIAC ABBREVIATION MARK",
direction="al",
linebreak="al",
- unicodeslot=0x070F,
+ unicodeslot=0x70F,
visible="yes",
},
- [0x0710]={
+ [0x710]={
arabic="r",
category="lo",
description="SYRIAC LETTER ALAPH",
direction="al",
linebreak="al",
- unicodeslot=0x0710,
+ unicodeslot=0x710,
},
- [0x0711]={
+ [0x711]={
category="mn",
+ combining=0x24,
description="SYRIAC LETTER SUPERSCRIPT ALAPH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0711,
+ unicodeslot=0x711,
},
- [0x0712]={
+ [0x712]={
arabic="d",
category="lo",
description="SYRIAC LETTER BETH",
direction="al",
linebreak="al",
- unicodeslot=0x0712,
+ unicodeslot=0x712,
},
- [0x0713]={
+ [0x713]={
arabic="d",
category="lo",
description="SYRIAC LETTER GAMAL",
direction="al",
linebreak="al",
- unicodeslot=0x0713,
+ unicodeslot=0x713,
},
- [0x0714]={
+ [0x714]={
arabic="d",
category="lo",
description="SYRIAC LETTER GAMAL GARSHUNI",
direction="al",
linebreak="al",
- unicodeslot=0x0714,
+ unicodeslot=0x714,
},
- [0x0715]={
+ [0x715]={
arabic="r",
category="lo",
description="SYRIAC LETTER DALATH",
direction="al",
linebreak="al",
- unicodeslot=0x0715,
+ unicodeslot=0x715,
},
- [0x0716]={
+ [0x716]={
arabic="r",
category="lo",
description="SYRIAC LETTER DOTLESS DALATH RISH",
direction="al",
linebreak="al",
- unicodeslot=0x0716,
+ unicodeslot=0x716,
},
- [0x0717]={
+ [0x717]={
arabic="r",
category="lo",
description="SYRIAC LETTER HE",
direction="al",
linebreak="al",
- unicodeslot=0x0717,
+ unicodeslot=0x717,
},
- [0x0718]={
+ [0x718]={
arabic="r",
category="lo",
description="SYRIAC LETTER WAW",
direction="al",
linebreak="al",
- unicodeslot=0x0718,
+ unicodeslot=0x718,
},
- [0x0719]={
+ [0x719]={
arabic="r",
category="lo",
description="SYRIAC LETTER ZAIN",
direction="al",
linebreak="al",
- unicodeslot=0x0719,
+ unicodeslot=0x719,
},
- [0x071A]={
+ [0x71A]={
arabic="d",
category="lo",
description="SYRIAC LETTER HETH",
direction="al",
linebreak="al",
- unicodeslot=0x071A,
+ unicodeslot=0x71A,
},
- [0x071B]={
+ [0x71B]={
arabic="d",
category="lo",
description="SYRIAC LETTER TETH",
direction="al",
linebreak="al",
- unicodeslot=0x071B,
+ unicodeslot=0x71B,
},
- [0x071C]={
+ [0x71C]={
arabic="d",
category="lo",
description="SYRIAC LETTER TETH GARSHUNI",
direction="al",
linebreak="al",
- unicodeslot=0x071C,
+ unicodeslot=0x71C,
},
- [0x071D]={
+ [0x71D]={
arabic="d",
category="lo",
description="SYRIAC LETTER YUDH",
direction="al",
linebreak="al",
- unicodeslot=0x071D,
+ unicodeslot=0x71D,
},
- [0x071E]={
+ [0x71E]={
arabic="r",
category="lo",
description="SYRIAC LETTER YUDH HE",
direction="al",
linebreak="al",
- unicodeslot=0x071E,
+ unicodeslot=0x71E,
},
- [0x071F]={
+ [0x71F]={
arabic="d",
category="lo",
description="SYRIAC LETTER KAPH",
direction="al",
linebreak="al",
- unicodeslot=0x071F,
+ unicodeslot=0x71F,
},
- [0x0720]={
+ [0x720]={
arabic="d",
category="lo",
description="SYRIAC LETTER LAMADH",
direction="al",
linebreak="al",
- unicodeslot=0x0720,
+ unicodeslot=0x720,
},
- [0x0721]={
+ [0x721]={
arabic="d",
category="lo",
description="SYRIAC LETTER MIM",
direction="al",
linebreak="al",
- unicodeslot=0x0721,
+ unicodeslot=0x721,
},
- [0x0722]={
+ [0x722]={
arabic="d",
category="lo",
description="SYRIAC LETTER NUN",
direction="al",
linebreak="al",
- unicodeslot=0x0722,
+ unicodeslot=0x722,
},
- [0x0723]={
+ [0x723]={
arabic="d",
category="lo",
description="SYRIAC LETTER SEMKATH",
direction="al",
linebreak="al",
- unicodeslot=0x0723,
+ unicodeslot=0x723,
},
- [0x0724]={
+ [0x724]={
arabic="d",
category="lo",
description="SYRIAC LETTER FINAL SEMKATH",
direction="al",
linebreak="al",
- unicodeslot=0x0724,
+ unicodeslot=0x724,
},
- [0x0725]={
+ [0x725]={
arabic="d",
category="lo",
description="SYRIAC LETTER E",
direction="al",
linebreak="al",
- unicodeslot=0x0725,
+ unicodeslot=0x725,
},
- [0x0726]={
+ [0x726]={
arabic="d",
category="lo",
description="SYRIAC LETTER PE",
direction="al",
linebreak="al",
- unicodeslot=0x0726,
+ unicodeslot=0x726,
},
- [0x0727]={
+ [0x727]={
arabic="d",
category="lo",
description="SYRIAC LETTER REVERSED PE",
direction="al",
linebreak="al",
- unicodeslot=0x0727,
+ unicodeslot=0x727,
},
- [0x0728]={
+ [0x728]={
arabic="r",
category="lo",
description="SYRIAC LETTER SADHE",
direction="al",
linebreak="al",
- unicodeslot=0x0728,
+ unicodeslot=0x728,
},
- [0x0729]={
+ [0x729]={
arabic="d",
category="lo",
description="SYRIAC LETTER QAPH",
direction="al",
linebreak="al",
- unicodeslot=0x0729,
+ unicodeslot=0x729,
},
- [0x072A]={
+ [0x72A]={
arabic="r",
category="lo",
description="SYRIAC LETTER RISH",
direction="al",
linebreak="al",
- unicodeslot=0x072A,
+ unicodeslot=0x72A,
},
- [0x072B]={
+ [0x72B]={
arabic="d",
category="lo",
description="SYRIAC LETTER SHIN",
direction="al",
linebreak="al",
- unicodeslot=0x072B,
+ unicodeslot=0x72B,
},
- [0x072C]={
+ [0x72C]={
arabic="r",
category="lo",
description="SYRIAC LETTER TAW",
direction="al",
linebreak="al",
- unicodeslot=0x072C,
+ unicodeslot=0x72C,
},
- [0x072D]={
+ [0x72D]={
arabic="d",
category="lo",
description="SYRIAC LETTER PERSIAN BHETH",
direction="al",
linebreak="al",
- unicodeslot=0x072D,
+ unicodeslot=0x72D,
},
- [0x072E]={
+ [0x72E]={
arabic="d",
category="lo",
description="SYRIAC LETTER PERSIAN GHAMAL",
direction="al",
linebreak="al",
- unicodeslot=0x072E,
+ unicodeslot=0x72E,
},
- [0x072F]={
+ [0x72F]={
arabic="r",
category="lo",
description="SYRIAC LETTER PERSIAN DHALATH",
direction="al",
linebreak="al",
- unicodeslot=0x072F,
+ unicodeslot=0x72F,
},
- [0x0730]={
+ [0x730]={
category="mn",
+ combining=0xE6,
description="SYRIAC PTHAHA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0730,
+ unicodeslot=0x730,
},
- [0x0731]={
+ [0x731]={
category="mn",
+ combining=0xDC,
description="SYRIAC PTHAHA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0731,
+ unicodeslot=0x731,
},
- [0x0732]={
+ [0x732]={
category="mn",
+ combining=0xE6,
description="SYRIAC PTHAHA DOTTED",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0732,
+ unicodeslot=0x732,
},
- [0x0733]={
+ [0x733]={
category="mn",
+ combining=0xE6,
description="SYRIAC ZQAPHA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0733,
+ unicodeslot=0x733,
},
- [0x0734]={
+ [0x734]={
category="mn",
+ combining=0xDC,
description="SYRIAC ZQAPHA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0734,
+ unicodeslot=0x734,
},
- [0x0735]={
+ [0x735]={
category="mn",
+ combining=0xE6,
description="SYRIAC ZQAPHA DOTTED",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0735,
+ unicodeslot=0x735,
},
- [0x0736]={
+ [0x736]={
category="mn",
+ combining=0xE6,
description="SYRIAC RBASA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0736,
+ unicodeslot=0x736,
},
- [0x0737]={
+ [0x737]={
category="mn",
+ combining=0xDC,
description="SYRIAC RBASA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0737,
+ unicodeslot=0x737,
},
- [0x0738]={
+ [0x738]={
category="mn",
+ combining=0xDC,
description="SYRIAC DOTTED ZLAMA HORIZONTAL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0738,
+ unicodeslot=0x738,
},
- [0x0739]={
+ [0x739]={
category="mn",
+ combining=0xDC,
description="SYRIAC DOTTED ZLAMA ANGULAR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0739,
+ unicodeslot=0x739,
},
- [0x073A]={
+ [0x73A]={
category="mn",
+ combining=0xE6,
description="SYRIAC HBASA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x073A,
+ unicodeslot=0x73A,
},
- [0x073B]={
+ [0x73B]={
category="mn",
+ combining=0xDC,
description="SYRIAC HBASA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x073B,
+ unicodeslot=0x73B,
},
- [0x073C]={
+ [0x73C]={
category="mn",
+ combining=0xDC,
description="SYRIAC HBASA-ESASA DOTTED",
direction="nsm",
linebreak="cm",
- unicodeslot=0x073C,
+ unicodeslot=0x73C,
},
- [0x073D]={
+ [0x73D]={
category="mn",
+ combining=0xE6,
description="SYRIAC ESASA ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x073D,
+ unicodeslot=0x73D,
},
- [0x073E]={
+ [0x73E]={
category="mn",
+ combining=0xDC,
description="SYRIAC ESASA BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x073E,
+ unicodeslot=0x73E,
},
- [0x073F]={
+ [0x73F]={
category="mn",
+ combining=0xE6,
description="SYRIAC RWAHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x073F,
+ unicodeslot=0x73F,
},
- [0x0740]={
+ [0x740]={
category="mn",
+ combining=0xE6,
description="SYRIAC FEMININE DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0740,
+ unicodeslot=0x740,
},
- [0x0741]={
+ [0x741]={
category="mn",
+ combining=0xE6,
description="SYRIAC QUSHSHAYA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0741,
+ unicodeslot=0x741,
},
- [0x0742]={
+ [0x742]={
category="mn",
+ combining=0xDC,
description="SYRIAC RUKKAKHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0742,
+ unicodeslot=0x742,
},
- [0x0743]={
+ [0x743]={
category="mn",
+ combining=0xE6,
description="SYRIAC TWO VERTICAL DOTS ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0743,
+ unicodeslot=0x743,
},
- [0x0744]={
+ [0x744]={
category="mn",
+ combining=0xDC,
description="SYRIAC TWO VERTICAL DOTS BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0744,
+ unicodeslot=0x744,
},
- [0x0745]={
+ [0x745]={
category="mn",
+ combining=0xE6,
description="SYRIAC THREE DOTS ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0745,
+ unicodeslot=0x745,
},
- [0x0746]={
+ [0x746]={
category="mn",
+ combining=0xDC,
description="SYRIAC THREE DOTS BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0746,
+ unicodeslot=0x746,
},
- [0x0747]={
+ [0x747]={
category="mn",
+ combining=0xE6,
description="SYRIAC OBLIQUE LINE ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0747,
+ unicodeslot=0x747,
},
- [0x0748]={
+ [0x748]={
category="mn",
+ combining=0xDC,
description="SYRIAC OBLIQUE LINE BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0748,
+ unicodeslot=0x748,
},
- [0x0749]={
+ [0x749]={
category="mn",
+ combining=0xE6,
description="SYRIAC MUSIC",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0749,
+ unicodeslot=0x749,
},
- [0x074A]={
+ [0x74A]={
category="mn",
+ combining=0xE6,
description="SYRIAC BARREKH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x074A,
+ unicodeslot=0x74A,
},
- [0x074D]={
+ [0x74D]={
arabic="r",
category="lo",
description="SYRIAC LETTER SOGDIAN ZHAIN",
direction="al",
linebreak="al",
- unicodeslot=0x074D,
+ unicodeslot=0x74D,
},
- [0x074E]={
+ [0x74E]={
arabic="d",
category="lo",
description="SYRIAC LETTER SOGDIAN KHAPH",
direction="al",
linebreak="al",
- unicodeslot=0x074E,
+ unicodeslot=0x74E,
},
- [0x074F]={
+ [0x74F]={
arabic="d",
category="lo",
description="SYRIAC LETTER SOGDIAN FE",
direction="al",
linebreak="al",
- unicodeslot=0x074F,
+ unicodeslot=0x74F,
},
- [0x0750]={
+ [0x750]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0750,
+ shcode=0x628,
+ unicodeslot=0x750,
},
- [0x0751]={
+ [0x751]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH DOT BELOW AND THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0751,
+ shcode=0x628,
+ unicodeslot=0x751,
},
- [0x0752]={
+ [0x752]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0752,
+ shcode=0x628,
+ unicodeslot=0x752,
},
- [0x0753]={
+ [0x753]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH THREE DOTS POINTING UPWARDS BELOW AND TWO DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0753,
+ shcode=0x628,
+ unicodeslot=0x753,
},
- [0x0754]={
+ [0x754]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH TWO DOTS BELOW AND DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0754,
+ shcode=0x628,
+ unicodeslot=0x754,
},
- [0x0755]={
+ [0x755]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH INVERTED SMALL V BELOW",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0755,
+ shcode=0x628,
+ unicodeslot=0x755,
},
- [0x0756]={
+ [0x756]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH SMALL V",
direction="al",
linebreak="al",
- shcode=0x0628,
- unicodeslot=0x0756,
+ shcode=0x628,
+ unicodeslot=0x756,
},
- [0x0757]={
+ [0x757]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x062D,
- unicodeslot=0x0757,
+ shcode=0x62D,
+ unicodeslot=0x757,
},
- [0x0758]={
+ [0x758]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
linebreak="al",
- shcode=0x062D,
- unicodeslot=0x0758,
+ shcode=0x62D,
+ unicodeslot=0x758,
},
- [0x0759]={
+ [0x759]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH TWO DOTS VERTICALLY BELOW AND SMALL TAH",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x0759,
+ shcode=0x62F,
+ unicodeslot=0x759,
},
- [0x075A]={
+ [0x75A]={
arabic="r",
category="lo",
description="ARABIC LETTER DAL WITH INVERTED SMALL V BELOW",
direction="al",
linebreak="al",
- shcode=0x062F,
- unicodeslot=0x075A,
+ shcode=0x62F,
+ unicodeslot=0x75A,
},
- [0x075B]={
+ [0x75B]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH STROKE",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x075B,
+ shcode=0x631,
+ unicodeslot=0x75B,
},
- [0x075C]={
+ [0x75C]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH FOUR DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0633,
- unicodeslot=0x075C,
+ shcode=0x633,
+ unicodeslot=0x75C,
},
- [0x075D]={
+ [0x75D]={
arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0639,
- unicodeslot=0x075D,
+ shcode=0x639,
+ unicodeslot=0x75D,
},
- [0x075E]={
+ [0x75E]={
arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH THREE DOTS POINTING DOWNWARDS ABOVE",
direction="al",
linebreak="al",
- shcode=0x0639,
- unicodeslot=0x075E,
+ shcode=0x639,
+ unicodeslot=0x75E,
},
- [0x075F]={
+ [0x75F]={
arabic="d",
category="lo",
description="ARABIC LETTER AIN WITH TWO DOTS VERTICALLY ABOVE",
direction="al",
linebreak="al",
- shcode=0x0639,
- unicodeslot=0x075F,
+ shcode=0x639,
+ unicodeslot=0x75F,
},
- [0x0760]={
+ [0x760]={
arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH TWO DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0641,
- unicodeslot=0x0760,
+ shcode=0x641,
+ unicodeslot=0x760,
},
- [0x0761]={
+ [0x761]={
arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
linebreak="al",
- shcode=0x0641,
- unicodeslot=0x0761,
+ shcode=0x641,
+ unicodeslot=0x761,
},
- [0x0762]={
+ [0x762]={
arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x06A9,
- unicodeslot=0x0762,
+ shcode=0x6A9,
+ unicodeslot=0x762,
},
- [0x0763]={
+ [0x763]={
arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- shcode=0x06A9,
- unicodeslot=0x0763,
+ shcode=0x6A9,
+ unicodeslot=0x763,
},
- [0x0764]={
+ [0x764]={
arabic="d",
category="lo",
description="ARABIC LETTER KEHEH WITH THREE DOTS POINTING UPWARDS BELOW",
direction="al",
linebreak="al",
- shcode=0x06A9,
- unicodeslot=0x0764,
+ shcode=0x6A9,
+ unicodeslot=0x764,
},
- [0x0765]={
+ [0x765]={
arabic="d",
category="lo",
description="ARABIC LETTER MEEM WITH DOT ABOVE",
direction="al",
linebreak="al",
- shcode=0x0645,
- unicodeslot=0x0765,
+ shcode=0x645,
+ unicodeslot=0x765,
},
- [0x0766]={
+ [0x766]={
arabic="d",
category="lo",
description="ARABIC LETTER MEEM WITH DOT BELOW",
direction="al",
linebreak="al",
- shcode=0x0645,
- unicodeslot=0x0766,
+ shcode=0x645,
+ unicodeslot=0x766,
},
- [0x0767]={
+ [0x767]={
arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH TWO DOTS BELOW",
direction="al",
linebreak="al",
- shcode=0x0646,
- unicodeslot=0x0767,
+ shcode=0x646,
+ unicodeslot=0x767,
},
- [0x0768]={
+ [0x768]={
arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH SMALL TAH",
direction="al",
linebreak="al",
- shcode=0x0646,
- unicodeslot=0x0768,
+ shcode=0x646,
+ unicodeslot=0x768,
},
- [0x0769]={
+ [0x769]={
arabic="d",
category="lo",
description="ARABIC LETTER NOON WITH SMALL V",
direction="al",
linebreak="al",
- shcode=0x0646,
- unicodeslot=0x0769,
+ shcode=0x646,
+ unicodeslot=0x769,
},
- [0x076A]={
+ [0x76A]={
arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH BAR",
direction="al",
linebreak="al",
- shcode=0x0644,
- unicodeslot=0x076A,
+ shcode=0x644,
+ unicodeslot=0x76A,
},
- [0x076B]={
+ [0x76B]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH TWO DOTS VERTICALLY ABOVE",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x076B,
+ shcode=0x631,
+ unicodeslot=0x76B,
},
- [0x076C]={
+ [0x76C]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH HAMZA ABOVE",
direction="al",
linebreak="al",
- shcode=0x0631,
- unicodeslot=0x076C,
+ shcode=0x631,
+ unicodeslot=0x76C,
},
- [0x076D]={
+ [0x76D]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH TWO DOTS VERTICALLY ABOVE",
direction="al",
linebreak="al",
- shcode=0x0633,
- unicodeslot=0x076D,
+ shcode=0x633,
+ unicodeslot=0x76D,
},
- [0x076E]={
+ [0x76E]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH BELOW",
direction="al",
linebreak="al",
- unicodeslot=0x076E,
+ unicodeslot=0x76E,
},
- [0x076F]={
+ [0x76F]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH AND TWO DOTS",
direction="al",
linebreak="al",
- unicodeslot=0x076F,
+ unicodeslot=0x76F,
},
- [0x0770]={
+ [0x770]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH SMALL ARABIC LETTER TAH AND TWO DOTS",
direction="al",
linebreak="al",
- unicodeslot=0x0770,
+ unicodeslot=0x770,
},
- [0x0771]={
+ [0x771]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH SMALL ARABIC LETTER TAH AND TWO DOTS",
direction="al",
linebreak="al",
- unicodeslot=0x0771,
+ unicodeslot=0x771,
},
- [0x0772]={
+ [0x772]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0772,
+ unicodeslot=0x772,
},
- [0x0773]={
+ [0x773]={
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0773,
+ unicodeslot=0x773,
},
- [0x0774]={
+ [0x774]={
arabic="r",
category="lo",
description="ARABIC LETTER ALEF WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0774,
+ unicodeslot=0x774,
},
- [0x0775]={
+ [0x775]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0775,
+ unicodeslot=0x775,
},
- [0x0776]={
+ [0x776]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0776,
+ unicodeslot=0x776,
},
- [0x0777]={
+ [0x777]={
arabic="d",
category="lo",
description="ARABIC LETTER FARSI YEH WITH EXTENDED ARABIC-INDIC DIGIT FOUR BELOW",
direction="al",
linebreak="al",
- unicodeslot=0x0777,
+ unicodeslot=0x777,
},
- [0x0778]={
+ [0x778]={
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0778,
+ unicodeslot=0x778,
},
- [0x0779]={
+ [0x779]={
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x0779,
+ unicodeslot=0x779,
},
- [0x077A]={
+ [0x77A]={
arabic="d",
category="lo",
description="ARABIC LETTER YEH BARREE WITH EXTENDED ARABIC-INDIC DIGIT TWO ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x077A,
+ unicodeslot=0x77A,
},
- [0x077B]={
+ [0x77B]={
arabic="d",
category="lo",
description="ARABIC LETTER YEH BARREE WITH EXTENDED ARABIC-INDIC DIGIT THREE ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x077B,
+ unicodeslot=0x77B,
},
- [0x077C]={
+ [0x77C]={
arabic="d",
category="lo",
description="ARABIC LETTER HAH WITH EXTENDED ARABIC-INDIC DIGIT FOUR BELOW",
direction="al",
linebreak="al",
- unicodeslot=0x077C,
+ unicodeslot=0x77C,
},
- [0x077D]={
+ [0x77D]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH EXTENDED ARABIC-INDIC DIGIT FOUR ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x077D,
+ unicodeslot=0x77D,
},
- [0x077E]={
+ [0x77E]={
arabic="d",
category="lo",
description="ARABIC LETTER SEEN WITH INVERTED V",
direction="al",
linebreak="al",
- unicodeslot=0x077E,
+ unicodeslot=0x77E,
},
- [0x077F]={
+ [0x77F]={
arabic="d",
category="lo",
description="ARABIC LETTER KAF WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x077F,
+ unicodeslot=0x77F,
},
- [0x0780]={
+ [0x780]={
category="lo",
description="THAANA LETTER HAA",
direction="al",
linebreak="al",
- unicodeslot=0x0780,
+ unicodeslot=0x780,
},
- [0x0781]={
+ [0x781]={
category="lo",
description="THAANA LETTER SHAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0781,
+ unicodeslot=0x781,
},
- [0x0782]={
+ [0x782]={
category="lo",
description="THAANA LETTER NOONU",
direction="al",
linebreak="al",
- unicodeslot=0x0782,
+ unicodeslot=0x782,
},
- [0x0783]={
+ [0x783]={
category="lo",
description="THAANA LETTER RAA",
direction="al",
linebreak="al",
- unicodeslot=0x0783,
+ unicodeslot=0x783,
},
- [0x0784]={
+ [0x784]={
category="lo",
description="THAANA LETTER BAA",
direction="al",
linebreak="al",
- unicodeslot=0x0784,
+ unicodeslot=0x784,
},
- [0x0785]={
+ [0x785]={
category="lo",
description="THAANA LETTER LHAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0785,
+ unicodeslot=0x785,
},
- [0x0786]={
+ [0x786]={
category="lo",
description="THAANA LETTER KAAFU",
direction="al",
linebreak="al",
- unicodeslot=0x0786,
+ unicodeslot=0x786,
},
- [0x0787]={
+ [0x787]={
category="lo",
description="THAANA LETTER ALIFU",
direction="al",
linebreak="al",
- unicodeslot=0x0787,
+ unicodeslot=0x787,
},
- [0x0788]={
+ [0x788]={
category="lo",
description="THAANA LETTER VAAVU",
direction="al",
linebreak="al",
- unicodeslot=0x0788,
+ unicodeslot=0x788,
},
- [0x0789]={
+ [0x789]={
category="lo",
description="THAANA LETTER MEEMU",
direction="al",
linebreak="al",
- unicodeslot=0x0789,
+ unicodeslot=0x789,
},
- [0x078A]={
+ [0x78A]={
category="lo",
description="THAANA LETTER FAAFU",
direction="al",
linebreak="al",
- unicodeslot=0x078A,
+ unicodeslot=0x78A,
},
- [0x078B]={
+ [0x78B]={
category="lo",
description="THAANA LETTER DHAALU",
direction="al",
linebreak="al",
- unicodeslot=0x078B,
+ unicodeslot=0x78B,
},
- [0x078C]={
+ [0x78C]={
category="lo",
description="THAANA LETTER THAA",
direction="al",
linebreak="al",
- unicodeslot=0x078C,
+ unicodeslot=0x78C,
},
- [0x078D]={
+ [0x78D]={
category="lo",
description="THAANA LETTER LAAMU",
direction="al",
linebreak="al",
- unicodeslot=0x078D,
+ unicodeslot=0x78D,
},
- [0x078E]={
+ [0x78E]={
category="lo",
description="THAANA LETTER GAAFU",
direction="al",
linebreak="al",
- unicodeslot=0x078E,
+ unicodeslot=0x78E,
},
- [0x078F]={
+ [0x78F]={
category="lo",
description="THAANA LETTER GNAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x078F,
+ unicodeslot=0x78F,
},
- [0x0790]={
+ [0x790]={
category="lo",
description="THAANA LETTER SEENU",
direction="al",
linebreak="al",
- unicodeslot=0x0790,
+ unicodeslot=0x790,
},
- [0x0791]={
+ [0x791]={
category="lo",
description="THAANA LETTER DAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0791,
+ unicodeslot=0x791,
},
- [0x0792]={
+ [0x792]={
category="lo",
description="THAANA LETTER ZAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0792,
+ unicodeslot=0x792,
},
- [0x0793]={
+ [0x793]={
category="lo",
description="THAANA LETTER TAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0793,
+ unicodeslot=0x793,
},
- [0x0794]={
+ [0x794]={
category="lo",
description="THAANA LETTER YAA",
direction="al",
linebreak="al",
- unicodeslot=0x0794,
+ unicodeslot=0x794,
},
- [0x0795]={
+ [0x795]={
category="lo",
description="THAANA LETTER PAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0795,
+ unicodeslot=0x795,
},
- [0x0796]={
+ [0x796]={
category="lo",
description="THAANA LETTER JAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0796,
+ unicodeslot=0x796,
},
- [0x0797]={
+ [0x797]={
category="lo",
description="THAANA LETTER CHAVIYANI",
direction="al",
linebreak="al",
- unicodeslot=0x0797,
+ unicodeslot=0x797,
},
- [0x0798]={
+ [0x798]={
category="lo",
description="THAANA LETTER TTAA",
direction="al",
linebreak="al",
- unicodeslot=0x0798,
+ unicodeslot=0x798,
},
- [0x0799]={
+ [0x799]={
category="lo",
description="THAANA LETTER HHAA",
direction="al",
linebreak="al",
- unicodeslot=0x0799,
+ unicodeslot=0x799,
},
- [0x079A]={
+ [0x79A]={
category="lo",
description="THAANA LETTER KHAA",
direction="al",
linebreak="al",
- unicodeslot=0x079A,
+ unicodeslot=0x79A,
},
- [0x079B]={
+ [0x79B]={
category="lo",
description="THAANA LETTER THAALU",
direction="al",
linebreak="al",
- unicodeslot=0x079B,
+ unicodeslot=0x79B,
},
- [0x079C]={
+ [0x79C]={
category="lo",
description="THAANA LETTER ZAA",
direction="al",
linebreak="al",
- unicodeslot=0x079C,
+ unicodeslot=0x79C,
},
- [0x079D]={
+ [0x79D]={
category="lo",
description="THAANA LETTER SHEENU",
direction="al",
linebreak="al",
- unicodeslot=0x079D,
+ unicodeslot=0x79D,
},
- [0x079E]={
+ [0x79E]={
category="lo",
description="THAANA LETTER SAADHU",
direction="al",
linebreak="al",
- unicodeslot=0x079E,
+ unicodeslot=0x79E,
},
- [0x079F]={
+ [0x79F]={
category="lo",
description="THAANA LETTER DAADHU",
direction="al",
linebreak="al",
- unicodeslot=0x079F,
+ unicodeslot=0x79F,
},
- [0x07A0]={
+ [0x7A0]={
category="lo",
description="THAANA LETTER TO",
direction="al",
linebreak="al",
- unicodeslot=0x07A0,
+ unicodeslot=0x7A0,
},
- [0x07A1]={
+ [0x7A1]={
category="lo",
description="THAANA LETTER ZO",
direction="al",
linebreak="al",
- unicodeslot=0x07A1,
+ unicodeslot=0x7A1,
},
- [0x07A2]={
+ [0x7A2]={
category="lo",
description="THAANA LETTER AINU",
direction="al",
linebreak="al",
- unicodeslot=0x07A2,
+ unicodeslot=0x7A2,
},
- [0x07A3]={
+ [0x7A3]={
category="lo",
description="THAANA LETTER GHAINU",
direction="al",
linebreak="al",
- unicodeslot=0x07A3,
+ unicodeslot=0x7A3,
},
- [0x07A4]={
+ [0x7A4]={
category="lo",
description="THAANA LETTER QAAFU",
direction="al",
linebreak="al",
- unicodeslot=0x07A4,
+ unicodeslot=0x7A4,
},
- [0x07A5]={
+ [0x7A5]={
category="lo",
description="THAANA LETTER WAAVU",
direction="al",
linebreak="al",
- unicodeslot=0x07A5,
+ unicodeslot=0x7A5,
},
- [0x07A6]={
+ [0x7A6]={
category="mn",
description="THAANA ABAFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07A6,
+ unicodeslot=0x7A6,
},
- [0x07A7]={
+ [0x7A7]={
category="mn",
description="THAANA AABAAFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07A7,
+ unicodeslot=0x7A7,
},
- [0x07A8]={
+ [0x7A8]={
category="mn",
description="THAANA IBIFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07A8,
+ unicodeslot=0x7A8,
},
- [0x07A9]={
+ [0x7A9]={
category="mn",
description="THAANA EEBEEFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07A9,
+ unicodeslot=0x7A9,
},
- [0x07AA]={
+ [0x7AA]={
category="mn",
description="THAANA UBUFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07AA,
+ unicodeslot=0x7AA,
},
- [0x07AB]={
+ [0x7AB]={
category="mn",
description="THAANA OOBOOFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07AB,
+ unicodeslot=0x7AB,
},
- [0x07AC]={
+ [0x7AC]={
category="mn",
description="THAANA EBEFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07AC,
+ unicodeslot=0x7AC,
},
- [0x07AD]={
+ [0x7AD]={
category="mn",
description="THAANA EYBEYFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07AD,
+ unicodeslot=0x7AD,
},
- [0x07AE]={
+ [0x7AE]={
category="mn",
description="THAANA OBOFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07AE,
+ unicodeslot=0x7AE,
},
- [0x07AF]={
+ [0x7AF]={
category="mn",
description="THAANA OABOAFILI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07AF,
+ unicodeslot=0x7AF,
},
- [0x07B0]={
+ [0x7B0]={
category="mn",
description="THAANA SUKUN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07B0,
+ unicodeslot=0x7B0,
},
- [0x07B1]={
+ [0x7B1]={
category="lo",
description="THAANA LETTER NAA",
direction="al",
linebreak="al",
- unicodeslot=0x07B1,
+ unicodeslot=0x7B1,
},
- [0x07C0]={
+ [0x7C0]={
category="nd",
description="NKO DIGIT ZERO",
direction="r",
linebreak="nu",
- unicodeslot=0x07C0,
+ unicodeslot=0x7C0,
},
- [0x07C1]={
+ [0x7C1]={
category="nd",
description="NKO DIGIT ONE",
direction="r",
linebreak="nu",
- unicodeslot=0x07C1,
+ unicodeslot=0x7C1,
},
- [0x07C2]={
+ [0x7C2]={
category="nd",
description="NKO DIGIT TWO",
direction="r",
linebreak="nu",
- unicodeslot=0x07C2,
+ unicodeslot=0x7C2,
},
- [0x07C3]={
+ [0x7C3]={
category="nd",
description="NKO DIGIT THREE",
direction="r",
linebreak="nu",
- unicodeslot=0x07C3,
+ unicodeslot=0x7C3,
},
- [0x07C4]={
+ [0x7C4]={
category="nd",
description="NKO DIGIT FOUR",
direction="r",
linebreak="nu",
- unicodeslot=0x07C4,
+ unicodeslot=0x7C4,
},
- [0x07C5]={
+ [0x7C5]={
category="nd",
description="NKO DIGIT FIVE",
direction="r",
linebreak="nu",
- unicodeslot=0x07C5,
+ unicodeslot=0x7C5,
},
- [0x07C6]={
+ [0x7C6]={
category="nd",
description="NKO DIGIT SIX",
direction="r",
linebreak="nu",
- unicodeslot=0x07C6,
+ unicodeslot=0x7C6,
},
- [0x07C7]={
+ [0x7C7]={
category="nd",
description="NKO DIGIT SEVEN",
direction="r",
linebreak="nu",
- unicodeslot=0x07C7,
+ unicodeslot=0x7C7,
},
- [0x07C8]={
+ [0x7C8]={
category="nd",
description="NKO DIGIT EIGHT",
direction="r",
linebreak="nu",
- unicodeslot=0x07C8,
+ unicodeslot=0x7C8,
},
- [0x07C9]={
+ [0x7C9]={
category="nd",
description="NKO DIGIT NINE",
direction="r",
linebreak="nu",
- unicodeslot=0x07C9,
+ unicodeslot=0x7C9,
},
- [0x07CA]={
+ [0x7CA]={
arabic="d",
category="lo",
description="NKO LETTER A",
direction="r",
linebreak="al",
- unicodeslot=0x07CA,
+ unicodeslot=0x7CA,
},
- [0x07CB]={
+ [0x7CB]={
arabic="d",
category="lo",
description="NKO LETTER EE",
direction="r",
linebreak="al",
- unicodeslot=0x07CB,
+ unicodeslot=0x7CB,
},
- [0x07CC]={
+ [0x7CC]={
arabic="d",
category="lo",
description="NKO LETTER I",
direction="r",
linebreak="al",
- unicodeslot=0x07CC,
+ unicodeslot=0x7CC,
},
- [0x07CD]={
+ [0x7CD]={
arabic="d",
category="lo",
description="NKO LETTER E",
direction="r",
linebreak="al",
- unicodeslot=0x07CD,
+ unicodeslot=0x7CD,
},
- [0x07CE]={
+ [0x7CE]={
arabic="d",
category="lo",
description="NKO LETTER U",
direction="r",
linebreak="al",
- unicodeslot=0x07CE,
+ unicodeslot=0x7CE,
},
- [0x07CF]={
+ [0x7CF]={
arabic="d",
category="lo",
description="NKO LETTER OO",
direction="r",
linebreak="al",
- unicodeslot=0x07CF,
+ unicodeslot=0x7CF,
},
- [0x07D0]={
+ [0x7D0]={
arabic="d",
category="lo",
description="NKO LETTER O",
direction="r",
linebreak="al",
- unicodeslot=0x07D0,
+ unicodeslot=0x7D0,
},
- [0x07D1]={
+ [0x7D1]={
arabic="d",
category="lo",
description="NKO LETTER DAGBASINNA",
direction="r",
linebreak="al",
- unicodeslot=0x07D1,
+ unicodeslot=0x7D1,
},
- [0x07D2]={
+ [0x7D2]={
arabic="d",
category="lo",
description="NKO LETTER N",
direction="r",
linebreak="al",
- unicodeslot=0x07D2,
+ unicodeslot=0x7D2,
},
- [0x07D3]={
+ [0x7D3]={
arabic="d",
category="lo",
description="NKO LETTER BA",
direction="r",
linebreak="al",
- unicodeslot=0x07D3,
+ unicodeslot=0x7D3,
},
- [0x07D4]={
+ [0x7D4]={
arabic="d",
category="lo",
description="NKO LETTER PA",
direction="r",
linebreak="al",
- unicodeslot=0x07D4,
+ unicodeslot=0x7D4,
},
- [0x07D5]={
+ [0x7D5]={
arabic="d",
category="lo",
description="NKO LETTER TA",
direction="r",
linebreak="al",
- unicodeslot=0x07D5,
+ unicodeslot=0x7D5,
},
- [0x07D6]={
+ [0x7D6]={
arabic="d",
category="lo",
description="NKO LETTER JA",
direction="r",
linebreak="al",
- unicodeslot=0x07D6,
+ unicodeslot=0x7D6,
},
- [0x07D7]={
+ [0x7D7]={
arabic="d",
category="lo",
description="NKO LETTER CHA",
direction="r",
linebreak="al",
- unicodeslot=0x07D7,
+ unicodeslot=0x7D7,
},
- [0x07D8]={
+ [0x7D8]={
arabic="d",
category="lo",
description="NKO LETTER DA",
direction="r",
linebreak="al",
- unicodeslot=0x07D8,
+ unicodeslot=0x7D8,
},
- [0x07D9]={
+ [0x7D9]={
arabic="d",
category="lo",
description="NKO LETTER RA",
direction="r",
linebreak="al",
- unicodeslot=0x07D9,
+ unicodeslot=0x7D9,
},
- [0x07DA]={
+ [0x7DA]={
arabic="d",
category="lo",
description="NKO LETTER RRA",
direction="r",
linebreak="al",
- unicodeslot=0x07DA,
+ unicodeslot=0x7DA,
},
- [0x07DB]={
+ [0x7DB]={
arabic="d",
category="lo",
description="NKO LETTER SA",
direction="r",
linebreak="al",
- unicodeslot=0x07DB,
+ unicodeslot=0x7DB,
},
- [0x07DC]={
+ [0x7DC]={
arabic="d",
category="lo",
description="NKO LETTER GBA",
direction="r",
linebreak="al",
- unicodeslot=0x07DC,
+ unicodeslot=0x7DC,
},
- [0x07DD]={
+ [0x7DD]={
arabic="d",
category="lo",
description="NKO LETTER FA",
direction="r",
linebreak="al",
- unicodeslot=0x07DD,
+ unicodeslot=0x7DD,
},
- [0x07DE]={
+ [0x7DE]={
arabic="d",
category="lo",
description="NKO LETTER KA",
direction="r",
linebreak="al",
- unicodeslot=0x07DE,
+ unicodeslot=0x7DE,
},
- [0x07DF]={
+ [0x7DF]={
arabic="d",
category="lo",
description="NKO LETTER LA",
direction="r",
linebreak="al",
- unicodeslot=0x07DF,
+ unicodeslot=0x7DF,
},
- [0x07E0]={
+ [0x7E0]={
arabic="d",
category="lo",
description="NKO LETTER NA WOLOSO",
direction="r",
linebreak="al",
- unicodeslot=0x07E0,
+ unicodeslot=0x7E0,
},
- [0x07E1]={
+ [0x7E1]={
arabic="d",
category="lo",
description="NKO LETTER MA",
direction="r",
linebreak="al",
- unicodeslot=0x07E1,
+ unicodeslot=0x7E1,
},
- [0x07E2]={
+ [0x7E2]={
arabic="d",
category="lo",
description="NKO LETTER NYA",
direction="r",
linebreak="al",
- unicodeslot=0x07E2,
+ unicodeslot=0x7E2,
},
- [0x07E3]={
+ [0x7E3]={
arabic="d",
category="lo",
description="NKO LETTER NA",
direction="r",
linebreak="al",
- unicodeslot=0x07E3,
+ unicodeslot=0x7E3,
},
- [0x07E4]={
+ [0x7E4]={
arabic="d",
category="lo",
description="NKO LETTER HA",
direction="r",
linebreak="al",
- unicodeslot=0x07E4,
+ unicodeslot=0x7E4,
},
- [0x07E5]={
+ [0x7E5]={
arabic="d",
category="lo",
description="NKO LETTER WA",
direction="r",
linebreak="al",
- unicodeslot=0x07E5,
+ unicodeslot=0x7E5,
},
- [0x07E6]={
+ [0x7E6]={
arabic="d",
category="lo",
description="NKO LETTER YA",
direction="r",
linebreak="al",
- unicodeslot=0x07E6,
+ unicodeslot=0x7E6,
},
- [0x07E7]={
+ [0x7E7]={
arabic="d",
category="lo",
description="NKO LETTER NYA WOLOSO",
direction="r",
linebreak="al",
- unicodeslot=0x07E7,
+ unicodeslot=0x7E7,
},
- [0x07E8]={
+ [0x7E8]={
arabic="d",
category="lo",
description="NKO LETTER JONA JA",
direction="r",
linebreak="al",
- unicodeslot=0x07E8,
+ unicodeslot=0x7E8,
},
- [0x07E9]={
+ [0x7E9]={
arabic="d",
category="lo",
description="NKO LETTER JONA CHA",
direction="r",
linebreak="al",
- unicodeslot=0x07E9,
+ unicodeslot=0x7E9,
},
- [0x07EA]={
+ [0x7EA]={
arabic="d",
category="lo",
description="NKO LETTER JONA RA",
direction="r",
linebreak="al",
- unicodeslot=0x07EA,
+ unicodeslot=0x7EA,
},
- [0x07EB]={
+ [0x7EB]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING SHORT HIGH TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07EB,
+ unicodeslot=0x7EB,
},
- [0x07EC]={
+ [0x7EC]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING SHORT LOW TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07EC,
+ unicodeslot=0x7EC,
},
- [0x07ED]={
+ [0x7ED]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING SHORT RISING TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07ED,
+ unicodeslot=0x7ED,
},
- [0x07EE]={
+ [0x7EE]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING LONG DESCENDING TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07EE,
+ unicodeslot=0x7EE,
},
- [0x07EF]={
+ [0x7EF]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING LONG HIGH TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07EF,
+ unicodeslot=0x7EF,
},
- [0x07F0]={
+ [0x7F0]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING LONG LOW TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07F0,
+ unicodeslot=0x7F0,
},
- [0x07F1]={
+ [0x7F1]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING LONG RISING TONE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07F1,
+ unicodeslot=0x7F1,
},
- [0x07F2]={
+ [0x7F2]={
category="mn",
+ combining=0xDC,
description="NKO COMBINING NASALIZATION MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07F2,
+ unicodeslot=0x7F2,
},
- [0x07F3]={
+ [0x7F3]={
category="mn",
+ combining=0xE6,
description="NKO COMBINING DOUBLE DOT ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x07F3,
+ unicodeslot=0x7F3,
},
- [0x07F4]={
+ [0x7F4]={
category="lm",
description="NKO HIGH TONE APOSTROPHE",
direction="r",
linebreak="al",
- unicodeslot=0x07F4,
+ unicodeslot=0x7F4,
},
- [0x07F5]={
+ [0x7F5]={
category="lm",
description="NKO LOW TONE APOSTROPHE",
direction="r",
linebreak="al",
- unicodeslot=0x07F5,
+ unicodeslot=0x7F5,
},
- [0x07F6]={
+ [0x7F6]={
category="so",
description="NKO SYMBOL OO DENNEN",
direction="on",
linebreak="al",
- unicodeslot=0x07F6,
+ unicodeslot=0x7F6,
},
- [0x07F7]={
+ [0x7F7]={
category="po",
description="NKO SYMBOL GBAKURUNEN",
direction="on",
linebreak="al",
- unicodeslot=0x07F7,
+ unicodeslot=0x7F7,
},
- [0x07F8]={
+ [0x7F8]={
category="po",
description="NKO COMMA",
direction="on",
linebreak="is",
- unicodeslot=0x07F8,
+ unicodeslot=0x7F8,
},
- [0x07F9]={
+ [0x7F9]={
category="po",
description="NKO EXCLAMATION MARK",
direction="on",
linebreak="ex",
- unicodeslot=0x07F9,
+ unicodeslot=0x7F9,
},
- [0x07FA]={
+ [0x7FA]={
arabic="c",
category="lm",
description="NKO LAJANYALAN",
direction="r",
linebreak="al",
- unicodeslot=0x07FA,
+ unicodeslot=0x7FA,
},
- [0x0800]={
+ [0x800]={
category="lo",
description="SAMARITAN LETTER ALAF",
direction="r",
linebreak="al",
- unicodeslot=0x0800,
+ unicodeslot=0x800,
},
- [0x0801]={
+ [0x801]={
category="lo",
description="SAMARITAN LETTER BIT",
direction="r",
linebreak="al",
- unicodeslot=0x0801,
+ unicodeslot=0x801,
},
- [0x0802]={
+ [0x802]={
category="lo",
description="SAMARITAN LETTER GAMAN",
direction="r",
linebreak="al",
- unicodeslot=0x0802,
+ unicodeslot=0x802,
},
- [0x0803]={
+ [0x803]={
category="lo",
description="SAMARITAN LETTER DALAT",
direction="r",
linebreak="al",
- unicodeslot=0x0803,
+ unicodeslot=0x803,
},
- [0x0804]={
+ [0x804]={
category="lo",
description="SAMARITAN LETTER IY",
direction="r",
linebreak="al",
- unicodeslot=0x0804,
+ unicodeslot=0x804,
},
- [0x0805]={
+ [0x805]={
category="lo",
description="SAMARITAN LETTER BAA",
direction="r",
linebreak="al",
- unicodeslot=0x0805,
+ unicodeslot=0x805,
},
- [0x0806]={
+ [0x806]={
category="lo",
description="SAMARITAN LETTER ZEN",
direction="r",
linebreak="al",
- unicodeslot=0x0806,
+ unicodeslot=0x806,
},
- [0x0807]={
+ [0x807]={
category="lo",
description="SAMARITAN LETTER IT",
direction="r",
linebreak="al",
- unicodeslot=0x0807,
+ unicodeslot=0x807,
},
- [0x0808]={
+ [0x808]={
category="lo",
description="SAMARITAN LETTER TIT",
direction="r",
linebreak="al",
- unicodeslot=0x0808,
+ unicodeslot=0x808,
},
- [0x0809]={
+ [0x809]={
category="lo",
description="SAMARITAN LETTER YUT",
direction="r",
linebreak="al",
- unicodeslot=0x0809,
+ unicodeslot=0x809,
},
- [0x080A]={
+ [0x80A]={
category="lo",
description="SAMARITAN LETTER KAAF",
direction="r",
linebreak="al",
- unicodeslot=0x080A,
+ unicodeslot=0x80A,
},
- [0x080B]={
+ [0x80B]={
category="lo",
description="SAMARITAN LETTER LABAT",
direction="r",
linebreak="al",
- unicodeslot=0x080B,
+ unicodeslot=0x80B,
},
- [0x080C]={
+ [0x80C]={
category="lo",
description="SAMARITAN LETTER MIM",
direction="r",
linebreak="al",
- unicodeslot=0x080C,
+ unicodeslot=0x80C,
},
- [0x080D]={
+ [0x80D]={
category="lo",
description="SAMARITAN LETTER NUN",
direction="r",
linebreak="al",
- unicodeslot=0x080D,
+ unicodeslot=0x80D,
},
- [0x080E]={
+ [0x80E]={
category="lo",
description="SAMARITAN LETTER SINGAAT",
direction="r",
linebreak="al",
- unicodeslot=0x080E,
+ unicodeslot=0x80E,
},
- [0x080F]={
+ [0x80F]={
category="lo",
description="SAMARITAN LETTER IN",
direction="r",
linebreak="al",
- unicodeslot=0x080F,
+ unicodeslot=0x80F,
},
- [0x0810]={
+ [0x810]={
category="lo",
description="SAMARITAN LETTER FI",
direction="r",
linebreak="al",
- unicodeslot=0x0810,
+ unicodeslot=0x810,
},
- [0x0811]={
+ [0x811]={
category="lo",
description="SAMARITAN LETTER TSAADIY",
direction="r",
linebreak="al",
- unicodeslot=0x0811,
+ unicodeslot=0x811,
},
- [0x0812]={
+ [0x812]={
category="lo",
description="SAMARITAN LETTER QUF",
direction="r",
linebreak="al",
- unicodeslot=0x0812,
+ unicodeslot=0x812,
},
- [0x0813]={
+ [0x813]={
category="lo",
description="SAMARITAN LETTER RISH",
direction="r",
linebreak="al",
- unicodeslot=0x0813,
+ unicodeslot=0x813,
},
- [0x0814]={
+ [0x814]={
category="lo",
description="SAMARITAN LETTER SHAN",
direction="r",
linebreak="al",
- unicodeslot=0x0814,
+ unicodeslot=0x814,
},
- [0x0815]={
+ [0x815]={
category="lo",
description="SAMARITAN LETTER TAAF",
direction="r",
linebreak="al",
- unicodeslot=0x0815,
+ unicodeslot=0x815,
},
- [0x0816]={
+ [0x816]={
category="mn",
+ combining=0xE6,
description="SAMARITAN MARK IN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0816,
+ unicodeslot=0x816,
},
- [0x0817]={
+ [0x817]={
category="mn",
+ combining=0xE6,
description="SAMARITAN MARK IN-ALAF",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0817,
+ unicodeslot=0x817,
},
- [0x0818]={
+ [0x818]={
category="mn",
+ combining=0xE6,
description="SAMARITAN MARK OCCLUSION",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0818,
+ unicodeslot=0x818,
},
- [0x0819]={
+ [0x819]={
category="mn",
+ combining=0xE6,
description="SAMARITAN MARK DAGESH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0819,
+ unicodeslot=0x819,
},
- [0x081A]={
+ [0x81A]={
category="lm",
description="SAMARITAN MODIFIER LETTER EPENTHETIC YUT",
direction="r",
linebreak="al",
- unicodeslot=0x081A,
+ unicodeslot=0x81A,
},
- [0x081B]={
+ [0x81B]={
category="mn",
+ combining=0xE6,
description="SAMARITAN MARK EPENTHETIC YUT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x081B,
+ unicodeslot=0x81B,
},
- [0x081C]={
+ [0x81C]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN LONG E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x081C,
+ unicodeslot=0x81C,
},
- [0x081D]={
+ [0x81D]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x081D,
+ unicodeslot=0x81D,
},
- [0x081E]={
+ [0x81E]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN OVERLONG AA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x081E,
+ unicodeslot=0x81E,
},
- [0x081F]={
+ [0x81F]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN LONG AA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x081F,
+ unicodeslot=0x81F,
},
- [0x0820]={
+ [0x820]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN AA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0820,
+ unicodeslot=0x820,
},
- [0x0821]={
+ [0x821]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN OVERLONG A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0821,
+ unicodeslot=0x821,
},
- [0x0822]={
+ [0x822]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN LONG A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0822,
+ unicodeslot=0x822,
},
- [0x0823]={
+ [0x823]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0823,
+ unicodeslot=0x823,
},
- [0x0824]={
+ [0x824]={
category="lm",
description="SAMARITAN MODIFIER LETTER SHORT A",
direction="r",
linebreak="al",
- unicodeslot=0x0824,
+ unicodeslot=0x824,
},
- [0x0825]={
+ [0x825]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN SHORT A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0825,
+ unicodeslot=0x825,
},
- [0x0826]={
+ [0x826]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN LONG U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0826,
+ unicodeslot=0x826,
},
- [0x0827]={
+ [0x827]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0827,
+ unicodeslot=0x827,
},
- [0x0828]={
+ [0x828]={
category="lm",
description="SAMARITAN MODIFIER LETTER I",
direction="r",
linebreak="al",
- unicodeslot=0x0828,
+ unicodeslot=0x828,
},
- [0x0829]={
+ [0x829]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN LONG I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0829,
+ unicodeslot=0x829,
},
- [0x082A]={
+ [0x82A]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x082A,
+ unicodeslot=0x82A,
},
- [0x082B]={
+ [0x82B]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN O",
direction="nsm",
linebreak="cm",
- unicodeslot=0x082B,
+ unicodeslot=0x82B,
},
- [0x082C]={
+ [0x82C]={
category="mn",
+ combining=0xE6,
description="SAMARITAN VOWEL SIGN SUKUN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x082C,
+ unicodeslot=0x82C,
},
- [0x082D]={
+ [0x82D]={
category="mn",
+ combining=0xE6,
description="SAMARITAN MARK NEQUDAA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x082D,
+ unicodeslot=0x82D,
},
- [0x0830]={
+ [0x830]={
category="po",
description="SAMARITAN PUNCTUATION NEQUDAA",
direction="r",
linebreak="al",
- unicodeslot=0x0830,
+ unicodeslot=0x830,
},
- [0x0831]={
+ [0x831]={
category="po",
description="SAMARITAN PUNCTUATION AFSAAQ",
direction="r",
linebreak="al",
- unicodeslot=0x0831,
+ unicodeslot=0x831,
},
- [0x0832]={
+ [0x832]={
category="po",
description="SAMARITAN PUNCTUATION ANGED",
direction="r",
linebreak="al",
- unicodeslot=0x0832,
+ unicodeslot=0x832,
},
- [0x0833]={
+ [0x833]={
category="po",
description="SAMARITAN PUNCTUATION BAU",
direction="r",
linebreak="al",
- unicodeslot=0x0833,
+ unicodeslot=0x833,
},
- [0x0834]={
+ [0x834]={
category="po",
description="SAMARITAN PUNCTUATION ATMAAU",
direction="r",
linebreak="al",
- unicodeslot=0x0834,
+ unicodeslot=0x834,
},
- [0x0835]={
+ [0x835]={
category="po",
description="SAMARITAN PUNCTUATION SHIYYAALAA",
direction="r",
linebreak="al",
- unicodeslot=0x0835,
+ unicodeslot=0x835,
},
- [0x0836]={
+ [0x836]={
category="po",
description="SAMARITAN ABBREVIATION MARK",
direction="r",
linebreak="al",
- unicodeslot=0x0836,
+ unicodeslot=0x836,
},
- [0x0837]={
+ [0x837]={
category="po",
description="SAMARITAN PUNCTUATION MELODIC QITSA",
direction="r",
linebreak="al",
- unicodeslot=0x0837,
+ unicodeslot=0x837,
},
- [0x0838]={
+ [0x838]={
category="po",
description="SAMARITAN PUNCTUATION ZIQAA",
direction="r",
linebreak="al",
- unicodeslot=0x0838,
+ unicodeslot=0x838,
},
- [0x0839]={
+ [0x839]={
category="po",
description="SAMARITAN PUNCTUATION QITSA",
direction="r",
linebreak="al",
- unicodeslot=0x0839,
+ unicodeslot=0x839,
},
- [0x083A]={
+ [0x83A]={
category="po",
description="SAMARITAN PUNCTUATION ZAEF",
direction="r",
linebreak="al",
- unicodeslot=0x083A,
+ unicodeslot=0x83A,
},
- [0x083B]={
+ [0x83B]={
category="po",
description="SAMARITAN PUNCTUATION TURU",
direction="r",
linebreak="al",
- unicodeslot=0x083B,
+ unicodeslot=0x83B,
},
- [0x083C]={
+ [0x83C]={
category="po",
description="SAMARITAN PUNCTUATION ARKAANU",
direction="r",
linebreak="al",
- unicodeslot=0x083C,
+ unicodeslot=0x83C,
},
- [0x083D]={
+ [0x83D]={
category="po",
description="SAMARITAN PUNCTUATION SOF MASHFAAT",
direction="r",
linebreak="al",
- unicodeslot=0x083D,
+ unicodeslot=0x83D,
},
- [0x083E]={
+ [0x83E]={
category="po",
description="SAMARITAN PUNCTUATION ANNAAU",
direction="r",
linebreak="al",
- unicodeslot=0x083E,
+ unicodeslot=0x83E,
},
- [0x0840]={
+ [0x840]={
arabic="r",
category="lo",
description="MANDAIC LETTER HALQA",
direction="r",
linebreak="al",
- unicodeslot=0x0840,
+ unicodeslot=0x840,
},
- [0x0841]={
+ [0x841]={
arabic="d",
category="lo",
description="MANDAIC LETTER AB",
direction="r",
linebreak="al",
- unicodeslot=0x0841,
+ unicodeslot=0x841,
},
- [0x0842]={
+ [0x842]={
arabic="d",
category="lo",
description="MANDAIC LETTER AG",
direction="r",
linebreak="al",
- unicodeslot=0x0842,
+ unicodeslot=0x842,
},
- [0x0843]={
+ [0x843]={
arabic="d",
category="lo",
description="MANDAIC LETTER AD",
direction="r",
linebreak="al",
- unicodeslot=0x0843,
+ unicodeslot=0x843,
},
- [0x0844]={
+ [0x844]={
arabic="d",
category="lo",
description="MANDAIC LETTER AH",
direction="r",
linebreak="al",
- unicodeslot=0x0844,
+ unicodeslot=0x844,
},
- [0x0845]={
+ [0x845]={
arabic="d",
category="lo",
description="MANDAIC LETTER USHENNA",
direction="r",
linebreak="al",
- unicodeslot=0x0845,
+ unicodeslot=0x845,
},
- [0x0846]={
+ [0x846]={
arabic="r",
category="lo",
description="MANDAIC LETTER AZ",
direction="r",
linebreak="al",
- unicodeslot=0x0846,
+ unicodeslot=0x846,
},
- [0x0847]={
+ [0x847]={
arabic="d",
category="lo",
description="MANDAIC LETTER IT",
direction="r",
linebreak="al",
- unicodeslot=0x0847,
+ unicodeslot=0x847,
},
- [0x0848]={
+ [0x848]={
arabic="d",
category="lo",
description="MANDAIC LETTER ATT",
direction="r",
linebreak="al",
- unicodeslot=0x0848,
+ unicodeslot=0x848,
},
- [0x0849]={
+ [0x849]={
arabic="r",
category="lo",
description="MANDAIC LETTER AKSA",
direction="r",
linebreak="al",
- unicodeslot=0x0849,
+ unicodeslot=0x849,
},
- [0x084A]={
+ [0x84A]={
arabic="d",
category="lo",
description="MANDAIC LETTER AK",
direction="r",
linebreak="al",
- unicodeslot=0x084A,
+ unicodeslot=0x84A,
},
- [0x084B]={
+ [0x84B]={
arabic="d",
category="lo",
description="MANDAIC LETTER AL",
direction="r",
linebreak="al",
- unicodeslot=0x084B,
+ unicodeslot=0x84B,
},
- [0x084C]={
+ [0x84C]={
arabic="d",
category="lo",
description="MANDAIC LETTER AM",
direction="r",
linebreak="al",
- unicodeslot=0x084C,
+ unicodeslot=0x84C,
},
- [0x084D]={
+ [0x84D]={
arabic="d",
category="lo",
description="MANDAIC LETTER AN",
direction="r",
linebreak="al",
- unicodeslot=0x084D,
+ unicodeslot=0x84D,
},
- [0x084E]={
+ [0x84E]={
arabic="d",
category="lo",
description="MANDAIC LETTER AS",
direction="r",
linebreak="al",
- unicodeslot=0x084E,
+ unicodeslot=0x84E,
},
- [0x084F]={
+ [0x84F]={
arabic="r",
category="lo",
description="MANDAIC LETTER IN",
direction="r",
linebreak="al",
- unicodeslot=0x084F,
+ unicodeslot=0x84F,
},
- [0x0850]={
+ [0x850]={
arabic="d",
category="lo",
description="MANDAIC LETTER AP",
direction="r",
linebreak="al",
- unicodeslot=0x0850,
+ unicodeslot=0x850,
},
- [0x0851]={
+ [0x851]={
arabic="d",
category="lo",
description="MANDAIC LETTER ASZ",
direction="r",
linebreak="al",
- unicodeslot=0x0851,
+ unicodeslot=0x851,
},
- [0x0852]={
+ [0x852]={
arabic="d",
category="lo",
description="MANDAIC LETTER AQ",
direction="r",
linebreak="al",
- unicodeslot=0x0852,
+ unicodeslot=0x852,
},
- [0x0853]={
+ [0x853]={
arabic="d",
category="lo",
description="MANDAIC LETTER AR",
direction="r",
linebreak="al",
- unicodeslot=0x0853,
+ unicodeslot=0x853,
},
- [0x0854]={
+ [0x854]={
arabic="r",
category="lo",
description="MANDAIC LETTER ASH",
direction="r",
linebreak="al",
- unicodeslot=0x0854,
+ unicodeslot=0x854,
},
- [0x0855]={
+ [0x855]={
arabic="d",
category="lo",
description="MANDAIC LETTER AT",
direction="r",
linebreak="al",
- unicodeslot=0x0855,
+ unicodeslot=0x855,
},
- [0x0856]={
+ [0x856]={
arabic="u",
category="lo",
description="MANDAIC LETTER DUSHENNA",
direction="r",
linebreak="al",
- unicodeslot=0x0856,
+ unicodeslot=0x856,
},
- [0x0857]={
+ [0x857]={
arabic="u",
category="lo",
description="MANDAIC LETTER KAD",
direction="r",
linebreak="al",
- unicodeslot=0x0857,
+ unicodeslot=0x857,
},
- [0x0858]={
+ [0x858]={
arabic="u",
category="lo",
description="MANDAIC LETTER AIN",
direction="r",
linebreak="al",
- unicodeslot=0x0858,
+ unicodeslot=0x858,
},
- [0x0859]={
+ [0x859]={
category="mn",
+ combining=0xDC,
description="MANDAIC AFFRICATION MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0859,
+ unicodeslot=0x859,
},
- [0x085A]={
+ [0x85A]={
category="mn",
+ combining=0xDC,
description="MANDAIC VOCALIZATION MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x085A,
+ unicodeslot=0x85A,
},
- [0x085B]={
+ [0x85B]={
category="mn",
+ combining=0xDC,
description="MANDAIC GEMINATION MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x085B,
+ unicodeslot=0x85B,
},
- [0x085E]={
+ [0x85E]={
category="po",
description="MANDAIC PUNCTUATION",
direction="r",
linebreak="al",
- unicodeslot=0x085E,
+ unicodeslot=0x85E,
},
- [0x08A0]={
+ [0x8A0]={
arabic="d",
category="lo",
description="ARABIC LETTER BEH WITH SMALL V BELOW",
direction="al",
linebreak="al",
- unicodeslot=0x08A0,
+ unicodeslot=0x8A0,
+ },
+ [0x8A1]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER BEH WITH HAMZA ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8A1,
},
- [0x08A2]={
+ [0x8A2]={
arabic="d",
category="lo",
description="ARABIC LETTER JEEM WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x08A2,
+ unicodeslot=0x8A2,
},
- [0x08A3]={
+ [0x8A3]={
arabic="d",
category="lo",
description="ARABIC LETTER TAH WITH TWO DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x08A3,
+ unicodeslot=0x8A3,
},
- [0x08A4]={
+ [0x8A4]={
arabic="d",
category="lo",
description="ARABIC LETTER FEH WITH DOT BELOW AND THREE DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x08A4,
+ unicodeslot=0x8A4,
},
- [0x08A5]={
+ [0x8A5]={
arabic="d",
category="lo",
description="ARABIC LETTER QAF WITH DOT BELOW",
direction="al",
linebreak="al",
- unicodeslot=0x08A5,
+ unicodeslot=0x8A5,
},
- [0x08A6]={
+ [0x8A6]={
arabic="d",
category="lo",
description="ARABIC LETTER LAM WITH DOUBLE BAR",
direction="al",
linebreak="al",
- unicodeslot=0x08A6,
+ unicodeslot=0x8A6,
},
- [0x08A7]={
+ [0x8A7]={
arabic="d",
category="lo",
description="ARABIC LETTER MEEM WITH THREE DOTS ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x08A7,
+ unicodeslot=0x8A7,
},
- [0x08A8]={
+ [0x8A8]={
arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH TWO DOTS BELOW AND HAMZA ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x08A8,
+ unicodeslot=0x8A8,
},
- [0x08A9]={
+ [0x8A9]={
arabic="d",
category="lo",
description="ARABIC LETTER YEH WITH TWO DOTS BELOW AND DOT ABOVE",
direction="al",
linebreak="al",
- unicodeslot=0x08A9,
+ unicodeslot=0x8A9,
},
- [0x08AA]={
+ [0x8AA]={
arabic="r",
category="lo",
description="ARABIC LETTER REH WITH LOOP",
direction="al",
linebreak="al",
- unicodeslot=0x08AA,
+ unicodeslot=0x8AA,
},
- [0x08AB]={
+ [0x8AB]={
arabic="r",
category="lo",
description="ARABIC LETTER WAW WITH DOT WITHIN",
direction="al",
linebreak="al",
- unicodeslot=0x08AB,
+ unicodeslot=0x8AB,
},
- [0x08AC]={
+ [0x8AC]={
arabic="r",
category="lo",
description="ARABIC LETTER ROHINGYA YEH",
direction="al",
linebreak="al",
- unicodeslot=0x08AC,
+ unicodeslot=0x8AC,
+ },
+ [0x8AD]={
+ arabic="u",
+ category="lo",
+ description="ARABIC LETTER LOW ALEF",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8AD,
},
- [0x08E4]={
+ [0x8AE]={
+ arabic="r",
+ category="lo",
+ description="ARABIC LETTER DAL WITH THREE DOTS BELOW",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8AE,
+ },
+ [0x8AF]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER SAD WITH THREE DOTS BELOW",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8AF,
+ },
+ [0x8B0]={
+ arabic="d",
+ category="lo",
+ description="ARABIC LETTER GAF WITH INVERTED STROKE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8B0,
+ },
+ [0x8B1]={
+ arabic="r",
+ category="lo",
+ description="ARABIC LETTER STRAIGHT WAW",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8B1,
+ },
+ [0x8B2]={
+ arabic="r",
+ category="lo",
+ description="ARABIC LETTER ZAIN WITH INVERTED V ABOVE",
+ direction="al",
+ linebreak="al",
+ unicodeslot=0x8B2,
+ },
+ [0x8E4]={
category="mn",
+ combining=0xE6,
description="ARABIC CURLY FATHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08E4,
+ unicodeslot=0x8E4,
},
- [0x08E5]={
+ [0x8E5]={
category="mn",
+ combining=0xE6,
description="ARABIC CURLY DAMMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08E5,
+ unicodeslot=0x8E5,
},
- [0x08E6]={
+ [0x8E6]={
category="mn",
+ combining=0xDC,
description="ARABIC CURLY KASRA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08E6,
+ unicodeslot=0x8E6,
},
- [0x08E7]={
+ [0x8E7]={
category="mn",
+ combining=0xE6,
description="ARABIC CURLY FATHATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08E7,
+ unicodeslot=0x8E7,
},
- [0x08E8]={
+ [0x8E8]={
category="mn",
+ combining=0xE6,
description="ARABIC CURLY DAMMATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08E8,
+ unicodeslot=0x8E8,
},
- [0x08E9]={
+ [0x8E9]={
category="mn",
+ combining=0xDC,
description="ARABIC CURLY KASRATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08E9,
+ unicodeslot=0x8E9,
},
- [0x08EA]={
+ [0x8EA]={
category="mn",
+ combining=0xE6,
description="ARABIC TONE ONE DOT ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08EA,
+ unicodeslot=0x8EA,
},
- [0x08EB]={
+ [0x8EB]={
category="mn",
+ combining=0xE6,
description="ARABIC TONE TWO DOTS ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08EB,
+ unicodeslot=0x8EB,
},
- [0x08EC]={
+ [0x8EC]={
category="mn",
+ combining=0xE6,
description="ARABIC TONE LOOP ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08EC,
+ unicodeslot=0x8EC,
},
- [0x08ED]={
+ [0x8ED]={
category="mn",
+ combining=0xDC,
description="ARABIC TONE ONE DOT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08ED,
+ unicodeslot=0x8ED,
},
- [0x08EE]={
+ [0x8EE]={
category="mn",
+ combining=0xDC,
description="ARABIC TONE TWO DOTS BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08EE,
+ unicodeslot=0x8EE,
},
- [0x08EF]={
+ [0x8EF]={
category="mn",
+ combining=0xDC,
description="ARABIC TONE LOOP BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08EF,
+ unicodeslot=0x8EF,
},
- [0x08F0]={
+ [0x8F0]={
category="mn",
+ combining=0x1B,
description="ARABIC OPEN FATHATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F0,
+ unicodeslot=0x8F0,
},
- [0x08F1]={
+ [0x8F1]={
category="mn",
+ combining=0x1C,
description="ARABIC OPEN DAMMATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F1,
+ unicodeslot=0x8F1,
},
- [0x08F2]={
+ [0x8F2]={
category="mn",
+ combining=0x1D,
description="ARABIC OPEN KASRATAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F2,
+ unicodeslot=0x8F2,
},
- [0x08F3]={
+ [0x8F3]={
category="mn",
+ combining=0xE6,
description="ARABIC SMALL HIGH WAW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F3,
+ unicodeslot=0x8F3,
},
- [0x08F4]={
+ [0x8F4]={
category="mn",
+ combining=0xE6,
description="ARABIC FATHA WITH RING",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F4,
+ unicodeslot=0x8F4,
},
- [0x08F5]={
+ [0x8F5]={
category="mn",
+ combining=0xE6,
description="ARABIC FATHA WITH DOT ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F5,
+ unicodeslot=0x8F5,
},
- [0x08F6]={
+ [0x8F6]={
category="mn",
+ combining=0xDC,
description="ARABIC KASRA WITH DOT BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F6,
+ unicodeslot=0x8F6,
},
- [0x08F7]={
+ [0x8F7]={
category="mn",
+ combining=0xE6,
description="ARABIC LEFT ARROWHEAD ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F7,
+ unicodeslot=0x8F7,
},
- [0x08F8]={
+ [0x8F8]={
category="mn",
+ combining=0xE6,
description="ARABIC RIGHT ARROWHEAD ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F8,
+ unicodeslot=0x8F8,
},
- [0x08F9]={
+ [0x8F9]={
category="mn",
+ combining=0xDC,
description="ARABIC LEFT ARROWHEAD BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08F9,
+ unicodeslot=0x8F9,
},
- [0x08FA]={
+ [0x8FA]={
category="mn",
+ combining=0xDC,
description="ARABIC RIGHT ARROWHEAD BELOW",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08FA,
+ unicodeslot=0x8FA,
},
- [0x08FB]={
+ [0x8FB]={
category="mn",
+ combining=0xE6,
description="ARABIC DOUBLE RIGHT ARROWHEAD ABOVE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08FB,
+ unicodeslot=0x8FB,
},
- [0x08FC]={
+ [0x8FC]={
category="mn",
+ combining=0xE6,
description="ARABIC DOUBLE RIGHT ARROWHEAD ABOVE WITH DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08FC,
+ unicodeslot=0x8FC,
},
- [0x08FD]={
+ [0x8FD]={
category="mn",
+ combining=0xE6,
description="ARABIC RIGHT ARROWHEAD ABOVE WITH DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08FD,
+ unicodeslot=0x8FD,
},
- [0x08FE]={
+ [0x8FE]={
category="mn",
+ combining=0xE6,
description="ARABIC DAMMA WITH DOT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x08FE,
+ unicodeslot=0x8FE,
+ },
+ [0x8FF]={
+ category="mn",
+ combining=0xE6,
+ description="ARABIC MARK SIDEWAYS NOON GHUNNA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x8FF,
},
- [0x0900]={
+ [0x900]={
category="mn",
description="DEVANAGARI SIGN INVERTED CANDRABINDU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0900,
+ unicodeslot=0x900,
},
- [0x0901]={
+ [0x901]={
adobename="candrabindudeva",
category="mn",
description="DEVANAGARI SIGN CANDRABINDU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0901,
+ unicodeslot=0x901,
},
- [0x0902]={
+ [0x902]={
adobename="anusvaradeva",
category="mn",
description="DEVANAGARI SIGN ANUSVARA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0902,
+ unicodeslot=0x902,
},
- [0x0903]={
+ [0x903]={
adobename="visargadeva",
category="mc",
description="DEVANAGARI SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0903,
+ unicodeslot=0x903,
},
- [0x0904]={
+ [0x904]={
category="lo",
description="DEVANAGARI LETTER SHORT A",
direction="l",
linebreak="al",
- unicodeslot=0x0904,
+ unicodeslot=0x904,
},
- [0x0905]={
+ [0x905]={
adobename="adeva",
category="lo",
description="DEVANAGARI LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0905,
+ unicodeslot=0x905,
},
- [0x0906]={
+ [0x906]={
adobename="aadeva",
category="lo",
description="DEVANAGARI LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0906,
+ unicodeslot=0x906,
},
- [0x0907]={
+ [0x907]={
adobename="ideva",
category="lo",
description="DEVANAGARI LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0907,
+ unicodeslot=0x907,
},
- [0x0908]={
+ [0x908]={
adobename="iideva",
category="lo",
description="DEVANAGARI LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0908,
+ unicodeslot=0x908,
},
- [0x0909]={
+ [0x909]={
adobename="udeva",
category="lo",
description="DEVANAGARI LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0909,
+ unicodeslot=0x909,
},
- [0x090A]={
+ [0x90A]={
adobename="uudeva",
category="lo",
description="DEVANAGARI LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x090A,
+ unicodeslot=0x90A,
},
- [0x090B]={
+ [0x90B]={
adobename="rvocalicdeva",
category="lo",
description="DEVANAGARI LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x090B,
+ unicodeslot=0x90B,
},
- [0x090C]={
+ [0x90C]={
adobename="lvocalicdeva",
category="lo",
description="DEVANAGARI LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x090C,
+ unicodeslot=0x90C,
},
- [0x090D]={
+ [0x90D]={
adobename="ecandradeva",
category="lo",
description="DEVANAGARI LETTER CANDRA E",
direction="l",
linebreak="al",
- unicodeslot=0x090D,
+ unicodeslot=0x90D,
},
- [0x090E]={
+ [0x90E]={
adobename="eshortdeva",
category="lo",
description="DEVANAGARI LETTER SHORT E",
direction="l",
linebreak="al",
- unicodeslot=0x090E,
+ unicodeslot=0x90E,
},
- [0x090F]={
+ [0x90F]={
adobename="edeva",
category="lo",
description="DEVANAGARI LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x090F,
+ unicodeslot=0x90F,
},
- [0x0910]={
+ [0x910]={
adobename="aideva",
category="lo",
description="DEVANAGARI LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0910,
+ unicodeslot=0x910,
},
- [0x0911]={
+ [0x911]={
adobename="ocandradeva",
category="lo",
description="DEVANAGARI LETTER CANDRA O",
direction="l",
linebreak="al",
- unicodeslot=0x0911,
+ unicodeslot=0x911,
},
- [0x0912]={
+ [0x912]={
adobename="oshortdeva",
category="lo",
description="DEVANAGARI LETTER SHORT O",
direction="l",
linebreak="al",
- unicodeslot=0x0912,
+ unicodeslot=0x912,
},
- [0x0913]={
+ [0x913]={
adobename="odeva",
category="lo",
description="DEVANAGARI LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0913,
+ unicodeslot=0x913,
},
- [0x0914]={
+ [0x914]={
adobename="audeva",
category="lo",
description="DEVANAGARI LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0914,
+ unicodeslot=0x914,
},
- [0x0915]={
+ [0x915]={
adobename="kadeva",
category="lo",
description="DEVANAGARI LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0915,
+ unicodeslot=0x915,
},
- [0x0916]={
+ [0x916]={
adobename="khadeva",
category="lo",
description="DEVANAGARI LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0916,
+ unicodeslot=0x916,
},
- [0x0917]={
+ [0x917]={
adobename="gadeva",
category="lo",
description="DEVANAGARI LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0917,
+ unicodeslot=0x917,
},
- [0x0918]={
+ [0x918]={
adobename="ghadeva",
category="lo",
description="DEVANAGARI LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0918,
+ unicodeslot=0x918,
},
- [0x0919]={
+ [0x919]={
adobename="ngadeva",
category="lo",
description="DEVANAGARI LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0919,
+ unicodeslot=0x919,
},
- [0x091A]={
+ [0x91A]={
adobename="cadeva",
category="lo",
description="DEVANAGARI LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x091A,
+ unicodeslot=0x91A,
},
- [0x091B]={
+ [0x91B]={
adobename="chadeva",
category="lo",
description="DEVANAGARI LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x091B,
+ unicodeslot=0x91B,
},
- [0x091C]={
+ [0x91C]={
adobename="jadeva",
category="lo",
description="DEVANAGARI LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x091C,
+ unicodeslot=0x91C,
},
- [0x091D]={
+ [0x91D]={
adobename="jhadeva",
category="lo",
description="DEVANAGARI LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x091D,
+ unicodeslot=0x91D,
},
- [0x091E]={
+ [0x91E]={
adobename="nyadeva",
category="lo",
description="DEVANAGARI LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x091E,
+ unicodeslot=0x91E,
},
- [0x091F]={
+ [0x91F]={
adobename="ttadeva",
category="lo",
description="DEVANAGARI LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x091F,
+ unicodeslot=0x91F,
},
- [0x0920]={
+ [0x920]={
adobename="tthadeva",
category="lo",
description="DEVANAGARI LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0920,
+ unicodeslot=0x920,
},
- [0x0921]={
+ [0x921]={
adobename="ddadeva",
category="lo",
description="DEVANAGARI LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0921,
+ unicodeslot=0x921,
},
- [0x0922]={
+ [0x922]={
adobename="ddhadeva",
category="lo",
description="DEVANAGARI LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0922,
+ unicodeslot=0x922,
},
- [0x0923]={
+ [0x923]={
adobename="nnadeva",
category="lo",
description="DEVANAGARI LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0923,
+ unicodeslot=0x923,
},
- [0x0924]={
+ [0x924]={
adobename="tadeva",
category="lo",
description="DEVANAGARI LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0924,
+ unicodeslot=0x924,
},
- [0x0925]={
+ [0x925]={
adobename="thadeva",
category="lo",
description="DEVANAGARI LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0925,
+ unicodeslot=0x925,
},
- [0x0926]={
+ [0x926]={
adobename="dadeva",
category="lo",
description="DEVANAGARI LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0926,
+ unicodeslot=0x926,
},
- [0x0927]={
+ [0x927]={
adobename="dhadeva",
category="lo",
description="DEVANAGARI LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0927,
+ unicodeslot=0x927,
},
- [0x0928]={
+ [0x928]={
adobename="nadeva",
category="lo",
description="DEVANAGARI LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0928,
+ unicodeslot=0x928,
},
- [0x0929]={
+ [0x929]={
adobename="nnnadeva",
category="lo",
description="DEVANAGARI LETTER NNNA",
direction="l",
linebreak="al",
- specials={ "char", 0x0928, 0x093C },
- unicodeslot=0x0929,
+ specials={ "char", 0x928, 0x93C },
+ unicodeslot=0x929,
},
- [0x092A]={
+ [0x92A]={
adobename="padeva",
category="lo",
description="DEVANAGARI LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x092A,
+ unicodeslot=0x92A,
},
- [0x092B]={
+ [0x92B]={
adobename="phadeva",
category="lo",
description="DEVANAGARI LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x092B,
+ unicodeslot=0x92B,
},
- [0x092C]={
+ [0x92C]={
adobename="badeva",
category="lo",
description="DEVANAGARI LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x092C,
+ unicodeslot=0x92C,
},
- [0x092D]={
+ [0x92D]={
adobename="bhadeva",
category="lo",
description="DEVANAGARI LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x092D,
+ unicodeslot=0x92D,
},
- [0x092E]={
+ [0x92E]={
adobename="madeva",
category="lo",
description="DEVANAGARI LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x092E,
+ unicodeslot=0x92E,
},
- [0x092F]={
+ [0x92F]={
adobename="yadeva",
category="lo",
description="DEVANAGARI LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x092F,
+ unicodeslot=0x92F,
},
- [0x0930]={
+ [0x930]={
adobename="radeva",
category="lo",
description="DEVANAGARI LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0930,
+ unicodeslot=0x930,
},
- [0x0931]={
+ [0x931]={
adobename="rradeva",
category="lo",
description="DEVANAGARI LETTER RRA",
direction="l",
linebreak="al",
- specials={ "char", 0x0930, 0x093C },
- unicodeslot=0x0931,
+ specials={ "char", 0x930, 0x93C },
+ unicodeslot=0x931,
},
- [0x0932]={
+ [0x932]={
adobename="ladeva",
category="lo",
description="DEVANAGARI LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0932,
+ unicodeslot=0x932,
},
- [0x0933]={
+ [0x933]={
adobename="lladeva",
category="lo",
description="DEVANAGARI LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0933,
+ unicodeslot=0x933,
},
- [0x0934]={
+ [0x934]={
adobename="llladeva",
category="lo",
description="DEVANAGARI LETTER LLLA",
direction="l",
linebreak="al",
- specials={ "char", 0x0933, 0x093C },
- unicodeslot=0x0934,
+ specials={ "char", 0x933, 0x93C },
+ unicodeslot=0x934,
},
- [0x0935]={
+ [0x935]={
adobename="vadeva",
category="lo",
description="DEVANAGARI LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0935,
+ unicodeslot=0x935,
},
- [0x0936]={
+ [0x936]={
adobename="shadeva",
category="lo",
description="DEVANAGARI LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0936,
+ unicodeslot=0x936,
},
- [0x0937]={
+ [0x937]={
adobename="ssadeva",
category="lo",
description="DEVANAGARI LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0937,
+ unicodeslot=0x937,
},
- [0x0938]={
+ [0x938]={
adobename="sadeva",
category="lo",
description="DEVANAGARI LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0938,
+ unicodeslot=0x938,
},
- [0x0939]={
+ [0x939]={
adobename="hadeva",
category="lo",
description="DEVANAGARI LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0939,
+ unicodeslot=0x939,
},
- [0x093A]={
+ [0x93A]={
category="mn",
description="DEVANAGARI VOWEL SIGN OE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x093A,
+ unicodeslot=0x93A,
},
- [0x093B]={
+ [0x93B]={
category="mc",
description="DEVANAGARI VOWEL SIGN OOE",
direction="l",
linebreak="cm",
- unicodeslot=0x093B,
+ unicodeslot=0x93B,
},
- [0x093C]={
+ [0x93C]={
adobename="nuktadeva",
category="mn",
+ combining=0x7,
description="DEVANAGARI SIGN NUKTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x093C,
+ unicodeslot=0x93C,
},
- [0x093D]={
+ [0x93D]={
adobename="avagrahadeva",
category="lo",
description="DEVANAGARI SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x093D,
+ unicodeslot=0x93D,
},
- [0x093E]={
+ [0x93E]={
adobename="aavowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x093E,
+ unicodeslot=0x93E,
},
- [0x093F]={
+ [0x93F]={
adobename="ivowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x093F,
+ unicodeslot=0x93F,
},
- [0x0940]={
+ [0x940]={
adobename="iivowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN II",
direction="l",
linebreak="cm",
- unicodeslot=0x0940,
+ unicodeslot=0x940,
},
- [0x0941]={
+ [0x941]={
adobename="uvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0941,
+ unicodeslot=0x941,
},
- [0x0942]={
+ [0x942]={
adobename="uuvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0942,
+ unicodeslot=0x942,
},
- [0x0943]={
+ [0x943]={
adobename="rvocalicvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN VOCALIC R",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0943,
+ unicodeslot=0x943,
},
- [0x0944]={
+ [0x944]={
adobename="rrvocalicvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN VOCALIC RR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0944,
+ unicodeslot=0x944,
},
- [0x0945]={
+ [0x945]={
adobename="ecandravowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN CANDRA E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0945,
+ unicodeslot=0x945,
},
- [0x0946]={
+ [0x946]={
adobename="eshortvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN SHORT E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0946,
+ unicodeslot=0x946,
},
- [0x0947]={
+ [0x947]={
adobename="evowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0947,
+ unicodeslot=0x947,
},
- [0x0948]={
+ [0x948]={
adobename="aivowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN AI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0948,
+ unicodeslot=0x948,
},
- [0x0949]={
+ [0x949]={
adobename="ocandravowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN CANDRA O",
direction="l",
linebreak="cm",
- unicodeslot=0x0949,
+ unicodeslot=0x949,
},
- [0x094A]={
+ [0x94A]={
adobename="oshortvowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN SHORT O",
direction="l",
linebreak="cm",
- unicodeslot=0x094A,
+ unicodeslot=0x94A,
},
- [0x094B]={
+ [0x94B]={
adobename="ovowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN O",
direction="l",
linebreak="cm",
- unicodeslot=0x094B,
+ unicodeslot=0x94B,
},
- [0x094C]={
+ [0x94C]={
adobename="auvowelsigndeva",
category="mc",
description="DEVANAGARI VOWEL SIGN AU",
direction="l",
linebreak="cm",
- unicodeslot=0x094C,
+ unicodeslot=0x94C,
},
- [0x094D]={
+ [0x94D]={
adobename="viramadeva",
category="mn",
+ combining=0x9,
description="DEVANAGARI SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x094D,
+ unicodeslot=0x94D,
},
- [0x094E]={
+ [0x94E]={
category="mc",
description="DEVANAGARI VOWEL SIGN PRISHTHAMATRA E",
direction="l",
linebreak="cm",
- unicodeslot=0x094E,
+ unicodeslot=0x94E,
},
- [0x094F]={
+ [0x94F]={
category="mc",
description="DEVANAGARI VOWEL SIGN AW",
direction="l",
linebreak="cm",
- unicodeslot=0x094F,
+ unicodeslot=0x94F,
},
- [0x0950]={
+ [0x950]={
adobename="omdeva",
category="lo",
description="DEVANAGARI OM",
direction="l",
linebreak="al",
- unicodeslot=0x0950,
+ unicodeslot=0x950,
},
- [0x0951]={
+ [0x951]={
adobename="udattadeva",
category="mn",
+ combining=0xE6,
description="DEVANAGARI STRESS SIGN UDATTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0951,
+ unicodeslot=0x951,
},
- [0x0952]={
+ [0x952]={
adobename="anudattadeva",
category="mn",
+ combining=0xDC,
description="DEVANAGARI STRESS SIGN ANUDATTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0952,
+ unicodeslot=0x952,
},
- [0x0953]={
+ [0x953]={
adobename="gravedeva",
category="mn",
+ combining=0xE6,
description="DEVANAGARI GRAVE ACCENT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0953,
+ unicodeslot=0x953,
},
- [0x0954]={
+ [0x954]={
adobename="acutedeva",
category="mn",
+ combining=0xE6,
description="DEVANAGARI ACUTE ACCENT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0954,
+ unicodeslot=0x954,
},
- [0x0955]={
+ [0x955]={
category="mn",
description="DEVANAGARI VOWEL SIGN CANDRA LONG E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0955,
+ unicodeslot=0x955,
},
- [0x0956]={
+ [0x956]={
category="mn",
description="DEVANAGARI VOWEL SIGN UE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0956,
+ unicodeslot=0x956,
},
- [0x0957]={
+ [0x957]={
category="mn",
description="DEVANAGARI VOWEL SIGN UUE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0957,
+ unicodeslot=0x957,
},
- [0x0958]={
+ [0x958]={
adobename="qadeva",
category="lo",
description="DEVANAGARI LETTER QA",
direction="l",
linebreak="al",
- specials={ "char", 0x0915, 0x093C },
- unicodeslot=0x0958,
+ specials={ "char", 0x915, 0x93C },
+ unicodeslot=0x958,
},
- [0x0959]={
+ [0x959]={
adobename="khhadeva",
category="lo",
description="DEVANAGARI LETTER KHHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0916, 0x093C },
- unicodeslot=0x0959,
+ specials={ "char", 0x916, 0x93C },
+ unicodeslot=0x959,
},
- [0x095A]={
+ [0x95A]={
adobename="ghhadeva",
category="lo",
description="DEVANAGARI LETTER GHHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0917, 0x093C },
- unicodeslot=0x095A,
+ specials={ "char", 0x917, 0x93C },
+ unicodeslot=0x95A,
},
- [0x095B]={
+ [0x95B]={
adobename="zadeva",
category="lo",
description="DEVANAGARI LETTER ZA",
direction="l",
linebreak="al",
- specials={ "char", 0x091C, 0x093C },
- unicodeslot=0x095B,
+ specials={ "char", 0x91C, 0x93C },
+ unicodeslot=0x95B,
},
- [0x095C]={
+ [0x95C]={
adobename="dddhadeva",
category="lo",
description="DEVANAGARI LETTER DDDHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0921, 0x093C },
- unicodeslot=0x095C,
+ specials={ "char", 0x921, 0x93C },
+ unicodeslot=0x95C,
},
- [0x095D]={
+ [0x95D]={
adobename="rhadeva",
category="lo",
description="DEVANAGARI LETTER RHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0922, 0x093C },
- unicodeslot=0x095D,
+ specials={ "char", 0x922, 0x93C },
+ unicodeslot=0x95D,
},
- [0x095E]={
+ [0x95E]={
adobename="fadeva",
category="lo",
description="DEVANAGARI LETTER FA",
direction="l",
linebreak="al",
- specials={ "char", 0x092B, 0x093C },
- unicodeslot=0x095E,
+ specials={ "char", 0x92B, 0x93C },
+ unicodeslot=0x95E,
},
- [0x095F]={
+ [0x95F]={
adobename="yyadeva",
category="lo",
description="DEVANAGARI LETTER YYA",
direction="l",
linebreak="al",
- specials={ "char", 0x092F, 0x093C },
- unicodeslot=0x095F,
+ specials={ "char", 0x92F, 0x93C },
+ unicodeslot=0x95F,
},
- [0x0960]={
+ [0x960]={
adobename="rrvocalicdeva",
category="lo",
description="DEVANAGARI LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x0960,
+ unicodeslot=0x960,
},
- [0x0961]={
+ [0x961]={
adobename="llvocalicdeva",
category="lo",
description="DEVANAGARI LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x0961,
+ unicodeslot=0x961,
},
- [0x0962]={
+ [0x962]={
adobename="lvocalicvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0962,
+ unicodeslot=0x962,
},
- [0x0963]={
+ [0x963]={
adobename="llvocalicvowelsigndeva",
category="mn",
description="DEVANAGARI VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0963,
+ unicodeslot=0x963,
},
- [0x0964]={
+ [0x964]={
adobename="danda",
category="po",
description="DEVANAGARI DANDA",
direction="l",
linebreak="ba",
- unicodeslot=0x0964,
+ unicodeslot=0x964,
},
- [0x0965]={
+ [0x965]={
adobename="dbldanda",
category="po",
description="DEVANAGARI DOUBLE DANDA",
direction="l",
linebreak="ba",
- unicodeslot=0x0965,
+ unicodeslot=0x965,
},
- [0x0966]={
+ [0x966]={
adobename="zerodeva",
category="nd",
description="DEVANAGARI DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0966,
+ unicodeslot=0x966,
},
- [0x0967]={
+ [0x967]={
adobename="onedeva",
category="nd",
description="DEVANAGARI DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0967,
+ unicodeslot=0x967,
},
- [0x0968]={
+ [0x968]={
adobename="twodeva",
category="nd",
description="DEVANAGARI DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0968,
+ unicodeslot=0x968,
},
- [0x0969]={
+ [0x969]={
adobename="threedeva",
category="nd",
description="DEVANAGARI DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0969,
+ unicodeslot=0x969,
},
- [0x096A]={
+ [0x96A]={
adobename="fourdeva",
category="nd",
description="DEVANAGARI DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x096A,
+ unicodeslot=0x96A,
},
- [0x096B]={
+ [0x96B]={
adobename="fivedeva",
category="nd",
description="DEVANAGARI DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x096B,
+ unicodeslot=0x96B,
},
- [0x096C]={
+ [0x96C]={
adobename="sixdeva",
category="nd",
description="DEVANAGARI DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x096C,
+ unicodeslot=0x96C,
},
- [0x096D]={
+ [0x96D]={
adobename="sevendeva",
category="nd",
description="DEVANAGARI DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x096D,
+ unicodeslot=0x96D,
},
- [0x096E]={
+ [0x96E]={
adobename="eightdeva",
category="nd",
description="DEVANAGARI DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x096E,
+ unicodeslot=0x96E,
},
- [0x096F]={
+ [0x96F]={
adobename="ninedeva",
category="nd",
description="DEVANAGARI DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x096F,
+ unicodeslot=0x96F,
},
- [0x0970]={
+ [0x970]={
adobename="abbreviationsigndeva",
category="po",
description="DEVANAGARI ABBREVIATION SIGN",
direction="l",
linebreak="al",
- unicodeslot=0x0970,
+ unicodeslot=0x970,
},
- [0x0971]={
+ [0x971]={
category="lm",
description="DEVANAGARI SIGN HIGH SPACING DOT",
direction="l",
linebreak="al",
- unicodeslot=0x0971,
+ unicodeslot=0x971,
},
- [0x0972]={
+ [0x972]={
category="lo",
description="DEVANAGARI LETTER CANDRA A",
direction="l",
linebreak="al",
- unicodeslot=0x0972,
+ unicodeslot=0x972,
},
- [0x0973]={
+ [0x973]={
category="lo",
description="DEVANAGARI LETTER OE",
direction="l",
linebreak="al",
- unicodeslot=0x0973,
+ unicodeslot=0x973,
},
- [0x0974]={
+ [0x974]={
category="lo",
description="DEVANAGARI LETTER OOE",
direction="l",
linebreak="al",
- unicodeslot=0x0974,
+ unicodeslot=0x974,
},
- [0x0975]={
+ [0x975]={
category="lo",
description="DEVANAGARI LETTER AW",
direction="l",
linebreak="al",
- unicodeslot=0x0975,
+ unicodeslot=0x975,
},
- [0x0976]={
+ [0x976]={
category="lo",
description="DEVANAGARI LETTER UE",
direction="l",
linebreak="al",
- unicodeslot=0x0976,
+ unicodeslot=0x976,
},
- [0x0977]={
+ [0x977]={
category="lo",
description="DEVANAGARI LETTER UUE",
direction="l",
linebreak="al",
- unicodeslot=0x0977,
+ unicodeslot=0x977,
},
- [0x0979]={
+ [0x978]={
+ category="lo",
+ description="DEVANAGARI LETTER MARWARI DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x978,
+ },
+ [0x979]={
category="lo",
description="DEVANAGARI LETTER ZHA",
direction="l",
linebreak="al",
- unicodeslot=0x0979,
+ unicodeslot=0x979,
},
- [0x097A]={
+ [0x97A]={
category="lo",
description="DEVANAGARI LETTER HEAVY YA",
direction="l",
linebreak="al",
- unicodeslot=0x097A,
+ unicodeslot=0x97A,
},
- [0x097B]={
+ [0x97B]={
category="lo",
description="DEVANAGARI LETTER GGA",
direction="l",
linebreak="al",
- unicodeslot=0x097B,
+ unicodeslot=0x97B,
},
- [0x097C]={
+ [0x97C]={
category="lo",
description="DEVANAGARI LETTER JJA",
direction="l",
linebreak="al",
- unicodeslot=0x097C,
+ unicodeslot=0x97C,
},
- [0x097D]={
+ [0x97D]={
category="lo",
description="DEVANAGARI LETTER GLOTTAL STOP",
direction="l",
linebreak="al",
- unicodeslot=0x097D,
+ unicodeslot=0x97D,
},
- [0x097E]={
+ [0x97E]={
category="lo",
description="DEVANAGARI LETTER DDDA",
direction="l",
linebreak="al",
- unicodeslot=0x097E,
+ unicodeslot=0x97E,
},
- [0x097F]={
+ [0x97F]={
category="lo",
description="DEVANAGARI LETTER BBA",
direction="l",
linebreak="al",
- unicodeslot=0x097F,
+ unicodeslot=0x97F,
},
- [0x0981]={
+ [0x980]={
+ category="lo",
+ description="BENGALI ANJI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x980,
+ },
+ [0x981]={
adobename="candrabindubengali",
category="mn",
description="BENGALI SIGN CANDRABINDU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0981,
+ unicodeslot=0x981,
},
- [0x0982]={
+ [0x982]={
adobename="anusvarabengali",
category="mc",
description="BENGALI SIGN ANUSVARA",
direction="l",
linebreak="cm",
- unicodeslot=0x0982,
+ unicodeslot=0x982,
},
- [0x0983]={
+ [0x983]={
adobename="visargabengali",
category="mc",
description="BENGALI SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0983,
+ unicodeslot=0x983,
},
- [0x0985]={
+ [0x985]={
adobename="abengali",
category="lo",
description="BENGALI LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0985,
+ unicodeslot=0x985,
},
- [0x0986]={
+ [0x986]={
adobename="aabengali",
category="lo",
description="BENGALI LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0986,
+ unicodeslot=0x986,
},
- [0x0987]={
+ [0x987]={
adobename="ibengali",
category="lo",
description="BENGALI LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0987,
+ unicodeslot=0x987,
},
- [0x0988]={
+ [0x988]={
adobename="iibengali",
category="lo",
description="BENGALI LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0988,
+ unicodeslot=0x988,
},
- [0x0989]={
+ [0x989]={
adobename="ubengali",
category="lo",
description="BENGALI LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0989,
+ unicodeslot=0x989,
},
- [0x098A]={
+ [0x98A]={
adobename="uubengali",
category="lo",
description="BENGALI LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x098A,
+ unicodeslot=0x98A,
},
- [0x098B]={
+ [0x98B]={
adobename="rvocalicbengali",
category="lo",
description="BENGALI LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x098B,
+ unicodeslot=0x98B,
},
- [0x098C]={
+ [0x98C]={
adobename="lvocalicbengali",
category="lo",
description="BENGALI LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x098C,
+ unicodeslot=0x98C,
},
- [0x098F]={
+ [0x98F]={
adobename="ebengali",
category="lo",
description="BENGALI LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x098F,
+ unicodeslot=0x98F,
},
- [0x0990]={
+ [0x990]={
adobename="aibengali",
category="lo",
description="BENGALI LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0990,
+ unicodeslot=0x990,
},
- [0x0993]={
+ [0x993]={
adobename="obengali",
category="lo",
description="BENGALI LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0993,
+ unicodeslot=0x993,
},
- [0x0994]={
+ [0x994]={
adobename="aubengali",
category="lo",
description="BENGALI LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0994,
+ unicodeslot=0x994,
},
- [0x0995]={
+ [0x995]={
adobename="kabengali",
category="lo",
description="BENGALI LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0995,
+ unicodeslot=0x995,
},
- [0x0996]={
+ [0x996]={
adobename="khabengali",
category="lo",
description="BENGALI LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0996,
+ unicodeslot=0x996,
},
- [0x0997]={
+ [0x997]={
adobename="gabengali",
category="lo",
description="BENGALI LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0997,
+ unicodeslot=0x997,
},
- [0x0998]={
+ [0x998]={
adobename="ghabengali",
category="lo",
description="BENGALI LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0998,
+ unicodeslot=0x998,
},
- [0x0999]={
+ [0x999]={
adobename="ngabengali",
category="lo",
description="BENGALI LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0999,
+ unicodeslot=0x999,
},
- [0x099A]={
+ [0x99A]={
adobename="cabengali",
category="lo",
description="BENGALI LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x099A,
+ unicodeslot=0x99A,
},
- [0x099B]={
+ [0x99B]={
adobename="chabengali",
category="lo",
description="BENGALI LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x099B,
+ unicodeslot=0x99B,
},
- [0x099C]={
+ [0x99C]={
adobename="jabengali",
category="lo",
description="BENGALI LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x099C,
+ unicodeslot=0x99C,
},
- [0x099D]={
+ [0x99D]={
adobename="jhabengali",
category="lo",
description="BENGALI LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x099D,
+ unicodeslot=0x99D,
},
- [0x099E]={
+ [0x99E]={
adobename="nyabengali",
category="lo",
description="BENGALI LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x099E,
+ unicodeslot=0x99E,
},
- [0x099F]={
+ [0x99F]={
adobename="ttabengali",
category="lo",
description="BENGALI LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x099F,
+ unicodeslot=0x99F,
},
- [0x09A0]={
+ [0x9A0]={
adobename="tthabengali",
category="lo",
description="BENGALI LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x09A0,
+ unicodeslot=0x9A0,
},
- [0x09A1]={
+ [0x9A1]={
adobename="ddabengali",
category="lo",
description="BENGALI LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x09A1,
+ unicodeslot=0x9A1,
},
- [0x09A2]={
+ [0x9A2]={
adobename="ddhabengali",
category="lo",
description="BENGALI LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x09A2,
+ unicodeslot=0x9A2,
},
- [0x09A3]={
+ [0x9A3]={
adobename="nnabengali",
category="lo",
description="BENGALI LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x09A3,
+ unicodeslot=0x9A3,
},
- [0x09A4]={
+ [0x9A4]={
adobename="tabengali",
category="lo",
description="BENGALI LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x09A4,
+ unicodeslot=0x9A4,
},
- [0x09A5]={
+ [0x9A5]={
adobename="thabengali",
category="lo",
description="BENGALI LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x09A5,
+ unicodeslot=0x9A5,
},
- [0x09A6]={
+ [0x9A6]={
adobename="dabengali",
category="lo",
description="BENGALI LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x09A6,
+ unicodeslot=0x9A6,
},
- [0x09A7]={
+ [0x9A7]={
adobename="dhabengali",
category="lo",
description="BENGALI LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x09A7,
+ unicodeslot=0x9A7,
},
- [0x09A8]={
+ [0x9A8]={
adobename="nabengali",
category="lo",
description="BENGALI LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x09A8,
+ unicodeslot=0x9A8,
},
- [0x09AA]={
+ [0x9AA]={
adobename="pabengali",
category="lo",
description="BENGALI LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x09AA,
+ unicodeslot=0x9AA,
},
- [0x09AB]={
+ [0x9AB]={
adobename="phabengali",
category="lo",
description="BENGALI LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x09AB,
+ unicodeslot=0x9AB,
},
- [0x09AC]={
+ [0x9AC]={
adobename="babengali",
category="lo",
description="BENGALI LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x09AC,
+ unicodeslot=0x9AC,
},
- [0x09AD]={
+ [0x9AD]={
adobename="bhabengali",
category="lo",
description="BENGALI LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x09AD,
+ unicodeslot=0x9AD,
},
- [0x09AE]={
+ [0x9AE]={
adobename="mabengali",
category="lo",
description="BENGALI LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x09AE,
+ unicodeslot=0x9AE,
},
- [0x09AF]={
+ [0x9AF]={
adobename="yabengali",
category="lo",
description="BENGALI LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x09AF,
+ unicodeslot=0x9AF,
},
- [0x09B0]={
+ [0x9B0]={
adobename="rabengali",
category="lo",
description="BENGALI LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x09B0,
+ unicodeslot=0x9B0,
},
- [0x09B2]={
+ [0x9B2]={
adobename="labengali",
category="lo",
description="BENGALI LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x09B2,
+ unicodeslot=0x9B2,
},
- [0x09B6]={
+ [0x9B6]={
adobename="shabengali",
category="lo",
description="BENGALI LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x09B6,
+ unicodeslot=0x9B6,
},
- [0x09B7]={
+ [0x9B7]={
adobename="ssabengali",
category="lo",
description="BENGALI LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x09B7,
+ unicodeslot=0x9B7,
},
- [0x09B8]={
+ [0x9B8]={
adobename="sabengali",
category="lo",
description="BENGALI LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x09B8,
+ unicodeslot=0x9B8,
},
- [0x09B9]={
+ [0x9B9]={
adobename="habengali",
category="lo",
description="BENGALI LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x09B9,
+ unicodeslot=0x9B9,
},
- [0x09BC]={
+ [0x9BC]={
adobename="nuktabengali",
category="mn",
+ combining=0x7,
description="BENGALI SIGN NUKTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09BC,
+ unicodeslot=0x9BC,
},
- [0x09BD]={
+ [0x9BD]={
category="lo",
description="BENGALI SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x09BD,
+ unicodeslot=0x9BD,
},
- [0x09BE]={
+ [0x9BE]={
adobename="aavowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x09BE,
+ unicodeslot=0x9BE,
},
- [0x09BF]={
+ [0x9BF]={
adobename="ivowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x09BF,
+ unicodeslot=0x9BF,
},
- [0x09C0]={
+ [0x9C0]={
adobename="iivowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN II",
direction="l",
linebreak="cm",
- unicodeslot=0x09C0,
+ unicodeslot=0x9C0,
},
- [0x09C1]={
+ [0x9C1]={
adobename="uvowelsignbengali",
category="mn",
description="BENGALI VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09C1,
+ unicodeslot=0x9C1,
},
- [0x09C2]={
+ [0x9C2]={
adobename="uuvowelsignbengali",
category="mn",
description="BENGALI VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09C2,
+ unicodeslot=0x9C2,
},
- [0x09C3]={
+ [0x9C3]={
adobename="rvocalicvowelsignbengali",
category="mn",
description="BENGALI VOWEL SIGN VOCALIC R",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09C3,
+ unicodeslot=0x9C3,
},
- [0x09C4]={
+ [0x9C4]={
adobename="rrvocalicvowelsignbengali",
category="mn",
description="BENGALI VOWEL SIGN VOCALIC RR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09C4,
+ unicodeslot=0x9C4,
},
- [0x09C7]={
+ [0x9C7]={
adobename="evowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN E",
direction="l",
linebreak="cm",
- unicodeslot=0x09C7,
+ unicodeslot=0x9C7,
},
- [0x09C8]={
+ [0x9C8]={
adobename="aivowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN AI",
direction="l",
linebreak="cm",
- unicodeslot=0x09C8,
+ unicodeslot=0x9C8,
},
- [0x09CB]={
+ [0x9CB]={
adobename="ovowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN O",
direction="l",
linebreak="cm",
- specials={ "char", 0x09C7, 0x09BE },
- unicodeslot=0x09CB,
+ specials={ "char", 0x9C7, 0x9BE },
+ unicodeslot=0x9CB,
},
- [0x09CC]={
+ [0x9CC]={
adobename="auvowelsignbengali",
category="mc",
description="BENGALI VOWEL SIGN AU",
direction="l",
linebreak="cm",
- specials={ "char", 0x09C7, 0x09D7 },
- unicodeslot=0x09CC,
+ specials={ "char", 0x9C7, 0x9D7 },
+ unicodeslot=0x9CC,
},
- [0x09CD]={
+ [0x9CD]={
adobename="viramabengali",
category="mn",
+ combining=0x9,
description="BENGALI SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09CD,
+ unicodeslot=0x9CD,
},
- [0x09CE]={
+ [0x9CE]={
category="lo",
description="BENGALI LETTER KHANDA TA",
direction="l",
linebreak="al",
- unicodeslot=0x09CE,
+ unicodeslot=0x9CE,
},
- [0x09D7]={
+ [0x9D7]={
adobename="aulengthmarkbengali",
category="mc",
description="BENGALI AU LENGTH MARK",
direction="l",
linebreak="cm",
- unicodeslot=0x09D7,
+ unicodeslot=0x9D7,
},
- [0x09DC]={
+ [0x9DC]={
adobename="rrabengali",
category="lo",
description="BENGALI LETTER RRA",
direction="l",
linebreak="al",
- specials={ "char", 0x09A1, 0x09BC },
- unicodeslot=0x09DC,
+ specials={ "char", 0x9A1, 0x9BC },
+ unicodeslot=0x9DC,
},
- [0x09DD]={
+ [0x9DD]={
adobename="rhabengali",
category="lo",
description="BENGALI LETTER RHA",
direction="l",
linebreak="al",
- specials={ "char", 0x09A2, 0x09BC },
- unicodeslot=0x09DD,
+ specials={ "char", 0x9A2, 0x9BC },
+ unicodeslot=0x9DD,
},
- [0x09DF]={
+ [0x9DF]={
adobename="yyabengali",
category="lo",
description="BENGALI LETTER YYA",
direction="l",
linebreak="al",
- specials={ "char", 0x09AF, 0x09BC },
- unicodeslot=0x09DF,
+ specials={ "char", 0x9AF, 0x9BC },
+ unicodeslot=0x9DF,
},
- [0x09E0]={
+ [0x9E0]={
adobename="rrvocalicbengali",
category="lo",
description="BENGALI LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x09E0,
+ unicodeslot=0x9E0,
},
- [0x09E1]={
+ [0x9E1]={
adobename="llvocalicbengali",
category="lo",
description="BENGALI LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x09E1,
+ unicodeslot=0x9E1,
},
- [0x09E2]={
+ [0x9E2]={
adobename="lvocalicvowelsignbengali",
category="mn",
description="BENGALI VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09E2,
+ unicodeslot=0x9E2,
},
- [0x09E3]={
+ [0x9E3]={
adobename="llvocalicvowelsignbengali",
category="mn",
description="BENGALI VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x09E3,
+ unicodeslot=0x9E3,
},
- [0x09E6]={
+ [0x9E6]={
adobename="zerobengali",
category="nd",
description="BENGALI DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x09E6,
+ unicodeslot=0x9E6,
},
- [0x09E7]={
+ [0x9E7]={
adobename="onebengali",
category="nd",
description="BENGALI DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x09E7,
+ unicodeslot=0x9E7,
},
- [0x09E8]={
+ [0x9E8]={
adobename="twobengali",
category="nd",
description="BENGALI DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x09E8,
+ unicodeslot=0x9E8,
},
- [0x09E9]={
+ [0x9E9]={
adobename="threebengali",
category="nd",
description="BENGALI DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x09E9,
+ unicodeslot=0x9E9,
},
- [0x09EA]={
+ [0x9EA]={
adobename="fourbengali",
category="nd",
description="BENGALI DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x09EA,
+ unicodeslot=0x9EA,
},
- [0x09EB]={
+ [0x9EB]={
adobename="fivebengali",
category="nd",
description="BENGALI DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x09EB,
+ unicodeslot=0x9EB,
},
- [0x09EC]={
+ [0x9EC]={
adobename="sixbengali",
category="nd",
description="BENGALI DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x09EC,
+ unicodeslot=0x9EC,
},
- [0x09ED]={
+ [0x9ED]={
adobename="sevenbengali",
category="nd",
description="BENGALI DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x09ED,
+ unicodeslot=0x9ED,
},
- [0x09EE]={
+ [0x9EE]={
adobename="eightbengali",
category="nd",
description="BENGALI DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x09EE,
+ unicodeslot=0x9EE,
},
- [0x09EF]={
+ [0x9EF]={
adobename="ninebengali",
category="nd",
description="BENGALI DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x09EF,
+ unicodeslot=0x9EF,
},
- [0x09F0]={
+ [0x9F0]={
adobename="ramiddlediagonalbengali",
category="lo",
description="BENGALI LETTER RA WITH MIDDLE DIAGONAL",
direction="l",
linebreak="al",
- shcode=0x09B0,
- unicodeslot=0x09F0,
+ shcode=0x9B0,
+ unicodeslot=0x9F0,
},
- [0x09F1]={
+ [0x9F1]={
adobename="ralowerdiagonalbengali",
category="lo",
description="BENGALI LETTER RA WITH LOWER DIAGONAL",
direction="l",
linebreak="al",
- shcode=0x09B0,
- unicodeslot=0x09F1,
+ shcode=0x9B0,
+ unicodeslot=0x9F1,
},
- [0x09F2]={
+ [0x9F2]={
adobename="rupeemarkbengali",
category="sc",
description="BENGALI RUPEE MARK",
direction="et",
linebreak="po",
- unicodeslot=0x09F2,
+ unicodeslot=0x9F2,
},
- [0x09F3]={
+ [0x9F3]={
adobename="rupeesignbengali",
category="sc",
description="BENGALI RUPEE SIGN",
direction="et",
linebreak="po",
- unicodeslot=0x09F3,
+ unicodeslot=0x9F3,
},
- [0x09F4]={
+ [0x9F4]={
adobename="onenumeratorbengali",
category="no",
description="BENGALI CURRENCY NUMERATOR ONE",
direction="l",
linebreak="al",
- unicodeslot=0x09F4,
+ unicodeslot=0x9F4,
},
- [0x09F5]={
+ [0x9F5]={
adobename="twonumeratorbengali",
category="no",
description="BENGALI CURRENCY NUMERATOR TWO",
direction="l",
linebreak="al",
- unicodeslot=0x09F5,
+ unicodeslot=0x9F5,
},
- [0x09F6]={
+ [0x9F6]={
adobename="threenumeratorbengali",
category="no",
description="BENGALI CURRENCY NUMERATOR THREE",
direction="l",
linebreak="al",
- unicodeslot=0x09F6,
+ unicodeslot=0x9F6,
},
- [0x09F7]={
+ [0x9F7]={
adobename="fournumeratorbengali",
category="no",
description="BENGALI CURRENCY NUMERATOR FOUR",
direction="l",
linebreak="al",
- unicodeslot=0x09F7,
+ unicodeslot=0x9F7,
},
- [0x09F8]={
+ [0x9F8]={
adobename="denominatorminusonenumeratorbengali",
category="no",
description="BENGALI CURRENCY NUMERATOR ONE LESS THAN THE DENOMINATOR",
direction="l",
linebreak="al",
- unicodeslot=0x09F8,
+ unicodeslot=0x9F8,
},
- [0x09F9]={
+ [0x9F9]={
adobename="sixteencurrencydenominatorbengali",
category="no",
description="BENGALI CURRENCY DENOMINATOR SIXTEEN",
direction="l",
linebreak="po",
- unicodeslot=0x09F9,
+ unicodeslot=0x9F9,
},
- [0x09FA]={
+ [0x9FA]={
adobename="issharbengali",
category="so",
description="BENGALI ISSHAR",
direction="l",
linebreak="al",
- unicodeslot=0x09FA,
+ unicodeslot=0x9FA,
},
- [0x09FB]={
+ [0x9FB]={
category="sc",
description="BENGALI GANDA MARK",
direction="et",
linebreak="pr",
- unicodeslot=0x09FB,
+ unicodeslot=0x9FB,
},
- [0x0A01]={
+ [0xA01]={
category="mn",
description="GURMUKHI SIGN ADAK BINDI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A01,
+ unicodeslot=0xA01,
},
- [0x0A02]={
+ [0xA02]={
adobename="bindigurmukhi",
category="mn",
description="GURMUKHI SIGN BINDI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A02,
+ unicodeslot=0xA02,
},
- [0x0A03]={
+ [0xA03]={
category="mc",
description="GURMUKHI SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0A03,
+ unicodeslot=0xA03,
},
- [0x0A05]={
+ [0xA05]={
adobename="agurmukhi",
category="lo",
description="GURMUKHI LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0A05,
+ unicodeslot=0xA05,
},
- [0x0A06]={
+ [0xA06]={
adobename="aagurmukhi",
category="lo",
description="GURMUKHI LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0A06,
+ unicodeslot=0xA06,
},
- [0x0A07]={
+ [0xA07]={
adobename="igurmukhi",
category="lo",
description="GURMUKHI LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0A07,
+ unicodeslot=0xA07,
},
- [0x0A08]={
+ [0xA08]={
adobename="iigurmukhi",
category="lo",
description="GURMUKHI LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0A08,
+ unicodeslot=0xA08,
},
- [0x0A09]={
+ [0xA09]={
adobename="ugurmukhi",
category="lo",
description="GURMUKHI LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0A09,
+ unicodeslot=0xA09,
},
- [0x0A0A]={
+ [0xA0A]={
adobename="uugurmukhi",
category="lo",
description="GURMUKHI LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0A0A,
+ unicodeslot=0xA0A,
},
- [0x0A0F]={
+ [0xA0F]={
adobename="eegurmukhi",
category="lo",
description="GURMUKHI LETTER EE",
direction="l",
linebreak="al",
- unicodeslot=0x0A0F,
+ unicodeslot=0xA0F,
},
- [0x0A10]={
+ [0xA10]={
adobename="aigurmukhi",
category="lo",
description="GURMUKHI LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0A10,
+ unicodeslot=0xA10,
},
- [0x0A13]={
+ [0xA13]={
adobename="oogurmukhi",
category="lo",
description="GURMUKHI LETTER OO",
direction="l",
linebreak="al",
- unicodeslot=0x0A13,
+ unicodeslot=0xA13,
},
- [0x0A14]={
+ [0xA14]={
adobename="augurmukhi",
category="lo",
description="GURMUKHI LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0A14,
+ unicodeslot=0xA14,
},
- [0x0A15]={
+ [0xA15]={
adobename="kagurmukhi",
category="lo",
description="GURMUKHI LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0A15,
+ unicodeslot=0xA15,
},
- [0x0A16]={
+ [0xA16]={
adobename="khagurmukhi",
category="lo",
description="GURMUKHI LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A16,
+ unicodeslot=0xA16,
},
- [0x0A17]={
+ [0xA17]={
adobename="gagurmukhi",
category="lo",
description="GURMUKHI LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0A17,
+ unicodeslot=0xA17,
},
- [0x0A18]={
+ [0xA18]={
adobename="ghagurmukhi",
category="lo",
description="GURMUKHI LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A18,
+ unicodeslot=0xA18,
},
- [0x0A19]={
+ [0xA19]={
adobename="ngagurmukhi",
category="lo",
description="GURMUKHI LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0A19,
+ unicodeslot=0xA19,
},
- [0x0A1A]={
+ [0xA1A]={
adobename="cagurmukhi",
category="lo",
description="GURMUKHI LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0A1A,
+ unicodeslot=0xA1A,
},
- [0x0A1B]={
+ [0xA1B]={
adobename="chagurmukhi",
category="lo",
description="GURMUKHI LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A1B,
+ unicodeslot=0xA1B,
},
- [0x0A1C]={
+ [0xA1C]={
adobename="jagurmukhi",
category="lo",
description="GURMUKHI LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0A1C,
+ unicodeslot=0xA1C,
},
- [0x0A1D]={
+ [0xA1D]={
adobename="jhagurmukhi",
category="lo",
description="GURMUKHI LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A1D,
+ unicodeslot=0xA1D,
},
- [0x0A1E]={
+ [0xA1E]={
adobename="nyagurmukhi",
category="lo",
description="GURMUKHI LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0A1E,
+ unicodeslot=0xA1E,
},
- [0x0A1F]={
+ [0xA1F]={
adobename="ttagurmukhi",
category="lo",
description="GURMUKHI LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0A1F,
+ unicodeslot=0xA1F,
},
- [0x0A20]={
+ [0xA20]={
adobename="tthagurmukhi",
category="lo",
description="GURMUKHI LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A20,
+ unicodeslot=0xA20,
},
- [0x0A21]={
+ [0xA21]={
adobename="ddagurmukhi",
category="lo",
description="GURMUKHI LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0A21,
+ unicodeslot=0xA21,
},
- [0x0A22]={
+ [0xA22]={
adobename="ddhagurmukhi",
category="lo",
description="GURMUKHI LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A22,
+ unicodeslot=0xA22,
},
- [0x0A23]={
+ [0xA23]={
adobename="nnagurmukhi",
category="lo",
description="GURMUKHI LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0A23,
+ unicodeslot=0xA23,
},
- [0x0A24]={
+ [0xA24]={
adobename="tagurmukhi",
category="lo",
description="GURMUKHI LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0A24,
+ unicodeslot=0xA24,
},
- [0x0A25]={
+ [0xA25]={
adobename="thagurmukhi",
category="lo",
description="GURMUKHI LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0A25,
+ unicodeslot=0xA25,
},
- [0x0A26]={
+ [0xA26]={
adobename="dagurmukhi",
category="lo",
description="GURMUKHI LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0A26,
+ unicodeslot=0xA26,
},
- [0x0A27]={
+ [0xA27]={
adobename="dhagurmukhi",
category="lo",
description="GURMUKHI LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A27,
+ unicodeslot=0xA27,
},
- [0x0A28]={
+ [0xA28]={
adobename="nagurmukhi",
category="lo",
description="GURMUKHI LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0A28,
+ unicodeslot=0xA28,
},
- [0x0A2A]={
+ [0xA2A]={
adobename="pagurmukhi",
category="lo",
description="GURMUKHI LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0A2A,
+ unicodeslot=0xA2A,
},
- [0x0A2B]={
+ [0xA2B]={
adobename="phagurmukhi",
category="lo",
description="GURMUKHI LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A2B,
+ unicodeslot=0xA2B,
},
- [0x0A2C]={
+ [0xA2C]={
adobename="bagurmukhi",
category="lo",
description="GURMUKHI LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0A2C,
+ unicodeslot=0xA2C,
},
- [0x0A2D]={
+ [0xA2D]={
adobename="bhagurmukhi",
category="lo",
description="GURMUKHI LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A2D,
+ unicodeslot=0xA2D,
},
- [0x0A2E]={
+ [0xA2E]={
adobename="magurmukhi",
category="lo",
description="GURMUKHI LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0A2E,
+ unicodeslot=0xA2E,
},
- [0x0A2F]={
+ [0xA2F]={
adobename="yagurmukhi",
category="lo",
description="GURMUKHI LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0A2F,
+ unicodeslot=0xA2F,
},
- [0x0A30]={
+ [0xA30]={
adobename="ragurmukhi",
category="lo",
description="GURMUKHI LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0A30,
+ unicodeslot=0xA30,
},
- [0x0A32]={
+ [0xA32]={
adobename="lagurmukhi",
category="lo",
description="GURMUKHI LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0A32,
+ unicodeslot=0xA32,
},
- [0x0A33]={
+ [0xA33]={
category="lo",
description="GURMUKHI LETTER LLA",
direction="l",
linebreak="al",
- specials={ "char", 0x0A32, 0x0A3C },
- unicodeslot=0x0A33,
+ specials={ "char", 0xA32, 0xA3C },
+ unicodeslot=0xA33,
},
- [0x0A35]={
+ [0xA35]={
adobename="vagurmukhi",
category="lo",
description="GURMUKHI LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0A35,
+ unicodeslot=0xA35,
},
- [0x0A36]={
+ [0xA36]={
adobename="shagurmukhi",
category="lo",
description="GURMUKHI LETTER SHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0A38, 0x0A3C },
- unicodeslot=0x0A36,
+ specials={ "char", 0xA38, 0xA3C },
+ unicodeslot=0xA36,
},
- [0x0A38]={
+ [0xA38]={
adobename="sagurmukhi",
category="lo",
description="GURMUKHI LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0A38,
+ unicodeslot=0xA38,
},
- [0x0A39]={
+ [0xA39]={
adobename="hagurmukhi",
category="lo",
description="GURMUKHI LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0A39,
+ unicodeslot=0xA39,
},
- [0x0A3C]={
+ [0xA3C]={
adobename="nuktagurmukhi",
category="mn",
+ combining=0x7,
description="GURMUKHI SIGN NUKTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A3C,
+ unicodeslot=0xA3C,
},
- [0x0A3E]={
+ [0xA3E]={
adobename="aamatragurmukhi",
category="mc",
description="GURMUKHI VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x0A3E,
+ unicodeslot=0xA3E,
},
- [0x0A3F]={
+ [0xA3F]={
adobename="imatragurmukhi",
category="mc",
description="GURMUKHI VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x0A3F,
+ unicodeslot=0xA3F,
},
- [0x0A40]={
+ [0xA40]={
adobename="iimatragurmukhi",
category="mc",
description="GURMUKHI VOWEL SIGN II",
direction="l",
linebreak="cm",
- unicodeslot=0x0A40,
+ unicodeslot=0xA40,
},
- [0x0A41]={
+ [0xA41]={
adobename="umatragurmukhi",
category="mn",
description="GURMUKHI VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A41,
+ unicodeslot=0xA41,
},
- [0x0A42]={
+ [0xA42]={
adobename="uumatragurmukhi",
category="mn",
description="GURMUKHI VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A42,
+ unicodeslot=0xA42,
},
- [0x0A47]={
+ [0xA47]={
adobename="eematragurmukhi",
category="mn",
description="GURMUKHI VOWEL SIGN EE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A47,
+ unicodeslot=0xA47,
},
- [0x0A48]={
+ [0xA48]={
adobename="aimatragurmukhi",
category="mn",
description="GURMUKHI VOWEL SIGN AI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A48,
+ unicodeslot=0xA48,
},
- [0x0A4B]={
+ [0xA4B]={
adobename="oomatragurmukhi",
category="mn",
description="GURMUKHI VOWEL SIGN OO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A4B,
+ unicodeslot=0xA4B,
},
- [0x0A4C]={
+ [0xA4C]={
adobename="aumatragurmukhi",
category="mn",
description="GURMUKHI VOWEL SIGN AU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A4C,
+ unicodeslot=0xA4C,
},
- [0x0A4D]={
+ [0xA4D]={
adobename="halantgurmukhi",
category="mn",
+ combining=0x9,
description="GURMUKHI SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A4D,
+ unicodeslot=0xA4D,
},
- [0x0A51]={
+ [0xA51]={
category="mn",
description="GURMUKHI SIGN UDAAT",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A51,
+ unicodeslot=0xA51,
},
- [0x0A59]={
+ [0xA59]={
adobename="khhagurmukhi",
category="lo",
description="GURMUKHI LETTER KHHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0A16, 0x0A3C },
- unicodeslot=0x0A59,
+ specials={ "char", 0xA16, 0xA3C },
+ unicodeslot=0xA59,
},
- [0x0A5A]={
+ [0xA5A]={
adobename="ghhagurmukhi",
category="lo",
description="GURMUKHI LETTER GHHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0A17, 0x0A3C },
- unicodeslot=0x0A5A,
+ specials={ "char", 0xA17, 0xA3C },
+ unicodeslot=0xA5A,
},
- [0x0A5B]={
+ [0xA5B]={
adobename="zagurmukhi",
category="lo",
description="GURMUKHI LETTER ZA",
direction="l",
linebreak="al",
- specials={ "char", 0x0A1C, 0x0A3C },
- unicodeslot=0x0A5B,
+ specials={ "char", 0xA1C, 0xA3C },
+ unicodeslot=0xA5B,
},
- [0x0A5C]={
+ [0xA5C]={
adobename="rragurmukhi",
category="lo",
description="GURMUKHI LETTER RRA",
direction="l",
linebreak="al",
- unicodeslot=0x0A5C,
+ unicodeslot=0xA5C,
},
- [0x0A5E]={
+ [0xA5E]={
adobename="fagurmukhi",
category="lo",
description="GURMUKHI LETTER FA",
direction="l",
linebreak="al",
- specials={ "char", 0x0A2B, 0x0A3C },
- unicodeslot=0x0A5E,
+ specials={ "char", 0xA2B, 0xA3C },
+ unicodeslot=0xA5E,
},
- [0x0A66]={
+ [0xA66]={
adobename="zerogurmukhi",
category="nd",
description="GURMUKHI DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0A66,
+ unicodeslot=0xA66,
},
- [0x0A67]={
+ [0xA67]={
adobename="onegurmukhi",
category="nd",
description="GURMUKHI DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0A67,
+ unicodeslot=0xA67,
},
- [0x0A68]={
+ [0xA68]={
adobename="twogurmukhi",
category="nd",
description="GURMUKHI DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0A68,
+ unicodeslot=0xA68,
},
- [0x0A69]={
+ [0xA69]={
adobename="threegurmukhi",
category="nd",
description="GURMUKHI DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0A69,
+ unicodeslot=0xA69,
},
- [0x0A6A]={
+ [0xA6A]={
adobename="fourgurmukhi",
category="nd",
description="GURMUKHI DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0A6A,
+ unicodeslot=0xA6A,
},
- [0x0A6B]={
+ [0xA6B]={
adobename="fivegurmukhi",
category="nd",
description="GURMUKHI DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0A6B,
+ unicodeslot=0xA6B,
},
- [0x0A6C]={
+ [0xA6C]={
adobename="sixgurmukhi",
category="nd",
description="GURMUKHI DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0A6C,
+ unicodeslot=0xA6C,
},
- [0x0A6D]={
+ [0xA6D]={
adobename="sevengurmukhi",
category="nd",
description="GURMUKHI DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0A6D,
+ unicodeslot=0xA6D,
},
- [0x0A6E]={
+ [0xA6E]={
adobename="eightgurmukhi",
category="nd",
description="GURMUKHI DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0A6E,
+ unicodeslot=0xA6E,
},
- [0x0A6F]={
+ [0xA6F]={
adobename="ninegurmukhi",
category="nd",
description="GURMUKHI DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0A6F,
+ unicodeslot=0xA6F,
},
- [0x0A70]={
+ [0xA70]={
adobename="tippigurmukhi",
category="mn",
description="GURMUKHI TIPPI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A70,
+ unicodeslot=0xA70,
},
- [0x0A71]={
+ [0xA71]={
adobename="addakgurmukhi",
category="mn",
description="GURMUKHI ADDAK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A71,
+ unicodeslot=0xA71,
},
- [0x0A72]={
+ [0xA72]={
adobename="irigurmukhi",
category="lo",
description="GURMUKHI IRI",
direction="l",
linebreak="al",
- unicodeslot=0x0A72,
+ unicodeslot=0xA72,
},
- [0x0A73]={
+ [0xA73]={
adobename="uragurmukhi",
category="lo",
description="GURMUKHI URA",
direction="l",
linebreak="al",
- unicodeslot=0x0A73,
+ unicodeslot=0xA73,
},
- [0x0A74]={
+ [0xA74]={
adobename="ekonkargurmukhi",
category="lo",
description="GURMUKHI EK ONKAR",
direction="l",
linebreak="al",
- unicodeslot=0x0A74,
+ unicodeslot=0xA74,
},
- [0x0A75]={
+ [0xA75]={
category="mn",
description="GURMUKHI SIGN YAKASH",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A75,
+ unicodeslot=0xA75,
},
- [0x0A81]={
+ [0xA81]={
adobename="candrabindugujarati",
category="mn",
description="GUJARATI SIGN CANDRABINDU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A81,
+ unicodeslot=0xA81,
},
- [0x0A82]={
+ [0xA82]={
adobename="anusvaragujarati",
category="mn",
description="GUJARATI SIGN ANUSVARA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0A82,
+ unicodeslot=0xA82,
},
- [0x0A83]={
+ [0xA83]={
adobename="visargagujarati",
category="mc",
description="GUJARATI SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0A83,
+ unicodeslot=0xA83,
},
- [0x0A85]={
+ [0xA85]={
adobename="agujarati",
category="lo",
description="GUJARATI LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0A85,
+ unicodeslot=0xA85,
},
- [0x0A86]={
+ [0xA86]={
adobename="aagujarati",
category="lo",
description="GUJARATI LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0A86,
+ unicodeslot=0xA86,
},
- [0x0A87]={
+ [0xA87]={
adobename="igujarati",
category="lo",
description="GUJARATI LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0A87,
+ unicodeslot=0xA87,
},
- [0x0A88]={
+ [0xA88]={
adobename="iigujarati",
category="lo",
description="GUJARATI LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0A88,
+ unicodeslot=0xA88,
},
- [0x0A89]={
+ [0xA89]={
adobename="ugujarati",
category="lo",
description="GUJARATI LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0A89,
+ unicodeslot=0xA89,
},
- [0x0A8A]={
+ [0xA8A]={
adobename="uugujarati",
category="lo",
description="GUJARATI LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0A8A,
+ unicodeslot=0xA8A,
},
- [0x0A8B]={
+ [0xA8B]={
adobename="rvocalicgujarati",
category="lo",
description="GUJARATI LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x0A8B,
+ unicodeslot=0xA8B,
},
- [0x0A8C]={
+ [0xA8C]={
category="lo",
description="GUJARATI LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x0A8C,
+ unicodeslot=0xA8C,
},
- [0x0A8D]={
+ [0xA8D]={
adobename="ecandragujarati",
category="lo",
description="GUJARATI VOWEL CANDRA E",
direction="l",
linebreak="al",
- unicodeslot=0x0A8D,
+ unicodeslot=0xA8D,
},
- [0x0A8F]={
+ [0xA8F]={
adobename="egujarati",
category="lo",
description="GUJARATI LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x0A8F,
+ unicodeslot=0xA8F,
},
- [0x0A90]={
+ [0xA90]={
adobename="aigujarati",
category="lo",
description="GUJARATI LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0A90,
+ unicodeslot=0xA90,
},
- [0x0A91]={
+ [0xA91]={
adobename="ocandragujarati",
category="lo",
description="GUJARATI VOWEL CANDRA O",
direction="l",
linebreak="al",
- unicodeslot=0x0A91,
+ unicodeslot=0xA91,
},
- [0x0A93]={
+ [0xA93]={
adobename="ogujarati",
category="lo",
description="GUJARATI LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0A93,
+ unicodeslot=0xA93,
},
- [0x0A94]={
+ [0xA94]={
adobename="augujarati",
category="lo",
description="GUJARATI LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0A94,
+ unicodeslot=0xA94,
},
- [0x0A95]={
+ [0xA95]={
adobename="kagujarati",
category="lo",
description="GUJARATI LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0A95,
+ unicodeslot=0xA95,
},
- [0x0A96]={
+ [0xA96]={
adobename="khagujarati",
category="lo",
description="GUJARATI LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A96,
+ unicodeslot=0xA96,
},
- [0x0A97]={
+ [0xA97]={
adobename="gagujarati",
category="lo",
description="GUJARATI LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0A97,
+ unicodeslot=0xA97,
},
- [0x0A98]={
+ [0xA98]={
adobename="ghagujarati",
category="lo",
description="GUJARATI LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A98,
+ unicodeslot=0xA98,
},
- [0x0A99]={
+ [0xA99]={
adobename="ngagujarati",
category="lo",
description="GUJARATI LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0A99,
+ unicodeslot=0xA99,
},
- [0x0A9A]={
+ [0xA9A]={
adobename="cagujarati",
category="lo",
description="GUJARATI LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0A9A,
+ unicodeslot=0xA9A,
},
- [0x0A9B]={
+ [0xA9B]={
adobename="chagujarati",
category="lo",
description="GUJARATI LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A9B,
+ unicodeslot=0xA9B,
},
- [0x0A9C]={
+ [0xA9C]={
adobename="jagujarati",
category="lo",
description="GUJARATI LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0A9C,
+ unicodeslot=0xA9C,
},
- [0x0A9D]={
+ [0xA9D]={
adobename="jhagujarati",
category="lo",
description="GUJARATI LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x0A9D,
+ unicodeslot=0xA9D,
},
- [0x0A9E]={
+ [0xA9E]={
adobename="nyagujarati",
category="lo",
description="GUJARATI LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0A9E,
+ unicodeslot=0xA9E,
},
- [0x0A9F]={
+ [0xA9F]={
adobename="ttagujarati",
category="lo",
description="GUJARATI LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0A9F,
+ unicodeslot=0xA9F,
},
- [0x0AA0]={
+ [0xAA0]={
adobename="tthagujarati",
category="lo",
description="GUJARATI LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA0,
+ unicodeslot=0xAA0,
},
- [0x0AA1]={
+ [0xAA1]={
adobename="ddagujarati",
category="lo",
description="GUJARATI LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA1,
+ unicodeslot=0xAA1,
},
- [0x0AA2]={
+ [0xAA2]={
adobename="ddhagujarati",
category="lo",
description="GUJARATI LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA2,
+ unicodeslot=0xAA2,
},
- [0x0AA3]={
+ [0xAA3]={
adobename="nnagujarati",
category="lo",
description="GUJARATI LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA3,
+ unicodeslot=0xAA3,
},
- [0x0AA4]={
+ [0xAA4]={
adobename="tagujarati",
category="lo",
description="GUJARATI LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA4,
+ unicodeslot=0xAA4,
},
- [0x0AA5]={
+ [0xAA5]={
adobename="thagujarati",
category="lo",
description="GUJARATI LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA5,
+ unicodeslot=0xAA5,
},
- [0x0AA6]={
+ [0xAA6]={
adobename="dagujarati",
category="lo",
description="GUJARATI LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA6,
+ unicodeslot=0xAA6,
},
- [0x0AA7]={
+ [0xAA7]={
adobename="dhagujarati",
category="lo",
description="GUJARATI LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA7,
+ unicodeslot=0xAA7,
},
- [0x0AA8]={
+ [0xAA8]={
adobename="nagujarati",
category="lo",
description="GUJARATI LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0AA8,
+ unicodeslot=0xAA8,
},
- [0x0AAA]={
+ [0xAAA]={
adobename="pagujarati",
category="lo",
description="GUJARATI LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0AAA,
+ unicodeslot=0xAAA,
},
- [0x0AAB]={
+ [0xAAB]={
adobename="phagujarati",
category="lo",
description="GUJARATI LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0AAB,
+ unicodeslot=0xAAB,
},
- [0x0AAC]={
+ [0xAAC]={
adobename="bagujarati",
category="lo",
description="GUJARATI LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0AAC,
+ unicodeslot=0xAAC,
},
- [0x0AAD]={
+ [0xAAD]={
adobename="bhagujarati",
category="lo",
description="GUJARATI LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x0AAD,
+ unicodeslot=0xAAD,
},
- [0x0AAE]={
+ [0xAAE]={
adobename="magujarati",
category="lo",
description="GUJARATI LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0AAE,
+ unicodeslot=0xAAE,
},
- [0x0AAF]={
+ [0xAAF]={
adobename="yagujarati",
category="lo",
description="GUJARATI LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0AAF,
+ unicodeslot=0xAAF,
},
- [0x0AB0]={
+ [0xAB0]={
adobename="ragujarati",
category="lo",
description="GUJARATI LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB0,
+ unicodeslot=0xAB0,
},
- [0x0AB2]={
+ [0xAB2]={
adobename="lagujarati",
category="lo",
description="GUJARATI LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB2,
+ unicodeslot=0xAB2,
},
- [0x0AB3]={
+ [0xAB3]={
adobename="llagujarati",
category="lo",
description="GUJARATI LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB3,
+ unicodeslot=0xAB3,
},
- [0x0AB5]={
+ [0xAB5]={
adobename="vagujarati",
category="lo",
description="GUJARATI LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB5,
+ unicodeslot=0xAB5,
},
- [0x0AB6]={
+ [0xAB6]={
adobename="shagujarati",
category="lo",
description="GUJARATI LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB6,
+ unicodeslot=0xAB6,
},
- [0x0AB7]={
+ [0xAB7]={
adobename="ssagujarati",
category="lo",
description="GUJARATI LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB7,
+ unicodeslot=0xAB7,
},
- [0x0AB8]={
+ [0xAB8]={
adobename="sagujarati",
category="lo",
description="GUJARATI LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB8,
+ unicodeslot=0xAB8,
},
- [0x0AB9]={
+ [0xAB9]={
adobename="hagujarati",
category="lo",
description="GUJARATI LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0AB9,
+ unicodeslot=0xAB9,
},
- [0x0ABC]={
+ [0xABC]={
adobename="nuktagujarati",
category="mn",
+ combining=0x7,
description="GUJARATI SIGN NUKTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0ABC,
+ unicodeslot=0xABC,
},
- [0x0ABD]={
+ [0xABD]={
category="lo",
description="GUJARATI SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x0ABD,
+ unicodeslot=0xABD,
},
- [0x0ABE]={
+ [0xABE]={
adobename="aavowelsigngujarati",
category="mc",
description="GUJARATI VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x0ABE,
+ unicodeslot=0xABE,
},
- [0x0ABF]={
+ [0xABF]={
adobename="ivowelsigngujarati",
category="mc",
description="GUJARATI VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x0ABF,
+ unicodeslot=0xABF,
},
- [0x0AC0]={
+ [0xAC0]={
adobename="iivowelsigngujarati",
category="mc",
description="GUJARATI VOWEL SIGN II",
direction="l",
linebreak="cm",
- unicodeslot=0x0AC0,
+ unicodeslot=0xAC0,
},
- [0x0AC1]={
+ [0xAC1]={
adobename="uvowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC1,
+ unicodeslot=0xAC1,
},
- [0x0AC2]={
+ [0xAC2]={
adobename="uuvowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC2,
+ unicodeslot=0xAC2,
},
- [0x0AC3]={
+ [0xAC3]={
adobename="rvocalicvowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN VOCALIC R",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC3,
+ unicodeslot=0xAC3,
},
- [0x0AC4]={
+ [0xAC4]={
adobename="rrvocalicvowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN VOCALIC RR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC4,
+ unicodeslot=0xAC4,
},
- [0x0AC5]={
+ [0xAC5]={
adobename="ecandravowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN CANDRA E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC5,
+ unicodeslot=0xAC5,
},
- [0x0AC7]={
+ [0xAC7]={
adobename="evowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC7,
+ unicodeslot=0xAC7,
},
- [0x0AC8]={
+ [0xAC8]={
adobename="aivowelsigngujarati",
category="mn",
description="GUJARATI VOWEL SIGN AI",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AC8,
+ unicodeslot=0xAC8,
},
- [0x0AC9]={
+ [0xAC9]={
adobename="ocandravowelsigngujarati",
category="mc",
description="GUJARATI VOWEL SIGN CANDRA O",
direction="l",
linebreak="cm",
- unicodeslot=0x0AC9,
+ unicodeslot=0xAC9,
},
- [0x0ACB]={
+ [0xACB]={
adobename="ovowelsigngujarati",
category="mc",
description="GUJARATI VOWEL SIGN O",
direction="l",
linebreak="cm",
- unicodeslot=0x0ACB,
+ unicodeslot=0xACB,
},
- [0x0ACC]={
+ [0xACC]={
adobename="auvowelsigngujarati",
category="mc",
description="GUJARATI VOWEL SIGN AU",
direction="l",
linebreak="cm",
- unicodeslot=0x0ACC,
+ unicodeslot=0xACC,
},
- [0x0ACD]={
+ [0xACD]={
adobename="viramagujarati",
category="mn",
+ combining=0x9,
description="GUJARATI SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0ACD,
+ unicodeslot=0xACD,
},
- [0x0AD0]={
+ [0xAD0]={
adobename="omgujarati",
category="lo",
description="GUJARATI OM",
direction="l",
linebreak="al",
- unicodeslot=0x0AD0,
+ unicodeslot=0xAD0,
},
- [0x0AE0]={
+ [0xAE0]={
adobename="rrvocalicgujarati",
category="lo",
description="GUJARATI LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x0AE0,
+ unicodeslot=0xAE0,
},
- [0x0AE1]={
+ [0xAE1]={
category="lo",
description="GUJARATI LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x0AE1,
+ unicodeslot=0xAE1,
},
- [0x0AE2]={
+ [0xAE2]={
category="mn",
description="GUJARATI VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AE2,
+ unicodeslot=0xAE2,
},
- [0x0AE3]={
+ [0xAE3]={
category="mn",
description="GUJARATI VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0AE3,
+ unicodeslot=0xAE3,
},
- [0x0AE6]={
+ [0xAE6]={
adobename="zerogujarati",
category="nd",
description="GUJARATI DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0AE6,
+ unicodeslot=0xAE6,
},
- [0x0AE7]={
+ [0xAE7]={
adobename="onegujarati",
category="nd",
description="GUJARATI DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0AE7,
+ unicodeslot=0xAE7,
},
- [0x0AE8]={
+ [0xAE8]={
adobename="twogujarati",
category="nd",
description="GUJARATI DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0AE8,
+ unicodeslot=0xAE8,
},
- [0x0AE9]={
+ [0xAE9]={
adobename="threegujarati",
category="nd",
description="GUJARATI DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0AE9,
+ unicodeslot=0xAE9,
},
- [0x0AEA]={
+ [0xAEA]={
adobename="fourgujarati",
category="nd",
description="GUJARATI DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0AEA,
+ unicodeslot=0xAEA,
},
- [0x0AEB]={
+ [0xAEB]={
adobename="fivegujarati",
category="nd",
description="GUJARATI DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0AEB,
+ unicodeslot=0xAEB,
},
- [0x0AEC]={
+ [0xAEC]={
adobename="sixgujarati",
category="nd",
description="GUJARATI DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0AEC,
+ unicodeslot=0xAEC,
},
- [0x0AED]={
+ [0xAED]={
adobename="sevengujarati",
category="nd",
description="GUJARATI DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0AED,
+ unicodeslot=0xAED,
},
- [0x0AEE]={
+ [0xAEE]={
adobename="eightgujarati",
category="nd",
description="GUJARATI DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0AEE,
+ unicodeslot=0xAEE,
},
- [0x0AEF]={
+ [0xAEF]={
adobename="ninegujarati",
category="nd",
description="GUJARATI DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0AEF,
+ unicodeslot=0xAEF,
},
- [0x0AF0]={
+ [0xAF0]={
category="po",
description="GUJARATI ABBREVIATION SIGN",
direction="l",
linebreak="al",
- unicodeslot=0x0AF0,
+ unicodeslot=0xAF0,
},
- [0x0AF1]={
+ [0xAF1]={
category="sc",
description="GUJARATI RUPEE SIGN",
direction="et",
linebreak="pr",
- unicodeslot=0x0AF1,
+ unicodeslot=0xAF1,
},
- [0x0B01]={
+ [0xB01]={
category="mn",
description="ORIYA SIGN CANDRABINDU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B01,
+ unicodeslot=0xB01,
},
- [0x0B02]={
+ [0xB02]={
category="mc",
description="ORIYA SIGN ANUSVARA",
direction="l",
linebreak="cm",
- unicodeslot=0x0B02,
+ unicodeslot=0xB02,
},
- [0x0B03]={
+ [0xB03]={
category="mc",
description="ORIYA SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0B03,
+ unicodeslot=0xB03,
},
- [0x0B05]={
+ [0xB05]={
category="lo",
description="ORIYA LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0B05,
+ unicodeslot=0xB05,
},
- [0x0B06]={
+ [0xB06]={
category="lo",
description="ORIYA LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0B06,
+ unicodeslot=0xB06,
},
- [0x0B07]={
+ [0xB07]={
category="lo",
description="ORIYA LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0B07,
+ unicodeslot=0xB07,
},
- [0x0B08]={
+ [0xB08]={
category="lo",
description="ORIYA LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0B08,
+ unicodeslot=0xB08,
},
- [0x0B09]={
+ [0xB09]={
category="lo",
description="ORIYA LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0B09,
+ unicodeslot=0xB09,
},
- [0x0B0A]={
+ [0xB0A]={
category="lo",
description="ORIYA LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0B0A,
+ unicodeslot=0xB0A,
},
- [0x0B0B]={
+ [0xB0B]={
category="lo",
description="ORIYA LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x0B0B,
+ unicodeslot=0xB0B,
},
- [0x0B0C]={
+ [0xB0C]={
category="lo",
description="ORIYA LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x0B0C,
+ unicodeslot=0xB0C,
},
- [0x0B0F]={
+ [0xB0F]={
category="lo",
description="ORIYA LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x0B0F,
+ unicodeslot=0xB0F,
},
- [0x0B10]={
+ [0xB10]={
category="lo",
description="ORIYA LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0B10,
+ unicodeslot=0xB10,
},
- [0x0B13]={
+ [0xB13]={
category="lo",
description="ORIYA LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0B13,
+ unicodeslot=0xB13,
},
- [0x0B14]={
+ [0xB14]={
category="lo",
description="ORIYA LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0B14,
+ unicodeslot=0xB14,
},
- [0x0B15]={
+ [0xB15]={
category="lo",
description="ORIYA LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0B15,
+ unicodeslot=0xB15,
},
- [0x0B16]={
+ [0xB16]={
category="lo",
description="ORIYA LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B16,
+ unicodeslot=0xB16,
},
- [0x0B17]={
+ [0xB17]={
category="lo",
description="ORIYA LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0B17,
+ unicodeslot=0xB17,
},
- [0x0B18]={
+ [0xB18]={
category="lo",
description="ORIYA LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B18,
+ unicodeslot=0xB18,
},
- [0x0B19]={
+ [0xB19]={
category="lo",
description="ORIYA LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0B19,
+ unicodeslot=0xB19,
},
- [0x0B1A]={
+ [0xB1A]={
category="lo",
description="ORIYA LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0B1A,
+ unicodeslot=0xB1A,
},
- [0x0B1B]={
+ [0xB1B]={
category="lo",
description="ORIYA LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B1B,
+ unicodeslot=0xB1B,
},
- [0x0B1C]={
+ [0xB1C]={
category="lo",
description="ORIYA LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0B1C,
+ unicodeslot=0xB1C,
},
- [0x0B1D]={
+ [0xB1D]={
category="lo",
description="ORIYA LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B1D,
+ unicodeslot=0xB1D,
},
- [0x0B1E]={
+ [0xB1E]={
category="lo",
description="ORIYA LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0B1E,
+ unicodeslot=0xB1E,
},
- [0x0B1F]={
+ [0xB1F]={
category="lo",
description="ORIYA LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0B1F,
+ unicodeslot=0xB1F,
},
- [0x0B20]={
+ [0xB20]={
category="lo",
description="ORIYA LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B20,
+ unicodeslot=0xB20,
},
- [0x0B21]={
+ [0xB21]={
category="lo",
description="ORIYA LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0B21,
+ unicodeslot=0xB21,
},
- [0x0B22]={
+ [0xB22]={
category="lo",
description="ORIYA LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B22,
+ unicodeslot=0xB22,
},
- [0x0B23]={
+ [0xB23]={
category="lo",
description="ORIYA LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0B23,
+ unicodeslot=0xB23,
},
- [0x0B24]={
+ [0xB24]={
category="lo",
description="ORIYA LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0B24,
+ unicodeslot=0xB24,
},
- [0x0B25]={
+ [0xB25]={
category="lo",
description="ORIYA LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0B25,
+ unicodeslot=0xB25,
},
- [0x0B26]={
+ [0xB26]={
category="lo",
description="ORIYA LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0B26,
+ unicodeslot=0xB26,
},
- [0x0B27]={
+ [0xB27]={
category="lo",
description="ORIYA LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B27,
+ unicodeslot=0xB27,
},
- [0x0B28]={
+ [0xB28]={
category="lo",
description="ORIYA LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0B28,
+ unicodeslot=0xB28,
},
- [0x0B2A]={
+ [0xB2A]={
category="lo",
description="ORIYA LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0B2A,
+ unicodeslot=0xB2A,
},
- [0x0B2B]={
+ [0xB2B]={
category="lo",
description="ORIYA LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B2B,
+ unicodeslot=0xB2B,
},
- [0x0B2C]={
+ [0xB2C]={
category="lo",
description="ORIYA LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0B2C,
+ unicodeslot=0xB2C,
},
- [0x0B2D]={
+ [0xB2D]={
category="lo",
description="ORIYA LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B2D,
+ unicodeslot=0xB2D,
},
- [0x0B2E]={
+ [0xB2E]={
category="lo",
description="ORIYA LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0B2E,
+ unicodeslot=0xB2E,
},
- [0x0B2F]={
+ [0xB2F]={
category="lo",
description="ORIYA LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0B2F,
+ unicodeslot=0xB2F,
},
- [0x0B30]={
+ [0xB30]={
category="lo",
description="ORIYA LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0B30,
+ unicodeslot=0xB30,
},
- [0x0B32]={
+ [0xB32]={
category="lo",
description="ORIYA LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0B32,
+ unicodeslot=0xB32,
},
- [0x0B33]={
+ [0xB33]={
category="lo",
description="ORIYA LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0B33,
+ unicodeslot=0xB33,
},
- [0x0B35]={
+ [0xB35]={
category="lo",
description="ORIYA LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0B35,
+ unicodeslot=0xB35,
},
- [0x0B36]={
+ [0xB36]={
category="lo",
description="ORIYA LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B36,
+ unicodeslot=0xB36,
},
- [0x0B37]={
+ [0xB37]={
category="lo",
description="ORIYA LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0B37,
+ unicodeslot=0xB37,
},
- [0x0B38]={
+ [0xB38]={
category="lo",
description="ORIYA LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0B38,
+ unicodeslot=0xB38,
},
- [0x0B39]={
+ [0xB39]={
category="lo",
description="ORIYA LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0B39,
+ unicodeslot=0xB39,
},
- [0x0B3C]={
+ [0xB3C]={
category="mn",
+ combining=0x7,
description="ORIYA SIGN NUKTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B3C,
+ unicodeslot=0xB3C,
},
- [0x0B3D]={
+ [0xB3D]={
category="lo",
description="ORIYA SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x0B3D,
+ unicodeslot=0xB3D,
},
- [0x0B3E]={
+ [0xB3E]={
category="mc",
description="ORIYA VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x0B3E,
+ unicodeslot=0xB3E,
},
- [0x0B3F]={
+ [0xB3F]={
category="mn",
description="ORIYA VOWEL SIGN I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B3F,
+ unicodeslot=0xB3F,
},
- [0x0B40]={
+ [0xB40]={
category="mc",
description="ORIYA VOWEL SIGN II",
direction="l",
linebreak="cm",
- unicodeslot=0x0B40,
+ unicodeslot=0xB40,
},
- [0x0B41]={
+ [0xB41]={
category="mn",
description="ORIYA VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B41,
+ unicodeslot=0xB41,
},
- [0x0B42]={
+ [0xB42]={
category="mn",
description="ORIYA VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B42,
+ unicodeslot=0xB42,
},
- [0x0B43]={
+ [0xB43]={
category="mn",
description="ORIYA VOWEL SIGN VOCALIC R",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B43,
+ unicodeslot=0xB43,
},
- [0x0B44]={
+ [0xB44]={
category="mn",
description="ORIYA VOWEL SIGN VOCALIC RR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B44,
+ unicodeslot=0xB44,
},
- [0x0B47]={
+ [0xB47]={
category="mc",
description="ORIYA VOWEL SIGN E",
direction="l",
linebreak="cm",
- unicodeslot=0x0B47,
+ unicodeslot=0xB47,
},
- [0x0B48]={
+ [0xB48]={
category="mc",
description="ORIYA VOWEL SIGN AI",
direction="l",
linebreak="cm",
- specials={ "char", 0x0B47, 0x0B56 },
- unicodeslot=0x0B48,
+ specials={ "char", 0xB47, 0xB56 },
+ unicodeslot=0xB48,
},
- [0x0B4B]={
+ [0xB4B]={
category="mc",
description="ORIYA VOWEL SIGN O",
direction="l",
linebreak="cm",
- specials={ "char", 0x0B47, 0x0B3E },
- unicodeslot=0x0B4B,
+ specials={ "char", 0xB47, 0xB3E },
+ unicodeslot=0xB4B,
},
- [0x0B4C]={
+ [0xB4C]={
category="mc",
description="ORIYA VOWEL SIGN AU",
direction="l",
linebreak="cm",
- specials={ "char", 0x0B47, 0x0B57 },
- unicodeslot=0x0B4C,
+ specials={ "char", 0xB47, 0xB57 },
+ unicodeslot=0xB4C,
},
- [0x0B4D]={
+ [0xB4D]={
category="mn",
+ combining=0x9,
description="ORIYA SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B4D,
+ unicodeslot=0xB4D,
},
- [0x0B56]={
+ [0xB56]={
category="mn",
description="ORIYA AI LENGTH MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B56,
+ unicodeslot=0xB56,
},
- [0x0B57]={
+ [0xB57]={
category="mc",
description="ORIYA AU LENGTH MARK",
direction="l",
linebreak="cm",
- unicodeslot=0x0B57,
+ unicodeslot=0xB57,
},
- [0x0B5C]={
+ [0xB5C]={
category="lo",
description="ORIYA LETTER RRA",
direction="l",
linebreak="al",
- specials={ "char", 0x0B21, 0x0B3C },
- unicodeslot=0x0B5C,
+ specials={ "char", 0xB21, 0xB3C },
+ unicodeslot=0xB5C,
},
- [0x0B5D]={
+ [0xB5D]={
category="lo",
description="ORIYA LETTER RHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0B22, 0x0B3C },
- unicodeslot=0x0B5D,
+ specials={ "char", 0xB22, 0xB3C },
+ unicodeslot=0xB5D,
},
- [0x0B5F]={
+ [0xB5F]={
category="lo",
description="ORIYA LETTER YYA",
direction="l",
linebreak="al",
- unicodeslot=0x0B5F,
+ unicodeslot=0xB5F,
},
- [0x0B60]={
+ [0xB60]={
category="lo",
description="ORIYA LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x0B60,
+ unicodeslot=0xB60,
},
- [0x0B61]={
+ [0xB61]={
category="lo",
description="ORIYA LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x0B61,
+ unicodeslot=0xB61,
},
- [0x0B62]={
+ [0xB62]={
category="mn",
description="ORIYA VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B62,
+ unicodeslot=0xB62,
},
- [0x0B63]={
+ [0xB63]={
category="mn",
description="ORIYA VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B63,
+ unicodeslot=0xB63,
},
- [0x0B66]={
+ [0xB66]={
category="nd",
description="ORIYA DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0B66,
+ unicodeslot=0xB66,
},
- [0x0B67]={
+ [0xB67]={
category="nd",
description="ORIYA DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0B67,
+ unicodeslot=0xB67,
},
- [0x0B68]={
+ [0xB68]={
category="nd",
description="ORIYA DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0B68,
+ unicodeslot=0xB68,
},
- [0x0B69]={
+ [0xB69]={
category="nd",
description="ORIYA DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0B69,
+ unicodeslot=0xB69,
},
- [0x0B6A]={
+ [0xB6A]={
category="nd",
description="ORIYA DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0B6A,
+ unicodeslot=0xB6A,
},
- [0x0B6B]={
+ [0xB6B]={
category="nd",
description="ORIYA DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0B6B,
+ unicodeslot=0xB6B,
},
- [0x0B6C]={
+ [0xB6C]={
category="nd",
description="ORIYA DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0B6C,
+ unicodeslot=0xB6C,
},
- [0x0B6D]={
+ [0xB6D]={
category="nd",
description="ORIYA DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0B6D,
+ unicodeslot=0xB6D,
},
- [0x0B6E]={
+ [0xB6E]={
category="nd",
description="ORIYA DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0B6E,
+ unicodeslot=0xB6E,
},
- [0x0B6F]={
+ [0xB6F]={
category="nd",
description="ORIYA DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0B6F,
+ unicodeslot=0xB6F,
},
- [0x0B70]={
+ [0xB70]={
category="so",
description="ORIYA ISSHAR",
direction="l",
linebreak="al",
- unicodeslot=0x0B70,
+ unicodeslot=0xB70,
},
- [0x0B71]={
+ [0xB71]={
category="lo",
description="ORIYA LETTER WA",
direction="l",
linebreak="al",
- unicodeslot=0x0B71,
+ unicodeslot=0xB71,
},
- [0x0B72]={
+ [0xB72]={
category="no",
description="ORIYA FRACTION ONE QUARTER",
direction="l",
linebreak="al",
- unicodeslot=0x0B72,
+ unicodeslot=0xB72,
},
- [0x0B73]={
+ [0xB73]={
category="no",
description="ORIYA FRACTION ONE HALF",
direction="l",
linebreak="al",
- unicodeslot=0x0B73,
+ unicodeslot=0xB73,
},
- [0x0B74]={
+ [0xB74]={
category="no",
description="ORIYA FRACTION THREE QUARTERS",
direction="l",
linebreak="al",
- unicodeslot=0x0B74,
+ unicodeslot=0xB74,
},
- [0x0B75]={
+ [0xB75]={
category="no",
description="ORIYA FRACTION ONE SIXTEENTH",
direction="l",
linebreak="al",
- unicodeslot=0x0B75,
+ unicodeslot=0xB75,
},
- [0x0B76]={
+ [0xB76]={
category="no",
description="ORIYA FRACTION ONE EIGHTH",
direction="l",
linebreak="al",
- unicodeslot=0x0B76,
+ unicodeslot=0xB76,
},
- [0x0B77]={
+ [0xB77]={
category="no",
description="ORIYA FRACTION THREE SIXTEENTHS",
direction="l",
linebreak="al",
- unicodeslot=0x0B77,
+ unicodeslot=0xB77,
},
- [0x0B82]={
+ [0xB82]={
category="mn",
description="TAMIL SIGN ANUSVARA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0B82,
+ unicodeslot=0xB82,
},
- [0x0B83]={
+ [0xB83]={
category="lo",
description="TAMIL SIGN VISARGA",
direction="l",
linebreak="al",
- unicodeslot=0x0B83,
+ unicodeslot=0xB83,
},
- [0x0B85]={
+ [0xB85]={
category="lo",
description="TAMIL LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0B85,
+ unicodeslot=0xB85,
},
- [0x0B86]={
+ [0xB86]={
category="lo",
description="TAMIL LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0B86,
+ unicodeslot=0xB86,
},
- [0x0B87]={
+ [0xB87]={
category="lo",
description="TAMIL LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0B87,
+ unicodeslot=0xB87,
},
- [0x0B88]={
+ [0xB88]={
category="lo",
description="TAMIL LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0B88,
+ unicodeslot=0xB88,
},
- [0x0B89]={
+ [0xB89]={
category="lo",
description="TAMIL LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0B89,
+ unicodeslot=0xB89,
},
- [0x0B8A]={
+ [0xB8A]={
category="lo",
description="TAMIL LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0B8A,
+ unicodeslot=0xB8A,
},
- [0x0B8E]={
+ [0xB8E]={
category="lo",
description="TAMIL LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x0B8E,
+ unicodeslot=0xB8E,
},
- [0x0B8F]={
+ [0xB8F]={
category="lo",
description="TAMIL LETTER EE",
direction="l",
linebreak="al",
- unicodeslot=0x0B8F,
+ unicodeslot=0xB8F,
},
- [0x0B90]={
+ [0xB90]={
category="lo",
description="TAMIL LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0B90,
+ unicodeslot=0xB90,
},
- [0x0B92]={
+ [0xB92]={
category="lo",
description="TAMIL LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0B92,
+ unicodeslot=0xB92,
},
- [0x0B93]={
+ [0xB93]={
category="lo",
description="TAMIL LETTER OO",
direction="l",
linebreak="al",
- unicodeslot=0x0B93,
+ unicodeslot=0xB93,
},
- [0x0B94]={
+ [0xB94]={
category="lo",
description="TAMIL LETTER AU",
direction="l",
linebreak="al",
- specials={ "char", 0x0B92, 0x0BD7 },
- unicodeslot=0x0B94,
+ specials={ "char", 0xB92, 0xBD7 },
+ unicodeslot=0xB94,
},
- [0x0B95]={
+ [0xB95]={
category="lo",
description="TAMIL LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0B95,
+ unicodeslot=0xB95,
},
- [0x0B99]={
+ [0xB99]={
category="lo",
description="TAMIL LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0B99,
+ unicodeslot=0xB99,
},
- [0x0B9A]={
+ [0xB9A]={
category="lo",
description="TAMIL LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0B9A,
+ unicodeslot=0xB9A,
},
- [0x0B9C]={
+ [0xB9C]={
category="lo",
description="TAMIL LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0B9C,
+ unicodeslot=0xB9C,
},
- [0x0B9E]={
+ [0xB9E]={
category="lo",
description="TAMIL LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0B9E,
+ unicodeslot=0xB9E,
},
- [0x0B9F]={
+ [0xB9F]={
category="lo",
description="TAMIL LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0B9F,
+ unicodeslot=0xB9F,
},
- [0x0BA3]={
+ [0xBA3]={
category="lo",
description="TAMIL LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0BA3,
+ unicodeslot=0xBA3,
},
- [0x0BA4]={
+ [0xBA4]={
category="lo",
description="TAMIL LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0BA4,
+ unicodeslot=0xBA4,
},
- [0x0BA8]={
+ [0xBA8]={
category="lo",
description="TAMIL LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0BA8,
+ unicodeslot=0xBA8,
},
- [0x0BA9]={
+ [0xBA9]={
category="lo",
description="TAMIL LETTER NNNA",
direction="l",
linebreak="al",
- unicodeslot=0x0BA9,
+ unicodeslot=0xBA9,
},
- [0x0BAA]={
+ [0xBAA]={
category="lo",
description="TAMIL LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0BAA,
+ unicodeslot=0xBAA,
},
- [0x0BAE]={
+ [0xBAE]={
category="lo",
description="TAMIL LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0BAE,
+ unicodeslot=0xBAE,
},
- [0x0BAF]={
+ [0xBAF]={
category="lo",
description="TAMIL LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0BAF,
+ unicodeslot=0xBAF,
},
- [0x0BB0]={
+ [0xBB0]={
category="lo",
description="TAMIL LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB0,
+ unicodeslot=0xBB0,
},
- [0x0BB1]={
+ [0xBB1]={
category="lo",
description="TAMIL LETTER RRA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB1,
+ unicodeslot=0xBB1,
},
- [0x0BB2]={
+ [0xBB2]={
category="lo",
description="TAMIL LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB2,
+ unicodeslot=0xBB2,
},
- [0x0BB3]={
+ [0xBB3]={
category="lo",
description="TAMIL LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB3,
+ unicodeslot=0xBB3,
},
- [0x0BB4]={
+ [0xBB4]={
category="lo",
description="TAMIL LETTER LLLA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB4,
+ unicodeslot=0xBB4,
},
- [0x0BB5]={
+ [0xBB5]={
category="lo",
description="TAMIL LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB5,
+ unicodeslot=0xBB5,
},
- [0x0BB6]={
+ [0xBB6]={
category="lo",
description="TAMIL LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB6,
+ unicodeslot=0xBB6,
},
- [0x0BB7]={
+ [0xBB7]={
category="lo",
description="TAMIL LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB7,
+ unicodeslot=0xBB7,
},
- [0x0BB8]={
+ [0xBB8]={
category="lo",
description="TAMIL LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB8,
+ unicodeslot=0xBB8,
},
- [0x0BB9]={
+ [0xBB9]={
category="lo",
description="TAMIL LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0BB9,
+ unicodeslot=0xBB9,
},
- [0x0BBE]={
+ [0xBBE]={
category="mc",
description="TAMIL VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x0BBE,
+ unicodeslot=0xBBE,
},
- [0x0BBF]={
+ [0xBBF]={
category="mc",
description="TAMIL VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x0BBF,
+ unicodeslot=0xBBF,
},
- [0x0BC0]={
+ [0xBC0]={
category="mn",
description="TAMIL VOWEL SIGN II",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0BC0,
+ unicodeslot=0xBC0,
},
- [0x0BC1]={
+ [0xBC1]={
category="mc",
description="TAMIL VOWEL SIGN U",
direction="l",
linebreak="cm",
- unicodeslot=0x0BC1,
+ unicodeslot=0xBC1,
},
- [0x0BC2]={
+ [0xBC2]={
category="mc",
description="TAMIL VOWEL SIGN UU",
direction="l",
linebreak="cm",
- unicodeslot=0x0BC2,
+ unicodeslot=0xBC2,
},
- [0x0BC6]={
+ [0xBC6]={
category="mc",
description="TAMIL VOWEL SIGN E",
direction="l",
linebreak="cm",
- unicodeslot=0x0BC6,
+ unicodeslot=0xBC6,
},
- [0x0BC7]={
+ [0xBC7]={
category="mc",
description="TAMIL VOWEL SIGN EE",
direction="l",
linebreak="cm",
- unicodeslot=0x0BC7,
+ unicodeslot=0xBC7,
},
- [0x0BC8]={
+ [0xBC8]={
category="mc",
description="TAMIL VOWEL SIGN AI",
direction="l",
linebreak="cm",
- unicodeslot=0x0BC8,
+ unicodeslot=0xBC8,
},
- [0x0BCA]={
+ [0xBCA]={
category="mc",
description="TAMIL VOWEL SIGN O",
direction="l",
linebreak="cm",
- specials={ "char", 0x0BC6, 0x0BBE },
- unicodeslot=0x0BCA,
+ specials={ "char", 0xBC6, 0xBBE },
+ unicodeslot=0xBCA,
},
- [0x0BCB]={
+ [0xBCB]={
category="mc",
description="TAMIL VOWEL SIGN OO",
direction="l",
linebreak="cm",
- specials={ "char", 0x0BC7, 0x0BBE },
- unicodeslot=0x0BCB,
+ specials={ "char", 0xBC7, 0xBBE },
+ unicodeslot=0xBCB,
},
- [0x0BCC]={
+ [0xBCC]={
category="mc",
description="TAMIL VOWEL SIGN AU",
direction="l",
linebreak="cm",
- specials={ "char", 0x0BC6, 0x0BD7 },
- unicodeslot=0x0BCC,
+ specials={ "char", 0xBC6, 0xBD7 },
+ unicodeslot=0xBCC,
},
- [0x0BCD]={
+ [0xBCD]={
category="mn",
+ combining=0x9,
description="TAMIL SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0BCD,
+ unicodeslot=0xBCD,
},
- [0x0BD0]={
+ [0xBD0]={
category="lo",
description="TAMIL OM",
direction="l",
linebreak="al",
- unicodeslot=0x0BD0,
+ unicodeslot=0xBD0,
},
- [0x0BD7]={
+ [0xBD7]={
category="mc",
description="TAMIL AU LENGTH MARK",
direction="l",
linebreak="cm",
- unicodeslot=0x0BD7,
+ unicodeslot=0xBD7,
},
- [0x0BE6]={
+ [0xBE6]={
category="nd",
description="TAMIL DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0BE6,
+ unicodeslot=0xBE6,
},
- [0x0BE7]={
+ [0xBE7]={
category="nd",
description="TAMIL DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0BE7,
+ unicodeslot=0xBE7,
},
- [0x0BE8]={
+ [0xBE8]={
category="nd",
description="TAMIL DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0BE8,
+ unicodeslot=0xBE8,
},
- [0x0BE9]={
+ [0xBE9]={
category="nd",
description="TAMIL DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0BE9,
+ unicodeslot=0xBE9,
},
- [0x0BEA]={
+ [0xBEA]={
category="nd",
description="TAMIL DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0BEA,
+ unicodeslot=0xBEA,
},
- [0x0BEB]={
+ [0xBEB]={
category="nd",
description="TAMIL DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0BEB,
+ unicodeslot=0xBEB,
},
- [0x0BEC]={
+ [0xBEC]={
category="nd",
description="TAMIL DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0BEC,
+ unicodeslot=0xBEC,
},
- [0x0BED]={
+ [0xBED]={
category="nd",
description="TAMIL DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0BED,
+ unicodeslot=0xBED,
},
- [0x0BEE]={
+ [0xBEE]={
category="nd",
description="TAMIL DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0BEE,
+ unicodeslot=0xBEE,
},
- [0x0BEF]={
+ [0xBEF]={
category="nd",
description="TAMIL DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0BEF,
+ unicodeslot=0xBEF,
},
- [0x0BF0]={
+ [0xBF0]={
category="no",
description="TAMIL NUMBER TEN",
direction="l",
linebreak="al",
- unicodeslot=0x0BF0,
+ unicodeslot=0xBF0,
},
- [0x0BF1]={
+ [0xBF1]={
category="no",
description="TAMIL NUMBER ONE HUNDRED",
direction="l",
linebreak="al",
- unicodeslot=0x0BF1,
+ unicodeslot=0xBF1,
},
- [0x0BF2]={
+ [0xBF2]={
category="no",
description="TAMIL NUMBER ONE THOUSAND",
direction="l",
linebreak="al",
- unicodeslot=0x0BF2,
+ unicodeslot=0xBF2,
},
- [0x0BF3]={
+ [0xBF3]={
category="so",
description="TAMIL DAY SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BF3,
+ unicodeslot=0xBF3,
},
- [0x0BF4]={
+ [0xBF4]={
category="so",
description="TAMIL MONTH SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BF4,
+ unicodeslot=0xBF4,
},
- [0x0BF5]={
+ [0xBF5]={
category="so",
description="TAMIL YEAR SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BF5,
+ unicodeslot=0xBF5,
},
- [0x0BF6]={
+ [0xBF6]={
category="so",
description="TAMIL DEBIT SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BF6,
+ unicodeslot=0xBF6,
},
- [0x0BF7]={
+ [0xBF7]={
category="so",
description="TAMIL CREDIT SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BF7,
+ unicodeslot=0xBF7,
},
- [0x0BF8]={
+ [0xBF8]={
category="so",
description="TAMIL AS ABOVE SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BF8,
+ unicodeslot=0xBF8,
},
- [0x0BF9]={
+ [0xBF9]={
category="sc",
description="TAMIL RUPEE SIGN",
direction="et",
linebreak="pr",
- unicodeslot=0x0BF9,
+ unicodeslot=0xBF9,
},
- [0x0BFA]={
+ [0xBFA]={
category="so",
description="TAMIL NUMBER SIGN",
direction="on",
linebreak="al",
- unicodeslot=0x0BFA,
+ unicodeslot=0xBFA,
+ },
+ [0xC00]={
+ category="mn",
+ description="TELUGU SIGN COMBINING CANDRABINDU ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xC00,
},
- [0x0C01]={
+ [0xC01]={
category="mc",
description="TELUGU SIGN CANDRABINDU",
direction="l",
linebreak="cm",
- unicodeslot=0x0C01,
+ unicodeslot=0xC01,
},
- [0x0C02]={
+ [0xC02]={
category="mc",
description="TELUGU SIGN ANUSVARA",
direction="l",
linebreak="cm",
- unicodeslot=0x0C02,
+ unicodeslot=0xC02,
},
- [0x0C03]={
+ [0xC03]={
category="mc",
description="TELUGU SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0C03,
+ unicodeslot=0xC03,
},
- [0x0C05]={
+ [0xC05]={
category="lo",
description="TELUGU LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0C05,
+ unicodeslot=0xC05,
},
- [0x0C06]={
+ [0xC06]={
category="lo",
description="TELUGU LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0C06,
+ unicodeslot=0xC06,
},
- [0x0C07]={
+ [0xC07]={
category="lo",
description="TELUGU LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0C07,
+ unicodeslot=0xC07,
},
- [0x0C08]={
+ [0xC08]={
category="lo",
description="TELUGU LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0C08,
+ unicodeslot=0xC08,
},
- [0x0C09]={
+ [0xC09]={
category="lo",
description="TELUGU LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0C09,
+ unicodeslot=0xC09,
},
- [0x0C0A]={
+ [0xC0A]={
category="lo",
description="TELUGU LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0C0A,
+ unicodeslot=0xC0A,
},
- [0x0C0B]={
+ [0xC0B]={
category="lo",
description="TELUGU LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x0C0B,
+ unicodeslot=0xC0B,
},
- [0x0C0C]={
+ [0xC0C]={
category="lo",
description="TELUGU LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x0C0C,
+ unicodeslot=0xC0C,
},
- [0x0C0E]={
+ [0xC0E]={
category="lo",
description="TELUGU LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x0C0E,
+ unicodeslot=0xC0E,
},
- [0x0C0F]={
+ [0xC0F]={
category="lo",
description="TELUGU LETTER EE",
direction="l",
linebreak="al",
- unicodeslot=0x0C0F,
+ unicodeslot=0xC0F,
},
- [0x0C10]={
+ [0xC10]={
category="lo",
description="TELUGU LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0C10,
+ unicodeslot=0xC10,
},
- [0x0C12]={
+ [0xC12]={
category="lo",
description="TELUGU LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0C12,
+ unicodeslot=0xC12,
},
- [0x0C13]={
+ [0xC13]={
category="lo",
description="TELUGU LETTER OO",
direction="l",
linebreak="al",
- unicodeslot=0x0C13,
+ unicodeslot=0xC13,
},
- [0x0C14]={
+ [0xC14]={
category="lo",
description="TELUGU LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0C14,
+ unicodeslot=0xC14,
},
- [0x0C15]={
+ [0xC15]={
category="lo",
description="TELUGU LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0C15,
+ unicodeslot=0xC15,
},
- [0x0C16]={
+ [0xC16]={
category="lo",
description="TELUGU LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C16,
+ unicodeslot=0xC16,
},
- [0x0C17]={
+ [0xC17]={
category="lo",
description="TELUGU LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0C17,
+ unicodeslot=0xC17,
},
- [0x0C18]={
+ [0xC18]={
category="lo",
description="TELUGU LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C18,
+ unicodeslot=0xC18,
},
- [0x0C19]={
+ [0xC19]={
category="lo",
description="TELUGU LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0C19,
+ unicodeslot=0xC19,
},
- [0x0C1A]={
+ [0xC1A]={
category="lo",
description="TELUGU LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0C1A,
+ unicodeslot=0xC1A,
},
- [0x0C1B]={
+ [0xC1B]={
category="lo",
description="TELUGU LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C1B,
+ unicodeslot=0xC1B,
},
- [0x0C1C]={
+ [0xC1C]={
category="lo",
description="TELUGU LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0C1C,
+ unicodeslot=0xC1C,
},
- [0x0C1D]={
+ [0xC1D]={
category="lo",
description="TELUGU LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C1D,
+ unicodeslot=0xC1D,
},
- [0x0C1E]={
+ [0xC1E]={
category="lo",
description="TELUGU LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0C1E,
+ unicodeslot=0xC1E,
},
- [0x0C1F]={
+ [0xC1F]={
category="lo",
description="TELUGU LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0C1F,
+ unicodeslot=0xC1F,
},
- [0x0C20]={
+ [0xC20]={
category="lo",
description="TELUGU LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C20,
+ unicodeslot=0xC20,
},
- [0x0C21]={
+ [0xC21]={
category="lo",
description="TELUGU LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0C21,
+ unicodeslot=0xC21,
},
- [0x0C22]={
+ [0xC22]={
category="lo",
description="TELUGU LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C22,
+ unicodeslot=0xC22,
},
- [0x0C23]={
+ [0xC23]={
category="lo",
description="TELUGU LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0C23,
+ unicodeslot=0xC23,
},
- [0x0C24]={
+ [0xC24]={
category="lo",
description="TELUGU LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0C24,
+ unicodeslot=0xC24,
},
- [0x0C25]={
+ [0xC25]={
category="lo",
description="TELUGU LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0C25,
+ unicodeslot=0xC25,
},
- [0x0C26]={
+ [0xC26]={
category="lo",
description="TELUGU LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0C26,
+ unicodeslot=0xC26,
},
- [0x0C27]={
+ [0xC27]={
category="lo",
description="TELUGU LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C27,
+ unicodeslot=0xC27,
},
- [0x0C28]={
+ [0xC28]={
category="lo",
description="TELUGU LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0C28,
+ unicodeslot=0xC28,
},
- [0x0C2A]={
+ [0xC2A]={
category="lo",
description="TELUGU LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0C2A,
+ unicodeslot=0xC2A,
},
- [0x0C2B]={
+ [0xC2B]={
category="lo",
description="TELUGU LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C2B,
+ unicodeslot=0xC2B,
},
- [0x0C2C]={
+ [0xC2C]={
category="lo",
description="TELUGU LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0C2C,
+ unicodeslot=0xC2C,
},
- [0x0C2D]={
+ [0xC2D]={
category="lo",
description="TELUGU LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C2D,
+ unicodeslot=0xC2D,
},
- [0x0C2E]={
+ [0xC2E]={
category="lo",
description="TELUGU LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0C2E,
+ unicodeslot=0xC2E,
},
- [0x0C2F]={
+ [0xC2F]={
category="lo",
description="TELUGU LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0C2F,
+ unicodeslot=0xC2F,
},
- [0x0C30]={
+ [0xC30]={
category="lo",
description="TELUGU LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0C30,
+ unicodeslot=0xC30,
},
- [0x0C31]={
+ [0xC31]={
category="lo",
description="TELUGU LETTER RRA",
direction="l",
linebreak="al",
- unicodeslot=0x0C31,
+ unicodeslot=0xC31,
},
- [0x0C32]={
+ [0xC32]={
category="lo",
description="TELUGU LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0C32,
+ unicodeslot=0xC32,
},
- [0x0C33]={
+ [0xC33]={
category="lo",
description="TELUGU LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0C33,
+ unicodeslot=0xC33,
},
- [0x0C35]={
+ [0xC34]={
+ category="lo",
+ description="TELUGU LETTER LLLA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xC34,
+ },
+ [0xC35]={
category="lo",
description="TELUGU LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0C35,
+ unicodeslot=0xC35,
},
- [0x0C36]={
+ [0xC36]={
category="lo",
description="TELUGU LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C36,
+ unicodeslot=0xC36,
},
- [0x0C37]={
+ [0xC37]={
category="lo",
description="TELUGU LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0C37,
+ unicodeslot=0xC37,
},
- [0x0C38]={
+ [0xC38]={
category="lo",
description="TELUGU LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0C38,
+ unicodeslot=0xC38,
},
- [0x0C39]={
+ [0xC39]={
category="lo",
description="TELUGU LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0C39,
+ unicodeslot=0xC39,
},
- [0x0C3D]={
+ [0xC3D]={
category="lo",
description="TELUGU SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C3D,
+ unicodeslot=0xC3D,
},
- [0x0C3E]={
+ [0xC3E]={
category="mn",
description="TELUGU VOWEL SIGN AA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C3E,
+ unicodeslot=0xC3E,
},
- [0x0C3F]={
+ [0xC3F]={
category="mn",
description="TELUGU VOWEL SIGN I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C3F,
+ unicodeslot=0xC3F,
},
- [0x0C40]={
+ [0xC40]={
category="mn",
description="TELUGU VOWEL SIGN II",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C40,
+ unicodeslot=0xC40,
},
- [0x0C41]={
+ [0xC41]={
category="mc",
description="TELUGU VOWEL SIGN U",
direction="l",
linebreak="cm",
- unicodeslot=0x0C41,
+ unicodeslot=0xC41,
},
- [0x0C42]={
+ [0xC42]={
category="mc",
description="TELUGU VOWEL SIGN UU",
direction="l",
linebreak="cm",
- unicodeslot=0x0C42,
+ unicodeslot=0xC42,
},
- [0x0C43]={
+ [0xC43]={
category="mc",
description="TELUGU VOWEL SIGN VOCALIC R",
direction="l",
linebreak="cm",
- unicodeslot=0x0C43,
+ unicodeslot=0xC43,
},
- [0x0C44]={
+ [0xC44]={
category="mc",
description="TELUGU VOWEL SIGN VOCALIC RR",
direction="l",
linebreak="cm",
- unicodeslot=0x0C44,
+ unicodeslot=0xC44,
},
- [0x0C46]={
+ [0xC46]={
category="mn",
description="TELUGU VOWEL SIGN E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C46,
+ unicodeslot=0xC46,
},
- [0x0C47]={
+ [0xC47]={
category="mn",
description="TELUGU VOWEL SIGN EE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C47,
+ unicodeslot=0xC47,
},
- [0x0C48]={
+ [0xC48]={
category="mn",
description="TELUGU VOWEL SIGN AI",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0C46, 0x0C56 },
- unicodeslot=0x0C48,
+ specials={ "char", 0xC46, 0xC56 },
+ unicodeslot=0xC48,
},
- [0x0C4A]={
+ [0xC4A]={
category="mn",
description="TELUGU VOWEL SIGN O",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C4A,
+ unicodeslot=0xC4A,
},
- [0x0C4B]={
+ [0xC4B]={
category="mn",
description="TELUGU VOWEL SIGN OO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C4B,
+ unicodeslot=0xC4B,
},
- [0x0C4C]={
+ [0xC4C]={
category="mn",
description="TELUGU VOWEL SIGN AU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C4C,
+ unicodeslot=0xC4C,
},
- [0x0C4D]={
+ [0xC4D]={
category="mn",
+ combining=0x9,
description="TELUGU SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C4D,
+ unicodeslot=0xC4D,
},
- [0x0C55]={
+ [0xC55]={
category="mn",
+ combining=0x54,
description="TELUGU LENGTH MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C55,
+ unicodeslot=0xC55,
},
- [0x0C56]={
+ [0xC56]={
category="mn",
+ combining=0x5B,
description="TELUGU AI LENGTH MARK",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C56,
+ unicodeslot=0xC56,
},
- [0x0C58]={
+ [0xC58]={
category="lo",
description="TELUGU LETTER TSA",
direction="l",
linebreak="al",
- unicodeslot=0x0C58,
+ unicodeslot=0xC58,
},
- [0x0C59]={
+ [0xC59]={
category="lo",
description="TELUGU LETTER DZA",
direction="l",
linebreak="al",
- unicodeslot=0x0C59,
+ unicodeslot=0xC59,
},
- [0x0C60]={
+ [0xC60]={
category="lo",
description="TELUGU LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x0C60,
+ unicodeslot=0xC60,
},
- [0x0C61]={
+ [0xC61]={
category="lo",
description="TELUGU LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x0C61,
+ unicodeslot=0xC61,
},
- [0x0C62]={
+ [0xC62]={
category="mn",
description="TELUGU VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C62,
+ unicodeslot=0xC62,
},
- [0x0C63]={
+ [0xC63]={
category="mn",
description="TELUGU VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0C63,
+ unicodeslot=0xC63,
},
- [0x0C66]={
+ [0xC66]={
category="nd",
description="TELUGU DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0C66,
+ unicodeslot=0xC66,
},
- [0x0C67]={
+ [0xC67]={
category="nd",
description="TELUGU DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0C67,
+ unicodeslot=0xC67,
},
- [0x0C68]={
+ [0xC68]={
category="nd",
description="TELUGU DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0C68,
+ unicodeslot=0xC68,
},
- [0x0C69]={
+ [0xC69]={
category="nd",
description="TELUGU DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0C69,
+ unicodeslot=0xC69,
},
- [0x0C6A]={
+ [0xC6A]={
category="nd",
description="TELUGU DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0C6A,
+ unicodeslot=0xC6A,
},
- [0x0C6B]={
+ [0xC6B]={
category="nd",
description="TELUGU DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0C6B,
+ unicodeslot=0xC6B,
},
- [0x0C6C]={
+ [0xC6C]={
category="nd",
description="TELUGU DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0C6C,
+ unicodeslot=0xC6C,
},
- [0x0C6D]={
+ [0xC6D]={
category="nd",
description="TELUGU DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0C6D,
+ unicodeslot=0xC6D,
},
- [0x0C6E]={
+ [0xC6E]={
category="nd",
description="TELUGU DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0C6E,
+ unicodeslot=0xC6E,
},
- [0x0C6F]={
+ [0xC6F]={
category="nd",
description="TELUGU DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0C6F,
+ unicodeslot=0xC6F,
},
- [0x0C78]={
+ [0xC78]={
category="no",
description="TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C78,
+ unicodeslot=0xC78,
},
- [0x0C79]={
+ [0xC79]={
category="no",
description="TELUGU FRACTION DIGIT ONE FOR ODD POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C79,
+ unicodeslot=0xC79,
},
- [0x0C7A]={
+ [0xC7A]={
category="no",
description="TELUGU FRACTION DIGIT TWO FOR ODD POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C7A,
+ unicodeslot=0xC7A,
},
- [0x0C7B]={
+ [0xC7B]={
category="no",
description="TELUGU FRACTION DIGIT THREE FOR ODD POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C7B,
+ unicodeslot=0xC7B,
},
- [0x0C7C]={
+ [0xC7C]={
category="no",
description="TELUGU FRACTION DIGIT ONE FOR EVEN POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C7C,
+ unicodeslot=0xC7C,
},
- [0x0C7D]={
+ [0xC7D]={
category="no",
description="TELUGU FRACTION DIGIT TWO FOR EVEN POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C7D,
+ unicodeslot=0xC7D,
},
- [0x0C7E]={
+ [0xC7E]={
category="no",
description="TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR",
direction="on",
linebreak="al",
- unicodeslot=0x0C7E,
+ unicodeslot=0xC7E,
},
- [0x0C7F]={
+ [0xC7F]={
category="so",
description="TELUGU SIGN TUUMU",
direction="l",
linebreak="al",
- unicodeslot=0x0C7F,
+ unicodeslot=0xC7F,
+ },
+ [0xC81]={
+ category="mn",
+ description="KANNADA SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xC81,
},
- [0x0C82]={
+ [0xC82]={
category="mc",
description="KANNADA SIGN ANUSVARA",
direction="l",
linebreak="cm",
- unicodeslot=0x0C82,
+ unicodeslot=0xC82,
},
- [0x0C83]={
+ [0xC83]={
category="mc",
description="KANNADA SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0C83,
+ unicodeslot=0xC83,
},
- [0x0C85]={
+ [0xC85]={
category="lo",
description="KANNADA LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0C85,
+ unicodeslot=0xC85,
},
- [0x0C86]={
+ [0xC86]={
category="lo",
description="KANNADA LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0C86,
+ unicodeslot=0xC86,
},
- [0x0C87]={
+ [0xC87]={
category="lo",
description="KANNADA LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0C87,
+ unicodeslot=0xC87,
},
- [0x0C88]={
+ [0xC88]={
category="lo",
description="KANNADA LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0C88,
+ unicodeslot=0xC88,
},
- [0x0C89]={
+ [0xC89]={
category="lo",
description="KANNADA LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0C89,
+ unicodeslot=0xC89,
},
- [0x0C8A]={
+ [0xC8A]={
category="lo",
description="KANNADA LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0C8A,
+ unicodeslot=0xC8A,
},
- [0x0C8B]={
+ [0xC8B]={
category="lo",
description="KANNADA LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x0C8B,
+ unicodeslot=0xC8B,
},
- [0x0C8C]={
+ [0xC8C]={
category="lo",
description="KANNADA LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x0C8C,
+ unicodeslot=0xC8C,
},
- [0x0C8E]={
+ [0xC8E]={
category="lo",
description="KANNADA LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x0C8E,
+ unicodeslot=0xC8E,
},
- [0x0C8F]={
+ [0xC8F]={
category="lo",
description="KANNADA LETTER EE",
direction="l",
linebreak="al",
- unicodeslot=0x0C8F,
+ unicodeslot=0xC8F,
},
- [0x0C90]={
+ [0xC90]={
category="lo",
description="KANNADA LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0C90,
+ unicodeslot=0xC90,
},
- [0x0C92]={
+ [0xC92]={
category="lo",
description="KANNADA LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0C92,
+ unicodeslot=0xC92,
},
- [0x0C93]={
+ [0xC93]={
category="lo",
description="KANNADA LETTER OO",
direction="l",
linebreak="al",
- unicodeslot=0x0C93,
+ unicodeslot=0xC93,
},
- [0x0C94]={
+ [0xC94]={
category="lo",
description="KANNADA LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0C94,
+ unicodeslot=0xC94,
},
- [0x0C95]={
+ [0xC95]={
category="lo",
description="KANNADA LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0C95,
+ unicodeslot=0xC95,
},
- [0x0C96]={
+ [0xC96]={
category="lo",
description="KANNADA LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C96,
+ unicodeslot=0xC96,
},
- [0x0C97]={
+ [0xC97]={
category="lo",
description="KANNADA LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0C97,
+ unicodeslot=0xC97,
},
- [0x0C98]={
+ [0xC98]={
category="lo",
description="KANNADA LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C98,
+ unicodeslot=0xC98,
},
- [0x0C99]={
+ [0xC99]={
category="lo",
description="KANNADA LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0C99,
+ unicodeslot=0xC99,
},
- [0x0C9A]={
+ [0xC9A]={
category="lo",
description="KANNADA LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0C9A,
+ unicodeslot=0xC9A,
},
- [0x0C9B]={
+ [0xC9B]={
category="lo",
description="KANNADA LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C9B,
+ unicodeslot=0xC9B,
},
- [0x0C9C]={
+ [0xC9C]={
category="lo",
description="KANNADA LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0C9C,
+ unicodeslot=0xC9C,
},
- [0x0C9D]={
+ [0xC9D]={
category="lo",
description="KANNADA LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x0C9D,
+ unicodeslot=0xC9D,
},
- [0x0C9E]={
+ [0xC9E]={
category="lo",
description="KANNADA LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0C9E,
+ unicodeslot=0xC9E,
},
- [0x0C9F]={
+ [0xC9F]={
category="lo",
description="KANNADA LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0C9F,
+ unicodeslot=0xC9F,
},
- [0x0CA0]={
+ [0xCA0]={
category="lo",
description="KANNADA LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA0,
+ unicodeslot=0xCA0,
},
- [0x0CA1]={
+ [0xCA1]={
category="lo",
description="KANNADA LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA1,
+ unicodeslot=0xCA1,
},
- [0x0CA2]={
+ [0xCA2]={
category="lo",
description="KANNADA LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA2,
+ unicodeslot=0xCA2,
},
- [0x0CA3]={
+ [0xCA3]={
category="lo",
description="KANNADA LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA3,
+ unicodeslot=0xCA3,
},
- [0x0CA4]={
+ [0xCA4]={
category="lo",
description="KANNADA LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA4,
+ unicodeslot=0xCA4,
},
- [0x0CA5]={
+ [0xCA5]={
category="lo",
description="KANNADA LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA5,
+ unicodeslot=0xCA5,
},
- [0x0CA6]={
+ [0xCA6]={
category="lo",
description="KANNADA LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA6,
+ unicodeslot=0xCA6,
},
- [0x0CA7]={
+ [0xCA7]={
category="lo",
description="KANNADA LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA7,
+ unicodeslot=0xCA7,
},
- [0x0CA8]={
+ [0xCA8]={
category="lo",
description="KANNADA LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0CA8,
+ unicodeslot=0xCA8,
},
- [0x0CAA]={
+ [0xCAA]={
category="lo",
description="KANNADA LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0CAA,
+ unicodeslot=0xCAA,
},
- [0x0CAB]={
+ [0xCAB]={
category="lo",
description="KANNADA LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CAB,
+ unicodeslot=0xCAB,
},
- [0x0CAC]={
+ [0xCAC]={
category="lo",
description="KANNADA LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0CAC,
+ unicodeslot=0xCAC,
},
- [0x0CAD]={
+ [0xCAD]={
category="lo",
description="KANNADA LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CAD,
+ unicodeslot=0xCAD,
},
- [0x0CAE]={
+ [0xCAE]={
category="lo",
description="KANNADA LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0CAE,
+ unicodeslot=0xCAE,
},
- [0x0CAF]={
+ [0xCAF]={
category="lo",
description="KANNADA LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0CAF,
+ unicodeslot=0xCAF,
},
- [0x0CB0]={
+ [0xCB0]={
category="lo",
description="KANNADA LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB0,
+ unicodeslot=0xCB0,
},
- [0x0CB1]={
+ [0xCB1]={
category="lo",
description="KANNADA LETTER RRA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB1,
+ unicodeslot=0xCB1,
},
- [0x0CB2]={
+ [0xCB2]={
category="lo",
description="KANNADA LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB2,
+ unicodeslot=0xCB2,
},
- [0x0CB3]={
+ [0xCB3]={
category="lo",
description="KANNADA LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB3,
+ unicodeslot=0xCB3,
},
- [0x0CB5]={
+ [0xCB5]={
category="lo",
description="KANNADA LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB5,
+ unicodeslot=0xCB5,
},
- [0x0CB6]={
+ [0xCB6]={
category="lo",
description="KANNADA LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB6,
+ unicodeslot=0xCB6,
},
- [0x0CB7]={
+ [0xCB7]={
category="lo",
description="KANNADA LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB7,
+ unicodeslot=0xCB7,
},
- [0x0CB8]={
+ [0xCB8]={
category="lo",
description="KANNADA LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB8,
+ unicodeslot=0xCB8,
},
- [0x0CB9]={
+ [0xCB9]={
category="lo",
description="KANNADA LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0CB9,
+ unicodeslot=0xCB9,
},
- [0x0CBC]={
+ [0xCBC]={
category="mn",
+ combining=0x7,
description="KANNADA SIGN NUKTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0CBC,
+ unicodeslot=0xCBC,
},
- [0x0CBD]={
+ [0xCBD]={
category="lo",
description="KANNADA SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x0CBD,
+ unicodeslot=0xCBD,
},
- [0x0CBE]={
+ [0xCBE]={
category="mc",
description="KANNADA VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x0CBE,
+ unicodeslot=0xCBE,
},
- [0x0CBF]={
+ [0xCBF]={
category="mn",
description="KANNADA VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x0CBF,
+ unicodeslot=0xCBF,
},
- [0x0CC0]={
+ [0xCC0]={
category="mc",
description="KANNADA VOWEL SIGN II",
direction="l",
linebreak="cm",
- specials={ "char", 0x0CBF, 0x0CD5 },
- unicodeslot=0x0CC0,
+ specials={ "char", 0xCBF, 0xCD5 },
+ unicodeslot=0xCC0,
},
- [0x0CC1]={
+ [0xCC1]={
category="mc",
description="KANNADA VOWEL SIGN U",
direction="l",
linebreak="cm",
- unicodeslot=0x0CC1,
+ unicodeslot=0xCC1,
},
- [0x0CC2]={
+ [0xCC2]={
category="mc",
description="KANNADA VOWEL SIGN UU",
direction="l",
linebreak="cm",
- unicodeslot=0x0CC2,
+ unicodeslot=0xCC2,
},
- [0x0CC3]={
+ [0xCC3]={
category="mc",
description="KANNADA VOWEL SIGN VOCALIC R",
direction="l",
linebreak="cm",
- unicodeslot=0x0CC3,
+ unicodeslot=0xCC3,
},
- [0x0CC4]={
+ [0xCC4]={
category="mc",
description="KANNADA VOWEL SIGN VOCALIC RR",
direction="l",
linebreak="cm",
- unicodeslot=0x0CC4,
+ unicodeslot=0xCC4,
},
- [0x0CC6]={
+ [0xCC6]={
category="mn",
description="KANNADA VOWEL SIGN E",
direction="l",
linebreak="cm",
- unicodeslot=0x0CC6,
+ unicodeslot=0xCC6,
},
- [0x0CC7]={
+ [0xCC7]={
category="mc",
description="KANNADA VOWEL SIGN EE",
direction="l",
linebreak="cm",
- specials={ "char", 0x0CC6, 0x0CD5 },
- unicodeslot=0x0CC7,
+ specials={ "char", 0xCC6, 0xCD5 },
+ unicodeslot=0xCC7,
},
- [0x0CC8]={
+ [0xCC8]={
category="mc",
description="KANNADA VOWEL SIGN AI",
direction="l",
linebreak="cm",
- specials={ "char", 0x0CC6, 0x0CD6 },
- unicodeslot=0x0CC8,
+ specials={ "char", 0xCC6, 0xCD6 },
+ unicodeslot=0xCC8,
},
- [0x0CCA]={
+ [0xCCA]={
category="mc",
description="KANNADA VOWEL SIGN O",
direction="l",
linebreak="cm",
- specials={ "char", 0x0CC6, 0x0CC2 },
- unicodeslot=0x0CCA,
+ specials={ "char", 0xCC6, 0xCC2 },
+ unicodeslot=0xCCA,
},
- [0x0CCB]={
+ [0xCCB]={
category="mc",
description="KANNADA VOWEL SIGN OO",
direction="l",
linebreak="cm",
- specials={ "char", 0x0CCA, 0x0CD5 },
- unicodeslot=0x0CCB,
+ specials={ "char", 0xCCA, 0xCD5 },
+ unicodeslot=0xCCB,
},
- [0x0CCC]={
+ [0xCCC]={
category="mn",
description="KANNADA VOWEL SIGN AU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0CCC,
+ unicodeslot=0xCCC,
},
- [0x0CCD]={
+ [0xCCD]={
category="mn",
+ combining=0x9,
description="KANNADA SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0CCD,
+ unicodeslot=0xCCD,
},
- [0x0CD5]={
+ [0xCD5]={
category="mc",
description="KANNADA LENGTH MARK",
direction="l",
linebreak="cm",
- unicodeslot=0x0CD5,
+ unicodeslot=0xCD5,
},
- [0x0CD6]={
+ [0xCD6]={
category="mc",
description="KANNADA AI LENGTH MARK",
direction="l",
linebreak="cm",
- unicodeslot=0x0CD6,
+ unicodeslot=0xCD6,
},
- [0x0CDE]={
+ [0xCDE]={
category="lo",
description="KANNADA LETTER FA",
direction="l",
linebreak="al",
- unicodeslot=0x0CDE,
+ unicodeslot=0xCDE,
},
- [0x0CE0]={
+ [0xCE0]={
category="lo",
description="KANNADA LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x0CE0,
+ unicodeslot=0xCE0,
},
- [0x0CE1]={
+ [0xCE1]={
category="lo",
description="KANNADA LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x0CE1,
+ unicodeslot=0xCE1,
},
- [0x0CE2]={
+ [0xCE2]={
category="mn",
description="KANNADA VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0CE2,
+ unicodeslot=0xCE2,
},
- [0x0CE3]={
+ [0xCE3]={
category="mn",
description="KANNADA VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0CE3,
+ unicodeslot=0xCE3,
},
- [0x0CE6]={
+ [0xCE6]={
category="nd",
description="KANNADA DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0CE6,
+ unicodeslot=0xCE6,
},
- [0x0CE7]={
+ [0xCE7]={
category="nd",
description="KANNADA DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0CE7,
+ unicodeslot=0xCE7,
},
- [0x0CE8]={
+ [0xCE8]={
category="nd",
description="KANNADA DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0CE8,
+ unicodeslot=0xCE8,
},
- [0x0CE9]={
+ [0xCE9]={
category="nd",
description="KANNADA DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0CE9,
+ unicodeslot=0xCE9,
},
- [0x0CEA]={
+ [0xCEA]={
category="nd",
description="KANNADA DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0CEA,
+ unicodeslot=0xCEA,
},
- [0x0CEB]={
+ [0xCEB]={
category="nd",
description="KANNADA DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0CEB,
+ unicodeslot=0xCEB,
},
- [0x0CEC]={
+ [0xCEC]={
category="nd",
description="KANNADA DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0CEC,
+ unicodeslot=0xCEC,
},
- [0x0CED]={
+ [0xCED]={
category="nd",
description="KANNADA DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0CED,
+ unicodeslot=0xCED,
},
- [0x0CEE]={
+ [0xCEE]={
category="nd",
description="KANNADA DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0CEE,
+ unicodeslot=0xCEE,
},
- [0x0CEF]={
+ [0xCEF]={
category="nd",
description="KANNADA DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0CEF,
+ unicodeslot=0xCEF,
},
- [0x0CF1]={
+ [0xCF1]={
category="so",
description="KANNADA SIGN JIHVAMULIYA",
direction="l",
linebreak="al",
- unicodeslot=0x0CF1,
+ unicodeslot=0xCF1,
},
- [0x0CF2]={
+ [0xCF2]={
category="so",
description="KANNADA SIGN UPADHMANIYA",
direction="l",
linebreak="al",
- unicodeslot=0x0CF2,
+ unicodeslot=0xCF2,
+ },
+ [0xD01]={
+ category="mn",
+ description="MALAYALAM SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xD01,
},
- [0x0D02]={
+ [0xD02]={
category="mc",
description="MALAYALAM SIGN ANUSVARA",
direction="l",
linebreak="cm",
- unicodeslot=0x0D02,
+ unicodeslot=0xD02,
},
- [0x0D03]={
+ [0xD03]={
category="mc",
description="MALAYALAM SIGN VISARGA",
direction="l",
linebreak="cm",
- unicodeslot=0x0D03,
+ unicodeslot=0xD03,
},
- [0x0D05]={
+ [0xD05]={
category="lo",
description="MALAYALAM LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0D05,
+ unicodeslot=0xD05,
},
- [0x0D06]={
+ [0xD06]={
category="lo",
description="MALAYALAM LETTER AA",
direction="l",
linebreak="al",
- unicodeslot=0x0D06,
+ unicodeslot=0xD06,
},
- [0x0D07]={
+ [0xD07]={
category="lo",
description="MALAYALAM LETTER I",
direction="l",
linebreak="al",
- unicodeslot=0x0D07,
+ unicodeslot=0xD07,
},
- [0x0D08]={
+ [0xD08]={
category="lo",
description="MALAYALAM LETTER II",
direction="l",
linebreak="al",
- unicodeslot=0x0D08,
+ unicodeslot=0xD08,
},
- [0x0D09]={
+ [0xD09]={
category="lo",
description="MALAYALAM LETTER U",
direction="l",
linebreak="al",
- unicodeslot=0x0D09,
+ unicodeslot=0xD09,
},
- [0x0D0A]={
+ [0xD0A]={
category="lo",
description="MALAYALAM LETTER UU",
direction="l",
linebreak="al",
- unicodeslot=0x0D0A,
+ unicodeslot=0xD0A,
},
- [0x0D0B]={
+ [0xD0B]={
category="lo",
description="MALAYALAM LETTER VOCALIC R",
direction="l",
linebreak="al",
- unicodeslot=0x0D0B,
+ unicodeslot=0xD0B,
},
- [0x0D0C]={
+ [0xD0C]={
category="lo",
description="MALAYALAM LETTER VOCALIC L",
direction="l",
linebreak="al",
- unicodeslot=0x0D0C,
+ unicodeslot=0xD0C,
},
- [0x0D0E]={
+ [0xD0E]={
category="lo",
description="MALAYALAM LETTER E",
direction="l",
linebreak="al",
- unicodeslot=0x0D0E,
+ unicodeslot=0xD0E,
},
- [0x0D0F]={
+ [0xD0F]={
category="lo",
description="MALAYALAM LETTER EE",
direction="l",
linebreak="al",
- unicodeslot=0x0D0F,
+ unicodeslot=0xD0F,
},
- [0x0D10]={
+ [0xD10]={
category="lo",
description="MALAYALAM LETTER AI",
direction="l",
linebreak="al",
- unicodeslot=0x0D10,
+ unicodeslot=0xD10,
},
- [0x0D12]={
+ [0xD12]={
category="lo",
description="MALAYALAM LETTER O",
direction="l",
linebreak="al",
- unicodeslot=0x0D12,
+ unicodeslot=0xD12,
},
- [0x0D13]={
+ [0xD13]={
category="lo",
description="MALAYALAM LETTER OO",
direction="l",
linebreak="al",
- unicodeslot=0x0D13,
+ unicodeslot=0xD13,
},
- [0x0D14]={
+ [0xD14]={
category="lo",
description="MALAYALAM LETTER AU",
direction="l",
linebreak="al",
- unicodeslot=0x0D14,
+ unicodeslot=0xD14,
},
- [0x0D15]={
+ [0xD15]={
category="lo",
description="MALAYALAM LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0D15,
+ unicodeslot=0xD15,
},
- [0x0D16]={
+ [0xD16]={
category="lo",
description="MALAYALAM LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D16,
+ unicodeslot=0xD16,
},
- [0x0D17]={
+ [0xD17]={
category="lo",
description="MALAYALAM LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0D17,
+ unicodeslot=0xD17,
},
- [0x0D18]={
+ [0xD18]={
category="lo",
description="MALAYALAM LETTER GHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D18,
+ unicodeslot=0xD18,
},
- [0x0D19]={
+ [0xD19]={
category="lo",
description="MALAYALAM LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0D19,
+ unicodeslot=0xD19,
},
- [0x0D1A]={
+ [0xD1A]={
category="lo",
description="MALAYALAM LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0D1A,
+ unicodeslot=0xD1A,
},
- [0x0D1B]={
+ [0xD1B]={
category="lo",
description="MALAYALAM LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D1B,
+ unicodeslot=0xD1B,
},
- [0x0D1C]={
+ [0xD1C]={
category="lo",
description="MALAYALAM LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0D1C,
+ unicodeslot=0xD1C,
},
- [0x0D1D]={
+ [0xD1D]={
category="lo",
description="MALAYALAM LETTER JHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D1D,
+ unicodeslot=0xD1D,
},
- [0x0D1E]={
+ [0xD1E]={
category="lo",
description="MALAYALAM LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0D1E,
+ unicodeslot=0xD1E,
},
- [0x0D1F]={
+ [0xD1F]={
category="lo",
description="MALAYALAM LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0D1F,
+ unicodeslot=0xD1F,
},
- [0x0D20]={
+ [0xD20]={
category="lo",
description="MALAYALAM LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D20,
+ unicodeslot=0xD20,
},
- [0x0D21]={
+ [0xD21]={
category="lo",
description="MALAYALAM LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0D21,
+ unicodeslot=0xD21,
},
- [0x0D22]={
+ [0xD22]={
category="lo",
description="MALAYALAM LETTER DDHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D22,
+ unicodeslot=0xD22,
},
- [0x0D23]={
+ [0xD23]={
category="lo",
description="MALAYALAM LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D23,
+ unicodeslot=0xD23,
},
- [0x0D24]={
+ [0xD24]={
category="lo",
description="MALAYALAM LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0D24,
+ unicodeslot=0xD24,
},
- [0x0D25]={
+ [0xD25]={
category="lo",
description="MALAYALAM LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0D25,
+ unicodeslot=0xD25,
},
- [0x0D26]={
+ [0xD26]={
category="lo",
description="MALAYALAM LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0D26,
+ unicodeslot=0xD26,
},
- [0x0D27]={
+ [0xD27]={
category="lo",
description="MALAYALAM LETTER DHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D27,
+ unicodeslot=0xD27,
},
- [0x0D28]={
+ [0xD28]={
category="lo",
description="MALAYALAM LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0D28,
+ unicodeslot=0xD28,
},
- [0x0D29]={
+ [0xD29]={
category="lo",
description="MALAYALAM LETTER NNNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D29,
+ unicodeslot=0xD29,
},
- [0x0D2A]={
+ [0xD2A]={
category="lo",
description="MALAYALAM LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0D2A,
+ unicodeslot=0xD2A,
},
- [0x0D2B]={
+ [0xD2B]={
category="lo",
description="MALAYALAM LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D2B,
+ unicodeslot=0xD2B,
},
- [0x0D2C]={
+ [0xD2C]={
category="lo",
description="MALAYALAM LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0D2C,
+ unicodeslot=0xD2C,
},
- [0x0D2D]={
+ [0xD2D]={
category="lo",
description="MALAYALAM LETTER BHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D2D,
+ unicodeslot=0xD2D,
},
- [0x0D2E]={
+ [0xD2E]={
category="lo",
description="MALAYALAM LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0D2E,
+ unicodeslot=0xD2E,
},
- [0x0D2F]={
+ [0xD2F]={
category="lo",
description="MALAYALAM LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0D2F,
+ unicodeslot=0xD2F,
},
- [0x0D30]={
+ [0xD30]={
category="lo",
description="MALAYALAM LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0D30,
+ unicodeslot=0xD30,
},
- [0x0D31]={
+ [0xD31]={
category="lo",
description="MALAYALAM LETTER RRA",
direction="l",
linebreak="al",
- unicodeslot=0x0D31,
+ unicodeslot=0xD31,
},
- [0x0D32]={
+ [0xD32]={
category="lo",
description="MALAYALAM LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0D32,
+ unicodeslot=0xD32,
},
- [0x0D33]={
+ [0xD33]={
category="lo",
description="MALAYALAM LETTER LLA",
direction="l",
linebreak="al",
- unicodeslot=0x0D33,
+ unicodeslot=0xD33,
},
- [0x0D34]={
+ [0xD34]={
category="lo",
description="MALAYALAM LETTER LLLA",
direction="l",
linebreak="al",
- unicodeslot=0x0D34,
+ unicodeslot=0xD34,
},
- [0x0D35]={
+ [0xD35]={
category="lo",
description="MALAYALAM LETTER VA",
direction="l",
linebreak="al",
- unicodeslot=0x0D35,
+ unicodeslot=0xD35,
},
- [0x0D36]={
+ [0xD36]={
category="lo",
description="MALAYALAM LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D36,
+ unicodeslot=0xD36,
},
- [0x0D37]={
+ [0xD37]={
category="lo",
description="MALAYALAM LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0D37,
+ unicodeslot=0xD37,
},
- [0x0D38]={
+ [0xD38]={
category="lo",
description="MALAYALAM LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0D38,
+ unicodeslot=0xD38,
},
- [0x0D39]={
+ [0xD39]={
category="lo",
description="MALAYALAM LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0D39,
+ unicodeslot=0xD39,
},
- [0x0D3A]={
+ [0xD3A]={
category="lo",
description="MALAYALAM LETTER TTTA",
direction="l",
linebreak="al",
- unicodeslot=0x0D3A,
+ unicodeslot=0xD3A,
},
- [0x0D3D]={
+ [0xD3D]={
category="lo",
description="MALAYALAM SIGN AVAGRAHA",
direction="l",
linebreak="al",
- unicodeslot=0x0D3D,
+ unicodeslot=0xD3D,
},
- [0x0D3E]={
+ [0xD3E]={
category="mc",
description="MALAYALAM VOWEL SIGN AA",
direction="l",
linebreak="cm",
- unicodeslot=0x0D3E,
+ unicodeslot=0xD3E,
},
- [0x0D3F]={
+ [0xD3F]={
category="mc",
description="MALAYALAM VOWEL SIGN I",
direction="l",
linebreak="cm",
- unicodeslot=0x0D3F,
+ unicodeslot=0xD3F,
},
- [0x0D40]={
+ [0xD40]={
category="mc",
description="MALAYALAM VOWEL SIGN II",
direction="l",
linebreak="cm",
- unicodeslot=0x0D40,
+ unicodeslot=0xD40,
},
- [0x0D41]={
+ [0xD41]={
category="mn",
description="MALAYALAM VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D41,
+ unicodeslot=0xD41,
},
- [0x0D42]={
+ [0xD42]={
category="mn",
description="MALAYALAM VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D42,
+ unicodeslot=0xD42,
},
- [0x0D43]={
+ [0xD43]={
category="mn",
description="MALAYALAM VOWEL SIGN VOCALIC R",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D43,
+ unicodeslot=0xD43,
},
- [0x0D44]={
+ [0xD44]={
category="mn",
description="MALAYALAM VOWEL SIGN VOCALIC RR",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D44,
+ unicodeslot=0xD44,
},
- [0x0D46]={
+ [0xD46]={
category="mc",
description="MALAYALAM VOWEL SIGN E",
direction="l",
linebreak="cm",
- unicodeslot=0x0D46,
+ unicodeslot=0xD46,
},
- [0x0D47]={
+ [0xD47]={
category="mc",
description="MALAYALAM VOWEL SIGN EE",
direction="l",
linebreak="cm",
- unicodeslot=0x0D47,
+ unicodeslot=0xD47,
},
- [0x0D48]={
+ [0xD48]={
category="mc",
description="MALAYALAM VOWEL SIGN AI",
direction="l",
linebreak="cm",
- unicodeslot=0x0D48,
+ unicodeslot=0xD48,
},
- [0x0D4A]={
+ [0xD4A]={
category="mc",
description="MALAYALAM VOWEL SIGN O",
direction="l",
linebreak="cm",
- specials={ "char", 0x0D46, 0x0D3E },
- unicodeslot=0x0D4A,
+ specials={ "char", 0xD46, 0xD3E },
+ unicodeslot=0xD4A,
},
- [0x0D4B]={
+ [0xD4B]={
category="mc",
description="MALAYALAM VOWEL SIGN OO",
direction="l",
linebreak="cm",
- specials={ "char", 0x0D47, 0x0D3E },
- unicodeslot=0x0D4B,
+ specials={ "char", 0xD47, 0xD3E },
+ unicodeslot=0xD4B,
},
- [0x0D4C]={
+ [0xD4C]={
category="mc",
description="MALAYALAM VOWEL SIGN AU",
direction="l",
linebreak="cm",
- specials={ "char", 0x0D46, 0x0D57 },
- unicodeslot=0x0D4C,
+ specials={ "char", 0xD46, 0xD57 },
+ unicodeslot=0xD4C,
},
- [0x0D4D]={
+ [0xD4D]={
category="mn",
+ combining=0x9,
description="MALAYALAM SIGN VIRAMA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D4D,
+ unicodeslot=0xD4D,
},
- [0x0D4E]={
+ [0xD4E]={
category="lo",
description="MALAYALAM LETTER DOT REPH",
direction="l",
linebreak="al",
- unicodeslot=0x0D4E,
+ unicodeslot=0xD4E,
},
- [0x0D57]={
+ [0xD57]={
category="mc",
description="MALAYALAM AU LENGTH MARK",
direction="l",
linebreak="cm",
- unicodeslot=0x0D57,
+ unicodeslot=0xD57,
},
- [0x0D60]={
+ [0xD60]={
category="lo",
description="MALAYALAM LETTER VOCALIC RR",
direction="l",
linebreak="al",
- unicodeslot=0x0D60,
+ unicodeslot=0xD60,
},
- [0x0D61]={
+ [0xD61]={
category="lo",
description="MALAYALAM LETTER VOCALIC LL",
direction="l",
linebreak="al",
- unicodeslot=0x0D61,
+ unicodeslot=0xD61,
},
- [0x0D62]={
+ [0xD62]={
category="mn",
description="MALAYALAM VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D62,
+ unicodeslot=0xD62,
},
- [0x0D63]={
+ [0xD63]={
category="mn",
description="MALAYALAM VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0D63,
+ unicodeslot=0xD63,
},
- [0x0D66]={
+ [0xD66]={
category="nd",
description="MALAYALAM DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0D66,
+ unicodeslot=0xD66,
},
- [0x0D67]={
+ [0xD67]={
category="nd",
description="MALAYALAM DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0D67,
+ unicodeslot=0xD67,
},
- [0x0D68]={
+ [0xD68]={
category="nd",
description="MALAYALAM DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0D68,
+ unicodeslot=0xD68,
},
- [0x0D69]={
+ [0xD69]={
category="nd",
description="MALAYALAM DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0D69,
+ unicodeslot=0xD69,
},
- [0x0D6A]={
+ [0xD6A]={
category="nd",
description="MALAYALAM DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0D6A,
+ unicodeslot=0xD6A,
},
- [0x0D6B]={
+ [0xD6B]={
category="nd",
description="MALAYALAM DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0D6B,
+ unicodeslot=0xD6B,
},
- [0x0D6C]={
+ [0xD6C]={
category="nd",
description="MALAYALAM DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0D6C,
+ unicodeslot=0xD6C,
},
- [0x0D6D]={
+ [0xD6D]={
category="nd",
description="MALAYALAM DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0D6D,
+ unicodeslot=0xD6D,
},
- [0x0D6E]={
+ [0xD6E]={
category="nd",
description="MALAYALAM DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0D6E,
+ unicodeslot=0xD6E,
},
- [0x0D6F]={
+ [0xD6F]={
category="nd",
description="MALAYALAM DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0D6F,
+ unicodeslot=0xD6F,
},
- [0x0D70]={
+ [0xD70]={
category="no",
description="MALAYALAM NUMBER TEN",
direction="l",
linebreak="al",
- unicodeslot=0x0D70,
+ unicodeslot=0xD70,
},
- [0x0D71]={
+ [0xD71]={
category="no",
description="MALAYALAM NUMBER ONE HUNDRED",
direction="l",
linebreak="al",
- unicodeslot=0x0D71,
+ unicodeslot=0xD71,
},
- [0x0D72]={
+ [0xD72]={
category="no",
description="MALAYALAM NUMBER ONE THOUSAND",
direction="l",
linebreak="al",
- unicodeslot=0x0D72,
+ unicodeslot=0xD72,
},
- [0x0D73]={
+ [0xD73]={
category="no",
description="MALAYALAM FRACTION ONE QUARTER",
direction="l",
linebreak="al",
- unicodeslot=0x0D73,
+ unicodeslot=0xD73,
},
- [0x0D74]={
+ [0xD74]={
category="no",
description="MALAYALAM FRACTION ONE HALF",
direction="l",
linebreak="al",
- unicodeslot=0x0D74,
+ unicodeslot=0xD74,
},
- [0x0D75]={
+ [0xD75]={
category="no",
description="MALAYALAM FRACTION THREE QUARTERS",
direction="l",
linebreak="al",
- unicodeslot=0x0D75,
+ unicodeslot=0xD75,
},
- [0x0D79]={
+ [0xD79]={
category="so",
description="MALAYALAM DATE MARK",
direction="l",
linebreak="po",
- unicodeslot=0x0D79,
+ unicodeslot=0xD79,
},
- [0x0D7A]={
+ [0xD7A]={
category="lo",
description="MALAYALAM LETTER CHILLU NN",
direction="l",
linebreak="al",
- unicodeslot=0x0D7A,
+ unicodeslot=0xD7A,
},
- [0x0D7B]={
+ [0xD7B]={
category="lo",
description="MALAYALAM LETTER CHILLU N",
direction="l",
linebreak="al",
- unicodeslot=0x0D7B,
+ unicodeslot=0xD7B,
},
- [0x0D7C]={
+ [0xD7C]={
category="lo",
description="MALAYALAM LETTER CHILLU RR",
direction="l",
linebreak="al",
- unicodeslot=0x0D7C,
+ unicodeslot=0xD7C,
},
- [0x0D7D]={
+ [0xD7D]={
category="lo",
description="MALAYALAM LETTER CHILLU L",
direction="l",
linebreak="al",
- unicodeslot=0x0D7D,
+ unicodeslot=0xD7D,
},
- [0x0D7E]={
+ [0xD7E]={
category="lo",
description="MALAYALAM LETTER CHILLU LL",
direction="l",
linebreak="al",
- unicodeslot=0x0D7E,
+ unicodeslot=0xD7E,
},
- [0x0D7F]={
+ [0xD7F]={
category="lo",
description="MALAYALAM LETTER CHILLU K",
direction="l",
linebreak="al",
- unicodeslot=0x0D7F,
+ unicodeslot=0xD7F,
},
- [0x0D82]={
+ [0xD82]={
category="mc",
description="SINHALA SIGN ANUSVARAYA",
direction="l",
linebreak="cm",
- unicodeslot=0x0D82,
+ unicodeslot=0xD82,
},
- [0x0D83]={
+ [0xD83]={
category="mc",
description="SINHALA SIGN VISARGAYA",
direction="l",
linebreak="cm",
- unicodeslot=0x0D83,
+ unicodeslot=0xD83,
},
- [0x0D85]={
+ [0xD85]={
category="lo",
description="SINHALA LETTER AYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D85,
+ unicodeslot=0xD85,
},
- [0x0D86]={
+ [0xD86]={
category="lo",
description="SINHALA LETTER AAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D86,
+ unicodeslot=0xD86,
},
- [0x0D87]={
+ [0xD87]={
category="lo",
description="SINHALA LETTER AEYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D87,
+ unicodeslot=0xD87,
},
- [0x0D88]={
+ [0xD88]={
category="lo",
description="SINHALA LETTER AEEYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D88,
+ unicodeslot=0xD88,
},
- [0x0D89]={
+ [0xD89]={
category="lo",
description="SINHALA LETTER IYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D89,
+ unicodeslot=0xD89,
},
- [0x0D8A]={
+ [0xD8A]={
category="lo",
description="SINHALA LETTER IIYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D8A,
+ unicodeslot=0xD8A,
},
- [0x0D8B]={
+ [0xD8B]={
category="lo",
description="SINHALA LETTER UYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D8B,
+ unicodeslot=0xD8B,
},
- [0x0D8C]={
+ [0xD8C]={
category="lo",
description="SINHALA LETTER UUYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D8C,
+ unicodeslot=0xD8C,
},
- [0x0D8D]={
+ [0xD8D]={
category="lo",
description="SINHALA LETTER IRUYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D8D,
+ unicodeslot=0xD8D,
},
- [0x0D8E]={
+ [0xD8E]={
category="lo",
description="SINHALA LETTER IRUUYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D8E,
+ unicodeslot=0xD8E,
},
- [0x0D8F]={
+ [0xD8F]={
category="lo",
description="SINHALA LETTER ILUYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D8F,
+ unicodeslot=0xD8F,
},
- [0x0D90]={
+ [0xD90]={
category="lo",
description="SINHALA LETTER ILUUYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D90,
+ unicodeslot=0xD90,
},
- [0x0D91]={
+ [0xD91]={
category="lo",
description="SINHALA LETTER EYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D91,
+ unicodeslot=0xD91,
},
- [0x0D92]={
+ [0xD92]={
category="lo",
description="SINHALA LETTER EEYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D92,
+ unicodeslot=0xD92,
},
- [0x0D93]={
+ [0xD93]={
category="lo",
description="SINHALA LETTER AIYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D93,
+ unicodeslot=0xD93,
},
- [0x0D94]={
+ [0xD94]={
category="lo",
description="SINHALA LETTER OYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D94,
+ unicodeslot=0xD94,
},
- [0x0D95]={
+ [0xD95]={
category="lo",
description="SINHALA LETTER OOYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D95,
+ unicodeslot=0xD95,
},
- [0x0D96]={
+ [0xD96]={
category="lo",
description="SINHALA LETTER AUYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D96,
+ unicodeslot=0xD96,
},
- [0x0D9A]={
+ [0xD9A]={
category="lo",
description="SINHALA LETTER ALPAPRAANA KAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D9A,
+ unicodeslot=0xD9A,
},
- [0x0D9B]={
+ [0xD9B]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA KAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D9B,
+ unicodeslot=0xD9B,
},
- [0x0D9C]={
+ [0xD9C]={
category="lo",
description="SINHALA LETTER ALPAPRAANA GAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D9C,
+ unicodeslot=0xD9C,
},
- [0x0D9D]={
+ [0xD9D]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA GAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D9D,
+ unicodeslot=0xD9D,
},
- [0x0D9E]={
+ [0xD9E]={
category="lo",
description="SINHALA LETTER KANTAJA NAASIKYAYA",
direction="l",
linebreak="al",
- unicodeslot=0x0D9E,
+ unicodeslot=0xD9E,
},
- [0x0D9F]={
+ [0xD9F]={
category="lo",
description="SINHALA LETTER SANYAKA GAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0D9F,
+ unicodeslot=0xD9F,
},
- [0x0DA0]={
+ [0xDA0]={
category="lo",
description="SINHALA LETTER ALPAPRAANA CAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA0,
+ unicodeslot=0xDA0,
},
- [0x0DA1]={
+ [0xDA1]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA CAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA1,
+ unicodeslot=0xDA1,
},
- [0x0DA2]={
+ [0xDA2]={
category="lo",
description="SINHALA LETTER ALPAPRAANA JAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA2,
+ unicodeslot=0xDA2,
},
- [0x0DA3]={
+ [0xDA3]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA JAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA3,
+ unicodeslot=0xDA3,
},
- [0x0DA4]={
+ [0xDA4]={
category="lo",
description="SINHALA LETTER TAALUJA NAASIKYAYA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA4,
+ unicodeslot=0xDA4,
},
- [0x0DA5]={
+ [0xDA5]={
category="lo",
description="SINHALA LETTER TAALUJA SANYOOGA NAAKSIKYAYA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA5,
+ unicodeslot=0xDA5,
},
- [0x0DA6]={
+ [0xDA6]={
category="lo",
description="SINHALA LETTER SANYAKA JAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA6,
+ unicodeslot=0xDA6,
},
- [0x0DA7]={
+ [0xDA7]={
category="lo",
description="SINHALA LETTER ALPAPRAANA TTAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA7,
+ unicodeslot=0xDA7,
},
- [0x0DA8]={
+ [0xDA8]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA TTAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA8,
+ unicodeslot=0xDA8,
},
- [0x0DA9]={
+ [0xDA9]={
category="lo",
description="SINHALA LETTER ALPAPRAANA DDAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DA9,
+ unicodeslot=0xDA9,
},
- [0x0DAA]={
+ [0xDAA]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA DDAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DAA,
+ unicodeslot=0xDAA,
},
- [0x0DAB]={
+ [0xDAB]={
category="lo",
description="SINHALA LETTER MUURDHAJA NAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DAB,
+ unicodeslot=0xDAB,
},
- [0x0DAC]={
+ [0xDAC]={
category="lo",
description="SINHALA LETTER SANYAKA DDAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DAC,
+ unicodeslot=0xDAC,
},
- [0x0DAD]={
+ [0xDAD]={
category="lo",
description="SINHALA LETTER ALPAPRAANA TAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DAD,
+ unicodeslot=0xDAD,
},
- [0x0DAE]={
+ [0xDAE]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA TAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DAE,
+ unicodeslot=0xDAE,
},
- [0x0DAF]={
+ [0xDAF]={
category="lo",
description="SINHALA LETTER ALPAPRAANA DAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DAF,
+ unicodeslot=0xDAF,
},
- [0x0DB0]={
+ [0xDB0]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA DAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB0,
+ unicodeslot=0xDB0,
},
- [0x0DB1]={
+ [0xDB1]={
category="lo",
description="SINHALA LETTER DANTAJA NAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB1,
+ unicodeslot=0xDB1,
},
- [0x0DB3]={
+ [0xDB3]={
category="lo",
description="SINHALA LETTER SANYAKA DAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB3,
+ unicodeslot=0xDB3,
},
- [0x0DB4]={
+ [0xDB4]={
category="lo",
description="SINHALA LETTER ALPAPRAANA PAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB4,
+ unicodeslot=0xDB4,
},
- [0x0DB5]={
+ [0xDB5]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA PAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB5,
+ unicodeslot=0xDB5,
},
- [0x0DB6]={
+ [0xDB6]={
category="lo",
description="SINHALA LETTER ALPAPRAANA BAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB6,
+ unicodeslot=0xDB6,
},
- [0x0DB7]={
+ [0xDB7]={
category="lo",
description="SINHALA LETTER MAHAAPRAANA BAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB7,
+ unicodeslot=0xDB7,
},
- [0x0DB8]={
+ [0xDB8]={
category="lo",
description="SINHALA LETTER MAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB8,
+ unicodeslot=0xDB8,
},
- [0x0DB9]={
+ [0xDB9]={
category="lo",
description="SINHALA LETTER AMBA BAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DB9,
+ unicodeslot=0xDB9,
},
- [0x0DBA]={
+ [0xDBA]={
category="lo",
description="SINHALA LETTER YAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DBA,
+ unicodeslot=0xDBA,
},
- [0x0DBB]={
+ [0xDBB]={
category="lo",
description="SINHALA LETTER RAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DBB,
+ unicodeslot=0xDBB,
},
- [0x0DBD]={
+ [0xDBD]={
category="lo",
description="SINHALA LETTER DANTAJA LAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DBD,
+ unicodeslot=0xDBD,
},
- [0x0DC0]={
+ [0xDC0]={
category="lo",
description="SINHALA LETTER VAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC0,
+ unicodeslot=0xDC0,
},
- [0x0DC1]={
+ [0xDC1]={
category="lo",
description="SINHALA LETTER TAALUJA SAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC1,
+ unicodeslot=0xDC1,
},
- [0x0DC2]={
+ [0xDC2]={
category="lo",
description="SINHALA LETTER MUURDHAJA SAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC2,
+ unicodeslot=0xDC2,
},
- [0x0DC3]={
+ [0xDC3]={
category="lo",
description="SINHALA LETTER DANTAJA SAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC3,
+ unicodeslot=0xDC3,
},
- [0x0DC4]={
+ [0xDC4]={
category="lo",
description="SINHALA LETTER HAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC4,
+ unicodeslot=0xDC4,
},
- [0x0DC5]={
+ [0xDC5]={
category="lo",
description="SINHALA LETTER MUURDHAJA LAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC5,
+ unicodeslot=0xDC5,
},
- [0x0DC6]={
+ [0xDC6]={
category="lo",
description="SINHALA LETTER FAYANNA",
direction="l",
linebreak="al",
- unicodeslot=0x0DC6,
+ unicodeslot=0xDC6,
},
- [0x0DCA]={
+ [0xDCA]={
category="mn",
+ combining=0x9,
description="SINHALA SIGN AL-LAKUNA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0DCA,
+ unicodeslot=0xDCA,
},
- [0x0DCF]={
+ [0xDCF]={
category="mc",
description="SINHALA VOWEL SIGN AELA-PILLA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DCF,
+ unicodeslot=0xDCF,
},
- [0x0DD0]={
+ [0xDD0]={
category="mc",
description="SINHALA VOWEL SIGN KETTI AEDA-PILLA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DD0,
+ unicodeslot=0xDD0,
},
- [0x0DD1]={
+ [0xDD1]={
category="mc",
description="SINHALA VOWEL SIGN DIGA AEDA-PILLA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DD1,
+ unicodeslot=0xDD1,
},
- [0x0DD2]={
+ [0xDD2]={
category="mn",
description="SINHALA VOWEL SIGN KETTI IS-PILLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0DD2,
+ unicodeslot=0xDD2,
},
- [0x0DD3]={
+ [0xDD3]={
category="mn",
description="SINHALA VOWEL SIGN DIGA IS-PILLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0DD3,
+ unicodeslot=0xDD3,
},
- [0x0DD4]={
+ [0xDD4]={
category="mn",
description="SINHALA VOWEL SIGN KETTI PAA-PILLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0DD4,
+ unicodeslot=0xDD4,
},
- [0x0DD6]={
+ [0xDD6]={
category="mn",
description="SINHALA VOWEL SIGN DIGA PAA-PILLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0DD6,
+ unicodeslot=0xDD6,
},
- [0x0DD8]={
+ [0xDD8]={
category="mc",
description="SINHALA VOWEL SIGN GAETTA-PILLA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DD8,
+ unicodeslot=0xDD8,
},
- [0x0DD9]={
+ [0xDD9]={
category="mc",
description="SINHALA VOWEL SIGN KOMBUVA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DD9,
+ unicodeslot=0xDD9,
},
- [0x0DDA]={
+ [0xDDA]={
category="mc",
description="SINHALA VOWEL SIGN DIGA KOMBUVA",
direction="l",
linebreak="cm",
- specials={ "char", 0x0DD9, 0x0DCA },
- unicodeslot=0x0DDA,
+ specials={ "char", 0xDD9, 0xDCA },
+ unicodeslot=0xDDA,
},
- [0x0DDB]={
+ [0xDDB]={
category="mc",
description="SINHALA VOWEL SIGN KOMBU DEKA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DDB,
+ unicodeslot=0xDDB,
},
- [0x0DDC]={
+ [0xDDC]={
category="mc",
description="SINHALA VOWEL SIGN KOMBUVA HAA AELA-PILLA",
direction="l",
linebreak="cm",
- specials={ "char", 0x0DD9, 0x0DCF },
- unicodeslot=0x0DDC,
+ specials={ "char", 0xDD9, 0xDCF },
+ unicodeslot=0xDDC,
},
- [0x0DDD]={
+ [0xDDD]={
category="mc",
description="SINHALA VOWEL SIGN KOMBUVA HAA DIGA AELA-PILLA",
direction="l",
linebreak="cm",
- specials={ "char", 0x0DDC, 0x0DCA },
- unicodeslot=0x0DDD,
+ specials={ "char", 0xDDC, 0xDCA },
+ unicodeslot=0xDDD,
},
- [0x0DDE]={
+ [0xDDE]={
category="mc",
description="SINHALA VOWEL SIGN KOMBUVA HAA GAYANUKITTA",
direction="l",
linebreak="cm",
- specials={ "char", 0x0DD9, 0x0DDF },
- unicodeslot=0x0DDE,
+ specials={ "char", 0xDD9, 0xDDF },
+ unicodeslot=0xDDE,
},
- [0x0DDF]={
+ [0xDDF]={
category="mc",
description="SINHALA VOWEL SIGN GAYANUKITTA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DDF,
+ unicodeslot=0xDDF,
+ },
+ [0xDE6]={
+ category="nd",
+ description="SINHALA LITH DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDE6,
+ },
+ [0xDE7]={
+ category="nd",
+ description="SINHALA LITH DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDE7,
+ },
+ [0xDE8]={
+ category="nd",
+ description="SINHALA LITH DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDE8,
+ },
+ [0xDE9]={
+ category="nd",
+ description="SINHALA LITH DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDE9,
},
- [0x0DF2]={
+ [0xDEA]={
+ category="nd",
+ description="SINHALA LITH DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDEA,
+ },
+ [0xDEB]={
+ category="nd",
+ description="SINHALA LITH DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDEB,
+ },
+ [0xDEC]={
+ category="nd",
+ description="SINHALA LITH DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDEC,
+ },
+ [0xDED]={
+ category="nd",
+ description="SINHALA LITH DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDED,
+ },
+ [0xDEE]={
+ category="nd",
+ description="SINHALA LITH DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDEE,
+ },
+ [0xDEF]={
+ category="nd",
+ description="SINHALA LITH DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xDEF,
+ },
+ [0xDF2]={
category="mc",
description="SINHALA VOWEL SIGN DIGA GAETTA-PILLA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DF2,
+ unicodeslot=0xDF2,
},
- [0x0DF3]={
+ [0xDF3]={
category="mc",
description="SINHALA VOWEL SIGN DIGA GAYANUKITTA",
direction="l",
linebreak="cm",
- unicodeslot=0x0DF3,
+ unicodeslot=0xDF3,
},
- [0x0DF4]={
+ [0xDF4]={
category="po",
description="SINHALA PUNCTUATION KUNDDALIYA",
direction="l",
linebreak="al",
- unicodeslot=0x0DF4,
+ unicodeslot=0xDF4,
},
- [0x0E01]={
+ [0xE01]={
adobename="kokaithai",
category="lo",
description="THAI CHARACTER KO KAI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E01,
+ unicodeslot=0xE01,
},
- [0x0E02]={
+ [0xE02]={
adobename="khokhaithai",
category="lo",
description="THAI CHARACTER KHO KHAI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E02,
+ unicodeslot=0xE02,
},
- [0x0E03]={
+ [0xE03]={
adobename="khokhuatthai",
category="lo",
description="THAI CHARACTER KHO KHUAT",
direction="l",
linebreak="sa",
- unicodeslot=0x0E03,
+ unicodeslot=0xE03,
},
- [0x0E04]={
+ [0xE04]={
adobename="khokhwaithai",
category="lo",
description="THAI CHARACTER KHO KHWAI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E04,
+ unicodeslot=0xE04,
},
- [0x0E05]={
+ [0xE05]={
adobename="khokhonthai",
category="lo",
description="THAI CHARACTER KHO KHON",
direction="l",
linebreak="sa",
- unicodeslot=0x0E05,
+ unicodeslot=0xE05,
},
- [0x0E06]={
+ [0xE06]={
adobename="khorakhangthai",
category="lo",
description="THAI CHARACTER KHO RAKHANG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E06,
+ unicodeslot=0xE06,
},
- [0x0E07]={
+ [0xE07]={
adobename="ngonguthai",
category="lo",
description="THAI CHARACTER NGO NGU",
direction="l",
linebreak="sa",
- unicodeslot=0x0E07,
+ unicodeslot=0xE07,
},
- [0x0E08]={
+ [0xE08]={
adobename="chochanthai",
category="lo",
description="THAI CHARACTER CHO CHAN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E08,
+ unicodeslot=0xE08,
},
- [0x0E09]={
+ [0xE09]={
adobename="chochingthai",
category="lo",
description="THAI CHARACTER CHO CHING",
direction="l",
linebreak="sa",
- unicodeslot=0x0E09,
+ unicodeslot=0xE09,
},
- [0x0E0A]={
+ [0xE0A]={
adobename="chochangthai",
category="lo",
description="THAI CHARACTER CHO CHANG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E0A,
+ unicodeslot=0xE0A,
},
- [0x0E0B]={
+ [0xE0B]={
adobename="sosothai",
category="lo",
description="THAI CHARACTER SO SO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E0B,
+ unicodeslot=0xE0B,
},
- [0x0E0C]={
+ [0xE0C]={
adobename="chochoethai",
category="lo",
description="THAI CHARACTER CHO CHOE",
direction="l",
linebreak="sa",
- unicodeslot=0x0E0C,
+ unicodeslot=0xE0C,
},
- [0x0E0D]={
+ [0xE0D]={
adobename="yoyingthai",
category="lo",
description="THAI CHARACTER YO YING",
direction="l",
linebreak="sa",
- unicodeslot=0x0E0D,
+ unicodeslot=0xE0D,
},
- [0x0E0E]={
+ [0xE0E]={
adobename="dochadathai",
category="lo",
description="THAI CHARACTER DO CHADA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E0E,
+ unicodeslot=0xE0E,
},
- [0x0E0F]={
+ [0xE0F]={
adobename="topatakthai",
category="lo",
description="THAI CHARACTER TO PATAK",
direction="l",
linebreak="sa",
- unicodeslot=0x0E0F,
+ unicodeslot=0xE0F,
},
- [0x0E10]={
+ [0xE10]={
adobename="thothanthai",
category="lo",
description="THAI CHARACTER THO THAN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E10,
+ unicodeslot=0xE10,
},
- [0x0E11]={
+ [0xE11]={
adobename="thonangmonthothai",
category="lo",
description="THAI CHARACTER THO NANGMONTHO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E11,
+ unicodeslot=0xE11,
},
- [0x0E12]={
+ [0xE12]={
adobename="thophuthaothai",
category="lo",
description="THAI CHARACTER THO PHUTHAO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E12,
+ unicodeslot=0xE12,
},
- [0x0E13]={
+ [0xE13]={
adobename="nonenthai",
category="lo",
description="THAI CHARACTER NO NEN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E13,
+ unicodeslot=0xE13,
},
- [0x0E14]={
+ [0xE14]={
adobename="dodekthai",
category="lo",
description="THAI CHARACTER DO DEK",
direction="l",
linebreak="sa",
- unicodeslot=0x0E14,
+ unicodeslot=0xE14,
},
- [0x0E15]={
+ [0xE15]={
adobename="totaothai",
category="lo",
description="THAI CHARACTER TO TAO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E15,
+ unicodeslot=0xE15,
},
- [0x0E16]={
+ [0xE16]={
adobename="thothungthai",
category="lo",
description="THAI CHARACTER THO THUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E16,
+ unicodeslot=0xE16,
},
- [0x0E17]={
+ [0xE17]={
adobename="thothahanthai",
category="lo",
description="THAI CHARACTER THO THAHAN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E17,
+ unicodeslot=0xE17,
},
- [0x0E18]={
+ [0xE18]={
adobename="thothongthai",
category="lo",
description="THAI CHARACTER THO THONG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E18,
+ unicodeslot=0xE18,
},
- [0x0E19]={
+ [0xE19]={
adobename="nonuthai",
category="lo",
description="THAI CHARACTER NO NU",
direction="l",
linebreak="sa",
- unicodeslot=0x0E19,
+ unicodeslot=0xE19,
},
- [0x0E1A]={
+ [0xE1A]={
adobename="bobaimaithai",
category="lo",
description="THAI CHARACTER BO BAIMAI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E1A,
+ unicodeslot=0xE1A,
},
- [0x0E1B]={
+ [0xE1B]={
adobename="poplathai",
category="lo",
description="THAI CHARACTER PO PLA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E1B,
+ unicodeslot=0xE1B,
},
- [0x0E1C]={
+ [0xE1C]={
adobename="phophungthai",
category="lo",
description="THAI CHARACTER PHO PHUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E1C,
+ unicodeslot=0xE1C,
},
- [0x0E1D]={
+ [0xE1D]={
adobename="fofathai",
category="lo",
description="THAI CHARACTER FO FA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E1D,
+ unicodeslot=0xE1D,
},
- [0x0E1E]={
+ [0xE1E]={
adobename="phophanthai",
category="lo",
description="THAI CHARACTER PHO PHAN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E1E,
+ unicodeslot=0xE1E,
},
- [0x0E1F]={
+ [0xE1F]={
adobename="fofanthai",
category="lo",
description="THAI CHARACTER FO FAN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E1F,
+ unicodeslot=0xE1F,
},
- [0x0E20]={
+ [0xE20]={
adobename="phosamphaothai",
category="lo",
description="THAI CHARACTER PHO SAMPHAO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E20,
+ unicodeslot=0xE20,
},
- [0x0E21]={
+ [0xE21]={
adobename="momathai",
category="lo",
description="THAI CHARACTER MO MA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E21,
+ unicodeslot=0xE21,
},
- [0x0E22]={
+ [0xE22]={
adobename="yoyakthai",
category="lo",
description="THAI CHARACTER YO YAK",
direction="l",
linebreak="sa",
- unicodeslot=0x0E22,
+ unicodeslot=0xE22,
},
- [0x0E23]={
+ [0xE23]={
adobename="roruathai",
category="lo",
description="THAI CHARACTER RO RUA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E23,
+ unicodeslot=0xE23,
},
- [0x0E24]={
+ [0xE24]={
adobename="ruthai",
category="lo",
description="THAI CHARACTER RU",
direction="l",
linebreak="sa",
- unicodeslot=0x0E24,
+ unicodeslot=0xE24,
},
- [0x0E25]={
+ [0xE25]={
adobename="lolingthai",
category="lo",
description="THAI CHARACTER LO LING",
direction="l",
linebreak="sa",
- unicodeslot=0x0E25,
+ unicodeslot=0xE25,
},
- [0x0E26]={
+ [0xE26]={
adobename="luthai",
category="lo",
description="THAI CHARACTER LU",
direction="l",
linebreak="sa",
- unicodeslot=0x0E26,
+ unicodeslot=0xE26,
},
- [0x0E27]={
+ [0xE27]={
adobename="wowaenthai",
category="lo",
description="THAI CHARACTER WO WAEN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E27,
+ unicodeslot=0xE27,
},
- [0x0E28]={
+ [0xE28]={
adobename="sosalathai",
category="lo",
description="THAI CHARACTER SO SALA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E28,
+ unicodeslot=0xE28,
},
- [0x0E29]={
+ [0xE29]={
adobename="sorusithai",
category="lo",
description="THAI CHARACTER SO RUSI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E29,
+ unicodeslot=0xE29,
},
- [0x0E2A]={
+ [0xE2A]={
adobename="sosuathai",
category="lo",
description="THAI CHARACTER SO SUA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E2A,
+ unicodeslot=0xE2A,
},
- [0x0E2B]={
+ [0xE2B]={
adobename="hohipthai",
category="lo",
description="THAI CHARACTER HO HIP",
direction="l",
linebreak="sa",
- unicodeslot=0x0E2B,
+ unicodeslot=0xE2B,
},
- [0x0E2C]={
+ [0xE2C]={
adobename="lochulathai",
category="lo",
description="THAI CHARACTER LO CHULA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E2C,
+ unicodeslot=0xE2C,
},
- [0x0E2D]={
+ [0xE2D]={
adobename="oangthai",
category="lo",
description="THAI CHARACTER O ANG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E2D,
+ unicodeslot=0xE2D,
},
- [0x0E2E]={
+ [0xE2E]={
adobename="honokhukthai",
category="lo",
description="THAI CHARACTER HO NOKHUK",
direction="l",
linebreak="sa",
- unicodeslot=0x0E2E,
+ unicodeslot=0xE2E,
},
- [0x0E2F]={
+ [0xE2F]={
adobename="paiyannoithai",
category="lo",
description="THAI CHARACTER PAIYANNOI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E2F,
+ unicodeslot=0xE2F,
},
- [0x0E30]={
+ [0xE30]={
adobename="saraathai",
category="lo",
description="THAI CHARACTER SARA A",
direction="l",
linebreak="sa",
- unicodeslot=0x0E30,
+ unicodeslot=0xE30,
},
- [0x0E31]={
+ [0xE31]={
adobename="maihanakatthai",
category="mn",
description="THAI CHARACTER MAI HAN-AKAT",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E31,
+ unicodeslot=0xE31,
},
- [0x0E32]={
+ [0xE32]={
adobename="saraaathai",
category="lo",
description="THAI CHARACTER SARA AA",
direction="l",
linebreak="sa",
- unicodeslot=0x0E32,
+ unicodeslot=0xE32,
},
- [0x0E33]={
+ [0xE33]={
adobename="saraamthai",
category="lo",
description="THAI CHARACTER SARA AM",
direction="l",
linebreak="sa",
- specials={ "compat", 0x0E4D, 0x0E32 },
- unicodeslot=0x0E33,
+ specials={ "compat", 0xE4D, 0xE32 },
+ unicodeslot=0xE33,
},
- [0x0E34]={
+ [0xE34]={
adobename="saraithai",
category="mn",
description="THAI CHARACTER SARA I",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E34,
+ unicodeslot=0xE34,
},
- [0x0E35]={
+ [0xE35]={
adobename="saraiithai",
category="mn",
description="THAI CHARACTER SARA II",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E35,
+ unicodeslot=0xE35,
},
- [0x0E36]={
+ [0xE36]={
adobename="sarauethai",
category="mn",
description="THAI CHARACTER SARA UE",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E36,
+ unicodeslot=0xE36,
},
- [0x0E37]={
+ [0xE37]={
adobename="saraueethai",
category="mn",
description="THAI CHARACTER SARA UEE",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E37,
+ unicodeslot=0xE37,
},
- [0x0E38]={
+ [0xE38]={
adobename="sarauthai",
category="mn",
+ combining=0x67,
description="THAI CHARACTER SARA U",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E38,
+ unicodeslot=0xE38,
},
- [0x0E39]={
+ [0xE39]={
adobename="sarauuthai",
category="mn",
+ combining=0x67,
description="THAI CHARACTER SARA UU",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E39,
+ unicodeslot=0xE39,
},
- [0x0E3A]={
+ [0xE3A]={
adobename="phinthuthai",
category="mn",
+ combining=0x9,
description="THAI CHARACTER PHINTHU",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E3A,
+ unicodeslot=0xE3A,
},
- [0x0E3F]={
+ [0xE3F]={
adobename="bahtthai",
category="sc",
description="THAI CURRENCY SYMBOL BAHT",
direction="et",
linebreak="pr",
- unicodeslot=0x0E3F,
+ unicodeslot=0xE3F,
},
- [0x0E40]={
+ [0xE40]={
adobename="saraethai",
category="lo",
description="THAI CHARACTER SARA E",
direction="l",
linebreak="sa",
- unicodeslot=0x0E40,
+ unicodeslot=0xE40,
},
- [0x0E41]={
+ [0xE41]={
adobename="saraaethai",
category="lo",
description="THAI CHARACTER SARA AE",
direction="l",
linebreak="sa",
- unicodeslot=0x0E41,
+ unicodeslot=0xE41,
},
- [0x0E42]={
+ [0xE42]={
adobename="saraothai",
category="lo",
description="THAI CHARACTER SARA O",
direction="l",
linebreak="sa",
- unicodeslot=0x0E42,
+ unicodeslot=0xE42,
},
- [0x0E43]={
+ [0xE43]={
adobename="saraaimaimuanthai",
category="lo",
description="THAI CHARACTER SARA AI MAIMUAN",
direction="l",
linebreak="sa",
- unicodeslot=0x0E43,
+ unicodeslot=0xE43,
},
- [0x0E44]={
+ [0xE44]={
adobename="saraaimaimalaithai",
category="lo",
description="THAI CHARACTER SARA AI MAIMALAI",
direction="l",
linebreak="sa",
- unicodeslot=0x0E44,
+ unicodeslot=0xE44,
},
- [0x0E45]={
+ [0xE45]={
adobename="lakkhangyaothai",
category="lo",
description="THAI CHARACTER LAKKHANGYAO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E45,
+ unicodeslot=0xE45,
},
- [0x0E46]={
+ [0xE46]={
adobename="maiyamokthai",
category="lm",
description="THAI CHARACTER MAIYAMOK",
direction="l",
linebreak="sa",
- unicodeslot=0x0E46,
+ unicodeslot=0xE46,
},
- [0x0E47]={
+ [0xE47]={
adobename="maitaikhuthai",
category="mn",
description="THAI CHARACTER MAITAIKHU",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E47,
+ unicodeslot=0xE47,
},
- [0x0E48]={
+ [0xE48]={
adobename="maiekthai",
category="mn",
+ combining=0x6B,
description="THAI CHARACTER MAI EK",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E48,
+ unicodeslot=0xE48,
},
- [0x0E49]={
+ [0xE49]={
adobename="maithothai",
category="mn",
+ combining=0x6B,
description="THAI CHARACTER MAI THO",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E49,
+ unicodeslot=0xE49,
},
- [0x0E4A]={
+ [0xE4A]={
adobename="maitrithai",
category="mn",
+ combining=0x6B,
description="THAI CHARACTER MAI TRI",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E4A,
+ unicodeslot=0xE4A,
},
- [0x0E4B]={
+ [0xE4B]={
adobename="maichattawathai",
category="mn",
+ combining=0x6B,
description="THAI CHARACTER MAI CHATTAWA",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E4B,
+ unicodeslot=0xE4B,
},
- [0x0E4C]={
+ [0xE4C]={
adobename="thanthakhatthai",
category="mn",
description="THAI CHARACTER THANTHAKHAT",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E4C,
+ unicodeslot=0xE4C,
},
- [0x0E4D]={
+ [0xE4D]={
adobename="nikhahitthai",
category="mn",
description="THAI CHARACTER NIKHAHIT",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E4D,
+ unicodeslot=0xE4D,
},
- [0x0E4E]={
+ [0xE4E]={
adobename="yamakkanthai",
category="mn",
description="THAI CHARACTER YAMAKKAN",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0E4E,
+ unicodeslot=0xE4E,
},
- [0x0E4F]={
+ [0xE4F]={
adobename="fongmanthai",
category="po",
description="THAI CHARACTER FONGMAN",
direction="l",
linebreak="al",
- unicodeslot=0x0E4F,
+ unicodeslot=0xE4F,
},
- [0x0E50]={
+ [0xE50]={
adobename="zerothai",
category="nd",
description="THAI DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0E50,
+ unicodeslot=0xE50,
},
- [0x0E51]={
+ [0xE51]={
adobename="onethai",
category="nd",
description="THAI DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0E51,
+ unicodeslot=0xE51,
},
- [0x0E52]={
+ [0xE52]={
adobename="twothai",
category="nd",
description="THAI DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0E52,
+ unicodeslot=0xE52,
},
- [0x0E53]={
+ [0xE53]={
adobename="threethai",
category="nd",
description="THAI DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0E53,
+ unicodeslot=0xE53,
},
- [0x0E54]={
+ [0xE54]={
adobename="fourthai",
category="nd",
description="THAI DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0E54,
+ unicodeslot=0xE54,
},
- [0x0E55]={
+ [0xE55]={
adobename="fivethai",
category="nd",
description="THAI DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0E55,
+ unicodeslot=0xE55,
},
- [0x0E56]={
+ [0xE56]={
adobename="sixthai",
category="nd",
description="THAI DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0E56,
+ unicodeslot=0xE56,
},
- [0x0E57]={
+ [0xE57]={
adobename="seventhai",
category="nd",
description="THAI DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0E57,
+ unicodeslot=0xE57,
},
- [0x0E58]={
+ [0xE58]={
adobename="eightthai",
category="nd",
description="THAI DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0E58,
+ unicodeslot=0xE58,
},
- [0x0E59]={
+ [0xE59]={
adobename="ninethai",
category="nd",
description="THAI DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0E59,
+ unicodeslot=0xE59,
},
- [0x0E5A]={
+ [0xE5A]={
adobename="angkhankhuthai",
category="po",
description="THAI CHARACTER ANGKHANKHU",
direction="l",
linebreak="ba",
- unicodeslot=0x0E5A,
+ unicodeslot=0xE5A,
},
- [0x0E5B]={
+ [0xE5B]={
adobename="khomutthai",
category="po",
description="THAI CHARACTER KHOMUT",
direction="l",
linebreak="ba",
- unicodeslot=0x0E5B,
+ unicodeslot=0xE5B,
},
- [0x0E81]={
+ [0xE81]={
category="lo",
description="LAO LETTER KO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E81,
+ unicodeslot=0xE81,
},
- [0x0E82]={
+ [0xE82]={
category="lo",
description="LAO LETTER KHO SUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E82,
+ unicodeslot=0xE82,
},
- [0x0E84]={
+ [0xE84]={
category="lo",
description="LAO LETTER KHO TAM",
direction="l",
linebreak="sa",
- unicodeslot=0x0E84,
+ unicodeslot=0xE84,
},
- [0x0E87]={
+ [0xE87]={
category="lo",
description="LAO LETTER NGO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E87,
+ unicodeslot=0xE87,
},
- [0x0E88]={
+ [0xE88]={
category="lo",
description="LAO LETTER CO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E88,
+ unicodeslot=0xE88,
},
- [0x0E8A]={
+ [0xE8A]={
category="lo",
description="LAO LETTER SO TAM",
direction="l",
linebreak="sa",
- unicodeslot=0x0E8A,
+ unicodeslot=0xE8A,
},
- [0x0E8D]={
+ [0xE8D]={
category="lo",
description="LAO LETTER NYO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E8D,
+ unicodeslot=0xE8D,
},
- [0x0E94]={
+ [0xE94]={
category="lo",
description="LAO LETTER DO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E94,
+ unicodeslot=0xE94,
},
- [0x0E95]={
+ [0xE95]={
category="lo",
description="LAO LETTER TO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E95,
+ unicodeslot=0xE95,
},
- [0x0E96]={
+ [0xE96]={
category="lo",
description="LAO LETTER THO SUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E96,
+ unicodeslot=0xE96,
},
- [0x0E97]={
+ [0xE97]={
category="lo",
description="LAO LETTER THO TAM",
direction="l",
linebreak="sa",
- unicodeslot=0x0E97,
+ unicodeslot=0xE97,
},
- [0x0E99]={
+ [0xE99]={
category="lo",
description="LAO LETTER NO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E99,
+ unicodeslot=0xE99,
},
- [0x0E9A]={
+ [0xE9A]={
category="lo",
description="LAO LETTER BO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E9A,
+ unicodeslot=0xE9A,
},
- [0x0E9B]={
+ [0xE9B]={
category="lo",
description="LAO LETTER PO",
direction="l",
linebreak="sa",
- unicodeslot=0x0E9B,
+ unicodeslot=0xE9B,
},
- [0x0E9C]={
+ [0xE9C]={
category="lo",
description="LAO LETTER PHO SUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E9C,
+ unicodeslot=0xE9C,
},
- [0x0E9D]={
+ [0xE9D]={
category="lo",
description="LAO LETTER FO TAM",
direction="l",
linebreak="sa",
- unicodeslot=0x0E9D,
+ unicodeslot=0xE9D,
},
- [0x0E9E]={
+ [0xE9E]={
category="lo",
description="LAO LETTER PHO TAM",
direction="l",
linebreak="sa",
- unicodeslot=0x0E9E,
+ unicodeslot=0xE9E,
},
- [0x0E9F]={
+ [0xE9F]={
category="lo",
description="LAO LETTER FO SUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0E9F,
+ unicodeslot=0xE9F,
},
- [0x0EA1]={
+ [0xEA1]={
category="lo",
description="LAO LETTER MO",
direction="l",
linebreak="sa",
- unicodeslot=0x0EA1,
+ unicodeslot=0xEA1,
},
- [0x0EA2]={
+ [0xEA2]={
category="lo",
description="LAO LETTER YO",
direction="l",
linebreak="sa",
- unicodeslot=0x0EA2,
+ unicodeslot=0xEA2,
},
- [0x0EA3]={
+ [0xEA3]={
category="lo",
description="LAO LETTER LO LING",
direction="l",
linebreak="sa",
- unicodeslot=0x0EA3,
+ unicodeslot=0xEA3,
},
- [0x0EA5]={
+ [0xEA5]={
category="lo",
description="LAO LETTER LO LOOT",
direction="l",
linebreak="sa",
- unicodeslot=0x0EA5,
+ unicodeslot=0xEA5,
},
- [0x0EA7]={
+ [0xEA7]={
category="lo",
description="LAO LETTER WO",
direction="l",
linebreak="sa",
- unicodeslot=0x0EA7,
+ unicodeslot=0xEA7,
},
- [0x0EAA]={
+ [0xEAA]={
category="lo",
description="LAO LETTER SO SUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0EAA,
+ unicodeslot=0xEAA,
},
- [0x0EAB]={
+ [0xEAB]={
category="lo",
description="LAO LETTER HO SUNG",
direction="l",
linebreak="sa",
- unicodeslot=0x0EAB,
+ unicodeslot=0xEAB,
},
- [0x0EAD]={
+ [0xEAD]={
category="lo",
description="LAO LETTER O",
direction="l",
linebreak="sa",
- unicodeslot=0x0EAD,
+ unicodeslot=0xEAD,
},
- [0x0EAE]={
+ [0xEAE]={
category="lo",
description="LAO LETTER HO TAM",
direction="l",
linebreak="sa",
- unicodeslot=0x0EAE,
+ unicodeslot=0xEAE,
},
- [0x0EAF]={
+ [0xEAF]={
category="lo",
description="LAO ELLIPSIS",
direction="l",
linebreak="sa",
- unicodeslot=0x0EAF,
+ unicodeslot=0xEAF,
},
- [0x0EB0]={
+ [0xEB0]={
category="lo",
description="LAO VOWEL SIGN A",
direction="l",
linebreak="sa",
- unicodeslot=0x0EB0,
+ unicodeslot=0xEB0,
},
- [0x0EB1]={
+ [0xEB1]={
category="mn",
description="LAO VOWEL SIGN MAI KAN",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB1,
+ unicodeslot=0xEB1,
},
- [0x0EB2]={
+ [0xEB2]={
category="lo",
description="LAO VOWEL SIGN AA",
direction="l",
linebreak="sa",
- unicodeslot=0x0EB2,
+ unicodeslot=0xEB2,
},
- [0x0EB3]={
+ [0xEB3]={
category="lo",
description="LAO VOWEL SIGN AM",
direction="l",
linebreak="sa",
- specials={ "compat", 0x0ECD, 0x0EB2 },
- unicodeslot=0x0EB3,
+ specials={ "compat", 0xECD, 0xEB2 },
+ unicodeslot=0xEB3,
},
- [0x0EB4]={
+ [0xEB4]={
category="mn",
description="LAO VOWEL SIGN I",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB4,
+ unicodeslot=0xEB4,
},
- [0x0EB5]={
+ [0xEB5]={
category="mn",
description="LAO VOWEL SIGN II",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB5,
+ unicodeslot=0xEB5,
},
- [0x0EB6]={
+ [0xEB6]={
category="mn",
description="LAO VOWEL SIGN Y",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB6,
+ unicodeslot=0xEB6,
},
- [0x0EB7]={
+ [0xEB7]={
category="mn",
description="LAO VOWEL SIGN YY",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB7,
+ unicodeslot=0xEB7,
},
- [0x0EB8]={
+ [0xEB8]={
category="mn",
+ combining=0x76,
description="LAO VOWEL SIGN U",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB8,
+ unicodeslot=0xEB8,
},
- [0x0EB9]={
+ [0xEB9]={
category="mn",
+ combining=0x76,
description="LAO VOWEL SIGN UU",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EB9,
+ unicodeslot=0xEB9,
},
- [0x0EBB]={
+ [0xEBB]={
category="mn",
description="LAO VOWEL SIGN MAI KON",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EBB,
+ unicodeslot=0xEBB,
},
- [0x0EBC]={
+ [0xEBC]={
category="mn",
description="LAO SEMIVOWEL SIGN LO",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EBC,
+ unicodeslot=0xEBC,
},
- [0x0EBD]={
+ [0xEBD]={
category="lo",
description="LAO SEMIVOWEL SIGN NYO",
direction="l",
linebreak="sa",
- unicodeslot=0x0EBD,
+ unicodeslot=0xEBD,
},
- [0x0EC0]={
+ [0xEC0]={
category="lo",
description="LAO VOWEL SIGN E",
direction="l",
linebreak="sa",
- unicodeslot=0x0EC0,
+ unicodeslot=0xEC0,
},
- [0x0EC1]={
+ [0xEC1]={
category="lo",
description="LAO VOWEL SIGN EI",
direction="l",
linebreak="sa",
- unicodeslot=0x0EC1,
+ unicodeslot=0xEC1,
},
- [0x0EC2]={
+ [0xEC2]={
category="lo",
description="LAO VOWEL SIGN O",
direction="l",
linebreak="sa",
- unicodeslot=0x0EC2,
+ unicodeslot=0xEC2,
},
- [0x0EC3]={
+ [0xEC3]={
category="lo",
description="LAO VOWEL SIGN AY",
direction="l",
linebreak="sa",
- unicodeslot=0x0EC3,
+ unicodeslot=0xEC3,
},
- [0x0EC4]={
+ [0xEC4]={
category="lo",
description="LAO VOWEL SIGN AI",
direction="l",
linebreak="sa",
- unicodeslot=0x0EC4,
+ unicodeslot=0xEC4,
},
- [0x0EC6]={
+ [0xEC6]={
category="lm",
description="LAO KO LA",
direction="l",
linebreak="sa",
- unicodeslot=0x0EC6,
+ unicodeslot=0xEC6,
},
- [0x0EC8]={
+ [0xEC8]={
category="mn",
+ combining=0x7A,
description="LAO TONE MAI EK",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EC8,
+ unicodeslot=0xEC8,
},
- [0x0EC9]={
+ [0xEC9]={
category="mn",
+ combining=0x7A,
description="LAO TONE MAI THO",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0EC9,
+ unicodeslot=0xEC9,
},
- [0x0ECA]={
+ [0xECA]={
category="mn",
+ combining=0x7A,
description="LAO TONE MAI TI",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0ECA,
+ unicodeslot=0xECA,
},
- [0x0ECB]={
+ [0xECB]={
category="mn",
+ combining=0x7A,
description="LAO TONE MAI CATAWA",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0ECB,
+ unicodeslot=0xECB,
},
- [0x0ECC]={
+ [0xECC]={
category="mn",
description="LAO CANCELLATION MARK",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0ECC,
+ unicodeslot=0xECC,
},
- [0x0ECD]={
+ [0xECD]={
category="mn",
description="LAO NIGGAHITA",
direction="nsm",
linebreak="sa",
- unicodeslot=0x0ECD,
+ unicodeslot=0xECD,
},
- [0x0ED0]={
+ [0xED0]={
category="nd",
description="LAO DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED0,
+ unicodeslot=0xED0,
},
- [0x0ED1]={
+ [0xED1]={
category="nd",
description="LAO DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED1,
+ unicodeslot=0xED1,
},
- [0x0ED2]={
+ [0xED2]={
category="nd",
description="LAO DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED2,
+ unicodeslot=0xED2,
},
- [0x0ED3]={
+ [0xED3]={
category="nd",
description="LAO DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED3,
+ unicodeslot=0xED3,
},
- [0x0ED4]={
+ [0xED4]={
category="nd",
description="LAO DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED4,
+ unicodeslot=0xED4,
},
- [0x0ED5]={
+ [0xED5]={
category="nd",
description="LAO DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED5,
+ unicodeslot=0xED5,
},
- [0x0ED6]={
+ [0xED6]={
category="nd",
description="LAO DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED6,
+ unicodeslot=0xED6,
},
- [0x0ED7]={
+ [0xED7]={
category="nd",
description="LAO DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED7,
+ unicodeslot=0xED7,
},
- [0x0ED8]={
+ [0xED8]={
category="nd",
description="LAO DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED8,
+ unicodeslot=0xED8,
},
- [0x0ED9]={
+ [0xED9]={
category="nd",
description="LAO DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0ED9,
+ unicodeslot=0xED9,
},
- [0x0EDC]={
+ [0xEDC]={
category="lo",
description="LAO HO NO",
direction="l",
linebreak="sa",
- specials={ "compat", 0x0EAB, 0x0E99 },
- unicodeslot=0x0EDC,
+ specials={ "compat", 0xEAB, 0xE99 },
+ unicodeslot=0xEDC,
},
- [0x0EDD]={
+ [0xEDD]={
category="lo",
description="LAO HO MO",
direction="l",
linebreak="sa",
- specials={ "compat", 0x0EAB, 0x0EA1 },
- unicodeslot=0x0EDD,
+ specials={ "compat", 0xEAB, 0xEA1 },
+ unicodeslot=0xEDD,
},
- [0x0EDE]={
+ [0xEDE]={
category="lo",
description="LAO LETTER KHMU GO",
direction="l",
linebreak="sa",
- unicodeslot=0x0EDE,
+ unicodeslot=0xEDE,
},
- [0x0EDF]={
+ [0xEDF]={
category="lo",
description="LAO LETTER KHMU NYO",
direction="l",
linebreak="sa",
- unicodeslot=0x0EDF,
+ unicodeslot=0xEDF,
},
- [0x0F00]={
+ [0xF00]={
category="lo",
description="TIBETAN SYLLABLE OM",
direction="l",
linebreak="al",
- unicodeslot=0x0F00,
+ unicodeslot=0xF00,
},
- [0x0F01]={
+ [0xF01]={
category="so",
description="TIBETAN MARK GTER YIG MGO TRUNCATED A",
direction="l",
linebreak="bb",
- unicodeslot=0x0F01,
+ unicodeslot=0xF01,
},
- [0x0F02]={
+ [0xF02]={
category="so",
description="TIBETAN MARK GTER YIG MGO -UM RNAM BCAD MA",
direction="l",
linebreak="bb",
- unicodeslot=0x0F02,
+ unicodeslot=0xF02,
},
- [0x0F03]={
+ [0xF03]={
category="so",
description="TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA",
direction="l",
linebreak="bb",
- unicodeslot=0x0F03,
+ unicodeslot=0xF03,
},
- [0x0F04]={
+ [0xF04]={
category="po",
description="TIBETAN MARK INITIAL YIG MGO MDUN MA",
direction="l",
linebreak="bb",
- unicodeslot=0x0F04,
+ unicodeslot=0xF04,
},
- [0x0F05]={
+ [0xF05]={
category="po",
description="TIBETAN MARK CLOSING YIG MGO SGAB MA",
direction="l",
linebreak="al",
- unicodeslot=0x0F05,
+ unicodeslot=0xF05,
},
- [0x0F06]={
+ [0xF06]={
category="po",
description="TIBETAN MARK CARET YIG MGO PHUR SHAD MA",
direction="l",
linebreak="bb",
- unicodeslot=0x0F06,
+ unicodeslot=0xF06,
},
- [0x0F07]={
+ [0xF07]={
category="po",
description="TIBETAN MARK YIG MGO TSHEG SHAD MA",
direction="l",
linebreak="bb",
- unicodeslot=0x0F07,
+ unicodeslot=0xF07,
},
- [0x0F08]={
+ [0xF08]={
category="po",
description="TIBETAN MARK SBRUL SHAD",
direction="l",
linebreak="gl",
- unicodeslot=0x0F08,
+ unicodeslot=0xF08,
},
- [0x0F09]={
+ [0xF09]={
category="po",
description="TIBETAN MARK BSKUR YIG MGO",
direction="l",
linebreak="bb",
- unicodeslot=0x0F09,
+ unicodeslot=0xF09,
},
- [0x0F0A]={
+ [0xF0A]={
category="po",
description="TIBETAN MARK BKA- SHOG YIG MGO",
direction="l",
linebreak="bb",
- unicodeslot=0x0F0A,
+ unicodeslot=0xF0A,
},
- [0x0F0B]={
+ [0xF0B]={
category="po",
description="TIBETAN MARK INTERSYLLABIC TSHEG",
direction="l",
linebreak="ba",
- unicodeslot=0x0F0B,
+ unicodeslot=0xF0B,
},
- [0x0F0C]={
+ [0xF0C]={
category="po",
description="TIBETAN MARK DELIMITER TSHEG BSTAR",
direction="l",
linebreak="gl",
- specials={ "nobreak", 0x0F0B },
- unicodeslot=0x0F0C,
+ specials={ "nobreak", 0xF0B },
+ unicodeslot=0xF0C,
},
- [0x0F0D]={
+ [0xF0D]={
category="po",
description="TIBETAN MARK SHAD",
direction="l",
linebreak="ex",
- unicodeslot=0x0F0D,
+ unicodeslot=0xF0D,
},
- [0x0F0E]={
+ [0xF0E]={
category="po",
description="TIBETAN MARK NYIS SHAD",
direction="l",
linebreak="ex",
- unicodeslot=0x0F0E,
+ unicodeslot=0xF0E,
},
- [0x0F0F]={
+ [0xF0F]={
category="po",
description="TIBETAN MARK TSHEG SHAD",
direction="l",
linebreak="ex",
- unicodeslot=0x0F0F,
+ unicodeslot=0xF0F,
},
- [0x0F10]={
+ [0xF10]={
category="po",
description="TIBETAN MARK NYIS TSHEG SHAD",
direction="l",
linebreak="ex",
- unicodeslot=0x0F10,
+ unicodeslot=0xF10,
},
- [0x0F11]={
+ [0xF11]={
category="po",
description="TIBETAN MARK RIN CHEN SPUNGS SHAD",
direction="l",
linebreak="ex",
- unicodeslot=0x0F11,
+ unicodeslot=0xF11,
},
- [0x0F12]={
+ [0xF12]={
category="po",
description="TIBETAN MARK RGYA GRAM SHAD",
direction="l",
linebreak="gl",
- unicodeslot=0x0F12,
+ unicodeslot=0xF12,
},
- [0x0F13]={
+ [0xF13]={
category="so",
description="TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN",
direction="l",
linebreak="al",
- unicodeslot=0x0F13,
+ unicodeslot=0xF13,
},
- [0x0F14]={
+ [0xF14]={
category="so",
description="TIBETAN MARK GTER TSHEG",
direction="l",
linebreak="ex",
- unicodeslot=0x0F14,
+ unicodeslot=0xF14,
},
- [0x0F15]={
+ [0xF15]={
category="so",
description="TIBETAN LOGOTYPE SIGN CHAD RTAGS",
direction="l",
linebreak="al",
- unicodeslot=0x0F15,
+ unicodeslot=0xF15,
},
- [0x0F16]={
+ [0xF16]={
category="so",
description="TIBETAN LOGOTYPE SIGN LHAG RTAGS",
direction="l",
linebreak="al",
- unicodeslot=0x0F16,
+ unicodeslot=0xF16,
},
- [0x0F17]={
+ [0xF17]={
category="so",
description="TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS",
direction="l",
linebreak="al",
- unicodeslot=0x0F17,
+ unicodeslot=0xF17,
},
- [0x0F18]={
+ [0xF18]={
category="mn",
+ combining=0xDC,
description="TIBETAN ASTROLOGICAL SIGN -KHYUD PA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F18,
+ unicodeslot=0xF18,
},
- [0x0F19]={
+ [0xF19]={
category="mn",
+ combining=0xDC,
description="TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F19,
+ unicodeslot=0xF19,
},
- [0x0F1A]={
+ [0xF1A]={
category="so",
description="TIBETAN SIGN RDEL DKAR GCIG",
direction="l",
linebreak="al",
- unicodeslot=0x0F1A,
+ unicodeslot=0xF1A,
},
- [0x0F1B]={
+ [0xF1B]={
category="so",
description="TIBETAN SIGN RDEL DKAR GNYIS",
direction="l",
linebreak="al",
- unicodeslot=0x0F1B,
+ unicodeslot=0xF1B,
},
- [0x0F1C]={
+ [0xF1C]={
category="so",
description="TIBETAN SIGN RDEL DKAR GSUM",
direction="l",
linebreak="al",
- unicodeslot=0x0F1C,
+ unicodeslot=0xF1C,
},
- [0x0F1D]={
+ [0xF1D]={
category="so",
description="TIBETAN SIGN RDEL NAG GCIG",
direction="l",
linebreak="al",
- unicodeslot=0x0F1D,
+ unicodeslot=0xF1D,
},
- [0x0F1E]={
+ [0xF1E]={
category="so",
description="TIBETAN SIGN RDEL NAG GNYIS",
direction="l",
linebreak="al",
- unicodeslot=0x0F1E,
+ unicodeslot=0xF1E,
},
- [0x0F1F]={
+ [0xF1F]={
category="so",
description="TIBETAN SIGN RDEL DKAR RDEL NAG",
direction="l",
linebreak="al",
- unicodeslot=0x0F1F,
+ unicodeslot=0xF1F,
},
- [0x0F20]={
+ [0xF20]={
category="nd",
description="TIBETAN DIGIT ZERO",
direction="l",
linebreak="nu",
- unicodeslot=0x0F20,
+ unicodeslot=0xF20,
},
- [0x0F21]={
+ [0xF21]={
category="nd",
description="TIBETAN DIGIT ONE",
direction="l",
linebreak="nu",
- unicodeslot=0x0F21,
+ unicodeslot=0xF21,
},
- [0x0F22]={
+ [0xF22]={
category="nd",
description="TIBETAN DIGIT TWO",
direction="l",
linebreak="nu",
- unicodeslot=0x0F22,
+ unicodeslot=0xF22,
},
- [0x0F23]={
+ [0xF23]={
category="nd",
description="TIBETAN DIGIT THREE",
direction="l",
linebreak="nu",
- unicodeslot=0x0F23,
+ unicodeslot=0xF23,
},
- [0x0F24]={
+ [0xF24]={
category="nd",
description="TIBETAN DIGIT FOUR",
direction="l",
linebreak="nu",
- unicodeslot=0x0F24,
+ unicodeslot=0xF24,
},
- [0x0F25]={
+ [0xF25]={
category="nd",
description="TIBETAN DIGIT FIVE",
direction="l",
linebreak="nu",
- unicodeslot=0x0F25,
+ unicodeslot=0xF25,
},
- [0x0F26]={
+ [0xF26]={
category="nd",
description="TIBETAN DIGIT SIX",
direction="l",
linebreak="nu",
- unicodeslot=0x0F26,
+ unicodeslot=0xF26,
},
- [0x0F27]={
+ [0xF27]={
category="nd",
description="TIBETAN DIGIT SEVEN",
direction="l",
linebreak="nu",
- unicodeslot=0x0F27,
+ unicodeslot=0xF27,
},
- [0x0F28]={
+ [0xF28]={
category="nd",
description="TIBETAN DIGIT EIGHT",
direction="l",
linebreak="nu",
- unicodeslot=0x0F28,
+ unicodeslot=0xF28,
},
- [0x0F29]={
+ [0xF29]={
category="nd",
description="TIBETAN DIGIT NINE",
direction="l",
linebreak="nu",
- unicodeslot=0x0F29,
+ unicodeslot=0xF29,
},
- [0x0F2A]={
+ [0xF2A]={
category="no",
description="TIBETAN DIGIT HALF ONE",
direction="l",
linebreak="al",
- unicodeslot=0x0F2A,
+ unicodeslot=0xF2A,
},
- [0x0F2B]={
+ [0xF2B]={
category="no",
description="TIBETAN DIGIT HALF TWO",
direction="l",
linebreak="al",
- unicodeslot=0x0F2B,
+ unicodeslot=0xF2B,
},
- [0x0F2C]={
+ [0xF2C]={
category="no",
description="TIBETAN DIGIT HALF THREE",
direction="l",
linebreak="al",
- unicodeslot=0x0F2C,
+ unicodeslot=0xF2C,
},
- [0x0F2D]={
+ [0xF2D]={
category="no",
description="TIBETAN DIGIT HALF FOUR",
direction="l",
linebreak="al",
- unicodeslot=0x0F2D,
+ unicodeslot=0xF2D,
},
- [0x0F2E]={
+ [0xF2E]={
category="no",
description="TIBETAN DIGIT HALF FIVE",
direction="l",
linebreak="al",
- unicodeslot=0x0F2E,
+ unicodeslot=0xF2E,
},
- [0x0F2F]={
+ [0xF2F]={
category="no",
description="TIBETAN DIGIT HALF SIX",
direction="l",
linebreak="al",
- unicodeslot=0x0F2F,
+ unicodeslot=0xF2F,
},
- [0x0F30]={
+ [0xF30]={
category="no",
description="TIBETAN DIGIT HALF SEVEN",
direction="l",
linebreak="al",
- unicodeslot=0x0F30,
+ unicodeslot=0xF30,
},
- [0x0F31]={
+ [0xF31]={
category="no",
description="TIBETAN DIGIT HALF EIGHT",
direction="l",
linebreak="al",
- unicodeslot=0x0F31,
+ unicodeslot=0xF31,
},
- [0x0F32]={
+ [0xF32]={
category="no",
description="TIBETAN DIGIT HALF NINE",
direction="l",
linebreak="al",
- unicodeslot=0x0F32,
+ unicodeslot=0xF32,
},
- [0x0F33]={
+ [0xF33]={
category="no",
description="TIBETAN DIGIT HALF ZERO",
direction="l",
linebreak="al",
- unicodeslot=0x0F33,
+ unicodeslot=0xF33,
},
- [0x0F34]={
+ [0xF34]={
category="so",
description="TIBETAN MARK BSDUS RTAGS",
direction="l",
linebreak="ba",
- unicodeslot=0x0F34,
+ unicodeslot=0xF34,
},
- [0x0F35]={
+ [0xF35]={
category="mn",
+ combining=0xDC,
description="TIBETAN MARK NGAS BZUNG NYI ZLA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F35,
+ unicodeslot=0xF35,
},
- [0x0F36]={
+ [0xF36]={
category="so",
description="TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN",
direction="l",
linebreak="al",
- unicodeslot=0x0F36,
+ unicodeslot=0xF36,
},
- [0x0F37]={
+ [0xF37]={
category="mn",
+ combining=0xDC,
description="TIBETAN MARK NGAS BZUNG SGOR RTAGS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F37,
+ unicodeslot=0xF37,
},
- [0x0F38]={
+ [0xF38]={
category="so",
description="TIBETAN MARK CHE MGO",
direction="l",
linebreak="al",
- unicodeslot=0x0F38,
+ unicodeslot=0xF38,
},
- [0x0F39]={
+ [0xF39]={
category="mn",
+ combining=0xD8,
description="TIBETAN MARK TSA -PHRU",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F39,
+ unicodeslot=0xF39,
},
- [0x0F3A]={
+ [0xF3A]={
category="ps",
description="TIBETAN MARK GUG RTAGS GYON",
direction="on",
linebreak="op",
- mirror=0x0F3B,
- unicodeslot=0x0F3A,
+ mirror=0xF3B,
+ unicodeslot=0xF3A,
},
- [0x0F3B]={
+ [0xF3B]={
category="pe",
description="TIBETAN MARK GUG RTAGS GYAS",
direction="on",
linebreak="cl",
- mirror=0x0F3A,
- unicodeslot=0x0F3B,
+ mirror=0xF3A,
+ unicodeslot=0xF3B,
},
- [0x0F3C]={
+ [0xF3C]={
category="ps",
description="TIBETAN MARK ANG KHANG GYON",
direction="on",
linebreak="op",
- mirror=0x0F3D,
- unicodeslot=0x0F3C,
+ mirror=0xF3D,
+ unicodeslot=0xF3C,
},
- [0x0F3D]={
+ [0xF3D]={
category="pe",
description="TIBETAN MARK ANG KHANG GYAS",
direction="on",
linebreak="cl",
- mirror=0x0F3C,
- unicodeslot=0x0F3D,
+ mirror=0xF3C,
+ unicodeslot=0xF3D,
},
- [0x0F3E]={
+ [0xF3E]={
category="mc",
description="TIBETAN SIGN YAR TSHES",
direction="l",
linebreak="cm",
- unicodeslot=0x0F3E,
+ unicodeslot=0xF3E,
},
- [0x0F3F]={
+ [0xF3F]={
category="mc",
description="TIBETAN SIGN MAR TSHES",
direction="l",
linebreak="cm",
- unicodeslot=0x0F3F,
+ unicodeslot=0xF3F,
},
- [0x0F40]={
+ [0xF40]={
category="lo",
description="TIBETAN LETTER KA",
direction="l",
linebreak="al",
- unicodeslot=0x0F40,
+ unicodeslot=0xF40,
},
- [0x0F41]={
+ [0xF41]={
category="lo",
description="TIBETAN LETTER KHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F41,
+ unicodeslot=0xF41,
},
- [0x0F42]={
+ [0xF42]={
category="lo",
description="TIBETAN LETTER GA",
direction="l",
linebreak="al",
- unicodeslot=0x0F42,
+ unicodeslot=0xF42,
},
- [0x0F43]={
+ [0xF43]={
category="lo",
description="TIBETAN LETTER GHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0F42, 0x0FB7 },
- unicodeslot=0x0F43,
+ specials={ "char", 0xF42, 0xFB7 },
+ unicodeslot=0xF43,
},
- [0x0F44]={
+ [0xF44]={
category="lo",
description="TIBETAN LETTER NGA",
direction="l",
linebreak="al",
- unicodeslot=0x0F44,
+ unicodeslot=0xF44,
},
- [0x0F45]={
+ [0xF45]={
category="lo",
description="TIBETAN LETTER CA",
direction="l",
linebreak="al",
- unicodeslot=0x0F45,
+ unicodeslot=0xF45,
},
- [0x0F46]={
+ [0xF46]={
category="lo",
description="TIBETAN LETTER CHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F46,
+ unicodeslot=0xF46,
},
- [0x0F47]={
+ [0xF47]={
category="lo",
description="TIBETAN LETTER JA",
direction="l",
linebreak="al",
- unicodeslot=0x0F47,
+ unicodeslot=0xF47,
},
- [0x0F49]={
+ [0xF49]={
category="lo",
description="TIBETAN LETTER NYA",
direction="l",
linebreak="al",
- unicodeslot=0x0F49,
+ unicodeslot=0xF49,
},
- [0x0F4A]={
+ [0xF4A]={
category="lo",
description="TIBETAN LETTER TTA",
direction="l",
linebreak="al",
- unicodeslot=0x0F4A,
+ unicodeslot=0xF4A,
},
- [0x0F4B]={
+ [0xF4B]={
category="lo",
description="TIBETAN LETTER TTHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F4B,
+ unicodeslot=0xF4B,
},
- [0x0F4C]={
+ [0xF4C]={
category="lo",
description="TIBETAN LETTER DDA",
direction="l",
linebreak="al",
- unicodeslot=0x0F4C,
+ unicodeslot=0xF4C,
},
- [0x0F4D]={
+ [0xF4D]={
category="lo",
description="TIBETAN LETTER DDHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0F4C, 0x0FB7 },
- unicodeslot=0x0F4D,
+ specials={ "char", 0xF4C, 0xFB7 },
+ unicodeslot=0xF4D,
},
- [0x0F4E]={
+ [0xF4E]={
category="lo",
description="TIBETAN LETTER NNA",
direction="l",
linebreak="al",
- unicodeslot=0x0F4E,
+ unicodeslot=0xF4E,
},
- [0x0F4F]={
+ [0xF4F]={
category="lo",
description="TIBETAN LETTER TA",
direction="l",
linebreak="al",
- unicodeslot=0x0F4F,
+ unicodeslot=0xF4F,
},
- [0x0F50]={
+ [0xF50]={
category="lo",
description="TIBETAN LETTER THA",
direction="l",
linebreak="al",
- unicodeslot=0x0F50,
+ unicodeslot=0xF50,
},
- [0x0F51]={
+ [0xF51]={
category="lo",
description="TIBETAN LETTER DA",
direction="l",
linebreak="al",
- unicodeslot=0x0F51,
+ unicodeslot=0xF51,
},
- [0x0F52]={
+ [0xF52]={
category="lo",
description="TIBETAN LETTER DHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0F51, 0x0FB7 },
- unicodeslot=0x0F52,
+ specials={ "char", 0xF51, 0xFB7 },
+ unicodeslot=0xF52,
},
- [0x0F53]={
+ [0xF53]={
category="lo",
description="TIBETAN LETTER NA",
direction="l",
linebreak="al",
- unicodeslot=0x0F53,
+ unicodeslot=0xF53,
},
- [0x0F54]={
+ [0xF54]={
category="lo",
description="TIBETAN LETTER PA",
direction="l",
linebreak="al",
- unicodeslot=0x0F54,
+ unicodeslot=0xF54,
},
- [0x0F55]={
+ [0xF55]={
category="lo",
description="TIBETAN LETTER PHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F55,
+ unicodeslot=0xF55,
},
- [0x0F56]={
+ [0xF56]={
category="lo",
description="TIBETAN LETTER BA",
direction="l",
linebreak="al",
- unicodeslot=0x0F56,
+ unicodeslot=0xF56,
},
- [0x0F57]={
+ [0xF57]={
category="lo",
description="TIBETAN LETTER BHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0F56, 0x0FB7 },
- unicodeslot=0x0F57,
+ specials={ "char", 0xF56, 0xFB7 },
+ unicodeslot=0xF57,
},
- [0x0F58]={
+ [0xF58]={
category="lo",
description="TIBETAN LETTER MA",
direction="l",
linebreak="al",
- unicodeslot=0x0F58,
+ unicodeslot=0xF58,
},
- [0x0F59]={
+ [0xF59]={
category="lo",
description="TIBETAN LETTER TSA",
direction="l",
linebreak="al",
- unicodeslot=0x0F59,
+ unicodeslot=0xF59,
},
- [0x0F5A]={
+ [0xF5A]={
category="lo",
description="TIBETAN LETTER TSHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F5A,
+ unicodeslot=0xF5A,
},
- [0x0F5B]={
+ [0xF5B]={
category="lo",
description="TIBETAN LETTER DZA",
direction="l",
linebreak="al",
- unicodeslot=0x0F5B,
+ unicodeslot=0xF5B,
},
- [0x0F5C]={
+ [0xF5C]={
category="lo",
description="TIBETAN LETTER DZHA",
direction="l",
linebreak="al",
- specials={ "char", 0x0F5B, 0x0FB7 },
- unicodeslot=0x0F5C,
+ specials={ "char", 0xF5B, 0xFB7 },
+ unicodeslot=0xF5C,
},
- [0x0F5D]={
+ [0xF5D]={
category="lo",
description="TIBETAN LETTER WA",
direction="l",
linebreak="al",
- unicodeslot=0x0F5D,
+ unicodeslot=0xF5D,
},
- [0x0F5E]={
+ [0xF5E]={
category="lo",
description="TIBETAN LETTER ZHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F5E,
+ unicodeslot=0xF5E,
},
- [0x0F5F]={
+ [0xF5F]={
category="lo",
description="TIBETAN LETTER ZA",
direction="l",
linebreak="al",
- unicodeslot=0x0F5F,
+ unicodeslot=0xF5F,
},
- [0x0F60]={
+ [0xF60]={
category="lo",
description="TIBETAN LETTER -A",
direction="l",
linebreak="al",
- unicodeslot=0x0F60,
+ unicodeslot=0xF60,
},
- [0x0F61]={
+ [0xF61]={
category="lo",
description="TIBETAN LETTER YA",
direction="l",
linebreak="al",
- unicodeslot=0x0F61,
+ unicodeslot=0xF61,
},
- [0x0F62]={
+ [0xF62]={
category="lo",
description="TIBETAN LETTER RA",
direction="l",
linebreak="al",
- unicodeslot=0x0F62,
+ unicodeslot=0xF62,
},
- [0x0F63]={
+ [0xF63]={
category="lo",
description="TIBETAN LETTER LA",
direction="l",
linebreak="al",
- unicodeslot=0x0F63,
+ unicodeslot=0xF63,
},
- [0x0F64]={
+ [0xF64]={
category="lo",
description="TIBETAN LETTER SHA",
direction="l",
linebreak="al",
- unicodeslot=0x0F64,
+ unicodeslot=0xF64,
},
- [0x0F65]={
+ [0xF65]={
category="lo",
description="TIBETAN LETTER SSA",
direction="l",
linebreak="al",
- unicodeslot=0x0F65,
+ unicodeslot=0xF65,
},
- [0x0F66]={
+ [0xF66]={
category="lo",
description="TIBETAN LETTER SA",
direction="l",
linebreak="al",
- unicodeslot=0x0F66,
+ unicodeslot=0xF66,
},
- [0x0F67]={
+ [0xF67]={
category="lo",
description="TIBETAN LETTER HA",
direction="l",
linebreak="al",
- unicodeslot=0x0F67,
+ unicodeslot=0xF67,
},
- [0x0F68]={
+ [0xF68]={
category="lo",
description="TIBETAN LETTER A",
direction="l",
linebreak="al",
- unicodeslot=0x0F68,
+ unicodeslot=0xF68,
},
- [0x0F69]={
+ [0xF69]={
category="lo",
description="TIBETAN LETTER KSSA",
direction="l",
linebreak="al",
- specials={ "char", 0x0F40, 0x0FB5 },
- unicodeslot=0x0F69,
+ specials={ "char", 0xF40, 0xFB5 },
+ unicodeslot=0xF69,
},
- [0x0F6A]={
+ [0xF6A]={
category="lo",
description="TIBETAN LETTER FIXED-FORM RA",
direction="l",
linebreak="al",
- unicodeslot=0x0F6A,
+ unicodeslot=0xF6A,
},
- [0x0F6B]={
+ [0xF6B]={
category="lo",
description="TIBETAN LETTER KKA",
direction="l",
linebreak="al",
- unicodeslot=0x0F6B,
+ unicodeslot=0xF6B,
},
- [0x0F6C]={
+ [0xF6C]={
category="lo",
description="TIBETAN LETTER RRA",
direction="l",
linebreak="al",
- unicodeslot=0x0F6C,
+ unicodeslot=0xF6C,
},
- [0x0F71]={
+ [0xF71]={
category="mn",
+ combining=0x81,
description="TIBETAN VOWEL SIGN AA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F71,
+ unicodeslot=0xF71,
},
- [0x0F72]={
+ [0xF72]={
category="mn",
+ combining=0x82,
description="TIBETAN VOWEL SIGN I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F72,
+ unicodeslot=0xF72,
},
- [0x0F73]={
+ [0xF73]={
category="mn",
description="TIBETAN VOWEL SIGN II",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0F71, 0x0F72 },
- unicodeslot=0x0F73,
+ specials={ "char", 0xF71, 0xF72 },
+ unicodeslot=0xF73,
},
- [0x0F74]={
+ [0xF74]={
category="mn",
+ combining=0x84,
description="TIBETAN VOWEL SIGN U",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F74,
+ unicodeslot=0xF74,
},
- [0x0F75]={
+ [0xF75]={
category="mn",
description="TIBETAN VOWEL SIGN UU",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0F71, 0x0F74 },
- unicodeslot=0x0F75,
+ specials={ "char", 0xF71, 0xF74 },
+ unicodeslot=0xF75,
},
- [0x0F76]={
+ [0xF76]={
category="mn",
description="TIBETAN VOWEL SIGN VOCALIC R",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0FB2, 0x0F80 },
- unicodeslot=0x0F76,
+ specials={ "char", 0xFB2, 0xF80 },
+ unicodeslot=0xF76,
},
- [0x0F77]={
+ [0xF77]={
category="mn",
description="TIBETAN VOWEL SIGN VOCALIC RR",
direction="nsm",
linebreak="cm",
- specials={ "compat", 0x0FB2, 0x0F81 },
- unicodeslot=0x0F77,
+ specials={ "compat", 0xFB2, 0xF81 },
+ unicodeslot=0xF77,
},
- [0x0F78]={
+ [0xF78]={
category="mn",
description="TIBETAN VOWEL SIGN VOCALIC L",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0FB3, 0x0F80 },
- unicodeslot=0x0F78,
+ specials={ "char", 0xFB3, 0xF80 },
+ unicodeslot=0xF78,
},
- [0x0F79]={
+ [0xF79]={
category="mn",
description="TIBETAN VOWEL SIGN VOCALIC LL",
direction="nsm",
linebreak="cm",
- specials={ "compat", 0x0FB3, 0x0F81 },
- unicodeslot=0x0F79,
+ specials={ "compat", 0xFB3, 0xF81 },
+ unicodeslot=0xF79,
},
- [0x0F7A]={
+ [0xF7A]={
category="mn",
+ combining=0x82,
description="TIBETAN VOWEL SIGN E",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F7A,
+ unicodeslot=0xF7A,
},
- [0x0F7B]={
+ [0xF7B]={
category="mn",
+ combining=0x82,
description="TIBETAN VOWEL SIGN EE",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F7B,
+ unicodeslot=0xF7B,
},
- [0x0F7C]={
+ [0xF7C]={
category="mn",
+ combining=0x82,
description="TIBETAN VOWEL SIGN O",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F7C,
+ unicodeslot=0xF7C,
},
- [0x0F7D]={
+ [0xF7D]={
category="mn",
+ combining=0x82,
description="TIBETAN VOWEL SIGN OO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F7D,
+ unicodeslot=0xF7D,
},
- [0x0F7E]={
+ [0xF7E]={
category="mn",
description="TIBETAN SIGN RJES SU NGA RO",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F7E,
+ unicodeslot=0xF7E,
},
- [0x0F7F]={
+ [0xF7F]={
category="mc",
description="TIBETAN SIGN RNAM BCAD",
direction="l",
linebreak="ba",
- unicodeslot=0x0F7F,
+ unicodeslot=0xF7F,
},
- [0x0F80]={
+ [0xF80]={
category="mn",
+ combining=0x82,
description="TIBETAN VOWEL SIGN REVERSED I",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F80,
+ unicodeslot=0xF80,
},
- [0x0F81]={
+ [0xF81]={
category="mn",
description="TIBETAN VOWEL SIGN REVERSED II",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0F71, 0x0F80 },
- unicodeslot=0x0F81,
+ specials={ "char", 0xF71, 0xF80 },
+ unicodeslot=0xF81,
},
- [0x0F82]={
+ [0xF82]={
category="mn",
+ combining=0xE6,
description="TIBETAN SIGN NYI ZLA NAA DA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F82,
+ unicodeslot=0xF82,
},
- [0x0F83]={
+ [0xF83]={
category="mn",
+ combining=0xE6,
description="TIBETAN SIGN SNA LDAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F83,
+ unicodeslot=0xF83,
},
- [0x0F84]={
+ [0xF84]={
category="mn",
+ combining=0x9,
description="TIBETAN MARK HALANTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F84,
+ unicodeslot=0xF84,
},
- [0x0F85]={
+ [0xF85]={
category="po",
description="TIBETAN MARK PALUTA",
direction="l",
linebreak="ba",
- unicodeslot=0x0F85,
+ unicodeslot=0xF85,
},
- [0x0F86]={
+ [0xF86]={
category="mn",
+ combining=0xE6,
description="TIBETAN SIGN LCI RTAGS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F86,
+ unicodeslot=0xF86,
},
- [0x0F87]={
+ [0xF87]={
category="mn",
+ combining=0xE6,
description="TIBETAN SIGN YANG RTAGS",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F87,
+ unicodeslot=0xF87,
},
- [0x0F88]={
+ [0xF88]={
category="lo",
description="TIBETAN SIGN LCE TSA CAN",
direction="l",
linebreak="al",
- unicodeslot=0x0F88,
+ unicodeslot=0xF88,
},
- [0x0F89]={
+ [0xF89]={
category="lo",
description="TIBETAN SIGN MCHU CAN",
direction="l",
linebreak="al",
- unicodeslot=0x0F89,
+ unicodeslot=0xF89,
},
- [0x0F8A]={
+ [0xF8A]={
category="lo",
description="TIBETAN SIGN GRU CAN RGYINGS",
direction="l",
linebreak="al",
- unicodeslot=0x0F8A,
+ unicodeslot=0xF8A,
},
- [0x0F8B]={
+ [0xF8B]={
category="lo",
description="TIBETAN SIGN GRU MED RGYINGS",
direction="l",
linebreak="al",
- unicodeslot=0x0F8B,
+ unicodeslot=0xF8B,
},
- [0x0F8C]={
+ [0xF8C]={
category="lo",
description="TIBETAN SIGN INVERTED MCHU CAN",
direction="l",
linebreak="al",
- unicodeslot=0x0F8C,
+ unicodeslot=0xF8C,
},
- [0x0F8D]={
+ [0xF8D]={
category="mn",
description="TIBETAN SUBJOINED SIGN LCE TSA CAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F8D,
+ unicodeslot=0xF8D,
},
- [0x0F8E]={
+ [0xF8E]={
category="mn",
description="TIBETAN SUBJOINED SIGN MCHU CAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F8E,
+ unicodeslot=0xF8E,
},
- [0x0F8F]={
+ [0xF8F]={
category="mn",
description="TIBETAN SUBJOINED SIGN INVERTED MCHU CAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F8F,
+ unicodeslot=0xF8F,
},
- [0x0F90]={
+ [0xF90]={
category="mn",
description="TIBETAN SUBJOINED LETTER KA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F90,
+ unicodeslot=0xF90,
},
- [0x0F91]={
+ [0xF91]={
category="mn",
description="TIBETAN SUBJOINED LETTER KHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F91,
+ unicodeslot=0xF91,
},
- [0x0F92]={
+ [0xF92]={
category="mn",
description="TIBETAN SUBJOINED LETTER GA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F92,
+ unicodeslot=0xF92,
},
- [0x0F93]={
+ [0xF93]={
category="mn",
description="TIBETAN SUBJOINED LETTER GHA",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0F92, 0x0FB7 },
- unicodeslot=0x0F93,
+ specials={ "char", 0xF92, 0xFB7 },
+ unicodeslot=0xF93,
},
- [0x0F94]={
+ [0xF94]={
category="mn",
description="TIBETAN SUBJOINED LETTER NGA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F94,
+ unicodeslot=0xF94,
},
- [0x0F95]={
+ [0xF95]={
category="mn",
description="TIBETAN SUBJOINED LETTER CA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F95,
+ unicodeslot=0xF95,
},
- [0x0F96]={
+ [0xF96]={
category="mn",
description="TIBETAN SUBJOINED LETTER CHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F96,
+ unicodeslot=0xF96,
},
- [0x0F97]={
+ [0xF97]={
category="mn",
description="TIBETAN SUBJOINED LETTER JA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F97,
+ unicodeslot=0xF97,
},
- [0x0F99]={
+ [0xF99]={
category="mn",
description="TIBETAN SUBJOINED LETTER NYA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F99,
+ unicodeslot=0xF99,
},
- [0x0F9A]={
+ [0xF9A]={
category="mn",
description="TIBETAN SUBJOINED LETTER TTA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F9A,
+ unicodeslot=0xF9A,
},
- [0x0F9B]={
+ [0xF9B]={
category="mn",
description="TIBETAN SUBJOINED LETTER TTHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F9B,
+ unicodeslot=0xF9B,
},
- [0x0F9C]={
+ [0xF9C]={
category="mn",
description="TIBETAN SUBJOINED LETTER DDA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F9C,
+ unicodeslot=0xF9C,
},
- [0x0F9D]={
+ [0xF9D]={
category="mn",
description="TIBETAN SUBJOINED LETTER DDHA",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0F9C, 0x0FB7 },
- unicodeslot=0x0F9D,
+ specials={ "char", 0xF9C, 0xFB7 },
+ unicodeslot=0xF9D,
},
- [0x0F9E]={
+ [0xF9E]={
category="mn",
description="TIBETAN SUBJOINED LETTER NNA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F9E,
+ unicodeslot=0xF9E,
},
- [0x0F9F]={
+ [0xF9F]={
category="mn",
description="TIBETAN SUBJOINED LETTER TA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0F9F,
+ unicodeslot=0xF9F,
},
- [0x0FA0]={
+ [0xFA0]={
category="mn",
description="TIBETAN SUBJOINED LETTER THA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA0,
+ unicodeslot=0xFA0,
},
- [0x0FA1]={
+ [0xFA1]={
category="mn",
description="TIBETAN SUBJOINED LETTER DA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA1,
+ unicodeslot=0xFA1,
},
- [0x0FA2]={
+ [0xFA2]={
category="mn",
description="TIBETAN SUBJOINED LETTER DHA",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0FA1, 0x0FB7 },
- unicodeslot=0x0FA2,
+ specials={ "char", 0xFA1, 0xFB7 },
+ unicodeslot=0xFA2,
},
- [0x0FA3]={
+ [0xFA3]={
category="mn",
description="TIBETAN SUBJOINED LETTER NA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA3,
+ unicodeslot=0xFA3,
},
- [0x0FA4]={
+ [0xFA4]={
category="mn",
description="TIBETAN SUBJOINED LETTER PA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA4,
+ unicodeslot=0xFA4,
},
- [0x0FA5]={
+ [0xFA5]={
category="mn",
description="TIBETAN SUBJOINED LETTER PHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA5,
+ unicodeslot=0xFA5,
},
- [0x0FA6]={
+ [0xFA6]={
category="mn",
description="TIBETAN SUBJOINED LETTER BA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA6,
+ unicodeslot=0xFA6,
},
- [0x0FA7]={
+ [0xFA7]={
category="mn",
description="TIBETAN SUBJOINED LETTER BHA",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0FA6, 0x0FB7 },
- unicodeslot=0x0FA7,
+ specials={ "char", 0xFA6, 0xFB7 },
+ unicodeslot=0xFA7,
},
- [0x0FA8]={
+ [0xFA8]={
category="mn",
description="TIBETAN SUBJOINED LETTER MA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA8,
+ unicodeslot=0xFA8,
},
- [0x0FA9]={
+ [0xFA9]={
category="mn",
description="TIBETAN SUBJOINED LETTER TSA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FA9,
+ unicodeslot=0xFA9,
},
- [0x0FAA]={
+ [0xFAA]={
category="mn",
description="TIBETAN SUBJOINED LETTER TSHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FAA,
+ unicodeslot=0xFAA,
},
- [0x0FAB]={
+ [0xFAB]={
category="mn",
description="TIBETAN SUBJOINED LETTER DZA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FAB,
+ unicodeslot=0xFAB,
},
- [0x0FAC]={
+ [0xFAC]={
category="mn",
description="TIBETAN SUBJOINED LETTER DZHA",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0FAB, 0x0FB7 },
- unicodeslot=0x0FAC,
+ specials={ "char", 0xFAB, 0xFB7 },
+ unicodeslot=0xFAC,
},
- [0x0FAD]={
+ [0xFAD]={
category="mn",
description="TIBETAN SUBJOINED LETTER WA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FAD,
+ unicodeslot=0xFAD,
},
- [0x0FAE]={
+ [0xFAE]={
category="mn",
description="TIBETAN SUBJOINED LETTER ZHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FAE,
+ unicodeslot=0xFAE,
},
- [0x0FAF]={
+ [0xFAF]={
category="mn",
description="TIBETAN SUBJOINED LETTER ZA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FAF,
+ unicodeslot=0xFAF,
},
- [0x0FB0]={
+ [0xFB0]={
category="mn",
description="TIBETAN SUBJOINED LETTER -A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB0,
+ unicodeslot=0xFB0,
},
- [0x0FB1]={
+ [0xFB1]={
category="mn",
description="TIBETAN SUBJOINED LETTER YA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB1,
+ unicodeslot=0xFB1,
},
- [0x0FB2]={
+ [0xFB2]={
category="mn",
description="TIBETAN SUBJOINED LETTER RA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB2,
+ unicodeslot=0xFB2,
},
- [0x0FB3]={
+ [0xFB3]={
category="mn",
description="TIBETAN SUBJOINED LETTER LA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB3,
+ unicodeslot=0xFB3,
},
- [0x0FB4]={
+ [0xFB4]={
category="mn",
description="TIBETAN SUBJOINED LETTER SHA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB4,
+ unicodeslot=0xFB4,
},
- [0x0FB5]={
+ [0xFB5]={
category="mn",
description="TIBETAN SUBJOINED LETTER SSA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB5,
+ unicodeslot=0xFB5,
},
- [0x0FB6]={
+ [0xFB6]={
category="mn",
description="TIBETAN SUBJOINED LETTER SA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB6,
+ unicodeslot=0xFB6,
},
- [0x0FB7]={
+ [0xFB7]={
category="mn",
description="TIBETAN SUBJOINED LETTER HA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB7,
+ unicodeslot=0xFB7,
},
- [0x0FB8]={
+ [0xFB8]={
category="mn",
description="TIBETAN SUBJOINED LETTER A",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FB8,
+ unicodeslot=0xFB8,
},
- [0x0FB9]={
+ [0xFB9]={
category="mn",
description="TIBETAN SUBJOINED LETTER KSSA",
direction="nsm",
linebreak="cm",
- specials={ "char", 0x0F90, 0x0FB5 },
- unicodeslot=0x0FB9,
+ specials={ "char", 0xF90, 0xFB5 },
+ unicodeslot=0xFB9,
},
- [0x0FBA]={
+ [0xFBA]={
category="mn",
description="TIBETAN SUBJOINED LETTER FIXED-FORM WA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FBA,
+ unicodeslot=0xFBA,
},
- [0x0FBB]={
+ [0xFBB]={
category="mn",
description="TIBETAN SUBJOINED LETTER FIXED-FORM YA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FBB,
+ unicodeslot=0xFBB,
},
- [0x0FBC]={
+ [0xFBC]={
category="mn",
description="TIBETAN SUBJOINED LETTER FIXED-FORM RA",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FBC,
+ unicodeslot=0xFBC,
},
- [0x0FBE]={
+ [0xFBE]={
category="so",
description="TIBETAN KU RU KHA",
direction="l",
linebreak="ba",
- unicodeslot=0x0FBE,
+ unicodeslot=0xFBE,
},
- [0x0FBF]={
+ [0xFBF]={
category="so",
description="TIBETAN KU RU KHA BZHI MIG CAN",
direction="l",
linebreak="ba",
- unicodeslot=0x0FBF,
+ unicodeslot=0xFBF,
},
- [0x0FC0]={
+ [0xFC0]={
category="so",
description="TIBETAN CANTILLATION SIGN HEAVY BEAT",
direction="l",
linebreak="al",
- unicodeslot=0x0FC0,
+ unicodeslot=0xFC0,
},
- [0x0FC1]={
+ [0xFC1]={
category="so",
description="TIBETAN CANTILLATION SIGN LIGHT BEAT",
direction="l",
linebreak="al",
- unicodeslot=0x0FC1,
+ unicodeslot=0xFC1,
},
- [0x0FC2]={
+ [0xFC2]={
category="so",
description="TIBETAN CANTILLATION SIGN CANG TE-U",
direction="l",
linebreak="al",
- unicodeslot=0x0FC2,
+ unicodeslot=0xFC2,
},
- [0x0FC3]={
+ [0xFC3]={
category="so",
description="TIBETAN CANTILLATION SIGN SBUB -CHAL",
direction="l",
linebreak="al",
- unicodeslot=0x0FC3,
+ unicodeslot=0xFC3,
},
- [0x0FC4]={
+ [0xFC4]={
category="so",
description="TIBETAN SYMBOL DRIL BU",
direction="l",
linebreak="al",
- unicodeslot=0x0FC4,
+ unicodeslot=0xFC4,
},
- [0x0FC5]={
+ [0xFC5]={
category="so",
description="TIBETAN SYMBOL RDO RJE",
direction="l",
linebreak="al",
- unicodeslot=0x0FC5,
+ unicodeslot=0xFC5,
},
- [0x0FC6]={
+ [0xFC6]={
category="mn",
+ combining=0xDC,
description="TIBETAN SYMBOL PADMA GDAN",
direction="nsm",
linebreak="cm",
- unicodeslot=0x0FC6,
+ unicodeslot=0xFC6,
},
- [0x0FC7]={
+ [0xFC7]={
category="so",
description="TIBETAN SYMBOL RDO RJE RGYA GRAM",
direction="l",
linebreak="al",
- unicodeslot=0x0FC7,
+ unicodeslot=0xFC7,
},
- [0x0FC8]={
+ [0xFC8]={
category="so",
description="TIBETAN SYMBOL PHUR PA",
direction="l",
linebreak="al",
- unicodeslot=0x0FC8,
+ unicodeslot=0xFC8,
},
- [0x0FC9]={
+ [0xFC9]={
category="so",
description="TIBETAN SYMBOL NOR BU",
direction="l",
linebreak="al",
- unicodeslot=0x0FC9,
+ unicodeslot=0xFC9,
},
- [0x0FCA]={
+ [0xFCA]={
category="so",
description="TIBETAN SYMBOL NOR BU NYIS -KHYIL",
direction="l",
linebreak="al",
- unicodeslot=0x0FCA,
+ unicodeslot=0xFCA,
},
- [0x0FCB]={
+ [0xFCB]={
category="so",
description="TIBETAN SYMBOL NOR BU GSUM -KHYIL",
direction="l",
linebreak="al",
- unicodeslot=0x0FCB,
+ unicodeslot=0xFCB,
},
- [0x0FCC]={
+ [0xFCC]={
category="so",
description="TIBETAN SYMBOL NOR BU BZHI -KHYIL",
direction="l",
linebreak="al",
- unicodeslot=0x0FCC,
+ unicodeslot=0xFCC,
},
- [0x0FCE]={
+ [0xFCE]={
category="so",
description="TIBETAN SIGN RDEL NAG RDEL DKAR",
direction="l",
linebreak="al",
- unicodeslot=0x0FCE,
+ unicodeslot=0xFCE,
},
- [0x0FCF]={
+ [0xFCF]={
category="so",
description="TIBETAN SIGN RDEL NAG GSUM",
direction="l",
linebreak="al",
- unicodeslot=0x0FCF,
+ unicodeslot=0xFCF,
},
- [0x0FD0]={
+ [0xFD0]={
category="po",
description="TIBETAN MARK BSKA- SHOG GI MGO RGYAN",
direction="l",
linebreak="bb",
- unicodeslot=0x0FD0,
+ unicodeslot=0xFD0,
},
- [0x0FD1]={
+ [0xFD1]={
category="po",
description="TIBETAN MARK MNYAM YIG GI MGO RGYAN",
direction="l",
linebreak="bb",
- unicodeslot=0x0FD1,
+ unicodeslot=0xFD1,
},
- [0x0FD2]={
+ [0xFD2]={
category="po",
description="TIBETAN MARK NYIS TSHEG",
direction="l",
linebreak="ba",
- unicodeslot=0x0FD2,
+ unicodeslot=0xFD2,
},
- [0x0FD3]={
+ [0xFD3]={
category="po",
description="TIBETAN MARK INITIAL BRDA RNYING YIG MGO MDUN MA",
direction="l",
linebreak="bb",
- unicodeslot=0x0FD3,
+ unicodeslot=0xFD3,
},
- [0x0FD4]={
+ [0xFD4]={
category="po",
description="TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA",
direction="l",
linebreak="al",
- unicodeslot=0x0FD4,
+ unicodeslot=0xFD4,
},
- [0x0FD5]={
+ [0xFD5]={
category="so",
description="RIGHT-FACING SVASTI SIGN",
direction="l",
linebreak="al",
- unicodeslot=0x0FD5,
+ unicodeslot=0xFD5,
},
- [0x0FD6]={
+ [0xFD6]={
category="so",
description="LEFT-FACING SVASTI SIGN",
direction="l",
linebreak="al",
- unicodeslot=0x0FD6,
+ unicodeslot=0xFD6,
},
- [0x0FD7]={
+ [0xFD7]={
category="so",
description="RIGHT-FACING SVASTI SIGN WITH DOTS",
direction="l",
linebreak="al",
- unicodeslot=0x0FD7,
+ unicodeslot=0xFD7,
},
- [0x0FD8]={
+ [0xFD8]={
category="so",
description="LEFT-FACING SVASTI SIGN WITH DOTS",
direction="l",
linebreak="al",
- unicodeslot=0x0FD8,
+ unicodeslot=0xFD8,
},
- [0x0FD9]={
+ [0xFD9]={
category="po",
description="TIBETAN MARK LEADING MCHAN RTAGS",
direction="l",
linebreak="gl",
- unicodeslot=0x0FD9,
+ unicodeslot=0xFD9,
},
- [0x0FDA]={
+ [0xFDA]={
category="po",
description="TIBETAN MARK TRAILING MCHAN RTAGS",
direction="l",
linebreak="gl",
- unicodeslot=0x0FDA,
+ unicodeslot=0xFDA,
},
[0x1000]={
category="lo",
@@ -29539,6 +30180,7 @@ characters.data={
},
[0x1037]={
category="mn",
+ combining=0x7,
description="MYANMAR SIGN DOT BELOW",
direction="nsm",
linebreak="sa",
@@ -29553,6 +30195,7 @@ characters.data={
},
[0x1039]={
category="mn",
+ combining=0x9,
description="MYANMAR SIGN VIRAMA",
direction="nsm",
linebreak="sa",
@@ -29560,6 +30203,7 @@ characters.data={
},
[0x103A]={
category="mn",
+ combining=0x9,
description="MYANMAR SIGN ASAT",
direction="nsm",
linebreak="sa",
@@ -30141,6 +30785,7 @@ characters.data={
},
[0x108D]={
category="mn",
+ combining=0xDC,
description="MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE",
direction="nsm",
linebreak="sa",
@@ -35099,6 +35744,7 @@ characters.data={
},
[0x135D]={
category="mn",
+ combining=0xE6,
description="ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK",
direction="nsm",
linebreak="cm",
@@ -35106,6 +35752,7 @@ characters.data={
},
[0x135E]={
category="mn",
+ combining=0xE6,
description="ETHIOPIC COMBINING VOWEL LENGTH MARK",
direction="nsm",
linebreak="cm",
@@ -35113,6 +35760,7 @@ characters.data={
},
[0x135F]={
category="mn",
+ combining=0xE6,
description="ETHIOPIC COMBINING GEMINATION MARK",
direction="nsm",
linebreak="cm",
@@ -41350,6 +41998,62 @@ characters.data={
linebreak="al",
unicodeslot=0x16F0,
},
+ [0x16F1]={
+ category="lo",
+ description="RUNIC LETTER K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F1,
+ },
+ [0x16F2]={
+ category="lo",
+ description="RUNIC LETTER SH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F2,
+ },
+ [0x16F3]={
+ category="lo",
+ description="RUNIC LETTER OO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F3,
+ },
+ [0x16F4]={
+ category="lo",
+ description="RUNIC LETTER FRANKS CASKET OS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F4,
+ },
+ [0x16F5]={
+ category="lo",
+ description="RUNIC LETTER FRANKS CASKET IS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F5,
+ },
+ [0x16F6]={
+ category="lo",
+ description="RUNIC LETTER FRANKS CASKET EH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F6,
+ },
+ [0x16F7]={
+ category="lo",
+ description="RUNIC LETTER FRANKS CASKET AC",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F7,
+ },
+ [0x16F8]={
+ category="lo",
+ description="RUNIC LETTER FRANKS CASKET AESC",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16F8,
+ },
[0x1700]={
category="lo",
description="TAGALOG LETTER A",
@@ -41485,6 +42189,7 @@ characters.data={
},
[0x1714]={
category="mn",
+ combining=0x9,
description="TAGALOG SIGN VIRAMA",
direction="nsm",
linebreak="cm",
@@ -41632,6 +42337,7 @@ characters.data={
},
[0x1734]={
category="mn",
+ combining=0x9,
description="HANUNOO SIGN PAMUDPOD",
direction="nsm",
linebreak="cm",
@@ -42493,6 +43199,7 @@ characters.data={
},
[0x17D2]={
category="mn",
+ combining=0x9,
description="KHMER SIGN COENG",
direction="nsm",
linebreak="sa",
@@ -42570,6 +43277,7 @@ characters.data={
},
[0x17DD]={
category="mn",
+ combining=0xE6,
description="KHMER SIGN ATTHACAN",
direction="nsm",
linebreak="sa",
@@ -42758,6 +43466,7 @@ characters.data={
unicodeslot=0x1805,
},
[0x1806]={
+ arabic="u",
category="pd",
description="MONGOLIAN TODO SOFT HYPHEN",
direction="on",
@@ -42765,6 +43474,7 @@ characters.data={
unicodeslot=0x1806,
},
[0x1807]={
+ arabic="d",
category="po",
description="MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER",
direction="on",
@@ -42786,6 +43496,7 @@ characters.data={
unicodeslot=0x1809,
},
[0x180A]={
+ arabic="c",
category="po",
description="MONGOLIAN NIRUGU",
direction="on",
@@ -42814,9 +43525,10 @@ characters.data={
unicodeslot=0x180D,
},
[0x180E]={
+ arabic="u",
category="zs",
description="MONGOLIAN VOWEL SEPARATOR",
- direction="ws",
+ direction="bn",
linebreak="gl",
unicodeslot=0x180E,
},
@@ -42891,6 +43603,7 @@ characters.data={
unicodeslot=0x1819,
},
[0x1820]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER A",
direction="l",
@@ -42902,6 +43615,7 @@ characters.data={
},
},
[0x1821]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER E",
direction="l",
@@ -42912,6 +43626,7 @@ characters.data={
},
},
[0x1822]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER I",
direction="l",
@@ -42922,6 +43637,7 @@ characters.data={
},
},
[0x1823]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER O",
direction="l",
@@ -42932,6 +43648,7 @@ characters.data={
},
},
[0x1824]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER U",
direction="l",
@@ -42942,6 +43659,7 @@ characters.data={
},
},
[0x1825]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER OE",
direction="l",
@@ -42953,6 +43671,7 @@ characters.data={
},
},
[0x1826]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER UE",
direction="l",
@@ -42964,6 +43683,7 @@ characters.data={
},
},
[0x1827]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER EE",
direction="l",
@@ -42971,6 +43691,7 @@ characters.data={
unicodeslot=0x1827,
},
[0x1828]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER NA",
direction="l",
@@ -42983,6 +43704,7 @@ characters.data={
},
},
[0x1829]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ANG",
direction="l",
@@ -42990,6 +43712,7 @@ characters.data={
unicodeslot=0x1829,
},
[0x182A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER BA",
direction="l",
@@ -43000,6 +43723,7 @@ characters.data={
},
},
[0x182B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER PA",
direction="l",
@@ -43007,6 +43731,7 @@ characters.data={
unicodeslot=0x182B,
},
[0x182C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER QA",
direction="l",
@@ -43019,6 +43744,7 @@ characters.data={
},
},
[0x182D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER GA",
direction="l",
@@ -43031,6 +43757,7 @@ characters.data={
},
},
[0x182E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MA",
direction="l",
@@ -43038,6 +43765,7 @@ characters.data={
unicodeslot=0x182E,
},
[0x182F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER LA",
direction="l",
@@ -43045,6 +43773,7 @@ characters.data={
unicodeslot=0x182F,
},
[0x1830]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SA",
direction="l",
@@ -43056,6 +43785,7 @@ characters.data={
},
},
[0x1831]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SHA",
direction="l",
@@ -43063,6 +43793,7 @@ characters.data={
unicodeslot=0x1831,
},
[0x1832]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TA",
direction="l",
@@ -43073,6 +43804,7 @@ characters.data={
},
},
[0x1833]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER DA",
direction="l",
@@ -43083,6 +43815,7 @@ characters.data={
},
},
[0x1834]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER CHA",
direction="l",
@@ -43090,6 +43823,7 @@ characters.data={
unicodeslot=0x1834,
},
[0x1835]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER JA",
direction="l",
@@ -43100,6 +43834,7 @@ characters.data={
},
},
[0x1836]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER YA",
direction="l",
@@ -43111,6 +43846,7 @@ characters.data={
},
},
[0x1837]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER RA",
direction="l",
@@ -43118,6 +43854,7 @@ characters.data={
unicodeslot=0x1837,
},
[0x1838]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER WA",
direction="l",
@@ -43128,6 +43865,7 @@ characters.data={
},
},
[0x1839]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER FA",
direction="l",
@@ -43135,6 +43873,7 @@ characters.data={
unicodeslot=0x1839,
},
[0x183A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER KA",
direction="l",
@@ -43142,6 +43881,7 @@ characters.data={
unicodeslot=0x183A,
},
[0x183B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER KHA",
direction="l",
@@ -43149,6 +43889,7 @@ characters.data={
unicodeslot=0x183B,
},
[0x183C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TSA",
direction="l",
@@ -43156,6 +43897,7 @@ characters.data={
unicodeslot=0x183C,
},
[0x183D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ZA",
direction="l",
@@ -43163,6 +43905,7 @@ characters.data={
unicodeslot=0x183D,
},
[0x183E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER HAA",
direction="l",
@@ -43170,6 +43913,7 @@ characters.data={
unicodeslot=0x183E,
},
[0x183F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ZRA",
direction="l",
@@ -43177,6 +43921,7 @@ characters.data={
unicodeslot=0x183F,
},
[0x1840]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER LHA",
direction="l",
@@ -43184,6 +43929,7 @@ characters.data={
unicodeslot=0x1840,
},
[0x1841]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ZHI",
direction="l",
@@ -43191,6 +43937,7 @@ characters.data={
unicodeslot=0x1841,
},
[0x1842]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER CHI",
direction="l",
@@ -43198,6 +43945,7 @@ characters.data={
unicodeslot=0x1842,
},
[0x1843]={
+ arabic="d",
category="lm",
description="MONGOLIAN LETTER TODO LONG VOWEL SIGN",
direction="l",
@@ -43205,6 +43953,7 @@ characters.data={
unicodeslot=0x1843,
},
[0x1844]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO E",
direction="l",
@@ -43215,6 +43964,7 @@ characters.data={
},
},
[0x1845]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO I",
direction="l",
@@ -43225,6 +43975,7 @@ characters.data={
},
},
[0x1846]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO O",
direction="l",
@@ -43235,6 +43986,7 @@ characters.data={
},
},
[0x1847]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO U",
direction="l",
@@ -43246,6 +43998,7 @@ characters.data={
},
},
[0x1848]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO OE",
direction="l",
@@ -43256,6 +44009,7 @@ characters.data={
},
},
[0x1849]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO UE",
direction="l",
@@ -43266,6 +44020,7 @@ characters.data={
},
},
[0x184A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO ANG",
direction="l",
@@ -43273,6 +44028,7 @@ characters.data={
unicodeslot=0x184A,
},
[0x184B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO BA",
direction="l",
@@ -43280,6 +44036,7 @@ characters.data={
unicodeslot=0x184B,
},
[0x184C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO PA",
direction="l",
@@ -43287,6 +44044,7 @@ characters.data={
unicodeslot=0x184C,
},
[0x184D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO QA",
direction="l",
@@ -43297,6 +44055,7 @@ characters.data={
},
},
[0x184E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO GA",
direction="l",
@@ -43307,6 +44066,7 @@ characters.data={
},
},
[0x184F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO MA",
direction="l",
@@ -43314,6 +44074,7 @@ characters.data={
unicodeslot=0x184F,
},
[0x1850]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO TA",
direction="l",
@@ -43321,6 +44082,7 @@ characters.data={
unicodeslot=0x1850,
},
[0x1851]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO DA",
direction="l",
@@ -43328,6 +44090,7 @@ characters.data={
unicodeslot=0x1851,
},
[0x1852]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO CHA",
direction="l",
@@ -43335,6 +44098,7 @@ characters.data={
unicodeslot=0x1852,
},
[0x1853]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO JA",
direction="l",
@@ -43342,6 +44106,7 @@ characters.data={
unicodeslot=0x1853,
},
[0x1854]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO TSA",
direction="l",
@@ -43349,6 +44114,7 @@ characters.data={
unicodeslot=0x1854,
},
[0x1855]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO YA",
direction="l",
@@ -43356,6 +44122,7 @@ characters.data={
unicodeslot=0x1855,
},
[0x1856]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO WA",
direction="l",
@@ -43363,6 +44130,7 @@ characters.data={
unicodeslot=0x1856,
},
[0x1857]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO KA",
direction="l",
@@ -43370,6 +44138,7 @@ characters.data={
unicodeslot=0x1857,
},
[0x1858]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO GAA",
direction="l",
@@ -43377,6 +44146,7 @@ characters.data={
unicodeslot=0x1858,
},
[0x1859]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO HAA",
direction="l",
@@ -43384,6 +44154,7 @@ characters.data={
unicodeslot=0x1859,
},
[0x185A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO JIA",
direction="l",
@@ -43391,6 +44162,7 @@ characters.data={
unicodeslot=0x185A,
},
[0x185B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO NIA",
direction="l",
@@ -43398,6 +44170,7 @@ characters.data={
unicodeslot=0x185B,
},
[0x185C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO DZA",
direction="l",
@@ -43405,6 +44178,7 @@ characters.data={
unicodeslot=0x185C,
},
[0x185D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE E",
direction="l",
@@ -43415,6 +44189,7 @@ characters.data={
},
},
[0x185E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE I",
direction="l",
@@ -43426,6 +44201,7 @@ characters.data={
},
},
[0x185F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE IY",
direction="l",
@@ -43433,6 +44209,7 @@ characters.data={
unicodeslot=0x185F,
},
[0x1860]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE UE",
direction="l",
@@ -43443,6 +44220,7 @@ characters.data={
},
},
[0x1861]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE U",
direction="l",
@@ -43450,6 +44228,7 @@ characters.data={
unicodeslot=0x1861,
},
[0x1862]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE ANG",
direction="l",
@@ -43457,6 +44236,7 @@ characters.data={
unicodeslot=0x1862,
},
[0x1863]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE KA",
direction="l",
@@ -43467,6 +44247,7 @@ characters.data={
},
},
[0x1864]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE GA",
direction="l",
@@ -43474,6 +44255,7 @@ characters.data={
unicodeslot=0x1864,
},
[0x1865]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE HA",
direction="l",
@@ -43481,6 +44263,7 @@ characters.data={
unicodeslot=0x1865,
},
[0x1866]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE PA",
direction="l",
@@ -43488,6 +44271,7 @@ characters.data={
unicodeslot=0x1866,
},
[0x1867]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE SHA",
direction="l",
@@ -43495,6 +44279,7 @@ characters.data={
unicodeslot=0x1867,
},
[0x1868]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE TA",
direction="l",
@@ -43506,6 +44291,7 @@ characters.data={
},
},
[0x1869]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE DA",
direction="l",
@@ -43516,6 +44302,7 @@ characters.data={
},
},
[0x186A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE JA",
direction="l",
@@ -43523,6 +44310,7 @@ characters.data={
unicodeslot=0x186A,
},
[0x186B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE FA",
direction="l",
@@ -43530,6 +44318,7 @@ characters.data={
unicodeslot=0x186B,
},
[0x186C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE GAA",
direction="l",
@@ -43537,6 +44326,7 @@ characters.data={
unicodeslot=0x186C,
},
[0x186D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE HAA",
direction="l",
@@ -43544,6 +44334,7 @@ characters.data={
unicodeslot=0x186D,
},
[0x186E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE TSA",
direction="l",
@@ -43551,6 +44342,7 @@ characters.data={
unicodeslot=0x186E,
},
[0x186F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE ZA",
direction="l",
@@ -43561,6 +44353,7 @@ characters.data={
},
},
[0x1870]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE RAA",
direction="l",
@@ -43568,6 +44361,7 @@ characters.data={
unicodeslot=0x1870,
},
[0x1871]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE CHA",
direction="l",
@@ -43575,6 +44369,7 @@ characters.data={
unicodeslot=0x1871,
},
[0x1872]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER SIBE ZHA",
direction="l",
@@ -43582,6 +44377,7 @@ characters.data={
unicodeslot=0x1872,
},
[0x1873]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU I",
direction="l",
@@ -43594,6 +44390,7 @@ characters.data={
},
},
[0x1874]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU KA",
direction="l",
@@ -43606,6 +44403,7 @@ characters.data={
},
},
[0x1875]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU RA",
direction="l",
@@ -43613,6 +44411,7 @@ characters.data={
unicodeslot=0x1875,
},
[0x1876]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU FA",
direction="l",
@@ -43623,6 +44422,7 @@ characters.data={
},
},
[0x1877]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ZHA",
direction="l",
@@ -43630,6 +44430,7 @@ characters.data={
unicodeslot=0x1877,
},
[0x1880]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI ANUSVARA ONE",
direction="l",
@@ -43640,6 +44441,7 @@ characters.data={
},
},
[0x1881]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI VISARGA ONE",
direction="l",
@@ -43650,6 +44452,7 @@ characters.data={
},
},
[0x1882]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI DAMARU",
direction="l",
@@ -43657,6 +44460,7 @@ characters.data={
unicodeslot=0x1882,
},
[0x1883]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI UBADAMA",
direction="l",
@@ -43664,6 +44468,7 @@ characters.data={
unicodeslot=0x1883,
},
[0x1884]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI INVERTED UBADAMA",
direction="l",
@@ -43671,6 +44476,7 @@ characters.data={
unicodeslot=0x1884,
},
[0x1885]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI BALUDA",
direction="l",
@@ -43678,6 +44484,7 @@ characters.data={
unicodeslot=0x1885,
},
[0x1886]={
+ arabic="u",
category="lo",
description="MONGOLIAN LETTER ALI GALI THREE BALUDA",
direction="l",
@@ -43685,6 +44492,7 @@ characters.data={
unicodeslot=0x1886,
},
[0x1887]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI A",
direction="l",
@@ -43697,6 +44505,7 @@ characters.data={
},
},
[0x1888]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI I",
direction="l",
@@ -43707,6 +44516,7 @@ characters.data={
},
},
[0x1889]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI KA",
direction="l",
@@ -43714,6 +44524,7 @@ characters.data={
unicodeslot=0x1889,
},
[0x188A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI NGA",
direction="l",
@@ -43724,6 +44535,7 @@ characters.data={
},
},
[0x188B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI CA",
direction="l",
@@ -43731,6 +44543,7 @@ characters.data={
unicodeslot=0x188B,
},
[0x188C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI TTA",
direction="l",
@@ -43738,6 +44551,7 @@ characters.data={
unicodeslot=0x188C,
},
[0x188D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI TTHA",
direction="l",
@@ -43745,6 +44559,7 @@ characters.data={
unicodeslot=0x188D,
},
[0x188E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI DDA",
direction="l",
@@ -43752,6 +44567,7 @@ characters.data={
unicodeslot=0x188E,
},
[0x188F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI NNA",
direction="l",
@@ -43759,6 +44575,7 @@ characters.data={
unicodeslot=0x188F,
},
[0x1890]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI TA",
direction="l",
@@ -43766,6 +44583,7 @@ characters.data={
unicodeslot=0x1890,
},
[0x1891]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI DA",
direction="l",
@@ -43773,6 +44591,7 @@ characters.data={
unicodeslot=0x1891,
},
[0x1892]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI PA",
direction="l",
@@ -43780,6 +44599,7 @@ characters.data={
unicodeslot=0x1892,
},
[0x1893]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI PHA",
direction="l",
@@ -43787,6 +44607,7 @@ characters.data={
unicodeslot=0x1893,
},
[0x1894]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI SSA",
direction="l",
@@ -43794,6 +44615,7 @@ characters.data={
unicodeslot=0x1894,
},
[0x1895]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI ZHA",
direction="l",
@@ -43801,6 +44623,7 @@ characters.data={
unicodeslot=0x1895,
},
[0x1896]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI ZA",
direction="l",
@@ -43808,6 +44631,7 @@ characters.data={
unicodeslot=0x1896,
},
[0x1897]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI AH",
direction="l",
@@ -43815,6 +44639,7 @@ characters.data={
unicodeslot=0x1897,
},
[0x1898]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO ALI GALI TA",
direction="l",
@@ -43822,6 +44647,7 @@ characters.data={
unicodeslot=0x1898,
},
[0x1899]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER TODO ALI GALI ZHA",
direction="l",
@@ -43829,6 +44655,7 @@ characters.data={
unicodeslot=0x1899,
},
[0x189A]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI GHA",
direction="l",
@@ -43836,6 +44663,7 @@ characters.data={
unicodeslot=0x189A,
},
[0x189B]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI NGA",
direction="l",
@@ -43843,6 +44671,7 @@ characters.data={
unicodeslot=0x189B,
},
[0x189C]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI CA",
direction="l",
@@ -43850,6 +44679,7 @@ characters.data={
unicodeslot=0x189C,
},
[0x189D]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI JHA",
direction="l",
@@ -43857,6 +44687,7 @@ characters.data={
unicodeslot=0x189D,
},
[0x189E]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI TTA",
direction="l",
@@ -43864,6 +44695,7 @@ characters.data={
unicodeslot=0x189E,
},
[0x189F]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI DDHA",
direction="l",
@@ -43871,6 +44703,7 @@ characters.data={
unicodeslot=0x189F,
},
[0x18A0]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI TA",
direction="l",
@@ -43878,6 +44711,7 @@ characters.data={
unicodeslot=0x18A0,
},
[0x18A1]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI DHA",
direction="l",
@@ -43885,6 +44719,7 @@ characters.data={
unicodeslot=0x18A1,
},
[0x18A2]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI SSA",
direction="l",
@@ -43892,6 +44727,7 @@ characters.data={
unicodeslot=0x18A2,
},
[0x18A3]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI CYA",
direction="l",
@@ -43899,6 +44735,7 @@ characters.data={
unicodeslot=0x18A3,
},
[0x18A4]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI ZHA",
direction="l",
@@ -43906,6 +44743,7 @@ characters.data={
unicodeslot=0x18A4,
},
[0x18A5]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI ZA",
direction="l",
@@ -43913,6 +44751,7 @@ characters.data={
unicodeslot=0x18A5,
},
[0x18A6]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI HALF U",
direction="l",
@@ -43920,6 +44759,7 @@ characters.data={
unicodeslot=0x18A6,
},
[0x18A7]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER ALI GALI HALF YA",
direction="l",
@@ -43927,6 +44767,7 @@ characters.data={
unicodeslot=0x18A7,
},
[0x18A8]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI BHA",
direction="l",
@@ -43935,12 +44776,14 @@ characters.data={
},
[0x18A9]={
category="mn",
+ combining=0xE4,
description="MONGOLIAN LETTER ALI GALI DAGALGA",
direction="nsm",
linebreak="cm",
unicodeslot=0x18A9,
},
[0x18AA]={
+ arabic="d",
category="lo",
description="MONGOLIAN LETTER MANCHU ALI GALI LHA",
direction="l",
@@ -44640,6 +45483,20 @@ characters.data={
linebreak="al",
unicodeslot=0x191C,
},
+ [0x191D]={
+ category="lo",
+ description="LIMBU LETTER GYAN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x191D,
+ },
+ [0x191E]={
+ category="lo",
+ description="LIMBU LETTER TRA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x191E,
+ },
[0x1920]={
category="mn",
description="LIMBU VOWEL SIGN A",
@@ -44789,6 +45646,7 @@ characters.data={
},
[0x1939]={
category="mn",
+ combining=0xDE,
description="LIMBU SIGN MUKPHRENG",
direction="nsm",
linebreak="cm",
@@ -44796,6 +45654,7 @@ characters.data={
},
[0x193A]={
category="mn",
+ combining=0xE6,
description="LIMBU SIGN KEMPHRENG",
direction="nsm",
linebreak="cm",
@@ -44803,6 +45662,7 @@ characters.data={
},
[0x193B]={
category="mn",
+ combining=0xDC,
description="LIMBU SIGN SA-I",
direction="nsm",
linebreak="cm",
@@ -46112,6 +46972,7 @@ characters.data={
},
[0x1A17]={
category="mn",
+ combining=0xE6,
description="BUGINESE VOWEL SIGN I",
direction="nsm",
linebreak="cm",
@@ -46119,6 +46980,7 @@ characters.data={
},
[0x1A18]={
category="mn",
+ combining=0xDC,
description="BUGINESE VOWEL SIGN U",
direction="nsm",
linebreak="cm",
@@ -46141,7 +47003,7 @@ characters.data={
[0x1A1B]={
category="mc",
description="BUGINESE VOWEL SIGN AE",
- direction="l",
+ direction="nsm",
linebreak="cm",
unicodeslot=0x1A1B,
},
@@ -46602,6 +47464,7 @@ characters.data={
},
[0x1A60]={
category="mn",
+ combining=0x9,
description="TAI THAM SIGN SAKOT",
direction="nsm",
linebreak="sa",
@@ -46749,6 +47612,7 @@ characters.data={
},
[0x1A75]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN TONE-1",
direction="nsm",
linebreak="sa",
@@ -46756,6 +47620,7 @@ characters.data={
},
[0x1A76]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN TONE-2",
direction="nsm",
linebreak="sa",
@@ -46763,6 +47628,7 @@ characters.data={
},
[0x1A77]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN KHUEN TONE-3",
direction="nsm",
linebreak="sa",
@@ -46770,6 +47636,7 @@ characters.data={
},
[0x1A78]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN KHUEN TONE-4",
direction="nsm",
linebreak="sa",
@@ -46777,6 +47644,7 @@ characters.data={
},
[0x1A79]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN KHUEN TONE-5",
direction="nsm",
linebreak="sa",
@@ -46784,6 +47652,7 @@ characters.data={
},
[0x1A7A]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN RA HAAM",
direction="nsm",
linebreak="sa",
@@ -46791,6 +47660,7 @@ characters.data={
},
[0x1A7B]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN MAI SAM",
direction="nsm",
linebreak="sa",
@@ -46798,6 +47668,7 @@ characters.data={
},
[0x1A7C]={
category="mn",
+ combining=0xE6,
description="TAI THAM SIGN KHUEN-LUE KARAN",
direction="nsm",
linebreak="sa",
@@ -46805,6 +47676,7 @@ characters.data={
},
[0x1A7F]={
category="mn",
+ combining=0xDC,
description="TAI THAM COMBINING CRYPTOGRAMMIC DOT",
direction="nsm",
linebreak="cm",
@@ -47048,6 +47920,125 @@ characters.data={
linebreak="sa",
unicodeslot=0x1AAD,
},
+ [0x1AB0]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING DOUBLED CIRCUMFLEX ACCENT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB0,
+ },
+ [0x1AB1]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING DIAERESIS-RING",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB1,
+ },
+ [0x1AB2]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING INFINITY",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB2,
+ },
+ [0x1AB3]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING DOWNWARDS ARROW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB3,
+ },
+ [0x1AB4]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING TRIPLE DOT",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB4,
+ },
+ [0x1AB5]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING X-X BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB5,
+ },
+ [0x1AB6]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING WIGGLY LINE BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB6,
+ },
+ [0x1AB7]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING OPEN MARK BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB7,
+ },
+ [0x1AB8]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING DOUBLE OPEN MARK BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB8,
+ },
+ [0x1AB9]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING LIGHT CENTRALIZATION STROKE BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1AB9,
+ },
+ [0x1ABA]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING STRONG CENTRALIZATION STROKE BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1ABA,
+ },
+ [0x1ABB]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING PARENTHESES ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1ABB,
+ },
+ [0x1ABC]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING DOUBLE PARENTHESES ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1ABC,
+ },
+ [0x1ABD]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING PARENTHESES BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1ABD,
+ },
+ [0x1ABE]={
+ category="me",
+ description="COMBINING PARENTHESES OVERLAY",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1ABE,
+ },
[0x1B00]={
category="mn",
description="BALINESE SIGN ULU RICEM",
@@ -47420,6 +48411,7 @@ characters.data={
},
[0x1B34]={
category="mn",
+ combining=0x7,
description="BALINESE SIGN REREKAN",
direction="nsm",
linebreak="cm",
@@ -47537,6 +48529,7 @@ characters.data={
},
[0x1B44]={
category="mc",
+ combining=0x9,
description="BALINESE ADEG ADEG",
direction="l",
linebreak="cm",
@@ -47782,6 +48775,7 @@ characters.data={
},
[0x1B6B]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING TEGEH",
direction="nsm",
linebreak="cm",
@@ -47789,6 +48783,7 @@ characters.data={
},
[0x1B6C]={
category="mn",
+ combining=0xDC,
description="BALINESE MUSICAL SYMBOL COMBINING ENDEP",
direction="nsm",
linebreak="cm",
@@ -47796,6 +48791,7 @@ characters.data={
},
[0x1B6D]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING KEMPUL",
direction="nsm",
linebreak="cm",
@@ -47803,6 +48799,7 @@ characters.data={
},
[0x1B6E]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING KEMPLI",
direction="nsm",
linebreak="cm",
@@ -47810,6 +48807,7 @@ characters.data={
},
[0x1B6F]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING JEGOGAN",
direction="nsm",
linebreak="cm",
@@ -47817,6 +48815,7 @@ characters.data={
},
[0x1B70]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING KEMPUL WITH JEGOGAN",
direction="nsm",
linebreak="cm",
@@ -47824,6 +48823,7 @@ characters.data={
},
[0x1B71]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING KEMPLI WITH JEGOGAN",
direction="nsm",
linebreak="cm",
@@ -47831,6 +48831,7 @@ characters.data={
},
[0x1B72]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING BENDE",
direction="nsm",
linebreak="cm",
@@ -47838,6 +48839,7 @@ characters.data={
},
[0x1B73]={
category="mn",
+ combining=0xE6,
description="BALINESE MUSICAL SYMBOL COMBINING GONG",
direction="nsm",
linebreak="cm",
@@ -48202,6 +49204,7 @@ characters.data={
},
[0x1BAA]={
category="mc",
+ combining=0x9,
description="SUNDANESE SIGN PAMAAEH",
direction="l",
linebreak="cm",
@@ -48209,6 +49212,7 @@ characters.data={
},
[0x1BAB]={
category="mn",
+ combining=0x9,
description="SUNDANESE SIGN VIRAMA",
direction="nsm",
linebreak="cm",
@@ -48217,14 +49221,14 @@ characters.data={
[0x1BAC]={
category="mc",
description="SUNDANESE CONSONANT SIGN PASANGAN MA",
- direction="l",
+ direction="nsm",
linebreak="cm",
unicodeslot=0x1BAC,
},
[0x1BAD]={
category="mc",
description="SUNDANESE CONSONANT SIGN PASANGAN WA",
- direction="l",
+ direction="nsm",
linebreak="cm",
unicodeslot=0x1BAD,
},
@@ -48622,6 +49626,7 @@ characters.data={
},
[0x1BE6]={
category="mn",
+ combining=0x7,
description="BATAK SIGN TOMPI",
direction="nsm",
linebreak="cm",
@@ -48706,6 +49711,7 @@ characters.data={
},
[0x1BF2]={
category="mc",
+ combining=0x9,
description="BATAK PANGOLAT",
direction="l",
linebreak="cm",
@@ -48713,6 +49719,7 @@ characters.data={
},
[0x1BF3]={
category="mc",
+ combining=0x9,
description="BATAK PANONGONAN",
direction="l",
linebreak="cm",
@@ -49133,6 +50140,7 @@ characters.data={
},
[0x1C37]={
category="mn",
+ combining=0x7,
description="LEPCHA SIGN NUKTA",
direction="nsm",
linebreak="cm",
@@ -49658,6 +50666,7 @@ characters.data={
},
[0x1CD0]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE KARSHANA",
direction="nsm",
linebreak="cm",
@@ -49665,6 +50674,7 @@ characters.data={
},
[0x1CD1]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE SHARA",
direction="nsm",
linebreak="cm",
@@ -49672,6 +50682,7 @@ characters.data={
},
[0x1CD2]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE PRENKHA",
direction="nsm",
linebreak="cm",
@@ -49686,6 +50697,7 @@ characters.data={
},
[0x1CD4]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN YAJURVEDIC MIDLINE SVARITA",
direction="nsm",
linebreak="cm",
@@ -49693,6 +50705,7 @@ characters.data={
},
[0x1CD5]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE YAJURVEDIC AGGRAVATED INDEPENDENT SVARITA",
direction="nsm",
linebreak="cm",
@@ -49700,6 +50713,7 @@ characters.data={
},
[0x1CD6]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE YAJURVEDIC INDEPENDENT SVARITA",
direction="nsm",
linebreak="cm",
@@ -49707,6 +50721,7 @@ characters.data={
},
[0x1CD7]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE YAJURVEDIC KATHAKA INDEPENDENT SVARITA",
direction="nsm",
linebreak="cm",
@@ -49714,6 +50729,7 @@ characters.data={
},
[0x1CD8]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE CANDRA BELOW",
direction="nsm",
linebreak="cm",
@@ -49721,6 +50737,7 @@ characters.data={
},
[0x1CD9]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE YAJURVEDIC KATHAKA INDEPENDENT SVARITA SCHROEDER",
direction="nsm",
linebreak="cm",
@@ -49728,6 +50745,7 @@ characters.data={
},
[0x1CDA]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE DOUBLE SVARITA",
direction="nsm",
linebreak="cm",
@@ -49735,6 +50753,7 @@ characters.data={
},
[0x1CDB]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE TRIPLE SVARITA",
direction="nsm",
linebreak="cm",
@@ -49742,6 +50761,7 @@ characters.data={
},
[0x1CDC]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE KATHAKA ANUDATTA",
direction="nsm",
linebreak="cm",
@@ -49749,6 +50769,7 @@ characters.data={
},
[0x1CDD]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE DOT BELOW",
direction="nsm",
linebreak="cm",
@@ -49756,6 +50777,7 @@ characters.data={
},
[0x1CDE]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE TWO DOTS BELOW",
direction="nsm",
linebreak="cm",
@@ -49763,6 +50785,7 @@ characters.data={
},
[0x1CDF]={
category="mn",
+ combining=0xDC,
description="VEDIC TONE THREE DOTS BELOW",
direction="nsm",
linebreak="cm",
@@ -49770,6 +50793,7 @@ characters.data={
},
[0x1CE0]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA",
direction="nsm",
linebreak="cm",
@@ -49784,6 +50808,7 @@ characters.data={
},
[0x1CE2]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN VISARGA SVARITA",
direction="nsm",
linebreak="cm",
@@ -49791,6 +50816,7 @@ characters.data={
},
[0x1CE3]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN VISARGA UDATTA",
direction="nsm",
linebreak="cm",
@@ -49798,6 +50824,7 @@ characters.data={
},
[0x1CE4]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN REVERSED VISARGA UDATTA",
direction="nsm",
linebreak="cm",
@@ -49805,6 +50832,7 @@ characters.data={
},
[0x1CE5]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN VISARGA ANUDATTA",
direction="nsm",
linebreak="cm",
@@ -49812,6 +50840,7 @@ characters.data={
},
[0x1CE6]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN REVERSED VISARGA ANUDATTA",
direction="nsm",
linebreak="cm",
@@ -49819,6 +50848,7 @@ characters.data={
},
[0x1CE7]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN VISARGA UDATTA WITH TAIL",
direction="nsm",
linebreak="cm",
@@ -49826,6 +50856,7 @@ characters.data={
},
[0x1CE8]={
category="mn",
+ combining=0x1,
description="VEDIC SIGN VISARGA ANUDATTA WITH TAIL",
direction="nsm",
linebreak="cm",
@@ -49861,6 +50892,7 @@ characters.data={
},
[0x1CED]={
category="mn",
+ combining=0xDC,
description="VEDIC SIGN TIRYAK",
direction="nsm",
linebreak="cm",
@@ -49910,6 +50942,7 @@ characters.data={
},
[0x1CF4]={
category="mn",
+ combining=0xE6,
description="VEDIC TONE CANDRA ABOVE",
direction="nsm",
linebreak="cm",
@@ -49929,6 +50962,22 @@ characters.data={
linebreak="al",
unicodeslot=0x1CF6,
},
+ [0x1CF8]={
+ category="mn",
+ combining=0xE6,
+ description="VEDIC TONE RING ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1CF8,
+ },
+ [0x1CF9]={
+ category="mn",
+ combining=0xE6,
+ description="VEDIC TONE DOUBLE RING ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1CF9,
+ },
[0x1D00]={
category="ll",
description="LATIN LETTER SMALL CAPITAL A",
@@ -50242,7 +51291,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL A",
direction="l",
linebreak="al",
- specials={ "super", 0x0041 },
+ specials={ "super", 0x41 },
unicodeslot=0x1D2C,
},
[0x1D2D]={
@@ -50250,7 +51299,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL AE",
direction="l",
linebreak="al",
- specials={ "super", 0x00C6 },
+ specials={ "super", 0xC6 },
unicodeslot=0x1D2D,
},
[0x1D2E]={
@@ -50258,7 +51307,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL B",
direction="l",
linebreak="al",
- specials={ "super", 0x0042 },
+ specials={ "super", 0x42 },
unicodeslot=0x1D2E,
},
[0x1D2F]={
@@ -50273,7 +51322,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL D",
direction="l",
linebreak="al",
- specials={ "super", 0x0044 },
+ specials={ "super", 0x44 },
unicodeslot=0x1D30,
},
[0x1D31]={
@@ -50281,7 +51330,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL E",
direction="l",
linebreak="al",
- specials={ "super", 0x0045 },
+ specials={ "super", 0x45 },
unicodeslot=0x1D31,
},
[0x1D32]={
@@ -50289,7 +51338,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL REVERSED E",
direction="l",
linebreak="al",
- specials={ "super", 0x018E },
+ specials={ "super", 0x18E },
unicodeslot=0x1D32,
},
[0x1D33]={
@@ -50297,7 +51346,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL G",
direction="l",
linebreak="al",
- specials={ "super", 0x0047 },
+ specials={ "super", 0x47 },
unicodeslot=0x1D33,
},
[0x1D34]={
@@ -50305,7 +51354,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL H",
direction="l",
linebreak="al",
- specials={ "super", 0x0048 },
+ specials={ "super", 0x48 },
unicodeslot=0x1D34,
},
[0x1D35]={
@@ -50313,7 +51362,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL I",
direction="l",
linebreak="al",
- specials={ "super", 0x0049 },
+ specials={ "super", 0x49 },
unicodeslot=0x1D35,
},
[0x1D36]={
@@ -50321,7 +51370,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL J",
direction="l",
linebreak="al",
- specials={ "super", 0x004A },
+ specials={ "super", 0x4A },
unicodeslot=0x1D36,
},
[0x1D37]={
@@ -50329,7 +51378,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL K",
direction="l",
linebreak="al",
- specials={ "super", 0x004B },
+ specials={ "super", 0x4B },
unicodeslot=0x1D37,
},
[0x1D38]={
@@ -50337,7 +51386,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL L",
direction="l",
linebreak="al",
- specials={ "super", 0x004C },
+ specials={ "super", 0x4C },
unicodeslot=0x1D38,
},
[0x1D39]={
@@ -50345,7 +51394,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL M",
direction="l",
linebreak="al",
- specials={ "super", 0x004D },
+ specials={ "super", 0x4D },
unicodeslot=0x1D39,
},
[0x1D3A]={
@@ -50353,7 +51402,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL N",
direction="l",
linebreak="al",
- specials={ "super", 0x004E },
+ specials={ "super", 0x4E },
unicodeslot=0x1D3A,
},
[0x1D3B]={
@@ -50368,7 +51417,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL O",
direction="l",
linebreak="al",
- specials={ "super", 0x004F },
+ specials={ "super", 0x4F },
unicodeslot=0x1D3C,
},
[0x1D3D]={
@@ -50376,7 +51425,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL OU",
direction="l",
linebreak="al",
- specials={ "super", 0x0222 },
+ specials={ "super", 0x222 },
unicodeslot=0x1D3D,
},
[0x1D3E]={
@@ -50384,7 +51433,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL P",
direction="l",
linebreak="al",
- specials={ "super", 0x0050 },
+ specials={ "super", 0x50 },
unicodeslot=0x1D3E,
},
[0x1D3F]={
@@ -50392,7 +51441,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL R",
direction="l",
linebreak="al",
- specials={ "super", 0x0052 },
+ specials={ "super", 0x52 },
unicodeslot=0x1D3F,
},
[0x1D40]={
@@ -50400,7 +51449,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL T",
direction="l",
linebreak="al",
- specials={ "super", 0x0054 },
+ specials={ "super", 0x54 },
unicodeslot=0x1D40,
},
[0x1D41]={
@@ -50408,7 +51457,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL U",
direction="l",
linebreak="al",
- specials={ "super", 0x0055 },
+ specials={ "super", 0x55 },
unicodeslot=0x1D41,
},
[0x1D42]={
@@ -50416,7 +51465,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL W",
direction="l",
linebreak="al",
- specials={ "super", 0x0057 },
+ specials={ "super", 0x57 },
unicodeslot=0x1D42,
},
[0x1D43]={
@@ -50424,7 +51473,7 @@ characters.data={
description="MODIFIER LETTER SMALL A",
direction="l",
linebreak="al",
- specials={ "super", 0x0061 },
+ specials={ "super", 0x61 },
unicodeslot=0x1D43,
},
[0x1D44]={
@@ -50432,7 +51481,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED A",
direction="l",
linebreak="al",
- specials={ "super", 0x0250 },
+ specials={ "super", 0x250 },
unicodeslot=0x1D44,
},
[0x1D45]={
@@ -50440,7 +51489,7 @@ characters.data={
description="MODIFIER LETTER SMALL ALPHA",
direction="l",
linebreak="al",
- specials={ "super", 0x0251 },
+ specials={ "super", 0x251 },
unicodeslot=0x1D45,
},
[0x1D46]={
@@ -50456,7 +51505,7 @@ characters.data={
description="MODIFIER LETTER SMALL B",
direction="l",
linebreak="al",
- specials={ "super", 0x0062 },
+ specials={ "super", 0x62 },
unicodeslot=0x1D47,
},
[0x1D48]={
@@ -50464,7 +51513,7 @@ characters.data={
description="MODIFIER LETTER SMALL D",
direction="l",
linebreak="al",
- specials={ "super", 0x0064 },
+ specials={ "super", 0x64 },
unicodeslot=0x1D48,
},
[0x1D49]={
@@ -50472,7 +51521,7 @@ characters.data={
description="MODIFIER LETTER SMALL E",
direction="l",
linebreak="al",
- specials={ "super", 0x0065 },
+ specials={ "super", 0x65 },
unicodeslot=0x1D49,
},
[0x1D4A]={
@@ -50480,7 +51529,7 @@ characters.data={
description="MODIFIER LETTER SMALL SCHWA",
direction="l",
linebreak="al",
- specials={ "super", 0x0259 },
+ specials={ "super", 0x259 },
unicodeslot=0x1D4A,
},
[0x1D4B]={
@@ -50488,7 +51537,7 @@ characters.data={
description="MODIFIER LETTER SMALL OPEN E",
direction="l",
linebreak="al",
- specials={ "super", 0x025B },
+ specials={ "super", 0x25B },
unicodeslot=0x1D4B,
},
[0x1D4C]={
@@ -50496,7 +51545,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED OPEN E",
direction="l",
linebreak="al",
- specials={ "super", 0x025C },
+ specials={ "super", 0x25C },
unicodeslot=0x1D4C,
},
[0x1D4D]={
@@ -50504,7 +51553,7 @@ characters.data={
description="MODIFIER LETTER SMALL G",
direction="l",
linebreak="al",
- specials={ "super", 0x0067 },
+ specials={ "super", 0x67 },
unicodeslot=0x1D4D,
},
[0x1D4E]={
@@ -50519,7 +51568,7 @@ characters.data={
description="MODIFIER LETTER SMALL K",
direction="l",
linebreak="al",
- specials={ "super", 0x006B },
+ specials={ "super", 0x6B },
unicodeslot=0x1D4F,
},
[0x1D50]={
@@ -50527,7 +51576,7 @@ characters.data={
description="MODIFIER LETTER SMALL M",
direction="l",
linebreak="al",
- specials={ "super", 0x006D },
+ specials={ "super", 0x6D },
unicodeslot=0x1D50,
},
[0x1D51]={
@@ -50535,7 +51584,7 @@ characters.data={
description="MODIFIER LETTER SMALL ENG",
direction="l",
linebreak="al",
- specials={ "super", 0x014B },
+ specials={ "super", 0x14B },
unicodeslot=0x1D51,
},
[0x1D52]={
@@ -50543,7 +51592,7 @@ characters.data={
description="MODIFIER LETTER SMALL O",
direction="l",
linebreak="al",
- specials={ "super", 0x006F },
+ specials={ "super", 0x6F },
unicodeslot=0x1D52,
},
[0x1D53]={
@@ -50551,7 +51600,7 @@ characters.data={
description="MODIFIER LETTER SMALL OPEN O",
direction="l",
linebreak="al",
- specials={ "super", 0x0254 },
+ specials={ "super", 0x254 },
unicodeslot=0x1D53,
},
[0x1D54]={
@@ -50575,7 +51624,7 @@ characters.data={
description="MODIFIER LETTER SMALL P",
direction="l",
linebreak="al",
- specials={ "super", 0x0070 },
+ specials={ "super", 0x70 },
unicodeslot=0x1D56,
},
[0x1D57]={
@@ -50583,7 +51632,7 @@ characters.data={
description="MODIFIER LETTER SMALL T",
direction="l",
linebreak="al",
- specials={ "super", 0x0074 },
+ specials={ "super", 0x74 },
unicodeslot=0x1D57,
},
[0x1D58]={
@@ -50591,7 +51640,7 @@ characters.data={
description="MODIFIER LETTER SMALL U",
direction="l",
linebreak="al",
- specials={ "super", 0x0075 },
+ specials={ "super", 0x75 },
unicodeslot=0x1D58,
},
[0x1D59]={
@@ -50607,7 +51656,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED M",
direction="l",
linebreak="al",
- specials={ "super", 0x026F },
+ specials={ "super", 0x26F },
unicodeslot=0x1D5A,
},
[0x1D5B]={
@@ -50615,7 +51664,7 @@ characters.data={
description="MODIFIER LETTER SMALL V",
direction="l",
linebreak="al",
- specials={ "super", 0x0076 },
+ specials={ "super", 0x76 },
unicodeslot=0x1D5B,
},
[0x1D5C]={
@@ -50631,7 +51680,7 @@ characters.data={
description="MODIFIER LETTER SMALL BETA",
direction="l",
linebreak="al",
- specials={ "super", 0x03B2 },
+ specials={ "super", 0x3B2 },
unicodeslot=0x1D5D,
},
[0x1D5E]={
@@ -50639,7 +51688,7 @@ characters.data={
description="MODIFIER LETTER SMALL GREEK GAMMA",
direction="l",
linebreak="al",
- specials={ "super", 0x03B3 },
+ specials={ "super", 0x3B3 },
unicodeslot=0x1D5E,
},
[0x1D5F]={
@@ -50647,7 +51696,7 @@ characters.data={
description="MODIFIER LETTER SMALL DELTA",
direction="l",
linebreak="al",
- specials={ "super", 0x03B4 },
+ specials={ "super", 0x3B4 },
unicodeslot=0x1D5F,
},
[0x1D60]={
@@ -50655,7 +51704,7 @@ characters.data={
description="MODIFIER LETTER SMALL GREEK PHI",
direction="l",
linebreak="al",
- specials={ "super", 0x03C6 },
+ specials={ "super", 0x3C6 },
unicodeslot=0x1D60,
},
[0x1D61]={
@@ -50663,7 +51712,7 @@ characters.data={
description="MODIFIER LETTER SMALL CHI",
direction="l",
linebreak="al",
- specials={ "super", 0x03C7 },
+ specials={ "super", 0x3C7 },
unicodeslot=0x1D61,
},
[0x1D62]={
@@ -50671,7 +51720,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER I",
direction="l",
linebreak="al",
- specials={ "sub", 0x0069 },
+ specials={ "sub", 0x69 },
unicodeslot=0x1D62,
},
[0x1D63]={
@@ -50679,7 +51728,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER R",
direction="l",
linebreak="al",
- specials={ "sub", 0x0072 },
+ specials={ "sub", 0x72 },
unicodeslot=0x1D63,
},
[0x1D64]={
@@ -50687,7 +51736,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER U",
direction="l",
linebreak="al",
- specials={ "sub", 0x0075 },
+ specials={ "sub", 0x75 },
unicodeslot=0x1D64,
},
[0x1D65]={
@@ -50695,7 +51744,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER V",
direction="l",
linebreak="al",
- specials={ "sub", 0x0076 },
+ specials={ "sub", 0x76 },
unicodeslot=0x1D65,
},
[0x1D66]={
@@ -50703,7 +51752,7 @@ characters.data={
description="GREEK SUBSCRIPT SMALL LETTER BETA",
direction="l",
linebreak="al",
- specials={ "sub", 0x03B2 },
+ specials={ "sub", 0x3B2 },
unicodeslot=0x1D66,
},
[0x1D67]={
@@ -50711,7 +51760,7 @@ characters.data={
description="GREEK SUBSCRIPT SMALL LETTER GAMMA",
direction="l",
linebreak="al",
- specials={ "sub", 0x03B3 },
+ specials={ "sub", 0x3B3 },
unicodeslot=0x1D67,
},
[0x1D68]={
@@ -50719,7 +51768,7 @@ characters.data={
description="GREEK SUBSCRIPT SMALL LETTER RHO",
direction="l",
linebreak="al",
- specials={ "sub", 0x03C1 },
+ specials={ "sub", 0x3C1 },
unicodeslot=0x1D68,
},
[0x1D69]={
@@ -50727,7 +51776,7 @@ characters.data={
description="GREEK SUBSCRIPT SMALL LETTER PHI",
direction="l",
linebreak="al",
- specials={ "sub", 0x03C6 },
+ specials={ "sub", 0x3C6 },
unicodeslot=0x1D69,
},
[0x1D6A]={
@@ -50735,7 +51784,7 @@ characters.data={
description="GREEK SUBSCRIPT SMALL LETTER CHI",
direction="l",
linebreak="al",
- specials={ "sub", 0x03C7 },
+ specials={ "sub", 0x3C7 },
unicodeslot=0x1D6A,
},
[0x1D6B]={
@@ -50750,7 +51799,7 @@ characters.data={
description="LATIN SMALL LETTER B WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0062,
+ shcode=0x62,
unicodeslot=0x1D6C,
},
[0x1D6D]={
@@ -50758,7 +51807,7 @@ characters.data={
description="LATIN SMALL LETTER D WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0064,
+ shcode=0x64,
unicodeslot=0x1D6D,
},
[0x1D6E]={
@@ -50766,7 +51815,7 @@ characters.data={
description="LATIN SMALL LETTER F WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0066,
+ shcode=0x66,
unicodeslot=0x1D6E,
},
[0x1D6F]={
@@ -50774,7 +51823,7 @@ characters.data={
description="LATIN SMALL LETTER M WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x006D,
+ shcode=0x6D,
unicodeslot=0x1D6F,
},
[0x1D70]={
@@ -50782,7 +51831,7 @@ characters.data={
description="LATIN SMALL LETTER N WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x006E,
+ shcode=0x6E,
unicodeslot=0x1D70,
},
[0x1D71]={
@@ -50790,7 +51839,7 @@ characters.data={
description="LATIN SMALL LETTER P WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0070,
+ shcode=0x70,
unicodeslot=0x1D71,
},
[0x1D72]={
@@ -50798,7 +51847,7 @@ characters.data={
description="LATIN SMALL LETTER R WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0072,
+ shcode=0x72,
unicodeslot=0x1D72,
},
[0x1D73]={
@@ -50806,7 +51855,7 @@ characters.data={
description="LATIN SMALL LETTER R WITH FISHHOOK AND MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0072,
+ shcode=0x72,
unicodeslot=0x1D73,
},
[0x1D74]={
@@ -50814,7 +51863,7 @@ characters.data={
description="LATIN SMALL LETTER S WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0073,
+ shcode=0x73,
unicodeslot=0x1D74,
},
[0x1D75]={
@@ -50822,7 +51871,7 @@ characters.data={
description="LATIN SMALL LETTER T WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x0074,
+ shcode=0x74,
unicodeslot=0x1D75,
},
[0x1D76]={
@@ -50830,7 +51879,7 @@ characters.data={
description="LATIN SMALL LETTER Z WITH MIDDLE TILDE",
direction="l",
linebreak="al",
- shcode=0x007A,
+ shcode=0x7A,
unicodeslot=0x1D76,
},
[0x1D77]={
@@ -50845,7 +51894,7 @@ characters.data={
description="MODIFIER LETTER CYRILLIC EN",
direction="l",
linebreak="al",
- specials={ "super", 0x043D },
+ specials={ "super", 0x43D },
unicodeslot=0x1D78,
},
[0x1D79]={
@@ -50874,7 +51923,7 @@ characters.data={
description="LATIN SMALL LETTER IOTA WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0269,
+ shcode=0x269,
unicodeslot=0x1D7C,
},
[0x1D7D]={
@@ -50882,7 +51931,7 @@ characters.data={
description="LATIN SMALL LETTER P WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0070,
+ shcode=0x70,
uccode=0x2C63,
unicodeslot=0x1D7D,
},
@@ -50898,7 +51947,7 @@ characters.data={
description="LATIN SMALL LETTER UPSILON WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x028A,
+ shcode=0x28A,
unicodeslot=0x1D7F,
},
[0x1D80]={
@@ -50906,7 +51955,7 @@ characters.data={
description="LATIN SMALL LETTER B WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0062,
+ shcode=0x62,
unicodeslot=0x1D80,
},
[0x1D81]={
@@ -50914,7 +51963,7 @@ characters.data={
description="LATIN SMALL LETTER D WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0064,
+ shcode=0x64,
unicodeslot=0x1D81,
},
[0x1D82]={
@@ -50922,7 +51971,7 @@ characters.data={
description="LATIN SMALL LETTER F WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0066,
+ shcode=0x66,
unicodeslot=0x1D82,
},
[0x1D83]={
@@ -50930,7 +51979,7 @@ characters.data={
description="LATIN SMALL LETTER G WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0067,
+ shcode=0x67,
unicodeslot=0x1D83,
},
[0x1D84]={
@@ -50938,7 +51987,7 @@ characters.data={
description="LATIN SMALL LETTER K WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x006B,
+ shcode=0x6B,
unicodeslot=0x1D84,
},
[0x1D85]={
@@ -50946,7 +51995,7 @@ characters.data={
description="LATIN SMALL LETTER L WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x006C,
+ shcode=0x6C,
unicodeslot=0x1D85,
},
[0x1D86]={
@@ -50954,7 +52003,7 @@ characters.data={
description="LATIN SMALL LETTER M WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x006D,
+ shcode=0x6D,
unicodeslot=0x1D86,
},
[0x1D87]={
@@ -50962,7 +52011,7 @@ characters.data={
description="LATIN SMALL LETTER N WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x006E,
+ shcode=0x6E,
unicodeslot=0x1D87,
},
[0x1D88]={
@@ -50970,7 +52019,7 @@ characters.data={
description="LATIN SMALL LETTER P WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0070,
+ shcode=0x70,
unicodeslot=0x1D88,
},
[0x1D89]={
@@ -50978,7 +52027,7 @@ characters.data={
description="LATIN SMALL LETTER R WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0072,
+ shcode=0x72,
unicodeslot=0x1D89,
},
[0x1D8A]={
@@ -50986,7 +52035,7 @@ characters.data={
description="LATIN SMALL LETTER S WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0073,
+ shcode=0x73,
unicodeslot=0x1D8A,
},
[0x1D8B]={
@@ -50994,7 +52043,7 @@ characters.data={
description="LATIN SMALL LETTER ESH WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0283,
+ shcode=0x283,
unicodeslot=0x1D8B,
},
[0x1D8C]={
@@ -51002,7 +52051,7 @@ characters.data={
description="LATIN SMALL LETTER V WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0076,
+ shcode=0x76,
unicodeslot=0x1D8C,
},
[0x1D8D]={
@@ -51010,7 +52059,7 @@ characters.data={
description="LATIN SMALL LETTER X WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x0078,
+ shcode=0x78,
unicodeslot=0x1D8D,
},
[0x1D8E]={
@@ -51018,7 +52067,7 @@ characters.data={
description="LATIN SMALL LETTER Z WITH PALATAL HOOK",
direction="l",
linebreak="al",
- shcode=0x007A,
+ shcode=0x7A,
unicodeslot=0x1D8E,
},
[0x1D8F]={
@@ -51026,7 +52075,7 @@ characters.data={
description="LATIN SMALL LETTER A WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0061,
+ shcode=0x61,
unicodeslot=0x1D8F,
},
[0x1D90]={
@@ -51034,7 +52083,7 @@ characters.data={
description="LATIN SMALL LETTER ALPHA WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0251,
+ shcode=0x251,
unicodeslot=0x1D90,
},
[0x1D91]={
@@ -51042,7 +52091,7 @@ characters.data={
description="LATIN SMALL LETTER D WITH HOOK AND TAIL",
direction="l",
linebreak="al",
- shcode=0x0064,
+ shcode=0x64,
unicodeslot=0x1D91,
},
[0x1D92]={
@@ -51050,7 +52099,7 @@ characters.data={
description="LATIN SMALL LETTER E WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0065,
+ shcode=0x65,
unicodeslot=0x1D92,
},
[0x1D93]={
@@ -51072,7 +52121,7 @@ characters.data={
description="LATIN SMALL LETTER SCHWA WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0259,
+ shcode=0x259,
unicodeslot=0x1D95,
},
[0x1D96]={
@@ -51080,7 +52129,7 @@ characters.data={
description="LATIN SMALL LETTER I WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0069,
+ shcode=0x69,
unicodeslot=0x1D96,
},
[0x1D97]={
@@ -51095,7 +52144,7 @@ characters.data={
description="LATIN SMALL LETTER ESH WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0283,
+ shcode=0x283,
unicodeslot=0x1D98,
},
[0x1D99]={
@@ -51103,7 +52152,7 @@ characters.data={
description="LATIN SMALL LETTER U WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0075,
+ shcode=0x75,
unicodeslot=0x1D99,
},
[0x1D9A]={
@@ -51111,7 +52160,7 @@ characters.data={
description="LATIN SMALL LETTER EZH WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- shcode=0x0292,
+ shcode=0x292,
unicodeslot=0x1D9A,
},
[0x1D9B]={
@@ -51119,7 +52168,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED ALPHA",
direction="l",
linebreak="al",
- specials={ "super", 0x0252 },
+ specials={ "super", 0x252 },
unicodeslot=0x1D9B,
},
[0x1D9C]={
@@ -51127,7 +52176,7 @@ characters.data={
description="MODIFIER LETTER SMALL C",
direction="l",
linebreak="al",
- specials={ "super", 0x0063 },
+ specials={ "super", 0x63 },
unicodeslot=0x1D9C,
},
[0x1D9D]={
@@ -51135,7 +52184,7 @@ characters.data={
description="MODIFIER LETTER SMALL C WITH CURL",
direction="l",
linebreak="al",
- specials={ "super", 0x0255 },
+ specials={ "super", 0x255 },
unicodeslot=0x1D9D,
},
[0x1D9E]={
@@ -51143,7 +52192,7 @@ characters.data={
description="MODIFIER LETTER SMALL ETH",
direction="l",
linebreak="al",
- specials={ "super", 0x00F0 },
+ specials={ "super", 0xF0 },
unicodeslot=0x1D9E,
},
[0x1D9F]={
@@ -51151,7 +52200,7 @@ characters.data={
description="MODIFIER LETTER SMALL REVERSED OPEN E",
direction="l",
linebreak="al",
- specials={ "super", 0x025C },
+ specials={ "super", 0x25C },
unicodeslot=0x1D9F,
},
[0x1DA0]={
@@ -51159,7 +52208,7 @@ characters.data={
description="MODIFIER LETTER SMALL F",
direction="l",
linebreak="al",
- specials={ "super", 0x0066 },
+ specials={ "super", 0x66 },
unicodeslot=0x1DA0,
},
[0x1DA1]={
@@ -51167,7 +52216,7 @@ characters.data={
description="MODIFIER LETTER SMALL DOTLESS J WITH STROKE",
direction="l",
linebreak="al",
- specials={ "super", 0x025F },
+ specials={ "super", 0x25F },
unicodeslot=0x1DA1,
},
[0x1DA2]={
@@ -51175,7 +52224,7 @@ characters.data={
description="MODIFIER LETTER SMALL SCRIPT G",
direction="l",
linebreak="al",
- specials={ "super", 0x0261 },
+ specials={ "super", 0x261 },
unicodeslot=0x1DA2,
},
[0x1DA3]={
@@ -51183,7 +52232,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED H",
direction="l",
linebreak="al",
- specials={ "super", 0x0265 },
+ specials={ "super", 0x265 },
unicodeslot=0x1DA3,
},
[0x1DA4]={
@@ -51191,7 +52240,7 @@ characters.data={
description="MODIFIER LETTER SMALL I WITH STROKE",
direction="l",
linebreak="al",
- specials={ "super", 0x0268 },
+ specials={ "super", 0x268 },
unicodeslot=0x1DA4,
},
[0x1DA5]={
@@ -51199,7 +52248,7 @@ characters.data={
description="MODIFIER LETTER SMALL IOTA",
direction="l",
linebreak="al",
- specials={ "super", 0x0269 },
+ specials={ "super", 0x269 },
unicodeslot=0x1DA5,
},
[0x1DA6]={
@@ -51207,7 +52256,7 @@ characters.data={
description="MODIFIER LETTER SMALL CAPITAL I",
direction="l",
linebreak="al",
- specials={ "super", 0x026A },
+ specials={ "super", 0x26A },
unicodeslot=0x1DA6,
},
[0x1DA7]={
@@ -51223,7 +52272,7 @@ characters.data={
description="MODIFIER LETTER SMALL J WITH CROSSED-TAIL",
direction="l",
linebreak="al",
- specials={ "super", 0x029D },
+ specials={ "super", 0x29D },
unicodeslot=0x1DA8,
},
[0x1DA9]={
@@ -51231,7 +52280,7 @@ characters.data={
description="MODIFIER LETTER SMALL L WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x026D },
+ specials={ "super", 0x26D },
unicodeslot=0x1DA9,
},
[0x1DAA]={
@@ -51247,7 +52296,7 @@ characters.data={
description="MODIFIER LETTER SMALL CAPITAL L",
direction="l",
linebreak="al",
- specials={ "super", 0x029F },
+ specials={ "super", 0x29F },
unicodeslot=0x1DAB,
},
[0x1DAC]={
@@ -51255,7 +52304,7 @@ characters.data={
description="MODIFIER LETTER SMALL M WITH HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x0271 },
+ specials={ "super", 0x271 },
unicodeslot=0x1DAC,
},
[0x1DAD]={
@@ -51263,7 +52312,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED M WITH LONG LEG",
direction="l",
linebreak="al",
- specials={ "super", 0x0270 },
+ specials={ "super", 0x270 },
unicodeslot=0x1DAD,
},
[0x1DAE]={
@@ -51271,7 +52320,7 @@ characters.data={
description="MODIFIER LETTER SMALL N WITH LEFT HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x0272 },
+ specials={ "super", 0x272 },
unicodeslot=0x1DAE,
},
[0x1DAF]={
@@ -51279,7 +52328,7 @@ characters.data={
description="MODIFIER LETTER SMALL N WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x0273 },
+ specials={ "super", 0x273 },
unicodeslot=0x1DAF,
},
[0x1DB0]={
@@ -51287,7 +52336,7 @@ characters.data={
description="MODIFIER LETTER SMALL CAPITAL N",
direction="l",
linebreak="al",
- specials={ "super", 0x0274 },
+ specials={ "super", 0x274 },
unicodeslot=0x1DB0,
},
[0x1DB1]={
@@ -51295,7 +52344,7 @@ characters.data={
description="MODIFIER LETTER SMALL BARRED O",
direction="l",
linebreak="al",
- specials={ "super", 0x0275 },
+ specials={ "super", 0x275 },
unicodeslot=0x1DB1,
},
[0x1DB2]={
@@ -51303,7 +52352,7 @@ characters.data={
description="MODIFIER LETTER SMALL PHI",
direction="l",
linebreak="al",
- specials={ "super", 0x0278 },
+ specials={ "super", 0x278 },
unicodeslot=0x1DB2,
},
[0x1DB3]={
@@ -51311,7 +52360,7 @@ characters.data={
description="MODIFIER LETTER SMALL S WITH HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x0282 },
+ specials={ "super", 0x282 },
unicodeslot=0x1DB3,
},
[0x1DB4]={
@@ -51319,7 +52368,7 @@ characters.data={
description="MODIFIER LETTER SMALL ESH",
direction="l",
linebreak="al",
- specials={ "super", 0x0283 },
+ specials={ "super", 0x283 },
unicodeslot=0x1DB4,
},
[0x1DB5]={
@@ -51327,7 +52376,7 @@ characters.data={
description="MODIFIER LETTER SMALL T WITH PALATAL HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x01AB },
+ specials={ "super", 0x1AB },
unicodeslot=0x1DB5,
},
[0x1DB6]={
@@ -51335,7 +52384,7 @@ characters.data={
description="MODIFIER LETTER SMALL U BAR",
direction="l",
linebreak="al",
- specials={ "super", 0x0289 },
+ specials={ "super", 0x289 },
unicodeslot=0x1DB6,
},
[0x1DB7]={
@@ -51343,7 +52392,7 @@ characters.data={
description="MODIFIER LETTER SMALL UPSILON",
direction="l",
linebreak="al",
- specials={ "super", 0x028A },
+ specials={ "super", 0x28A },
unicodeslot=0x1DB7,
},
[0x1DB8]={
@@ -51359,7 +52408,7 @@ characters.data={
description="MODIFIER LETTER SMALL V WITH HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x028B },
+ specials={ "super", 0x28B },
unicodeslot=0x1DB9,
},
[0x1DBA]={
@@ -51367,7 +52416,7 @@ characters.data={
description="MODIFIER LETTER SMALL TURNED V",
direction="l",
linebreak="al",
- specials={ "super", 0x028C },
+ specials={ "super", 0x28C },
unicodeslot=0x1DBA,
},
[0x1DBB]={
@@ -51375,7 +52424,7 @@ characters.data={
description="MODIFIER LETTER SMALL Z",
direction="l",
linebreak="al",
- specials={ "super", 0x007A },
+ specials={ "super", 0x7A },
unicodeslot=0x1DBB,
},
[0x1DBC]={
@@ -51383,7 +52432,7 @@ characters.data={
description="MODIFIER LETTER SMALL Z WITH RETROFLEX HOOK",
direction="l",
linebreak="al",
- specials={ "super", 0x0290 },
+ specials={ "super", 0x290 },
unicodeslot=0x1DBC,
},
[0x1DBD]={
@@ -51391,7 +52440,7 @@ characters.data={
description="MODIFIER LETTER SMALL Z WITH CURL",
direction="l",
linebreak="al",
- specials={ "super", 0x0291 },
+ specials={ "super", 0x291 },
unicodeslot=0x1DBD,
},
[0x1DBE]={
@@ -51399,7 +52448,7 @@ characters.data={
description="MODIFIER LETTER SMALL EZH",
direction="l",
linebreak="al",
- specials={ "super", 0x0292 },
+ specials={ "super", 0x292 },
unicodeslot=0x1DBE,
},
[0x1DBF]={
@@ -51407,11 +52456,12 @@ characters.data={
description="MODIFIER LETTER SMALL THETA",
direction="l",
linebreak="al",
- specials={ "super", 0x03B8 },
+ specials={ "super", 0x3B8 },
unicodeslot=0x1DBF,
},
[0x1DC0]={
category="mn",
+ combining=0xE6,
description="COMBINING DOTTED GRAVE ACCENT",
direction="nsm",
linebreak="cm",
@@ -51419,6 +52469,7 @@ characters.data={
},
[0x1DC1]={
category="mn",
+ combining=0xE6,
description="COMBINING DOTTED ACUTE ACCENT",
direction="nsm",
linebreak="cm",
@@ -51426,6 +52477,7 @@ characters.data={
},
[0x1DC2]={
category="mn",
+ combining=0xDC,
description="COMBINING SNAKE BELOW",
direction="nsm",
linebreak="cm",
@@ -51433,6 +52485,7 @@ characters.data={
},
[0x1DC3]={
category="mn",
+ combining=0xE6,
description="COMBINING SUSPENSION MARK",
direction="nsm",
linebreak="cm",
@@ -51440,6 +52493,7 @@ characters.data={
},
[0x1DC4]={
category="mn",
+ combining=0xE6,
description="COMBINING MACRON-ACUTE",
direction="nsm",
linebreak="cm",
@@ -51447,6 +52501,7 @@ characters.data={
},
[0x1DC5]={
category="mn",
+ combining=0xE6,
description="COMBINING GRAVE-MACRON",
direction="nsm",
linebreak="cm",
@@ -51454,6 +52509,7 @@ characters.data={
},
[0x1DC6]={
category="mn",
+ combining=0xE6,
description="COMBINING MACRON-GRAVE",
direction="nsm",
linebreak="cm",
@@ -51461,6 +52517,7 @@ characters.data={
},
[0x1DC7]={
category="mn",
+ combining=0xE6,
description="COMBINING ACUTE-MACRON",
direction="nsm",
linebreak="cm",
@@ -51468,6 +52525,7 @@ characters.data={
},
[0x1DC8]={
category="mn",
+ combining=0xE6,
description="COMBINING GRAVE-ACUTE-GRAVE",
direction="nsm",
linebreak="cm",
@@ -51475,6 +52533,7 @@ characters.data={
},
[0x1DC9]={
category="mn",
+ combining=0xE6,
description="COMBINING ACUTE-GRAVE-ACUTE",
direction="nsm",
linebreak="cm",
@@ -51482,6 +52541,7 @@ characters.data={
},
[0x1DCA]={
category="mn",
+ combining=0xDC,
description="COMBINING LATIN SMALL LETTER R BELOW",
direction="nsm",
linebreak="cm",
@@ -51489,6 +52549,7 @@ characters.data={
},
[0x1DCB]={
category="mn",
+ combining=0xE6,
description="COMBINING BREVE-MACRON",
direction="nsm",
linebreak="cm",
@@ -51496,6 +52557,7 @@ characters.data={
},
[0x1DCC]={
category="mn",
+ combining=0xE6,
description="COMBINING MACRON-BREVE",
direction="nsm",
linebreak="cm",
@@ -51503,6 +52565,7 @@ characters.data={
},
[0x1DCD]={
category="mn",
+ combining=0xEA,
description="COMBINING DOUBLE CIRCUMFLEX ABOVE",
direction="nsm",
linebreak="cm",
@@ -51510,6 +52573,7 @@ characters.data={
},
[0x1DCE]={
category="mn",
+ combining=0xD6,
description="COMBINING OGONEK ABOVE",
direction="nsm",
linebreak="cm",
@@ -51517,6 +52581,7 @@ characters.data={
},
[0x1DCF]={
category="mn",
+ combining=0xDC,
description="COMBINING ZIGZAG BELOW",
direction="nsm",
linebreak="cm",
@@ -51524,6 +52589,7 @@ characters.data={
},
[0x1DD0]={
category="mn",
+ combining=0xCA,
description="COMBINING IS BELOW",
direction="nsm",
linebreak="cm",
@@ -51531,6 +52597,7 @@ characters.data={
},
[0x1DD1]={
category="mn",
+ combining=0xE6,
description="COMBINING UR ABOVE",
direction="nsm",
linebreak="cm",
@@ -51538,6 +52605,7 @@ characters.data={
},
[0x1DD2]={
category="mn",
+ combining=0xE6,
description="COMBINING US ABOVE",
direction="nsm",
linebreak="cm",
@@ -51545,6 +52613,7 @@ characters.data={
},
[0x1DD3]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER FLATTENED OPEN A ABOVE",
direction="nsm",
linebreak="cm",
@@ -51552,6 +52621,7 @@ characters.data={
},
[0x1DD4]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER AE",
direction="nsm",
linebreak="cm",
@@ -51559,6 +52629,7 @@ characters.data={
},
[0x1DD5]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER AO",
direction="nsm",
linebreak="cm",
@@ -51566,6 +52637,7 @@ characters.data={
},
[0x1DD6]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER AV",
direction="nsm",
linebreak="cm",
@@ -51573,6 +52645,7 @@ characters.data={
},
[0x1DD7]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER C CEDILLA",
direction="nsm",
linebreak="cm",
@@ -51580,6 +52653,7 @@ characters.data={
},
[0x1DD8]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER INSULAR D",
direction="nsm",
linebreak="cm",
@@ -51587,6 +52661,7 @@ characters.data={
},
[0x1DD9]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER ETH",
direction="nsm",
linebreak="cm",
@@ -51594,6 +52669,7 @@ characters.data={
},
[0x1DDA]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER G",
direction="nsm",
linebreak="cm",
@@ -51601,6 +52677,7 @@ characters.data={
},
[0x1DDB]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN LETTER SMALL CAPITAL G",
direction="nsm",
linebreak="cm",
@@ -51608,6 +52685,7 @@ characters.data={
},
[0x1DDC]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER K",
direction="nsm",
linebreak="cm",
@@ -51615,6 +52693,7 @@ characters.data={
},
[0x1DDD]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER L",
direction="nsm",
linebreak="cm",
@@ -51622,6 +52701,7 @@ characters.data={
},
[0x1DDE]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN LETTER SMALL CAPITAL L",
direction="nsm",
linebreak="cm",
@@ -51629,6 +52709,7 @@ characters.data={
},
[0x1DDF]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN LETTER SMALL CAPITAL M",
direction="nsm",
linebreak="cm",
@@ -51636,6 +52717,7 @@ characters.data={
},
[0x1DE0]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER N",
direction="nsm",
linebreak="cm",
@@ -51643,6 +52725,7 @@ characters.data={
},
[0x1DE1]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN LETTER SMALL CAPITAL N",
direction="nsm",
linebreak="cm",
@@ -51650,6 +52733,7 @@ characters.data={
},
[0x1DE2]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN LETTER SMALL CAPITAL R",
direction="nsm",
linebreak="cm",
@@ -51657,6 +52741,7 @@ characters.data={
},
[0x1DE3]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER R ROTUNDA",
direction="nsm",
linebreak="cm",
@@ -51664,6 +52749,7 @@ characters.data={
},
[0x1DE4]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER S",
direction="nsm",
linebreak="cm",
@@ -51671,6 +52757,7 @@ characters.data={
},
[0x1DE5]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER LONG S",
direction="nsm",
linebreak="cm",
@@ -51678,13 +52765,135 @@ characters.data={
},
[0x1DE6]={
category="mn",
+ combining=0xE6,
description="COMBINING LATIN SMALL LETTER Z",
direction="nsm",
linebreak="cm",
unicodeslot=0x1DE6,
},
+ [0x1DE7]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER ALPHA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DE7,
+ },
+ [0x1DE8]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER B",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DE8,
+ },
+ [0x1DE9]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER BETA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DE9,
+ },
+ [0x1DEA]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER SCHWA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DEA,
+ },
+ [0x1DEB]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER F",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DEB,
+ },
+ [0x1DEC]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER L WITH DOUBLE MIDDLE TILDE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DEC,
+ },
+ [0x1DED]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER O WITH LIGHT CENTRALIZATION STROKE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DED,
+ },
+ [0x1DEE]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER P",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DEE,
+ },
+ [0x1DEF]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER ESH",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DEF,
+ },
+ [0x1DF0]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER U WITH LIGHT CENTRALIZATION STROKE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DF0,
+ },
+ [0x1DF1]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER W",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DF1,
+ },
+ [0x1DF2]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER A WITH DIAERESIS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DF2,
+ },
+ [0x1DF3]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER O WITH DIAERESIS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DF3,
+ },
+ [0x1DF4]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING LATIN SMALL LETTER U WITH DIAERESIS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DF4,
+ },
+ [0x1DF5]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING UP TACK ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1DF5,
+ },
[0x1DFC]={
category="mn",
+ combining=0xE9,
description="COMBINING DOUBLE INVERTED BREVE BELOW",
direction="nsm",
linebreak="cm",
@@ -51692,6 +52901,7 @@ characters.data={
},
[0x1DFD]={
category="mn",
+ combining=0xDC,
description="COMBINING ALMOST EQUAL TO BELOW",
direction="nsm",
linebreak="cm",
@@ -51699,6 +52909,7 @@ characters.data={
},
[0x1DFE]={
category="mn",
+ combining=0xE6,
description="COMBINING LEFT ARROWHEAD ABOVE",
direction="nsm",
linebreak="cm",
@@ -51706,6 +52917,7 @@ characters.data={
},
[0x1DFF]={
category="mn",
+ combining=0xDC,
description="COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW",
direction="nsm",
linebreak="cm",
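The entries added in the hunks above gain a combining field holding the Unicode canonical combining class of the mark: 0xE6 (230) for marks placed above the base, 0xDC (220) for marks placed below, and 0xE9 (233) for double marks below. A minimal Lua sketch of how this field might be read, assuming characters.data has been loaded as in this file; the helper name combiningclass is illustrative and not part of the patch:

    -- look up the canonical combining class of a slot;
    -- returns 0 for entries that carry no combining field
    local function combiningclass(slot)
        local entry = characters.data[slot]
        return entry and entry.combining or 0
    end

    print(combiningclass(0x1DE7)) -- 230 (0xE6): COMBINING LATIN SMALL LETTER ALPHA, placed above
    print(combiningclass(0x1DFD)) -- 220 (0xDC): COMBINING ALMOST EQUAL TO BELOW, placed below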
@@ -51718,8 +52930,8 @@ characters.data={
direction="l",
lccode=0x1E01,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0325 },
+ shcode=0x41,
+ specials={ "char", 0x41, 0x325 },
unicodeslot=0x1E00,
},
[0x1E01]={
@@ -51728,8 +52940,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH RING BELOW",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0325 },
+ shcode=0x61,
+ specials={ "char", 0x61, 0x325 },
uccode=0x1E00,
unicodeslot=0x1E01,
},
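The shcode/specials pairs rewritten in the hunks that follow keep the same data, only with the hexadecimal literals shortened (0x0041 becomes 0x41, 0x0325 becomes 0x325, and so on). A specials entry of the form { "char", base, mark } records the canonical decomposition of the precomposed letter (a few entries use "compat" for compatibility decompositions), while shcode gives the bare base letter. A minimal Lua sketch of reading such an entry, again assuming characters.data is loaded; the function name decompose is illustrative only:

    -- expand a precomposed slot into its base character plus combining mark,
    -- following the "char" (canonical) specials shown in these entries
    local function decompose(slot)
        local entry    = characters.data[slot]
        local specials = entry and entry.specials
        if specials and specials[1] == "char" then
            return specials[2], specials[3]
        end
        return slot
    end

    print(decompose(0x1E00)) -- 65  805 (0x41 = A, 0x325 = COMBINING RING BELOW)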
@@ -51740,8 +52952,8 @@ characters.data={
direction="l",
lccode=0x1E03,
linebreak="al",
- shcode=0x0042,
- specials={ "char", 0x0042, 0x0307 },
+ shcode=0x42,
+ specials={ "char", 0x42, 0x307 },
unicodeslot=0x1E02,
},
[0x1E03]={
@@ -51750,8 +52962,8 @@ characters.data={
description="LATIN SMALL LETTER B WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0062,
- specials={ "char", 0x0062, 0x0307 },
+ shcode=0x62,
+ specials={ "char", 0x62, 0x307 },
uccode=0x1E02,
unicodeslot=0x1E03,
},
@@ -51762,8 +52974,8 @@ characters.data={
direction="l",
lccode=0x1E05,
linebreak="al",
- shcode=0x0042,
- specials={ "char", 0x0042, 0x0323 },
+ shcode=0x42,
+ specials={ "char", 0x42, 0x323 },
unicodeslot=0x1E04,
},
[0x1E05]={
@@ -51772,8 +52984,8 @@ characters.data={
description="LATIN SMALL LETTER B WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0062,
- specials={ "char", 0x0062, 0x0323 },
+ shcode=0x62,
+ specials={ "char", 0x62, 0x323 },
uccode=0x1E04,
unicodeslot=0x1E05,
},
@@ -51784,8 +52996,8 @@ characters.data={
direction="l",
lccode=0x1E07,
linebreak="al",
- shcode=0x0042,
- specials={ "char", 0x0042, 0x0331 },
+ shcode=0x42,
+ specials={ "char", 0x42, 0x331 },
unicodeslot=0x1E06,
},
[0x1E07]={
@@ -51794,8 +53006,8 @@ characters.data={
description="LATIN SMALL LETTER B WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x0062,
- specials={ "char", 0x0062, 0x0331 },
+ shcode=0x62,
+ specials={ "char", 0x62, 0x331 },
uccode=0x1E06,
unicodeslot=0x1E07,
},
@@ -51806,8 +53018,8 @@ characters.data={
direction="l",
lccode=0x1E09,
linebreak="al",
- shcode=0x0043,
- specials={ "char", 0x00C7, 0x0301 },
+ shcode=0x43,
+ specials={ "char", 0xC7, 0x301 },
unicodeslot=0x1E08,
},
[0x1E09]={
@@ -51816,8 +53028,8 @@ characters.data={
description="LATIN SMALL LETTER C WITH CEDILLA AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0063,
- specials={ "char", 0x00E7, 0x0301 },
+ shcode=0x63,
+ specials={ "char", 0xE7, 0x301 },
uccode=0x1E08,
unicodeslot=0x1E09,
},
@@ -51828,8 +53040,8 @@ characters.data={
direction="l",
lccode=0x1E0B,
linebreak="al",
- shcode=0x0044,
- specials={ "char", 0x0044, 0x0307 },
+ shcode=0x44,
+ specials={ "char", 0x44, 0x307 },
unicodeslot=0x1E0A,
},
[0x1E0B]={
@@ -51838,8 +53050,8 @@ characters.data={
description="LATIN SMALL LETTER D WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0064,
- specials={ "char", 0x0064, 0x0307 },
+ shcode=0x64,
+ specials={ "char", 0x64, 0x307 },
uccode=0x1E0A,
unicodeslot=0x1E0B,
},
@@ -51850,8 +53062,8 @@ characters.data={
direction="l",
lccode=0x1E0D,
linebreak="al",
- shcode=0x0044,
- specials={ "char", 0x0044, 0x0323 },
+ shcode=0x44,
+ specials={ "char", 0x44, 0x323 },
unicodeslot=0x1E0C,
},
[0x1E0D]={
@@ -51860,8 +53072,8 @@ characters.data={
description="LATIN SMALL LETTER D WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0064,
- specials={ "char", 0x0064, 0x0323 },
+ shcode=0x64,
+ specials={ "char", 0x64, 0x323 },
uccode=0x1E0C,
unicodeslot=0x1E0D,
},
@@ -51872,8 +53084,8 @@ characters.data={
direction="l",
lccode=0x1E0F,
linebreak="al",
- shcode=0x0044,
- specials={ "char", 0x0044, 0x0331 },
+ shcode=0x44,
+ specials={ "char", 0x44, 0x331 },
unicodeslot=0x1E0E,
},
[0x1E0F]={
@@ -51882,8 +53094,8 @@ characters.data={
description="LATIN SMALL LETTER D WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x0064,
- specials={ "char", 0x0064, 0x0331 },
+ shcode=0x64,
+ specials={ "char", 0x64, 0x331 },
uccode=0x1E0E,
unicodeslot=0x1E0F,
},
@@ -51894,8 +53106,8 @@ characters.data={
direction="l",
lccode=0x1E11,
linebreak="al",
- shcode=0x0044,
- specials={ "char", 0x0044, 0x0327 },
+ shcode=0x44,
+ specials={ "char", 0x44, 0x327 },
unicodeslot=0x1E10,
},
[0x1E11]={
@@ -51904,8 +53116,8 @@ characters.data={
description="LATIN SMALL LETTER D WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0064,
- specials={ "char", 0x0064, 0x0327 },
+ shcode=0x64,
+ specials={ "char", 0x64, 0x327 },
uccode=0x1E10,
unicodeslot=0x1E11,
},
@@ -51916,8 +53128,8 @@ characters.data={
direction="l",
lccode=0x1E13,
linebreak="al",
- shcode=0x0044,
- specials={ "char", 0x0044, 0x032D },
+ shcode=0x44,
+ specials={ "char", 0x44, 0x32D },
unicodeslot=0x1E12,
},
[0x1E13]={
@@ -51926,8 +53138,8 @@ characters.data={
description="LATIN SMALL LETTER D WITH CIRCUMFLEX BELOW",
direction="l",
linebreak="al",
- shcode=0x0064,
- specials={ "char", 0x0064, 0x032D },
+ shcode=0x64,
+ specials={ "char", 0x64, 0x32D },
uccode=0x1E12,
unicodeslot=0x1E13,
},
@@ -51938,8 +53150,8 @@ characters.data={
direction="l",
lccode=0x1E15,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0112, 0x0300 },
+ shcode=0x45,
+ specials={ "char", 0x112, 0x300 },
unicodeslot=0x1E14,
},
[0x1E15]={
@@ -51948,8 +53160,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH MACRON AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0113, 0x0300 },
+ shcode=0x65,
+ specials={ "char", 0x113, 0x300 },
uccode=0x1E14,
unicodeslot=0x1E15,
},
@@ -51960,8 +53172,8 @@ characters.data={
direction="l",
lccode=0x1E17,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0112, 0x0301 },
+ shcode=0x45,
+ specials={ "char", 0x112, 0x301 },
unicodeslot=0x1E16,
},
[0x1E17]={
@@ -51970,8 +53182,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH MACRON AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0113, 0x0301 },
+ shcode=0x65,
+ specials={ "char", 0x113, 0x301 },
uccode=0x1E16,
unicodeslot=0x1E17,
},
@@ -51982,8 +53194,8 @@ characters.data={
direction="l",
lccode=0x1E19,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x032D },
+ shcode=0x45,
+ specials={ "char", 0x45, 0x32D },
unicodeslot=0x1E18,
},
[0x1E19]={
@@ -51992,8 +53204,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX BELOW",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x032D },
+ shcode=0x65,
+ specials={ "char", 0x65, 0x32D },
uccode=0x1E18,
unicodeslot=0x1E19,
},
@@ -52004,8 +53216,8 @@ characters.data={
direction="l",
lccode=0x1E1B,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0330 },
+ shcode=0x45,
+ specials={ "char", 0x45, 0x330 },
unicodeslot=0x1E1A,
},
[0x1E1B]={
@@ -52014,8 +53226,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH TILDE BELOW",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0330 },
+ shcode=0x65,
+ specials={ "char", 0x65, 0x330 },
uccode=0x1E1A,
unicodeslot=0x1E1B,
},
@@ -52026,8 +53238,8 @@ characters.data={
direction="l",
lccode=0x1E1D,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0228, 0x0306 },
+ shcode=0x45,
+ specials={ "char", 0x228, 0x306 },
unicodeslot=0x1E1C,
},
[0x1E1D]={
@@ -52036,8 +53248,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CEDILLA AND BREVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0229, 0x0306 },
+ shcode=0x65,
+ specials={ "char", 0x229, 0x306 },
uccode=0x1E1C,
unicodeslot=0x1E1D,
},
@@ -52048,8 +53260,8 @@ characters.data={
direction="l",
lccode=0x1E1F,
linebreak="al",
- shcode=0x0046,
- specials={ "char", 0x0046, 0x0307 },
+ shcode=0x46,
+ specials={ "char", 0x46, 0x307 },
unicodeslot=0x1E1E,
},
[0x1E1F]={
@@ -52058,8 +53270,8 @@ characters.data={
description="LATIN SMALL LETTER F WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0066,
- specials={ "char", 0x0066, 0x0307 },
+ shcode=0x66,
+ specials={ "char", 0x66, 0x307 },
uccode=0x1E1E,
unicodeslot=0x1E1F,
},
@@ -52070,8 +53282,8 @@ characters.data={
direction="l",
lccode=0x1E21,
linebreak="al",
- shcode=0x0047,
- specials={ "char", 0x0047, 0x0304 },
+ shcode=0x47,
+ specials={ "char", 0x47, 0x304 },
unicodeslot=0x1E20,
},
[0x1E21]={
@@ -52080,8 +53292,8 @@ characters.data={
description="LATIN SMALL LETTER G WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x0067,
- specials={ "char", 0x0067, 0x0304 },
+ shcode=0x67,
+ specials={ "char", 0x67, 0x304 },
uccode=0x1E20,
unicodeslot=0x1E21,
},
@@ -52092,8 +53304,8 @@ characters.data={
direction="l",
lccode=0x1E23,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x0307 },
+ shcode=0x48,
+ specials={ "char", 0x48, 0x307 },
unicodeslot=0x1E22,
},
[0x1E23]={
@@ -52102,8 +53314,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x0307 },
+ shcode=0x68,
+ specials={ "char", 0x68, 0x307 },
uccode=0x1E22,
unicodeslot=0x1E23,
},
@@ -52114,8 +53326,8 @@ characters.data={
direction="l",
lccode=0x1E25,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x0323 },
+ shcode=0x48,
+ specials={ "char", 0x48, 0x323 },
unicodeslot=0x1E24,
},
[0x1E25]={
@@ -52124,8 +53336,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x0323 },
+ shcode=0x68,
+ specials={ "char", 0x68, 0x323 },
uccode=0x1E24,
unicodeslot=0x1E25,
},
@@ -52136,8 +53348,8 @@ characters.data={
direction="l",
lccode=0x1E27,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x0308 },
+ shcode=0x48,
+ specials={ "char", 0x48, 0x308 },
unicodeslot=0x1E26,
},
[0x1E27]={
@@ -52146,8 +53358,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x0308 },
+ shcode=0x68,
+ specials={ "char", 0x68, 0x308 },
uccode=0x1E26,
unicodeslot=0x1E27,
},
@@ -52158,8 +53370,8 @@ characters.data={
direction="l",
lccode=0x1E29,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x0327 },
+ shcode=0x48,
+ specials={ "char", 0x48, 0x327 },
unicodeslot=0x1E28,
},
[0x1E29]={
@@ -52168,8 +53380,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH CEDILLA",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x0327 },
+ shcode=0x68,
+ specials={ "char", 0x68, 0x327 },
uccode=0x1E28,
unicodeslot=0x1E29,
},
@@ -52180,8 +53392,8 @@ characters.data={
direction="l",
lccode=0x1E2B,
linebreak="al",
- shcode=0x0048,
- specials={ "char", 0x0048, 0x032E },
+ shcode=0x48,
+ specials={ "char", 0x48, 0x32E },
unicodeslot=0x1E2A,
},
[0x1E2B]={
@@ -52190,8 +53402,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH BREVE BELOW",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x032E },
+ shcode=0x68,
+ specials={ "char", 0x68, 0x32E },
uccode=0x1E2A,
unicodeslot=0x1E2B,
},
@@ -52202,8 +53414,8 @@ characters.data={
direction="l",
lccode=0x1E2D,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0330 },
+ shcode=0x49,
+ specials={ "char", 0x49, 0x330 },
unicodeslot=0x1E2C,
},
[0x1E2D]={
@@ -52212,8 +53424,8 @@ characters.data={
description="LATIN SMALL LETTER I WITH TILDE BELOW",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0330 },
+ shcode=0x69,
+ specials={ "char", 0x69, 0x330 },
uccode=0x1E2C,
unicodeslot=0x1E2D,
},
@@ -52224,8 +53436,8 @@ characters.data={
direction="l",
lccode=0x1E2F,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x00CF, 0x0301 },
+ shcode=0x49,
+ specials={ "char", 0xCF, 0x301 },
unicodeslot=0x1E2E,
},
[0x1E2F]={
@@ -52234,8 +53446,8 @@ characters.data={
description="LATIN SMALL LETTER I WITH DIAERESIS AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x00EF, 0x0301 },
+ shcode=0x69,
+ specials={ "char", 0xEF, 0x301 },
uccode=0x1E2E,
unicodeslot=0x1E2F,
},
@@ -52246,8 +53458,8 @@ characters.data={
direction="l",
lccode=0x1E31,
linebreak="al",
- shcode=0x004B,
- specials={ "char", 0x004B, 0x0301 },
+ shcode=0x4B,
+ specials={ "char", 0x4B, 0x301 },
unicodeslot=0x1E30,
},
[0x1E31]={
@@ -52256,8 +53468,8 @@ characters.data={
description="LATIN SMALL LETTER K WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x006B,
- specials={ "char", 0x006B, 0x0301 },
+ shcode=0x6B,
+ specials={ "char", 0x6B, 0x301 },
uccode=0x1E30,
unicodeslot=0x1E31,
},
@@ -52268,8 +53480,8 @@ characters.data={
direction="l",
lccode=0x1E33,
linebreak="al",
- shcode=0x004B,
- specials={ "char", 0x004B, 0x0323 },
+ shcode=0x4B,
+ specials={ "char", 0x4B, 0x323 },
unicodeslot=0x1E32,
},
[0x1E33]={
@@ -52278,8 +53490,8 @@ characters.data={
description="LATIN SMALL LETTER K WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006B,
- specials={ "char", 0x006B, 0x0323 },
+ shcode=0x6B,
+ specials={ "char", 0x6B, 0x323 },
uccode=0x1E32,
unicodeslot=0x1E33,
},
@@ -52290,8 +53502,8 @@ characters.data={
direction="l",
lccode=0x1E35,
linebreak="al",
- shcode=0x004B,
- specials={ "char", 0x004B, 0x0331 },
+ shcode=0x4B,
+ specials={ "char", 0x4B, 0x331 },
unicodeslot=0x1E34,
},
[0x1E35]={
@@ -52300,8 +53512,8 @@ characters.data={
description="LATIN SMALL LETTER K WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x006B,
- specials={ "char", 0x006B, 0x0331 },
+ shcode=0x6B,
+ specials={ "char", 0x6B, 0x331 },
uccode=0x1E34,
unicodeslot=0x1E35,
},
@@ -52312,8 +53524,8 @@ characters.data={
direction="l",
lccode=0x1E37,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x004C, 0x0323 },
+ shcode=0x4C,
+ specials={ "char", 0x4C, 0x323 },
unicodeslot=0x1E36,
},
[0x1E37]={
@@ -52322,8 +53534,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x006C, 0x0323 },
+ shcode=0x6C,
+ specials={ "char", 0x6C, 0x323 },
uccode=0x1E36,
unicodeslot=0x1E37,
},
@@ -52334,8 +53546,8 @@ characters.data={
direction="l",
lccode=0x1E39,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x1E36, 0x0304 },
+ shcode=0x4C,
+ specials={ "char", 0x1E36, 0x304 },
unicodeslot=0x1E38,
},
[0x1E39]={
@@ -52344,8 +53556,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH DOT BELOW AND MACRON",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x1E37, 0x0304 },
+ shcode=0x6C,
+ specials={ "char", 0x1E37, 0x304 },
uccode=0x1E38,
unicodeslot=0x1E39,
},
@@ -52356,8 +53568,8 @@ characters.data={
direction="l",
lccode=0x1E3B,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x004C, 0x0331 },
+ shcode=0x4C,
+ specials={ "char", 0x4C, 0x331 },
unicodeslot=0x1E3A,
},
[0x1E3B]={
@@ -52366,8 +53578,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x006C, 0x0331 },
+ shcode=0x6C,
+ specials={ "char", 0x6C, 0x331 },
uccode=0x1E3A,
unicodeslot=0x1E3B,
},
@@ -52378,8 +53590,8 @@ characters.data={
direction="l",
lccode=0x1E3D,
linebreak="al",
- shcode=0x004C,
- specials={ "char", 0x004C, 0x032D },
+ shcode=0x4C,
+ specials={ "char", 0x4C, 0x32D },
unicodeslot=0x1E3C,
},
[0x1E3D]={
@@ -52388,8 +53600,8 @@ characters.data={
description="LATIN SMALL LETTER L WITH CIRCUMFLEX BELOW",
direction="l",
linebreak="al",
- shcode=0x006C,
- specials={ "char", 0x006C, 0x032D },
+ shcode=0x6C,
+ specials={ "char", 0x6C, 0x32D },
uccode=0x1E3C,
unicodeslot=0x1E3D,
},
@@ -52400,8 +53612,8 @@ characters.data={
direction="l",
lccode=0x1E3F,
linebreak="al",
- shcode=0x004D,
- specials={ "char", 0x004D, 0x0301 },
+ shcode=0x4D,
+ specials={ "char", 0x4D, 0x301 },
unicodeslot=0x1E3E,
},
[0x1E3F]={
@@ -52410,8 +53622,8 @@ characters.data={
description="LATIN SMALL LETTER M WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x006D,
- specials={ "char", 0x006D, 0x0301 },
+ shcode=0x6D,
+ specials={ "char", 0x6D, 0x301 },
uccode=0x1E3E,
unicodeslot=0x1E3F,
},
@@ -52422,8 +53634,8 @@ characters.data={
direction="l",
lccode=0x1E41,
linebreak="al",
- shcode=0x004D,
- specials={ "char", 0x004D, 0x0307 },
+ shcode=0x4D,
+ specials={ "char", 0x4D, 0x307 },
unicodeslot=0x1E40,
},
[0x1E41]={
@@ -52432,8 +53644,8 @@ characters.data={
description="LATIN SMALL LETTER M WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x006D,
- specials={ "char", 0x006D, 0x0307 },
+ shcode=0x6D,
+ specials={ "char", 0x6D, 0x307 },
uccode=0x1E40,
unicodeslot=0x1E41,
},
@@ -52444,8 +53656,8 @@ characters.data={
direction="l",
lccode=0x1E43,
linebreak="al",
- shcode=0x004D,
- specials={ "char", 0x004D, 0x0323 },
+ shcode=0x4D,
+ specials={ "char", 0x4D, 0x323 },
unicodeslot=0x1E42,
},
[0x1E43]={
@@ -52454,8 +53666,8 @@ characters.data={
description="LATIN SMALL LETTER M WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006D,
- specials={ "char", 0x006D, 0x0323 },
+ shcode=0x6D,
+ specials={ "char", 0x6D, 0x323 },
uccode=0x1E42,
unicodeslot=0x1E43,
},
@@ -52466,8 +53678,8 @@ characters.data={
direction="l",
lccode=0x1E45,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0307 },
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x307 },
unicodeslot=0x1E44,
},
[0x1E45]={
@@ -52476,8 +53688,8 @@ characters.data={
description="LATIN SMALL LETTER N WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0307 },
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x307 },
uccode=0x1E44,
unicodeslot=0x1E45,
},
@@ -52488,8 +53700,8 @@ characters.data={
direction="l",
lccode=0x1E47,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0323 },
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x323 },
unicodeslot=0x1E46,
},
[0x1E47]={
@@ -52498,8 +53710,8 @@ characters.data={
description="LATIN SMALL LETTER N WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0323 },
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x323 },
uccode=0x1E46,
unicodeslot=0x1E47,
},
@@ -52510,8 +53722,8 @@ characters.data={
direction="l",
lccode=0x1E49,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x0331 },
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x331 },
unicodeslot=0x1E48,
},
[0x1E49]={
@@ -52520,8 +53732,8 @@ characters.data={
description="LATIN SMALL LETTER N WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x0331 },
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x331 },
uccode=0x1E48,
unicodeslot=0x1E49,
},
@@ -52532,8 +53744,8 @@ characters.data={
direction="l",
lccode=0x1E4B,
linebreak="al",
- shcode=0x004E,
- specials={ "char", 0x004E, 0x032D },
+ shcode=0x4E,
+ specials={ "char", 0x4E, 0x32D },
unicodeslot=0x1E4A,
},
[0x1E4B]={
@@ -52542,8 +53754,8 @@ characters.data={
description="LATIN SMALL LETTER N WITH CIRCUMFLEX BELOW",
direction="l",
linebreak="al",
- shcode=0x006E,
- specials={ "char", 0x006E, 0x032D },
+ shcode=0x6E,
+ specials={ "char", 0x6E, 0x32D },
uccode=0x1E4A,
unicodeslot=0x1E4B,
},
@@ -52554,8 +53766,8 @@ characters.data={
direction="l",
lccode=0x1E4D,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D5, 0x0301 },
+ shcode=0x4F,
+ specials={ "char", 0xD5, 0x301 },
unicodeslot=0x1E4C,
},
[0x1E4D]={
@@ -52564,8 +53776,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH TILDE AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F5, 0x0301 },
+ shcode=0x6F,
+ specials={ "char", 0xF5, 0x301 },
uccode=0x1E4C,
unicodeslot=0x1E4D,
},
@@ -52576,8 +53788,8 @@ characters.data={
direction="l",
lccode=0x1E4F,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D5, 0x0308 },
+ shcode=0x4F,
+ specials={ "char", 0xD5, 0x308 },
unicodeslot=0x1E4E,
},
[0x1E4F]={
@@ -52586,8 +53798,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH TILDE AND DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F5, 0x0308 },
+ shcode=0x6F,
+ specials={ "char", 0xF5, 0x308 },
uccode=0x1E4E,
unicodeslot=0x1E4F,
},
@@ -52598,8 +53810,8 @@ characters.data={
direction="l",
lccode=0x1E51,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x014C, 0x0300 },
+ shcode=0x4F,
+ specials={ "char", 0x14C, 0x300 },
unicodeslot=0x1E50,
},
[0x1E51]={
@@ -52608,8 +53820,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH MACRON AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x014D, 0x0300 },
+ shcode=0x6F,
+ specials={ "char", 0x14D, 0x300 },
uccode=0x1E50,
unicodeslot=0x1E51,
},
@@ -52620,8 +53832,8 @@ characters.data={
direction="l",
lccode=0x1E53,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x014C, 0x0301 },
+ shcode=0x4F,
+ specials={ "char", 0x14C, 0x301 },
unicodeslot=0x1E52,
},
[0x1E53]={
@@ -52630,8 +53842,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH MACRON AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x014D, 0x0301 },
+ shcode=0x6F,
+ specials={ "char", 0x14D, 0x301 },
uccode=0x1E52,
unicodeslot=0x1E53,
},
@@ -52642,8 +53854,8 @@ characters.data={
direction="l",
lccode=0x1E55,
linebreak="al",
- shcode=0x0050,
- specials={ "char", 0x0050, 0x0301 },
+ shcode=0x50,
+ specials={ "char", 0x50, 0x301 },
unicodeslot=0x1E54,
},
[0x1E55]={
@@ -52652,8 +53864,8 @@ characters.data={
description="LATIN SMALL LETTER P WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0070,
- specials={ "char", 0x0070, 0x0301 },
+ shcode=0x70,
+ specials={ "char", 0x70, 0x301 },
uccode=0x1E54,
unicodeslot=0x1E55,
},
@@ -52664,8 +53876,8 @@ characters.data={
direction="l",
lccode=0x1E57,
linebreak="al",
- shcode=0x0050,
- specials={ "char", 0x0050, 0x0307 },
+ shcode=0x50,
+ specials={ "char", 0x50, 0x307 },
unicodeslot=0x1E56,
},
[0x1E57]={
@@ -52674,8 +53886,8 @@ characters.data={
description="LATIN SMALL LETTER P WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0070,
- specials={ "char", 0x0070, 0x0307 },
+ shcode=0x70,
+ specials={ "char", 0x70, 0x307 },
uccode=0x1E56,
unicodeslot=0x1E57,
},
@@ -52686,8 +53898,8 @@ characters.data={
direction="l",
lccode=0x1E59,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x0307 },
+ shcode=0x52,
+ specials={ "char", 0x52, 0x307 },
unicodeslot=0x1E58,
},
[0x1E59]={
@@ -52696,8 +53908,8 @@ characters.data={
description="LATIN SMALL LETTER R WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x0307 },
+ shcode=0x72,
+ specials={ "char", 0x72, 0x307 },
uccode=0x1E58,
unicodeslot=0x1E59,
},
@@ -52708,8 +53920,8 @@ characters.data={
direction="l",
lccode=0x1E5B,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x0323 },
+ shcode=0x52,
+ specials={ "char", 0x52, 0x323 },
unicodeslot=0x1E5A,
},
[0x1E5B]={
@@ -52718,8 +53930,8 @@ characters.data={
description="LATIN SMALL LETTER R WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x0323 },
+ shcode=0x72,
+ specials={ "char", 0x72, 0x323 },
uccode=0x1E5A,
unicodeslot=0x1E5B,
},
@@ -52730,8 +53942,8 @@ characters.data={
direction="l",
lccode=0x1E5D,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x1E5A, 0x0304 },
+ shcode=0x52,
+ specials={ "char", 0x1E5A, 0x304 },
unicodeslot=0x1E5C,
},
[0x1E5D]={
@@ -52740,8 +53952,8 @@ characters.data={
description="LATIN SMALL LETTER R WITH DOT BELOW AND MACRON",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x1E5B, 0x0304 },
+ shcode=0x72,
+ specials={ "char", 0x1E5B, 0x304 },
uccode=0x1E5C,
unicodeslot=0x1E5D,
},
@@ -52752,8 +53964,8 @@ characters.data={
direction="l",
lccode=0x1E5F,
linebreak="al",
- shcode=0x0052,
- specials={ "char", 0x0052, 0x0331 },
+ shcode=0x52,
+ specials={ "char", 0x52, 0x331 },
unicodeslot=0x1E5E,
},
[0x1E5F]={
@@ -52762,8 +53974,8 @@ characters.data={
description="LATIN SMALL LETTER R WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x0072,
- specials={ "char", 0x0072, 0x0331 },
+ shcode=0x72,
+ specials={ "char", 0x72, 0x331 },
uccode=0x1E5E,
unicodeslot=0x1E5F,
},
@@ -52774,8 +53986,8 @@ characters.data={
direction="l",
lccode=0x1E61,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x0307 },
+ shcode=0x53,
+ specials={ "char", 0x53, 0x307 },
unicodeslot=0x1E60,
},
[0x1E61]={
@@ -52784,8 +53996,8 @@ characters.data={
description="LATIN SMALL LETTER S WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x0307 },
+ shcode=0x73,
+ specials={ "char", 0x73, 0x307 },
uccode=0x1E60,
unicodeslot=0x1E61,
},
@@ -52796,8 +54008,8 @@ characters.data={
direction="l",
lccode=0x1E63,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0053, 0x0323 },
+ shcode=0x53,
+ specials={ "char", 0x53, 0x323 },
unicodeslot=0x1E62,
},
[0x1E63]={
@@ -52806,8 +54018,8 @@ characters.data={
description="LATIN SMALL LETTER S WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0073, 0x0323 },
+ shcode=0x73,
+ specials={ "char", 0x73, 0x323 },
uccode=0x1E62,
unicodeslot=0x1E63,
},
@@ -52818,8 +54030,8 @@ characters.data={
direction="l",
lccode=0x1E65,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x015A, 0x0307 },
+ shcode=0x53,
+ specials={ "char", 0x15A, 0x307 },
unicodeslot=0x1E64,
},
[0x1E65]={
@@ -52828,8 +54040,8 @@ characters.data={
description="LATIN SMALL LETTER S WITH ACUTE AND DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x015B, 0x0307 },
+ shcode=0x73,
+ specials={ "char", 0x15B, 0x307 },
uccode=0x1E64,
unicodeslot=0x1E65,
},
@@ -52840,8 +54052,8 @@ characters.data={
direction="l",
lccode=0x1E67,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x0160, 0x0307 },
+ shcode=0x53,
+ specials={ "char", 0x160, 0x307 },
unicodeslot=0x1E66,
},
[0x1E67]={
@@ -52850,8 +54062,8 @@ characters.data={
description="LATIN SMALL LETTER S WITH CARON AND DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x0161, 0x0307 },
+ shcode=0x73,
+ specials={ "char", 0x161, 0x307 },
uccode=0x1E66,
unicodeslot=0x1E67,
},
@@ -52862,8 +54074,8 @@ characters.data={
direction="l",
lccode=0x1E69,
linebreak="al",
- shcode=0x0053,
- specials={ "char", 0x1E62, 0x0307 },
+ shcode=0x53,
+ specials={ "char", 0x1E62, 0x307 },
unicodeslot=0x1E68,
},
[0x1E69]={
@@ -52872,8 +54084,8 @@ characters.data={
description="LATIN SMALL LETTER S WITH DOT BELOW AND DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0073,
- specials={ "char", 0x1E63, 0x0307 },
+ shcode=0x73,
+ specials={ "char", 0x1E63, 0x307 },
uccode=0x1E68,
unicodeslot=0x1E69,
},
@@ -52884,8 +54096,8 @@ characters.data={
direction="l",
lccode=0x1E6B,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x0307 },
+ shcode=0x54,
+ specials={ "char", 0x54, 0x307 },
unicodeslot=0x1E6A,
},
[0x1E6B]={
@@ -52894,8 +54106,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x0307 },
+ shcode=0x74,
+ specials={ "char", 0x74, 0x307 },
uccode=0x1E6A,
unicodeslot=0x1E6B,
},
@@ -52906,8 +54118,8 @@ characters.data={
direction="l",
lccode=0x1E6D,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x0323 },
+ shcode=0x54,
+ specials={ "char", 0x54, 0x323 },
unicodeslot=0x1E6C,
},
[0x1E6D]={
@@ -52916,8 +54128,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x0323 },
+ shcode=0x74,
+ specials={ "char", 0x74, 0x323 },
uccode=0x1E6C,
unicodeslot=0x1E6D,
},
@@ -52928,8 +54140,8 @@ characters.data={
direction="l",
lccode=0x1E6F,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x0331 },
+ shcode=0x54,
+ specials={ "char", 0x54, 0x331 },
unicodeslot=0x1E6E,
},
[0x1E6F]={
@@ -52938,8 +54150,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x0331 },
+ shcode=0x74,
+ specials={ "char", 0x74, 0x331 },
uccode=0x1E6E,
unicodeslot=0x1E6F,
},
@@ -52950,8 +54162,8 @@ characters.data={
direction="l",
lccode=0x1E71,
linebreak="al",
- shcode=0x0054,
- specials={ "char", 0x0054, 0x032D },
+ shcode=0x54,
+ specials={ "char", 0x54, 0x32D },
unicodeslot=0x1E70,
},
[0x1E71]={
@@ -52960,8 +54172,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x032D },
+ shcode=0x74,
+ specials={ "char", 0x74, 0x32D },
uccode=0x1E70,
unicodeslot=0x1E71,
},
@@ -52972,8 +54184,8 @@ characters.data={
direction="l",
lccode=0x1E73,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0324 },
+ shcode=0x55,
+ specials={ "char", 0x55, 0x324 },
unicodeslot=0x1E72,
},
[0x1E73]={
@@ -52982,8 +54194,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH DIAERESIS BELOW",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0324 },
+ shcode=0x75,
+ specials={ "char", 0x75, 0x324 },
uccode=0x1E72,
unicodeslot=0x1E73,
},
@@ -52994,8 +54206,8 @@ characters.data={
direction="l",
lccode=0x1E75,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0330 },
+ shcode=0x55,
+ specials={ "char", 0x55, 0x330 },
unicodeslot=0x1E74,
},
[0x1E75]={
@@ -53004,8 +54216,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH TILDE BELOW",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0330 },
+ shcode=0x75,
+ specials={ "char", 0x75, 0x330 },
uccode=0x1E74,
unicodeslot=0x1E75,
},
@@ -53016,8 +54228,8 @@ characters.data={
direction="l",
lccode=0x1E77,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x032D },
+ shcode=0x55,
+ specials={ "char", 0x55, 0x32D },
unicodeslot=0x1E76,
},
[0x1E77]={
@@ -53026,8 +54238,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH CIRCUMFLEX BELOW",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x032D },
+ shcode=0x75,
+ specials={ "char", 0x75, 0x32D },
uccode=0x1E76,
unicodeslot=0x1E77,
},
@@ -53038,8 +54250,8 @@ characters.data={
direction="l",
lccode=0x1E79,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0168, 0x0301 },
+ shcode=0x55,
+ specials={ "char", 0x168, 0x301 },
unicodeslot=0x1E78,
},
[0x1E79]={
@@ -53048,8 +54260,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH TILDE AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0169, 0x0301 },
+ shcode=0x75,
+ specials={ "char", 0x169, 0x301 },
uccode=0x1E78,
unicodeslot=0x1E79,
},
@@ -53060,8 +54272,8 @@ characters.data={
direction="l",
lccode=0x1E7B,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x016A, 0x0308 },
+ shcode=0x55,
+ specials={ "char", 0x16A, 0x308 },
unicodeslot=0x1E7A,
},
[0x1E7B]={
@@ -53070,8 +54282,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH MACRON AND DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x016B, 0x0308 },
+ shcode=0x75,
+ specials={ "char", 0x16B, 0x308 },
uccode=0x1E7A,
unicodeslot=0x1E7B,
},
@@ -53082,8 +54294,8 @@ characters.data={
direction="l",
lccode=0x1E7D,
linebreak="al",
- shcode=0x0056,
- specials={ "char", 0x0056, 0x0303 },
+ shcode=0x56,
+ specials={ "char", 0x56, 0x303 },
unicodeslot=0x1E7C,
},
[0x1E7D]={
@@ -53092,8 +54304,8 @@ characters.data={
description="LATIN SMALL LETTER V WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x0076,
- specials={ "char", 0x0076, 0x0303 },
+ shcode=0x76,
+ specials={ "char", 0x76, 0x303 },
uccode=0x1E7C,
unicodeslot=0x1E7D,
},
@@ -53104,8 +54316,8 @@ characters.data={
direction="l",
lccode=0x1E7F,
linebreak="al",
- shcode=0x0056,
- specials={ "char", 0x0056, 0x0323 },
+ shcode=0x56,
+ specials={ "char", 0x56, 0x323 },
unicodeslot=0x1E7E,
},
[0x1E7F]={
@@ -53114,8 +54326,8 @@ characters.data={
description="LATIN SMALL LETTER V WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0076,
- specials={ "char", 0x0076, 0x0323 },
+ shcode=0x76,
+ specials={ "char", 0x76, 0x323 },
uccode=0x1E7E,
unicodeslot=0x1E7F,
},
@@ -53126,8 +54338,8 @@ characters.data={
direction="l",
lccode=0x1E81,
linebreak="al",
- shcode=0x0057,
- specials={ "char", 0x0057, 0x0300 },
+ shcode=0x57,
+ specials={ "char", 0x57, 0x300 },
unicodeslot=0x1E80,
},
[0x1E81]={
@@ -53136,8 +54348,8 @@ characters.data={
description="LATIN SMALL LETTER W WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x0300 },
+ shcode=0x77,
+ specials={ "char", 0x77, 0x300 },
uccode=0x1E80,
unicodeslot=0x1E81,
},
@@ -53148,8 +54360,8 @@ characters.data={
direction="l",
lccode=0x1E83,
linebreak="al",
- shcode=0x0057,
- specials={ "char", 0x0057, 0x0301 },
+ shcode=0x57,
+ specials={ "char", 0x57, 0x301 },
unicodeslot=0x1E82,
},
[0x1E83]={
@@ -53158,8 +54370,8 @@ characters.data={
description="LATIN SMALL LETTER W WITH ACUTE",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x0301 },
+ shcode=0x77,
+ specials={ "char", 0x77, 0x301 },
uccode=0x1E82,
unicodeslot=0x1E83,
},
@@ -53170,8 +54382,8 @@ characters.data={
direction="l",
lccode=0x1E85,
linebreak="al",
- shcode=0x0057,
- specials={ "char", 0x0057, 0x0308 },
+ shcode=0x57,
+ specials={ "char", 0x57, 0x308 },
unicodeslot=0x1E84,
},
[0x1E85]={
@@ -53180,8 +54392,8 @@ characters.data={
description="LATIN SMALL LETTER W WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x0308 },
+ shcode=0x77,
+ specials={ "char", 0x77, 0x308 },
uccode=0x1E84,
unicodeslot=0x1E85,
},
@@ -53192,8 +54404,8 @@ characters.data={
direction="l",
lccode=0x1E87,
linebreak="al",
- shcode=0x0057,
- specials={ "char", 0x0057, 0x0307 },
+ shcode=0x57,
+ specials={ "char", 0x57, 0x307 },
unicodeslot=0x1E86,
},
[0x1E87]={
@@ -53202,8 +54414,8 @@ characters.data={
description="LATIN SMALL LETTER W WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x0307 },
+ shcode=0x77,
+ specials={ "char", 0x77, 0x307 },
uccode=0x1E86,
unicodeslot=0x1E87,
},
@@ -53214,8 +54426,8 @@ characters.data={
direction="l",
lccode=0x1E89,
linebreak="al",
- shcode=0x0057,
- specials={ "char", 0x0057, 0x0323 },
+ shcode=0x57,
+ specials={ "char", 0x57, 0x323 },
unicodeslot=0x1E88,
},
[0x1E89]={
@@ -53224,8 +54436,8 @@ characters.data={
description="LATIN SMALL LETTER W WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x0323 },
+ shcode=0x77,
+ specials={ "char", 0x77, 0x323 },
uccode=0x1E88,
unicodeslot=0x1E89,
},
@@ -53236,8 +54448,8 @@ characters.data={
direction="l",
lccode=0x1E8B,
linebreak="al",
- shcode=0x0058,
- specials={ "char", 0x0058, 0x0307 },
+ shcode=0x58,
+ specials={ "char", 0x58, 0x307 },
unicodeslot=0x1E8A,
},
[0x1E8B]={
@@ -53246,8 +54458,8 @@ characters.data={
description="LATIN SMALL LETTER X WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0078,
- specials={ "char", 0x0078, 0x0307 },
+ shcode=0x78,
+ specials={ "char", 0x78, 0x307 },
uccode=0x1E8A,
unicodeslot=0x1E8B,
},
@@ -53258,8 +54470,8 @@ characters.data={
direction="l",
lccode=0x1E8D,
linebreak="al",
- shcode=0x0058,
- specials={ "char", 0x0058, 0x0308 },
+ shcode=0x58,
+ specials={ "char", 0x58, 0x308 },
unicodeslot=0x1E8C,
},
[0x1E8D]={
@@ -53268,8 +54480,8 @@ characters.data={
description="LATIN SMALL LETTER X WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0078,
- specials={ "char", 0x0078, 0x0308 },
+ shcode=0x78,
+ specials={ "char", 0x78, 0x308 },
uccode=0x1E8C,
unicodeslot=0x1E8D,
},
@@ -53280,8 +54492,8 @@ characters.data={
direction="l",
lccode=0x1E8F,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0307 },
+ shcode=0x59,
+ specials={ "char", 0x59, 0x307 },
unicodeslot=0x1E8E,
},
[0x1E8F]={
@@ -53290,8 +54502,8 @@ characters.data={
description="LATIN SMALL LETTER Y WITH DOT ABOVE",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0307 },
+ shcode=0x79,
+ specials={ "char", 0x79, 0x307 },
uccode=0x1E8E,
unicodeslot=0x1E8F,
},
@@ -53302,8 +54514,8 @@ characters.data={
direction="l",
lccode=0x1E91,
linebreak="al",
- shcode=0x005A,
- specials={ "char", 0x005A, 0x0302 },
+ shcode=0x5A,
+ specials={ "char", 0x5A, 0x302 },
unicodeslot=0x1E90,
},
[0x1E91]={
@@ -53312,8 +54524,8 @@ characters.data={
description="LATIN SMALL LETTER Z WITH CIRCUMFLEX",
direction="l",
linebreak="al",
- shcode=0x007A,
- specials={ "char", 0x007A, 0x0302 },
+ shcode=0x7A,
+ specials={ "char", 0x7A, 0x302 },
uccode=0x1E90,
unicodeslot=0x1E91,
},
@@ -53324,8 +54536,8 @@ characters.data={
direction="l",
lccode=0x1E93,
linebreak="al",
- shcode=0x005A,
- specials={ "char", 0x005A, 0x0323 },
+ shcode=0x5A,
+ specials={ "char", 0x5A, 0x323 },
unicodeslot=0x1E92,
},
[0x1E93]={
@@ -53334,8 +54546,8 @@ characters.data={
description="LATIN SMALL LETTER Z WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x007A,
- specials={ "char", 0x007A, 0x0323 },
+ shcode=0x7A,
+ specials={ "char", 0x7A, 0x323 },
uccode=0x1E92,
unicodeslot=0x1E93,
},
@@ -53346,8 +54558,8 @@ characters.data={
direction="l",
lccode=0x1E95,
linebreak="al",
- shcode=0x005A,
- specials={ "char", 0x005A, 0x0331 },
+ shcode=0x5A,
+ specials={ "char", 0x5A, 0x331 },
unicodeslot=0x1E94,
},
[0x1E95]={
@@ -53356,8 +54568,8 @@ characters.data={
description="LATIN SMALL LETTER Z WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x007A,
- specials={ "char", 0x007A, 0x0331 },
+ shcode=0x7A,
+ specials={ "char", 0x7A, 0x331 },
uccode=0x1E94,
unicodeslot=0x1E95,
},
@@ -53367,8 +54579,8 @@ characters.data={
description="LATIN SMALL LETTER H WITH LINE BELOW",
direction="l",
linebreak="al",
- shcode=0x0068,
- specials={ "char", 0x0068, 0x0331 },
+ shcode=0x68,
+ specials={ "char", 0x68, 0x331 },
unicodeslot=0x1E96,
},
[0x1E97]={
@@ -53377,8 +54589,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH DIAERESIS",
direction="l",
linebreak="al",
- shcode=0x0074,
- specials={ "char", 0x0074, 0x0308 },
+ shcode=0x74,
+ specials={ "char", 0x74, 0x308 },
unicodeslot=0x1E97,
},
[0x1E98]={
@@ -53387,8 +54599,8 @@ characters.data={
description="LATIN SMALL LETTER W WITH RING ABOVE",
direction="l",
linebreak="al",
- shcode=0x0077,
- specials={ "char", 0x0077, 0x030A },
+ shcode=0x77,
+ specials={ "char", 0x77, 0x30A },
unicodeslot=0x1E98,
},
[0x1E99]={
@@ -53397,8 +54609,8 @@ characters.data={
description="LATIN SMALL LETTER Y WITH RING ABOVE",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x030A },
+ shcode=0x79,
+ specials={ "char", 0x79, 0x30A },
unicodeslot=0x1E99,
},
[0x1E9A]={
@@ -53407,8 +54619,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH RIGHT HALF RING",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "compat", 0x0061, 0x02BE },
+ shcode=0x61,
+ specials={ "compat", 0x61, 0x2BE },
unicodeslot=0x1E9A,
},
[0x1E9B]={
@@ -53417,7 +54629,7 @@ characters.data={
description="LATIN SMALL LETTER LONG S WITH DOT ABOVE",
direction="l",
linebreak="al",
- specials={ "char", 0x017F, 0x0307 },
+ specials={ "char", 0x17F, 0x307 },
uccode=0x1E60,
unicodeslot=0x1E9B,
},
@@ -53457,8 +54669,8 @@ characters.data={
direction="l",
lccode=0x1EA1,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0323 },
+ shcode=0x41,
+ specials={ "char", 0x41, 0x323 },
unicodeslot=0x1EA0,
},
[0x1EA1]={
@@ -53468,8 +54680,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0323 },
+ shcode=0x61,
+ specials={ "char", 0x61, 0x323 },
uccode=0x1EA0,
unicodeslot=0x1EA1,
},
@@ -53481,8 +54693,8 @@ characters.data={
direction="l",
lccode=0x1EA3,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0041, 0x0309 },
+ shcode=0x41,
+ specials={ "char", 0x41, 0x309 },
unicodeslot=0x1EA2,
},
[0x1EA3]={
@@ -53492,8 +54704,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0061, 0x0309 },
+ shcode=0x61,
+ specials={ "char", 0x61, 0x309 },
uccode=0x1EA2,
unicodeslot=0x1EA3,
},
@@ -53505,8 +54717,8 @@ characters.data={
direction="l",
lccode=0x1EA5,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x00C2, 0x0301 },
+ shcode=0x41,
+ specials={ "char", 0xC2, 0x301 },
unicodeslot=0x1EA4,
},
[0x1EA5]={
@@ -53516,8 +54728,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x00E2, 0x0301 },
+ shcode=0x61,
+ specials={ "char", 0xE2, 0x301 },
uccode=0x1EA4,
unicodeslot=0x1EA5,
},
@@ -53529,8 +54741,8 @@ characters.data={
direction="l",
lccode=0x1EA7,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x00C2, 0x0300 },
+ shcode=0x41,
+ specials={ "char", 0xC2, 0x300 },
unicodeslot=0x1EA6,
},
[0x1EA7]={
@@ -53540,8 +54752,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x00E2, 0x0300 },
+ shcode=0x61,
+ specials={ "char", 0xE2, 0x300 },
uccode=0x1EA6,
unicodeslot=0x1EA7,
},
@@ -53553,8 +54765,8 @@ characters.data={
direction="l",
lccode=0x1EA9,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x00C2, 0x0309 },
+ shcode=0x41,
+ specials={ "char", 0xC2, 0x309 },
unicodeslot=0x1EA8,
},
[0x1EA9]={
@@ -53564,8 +54776,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x00E2, 0x0309 },
+ shcode=0x61,
+ specials={ "char", 0xE2, 0x309 },
uccode=0x1EA8,
unicodeslot=0x1EA9,
},
@@ -53577,8 +54789,8 @@ characters.data={
direction="l",
lccode=0x1EAB,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x00C2, 0x0303 },
+ shcode=0x41,
+ specials={ "char", 0xC2, 0x303 },
unicodeslot=0x1EAA,
},
[0x1EAB]={
@@ -53588,8 +54800,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x00E2, 0x0303 },
+ shcode=0x61,
+ specials={ "char", 0xE2, 0x303 },
uccode=0x1EAA,
unicodeslot=0x1EAB,
},
@@ -53601,8 +54813,8 @@ characters.data={
direction="l",
lccode=0x1EAD,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x1EA0, 0x0302 },
+ shcode=0x41,
+ specials={ "char", 0x1EA0, 0x302 },
unicodeslot=0x1EAC,
},
[0x1EAD]={
@@ -53612,8 +54824,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x1EA1, 0x0302 },
+ shcode=0x61,
+ specials={ "char", 0x1EA1, 0x302 },
uccode=0x1EAC,
unicodeslot=0x1EAD,
},
@@ -53625,8 +54837,8 @@ characters.data={
direction="l",
lccode=0x1EAF,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0102, 0x0301 },
+ shcode=0x41,
+ specials={ "char", 0x102, 0x301 },
unicodeslot=0x1EAE,
},
[0x1EAF]={
@@ -53636,8 +54848,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH BREVE AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0103, 0x0301 },
+ shcode=0x61,
+ specials={ "char", 0x103, 0x301 },
uccode=0x1EAE,
unicodeslot=0x1EAF,
},
@@ -53649,8 +54861,8 @@ characters.data={
direction="l",
lccode=0x1EB1,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0102, 0x0300 },
+ shcode=0x41,
+ specials={ "char", 0x102, 0x300 },
unicodeslot=0x1EB0,
},
[0x1EB1]={
@@ -53660,8 +54872,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH BREVE AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0103, 0x0300 },
+ shcode=0x61,
+ specials={ "char", 0x103, 0x300 },
uccode=0x1EB0,
unicodeslot=0x1EB1,
},
@@ -53673,8 +54885,8 @@ characters.data={
direction="l",
lccode=0x1EB3,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0102, 0x0309 },
+ shcode=0x41,
+ specials={ "char", 0x102, 0x309 },
unicodeslot=0x1EB2,
},
[0x1EB3]={
@@ -53684,8 +54896,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0103, 0x0309 },
+ shcode=0x61,
+ specials={ "char", 0x103, 0x309 },
uccode=0x1EB2,
unicodeslot=0x1EB3,
},
@@ -53697,8 +54909,8 @@ characters.data={
direction="l",
lccode=0x1EB5,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x0102, 0x0303 },
+ shcode=0x41,
+ specials={ "char", 0x102, 0x303 },
unicodeslot=0x1EB4,
},
[0x1EB5]={
@@ -53708,8 +54920,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH BREVE AND TILDE",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x0103, 0x0303 },
+ shcode=0x61,
+ specials={ "char", 0x103, 0x303 },
uccode=0x1EB4,
unicodeslot=0x1EB5,
},
@@ -53721,8 +54933,8 @@ characters.data={
direction="l",
lccode=0x1EB7,
linebreak="al",
- shcode=0x0041,
- specials={ "char", 0x1EA0, 0x0306 },
+ shcode=0x41,
+ specials={ "char", 0x1EA0, 0x306 },
unicodeslot=0x1EB6,
},
[0x1EB7]={
@@ -53732,8 +54944,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH BREVE AND DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0061,
- specials={ "char", 0x1EA1, 0x0306 },
+ shcode=0x61,
+ specials={ "char", 0x1EA1, 0x306 },
uccode=0x1EB6,
unicodeslot=0x1EB7,
},
@@ -53745,8 +54957,8 @@ characters.data={
direction="l",
lccode=0x1EB9,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0323 },
+ shcode=0x45,
+ specials={ "char", 0x45, 0x323 },
unicodeslot=0x1EB8,
},
[0x1EB9]={
@@ -53756,8 +54968,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0323 },
+ shcode=0x65,
+ specials={ "char", 0x65, 0x323 },
uccode=0x1EB8,
unicodeslot=0x1EB9,
},
@@ -53769,8 +54981,8 @@ characters.data={
direction="l",
lccode=0x1EBB,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0309 },
+ shcode=0x45,
+ specials={ "char", 0x45, 0x309 },
unicodeslot=0x1EBA,
},
[0x1EBB]={
@@ -53780,8 +54992,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0309 },
+ shcode=0x65,
+ specials={ "char", 0x65, 0x309 },
uccode=0x1EBA,
unicodeslot=0x1EBB,
},
@@ -53793,8 +55005,8 @@ characters.data={
direction="l",
lccode=0x1EBD,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x0045, 0x0303 },
+ shcode=0x45,
+ specials={ "char", 0x45, 0x303 },
unicodeslot=0x1EBC,
},
[0x1EBD]={
@@ -53804,8 +55016,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x0065, 0x0303 },
+ shcode=0x65,
+ specials={ "char", 0x65, 0x303 },
uccode=0x1EBC,
unicodeslot=0x1EBD,
},
@@ -53817,8 +55029,8 @@ characters.data={
direction="l",
lccode=0x1EBF,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x00CA, 0x0301 },
+ shcode=0x45,
+ specials={ "char", 0xCA, 0x301 },
unicodeslot=0x1EBE,
},
[0x1EBF]={
@@ -53828,8 +55040,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x00EA, 0x0301 },
+ shcode=0x65,
+ specials={ "char", 0xEA, 0x301 },
uccode=0x1EBE,
unicodeslot=0x1EBF,
},
@@ -53841,8 +55053,8 @@ characters.data={
direction="l",
lccode=0x1EC1,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x00CA, 0x0300 },
+ shcode=0x45,
+ specials={ "char", 0xCA, 0x300 },
unicodeslot=0x1EC0,
},
[0x1EC1]={
@@ -53852,8 +55064,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x00EA, 0x0300 },
+ shcode=0x65,
+ specials={ "char", 0xEA, 0x300 },
uccode=0x1EC0,
unicodeslot=0x1EC1,
},
@@ -53865,8 +55077,8 @@ characters.data={
direction="l",
lccode=0x1EC3,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x00CA, 0x0309 },
+ shcode=0x45,
+ specials={ "char", 0xCA, 0x309 },
unicodeslot=0x1EC2,
},
[0x1EC3]={
@@ -53876,8 +55088,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x00EA, 0x0309 },
+ shcode=0x65,
+ specials={ "char", 0xEA, 0x309 },
uccode=0x1EC2,
unicodeslot=0x1EC3,
},
@@ -53889,8 +55101,8 @@ characters.data={
direction="l",
lccode=0x1EC5,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x00CA, 0x0303 },
+ shcode=0x45,
+ specials={ "char", 0xCA, 0x303 },
unicodeslot=0x1EC4,
},
[0x1EC5]={
@@ -53900,8 +55112,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x00EA, 0x0303 },
+ shcode=0x65,
+ specials={ "char", 0xEA, 0x303 },
uccode=0x1EC4,
unicodeslot=0x1EC5,
},
@@ -53913,8 +55125,8 @@ characters.data={
direction="l",
lccode=0x1EC7,
linebreak="al",
- shcode=0x0045,
- specials={ "char", 0x1EB8, 0x0302 },
+ shcode=0x45,
+ specials={ "char", 0x1EB8, 0x302 },
unicodeslot=0x1EC6,
},
[0x1EC7]={
@@ -53924,8 +55136,8 @@ characters.data={
description="LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0065,
- specials={ "char", 0x1EB9, 0x0302 },
+ shcode=0x65,
+ specials={ "char", 0x1EB9, 0x302 },
uccode=0x1EC6,
unicodeslot=0x1EC7,
},
@@ -53937,8 +55149,8 @@ characters.data={
direction="l",
lccode=0x1EC9,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0309 },
+ shcode=0x49,
+ specials={ "char", 0x49, 0x309 },
unicodeslot=0x1EC8,
},
[0x1EC9]={
@@ -53948,8 +55160,8 @@ characters.data={
description="LATIN SMALL LETTER I WITH HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0309 },
+ shcode=0x69,
+ specials={ "char", 0x69, 0x309 },
uccode=0x1EC8,
unicodeslot=0x1EC9,
},
@@ -53961,8 +55173,8 @@ characters.data={
direction="l",
lccode=0x1ECB,
linebreak="al",
- shcode=0x0049,
- specials={ "char", 0x0049, 0x0323 },
+ shcode=0x49,
+ specials={ "char", 0x49, 0x323 },
unicodeslot=0x1ECA,
},
[0x1ECB]={
@@ -53972,8 +55184,8 @@ characters.data={
description="LATIN SMALL LETTER I WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0069,
- specials={ "char", 0x0069, 0x0323 },
+ shcode=0x69,
+ specials={ "char", 0x69, 0x323 },
uccode=0x1ECA,
unicodeslot=0x1ECB,
},
@@ -53985,8 +55197,8 @@ characters.data={
direction="l",
lccode=0x1ECD,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0323 },
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x323 },
unicodeslot=0x1ECC,
},
[0x1ECD]={
@@ -53996,8 +55208,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0323 },
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x323 },
uccode=0x1ECC,
unicodeslot=0x1ECD,
},
@@ -54009,8 +55221,8 @@ characters.data={
direction="l",
lccode=0x1ECF,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x004F, 0x0309 },
+ shcode=0x4F,
+ specials={ "char", 0x4F, 0x309 },
unicodeslot=0x1ECE,
},
[0x1ECF]={
@@ -54020,8 +55232,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x006F, 0x0309 },
+ shcode=0x6F,
+ specials={ "char", 0x6F, 0x309 },
uccode=0x1ECE,
unicodeslot=0x1ECF,
},
@@ -54033,8 +55245,8 @@ characters.data={
direction="l",
lccode=0x1ED1,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D4, 0x0301 },
+ shcode=0x4F,
+ specials={ "char", 0xD4, 0x301 },
unicodeslot=0x1ED0,
},
[0x1ED1]={
@@ -54044,8 +55256,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH CIRCUMFLEX AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F4, 0x0301 },
+ shcode=0x6F,
+ specials={ "char", 0xF4, 0x301 },
uccode=0x1ED0,
unicodeslot=0x1ED1,
},
@@ -54057,8 +55269,8 @@ characters.data={
direction="l",
lccode=0x1ED3,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D4, 0x0300 },
+ shcode=0x4F,
+ specials={ "char", 0xD4, 0x300 },
unicodeslot=0x1ED2,
},
[0x1ED3]={
@@ -54068,8 +55280,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F4, 0x0300 },
+ shcode=0x6F,
+ specials={ "char", 0xF4, 0x300 },
uccode=0x1ED2,
unicodeslot=0x1ED3,
},
@@ -54081,8 +55293,8 @@ characters.data={
direction="l",
lccode=0x1ED5,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D4, 0x0309 },
+ shcode=0x4F,
+ specials={ "char", 0xD4, 0x309 },
unicodeslot=0x1ED4,
},
[0x1ED5]={
@@ -54092,8 +55304,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F4, 0x0309 },
+ shcode=0x6F,
+ specials={ "char", 0xF4, 0x309 },
uccode=0x1ED4,
unicodeslot=0x1ED5,
},
@@ -54105,8 +55317,8 @@ characters.data={
direction="l",
lccode=0x1ED7,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x00D4, 0x0303 },
+ shcode=0x4F,
+ specials={ "char", 0xD4, 0x303 },
unicodeslot=0x1ED6,
},
[0x1ED7]={
@@ -54116,8 +55328,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x00F4, 0x0303 },
+ shcode=0x6F,
+ specials={ "char", 0xF4, 0x303 },
uccode=0x1ED6,
unicodeslot=0x1ED7,
},
@@ -54129,8 +55341,8 @@ characters.data={
direction="l",
lccode=0x1ED9,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x1ECC, 0x0302 },
+ shcode=0x4F,
+ specials={ "char", 0x1ECC, 0x302 },
unicodeslot=0x1ED8,
},
[0x1ED9]={
@@ -54140,8 +55352,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x1ECD, 0x0302 },
+ shcode=0x6F,
+ specials={ "char", 0x1ECD, 0x302 },
uccode=0x1ED8,
unicodeslot=0x1ED9,
},
@@ -54153,8 +55365,8 @@ characters.data={
direction="l",
lccode=0x1EDB,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x01A0, 0x0301 },
+ shcode=0x4F,
+ specials={ "char", 0x1A0, 0x301 },
unicodeslot=0x1EDA,
},
[0x1EDB]={
@@ -54164,8 +55376,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH HORN AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x01A1, 0x0301 },
+ shcode=0x6F,
+ specials={ "char", 0x1A1, 0x301 },
uccode=0x1EDA,
unicodeslot=0x1EDB,
},
@@ -54177,8 +55389,8 @@ characters.data={
direction="l",
lccode=0x1EDD,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x01A0, 0x0300 },
+ shcode=0x4F,
+ specials={ "char", 0x1A0, 0x300 },
unicodeslot=0x1EDC,
},
[0x1EDD]={
@@ -54188,8 +55400,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH HORN AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x01A1, 0x0300 },
+ shcode=0x6F,
+ specials={ "char", 0x1A1, 0x300 },
uccode=0x1EDC,
unicodeslot=0x1EDD,
},
@@ -54201,8 +55413,8 @@ characters.data={
direction="l",
lccode=0x1EDF,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x01A0, 0x0309 },
+ shcode=0x4F,
+ specials={ "char", 0x1A0, 0x309 },
unicodeslot=0x1EDE,
},
[0x1EDF]={
@@ -54212,8 +55424,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x01A1, 0x0309 },
+ shcode=0x6F,
+ specials={ "char", 0x1A1, 0x309 },
uccode=0x1EDE,
unicodeslot=0x1EDF,
},
@@ -54225,8 +55437,8 @@ characters.data={
direction="l",
lccode=0x1EE1,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x01A0, 0x0303 },
+ shcode=0x4F,
+ specials={ "char", 0x1A0, 0x303 },
unicodeslot=0x1EE0,
},
[0x1EE1]={
@@ -54236,8 +55448,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH HORN AND TILDE",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x01A1, 0x0303 },
+ shcode=0x6F,
+ specials={ "char", 0x1A1, 0x303 },
uccode=0x1EE0,
unicodeslot=0x1EE1,
},
@@ -54249,8 +55461,8 @@ characters.data={
direction="l",
lccode=0x1EE3,
linebreak="al",
- shcode=0x004F,
- specials={ "char", 0x01A0, 0x0323 },
+ shcode=0x4F,
+ specials={ "char", 0x1A0, 0x323 },
unicodeslot=0x1EE2,
},
[0x1EE3]={
@@ -54260,8 +55472,8 @@ characters.data={
description="LATIN SMALL LETTER O WITH HORN AND DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x006F,
- specials={ "char", 0x01A1, 0x0323 },
+ shcode=0x6F,
+ specials={ "char", 0x1A1, 0x323 },
uccode=0x1EE2,
unicodeslot=0x1EE3,
},
@@ -54273,8 +55485,8 @@ characters.data={
direction="l",
lccode=0x1EE5,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0323 },
+ shcode=0x55,
+ specials={ "char", 0x55, 0x323 },
unicodeslot=0x1EE4,
},
[0x1EE5]={
@@ -54284,8 +55496,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0323 },
+ shcode=0x75,
+ specials={ "char", 0x75, 0x323 },
uccode=0x1EE4,
unicodeslot=0x1EE5,
},
@@ -54297,8 +55509,8 @@ characters.data={
direction="l",
lccode=0x1EE7,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x0055, 0x0309 },
+ shcode=0x55,
+ specials={ "char", 0x55, 0x309 },
unicodeslot=0x1EE6,
},
[0x1EE7]={
@@ -54308,8 +55520,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x0075, 0x0309 },
+ shcode=0x75,
+ specials={ "char", 0x75, 0x309 },
uccode=0x1EE6,
unicodeslot=0x1EE7,
},
@@ -54321,8 +55533,8 @@ characters.data={
direction="l",
lccode=0x1EE9,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x01AF, 0x0301 },
+ shcode=0x55,
+ specials={ "char", 0x1AF, 0x301 },
unicodeslot=0x1EE8,
},
[0x1EE9]={
@@ -54332,8 +55544,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH HORN AND ACUTE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x01B0, 0x0301 },
+ shcode=0x75,
+ specials={ "char", 0x1B0, 0x301 },
uccode=0x1EE8,
unicodeslot=0x1EE9,
},
@@ -54345,8 +55557,8 @@ characters.data={
direction="l",
lccode=0x1EEB,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x01AF, 0x0300 },
+ shcode=0x55,
+ specials={ "char", 0x1AF, 0x300 },
unicodeslot=0x1EEA,
},
[0x1EEB]={
@@ -54356,8 +55568,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH HORN AND GRAVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x01B0, 0x0300 },
+ shcode=0x75,
+ specials={ "char", 0x1B0, 0x300 },
uccode=0x1EEA,
unicodeslot=0x1EEB,
},
@@ -54369,8 +55581,8 @@ characters.data={
direction="l",
lccode=0x1EED,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x01AF, 0x0309 },
+ shcode=0x55,
+ specials={ "char", 0x1AF, 0x309 },
unicodeslot=0x1EEC,
},
[0x1EED]={
@@ -54380,8 +55592,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x01B0, 0x0309 },
+ shcode=0x75,
+ specials={ "char", 0x1B0, 0x309 },
uccode=0x1EEC,
unicodeslot=0x1EED,
},
@@ -54393,8 +55605,8 @@ characters.data={
direction="l",
lccode=0x1EEF,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x01AF, 0x0303 },
+ shcode=0x55,
+ specials={ "char", 0x1AF, 0x303 },
unicodeslot=0x1EEE,
},
[0x1EEF]={
@@ -54404,8 +55616,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH HORN AND TILDE",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x01B0, 0x0303 },
+ shcode=0x75,
+ specials={ "char", 0x1B0, 0x303 },
uccode=0x1EEE,
unicodeslot=0x1EEF,
},
@@ -54417,8 +55629,8 @@ characters.data={
direction="l",
lccode=0x1EF1,
linebreak="al",
- shcode=0x0055,
- specials={ "char", 0x01AF, 0x0323 },
+ shcode=0x55,
+ specials={ "char", 0x1AF, 0x323 },
unicodeslot=0x1EF0,
},
[0x1EF1]={
@@ -54428,8 +55640,8 @@ characters.data={
description="LATIN SMALL LETTER U WITH HORN AND DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0075,
- specials={ "char", 0x01B0, 0x0323 },
+ shcode=0x75,
+ specials={ "char", 0x1B0, 0x323 },
uccode=0x1EF0,
unicodeslot=0x1EF1,
},
@@ -54441,8 +55653,8 @@ characters.data={
direction="l",
lccode=0x1EF3,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0300 },
+ shcode=0x59,
+ specials={ "char", 0x59, 0x300 },
unicodeslot=0x1EF2,
},
[0x1EF3]={
@@ -54452,8 +55664,8 @@ characters.data={
description="LATIN SMALL LETTER Y WITH GRAVE",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0300 },
+ shcode=0x79,
+ specials={ "char", 0x79, 0x300 },
uccode=0x1EF2,
unicodeslot=0x1EF3,
},
@@ -54465,8 +55677,8 @@ characters.data={
direction="l",
lccode=0x1EF5,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0323 },
+ shcode=0x59,
+ specials={ "char", 0x59, 0x323 },
unicodeslot=0x1EF4,
},
[0x1EF5]={
@@ -54476,8 +55688,8 @@ characters.data={
description="LATIN SMALL LETTER Y WITH DOT BELOW",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0323 },
+ shcode=0x79,
+ specials={ "char", 0x79, 0x323 },
uccode=0x1EF4,
unicodeslot=0x1EF5,
},
@@ -54489,8 +55701,8 @@ characters.data={
direction="l",
lccode=0x1EF7,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0309 },
+ shcode=0x59,
+ specials={ "char", 0x59, 0x309 },
unicodeslot=0x1EF6,
},
[0x1EF7]={
@@ -54500,8 +55712,8 @@ characters.data={
description="LATIN SMALL LETTER Y WITH HOOK ABOVE",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0309 },
+ shcode=0x79,
+ specials={ "char", 0x79, 0x309 },
uccode=0x1EF6,
unicodeslot=0x1EF7,
},
@@ -54513,8 +55725,8 @@ characters.data={
direction="l",
lccode=0x1EF9,
linebreak="al",
- shcode=0x0059,
- specials={ "char", 0x0059, 0x0303 },
+ shcode=0x59,
+ specials={ "char", 0x59, 0x303 },
unicodeslot=0x1EF8,
},
[0x1EF9]={
@@ -54524,8 +55736,8 @@ characters.data={
description="LATIN SMALL LETTER Y WITH TILDE",
direction="l",
linebreak="al",
- shcode=0x0079,
- specials={ "char", 0x0079, 0x0303 },
+ shcode=0x79,
+ specials={ "char", 0x79, 0x303 },
uccode=0x1EF8,
unicodeslot=0x1EF9,
},
@@ -54577,8 +55789,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0313 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x313 },
uccode=0x1F08,
unicodeslot=0x1F00,
},
@@ -54588,8 +55800,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0314 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x314 },
uccode=0x1F09,
unicodeslot=0x1F01,
},
@@ -54599,8 +55811,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F00, 0x0300 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F00, 0x300 },
uccode=0x1F0A,
unicodeslot=0x1F02,
},
@@ -54610,8 +55822,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F01, 0x0300 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F01, 0x300 },
uccode=0x1F0B,
unicodeslot=0x1F03,
},
@@ -54621,8 +55833,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F00, 0x0301 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F00, 0x301 },
uccode=0x1F0C,
unicodeslot=0x1F04,
},
@@ -54632,8 +55844,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F01, 0x0301 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F01, 0x301 },
uccode=0x1F0D,
unicodeslot=0x1F05,
},
@@ -54643,8 +55855,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F00, 0x0342 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F00, 0x342 },
uccode=0x1F0E,
unicodeslot=0x1F06,
},
@@ -54654,8 +55866,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F01, 0x0342 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F01, 0x342 },
uccode=0x1F0F,
unicodeslot=0x1F07,
},
@@ -54666,8 +55878,8 @@ characters.data={
direction="l",
lccode=0x1F00,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0313 },
+ shcode=0x391,
+ specials={ "char", 0x391, 0x313 },
unicodeslot=0x1F08,
},
[0x1F09]={
@@ -54677,8 +55889,8 @@ characters.data={
direction="l",
lccode=0x1F01,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0314 },
+ shcode=0x391,
+ specials={ "char", 0x391, 0x314 },
unicodeslot=0x1F09,
},
[0x1F0A]={
@@ -54688,8 +55900,8 @@ characters.data={
direction="l",
lccode=0x1F02,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F08, 0x0300 },
+ shcode=0x391,
+ specials={ "char", 0x1F08, 0x300 },
unicodeslot=0x1F0A,
},
[0x1F0B]={
@@ -54699,8 +55911,8 @@ characters.data={
direction="l",
lccode=0x1F03,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F09, 0x0300 },
+ shcode=0x391,
+ specials={ "char", 0x1F09, 0x300 },
unicodeslot=0x1F0B,
},
[0x1F0C]={
@@ -54710,8 +55922,8 @@ characters.data={
direction="l",
lccode=0x1F04,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F08, 0x0301 },
+ shcode=0x391,
+ specials={ "char", 0x1F08, 0x301 },
unicodeslot=0x1F0C,
},
[0x1F0D]={
@@ -54721,8 +55933,8 @@ characters.data={
direction="l",
lccode=0x1F05,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F09, 0x0301 },
+ shcode=0x391,
+ specials={ "char", 0x1F09, 0x301 },
unicodeslot=0x1F0D,
},
[0x1F0E]={
@@ -54732,8 +55944,8 @@ characters.data={
direction="l",
lccode=0x1F06,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F08, 0x0342 },
+ shcode=0x391,
+ specials={ "char", 0x1F08, 0x342 },
unicodeslot=0x1F0E,
},
[0x1F0F]={
@@ -54743,8 +55955,8 @@ characters.data={
direction="l",
lccode=0x1F07,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F09, 0x0342 },
+ shcode=0x391,
+ specials={ "char", 0x1F09, 0x342 },
unicodeslot=0x1F0F,
},
[0x1F10]={
@@ -54753,8 +55965,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x03B5, 0x0313 },
+ shcode=0x3B5,
+ specials={ "char", 0x3B5, 0x313 },
uccode=0x1F18,
unicodeslot=0x1F10,
},
@@ -54764,8 +55976,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x03B5, 0x0314 },
+ shcode=0x3B5,
+ specials={ "char", 0x3B5, 0x314 },
uccode=0x1F19,
unicodeslot=0x1F11,
},
@@ -54775,8 +55987,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x1F10, 0x0300 },
+ shcode=0x3B5,
+ specials={ "char", 0x1F10, 0x300 },
uccode=0x1F1A,
unicodeslot=0x1F12,
},
@@ -54786,8 +55998,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x1F11, 0x0300 },
+ shcode=0x3B5,
+ specials={ "char", 0x1F11, 0x300 },
uccode=0x1F1B,
unicodeslot=0x1F13,
},
@@ -54797,8 +56009,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x1F10, 0x0301 },
+ shcode=0x3B5,
+ specials={ "char", 0x1F10, 0x301 },
uccode=0x1F1C,
unicodeslot=0x1F14,
},
@@ -54808,8 +56020,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x1F11, 0x0301 },
+ shcode=0x3B5,
+ specials={ "char", 0x1F11, 0x301 },
uccode=0x1F1D,
unicodeslot=0x1F15,
},
@@ -54820,8 +56032,8 @@ characters.data={
direction="l",
lccode=0x1F10,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x0395, 0x0313 },
+ shcode=0x395,
+ specials={ "char", 0x395, 0x313 },
unicodeslot=0x1F18,
},
[0x1F19]={
@@ -54831,8 +56043,8 @@ characters.data={
direction="l",
lccode=0x1F11,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x0395, 0x0314 },
+ shcode=0x395,
+ specials={ "char", 0x395, 0x314 },
unicodeslot=0x1F19,
},
[0x1F1A]={
@@ -54842,8 +56054,8 @@ characters.data={
direction="l",
lccode=0x1F12,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x1F18, 0x0300 },
+ shcode=0x395,
+ specials={ "char", 0x1F18, 0x300 },
unicodeslot=0x1F1A,
},
[0x1F1B]={
@@ -54853,8 +56065,8 @@ characters.data={
direction="l",
lccode=0x1F13,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x1F19, 0x0300 },
+ shcode=0x395,
+ specials={ "char", 0x1F19, 0x300 },
unicodeslot=0x1F1B,
},
[0x1F1C]={
@@ -54864,8 +56076,8 @@ characters.data={
direction="l",
lccode=0x1F14,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x1F18, 0x0301 },
+ shcode=0x395,
+ specials={ "char", 0x1F18, 0x301 },
unicodeslot=0x1F1C,
},
[0x1F1D]={
@@ -54875,8 +56087,8 @@ characters.data={
direction="l",
lccode=0x1F15,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x1F19, 0x0301 },
+ shcode=0x395,
+ specials={ "char", 0x1F19, 0x301 },
unicodeslot=0x1F1D,
},
[0x1F20]={
@@ -54885,8 +56097,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03B7, 0x0313 },
+ shcode=0x3B7,
+ specials={ "char", 0x3B7, 0x313 },
uccode=0x1F28,
unicodeslot=0x1F20,
},
@@ -54896,8 +56108,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03B7, 0x0314 },
+ shcode=0x3B7,
+ specials={ "char", 0x3B7, 0x314 },
uccode=0x1F29,
unicodeslot=0x1F21,
},
@@ -54907,8 +56119,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F20, 0x0300 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F20, 0x300 },
uccode=0x1F2A,
unicodeslot=0x1F22,
},
@@ -54918,8 +56130,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F21, 0x0300 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F21, 0x300 },
uccode=0x1F2B,
unicodeslot=0x1F23,
},
@@ -54929,8 +56141,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F20, 0x0301 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F20, 0x301 },
uccode=0x1F2C,
unicodeslot=0x1F24,
},
@@ -54940,8 +56152,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F21, 0x0301 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F21, 0x301 },
uccode=0x1F2D,
unicodeslot=0x1F25,
},
@@ -54951,8 +56163,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F20, 0x0342 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F20, 0x342 },
uccode=0x1F2E,
unicodeslot=0x1F26,
},
@@ -54962,8 +56174,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F21, 0x0342 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F21, 0x342 },
uccode=0x1F2F,
unicodeslot=0x1F27,
},
@@ -54974,8 +56186,8 @@ characters.data={
direction="l",
lccode=0x1F20,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x0397, 0x0313 },
+ shcode=0x397,
+ specials={ "char", 0x397, 0x313 },
unicodeslot=0x1F28,
},
[0x1F29]={
@@ -54985,8 +56197,8 @@ characters.data={
direction="l",
lccode=0x1F21,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x0397, 0x0314 },
+ shcode=0x397,
+ specials={ "char", 0x397, 0x314 },
unicodeslot=0x1F29,
},
[0x1F2A]={
@@ -54996,8 +56208,8 @@ characters.data={
direction="l",
lccode=0x1F22,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F28, 0x0300 },
+ shcode=0x397,
+ specials={ "char", 0x1F28, 0x300 },
unicodeslot=0x1F2A,
},
[0x1F2B]={
@@ -55007,8 +56219,8 @@ characters.data={
direction="l",
lccode=0x1F23,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F29, 0x0300 },
+ shcode=0x397,
+ specials={ "char", 0x1F29, 0x300 },
unicodeslot=0x1F2B,
},
[0x1F2C]={
@@ -55018,8 +56230,8 @@ characters.data={
direction="l",
lccode=0x1F24,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F28, 0x0301 },
+ shcode=0x397,
+ specials={ "char", 0x1F28, 0x301 },
unicodeslot=0x1F2C,
},
[0x1F2D]={
@@ -55029,8 +56241,8 @@ characters.data={
direction="l",
lccode=0x1F25,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F29, 0x0301 },
+ shcode=0x397,
+ specials={ "char", 0x1F29, 0x301 },
unicodeslot=0x1F2D,
},
[0x1F2E]={
@@ -55040,8 +56252,8 @@ characters.data={
direction="l",
lccode=0x1F26,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F28, 0x0342 },
+ shcode=0x397,
+ specials={ "char", 0x1F28, 0x342 },
unicodeslot=0x1F2E,
},
[0x1F2F]={
@@ -55051,8 +56263,8 @@ characters.data={
direction="l",
lccode=0x1F27,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F29, 0x0342 },
+ shcode=0x397,
+ specials={ "char", 0x1F29, 0x342 },
unicodeslot=0x1F2F,
},
[0x1F30]={
@@ -55061,8 +56273,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0313 },
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x313 },
uccode=0x1F38,
unicodeslot=0x1F30,
},
@@ -55072,8 +56284,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0314 },
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x314 },
uccode=0x1F39,
unicodeslot=0x1F31,
},
@@ -55083,8 +56295,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x1F30, 0x0300 },
+ shcode=0x3B9,
+ specials={ "char", 0x1F30, 0x300 },
uccode=0x1F3A,
unicodeslot=0x1F32,
},
@@ -55094,8 +56306,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x1F31, 0x0300 },
+ shcode=0x3B9,
+ specials={ "char", 0x1F31, 0x300 },
uccode=0x1F3B,
unicodeslot=0x1F33,
},
@@ -55105,8 +56317,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x1F30, 0x0301 },
+ shcode=0x3B9,
+ specials={ "char", 0x1F30, 0x301 },
uccode=0x1F3C,
unicodeslot=0x1F34,
},
@@ -55116,8 +56328,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x1F31, 0x0301 },
+ shcode=0x3B9,
+ specials={ "char", 0x1F31, 0x301 },
uccode=0x1F3D,
unicodeslot=0x1F35,
},
@@ -55127,8 +56339,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH PSILI AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x1F30, 0x0342 },
+ shcode=0x3B9,
+ specials={ "char", 0x1F30, 0x342 },
uccode=0x1F3E,
unicodeslot=0x1F36,
},
@@ -55138,8 +56350,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DASIA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x1F31, 0x0342 },
+ shcode=0x3B9,
+ specials={ "char", 0x1F31, 0x342 },
uccode=0x1F3F,
unicodeslot=0x1F37,
},
@@ -55150,8 +56362,8 @@ characters.data={
direction="l",
lccode=0x1F30,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0313 },
+ shcode=0x399,
+ specials={ "char", 0x399, 0x313 },
unicodeslot=0x1F38,
},
[0x1F39]={
@@ -55161,8 +56373,8 @@ characters.data={
direction="l",
lccode=0x1F31,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0314 },
+ shcode=0x399,
+ specials={ "char", 0x399, 0x314 },
unicodeslot=0x1F39,
},
[0x1F3A]={
@@ -55172,8 +56384,8 @@ characters.data={
direction="l",
lccode=0x1F32,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x1F38, 0x0300 },
+ shcode=0x399,
+ specials={ "char", 0x1F38, 0x300 },
unicodeslot=0x1F3A,
},
[0x1F3B]={
@@ -55183,8 +56395,8 @@ characters.data={
direction="l",
lccode=0x1F33,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x1F39, 0x0300 },
+ shcode=0x399,
+ specials={ "char", 0x1F39, 0x300 },
unicodeslot=0x1F3B,
},
[0x1F3C]={
@@ -55194,8 +56406,8 @@ characters.data={
direction="l",
lccode=0x1F34,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x1F38, 0x0301 },
+ shcode=0x399,
+ specials={ "char", 0x1F38, 0x301 },
unicodeslot=0x1F3C,
},
[0x1F3D]={
@@ -55205,8 +56417,8 @@ characters.data={
direction="l",
lccode=0x1F35,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x1F39, 0x0301 },
+ shcode=0x399,
+ specials={ "char", 0x1F39, 0x301 },
unicodeslot=0x1F3D,
},
[0x1F3E]={
@@ -55216,8 +56428,8 @@ characters.data={
direction="l",
lccode=0x1F36,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x1F38, 0x0342 },
+ shcode=0x399,
+ specials={ "char", 0x1F38, 0x342 },
unicodeslot=0x1F3E,
},
[0x1F3F]={
@@ -55227,8 +56439,8 @@ characters.data={
direction="l",
lccode=0x1F37,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x1F39, 0x0342 },
+ shcode=0x399,
+ specials={ "char", 0x1F39, 0x342 },
unicodeslot=0x1F3F,
},
[0x1F40]={
@@ -55237,8 +56449,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x03BF, 0x0313 },
+ shcode=0x3BF,
+ specials={ "char", 0x3BF, 0x313 },
uccode=0x1F48,
unicodeslot=0x1F40,
},
@@ -55248,8 +56460,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x03BF, 0x0314 },
+ shcode=0x3BF,
+ specials={ "char", 0x3BF, 0x314 },
uccode=0x1F49,
unicodeslot=0x1F41,
},
@@ -55259,8 +56471,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x1F40, 0x0300 },
+ shcode=0x3BF,
+ specials={ "char", 0x1F40, 0x300 },
uccode=0x1F4A,
unicodeslot=0x1F42,
},
@@ -55270,8 +56482,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x1F41, 0x0300 },
+ shcode=0x3BF,
+ specials={ "char", 0x1F41, 0x300 },
uccode=0x1F4B,
unicodeslot=0x1F43,
},
@@ -55281,8 +56493,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x1F40, 0x0301 },
+ shcode=0x3BF,
+ specials={ "char", 0x1F40, 0x301 },
uccode=0x1F4C,
unicodeslot=0x1F44,
},
@@ -55292,8 +56504,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x1F41, 0x0301 },
+ shcode=0x3BF,
+ specials={ "char", 0x1F41, 0x301 },
uccode=0x1F4D,
unicodeslot=0x1F45,
},
@@ -55304,8 +56516,8 @@ characters.data={
direction="l",
lccode=0x1F40,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x039F, 0x0313 },
+ shcode=0x39F,
+ specials={ "char", 0x39F, 0x313 },
unicodeslot=0x1F48,
},
[0x1F49]={
@@ -55315,8 +56527,8 @@ characters.data={
direction="l",
lccode=0x1F41,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x039F, 0x0314 },
+ shcode=0x39F,
+ specials={ "char", 0x39F, 0x314 },
unicodeslot=0x1F49,
},
[0x1F4A]={
@@ -55326,8 +56538,8 @@ characters.data={
direction="l",
lccode=0x1F42,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x1F48, 0x0300 },
+ shcode=0x39F,
+ specials={ "char", 0x1F48, 0x300 },
unicodeslot=0x1F4A,
},
[0x1F4B]={
@@ -55337,8 +56549,8 @@ characters.data={
direction="l",
lccode=0x1F43,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x1F49, 0x0300 },
+ shcode=0x39F,
+ specials={ "char", 0x1F49, 0x300 },
unicodeslot=0x1F4B,
},
[0x1F4C]={
@@ -55348,8 +56560,8 @@ characters.data={
direction="l",
lccode=0x1F44,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x1F48, 0x0301 },
+ shcode=0x39F,
+ specials={ "char", 0x1F48, 0x301 },
unicodeslot=0x1F4C,
},
[0x1F4D]={
@@ -55359,8 +56571,8 @@ characters.data={
direction="l",
lccode=0x1F45,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x1F49, 0x0301 },
+ shcode=0x39F,
+ specials={ "char", 0x1F49, 0x301 },
unicodeslot=0x1F4D,
},
[0x1F50]={
@@ -55369,8 +56581,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0313 },
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x313 },
unicodeslot=0x1F50,
},
[0x1F51]={
@@ -55379,8 +56591,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0314 },
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x314 },
uccode=0x1F59,
unicodeslot=0x1F51,
},
@@ -55390,8 +56602,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x1F50, 0x0300 },
+ shcode=0x3C5,
+ specials={ "char", 0x1F50, 0x300 },
unicodeslot=0x1F52,
},
[0x1F53]={
@@ -55400,8 +56612,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x1F51, 0x0300 },
+ shcode=0x3C5,
+ specials={ "char", 0x1F51, 0x300 },
uccode=0x1F5B,
unicodeslot=0x1F53,
},
@@ -55411,8 +56623,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x1F50, 0x0301 },
+ shcode=0x3C5,
+ specials={ "char", 0x1F50, 0x301 },
unicodeslot=0x1F54,
},
[0x1F55]={
@@ -55421,8 +56633,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x1F51, 0x0301 },
+ shcode=0x3C5,
+ specials={ "char", 0x1F51, 0x301 },
uccode=0x1F5D,
unicodeslot=0x1F55,
},
@@ -55432,8 +56644,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x1F50, 0x0342 },
+ shcode=0x3C5,
+ specials={ "char", 0x1F50, 0x342 },
unicodeslot=0x1F56,
},
[0x1F57]={
@@ -55442,8 +56654,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x1F51, 0x0342 },
+ shcode=0x3C5,
+ specials={ "char", 0x1F51, 0x342 },
uccode=0x1F5F,
unicodeslot=0x1F57,
},
@@ -55454,8 +56666,8 @@ characters.data={
direction="l",
lccode=0x1F51,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x03A5, 0x0314 },
+ shcode=0x3A5,
+ specials={ "char", 0x3A5, 0x314 },
unicodeslot=0x1F59,
},
[0x1F5B]={
@@ -55465,8 +56677,8 @@ characters.data={
direction="l",
lccode=0x1F53,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x1F59, 0x0300 },
+ shcode=0x3A5,
+ specials={ "char", 0x1F59, 0x300 },
unicodeslot=0x1F5B,
},
[0x1F5D]={
@@ -55476,8 +56688,8 @@ characters.data={
direction="l",
lccode=0x1F55,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x1F59, 0x0301 },
+ shcode=0x3A5,
+ specials={ "char", 0x1F59, 0x301 },
unicodeslot=0x1F5D,
},
[0x1F5F]={
@@ -55487,8 +56699,8 @@ characters.data={
direction="l",
lccode=0x1F57,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x1F59, 0x0342 },
+ shcode=0x3A5,
+ specials={ "char", 0x1F59, 0x342 },
unicodeslot=0x1F5F,
},
[0x1F60]={
@@ -55497,8 +56709,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03C9, 0x0313 },
+ shcode=0x3C9,
+ specials={ "char", 0x3C9, 0x313 },
uccode=0x1F68,
unicodeslot=0x1F60,
},
@@ -55508,8 +56720,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03C9, 0x0314 },
+ shcode=0x3C9,
+ specials={ "char", 0x3C9, 0x314 },
uccode=0x1F69,
unicodeslot=0x1F61,
},
@@ -55519,8 +56731,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F60, 0x0300 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F60, 0x300 },
uccode=0x1F6A,
unicodeslot=0x1F62,
},
@@ -55530,8 +56742,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F61, 0x0300 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F61, 0x300 },
uccode=0x1F6B,
unicodeslot=0x1F63,
},
@@ -55541,8 +56753,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F60, 0x0301 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F60, 0x301 },
uccode=0x1F6C,
unicodeslot=0x1F64,
},
@@ -55552,8 +56764,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F61, 0x0301 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F61, 0x301 },
uccode=0x1F6D,
unicodeslot=0x1F65,
},
@@ -55563,8 +56775,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F60, 0x0342 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F60, 0x342 },
uccode=0x1F6E,
unicodeslot=0x1F66,
},
@@ -55574,8 +56786,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F61, 0x0342 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F61, 0x342 },
uccode=0x1F6F,
unicodeslot=0x1F67,
},
@@ -55586,8 +56798,8 @@ characters.data={
direction="l",
lccode=0x1F60,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x03A9, 0x0313 },
+ shcode=0x3A9,
+ specials={ "char", 0x3A9, 0x313 },
unicodeslot=0x1F68,
},
[0x1F69]={
@@ -55597,8 +56809,8 @@ characters.data={
direction="l",
lccode=0x1F61,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x03A9, 0x0314 },
+ shcode=0x3A9,
+ specials={ "char", 0x3A9, 0x314 },
unicodeslot=0x1F69,
},
[0x1F6A]={
@@ -55608,8 +56820,8 @@ characters.data={
direction="l",
lccode=0x1F62,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F68, 0x0300 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F68, 0x300 },
unicodeslot=0x1F6A,
},
[0x1F6B]={
@@ -55619,8 +56831,8 @@ characters.data={
direction="l",
lccode=0x1F63,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F69, 0x0300 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F69, 0x300 },
unicodeslot=0x1F6B,
},
[0x1F6C]={
@@ -55630,8 +56842,8 @@ characters.data={
direction="l",
lccode=0x1F64,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F68, 0x0301 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F68, 0x301 },
unicodeslot=0x1F6C,
},
[0x1F6D]={
@@ -55641,8 +56853,8 @@ characters.data={
direction="l",
lccode=0x1F65,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F69, 0x0301 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F69, 0x301 },
unicodeslot=0x1F6D,
},
[0x1F6E]={
@@ -55652,8 +56864,8 @@ characters.data={
direction="l",
lccode=0x1F66,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F68, 0x0342 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F68, 0x342 },
unicodeslot=0x1F6E,
},
[0x1F6F]={
@@ -55663,8 +56875,8 @@ characters.data={
direction="l",
lccode=0x1F67,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F69, 0x0342 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F69, 0x342 },
unicodeslot=0x1F6F,
},
[0x1F70]={
@@ -55673,8 +56885,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0300 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x300 },
uccode=0x1FBA,
unicodeslot=0x1F70,
},
@@ -55684,8 +56896,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03AC },
+ shcode=0x3B1,
+ specials={ "char", 0x3AC },
uccode=0x1FBB,
unicodeslot=0x1F71,
},
@@ -55695,8 +56907,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x03B5, 0x0300 },
+ shcode=0x3B5,
+ specials={ "char", 0x3B5, 0x300 },
uccode=0x1FC8,
unicodeslot=0x1F72,
},
@@ -55706,8 +56918,8 @@ characters.data={
description="GREEK SMALL LETTER EPSILON WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03B5,
- specials={ "char", 0x03AD },
+ shcode=0x3B5,
+ specials={ "char", 0x3AD },
uccode=0x1FC9,
unicodeslot=0x1F73,
},
@@ -55717,8 +56929,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03B7, 0x0300 },
+ shcode=0x3B7,
+ specials={ "char", 0x3B7, 0x300 },
uccode=0x1FCA,
unicodeslot=0x1F74,
},
@@ -55728,8 +56940,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03AE },
+ shcode=0x3B7,
+ specials={ "char", 0x3AE },
uccode=0x1FCB,
unicodeslot=0x1F75,
},
@@ -55739,8 +56951,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0300 },
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x300 },
uccode=0x1FDA,
unicodeslot=0x1F76,
},
@@ -55750,8 +56962,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03AF },
+ shcode=0x3B9,
+ specials={ "char", 0x3AF },
uccode=0x1FDB,
unicodeslot=0x1F77,
},
@@ -55761,8 +56973,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x03BF, 0x0300 },
+ shcode=0x3BF,
+ specials={ "char", 0x3BF, 0x300 },
uccode=0x1FF8,
unicodeslot=0x1F78,
},
@@ -55772,8 +56984,8 @@ characters.data={
description="GREEK SMALL LETTER OMICRON WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03BF,
- specials={ "char", 0x03CC },
+ shcode=0x3BF,
+ specials={ "char", 0x3CC },
uccode=0x1FF9,
unicodeslot=0x1F79,
},
@@ -55783,8 +56995,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0300 },
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x300 },
uccode=0x1FEA,
unicodeslot=0x1F7A,
},
@@ -55794,8 +57006,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03CD },
+ shcode=0x3C5,
+ specials={ "char", 0x3CD },
uccode=0x1FEB,
unicodeslot=0x1F7B,
},
@@ -55805,8 +57017,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH VARIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03C9, 0x0300 },
+ shcode=0x3C9,
+ specials={ "char", 0x3C9, 0x300 },
uccode=0x1FFA,
unicodeslot=0x1F7C,
},
@@ -55816,8 +57028,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH OXIA",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03CE },
+ shcode=0x3C9,
+ specials={ "char", 0x3CE },
uccode=0x1FFB,
unicodeslot=0x1F7D,
},
@@ -55827,8 +57039,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F00, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F00, 0x345 },
uccode=0x1F88,
unicodeslot=0x1F80,
},
@@ -55838,8 +57050,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F01, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F01, 0x345 },
uccode=0x1F89,
unicodeslot=0x1F81,
},
@@ -55849,8 +57061,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F02, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F02, 0x345 },
uccode=0x1F8A,
unicodeslot=0x1F82,
},
@@ -55860,8 +57072,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F03, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F03, 0x345 },
uccode=0x1F8B,
unicodeslot=0x1F83,
},
@@ -55871,8 +57083,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F04, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F04, 0x345 },
uccode=0x1F8C,
unicodeslot=0x1F84,
},
@@ -55882,8 +57094,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F05, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F05, 0x345 },
uccode=0x1F8D,
unicodeslot=0x1F85,
},
@@ -55893,8 +57105,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F06, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F06, 0x345 },
uccode=0x1F8E,
unicodeslot=0x1F86,
},
@@ -55904,8 +57116,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F07, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F07, 0x345 },
uccode=0x1F8F,
unicodeslot=0x1F87,
},
@@ -55916,8 +57128,8 @@ characters.data={
direction="l",
lccode=0x1F80,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F08, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F08, 0x345 },
unicodeslot=0x1F88,
},
[0x1F89]={
@@ -55927,8 +57139,8 @@ characters.data={
direction="l",
lccode=0x1F81,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F09, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F09, 0x345 },
unicodeslot=0x1F89,
},
[0x1F8A]={
@@ -55938,8 +57150,8 @@ characters.data={
direction="l",
lccode=0x1F82,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F0A, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F0A, 0x345 },
unicodeslot=0x1F8A,
},
[0x1F8B]={
@@ -55949,8 +57161,8 @@ characters.data={
direction="l",
lccode=0x1F83,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F0B, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F0B, 0x345 },
unicodeslot=0x1F8B,
},
[0x1F8C]={
@@ -55960,8 +57172,8 @@ characters.data={
direction="l",
lccode=0x1F84,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F0C, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F0C, 0x345 },
unicodeslot=0x1F8C,
},
[0x1F8D]={
@@ -55971,8 +57183,8 @@ characters.data={
direction="l",
lccode=0x1F85,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F0D, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F0D, 0x345 },
unicodeslot=0x1F8D,
},
[0x1F8E]={
@@ -55982,8 +57194,8 @@ characters.data={
direction="l",
lccode=0x1F86,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F0E, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F0E, 0x345 },
unicodeslot=0x1F8E,
},
[0x1F8F]={
@@ -55993,8 +57205,8 @@ characters.data={
direction="l",
lccode=0x1F87,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x1F0F, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x1F0F, 0x345 },
unicodeslot=0x1F8F,
},
[0x1F90]={
@@ -56003,8 +57215,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F20, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F20, 0x345 },
uccode=0x1F98,
unicodeslot=0x1F90,
},
@@ -56014,8 +57226,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F21, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F21, 0x345 },
uccode=0x1F99,
unicodeslot=0x1F91,
},
@@ -56025,8 +57237,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F22, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F22, 0x345 },
uccode=0x1F9A,
unicodeslot=0x1F92,
},
@@ -56036,8 +57248,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F23, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F23, 0x345 },
uccode=0x1F9B,
unicodeslot=0x1F93,
},
@@ -56047,8 +57259,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F24, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F24, 0x345 },
uccode=0x1F9C,
unicodeslot=0x1F94,
},
@@ -56058,8 +57270,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F25, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F25, 0x345 },
uccode=0x1F9D,
unicodeslot=0x1F95,
},
@@ -56069,8 +57281,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F26, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F26, 0x345 },
uccode=0x1F9E,
unicodeslot=0x1F96,
},
@@ -56080,8 +57292,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F27, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F27, 0x345 },
uccode=0x1F9F,
unicodeslot=0x1F97,
},
@@ -56092,8 +57304,8 @@ characters.data={
direction="l",
lccode=0x1F90,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F28, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F28, 0x345 },
unicodeslot=0x1F98,
},
[0x1F99]={
@@ -56103,8 +57315,8 @@ characters.data={
direction="l",
lccode=0x1F91,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F29, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F29, 0x345 },
unicodeslot=0x1F99,
},
[0x1F9A]={
@@ -56114,8 +57326,8 @@ characters.data={
direction="l",
lccode=0x1F92,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F2A, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F2A, 0x345 },
unicodeslot=0x1F9A,
},
[0x1F9B]={
@@ -56125,8 +57337,8 @@ characters.data={
direction="l",
lccode=0x1F93,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F2B, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F2B, 0x345 },
unicodeslot=0x1F9B,
},
[0x1F9C]={
@@ -56136,8 +57348,8 @@ characters.data={
direction="l",
lccode=0x1F94,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F2C, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F2C, 0x345 },
unicodeslot=0x1F9C,
},
[0x1F9D]={
@@ -56147,8 +57359,8 @@ characters.data={
direction="l",
lccode=0x1F95,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F2D, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F2D, 0x345 },
unicodeslot=0x1F9D,
},
[0x1F9E]={
@@ -56158,8 +57370,8 @@ characters.data={
direction="l",
lccode=0x1F96,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F2E, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F2E, 0x345 },
unicodeslot=0x1F9E,
},
[0x1F9F]={
@@ -56169,8 +57381,8 @@ characters.data={
direction="l",
lccode=0x1F97,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x1F2F, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x1F2F, 0x345 },
unicodeslot=0x1F9F,
},
[0x1FA0]={
@@ -56179,8 +57391,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F60, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F60, 0x345 },
uccode=0x1FA8,
unicodeslot=0x1FA0,
},
@@ -56190,8 +57402,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F61, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F61, 0x345 },
uccode=0x1FA9,
unicodeslot=0x1FA1,
},
@@ -56201,8 +57413,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F62, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F62, 0x345 },
uccode=0x1FAA,
unicodeslot=0x1FA2,
},
@@ -56212,8 +57424,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F63, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F63, 0x345 },
uccode=0x1FAB,
unicodeslot=0x1FA3,
},
@@ -56223,8 +57435,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F64, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F64, 0x345 },
uccode=0x1FAC,
unicodeslot=0x1FA4,
},
@@ -56234,8 +57446,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F65, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F65, 0x345 },
uccode=0x1FAD,
unicodeslot=0x1FA5,
},
@@ -56245,8 +57457,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F66, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F66, 0x345 },
uccode=0x1FAE,
unicodeslot=0x1FA6,
},
@@ -56256,8 +57468,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F67, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F67, 0x345 },
uccode=0x1FAF,
unicodeslot=0x1FA7,
},
@@ -56268,8 +57480,8 @@ characters.data={
direction="l",
lccode=0x1FA0,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F68, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F68, 0x345 },
unicodeslot=0x1FA8,
},
[0x1FA9]={
@@ -56279,8 +57491,8 @@ characters.data={
direction="l",
lccode=0x1FA1,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F69, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F69, 0x345 },
unicodeslot=0x1FA9,
},
[0x1FAA]={
@@ -56290,8 +57502,8 @@ characters.data={
direction="l",
lccode=0x1FA2,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F6A, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F6A, 0x345 },
unicodeslot=0x1FAA,
},
[0x1FAB]={
@@ -56301,8 +57513,8 @@ characters.data={
direction="l",
lccode=0x1FA3,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F6B, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F6B, 0x345 },
unicodeslot=0x1FAB,
},
[0x1FAC]={
@@ -56312,8 +57524,8 @@ characters.data={
direction="l",
lccode=0x1FA4,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F6C, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F6C, 0x345 },
unicodeslot=0x1FAC,
},
[0x1FAD]={
@@ -56323,8 +57535,8 @@ characters.data={
direction="l",
lccode=0x1FA5,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F6D, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F6D, 0x345 },
unicodeslot=0x1FAD,
},
[0x1FAE]={
@@ -56334,8 +57546,8 @@ characters.data={
direction="l",
lccode=0x1FA6,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F6E, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F6E, 0x345 },
unicodeslot=0x1FAE,
},
[0x1FAF]={
@@ -56345,8 +57557,8 @@ characters.data={
direction="l",
lccode=0x1FA7,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x1F6F, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x1F6F, 0x345 },
unicodeslot=0x1FAF,
},
[0x1FB0]={
@@ -56355,8 +57567,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH VRACHY",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0306 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x306 },
uccode=0x1FB8,
unicodeslot=0x1FB0,
},
@@ -56366,8 +57578,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0304 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x304 },
uccode=0x1FB9,
unicodeslot=0x1FB1,
},
@@ -56377,8 +57589,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1F70, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1F70, 0x345 },
unicodeslot=0x1FB2,
},
[0x1FB3]={
@@ -56387,8 +57599,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x345 },
uccode=0x1FBC,
unicodeslot=0x1FB3,
},
@@ -56398,8 +57610,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03AC, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x3AC, 0x345 },
unicodeslot=0x1FB4,
},
[0x1FB6]={
@@ -56408,8 +57620,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x03B1, 0x0342 },
+ shcode=0x3B1,
+ specials={ "char", 0x3B1, 0x342 },
unicodeslot=0x1FB6,
},
[0x1FB7]={
@@ -56418,8 +57630,8 @@ characters.data={
description="GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B1,
- specials={ "char", 0x1FB6, 0x0345 },
+ shcode=0x3B1,
+ specials={ "char", 0x1FB6, 0x345 },
unicodeslot=0x1FB7,
},
[0x1FB8]={
@@ -56429,8 +57641,8 @@ characters.data={
direction="l",
lccode=0x1FB0,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0306 },
+ shcode=0x391,
+ specials={ "char", 0x391, 0x306 },
unicodeslot=0x1FB8,
},
[0x1FB9]={
@@ -56440,8 +57652,8 @@ characters.data={
direction="l",
lccode=0x1FB1,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0304 },
+ shcode=0x391,
+ specials={ "char", 0x391, 0x304 },
unicodeslot=0x1FB9,
},
[0x1FBA]={
@@ -56451,8 +57663,8 @@ characters.data={
direction="l",
lccode=0x1F70,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0300 },
+ shcode=0x391,
+ specials={ "char", 0x391, 0x300 },
unicodeslot=0x1FBA,
},
[0x1FBB]={
@@ -56462,8 +57674,8 @@ characters.data={
direction="l",
lccode=0x1F71,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0386 },
+ shcode=0x391,
+ specials={ "char", 0x386 },
unicodeslot=0x1FBB,
},
[0x1FBC]={
@@ -56473,8 +57685,8 @@ characters.data={
direction="l",
lccode=0x1FB3,
linebreak="al",
- shcode=0x0391,
- specials={ "char", 0x0391, 0x0345 },
+ shcode=0x391,
+ specials={ "char", 0x391, 0x345 },
unicodeslot=0x1FBC,
},
[0x1FBD]={
@@ -56483,7 +57695,7 @@ characters.data={
description="GREEK KORONIS",
direction="on",
linebreak="al",
- specials={ "compat", 0x0020, 0x0313 },
+ specials={ "compat", 0x20, 0x313 },
unicodeslot=0x1FBD,
},
[0x1FBE]={
@@ -56492,8 +57704,8 @@ characters.data={
description="GREEK PROSGEGRAMMENI",
direction="l",
linebreak="al",
- specials={ "char", 0x03B9 },
- uccode=0x0399,
+ specials={ "char", 0x3B9 },
+ uccode=0x399,
unicodeslot=0x1FBE,
},
[0x1FBF]={
@@ -56502,7 +57714,7 @@ characters.data={
description="GREEK PSILI",
direction="on",
linebreak="al",
- specials={ "compat", 0x0020, 0x0313 },
+ specials={ "compat", 0x20, 0x313 },
unicodeslot=0x1FBF,
},
[0x1FC0]={
@@ -56511,7 +57723,7 @@ characters.data={
description="GREEK PERISPOMENI",
direction="on",
linebreak="al",
- specials={ "compat", 0x0020, 0x0342 },
+ specials={ "compat", 0x20, 0x342 },
unicodeslot=0x1FC0,
},
[0x1FC1]={
@@ -56520,7 +57732,7 @@ characters.data={
description="GREEK DIALYTIKA AND PERISPOMENI",
direction="on",
linebreak="al",
- specials={ "char", 0x00A8, 0x0342 },
+ specials={ "char", 0xA8, 0x342 },
unicodeslot=0x1FC1,
},
[0x1FC2]={
@@ -56529,8 +57741,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1F74, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1F74, 0x345 },
unicodeslot=0x1FC2,
},
[0x1FC3]={
@@ -56539,8 +57751,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03B7, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x3B7, 0x345 },
uccode=0x1FCC,
unicodeslot=0x1FC3,
},
@@ -56550,8 +57762,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03AE, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x3AE, 0x345 },
unicodeslot=0x1FC4,
},
[0x1FC6]={
@@ -56560,8 +57772,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x03B7, 0x0342 },
+ shcode=0x3B7,
+ specials={ "char", 0x3B7, 0x342 },
unicodeslot=0x1FC6,
},
[0x1FC7]={
@@ -56570,8 +57782,8 @@ characters.data={
description="GREEK SMALL LETTER ETA WITH PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03B7,
- specials={ "char", 0x1FC6, 0x0345 },
+ shcode=0x3B7,
+ specials={ "char", 0x1FC6, 0x345 },
unicodeslot=0x1FC7,
},
[0x1FC8]={
@@ -56581,8 +57793,8 @@ characters.data={
direction="l",
lccode=0x1F72,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x0395, 0x0300 },
+ shcode=0x395,
+ specials={ "char", 0x395, 0x300 },
unicodeslot=0x1FC8,
},
[0x1FC9]={
@@ -56592,8 +57804,8 @@ characters.data={
direction="l",
lccode=0x1F73,
linebreak="al",
- shcode=0x0395,
- specials={ "char", 0x0388 },
+ shcode=0x395,
+ specials={ "char", 0x388 },
unicodeslot=0x1FC9,
},
[0x1FCA]={
@@ -56603,8 +57815,8 @@ characters.data={
direction="l",
lccode=0x1F74,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x0397, 0x0300 },
+ shcode=0x397,
+ specials={ "char", 0x397, 0x300 },
unicodeslot=0x1FCA,
},
[0x1FCB]={
@@ -56614,8 +57826,8 @@ characters.data={
direction="l",
lccode=0x1F75,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x0389 },
+ shcode=0x397,
+ specials={ "char", 0x389 },
unicodeslot=0x1FCB,
},
[0x1FCC]={
@@ -56625,8 +57837,8 @@ characters.data={
direction="l",
lccode=0x1FC3,
linebreak="al",
- shcode=0x0397,
- specials={ "char", 0x0397, 0x0345 },
+ shcode=0x397,
+ specials={ "char", 0x397, 0x345 },
unicodeslot=0x1FCC,
},
[0x1FCD]={
@@ -56635,7 +57847,7 @@ characters.data={
description="GREEK PSILI AND VARIA",
direction="on",
linebreak="al",
- specials={ "char", 0x1FBF, 0x0300 },
+ specials={ "char", 0x1FBF, 0x300 },
unicodeslot=0x1FCD,
},
[0x1FCE]={
@@ -56644,7 +57856,7 @@ characters.data={
description="GREEK PSILI AND OXIA",
direction="on",
linebreak="al",
- specials={ "char", 0x1FBF, 0x0301 },
+ specials={ "char", 0x1FBF, 0x301 },
unicodeslot=0x1FCE,
},
[0x1FCF]={
@@ -56653,7 +57865,7 @@ characters.data={
description="GREEK PSILI AND PERISPOMENI",
direction="on",
linebreak="al",
- specials={ "char", 0x1FBF, 0x0342 },
+ specials={ "char", 0x1FBF, 0x342 },
unicodeslot=0x1FCF,
},
[0x1FD0]={
@@ -56662,8 +57874,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH VRACHY",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0306 },
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x306 },
uccode=0x1FD8,
unicodeslot=0x1FD0,
},
@@ -56673,8 +57885,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0304 },
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x304 },
uccode=0x1FD9,
unicodeslot=0x1FD1,
},
@@ -56684,8 +57896,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03CA, 0x0300 },
+ shcode=0x3B9,
+ specials={ "char", 0x3CA, 0x300 },
unicodeslot=0x1FD2,
},
[0x1FD3]={
@@ -56694,8 +57906,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x0390 },
+ shcode=0x3B9,
+ specials={ "char", 0x390 },
unicodeslot=0x1FD3,
},
[0x1FD6]={
@@ -56704,8 +57916,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03B9, 0x0342 },
+ shcode=0x3B9,
+ specials={ "char", 0x3B9, 0x342 },
unicodeslot=0x1FD6,
},
[0x1FD7]={
@@ -56714,8 +57926,8 @@ characters.data={
description="GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03B9,
- specials={ "char", 0x03CA, 0x0342 },
+ shcode=0x3B9,
+ specials={ "char", 0x3CA, 0x342 },
unicodeslot=0x1FD7,
},
[0x1FD8]={
@@ -56725,8 +57937,8 @@ characters.data={
direction="l",
lccode=0x1FD0,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0306 },
+ shcode=0x399,
+ specials={ "char", 0x399, 0x306 },
unicodeslot=0x1FD8,
},
[0x1FD9]={
@@ -56736,8 +57948,8 @@ characters.data={
direction="l",
lccode=0x1FD1,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0304 },
+ shcode=0x399,
+ specials={ "char", 0x399, 0x304 },
unicodeslot=0x1FD9,
},
[0x1FDA]={
@@ -56747,8 +57959,8 @@ characters.data={
direction="l",
lccode=0x1F76,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x0399, 0x0300 },
+ shcode=0x399,
+ specials={ "char", 0x399, 0x300 },
unicodeslot=0x1FDA,
},
[0x1FDB]={
@@ -56758,8 +57970,8 @@ characters.data={
direction="l",
lccode=0x1F77,
linebreak="al",
- shcode=0x0399,
- specials={ "char", 0x038A },
+ shcode=0x399,
+ specials={ "char", 0x38A },
unicodeslot=0x1FDB,
},
[0x1FDD]={
@@ -56768,7 +57980,7 @@ characters.data={
description="GREEK DASIA AND VARIA",
direction="on",
linebreak="al",
- specials={ "char", 0x1FFE, 0x0300 },
+ specials={ "char", 0x1FFE, 0x300 },
unicodeslot=0x1FDD,
},
[0x1FDE]={
@@ -56777,7 +57989,7 @@ characters.data={
description="GREEK DASIA AND OXIA",
direction="on",
linebreak="al",
- specials={ "char", 0x1FFE, 0x0301 },
+ specials={ "char", 0x1FFE, 0x301 },
unicodeslot=0x1FDE,
},
[0x1FDF]={
@@ -56786,7 +57998,7 @@ characters.data={
description="GREEK DASIA AND PERISPOMENI",
direction="on",
linebreak="al",
- specials={ "char", 0x1FFE, 0x0342 },
+ specials={ "char", 0x1FFE, 0x342 },
unicodeslot=0x1FDF,
},
[0x1FE0]={
@@ -56795,8 +58007,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH VRACHY",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0306 },
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x306 },
uccode=0x1FE8,
unicodeslot=0x1FE0,
},
@@ -56806,8 +58018,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH MACRON",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0304 },
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x304 },
uccode=0x1FE9,
unicodeslot=0x1FE1,
},
@@ -56817,8 +58029,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03CB, 0x0300 },
+ shcode=0x3C5,
+ specials={ "char", 0x3CB, 0x300 },
unicodeslot=0x1FE2,
},
[0x1FE3]={
@@ -56827,8 +58039,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03B0 },
+ shcode=0x3C5,
+ specials={ "char", 0x3B0 },
unicodeslot=0x1FE3,
},
[0x1FE4]={
@@ -56837,8 +58049,8 @@ characters.data={
description="GREEK SMALL LETTER RHO WITH PSILI",
direction="l",
linebreak="al",
- shcode=0x03C1,
- specials={ "char", 0x03C1, 0x0313 },
+ shcode=0x3C1,
+ specials={ "char", 0x3C1, 0x313 },
unicodeslot=0x1FE4,
},
[0x1FE5]={
@@ -56847,8 +58059,8 @@ characters.data={
description="GREEK SMALL LETTER RHO WITH DASIA",
direction="l",
linebreak="al",
- shcode=0x03C1,
- specials={ "char", 0x03C1, 0x0314 },
+ shcode=0x3C1,
+ specials={ "char", 0x3C1, 0x314 },
uccode=0x1FEC,
unicodeslot=0x1FE5,
},
@@ -56858,8 +58070,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03C5, 0x0342 },
+ shcode=0x3C5,
+ specials={ "char", 0x3C5, 0x342 },
unicodeslot=0x1FE6,
},
[0x1FE7]={
@@ -56868,8 +58080,8 @@ characters.data={
description="GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C5,
- specials={ "char", 0x03CB, 0x0342 },
+ shcode=0x3C5,
+ specials={ "char", 0x3CB, 0x342 },
unicodeslot=0x1FE7,
},
[0x1FE8]={
@@ -56879,8 +58091,8 @@ characters.data={
direction="l",
lccode=0x1FE0,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x03A5, 0x0306 },
+ shcode=0x3A5,
+ specials={ "char", 0x3A5, 0x306 },
unicodeslot=0x1FE8,
},
[0x1FE9]={
@@ -56890,8 +58102,8 @@ characters.data={
direction="l",
lccode=0x1FE1,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x03A5, 0x0304 },
+ shcode=0x3A5,
+ specials={ "char", 0x3A5, 0x304 },
unicodeslot=0x1FE9,
},
[0x1FEA]={
@@ -56901,8 +58113,8 @@ characters.data={
direction="l",
lccode=0x1F7A,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x03A5, 0x0300 },
+ shcode=0x3A5,
+ specials={ "char", 0x3A5, 0x300 },
unicodeslot=0x1FEA,
},
[0x1FEB]={
@@ -56912,8 +58124,8 @@ characters.data={
direction="l",
lccode=0x1F7B,
linebreak="al",
- shcode=0x03A5,
- specials={ "char", 0x038E },
+ shcode=0x3A5,
+ specials={ "char", 0x38E },
unicodeslot=0x1FEB,
},
[0x1FEC]={
@@ -56923,8 +58135,8 @@ characters.data={
direction="l",
lccode=0x1FE5,
linebreak="al",
- shcode=0x03A1,
- specials={ "char", 0x03A1, 0x0314 },
+ shcode=0x3A1,
+ specials={ "char", 0x3A1, 0x314 },
unicodeslot=0x1FEC,
},
[0x1FED]={
@@ -56933,7 +58145,7 @@ characters.data={
description="GREEK DIALYTIKA AND VARIA",
direction="on",
linebreak="al",
- specials={ "char", 0x00A8, 0x0300 },
+ specials={ "char", 0xA8, 0x300 },
unicodeslot=0x1FED,
},
[0x1FEE]={
@@ -56942,7 +58154,7 @@ characters.data={
description="GREEK DIALYTIKA AND OXIA",
direction="on",
linebreak="al",
- specials={ "char", 0x0385 },
+ specials={ "char", 0x385 },
unicodeslot=0x1FEE,
},
[0x1FEF]={
@@ -56951,7 +58163,7 @@ characters.data={
description="GREEK VARIA",
direction="on",
linebreak="al",
- specials={ "char", 0x0060 },
+ specials={ "char", 0x60 },
unicodeslot=0x1FEF,
},
[0x1FF2]={
@@ -56960,8 +58172,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1F7C, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1F7C, 0x345 },
unicodeslot=0x1FF2,
},
[0x1FF3]={
@@ -56970,8 +58182,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03C9, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x3C9, 0x345 },
uccode=0x1FFC,
unicodeslot=0x1FF3,
},
@@ -56981,8 +58193,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03CE, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x3CE, 0x345 },
unicodeslot=0x1FF4,
},
[0x1FF6]={
@@ -56991,8 +58203,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PERISPOMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x03C9, 0x0342 },
+ shcode=0x3C9,
+ specials={ "char", 0x3C9, 0x342 },
unicodeslot=0x1FF6,
},
[0x1FF7]={
@@ -57001,8 +58213,8 @@ characters.data={
description="GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND YPOGEGRAMMENI",
direction="l",
linebreak="al",
- shcode=0x03C9,
- specials={ "char", 0x1FF6, 0x0345 },
+ shcode=0x3C9,
+ specials={ "char", 0x1FF6, 0x345 },
unicodeslot=0x1FF7,
},
[0x1FF8]={
@@ -57012,8 +58224,8 @@ characters.data={
direction="l",
lccode=0x1F78,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x039F, 0x0300 },
+ shcode=0x39F,
+ specials={ "char", 0x39F, 0x300 },
unicodeslot=0x1FF8,
},
[0x1FF9]={
@@ -57023,8 +58235,8 @@ characters.data={
direction="l",
lccode=0x1F79,
linebreak="al",
- shcode=0x039F,
- specials={ "char", 0x038C },
+ shcode=0x39F,
+ specials={ "char", 0x38C },
unicodeslot=0x1FF9,
},
[0x1FFA]={
@@ -57034,8 +58246,8 @@ characters.data={
direction="l",
lccode=0x1F7C,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x03A9, 0x0300 },
+ shcode=0x3A9,
+ specials={ "char", 0x3A9, 0x300 },
unicodeslot=0x1FFA,
},
[0x1FFB]={
@@ -57045,8 +58257,8 @@ characters.data={
direction="l",
lccode=0x1F7D,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x038F },
+ shcode=0x3A9,
+ specials={ "char", 0x38F },
unicodeslot=0x1FFB,
},
[0x1FFC]={
@@ -57056,8 +58268,8 @@ characters.data={
direction="l",
lccode=0x1FF3,
linebreak="al",
- shcode=0x03A9,
- specials={ "char", 0x03A9, 0x0345 },
+ shcode=0x3A9,
+ specials={ "char", 0x3A9, 0x345 },
unicodeslot=0x1FFC,
},
[0x1FFD]={
@@ -57066,7 +58278,7 @@ characters.data={
description="GREEK OXIA",
direction="on",
linebreak="bb",
- specials={ "char", 0x00B4 },
+ specials={ "char", 0xB4 },
unicodeslot=0x1FFD,
},
[0x1FFE]={
@@ -57075,7 +58287,7 @@ characters.data={
description="GREEK DASIA",
direction="on",
linebreak="al",
- specials={ "compat", 0x0020, 0x0314 },
+ specials={ "compat", 0x20, 0x314 },
unicodeslot=0x1FFE,
},
[0x2000]={
@@ -57101,7 +58313,7 @@ characters.data={
description="EN SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2002,
},
[0x2003]={
@@ -57110,7 +58322,7 @@ characters.data={
description="EM SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2003,
},
[0x2004]={
@@ -57119,7 +58331,7 @@ characters.data={
description="THREE-PER-EM SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2004,
},
[0x2005]={
@@ -57128,7 +58340,7 @@ characters.data={
description="FOUR-PER-EM SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2005,
},
[0x2006]={
@@ -57137,7 +58349,7 @@ characters.data={
description="SIX-PER-EM SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2006,
},
[0x2007]={
@@ -57146,7 +58358,7 @@ characters.data={
description="FIGURE SPACE",
direction="ws",
linebreak="gl",
- specials={ "nobreak", 0x0020 },
+ specials={ "nobreak", 0x20 },
unicodeslot=0x2007,
},
[0x2008]={
@@ -57155,7 +58367,7 @@ characters.data={
description="PUNCTUATION SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2008,
},
[0x2009]={
@@ -57164,7 +58376,7 @@ characters.data={
description="THIN SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x2009,
},
[0x200A]={
@@ -57173,7 +58385,7 @@ characters.data={
description="HAIR SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x200A,
},
[0x200B]={
@@ -57314,7 +58526,7 @@ characters.data={
description="DOUBLE LOW LINE",
direction="on",
linebreak="al",
- specials={ "compat", 0x0020, 0x0333 },
+ specials={ "compat", 0x20, 0x333 },
unicodeslot=0x2017,
},
[0x2018]={
@@ -57456,7 +58668,7 @@ characters.data={
description="ONE DOT LEADER",
direction="on",
linebreak="in",
- specials={ "compat", 0x002E },
+ specials={ "compat", 0x2E },
unicodeslot=0x2024,
},
[0x2025]={
@@ -57466,7 +58678,7 @@ characters.data={
description="TWO DOT LEADER",
direction="on",
linebreak="in",
- specials={ "compat", 0x002E, 0x002E },
+ specials={ "compat", 0x2E, 0x2E },
unicodeslot=0x2025,
},
[0x2026]={
@@ -57487,7 +58699,7 @@ characters.data={
name="dots",
},
},
- specials={ "compat", 0x002E, 0x002E, 0x002E },
+ specials={ "compat", 0x2E, 0x2E, 0x2E },
unicodeslot=0x2026,
},
[0x2027]={
@@ -57556,7 +58768,7 @@ characters.data={
description="NARROW NO-BREAK SPACE",
direction="cs",
linebreak="gl",
- specials={ "nobreak", 0x0020 },
+ specials={ "nobreak", 0x20 },
unicodeslot=0x202F,
},
[0x2030]={
@@ -57684,7 +58896,7 @@ characters.data={
description="DOUBLE EXCLAMATION MARK",
direction="on",
linebreak="ns",
- specials={ "compat", 0x0021, 0x0021 },
+ specials={ "compat", 0x21, 0x21 },
unicodeslot=0x203C,
variants={
[0xFE0E]="text style",
@@ -57702,7 +58914,7 @@ characters.data={
adobename="overline",
category="po",
cjkwd="a",
- comment=[[mathspec={ { class="topaccent", name="overbar" }, { class="botaccent", name="underbar" } }"]],
+ comment="mathspec={ { class=\"topaccent\", name=\"overbar\" }, { class=\"botaccent\", name=\"underbar\" } }\"",
description="OVERLINE",
direction="on",
linebreak="al",
@@ -57719,7 +58931,7 @@ characters.data={
},
},
mathstretch="h",
- specials={ "compat", 0x0020, 0x0305 },
+ specials={ "compat", 0x20, 0x305 },
unicodeslot=0x203E,
},
[0x203F]={
@@ -57798,7 +59010,7 @@ characters.data={
description="DOUBLE QUESTION MARK",
direction="on",
linebreak="ns",
- specials={ "compat", 0x003F, 0x003F },
+ specials={ "compat", 0x3F, 0x3F },
unicodeslot=0x2047,
},
[0x2048]={
@@ -57806,7 +59018,7 @@ characters.data={
description="QUESTION EXCLAMATION MARK",
direction="on",
linebreak="ns",
- specials={ "compat", 0x003F, 0x0021 },
+ specials={ "compat", 0x3F, 0x21 },
unicodeslot=0x2048,
},
[0x2049]={
@@ -57814,7 +59026,7 @@ characters.data={
description="EXCLAMATION QUESTION MARK",
direction="on",
linebreak="ns",
- specials={ "compat", 0x0021, 0x003F },
+ specials={ "compat", 0x21, 0x3F },
unicodeslot=0x2049,
variants={
[0xFE0E]="text style",
@@ -57977,7 +59189,7 @@ characters.data={
description="MEDIUM MATHEMATICAL SPACE",
direction="ws",
linebreak="ba",
- specials={ "compat", 0x0020 },
+ specials={ "compat", 0x20 },
unicodeslot=0x205F,
},
[0x2060]={
@@ -58017,6 +59229,38 @@ characters.data={
linebreak="al",
unicodeslot=0x2064,
},
+ [0x2066]={
+ arabic="u",
+ category="cf",
+ description="LEFT-TO-RIGHT ISOLATE",
+ direction="lri",
+ linebreak="cm",
+ unicodeslot=0x2066,
+ },
+ [0x2067]={
+ arabic="u",
+ category="cf",
+ description="RIGHT-TO-LEFT ISOLATE",
+ direction="rli",
+ linebreak="cm",
+ unicodeslot=0x2067,
+ },
+ [0x2068]={
+ arabic="u",
+ category="cf",
+ description="FIRST STRONG ISOLATE",
+ direction="fsi",
+ linebreak="cm",
+ unicodeslot=0x2068,
+ },
+ [0x2069]={
+ arabic="u",
+ category="cf",
+ description="POP DIRECTIONAL ISOLATE",
+ direction="pdi",
+ linebreak="cm",
+ unicodeslot=0x2069,
+ },
[0x206A]={
category="cf",
description="INHIBIT SYMMETRIC SWAPPING",
@@ -58065,7 +59309,7 @@ characters.data={
description="SUPERSCRIPT ZERO",
direction="en",
linebreak="al",
- specials={ "super", 0x0030 },
+ specials={ "super", 0x30 },
unicodeslot=0x2070,
},
[0x2071]={
@@ -58073,7 +59317,7 @@ characters.data={
description="SUPERSCRIPT LATIN SMALL LETTER I",
direction="l",
linebreak="al",
- specials={ "super", 0x0069 },
+ specials={ "super", 0x69 },
unicodeslot=0x2071,
},
[0x2074]={
@@ -58083,7 +59327,7 @@ characters.data={
description="SUPERSCRIPT FOUR",
direction="en",
linebreak="ai",
- specials={ "super", 0x0034 },
+ specials={ "super", 0x34 },
unicodeslot=0x2074,
},
[0x2075]={
@@ -58092,7 +59336,7 @@ characters.data={
description="SUPERSCRIPT FIVE",
direction="en",
linebreak="al",
- specials={ "super", 0x0035 },
+ specials={ "super", 0x35 },
unicodeslot=0x2075,
},
[0x2076]={
@@ -58101,7 +59345,7 @@ characters.data={
description="SUPERSCRIPT SIX",
direction="en",
linebreak="al",
- specials={ "super", 0x0036 },
+ specials={ "super", 0x36 },
unicodeslot=0x2076,
},
[0x2077]={
@@ -58110,7 +59354,7 @@ characters.data={
description="SUPERSCRIPT SEVEN",
direction="en",
linebreak="al",
- specials={ "super", 0x0037 },
+ specials={ "super", 0x37 },
unicodeslot=0x2077,
},
[0x2078]={
@@ -58119,7 +59363,7 @@ characters.data={
description="SUPERSCRIPT EIGHT",
direction="en",
linebreak="al",
- specials={ "super", 0x0038 },
+ specials={ "super", 0x38 },
unicodeslot=0x2078,
},
[0x2079]={
@@ -58128,7 +59372,7 @@ characters.data={
description="SUPERSCRIPT NINE",
direction="en",
linebreak="al",
- specials={ "super", 0x0039 },
+ specials={ "super", 0x39 },
unicodeslot=0x2079,
},
[0x207A]={
@@ -58139,7 +59383,7 @@ characters.data={
linebreak="al",
mathclass="binary",
mathname="positivesign",
- specials={ "super", 0x002B },
+ specials={ "super", 0x2B },
unicodeslot=0x207A,
},
[0x207B]={
@@ -58158,7 +59402,7 @@ characters.data={
description="SUPERSCRIPT EQUALS SIGN",
direction="on",
linebreak="al",
- specials={ "super", 0x003D },
+ specials={ "super", 0x3D },
unicodeslot=0x207C,
},
[0x207D]={
@@ -58168,8 +59412,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0x207E,
+ specials={ "super", 0x28 },
textclass="open",
- specials={ "super", 0x0028 },
unicodeslot=0x207D,
},
[0x207E]={
@@ -58179,8 +59423,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0x207D,
+ specials={ "super", 0x29 },
textclass="close",
- specials={ "super", 0x0029 },
unicodeslot=0x207E,
},
[0x207F]={
@@ -58190,7 +59434,7 @@ characters.data={
description="SUPERSCRIPT LATIN SMALL LETTER N",
direction="l",
linebreak="ai",
- specials={ "super", 0x006E },
+ specials={ "super", 0x6E },
unicodeslot=0x207F,
},
[0x2080]={
@@ -58199,7 +59443,7 @@ characters.data={
description="SUBSCRIPT ZERO",
direction="en",
linebreak="al",
- specials={ "sub", 0x0030 },
+ specials={ "sub", 0x30 },
unicodeslot=0x2080,
},
[0x2081]={
@@ -58209,7 +59453,7 @@ characters.data={
description="SUBSCRIPT ONE",
direction="en",
linebreak="ai",
- specials={ "sub", 0x0031 },
+ specials={ "sub", 0x31 },
unicodeslot=0x2081,
},
[0x2082]={
@@ -58219,7 +59463,7 @@ characters.data={
description="SUBSCRIPT TWO",
direction="en",
linebreak="ai",
- specials={ "sub", 0x0032 },
+ specials={ "sub", 0x32 },
unicodeslot=0x2082,
},
[0x2083]={
@@ -58229,7 +59473,7 @@ characters.data={
description="SUBSCRIPT THREE",
direction="en",
linebreak="ai",
- specials={ "sub", 0x0033 },
+ specials={ "sub", 0x33 },
unicodeslot=0x2083,
},
[0x2084]={
@@ -58239,7 +59483,7 @@ characters.data={
description="SUBSCRIPT FOUR",
direction="en",
linebreak="ai",
- specials={ "sub", 0x0034 },
+ specials={ "sub", 0x34 },
unicodeslot=0x2084,
},
[0x2085]={
@@ -58248,7 +59492,7 @@ characters.data={
description="SUBSCRIPT FIVE",
direction="en",
linebreak="al",
- specials={ "sub", 0x0035 },
+ specials={ "sub", 0x35 },
unicodeslot=0x2085,
},
[0x2086]={
@@ -58257,7 +59501,7 @@ characters.data={
description="SUBSCRIPT SIX",
direction="en",
linebreak="al",
- specials={ "sub", 0x0036 },
+ specials={ "sub", 0x36 },
unicodeslot=0x2086,
},
[0x2087]={
@@ -58266,7 +59510,7 @@ characters.data={
description="SUBSCRIPT SEVEN",
direction="en",
linebreak="al",
- specials={ "sub", 0x0037 },
+ specials={ "sub", 0x37 },
unicodeslot=0x2087,
},
[0x2088]={
@@ -58275,7 +59519,7 @@ characters.data={
description="SUBSCRIPT EIGHT",
direction="en",
linebreak="al",
- specials={ "sub", 0x0038 },
+ specials={ "sub", 0x38 },
unicodeslot=0x2088,
},
[0x2089]={
@@ -58284,7 +59528,7 @@ characters.data={
description="SUBSCRIPT NINE",
direction="en",
linebreak="al",
- specials={ "sub", 0x0039 },
+ specials={ "sub", 0x39 },
unicodeslot=0x2089,
},
[0x208A]={
@@ -58292,7 +59536,7 @@ characters.data={
description="SUBSCRIPT PLUS SIGN",
direction="es",
linebreak="al",
- specials={ "sub", 0x002B },
+ specials={ "sub", 0x2B },
unicodeslot=0x208A,
},
[0x208B]={
@@ -58308,7 +59552,7 @@ characters.data={
description="SUBSCRIPT EQUALS SIGN",
direction="on",
linebreak="al",
- specials={ "sub", 0x003D },
+ specials={ "sub", 0x3D },
unicodeslot=0x208C,
},
[0x208D]={
@@ -58318,8 +59562,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0x208E,
+ specials={ "sub", 0x28 },
textclass="open",
- specials={ "sub", 0x0028 },
unicodeslot=0x208D,
},
[0x208E]={
@@ -58329,8 +59573,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0x208D,
+ specials={ "sub", 0x29 },
textclass="close",
- specials={ "sub", 0x0029 },
unicodeslot=0x208E,
},
[0x2090]={
@@ -58338,7 +59582,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER A",
direction="l",
linebreak="al",
- specials={ "sub", 0x0061 },
+ specials={ "sub", 0x61 },
unicodeslot=0x2090,
},
[0x2091]={
@@ -58346,7 +59590,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER E",
direction="l",
linebreak="al",
- specials={ "sub", 0x0065 },
+ specials={ "sub", 0x65 },
unicodeslot=0x2091,
},
[0x2092]={
@@ -58354,7 +59598,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER O",
direction="l",
linebreak="al",
- specials={ "sub", 0x006F },
+ specials={ "sub", 0x6F },
unicodeslot=0x2092,
},
[0x2093]={
@@ -58362,7 +59606,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER X",
direction="l",
linebreak="al",
- specials={ "sub", 0x0078 },
+ specials={ "sub", 0x78 },
unicodeslot=0x2093,
},
[0x2094]={
@@ -58370,7 +59614,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER SCHWA",
direction="l",
linebreak="al",
- specials={ "sub", 0x0259 },
+ specials={ "sub", 0x259 },
unicodeslot=0x2094,
},
[0x2095]={
@@ -58378,7 +59622,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER H",
direction="l",
linebreak="al",
- specials={ "sub", 0x0068 },
+ specials={ "sub", 0x68 },
unicodeslot=0x2095,
},
[0x2096]={
@@ -58386,7 +59630,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER K",
direction="l",
linebreak="al",
- specials={ "sub", 0x006B },
+ specials={ "sub", 0x6B },
unicodeslot=0x2096,
},
[0x2097]={
@@ -58394,7 +59638,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER L",
direction="l",
linebreak="al",
- specials={ "sub", 0x006C },
+ specials={ "sub", 0x6C },
unicodeslot=0x2097,
},
[0x2098]={
@@ -58402,7 +59646,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER M",
direction="l",
linebreak="al",
- specials={ "sub", 0x006D },
+ specials={ "sub", 0x6D },
unicodeslot=0x2098,
},
[0x2099]={
@@ -58410,7 +59654,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER N",
direction="l",
linebreak="al",
- specials={ "sub", 0x006E },
+ specials={ "sub", 0x6E },
unicodeslot=0x2099,
},
[0x209A]={
@@ -58418,7 +59662,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER P",
direction="l",
linebreak="al",
- specials={ "sub", 0x0070 },
+ specials={ "sub", 0x70 },
unicodeslot=0x209A,
},
[0x209B]={
@@ -58426,7 +59670,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER S",
direction="l",
linebreak="al",
- specials={ "sub", 0x0073 },
+ specials={ "sub", 0x73 },
unicodeslot=0x209B,
},
[0x209C]={
@@ -58434,7 +59678,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER T",
direction="l",
linebreak="al",
- specials={ "sub", 0x0074 },
+ specials={ "sub", 0x74 },
unicodeslot=0x209C,
},
[0x20A0]={
@@ -58503,7 +59747,7 @@ characters.data={
description="RUPEE SIGN",
direction="et",
linebreak="pr",
- specials={ "compat", 0x0052, 0x0073 },
+ specials={ "compat", 0x52, 0x73 },
unicodeslot=0x20A8,
},
[0x20A9]={
@@ -58640,8 +59884,30 @@ characters.data={
linebreak="pr",
unicodeslot=0x20BA,
},
+ [0x20BB]={
+ category="sc",
+ description="NORDIC MARK SIGN",
+ direction="et",
+ linebreak="po",
+ unicodeslot=0x20BB,
+ },
+ [0x20BC]={
+ category="sc",
+ description="MANAT SIGN",
+ direction="et",
+ linebreak="pr",
+ unicodeslot=0x20BC,
+ },
+ [0x20BD]={
+ category="sc",
+ description="RUBLE SIGN",
+ direction="et",
+ linebreak="pr",
+ unicodeslot=0x20BD,
+ },
[0x20D0]={
category="mn",
+ combining=0xE6,
description="COMBINING LEFT HARPOON ABOVE",
direction="nsm",
linebreak="cm",
@@ -58649,6 +59915,7 @@ characters.data={
},
[0x20D1]={
category="mn",
+ combining=0xE6,
description="COMBINING RIGHT HARPOON ABOVE",
direction="nsm",
linebreak="cm",
@@ -58656,6 +59923,7 @@ characters.data={
},
[0x20D2]={
category="mn",
+ combining=0x1,
description="COMBINING LONG VERTICAL LINE OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58663,6 +59931,7 @@ characters.data={
},
[0x20D3]={
category="mn",
+ combining=0x1,
description="COMBINING SHORT VERTICAL LINE OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58670,6 +59939,7 @@ characters.data={
},
[0x20D4]={
category="mn",
+ combining=0xE6,
description="COMBINING ANTICLOCKWISE ARROW ABOVE",
direction="nsm",
linebreak="cm",
@@ -58677,6 +59947,7 @@ characters.data={
},
[0x20D5]={
category="mn",
+ combining=0xE6,
description="COMBINING CLOCKWISE ARROW ABOVE",
direction="nsm",
linebreak="cm",
@@ -58684,6 +59955,7 @@ characters.data={
},
[0x20D6]={
category="mn",
+ combining=0xE6,
description="COMBINING LEFT ARROW ABOVE",
direction="nsm",
linebreak="cm",
@@ -58691,6 +59963,7 @@ characters.data={
},
[0x20D7]={
category="mn",
+ combining=0xE6,
description="COMBINING RIGHT ARROW ABOVE",
direction="nsm",
linebreak="cm",
@@ -58700,6 +59973,7 @@ characters.data={
},
[0x20D8]={
category="mn",
+ combining=0x1,
description="COMBINING RING OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58707,6 +59981,7 @@ characters.data={
},
[0x20D9]={
category="mn",
+ combining=0x1,
description="COMBINING CLOCKWISE RING OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58714,6 +59989,7 @@ characters.data={
},
[0x20DA]={
category="mn",
+ combining=0x1,
description="COMBINING ANTICLOCKWISE RING OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58721,6 +59997,7 @@ characters.data={
},
[0x20DB]={
category="mn",
+ combining=0xE6,
description="COMBINING THREE DOTS ABOVE",
direction="nsm",
linebreak="cm",
@@ -58730,6 +60007,7 @@ characters.data={
},
[0x20DC]={
category="mn",
+ combining=0xE6,
description="COMBINING FOUR DOTS ABOVE",
direction="nsm",
linebreak="cm",
@@ -58779,6 +60057,7 @@ characters.data={
},
[0x20E1]={
category="mn",
+ combining=0xE6,
description="COMBINING LEFT RIGHT ARROW ABOVE",
direction="nsm",
linebreak="cm",
@@ -58807,6 +60086,7 @@ characters.data={
},
[0x20E5]={
category="mn",
+ combining=0x1,
description="COMBINING REVERSE SOLIDUS OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58814,6 +60094,7 @@ characters.data={
},
[0x20E6]={
category="mn",
+ combining=0x1,
description="COMBINING DOUBLE VERTICAL STROKE OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58821,15 +60102,17 @@ characters.data={
},
[0x20E7]={
category="mn",
+ combining=0xE6,
description="COMBINING ANNUITY SYMBOL",
direction="nsm",
+ linebreak="cm",
mathclass="topaccent",
mathname="actuarial",
- linebreak="cm",
unicodeslot=0x20E7,
},
[0x20E8]={
category="mn",
+ combining=0xDC,
description="COMBINING TRIPLE UNDERDOT",
direction="nsm",
linebreak="cm",
@@ -58837,6 +60120,7 @@ characters.data={
},
[0x20E9]={
category="mn",
+ combining=0xE6,
description="COMBINING WIDE BRIDGE ABOVE",
direction="nsm",
linebreak="cm",
@@ -58845,6 +60129,7 @@ characters.data={
},
[0x20EA]={
category="mn",
+ combining=0x1,
description="COMBINING LEFTWARDS ARROW OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58852,6 +60137,7 @@ characters.data={
},
[0x20EB]={
category="mn",
+ combining=0x1,
description="COMBINING LONG DOUBLE SOLIDUS OVERLAY",
direction="nsm",
linebreak="cm",
@@ -58859,6 +60145,7 @@ characters.data={
},
[0x20EC]={
category="mn",
+ combining=0xDC,
description="COMBINING RIGHTWARDS HARPOON WITH BARB DOWNWARDS",
direction="nsm",
linebreak="cm",
@@ -58866,6 +60153,7 @@ characters.data={
},
[0x20ED]={
category="mn",
+ combining=0xDC,
description="COMBINING LEFTWARDS HARPOON WITH BARB DOWNWARDS",
direction="nsm",
linebreak="cm",
@@ -58873,6 +60161,7 @@ characters.data={
},
[0x20EE]={
category="mn",
+ combining=0xDC,
description="COMBINING LEFT ARROW BELOW",
direction="nsm",
linebreak="cm",
@@ -58880,6 +60169,7 @@ characters.data={
},
[0x20EF]={
category="mn",
+ combining=0xDC,
description="COMBINING RIGHT ARROW BELOW",
direction="nsm",
linebreak="cm",
@@ -58887,6 +60177,7 @@ characters.data={
},
[0x20F0]={
category="mn",
+ combining=0xE6,
description="COMBINING ASTERISK ABOVE",
direction="nsm",
linebreak="cm",
@@ -58897,7 +60188,7 @@ characters.data={
description="ACCOUNT OF",
direction="on",
linebreak="al",
- specials={ "compat", 0x0061, 0x002F, 0x0063 },
+ specials={ "compat", 0x61, 0x2F, 0x63 },
unicodeslot=0x2100,
},
[0x2101]={
@@ -58905,7 +60196,7 @@ characters.data={
description="ADDRESSED TO THE SUBJECT",
direction="on",
linebreak="al",
- specials={ "compat", 0x0061, 0x002F, 0x0073 },
+ specials={ "compat", 0x61, 0x2F, 0x73 },
unicodeslot=0x2101,
},
[0x2102]={
@@ -58915,7 +60206,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="complexes",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x2102,
},
[0x2103]={
@@ -58926,7 +60217,7 @@ characters.data={
description="DEGREE CELSIUS",
direction="on",
linebreak="po",
- specials={ "compat", 0x00B0, 0x0043 },
+ specials={ "compat", 0xB0, 0x43 },
unicodeslot=0x2103,
},
[0x2104]={
@@ -58943,7 +60234,7 @@ characters.data={
description="CARE OF",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0063, 0x002F, 0x006F },
+ specials={ "compat", 0x63, 0x2F, 0x6F },
unicodeslot=0x2105,
},
[0x2106]={
@@ -58951,7 +60242,7 @@ characters.data={
description="CADA UNA",
direction="on",
linebreak="al",
- specials={ "compat", 0x0063, 0x002F, 0x0075 },
+ specials={ "compat", 0x63, 0x2F, 0x75 },
unicodeslot=0x2106,
},
[0x2107]={
@@ -58961,7 +60252,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="Eulerconst",
- specials={ "compat", 0x0190 },
+ specials={ "compat", 0x190 },
unicodeslot=0x2107,
},
[0x2108]={
@@ -58978,7 +60269,7 @@ characters.data={
description="DEGREE FAHRENHEIT",
direction="on",
linebreak="po",
- specials={ "compat", 0x00B0, 0x0046 },
+ specials={ "compat", 0xB0, 0x46 },
unicodeslot=0x2109,
},
[0x210A]={
@@ -58986,7 +60277,7 @@ characters.data={
description="SCRIPT SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x210A,
},
[0x210B]={
@@ -58994,7 +60285,7 @@ characters.data={
description="SCRIPT CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x210B,
},
[0x210C]={
@@ -59002,7 +60293,7 @@ characters.data={
description="BLACK-LETTER CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x210C,
},
[0x210D]={
@@ -59010,7 +60301,7 @@ characters.data={
description="DOUBLE-STRUCK CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x210D,
},
[0x210E]={
@@ -59020,7 +60311,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="Plankconst",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x210E,
},
[0x210F]={
@@ -59038,7 +60329,7 @@ characters.data={
name="hbar",
},
},
- specials={ "font", 0x0127 },
+ specials={ "font", 0x127 },
unicodeslot=0x210F,
},
[0x2110]={
@@ -59046,7 +60337,7 @@ characters.data={
description="SCRIPT CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x2110,
},
[0x2111]={
@@ -59057,7 +60348,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="Im",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x2111,
},
[0x2112]={
@@ -59065,7 +60356,7 @@ characters.data={
description="SCRIPT CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x2112,
},
[0x2113]={
@@ -59077,7 +60368,7 @@ characters.data={
linebreak="ai",
mathclass="default",
mathname="ell",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x2113,
},
[0x2114]={
@@ -59094,7 +60385,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="naturalnumbers",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x2115,
},
[0x2116]={
@@ -59105,7 +60396,7 @@ characters.data={
description="NUMERO SIGN",
direction="on",
linebreak="pr",
- specials={ "compat", 0x004E, 0x006F },
+ specials={ "compat", 0x4E, 0x6F },
unicodeslot=0x2116,
},
[0x2117]={
@@ -59133,7 +60424,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="primes",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x2119,
},
[0x211A]={
@@ -59143,7 +60434,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="rationals",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x211A,
},
[0x211B]={
@@ -59151,7 +60442,7 @@ characters.data={
description="SCRIPT CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x211B,
},
[0x211C]={
@@ -59162,7 +60453,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="Re",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x211C,
},
[0x211D]={
@@ -59172,7 +60463,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="reals",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x211D,
},
[0x211E]={
@@ -59195,7 +60486,7 @@ characters.data={
description="SERVICE MARK",
direction="on",
linebreak="al",
- specials={ "super", 0x0053, 0x004D },
+ specials={ "super", 0x53, 0x4D },
unicodeslot=0x2120,
},
[0x2121]={
@@ -59205,7 +60496,7 @@ characters.data={
description="TELEPHONE SIGN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0054, 0x0045, 0x004C },
+ specials={ "compat", 0x54, 0x45, 0x4C },
unicodeslot=0x2121,
},
[0x2122]={
@@ -59216,7 +60507,7 @@ characters.data={
description="TRADE MARK SIGN",
direction="on",
linebreak="ai",
- specials={ "super", 0x0054, 0x004D },
+ specials={ "super", 0x54, 0x4D },
unicodeslot=0x2122,
},
[0x2123]={
@@ -59233,7 +60524,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="integers",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x2124,
},
[0x2125]={
@@ -59251,11 +60542,11 @@ characters.data={
contextname="textohm",
description="OHM SIGN",
direction="l",
- lccode=0x03C9,
+ lccode=0x3C9,
linebreak="al",
mathclass="variable",
mathname="ohm",
- specials={ "char", 0x03A9 },
+ specials={ "char", 0x3A9 },
unicodeslot=0x2126,
},
[0x2127]={
@@ -59273,7 +60564,7 @@ characters.data={
description="BLACK-LETTER CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x2128,
},
[0x2129]={
@@ -59290,9 +60581,9 @@ characters.data={
contextname="textkelvin",
description="KELVIN SIGN",
direction="l",
- lccode=0x006B,
+ lccode=0x6B,
linebreak="al",
- specials={ "char", 0x004B },
+ specials={ "char", 0x4B },
unicodeslot=0x212A,
},
[0x212B]={
@@ -59302,11 +60593,11 @@ characters.data={
contextname="textAngstrom",
description="ANGSTROM SIGN",
direction="l",
- lccode=0x00E5,
+ lccode=0xE5,
linebreak="ai",
mathclass="variable",
mathname="Angstrom",
- specials={ "char", 0x00C5 },
+ specials={ "char", 0xC5 },
unicodeslot=0x212B,
},
[0x212C]={
@@ -59314,7 +60605,7 @@ characters.data={
description="SCRIPT CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x212C,
},
[0x212D]={
@@ -59322,7 +60613,7 @@ characters.data={
description="BLACK-LETTER CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x212D,
},
[0x212E]={
@@ -59338,7 +60629,7 @@ characters.data={
description="SCRIPT SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x212F,
},
[0x2130]={
@@ -59346,7 +60637,7 @@ characters.data={
description="SCRIPT CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x2130,
},
[0x2131]={
@@ -59354,7 +60645,7 @@ characters.data={
description="SCRIPT CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x2131,
},
[0x2132]={
@@ -59372,7 +60663,7 @@ characters.data={
description="SCRIPT CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x2133,
},
[0x2134]={
@@ -59380,7 +60671,7 @@ characters.data={
description="SCRIPT SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x2134,
},
[0x2135]={
@@ -59391,7 +60682,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="aleph",
- specials={ "compat", 0x05D0 },
+ specials={ "compat", 0x5D0 },
unicodeslot=0x2135,
},
[0x2136]={
@@ -59401,7 +60692,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="beth",
- specials={ "compat", 0x05D1 },
+ specials={ "compat", 0x5D1 },
unicodeslot=0x2136,
},
[0x2137]={
@@ -59411,7 +60702,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="gimel",
- specials={ "compat", 0x05D2 },
+ specials={ "compat", 0x5D2 },
unicodeslot=0x2137,
},
[0x2138]={
@@ -59421,7 +60712,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="daleth",
- specials={ "compat", 0x05D3 },
+ specials={ "compat", 0x5D3 },
unicodeslot=0x2138,
},
[0x2139]={
@@ -59429,7 +60720,7 @@ characters.data={
description="INFORMATION SOURCE",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x2139,
variants={
[0xFE0E]="text style",
@@ -59448,7 +60739,7 @@ characters.data={
description="FACSIMILE SIGN",
direction="on",
linebreak="al",
- specials={ "compat", 0x0046, 0x0041, 0x0058 },
+ specials={ "compat", 0x46, 0x41, 0x58 },
unicodeslot=0x213B,
},
[0x213C]={
@@ -59456,7 +60747,7 @@ characters.data={
description="DOUBLE-STRUCK SMALL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C0 },
+ specials={ "font", 0x3C0 },
unicodeslot=0x213C,
},
[0x213D]={
@@ -59464,7 +60755,7 @@ characters.data={
description="DOUBLE-STRUCK SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B3 },
+ specials={ "font", 0x3B3 },
unicodeslot=0x213D,
},
[0x213E]={
@@ -59472,7 +60763,7 @@ characters.data={
description="DOUBLE-STRUCK CAPITAL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x0393 },
+ specials={ "font", 0x393 },
unicodeslot=0x213E,
},
[0x213F]={
@@ -59480,7 +60771,7 @@ characters.data={
description="DOUBLE-STRUCK CAPITAL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A0 },
+ specials={ "font", 0x3A0 },
unicodeslot=0x213F,
},
[0x2140]={
@@ -59534,7 +60825,7 @@ characters.data={
linebreak="al",
mathclass="nothing",
mathname="differentialD",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x2145,
},
[0x2146]={
@@ -59544,7 +60835,7 @@ characters.data={
linebreak="al",
mathclass="nothing",
mathname="differentiald",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x2146,
},
[0x2147]={
@@ -59554,7 +60845,7 @@ characters.data={
linebreak="al",
mathclass="nothing",
mathname="exponentiale",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x2147,
},
[0x2148]={
@@ -59564,7 +60855,7 @@ characters.data={
linebreak="al",
mathclass="nothing",
mathname="imaginaryi",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x2148,
},
[0x2149]={
@@ -59574,7 +60865,7 @@ characters.data={
linebreak="al",
mathclass="nothing",
mathname="imaginaryj",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x2149,
},
[0x214A]={
@@ -59629,7 +60920,7 @@ characters.data={
description="VULGAR FRACTION ONE SEVENTH",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0031, 0x2044, 0x0037 },
+ specials={ "fraction", 0x31, 0x2044, 0x37 },
unicodeslot=0x2150,
},
[0x2151]={
@@ -59637,7 +60928,7 @@ characters.data={
description="VULGAR FRACTION ONE NINTH",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0031, 0x2044, 0x0039 },
+ specials={ "fraction", 0x31, 0x2044, 0x39 },
unicodeslot=0x2151,
},
[0x2152]={
@@ -59645,7 +60936,7 @@ characters.data={
description="VULGAR FRACTION ONE TENTH",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0031, 0x2044, 0x0031, 0x0030 },
+ specials={ "fraction", 0x31, 0x2044, 0x31, 0x30 },
unicodeslot=0x2152,
},
[0x2153]={
@@ -59656,7 +60947,7 @@ characters.data={
description="VULGAR FRACTION ONE THIRD",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0031, 0x2044, 0x0033 },
+ specials={ "fraction", 0x31, 0x2044, 0x33 },
unicodeslot=0x2153,
},
[0x2154]={
@@ -59667,7 +60958,7 @@ characters.data={
description="VULGAR FRACTION TWO THIRDS",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0032, 0x2044, 0x0033 },
+ specials={ "fraction", 0x32, 0x2044, 0x33 },
unicodeslot=0x2154,
},
[0x2155]={
@@ -59676,7 +60967,7 @@ characters.data={
description="VULGAR FRACTION ONE FIFTH",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0031, 0x2044, 0x0035 },
+ specials={ "fraction", 0x31, 0x2044, 0x35 },
unicodeslot=0x2155,
},
[0x2156]={
@@ -59685,7 +60976,7 @@ characters.data={
description="VULGAR FRACTION TWO FIFTHS",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0032, 0x2044, 0x0035 },
+ specials={ "fraction", 0x32, 0x2044, 0x35 },
unicodeslot=0x2156,
},
[0x2157]={
@@ -59694,7 +60985,7 @@ characters.data={
description="VULGAR FRACTION THREE FIFTHS",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0033, 0x2044, 0x0035 },
+ specials={ "fraction", 0x33, 0x2044, 0x35 },
unicodeslot=0x2157,
},
[0x2158]={
@@ -59703,7 +60994,7 @@ characters.data={
description="VULGAR FRACTION FOUR FIFTHS",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0034, 0x2044, 0x0035 },
+ specials={ "fraction", 0x34, 0x2044, 0x35 },
unicodeslot=0x2158,
},
[0x2159]={
@@ -59712,7 +61003,7 @@ characters.data={
description="VULGAR FRACTION ONE SIXTH",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0031, 0x2044, 0x0036 },
+ specials={ "fraction", 0x31, 0x2044, 0x36 },
unicodeslot=0x2159,
},
[0x215A]={
@@ -59721,7 +61012,7 @@ characters.data={
description="VULGAR FRACTION FIVE SIXTHS",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0035, 0x2044, 0x0036 },
+ specials={ "fraction", 0x35, 0x2044, 0x36 },
unicodeslot=0x215A,
},
[0x215B]={
@@ -59732,7 +61023,7 @@ characters.data={
description="VULGAR FRACTION ONE EIGHTH",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0031, 0x2044, 0x0038 },
+ specials={ "fraction", 0x31, 0x2044, 0x38 },
unicodeslot=0x215B,
},
[0x215C]={
@@ -59743,7 +61034,7 @@ characters.data={
description="VULGAR FRACTION THREE EIGHTHS",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0033, 0x2044, 0x0038 },
+ specials={ "fraction", 0x33, 0x2044, 0x38 },
unicodeslot=0x215C,
},
[0x215D]={
@@ -59754,7 +61045,7 @@ characters.data={
description="VULGAR FRACTION FIVE EIGHTHS",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0035, 0x2044, 0x0038 },
+ specials={ "fraction", 0x35, 0x2044, 0x38 },
unicodeslot=0x215D,
},
[0x215E]={
@@ -59765,7 +61056,7 @@ characters.data={
description="VULGAR FRACTION SEVEN EIGHTHS",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0037, 0x2044, 0x0038 },
+ specials={ "fraction", 0x37, 0x2044, 0x38 },
unicodeslot=0x215E,
},
[0x215F]={
@@ -59773,7 +61064,7 @@ characters.data={
description="FRACTION NUMERATOR ONE",
direction="on",
linebreak="al",
- specials={ "fraction", 0x0031, 0x2044 },
+ specials={ "fraction", 0x31, 0x2044 },
unicodeslot=0x215F,
},
[0x2160]={
@@ -59785,7 +61076,7 @@ characters.data={
direction="l",
lccode=0x2170,
linebreak="ai",
- specials={ "compat", 0x0049 },
+ specials={ "compat", 0x49 },
unicodeslot=0x2160,
},
[0x2161]={
@@ -59797,7 +61088,7 @@ characters.data={
direction="l",
lccode=0x2171,
linebreak="ai",
- specials={ "compat", 0x0049, 0x0049 },
+ specials={ "compat", 0x49, 0x49 },
unicodeslot=0x2161,
},
[0x2162]={
@@ -59809,7 +61100,7 @@ characters.data={
direction="l",
lccode=0x2172,
linebreak="ai",
- specials={ "compat", 0x0049, 0x0049, 0x0049 },
+ specials={ "compat", 0x49, 0x49, 0x49 },
unicodeslot=0x2162,
},
[0x2163]={
@@ -59821,7 +61112,7 @@ characters.data={
direction="l",
lccode=0x2173,
linebreak="ai",
- specials={ "compat", 0x0049, 0x0056 },
+ specials={ "compat", 0x49, 0x56 },
unicodeslot=0x2163,
},
[0x2164]={
@@ -59833,7 +61124,7 @@ characters.data={
direction="l",
lccode=0x2174,
linebreak="ai",
- specials={ "compat", 0x0056 },
+ specials={ "compat", 0x56 },
unicodeslot=0x2164,
},
[0x2165]={
@@ -59845,7 +61136,7 @@ characters.data={
direction="l",
lccode=0x2175,
linebreak="ai",
- specials={ "compat", 0x0056, 0x0049 },
+ specials={ "compat", 0x56, 0x49 },
unicodeslot=0x2165,
},
[0x2166]={
@@ -59857,7 +61148,7 @@ characters.data={
direction="l",
lccode=0x2176,
linebreak="ai",
- specials={ "compat", 0x0056, 0x0049, 0x0049 },
+ specials={ "compat", 0x56, 0x49, 0x49 },
unicodeslot=0x2166,
},
[0x2167]={
@@ -59869,7 +61160,7 @@ characters.data={
direction="l",
lccode=0x2177,
linebreak="ai",
- specials={ "compat", 0x0056, 0x0049, 0x0049, 0x0049 },
+ specials={ "compat", 0x56, 0x49, 0x49, 0x49 },
unicodeslot=0x2167,
},
[0x2168]={
@@ -59881,7 +61172,7 @@ characters.data={
direction="l",
lccode=0x2178,
linebreak="ai",
- specials={ "compat", 0x0049, 0x0058 },
+ specials={ "compat", 0x49, 0x58 },
unicodeslot=0x2168,
},
[0x2169]={
@@ -59893,7 +61184,7 @@ characters.data={
direction="l",
lccode=0x2179,
linebreak="ai",
- specials={ "compat", 0x0058 },
+ specials={ "compat", 0x58 },
unicodeslot=0x2169,
},
[0x216A]={
@@ -59905,7 +61196,7 @@ characters.data={
direction="l",
lccode=0x217A,
linebreak="ai",
- specials={ "compat", 0x0058, 0x0049 },
+ specials={ "compat", 0x58, 0x49 },
unicodeslot=0x216A,
},
[0x216B]={
@@ -59917,7 +61208,7 @@ characters.data={
direction="l",
lccode=0x217B,
linebreak="ai",
- specials={ "compat", 0x0058, 0x0049, 0x0049 },
+ specials={ "compat", 0x58, 0x49, 0x49 },
unicodeslot=0x216B,
},
[0x216C]={
@@ -59927,7 +61218,7 @@ characters.data={
direction="l",
lccode=0x217C,
linebreak="al",
- specials={ "compat", 0x004C },
+ specials={ "compat", 0x4C },
unicodeslot=0x216C,
},
[0x216D]={
@@ -59937,7 +61228,7 @@ characters.data={
direction="l",
lccode=0x217D,
linebreak="al",
- specials={ "compat", 0x0043 },
+ specials={ "compat", 0x43 },
unicodeslot=0x216D,
},
[0x216E]={
@@ -59947,7 +61238,7 @@ characters.data={
direction="l",
lccode=0x217E,
linebreak="al",
- specials={ "compat", 0x0044 },
+ specials={ "compat", 0x44 },
unicodeslot=0x216E,
},
[0x216F]={
@@ -59957,7 +61248,7 @@ characters.data={
direction="l",
lccode=0x217F,
linebreak="al",
- specials={ "compat", 0x004D },
+ specials={ "compat", 0x4D },
unicodeslot=0x216F,
},
[0x2170]={
@@ -59968,7 +61259,7 @@ characters.data={
description="SMALL ROMAN NUMERAL ONE",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0069 },
+ specials={ "compat", 0x69 },
uccode=0x2160,
unicodeslot=0x2170,
},
@@ -59980,7 +61271,7 @@ characters.data={
description="SMALL ROMAN NUMERAL TWO",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0069, 0x0069 },
+ specials={ "compat", 0x69, 0x69 },
uccode=0x2161,
unicodeslot=0x2171,
},
@@ -59992,7 +61283,7 @@ characters.data={
description="SMALL ROMAN NUMERAL THREE",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0069, 0x0069, 0x0069 },
+ specials={ "compat", 0x69, 0x69, 0x69 },
uccode=0x2162,
unicodeslot=0x2172,
},
@@ -60004,7 +61295,7 @@ characters.data={
description="SMALL ROMAN NUMERAL FOUR",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0069, 0x0076 },
+ specials={ "compat", 0x69, 0x76 },
uccode=0x2163,
unicodeslot=0x2173,
},
@@ -60016,7 +61307,7 @@ characters.data={
description="SMALL ROMAN NUMERAL FIVE",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0076 },
+ specials={ "compat", 0x76 },
uccode=0x2164,
unicodeslot=0x2174,
},
@@ -60028,7 +61319,7 @@ characters.data={
description="SMALL ROMAN NUMERAL SIX",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0076, 0x0069 },
+ specials={ "compat", 0x76, 0x69 },
uccode=0x2165,
unicodeslot=0x2175,
},
@@ -60040,7 +61331,7 @@ characters.data={
description="SMALL ROMAN NUMERAL SEVEN",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0076, 0x0069, 0x0069 },
+ specials={ "compat", 0x76, 0x69, 0x69 },
uccode=0x2166,
unicodeslot=0x2176,
},
@@ -60052,7 +61343,7 @@ characters.data={
description="SMALL ROMAN NUMERAL EIGHT",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0076, 0x0069, 0x0069, 0x0069 },
+ specials={ "compat", 0x76, 0x69, 0x69, 0x69 },
uccode=0x2167,
unicodeslot=0x2177,
},
@@ -60064,7 +61355,7 @@ characters.data={
description="SMALL ROMAN NUMERAL NINE",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0069, 0x0078 },
+ specials={ "compat", 0x69, 0x78 },
uccode=0x2168,
unicodeslot=0x2178,
},
@@ -60076,7 +61367,7 @@ characters.data={
description="SMALL ROMAN NUMERAL TEN",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0078 },
+ specials={ "compat", 0x78 },
uccode=0x2169,
unicodeslot=0x2179,
},
@@ -60087,7 +61378,7 @@ characters.data={
description="SMALL ROMAN NUMERAL ELEVEN",
direction="l",
linebreak="al",
- specials={ "compat", 0x0078, 0x0069 },
+ specials={ "compat", 0x78, 0x69 },
uccode=0x216A,
unicodeslot=0x217A,
},
@@ -60098,7 +61389,7 @@ characters.data={
description="SMALL ROMAN NUMERAL TWELVE",
direction="l",
linebreak="al",
- specials={ "compat", 0x0078, 0x0069, 0x0069 },
+ specials={ "compat", 0x78, 0x69, 0x69 },
uccode=0x216B,
unicodeslot=0x217B,
},
@@ -60108,7 +61399,7 @@ characters.data={
description="SMALL ROMAN NUMERAL FIFTY",
direction="l",
linebreak="al",
- specials={ "compat", 0x006C },
+ specials={ "compat", 0x6C },
uccode=0x216C,
unicodeslot=0x217C,
},
@@ -60118,7 +61409,7 @@ characters.data={
description="SMALL ROMAN NUMERAL ONE HUNDRED",
direction="l",
linebreak="al",
- specials={ "compat", 0x0063 },
+ specials={ "compat", 0x63 },
uccode=0x216D,
unicodeslot=0x217D,
},
@@ -60128,7 +61419,7 @@ characters.data={
description="SMALL ROMAN NUMERAL FIVE HUNDRED",
direction="l",
linebreak="al",
- specials={ "compat", 0x0064 },
+ specials={ "compat", 0x64 },
uccode=0x216E,
unicodeslot=0x217E,
},
@@ -60138,7 +61429,7 @@ characters.data={
description="SMALL ROMAN NUMERAL ONE THOUSAND",
direction="l",
linebreak="al",
- specials={ "compat", 0x006D },
+ specials={ "compat", 0x6D },
uccode=0x216F,
unicodeslot=0x217F,
},
@@ -60213,7 +61504,7 @@ characters.data={
description="VULGAR FRACTION ZERO THIRDS",
direction="on",
linebreak="ai",
- specials={ "fraction", 0x0030, 0x2044, 0x0033 },
+ specials={ "fraction", 0x30, 0x2044, 0x33 },
unicodeslot=0x2189,
},
[0x2190]={
@@ -60400,7 +61691,7 @@ characters.data={
mathclass="relation",
mathextensible="l",
mathname="nleftarrow",
- specials={ "char", 0x2190, 0x0338 },
+ specials={ "char", 0x2190, 0x338 },
unicodeslot=0x219A,
},
[0x219B]={
@@ -60411,7 +61702,7 @@ characters.data={
mathclass="relation",
mathextensible="r",
mathname="nrightarrow",
- specials={ "char", 0x2192, 0x0338 },
+ specials={ "char", 0x2192, 0x338 },
unicodeslot=0x219B,
},
[0x219C]={
@@ -60616,7 +61907,7 @@ characters.data={
mathclass="relation",
mathextensible="h",
mathname="nleftrightarrow",
- specials={ "char", 0x2194, 0x0338 },
+ specials={ "char", 0x2194, 0x338 },
unicodeslot=0x21AE,
},
[0x21AF]={
@@ -60960,7 +62251,7 @@ characters.data={
mathclass="relation",
mathextensible="l",
mathname="nLeftarrow",
- specials={ "char", 0x21D0, 0x0338 },
+ specials={ "char", 0x21D0, 0x338 },
unicodeslot=0x21CD,
},
[0x21CE]={
@@ -60971,7 +62262,7 @@ characters.data={
mathclass="relation",
mathextensible="h",
mathname="nLeftrightarrow",
- specials={ "char", 0x21D4, 0x0338 },
+ specials={ "char", 0x21D4, 0x338 },
unicodeslot=0x21CE,
},
[0x21CF]={
@@ -60983,7 +62274,7 @@ characters.data={
mathclass="relation",
mathextensible="r",
mathname="nRightarrow",
- specials={ "char", 0x21D2, 0x0338 },
+ specials={ "char", 0x21D2, 0x338 },
unicodeslot=0x21CF,
},
[0x21D0]={
@@ -61527,7 +62818,7 @@ characters.data={
linebreak="al",
mathclass="ordinary",
mathname="nexists",
- specials={ "char", 0x2203, 0x0338 },
+ specials={ "char", 0x2203, 0x338 },
unicodeslot=0x2204,
},
[0x2205]={
@@ -61588,7 +62879,7 @@ characters.data={
},
},
mirror=0x220C,
- specials={ "char", 0x2208, 0x0338 },
+ specials={ "char", 0x2208, 0x338 },
unicodeslot=0x2209,
},
[0x220A]={
@@ -61636,7 +62927,7 @@ characters.data={
},
},
mirror=0x2209,
- specials={ "char", 0x220B, 0x0338 },
+ specials={ "char", 0x220B, 0x338 },
unicodeslot=0x220C,
},
[0x220D]={
@@ -61901,7 +63192,7 @@ characters.data={
name="nmid",
},
},
- specials={ "char", 0x2223, 0x0338 },
+ specials={ "char", 0x2223, 0x338 },
unicodeslot=0x2224,
},
[0x2225]={
@@ -61927,7 +63218,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nparallel",
- specials={ "char", 0x2225, 0x0338 },
+ specials={ "char", 0x2225, 0x338 },
unicodeslot=0x2226,
},
[0x2227]={
@@ -62244,7 +63535,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nsim",
- specials={ "char", 0x223C, 0x0338 },
+ specials={ "char", 0x223C, 0x338 },
unicodeslot=0x2241,
},
[0x2242]={
@@ -62274,7 +63565,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nsimeq",
- specials={ "char", 0x2243, 0x0338 },
+ specials={ "char", 0x2243, 0x338 },
unicodeslot=0x2244,
},
[0x2245]={
@@ -62319,7 +63610,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="approxnEq",
- specials={ "char", 0x2245, 0x0338 },
+ specials={ "char", 0x2245, 0x338 },
unicodeslot=0x2247,
},
[0x2248]={
@@ -62340,7 +63631,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="napprox",
- specials={ "char", 0x2248, 0x0338 },
+ specials={ "char", 0x2248, 0x338 },
unicodeslot=0x2249,
},
[0x224A]={
@@ -62570,7 +63861,7 @@ characters.data={
name="ne",
},
},
- specials={ "char", 0x003D, 0x0338 },
+ specials={ "char", 0x3D, 0x338 },
unicodeslot=0x2260,
},
[0x2261]={
@@ -62595,7 +63886,7 @@ characters.data={
mathextensible="h",
mathfiller="triplerelfill",
mathname="nequiv",
- specials={ "char", 0x2261, 0x0338 },
+ specials={ "char", 0x2261, 0x338 },
unicodeslot=0x2262,
},
[0x2263]={
@@ -62737,7 +64028,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nasymp",
- specials={ "char", 0x224D, 0x0338 },
+ specials={ "char", 0x224D, 0x338 },
unicodeslot=0x226D,
},
[0x226E]={
@@ -62750,7 +64041,7 @@ characters.data={
mathclass="relation",
mathname="nless",
mirror=0x226F,
- specials={ "char", 0x003C, 0x0338 },
+ specials={ "char", 0x3C, 0x338 },
unicodeslot=0x226E,
},
[0x226F]={
@@ -62763,7 +64054,7 @@ characters.data={
mathclass="relation",
mathname="ngtr",
mirror=0x226E,
- specials={ "char", 0x003E, 0x0338 },
+ specials={ "char", 0x3E, 0x338 },
unicodeslot=0x226F,
},
[0x2270]={
@@ -62775,7 +64066,7 @@ characters.data={
mathclass="relation",
mathname="nleq",
mirror=0x2271,
- specials={ "char", 0x2264, 0x0338 },
+ specials={ "char", 0x2264, 0x338 },
unicodeslot=0x2270,
},
[0x2271]={
@@ -62787,7 +64078,7 @@ characters.data={
mathclass="relation",
mathname="ngeq",
mirror=0x2270,
- specials={ "char", 0x2265, 0x0338 },
+ specials={ "char", 0x2265, 0x338 },
unicodeslot=0x2271,
},
[0x2272]={
@@ -62826,7 +64117,7 @@ characters.data={
mathclass="relation",
mathname="nlesssim",
mirror=0x2275,
- specials={ "char", 0x2272, 0x0338 },
+ specials={ "char", 0x2272, 0x338 },
unicodeslot=0x2274,
},
[0x2275]={
@@ -62837,7 +64128,7 @@ characters.data={
mathclass="relation",
mathname="ngtrsim",
mirror=0x2274,
- specials={ "char", 0x2273, 0x0338 },
+ specials={ "char", 0x2273, 0x338 },
unicodeslot=0x2275,
},
[0x2276]={
@@ -62870,7 +64161,7 @@ characters.data={
mathclass="relation",
mathname="nlessgtr",
mirror=0x2279,
- specials={ "char", 0x2276, 0x0338 },
+ specials={ "char", 0x2276, 0x338 },
unicodeslot=0x2278,
},
[0x2279]={
@@ -62882,7 +64173,7 @@ characters.data={
mathclass="relation",
mathname="ngtrless",
mirror=0x2278,
- specials={ "char", 0x2277, 0x0338 },
+ specials={ "char", 0x2277, 0x338 },
unicodeslot=0x2279,
},
[0x227A]={
@@ -62956,7 +64247,7 @@ characters.data={
mathclass="relation",
mathname="nprec",
mirror=0x2281,
- specials={ "char", 0x227A, 0x0338 },
+ specials={ "char", 0x227A, 0x338 },
unicodeslot=0x2280,
},
[0x2281]={
@@ -62968,7 +64259,7 @@ characters.data={
mathclass="relation",
mathname="nsucc",
mirror=0x2280,
- specials={ "char", 0x227B, 0x0338 },
+ specials={ "char", 0x227B, 0x338 },
unicodeslot=0x2281,
},
[0x2282]={
@@ -63004,7 +64295,7 @@ characters.data={
mathclass="relation",
mathname="nsubset",
mirror=0x2285,
- specials={ "char", 0x2282, 0x0338 },
+ specials={ "char", 0x2282, 0x338 },
unicodeslot=0x2284,
},
[0x2285]={
@@ -63016,7 +64307,7 @@ characters.data={
mathclass="relation",
mathname="nsupset",
mirror=0x2284,
- specials={ "char", 0x2283, 0x0338 },
+ specials={ "char", 0x2283, 0x338 },
unicodeslot=0x2285,
},
[0x2286]={
@@ -63051,7 +64342,7 @@ characters.data={
mathclass="relation",
mathname="nsubseteq",
mirror=0x2289,
- specials={ "char", 0x2286, 0x0338 },
+ specials={ "char", 0x2286, 0x338 },
unicodeslot=0x2288,
},
[0x2289]={
@@ -63062,7 +64353,7 @@ characters.data={
mathclass="relation",
mathname="nsupseteq",
mirror=0x2288,
- specials={ "char", 0x2287, 0x0338 },
+ specials={ "char", 0x2287, 0x338 },
unicodeslot=0x2289,
},
[0x228A]={
@@ -63426,7 +64717,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nvdash",
- specials={ "char", 0x22A2, 0x0338 },
+ specials={ "char", 0x22A2, 0x338 },
unicodeslot=0x22AC,
},
[0x22AD]={
@@ -63436,7 +64727,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nvDash",
- specials={ "char", 0x22A8, 0x0338 },
+ specials={ "char", 0x22A8, 0x338 },
unicodeslot=0x22AD,
},
[0x22AE]={
@@ -63446,7 +64737,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nVdash",
- specials={ "char", 0x22A9, 0x0338 },
+ specials={ "char", 0x22A9, 0x338 },
unicodeslot=0x22AE,
},
[0x22AF]={
@@ -63456,7 +64747,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="nVDash",
- specials={ "char", 0x22AB, 0x0338 },
+ specials={ "char", 0x22AB, 0x338 },
unicodeslot=0x22AF,
},
[0x22B0]={
@@ -63961,7 +65252,7 @@ characters.data={
mathclass="relation",
mathname="npreccurlyeq",
mirror=0x22E1,
- specials={ "char", 0x227C, 0x0338 },
+ specials={ "char", 0x227C, 0x338 },
unicodeslot=0x22E0,
},
[0x22E1]={
@@ -63972,7 +65263,7 @@ characters.data={
mathclass="relation",
mathname="nsucccurlyeq",
mirror=0x22E0,
- specials={ "char", 0x227D, 0x0338 },
+ specials={ "char", 0x227D, 0x338 },
unicodeslot=0x22E1,
},
[0x22E2]={
@@ -63983,7 +65274,7 @@ characters.data={
mathclass="relation",
mathname="nsqsubseteq",
mirror=0x22E3,
- specials={ "char", 0x2291, 0x0338 },
+ specials={ "char", 0x2291, 0x338 },
unicodeslot=0x22E2,
},
[0x22E3]={
@@ -63994,7 +65285,7 @@ characters.data={
mathclass="relation",
mathname="nsqsupseteq",
mirror=0x22E2,
- specials={ "char", 0x2292, 0x0338 },
+ specials={ "char", 0x2292, 0x338 },
unicodeslot=0x22E3,
},
[0x22E4]={
@@ -64065,7 +65356,7 @@ characters.data={
mathclass="relation",
mathname="ntriangleright",
mirror=0x22EB,
- specials={ "char", 0x22B2, 0x0338 },
+ specials={ "char", 0x22B2, 0x338 },
unicodeslot=0x22EA,
},
[0x22EB]={
@@ -64076,7 +65367,7 @@ characters.data={
mathclass="relation",
mathname="ntriangleleft",
mirror=0x22EA,
- specials={ "char", 0x22B3, 0x0338 },
+ specials={ "char", 0x22B3, 0x338 },
unicodeslot=0x22EB,
},
[0x22EC]={
@@ -64087,7 +65378,7 @@ characters.data={
mathclass="relation",
mathname="ntrianglelefteq",
mirror=0x22ED,
- specials={ "char", 0x22B4, 0x0338 },
+ specials={ "char", 0x22B4, 0x338 },
unicodeslot=0x22EC,
},
[0x22ED]={
@@ -64098,7 +65389,7 @@ characters.data={
mathclass="relation",
mathname="ntrianglerighteq",
mirror=0x22EC,
- specials={ "char", 0x22B5, 0x0338 },
+ specials={ "char", 0x22B5, 0x338 },
unicodeslot=0x22ED,
},
[0x22EE]={
@@ -64254,8 +65545,14 @@ characters.data={
direction="on",
linebreak="al",
mathspec={
- { class="ord", name="varnothing" },
- { class="ord", name="diameter" },
+ {
+ class="ord",
+ name="varnothing",
+ },
+ {
+ class="ord",
+ name="diameter",
+ },
},
unicodeslot=0x2300,
},
@@ -64315,7 +65612,7 @@ characters.data={
category="sm",
description="LEFT CEILING",
direction="on",
- linebreak="al",
+ linebreak="op",
mathspec={
{
class="open",
@@ -64333,7 +65630,7 @@ characters.data={
category="sm",
description="RIGHT CEILING",
direction="on",
- linebreak="al",
+ linebreak="cl",
mathspec={
{
class="close",
@@ -64351,7 +65648,7 @@ characters.data={
category="sm",
description="LEFT FLOOR",
direction="on",
- linebreak="al",
+ linebreak="op",
mathclass="open",
mathname="lfloor",
mirror=0x230B,
@@ -64361,7 +65658,7 @@ characters.data={
category="sm",
description="RIGHT FLOOR",
direction="on",
- linebreak="al",
+ linebreak="cl",
mathclass="close",
mathname="rfloor",
mirror=0x230A,
@@ -65591,6 +66888,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="topaccent",
+ mathextensible="h",
+ mathfiller="overbracketfill",
mathname="overbracket",
unicodeslot=0x23B4,
},
@@ -65600,6 +66899,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="botaccent",
+ mathextensible="h",
+ mathfiller="underbracketfill",
mathname="underbracket",
unicodeslot=0x23B5,
},
@@ -65875,6 +67176,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="topaccent",
+ mathextensible="h",
+ mathfiller="overparentfill",
mathname="overparent",
unicodeslot=0x23DC,
},
@@ -65884,6 +67187,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="botaccent",
+ mathextensible="h",
+ mathfiller="underparentfill",
mathname="underparent",
unicodeslot=0x23DD,
},
@@ -65893,6 +67198,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="topaccent",
+ mathextensible="h",
+ mathfiller="overbracefill",
mathname="overbrace",
unicodeslot=0x23DE,
},
@@ -65902,6 +67209,8 @@ characters.data={
direction="on",
linebreak="al",
mathclass="botaccent",
+ mathextensible="h",
+ mathfiller="underbracefill",
mathname="underbrace",
unicodeslot=0x23DF,
},
@@ -66047,6 +67356,55 @@ characters.data={
linebreak="id",
unicodeslot=0x23F3,
},
+ [0x23F4]={
+ category="so",
+ description="BLACK MEDIUM LEFT-POINTING TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23F4,
+ },
+ [0x23F5]={
+ category="so",
+ description="BLACK MEDIUM RIGHT-POINTING TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23F5,
+ },
+ [0x23F6]={
+ category="so",
+ description="BLACK MEDIUM UP-POINTING TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23F6,
+ },
+ [0x23F7]={
+ category="so",
+ description="BLACK MEDIUM DOWN-POINTING TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23F7,
+ },
+ [0x23F8]={
+ category="so",
+ description="DOUBLE VERTICAL BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23F8,
+ },
+ [0x23F9]={
+ category="so",
+ description="BLACK SQUARE FOR STOP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23F9,
+ },
+ [0x23FA]={
+ category="so",
+ description="BLACK CIRCLE FOR RECORD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x23FA,
+ },
[0x2400]={
category="so",
description="SYMBOL FOR NULL",
@@ -66405,7 +67763,7 @@ characters.data={
description="CIRCLED DIGIT ONE",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031 },
+ specials={ "circle", 0x31 },
unicodeslot=0x2460,
},
[0x2461]={
@@ -66415,7 +67773,7 @@ characters.data={
description="CIRCLED DIGIT TWO",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0032 },
+ specials={ "circle", 0x32 },
unicodeslot=0x2461,
},
[0x2462]={
@@ -66425,7 +67783,7 @@ characters.data={
description="CIRCLED DIGIT THREE",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0033 },
+ specials={ "circle", 0x33 },
unicodeslot=0x2462,
},
[0x2463]={
@@ -66435,7 +67793,7 @@ characters.data={
description="CIRCLED DIGIT FOUR",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0034 },
+ specials={ "circle", 0x34 },
unicodeslot=0x2463,
},
[0x2464]={
@@ -66445,7 +67803,7 @@ characters.data={
description="CIRCLED DIGIT FIVE",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0035 },
+ specials={ "circle", 0x35 },
unicodeslot=0x2464,
},
[0x2465]={
@@ -66455,7 +67813,7 @@ characters.data={
description="CIRCLED DIGIT SIX",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0036 },
+ specials={ "circle", 0x36 },
unicodeslot=0x2465,
},
[0x2466]={
@@ -66465,7 +67823,7 @@ characters.data={
description="CIRCLED DIGIT SEVEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0037 },
+ specials={ "circle", 0x37 },
unicodeslot=0x2466,
},
[0x2467]={
@@ -66475,7 +67833,7 @@ characters.data={
description="CIRCLED DIGIT EIGHT",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0038 },
+ specials={ "circle", 0x38 },
unicodeslot=0x2467,
},
[0x2468]={
@@ -66485,7 +67843,7 @@ characters.data={
description="CIRCLED DIGIT NINE",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0039 },
+ specials={ "circle", 0x39 },
unicodeslot=0x2468,
},
[0x2469]={
@@ -66495,7 +67853,7 @@ characters.data={
description="CIRCLED NUMBER TEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0030 },
+ specials={ "circle", 0x31, 0x30 },
unicodeslot=0x2469,
},
[0x246A]={
@@ -66505,7 +67863,7 @@ characters.data={
description="CIRCLED NUMBER ELEVEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0031 },
+ specials={ "circle", 0x31, 0x31 },
unicodeslot=0x246A,
},
[0x246B]={
@@ -66515,7 +67873,7 @@ characters.data={
description="CIRCLED NUMBER TWELVE",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0032 },
+ specials={ "circle", 0x31, 0x32 },
unicodeslot=0x246B,
},
[0x246C]={
@@ -66525,7 +67883,7 @@ characters.data={
description="CIRCLED NUMBER THIRTEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0033 },
+ specials={ "circle", 0x31, 0x33 },
unicodeslot=0x246C,
},
[0x246D]={
@@ -66535,7 +67893,7 @@ characters.data={
description="CIRCLED NUMBER FOURTEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0034 },
+ specials={ "circle", 0x31, 0x34 },
unicodeslot=0x246D,
},
[0x246E]={
@@ -66545,7 +67903,7 @@ characters.data={
description="CIRCLED NUMBER FIFTEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0035 },
+ specials={ "circle", 0x31, 0x35 },
unicodeslot=0x246E,
},
[0x246F]={
@@ -66555,7 +67913,7 @@ characters.data={
description="CIRCLED NUMBER SIXTEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0036 },
+ specials={ "circle", 0x31, 0x36 },
unicodeslot=0x246F,
},
[0x2470]={
@@ -66565,7 +67923,7 @@ characters.data={
description="CIRCLED NUMBER SEVENTEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0037 },
+ specials={ "circle", 0x31, 0x37 },
unicodeslot=0x2470,
},
[0x2471]={
@@ -66575,7 +67933,7 @@ characters.data={
description="CIRCLED NUMBER EIGHTEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0038 },
+ specials={ "circle", 0x31, 0x38 },
unicodeslot=0x2471,
},
[0x2472]={
@@ -66585,7 +67943,7 @@ characters.data={
description="CIRCLED NUMBER NINETEEN",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0031, 0x0039 },
+ specials={ "circle", 0x31, 0x39 },
unicodeslot=0x2472,
},
[0x2473]={
@@ -66595,7 +67953,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0032, 0x0030 },
+ specials={ "circle", 0x32, 0x30 },
unicodeslot=0x2473,
},
[0x2474]={
@@ -66605,7 +67963,7 @@ characters.data={
description="PARENTHESIZED DIGIT ONE",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x29 },
unicodeslot=0x2474,
},
[0x2475]={
@@ -66615,7 +67973,7 @@ characters.data={
description="PARENTHESIZED DIGIT TWO",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0032, 0x0029 },
+ specials={ "compat", 0x28, 0x32, 0x29 },
unicodeslot=0x2475,
},
[0x2476]={
@@ -66625,7 +67983,7 @@ characters.data={
description="PARENTHESIZED DIGIT THREE",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0033, 0x0029 },
+ specials={ "compat", 0x28, 0x33, 0x29 },
unicodeslot=0x2476,
},
[0x2477]={
@@ -66635,7 +67993,7 @@ characters.data={
description="PARENTHESIZED DIGIT FOUR",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0034, 0x0029 },
+ specials={ "compat", 0x28, 0x34, 0x29 },
unicodeslot=0x2477,
},
[0x2478]={
@@ -66645,7 +68003,7 @@ characters.data={
description="PARENTHESIZED DIGIT FIVE",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0035, 0x0029 },
+ specials={ "compat", 0x28, 0x35, 0x29 },
unicodeslot=0x2478,
},
[0x2479]={
@@ -66655,7 +68013,7 @@ characters.data={
description="PARENTHESIZED DIGIT SIX",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0036, 0x0029 },
+ specials={ "compat", 0x28, 0x36, 0x29 },
unicodeslot=0x2479,
},
[0x247A]={
@@ -66665,7 +68023,7 @@ characters.data={
description="PARENTHESIZED DIGIT SEVEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0037, 0x0029 },
+ specials={ "compat", 0x28, 0x37, 0x29 },
unicodeslot=0x247A,
},
[0x247B]={
@@ -66675,7 +68033,7 @@ characters.data={
description="PARENTHESIZED DIGIT EIGHT",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0038, 0x0029 },
+ specials={ "compat", 0x28, 0x38, 0x29 },
unicodeslot=0x247B,
},
[0x247C]={
@@ -66685,7 +68043,7 @@ characters.data={
description="PARENTHESIZED DIGIT NINE",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0039, 0x0029 },
+ specials={ "compat", 0x28, 0x39, 0x29 },
unicodeslot=0x247C,
},
[0x247D]={
@@ -66695,7 +68053,7 @@ characters.data={
description="PARENTHESIZED NUMBER TEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0030, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x30, 0x29 },
unicodeslot=0x247D,
},
[0x247E]={
@@ -66705,7 +68063,7 @@ characters.data={
description="PARENTHESIZED NUMBER ELEVEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0031, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x31, 0x29 },
unicodeslot=0x247E,
},
[0x247F]={
@@ -66715,7 +68073,7 @@ characters.data={
description="PARENTHESIZED NUMBER TWELVE",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0032, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x32, 0x29 },
unicodeslot=0x247F,
},
[0x2480]={
@@ -66725,7 +68083,7 @@ characters.data={
description="PARENTHESIZED NUMBER THIRTEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0033, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x33, 0x29 },
unicodeslot=0x2480,
},
[0x2481]={
@@ -66735,7 +68093,7 @@ characters.data={
description="PARENTHESIZED NUMBER FOURTEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0034, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x34, 0x29 },
unicodeslot=0x2481,
},
[0x2482]={
@@ -66745,7 +68103,7 @@ characters.data={
description="PARENTHESIZED NUMBER FIFTEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0035, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x35, 0x29 },
unicodeslot=0x2482,
},
[0x2483]={
@@ -66755,7 +68113,7 @@ characters.data={
description="PARENTHESIZED NUMBER SIXTEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0036, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x36, 0x29 },
unicodeslot=0x2483,
},
[0x2484]={
@@ -66765,7 +68123,7 @@ characters.data={
description="PARENTHESIZED NUMBER SEVENTEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0037, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x37, 0x29 },
unicodeslot=0x2484,
},
[0x2485]={
@@ -66775,7 +68133,7 @@ characters.data={
description="PARENTHESIZED NUMBER EIGHTEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0038, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x38, 0x29 },
unicodeslot=0x2485,
},
[0x2486]={
@@ -66785,7 +68143,7 @@ characters.data={
description="PARENTHESIZED NUMBER NINETEEN",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0031, 0x0039, 0x0029 },
+ specials={ "compat", 0x28, 0x31, 0x39, 0x29 },
unicodeslot=0x2486,
},
[0x2487]={
@@ -66795,7 +68153,7 @@ characters.data={
description="PARENTHESIZED NUMBER TWENTY",
direction="on",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0032, 0x0030, 0x0029 },
+ specials={ "compat", 0x28, 0x32, 0x30, 0x29 },
unicodeslot=0x2487,
},
[0x2488]={
@@ -66805,7 +68163,7 @@ characters.data={
description="DIGIT ONE FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x002E },
+ specials={ "compat", 0x31, 0x2E },
unicodeslot=0x2488,
},
[0x2489]={
@@ -66815,7 +68173,7 @@ characters.data={
description="DIGIT TWO FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0032, 0x002E },
+ specials={ "compat", 0x32, 0x2E },
unicodeslot=0x2489,
},
[0x248A]={
@@ -66825,7 +68183,7 @@ characters.data={
description="DIGIT THREE FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0033, 0x002E },
+ specials={ "compat", 0x33, 0x2E },
unicodeslot=0x248A,
},
[0x248B]={
@@ -66835,7 +68193,7 @@ characters.data={
description="DIGIT FOUR FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0034, 0x002E },
+ specials={ "compat", 0x34, 0x2E },
unicodeslot=0x248B,
},
[0x248C]={
@@ -66845,7 +68203,7 @@ characters.data={
description="DIGIT FIVE FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0035, 0x002E },
+ specials={ "compat", 0x35, 0x2E },
unicodeslot=0x248C,
},
[0x248D]={
@@ -66855,7 +68213,7 @@ characters.data={
description="DIGIT SIX FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0036, 0x002E },
+ specials={ "compat", 0x36, 0x2E },
unicodeslot=0x248D,
},
[0x248E]={
@@ -66865,7 +68223,7 @@ characters.data={
description="DIGIT SEVEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0037, 0x002E },
+ specials={ "compat", 0x37, 0x2E },
unicodeslot=0x248E,
},
[0x248F]={
@@ -66875,7 +68233,7 @@ characters.data={
description="DIGIT EIGHT FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0038, 0x002E },
+ specials={ "compat", 0x38, 0x2E },
unicodeslot=0x248F,
},
[0x2490]={
@@ -66885,7 +68243,7 @@ characters.data={
description="DIGIT NINE FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0039, 0x002E },
+ specials={ "compat", 0x39, 0x2E },
unicodeslot=0x2490,
},
[0x2491]={
@@ -66895,7 +68253,7 @@ characters.data={
description="NUMBER TEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0030, 0x002E },
+ specials={ "compat", 0x31, 0x30, 0x2E },
unicodeslot=0x2491,
},
[0x2492]={
@@ -66905,7 +68263,7 @@ characters.data={
description="NUMBER ELEVEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0031, 0x002E },
+ specials={ "compat", 0x31, 0x31, 0x2E },
unicodeslot=0x2492,
},
[0x2493]={
@@ -66915,7 +68273,7 @@ characters.data={
description="NUMBER TWELVE FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0032, 0x002E },
+ specials={ "compat", 0x31, 0x32, 0x2E },
unicodeslot=0x2493,
},
[0x2494]={
@@ -66925,7 +68283,7 @@ characters.data={
description="NUMBER THIRTEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0033, 0x002E },
+ specials={ "compat", 0x31, 0x33, 0x2E },
unicodeslot=0x2494,
},
[0x2495]={
@@ -66935,7 +68293,7 @@ characters.data={
description="NUMBER FOURTEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0034, 0x002E },
+ specials={ "compat", 0x31, 0x34, 0x2E },
unicodeslot=0x2495,
},
[0x2496]={
@@ -66945,7 +68303,7 @@ characters.data={
description="NUMBER FIFTEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0035, 0x002E },
+ specials={ "compat", 0x31, 0x35, 0x2E },
unicodeslot=0x2496,
},
[0x2497]={
@@ -66955,7 +68313,7 @@ characters.data={
description="NUMBER SIXTEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0036, 0x002E },
+ specials={ "compat", 0x31, 0x36, 0x2E },
unicodeslot=0x2497,
},
[0x2498]={
@@ -66965,7 +68323,7 @@ characters.data={
description="NUMBER SEVENTEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0037, 0x002E },
+ specials={ "compat", 0x31, 0x37, 0x2E },
unicodeslot=0x2498,
},
[0x2499]={
@@ -66975,7 +68333,7 @@ characters.data={
description="NUMBER EIGHTEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0038, 0x002E },
+ specials={ "compat", 0x31, 0x38, 0x2E },
unicodeslot=0x2499,
},
[0x249A]={
@@ -66985,7 +68343,7 @@ characters.data={
description="NUMBER NINETEEN FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x0039, 0x002E },
+ specials={ "compat", 0x31, 0x39, 0x2E },
unicodeslot=0x249A,
},
[0x249B]={
@@ -66995,7 +68353,7 @@ characters.data={
description="NUMBER TWENTY FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0032, 0x0030, 0x002E },
+ specials={ "compat", 0x32, 0x30, 0x2E },
unicodeslot=0x249B,
},
[0x249C]={
@@ -67005,7 +68363,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER A",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0061, 0x0029 },
+ specials={ "compat", 0x28, 0x61, 0x29 },
unicodeslot=0x249C,
},
[0x249D]={
@@ -67015,7 +68373,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER B",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0062, 0x0029 },
+ specials={ "compat", 0x28, 0x62, 0x29 },
unicodeslot=0x249D,
},
[0x249E]={
@@ -67025,7 +68383,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER C",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0063, 0x0029 },
+ specials={ "compat", 0x28, 0x63, 0x29 },
unicodeslot=0x249E,
},
[0x249F]={
@@ -67035,7 +68393,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER D",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0064, 0x0029 },
+ specials={ "compat", 0x28, 0x64, 0x29 },
unicodeslot=0x249F,
},
[0x24A0]={
@@ -67045,7 +68403,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER E",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0065, 0x0029 },
+ specials={ "compat", 0x28, 0x65, 0x29 },
unicodeslot=0x24A0,
},
[0x24A1]={
@@ -67055,7 +68413,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER F",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0066, 0x0029 },
+ specials={ "compat", 0x28, 0x66, 0x29 },
unicodeslot=0x24A1,
},
[0x24A2]={
@@ -67065,7 +68423,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER G",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0067, 0x0029 },
+ specials={ "compat", 0x28, 0x67, 0x29 },
unicodeslot=0x24A2,
},
[0x24A3]={
@@ -67075,7 +68433,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER H",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0068, 0x0029 },
+ specials={ "compat", 0x28, 0x68, 0x29 },
unicodeslot=0x24A3,
},
[0x24A4]={
@@ -67085,7 +68443,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER I",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0069, 0x0029 },
+ specials={ "compat", 0x28, 0x69, 0x29 },
unicodeslot=0x24A4,
},
[0x24A5]={
@@ -67095,7 +68453,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER J",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x006A, 0x0029 },
+ specials={ "compat", 0x28, 0x6A, 0x29 },
unicodeslot=0x24A5,
},
[0x24A6]={
@@ -67105,7 +68463,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER K",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x006B, 0x0029 },
+ specials={ "compat", 0x28, 0x6B, 0x29 },
unicodeslot=0x24A6,
},
[0x24A7]={
@@ -67115,7 +68473,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER L",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x006C, 0x0029 },
+ specials={ "compat", 0x28, 0x6C, 0x29 },
unicodeslot=0x24A7,
},
[0x24A8]={
@@ -67125,7 +68483,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER M",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x006D, 0x0029 },
+ specials={ "compat", 0x28, 0x6D, 0x29 },
unicodeslot=0x24A8,
},
[0x24A9]={
@@ -67135,7 +68493,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER N",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x006E, 0x0029 },
+ specials={ "compat", 0x28, 0x6E, 0x29 },
unicodeslot=0x24A9,
},
[0x24AA]={
@@ -67145,7 +68503,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER O",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x006F, 0x0029 },
+ specials={ "compat", 0x28, 0x6F, 0x29 },
unicodeslot=0x24AA,
},
[0x24AB]={
@@ -67155,7 +68513,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER P",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0070, 0x0029 },
+ specials={ "compat", 0x28, 0x70, 0x29 },
unicodeslot=0x24AB,
},
[0x24AC]={
@@ -67165,7 +68523,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER Q",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0071, 0x0029 },
+ specials={ "compat", 0x28, 0x71, 0x29 },
unicodeslot=0x24AC,
},
[0x24AD]={
@@ -67175,7 +68533,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER R",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0072, 0x0029 },
+ specials={ "compat", 0x28, 0x72, 0x29 },
unicodeslot=0x24AD,
},
[0x24AE]={
@@ -67185,7 +68543,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER S",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0073, 0x0029 },
+ specials={ "compat", 0x28, 0x73, 0x29 },
unicodeslot=0x24AE,
},
[0x24AF]={
@@ -67195,7 +68553,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER T",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0074, 0x0029 },
+ specials={ "compat", 0x28, 0x74, 0x29 },
unicodeslot=0x24AF,
},
[0x24B0]={
@@ -67205,7 +68563,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER U",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0075, 0x0029 },
+ specials={ "compat", 0x28, 0x75, 0x29 },
unicodeslot=0x24B0,
},
[0x24B1]={
@@ -67215,7 +68573,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER V",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0076, 0x0029 },
+ specials={ "compat", 0x28, 0x76, 0x29 },
unicodeslot=0x24B1,
},
[0x24B2]={
@@ -67225,7 +68583,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER W",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0077, 0x0029 },
+ specials={ "compat", 0x28, 0x77, 0x29 },
unicodeslot=0x24B2,
},
[0x24B3]={
@@ -67235,7 +68593,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER X",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0078, 0x0029 },
+ specials={ "compat", 0x28, 0x78, 0x29 },
unicodeslot=0x24B3,
},
[0x24B4]={
@@ -67245,7 +68603,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER Y",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0079, 0x0029 },
+ specials={ "compat", 0x28, 0x79, 0x29 },
unicodeslot=0x24B4,
},
[0x24B5]={
@@ -67255,7 +68613,7 @@ characters.data={
description="PARENTHESIZED LATIN SMALL LETTER Z",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x007A, 0x0029 },
+ specials={ "compat", 0x28, 0x7A, 0x29 },
unicodeslot=0x24B5,
},
[0x24B6]={
@@ -67266,7 +68624,7 @@ characters.data={
direction="l",
lccode=0x24D0,
linebreak="ai",
- specials={ "circle", 0x0041 },
+ specials={ "circle", 0x41 },
unicodeslot=0x24B6,
},
[0x24B7]={
@@ -67277,7 +68635,7 @@ characters.data={
direction="l",
lccode=0x24D1,
linebreak="ai",
- specials={ "circle", 0x0042 },
+ specials={ "circle", 0x42 },
unicodeslot=0x24B7,
},
[0x24B8]={
@@ -67288,7 +68646,7 @@ characters.data={
direction="l",
lccode=0x24D2,
linebreak="ai",
- specials={ "circle", 0x0043 },
+ specials={ "circle", 0x43 },
unicodeslot=0x24B8,
},
[0x24B9]={
@@ -67299,7 +68657,7 @@ characters.data={
direction="l",
lccode=0x24D3,
linebreak="ai",
- specials={ "circle", 0x0044 },
+ specials={ "circle", 0x44 },
unicodeslot=0x24B9,
},
[0x24BA]={
@@ -67310,7 +68668,7 @@ characters.data={
direction="l",
lccode=0x24D4,
linebreak="ai",
- specials={ "circle", 0x0045 },
+ specials={ "circle", 0x45 },
unicodeslot=0x24BA,
},
[0x24BB]={
@@ -67321,7 +68679,7 @@ characters.data={
direction="l",
lccode=0x24D5,
linebreak="ai",
- specials={ "circle", 0x0046 },
+ specials={ "circle", 0x46 },
unicodeslot=0x24BB,
},
[0x24BC]={
@@ -67332,7 +68690,7 @@ characters.data={
direction="l",
lccode=0x24D6,
linebreak="ai",
- specials={ "circle", 0x0047 },
+ specials={ "circle", 0x47 },
unicodeslot=0x24BC,
},
[0x24BD]={
@@ -67343,7 +68701,7 @@ characters.data={
direction="l",
lccode=0x24D7,
linebreak="ai",
- specials={ "circle", 0x0048 },
+ specials={ "circle", 0x48 },
unicodeslot=0x24BD,
},
[0x24BE]={
@@ -67354,7 +68712,7 @@ characters.data={
direction="l",
lccode=0x24D8,
linebreak="ai",
- specials={ "circle", 0x0049 },
+ specials={ "circle", 0x49 },
unicodeslot=0x24BE,
},
[0x24BF]={
@@ -67365,7 +68723,7 @@ characters.data={
direction="l",
lccode=0x24D9,
linebreak="ai",
- specials={ "circle", 0x004A },
+ specials={ "circle", 0x4A },
unicodeslot=0x24BF,
},
[0x24C0]={
@@ -67376,7 +68734,7 @@ characters.data={
direction="l",
lccode=0x24DA,
linebreak="ai",
- specials={ "circle", 0x004B },
+ specials={ "circle", 0x4B },
unicodeslot=0x24C0,
},
[0x24C1]={
@@ -67387,7 +68745,7 @@ characters.data={
direction="l",
lccode=0x24DB,
linebreak="ai",
- specials={ "circle", 0x004C },
+ specials={ "circle", 0x4C },
unicodeslot=0x24C1,
},
[0x24C2]={
@@ -67398,7 +68756,7 @@ characters.data={
direction="l",
lccode=0x24DC,
linebreak="ai",
- specials={ "circle", 0x004D },
+ specials={ "circle", 0x4D },
unicodeslot=0x24C2,
variants={
[0xFE0E]="text style",
@@ -67413,7 +68771,7 @@ characters.data={
direction="l",
lccode=0x24DD,
linebreak="ai",
- specials={ "circle", 0x004E },
+ specials={ "circle", 0x4E },
unicodeslot=0x24C3,
},
[0x24C4]={
@@ -67424,7 +68782,7 @@ characters.data={
direction="l",
lccode=0x24DE,
linebreak="ai",
- specials={ "circle", 0x004F },
+ specials={ "circle", 0x4F },
unicodeslot=0x24C4,
},
[0x24C5]={
@@ -67435,7 +68793,7 @@ characters.data={
direction="l",
lccode=0x24DF,
linebreak="ai",
- specials={ "circle", 0x0050 },
+ specials={ "circle", 0x50 },
unicodeslot=0x24C5,
},
[0x24C6]={
@@ -67446,7 +68804,7 @@ characters.data={
direction="l",
lccode=0x24E0,
linebreak="ai",
- specials={ "circle", 0x0051 },
+ specials={ "circle", 0x51 },
unicodeslot=0x24C6,
},
[0x24C7]={
@@ -67459,7 +68817,7 @@ characters.data={
linebreak="ai",
mathclass="ordinary",
mathname="circledR",
- specials={ "circle", 0x0052 },
+ specials={ "circle", 0x52 },
unicodeslot=0x24C7,
},
[0x24C8]={
@@ -67472,7 +68830,7 @@ characters.data={
linebreak="ai",
mathclass="ordinary",
mathname="circledS",
- specials={ "circle", 0x0053 },
+ specials={ "circle", 0x53 },
unicodeslot=0x24C8,
},
[0x24C9]={
@@ -67483,7 +68841,7 @@ characters.data={
direction="l",
lccode=0x24E3,
linebreak="ai",
- specials={ "circle", 0x0054 },
+ specials={ "circle", 0x54 },
unicodeslot=0x24C9,
},
[0x24CA]={
@@ -67494,7 +68852,7 @@ characters.data={
direction="l",
lccode=0x24E4,
linebreak="ai",
- specials={ "circle", 0x0055 },
+ specials={ "circle", 0x55 },
unicodeslot=0x24CA,
},
[0x24CB]={
@@ -67505,7 +68863,7 @@ characters.data={
direction="l",
lccode=0x24E5,
linebreak="ai",
- specials={ "circle", 0x0056 },
+ specials={ "circle", 0x56 },
unicodeslot=0x24CB,
},
[0x24CC]={
@@ -67516,7 +68874,7 @@ characters.data={
direction="l",
lccode=0x24E6,
linebreak="ai",
- specials={ "circle", 0x0057 },
+ specials={ "circle", 0x57 },
unicodeslot=0x24CC,
},
[0x24CD]={
@@ -67527,7 +68885,7 @@ characters.data={
direction="l",
lccode=0x24E7,
linebreak="ai",
- specials={ "circle", 0x0058 },
+ specials={ "circle", 0x58 },
unicodeslot=0x24CD,
},
[0x24CE]={
@@ -67538,7 +68896,7 @@ characters.data={
direction="l",
lccode=0x24E8,
linebreak="ai",
- specials={ "circle", 0x0059 },
+ specials={ "circle", 0x59 },
unicodeslot=0x24CE,
},
[0x24CF]={
@@ -67549,7 +68907,7 @@ characters.data={
direction="l",
lccode=0x24E9,
linebreak="ai",
- specials={ "circle", 0x005A },
+ specials={ "circle", 0x5A },
unicodeslot=0x24CF,
},
[0x24D0]={
@@ -67559,7 +68917,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER A",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0061 },
+ specials={ "circle", 0x61 },
uccode=0x24B6,
unicodeslot=0x24D0,
},
@@ -67570,7 +68928,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER B",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0062 },
+ specials={ "circle", 0x62 },
uccode=0x24B7,
unicodeslot=0x24D1,
},
@@ -67581,7 +68939,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER C",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0063 },
+ specials={ "circle", 0x63 },
uccode=0x24B8,
unicodeslot=0x24D2,
},
@@ -67592,7 +68950,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER D",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0064 },
+ specials={ "circle", 0x64 },
uccode=0x24B9,
unicodeslot=0x24D3,
},
@@ -67603,7 +68961,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER E",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0065 },
+ specials={ "circle", 0x65 },
uccode=0x24BA,
unicodeslot=0x24D4,
},
@@ -67614,7 +68972,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER F",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0066 },
+ specials={ "circle", 0x66 },
uccode=0x24BB,
unicodeslot=0x24D5,
},
@@ -67625,7 +68983,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER G",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0067 },
+ specials={ "circle", 0x67 },
uccode=0x24BC,
unicodeslot=0x24D6,
},
@@ -67636,7 +68994,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER H",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0068 },
+ specials={ "circle", 0x68 },
uccode=0x24BD,
unicodeslot=0x24D7,
},
@@ -67647,7 +69005,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER I",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0069 },
+ specials={ "circle", 0x69 },
uccode=0x24BE,
unicodeslot=0x24D8,
},
@@ -67658,7 +69016,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER J",
direction="l",
linebreak="ai",
- specials={ "circle", 0x006A },
+ specials={ "circle", 0x6A },
uccode=0x24BF,
unicodeslot=0x24D9,
},
@@ -67669,7 +69027,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER K",
direction="l",
linebreak="ai",
- specials={ "circle", 0x006B },
+ specials={ "circle", 0x6B },
uccode=0x24C0,
unicodeslot=0x24DA,
},
@@ -67680,7 +69038,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER L",
direction="l",
linebreak="ai",
- specials={ "circle", 0x006C },
+ specials={ "circle", 0x6C },
uccode=0x24C1,
unicodeslot=0x24DB,
},
@@ -67691,7 +69049,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER M",
direction="l",
linebreak="ai",
- specials={ "circle", 0x006D },
+ specials={ "circle", 0x6D },
uccode=0x24C2,
unicodeslot=0x24DC,
},
@@ -67702,7 +69060,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER N",
direction="l",
linebreak="ai",
- specials={ "circle", 0x006E },
+ specials={ "circle", 0x6E },
uccode=0x24C3,
unicodeslot=0x24DD,
},
@@ -67713,7 +69071,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER O",
direction="l",
linebreak="ai",
- specials={ "circle", 0x006F },
+ specials={ "circle", 0x6F },
uccode=0x24C4,
unicodeslot=0x24DE,
},
@@ -67724,7 +69082,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER P",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0070 },
+ specials={ "circle", 0x70 },
uccode=0x24C5,
unicodeslot=0x24DF,
},
@@ -67735,7 +69093,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER Q",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0071 },
+ specials={ "circle", 0x71 },
uccode=0x24C6,
unicodeslot=0x24E0,
},
@@ -67746,7 +69104,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER R",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0072 },
+ specials={ "circle", 0x72 },
uccode=0x24C7,
unicodeslot=0x24E1,
},
@@ -67757,7 +69115,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER S",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0073 },
+ specials={ "circle", 0x73 },
uccode=0x24C8,
unicodeslot=0x24E2,
},
@@ -67768,7 +69126,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER T",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0074 },
+ specials={ "circle", 0x74 },
uccode=0x24C9,
unicodeslot=0x24E3,
},
@@ -67779,7 +69137,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER U",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0075 },
+ specials={ "circle", 0x75 },
uccode=0x24CA,
unicodeslot=0x24E4,
},
@@ -67790,7 +69148,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER V",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0076 },
+ specials={ "circle", 0x76 },
uccode=0x24CB,
unicodeslot=0x24E5,
},
@@ -67801,7 +69159,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER W",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0077 },
+ specials={ "circle", 0x77 },
uccode=0x24CC,
unicodeslot=0x24E6,
},
@@ -67812,7 +69170,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER X",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0078 },
+ specials={ "circle", 0x78 },
uccode=0x24CD,
unicodeslot=0x24E7,
},
@@ -67823,7 +69181,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER Y",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0079 },
+ specials={ "circle", 0x79 },
uccode=0x24CE,
unicodeslot=0x24E8,
},
@@ -67834,7 +69192,7 @@ characters.data={
description="CIRCLED LATIN SMALL LETTER Z",
direction="l",
linebreak="ai",
- specials={ "circle", 0x007A },
+ specials={ "circle", 0x7A },
uccode=0x24CF,
unicodeslot=0x24E9,
},
@@ -67843,7 +69201,7 @@ characters.data={
description="CIRCLED DIGIT ZERO",
direction="on",
linebreak="ai",
- specials={ "circle", 0x0030 },
+ specials={ "circle", 0x30 },
unicodeslot=0x24EA,
},
[0x24EB]={
@@ -72236,6 +73594,13 @@ characters.data={
linebreak="id",
unicodeslot=0x26FF,
},
+ [0x2700]={
+ category="so",
+ description="BLACK SAFETY SCISSORS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x2700,
+ },
[0x2701]={
category="so",
description="UPPER BLADE SCISSORS",
@@ -72957,14 +74322,14 @@ characters.data={
category="so",
description="HEAVY LOW SINGLE COMMA QUOTATION MARK ORNAMENT",
direction="on",
- linebreak="al",
+ linebreak="qu",
unicodeslot=0x275F,
},
[0x2760]={
category="so",
description="HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT",
direction="on",
- linebreak="al",
+ linebreak="qu",
unicodeslot=0x2760,
},
[0x2761]={
@@ -78794,7 +80159,7 @@ characters.data={
linebreak="al",
mathclass="relation",
mathname="coloncolonequals",
- specials={ "compat", 0x003A, 0x003A, 0x003D },
+ specials={ "compat", 0x3A, 0x3A, 0x3D },
unicodeslot=0x2A74,
},
[0x2A75]={
@@ -78802,7 +80167,7 @@ characters.data={
description="TWO CONSECUTIVE EQUALS SIGNS",
direction="on",
linebreak="al",
- specials={ "compat", 0x003D, 0x003D },
+ specials={ "compat", 0x3D, 0x3D },
unicodeslot=0x2A75,
},
[0x2A76]={
@@ -78810,7 +80175,7 @@ characters.data={
description="THREE CONSECUTIVE EQUALS SIGNS",
direction="on",
linebreak="al",
- specials={ "compat", 0x003D, 0x003D, 0x003D },
+ specials={ "compat", 0x3D, 0x3D, 0x3D },
unicodeslot=0x2A76,
},
[0x2A77]={
@@ -79659,7 +81024,7 @@ characters.data={
description="FORKING",
direction="on",
linebreak="al",
- specials={ "char", 0x2ADD, 0x0338 },
+ specials={ "char", 0x2ADD, 0x338 },
unicodeslot=0x2ADC,
},
[0x2ADD]={
@@ -80491,6 +81856,27 @@ characters.data={
linebreak="al",
unicodeslot=0x2B4C,
},
+ [0x2B4D]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B4D,
+ },
+ [0x2B4E]={
+ category="so",
+ description="SHORT SLANTED NORTH ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B4E,
+ },
+ [0x2B4F]={
+ category="so",
+ description="SHORT BACKSLANTED SOUTH ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B4F,
+ },
[0x2B50]={
category="so",
description="WHITE MEDIUM STAR",
@@ -80574,6 +81960,790 @@ characters.data={
linebreak="ai",
unicodeslot=0x2B59,
},
+ [0x2B5A]={
+ category="so",
+ description="SLANTED NORTH ARROW WITH HOOKED HEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B5A,
+ },
+ [0x2B5B]={
+ category="so",
+ description="BACKSLANTED SOUTH ARROW WITH HOOKED TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B5B,
+ },
+ [0x2B5C]={
+ category="so",
+ description="SLANTED NORTH ARROW WITH HORIZONTAL TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B5C,
+ },
+ [0x2B5D]={
+ category="so",
+ description="BACKSLANTED SOUTH ARROW WITH HORIZONTAL TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B5D,
+ },
+ [0x2B5E]={
+ category="so",
+ description="BENT ARROW POINTING DOWNWARDS THEN NORTH EAST",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B5E,
+ },
+ [0x2B5F]={
+ category="so",
+ description="SHORT BENT ARROW POINTING DOWNWARDS THEN NORTH EAST",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B5F,
+ },
+ [0x2B60]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B60,
+ },
+ [0x2B61]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B61,
+ },
+ [0x2B62]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B62,
+ },
+ [0x2B63]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B63,
+ },
+ [0x2B64]={
+ category="so",
+ description="LEFT RIGHT TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B64,
+ },
+ [0x2B65]={
+ category="so",
+ description="UP DOWN TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B65,
+ },
+ [0x2B66]={
+ category="so",
+ description="NORTH WEST TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B66,
+ },
+ [0x2B67]={
+ category="so",
+ description="NORTH EAST TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B67,
+ },
+ [0x2B68]={
+ category="so",
+ description="SOUTH EAST TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B68,
+ },
+ [0x2B69]={
+ category="so",
+ description="SOUTH WEST TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B69,
+ },
+ [0x2B6A]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED DASHED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B6A,
+ },
+ [0x2B6B]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED DASHED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B6B,
+ },
+ [0x2B6C]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED DASHED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B6C,
+ },
+ [0x2B6D]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED DASHED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B6D,
+ },
+ [0x2B6E]={
+ category="so",
+ description="CLOCKWISE TRIANGLE-HEADED OPEN CIRCLE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B6E,
+ },
+ [0x2B6F]={
+ category="so",
+ description="ANTICLOCKWISE TRIANGLE-HEADED OPEN CIRCLE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B6F,
+ },
+ [0x2B70]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B70,
+ },
+ [0x2B71]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B71,
+ },
+ [0x2B72]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B72,
+ },
+ [0x2B73]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B73,
+ },
+ [0x2B76]={
+ category="so",
+ description="NORTH WEST TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B76,
+ },
+ [0x2B77]={
+ category="so",
+ description="NORTH EAST TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B77,
+ },
+ [0x2B78]={
+ category="so",
+ description="SOUTH EAST TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B78,
+ },
+ [0x2B79]={
+ category="so",
+ description="SOUTH WEST TRIANGLE-HEADED ARROW TO BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B79,
+ },
+ [0x2B7A]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH DOUBLE HORIZONTAL STROKE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B7A,
+ },
+ [0x2B7B]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH DOUBLE HORIZONTAL STROKE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B7B,
+ },
+ [0x2B7C]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH DOUBLE HORIZONTAL STROKE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B7C,
+ },
+ [0x2B7D]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH DOUBLE HORIZONTAL STROKE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B7D,
+ },
+ [0x2B7E]={
+ category="so",
+ description="HORIZONTAL TAB KEY",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B7E,
+ },
+ [0x2B7F]={
+ category="so",
+ description="VERTICAL TAB KEY",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B7F,
+ },
+ [0x2B80]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW OVER RIGHTWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B80,
+ },
+ [0x2B81]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW LEFTWARDS OF DOWNWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B81,
+ },
+ [0x2B82]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW OVER LEFTWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B82,
+ },
+ [0x2B83]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW LEFTWARDS OF UPWARDS TRIANGLE-HEADED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B83,
+ },
+ [0x2B84]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED PAIRED ARROWS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B84,
+ },
+ [0x2B85]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED PAIRED ARROWS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B85,
+ },
+ [0x2B86]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED PAIRED ARROWS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B86,
+ },
+ [0x2B87]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED PAIRED ARROWS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B87,
+ },
+ [0x2B88]={
+ category="so",
+ description="LEFTWARDS BLACK CIRCLED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B88,
+ },
+ [0x2B89]={
+ category="so",
+ description="UPWARDS BLACK CIRCLED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B89,
+ },
+ [0x2B8A]={
+ category="so",
+ description="RIGHTWARDS BLACK CIRCLED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B8A,
+ },
+ [0x2B8B]={
+ category="so",
+ description="DOWNWARDS BLACK CIRCLED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B8B,
+ },
+ [0x2B8C]={
+ category="so",
+ description="ANTICLOCKWISE TRIANGLE-HEADED RIGHT U-SHAPED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B8C,
+ },
+ [0x2B8D]={
+ category="so",
+ description="ANTICLOCKWISE TRIANGLE-HEADED BOTTOM U-SHAPED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B8D,
+ },
+ [0x2B8E]={
+ category="so",
+ description="ANTICLOCKWISE TRIANGLE-HEADED LEFT U-SHAPED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B8E,
+ },
+ [0x2B8F]={
+ category="so",
+ description="ANTICLOCKWISE TRIANGLE-HEADED TOP U-SHAPED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B8F,
+ },
+ [0x2B90]={
+ category="so",
+ description="RETURN LEFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B90,
+ },
+ [0x2B91]={
+ category="so",
+ description="RETURN RIGHT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B91,
+ },
+ [0x2B92]={
+ category="so",
+ description="NEWLINE LEFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B92,
+ },
+ [0x2B93]={
+ category="so",
+ description="NEWLINE RIGHT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B93,
+ },
+ [0x2B94]={
+ category="so",
+ description="FOUR CORNER ARROWS CIRCLING ANTICLOCKWISE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B94,
+ },
+ [0x2B95]={
+ category="so",
+ description="RIGHTWARDS BLACK ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B95,
+ },
+ [0x2B98]={
+ category="so",
+ description="THREE-D TOP-LIGHTED LEFTWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B98,
+ },
+ [0x2B99]={
+ category="so",
+ description="THREE-D RIGHT-LIGHTED UPWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B99,
+ },
+ [0x2B9A]={
+ category="so",
+ description="THREE-D TOP-LIGHTED RIGHTWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B9A,
+ },
+ [0x2B9B]={
+ category="so",
+ description="THREE-D LEFT-LIGHTED DOWNWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B9B,
+ },
+ [0x2B9C]={
+ category="so",
+ description="BLACK LEFTWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B9C,
+ },
+ [0x2B9D]={
+ category="so",
+ description="BLACK UPWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B9D,
+ },
+ [0x2B9E]={
+ category="so",
+ description="BLACK RIGHTWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B9E,
+ },
+ [0x2B9F]={
+ category="so",
+ description="BLACK DOWNWARDS EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2B9F,
+ },
+ [0x2BA0]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH LONG TIP LEFTWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA0,
+ },
+ [0x2BA1]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH LONG TIP RIGHTWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA1,
+ },
+ [0x2BA2]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH LONG TIP LEFTWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA2,
+ },
+ [0x2BA3]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH LONG TIP RIGHTWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA3,
+ },
+ [0x2BA4]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP UPWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA4,
+ },
+ [0x2BA5]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP UPWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA5,
+ },
+ [0x2BA6]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP DOWNWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA6,
+ },
+ [0x2BA7]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP DOWNWARDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA7,
+ },
+ [0x2BA8]={
+ category="so",
+ description="BLACK CURVED DOWNWARDS AND LEFTWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA8,
+ },
+ [0x2BA9]={
+ category="so",
+ description="BLACK CURVED DOWNWARDS AND RIGHTWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BA9,
+ },
+ [0x2BAA]={
+ category="so",
+ description="BLACK CURVED UPWARDS AND LEFTWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BAA,
+ },
+ [0x2BAB]={
+ category="so",
+ description="BLACK CURVED UPWARDS AND RIGHTWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BAB,
+ },
+ [0x2BAC]={
+ category="so",
+ description="BLACK CURVED LEFTWARDS AND UPWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BAC,
+ },
+ [0x2BAD]={
+ category="so",
+ description="BLACK CURVED RIGHTWARDS AND UPWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BAD,
+ },
+ [0x2BAE]={
+ category="so",
+ description="BLACK CURVED LEFTWARDS AND DOWNWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BAE,
+ },
+ [0x2BAF]={
+ category="so",
+ description="BLACK CURVED RIGHTWARDS AND DOWNWARDS ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BAF,
+ },
+ [0x2BB0]={
+ category="so",
+ description="RIBBON ARROW DOWN LEFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB0,
+ },
+ [0x2BB1]={
+ category="so",
+ description="RIBBON ARROW DOWN RIGHT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB1,
+ },
+ [0x2BB2]={
+ category="so",
+ description="RIBBON ARROW UP LEFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB2,
+ },
+ [0x2BB3]={
+ category="so",
+ description="RIBBON ARROW UP RIGHT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB3,
+ },
+ [0x2BB4]={
+ category="so",
+ description="RIBBON ARROW LEFT UP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB4,
+ },
+ [0x2BB5]={
+ category="so",
+ description="RIBBON ARROW RIGHT UP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB5,
+ },
+ [0x2BB6]={
+ category="so",
+ description="RIBBON ARROW LEFT DOWN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB6,
+ },
+ [0x2BB7]={
+ category="so",
+ description="RIBBON ARROW RIGHT DOWN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB7,
+ },
+ [0x2BB8]={
+ category="so",
+ description="UPWARDS WHITE ARROW FROM BAR WITH HORIZONTAL BAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB8,
+ },
+ [0x2BB9]={
+ category="so",
+ description="UP ARROWHEAD IN A RECTANGLE BOX",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BB9,
+ },
+ [0x2BBD]={
+ category="so",
+ description="BALLOT BOX WITH LIGHT X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BBD,
+ },
+ [0x2BBE]={
+ category="so",
+ description="CIRCLED X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BBE,
+ },
+ [0x2BBF]={
+ category="so",
+ description="CIRCLED BOLD X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BBF,
+ },
+ [0x2BC0]={
+ category="so",
+ description="BLACK SQUARE CENTRED",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC0,
+ },
+ [0x2BC1]={
+ category="so",
+ description="BLACK DIAMOND CENTRED",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC1,
+ },
+ [0x2BC2]={
+ category="so",
+ description="TURNED BLACK PENTAGON",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC2,
+ },
+ [0x2BC3]={
+ category="so",
+ description="HORIZONTAL BLACK OCTAGON",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC3,
+ },
+ [0x2BC4]={
+ category="so",
+ description="BLACK OCTAGON",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC4,
+ },
+ [0x2BC5]={
+ category="so",
+ description="BLACK MEDIUM UP-POINTING TRIANGLE CENTRED",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC5,
+ },
+ [0x2BC6]={
+ category="so",
+ description="BLACK MEDIUM DOWN-POINTING TRIANGLE CENTRED",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC6,
+ },
+ [0x2BC7]={
+ category="so",
+ description="BLACK MEDIUM LEFT-POINTING TRIANGLE CENTRED",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC7,
+ },
+ [0x2BC8]={
+ category="so",
+ description="BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BC8,
+ },
+ [0x2BCA]={
+ category="so",
+ description="TOP HALF BLACK CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BCA,
+ },
+ [0x2BCB]={
+ category="so",
+ description="BOTTOM HALF BLACK CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BCB,
+ },
+ [0x2BCC]={
+ category="so",
+ description="LIGHT FOUR POINTED BLACK CUSP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BCC,
+ },
+ [0x2BCD]={
+ category="so",
+ description="ROTATED LIGHT FOUR POINTED BLACK CUSP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BCD,
+ },
+ [0x2BCE]={
+ category="so",
+ description="WHITE FOUR POINTED CUSP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BCE,
+ },
+ [0x2BCF]={
+ category="so",
+ description="ROTATED WHITE FOUR POINTED CUSP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BCF,
+ },
+ [0x2BD0]={
+ category="so",
+ description="SQUARE POSITION INDICATOR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BD0,
+ },
+ [0x2BD1]={
+ category="so",
+ description="UNCERTAINTY SIGN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2BD1,
+ },
[0x2C00]={
category="lu",
description="GLAGOLITIC CAPITAL LETTER AZU",
@@ -81332,7 +83502,7 @@ characters.data={
direction="l",
lccode=0x2C61,
linebreak="al",
- shcode=0x004C,
+ shcode=0x4C,
unicodeslot=0x2C60,
},
[0x2C61]={
@@ -81340,7 +83510,7 @@ characters.data={
description="LATIN SMALL LETTER L WITH DOUBLE BAR",
direction="l",
linebreak="al",
- shcode=0x006C,
+ shcode=0x6C,
uccode=0x2C60,
unicodeslot=0x2C61,
},
@@ -81348,9 +83518,9 @@ characters.data={
category="lu",
description="LATIN CAPITAL LETTER L WITH MIDDLE TILDE",
direction="l",
- lccode=0x026B,
+ lccode=0x26B,
linebreak="al",
- shcode=0x004C,
+ shcode=0x4C,
unicodeslot=0x2C62,
},
[0x2C63]={
@@ -81359,16 +83529,16 @@ characters.data={
direction="l",
lccode=0x1D7D,
linebreak="al",
- shcode=0x0050,
+ shcode=0x50,
unicodeslot=0x2C63,
},
[0x2C64]={
category="lu",
description="LATIN CAPITAL LETTER R WITH TAIL",
direction="l",
- lccode=0x027D,
+ lccode=0x27D,
linebreak="al",
- shcode=0x0052,
+ shcode=0x52,
unicodeslot=0x2C64,
},
[0x2C65]={
@@ -81376,8 +83546,8 @@ characters.data={
description="LATIN SMALL LETTER A WITH STROKE",
direction="l",
linebreak="al",
- shcode=0x0061,
- uccode=0x023A,
+ shcode=0x61,
+ uccode=0x23A,
unicodeslot=0x2C65,
},
[0x2C66]={
@@ -81385,8 +83555,8 @@ characters.data={
description="LATIN SMALL LETTER T WITH DIAGONAL STROKE",
direction="l",
linebreak="al",
- shcode=0x0074,
- uccode=0x023E,
+ shcode=0x74,
+ uccode=0x23E,
unicodeslot=0x2C66,
},
[0x2C67]={
@@ -81395,7 +83565,7 @@ characters.data={
direction="l",
lccode=0x2C68,
linebreak="al",
- shcode=0x0048,
+ shcode=0x48,
unicodeslot=0x2C67,
},
[0x2C68]={
@@ -81403,7 +83573,7 @@ characters.data={
description="LATIN SMALL LETTER H WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x0068,
+ shcode=0x68,
uccode=0x2C67,
unicodeslot=0x2C68,
},
@@ -81413,7 +83583,7 @@ characters.data={
direction="l",
lccode=0x2C6A,
linebreak="al",
- shcode=0x004B,
+ shcode=0x4B,
unicodeslot=0x2C69,
},
[0x2C6A]={
@@ -81421,7 +83591,7 @@ characters.data={
description="LATIN SMALL LETTER K WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x006B,
+ shcode=0x6B,
uccode=0x2C69,
unicodeslot=0x2C6A,
},
@@ -81431,7 +83601,7 @@ characters.data={
direction="l",
lccode=0x2C6C,
linebreak="al",
- shcode=0x005A,
+ shcode=0x5A,
unicodeslot=0x2C6B,
},
[0x2C6C]={
@@ -81439,7 +83609,7 @@ characters.data={
description="LATIN SMALL LETTER Z WITH DESCENDER",
direction="l",
linebreak="al",
- shcode=0x007A,
+ shcode=0x7A,
uccode=0x2C6B,
unicodeslot=0x2C6C,
},
@@ -81497,7 +83667,7 @@ characters.data={
description="LATIN SMALL LETTER V WITH CURL",
direction="l",
linebreak="al",
- shcode=0x0076,
+ shcode=0x76,
unicodeslot=0x2C74,
},
[0x2C75]={
@@ -81556,7 +83726,7 @@ characters.data={
description="LATIN SUBSCRIPT SMALL LETTER J",
direction="l",
linebreak="al",
- specials={ "sub", 0x006A },
+ specials={ "sub", 0x6A },
unicodeslot=0x2C7C,
},
[0x2C7D]={
@@ -81564,7 +83734,7 @@ characters.data={
description="MODIFIER LETTER CAPITAL V",
direction="l",
linebreak="al",
- specials={ "super", 0x0056 },
+ specials={ "super", 0x56 },
unicodeslot=0x2C7D,
},
[0x2C7E]={
@@ -82460,6 +84630,7 @@ characters.data={
},
[0x2CEF]={
category="mn",
+ combining=0xE6,
description="COPTIC COMBINING NI ABOVE",
direction="nsm",
linebreak="cm",
@@ -82467,6 +84638,7 @@ characters.data={
},
[0x2CF0]={
category="mn",
+ combining=0xE6,
description="COPTIC COMBINING SPIRITUS ASPER",
direction="nsm",
linebreak="cm",
@@ -82474,6 +84646,7 @@ characters.data={
},
[0x2CF1]={
category="mn",
+ combining=0xE6,
description="COPTIC COMBINING SPIRITUS LENIS",
direction="nsm",
linebreak="cm",
@@ -83269,6 +85442,7 @@ characters.data={
},
[0x2D7F]={
category="mn",
+ combining=0x9,
description="TIFINAGH CONSONANT JOINER",
direction="nsm",
linebreak="cm",
@@ -83563,7 +85737,6 @@ characters.data={
},
[0x2DB4]={
category="lo",
- cjkwd="w",
description="ETHIOPIC SYLLABLE ZZEE",
direction="l",
linebreak="al",
@@ -83571,7 +85744,6 @@ characters.data={
},
[0x2DB5]={
category="lo",
- cjkwd="w",
description="ETHIOPIC SYLLABLE ZZE",
direction="l",
linebreak="al",
@@ -83609,14 +85781,14 @@ characters.data={
category="lo",
description="ETHIOPIC SYLLABLE CCHAA",
direction="l",
- linebreak="id",
+ linebreak="al",
unicodeslot=0x2DBB,
},
[0x2DBC]={
category="lo",
description="ETHIOPIC SYLLABLE CCHEE",
direction="l",
- linebreak="id",
+ linebreak="al",
unicodeslot=0x2DBC,
},
[0x2DBD]={
@@ -83831,6 +86003,7 @@ characters.data={
},
[0x2DE0]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER BE",
direction="nsm",
linebreak="cm",
@@ -83838,6 +86011,7 @@ characters.data={
},
[0x2DE1]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER VE",
direction="nsm",
linebreak="cm",
@@ -83845,6 +86019,7 @@ characters.data={
},
[0x2DE2]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER GHE",
direction="nsm",
linebreak="cm",
@@ -83852,6 +86027,7 @@ characters.data={
},
[0x2DE3]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER DE",
direction="nsm",
linebreak="cm",
@@ -83859,6 +86035,7 @@ characters.data={
},
[0x2DE4]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER ZHE",
direction="nsm",
linebreak="cm",
@@ -83866,6 +86043,7 @@ characters.data={
},
[0x2DE5]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER ZE",
direction="nsm",
linebreak="cm",
@@ -83873,6 +86051,7 @@ characters.data={
},
[0x2DE6]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER KA",
direction="nsm",
linebreak="cm",
@@ -83880,6 +86059,7 @@ characters.data={
},
[0x2DE7]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER EL",
direction="nsm",
linebreak="cm",
@@ -83887,6 +86067,7 @@ characters.data={
},
[0x2DE8]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER EM",
direction="nsm",
linebreak="cm",
@@ -83894,6 +86075,7 @@ characters.data={
},
[0x2DE9]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER EN",
direction="nsm",
linebreak="cm",
@@ -83901,6 +86083,7 @@ characters.data={
},
[0x2DEA]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER O",
direction="nsm",
linebreak="cm",
@@ -83908,6 +86091,7 @@ characters.data={
},
[0x2DEB]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER PE",
direction="nsm",
linebreak="cm",
@@ -83915,6 +86099,7 @@ characters.data={
},
[0x2DEC]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER ER",
direction="nsm",
linebreak="cm",
@@ -83922,6 +86107,7 @@ characters.data={
},
[0x2DED]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER ES",
direction="nsm",
linebreak="cm",
@@ -83929,6 +86115,7 @@ characters.data={
},
[0x2DEE]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER TE",
direction="nsm",
linebreak="cm",
@@ -83936,6 +86123,7 @@ characters.data={
},
[0x2DEF]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER HA",
direction="nsm",
linebreak="cm",
@@ -83943,6 +86131,7 @@ characters.data={
},
[0x2DF0]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER TSE",
direction="nsm",
linebreak="cm",
@@ -83950,6 +86139,7 @@ characters.data={
},
[0x2DF1]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER CHE",
direction="nsm",
linebreak="cm",
@@ -83957,6 +86147,7 @@ characters.data={
},
[0x2DF2]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER SHA",
direction="nsm",
linebreak="cm",
@@ -83964,6 +86155,7 @@ characters.data={
},
[0x2DF3]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER SHCHA",
direction="nsm",
linebreak="cm",
@@ -83971,6 +86163,7 @@ characters.data={
},
[0x2DF4]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER FITA",
direction="nsm",
linebreak="cm",
@@ -83978,6 +86171,7 @@ characters.data={
},
[0x2DF5]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER ES-TE",
direction="nsm",
linebreak="cm",
@@ -83985,7 +86179,7 @@ characters.data={
},
[0x2DF6]={
category="mn",
- cjkwd="w",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER A",
direction="nsm",
linebreak="cm",
@@ -83993,7 +86187,7 @@ characters.data={
},
[0x2DF7]={
category="mn",
- cjkwd="w",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER IE",
direction="nsm",
linebreak="cm",
@@ -84001,6 +86195,7 @@ characters.data={
},
[0x2DF8]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER DJERV",
direction="nsm",
linebreak="cm",
@@ -84008,6 +86203,7 @@ characters.data={
},
[0x2DF9]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER MONOGRAPH UK",
direction="nsm",
linebreak="cm",
@@ -84015,6 +86211,7 @@ characters.data={
},
[0x2DFA]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER YAT",
direction="nsm",
linebreak="cm",
@@ -84022,6 +86219,7 @@ characters.data={
},
[0x2DFB]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER YU",
direction="nsm",
linebreak="cm",
@@ -84029,6 +86227,7 @@ characters.data={
},
[0x2DFC]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER IOTIFIED A",
direction="nsm",
linebreak="cm",
@@ -84036,20 +86235,23 @@ characters.data={
},
[0x2DFD]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER LITTLE YUS",
direction="nsm",
- linebreak="id",
+ linebreak="cm",
unicodeslot=0x2DFD,
},
[0x2DFE]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER BIG YUS",
direction="nsm",
- linebreak="id",
+ linebreak="cm",
unicodeslot=0x2DFE,
},
[0x2DFF]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER IOTIFIED BIG YUS",
direction="nsm",
linebreak="cm",
@@ -84495,6 +86697,55 @@ characters.data={
linebreak="b2",
unicodeslot=0x2E3B,
},
+ [0x2E3C]={
+ category="po",
+ description="STENOGRAPHIC FULL STOP",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E3C,
+ },
+ [0x2E3D]={
+ category="po",
+ description="VERTICAL SIX DOTS",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E3D,
+ },
+ [0x2E3E]={
+ category="po",
+ description="WIGGLY VERTICAL LINE",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E3E,
+ },
+ [0x2E3F]={
+ category="po",
+ description="CAPITULUM",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x2E3F,
+ },
+ [0x2E40]={
+ category="pd",
+ description="DOUBLE HYPHEN",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E40,
+ },
+ [0x2E41]={
+ category="po",
+ description="REVERSED COMMA",
+ direction="on",
+ linebreak="ba",
+ unicodeslot=0x2E41,
+ },
+ [0x2E42]={
+ category="ps",
+ description="DOUBLE LOW-REVERSED-9 QUOTATION MARK",
+ direction="on",
+ linebreak="op",
+ unicodeslot=0x2E42,
+ },
[0x2E80]={
category="so",
cjkwd="w",
@@ -87446,8 +89697,8 @@ characters.data={
contextname="ideographicspace",
description="IDEOGRAPHIC SPACE",
direction="ws",
- linebreak="id",
- specials={ "wide", 0x0020 },
+ linebreak="ba",
+ specials={ "wide", 0x20 },
unicodeslot=0x3000,
},
[0x3001]={
@@ -87837,6 +90088,7 @@ characters.data={
[0x302A]={
category="mn",
cjkwd="w",
+ combining=0xDA,
description="IDEOGRAPHIC LEVEL TONE MARK",
direction="nsm",
linebreak="cm",
@@ -87845,6 +90097,7 @@ characters.data={
[0x302B]={
category="mn",
cjkwd="w",
+ combining=0xE4,
description="IDEOGRAPHIC RISING TONE MARK",
direction="nsm",
linebreak="cm",
@@ -87853,6 +90106,7 @@ characters.data={
[0x302C]={
category="mn",
cjkwd="w",
+ combining=0xE8,
description="IDEOGRAPHIC DEPARTING TONE MARK",
direction="nsm",
linebreak="cm",
@@ -87861,6 +90115,7 @@ characters.data={
[0x302D]={
category="mn",
cjkwd="w",
+ combining=0xDE,
description="IDEOGRAPHIC ENTERING TONE MARK",
direction="nsm",
linebreak="cm",
@@ -87869,6 +90124,7 @@ characters.data={
[0x302E]={
category="mn",
cjkwd="w",
+ combining=0xE0,
description="HANGUL SINGLE DOT TONE MARK",
direction="l",
linebreak="cm",
@@ -87877,6 +90133,7 @@ characters.data={
[0x302F]={
category="mn",
cjkwd="w",
+ combining=0xE0,
description="HANGUL DOUBLE DOT TONE MARK",
direction="l",
linebreak="cm",
@@ -87927,7 +90184,7 @@ characters.data={
cjkwd="w",
description="VERTICAL KANA REPEAT MARK LOWER HALF",
direction="l",
- linebreak="id",
+ linebreak="cm",
unicodeslot=0x3035,
},
[0x3036]={
@@ -88024,8 +90281,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL A",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3042,
+ sound="unvoiced",
unicodeslot=0x3041,
},
[0x3042]={
@@ -88033,8 +90293,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER A",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3042,
+ sound="unvoiced",
unicodeslot=0x3042,
},
[0x3043]={
@@ -88042,8 +90305,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL I",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3044,
+ sound="unvoiced",
unicodeslot=0x3043,
},
[0x3044]={
@@ -88051,8 +90317,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER I",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3044,
+ sound="unvoiced",
unicodeslot=0x3044,
},
[0x3045]={
@@ -88060,8 +90329,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL U",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3046,
+ sound="unvoiced",
unicodeslot=0x3045,
},
[0x3046]={
@@ -88069,8 +90341,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER U",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3046,
+ sound="unvoiced",
unicodeslot=0x3046,
},
[0x3047]={
@@ -88078,8 +90353,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL E",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3048,
+ sound="unvoiced",
unicodeslot=0x3047,
},
[0x3048]={
@@ -88087,8 +90365,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER E",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3048,
+ sound="unvoiced",
unicodeslot=0x3048,
},
[0x3049]={
@@ -88096,8 +90377,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL O",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x304A,
+ sound="unvoiced",
unicodeslot=0x3049,
},
[0x304A]={
@@ -88105,8 +90389,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER O",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304A,
+ sound="unvoiced",
unicodeslot=0x304A,
},
[0x304B]={
@@ -88114,8 +90401,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER KA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304B,
+ sound="unvoiced",
unicodeslot=0x304B,
},
[0x304C]={
@@ -88123,8 +90413,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER GA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304B,
+ sound="voiced",
specials={ "char", 0x304B, 0x3099 },
unicodeslot=0x304C,
},
@@ -88133,8 +90426,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER KI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304D,
+ sound="unvoiced",
unicodeslot=0x304D,
},
[0x304E]={
@@ -88142,8 +90438,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER GI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304D,
+ sound="voiced",
specials={ "char", 0x304D, 0x3099 },
unicodeslot=0x304E,
},
@@ -88152,8 +90451,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER KU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304F,
+ sound="unvoiced",
unicodeslot=0x304F,
},
[0x3050]={
@@ -88161,8 +90463,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER GU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304F,
+ sound="voiced",
specials={ "char", 0x304F, 0x3099 },
unicodeslot=0x3050,
},
@@ -88171,8 +90476,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER KE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3051,
+ sound="unvoiced",
unicodeslot=0x3051,
},
[0x3052]={
@@ -88180,8 +90488,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER GE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3051,
+ sound="voiced",
specials={ "char", 0x3051, 0x3099 },
unicodeslot=0x3052,
},
@@ -88190,8 +90501,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER KO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3053,
+ sound="unvoiced",
unicodeslot=0x3053,
},
[0x3054]={
@@ -88199,8 +90513,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER GO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3053,
+ sound="voiced",
specials={ "char", 0x3053, 0x3099 },
unicodeslot=0x3054,
},
@@ -88209,8 +90526,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3055,
+ sound="unvoiced",
unicodeslot=0x3055,
},
[0x3056]={
@@ -88218,8 +90538,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER ZA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3055,
+ sound="voiced",
specials={ "char", 0x3055, 0x3099 },
unicodeslot=0x3056,
},
@@ -88228,8 +90551,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3057,
+ sound="unvoiced",
unicodeslot=0x3057,
},
[0x3058]={
@@ -88237,8 +90563,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER ZI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3057,
+ sound="voiced",
specials={ "char", 0x3057, 0x3099 },
unicodeslot=0x3058,
},
@@ -88247,8 +90576,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3059,
+ sound="unvoiced",
unicodeslot=0x3059,
},
[0x305A]={
@@ -88256,8 +90588,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER ZU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3059,
+ sound="voiced",
specials={ "char", 0x3059, 0x3099 },
unicodeslot=0x305A,
},
@@ -88266,8 +90601,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305B,
+ sound="unvoiced",
unicodeslot=0x305B,
},
[0x305C]={
@@ -88275,8 +90613,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER ZE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305B,
+ sound="voiced",
specials={ "char", 0x305B, 0x3099 },
unicodeslot=0x305C,
},
@@ -88285,8 +90626,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305D,
+ sound="unvoiced",
unicodeslot=0x305D,
},
[0x305E]={
@@ -88294,8 +90638,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER ZO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305D,
+ sound="voiced",
specials={ "char", 0x305D, 0x3099 },
unicodeslot=0x305E,
},
@@ -88304,8 +90651,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER TA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305F,
+ sound="unvoiced",
unicodeslot=0x305F,
},
[0x3060]={
@@ -88313,8 +90663,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER DA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305F,
+ sound="voiced",
specials={ "char", 0x305F, 0x3099 },
unicodeslot=0x3060,
},
@@ -88323,8 +90676,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER TI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3061,
+ sound="unvoiced",
unicodeslot=0x3061,
},
[0x3062]={
@@ -88332,8 +90688,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER DI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3061,
+ sound="voiced",
specials={ "char", 0x3061, 0x3099 },
unicodeslot=0x3062,
},
@@ -88342,8 +90701,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL TU",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3064,
+ sound="unvoiced",
unicodeslot=0x3063,
},
[0x3064]={
@@ -88351,8 +90713,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER TU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3064,
+ sound="unvoiced",
unicodeslot=0x3064,
},
[0x3065]={
@@ -88360,8 +90725,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER DU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3064,
+ sound="voiced",
specials={ "char", 0x3064, 0x3099 },
unicodeslot=0x3065,
},
@@ -88370,8 +90738,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER TE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3066,
+ sound="unvoiced",
unicodeslot=0x3066,
},
[0x3067]={
@@ -88379,8 +90750,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER DE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3066,
+ sound="voiced",
specials={ "char", 0x3066, 0x3099 },
unicodeslot=0x3067,
},
@@ -88389,8 +90763,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER TO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3068,
+ sound="unvoiced",
unicodeslot=0x3068,
},
[0x3069]={
@@ -88398,8 +90775,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER DO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3068,
+ sound="voiced",
specials={ "char", 0x3068, 0x3099 },
unicodeslot=0x3069,
},
@@ -88408,8 +90788,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER NA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306A,
+ sound="unvoiced",
unicodeslot=0x306A,
},
[0x306B]={
@@ -88417,8 +90800,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER NI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306B,
+ sound="unvoiced",
unicodeslot=0x306B,
},
[0x306C]={
@@ -88426,8 +90812,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER NU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306C,
+ sound="unvoiced",
unicodeslot=0x306C,
},
[0x306D]={
@@ -88435,8 +90824,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER NE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306D,
+ sound="unvoiced",
unicodeslot=0x306D,
},
[0x306E]={
@@ -88444,8 +90836,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER NO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306E,
+ sound="unvoiced",
unicodeslot=0x306E,
},
[0x306F]={
@@ -88453,8 +90848,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER HA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306F,
+ sound="unvoiced",
unicodeslot=0x306F,
},
[0x3070]={
@@ -88462,8 +90860,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER BA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306F,
+ sound="voiced",
specials={ "char", 0x306F, 0x3099 },
unicodeslot=0x3070,
},
@@ -88472,8 +90873,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER PA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306F,
+ sound="semivoiced",
specials={ "char", 0x306F, 0x309A },
unicodeslot=0x3071,
},
@@ -88482,8 +90886,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER HI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3072,
+ sound="unvoiced",
unicodeslot=0x3072,
},
[0x3073]={
@@ -88491,8 +90898,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER BI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3072,
+ sound="voiced",
specials={ "char", 0x3072, 0x3099 },
unicodeslot=0x3073,
},
@@ -88501,8 +90911,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER PI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3072,
+ sound="semivoiced",
specials={ "char", 0x3072, 0x309A },
unicodeslot=0x3074,
},
@@ -88511,8 +90924,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER HU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3075,
+ sound="unvoiced",
unicodeslot=0x3075,
},
[0x3076]={
@@ -88520,8 +90936,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER BU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3075,
+ sound="voiced",
specials={ "char", 0x3075, 0x3099 },
unicodeslot=0x3076,
},
@@ -88530,8 +90949,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER PU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3075,
+ sound="semivoiced",
specials={ "char", 0x3075, 0x309A },
unicodeslot=0x3077,
},
@@ -88540,8 +90962,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER HE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3078,
+ sound="unvoiced",
unicodeslot=0x3078,
},
[0x3079]={
@@ -88549,8 +90974,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER BE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3078,
+ sound="voiced",
specials={ "char", 0x3078, 0x3099 },
unicodeslot=0x3079,
},
@@ -88559,8 +90987,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER PE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3078,
+ sound="semivoiced",
specials={ "char", 0x3078, 0x309A },
unicodeslot=0x307A,
},
@@ -88569,8 +91000,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER HO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307B,
+ sound="unvoiced",
unicodeslot=0x307B,
},
[0x307C]={
@@ -88578,8 +91012,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER BO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307B,
+ sound="voiced",
specials={ "char", 0x307B, 0x3099 },
unicodeslot=0x307C,
},
@@ -88588,8 +91025,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER PO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307B,
+ sound="semivoiced",
specials={ "char", 0x307B, 0x309A },
unicodeslot=0x307D,
},
@@ -88598,8 +91038,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER MA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307E,
+ sound="unvoiced",
unicodeslot=0x307E,
},
[0x307F]={
@@ -88607,8 +91050,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER MI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307F,
+ sound="unvoiced",
unicodeslot=0x307F,
},
[0x3080]={
@@ -88616,8 +91062,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER MU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3080,
+ sound="unvoiced",
unicodeslot=0x3080,
},
[0x3081]={
@@ -88625,8 +91074,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER ME",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3081,
+ sound="unvoiced",
unicodeslot=0x3081,
},
[0x3082]={
@@ -88634,8 +91086,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER MO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3082,
+ sound="unvoiced",
unicodeslot=0x3082,
},
[0x3083]={
@@ -88643,8 +91098,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL YA",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3084,
+ sound="unvoiced",
unicodeslot=0x3083,
},
[0x3084]={
@@ -88652,8 +91110,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER YA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3084,
+ sound="unvoiced",
unicodeslot=0x3084,
},
[0x3085]={
@@ -88661,8 +91122,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL YU",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3086,
+ sound="unvoiced",
unicodeslot=0x3085,
},
[0x3086]={
@@ -88670,8 +91134,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER YU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3086,
+ sound="unvoiced",
unicodeslot=0x3086,
},
[0x3087]={
@@ -88679,8 +91146,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL YO",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3088,
+ sound="unvoiced",
unicodeslot=0x3087,
},
[0x3088]={
@@ -88688,8 +91158,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER YO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3088,
+ sound="unvoiced",
unicodeslot=0x3088,
},
[0x3089]={
@@ -88697,8 +91170,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER RA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3089,
+ sound="unvoiced",
unicodeslot=0x3089,
},
[0x308A]={
@@ -88706,8 +91182,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER RI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308A,
+ sound="unvoiced",
unicodeslot=0x308A,
},
[0x308B]={
@@ -88715,8 +91194,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER RU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308B,
+ sound="unvoiced",
unicodeslot=0x308B,
},
[0x308C]={
@@ -88724,8 +91206,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER RE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308C,
+ sound="unvoiced",
unicodeslot=0x308C,
},
[0x308D]={
@@ -88733,8 +91218,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER RO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308D,
+ sound="unvoiced",
unicodeslot=0x308D,
},
[0x308E]={
@@ -88742,8 +91230,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL WA",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x308F,
+ sound="unvoiced",
unicodeslot=0x308E,
},
[0x308F]={
@@ -88751,8 +91242,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER WA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308F,
+ sound="unvoiced",
unicodeslot=0x308F,
},
[0x3090]={
@@ -88760,8 +91254,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER WI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3090,
+ sound="unvoiced",
unicodeslot=0x3090,
},
[0x3091]={
@@ -88769,8 +91266,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER WE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3091,
+ sound="unvoiced",
unicodeslot=0x3091,
},
[0x3092]={
@@ -88778,8 +91278,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER WO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3092,
+ sound="unvoiced",
unicodeslot=0x3092,
},
[0x3093]={
@@ -88787,8 +91290,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER N",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3093,
+ sound="unvoiced",
unicodeslot=0x3093,
},
[0x3094]={
@@ -88796,8 +91302,11 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER VU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3046,
+ sound="voiced",
specials={ "char", 0x3046, 0x3099 },
unicodeslot=0x3094,
},
@@ -88805,32 +91314,42 @@ characters.data={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL KA",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x304B,
+ sound="unvoiced",
unicodeslot=0x3095,
},
[0x3096]={
category="lo",
cjkwd="w",
description="HIRAGANA LETTER SMALL KE",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3051,
+ sound="unvoiced",
unicodeslot=0x3096,
},
[0x3099]={
category="mn",
cjkwd="w",
+ combining=0x8,
description="COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK",
direction="nsm",
linebreak="cm",
+ sound="voiced",
unicodeslot=0x3099,
},
[0x309A]={
category="mn",
cjkwd="w",
+ combining=0x8,
description="COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK",
direction="nsm",
linebreak="cm",
+ sound="semivoiced",
unicodeslot=0x309A,
},
[0x309B]={
@@ -88840,7 +91359,8 @@ characters.data={
description="KATAKANA-HIRAGANA VOICED SOUND MARK",
direction="on",
linebreak="ns",
- specials={ "compat", 0x0020, 0x3099 },
+ sound="voiced",
+ specials={ "compat", 0x20, 0x3099 },
unicodeslot=0x309B,
},
[0x309C]={
@@ -88850,7 +91370,8 @@ characters.data={
description="KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK",
direction="on",
linebreak="ns",
- specials={ "compat", 0x0020, 0x309A },
+ sound="semivoiced",
+ specials={ "compat", 0x20, 0x309A },
unicodeslot=0x309C,
},
[0x309D]={
@@ -88858,8 +91379,11 @@ characters.data={
category="lm",
cjkwd="w",
description="HIRAGANA ITERATION MARK",
+ detail="im",
direction="l",
linebreak="ns",
+ shcode=0x309D,
+ sound="unvoiced",
unicodeslot=0x309D,
},
[0x309E]={
@@ -88867,8 +91391,11 @@ characters.data={
category="lm",
cjkwd="w",
description="HIRAGANA VOICED ITERATION MARK",
+ detail="im",
direction="l",
linebreak="ns",
+ shcode=0x309D,
+ sound="voiced",
specials={ "char", 0x309D, 0x3099 },
unicodeslot=0x309E,
},
@@ -88894,8 +91421,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL A",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3042,
+ sound="unvoiced",
unicodeslot=0x30A1,
},
[0x30A2]={
@@ -88903,8 +91433,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER A",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3042,
+ sound="unvoiced",
unicodeslot=0x30A2,
},
[0x30A3]={
@@ -88912,8 +91445,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL I",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3044,
+ sound="unvoiced",
unicodeslot=0x30A3,
},
[0x30A4]={
@@ -88921,8 +91457,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER I",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3044,
+ sound="unvoiced",
unicodeslot=0x30A4,
},
[0x30A5]={
@@ -88930,8 +91469,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL U",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3046,
+ sound="unvoiced",
unicodeslot=0x30A5,
},
[0x30A6]={
@@ -88939,8 +91481,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER U",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3046,
+ sound="unvoiced",
unicodeslot=0x30A6,
},
[0x30A7]={
@@ -88948,8 +91493,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL E",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3048,
+ sound="unvoiced",
unicodeslot=0x30A7,
},
[0x30A8]={
@@ -88957,8 +91505,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER E",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3048,
+ sound="unvoiced",
unicodeslot=0x30A8,
},
[0x30A9]={
@@ -88966,8 +91517,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL O",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x304A,
+ sound="unvoiced",
unicodeslot=0x30A9,
},
[0x30AA]={
@@ -88975,8 +91529,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER O",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304A,
+ sound="unvoiced",
unicodeslot=0x30AA,
},
[0x30AB]={
@@ -88984,8 +91541,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER KA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304B,
+ sound="unvoiced",
unicodeslot=0x30AB,
},
[0x30AC]={
@@ -88993,8 +91553,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER GA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304B,
+ sound="voiced",
specials={ "char", 0x30AB, 0x3099 },
unicodeslot=0x30AC,
},
@@ -89003,8 +91566,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER KI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304D,
+ sound="unvoiced",
unicodeslot=0x30AD,
},
[0x30AE]={
@@ -89012,8 +91578,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER GI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304D,
+ sound="voiced",
specials={ "char", 0x30AD, 0x3099 },
unicodeslot=0x30AE,
},
@@ -89022,8 +91591,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER KU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304F,
+ sound="unvoiced",
unicodeslot=0x30AF,
},
[0x30B0]={
@@ -89031,8 +91603,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER GU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x304F,
+ sound="voiced",
specials={ "char", 0x30AF, 0x3099 },
unicodeslot=0x30B0,
},
@@ -89041,8 +91616,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER KE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3051,
+ sound="unvoiced",
unicodeslot=0x30B1,
},
[0x30B2]={
@@ -89050,8 +91628,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER GE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3051,
+ sound="voiced",
specials={ "char", 0x30B1, 0x3099 },
unicodeslot=0x30B2,
},
@@ -89060,8 +91641,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER KO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3053,
+ sound="unvoiced",
unicodeslot=0x30B3,
},
[0x30B4]={
@@ -89069,8 +91653,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER GO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3053,
+ sound="voiced",
specials={ "char", 0x30B3, 0x3099 },
unicodeslot=0x30B4,
},
@@ -89079,8 +91666,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3055,
+ sound="unvoiced",
unicodeslot=0x30B5,
},
[0x30B6]={
@@ -89088,8 +91678,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER ZA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3055,
+ sound="voiced",
specials={ "char", 0x30B5, 0x3099 },
unicodeslot=0x30B6,
},
@@ -89098,8 +91691,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3057,
+ sound="unvoiced",
unicodeslot=0x30B7,
},
[0x30B8]={
@@ -89107,8 +91703,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER ZI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3057,
+ sound="voiced",
specials={ "char", 0x30B7, 0x3099 },
unicodeslot=0x30B8,
},
@@ -89117,8 +91716,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3059,
+ sound="unvoiced",
unicodeslot=0x30B9,
},
[0x30BA]={
@@ -89126,8 +91728,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER ZU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3059,
+ sound="voiced",
specials={ "char", 0x30B9, 0x3099 },
unicodeslot=0x30BA,
},
@@ -89136,8 +91741,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305B,
+ sound="unvoiced",
unicodeslot=0x30BB,
},
[0x30BC]={
@@ -89145,8 +91753,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER ZE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305B,
+ sound="voiced",
specials={ "char", 0x30BB, 0x3099 },
unicodeslot=0x30BC,
},
@@ -89155,8 +91766,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305D,
+ sound="unvoiced",
unicodeslot=0x30BD,
},
[0x30BE]={
@@ -89164,8 +91778,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER ZO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305D,
+ sound="voiced",
specials={ "char", 0x30BD, 0x3099 },
unicodeslot=0x30BE,
},
@@ -89174,8 +91791,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER TA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305F,
+ sound="unvoiced",
unicodeslot=0x30BF,
},
[0x30C0]={
@@ -89183,8 +91803,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER DA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x305F,
+ sound="voiced",
specials={ "char", 0x30BF, 0x3099 },
unicodeslot=0x30C0,
},
@@ -89193,8 +91816,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER TI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3061,
+ sound="unvoiced",
unicodeslot=0x30C1,
},
[0x30C2]={
@@ -89202,8 +91828,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER DI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3061,
+ sound="voiced",
specials={ "char", 0x30C1, 0x3099 },
unicodeslot=0x30C2,
},
@@ -89212,8 +91841,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL TU",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3064,
+ sound="unvoiced",
unicodeslot=0x30C3,
},
[0x30C4]={
@@ -89221,8 +91853,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER TU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3064,
+ sound="unvoiced",
unicodeslot=0x30C4,
},
[0x30C5]={
@@ -89230,8 +91865,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER DU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3064,
+ sound="voiced",
specials={ "char", 0x30C4, 0x3099 },
unicodeslot=0x30C5,
},
@@ -89240,8 +91878,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER TE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3066,
+ sound="unvoiced",
unicodeslot=0x30C6,
},
[0x30C7]={
@@ -89249,8 +91890,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER DE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3066,
+ sound="voiced",
specials={ "char", 0x30C6, 0x3099 },
unicodeslot=0x30C7,
},
@@ -89259,8 +91903,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER TO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3068,
+ sound="unvoiced",
unicodeslot=0x30C8,
},
[0x30C9]={
@@ -89268,8 +91915,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER DO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3068,
+ sound="voiced",
specials={ "char", 0x30C8, 0x3099 },
unicodeslot=0x30C9,
},
@@ -89278,8 +91928,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER NA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306A,
+ sound="unvoiced",
unicodeslot=0x30CA,
},
[0x30CB]={
@@ -89287,8 +91940,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER NI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306B,
+ sound="unvoiced",
unicodeslot=0x30CB,
},
[0x30CC]={
@@ -89296,8 +91952,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER NU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306C,
+ sound="unvoiced",
unicodeslot=0x30CC,
},
[0x30CD]={
@@ -89305,8 +91964,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER NE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306D,
+ sound="unvoiced",
unicodeslot=0x30CD,
},
[0x30CE]={
@@ -89314,8 +91976,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER NO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306E,
+ sound="unvoiced",
unicodeslot=0x30CE,
},
[0x30CF]={
@@ -89323,8 +91988,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER HA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306F,
+ sound="unvoiced",
unicodeslot=0x30CF,
},
[0x30D0]={
@@ -89332,8 +92000,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER BA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306F,
+ sound="voiced",
specials={ "char", 0x30CF, 0x3099 },
unicodeslot=0x30D0,
},
@@ -89342,8 +92013,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER PA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x306F,
+ sound="semivoiced",
specials={ "char", 0x30CF, 0x309A },
unicodeslot=0x30D1,
},
@@ -89352,8 +92026,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER HI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3072,
+ sound="unvoiced",
unicodeslot=0x30D2,
},
[0x30D3]={
@@ -89361,8 +92038,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER BI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3072,
+ sound="voiced",
specials={ "char", 0x30D2, 0x3099 },
unicodeslot=0x30D3,
},
@@ -89371,8 +92051,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER PI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3072,
+ sound="semivoiced",
specials={ "char", 0x30D2, 0x309A },
unicodeslot=0x30D4,
},
@@ -89381,8 +92064,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER HU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3075,
+ sound="unvoiced",
unicodeslot=0x30D5,
},
[0x30D6]={
@@ -89390,8 +92076,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER BU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3075,
+ sound="voiced",
specials={ "char", 0x30D5, 0x3099 },
unicodeslot=0x30D6,
},
@@ -89400,8 +92089,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER PU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3075,
+ sound="semivoiced",
specials={ "char", 0x30D5, 0x309A },
unicodeslot=0x30D7,
},
@@ -89410,8 +92102,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER HE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3078,
+ sound="unvoiced",
unicodeslot=0x30D8,
},
[0x30D9]={
@@ -89419,8 +92114,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER BE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3078,
+ sound="voiced",
specials={ "char", 0x30D8, 0x3099 },
unicodeslot=0x30D9,
},
@@ -89429,8 +92127,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER PE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3078,
+ sound="semivoiced",
specials={ "char", 0x30D8, 0x309A },
unicodeslot=0x30DA,
},
@@ -89439,8 +92140,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER HO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307B,
+ sound="unvoiced",
unicodeslot=0x30DB,
},
[0x30DC]={
@@ -89448,8 +92152,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER BO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307B,
+ sound="voiced",
specials={ "char", 0x30DB, 0x3099 },
unicodeslot=0x30DC,
},
@@ -89458,8 +92165,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER PO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307B,
+ sound="semivoiced",
specials={ "char", 0x30DB, 0x309A },
unicodeslot=0x30DD,
},
@@ -89468,8 +92178,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER MA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307E,
+ sound="unvoiced",
unicodeslot=0x30DE,
},
[0x30DF]={
@@ -89477,8 +92190,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER MI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x307F,
+ sound="unvoiced",
unicodeslot=0x30DF,
},
[0x30E0]={
@@ -89486,8 +92202,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER MU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3080,
+ sound="unvoiced",
unicodeslot=0x30E0,
},
[0x30E1]={
@@ -89495,8 +92214,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER ME",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3081,
+ sound="unvoiced",
unicodeslot=0x30E1,
},
[0x30E2]={
@@ -89504,8 +92226,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER MO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3082,
+ sound="unvoiced",
unicodeslot=0x30E2,
},
[0x30E3]={
@@ -89513,8 +92238,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL YA",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3084,
+ sound="unvoiced",
unicodeslot=0x30E3,
},
[0x30E4]={
@@ -89522,8 +92250,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER YA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3084,
+ sound="unvoiced",
unicodeslot=0x30E4,
},
[0x30E5]={
@@ -89531,8 +92262,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL YU",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3086,
+ sound="unvoiced",
unicodeslot=0x30E5,
},
[0x30E6]={
@@ -89540,8 +92274,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER YU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3086,
+ sound="unvoiced",
unicodeslot=0x30E6,
},
[0x30E7]={
@@ -89549,8 +92286,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL YO",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3088,
+ sound="unvoiced",
unicodeslot=0x30E7,
},
[0x30E8]={
@@ -89558,8 +92298,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER YO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3088,
+ sound="unvoiced",
unicodeslot=0x30E8,
},
[0x30E9]={
@@ -89567,8 +92310,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER RA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3089,
+ sound="unvoiced",
unicodeslot=0x30E9,
},
[0x30EA]={
@@ -89576,8 +92322,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER RI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308A,
+ sound="unvoiced",
unicodeslot=0x30EA,
},
[0x30EB]={
@@ -89585,8 +92334,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER RU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308B,
+ sound="unvoiced",
unicodeslot=0x30EB,
},
[0x30EC]={
@@ -89594,8 +92346,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER RE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308C,
+ sound="unvoiced",
unicodeslot=0x30EC,
},
[0x30ED]={
@@ -89603,8 +92358,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER RO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308D,
+ sound="unvoiced",
unicodeslot=0x30ED,
},
[0x30EE]={
@@ -89612,8 +92370,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL WA",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x308F,
+ sound="unvoiced",
unicodeslot=0x30EE,
},
[0x30EF]={
@@ -89621,8 +92382,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER WA",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x308F,
+ sound="unvoiced",
unicodeslot=0x30EF,
},
[0x30F0]={
@@ -89630,8 +92394,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER WI",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3090,
+ sound="unvoiced",
unicodeslot=0x30F0,
},
[0x30F1]={
@@ -89639,8 +92406,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER WE",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3091,
+ sound="unvoiced",
unicodeslot=0x30F1,
},
[0x30F2]={
@@ -89648,8 +92418,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER WO",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3092,
+ sound="unvoiced",
unicodeslot=0x30F2,
},
[0x30F3]={
@@ -89657,8 +92430,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER N",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3093,
+ sound="unvoiced",
unicodeslot=0x30F3,
},
[0x30F4]={
@@ -89666,8 +92442,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER VU",
+ detail="bl",
direction="l",
linebreak="id",
+ shcode=0x3046,
+ sound="voiced",
specials={ "char", 0x30A6, 0x3099 },
unicodeslot=0x30F4,
},
@@ -89676,8 +92455,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL KA",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x304B,
+ sound="unvoiced",
unicodeslot=0x30F5,
},
[0x30F6]={
@@ -89685,8 +92467,11 @@ characters.data={
category="lo",
cjkwd="w",
description="KATAKANA LETTER SMALL KE",
+ detail="sl",
direction="l",
linebreak="cj",
+ shcode=0x3051,
+ sound="unvoiced",
unicodeslot=0x30F6,
},
[0x30F7]={
@@ -89696,6 +92481,8 @@ characters.data={
description="KATAKANA LETTER VA",
direction="l",
linebreak="id",
+ shcode=0x308F,
+ sound="voiced",
specials={ "char", 0x30EF, 0x3099 },
unicodeslot=0x30F7,
},
@@ -89706,6 +92493,8 @@ characters.data={
description="KATAKANA LETTER VI",
direction="l",
linebreak="id",
+ shcode=0x3090,
+ sound="voiced",
specials={ "char", 0x30F0, 0x3099 },
unicodeslot=0x30F8,
},
@@ -89716,6 +92505,8 @@ characters.data={
description="KATAKANA LETTER VE",
direction="l",
linebreak="id",
+ shcode=0x3091,
+ sound="voiced",
specials={ "char", 0x30F1, 0x3099 },
unicodeslot=0x30F9,
},
@@ -89726,6 +92517,8 @@ characters.data={
description="KATAKANA LETTER VO",
direction="l",
linebreak="id",
+ shcode=0x3092,
+ sound="voiced",
specials={ "char", 0x30F2, 0x3099 },
unicodeslot=0x30FA,
},
@@ -89743,8 +92536,11 @@ characters.data={
category="lm",
cjkwd="w",
description="KATAKANA-HIRAGANA PROLONGED SOUND MARK",
+ detail="pm",
direction="l",
linebreak="cj",
+ shcode=0x30FC,
+ sound="unvoiced",
unicodeslot=0x30FC,
},
[0x30FD]={
@@ -89752,8 +92548,11 @@ characters.data={
category="lm",
cjkwd="w",
description="KATAKANA ITERATION MARK",
+ detail="im",
direction="l",
linebreak="ns",
+ shcode=0x309D,
+ sound="unvoiced",
unicodeslot=0x30FD,
},
[0x30FE]={
@@ -89761,8 +92560,11 @@ characters.data={
category="lm",
cjkwd="w",
description="KATAKANA VOICED ITERATION MARK",
+ detail="im",
direction="l",
linebreak="ns",
+ shcode=0x309D,
+ sound="voiced",
specials={ "char", 0x30FD, 0x3099 },
unicodeslot=0x30FE,
},
@@ -91732,6 +94534,8 @@ characters.data={
description="KATAKANA LETTER SMALL KU",
direction="l",
linebreak="cj",
+ shcode=0x304F,
+ sound="unvoiced",
unicodeslot=0x31F0,
},
[0x31F1]={
@@ -91740,6 +94544,8 @@ characters.data={
description="KATAKANA LETTER SMALL SI",
direction="l",
linebreak="cj",
+ shcode=0x3057,
+ sound="unvoiced",
unicodeslot=0x31F1,
},
[0x31F2]={
@@ -91748,6 +94554,8 @@ characters.data={
description="KATAKANA LETTER SMALL SU",
direction="l",
linebreak="cj",
+ shcode=0x3059,
+ sound="unvoiced",
unicodeslot=0x31F2,
},
[0x31F3]={
@@ -91756,6 +94564,8 @@ characters.data={
description="KATAKANA LETTER SMALL TO",
direction="l",
linebreak="cj",
+ shcode=0x3068,
+ sound="unvoiced",
unicodeslot=0x31F3,
},
[0x31F4]={
@@ -91764,6 +94574,8 @@ characters.data={
description="KATAKANA LETTER SMALL NU",
direction="l",
linebreak="cj",
+ shcode=0x306C,
+ sound="unvoiced",
unicodeslot=0x31F4,
},
[0x31F5]={
@@ -91772,6 +94584,8 @@ characters.data={
description="KATAKANA LETTER SMALL HA",
direction="l",
linebreak="cj",
+ shcode=0x306F,
+ sound="unvoiced",
unicodeslot=0x31F5,
},
[0x31F6]={
@@ -91780,6 +94594,8 @@ characters.data={
description="KATAKANA LETTER SMALL HI",
direction="l",
linebreak="cj",
+ shcode=0x3072,
+ sound="unvoiced",
unicodeslot=0x31F6,
},
[0x31F7]={
@@ -91788,6 +94604,8 @@ characters.data={
description="KATAKANA LETTER SMALL HU",
direction="l",
linebreak="cj",
+ shcode=0x3075,
+ sound="unvoiced",
unicodeslot=0x31F7,
},
[0x31F8]={
@@ -91796,6 +94614,8 @@ characters.data={
description="KATAKANA LETTER SMALL HE",
direction="l",
linebreak="cj",
+ shcode=0x3078,
+ sound="unvoiced",
unicodeslot=0x31F8,
},
[0x31F9]={
@@ -91804,6 +94624,8 @@ characters.data={
description="KATAKANA LETTER SMALL HO",
direction="l",
linebreak="cj",
+ shcode=0x307B,
+ sound="unvoiced",
unicodeslot=0x31F9,
},
[0x31FA]={
@@ -91812,6 +94634,8 @@ characters.data={
description="KATAKANA LETTER SMALL MU",
direction="l",
linebreak="cj",
+ shcode=0x3080,
+ sound="unvoiced",
unicodeslot=0x31FA,
},
[0x31FB]={
@@ -91820,6 +94644,8 @@ characters.data={
description="KATAKANA LETTER SMALL RA",
direction="l",
linebreak="cj",
+ shcode=0x3089,
+ sound="unvoiced",
unicodeslot=0x31FB,
},
[0x31FC]={
@@ -91828,6 +94654,7 @@ characters.data={
description="KATAKANA LETTER SMALL RI",
direction="l",
linebreak="cj",
+ sound="unvoiced",
unicodeslot=0x31FC,
},
[0x31FD]={
@@ -91836,6 +94663,8 @@ characters.data={
description="KATAKANA LETTER SMALL RU",
direction="l",
linebreak="cj",
+ shcode=0x308B,
+ sound="unvoiced",
unicodeslot=0x31FD,
},
[0x31FE]={
@@ -91844,6 +94673,8 @@ characters.data={
description="KATAKANA LETTER SMALL RE",
direction="l",
linebreak="cj",
+ shcode=0x308C,
+ sound="unvoiced",
unicodeslot=0x31FE,
},
[0x31FF]={
@@ -91852,6 +94683,8 @@ characters.data={
description="KATAKANA LETTER SMALL RO",
direction="l",
linebreak="cj",
+ shcode=0x308D,
+ sound="unvoiced",
unicodeslot=0x31FF,
},
[0x3200]={
@@ -91861,7 +94694,7 @@ characters.data={
description="PARENTHESIZED HANGUL KIYEOK",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1100, 0x0029 },
+ specials={ "compat", 0x28, 0x1100, 0x29 },
unicodeslot=0x3200,
},
[0x3201]={
@@ -91871,7 +94704,7 @@ characters.data={
description="PARENTHESIZED HANGUL NIEUN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1102, 0x0029 },
+ specials={ "compat", 0x28, 0x1102, 0x29 },
unicodeslot=0x3201,
},
[0x3202]={
@@ -91881,7 +94714,7 @@ characters.data={
description="PARENTHESIZED HANGUL TIKEUT",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1103, 0x0029 },
+ specials={ "compat", 0x28, 0x1103, 0x29 },
unicodeslot=0x3202,
},
[0x3203]={
@@ -91891,7 +94724,7 @@ characters.data={
description="PARENTHESIZED HANGUL RIEUL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1105, 0x0029 },
+ specials={ "compat", 0x28, 0x1105, 0x29 },
unicodeslot=0x3203,
},
[0x3204]={
@@ -91901,7 +94734,7 @@ characters.data={
description="PARENTHESIZED HANGUL MIEUM",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1106, 0x0029 },
+ specials={ "compat", 0x28, 0x1106, 0x29 },
unicodeslot=0x3204,
},
[0x3205]={
@@ -91911,7 +94744,7 @@ characters.data={
description="PARENTHESIZED HANGUL PIEUP",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1107, 0x0029 },
+ specials={ "compat", 0x28, 0x1107, 0x29 },
unicodeslot=0x3205,
},
[0x3206]={
@@ -91921,7 +94754,7 @@ characters.data={
description="PARENTHESIZED HANGUL SIOS",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1109, 0x0029 },
+ specials={ "compat", 0x28, 0x1109, 0x29 },
unicodeslot=0x3206,
},
[0x3207]={
@@ -91931,7 +94764,7 @@ characters.data={
description="PARENTHESIZED HANGUL IEUNG",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110B, 0x0029 },
+ specials={ "compat", 0x28, 0x110B, 0x29 },
unicodeslot=0x3207,
},
[0x3208]={
@@ -91941,7 +94774,7 @@ characters.data={
description="PARENTHESIZED HANGUL CIEUC",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110C, 0x0029 },
+ specials={ "compat", 0x28, 0x110C, 0x29 },
unicodeslot=0x3208,
},
[0x3209]={
@@ -91951,7 +94784,7 @@ characters.data={
description="PARENTHESIZED HANGUL CHIEUCH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110E, 0x0029 },
+ specials={ "compat", 0x28, 0x110E, 0x29 },
unicodeslot=0x3209,
},
[0x320A]={
@@ -91961,7 +94794,7 @@ characters.data={
description="PARENTHESIZED HANGUL KHIEUKH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110F, 0x0029 },
+ specials={ "compat", 0x28, 0x110F, 0x29 },
unicodeslot=0x320A,
},
[0x320B]={
@@ -91971,7 +94804,7 @@ characters.data={
description="PARENTHESIZED HANGUL THIEUTH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1110, 0x0029 },
+ specials={ "compat", 0x28, 0x1110, 0x29 },
unicodeslot=0x320B,
},
[0x320C]={
@@ -91981,7 +94814,7 @@ characters.data={
description="PARENTHESIZED HANGUL PHIEUPH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1111, 0x0029 },
+ specials={ "compat", 0x28, 0x1111, 0x29 },
unicodeslot=0x320C,
},
[0x320D]={
@@ -91991,7 +94824,7 @@ characters.data={
description="PARENTHESIZED HANGUL HIEUH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1112, 0x0029 },
+ specials={ "compat", 0x28, 0x1112, 0x29 },
unicodeslot=0x320D,
},
[0x320E]={
@@ -92001,7 +94834,7 @@ characters.data={
description="PARENTHESIZED HANGUL KIYEOK A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1100, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1100, 0x1161, 0x29 },
unicodeslot=0x320E,
},
[0x320F]={
@@ -92011,7 +94844,7 @@ characters.data={
description="PARENTHESIZED HANGUL NIEUN A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1102, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1102, 0x1161, 0x29 },
unicodeslot=0x320F,
},
[0x3210]={
@@ -92021,7 +94854,7 @@ characters.data={
description="PARENTHESIZED HANGUL TIKEUT A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1103, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1103, 0x1161, 0x29 },
unicodeslot=0x3210,
},
[0x3211]={
@@ -92031,7 +94864,7 @@ characters.data={
description="PARENTHESIZED HANGUL RIEUL A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1105, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1105, 0x1161, 0x29 },
unicodeslot=0x3211,
},
[0x3212]={
@@ -92041,7 +94874,7 @@ characters.data={
description="PARENTHESIZED HANGUL MIEUM A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1106, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1106, 0x1161, 0x29 },
unicodeslot=0x3212,
},
[0x3213]={
@@ -92051,7 +94884,7 @@ characters.data={
description="PARENTHESIZED HANGUL PIEUP A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1107, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1107, 0x1161, 0x29 },
unicodeslot=0x3213,
},
[0x3214]={
@@ -92061,7 +94894,7 @@ characters.data={
description="PARENTHESIZED HANGUL SIOS A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1109, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1109, 0x1161, 0x29 },
unicodeslot=0x3214,
},
[0x3215]={
@@ -92071,7 +94904,7 @@ characters.data={
description="PARENTHESIZED HANGUL IEUNG A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110B, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x110B, 0x1161, 0x29 },
unicodeslot=0x3215,
},
[0x3216]={
@@ -92081,7 +94914,7 @@ characters.data={
description="PARENTHESIZED HANGUL CIEUC A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110C, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x110C, 0x1161, 0x29 },
unicodeslot=0x3216,
},
[0x3217]={
@@ -92091,7 +94924,7 @@ characters.data={
description="PARENTHESIZED HANGUL CHIEUCH A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110E, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x110E, 0x1161, 0x29 },
unicodeslot=0x3217,
},
[0x3218]={
@@ -92101,7 +94934,7 @@ characters.data={
description="PARENTHESIZED HANGUL KHIEUKH A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110F, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x110F, 0x1161, 0x29 },
unicodeslot=0x3218,
},
[0x3219]={
@@ -92111,7 +94944,7 @@ characters.data={
description="PARENTHESIZED HANGUL THIEUTH A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1110, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1110, 0x1161, 0x29 },
unicodeslot=0x3219,
},
[0x321A]={
@@ -92121,7 +94954,7 @@ characters.data={
description="PARENTHESIZED HANGUL PHIEUPH A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1111, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1111, 0x1161, 0x29 },
unicodeslot=0x321A,
},
[0x321B]={
@@ -92131,7 +94964,7 @@ characters.data={
description="PARENTHESIZED HANGUL HIEUH A",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x1112, 0x1161, 0x0029 },
+ specials={ "compat", 0x28, 0x1112, 0x1161, 0x29 },
unicodeslot=0x321B,
},
[0x321C]={
@@ -92141,7 +94974,7 @@ characters.data={
description="PARENTHESIZED HANGUL CIEUC U",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x110C, 0x116E, 0x0029 },
+ specials={ "compat", 0x28, 0x110C, 0x116E, 0x29 },
unicodeslot=0x321C,
},
[0x321D]={
@@ -92150,7 +94983,7 @@ characters.data={
description="PARENTHESIZED KOREAN CHARACTER OJEON",
direction="on",
linebreak="id",
- specials={ "compat", 0x0028, 0x110B, 0x1169, 0x110C, 0x1165, 0x11AB, 0x0029 },
+ specials={ "compat", 0x28, 0x110B, 0x1169, 0x110C, 0x1165, 0x11AB, 0x29 },
unicodeslot=0x321D,
},
[0x321E]={
@@ -92159,7 +94992,7 @@ characters.data={
description="PARENTHESIZED KOREAN CHARACTER O HU",
direction="on",
linebreak="id",
- specials={ "compat", 0x0028, 0x110B, 0x1169, 0x1112, 0x116E, 0x0029 },
+ specials={ "compat", 0x28, 0x110B, 0x1169, 0x1112, 0x116E, 0x29 },
unicodeslot=0x321E,
},
[0x3220]={
@@ -92169,7 +95002,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH ONE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4E00, 0x0029 },
+ specials={ "compat", 0x28, 0x4E00, 0x29 },
unicodeslot=0x3220,
},
[0x3221]={
@@ -92179,7 +95012,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH TWO",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4E8C, 0x0029 },
+ specials={ "compat", 0x28, 0x4E8C, 0x29 },
unicodeslot=0x3221,
},
[0x3222]={
@@ -92189,7 +95022,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH THREE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4E09, 0x0029 },
+ specials={ "compat", 0x28, 0x4E09, 0x29 },
unicodeslot=0x3222,
},
[0x3223]={
@@ -92199,7 +95032,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH FOUR",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x56DB, 0x0029 },
+ specials={ "compat", 0x28, 0x56DB, 0x29 },
unicodeslot=0x3223,
},
[0x3224]={
@@ -92209,7 +95042,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH FIVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4E94, 0x0029 },
+ specials={ "compat", 0x28, 0x4E94, 0x29 },
unicodeslot=0x3224,
},
[0x3225]={
@@ -92219,7 +95052,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SIX",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x516D, 0x0029 },
+ specials={ "compat", 0x28, 0x516D, 0x29 },
unicodeslot=0x3225,
},
[0x3226]={
@@ -92229,7 +95062,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SEVEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4E03, 0x0029 },
+ specials={ "compat", 0x28, 0x4E03, 0x29 },
unicodeslot=0x3226,
},
[0x3227]={
@@ -92239,7 +95072,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH EIGHT",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x516B, 0x0029 },
+ specials={ "compat", 0x28, 0x516B, 0x29 },
unicodeslot=0x3227,
},
[0x3228]={
@@ -92249,7 +95082,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH NINE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4E5D, 0x0029 },
+ specials={ "compat", 0x28, 0x4E5D, 0x29 },
unicodeslot=0x3228,
},
[0x3229]={
@@ -92259,7 +95092,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH TEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x5341, 0x0029 },
+ specials={ "compat", 0x28, 0x5341, 0x29 },
unicodeslot=0x3229,
},
[0x322A]={
@@ -92269,7 +95102,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH MOON",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x6708, 0x0029 },
+ specials={ "compat", 0x28, 0x6708, 0x29 },
unicodeslot=0x322A,
},
[0x322B]={
@@ -92279,7 +95112,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH FIRE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x706B, 0x0029 },
+ specials={ "compat", 0x28, 0x706B, 0x29 },
unicodeslot=0x322B,
},
[0x322C]={
@@ -92289,7 +95122,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH WATER",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x6C34, 0x0029 },
+ specials={ "compat", 0x28, 0x6C34, 0x29 },
unicodeslot=0x322C,
},
[0x322D]={
@@ -92299,7 +95132,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH WOOD",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x6728, 0x0029 },
+ specials={ "compat", 0x28, 0x6728, 0x29 },
unicodeslot=0x322D,
},
[0x322E]={
@@ -92309,7 +95142,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH METAL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x91D1, 0x0029 },
+ specials={ "compat", 0x28, 0x91D1, 0x29 },
unicodeslot=0x322E,
},
[0x322F]={
@@ -92319,7 +95152,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH EARTH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x571F, 0x0029 },
+ specials={ "compat", 0x28, 0x571F, 0x29 },
unicodeslot=0x322F,
},
[0x3230]={
@@ -92329,7 +95162,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SUN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x65E5, 0x0029 },
+ specials={ "compat", 0x28, 0x65E5, 0x29 },
unicodeslot=0x3230,
},
[0x3231]={
@@ -92339,7 +95172,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH STOCK",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x682A, 0x0029 },
+ specials={ "compat", 0x28, 0x682A, 0x29 },
unicodeslot=0x3231,
},
[0x3232]={
@@ -92349,7 +95182,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH HAVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x6709, 0x0029 },
+ specials={ "compat", 0x28, 0x6709, 0x29 },
unicodeslot=0x3232,
},
[0x3233]={
@@ -92359,7 +95192,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SOCIETY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x793E, 0x0029 },
+ specials={ "compat", 0x28, 0x793E, 0x29 },
unicodeslot=0x3233,
},
[0x3234]={
@@ -92369,7 +95202,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH NAME",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x540D, 0x0029 },
+ specials={ "compat", 0x28, 0x540D, 0x29 },
unicodeslot=0x3234,
},
[0x3235]={
@@ -92379,7 +95212,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SPECIAL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x7279, 0x0029 },
+ specials={ "compat", 0x28, 0x7279, 0x29 },
unicodeslot=0x3235,
},
[0x3236]={
@@ -92389,7 +95222,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH FINANCIAL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x8CA1, 0x0029 },
+ specials={ "compat", 0x28, 0x8CA1, 0x29 },
unicodeslot=0x3236,
},
[0x3237]={
@@ -92399,7 +95232,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH CONGRATULATION",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x795D, 0x0029 },
+ specials={ "compat", 0x28, 0x795D, 0x29 },
unicodeslot=0x3237,
},
[0x3238]={
@@ -92409,7 +95242,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH LABOR",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x52B4, 0x0029 },
+ specials={ "compat", 0x28, 0x52B4, 0x29 },
unicodeslot=0x3238,
},
[0x3239]={
@@ -92419,7 +95252,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH REPRESENT",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4EE3, 0x0029 },
+ specials={ "compat", 0x28, 0x4EE3, 0x29 },
unicodeslot=0x3239,
},
[0x323A]={
@@ -92429,7 +95262,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH CALL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x547C, 0x0029 },
+ specials={ "compat", 0x28, 0x547C, 0x29 },
unicodeslot=0x323A,
},
[0x323B]={
@@ -92439,7 +95272,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH STUDY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x5B66, 0x0029 },
+ specials={ "compat", 0x28, 0x5B66, 0x29 },
unicodeslot=0x323B,
},
[0x323C]={
@@ -92449,7 +95282,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SUPERVISE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x76E3, 0x0029 },
+ specials={ "compat", 0x28, 0x76E3, 0x29 },
unicodeslot=0x323C,
},
[0x323D]={
@@ -92459,7 +95292,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH ENTERPRISE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4F01, 0x0029 },
+ specials={ "compat", 0x28, 0x4F01, 0x29 },
unicodeslot=0x323D,
},
[0x323E]={
@@ -92469,7 +95302,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH RESOURCE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x8CC7, 0x0029 },
+ specials={ "compat", 0x28, 0x8CC7, 0x29 },
unicodeslot=0x323E,
},
[0x323F]={
@@ -92479,7 +95312,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH ALLIANCE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x5354, 0x0029 },
+ specials={ "compat", 0x28, 0x5354, 0x29 },
unicodeslot=0x323F,
},
[0x3240]={
@@ -92489,7 +95322,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH FESTIVAL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x796D, 0x0029 },
+ specials={ "compat", 0x28, 0x796D, 0x29 },
unicodeslot=0x3240,
},
[0x3241]={
@@ -92498,7 +95331,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH REST",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x4F11, 0x0029 },
+ specials={ "compat", 0x28, 0x4F11, 0x29 },
unicodeslot=0x3241,
},
[0x3242]={
@@ -92508,7 +95341,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH SELF",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x81EA, 0x0029 },
+ specials={ "compat", 0x28, 0x81EA, 0x29 },
unicodeslot=0x3242,
},
[0x3243]={
@@ -92518,7 +95351,7 @@ characters.data={
description="PARENTHESIZED IDEOGRAPH REACH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0028, 0x81F3, 0x0029 },
+ specials={ "compat", 0x28, 0x81F3, 0x29 },
unicodeslot=0x3243,
},
[0x3244]={
@@ -92627,7 +95460,7 @@ characters.data={
description="PARTNERSHIP SIGN",
direction="on",
linebreak="id",
- specials={ "square", 0x0050, 0x0054, 0x0045 },
+ specials={ "square", 0x50, 0x54, 0x45 },
unicodeslot=0x3250,
},
[0x3251]={
@@ -92636,7 +95469,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY ONE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0031 },
+ specials={ "circle", 0x32, 0x31 },
unicodeslot=0x3251,
},
[0x3252]={
@@ -92645,7 +95478,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY TWO",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0032 },
+ specials={ "circle", 0x32, 0x32 },
unicodeslot=0x3252,
},
[0x3253]={
@@ -92654,7 +95487,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY THREE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0033 },
+ specials={ "circle", 0x32, 0x33 },
unicodeslot=0x3253,
},
[0x3254]={
@@ -92663,7 +95496,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY FOUR",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0034 },
+ specials={ "circle", 0x32, 0x34 },
unicodeslot=0x3254,
},
[0x3255]={
@@ -92672,7 +95505,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY FIVE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0035 },
+ specials={ "circle", 0x32, 0x35 },
unicodeslot=0x3255,
},
[0x3256]={
@@ -92681,7 +95514,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY SIX",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0036 },
+ specials={ "circle", 0x32, 0x36 },
unicodeslot=0x3256,
},
[0x3257]={
@@ -92690,7 +95523,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY SEVEN",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0037 },
+ specials={ "circle", 0x32, 0x37 },
unicodeslot=0x3257,
},
[0x3258]={
@@ -92699,7 +95532,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY EIGHT",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0038 },
+ specials={ "circle", 0x32, 0x38 },
unicodeslot=0x3258,
},
[0x3259]={
@@ -92708,7 +95541,7 @@ characters.data={
description="CIRCLED NUMBER TWENTY NINE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0032, 0x0039 },
+ specials={ "circle", 0x32, 0x39 },
unicodeslot=0x3259,
},
[0x325A]={
@@ -92717,7 +95550,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0030 },
+ specials={ "circle", 0x33, 0x30 },
unicodeslot=0x325A,
},
[0x325B]={
@@ -92726,7 +95559,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY ONE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0031 },
+ specials={ "circle", 0x33, 0x31 },
unicodeslot=0x325B,
},
[0x325C]={
@@ -92735,7 +95568,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY TWO",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0032 },
+ specials={ "circle", 0x33, 0x32 },
unicodeslot=0x325C,
},
[0x325D]={
@@ -92744,7 +95577,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY THREE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0033 },
+ specials={ "circle", 0x33, 0x33 },
unicodeslot=0x325D,
},
[0x325E]={
@@ -92753,7 +95586,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY FOUR",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0034 },
+ specials={ "circle", 0x33, 0x34 },
unicodeslot=0x325E,
},
[0x325F]={
@@ -92762,7 +95595,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY FIVE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0035 },
+ specials={ "circle", 0x33, 0x35 },
unicodeslot=0x325F,
},
[0x3260]={
@@ -93556,7 +96389,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY SIX",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0036 },
+ specials={ "circle", 0x33, 0x36 },
unicodeslot=0x32B1,
},
[0x32B2]={
@@ -93565,7 +96398,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY SEVEN",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0037 },
+ specials={ "circle", 0x33, 0x37 },
unicodeslot=0x32B2,
},
[0x32B3]={
@@ -93574,7 +96407,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY EIGHT",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0038 },
+ specials={ "circle", 0x33, 0x38 },
unicodeslot=0x32B3,
},
[0x32B4]={
@@ -93583,7 +96416,7 @@ characters.data={
description="CIRCLED NUMBER THIRTY NINE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0033, 0x0039 },
+ specials={ "circle", 0x33, 0x39 },
unicodeslot=0x32B4,
},
[0x32B5]={
@@ -93592,7 +96425,7 @@ characters.data={
description="CIRCLED NUMBER FORTY",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0030 },
+ specials={ "circle", 0x34, 0x30 },
unicodeslot=0x32B5,
},
[0x32B6]={
@@ -93601,7 +96434,7 @@ characters.data={
description="CIRCLED NUMBER FORTY ONE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0031 },
+ specials={ "circle", 0x34, 0x31 },
unicodeslot=0x32B6,
},
[0x32B7]={
@@ -93610,7 +96443,7 @@ characters.data={
description="CIRCLED NUMBER FORTY TWO",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0032 },
+ specials={ "circle", 0x34, 0x32 },
unicodeslot=0x32B7,
},
[0x32B8]={
@@ -93619,7 +96452,7 @@ characters.data={
description="CIRCLED NUMBER FORTY THREE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0033 },
+ specials={ "circle", 0x34, 0x33 },
unicodeslot=0x32B8,
},
[0x32B9]={
@@ -93628,7 +96461,7 @@ characters.data={
description="CIRCLED NUMBER FORTY FOUR",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0034 },
+ specials={ "circle", 0x34, 0x34 },
unicodeslot=0x32B9,
},
[0x32BA]={
@@ -93637,7 +96470,7 @@ characters.data={
description="CIRCLED NUMBER FORTY FIVE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0035 },
+ specials={ "circle", 0x34, 0x35 },
unicodeslot=0x32BA,
},
[0x32BB]={
@@ -93646,7 +96479,7 @@ characters.data={
description="CIRCLED NUMBER FORTY SIX",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0036 },
+ specials={ "circle", 0x34, 0x36 },
unicodeslot=0x32BB,
},
[0x32BC]={
@@ -93655,7 +96488,7 @@ characters.data={
description="CIRCLED NUMBER FORTY SEVEN",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0037 },
+ specials={ "circle", 0x34, 0x37 },
unicodeslot=0x32BC,
},
[0x32BD]={
@@ -93664,7 +96497,7 @@ characters.data={
description="CIRCLED NUMBER FORTY EIGHT",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0038 },
+ specials={ "circle", 0x34, 0x38 },
unicodeslot=0x32BD,
},
[0x32BE]={
@@ -93673,7 +96506,7 @@ characters.data={
description="CIRCLED NUMBER FORTY NINE",
direction="on",
linebreak="id",
- specials={ "circle", 0x0034, 0x0039 },
+ specials={ "circle", 0x34, 0x39 },
unicodeslot=0x32BE,
},
[0x32BF]={
@@ -93682,7 +96515,7 @@ characters.data={
description="CIRCLED NUMBER FIFTY",
direction="on",
linebreak="id",
- specials={ "circle", 0x0035, 0x0030 },
+ specials={ "circle", 0x35, 0x30 },
unicodeslot=0x32BF,
},
[0x32C0]={
@@ -93691,7 +96524,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x6708 },
+ specials={ "compat", 0x31, 0x6708 },
unicodeslot=0x32C0,
},
[0x32C1]={
@@ -93700,7 +96533,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR FEBRUARY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x6708 },
+ specials={ "compat", 0x32, 0x6708 },
unicodeslot=0x32C1,
},
[0x32C2]={
@@ -93709,7 +96542,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR MARCH",
direction="l",
linebreak="id",
- specials={ "compat", 0x0033, 0x6708 },
+ specials={ "compat", 0x33, 0x6708 },
unicodeslot=0x32C2,
},
[0x32C3]={
@@ -93718,7 +96551,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR APRIL",
direction="l",
linebreak="id",
- specials={ "compat", 0x0034, 0x6708 },
+ specials={ "compat", 0x34, 0x6708 },
unicodeslot=0x32C3,
},
[0x32C4]={
@@ -93727,7 +96560,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR MAY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0035, 0x6708 },
+ specials={ "compat", 0x35, 0x6708 },
unicodeslot=0x32C4,
},
[0x32C5]={
@@ -93736,7 +96569,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR JUNE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0036, 0x6708 },
+ specials={ "compat", 0x36, 0x6708 },
unicodeslot=0x32C5,
},
[0x32C6]={
@@ -93745,7 +96578,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR JULY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0037, 0x6708 },
+ specials={ "compat", 0x37, 0x6708 },
unicodeslot=0x32C6,
},
[0x32C7]={
@@ -93754,7 +96587,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR AUGUST",
direction="l",
linebreak="id",
- specials={ "compat", 0x0038, 0x6708 },
+ specials={ "compat", 0x38, 0x6708 },
unicodeslot=0x32C7,
},
[0x32C8]={
@@ -93763,7 +96596,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR SEPTEMBER",
direction="l",
linebreak="id",
- specials={ "compat", 0x0039, 0x6708 },
+ specials={ "compat", 0x39, 0x6708 },
unicodeslot=0x32C8,
},
[0x32C9]={
@@ -93772,7 +96605,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR OCTOBER",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0030, 0x6708 },
+ specials={ "compat", 0x31, 0x30, 0x6708 },
unicodeslot=0x32C9,
},
[0x32CA]={
@@ -93781,7 +96614,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR NOVEMBER",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0031, 0x6708 },
+ specials={ "compat", 0x31, 0x31, 0x6708 },
unicodeslot=0x32CA,
},
[0x32CB]={
@@ -93790,7 +96623,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DECEMBER",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0032, 0x6708 },
+ specials={ "compat", 0x31, 0x32, 0x6708 },
unicodeslot=0x32CB,
},
[0x32CC]={
@@ -93799,7 +96632,7 @@ characters.data={
description="SQUARE HG",
direction="on",
linebreak="id",
- specials={ "square", 0x0048, 0x0067 },
+ specials={ "square", 0x48, 0x67 },
unicodeslot=0x32CC,
},
[0x32CD]={
@@ -93808,7 +96641,7 @@ characters.data={
description="SQUARE ERG",
direction="on",
linebreak="id",
- specials={ "square", 0x0065, 0x0072, 0x0067 },
+ specials={ "square", 0x65, 0x72, 0x67 },
unicodeslot=0x32CD,
},
[0x32CE]={
@@ -93817,7 +96650,7 @@ characters.data={
description="SQUARE EV",
direction="on",
linebreak="id",
- specials={ "square", 0x0065, 0x0056 },
+ specials={ "square", 0x65, 0x56 },
unicodeslot=0x32CE,
},
[0x32CF]={
@@ -93826,7 +96659,7 @@ characters.data={
description="LIMITED LIABILITY SIGN",
direction="on",
linebreak="id",
- specials={ "square", 0x004C, 0x0054, 0x0044 },
+ specials={ "square", 0x4C, 0x54, 0x44 },
unicodeslot=0x32CF,
},
[0x32D0]={
@@ -95078,7 +97911,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ZERO",
direction="l",
linebreak="id",
- specials={ "compat", 0x0030, 0x70B9 },
+ specials={ "compat", 0x30, 0x70B9 },
unicodeslot=0x3358,
},
[0x3359]={
@@ -95087,7 +97920,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ONE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x70B9 },
+ specials={ "compat", 0x31, 0x70B9 },
unicodeslot=0x3359,
},
[0x335A]={
@@ -95096,7 +97929,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWO",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x70B9 },
+ specials={ "compat", 0x32, 0x70B9 },
unicodeslot=0x335A,
},
[0x335B]={
@@ -95105,7 +97938,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THREE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0033, 0x70B9 },
+ specials={ "compat", 0x33, 0x70B9 },
unicodeslot=0x335B,
},
[0x335C]={
@@ -95114,7 +97947,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOUR",
direction="l",
linebreak="id",
- specials={ "compat", 0x0034, 0x70B9 },
+ specials={ "compat", 0x34, 0x70B9 },
unicodeslot=0x335C,
},
[0x335D]={
@@ -95123,7 +97956,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FIVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0035, 0x70B9 },
+ specials={ "compat", 0x35, 0x70B9 },
unicodeslot=0x335D,
},
[0x335E]={
@@ -95132,7 +97965,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIX",
direction="l",
linebreak="id",
- specials={ "compat", 0x0036, 0x70B9 },
+ specials={ "compat", 0x36, 0x70B9 },
unicodeslot=0x335E,
},
[0x335F]={
@@ -95141,7 +97974,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0037, 0x70B9 },
+ specials={ "compat", 0x37, 0x70B9 },
unicodeslot=0x335F,
},
[0x3360]={
@@ -95150,7 +97983,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHT",
direction="l",
linebreak="id",
- specials={ "compat", 0x0038, 0x70B9 },
+ specials={ "compat", 0x38, 0x70B9 },
unicodeslot=0x3360,
},
[0x3361]={
@@ -95159,7 +97992,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0039, 0x70B9 },
+ specials={ "compat", 0x39, 0x70B9 },
unicodeslot=0x3361,
},
[0x3362]={
@@ -95168,7 +98001,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0030, 0x70B9 },
+ specials={ "compat", 0x31, 0x30, 0x70B9 },
unicodeslot=0x3362,
},
[0x3363]={
@@ -95177,7 +98010,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ELEVEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0031, 0x70B9 },
+ specials={ "compat", 0x31, 0x31, 0x70B9 },
unicodeslot=0x3363,
},
[0x3364]={
@@ -95186,7 +98019,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWELVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0032, 0x70B9 },
+ specials={ "compat", 0x31, 0x32, 0x70B9 },
unicodeslot=0x3364,
},
[0x3365]={
@@ -95195,7 +98028,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THIRTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0033, 0x70B9 },
+ specials={ "compat", 0x31, 0x33, 0x70B9 },
unicodeslot=0x3365,
},
[0x3366]={
@@ -95204,7 +98037,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOURTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0034, 0x70B9 },
+ specials={ "compat", 0x31, 0x34, 0x70B9 },
unicodeslot=0x3366,
},
[0x3367]={
@@ -95213,7 +98046,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FIFTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0035, 0x70B9 },
+ specials={ "compat", 0x31, 0x35, 0x70B9 },
unicodeslot=0x3367,
},
[0x3368]={
@@ -95222,7 +98055,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIXTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0036, 0x70B9 },
+ specials={ "compat", 0x31, 0x36, 0x70B9 },
unicodeslot=0x3368,
},
[0x3369]={
@@ -95231,7 +98064,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVENTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0037, 0x70B9 },
+ specials={ "compat", 0x31, 0x37, 0x70B9 },
unicodeslot=0x3369,
},
[0x336A]={
@@ -95240,7 +98073,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0038, 0x70B9 },
+ specials={ "compat", 0x31, 0x38, 0x70B9 },
unicodeslot=0x336A,
},
[0x336B]={
@@ -95249,7 +98082,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINETEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0039, 0x70B9 },
+ specials={ "compat", 0x31, 0x39, 0x70B9 },
unicodeslot=0x336B,
},
[0x336C]={
@@ -95258,7 +98091,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0030, 0x70B9 },
+ specials={ "compat", 0x32, 0x30, 0x70B9 },
unicodeslot=0x336C,
},
[0x336D]={
@@ -95267,7 +98100,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-ONE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0031, 0x70B9 },
+ specials={ "compat", 0x32, 0x31, 0x70B9 },
unicodeslot=0x336D,
},
[0x336E]={
@@ -95276,7 +98109,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-TWO",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0032, 0x70B9 },
+ specials={ "compat", 0x32, 0x32, 0x70B9 },
unicodeslot=0x336E,
},
[0x336F]={
@@ -95285,7 +98118,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-THREE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0033, 0x70B9 },
+ specials={ "compat", 0x32, 0x33, 0x70B9 },
unicodeslot=0x336F,
},
[0x3370]={
@@ -95294,7 +98127,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-FOUR",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0034, 0x70B9 },
+ specials={ "compat", 0x32, 0x34, 0x70B9 },
unicodeslot=0x3370,
},
[0x3371]={
@@ -95303,7 +98136,7 @@ characters.data={
description="SQUARE HPA",
direction="l",
linebreak="id",
- specials={ "square", 0x0068, 0x0050, 0x0061 },
+ specials={ "square", 0x68, 0x50, 0x61 },
unicodeslot=0x3371,
},
[0x3372]={
@@ -95312,7 +98145,7 @@ characters.data={
description="SQUARE DA",
direction="l",
linebreak="id",
- specials={ "square", 0x0064, 0x0061 },
+ specials={ "square", 0x64, 0x61 },
unicodeslot=0x3372,
},
[0x3373]={
@@ -95321,7 +98154,7 @@ characters.data={
description="SQUARE AU",
direction="l",
linebreak="id",
- specials={ "square", 0x0041, 0x0055 },
+ specials={ "square", 0x41, 0x55 },
unicodeslot=0x3373,
},
[0x3374]={
@@ -95330,7 +98163,7 @@ characters.data={
description="SQUARE BAR",
direction="l",
linebreak="id",
- specials={ "square", 0x0062, 0x0061, 0x0072 },
+ specials={ "square", 0x62, 0x61, 0x72 },
unicodeslot=0x3374,
},
[0x3375]={
@@ -95339,7 +98172,7 @@ characters.data={
description="SQUARE OV",
direction="l",
linebreak="id",
- specials={ "square", 0x006F, 0x0056 },
+ specials={ "square", 0x6F, 0x56 },
unicodeslot=0x3375,
},
[0x3376]={
@@ -95348,7 +98181,7 @@ characters.data={
description="SQUARE PC",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x0063 },
+ specials={ "square", 0x70, 0x63 },
unicodeslot=0x3376,
},
[0x3377]={
@@ -95357,7 +98190,7 @@ characters.data={
description="SQUARE DM",
direction="on",
linebreak="id",
- specials={ "square", 0x0064, 0x006D },
+ specials={ "square", 0x64, 0x6D },
unicodeslot=0x3377,
},
[0x3378]={
@@ -95366,7 +98199,7 @@ characters.data={
description="SQUARE DM SQUARED",
direction="on",
linebreak="id",
- specials={ "square", 0x0064, 0x006D, 0x00B2 },
+ specials={ "square", 0x64, 0x6D, 0xB2 },
unicodeslot=0x3378,
},
[0x3379]={
@@ -95375,7 +98208,7 @@ characters.data={
description="SQUARE DM CUBED",
direction="on",
linebreak="id",
- specials={ "square", 0x0064, 0x006D, 0x00B3 },
+ specials={ "square", 0x64, 0x6D, 0xB3 },
unicodeslot=0x3379,
},
[0x337A]={
@@ -95384,7 +98217,7 @@ characters.data={
description="SQUARE IU",
direction="on",
linebreak="id",
- specials={ "square", 0x0049, 0x0055 },
+ specials={ "square", 0x49, 0x55 },
unicodeslot=0x337A,
},
[0x337B]={
@@ -95444,7 +98277,7 @@ characters.data={
description="SQUARE PA AMPS",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x0041 },
+ specials={ "square", 0x70, 0x41 },
unicodeslot=0x3380,
},
[0x3381]={
@@ -95454,7 +98287,7 @@ characters.data={
description="SQUARE NA",
direction="l",
linebreak="id",
- specials={ "square", 0x006E, 0x0041 },
+ specials={ "square", 0x6E, 0x41 },
unicodeslot=0x3381,
},
[0x3382]={
@@ -95464,7 +98297,7 @@ characters.data={
description="SQUARE MU A",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x0041 },
+ specials={ "square", 0x3BC, 0x41 },
unicodeslot=0x3382,
},
[0x3383]={
@@ -95474,7 +98307,7 @@ characters.data={
description="SQUARE MA",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0041 },
+ specials={ "square", 0x6D, 0x41 },
unicodeslot=0x3383,
},
[0x3384]={
@@ -95484,7 +98317,7 @@ characters.data={
description="SQUARE KA",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0041 },
+ specials={ "square", 0x6B, 0x41 },
unicodeslot=0x3384,
},
[0x3385]={
@@ -95494,7 +98327,7 @@ characters.data={
description="SQUARE KB",
direction="l",
linebreak="id",
- specials={ "square", 0x004B, 0x0042 },
+ specials={ "square", 0x4B, 0x42 },
unicodeslot=0x3385,
},
[0x3386]={
@@ -95504,7 +98337,7 @@ characters.data={
description="SQUARE MB",
direction="l",
linebreak="id",
- specials={ "square", 0x004D, 0x0042 },
+ specials={ "square", 0x4D, 0x42 },
unicodeslot=0x3386,
},
[0x3387]={
@@ -95514,7 +98347,7 @@ characters.data={
description="SQUARE GB",
direction="l",
linebreak="id",
- specials={ "square", 0x0047, 0x0042 },
+ specials={ "square", 0x47, 0x42 },
unicodeslot=0x3387,
},
[0x3388]={
@@ -95524,7 +98357,7 @@ characters.data={
description="SQUARE CAL",
direction="l",
linebreak="id",
- specials={ "square", 0x0063, 0x0061, 0x006C },
+ specials={ "square", 0x63, 0x61, 0x6C },
unicodeslot=0x3388,
},
[0x3389]={
@@ -95534,7 +98367,7 @@ characters.data={
description="SQUARE KCAL",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0063, 0x0061, 0x006C },
+ specials={ "square", 0x6B, 0x63, 0x61, 0x6C },
unicodeslot=0x3389,
},
[0x338A]={
@@ -95544,7 +98377,7 @@ characters.data={
description="SQUARE PF",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x0046 },
+ specials={ "square", 0x70, 0x46 },
unicodeslot=0x338A,
},
[0x338B]={
@@ -95554,7 +98387,7 @@ characters.data={
description="SQUARE NF",
direction="l",
linebreak="id",
- specials={ "square", 0x006E, 0x0046 },
+ specials={ "square", 0x6E, 0x46 },
unicodeslot=0x338B,
},
[0x338C]={
@@ -95564,7 +98397,7 @@ characters.data={
description="SQUARE MU F",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x0046 },
+ specials={ "square", 0x3BC, 0x46 },
unicodeslot=0x338C,
},
[0x338D]={
@@ -95574,7 +98407,7 @@ characters.data={
description="SQUARE MU G",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x0067 },
+ specials={ "square", 0x3BC, 0x67 },
unicodeslot=0x338D,
},
[0x338E]={
@@ -95584,7 +98417,7 @@ characters.data={
description="SQUARE MG",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0067 },
+ specials={ "square", 0x6D, 0x67 },
unicodeslot=0x338E,
},
[0x338F]={
@@ -95594,7 +98427,7 @@ characters.data={
description="SQUARE KG",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0067 },
+ specials={ "square", 0x6B, 0x67 },
unicodeslot=0x338F,
},
[0x3390]={
@@ -95604,7 +98437,7 @@ characters.data={
description="SQUARE HZ",
direction="l",
linebreak="id",
- specials={ "square", 0x0048, 0x007A },
+ specials={ "square", 0x48, 0x7A },
unicodeslot=0x3390,
},
[0x3391]={
@@ -95614,7 +98447,7 @@ characters.data={
description="SQUARE KHZ",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0048, 0x007A },
+ specials={ "square", 0x6B, 0x48, 0x7A },
unicodeslot=0x3391,
},
[0x3392]={
@@ -95624,7 +98457,7 @@ characters.data={
description="SQUARE MHZ",
direction="l",
linebreak="id",
- specials={ "square", 0x004D, 0x0048, 0x007A },
+ specials={ "square", 0x4D, 0x48, 0x7A },
unicodeslot=0x3392,
},
[0x3393]={
@@ -95634,7 +98467,7 @@ characters.data={
description="SQUARE GHZ",
direction="l",
linebreak="id",
- specials={ "square", 0x0047, 0x0048, 0x007A },
+ specials={ "square", 0x47, 0x48, 0x7A },
unicodeslot=0x3393,
},
[0x3394]={
@@ -95644,7 +98477,7 @@ characters.data={
description="SQUARE THZ",
direction="l",
linebreak="id",
- specials={ "square", 0x0054, 0x0048, 0x007A },
+ specials={ "square", 0x54, 0x48, 0x7A },
unicodeslot=0x3394,
},
[0x3395]={
@@ -95654,7 +98487,7 @@ characters.data={
description="SQUARE MU L",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x2113 },
+ specials={ "square", 0x3BC, 0x2113 },
unicodeslot=0x3395,
},
[0x3396]={
@@ -95664,7 +98497,7 @@ characters.data={
description="SQUARE ML",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x2113 },
+ specials={ "square", 0x6D, 0x2113 },
unicodeslot=0x3396,
},
[0x3397]={
@@ -95674,7 +98507,7 @@ characters.data={
description="SQUARE DL",
direction="l",
linebreak="id",
- specials={ "square", 0x0064, 0x2113 },
+ specials={ "square", 0x64, 0x2113 },
unicodeslot=0x3397,
},
[0x3398]={
@@ -95684,7 +98517,7 @@ characters.data={
description="SQUARE KL",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x2113 },
+ specials={ "square", 0x6B, 0x2113 },
unicodeslot=0x3398,
},
[0x3399]={
@@ -95694,7 +98527,7 @@ characters.data={
description="SQUARE FM",
direction="l",
linebreak="id",
- specials={ "square", 0x0066, 0x006D },
+ specials={ "square", 0x66, 0x6D },
unicodeslot=0x3399,
},
[0x339A]={
@@ -95704,7 +98537,7 @@ characters.data={
description="SQUARE NM",
direction="l",
linebreak="id",
- specials={ "square", 0x006E, 0x006D },
+ specials={ "square", 0x6E, 0x6D },
unicodeslot=0x339A,
},
[0x339B]={
@@ -95714,7 +98547,7 @@ characters.data={
description="SQUARE MU M",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x006D },
+ specials={ "square", 0x3BC, 0x6D },
unicodeslot=0x339B,
},
[0x339C]={
@@ -95724,7 +98557,7 @@ characters.data={
description="SQUARE MM",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x006D },
+ specials={ "square", 0x6D, 0x6D },
unicodeslot=0x339C,
},
[0x339D]={
@@ -95734,7 +98567,7 @@ characters.data={
description="SQUARE CM",
direction="l",
linebreak="id",
- specials={ "square", 0x0063, 0x006D },
+ specials={ "square", 0x63, 0x6D },
unicodeslot=0x339D,
},
[0x339E]={
@@ -95744,7 +98577,7 @@ characters.data={
description="SQUARE KM",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x006D },
+ specials={ "square", 0x6B, 0x6D },
unicodeslot=0x339E,
},
[0x339F]={
@@ -95754,7 +98587,7 @@ characters.data={
description="SQUARE MM SQUARED",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x006D, 0x00B2 },
+ specials={ "square", 0x6D, 0x6D, 0xB2 },
unicodeslot=0x339F,
},
[0x33A0]={
@@ -95764,7 +98597,7 @@ characters.data={
description="SQUARE CM SQUARED",
direction="l",
linebreak="id",
- specials={ "square", 0x0063, 0x006D, 0x00B2 },
+ specials={ "square", 0x63, 0x6D, 0xB2 },
unicodeslot=0x33A0,
},
[0x33A1]={
@@ -95774,7 +98607,7 @@ characters.data={
description="SQUARE M SQUARED",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x00B2 },
+ specials={ "square", 0x6D, 0xB2 },
unicodeslot=0x33A1,
},
[0x33A2]={
@@ -95784,7 +98617,7 @@ characters.data={
description="SQUARE KM SQUARED",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x006D, 0x00B2 },
+ specials={ "square", 0x6B, 0x6D, 0xB2 },
unicodeslot=0x33A2,
},
[0x33A3]={
@@ -95794,7 +98627,7 @@ characters.data={
description="SQUARE MM CUBED",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x006D, 0x00B3 },
+ specials={ "square", 0x6D, 0x6D, 0xB3 },
unicodeslot=0x33A3,
},
[0x33A4]={
@@ -95804,7 +98637,7 @@ characters.data={
description="SQUARE CM CUBED",
direction="l",
linebreak="id",
- specials={ "square", 0x0063, 0x006D, 0x00B3 },
+ specials={ "square", 0x63, 0x6D, 0xB3 },
unicodeslot=0x33A4,
},
[0x33A5]={
@@ -95814,7 +98647,7 @@ characters.data={
description="SQUARE M CUBED",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x00B3 },
+ specials={ "square", 0x6D, 0xB3 },
unicodeslot=0x33A5,
},
[0x33A6]={
@@ -95824,7 +98657,7 @@ characters.data={
description="SQUARE KM CUBED",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x006D, 0x00B3 },
+ specials={ "square", 0x6B, 0x6D, 0xB3 },
unicodeslot=0x33A6,
},
[0x33A7]={
@@ -95834,7 +98667,7 @@ characters.data={
description="SQUARE M OVER S",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x2215, 0x0073 },
+ specials={ "square", 0x6D, 0x2215, 0x73 },
unicodeslot=0x33A7,
},
[0x33A8]={
@@ -95844,7 +98677,7 @@ characters.data={
description="SQUARE M OVER S SQUARED",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x2215, 0x0073, 0x00B2 },
+ specials={ "square", 0x6D, 0x2215, 0x73, 0xB2 },
unicodeslot=0x33A8,
},
[0x33A9]={
@@ -95854,7 +98687,7 @@ characters.data={
description="SQUARE PA",
direction="l",
linebreak="id",
- specials={ "square", 0x0050, 0x0061 },
+ specials={ "square", 0x50, 0x61 },
unicodeslot=0x33A9,
},
[0x33AA]={
@@ -95864,7 +98697,7 @@ characters.data={
description="SQUARE KPA",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0050, 0x0061 },
+ specials={ "square", 0x6B, 0x50, 0x61 },
unicodeslot=0x33AA,
},
[0x33AB]={
@@ -95874,7 +98707,7 @@ characters.data={
description="SQUARE MPA",
direction="l",
linebreak="id",
- specials={ "square", 0x004D, 0x0050, 0x0061 },
+ specials={ "square", 0x4D, 0x50, 0x61 },
unicodeslot=0x33AB,
},
[0x33AC]={
@@ -95884,7 +98717,7 @@ characters.data={
description="SQUARE GPA",
direction="l",
linebreak="id",
- specials={ "square", 0x0047, 0x0050, 0x0061 },
+ specials={ "square", 0x47, 0x50, 0x61 },
unicodeslot=0x33AC,
},
[0x33AD]={
@@ -95894,7 +98727,7 @@ characters.data={
description="SQUARE RAD",
direction="l",
linebreak="id",
- specials={ "square", 0x0072, 0x0061, 0x0064 },
+ specials={ "square", 0x72, 0x61, 0x64 },
unicodeslot=0x33AD,
},
[0x33AE]={
@@ -95904,7 +98737,7 @@ characters.data={
description="SQUARE RAD OVER S",
direction="l",
linebreak="id",
- specials={ "square", 0x0072, 0x0061, 0x0064, 0x2215, 0x0073 },
+ specials={ "square", 0x72, 0x61, 0x64, 0x2215, 0x73 },
unicodeslot=0x33AE,
},
[0x33AF]={
@@ -95914,7 +98747,7 @@ characters.data={
description="SQUARE RAD OVER S SQUARED",
direction="l",
linebreak="id",
- specials={ "square", 0x0072, 0x0061, 0x0064, 0x2215, 0x0073, 0x00B2 },
+ specials={ "square", 0x72, 0x61, 0x64, 0x2215, 0x73, 0xB2 },
unicodeslot=0x33AF,
},
[0x33B0]={
@@ -95924,7 +98757,7 @@ characters.data={
description="SQUARE PS",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x0073 },
+ specials={ "square", 0x70, 0x73 },
unicodeslot=0x33B0,
},
[0x33B1]={
@@ -95934,7 +98767,7 @@ characters.data={
description="SQUARE NS",
direction="l",
linebreak="id",
- specials={ "square", 0x006E, 0x0073 },
+ specials={ "square", 0x6E, 0x73 },
unicodeslot=0x33B1,
},
[0x33B2]={
@@ -95944,7 +98777,7 @@ characters.data={
description="SQUARE MU S",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x0073 },
+ specials={ "square", 0x3BC, 0x73 },
unicodeslot=0x33B2,
},
[0x33B3]={
@@ -95954,7 +98787,7 @@ characters.data={
description="SQUARE MS",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0073 },
+ specials={ "square", 0x6D, 0x73 },
unicodeslot=0x33B3,
},
[0x33B4]={
@@ -95964,7 +98797,7 @@ characters.data={
description="SQUARE PV",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x0056 },
+ specials={ "square", 0x70, 0x56 },
unicodeslot=0x33B4,
},
[0x33B5]={
@@ -95974,7 +98807,7 @@ characters.data={
description="SQUARE NV",
direction="l",
linebreak="id",
- specials={ "square", 0x006E, 0x0056 },
+ specials={ "square", 0x6E, 0x56 },
unicodeslot=0x33B5,
},
[0x33B6]={
@@ -95984,7 +98817,7 @@ characters.data={
description="SQUARE MU V",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x0056 },
+ specials={ "square", 0x3BC, 0x56 },
unicodeslot=0x33B6,
},
[0x33B7]={
@@ -95994,7 +98827,7 @@ characters.data={
description="SQUARE MV",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0056 },
+ specials={ "square", 0x6D, 0x56 },
unicodeslot=0x33B7,
},
[0x33B8]={
@@ -96004,7 +98837,7 @@ characters.data={
description="SQUARE KV",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0056 },
+ specials={ "square", 0x6B, 0x56 },
unicodeslot=0x33B8,
},
[0x33B9]={
@@ -96014,7 +98847,7 @@ characters.data={
description="SQUARE MV MEGA",
direction="l",
linebreak="id",
- specials={ "square", 0x004D, 0x0056 },
+ specials={ "square", 0x4D, 0x56 },
unicodeslot=0x33B9,
},
[0x33BA]={
@@ -96024,7 +98857,7 @@ characters.data={
description="SQUARE PW",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x0057 },
+ specials={ "square", 0x70, 0x57 },
unicodeslot=0x33BA,
},
[0x33BB]={
@@ -96034,7 +98867,7 @@ characters.data={
description="SQUARE NW",
direction="l",
linebreak="id",
- specials={ "square", 0x006E, 0x0057 },
+ specials={ "square", 0x6E, 0x57 },
unicodeslot=0x33BB,
},
[0x33BC]={
@@ -96044,7 +98877,7 @@ characters.data={
description="SQUARE MU W",
direction="l",
linebreak="id",
- specials={ "square", 0x03BC, 0x0057 },
+ specials={ "square", 0x3BC, 0x57 },
unicodeslot=0x33BC,
},
[0x33BD]={
@@ -96054,7 +98887,7 @@ characters.data={
description="SQUARE MW",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0057 },
+ specials={ "square", 0x6D, 0x57 },
unicodeslot=0x33BD,
},
[0x33BE]={
@@ -96064,7 +98897,7 @@ characters.data={
description="SQUARE KW",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0057 },
+ specials={ "square", 0x6B, 0x57 },
unicodeslot=0x33BE,
},
[0x33BF]={
@@ -96074,7 +98907,7 @@ characters.data={
description="SQUARE MW MEGA",
direction="l",
linebreak="id",
- specials={ "square", 0x004D, 0x0057 },
+ specials={ "square", 0x4D, 0x57 },
unicodeslot=0x33BF,
},
[0x33C0]={
@@ -96084,7 +98917,7 @@ characters.data={
description="SQUARE K OHM",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x03A9 },
+ specials={ "square", 0x6B, 0x3A9 },
unicodeslot=0x33C0,
},
[0x33C1]={
@@ -96094,7 +98927,7 @@ characters.data={
description="SQUARE M OHM",
direction="l",
linebreak="id",
- specials={ "square", 0x004D, 0x03A9 },
+ specials={ "square", 0x4D, 0x3A9 },
unicodeslot=0x33C1,
},
[0x33C2]={
@@ -96104,7 +98937,7 @@ characters.data={
description="SQUARE AM",
direction="l",
linebreak="id",
- specials={ "square", 0x0061, 0x002E, 0x006D, 0x002E },
+ specials={ "square", 0x61, 0x2E, 0x6D, 0x2E },
unicodeslot=0x33C2,
},
[0x33C3]={
@@ -96114,7 +98947,7 @@ characters.data={
description="SQUARE BQ",
direction="l",
linebreak="id",
- specials={ "square", 0x0042, 0x0071 },
+ specials={ "square", 0x42, 0x71 },
unicodeslot=0x33C3,
},
[0x33C4]={
@@ -96124,7 +98957,7 @@ characters.data={
description="SQUARE CC",
direction="l",
linebreak="id",
- specials={ "square", 0x0063, 0x0063 },
+ specials={ "square", 0x63, 0x63 },
unicodeslot=0x33C4,
},
[0x33C5]={
@@ -96134,7 +98967,7 @@ characters.data={
description="SQUARE CD",
direction="l",
linebreak="id",
- specials={ "square", 0x0063, 0x0064 },
+ specials={ "square", 0x63, 0x64 },
unicodeslot=0x33C5,
},
[0x33C6]={
@@ -96144,7 +98977,7 @@ characters.data={
description="SQUARE C OVER KG",
direction="l",
linebreak="id",
- specials={ "square", 0x0043, 0x2215, 0x006B, 0x0067 },
+ specials={ "square", 0x43, 0x2215, 0x6B, 0x67 },
unicodeslot=0x33C6,
},
[0x33C7]={
@@ -96154,7 +98987,7 @@ characters.data={
description="SQUARE CO",
direction="l",
linebreak="id",
- specials={ "square", 0x0043, 0x006F, 0x002E },
+ specials={ "square", 0x43, 0x6F, 0x2E },
unicodeslot=0x33C7,
},
[0x33C8]={
@@ -96164,7 +98997,7 @@ characters.data={
description="SQUARE DB",
direction="l",
linebreak="id",
- specials={ "square", 0x0064, 0x0042 },
+ specials={ "square", 0x64, 0x42 },
unicodeslot=0x33C8,
},
[0x33C9]={
@@ -96174,7 +99007,7 @@ characters.data={
description="SQUARE GY",
direction="l",
linebreak="id",
- specials={ "square", 0x0047, 0x0079 },
+ specials={ "square", 0x47, 0x79 },
unicodeslot=0x33C9,
},
[0x33CA]={
@@ -96184,7 +99017,7 @@ characters.data={
description="SQUARE HA",
direction="l",
linebreak="id",
- specials={ "square", 0x0068, 0x0061 },
+ specials={ "square", 0x68, 0x61 },
unicodeslot=0x33CA,
},
[0x33CB]={
@@ -96194,7 +99027,7 @@ characters.data={
description="SQUARE HP",
direction="l",
linebreak="id",
- specials={ "square", 0x0048, 0x0050 },
+ specials={ "square", 0x48, 0x50 },
unicodeslot=0x33CB,
},
[0x33CC]={
@@ -96203,7 +99036,7 @@ characters.data={
description="SQUARE IN",
direction="l",
linebreak="id",
- specials={ "square", 0x0069, 0x006E },
+ specials={ "square", 0x69, 0x6E },
unicodeslot=0x33CC,
},
[0x33CD]={
@@ -96213,7 +99046,7 @@ characters.data={
description="SQUARE KK",
direction="l",
linebreak="id",
- specials={ "square", 0x004B, 0x004B },
+ specials={ "square", 0x4B, 0x4B },
unicodeslot=0x33CD,
},
[0x33CE]={
@@ -96223,7 +99056,7 @@ characters.data={
description="SQUARE KM CAPITAL",
direction="l",
linebreak="id",
- specials={ "square", 0x004B, 0x004D },
+ specials={ "square", 0x4B, 0x4D },
unicodeslot=0x33CE,
},
[0x33CF]={
@@ -96233,7 +99066,7 @@ characters.data={
description="SQUARE KT",
direction="l",
linebreak="id",
- specials={ "square", 0x006B, 0x0074 },
+ specials={ "square", 0x6B, 0x74 },
unicodeslot=0x33CF,
},
[0x33D0]={
@@ -96243,7 +99076,7 @@ characters.data={
description="SQUARE LM",
direction="l",
linebreak="id",
- specials={ "square", 0x006C, 0x006D },
+ specials={ "square", 0x6C, 0x6D },
unicodeslot=0x33D0,
},
[0x33D1]={
@@ -96253,7 +99086,7 @@ characters.data={
description="SQUARE LN",
direction="l",
linebreak="id",
- specials={ "square", 0x006C, 0x006E },
+ specials={ "square", 0x6C, 0x6E },
unicodeslot=0x33D1,
},
[0x33D2]={
@@ -96263,7 +99096,7 @@ characters.data={
description="SQUARE LOG",
direction="l",
linebreak="id",
- specials={ "square", 0x006C, 0x006F, 0x0067 },
+ specials={ "square", 0x6C, 0x6F, 0x67 },
unicodeslot=0x33D2,
},
[0x33D3]={
@@ -96273,7 +99106,7 @@ characters.data={
description="SQUARE LX",
direction="l",
linebreak="id",
- specials={ "square", 0x006C, 0x0078 },
+ specials={ "square", 0x6C, 0x78 },
unicodeslot=0x33D3,
},
[0x33D4]={
@@ -96283,7 +99116,7 @@ characters.data={
description="SQUARE MB SMALL",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0062 },
+ specials={ "square", 0x6D, 0x62 },
unicodeslot=0x33D4,
},
[0x33D5]={
@@ -96293,7 +99126,7 @@ characters.data={
description="SQUARE MIL",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x0069, 0x006C },
+ specials={ "square", 0x6D, 0x69, 0x6C },
unicodeslot=0x33D5,
},
[0x33D6]={
@@ -96303,7 +99136,7 @@ characters.data={
description="SQUARE MOL",
direction="l",
linebreak="id",
- specials={ "square", 0x006D, 0x006F, 0x006C },
+ specials={ "square", 0x6D, 0x6F, 0x6C },
unicodeslot=0x33D6,
},
[0x33D7]={
@@ -96312,7 +99145,7 @@ characters.data={
description="SQUARE PH",
direction="l",
linebreak="id",
- specials={ "square", 0x0050, 0x0048 },
+ specials={ "square", 0x50, 0x48 },
unicodeslot=0x33D7,
},
[0x33D8]={
@@ -96322,7 +99155,7 @@ characters.data={
description="SQUARE PM",
direction="l",
linebreak="id",
- specials={ "square", 0x0070, 0x002E, 0x006D, 0x002E },
+ specials={ "square", 0x70, 0x2E, 0x6D, 0x2E },
unicodeslot=0x33D8,
},
[0x33D9]={
@@ -96331,7 +99164,7 @@ characters.data={
description="SQUARE PPM",
direction="l",
linebreak="id",
- specials={ "square", 0x0050, 0x0050, 0x004D },
+ specials={ "square", 0x50, 0x50, 0x4D },
unicodeslot=0x33D9,
},
[0x33DA]={
@@ -96340,7 +99173,7 @@ characters.data={
description="SQUARE PR",
direction="l",
linebreak="id",
- specials={ "square", 0x0050, 0x0052 },
+ specials={ "square", 0x50, 0x52 },
unicodeslot=0x33DA,
},
[0x33DB]={
@@ -96350,7 +99183,7 @@ characters.data={
description="SQUARE SR",
direction="l",
linebreak="id",
- specials={ "square", 0x0073, 0x0072 },
+ specials={ "square", 0x73, 0x72 },
unicodeslot=0x33DB,
},
[0x33DC]={
@@ -96360,7 +99193,7 @@ characters.data={
description="SQUARE SV",
direction="l",
linebreak="id",
- specials={ "square", 0x0053, 0x0076 },
+ specials={ "square", 0x53, 0x76 },
unicodeslot=0x33DC,
},
[0x33DD]={
@@ -96370,7 +99203,7 @@ characters.data={
description="SQUARE WB",
direction="l",
linebreak="id",
- specials={ "square", 0x0057, 0x0062 },
+ specials={ "square", 0x57, 0x62 },
unicodeslot=0x33DD,
},
[0x33DE]={
@@ -96379,7 +99212,7 @@ characters.data={
description="SQUARE V OVER M",
direction="on",
linebreak="id",
- specials={ "square", 0x0056, 0x2215, 0x006D },
+ specials={ "square", 0x56, 0x2215, 0x6D },
unicodeslot=0x33DE,
},
[0x33DF]={
@@ -96388,7 +99221,7 @@ characters.data={
description="SQUARE A OVER M",
direction="on",
linebreak="id",
- specials={ "square", 0x0041, 0x2215, 0x006D },
+ specials={ "square", 0x41, 0x2215, 0x6D },
unicodeslot=0x33DF,
},
[0x33E0]={
@@ -96397,7 +99230,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ONE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x65E5 },
+ specials={ "compat", 0x31, 0x65E5 },
unicodeslot=0x33E0,
},
[0x33E1]={
@@ -96406,7 +99239,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWO",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x65E5 },
+ specials={ "compat", 0x32, 0x65E5 },
unicodeslot=0x33E1,
},
[0x33E2]={
@@ -96415,7 +99248,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THREE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0033, 0x65E5 },
+ specials={ "compat", 0x33, 0x65E5 },
unicodeslot=0x33E2,
},
[0x33E3]={
@@ -96424,7 +99257,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOUR",
direction="l",
linebreak="id",
- specials={ "compat", 0x0034, 0x65E5 },
+ specials={ "compat", 0x34, 0x65E5 },
unicodeslot=0x33E3,
},
[0x33E4]={
@@ -96433,7 +99266,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FIVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0035, 0x65E5 },
+ specials={ "compat", 0x35, 0x65E5 },
unicodeslot=0x33E4,
},
[0x33E5]={
@@ -96442,7 +99275,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIX",
direction="l",
linebreak="id",
- specials={ "compat", 0x0036, 0x65E5 },
+ specials={ "compat", 0x36, 0x65E5 },
unicodeslot=0x33E5,
},
[0x33E6]={
@@ -96451,7 +99284,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0037, 0x65E5 },
+ specials={ "compat", 0x37, 0x65E5 },
unicodeslot=0x33E6,
},
[0x33E7]={
@@ -96460,7 +99293,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHT",
direction="l",
linebreak="id",
- specials={ "compat", 0x0038, 0x65E5 },
+ specials={ "compat", 0x38, 0x65E5 },
unicodeslot=0x33E7,
},
[0x33E8]={
@@ -96469,7 +99302,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0039, 0x65E5 },
+ specials={ "compat", 0x39, 0x65E5 },
unicodeslot=0x33E8,
},
[0x33E9]={
@@ -96478,7 +99311,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0030, 0x65E5 },
+ specials={ "compat", 0x31, 0x30, 0x65E5 },
unicodeslot=0x33E9,
},
[0x33EA]={
@@ -96487,7 +99320,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ELEVEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0031, 0x65E5 },
+ specials={ "compat", 0x31, 0x31, 0x65E5 },
unicodeslot=0x33EA,
},
[0x33EB]={
@@ -96496,7 +99329,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWELVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0032, 0x65E5 },
+ specials={ "compat", 0x31, 0x32, 0x65E5 },
unicodeslot=0x33EB,
},
[0x33EC]={
@@ -96505,7 +99338,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0033, 0x65E5 },
+ specials={ "compat", 0x31, 0x33, 0x65E5 },
unicodeslot=0x33EC,
},
[0x33ED]={
@@ -96514,7 +99347,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOURTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0034, 0x65E5 },
+ specials={ "compat", 0x31, 0x34, 0x65E5 },
unicodeslot=0x33ED,
},
[0x33EE]={
@@ -96523,7 +99356,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FIFTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0035, 0x65E5 },
+ specials={ "compat", 0x31, 0x35, 0x65E5 },
unicodeslot=0x33EE,
},
[0x33EF]={
@@ -96532,7 +99365,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIXTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0036, 0x65E5 },
+ specials={ "compat", 0x31, 0x36, 0x65E5 },
unicodeslot=0x33EF,
},
[0x33F0]={
@@ -96541,7 +99374,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVENTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0037, 0x65E5 },
+ specials={ "compat", 0x31, 0x37, 0x65E5 },
unicodeslot=0x33F0,
},
[0x33F1]={
@@ -96550,7 +99383,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHTEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0038, 0x65E5 },
+ specials={ "compat", 0x31, 0x38, 0x65E5 },
unicodeslot=0x33F1,
},
[0x33F2]={
@@ -96559,7 +99392,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINETEEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0031, 0x0039, 0x65E5 },
+ specials={ "compat", 0x31, 0x39, 0x65E5 },
unicodeslot=0x33F2,
},
[0x33F3]={
@@ -96568,7 +99401,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0030, 0x65E5 },
+ specials={ "compat", 0x32, 0x30, 0x65E5 },
unicodeslot=0x33F3,
},
[0x33F4]={
@@ -96577,7 +99410,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-ONE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0031, 0x65E5 },
+ specials={ "compat", 0x32, 0x31, 0x65E5 },
unicodeslot=0x33F4,
},
[0x33F5]={
@@ -96586,7 +99419,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-TWO",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0032, 0x65E5 },
+ specials={ "compat", 0x32, 0x32, 0x65E5 },
unicodeslot=0x33F5,
},
[0x33F6]={
@@ -96595,7 +99428,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-THREE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0033, 0x65E5 },
+ specials={ "compat", 0x32, 0x33, 0x65E5 },
unicodeslot=0x33F6,
},
[0x33F7]={
@@ -96604,7 +99437,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-FOUR",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0034, 0x65E5 },
+ specials={ "compat", 0x32, 0x34, 0x65E5 },
unicodeslot=0x33F7,
},
[0x33F8]={
@@ -96613,7 +99446,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-FIVE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0035, 0x65E5 },
+ specials={ "compat", 0x32, 0x35, 0x65E5 },
unicodeslot=0x33F8,
},
[0x33F9]={
@@ -96622,7 +99455,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-SIX",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0036, 0x65E5 },
+ specials={ "compat", 0x32, 0x36, 0x65E5 },
unicodeslot=0x33F9,
},
[0x33FA]={
@@ -96631,7 +99464,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-SEVEN",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0037, 0x65E5 },
+ specials={ "compat", 0x32, 0x37, 0x65E5 },
unicodeslot=0x33FA,
},
[0x33FB]={
@@ -96640,7 +99473,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-EIGHT",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0038, 0x65E5 },
+ specials={ "compat", 0x32, 0x38, 0x65E5 },
unicodeslot=0x33FB,
},
[0x33FC]={
@@ -96649,7 +99482,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-NINE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0032, 0x0039, 0x65E5 },
+ specials={ "compat", 0x32, 0x39, 0x65E5 },
unicodeslot=0x33FC,
},
[0x33FD]={
@@ -96658,7 +99491,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTY",
direction="l",
linebreak="id",
- specials={ "compat", 0x0033, 0x0030, 0x65E5 },
+ specials={ "compat", 0x33, 0x30, 0x65E5 },
unicodeslot=0x33FD,
},
[0x33FE]={
@@ -96667,7 +99500,7 @@ characters.data={
description="IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTY-ONE",
direction="l",
linebreak="id",
- specials={ "compat", 0x0033, 0x0031, 0x65E5 },
+ specials={ "compat", 0x33, 0x31, 0x65E5 },
unicodeslot=0x33FE,
},
[0x33FF]={
@@ -96676,7 +99509,7 @@ characters.data={
description="SQUARE GAL",
direction="on",
linebreak="id",
- specials={ "square", 0x0067, 0x0061, 0x006C },
+ specials={ "square", 0x67, 0x61, 0x6C },
unicodeslot=0x33FF,
},
[0x4DC0]={
@@ -109654,6 +112487,7 @@ characters.data={
},
[0xA66F]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC VZMET",
direction="nsm",
linebreak="cm",
@@ -109689,6 +112523,7 @@ characters.data={
},
[0xA674]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER UKRAINIAN IE",
direction="nsm",
linebreak="cm",
@@ -109696,6 +112531,7 @@ characters.data={
},
[0xA675]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER I",
direction="nsm",
linebreak="cm",
@@ -109703,6 +112539,7 @@ characters.data={
},
[0xA676]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER YI",
direction="nsm",
linebreak="cm",
@@ -109710,6 +112547,7 @@ characters.data={
},
[0xA677]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER U",
direction="nsm",
linebreak="cm",
@@ -109717,6 +112555,7 @@ characters.data={
},
[0xA678]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER HARD SIGN",
direction="nsm",
linebreak="cm",
@@ -109724,6 +112563,7 @@ characters.data={
},
[0xA679]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER YERU",
direction="nsm",
linebreak="cm",
@@ -109731,6 +112571,7 @@ characters.data={
},
[0xA67A]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER SOFT SIGN",
direction="nsm",
linebreak="cm",
@@ -109738,6 +112579,7 @@ characters.data={
},
[0xA67B]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER OMEGA",
direction="nsm",
linebreak="cm",
@@ -109745,6 +112587,7 @@ characters.data={
},
[0xA67C]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC KAVYKA",
direction="nsm",
linebreak="cm",
@@ -109752,6 +112595,7 @@ characters.data={
},
[0xA67D]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC PAYEROK",
direction="nsm",
linebreak="cm",
@@ -109939,8 +112783,53 @@ characters.data={
linebreak="al",
unicodeslot=0xA697,
},
+ [0xA698]={
+ category="lu",
+ description="CYRILLIC CAPITAL LETTER DOUBLE O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA698,
+ },
+ [0xA699]={
+ category="ll",
+ description="CYRILLIC SMALL LETTER DOUBLE O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA699,
+ },
+ [0xA69A]={
+ category="lu",
+ description="CYRILLIC CAPITAL LETTER CROSSED O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA69A,
+ },
+ [0xA69B]={
+ category="ll",
+ description="CYRILLIC SMALL LETTER CROSSED O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA69B,
+ },
+ [0xA69C]={
+ category="lm",
+ description="MODIFIER LETTER CYRILLIC HARD SIGN",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0x44A },
+ unicodeslot=0xA69C,
+ },
+ [0xA69D]={
+ category="lm",
+ description="MODIFIER LETTER CYRILLIC SOFT SIGN",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0x44C },
+ unicodeslot=0xA69D,
+ },
[0xA69F]={
category="mn",
+ combining=0xE6,
description="COMBINING CYRILLIC LETTER IOTIFIED E",
direction="nsm",
linebreak="cm",
@@ -110508,6 +113397,7 @@ characters.data={
},
[0xA6F0]={
category="mn",
+ combining=0xE6,
description="BAMUM COMBINING MARK KOQNDON",
direction="nsm",
linebreak="cm",
@@ -110515,6 +113405,7 @@ characters.data={
},
[0xA6F1]={
category="mn",
+ combining=0xE6,
description="BAMUM COMBINING MARK TUKWENTIS",
direction="nsm",
linebreak="cm",
@@ -111592,6 +114483,90 @@ characters.data={
linebreak="al",
unicodeslot=0xA793,
},
+ [0xA794]={
+ category="ll",
+ description="LATIN SMALL LETTER C WITH PALATAL HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA794,
+ },
+ [0xA795]={
+ category="ll",
+ description="LATIN SMALL LETTER H WITH PALATAL HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA795,
+ },
+ [0xA796]={
+ category="lu",
+ description="LATIN CAPITAL LETTER B WITH FLOURISH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA796,
+ },
+ [0xA797]={
+ category="ll",
+ description="LATIN SMALL LETTER B WITH FLOURISH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA797,
+ },
+ [0xA798]={
+ category="lu",
+ description="LATIN CAPITAL LETTER F WITH STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA798,
+ },
+ [0xA799]={
+ category="ll",
+ description="LATIN SMALL LETTER F WITH STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA799,
+ },
+ [0xA79A]={
+ category="lu",
+ description="LATIN CAPITAL LETTER VOLAPUK AE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA79A,
+ },
+ [0xA79B]={
+ category="ll",
+ description="LATIN SMALL LETTER VOLAPUK AE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA79B,
+ },
+ [0xA79C]={
+ category="lu",
+ description="LATIN CAPITAL LETTER VOLAPUK OE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA79C,
+ },
+ [0xA79D]={
+ category="ll",
+ description="LATIN SMALL LETTER VOLAPUK OE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA79D,
+ },
+ [0xA79E]={
+ category="lu",
+ description="LATIN CAPITAL LETTER VOLAPUK UE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA79E,
+ },
+ [0xA79F]={
+ category="ll",
+ description="LATIN SMALL LETTER VOLAPUK UE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA79F,
+ },
[0xA7A0]={
category="lu",
description="LATIN CAPITAL LETTER G WITH OBLIQUE STROKE",
@@ -111669,12 +114644,54 @@ characters.data={
linebreak="al",
unicodeslot=0xA7AA,
},
+ [0xA7AB]={
+ category="lu",
+ description="LATIN CAPITAL LETTER REVERSED OPEN E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7AB,
+ },
+ [0xA7AC]={
+ category="lu",
+ description="LATIN CAPITAL LETTER SCRIPT G",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7AC,
+ },
+ [0xA7AD]={
+ category="lu",
+ description="LATIN CAPITAL LETTER L WITH BELT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7AD,
+ },
+ [0xA7B0]={
+ category="lu",
+ description="LATIN CAPITAL LETTER TURNED K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7B0,
+ },
+ [0xA7B1]={
+ category="lu",
+ description="LATIN CAPITAL LETTER TURNED T",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7B1,
+ },
+ [0xA7F7]={
+ category="lo",
+ description="LATIN EPIGRAPHIC LETTER SIDEWAYS I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xA7F7,
+ },
[0xA7F8]={
category="lm",
description="MODIFIER LETTER CAPITAL H WITH STROKE",
direction="l",
linebreak="al",
- specials={ "super", 0x0126 },
+ specials={ "super", 0x126 },
unicodeslot=0xA7F8,
},
[0xA7F9]={
@@ -111682,7 +114699,7 @@ characters.data={
description="MODIFIER LETTER SMALL LIGATURE OE",
direction="l",
linebreak="al",
- specials={ "super", 0x0153 },
+ specials={ "super", 0x153 },
unicodeslot=0xA7F9,
},
[0xA7FA]={
@@ -111771,6 +114788,7 @@ characters.data={
},
[0xA806]={
category="mn",
+ combining=0x9,
description="SYLOTI NAGRI SIGN HASANTA",
direction="nsm",
linebreak="cm",
@@ -112106,6 +115124,7 @@ characters.data={
unicodeslot=0xA839,
},
[0xA840]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER KA",
direction="l",
@@ -112113,6 +115132,7 @@ characters.data={
unicodeslot=0xA840,
},
[0xA841]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER KHA",
direction="l",
@@ -112120,6 +115140,7 @@ characters.data={
unicodeslot=0xA841,
},
[0xA842]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER GA",
direction="l",
@@ -112127,6 +115148,7 @@ characters.data={
unicodeslot=0xA842,
},
[0xA843]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER NGA",
direction="l",
@@ -112134,6 +115156,7 @@ characters.data={
unicodeslot=0xA843,
},
[0xA844]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER CA",
direction="l",
@@ -112141,6 +115164,7 @@ characters.data={
unicodeslot=0xA844,
},
[0xA845]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER CHA",
direction="l",
@@ -112148,6 +115172,7 @@ characters.data={
unicodeslot=0xA845,
},
[0xA846]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER JA",
direction="l",
@@ -112155,6 +115180,7 @@ characters.data={
unicodeslot=0xA846,
},
[0xA847]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER NYA",
direction="l",
@@ -112162,6 +115188,7 @@ characters.data={
unicodeslot=0xA847,
},
[0xA848]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER TA",
direction="l",
@@ -112169,6 +115196,7 @@ characters.data={
unicodeslot=0xA848,
},
[0xA849]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER THA",
direction="l",
@@ -112176,6 +115204,7 @@ characters.data={
unicodeslot=0xA849,
},
[0xA84A]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER DA",
direction="l",
@@ -112183,6 +115212,7 @@ characters.data={
unicodeslot=0xA84A,
},
[0xA84B]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER NA",
direction="l",
@@ -112190,6 +115220,7 @@ characters.data={
unicodeslot=0xA84B,
},
[0xA84C]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER PA",
direction="l",
@@ -112197,6 +115228,7 @@ characters.data={
unicodeslot=0xA84C,
},
[0xA84D]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER PHA",
direction="l",
@@ -112204,6 +115236,7 @@ characters.data={
unicodeslot=0xA84D,
},
[0xA84E]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER BA",
direction="l",
@@ -112211,6 +115244,7 @@ characters.data={
unicodeslot=0xA84E,
},
[0xA84F]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER MA",
direction="l",
@@ -112218,6 +115252,7 @@ characters.data={
unicodeslot=0xA84F,
},
[0xA850]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER TSA",
direction="l",
@@ -112225,6 +115260,7 @@ characters.data={
unicodeslot=0xA850,
},
[0xA851]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER TSHA",
direction="l",
@@ -112232,6 +115268,7 @@ characters.data={
unicodeslot=0xA851,
},
[0xA852]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER DZA",
direction="l",
@@ -112239,6 +115276,7 @@ characters.data={
unicodeslot=0xA852,
},
[0xA853]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER WA",
direction="l",
@@ -112246,6 +115284,7 @@ characters.data={
unicodeslot=0xA853,
},
[0xA854]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER ZHA",
direction="l",
@@ -112253,6 +115292,7 @@ characters.data={
unicodeslot=0xA854,
},
[0xA855]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER ZA",
direction="l",
@@ -112260,6 +115300,7 @@ characters.data={
unicodeslot=0xA855,
},
[0xA856]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER SMALL A",
direction="l",
@@ -112270,6 +115311,7 @@ characters.data={
},
},
[0xA857]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER YA",
direction="l",
@@ -112277,6 +115319,7 @@ characters.data={
unicodeslot=0xA857,
},
[0xA858]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER RA",
direction="l",
@@ -112284,6 +115327,7 @@ characters.data={
unicodeslot=0xA858,
},
[0xA859]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER LA",
direction="l",
@@ -112291,6 +115335,7 @@ characters.data={
unicodeslot=0xA859,
},
[0xA85A]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER SHA",
direction="l",
@@ -112298,6 +115343,7 @@ characters.data={
unicodeslot=0xA85A,
},
[0xA85B]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER SA",
direction="l",
@@ -112305,6 +115351,7 @@ characters.data={
unicodeslot=0xA85B,
},
[0xA85C]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER HA",
direction="l",
@@ -112315,6 +115362,7 @@ characters.data={
},
},
[0xA85D]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER A",
direction="l",
@@ -112322,6 +115370,7 @@ characters.data={
unicodeslot=0xA85D,
},
[0xA85E]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER I",
direction="l",
@@ -112332,6 +115381,7 @@ characters.data={
},
},
[0xA85F]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER U",
direction="l",
@@ -112342,6 +115392,7 @@ characters.data={
},
},
[0xA860]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER E",
direction="l",
@@ -112352,6 +115403,7 @@ characters.data={
},
},
[0xA861]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER O",
direction="l",
@@ -112359,6 +115411,7 @@ characters.data={
unicodeslot=0xA861,
},
[0xA862]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER QA",
direction="l",
@@ -112366,6 +115419,7 @@ characters.data={
unicodeslot=0xA862,
},
[0xA863]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER XA",
direction="l",
@@ -112373,6 +115427,7 @@ characters.data={
unicodeslot=0xA863,
},
[0xA864]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER FA",
direction="l",
@@ -112380,6 +115435,7 @@ characters.data={
unicodeslot=0xA864,
},
[0xA865]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER GGA",
direction="l",
@@ -112387,6 +115443,7 @@ characters.data={
unicodeslot=0xA865,
},
[0xA866]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER EE",
direction="l",
@@ -112394,6 +115451,7 @@ characters.data={
unicodeslot=0xA866,
},
[0xA867]={
+ arabic="d",
category="lo",
description="PHAGS-PA SUBJOINED LETTER WA",
direction="l",
@@ -112401,6 +115459,7 @@ characters.data={
unicodeslot=0xA867,
},
[0xA868]={
+ arabic="d",
category="lo",
description="PHAGS-PA SUBJOINED LETTER YA",
direction="l",
@@ -112411,6 +115470,7 @@ characters.data={
},
},
[0xA869]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER TTA",
direction="l",
@@ -112418,6 +115478,7 @@ characters.data={
unicodeslot=0xA869,
},
[0xA86A]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER TTHA",
direction="l",
@@ -112425,6 +115486,7 @@ characters.data={
unicodeslot=0xA86A,
},
[0xA86B]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER DDA",
direction="l",
@@ -112432,6 +115494,7 @@ characters.data={
unicodeslot=0xA86B,
},
[0xA86C]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER NNA",
direction="l",
@@ -112439,6 +115502,7 @@ characters.data={
unicodeslot=0xA86C,
},
[0xA86D]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER ALTERNATE YA",
direction="l",
@@ -112446,6 +115510,7 @@ characters.data={
unicodeslot=0xA86D,
},
[0xA86E]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER VOICELESS SHA",
direction="l",
@@ -112453,6 +115518,7 @@ characters.data={
unicodeslot=0xA86E,
},
[0xA86F]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER VOICED HA",
direction="l",
@@ -112460,6 +115526,7 @@ characters.data={
unicodeslot=0xA86F,
},
[0xA870]={
+ arabic="d",
category="lo",
description="PHAGS-PA LETTER ASPIRATED FA",
direction="l",
@@ -112467,6 +115534,7 @@ characters.data={
unicodeslot=0xA870,
},
[0xA871]={
+ arabic="d",
category="lo",
description="PHAGS-PA SUBJOINED LETTER RA",
direction="l",
@@ -112474,6 +115542,7 @@ characters.data={
unicodeslot=0xA871,
},
[0xA872]={
+ arabic="l",
category="lo",
description="PHAGS-PA SUPERFIXED LETTER RA",
direction="l",
@@ -112481,6 +115550,7 @@ characters.data={
unicodeslot=0xA872,
},
[0xA873]={
+ arabic="u",
category="lo",
description="PHAGS-PA LETTER CANDRABINDU",
direction="l",
@@ -112993,6 +116063,7 @@ characters.data={
},
[0xA8C4]={
category="mn",
+ combining=0x9,
description="SAURASHTRA SIGN VIRAMA",
direction="nsm",
linebreak="cm",
@@ -113084,6 +116155,7 @@ characters.data={
},
[0xA8E0]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT ZERO",
direction="nsm",
linebreak="cm",
@@ -113091,6 +116163,7 @@ characters.data={
},
[0xA8E1]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT ONE",
direction="nsm",
linebreak="cm",
@@ -113098,6 +116171,7 @@ characters.data={
},
[0xA8E2]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT TWO",
direction="nsm",
linebreak="cm",
@@ -113105,6 +116179,7 @@ characters.data={
},
[0xA8E3]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT THREE",
direction="nsm",
linebreak="cm",
@@ -113112,6 +116187,7 @@ characters.data={
},
[0xA8E4]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT FOUR",
direction="nsm",
linebreak="cm",
@@ -113119,6 +116195,7 @@ characters.data={
},
[0xA8E5]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT FIVE",
direction="nsm",
linebreak="cm",
@@ -113126,6 +116203,7 @@ characters.data={
},
[0xA8E6]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT SIX",
direction="nsm",
linebreak="cm",
@@ -113133,6 +116211,7 @@ characters.data={
},
[0xA8E7]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT SEVEN",
direction="nsm",
linebreak="cm",
@@ -113140,6 +116219,7 @@ characters.data={
},
[0xA8E8]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT EIGHT",
direction="nsm",
linebreak="cm",
@@ -113147,6 +116227,7 @@ characters.data={
},
[0xA8E9]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI DIGIT NINE",
direction="nsm",
linebreak="cm",
@@ -113154,6 +116235,7 @@ characters.data={
},
[0xA8EA]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER A",
direction="nsm",
linebreak="cm",
@@ -113161,6 +116243,7 @@ characters.data={
},
[0xA8EB]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER U",
direction="nsm",
linebreak="cm",
@@ -113168,6 +116251,7 @@ characters.data={
},
[0xA8EC]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER KA",
direction="nsm",
linebreak="cm",
@@ -113175,6 +116259,7 @@ characters.data={
},
[0xA8ED]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER NA",
direction="nsm",
linebreak="cm",
@@ -113182,6 +116267,7 @@ characters.data={
},
[0xA8EE]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER PA",
direction="nsm",
linebreak="cm",
@@ -113189,6 +116275,7 @@ characters.data={
},
[0xA8EF]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER RA",
direction="nsm",
linebreak="cm",
@@ -113196,6 +116283,7 @@ characters.data={
},
[0xA8F0]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI LETTER VI",
direction="nsm",
linebreak="cm",
@@ -113203,6 +116291,7 @@ characters.data={
},
[0xA8F1]={
category="mn",
+ combining=0xE6,
description="COMBINING DEVANAGARI SIGN AVAGRAHA",
direction="nsm",
linebreak="cm",
@@ -113581,6 +116670,7 @@ characters.data={
},
[0xA92B]={
category="mn",
+ combining=0xDC,
description="KAYAH LI TONE PLOPHU",
direction="nsm",
linebreak="cm",
@@ -113588,6 +116678,7 @@ characters.data={
},
[0xA92C]={
category="mn",
+ combining=0xDC,
description="KAYAH LI TONE CALYA",
direction="nsm",
linebreak="cm",
@@ -113595,6 +116686,7 @@ characters.data={
},
[0xA92D]={
category="mn",
+ combining=0xDC,
description="KAYAH LI TONE CALYA PLOPHU",
direction="nsm",
linebreak="cm",
@@ -113861,6 +116953,7 @@ characters.data={
},
[0xA953]={
category="mc",
+ combining=0x9,
description="REJANG VIRAMA",
direction="l",
linebreak="cm",
@@ -114464,6 +117557,7 @@ characters.data={
},
[0xA9B3]={
category="mn",
+ combining=0x7,
description="JAVANESE SIGN CECAK TELU",
direction="nsm",
linebreak="cm",
@@ -114555,6 +117649,7 @@ characters.data={
},
[0xA9C0]={
category="mc",
+ combining=0x9,
description="JAVANESE PANGKON",
direction="l",
linebreak="cm",
@@ -114742,6 +117837,223 @@ characters.data={
linebreak="al",
unicodeslot=0xA9DF,
},
+ [0xA9E0]={
+ category="lo",
+ description="MYANMAR LETTER SHAN GHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E0,
+ },
+ [0xA9E1]={
+ category="lo",
+ description="MYANMAR LETTER SHAN CHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E1,
+ },
+ [0xA9E2]={
+ category="lo",
+ description="MYANMAR LETTER SHAN JHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E2,
+ },
+ [0xA9E3]={
+ category="lo",
+ description="MYANMAR LETTER SHAN NNA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E3,
+ },
+ [0xA9E4]={
+ category="lo",
+ description="MYANMAR LETTER SHAN BHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E4,
+ },
+ [0xA9E5]={
+ category="mn",
+ description="MYANMAR SIGN SHAN SAW",
+ direction="nsm",
+ linebreak="sa",
+ unicodeslot=0xA9E5,
+ },
+ [0xA9E6]={
+ category="lm",
+ description="MYANMAR MODIFIER LETTER SHAN REDUPLICATION",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E6,
+ },
+ [0xA9E7]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING NYA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E7,
+ },
+ [0xA9E8]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING FA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E8,
+ },
+ [0xA9E9]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING GA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9E9,
+ },
+ [0xA9EA]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING GHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9EA,
+ },
+ [0xA9EB]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING JA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9EB,
+ },
+ [0xA9EC]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING JHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9EC,
+ },
+ [0xA9ED]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING DDA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9ED,
+ },
+ [0xA9EE]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING DDHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9EE,
+ },
+ [0xA9EF]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING NNA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9EF,
+ },
+ [0xA9F0]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F0,
+ },
+ [0xA9F1]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F1,
+ },
+ [0xA9F2]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F2,
+ },
+ [0xA9F3]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F3,
+ },
+ [0xA9F4]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F4,
+ },
+ [0xA9F5]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F5,
+ },
+ [0xA9F6]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F6,
+ },
+ [0xA9F7]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F7,
+ },
+ [0xA9F8]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F8,
+ },
+ [0xA9F9]={
+ category="nd",
+ description="MYANMAR TAI LAING DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0xA9F9,
+ },
+ [0xA9FA]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING LLA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9FA,
+ },
+ [0xA9FB]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING DA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9FB,
+ },
+ [0xA9FC]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING DHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9FC,
+ },
+ [0xA9FD]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING BA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9FD,
+ },
+ [0xA9FE]={
+ category="lo",
+ description="MYANMAR LETTER TAI LAING BHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xA9FE,
+ },
[0xAA00]={
category="lo",
description="CHAM LETTER A",
@@ -115519,6 +118831,34 @@ characters.data={
linebreak="sa",
unicodeslot=0xAA7B,
},
+ [0xAA7C]={
+ category="mn",
+ description="MYANMAR SIGN TAI LAING TONE-2",
+ direction="nsm",
+ linebreak="sa",
+ unicodeslot=0xAA7C,
+ },
+ [0xAA7D]={
+ category="mc",
+ description="MYANMAR SIGN TAI LAING TONE-5",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xAA7D,
+ },
+ [0xAA7E]={
+ category="lo",
+ description="MYANMAR LETTER SHWE PALAUNG CHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xAA7E,
+ },
+ [0xAA7F]={
+ category="lo",
+ description="MYANMAR LETTER SHWE PALAUNG SHA",
+ direction="l",
+ linebreak="sa",
+ unicodeslot=0xAA7F,
+ },
[0xAA80]={
category="lo",
description="TAI VIET LETTER LOW KO",
@@ -115857,6 +119197,7 @@ characters.data={
},
[0xAAB0]={
category="mn",
+ combining=0xE6,
description="TAI VIET MAI KANG",
direction="nsm",
linebreak="sa",
@@ -115871,6 +119212,7 @@ characters.data={
},
[0xAAB2]={
category="mn",
+ combining=0xE6,
description="TAI VIET VOWEL I",
direction="nsm",
linebreak="sa",
@@ -115878,6 +119220,7 @@ characters.data={
},
[0xAAB3]={
category="mn",
+ combining=0xE6,
description="TAI VIET VOWEL UE",
direction="nsm",
linebreak="sa",
@@ -115885,6 +119228,7 @@ characters.data={
},
[0xAAB4]={
category="mn",
+ combining=0xDC,
description="TAI VIET VOWEL U",
direction="nsm",
linebreak="sa",
@@ -115906,6 +119250,7 @@ characters.data={
},
[0xAAB7]={
category="mn",
+ combining=0xE6,
description="TAI VIET MAI KHIT",
direction="nsm",
linebreak="sa",
@@ -115913,6 +119258,7 @@ characters.data={
},
[0xAAB8]={
category="mn",
+ combining=0xE6,
description="TAI VIET VOWEL IA",
direction="nsm",
linebreak="sa",
@@ -115955,6 +119301,7 @@ characters.data={
},
[0xAABE]={
category="mn",
+ combining=0xE6,
description="TAI VIET VOWEL AM",
direction="nsm",
linebreak="sa",
@@ -115962,6 +119309,7 @@ characters.data={
},
[0xAABF]={
category="mn",
+ combining=0xE6,
description="TAI VIET TONE MAI EK",
direction="nsm",
linebreak="sa",
@@ -115976,6 +119324,7 @@ characters.data={
},
[0xAAC1]={
category="mn",
+ combining=0xE6,
description="TAI VIET TONE MAI THO",
direction="nsm",
linebreak="sa",
@@ -116179,6 +119528,7 @@ characters.data={
},
[0xAAF6]={
category="mn",
+ combining=0x9,
description="MEETEI MAYEK VIRAMA",
direction="nsm",
linebreak="cm",
@@ -116408,6 +119758,360 @@ characters.data={
linebreak="al",
unicodeslot=0xAB2E,
},
+ [0xAB30]={
+ category="ll",
+ description="LATIN SMALL LETTER BARRED ALPHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB30,
+ },
+ [0xAB31]={
+ category="ll",
+ description="LATIN SMALL LETTER A REVERSED-SCHWA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB31,
+ },
+ [0xAB32]={
+ category="ll",
+ description="LATIN SMALL LETTER BLACKLETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB32,
+ },
+ [0xAB33]={
+ category="ll",
+ description="LATIN SMALL LETTER BARRED E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB33,
+ },
+ [0xAB34]={
+ category="ll",
+ description="LATIN SMALL LETTER E WITH FLOURISH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB34,
+ },
+ [0xAB35]={
+ category="ll",
+ description="LATIN SMALL LETTER LENIS F",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB35,
+ },
+ [0xAB36]={
+ category="ll",
+ description="LATIN SMALL LETTER SCRIPT G WITH CROSSED-TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB36,
+ },
+ [0xAB37]={
+ category="ll",
+ description="LATIN SMALL LETTER L WITH INVERTED LAZY S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB37,
+ },
+ [0xAB38]={
+ category="ll",
+ description="LATIN SMALL LETTER L WITH DOUBLE MIDDLE TILDE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB38,
+ },
+ [0xAB39]={
+ category="ll",
+ description="LATIN SMALL LETTER L WITH MIDDLE RING",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB39,
+ },
+ [0xAB3A]={
+ category="ll",
+ description="LATIN SMALL LETTER M WITH CROSSED-TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB3A,
+ },
+ [0xAB3B]={
+ category="ll",
+ description="LATIN SMALL LETTER N WITH CROSSED-TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB3B,
+ },
+ [0xAB3C]={
+ category="ll",
+ description="LATIN SMALL LETTER ENG WITH CROSSED-TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB3C,
+ },
+ [0xAB3D]={
+ category="ll",
+ description="LATIN SMALL LETTER BLACKLETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB3D,
+ },
+ [0xAB3E]={
+ category="ll",
+ description="LATIN SMALL LETTER BLACKLETTER O WITH STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB3E,
+ },
+ [0xAB3F]={
+ category="ll",
+ description="LATIN SMALL LETTER OPEN O WITH STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB3F,
+ },
+ [0xAB40]={
+ category="ll",
+ description="LATIN SMALL LETTER INVERTED OE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB40,
+ },
+ [0xAB41]={
+ category="ll",
+ description="LATIN SMALL LETTER TURNED OE WITH STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB41,
+ },
+ [0xAB42]={
+ category="ll",
+ description="LATIN SMALL LETTER TURNED OE WITH HORIZONTAL STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB42,
+ },
+ [0xAB43]={
+ category="ll",
+ description="LATIN SMALL LETTER TURNED O OPEN-O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB43,
+ },
+ [0xAB44]={
+ category="ll",
+ description="LATIN SMALL LETTER TURNED O OPEN-O WITH STROKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB44,
+ },
+ [0xAB45]={
+ category="ll",
+ description="LATIN SMALL LETTER STIRRUP R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB45,
+ },
+ [0xAB46]={
+ category="ll",
+ description="LATIN LETTER SMALL CAPITAL R WITH RIGHT LEG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB46,
+ },
+ [0xAB47]={
+ category="ll",
+ description="LATIN SMALL LETTER R WITHOUT HANDLE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB47,
+ },
+ [0xAB48]={
+ category="ll",
+ description="LATIN SMALL LETTER DOUBLE R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB48,
+ },
+ [0xAB49]={
+ category="ll",
+ description="LATIN SMALL LETTER R WITH CROSSED-TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB49,
+ },
+ [0xAB4A]={
+ category="ll",
+ description="LATIN SMALL LETTER DOUBLE R WITH CROSSED-TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB4A,
+ },
+ [0xAB4B]={
+ category="ll",
+ description="LATIN SMALL LETTER SCRIPT R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB4B,
+ },
+ [0xAB4C]={
+ category="ll",
+ description="LATIN SMALL LETTER SCRIPT R WITH RING",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB4C,
+ },
+ [0xAB4D]={
+ category="ll",
+ description="LATIN SMALL LETTER BASELINE ESH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB4D,
+ },
+ [0xAB4E]={
+ category="ll",
+ description="LATIN SMALL LETTER U WITH SHORT RIGHT LEG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB4E,
+ },
+ [0xAB4F]={
+ category="ll",
+ description="LATIN SMALL LETTER U BAR WITH SHORT RIGHT LEG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB4F,
+ },
+ [0xAB50]={
+ category="ll",
+ description="LATIN SMALL LETTER UI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB50,
+ },
+ [0xAB51]={
+ category="ll",
+ description="LATIN SMALL LETTER TURNED UI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB51,
+ },
+ [0xAB52]={
+ category="ll",
+ description="LATIN SMALL LETTER U WITH LEFT HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB52,
+ },
+ [0xAB53]={
+ category="ll",
+ description="LATIN SMALL LETTER CHI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB53,
+ },
+ [0xAB54]={
+ category="ll",
+ description="LATIN SMALL LETTER CHI WITH LOW RIGHT RING",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB54,
+ },
+ [0xAB55]={
+ category="ll",
+ description="LATIN SMALL LETTER CHI WITH LOW LEFT SERIF",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB55,
+ },
+ [0xAB56]={
+ category="ll",
+ description="LATIN SMALL LETTER X WITH LOW RIGHT RING",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB56,
+ },
+ [0xAB57]={
+ category="ll",
+ description="LATIN SMALL LETTER X WITH LONG LEFT LEG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB57,
+ },
+ [0xAB58]={
+ category="ll",
+ description="LATIN SMALL LETTER X WITH LONG LEFT LEG AND LOW RIGHT RING",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB58,
+ },
+ [0xAB59]={
+ category="ll",
+ description="LATIN SMALL LETTER X WITH LONG LEFT LEG WITH SERIF",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB59,
+ },
+ [0xAB5A]={
+ category="ll",
+ description="LATIN SMALL LETTER Y WITH SHORT RIGHT LEG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB5A,
+ },
+ [0xAB5B]={
+ category="sk",
+ description="MODIFIER BREVE WITH INVERTED BREVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB5B,
+ },
+ [0xAB5C]={
+ category="lm",
+ description="MODIFIER LETTER SMALL HENG",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0xA727 },
+ unicodeslot=0xAB5C,
+ },
+ [0xAB5D]={
+ category="lm",
+ description="MODIFIER LETTER SMALL L WITH INVERTED LAZY S",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0xAB37 },
+ unicodeslot=0xAB5D,
+ },
+ [0xAB5E]={
+ category="lm",
+ description="MODIFIER LETTER SMALL L WITH MIDDLE TILDE",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0x26B },
+ unicodeslot=0xAB5E,
+ },
+ [0xAB5F]={
+ category="lm",
+ description="MODIFIER LETTER SMALL U WITH LEFT HOOK",
+ direction="l",
+ linebreak="al",
+ specials={ "super", 0xAB52 },
+ unicodeslot=0xAB5F,
+ },
+ [0xAB64]={
+ category="ll",
+ description="LATIN SMALL LETTER INVERTED ALPHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB64,
+ },
+ [0xAB65]={
+ category="ll",
+ description="GREEK LETTER SMALL CAPITAL OMEGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0xAB65,
+ },
[0xABC0]={
category="lo",
description="MEETEI MAYEK LETTER KOK",
@@ -116725,6 +120429,7 @@ characters.data={
},
[0xABED]={
category="mn",
+ combining=0x9,
description="MEETEI MAYEK APUN IYEK",
direction="nsm",
linebreak="cm",
@@ -121604,7 +125309,7 @@ characters.data={
description="LATIN SMALL LIGATURE FF",
direction="l",
linebreak="al",
- specials={ "compat", 0x0066, 0x0066 },
+ specials={ "compat", 0x66, 0x66 },
unicodeslot=0xFB00,
},
[0xFB01]={
@@ -121614,8 +125319,8 @@ characters.data={
description="LATIN SMALL LIGATURE FI",
direction="l",
linebreak="al",
- shcode={ 0x0066, 0x0069 },
- specials={ "compat", 0x0066, 0x0069 },
+ shcode={ 0x66, 0x69 },
+ specials={ "compat", 0x66, 0x69 },
unicodeslot=0xFB01,
},
[0xFB02]={
@@ -121625,8 +125330,8 @@ characters.data={
description="LATIN SMALL LIGATURE FL",
direction="l",
linebreak="al",
- shcode={ 0x0066, 0x006C },
- specials={ "compat", 0x0066, 0x006C },
+ shcode={ 0x66, 0x6C },
+ specials={ "compat", 0x66, 0x6C },
unicodeslot=0xFB02,
},
[0xFB03]={
@@ -121636,8 +125341,8 @@ characters.data={
description="LATIN SMALL LIGATURE FFI",
direction="l",
linebreak="al",
- shcode={ 0x0066, 0x0069 },
- specials={ "compat", 0x0066, 0x0066, 0x0069 },
+ shcode={ 0x66, 0x69 },
+ specials={ "compat", 0x66, 0x66, 0x69 },
unicodeslot=0xFB03,
},
[0xFB04]={
@@ -121647,8 +125352,8 @@ characters.data={
description="LATIN SMALL LIGATURE FFL",
direction="l",
linebreak="al",
- shcode={ 0x0066, 0x006C },
- specials={ "compat", 0x0066, 0x0066, 0x006C },
+ shcode={ 0x66, 0x6C },
+ specials={ "compat", 0x66, 0x66, 0x6C },
unicodeslot=0xFB04,
},
[0xFB05]={
@@ -121656,7 +125361,7 @@ characters.data={
description="LATIN SMALL LIGATURE LONG S T",
direction="l",
linebreak="al",
- specials={ "compat", 0x017F, 0x0074 },
+ specials={ "compat", 0x17F, 0x74 },
unicodeslot=0xFB05,
},
[0xFB06]={
@@ -121665,8 +125370,8 @@ characters.data={
description="LATIN SMALL LIGATURE ST",
direction="l",
linebreak="al",
- shcode={ 0x0073, 0x0074 },
- specials={ "compat", 0x0073, 0x0074 },
+ shcode={ 0x73, 0x74 },
+ specials={ "compat", 0x73, 0x74 },
unicodeslot=0xFB06,
},
[0xFB13]={
@@ -121674,7 +125379,7 @@ characters.data={
description="ARMENIAN SMALL LIGATURE MEN NOW",
direction="l",
linebreak="al",
- specials={ "compat", 0x0574, 0x0576 },
+ specials={ "compat", 0x574, 0x576 },
unicodeslot=0xFB13,
},
[0xFB14]={
@@ -121682,7 +125387,7 @@ characters.data={
description="ARMENIAN SMALL LIGATURE MEN ECH",
direction="l",
linebreak="al",
- specials={ "compat", 0x0574, 0x0565 },
+ specials={ "compat", 0x574, 0x565 },
unicodeslot=0xFB14,
},
[0xFB15]={
@@ -121690,7 +125395,7 @@ characters.data={
description="ARMENIAN SMALL LIGATURE MEN INI",
direction="l",
linebreak="al",
- specials={ "compat", 0x0574, 0x056B },
+ specials={ "compat", 0x574, 0x56B },
unicodeslot=0xFB15,
},
[0xFB16]={
@@ -121698,7 +125403,7 @@ characters.data={
description="ARMENIAN SMALL LIGATURE VEW NOW",
direction="l",
linebreak="al",
- specials={ "compat", 0x057E, 0x0576 },
+ specials={ "compat", 0x57E, 0x576 },
unicodeslot=0xFB16,
},
[0xFB17]={
@@ -121706,7 +125411,7 @@ characters.data={
description="ARMENIAN SMALL LIGATURE MEN XEH",
direction="l",
linebreak="al",
- specials={ "compat", 0x0574, 0x056D },
+ specials={ "compat", 0x574, 0x56D },
unicodeslot=0xFB17,
},
[0xFB1D]={
@@ -121714,12 +125419,13 @@ characters.data={
description="HEBREW LETTER YOD WITH HIRIQ",
direction="r",
linebreak="hl",
- shcode=0x05D9,
- specials={ "char", 0x05D9, 0x05B4 },
+ shcode=0x5D9,
+ specials={ "char", 0x5D9, 0x5B4 },
unicodeslot=0xFB1D,
},
[0xFB1E]={
category="mn",
+ combining=0x1A,
description="HEBREW POINT JUDEO-SPANISH VARIKA",
direction="nsm",
linebreak="cm",
@@ -121731,7 +125437,7 @@ characters.data={
description="HEBREW LIGATURE YIDDISH YOD YOD PATAH",
direction="r",
linebreak="hl",
- specials={ "char", 0x05F2, 0x05B7 },
+ specials={ "char", 0x5F2, 0x5B7 },
unicodeslot=0xFB1F,
},
[0xFB20]={
@@ -121740,7 +125446,7 @@ characters.data={
description="HEBREW LETTER ALTERNATIVE AYIN",
direction="r",
linebreak="hl",
- specials={ "font", 0x05E2 },
+ specials={ "font", 0x5E2 },
unicodeslot=0xFB20,
},
[0xFB21]={
@@ -121748,7 +125454,7 @@ characters.data={
description="HEBREW LETTER WIDE ALEF",
direction="r",
linebreak="hl",
- specials={ "font", 0x05D0 },
+ specials={ "font", 0x5D0 },
unicodeslot=0xFB21,
},
[0xFB22]={
@@ -121756,7 +125462,7 @@ characters.data={
description="HEBREW LETTER WIDE DALET",
direction="r",
linebreak="hl",
- specials={ "font", 0x05D3 },
+ specials={ "font", 0x5D3 },
unicodeslot=0xFB22,
},
[0xFB23]={
@@ -121764,7 +125470,7 @@ characters.data={
description="HEBREW LETTER WIDE HE",
direction="r",
linebreak="hl",
- specials={ "font", 0x05D4 },
+ specials={ "font", 0x5D4 },
unicodeslot=0xFB23,
},
[0xFB24]={
@@ -121772,7 +125478,7 @@ characters.data={
description="HEBREW LETTER WIDE KAF",
direction="r",
linebreak="hl",
- specials={ "font", 0x05DB },
+ specials={ "font", 0x5DB },
unicodeslot=0xFB24,
},
[0xFB25]={
@@ -121780,7 +125486,7 @@ characters.data={
description="HEBREW LETTER WIDE LAMED",
direction="r",
linebreak="hl",
- specials={ "font", 0x05DC },
+ specials={ "font", 0x5DC },
unicodeslot=0xFB25,
},
[0xFB26]={
@@ -121788,7 +125494,7 @@ characters.data={
description="HEBREW LETTER WIDE FINAL MEM",
direction="r",
linebreak="hl",
- specials={ "font", 0x05DD },
+ specials={ "font", 0x5DD },
unicodeslot=0xFB26,
},
[0xFB27]={
@@ -121796,7 +125502,7 @@ characters.data={
description="HEBREW LETTER WIDE RESH",
direction="r",
linebreak="hl",
- specials={ "font", 0x05E8 },
+ specials={ "font", 0x5E8 },
unicodeslot=0xFB27,
},
[0xFB28]={
@@ -121804,7 +125510,7 @@ characters.data={
description="HEBREW LETTER WIDE TAV",
direction="r",
linebreak="hl",
- specials={ "font", 0x05EA },
+ specials={ "font", 0x5EA },
unicodeslot=0xFB28,
},
[0xFB29]={
@@ -121812,7 +125518,7 @@ characters.data={
description="HEBREW LETTER ALTERNATIVE PLUS SIGN",
direction="es",
linebreak="al",
- specials={ "font", 0x002B },
+ specials={ "font", 0x2B },
unicodeslot=0xFB29,
},
[0xFB2A]={
@@ -121821,8 +125527,8 @@ characters.data={
description="HEBREW LETTER SHIN WITH SHIN DOT",
direction="r",
linebreak="hl",
- shcode=0x05E9,
- specials={ "char", 0x05E9, 0x05C1 },
+ shcode=0x5E9,
+ specials={ "char", 0x5E9, 0x5C1 },
unicodeslot=0xFB2A,
},
[0xFB2B]={
@@ -121831,8 +125537,8 @@ characters.data={
description="HEBREW LETTER SHIN WITH SIN DOT",
direction="r",
linebreak="hl",
- shcode=0x05E9,
- specials={ "char", 0x05E9, 0x05C2 },
+ shcode=0x5E9,
+ specials={ "char", 0x5E9, 0x5C2 },
unicodeslot=0xFB2B,
},
[0xFB2C]={
@@ -121841,8 +125547,8 @@ characters.data={
description="HEBREW LETTER SHIN WITH DAGESH AND SHIN DOT",
direction="r",
linebreak="hl",
- shcode=0x05E9,
- specials={ "char", 0xFB49, 0x05C1 },
+ shcode=0x5E9,
+ specials={ "char", 0xFB49, 0x5C1 },
unicodeslot=0xFB2C,
},
[0xFB2D]={
@@ -121851,8 +125557,8 @@ characters.data={
description="HEBREW LETTER SHIN WITH DAGESH AND SIN DOT",
direction="r",
linebreak="hl",
- shcode=0x05E9,
- specials={ "char", 0xFB49, 0x05C2 },
+ shcode=0x5E9,
+ specials={ "char", 0xFB49, 0x5C2 },
unicodeslot=0xFB2D,
},
[0xFB2E]={
@@ -121861,8 +125567,8 @@ characters.data={
description="HEBREW LETTER ALEF WITH PATAH",
direction="r",
linebreak="hl",
- shcode=0x05D0,
- specials={ "char", 0x05D0, 0x05B7 },
+ shcode=0x5D0,
+ specials={ "char", 0x5D0, 0x5B7 },
unicodeslot=0xFB2E,
},
[0xFB2F]={
@@ -121871,8 +125577,8 @@ characters.data={
description="HEBREW LETTER ALEF WITH QAMATS",
direction="r",
linebreak="hl",
- shcode=0x05D0,
- specials={ "char", 0x05D0, 0x05B8 },
+ shcode=0x5D0,
+ specials={ "char", 0x5D0, 0x5B8 },
unicodeslot=0xFB2F,
},
[0xFB30]={
@@ -121881,8 +125587,8 @@ characters.data={
description="HEBREW LETTER ALEF WITH MAPIQ",
direction="r",
linebreak="hl",
- shcode=0x05D0,
- specials={ "char", 0x05D0, 0x05BC },
+ shcode=0x5D0,
+ specials={ "char", 0x5D0, 0x5BC },
unicodeslot=0xFB30,
},
[0xFB31]={
@@ -121891,8 +125597,8 @@ characters.data={
description="HEBREW LETTER BET WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D1,
- specials={ "char", 0x05D1, 0x05BC },
+ shcode=0x5D1,
+ specials={ "char", 0x5D1, 0x5BC },
unicodeslot=0xFB31,
},
[0xFB32]={
@@ -121901,8 +125607,8 @@ characters.data={
description="HEBREW LETTER GIMEL WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D2,
- specials={ "char", 0x05D2, 0x05BC },
+ shcode=0x5D2,
+ specials={ "char", 0x5D2, 0x5BC },
unicodeslot=0xFB32,
},
[0xFB33]={
@@ -121911,8 +125617,8 @@ characters.data={
description="HEBREW LETTER DALET WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D3,
- specials={ "char", 0x05D3, 0x05BC },
+ shcode=0x5D3,
+ specials={ "char", 0x5D3, 0x5BC },
unicodeslot=0xFB33,
},
[0xFB34]={
@@ -121921,8 +125627,8 @@ characters.data={
description="HEBREW LETTER HE WITH MAPIQ",
direction="r",
linebreak="hl",
- shcode=0x05D4,
- specials={ "char", 0x05D4, 0x05BC },
+ shcode=0x5D4,
+ specials={ "char", 0x5D4, 0x5BC },
unicodeslot=0xFB34,
},
[0xFB35]={
@@ -121931,8 +125637,8 @@ characters.data={
description="HEBREW LETTER VAV WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D5,
- specials={ "char", 0x05D5, 0x05BC },
+ shcode=0x5D5,
+ specials={ "char", 0x5D5, 0x5BC },
unicodeslot=0xFB35,
},
[0xFB36]={
@@ -121941,8 +125647,8 @@ characters.data={
description="HEBREW LETTER ZAYIN WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D6,
- specials={ "char", 0x05D6, 0x05BC },
+ shcode=0x5D6,
+ specials={ "char", 0x5D6, 0x5BC },
unicodeslot=0xFB36,
},
[0xFB38]={
@@ -121951,8 +125657,8 @@ characters.data={
description="HEBREW LETTER TET WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D8,
- specials={ "char", 0x05D8, 0x05BC },
+ shcode=0x5D8,
+ specials={ "char", 0x5D8, 0x5BC },
unicodeslot=0xFB38,
},
[0xFB39]={
@@ -121961,8 +125667,8 @@ characters.data={
description="HEBREW LETTER YOD WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05D9,
- specials={ "char", 0x05D9, 0x05BC },
+ shcode=0x5D9,
+ specials={ "char", 0x5D9, 0x5BC },
unicodeslot=0xFB39,
},
[0xFB3A]={
@@ -121971,7 +125677,7 @@ characters.data={
description="HEBREW LETTER FINAL KAF WITH DAGESH",
direction="r",
linebreak="hl",
- specials={ "char", 0x05DA, 0x05BC },
+ specials={ "char", 0x5DA, 0x5BC },
unicodeslot=0xFB3A,
},
[0xFB3B]={
@@ -121980,8 +125686,8 @@ characters.data={
description="HEBREW LETTER KAF WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05DB,
- specials={ "char", 0x05DB, 0x05BC },
+ shcode=0x5DB,
+ specials={ "char", 0x5DB, 0x5BC },
unicodeslot=0xFB3B,
},
[0xFB3C]={
@@ -121990,8 +125696,8 @@ characters.data={
description="HEBREW LETTER LAMED WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05DC,
- specials={ "char", 0x05DC, 0x05BC },
+ shcode=0x5DC,
+ specials={ "char", 0x5DC, 0x5BC },
unicodeslot=0xFB3C,
},
[0xFB3E]={
@@ -122000,8 +125706,8 @@ characters.data={
description="HEBREW LETTER MEM WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05DE,
- specials={ "char", 0x05DE, 0x05BC },
+ shcode=0x5DE,
+ specials={ "char", 0x5DE, 0x5BC },
unicodeslot=0xFB3E,
},
[0xFB40]={
@@ -122010,8 +125716,8 @@ characters.data={
description="HEBREW LETTER NUN WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E0,
- specials={ "char", 0x05E0, 0x05BC },
+ shcode=0x5E0,
+ specials={ "char", 0x5E0, 0x5BC },
unicodeslot=0xFB40,
},
[0xFB41]={
@@ -122020,8 +125726,8 @@ characters.data={
description="HEBREW LETTER SAMEKH WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E1,
- specials={ "char", 0x05E1, 0x05BC },
+ shcode=0x5E1,
+ specials={ "char", 0x5E1, 0x5BC },
unicodeslot=0xFB41,
},
[0xFB43]={
@@ -122030,7 +125736,7 @@ characters.data={
description="HEBREW LETTER FINAL PE WITH DAGESH",
direction="r",
linebreak="hl",
- specials={ "char", 0x05E3, 0x05BC },
+ specials={ "char", 0x5E3, 0x5BC },
unicodeslot=0xFB43,
},
[0xFB44]={
@@ -122039,8 +125745,8 @@ characters.data={
description="HEBREW LETTER PE WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E4,
- specials={ "char", 0x05E4, 0x05BC },
+ shcode=0x5E4,
+ specials={ "char", 0x5E4, 0x5BC },
unicodeslot=0xFB44,
},
[0xFB46]={
@@ -122049,8 +125755,8 @@ characters.data={
description="HEBREW LETTER TSADI WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E6,
- specials={ "char", 0x05E6, 0x05BC },
+ shcode=0x5E6,
+ specials={ "char", 0x5E6, 0x5BC },
unicodeslot=0xFB46,
},
[0xFB47]={
@@ -122059,8 +125765,8 @@ characters.data={
description="HEBREW LETTER QOF WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E7,
- specials={ "char", 0x05E7, 0x05BC },
+ shcode=0x5E7,
+ specials={ "char", 0x5E7, 0x5BC },
unicodeslot=0xFB47,
},
[0xFB48]={
@@ -122069,8 +125775,8 @@ characters.data={
description="HEBREW LETTER RESH WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E8,
- specials={ "char", 0x05E8, 0x05BC },
+ shcode=0x5E8,
+ specials={ "char", 0x5E8, 0x5BC },
unicodeslot=0xFB48,
},
[0xFB49]={
@@ -122079,8 +125785,8 @@ characters.data={
description="HEBREW LETTER SHIN WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05E9,
- specials={ "char", 0x05E9, 0x05BC },
+ shcode=0x5E9,
+ specials={ "char", 0x5E9, 0x5BC },
unicodeslot=0xFB49,
},
[0xFB4A]={
@@ -122089,8 +125795,8 @@ characters.data={
description="HEBREW LETTER TAV WITH DAGESH",
direction="r",
linebreak="hl",
- shcode=0x05EA,
- specials={ "char", 0x05EA, 0x05BC },
+ shcode=0x5EA,
+ specials={ "char", 0x5EA, 0x5BC },
unicodeslot=0xFB4A,
},
[0xFB4B]={
@@ -122099,8 +125805,8 @@ characters.data={
description="HEBREW LETTER VAV WITH HOLAM",
direction="r",
linebreak="hl",
- shcode=0x05D5,
- specials={ "char", 0x05D5, 0x05B9 },
+ shcode=0x5D5,
+ specials={ "char", 0x5D5, 0x5B9 },
unicodeslot=0xFB4B,
},
[0xFB4C]={
@@ -122109,8 +125815,8 @@ characters.data={
description="HEBREW LETTER BET WITH RAFE",
direction="r",
linebreak="hl",
- shcode=0x05D1,
- specials={ "char", 0x05D1, 0x05BF },
+ shcode=0x5D1,
+ specials={ "char", 0x5D1, 0x5BF },
unicodeslot=0xFB4C,
},
[0xFB4D]={
@@ -122119,8 +125825,8 @@ characters.data={
description="HEBREW LETTER KAF WITH RAFE",
direction="r",
linebreak="hl",
- shcode=0x05DB,
- specials={ "char", 0x05DB, 0x05BF },
+ shcode=0x5DB,
+ specials={ "char", 0x5DB, 0x5BF },
unicodeslot=0xFB4D,
},
[0xFB4E]={
@@ -122129,8 +125835,8 @@ characters.data={
description="HEBREW LETTER PE WITH RAFE",
direction="r",
linebreak="hl",
- shcode=0x05E4,
- specials={ "char", 0x05E4, 0x05BF },
+ shcode=0x5E4,
+ specials={ "char", 0x5E4, 0x5BF },
unicodeslot=0xFB4E,
},
[0xFB4F]={
@@ -122139,7 +125845,7 @@ characters.data={
description="HEBREW LIGATURE ALEF LAMED",
direction="r",
linebreak="hl",
- specials={ "compat", 0x05D0, 0x05DC },
+ specials={ "compat", 0x5D0, 0x5DC },
unicodeslot=0xFB4F,
},
[0xFB50]={
@@ -122147,7 +125853,7 @@ characters.data={
description="ARABIC LETTER ALEF WASLA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0671 },
+ specials={ "isolated", 0x671 },
unicodeslot=0xFB50,
},
[0xFB51]={
@@ -122155,7 +125861,7 @@ characters.data={
description="ARABIC LETTER ALEF WASLA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0671 },
+ specials={ "final", 0x671 },
unicodeslot=0xFB51,
},
[0xFB52]={
@@ -122163,7 +125869,7 @@ characters.data={
description="ARABIC LETTER BEEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x067B },
+ specials={ "isolated", 0x67B },
unicodeslot=0xFB52,
},
[0xFB53]={
@@ -122171,7 +125877,7 @@ characters.data={
description="ARABIC LETTER BEEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x067B },
+ specials={ "final", 0x67B },
unicodeslot=0xFB53,
},
[0xFB54]={
@@ -122179,7 +125885,7 @@ characters.data={
description="ARABIC LETTER BEEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x067B },
+ specials={ "initial", 0x67B },
unicodeslot=0xFB54,
},
[0xFB55]={
@@ -122187,7 +125893,7 @@ characters.data={
description="ARABIC LETTER BEEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x067B },
+ specials={ "medial", 0x67B },
unicodeslot=0xFB55,
},
[0xFB56]={
@@ -122195,7 +125901,7 @@ characters.data={
description="ARABIC LETTER PEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x067E },
+ specials={ "isolated", 0x67E },
unicodeslot=0xFB56,
},
[0xFB57]={
@@ -122204,7 +125910,7 @@ characters.data={
description="ARABIC LETTER PEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x067E },
+ specials={ "final", 0x67E },
unicodeslot=0xFB57,
},
[0xFB58]={
@@ -122213,7 +125919,7 @@ characters.data={
description="ARABIC LETTER PEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x067E },
+ specials={ "initial", 0x67E },
unicodeslot=0xFB58,
},
[0xFB59]={
@@ -122222,7 +125928,7 @@ characters.data={
description="ARABIC LETTER PEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x067E },
+ specials={ "medial", 0x67E },
unicodeslot=0xFB59,
},
[0xFB5A]={
@@ -122230,7 +125936,7 @@ characters.data={
description="ARABIC LETTER BEHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0680 },
+ specials={ "isolated", 0x680 },
unicodeslot=0xFB5A,
},
[0xFB5B]={
@@ -122238,7 +125944,7 @@ characters.data={
description="ARABIC LETTER BEHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0680 },
+ specials={ "final", 0x680 },
unicodeslot=0xFB5B,
},
[0xFB5C]={
@@ -122246,7 +125952,7 @@ characters.data={
description="ARABIC LETTER BEHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0680 },
+ specials={ "initial", 0x680 },
unicodeslot=0xFB5C,
},
[0xFB5D]={
@@ -122254,7 +125960,7 @@ characters.data={
description="ARABIC LETTER BEHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0680 },
+ specials={ "medial", 0x680 },
unicodeslot=0xFB5D,
},
[0xFB5E]={
@@ -122262,7 +125968,7 @@ characters.data={
description="ARABIC LETTER TTEHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x067A },
+ specials={ "isolated", 0x67A },
unicodeslot=0xFB5E,
},
[0xFB5F]={
@@ -122270,7 +125976,7 @@ characters.data={
description="ARABIC LETTER TTEHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x067A },
+ specials={ "final", 0x67A },
unicodeslot=0xFB5F,
},
[0xFB60]={
@@ -122278,7 +125984,7 @@ characters.data={
description="ARABIC LETTER TTEHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x067A },
+ specials={ "initial", 0x67A },
unicodeslot=0xFB60,
},
[0xFB61]={
@@ -122286,7 +125992,7 @@ characters.data={
description="ARABIC LETTER TTEHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x067A },
+ specials={ "medial", 0x67A },
unicodeslot=0xFB61,
},
[0xFB62]={
@@ -122294,7 +126000,7 @@ characters.data={
description="ARABIC LETTER TEHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x067F },
+ specials={ "isolated", 0x67F },
unicodeslot=0xFB62,
},
[0xFB63]={
@@ -122302,7 +126008,7 @@ characters.data={
description="ARABIC LETTER TEHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x067F },
+ specials={ "final", 0x67F },
unicodeslot=0xFB63,
},
[0xFB64]={
@@ -122310,7 +126016,7 @@ characters.data={
description="ARABIC LETTER TEHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x067F },
+ specials={ "initial", 0x67F },
unicodeslot=0xFB64,
},
[0xFB65]={
@@ -122318,7 +126024,7 @@ characters.data={
description="ARABIC LETTER TEHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x067F },
+ specials={ "medial", 0x67F },
unicodeslot=0xFB65,
},
[0xFB66]={
@@ -122326,7 +126032,7 @@ characters.data={
description="ARABIC LETTER TTEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0679 },
+ specials={ "isolated", 0x679 },
unicodeslot=0xFB66,
},
[0xFB67]={
@@ -122335,7 +126041,7 @@ characters.data={
description="ARABIC LETTER TTEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0679 },
+ specials={ "final", 0x679 },
unicodeslot=0xFB67,
},
[0xFB68]={
@@ -122344,7 +126050,7 @@ characters.data={
description="ARABIC LETTER TTEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0679 },
+ specials={ "initial", 0x679 },
unicodeslot=0xFB68,
},
[0xFB69]={
@@ -122353,7 +126059,7 @@ characters.data={
description="ARABIC LETTER TTEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0679 },
+ specials={ "medial", 0x679 },
unicodeslot=0xFB69,
},
[0xFB6A]={
@@ -122361,7 +126067,7 @@ characters.data={
description="ARABIC LETTER VEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06A4 },
+ specials={ "isolated", 0x6A4 },
unicodeslot=0xFB6A,
},
[0xFB6B]={
@@ -122370,7 +126076,7 @@ characters.data={
description="ARABIC LETTER VEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06A4 },
+ specials={ "final", 0x6A4 },
unicodeslot=0xFB6B,
},
[0xFB6C]={
@@ -122379,7 +126085,7 @@ characters.data={
description="ARABIC LETTER VEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06A4 },
+ specials={ "initial", 0x6A4 },
unicodeslot=0xFB6C,
},
[0xFB6D]={
@@ -122388,7 +126094,7 @@ characters.data={
description="ARABIC LETTER VEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06A4 },
+ specials={ "medial", 0x6A4 },
unicodeslot=0xFB6D,
},
[0xFB6E]={
@@ -122396,7 +126102,7 @@ characters.data={
description="ARABIC LETTER PEHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06A6 },
+ specials={ "isolated", 0x6A6 },
unicodeslot=0xFB6E,
},
[0xFB6F]={
@@ -122404,7 +126110,7 @@ characters.data={
description="ARABIC LETTER PEHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06A6 },
+ specials={ "final", 0x6A6 },
unicodeslot=0xFB6F,
},
[0xFB70]={
@@ -122412,7 +126118,7 @@ characters.data={
description="ARABIC LETTER PEHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06A6 },
+ specials={ "initial", 0x6A6 },
unicodeslot=0xFB70,
},
[0xFB71]={
@@ -122420,7 +126126,7 @@ characters.data={
description="ARABIC LETTER PEHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06A6 },
+ specials={ "medial", 0x6A6 },
unicodeslot=0xFB71,
},
[0xFB72]={
@@ -122428,7 +126134,7 @@ characters.data={
description="ARABIC LETTER DYEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0684 },
+ specials={ "isolated", 0x684 },
unicodeslot=0xFB72,
},
[0xFB73]={
@@ -122436,7 +126142,7 @@ characters.data={
description="ARABIC LETTER DYEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0684 },
+ specials={ "final", 0x684 },
unicodeslot=0xFB73,
},
[0xFB74]={
@@ -122444,7 +126150,7 @@ characters.data={
description="ARABIC LETTER DYEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0684 },
+ specials={ "initial", 0x684 },
unicodeslot=0xFB74,
},
[0xFB75]={
@@ -122452,7 +126158,7 @@ characters.data={
description="ARABIC LETTER DYEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0684 },
+ specials={ "medial", 0x684 },
unicodeslot=0xFB75,
},
[0xFB76]={
@@ -122460,7 +126166,7 @@ characters.data={
description="ARABIC LETTER NYEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0683 },
+ specials={ "isolated", 0x683 },
unicodeslot=0xFB76,
},
[0xFB77]={
@@ -122468,7 +126174,7 @@ characters.data={
description="ARABIC LETTER NYEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0683 },
+ specials={ "final", 0x683 },
unicodeslot=0xFB77,
},
[0xFB78]={
@@ -122476,7 +126182,7 @@ characters.data={
description="ARABIC LETTER NYEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0683 },
+ specials={ "initial", 0x683 },
unicodeslot=0xFB78,
},
[0xFB79]={
@@ -122484,7 +126190,7 @@ characters.data={
description="ARABIC LETTER NYEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0683 },
+ specials={ "medial", 0x683 },
unicodeslot=0xFB79,
},
[0xFB7A]={
@@ -122492,7 +126198,7 @@ characters.data={
description="ARABIC LETTER TCHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0686 },
+ specials={ "isolated", 0x686 },
unicodeslot=0xFB7A,
},
[0xFB7B]={
@@ -122501,7 +126207,7 @@ characters.data={
description="ARABIC LETTER TCHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0686 },
+ specials={ "final", 0x686 },
unicodeslot=0xFB7B,
},
[0xFB7C]={
@@ -122510,7 +126216,7 @@ characters.data={
description="ARABIC LETTER TCHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0686 },
+ specials={ "initial", 0x686 },
unicodeslot=0xFB7C,
},
[0xFB7D]={
@@ -122519,7 +126225,7 @@ characters.data={
description="ARABIC LETTER TCHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0686 },
+ specials={ "medial", 0x686 },
unicodeslot=0xFB7D,
},
[0xFB7E]={
@@ -122527,7 +126233,7 @@ characters.data={
description="ARABIC LETTER TCHEHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0687 },
+ specials={ "isolated", 0x687 },
unicodeslot=0xFB7E,
},
[0xFB7F]={
@@ -122535,7 +126241,7 @@ characters.data={
description="ARABIC LETTER TCHEHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0687 },
+ specials={ "final", 0x687 },
unicodeslot=0xFB7F,
},
[0xFB80]={
@@ -122543,7 +126249,7 @@ characters.data={
description="ARABIC LETTER TCHEHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0687 },
+ specials={ "initial", 0x687 },
unicodeslot=0xFB80,
},
[0xFB81]={
@@ -122551,7 +126257,7 @@ characters.data={
description="ARABIC LETTER TCHEHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0687 },
+ specials={ "medial", 0x687 },
unicodeslot=0xFB81,
},
[0xFB82]={
@@ -122559,7 +126265,7 @@ characters.data={
description="ARABIC LETTER DDAHAL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x068D },
+ specials={ "isolated", 0x68D },
unicodeslot=0xFB82,
},
[0xFB83]={
@@ -122567,7 +126273,7 @@ characters.data={
description="ARABIC LETTER DDAHAL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x068D },
+ specials={ "final", 0x68D },
unicodeslot=0xFB83,
},
[0xFB84]={
@@ -122575,7 +126281,7 @@ characters.data={
description="ARABIC LETTER DAHAL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x068C },
+ specials={ "isolated", 0x68C },
unicodeslot=0xFB84,
},
[0xFB85]={
@@ -122583,7 +126289,7 @@ characters.data={
description="ARABIC LETTER DAHAL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x068C },
+ specials={ "final", 0x68C },
unicodeslot=0xFB85,
},
[0xFB86]={
@@ -122591,7 +126297,7 @@ characters.data={
description="ARABIC LETTER DUL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x068E },
+ specials={ "isolated", 0x68E },
unicodeslot=0xFB86,
},
[0xFB87]={
@@ -122599,7 +126305,7 @@ characters.data={
description="ARABIC LETTER DUL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x068E },
+ specials={ "final", 0x68E },
unicodeslot=0xFB87,
},
[0xFB88]={
@@ -122607,7 +126313,7 @@ characters.data={
description="ARABIC LETTER DDAL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0688 },
+ specials={ "isolated", 0x688 },
unicodeslot=0xFB88,
},
[0xFB89]={
@@ -122616,7 +126322,7 @@ characters.data={
description="ARABIC LETTER DDAL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0688 },
+ specials={ "final", 0x688 },
unicodeslot=0xFB89,
},
[0xFB8A]={
@@ -122624,7 +126330,7 @@ characters.data={
description="ARABIC LETTER JEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0698 },
+ specials={ "isolated", 0x698 },
unicodeslot=0xFB8A,
},
[0xFB8B]={
@@ -122633,7 +126339,7 @@ characters.data={
description="ARABIC LETTER JEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0698 },
+ specials={ "final", 0x698 },
unicodeslot=0xFB8B,
},
[0xFB8C]={
@@ -122641,7 +126347,7 @@ characters.data={
description="ARABIC LETTER RREH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0691 },
+ specials={ "isolated", 0x691 },
unicodeslot=0xFB8C,
},
[0xFB8D]={
@@ -122650,7 +126356,7 @@ characters.data={
description="ARABIC LETTER RREH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0691 },
+ specials={ "final", 0x691 },
unicodeslot=0xFB8D,
},
[0xFB8E]={
@@ -122658,7 +126364,7 @@ characters.data={
description="ARABIC LETTER KEHEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06A9 },
+ specials={ "isolated", 0x6A9 },
unicodeslot=0xFB8E,
},
[0xFB8F]={
@@ -122666,7 +126372,7 @@ characters.data={
description="ARABIC LETTER KEHEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06A9 },
+ specials={ "final", 0x6A9 },
unicodeslot=0xFB8F,
},
[0xFB90]={
@@ -122674,7 +126380,7 @@ characters.data={
description="ARABIC LETTER KEHEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06A9 },
+ specials={ "initial", 0x6A9 },
unicodeslot=0xFB90,
},
[0xFB91]={
@@ -122682,7 +126388,7 @@ characters.data={
description="ARABIC LETTER KEHEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06A9 },
+ specials={ "medial", 0x6A9 },
unicodeslot=0xFB91,
},
[0xFB92]={
@@ -122690,7 +126396,7 @@ characters.data={
description="ARABIC LETTER GAF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06AF },
+ specials={ "isolated", 0x6AF },
unicodeslot=0xFB92,
},
[0xFB93]={
@@ -122699,7 +126405,7 @@ characters.data={
description="ARABIC LETTER GAF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06AF },
+ specials={ "final", 0x6AF },
unicodeslot=0xFB93,
},
[0xFB94]={
@@ -122708,7 +126414,7 @@ characters.data={
description="ARABIC LETTER GAF INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06AF },
+ specials={ "initial", 0x6AF },
unicodeslot=0xFB94,
},
[0xFB95]={
@@ -122717,7 +126423,7 @@ characters.data={
description="ARABIC LETTER GAF MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06AF },
+ specials={ "medial", 0x6AF },
unicodeslot=0xFB95,
},
[0xFB96]={
@@ -122725,7 +126431,7 @@ characters.data={
description="ARABIC LETTER GUEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06B3 },
+ specials={ "isolated", 0x6B3 },
unicodeslot=0xFB96,
},
[0xFB97]={
@@ -122733,7 +126439,7 @@ characters.data={
description="ARABIC LETTER GUEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06B3 },
+ specials={ "final", 0x6B3 },
unicodeslot=0xFB97,
},
[0xFB98]={
@@ -122741,7 +126447,7 @@ characters.data={
description="ARABIC LETTER GUEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06B3 },
+ specials={ "initial", 0x6B3 },
unicodeslot=0xFB98,
},
[0xFB99]={
@@ -122749,7 +126455,7 @@ characters.data={
description="ARABIC LETTER GUEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06B3 },
+ specials={ "medial", 0x6B3 },
unicodeslot=0xFB99,
},
[0xFB9A]={
@@ -122757,7 +126463,7 @@ characters.data={
description="ARABIC LETTER NGOEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06B1 },
+ specials={ "isolated", 0x6B1 },
unicodeslot=0xFB9A,
},
[0xFB9B]={
@@ -122765,7 +126471,7 @@ characters.data={
description="ARABIC LETTER NGOEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06B1 },
+ specials={ "final", 0x6B1 },
unicodeslot=0xFB9B,
},
[0xFB9C]={
@@ -122773,7 +126479,7 @@ characters.data={
description="ARABIC LETTER NGOEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06B1 },
+ specials={ "initial", 0x6B1 },
unicodeslot=0xFB9C,
},
[0xFB9D]={
@@ -122781,7 +126487,7 @@ characters.data={
description="ARABIC LETTER NGOEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06B1 },
+ specials={ "medial", 0x6B1 },
unicodeslot=0xFB9D,
},
[0xFB9E]={
@@ -122789,7 +126495,7 @@ characters.data={
description="ARABIC LETTER NOON GHUNNA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06BA },
+ specials={ "isolated", 0x6BA },
unicodeslot=0xFB9E,
},
[0xFB9F]={
@@ -122798,7 +126504,7 @@ characters.data={
description="ARABIC LETTER NOON GHUNNA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06BA },
+ specials={ "final", 0x6BA },
unicodeslot=0xFB9F,
},
[0xFBA0]={
@@ -122806,7 +126512,7 @@ characters.data={
description="ARABIC LETTER RNOON ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06BB },
+ specials={ "isolated", 0x6BB },
unicodeslot=0xFBA0,
},
[0xFBA1]={
@@ -122814,7 +126520,7 @@ characters.data={
description="ARABIC LETTER RNOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06BB },
+ specials={ "final", 0x6BB },
unicodeslot=0xFBA1,
},
[0xFBA2]={
@@ -122822,7 +126528,7 @@ characters.data={
description="ARABIC LETTER RNOON INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06BB },
+ specials={ "initial", 0x6BB },
unicodeslot=0xFBA2,
},
[0xFBA3]={
@@ -122830,7 +126536,7 @@ characters.data={
description="ARABIC LETTER RNOON MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06BB },
+ specials={ "medial", 0x6BB },
unicodeslot=0xFBA3,
},
[0xFBA4]={
@@ -122839,8 +126545,8 @@ characters.data={
description="ARABIC LETTER HEH WITH YEH ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x0647,
- specials={ "isolated", 0x06C0 },
+ shcode=0x647,
+ specials={ "isolated", 0x6C0 },
unicodeslot=0xFBA4,
},
[0xFBA5]={
@@ -122849,8 +126555,8 @@ characters.data={
description="ARABIC LETTER HEH WITH YEH ABOVE FINAL FORM",
direction="al",
linebreak="al",
- shcode=0x0647,
- specials={ "final", 0x06C0 },
+ shcode=0x647,
+ specials={ "final", 0x6C0 },
unicodeslot=0xFBA5,
},
[0xFBA6]={
@@ -122858,7 +126564,7 @@ characters.data={
description="ARABIC LETTER HEH GOAL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06C1 },
+ specials={ "isolated", 0x6C1 },
unicodeslot=0xFBA6,
},
[0xFBA7]={
@@ -122867,7 +126573,7 @@ characters.data={
description="ARABIC LETTER HEH GOAL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06C1 },
+ specials={ "final", 0x6C1 },
unicodeslot=0xFBA7,
},
[0xFBA8]={
@@ -122876,7 +126582,7 @@ characters.data={
description="ARABIC LETTER HEH GOAL INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06C1 },
+ specials={ "initial", 0x6C1 },
unicodeslot=0xFBA8,
},
[0xFBA9]={
@@ -122885,7 +126591,7 @@ characters.data={
description="ARABIC LETTER HEH GOAL MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06C1 },
+ specials={ "medial", 0x6C1 },
unicodeslot=0xFBA9,
},
[0xFBAA]={
@@ -122893,7 +126599,7 @@ characters.data={
description="ARABIC LETTER HEH DOACHASHMEE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06BE },
+ specials={ "isolated", 0x6BE },
unicodeslot=0xFBAA,
},
[0xFBAB]={
@@ -122901,7 +126607,7 @@ characters.data={
description="ARABIC LETTER HEH DOACHASHMEE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06BE },
+ specials={ "final", 0x6BE },
unicodeslot=0xFBAB,
},
[0xFBAC]={
@@ -122909,7 +126615,7 @@ characters.data={
description="ARABIC LETTER HEH DOACHASHMEE INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06BE },
+ specials={ "initial", 0x6BE },
unicodeslot=0xFBAC,
},
[0xFBAD]={
@@ -122917,7 +126623,7 @@ characters.data={
description="ARABIC LETTER HEH DOACHASHMEE MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06BE },
+ specials={ "medial", 0x6BE },
unicodeslot=0xFBAD,
},
[0xFBAE]={
@@ -122925,7 +126631,7 @@ characters.data={
description="ARABIC LETTER YEH BARREE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06D2 },
+ specials={ "isolated", 0x6D2 },
unicodeslot=0xFBAE,
},
[0xFBAF]={
@@ -122934,7 +126640,7 @@ characters.data={
description="ARABIC LETTER YEH BARREE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06D2 },
+ specials={ "final", 0x6D2 },
unicodeslot=0xFBAF,
},
[0xFBB0]={
@@ -122942,7 +126648,7 @@ characters.data={
description="ARABIC LETTER YEH BARREE WITH HAMZA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06D3 },
+ specials={ "isolated", 0x6D3 },
unicodeslot=0xFBB0,
},
[0xFBB1]={
@@ -122950,7 +126656,7 @@ characters.data={
description="ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06D3 },
+ specials={ "final", 0x6D3 },
unicodeslot=0xFBB1,
},
[0xFBB2]={
@@ -123070,7 +126776,7 @@ characters.data={
description="ARABIC LETTER NG ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06AD },
+ specials={ "isolated", 0x6AD },
unicodeslot=0xFBD3,
},
[0xFBD4]={
@@ -123078,7 +126784,7 @@ characters.data={
description="ARABIC LETTER NG FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06AD },
+ specials={ "final", 0x6AD },
unicodeslot=0xFBD4,
},
[0xFBD5]={
@@ -123086,7 +126792,7 @@ characters.data={
description="ARABIC LETTER NG INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06AD },
+ specials={ "initial", 0x6AD },
unicodeslot=0xFBD5,
},
[0xFBD6]={
@@ -123094,7 +126800,7 @@ characters.data={
description="ARABIC LETTER NG MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06AD },
+ specials={ "medial", 0x6AD },
unicodeslot=0xFBD6,
},
[0xFBD7]={
@@ -123102,7 +126808,7 @@ characters.data={
description="ARABIC LETTER U ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06C7 },
+ specials={ "isolated", 0x6C7 },
unicodeslot=0xFBD7,
},
[0xFBD8]={
@@ -123110,7 +126816,7 @@ characters.data={
description="ARABIC LETTER U FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06C7 },
+ specials={ "final", 0x6C7 },
unicodeslot=0xFBD8,
},
[0xFBD9]={
@@ -123118,7 +126824,7 @@ characters.data={
description="ARABIC LETTER OE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06C6 },
+ specials={ "isolated", 0x6C6 },
unicodeslot=0xFBD9,
},
[0xFBDA]={
@@ -123126,7 +126832,7 @@ characters.data={
description="ARABIC LETTER OE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06C6 },
+ specials={ "final", 0x6C6 },
unicodeslot=0xFBDA,
},
[0xFBDB]={
@@ -123134,7 +126840,7 @@ characters.data={
description="ARABIC LETTER YU ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06C8 },
+ specials={ "isolated", 0x6C8 },
unicodeslot=0xFBDB,
},
[0xFBDC]={
@@ -123142,7 +126848,7 @@ characters.data={
description="ARABIC LETTER YU FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06C8 },
+ specials={ "final", 0x6C8 },
unicodeslot=0xFBDC,
},
[0xFBDD]={
@@ -123150,8 +126856,8 @@ characters.data={
description="ARABIC LETTER U WITH HAMZA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x06C7,
- specials={ "isolated", 0x0677 },
+ shcode=0x6C7,
+ specials={ "isolated", 0x677 },
unicodeslot=0xFBDD,
},
[0xFBDE]={
@@ -123159,7 +126865,7 @@ characters.data={
description="ARABIC LETTER VE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06CB },
+ specials={ "isolated", 0x6CB },
unicodeslot=0xFBDE,
},
[0xFBDF]={
@@ -123167,7 +126873,7 @@ characters.data={
description="ARABIC LETTER VE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06CB },
+ specials={ "final", 0x6CB },
unicodeslot=0xFBDF,
},
[0xFBE0]={
@@ -123175,7 +126881,7 @@ characters.data={
description="ARABIC LETTER KIRGHIZ OE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06C5 },
+ specials={ "isolated", 0x6C5 },
unicodeslot=0xFBE0,
},
[0xFBE1]={
@@ -123183,7 +126889,7 @@ characters.data={
description="ARABIC LETTER KIRGHIZ OE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06C5 },
+ specials={ "final", 0x6C5 },
unicodeslot=0xFBE1,
},
[0xFBE2]={
@@ -123191,7 +126897,7 @@ characters.data={
description="ARABIC LETTER KIRGHIZ YU ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06C9 },
+ specials={ "isolated", 0x6C9 },
unicodeslot=0xFBE2,
},
[0xFBE3]={
@@ -123199,7 +126905,7 @@ characters.data={
description="ARABIC LETTER KIRGHIZ YU FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06C9 },
+ specials={ "final", 0x6C9 },
unicodeslot=0xFBE3,
},
[0xFBE4]={
@@ -123207,7 +126913,7 @@ characters.data={
description="ARABIC LETTER E ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06D0 },
+ specials={ "isolated", 0x6D0 },
unicodeslot=0xFBE4,
},
[0xFBE5]={
@@ -123215,7 +126921,7 @@ characters.data={
description="ARABIC LETTER E FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06D0 },
+ specials={ "final", 0x6D0 },
unicodeslot=0xFBE5,
},
[0xFBE6]={
@@ -123223,7 +126929,7 @@ characters.data={
description="ARABIC LETTER E INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06D0 },
+ specials={ "initial", 0x6D0 },
unicodeslot=0xFBE6,
},
[0xFBE7]={
@@ -123231,7 +126937,7 @@ characters.data={
description="ARABIC LETTER E MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06D0 },
+ specials={ "medial", 0x6D0 },
unicodeslot=0xFBE7,
},
[0xFBE8]={
@@ -123239,7 +126945,7 @@ characters.data={
description="ARABIC LETTER UIGHUR KAZAKH KIRGHIZ ALEF MAKSURA INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0649 },
+ specials={ "initial", 0x649 },
unicodeslot=0xFBE8,
},
[0xFBE9]={
@@ -123247,7 +126953,7 @@ characters.data={
description="ARABIC LETTER UIGHUR KAZAKH KIRGHIZ ALEF MAKSURA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0649 },
+ specials={ "medial", 0x649 },
unicodeslot=0xFBE9,
},
[0xFBEA]={
@@ -123255,7 +126961,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x0627 },
+ specials={ "isolated", 0x626, 0x627 },
unicodeslot=0xFBEA,
},
[0xFBEB]={
@@ -123263,7 +126969,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0627 },
+ specials={ "final", 0x626, 0x627 },
unicodeslot=0xFBEB,
},
[0xFBEC]={
@@ -123271,7 +126977,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH AE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x06D5 },
+ specials={ "isolated", 0x626, 0x6D5 },
unicodeslot=0xFBEC,
},
[0xFBED]={
@@ -123279,7 +126985,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH AE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x06D5 },
+ specials={ "final", 0x626, 0x6D5 },
unicodeslot=0xFBED,
},
[0xFBEE]={
@@ -123287,7 +126993,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH WAW ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x0648 },
+ specials={ "isolated", 0x626, 0x648 },
unicodeslot=0xFBEE,
},
[0xFBEF]={
@@ -123295,7 +127001,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH WAW FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0648 },
+ specials={ "final", 0x626, 0x648 },
unicodeslot=0xFBEF,
},
[0xFBF0]={
@@ -123303,7 +127009,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH U ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x06C7 },
+ specials={ "isolated", 0x626, 0x6C7 },
unicodeslot=0xFBF0,
},
[0xFBF1]={
@@ -123311,7 +127017,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH U FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x06C7 },
+ specials={ "final", 0x626, 0x6C7 },
unicodeslot=0xFBF1,
},
[0xFBF2]={
@@ -123319,7 +127025,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x06C6 },
+ specials={ "isolated", 0x626, 0x6C6 },
unicodeslot=0xFBF2,
},
[0xFBF3]={
@@ -123327,7 +127033,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x06C6 },
+ specials={ "final", 0x626, 0x6C6 },
unicodeslot=0xFBF3,
},
[0xFBF4]={
@@ -123335,7 +127041,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YU ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x06C8 },
+ specials={ "isolated", 0x626, 0x6C8 },
unicodeslot=0xFBF4,
},
[0xFBF5]={
@@ -123343,7 +127049,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YU FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x06C8 },
+ specials={ "final", 0x626, 0x6C8 },
unicodeslot=0xFBF5,
},
[0xFBF6]={
@@ -123351,7 +127057,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x06D0 },
+ specials={ "isolated", 0x626, 0x6D0 },
unicodeslot=0xFBF6,
},
[0xFBF7]={
@@ -123359,7 +127065,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x06D0 },
+ specials={ "final", 0x626, 0x6D0 },
unicodeslot=0xFBF7,
},
[0xFBF8]={
@@ -123367,7 +127073,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x06D0 },
+ specials={ "initial", 0x626, 0x6D0 },
unicodeslot=0xFBF8,
},
[0xFBF9]={
@@ -123375,7 +127081,7 @@ characters.data={
description="ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x0649 },
+ specials={ "isolated", 0x626, 0x649 },
unicodeslot=0xFBF9,
},
[0xFBFA]={
@@ -123383,7 +127089,7 @@ characters.data={
description="ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0649 },
+ specials={ "final", 0x626, 0x649 },
unicodeslot=0xFBFA,
},
[0xFBFB]={
@@ -123391,7 +127097,7 @@ characters.data={
description="ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x0649 },
+ specials={ "initial", 0x626, 0x649 },
unicodeslot=0xFBFB,
},
[0xFBFC]={
@@ -123399,7 +127105,7 @@ characters.data={
description="ARABIC LETTER FARSI YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x06CC },
+ specials={ "isolated", 0x6CC },
unicodeslot=0xFBFC,
},
[0xFBFD]={
@@ -123407,7 +127113,7 @@ characters.data={
description="ARABIC LETTER FARSI YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x06CC },
+ specials={ "final", 0x6CC },
unicodeslot=0xFBFD,
},
[0xFBFE]={
@@ -123415,7 +127121,7 @@ characters.data={
description="ARABIC LETTER FARSI YEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x06CC },
+ specials={ "initial", 0x6CC },
unicodeslot=0xFBFE,
},
[0xFBFF]={
@@ -123423,7 +127129,7 @@ characters.data={
description="ARABIC LETTER FARSI YEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x06CC },
+ specials={ "medial", 0x6CC },
unicodeslot=0xFBFF,
},
[0xFC00]={
@@ -123431,7 +127137,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x062C },
+ specials={ "isolated", 0x626, 0x62C },
unicodeslot=0xFC00,
},
[0xFC01]={
@@ -123439,7 +127145,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x062D },
+ specials={ "isolated", 0x626, 0x62D },
unicodeslot=0xFC01,
},
[0xFC02]={
@@ -123447,7 +127153,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x0645 },
+ specials={ "isolated", 0x626, 0x645 },
unicodeslot=0xFC02,
},
[0xFC03]={
@@ -123455,7 +127161,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x0649 },
+ specials={ "isolated", 0x626, 0x649 },
unicodeslot=0xFC03,
},
[0xFC04]={
@@ -123463,7 +127169,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0626, 0x064A },
+ specials={ "isolated", 0x626, 0x64A },
unicodeslot=0xFC04,
},
[0xFC05]={
@@ -123471,7 +127177,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628, 0x062C },
+ specials={ "isolated", 0x628, 0x62C },
unicodeslot=0xFC05,
},
[0xFC06]={
@@ -123479,7 +127185,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628, 0x062D },
+ specials={ "isolated", 0x628, 0x62D },
unicodeslot=0xFC06,
},
[0xFC07]={
@@ -123487,7 +127193,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628, 0x062E },
+ specials={ "isolated", 0x628, 0x62E },
unicodeslot=0xFC07,
},
[0xFC08]={
@@ -123496,7 +127202,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628, 0x0645 },
+ specials={ "isolated", 0x628, 0x645 },
unicodeslot=0xFC08,
},
[0xFC09]={
@@ -123504,7 +127210,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628, 0x0649 },
+ specials={ "isolated", 0x628, 0x649 },
unicodeslot=0xFC09,
},
[0xFC0A]={
@@ -123512,7 +127218,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628, 0x064A },
+ specials={ "isolated", 0x628, 0x64A },
unicodeslot=0xFC0A,
},
[0xFC0B]={
@@ -123521,7 +127227,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A, 0x062C },
+ specials={ "isolated", 0x62A, 0x62C },
unicodeslot=0xFC0B,
},
[0xFC0C]={
@@ -123530,7 +127236,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A, 0x062D },
+ specials={ "isolated", 0x62A, 0x62D },
unicodeslot=0xFC0C,
},
[0xFC0D]={
@@ -123538,7 +127244,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A, 0x062E },
+ specials={ "isolated", 0x62A, 0x62E },
unicodeslot=0xFC0D,
},
[0xFC0E]={
@@ -123547,7 +127253,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A, 0x0645 },
+ specials={ "isolated", 0x62A, 0x645 },
unicodeslot=0xFC0E,
},
[0xFC0F]={
@@ -123555,7 +127261,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A, 0x0649 },
+ specials={ "isolated", 0x62A, 0x649 },
unicodeslot=0xFC0F,
},
[0xFC10]={
@@ -123563,7 +127269,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A, 0x064A },
+ specials={ "isolated", 0x62A, 0x64A },
unicodeslot=0xFC10,
},
[0xFC11]={
@@ -123571,7 +127277,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062B, 0x062C },
+ specials={ "isolated", 0x62B, 0x62C },
unicodeslot=0xFC11,
},
[0xFC12]={
@@ -123579,7 +127285,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062B, 0x0645 },
+ specials={ "isolated", 0x62B, 0x645 },
unicodeslot=0xFC12,
},
[0xFC13]={
@@ -123587,7 +127293,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062B, 0x0649 },
+ specials={ "isolated", 0x62B, 0x649 },
unicodeslot=0xFC13,
},
[0xFC14]={
@@ -123595,7 +127301,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062B, 0x064A },
+ specials={ "isolated", 0x62B, 0x64A },
unicodeslot=0xFC14,
},
[0xFC15]={
@@ -123603,7 +127309,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062C, 0x062D },
+ specials={ "isolated", 0x62C, 0x62D },
unicodeslot=0xFC15,
},
[0xFC16]={
@@ -123611,7 +127317,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062C, 0x0645 },
+ specials={ "isolated", 0x62C, 0x645 },
unicodeslot=0xFC16,
},
[0xFC17]={
@@ -123619,7 +127325,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062D, 0x062C },
+ specials={ "isolated", 0x62D, 0x62C },
unicodeslot=0xFC17,
},
[0xFC18]={
@@ -123627,7 +127333,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062D, 0x0645 },
+ specials={ "isolated", 0x62D, 0x645 },
unicodeslot=0xFC18,
},
[0xFC19]={
@@ -123635,7 +127341,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062E, 0x062C },
+ specials={ "isolated", 0x62E, 0x62C },
unicodeslot=0xFC19,
},
[0xFC1A]={
@@ -123643,7 +127349,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062E, 0x062D },
+ specials={ "isolated", 0x62E, 0x62D },
unicodeslot=0xFC1A,
},
[0xFC1B]={
@@ -123651,7 +127357,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062E, 0x0645 },
+ specials={ "isolated", 0x62E, 0x645 },
unicodeslot=0xFC1B,
},
[0xFC1C]={
@@ -123659,7 +127365,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x062C },
+ specials={ "isolated", 0x633, 0x62C },
unicodeslot=0xFC1C,
},
[0xFC1D]={
@@ -123667,7 +127373,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x062D },
+ specials={ "isolated", 0x633, 0x62D },
unicodeslot=0xFC1D,
},
[0xFC1E]={
@@ -123675,7 +127381,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x062E },
+ specials={ "isolated", 0x633, 0x62E },
unicodeslot=0xFC1E,
},
[0xFC1F]={
@@ -123683,7 +127389,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x0645 },
+ specials={ "isolated", 0x633, 0x645 },
unicodeslot=0xFC1F,
},
[0xFC20]={
@@ -123691,7 +127397,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x062D },
+ specials={ "isolated", 0x635, 0x62D },
unicodeslot=0xFC20,
},
[0xFC21]={
@@ -123699,7 +127405,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0645 },
+ specials={ "isolated", 0x635, 0x645 },
unicodeslot=0xFC21,
},
[0xFC22]={
@@ -123707,7 +127413,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x062C },
+ specials={ "isolated", 0x636, 0x62C },
unicodeslot=0xFC22,
},
[0xFC23]={
@@ -123715,7 +127421,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x062D },
+ specials={ "isolated", 0x636, 0x62D },
unicodeslot=0xFC23,
},
[0xFC24]={
@@ -123723,7 +127429,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x062E },
+ specials={ "isolated", 0x636, 0x62E },
unicodeslot=0xFC24,
},
[0xFC25]={
@@ -123731,7 +127437,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x0645 },
+ specials={ "isolated", 0x636, 0x645 },
unicodeslot=0xFC25,
},
[0xFC26]={
@@ -123739,7 +127445,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0637, 0x062D },
+ specials={ "isolated", 0x637, 0x62D },
unicodeslot=0xFC26,
},
[0xFC27]={
@@ -123747,7 +127453,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0637, 0x0645 },
+ specials={ "isolated", 0x637, 0x645 },
unicodeslot=0xFC27,
},
[0xFC28]={
@@ -123755,7 +127461,7 @@ characters.data={
description="ARABIC LIGATURE ZAH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0638, 0x0645 },
+ specials={ "isolated", 0x638, 0x645 },
unicodeslot=0xFC28,
},
[0xFC29]={
@@ -123763,7 +127469,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0639, 0x062C },
+ specials={ "isolated", 0x639, 0x62C },
unicodeslot=0xFC29,
},
[0xFC2A]={
@@ -123771,7 +127477,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0639, 0x0645 },
+ specials={ "isolated", 0x639, 0x645 },
unicodeslot=0xFC2A,
},
[0xFC2B]={
@@ -123779,7 +127485,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x063A, 0x062C },
+ specials={ "isolated", 0x63A, 0x62C },
unicodeslot=0xFC2B,
},
[0xFC2C]={
@@ -123787,7 +127493,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x063A, 0x0645 },
+ specials={ "isolated", 0x63A, 0x645 },
unicodeslot=0xFC2C,
},
[0xFC2D]={
@@ -123795,7 +127501,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641, 0x062C },
+ specials={ "isolated", 0x641, 0x62C },
unicodeslot=0xFC2D,
},
[0xFC2E]={
@@ -123803,7 +127509,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641, 0x062D },
+ specials={ "isolated", 0x641, 0x62D },
unicodeslot=0xFC2E,
},
[0xFC2F]={
@@ -123811,7 +127517,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641, 0x062E },
+ specials={ "isolated", 0x641, 0x62E },
unicodeslot=0xFC2F,
},
[0xFC30]={
@@ -123819,7 +127525,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641, 0x0645 },
+ specials={ "isolated", 0x641, 0x645 },
unicodeslot=0xFC30,
},
[0xFC31]={
@@ -123827,7 +127533,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641, 0x0649 },
+ specials={ "isolated", 0x641, 0x649 },
unicodeslot=0xFC31,
},
[0xFC32]={
@@ -123835,7 +127541,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641, 0x064A },
+ specials={ "isolated", 0x641, 0x64A },
unicodeslot=0xFC32,
},
[0xFC33]={
@@ -123843,7 +127549,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0642, 0x062D },
+ specials={ "isolated", 0x642, 0x62D },
unicodeslot=0xFC33,
},
[0xFC34]={
@@ -123851,7 +127557,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0642, 0x0645 },
+ specials={ "isolated", 0x642, 0x645 },
unicodeslot=0xFC34,
},
[0xFC35]={
@@ -123859,7 +127565,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0642, 0x0649 },
+ specials={ "isolated", 0x642, 0x649 },
unicodeslot=0xFC35,
},
[0xFC36]={
@@ -123867,7 +127573,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0642, 0x064A },
+ specials={ "isolated", 0x642, 0x64A },
unicodeslot=0xFC36,
},
[0xFC37]={
@@ -123875,7 +127581,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x0627 },
+ specials={ "isolated", 0x643, 0x627 },
unicodeslot=0xFC37,
},
[0xFC38]={
@@ -123883,7 +127589,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x062C },
+ specials={ "isolated", 0x643, 0x62C },
unicodeslot=0xFC38,
},
[0xFC39]={
@@ -123891,7 +127597,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x062D },
+ specials={ "isolated", 0x643, 0x62D },
unicodeslot=0xFC39,
},
[0xFC3A]={
@@ -123899,7 +127605,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x062E },
+ specials={ "isolated", 0x643, 0x62E },
unicodeslot=0xFC3A,
},
[0xFC3B]={
@@ -123907,7 +127613,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH LAM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x0644 },
+ specials={ "isolated", 0x643, 0x644 },
unicodeslot=0xFC3B,
},
[0xFC3C]={
@@ -123915,7 +127621,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x0645 },
+ specials={ "isolated", 0x643, 0x645 },
unicodeslot=0xFC3C,
},
[0xFC3D]={
@@ -123923,7 +127629,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x0649 },
+ specials={ "isolated", 0x643, 0x649 },
unicodeslot=0xFC3D,
},
[0xFC3E]={
@@ -123931,7 +127637,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643, 0x064A },
+ specials={ "isolated", 0x643, 0x64A },
unicodeslot=0xFC3E,
},
[0xFC3F]={
@@ -123939,7 +127645,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x062C },
+ specials={ "isolated", 0x644, 0x62C },
unicodeslot=0xFC3F,
},
[0xFC40]={
@@ -123947,7 +127653,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x062D },
+ specials={ "isolated", 0x644, 0x62D },
unicodeslot=0xFC40,
},
[0xFC41]={
@@ -123955,7 +127661,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x062E },
+ specials={ "isolated", 0x644, 0x62E },
unicodeslot=0xFC41,
},
[0xFC42]={
@@ -123963,7 +127669,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x0645 },
+ specials={ "isolated", 0x644, 0x645 },
unicodeslot=0xFC42,
},
[0xFC43]={
@@ -123971,7 +127677,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x0649 },
+ specials={ "isolated", 0x644, 0x649 },
unicodeslot=0xFC43,
},
[0xFC44]={
@@ -123979,7 +127685,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x064A },
+ specials={ "isolated", 0x644, 0x64A },
unicodeslot=0xFC44,
},
[0xFC45]={
@@ -123987,7 +127693,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x062C },
+ specials={ "isolated", 0x645, 0x62C },
unicodeslot=0xFC45,
},
[0xFC46]={
@@ -123995,7 +127701,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x062D },
+ specials={ "isolated", 0x645, 0x62D },
unicodeslot=0xFC46,
},
[0xFC47]={
@@ -124003,7 +127709,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x062E },
+ specials={ "isolated", 0x645, 0x62E },
unicodeslot=0xFC47,
},
[0xFC48]={
@@ -124012,7 +127718,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x0645 },
+ specials={ "isolated", 0x645, 0x645 },
unicodeslot=0xFC48,
},
[0xFC49]={
@@ -124020,7 +127726,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x0649 },
+ specials={ "isolated", 0x645, 0x649 },
unicodeslot=0xFC49,
},
[0xFC4A]={
@@ -124028,7 +127734,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x064A },
+ specials={ "isolated", 0x645, 0x64A },
unicodeslot=0xFC4A,
},
[0xFC4B]={
@@ -124037,7 +127743,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646, 0x062C },
+ specials={ "isolated", 0x646, 0x62C },
unicodeslot=0xFC4B,
},
[0xFC4C]={
@@ -124045,7 +127751,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646, 0x062D },
+ specials={ "isolated", 0x646, 0x62D },
unicodeslot=0xFC4C,
},
[0xFC4D]={
@@ -124053,7 +127759,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646, 0x062E },
+ specials={ "isolated", 0x646, 0x62E },
unicodeslot=0xFC4D,
},
[0xFC4E]={
@@ -124062,7 +127768,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646, 0x0645 },
+ specials={ "isolated", 0x646, 0x645 },
unicodeslot=0xFC4E,
},
[0xFC4F]={
@@ -124070,7 +127776,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646, 0x0649 },
+ specials={ "isolated", 0x646, 0x649 },
unicodeslot=0xFC4F,
},
[0xFC50]={
@@ -124078,7 +127784,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646, 0x064A },
+ specials={ "isolated", 0x646, 0x64A },
unicodeslot=0xFC50,
},
[0xFC51]={
@@ -124086,7 +127792,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0647, 0x062C },
+ specials={ "isolated", 0x647, 0x62C },
unicodeslot=0xFC51,
},
[0xFC52]={
@@ -124094,7 +127800,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0647, 0x0645 },
+ specials={ "isolated", 0x647, 0x645 },
unicodeslot=0xFC52,
},
[0xFC53]={
@@ -124102,7 +127808,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0647, 0x0649 },
+ specials={ "isolated", 0x647, 0x649 },
unicodeslot=0xFC53,
},
[0xFC54]={
@@ -124110,7 +127816,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0647, 0x064A },
+ specials={ "isolated", 0x647, 0x64A },
unicodeslot=0xFC54,
},
[0xFC55]={
@@ -124118,7 +127824,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A, 0x062C },
+ specials={ "isolated", 0x64A, 0x62C },
unicodeslot=0xFC55,
},
[0xFC56]={
@@ -124126,7 +127832,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A, 0x062D },
+ specials={ "isolated", 0x64A, 0x62D },
unicodeslot=0xFC56,
},
[0xFC57]={
@@ -124134,7 +127840,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A, 0x062E },
+ specials={ "isolated", 0x64A, 0x62E },
unicodeslot=0xFC57,
},
[0xFC58]={
@@ -124143,7 +127849,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A, 0x0645 },
+ specials={ "isolated", 0x64A, 0x645 },
unicodeslot=0xFC58,
},
[0xFC59]={
@@ -124151,7 +127857,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A, 0x0649 },
+ specials={ "isolated", 0x64A, 0x649 },
unicodeslot=0xFC59,
},
[0xFC5A]={
@@ -124159,7 +127865,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A, 0x064A },
+ specials={ "isolated", 0x64A, 0x64A },
unicodeslot=0xFC5A,
},
[0xFC5B]={
@@ -124167,7 +127873,7 @@ characters.data={
description="ARABIC LIGATURE THAL WITH SUPERSCRIPT ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0630, 0x0670 },
+ specials={ "isolated", 0x630, 0x670 },
unicodeslot=0xFC5B,
},
[0xFC5C]={
@@ -124175,7 +127881,7 @@ characters.data={
description="ARABIC LIGATURE REH WITH SUPERSCRIPT ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0631, 0x0670 },
+ specials={ "isolated", 0x631, 0x670 },
unicodeslot=0xFC5C,
},
[0xFC5D]={
@@ -124183,7 +127889,7 @@ characters.data={
description="ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0649, 0x0670 },
+ specials={ "isolated", 0x649, 0x670 },
unicodeslot=0xFC5D,
},
[0xFC5E]={
@@ -124192,7 +127898,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH DAMMATAN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064C, 0x0651 },
+ specials={ "isolated", 0x20, 0x64C, 0x651 },
unicodeslot=0xFC5E,
},
[0xFC5F]={
@@ -124201,7 +127907,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH KASRATAN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064D, 0x0651 },
+ specials={ "isolated", 0x20, 0x64D, 0x651 },
unicodeslot=0xFC5F,
},
[0xFC60]={
@@ -124210,7 +127916,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH FATHA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064E, 0x0651 },
+ specials={ "isolated", 0x20, 0x64E, 0x651 },
unicodeslot=0xFC60,
},
[0xFC61]={
@@ -124219,7 +127925,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH DAMMA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064F, 0x0651 },
+ specials={ "isolated", 0x20, 0x64F, 0x651 },
unicodeslot=0xFC61,
},
[0xFC62]={
@@ -124228,7 +127934,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH KASRA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x0650, 0x0651 },
+ specials={ "isolated", 0x20, 0x650, 0x651 },
unicodeslot=0xFC62,
},
[0xFC63]={
@@ -124236,7 +127942,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH SUPERSCRIPT ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x0651, 0x0670 },
+ specials={ "isolated", 0x20, 0x651, 0x670 },
unicodeslot=0xFC63,
},
[0xFC64]={
@@ -124244,7 +127950,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0631 },
+ specials={ "final", 0x626, 0x631 },
unicodeslot=0xFC64,
},
[0xFC65]={
@@ -124252,7 +127958,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0632 },
+ specials={ "final", 0x626, 0x632 },
unicodeslot=0xFC65,
},
[0xFC66]={
@@ -124260,7 +127966,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0645 },
+ specials={ "final", 0x626, 0x645 },
unicodeslot=0xFC66,
},
[0xFC67]={
@@ -124268,7 +127974,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0646 },
+ specials={ "final", 0x626, 0x646 },
unicodeslot=0xFC67,
},
[0xFC68]={
@@ -124276,7 +127982,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x0649 },
+ specials={ "final", 0x626, 0x649 },
unicodeslot=0xFC68,
},
[0xFC69]={
@@ -124284,7 +127990,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0626, 0x064A },
+ specials={ "final", 0x626, 0x64A },
unicodeslot=0xFC69,
},
[0xFC6A]={
@@ -124292,7 +127998,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x0631 },
+ specials={ "final", 0x628, 0x631 },
unicodeslot=0xFC6A,
},
[0xFC6B]={
@@ -124300,7 +128006,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x0632 },
+ specials={ "final", 0x628, 0x632 },
unicodeslot=0xFC6B,
},
[0xFC6C]={
@@ -124308,7 +128014,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x0645 },
+ specials={ "final", 0x628, 0x645 },
unicodeslot=0xFC6C,
},
[0xFC6D]={
@@ -124317,7 +128023,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x0646 },
+ specials={ "final", 0x628, 0x646 },
unicodeslot=0xFC6D,
},
[0xFC6E]={
@@ -124325,7 +128031,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x0649 },
+ specials={ "final", 0x628, 0x649 },
unicodeslot=0xFC6E,
},
[0xFC6F]={
@@ -124333,7 +128039,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x064A },
+ specials={ "final", 0x628, 0x64A },
unicodeslot=0xFC6F,
},
[0xFC70]={
@@ -124341,7 +128047,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0631 },
+ specials={ "final", 0x62A, 0x631 },
unicodeslot=0xFC70,
},
[0xFC71]={
@@ -124349,7 +128055,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0632 },
+ specials={ "final", 0x62A, 0x632 },
unicodeslot=0xFC71,
},
[0xFC72]={
@@ -124357,7 +128063,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0645 },
+ specials={ "final", 0x62A, 0x645 },
unicodeslot=0xFC72,
},
[0xFC73]={
@@ -124366,7 +128072,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0646 },
+ specials={ "final", 0x62A, 0x646 },
unicodeslot=0xFC73,
},
[0xFC74]={
@@ -124374,7 +128080,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0649 },
+ specials={ "final", 0x62A, 0x649 },
unicodeslot=0xFC74,
},
[0xFC75]={
@@ -124382,7 +128088,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x064A },
+ specials={ "final", 0x62A, 0x64A },
unicodeslot=0xFC75,
},
[0xFC76]={
@@ -124390,7 +128096,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B, 0x0631 },
+ specials={ "final", 0x62B, 0x631 },
unicodeslot=0xFC76,
},
[0xFC77]={
@@ -124398,7 +128104,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B, 0x0632 },
+ specials={ "final", 0x62B, 0x632 },
unicodeslot=0xFC77,
},
[0xFC78]={
@@ -124406,7 +128112,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B, 0x0645 },
+ specials={ "final", 0x62B, 0x645 },
unicodeslot=0xFC78,
},
[0xFC79]={
@@ -124414,7 +128120,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B, 0x0646 },
+ specials={ "final", 0x62B, 0x646 },
unicodeslot=0xFC79,
},
[0xFC7A]={
@@ -124422,7 +128128,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B, 0x0649 },
+ specials={ "final", 0x62B, 0x649 },
unicodeslot=0xFC7A,
},
[0xFC7B]={
@@ -124430,7 +128136,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B, 0x064A },
+ specials={ "final", 0x62B, 0x64A },
unicodeslot=0xFC7B,
},
[0xFC7C]={
@@ -124438,7 +128144,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0641, 0x0649 },
+ specials={ "final", 0x641, 0x649 },
unicodeslot=0xFC7C,
},
[0xFC7D]={
@@ -124446,7 +128152,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0641, 0x064A },
+ specials={ "final", 0x641, 0x64A },
unicodeslot=0xFC7D,
},
[0xFC7E]={
@@ -124454,7 +128160,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0642, 0x0649 },
+ specials={ "final", 0x642, 0x649 },
unicodeslot=0xFC7E,
},
[0xFC7F]={
@@ -124462,7 +128168,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0642, 0x064A },
+ specials={ "final", 0x642, 0x64A },
unicodeslot=0xFC7F,
},
[0xFC80]={
@@ -124470,7 +128176,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH ALEF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x0627 },
+ specials={ "final", 0x643, 0x627 },
unicodeslot=0xFC80,
},
[0xFC81]={
@@ -124478,7 +128184,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH LAM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x0644 },
+ specials={ "final", 0x643, 0x644 },
unicodeslot=0xFC81,
},
[0xFC82]={
@@ -124486,7 +128192,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x0645 },
+ specials={ "final", 0x643, 0x645 },
unicodeslot=0xFC82,
},
[0xFC83]={
@@ -124494,7 +128200,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x0649 },
+ specials={ "final", 0x643, 0x649 },
unicodeslot=0xFC83,
},
[0xFC84]={
@@ -124502,7 +128208,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x064A },
+ specials={ "final", 0x643, 0x64A },
unicodeslot=0xFC84,
},
[0xFC85]={
@@ -124510,7 +128216,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0645 },
+ specials={ "final", 0x644, 0x645 },
unicodeslot=0xFC85,
},
[0xFC86]={
@@ -124518,7 +128224,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0649 },
+ specials={ "final", 0x644, 0x649 },
unicodeslot=0xFC86,
},
[0xFC87]={
@@ -124526,7 +128232,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x064A },
+ specials={ "final", 0x644, 0x64A },
unicodeslot=0xFC87,
},
[0xFC88]={
@@ -124534,7 +128240,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH ALEF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645, 0x0627 },
+ specials={ "final", 0x645, 0x627 },
unicodeslot=0xFC88,
},
[0xFC89]={
@@ -124542,7 +128248,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645, 0x0645 },
+ specials={ "final", 0x645, 0x645 },
unicodeslot=0xFC89,
},
[0xFC8A]={
@@ -124550,7 +128256,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0631 },
+ specials={ "final", 0x646, 0x631 },
unicodeslot=0xFC8A,
},
[0xFC8B]={
@@ -124558,7 +128264,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0632 },
+ specials={ "final", 0x646, 0x632 },
unicodeslot=0xFC8B,
},
[0xFC8C]={
@@ -124566,7 +128272,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0645 },
+ specials={ "final", 0x646, 0x645 },
unicodeslot=0xFC8C,
},
[0xFC8D]={
@@ -124575,7 +128281,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0646 },
+ specials={ "final", 0x646, 0x646 },
unicodeslot=0xFC8D,
},
[0xFC8E]={
@@ -124583,7 +128289,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0649 },
+ specials={ "final", 0x646, 0x649 },
unicodeslot=0xFC8E,
},
[0xFC8F]={
@@ -124591,7 +128297,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x064A },
+ specials={ "final", 0x646, 0x64A },
unicodeslot=0xFC8F,
},
[0xFC90]={
@@ -124599,7 +128305,7 @@ characters.data={
description="ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0649, 0x0670 },
+ specials={ "final", 0x649, 0x670 },
unicodeslot=0xFC90,
},
[0xFC91]={
@@ -124607,7 +128313,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0631 },
+ specials={ "final", 0x64A, 0x631 },
unicodeslot=0xFC91,
},
[0xFC92]={
@@ -124615,7 +128321,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0632 },
+ specials={ "final", 0x64A, 0x632 },
unicodeslot=0xFC92,
},
[0xFC93]={
@@ -124623,7 +128329,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0645 },
+ specials={ "final", 0x64A, 0x645 },
unicodeslot=0xFC93,
},
[0xFC94]={
@@ -124632,7 +128338,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0646 },
+ specials={ "final", 0x64A, 0x646 },
unicodeslot=0xFC94,
},
[0xFC95]={
@@ -124640,7 +128346,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0649 },
+ specials={ "final", 0x64A, 0x649 },
unicodeslot=0xFC95,
},
[0xFC96]={
@@ -124648,7 +128354,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x064A },
+ specials={ "final", 0x64A, 0x64A },
unicodeslot=0xFC96,
},
[0xFC97]={
@@ -124656,7 +128362,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x062C },
+ specials={ "initial", 0x626, 0x62C },
unicodeslot=0xFC97,
},
[0xFC98]={
@@ -124664,7 +128370,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x062D },
+ specials={ "initial", 0x626, 0x62D },
unicodeslot=0xFC98,
},
[0xFC99]={
@@ -124672,7 +128378,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x062E },
+ specials={ "initial", 0x626, 0x62E },
unicodeslot=0xFC99,
},
[0xFC9A]={
@@ -124680,7 +128386,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x0645 },
+ specials={ "initial", 0x626, 0x645 },
unicodeslot=0xFC9A,
},
[0xFC9B]={
@@ -124688,7 +128394,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0626, 0x0647 },
+ specials={ "initial", 0x626, 0x647 },
unicodeslot=0xFC9B,
},
[0xFC9C]={
@@ -124696,7 +128402,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0628, 0x062C },
+ specials={ "initial", 0x628, 0x62C },
unicodeslot=0xFC9C,
},
[0xFC9D]={
@@ -124704,7 +128410,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0628, 0x062D },
+ specials={ "initial", 0x628, 0x62D },
unicodeslot=0xFC9D,
},
[0xFC9E]={
@@ -124712,7 +128418,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0628, 0x062E },
+ specials={ "initial", 0x628, 0x62E },
unicodeslot=0xFC9E,
},
[0xFC9F]={
@@ -124721,7 +128427,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0628, 0x0645 },
+ specials={ "initial", 0x628, 0x645 },
unicodeslot=0xFC9F,
},
[0xFCA0]={
@@ -124729,7 +128435,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0628, 0x0647 },
+ specials={ "initial", 0x628, 0x647 },
unicodeslot=0xFCA0,
},
[0xFCA1]={
@@ -124738,7 +128444,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062C },
+ specials={ "initial", 0x62A, 0x62C },
unicodeslot=0xFCA1,
},
[0xFCA2]={
@@ -124747,7 +128453,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062D },
+ specials={ "initial", 0x62A, 0x62D },
unicodeslot=0xFCA2,
},
[0xFCA3]={
@@ -124755,7 +128461,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062E },
+ specials={ "initial", 0x62A, 0x62E },
unicodeslot=0xFCA3,
},
[0xFCA4]={
@@ -124764,7 +128470,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x0645 },
+ specials={ "initial", 0x62A, 0x645 },
unicodeslot=0xFCA4,
},
[0xFCA5]={
@@ -124772,7 +128478,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x0647 },
+ specials={ "initial", 0x62A, 0x647 },
unicodeslot=0xFCA5,
},
[0xFCA6]={
@@ -124780,7 +128486,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062B, 0x0645 },
+ specials={ "initial", 0x62B, 0x645 },
unicodeslot=0xFCA6,
},
[0xFCA7]={
@@ -124788,7 +128494,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062C, 0x062D },
+ specials={ "initial", 0x62C, 0x62D },
unicodeslot=0xFCA7,
},
[0xFCA8]={
@@ -124796,7 +128502,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062C, 0x0645 },
+ specials={ "initial", 0x62C, 0x645 },
unicodeslot=0xFCA8,
},
[0xFCA9]={
@@ -124804,7 +128510,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062D, 0x062C },
+ specials={ "initial", 0x62D, 0x62C },
unicodeslot=0xFCA9,
},
[0xFCAA]={
@@ -124812,7 +128518,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062D, 0x0645 },
+ specials={ "initial", 0x62D, 0x645 },
unicodeslot=0xFCAA,
},
[0xFCAB]={
@@ -124820,7 +128526,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062E, 0x062C },
+ specials={ "initial", 0x62E, 0x62C },
unicodeslot=0xFCAB,
},
[0xFCAC]={
@@ -124828,7 +128534,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062E, 0x0645 },
+ specials={ "initial", 0x62E, 0x645 },
unicodeslot=0xFCAC,
},
[0xFCAD]={
@@ -124836,7 +128542,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x062C },
+ specials={ "initial", 0x633, 0x62C },
unicodeslot=0xFCAD,
},
[0xFCAE]={
@@ -124844,7 +128550,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x062D },
+ specials={ "initial", 0x633, 0x62D },
unicodeslot=0xFCAE,
},
[0xFCAF]={
@@ -124852,7 +128558,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x062E },
+ specials={ "initial", 0x633, 0x62E },
unicodeslot=0xFCAF,
},
[0xFCB0]={
@@ -124860,7 +128566,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x0645 },
+ specials={ "initial", 0x633, 0x645 },
unicodeslot=0xFCB0,
},
[0xFCB1]={
@@ -124868,7 +128574,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0635, 0x062D },
+ specials={ "initial", 0x635, 0x62D },
unicodeslot=0xFCB1,
},
[0xFCB2]={
@@ -124876,7 +128582,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0635, 0x062E },
+ specials={ "initial", 0x635, 0x62E },
unicodeslot=0xFCB2,
},
[0xFCB3]={
@@ -124884,7 +128590,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0635, 0x0645 },
+ specials={ "initial", 0x635, 0x645 },
unicodeslot=0xFCB3,
},
[0xFCB4]={
@@ -124892,7 +128598,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0636, 0x062C },
+ specials={ "initial", 0x636, 0x62C },
unicodeslot=0xFCB4,
},
[0xFCB5]={
@@ -124900,7 +128606,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0636, 0x062D },
+ specials={ "initial", 0x636, 0x62D },
unicodeslot=0xFCB5,
},
[0xFCB6]={
@@ -124908,7 +128614,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0636, 0x062E },
+ specials={ "initial", 0x636, 0x62E },
unicodeslot=0xFCB6,
},
[0xFCB7]={
@@ -124916,7 +128622,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0636, 0x0645 },
+ specials={ "initial", 0x636, 0x645 },
unicodeslot=0xFCB7,
},
[0xFCB8]={
@@ -124924,7 +128630,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0637, 0x062D },
+ specials={ "initial", 0x637, 0x62D },
unicodeslot=0xFCB8,
},
[0xFCB9]={
@@ -124932,7 +128638,7 @@ characters.data={
description="ARABIC LIGATURE ZAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0638, 0x0645 },
+ specials={ "initial", 0x638, 0x645 },
unicodeslot=0xFCB9,
},
[0xFCBA]={
@@ -124940,7 +128646,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0639, 0x062C },
+ specials={ "initial", 0x639, 0x62C },
unicodeslot=0xFCBA,
},
[0xFCBB]={
@@ -124948,7 +128654,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0639, 0x0645 },
+ specials={ "initial", 0x639, 0x645 },
unicodeslot=0xFCBB,
},
[0xFCBC]={
@@ -124956,7 +128662,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x063A, 0x062C },
+ specials={ "initial", 0x63A, 0x62C },
unicodeslot=0xFCBC,
},
[0xFCBD]={
@@ -124964,7 +128670,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x063A, 0x0645 },
+ specials={ "initial", 0x63A, 0x645 },
unicodeslot=0xFCBD,
},
[0xFCBE]={
@@ -124972,7 +128678,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0641, 0x062C },
+ specials={ "initial", 0x641, 0x62C },
unicodeslot=0xFCBE,
},
[0xFCBF]={
@@ -124980,7 +128686,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0641, 0x062D },
+ specials={ "initial", 0x641, 0x62D },
unicodeslot=0xFCBF,
},
[0xFCC0]={
@@ -124988,7 +128694,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0641, 0x062E },
+ specials={ "initial", 0x641, 0x62E },
unicodeslot=0xFCC0,
},
[0xFCC1]={
@@ -124996,7 +128702,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0641, 0x0645 },
+ specials={ "initial", 0x641, 0x645 },
unicodeslot=0xFCC1,
},
[0xFCC2]={
@@ -125004,7 +128710,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0642, 0x062D },
+ specials={ "initial", 0x642, 0x62D },
unicodeslot=0xFCC2,
},
[0xFCC3]={
@@ -125012,7 +128718,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0642, 0x0645 },
+ specials={ "initial", 0x642, 0x645 },
unicodeslot=0xFCC3,
},
[0xFCC4]={
@@ -125020,7 +128726,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643, 0x062C },
+ specials={ "initial", 0x643, 0x62C },
unicodeslot=0xFCC4,
},
[0xFCC5]={
@@ -125028,7 +128734,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643, 0x062D },
+ specials={ "initial", 0x643, 0x62D },
unicodeslot=0xFCC5,
},
[0xFCC6]={
@@ -125036,7 +128742,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643, 0x062E },
+ specials={ "initial", 0x643, 0x62E },
unicodeslot=0xFCC6,
},
[0xFCC7]={
@@ -125044,7 +128750,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH LAM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643, 0x0644 },
+ specials={ "initial", 0x643, 0x644 },
unicodeslot=0xFCC7,
},
[0xFCC8]={
@@ -125052,7 +128758,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643, 0x0645 },
+ specials={ "initial", 0x643, 0x645 },
unicodeslot=0xFCC8,
},
[0xFCC9]={
@@ -125061,7 +128767,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062C },
+ specials={ "initial", 0x644, 0x62C },
unicodeslot=0xFCC9,
},
[0xFCCA]={
@@ -125070,7 +128776,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062D },
+ specials={ "initial", 0x644, 0x62D },
unicodeslot=0xFCCA,
},
[0xFCCB]={
@@ -125079,7 +128785,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062E },
+ specials={ "initial", 0x644, 0x62E },
unicodeslot=0xFCCB,
},
[0xFCCC]={
@@ -125088,7 +128794,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x0645 },
+ specials={ "initial", 0x644, 0x645 },
unicodeslot=0xFCCC,
},
[0xFCCD]={
@@ -125096,7 +128802,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x0647 },
+ specials={ "initial", 0x644, 0x647 },
unicodeslot=0xFCCD,
},
[0xFCCE]={
@@ -125104,7 +128810,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062C },
+ specials={ "initial", 0x645, 0x62C },
unicodeslot=0xFCCE,
},
[0xFCCF]={
@@ -125112,7 +128818,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062D },
+ specials={ "initial", 0x645, 0x62D },
unicodeslot=0xFCCF,
},
[0xFCD0]={
@@ -125120,7 +128826,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062E },
+ specials={ "initial", 0x645, 0x62E },
unicodeslot=0xFCD0,
},
[0xFCD1]={
@@ -125129,7 +128835,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x0645 },
+ specials={ "initial", 0x645, 0x645 },
unicodeslot=0xFCD1,
},
[0xFCD2]={
@@ -125138,7 +128844,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x062C },
+ specials={ "initial", 0x646, 0x62C },
unicodeslot=0xFCD2,
},
[0xFCD3]={
@@ -125146,7 +128852,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x062D },
+ specials={ "initial", 0x646, 0x62D },
unicodeslot=0xFCD3,
},
[0xFCD4]={
@@ -125154,7 +128860,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x062E },
+ specials={ "initial", 0x646, 0x62E },
unicodeslot=0xFCD4,
},
[0xFCD5]={
@@ -125163,7 +128869,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x0645 },
+ specials={ "initial", 0x646, 0x645 },
unicodeslot=0xFCD5,
},
[0xFCD6]={
@@ -125171,7 +128877,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x0647 },
+ specials={ "initial", 0x646, 0x647 },
unicodeslot=0xFCD6,
},
[0xFCD7]={
@@ -125179,7 +128885,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0647, 0x062C },
+ specials={ "initial", 0x647, 0x62C },
unicodeslot=0xFCD7,
},
[0xFCD8]={
@@ -125187,7 +128893,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0647, 0x0645 },
+ specials={ "initial", 0x647, 0x645 },
unicodeslot=0xFCD8,
},
[0xFCD9]={
@@ -125195,7 +128901,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH SUPERSCRIPT ALEF INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0647, 0x0670 },
+ specials={ "initial", 0x647, 0x670 },
unicodeslot=0xFCD9,
},
[0xFCDA]={
@@ -125203,7 +128909,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A, 0x062C },
+ specials={ "initial", 0x64A, 0x62C },
unicodeslot=0xFCDA,
},
[0xFCDB]={
@@ -125211,7 +128917,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A, 0x062D },
+ specials={ "initial", 0x64A, 0x62D },
unicodeslot=0xFCDB,
},
[0xFCDC]={
@@ -125219,7 +128925,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A, 0x062E },
+ specials={ "initial", 0x64A, 0x62E },
unicodeslot=0xFCDC,
},
[0xFCDD]={
@@ -125228,7 +128934,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A, 0x0645 },
+ specials={ "initial", 0x64A, 0x645 },
unicodeslot=0xFCDD,
},
[0xFCDE]={
@@ -125236,7 +128942,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A, 0x0647 },
+ specials={ "initial", 0x64A, 0x647 },
unicodeslot=0xFCDE,
},
[0xFCDF]={
@@ -125244,7 +128950,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0626, 0x0645 },
+ specials={ "medial", 0x626, 0x645 },
unicodeslot=0xFCDF,
},
[0xFCE0]={
@@ -125252,7 +128958,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0626, 0x0647 },
+ specials={ "medial", 0x626, 0x647 },
unicodeslot=0xFCE0,
},
[0xFCE1]={
@@ -125260,7 +128966,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0628, 0x0645 },
+ specials={ "medial", 0x628, 0x645 },
unicodeslot=0xFCE1,
},
[0xFCE2]={
@@ -125268,7 +128974,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0628, 0x0647 },
+ specials={ "medial", 0x628, 0x647 },
unicodeslot=0xFCE2,
},
[0xFCE3]={
@@ -125276,7 +128982,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062A, 0x0645 },
+ specials={ "medial", 0x62A, 0x645 },
unicodeslot=0xFCE3,
},
[0xFCE4]={
@@ -125284,7 +128990,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062A, 0x0647 },
+ specials={ "medial", 0x62A, 0x647 },
unicodeslot=0xFCE4,
},
[0xFCE5]={
@@ -125292,7 +128998,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062B, 0x0645 },
+ specials={ "medial", 0x62B, 0x645 },
unicodeslot=0xFCE5,
},
[0xFCE6]={
@@ -125300,7 +129006,7 @@ characters.data={
description="ARABIC LIGATURE THEH WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062B, 0x0647 },
+ specials={ "medial", 0x62B, 0x647 },
unicodeslot=0xFCE6,
},
[0xFCE7]={
@@ -125308,7 +129014,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0633, 0x0645 },
+ specials={ "medial", 0x633, 0x645 },
unicodeslot=0xFCE7,
},
[0xFCE8]={
@@ -125316,7 +129022,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0633, 0x0647 },
+ specials={ "medial", 0x633, 0x647 },
unicodeslot=0xFCE8,
},
[0xFCE9]={
@@ -125324,7 +129030,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0634, 0x0645 },
+ specials={ "medial", 0x634, 0x645 },
unicodeslot=0xFCE9,
},
[0xFCEA]={
@@ -125332,7 +129038,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0634, 0x0647 },
+ specials={ "medial", 0x634, 0x647 },
unicodeslot=0xFCEA,
},
[0xFCEB]={
@@ -125340,7 +129046,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH LAM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0643, 0x0644 },
+ specials={ "medial", 0x643, 0x644 },
unicodeslot=0xFCEB,
},
[0xFCEC]={
@@ -125348,7 +129054,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0643, 0x0645 },
+ specials={ "medial", 0x643, 0x645 },
unicodeslot=0xFCEC,
},
[0xFCED]={
@@ -125356,7 +129062,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0644, 0x0645 },
+ specials={ "medial", 0x644, 0x645 },
unicodeslot=0xFCED,
},
[0xFCEE]={
@@ -125364,7 +129070,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0646, 0x0645 },
+ specials={ "medial", 0x646, 0x645 },
unicodeslot=0xFCEE,
},
[0xFCEF]={
@@ -125372,7 +129078,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0646, 0x0647 },
+ specials={ "medial", 0x646, 0x647 },
unicodeslot=0xFCEF,
},
[0xFCF0]={
@@ -125380,7 +129086,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x064A, 0x0645 },
+ specials={ "medial", 0x64A, 0x645 },
unicodeslot=0xFCF0,
},
[0xFCF1]={
@@ -125388,7 +129094,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x064A, 0x0647 },
+ specials={ "medial", 0x64A, 0x647 },
unicodeslot=0xFCF1,
},
[0xFCF2]={
@@ -125396,7 +129102,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH FATHA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x064E, 0x0651 },
+ specials={ "medial", 0x640, 0x64E, 0x651 },
unicodeslot=0xFCF2,
},
[0xFCF3]={
@@ -125404,7 +129110,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH DAMMA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x064F, 0x0651 },
+ specials={ "medial", 0x640, 0x64F, 0x651 },
unicodeslot=0xFCF3,
},
[0xFCF4]={
@@ -125412,7 +129118,7 @@ characters.data={
description="ARABIC LIGATURE SHADDA WITH KASRA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x0650, 0x0651 },
+ specials={ "medial", 0x640, 0x650, 0x651 },
unicodeslot=0xFCF4,
},
[0xFCF5]={
@@ -125420,7 +129126,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0637, 0x0649 },
+ specials={ "isolated", 0x637, 0x649 },
unicodeslot=0xFCF5,
},
[0xFCF6]={
@@ -125428,7 +129134,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0637, 0x064A },
+ specials={ "isolated", 0x637, 0x64A },
unicodeslot=0xFCF6,
},
[0xFCF7]={
@@ -125436,7 +129142,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0639, 0x0649 },
+ specials={ "isolated", 0x639, 0x649 },
unicodeslot=0xFCF7,
},
[0xFCF8]={
@@ -125444,7 +129150,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0639, 0x064A },
+ specials={ "isolated", 0x639, 0x64A },
unicodeslot=0xFCF8,
},
[0xFCF9]={
@@ -125452,7 +129158,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x063A, 0x0649 },
+ specials={ "isolated", 0x63A, 0x649 },
unicodeslot=0xFCF9,
},
[0xFCFA]={
@@ -125460,7 +129166,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x063A, 0x064A },
+ specials={ "isolated", 0x63A, 0x64A },
unicodeslot=0xFCFA,
},
[0xFCFB]={
@@ -125468,7 +129174,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x0649 },
+ specials={ "isolated", 0x633, 0x649 },
unicodeslot=0xFCFB,
},
[0xFCFC]={
@@ -125476,7 +129182,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x064A },
+ specials={ "isolated", 0x633, 0x64A },
unicodeslot=0xFCFC,
},
[0xFCFD]={
@@ -125484,7 +129190,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x0649 },
+ specials={ "isolated", 0x634, 0x649 },
unicodeslot=0xFCFD,
},
[0xFCFE]={
@@ -125492,7 +129198,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x064A },
+ specials={ "isolated", 0x634, 0x64A },
unicodeslot=0xFCFE,
},
[0xFCFF]={
@@ -125500,7 +129206,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062D, 0x0649 },
+ specials={ "isolated", 0x62D, 0x649 },
unicodeslot=0xFCFF,
},
[0xFD00]={
@@ -125508,7 +129214,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062D, 0x064A },
+ specials={ "isolated", 0x62D, 0x64A },
unicodeslot=0xFD00,
},
[0xFD01]={
@@ -125516,7 +129222,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062C, 0x0649 },
+ specials={ "isolated", 0x62C, 0x649 },
unicodeslot=0xFD01,
},
[0xFD02]={
@@ -125524,7 +129230,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062C, 0x064A },
+ specials={ "isolated", 0x62C, 0x64A },
unicodeslot=0xFD02,
},
[0xFD03]={
@@ -125532,7 +129238,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062E, 0x0649 },
+ specials={ "isolated", 0x62E, 0x649 },
unicodeslot=0xFD03,
},
[0xFD04]={
@@ -125540,7 +129246,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062E, 0x064A },
+ specials={ "isolated", 0x62E, 0x64A },
unicodeslot=0xFD04,
},
[0xFD05]={
@@ -125548,7 +129254,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0649 },
+ specials={ "isolated", 0x635, 0x649 },
unicodeslot=0xFD05,
},
[0xFD06]={
@@ -125556,7 +129262,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x064A },
+ specials={ "isolated", 0x635, 0x64A },
unicodeslot=0xFD06,
},
[0xFD07]={
@@ -125564,7 +129270,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x0649 },
+ specials={ "isolated", 0x636, 0x649 },
unicodeslot=0xFD07,
},
[0xFD08]={
@@ -125572,7 +129278,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x064A },
+ specials={ "isolated", 0x636, 0x64A },
unicodeslot=0xFD08,
},
[0xFD09]={
@@ -125580,7 +129286,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x062C },
+ specials={ "isolated", 0x634, 0x62C },
unicodeslot=0xFD09,
},
[0xFD0A]={
@@ -125588,7 +129294,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x062D },
+ specials={ "isolated", 0x634, 0x62D },
unicodeslot=0xFD0A,
},
[0xFD0B]={
@@ -125596,7 +129302,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x062E },
+ specials={ "isolated", 0x634, 0x62E },
unicodeslot=0xFD0B,
},
[0xFD0C]={
@@ -125604,7 +129310,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x0645 },
+ specials={ "isolated", 0x634, 0x645 },
unicodeslot=0xFD0C,
},
[0xFD0D]={
@@ -125612,7 +129318,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH REH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634, 0x0631 },
+ specials={ "isolated", 0x634, 0x631 },
unicodeslot=0xFD0D,
},
[0xFD0E]={
@@ -125620,7 +129326,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH REH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633, 0x0631 },
+ specials={ "isolated", 0x633, 0x631 },
unicodeslot=0xFD0E,
},
[0xFD0F]={
@@ -125628,7 +129334,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH REH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0631 },
+ specials={ "isolated", 0x635, 0x631 },
unicodeslot=0xFD0F,
},
[0xFD10]={
@@ -125636,7 +129342,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH REH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636, 0x0631 },
+ specials={ "isolated", 0x636, 0x631 },
unicodeslot=0xFD10,
},
[0xFD11]={
@@ -125644,7 +129350,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0637, 0x0649 },
+ specials={ "final", 0x637, 0x649 },
unicodeslot=0xFD11,
},
[0xFD12]={
@@ -125652,7 +129358,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0637, 0x064A },
+ specials={ "final", 0x637, 0x64A },
unicodeslot=0xFD12,
},
[0xFD13]={
@@ -125660,7 +129366,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639, 0x0649 },
+ specials={ "final", 0x639, 0x649 },
unicodeslot=0xFD13,
},
[0xFD14]={
@@ -125668,7 +129374,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639, 0x064A },
+ specials={ "final", 0x639, 0x64A },
unicodeslot=0xFD14,
},
[0xFD15]={
@@ -125676,7 +129382,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x063A, 0x0649 },
+ specials={ "final", 0x63A, 0x649 },
unicodeslot=0xFD15,
},
[0xFD16]={
@@ -125684,7 +129390,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x063A, 0x064A },
+ specials={ "final", 0x63A, 0x64A },
unicodeslot=0xFD16,
},
[0xFD17]={
@@ -125692,7 +129398,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x0649 },
+ specials={ "final", 0x633, 0x649 },
unicodeslot=0xFD17,
},
[0xFD18]={
@@ -125700,7 +129406,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x064A },
+ specials={ "final", 0x633, 0x64A },
unicodeslot=0xFD18,
},
[0xFD19]={
@@ -125708,7 +129414,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x0649 },
+ specials={ "final", 0x634, 0x649 },
unicodeslot=0xFD19,
},
[0xFD1A]={
@@ -125716,7 +129422,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x064A },
+ specials={ "final", 0x634, 0x64A },
unicodeslot=0xFD1A,
},
[0xFD1B]={
@@ -125724,7 +129430,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062D, 0x0649 },
+ specials={ "final", 0x62D, 0x649 },
unicodeslot=0xFD1B,
},
[0xFD1C]={
@@ -125732,7 +129438,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062D, 0x064A },
+ specials={ "final", 0x62D, 0x64A },
unicodeslot=0xFD1C,
},
[0xFD1D]={
@@ -125740,7 +129446,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x0649 },
+ specials={ "final", 0x62C, 0x649 },
unicodeslot=0xFD1D,
},
[0xFD1E]={
@@ -125748,7 +129454,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x064A },
+ specials={ "final", 0x62C, 0x64A },
unicodeslot=0xFD1E,
},
[0xFD1F]={
@@ -125756,7 +129462,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062E, 0x0649 },
+ specials={ "final", 0x62E, 0x649 },
unicodeslot=0xFD1F,
},
[0xFD20]={
@@ -125764,7 +129470,7 @@ characters.data={
description="ARABIC LIGATURE KHAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062E, 0x064A },
+ specials={ "final", 0x62E, 0x64A },
unicodeslot=0xFD20,
},
[0xFD21]={
@@ -125772,7 +129478,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635, 0x0649 },
+ specials={ "final", 0x635, 0x649 },
unicodeslot=0xFD21,
},
[0xFD22]={
@@ -125780,7 +129486,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635, 0x064A },
+ specials={ "final", 0x635, 0x64A },
unicodeslot=0xFD22,
},
[0xFD23]={
@@ -125788,7 +129494,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636, 0x0649 },
+ specials={ "final", 0x636, 0x649 },
unicodeslot=0xFD23,
},
[0xFD24]={
@@ -125796,7 +129502,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636, 0x064A },
+ specials={ "final", 0x636, 0x64A },
unicodeslot=0xFD24,
},
[0xFD25]={
@@ -125804,7 +129510,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH JEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x062C },
+ specials={ "final", 0x634, 0x62C },
unicodeslot=0xFD25,
},
[0xFD26]={
@@ -125812,7 +129518,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x062D },
+ specials={ "final", 0x634, 0x62D },
unicodeslot=0xFD26,
},
[0xFD27]={
@@ -125820,7 +129526,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH KHAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x062E },
+ specials={ "final", 0x634, 0x62E },
unicodeslot=0xFD27,
},
[0xFD28]={
@@ -125828,7 +129534,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x0645 },
+ specials={ "final", 0x634, 0x645 },
unicodeslot=0xFD28,
},
[0xFD29]={
@@ -125836,7 +129542,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x0631 },
+ specials={ "final", 0x634, 0x631 },
unicodeslot=0xFD29,
},
[0xFD2A]={
@@ -125844,7 +129550,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x0631 },
+ specials={ "final", 0x633, 0x631 },
unicodeslot=0xFD2A,
},
[0xFD2B]={
@@ -125852,7 +129558,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635, 0x0631 },
+ specials={ "final", 0x635, 0x631 },
unicodeslot=0xFD2B,
},
[0xFD2C]={
@@ -125860,7 +129566,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636, 0x0631 },
+ specials={ "final", 0x636, 0x631 },
unicodeslot=0xFD2C,
},
[0xFD2D]={
@@ -125868,7 +129574,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x062C },
+ specials={ "initial", 0x634, 0x62C },
unicodeslot=0xFD2D,
},
[0xFD2E]={
@@ -125876,7 +129582,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x062D },
+ specials={ "initial", 0x634, 0x62D },
unicodeslot=0xFD2E,
},
[0xFD2F]={
@@ -125884,7 +129590,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x062E },
+ specials={ "initial", 0x634, 0x62E },
unicodeslot=0xFD2F,
},
[0xFD30]={
@@ -125892,7 +129598,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x0645 },
+ specials={ "initial", 0x634, 0x645 },
unicodeslot=0xFD30,
},
[0xFD31]={
@@ -125900,7 +129606,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x0647 },
+ specials={ "initial", 0x633, 0x647 },
unicodeslot=0xFD31,
},
[0xFD32]={
@@ -125908,7 +129614,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x0647 },
+ specials={ "initial", 0x634, 0x647 },
unicodeslot=0xFD32,
},
[0xFD33]={
@@ -125916,7 +129622,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0637, 0x0645 },
+ specials={ "initial", 0x637, 0x645 },
unicodeslot=0xFD33,
},
[0xFD34]={
@@ -125924,7 +129630,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH JEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0633, 0x062C },
+ specials={ "medial", 0x633, 0x62C },
unicodeslot=0xFD34,
},
[0xFD35]={
@@ -125932,7 +129638,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH HAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0633, 0x062D },
+ specials={ "medial", 0x633, 0x62D },
unicodeslot=0xFD35,
},
[0xFD36]={
@@ -125940,7 +129646,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH KHAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0633, 0x062E },
+ specials={ "medial", 0x633, 0x62E },
unicodeslot=0xFD36,
},
[0xFD37]={
@@ -125948,7 +129654,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH JEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0634, 0x062C },
+ specials={ "medial", 0x634, 0x62C },
unicodeslot=0xFD37,
},
[0xFD38]={
@@ -125956,7 +129662,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0634, 0x062D },
+ specials={ "medial", 0x634, 0x62D },
unicodeslot=0xFD38,
},
[0xFD39]={
@@ -125964,7 +129670,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH KHAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0634, 0x062E },
+ specials={ "medial", 0x634, 0x62E },
unicodeslot=0xFD39,
},
[0xFD3A]={
@@ -125972,7 +129678,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0637, 0x0645 },
+ specials={ "medial", 0x637, 0x645 },
unicodeslot=0xFD3A,
},
[0xFD3B]={
@@ -125980,7 +129686,7 @@ characters.data={
description="ARABIC LIGATURE ZAH WITH MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0638, 0x0645 },
+ specials={ "medial", 0x638, 0x645 },
unicodeslot=0xFD3B,
},
[0xFD3C]={
@@ -125988,7 +129694,7 @@ characters.data={
description="ARABIC LIGATURE ALEF WITH FATHATAN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0627, 0x064B },
+ specials={ "final", 0x627, 0x64B },
unicodeslot=0xFD3C,
},
[0xFD3D]={
@@ -125996,7 +129702,7 @@ characters.data={
description="ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0627, 0x064B },
+ specials={ "isolated", 0x627, 0x64B },
unicodeslot=0xFD3D,
},
[0xFD3E]={
@@ -126004,7 +129710,7 @@ characters.data={
category="ps",
description="ORNATE LEFT PARENTHESIS",
direction="on",
- linebreak="op",
+ linebreak="cl",
unicodeslot=0xFD3E,
},
[0xFD3F]={
@@ -126012,7 +129718,7 @@ characters.data={
category="pe",
description="ORNATE RIGHT PARENTHESIS",
direction="on",
- linebreak="cl",
+ linebreak="op",
unicodeslot=0xFD3F,
},
[0xFD50]={
@@ -126020,7 +129726,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062C, 0x0645 },
+ specials={ "initial", 0x62A, 0x62C, 0x645 },
unicodeslot=0xFD50,
},
[0xFD51]={
@@ -126028,7 +129734,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HAH WITH JEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x062D, 0x062C },
+ specials={ "final", 0x62A, 0x62D, 0x62C },
unicodeslot=0xFD51,
},
[0xFD52]={
@@ -126036,7 +129742,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HAH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062D, 0x062C },
+ specials={ "initial", 0x62A, 0x62D, 0x62C },
unicodeslot=0xFD52,
},
[0xFD53]={
@@ -126044,7 +129750,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH HAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062D, 0x0645 },
+ specials={ "initial", 0x62A, 0x62D, 0x645 },
unicodeslot=0xFD53,
},
[0xFD54]={
@@ -126052,7 +129758,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH KHAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x062E, 0x0645 },
+ specials={ "initial", 0x62A, 0x62E, 0x645 },
unicodeslot=0xFD54,
},
[0xFD55]={
@@ -126060,7 +129766,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x0645, 0x062C },
+ specials={ "initial", 0x62A, 0x645, 0x62C },
unicodeslot=0xFD55,
},
[0xFD56]={
@@ -126068,7 +129774,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x0645, 0x062D },
+ specials={ "initial", 0x62A, 0x645, 0x62D },
unicodeslot=0xFD56,
},
[0xFD57]={
@@ -126076,7 +129782,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A, 0x0645, 0x062E },
+ specials={ "initial", 0x62A, 0x645, 0x62E },
unicodeslot=0xFD57,
},
[0xFD58]={
@@ -126084,7 +129790,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH MEEM WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x0645, 0x062D },
+ specials={ "final", 0x62C, 0x645, 0x62D },
unicodeslot=0xFD58,
},
[0xFD59]={
@@ -126092,7 +129798,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062C, 0x0645, 0x062D },
+ specials={ "initial", 0x62C, 0x645, 0x62D },
unicodeslot=0xFD59,
},
[0xFD5A]={
@@ -126100,7 +129806,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062D, 0x0645, 0x064A },
+ specials={ "final", 0x62D, 0x645, 0x64A },
unicodeslot=0xFD5A,
},
[0xFD5B]={
@@ -126108,7 +129814,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH MEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062D, 0x0645, 0x0649 },
+ specials={ "final", 0x62D, 0x645, 0x649 },
unicodeslot=0xFD5B,
},
[0xFD5C]={
@@ -126116,7 +129822,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH HAH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x062D, 0x062C },
+ specials={ "initial", 0x633, 0x62D, 0x62C },
unicodeslot=0xFD5C,
},
[0xFD5D]={
@@ -126124,7 +129830,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH JEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x062C, 0x062D },
+ specials={ "initial", 0x633, 0x62C, 0x62D },
unicodeslot=0xFD5D,
},
[0xFD5E]={
@@ -126132,7 +129838,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH JEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x062C, 0x0649 },
+ specials={ "final", 0x633, 0x62C, 0x649 },
unicodeslot=0xFD5E,
},
[0xFD5F]={
@@ -126140,7 +129846,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x0645, 0x062D },
+ specials={ "final", 0x633, 0x645, 0x62D },
unicodeslot=0xFD5F,
},
[0xFD60]={
@@ -126148,7 +129854,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x0645, 0x062D },
+ specials={ "initial", 0x633, 0x645, 0x62D },
unicodeslot=0xFD60,
},
[0xFD61]={
@@ -126156,7 +129862,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x0645, 0x062C },
+ specials={ "initial", 0x633, 0x645, 0x62C },
unicodeslot=0xFD61,
},
[0xFD62]={
@@ -126164,7 +129870,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x0645, 0x0645 },
+ specials={ "final", 0x633, 0x645, 0x645 },
unicodeslot=0xFD62,
},
[0xFD63]={
@@ -126172,7 +129878,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633, 0x0645, 0x0645 },
+ specials={ "initial", 0x633, 0x645, 0x645 },
unicodeslot=0xFD63,
},
[0xFD64]={
@@ -126180,7 +129886,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH HAH WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635, 0x062D, 0x062D },
+ specials={ "final", 0x635, 0x62D, 0x62D },
unicodeslot=0xFD64,
},
[0xFD65]={
@@ -126188,7 +129894,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH HAH WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0635, 0x062D, 0x062D },
+ specials={ "initial", 0x635, 0x62D, 0x62D },
unicodeslot=0xFD65,
},
[0xFD66]={
@@ -126196,7 +129902,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635, 0x0645, 0x0645 },
+ specials={ "final", 0x635, 0x645, 0x645 },
unicodeslot=0xFD66,
},
[0xFD67]={
@@ -126204,7 +129910,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x062D, 0x0645 },
+ specials={ "final", 0x634, 0x62D, 0x645 },
unicodeslot=0xFD67,
},
[0xFD68]={
@@ -126212,7 +129918,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x062D, 0x0645 },
+ specials={ "initial", 0x634, 0x62D, 0x645 },
unicodeslot=0xFD68,
},
[0xFD69]={
@@ -126220,7 +129926,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x062C, 0x064A },
+ specials={ "final", 0x634, 0x62C, 0x64A },
unicodeslot=0xFD69,
},
[0xFD6A]={
@@ -126228,7 +129934,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x0645, 0x062E },
+ specials={ "final", 0x634, 0x645, 0x62E },
unicodeslot=0xFD6A,
},
[0xFD6B]={
@@ -126236,7 +129942,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x0645, 0x062E },
+ specials={ "initial", 0x634, 0x645, 0x62E },
unicodeslot=0xFD6B,
},
[0xFD6C]={
@@ -126244,7 +129950,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x0645, 0x0645 },
+ specials={ "final", 0x634, 0x645, 0x645 },
unicodeslot=0xFD6C,
},
[0xFD6D]={
@@ -126252,7 +129958,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634, 0x0645, 0x0645 },
+ specials={ "initial", 0x634, 0x645, 0x645 },
unicodeslot=0xFD6D,
},
[0xFD6E]={
@@ -126260,7 +129966,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH HAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636, 0x062D, 0x0649 },
+ specials={ "final", 0x636, 0x62D, 0x649 },
unicodeslot=0xFD6E,
},
[0xFD6F]={
@@ -126268,7 +129974,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH KHAH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636, 0x062E, 0x0645 },
+ specials={ "final", 0x636, 0x62E, 0x645 },
unicodeslot=0xFD6F,
},
[0xFD70]={
@@ -126276,7 +129982,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH KHAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0636, 0x062E, 0x0645 },
+ specials={ "initial", 0x636, 0x62E, 0x645 },
unicodeslot=0xFD70,
},
[0xFD71]={
@@ -126284,7 +129990,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0637, 0x0645, 0x062D },
+ specials={ "final", 0x637, 0x645, 0x62D },
unicodeslot=0xFD71,
},
[0xFD72]={
@@ -126292,7 +129998,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0637, 0x0645, 0x062D },
+ specials={ "initial", 0x637, 0x645, 0x62D },
unicodeslot=0xFD72,
},
[0xFD73]={
@@ -126300,7 +130006,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0637, 0x0645, 0x0645 },
+ specials={ "initial", 0x637, 0x645, 0x645 },
unicodeslot=0xFD73,
},
[0xFD74]={
@@ -126308,7 +130014,7 @@ characters.data={
description="ARABIC LIGATURE TAH WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0637, 0x0645, 0x064A },
+ specials={ "final", 0x637, 0x645, 0x64A },
unicodeslot=0xFD74,
},
[0xFD75]={
@@ -126316,7 +130022,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH JEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639, 0x062C, 0x0645 },
+ specials={ "final", 0x639, 0x62C, 0x645 },
unicodeslot=0xFD75,
},
[0xFD76]={
@@ -126324,7 +130030,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639, 0x0645, 0x0645 },
+ specials={ "final", 0x639, 0x645, 0x645 },
unicodeslot=0xFD76,
},
[0xFD77]={
@@ -126332,7 +130038,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0639, 0x0645, 0x0645 },
+ specials={ "initial", 0x639, 0x645, 0x645 },
unicodeslot=0xFD77,
},
[0xFD78]={
@@ -126340,7 +130046,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH MEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639, 0x0645, 0x0649 },
+ specials={ "final", 0x639, 0x645, 0x649 },
unicodeslot=0xFD78,
},
[0xFD79]={
@@ -126348,7 +130054,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x063A, 0x0645, 0x0645 },
+ specials={ "final", 0x63A, 0x645, 0x645 },
unicodeslot=0xFD79,
},
[0xFD7A]={
@@ -126356,7 +130062,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x063A, 0x0645, 0x064A },
+ specials={ "final", 0x63A, 0x645, 0x64A },
unicodeslot=0xFD7A,
},
[0xFD7B]={
@@ -126364,7 +130070,7 @@ characters.data={
description="ARABIC LIGATURE GHAIN WITH MEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x063A, 0x0645, 0x0649 },
+ specials={ "final", 0x63A, 0x645, 0x649 },
unicodeslot=0xFD7B,
},
[0xFD7C]={
@@ -126372,7 +130078,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH KHAH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0641, 0x062E, 0x0645 },
+ specials={ "final", 0x641, 0x62E, 0x645 },
unicodeslot=0xFD7C,
},
[0xFD7D]={
@@ -126380,7 +130086,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH KHAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0641, 0x062E, 0x0645 },
+ specials={ "initial", 0x641, 0x62E, 0x645 },
unicodeslot=0xFD7D,
},
[0xFD7E]={
@@ -126388,7 +130094,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH MEEM WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0642, 0x0645, 0x062D },
+ specials={ "final", 0x642, 0x645, 0x62D },
unicodeslot=0xFD7E,
},
[0xFD7F]={
@@ -126396,7 +130102,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0642, 0x0645, 0x0645 },
+ specials={ "final", 0x642, 0x645, 0x645 },
unicodeslot=0xFD7F,
},
[0xFD80]={
@@ -126404,7 +130110,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HAH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062D, 0x0645 },
+ specials={ "final", 0x644, 0x62D, 0x645 },
unicodeslot=0xFD80,
},
[0xFD81]={
@@ -126412,7 +130118,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062D, 0x064A },
+ specials={ "final", 0x644, 0x62D, 0x64A },
unicodeslot=0xFD81,
},
[0xFD82]={
@@ -126420,7 +130126,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062D, 0x0649 },
+ specials={ "final", 0x644, 0x62D, 0x649 },
unicodeslot=0xFD82,
},
[0xFD83]={
@@ -126428,7 +130134,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062C, 0x062C },
+ specials={ "initial", 0x644, 0x62C, 0x62C },
unicodeslot=0xFD83,
},
[0xFD84]={
@@ -126436,7 +130142,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM WITH JEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062C, 0x062C },
+ specials={ "final", 0x644, 0x62C, 0x62C },
unicodeslot=0xFD84,
},
[0xFD85]={
@@ -126444,7 +130150,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH KHAH WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062E, 0x0645 },
+ specials={ "final", 0x644, 0x62E, 0x645 },
unicodeslot=0xFD85,
},
[0xFD86]={
@@ -126452,7 +130158,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH KHAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062E, 0x0645 },
+ specials={ "initial", 0x644, 0x62E, 0x645 },
unicodeslot=0xFD86,
},
[0xFD87]={
@@ -126460,7 +130166,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0645, 0x062D },
+ specials={ "final", 0x644, 0x645, 0x62D },
unicodeslot=0xFD87,
},
[0xFD88]={
@@ -126469,7 +130175,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x0645, 0x062D },
+ specials={ "initial", 0x644, 0x645, 0x62D },
unicodeslot=0xFD88,
},
[0xFD89]={
@@ -126477,7 +130183,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH HAH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062D, 0x062C },
+ specials={ "initial", 0x645, 0x62D, 0x62C },
unicodeslot=0xFD89,
},
[0xFD8A]={
@@ -126485,7 +130191,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH HAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062D, 0x0645 },
+ specials={ "initial", 0x645, 0x62D, 0x645 },
unicodeslot=0xFD8A,
},
[0xFD8B]={
@@ -126493,7 +130199,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645, 0x062D, 0x064A },
+ specials={ "final", 0x645, 0x62D, 0x64A },
unicodeslot=0xFD8B,
},
[0xFD8C]={
@@ -126501,7 +130207,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH JEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062C, 0x062D },
+ specials={ "initial", 0x645, 0x62C, 0x62D },
unicodeslot=0xFD8C,
},
[0xFD8D]={
@@ -126509,7 +130215,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH JEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062C, 0x0645 },
+ specials={ "initial", 0x645, 0x62C, 0x645 },
unicodeslot=0xFD8D,
},
[0xFD8E]={
@@ -126517,7 +130223,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH KHAH WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062E, 0x062C },
+ specials={ "initial", 0x645, 0x62E, 0x62C },
unicodeslot=0xFD8E,
},
[0xFD8F]={
@@ -126525,7 +130231,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062E, 0x0645 },
+ specials={ "initial", 0x645, 0x62E, 0x645 },
unicodeslot=0xFD8F,
},
[0xFD92]={
@@ -126533,7 +130239,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645, 0x062C, 0x062E },
+ specials={ "initial", 0x645, 0x62C, 0x62E },
unicodeslot=0xFD92,
},
[0xFD93]={
@@ -126541,7 +130247,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH MEEM WITH JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0647, 0x0645, 0x062C },
+ specials={ "initial", 0x647, 0x645, 0x62C },
unicodeslot=0xFD93,
},
[0xFD94]={
@@ -126549,7 +130255,7 @@ characters.data={
description="ARABIC LIGATURE HEH WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0647, 0x0645, 0x0645 },
+ specials={ "initial", 0x647, 0x645, 0x645 },
unicodeslot=0xFD94,
},
[0xFD95]={
@@ -126557,7 +130263,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x062D, 0x0645 },
+ specials={ "initial", 0x646, 0x62D, 0x645 },
unicodeslot=0xFD95,
},
[0xFD96]={
@@ -126565,7 +130271,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x062D, 0x0649 },
+ specials={ "final", 0x646, 0x62D, 0x649 },
unicodeslot=0xFD96,
},
[0xFD97]={
@@ -126573,7 +130279,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x062C, 0x0645 },
+ specials={ "final", 0x646, 0x62C, 0x645 },
unicodeslot=0xFD97,
},
[0xFD98]={
@@ -126581,7 +130287,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x062C, 0x0645 },
+ specials={ "initial", 0x646, 0x62C, 0x645 },
unicodeslot=0xFD98,
},
[0xFD99]={
@@ -126589,7 +130295,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x062C, 0x0649 },
+ specials={ "final", 0x646, 0x62C, 0x649 },
unicodeslot=0xFD99,
},
[0xFD9A]={
@@ -126597,7 +130303,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0645, 0x064A },
+ specials={ "final", 0x646, 0x645, 0x64A },
unicodeslot=0xFD9A,
},
[0xFD9B]={
@@ -126605,7 +130311,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH MEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x0645, 0x0649 },
+ specials={ "final", 0x646, 0x645, 0x649 },
unicodeslot=0xFD9B,
},
[0xFD9C]={
@@ -126613,7 +130319,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0645, 0x0645 },
+ specials={ "final", 0x64A, 0x645, 0x645 },
unicodeslot=0xFD9C,
},
[0xFD9D]={
@@ -126621,7 +130327,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A, 0x0645, 0x0645 },
+ specials={ "initial", 0x64A, 0x645, 0x645 },
unicodeslot=0xFD9D,
},
[0xFD9E]={
@@ -126629,7 +130335,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH KHAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x062E, 0x064A },
+ specials={ "final", 0x628, 0x62E, 0x64A },
unicodeslot=0xFD9E,
},
[0xFD9F]={
@@ -126637,7 +130343,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x062C, 0x064A },
+ specials={ "final", 0x62A, 0x62C, 0x64A },
unicodeslot=0xFD9F,
},
[0xFDA0]={
@@ -126645,7 +130351,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH JEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x062C, 0x0649 },
+ specials={ "final", 0x62A, 0x62C, 0x649 },
unicodeslot=0xFDA0,
},
[0xFDA1]={
@@ -126653,7 +130359,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH KHAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x062E, 0x064A },
+ specials={ "final", 0x62A, 0x62E, 0x64A },
unicodeslot=0xFDA1,
},
[0xFDA2]={
@@ -126661,7 +130367,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH KHAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x062E, 0x0649 },
+ specials={ "final", 0x62A, 0x62E, 0x649 },
unicodeslot=0xFDA2,
},
[0xFDA3]={
@@ -126669,7 +130375,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0645, 0x064A },
+ specials={ "final", 0x62A, 0x645, 0x64A },
unicodeslot=0xFDA3,
},
[0xFDA4]={
@@ -126677,7 +130383,7 @@ characters.data={
description="ARABIC LIGATURE TEH WITH MEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A, 0x0645, 0x0649 },
+ specials={ "final", 0x62A, 0x645, 0x649 },
unicodeslot=0xFDA4,
},
[0xFDA5]={
@@ -126685,7 +130391,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x0645, 0x064A },
+ specials={ "final", 0x62C, 0x645, 0x64A },
unicodeslot=0xFDA5,
},
[0xFDA6]={
@@ -126693,7 +130399,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH HAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x062D, 0x0649 },
+ specials={ "final", 0x62C, 0x62D, 0x649 },
unicodeslot=0xFDA6,
},
[0xFDA7]={
@@ -126701,7 +130407,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH MEEM WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x0645, 0x0649 },
+ specials={ "final", 0x62C, 0x645, 0x649 },
unicodeslot=0xFDA7,
},
[0xFDA8]={
@@ -126709,7 +130415,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH KHAH WITH ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x062E, 0x0649 },
+ specials={ "final", 0x633, 0x62E, 0x649 },
unicodeslot=0xFDA8,
},
[0xFDA9]={
@@ -126717,7 +130423,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635, 0x062D, 0x064A },
+ specials={ "final", 0x635, 0x62D, 0x64A },
unicodeslot=0xFDA9,
},
[0xFDAA]={
@@ -126725,7 +130431,7 @@ characters.data={
description="ARABIC LIGATURE SHEEN WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634, 0x062D, 0x064A },
+ specials={ "final", 0x634, 0x62D, 0x64A },
unicodeslot=0xFDAA,
},
[0xFDAB]={
@@ -126733,7 +130439,7 @@ characters.data={
description="ARABIC LIGATURE DAD WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636, 0x062D, 0x064A },
+ specials={ "final", 0x636, 0x62D, 0x64A },
unicodeslot=0xFDAB,
},
[0xFDAC]={
@@ -126741,7 +130447,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062C, 0x064A },
+ specials={ "final", 0x644, 0x62C, 0x64A },
unicodeslot=0xFDAC,
},
[0xFDAD]={
@@ -126749,7 +130455,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0645, 0x064A },
+ specials={ "final", 0x644, 0x645, 0x64A },
unicodeslot=0xFDAD,
},
[0xFDAE]={
@@ -126757,7 +130463,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x062D, 0x064A },
+ specials={ "final", 0x64A, 0x62D, 0x64A },
unicodeslot=0xFDAE,
},
[0xFDAF]={
@@ -126765,7 +130471,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x062C, 0x064A },
+ specials={ "final", 0x64A, 0x62C, 0x64A },
unicodeslot=0xFDAF,
},
[0xFDB0]={
@@ -126773,7 +130479,7 @@ characters.data={
description="ARABIC LIGATURE YEH WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A, 0x0645, 0x064A },
+ specials={ "final", 0x64A, 0x645, 0x64A },
unicodeslot=0xFDB0,
},
[0xFDB1]={
@@ -126781,7 +130487,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645, 0x0645, 0x064A },
+ specials={ "final", 0x645, 0x645, 0x64A },
unicodeslot=0xFDB1,
},
[0xFDB2]={
@@ -126789,7 +130495,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0642, 0x0645, 0x064A },
+ specials={ "final", 0x642, 0x645, 0x64A },
unicodeslot=0xFDB2,
},
[0xFDB3]={
@@ -126797,7 +130503,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x062D, 0x064A },
+ specials={ "final", 0x646, 0x62D, 0x64A },
unicodeslot=0xFDB3,
},
[0xFDB4]={
@@ -126805,7 +130511,7 @@ characters.data={
description="ARABIC LIGATURE QAF WITH MEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0642, 0x0645, 0x062D },
+ specials={ "initial", 0x642, 0x645, 0x62D },
unicodeslot=0xFDB4,
},
[0xFDB5]={
@@ -126813,7 +130519,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH HAH WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062D, 0x0645 },
+ specials={ "initial", 0x644, 0x62D, 0x645 },
unicodeslot=0xFDB5,
},
[0xFDB6]={
@@ -126821,7 +130527,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639, 0x0645, 0x064A },
+ specials={ "final", 0x639, 0x645, 0x64A },
unicodeslot=0xFDB6,
},
[0xFDB7]={
@@ -126829,7 +130535,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x0645, 0x064A },
+ specials={ "final", 0x643, 0x645, 0x64A },
unicodeslot=0xFDB7,
},
[0xFDB8]={
@@ -126837,7 +130543,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM WITH HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646, 0x062C, 0x062D },
+ specials={ "initial", 0x646, 0x62C, 0x62D },
unicodeslot=0xFDB8,
},
[0xFDB9]={
@@ -126845,7 +130551,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH KHAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645, 0x062E, 0x064A },
+ specials={ "final", 0x645, 0x62E, 0x64A },
unicodeslot=0xFDB9,
},
[0xFDBA]={
@@ -126853,7 +130559,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644, 0x062C, 0x0645 },
+ specials={ "initial", 0x644, 0x62C, 0x645 },
unicodeslot=0xFDBA,
},
[0xFDBB]={
@@ -126861,7 +130567,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643, 0x0645, 0x0645 },
+ specials={ "final", 0x643, 0x645, 0x645 },
unicodeslot=0xFDBB,
},
[0xFDBC]={
@@ -126869,7 +130575,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH JEEM WITH MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x062C, 0x0645 },
+ specials={ "final", 0x644, 0x62C, 0x645 },
unicodeslot=0xFDBC,
},
[0xFDBD]={
@@ -126877,7 +130583,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM WITH HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x062C, 0x062D },
+ specials={ "final", 0x646, 0x62C, 0x62D },
unicodeslot=0xFDBD,
},
[0xFDBE]={
@@ -126885,7 +130591,7 @@ characters.data={
description="ARABIC LIGATURE JEEM WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C, 0x062D, 0x064A },
+ specials={ "final", 0x62C, 0x62D, 0x64A },
unicodeslot=0xFDBE,
},
[0xFDBF]={
@@ -126893,7 +130599,7 @@ characters.data={
description="ARABIC LIGATURE HAH WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062D, 0x062C, 0x064A },
+ specials={ "final", 0x62D, 0x62C, 0x64A },
unicodeslot=0xFDBF,
},
[0xFDC0]={
@@ -126901,7 +130607,7 @@ characters.data={
description="ARABIC LIGATURE MEEM WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645, 0x062C, 0x064A },
+ specials={ "final", 0x645, 0x62C, 0x64A },
unicodeslot=0xFDC0,
},
[0xFDC1]={
@@ -126909,7 +130615,7 @@ characters.data={
description="ARABIC LIGATURE FEH WITH MEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0641, 0x0645, 0x064A },
+ specials={ "final", 0x641, 0x645, 0x64A },
unicodeslot=0xFDC1,
},
[0xFDC2]={
@@ -126917,7 +130623,7 @@ characters.data={
description="ARABIC LIGATURE BEH WITH HAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628, 0x062D, 0x064A },
+ specials={ "final", 0x628, 0x62D, 0x64A },
unicodeslot=0xFDC2,
},
[0xFDC3]={
@@ -126925,7 +130631,7 @@ characters.data={
description="ARABIC LIGATURE KAF WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643, 0x0645, 0x0645 },
+ specials={ "initial", 0x643, 0x645, 0x645 },
unicodeslot=0xFDC3,
},
[0xFDC4]={
@@ -126933,7 +130639,7 @@ characters.data={
description="ARABIC LIGATURE AIN WITH JEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0639, 0x062C, 0x0645 },
+ specials={ "initial", 0x639, 0x62C, 0x645 },
unicodeslot=0xFDC4,
},
[0xFDC5]={
@@ -126941,7 +130647,7 @@ characters.data={
description="ARABIC LIGATURE SAD WITH MEEM WITH MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0635, 0x0645, 0x0645 },
+ specials={ "initial", 0x635, 0x645, 0x645 },
unicodeslot=0xFDC5,
},
[0xFDC6]={
@@ -126949,7 +130655,7 @@ characters.data={
description="ARABIC LIGATURE SEEN WITH KHAH WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633, 0x062E, 0x064A },
+ specials={ "final", 0x633, 0x62E, 0x64A },
unicodeslot=0xFDC6,
},
[0xFDC7]={
@@ -126957,7 +130663,7 @@ characters.data={
description="ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646, 0x062C, 0x064A },
+ specials={ "final", 0x646, 0x62C, 0x64A },
unicodeslot=0xFDC7,
},
[0xFDF0]={
@@ -126965,7 +130671,7 @@ characters.data={
description="ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0644, 0x06D2 },
+ specials={ "isolated", 0x635, 0x644, 0x6D2 },
unicodeslot=0xFDF0,
},
[0xFDF1]={
@@ -126973,7 +130679,7 @@ characters.data={
description="ARABIC LIGATURE QALA USED AS KORANIC STOP SIGN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0642, 0x0644, 0x06D2 },
+ specials={ "isolated", 0x642, 0x644, 0x6D2 },
unicodeslot=0xFDF1,
},
[0xFDF2]={
@@ -126982,7 +130688,7 @@ characters.data={
description="ARABIC LIGATURE ALLAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0627, 0x0644, 0x0644, 0x0647 },
+ specials={ "isolated", 0x627, 0x644, 0x644, 0x647 },
unicodeslot=0xFDF2,
},
[0xFDF3]={
@@ -126990,7 +130696,7 @@ characters.data={
description="ARABIC LIGATURE AKBAR ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0627, 0x0643, 0x0628, 0x0631 },
+ specials={ "isolated", 0x627, 0x643, 0x628, 0x631 },
unicodeslot=0xFDF3,
},
[0xFDF4]={
@@ -126998,7 +130704,7 @@ characters.data={
description="ARABIC LIGATURE MOHAMMAD ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645, 0x062D, 0x0645, 0x062F },
+ specials={ "isolated", 0x645, 0x62D, 0x645, 0x62F },
unicodeslot=0xFDF4,
},
[0xFDF5]={
@@ -127006,7 +130712,7 @@ characters.data={
description="ARABIC LIGATURE SALAM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0644, 0x0639, 0x0645 },
+ specials={ "isolated", 0x635, 0x644, 0x639, 0x645 },
unicodeslot=0xFDF5,
},
[0xFDF6]={
@@ -127014,7 +130720,7 @@ characters.data={
description="ARABIC LIGATURE RASOUL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0631, 0x0633, 0x0648, 0x0644 },
+ specials={ "isolated", 0x631, 0x633, 0x648, 0x644 },
unicodeslot=0xFDF6,
},
[0xFDF7]={
@@ -127022,7 +130728,7 @@ characters.data={
description="ARABIC LIGATURE ALAYHE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0639, 0x0644, 0x064A, 0x0647 },
+ specials={ "isolated", 0x639, 0x644, 0x64A, 0x647 },
unicodeslot=0xFDF7,
},
[0xFDF8]={
@@ -127030,7 +130736,7 @@ characters.data={
description="ARABIC LIGATURE WASALLAM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0648, 0x0633, 0x0644, 0x0645 },
+ specials={ "isolated", 0x648, 0x633, 0x644, 0x645 },
unicodeslot=0xFDF8,
},
[0xFDF9]={
@@ -127038,7 +130744,7 @@ characters.data={
description="ARABIC LIGATURE SALLA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0644, 0x0649 },
+ specials={ "isolated", 0x635, 0x644, 0x649 },
unicodeslot=0xFDF9,
},
[0xFDFA]={
@@ -127047,7 +130753,7 @@ characters.data={
description="ARABIC LIGATURE SALLALLAHOU ALAYHE WASALLAM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635, 0x0644, 0x0649, 0x0020, 0x0627, 0x0644, 0x0644, 0x0647, 0x0020, 0x0639, 0x0644, 0x064A, 0x0647, 0x0020, 0x0648, 0x0633, 0x0644, 0x0645 },
+ specials={ "isolated", 0x635, 0x644, 0x649, 0x20, 0x627, 0x644, 0x644, 0x647, 0x20, 0x639, 0x644, 0x64A, 0x647, 0x20, 0x648, 0x633, 0x644, 0x645 },
unicodeslot=0xFDFA,
},
[0xFDFB]={
@@ -127055,7 +130761,7 @@ characters.data={
description="ARABIC LIGATURE JALLAJALALOUHOU",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062C, 0x0644, 0x0020, 0x062C, 0x0644, 0x0627, 0x0644, 0x0647 },
+ specials={ "isolated", 0x62C, 0x644, 0x20, 0x62C, 0x644, 0x627, 0x644, 0x647 },
unicodeslot=0xFDFB,
},
[0xFDFC]={
@@ -127063,7 +130769,7 @@ characters.data={
description="RIAL SIGN",
direction="al",
linebreak="po",
- specials={ "isolated", 0x0631, 0x06CC, 0x0627, 0x0644 },
+ specials={ "isolated", 0x631, 0x6CC, 0x627, 0x644 },
unicodeslot=0xFDFC,
},
[0xFDFD]={
@@ -127207,7 +130913,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL COMMA",
direction="on",
linebreak="is",
- specials={ "vertical", 0x002C },
+ specials={ "vertical", 0x2C },
unicodeslot=0xFE10,
},
[0xFE11]={
@@ -127234,7 +130940,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL COLON",
direction="on",
linebreak="is",
- specials={ "vertical", 0x003A },
+ specials={ "vertical", 0x3A },
unicodeslot=0xFE13,
},
[0xFE14]={
@@ -127243,7 +130949,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL SEMICOLON",
direction="on",
linebreak="is",
- specials={ "vertical", 0x003B },
+ specials={ "vertical", 0x3B },
unicodeslot=0xFE14,
},
[0xFE15]={
@@ -127252,7 +130958,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK",
direction="on",
linebreak="ex",
- specials={ "vertical", 0x0021 },
+ specials={ "vertical", 0x21 },
unicodeslot=0xFE15,
},
[0xFE16]={
@@ -127261,7 +130967,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL QUESTION MARK",
direction="on",
linebreak="ex",
- specials={ "vertical", 0x003F },
+ specials={ "vertical", 0x3F },
unicodeslot=0xFE16,
},
[0xFE17]={
@@ -127294,6 +131000,7 @@ characters.data={
},
[0xFE20]={
category="mn",
+ combining=0xE6,
description="COMBINING LIGATURE LEFT HALF",
direction="nsm",
linebreak="cm",
@@ -127301,6 +131008,7 @@ characters.data={
},
[0xFE21]={
category="mn",
+ combining=0xE6,
description="COMBINING LIGATURE RIGHT HALF",
direction="nsm",
linebreak="cm",
@@ -127308,6 +131016,7 @@ characters.data={
},
[0xFE22]={
category="mn",
+ combining=0xE6,
description="COMBINING DOUBLE TILDE LEFT HALF",
direction="nsm",
linebreak="cm",
@@ -127315,6 +131024,7 @@ characters.data={
},
[0xFE23]={
category="mn",
+ combining=0xE6,
description="COMBINING DOUBLE TILDE RIGHT HALF",
direction="nsm",
linebreak="cm",
@@ -127322,6 +131032,7 @@ characters.data={
},
[0xFE24]={
category="mn",
+ combining=0xE6,
description="COMBINING MACRON LEFT HALF",
direction="nsm",
linebreak="cm",
@@ -127329,6 +131040,7 @@ characters.data={
},
[0xFE25]={
category="mn",
+ combining=0xE6,
description="COMBINING MACRON RIGHT HALF",
direction="nsm",
linebreak="cm",
@@ -127336,11 +131048,68 @@ characters.data={
},
[0xFE26]={
category="mn",
+ combining=0xE6,
description="COMBINING CONJOINING MACRON",
direction="nsm",
linebreak="cm",
unicodeslot=0xFE26,
},
+ [0xFE27]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING LIGATURE LEFT HALF BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE27,
+ },
+ [0xFE28]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING LIGATURE RIGHT HALF BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE28,
+ },
+ [0xFE29]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING TILDE LEFT HALF BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE29,
+ },
+ [0xFE2A]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING TILDE RIGHT HALF BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE2A,
+ },
+ [0xFE2B]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING MACRON LEFT HALF BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE2B,
+ },
+ [0xFE2C]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING MACRON RIGHT HALF BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE2C,
+ },
+ [0xFE2D]={
+ category="mn",
+ combining=0xDC,
+ description="COMBINING CONJOINING MACRON BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0xFE2D,
+ },
[0xFE30]={
adobename="twodotleadervertical",
category="po",
@@ -127378,7 +131147,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL LOW LINE",
direction="on",
linebreak="id",
- specials={ "vertical", 0x005F },
+ specials={ "vertical", 0x5F },
unicodeslot=0xFE33,
},
[0xFE34]={
@@ -127388,7 +131157,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL WAVY LOW LINE",
direction="on",
linebreak="id",
- specials={ "vertical", 0x005F },
+ specials={ "vertical", 0x5F },
unicodeslot=0xFE34,
},
[0xFE35]={
@@ -127398,7 +131167,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS",
direction="on",
linebreak="op",
- specials={ "vertical", 0x0028 },
+ specials={ "vertical", 0x28 },
unicodeslot=0xFE35,
},
[0xFE36]={
@@ -127408,7 +131177,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS",
direction="on",
linebreak="cl",
- specials={ "vertical", 0x0029 },
+ specials={ "vertical", 0x29 },
unicodeslot=0xFE36,
},
[0xFE37]={
@@ -127420,7 +131189,7 @@ characters.data={
direction="on",
linebreak="op",
mathfiller="downbracefill",
- specials={ "vertical", 0x007B },
+ specials={ "vertical", 0x7B },
unicodeslot=0xFE37,
},
[0xFE38]={
@@ -127432,7 +131201,7 @@ characters.data={
direction="on",
linebreak="cl",
mathfiller="upbracefill",
- specials={ "vertical", 0x007D },
+ specials={ "vertical", 0x7D },
unicodeslot=0xFE38,
},
[0xFE39]={
@@ -127577,7 +131346,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET",
direction="on",
linebreak="op",
- specials={ "vertical", 0x005B },
+ specials={ "vertical", 0x5B },
unicodeslot=0xFE47,
},
[0xFE48]={
@@ -127586,7 +131355,7 @@ characters.data={
description="PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET",
direction="on",
linebreak="cl",
- specials={ "vertical", 0x005D },
+ specials={ "vertical", 0x5D },
unicodeslot=0xFE48,
},
[0xFE49]={
@@ -127636,7 +131405,7 @@ characters.data={
description="DASHED LOW LINE",
direction="on",
linebreak="id",
- specials={ "compat", 0x005F },
+ specials={ "compat", 0x5F },
unicodeslot=0xFE4D,
},
[0xFE4E]={
@@ -127646,7 +131415,7 @@ characters.data={
description="CENTRELINE LOW LINE",
direction="on",
linebreak="id",
- specials={ "compat", 0x005F },
+ specials={ "compat", 0x5F },
unicodeslot=0xFE4E,
},
[0xFE4F]={
@@ -127656,7 +131425,7 @@ characters.data={
description="WAVY LOW LINE",
direction="on",
linebreak="id",
- specials={ "compat", 0x005F },
+ specials={ "compat", 0x5F },
unicodeslot=0xFE4F,
},
[0xFE50]={
@@ -127666,7 +131435,7 @@ characters.data={
description="SMALL COMMA",
direction="cs",
linebreak="cl",
- specials={ "small", 0x002C },
+ specials={ "small", 0x2C },
unicodeslot=0xFE50,
},
[0xFE51]={
@@ -127685,7 +131454,7 @@ characters.data={
description="SMALL FULL STOP",
direction="cs",
linebreak="cl",
- specials={ "small", 0x002E },
+ specials={ "small", 0x2E },
unicodeslot=0xFE52,
},
[0xFE54]={
@@ -127695,7 +131464,7 @@ characters.data={
description="SMALL SEMICOLON",
direction="on",
linebreak="ns",
- specials={ "small", 0x003B },
+ specials={ "small", 0x3B },
unicodeslot=0xFE54,
},
[0xFE55]={
@@ -127705,7 +131474,7 @@ characters.data={
description="SMALL COLON",
direction="cs",
linebreak="ns",
- specials={ "small", 0x003A },
+ specials={ "small", 0x3A },
unicodeslot=0xFE55,
},
[0xFE56]={
@@ -127714,7 +131483,7 @@ characters.data={
description="SMALL QUESTION MARK",
direction="on",
linebreak="ex",
- specials={ "small", 0x003F },
+ specials={ "small", 0x3F },
unicodeslot=0xFE56,
},
[0xFE57]={
@@ -127723,7 +131492,7 @@ characters.data={
description="SMALL EXCLAMATION MARK",
direction="on",
linebreak="ex",
- specials={ "small", 0x0021 },
+ specials={ "small", 0x21 },
unicodeslot=0xFE57,
},
[0xFE58]={
@@ -127743,8 +131512,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFE5A,
+ specials={ "small", 0x28 },
textclass="open",
- specials={ "small", 0x0028 },
unicodeslot=0xFE59,
},
[0xFE5A]={
@@ -127755,8 +131524,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFE59,
+ specials={ "small", 0x29 },
textclass="close",
- specials={ "small", 0x0029 },
unicodeslot=0xFE5A,
},
[0xFE5B]={
@@ -127767,8 +131536,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFE5C,
+ specials={ "small", 0x7B },
textclass="open",
- specials={ "small", 0x007B },
unicodeslot=0xFE5B,
},
[0xFE5C]={
@@ -127779,8 +131548,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFE5B,
+ specials={ "small", 0x7D },
textclass="close",
- specials={ "small", 0x007D },
unicodeslot=0xFE5C,
},
[0xFE5D]={
@@ -127791,8 +131560,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFE5E,
- textclass="open",
specials={ "small", 0x3014 },
+ textclass="open",
unicodeslot=0xFE5D,
},
[0xFE5E]={
@@ -127803,8 +131572,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFE5D,
- textclass="close",
specials={ "small", 0x3015 },
+ textclass="close",
unicodeslot=0xFE5E,
},
[0xFE5F]={
@@ -127814,7 +131583,7 @@ characters.data={
description="SMALL NUMBER SIGN",
direction="et",
linebreak="id",
- specials={ "small", 0x0023 },
+ specials={ "small", 0x23 },
unicodeslot=0xFE5F,
},
[0xFE60]={
@@ -127823,7 +131592,7 @@ characters.data={
description="SMALL AMPERSAND",
direction="on",
linebreak="id",
- specials={ "small", 0x0026 },
+ specials={ "small", 0x26 },
unicodeslot=0xFE60,
},
[0xFE61]={
@@ -127833,7 +131602,7 @@ characters.data={
description="SMALL ASTERISK",
direction="on",
linebreak="id",
- specials={ "small", 0x002A },
+ specials={ "small", 0x2A },
unicodeslot=0xFE61,
},
[0xFE62]={
@@ -127843,7 +131612,7 @@ characters.data={
description="SMALL PLUS SIGN",
direction="es",
linebreak="id",
- specials={ "small", 0x002B },
+ specials={ "small", 0x2B },
unicodeslot=0xFE62,
},
[0xFE63]={
@@ -127853,7 +131622,7 @@ characters.data={
description="SMALL HYPHEN-MINUS",
direction="es",
linebreak="id",
- specials={ "small", 0x002D },
+ specials={ "small", 0x2D },
unicodeslot=0xFE63,
},
[0xFE64]={
@@ -127864,8 +131633,8 @@ characters.data={
direction="on",
linebreak="id",
mirror=0xFE65,
+ specials={ "small", 0x3C },
textclass="open",
- specials={ "small", 0x003C },
unicodeslot=0xFE64,
},
[0xFE65]={
@@ -127876,8 +131645,8 @@ characters.data={
direction="on",
linebreak="id",
mirror=0xFE64,
+ specials={ "small", 0x3E },
textclass="close",
- specials={ "small", 0x003E },
unicodeslot=0xFE65,
},
[0xFE66]={
@@ -127887,7 +131656,7 @@ characters.data={
description="SMALL EQUALS SIGN",
direction="on",
linebreak="id",
- specials={ "small", 0x003D },
+ specials={ "small", 0x3D },
unicodeslot=0xFE66,
},
[0xFE68]={
@@ -127896,7 +131665,7 @@ characters.data={
description="SMALL REVERSE SOLIDUS",
direction="on",
linebreak="id",
- specials={ "small", 0x005C },
+ specials={ "small", 0x5C },
unicodeslot=0xFE68,
},
[0xFE69]={
@@ -127906,7 +131675,7 @@ characters.data={
description="SMALL DOLLAR SIGN",
direction="et",
linebreak="pr",
- specials={ "small", 0x0024 },
+ specials={ "small", 0x24 },
unicodeslot=0xFE69,
},
[0xFE6A]={
@@ -127916,7 +131685,7 @@ characters.data={
description="SMALL PERCENT SIGN",
direction="et",
linebreak="po",
- specials={ "small", 0x0025 },
+ specials={ "small", 0x25 },
unicodeslot=0xFE6A,
},
[0xFE6B]={
@@ -127926,7 +131695,7 @@ characters.data={
description="SMALL COMMERCIAL AT",
direction="on",
linebreak="id",
- specials={ "small", 0x0040 },
+ specials={ "small", 0x40 },
unicodeslot=0xFE6B,
},
[0xFE70]={
@@ -127934,7 +131703,7 @@ characters.data={
description="ARABIC FATHATAN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064B },
+ specials={ "isolated", 0x20, 0x64B },
unicodeslot=0xFE70,
},
[0xFE71]={
@@ -127942,7 +131711,7 @@ characters.data={
description="ARABIC TATWEEL WITH FATHATAN ABOVE",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x064B },
+ specials={ "medial", 0x640, 0x64B },
unicodeslot=0xFE71,
},
[0xFE72]={
@@ -127950,7 +131719,7 @@ characters.data={
description="ARABIC DAMMATAN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064C },
+ specials={ "isolated", 0x20, 0x64C },
unicodeslot=0xFE72,
},
[0xFE73]={
@@ -127965,7 +131734,7 @@ characters.data={
description="ARABIC KASRATAN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064D },
+ specials={ "isolated", 0x20, 0x64D },
unicodeslot=0xFE74,
},
[0xFE76]={
@@ -127973,7 +131742,7 @@ characters.data={
description="ARABIC FATHA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064E },
+ specials={ "isolated", 0x20, 0x64E },
unicodeslot=0xFE76,
},
[0xFE77]={
@@ -127981,7 +131750,7 @@ characters.data={
description="ARABIC FATHA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x064E },
+ specials={ "medial", 0x640, 0x64E },
unicodeslot=0xFE77,
},
[0xFE78]={
@@ -127989,7 +131758,7 @@ characters.data={
description="ARABIC DAMMA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x064F },
+ specials={ "isolated", 0x20, 0x64F },
unicodeslot=0xFE78,
},
[0xFE79]={
@@ -127997,7 +131766,7 @@ characters.data={
description="ARABIC DAMMA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x064F },
+ specials={ "medial", 0x640, 0x64F },
unicodeslot=0xFE79,
},
[0xFE7A]={
@@ -128005,7 +131774,7 @@ characters.data={
description="ARABIC KASRA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x0650 },
+ specials={ "isolated", 0x20, 0x650 },
unicodeslot=0xFE7A,
},
[0xFE7B]={
@@ -128013,7 +131782,7 @@ characters.data={
description="ARABIC KASRA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x0650 },
+ specials={ "medial", 0x640, 0x650 },
unicodeslot=0xFE7B,
},
[0xFE7C]={
@@ -128021,7 +131790,7 @@ characters.data={
description="ARABIC SHADDA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x0651 },
+ specials={ "isolated", 0x20, 0x651 },
unicodeslot=0xFE7C,
},
[0xFE7D]={
@@ -128029,7 +131798,7 @@ characters.data={
description="ARABIC SHADDA MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x0651 },
+ specials={ "medial", 0x640, 0x651 },
unicodeslot=0xFE7D,
},
[0xFE7E]={
@@ -128037,7 +131806,7 @@ characters.data={
description="ARABIC SUKUN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0020, 0x0652 },
+ specials={ "isolated", 0x20, 0x652 },
unicodeslot=0xFE7E,
},
[0xFE7F]={
@@ -128045,7 +131814,7 @@ characters.data={
description="ARABIC SUKUN MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0640, 0x0652 },
+ specials={ "medial", 0x640, 0x652 },
unicodeslot=0xFE7F,
},
[0xFE80]={
@@ -128053,7 +131822,7 @@ characters.data={
description="ARABIC LETTER HAMZA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0621 },
+ specials={ "isolated", 0x621 },
unicodeslot=0xFE80,
},
[0xFE81]={
@@ -128061,8 +131830,8 @@ characters.data={
description="ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "isolated", 0x0622 },
+ shcode=0x627,
+ specials={ "isolated", 0x622 },
unicodeslot=0xFE81,
},
[0xFE82]={
@@ -128071,8 +131840,8 @@ characters.data={
description="ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "final", 0x0622 },
+ shcode=0x627,
+ specials={ "final", 0x622 },
unicodeslot=0xFE82,
},
[0xFE83]={
@@ -128080,8 +131849,8 @@ characters.data={
description="ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "isolated", 0x0623 },
+ shcode=0x627,
+ specials={ "isolated", 0x623 },
unicodeslot=0xFE83,
},
[0xFE84]={
@@ -128090,8 +131859,8 @@ characters.data={
description="ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "final", 0x0623 },
+ shcode=0x627,
+ specials={ "final", 0x623 },
unicodeslot=0xFE84,
},
[0xFE85]={
@@ -128099,8 +131868,8 @@ characters.data={
description="ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x0648,
- specials={ "isolated", 0x0624 },
+ shcode=0x648,
+ specials={ "isolated", 0x624 },
unicodeslot=0xFE85,
},
[0xFE86]={
@@ -128109,8 +131878,8 @@ characters.data={
description="ARABIC LETTER WAW WITH HAMZA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- shcode=0x0648,
- specials={ "final", 0x0624 },
+ shcode=0x648,
+ specials={ "final", 0x624 },
unicodeslot=0xFE86,
},
[0xFE87]={
@@ -128118,8 +131887,8 @@ characters.data={
description="ARABIC LETTER ALEF WITH HAMZA BELOW ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "isolated", 0x0625 },
+ shcode=0x627,
+ specials={ "isolated", 0x625 },
unicodeslot=0xFE87,
},
[0xFE88]={
@@ -128128,8 +131897,8 @@ characters.data={
description="ARABIC LETTER ALEF WITH HAMZA BELOW FINAL FORM",
direction="al",
linebreak="al",
- shcode=0x0627,
- specials={ "final", 0x0625 },
+ shcode=0x627,
+ specials={ "final", 0x625 },
unicodeslot=0xFE88,
},
[0xFE89]={
@@ -128137,8 +131906,8 @@ characters.data={
description="ARABIC LETTER YEH WITH HAMZA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- shcode=0x064A,
- specials={ "isolated", 0x0626 },
+ shcode=0x64A,
+ specials={ "isolated", 0x626 },
unicodeslot=0xFE89,
},
[0xFE8A]={
@@ -128147,8 +131916,8 @@ characters.data={
description="ARABIC LETTER YEH WITH HAMZA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- shcode=0x064A,
- specials={ "final", 0x0626 },
+ shcode=0x64A,
+ specials={ "final", 0x626 },
unicodeslot=0xFE8A,
},
[0xFE8B]={
@@ -128157,8 +131926,8 @@ characters.data={
description="ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM",
direction="al",
linebreak="al",
- shcode=0x064A,
- specials={ "initial", 0x0626 },
+ shcode=0x64A,
+ specials={ "initial", 0x626 },
unicodeslot=0xFE8B,
},
[0xFE8C]={
@@ -128167,8 +131936,8 @@ characters.data={
description="ARABIC LETTER YEH WITH HAMZA ABOVE MEDIAL FORM",
direction="al",
linebreak="al",
- shcode=0x064A,
- specials={ "medial", 0x0626 },
+ shcode=0x64A,
+ specials={ "medial", 0x626 },
unicodeslot=0xFE8C,
},
[0xFE8D]={
@@ -128176,7 +131945,7 @@ characters.data={
description="ARABIC LETTER ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0627 },
+ specials={ "isolated", 0x627 },
unicodeslot=0xFE8D,
},
[0xFE8E]={
@@ -128185,7 +131954,7 @@ characters.data={
description="ARABIC LETTER ALEF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0627 },
+ specials={ "final", 0x627 },
unicodeslot=0xFE8E,
},
[0xFE8F]={
@@ -128193,7 +131962,7 @@ characters.data={
description="ARABIC LETTER BEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0628 },
+ specials={ "isolated", 0x628 },
unicodeslot=0xFE8F,
},
[0xFE90]={
@@ -128202,7 +131971,7 @@ characters.data={
description="ARABIC LETTER BEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0628 },
+ specials={ "final", 0x628 },
unicodeslot=0xFE90,
},
[0xFE91]={
@@ -128211,7 +131980,7 @@ characters.data={
description="ARABIC LETTER BEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0628 },
+ specials={ "initial", 0x628 },
unicodeslot=0xFE91,
},
[0xFE92]={
@@ -128220,7 +131989,7 @@ characters.data={
description="ARABIC LETTER BEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0628 },
+ specials={ "medial", 0x628 },
unicodeslot=0xFE92,
},
[0xFE93]={
@@ -128228,7 +131997,7 @@ characters.data={
description="ARABIC LETTER TEH MARBUTA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0629 },
+ specials={ "isolated", 0x629 },
unicodeslot=0xFE93,
},
[0xFE94]={
@@ -128237,7 +132006,7 @@ characters.data={
description="ARABIC LETTER TEH MARBUTA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0629 },
+ specials={ "final", 0x629 },
unicodeslot=0xFE94,
},
[0xFE95]={
@@ -128245,7 +132014,7 @@ characters.data={
description="ARABIC LETTER TEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062A },
+ specials={ "isolated", 0x62A },
unicodeslot=0xFE95,
},
[0xFE96]={
@@ -128254,7 +132023,7 @@ characters.data={
description="ARABIC LETTER TEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062A },
+ specials={ "final", 0x62A },
unicodeslot=0xFE96,
},
[0xFE97]={
@@ -128263,7 +132032,7 @@ characters.data={
description="ARABIC LETTER TEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062A },
+ specials={ "initial", 0x62A },
unicodeslot=0xFE97,
},
[0xFE98]={
@@ -128272,7 +132041,7 @@ characters.data={
description="ARABIC LETTER TEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062A },
+ specials={ "medial", 0x62A },
unicodeslot=0xFE98,
},
[0xFE99]={
@@ -128280,7 +132049,7 @@ characters.data={
description="ARABIC LETTER THEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062B },
+ specials={ "isolated", 0x62B },
unicodeslot=0xFE99,
},
[0xFE9A]={
@@ -128289,7 +132058,7 @@ characters.data={
description="ARABIC LETTER THEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062B },
+ specials={ "final", 0x62B },
unicodeslot=0xFE9A,
},
[0xFE9B]={
@@ -128298,7 +132067,7 @@ characters.data={
description="ARABIC LETTER THEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062B },
+ specials={ "initial", 0x62B },
unicodeslot=0xFE9B,
},
[0xFE9C]={
@@ -128307,7 +132076,7 @@ characters.data={
description="ARABIC LETTER THEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062B },
+ specials={ "medial", 0x62B },
unicodeslot=0xFE9C,
},
[0xFE9D]={
@@ -128315,7 +132084,7 @@ characters.data={
description="ARABIC LETTER JEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062C },
+ specials={ "isolated", 0x62C },
unicodeslot=0xFE9D,
},
[0xFE9E]={
@@ -128324,7 +132093,7 @@ characters.data={
description="ARABIC LETTER JEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062C },
+ specials={ "final", 0x62C },
unicodeslot=0xFE9E,
},
[0xFE9F]={
@@ -128333,7 +132102,7 @@ characters.data={
description="ARABIC LETTER JEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062C },
+ specials={ "initial", 0x62C },
unicodeslot=0xFE9F,
},
[0xFEA0]={
@@ -128342,7 +132111,7 @@ characters.data={
description="ARABIC LETTER JEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062C },
+ specials={ "medial", 0x62C },
unicodeslot=0xFEA0,
},
[0xFEA1]={
@@ -128350,7 +132119,7 @@ characters.data={
description="ARABIC LETTER HAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062D },
+ specials={ "isolated", 0x62D },
unicodeslot=0xFEA1,
},
[0xFEA2]={
@@ -128359,7 +132128,7 @@ characters.data={
description="ARABIC LETTER HAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062D },
+ specials={ "final", 0x62D },
unicodeslot=0xFEA2,
},
[0xFEA3]={
@@ -128368,7 +132137,7 @@ characters.data={
description="ARABIC LETTER HAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062D },
+ specials={ "initial", 0x62D },
unicodeslot=0xFEA3,
},
[0xFEA4]={
@@ -128377,7 +132146,7 @@ characters.data={
description="ARABIC LETTER HAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062D },
+ specials={ "medial", 0x62D },
unicodeslot=0xFEA4,
},
[0xFEA5]={
@@ -128385,7 +132154,7 @@ characters.data={
description="ARABIC LETTER KHAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062E },
+ specials={ "isolated", 0x62E },
unicodeslot=0xFEA5,
},
[0xFEA6]={
@@ -128394,7 +132163,7 @@ characters.data={
description="ARABIC LETTER KHAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062E },
+ specials={ "final", 0x62E },
unicodeslot=0xFEA6,
},
[0xFEA7]={
@@ -128403,7 +132172,7 @@ characters.data={
description="ARABIC LETTER KHAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x062E },
+ specials={ "initial", 0x62E },
unicodeslot=0xFEA7,
},
[0xFEA8]={
@@ -128412,7 +132181,7 @@ characters.data={
description="ARABIC LETTER KHAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x062E },
+ specials={ "medial", 0x62E },
unicodeslot=0xFEA8,
},
[0xFEA9]={
@@ -128420,7 +132189,7 @@ characters.data={
description="ARABIC LETTER DAL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x062F },
+ specials={ "isolated", 0x62F },
unicodeslot=0xFEA9,
},
[0xFEAA]={
@@ -128429,7 +132198,7 @@ characters.data={
description="ARABIC LETTER DAL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x062F },
+ specials={ "final", 0x62F },
unicodeslot=0xFEAA,
},
[0xFEAB]={
@@ -128437,7 +132206,7 @@ characters.data={
description="ARABIC LETTER THAL ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0630 },
+ specials={ "isolated", 0x630 },
unicodeslot=0xFEAB,
},
[0xFEAC]={
@@ -128446,7 +132215,7 @@ characters.data={
description="ARABIC LETTER THAL FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0630 },
+ specials={ "final", 0x630 },
unicodeslot=0xFEAC,
},
[0xFEAD]={
@@ -128454,7 +132223,7 @@ characters.data={
description="ARABIC LETTER REH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0631 },
+ specials={ "isolated", 0x631 },
unicodeslot=0xFEAD,
},
[0xFEAE]={
@@ -128463,7 +132232,7 @@ characters.data={
description="ARABIC LETTER REH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0631 },
+ specials={ "final", 0x631 },
unicodeslot=0xFEAE,
},
[0xFEAF]={
@@ -128471,7 +132240,7 @@ characters.data={
description="ARABIC LETTER ZAIN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0632 },
+ specials={ "isolated", 0x632 },
unicodeslot=0xFEAF,
},
[0xFEB0]={
@@ -128480,7 +132249,7 @@ characters.data={
description="ARABIC LETTER ZAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0632 },
+ specials={ "final", 0x632 },
unicodeslot=0xFEB0,
},
[0xFEB1]={
@@ -128488,7 +132257,7 @@ characters.data={
description="ARABIC LETTER SEEN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0633 },
+ specials={ "isolated", 0x633 },
unicodeslot=0xFEB1,
},
[0xFEB2]={
@@ -128497,7 +132266,7 @@ characters.data={
description="ARABIC LETTER SEEN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0633 },
+ specials={ "final", 0x633 },
unicodeslot=0xFEB2,
},
[0xFEB3]={
@@ -128506,7 +132275,7 @@ characters.data={
description="ARABIC LETTER SEEN INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0633 },
+ specials={ "initial", 0x633 },
unicodeslot=0xFEB3,
},
[0xFEB4]={
@@ -128515,7 +132284,7 @@ characters.data={
description="ARABIC LETTER SEEN MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0633 },
+ specials={ "medial", 0x633 },
unicodeslot=0xFEB4,
},
[0xFEB5]={
@@ -128523,7 +132292,7 @@ characters.data={
description="ARABIC LETTER SHEEN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0634 },
+ specials={ "isolated", 0x634 },
unicodeslot=0xFEB5,
},
[0xFEB6]={
@@ -128532,7 +132301,7 @@ characters.data={
description="ARABIC LETTER SHEEN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0634 },
+ specials={ "final", 0x634 },
unicodeslot=0xFEB6,
},
[0xFEB7]={
@@ -128541,7 +132310,7 @@ characters.data={
description="ARABIC LETTER SHEEN INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0634 },
+ specials={ "initial", 0x634 },
unicodeslot=0xFEB7,
},
[0xFEB8]={
@@ -128550,7 +132319,7 @@ characters.data={
description="ARABIC LETTER SHEEN MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0634 },
+ specials={ "medial", 0x634 },
unicodeslot=0xFEB8,
},
[0xFEB9]={
@@ -128558,7 +132327,7 @@ characters.data={
description="ARABIC LETTER SAD ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0635 },
+ specials={ "isolated", 0x635 },
unicodeslot=0xFEB9,
},
[0xFEBA]={
@@ -128567,7 +132336,7 @@ characters.data={
description="ARABIC LETTER SAD FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0635 },
+ specials={ "final", 0x635 },
unicodeslot=0xFEBA,
},
[0xFEBB]={
@@ -128576,7 +132345,7 @@ characters.data={
description="ARABIC LETTER SAD INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0635 },
+ specials={ "initial", 0x635 },
unicodeslot=0xFEBB,
},
[0xFEBC]={
@@ -128585,7 +132354,7 @@ characters.data={
description="ARABIC LETTER SAD MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0635 },
+ specials={ "medial", 0x635 },
unicodeslot=0xFEBC,
},
[0xFEBD]={
@@ -128593,7 +132362,7 @@ characters.data={
description="ARABIC LETTER DAD ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0636 },
+ specials={ "isolated", 0x636 },
unicodeslot=0xFEBD,
},
[0xFEBE]={
@@ -128602,7 +132371,7 @@ characters.data={
description="ARABIC LETTER DAD FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0636 },
+ specials={ "final", 0x636 },
unicodeslot=0xFEBE,
},
[0xFEBF]={
@@ -128611,7 +132380,7 @@ characters.data={
description="ARABIC LETTER DAD INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0636 },
+ specials={ "initial", 0x636 },
unicodeslot=0xFEBF,
},
[0xFEC0]={
@@ -128620,7 +132389,7 @@ characters.data={
description="ARABIC LETTER DAD MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0636 },
+ specials={ "medial", 0x636 },
unicodeslot=0xFEC0,
},
[0xFEC1]={
@@ -128628,7 +132397,7 @@ characters.data={
description="ARABIC LETTER TAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0637 },
+ specials={ "isolated", 0x637 },
unicodeslot=0xFEC1,
},
[0xFEC2]={
@@ -128637,7 +132406,7 @@ characters.data={
description="ARABIC LETTER TAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0637 },
+ specials={ "final", 0x637 },
unicodeslot=0xFEC2,
},
[0xFEC3]={
@@ -128646,7 +132415,7 @@ characters.data={
description="ARABIC LETTER TAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0637 },
+ specials={ "initial", 0x637 },
unicodeslot=0xFEC3,
},
[0xFEC4]={
@@ -128655,7 +132424,7 @@ characters.data={
description="ARABIC LETTER TAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0637 },
+ specials={ "medial", 0x637 },
unicodeslot=0xFEC4,
},
[0xFEC5]={
@@ -128663,7 +132432,7 @@ characters.data={
description="ARABIC LETTER ZAH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0638 },
+ specials={ "isolated", 0x638 },
unicodeslot=0xFEC5,
},
[0xFEC6]={
@@ -128672,7 +132441,7 @@ characters.data={
description="ARABIC LETTER ZAH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0638 },
+ specials={ "final", 0x638 },
unicodeslot=0xFEC6,
},
[0xFEC7]={
@@ -128681,7 +132450,7 @@ characters.data={
description="ARABIC LETTER ZAH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0638 },
+ specials={ "initial", 0x638 },
unicodeslot=0xFEC7,
},
[0xFEC8]={
@@ -128690,7 +132459,7 @@ characters.data={
description="ARABIC LETTER ZAH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0638 },
+ specials={ "medial", 0x638 },
unicodeslot=0xFEC8,
},
[0xFEC9]={
@@ -128698,7 +132467,7 @@ characters.data={
description="ARABIC LETTER AIN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0639 },
+ specials={ "isolated", 0x639 },
unicodeslot=0xFEC9,
},
[0xFECA]={
@@ -128707,7 +132476,7 @@ characters.data={
description="ARABIC LETTER AIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0639 },
+ specials={ "final", 0x639 },
unicodeslot=0xFECA,
},
[0xFECB]={
@@ -128716,7 +132485,7 @@ characters.data={
description="ARABIC LETTER AIN INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0639 },
+ specials={ "initial", 0x639 },
unicodeslot=0xFECB,
},
[0xFECC]={
@@ -128725,7 +132494,7 @@ characters.data={
description="ARABIC LETTER AIN MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0639 },
+ specials={ "medial", 0x639 },
unicodeslot=0xFECC,
},
[0xFECD]={
@@ -128733,7 +132502,7 @@ characters.data={
description="ARABIC LETTER GHAIN ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x063A },
+ specials={ "isolated", 0x63A },
unicodeslot=0xFECD,
},
[0xFECE]={
@@ -128742,7 +132511,7 @@ characters.data={
description="ARABIC LETTER GHAIN FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x063A },
+ specials={ "final", 0x63A },
unicodeslot=0xFECE,
},
[0xFECF]={
@@ -128751,7 +132520,7 @@ characters.data={
description="ARABIC LETTER GHAIN INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x063A },
+ specials={ "initial", 0x63A },
unicodeslot=0xFECF,
},
[0xFED0]={
@@ -128760,7 +132529,7 @@ characters.data={
description="ARABIC LETTER GHAIN MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x063A },
+ specials={ "medial", 0x63A },
unicodeslot=0xFED0,
},
[0xFED1]={
@@ -128768,7 +132537,7 @@ characters.data={
description="ARABIC LETTER FEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0641 },
+ specials={ "isolated", 0x641 },
unicodeslot=0xFED1,
},
[0xFED2]={
@@ -128777,7 +132546,7 @@ characters.data={
description="ARABIC LETTER FEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0641 },
+ specials={ "final", 0x641 },
unicodeslot=0xFED2,
},
[0xFED3]={
@@ -128786,7 +132555,7 @@ characters.data={
description="ARABIC LETTER FEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0641 },
+ specials={ "initial", 0x641 },
unicodeslot=0xFED3,
},
[0xFED4]={
@@ -128795,7 +132564,7 @@ characters.data={
description="ARABIC LETTER FEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0641 },
+ specials={ "medial", 0x641 },
unicodeslot=0xFED4,
},
[0xFED5]={
@@ -128803,7 +132572,7 @@ characters.data={
description="ARABIC LETTER QAF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0642 },
+ specials={ "isolated", 0x642 },
unicodeslot=0xFED5,
},
[0xFED6]={
@@ -128812,7 +132581,7 @@ characters.data={
description="ARABIC LETTER QAF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0642 },
+ specials={ "final", 0x642 },
unicodeslot=0xFED6,
},
[0xFED7]={
@@ -128821,7 +132590,7 @@ characters.data={
description="ARABIC LETTER QAF INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0642 },
+ specials={ "initial", 0x642 },
unicodeslot=0xFED7,
},
[0xFED8]={
@@ -128830,7 +132599,7 @@ characters.data={
description="ARABIC LETTER QAF MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0642 },
+ specials={ "medial", 0x642 },
unicodeslot=0xFED8,
},
[0xFED9]={
@@ -128838,7 +132607,7 @@ characters.data={
description="ARABIC LETTER KAF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0643 },
+ specials={ "isolated", 0x643 },
unicodeslot=0xFED9,
},
[0xFEDA]={
@@ -128847,7 +132616,7 @@ characters.data={
description="ARABIC LETTER KAF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0643 },
+ specials={ "final", 0x643 },
unicodeslot=0xFEDA,
},
[0xFEDB]={
@@ -128856,7 +132625,7 @@ characters.data={
description="ARABIC LETTER KAF INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0643 },
+ specials={ "initial", 0x643 },
unicodeslot=0xFEDB,
},
[0xFEDC]={
@@ -128865,7 +132634,7 @@ characters.data={
description="ARABIC LETTER KAF MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0643 },
+ specials={ "medial", 0x643 },
unicodeslot=0xFEDC,
},
[0xFEDD]={
@@ -128873,7 +132642,7 @@ characters.data={
description="ARABIC LETTER LAM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644 },
+ specials={ "isolated", 0x644 },
unicodeslot=0xFEDD,
},
[0xFEDE]={
@@ -128882,7 +132651,7 @@ characters.data={
description="ARABIC LETTER LAM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644 },
+ specials={ "final", 0x644 },
unicodeslot=0xFEDE,
},
[0xFEDF]={
@@ -128891,7 +132660,7 @@ characters.data={
description="ARABIC LETTER LAM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0644 },
+ specials={ "initial", 0x644 },
unicodeslot=0xFEDF,
},
[0xFEE0]={
@@ -128900,7 +132669,7 @@ characters.data={
description="ARABIC LETTER LAM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0644 },
+ specials={ "medial", 0x644 },
unicodeslot=0xFEE0,
},
[0xFEE1]={
@@ -128908,7 +132677,7 @@ characters.data={
description="ARABIC LETTER MEEM ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0645 },
+ specials={ "isolated", 0x645 },
unicodeslot=0xFEE1,
},
[0xFEE2]={
@@ -128917,7 +132686,7 @@ characters.data={
description="ARABIC LETTER MEEM FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0645 },
+ specials={ "final", 0x645 },
unicodeslot=0xFEE2,
},
[0xFEE3]={
@@ -128926,7 +132695,7 @@ characters.data={
description="ARABIC LETTER MEEM INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0645 },
+ specials={ "initial", 0x645 },
unicodeslot=0xFEE3,
},
[0xFEE4]={
@@ -128935,7 +132704,7 @@ characters.data={
description="ARABIC LETTER MEEM MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0645 },
+ specials={ "medial", 0x645 },
unicodeslot=0xFEE4,
},
[0xFEE5]={
@@ -128943,7 +132712,7 @@ characters.data={
description="ARABIC LETTER NOON ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0646 },
+ specials={ "isolated", 0x646 },
unicodeslot=0xFEE5,
},
[0xFEE6]={
@@ -128952,7 +132721,7 @@ characters.data={
description="ARABIC LETTER NOON FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0646 },
+ specials={ "final", 0x646 },
unicodeslot=0xFEE6,
},
[0xFEE7]={
@@ -128961,7 +132730,7 @@ characters.data={
description="ARABIC LETTER NOON INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0646 },
+ specials={ "initial", 0x646 },
unicodeslot=0xFEE7,
},
[0xFEE8]={
@@ -128970,7 +132739,7 @@ characters.data={
description="ARABIC LETTER NOON MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0646 },
+ specials={ "medial", 0x646 },
unicodeslot=0xFEE8,
},
[0xFEE9]={
@@ -128978,7 +132747,7 @@ characters.data={
description="ARABIC LETTER HEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0647 },
+ specials={ "isolated", 0x647 },
unicodeslot=0xFEE9,
},
[0xFEEA]={
@@ -128987,7 +132756,7 @@ characters.data={
description="ARABIC LETTER HEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0647 },
+ specials={ "final", 0x647 },
unicodeslot=0xFEEA,
},
[0xFEEB]={
@@ -128996,7 +132765,7 @@ characters.data={
description="ARABIC LETTER HEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x0647 },
+ specials={ "initial", 0x647 },
unicodeslot=0xFEEB,
},
[0xFEEC]={
@@ -129005,7 +132774,7 @@ characters.data={
description="ARABIC LETTER HEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x0647 },
+ specials={ "medial", 0x647 },
unicodeslot=0xFEEC,
},
[0xFEED]={
@@ -129013,7 +132782,7 @@ characters.data={
description="ARABIC LETTER WAW ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0648 },
+ specials={ "isolated", 0x648 },
unicodeslot=0xFEED,
},
[0xFEEE]={
@@ -129022,7 +132791,7 @@ characters.data={
description="ARABIC LETTER WAW FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0648 },
+ specials={ "final", 0x648 },
unicodeslot=0xFEEE,
},
[0xFEEF]={
@@ -129030,7 +132799,7 @@ characters.data={
description="ARABIC LETTER ALEF MAKSURA ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0649 },
+ specials={ "isolated", 0x649 },
unicodeslot=0xFEEF,
},
[0xFEF0]={
@@ -129039,7 +132808,7 @@ characters.data={
description="ARABIC LETTER ALEF MAKSURA FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0649 },
+ specials={ "final", 0x649 },
unicodeslot=0xFEF0,
},
[0xFEF1]={
@@ -129047,7 +132816,7 @@ characters.data={
description="ARABIC LETTER YEH ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x064A },
+ specials={ "isolated", 0x64A },
unicodeslot=0xFEF1,
},
[0xFEF2]={
@@ -129056,7 +132825,7 @@ characters.data={
description="ARABIC LETTER YEH FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x064A },
+ specials={ "final", 0x64A },
unicodeslot=0xFEF2,
},
[0xFEF3]={
@@ -129065,7 +132834,7 @@ characters.data={
description="ARABIC LETTER YEH INITIAL FORM",
direction="al",
linebreak="al",
- specials={ "initial", 0x064A },
+ specials={ "initial", 0x64A },
unicodeslot=0xFEF3,
},
[0xFEF4]={
@@ -129074,7 +132843,7 @@ characters.data={
description="ARABIC LETTER YEH MEDIAL FORM",
direction="al",
linebreak="al",
- specials={ "medial", 0x064A },
+ specials={ "medial", 0x64A },
unicodeslot=0xFEF4,
},
[0xFEF5]={
@@ -129083,7 +132852,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x0622 },
+ specials={ "isolated", 0x644, 0x622 },
unicodeslot=0xFEF5,
},
[0xFEF6]={
@@ -129092,7 +132861,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0622 },
+ specials={ "final", 0x644, 0x622 },
unicodeslot=0xFEF6,
},
[0xFEF7]={
@@ -129101,7 +132870,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x0623 },
+ specials={ "isolated", 0x644, 0x623 },
unicodeslot=0xFEF7,
},
[0xFEF8]={
@@ -129110,7 +132879,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0623 },
+ specials={ "final", 0x644, 0x623 },
unicodeslot=0xFEF8,
},
[0xFEF9]={
@@ -129119,7 +132888,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x0625 },
+ specials={ "isolated", 0x644, 0x625 },
unicodeslot=0xFEF9,
},
[0xFEFA]={
@@ -129128,7 +132897,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0625 },
+ specials={ "final", 0x644, 0x625 },
unicodeslot=0xFEFA,
},
[0xFEFB]={
@@ -129137,7 +132906,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM",
direction="al",
linebreak="al",
- specials={ "isolated", 0x0644, 0x0627 },
+ specials={ "isolated", 0x644, 0x627 },
unicodeslot=0xFEFB,
},
[0xFEFC]={
@@ -129146,7 +132915,7 @@ characters.data={
description="ARABIC LIGATURE LAM WITH ALEF FINAL FORM",
direction="al",
linebreak="al",
- specials={ "final", 0x0644, 0x0627 },
+ specials={ "final", 0x644, 0x627 },
unicodeslot=0xFEFC,
},
[0xFEFF]={
@@ -129165,7 +132934,7 @@ characters.data={
description="FULLWIDTH EXCLAMATION MARK",
direction="on",
linebreak="ex",
- specials={ "wide", 0x0021 },
+ specials={ "wide", 0x21 },
unicodeslot=0xFF01,
},
[0xFF02]={
@@ -129175,7 +132944,7 @@ characters.data={
description="FULLWIDTH QUOTATION MARK",
direction="on",
linebreak="id",
- specials={ "wide", 0x0022 },
+ specials={ "wide", 0x22 },
unicodeslot=0xFF02,
},
[0xFF03]={
@@ -129185,7 +132954,7 @@ characters.data={
description="FULLWIDTH NUMBER SIGN",
direction="et",
linebreak="id",
- specials={ "wide", 0x0023 },
+ specials={ "wide", 0x23 },
unicodeslot=0xFF03,
},
[0xFF04]={
@@ -129195,7 +132964,7 @@ characters.data={
description="FULLWIDTH DOLLAR SIGN",
direction="et",
linebreak="pr",
- specials={ "wide", 0x0024 },
+ specials={ "wide", 0x24 },
unicodeslot=0xFF04,
},
[0xFF05]={
@@ -129205,7 +132974,7 @@ characters.data={
description="FULLWIDTH PERCENT SIGN",
direction="et",
linebreak="po",
- specials={ "wide", 0x0025 },
+ specials={ "wide", 0x25 },
unicodeslot=0xFF05,
},
[0xFF06]={
@@ -129215,7 +132984,7 @@ characters.data={
description="FULLWIDTH AMPERSAND",
direction="on",
linebreak="id",
- specials={ "wide", 0x0026 },
+ specials={ "wide", 0x26 },
unicodeslot=0xFF06,
},
[0xFF07]={
@@ -129225,7 +132994,7 @@ characters.data={
description="FULLWIDTH APOSTROPHE",
direction="on",
linebreak="id",
- specials={ "wide", 0x0027 },
+ specials={ "wide", 0x27 },
unicodeslot=0xFF07,
},
[0xFF08]={
@@ -129236,8 +133005,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFF09,
+ specials={ "wide", 0x28 },
textclass="open",
- specials={ "wide", 0x0028 },
unicodeslot=0xFF08,
},
[0xFF09]={
@@ -129248,8 +133017,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFF08,
+ specials={ "wide", 0x29 },
textclass="close",
- specials={ "wide", 0x0029 },
unicodeslot=0xFF09,
},
[0xFF0A]={
@@ -129259,7 +133028,7 @@ characters.data={
description="FULLWIDTH ASTERISK",
direction="on",
linebreak="id",
- specials={ "wide", 0x002A },
+ specials={ "wide", 0x2A },
unicodeslot=0xFF0A,
},
[0xFF0B]={
@@ -129269,7 +133038,7 @@ characters.data={
description="FULLWIDTH PLUS SIGN",
direction="es",
linebreak="id",
- specials={ "wide", 0x002B },
+ specials={ "wide", 0x2B },
unicodeslot=0xFF0B,
},
[0xFF0C]={
@@ -129279,7 +133048,7 @@ characters.data={
description="FULLWIDTH COMMA",
direction="cs",
linebreak="cl",
- specials={ "wide", 0x002C },
+ specials={ "wide", 0x2C },
unicodeslot=0xFF0C,
},
[0xFF0D]={
@@ -129289,7 +133058,7 @@ characters.data={
description="FULLWIDTH HYPHEN-MINUS",
direction="es",
linebreak="id",
- specials={ "wide", 0x002D },
+ specials={ "wide", 0x2D },
unicodeslot=0xFF0D,
},
[0xFF0E]={
@@ -129299,7 +133068,7 @@ characters.data={
description="FULLWIDTH FULL STOP",
direction="cs",
linebreak="cl",
- specials={ "wide", 0x002E },
+ specials={ "wide", 0x2E },
unicodeslot=0xFF0E,
},
[0xFF0F]={
@@ -129309,7 +133078,7 @@ characters.data={
description="FULLWIDTH SOLIDUS",
direction="cs",
linebreak="id",
- specials={ "wide", 0x002F },
+ specials={ "wide", 0x2F },
unicodeslot=0xFF0F,
},
[0xFF10]={
@@ -129319,7 +133088,7 @@ characters.data={
description="FULLWIDTH DIGIT ZERO",
direction="en",
linebreak="id",
- specials={ "wide", 0x0030 },
+ specials={ "wide", 0x30 },
unicodeslot=0xFF10,
},
[0xFF11]={
@@ -129329,7 +133098,7 @@ characters.data={
description="FULLWIDTH DIGIT ONE",
direction="en",
linebreak="id",
- specials={ "wide", 0x0031 },
+ specials={ "wide", 0x31 },
unicodeslot=0xFF11,
},
[0xFF12]={
@@ -129339,7 +133108,7 @@ characters.data={
description="FULLWIDTH DIGIT TWO",
direction="en",
linebreak="id",
- specials={ "wide", 0x0032 },
+ specials={ "wide", 0x32 },
unicodeslot=0xFF12,
},
[0xFF13]={
@@ -129349,7 +133118,7 @@ characters.data={
description="FULLWIDTH DIGIT THREE",
direction="en",
linebreak="id",
- specials={ "wide", 0x0033 },
+ specials={ "wide", 0x33 },
unicodeslot=0xFF13,
},
[0xFF14]={
@@ -129359,7 +133128,7 @@ characters.data={
description="FULLWIDTH DIGIT FOUR",
direction="en",
linebreak="id",
- specials={ "wide", 0x0034 },
+ specials={ "wide", 0x34 },
unicodeslot=0xFF14,
},
[0xFF15]={
@@ -129369,7 +133138,7 @@ characters.data={
description="FULLWIDTH DIGIT FIVE",
direction="en",
linebreak="id",
- specials={ "wide", 0x0035 },
+ specials={ "wide", 0x35 },
unicodeslot=0xFF15,
},
[0xFF16]={
@@ -129379,7 +133148,7 @@ characters.data={
description="FULLWIDTH DIGIT SIX",
direction="en",
linebreak="id",
- specials={ "wide", 0x0036 },
+ specials={ "wide", 0x36 },
unicodeslot=0xFF16,
},
[0xFF17]={
@@ -129389,7 +133158,7 @@ characters.data={
description="FULLWIDTH DIGIT SEVEN",
direction="en",
linebreak="id",
- specials={ "wide", 0x0037 },
+ specials={ "wide", 0x37 },
unicodeslot=0xFF17,
},
[0xFF18]={
@@ -129399,7 +133168,7 @@ characters.data={
description="FULLWIDTH DIGIT EIGHT",
direction="en",
linebreak="id",
- specials={ "wide", 0x0038 },
+ specials={ "wide", 0x38 },
unicodeslot=0xFF18,
},
[0xFF19]={
@@ -129409,7 +133178,7 @@ characters.data={
description="FULLWIDTH DIGIT NINE",
direction="en",
linebreak="id",
- specials={ "wide", 0x0039 },
+ specials={ "wide", 0x39 },
unicodeslot=0xFF19,
},
[0xFF1A]={
@@ -129419,7 +133188,7 @@ characters.data={
description="FULLWIDTH COLON",
direction="cs",
linebreak="ns",
- specials={ "wide", 0x003A },
+ specials={ "wide", 0x3A },
unicodeslot=0xFF1A,
},
[0xFF1B]={
@@ -129429,7 +133198,7 @@ characters.data={
description="FULLWIDTH SEMICOLON",
direction="on",
linebreak="ns",
- specials={ "wide", 0x003B },
+ specials={ "wide", 0x3B },
unicodeslot=0xFF1B,
},
[0xFF1C]={
@@ -129440,8 +133209,8 @@ characters.data={
direction="on",
linebreak="id",
mirror=0xFF1E,
+ specials={ "wide", 0x3C },
textclass="open",
- specials={ "wide", 0x003C },
unicodeslot=0xFF1C,
},
[0xFF1D]={
@@ -129451,7 +133220,7 @@ characters.data={
description="FULLWIDTH EQUALS SIGN",
direction="on",
linebreak="id",
- specials={ "wide", 0x003D },
+ specials={ "wide", 0x3D },
unicodeslot=0xFF1D,
},
[0xFF1E]={
@@ -129462,8 +133231,8 @@ characters.data={
direction="on",
linebreak="id",
mirror=0xFF1C,
+ specials={ "wide", 0x3E },
textclass="close",
- specials={ "wide", 0x003E },
unicodeslot=0xFF1E,
},
[0xFF1F]={
@@ -129473,7 +133242,7 @@ characters.data={
description="FULLWIDTH QUESTION MARK",
direction="on",
linebreak="ex",
- specials={ "wide", 0x003F },
+ specials={ "wide", 0x3F },
unicodeslot=0xFF1F,
},
[0xFF20]={
@@ -129483,7 +133252,7 @@ characters.data={
description="FULLWIDTH COMMERCIAL AT",
direction="on",
linebreak="id",
- specials={ "wide", 0x0040 },
+ specials={ "wide", 0x40 },
unicodeslot=0xFF20,
},
[0xFF21]={
@@ -129494,7 +133263,7 @@ characters.data={
direction="l",
lccode=0xFF41,
linebreak="id",
- specials={ "wide", 0x0041 },
+ specials={ "wide", 0x41 },
unicodeslot=0xFF21,
},
[0xFF22]={
@@ -129505,7 +133274,7 @@ characters.data={
direction="l",
lccode=0xFF42,
linebreak="id",
- specials={ "wide", 0x0042 },
+ specials={ "wide", 0x42 },
unicodeslot=0xFF22,
},
[0xFF23]={
@@ -129516,7 +133285,7 @@ characters.data={
direction="l",
lccode=0xFF43,
linebreak="id",
- specials={ "wide", 0x0043 },
+ specials={ "wide", 0x43 },
unicodeslot=0xFF23,
},
[0xFF24]={
@@ -129527,7 +133296,7 @@ characters.data={
direction="l",
lccode=0xFF44,
linebreak="id",
- specials={ "wide", 0x0044 },
+ specials={ "wide", 0x44 },
unicodeslot=0xFF24,
},
[0xFF25]={
@@ -129538,7 +133307,7 @@ characters.data={
direction="l",
lccode=0xFF45,
linebreak="id",
- specials={ "wide", 0x0045 },
+ specials={ "wide", 0x45 },
unicodeslot=0xFF25,
},
[0xFF26]={
@@ -129549,7 +133318,7 @@ characters.data={
direction="l",
lccode=0xFF46,
linebreak="id",
- specials={ "wide", 0x0046 },
+ specials={ "wide", 0x46 },
unicodeslot=0xFF26,
},
[0xFF27]={
@@ -129560,7 +133329,7 @@ characters.data={
direction="l",
lccode=0xFF47,
linebreak="id",
- specials={ "wide", 0x0047 },
+ specials={ "wide", 0x47 },
unicodeslot=0xFF27,
},
[0xFF28]={
@@ -129571,7 +133340,7 @@ characters.data={
direction="l",
lccode=0xFF48,
linebreak="id",
- specials={ "wide", 0x0048 },
+ specials={ "wide", 0x48 },
unicodeslot=0xFF28,
},
[0xFF29]={
@@ -129582,7 +133351,7 @@ characters.data={
direction="l",
lccode=0xFF49,
linebreak="id",
- specials={ "wide", 0x0049 },
+ specials={ "wide", 0x49 },
unicodeslot=0xFF29,
},
[0xFF2A]={
@@ -129593,7 +133362,7 @@ characters.data={
direction="l",
lccode=0xFF4A,
linebreak="id",
- specials={ "wide", 0x004A },
+ specials={ "wide", 0x4A },
unicodeslot=0xFF2A,
},
[0xFF2B]={
@@ -129604,7 +133373,7 @@ characters.data={
direction="l",
lccode=0xFF4B,
linebreak="id",
- specials={ "wide", 0x004B },
+ specials={ "wide", 0x4B },
unicodeslot=0xFF2B,
},
[0xFF2C]={
@@ -129615,7 +133384,7 @@ characters.data={
direction="l",
lccode=0xFF4C,
linebreak="id",
- specials={ "wide", 0x004C },
+ specials={ "wide", 0x4C },
unicodeslot=0xFF2C,
},
[0xFF2D]={
@@ -129626,7 +133395,7 @@ characters.data={
direction="l",
lccode=0xFF4D,
linebreak="id",
- specials={ "wide", 0x004D },
+ specials={ "wide", 0x4D },
unicodeslot=0xFF2D,
},
[0xFF2E]={
@@ -129637,7 +133406,7 @@ characters.data={
direction="l",
lccode=0xFF4E,
linebreak="id",
- specials={ "wide", 0x004E },
+ specials={ "wide", 0x4E },
unicodeslot=0xFF2E,
},
[0xFF2F]={
@@ -129648,7 +133417,7 @@ characters.data={
direction="l",
lccode=0xFF4F,
linebreak="id",
- specials={ "wide", 0x004F },
+ specials={ "wide", 0x4F },
unicodeslot=0xFF2F,
},
[0xFF30]={
@@ -129659,7 +133428,7 @@ characters.data={
direction="l",
lccode=0xFF50,
linebreak="id",
- specials={ "wide", 0x0050 },
+ specials={ "wide", 0x50 },
unicodeslot=0xFF30,
},
[0xFF31]={
@@ -129670,7 +133439,7 @@ characters.data={
direction="l",
lccode=0xFF51,
linebreak="id",
- specials={ "wide", 0x0051 },
+ specials={ "wide", 0x51 },
unicodeslot=0xFF31,
},
[0xFF32]={
@@ -129681,7 +133450,7 @@ characters.data={
direction="l",
lccode=0xFF52,
linebreak="id",
- specials={ "wide", 0x0052 },
+ specials={ "wide", 0x52 },
unicodeslot=0xFF32,
},
[0xFF33]={
@@ -129692,7 +133461,7 @@ characters.data={
direction="l",
lccode=0xFF53,
linebreak="id",
- specials={ "wide", 0x0053 },
+ specials={ "wide", 0x53 },
unicodeslot=0xFF33,
},
[0xFF34]={
@@ -129703,7 +133472,7 @@ characters.data={
direction="l",
lccode=0xFF54,
linebreak="id",
- specials={ "wide", 0x0054 },
+ specials={ "wide", 0x54 },
unicodeslot=0xFF34,
},
[0xFF35]={
@@ -129714,7 +133483,7 @@ characters.data={
direction="l",
lccode=0xFF55,
linebreak="id",
- specials={ "wide", 0x0055 },
+ specials={ "wide", 0x55 },
unicodeslot=0xFF35,
},
[0xFF36]={
@@ -129725,7 +133494,7 @@ characters.data={
direction="l",
lccode=0xFF56,
linebreak="id",
- specials={ "wide", 0x0056 },
+ specials={ "wide", 0x56 },
unicodeslot=0xFF36,
},
[0xFF37]={
@@ -129736,7 +133505,7 @@ characters.data={
direction="l",
lccode=0xFF57,
linebreak="id",
- specials={ "wide", 0x0057 },
+ specials={ "wide", 0x57 },
unicodeslot=0xFF37,
},
[0xFF38]={
@@ -129747,7 +133516,7 @@ characters.data={
direction="l",
lccode=0xFF58,
linebreak="id",
- specials={ "wide", 0x0058 },
+ specials={ "wide", 0x58 },
unicodeslot=0xFF38,
},
[0xFF39]={
@@ -129758,7 +133527,7 @@ characters.data={
direction="l",
lccode=0xFF59,
linebreak="id",
- specials={ "wide", 0x0059 },
+ specials={ "wide", 0x59 },
unicodeslot=0xFF39,
},
[0xFF3A]={
@@ -129769,7 +133538,7 @@ characters.data={
direction="l",
lccode=0xFF5A,
linebreak="id",
- specials={ "wide", 0x005A },
+ specials={ "wide", 0x5A },
unicodeslot=0xFF3A,
},
[0xFF3B]={
@@ -129780,8 +133549,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFF3D,
+ specials={ "wide", 0x5B },
textclass="open",
- specials={ "wide", 0x005B },
unicodeslot=0xFF3B,
},
[0xFF3C]={
@@ -129791,7 +133560,7 @@ characters.data={
description="FULLWIDTH REVERSE SOLIDUS",
direction="on",
linebreak="id",
- specials={ "wide", 0x005C },
+ specials={ "wide", 0x5C },
unicodeslot=0xFF3C,
},
[0xFF3D]={
@@ -129802,8 +133571,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFF3B,
+ specials={ "wide", 0x5D },
textclass="close",
- specials={ "wide", 0x005D },
unicodeslot=0xFF3D,
},
[0xFF3E]={
@@ -129813,7 +133582,7 @@ characters.data={
description="FULLWIDTH CIRCUMFLEX ACCENT",
direction="on",
linebreak="id",
- specials={ "wide", 0x005E },
+ specials={ "wide", 0x5E },
unicodeslot=0xFF3E,
},
[0xFF3F]={
@@ -129823,7 +133592,7 @@ characters.data={
description="FULLWIDTH LOW LINE",
direction="on",
linebreak="id",
- specials={ "wide", 0x005F },
+ specials={ "wide", 0x5F },
unicodeslot=0xFF3F,
},
[0xFF40]={
@@ -129833,7 +133602,7 @@ characters.data={
description="FULLWIDTH GRAVE ACCENT",
direction="on",
linebreak="id",
- specials={ "wide", 0x0060 },
+ specials={ "wide", 0x60 },
unicodeslot=0xFF40,
},
[0xFF41]={
@@ -129843,7 +133612,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER A",
direction="l",
linebreak="id",
- specials={ "wide", 0x0061 },
+ specials={ "wide", 0x61 },
uccode=0xFF21,
unicodeslot=0xFF41,
},
@@ -129854,7 +133623,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER B",
direction="l",
linebreak="id",
- specials={ "wide", 0x0062 },
+ specials={ "wide", 0x62 },
uccode=0xFF22,
unicodeslot=0xFF42,
},
@@ -129865,7 +133634,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER C",
direction="l",
linebreak="id",
- specials={ "wide", 0x0063 },
+ specials={ "wide", 0x63 },
uccode=0xFF23,
unicodeslot=0xFF43,
},
@@ -129876,7 +133645,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER D",
direction="l",
linebreak="id",
- specials={ "wide", 0x0064 },
+ specials={ "wide", 0x64 },
uccode=0xFF24,
unicodeslot=0xFF44,
},
@@ -129887,7 +133656,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER E",
direction="l",
linebreak="id",
- specials={ "wide", 0x0065 },
+ specials={ "wide", 0x65 },
uccode=0xFF25,
unicodeslot=0xFF45,
},
@@ -129898,7 +133667,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER F",
direction="l",
linebreak="id",
- specials={ "wide", 0x0066 },
+ specials={ "wide", 0x66 },
uccode=0xFF26,
unicodeslot=0xFF46,
},
@@ -129909,7 +133678,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER G",
direction="l",
linebreak="id",
- specials={ "wide", 0x0067 },
+ specials={ "wide", 0x67 },
uccode=0xFF27,
unicodeslot=0xFF47,
},
@@ -129920,7 +133689,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER H",
direction="l",
linebreak="id",
- specials={ "wide", 0x0068 },
+ specials={ "wide", 0x68 },
uccode=0xFF28,
unicodeslot=0xFF48,
},
@@ -129931,7 +133700,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER I",
direction="l",
linebreak="id",
- specials={ "wide", 0x0069 },
+ specials={ "wide", 0x69 },
uccode=0xFF29,
unicodeslot=0xFF49,
},
@@ -129942,7 +133711,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER J",
direction="l",
linebreak="id",
- specials={ "wide", 0x006A },
+ specials={ "wide", 0x6A },
uccode=0xFF2A,
unicodeslot=0xFF4A,
},
@@ -129953,7 +133722,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER K",
direction="l",
linebreak="id",
- specials={ "wide", 0x006B },
+ specials={ "wide", 0x6B },
uccode=0xFF2B,
unicodeslot=0xFF4B,
},
@@ -129964,7 +133733,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER L",
direction="l",
linebreak="id",
- specials={ "wide", 0x006C },
+ specials={ "wide", 0x6C },
uccode=0xFF2C,
unicodeslot=0xFF4C,
},
@@ -129975,7 +133744,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER M",
direction="l",
linebreak="id",
- specials={ "wide", 0x006D },
+ specials={ "wide", 0x6D },
uccode=0xFF2D,
unicodeslot=0xFF4D,
},
@@ -129986,7 +133755,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER N",
direction="l",
linebreak="id",
- specials={ "wide", 0x006E },
+ specials={ "wide", 0x6E },
uccode=0xFF2E,
unicodeslot=0xFF4E,
},
@@ -129997,7 +133766,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER O",
direction="l",
linebreak="id",
- specials={ "wide", 0x006F },
+ specials={ "wide", 0x6F },
uccode=0xFF2F,
unicodeslot=0xFF4F,
},
@@ -130008,7 +133777,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER P",
direction="l",
linebreak="id",
- specials={ "wide", 0x0070 },
+ specials={ "wide", 0x70 },
uccode=0xFF30,
unicodeslot=0xFF50,
},
@@ -130019,7 +133788,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER Q",
direction="l",
linebreak="id",
- specials={ "wide", 0x0071 },
+ specials={ "wide", 0x71 },
uccode=0xFF31,
unicodeslot=0xFF51,
},
@@ -130030,7 +133799,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER R",
direction="l",
linebreak="id",
- specials={ "wide", 0x0072 },
+ specials={ "wide", 0x72 },
uccode=0xFF32,
unicodeslot=0xFF52,
},
@@ -130041,7 +133810,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER S",
direction="l",
linebreak="id",
- specials={ "wide", 0x0073 },
+ specials={ "wide", 0x73 },
uccode=0xFF33,
unicodeslot=0xFF53,
},
@@ -130052,7 +133821,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER T",
direction="l",
linebreak="id",
- specials={ "wide", 0x0074 },
+ specials={ "wide", 0x74 },
uccode=0xFF34,
unicodeslot=0xFF54,
},
@@ -130063,7 +133832,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER U",
direction="l",
linebreak="id",
- specials={ "wide", 0x0075 },
+ specials={ "wide", 0x75 },
uccode=0xFF35,
unicodeslot=0xFF55,
},
@@ -130074,7 +133843,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER V",
direction="l",
linebreak="id",
- specials={ "wide", 0x0076 },
+ specials={ "wide", 0x76 },
uccode=0xFF36,
unicodeslot=0xFF56,
},
@@ -130085,7 +133854,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER W",
direction="l",
linebreak="id",
- specials={ "wide", 0x0077 },
+ specials={ "wide", 0x77 },
uccode=0xFF37,
unicodeslot=0xFF57,
},
@@ -130096,7 +133865,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER X",
direction="l",
linebreak="id",
- specials={ "wide", 0x0078 },
+ specials={ "wide", 0x78 },
uccode=0xFF38,
unicodeslot=0xFF58,
},
@@ -130107,7 +133876,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER Y",
direction="l",
linebreak="id",
- specials={ "wide", 0x0079 },
+ specials={ "wide", 0x79 },
uccode=0xFF39,
unicodeslot=0xFF59,
},
@@ -130118,7 +133887,7 @@ characters.data={
description="FULLWIDTH LATIN SMALL LETTER Z",
direction="l",
linebreak="id",
- specials={ "wide", 0x007A },
+ specials={ "wide", 0x7A },
uccode=0xFF3A,
unicodeslot=0xFF5A,
},
@@ -130130,8 +133899,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFF5D,
+ specials={ "wide", 0x7B },
textclass="open",
- specials={ "wide", 0x007B },
unicodeslot=0xFF5B,
},
[0xFF5C]={
@@ -130141,7 +133910,7 @@ characters.data={
description="FULLWIDTH VERTICAL LINE",
direction="on",
linebreak="id",
- specials={ "wide", 0x007C },
+ specials={ "wide", 0x7C },
unicodeslot=0xFF5C,
},
[0xFF5D]={
@@ -130152,8 +133921,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFF5B,
+ specials={ "wide", 0x7D },
textclass="close",
- specials={ "wide", 0x007D },
unicodeslot=0xFF5D,
},
[0xFF5E]={
@@ -130163,7 +133932,7 @@ characters.data={
description="FULLWIDTH TILDE",
direction="on",
linebreak="id",
- specials={ "wide", 0x007E },
+ specials={ "wide", 0x7E },
unicodeslot=0xFF5E,
},
[0xFF5F]={
@@ -130173,8 +133942,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFF60,
- textclass="open",
specials={ "wide", 0x2985 },
+ textclass="open",
unicodeslot=0xFF5F,
},
[0xFF60]={
@@ -130184,8 +133953,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFF5F,
- textclass="close",
specials={ "wide", 0x2986 },
+ textclass="close",
unicodeslot=0xFF60,
},
[0xFF61]={
@@ -130206,8 +133975,8 @@ characters.data={
direction="on",
linebreak="op",
mirror=0xFF63,
- textclass="open",
specials={ "narrow", 0x300C },
+ textclass="open",
unicodeslot=0xFF62,
},
[0xFF63]={
@@ -130218,8 +133987,8 @@ characters.data={
direction="on",
linebreak="cl",
mirror=0xFF62,
- textclass="close",
specials={ "narrow", 0x300D },
+ textclass="close",
unicodeslot=0xFF63,
},
[0xFF64]={
@@ -130249,6 +134018,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER WO",
direction="l",
linebreak="al",
+ shcode=0x3092,
specials={ "narrow", 0x30F2 },
unicodeslot=0xFF66,
},
@@ -130309,6 +134079,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SMALL YA",
direction="l",
linebreak="cj",
+ shcode=0x3084,
specials={ "narrow", 0x30E3 },
unicodeslot=0xFF6C,
},
@@ -130319,6 +134090,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SMALL YU",
direction="l",
linebreak="cj",
+ shcode=0x3086,
specials={ "narrow", 0x30E5 },
unicodeslot=0xFF6D,
},
@@ -130329,6 +134101,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SMALL YO",
direction="l",
linebreak="cj",
+ shcode=0x3088,
specials={ "narrow", 0x30E7 },
unicodeslot=0xFF6E,
},
@@ -130339,6 +134112,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SMALL TU",
direction="l",
linebreak="cj",
+ shcode=0x3064,
specials={ "narrow", 0x30C3 },
unicodeslot=0xFF6F,
},
@@ -130359,6 +134133,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER A",
direction="l",
linebreak="al",
+ shcode=0x3042,
specials={ "narrow", 0x30A2 },
unicodeslot=0xFF71,
},
@@ -130369,6 +134144,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER I",
direction="l",
linebreak="al",
+ shcode=0x3044,
specials={ "narrow", 0x30A4 },
unicodeslot=0xFF72,
},
@@ -130379,6 +134155,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER U",
direction="l",
linebreak="al",
+ shcode=0x3046,
specials={ "narrow", 0x30A6 },
unicodeslot=0xFF73,
},
@@ -130389,6 +134166,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER E",
direction="l",
linebreak="al",
+ shcode=0x3048,
specials={ "narrow", 0x30A8 },
unicodeslot=0xFF74,
},
@@ -130399,6 +134177,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER O",
direction="l",
linebreak="al",
+ shcode=0x304A,
specials={ "narrow", 0x30AA },
unicodeslot=0xFF75,
},
@@ -130409,6 +134188,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER KA",
direction="l",
linebreak="al",
+ shcode=0x304B,
specials={ "narrow", 0x30AB },
unicodeslot=0xFF76,
},
@@ -130419,6 +134199,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER KI",
direction="l",
linebreak="al",
+ shcode=0x304D,
specials={ "narrow", 0x30AD },
unicodeslot=0xFF77,
},
@@ -130429,6 +134210,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER KU",
direction="l",
linebreak="al",
+ shcode=0x304F,
specials={ "narrow", 0x30AF },
unicodeslot=0xFF78,
},
@@ -130439,6 +134221,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER KE",
direction="l",
linebreak="al",
+ shcode=0x3051,
specials={ "narrow", 0x30B1 },
unicodeslot=0xFF79,
},
@@ -130449,6 +134232,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER KO",
direction="l",
linebreak="al",
+ shcode=0x3053,
specials={ "narrow", 0x30B3 },
unicodeslot=0xFF7A,
},
@@ -130459,6 +134243,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SA",
direction="l",
linebreak="al",
+ shcode=0x3055,
specials={ "narrow", 0x30B5 },
unicodeslot=0xFF7B,
},
@@ -130469,6 +134254,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SI",
direction="l",
linebreak="al",
+ shcode=0x3057,
specials={ "narrow", 0x30B7 },
unicodeslot=0xFF7C,
},
@@ -130479,6 +134265,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SU",
direction="l",
linebreak="al",
+ shcode=0x3059,
specials={ "narrow", 0x30B9 },
unicodeslot=0xFF7D,
},
@@ -130489,6 +134276,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SE",
direction="l",
linebreak="al",
+ shcode=0x305B,
specials={ "narrow", 0x30BB },
unicodeslot=0xFF7E,
},
@@ -130499,6 +134287,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER SO",
direction="l",
linebreak="al",
+ shcode=0x305D,
specials={ "narrow", 0x30BD },
unicodeslot=0xFF7F,
},
@@ -130509,6 +134298,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER TA",
direction="l",
linebreak="al",
+ shcode=0x305F,
specials={ "narrow", 0x30BF },
unicodeslot=0xFF80,
},
@@ -130519,6 +134309,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER TI",
direction="l",
linebreak="al",
+ shcode=0x3061,
specials={ "narrow", 0x30C1 },
unicodeslot=0xFF81,
},
@@ -130529,6 +134320,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER TU",
direction="l",
linebreak="al",
+ shcode=0x3064,
specials={ "narrow", 0x30C4 },
unicodeslot=0xFF82,
},
@@ -130539,6 +134331,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER TE",
direction="l",
linebreak="al",
+ shcode=0x3066,
specials={ "narrow", 0x30C6 },
unicodeslot=0xFF83,
},
@@ -130549,6 +134342,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER TO",
direction="l",
linebreak="al",
+ shcode=0x3068,
specials={ "narrow", 0x30C8 },
unicodeslot=0xFF84,
},
@@ -130559,6 +134353,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER NA",
direction="l",
linebreak="al",
+ shcode=0x306A,
specials={ "narrow", 0x30CA },
unicodeslot=0xFF85,
},
@@ -130569,6 +134364,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER NI",
direction="l",
linebreak="al",
+ shcode=0x306B,
specials={ "narrow", 0x30CB },
unicodeslot=0xFF86,
},
@@ -130579,6 +134375,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER NU",
direction="l",
linebreak="al",
+ shcode=0x306C,
specials={ "narrow", 0x30CC },
unicodeslot=0xFF87,
},
@@ -130589,6 +134386,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER NE",
direction="l",
linebreak="al",
+ shcode=0x306D,
specials={ "narrow", 0x30CD },
unicodeslot=0xFF88,
},
@@ -130599,6 +134397,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER NO",
direction="l",
linebreak="al",
+ shcode=0x306E,
specials={ "narrow", 0x30CE },
unicodeslot=0xFF89,
},
@@ -130609,6 +134408,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER HA",
direction="l",
linebreak="al",
+ shcode=0x306F,
specials={ "narrow", 0x30CF },
unicodeslot=0xFF8A,
},
@@ -130619,6 +134419,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER HI",
direction="l",
linebreak="al",
+ shcode=0x3072,
specials={ "narrow", 0x30D2 },
unicodeslot=0xFF8B,
},
@@ -130629,6 +134430,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER HU",
direction="l",
linebreak="al",
+ shcode=0x3075,
specials={ "narrow", 0x30D5 },
unicodeslot=0xFF8C,
},
@@ -130639,6 +134441,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER HE",
direction="l",
linebreak="al",
+ shcode=0x3078,
specials={ "narrow", 0x30D8 },
unicodeslot=0xFF8D,
},
@@ -130649,6 +134452,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER HO",
direction="l",
linebreak="al",
+ shcode=0x307B,
specials={ "narrow", 0x30DB },
unicodeslot=0xFF8E,
},
@@ -130659,6 +134463,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER MA",
direction="l",
linebreak="al",
+ shcode=0x307E,
specials={ "narrow", 0x30DE },
unicodeslot=0xFF8F,
},
@@ -130669,6 +134474,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER MI",
direction="l",
linebreak="al",
+ shcode=0x307F,
specials={ "narrow", 0x30DF },
unicodeslot=0xFF90,
},
@@ -130679,6 +134485,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER MU",
direction="l",
linebreak="al",
+ shcode=0x3080,
specials={ "narrow", 0x30E0 },
unicodeslot=0xFF91,
},
@@ -130689,6 +134496,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER ME",
direction="l",
linebreak="al",
+ shcode=0x3081,
specials={ "narrow", 0x30E1 },
unicodeslot=0xFF92,
},
@@ -130699,6 +134507,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER MO",
direction="l",
linebreak="al",
+ shcode=0x3082,
specials={ "narrow", 0x30E2 },
unicodeslot=0xFF93,
},
@@ -130709,6 +134518,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER YA",
direction="l",
linebreak="al",
+ shcode=0x3084,
specials={ "narrow", 0x30E4 },
unicodeslot=0xFF94,
},
@@ -130719,6 +134529,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER YU",
direction="l",
linebreak="al",
+ shcode=0x3086,
specials={ "narrow", 0x30E6 },
unicodeslot=0xFF95,
},
@@ -130729,6 +134540,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER YO",
direction="l",
linebreak="al",
+ shcode=0x3088,
specials={ "narrow", 0x30E8 },
unicodeslot=0xFF96,
},
@@ -130739,6 +134551,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER RA",
direction="l",
linebreak="al",
+ shcode=0x3089,
specials={ "narrow", 0x30E9 },
unicodeslot=0xFF97,
},
@@ -130749,6 +134562,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER RI",
direction="l",
linebreak="al",
+ shcode=0x308A,
specials={ "narrow", 0x30EA },
unicodeslot=0xFF98,
},
@@ -130759,6 +134573,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER RU",
direction="l",
linebreak="al",
+ shcode=0x308B,
specials={ "narrow", 0x30EB },
unicodeslot=0xFF99,
},
@@ -130769,6 +134584,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER RE",
direction="l",
linebreak="al",
+ shcode=0x308C,
specials={ "narrow", 0x30EC },
unicodeslot=0xFF9A,
},
@@ -130779,6 +134595,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER RO",
direction="l",
linebreak="al",
+ shcode=0x308D,
specials={ "narrow", 0x30ED },
unicodeslot=0xFF9B,
},
@@ -130789,6 +134606,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER WA",
direction="l",
linebreak="al",
+ shcode=0x308F,
specials={ "narrow", 0x30EF },
unicodeslot=0xFF9C,
},
@@ -130799,6 +134617,7 @@ characters.data={
description="HALFWIDTH KATAKANA LETTER N",
direction="l",
linebreak="al",
+ shcode=0x3093,
specials={ "narrow", 0x30F3 },
unicodeslot=0xFF9D,
},
@@ -131297,7 +135116,7 @@ characters.data={
description="FULLWIDTH CENT SIGN",
direction="et",
linebreak="po",
- specials={ "wide", 0x00A2 },
+ specials={ "wide", 0xA2 },
unicodeslot=0xFFE0,
},
[0xFFE1]={
@@ -131307,7 +135126,7 @@ characters.data={
description="FULLWIDTH POUND SIGN",
direction="et",
linebreak="pr",
- specials={ "wide", 0x00A3 },
+ specials={ "wide", 0xA3 },
unicodeslot=0xFFE1,
},
[0xFFE2]={
@@ -131316,7 +135135,7 @@ characters.data={
description="FULLWIDTH NOT SIGN",
direction="on",
linebreak="id",
- specials={ "wide", 0x00AC },
+ specials={ "wide", 0xAC },
unicodeslot=0xFFE2,
},
[0xFFE3]={
@@ -131326,7 +135145,7 @@ characters.data={
description="FULLWIDTH MACRON",
direction="on",
linebreak="id",
- specials={ "wide", 0x00AF },
+ specials={ "wide", 0xAF },
unicodeslot=0xFFE3,
},
[0xFFE4]={
@@ -131335,7 +135154,7 @@ characters.data={
description="FULLWIDTH BROKEN BAR",
direction="on",
linebreak="id",
- specials={ "wide", 0x00A6 },
+ specials={ "wide", 0xA6 },
unicodeslot=0xFFE4,
},
[0xFFE5]={
@@ -131345,7 +135164,7 @@ characters.data={
description="FULLWIDTH YEN SIGN",
direction="et",
linebreak="pr",
- specials={ "wide", 0x00A5 },
+ specials={ "wide", 0xA5 },
unicodeslot=0xFFE5,
},
[0xFFE6]={
@@ -133861,6 +137680,20 @@ characters.data={
linebreak="al",
unicodeslot=0x1018A,
},
+ [0x1018B]={
+ category="no",
+ description="GREEK ONE QUARTER SIGN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1018B,
+ },
+ [0x1018C]={
+ category="so",
+ description="GREEK SINUSOID SIGN",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1018C,
+ },
[0x10190]={
category="so",
description="ROMAN SEXTANS SIGN",
@@ -133945,6 +137778,13 @@ characters.data={
linebreak="al",
unicodeslot=0x1019B,
},
+ [0x101A0]={
+ category="so",
+ description="GREEK SYMBOL TAU RHO",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x101A0,
+ },
[0x101D0]={
category="so",
description="PHAISTOS DISC SIGN PEDESTRIAN",
@@ -134262,6 +138102,7 @@ characters.data={
},
[0x101FD]={
category="mn",
+ combining=0xDC,
description="PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE",
direction="nsm",
linebreak="cm",
@@ -134813,6 +138654,203 @@ characters.data={
linebreak="al",
unicodeslot=0x102D0,
},
+ [0x102E0]={
+ category="mn",
+ combining=0xDC,
+ description="COPTIC EPACT THOUSANDS MARK",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x102E0,
+ },
+ [0x102E1]={
+ category="no",
+ description="COPTIC EPACT DIGIT ONE",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E1,
+ },
+ [0x102E2]={
+ category="no",
+ description="COPTIC EPACT DIGIT TWO",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E2,
+ },
+ [0x102E3]={
+ category="no",
+ description="COPTIC EPACT DIGIT THREE",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E3,
+ },
+ [0x102E4]={
+ category="no",
+ description="COPTIC EPACT DIGIT FOUR",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E4,
+ },
+ [0x102E5]={
+ category="no",
+ description="COPTIC EPACT DIGIT FIVE",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E5,
+ },
+ [0x102E6]={
+ category="no",
+ description="COPTIC EPACT DIGIT SIX",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E6,
+ },
+ [0x102E7]={
+ category="no",
+ description="COPTIC EPACT DIGIT SEVEN",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E7,
+ },
+ [0x102E8]={
+ category="no",
+ description="COPTIC EPACT DIGIT EIGHT",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E8,
+ },
+ [0x102E9]={
+ category="no",
+ description="COPTIC EPACT DIGIT NINE",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102E9,
+ },
+ [0x102EA]={
+ category="no",
+ description="COPTIC EPACT NUMBER TEN",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102EA,
+ },
+ [0x102EB]={
+ category="no",
+ description="COPTIC EPACT NUMBER TWENTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102EB,
+ },
+ [0x102EC]={
+ category="no",
+ description="COPTIC EPACT NUMBER THIRTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102EC,
+ },
+ [0x102ED]={
+ category="no",
+ description="COPTIC EPACT NUMBER FORTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102ED,
+ },
+ [0x102EE]={
+ category="no",
+ description="COPTIC EPACT NUMBER FIFTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102EE,
+ },
+ [0x102EF]={
+ category="no",
+ description="COPTIC EPACT NUMBER SIXTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102EF,
+ },
+ [0x102F0]={
+ category="no",
+ description="COPTIC EPACT NUMBER SEVENTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F0,
+ },
+ [0x102F1]={
+ category="no",
+ description="COPTIC EPACT NUMBER EIGHTY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F1,
+ },
+ [0x102F2]={
+ category="no",
+ description="COPTIC EPACT NUMBER NINETY",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F2,
+ },
+ [0x102F3]={
+ category="no",
+ description="COPTIC EPACT NUMBER ONE HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F3,
+ },
+ [0x102F4]={
+ category="no",
+ description="COPTIC EPACT NUMBER TWO HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F4,
+ },
+ [0x102F5]={
+ category="no",
+ description="COPTIC EPACT NUMBER THREE HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F5,
+ },
+ [0x102F6]={
+ category="no",
+ description="COPTIC EPACT NUMBER FOUR HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F6,
+ },
+ [0x102F7]={
+ category="no",
+ description="COPTIC EPACT NUMBER FIVE HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F7,
+ },
+ [0x102F8]={
+ category="no",
+ description="COPTIC EPACT NUMBER SIX HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F8,
+ },
+ [0x102F9]={
+ category="no",
+ description="COPTIC EPACT NUMBER SEVEN HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102F9,
+ },
+ [0x102FA]={
+ category="no",
+ description="COPTIC EPACT NUMBER EIGHT HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102FA,
+ },
+ [0x102FB]={
+ category="no",
+ description="COPTIC EPACT NUMBER NINE HUNDRED",
+ direction="en",
+ linebreak="al",
+ unicodeslot=0x102FB,
+ },
[0x10300]={
category="lo",
description="OLD ITALIC LETTER A",
@@ -135030,6 +139068,13 @@ characters.data={
linebreak="al",
unicodeslot=0x1031E,
},
+ [0x1031F]={
+ category="lo",
+ description="OLD ITALIC LETTER ESS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1031F,
+ },
[0x10320]={
category="no",
description="OLD ITALIC NUMERAL ONE",
@@ -135247,6 +139292,312 @@ characters.data={
linebreak="al",
unicodeslot=0x1034A,
},
+ [0x10350]={
+ category="lo",
+ description="OLD PERMIC LETTER AN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10350,
+ },
+ [0x10351]={
+ category="lo",
+ description="OLD PERMIC LETTER BUR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10351,
+ },
+ [0x10352]={
+ category="lo",
+ description="OLD PERMIC LETTER GAI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10352,
+ },
+ [0x10353]={
+ category="lo",
+ description="OLD PERMIC LETTER DOI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10353,
+ },
+ [0x10354]={
+ category="lo",
+ description="OLD PERMIC LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10354,
+ },
+ [0x10355]={
+ category="lo",
+ description="OLD PERMIC LETTER ZHOI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10355,
+ },
+ [0x10356]={
+ category="lo",
+ description="OLD PERMIC LETTER DZHOI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10356,
+ },
+ [0x10357]={
+ category="lo",
+ description="OLD PERMIC LETTER ZATA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10357,
+ },
+ [0x10358]={
+ category="lo",
+ description="OLD PERMIC LETTER DZITA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10358,
+ },
+ [0x10359]={
+ category="lo",
+ description="OLD PERMIC LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10359,
+ },
+ [0x1035A]={
+ category="lo",
+ description="OLD PERMIC LETTER KOKE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1035A,
+ },
+ [0x1035B]={
+ category="lo",
+ description="OLD PERMIC LETTER LEI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1035B,
+ },
+ [0x1035C]={
+ category="lo",
+ description="OLD PERMIC LETTER MENOE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1035C,
+ },
+ [0x1035D]={
+ category="lo",
+ description="OLD PERMIC LETTER NENOE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1035D,
+ },
+ [0x1035E]={
+ category="lo",
+ description="OLD PERMIC LETTER VOOI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1035E,
+ },
+ [0x1035F]={
+ category="lo",
+ description="OLD PERMIC LETTER PEEI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1035F,
+ },
+ [0x10360]={
+ category="lo",
+ description="OLD PERMIC LETTER REI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10360,
+ },
+ [0x10361]={
+ category="lo",
+ description="OLD PERMIC LETTER SII",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10361,
+ },
+ [0x10362]={
+ category="lo",
+ description="OLD PERMIC LETTER TAI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10362,
+ },
+ [0x10363]={
+ category="lo",
+ description="OLD PERMIC LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10363,
+ },
+ [0x10364]={
+ category="lo",
+ description="OLD PERMIC LETTER CHERY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10364,
+ },
+ [0x10365]={
+ category="lo",
+ description="OLD PERMIC LETTER SHOOI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10365,
+ },
+ [0x10366]={
+ category="lo",
+ description="OLD PERMIC LETTER SHCHOOI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10366,
+ },
+ [0x10367]={
+ category="lo",
+ description="OLD PERMIC LETTER YRY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10367,
+ },
+ [0x10368]={
+ category="lo",
+ description="OLD PERMIC LETTER YERU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10368,
+ },
+ [0x10369]={
+ category="lo",
+ description="OLD PERMIC LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10369,
+ },
+ [0x1036A]={
+ category="lo",
+ description="OLD PERMIC LETTER OO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1036A,
+ },
+ [0x1036B]={
+ category="lo",
+ description="OLD PERMIC LETTER EF",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1036B,
+ },
+ [0x1036C]={
+ category="lo",
+ description="OLD PERMIC LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1036C,
+ },
+ [0x1036D]={
+ category="lo",
+ description="OLD PERMIC LETTER TSIU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1036D,
+ },
+ [0x1036E]={
+ category="lo",
+ description="OLD PERMIC LETTER VER",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1036E,
+ },
+ [0x1036F]={
+ category="lo",
+ description="OLD PERMIC LETTER YER",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1036F,
+ },
+ [0x10370]={
+ category="lo",
+ description="OLD PERMIC LETTER YERI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10370,
+ },
+ [0x10371]={
+ category="lo",
+ description="OLD PERMIC LETTER YAT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10371,
+ },
+ [0x10372]={
+ category="lo",
+ description="OLD PERMIC LETTER IE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10372,
+ },
+ [0x10373]={
+ category="lo",
+ description="OLD PERMIC LETTER YU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10373,
+ },
+ [0x10374]={
+ category="lo",
+ description="OLD PERMIC LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10374,
+ },
+ [0x10375]={
+ category="lo",
+ description="OLD PERMIC LETTER IA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10375,
+ },
+ [0x10376]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING OLD PERMIC LETTER AN",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x10376,
+ },
+ [0x10377]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING OLD PERMIC LETTER DOI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x10377,
+ },
+ [0x10378]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING OLD PERMIC LETTER ZATA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x10378,
+ },
+ [0x10379]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING OLD PERMIC LETTER NENOE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x10379,
+ },
+ [0x1037A]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING OLD PERMIC LETTER SII",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1037A,
+ },
[0x10380]={
category="lo",
description="UGARITIC LETTER ALPA",
@@ -137070,6 +141421,3044 @@ characters.data={
linebreak="nu",
unicodeslot=0x104A9,
},
+ [0x10500]={
+ category="lo",
+ description="ELBASAN LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10500,
+ },
+ [0x10501]={
+ category="lo",
+ description="ELBASAN LETTER BE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10501,
+ },
+ [0x10502]={
+ category="lo",
+ description="ELBASAN LETTER CE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10502,
+ },
+ [0x10503]={
+ category="lo",
+ description="ELBASAN LETTER CHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10503,
+ },
+ [0x10504]={
+ category="lo",
+ description="ELBASAN LETTER DE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10504,
+ },
+ [0x10505]={
+ category="lo",
+ description="ELBASAN LETTER NDE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10505,
+ },
+ [0x10506]={
+ category="lo",
+ description="ELBASAN LETTER DHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10506,
+ },
+ [0x10507]={
+ category="lo",
+ description="ELBASAN LETTER EI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10507,
+ },
+ [0x10508]={
+ category="lo",
+ description="ELBASAN LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10508,
+ },
+ [0x10509]={
+ category="lo",
+ description="ELBASAN LETTER FE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10509,
+ },
+ [0x1050A]={
+ category="lo",
+ description="ELBASAN LETTER GE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1050A,
+ },
+ [0x1050B]={
+ category="lo",
+ description="ELBASAN LETTER GJE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1050B,
+ },
+ [0x1050C]={
+ category="lo",
+ description="ELBASAN LETTER HE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1050C,
+ },
+ [0x1050D]={
+ category="lo",
+ description="ELBASAN LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1050D,
+ },
+ [0x1050E]={
+ category="lo",
+ description="ELBASAN LETTER JE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1050E,
+ },
+ [0x1050F]={
+ category="lo",
+ description="ELBASAN LETTER KE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1050F,
+ },
+ [0x10510]={
+ category="lo",
+ description="ELBASAN LETTER LE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10510,
+ },
+ [0x10511]={
+ category="lo",
+ description="ELBASAN LETTER LLE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10511,
+ },
+ [0x10512]={
+ category="lo",
+ description="ELBASAN LETTER ME",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10512,
+ },
+ [0x10513]={
+ category="lo",
+ description="ELBASAN LETTER NE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10513,
+ },
+ [0x10514]={
+ category="lo",
+ description="ELBASAN LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10514,
+ },
+ [0x10515]={
+ category="lo",
+ description="ELBASAN LETTER NJE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10515,
+ },
+ [0x10516]={
+ category="lo",
+ description="ELBASAN LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10516,
+ },
+ [0x10517]={
+ category="lo",
+ description="ELBASAN LETTER PE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10517,
+ },
+ [0x10518]={
+ category="lo",
+ description="ELBASAN LETTER QE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10518,
+ },
+ [0x10519]={
+ category="lo",
+ description="ELBASAN LETTER RE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10519,
+ },
+ [0x1051A]={
+ category="lo",
+ description="ELBASAN LETTER RRE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1051A,
+ },
+ [0x1051B]={
+ category="lo",
+ description="ELBASAN LETTER SE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1051B,
+ },
+ [0x1051C]={
+ category="lo",
+ description="ELBASAN LETTER SHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1051C,
+ },
+ [0x1051D]={
+ category="lo",
+ description="ELBASAN LETTER TE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1051D,
+ },
+ [0x1051E]={
+ category="lo",
+ description="ELBASAN LETTER THE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1051E,
+ },
+ [0x1051F]={
+ category="lo",
+ description="ELBASAN LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1051F,
+ },
+ [0x10520]={
+ category="lo",
+ description="ELBASAN LETTER VE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10520,
+ },
+ [0x10521]={
+ category="lo",
+ description="ELBASAN LETTER XE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10521,
+ },
+ [0x10522]={
+ category="lo",
+ description="ELBASAN LETTER Y",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10522,
+ },
+ [0x10523]={
+ category="lo",
+ description="ELBASAN LETTER ZE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10523,
+ },
+ [0x10524]={
+ category="lo",
+ description="ELBASAN LETTER ZHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10524,
+ },
+ [0x10525]={
+ category="lo",
+ description="ELBASAN LETTER GHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10525,
+ },
+ [0x10526]={
+ category="lo",
+ description="ELBASAN LETTER GHAMMA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10526,
+ },
+ [0x10527]={
+ category="lo",
+ description="ELBASAN LETTER KHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10527,
+ },
+ [0x10530]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER ALT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10530,
+ },
+ [0x10531]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER BET",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10531,
+ },
+ [0x10532]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER GIM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10532,
+ },
+ [0x10533]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER DAT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10533,
+ },
+ [0x10534]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER EB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10534,
+ },
+ [0x10535]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER ZARL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10535,
+ },
+ [0x10536]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER EYN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10536,
+ },
+ [0x10537]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER ZHIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10537,
+ },
+ [0x10538]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER TAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10538,
+ },
+ [0x10539]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10539,
+ },
+ [0x1053A]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER YOWD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1053A,
+ },
+ [0x1053B]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER ZHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1053B,
+ },
+ [0x1053C]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER IRB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1053C,
+ },
+ [0x1053D]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1053D,
+ },
+ [0x1053E]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER LAN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1053E,
+ },
+ [0x1053F]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER INYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1053F,
+ },
+ [0x10540]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER XEYN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10540,
+ },
+ [0x10541]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER DYAN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10541,
+ },
+ [0x10542]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10542,
+ },
+ [0x10543]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER JHOX",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10543,
+ },
+ [0x10544]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER KAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10544,
+ },
+ [0x10545]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER LYIT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10545,
+ },
+ [0x10546]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER HEYT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10546,
+ },
+ [0x10547]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER QAY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10547,
+ },
+ [0x10548]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER AOR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10548,
+ },
+ [0x10549]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CHOY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10549,
+ },
+ [0x1054A]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CHI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1054A,
+ },
+ [0x1054B]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CYAY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1054B,
+ },
+ [0x1054C]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER MAQ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1054C,
+ },
+ [0x1054D]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER QAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1054D,
+ },
+ [0x1054E]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER NOWC",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1054E,
+ },
+ [0x1054F]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER DZYAY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1054F,
+ },
+ [0x10550]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER SHAK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10550,
+ },
+ [0x10551]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER JAYN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10551,
+ },
+ [0x10552]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER ON",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10552,
+ },
+ [0x10553]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER TYAY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10553,
+ },
+ [0x10554]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER FAM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10554,
+ },
+ [0x10555]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER DZAY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10555,
+ },
+ [0x10556]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CHAT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10556,
+ },
+ [0x10557]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER PEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10557,
+ },
+ [0x10558]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER GHEYS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10558,
+ },
+ [0x10559]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER RAT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10559,
+ },
+ [0x1055A]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER SEYK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1055A,
+ },
+ [0x1055B]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER VEYZ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1055B,
+ },
+ [0x1055C]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER TIWR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1055C,
+ },
+ [0x1055D]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER SHOY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1055D,
+ },
+ [0x1055E]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER IWN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1055E,
+ },
+ [0x1055F]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CYAW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1055F,
+ },
+ [0x10560]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER CAYN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10560,
+ },
+ [0x10561]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER YAYD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10561,
+ },
+ [0x10562]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER PIWR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10562,
+ },
+ [0x10563]={
+ category="lo",
+ description="CAUCASIAN ALBANIAN LETTER KIW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10563,
+ },
+ [0x1056F]={
+ category="po",
+ description="CAUCASIAN ALBANIAN CITATION MARK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1056F,
+ },
+ [0x10600]={
+ category="lo",
+ description="LINEAR A SIGN AB001",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10600,
+ },
+ [0x10601]={
+ category="lo",
+ description="LINEAR A SIGN AB002",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10601,
+ },
+ [0x10602]={
+ category="lo",
+ description="LINEAR A SIGN AB003",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10602,
+ },
+ [0x10603]={
+ category="lo",
+ description="LINEAR A SIGN AB004",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10603,
+ },
+ [0x10604]={
+ category="lo",
+ description="LINEAR A SIGN AB005",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10604,
+ },
+ [0x10605]={
+ category="lo",
+ description="LINEAR A SIGN AB006",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10605,
+ },
+ [0x10606]={
+ category="lo",
+ description="LINEAR A SIGN AB007",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10606,
+ },
+ [0x10607]={
+ category="lo",
+ description="LINEAR A SIGN AB008",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10607,
+ },
+ [0x10608]={
+ category="lo",
+ description="LINEAR A SIGN AB009",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10608,
+ },
+ [0x10609]={
+ category="lo",
+ description="LINEAR A SIGN AB010",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10609,
+ },
+ [0x1060A]={
+ category="lo",
+ description="LINEAR A SIGN AB011",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1060A,
+ },
+ [0x1060B]={
+ category="lo",
+ description="LINEAR A SIGN AB013",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1060B,
+ },
+ [0x1060C]={
+ category="lo",
+ description="LINEAR A SIGN AB016",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1060C,
+ },
+ [0x1060D]={
+ category="lo",
+ description="LINEAR A SIGN AB017",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1060D,
+ },
+ [0x1060E]={
+ category="lo",
+ description="LINEAR A SIGN AB020",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1060E,
+ },
+ [0x1060F]={
+ category="lo",
+ description="LINEAR A SIGN AB021",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1060F,
+ },
+ [0x10610]={
+ category="lo",
+ description="LINEAR A SIGN AB021F",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10610,
+ },
+ [0x10611]={
+ category="lo",
+ description="LINEAR A SIGN AB021M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10611,
+ },
+ [0x10612]={
+ category="lo",
+ description="LINEAR A SIGN AB022",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10612,
+ },
+ [0x10613]={
+ category="lo",
+ description="LINEAR A SIGN AB022F",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10613,
+ },
+ [0x10614]={
+ category="lo",
+ description="LINEAR A SIGN AB022M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10614,
+ },
+ [0x10615]={
+ category="lo",
+ description="LINEAR A SIGN AB023",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10615,
+ },
+ [0x10616]={
+ category="lo",
+ description="LINEAR A SIGN AB023M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10616,
+ },
+ [0x10617]={
+ category="lo",
+ description="LINEAR A SIGN AB024",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10617,
+ },
+ [0x10618]={
+ category="lo",
+ description="LINEAR A SIGN AB026",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10618,
+ },
+ [0x10619]={
+ category="lo",
+ description="LINEAR A SIGN AB027",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10619,
+ },
+ [0x1061A]={
+ category="lo",
+ description="LINEAR A SIGN AB028",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1061A,
+ },
+ [0x1061B]={
+ category="lo",
+ description="LINEAR A SIGN A028B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1061B,
+ },
+ [0x1061C]={
+ category="lo",
+ description="LINEAR A SIGN AB029",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1061C,
+ },
+ [0x1061D]={
+ category="lo",
+ description="LINEAR A SIGN AB030",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1061D,
+ },
+ [0x1061E]={
+ category="lo",
+ description="LINEAR A SIGN AB031",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1061E,
+ },
+ [0x1061F]={
+ category="lo",
+ description="LINEAR A SIGN AB034",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1061F,
+ },
+ [0x10620]={
+ category="lo",
+ description="LINEAR A SIGN AB037",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10620,
+ },
+ [0x10621]={
+ category="lo",
+ description="LINEAR A SIGN AB038",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10621,
+ },
+ [0x10622]={
+ category="lo",
+ description="LINEAR A SIGN AB039",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10622,
+ },
+ [0x10623]={
+ category="lo",
+ description="LINEAR A SIGN AB040",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10623,
+ },
+ [0x10624]={
+ category="lo",
+ description="LINEAR A SIGN AB041",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10624,
+ },
+ [0x10625]={
+ category="lo",
+ description="LINEAR A SIGN AB044",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10625,
+ },
+ [0x10626]={
+ category="lo",
+ description="LINEAR A SIGN AB045",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10626,
+ },
+ [0x10627]={
+ category="lo",
+ description="LINEAR A SIGN AB046",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10627,
+ },
+ [0x10628]={
+ category="lo",
+ description="LINEAR A SIGN AB047",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10628,
+ },
+ [0x10629]={
+ category="lo",
+ description="LINEAR A SIGN AB048",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10629,
+ },
+ [0x1062A]={
+ category="lo",
+ description="LINEAR A SIGN AB049",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1062A,
+ },
+ [0x1062B]={
+ category="lo",
+ description="LINEAR A SIGN AB050",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1062B,
+ },
+ [0x1062C]={
+ category="lo",
+ description="LINEAR A SIGN AB051",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1062C,
+ },
+ [0x1062D]={
+ category="lo",
+ description="LINEAR A SIGN AB053",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1062D,
+ },
+ [0x1062E]={
+ category="lo",
+ description="LINEAR A SIGN AB054",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1062E,
+ },
+ [0x1062F]={
+ category="lo",
+ description="LINEAR A SIGN AB055",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1062F,
+ },
+ [0x10630]={
+ category="lo",
+ description="LINEAR A SIGN AB056",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10630,
+ },
+ [0x10631]={
+ category="lo",
+ description="LINEAR A SIGN AB057",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10631,
+ },
+ [0x10632]={
+ category="lo",
+ description="LINEAR A SIGN AB058",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10632,
+ },
+ [0x10633]={
+ category="lo",
+ description="LINEAR A SIGN AB059",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10633,
+ },
+ [0x10634]={
+ category="lo",
+ description="LINEAR A SIGN AB060",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10634,
+ },
+ [0x10635]={
+ category="lo",
+ description="LINEAR A SIGN AB061",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10635,
+ },
+ [0x10636]={
+ category="lo",
+ description="LINEAR A SIGN AB065",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10636,
+ },
+ [0x10637]={
+ category="lo",
+ description="LINEAR A SIGN AB066",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10637,
+ },
+ [0x10638]={
+ category="lo",
+ description="LINEAR A SIGN AB067",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10638,
+ },
+ [0x10639]={
+ category="lo",
+ description="LINEAR A SIGN AB069",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10639,
+ },
+ [0x1063A]={
+ category="lo",
+ description="LINEAR A SIGN AB070",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1063A,
+ },
+ [0x1063B]={
+ category="lo",
+ description="LINEAR A SIGN AB073",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1063B,
+ },
+ [0x1063C]={
+ category="lo",
+ description="LINEAR A SIGN AB074",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1063C,
+ },
+ [0x1063D]={
+ category="lo",
+ description="LINEAR A SIGN AB076",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1063D,
+ },
+ [0x1063E]={
+ category="lo",
+ description="LINEAR A SIGN AB077",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1063E,
+ },
+ [0x1063F]={
+ category="lo",
+ description="LINEAR A SIGN AB078",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1063F,
+ },
+ [0x10640]={
+ category="lo",
+ description="LINEAR A SIGN AB079",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10640,
+ },
+ [0x10641]={
+ category="lo",
+ description="LINEAR A SIGN AB080",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10641,
+ },
+ [0x10642]={
+ category="lo",
+ description="LINEAR A SIGN AB081",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10642,
+ },
+ [0x10643]={
+ category="lo",
+ description="LINEAR A SIGN AB082",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10643,
+ },
+ [0x10644]={
+ category="lo",
+ description="LINEAR A SIGN AB085",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10644,
+ },
+ [0x10645]={
+ category="lo",
+ description="LINEAR A SIGN AB086",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10645,
+ },
+ [0x10646]={
+ category="lo",
+ description="LINEAR A SIGN AB087",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10646,
+ },
+ [0x10647]={
+ category="lo",
+ description="LINEAR A SIGN A100-102",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10647,
+ },
+ [0x10648]={
+ category="lo",
+ description="LINEAR A SIGN AB118",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10648,
+ },
+ [0x10649]={
+ category="lo",
+ description="LINEAR A SIGN AB120",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10649,
+ },
+ [0x1064A]={
+ category="lo",
+ description="LINEAR A SIGN A120B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1064A,
+ },
+ [0x1064B]={
+ category="lo",
+ description="LINEAR A SIGN AB122",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1064B,
+ },
+ [0x1064C]={
+ category="lo",
+ description="LINEAR A SIGN AB123",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1064C,
+ },
+ [0x1064D]={
+ category="lo",
+ description="LINEAR A SIGN AB131A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1064D,
+ },
+ [0x1064E]={
+ category="lo",
+ description="LINEAR A SIGN AB131B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1064E,
+ },
+ [0x1064F]={
+ category="lo",
+ description="LINEAR A SIGN A131C",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1064F,
+ },
+ [0x10650]={
+ category="lo",
+ description="LINEAR A SIGN AB164",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10650,
+ },
+ [0x10651]={
+ category="lo",
+ description="LINEAR A SIGN AB171",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10651,
+ },
+ [0x10652]={
+ category="lo",
+ description="LINEAR A SIGN AB180",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10652,
+ },
+ [0x10653]={
+ category="lo",
+ description="LINEAR A SIGN AB188",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10653,
+ },
+ [0x10654]={
+ category="lo",
+ description="LINEAR A SIGN AB191",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10654,
+ },
+ [0x10655]={
+ category="lo",
+ description="LINEAR A SIGN A301",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10655,
+ },
+ [0x10656]={
+ category="lo",
+ description="LINEAR A SIGN A302",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10656,
+ },
+ [0x10657]={
+ category="lo",
+ description="LINEAR A SIGN A303",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10657,
+ },
+ [0x10658]={
+ category="lo",
+ description="LINEAR A SIGN A304",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10658,
+ },
+ [0x10659]={
+ category="lo",
+ description="LINEAR A SIGN A305",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10659,
+ },
+ [0x1065A]={
+ category="lo",
+ description="LINEAR A SIGN A306",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1065A,
+ },
+ [0x1065B]={
+ category="lo",
+ description="LINEAR A SIGN A307",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1065B,
+ },
+ [0x1065C]={
+ category="lo",
+ description="LINEAR A SIGN A308",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1065C,
+ },
+ [0x1065D]={
+ category="lo",
+ description="LINEAR A SIGN A309A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1065D,
+ },
+ [0x1065E]={
+ category="lo",
+ description="LINEAR A SIGN A309B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1065E,
+ },
+ [0x1065F]={
+ category="lo",
+ description="LINEAR A SIGN A309C",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1065F,
+ },
+ [0x10660]={
+ category="lo",
+ description="LINEAR A SIGN A310",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10660,
+ },
+ [0x10661]={
+ category="lo",
+ description="LINEAR A SIGN A311",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10661,
+ },
+ [0x10662]={
+ category="lo",
+ description="LINEAR A SIGN A312",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10662,
+ },
+ [0x10663]={
+ category="lo",
+ description="LINEAR A SIGN A313A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10663,
+ },
+ [0x10664]={
+ category="lo",
+ description="LINEAR A SIGN A313B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10664,
+ },
+ [0x10665]={
+ category="lo",
+ description="LINEAR A SIGN A313C",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10665,
+ },
+ [0x10666]={
+ category="lo",
+ description="LINEAR A SIGN A314",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10666,
+ },
+ [0x10667]={
+ category="lo",
+ description="LINEAR A SIGN A315",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10667,
+ },
+ [0x10668]={
+ category="lo",
+ description="LINEAR A SIGN A316",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10668,
+ },
+ [0x10669]={
+ category="lo",
+ description="LINEAR A SIGN A317",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10669,
+ },
+ [0x1066A]={
+ category="lo",
+ description="LINEAR A SIGN A318",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1066A,
+ },
+ [0x1066B]={
+ category="lo",
+ description="LINEAR A SIGN A319",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1066B,
+ },
+ [0x1066C]={
+ category="lo",
+ description="LINEAR A SIGN A320",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1066C,
+ },
+ [0x1066D]={
+ category="lo",
+ description="LINEAR A SIGN A321",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1066D,
+ },
+ [0x1066E]={
+ category="lo",
+ description="LINEAR A SIGN A322",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1066E,
+ },
+ [0x1066F]={
+ category="lo",
+ description="LINEAR A SIGN A323",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1066F,
+ },
+ [0x10670]={
+ category="lo",
+ description="LINEAR A SIGN A324",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10670,
+ },
+ [0x10671]={
+ category="lo",
+ description="LINEAR A SIGN A325",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10671,
+ },
+ [0x10672]={
+ category="lo",
+ description="LINEAR A SIGN A326",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10672,
+ },
+ [0x10673]={
+ category="lo",
+ description="LINEAR A SIGN A327",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10673,
+ },
+ [0x10674]={
+ category="lo",
+ description="LINEAR A SIGN A328",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10674,
+ },
+ [0x10675]={
+ category="lo",
+ description="LINEAR A SIGN A329",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10675,
+ },
+ [0x10676]={
+ category="lo",
+ description="LINEAR A SIGN A330",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10676,
+ },
+ [0x10677]={
+ category="lo",
+ description="LINEAR A SIGN A331",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10677,
+ },
+ [0x10678]={
+ category="lo",
+ description="LINEAR A SIGN A332",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10678,
+ },
+ [0x10679]={
+ category="lo",
+ description="LINEAR A SIGN A333",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10679,
+ },
+ [0x1067A]={
+ category="lo",
+ description="LINEAR A SIGN A334",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1067A,
+ },
+ [0x1067B]={
+ category="lo",
+ description="LINEAR A SIGN A335",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1067B,
+ },
+ [0x1067C]={
+ category="lo",
+ description="LINEAR A SIGN A336",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1067C,
+ },
+ [0x1067D]={
+ category="lo",
+ description="LINEAR A SIGN A337",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1067D,
+ },
+ [0x1067E]={
+ category="lo",
+ description="LINEAR A SIGN A338",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1067E,
+ },
+ [0x1067F]={
+ category="lo",
+ description="LINEAR A SIGN A339",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1067F,
+ },
+ [0x10680]={
+ category="lo",
+ description="LINEAR A SIGN A340",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10680,
+ },
+ [0x10681]={
+ category="lo",
+ description="LINEAR A SIGN A341",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10681,
+ },
+ [0x10682]={
+ category="lo",
+ description="LINEAR A SIGN A342",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10682,
+ },
+ [0x10683]={
+ category="lo",
+ description="LINEAR A SIGN A343",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10683,
+ },
+ [0x10684]={
+ category="lo",
+ description="LINEAR A SIGN A344",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10684,
+ },
+ [0x10685]={
+ category="lo",
+ description="LINEAR A SIGN A345",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10685,
+ },
+ [0x10686]={
+ category="lo",
+ description="LINEAR A SIGN A346",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10686,
+ },
+ [0x10687]={
+ category="lo",
+ description="LINEAR A SIGN A347",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10687,
+ },
+ [0x10688]={
+ category="lo",
+ description="LINEAR A SIGN A348",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10688,
+ },
+ [0x10689]={
+ category="lo",
+ description="LINEAR A SIGN A349",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10689,
+ },
+ [0x1068A]={
+ category="lo",
+ description="LINEAR A SIGN A350",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1068A,
+ },
+ [0x1068B]={
+ category="lo",
+ description="LINEAR A SIGN A351",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1068B,
+ },
+ [0x1068C]={
+ category="lo",
+ description="LINEAR A SIGN A352",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1068C,
+ },
+ [0x1068D]={
+ category="lo",
+ description="LINEAR A SIGN A353",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1068D,
+ },
+ [0x1068E]={
+ category="lo",
+ description="LINEAR A SIGN A354",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1068E,
+ },
+ [0x1068F]={
+ category="lo",
+ description="LINEAR A SIGN A355",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1068F,
+ },
+ [0x10690]={
+ category="lo",
+ description="LINEAR A SIGN A356",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10690,
+ },
+ [0x10691]={
+ category="lo",
+ description="LINEAR A SIGN A357",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10691,
+ },
+ [0x10692]={
+ category="lo",
+ description="LINEAR A SIGN A358",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10692,
+ },
+ [0x10693]={
+ category="lo",
+ description="LINEAR A SIGN A359",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10693,
+ },
+ [0x10694]={
+ category="lo",
+ description="LINEAR A SIGN A360",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10694,
+ },
+ [0x10695]={
+ category="lo",
+ description="LINEAR A SIGN A361",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10695,
+ },
+ [0x10696]={
+ category="lo",
+ description="LINEAR A SIGN A362",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10696,
+ },
+ [0x10697]={
+ category="lo",
+ description="LINEAR A SIGN A363",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10697,
+ },
+ [0x10698]={
+ category="lo",
+ description="LINEAR A SIGN A364",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10698,
+ },
+ [0x10699]={
+ category="lo",
+ description="LINEAR A SIGN A365",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10699,
+ },
+ [0x1069A]={
+ category="lo",
+ description="LINEAR A SIGN A366",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1069A,
+ },
+ [0x1069B]={
+ category="lo",
+ description="LINEAR A SIGN A367",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1069B,
+ },
+ [0x1069C]={
+ category="lo",
+ description="LINEAR A SIGN A368",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1069C,
+ },
+ [0x1069D]={
+ category="lo",
+ description="LINEAR A SIGN A369",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1069D,
+ },
+ [0x1069E]={
+ category="lo",
+ description="LINEAR A SIGN A370",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1069E,
+ },
+ [0x1069F]={
+ category="lo",
+ description="LINEAR A SIGN A371",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1069F,
+ },
+ [0x106A0]={
+ category="lo",
+ description="LINEAR A SIGN A400-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A0,
+ },
+ [0x106A1]={
+ category="lo",
+ description="LINEAR A SIGN A401-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A1,
+ },
+ [0x106A2]={
+ category="lo",
+ description="LINEAR A SIGN A402-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A2,
+ },
+ [0x106A3]={
+ category="lo",
+ description="LINEAR A SIGN A403-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A3,
+ },
+ [0x106A4]={
+ category="lo",
+ description="LINEAR A SIGN A404-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A4,
+ },
+ [0x106A5]={
+ category="lo",
+ description="LINEAR A SIGN A405-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A5,
+ },
+ [0x106A6]={
+ category="lo",
+ description="LINEAR A SIGN A406-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A6,
+ },
+ [0x106A7]={
+ category="lo",
+ description="LINEAR A SIGN A407-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A7,
+ },
+ [0x106A8]={
+ category="lo",
+ description="LINEAR A SIGN A408-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A8,
+ },
+ [0x106A9]={
+ category="lo",
+ description="LINEAR A SIGN A409-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106A9,
+ },
+ [0x106AA]={
+ category="lo",
+ description="LINEAR A SIGN A410-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106AA,
+ },
+ [0x106AB]={
+ category="lo",
+ description="LINEAR A SIGN A411-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106AB,
+ },
+ [0x106AC]={
+ category="lo",
+ description="LINEAR A SIGN A412-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106AC,
+ },
+ [0x106AD]={
+ category="lo",
+ description="LINEAR A SIGN A413-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106AD,
+ },
+ [0x106AE]={
+ category="lo",
+ description="LINEAR A SIGN A414-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106AE,
+ },
+ [0x106AF]={
+ category="lo",
+ description="LINEAR A SIGN A415-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106AF,
+ },
+ [0x106B0]={
+ category="lo",
+ description="LINEAR A SIGN A416-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B0,
+ },
+ [0x106B1]={
+ category="lo",
+ description="LINEAR A SIGN A417-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B1,
+ },
+ [0x106B2]={
+ category="lo",
+ description="LINEAR A SIGN A418-VAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B2,
+ },
+ [0x106B3]={
+ category="lo",
+ description="LINEAR A SIGN A501",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B3,
+ },
+ [0x106B4]={
+ category="lo",
+ description="LINEAR A SIGN A502",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B4,
+ },
+ [0x106B5]={
+ category="lo",
+ description="LINEAR A SIGN A503",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B5,
+ },
+ [0x106B6]={
+ category="lo",
+ description="LINEAR A SIGN A504",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B6,
+ },
+ [0x106B7]={
+ category="lo",
+ description="LINEAR A SIGN A505",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B7,
+ },
+ [0x106B8]={
+ category="lo",
+ description="LINEAR A SIGN A506",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B8,
+ },
+ [0x106B9]={
+ category="lo",
+ description="LINEAR A SIGN A508",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106B9,
+ },
+ [0x106BA]={
+ category="lo",
+ description="LINEAR A SIGN A509",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106BA,
+ },
+ [0x106BB]={
+ category="lo",
+ description="LINEAR A SIGN A510",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106BB,
+ },
+ [0x106BC]={
+ category="lo",
+ description="LINEAR A SIGN A511",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106BC,
+ },
+ [0x106BD]={
+ category="lo",
+ description="LINEAR A SIGN A512",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106BD,
+ },
+ [0x106BE]={
+ category="lo",
+ description="LINEAR A SIGN A513",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106BE,
+ },
+ [0x106BF]={
+ category="lo",
+ description="LINEAR A SIGN A515",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106BF,
+ },
+ [0x106C0]={
+ category="lo",
+ description="LINEAR A SIGN A516",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C0,
+ },
+ [0x106C1]={
+ category="lo",
+ description="LINEAR A SIGN A520",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C1,
+ },
+ [0x106C2]={
+ category="lo",
+ description="LINEAR A SIGN A521",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C2,
+ },
+ [0x106C3]={
+ category="lo",
+ description="LINEAR A SIGN A523",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C3,
+ },
+ [0x106C4]={
+ category="lo",
+ description="LINEAR A SIGN A524",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C4,
+ },
+ [0x106C5]={
+ category="lo",
+ description="LINEAR A SIGN A525",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C5,
+ },
+ [0x106C6]={
+ category="lo",
+ description="LINEAR A SIGN A526",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C6,
+ },
+ [0x106C7]={
+ category="lo",
+ description="LINEAR A SIGN A527",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C7,
+ },
+ [0x106C8]={
+ category="lo",
+ description="LINEAR A SIGN A528",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C8,
+ },
+ [0x106C9]={
+ category="lo",
+ description="LINEAR A SIGN A529",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106C9,
+ },
+ [0x106CA]={
+ category="lo",
+ description="LINEAR A SIGN A530",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106CA,
+ },
+ [0x106CB]={
+ category="lo",
+ description="LINEAR A SIGN A531",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106CB,
+ },
+ [0x106CC]={
+ category="lo",
+ description="LINEAR A SIGN A532",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106CC,
+ },
+ [0x106CD]={
+ category="lo",
+ description="LINEAR A SIGN A534",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106CD,
+ },
+ [0x106CE]={
+ category="lo",
+ description="LINEAR A SIGN A535",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106CE,
+ },
+ [0x106CF]={
+ category="lo",
+ description="LINEAR A SIGN A536",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106CF,
+ },
+ [0x106D0]={
+ category="lo",
+ description="LINEAR A SIGN A537",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D0,
+ },
+ [0x106D1]={
+ category="lo",
+ description="LINEAR A SIGN A538",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D1,
+ },
+ [0x106D2]={
+ category="lo",
+ description="LINEAR A SIGN A539",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D2,
+ },
+ [0x106D3]={
+ category="lo",
+ description="LINEAR A SIGN A540",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D3,
+ },
+ [0x106D4]={
+ category="lo",
+ description="LINEAR A SIGN A541",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D4,
+ },
+ [0x106D5]={
+ category="lo",
+ description="LINEAR A SIGN A542",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D5,
+ },
+ [0x106D6]={
+ category="lo",
+ description="LINEAR A SIGN A545",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D6,
+ },
+ [0x106D7]={
+ category="lo",
+ description="LINEAR A SIGN A547",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D7,
+ },
+ [0x106D8]={
+ category="lo",
+ description="LINEAR A SIGN A548",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D8,
+ },
+ [0x106D9]={
+ category="lo",
+ description="LINEAR A SIGN A549",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106D9,
+ },
+ [0x106DA]={
+ category="lo",
+ description="LINEAR A SIGN A550",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106DA,
+ },
+ [0x106DB]={
+ category="lo",
+ description="LINEAR A SIGN A551",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106DB,
+ },
+ [0x106DC]={
+ category="lo",
+ description="LINEAR A SIGN A552",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106DC,
+ },
+ [0x106DD]={
+ category="lo",
+ description="LINEAR A SIGN A553",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106DD,
+ },
+ [0x106DE]={
+ category="lo",
+ description="LINEAR A SIGN A554",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106DE,
+ },
+ [0x106DF]={
+ category="lo",
+ description="LINEAR A SIGN A555",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106DF,
+ },
+ [0x106E0]={
+ category="lo",
+ description="LINEAR A SIGN A556",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E0,
+ },
+ [0x106E1]={
+ category="lo",
+ description="LINEAR A SIGN A557",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E1,
+ },
+ [0x106E2]={
+ category="lo",
+ description="LINEAR A SIGN A559",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E2,
+ },
+ [0x106E3]={
+ category="lo",
+ description="LINEAR A SIGN A563",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E3,
+ },
+ [0x106E4]={
+ category="lo",
+ description="LINEAR A SIGN A564",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E4,
+ },
+ [0x106E5]={
+ category="lo",
+ description="LINEAR A SIGN A565",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E5,
+ },
+ [0x106E6]={
+ category="lo",
+ description="LINEAR A SIGN A566",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E6,
+ },
+ [0x106E7]={
+ category="lo",
+ description="LINEAR A SIGN A568",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E7,
+ },
+ [0x106E8]={
+ category="lo",
+ description="LINEAR A SIGN A569",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E8,
+ },
+ [0x106E9]={
+ category="lo",
+ description="LINEAR A SIGN A570",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106E9,
+ },
+ [0x106EA]={
+ category="lo",
+ description="LINEAR A SIGN A571",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106EA,
+ },
+ [0x106EB]={
+ category="lo",
+ description="LINEAR A SIGN A572",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106EB,
+ },
+ [0x106EC]={
+ category="lo",
+ description="LINEAR A SIGN A573",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106EC,
+ },
+ [0x106ED]={
+ category="lo",
+ description="LINEAR A SIGN A574",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106ED,
+ },
+ [0x106EE]={
+ category="lo",
+ description="LINEAR A SIGN A575",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106EE,
+ },
+ [0x106EF]={
+ category="lo",
+ description="LINEAR A SIGN A576",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106EF,
+ },
+ [0x106F0]={
+ category="lo",
+ description="LINEAR A SIGN A577",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F0,
+ },
+ [0x106F1]={
+ category="lo",
+ description="LINEAR A SIGN A578",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F1,
+ },
+ [0x106F2]={
+ category="lo",
+ description="LINEAR A SIGN A579",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F2,
+ },
+ [0x106F3]={
+ category="lo",
+ description="LINEAR A SIGN A580",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F3,
+ },
+ [0x106F4]={
+ category="lo",
+ description="LINEAR A SIGN A581",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F4,
+ },
+ [0x106F5]={
+ category="lo",
+ description="LINEAR A SIGN A582",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F5,
+ },
+ [0x106F6]={
+ category="lo",
+ description="LINEAR A SIGN A583",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F6,
+ },
+ [0x106F7]={
+ category="lo",
+ description="LINEAR A SIGN A584",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F7,
+ },
+ [0x106F8]={
+ category="lo",
+ description="LINEAR A SIGN A585",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F8,
+ },
+ [0x106F9]={
+ category="lo",
+ description="LINEAR A SIGN A586",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106F9,
+ },
+ [0x106FA]={
+ category="lo",
+ description="LINEAR A SIGN A587",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106FA,
+ },
+ [0x106FB]={
+ category="lo",
+ description="LINEAR A SIGN A588",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106FB,
+ },
+ [0x106FC]={
+ category="lo",
+ description="LINEAR A SIGN A589",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106FC,
+ },
+ [0x106FD]={
+ category="lo",
+ description="LINEAR A SIGN A591",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106FD,
+ },
+ [0x106FE]={
+ category="lo",
+ description="LINEAR A SIGN A592",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106FE,
+ },
+ [0x106FF]={
+ category="lo",
+ description="LINEAR A SIGN A594",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x106FF,
+ },
+ [0x10700]={
+ category="lo",
+ description="LINEAR A SIGN A595",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10700,
+ },
+ [0x10701]={
+ category="lo",
+ description="LINEAR A SIGN A596",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10701,
+ },
+ [0x10702]={
+ category="lo",
+ description="LINEAR A SIGN A598",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10702,
+ },
+ [0x10703]={
+ category="lo",
+ description="LINEAR A SIGN A600",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10703,
+ },
+ [0x10704]={
+ category="lo",
+ description="LINEAR A SIGN A601",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10704,
+ },
+ [0x10705]={
+ category="lo",
+ description="LINEAR A SIGN A602",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10705,
+ },
+ [0x10706]={
+ category="lo",
+ description="LINEAR A SIGN A603",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10706,
+ },
+ [0x10707]={
+ category="lo",
+ description="LINEAR A SIGN A604",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10707,
+ },
+ [0x10708]={
+ category="lo",
+ description="LINEAR A SIGN A606",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10708,
+ },
+ [0x10709]={
+ category="lo",
+ description="LINEAR A SIGN A608",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10709,
+ },
+ [0x1070A]={
+ category="lo",
+ description="LINEAR A SIGN A609",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1070A,
+ },
+ [0x1070B]={
+ category="lo",
+ description="LINEAR A SIGN A610",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1070B,
+ },
+ [0x1070C]={
+ category="lo",
+ description="LINEAR A SIGN A611",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1070C,
+ },
+ [0x1070D]={
+ category="lo",
+ description="LINEAR A SIGN A612",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1070D,
+ },
+ [0x1070E]={
+ category="lo",
+ description="LINEAR A SIGN A613",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1070E,
+ },
+ [0x1070F]={
+ category="lo",
+ description="LINEAR A SIGN A614",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1070F,
+ },
+ [0x10710]={
+ category="lo",
+ description="LINEAR A SIGN A615",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10710,
+ },
+ [0x10711]={
+ category="lo",
+ description="LINEAR A SIGN A616",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10711,
+ },
+ [0x10712]={
+ category="lo",
+ description="LINEAR A SIGN A617",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10712,
+ },
+ [0x10713]={
+ category="lo",
+ description="LINEAR A SIGN A618",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10713,
+ },
+ [0x10714]={
+ category="lo",
+ description="LINEAR A SIGN A619",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10714,
+ },
+ [0x10715]={
+ category="lo",
+ description="LINEAR A SIGN A620",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10715,
+ },
+ [0x10716]={
+ category="lo",
+ description="LINEAR A SIGN A621",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10716,
+ },
+ [0x10717]={
+ category="lo",
+ description="LINEAR A SIGN A622",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10717,
+ },
+ [0x10718]={
+ category="lo",
+ description="LINEAR A SIGN A623",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10718,
+ },
+ [0x10719]={
+ category="lo",
+ description="LINEAR A SIGN A624",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10719,
+ },
+ [0x1071A]={
+ category="lo",
+ description="LINEAR A SIGN A626",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1071A,
+ },
+ [0x1071B]={
+ category="lo",
+ description="LINEAR A SIGN A627",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1071B,
+ },
+ [0x1071C]={
+ category="lo",
+ description="LINEAR A SIGN A628",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1071C,
+ },
+ [0x1071D]={
+ category="lo",
+ description="LINEAR A SIGN A629",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1071D,
+ },
+ [0x1071E]={
+ category="lo",
+ description="LINEAR A SIGN A634",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1071E,
+ },
+ [0x1071F]={
+ category="lo",
+ description="LINEAR A SIGN A637",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1071F,
+ },
+ [0x10720]={
+ category="lo",
+ description="LINEAR A SIGN A638",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10720,
+ },
+ [0x10721]={
+ category="lo",
+ description="LINEAR A SIGN A640",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10721,
+ },
+ [0x10722]={
+ category="lo",
+ description="LINEAR A SIGN A642",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10722,
+ },
+ [0x10723]={
+ category="lo",
+ description="LINEAR A SIGN A643",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10723,
+ },
+ [0x10724]={
+ category="lo",
+ description="LINEAR A SIGN A644",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10724,
+ },
+ [0x10725]={
+ category="lo",
+ description="LINEAR A SIGN A645",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10725,
+ },
+ [0x10726]={
+ category="lo",
+ description="LINEAR A SIGN A646",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10726,
+ },
+ [0x10727]={
+ category="lo",
+ description="LINEAR A SIGN A648",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10727,
+ },
+ [0x10728]={
+ category="lo",
+ description="LINEAR A SIGN A649",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10728,
+ },
+ [0x10729]={
+ category="lo",
+ description="LINEAR A SIGN A651",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10729,
+ },
+ [0x1072A]={
+ category="lo",
+ description="LINEAR A SIGN A652",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1072A,
+ },
+ [0x1072B]={
+ category="lo",
+ description="LINEAR A SIGN A653",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1072B,
+ },
+ [0x1072C]={
+ category="lo",
+ description="LINEAR A SIGN A654",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1072C,
+ },
+ [0x1072D]={
+ category="lo",
+ description="LINEAR A SIGN A655",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1072D,
+ },
+ [0x1072E]={
+ category="lo",
+ description="LINEAR A SIGN A656",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1072E,
+ },
+ [0x1072F]={
+ category="lo",
+ description="LINEAR A SIGN A657",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1072F,
+ },
+ [0x10730]={
+ category="lo",
+ description="LINEAR A SIGN A658",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10730,
+ },
+ [0x10731]={
+ category="lo",
+ description="LINEAR A SIGN A659",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10731,
+ },
+ [0x10732]={
+ category="lo",
+ description="LINEAR A SIGN A660",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10732,
+ },
+ [0x10733]={
+ category="lo",
+ description="LINEAR A SIGN A661",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10733,
+ },
+ [0x10734]={
+ category="lo",
+ description="LINEAR A SIGN A662",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10734,
+ },
+ [0x10735]={
+ category="lo",
+ description="LINEAR A SIGN A663",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10735,
+ },
+ [0x10736]={
+ category="lo",
+ description="LINEAR A SIGN A664",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10736,
+ },
+ [0x10740]={
+ category="lo",
+ description="LINEAR A SIGN A701 A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10740,
+ },
+ [0x10741]={
+ category="lo",
+ description="LINEAR A SIGN A702 B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10741,
+ },
+ [0x10742]={
+ category="lo",
+ description="LINEAR A SIGN A703 D",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10742,
+ },
+ [0x10743]={
+ category="lo",
+ description="LINEAR A SIGN A704 E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10743,
+ },
+ [0x10744]={
+ category="lo",
+ description="LINEAR A SIGN A705 F",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10744,
+ },
+ [0x10745]={
+ category="lo",
+ description="LINEAR A SIGN A706 H",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10745,
+ },
+ [0x10746]={
+ category="lo",
+ description="LINEAR A SIGN A707 J",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10746,
+ },
+ [0x10747]={
+ category="lo",
+ description="LINEAR A SIGN A708 K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10747,
+ },
+ [0x10748]={
+ category="lo",
+ description="LINEAR A SIGN A709 L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10748,
+ },
+ [0x10749]={
+ category="lo",
+ description="LINEAR A SIGN A709-2 L2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10749,
+ },
+ [0x1074A]={
+ category="lo",
+ description="LINEAR A SIGN A709-3 L3",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1074A,
+ },
+ [0x1074B]={
+ category="lo",
+ description="LINEAR A SIGN A709-4 L4",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1074B,
+ },
+ [0x1074C]={
+ category="lo",
+ description="LINEAR A SIGN A709-6 L6",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1074C,
+ },
+ [0x1074D]={
+ category="lo",
+ description="LINEAR A SIGN A710 W",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1074D,
+ },
+ [0x1074E]={
+ category="lo",
+ description="LINEAR A SIGN A711 X",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1074E,
+ },
+ [0x1074F]={
+ category="lo",
+ description="LINEAR A SIGN A712 Y",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1074F,
+ },
+ [0x10750]={
+ category="lo",
+ description="LINEAR A SIGN A713 OMEGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10750,
+ },
+ [0x10751]={
+ category="lo",
+ description="LINEAR A SIGN A714 ABB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10751,
+ },
+ [0x10752]={
+ category="lo",
+ description="LINEAR A SIGN A715 BB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10752,
+ },
+ [0x10753]={
+ category="lo",
+ description="LINEAR A SIGN A717 DD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10753,
+ },
+ [0x10754]={
+ category="lo",
+ description="LINEAR A SIGN A726 EYYY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10754,
+ },
+ [0x10755]={
+ category="lo",
+ description="LINEAR A SIGN A732 JE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10755,
+ },
+ [0x10760]={
+ category="lo",
+ description="LINEAR A SIGN A800",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10760,
+ },
+ [0x10761]={
+ category="lo",
+ description="LINEAR A SIGN A801",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10761,
+ },
+ [0x10762]={
+ category="lo",
+ description="LINEAR A SIGN A802",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10762,
+ },
+ [0x10763]={
+ category="lo",
+ description="LINEAR A SIGN A803",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10763,
+ },
+ [0x10764]={
+ category="lo",
+ description="LINEAR A SIGN A804",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10764,
+ },
+ [0x10765]={
+ category="lo",
+ description="LINEAR A SIGN A805",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10765,
+ },
+ [0x10766]={
+ category="lo",
+ description="LINEAR A SIGN A806",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10766,
+ },
+ [0x10767]={
+ category="lo",
+ description="LINEAR A SIGN A807",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x10767,
+ },
[0x10800]={
category="lo",
description="CYPRIOT SYLLABLE A",
@@ -137672,6 +145061,510 @@ characters.data={
linebreak="al",
unicodeslot=0x1085F,
},
+ [0x10860]={
+ category="lo",
+ description="PALMYRENE LETTER ALEPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10860,
+ },
+ [0x10861]={
+ category="lo",
+ description="PALMYRENE LETTER BETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10861,
+ },
+ [0x10862]={
+ category="lo",
+ description="PALMYRENE LETTER GIMEL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10862,
+ },
+ [0x10863]={
+ category="lo",
+ description="PALMYRENE LETTER DALETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10863,
+ },
+ [0x10864]={
+ category="lo",
+ description="PALMYRENE LETTER HE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10864,
+ },
+ [0x10865]={
+ category="lo",
+ description="PALMYRENE LETTER WAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10865,
+ },
+ [0x10866]={
+ category="lo",
+ description="PALMYRENE LETTER ZAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10866,
+ },
+ [0x10867]={
+ category="lo",
+ description="PALMYRENE LETTER HETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10867,
+ },
+ [0x10868]={
+ category="lo",
+ description="PALMYRENE LETTER TETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10868,
+ },
+ [0x10869]={
+ category="lo",
+ description="PALMYRENE LETTER YODH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10869,
+ },
+ [0x1086A]={
+ category="lo",
+ description="PALMYRENE LETTER KAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1086A,
+ },
+ [0x1086B]={
+ category="lo",
+ description="PALMYRENE LETTER LAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1086B,
+ },
+ [0x1086C]={
+ category="lo",
+ description="PALMYRENE LETTER MEM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1086C,
+ },
+ [0x1086D]={
+ category="lo",
+ description="PALMYRENE LETTER FINAL NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1086D,
+ },
+ [0x1086E]={
+ category="lo",
+ description="PALMYRENE LETTER NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1086E,
+ },
+ [0x1086F]={
+ category="lo",
+ description="PALMYRENE LETTER SAMEKH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1086F,
+ },
+ [0x10870]={
+ category="lo",
+ description="PALMYRENE LETTER AYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10870,
+ },
+ [0x10871]={
+ category="lo",
+ description="PALMYRENE LETTER PE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10871,
+ },
+ [0x10872]={
+ category="lo",
+ description="PALMYRENE LETTER SADHE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10872,
+ },
+ [0x10873]={
+ category="lo",
+ description="PALMYRENE LETTER QOPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10873,
+ },
+ [0x10874]={
+ category="lo",
+ description="PALMYRENE LETTER RESH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10874,
+ },
+ [0x10875]={
+ category="lo",
+ description="PALMYRENE LETTER SHIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10875,
+ },
+ [0x10876]={
+ category="lo",
+ description="PALMYRENE LETTER TAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10876,
+ },
+ [0x10877]={
+ category="so",
+ description="PALMYRENE LEFT-POINTING FLEURON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10877,
+ },
+ [0x10878]={
+ category="so",
+ description="PALMYRENE RIGHT-POINTING FLEURON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10878,
+ },
+ [0x10879]={
+ category="no",
+ description="PALMYRENE NUMBER ONE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10879,
+ },
+ [0x1087A]={
+ category="no",
+ description="PALMYRENE NUMBER TWO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1087A,
+ },
+ [0x1087B]={
+ category="no",
+ description="PALMYRENE NUMBER THREE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1087B,
+ },
+ [0x1087C]={
+ category="no",
+ description="PALMYRENE NUMBER FOUR",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1087C,
+ },
+ [0x1087D]={
+ category="no",
+ description="PALMYRENE NUMBER FIVE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1087D,
+ },
+ [0x1087E]={
+ category="no",
+ description="PALMYRENE NUMBER TEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1087E,
+ },
+ [0x1087F]={
+ category="no",
+ description="PALMYRENE NUMBER TWENTY",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1087F,
+ },
+ [0x10880]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL ALEPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10880,
+ },
+ [0x10881]={
+ category="lo",
+ description="NABATAEAN LETTER ALEPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10881,
+ },
+ [0x10882]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL BETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10882,
+ },
+ [0x10883]={
+ category="lo",
+ description="NABATAEAN LETTER BETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10883,
+ },
+ [0x10884]={
+ category="lo",
+ description="NABATAEAN LETTER GIMEL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10884,
+ },
+ [0x10885]={
+ category="lo",
+ description="NABATAEAN LETTER DALETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10885,
+ },
+ [0x10886]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL HE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10886,
+ },
+ [0x10887]={
+ category="lo",
+ description="NABATAEAN LETTER HE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10887,
+ },
+ [0x10888]={
+ category="lo",
+ description="NABATAEAN LETTER WAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10888,
+ },
+ [0x10889]={
+ category="lo",
+ description="NABATAEAN LETTER ZAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10889,
+ },
+ [0x1088A]={
+ category="lo",
+ description="NABATAEAN LETTER HETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1088A,
+ },
+ [0x1088B]={
+ category="lo",
+ description="NABATAEAN LETTER TETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1088B,
+ },
+ [0x1088C]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL YODH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1088C,
+ },
+ [0x1088D]={
+ category="lo",
+ description="NABATAEAN LETTER YODH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1088D,
+ },
+ [0x1088E]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL KAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1088E,
+ },
+ [0x1088F]={
+ category="lo",
+ description="NABATAEAN LETTER KAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1088F,
+ },
+ [0x10890]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL LAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10890,
+ },
+ [0x10891]={
+ category="lo",
+ description="NABATAEAN LETTER LAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10891,
+ },
+ [0x10892]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL MEM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10892,
+ },
+ [0x10893]={
+ category="lo",
+ description="NABATAEAN LETTER MEM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10893,
+ },
+ [0x10894]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10894,
+ },
+ [0x10895]={
+ category="lo",
+ description="NABATAEAN LETTER NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10895,
+ },
+ [0x10896]={
+ category="lo",
+ description="NABATAEAN LETTER SAMEKH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10896,
+ },
+ [0x10897]={
+ category="lo",
+ description="NABATAEAN LETTER AYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10897,
+ },
+ [0x10898]={
+ category="lo",
+ description="NABATAEAN LETTER PE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10898,
+ },
+ [0x10899]={
+ category="lo",
+ description="NABATAEAN LETTER SADHE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10899,
+ },
+ [0x1089A]={
+ category="lo",
+ description="NABATAEAN LETTER QOPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1089A,
+ },
+ [0x1089B]={
+ category="lo",
+ description="NABATAEAN LETTER RESH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1089B,
+ },
+ [0x1089C]={
+ category="lo",
+ description="NABATAEAN LETTER FINAL SHIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1089C,
+ },
+ [0x1089D]={
+ category="lo",
+ description="NABATAEAN LETTER SHIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1089D,
+ },
+ [0x1089E]={
+ category="lo",
+ description="NABATAEAN LETTER TAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1089E,
+ },
+ [0x108A7]={
+ category="no",
+ description="NABATAEAN NUMBER ONE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108A7,
+ },
+ [0x108A8]={
+ category="no",
+ description="NABATAEAN NUMBER TWO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108A8,
+ },
+ [0x108A9]={
+ category="no",
+ description="NABATAEAN NUMBER THREE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108A9,
+ },
+ [0x108AA]={
+ category="no",
+ description="NABATAEAN NUMBER FOUR",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108AA,
+ },
+ [0x108AB]={
+ category="no",
+ description="NABATAEAN CRUCIFORM NUMBER FOUR",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108AB,
+ },
+ [0x108AC]={
+ category="no",
+ description="NABATAEAN NUMBER FIVE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108AC,
+ },
+ [0x108AD]={
+ category="no",
+ description="NABATAEAN NUMBER TEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108AD,
+ },
+ [0x108AE]={
+ category="no",
+ description="NABATAEAN NUMBER TWENTY",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108AE,
+ },
+ [0x108AF]={
+ category="no",
+ description="NABATAEAN NUMBER ONE HUNDRED",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x108AF,
+ },
[0x10900]={
category="lo",
description="PHOENICIAN LETTER ALF",
@@ -138521,6 +146414,7 @@ characters.data={
},
[0x10A0D]={
category="mn",
+ combining=0xDC,
description="KHAROSHTHI SIGN DOUBLE RING BELOW",
direction="nsm",
linebreak="cm",
@@ -138535,6 +146429,7 @@ characters.data={
},
[0x10A0F]={
category="mn",
+ combining=0xE6,
description="KHAROSHTHI SIGN VISARGA",
direction="nsm",
linebreak="cm",
@@ -138780,6 +146675,7 @@ characters.data={
},
[0x10A38]={
category="mn",
+ combining=0xE6,
description="KHAROSHTHI SIGN BAR ABOVE",
direction="nsm",
linebreak="cm",
@@ -138787,6 +146683,7 @@ characters.data={
},
[0x10A39]={
category="mn",
+ combining=0x1,
description="KHAROSHTHI SIGN CAUDA",
direction="nsm",
linebreak="cm",
@@ -138794,6 +146691,7 @@ characters.data={
},
[0x10A3A]={
category="mn",
+ combining=0xDC,
description="KHAROSHTHI SIGN DOT BELOW",
direction="nsm",
linebreak="cm",
@@ -138801,6 +146699,7 @@ characters.data={
},
[0x10A3F]={
category="mn",
+ combining=0x9,
description="KHAROSHTHI VIRAMA",
direction="nsm",
linebreak="cm",
@@ -139149,6 +147048,631 @@ characters.data={
linebreak="al",
unicodeslot=0x10A7F,
},
+ [0x10A80]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER HEH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A80,
+ },
+ [0x10A81]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER LAM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A81,
+ },
+ [0x10A82]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER HAH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A82,
+ },
+ [0x10A83]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER MEEM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A83,
+ },
+ [0x10A84]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER QAF",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A84,
+ },
+ [0x10A85]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER WAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A85,
+ },
+ [0x10A86]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER ES-2",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A86,
+ },
+ [0x10A87]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER REH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A87,
+ },
+ [0x10A88]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER BEH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A88,
+ },
+ [0x10A89]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER TEH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A89,
+ },
+ [0x10A8A]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER ES-1",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A8A,
+ },
+ [0x10A8B]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER KAF",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A8B,
+ },
+ [0x10A8C]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER NOON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A8C,
+ },
+ [0x10A8D]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER KHAH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A8D,
+ },
+ [0x10A8E]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER SAD",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A8E,
+ },
+ [0x10A8F]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER ES-3",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A8F,
+ },
+ [0x10A90]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER FEH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A90,
+ },
+ [0x10A91]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER ALEF",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A91,
+ },
+ [0x10A92]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER AIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A92,
+ },
+ [0x10A93]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER DAD",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A93,
+ },
+ [0x10A94]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER GEEM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A94,
+ },
+ [0x10A95]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER DAL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A95,
+ },
+ [0x10A96]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER GHAIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A96,
+ },
+ [0x10A97]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER TAH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A97,
+ },
+ [0x10A98]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER ZAIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A98,
+ },
+ [0x10A99]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER THAL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A99,
+ },
+ [0x10A9A]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER YEH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A9A,
+ },
+ [0x10A9B]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER THEH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A9B,
+ },
+ [0x10A9C]={
+ category="lo",
+ description="OLD NORTH ARABIAN LETTER ZAH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A9C,
+ },
+ [0x10A9D]={
+ category="no",
+ description="OLD NORTH ARABIAN NUMBER ONE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A9D,
+ },
+ [0x10A9E]={
+ category="no",
+ description="OLD NORTH ARABIAN NUMBER TEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A9E,
+ },
+ [0x10A9F]={
+ category="no",
+ description="OLD NORTH ARABIAN NUMBER TWENTY",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10A9F,
+ },
+ [0x10AC0]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER ALEPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC0,
+ },
+ [0x10AC1]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER BETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC1,
+ },
+ [0x10AC2]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER BHETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC2,
+ },
+ [0x10AC3]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER GIMEL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC3,
+ },
+ [0x10AC4]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER GHIMEL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC4,
+ },
+ [0x10AC5]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER DALETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC5,
+ },
+ [0x10AC6]={
+ arabic="u",
+ category="lo",
+ description="MANICHAEAN LETTER HE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC6,
+ },
+ [0x10AC7]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER WAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC7,
+ },
+ [0x10AC8]={
+ arabic="u",
+ category="so",
+ description="MANICHAEAN SIGN UD",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC8,
+ },
+ [0x10AC9]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER ZAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AC9,
+ },
+ [0x10ACA]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER ZHAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ACA,
+ },
+ [0x10ACB]={
+ arabic="u",
+ category="lo",
+ description="MANICHAEAN LETTER JAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ACB,
+ },
+ [0x10ACC]={
+ arabic="u",
+ category="lo",
+ description="MANICHAEAN LETTER JHAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ACC,
+ },
+ [0x10ACD]={
+ arabic="l",
+ category="lo",
+ description="MANICHAEAN LETTER HETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ACD,
+ },
+ [0x10ACE]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER TETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ACE,
+ },
+ [0x10ACF]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER YODH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ACF,
+ },
+ [0x10AD0]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER KAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD0,
+ },
+ [0x10AD1]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER XAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD1,
+ },
+ [0x10AD2]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER KHAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD2,
+ },
+ [0x10AD3]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER LAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD3,
+ },
+ [0x10AD4]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER DHAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD4,
+ },
+ [0x10AD5]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER THAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD5,
+ },
+ [0x10AD6]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER MEM",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD6,
+ },
+ [0x10AD7]={
+ arabic="l",
+ category="lo",
+ description="MANICHAEAN LETTER NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD7,
+ },
+ [0x10AD8]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER SAMEKH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD8,
+ },
+ [0x10AD9]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER AYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AD9,
+ },
+ [0x10ADA]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER AAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ADA,
+ },
+ [0x10ADB]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER PE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ADB,
+ },
+ [0x10ADC]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER FE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ADC,
+ },
+ [0x10ADD]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER SADHE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ADD,
+ },
+ [0x10ADE]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER QOPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ADE,
+ },
+ [0x10ADF]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER XOPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10ADF,
+ },
+ [0x10AE0]={
+ arabic="d",
+ category="lo",
+ description="MANICHAEAN LETTER QHOPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AE0,
+ },
+ [0x10AE1]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER RESH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AE1,
+ },
+ [0x10AE2]={
+ arabic="u",
+ category="lo",
+ description="MANICHAEAN LETTER SHIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AE2,
+ },
+ [0x10AE3]={
+ arabic="u",
+ category="lo",
+ description="MANICHAEAN LETTER SSHIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AE3,
+ },
+ [0x10AE4]={
+ arabic="r",
+ category="lo",
+ description="MANICHAEAN LETTER TAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AE4,
+ },
+ [0x10AE5]={
+ category="mn",
+ combining=0xE6,
+ description="MANICHAEAN ABBREVIATION MARK ABOVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x10AE5,
+ },
+ [0x10AE6]={
+ category="mn",
+ combining=0xDC,
+ description="MANICHAEAN ABBREVIATION MARK BELOW",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x10AE6,
+ },
+ [0x10AEB]={
+ arabic="d",
+ category="no",
+ description="MANICHAEAN NUMBER ONE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AEB,
+ },
+ [0x10AEC]={
+ arabic="d",
+ category="no",
+ description="MANICHAEAN NUMBER FIVE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AEC,
+ },
+ [0x10AED]={
+ arabic="d",
+ category="no",
+ description="MANICHAEAN NUMBER TEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AED,
+ },
+ [0x10AEE]={
+ arabic="d",
+ category="no",
+ description="MANICHAEAN NUMBER TWENTY",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AEE,
+ },
+ [0x10AEF]={
+ arabic="r",
+ category="no",
+ description="MANICHAEAN NUMBER ONE HUNDRED",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10AEF,
+ },
+ [0x10AF0]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION STAR",
+ direction="r",
+ linebreak="ba",
+ unicodeslot=0x10AF0,
+ },
+ [0x10AF1]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION FLEURON",
+ direction="r",
+ linebreak="ba",
+ unicodeslot=0x10AF1,
+ },
+ [0x10AF2]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION DOUBLE DOT WITHIN DOT",
+ direction="r",
+ linebreak="ba",
+ unicodeslot=0x10AF2,
+ },
+ [0x10AF3]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION DOT WITHIN DOT",
+ direction="r",
+ linebreak="ba",
+ unicodeslot=0x10AF3,
+ },
+ [0x10AF4]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION DOT",
+ direction="r",
+ linebreak="ba",
+ unicodeslot=0x10AF4,
+ },
+ [0x10AF5]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION TWO DOTS",
+ direction="r",
+ linebreak="ba",
+ unicodeslot=0x10AF5,
+ },
+ [0x10AF6]={
+ category="po",
+ description="MANICHAEAN PUNCTUATION LINE FILLER",
+ direction="r",
+ linebreak="in",
+ unicodeslot=0x10AF6,
+ },
[0x10B00]={
category="lo",
description="AVESTAN LETTER A",
@@ -139975,6 +148499,234 @@ characters.data={
linebreak="al",
unicodeslot=0x10B7F,
},
+ [0x10B80]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER ALEPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B80,
+ },
+ [0x10B81]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER BETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B81,
+ },
+ [0x10B82]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER GIMEL",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B82,
+ },
+ [0x10B83]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER DALETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B83,
+ },
+ [0x10B84]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER HE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B84,
+ },
+ [0x10B85]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER WAW-AYIN-RESH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B85,
+ },
+ [0x10B86]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER ZAYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B86,
+ },
+ [0x10B87]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER HETH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B87,
+ },
+ [0x10B88]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER YODH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B88,
+ },
+ [0x10B89]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER KAPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B89,
+ },
+ [0x10B8A]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER LAMEDH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B8A,
+ },
+ [0x10B8B]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER MEM-QOPH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B8B,
+ },
+ [0x10B8C]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B8C,
+ },
+ [0x10B8D]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER SAMEKH",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B8D,
+ },
+ [0x10B8E]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER PE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B8E,
+ },
+ [0x10B8F]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER SADHE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B8F,
+ },
+ [0x10B90]={
+ arabic="d",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER SHIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B90,
+ },
+ [0x10B91]={
+ arabic="r",
+ category="lo",
+ description="PSALTER PAHLAVI LETTER TAW",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B91,
+ },
+ [0x10B99]={
+ category="po",
+ description="PSALTER PAHLAVI SECTION MARK",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B99,
+ },
+ [0x10B9A]={
+ category="po",
+ description="PSALTER PAHLAVI TURNED SECTION MARK",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B9A,
+ },
+ [0x10B9B]={
+ category="po",
+ description="PSALTER PAHLAVI FOUR DOTS WITH CROSS",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B9B,
+ },
+ [0x10B9C]={
+ category="po",
+ description="PSALTER PAHLAVI FOUR DOTS WITH DOT",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10B9C,
+ },
+ [0x10BA9]={
+ arabic="r",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER ONE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BA9,
+ },
+ [0x10BAA]={
+ arabic="r",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER TWO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BAA,
+ },
+ [0x10BAB]={
+ arabic="r",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER THREE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BAB,
+ },
+ [0x10BAC]={
+ arabic="r",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER FOUR",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BAC,
+ },
+ [0x10BAD]={
+ arabic="d",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER TEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BAD,
+ },
+ [0x10BAE]={
+ arabic="d",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER TWENTY",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BAE,
+ },
+ [0x10BAF]={
+ arabic="u",
+ category="no",
+ description="PSALTER PAHLAVI NUMBER ONE HUNDRED",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x10BAF,
+ },
[0x10C00]={
category="lo",
description="OLD TURKIC LETTER ORKHON A",
@@ -141195,6 +149947,7 @@ characters.data={
},
[0x11046]={
category="mn",
+ combining=0x9,
description="BRAHMI VIRAMA",
direction="nsm",
linebreak="cm",
@@ -141459,6 +150212,14 @@ characters.data={
linebreak="nu",
unicodeslot=0x1106F,
},
+ [0x1107F]={
+ category="mn",
+ combining=0x9,
+ description="BRAHMI NUMBER JOINER",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1107F,
+ },
[0x11080]={
category="mn",
description="KAITHI SIGN CANDRABINDU",
@@ -141863,6 +150624,7 @@ characters.data={
},
[0x110B9]={
category="mn",
+ combining=0x9,
description="KAITHI SIGN VIRAMA",
direction="nsm",
linebreak="cm",
@@ -141870,6 +150632,7 @@ characters.data={
},
[0x110BA]={
category="mn",
+ combining=0x7,
description="KAITHI SIGN NUKTA",
direction="nsm",
linebreak="cm",
@@ -142171,6 +150934,7 @@ characters.data={
},
[0x11100]={
category="mn",
+ combining=0xE6,
description="CHAKMA SIGN CANDRABINDU",
direction="nsm",
linebreak="cm",
@@ -142178,6 +150942,7 @@ characters.data={
},
[0x11101]={
category="mn",
+ combining=0xE6,
description="CHAKMA SIGN ANUSVARA",
direction="nsm",
linebreak="cm",
@@ -142185,6 +150950,7 @@ characters.data={
},
[0x11102]={
category="mn",
+ combining=0xE6,
description="CHAKMA SIGN VISARGA",
direction="nsm",
linebreak="cm",
@@ -142530,6 +151296,7 @@ characters.data={
},
[0x11133]={
category="mn",
+ combining=0x9,
description="CHAKMA VIRAMA",
direction="nsm",
linebreak="cm",
@@ -142537,6 +151304,7 @@ characters.data={
},
[0x11134]={
category="mn",
+ combining=0x9,
description="CHAKMA MAAYYAA",
direction="nsm",
linebreak="cm",
@@ -142640,6 +151408,280 @@ characters.data={
linebreak="ba",
unicodeslot=0x11143,
},
+ [0x11150]={
+ category="lo",
+ description="MAHAJANI LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11150,
+ },
+ [0x11151]={
+ category="lo",
+ description="MAHAJANI LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11151,
+ },
+ [0x11152]={
+ category="lo",
+ description="MAHAJANI LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11152,
+ },
+ [0x11153]={
+ category="lo",
+ description="MAHAJANI LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11153,
+ },
+ [0x11154]={
+ category="lo",
+ description="MAHAJANI LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11154,
+ },
+ [0x11155]={
+ category="lo",
+ description="MAHAJANI LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11155,
+ },
+ [0x11156]={
+ category="lo",
+ description="MAHAJANI LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11156,
+ },
+ [0x11157]={
+ category="lo",
+ description="MAHAJANI LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11157,
+ },
+ [0x11158]={
+ category="lo",
+ description="MAHAJANI LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11158,
+ },
+ [0x11159]={
+ category="lo",
+ description="MAHAJANI LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11159,
+ },
+ [0x1115A]={
+ category="lo",
+ description="MAHAJANI LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1115A,
+ },
+ [0x1115B]={
+ category="lo",
+ description="MAHAJANI LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1115B,
+ },
+ [0x1115C]={
+ category="lo",
+ description="MAHAJANI LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1115C,
+ },
+ [0x1115D]={
+ category="lo",
+ description="MAHAJANI LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1115D,
+ },
+ [0x1115E]={
+ category="lo",
+ description="MAHAJANI LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1115E,
+ },
+ [0x1115F]={
+ category="lo",
+ description="MAHAJANI LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1115F,
+ },
+ [0x11160]={
+ category="lo",
+ description="MAHAJANI LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11160,
+ },
+ [0x11161]={
+ category="lo",
+ description="MAHAJANI LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11161,
+ },
+ [0x11162]={
+ category="lo",
+ description="MAHAJANI LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11162,
+ },
+ [0x11163]={
+ category="lo",
+ description="MAHAJANI LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11163,
+ },
+ [0x11164]={
+ category="lo",
+ description="MAHAJANI LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11164,
+ },
+ [0x11165]={
+ category="lo",
+ description="MAHAJANI LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11165,
+ },
+ [0x11166]={
+ category="lo",
+ description="MAHAJANI LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11166,
+ },
+ [0x11167]={
+ category="lo",
+ description="MAHAJANI LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11167,
+ },
+ [0x11168]={
+ category="lo",
+ description="MAHAJANI LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11168,
+ },
+ [0x11169]={
+ category="lo",
+ description="MAHAJANI LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11169,
+ },
+ [0x1116A]={
+ category="lo",
+ description="MAHAJANI LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1116A,
+ },
+ [0x1116B]={
+ category="lo",
+ description="MAHAJANI LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1116B,
+ },
+ [0x1116C]={
+ category="lo",
+ description="MAHAJANI LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1116C,
+ },
+ [0x1116D]={
+ category="lo",
+ description="MAHAJANI LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1116D,
+ },
+ [0x1116E]={
+ category="lo",
+ description="MAHAJANI LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1116E,
+ },
+ [0x1116F]={
+ category="lo",
+ description="MAHAJANI LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1116F,
+ },
+ [0x11170]={
+ category="lo",
+ description="MAHAJANI LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11170,
+ },
+ [0x11171]={
+ category="lo",
+ description="MAHAJANI LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11171,
+ },
+ [0x11172]={
+ category="lo",
+ description="MAHAJANI LETTER RRA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11172,
+ },
+ [0x11173]={
+ category="mn",
+ combining=0x7,
+ description="MAHAJANI SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11173,
+ },
+ [0x11174]={
+ category="po",
+ description="MAHAJANI ABBREVIATION SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11174,
+ },
+ [0x11175]={
+ category="po",
+ description="MAHAJANI SECTION MARK",
+ direction="l",
+ linebreak="bb",
+ unicodeslot=0x11175,
+ },
+ [0x11176]={
+ category="lo",
+ description="MAHAJANI LIGATURE SHRI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11176,
+ },
[0x11180]={
category="mn",
description="SHARADA SIGN CANDRABINDU",
@@ -143090,6 +152132,7 @@ characters.data={
},
[0x111C0]={
category="mc",
+ combining=0x9,
description="SHARADA SIGN VIRAMA",
direction="l",
linebreak="cm",
@@ -143151,6 +152194,13 @@ characters.data={
linebreak="ba",
unicodeslot=0x111C8,
},
+ [0x111CD]={
+ category="po",
+ description="SHARADA SUTRA MARK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111CD,
+ },
[0x111D0]={
category="nd",
description="SHARADA DIGIT ZERO",
@@ -143221,6 +152271,3305 @@ characters.data={
linebreak="nu",
unicodeslot=0x111D9,
},
+ [0x111DA]={
+ category="lo",
+ description="SHARADA EKAM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111DA,
+ },
+ [0x111E1]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT ONE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E1,
+ },
+ [0x111E2]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT TWO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E2,
+ },
+ [0x111E3]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT THREE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E3,
+ },
+ [0x111E4]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT FOUR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E4,
+ },
+ [0x111E5]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT FIVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E5,
+ },
+ [0x111E6]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT SIX",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E6,
+ },
+ [0x111E7]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT SEVEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E7,
+ },
+ [0x111E8]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT EIGHT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E8,
+ },
+ [0x111E9]={
+ category="no",
+ description="SINHALA ARCHAIC DIGIT NINE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111E9,
+ },
+ [0x111EA]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER TEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111EA,
+ },
+ [0x111EB]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER TWENTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111EB,
+ },
+ [0x111EC]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER THIRTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111EC,
+ },
+ [0x111ED]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER FORTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111ED,
+ },
+ [0x111EE]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER FIFTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111EE,
+ },
+ [0x111EF]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER SIXTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111EF,
+ },
+ [0x111F0]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER SEVENTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111F0,
+ },
+ [0x111F1]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER EIGHTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111F1,
+ },
+ [0x111F2]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER NINETY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111F2,
+ },
+ [0x111F3]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER ONE HUNDRED",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111F3,
+ },
+ [0x111F4]={
+ category="no",
+ description="SINHALA ARCHAIC NUMBER ONE THOUSAND",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x111F4,
+ },
+ [0x11200]={
+ category="lo",
+ description="KHOJKI LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11200,
+ },
+ [0x11201]={
+ category="lo",
+ description="KHOJKI LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11201,
+ },
+ [0x11202]={
+ category="lo",
+ description="KHOJKI LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11202,
+ },
+ [0x11203]={
+ category="lo",
+ description="KHOJKI LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11203,
+ },
+ [0x11204]={
+ category="lo",
+ description="KHOJKI LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11204,
+ },
+ [0x11205]={
+ category="lo",
+ description="KHOJKI LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11205,
+ },
+ [0x11206]={
+ category="lo",
+ description="KHOJKI LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11206,
+ },
+ [0x11207]={
+ category="lo",
+ description="KHOJKI LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11207,
+ },
+ [0x11208]={
+ category="lo",
+ description="KHOJKI LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11208,
+ },
+ [0x11209]={
+ category="lo",
+ description="KHOJKI LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11209,
+ },
+ [0x1120A]={
+ category="lo",
+ description="KHOJKI LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1120A,
+ },
+ [0x1120B]={
+ category="lo",
+ description="KHOJKI LETTER GGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1120B,
+ },
+ [0x1120C]={
+ category="lo",
+ description="KHOJKI LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1120C,
+ },
+ [0x1120D]={
+ category="lo",
+ description="KHOJKI LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1120D,
+ },
+ [0x1120E]={
+ category="lo",
+ description="KHOJKI LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1120E,
+ },
+ [0x1120F]={
+ category="lo",
+ description="KHOJKI LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1120F,
+ },
+ [0x11210]={
+ category="lo",
+ description="KHOJKI LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11210,
+ },
+ [0x11211]={
+ category="lo",
+ description="KHOJKI LETTER JJA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11211,
+ },
+ [0x11213]={
+ category="lo",
+ description="KHOJKI LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11213,
+ },
+ [0x11214]={
+ category="lo",
+ description="KHOJKI LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11214,
+ },
+ [0x11215]={
+ category="lo",
+ description="KHOJKI LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11215,
+ },
+ [0x11216]={
+ category="lo",
+ description="KHOJKI LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11216,
+ },
+ [0x11217]={
+ category="lo",
+ description="KHOJKI LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11217,
+ },
+ [0x11218]={
+ category="lo",
+ description="KHOJKI LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11218,
+ },
+ [0x11219]={
+ category="lo",
+ description="KHOJKI LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11219,
+ },
+ [0x1121A]={
+ category="lo",
+ description="KHOJKI LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1121A,
+ },
+ [0x1121B]={
+ category="lo",
+ description="KHOJKI LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1121B,
+ },
+ [0x1121C]={
+ category="lo",
+ description="KHOJKI LETTER DDDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1121C,
+ },
+ [0x1121D]={
+ category="lo",
+ description="KHOJKI LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1121D,
+ },
+ [0x1121E]={
+ category="lo",
+ description="KHOJKI LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1121E,
+ },
+ [0x1121F]={
+ category="lo",
+ description="KHOJKI LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1121F,
+ },
+ [0x11220]={
+ category="lo",
+ description="KHOJKI LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11220,
+ },
+ [0x11221]={
+ category="lo",
+ description="KHOJKI LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11221,
+ },
+ [0x11222]={
+ category="lo",
+ description="KHOJKI LETTER BBA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11222,
+ },
+ [0x11223]={
+ category="lo",
+ description="KHOJKI LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11223,
+ },
+ [0x11224]={
+ category="lo",
+ description="KHOJKI LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11224,
+ },
+ [0x11225]={
+ category="lo",
+ description="KHOJKI LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11225,
+ },
+ [0x11226]={
+ category="lo",
+ description="KHOJKI LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11226,
+ },
+ [0x11227]={
+ category="lo",
+ description="KHOJKI LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11227,
+ },
+ [0x11228]={
+ category="lo",
+ description="KHOJKI LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11228,
+ },
+ [0x11229]={
+ category="lo",
+ description="KHOJKI LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11229,
+ },
+ [0x1122A]={
+ category="lo",
+ description="KHOJKI LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1122A,
+ },
+ [0x1122B]={
+ category="lo",
+ description="KHOJKI LETTER LLA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1122B,
+ },
+ [0x1122C]={
+ category="mc",
+ description="KHOJKI VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1122C,
+ },
+ [0x1122D]={
+ category="mc",
+ description="KHOJKI VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1122D,
+ },
+ [0x1122E]={
+ category="mc",
+ description="KHOJKI VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1122E,
+ },
+ [0x1122F]={
+ category="mn",
+ description="KHOJKI VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1122F,
+ },
+ [0x11230]={
+ category="mn",
+ description="KHOJKI VOWEL SIGN E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11230,
+ },
+ [0x11231]={
+ category="mn",
+ description="KHOJKI VOWEL SIGN AI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11231,
+ },
+ [0x11232]={
+ category="mc",
+ description="KHOJKI VOWEL SIGN O",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11232,
+ },
+ [0x11233]={
+ category="mc",
+ description="KHOJKI VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11233,
+ },
+ [0x11234]={
+ category="mn",
+ description="KHOJKI SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11234,
+ },
+ [0x11235]={
+ category="mc",
+ combining=0x9,
+ description="KHOJKI SIGN VIRAMA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11235,
+ },
+ [0x11236]={
+ category="mn",
+ combining=0x7,
+ description="KHOJKI SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11236,
+ },
+ [0x11237]={
+ category="mn",
+ description="KHOJKI SIGN SHADDA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11237,
+ },
+ [0x11238]={
+ category="po",
+ description="KHOJKI DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11238,
+ },
+ [0x11239]={
+ category="po",
+ description="KHOJKI DOUBLE DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11239,
+ },
+ [0x1123A]={
+ category="po",
+ description="KHOJKI WORD SEPARATOR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1123A,
+ },
+ [0x1123B]={
+ category="po",
+ description="KHOJKI SECTION MARK",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x1123B,
+ },
+ [0x1123C]={
+ category="po",
+ description="KHOJKI DOUBLE SECTION MARK",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x1123C,
+ },
+ [0x1123D]={
+ category="po",
+ description="KHOJKI ABBREVIATION SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1123D,
+ },
+ [0x112B0]={
+ category="lo",
+ description="KHUDAWADI LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B0,
+ },
+ [0x112B1]={
+ category="lo",
+ description="KHUDAWADI LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B1,
+ },
+ [0x112B2]={
+ category="lo",
+ description="KHUDAWADI LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B2,
+ },
+ [0x112B3]={
+ category="lo",
+ description="KHUDAWADI LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B3,
+ },
+ [0x112B4]={
+ category="lo",
+ description="KHUDAWADI LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B4,
+ },
+ [0x112B5]={
+ category="lo",
+ description="KHUDAWADI LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B5,
+ },
+ [0x112B6]={
+ category="lo",
+ description="KHUDAWADI LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B6,
+ },
+ [0x112B7]={
+ category="lo",
+ description="KHUDAWADI LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B7,
+ },
+ [0x112B8]={
+ category="lo",
+ description="KHUDAWADI LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B8,
+ },
+ [0x112B9]={
+ category="lo",
+ description="KHUDAWADI LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112B9,
+ },
+ [0x112BA]={
+ category="lo",
+ description="KHUDAWADI LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112BA,
+ },
+ [0x112BB]={
+ category="lo",
+ description="KHUDAWADI LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112BB,
+ },
+ [0x112BC]={
+ category="lo",
+ description="KHUDAWADI LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112BC,
+ },
+ [0x112BD]={
+ category="lo",
+ description="KHUDAWADI LETTER GGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112BD,
+ },
+ [0x112BE]={
+ category="lo",
+ description="KHUDAWADI LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112BE,
+ },
+ [0x112BF]={
+ category="lo",
+ description="KHUDAWADI LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112BF,
+ },
+ [0x112C0]={
+ category="lo",
+ description="KHUDAWADI LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C0,
+ },
+ [0x112C1]={
+ category="lo",
+ description="KHUDAWADI LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C1,
+ },
+ [0x112C2]={
+ category="lo",
+ description="KHUDAWADI LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C2,
+ },
+ [0x112C3]={
+ category="lo",
+ description="KHUDAWADI LETTER JJA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C3,
+ },
+ [0x112C4]={
+ category="lo",
+ description="KHUDAWADI LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C4,
+ },
+ [0x112C5]={
+ category="lo",
+ description="KHUDAWADI LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C5,
+ },
+ [0x112C6]={
+ category="lo",
+ description="KHUDAWADI LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C6,
+ },
+ [0x112C7]={
+ category="lo",
+ description="KHUDAWADI LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C7,
+ },
+ [0x112C8]={
+ category="lo",
+ description="KHUDAWADI LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C8,
+ },
+ [0x112C9]={
+ category="lo",
+ description="KHUDAWADI LETTER DDDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112C9,
+ },
+ [0x112CA]={
+ category="lo",
+ description="KHUDAWADI LETTER RRA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112CA,
+ },
+ [0x112CB]={
+ category="lo",
+ description="KHUDAWADI LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112CB,
+ },
+ [0x112CC]={
+ category="lo",
+ description="KHUDAWADI LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112CC,
+ },
+ [0x112CD]={
+ category="lo",
+ description="KHUDAWADI LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112CD,
+ },
+ [0x112CE]={
+ category="lo",
+ description="KHUDAWADI LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112CE,
+ },
+ [0x112CF]={
+ category="lo",
+ description="KHUDAWADI LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112CF,
+ },
+ [0x112D0]={
+ category="lo",
+ description="KHUDAWADI LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D0,
+ },
+ [0x112D1]={
+ category="lo",
+ description="KHUDAWADI LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D1,
+ },
+ [0x112D2]={
+ category="lo",
+ description="KHUDAWADI LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D2,
+ },
+ [0x112D3]={
+ category="lo",
+ description="KHUDAWADI LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D3,
+ },
+ [0x112D4]={
+ category="lo",
+ description="KHUDAWADI LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D4,
+ },
+ [0x112D5]={
+ category="lo",
+ description="KHUDAWADI LETTER BBA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D5,
+ },
+ [0x112D6]={
+ category="lo",
+ description="KHUDAWADI LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D6,
+ },
+ [0x112D7]={
+ category="lo",
+ description="KHUDAWADI LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D7,
+ },
+ [0x112D8]={
+ category="lo",
+ description="KHUDAWADI LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D8,
+ },
+ [0x112D9]={
+ category="lo",
+ description="KHUDAWADI LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112D9,
+ },
+ [0x112DA]={
+ category="lo",
+ description="KHUDAWADI LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112DA,
+ },
+ [0x112DB]={
+ category="lo",
+ description="KHUDAWADI LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112DB,
+ },
+ [0x112DC]={
+ category="lo",
+ description="KHUDAWADI LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112DC,
+ },
+ [0x112DD]={
+ category="lo",
+ description="KHUDAWADI LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112DD,
+ },
+ [0x112DE]={
+ category="lo",
+ description="KHUDAWADI LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x112DE,
+ },
+ [0x112DF]={
+ category="mn",
+ description="KHUDAWADI SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112DF,
+ },
+ [0x112E0]={
+ category="mc",
+ description="KHUDAWADI VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x112E0,
+ },
+ [0x112E1]={
+ category="mc",
+ description="KHUDAWADI VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x112E1,
+ },
+ [0x112E2]={
+ category="mc",
+ description="KHUDAWADI VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x112E2,
+ },
+ [0x112E3]={
+ category="mn",
+ description="KHUDAWADI VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E3,
+ },
+ [0x112E4]={
+ category="mn",
+ description="KHUDAWADI VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E4,
+ },
+ [0x112E5]={
+ category="mn",
+ description="KHUDAWADI VOWEL SIGN E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E5,
+ },
+ [0x112E6]={
+ category="mn",
+ description="KHUDAWADI VOWEL SIGN AI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E6,
+ },
+ [0x112E7]={
+ category="mn",
+ description="KHUDAWADI VOWEL SIGN O",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E7,
+ },
+ [0x112E8]={
+ category="mn",
+ description="KHUDAWADI VOWEL SIGN AU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E8,
+ },
+ [0x112E9]={
+ category="mn",
+ combining=0x7,
+ description="KHUDAWADI SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112E9,
+ },
+ [0x112EA]={
+ category="mn",
+ combining=0x9,
+ description="KHUDAWADI SIGN VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x112EA,
+ },
+ [0x112F0]={
+ category="nd",
+ description="KHUDAWADI DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F0,
+ },
+ [0x112F1]={
+ category="nd",
+ description="KHUDAWADI DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F1,
+ },
+ [0x112F2]={
+ category="nd",
+ description="KHUDAWADI DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F2,
+ },
+ [0x112F3]={
+ category="nd",
+ description="KHUDAWADI DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F3,
+ },
+ [0x112F4]={
+ category="nd",
+ description="KHUDAWADI DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F4,
+ },
+ [0x112F5]={
+ category="nd",
+ description="KHUDAWADI DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F5,
+ },
+ [0x112F6]={
+ category="nd",
+ description="KHUDAWADI DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F6,
+ },
+ [0x112F7]={
+ category="nd",
+ description="KHUDAWADI DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F7,
+ },
+ [0x112F8]={
+ category="nd",
+ description="KHUDAWADI DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F8,
+ },
+ [0x112F9]={
+ category="nd",
+ description="KHUDAWADI DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x112F9,
+ },
+ [0x11301]={
+ category="mn",
+ description="GRANTHA SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11301,
+ },
+ [0x11302]={
+ category="mc",
+ description="GRANTHA SIGN ANUSVARA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11302,
+ },
+ [0x11303]={
+ category="mc",
+ description="GRANTHA SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11303,
+ },
+ [0x11305]={
+ category="lo",
+ description="GRANTHA LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11305,
+ },
+ [0x11306]={
+ category="lo",
+ description="GRANTHA LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11306,
+ },
+ [0x11307]={
+ category="lo",
+ description="GRANTHA LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11307,
+ },
+ [0x11308]={
+ category="lo",
+ description="GRANTHA LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11308,
+ },
+ [0x11309]={
+ category="lo",
+ description="GRANTHA LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11309,
+ },
+ [0x1130A]={
+ category="lo",
+ description="GRANTHA LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1130A,
+ },
+ [0x1130B]={
+ category="lo",
+ description="GRANTHA LETTER VOCALIC R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1130B,
+ },
+ [0x1130C]={
+ category="lo",
+ description="GRANTHA LETTER VOCALIC L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1130C,
+ },
+ [0x1130F]={
+ category="lo",
+ description="GRANTHA LETTER EE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1130F,
+ },
+ [0x11310]={
+ category="lo",
+ description="GRANTHA LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11310,
+ },
+ [0x11313]={
+ category="lo",
+ description="GRANTHA LETTER OO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11313,
+ },
+ [0x11314]={
+ category="lo",
+ description="GRANTHA LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11314,
+ },
+ [0x11315]={
+ category="lo",
+ description="GRANTHA LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11315,
+ },
+ [0x11316]={
+ category="lo",
+ description="GRANTHA LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11316,
+ },
+ [0x11317]={
+ category="lo",
+ description="GRANTHA LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11317,
+ },
+ [0x11318]={
+ category="lo",
+ description="GRANTHA LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11318,
+ },
+ [0x11319]={
+ category="lo",
+ description="GRANTHA LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11319,
+ },
+ [0x1131A]={
+ category="lo",
+ description="GRANTHA LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1131A,
+ },
+ [0x1131B]={
+ category="lo",
+ description="GRANTHA LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1131B,
+ },
+ [0x1131C]={
+ category="lo",
+ description="GRANTHA LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1131C,
+ },
+ [0x1131D]={
+ category="lo",
+ description="GRANTHA LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1131D,
+ },
+ [0x1131E]={
+ category="lo",
+ description="GRANTHA LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1131E,
+ },
+ [0x1131F]={
+ category="lo",
+ description="GRANTHA LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1131F,
+ },
+ [0x11320]={
+ category="lo",
+ description="GRANTHA LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11320,
+ },
+ [0x11321]={
+ category="lo",
+ description="GRANTHA LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11321,
+ },
+ [0x11322]={
+ category="lo",
+ description="GRANTHA LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11322,
+ },
+ [0x11323]={
+ category="lo",
+ description="GRANTHA LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11323,
+ },
+ [0x11324]={
+ category="lo",
+ description="GRANTHA LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11324,
+ },
+ [0x11325]={
+ category="lo",
+ description="GRANTHA LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11325,
+ },
+ [0x11326]={
+ category="lo",
+ description="GRANTHA LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11326,
+ },
+ [0x11327]={
+ category="lo",
+ description="GRANTHA LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11327,
+ },
+ [0x11328]={
+ category="lo",
+ description="GRANTHA LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11328,
+ },
+ [0x1132A]={
+ category="lo",
+ description="GRANTHA LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1132A,
+ },
+ [0x1132B]={
+ category="lo",
+ description="GRANTHA LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1132B,
+ },
+ [0x1132C]={
+ category="lo",
+ description="GRANTHA LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1132C,
+ },
+ [0x1132D]={
+ category="lo",
+ description="GRANTHA LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1132D,
+ },
+ [0x1132E]={
+ category="lo",
+ description="GRANTHA LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1132E,
+ },
+ [0x1132F]={
+ category="lo",
+ description="GRANTHA LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1132F,
+ },
+ [0x11330]={
+ category="lo",
+ description="GRANTHA LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11330,
+ },
+ [0x11332]={
+ category="lo",
+ description="GRANTHA LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11332,
+ },
+ [0x11333]={
+ category="lo",
+ description="GRANTHA LETTER LLA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11333,
+ },
+ [0x11335]={
+ category="lo",
+ description="GRANTHA LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11335,
+ },
+ [0x11336]={
+ category="lo",
+ description="GRANTHA LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11336,
+ },
+ [0x11337]={
+ category="lo",
+ description="GRANTHA LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11337,
+ },
+ [0x11338]={
+ category="lo",
+ description="GRANTHA LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11338,
+ },
+ [0x11339]={
+ category="lo",
+ description="GRANTHA LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11339,
+ },
+ [0x1133C]={
+ category="mn",
+ combining=0x7,
+ description="GRANTHA SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1133C,
+ },
+ [0x1133D]={
+ category="lo",
+ description="GRANTHA SIGN AVAGRAHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1133D,
+ },
+ [0x1133E]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1133E,
+ },
+ [0x1133F]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1133F,
+ },
+ [0x11340]={
+ category="mn",
+ description="GRANTHA VOWEL SIGN II",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11340,
+ },
+ [0x11341]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN U",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11341,
+ },
+ [0x11342]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN UU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11342,
+ },
+ [0x11343]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN VOCALIC R",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11343,
+ },
+ [0x11344]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN VOCALIC RR",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11344,
+ },
+ [0x11347]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN EE",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11347,
+ },
+ [0x11348]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN AI",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11348,
+ },
+ [0x1134B]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN OO",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x11347, 0x1133E },
+ unicodeslot=0x1134B,
+ },
+ [0x1134C]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x11347, 0x11357 },
+ unicodeslot=0x1134C,
+ },
+ [0x1134D]={
+ category="mc",
+ combining=0x9,
+ description="GRANTHA SIGN VIRAMA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1134D,
+ },
+ [0x11357]={
+ category="mc",
+ description="GRANTHA AU LENGTH MARK",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11357,
+ },
+ [0x1135D]={
+ category="lo",
+ description="GRANTHA SIGN PLUTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1135D,
+ },
+ [0x1135E]={
+ category="lo",
+ description="GRANTHA LETTER VEDIC ANUSVARA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1135E,
+ },
+ [0x1135F]={
+ category="lo",
+ description="GRANTHA LETTER VEDIC DOUBLE ANUSVARA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1135F,
+ },
+ [0x11360]={
+ category="lo",
+ description="GRANTHA LETTER VOCALIC RR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11360,
+ },
+ [0x11361]={
+ category="lo",
+ description="GRANTHA LETTER VOCALIC LL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11361,
+ },
+ [0x11362]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN VOCALIC L",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11362,
+ },
+ [0x11363]={
+ category="mc",
+ description="GRANTHA VOWEL SIGN VOCALIC LL",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11363,
+ },
+ [0x11366]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT ZERO",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11366,
+ },
+ [0x11367]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT ONE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11367,
+ },
+ [0x11368]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT TWO",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11368,
+ },
+ [0x11369]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT THREE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11369,
+ },
+ [0x1136A]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT FOUR",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1136A,
+ },
+ [0x1136B]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT FIVE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1136B,
+ },
+ [0x1136C]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA DIGIT SIX",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1136C,
+ },
+ [0x11370]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA LETTER A",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11370,
+ },
+ [0x11371]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA LETTER KA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11371,
+ },
+ [0x11372]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA LETTER NA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11372,
+ },
+ [0x11373]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA LETTER VI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11373,
+ },
+ [0x11374]={
+ category="mn",
+ combining=0xE6,
+ description="COMBINING GRANTHA LETTER PA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11374,
+ },
+ [0x11480]={
+ category="lo",
+ description="TIRHUTA ANJI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11480,
+ },
+ [0x11481]={
+ category="lo",
+ description="TIRHUTA LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11481,
+ },
+ [0x11482]={
+ category="lo",
+ description="TIRHUTA LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11482,
+ },
+ [0x11483]={
+ category="lo",
+ description="TIRHUTA LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11483,
+ },
+ [0x11484]={
+ category="lo",
+ description="TIRHUTA LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11484,
+ },
+ [0x11485]={
+ category="lo",
+ description="TIRHUTA LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11485,
+ },
+ [0x11486]={
+ category="lo",
+ description="TIRHUTA LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11486,
+ },
+ [0x11487]={
+ category="lo",
+ description="TIRHUTA LETTER VOCALIC R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11487,
+ },
+ [0x11488]={
+ category="lo",
+ description="TIRHUTA LETTER VOCALIC RR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11488,
+ },
+ [0x11489]={
+ category="lo",
+ description="TIRHUTA LETTER VOCALIC L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11489,
+ },
+ [0x1148A]={
+ category="lo",
+ description="TIRHUTA LETTER VOCALIC LL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1148A,
+ },
+ [0x1148B]={
+ category="lo",
+ description="TIRHUTA LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1148B,
+ },
+ [0x1148C]={
+ category="lo",
+ description="TIRHUTA LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1148C,
+ },
+ [0x1148D]={
+ category="lo",
+ description="TIRHUTA LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1148D,
+ },
+ [0x1148E]={
+ category="lo",
+ description="TIRHUTA LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1148E,
+ },
+ [0x1148F]={
+ category="lo",
+ description="TIRHUTA LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1148F,
+ },
+ [0x11490]={
+ category="lo",
+ description="TIRHUTA LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11490,
+ },
+ [0x11491]={
+ category="lo",
+ description="TIRHUTA LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11491,
+ },
+ [0x11492]={
+ category="lo",
+ description="TIRHUTA LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11492,
+ },
+ [0x11493]={
+ category="lo",
+ description="TIRHUTA LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11493,
+ },
+ [0x11494]={
+ category="lo",
+ description="TIRHUTA LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11494,
+ },
+ [0x11495]={
+ category="lo",
+ description="TIRHUTA LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11495,
+ },
+ [0x11496]={
+ category="lo",
+ description="TIRHUTA LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11496,
+ },
+ [0x11497]={
+ category="lo",
+ description="TIRHUTA LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11497,
+ },
+ [0x11498]={
+ category="lo",
+ description="TIRHUTA LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11498,
+ },
+ [0x11499]={
+ category="lo",
+ description="TIRHUTA LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11499,
+ },
+ [0x1149A]={
+ category="lo",
+ description="TIRHUTA LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1149A,
+ },
+ [0x1149B]={
+ category="lo",
+ description="TIRHUTA LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1149B,
+ },
+ [0x1149C]={
+ category="lo",
+ description="TIRHUTA LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1149C,
+ },
+ [0x1149D]={
+ category="lo",
+ description="TIRHUTA LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1149D,
+ },
+ [0x1149E]={
+ category="lo",
+ description="TIRHUTA LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1149E,
+ },
+ [0x1149F]={
+ category="lo",
+ description="TIRHUTA LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1149F,
+ },
+ [0x114A0]={
+ category="lo",
+ description="TIRHUTA LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A0,
+ },
+ [0x114A1]={
+ category="lo",
+ description="TIRHUTA LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A1,
+ },
+ [0x114A2]={
+ category="lo",
+ description="TIRHUTA LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A2,
+ },
+ [0x114A3]={
+ category="lo",
+ description="TIRHUTA LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A3,
+ },
+ [0x114A4]={
+ category="lo",
+ description="TIRHUTA LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A4,
+ },
+ [0x114A5]={
+ category="lo",
+ description="TIRHUTA LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A5,
+ },
+ [0x114A6]={
+ category="lo",
+ description="TIRHUTA LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A6,
+ },
+ [0x114A7]={
+ category="lo",
+ description="TIRHUTA LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A7,
+ },
+ [0x114A8]={
+ category="lo",
+ description="TIRHUTA LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A8,
+ },
+ [0x114A9]={
+ category="lo",
+ description="TIRHUTA LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114A9,
+ },
+ [0x114AA]={
+ category="lo",
+ description="TIRHUTA LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114AA,
+ },
+ [0x114AB]={
+ category="lo",
+ description="TIRHUTA LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114AB,
+ },
+ [0x114AC]={
+ category="lo",
+ description="TIRHUTA LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114AC,
+ },
+ [0x114AD]={
+ category="lo",
+ description="TIRHUTA LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114AD,
+ },
+ [0x114AE]={
+ category="lo",
+ description="TIRHUTA LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114AE,
+ },
+ [0x114AF]={
+ category="lo",
+ description="TIRHUTA LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114AF,
+ },
+ [0x114B0]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x114B0,
+ },
+ [0x114B1]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x114B1,
+ },
+ [0x114B2]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x114B2,
+ },
+ [0x114B3]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114B3,
+ },
+ [0x114B4]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114B4,
+ },
+ [0x114B5]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN VOCALIC R",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114B5,
+ },
+ [0x114B6]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN VOCALIC RR",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114B6,
+ },
+ [0x114B7]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN VOCALIC L",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114B7,
+ },
+ [0x114B8]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN VOCALIC LL",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114B8,
+ },
+ [0x114B9]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN E",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x114B9,
+ },
+ [0x114BA]={
+ category="mn",
+ description="TIRHUTA VOWEL SIGN SHORT E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114BA,
+ },
+ [0x114BB]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN AI",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x114B9, 0x114BA },
+ unicodeslot=0x114BB,
+ },
+ [0x114BC]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN O",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x114B9, 0x114B0 },
+ unicodeslot=0x114BC,
+ },
+ [0x114BD]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN SHORT O",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x114BD,
+ },
+ [0x114BE]={
+ category="mc",
+ description="TIRHUTA VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x114B9, 0x114BD },
+ unicodeslot=0x114BE,
+ },
+ [0x114BF]={
+ category="mn",
+ description="TIRHUTA SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114BF,
+ },
+ [0x114C0]={
+ category="mn",
+ description="TIRHUTA SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114C0,
+ },
+ [0x114C1]={
+ category="mc",
+ description="TIRHUTA SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x114C1,
+ },
+ [0x114C2]={
+ category="mn",
+ combining=0x9,
+ description="TIRHUTA SIGN VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114C2,
+ },
+ [0x114C3]={
+ category="mn",
+ combining=0x7,
+ description="TIRHUTA SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x114C3,
+ },
+ [0x114C4]={
+ category="lo",
+ description="TIRHUTA SIGN AVAGRAHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114C4,
+ },
+ [0x114C5]={
+ category="lo",
+ description="TIRHUTA GVANG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114C5,
+ },
+ [0x114C6]={
+ category="po",
+ description="TIRHUTA ABBREVIATION SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114C6,
+ },
+ [0x114C7]={
+ category="lo",
+ description="TIRHUTA OM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x114C7,
+ },
+ [0x114D0]={
+ category="nd",
+ description="TIRHUTA DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D0,
+ },
+ [0x114D1]={
+ category="nd",
+ description="TIRHUTA DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D1,
+ },
+ [0x114D2]={
+ category="nd",
+ description="TIRHUTA DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D2,
+ },
+ [0x114D3]={
+ category="nd",
+ description="TIRHUTA DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D3,
+ },
+ [0x114D4]={
+ category="nd",
+ description="TIRHUTA DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D4,
+ },
+ [0x114D5]={
+ category="nd",
+ description="TIRHUTA DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D5,
+ },
+ [0x114D6]={
+ category="nd",
+ description="TIRHUTA DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D6,
+ },
+ [0x114D7]={
+ category="nd",
+ description="TIRHUTA DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D7,
+ },
+ [0x114D8]={
+ category="nd",
+ description="TIRHUTA DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D8,
+ },
+ [0x114D9]={
+ category="nd",
+ description="TIRHUTA DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x114D9,
+ },
+ [0x11580]={
+ category="lo",
+ description="SIDDHAM LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11580,
+ },
+ [0x11581]={
+ category="lo",
+ description="SIDDHAM LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11581,
+ },
+ [0x11582]={
+ category="lo",
+ description="SIDDHAM LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11582,
+ },
+ [0x11583]={
+ category="lo",
+ description="SIDDHAM LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11583,
+ },
+ [0x11584]={
+ category="lo",
+ description="SIDDHAM LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11584,
+ },
+ [0x11585]={
+ category="lo",
+ description="SIDDHAM LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11585,
+ },
+ [0x11586]={
+ category="lo",
+ description="SIDDHAM LETTER VOCALIC R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11586,
+ },
+ [0x11587]={
+ category="lo",
+ description="SIDDHAM LETTER VOCALIC RR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11587,
+ },
+ [0x11588]={
+ category="lo",
+ description="SIDDHAM LETTER VOCALIC L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11588,
+ },
+ [0x11589]={
+ category="lo",
+ description="SIDDHAM LETTER VOCALIC LL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11589,
+ },
+ [0x1158A]={
+ category="lo",
+ description="SIDDHAM LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1158A,
+ },
+ [0x1158B]={
+ category="lo",
+ description="SIDDHAM LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1158B,
+ },
+ [0x1158C]={
+ category="lo",
+ description="SIDDHAM LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1158C,
+ },
+ [0x1158D]={
+ category="lo",
+ description="SIDDHAM LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1158D,
+ },
+ [0x1158E]={
+ category="lo",
+ description="SIDDHAM LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1158E,
+ },
+ [0x1158F]={
+ category="lo",
+ description="SIDDHAM LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1158F,
+ },
+ [0x11590]={
+ category="lo",
+ description="SIDDHAM LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11590,
+ },
+ [0x11591]={
+ category="lo",
+ description="SIDDHAM LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11591,
+ },
+ [0x11592]={
+ category="lo",
+ description="SIDDHAM LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11592,
+ },
+ [0x11593]={
+ category="lo",
+ description="SIDDHAM LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11593,
+ },
+ [0x11594]={
+ category="lo",
+ description="SIDDHAM LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11594,
+ },
+ [0x11595]={
+ category="lo",
+ description="SIDDHAM LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11595,
+ },
+ [0x11596]={
+ category="lo",
+ description="SIDDHAM LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11596,
+ },
+ [0x11597]={
+ category="lo",
+ description="SIDDHAM LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11597,
+ },
+ [0x11598]={
+ category="lo",
+ description="SIDDHAM LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11598,
+ },
+ [0x11599]={
+ category="lo",
+ description="SIDDHAM LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11599,
+ },
+ [0x1159A]={
+ category="lo",
+ description="SIDDHAM LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1159A,
+ },
+ [0x1159B]={
+ category="lo",
+ description="SIDDHAM LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1159B,
+ },
+ [0x1159C]={
+ category="lo",
+ description="SIDDHAM LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1159C,
+ },
+ [0x1159D]={
+ category="lo",
+ description="SIDDHAM LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1159D,
+ },
+ [0x1159E]={
+ category="lo",
+ description="SIDDHAM LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1159E,
+ },
+ [0x1159F]={
+ category="lo",
+ description="SIDDHAM LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1159F,
+ },
+ [0x115A0]={
+ category="lo",
+ description="SIDDHAM LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A0,
+ },
+ [0x115A1]={
+ category="lo",
+ description="SIDDHAM LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A1,
+ },
+ [0x115A2]={
+ category="lo",
+ description="SIDDHAM LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A2,
+ },
+ [0x115A3]={
+ category="lo",
+ description="SIDDHAM LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A3,
+ },
+ [0x115A4]={
+ category="lo",
+ description="SIDDHAM LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A4,
+ },
+ [0x115A5]={
+ category="lo",
+ description="SIDDHAM LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A5,
+ },
+ [0x115A6]={
+ category="lo",
+ description="SIDDHAM LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A6,
+ },
+ [0x115A7]={
+ category="lo",
+ description="SIDDHAM LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A7,
+ },
+ [0x115A8]={
+ category="lo",
+ description="SIDDHAM LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A8,
+ },
+ [0x115A9]={
+ category="lo",
+ description="SIDDHAM LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115A9,
+ },
+ [0x115AA]={
+ category="lo",
+ description="SIDDHAM LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115AA,
+ },
+ [0x115AB]={
+ category="lo",
+ description="SIDDHAM LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115AB,
+ },
+ [0x115AC]={
+ category="lo",
+ description="SIDDHAM LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115AC,
+ },
+ [0x115AD]={
+ category="lo",
+ description="SIDDHAM LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115AD,
+ },
+ [0x115AE]={
+ category="lo",
+ description="SIDDHAM LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115AE,
+ },
+ [0x115AF]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x115AF,
+ },
+ [0x115B0]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x115B0,
+ },
+ [0x115B1]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x115B1,
+ },
+ [0x115B2]={
+ category="mn",
+ description="SIDDHAM VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115B2,
+ },
+ [0x115B3]={
+ category="mn",
+ description="SIDDHAM VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115B3,
+ },
+ [0x115B4]={
+ category="mn",
+ description="SIDDHAM VOWEL SIGN VOCALIC R",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115B4,
+ },
+ [0x115B5]={
+ category="mn",
+ description="SIDDHAM VOWEL SIGN VOCALIC RR",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115B5,
+ },
+ [0x115B8]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN E",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x115B8,
+ },
+ [0x115B9]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN AI",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x115B9,
+ },
+ [0x115BA]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN O",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x115B8, 0x115AF },
+ unicodeslot=0x115BA,
+ },
+ [0x115BB]={
+ category="mc",
+ description="SIDDHAM VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ specials={ "char", 0x115B9, 0x115AF },
+ unicodeslot=0x115BB,
+ },
+ [0x115BC]={
+ category="mn",
+ description="SIDDHAM SIGN CANDRABINDU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115BC,
+ },
+ [0x115BD]={
+ category="mn",
+ description="SIDDHAM SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115BD,
+ },
+ [0x115BE]={
+ category="mc",
+ description="SIDDHAM SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x115BE,
+ },
+ [0x115BF]={
+ category="mn",
+ combining=0x9,
+ description="SIDDHAM SIGN VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115BF,
+ },
+ [0x115C0]={
+ category="mn",
+ combining=0x7,
+ description="SIDDHAM SIGN NUKTA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x115C0,
+ },
+ [0x115C1]={
+ category="po",
+ description="SIDDHAM SIGN SIDDHAM",
+ direction="l",
+ linebreak="bb",
+ unicodeslot=0x115C1,
+ },
+ [0x115C2]={
+ category="po",
+ description="SIDDHAM DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x115C2,
+ },
+ [0x115C3]={
+ category="po",
+ description="SIDDHAM DOUBLE DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x115C3,
+ },
+ [0x115C4]={
+ category="po",
+ description="SIDDHAM SEPARATOR DOT",
+ direction="l",
+ linebreak="ex",
+ unicodeslot=0x115C4,
+ },
+ [0x115C5]={
+ category="po",
+ description="SIDDHAM SEPARATOR BAR",
+ direction="l",
+ linebreak="ex",
+ unicodeslot=0x115C5,
+ },
+ [0x115C6]={
+ category="po",
+ description="SIDDHAM REPETITION MARK-1",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115C6,
+ },
+ [0x115C7]={
+ category="po",
+ description="SIDDHAM REPETITION MARK-2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115C7,
+ },
+ [0x115C8]={
+ category="po",
+ description="SIDDHAM REPETITION MARK-3",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x115C8,
+ },
+ [0x115C9]={
+ category="po",
+ description="SIDDHAM END OF TEXT MARK",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x115C9,
+ },
+ [0x11600]={
+ category="lo",
+ description="MODI LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11600,
+ },
+ [0x11601]={
+ category="lo",
+ description="MODI LETTER AA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11601,
+ },
+ [0x11602]={
+ category="lo",
+ description="MODI LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11602,
+ },
+ [0x11603]={
+ category="lo",
+ description="MODI LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11603,
+ },
+ [0x11604]={
+ category="lo",
+ description="MODI LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11604,
+ },
+ [0x11605]={
+ category="lo",
+ description="MODI LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11605,
+ },
+ [0x11606]={
+ category="lo",
+ description="MODI LETTER VOCALIC R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11606,
+ },
+ [0x11607]={
+ category="lo",
+ description="MODI LETTER VOCALIC RR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11607,
+ },
+ [0x11608]={
+ category="lo",
+ description="MODI LETTER VOCALIC L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11608,
+ },
+ [0x11609]={
+ category="lo",
+ description="MODI LETTER VOCALIC LL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11609,
+ },
+ [0x1160A]={
+ category="lo",
+ description="MODI LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1160A,
+ },
+ [0x1160B]={
+ category="lo",
+ description="MODI LETTER AI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1160B,
+ },
+ [0x1160C]={
+ category="lo",
+ description="MODI LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1160C,
+ },
+ [0x1160D]={
+ category="lo",
+ description="MODI LETTER AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1160D,
+ },
+ [0x1160E]={
+ category="lo",
+ description="MODI LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1160E,
+ },
+ [0x1160F]={
+ category="lo",
+ description="MODI LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1160F,
+ },
+ [0x11610]={
+ category="lo",
+ description="MODI LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11610,
+ },
+ [0x11611]={
+ category="lo",
+ description="MODI LETTER GHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11611,
+ },
+ [0x11612]={
+ category="lo",
+ description="MODI LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11612,
+ },
+ [0x11613]={
+ category="lo",
+ description="MODI LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11613,
+ },
+ [0x11614]={
+ category="lo",
+ description="MODI LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11614,
+ },
+ [0x11615]={
+ category="lo",
+ description="MODI LETTER JA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11615,
+ },
+ [0x11616]={
+ category="lo",
+ description="MODI LETTER JHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11616,
+ },
+ [0x11617]={
+ category="lo",
+ description="MODI LETTER NYA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11617,
+ },
+ [0x11618]={
+ category="lo",
+ description="MODI LETTER TTA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11618,
+ },
+ [0x11619]={
+ category="lo",
+ description="MODI LETTER TTHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11619,
+ },
+ [0x1161A]={
+ category="lo",
+ description="MODI LETTER DDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1161A,
+ },
+ [0x1161B]={
+ category="lo",
+ description="MODI LETTER DDHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1161B,
+ },
+ [0x1161C]={
+ category="lo",
+ description="MODI LETTER NNA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1161C,
+ },
+ [0x1161D]={
+ category="lo",
+ description="MODI LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1161D,
+ },
+ [0x1161E]={
+ category="lo",
+ description="MODI LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1161E,
+ },
+ [0x1161F]={
+ category="lo",
+ description="MODI LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1161F,
+ },
+ [0x11620]={
+ category="lo",
+ description="MODI LETTER DHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11620,
+ },
+ [0x11621]={
+ category="lo",
+ description="MODI LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11621,
+ },
+ [0x11622]={
+ category="lo",
+ description="MODI LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11622,
+ },
+ [0x11623]={
+ category="lo",
+ description="MODI LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11623,
+ },
+ [0x11624]={
+ category="lo",
+ description="MODI LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11624,
+ },
+ [0x11625]={
+ category="lo",
+ description="MODI LETTER BHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11625,
+ },
+ [0x11626]={
+ category="lo",
+ description="MODI LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11626,
+ },
+ [0x11627]={
+ category="lo",
+ description="MODI LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11627,
+ },
+ [0x11628]={
+ category="lo",
+ description="MODI LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11628,
+ },
+ [0x11629]={
+ category="lo",
+ description="MODI LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11629,
+ },
+ [0x1162A]={
+ category="lo",
+ description="MODI LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1162A,
+ },
+ [0x1162B]={
+ category="lo",
+ description="MODI LETTER SHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1162B,
+ },
+ [0x1162C]={
+ category="lo",
+ description="MODI LETTER SSA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1162C,
+ },
+ [0x1162D]={
+ category="lo",
+ description="MODI LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1162D,
+ },
+ [0x1162E]={
+ category="lo",
+ description="MODI LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1162E,
+ },
+ [0x1162F]={
+ category="lo",
+ description="MODI LETTER LLA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1162F,
+ },
+ [0x11630]={
+ category="mc",
+ description="MODI VOWEL SIGN AA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11630,
+ },
+ [0x11631]={
+ category="mc",
+ description="MODI VOWEL SIGN I",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11631,
+ },
+ [0x11632]={
+ category="mc",
+ description="MODI VOWEL SIGN II",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x11632,
+ },
+ [0x11633]={
+ category="mn",
+ description="MODI VOWEL SIGN U",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11633,
+ },
+ [0x11634]={
+ category="mn",
+ description="MODI VOWEL SIGN UU",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11634,
+ },
+ [0x11635]={
+ category="mn",
+ description="MODI VOWEL SIGN VOCALIC R",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11635,
+ },
+ [0x11636]={
+ category="mn",
+ description="MODI VOWEL SIGN VOCALIC RR",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11636,
+ },
+ [0x11637]={
+ category="mn",
+ description="MODI VOWEL SIGN VOCALIC L",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11637,
+ },
+ [0x11638]={
+ category="mn",
+ description="MODI VOWEL SIGN VOCALIC LL",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11638,
+ },
+ [0x11639]={
+ category="mn",
+ description="MODI VOWEL SIGN E",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11639,
+ },
+ [0x1163A]={
+ category="mn",
+ description="MODI VOWEL SIGN AI",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1163A,
+ },
+ [0x1163B]={
+ category="mc",
+ description="MODI VOWEL SIGN O",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1163B,
+ },
+ [0x1163C]={
+ category="mc",
+ description="MODI VOWEL SIGN AU",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1163C,
+ },
+ [0x1163D]={
+ category="mn",
+ description="MODI SIGN ANUSVARA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1163D,
+ },
+ [0x1163E]={
+ category="mc",
+ description="MODI SIGN VISARGA",
+ direction="l",
+ linebreak="cm",
+ unicodeslot=0x1163E,
+ },
+ [0x1163F]={
+ category="mn",
+ combining=0x9,
+ description="MODI SIGN VIRAMA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1163F,
+ },
+ [0x11640]={
+ category="mn",
+ description="MODI SIGN ARDHACANDRA",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x11640,
+ },
+ [0x11641]={
+ category="po",
+ description="MODI DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11641,
+ },
+ [0x11642]={
+ category="po",
+ description="MODI DOUBLE DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x11642,
+ },
+ [0x11643]={
+ category="po",
+ description="MODI ABBREVIATION SIGN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11643,
+ },
+ [0x11644]={
+ category="lo",
+ description="MODI SIGN HUVA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11644,
+ },
+ [0x11650]={
+ category="nd",
+ description="MODI DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11650,
+ },
+ [0x11651]={
+ category="nd",
+ description="MODI DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11651,
+ },
+ [0x11652]={
+ category="nd",
+ description="MODI DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11652,
+ },
+ [0x11653]={
+ category="nd",
+ description="MODI DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11653,
+ },
+ [0x11654]={
+ category="nd",
+ description="MODI DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11654,
+ },
+ [0x11655]={
+ category="nd",
+ description="MODI DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11655,
+ },
+ [0x11656]={
+ category="nd",
+ description="MODI DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11656,
+ },
+ [0x11657]={
+ category="nd",
+ description="MODI DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11657,
+ },
+ [0x11658]={
+ category="nd",
+ description="MODI DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11658,
+ },
+ [0x11659]={
+ category="nd",
+ description="MODI DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x11659,
+ },
[0x11680]={
category="lo",
description="TAKRI LETTER A",
@@ -143601,6 +155950,7 @@ characters.data={
},
[0x116B6]={
category="mc",
+ combining=0x9,
description="TAKRI SIGN VIRAMA",
direction="l",
linebreak="cm",
@@ -143608,6 +155958,7 @@ characters.data={
},
[0x116B7]={
category="mn",
+ combining=0x7,
description="TAKRI SIGN NUKTA",
direction="nsm",
linebreak="cm",
@@ -143683,6 +156034,993 @@ characters.data={
linebreak="nu",
unicodeslot=0x116C9,
},
+ [0x118A0]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER NGAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A0,
+ },
+ [0x118A1]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A1,
+ },
+ [0x118A2]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER WI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A2,
+ },
+ [0x118A3]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER YU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A3,
+ },
+ [0x118A4]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A4,
+ },
+ [0x118A5]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER YO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A5,
+ },
+ [0x118A6]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A6,
+ },
+ [0x118A7]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A7,
+ },
+ [0x118A8]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A8,
+ },
+ [0x118A9]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118A9,
+ },
+ [0x118AA]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER ANG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118AA,
+ },
+ [0x118AB]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118AB,
+ },
+ [0x118AC]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER KO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118AC,
+ },
+ [0x118AD]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER ENY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118AD,
+ },
+ [0x118AE]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER YUJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118AE,
+ },
+ [0x118AF]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER UC",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118AF,
+ },
+ [0x118B0]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER ENN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B0,
+ },
+ [0x118B1]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER ODD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B1,
+ },
+ [0x118B2]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER TTE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B2,
+ },
+ [0x118B3]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER NUNG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B3,
+ },
+ [0x118B4]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B4,
+ },
+ [0x118B5]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER AT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B5,
+ },
+ [0x118B6]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER AM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B6,
+ },
+ [0x118B7]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER BU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B7,
+ },
+ [0x118B8]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER PU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B8,
+ },
+ [0x118B9]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER HIYO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118B9,
+ },
+ [0x118BA]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER HOLO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118BA,
+ },
+ [0x118BB]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER HORR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118BB,
+ },
+ [0x118BC]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER HAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118BC,
+ },
+ [0x118BD]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER SSUU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118BD,
+ },
+ [0x118BE]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER SII",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118BE,
+ },
+ [0x118BF]={
+ category="lu",
+ description="WARANG CITI CAPITAL LETTER VIYO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118BF,
+ },
+ [0x118C0]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER NGAA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C0,
+ },
+ [0x118C1]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C1,
+ },
+ [0x118C2]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER WI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C2,
+ },
+ [0x118C3]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER YU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C3,
+ },
+ [0x118C4]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER YA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C4,
+ },
+ [0x118C5]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER YO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C5,
+ },
+ [0x118C6]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER II",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C6,
+ },
+ [0x118C7]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER UU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C7,
+ },
+ [0x118C8]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C8,
+ },
+ [0x118C9]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118C9,
+ },
+ [0x118CA]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER ANG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118CA,
+ },
+ [0x118CB]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118CB,
+ },
+ [0x118CC]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER KO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118CC,
+ },
+ [0x118CD]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER ENY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118CD,
+ },
+ [0x118CE]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER YUJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118CE,
+ },
+ [0x118CF]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER UC",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118CF,
+ },
+ [0x118D0]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER ENN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D0,
+ },
+ [0x118D1]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER ODD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D1,
+ },
+ [0x118D2]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER TTE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D2,
+ },
+ [0x118D3]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER NUNG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D3,
+ },
+ [0x118D4]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D4,
+ },
+ [0x118D5]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER AT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D5,
+ },
+ [0x118D6]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER AM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D6,
+ },
+ [0x118D7]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER BU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D7,
+ },
+ [0x118D8]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER PU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D8,
+ },
+ [0x118D9]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER HIYO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118D9,
+ },
+ [0x118DA]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER HOLO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118DA,
+ },
+ [0x118DB]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER HORR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118DB,
+ },
+ [0x118DC]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER HAR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118DC,
+ },
+ [0x118DD]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER SSUU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118DD,
+ },
+ [0x118DE]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER SII",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118DE,
+ },
+ [0x118DF]={
+ category="ll",
+ description="WARANG CITI SMALL LETTER VIYO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118DF,
+ },
+ [0x118E0]={
+ category="nd",
+ description="WARANG CITI DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E0,
+ },
+ [0x118E1]={
+ category="nd",
+ description="WARANG CITI DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E1,
+ },
+ [0x118E2]={
+ category="nd",
+ description="WARANG CITI DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E2,
+ },
+ [0x118E3]={
+ category="nd",
+ description="WARANG CITI DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E3,
+ },
+ [0x118E4]={
+ category="nd",
+ description="WARANG CITI DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E4,
+ },
+ [0x118E5]={
+ category="nd",
+ description="WARANG CITI DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E5,
+ },
+ [0x118E6]={
+ category="nd",
+ description="WARANG CITI DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E6,
+ },
+ [0x118E7]={
+ category="nd",
+ description="WARANG CITI DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E7,
+ },
+ [0x118E8]={
+ category="nd",
+ description="WARANG CITI DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E8,
+ },
+ [0x118E9]={
+ category="nd",
+ description="WARANG CITI DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x118E9,
+ },
+ [0x118EA]={
+ category="no",
+ description="WARANG CITI NUMBER TEN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118EA,
+ },
+ [0x118EB]={
+ category="no",
+ description="WARANG CITI NUMBER TWENTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118EB,
+ },
+ [0x118EC]={
+ category="no",
+ description="WARANG CITI NUMBER THIRTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118EC,
+ },
+ [0x118ED]={
+ category="no",
+ description="WARANG CITI NUMBER FORTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118ED,
+ },
+ [0x118EE]={
+ category="no",
+ description="WARANG CITI NUMBER FIFTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118EE,
+ },
+ [0x118EF]={
+ category="no",
+ description="WARANG CITI NUMBER SIXTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118EF,
+ },
+ [0x118F0]={
+ category="no",
+ description="WARANG CITI NUMBER SEVENTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118F0,
+ },
+ [0x118F1]={
+ category="no",
+ description="WARANG CITI NUMBER EIGHTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118F1,
+ },
+ [0x118F2]={
+ category="no",
+ description="WARANG CITI NUMBER NINETY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118F2,
+ },
+ [0x118FF]={
+ category="lo",
+ description="WARANG CITI OM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x118FF,
+ },
+ [0x11AC0]={
+ category="lo",
+ description="PAU CIN HAU LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC0,
+ },
+ [0x11AC1]={
+ category="lo",
+ description="PAU CIN HAU LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC1,
+ },
+ [0x11AC2]={
+ category="lo",
+ description="PAU CIN HAU LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC2,
+ },
+ [0x11AC3]={
+ category="lo",
+ description="PAU CIN HAU LETTER MA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC3,
+ },
+ [0x11AC4]={
+ category="lo",
+ description="PAU CIN HAU LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC4,
+ },
+ [0x11AC5]={
+ category="lo",
+ description="PAU CIN HAU LETTER ZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC5,
+ },
+ [0x11AC6]={
+ category="lo",
+ description="PAU CIN HAU LETTER VA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC6,
+ },
+ [0x11AC7]={
+ category="lo",
+ description="PAU CIN HAU LETTER NGA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC7,
+ },
+ [0x11AC8]={
+ category="lo",
+ description="PAU CIN HAU LETTER HA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC8,
+ },
+ [0x11AC9]={
+ category="lo",
+ description="PAU CIN HAU LETTER GA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AC9,
+ },
+ [0x11ACA]={
+ category="lo",
+ description="PAU CIN HAU LETTER KHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ACA,
+ },
+ [0x11ACB]={
+ category="lo",
+ description="PAU CIN HAU LETTER SA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ACB,
+ },
+ [0x11ACC]={
+ category="lo",
+ description="PAU CIN HAU LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ACC,
+ },
+ [0x11ACD]={
+ category="lo",
+ description="PAU CIN HAU LETTER CA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ACD,
+ },
+ [0x11ACE]={
+ category="lo",
+ description="PAU CIN HAU LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ACE,
+ },
+ [0x11ACF]={
+ category="lo",
+ description="PAU CIN HAU LETTER THA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ACF,
+ },
+ [0x11AD0]={
+ category="lo",
+ description="PAU CIN HAU LETTER NA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD0,
+ },
+ [0x11AD1]={
+ category="lo",
+ description="PAU CIN HAU LETTER PHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD1,
+ },
+ [0x11AD2]={
+ category="lo",
+ description="PAU CIN HAU LETTER RA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD2,
+ },
+ [0x11AD3]={
+ category="lo",
+ description="PAU CIN HAU LETTER FA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD3,
+ },
+ [0x11AD4]={
+ category="lo",
+ description="PAU CIN HAU LETTER CHA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD4,
+ },
+ [0x11AD5]={
+ category="lo",
+ description="PAU CIN HAU LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD5,
+ },
+ [0x11AD6]={
+ category="lo",
+ description="PAU CIN HAU LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD6,
+ },
+ [0x11AD7]={
+ category="lo",
+ description="PAU CIN HAU LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD7,
+ },
+ [0x11AD8]={
+ category="lo",
+ description="PAU CIN HAU LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD8,
+ },
+ [0x11AD9]={
+ category="lo",
+ description="PAU CIN HAU LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AD9,
+ },
+ [0x11ADA]={
+ category="lo",
+ description="PAU CIN HAU LETTER UA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ADA,
+ },
+ [0x11ADB]={
+ category="lo",
+ description="PAU CIN HAU LETTER IA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ADB,
+ },
+ [0x11ADC]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL P",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ADC,
+ },
+ [0x11ADD]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ADD,
+ },
+ [0x11ADE]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL T",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ADE,
+ },
+ [0x11ADF]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11ADF,
+ },
+ [0x11AE0]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE0,
+ },
+ [0x11AE1]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE1,
+ },
+ [0x11AE2]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL W",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE2,
+ },
+ [0x11AE3]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL NG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE3,
+ },
+ [0x11AE4]={
+ category="lo",
+ description="PAU CIN HAU LETTER FINAL Y",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE4,
+ },
+ [0x11AE5]={
+ category="lo",
+ description="PAU CIN HAU RISING TONE LONG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE5,
+ },
+ [0x11AE6]={
+ category="lo",
+ description="PAU CIN HAU RISING TONE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE6,
+ },
+ [0x11AE7]={
+ category="lo",
+ description="PAU CIN HAU SANDHI GLOTTAL STOP",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE7,
+ },
+ [0x11AE8]={
+ category="lo",
+ description="PAU CIN HAU RISING TONE LONG FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE8,
+ },
+ [0x11AE9]={
+ category="lo",
+ description="PAU CIN HAU RISING TONE FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AE9,
+ },
+ [0x11AEA]={
+ category="lo",
+ description="PAU CIN HAU SANDHI GLOTTAL STOP FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AEA,
+ },
+ [0x11AEB]={
+ category="lo",
+ description="PAU CIN HAU SANDHI TONE LONG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AEB,
+ },
+ [0x11AEC]={
+ category="lo",
+ description="PAU CIN HAU SANDHI TONE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AEC,
+ },
+ [0x11AED]={
+ category="lo",
+ description="PAU CIN HAU SANDHI TONE LONG FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AED,
+ },
+ [0x11AEE]={
+ category="lo",
+ description="PAU CIN HAU SANDHI TONE FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AEE,
+ },
+ [0x11AEF]={
+ category="lo",
+ description="PAU CIN HAU MID-LEVEL TONE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AEF,
+ },
+ [0x11AF0]={
+ category="lo",
+ description="PAU CIN HAU GLOTTAL STOP VARIANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF0,
+ },
+ [0x11AF1]={
+ category="lo",
+ description="PAU CIN HAU MID-LEVEL TONE LONG FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF1,
+ },
+ [0x11AF2]={
+ category="lo",
+ description="PAU CIN HAU MID-LEVEL TONE FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF2,
+ },
+ [0x11AF3]={
+ category="lo",
+ description="PAU CIN HAU LOW-FALLING TONE LONG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF3,
+ },
+ [0x11AF4]={
+ category="lo",
+ description="PAU CIN HAU LOW-FALLING TONE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF4,
+ },
+ [0x11AF5]={
+ category="lo",
+ description="PAU CIN HAU GLOTTAL STOP",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF5,
+ },
+ [0x11AF6]={
+ category="lo",
+ description="PAU CIN HAU LOW-FALLING TONE LONG FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF6,
+ },
+ [0x11AF7]={
+ category="lo",
+ description="PAU CIN HAU LOW-FALLING TONE FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF7,
+ },
+ [0x11AF8]={
+ category="lo",
+ description="PAU CIN HAU GLOTTAL STOP FINAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x11AF8,
+ },
[0x12000]={
category="lo",
description="CUNEIFORM SIGN A",
@@ -149838,6 +163176,300 @@ characters.data={
linebreak="al",
unicodeslot=0x1236E,
},
+ [0x1236F]={
+ category="lo",
+ description="CUNEIFORM SIGN KAP ELAMITE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1236F,
+ },
+ [0x12370]={
+ category="lo",
+ description="CUNEIFORM SIGN AB TIMES NUN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12370,
+ },
+ [0x12371]={
+ category="lo",
+ description="CUNEIFORM SIGN AB2 TIMES A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12371,
+ },
+ [0x12372]={
+ category="lo",
+ description="CUNEIFORM SIGN AMAR TIMES KUG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12372,
+ },
+ [0x12373]={
+ category="lo",
+ description="CUNEIFORM SIGN DAG KISIM5 TIMES U2 PLUS MASH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12373,
+ },
+ [0x12374]={
+ category="lo",
+ description="CUNEIFORM SIGN DAG3",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12374,
+ },
+ [0x12375]={
+ category="lo",
+ description="CUNEIFORM SIGN DISH PLUS SHU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12375,
+ },
+ [0x12376]={
+ category="lo",
+ description="CUNEIFORM SIGN DUB TIMES SHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12376,
+ },
+ [0x12377]={
+ category="lo",
+ description="CUNEIFORM SIGN EZEN TIMES GUD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12377,
+ },
+ [0x12378]={
+ category="lo",
+ description="CUNEIFORM SIGN EZEN TIMES SHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12378,
+ },
+ [0x12379]={
+ category="lo",
+ description="CUNEIFORM SIGN GA2 TIMES AN PLUS KAK PLUS A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12379,
+ },
+ [0x1237A]={
+ category="lo",
+ description="CUNEIFORM SIGN GA2 TIMES ASH2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1237A,
+ },
+ [0x1237B]={
+ category="lo",
+ description="CUNEIFORM SIGN GE22",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1237B,
+ },
+ [0x1237C]={
+ category="lo",
+ description="CUNEIFORM SIGN GIG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1237C,
+ },
+ [0x1237D]={
+ category="lo",
+ description="CUNEIFORM SIGN HUSH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1237D,
+ },
+ [0x1237E]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES ANSHE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1237E,
+ },
+ [0x1237F]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES ASH3",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1237F,
+ },
+ [0x12380]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES GISH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12380,
+ },
+ [0x12381]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES GUD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12381,
+ },
+ [0x12382]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES HI TIMES ASH2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12382,
+ },
+ [0x12383]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES LUM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12383,
+ },
+ [0x12384]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12384,
+ },
+ [0x12385]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES SHUL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12385,
+ },
+ [0x12386]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES TU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12386,
+ },
+ [0x12387]={
+ category="lo",
+ description="CUNEIFORM SIGN KA TIMES UR2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12387,
+ },
+ [0x12388]={
+ category="lo",
+ description="CUNEIFORM SIGN LAGAB TIMES GI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12388,
+ },
+ [0x12389]={
+ category="lo",
+ description="CUNEIFORM SIGN LU2 SHESHIG TIMES BAD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12389,
+ },
+ [0x1238A]={
+ category="lo",
+ description="CUNEIFORM SIGN LU2 TIMES ESH2 PLUS LAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1238A,
+ },
+ [0x1238B]={
+ category="lo",
+ description="CUNEIFORM SIGN LU2 TIMES SHU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1238B,
+ },
+ [0x1238C]={
+ category="lo",
+ description="CUNEIFORM SIGN MESH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1238C,
+ },
+ [0x1238D]={
+ category="lo",
+ description="CUNEIFORM SIGN MUSH3 TIMES ZA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1238D,
+ },
+ [0x1238E]={
+ category="lo",
+ description="CUNEIFORM SIGN NA4",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1238E,
+ },
+ [0x1238F]={
+ category="lo",
+ description="CUNEIFORM SIGN NIN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1238F,
+ },
+ [0x12390]={
+ category="lo",
+ description="CUNEIFORM SIGN NIN9",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12390,
+ },
+ [0x12391]={
+ category="lo",
+ description="CUNEIFORM SIGN NINDA2 TIMES BAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12391,
+ },
+ [0x12392]={
+ category="lo",
+ description="CUNEIFORM SIGN NINDA2 TIMES GI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12392,
+ },
+ [0x12393]={
+ category="lo",
+ description="CUNEIFORM SIGN NU11 ROTATED NINETY DEGREES",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12393,
+ },
+ [0x12394]={
+ category="lo",
+ description="CUNEIFORM SIGN PESH2 ASTERISK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12394,
+ },
+ [0x12395]={
+ category="lo",
+ description="CUNEIFORM SIGN PIR2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12395,
+ },
+ [0x12396]={
+ category="lo",
+ description="CUNEIFORM SIGN SAG TIMES IGI GUNU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12396,
+ },
+ [0x12397]={
+ category="lo",
+ description="CUNEIFORM SIGN TI2",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12397,
+ },
+ [0x12398]={
+ category="lo",
+ description="CUNEIFORM SIGN UM TIMES ME",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12398,
+ },
[0x12400]={
category="nl",
description="CUNEIFORM NUMERIC SIGN TWO ASH",
@@ -150531,6 +164163,90 @@ characters.data={
linebreak="al",
unicodeslot=0x12462,
},
+ [0x12463]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN ONE QUARTER GUR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12463,
+ },
+ [0x12464]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN ONE HALF GUR",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12464,
+ },
+ [0x12465]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN ELAMITE ONE THIRD",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12465,
+ },
+ [0x12466]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN ELAMITE TWO THIRDS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12466,
+ },
+ [0x12467]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN ELAMITE FORTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12467,
+ },
+ [0x12468]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN ELAMITE FIFTY",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12468,
+ },
+ [0x12469]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN FOUR U VARIANT FORM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x12469,
+ },
+ [0x1246A]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN FIVE U VARIANT FORM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1246A,
+ },
+ [0x1246B]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN SIX U VARIANT FORM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1246B,
+ },
+ [0x1246C]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN SEVEN U VARIANT FORM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1246C,
+ },
+ [0x1246D]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN EIGHT U VARIANT FORM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1246D,
+ },
+ [0x1246E]={
+ category="nl",
+ description="CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1246E,
+ },
[0x12470]={
category="po",
description="CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER",
@@ -150559,6 +164275,13 @@ characters.data={
linebreak="ba",
unicodeslot=0x12473,
},
+ [0x12474]={
+ category="po",
+ description="CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x12474,
+ },
[0x13000]={
category="lo",
description="EGYPTIAN HIEROGLYPH A001",
@@ -162039,6 +175762,1460 @@ characters.data={
linebreak="al",
unicodeslot=0x16A38,
},
+ [0x16A40]={
+ category="lo",
+ description="MRO LETTER TA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A40,
+ },
+ [0x16A41]={
+ category="lo",
+ description="MRO LETTER NGI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A41,
+ },
+ [0x16A42]={
+ category="lo",
+ description="MRO LETTER YO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A42,
+ },
+ [0x16A43]={
+ category="lo",
+ description="MRO LETTER MIM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A43,
+ },
+ [0x16A44]={
+ category="lo",
+ description="MRO LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A44,
+ },
+ [0x16A45]={
+ category="lo",
+ description="MRO LETTER DA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A45,
+ },
+ [0x16A46]={
+ category="lo",
+ description="MRO LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A46,
+ },
+ [0x16A47]={
+ category="lo",
+ description="MRO LETTER PHI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A47,
+ },
+ [0x16A48]={
+ category="lo",
+ description="MRO LETTER KHAI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A48,
+ },
+ [0x16A49]={
+ category="lo",
+ description="MRO LETTER HAO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A49,
+ },
+ [0x16A4A]={
+ category="lo",
+ description="MRO LETTER DAI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A4A,
+ },
+ [0x16A4B]={
+ category="lo",
+ description="MRO LETTER CHU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A4B,
+ },
+ [0x16A4C]={
+ category="lo",
+ description="MRO LETTER KEAAE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A4C,
+ },
+ [0x16A4D]={
+ category="lo",
+ description="MRO LETTER OL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A4D,
+ },
+ [0x16A4E]={
+ category="lo",
+ description="MRO LETTER MAEM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A4E,
+ },
+ [0x16A4F]={
+ category="lo",
+ description="MRO LETTER NIN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A4F,
+ },
+ [0x16A50]={
+ category="lo",
+ description="MRO LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A50,
+ },
+ [0x16A51]={
+ category="lo",
+ description="MRO LETTER OO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A51,
+ },
+ [0x16A52]={
+ category="lo",
+ description="MRO LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A52,
+ },
+ [0x16A53]={
+ category="lo",
+ description="MRO LETTER RO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A53,
+ },
+ [0x16A54]={
+ category="lo",
+ description="MRO LETTER SHI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A54,
+ },
+ [0x16A55]={
+ category="lo",
+ description="MRO LETTER THEA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A55,
+ },
+ [0x16A56]={
+ category="lo",
+ description="MRO LETTER EA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A56,
+ },
+ [0x16A57]={
+ category="lo",
+ description="MRO LETTER WA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A57,
+ },
+ [0x16A58]={
+ category="lo",
+ description="MRO LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A58,
+ },
+ [0x16A59]={
+ category="lo",
+ description="MRO LETTER KO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A59,
+ },
+ [0x16A5A]={
+ category="lo",
+ description="MRO LETTER LAN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A5A,
+ },
+ [0x16A5B]={
+ category="lo",
+ description="MRO LETTER LA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A5B,
+ },
+ [0x16A5C]={
+ category="lo",
+ description="MRO LETTER HAI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A5C,
+ },
+ [0x16A5D]={
+ category="lo",
+ description="MRO LETTER RI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A5D,
+ },
+ [0x16A5E]={
+ category="lo",
+ description="MRO LETTER TEK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16A5E,
+ },
+ [0x16A60]={
+ category="nd",
+ description="MRO DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A60,
+ },
+ [0x16A61]={
+ category="nd",
+ description="MRO DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A61,
+ },
+ [0x16A62]={
+ category="nd",
+ description="MRO DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A62,
+ },
+ [0x16A63]={
+ category="nd",
+ description="MRO DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A63,
+ },
+ [0x16A64]={
+ category="nd",
+ description="MRO DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A64,
+ },
+ [0x16A65]={
+ category="nd",
+ description="MRO DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A65,
+ },
+ [0x16A66]={
+ category="nd",
+ description="MRO DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A66,
+ },
+ [0x16A67]={
+ category="nd",
+ description="MRO DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A67,
+ },
+ [0x16A68]={
+ category="nd",
+ description="MRO DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A68,
+ },
+ [0x16A69]={
+ category="nd",
+ description="MRO DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16A69,
+ },
+ [0x16A6E]={
+ category="po",
+ description="MRO DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16A6E,
+ },
+ [0x16A6F]={
+ category="po",
+ description="MRO DOUBLE DANDA",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16A6F,
+ },
+ [0x16AD0]={
+ category="lo",
+ description="BASSA VAH LETTER ENNI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD0,
+ },
+ [0x16AD1]={
+ category="lo",
+ description="BASSA VAH LETTER KA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD1,
+ },
+ [0x16AD2]={
+ category="lo",
+ description="BASSA VAH LETTER SE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD2,
+ },
+ [0x16AD3]={
+ category="lo",
+ description="BASSA VAH LETTER FA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD3,
+ },
+ [0x16AD4]={
+ category="lo",
+ description="BASSA VAH LETTER MBE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD4,
+ },
+ [0x16AD5]={
+ category="lo",
+ description="BASSA VAH LETTER YIE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD5,
+ },
+ [0x16AD6]={
+ category="lo",
+ description="BASSA VAH LETTER GAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD6,
+ },
+ [0x16AD7]={
+ category="lo",
+ description="BASSA VAH LETTER DHII",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD7,
+ },
+ [0x16AD8]={
+ category="lo",
+ description="BASSA VAH LETTER KPAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD8,
+ },
+ [0x16AD9]={
+ category="lo",
+ description="BASSA VAH LETTER JO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AD9,
+ },
+ [0x16ADA]={
+ category="lo",
+ description="BASSA VAH LETTER HWAH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16ADA,
+ },
+ [0x16ADB]={
+ category="lo",
+ description="BASSA VAH LETTER WA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16ADB,
+ },
+ [0x16ADC]={
+ category="lo",
+ description="BASSA VAH LETTER ZO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16ADC,
+ },
+ [0x16ADD]={
+ category="lo",
+ description="BASSA VAH LETTER GBU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16ADD,
+ },
+ [0x16ADE]={
+ category="lo",
+ description="BASSA VAH LETTER DO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16ADE,
+ },
+ [0x16ADF]={
+ category="lo",
+ description="BASSA VAH LETTER CE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16ADF,
+ },
+ [0x16AE0]={
+ category="lo",
+ description="BASSA VAH LETTER UWU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE0,
+ },
+ [0x16AE1]={
+ category="lo",
+ description="BASSA VAH LETTER TO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE1,
+ },
+ [0x16AE2]={
+ category="lo",
+ description="BASSA VAH LETTER BA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE2,
+ },
+ [0x16AE3]={
+ category="lo",
+ description="BASSA VAH LETTER VU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE3,
+ },
+ [0x16AE4]={
+ category="lo",
+ description="BASSA VAH LETTER YEIN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE4,
+ },
+ [0x16AE5]={
+ category="lo",
+ description="BASSA VAH LETTER PA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE5,
+ },
+ [0x16AE6]={
+ category="lo",
+ description="BASSA VAH LETTER WADDA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE6,
+ },
+ [0x16AE7]={
+ category="lo",
+ description="BASSA VAH LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE7,
+ },
+ [0x16AE8]={
+ category="lo",
+ description="BASSA VAH LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE8,
+ },
+ [0x16AE9]={
+ category="lo",
+ description="BASSA VAH LETTER OO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AE9,
+ },
+ [0x16AEA]={
+ category="lo",
+ description="BASSA VAH LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AEA,
+ },
+ [0x16AEB]={
+ category="lo",
+ description="BASSA VAH LETTER EE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AEB,
+ },
+ [0x16AEC]={
+ category="lo",
+ description="BASSA VAH LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AEC,
+ },
+ [0x16AED]={
+ category="lo",
+ description="BASSA VAH LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16AED,
+ },
+ [0x16AF0]={
+ category="mn",
+ combining=0x1,
+ description="BASSA VAH COMBINING HIGH TONE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16AF0,
+ },
+ [0x16AF1]={
+ category="mn",
+ combining=0x1,
+ description="BASSA VAH COMBINING LOW TONE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16AF1,
+ },
+ [0x16AF2]={
+ category="mn",
+ combining=0x1,
+ description="BASSA VAH COMBINING MID TONE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16AF2,
+ },
+ [0x16AF3]={
+ category="mn",
+ combining=0x1,
+ description="BASSA VAH COMBINING LOW-MID TONE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16AF3,
+ },
+ [0x16AF4]={
+ category="mn",
+ combining=0x1,
+ description="BASSA VAH COMBINING HIGH-LOW TONE",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16AF4,
+ },
+ [0x16AF5]={
+ category="po",
+ description="BASSA VAH FULL STOP",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16AF5,
+ },
+ [0x16B00]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KEEB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B00,
+ },
+ [0x16B01]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KEEV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B01,
+ },
+ [0x16B02]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KIB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B02,
+ },
+ [0x16B03]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KIV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B03,
+ },
+ [0x16B04]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAUB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B04,
+ },
+ [0x16B05]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAUV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B05,
+ },
+ [0x16B06]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KUB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B06,
+ },
+ [0x16B07]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KUV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B07,
+ },
+ [0x16B08]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KEB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B08,
+ },
+ [0x16B09]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KEV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B09,
+ },
+ [0x16B0A]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAIB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B0A,
+ },
+ [0x16B0B]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAIV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B0B,
+ },
+ [0x16B0C]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KOOB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B0C,
+ },
+ [0x16B0D]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KOOV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B0D,
+ },
+ [0x16B0E]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAWB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B0E,
+ },
+ [0x16B0F]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAWV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B0F,
+ },
+ [0x16B10]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KUAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B10,
+ },
+ [0x16B11]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KUAV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B11,
+ },
+ [0x16B12]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KOB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B12,
+ },
+ [0x16B13]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KOV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B13,
+ },
+ [0x16B14]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KIAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B14,
+ },
+ [0x16B15]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KIAV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B15,
+ },
+ [0x16B16]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B16,
+ },
+ [0x16B17]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B17,
+ },
+ [0x16B18]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KWB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B18,
+ },
+ [0x16B19]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KWV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B19,
+ },
+ [0x16B1A]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B1A,
+ },
+ [0x16B1B]={
+ category="lo",
+ description="PAHAWH HMONG VOWEL KAAV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B1B,
+ },
+ [0x16B1C]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT VAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B1C,
+ },
+ [0x16B1D]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT NTSAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B1D,
+ },
+ [0x16B1E]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT LAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B1E,
+ },
+ [0x16B1F]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT HAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B1F,
+ },
+ [0x16B20]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT NLAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B20,
+ },
+ [0x16B21]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT RAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B21,
+ },
+ [0x16B22]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT NKAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B22,
+ },
+ [0x16B23]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT QHAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B23,
+ },
+ [0x16B24]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT YAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B24,
+ },
+ [0x16B25]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT HLAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B25,
+ },
+ [0x16B26]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT MAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B26,
+ },
+ [0x16B27]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT CHAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B27,
+ },
+ [0x16B28]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT NCHAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B28,
+ },
+ [0x16B29]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT HNAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B29,
+ },
+ [0x16B2A]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT PLHAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B2A,
+ },
+ [0x16B2B]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT NTHAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B2B,
+ },
+ [0x16B2C]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT NAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B2C,
+ },
+ [0x16B2D]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT AU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B2D,
+ },
+ [0x16B2E]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT XAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B2E,
+ },
+ [0x16B2F]={
+ category="lo",
+ description="PAHAWH HMONG CONSONANT CAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B2F,
+ },
+ [0x16B30]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM TUB",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B30,
+ },
+ [0x16B31]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM SO",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B31,
+ },
+ [0x16B32]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM KES",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B32,
+ },
+ [0x16B33]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM KHAV",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B33,
+ },
+ [0x16B34]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM SUAM",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B34,
+ },
+ [0x16B35]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM HOM",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B35,
+ },
+ [0x16B36]={
+ category="mn",
+ combining=0xE6,
+ description="PAHAWH HMONG MARK CIM TAUM",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x16B36,
+ },
+ [0x16B37]={
+ category="po",
+ description="PAHAWH HMONG SIGN VOS THOM",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16B37,
+ },
+ [0x16B38]={
+ category="po",
+ description="PAHAWH HMONG SIGN VOS TSHAB CEEB",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16B38,
+ },
+ [0x16B39]={
+ category="po",
+ description="PAHAWH HMONG SIGN CIM CHEEM",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16B39,
+ },
+ [0x16B3A]={
+ category="po",
+ description="PAHAWH HMONG SIGN VOS THIAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B3A,
+ },
+ [0x16B3B]={
+ category="po",
+ description="PAHAWH HMONG SIGN VOS FEEM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B3B,
+ },
+ [0x16B3C]={
+ category="so",
+ description="PAHAWH HMONG SIGN XYEEM NTXIV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B3C,
+ },
+ [0x16B3D]={
+ category="so",
+ description="PAHAWH HMONG SIGN XYEEM RHO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B3D,
+ },
+ [0x16B3E]={
+ category="so",
+ description="PAHAWH HMONG SIGN XYEEM TOV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B3E,
+ },
+ [0x16B3F]={
+ category="so",
+ description="PAHAWH HMONG SIGN XYEEM FAIB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B3F,
+ },
+ [0x16B40]={
+ category="lm",
+ description="PAHAWH HMONG SIGN VOS SEEV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B40,
+ },
+ [0x16B41]={
+ category="lm",
+ description="PAHAWH HMONG SIGN MEEJ SUAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B41,
+ },
+ [0x16B42]={
+ category="lm",
+ description="PAHAWH HMONG SIGN VOS NRUA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B42,
+ },
+ [0x16B43]={
+ category="lm",
+ description="PAHAWH HMONG SIGN IB YAM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B43,
+ },
+ [0x16B44]={
+ category="po",
+ description="PAHAWH HMONG SIGN XAUS",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x16B44,
+ },
+ [0x16B45]={
+ category="so",
+ description="PAHAWH HMONG SIGN CIM TSOV ROG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B45,
+ },
+ [0x16B50]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT ZERO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B50,
+ },
+ [0x16B51]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT ONE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B51,
+ },
+ [0x16B52]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT TWO",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B52,
+ },
+ [0x16B53]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT THREE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B53,
+ },
+ [0x16B54]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT FOUR",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B54,
+ },
+ [0x16B55]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT FIVE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B55,
+ },
+ [0x16B56]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT SIX",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B56,
+ },
+ [0x16B57]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT SEVEN",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B57,
+ },
+ [0x16B58]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT EIGHT",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B58,
+ },
+ [0x16B59]={
+ category="nd",
+ description="PAHAWH HMONG DIGIT NINE",
+ direction="l",
+ linebreak="nu",
+ unicodeslot=0x16B59,
+ },
+ [0x16B5B]={
+ category="no",
+ description="PAHAWH HMONG NUMBER TENS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B5B,
+ },
+ [0x16B5C]={
+ category="no",
+ description="PAHAWH HMONG NUMBER HUNDREDS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B5C,
+ },
+ [0x16B5D]={
+ category="no",
+ description="PAHAWH HMONG NUMBER TEN THOUSANDS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B5D,
+ },
+ [0x16B5E]={
+ category="no",
+ description="PAHAWH HMONG NUMBER MILLIONS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B5E,
+ },
+ [0x16B5F]={
+ category="no",
+ description="PAHAWH HMONG NUMBER HUNDRED MILLIONS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B5F,
+ },
+ [0x16B60]={
+ category="no",
+ description="PAHAWH HMONG NUMBER TEN BILLIONS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B60,
+ },
+ [0x16B61]={
+ category="no",
+ description="PAHAWH HMONG NUMBER TRILLIONS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B61,
+ },
+ [0x16B63]={
+ category="lo",
+ description="PAHAWH HMONG SIGN VOS LUB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B63,
+ },
+ [0x16B64]={
+ category="lo",
+ description="PAHAWH HMONG SIGN XYOO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B64,
+ },
+ [0x16B65]={
+ category="lo",
+ description="PAHAWH HMONG SIGN HLI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B65,
+ },
+ [0x16B66]={
+ category="lo",
+ description="PAHAWH HMONG SIGN THIRD-STAGE HLI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B66,
+ },
+ [0x16B67]={
+ category="lo",
+ description="PAHAWH HMONG SIGN ZWJ THAJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B67,
+ },
+ [0x16B68]={
+ category="lo",
+ description="PAHAWH HMONG SIGN HNUB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B68,
+ },
+ [0x16B69]={
+ category="lo",
+ description="PAHAWH HMONG SIGN NQIG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B69,
+ },
+ [0x16B6A]={
+ category="lo",
+ description="PAHAWH HMONG SIGN XIAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B6A,
+ },
+ [0x16B6B]={
+ category="lo",
+ description="PAHAWH HMONG SIGN NTUJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B6B,
+ },
+ [0x16B6C]={
+ category="lo",
+ description="PAHAWH HMONG SIGN AV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B6C,
+ },
+ [0x16B6D]={
+ category="lo",
+ description="PAHAWH HMONG SIGN TXHEEJ CEEV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B6D,
+ },
+ [0x16B6E]={
+ category="lo",
+ description="PAHAWH HMONG SIGN MEEJ TSEEB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B6E,
+ },
+ [0x16B6F]={
+ category="lo",
+ description="PAHAWH HMONG SIGN TAU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B6F,
+ },
+ [0x16B70]={
+ category="lo",
+ description="PAHAWH HMONG SIGN LOS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B70,
+ },
+ [0x16B71]={
+ category="lo",
+ description="PAHAWH HMONG SIGN MUS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B71,
+ },
+ [0x16B72]={
+ category="lo",
+ description="PAHAWH HMONG SIGN CIM HAIS LUS NTOG NTOG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B72,
+ },
+ [0x16B73]={
+ category="lo",
+ description="PAHAWH HMONG SIGN CIM CUAM TSHOOJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B73,
+ },
+ [0x16B74]={
+ category="lo",
+ description="PAHAWH HMONG SIGN CIM TXWV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B74,
+ },
+ [0x16B75]={
+ category="lo",
+ description="PAHAWH HMONG SIGN CIM TXWV CHWV",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B75,
+ },
+ [0x16B76]={
+ category="lo",
+ description="PAHAWH HMONG SIGN CIM PUB DAWB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B76,
+ },
+ [0x16B77]={
+ category="lo",
+ description="PAHAWH HMONG SIGN CIM NRES TOS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B77,
+ },
+ [0x16B7D]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN TSHEEJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B7D,
+ },
+ [0x16B7E]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN YEEG",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B7E,
+ },
+ [0x16B7F]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN LIS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B7F,
+ },
+ [0x16B80]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN LAUJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B80,
+ },
+ [0x16B81]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN XYOOJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B81,
+ },
+ [0x16B82]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN KOO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B82,
+ },
+ [0x16B83]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN HAWJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B83,
+ },
+ [0x16B84]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN MUAS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B84,
+ },
+ [0x16B85]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN THOJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B85,
+ },
+ [0x16B86]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN TSAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B86,
+ },
+ [0x16B87]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN PHAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B87,
+ },
+ [0x16B88]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN KHAB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B88,
+ },
+ [0x16B89]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN HAM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B89,
+ },
+ [0x16B8A]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN VAJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B8A,
+ },
+ [0x16B8B]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN FAJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B8B,
+ },
+ [0x16B8C]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN YAJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B8C,
+ },
+ [0x16B8D]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN TSWB",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B8D,
+ },
+ [0x16B8E]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN KWM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B8E,
+ },
+ [0x16B8F]={
+ category="lo",
+ description="PAHAWH HMONG CLAN SIGN VWJ",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x16B8F,
+ },
[0x16F00]={
category="lo",
description="MIAO LETTER PA",
@@ -162986,6 +178163,1036 @@ characters.data={
linebreak="id",
unicodeslot=0x1B001,
},
+ [0x1BC00]={
+ category="lo",
+ description="DUPLOYAN LETTER H",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC00,
+ },
+ [0x1BC01]={
+ category="lo",
+ description="DUPLOYAN LETTER X",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC01,
+ },
+ [0x1BC02]={
+ category="lo",
+ description="DUPLOYAN LETTER P",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC02,
+ },
+ [0x1BC03]={
+ category="lo",
+ description="DUPLOYAN LETTER T",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC03,
+ },
+ [0x1BC04]={
+ category="lo",
+ description="DUPLOYAN LETTER F",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC04,
+ },
+ [0x1BC05]={
+ category="lo",
+ description="DUPLOYAN LETTER K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC05,
+ },
+ [0x1BC06]={
+ category="lo",
+ description="DUPLOYAN LETTER L",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC06,
+ },
+ [0x1BC07]={
+ category="lo",
+ description="DUPLOYAN LETTER B",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC07,
+ },
+ [0x1BC08]={
+ category="lo",
+ description="DUPLOYAN LETTER D",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC08,
+ },
+ [0x1BC09]={
+ category="lo",
+ description="DUPLOYAN LETTER V",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC09,
+ },
+ [0x1BC0A]={
+ category="lo",
+ description="DUPLOYAN LETTER G",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC0A,
+ },
+ [0x1BC0B]={
+ category="lo",
+ description="DUPLOYAN LETTER R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC0B,
+ },
+ [0x1BC0C]={
+ category="lo",
+ description="DUPLOYAN LETTER P N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC0C,
+ },
+ [0x1BC0D]={
+ category="lo",
+ description="DUPLOYAN LETTER D S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC0D,
+ },
+ [0x1BC0E]={
+ category="lo",
+ description="DUPLOYAN LETTER F N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC0E,
+ },
+ [0x1BC0F]={
+ category="lo",
+ description="DUPLOYAN LETTER K M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC0F,
+ },
+ [0x1BC10]={
+ category="lo",
+ description="DUPLOYAN LETTER R S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC10,
+ },
+ [0x1BC11]={
+ category="lo",
+ description="DUPLOYAN LETTER TH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC11,
+ },
+ [0x1BC12]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN DH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC12,
+ },
+ [0x1BC13]={
+ category="lo",
+ description="DUPLOYAN LETTER DH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC13,
+ },
+ [0x1BC14]={
+ category="lo",
+ description="DUPLOYAN LETTER KK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC14,
+ },
+ [0x1BC15]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN J",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC15,
+ },
+ [0x1BC16]={
+ category="lo",
+ description="DUPLOYAN LETTER HL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC16,
+ },
+ [0x1BC17]={
+ category="lo",
+ description="DUPLOYAN LETTER LH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC17,
+ },
+ [0x1BC18]={
+ category="lo",
+ description="DUPLOYAN LETTER RH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC18,
+ },
+ [0x1BC19]={
+ category="lo",
+ description="DUPLOYAN LETTER M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC19,
+ },
+ [0x1BC1A]={
+ category="lo",
+ description="DUPLOYAN LETTER N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC1A,
+ },
+ [0x1BC1B]={
+ category="lo",
+ description="DUPLOYAN LETTER J",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC1B,
+ },
+ [0x1BC1C]={
+ category="lo",
+ description="DUPLOYAN LETTER S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC1C,
+ },
+ [0x1BC1D]={
+ category="lo",
+ description="DUPLOYAN LETTER M N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC1D,
+ },
+ [0x1BC1E]={
+ category="lo",
+ description="DUPLOYAN LETTER N M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC1E,
+ },
+ [0x1BC1F]={
+ category="lo",
+ description="DUPLOYAN LETTER J M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC1F,
+ },
+ [0x1BC20]={
+ category="lo",
+ description="DUPLOYAN LETTER S J",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC20,
+ },
+ [0x1BC21]={
+ category="lo",
+ description="DUPLOYAN LETTER M WITH DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC21,
+ },
+ [0x1BC22]={
+ category="lo",
+ description="DUPLOYAN LETTER N WITH DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC22,
+ },
+ [0x1BC23]={
+ category="lo",
+ description="DUPLOYAN LETTER J WITH DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC23,
+ },
+ [0x1BC24]={
+ category="lo",
+ description="DUPLOYAN LETTER J WITH DOTS INSIDE AND ABOVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC24,
+ },
+ [0x1BC25]={
+ category="lo",
+ description="DUPLOYAN LETTER S WITH DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC25,
+ },
+ [0x1BC26]={
+ category="lo",
+ description="DUPLOYAN LETTER S WITH DOT BELOW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC26,
+ },
+ [0x1BC27]={
+ category="lo",
+ description="DUPLOYAN LETTER M S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC27,
+ },
+ [0x1BC28]={
+ category="lo",
+ description="DUPLOYAN LETTER N S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC28,
+ },
+ [0x1BC29]={
+ category="lo",
+ description="DUPLOYAN LETTER J S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC29,
+ },
+ [0x1BC2A]={
+ category="lo",
+ description="DUPLOYAN LETTER S S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC2A,
+ },
+ [0x1BC2B]={
+ category="lo",
+ description="DUPLOYAN LETTER M N S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC2B,
+ },
+ [0x1BC2C]={
+ category="lo",
+ description="DUPLOYAN LETTER N M S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC2C,
+ },
+ [0x1BC2D]={
+ category="lo",
+ description="DUPLOYAN LETTER J M S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC2D,
+ },
+ [0x1BC2E]={
+ category="lo",
+ description="DUPLOYAN LETTER S J S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC2E,
+ },
+ [0x1BC2F]={
+ category="lo",
+ description="DUPLOYAN LETTER J S WITH DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC2F,
+ },
+ [0x1BC30]={
+ category="lo",
+ description="DUPLOYAN LETTER J N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC30,
+ },
+ [0x1BC31]={
+ category="lo",
+ description="DUPLOYAN LETTER J N S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC31,
+ },
+ [0x1BC32]={
+ category="lo",
+ description="DUPLOYAN LETTER S T",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC32,
+ },
+ [0x1BC33]={
+ category="lo",
+ description="DUPLOYAN LETTER S T R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC33,
+ },
+ [0x1BC34]={
+ category="lo",
+ description="DUPLOYAN LETTER S P",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC34,
+ },
+ [0x1BC35]={
+ category="lo",
+ description="DUPLOYAN LETTER S P R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC35,
+ },
+ [0x1BC36]={
+ category="lo",
+ description="DUPLOYAN LETTER T S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC36,
+ },
+ [0x1BC37]={
+ category="lo",
+ description="DUPLOYAN LETTER T R S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC37,
+ },
+ [0x1BC38]={
+ category="lo",
+ description="DUPLOYAN LETTER W",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC38,
+ },
+ [0x1BC39]={
+ category="lo",
+ description="DUPLOYAN LETTER WH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC39,
+ },
+ [0x1BC3A]={
+ category="lo",
+ description="DUPLOYAN LETTER W R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC3A,
+ },
+ [0x1BC3B]={
+ category="lo",
+ description="DUPLOYAN LETTER S N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC3B,
+ },
+ [0x1BC3C]={
+ category="lo",
+ description="DUPLOYAN LETTER S M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC3C,
+ },
+ [0x1BC3D]={
+ category="lo",
+ description="DUPLOYAN LETTER K R S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC3D,
+ },
+ [0x1BC3E]={
+ category="lo",
+ description="DUPLOYAN LETTER G R S",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC3E,
+ },
+ [0x1BC3F]={
+ category="lo",
+ description="DUPLOYAN LETTER S K",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC3F,
+ },
+ [0x1BC40]={
+ category="lo",
+ description="DUPLOYAN LETTER S K R",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC40,
+ },
+ [0x1BC41]={
+ category="lo",
+ description="DUPLOYAN LETTER A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC41,
+ },
+ [0x1BC42]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN OW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC42,
+ },
+ [0x1BC43]={
+ category="lo",
+ description="DUPLOYAN LETTER OA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC43,
+ },
+ [0x1BC44]={
+ category="lo",
+ description="DUPLOYAN LETTER O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC44,
+ },
+ [0x1BC45]={
+ category="lo",
+ description="DUPLOYAN LETTER AOU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC45,
+ },
+ [0x1BC46]={
+ category="lo",
+ description="DUPLOYAN LETTER I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC46,
+ },
+ [0x1BC47]={
+ category="lo",
+ description="DUPLOYAN LETTER E",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC47,
+ },
+ [0x1BC48]={
+ category="lo",
+ description="DUPLOYAN LETTER IE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC48,
+ },
+ [0x1BC49]={
+ category="lo",
+ description="DUPLOYAN LETTER SHORT I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC49,
+ },
+ [0x1BC4A]={
+ category="lo",
+ description="DUPLOYAN LETTER UI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC4A,
+ },
+ [0x1BC4B]={
+ category="lo",
+ description="DUPLOYAN LETTER EE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC4B,
+ },
+ [0x1BC4C]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN EH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC4C,
+ },
+ [0x1BC4D]={
+ category="lo",
+ description="DUPLOYAN LETTER ROMANIAN I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC4D,
+ },
+ [0x1BC4E]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN EE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC4E,
+ },
+ [0x1BC4F]={
+ category="lo",
+ description="DUPLOYAN LETTER LONG I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC4F,
+ },
+ [0x1BC50]={
+ category="lo",
+ description="DUPLOYAN LETTER YE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC50,
+ },
+ [0x1BC51]={
+ category="lo",
+ description="DUPLOYAN LETTER U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC51,
+ },
+ [0x1BC52]={
+ category="lo",
+ description="DUPLOYAN LETTER EU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC52,
+ },
+ [0x1BC53]={
+ category="lo",
+ description="DUPLOYAN LETTER XW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC53,
+ },
+ [0x1BC54]={
+ category="lo",
+ description="DUPLOYAN LETTER U N",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC54,
+ },
+ [0x1BC55]={
+ category="lo",
+ description="DUPLOYAN LETTER LONG U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC55,
+ },
+ [0x1BC56]={
+ category="lo",
+ description="DUPLOYAN LETTER ROMANIAN U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC56,
+ },
+ [0x1BC57]={
+ category="lo",
+ description="DUPLOYAN LETTER UH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC57,
+ },
+ [0x1BC58]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC58,
+ },
+ [0x1BC59]={
+ category="lo",
+ description="DUPLOYAN LETTER OOH",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC59,
+ },
+ [0x1BC5A]={
+ category="lo",
+ description="DUPLOYAN LETTER OW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC5A,
+ },
+ [0x1BC5B]={
+ category="lo",
+ description="DUPLOYAN LETTER OU",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC5B,
+ },
+ [0x1BC5C]={
+ category="lo",
+ description="DUPLOYAN LETTER WA",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC5C,
+ },
+ [0x1BC5D]={
+ category="lo",
+ description="DUPLOYAN LETTER WO",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC5D,
+ },
+ [0x1BC5E]={
+ category="lo",
+ description="DUPLOYAN LETTER WI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC5E,
+ },
+ [0x1BC5F]={
+ category="lo",
+ description="DUPLOYAN LETTER WEI",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC5F,
+ },
+ [0x1BC60]={
+ category="lo",
+ description="DUPLOYAN LETTER WOW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC60,
+ },
+ [0x1BC61]={
+ category="lo",
+ description="DUPLOYAN LETTER NASAL U",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC61,
+ },
+ [0x1BC62]={
+ category="lo",
+ description="DUPLOYAN LETTER NASAL O",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC62,
+ },
+ [0x1BC63]={
+ category="lo",
+ description="DUPLOYAN LETTER NASAL I",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC63,
+ },
+ [0x1BC64]={
+ category="lo",
+ description="DUPLOYAN LETTER NASAL A",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC64,
+ },
+ [0x1BC65]={
+ category="lo",
+ description="DUPLOYAN LETTER PERNIN AN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC65,
+ },
+ [0x1BC66]={
+ category="lo",
+ description="DUPLOYAN LETTER PERNIN AM",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC66,
+ },
+ [0x1BC67]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN EN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC67,
+ },
+ [0x1BC68]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN AN",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC68,
+ },
+ [0x1BC69]={
+ category="lo",
+ description="DUPLOYAN LETTER SLOAN ON",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC69,
+ },
+ [0x1BC6A]={
+ category="lo",
+ description="DUPLOYAN LETTER VOCALIC M",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC6A,
+ },
+ [0x1BC70]={
+ category="lo",
+ description="DUPLOYAN AFFIX LEFT HORIZONTAL SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC70,
+ },
+ [0x1BC71]={
+ category="lo",
+ description="DUPLOYAN AFFIX MID HORIZONTAL SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC71,
+ },
+ [0x1BC72]={
+ category="lo",
+ description="DUPLOYAN AFFIX RIGHT HORIZONTAL SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC72,
+ },
+ [0x1BC73]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW VERTICAL SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC73,
+ },
+ [0x1BC74]={
+ category="lo",
+ description="DUPLOYAN AFFIX MID VERTICAL SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC74,
+ },
+ [0x1BC75]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH VERTICAL SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC75,
+ },
+ [0x1BC76]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC76,
+ },
+ [0x1BC77]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED LEFT-TO-RIGHT SECANT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC77,
+ },
+ [0x1BC78]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED TANGENT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC78,
+ },
+ [0x1BC79]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED TAIL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC79,
+ },
+ [0x1BC7A]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED E HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC7A,
+ },
+ [0x1BC7B]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED I HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC7B,
+ },
+ [0x1BC7C]={
+ category="lo",
+ description="DUPLOYAN AFFIX ATTACHED TANGENT HOOK",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC7C,
+ },
+ [0x1BC80]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH ACUTE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC80,
+ },
+ [0x1BC81]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH TIGHT ACUTE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC81,
+ },
+ [0x1BC82]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH GRAVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC82,
+ },
+ [0x1BC83]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH LONG GRAVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC83,
+ },
+ [0x1BC84]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC84,
+ },
+ [0x1BC85]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH CIRCLE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC85,
+ },
+ [0x1BC86]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH LINE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC86,
+ },
+ [0x1BC87]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH WAVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC87,
+ },
+ [0x1BC88]={
+ category="lo",
+ description="DUPLOYAN AFFIX HIGH VERTICAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC88,
+ },
+ [0x1BC90]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW ACUTE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC90,
+ },
+ [0x1BC91]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW TIGHT ACUTE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC91,
+ },
+ [0x1BC92]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW GRAVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC92,
+ },
+ [0x1BC93]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW LONG GRAVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC93,
+ },
+ [0x1BC94]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW DOT",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC94,
+ },
+ [0x1BC95]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW CIRCLE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC95,
+ },
+ [0x1BC96]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW LINE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC96,
+ },
+ [0x1BC97]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW WAVE",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC97,
+ },
+ [0x1BC98]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW VERTICAL",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC98,
+ },
+ [0x1BC99]={
+ category="lo",
+ description="DUPLOYAN AFFIX LOW ARROW",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC99,
+ },
+ [0x1BC9C]={
+ category="so",
+ description="DUPLOYAN SIGN O WITH CROSS",
+ direction="l",
+ linebreak="al",
+ unicodeslot=0x1BC9C,
+ },
+ [0x1BC9D]={
+ category="mn",
+ description="DUPLOYAN THICK LETTER SELECTOR",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1BC9D,
+ },
+ [0x1BC9E]={
+ category="mn",
+ combining=0x1,
+ description="DUPLOYAN DOUBLE MARK",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1BC9E,
+ },
+ [0x1BC9F]={
+ category="po",
+ description="DUPLOYAN PUNCTUATION CHINOOK FULL STOP",
+ direction="l",
+ linebreak="ba",
+ unicodeslot=0x1BC9F,
+ },
+ [0x1BCA0]={
+ category="cf",
+ description="SHORTHAND FORMAT LETTER OVERLAP",
+ direction="bn",
+ linebreak="cm",
+ unicodeslot=0x1BCA0,
+ },
+ [0x1BCA1]={
+ category="cf",
+ description="SHORTHAND FORMAT CONTINUING OVERLAP",
+ direction="bn",
+ linebreak="cm",
+ unicodeslot=0x1BCA1,
+ },
+ [0x1BCA2]={
+ category="cf",
+ description="SHORTHAND FORMAT DOWN STEP",
+ direction="bn",
+ linebreak="cm",
+ unicodeslot=0x1BCA2,
+ },
+ [0x1BCA3]={
+ category="cf",
+ description="SHORTHAND FORMAT UP STEP",
+ direction="bn",
+ linebreak="cm",
+ unicodeslot=0x1BCA3,
+ },
[0x1D000]={
category="so",
description="BYZANTINE MUSICAL SYMBOL PSILI",
@@ -165410,6 +181617,7 @@ characters.data={
},
[0x1D165]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING STEM",
direction="l",
linebreak="cm",
@@ -165417,6 +181625,7 @@ characters.data={
},
[0x1D166]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING SPRECHGESANG STEM",
direction="l",
linebreak="cm",
@@ -165424,6 +181633,7 @@ characters.data={
},
[0x1D167]={
category="mn",
+ combining=0x1,
description="MUSICAL SYMBOL COMBINING TREMOLO-0x0001",
direction="nsm",
linebreak="cm",
@@ -165431,6 +181641,7 @@ characters.data={
},
[0x1D168]={
category="mn",
+ combining=0x1,
description="MUSICAL SYMBOL COMBINING TREMOLO-0x0002",
direction="nsm",
linebreak="cm",
@@ -165438,6 +181649,7 @@ characters.data={
},
[0x1D169]={
category="mn",
+ combining=0x1,
description="MUSICAL SYMBOL COMBINING TREMOLO-0x0003",
direction="nsm",
linebreak="cm",
@@ -165466,6 +181678,7 @@ characters.data={
},
[0x1D16D]={
category="mc",
+ combining=0xE2,
description="MUSICAL SYMBOL COMBINING AUGMENTATION DOT",
direction="l",
linebreak="cm",
@@ -165473,6 +181686,7 @@ characters.data={
},
[0x1D16E]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING FLAG-0x0001",
direction="l",
linebreak="cm",
@@ -165480,6 +181694,7 @@ characters.data={
},
[0x1D16F]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING FLAG-0x0002",
direction="l",
linebreak="cm",
@@ -165487,6 +181702,7 @@ characters.data={
},
[0x1D170]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING FLAG-0x0003",
direction="l",
linebreak="cm",
@@ -165494,6 +181710,7 @@ characters.data={
},
[0x1D171]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING FLAG-0x0004",
direction="l",
linebreak="cm",
@@ -165501,6 +181718,7 @@ characters.data={
},
[0x1D172]={
category="mc",
+ combining=0xD8,
description="MUSICAL SYMBOL COMBINING FLAG-0x0005",
direction="l",
linebreak="cm",
@@ -165564,6 +181782,7 @@ characters.data={
},
[0x1D17B]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING ACCENT",
direction="nsm",
linebreak="cm",
@@ -165571,6 +181790,7 @@ characters.data={
},
[0x1D17C]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING STACCATO",
direction="nsm",
linebreak="cm",
@@ -165578,6 +181798,7 @@ characters.data={
},
[0x1D17D]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING TENUTO",
direction="nsm",
linebreak="cm",
@@ -165585,6 +181806,7 @@ characters.data={
},
[0x1D17E]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING STACCATISSIMO",
direction="nsm",
linebreak="cm",
@@ -165592,6 +181814,7 @@ characters.data={
},
[0x1D17F]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING MARCATO",
direction="nsm",
linebreak="cm",
@@ -165599,6 +181822,7 @@ characters.data={
},
[0x1D180]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING MARCATO-STACCATO",
direction="nsm",
linebreak="cm",
@@ -165606,6 +181830,7 @@ characters.data={
},
[0x1D181]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING ACCENT-STACCATO",
direction="nsm",
linebreak="cm",
@@ -165613,6 +181838,7 @@ characters.data={
},
[0x1D182]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING LOURE",
direction="nsm",
linebreak="cm",
@@ -165634,6 +181860,7 @@ characters.data={
},
[0x1D185]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING DOIT",
direction="nsm",
linebreak="cm",
@@ -165641,6 +181868,7 @@ characters.data={
},
[0x1D186]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING RIP",
direction="nsm",
linebreak="cm",
@@ -165648,6 +181876,7 @@ characters.data={
},
[0x1D187]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING FLIP",
direction="nsm",
linebreak="cm",
@@ -165655,6 +181884,7 @@ characters.data={
},
[0x1D188]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING SMEAR",
direction="nsm",
linebreak="cm",
@@ -165662,6 +181892,7 @@ characters.data={
},
[0x1D189]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING BEND",
direction="nsm",
linebreak="cm",
@@ -165669,6 +181900,7 @@ characters.data={
},
[0x1D18A]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING DOUBLE TONGUE",
direction="nsm",
linebreak="cm",
@@ -165676,6 +181908,7 @@ characters.data={
},
[0x1D18B]={
category="mn",
+ combining=0xDC,
description="MUSICAL SYMBOL COMBINING TRIPLE TONGUE",
direction="nsm",
linebreak="cm",
@@ -165893,6 +182126,7 @@ characters.data={
},
[0x1D1AA]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING DOWN BOW",
direction="nsm",
linebreak="cm",
@@ -165900,6 +182134,7 @@ characters.data={
},
[0x1D1AB]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING UP BOW",
direction="nsm",
linebreak="cm",
@@ -165907,6 +182142,7 @@ characters.data={
},
[0x1D1AC]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING HARMONIC",
direction="nsm",
linebreak="cm",
@@ -165914,6 +182150,7 @@ characters.data={
},
[0x1D1AD]={
category="mn",
+ combining=0xE6,
description="MUSICAL SYMBOL COMBINING SNAP PIZZICATO",
direction="nsm",
linebreak="cm",
@@ -166725,6 +182962,7 @@ characters.data={
},
[0x1D242]={
category="mn",
+ combining=0xE6,
description="COMBINING GREEK MUSICAL TRISEME",
direction="nsm",
linebreak="cm",
@@ -166732,6 +182970,7 @@ characters.data={
},
[0x1D243]={
category="mn",
+ combining=0xE6,
description="COMBINING GREEK MUSICAL TETRASEME",
direction="nsm",
linebreak="cm",
@@ -166739,6 +182978,7 @@ characters.data={
},
[0x1D244]={
category="mn",
+ combining=0xE6,
description="COMBINING GREEK MUSICAL PENTASEME",
direction="nsm",
linebreak="cm",
@@ -167491,7 +183731,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D400,
visual="bf",
},
@@ -167500,7 +183740,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D401,
visual="bf",
},
@@ -167509,7 +183749,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D402,
visual="bf",
},
@@ -167518,7 +183758,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D403,
visual="bf",
},
@@ -167527,7 +183767,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D404,
visual="bf",
},
@@ -167536,7 +183776,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D405,
visual="bf",
},
@@ -167545,7 +183785,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D406,
visual="bf",
},
@@ -167554,7 +183794,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D407,
visual="bf",
},
@@ -167563,7 +183803,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D408,
visual="bf",
},
@@ -167572,7 +183812,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D409,
visual="bf",
},
@@ -167581,7 +183821,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D40A,
visual="bf",
},
@@ -167590,7 +183830,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D40B,
visual="bf",
},
@@ -167599,7 +183839,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D40C,
visual="bf",
},
@@ -167608,7 +183848,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D40D,
visual="bf",
},
@@ -167617,7 +183857,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D40E,
visual="bf",
},
@@ -167626,7 +183866,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D40F,
visual="bf",
},
@@ -167635,7 +183875,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D410,
visual="bf",
},
@@ -167644,7 +183884,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D411,
visual="bf",
},
@@ -167653,7 +183893,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D412,
visual="bf",
},
@@ -167662,7 +183902,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D413,
visual="bf",
},
@@ -167671,7 +183911,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D414,
visual="bf",
},
@@ -167680,7 +183920,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D415,
visual="bf",
},
@@ -167689,7 +183929,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D416,
visual="bf",
},
@@ -167698,7 +183938,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D417,
visual="bf",
},
@@ -167707,7 +183947,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D418,
visual="bf",
},
@@ -167716,7 +183956,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D419,
visual="bf",
},
@@ -167725,7 +183965,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D41A,
visual="bf",
},
@@ -167734,7 +183974,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D41B,
visual="bf",
},
@@ -167743,7 +183983,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D41C,
visual="bf",
},
@@ -167752,7 +183992,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D41D,
visual="bf",
},
@@ -167761,7 +184001,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D41E,
visual="bf",
},
@@ -167770,7 +184010,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D41F,
visual="bf",
},
@@ -167779,7 +184019,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D420,
visual="bf",
},
@@ -167788,7 +184028,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D421,
visual="bf",
},
@@ -167797,7 +184037,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D422,
visual="bf",
},
@@ -167806,7 +184046,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D423,
visual="bf",
},
@@ -167815,7 +184055,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D424,
visual="bf",
},
@@ -167824,7 +184064,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D425,
visual="bf",
},
@@ -167833,7 +184073,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D426,
visual="bf",
},
@@ -167842,7 +184082,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D427,
visual="bf",
},
@@ -167851,7 +184091,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D428,
visual="bf",
},
@@ -167860,7 +184100,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D429,
visual="bf",
},
@@ -167869,7 +184109,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D42A,
visual="bf",
},
@@ -167878,7 +184118,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D42B,
visual="bf",
},
@@ -167887,7 +184127,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D42C,
visual="bf",
},
@@ -167896,7 +184136,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D42D,
visual="bf",
},
@@ -167905,7 +184145,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D42E,
visual="bf",
},
@@ -167914,7 +184154,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D42F,
visual="bf",
},
@@ -167923,7 +184163,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D430,
visual="bf",
},
@@ -167932,7 +184172,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D431,
visual="bf",
},
@@ -167941,7 +184181,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D432,
visual="bf",
},
@@ -167950,7 +184190,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D433,
visual="bf",
},
@@ -167959,7 +184199,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D434,
visual="it",
},
@@ -167968,7 +184208,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D435,
visual="it",
},
@@ -167977,7 +184217,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D436,
visual="it",
},
@@ -167986,7 +184226,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D437,
visual="it",
},
@@ -167995,7 +184235,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D438,
visual="it",
},
@@ -168004,7 +184244,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D439,
visual="it",
},
@@ -168013,7 +184253,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D43A,
visual="it",
},
@@ -168022,7 +184262,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D43B,
visual="it",
},
@@ -168031,7 +184271,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D43C,
visual="it",
},
@@ -168040,7 +184280,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D43D,
visual="it",
},
@@ -168049,7 +184289,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D43E,
visual="it",
},
@@ -168058,7 +184298,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D43F,
visual="it",
},
@@ -168067,7 +184307,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D440,
visual="it",
},
@@ -168076,7 +184316,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D441,
visual="it",
},
@@ -168085,7 +184325,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D442,
visual="it",
},
@@ -168094,7 +184334,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D443,
visual="it",
},
@@ -168103,7 +184343,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D444,
visual="it",
},
@@ -168112,7 +184352,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D445,
visual="it",
},
@@ -168121,7 +184361,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D446,
visual="it",
},
@@ -168130,7 +184370,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D447,
visual="it",
},
@@ -168139,7 +184379,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D448,
visual="it",
},
@@ -168148,7 +184388,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D449,
visual="it",
},
@@ -168157,7 +184397,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D44A,
visual="it",
},
@@ -168166,7 +184406,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D44B,
visual="it",
},
@@ -168175,7 +184415,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D44C,
visual="it",
},
@@ -168184,7 +184424,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D44D,
visual="it",
},
@@ -168193,7 +184433,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D44E,
visual="it",
},
@@ -168202,7 +184442,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D44F,
visual="it",
},
@@ -168211,7 +184451,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D450,
visual="it",
},
@@ -168220,7 +184460,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D451,
visual="it",
},
@@ -168229,7 +184469,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D452,
visual="it",
},
@@ -168238,7 +184478,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D453,
visual="it",
},
@@ -168247,7 +184487,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D454,
visual="it",
},
@@ -168256,7 +184496,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D456,
visual="it",
},
@@ -168265,7 +184505,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D457,
visual="it",
},
@@ -168274,7 +184514,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D458,
visual="it",
},
@@ -168283,7 +184523,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D459,
visual="it",
},
@@ -168292,7 +184532,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D45A,
visual="it",
},
@@ -168301,7 +184541,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D45B,
visual="it",
},
@@ -168310,7 +184550,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D45C,
visual="it",
},
@@ -168319,7 +184559,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D45D,
visual="it",
},
@@ -168328,7 +184568,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D45E,
visual="it",
},
@@ -168337,7 +184577,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D45F,
visual="it",
},
@@ -168346,7 +184586,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D460,
visual="it",
},
@@ -168355,7 +184595,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D461,
visual="it",
},
@@ -168364,7 +184604,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D462,
visual="it",
},
@@ -168373,7 +184613,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D463,
visual="it",
},
@@ -168382,7 +184622,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D464,
visual="it",
},
@@ -168391,7 +184631,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D465,
visual="it",
},
@@ -168400,7 +184640,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D466,
visual="it",
},
@@ -168409,7 +184649,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D467,
visual="it",
},
@@ -168418,7 +184658,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D468,
visual="bi",
},
@@ -168427,7 +184667,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D469,
visual="bi",
},
@@ -168436,7 +184676,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D46A,
visual="bi",
},
@@ -168445,7 +184685,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D46B,
visual="bi",
},
@@ -168454,7 +184694,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D46C,
visual="bi",
},
@@ -168463,7 +184703,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D46D,
visual="bi",
},
@@ -168472,7 +184712,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D46E,
visual="bi",
},
@@ -168481,7 +184721,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D46F,
visual="bi",
},
@@ -168490,7 +184730,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D470,
visual="bi",
},
@@ -168499,7 +184739,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D471,
visual="bi",
},
@@ -168508,7 +184748,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D472,
visual="bi",
},
@@ -168517,7 +184757,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D473,
visual="bi",
},
@@ -168526,7 +184766,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D474,
visual="bi",
},
@@ -168535,7 +184775,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D475,
visual="bi",
},
@@ -168544,7 +184784,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D476,
visual="bi",
},
@@ -168553,7 +184793,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D477,
visual="bi",
},
@@ -168562,7 +184802,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D478,
visual="bi",
},
@@ -168571,7 +184811,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D479,
visual="bi",
},
@@ -168580,7 +184820,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D47A,
visual="bi",
},
@@ -168589,7 +184829,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D47B,
visual="bi",
},
@@ -168598,7 +184838,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D47C,
visual="bi",
},
@@ -168607,7 +184847,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D47D,
visual="bi",
},
@@ -168616,7 +184856,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D47E,
visual="bi",
},
@@ -168625,7 +184865,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D47F,
visual="bi",
},
@@ -168634,7 +184874,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D480,
visual="bi",
},
@@ -168643,7 +184883,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D481,
visual="bi",
},
@@ -168652,7 +184892,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D482,
visual="bi",
},
@@ -168661,7 +184901,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D483,
visual="bi",
},
@@ -168670,7 +184910,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D484,
visual="bi",
},
@@ -168679,7 +184919,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D485,
visual="bi",
},
@@ -168688,7 +184928,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D486,
visual="bi",
},
@@ -168697,7 +184937,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D487,
visual="bi",
},
@@ -168706,7 +184946,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D488,
visual="bi",
},
@@ -168715,7 +184955,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D489,
visual="bi",
},
@@ -168724,7 +184964,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D48A,
visual="bi",
},
@@ -168733,7 +184973,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D48B,
visual="bi",
},
@@ -168742,7 +184982,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D48C,
visual="bi",
},
@@ -168751,7 +184991,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D48D,
visual="bi",
},
@@ -168760,7 +185000,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D48E,
visual="bi",
},
@@ -168769,7 +185009,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D48F,
visual="bi",
},
@@ -168778,7 +185018,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D490,
visual="bi",
},
@@ -168787,7 +185027,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D491,
visual="bi",
},
@@ -168796,7 +185036,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D492,
visual="bi",
},
@@ -168805,7 +185045,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D493,
visual="bi",
},
@@ -168814,7 +185054,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D494,
visual="bi",
},
@@ -168823,7 +185063,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D495,
visual="bi",
},
@@ -168832,7 +185072,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D496,
visual="bi",
},
@@ -168841,7 +185081,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D497,
visual="bi",
},
@@ -168850,7 +185090,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D498,
visual="bi",
},
@@ -168859,7 +185099,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D499,
visual="bi",
},
@@ -168868,7 +185108,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D49A,
visual="bi",
},
@@ -168877,7 +185117,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D49B,
visual="bi",
},
@@ -168886,7 +185126,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D49C,
},
[0x1D49E]={
@@ -168894,7 +185134,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D49E,
},
[0x1D49F]={
@@ -168902,7 +185142,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D49F,
},
[0x1D4A2]={
@@ -168910,7 +185150,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D4A2,
},
[0x1D4A5]={
@@ -168918,7 +185158,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D4A5,
},
[0x1D4A6]={
@@ -168926,7 +185166,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D4A6,
},
[0x1D4A9]={
@@ -168934,7 +185174,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D4A9,
},
[0x1D4AA]={
@@ -168942,7 +185182,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D4AA,
},
[0x1D4AB]={
@@ -168950,7 +185190,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D4AB,
},
[0x1D4AC]={
@@ -168958,7 +185198,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D4AC,
},
[0x1D4AE]={
@@ -168966,7 +185206,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D4AE,
},
[0x1D4AF]={
@@ -168974,7 +185214,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D4AF,
},
[0x1D4B0]={
@@ -168982,7 +185222,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D4B0,
},
[0x1D4B1]={
@@ -168990,7 +185230,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D4B1,
},
[0x1D4B2]={
@@ -168998,7 +185238,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D4B2,
},
[0x1D4B3]={
@@ -169006,7 +185246,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D4B3,
},
[0x1D4B4]={
@@ -169014,7 +185254,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D4B4,
},
[0x1D4B5]={
@@ -169022,7 +185262,7 @@ characters.data={
description="MATHEMATICAL SCRIPT CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D4B5,
},
[0x1D4B6]={
@@ -169030,7 +185270,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D4B6,
},
[0x1D4B7]={
@@ -169038,7 +185278,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D4B7,
},
[0x1D4B8]={
@@ -169046,7 +185286,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D4B8,
},
[0x1D4B9]={
@@ -169054,7 +185294,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D4B9,
},
[0x1D4BB]={
@@ -169062,7 +185302,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D4BB,
},
[0x1D4BD]={
@@ -169070,7 +185310,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D4BD,
},
[0x1D4BE]={
@@ -169078,7 +185318,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D4BE,
},
[0x1D4BF]={
@@ -169086,7 +185326,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D4BF,
},
[0x1D4C0]={
@@ -169094,7 +185334,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D4C0,
},
[0x1D4C1]={
@@ -169102,7 +185342,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D4C1,
},
[0x1D4C2]={
@@ -169110,7 +185350,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D4C2,
},
[0x1D4C3]={
@@ -169118,7 +185358,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D4C3,
},
[0x1D4C5]={
@@ -169126,7 +185366,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D4C5,
},
[0x1D4C6]={
@@ -169134,7 +185374,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D4C6,
},
[0x1D4C7]={
@@ -169142,7 +185382,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D4C7,
},
[0x1D4C8]={
@@ -169150,7 +185390,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D4C8,
},
[0x1D4C9]={
@@ -169158,7 +185398,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D4C9,
},
[0x1D4CA]={
@@ -169166,7 +185406,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D4CA,
},
[0x1D4CB]={
@@ -169174,7 +185414,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D4CB,
},
[0x1D4CC]={
@@ -169182,7 +185422,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D4CC,
},
[0x1D4CD]={
@@ -169190,7 +185430,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D4CD,
},
[0x1D4CE]={
@@ -169198,7 +185438,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D4CE,
},
[0x1D4CF]={
@@ -169206,7 +185446,7 @@ characters.data={
description="MATHEMATICAL SCRIPT SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D4CF,
},
[0x1D4D0]={
@@ -169214,7 +185454,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D4D0,
visual="bf",
},
@@ -169223,7 +185463,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D4D1,
visual="bf",
},
@@ -169232,7 +185472,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D4D2,
visual="bf",
},
@@ -169241,7 +185481,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D4D3,
visual="bf",
},
@@ -169250,7 +185490,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D4D4,
visual="bf",
},
@@ -169259,7 +185499,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D4D5,
visual="bf",
},
@@ -169268,7 +185508,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D4D6,
visual="bf",
},
@@ -169277,7 +185517,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D4D7,
visual="bf",
},
@@ -169286,7 +185526,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D4D8,
visual="bf",
},
@@ -169295,7 +185535,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D4D9,
visual="bf",
},
@@ -169304,7 +185544,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D4DA,
visual="bf",
},
@@ -169313,7 +185553,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D4DB,
visual="bf",
},
@@ -169322,7 +185562,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D4DC,
visual="bf",
},
@@ -169331,7 +185571,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D4DD,
visual="bf",
},
@@ -169340,7 +185580,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D4DE,
visual="bf",
},
@@ -169349,7 +185589,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D4DF,
visual="bf",
},
@@ -169358,7 +185598,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D4E0,
visual="bf",
},
@@ -169367,7 +185607,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D4E1,
visual="bf",
},
@@ -169376,7 +185616,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D4E2,
visual="bf",
},
@@ -169385,7 +185625,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D4E3,
visual="bf",
},
@@ -169394,7 +185634,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D4E4,
visual="bf",
},
@@ -169403,7 +185643,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D4E5,
visual="bf",
},
@@ -169412,7 +185652,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D4E6,
visual="bf",
},
@@ -169421,7 +185661,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D4E7,
visual="bf",
},
@@ -169430,7 +185670,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D4E8,
visual="bf",
},
@@ -169439,7 +185679,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D4E9,
visual="bf",
},
@@ -169448,7 +185688,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D4EA,
visual="bf",
},
@@ -169457,7 +185697,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D4EB,
visual="bf",
},
@@ -169466,7 +185706,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D4EC,
visual="bf",
},
@@ -169475,7 +185715,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D4ED,
visual="bf",
},
@@ -169484,7 +185724,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D4EE,
visual="bf",
},
@@ -169493,7 +185733,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D4EF,
visual="bf",
},
@@ -169502,7 +185742,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D4F0,
visual="bf",
},
@@ -169511,7 +185751,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D4F1,
visual="bf",
},
@@ -169520,7 +185760,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D4F2,
visual="bf",
},
@@ -169529,7 +185769,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D4F3,
visual="bf",
},
@@ -169538,7 +185778,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D4F4,
visual="bf",
},
@@ -169547,7 +185787,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D4F5,
visual="bf",
},
@@ -169556,7 +185796,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D4F6,
visual="bf",
},
@@ -169565,7 +185805,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D4F7,
visual="bf",
},
@@ -169574,7 +185814,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D4F8,
visual="bf",
},
@@ -169583,7 +185823,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D4F9,
visual="bf",
},
@@ -169592,7 +185832,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D4FA,
visual="bf",
},
@@ -169601,7 +185841,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D4FB,
visual="bf",
},
@@ -169610,7 +185850,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D4FC,
visual="bf",
},
@@ -169619,7 +185859,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D4FD,
visual="bf",
},
@@ -169628,7 +185868,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D4FE,
visual="bf",
},
@@ -169637,7 +185877,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D4FF,
visual="bf",
},
@@ -169646,7 +185886,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D500,
visual="bf",
},
@@ -169655,7 +185895,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D501,
visual="bf",
},
@@ -169664,7 +185904,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D502,
visual="bf",
},
@@ -169673,7 +185913,7 @@ characters.data={
description="MATHEMATICAL BOLD SCRIPT SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D503,
visual="bf",
},
@@ -169682,7 +185922,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D504,
},
[0x1D505]={
@@ -169690,7 +185930,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D505,
},
[0x1D507]={
@@ -169698,7 +185938,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D507,
},
[0x1D508]={
@@ -169706,7 +185946,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D508,
},
[0x1D509]={
@@ -169714,7 +185954,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D509,
},
[0x1D50A]={
@@ -169722,7 +185962,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D50A,
},
[0x1D50D]={
@@ -169730,7 +185970,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D50D,
},
[0x1D50E]={
@@ -169738,7 +185978,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D50E,
},
[0x1D50F]={
@@ -169746,7 +185986,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D50F,
},
[0x1D510]={
@@ -169754,7 +185994,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D510,
},
[0x1D511]={
@@ -169762,7 +186002,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D511,
},
[0x1D512]={
@@ -169770,7 +186010,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D512,
},
[0x1D513]={
@@ -169778,7 +186018,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D513,
},
[0x1D514]={
@@ -169786,7 +186026,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D514,
},
[0x1D516]={
@@ -169794,7 +186034,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D516,
},
[0x1D517]={
@@ -169802,7 +186042,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D517,
},
[0x1D518]={
@@ -169810,7 +186050,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D518,
},
[0x1D519]={
@@ -169818,7 +186058,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D519,
},
[0x1D51A]={
@@ -169826,7 +186066,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D51A,
},
[0x1D51B]={
@@ -169834,7 +186074,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D51B,
},
[0x1D51C]={
@@ -169842,7 +186082,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D51C,
},
[0x1D51E]={
@@ -169850,7 +186090,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D51E,
},
[0x1D51F]={
@@ -169858,7 +186098,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D51F,
},
[0x1D520]={
@@ -169866,7 +186106,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D520,
},
[0x1D521]={
@@ -169874,7 +186114,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D521,
},
[0x1D522]={
@@ -169882,7 +186122,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D522,
},
[0x1D523]={
@@ -169890,7 +186130,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D523,
},
[0x1D524]={
@@ -169898,7 +186138,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D524,
},
[0x1D525]={
@@ -169906,7 +186146,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D525,
},
[0x1D526]={
@@ -169914,7 +186154,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D526,
},
[0x1D527]={
@@ -169922,7 +186162,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D527,
},
[0x1D528]={
@@ -169930,7 +186170,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D528,
},
[0x1D529]={
@@ -169938,7 +186178,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D529,
},
[0x1D52A]={
@@ -169946,7 +186186,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D52A,
},
[0x1D52B]={
@@ -169954,7 +186194,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D52B,
},
[0x1D52C]={
@@ -169962,7 +186202,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D52C,
},
[0x1D52D]={
@@ -169970,7 +186210,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D52D,
},
[0x1D52E]={
@@ -169978,7 +186218,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D52E,
},
[0x1D52F]={
@@ -169986,7 +186226,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D52F,
},
[0x1D530]={
@@ -169994,7 +186234,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D530,
},
[0x1D531]={
@@ -170002,7 +186242,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D531,
},
[0x1D532]={
@@ -170010,7 +186250,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D532,
},
[0x1D533]={
@@ -170018,7 +186258,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D533,
},
[0x1D534]={
@@ -170026,7 +186266,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D534,
},
[0x1D535]={
@@ -170034,7 +186274,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D535,
},
[0x1D536]={
@@ -170042,7 +186282,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D536,
},
[0x1D537]={
@@ -170050,7 +186290,7 @@ characters.data={
description="MATHEMATICAL FRAKTUR SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D537,
},
[0x1D538]={
@@ -170058,7 +186298,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D538,
},
[0x1D539]={
@@ -170066,7 +186306,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D539,
},
[0x1D53B]={
@@ -170074,7 +186314,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D53B,
},
[0x1D53C]={
@@ -170082,7 +186322,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D53C,
},
[0x1D53D]={
@@ -170090,7 +186330,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D53D,
},
[0x1D53E]={
@@ -170098,7 +186338,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D53E,
},
[0x1D540]={
@@ -170106,7 +186346,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D540,
},
[0x1D541]={
@@ -170114,7 +186354,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D541,
},
[0x1D542]={
@@ -170122,7 +186362,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D542,
},
[0x1D543]={
@@ -170130,7 +186370,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D543,
},
[0x1D544]={
@@ -170138,7 +186378,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D544,
},
[0x1D546]={
@@ -170146,7 +186386,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D546,
},
[0x1D54A]={
@@ -170154,7 +186394,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D54A,
},
[0x1D54B]={
@@ -170162,7 +186402,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D54B,
},
[0x1D54C]={
@@ -170170,7 +186410,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D54C,
},
[0x1D54D]={
@@ -170178,7 +186418,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D54D,
},
[0x1D54E]={
@@ -170186,7 +186426,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D54E,
},
[0x1D54F]={
@@ -170194,7 +186434,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D54F,
},
[0x1D550]={
@@ -170202,7 +186442,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D550,
},
[0x1D552]={
@@ -170210,7 +186450,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D552,
},
[0x1D553]={
@@ -170218,7 +186458,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D553,
},
[0x1D554]={
@@ -170226,7 +186466,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D554,
},
[0x1D555]={
@@ -170234,7 +186474,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D555,
},
[0x1D556]={
@@ -170242,7 +186482,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D556,
},
[0x1D557]={
@@ -170250,7 +186490,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D557,
},
[0x1D558]={
@@ -170258,7 +186498,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D558,
},
[0x1D559]={
@@ -170266,7 +186506,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D559,
},
[0x1D55A]={
@@ -170274,7 +186514,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D55A,
},
[0x1D55B]={
@@ -170282,7 +186522,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D55B,
},
[0x1D55C]={
@@ -170290,7 +186530,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D55C,
},
[0x1D55D]={
@@ -170298,7 +186538,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D55D,
},
[0x1D55E]={
@@ -170306,7 +186546,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D55E,
},
[0x1D55F]={
@@ -170314,7 +186554,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D55F,
},
[0x1D560]={
@@ -170322,7 +186562,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D560,
},
[0x1D561]={
@@ -170330,7 +186570,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D561,
},
[0x1D562]={
@@ -170338,7 +186578,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D562,
},
[0x1D563]={
@@ -170346,7 +186586,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D563,
},
[0x1D564]={
@@ -170354,7 +186594,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D564,
},
[0x1D565]={
@@ -170362,7 +186602,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D565,
},
[0x1D566]={
@@ -170370,7 +186610,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D566,
},
[0x1D567]={
@@ -170378,7 +186618,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D567,
},
[0x1D568]={
@@ -170386,7 +186626,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D568,
},
[0x1D569]={
@@ -170394,7 +186634,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D569,
},
[0x1D56A]={
@@ -170402,7 +186642,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D56A,
},
[0x1D56B]={
@@ -170410,7 +186650,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D56B,
},
[0x1D56C]={
@@ -170418,7 +186658,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D56C,
visual="bf",
},
@@ -170427,7 +186667,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D56D,
visual="bf",
},
@@ -170436,7 +186676,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D56E,
visual="bf",
},
@@ -170445,7 +186685,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D56F,
visual="bf",
},
@@ -170454,7 +186694,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D570,
visual="bf",
},
@@ -170463,7 +186703,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D571,
visual="bf",
},
@@ -170472,7 +186712,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D572,
visual="bf",
},
@@ -170481,7 +186721,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D573,
visual="bf",
},
@@ -170490,7 +186730,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D574,
visual="bf",
},
@@ -170499,7 +186739,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D575,
visual="bf",
},
@@ -170508,7 +186748,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D576,
visual="bf",
},
@@ -170517,7 +186757,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D577,
visual="bf",
},
@@ -170526,7 +186766,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D578,
visual="bf",
},
@@ -170535,7 +186775,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D579,
visual="bf",
},
@@ -170544,7 +186784,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D57A,
visual="bf",
},
@@ -170553,7 +186793,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D57B,
visual="bf",
},
@@ -170562,7 +186802,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D57C,
visual="bf",
},
@@ -170571,7 +186811,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D57D,
visual="bf",
},
@@ -170580,7 +186820,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D57E,
visual="bf",
},
@@ -170589,7 +186829,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D57F,
visual="bf",
},
@@ -170598,7 +186838,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D580,
visual="bf",
},
@@ -170607,7 +186847,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D581,
visual="bf",
},
@@ -170616,7 +186856,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D582,
visual="bf",
},
@@ -170625,7 +186865,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D583,
visual="bf",
},
@@ -170634,7 +186874,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D584,
visual="bf",
},
@@ -170643,7 +186883,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D585,
visual="bf",
},
@@ -170652,7 +186892,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D586,
visual="bf",
},
@@ -170661,7 +186901,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D587,
visual="bf",
},
@@ -170670,7 +186910,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D588,
visual="bf",
},
@@ -170679,7 +186919,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D589,
visual="bf",
},
@@ -170688,7 +186928,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D58A,
visual="bf",
},
@@ -170697,7 +186937,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D58B,
visual="bf",
},
@@ -170706,7 +186946,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D58C,
visual="bf",
},
@@ -170715,7 +186955,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D58D,
visual="bf",
},
@@ -170724,7 +186964,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D58E,
visual="bf",
},
@@ -170733,7 +186973,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D58F,
visual="bf",
},
@@ -170742,7 +186982,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D590,
visual="bf",
},
@@ -170751,7 +186991,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D591,
visual="bf",
},
@@ -170760,7 +187000,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D592,
visual="bf",
},
@@ -170769,7 +187009,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D593,
visual="bf",
},
@@ -170778,7 +187018,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D594,
visual="bf",
},
@@ -170787,7 +187027,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D595,
visual="bf",
},
@@ -170796,7 +187036,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D596,
visual="bf",
},
@@ -170805,7 +187045,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D597,
visual="bf",
},
@@ -170814,7 +187054,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D598,
visual="bf",
},
@@ -170823,7 +187063,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D599,
visual="bf",
},
@@ -170832,7 +187072,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D59A,
visual="bf",
},
@@ -170841,7 +187081,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D59B,
visual="bf",
},
@@ -170850,7 +187090,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D59C,
visual="bf",
},
@@ -170859,7 +187099,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D59D,
visual="bf",
},
@@ -170868,7 +187108,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D59E,
visual="bf",
},
@@ -170877,7 +187117,7 @@ characters.data={
description="MATHEMATICAL BOLD FRAKTUR SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D59F,
visual="bf",
},
@@ -170886,7 +187126,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D5A0,
},
[0x1D5A1]={
@@ -170894,7 +187134,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D5A1,
},
[0x1D5A2]={
@@ -170902,7 +187142,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D5A2,
},
[0x1D5A3]={
@@ -170910,7 +187150,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D5A3,
},
[0x1D5A4]={
@@ -170918,7 +187158,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D5A4,
},
[0x1D5A5]={
@@ -170926,7 +187166,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D5A5,
},
[0x1D5A6]={
@@ -170934,7 +187174,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D5A6,
},
[0x1D5A7]={
@@ -170942,7 +187182,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D5A7,
},
[0x1D5A8]={
@@ -170950,7 +187190,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D5A8,
},
[0x1D5A9]={
@@ -170958,7 +187198,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D5A9,
},
[0x1D5AA]={
@@ -170966,7 +187206,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D5AA,
},
[0x1D5AB]={
@@ -170974,7 +187214,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D5AB,
},
[0x1D5AC]={
@@ -170982,7 +187222,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D5AC,
},
[0x1D5AD]={
@@ -170990,7 +187230,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D5AD,
},
[0x1D5AE]={
@@ -170998,7 +187238,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D5AE,
},
[0x1D5AF]={
@@ -171006,7 +187246,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D5AF,
},
[0x1D5B0]={
@@ -171014,7 +187254,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D5B0,
},
[0x1D5B1]={
@@ -171022,7 +187262,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D5B1,
},
[0x1D5B2]={
@@ -171030,7 +187270,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D5B2,
},
[0x1D5B3]={
@@ -171038,7 +187278,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D5B3,
},
[0x1D5B4]={
@@ -171046,7 +187286,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D5B4,
},
[0x1D5B5]={
@@ -171054,7 +187294,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D5B5,
},
[0x1D5B6]={
@@ -171062,7 +187302,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D5B6,
},
[0x1D5B7]={
@@ -171070,7 +187310,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D5B7,
},
[0x1D5B8]={
@@ -171078,7 +187318,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D5B8,
},
[0x1D5B9]={
@@ -171086,7 +187326,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D5B9,
},
[0x1D5BA]={
@@ -171094,7 +187334,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D5BA,
},
[0x1D5BB]={
@@ -171102,7 +187342,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D5BB,
},
[0x1D5BC]={
@@ -171110,7 +187350,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D5BC,
},
[0x1D5BD]={
@@ -171118,7 +187358,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D5BD,
},
[0x1D5BE]={
@@ -171126,7 +187366,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D5BE,
},
[0x1D5BF]={
@@ -171134,7 +187374,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D5BF,
},
[0x1D5C0]={
@@ -171142,7 +187382,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D5C0,
},
[0x1D5C1]={
@@ -171150,7 +187390,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D5C1,
},
[0x1D5C2]={
@@ -171158,7 +187398,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D5C2,
},
[0x1D5C3]={
@@ -171166,7 +187406,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D5C3,
},
[0x1D5C4]={
@@ -171174,7 +187414,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D5C4,
},
[0x1D5C5]={
@@ -171182,7 +187422,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D5C5,
},
[0x1D5C6]={
@@ -171190,7 +187430,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D5C6,
},
[0x1D5C7]={
@@ -171198,7 +187438,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D5C7,
},
[0x1D5C8]={
@@ -171206,7 +187446,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D5C8,
},
[0x1D5C9]={
@@ -171214,7 +187454,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D5C9,
},
[0x1D5CA]={
@@ -171222,7 +187462,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D5CA,
},
[0x1D5CB]={
@@ -171230,7 +187470,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D5CB,
},
[0x1D5CC]={
@@ -171238,7 +187478,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D5CC,
},
[0x1D5CD]={
@@ -171246,7 +187486,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D5CD,
},
[0x1D5CE]={
@@ -171254,7 +187494,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D5CE,
},
[0x1D5CF]={
@@ -171262,7 +187502,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D5CF,
},
[0x1D5D0]={
@@ -171270,7 +187510,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D5D0,
},
[0x1D5D1]={
@@ -171278,7 +187518,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D5D1,
},
[0x1D5D2]={
@@ -171286,7 +187526,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D5D2,
},
[0x1D5D3]={
@@ -171294,7 +187534,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D5D3,
},
[0x1D5D4]={
@@ -171302,7 +187542,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D5D4,
visual="bf",
},
@@ -171311,7 +187551,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D5D5,
visual="bf",
},
@@ -171320,7 +187560,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D5D6,
visual="bf",
},
@@ -171329,7 +187569,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D5D7,
visual="bf",
},
@@ -171338,7 +187578,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D5D8,
visual="bf",
},
@@ -171347,7 +187587,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D5D9,
visual="bf",
},
@@ -171356,7 +187596,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D5DA,
visual="bf",
},
@@ -171365,7 +187605,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D5DB,
visual="bf",
},
@@ -171374,7 +187614,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D5DC,
visual="bf",
},
@@ -171383,7 +187623,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D5DD,
visual="bf",
},
@@ -171392,7 +187632,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D5DE,
visual="bf",
},
@@ -171401,7 +187641,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D5DF,
visual="bf",
},
@@ -171410,7 +187650,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D5E0,
visual="bf",
},
@@ -171419,7 +187659,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D5E1,
visual="bf",
},
@@ -171428,7 +187668,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D5E2,
visual="bf",
},
@@ -171437,7 +187677,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D5E3,
visual="bf",
},
@@ -171446,7 +187686,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D5E4,
visual="bf",
},
@@ -171455,7 +187695,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D5E5,
visual="bf",
},
@@ -171464,7 +187704,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D5E6,
visual="bf",
},
@@ -171473,7 +187713,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D5E7,
visual="bf",
},
@@ -171482,7 +187722,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D5E8,
visual="bf",
},
@@ -171491,7 +187731,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D5E9,
visual="bf",
},
@@ -171500,7 +187740,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D5EA,
visual="bf",
},
@@ -171509,7 +187749,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D5EB,
visual="bf",
},
@@ -171518,7 +187758,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D5EC,
visual="bf",
},
@@ -171527,7 +187767,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D5ED,
visual="bf",
},
@@ -171536,7 +187776,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D5EE,
visual="bf",
},
@@ -171545,7 +187785,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D5EF,
visual="bf",
},
@@ -171554,7 +187794,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D5F0,
visual="bf",
},
@@ -171563,7 +187803,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D5F1,
visual="bf",
},
@@ -171572,7 +187812,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D5F2,
visual="bf",
},
@@ -171581,7 +187821,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D5F3,
visual="bf",
},
@@ -171590,7 +187830,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D5F4,
visual="bf",
},
@@ -171599,7 +187839,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D5F5,
visual="bf",
},
@@ -171608,7 +187848,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D5F6,
visual="bf",
},
@@ -171617,7 +187857,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D5F7,
visual="bf",
},
@@ -171626,7 +187866,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D5F8,
visual="bf",
},
@@ -171635,7 +187875,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D5F9,
visual="bf",
},
@@ -171644,7 +187884,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D5FA,
visual="bf",
},
@@ -171653,7 +187893,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D5FB,
visual="bf",
},
@@ -171662,7 +187902,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D5FC,
visual="bf",
},
@@ -171671,7 +187911,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D5FD,
visual="bf",
},
@@ -171680,7 +187920,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D5FE,
visual="bf",
},
@@ -171689,7 +187929,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D5FF,
visual="bf",
},
@@ -171698,7 +187938,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D600,
visual="bf",
},
@@ -171707,7 +187947,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D601,
visual="bf",
},
@@ -171716,7 +187956,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D602,
visual="bf",
},
@@ -171725,7 +187965,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D603,
visual="bf",
},
@@ -171734,7 +187974,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D604,
visual="bf",
},
@@ -171743,7 +187983,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D605,
visual="bf",
},
@@ -171752,7 +187992,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D606,
visual="bf",
},
@@ -171761,7 +188001,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D607,
visual="bf",
},
@@ -171770,7 +188010,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D608,
visual="it",
},
@@ -171779,7 +188019,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D609,
visual="it",
},
@@ -171788,7 +188028,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D60A,
visual="it",
},
@@ -171797,7 +188037,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D60B,
visual="it",
},
@@ -171806,7 +188046,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D60C,
visual="it",
},
@@ -171815,7 +188055,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D60D,
visual="it",
},
@@ -171824,7 +188064,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D60E,
visual="it",
},
@@ -171833,7 +188073,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D60F,
visual="it",
},
@@ -171842,7 +188082,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D610,
visual="it",
},
@@ -171851,7 +188091,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D611,
visual="it",
},
@@ -171860,7 +188100,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D612,
visual="it",
},
@@ -171869,7 +188109,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D613,
visual="it",
},
@@ -171878,7 +188118,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D614,
visual="it",
},
@@ -171887,7 +188127,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D615,
visual="it",
},
@@ -171896,7 +188136,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D616,
visual="it",
},
@@ -171905,7 +188145,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D617,
visual="it",
},
@@ -171914,7 +188154,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D618,
visual="it",
},
@@ -171923,7 +188163,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D619,
visual="it",
},
@@ -171932,7 +188172,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D61A,
visual="it",
},
@@ -171941,7 +188181,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D61B,
visual="it",
},
@@ -171950,7 +188190,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D61C,
visual="it",
},
@@ -171959,7 +188199,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D61D,
visual="it",
},
@@ -171968,7 +188208,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D61E,
visual="it",
},
@@ -171977,7 +188217,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D61F,
visual="it",
},
@@ -171986,7 +188226,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D620,
visual="it",
},
@@ -171995,7 +188235,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D621,
visual="it",
},
@@ -172004,7 +188244,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D622,
visual="it",
},
@@ -172013,7 +188253,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D623,
visual="it",
},
@@ -172022,7 +188262,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D624,
visual="it",
},
@@ -172031,7 +188271,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D625,
visual="it",
},
@@ -172040,7 +188280,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D626,
visual="it",
},
@@ -172049,7 +188289,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D627,
visual="it",
},
@@ -172058,7 +188298,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D628,
visual="it",
},
@@ -172067,7 +188307,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D629,
visual="it",
},
@@ -172076,7 +188316,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D62A,
visual="it",
},
@@ -172085,7 +188325,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D62B,
visual="it",
},
@@ -172094,7 +188334,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D62C,
visual="it",
},
@@ -172103,7 +188343,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D62D,
visual="it",
},
@@ -172112,7 +188352,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D62E,
visual="it",
},
@@ -172121,7 +188361,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D62F,
visual="it",
},
@@ -172130,7 +188370,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D630,
visual="it",
},
@@ -172139,7 +188379,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D631,
visual="it",
},
@@ -172148,7 +188388,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D632,
visual="it",
},
@@ -172157,7 +188397,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D633,
visual="it",
},
@@ -172166,7 +188406,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D634,
visual="it",
},
@@ -172175,7 +188415,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D635,
visual="it",
},
@@ -172184,7 +188424,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D636,
visual="it",
},
@@ -172193,7 +188433,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D637,
visual="it",
},
@@ -172202,7 +188442,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D638,
visual="it",
},
@@ -172211,7 +188451,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D639,
visual="it",
},
@@ -172220,7 +188460,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D63A,
visual="it",
},
@@ -172229,7 +188469,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF ITALIC SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D63B,
visual="it",
},
@@ -172238,7 +188478,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D63C,
visual="bi",
},
@@ -172247,7 +188487,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D63D,
visual="bi",
},
@@ -172256,7 +188496,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D63E,
visual="bi",
},
@@ -172265,7 +188505,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D63F,
visual="bi",
},
@@ -172274,7 +188514,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D640,
visual="bi",
},
@@ -172283,7 +188523,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D641,
visual="bi",
},
@@ -172292,7 +188532,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D642,
visual="bi",
},
@@ -172301,7 +188541,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D643,
visual="bi",
},
@@ -172310,7 +188550,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D644,
visual="bi",
},
@@ -172319,7 +188559,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D645,
visual="bi",
},
@@ -172328,7 +188568,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D646,
visual="bi",
},
@@ -172337,7 +188577,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D647,
visual="bi",
},
@@ -172346,7 +188586,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D648,
visual="bi",
},
@@ -172355,7 +188595,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D649,
visual="bi",
},
@@ -172364,7 +188604,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D64A,
visual="bi",
},
@@ -172373,7 +188613,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D64B,
visual="bi",
},
@@ -172382,7 +188622,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D64C,
visual="bi",
},
@@ -172391,7 +188631,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D64D,
visual="bi",
},
@@ -172400,7 +188640,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D64E,
visual="bi",
},
@@ -172409,7 +188649,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D64F,
visual="bi",
},
@@ -172418,7 +188658,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D650,
visual="bi",
},
@@ -172427,7 +188667,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D651,
visual="bi",
},
@@ -172436,7 +188676,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D652,
visual="bi",
},
@@ -172445,7 +188685,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D653,
visual="bi",
},
@@ -172454,7 +188694,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D654,
visual="bi",
},
@@ -172463,7 +188703,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D655,
visual="bi",
},
@@ -172472,7 +188712,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D656,
visual="bi",
},
@@ -172481,7 +188721,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D657,
visual="bi",
},
@@ -172490,7 +188730,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D658,
visual="bi",
},
@@ -172499,7 +188739,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D659,
visual="bi",
},
@@ -172508,7 +188748,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D65A,
visual="bi",
},
@@ -172517,7 +188757,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D65B,
visual="bi",
},
@@ -172526,7 +188766,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D65C,
visual="bi",
},
@@ -172535,7 +188775,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D65D,
visual="bi",
},
@@ -172544,7 +188784,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D65E,
visual="bi",
},
@@ -172553,7 +188793,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D65F,
visual="bi",
},
@@ -172562,7 +188802,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D660,
visual="bi",
},
@@ -172571,7 +188811,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D661,
visual="bi",
},
@@ -172580,7 +188820,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D662,
visual="bi",
},
@@ -172589,7 +188829,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D663,
visual="bi",
},
@@ -172598,7 +188838,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D664,
visual="bi",
},
@@ -172607,7 +188847,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D665,
visual="bi",
},
@@ -172616,7 +188856,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D666,
visual="bi",
},
@@ -172625,7 +188865,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D667,
visual="bi",
},
@@ -172634,7 +188874,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D668,
visual="bi",
},
@@ -172643,7 +188883,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D669,
visual="bi",
},
@@ -172652,7 +188892,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D66A,
visual="bi",
},
@@ -172661,7 +188901,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D66B,
visual="bi",
},
@@ -172670,7 +188910,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D66C,
visual="bi",
},
@@ -172679,7 +188919,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D66D,
visual="bi",
},
@@ -172688,7 +188928,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D66E,
visual="bi",
},
@@ -172697,7 +188937,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D66F,
visual="bi",
},
@@ -172706,7 +188946,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0041 },
+ specials={ "font", 0x41 },
unicodeslot=0x1D670,
},
[0x1D671]={
@@ -172714,7 +188954,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0042 },
+ specials={ "font", 0x42 },
unicodeslot=0x1D671,
},
[0x1D672]={
@@ -172722,7 +188962,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0043 },
+ specials={ "font", 0x43 },
unicodeslot=0x1D672,
},
[0x1D673]={
@@ -172730,7 +188970,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0044 },
+ specials={ "font", 0x44 },
unicodeslot=0x1D673,
},
[0x1D674]={
@@ -172738,7 +188978,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0045 },
+ specials={ "font", 0x45 },
unicodeslot=0x1D674,
},
[0x1D675]={
@@ -172746,7 +188986,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0046 },
+ specials={ "font", 0x46 },
unicodeslot=0x1D675,
},
[0x1D676]={
@@ -172754,7 +188994,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0047 },
+ specials={ "font", 0x47 },
unicodeslot=0x1D676,
},
[0x1D677]={
@@ -172762,7 +189002,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0048 },
+ specials={ "font", 0x48 },
unicodeslot=0x1D677,
},
[0x1D678]={
@@ -172770,7 +189010,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0049 },
+ specials={ "font", 0x49 },
unicodeslot=0x1D678,
},
[0x1D679]={
@@ -172778,7 +189018,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL J",
direction="l",
linebreak="al",
- specials={ "font", 0x004A },
+ specials={ "font", 0x4A },
unicodeslot=0x1D679,
},
[0x1D67A]={
@@ -172786,7 +189026,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL K",
direction="l",
linebreak="al",
- specials={ "font", 0x004B },
+ specials={ "font", 0x4B },
unicodeslot=0x1D67A,
},
[0x1D67B]={
@@ -172794,7 +189034,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL L",
direction="l",
linebreak="al",
- specials={ "font", 0x004C },
+ specials={ "font", 0x4C },
unicodeslot=0x1D67B,
},
[0x1D67C]={
@@ -172802,7 +189042,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL M",
direction="l",
linebreak="al",
- specials={ "font", 0x004D },
+ specials={ "font", 0x4D },
unicodeslot=0x1D67C,
},
[0x1D67D]={
@@ -172810,7 +189050,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL N",
direction="l",
linebreak="al",
- specials={ "font", 0x004E },
+ specials={ "font", 0x4E },
unicodeslot=0x1D67D,
},
[0x1D67E]={
@@ -172818,7 +189058,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL O",
direction="l",
linebreak="al",
- specials={ "font", 0x004F },
+ specials={ "font", 0x4F },
unicodeslot=0x1D67E,
},
[0x1D67F]={
@@ -172826,7 +189066,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0050 },
+ specials={ "font", 0x50 },
unicodeslot=0x1D67F,
},
[0x1D680]={
@@ -172834,7 +189074,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0051 },
+ specials={ "font", 0x51 },
unicodeslot=0x1D680,
},
[0x1D681]={
@@ -172842,7 +189082,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0052 },
+ specials={ "font", 0x52 },
unicodeslot=0x1D681,
},
[0x1D682]={
@@ -172850,7 +189090,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0053 },
+ specials={ "font", 0x53 },
unicodeslot=0x1D682,
},
[0x1D683]={
@@ -172858,7 +189098,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0054 },
+ specials={ "font", 0x54 },
unicodeslot=0x1D683,
},
[0x1D684]={
@@ -172866,7 +189106,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0055 },
+ specials={ "font", 0x55 },
unicodeslot=0x1D684,
},
[0x1D685]={
@@ -172874,7 +189114,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0056 },
+ specials={ "font", 0x56 },
unicodeslot=0x1D685,
},
[0x1D686]={
@@ -172882,7 +189122,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0057 },
+ specials={ "font", 0x57 },
unicodeslot=0x1D686,
},
[0x1D687]={
@@ -172890,7 +189130,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0058 },
+ specials={ "font", 0x58 },
unicodeslot=0x1D687,
},
[0x1D688]={
@@ -172898,7 +189138,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0059 },
+ specials={ "font", 0x59 },
unicodeslot=0x1D688,
},
[0x1D689]={
@@ -172906,7 +189146,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE CAPITAL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x005A },
+ specials={ "font", 0x5A },
unicodeslot=0x1D689,
},
[0x1D68A]={
@@ -172914,7 +189154,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL A",
direction="l",
linebreak="al",
- specials={ "font", 0x0061 },
+ specials={ "font", 0x61 },
unicodeslot=0x1D68A,
},
[0x1D68B]={
@@ -172922,7 +189162,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL B",
direction="l",
linebreak="al",
- specials={ "font", 0x0062 },
+ specials={ "font", 0x62 },
unicodeslot=0x1D68B,
},
[0x1D68C]={
@@ -172930,7 +189170,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL C",
direction="l",
linebreak="al",
- specials={ "font", 0x0063 },
+ specials={ "font", 0x63 },
unicodeslot=0x1D68C,
},
[0x1D68D]={
@@ -172938,7 +189178,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL D",
direction="l",
linebreak="al",
- specials={ "font", 0x0064 },
+ specials={ "font", 0x64 },
unicodeslot=0x1D68D,
},
[0x1D68E]={
@@ -172946,7 +189186,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL E",
direction="l",
linebreak="al",
- specials={ "font", 0x0065 },
+ specials={ "font", 0x65 },
unicodeslot=0x1D68E,
},
[0x1D68F]={
@@ -172954,7 +189194,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL F",
direction="l",
linebreak="al",
- specials={ "font", 0x0066 },
+ specials={ "font", 0x66 },
unicodeslot=0x1D68F,
},
[0x1D690]={
@@ -172962,7 +189202,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL G",
direction="l",
linebreak="al",
- specials={ "font", 0x0067 },
+ specials={ "font", 0x67 },
unicodeslot=0x1D690,
},
[0x1D691]={
@@ -172970,7 +189210,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL H",
direction="l",
linebreak="al",
- specials={ "font", 0x0068 },
+ specials={ "font", 0x68 },
unicodeslot=0x1D691,
},
[0x1D692]={
@@ -172978,7 +189218,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL I",
direction="l",
linebreak="al",
- specials={ "font", 0x0069 },
+ specials={ "font", 0x69 },
unicodeslot=0x1D692,
},
[0x1D693]={
@@ -172986,7 +189226,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL J",
direction="l",
linebreak="al",
- specials={ "font", 0x006A },
+ specials={ "font", 0x6A },
unicodeslot=0x1D693,
},
[0x1D694]={
@@ -172994,7 +189234,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL K",
direction="l",
linebreak="al",
- specials={ "font", 0x006B },
+ specials={ "font", 0x6B },
unicodeslot=0x1D694,
},
[0x1D695]={
@@ -173002,7 +189242,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL L",
direction="l",
linebreak="al",
- specials={ "font", 0x006C },
+ specials={ "font", 0x6C },
unicodeslot=0x1D695,
},
[0x1D696]={
@@ -173010,7 +189250,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL M",
direction="l",
linebreak="al",
- specials={ "font", 0x006D },
+ specials={ "font", 0x6D },
unicodeslot=0x1D696,
},
[0x1D697]={
@@ -173018,7 +189258,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL N",
direction="l",
linebreak="al",
- specials={ "font", 0x006E },
+ specials={ "font", 0x6E },
unicodeslot=0x1D697,
},
[0x1D698]={
@@ -173026,7 +189266,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL O",
direction="l",
linebreak="al",
- specials={ "font", 0x006F },
+ specials={ "font", 0x6F },
unicodeslot=0x1D698,
},
[0x1D699]={
@@ -173034,7 +189274,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL P",
direction="l",
linebreak="al",
- specials={ "font", 0x0070 },
+ specials={ "font", 0x70 },
unicodeslot=0x1D699,
},
[0x1D69A]={
@@ -173042,7 +189282,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL Q",
direction="l",
linebreak="al",
- specials={ "font", 0x0071 },
+ specials={ "font", 0x71 },
unicodeslot=0x1D69A,
},
[0x1D69B]={
@@ -173050,7 +189290,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL R",
direction="l",
linebreak="al",
- specials={ "font", 0x0072 },
+ specials={ "font", 0x72 },
unicodeslot=0x1D69B,
},
[0x1D69C]={
@@ -173058,7 +189298,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL S",
direction="l",
linebreak="al",
- specials={ "font", 0x0073 },
+ specials={ "font", 0x73 },
unicodeslot=0x1D69C,
},
[0x1D69D]={
@@ -173066,7 +189306,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL T",
direction="l",
linebreak="al",
- specials={ "font", 0x0074 },
+ specials={ "font", 0x74 },
unicodeslot=0x1D69D,
},
[0x1D69E]={
@@ -173074,7 +189314,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL U",
direction="l",
linebreak="al",
- specials={ "font", 0x0075 },
+ specials={ "font", 0x75 },
unicodeslot=0x1D69E,
},
[0x1D69F]={
@@ -173082,7 +189322,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL V",
direction="l",
linebreak="al",
- specials={ "font", 0x0076 },
+ specials={ "font", 0x76 },
unicodeslot=0x1D69F,
},
[0x1D6A0]={
@@ -173090,7 +189330,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL W",
direction="l",
linebreak="al",
- specials={ "font", 0x0077 },
+ specials={ "font", 0x77 },
unicodeslot=0x1D6A0,
},
[0x1D6A1]={
@@ -173098,7 +189338,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL X",
direction="l",
linebreak="al",
- specials={ "font", 0x0078 },
+ specials={ "font", 0x78 },
unicodeslot=0x1D6A1,
},
[0x1D6A2]={
@@ -173106,7 +189346,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL Y",
direction="l",
linebreak="al",
- specials={ "font", 0x0079 },
+ specials={ "font", 0x79 },
unicodeslot=0x1D6A2,
},
[0x1D6A3]={
@@ -173114,7 +189354,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE SMALL Z",
direction="l",
linebreak="al",
- specials={ "font", 0x007A },
+ specials={ "font", 0x7A },
unicodeslot=0x1D6A3,
},
[0x1D6A4]={
@@ -173124,7 +189364,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="imath",
- specials={ "font", 0x0131 },
+ specials={ "font", 0x131 },
unicodeslot=0x1D6A4,
visual="it",
},
@@ -173135,7 +189375,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="jmath",
- specials={ "font", 0x0237 },
+ specials={ "font", 0x237 },
unicodeslot=0x1D6A5,
visual="it",
},
@@ -173144,7 +189384,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x0391 },
+ specials={ "font", 0x391 },
unicodeslot=0x1D6A8,
visual="bf",
},
@@ -173153,7 +189393,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0392 },
+ specials={ "font", 0x392 },
unicodeslot=0x1D6A9,
visual="bf",
},
@@ -173162,7 +189402,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x0393 },
+ specials={ "font", 0x393 },
unicodeslot=0x1D6AA,
visual="bf",
},
@@ -173171,7 +189411,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0394 },
+ specials={ "font", 0x394 },
unicodeslot=0x1D6AB,
visual="bf",
},
@@ -173180,7 +189420,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x0395 },
+ specials={ "font", 0x395 },
unicodeslot=0x1D6AC,
visual="bf",
},
@@ -173189,7 +189429,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0396 },
+ specials={ "font", 0x396 },
unicodeslot=0x1D6AD,
visual="bf",
},
@@ -173198,7 +189438,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0397 },
+ specials={ "font", 0x397 },
unicodeslot=0x1D6AE,
visual="bf",
},
@@ -173207,7 +189447,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0398 },
+ specials={ "font", 0x398 },
unicodeslot=0x1D6AF,
visual="bf",
},
@@ -173216,7 +189456,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0399 },
+ specials={ "font", 0x399 },
unicodeslot=0x1D6B0,
visual="bf",
},
@@ -173225,7 +189465,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x039A },
+ specials={ "font", 0x39A },
unicodeslot=0x1D6B1,
visual="bf",
},
@@ -173234,7 +189474,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x039B },
+ specials={ "font", 0x39B },
unicodeslot=0x1D6B2,
visual="bf",
},
@@ -173243,7 +189483,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x039C },
+ specials={ "font", 0x39C },
unicodeslot=0x1D6B3,
visual="bf",
},
@@ -173252,7 +189492,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x039D },
+ specials={ "font", 0x39D },
unicodeslot=0x1D6B4,
visual="bf",
},
@@ -173261,7 +189501,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x039E },
+ specials={ "font", 0x39E },
unicodeslot=0x1D6B5,
visual="bf",
},
@@ -173270,7 +189510,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x039F },
+ specials={ "font", 0x39F },
unicodeslot=0x1D6B6,
visual="bf",
},
@@ -173279,7 +189519,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A0 },
+ specials={ "font", 0x3A0 },
unicodeslot=0x1D6B7,
visual="bf",
},
@@ -173288,7 +189528,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03A1 },
+ specials={ "font", 0x3A1 },
unicodeslot=0x1D6B8,
visual="bf",
},
@@ -173297,7 +189537,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F4 },
+ specials={ "font", 0x3F4 },
unicodeslot=0x1D6B9,
visual="bf",
},
@@ -173306,7 +189546,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A3 },
+ specials={ "font", 0x3A3 },
unicodeslot=0x1D6BA,
visual="bf",
},
@@ -173315,7 +189555,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03A4 },
+ specials={ "font", 0x3A4 },
unicodeslot=0x1D6BB,
visual="bf",
},
@@ -173324,7 +189564,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03A5 },
+ specials={ "font", 0x3A5 },
unicodeslot=0x1D6BC,
visual="bf",
},
@@ -173333,7 +189573,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A6 },
+ specials={ "font", 0x3A6 },
unicodeslot=0x1D6BD,
visual="bf",
},
@@ -173342,7 +189582,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A7 },
+ specials={ "font", 0x3A7 },
unicodeslot=0x1D6BE,
visual="bf",
},
@@ -173351,7 +189591,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A8 },
+ specials={ "font", 0x3A8 },
unicodeslot=0x1D6BF,
visual="bf",
},
@@ -173360,7 +189600,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A9 },
+ specials={ "font", 0x3A9 },
unicodeslot=0x1D6C0,
visual="bf",
},
@@ -173378,7 +189618,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B1 },
+ specials={ "font", 0x3B1 },
unicodeslot=0x1D6C2,
visual="bf",
},
@@ -173387,7 +189627,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B2 },
+ specials={ "font", 0x3B2 },
unicodeslot=0x1D6C3,
visual="bf",
},
@@ -173396,7 +189636,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B3 },
+ specials={ "font", 0x3B3 },
unicodeslot=0x1D6C4,
visual="bf",
},
@@ -173405,7 +189645,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B4 },
+ specials={ "font", 0x3B4 },
unicodeslot=0x1D6C5,
visual="bf",
},
@@ -173414,7 +189654,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03B5 },
+ specials={ "font", 0x3B5 },
unicodeslot=0x1D6C6,
visual="bf",
},
@@ -173423,7 +189663,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B6 },
+ specials={ "font", 0x3B6 },
unicodeslot=0x1D6C7,
visual="bf",
},
@@ -173432,7 +189672,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B7 },
+ specials={ "font", 0x3B7 },
unicodeslot=0x1D6C8,
visual="bf",
},
@@ -173441,7 +189681,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B8 },
+ specials={ "font", 0x3B8 },
unicodeslot=0x1D6C9,
visual="bf",
},
@@ -173450,7 +189690,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B9 },
+ specials={ "font", 0x3B9 },
unicodeslot=0x1D6CA,
visual="bf",
},
@@ -173459,7 +189699,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BA },
+ specials={ "font", 0x3BA },
unicodeslot=0x1D6CB,
visual="bf",
},
@@ -173468,7 +189708,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BB },
+ specials={ "font", 0x3BB },
unicodeslot=0x1D6CC,
visual="bf",
},
@@ -173477,7 +189717,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BC },
+ specials={ "font", 0x3BC },
unicodeslot=0x1D6CD,
visual="bf",
},
@@ -173486,7 +189726,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BD },
+ specials={ "font", 0x3BD },
unicodeslot=0x1D6CE,
visual="bf",
},
@@ -173495,7 +189735,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x03BE },
+ specials={ "font", 0x3BE },
unicodeslot=0x1D6CF,
visual="bf",
},
@@ -173504,7 +189744,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x03BF },
+ specials={ "font", 0x3BF },
unicodeslot=0x1D6D0,
visual="bf",
},
@@ -173513,7 +189753,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C0 },
+ specials={ "font", 0x3C0 },
unicodeslot=0x1D6D1,
visual="bf",
},
@@ -173522,7 +189762,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03C1 },
+ specials={ "font", 0x3C1 },
unicodeslot=0x1D6D2,
visual="bf",
},
@@ -173531,7 +189771,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL FINAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C2 },
+ specials={ "font", 0x3C2 },
unicodeslot=0x1D6D3,
visual="bf",
},
@@ -173540,7 +189780,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C3 },
+ specials={ "font", 0x3C3 },
unicodeslot=0x1D6D4,
visual="bf",
},
@@ -173549,7 +189789,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03C4 },
+ specials={ "font", 0x3C4 },
unicodeslot=0x1D6D5,
visual="bf",
},
@@ -173558,7 +189798,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03C5 },
+ specials={ "font", 0x3C5 },
unicodeslot=0x1D6D6,
visual="bf",
},
@@ -173567,7 +189807,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C6 },
+ specials={ "font", 0x3C6 },
unicodeslot=0x1D6D7,
visual="bf",
},
@@ -173576,7 +189816,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C7 },
+ specials={ "font", 0x3C7 },
unicodeslot=0x1D6D8,
visual="bf",
},
@@ -173585,7 +189825,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C8 },
+ specials={ "font", 0x3C8 },
unicodeslot=0x1D6D9,
visual="bf",
},
@@ -173594,7 +189834,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C9 },
+ specials={ "font", 0x3C9 },
unicodeslot=0x1D6DA,
visual="bf",
},
@@ -173612,7 +189852,7 @@ characters.data={
description="MATHEMATICAL BOLD EPSILON SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F5 },
+ specials={ "font", 0x3F5 },
unicodeslot=0x1D6DC,
visual="bf",
},
@@ -173621,7 +189861,7 @@ characters.data={
description="MATHEMATICAL BOLD THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D1 },
+ specials={ "font", 0x3D1 },
unicodeslot=0x1D6DD,
visual="bf",
},
@@ -173630,7 +189870,7 @@ characters.data={
description="MATHEMATICAL BOLD KAPPA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F0 },
+ specials={ "font", 0x3F0 },
unicodeslot=0x1D6DE,
visual="bf",
},
@@ -173639,7 +189879,7 @@ characters.data={
description="MATHEMATICAL BOLD PHI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D5 },
+ specials={ "font", 0x3D5 },
unicodeslot=0x1D6DF,
visual="bf",
},
@@ -173648,7 +189888,7 @@ characters.data={
description="MATHEMATICAL BOLD RHO SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F1 },
+ specials={ "font", 0x3F1 },
unicodeslot=0x1D6E0,
visual="bf",
},
@@ -173657,7 +189897,7 @@ characters.data={
description="MATHEMATICAL BOLD PI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D6 },
+ specials={ "font", 0x3D6 },
unicodeslot=0x1D6E1,
visual="bf",
},
@@ -173666,7 +189906,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x0391 },
+ specials={ "font", 0x391 },
unicodeslot=0x1D6E2,
visual="it",
},
@@ -173675,7 +189915,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0392 },
+ specials={ "font", 0x392 },
unicodeslot=0x1D6E3,
visual="it",
},
@@ -173684,7 +189924,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x0393 },
+ specials={ "font", 0x393 },
unicodeslot=0x1D6E4,
visual="it",
},
@@ -173693,7 +189933,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0394 },
+ specials={ "font", 0x394 },
unicodeslot=0x1D6E5,
visual="it",
},
@@ -173702,7 +189942,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x0395 },
+ specials={ "font", 0x395 },
unicodeslot=0x1D6E6,
visual="it",
},
@@ -173711,7 +189951,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0396 },
+ specials={ "font", 0x396 },
unicodeslot=0x1D6E7,
visual="it",
},
@@ -173720,7 +189960,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0397 },
+ specials={ "font", 0x397 },
unicodeslot=0x1D6E8,
visual="it",
},
@@ -173729,7 +189969,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0398 },
+ specials={ "font", 0x398 },
unicodeslot=0x1D6E9,
visual="it",
},
@@ -173738,7 +189978,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0399 },
+ specials={ "font", 0x399 },
unicodeslot=0x1D6EA,
visual="it",
},
@@ -173747,7 +189987,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x039A },
+ specials={ "font", 0x39A },
unicodeslot=0x1D6EB,
visual="it",
},
@@ -173756,7 +189996,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x039B },
+ specials={ "font", 0x39B },
unicodeslot=0x1D6EC,
visual="it",
},
@@ -173765,7 +190005,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x039C },
+ specials={ "font", 0x39C },
unicodeslot=0x1D6ED,
visual="it",
},
@@ -173774,7 +190014,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x039D },
+ specials={ "font", 0x39D },
unicodeslot=0x1D6EE,
visual="it",
},
@@ -173783,7 +190023,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x039E },
+ specials={ "font", 0x39E },
unicodeslot=0x1D6EF,
visual="it",
},
@@ -173792,7 +190032,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x039F },
+ specials={ "font", 0x39F },
unicodeslot=0x1D6F0,
visual="it",
},
@@ -173801,7 +190041,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A0 },
+ specials={ "font", 0x3A0 },
unicodeslot=0x1D6F1,
visual="it",
},
@@ -173810,7 +190050,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03A1 },
+ specials={ "font", 0x3A1 },
unicodeslot=0x1D6F2,
visual="it",
},
@@ -173819,7 +190059,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F4 },
+ specials={ "font", 0x3F4 },
unicodeslot=0x1D6F3,
visual="it",
},
@@ -173828,7 +190068,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A3 },
+ specials={ "font", 0x3A3 },
unicodeslot=0x1D6F4,
visual="it",
},
@@ -173837,7 +190077,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03A4 },
+ specials={ "font", 0x3A4 },
unicodeslot=0x1D6F5,
visual="it",
},
@@ -173846,7 +190086,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03A5 },
+ specials={ "font", 0x3A5 },
unicodeslot=0x1D6F6,
visual="it",
},
@@ -173855,7 +190095,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A6 },
+ specials={ "font", 0x3A6 },
unicodeslot=0x1D6F7,
visual="it",
},
@@ -173864,7 +190104,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A7 },
+ specials={ "font", 0x3A7 },
unicodeslot=0x1D6F8,
visual="it",
},
@@ -173873,7 +190113,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A8 },
+ specials={ "font", 0x3A8 },
unicodeslot=0x1D6F9,
visual="it",
},
@@ -173882,7 +190122,7 @@ characters.data={
description="MATHEMATICAL ITALIC CAPITAL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A9 },
+ specials={ "font", 0x3A9 },
unicodeslot=0x1D6FA,
visual="it",
},
@@ -173901,7 +190141,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B1 },
+ specials={ "font", 0x3B1 },
unicodeslot=0x1D6FC,
visual="it",
},
@@ -173910,7 +190150,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B2 },
+ specials={ "font", 0x3B2 },
unicodeslot=0x1D6FD,
visual="it",
},
@@ -173919,7 +190159,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B3 },
+ specials={ "font", 0x3B3 },
unicodeslot=0x1D6FE,
visual="it",
},
@@ -173928,7 +190168,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B4 },
+ specials={ "font", 0x3B4 },
unicodeslot=0x1D6FF,
visual="it",
},
@@ -173937,7 +190177,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03B5 },
+ specials={ "font", 0x3B5 },
unicodeslot=0x1D700,
visual="it",
},
@@ -173946,7 +190186,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B6 },
+ specials={ "font", 0x3B6 },
unicodeslot=0x1D701,
visual="it",
},
@@ -173955,7 +190195,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B7 },
+ specials={ "font", 0x3B7 },
unicodeslot=0x1D702,
visual="it",
},
@@ -173964,7 +190204,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B8 },
+ specials={ "font", 0x3B8 },
unicodeslot=0x1D703,
visual="it",
},
@@ -173973,7 +190213,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B9 },
+ specials={ "font", 0x3B9 },
unicodeslot=0x1D704,
visual="it",
},
@@ -173982,7 +190222,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BA },
+ specials={ "font", 0x3BA },
unicodeslot=0x1D705,
visual="it",
},
@@ -173991,7 +190231,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BB },
+ specials={ "font", 0x3BB },
unicodeslot=0x1D706,
visual="it",
},
@@ -174000,7 +190240,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BC },
+ specials={ "font", 0x3BC },
unicodeslot=0x1D707,
visual="it",
},
@@ -174009,7 +190249,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BD },
+ specials={ "font", 0x3BD },
unicodeslot=0x1D708,
visual="it",
},
@@ -174018,7 +190258,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x03BE },
+ specials={ "font", 0x3BE },
unicodeslot=0x1D709,
visual="it",
},
@@ -174027,7 +190267,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x03BF },
+ specials={ "font", 0x3BF },
unicodeslot=0x1D70A,
visual="it",
},
@@ -174036,7 +190276,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C0 },
+ specials={ "font", 0x3C0 },
unicodeslot=0x1D70B,
visual="it",
},
@@ -174045,7 +190285,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03C1 },
+ specials={ "font", 0x3C1 },
unicodeslot=0x1D70C,
visual="it",
},
@@ -174054,7 +190294,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL FINAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C2 },
+ specials={ "font", 0x3C2 },
unicodeslot=0x1D70D,
visual="it",
},
@@ -174063,7 +190303,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C3 },
+ specials={ "font", 0x3C3 },
unicodeslot=0x1D70E,
visual="it",
},
@@ -174072,7 +190312,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03C4 },
+ specials={ "font", 0x3C4 },
unicodeslot=0x1D70F,
visual="it",
},
@@ -174081,7 +190321,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03C5 },
+ specials={ "font", 0x3C5 },
unicodeslot=0x1D710,
visual="it",
},
@@ -174090,7 +190330,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C6 },
+ specials={ "font", 0x3C6 },
unicodeslot=0x1D711,
visual="it",
},
@@ -174099,7 +190339,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C7 },
+ specials={ "font", 0x3C7 },
unicodeslot=0x1D712,
visual="it",
},
@@ -174108,7 +190348,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C8 },
+ specials={ "font", 0x3C8 },
unicodeslot=0x1D713,
visual="it",
},
@@ -174117,7 +190357,7 @@ characters.data={
description="MATHEMATICAL ITALIC SMALL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C9 },
+ specials={ "font", 0x3C9 },
unicodeslot=0x1D714,
visual="it",
},
@@ -174135,7 +190375,7 @@ characters.data={
description="MATHEMATICAL ITALIC EPSILON SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F5 },
+ specials={ "font", 0x3F5 },
unicodeslot=0x1D716,
visual="it",
},
@@ -174146,7 +190386,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="vartheta",
- specials={ "font", 0x03D1 },
+ specials={ "font", 0x3D1 },
unicodeslot=0x1D717,
visual="it",
},
@@ -174157,7 +190397,7 @@ characters.data={
linebreak="al",
mathclass="default",
mathname="varkappa",
- specials={ "font", 0x03F0 },
+ specials={ "font", 0x3F0 },
unicodeslot=0x1D718,
visual="it",
},
@@ -174166,7 +190406,7 @@ characters.data={
description="MATHEMATICAL ITALIC PHI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D5 },
+ specials={ "font", 0x3D5 },
unicodeslot=0x1D719,
visual="it",
},
@@ -174177,7 +190417,7 @@ characters.data={
linebreak="al",
mathclass="variable",
mathname="varrho",
- specials={ "font", 0x03F1 },
+ specials={ "font", 0x3F1 },
unicodeslot=0x1D71A,
visual="it",
},
@@ -174186,7 +190426,7 @@ characters.data={
description="MATHEMATICAL ITALIC PI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D6 },
+ specials={ "font", 0x3D6 },
unicodeslot=0x1D71B,
visual="it",
},
@@ -174195,7 +190435,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x0391 },
+ specials={ "font", 0x391 },
unicodeslot=0x1D71C,
visual="bi",
},
@@ -174204,7 +190444,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0392 },
+ specials={ "font", 0x392 },
unicodeslot=0x1D71D,
visual="bi",
},
@@ -174213,7 +190453,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x0393 },
+ specials={ "font", 0x393 },
unicodeslot=0x1D71E,
visual="bi",
},
@@ -174222,7 +190462,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0394 },
+ specials={ "font", 0x394 },
unicodeslot=0x1D71F,
visual="bi",
},
@@ -174231,7 +190471,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x0395 },
+ specials={ "font", 0x395 },
unicodeslot=0x1D720,
visual="bi",
},
@@ -174240,7 +190480,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0396 },
+ specials={ "font", 0x396 },
unicodeslot=0x1D721,
visual="bi",
},
@@ -174249,7 +190489,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0397 },
+ specials={ "font", 0x397 },
unicodeslot=0x1D722,
visual="bi",
},
@@ -174258,7 +190498,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0398 },
+ specials={ "font", 0x398 },
unicodeslot=0x1D723,
visual="bi",
},
@@ -174267,7 +190507,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0399 },
+ specials={ "font", 0x399 },
unicodeslot=0x1D724,
visual="bi",
},
@@ -174276,7 +190516,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x039A },
+ specials={ "font", 0x39A },
unicodeslot=0x1D725,
visual="bi",
},
@@ -174285,7 +190525,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x039B },
+ specials={ "font", 0x39B },
unicodeslot=0x1D726,
visual="bi",
},
@@ -174294,7 +190534,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x039C },
+ specials={ "font", 0x39C },
unicodeslot=0x1D727,
visual="bi",
},
@@ -174303,7 +190543,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x039D },
+ specials={ "font", 0x39D },
unicodeslot=0x1D728,
visual="bi",
},
@@ -174312,7 +190552,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x039E },
+ specials={ "font", 0x39E },
unicodeslot=0x1D729,
visual="bi",
},
@@ -174321,7 +190561,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x039F },
+ specials={ "font", 0x39F },
unicodeslot=0x1D72A,
visual="bi",
},
@@ -174330,7 +190570,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A0 },
+ specials={ "font", 0x3A0 },
unicodeslot=0x1D72B,
visual="bi",
},
@@ -174339,7 +190579,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03A1 },
+ specials={ "font", 0x3A1 },
unicodeslot=0x1D72C,
visual="bi",
},
@@ -174348,7 +190588,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F4 },
+ specials={ "font", 0x3F4 },
unicodeslot=0x1D72D,
visual="bi",
},
@@ -174357,7 +190597,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A3 },
+ specials={ "font", 0x3A3 },
unicodeslot=0x1D72E,
visual="bi",
},
@@ -174366,7 +190606,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03A4 },
+ specials={ "font", 0x3A4 },
unicodeslot=0x1D72F,
visual="bi",
},
@@ -174375,7 +190615,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03A5 },
+ specials={ "font", 0x3A5 },
unicodeslot=0x1D730,
visual="bi",
},
@@ -174384,7 +190624,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A6 },
+ specials={ "font", 0x3A6 },
unicodeslot=0x1D731,
visual="bi",
},
@@ -174393,7 +190633,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A7 },
+ specials={ "font", 0x3A7 },
unicodeslot=0x1D732,
visual="bi",
},
@@ -174402,7 +190642,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A8 },
+ specials={ "font", 0x3A8 },
unicodeslot=0x1D733,
visual="bi",
},
@@ -174411,7 +190651,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC CAPITAL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A9 },
+ specials={ "font", 0x3A9 },
unicodeslot=0x1D734,
visual="bi",
},
@@ -174429,7 +190669,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B1 },
+ specials={ "font", 0x3B1 },
unicodeslot=0x1D736,
visual="bi",
},
@@ -174438,7 +190678,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B2 },
+ specials={ "font", 0x3B2 },
unicodeslot=0x1D737,
visual="bi",
},
@@ -174447,7 +190687,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B3 },
+ specials={ "font", 0x3B3 },
unicodeslot=0x1D738,
visual="bi",
},
@@ -174456,7 +190696,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B4 },
+ specials={ "font", 0x3B4 },
unicodeslot=0x1D739,
visual="bi",
},
@@ -174465,7 +190705,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03B5 },
+ specials={ "font", 0x3B5 },
unicodeslot=0x1D73A,
visual="bi",
},
@@ -174474,7 +190714,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B6 },
+ specials={ "font", 0x3B6 },
unicodeslot=0x1D73B,
visual="bi",
},
@@ -174483,7 +190723,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B7 },
+ specials={ "font", 0x3B7 },
unicodeslot=0x1D73C,
visual="bi",
},
@@ -174492,7 +190732,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B8 },
+ specials={ "font", 0x3B8 },
unicodeslot=0x1D73D,
visual="bi",
},
@@ -174501,7 +190741,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B9 },
+ specials={ "font", 0x3B9 },
unicodeslot=0x1D73E,
visual="bi",
},
@@ -174510,7 +190750,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BA },
+ specials={ "font", 0x3BA },
unicodeslot=0x1D73F,
visual="bi",
},
@@ -174519,7 +190759,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BB },
+ specials={ "font", 0x3BB },
unicodeslot=0x1D740,
visual="bi",
},
@@ -174528,7 +190768,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BC },
+ specials={ "font", 0x3BC },
unicodeslot=0x1D741,
visual="bi",
},
@@ -174537,7 +190777,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BD },
+ specials={ "font", 0x3BD },
unicodeslot=0x1D742,
visual="bi",
},
@@ -174546,7 +190786,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x03BE },
+ specials={ "font", 0x3BE },
unicodeslot=0x1D743,
visual="bi",
},
@@ -174555,7 +190795,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x03BF },
+ specials={ "font", 0x3BF },
unicodeslot=0x1D744,
visual="bi",
},
@@ -174564,7 +190804,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C0 },
+ specials={ "font", 0x3C0 },
unicodeslot=0x1D745,
visual="bi",
},
@@ -174573,7 +190813,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03C1 },
+ specials={ "font", 0x3C1 },
unicodeslot=0x1D746,
visual="bi",
},
@@ -174582,7 +190822,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL FINAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C2 },
+ specials={ "font", 0x3C2 },
unicodeslot=0x1D747,
visual="bi",
},
@@ -174591,7 +190831,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C3 },
+ specials={ "font", 0x3C3 },
unicodeslot=0x1D748,
visual="bi",
},
@@ -174600,7 +190840,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03C4 },
+ specials={ "font", 0x3C4 },
unicodeslot=0x1D749,
visual="bi",
},
@@ -174609,7 +190849,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03C5 },
+ specials={ "font", 0x3C5 },
unicodeslot=0x1D74A,
visual="bi",
},
@@ -174618,7 +190858,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C6 },
+ specials={ "font", 0x3C6 },
unicodeslot=0x1D74B,
visual="bi",
},
@@ -174627,7 +190867,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C7 },
+ specials={ "font", 0x3C7 },
unicodeslot=0x1D74C,
visual="bi",
},
@@ -174636,7 +190876,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C8 },
+ specials={ "font", 0x3C8 },
unicodeslot=0x1D74D,
visual="bi",
},
@@ -174645,7 +190885,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC SMALL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C9 },
+ specials={ "font", 0x3C9 },
unicodeslot=0x1D74E,
visual="bi",
},
@@ -174663,7 +190903,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC EPSILON SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F5 },
+ specials={ "font", 0x3F5 },
unicodeslot=0x1D750,
visual="bi",
},
@@ -174672,7 +190912,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D1 },
+ specials={ "font", 0x3D1 },
unicodeslot=0x1D751,
visual="bi",
},
@@ -174681,7 +190921,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC KAPPA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F0 },
+ specials={ "font", 0x3F0 },
unicodeslot=0x1D752,
visual="bi",
},
@@ -174690,7 +190930,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC PHI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D5 },
+ specials={ "font", 0x3D5 },
unicodeslot=0x1D753,
visual="bi",
},
@@ -174699,7 +190939,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC RHO SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F1 },
+ specials={ "font", 0x3F1 },
unicodeslot=0x1D754,
visual="bi",
},
@@ -174708,7 +190948,7 @@ characters.data={
description="MATHEMATICAL BOLD ITALIC PI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D6 },
+ specials={ "font", 0x3D6 },
unicodeslot=0x1D755,
visual="bi",
},
@@ -174717,7 +190957,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x0391 },
+ specials={ "font", 0x391 },
unicodeslot=0x1D756,
visual="bf",
},
@@ -174726,7 +190966,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0392 },
+ specials={ "font", 0x392 },
unicodeslot=0x1D757,
visual="bf",
},
@@ -174735,7 +190975,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x0393 },
+ specials={ "font", 0x393 },
unicodeslot=0x1D758,
visual="bf",
},
@@ -174744,7 +190984,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0394 },
+ specials={ "font", 0x394 },
unicodeslot=0x1D759,
visual="bf",
},
@@ -174753,7 +190993,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x0395 },
+ specials={ "font", 0x395 },
unicodeslot=0x1D75A,
visual="bf",
},
@@ -174762,7 +191002,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0396 },
+ specials={ "font", 0x396 },
unicodeslot=0x1D75B,
visual="bf",
},
@@ -174771,7 +191011,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0397 },
+ specials={ "font", 0x397 },
unicodeslot=0x1D75C,
visual="bf",
},
@@ -174780,7 +191020,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0398 },
+ specials={ "font", 0x398 },
unicodeslot=0x1D75D,
visual="bf",
},
@@ -174789,7 +191029,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0399 },
+ specials={ "font", 0x399 },
unicodeslot=0x1D75E,
visual="bf",
},
@@ -174798,7 +191038,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x039A },
+ specials={ "font", 0x39A },
unicodeslot=0x1D75F,
visual="bf",
},
@@ -174807,7 +191047,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x039B },
+ specials={ "font", 0x39B },
unicodeslot=0x1D760,
visual="bf",
},
@@ -174816,7 +191056,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x039C },
+ specials={ "font", 0x39C },
unicodeslot=0x1D761,
visual="bf",
},
@@ -174825,7 +191065,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x039D },
+ specials={ "font", 0x39D },
unicodeslot=0x1D762,
visual="bf",
},
@@ -174834,7 +191074,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x039E },
+ specials={ "font", 0x39E },
unicodeslot=0x1D763,
visual="bf",
},
@@ -174843,7 +191083,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x039F },
+ specials={ "font", 0x39F },
unicodeslot=0x1D764,
visual="bf",
},
@@ -174852,7 +191092,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A0 },
+ specials={ "font", 0x3A0 },
unicodeslot=0x1D765,
visual="bf",
},
@@ -174861,7 +191101,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03A1 },
+ specials={ "font", 0x3A1 },
unicodeslot=0x1D766,
visual="bf",
},
@@ -174870,7 +191110,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F4 },
+ specials={ "font", 0x3F4 },
unicodeslot=0x1D767,
visual="bf",
},
@@ -174879,7 +191119,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A3 },
+ specials={ "font", 0x3A3 },
unicodeslot=0x1D768,
visual="bf",
},
@@ -174888,7 +191128,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03A4 },
+ specials={ "font", 0x3A4 },
unicodeslot=0x1D769,
visual="bf",
},
@@ -174897,7 +191137,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03A5 },
+ specials={ "font", 0x3A5 },
unicodeslot=0x1D76A,
visual="bf",
},
@@ -174906,7 +191146,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A6 },
+ specials={ "font", 0x3A6 },
unicodeslot=0x1D76B,
visual="bf",
},
@@ -174915,7 +191155,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A7 },
+ specials={ "font", 0x3A7 },
unicodeslot=0x1D76C,
visual="bf",
},
@@ -174924,7 +191164,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A8 },
+ specials={ "font", 0x3A8 },
unicodeslot=0x1D76D,
visual="bf",
},
@@ -174933,7 +191173,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A9 },
+ specials={ "font", 0x3A9 },
unicodeslot=0x1D76E,
visual="bf",
},
@@ -174951,7 +191191,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B1 },
+ specials={ "font", 0x3B1 },
unicodeslot=0x1D770,
visual="bf",
},
@@ -174960,7 +191200,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B2 },
+ specials={ "font", 0x3B2 },
unicodeslot=0x1D771,
visual="bf",
},
@@ -174969,7 +191209,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B3 },
+ specials={ "font", 0x3B3 },
unicodeslot=0x1D772,
visual="bf",
},
@@ -174978,7 +191218,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B4 },
+ specials={ "font", 0x3B4 },
unicodeslot=0x1D773,
visual="bf",
},
@@ -174987,7 +191227,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03B5 },
+ specials={ "font", 0x3B5 },
unicodeslot=0x1D774,
visual="bf",
},
@@ -174996,7 +191236,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B6 },
+ specials={ "font", 0x3B6 },
unicodeslot=0x1D775,
visual="bf",
},
@@ -175005,7 +191245,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B7 },
+ specials={ "font", 0x3B7 },
unicodeslot=0x1D776,
visual="bf",
},
@@ -175014,7 +191254,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B8 },
+ specials={ "font", 0x3B8 },
unicodeslot=0x1D777,
visual="bf",
},
@@ -175023,7 +191263,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B9 },
+ specials={ "font", 0x3B9 },
unicodeslot=0x1D778,
visual="bf",
},
@@ -175032,7 +191272,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BA },
+ specials={ "font", 0x3BA },
unicodeslot=0x1D779,
visual="bf",
},
@@ -175041,7 +191281,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BB },
+ specials={ "font", 0x3BB },
unicodeslot=0x1D77A,
visual="bf",
},
@@ -175050,7 +191290,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BC },
+ specials={ "font", 0x3BC },
unicodeslot=0x1D77B,
visual="bf",
},
@@ -175059,7 +191299,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BD },
+ specials={ "font", 0x3BD },
unicodeslot=0x1D77C,
visual="bf",
},
@@ -175068,7 +191308,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x03BE },
+ specials={ "font", 0x3BE },
unicodeslot=0x1D77D,
visual="bf",
},
@@ -175077,7 +191317,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x03BF },
+ specials={ "font", 0x3BF },
unicodeslot=0x1D77E,
visual="bf",
},
@@ -175086,7 +191326,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C0 },
+ specials={ "font", 0x3C0 },
unicodeslot=0x1D77F,
visual="bf",
},
@@ -175095,7 +191335,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03C1 },
+ specials={ "font", 0x3C1 },
unicodeslot=0x1D780,
visual="bf",
},
@@ -175104,7 +191344,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL FINAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C2 },
+ specials={ "font", 0x3C2 },
unicodeslot=0x1D781,
visual="bf",
},
@@ -175113,7 +191353,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C3 },
+ specials={ "font", 0x3C3 },
unicodeslot=0x1D782,
visual="bf",
},
@@ -175122,7 +191362,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03C4 },
+ specials={ "font", 0x3C4 },
unicodeslot=0x1D783,
visual="bf",
},
@@ -175131,7 +191371,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03C5 },
+ specials={ "font", 0x3C5 },
unicodeslot=0x1D784,
visual="bf",
},
@@ -175140,7 +191380,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C6 },
+ specials={ "font", 0x3C6 },
unicodeslot=0x1D785,
visual="bf",
},
@@ -175149,7 +191389,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C7 },
+ specials={ "font", 0x3C7 },
unicodeslot=0x1D786,
visual="bf",
},
@@ -175158,7 +191398,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C8 },
+ specials={ "font", 0x3C8 },
unicodeslot=0x1D787,
visual="bf",
},
@@ -175167,7 +191407,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C9 },
+ specials={ "font", 0x3C9 },
unicodeslot=0x1D788,
visual="bf",
},
@@ -175185,7 +191425,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F5 },
+ specials={ "font", 0x3F5 },
unicodeslot=0x1D78A,
visual="bf",
},
@@ -175194,7 +191434,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D1 },
+ specials={ "font", 0x3D1 },
unicodeslot=0x1D78B,
visual="bf",
},
@@ -175203,7 +191443,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD KAPPA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F0 },
+ specials={ "font", 0x3F0 },
unicodeslot=0x1D78C,
visual="bf",
},
@@ -175212,7 +191452,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD PHI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D5 },
+ specials={ "font", 0x3D5 },
unicodeslot=0x1D78D,
visual="bf",
},
@@ -175221,7 +191461,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD RHO SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F1 },
+ specials={ "font", 0x3F1 },
unicodeslot=0x1D78E,
visual="bf",
},
@@ -175230,7 +191470,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD PI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D6 },
+ specials={ "font", 0x3D6 },
unicodeslot=0x1D78F,
visual="bf",
},
@@ -175239,7 +191479,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x0391 },
+ specials={ "font", 0x391 },
unicodeslot=0x1D790,
visual="bi",
},
@@ -175248,7 +191488,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0392 },
+ specials={ "font", 0x392 },
unicodeslot=0x1D791,
visual="bi",
},
@@ -175257,7 +191497,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x0393 },
+ specials={ "font", 0x393 },
unicodeslot=0x1D792,
visual="bi",
},
@@ -175266,7 +191506,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0394 },
+ specials={ "font", 0x394 },
unicodeslot=0x1D793,
visual="bi",
},
@@ -175275,7 +191515,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x0395 },
+ specials={ "font", 0x395 },
unicodeslot=0x1D794,
visual="bi",
},
@@ -175284,7 +191524,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0396 },
+ specials={ "font", 0x396 },
unicodeslot=0x1D795,
visual="bi",
},
@@ -175293,7 +191533,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0397 },
+ specials={ "font", 0x397 },
unicodeslot=0x1D796,
visual="bi",
},
@@ -175302,7 +191542,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x0398 },
+ specials={ "font", 0x398 },
unicodeslot=0x1D797,
visual="bi",
},
@@ -175311,7 +191551,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x0399 },
+ specials={ "font", 0x399 },
unicodeslot=0x1D798,
visual="bi",
},
@@ -175320,7 +191560,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x039A },
+ specials={ "font", 0x39A },
unicodeslot=0x1D799,
visual="bi",
},
@@ -175329,7 +191569,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x039B },
+ specials={ "font", 0x39B },
unicodeslot=0x1D79A,
visual="bi",
},
@@ -175338,7 +191578,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x039C },
+ specials={ "font", 0x39C },
unicodeslot=0x1D79B,
visual="bi",
},
@@ -175347,7 +191587,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x039D },
+ specials={ "font", 0x39D },
unicodeslot=0x1D79C,
visual="bi",
},
@@ -175356,7 +191596,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x039E },
+ specials={ "font", 0x39E },
unicodeslot=0x1D79D,
visual="bi",
},
@@ -175365,7 +191605,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x039F },
+ specials={ "font", 0x39F },
unicodeslot=0x1D79E,
visual="bi",
},
@@ -175374,7 +191614,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A0 },
+ specials={ "font", 0x3A0 },
unicodeslot=0x1D79F,
visual="bi",
},
@@ -175383,7 +191623,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03A1 },
+ specials={ "font", 0x3A1 },
unicodeslot=0x1D7A0,
visual="bi",
},
@@ -175392,7 +191632,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F4 },
+ specials={ "font", 0x3F4 },
unicodeslot=0x1D7A1,
visual="bi",
},
@@ -175401,7 +191641,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A3 },
+ specials={ "font", 0x3A3 },
unicodeslot=0x1D7A2,
visual="bi",
},
@@ -175410,7 +191650,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03A4 },
+ specials={ "font", 0x3A4 },
unicodeslot=0x1D7A3,
visual="bi",
},
@@ -175419,7 +191659,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03A5 },
+ specials={ "font", 0x3A5 },
unicodeslot=0x1D7A4,
visual="bi",
},
@@ -175428,7 +191668,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A6 },
+ specials={ "font", 0x3A6 },
unicodeslot=0x1D7A5,
visual="bi",
},
@@ -175437,7 +191677,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A7 },
+ specials={ "font", 0x3A7 },
unicodeslot=0x1D7A6,
visual="bi",
},
@@ -175446,7 +191686,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03A8 },
+ specials={ "font", 0x3A8 },
unicodeslot=0x1D7A7,
visual="bi",
},
@@ -175455,7 +191695,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03A9 },
+ specials={ "font", 0x3A9 },
unicodeslot=0x1D7A8,
visual="bi",
},
@@ -175473,7 +191713,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B1 },
+ specials={ "font", 0x3B1 },
unicodeslot=0x1D7AA,
visual="bi",
},
@@ -175482,7 +191722,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL BETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B2 },
+ specials={ "font", 0x3B2 },
unicodeslot=0x1D7AB,
visual="bi",
},
@@ -175491,7 +191731,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL GAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B3 },
+ specials={ "font", 0x3B3 },
unicodeslot=0x1D7AC,
visual="bi",
},
@@ -175500,7 +191740,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL DELTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B4 },
+ specials={ "font", 0x3B4 },
unicodeslot=0x1D7AD,
visual="bi",
},
@@ -175509,7 +191749,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL EPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03B5 },
+ specials={ "font", 0x3B5 },
unicodeslot=0x1D7AE,
visual="bi",
},
@@ -175518,7 +191758,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ZETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B6 },
+ specials={ "font", 0x3B6 },
unicodeslot=0x1D7AF,
visual="bi",
},
@@ -175527,7 +191767,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B7 },
+ specials={ "font", 0x3B7 },
unicodeslot=0x1D7B0,
visual="bi",
},
@@ -175536,7 +191776,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL THETA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B8 },
+ specials={ "font", 0x3B8 },
unicodeslot=0x1D7B1,
visual="bi",
},
@@ -175545,7 +191785,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL IOTA",
direction="l",
linebreak="al",
- specials={ "font", 0x03B9 },
+ specials={ "font", 0x3B9 },
unicodeslot=0x1D7B2,
visual="bi",
},
@@ -175554,7 +191794,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL KAPPA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BA },
+ specials={ "font", 0x3BA },
unicodeslot=0x1D7B3,
visual="bi",
},
@@ -175563,7 +191803,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL LAMDA",
direction="l",
linebreak="al",
- specials={ "font", 0x03BB },
+ specials={ "font", 0x3BB },
unicodeslot=0x1D7B4,
visual="bi",
},
@@ -175572,7 +191812,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL MU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BC },
+ specials={ "font", 0x3BC },
unicodeslot=0x1D7B5,
visual="bi",
},
@@ -175581,7 +191821,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL NU",
direction="l",
linebreak="al",
- specials={ "font", 0x03BD },
+ specials={ "font", 0x3BD },
unicodeslot=0x1D7B6,
visual="bi",
},
@@ -175590,7 +191830,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL XI",
direction="l",
linebreak="al",
- specials={ "font", 0x03BE },
+ specials={ "font", 0x3BE },
unicodeslot=0x1D7B7,
visual="bi",
},
@@ -175599,7 +191839,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMICRON",
direction="l",
linebreak="al",
- specials={ "font", 0x03BF },
+ specials={ "font", 0x3BF },
unicodeslot=0x1D7B8,
visual="bi",
},
@@ -175608,7 +191848,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C0 },
+ specials={ "font", 0x3C0 },
unicodeslot=0x1D7B9,
visual="bi",
},
@@ -175617,7 +191857,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL RHO",
direction="l",
linebreak="al",
- specials={ "font", 0x03C1 },
+ specials={ "font", 0x3C1 },
unicodeslot=0x1D7BA,
visual="bi",
},
@@ -175626,7 +191866,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL FINAL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C2 },
+ specials={ "font", 0x3C2 },
unicodeslot=0x1D7BB,
visual="bi",
},
@@ -175635,7 +191875,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL SIGMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C3 },
+ specials={ "font", 0x3C3 },
unicodeslot=0x1D7BC,
visual="bi",
},
@@ -175644,7 +191884,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL TAU",
direction="l",
linebreak="al",
- specials={ "font", 0x03C4 },
+ specials={ "font", 0x3C4 },
unicodeslot=0x1D7BD,
visual="bi",
},
@@ -175653,7 +191893,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL UPSILON",
direction="l",
linebreak="al",
- specials={ "font", 0x03C5 },
+ specials={ "font", 0x3C5 },
unicodeslot=0x1D7BE,
visual="bi",
},
@@ -175662,7 +191902,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C6 },
+ specials={ "font", 0x3C6 },
unicodeslot=0x1D7BF,
visual="bi",
},
@@ -175671,7 +191911,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL CHI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C7 },
+ specials={ "font", 0x3C7 },
unicodeslot=0x1D7C0,
visual="bi",
},
@@ -175680,7 +191920,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PSI",
direction="l",
linebreak="al",
- specials={ "font", 0x03C8 },
+ specials={ "font", 0x3C8 },
unicodeslot=0x1D7C1,
visual="bi",
},
@@ -175689,7 +191929,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA",
direction="l",
linebreak="al",
- specials={ "font", 0x03C9 },
+ specials={ "font", 0x3C9 },
unicodeslot=0x1D7C2,
visual="bi",
},
@@ -175707,7 +191947,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F5 },
+ specials={ "font", 0x3F5 },
unicodeslot=0x1D7C4,
visual="bi",
},
@@ -175716,7 +191956,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC THETA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D1 },
+ specials={ "font", 0x3D1 },
unicodeslot=0x1D7C5,
visual="bi",
},
@@ -175725,7 +191965,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC KAPPA SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F0 },
+ specials={ "font", 0x3F0 },
unicodeslot=0x1D7C6,
visual="bi",
},
@@ -175734,7 +191974,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC PHI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D5 },
+ specials={ "font", 0x3D5 },
unicodeslot=0x1D7C7,
visual="bi",
},
@@ -175743,7 +191983,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC RHO SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03F1 },
+ specials={ "font", 0x3F1 },
unicodeslot=0x1D7C8,
visual="bi",
},
@@ -175752,7 +191992,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD ITALIC PI SYMBOL",
direction="l",
linebreak="al",
- specials={ "font", 0x03D6 },
+ specials={ "font", 0x3D6 },
unicodeslot=0x1D7C9,
visual="bi",
},
@@ -175761,7 +192001,7 @@ characters.data={
description="MATHEMATICAL BOLD CAPITAL DIGAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03DC },
+ specials={ "font", 0x3DC },
unicodeslot=0x1D7CA,
visual="bf",
},
@@ -175770,7 +192010,7 @@ characters.data={
description="MATHEMATICAL BOLD SMALL DIGAMMA",
direction="l",
linebreak="al",
- specials={ "font", 0x03DD },
+ specials={ "font", 0x3DD },
unicodeslot=0x1D7CB,
visual="bf",
},
@@ -175779,7 +192019,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT ZERO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0030 },
+ specials={ "font", 0x30 },
unicodeslot=0x1D7CE,
visual="bf",
},
@@ -175788,7 +192028,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT ONE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0031 },
+ specials={ "font", 0x31 },
unicodeslot=0x1D7CF,
visual="bf",
},
@@ -175797,7 +192037,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT TWO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0032 },
+ specials={ "font", 0x32 },
unicodeslot=0x1D7D0,
visual="bf",
},
@@ -175806,7 +192046,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT THREE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0033 },
+ specials={ "font", 0x33 },
unicodeslot=0x1D7D1,
visual="bf",
},
@@ -175815,7 +192055,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT FOUR",
direction="en",
linebreak="nu",
- specials={ "font", 0x0034 },
+ specials={ "font", 0x34 },
unicodeslot=0x1D7D2,
visual="bf",
},
@@ -175824,7 +192064,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT FIVE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0035 },
+ specials={ "font", 0x35 },
unicodeslot=0x1D7D3,
visual="bf",
},
@@ -175833,7 +192073,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT SIX",
direction="en",
linebreak="nu",
- specials={ "font", 0x0036 },
+ specials={ "font", 0x36 },
unicodeslot=0x1D7D4,
visual="bf",
},
@@ -175842,7 +192082,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT SEVEN",
direction="en",
linebreak="nu",
- specials={ "font", 0x0037 },
+ specials={ "font", 0x37 },
unicodeslot=0x1D7D5,
visual="bf",
},
@@ -175851,7 +192091,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT EIGHT",
direction="en",
linebreak="nu",
- specials={ "font", 0x0038 },
+ specials={ "font", 0x38 },
unicodeslot=0x1D7D6,
visual="bf",
},
@@ -175860,7 +192100,7 @@ characters.data={
description="MATHEMATICAL BOLD DIGIT NINE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0039 },
+ specials={ "font", 0x39 },
unicodeslot=0x1D7D7,
visual="bf",
},
@@ -175869,7 +192109,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT ZERO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0030 },
+ specials={ "font", 0x30 },
unicodeslot=0x1D7D8,
},
[0x1D7D9]={
@@ -175877,7 +192117,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT ONE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0031 },
+ specials={ "font", 0x31 },
unicodeslot=0x1D7D9,
},
[0x1D7DA]={
@@ -175885,7 +192125,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT TWO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0032 },
+ specials={ "font", 0x32 },
unicodeslot=0x1D7DA,
},
[0x1D7DB]={
@@ -175893,7 +192133,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT THREE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0033 },
+ specials={ "font", 0x33 },
unicodeslot=0x1D7DB,
},
[0x1D7DC]={
@@ -175901,7 +192141,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT FOUR",
direction="en",
linebreak="nu",
- specials={ "font", 0x0034 },
+ specials={ "font", 0x34 },
unicodeslot=0x1D7DC,
},
[0x1D7DD]={
@@ -175909,7 +192149,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT FIVE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0035 },
+ specials={ "font", 0x35 },
unicodeslot=0x1D7DD,
},
[0x1D7DE]={
@@ -175917,7 +192157,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT SIX",
direction="en",
linebreak="nu",
- specials={ "font", 0x0036 },
+ specials={ "font", 0x36 },
unicodeslot=0x1D7DE,
},
[0x1D7DF]={
@@ -175925,7 +192165,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT SEVEN",
direction="en",
linebreak="nu",
- specials={ "font", 0x0037 },
+ specials={ "font", 0x37 },
unicodeslot=0x1D7DF,
},
[0x1D7E0]={
@@ -175933,7 +192173,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT EIGHT",
direction="en",
linebreak="nu",
- specials={ "font", 0x0038 },
+ specials={ "font", 0x38 },
unicodeslot=0x1D7E0,
},
[0x1D7E1]={
@@ -175941,7 +192181,7 @@ characters.data={
description="MATHEMATICAL DOUBLE-STRUCK DIGIT NINE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0039 },
+ specials={ "font", 0x39 },
unicodeslot=0x1D7E1,
},
[0x1D7E2]={
@@ -175949,7 +192189,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT ZERO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0030 },
+ specials={ "font", 0x30 },
unicodeslot=0x1D7E2,
},
[0x1D7E3]={
@@ -175957,7 +192197,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT ONE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0031 },
+ specials={ "font", 0x31 },
unicodeslot=0x1D7E3,
},
[0x1D7E4]={
@@ -175965,7 +192205,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT TWO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0032 },
+ specials={ "font", 0x32 },
unicodeslot=0x1D7E4,
},
[0x1D7E5]={
@@ -175973,7 +192213,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT THREE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0033 },
+ specials={ "font", 0x33 },
unicodeslot=0x1D7E5,
},
[0x1D7E6]={
@@ -175981,7 +192221,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT FOUR",
direction="en",
linebreak="nu",
- specials={ "font", 0x0034 },
+ specials={ "font", 0x34 },
unicodeslot=0x1D7E6,
},
[0x1D7E7]={
@@ -175989,7 +192229,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT FIVE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0035 },
+ specials={ "font", 0x35 },
unicodeslot=0x1D7E7,
},
[0x1D7E8]={
@@ -175997,7 +192237,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT SIX",
direction="en",
linebreak="nu",
- specials={ "font", 0x0036 },
+ specials={ "font", 0x36 },
unicodeslot=0x1D7E8,
},
[0x1D7E9]={
@@ -176005,7 +192245,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT SEVEN",
direction="en",
linebreak="nu",
- specials={ "font", 0x0037 },
+ specials={ "font", 0x37 },
unicodeslot=0x1D7E9,
},
[0x1D7EA]={
@@ -176013,7 +192253,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT EIGHT",
direction="en",
linebreak="nu",
- specials={ "font", 0x0038 },
+ specials={ "font", 0x38 },
unicodeslot=0x1D7EA,
},
[0x1D7EB]={
@@ -176021,7 +192261,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF DIGIT NINE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0039 },
+ specials={ "font", 0x39 },
unicodeslot=0x1D7EB,
},
[0x1D7EC]={
@@ -176029,7 +192269,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT ZERO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0030 },
+ specials={ "font", 0x30 },
unicodeslot=0x1D7EC,
visual="bf",
},
@@ -176038,7 +192278,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT ONE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0031 },
+ specials={ "font", 0x31 },
unicodeslot=0x1D7ED,
visual="bf",
},
@@ -176047,7 +192287,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT TWO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0032 },
+ specials={ "font", 0x32 },
unicodeslot=0x1D7EE,
visual="bf",
},
@@ -176056,7 +192296,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT THREE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0033 },
+ specials={ "font", 0x33 },
unicodeslot=0x1D7EF,
visual="bf",
},
@@ -176065,7 +192305,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT FOUR",
direction="en",
linebreak="nu",
- specials={ "font", 0x0034 },
+ specials={ "font", 0x34 },
unicodeslot=0x1D7F0,
visual="bf",
},
@@ -176074,7 +192314,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT FIVE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0035 },
+ specials={ "font", 0x35 },
unicodeslot=0x1D7F1,
visual="bf",
},
@@ -176083,7 +192323,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT SIX",
direction="en",
linebreak="nu",
- specials={ "font", 0x0036 },
+ specials={ "font", 0x36 },
unicodeslot=0x1D7F2,
visual="bf",
},
@@ -176092,7 +192332,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT SEVEN",
direction="en",
linebreak="nu",
- specials={ "font", 0x0037 },
+ specials={ "font", 0x37 },
unicodeslot=0x1D7F3,
visual="bf",
},
@@ -176101,7 +192341,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT EIGHT",
direction="en",
linebreak="nu",
- specials={ "font", 0x0038 },
+ specials={ "font", 0x38 },
unicodeslot=0x1D7F4,
visual="bf",
},
@@ -176110,7 +192350,7 @@ characters.data={
description="MATHEMATICAL SANS-SERIF BOLD DIGIT NINE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0039 },
+ specials={ "font", 0x39 },
unicodeslot=0x1D7F5,
visual="bf",
},
@@ -176119,7 +192359,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT ZERO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0030 },
+ specials={ "font", 0x30 },
unicodeslot=0x1D7F6,
},
[0x1D7F7]={
@@ -176127,7 +192367,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT ONE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0031 },
+ specials={ "font", 0x31 },
unicodeslot=0x1D7F7,
},
[0x1D7F8]={
@@ -176135,7 +192375,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT TWO",
direction="en",
linebreak="nu",
- specials={ "font", 0x0032 },
+ specials={ "font", 0x32 },
unicodeslot=0x1D7F8,
},
[0x1D7F9]={
@@ -176143,7 +192383,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT THREE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0033 },
+ specials={ "font", 0x33 },
unicodeslot=0x1D7F9,
},
[0x1D7FA]={
@@ -176151,7 +192391,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT FOUR",
direction="en",
linebreak="nu",
- specials={ "font", 0x0034 },
+ specials={ "font", 0x34 },
unicodeslot=0x1D7FA,
},
[0x1D7FB]={
@@ -176159,7 +192399,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT FIVE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0035 },
+ specials={ "font", 0x35 },
unicodeslot=0x1D7FB,
},
[0x1D7FC]={
@@ -176167,7 +192407,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT SIX",
direction="en",
linebreak="nu",
- specials={ "font", 0x0036 },
+ specials={ "font", 0x36 },
unicodeslot=0x1D7FC,
},
[0x1D7FD]={
@@ -176175,7 +192415,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT SEVEN",
direction="en",
linebreak="nu",
- specials={ "font", 0x0037 },
+ specials={ "font", 0x37 },
unicodeslot=0x1D7FD,
},
[0x1D7FE]={
@@ -176183,7 +192423,7 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT EIGHT",
direction="en",
linebreak="nu",
- specials={ "font", 0x0038 },
+ specials={ "font", 0x38 },
unicodeslot=0x1D7FE,
},
[0x1D7FF]={
@@ -176191,16 +192431,1514 @@ characters.data={
description="MATHEMATICAL MONOSPACE DIGIT NINE",
direction="en",
linebreak="nu",
- specials={ "font", 0x0039 },
+ specials={ "font", 0x39 },
unicodeslot=0x1D7FF,
},
+ [0x1E800]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M001 KI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E800,
+ },
+ [0x1E801]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M002 KA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E801,
+ },
+ [0x1E802]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M003 KU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E802,
+ },
+ [0x1E803]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M065 KEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E803,
+ },
+ [0x1E804]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M095 KE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E804,
+ },
+ [0x1E805]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M076 KOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E805,
+ },
+ [0x1E806]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M048 KO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E806,
+ },
+ [0x1E807]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M179 KUA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E807,
+ },
+ [0x1E808]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M004 WI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E808,
+ },
+ [0x1E809]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M005 WA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E809,
+ },
+ [0x1E80A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M006 WU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E80A,
+ },
+ [0x1E80B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M126 WEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E80B,
+ },
+ [0x1E80C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M118 WE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E80C,
+ },
+ [0x1E80D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M114 WOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E80D,
+ },
+ [0x1E80E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M045 WO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E80E,
+ },
+ [0x1E80F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M194 WUI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E80F,
+ },
+ [0x1E810]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M143 WEI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E810,
+ },
+ [0x1E811]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M061 WVI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E811,
+ },
+ [0x1E812]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M049 WVA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E812,
+ },
+ [0x1E813]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M139 WVE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E813,
+ },
+ [0x1E814]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M007 MIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E814,
+ },
+ [0x1E815]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M008 MAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E815,
+ },
+ [0x1E816]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M009 MUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E816,
+ },
+ [0x1E817]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M059 MEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E817,
+ },
+ [0x1E818]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M094 MON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E818,
+ },
+ [0x1E819]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M154 MUAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E819,
+ },
+ [0x1E81A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M189 MUEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E81A,
+ },
+ [0x1E81B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M010 BI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E81B,
+ },
+ [0x1E81C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M011 BA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E81C,
+ },
+ [0x1E81D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M012 BU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E81D,
+ },
+ [0x1E81E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M150 BEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E81E,
+ },
+ [0x1E81F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M097 BE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E81F,
+ },
+ [0x1E820]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M103 BOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E820,
+ },
+ [0x1E821]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M138 BO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E821,
+ },
+ [0x1E822]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M013 I",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E822,
+ },
+ [0x1E823]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M014 A",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E823,
+ },
+ [0x1E824]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M015 U",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E824,
+ },
+ [0x1E825]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M163 EE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E825,
+ },
+ [0x1E826]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M100 E",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E826,
+ },
+ [0x1E827]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M165 OO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E827,
+ },
+ [0x1E828]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M147 O",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E828,
+ },
+ [0x1E829]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M137 EI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E829,
+ },
+ [0x1E82A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M131 IN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E82A,
+ },
+ [0x1E82B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M135 IN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E82B,
+ },
+ [0x1E82C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M195 AN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E82C,
+ },
+ [0x1E82D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M178 EN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E82D,
+ },
+ [0x1E82E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M019 SI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E82E,
+ },
+ [0x1E82F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M020 SA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E82F,
+ },
+ [0x1E830]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M021 SU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E830,
+ },
+ [0x1E831]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M162 SEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E831,
+ },
+ [0x1E832]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M116 SE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E832,
+ },
+ [0x1E833]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M136 SOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E833,
+ },
+ [0x1E834]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M079 SO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E834,
+ },
+ [0x1E835]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M196 SIA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E835,
+ },
+ [0x1E836]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M025 LI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E836,
+ },
+ [0x1E837]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M026 LA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E837,
+ },
+ [0x1E838]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M027 LU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E838,
+ },
+ [0x1E839]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M084 LEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E839,
+ },
+ [0x1E83A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M073 LE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E83A,
+ },
+ [0x1E83B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M054 LOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E83B,
+ },
+ [0x1E83C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M153 LO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E83C,
+ },
+ [0x1E83D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M110 LONG LE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E83D,
+ },
+ [0x1E83E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M016 DI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E83E,
+ },
+ [0x1E83F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M017 DA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E83F,
+ },
+ [0x1E840]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M018 DU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E840,
+ },
+ [0x1E841]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M089 DEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E841,
+ },
+ [0x1E842]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M180 DOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E842,
+ },
+ [0x1E843]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M181 DO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E843,
+ },
+ [0x1E844]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M022 TI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E844,
+ },
+ [0x1E845]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M023 TA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E845,
+ },
+ [0x1E846]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M024 TU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E846,
+ },
+ [0x1E847]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M091 TEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E847,
+ },
+ [0x1E848]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M055 TE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E848,
+ },
+ [0x1E849]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M104 TOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E849,
+ },
+ [0x1E84A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M069 TO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E84A,
+ },
+ [0x1E84B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M028 JI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E84B,
+ },
+ [0x1E84C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M029 JA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E84C,
+ },
+ [0x1E84D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M030 JU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E84D,
+ },
+ [0x1E84E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M157 JEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E84E,
+ },
+ [0x1E84F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M113 JE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E84F,
+ },
+ [0x1E850]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M160 JOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E850,
+ },
+ [0x1E851]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M063 JO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E851,
+ },
+ [0x1E852]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M175 LONG JO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E852,
+ },
+ [0x1E853]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M031 YI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E853,
+ },
+ [0x1E854]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M032 YA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E854,
+ },
+ [0x1E855]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M033 YU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E855,
+ },
+ [0x1E856]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M109 YEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E856,
+ },
+ [0x1E857]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M080 YE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E857,
+ },
+ [0x1E858]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M141 YOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E858,
+ },
+ [0x1E859]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M121 YO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E859,
+ },
+ [0x1E85A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M034 FI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E85A,
+ },
+ [0x1E85B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M035 FA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E85B,
+ },
+ [0x1E85C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M036 FU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E85C,
+ },
+ [0x1E85D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M078 FEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E85D,
+ },
+ [0x1E85E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M075 FE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E85E,
+ },
+ [0x1E85F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M133 FOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E85F,
+ },
+ [0x1E860]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M088 FO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E860,
+ },
+ [0x1E861]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M197 FUA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E861,
+ },
+ [0x1E862]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M101 FAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E862,
+ },
+ [0x1E863]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M037 NIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E863,
+ },
+ [0x1E864]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M038 NAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E864,
+ },
+ [0x1E865]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M039 NUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E865,
+ },
+ [0x1E866]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M117 NEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E866,
+ },
+ [0x1E867]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M169 NON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E867,
+ },
+ [0x1E868]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M176 HI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E868,
+ },
+ [0x1E869]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M041 HA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E869,
+ },
+ [0x1E86A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M186 HU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E86A,
+ },
+ [0x1E86B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M040 HEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E86B,
+ },
+ [0x1E86C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M096 HE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E86C,
+ },
+ [0x1E86D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M042 HOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E86D,
+ },
+ [0x1E86E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M140 HO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E86E,
+ },
+ [0x1E86F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M083 HEEI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E86F,
+ },
+ [0x1E870]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M128 HOOU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E870,
+ },
+ [0x1E871]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M053 HIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E871,
+ },
+ [0x1E872]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M130 HAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E872,
+ },
+ [0x1E873]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M087 HUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E873,
+ },
+ [0x1E874]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M052 HEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E874,
+ },
+ [0x1E875]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M193 HON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E875,
+ },
+ [0x1E876]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M046 HUAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E876,
+ },
+ [0x1E877]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M090 NGGI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E877,
+ },
+ [0x1E878]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M043 NGGA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E878,
+ },
+ [0x1E879]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M082 NGGU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E879,
+ },
+ [0x1E87A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M115 NGGEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E87A,
+ },
+ [0x1E87B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M146 NGGE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E87B,
+ },
+ [0x1E87C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M156 NGGOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E87C,
+ },
+ [0x1E87D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M120 NGGO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E87D,
+ },
+ [0x1E87E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M159 NGGAA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E87E,
+ },
+ [0x1E87F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M127 NGGUA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E87F,
+ },
+ [0x1E880]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M086 LONG NGGE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E880,
+ },
+ [0x1E881]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M106 LONG NGGOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E881,
+ },
+ [0x1E882]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M183 LONG NGGO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E882,
+ },
+ [0x1E883]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M155 GI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E883,
+ },
+ [0x1E884]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M111 GA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E884,
+ },
+ [0x1E885]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M168 GU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E885,
+ },
+ [0x1E886]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M190 GEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E886,
+ },
+ [0x1E887]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M166 GUEI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E887,
+ },
+ [0x1E888]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M167 GUAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E888,
+ },
+ [0x1E889]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M184 NGEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E889,
+ },
+ [0x1E88A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M057 NGON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E88A,
+ },
+ [0x1E88B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M177 NGUAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E88B,
+ },
+ [0x1E88C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M068 PI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E88C,
+ },
+ [0x1E88D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M099 PA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E88D,
+ },
+ [0x1E88E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M050 PU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E88E,
+ },
+ [0x1E88F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M081 PEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E88F,
+ },
+ [0x1E890]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M051 PE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E890,
+ },
+ [0x1E891]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M102 POO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E891,
+ },
+ [0x1E892]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M066 PO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E892,
+ },
+ [0x1E893]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M145 MBI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E893,
+ },
+ [0x1E894]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M062 MBA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E894,
+ },
+ [0x1E895]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M122 MBU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E895,
+ },
+ [0x1E896]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M047 MBEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E896,
+ },
+ [0x1E897]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M188 MBEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E897,
+ },
+ [0x1E898]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M072 MBE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E898,
+ },
+ [0x1E899]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M172 MBOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E899,
+ },
+ [0x1E89A]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M174 MBO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E89A,
+ },
+ [0x1E89B]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M187 MBUU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E89B,
+ },
+ [0x1E89C]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M161 LONG MBE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E89C,
+ },
+ [0x1E89D]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M105 LONG MBOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E89D,
+ },
+ [0x1E89E]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M142 LONG MBO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E89E,
+ },
+ [0x1E89F]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M132 KPI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E89F,
+ },
+ [0x1E8A0]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M092 KPA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A0,
+ },
+ [0x1E8A1]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M074 KPU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A1,
+ },
+ [0x1E8A2]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M044 KPEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A2,
+ },
+ [0x1E8A3]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M108 KPE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A3,
+ },
+ [0x1E8A4]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M112 KPOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A4,
+ },
+ [0x1E8A5]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M158 KPO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A5,
+ },
+ [0x1E8A6]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M124 GBI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A6,
+ },
+ [0x1E8A7]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M056 GBA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A7,
+ },
+ [0x1E8A8]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M148 GBU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A8,
+ },
+ [0x1E8A9]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M093 GBEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8A9,
+ },
+ [0x1E8AA]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M107 GBE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8AA,
+ },
+ [0x1E8AB]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M071 GBOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8AB,
+ },
+ [0x1E8AC]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M070 GBO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8AC,
+ },
+ [0x1E8AD]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M171 RA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8AD,
+ },
+ [0x1E8AE]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M123 NDI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8AE,
+ },
+ [0x1E8AF]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M129 NDA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8AF,
+ },
+ [0x1E8B0]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M125 NDU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B0,
+ },
+ [0x1E8B1]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M191 NDEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B1,
+ },
+ [0x1E8B2]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M119 NDE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B2,
+ },
+ [0x1E8B3]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M067 NDOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B3,
+ },
+ [0x1E8B4]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M064 NDO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B4,
+ },
+ [0x1E8B5]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M152 NJA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B5,
+ },
+ [0x1E8B6]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M192 NJU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B6,
+ },
+ [0x1E8B7]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M149 NJEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B7,
+ },
+ [0x1E8B8]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M134 NJOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B8,
+ },
+ [0x1E8B9]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M182 VI",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8B9,
+ },
+ [0x1E8BA]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M185 VA",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8BA,
+ },
+ [0x1E8BB]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M151 VU",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8BB,
+ },
+ [0x1E8BC]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M173 VEE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8BC,
+ },
+ [0x1E8BD]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M085 VE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8BD,
+ },
+ [0x1E8BE]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M144 VOO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8BE,
+ },
+ [0x1E8BF]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M077 VO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8BF,
+ },
+ [0x1E8C0]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M164 NYIN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C0,
+ },
+ [0x1E8C1]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M058 NYAN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C1,
+ },
+ [0x1E8C2]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M170 NYUN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C2,
+ },
+ [0x1E8C3]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M098 NYEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C3,
+ },
+ [0x1E8C4]={
+ category="lo",
+ description="MENDE KIKAKUI SYLLABLE M060 NYON",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C4,
+ },
+ [0x1E8C7]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT ONE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C7,
+ },
+ [0x1E8C8]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT TWO",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C8,
+ },
+ [0x1E8C9]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT THREE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8C9,
+ },
+ [0x1E8CA]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT FOUR",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8CA,
+ },
+ [0x1E8CB]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT FIVE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8CB,
+ },
+ [0x1E8CC]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT SIX",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8CC,
+ },
+ [0x1E8CD]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT SEVEN",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8CD,
+ },
+ [0x1E8CE]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT EIGHT",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8CE,
+ },
+ [0x1E8CF]={
+ category="no",
+ description="MENDE KIKAKUI DIGIT NINE",
+ direction="r",
+ linebreak="al",
+ unicodeslot=0x1E8CF,
+ },
+ [0x1E8D0]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER TEENS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D0,
+ },
+ [0x1E8D1]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER TENS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D1,
+ },
+ [0x1E8D2]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER HUNDREDS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D2,
+ },
+ [0x1E8D3]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER THOUSANDS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D3,
+ },
+ [0x1E8D4]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER TEN THOUSANDS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D4,
+ },
+ [0x1E8D5]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER HUNDRED THOUSANDS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D5,
+ },
+ [0x1E8D6]={
+ category="mn",
+ combining=0xDC,
+ description="MENDE KIKAKUI COMBINING NUMBER MILLIONS",
+ direction="nsm",
+ linebreak="cm",
+ unicodeslot=0x1E8D6,
+ },
[0x1EE00]={
category="lo",
comment="check math properties",
description="ARABIC MATHEMATICAL ALEF",
direction="al",
linebreak="al",
- specials={ "font", 0x0627 },
+ specials={ "font", 0x627 },
unicodeslot=0x1EE00,
},
[0x1EE01]={
@@ -176209,7 +193947,7 @@ characters.data={
description="ARABIC MATHEMATICAL BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0628 },
+ specials={ "font", 0x628 },
unicodeslot=0x1EE01,
},
[0x1EE02]={
@@ -176218,7 +193956,7 @@ characters.data={
description="ARABIC MATHEMATICAL JEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x062C },
+ specials={ "font", 0x62C },
unicodeslot=0x1EE02,
},
[0x1EE03]={
@@ -176227,7 +193965,7 @@ characters.data={
description="ARABIC MATHEMATICAL DAL",
direction="al",
linebreak="al",
- specials={ "font", 0x062F },
+ specials={ "font", 0x62F },
unicodeslot=0x1EE03,
},
[0x1EE05]={
@@ -176236,7 +193974,7 @@ characters.data={
description="ARABIC MATHEMATICAL WAW",
direction="al",
linebreak="al",
- specials={ "font", 0x0648 },
+ specials={ "font", 0x648 },
unicodeslot=0x1EE05,
},
[0x1EE06]={
@@ -176245,7 +193983,7 @@ characters.data={
description="ARABIC MATHEMATICAL ZAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0632 },
+ specials={ "font", 0x632 },
unicodeslot=0x1EE06,
},
[0x1EE07]={
@@ -176254,7 +193992,7 @@ characters.data={
description="ARABIC MATHEMATICAL HAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062D },
+ specials={ "font", 0x62D },
unicodeslot=0x1EE07,
},
[0x1EE08]={
@@ -176263,7 +194001,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0637 },
+ specials={ "font", 0x637 },
unicodeslot=0x1EE08,
},
[0x1EE09]={
@@ -176272,7 +194010,7 @@ characters.data={
description="ARABIC MATHEMATICAL YEH",
direction="al",
linebreak="al",
- specials={ "font", 0x064A },
+ specials={ "font", 0x64A },
unicodeslot=0x1EE09,
},
[0x1EE0A]={
@@ -176281,7 +194019,7 @@ characters.data={
description="ARABIC MATHEMATICAL KAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0643 },
+ specials={ "font", 0x643 },
unicodeslot=0x1EE0A,
},
[0x1EE0B]={
@@ -176290,7 +194028,7 @@ characters.data={
description="ARABIC MATHEMATICAL LAM",
direction="al",
linebreak="al",
- specials={ "font", 0x0644 },
+ specials={ "font", 0x644 },
unicodeslot=0x1EE0B,
},
[0x1EE0C]={
@@ -176299,7 +194037,7 @@ characters.data={
description="ARABIC MATHEMATICAL MEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x0645 },
+ specials={ "font", 0x645 },
unicodeslot=0x1EE0C,
},
[0x1EE0D]={
@@ -176308,7 +194046,7 @@ characters.data={
description="ARABIC MATHEMATICAL NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x0646 },
+ specials={ "font", 0x646 },
unicodeslot=0x1EE0D,
},
[0x1EE0E]={
@@ -176317,7 +194055,7 @@ characters.data={
description="ARABIC MATHEMATICAL SEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0633 },
+ specials={ "font", 0x633 },
unicodeslot=0x1EE0E,
},
[0x1EE0F]={
@@ -176326,7 +194064,7 @@ characters.data={
description="ARABIC MATHEMATICAL AIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0639 },
+ specials={ "font", 0x639 },
unicodeslot=0x1EE0F,
},
[0x1EE10]={
@@ -176335,7 +194073,7 @@ characters.data={
description="ARABIC MATHEMATICAL FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0641 },
+ specials={ "font", 0x641 },
unicodeslot=0x1EE10,
},
[0x1EE11]={
@@ -176344,7 +194082,7 @@ characters.data={
description="ARABIC MATHEMATICAL SAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0635 },
+ specials={ "font", 0x635 },
unicodeslot=0x1EE11,
},
[0x1EE12]={
@@ -176353,7 +194091,7 @@ characters.data={
description="ARABIC MATHEMATICAL QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0642 },
+ specials={ "font", 0x642 },
unicodeslot=0x1EE12,
},
[0x1EE13]={
@@ -176362,7 +194100,7 @@ characters.data={
description="ARABIC MATHEMATICAL REH",
direction="al",
linebreak="al",
- specials={ "font", 0x0631 },
+ specials={ "font", 0x631 },
unicodeslot=0x1EE13,
},
[0x1EE14]={
@@ -176371,7 +194109,7 @@ characters.data={
description="ARABIC MATHEMATICAL SHEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0634 },
+ specials={ "font", 0x634 },
unicodeslot=0x1EE14,
},
[0x1EE15]={
@@ -176380,7 +194118,7 @@ characters.data={
description="ARABIC MATHEMATICAL TEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062A },
+ specials={ "font", 0x62A },
unicodeslot=0x1EE15,
},
[0x1EE16]={
@@ -176389,7 +194127,7 @@ characters.data={
description="ARABIC MATHEMATICAL THEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062B },
+ specials={ "font", 0x62B },
unicodeslot=0x1EE16,
},
[0x1EE17]={
@@ -176398,7 +194136,7 @@ characters.data={
description="ARABIC MATHEMATICAL KHAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062E },
+ specials={ "font", 0x62E },
unicodeslot=0x1EE17,
},
[0x1EE18]={
@@ -176407,7 +194145,7 @@ characters.data={
description="ARABIC MATHEMATICAL THAL",
direction="al",
linebreak="al",
- specials={ "font", 0x0630 },
+ specials={ "font", 0x630 },
unicodeslot=0x1EE18,
},
[0x1EE19]={
@@ -176416,7 +194154,7 @@ characters.data={
description="ARABIC MATHEMATICAL DAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0636 },
+ specials={ "font", 0x636 },
unicodeslot=0x1EE19,
},
[0x1EE1A]={
@@ -176425,7 +194163,7 @@ characters.data={
description="ARABIC MATHEMATICAL ZAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0638 },
+ specials={ "font", 0x638 },
unicodeslot=0x1EE1A,
},
[0x1EE1B]={
@@ -176434,7 +194172,7 @@ characters.data={
description="ARABIC MATHEMATICAL GHAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x063A },
+ specials={ "font", 0x63A },
unicodeslot=0x1EE1B,
},
[0x1EE1C]={
@@ -176443,7 +194181,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOTLESS BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x066E },
+ specials={ "font", 0x66E },
unicodeslot=0x1EE1C,
},
[0x1EE1D]={
@@ -176452,7 +194190,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOTLESS NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x06BA },
+ specials={ "font", 0x6BA },
unicodeslot=0x1EE1D,
},
[0x1EE1E]={
@@ -176461,7 +194199,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOTLESS FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x06A1 },
+ specials={ "font", 0x6A1 },
unicodeslot=0x1EE1E,
},
[0x1EE1F]={
@@ -176470,7 +194208,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOTLESS QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x066F },
+ specials={ "font", 0x66F },
unicodeslot=0x1EE1F,
},
[0x1EE21]={
@@ -176479,7 +194217,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0628 },
+ specials={ "font", 0x628 },
unicodeslot=0x1EE21,
},
[0x1EE22]={
@@ -176488,7 +194226,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL JEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x062C },
+ specials={ "font", 0x62C },
unicodeslot=0x1EE22,
},
[0x1EE24]={
@@ -176497,7 +194235,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL HEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0647 },
+ specials={ "font", 0x647 },
unicodeslot=0x1EE24,
},
[0x1EE27]={
@@ -176506,7 +194244,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL HAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062D },
+ specials={ "font", 0x62D },
unicodeslot=0x1EE27,
},
[0x1EE29]={
@@ -176515,7 +194253,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL YEH",
direction="al",
linebreak="al",
- specials={ "font", 0x064A },
+ specials={ "font", 0x64A },
unicodeslot=0x1EE29,
},
[0x1EE2A]={
@@ -176524,7 +194262,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL KAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0643 },
+ specials={ "font", 0x643 },
unicodeslot=0x1EE2A,
},
[0x1EE2B]={
@@ -176533,7 +194271,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL LAM",
direction="al",
linebreak="al",
- specials={ "font", 0x0644 },
+ specials={ "font", 0x644 },
unicodeslot=0x1EE2B,
},
[0x1EE2C]={
@@ -176542,7 +194280,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL MEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x0645 },
+ specials={ "font", 0x645 },
unicodeslot=0x1EE2C,
},
[0x1EE2D]={
@@ -176551,7 +194289,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x0646 },
+ specials={ "font", 0x646 },
unicodeslot=0x1EE2D,
},
[0x1EE2E]={
@@ -176560,7 +194298,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL SEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0633 },
+ specials={ "font", 0x633 },
unicodeslot=0x1EE2E,
},
[0x1EE2F]={
@@ -176569,7 +194307,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL AIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0639 },
+ specials={ "font", 0x639 },
unicodeslot=0x1EE2F,
},
[0x1EE30]={
@@ -176578,7 +194316,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0641 },
+ specials={ "font", 0x641 },
unicodeslot=0x1EE30,
},
[0x1EE31]={
@@ -176587,7 +194325,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL SAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0635 },
+ specials={ "font", 0x635 },
unicodeslot=0x1EE31,
},
[0x1EE32]={
@@ -176596,7 +194334,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0642 },
+ specials={ "font", 0x642 },
unicodeslot=0x1EE32,
},
[0x1EE34]={
@@ -176605,7 +194343,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL SHEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0634 },
+ specials={ "font", 0x634 },
unicodeslot=0x1EE34,
},
[0x1EE35]={
@@ -176614,7 +194352,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL TEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062A },
+ specials={ "font", 0x62A },
unicodeslot=0x1EE35,
},
[0x1EE36]={
@@ -176623,7 +194361,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL THEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062B },
+ specials={ "font", 0x62B },
unicodeslot=0x1EE36,
},
[0x1EE37]={
@@ -176632,7 +194370,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL KHAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062E },
+ specials={ "font", 0x62E },
unicodeslot=0x1EE37,
},
[0x1EE39]={
@@ -176641,7 +194379,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL DAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0636 },
+ specials={ "font", 0x636 },
unicodeslot=0x1EE39,
},
[0x1EE3B]={
@@ -176650,7 +194388,7 @@ characters.data={
description="ARABIC MATHEMATICAL INITIAL GHAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x063A },
+ specials={ "font", 0x63A },
unicodeslot=0x1EE3B,
},
[0x1EE42]={
@@ -176659,7 +194397,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED JEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x062C },
+ specials={ "font", 0x62C },
unicodeslot=0x1EE42,
},
[0x1EE47]={
@@ -176668,7 +194406,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED HAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062D },
+ specials={ "font", 0x62D },
unicodeslot=0x1EE47,
},
[0x1EE49]={
@@ -176677,7 +194415,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED YEH",
direction="al",
linebreak="al",
- specials={ "font", 0x064A },
+ specials={ "font", 0x64A },
unicodeslot=0x1EE49,
},
[0x1EE4B]={
@@ -176686,7 +194424,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED LAM",
direction="al",
linebreak="al",
- specials={ "font", 0x0644 },
+ specials={ "font", 0x644 },
unicodeslot=0x1EE4B,
},
[0x1EE4D]={
@@ -176695,7 +194433,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x0646 },
+ specials={ "font", 0x646 },
unicodeslot=0x1EE4D,
},
[0x1EE4E]={
@@ -176704,7 +194442,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED SEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0633 },
+ specials={ "font", 0x633 },
unicodeslot=0x1EE4E,
},
[0x1EE4F]={
@@ -176713,7 +194451,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED AIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0639 },
+ specials={ "font", 0x639 },
unicodeslot=0x1EE4F,
},
[0x1EE51]={
@@ -176722,7 +194460,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED SAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0635 },
+ specials={ "font", 0x635 },
unicodeslot=0x1EE51,
},
[0x1EE52]={
@@ -176731,7 +194469,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0642 },
+ specials={ "font", 0x642 },
unicodeslot=0x1EE52,
},
[0x1EE54]={
@@ -176740,7 +194478,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED SHEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0634 },
+ specials={ "font", 0x634 },
unicodeslot=0x1EE54,
},
[0x1EE57]={
@@ -176749,7 +194487,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED KHAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062E },
+ specials={ "font", 0x62E },
unicodeslot=0x1EE57,
},
[0x1EE59]={
@@ -176758,7 +194496,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED DAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0636 },
+ specials={ "font", 0x636 },
unicodeslot=0x1EE59,
},
[0x1EE5B]={
@@ -176767,7 +194505,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED GHAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x063A },
+ specials={ "font", 0x63A },
unicodeslot=0x1EE5B,
},
[0x1EE5D]={
@@ -176776,7 +194514,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED DOTLESS NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x06BA },
+ specials={ "font", 0x6BA },
unicodeslot=0x1EE5D,
},
[0x1EE5F]={
@@ -176785,7 +194523,7 @@ characters.data={
description="ARABIC MATHEMATICAL TAILED DOTLESS QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x066F },
+ specials={ "font", 0x66F },
unicodeslot=0x1EE5F,
},
[0x1EE61]={
@@ -176794,7 +194532,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0628 },
+ specials={ "font", 0x628 },
unicodeslot=0x1EE61,
},
[0x1EE62]={
@@ -176803,7 +194541,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED JEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x062C },
+ specials={ "font", 0x62C },
unicodeslot=0x1EE62,
},
[0x1EE64]={
@@ -176812,7 +194550,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED HEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0647 },
+ specials={ "font", 0x647 },
unicodeslot=0x1EE64,
},
[0x1EE67]={
@@ -176821,7 +194559,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED HAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062D },
+ specials={ "font", 0x62D },
unicodeslot=0x1EE67,
},
[0x1EE68]={
@@ -176830,7 +194568,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED TAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0637 },
+ specials={ "font", 0x637 },
unicodeslot=0x1EE68,
},
[0x1EE69]={
@@ -176839,7 +194577,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED YEH",
direction="al",
linebreak="al",
- specials={ "font", 0x064A },
+ specials={ "font", 0x64A },
unicodeslot=0x1EE69,
},
[0x1EE6A]={
@@ -176848,7 +194586,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED KAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0643 },
+ specials={ "font", 0x643 },
unicodeslot=0x1EE6A,
},
[0x1EE6C]={
@@ -176857,7 +194595,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED MEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x0645 },
+ specials={ "font", 0x645 },
unicodeslot=0x1EE6C,
},
[0x1EE6D]={
@@ -176866,7 +194604,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x0646 },
+ specials={ "font", 0x646 },
unicodeslot=0x1EE6D,
},
[0x1EE6E]={
@@ -176875,7 +194613,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED SEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0633 },
+ specials={ "font", 0x633 },
unicodeslot=0x1EE6E,
},
[0x1EE6F]={
@@ -176884,7 +194622,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED AIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0639 },
+ specials={ "font", 0x639 },
unicodeslot=0x1EE6F,
},
[0x1EE70]={
@@ -176893,7 +194631,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0641 },
+ specials={ "font", 0x641 },
unicodeslot=0x1EE70,
},
[0x1EE71]={
@@ -176902,7 +194640,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED SAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0635 },
+ specials={ "font", 0x635 },
unicodeslot=0x1EE71,
},
[0x1EE72]={
@@ -176911,7 +194649,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0642 },
+ specials={ "font", 0x642 },
unicodeslot=0x1EE72,
},
[0x1EE74]={
@@ -176920,7 +194658,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED SHEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0634 },
+ specials={ "font", 0x634 },
unicodeslot=0x1EE74,
},
[0x1EE75]={
@@ -176929,7 +194667,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED TEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062A },
+ specials={ "font", 0x62A },
unicodeslot=0x1EE75,
},
[0x1EE76]={
@@ -176938,7 +194676,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED THEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062B },
+ specials={ "font", 0x62B },
unicodeslot=0x1EE76,
},
[0x1EE77]={
@@ -176947,7 +194685,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED KHAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062E },
+ specials={ "font", 0x62E },
unicodeslot=0x1EE77,
},
[0x1EE79]={
@@ -176956,7 +194694,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED DAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0636 },
+ specials={ "font", 0x636 },
unicodeslot=0x1EE79,
},
[0x1EE7A]={
@@ -176965,7 +194703,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED ZAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0638 },
+ specials={ "font", 0x638 },
unicodeslot=0x1EE7A,
},
[0x1EE7B]={
@@ -176974,7 +194712,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED GHAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x063A },
+ specials={ "font", 0x63A },
unicodeslot=0x1EE7B,
},
[0x1EE7C]={
@@ -176983,7 +194721,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED DOTLESS BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x066E },
+ specials={ "font", 0x66E },
unicodeslot=0x1EE7C,
},
[0x1EE7E]={
@@ -176992,7 +194730,7 @@ characters.data={
description="ARABIC MATHEMATICAL STRETCHED DOTLESS FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x06A1 },
+ specials={ "font", 0x6A1 },
unicodeslot=0x1EE7E,
},
[0x1EE80]={
@@ -177001,7 +194739,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED ALEF",
direction="al",
linebreak="al",
- specials={ "font", 0x0627 },
+ specials={ "font", 0x627 },
unicodeslot=0x1EE80,
},
[0x1EE81]={
@@ -177010,7 +194748,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0628 },
+ specials={ "font", 0x628 },
unicodeslot=0x1EE81,
},
[0x1EE82]={
@@ -177019,7 +194757,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED JEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x062C },
+ specials={ "font", 0x62C },
unicodeslot=0x1EE82,
},
[0x1EE83]={
@@ -177028,7 +194766,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED DAL",
direction="al",
linebreak="al",
- specials={ "font", 0x062F },
+ specials={ "font", 0x62F },
unicodeslot=0x1EE83,
},
[0x1EE84]={
@@ -177037,7 +194775,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED HEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0647 },
+ specials={ "font", 0x647 },
unicodeslot=0x1EE84,
},
[0x1EE85]={
@@ -177046,7 +194784,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED WAW",
direction="al",
linebreak="al",
- specials={ "font", 0x0648 },
+ specials={ "font", 0x648 },
unicodeslot=0x1EE85,
},
[0x1EE86]={
@@ -177055,7 +194793,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED ZAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0632 },
+ specials={ "font", 0x632 },
unicodeslot=0x1EE86,
},
[0x1EE87]={
@@ -177064,7 +194802,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED HAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062D },
+ specials={ "font", 0x62D },
unicodeslot=0x1EE87,
},
[0x1EE88]={
@@ -177073,7 +194811,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED TAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0637 },
+ specials={ "font", 0x637 },
unicodeslot=0x1EE88,
},
[0x1EE89]={
@@ -177082,7 +194820,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED YEH",
direction="al",
linebreak="al",
- specials={ "font", 0x064A },
+ specials={ "font", 0x64A },
unicodeslot=0x1EE89,
},
[0x1EE8B]={
@@ -177091,7 +194829,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED LAM",
direction="al",
linebreak="al",
- specials={ "font", 0x0644 },
+ specials={ "font", 0x644 },
unicodeslot=0x1EE8B,
},
[0x1EE8C]={
@@ -177100,7 +194838,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED MEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x0645 },
+ specials={ "font", 0x645 },
unicodeslot=0x1EE8C,
},
[0x1EE8D]={
@@ -177109,7 +194847,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x0646 },
+ specials={ "font", 0x646 },
unicodeslot=0x1EE8D,
},
[0x1EE8E]={
@@ -177118,7 +194856,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED SEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0633 },
+ specials={ "font", 0x633 },
unicodeslot=0x1EE8E,
},
[0x1EE8F]={
@@ -177127,7 +194865,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED AIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0639 },
+ specials={ "font", 0x639 },
unicodeslot=0x1EE8F,
},
[0x1EE90]={
@@ -177136,7 +194874,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0641 },
+ specials={ "font", 0x641 },
unicodeslot=0x1EE90,
},
[0x1EE91]={
@@ -177145,7 +194883,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED SAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0635 },
+ specials={ "font", 0x635 },
unicodeslot=0x1EE91,
},
[0x1EE92]={
@@ -177154,7 +194892,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0642 },
+ specials={ "font", 0x642 },
unicodeslot=0x1EE92,
},
[0x1EE93]={
@@ -177163,7 +194901,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED REH",
direction="al",
linebreak="al",
- specials={ "font", 0x0631 },
+ specials={ "font", 0x631 },
unicodeslot=0x1EE93,
},
[0x1EE94]={
@@ -177172,7 +194910,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED SHEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0634 },
+ specials={ "font", 0x634 },
unicodeslot=0x1EE94,
},
[0x1EE95]={
@@ -177181,7 +194919,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED TEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062A },
+ specials={ "font", 0x62A },
unicodeslot=0x1EE95,
},
[0x1EE96]={
@@ -177190,7 +194928,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED THEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062B },
+ specials={ "font", 0x62B },
unicodeslot=0x1EE96,
},
[0x1EE97]={
@@ -177199,7 +194937,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED KHAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062E },
+ specials={ "font", 0x62E },
unicodeslot=0x1EE97,
},
[0x1EE98]={
@@ -177208,7 +194946,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED THAL",
direction="al",
linebreak="al",
- specials={ "font", 0x0630 },
+ specials={ "font", 0x630 },
unicodeslot=0x1EE98,
},
[0x1EE99]={
@@ -177217,7 +194955,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED DAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0636 },
+ specials={ "font", 0x636 },
unicodeslot=0x1EE99,
},
[0x1EE9A]={
@@ -177226,7 +194964,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED ZAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0638 },
+ specials={ "font", 0x638 },
unicodeslot=0x1EE9A,
},
[0x1EE9B]={
@@ -177235,7 +194973,7 @@ characters.data={
description="ARABIC MATHEMATICAL LOOPED GHAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x063A },
+ specials={ "font", 0x63A },
unicodeslot=0x1EE9B,
},
[0x1EEA1]={
@@ -177244,7 +194982,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK BEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0628 },
+ specials={ "font", 0x628 },
unicodeslot=0x1EEA1,
},
[0x1EEA2]={
@@ -177253,7 +194991,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK JEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x062C },
+ specials={ "font", 0x62C },
unicodeslot=0x1EEA2,
},
[0x1EEA3]={
@@ -177262,7 +195000,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK DAL",
direction="al",
linebreak="al",
- specials={ "font", 0x062F },
+ specials={ "font", 0x62F },
unicodeslot=0x1EEA3,
},
[0x1EEA5]={
@@ -177271,7 +195009,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK WAW",
direction="al",
linebreak="al",
- specials={ "font", 0x0648 },
+ specials={ "font", 0x648 },
unicodeslot=0x1EEA5,
},
[0x1EEA6]={
@@ -177280,7 +195018,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK ZAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0632 },
+ specials={ "font", 0x632 },
unicodeslot=0x1EEA6,
},
[0x1EEA7]={
@@ -177289,7 +195027,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK HAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062D },
+ specials={ "font", 0x62D },
unicodeslot=0x1EEA7,
},
[0x1EEA8]={
@@ -177298,7 +195036,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK TAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0637 },
+ specials={ "font", 0x637 },
unicodeslot=0x1EEA8,
},
[0x1EEA9]={
@@ -177307,7 +195045,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK YEH",
direction="al",
linebreak="al",
- specials={ "font", 0x064A },
+ specials={ "font", 0x64A },
unicodeslot=0x1EEA9,
},
[0x1EEAB]={
@@ -177316,7 +195054,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK LAM",
direction="al",
linebreak="al",
- specials={ "font", 0x0644 },
+ specials={ "font", 0x644 },
unicodeslot=0x1EEAB,
},
[0x1EEAC]={
@@ -177325,7 +195063,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK MEEM",
direction="al",
linebreak="al",
- specials={ "font", 0x0645 },
+ specials={ "font", 0x645 },
unicodeslot=0x1EEAC,
},
[0x1EEAD]={
@@ -177334,7 +195072,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK NOON",
direction="al",
linebreak="al",
- specials={ "font", 0x0646 },
+ specials={ "font", 0x646 },
unicodeslot=0x1EEAD,
},
[0x1EEAE]={
@@ -177343,7 +195081,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0633 },
+ specials={ "font", 0x633 },
unicodeslot=0x1EEAE,
},
[0x1EEAF]={
@@ -177352,7 +195090,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK AIN",
direction="al",
linebreak="al",
- specials={ "font", 0x0639 },
+ specials={ "font", 0x639 },
unicodeslot=0x1EEAF,
},
[0x1EEB0]={
@@ -177361,7 +195099,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK FEH",
direction="al",
linebreak="al",
- specials={ "font", 0x0641 },
+ specials={ "font", 0x641 },
unicodeslot=0x1EEB0,
},
[0x1EEB1]={
@@ -177370,7 +195108,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK SAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0635 },
+ specials={ "font", 0x635 },
unicodeslot=0x1EEB1,
},
[0x1EEB2]={
@@ -177379,7 +195117,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK QAF",
direction="al",
linebreak="al",
- specials={ "font", 0x0642 },
+ specials={ "font", 0x642 },
unicodeslot=0x1EEB2,
},
[0x1EEB3]={
@@ -177388,7 +195126,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK REH",
direction="al",
linebreak="al",
- specials={ "font", 0x0631 },
+ specials={ "font", 0x631 },
unicodeslot=0x1EEB3,
},
[0x1EEB4]={
@@ -177397,7 +195135,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK SHEEN",
direction="al",
linebreak="al",
- specials={ "font", 0x0634 },
+ specials={ "font", 0x634 },
unicodeslot=0x1EEB4,
},
[0x1EEB5]={
@@ -177406,7 +195144,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK TEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062A },
+ specials={ "font", 0x62A },
unicodeslot=0x1EEB5,
},
[0x1EEB6]={
@@ -177415,7 +195153,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK THEH",
direction="al",
linebreak="al",
- specials={ "font", 0x062B },
+ specials={ "font", 0x62B },
unicodeslot=0x1EEB6,
},
[0x1EEB7]={
@@ -177424,7 +195162,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK KHAH",
direction="al",
linebreak="al",
- specials={ "font", 0x062E },
+ specials={ "font", 0x62E },
unicodeslot=0x1EEB7,
},
[0x1EEB8]={
@@ -177433,7 +195171,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK THAL",
direction="al",
linebreak="al",
- specials={ "font", 0x0630 },
+ specials={ "font", 0x630 },
unicodeslot=0x1EEB8,
},
[0x1EEB9]={
@@ -177442,7 +195180,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK DAD",
direction="al",
linebreak="al",
- specials={ "font", 0x0636 },
+ specials={ "font", 0x636 },
unicodeslot=0x1EEB9,
},
[0x1EEBA]={
@@ -177451,7 +195189,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK ZAH",
direction="al",
linebreak="al",
- specials={ "font", 0x0638 },
+ specials={ "font", 0x638 },
unicodeslot=0x1EEBA,
},
[0x1EEBB]={
@@ -177460,7 +195198,7 @@ characters.data={
description="ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN",
direction="al",
linebreak="al",
- specials={ "font", 0x063A },
+ specials={ "font", 0x63A },
unicodeslot=0x1EEBB,
},
[0x1EEF0]={
@@ -178694,6 +196432,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F0BE,
},
+ [0x1F0BF]={
+ category="so",
+ description="PLAYING CARD RED JOKER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0BF,
+ },
[0x1F0C1]={
category="so",
description="PLAYING CARD ACE OF DIAMONDS",
@@ -178904,13 +196649,167 @@ characters.data={
linebreak="id",
unicodeslot=0x1F0DF,
},
+ [0x1F0E0]={
+ category="so",
+ description="PLAYING CARD FOOL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E0,
+ },
+ [0x1F0E1]={
+ category="so",
+ description="PLAYING CARD TRUMP-1",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E1,
+ },
+ [0x1F0E2]={
+ category="so",
+ description="PLAYING CARD TRUMP-2",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E2,
+ },
+ [0x1F0E3]={
+ category="so",
+ description="PLAYING CARD TRUMP-3",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E3,
+ },
+ [0x1F0E4]={
+ category="so",
+ description="PLAYING CARD TRUMP-4",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E4,
+ },
+ [0x1F0E5]={
+ category="so",
+ description="PLAYING CARD TRUMP-5",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E5,
+ },
+ [0x1F0E6]={
+ category="so",
+ description="PLAYING CARD TRUMP-6",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E6,
+ },
+ [0x1F0E7]={
+ category="so",
+ description="PLAYING CARD TRUMP-7",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E7,
+ },
+ [0x1F0E8]={
+ category="so",
+ description="PLAYING CARD TRUMP-8",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E8,
+ },
+ [0x1F0E9]={
+ category="so",
+ description="PLAYING CARD TRUMP-9",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0E9,
+ },
+ [0x1F0EA]={
+ category="so",
+ description="PLAYING CARD TRUMP-10",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0EA,
+ },
+ [0x1F0EB]={
+ category="so",
+ description="PLAYING CARD TRUMP-11",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0EB,
+ },
+ [0x1F0EC]={
+ category="so",
+ description="PLAYING CARD TRUMP-12",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0EC,
+ },
+ [0x1F0ED]={
+ category="so",
+ description="PLAYING CARD TRUMP-13",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0ED,
+ },
+ [0x1F0EE]={
+ category="so",
+ description="PLAYING CARD TRUMP-14",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0EE,
+ },
+ [0x1F0EF]={
+ category="so",
+ description="PLAYING CARD TRUMP-15",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0EF,
+ },
+ [0x1F0F0]={
+ category="so",
+ description="PLAYING CARD TRUMP-16",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0F0,
+ },
+ [0x1F0F1]={
+ category="so",
+ description="PLAYING CARD TRUMP-17",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0F1,
+ },
+ [0x1F0F2]={
+ category="so",
+ description="PLAYING CARD TRUMP-18",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0F2,
+ },
+ [0x1F0F3]={
+ category="so",
+ description="PLAYING CARD TRUMP-19",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0F3,
+ },
+ [0x1F0F4]={
+ category="so",
+ description="PLAYING CARD TRUMP-20",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0F4,
+ },
+ [0x1F0F5]={
+ category="so",
+ description="PLAYING CARD TRUMP-21",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F0F5,
+ },
[0x1F100]={
category="no",
cjkwd="a",
description="DIGIT ZERO FULL STOP",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0030, 0x002E },
+ specials={ "compat", 0x30, 0x2E },
unicodeslot=0x1F100,
},
[0x1F101]={
@@ -178919,7 +196818,7 @@ characters.data={
description="DIGIT ZERO COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0030, 0x002C },
+ specials={ "compat", 0x30, 0x2C },
unicodeslot=0x1F101,
},
[0x1F102]={
@@ -178928,7 +196827,7 @@ characters.data={
description="DIGIT ONE COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0031, 0x002C },
+ specials={ "compat", 0x31, 0x2C },
unicodeslot=0x1F102,
},
[0x1F103]={
@@ -178937,7 +196836,7 @@ characters.data={
description="DIGIT TWO COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0032, 0x002C },
+ specials={ "compat", 0x32, 0x2C },
unicodeslot=0x1F103,
},
[0x1F104]={
@@ -178946,7 +196845,7 @@ characters.data={
description="DIGIT THREE COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0033, 0x002C },
+ specials={ "compat", 0x33, 0x2C },
unicodeslot=0x1F104,
},
[0x1F105]={
@@ -178955,7 +196854,7 @@ characters.data={
description="DIGIT FOUR COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0034, 0x002C },
+ specials={ "compat", 0x34, 0x2C },
unicodeslot=0x1F105,
},
[0x1F106]={
@@ -178964,7 +196863,7 @@ characters.data={
description="DIGIT FIVE COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0035, 0x002C },
+ specials={ "compat", 0x35, 0x2C },
unicodeslot=0x1F106,
},
[0x1F107]={
@@ -178973,7 +196872,7 @@ characters.data={
description="DIGIT SIX COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0036, 0x002C },
+ specials={ "compat", 0x36, 0x2C },
unicodeslot=0x1F107,
},
[0x1F108]={
@@ -178982,7 +196881,7 @@ characters.data={
description="DIGIT SEVEN COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0037, 0x002C },
+ specials={ "compat", 0x37, 0x2C },
unicodeslot=0x1F108,
},
[0x1F109]={
@@ -178991,7 +196890,7 @@ characters.data={
description="DIGIT EIGHT COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0038, 0x002C },
+ specials={ "compat", 0x38, 0x2C },
unicodeslot=0x1F109,
},
[0x1F10A]={
@@ -179000,16 +196899,30 @@ characters.data={
description="DIGIT NINE COMMA",
direction="en",
linebreak="ai",
- specials={ "compat", 0x0039, 0x002C },
+ specials={ "compat", 0x39, 0x2C },
unicodeslot=0x1F10A,
},
+ [0x1F10B]={
+ category="no",
+ description="DINGBAT CIRCLED SANS-SERIF DIGIT ZERO",
+ direction="on",
+ linebreak="ai",
+ unicodeslot=0x1F10B,
+ },
+ [0x1F10C]={
+ category="no",
+ description="DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO",
+ direction="on",
+ linebreak="ai",
+ unicodeslot=0x1F10C,
+ },
[0x1F110]={
category="so",
cjkwd="a",
description="PARENTHESIZED LATIN CAPITAL LETTER A",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0041, 0x0029 },
+ specials={ "compat", 0x28, 0x41, 0x29 },
unicodeslot=0x1F110,
},
[0x1F111]={
@@ -179018,7 +196931,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER B",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0042, 0x0029 },
+ specials={ "compat", 0x28, 0x42, 0x29 },
unicodeslot=0x1F111,
},
[0x1F112]={
@@ -179027,7 +196940,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER C",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0043, 0x0029 },
+ specials={ "compat", 0x28, 0x43, 0x29 },
unicodeslot=0x1F112,
},
[0x1F113]={
@@ -179036,7 +196949,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER D",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0044, 0x0029 },
+ specials={ "compat", 0x28, 0x44, 0x29 },
unicodeslot=0x1F113,
},
[0x1F114]={
@@ -179045,7 +196958,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER E",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0045, 0x0029 },
+ specials={ "compat", 0x28, 0x45, 0x29 },
unicodeslot=0x1F114,
},
[0x1F115]={
@@ -179054,7 +196967,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER F",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0046, 0x0029 },
+ specials={ "compat", 0x28, 0x46, 0x29 },
unicodeslot=0x1F115,
},
[0x1F116]={
@@ -179063,7 +196976,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER G",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0047, 0x0029 },
+ specials={ "compat", 0x28, 0x47, 0x29 },
unicodeslot=0x1F116,
},
[0x1F117]={
@@ -179072,7 +196985,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER H",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0048, 0x0029 },
+ specials={ "compat", 0x28, 0x48, 0x29 },
unicodeslot=0x1F117,
},
[0x1F118]={
@@ -179081,7 +196994,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER I",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0049, 0x0029 },
+ specials={ "compat", 0x28, 0x49, 0x29 },
unicodeslot=0x1F118,
},
[0x1F119]={
@@ -179090,7 +197003,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER J",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x004A, 0x0029 },
+ specials={ "compat", 0x28, 0x4A, 0x29 },
unicodeslot=0x1F119,
},
[0x1F11A]={
@@ -179099,7 +197012,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER K",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x004B, 0x0029 },
+ specials={ "compat", 0x28, 0x4B, 0x29 },
unicodeslot=0x1F11A,
},
[0x1F11B]={
@@ -179108,7 +197021,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER L",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x004C, 0x0029 },
+ specials={ "compat", 0x28, 0x4C, 0x29 },
unicodeslot=0x1F11B,
},
[0x1F11C]={
@@ -179117,7 +197030,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER M",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x004D, 0x0029 },
+ specials={ "compat", 0x28, 0x4D, 0x29 },
unicodeslot=0x1F11C,
},
[0x1F11D]={
@@ -179126,7 +197039,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER N",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x004E, 0x0029 },
+ specials={ "compat", 0x28, 0x4E, 0x29 },
unicodeslot=0x1F11D,
},
[0x1F11E]={
@@ -179135,7 +197048,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER O",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x004F, 0x0029 },
+ specials={ "compat", 0x28, 0x4F, 0x29 },
unicodeslot=0x1F11E,
},
[0x1F11F]={
@@ -179144,7 +197057,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER P",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0050, 0x0029 },
+ specials={ "compat", 0x28, 0x50, 0x29 },
unicodeslot=0x1F11F,
},
[0x1F120]={
@@ -179153,7 +197066,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER Q",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0051, 0x0029 },
+ specials={ "compat", 0x28, 0x51, 0x29 },
unicodeslot=0x1F120,
},
[0x1F121]={
@@ -179162,7 +197075,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER R",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0052, 0x0029 },
+ specials={ "compat", 0x28, 0x52, 0x29 },
unicodeslot=0x1F121,
},
[0x1F122]={
@@ -179171,7 +197084,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER S",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0053, 0x0029 },
+ specials={ "compat", 0x28, 0x53, 0x29 },
unicodeslot=0x1F122,
},
[0x1F123]={
@@ -179180,7 +197093,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER T",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0054, 0x0029 },
+ specials={ "compat", 0x28, 0x54, 0x29 },
unicodeslot=0x1F123,
},
[0x1F124]={
@@ -179189,7 +197102,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER U",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0055, 0x0029 },
+ specials={ "compat", 0x28, 0x55, 0x29 },
unicodeslot=0x1F124,
},
[0x1F125]={
@@ -179198,7 +197111,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER V",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0056, 0x0029 },
+ specials={ "compat", 0x28, 0x56, 0x29 },
unicodeslot=0x1F125,
},
[0x1F126]={
@@ -179207,7 +197120,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER W",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0057, 0x0029 },
+ specials={ "compat", 0x28, 0x57, 0x29 },
unicodeslot=0x1F126,
},
[0x1F127]={
@@ -179216,7 +197129,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER X",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0058, 0x0029 },
+ specials={ "compat", 0x28, 0x58, 0x29 },
unicodeslot=0x1F127,
},
[0x1F128]={
@@ -179225,7 +197138,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER Y",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x0059, 0x0029 },
+ specials={ "compat", 0x28, 0x59, 0x29 },
unicodeslot=0x1F128,
},
[0x1F129]={
@@ -179234,7 +197147,7 @@ characters.data={
description="PARENTHESIZED LATIN CAPITAL LETTER Z",
direction="l",
linebreak="ai",
- specials={ "compat", 0x0028, 0x005A, 0x0029 },
+ specials={ "compat", 0x28, 0x5A, 0x29 },
unicodeslot=0x1F129,
},
[0x1F12A]={
@@ -179243,7 +197156,7 @@ characters.data={
description="TORTOISE SHELL BRACKETED LATIN CAPITAL LETTER S",
direction="l",
linebreak="ai",
- specials={ "compat", 0x3014, 0x0053, 0x3015 },
+ specials={ "compat", 0x3014, 0x53, 0x3015 },
unicodeslot=0x1F12A,
},
[0x1F12B]={
@@ -179252,7 +197165,7 @@ characters.data={
description="CIRCLED ITALIC LATIN CAPITAL LETTER C",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0043 },
+ specials={ "circle", 0x43 },
unicodeslot=0x1F12B,
},
[0x1F12C]={
@@ -179261,7 +197174,7 @@ characters.data={
description="CIRCLED ITALIC LATIN CAPITAL LETTER R",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0052 },
+ specials={ "circle", 0x52 },
unicodeslot=0x1F12C,
},
[0x1F12D]={
@@ -179270,7 +197183,7 @@ characters.data={
description="CIRCLED CD",
direction="l",
linebreak="ai",
- specials={ "circle", 0x0043, 0x0044 },
+ specials={ "circle", 0x43, 0x44 },
unicodeslot=0x1F12D,
},
[0x1F12E]={
@@ -179278,7 +197191,7 @@ characters.data={
description="CIRCLED WZ",
direction="l",
linebreak="al",
- specials={ "circle", 0x0057, 0x005A },
+ specials={ "circle", 0x57, 0x5A },
unicodeslot=0x1F12E,
},
[0x1F130]={
@@ -179287,7 +197200,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER A",
direction="l",
linebreak="ai",
- specials={ "square", 0x0041 },
+ specials={ "square", 0x41 },
unicodeslot=0x1F130,
},
[0x1F131]={
@@ -179296,7 +197209,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER B",
direction="l",
linebreak="ai",
- specials={ "square", 0x0042 },
+ specials={ "square", 0x42 },
unicodeslot=0x1F131,
},
[0x1F132]={
@@ -179305,7 +197218,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER C",
direction="l",
linebreak="ai",
- specials={ "square", 0x0043 },
+ specials={ "square", 0x43 },
unicodeslot=0x1F132,
},
[0x1F133]={
@@ -179314,7 +197227,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER D",
direction="l",
linebreak="ai",
- specials={ "square", 0x0044 },
+ specials={ "square", 0x44 },
unicodeslot=0x1F133,
},
[0x1F134]={
@@ -179323,7 +197236,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER E",
direction="l",
linebreak="ai",
- specials={ "square", 0x0045 },
+ specials={ "square", 0x45 },
unicodeslot=0x1F134,
},
[0x1F135]={
@@ -179332,7 +197245,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER F",
direction="l",
linebreak="ai",
- specials={ "square", 0x0046 },
+ specials={ "square", 0x46 },
unicodeslot=0x1F135,
},
[0x1F136]={
@@ -179341,7 +197254,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER G",
direction="l",
linebreak="ai",
- specials={ "square", 0x0047 },
+ specials={ "square", 0x47 },
unicodeslot=0x1F136,
},
[0x1F137]={
@@ -179350,7 +197263,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER H",
direction="l",
linebreak="ai",
- specials={ "square", 0x0048 },
+ specials={ "square", 0x48 },
unicodeslot=0x1F137,
},
[0x1F138]={
@@ -179359,7 +197272,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER I",
direction="l",
linebreak="ai",
- specials={ "square", 0x0049 },
+ specials={ "square", 0x49 },
unicodeslot=0x1F138,
},
[0x1F139]={
@@ -179368,7 +197281,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER J",
direction="l",
linebreak="ai",
- specials={ "square", 0x004A },
+ specials={ "square", 0x4A },
unicodeslot=0x1F139,
},
[0x1F13A]={
@@ -179377,7 +197290,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER K",
direction="l",
linebreak="ai",
- specials={ "square", 0x004B },
+ specials={ "square", 0x4B },
unicodeslot=0x1F13A,
},
[0x1F13B]={
@@ -179386,7 +197299,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER L",
direction="l",
linebreak="ai",
- specials={ "square", 0x004C },
+ specials={ "square", 0x4C },
unicodeslot=0x1F13B,
},
[0x1F13C]={
@@ -179395,7 +197308,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER M",
direction="l",
linebreak="ai",
- specials={ "square", 0x004D },
+ specials={ "square", 0x4D },
unicodeslot=0x1F13C,
},
[0x1F13D]={
@@ -179404,7 +197317,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER N",
direction="l",
linebreak="ai",
- specials={ "square", 0x004E },
+ specials={ "square", 0x4E },
unicodeslot=0x1F13D,
},
[0x1F13E]={
@@ -179413,7 +197326,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER O",
direction="l",
linebreak="ai",
- specials={ "square", 0x004F },
+ specials={ "square", 0x4F },
unicodeslot=0x1F13E,
},
[0x1F13F]={
@@ -179422,7 +197335,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER P",
direction="l",
linebreak="ai",
- specials={ "square", 0x0050 },
+ specials={ "square", 0x50 },
unicodeslot=0x1F13F,
},
[0x1F140]={
@@ -179431,7 +197344,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER Q",
direction="l",
linebreak="ai",
- specials={ "square", 0x0051 },
+ specials={ "square", 0x51 },
unicodeslot=0x1F140,
},
[0x1F141]={
@@ -179440,7 +197353,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER R",
direction="l",
linebreak="ai",
- specials={ "square", 0x0052 },
+ specials={ "square", 0x52 },
unicodeslot=0x1F141,
},
[0x1F142]={
@@ -179449,7 +197362,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER S",
direction="l",
linebreak="ai",
- specials={ "square", 0x0053 },
+ specials={ "square", 0x53 },
unicodeslot=0x1F142,
},
[0x1F143]={
@@ -179458,7 +197371,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER T",
direction="l",
linebreak="ai",
- specials={ "square", 0x0054 },
+ specials={ "square", 0x54 },
unicodeslot=0x1F143,
},
[0x1F144]={
@@ -179467,7 +197380,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER U",
direction="l",
linebreak="ai",
- specials={ "square", 0x0055 },
+ specials={ "square", 0x55 },
unicodeslot=0x1F144,
},
[0x1F145]={
@@ -179476,7 +197389,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER V",
direction="l",
linebreak="ai",
- specials={ "square", 0x0056 },
+ specials={ "square", 0x56 },
unicodeslot=0x1F145,
},
[0x1F146]={
@@ -179485,7 +197398,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER W",
direction="l",
linebreak="ai",
- specials={ "square", 0x0057 },
+ specials={ "square", 0x57 },
unicodeslot=0x1F146,
},
[0x1F147]={
@@ -179494,7 +197407,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER X",
direction="l",
linebreak="ai",
- specials={ "square", 0x0058 },
+ specials={ "square", 0x58 },
unicodeslot=0x1F147,
},
[0x1F148]={
@@ -179503,7 +197416,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER Y",
direction="l",
linebreak="ai",
- specials={ "square", 0x0059 },
+ specials={ "square", 0x59 },
unicodeslot=0x1F148,
},
[0x1F149]={
@@ -179512,7 +197425,7 @@ characters.data={
description="SQUARED LATIN CAPITAL LETTER Z",
direction="l",
linebreak="ai",
- specials={ "square", 0x005A },
+ specials={ "square", 0x5A },
unicodeslot=0x1F149,
},
[0x1F14A]={
@@ -179521,7 +197434,7 @@ characters.data={
description="SQUARED HV",
direction="l",
linebreak="ai",
- specials={ "square", 0x0048, 0x0056 },
+ specials={ "square", 0x48, 0x56 },
unicodeslot=0x1F14A,
},
[0x1F14B]={
@@ -179530,7 +197443,7 @@ characters.data={
description="SQUARED MV",
direction="l",
linebreak="ai",
- specials={ "square", 0x004D, 0x0056 },
+ specials={ "square", 0x4D, 0x56 },
unicodeslot=0x1F14B,
},
[0x1F14C]={
@@ -179539,7 +197452,7 @@ characters.data={
description="SQUARED SD",
direction="l",
linebreak="ai",
- specials={ "square", 0x0053, 0x0044 },
+ specials={ "square", 0x53, 0x44 },
unicodeslot=0x1F14C,
},
[0x1F14D]={
@@ -179548,7 +197461,7 @@ characters.data={
description="SQUARED SS",
direction="l",
linebreak="ai",
- specials={ "square", 0x0053, 0x0053 },
+ specials={ "square", 0x53, 0x53 },
unicodeslot=0x1F14D,
},
[0x1F14E]={
@@ -179557,7 +197470,7 @@ characters.data={
description="SQUARED PPV",
direction="l",
linebreak="ai",
- specials={ "square", 0x0050, 0x0050, 0x0056 },
+ specials={ "square", 0x50, 0x50, 0x56 },
unicodeslot=0x1F14E,
},
[0x1F14F]={
@@ -179566,7 +197479,7 @@ characters.data={
description="SQUARED WC",
direction="l",
linebreak="ai",
- specials={ "square", 0x0057, 0x0043 },
+ specials={ "square", 0x57, 0x43 },
unicodeslot=0x1F14F,
},
[0x1F150]={
@@ -179782,7 +197695,7 @@ characters.data={
description="RAISED MC SIGN",
direction="on",
linebreak="al",
- specials={ "super", 0x004D, 0x0043 },
+ specials={ "super", 0x4D, 0x43 },
unicodeslot=0x1F16A,
},
[0x1F16B]={
@@ -179790,7 +197703,7 @@ characters.data={
description="RAISED MD SIGN",
direction="on",
linebreak="al",
- specials={ "super", 0x004D, 0x0044 },
+ specials={ "super", 0x4D, 0x44 },
unicodeslot=0x1F16B,
},
[0x1F170]={
@@ -180059,7 +197972,7 @@ characters.data={
description="SQUARE DJ",
direction="l",
linebreak="ai",
- specials={ "square", 0x0044, 0x004A },
+ specials={ "square", 0x44, 0x4A },
unicodeslot=0x1F190,
},
[0x1F191]={
@@ -181076,6 +198989,90 @@ characters.data={
linebreak="id",
unicodeslot=0x1F320,
},
+ [0x1F321]={
+ category="so",
+ description="THERMOMETER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F321,
+ },
+ [0x1F322]={
+ category="so",
+ description="BLACK DROPLET",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F322,
+ },
+ [0x1F323]={
+ category="so",
+ description="WHITE SUN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F323,
+ },
+ [0x1F324]={
+ category="so",
+ description="WHITE SUN WITH SMALL CLOUD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F324,
+ },
+ [0x1F325]={
+ category="so",
+ description="WHITE SUN BEHIND CLOUD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F325,
+ },
+ [0x1F326]={
+ category="so",
+ description="WHITE SUN BEHIND CLOUD WITH RAIN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F326,
+ },
+ [0x1F327]={
+ category="so",
+ description="CLOUD WITH RAIN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F327,
+ },
+ [0x1F328]={
+ category="so",
+ description="CLOUD WITH SNOW",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F328,
+ },
+ [0x1F329]={
+ category="so",
+ description="CLOUD WITH LIGHTNING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F329,
+ },
+ [0x1F32A]={
+ category="so",
+ description="CLOUD WITH TORNADO",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F32A,
+ },
+ [0x1F32B]={
+ category="so",
+ description="FOG",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F32B,
+ },
+ [0x1F32C]={
+ category="so",
+ description="WIND BLOWING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F32C,
+ },
[0x1F330]={
category="so",
description="CHESTNUT",
@@ -181118,6 +199115,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F335,
},
+ [0x1F336]={
+ category="so",
+ description="HOT PEPPER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F336,
+ },
[0x1F337]={
category="so",
description="TULIP",
@@ -181608,6 +199612,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F37C,
},
+ [0x1F37D]={
+ category="so",
+ description="FORK AND KNIFE WITH PLATE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F37D,
+ },
[0x1F380]={
category="so",
description="RIBBON",
@@ -181748,6 +199759,90 @@ characters.data={
linebreak="id",
unicodeslot=0x1F393,
},
+ [0x1F394]={
+ category="so",
+ description="HEART WITH TIP ON THE LEFT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F394,
+ },
+ [0x1F395]={
+ category="so",
+ description="BOUQUET OF FLOWERS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F395,
+ },
+ [0x1F396]={
+ category="so",
+ description="MILITARY MEDAL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F396,
+ },
+ [0x1F397]={
+ category="so",
+ description="REMINDER RIBBON",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F397,
+ },
+ [0x1F398]={
+ category="so",
+ description="MUSICAL KEYBOARD WITH JACKS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F398,
+ },
+ [0x1F399]={
+ category="so",
+ description="STUDIO MICROPHONE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F399,
+ },
+ [0x1F39A]={
+ category="so",
+ description="LEVEL SLIDER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F39A,
+ },
+ [0x1F39B]={
+ category="so",
+ description="CONTROL KNOBS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F39B,
+ },
+ [0x1F39C]={
+ category="so",
+ description="BEAMED ASCENDING MUSICAL NOTES",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F39C,
+ },
+ [0x1F39D]={
+ category="so",
+ description="BEAMED DESCENDING MUSICAL NOTES",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F39D,
+ },
+ [0x1F39E]={
+ category="so",
+ description="FILM FRAMES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F39E,
+ },
+ [0x1F39F]={
+ category="so",
+ description="ADMISSION TICKETS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F39F,
+ },
[0x1F3A0]={
category="so",
description="CAROUSEL HORSE",
@@ -182007,6 +200102,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F3C4,
},
+ [0x1F3C5]={
+ category="so",
+ description="SPORTS MEDAL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3C5,
+ },
[0x1F3C6]={
category="so",
description="TROPHY",
@@ -182042,6 +200144,118 @@ characters.data={
linebreak="id",
unicodeslot=0x1F3CA,
},
+ [0x1F3CB]={
+ category="so",
+ description="WEIGHT LIFTER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3CB,
+ },
+ [0x1F3CC]={
+ category="so",
+ description="GOLFER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3CC,
+ },
+ [0x1F3CD]={
+ category="so",
+ description="RACING MOTORCYCLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3CD,
+ },
+ [0x1F3CE]={
+ category="so",
+ description="RACING CAR",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3CE,
+ },
+ [0x1F3D4]={
+ category="so",
+ description="SNOW CAPPED MOUNTAIN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3D4,
+ },
+ [0x1F3D5]={
+ category="so",
+ description="CAMPING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3D5,
+ },
+ [0x1F3D6]={
+ category="so",
+ description="BEACH WITH UMBRELLA",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3D6,
+ },
+ [0x1F3D7]={
+ category="so",
+ description="BUILDING CONSTRUCTION",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3D7,
+ },
+ [0x1F3D8]={
+ category="so",
+ description="HOUSE BUILDINGS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3D8,
+ },
+ [0x1F3D9]={
+ category="so",
+ description="CITYSCAPE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3D9,
+ },
+ [0x1F3DA]={
+ category="so",
+ description="DERELICT HOUSE BUILDING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3DA,
+ },
+ [0x1F3DB]={
+ category="so",
+ description="CLASSICAL BUILDING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3DB,
+ },
+ [0x1F3DC]={
+ category="so",
+ description="DESERT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3DC,
+ },
+ [0x1F3DD]={
+ category="so",
+ description="DESERT ISLAND",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3DD,
+ },
+ [0x1F3DE]={
+ category="so",
+ description="NATIONAL PARK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3DE,
+ },
+ [0x1F3DF]={
+ category="so",
+ description="STADIUM",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3DF,
+ },
[0x1F3E0]={
category="so",
description="HOUSE BUILDING",
@@ -182161,6 +200375,55 @@ characters.data={
linebreak="id",
unicodeslot=0x1F3F0,
},
+ [0x1F3F1]={
+ category="so",
+ description="WHITE PENNANT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F1,
+ },
+ [0x1F3F2]={
+ category="so",
+ description="BLACK PENNANT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F2,
+ },
+ [0x1F3F3]={
+ category="so",
+ description="WAVING WHITE FLAG",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F3,
+ },
+ [0x1F3F4]={
+ category="so",
+ description="WAVING BLACK FLAG",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F4,
+ },
+ [0x1F3F5]={
+ category="so",
+ description="ROSETTE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F5,
+ },
+ [0x1F3F6]={
+ category="so",
+ description="BLACK ROSETTE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F6,
+ },
+ [0x1F3F7]={
+ category="so",
+ description="LABEL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F3F7,
+ },
[0x1F400]={
category="so",
description="RAT",
@@ -182602,6 +200865,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F43E,
},
+ [0x1F43F]={
+ category="so",
+ description="CHIPMUNK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F43F,
+ },
[0x1F440]={
category="so",
description="EYES",
@@ -182609,6 +200879,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F440,
},
+ [0x1F441]={
+ category="so",
+ description="EYE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F441,
+ },
[0x1F442]={
category="so",
description="EAR",
@@ -183883,6 +202160,13 @@ characters.data={
linebreak="id",
unicodeslot=0x1F4F7,
},
+ [0x1F4F8]={
+ category="so",
+ description="CAMERA WITH FLASH",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F4F8,
+ },
[0x1F4F9]={
category="so",
description="VIDEO CAMERA",
@@ -183911,6 +202195,20 @@ characters.data={
linebreak="id",
unicodeslot=0x1F4FC,
},
+ [0x1F4FD]={
+ category="so",
+ description="FILM PROJECTOR",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F4FD,
+ },
+ [0x1F4FE]={
+ category="so",
+ description="PORTABLE STEREO",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F4FE,
+ },
[0x1F500]={
category="so",
description="TWISTED RIGHTWARDS ARROWS",
@@ -184345,6 +202643,20 @@ characters.data={
linebreak="al",
unicodeslot=0x1F53D,
},
+ [0x1F53E]={
+ category="so",
+ description="LOWER RIGHT SHADOWED WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F53E,
+ },
+ [0x1F53F]={
+ category="so",
+ description="UPPER RIGHT SHADOWED WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F53F,
+ },
[0x1F540]={
category="so",
description="CIRCLED CROSS POMMEE",
@@ -184373,6 +202685,55 @@ characters.data={
linebreak="al",
unicodeslot=0x1F543,
},
+ [0x1F544]={
+ category="so",
+ description="NOTCHED RIGHT SEMICIRCLE WITH THREE DOTS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F544,
+ },
+ [0x1F545]={
+ category="so",
+ description="SYMBOL FOR MARKS CHAPTER",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F545,
+ },
+ [0x1F546]={
+ category="so",
+ description="WHITE LATIN CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F546,
+ },
+ [0x1F547]={
+ category="so",
+ description="HEAVY LATIN CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F547,
+ },
+ [0x1F548]={
+ category="so",
+ description="CELTIC CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F548,
+ },
+ [0x1F549]={
+ category="so",
+ description="OM SYMBOL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F549,
+ },
+ [0x1F54A]={
+ category="so",
+ description="DOVE OF PEACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F54A,
+ },
[0x1F550]={
category="so",
description="CLOCK FACE ONE OCLOCK",
@@ -184541,6 +202902,1021 @@ characters.data={
linebreak="id",
unicodeslot=0x1F567,
},
+ [0x1F568]={
+ category="so",
+ description="RIGHT SPEAKER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F568,
+ },
+ [0x1F569]={
+ category="so",
+ description="RIGHT SPEAKER WITH ONE SOUND WAVE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F569,
+ },
+ [0x1F56A]={
+ category="so",
+ description="RIGHT SPEAKER WITH THREE SOUND WAVES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F56A,
+ },
+ [0x1F56B]={
+ category="so",
+ description="BULLHORN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F56B,
+ },
+ [0x1F56C]={
+ category="so",
+ description="BULLHORN WITH SOUND WAVES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F56C,
+ },
+ [0x1F56D]={
+ category="so",
+ description="RINGING BELL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F56D,
+ },
+ [0x1F56E]={
+ category="so",
+ description="BOOK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F56E,
+ },
+ [0x1F56F]={
+ category="so",
+ description="CANDLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F56F,
+ },
+ [0x1F570]={
+ category="so",
+ description="MANTELPIECE CLOCK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F570,
+ },
+ [0x1F571]={
+ category="so",
+ description="BLACK SKULL AND CROSSBONES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F571,
+ },
+ [0x1F572]={
+ category="so",
+ description="NO PIRACY",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F572,
+ },
+ [0x1F573]={
+ category="so",
+ description="HOLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F573,
+ },
+ [0x1F574]={
+ category="so",
+ description="MAN IN BUSINESS SUIT LEVITATING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F574,
+ },
+ [0x1F575]={
+ category="so",
+ description="SLEUTH OR SPY",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F575,
+ },
+ [0x1F576]={
+ category="so",
+ description="DARK SUNGLASSES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F576,
+ },
+ [0x1F577]={
+ category="so",
+ description="SPIDER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F577,
+ },
+ [0x1F578]={
+ category="so",
+ description="SPIDER WEB",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F578,
+ },
+ [0x1F579]={
+ category="so",
+ description="JOYSTICK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F579,
+ },
+ [0x1F57B]={
+ category="so",
+ description="LEFT HAND TELEPHONE RECEIVER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F57B,
+ },
+ [0x1F57C]={
+ category="so",
+ description="TELEPHONE RECEIVER WITH PAGE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F57C,
+ },
+ [0x1F57D]={
+ category="so",
+ description="RIGHT HAND TELEPHONE RECEIVER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F57D,
+ },
+ [0x1F57E]={
+ category="so",
+ description="WHITE TOUCHTONE TELEPHONE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F57E,
+ },
+ [0x1F57F]={
+ category="so",
+ description="BLACK TOUCHTONE TELEPHONE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F57F,
+ },
+ [0x1F580]={
+ category="so",
+ description="TELEPHONE ON TOP OF MODEM",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F580,
+ },
+ [0x1F581]={
+ category="so",
+ description="CLAMSHELL MOBILE PHONE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F581,
+ },
+ [0x1F582]={
+ category="so",
+ description="BACK OF ENVELOPE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F582,
+ },
+ [0x1F583]={
+ category="so",
+ description="STAMPED ENVELOPE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F583,
+ },
+ [0x1F584]={
+ category="so",
+ description="ENVELOPE WITH LIGHTNING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F584,
+ },
+ [0x1F585]={
+ category="so",
+ description="FLYING ENVELOPE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F585,
+ },
+ [0x1F586]={
+ category="so",
+ description="PEN OVER STAMPED ENVELOPE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F586,
+ },
+ [0x1F587]={
+ category="so",
+ description="LINKED PAPERCLIPS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F587,
+ },
+ [0x1F588]={
+ category="so",
+ description="BLACK PUSHPIN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F588,
+ },
+ [0x1F589]={
+ category="so",
+ description="LOWER LEFT PENCIL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F589,
+ },
+ [0x1F58A]={
+ category="so",
+ description="LOWER LEFT BALLPOINT PEN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F58A,
+ },
+ [0x1F58B]={
+ category="so",
+ description="LOWER LEFT FOUNTAIN PEN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F58B,
+ },
+ [0x1F58C]={
+ category="so",
+ description="LOWER LEFT PAINTBRUSH",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F58C,
+ },
+ [0x1F58D]={
+ category="so",
+ description="LOWER LEFT CRAYON",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F58D,
+ },
+ [0x1F58E]={
+ category="so",
+ description="LEFT WRITING HAND",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F58E,
+ },
+ [0x1F58F]={
+ category="so",
+ description="TURNED OK HAND SIGN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F58F,
+ },
+ [0x1F590]={
+ category="so",
+ description="RAISED HAND WITH FINGERS SPLAYED",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F590,
+ },
+ [0x1F591]={
+ category="so",
+ description="REVERSED RAISED HAND WITH FINGERS SPLAYED",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F591,
+ },
+ [0x1F592]={
+ category="so",
+ description="REVERSED THUMBS UP SIGN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F592,
+ },
+ [0x1F593]={
+ category="so",
+ description="REVERSED THUMBS DOWN SIGN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F593,
+ },
+ [0x1F594]={
+ category="so",
+ description="REVERSED VICTORY HAND",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F594,
+ },
+ [0x1F595]={
+ category="so",
+ description="REVERSED HAND WITH MIDDLE FINGER EXTENDED",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F595,
+ },
+ [0x1F596]={
+ category="so",
+ description="RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F596,
+ },
+ [0x1F597]={
+ category="so",
+ description="WHITE DOWN POINTING LEFT HAND INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F597,
+ },
+ [0x1F598]={
+ category="so",
+ description="SIDEWAYS WHITE LEFT POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F598,
+ },
+ [0x1F599]={
+ category="so",
+ description="SIDEWAYS WHITE RIGHT POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F599,
+ },
+ [0x1F59A]={
+ category="so",
+ description="SIDEWAYS BLACK LEFT POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F59A,
+ },
+ [0x1F59B]={
+ category="so",
+ description="SIDEWAYS BLACK RIGHT POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F59B,
+ },
+ [0x1F59C]={
+ category="so",
+ description="BLACK LEFT POINTING BACKHAND INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F59C,
+ },
+ [0x1F59D]={
+ category="so",
+ description="BLACK RIGHT POINTING BACKHAND INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F59D,
+ },
+ [0x1F59E]={
+ category="so",
+ description="SIDEWAYS WHITE UP POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F59E,
+ },
+ [0x1F59F]={
+ category="so",
+ description="SIDEWAYS WHITE DOWN POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F59F,
+ },
+ [0x1F5A0]={
+ category="so",
+ description="SIDEWAYS BLACK UP POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A0,
+ },
+ [0x1F5A1]={
+ category="so",
+ description="SIDEWAYS BLACK DOWN POINTING INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A1,
+ },
+ [0x1F5A2]={
+ category="so",
+ description="BLACK UP POINTING BACKHAND INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A2,
+ },
+ [0x1F5A3]={
+ category="so",
+ description="BLACK DOWN POINTING BACKHAND INDEX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A3,
+ },
+ [0x1F5A5]={
+ category="so",
+ description="DESKTOP COMPUTER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A5,
+ },
+ [0x1F5A6]={
+ category="so",
+ description="KEYBOARD AND MOUSE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A6,
+ },
+ [0x1F5A7]={
+ category="so",
+ description="THREE NETWORKED COMPUTERS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A7,
+ },
+ [0x1F5A8]={
+ category="so",
+ description="PRINTER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A8,
+ },
+ [0x1F5A9]={
+ category="so",
+ description="POCKET CALCULATOR",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5A9,
+ },
+ [0x1F5AA]={
+ category="so",
+ description="BLACK HARD SHELL FLOPPY DISK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5AA,
+ },
+ [0x1F5AB]={
+ category="so",
+ description="WHITE HARD SHELL FLOPPY DISK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5AB,
+ },
+ [0x1F5AC]={
+ category="so",
+ description="SOFT SHELL FLOPPY DISK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5AC,
+ },
+ [0x1F5AD]={
+ category="so",
+ description="TAPE CARTRIDGE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5AD,
+ },
+ [0x1F5AE]={
+ category="so",
+ description="WIRED KEYBOARD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5AE,
+ },
+ [0x1F5AF]={
+ category="so",
+ description="ONE BUTTON MOUSE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5AF,
+ },
+ [0x1F5B0]={
+ category="so",
+ description="TWO BUTTON MOUSE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B0,
+ },
+ [0x1F5B1]={
+ category="so",
+ description="THREE BUTTON MOUSE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B1,
+ },
+ [0x1F5B2]={
+ category="so",
+ description="TRACKBALL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B2,
+ },
+ [0x1F5B3]={
+ category="so",
+ description="OLD PERSONAL COMPUTER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B3,
+ },
+ [0x1F5B4]={
+ category="so",
+ description="HARD DISK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B4,
+ },
+ [0x1F5B5]={
+ category="so",
+ description="SCREEN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B5,
+ },
+ [0x1F5B6]={
+ category="so",
+ description="PRINTER ICON",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B6,
+ },
+ [0x1F5B7]={
+ category="so",
+ description="FAX ICON",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B7,
+ },
+ [0x1F5B8]={
+ category="so",
+ description="OPTICAL DISC ICON",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B8,
+ },
+ [0x1F5B9]={
+ category="so",
+ description="DOCUMENT WITH TEXT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5B9,
+ },
+ [0x1F5BA]={
+ category="so",
+ description="DOCUMENT WITH TEXT AND PICTURE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5BA,
+ },
+ [0x1F5BB]={
+ category="so",
+ description="DOCUMENT WITH PICTURE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5BB,
+ },
+ [0x1F5BC]={
+ category="so",
+ description="FRAME WITH PICTURE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5BC,
+ },
+ [0x1F5BD]={
+ category="so",
+ description="FRAME WITH TILES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5BD,
+ },
+ [0x1F5BE]={
+ category="so",
+ description="FRAME WITH AN X",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5BE,
+ },
+ [0x1F5BF]={
+ category="so",
+ description="BLACK FOLDER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5BF,
+ },
+ [0x1F5C0]={
+ category="so",
+ description="FOLDER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C0,
+ },
+ [0x1F5C1]={
+ category="so",
+ description="OPEN FOLDER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C1,
+ },
+ [0x1F5C2]={
+ category="so",
+ description="CARD INDEX DIVIDERS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C2,
+ },
+ [0x1F5C3]={
+ category="so",
+ description="CARD FILE BOX",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C3,
+ },
+ [0x1F5C4]={
+ category="so",
+ description="FILE CABINET",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C4,
+ },
+ [0x1F5C5]={
+ category="so",
+ description="EMPTY NOTE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C5,
+ },
+ [0x1F5C6]={
+ category="so",
+ description="EMPTY NOTE PAGE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C6,
+ },
+ [0x1F5C7]={
+ category="so",
+ description="EMPTY NOTE PAD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C7,
+ },
+ [0x1F5C8]={
+ category="so",
+ description="NOTE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C8,
+ },
+ [0x1F5C9]={
+ category="so",
+ description="NOTE PAGE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5C9,
+ },
+ [0x1F5CA]={
+ category="so",
+ description="NOTE PAD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5CA,
+ },
+ [0x1F5CB]={
+ category="so",
+ description="EMPTY DOCUMENT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5CB,
+ },
+ [0x1F5CC]={
+ category="so",
+ description="EMPTY PAGE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5CC,
+ },
+ [0x1F5CD]={
+ category="so",
+ description="EMPTY PAGES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5CD,
+ },
+ [0x1F5CE]={
+ category="so",
+ description="DOCUMENT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5CE,
+ },
+ [0x1F5CF]={
+ category="so",
+ description="PAGE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5CF,
+ },
+ [0x1F5D0]={
+ category="so",
+ description="PAGES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5D0,
+ },
+ [0x1F5D1]={
+ category="so",
+ description="WASTEBASKET",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5D1,
+ },
+ [0x1F5D2]={
+ category="so",
+ description="SPIRAL NOTE PAD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5D2,
+ },
+ [0x1F5D3]={
+ category="so",
+ description="SPIRAL CALENDAR PAD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5D3,
+ },
+ [0x1F5D4]={
+ category="so",
+ description="DESKTOP WINDOW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5D4,
+ },
+ [0x1F5D5]={
+ category="so",
+ description="MINIMIZE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5D5,
+ },
+ [0x1F5D6]={
+ category="so",
+ description="MAXIMIZE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5D6,
+ },
+ [0x1F5D7]={
+ category="so",
+ description="OVERLAP",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5D7,
+ },
+ [0x1F5D8]={
+ category="so",
+ description="CLOCKWISE RIGHT AND LEFT SEMICIRCLE ARROWS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5D8,
+ },
+ [0x1F5D9]={
+ category="so",
+ description="CANCELLATION X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5D9,
+ },
+ [0x1F5DA]={
+ category="so",
+ description="INCREASE FONT SIZE SYMBOL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5DA,
+ },
+ [0x1F5DB]={
+ category="so",
+ description="DECREASE FONT SIZE SYMBOL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5DB,
+ },
+ [0x1F5DC]={
+ category="so",
+ description="COMPRESSION",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5DC,
+ },
+ [0x1F5DD]={
+ category="so",
+ description="OLD KEY",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5DD,
+ },
+ [0x1F5DE]={
+ category="so",
+ description="ROLLED-UP NEWSPAPER",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5DE,
+ },
+ [0x1F5DF]={
+ category="so",
+ description="PAGE WITH CIRCLED TEXT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5DF,
+ },
+ [0x1F5E0]={
+ category="so",
+ description="STOCK CHART",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E0,
+ },
+ [0x1F5E1]={
+ category="so",
+ description="DAGGER KNIFE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E1,
+ },
+ [0x1F5E2]={
+ category="so",
+ description="LIPS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E2,
+ },
+ [0x1F5E3]={
+ category="so",
+ description="SPEAKING HEAD IN SILHOUETTE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E3,
+ },
+ [0x1F5E4]={
+ category="so",
+ description="THREE RAYS ABOVE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E4,
+ },
+ [0x1F5E5]={
+ category="so",
+ description="THREE RAYS BELOW",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E5,
+ },
+ [0x1F5E6]={
+ category="so",
+ description="THREE RAYS LEFT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E6,
+ },
+ [0x1F5E7]={
+ category="so",
+ description="THREE RAYS RIGHT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E7,
+ },
+ [0x1F5E8]={
+ category="so",
+ description="LEFT SPEECH BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E8,
+ },
+ [0x1F5E9]={
+ category="so",
+ description="RIGHT SPEECH BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5E9,
+ },
+ [0x1F5EA]={
+ category="so",
+ description="TWO SPEECH BUBBLES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5EA,
+ },
+ [0x1F5EB]={
+ category="so",
+ description="THREE SPEECH BUBBLES",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5EB,
+ },
+ [0x1F5EC]={
+ category="so",
+ description="LEFT THOUGHT BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5EC,
+ },
+ [0x1F5ED]={
+ category="so",
+ description="RIGHT THOUGHT BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5ED,
+ },
+ [0x1F5EE]={
+ category="so",
+ description="LEFT ANGER BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5EE,
+ },
+ [0x1F5EF]={
+ category="so",
+ description="RIGHT ANGER BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5EF,
+ },
+ [0x1F5F0]={
+ category="so",
+ description="MOOD BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5F0,
+ },
+ [0x1F5F1]={
+ category="so",
+ description="LIGHTNING MOOD BUBBLE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5F1,
+ },
+ [0x1F5F2]={
+ category="so",
+ description="LIGHTNING MOOD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5F2,
+ },
+ [0x1F5F3]={
+ category="so",
+ description="BALLOT BOX WITH BALLOT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5F3,
+ },
+ [0x1F5F4]={
+ category="so",
+ description="BALLOT SCRIPT X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5F4,
+ },
+ [0x1F5F5]={
+ category="so",
+ description="BALLOT BOX WITH SCRIPT X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5F5,
+ },
+ [0x1F5F6]={
+ category="so",
+ description="BALLOT BOLD SCRIPT X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5F6,
+ },
+ [0x1F5F7]={
+ category="so",
+ description="BALLOT BOX WITH BOLD SCRIPT X",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5F7,
+ },
+ [0x1F5F8]={
+ category="so",
+ description="LIGHT CHECK MARK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5F8,
+ },
+ [0x1F5F9]={
+ category="so",
+ description="BALLOT BOX WITH BOLD CHECK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F5F9,
+ },
+ [0x1F5FA]={
+ category="so",
+ description="WORLD MAP",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F5FA,
+ },
[0x1F5FB]={
category="so",
description="MOUNT FUJI",
@@ -185031,6 +204407,20 @@ characters.data={
linebreak="id",
unicodeslot=0x1F640,
},
+ [0x1F641]={
+ category="so",
+ description="SLIGHTLY FROWNING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F641,
+ },
+ [0x1F642]={
+ category="so",
+ description="SLIGHTLY SMILING FACE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F642,
+ },
[0x1F645]={
category="so",
description="FACE WITH NO GOOD GESTURE",
@@ -185108,6 +204498,342 @@ characters.data={
linebreak="id",
unicodeslot=0x1F64F,
},
+ [0x1F650]={
+ category="so",
+ description="NORTH WEST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F650,
+ },
+ [0x1F651]={
+ category="so",
+ description="SOUTH WEST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F651,
+ },
+ [0x1F652]={
+ category="so",
+ description="NORTH EAST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F652,
+ },
+ [0x1F653]={
+ category="so",
+ description="SOUTH EAST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F653,
+ },
+ [0x1F654]={
+ category="so",
+ description="TURNED NORTH WEST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F654,
+ },
+ [0x1F655]={
+ category="so",
+ description="TURNED SOUTH WEST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F655,
+ },
+ [0x1F656]={
+ category="so",
+ description="TURNED NORTH EAST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F656,
+ },
+ [0x1F657]={
+ category="so",
+ description="TURNED SOUTH EAST POINTING LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F657,
+ },
+ [0x1F658]={
+ category="so",
+ description="NORTH WEST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F658,
+ },
+ [0x1F659]={
+ category="so",
+ description="SOUTH WEST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F659,
+ },
+ [0x1F65A]={
+ category="so",
+ description="NORTH EAST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F65A,
+ },
+ [0x1F65B]={
+ category="so",
+ description="SOUTH EAST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F65B,
+ },
+ [0x1F65C]={
+ category="so",
+ description="HEAVY NORTH WEST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F65C,
+ },
+ [0x1F65D]={
+ category="so",
+ description="HEAVY SOUTH WEST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F65D,
+ },
+ [0x1F65E]={
+ category="so",
+ description="HEAVY NORTH EAST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F65E,
+ },
+ [0x1F65F]={
+ category="so",
+ description="HEAVY SOUTH EAST POINTING VINE LEAF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F65F,
+ },
+ [0x1F660]={
+ category="so",
+ description="NORTH WEST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F660,
+ },
+ [0x1F661]={
+ category="so",
+ description="SOUTH WEST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F661,
+ },
+ [0x1F662]={
+ category="so",
+ description="NORTH EAST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F662,
+ },
+ [0x1F663]={
+ category="so",
+ description="SOUTH EAST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F663,
+ },
+ [0x1F664]={
+ category="so",
+ description="HEAVY NORTH WEST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F664,
+ },
+ [0x1F665]={
+ category="so",
+ description="HEAVY SOUTH WEST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F665,
+ },
+ [0x1F666]={
+ category="so",
+ description="HEAVY NORTH EAST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F666,
+ },
+ [0x1F667]={
+ category="so",
+ description="HEAVY SOUTH EAST POINTING BUD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F667,
+ },
+ [0x1F668]={
+ category="so",
+ description="HOLLOW QUILT SQUARE ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F668,
+ },
+ [0x1F669]={
+ category="so",
+ description="HOLLOW QUILT SQUARE ORNAMENT IN BLACK SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F669,
+ },
+ [0x1F66A]={
+ category="so",
+ description="SOLID QUILT SQUARE ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F66A,
+ },
+ [0x1F66B]={
+ category="so",
+ description="SOLID QUILT SQUARE ORNAMENT IN BLACK SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F66B,
+ },
+ [0x1F66C]={
+ category="so",
+ description="LEFTWARDS ROCKET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F66C,
+ },
+ [0x1F66D]={
+ category="so",
+ description="UPWARDS ROCKET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F66D,
+ },
+ [0x1F66E]={
+ category="so",
+ description="RIGHTWARDS ROCKET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F66E,
+ },
+ [0x1F66F]={
+ category="so",
+ description="DOWNWARDS ROCKET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F66F,
+ },
+ [0x1F670]={
+ category="so",
+ description="SCRIPT LIGATURE ET ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F670,
+ },
+ [0x1F671]={
+ category="so",
+ description="HEAVY SCRIPT LIGATURE ET ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F671,
+ },
+ [0x1F672]={
+ category="so",
+ description="LIGATURE OPEN ET ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F672,
+ },
+ [0x1F673]={
+ category="so",
+ description="HEAVY LIGATURE OPEN ET ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F673,
+ },
+ [0x1F674]={
+ category="so",
+ description="HEAVY AMPERSAND ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F674,
+ },
+ [0x1F675]={
+ category="so",
+ description="SWASH AMPERSAND ORNAMENT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F675,
+ },
+ [0x1F676]={
+ category="so",
+ description="SANS-SERIF HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT",
+ direction="on",
+ linebreak="qu",
+ unicodeslot=0x1F676,
+ },
+ [0x1F677]={
+ category="so",
+ description="SANS-SERIF HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT",
+ direction="on",
+ linebreak="qu",
+ unicodeslot=0x1F677,
+ },
+ [0x1F678]={
+ category="so",
+ description="SANS-SERIF HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT",
+ direction="on",
+ linebreak="qu",
+ unicodeslot=0x1F678,
+ },
+ [0x1F679]={
+ category="so",
+ description="HEAVY INTERROBANG ORNAMENT",
+ direction="on",
+ linebreak="ns",
+ unicodeslot=0x1F679,
+ },
+ [0x1F67A]={
+ category="so",
+ description="SANS-SERIF INTERROBANG ORNAMENT",
+ direction="on",
+ linebreak="ns",
+ unicodeslot=0x1F67A,
+ },
+ [0x1F67B]={
+ category="so",
+ description="HEAVY SANS-SERIF INTERROBANG ORNAMENT",
+ direction="on",
+ linebreak="ns",
+ unicodeslot=0x1F67B,
+ },
+ [0x1F67C]={
+ category="so",
+ description="VERY HEAVY SOLIDUS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F67C,
+ },
+ [0x1F67D]={
+ category="so",
+ description="VERY HEAVY REVERSE SOLIDUS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F67D,
+ },
+ [0x1F67E]={
+ category="so",
+ description="CHECKER BOARD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F67E,
+ },
+ [0x1F67F]={
+ category="so",
+ description="REVERSE CHECKER BOARD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F67F,
+ },
[0x1F680]={
category="so",
description="ROCKET",
@@ -185598,6 +205324,195 @@ characters.data={
linebreak="id",
unicodeslot=0x1F6C5,
},
+ [0x1F6C6]={
+ category="so",
+ description="TRIANGLE WITH ROUNDED CORNERS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6C6,
+ },
+ [0x1F6C7]={
+ category="so",
+ description="PROHIBITED SIGN",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6C7,
+ },
+ [0x1F6C8]={
+ category="so",
+ description="CIRCLED INFORMATION SOURCE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6C8,
+ },
+ [0x1F6C9]={
+ category="so",
+ description="BOYS SYMBOL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6C9,
+ },
+ [0x1F6CA]={
+ category="so",
+ description="GIRLS SYMBOL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6CA,
+ },
+ [0x1F6CB]={
+ category="so",
+ description="COUCH AND LAMP",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6CB,
+ },
+ [0x1F6CC]={
+ category="so",
+ description="SLEEPING ACCOMMODATION",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6CC,
+ },
+ [0x1F6CD]={
+ category="so",
+ description="SHOPPING BAGS",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6CD,
+ },
+ [0x1F6CE]={
+ category="so",
+ description="BELLHOP BELL",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6CE,
+ },
+ [0x1F6CF]={
+ category="so",
+ description="BED",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6CF,
+ },
+ [0x1F6E0]={
+ category="so",
+ description="HAMMER AND WRENCH",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E0,
+ },
+ [0x1F6E1]={
+ category="so",
+ description="SHIELD",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E1,
+ },
+ [0x1F6E2]={
+ category="so",
+ description="OIL DRUM",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E2,
+ },
+ [0x1F6E3]={
+ category="so",
+ description="MOTORWAY",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E3,
+ },
+ [0x1F6E4]={
+ category="so",
+ description="RAILWAY TRACK",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E4,
+ },
+ [0x1F6E5]={
+ category="so",
+ description="MOTOR BOAT",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E5,
+ },
+ [0x1F6E6]={
+ category="so",
+ description="UP-POINTING MILITARY AIRPLANE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E6,
+ },
+ [0x1F6E7]={
+ category="so",
+ description="UP-POINTING AIRPLANE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E7,
+ },
+ [0x1F6E8]={
+ category="so",
+ description="UP-POINTING SMALL AIRPLANE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E8,
+ },
+ [0x1F6E9]={
+ category="so",
+ description="SMALL AIRPLANE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6E9,
+ },
+ [0x1F6EA]={
+ category="so",
+ description="NORTHEAST-POINTING AIRPLANE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6EA,
+ },
+ [0x1F6EB]={
+ category="so",
+ description="AIRPLANE DEPARTURE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6EB,
+ },
+ [0x1F6EC]={
+ category="so",
+ description="AIRPLANE ARRIVING",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6EC,
+ },
+ [0x1F6F0]={
+ category="so",
+ description="SATELLITE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6F0,
+ },
+ [0x1F6F1]={
+ category="so",
+ description="ONCOMING FIRE ENGINE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6F1,
+ },
+ [0x1F6F2]={
+ category="so",
+ description="DIESEL LOCOMOTIVE",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6F2,
+ },
+ [0x1F6F3]={
+ category="so",
+ description="PASSENGER SHIP",
+ direction="on",
+ linebreak="id",
+ unicodeslot=0x1F6F3,
+ },
[0x1F700]={
category="so",
description="ALCHEMICAL SYMBOL FOR QUINTESSENCE",
@@ -186410,6 +206325,1637 @@ characters.data={
linebreak="al",
unicodeslot=0x1F773,
},
+ [0x1F780]={
+ category="so",
+ description="BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F780,
+ },
+ [0x1F781]={
+ category="so",
+ description="BLACK UP-POINTING ISOSCELES RIGHT TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F781,
+ },
+ [0x1F782]={
+ category="so",
+ description="BLACK RIGHT-POINTING ISOSCELES RIGHT TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F782,
+ },
+ [0x1F783]={
+ category="so",
+ description="BLACK DOWN-POINTING ISOSCELES RIGHT TRIANGLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F783,
+ },
+ [0x1F784]={
+ category="so",
+ description="BLACK SLIGHTLY SMALL CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F784,
+ },
+ [0x1F785]={
+ category="so",
+ description="MEDIUM BOLD WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F785,
+ },
+ [0x1F786]={
+ category="so",
+ description="BOLD WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F786,
+ },
+ [0x1F787]={
+ category="so",
+ description="HEAVY WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F787,
+ },
+ [0x1F788]={
+ category="so",
+ description="VERY HEAVY WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F788,
+ },
+ [0x1F789]={
+ category="so",
+ description="EXTREMELY HEAVY WHITE CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F789,
+ },
+ [0x1F78A]={
+ category="so",
+ description="WHITE CIRCLE CONTAINING BLACK SMALL CIRCLE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F78A,
+ },
+ [0x1F78B]={
+ category="so",
+ description="ROUND TARGET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F78B,
+ },
+ [0x1F78C]={
+ category="so",
+ description="BLACK TINY SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F78C,
+ },
+ [0x1F78D]={
+ category="so",
+ description="BLACK SLIGHTLY SMALL SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F78D,
+ },
+ [0x1F78E]={
+ category="so",
+ description="LIGHT WHITE SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F78E,
+ },
+ [0x1F78F]={
+ category="so",
+ description="MEDIUM WHITE SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F78F,
+ },
+ [0x1F790]={
+ category="so",
+ description="BOLD WHITE SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F790,
+ },
+ [0x1F791]={
+ category="so",
+ description="HEAVY WHITE SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F791,
+ },
+ [0x1F792]={
+ category="so",
+ description="VERY HEAVY WHITE SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F792,
+ },
+ [0x1F793]={
+ category="so",
+ description="EXTREMELY HEAVY WHITE SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F793,
+ },
+ [0x1F794]={
+ category="so",
+ description="WHITE SQUARE CONTAINING BLACK VERY SMALL SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F794,
+ },
+ [0x1F795]={
+ category="so",
+ description="WHITE SQUARE CONTAINING BLACK MEDIUM SQUARE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F795,
+ },
+ [0x1F796]={
+ category="so",
+ description="SQUARE TARGET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F796,
+ },
+ [0x1F797]={
+ category="so",
+ description="BLACK TINY DIAMOND",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F797,
+ },
+ [0x1F798]={
+ category="so",
+ description="BLACK VERY SMALL DIAMOND",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F798,
+ },
+ [0x1F799]={
+ category="so",
+ description="BLACK MEDIUM SMALL DIAMOND",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F799,
+ },
+ [0x1F79A]={
+ category="so",
+ description="WHITE DIAMOND CONTAINING BLACK VERY SMALL DIAMOND",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F79A,
+ },
+ [0x1F79B]={
+ category="so",
+ description="WHITE DIAMOND CONTAINING BLACK MEDIUM DIAMOND",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F79B,
+ },
+ [0x1F79C]={
+ category="so",
+ description="DIAMOND TARGET",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F79C,
+ },
+ [0x1F79D]={
+ category="so",
+ description="BLACK TINY LOZENGE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F79D,
+ },
+ [0x1F79E]={
+ category="so",
+ description="BLACK VERY SMALL LOZENGE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F79E,
+ },
+ [0x1F79F]={
+ category="so",
+ description="BLACK MEDIUM SMALL LOZENGE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F79F,
+ },
+ [0x1F7A0]={
+ category="so",
+ description="WHITE LOZENGE CONTAINING BLACK SMALL LOZENGE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A0,
+ },
+ [0x1F7A1]={
+ category="so",
+ description="THIN GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A1,
+ },
+ [0x1F7A2]={
+ category="so",
+ description="LIGHT GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A2,
+ },
+ [0x1F7A3]={
+ category="so",
+ description="MEDIUM GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A3,
+ },
+ [0x1F7A4]={
+ category="so",
+ description="BOLD GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A4,
+ },
+ [0x1F7A5]={
+ category="so",
+ description="VERY BOLD GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A5,
+ },
+ [0x1F7A6]={
+ category="so",
+ description="VERY HEAVY GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A6,
+ },
+ [0x1F7A7]={
+ category="so",
+ description="EXTREMELY HEAVY GREEK CROSS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A7,
+ },
+ [0x1F7A8]={
+ category="so",
+ description="THIN SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A8,
+ },
+ [0x1F7A9]={
+ category="so",
+ description="LIGHT SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7A9,
+ },
+ [0x1F7AA]={
+ category="so",
+ description="MEDIUM SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7AA,
+ },
+ [0x1F7AB]={
+ category="so",
+ description="BOLD SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7AB,
+ },
+ [0x1F7AC]={
+ category="so",
+ description="HEAVY SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7AC,
+ },
+ [0x1F7AD]={
+ category="so",
+ description="VERY HEAVY SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7AD,
+ },
+ [0x1F7AE]={
+ category="so",
+ description="EXTREMELY HEAVY SALTIRE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7AE,
+ },
+ [0x1F7AF]={
+ category="so",
+ description="LIGHT FIVE SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7AF,
+ },
+ [0x1F7B0]={
+ category="so",
+ description="MEDIUM FIVE SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B0,
+ },
+ [0x1F7B1]={
+ category="so",
+ description="BOLD FIVE SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B1,
+ },
+ [0x1F7B2]={
+ category="so",
+ description="HEAVY FIVE SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B2,
+ },
+ [0x1F7B3]={
+ category="so",
+ description="VERY HEAVY FIVE SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B3,
+ },
+ [0x1F7B4]={
+ category="so",
+ description="EXTREMELY HEAVY FIVE SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B4,
+ },
+ [0x1F7B5]={
+ category="so",
+ description="LIGHT SIX SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B5,
+ },
+ [0x1F7B6]={
+ category="so",
+ description="MEDIUM SIX SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B6,
+ },
+ [0x1F7B7]={
+ category="so",
+ description="BOLD SIX SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B7,
+ },
+ [0x1F7B8]={
+ category="so",
+ description="HEAVY SIX SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B8,
+ },
+ [0x1F7B9]={
+ category="so",
+ description="VERY HEAVY SIX SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7B9,
+ },
+ [0x1F7BA]={
+ category="so",
+ description="EXTREMELY HEAVY SIX SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7BA,
+ },
+ [0x1F7BB]={
+ category="so",
+ description="LIGHT EIGHT SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7BB,
+ },
+ [0x1F7BC]={
+ category="so",
+ description="MEDIUM EIGHT SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7BC,
+ },
+ [0x1F7BD]={
+ category="so",
+ description="BOLD EIGHT SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7BD,
+ },
+ [0x1F7BE]={
+ category="so",
+ description="HEAVY EIGHT SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7BE,
+ },
+ [0x1F7BF]={
+ category="so",
+ description="VERY HEAVY EIGHT SPOKED ASTERISK",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7BF,
+ },
+ [0x1F7C0]={
+ category="so",
+ description="LIGHT THREE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C0,
+ },
+ [0x1F7C1]={
+ category="so",
+ description="MEDIUM THREE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C1,
+ },
+ [0x1F7C2]={
+ category="so",
+ description="THREE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C2,
+ },
+ [0x1F7C3]={
+ category="so",
+ description="MEDIUM THREE POINTED PINWHEEL STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C3,
+ },
+ [0x1F7C4]={
+ category="so",
+ description="LIGHT FOUR POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C4,
+ },
+ [0x1F7C5]={
+ category="so",
+ description="MEDIUM FOUR POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C5,
+ },
+ [0x1F7C6]={
+ category="so",
+ description="FOUR POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C6,
+ },
+ [0x1F7C7]={
+ category="so",
+ description="MEDIUM FOUR POINTED PINWHEEL STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C7,
+ },
+ [0x1F7C8]={
+ category="so",
+ description="REVERSE LIGHT FOUR POINTED PINWHEEL STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C8,
+ },
+ [0x1F7C9]={
+ category="so",
+ description="LIGHT FIVE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7C9,
+ },
+ [0x1F7CA]={
+ category="so",
+ description="HEAVY FIVE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7CA,
+ },
+ [0x1F7CB]={
+ category="so",
+ description="MEDIUM SIX POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7CB,
+ },
+ [0x1F7CC]={
+ category="so",
+ description="HEAVY SIX POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7CC,
+ },
+ [0x1F7CD]={
+ category="so",
+ description="SIX POINTED PINWHEEL STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7CD,
+ },
+ [0x1F7CE]={
+ category="so",
+ description="MEDIUM EIGHT POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7CE,
+ },
+ [0x1F7CF]={
+ category="so",
+ description="HEAVY EIGHT POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7CF,
+ },
+ [0x1F7D0]={
+ category="so",
+ description="VERY HEAVY EIGHT POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7D0,
+ },
+ [0x1F7D1]={
+ category="so",
+ description="HEAVY EIGHT POINTED PINWHEEL STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7D1,
+ },
+ [0x1F7D2]={
+ category="so",
+ description="LIGHT TWELVE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7D2,
+ },
+ [0x1F7D3]={
+ category="so",
+ description="HEAVY TWELVE POINTED BLACK STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7D3,
+ },
+ [0x1F7D4]={
+ category="so",
+ description="HEAVY TWELVE POINTED PINWHEEL STAR",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F7D4,
+ },
+ [0x1F800]={
+ category="so",
+ description="LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F800,
+ },
+ [0x1F801]={
+ category="so",
+ description="UPWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F801,
+ },
+ [0x1F802]={
+ category="so",
+ description="RIGHTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F802,
+ },
+ [0x1F803]={
+ category="so",
+ description="DOWNWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F803,
+ },
+ [0x1F804]={
+ category="so",
+ description="LEFTWARDS ARROW WITH MEDIUM TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F804,
+ },
+ [0x1F805]={
+ category="so",
+ description="UPWARDS ARROW WITH MEDIUM TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F805,
+ },
+ [0x1F806]={
+ category="so",
+ description="RIGHTWARDS ARROW WITH MEDIUM TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F806,
+ },
+ [0x1F807]={
+ category="so",
+ description="DOWNWARDS ARROW WITH MEDIUM TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F807,
+ },
+ [0x1F808]={
+ category="so",
+ description="LEFTWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F808,
+ },
+ [0x1F809]={
+ category="so",
+ description="UPWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F809,
+ },
+ [0x1F80A]={
+ category="so",
+ description="RIGHTWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F80A,
+ },
+ [0x1F80B]={
+ category="so",
+ description="DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F80B,
+ },
+ [0x1F810]={
+ category="so",
+ description="LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F810,
+ },
+ [0x1F811]={
+ category="so",
+ description="UPWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F811,
+ },
+ [0x1F812]={
+ category="so",
+ description="RIGHTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F812,
+ },
+ [0x1F813]={
+ category="so",
+ description="DOWNWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F813,
+ },
+ [0x1F814]={
+ category="so",
+ description="LEFTWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F814,
+ },
+ [0x1F815]={
+ category="so",
+ description="UPWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F815,
+ },
+ [0x1F816]={
+ category="so",
+ description="RIGHTWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F816,
+ },
+ [0x1F817]={
+ category="so",
+ description="DOWNWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F817,
+ },
+ [0x1F818]={
+ category="so",
+ description="HEAVY LEFTWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F818,
+ },
+ [0x1F819]={
+ category="so",
+ description="HEAVY UPWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F819,
+ },
+ [0x1F81A]={
+ category="so",
+ description="HEAVY RIGHTWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F81A,
+ },
+ [0x1F81B]={
+ category="so",
+ description="HEAVY DOWNWARDS ARROW WITH EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F81B,
+ },
+ [0x1F81C]={
+ category="so",
+ description="HEAVY LEFTWARDS ARROW WITH LARGE EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F81C,
+ },
+ [0x1F81D]={
+ category="so",
+ description="HEAVY UPWARDS ARROW WITH LARGE EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F81D,
+ },
+ [0x1F81E]={
+ category="so",
+ description="HEAVY RIGHTWARDS ARROW WITH LARGE EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F81E,
+ },
+ [0x1F81F]={
+ category="so",
+ description="HEAVY DOWNWARDS ARROW WITH LARGE EQUILATERAL ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F81F,
+ },
+ [0x1F820]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH NARROW SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F820,
+ },
+ [0x1F821]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH NARROW SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F821,
+ },
+ [0x1F822]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH NARROW SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F822,
+ },
+ [0x1F823]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH NARROW SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F823,
+ },
+ [0x1F824]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH MEDIUM SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F824,
+ },
+ [0x1F825]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH MEDIUM SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F825,
+ },
+ [0x1F826]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH MEDIUM SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F826,
+ },
+ [0x1F827]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH MEDIUM SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F827,
+ },
+ [0x1F828]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH BOLD SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F828,
+ },
+ [0x1F829]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH BOLD SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F829,
+ },
+ [0x1F82A]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH BOLD SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F82A,
+ },
+ [0x1F82B]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH BOLD SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F82B,
+ },
+ [0x1F82C]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F82C,
+ },
+ [0x1F82D]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F82D,
+ },
+ [0x1F82E]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F82E,
+ },
+ [0x1F82F]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F82F,
+ },
+ [0x1F830]={
+ category="so",
+ description="LEFTWARDS TRIANGLE-HEADED ARROW WITH VERY HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F830,
+ },
+ [0x1F831]={
+ category="so",
+ description="UPWARDS TRIANGLE-HEADED ARROW WITH VERY HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F831,
+ },
+ [0x1F832]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE-HEADED ARROW WITH VERY HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F832,
+ },
+ [0x1F833]={
+ category="so",
+ description="DOWNWARDS TRIANGLE-HEADED ARROW WITH VERY HEAVY SHAFT",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F833,
+ },
+ [0x1F834]={
+ category="so",
+ description="LEFTWARDS FINGER-POST ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F834,
+ },
+ [0x1F835]={
+ category="so",
+ description="UPWARDS FINGER-POST ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F835,
+ },
+ [0x1F836]={
+ category="so",
+ description="RIGHTWARDS FINGER-POST ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F836,
+ },
+ [0x1F837]={
+ category="so",
+ description="DOWNWARDS FINGER-POST ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F837,
+ },
+ [0x1F838]={
+ category="so",
+ description="LEFTWARDS SQUARED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F838,
+ },
+ [0x1F839]={
+ category="so",
+ description="UPWARDS SQUARED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F839,
+ },
+ [0x1F83A]={
+ category="so",
+ description="RIGHTWARDS SQUARED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F83A,
+ },
+ [0x1F83B]={
+ category="so",
+ description="DOWNWARDS SQUARED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F83B,
+ },
+ [0x1F83C]={
+ category="so",
+ description="LEFTWARDS COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F83C,
+ },
+ [0x1F83D]={
+ category="so",
+ description="UPWARDS COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F83D,
+ },
+ [0x1F83E]={
+ category="so",
+ description="RIGHTWARDS COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F83E,
+ },
+ [0x1F83F]={
+ category="so",
+ description="DOWNWARDS COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F83F,
+ },
+ [0x1F840]={
+ category="so",
+ description="LEFTWARDS HEAVY COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F840,
+ },
+ [0x1F841]={
+ category="so",
+ description="UPWARDS HEAVY COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F841,
+ },
+ [0x1F842]={
+ category="so",
+ description="RIGHTWARDS HEAVY COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F842,
+ },
+ [0x1F843]={
+ category="so",
+ description="DOWNWARDS HEAVY COMPRESSED ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F843,
+ },
+ [0x1F844]={
+ category="so",
+ description="LEFTWARDS HEAVY ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F844,
+ },
+ [0x1F845]={
+ category="so",
+ description="UPWARDS HEAVY ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F845,
+ },
+ [0x1F846]={
+ category="so",
+ description="RIGHTWARDS HEAVY ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F846,
+ },
+ [0x1F847]={
+ category="so",
+ description="DOWNWARDS HEAVY ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F847,
+ },
+ [0x1F850]={
+ category="so",
+ description="LEFTWARDS SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F850,
+ },
+ [0x1F851]={
+ category="so",
+ description="UPWARDS SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F851,
+ },
+ [0x1F852]={
+ category="so",
+ description="RIGHTWARDS SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F852,
+ },
+ [0x1F853]={
+ category="so",
+ description="DOWNWARDS SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F853,
+ },
+ [0x1F854]={
+ category="so",
+ description="NORTH WEST SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F854,
+ },
+ [0x1F855]={
+ category="so",
+ description="NORTH EAST SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F855,
+ },
+ [0x1F856]={
+ category="so",
+ description="SOUTH EAST SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F856,
+ },
+ [0x1F857]={
+ category="so",
+ description="SOUTH WEST SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F857,
+ },
+ [0x1F858]={
+ category="so",
+ description="LEFT RIGHT SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F858,
+ },
+ [0x1F859]={
+ category="so",
+ description="UP DOWN SANS-SERIF ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F859,
+ },
+ [0x1F860]={
+ category="so",
+ description="WIDE-HEADED LEFTWARDS LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F860,
+ },
+ [0x1F861]={
+ category="so",
+ description="WIDE-HEADED UPWARDS LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F861,
+ },
+ [0x1F862]={
+ category="so",
+ description="WIDE-HEADED RIGHTWARDS LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F862,
+ },
+ [0x1F863]={
+ category="so",
+ description="WIDE-HEADED DOWNWARDS LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F863,
+ },
+ [0x1F864]={
+ category="so",
+ description="WIDE-HEADED NORTH WEST LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F864,
+ },
+ [0x1F865]={
+ category="so",
+ description="WIDE-HEADED NORTH EAST LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F865,
+ },
+ [0x1F866]={
+ category="so",
+ description="WIDE-HEADED SOUTH EAST LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F866,
+ },
+ [0x1F867]={
+ category="so",
+ description="WIDE-HEADED SOUTH WEST LIGHT BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F867,
+ },
+ [0x1F868]={
+ category="so",
+ description="WIDE-HEADED LEFTWARDS BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F868,
+ },
+ [0x1F869]={
+ category="so",
+ description="WIDE-HEADED UPWARDS BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F869,
+ },
+ [0x1F86A]={
+ category="so",
+ description="WIDE-HEADED RIGHTWARDS BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F86A,
+ },
+ [0x1F86B]={
+ category="so",
+ description="WIDE-HEADED DOWNWARDS BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F86B,
+ },
+ [0x1F86C]={
+ category="so",
+ description="WIDE-HEADED NORTH WEST BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F86C,
+ },
+ [0x1F86D]={
+ category="so",
+ description="WIDE-HEADED NORTH EAST BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F86D,
+ },
+ [0x1F86E]={
+ category="so",
+ description="WIDE-HEADED SOUTH EAST BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F86E,
+ },
+ [0x1F86F]={
+ category="so",
+ description="WIDE-HEADED SOUTH WEST BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F86F,
+ },
+ [0x1F870]={
+ category="so",
+ description="WIDE-HEADED LEFTWARDS MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F870,
+ },
+ [0x1F871]={
+ category="so",
+ description="WIDE-HEADED UPWARDS MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F871,
+ },
+ [0x1F872]={
+ category="so",
+ description="WIDE-HEADED RIGHTWARDS MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F872,
+ },
+ [0x1F873]={
+ category="so",
+ description="WIDE-HEADED DOWNWARDS MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F873,
+ },
+ [0x1F874]={
+ category="so",
+ description="WIDE-HEADED NORTH WEST MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F874,
+ },
+ [0x1F875]={
+ category="so",
+ description="WIDE-HEADED NORTH EAST MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F875,
+ },
+ [0x1F876]={
+ category="so",
+ description="WIDE-HEADED SOUTH EAST MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F876,
+ },
+ [0x1F877]={
+ category="so",
+ description="WIDE-HEADED SOUTH WEST MEDIUM BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F877,
+ },
+ [0x1F878]={
+ category="so",
+ description="WIDE-HEADED LEFTWARDS HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F878,
+ },
+ [0x1F879]={
+ category="so",
+ description="WIDE-HEADED UPWARDS HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F879,
+ },
+ [0x1F87A]={
+ category="so",
+ description="WIDE-HEADED RIGHTWARDS HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F87A,
+ },
+ [0x1F87B]={
+ category="so",
+ description="WIDE-HEADED DOWNWARDS HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F87B,
+ },
+ [0x1F87C]={
+ category="so",
+ description="WIDE-HEADED NORTH WEST HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F87C,
+ },
+ [0x1F87D]={
+ category="so",
+ description="WIDE-HEADED NORTH EAST HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F87D,
+ },
+ [0x1F87E]={
+ category="so",
+ description="WIDE-HEADED SOUTH EAST HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F87E,
+ },
+ [0x1F87F]={
+ category="so",
+ description="WIDE-HEADED SOUTH WEST HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F87F,
+ },
+ [0x1F880]={
+ category="so",
+ description="WIDE-HEADED LEFTWARDS VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F880,
+ },
+ [0x1F881]={
+ category="so",
+ description="WIDE-HEADED UPWARDS VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F881,
+ },
+ [0x1F882]={
+ category="so",
+ description="WIDE-HEADED RIGHTWARDS VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F882,
+ },
+ [0x1F883]={
+ category="so",
+ description="WIDE-HEADED DOWNWARDS VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F883,
+ },
+ [0x1F884]={
+ category="so",
+ description="WIDE-HEADED NORTH WEST VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F884,
+ },
+ [0x1F885]={
+ category="so",
+ description="WIDE-HEADED NORTH EAST VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F885,
+ },
+ [0x1F886]={
+ category="so",
+ description="WIDE-HEADED SOUTH EAST VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F886,
+ },
+ [0x1F887]={
+ category="so",
+ description="WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F887,
+ },
+ [0x1F890]={
+ category="so",
+ description="LEFTWARDS TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F890,
+ },
+ [0x1F891]={
+ category="so",
+ description="UPWARDS TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F891,
+ },
+ [0x1F892]={
+ category="so",
+ description="RIGHTWARDS TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F892,
+ },
+ [0x1F893]={
+ category="so",
+ description="DOWNWARDS TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F893,
+ },
+ [0x1F894]={
+ category="so",
+ description="LEFTWARDS WHITE ARROW WITHIN TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F894,
+ },
+ [0x1F895]={
+ category="so",
+ description="UPWARDS WHITE ARROW WITHIN TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F895,
+ },
+ [0x1F896]={
+ category="so",
+ description="RIGHTWARDS WHITE ARROW WITHIN TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F896,
+ },
+ [0x1F897]={
+ category="so",
+ description="DOWNWARDS WHITE ARROW WITHIN TRIANGLE ARROWHEAD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F897,
+ },
+ [0x1F898]={
+ category="so",
+ description="LEFTWARDS ARROW WITH NOTCHED TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F898,
+ },
+ [0x1F899]={
+ category="so",
+ description="UPWARDS ARROW WITH NOTCHED TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F899,
+ },
+ [0x1F89A]={
+ category="so",
+ description="RIGHTWARDS ARROW WITH NOTCHED TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F89A,
+ },
+ [0x1F89B]={
+ category="so",
+ description="DOWNWARDS ARROW WITH NOTCHED TAIL",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F89B,
+ },
+ [0x1F89C]={
+ category="so",
+ description="HEAVY ARROW SHAFT WIDTH ONE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F89C,
+ },
+ [0x1F89D]={
+ category="so",
+ description="HEAVY ARROW SHAFT WIDTH TWO THIRDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F89D,
+ },
+ [0x1F89E]={
+ category="so",
+ description="HEAVY ARROW SHAFT WIDTH ONE HALF",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F89E,
+ },
+ [0x1F89F]={
+ category="so",
+ description="HEAVY ARROW SHAFT WIDTH ONE THIRD",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F89F,
+ },
+ [0x1F8A0]={
+ category="so",
+ description="LEFTWARDS BOTTOM-SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A0,
+ },
+ [0x1F8A1]={
+ category="so",
+ description="RIGHTWARDS BOTTOM SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A1,
+ },
+ [0x1F8A2]={
+ category="so",
+ description="LEFTWARDS TOP SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A2,
+ },
+ [0x1F8A3]={
+ category="so",
+ description="RIGHTWARDS TOP SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A3,
+ },
+ [0x1F8A4]={
+ category="so",
+ description="LEFTWARDS LEFT-SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A4,
+ },
+ [0x1F8A5]={
+ category="so",
+ description="RIGHTWARDS RIGHT-SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A5,
+ },
+ [0x1F8A6]={
+ category="so",
+ description="LEFTWARDS RIGHT-SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A6,
+ },
+ [0x1F8A7]={
+ category="so",
+ description="RIGHTWARDS LEFT-SHADED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A7,
+ },
+ [0x1F8A8]={
+ category="so",
+ description="LEFTWARDS BACK-TILTED SHADOWED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A8,
+ },
+ [0x1F8A9]={
+ category="so",
+ description="RIGHTWARDS BACK-TILTED SHADOWED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8A9,
+ },
+ [0x1F8AA]={
+ category="so",
+ description="LEFTWARDS FRONT-TILTED SHADOWED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8AA,
+ },
+ [0x1F8AB]={
+ category="so",
+ description="RIGHTWARDS FRONT-TILTED SHADOWED WHITE ARROW",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8AB,
+ },
+ [0x1F8AC]={
+ category="so",
+ description="WHITE ARROW SHAFT WIDTH ONE",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8AC,
+ },
+ [0x1F8AD]={
+ category="so",
+ description="WHITE ARROW SHAFT WIDTH TWO THIRDS",
+ direction="on",
+ linebreak="al",
+ unicodeslot=0x1F8AD,
+ },
[0x2F800]={
category="lo",
cjkwd="w",
diff --git a/tex/context/base/char-enc.lua b/tex/context/base/char-enc.lua
index 048837eec..c2061891a 100644
--- a/tex/context/base/char-enc.lua
+++ b/tex/context/base/char-enc.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['char-enc'] = {
-- Thanks to tex4ht for these mappings.
+local next = next
+
local allocate, setinitializer = utilities.storage.allocate, utilities.storage.setinitializer
characters = characters or { }
@@ -169,7 +171,10 @@ characters.synonyms = allocate { -- afm mess
-- that table.print would not work on this file unless it is accessed once. This
-- is why the serializer does a dummy access.
-local enccodes = allocate() characters.enccodes = enccodes
+local enccodes = allocate()
+characters.enccodes = enccodes
+
+ -- maybe omit context name -> then same as encodings.make_unicode_vector
local function initialize()
for unicode, data in next, characters.data do
@@ -179,7 +184,9 @@ local function initialize()
end
end
for name, unicode in next, characters.synonyms do
- if not enccodes[name] then enccodes[name] = unicode end
+ if not enccodes[name] then
+ enccodes[name] = unicode
+ end
end
end
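
A minimal standalone sketch of the fallback that the initialize() hunk above tidies up, using made-up sample tables rather than the real characters.data and characters.synonyms: names taken from the character data win, and a synonym only fills a slot that is still empty.

-- standalone illustration with invented sample data, not the real tables
local data = {
  [0x00041] = { adobename = "A" },
  [0x000A0] = { adobename = "nbspace" },
}
local synonyms = { nbspace = 0x00020, Amacron = 0x00100 }

local enccodes = { }

for unicode, chr in pairs(data) do
  local name = chr.adobename
  if name then
    enccodes[name] = unicode          -- primary names always win
  end
end
for name, unicode in pairs(synonyms) do
  if not enccodes[name] then
    enccodes[name] = unicode          -- synonyms only fill the gaps
  end
end

print(enccodes.nbspace == 0x000A0)    -- true: the data entry is kept
print(enccodes.Amacron == 0x00100)    -- true: the synonym fills a gap
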
diff --git a/tex/context/base/char-ent.lua b/tex/context/base/char-ent.lua
index 58ee9472c..e48856a84 100644
--- a/tex/context/base/char-ent.lua
+++ b/tex/context/base/char-ent.lua
@@ -332,7 +332,7 @@ local entities = utilities.storage.allocate {
["NestedLessLess"] = "≪", -- U+0226A
["Nfr"] = "𝔑", -- U+1D511
["Ngr"] = "Ν", -- U+0039D
- ["NoBreak"] = "⁠", -- U+02060
+ ["NoBreak"] = "⁠", -- U+02060
["NonBreakingSpace"] = " ", -- U+000A0
["Nopf"] = "ℕ", -- U+02115
["Not"] = "⫬", -- U+02AEC
@@ -2252,6 +2252,8 @@ local entities = utilities.storage.allocate {
characters = characters or { }
characters.entities = entities
-entities.plusminus = "±" -- 0x000B1
-entities.minusplus = "∓" -- 0x02213
-entities.cdots = utf.char(0x02026) -- U+02026
+entities.plusminus = "±" -- U+000B1
+entities.minusplus = "∓" -- U+02213
+entities.Hat = "ˆ" -- U+002C6 -- better as this will stretch
+entities.cdots = "⋯" -- U+022EF
+entities.dots = "…" -- U+02026
diff --git a/tex/context/base/char-fio.lua b/tex/context/base/char-fio.lua
new file mode 100644
index 000000000..ab2555935
--- /dev/null
+++ b/tex/context/base/char-fio.lua
@@ -0,0 +1,94 @@
+if not modules then modules = { } end modules ['char-fio'] = {
+ version = 1.001,
+ comment = "companion to char-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- % directives="filters.utf.reorder=false"
+
+
+local next = next
+
+-- --
+
+local sequencers = utilities.sequencers
+local appendaction = sequencers.appendaction
+local enableaction = sequencers.enableaction
+local disableaction = sequencers.disableaction
+
+local utffilters = characters.filters.utf
+
+local textfileactions = resolvers.openers.helpers.textfileactions
+local textlineactions = resolvers.openers.helpers.textlineactions
+
+appendaction (textfileactions,"system","characters.filters.utf.reorder")
+disableaction(textfileactions, "characters.filters.utf.reorder")
+
+appendaction (textlineactions,"system","characters.filters.utf.reorder")
+disableaction(textlineactions, "characters.filters.utf.reorder")
+
+appendaction (textfileactions,"system","characters.filters.utf.collapse")
+disableaction(textfileactions, "characters.filters.utf.collapse")
+
+appendaction (textfileactions,"system","characters.filters.utf.decompose")
+disableaction(textfileactions, "characters.filters.utf.decompose")
+
+local report = logs.reporter("unicode filter")
+local reporting = "no"
+
+-- this is messy as for performance reasons i don't want this to happen
+-- per line by default
+
+local enforced = {
+ ["characters.filters.utf.reorder"] = true,
+ ["characters.filters.utf.collapse"] = true,
+ ["characters.filters.utf.decompose"] = true,
+}
+
+function utffilters.enable()
+ for k, v in next, enforced do
+ if v then
+ if reporting == "yes" then
+ report("%a enabled",k)
+ end
+ enableaction(textfileactions,k)
+ else
+ if reporting == "yes" then
+ report("%a not enabled",k)
+ end
+ end
+ end
+ reporting = "never"
+end
+
+local function configure(what,v)
+ if v == "line" then
+ disableaction(textfileactions,what)
+ enableaction (textlineactions,what)
+ elseif not toboolean(v) then
+ if reporting ~= "never" then
+ report("%a disabled",k)
+ reporting = "yes"
+ end
+ enforced[what] = false
+ disableaction(textfileactions,what)
+ disableaction(textlineactions,what)
+ else -- true or text
+ enableaction (textfileactions,what)
+ disableaction(textlineactions,what)
+ end
+end
+
+directives.register("filters.utf.reorder", function(v) configure("characters.filters.utf.reorder", v) end)
+directives.register("filters.utf.collapse", function(v) configure("characters.filters.utf.collapse", v) end)
+directives.register("filters.utf.decompose", function(v) configure("characters.filters.utf.decompose",v) end)
+
+utffilters.setskippable { "mkiv", "mkvi", "mkix", "mkxi" }
+
+interfaces.implement {
+ name = "enableutf",
+ onlyonce = true,
+ actions = utffilters.enable
+}
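
A minimal standalone sketch of the three-way dispatch that configure() in the new char-fio.lua performs: a directive value of "line" moves the filter to per-line processing, a false-ish value switches it off, and anything else keeps the default per-file processing. The action lists and helpers below are stand-ins for the real sequencers and resolvers tables, so this only illustrates the routing, not the actual ConTeXt API.

local textfileactions = { }   -- filters applied once per file
local textlineactions = { }   -- filters applied per input line

local function enableaction (list, name) list[name] = true  end
local function disableaction(list, name) list[name] = false end

local function toboolean(v)
  return v == true or v == "true" or v == "yes" or v == "on"
end

local function configure(what, v)
  if v == "line" then                  -- run the filter per line
    disableaction(textfileactions, what)
    enableaction (textlineactions, what)
  elseif not toboolean(v) then         -- switched off entirely
    disableaction(textfileactions, what)
    disableaction(textlineactions, what)
  else                                 -- default: run once per file
    enableaction (textfileactions, what)
    disableaction(textlineactions, what)
  end
end

configure("characters.filters.utf.collapse", "line")
configure("characters.filters.utf.reorder",  false)
print(textlineactions["characters.filters.utf.collapse"]) -- true
print(textfileactions["characters.filters.utf.reorder"])  -- false
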
diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua
index ac47760f3..4559fa28c 100644
--- a/tex/context/base/char-ini.lua
+++ b/tex/context/base/char-ini.lua
@@ -7,26 +7,33 @@ if not modules then modules = { } end modules ['char-ini'] = {
}
-- todo: make two files, one for format generation, one for format use
+-- todo: move some to char-utf
-- we can remove the tag range starting at 0xE0000 (special applications)
local utfchar, utfbyte, utfvalues, ustring, utotable = utf.char, utf.byte, utf.values, utf.ustring, utf.totable
local concat, unpack, tohash = table.concat, table.unpack, table.tohash
local next, tonumber, type, rawget, rawset = next, tonumber, type, rawget, rawset
-local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch
-local P, R, Cs, lpegmatch, patterns = lpeg.P, lpeg.R, lpeg.Cs, lpeg.match, lpeg.patterns
+local format, lower, gsub = string.format, string.lower, string.gsub
+local P, R, S, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.Cs
-local utf8byte = patterns.utf8byte
-local utf8char = patterns.utf8char
+if not characters then require("char-def") end
-local allocate = utilities.storage.allocate
-local mark = utilities.storage.mark
+local lpegpatterns = lpeg.patterns
+local lpegmatch = lpeg.match
+local utf8byte = lpegpatterns.utf8byte
+local utf8character = lpegpatterns.utf8character
-local setmetatableindex = table.setmetatableindex
+local utfchartabletopattern = lpeg.utfchartabletopattern
-local trace_defining = false trackers.register("characters.defining", function(v) characters_defining = v end)
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
-local report_defining = logs.reporter("characters")
+local setmetatableindex = table.setmetatableindex
+
+local trace_defining = false trackers.register("characters.defining", function(v) characters_defining = v end)
+
+local report_defining = logs.reporter("characters")
--[[ldx--
<p>This module implements some methods and creates additional datastructures
@@ -37,6 +44,12 @@ from the big character table that we use for all kind of purposes:
loaded!</p>
--ldx]]--
+-- todo: in 'char-def.lua' assume defaults:
+--
+-- direction = l
+-- cjkwd = a
+-- linebreak = al
+
characters = characters or { }
local characters = characters
local data = characters.data
@@ -54,7 +67,7 @@ end
local pattern = (P("0x") + P("U+")) * ((R("09","AF")^1 * P(-1)) / function(s) return tonumber(s,16) end)
-patterns.chartonumber = pattern
+lpegpatterns.chartonumber = pattern
local function chartonumber(k)
if type(k) == "string" then
@@ -145,6 +158,7 @@ local blocks = allocate {
["bamum"] = { first = 0x0A6A0, last = 0x0A6FF, description = "Bamum" },
["bamumsupplement"] = { first = 0x16800, last = 0x16A3F, description = "Bamum Supplement" },
["basiclatin"] = { first = 0x00000, last = 0x0007F, otf="latn", description = "Basic Latin" },
+ ["bassavah"] = { first = 0x16AD0, last = 0x16AFF, description = "Bassa Vah" },
["batak"] = { first = 0x01BC0, last = 0x01BFF, description = "Batak" },
["bengali"] = { first = 0x00980, last = 0x009FF, otf="beng", description = "Bengali" },
["blockelements"] = { first = 0x02580, last = 0x0259F, otf="bopo", description = "Block Elements" },
@@ -156,8 +170,9 @@ local blocks = allocate {
["buginese"] = { first = 0x01A00, last = 0x01A1F, otf="bugi", description = "Buginese" },
["buhid"] = { first = 0x01740, last = 0x0175F, otf="buhd", description = "Buhid" },
["byzantinemusicalsymbols"] = { first = 0x1D000, last = 0x1D0FF, otf="byzm", description = "Byzantine Musical Symbols" },
- ["commonindicnumberforms"] = { first = 0x0A830, last = 0x0A83F, description = "Common Indic Number Forms" },
["carian"] = { first = 0x102A0, last = 0x102DF, description = "Carian" },
+ ["caucasianalbanian"] = { first = 0x10530, last = 0x1056F, description = "Caucasian Albanian" },
+ ["chakma"] = { first = 0x11100, last = 0x1114F, description = "Chakma" },
["cham"] = { first = 0x0AA00, last = 0x0AA5F, description = "Cham" },
["cherokee"] = { first = 0x013A0, last = 0x013FF, otf="cher", description = "Cherokee" },
["cjkcompatibility"] = { first = 0x03300, last = 0x033FF, otf="hang", description = "CJK Compatibility" },
@@ -170,12 +185,17 @@ local blocks = allocate {
["cjkunifiedideographs"] = { first = 0x04E00, last = 0x09FFF, otf="hang", description = "CJK Unified Ideographs", catcode = "letter" },
["cjkunifiedideographsextensiona"] = { first = 0x03400, last = 0x04DBF, otf="hang", description = "CJK Unified Ideographs Extension A" },
["cjkunifiedideographsextensionb"] = { first = 0x20000, last = 0x2A6DF, otf="hang", description = "CJK Unified Ideographs Extension B" },
+ ["cjkunifiedideographsextensionc"] = { first = 0x2A700, last = 0x2B73F, description = "CJK Unified Ideographs Extension C" },
+ ["cjkunifiedideographsextensiond"] = { first = 0x2B740, last = 0x2B81F, description = "CJK Unified Ideographs Extension D" },
["combiningdiacriticalmarks"] = { first = 0x00300, last = 0x0036F, description = "Combining Diacritical Marks" },
+ ["combiningdiacriticalmarksextended"] = { first = 0x01AB0, last = 0x01AFF, description = "Combining Diacritical Marks Extended" },
["combiningdiacriticalmarksforsymbols"] = { first = 0x020D0, last = 0x020FF, description = "Combining Diacritical Marks for Symbols" },
["combiningdiacriticalmarkssupplement"] = { first = 0x01DC0, last = 0x01DFF, description = "Combining Diacritical Marks Supplement" },
["combininghalfmarks"] = { first = 0x0FE20, last = 0x0FE2F, description = "Combining Half Marks" },
+ ["commonindicnumberforms"] = { first = 0x0A830, last = 0x0A83F, description = "Common Indic Number Forms" },
["controlpictures"] = { first = 0x02400, last = 0x0243F, description = "Control Pictures" },
["coptic"] = { first = 0x02C80, last = 0x02CFF, otf="copt", description = "Coptic" },
+ ["copticepactnumbers"] = { first = 0x102E0, last = 0x102FF, description = "Coptic Epact Numbers" },
["countingrodnumerals"] = { first = 0x1D360, last = 0x1D37F, description = "Counting Rod Numerals" },
["cuneiform"] = { first = 0x12000, last = 0x123FF, otf="xsux", description = "Cuneiform" },
["cuneiformnumbersandpunctuation"] = { first = 0x12400, last = 0x1247F, otf="xsux", description = "Cuneiform Numbers and Punctuation" },
@@ -188,161 +208,256 @@ local blocks = allocate {
["deseret"] = { first = 0x10400, last = 0x1044F, otf="dsrt", description = "Deseret" },
["devanagari"] = { first = 0x00900, last = 0x0097F, otf="deva", description = "Devanagari" },
["devanagariextended"] = { first = 0x0A8E0, last = 0x0A8FF, description = "Devanagari Extended" },
- ["dingbats"] = { first = 0x02700, last = 0x027BF, description = "Dingbats" },
- ["dominotiles"] = { first = 0x1F030, last = 0x1F09F, description = "Domino Tiles" },
- ["egyptianhieroglyphs"] = { first = 0x13000, last = 0x1342F, description = "Egyptian Hieroglyphs" },
- ["emoticons"] = { first = 0x1F600, last = 0x1F64F, description = "Emoticons" },
- ["enclosedalphanumericsupplement"] = { first = 0x1F100, last = 0x1F1FF, description = "Enclosed Alphanumeric Supplement" },
- ["enclosedalphanumerics"] = { first = 0x02460, last = 0x024FF, description = "Enclosed Alphanumerics" },
- ["enclosedcjklettersandmonths"] = { first = 0x03200, last = 0x032FF, description = "Enclosed CJK Letters and Months" },
- ["enclosedideographicsupplement"] = { first = 0x1F200, last = 0x1F2FF, description = "Enclosed Ideographic Supplement" },
- ["ethiopic"] = { first = 0x01200, last = 0x0137F, otf="ethi", description = "Ethiopic" },
- ["ethiopicextended"] = { first = 0x02D80, last = 0x02DDF, otf="ethi", description = "Ethiopic Extended" },
- ["ethiopicextendeda"] = { first = 0x0AB00, last = 0x0AB2F, description = "Ethiopic Extended-A" },
- ["ethiopicsupplement"] = { first = 0x01380, last = 0x0139F, otf="ethi", description = "Ethiopic Supplement" },
- ["generalpunctuation"] = { first = 0x02000, last = 0x0206F, description = "General Punctuation" },
- ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, description = "Geometric Shapes" },
- ["georgian"] = { first = 0x010A0, last = 0x010FF, otf="geor", description = "Georgian" },
- ["georgiansupplement"] = { first = 0x02D00, last = 0x02D2F, otf="geor", description = "Georgian Supplement" },
- ["glagolitic"] = { first = 0x02C00, last = 0x02C5F, otf="glag", description = "Glagolitic" },
- ["gothic"] = { first = 0x10330, last = 0x1034F, otf="goth", description = "Gothic" },
- ["greekandcoptic"] = { first = 0x00370, last = 0x003FF, otf="grek", description = "Greek and Coptic" },
- ["greekextended"] = { first = 0x01F00, last = 0x01FFF, otf="grek", description = "Greek Extended" },
- ["gujarati"] = { first = 0x00A80, last = 0x00AFF, otf="gujr", description = "Gujarati" },
- ["gurmukhi"] = { first = 0x00A00, last = 0x00A7F, otf="guru", description = "Gurmukhi" },
- ["halfwidthandfullwidthforms"] = { first = 0x0FF00, last = 0x0FFEF, description = "Halfwidth and Fullwidth Forms" },
- ["hangulcompatibilityjamo"] = { first = 0x03130, last = 0x0318F, otf="jamo", description = "Hangul Compatibility Jamo" },
- ["hanguljamo"] = { first = 0x01100, last = 0x011FF, otf="jamo", description = "Hangul Jamo" },
- ["hanguljamoextendeda"] = { first = 0x0A960, last = 0x0A97F, description = "Hangul Jamo Extended-A" },
- ["hanguljamoextendedb"] = { first = 0x0D7B0, last = 0x0D7FF, description = "Hangul Jamo Extended-B" },
- ["hangulsyllables"] = { first = 0x0AC00, last = 0x0D7AF, otf="hang", description = "Hangul Syllables" },
- ["hanunoo"] = { first = 0x01720, last = 0x0173F, otf="hano", description = "Hanunoo" },
- ["hebrew"] = { first = 0x00590, last = 0x005FF, otf="hebr", description = "Hebrew" },
- ["highprivateusesurrogates"] = { first = 0x0DB80, last = 0x0DBFF, description = "High Private Use Surrogates" },
- ["highsurrogates"] = { first = 0x0D800, last = 0x0DB7F, description = "High Surrogates" },
- ["hiragana"] = { first = 0x03040, last = 0x0309F, otf="kana", description = "Hiragana" },
- ["ideographicdescriptioncharacters"] = { first = 0x02FF0, last = 0x02FFF, description = "Ideographic Description Characters" },
- ["imperialaramaic"] = { first = 0x10840, last = 0x1085F, description = "Imperial Aramaic" },
- ["inscriptionalpahlavi"] = { first = 0x10B60, last = 0x10B7F, description = "Inscriptional Pahlavi" },
- ["inscriptionalparthian"] = { first = 0x10B40, last = 0x10B5F, description = "Inscriptional Parthian" },
- ["ipaextensions"] = { first = 0x00250, last = 0x002AF, description = "IPA Extensions" },
- ["javanese"] = { first = 0x0A980, last = 0x0A9DF, description = "Javanese" },
- ["kaithi"] = { first = 0x11080, last = 0x110CF, description = "Kaithi" },
- ["kanasupplement"] = { first = 0x1B000, last = 0x1B0FF, description = "Kana Supplement" },
- ["kanbun"] = { first = 0x03190, last = 0x0319F, description = "Kanbun" },
- ["kangxiradicals"] = { first = 0x02F00, last = 0x02FDF, description = "Kangxi Radicals" },
- ["kannada"] = { first = 0x00C80, last = 0x00CFF, otf="knda", description = "Kannada" },
- ["katakana"] = { first = 0x030A0, last = 0x030FF, otf="kana", description = "Katakana" },
- ["katakanaphoneticextensions"] = { first = 0x031F0, last = 0x031FF, otf="kana", description = "Katakana Phonetic Extensions" },
- ["kayahli"] = { first = 0x0A900, last = 0x0A92F, description = "Kayah Li" },
- ["kharoshthi"] = { first = 0x10A00, last = 0x10A5F, otf="khar", description = "Kharoshthi" },
- ["khmer"] = { first = 0x01780, last = 0x017FF, otf="khmr", description = "Khmer" },
- ["khmersymbols"] = { first = 0x019E0, last = 0x019FF, otf="khmr", description = "Khmer Symbols" },
- ["lao"] = { first = 0x00E80, last = 0x00EFF, otf="lao", description = "Lao" },
- ["latinextendeda"] = { first = 0x00100, last = 0x0017F, otf="latn", description = "Latin Extended-A" },
- ["latinextendedadditional"] = { first = 0x01E00, last = 0x01EFF, otf="latn", description = "Latin Extended Additional" },
- ["latinextendedb"] = { first = 0x00180, last = 0x0024F, otf="latn", description = "Latin Extended-B" },
- ["latinextendedc"] = { first = 0x02C60, last = 0x02C7F, otf="latn", description = "Latin Extended-C" },
- ["latinextendedd"] = { first = 0x0A720, last = 0x0A7FF, otf="latn", description = "Latin Extended-D" },
- ["latinsupplement"] = { first = 0x00080, last = 0x000FF, otf="latn", description = "Latin-1 Supplement" },
- ["lepcha"] = { first = 0x01C00, last = 0x01C4F, description = "Lepcha" },
- ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, description = "Letterlike Symbols" },
- ["limbu"] = { first = 0x01900, last = 0x0194F, otf="limb", description = "Limbu" },
- ["linearbideograms"] = { first = 0x10080, last = 0x100FF, otf="linb", description = "Linear B Ideograms" },
- ["linearbsyllabary"] = { first = 0x10000, last = 0x1007F, otf="linb", description = "Linear B Syllabary" },
- ["lisu"] = { first = 0x0A4D0, last = 0x0A4FF, description = "Lisu" },
- ["lowsurrogates"] = { first = 0x0DC00, last = 0x0DFFF, description = "Low Surrogates" },
- ["lycian"] = { first = 0x10280, last = 0x1029F, description = "Lycian" },
- ["lydian"] = { first = 0x10920, last = 0x1093F, description = "Lydian" },
- ["mahjongtiles"] = { first = 0x1F000, last = 0x1F02F, description = "Mahjong Tiles" },
- ["malayalam"] = { first = 0x00D00, last = 0x00D7F, otf="mlym", description = "Malayalam" },
- ["mandiac"] = { first = 0x00840, last = 0x0085F, otf="mand", description = "Mandaic" },
- ["mathematicalalphanumericsymbols"] = { first = 0x1D400, last = 0x1D7FF, description = "Mathematical Alphanumeric Symbols" },
- ["mathematicaloperators"] = { first = 0x02200, last = 0x022FF, description = "Mathematical Operators" },
- ["meeteimayek"] = { first = 0x0ABC0, last = 0x0ABFF, description = "Meetei Mayek" },
- ["meeteimayekextensions"] = { first = 0x0AAE0, last = 0x0AAFF, description = "Meetei Mayek Extensions" },
- ["meroiticcursive"] = { first = 0x109A0, last = 0x109FF, description = "Meroitic Cursive" },
- ["meroitichieroglyphs"] = { first = 0x10980, last = 0x1099F, description = "Meroitic Hieroglyphs" },
- ["miao"] = { first = 0x16F00, last = 0x16F9F, description = "Miao" },
- ["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF, description = "Miscellaneous Mathematical Symbols-A" },
- ["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF, description = "Miscellaneous Mathematical Symbols-B" },
- ["miscellaneoussymbols"] = { first = 0x02600, last = 0x026FF, description = "Miscellaneous Symbols" },
- ["miscellaneoussymbolsandarrows"] = { first = 0x02B00, last = 0x02BFF, description = "Miscellaneous Symbols and Arrows" },
- ["miscellaneoussymbolsandpictographs"] = { first = 0x1F300, last = 0x1F5FF, description = "Miscellaneous Symbols And Pictographs" },
- ["miscellaneoustechnical"] = { first = 0x02300, last = 0x023FF, description = "Miscellaneous Technical" },
- ["modifiertoneletters"] = { first = 0x0A700, last = 0x0A71F, description = "Modifier Tone Letters" },
- ["mongolian"] = { first = 0x01800, last = 0x018AF, otf="mong", description = "Mongolian" },
- ["musicalsymbols"] = { first = 0x1D100, last = 0x1D1FF, otf="musc", description = "Musical Symbols" },
- ["myanmar"] = { first = 0x01000, last = 0x0109F, otf="mymr", description = "Myanmar" },
- ["myanmarextendeda"] = { first = 0x0AA60, last = 0x0AA7F, description = "Myanmar Extended-A" },
- ["newtailue"] = { first = 0x01980, last = 0x019DF, description = "New Tai Lue" },
- ["nko"] = { first = 0x007C0, last = 0x007FF, otf="nko", description = "NKo" },
- ["numberforms"] = { first = 0x02150, last = 0x0218F, description = "Number Forms" },
- ["ogham"] = { first = 0x01680, last = 0x0169F, otf="ogam", description = "Ogham" },
- ["olchiki"] = { first = 0x01C50, last = 0x01C7F, description = "Ol Chiki" },
- ["olditalic"] = { first = 0x10300, last = 0x1032F, otf="ital", description = "Old Italic" },
- ["oldpersian"] = { first = 0x103A0, last = 0x103DF, otf="xpeo", description = "Old Persian" },
- ["oldsoutharabian"] = { first = 0x10A60, last = 0x10A7F, description = "Old South Arabian" },
- ["odlturkic"] = { first = 0x10C00, last = 0x10C4F, description = "Old Turkic" },
- ["opticalcharacterrecognition"] = { first = 0x02440, last = 0x0245F, description = "Optical Character Recognition" },
- ["oriya"] = { first = 0x00B00, last = 0x00B7F, otf="orya", description = "Oriya" },
- ["osmanya"] = { first = 0x10480, last = 0x104AF, otf="osma", description = "Osmanya" },
- ["phagspa"] = { first = 0x0A840, last = 0x0A87F, otf="phag", description = "Phags-pa" },
- ["phaistosdisc"] = { first = 0x101D0, last = 0x101FF, description = "Phaistos Disc" },
- ["phoenician"] = { first = 0x10900, last = 0x1091F, otf="phnx", description = "Phoenician" },
- ["phoneticextensions"] = { first = 0x01D00, last = 0x01D7F, description = "Phonetic Extensions" },
- ["phoneticextensionssupplement"] = { first = 0x01D80, last = 0x01DBF, description = "Phonetic Extensions Supplement" },
- ["playingcards"] = { first = 0x1F0A0, last = 0x1F0FF, description = "Playing Cards" },
- ["privateusearea"] = { first = 0x0E000, last = 0x0F8FF, description = "Private Use Area" },
- ["rejang"] = { first = 0x0A930, last = 0x0A95F, description = "Rejang" },
- ["ruminumeralsymbols"] = { first = 0x10E60, last = 0x10E7F, description = "Rumi Numeral Symbols" },
- ["runic"] = { first = 0x016A0, last = 0x016FF, otf="runr", description = "Runic" },
- ["samaritan"] = { first = 0x00800, last = 0x0083F, description = "Samaritan" },
- ["saurashtra"] = { first = 0x0A880, last = 0x0A8DF, description = "Saurashtra" },
- ["sharada"] = { first = 0x11180, last = 0x111DF, description = "Sharada" },
- ["shavian"] = { first = 0x10450, last = 0x1047F, otf="shaw", description = "Shavian" },
- ["sinhala"] = { first = 0x00D80, last = 0x00DFF, otf="sinh", description = "Sinhala" },
- ["smallformvariants"] = { first = 0x0FE50, last = 0x0FE6F, description = "Small Form Variants" },
- ["sorasompeng"] = { first = 0x110D0, last = 0x110FF, description = "Sora Sompeng" },
- ["spacingmodifierletters"] = { first = 0x002B0, last = 0x002FF, description = "Spacing Modifier Letters" },
- ["specials"] = { first = 0x0FFF0, last = 0x0FFFF, description = "Specials" },
- ["sundanese"] = { first = 0x01B80, last = 0x01BBF, description = "Sundanese" },
- ["sundanesesupplement"] = { first = 0x01CC0, last = 0x01CCF, description = "Sundanese Supplement" },
- ["superscriptsandsubscripts"] = { first = 0x02070, last = 0x0209F, description = "Superscripts and Subscripts" },
- ["supplementalarrowsa"] = { first = 0x027F0, last = 0x027FF, description = "Supplemental Arrows-A" },
- ["supplementalarrowsb"] = { first = 0x02900, last = 0x0297F, description = "Supplemental Arrows-B" },
- ["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF, description = "Supplemental Mathematical Operators" },
- ["supplementalpunctuation"] = { first = 0x02E00, last = 0x02E7F, description = "Supplemental Punctuation" },
- ["supplementaryprivateuseareaa"] = { first = 0xF0000, last = 0xFFFFF, description = "Supplementary Private Use Area-A" },
- ["supplementaryprivateuseareab"] = { first = 0x100000,last = 0x10FFFF, description = "Supplementary Private Use Area-B" },
- ["sylotinagri"] = { first = 0x0A800, last = 0x0A82F, otf="sylo", description = "Syloti Nagri" },
- ["syriac"] = { first = 0x00700, last = 0x0074F, otf="syrc", description = "Syriac" },
- ["tagalog"] = { first = 0x01700, last = 0x0171F, otf="tglg", description = "Tagalog" },
- ["tagbanwa"] = { first = 0x01760, last = 0x0177F, otf="tagb", description = "Tagbanwa" },
- ["tags"] = { first = 0xE0000, last = 0xE007F, description = "Tags" },
- ["taile"] = { first = 0x01950, last = 0x0197F, otf="tale", description = "Tai Le" },
- ["taitham"] = { first = 0x01A20, last = 0x01AAF, description = "Tai Tham" },
- ["taiviet"] = { first = 0x0AA80, last = 0x0AADF, description = "Tai Viet" },
- ["taixuanjingsymbols"] = { first = 0x1D300, last = 0x1D35F, description = "Tai Xuan Jing Symbols" },
- ["takri"] = { first = 0x11680, last = 0x116CF, description = "Takri" },
- ["tamil"] = { first = 0x00B80, last = 0x00BFF, otf="taml", description = "Tamil" },
- ["telugu"] = { first = 0x00C00, last = 0x00C7F, otf="telu", description = "Telugu" },
- ["thaana"] = { first = 0x00780, last = 0x007BF, otf="thaa", description = "Thaana" },
- ["thai"] = { first = 0x00E00, last = 0x00E7F, otf="thai", description = "Thai" },
- ["tibetan"] = { first = 0x00F00, last = 0x00FFF, otf="tibt", description = "Tibetan" },
- ["tifinagh"] = { first = 0x02D30, last = 0x02D7F, otf="tfng", description = "Tifinagh" },
- ["transportandmapsymbols"] = { first = 0x1F680, last = 0x1F6FF, description = "Transport And Map Symbols" },
- ["ugaritic"] = { first = 0x10380, last = 0x1039F, otf="ugar", description = "Ugaritic" },
- ["unifiedcanadianaboriginalsyllabics"] = { first = 0x01400, last = 0x0167F, otf="cans", description = "Unified Canadian Aboriginal Syllabics" },
- ["unifiedcanadianaboriginalsyllabicsextended"] = { first = 0x018B0, last = 0x018FF, description = "Unified Canadian Aboriginal Syllabics Extended" },
- ["vai"] = { first = 0x0A500, last = 0x0A63F, description = "Vai" },
- ["variationselectors"] = { first = 0x0FE00, last = 0x0FE0F, description = "Variation Selectors" },
- ["variationselectorssupplement"] = { first = 0xE0100, last = 0xE01EF, description = "Variation Selectors Supplement" },
- ["vedicextensions"] = { first = 0x01CD0, last = 0x01CFF, description = "Vedic Extensions" },
- ["verticalforms"] = { first = 0x0FE10, last = 0x0FE1F, description = "Vertical Forms" },
- ["yijinghexagramsymbols"] = { first = 0x04DC0, last = 0x04DFF, otf="yi", description = "Yijing Hexagram Symbols" },
- ["yiradicals"] = { first = 0x0A490, last = 0x0A4CF, otf="yi", description = "Yi Radicals" },
- ["yisyllables"] = { first = 0x0A000, last = 0x0A48F, otf="yi", description = "Yi Syllables" },
+ ["digitsarabicindic"] = { first = 0x00660, last = 0x00669, math = true },
+ -- ["digitsbengali"] = { first = 0x009E6, last = 0x009EF, math = true },
+ ["digitsbold"] = { first = 0x1D7CE, last = 0x1D7D8, math = true },
+ -- ["digitsdevanagari"] = { first = 0x00966, last = 0x0096F, math = true },
+ ["digitsdoublestruck"] = { first = 0x1D7D8, last = 0x1D7E2, math = true },
+ -- ["digitsethiopic"] = { first = 0x01369, last = 0x01371, math = true },
+ ["digitsextendedarabicindic"] = { first = 0x006F0, last = 0x006F9, math = true },
+ -- ["digitsgujarati"] = { first = 0x00AE6, last = 0x00AEF, math = true },
+ -- ["digitsgurmukhi"] = { first = 0x00A66, last = 0x00A6F, math = true },
+ -- ["digitskannada"] = { first = 0x00CE6, last = 0x00CEF, math = true },
+ -- ["digitskhmer"] = { first = 0x017E0, last = 0x017E9, math = true },
+ -- ["digitslao"] = { first = 0x00ED0, last = 0x00ED9, math = true },
+ ["digitslatin"] = { first = 0x00030, last = 0x00039, math = true },
+ -- ["digitsmalayalam"] = { first = 0x00D66, last = 0x00D6F, math = true },
+ -- ["digitsmongolian"] = { first = 0x01810, last = 0x01819, math = true },
+ ["digitsmonospace"] = { first = 0x1D7F6, last = 0x1D80F, math = true },
+ -- ["digitsmyanmar"] = { first = 0x01040, last = 0x01049, math = true },
+ ["digitsnormal"] = { first = 0x00030, last = 0x00039, math = true },
+ -- ["digitsoriya"] = { first = 0x00B66, last = 0x00B6F, math = true },
+ ["digitssansserifbold"] = { first = 0x1D7EC, last = 0x1D805, math = true },
+ ["digitssansserifnormal"] = { first = 0x1D7E2, last = 0x1D7EC, math = true },
+ -- ["digitstamil"] = { first = 0x00030, last = 0x00039, math = true }, -- no zero
+ -- ["digitstelugu"] = { first = 0x00C66, last = 0x00C6F, math = true },
+ -- ["digitsthai"] = { first = 0x00E50, last = 0x00E59, math = true },
+ -- ["digitstibetan"] = { first = 0x00F20, last = 0x00F29, math = true },
+ ["dingbats"] = { first = 0x02700, last = 0x027BF, description = "Dingbats" },
+ ["dominotiles"] = { first = 0x1F030, last = 0x1F09F, description = "Domino Tiles" },
+ ["duployan"] = { first = 0x1BC00, last = 0x1BC9F, description = "Duployan" },
+ ["egyptianhieroglyphs"] = { first = 0x13000, last = 0x1342F, description = "Egyptian Hieroglyphs" },
+ ["elbasan"] = { first = 0x10500, last = 0x1052F, description = "Elbasan" },
+ ["emoticons"] = { first = 0x1F600, last = 0x1F64F, description = "Emoticons" },
+ ["enclosedalphanumerics"] = { first = 0x02460, last = 0x024FF, description = "Enclosed Alphanumerics" },
+ ["enclosedalphanumericsupplement"] = { first = 0x1F100, last = 0x1F1FF, description = "Enclosed Alphanumeric Supplement" },
+ ["enclosedcjklettersandmonths"] = { first = 0x03200, last = 0x032FF, description = "Enclosed CJK Letters and Months" },
+ ["enclosedideographicsupplement"] = { first = 0x1F200, last = 0x1F2FF, description = "Enclosed Ideographic Supplement" },
+ ["ethiopic"] = { first = 0x01200, last = 0x0137F, otf="ethi", description = "Ethiopic" },
+ ["ethiopicextended"] = { first = 0x02D80, last = 0x02DDF, otf="ethi", description = "Ethiopic Extended" },
+ ["ethiopicextendeda"] = { first = 0x0AB00, last = 0x0AB2F, description = "Ethiopic Extended-A" },
+ ["ethiopicsupplement"] = { first = 0x01380, last = 0x0139F, otf="ethi", description = "Ethiopic Supplement" },
+ ["generalpunctuation"] = { first = 0x02000, last = 0x0206F, description = "General Punctuation" },
+ ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, description = "Geometric Shapes" },
+ ["geometricshapes"] = { first = 0x025A0, last = 0x025FF, math = true },
+ ["geometricshapesextended"] = { first = 0x1F780, last = 0x1F7FF, description = "Geometric Shapes Extended" },
+ ["georgian"] = { first = 0x010A0, last = 0x010FF, otf="geor", description = "Georgian" },
+ ["georgiansupplement"] = { first = 0x02D00, last = 0x02D2F, otf="geor", description = "Georgian Supplement" },
+ ["glagolitic"] = { first = 0x02C00, last = 0x02C5F, otf="glag", description = "Glagolitic" },
+ ["gothic"] = { first = 0x10330, last = 0x1034F, otf="goth", description = "Gothic" },
+ ["grantha"] = { first = 0x11300, last = 0x1137F, description = "Grantha" },
+ ["greekandcoptic"] = { first = 0x00370, last = 0x003FF, otf="grek", description = "Greek and Coptic" },
+ ["greekextended"] = { first = 0x01F00, last = 0x01FFF, otf="grek", description = "Greek Extended" },
+ ["gujarati"] = { first = 0x00A80, last = 0x00AFF, otf="gujr", description = "Gujarati" },
+ ["gurmukhi"] = { first = 0x00A00, last = 0x00A7F, otf="guru", description = "Gurmukhi" },
+ ["halfwidthandfullwidthforms"] = { first = 0x0FF00, last = 0x0FFEF, description = "Halfwidth and Fullwidth Forms" },
+ ["hangulcompatibilityjamo"] = { first = 0x03130, last = 0x0318F, otf="jamo", description = "Hangul Compatibility Jamo" },
+ ["hanguljamo"] = { first = 0x01100, last = 0x011FF, otf="jamo", description = "Hangul Jamo" },
+ ["hanguljamoextendeda"] = { first = 0x0A960, last = 0x0A97F, description = "Hangul Jamo Extended-A" },
+ ["hanguljamoextendedb"] = { first = 0x0D7B0, last = 0x0D7FF, description = "Hangul Jamo Extended-B" },
+ ["hangulsyllables"] = { first = 0x0AC00, last = 0x0D7AF, otf="hang", description = "Hangul Syllables" },
+ ["hanunoo"] = { first = 0x01720, last = 0x0173F, otf="hano", description = "Hanunoo" },
+ ["hebrew"] = { first = 0x00590, last = 0x005FF, otf="hebr", description = "Hebrew" },
+ ["highprivateusesurrogates"] = { first = 0x0DB80, last = 0x0DBFF, description = "High Private Use Surrogates" },
+ ["highsurrogates"] = { first = 0x0D800, last = 0x0DB7F, description = "High Surrogates" },
+ ["hiragana"] = { first = 0x03040, last = 0x0309F, otf="kana", description = "Hiragana" },
+ ["ideographicdescriptioncharacters"] = { first = 0x02FF0, last = 0x02FFF, description = "Ideographic Description Characters" },
+ ["imperialaramaic"] = { first = 0x10840, last = 0x1085F, description = "Imperial Aramaic" },
+ ["inscriptionalpahlavi"] = { first = 0x10B60, last = 0x10B7F, description = "Inscriptional Pahlavi" },
+ ["inscriptionalparthian"] = { first = 0x10B40, last = 0x10B5F, description = "Inscriptional Parthian" },
+ ["ipaextensions"] = { first = 0x00250, last = 0x002AF, description = "IPA Extensions" },
+ ["javanese"] = { first = 0x0A980, last = 0x0A9DF, description = "Javanese" },
+ ["kaithi"] = { first = 0x11080, last = 0x110CF, description = "Kaithi" },
+ ["kanasupplement"] = { first = 0x1B000, last = 0x1B0FF, description = "Kana Supplement" },
+ ["kanbun"] = { first = 0x03190, last = 0x0319F, description = "Kanbun" },
+ ["kangxiradicals"] = { first = 0x02F00, last = 0x02FDF, description = "Kangxi Radicals" },
+ ["kannada"] = { first = 0x00C80, last = 0x00CFF, otf="knda", description = "Kannada" },
+ ["katakana"] = { first = 0x030A0, last = 0x030FF, otf="kana", description = "Katakana" },
+ ["katakanaphoneticextensions"] = { first = 0x031F0, last = 0x031FF, otf="kana", description = "Katakana Phonetic Extensions" },
+ ["kayahli"] = { first = 0x0A900, last = 0x0A92F, description = "Kayah Li" },
+ ["kharoshthi"] = { first = 0x10A00, last = 0x10A5F, otf="khar", description = "Kharoshthi" },
+ ["khmer"] = { first = 0x01780, last = 0x017FF, otf="khmr", description = "Khmer" },
+ ["khmersymbols"] = { first = 0x019E0, last = 0x019FF, otf="khmr", description = "Khmer Symbols" },
+ ["khojki"] = { first = 0x11200, last = 0x1124F, description = "Khojki" },
+ ["khudawadi"] = { first = 0x112B0, last = 0x112FF, description = "Khudawadi" },
+ ["lao"] = { first = 0x00E80, last = 0x00EFF, otf="lao", description = "Lao" },
+ ["latinextendeda"] = { first = 0x00100, last = 0x0017F, otf="latn", description = "Latin Extended-A" },
+ ["latinextendedadditional"] = { first = 0x01E00, last = 0x01EFF, otf="latn", description = "Latin Extended Additional" },
+ ["latinextendedb"] = { first = 0x00180, last = 0x0024F, otf="latn", description = "Latin Extended-B" },
+ ["latinextendedc"] = { first = 0x02C60, last = 0x02C7F, otf="latn", description = "Latin Extended-C" },
+ ["latinextendedd"] = { first = 0x0A720, last = 0x0A7FF, otf="latn", description = "Latin Extended-D" },
+ ["latinextendede"] = { first = 0x0AB30, last = 0x0AB6F, description = "Latin Extended-E" },
+ ["latinsupplement"] = { first = 0x00080, last = 0x000FF, otf="latn", description = "Latin-1 Supplement" },
+ ["lepcha"] = { first = 0x01C00, last = 0x01C4F, description = "Lepcha" },
+ ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, description = "Letterlike Symbols" },
+ ["letterlikesymbols"] = { first = 0x02100, last = 0x0214F, math = true },
+ ["limbu"] = { first = 0x01900, last = 0x0194F, otf="limb", description = "Limbu" },
+ ["lineara"] = { first = 0x10600, last = 0x1077F, description = "Linear A" },
+ ["linearbideograms"] = { first = 0x10080, last = 0x100FF, otf="linb", description = "Linear B Ideograms" },
+ ["linearbsyllabary"] = { first = 0x10000, last = 0x1007F, otf="linb", description = "Linear B Syllabary" },
+ ["lisu"] = { first = 0x0A4D0, last = 0x0A4FF, description = "Lisu" },
+ ["lowercasebold"] = { first = 0x1D41A, last = 0x1D433, math = true },
+ ["lowercaseboldfraktur"] = { first = 0x1D586, last = 0x1D59F, math = true },
+ ["lowercasebolditalic"] = { first = 0x1D482, last = 0x1D49B, math = true },
+ ["lowercaseboldscript"] = { first = 0x1D4EA, last = 0x1D503, math = true },
+ ["lowercasedoublestruck"] = { first = 0x1D552, last = 0x1D56B, math = true },
+ ["lowercasefraktur"] = { first = 0x1D51E, last = 0x1D537, math = true },
+ ["lowercasegreekbold"] = { first = 0x1D6C2, last = 0x1D6DB, math = true },
+ ["lowercasegreekbolditalic"] = { first = 0x1D736, last = 0x1D74F, math = true },
+ ["lowercasegreekitalic"] = { first = 0x1D6FC, last = 0x1D715, math = true },
+ ["lowercasegreeknormal"] = { first = 0x003B1, last = 0x003CA, math = true },
+ ["lowercasegreeksansserifbold"] = { first = 0x1D770, last = 0x1D789, math = true },
+ ["lowercasegreeksansserifbolditalic"] = { first = 0x1D7AA, last = 0x1D7C3, math = true },
+ ["lowercaseitalic"] = { first = 0x1D44E, last = 0x1D467, math = true },
+ ["lowercasemonospace"] = { first = 0x1D68A, last = 0x1D6A3, math = true },
+ ["lowercasenormal"] = { first = 0x00061, last = 0x0007A, math = true },
+ ["lowercasesansserifbold"] = { first = 0x1D5EE, last = 0x1D607, math = true },
+ ["lowercasesansserifbolditalic"] = { first = 0x1D656, last = 0x1D66F, math = true },
+ ["lowercasesansserifitalic"] = { first = 0x1D622, last = 0x1D63B, math = true },
+ ["lowercasesansserifnormal"] = { first = 0x1D5BA, last = 0x1D5D3, math = true },
+ ["lowercasescript"] = { first = 0x1D4B6, last = 0x1D4CF, math = true },
+ ["lowsurrogates"] = { first = 0x0DC00, last = 0x0DFFF, description = "Low Surrogates" },
+ ["lycian"] = { first = 0x10280, last = 0x1029F, description = "Lycian" },
+ ["lydian"] = { first = 0x10920, last = 0x1093F, description = "Lydian" },
+ ["mahajani"] = { first = 0x11150, last = 0x1117F, description = "Mahajani" },
+ ["mahjongtiles"] = { first = 0x1F000, last = 0x1F02F, description = "Mahjong Tiles" },
+ ["malayalam"] = { first = 0x00D00, last = 0x00D7F, otf="mlym", description = "Malayalam" },
+ ["mandaic"] = { first = 0x00840, last = 0x0085F, otf="mand", description = "Mandaic" },
+ ["manichaean"] = { first = 0x10AC0, last = 0x10AFF, description = "Manichaean" },
+ ["mathematicalalphanumericsymbols"] = { first = 0x1D400, last = 0x1D7FF, math = true, description = "Mathematical Alphanumeric Symbols" },
+ ["mathematicaloperators"] = { first = 0x02200, last = 0x022FF, math = true, description = "Mathematical Operators" },
+ ["meeteimayek"] = { first = 0x0ABC0, last = 0x0ABFF, description = "Meetei Mayek" },
+ ["meeteimayekextensions"] = { first = 0x0AAE0, last = 0x0AAFF, description = "Meetei Mayek Extensions" },
+ ["mendekikakui"] = { first = 0x1E800, last = 0x1E8DF, description = "Mende Kikakui" },
+ ["meroiticcursive"] = { first = 0x109A0, last = 0x109FF, description = "Meroitic Cursive" },
+ ["meroitichieroglyphs"] = { first = 0x10980, last = 0x1099F, description = "Meroitic Hieroglyphs" },
+ ["miao"] = { first = 0x16F00, last = 0x16F9F, description = "Miao" },
+ ["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF, math = true, description = "Miscellaneous Mathematical Symbols-A" },
+ ["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF, math = true, description = "Miscellaneous Mathematical Symbols-B" },
+ ["miscellaneoussymbols"] = { first = 0x02600, last = 0x026FF, math = true, description = "Miscellaneous Symbols" },
+ ["miscellaneoussymbolsandarrows"] = { first = 0x02B00, last = 0x02BFF, math = true, description = "Miscellaneous Symbols and Arrows" },
+ ["miscellaneoussymbolsandpictographs"] = { first = 0x1F300, last = 0x1F5FF, description = "Miscellaneous Symbols and Pictographs" },
+ ["miscellaneoustechnical"] = { first = 0x02300, last = 0x023FF, math = true, description = "Miscellaneous Technical" },
+ ["modi"] = { first = 0x11600, last = 0x1165F, description = "Modi" },
+ ["modifiertoneletters"] = { first = 0x0A700, last = 0x0A71F, description = "Modifier Tone Letters" },
+ ["mongolian"] = { first = 0x01800, last = 0x018AF, otf="mong", description = "Mongolian" },
+ ["mro"] = { first = 0x16A40, last = 0x16A6F, description = "Mro" },
+ ["musicalsymbols"] = { first = 0x1D100, last = 0x1D1FF, otf="musc", description = "Musical Symbols" },
+ ["myanmar"] = { first = 0x01000, last = 0x0109F, otf="mymr", description = "Myanmar" },
+ ["myanmarextendeda"] = { first = 0x0AA60, last = 0x0AA7F, description = "Myanmar Extended-A" },
+ ["myanmarextendedb"] = { first = 0x0A9E0, last = 0x0A9FF, description = "Myanmar Extended-B" },
+ ["nabataean"] = { first = 0x10880, last = 0x108AF, description = "Nabataean" },
+ ["newtailue"] = { first = 0x01980, last = 0x019DF, description = "New Tai Lue" },
+ ["nko"] = { first = 0x007C0, last = 0x007FF, otf="nko", description = "NKo" },
+ ["numberforms"] = { first = 0x02150, last = 0x0218F, description = "Number Forms" },
+ ["ogham"] = { first = 0x01680, last = 0x0169F, otf="ogam", description = "Ogham" },
+ ["olchiki"] = { first = 0x01C50, last = 0x01C7F, description = "Ol Chiki" },
+ ["olditalic"] = { first = 0x10300, last = 0x1032F, otf="ital", description = "Old Italic" },
+ ["oldnortharabian"] = { first = 0x10A80, last = 0x10A9F, description = "Old North Arabian" },
+ ["oldpermic"] = { first = 0x10350, last = 0x1037F, description = "Old Permic" },
+ ["oldpersian"] = { first = 0x103A0, last = 0x103DF, otf="xpeo", description = "Old Persian" },
+ ["oldsoutharabian"] = { first = 0x10A60, last = 0x10A7F, description = "Old South Arabian" },
+ ["oldturkic"] = { first = 0x10C00, last = 0x10C4F, description = "Old Turkic" },
+ ["opticalcharacterrecognition"] = { first = 0x02440, last = 0x0245F, description = "Optical Character Recognition" },
+ ["oriya"] = { first = 0x00B00, last = 0x00B7F, otf="orya", description = "Oriya" },
+ ["ornamentaldingbats"] = { first = 0x1F650, last = 0x1F67F, description = "Ornamental Dingbats" },
+ ["osmanya"] = { first = 0x10480, last = 0x104AF, otf="osma", description = "Osmanya" },
+ ["pahawhhmong"] = { first = 0x16B00, last = 0x16B8F, description = "Pahawh Hmong" },
+ ["palmyrene"] = { first = 0x10860, last = 0x1087F, description = "Palmyrene" },
+ ["paucinhau"] = { first = 0x11AC0, last = 0x11AFF, description = "Pau Cin Hau" },
+ ["phagspa"] = { first = 0x0A840, last = 0x0A87F, otf="phag", description = "Phags-pa" },
+ ["phaistosdisc"] = { first = 0x101D0, last = 0x101FF, description = "Phaistos Disc" },
+ ["phoenician"] = { first = 0x10900, last = 0x1091F, otf="phnx", description = "Phoenician" },
+ ["phoneticextensions"] = { first = 0x01D00, last = 0x01D7F, description = "Phonetic Extensions" },
+ ["phoneticextensionssupplement"] = { first = 0x01D80, last = 0x01DBF, description = "Phonetic Extensions Supplement" },
+ ["playingcards"] = { first = 0x1F0A0, last = 0x1F0FF, description = "Playing Cards" },
+ ["privateusearea"] = { first = 0x0E000, last = 0x0F8FF, description = "Private Use Area" },
+ ["psalterpahlavi"] = { first = 0x10B80, last = 0x10BAF, description = "Psalter Pahlavi" },
+ ["rejang"] = { first = 0x0A930, last = 0x0A95F, description = "Rejang" },
+ ["ruminumeralsymbols"] = { first = 0x10E60, last = 0x10E7F, description = "Rumi Numeral Symbols" },
+ ["runic"] = { first = 0x016A0, last = 0x016FF, otf="runr", description = "Runic" },
+ ["samaritan"] = { first = 0x00800, last = 0x0083F, description = "Samaritan" },
+ ["saurashtra"] = { first = 0x0A880, last = 0x0A8DF, description = "Saurashtra" },
+ ["sharada"] = { first = 0x11180, last = 0x111DF, description = "Sharada" },
+ ["shavian"] = { first = 0x10450, last = 0x1047F, otf="shaw", description = "Shavian" },
+ ["shorthandformatcontrols"] = { first = 0x1BCA0, last = 0x1BCAF, description = "Shorthand Format Controls" },
+ ["siddham"] = { first = 0x11580, last = 0x115FF, description = "Siddham" },
+ ["sinhala"] = { first = 0x00D80, last = 0x00DFF, otf="sinh", description = "Sinhala" },
+ ["sinhalaarchaicnumbers"] = { first = 0x111E0, last = 0x111FF, description = "Sinhala Archaic Numbers" },
+ ["smallformvariants"] = { first = 0x0FE50, last = 0x0FE6F, description = "Small Form Variants" },
+ ["sorasompeng"] = { first = 0x110D0, last = 0x110FF, description = "Sora Sompeng" },
+ ["spacingmodifierletters"] = { first = 0x002B0, last = 0x002FF, description = "Spacing Modifier Letters" },
+ ["specials"] = { first = 0x0FFF0, last = 0x0FFFF, description = "Specials" },
+ ["sundanese"] = { first = 0x01B80, last = 0x01BBF, description = "Sundanese" },
+ ["sundanesesupplement"] = { first = 0x01CC0, last = 0x01CCF, description = "Sundanese Supplement" },
+ ["superscriptsandsubscripts"] = { first = 0x02070, last = 0x0209F, description = "Superscripts and Subscripts" },
+ ["supplementalarrowsa"] = { first = 0x027F0, last = 0x027FF, math = true, description = "Supplemental Arrows-A" },
+ ["supplementalarrowsb"] = { first = 0x02900, last = 0x0297F, math = true, description = "Supplemental Arrows-B" },
+ ["supplementalarrowsc"] = { first = 0x1F800, last = 0x1F8FF, math = true, description = "Supplemental Arrows-C" },
+ ["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF, math = true, description = "Supplemental Mathematical Operators" },
+ ["supplementalpunctuation"] = { first = 0x02E00, last = 0x02E7F, description = "Supplemental Punctuation" },
+ ["supplementaryprivateuseareaa"] = { first = 0xF0000, last = 0xFFFFF, description = "Supplementary Private Use Area-A" },
+ ["supplementaryprivateuseareab"] = { first = 0x100000,last = 0x10FFFF, description = "Supplementary Private Use Area-B" },
+ ["sylotinagri"] = { first = 0x0A800, last = 0x0A82F, otf="sylo", description = "Syloti Nagri" },
+ ["syriac"] = { first = 0x00700, last = 0x0074F, otf="syrc", description = "Syriac" },
+ ["tagalog"] = { first = 0x01700, last = 0x0171F, otf="tglg", description = "Tagalog" },
+ ["tagbanwa"] = { first = 0x01760, last = 0x0177F, otf="tagb", description = "Tagbanwa" },
+ ["tags"] = { first = 0xE0000, last = 0xE007F, description = "Tags" },
+ ["taile"] = { first = 0x01950, last = 0x0197F, otf="tale", description = "Tai Le" },
+ ["taitham"] = { first = 0x01A20, last = 0x01AAF, description = "Tai Tham" },
+ ["taiviet"] = { first = 0x0AA80, last = 0x0AADF, description = "Tai Viet" },
+ ["taixuanjingsymbols"] = { first = 0x1D300, last = 0x1D35F, description = "Tai Xuan Jing Symbols" },
+ ["takri"] = { first = 0x11680, last = 0x116CF, description = "Takri" },
+ ["tamil"] = { first = 0x00B80, last = 0x00BFF, otf="taml", description = "Tamil" },
+ ["telugu"] = { first = 0x00C00, last = 0x00C7F, otf="telu", description = "Telugu" },
+ ["thaana"] = { first = 0x00780, last = 0x007BF, otf="thaa", description = "Thaana" },
+ ["thai"] = { first = 0x00E00, last = 0x00E7F, otf="thai", description = "Thai" },
+ ["tibetan"] = { first = 0x00F00, last = 0x00FFF, otf="tibt", description = "Tibetan" },
+ ["tifinagh"] = { first = 0x02D30, last = 0x02D7F, otf="tfng", description = "Tifinagh" },
+ ["tirhuta"] = { first = 0x11480, last = 0x114DF, description = "Tirhuta" },
+ ["transportandmapsymbols"] = { first = 0x1F680, last = 0x1F6FF, description = "Transport and Map Symbols" },
+ ["ugaritic"] = { first = 0x10380, last = 0x1039F, otf="ugar", description = "Ugaritic" },
+ ["unifiedcanadianaboriginalsyllabics"] = { first = 0x01400, last = 0x0167F, otf="cans", description = "Unified Canadian Aboriginal Syllabics" },
+ ["unifiedcanadianaboriginalsyllabicsextended"] = { first = 0x018B0, last = 0x018FF, description = "Unified Canadian Aboriginal Syllabics Extended" },
+ ["uppercasebold"] = { first = 0x1D400, last = 0x1D419, math = true },
+ ["uppercaseboldfraktur"] = { first = 0x1D56C, last = 0x1D585, math = true },
+ ["uppercasebolditalic"] = { first = 0x1D468, last = 0x1D481, math = true },
+ ["uppercaseboldscript"] = { first = 0x1D4D0, last = 0x1D4E9, math = true },
+ ["uppercasedoublestruck"] = { first = 0x1D538, last = 0x1D551, math = true },
+ ["uppercasefraktur"] = { first = 0x1D504, last = 0x1D51D, math = true },
+ ["uppercasegreekbold"] = { first = 0x1D6A8, last = 0x1D6C1, math = true },
+ ["uppercasegreekbolditalic"] = { first = 0x1D71C, last = 0x1D735, math = true },
+ ["uppercasegreekitalic"] = { first = 0x1D6E2, last = 0x1D6FB, math = true },
+ ["uppercasegreeknormal"] = { first = 0x00391, last = 0x003AA, math = true },
+ ["uppercasegreeksansserifbold"] = { first = 0x1D756, last = 0x1D76F, math = true },
+ ["uppercasegreeksansserifbolditalic"] = { first = 0x1D790, last = 0x1D7A9, math = true },
+ ["uppercaseitalic"] = { first = 0x1D434, last = 0x1D44D, math = true },
+ ["uppercasemonospace"] = { first = 0x1D670, last = 0x1D689, math = true },
+ ["uppercasenormal"] = { first = 0x00041, last = 0x0005A, math = true },
+ ["uppercasesansserifbold"] = { first = 0x1D5D4, last = 0x1D5ED, math = true },
+ ["uppercasesansserifbolditalic"] = { first = 0x1D63C, last = 0x1D655, math = true },
+ ["uppercasesansserifitalic"] = { first = 0x1D608, last = 0x1D621, math = true },
+ ["uppercasesansserifnormal"] = { first = 0x1D5A0, last = 0x1D5B9, math = true },
+ ["uppercasescript"] = { first = 0x1D49C, last = 0x1D4B5, math = true },
+ ["vai"] = { first = 0x0A500, last = 0x0A63F, description = "Vai" },
+ ["variationselectors"] = { first = 0x0FE00, last = 0x0FE0F, description = "Variation Selectors" },
+ ["variationselectorssupplement"] = { first = 0xE0100, last = 0xE01EF, description = "Variation Selectors Supplement" },
+ ["vedicextensions"] = { first = 0x01CD0, last = 0x01CFF, description = "Vedic Extensions" },
+ ["verticalforms"] = { first = 0x0FE10, last = 0x0FE1F, description = "Vertical Forms" },
+ ["warangciti"] = { first = 0x118A0, last = 0x118FF, description = "Warang Citi" },
+ ["yijinghexagramsymbols"] = { first = 0x04DC0, last = 0x04DFF, otf="yi", description = "Yijing Hexagram Symbols" },
+ ["yiradicals"] = { first = 0x0A490, last = 0x0A4CF, otf="yi", description = "Yi Radicals" },
+ ["yisyllables"] = { first = 0x0A000, last = 0x0A48F, otf="yi", description = "Yi Syllables" },
}
characters.blocks = blocks
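For illustration (editor's note, not part of the patch), entries are looked up by their normalized block name; the math-only ranges added above carry no description field.

-- characters.blocks["basiclatin"].first      -- 0x00000
-- characters.blocks["uppercasegreeknormal"]  -- { first = 0x00391, last = 0x003AA, math = true }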
@@ -379,13 +494,15 @@ setmetatableindex(otfscripts,function(t,unicode)
return "dflt"
end)
+local splitter = lpeg.splitat(S(":-"))
+
function characters.getrange(name) -- used in font fallback definitions (name or range)
local range = blocks[name]
if range then
return range.first, range.last, range.description, range.gaps
end
name = gsub(name,'"',"0x") -- goodie: tex hex notation
- local start, stop = match(name,"^(.-)[%-%:](.-)$")
+ local start, stop = lpegmatch(splitter,name)
if start and stop then
start, stop = tonumber(start,16) or tonumber(start), tonumber(stop,16) or tonumber(stop)
if start and stop then
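A hedged usage sketch (editor's note): besides a block name, getrange still accepts an explicit range, split on ":" or "-" by the new lpeg splitter, with the quote notation rewritten to 0x by the gsub above.

-- characters.getrange("basiclatin")      -- the block entry: 0x00000, 0x0007F, "Basic Latin"
-- characters.getrange("0x0041-0x005A")   -- parsed by the splitter into 0x41 .. 0x5A
-- characters.getrange('"0041:"005A')     -- same range, after the " -> 0x rewrite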
@@ -429,7 +546,17 @@ local categorytags = allocate {
cn = "Other Not Assigned",
}
+local detailtags = allocate {
+ sl = "small letter",
+ bl = "big letter",
+ im = "iteration mark",
+ pm = "prolonged sound mark"
+}
+
characters.categorytags = categorytags
+characters.detailtags = detailtags
+
+-- sounds : voiced unvoiced semivoiced
--~ special : cf (softhyphen) zs (emspace)
--~ characters: ll lm lo lt lu mn nl no pc pd pe pf pi po ps sc sk sm so
@@ -459,36 +586,101 @@ local is_mark = allocate ( tohash {
"mn", "ms",
} )
+local is_punctuation = allocate ( tohash {
+ "pc","pd","ps","pe","pi","pf","po",
+} )
+
-- to be redone: store checked characters
-characters.is_character = is_character
-characters.is_letter = is_letter
-characters.is_command = is_command
-characters.is_spacing = is_spacing
-characters.is_mark = is_mark
-
-local mt = { -- yes or no ?
- __index = function(t,k)
- if type(k) == "number" then
- local c = data[k].category
- return c and rawget(t,c)
- else
- -- avoid auto conversion in data.characters lookups
- end
+characters.is_character = is_character
+characters.is_letter = is_letter
+characters.is_command = is_command
+characters.is_spacing = is_spacing
+characters.is_mark = is_mark
+characters.is_punctuation = is_punctuation
+
+local mti = function(t,k)
+ if type(k) == "number" then
+ local c = data[k].category
+ return c and rawget(t,c)
+ else
+ -- avoid auto conversion in data.characters lookups
end
-}
+end
-setmetatableindex(characters.is_character, mt)
-setmetatableindex(characters.is_letter, mt)
-setmetatableindex(characters.is_command, mt)
-setmetatableindex(characters.is_spacing, mt)
+setmetatableindex(characters.is_character, mti)
+setmetatableindex(characters.is_letter, mti)
+setmetatableindex(characters.is_command, mti)
+setmetatableindex(characters.is_spacing, mti)
+setmetatableindex(characters.is_punctuation,mti)
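For illustration (editor's sketch): the shared __index resolver also answers numeric keys by looking up the category of the codepoint in characters.data, so the new punctuation hash can be probed either way.

-- characters.is_punctuation["po"]    -- true (plain hash lookup)
-- characters.is_punctuation[0x002C]  -- true, the comma has category "po"
-- numeric results are not cached; the resolver only does a rawget on the category key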
-- todo: also define callers for the above
-- linebreak: todo: hash
--
-- normative : BK CR LF CM SG GL CB SP ZW NL WJ JL JV JT H2 H3
--- informative : XX OP CL QU NS EX SY IS PR PO NU AL ID IN HY BB BA SA AI B2 new:CP
+-- informative : XX OP CL CP QU NS EX SY IS PR PO NU AL ID IN HY BB BA SA AI B2 HL CJ RI
+--
+-- comments taken from standard:
+
+characters.linebreaks = {
+
+ -- non-tailorable line breaking classes
+
+ ["bk"] = "mandatory break", -- nl, ps : cause a line break (after)
+ ["cr"] = "carriage return", -- cr : cause a line break (after), except between cr and lf
+ ["lf"] = "line feed", -- lf : cause a line break (after)
+ ["cm"] = "combining mark", -- combining marks, control codes : prohibit a line break between the character and the preceding character
+ ["nl"] = "next line", -- nel : cause a line break (after)
+ ["sg"] = "surrogate", -- surrogates :do not occur in well-formed text
+ ["wj"] = "word joiner", -- wj : prohibit line breaks before and after
+ ["zw"] = "zero width space", -- zwsp : provide a break opportunity
+ ["gl"] = "non-breaking (glue)", -- cgj, nbsp, zwnbsp : prohibit line breaks before and after
+ ["sp"] = "space", -- space : enable indirect line breaks
+
+ -- break opportunities
+
+ ["b2"] = "break opportunity before and after", -- em dash : provide a line break opportunity before and after the character
+ ["ba"] = "break after", -- spaces, hyphens : generally provide a line break opportunity after the character
+ ["bb"] = "break before", -- punctuation used in dictionaries : generally provide a line break opportunity before the character
+ ["hy"] = "hyphen", -- hyphen-minus : provide a line break opportunity after the character, except in numeric context
+ ["cb"] = "contingent break opportunity", -- inline objects : provide a line break opportunity contingent on additional information
+
+ -- characters prohibiting certain breaks
+
+ ["cl"] = "close punctuation", -- “}”, “❳”, “⟫” etc. : prohibit line breaks before
+ ["cp"] = "close parenthesis", -- “)”, “]” : prohibit line breaks before
+ ["ex"] = "exclamation/interrogation", -- “!”, “?”, etc. : prohibit line breaks before
+ ["in"] = "inseparable", -- leaders : allow only indirect line breaks between pairs
+ ["ns"] = "nonstarter", -- “‼”, “‽”, “⁇”, “⁉”, etc. : allow only indirect line breaks before
+ ["op"] = "open punctuation", -- “(“, “[“, “{“, etc. : prohibit line breaks after
+ ["qu"] = "quotation", -- quotation marks : act like they are both opening and closing
+
+ -- numeric context
+
+ ["is"] = "infix numeric separator", -- . , : prevent breaks after any and before numeric
+ ["nu"] = "numeric", -- digits : form numeric expressions for line breaking purposes
+ ["po"] = "postfix numeric", -- %, ¢ : do not break following a numeric expression
+ ["pr"] = "prefix numeric", -- $, £, ¥, etc. : do not break in front of a numeric expression
+ ["sy"] = "symbols allowing break after", -- / : prevent a break before, and allow a break after
+
+ -- other characters
+
+ ["ai"] = "ambiguous (alphabetic or ideographic)", -- characters with ambiguous east asian width : act like al when the resolved eaw is n; otherwise, act as id
+ ["al"] = "alphabetic", -- alphabets and regular symbols : are alphabetic characters or symbols that are used with alphabetic characters
+ ["cj"] = "conditional japanese starter", -- small kana : treat as ns or id for strict or normal breaking.
+ ["h2"] = "hangul lv syllable", -- hangul : form korean syllable blocks
+ ["h3"] = "hangul lvt syllable", -- hangul : form korean syllable blocks
+ ["hl"] = "hebrew letter", -- hebrew : do not break around a following hyphen; otherwise act as alphabetic
+ ["id"] = "ideographic", -- ideographs : break before or after, except in some numeric context
+ ["jl"] = "hangul l jamo", -- conjoining jamo : form korean syllable blocks
+ ["jv"] = "hangul v jamo", -- conjoining jamo : form korean syllable blocks
+ ["jt"] = "hangul t jamo", -- conjoining jamo : form korean syllable blocks
+ ["ri"] = "regional indicator", -- regional indicator symbol letter a .. z : keep together, break before and after from others
+ ["sa"] = "complex context dependent (south east asian)", -- south east asian: thai, lao, khmer : provide a line break opportunity contingent on additional, language-specific context analysis
+ ["xx"] = "unknown", -- most unassigned, private-use : have as yet unknown line breaking behavior or unassigned code positions
+
+}
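Editor's note, a trivial lookup sketch: the table maps the two-letter Unicode line break classes onto readable descriptions.

-- characters.linebreaks["hy"]  -- "hyphen"
-- characters.linebreaks["zw"]  -- "zero width space"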
-- east asian width:
--
@@ -596,10 +788,10 @@ of the official <l n='api'/>.</p>
-- we could make them virtual: characters.contextnames[n]
-function characters.contextname(n) return data[n].contextname or "" end
-function characters.adobename (n) return data[n].adobename or "" end
-function characters.description(n) return data[n].description or "" end
--------- characters.category (n) return data[n].category or "" end
+function characters.contextname(n) return data[n] and data[n].contextname or "" end
+function characters.adobename (n) return data[n] and data[n].adobename or "" end
+function characters.description(n) return data[n] and data[n].description or "" end
+-------- characters.category (n) return data[n] and data[n].category or "" end
function characters.category(n,verbose)
local c = data[n].category
@@ -636,6 +828,9 @@ local categories = allocate() characters.categories = categories -- lazy table
setmetatableindex(categories, function(t,u) if u then local c = data[u] c = c and c.category or u t[u] = c return c end end)
+-- todo: overloads (these register directly in the tables as number and string) e.g. for greek
+-- todo: for string do a numeric lookup in the table itself
+
local lccodes = allocate() characters.lccodes = lccodes -- lazy table
local uccodes = allocate() characters.uccodes = uccodes -- lazy table
local shcodes = allocate() characters.shcodes = shcodes -- lazy table
@@ -764,17 +959,170 @@ end
----- toupper = Cs((utf8byte/ucchars)^0)
----- toshape = Cs((utf8byte/shchars)^0)
-local tolower = Cs((utf8char/lcchars)^0)
-local toupper = Cs((utf8char/ucchars)^0)
-local toshape = Cs((utf8char/shchars)^0)
+local tolower = Cs((utf8character/lcchars)^0) -- no need to check spacing
+local toupper = Cs((utf8character/ucchars)^0) -- no need to check spacing
+local toshape = Cs((utf8character/shchars)^0) -- no need to check spacing
+
+lpegpatterns.tolower = tolower -- old ones ... will be overloaded
+lpegpatterns.toupper = toupper -- old ones ... will be overloaded
+lpegpatterns.toshape = toshape -- old ones ... will be overloaded
+
+-- function characters.lower (str) return lpegmatch(tolower,str) end
+-- function characters.upper (str) return lpegmatch(toupper,str) end
+-- function characters.shaped(str) return lpegmatch(toshape,str) end
+
+-- local superscripts = allocate() characters.superscripts = superscripts
+-- local subscripts = allocate() characters.subscripts = subscripts
+
+-- if storage then
+-- storage.register("characters/superscripts", superscripts, "characters.superscripts")
+-- storage.register("characters/subscripts", subscripts, "characters.subscripts")
+-- end
+
+-- end
+
+if not characters.splits then
+
+ local char = allocate()
+ local compat = allocate()
+
+ local splits = {
+ char = char,
+ compat = compat,
+ }
+
+ characters.splits = splits
+
+ -- [0x013F] = { 0x004C, 0x00B7 }
+ -- [0x0140] = { 0x006C, 0x00B7 }
+
+ for unicode, data in next, characters.data do
+ local specials = data.specials
+ if specials and #specials > 2 then
+ local kind = specials[1]
+ if kind == "compat" then
+ compat[unicode] = { unpack(specials,2) }
+ elseif kind == "char" then
+ char [unicode] = { unpack(specials,2) }
+ end
+ end
+ end
+
+ if storage then
+ storage.register("characters/splits", splits, "characters.splits")
+ end
+
+end
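An illustrative entry (editor's sketch, based on the standard compatibility decompositions rather than on data shown in this patch): the fi ligature U+FB01 has specials = { "compat", 0x66, 0x69 }, so it lands in the compat split table.

-- characters.splits.compat[0xFB01]  -- { 0x66, 0x69 }, i.e. "fi" splits into "f" and "i"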
-patterns.tolower = tolower
-patterns.toupper = toupper
-patterns.toshape = toshape
+if not characters.lhash then
+
+ local lhash = allocate() characters.lhash = lhash -- nil if no conversion
+ local uhash = allocate() characters.uhash = uhash -- nil if no conversion
+ local shash = allocate() characters.shash = shash -- nil if no conversion
+
+ for k, v in next, characters.data do
+ -- if k < 0x11000 then
+ local l = v.lccode
+ if l then
+ -- we have an uppercase
+ if type(l) == "number" then
+ lhash[utfchar(k)] = utfchar(l)
+ elseif #l == 2 then
+ lhash[utfchar(k)] = utfchar(l[1]) .. utfchar(l[2])
+ else
+ inspect(v)
+ end
+ else
+ local u = v.uccode
+ if u then
+ -- we have a lowercase
+ if type(u) == "number" then
+ uhash[utfchar(k)] = utfchar(u)
+ elseif #u == 2 then
+ uhash[utfchar(k)] = utfchar(u[1]) .. utfchar(u[2])
+ else
+ inspect(v)
+ end
+ end
+ end
+ local s = v.shcode
+ if s then
+ if type(s) == "number" then
+ shash[utfchar(k)] = utfchar(s)
+ elseif #s == 2 then
+ shash[utfchar(k)] = utfchar(s[1]) .. utfchar(s[2])
+ else
+ inspect(v)
+ end
+ end
+ -- end
+ end
+
+ if storage then
+ storage.register("characters/lhash", lhash, "characters.lhash")
+ storage.register("characters/uhash", uhash, "characters.uhash")
+ storage.register("characters/shash", shash, "characters.shash")
+ end
+
+end
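For illustration (editor's sketch, assuming the usual char-def values): the hashes map a utf character onto its case or shape mate, with two-element codes concatenated.

-- characters.lhash["A"]  -- "a" (lccode of U+0041 is 0x61)
-- characters.uhash["a"]  -- "A" (uccode fills uhash when no lccode is present)
-- characters.shash["á"]  -- "a" (shcode strips the diacritic)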
+
+local lhash = characters.lhash mark(lhash)
+local uhash = characters.uhash mark(uhash)
+local shash = characters.shash mark(shash)
+
+local utf8lowercharacter = utfchartabletopattern(lhash) / lhash
+local utf8uppercharacter = utfchartabletopattern(uhash) / uhash
+local utf8shapecharacter = utfchartabletopattern(shash) / shash
+
+local utf8lower = Cs((utf8lowercharacter + utf8character)^0)
+local utf8upper = Cs((utf8uppercharacter + utf8character)^0)
+local utf8shape = Cs((utf8shapecharacter + utf8character)^0)
+
+lpegpatterns.utf8lowercharacter = utf8lowercharacter -- one character
+lpegpatterns.utf8uppercharacter = utf8uppercharacter -- one character
+lpegpatterns.utf8shapecharacter = utf8shapecharacter -- one character
+
+lpegpatterns.utf8lower = utf8lower -- string
+lpegpatterns.utf8upper = utf8upper -- string
+lpegpatterns.utf8shape = utf8shape -- string
+
+function characters.lower (str) return lpegmatch(utf8lower,str) end
+function characters.upper (str) return lpegmatch(utf8upper,str) end
+function characters.shaped(str) return lpegmatch(utf8shape,str) end
+
+-- local str = [[
+-- ÀÁÂÃÄÅàáâãäå àáâãäåàáâãäå ÀÁÂÃÄÅÀÁÂÃÄÅ AAAAAAaaaaaa
+-- ÆÇæç æçæç ÆÇÆÇ AECaec
+-- ÈÉÊËèéêë èéêëèéêë ÈÉÊËÈÉÊË EEEEeeee
+-- ÌÍÎÏÞìíîïþ ìíîïþìíîïþ ÌÍÎÏÞÌÍÎÏÞ IIIIÞiiiiþ
+-- Ðð ðð ÐÐ Ðð
+-- Ññ ññ ÑÑ Nn
+-- ÒÓÔÕÖòóôõö òóôõöòóôõö ÒÓÔÕÖÒÓÔÕÖ OOOOOooooo
+-- Øø øø ØØ Oo
+-- ÙÚÛÜùúûü ùúûüùúûü ÙÚÛÜÙÚÛÜ UUUUuuuu
+-- Ýýÿ ýýÿ ÝÝŸ Yyy
+-- ß ß SS ss
+-- Ţţ ţţ ŢŢ Tt
+-- ]]
+--
+-- local lower = characters.lower print(lower(str))
+-- local upper = characters.upper print(upper(str))
+-- local shaped = characters.shaped print(shaped(str))
+--
+-- local c, n = os.clock(), 10000
+-- for i=1,n do lower(str) upper(str) shaped(str) end -- 2.08 => 0.77
+-- print(os.clock()-c,n*#str*3)
-function characters.lower (str) return lpegmatch(tolower,str) end
-function characters.upper (str) return lpegmatch(toupper,str) end
-function characters.shaped(str) return lpegmatch(toshape,str) end
+-- maybe: (twice as fast when much ascii)
+--
+-- local tolower = lpeg.patterns.tolower
+-- local lower = string.lower
+--
+-- local allascii = R("\000\127")^1 * P(-1)
+--
+-- function characters.checkedlower(str)
+-- return lpegmatch(allascii,str) and lower(str) or lpegmatch(tolower,str) or str
+-- end
function characters.lettered(str,spacing)
local new, n = { }, 0
@@ -812,15 +1160,6 @@ end
function characters.uccode(n) return uccodes[n] end -- obsolete
function characters.lccode(n) return lccodes[n] end -- obsolete
-function characters.safechar(n)
- local c = data[n]
- if c and c.contextname then
- return "\\" .. c.contextname
- else
- return utfchar(n)
- end
-end
-
function characters.shape(n)
local shcode = shcodes[n]
if not shcode then
@@ -875,41 +1214,42 @@ end
-- groupdata[group] = gdata
-- end
---~ characters.data, characters.groups = chardata, groupdata
-
---~ [0xF0000]={
---~ category="co",
---~ cjkwd="a",
---~ description="<Plane 0x000F Private Use, First>",
---~ direction="l",
---~ unicodeslot=0xF0000,
---~ },
---~ [0xFFFFD]={
---~ category="co",
---~ cjkwd="a",
---~ description="<Plane 0x000F Private Use, Last>",
---~ direction="l",
---~ unicodeslot=0xFFFFD,
---~ },
---~ [0x100000]={
---~ category="co",
---~ cjkwd="a",
---~ description="<Plane 0x0010 Private Use, First>",
---~ direction="l",
---~ unicodeslot=0x100000,
---~ },
---~ [0x10FFFD]={
---~ category="co",
---~ cjkwd="a",
---~ description="<Plane 0x0010 Private Use, Last>",
---~ direction="l",
---~ unicodeslot=0x10FFFD,
---~ },
+-- characters.data, characters.groups = chardata, groupdata
+
+-- [0xF0000]={
+-- category="co",
+-- cjkwd="a",
+-- description="<Plane 0x000F Private Use, First>",
+-- direction="l",
+-- unicodeslot=0xF0000,
+-- },
+-- [0xFFFFD]={
+-- category="co",
+-- cjkwd="a",
+-- description="<Plane 0x000F Private Use, Last>",
+-- direction="l",
+-- unicodeslot=0xFFFFD,
+-- },
+-- [0x100000]={
+-- category="co",
+-- cjkwd="a",
+-- description="<Plane 0x0010 Private Use, First>",
+-- direction="l",
+-- unicodeslot=0x100000,
+-- },
+-- [0x10FFFD]={
+-- category="co",
+-- cjkwd="a",
+-- description="<Plane 0x0010 Private Use, Last>",
+-- direction="l",
+-- unicodeslot=0x10FFFD,
+-- },
if not characters.superscripts then
local superscripts = allocate() characters.superscripts = superscripts
local subscripts = allocate() characters.subscripts = subscripts
+ local fractions = allocate() characters.fractions = fractions
-- skipping U+02120 (service mark) U+02122 (trademark)
@@ -929,16 +1269,24 @@ if not characters.superscripts then
elseif trace_defining then
report_defining("ignoring %s %a, char %c, description %a","subscript",ustring(k),k,v.description)
end
+ elseif what == "fraction" then
+ if #specials > 1 then
+ fractions[k] = { unpack(specials,2) }
+ elseif trace_defining then
+ report_defining("ignoring %s %a, char %c, description %a","fraction",ustring(k),k,v.description)
+ end
end
end
end
-- print(table.serialize(superscripts, "superscripts", { hexify = true }))
-- print(table.serialize(subscripts, "subscripts", { hexify = true }))
+ -- print(table.serialize(fractions, "fractions", { hexify = true }))
if storage then
storage.register("characters/superscripts", superscripts, "characters.superscripts")
storage.register("characters/subscripts", subscripts, "characters.subscripts")
+ storage.register("characters/fractions", fractions, "characters.fractions")
end
end
@@ -961,256 +1309,6 @@ function characters.showstring(str)
end
end
--- the following code will move to char-tex.lua
-
--- tex
-
-if not tex or not context or not commands then return characters end
-
-local tex = tex
-local texsetlccode = tex.setlccode
-local texsetuccode = tex.setuccode
-local texsetsfcode = tex.setsfcode
-local texsetcatcode = tex.setcatcode
-
-local contextsprint = context.sprint
-local ctxcatcodes = catcodes.numbers.ctxcatcodes
-
---[[ldx--
-<p>Instead of using a <l n='tex'/> file to define the named glyphs, we
-use the table. After all, we have this information available anyway.</p>
---ldx]]--
-
-function commands.makeactive(n,name) --
- contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
- -- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)
-end
-
-function commands.utfchar(c,n)
- if n then
- -- contextsprint(c,charfromnumber(n))
- contextsprint(c,utfchar(n))
- else
- -- contextsprint(charfromnumber(c))
- contextsprint(utfchar(c))
- end
-end
-
-function commands.safechar(n)
- local c = data[n]
- if c and c.contextname then
- contextsprint("\\" .. c.contextname) -- context[c.contextname]()
- else
- contextsprint(utfchar(n))
- end
-end
-
-tex.uprint = commands.utfchar
-
-local forbidden = tohash { -- at least now
- 0x00A0,
- 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D,
- 0x202F,
- 0x205F,
- -- 0xFEFF,
-}
-
-function characters.define(tobelettered, tobeactivated) -- catcodetables
-
- if trace_defining then
- report_defining("defining active character commands")
- end
-
- local activated, a = { }, 0
-
- for u, chr in next, data do -- these will be commands
- local fallback = chr.fallback
- if fallback then
- contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}")
- a = a + 1
- activated[a] = u
- else
- local contextname = chr.contextname
- if contextname then
- local category = chr.category
- if is_character[category] then
- if chr.unicodeslot < 128 then
- if is_letter[category] then
- contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
- else
- contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s
- end
- else
- contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,utfchar(u))) -- has no s
- end
- elseif is_command[category] and not forbidden[u] then
- contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}")
- a = a + 1
- activated[a] = u
- end
- end
- end
- end
-
- if tobelettered then -- shared
- local saved = tex.catcodetable
- for i=1,#tobelettered do
- tex.catcodetable = tobelettered[i]
- if trace_defining then
- report_defining("defining letters (global, shared)")
- end
- for u, chr in next, data do
- if not chr.fallback and is_letter[chr.category] and u >= 128 and u <= 65536 then
- texsetcatcode(u,11)
- end
- local range = chr.range
- if range then
- for i=1,range.first,range.last do -- tricky as not all are letters
- texsetcatcode(i,11)
- end
- end
- end
- texsetcatcode(0x200C,11) -- non-joiner
- texsetcatcode(0x200D,11) -- joiner
- for k, v in next, blocks do
- if v.catcode == "letter" then
- for i=v.first,v.last do
- texsetcatcode(i,11)
- end
- end
- end
- end
- tex.catcodetable = saved
- end
-
- local nofactivated = #tobeactivated
- if tobeactivated and nofactivated > 0 then
- for i=1,nofactivated do
- local u = activated[i]
- if u then
- report_defining("character %U is active in set %a, containing %a",u,data[u].description,tobeactivated)
- end
- end
- local saved = tex.catcodetable
- for i=1,#tobeactivated do
- local vector = tobeactivated[i]
- if trace_defining then
- report_defining("defining %a active characters in vector %a",nofactivated,vector)
- end
- tex.catcodetable = vector
- for i=1,nofactivated do
- local u = activated[i]
- if u then
- texsetcatcode(u,13)
- end
- end
- end
- tex.catcodetable = saved
- end
-
-end
-
---[[ldx--
-<p>Setting the lccodes is also done in a loop over the data table.</p>
---ldx]]--
-
-local sfmode = "unset" -- unset, traditional, normal
-
-function characters.setcodes()
- if trace_defining then
- report_defining("defining lc and uc codes")
- end
- local traditional = sfstate == "traditional" or sfstate == "unset"
- for code, chr in next, data do
- local cc = chr.category
- if is_letter[cc] then
- local range = chr.range
- if range then
- for i=range.first,range.last do
- texsetcatcode(i,11) -- letter
- texsetlccode(i,i,i) -- self self
- end
- else
- local lc, uc = chr.lccode, chr.uccode
- if not lc then
- chr.lccode, lc = code, code
- elseif type(lc) == "table" then
- lc = code
- end
- if not uc then
- chr.uccode, uc = code, code
- elseif type(uc) == "table" then
- uc = code
- end
- texsetcatcode(code,11) -- letter
- texsetlccode(code,lc,uc)
- if traditional and cc == "lu" then
- texsetsfcode(code,999)
- end
- end
- elseif is_mark[cc] then
- texsetlccode(code,code,code) -- for hyphenation
- end
- end
- if traditional then
- sfstate = "traditional"
- end
-end
-
--- If this is something that is not documentwide and used a lot, then we
--- need a more clever approach (trivial but not now).
-
-local function setuppersfcodes(v,n)
- if sfstate ~= "unset" then
- report_defining("setting uppercase sf codes to %a",n)
- for code, chr in next, data do
- if chr.category == "lu" then
- texsetsfcode(code,n)
- end
- end
- end
- sfstate = v
-end
-
-directives.register("characters.spaceafteruppercase",function(v)
- if v == "traditional" then
- setuppersfcodes(v,999)
- elseif v == "normal" then
- setuppersfcodes(v,1000)
- end
-end)
-
--- tex
+-- code moved to char-tex.lua
-function commands.chardescription(slot)
- local d = data[slot]
- if d then
- context(d.description)
- end
-end
-
--- xml
-
-characters.activeoffset = 0x10000 -- there will be remapped in that byte range
-
-function commands.remapentity(chr,slot)
- contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
-end
-
--- xml.entities = xml.entities or { }
---
--- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml
---
--- function characters.setmkiventities()
--- local entities = xml.entities
--- entities.lt = "<"
--- entities.amp = "&"
--- entities.gt = ">"
--- end
---
--- function characters.setmkiientities()
--- local entities = xml.entities
--- entities.lt = utfchar(characters.activeoffset + utfbyte("<"))
--- entities.amp = utfchar(characters.activeoffset + utfbyte("&"))
--- entities.gt = utfchar(characters.activeoffset + utfbyte(">"))
--- end
+return characters
diff --git a/tex/context/base/char-ini.mkiv b/tex/context/base/char-ini.mkiv
index 113d26709..95ff7af5a 100644
--- a/tex/context/base/char-ini.mkiv
+++ b/tex/context/base/char-ini.mkiv
@@ -13,11 +13,10 @@
\writestatus{loading}{ConTeXt Character Support / Initialization}
-\registerctxluafile{char-def}{1.001} % let's load this one first
-\registerctxluafile{char-ini}{1.001}
-\registerctxluafile{char-cjk}{1.001}
+\registerctxluafile{char-fio}{1.001}
\registerctxluafile{char-map}{1.001} % maybe we will load this someplace else
\registerctxluafile{char-tex}{1.001}
+\registerctxluafile{char-ent}{1.001}
\unprotect
@@ -43,11 +42,15 @@
% use \normalUchar when possible .. the next one is nice for documents and it also accepts
% 0x prefixed numbers
-\def\utfchar#1{\ctxcommand{utfchar(\number#1)}}
+\def\utfchar #1{\clf_utfchar \numexpr#1\relax}
+\def\safechar#1{\clf_safechar\numexpr#1\relax}
\def\checkedchar {\relax\ifmmode\expandafter\checkedmathchar\else\expandafter\checkedtextchar\fi} % #1#2
\def\checkedmathchar#1#2{#2}
-\def\checkedtextchar #1{\iffontchar\font#1 \expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi{\char#1}}
+%def\checkedtextchar #1{\iffontchar\font#1 \expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi{\char#1}}
+
+\unexpanded\def\checkedtextchar#1% #2%
+ {\clf_doifelsecharinfont\numexpr#1\relax{\char#1}} % {#2}
\unexpanded\def\textormathchar#1%
{\relax\ifmmode
@@ -65,32 +68,26 @@
% \def\setcclcuc#1#2#3{\global\catcode#1=\lettercatcode\global\lccode#1=#2\global\uccode#1=#3\relax}
% \def\setcclcucself#1{\global\catcode#1=\lettercatcode\global\lccode#1=#1\global\uccode#1=#1\relax }
-\ctxlua{characters.setcodes()}
-
% Is setting up vrb tpa and tpb needed?
-\ctxlua {
- characters.define(
- { % letter catcodes
- \number\texcatcodes,
- \number\ctxcatcodes,
- \number\notcatcodes,
- %number\mthcatcodes,
- \number\vrbcatcodes,
- \number\prtcatcodes,
- \number\tpacatcodes,
- \number\tpbcatcodes,
- \number\txtcatcodes,
- },
- { % activate catcodes
- \number\ctxcatcodes,
- \number\notcatcodes,
- \number\prtcatcodes, % new
- }
- )
-% catcodes.register("xmlcatcodes",\number\xmlcatcodes)
-}
-
-\def\chardescription#1{\ctxcommand{chardescription(\number#1)}}
+% move to lua side
+
+%clf_setcharactercodes
+
+\clf_setlettercatcodes\texcatcodes
+\clf_setlettercatcodes\ctxcatcodes
+\clf_setlettercatcodes\notcatcodes
+%clf_setlettercatcodes\mthcatcodes
+\clf_setlettercatcodes\vrbcatcodes
+\clf_setlettercatcodes\prtcatcodes
+\clf_setlettercatcodes\tpacatcodes
+\clf_setlettercatcodes\tpbcatcodes
+\clf_setlettercatcodes\txtcatcodes
+
+\clf_setactivecatcodes\ctxcatcodes
+\clf_setactivecatcodes\notcatcodes
+\clf_setactivecatcodes\prtcatcodes
+
+\def\chardescription#1{\clf_chardescription#1\relax}
\protect \endinput
diff --git a/tex/context/base/char-tex.lua b/tex/context/base/char-tex.lua
index 472cae930..2093c6d6c 100644
--- a/tex/context/base/char-tex.lua
+++ b/tex/context/base/char-tex.lua
@@ -6,17 +6,118 @@ if not modules then modules = { } end modules ['char-tex'] = {
license = "see context related readme files"
}
-local lpeg = lpeg
-local find = string.find
+local lpeg = lpeg
+local next, type = next, type
+local format, find, gmatch = string.format, string.find, string.gmatch
+local utfchar, utfbyte = utf.char, utf.byte
+local concat, tohash = table.concat, table.tohash
local P, C, R, S, V, Cs, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.V, lpeg.Cs, lpeg.Cc
-local U, lpegmatch = lpeg.patterns.utf8, lpeg.match
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+local lpegpatterns = lpeg.patterns
+local lpegmatch = lpeg.match
+local utfchartabletopattern = lpeg.utfchartabletopattern
+
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
+
+local context = context
+local commands = commands
+local implement = interfaces.implement
+
+local characters = characters
+local texcharacters = { }
+characters.tex = texcharacters
+local utffilters = characters.filters.utf
+
+local is_character = characters.is_character
+local is_letter = characters.is_letter
+local is_command = characters.is_command
+local is_spacing = characters.is_spacing
+local is_mark = characters.is_mark
+local is_punctuation = characters.is_punctuation
+
+local data = characters.data if not data then return end
+local blocks = characters.blocks
+
+local trace_defining = false trackers.register("characters.defining", function(v) trace_defining = v end)
+
+local report_defining = logs.reporter("characters")
+
+--[[ldx--
+<p>In order to deal with 8-bit output, we need to find a way to go from <l n='utf'/> to
+8-bit. This is handled in the <l n='luatex'/> engine itself.</p>
+
+<p>This leaves us problems with characters that are specific to <l n='tex'/> like
+<type>{}</type>, <type>$</type> and the like. We can remap some chars that tex input files
+are sensitive to, to a private area (while writing to a utility file) and revert them
+to their original slot when we read in such a file. Instead of reverting, we can (when
+we resolve characters to glyphs) map them to their right glyph there. For this purpose
+we can use the private planes 0x0F0000 and 0x100000.</p>
+--ldx]]--
+
+local low = allocate()
+local high = allocate()
+local escapes = allocate()
+local special = "~#$%^&_{}\\|" -- "~#$%{}\\|"
+
+local private = {
+ low = low,
+ high = high,
+ escapes = escapes,
+}
+
+utffilters.private = private
+
+for ch in gmatch(special,".") do
+ local cb
+ if type(ch) == "number" then
+ cb, ch = ch, utfchar(ch)
+ else
+ cb = utfbyte(ch)
+ end
+ if cb < 256 then
+ escapes[ch] = "\\" .. ch
+ low[ch] = utfchar(0x0F0000 + cb)
+ if ch == "%" then
+ ch = "%%" -- nasty, but we need this as in replacements (also in lpeg) % is interpreted
+ end
+ high[utfchar(0x0F0000 + cb)] = ch
+ end
+end
+
+local tohigh = lpeg.replacer(low) -- frozen, only for basic tex
+local tolow = lpeg.replacer(high) -- frozen, only for basic tex
+
+lpegpatterns.utftohigh = tohigh
+lpegpatterns.utftolow = tolow
+
+function utffilters.harden(str)
+ return lpegmatch(tohigh,str)
+end
+
+function utffilters.soften(str)
+ return lpegmatch(tolow,str)
+end
+
+private.escape = utf.remapper(escapes) -- maybe: ,"dynamic"
+private.replace = utf.remapper(low) -- maybe: ,"dynamic"
+private.revert = utf.remapper(high) -- maybe: ,"dynamic"
+
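+-- A small, untested sketch of the intended round trip: "harden" moves the tex
+-- specials listed above into the 0x0F0000 private plane and "soften" brings them
+-- back again:
+--
+-- local hardened = utffilters.harden("$x$") -- "$" becomes utfchar(0x0F0024)
+-- print(utffilters.soften(hardened) == "$x$") -- expected: true
+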
+--[[ldx--
+<p>We get a more efficient variant of this when we integrate
+replacements in collapser. This more or less renders the previous
+private code redundant. The following code is equivalent but the
+first snippet uses the relocated dollars.</p>
-characters = characters or { }
-local characters = characters
-characters.tex = characters.tex or { }
+<typing>
+[󰀤x󰀤] [$x$]
+</typing>
+--ldx]]--
+
+-- using the tree-lpeg-mapper would be nice but we also need to deal with end-of-string
+-- cases: "\"\i", and we don't want "\relax" to be seen as "\r e l a x" (for which we
+-- need to mess with spaces)
local accentmapping = allocate {
['"'] = { [""] = "¨",
@@ -128,7 +229,7 @@ local accentmapping = allocate {
},
}
-characters.tex.accentmapping = accentmapping
+texcharacters.accentmapping = accentmapping
local accent_map = allocate { -- incomplete
['~'] = "̃" , -- ̃ Ẽ
@@ -150,7 +251,7 @@ local accent_map = allocate { -- incomplete
-- ̰ Ḛ
}
--- local accents = table.concat(table.keys(accentmapping)) -- was _map
+-- local accents = concat(table.keys(accentmapping)) -- was _map
local function remap_accent(a,c,braced)
local m = accentmapping[a]
@@ -171,86 +272,505 @@ local function remap_accent(a,c,braced)
end
end
-local command_map = allocate {
- ["i"] = "ı",
- ["l"] = "ł",
- ["ss"] = "ß",
- ["ae"] = "æ",
- ["AE"] = "Æ",
- ["oe"] = "œ",
- ["OE"] = "Œ",
- ["o"] = "ø",
- ["O"] = "Ø",
- ["aa"] = "å",
- ["AA"] = "Å",
+local commandmapping = allocate {
+ ["aa"] = "å", ["AA"] = "Å",
+ ["ae"] = "æ", ["AE"] = "Æ",
+ ["cc"] = "ç", ["CC"] = "Ç",
+ ["i"] = "ı", ["j"] = "ȷ",
+ ["ij"] = "ij", ["IJ"] = "IJ",
+ ["l"] = "ł", ["L"] = "Ł",
+ ["o"] = "ø", ["O"] = "Ø",
+ ["oe"] = "œ", ["OE"] = "Œ",
+ ["sz"] = "ß", ["SZ"] = "SZ", ["ss"] = "ß", ["SS"] = "ß",
}
--- no need for U here
+texcharacters.commandmapping = commandmapping
-local achar = R("az","AZ") + P("ı") + P("\\i")
+local ligaturemapping = allocate {
+ ["''"] = "”",
+ ["``"] = "“",
+ ["--"] = "–",
+ ["---"] = "—",
+}
-local spaces = P(" ")^0
-local no_l = P("{") / ""
-local no_r = P("}") / ""
-local no_b = P('\\') / ""
+-- local achar = R("az","AZ") + P("ı") + P("\\i")
+--
+-- local spaces = P(" ")^0
+-- local no_l = P("{") / ""
+-- local no_r = P("}") / ""
+-- local no_b = P('\\') / ""
+--
+-- local lUr = P("{") * C(achar) * P("}")
+--
+-- local accents_1 = [["'.=^`~]]
+-- local accents_2 = [[Hckruv]]
+--
+-- local accent = P('\\') * (
+-- C(S(accents_1)) * (lUr * Cc(true) + C(achar) * Cc(false)) + -- we need achar for ı etc, could be sped up
+-- C(S(accents_2)) * lUr * Cc(true)
+-- ) / remap_accent
+--
+-- local csname = P('\\') * C(R("az","AZ")^1)
+--
+-- local command = (
+-- csname +
+-- P("{") * csname * spaces * P("}")
+-- ) / commandmapping -- remap_commands
+--
+-- local both_1 = Cs { "run",
+-- accent = accent,
+-- command = command,
+-- run = (V("accent") + no_l * V("accent") * no_r + V("command") + P(1))^0,
+-- }
+--
+-- local both_2 = Cs { "run",
+-- accent = accent,
+-- command = command,
+-- run = (V("accent") + V("command") + no_l * ( V("accent") + V("command") ) * no_r + P(1))^0,
+-- }
+--
+-- function texcharacters.toutf(str,strip)
+-- if not find(str,"\\") then
+-- return str
+-- elseif strip then
+-- return lpegmatch(both_1,str)
+-- else
+-- return lpegmatch(both_2,str)
+-- end
+-- end
-local lUr = P("{") * C(achar) * P("}")
+local untex
-local accents_1 = [["'.=^`~]]
-local accents_2 = [[Hckruv]]
+local function toutfpattern()
+ if not untex then
+ local hash = { }
+ for k, v in next, accentmapping do
+ for kk, vv in next, v do
+ if (k >= "a" and k <= "z") or (k >= "A" and k <= "Z") then
+ hash[ "\\"..k.." "..kk ] = vv
+ hash["{\\"..k.." "..kk.."}"] = vv
+ else
+ hash["\\" ..k ..kk ] = vv
+ hash["{\\"..k ..kk.."}"] = vv
+ end
+ hash["\\" ..k.."{"..kk.."}" ] = vv
+ hash["{\\"..k.."{"..kk.."}}"] = vv
+ end
+ end
+ for k, v in next, commandmapping do
+ hash["\\"..k.." "] = v
+ hash["{\\"..k.."}"] = v
+ hash["{\\"..k.." }"] = v
+ end
+ for k, v in next, ligaturemapping do
+ hash[k] = v
+ end
+ untex = utfchartabletopattern(hash) / hash
+ end
+ return untex
+end
-local accent = P('\\') * (
- C(S(accents_1)) * (lUr * Cc(true) + C(achar) * Cc(false)) + -- we need achar for ı etc, could be sped up
- C(S(accents_2)) * lUr * Cc(true)
-) / remap_accent
+texcharacters.toutfpattern = toutfpattern
-local csname = P('\\') * C(R("az","AZ")^1)
+local pattern = nil
-local command = (
- csname +
- P("{") * csname * spaces * P("}")
-) / command_map -- remap_commands
+local function prepare()
+ pattern = Cs((toutfpattern() + P(1))^0)
+ return pattern
+end
-local both_1 = Cs { "run",
- accent = accent,
- command = command,
- run = (V("accent") + no_l * V("accent") * no_r + V("command") + P(1))^0,
-}
+function texcharacters.toutf(str,strip)
+ if str == "" then
+ return str
+ elseif not find(str,"\\") then
+ return str
+ -- elseif strip then
+ else
+ return lpegmatch(pattern or prepare(),str)
+ end
+end
-local both_2 = Cs { "run",
- accent = accent,
- command = command,
- run = (V("accent") + V("command") + no_l * ( V("accent") + V("command") ) * no_r + P(1))^0,
-}
+-- print(texcharacters.toutf([[\~{Z}]],true))
+-- print(texcharacters.toutf([[\'\i]],true))
+-- print(texcharacters.toutf([[\'{\i}]],true))
+-- print(texcharacters.toutf([[\"{e}]],true))
+-- print(texcharacters.toutf([[\" {e}]],true))
+-- print(texcharacters.toutf([[{\"{e}}]],true))
+-- print(texcharacters.toutf([[{\" {e}}]],true))
+-- print(texcharacters.toutf([[{\l}]],true))
+-- print(texcharacters.toutf([[{\l }]],true))
+-- print(texcharacters.toutf([[\v{r}]],true))
+-- print(texcharacters.toutf([[fo{\"o}{\ss}ar]],true))
+-- print(texcharacters.toutf([[H{\'a}n Th\^e\llap{\raise 0.5ex\hbox{\'{\relax}}} Th{\'a}nh]],true))
-function characters.tex.toutf(str,strip)
- if not find(str,"\\") then
- return str
- elseif strip then
- return lpegmatch(both_1,str)
+function texcharacters.safechar(n) -- was characters.safechar
+ local c = data[n]
+ if c and c.contextname then
+ return "\\" .. c.contextname
else
- return lpegmatch(both_2,str)
+ return utfchar(n)
end
end
--- print(characters.tex.toutf([[\~{Z}]],true))
--- print(characters.tex.toutf([[\'\i]],true))
--- print(characters.tex.toutf([[\'{\i}]],true))
--- print(characters.tex.toutf([[\"{e}]],true))
--- print(characters.tex.toutf([[\" {e}]],true))
--- print(characters.tex.toutf([[{\"{e}}]],true))
--- print(characters.tex.toutf([[{\" {e}}]],true))
--- print(characters.tex.toutf([[{\l}]],true))
--- print(characters.tex.toutf([[{\l }]],true))
--- print(characters.tex.toutf([[\v{r}]],true))
--- print(characters.tex.toutf([[fo{\"o}{\ss}ar]],true))
--- print(characters.tex.toutf([[H{\'a}n Th\^e\llap{\raise 0.5ex\hbox{\'{\relax}}} Th{\'a}nh]],true))
-
-function characters.tex.defineaccents()
+if not context or not commands then
+ -- used in e.g. mtx-bibtex
+ return
+end
+
+-- all kind of initializations
+
+local tex = tex
+local texsetlccode = tex.setlccode
+local texsetuccode = tex.setuccode
+local texsetsfcode = tex.setsfcode
+local texsetcatcode = tex.setcatcode
+
+local contextsprint = context.sprint
+local ctxcatcodes = catcodes.numbers.ctxcatcodes
+
+function texcharacters.defineaccents()
+ local ctx_dodefineaccentcommand = context.dodefineaccentcommand
+ local ctx_dodefineaccent = context.dodefineaccent
+ local ctx_dodefinecommand = context.dodefinecommand
for accent, group in next, accentmapping do
- context.dodefineaccentcommand(accent)
+ ctx_dodefineaccentcommand(accent)
for character, mapping in next, group do
- context.dodefineaccent(accent,character,mapping)
+ ctx_dodefineaccent(accent,character,mapping)
end
end
+ for command, mapping in next, commandmapping do
+ ctx_dodefinecommand(command,mapping)
+ end
end
+
+implement { -- a waste of scanner but consistent
+ name = "defineaccents",
+ actions = texcharacters.defineaccents
+}
+
+--[[ldx--
+<p>Instead of using a <l n='tex'/> file to define the named glyphs, we
+use the table. After all, we have this information available anyway.</p>
+--ldx]]--
+
+function commands.makeactive(n,name) -- not used
+ contextsprint(ctxcatcodes,format("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name))
+ -- context("\\catcode%s=13\\unexpanded\\def %s{\\%s}",n,utfchar(n),name)
+end
+
+implement {
+ name = "utfchar",
+ actions = { utfchar, contextsprint },
+ arguments = "integer"
+}
+
+implement {
+ name = "safechar",
+ actions = { texcharacters.safechar, contextsprint },
+ arguments = "integer"
+}
+
+implement {
+ name = "uchar",
+ arguments = { "integer", "integer" },
+ actions = function(h,l)
+ context(utfchar(h*256+l))
+ end
+}
+
+tex.uprint = commands.utfchar
+
+-- in context we don't use lc and uc codes (in fact in luatex we should have a hf code)
+-- so at some point we might drop this
+
+local forbidden = tohash { -- at least now
+ 0x00A0,
+ 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D,
+ 0x202F,
+ 0x205F,
+ -- 0xFEFF,
+}
+
+local csletters = characters.csletters -- also a signal that we have initialized
+local activated = { }
+local sfstate = "unset" -- unset, traditional, normal
+local blocks_too = false
+
+directives.register("characters.blockstoo",function(v) block_too = v end)
+
+-- If this is something that is not documentwide and used a lot, then we
+-- need a more clever approach (trivial but not now).
+
+local function setuppersfcodes(v,n)
+ if sfstate ~= "unset" then
+ report_defining("setting uppercase sf codes to %a",n)
+ for u, chr in next, data do
+ if chr.category == "lu" then
+ texsetsfcode(u,n)
+ end
+ end
+ end
+ sfstate = v
+end
+
+directives.register("characters.spaceafteruppercase",function(v)
+ if v == "traditional" then
+ setuppersfcodes(v,999)
+ elseif v == "normal" then
+ setuppersfcodes(v,1000)
+ end
+end)
+
+if not csletters then
+
+ csletters = allocate()
+ characters.csletters = csletters
+
+ report_defining("setting up character related codes and commands")
+
+ if sfstate == "unset" then
+ sfstate = "traditional"
+ end
+
+ local traditional = sfstate == "traditional"
+
+ for u, chr in next, data do -- will move up
+ local fallback = chr.fallback
+ if fallback then
+ contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\checkedchar{",u,"}{",fallback,"}}}")
+ activated[#activated+1] = u
+ else
+ local contextname = chr.contextname
+ local category = chr.category
+ local isletter = is_letter[category]
+ if contextname then
+ if is_character[category] then
+ if chr.unicodeslot < 128 then
+ if isletter then
+ -- setmacro
+ local c = utfchar(u)
+ contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,c)) -- has no s
+ csletters[c] = u
+ else
+ -- setchar
+ contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",contextname,u)) -- has no s
+ end
+ else
+ -- setmacro
+ local c = utfchar(u)
+ contextsprint(ctxcatcodes,format("\\def\\%s{%s}",contextname,c)) -- has no s
+ if isletter and u >= 32 and u <= 65536 then
+ csletters[c] = u
+ end
+ end
+ --
+ if isletter then
+ local lc, uc = chr.lccode, chr.uccode
+ if not lc then
+ chr.lccode, lc = u, u
+ elseif type(lc) == "table" then
+ lc = u
+ end
+ if not uc then
+ chr.uccode, uc = u, u
+ elseif type(uc) == "table" then
+ uc = u
+ end
+ texsetlccode(u,lc,uc)
+ if traditional and category == "lu" then
+ texsetsfcode(u,999)
+ end
+ end
+ --
+ elseif is_command[category] and not forbidden[u] then
+ -- set
+ contextsprint("{\\catcode",u,"=13\\unexpanded\\gdef ",utfchar(u),"{\\"..contextname,"}}")
+ activated[#activated+1] = u
+ elseif is_mark[category] then
+ texsetlccode(u,u,u) -- for hyphenation
+ end
+ elseif isletter and u >= 32 and u <= 65536 then
+ csletters[utfchar(u)] = u
+ --
+ local lc, uc = chr.lccode, chr.uccode
+ if not lc then
+ chr.lccode, lc = u, u
+ elseif type(lc) == "table" then
+ lc = u
+ end
+ if not uc then
+ chr.uccode, uc = u, u
+ elseif type(uc) == "table" then
+ uc = u
+ end
+ texsetlccode(u,lc,uc)
+ if traditional and category == "lu" then
+ texsetsfcode(u,999)
+ end
+ --
+ elseif is_mark[category] then
+ --
+ texsetlccode(u,u,u) -- for hyphenation
+ --
+ end
+ end
+ end
+
+ if blocks_too then
+ -- this slows down format generation by over 10 percent
+ for k, v in next, blocks do
+ if v.catcode == "letter" then
+ for u=v.first,v.last do
+ csletters[utfchar(u)] = u
+ --
+ -- texsetlccode(u,u,u) -- self self
+ --
+ end
+ end
+ end
+ end
+
+ if storage then
+ storage.register("characters/csletters", csletters, "characters.csletters")
+ end
+
+else
+ mark(csletters)
+
+end
+
+lpegpatterns.csletter = utfchartabletopattern(csletters)
+
+-- todo: get rid of activated
+-- todo: move first loop out ,merge with above
+
+function characters.setlettercatcodes(cct)
+ if trace_defining then
+ report_defining("assigning letter catcodes to catcode table %a",cct)
+ end
+ local saved = tex.catcodetable
+ tex.catcodetable = cct
+ texsetcatcode(0x200C,11) -- non-joiner
+ texsetcatcode(0x200D,11) -- joiner
+ for c, u in next, csletters do
+ texsetcatcode(u,11)
+ end
+ -- for u, chr in next, data do
+ -- if not chr.fallback and is_letter[chr.category] and u >= 32 and u <= 65536 then
+ -- texsetcatcode(u,11)
+ -- end
+ -- local range = chr.range
+ -- if range then
+ -- for i=1,range.first,range.last do -- tricky as not all are letters
+ -- texsetcatcode(i,11)
+ -- end
+ -- end
+ -- end
+ -- for k, v in next, blocks do
+ -- if v.catcode == "letter" then
+ -- for u=v.first,v.last do
+ -- texsetcatcode(u,11)
+ -- end
+ -- end
+ -- end
+ tex.catcodetable = saved
+end
+
+function characters.setactivecatcodes(cct)
+ local saved = tex.catcodetable
+ tex.catcodetable = cct
+ for i=1,#activated do
+ local u = activated[i]
+ texsetcatcode(u,13)
+ if trace_defining then
+ report_defining("character %U (%s) is active in set %a",u,data[u].description,cct)
+ end
+ end
+ tex.catcodetable = saved
+end
+
+--[[ldx--
+<p>Setting the lccodes is also done in a loop over the data table.</p>
+--ldx]]--
+
+-- function characters.setcodes() -- we could loop over csletters
+-- if trace_defining then
+-- report_defining("defining lc and uc codes")
+-- end
+-- local traditional = sfstate == "traditional" or sfstate == "unset"
+-- for code, chr in next, data do
+-- local cc = chr.category
+-- if is_letter[cc] then
+-- local range = chr.range
+-- if range then
+-- for i=range.first,range.last do
+-- texsetlccode(i,i,i) -- self self
+-- end
+-- else
+-- local lc, uc = chr.lccode, chr.uccode
+-- if not lc then
+-- chr.lccode, lc = code, code
+-- elseif type(lc) == "table" then
+-- lc = code
+-- end
+-- if not uc then
+-- chr.uccode, uc = code, code
+-- elseif type(uc) == "table" then
+-- uc = code
+-- end
+-- texsetlccode(code,lc,uc)
+-- if traditional and cc == "lu" then
+-- texsetsfcode(code,999)
+-- end
+-- end
+-- elseif is_mark[cc] then
+-- texsetlccode(code,code,code) -- for hyphenation
+-- end
+-- end
+-- if traditional then
+-- sfstate = "traditional"
+-- end
+-- end
+
+-- tex
+
+implement {
+ name = "chardescription",
+ arguments = "integer",
+ actions = function(slot)
+ local d = data[slot]
+ if d then
+ context(d.description)
+ end
+ end,
+}
+
+-- xml
+
+characters.activeoffset = 0x10000 -- entities will be remapped into that byte range
+
+function commands.remapentity(chr,slot) -- not used
+ contextsprint(format("{\\catcode%s=13\\xdef%s{\\string%s}}",slot,utfchar(slot),chr))
+end
+
+-- xml.entities = xml.entities or { }
+--
+-- storage.register("xml/entities",xml.entities,"xml.entities") -- this will move to lxml
+--
+-- function characters.setmkiventities()
+-- local entities = xml.entities
+-- entities.lt = "<"
+-- entities.amp = "&"
+-- entities.gt = ">"
+-- end
+--
+-- function characters.setmkiientities()
+-- local entities = xml.entities
+-- entities.lt = utfchar(characters.activeoffset + utfbyte("<"))
+-- entities.amp = utfchar(characters.activeoffset + utfbyte("&"))
+-- entities.gt = utfchar(characters.activeoffset + utfbyte(">"))
+-- end
+
+implement { name = "setlettercatcodes", scope = "private", actions = characters.setlettercatcodes, arguments = "integer" }
+implement { name = "setactivecatcodes", scope = "private", actions = characters.setactivecatcodes, arguments = "integer" }
+--------- { name = "setcharactercodes", scope = "private", actions = characters.setcodes }
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index 95ed48279..381602ede 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -6,11 +6,6 @@ if not modules then modules = { } end modules ['char-utf'] = {
license = "see context related readme files"
}
--- todo: trackers
--- todo: no longer special characters (high) here, only needed in special cases and
--- these don't go through this file anyway
--- graphemes: basic symbols
-
--[[ldx--
<p>When a sequence of <l n='utf'/> characters enters the application, it may be
 necessary to collapse subsequences into their composed variant.</p>
@@ -24,33 +19,51 @@ of output (for instance <l n='pdf'/>).</p>
over a string.</p>
--ldx]]--
-local concat, gmatch, gsub, find = table.concat, string.gmatch, string.gsub, string.find
+local gsub, find = string.gsub, string.find
+local concat, sortedhash, keys, sort = table.concat, table.sortedhash, table.keys, table.sort
local utfchar, utfbyte, utfcharacters, utfvalues = utf.char, utf.byte, utf.characters, utf.values
-local allocate = utilities.storage.allocate
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, Cs, Cmt, Ct = lpeg.P, lpeg.Cs, lpeg.Cmt, lpeg.Ct
+
+if not characters then require("char-def") end
+if not characters.blocks then require("char-ini") end
+
+local lpegmatch = lpeg.match
+local lpegpatterns = lpeg.patterns
+local p_utf8character = lpegpatterns.utf8character
+local p_utf8byte = lpegpatterns.utf8byte
+local utfchartabletopattern = lpeg.utfchartabletopattern
+
+local formatters = string.formatters
+
+local allocate = utilities.storage.allocate or function() return { } end
+
+local charfromnumber = characters.fromnumber
+
+characters = characters or { }
+local characters = characters
-local charfromnumber = characters.fromnumber
+local graphemes = allocate()
+characters.graphemes = graphemes
-characters = characters or { }
-local characters = characters
+local collapsed = allocate()
+characters.collapsed = collapsed
-characters.graphemes = allocate()
-local graphemes = characters.graphemes
+local combined = allocate()
+characters.combined = combined
-characters.combined = allocate()
-local combined = characters.combined
+local decomposed = allocate()
+characters.decomposed = decomposed
-characters.decomposed = allocate()
-local decomposed = characters.decomposed
+local mathpairs = allocate()
+characters.mathpairs = mathpairs
-characters.mathpairs = allocate()
-local mathpairs = characters.mathpairs
+local filters = allocate()
+characters.filters = filters
-characters.filters = allocate()
-local filters = characters.filters
+local utffilters = { }
+characters.filters.utf = utffilters
-filters.utf = filters.utf or { }
-local utffilters = characters.filters.utf
+local data = characters.data
-- is characters.combined cached?
@@ -59,7 +72,9 @@ local utffilters = characters.filters.utf
to depend on collapsing.</p>
--ldx]]--
--- for the moment, will be entries in char-def.lua
+-- for the moment, will be entries in char-def.lua .. this is just a subset that for
+-- typographic (font) reasons we want to have split ... if we decompose all, we get
+-- problems with fonts
local decomposed = allocate {
["IJ"] = "IJ",
@@ -81,24 +96,97 @@ local decomposed = allocate {
characters.decomposed = decomposed
-local function initialize() -- maybe only 'mn'
+-- local function initialize() -- maybe only 'mn'
+-- local data = characters.data
+-- for unicode, v in next, data do
+-- -- using vs and first testing for length is faster (.02->.01 s)
+-- local vs = v.specials
+-- if vs and #vs == 3 then
+-- local vc = vs[1]
+-- if vc == "char" then
+-- local one, two = vs[2], vs[3]
+-- if data[two].category == "mn" then
+-- local cgf = combined[one]
+-- if not cgf then
+-- cgf = { [two] = unicode }
+-- combined[one] = cgf
+-- else
+-- cgf[two] = unicode
+-- end
+-- end
+-- local first, second, combination = utfchar(one), utfchar(two), utfchar(unicode)
+-- local cgf = graphemes[first]
+-- if not cgf then
+-- cgf = { [second] = combination }
+-- graphemes[first] = cgf
+-- else
+-- cgf[second] = combination
+-- end
+-- if v.mathclass or v.mathspec then
+-- local mps = mathpairs[two]
+-- if not mps then
+-- mps = { [one] = unicode }
+-- mathpairs[two] = mps
+-- else
+-- mps[one] = unicode -- here unicode
+-- end
+-- local mps = mathpairs[second]
+-- if not mps then
+-- mps = { [first] = combination }
+-- mathpairs[second] = mps
+-- else
+-- mps[first] = combination
+-- end
+-- end
+-- -- elseif vc == "compat" then
+-- -- else
+-- -- local description = v.description
+-- -- if find(description,"LIGATURE") then
+-- -- if vs then
+-- -- local t = { }
+-- -- for i=2,#vs do
+-- -- t[#t+1] = utfchar(vs[i])
+-- -- end
+-- -- decomposed[utfchar(unicode)] = concat(t)
+-- -- else
+-- -- local vs = v.shcode
+-- -- if vs then
+-- -- local t = { }
+-- -- for i=1,#vs do
+-- -- t[i] = utfchar(vs[i])
+-- -- end
+-- -- decomposed[utfchar(unicode)] = concat(t)
+-- -- end
+-- -- end
+-- -- end
+-- end
+-- end
+-- end
+-- initialize = false
+-- characters.initialize = function() end -- when used outside tex
+-- end
+
+local function initialize()
local data = characters.data
+ local function backtrack(v,last,target)
+ local vs = v.specials
+ if vs and #vs == 3 and vs[1] == "char" then
+ local one, two = vs[2], vs[3]
+ local first, second = utfchar(one), utfchar(two) .. last
+ collapsed[first..second] = target
+ backtrack(data[one],second,target)
+ end
+ end
for unicode, v in next, data do
- -- using vs and first testing for length is faster (.02->.01 s)
local vs = v.specials
- local vc = vs and #vs == 3 and vs[1]
- if vc == "char" then
+ if vs and #vs == 3 and vs[1] == "char" then
+ --
local one, two = vs[2], vs[3]
- if data[two].category == "mn" then
- local cgf = combined[one]
- if not cgf then
- cgf = { [two] = unicode }
- combined[one] = cgf
- else
- cgf[two] = unicode
- end
- end
local first, second, combination = utfchar(one), utfchar(two), utfchar(unicode)
+ --
+ collapsed[first..second] = combination
+ backtrack(data[one],second,combination)
+ -- sort of obsolete:
local cgf = graphemes[first]
if not cgf then
cgf = { [second] = combination }
@@ -106,6 +194,7 @@ local function initialize() -- maybe only 'mn'
else
cgf[second] = combination
end
+ --
if v.mathclass or v.mathspec then
local mps = mathpairs[two]
if not mps then
@@ -122,35 +211,254 @@ local function initialize() -- maybe only 'mn'
mps[first] = combination
end
end
- -- elseif vc == "compat" then
- -- else
- -- local description = v.description
- -- if find(description,"LIGATURE") then
- -- if vs then
- -- local t = { }
- -- for i=2,#vs do
- -- t[#t+1] = utfchar(vs[i])
- -- end
- -- decomposed[utfchar(unicode)] = concat(t)
- -- else
- -- local vs = v.shcode
- -- if vs then
- -- local t = { }
- -- for i=1,#vs do
- -- t[i] = utfchar(vs[i])
- -- end
- -- decomposed[utfchar(unicode)] = concat(t)
- -- end
- -- end
- -- end
end
end
initialize = false
- characters.initialize = function() end -- when used outside tex
+ characters.initialize = function() end
end
characters.initialize = initialize
+--[[ldx--
+<p>The next variant has lazy token collecting, on a 140 page mk.tex this saves
+about .25 seconds, which is understandable because we have no graphemes and
+not collecting tokens is not only faster but also saves garbage collecting.
+</p>
+--ldx]]--
+
+local skippable = { }
+local filesuffix = file.suffix
+
+function utffilters.setskippable(suffix,value)
+ if value == nil then
+ value = true
+ end
+ if type(suffix) == "table" then
+ for i=1,#suffix do
+ skippable[suffix[i]] = value
+ end
+ else
+ skippable[suffix] = value
+ end
+end
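+
+-- For instance (sketch): the removed code further down hard-wired the mkiv/mkvi
+-- suffixes, which would now be registered along these lines:
+--
+-- utffilters.setskippable { "mkiv", "mkvi" }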
+
+-- function utffilters.collapse(str,filename) -- we can make high a separate pass (never needed with collapse)
+-- if skippable[filesuffix(filename)] then
+-- return str
+-- -- elseif find(filename,"^virtual://") then
+-- -- return str
+-- -- else
+-- -- -- print("\n"..filename)
+-- end
+-- if str and str ~= "" then
+-- local nstr = #str
+-- if nstr > 1 then
+-- if initialize then -- saves a call
+-- initialize()
+-- end
+-- local tokens, t, first, done, n = { }, 0, false, false, 0
+-- for second in utfcharacters(str) do
+-- if done then
+-- if first then
+-- if second == " " then
+-- t = t + 1
+-- tokens[t] = first
+-- first = second
+-- else
+-- -- local crs = high[second]
+-- -- if crs then
+-- -- t = t + 1
+-- -- tokens[t] = first
+-- -- first = crs
+-- -- else
+-- local cgf = graphemes[first]
+-- if cgf and cgf[second] then
+-- first = cgf[second]
+-- else
+-- t = t + 1
+-- tokens[t] = first
+-- first = second
+-- end
+-- -- end
+-- end
+-- elseif second == " " then
+-- first = second
+-- else
+-- -- local crs = high[second]
+-- -- if crs then
+-- -- first = crs
+-- -- else
+-- first = second
+-- -- end
+-- end
+-- elseif second == " " then
+-- first = nil
+-- n = n + 1
+-- else
+-- -- local crs = high[second]
+-- -- if crs then
+-- -- for s in utfcharacters(str) do
+-- -- if n == 1 then
+-- -- break
+-- -- else
+-- -- t = t + 1
+-- -- tokens[t] = s
+-- -- n = n - 1
+-- -- end
+-- -- end
+-- -- if first then
+-- -- t = t + 1
+-- -- tokens[t] = first
+-- -- end
+-- -- first = crs
+-- -- done = true
+-- -- else
+-- local cgf = graphemes[first]
+-- if cgf and cgf[second] then
+-- for s in utfcharacters(str) do
+-- if n == 1 then
+-- break
+-- else
+-- t = t + 1
+-- tokens[t] = s
+-- n = n - 1
+-- end
+-- end
+-- first = cgf[second]
+-- done = true
+-- else
+-- first = second
+-- n = n + 1
+-- end
+-- -- end
+-- end
+-- end
+-- if done then
+-- if first then
+-- t = t + 1
+-- tokens[t] = first
+-- end
+-- return concat(tokens) -- seldom called
+-- end
+-- elseif nstr > 0 then
+-- return high[str] or str -- this will go from here
+-- end
+-- end
+-- return str
+-- end
+
+-- this is about twice as fast
+
+local p_collapse = nil -- so we can reset if needed
+
+local function prepare()
+ if initialize then
+ initialize()
+ end
+ local tree = utfchartabletopattern(collapsed)
+ p_collapse = Cs((tree/collapsed + p_utf8character)^0 * P(-1)) -- the P(1) is needed in order to accept non utf
+end
+
+function utffilters.collapse(str,filename)
+ if not p_collapse then
+ prepare()
+ end
+ if not str or str == "" or #str == 1 then
+ return str
+ elseif filename and skippable[filesuffix(filename)] then -- we could hash the collapsables or do a quicker test
+ return str
+ else
+ return lpegmatch(p_collapse,str) or str
+ end
+end
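+
+-- A small, untested illustration, using the same characters as the timing test at
+-- the end of this file: "a" followed by a combining circumflex collapses into the
+-- composed "â":
+--
+-- print(utffilters.collapse("a" .. "̂")) -- expected: "â"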
+
+-- function utffilters.decompose(str)
+-- if str and str ~= "" then
+-- local nstr = #str
+-- if nstr > 1 then
+-- -- if initialize then -- saves a call
+-- -- initialize()
+-- -- end
+-- local tokens, t, done, n = { }, 0, false, 0
+-- for s in utfcharacters(str) do
+-- local dec = decomposed[s]
+-- if dec then
+-- if not done then
+-- if n > 0 then
+-- for s in utfcharacters(str) do
+-- if n == 0 then
+-- break
+-- else
+-- t = t + 1
+-- tokens[t] = s
+-- n = n - 1
+-- end
+-- end
+-- end
+-- done = true
+-- end
+-- t = t + 1
+-- tokens[t] = dec
+-- elseif done then
+-- t = t + 1
+-- tokens[t] = s
+-- else
+-- n = n + 1
+-- end
+-- end
+-- if done then
+-- return concat(tokens) -- seldom called
+-- end
+-- end
+-- end
+-- return str
+-- end
+
+-- local replacer = nil
+-- local finder = nil
+--
+-- function utffilters.decompose(str) -- 3 to 4 times faster than the above
+-- if not replacer then
+-- if initialize then
+-- initialize()
+-- end
+-- local tree = utfchartabletopattern(decomposed)
+-- finder = lpeg.finder(tree,false,true)
+-- replacer = lpeg.replacer(tree,decomposed,false,true)
+-- end
+-- if str and str ~= "" and #str > 1 and lpegmatch(finder,str) then
+-- return lpegmatch(replacer,str)
+-- end
+-- return str
+-- end
+
+local p_decompose = nil
+
+local function prepare()
+ if initialize then
+ initialize()
+ end
+ local tree = utfchartabletopattern(decomposed)
+ p_decompose = Cs((tree/decomposed + p_utf8character)^0 * P(-1))
+end
+
+function utffilters.decompose(str,filename) -- 3 to 4 times faster than the above
+ if not p_decompose then
+ prepare()
+ end
+ if not str or str == "" or #str < 2 then
+ return str
+ elseif filename and skippable[filesuffix(filename)] then
+ return str
+ else
+ return lpegmatch(p_decompose,str) or str
+ end
+end
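+
+-- And a matching sketch for decompose, using the ligature from the timing test
+-- near the end of this file:
+--
+-- print(utffilters.decompose("ffl")) -- expected: "ffl"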
+
-- utffilters.addgrapheme(utfchar(318),'l','\string~')
-- utffilters.addgrapheme('c','a','b')
@@ -163,265 +471,178 @@ function utffilters.addgrapheme(result,first,second) -- can be U+ 0x string or u
else
graphemes[first][second] = result
end
+ local pair = first .. second
+ if not collapsed[pair] then
+ collapsed[pair] = result
+ p_collapse = nil -- so that the collapse pattern gets rebuilt
+ end
end
---[[ldx--
-<p>In order to deal with 8-bit output, we need to find a way to go from <l n='utf'/> to
-8-bit. This is handled in the <l n='luatex'/> engine itself.</p>
-
-<p>This leaves us problems with characters that are specific to <l n='tex'/> like
-<type>{}</type>, <type>$</type> and alike. We can remap some chars that tex input files
-are sensitive for to a private area (while writing to a utility file) and revert then
-to their original slot when we read in such a file. Instead of reverting, we can (when
-we resolve characters to glyphs) map them to their right glyph there. For this purpose
-we can use the private planes 0x0F0000 and 0x100000.</p>
---ldx]]--
-
-local low = allocate()
-local high = allocate()
-local escapes = allocate()
-local special = "~#$%^&_{}\\|" -- "~#$%{}\\|"
-
-local private = {
- low = low,
- high = high,
- escapes = escapes,
-}
-
-utffilters.private = private
-
-local tohigh = lpeg.replacer(low) -- frozen, only for basic tex
-local tolow = lpeg.replacer(high) -- frozen, only for basic tex
+if interfaces then -- eventually this goes to char-ctx.lua
-lpegpatterns.utftohigh = tohigh
-lpegpatterns.utftolow = tolow
+ interfaces.implement {
+ name = "addgrapheme",
+ actions = utffilters.addgrapheme,
+ arguments = { "string", "string", "string" }
+ }
-function utffilters.harden(str)
- return lpegmatch(tohigh,str)
end
-function utffilters.soften(str)
- return lpegmatch(tolow,str)
+-- --
+
+local p_reorder = nil
+
+-- local sorter = function(a,b) return b[2] < a[2] end
+--
+-- local function swapper(s,p,t)
+-- local old = { }
+-- for i=1,#t do
+-- old[i] = t[i][1]
+-- end
+-- old = concat(old)
+-- sort(t,sorter)
+-- for i=1,#t do
+-- t[i] = t[i][1]
+-- end
+-- local new = concat(t)
+-- if old ~= new then
+-- print("reordered",old,"->",new)
+-- end
+-- return p, new
+-- end
+
+-- -- the next one is not stable for similar weights
+
+local sorter = function(a,b)
+ return b[2] < a[2]
end
-local function set(ch)
- local cb
- if type(ch) == "number" then
- cb, ch = ch, utfchar(ch)
- else
- cb = utfbyte(ch)
- end
- if cb < 256 then
- escapes[ch] = "\\" .. ch
- low[ch] = utfchar(0x0F0000 + cb)
- if ch == "%" then
- ch = "%%" -- nasty, but we need this as in replacements (also in lpeg) % is interpreted
- end
- high[utfchar(0x0F0000 + cb)] = ch
+local function swapper(s,p,t)
+ sort(t,sorter)
+ for i=1,#t do
+ t[i] = t[i][1]
end
+ return p, concat(t)
end
-private.set = set
-
--- function private.escape (str) return gsub(str,"(.)", escapes) end
--- function private.replace(str) return utfgsub(str,"(.)", low ) end
--- function private.revert (str) return utfgsub(str,"(.)", high ) end
-
-private.escape = utf.remapper(escapes)
-private.replace = utf.remapper(low)
-private.revert = utf.remapper(high)
-
-for ch in gmatch(special,".") do set(ch) end
-
---[[ldx--
-<p>We get a more efficient variant of this when we integrate
-replacements in collapser. This more or less renders the previous
-private code redundant. The following code is equivalent but the
-first snippet uses the relocated dollars.</p>
-
-<typing>
-[󰀤x󰀤] [$x$]
-</typing>
-
-<p>The next variant has lazy token collecting, on a 140 page mk.tex this saves
-about .25 seconds, which is understandable because we have no graphemes and
-not collecting tokens is not only faster but also saves garbage collecting.
-</p>
---ldx]]--
-
-local skippable = table.tohash { "mkiv", "mkvi" }
-local filesuffix = file.suffix
+-- -- the next one keeps similar weights in the original order
+--
+-- local sorter = function(a,b)
+-- local b2, a2 = b[2], a[2]
+-- if a2 == b2 then
+-- return b[3] > a[3]
+-- else
+-- return b2 < a2
+-- end
+-- end
+--
+-- local function swapper(s,p,t)
+-- for i=1,#t do
+-- t[i][3] = i
+-- end
+-- sort(t,sorter)
+-- for i=1,#t do
+-- t[i] = t[i][1]
+-- end
+-- return p, concat(t)
+-- end
+
+-- at some point exceptions will become an option, for now it's an experiment
+-- to overcome bugs (that have become features) in unicode .. or we might decide
+-- for an extra ordering key in char-def that takes precedence over combining
+
+local exceptions = {
+ -- frozen unicode bug
+ ["َّ"] = "َّ", -- U+64E .. U+651 => U+651 .. U+64E
+}
-function utffilters.collapse(str,filename) -- we can make high a seperate pass (never needed with collapse)
- if skippable[filesuffix(filename)] then
- return str
- -- elseif find(filename,"^virtual://") then
- -- return str
- -- else
- -- -- print("\n"..filename)
- end
- if str and str ~= "" then
- local nstr = #str
- if nstr > 1 then
- if initialize then -- saves a call
- initialize()
- end
- local tokens, t, first, done, n = { }, 0, false, false, 0
- for second in utfcharacters(str) do
- if done then
- if first then
- if second == " " then
- t = t + 1
- tokens[t] = first
- first = second
- else
- -- local crs = high[second]
- -- if crs then
- -- t = t + 1
- -- tokens[t] = first
- -- first = crs
- -- else
- local cgf = graphemes[first]
- if cgf and cgf[second] then
- first = cgf[second]
- else
- t = t + 1
- tokens[t] = first
- first = second
- end
- -- end
- end
- elseif second == " " then
- first = second
- else
- -- local crs = high[second]
- -- if crs then
- -- first = crs
- -- else
- first = second
- -- end
- end
- elseif second == " " then
- first = nil
- n = n + 1
- else
- -- local crs = high[second]
- -- if crs then
- -- for s in utfcharacters(str) do
- -- if n == 1 then
- -- break
- -- else
- -- t = t + 1
- -- tokens[t] = s
- -- n = n - 1
- -- end
- -- end
- -- if first then
- -- t = t + 1
- -- tokens[t] = first
- -- end
- -- first = crs
- -- done = true
- -- else
- local cgf = graphemes[first]
- if cgf and cgf[second] then
- for s in utfcharacters(str) do
- if n == 1 then
- break
- else
- t = t + 1
- tokens[t] = s
- n = n - 1
- end
- end
- first = cgf[second]
- done = true
- else
- first = second
- n = n + 1
- end
- -- end
- end
- end
- if done then
- if first then
- t = t + 1
- tokens[t] = first
- end
- return concat(tokens) -- seldom called
- end
- elseif nstr > 0 then
- return high[str] or str
+local function prepare()
+ local hash = { }
+ for k, v in sortedhash(characters.data) do
+ local combining = v.combining -- v.ordering or v.combining
+ if combining then
+ hash[utfchar(k)] = { utfchar(k), combining, 0 } -- slot 3 can be used in sort
end
end
- return str
+ local e = utfchartabletopattern(exceptions)
+ local p = utfchartabletopattern(hash)
+ p_reorder = Cs((e/exceptions + Cmt(Ct((p/hash)^2),swapper) + p_utf8character)^0) * P(-1)
end
-function utffilters.decompose(str)
- if str and str ~= "" then
- local nstr = #str
- if nstr > 1 then
- -- if initialize then -- saves a call
- -- initialize()
- -- end
- local tokens, t, done, n = { }, 0, false, 0
- for s in utfcharacters(str) do
- local dec = decomposed[s]
- if dec then
- if not done then
- if n > 0 then
- for s in utfcharacters(str) do
- if n == 1 then
- break
- else
- t = t + 1
- tokens[t] = s
- n = n - 1
- end
- end
- end
- done = true
- end
- t = t + 1
- tokens[t] = dec
- elseif done then
- t = t + 1
- tokens[t] = s
- else
- n = n + 1
- end
- end
- if done then
- return concat(tokens) -- seldom called
- end
- end
+function utffilters.reorder(str,filename)
+ if not p_reorder then
+ prepare()
+ end
+ if not str or str == "" or #str < 2 then
+ return str
+ elseif filename and skippable[filesuffix(filename)] then
+ return str
+ else
+ return lpegmatch(p_reorder,str) or str
end
return str
end
-local sequencers = utilities.sequencers
-
-if sequencers then
-
- local textfileactions = resolvers.openers.helpers.textfileactions
-
- sequencers.appendaction (textfileactions,"system","characters.filters.utf.collapse")
- sequencers.disableaction(textfileactions,"characters.filters.utf.collapse")
-
- sequencers.appendaction (textfileactions,"system","characters.filters.utf.decompose")
- sequencers.disableaction(textfileactions,"characters.filters.utf.decompose")
-
- function characters.filters.utf.enable()
- sequencers.enableaction(textfileactions,"characters.filters.utf.collapse")
- sequencers.enableaction(textfileactions,"characters.filters.utf.decompose")
+-- local collapse = utffilters.collapse
+-- local decompose = utffilters.decompose
+-- local preprocess = utffilters.preprocess
+--
+-- local c1, c2, c3 = "a", "̂", "̃"
+-- local r2, r3 = "â", "ẫ"
+-- local l1 = "ffl"
+--
+-- local str = c1..c2..c3 .. " " .. c1..c2 .. " " .. l1
+-- local res = r3 .. " " .. r2 .. " " .. "ffl"
+--
+-- local text = io.loaddata("t:/sources/tufte.tex")
+--
+-- local function test(n)
+-- local data = text .. string.rep(str,100) .. text
+-- local okay = text .. string.rep(res,100) .. text
+-- local t = os.clock()
+-- for i=1,10000 do
+-- collapse(data)
+-- decompose(data)
+-- -- preprocess(data)
+-- end
+-- print(os.clock()-t,decompose(collapse(data))==okay,decompose(collapse(str)))
+-- end
+--
+-- test(050)
+-- test(150)
+--
+-- local old = "foo" .. string.char(0xE1) .. "bar"
+-- local new = collapse(old)
+-- print(old,new)
+
+-- local one_old = "فَأَصَّدَّقَ دَّ" local one_new = utffilters.reorder(one_old)
+-- local two_old = "فَأَصَّدَّقَ دَّ" local two_new = utffilters.reorder(two_old)
+--
+-- print(one_old,two_old,one_old==two_old,false)
+-- print(one_new,two_new,one_new==two_new,true)
+--
+-- local test = "foo" .. utf.reverse("ؚ" .. "ً" .. "ٌ" .. "ٍ" .. "َ" .. "ُ" .. "ِ" .. "ّ" .. "ْ" ) .. "bar"
+-- local done = utffilters.reorder(test)
+--
+-- print(test,done,test==done,false)
+
+local f_default = formatters["[%U] "]
+local f_description = formatters["[%s] "]
+
+local function convert(n)
+ local d = data[n]
+ d = d and d.description
+ if d then
+ return f_description(d)
+ else
+ return f_default(n)
end
+end
- directives.register("filters.utf.collapse", function(v)
- sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.collapse")
- end)
-
- directives.register("filters.utf.decompose", function(v)
- sequencers[v and "enableaction" or "disableaction"](textfileactions,"characters.filters.utf.decompose")
- end)
+local pattern = Cs((p_utf8byte / convert)^1)
+function utffilters.verbose(data)
+ return data and lpegmatch(pattern,data) or ""
end
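+
+-- A hedged usage sketch: every utf character is replaced by its unicode
+-- description (or a [U+....] fallback when there is none), so one can expect
+-- something along the lines of:
+--
+-- print(utffilters.verbose("âb"))
+-- -- [LATIN SMALL LETTER A WITH CIRCUMFLEX] [LATIN SMALL LETTER B]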
+
+return characters
diff --git a/tex/context/base/char-utf.mkiv b/tex/context/base/char-utf.mkiv
index 280e7ef6d..fe9f402ef 100644
--- a/tex/context/base/char-utf.mkiv
+++ b/tex/context/base/char-utf.mkiv
@@ -22,24 +22,17 @@
\unprotect
+\registerctxluafile{char-def}{1.001}
+\registerctxluafile{char-ini}{1.001}
\registerctxluafile{char-utf}{1.001}
+\registerctxluafile{char-cjk}{1.001}
%D We enable collapsing (combining characters) by default, but
%D since the source files are rather simple, we postpone the
%D initialization till runtime.
-% resolvers.filters.install('utf',characters.filters.utf.collapse)
-
-% \appendtoks
-% \ctxlua{
-% local textfileactions = resolvers.openers.helpers.textfileactions
-% utilities.sequencers.enableaction(textfileactions,"characters.filters.utf.collapse")
-% utilities.sequencers.enableaction(textfileactions,"characters.filters.utf.decompose")
-% }%
-% \to \everyjob
-
\appendtoks
- \ctxlua{characters.filters.utf.enable()}%
+ \clf_enableutf % not needed when we create a format so we do it now
\to \everyjob
%D The next one influences input parsing.
@@ -49,6 +42,6 @@
%D \stoptyping
\unexpanded\def\definecomposedutf#1 #2 #3 %
- {\ctxlua{characters.filters.utf.addgrapheme("#1","#2","#3")}}
+ {\clf_addgrapheme{#1}{#2}{#3}}
\protect
diff --git a/tex/context/base/chem-ini.lua b/tex/context/base/chem-ini.lua
index 10db1a1e4..f7d10ffa2 100644
--- a/tex/context/base/chem-ini.lua
+++ b/tex/context/base/chem-ini.lua
@@ -32,12 +32,31 @@ function chemistry.molecule(str)
return lpegmatch(moleculeparser,str)
end
-function commands.molecule(str)
- if trace_molecules then
- local rep = lpegmatch(moleculeparser,str)
- report_chemistry("molecule %a becomes %a",str,rep)
- context(rep)
- else
- context(lpegmatch(moleculeparser,str))
- end
-end
+interfaces.implement {
+ name = "molecule",
+ arguments = "string",
+ actions = function(str)
+ if trace_molecules then
+ local rep = lpegmatch(moleculeparser,str)
+ report_chemistry("molecule %a becomes %a",str,rep)
+ context(rep)
+ else
+ context(lpegmatch(moleculeparser,str))
+ end
+ end,
+}
+
+-- interfaces.implement {
+-- name = "molecule",
+-- scope = "private",
+-- action = function()
+-- local str = scanstring()
+-- if trace_molecules then
+-- local rep = lpegmatch(moleculeparser,str)
+-- report_chemistry("molecule %a becomes %a",str,rep)
+-- context(rep)
+-- else
+-- context(lpegmatch(moleculeparser,str))
+-- end
+-- end,
+-- }
diff --git a/tex/context/base/chem-ini.mkiv b/tex/context/base/chem-ini.mkiv
index 357bceefa..9a44b4f1a 100644
--- a/tex/context/base/chem-ini.mkiv
+++ b/tex/context/base/chem-ini.mkiv
@@ -37,7 +37,7 @@
%D \typebuffer \getbuffer
\unexpanded\def\molecule#1%
- {\ctxcommand{molecule(\!!bs#1\!!es)}}
+ {\clf_molecule{#1}}
%D For old times sake:
diff --git a/tex/context/base/chem-str.lua b/tex/context/base/chem-str.lua
index 347363345..d724394bb 100644
--- a/tex/context/base/chem-str.lua
+++ b/tex/context/base/chem-str.lua
@@ -44,6 +44,8 @@ local P, R, S, C, Cs, Ct, Cc, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpe
local variables = interfaces and interfaces.variables
local commands = commands
local context = context
+local implement = interfaces.implement
+
local formatters = string.formatters
local texgetcount = tex.getcount
@@ -60,11 +62,14 @@ local mpnamedcolor = attributes.colors.mpnamedcolor
local topoints = number.topoints
local todimen = string.todimen
+local trialtypesetting = context.trialtypesetting
+
chemistry = chemistry or { }
local chemistry = chemistry
chemistry.instance = "chemistry"
chemistry.format = "metafun"
+chemistry.method = "double"
chemistry.structures = 0
local common_keys = {
@@ -147,6 +152,8 @@ local one_keys = {
es = "line",
ed = "line",
et = "line",
+ au = "line",
+ ad = "line",
cz = "text",
rot = "transform",
dir = "transform",
@@ -156,6 +163,7 @@ local one_keys = {
local ring_keys = {
db = "line",
+ hb = "line",
br = "line",
lr = "line",
rr = "line",
@@ -316,7 +324,7 @@ local pattern =
-- print(lpegmatch(pattern,"RZ13=x")) -- 1 RZ false false table x
local f_initialize = 'if unknown context_chem : input mp-chem.mpiv ; fi ;'
-local f_start_structure = formatters['chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);']
+local f_start_structure = formatters['chem_start_structure(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);']
local f_set_trace_bounds = formatters['chem_trace_boundingbox := %l ;']
local f_stop_structure = 'chem_stop_structure;'
local f_start_component = 'chem_start_component;'
@@ -419,7 +427,11 @@ local function process(level,spec,text,n,rulethickness,rulecolor,offset,default_
insert(sstack,variant)
m = m + 1 ; metacode[m] = syntax.save.direct
elseif operation == "restore" then
- variant = remove(sstack)
+ if #sstack > 0 then
+ variant = remove(sstack)
+ else
+ report_chemistry("restore without save")
+ end
local ss = syntax[variant]
keys, max = ss.keys, ss.max
m = m + 1 ; metacode[m] = syntax.restore.direct
@@ -536,6 +548,8 @@ local function process(level,spec,text,n,rulethickness,rulecolor,offset,default_
if not t then txt, t = fetch(txt) end
if t then
t = molecule(processor_tostring(t))
+-- local p, t = processors.split(t)
+-- m = m + 1 ; metacode[m] = f_text(operation,p or align,variant,si,t)
m = m + 1 ; metacode[m] = f_text(operation,align,variant,si,t)
end
end
@@ -605,7 +619,7 @@ end
--
-- rulethickness in points
-local function checked(d,factor,unit,scale)
+local function checked(d,bondlength,unit,scale)
if d == v_none then
return 0
end
@@ -613,9 +627,9 @@ local function checked(d,factor,unit,scale)
if not n then
-- assume dimen
elseif n >= 10 or n <= -10 then
- return factor * unit * n / 1000
+ return bondlength * unit * n / 1000
else
- return factor * unit * n
+ return bondlength * unit * n
end
local n = todimen(d)
if n then
@@ -625,7 +639,7 @@ local function checked(d,factor,unit,scale)
end
end
-local function calculated(height,bottom,top,factor,unit,scale)
+local function calculated(height,bottom,top,bondlength,unit,scale)
local scaled = 0
if height == v_none then
-- this always wins
@@ -634,24 +648,24 @@ local function calculated(height,bottom,top,factor,unit,scale)
top = "0pt"
elseif height == v_fit then
height = "true"
- bottom = bottom == v_fit and "true" or topoints(checked(bottom,factor,unit,scale))
- top = top == v_fit and "true" or topoints(checked(top, factor,unit,scale))
+ bottom = bottom == v_fit and "true" or topoints(checked(bottom,bondlength,unit,scale))
+ top = top == v_fit and "true" or topoints(checked(top, bondlength,unit,scale))
else
- height = checked(height,factor,unit,scale)
+ height = checked(height,bondlength,unit,scale)
if bottom == v_fit then
if top == v_fit then
bottom = height / 2
top = bottom
else
- top = checked(top,factor,unit,scale)
+ top = checked(top,bondlength,unit,scale)
bottom = height - top
end
elseif top == v_fit then
- bottom = checked(bottom,factor,unit,scale)
+ bottom = checked(bottom,bondlength,unit,scale)
top = height - bottom
else
- bottom = checked(bottom,factor,unit,scale)
- top = checked(top, factor,unit,scale)
+ bottom = checked(bottom,bondlength,unit,scale)
+ top = checked(top, bondlength,unit,scale)
local ratio = height / (bottom+top)
bottom = bottom * ratio
top = top * ratio
@@ -669,7 +683,7 @@ function chemistry.start(settings)
local width = settings.width or v_fit
local height = settings.height or v_fit
local unit = settings.unit or 655360
- local factor = settings.factor or 3
+ local bondlength = settings.factor or 3
local rulethickness = settings.rulethickness or 65536
local rulecolor = settings.rulecolor or ""
local axiscolor = settings.framecolor or ""
@@ -683,7 +697,7 @@ function chemistry.start(settings)
--
align = settings.symalign or "auto"
if trace_structure then
- report_chemistry("unit %p, factor %s, symalign %s",unit,factor,align)
+ report_chemistry("unit %p, bondlength %s, symalign %s",unit,bondlength,align)
end
if align ~= "" then
align = "." .. align
@@ -713,10 +727,10 @@ function chemistry.start(settings)
local sp_width = 0
local sp_height = 0
--
- width, left, right, sp_width = calculated(width, left, right,factor,unit,scale)
- height, bottom, top, sp_height = calculated(height,bottom,top, factor,unit,scale)
+ width, left, right, sp_width = calculated(width, left, right,bondlength,unit,scale)
+ height, bottom, top, sp_height = calculated(height,bottom,top, bondlength,unit,scale)
--
- if width ~= "true" and height ~= "true" and texgetcount("@@trialtypesetting") ~= 0 then
+ if width ~= "true" and height ~= "true" and trialtypesetting() then
if trace_structure then
report_chemistry("skipping trial run")
end
@@ -736,7 +750,7 @@ function chemistry.start(settings)
metacode[#metacode+1] = f_start_structure(
chemistry.structures,
left, right, top, bottom,
- rotation, topoints(unit), factor, topoints(offset),
+ rotation, topoints(unit), bondlength, scale, topoints(offset),
tostring(settings.axis == v_on), topoints(rulethickness), tostring(axiscolor)
)
metacode[#metacode+1] = f_set_trace_bounds(trace_boundingbox) ;
@@ -757,6 +771,7 @@ function chemistry.stop()
metapost.graphic {
instance = chemistry.instance,
format = chemistry.format,
+ method = chemistry.method,
data = mpcode,
definitions = f_initialize,
}
@@ -765,14 +780,12 @@ function chemistry.stop()
end
end
-function chemistry.component(spec,text,settings)
+function chemistry.component(spec,text,rulethickness,rulecolor)
if metacode then
- rulethickness, rulecolor, offset = settings.rulethickness, settings.rulecolor
local spec = settings_to_array_with_repeat(spec,true) -- no lower?
local text = settings_to_array_with_repeat(text,true)
- -- inspect(spec)
metacode[#metacode+1] = f_start_component
- process(1,spec,text,1,rulethickness,rulecolor) -- offset?
+ process(1,spec,text,1,rulethickness,rulecolor)
metacode[#metacode+1] = f_stop_component
end
end
@@ -785,11 +798,52 @@ end)
-- interfaces
-commands.undefinechemical = chemistry.undefine
-commands.definechemical = chemistry.define
-commands.startchemical = chemistry.start
-commands.stopchemical = chemistry.stop
-commands.chemicalcomponent = chemistry.component
+implement {
+ name = "undefinechemical",
+ actions = chemistry.undefine,
+ arguments = "string"
+}
+
+implement {
+ name = "definechemical",
+ actions = chemistry.define,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "startchemical",
+ actions = chemistry.start,
+ arguments = {
+ {
+ { "width" },
+ { "height" },
+ { "left" },
+ { "right" },
+ { "top" },
+ { "bottom" },
+ { "scale" },
+ { "rotation" },
+ { "symalign" },
+ { "axis" },
+ { "framecolor" },
+ { "rulethickness" },
+ { "offset" },
+ { "unit" },
+ { "factor" }
+ }
+ }
+}
+
+implement {
+ name = "stopchemical",
+ actions = chemistry.stop,
+}
+
+implement {
+ name = "chemicalcomponent",
+ actions = chemistry.component,
+ arguments = { "string", "string", "string", "string" }
+}
-- todo: top / bottom
-- maybe add "=" for double and "≡" for triple?
@@ -806,7 +860,9 @@ local inline = {
["space"] = "\\chemicalspace",
}
-function commands.inlinechemical(spec)
+local ctx_chemicalinline = context.chemicalinline
+
+function chemistry.inlinechemical(spec)
local spec = settings_to_array_with_repeat(spec,true)
for i=1,#spec do
local s = spec[i]
@@ -814,7 +870,13 @@ function commands.inlinechemical(spec)
if inl then
context(inl) -- could be a fast context.sprint
else
- context.chemicalinline(molecule(s))
+ ctx_chemicalinline(molecule(s))
end
end
end
+
+implement {
+ name = "inlinechemical",
+ actions = chemistry.inlinechemical,
+ arguments = "string"
+}
diff --git a/tex/context/base/chem-str.mkiv b/tex/context/base/chem-str.mkiv
index d9ec1842b..f18b422f2 100644
--- a/tex/context/base/chem-str.mkiv
+++ b/tex/context/base/chem-str.mkiv
@@ -62,12 +62,12 @@
%D a historical and practical reason (like shared keys with different meaning
%D that could clash, e.g.\ align).
-\defineMPinstance
+\defineMPinstance % not really needed as we set it in lua
[chemistry]
[\s!format=metafun,
%\s!extensions=\v!yes, % Should we add extensions and initializations?
%\s!initializations=\v!yes, % Would this give EmWidth, etc.?
- ]
+ \c!method=\s!double]
\startMPdefinitions{chemistry}
input mp-chem.mpiv ;
@@ -88,7 +88,7 @@
{\startnointerference
\edef\currentdefinedchemical{#1}%
\let\chemical\chem_chemical_nested
- \ctxcommand{undefinechemical("#1")}%
+ \clf_undefinechemical{#1}%
#2% flush
\stopnointerference}
@@ -96,7 +96,7 @@
{\dodoubleempty\chem_chemical_nested_indeed}
\def\chem_chemical_nested_indeed[#1][#2]%
- {\ctxcommand{definechemical("\currentdefinedchemical",\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es)}}
+ {\clf_definechemical{\currentdefinedchemical}{#1}{\detokenize{#2}}}
% chemical symbols
@@ -149,39 +149,39 @@
\settrue\indisplaychemical
\forgetall
\ifsecondargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setupcurrentchemical[#1]}% same as \currentchemical
{\edef\currentchemical{#1}%
\setupcurrentchemical[#2]}%
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\setupcurrentchemical[#1]}% same as \currentchemical
{\edef\currentchemical{#1}}%
\fi\fi
\the\everystructurechemical
\setbox\b_chem_result\hbox\bgroup
- \ctxcommand{startchemical {
- width = "\chemicalparameter\c!width",
- height = "\chemicalparameter\c!height",
- left = "\chemicalparameter\c!left",
- right = "\chemicalparameter\c!right",
- top = "\chemicalparameter\c!top",
- bottom = "\chemicalparameter\c!bottom",
- scale = "\chemicalparameter\c!scale",
- rotation = "\chemicalparameter\c!rotation",
- symalign = "\chemicalparameter\c!symalign",
- axis = "\chemicalparameter\c!axis",
- framecolor = "\MPcolor{\chemicalparameter\c!framecolor}",
- rulethickness = \number\dimexpr\chemicalparameter\c!rulethickness\relax,
- offset = \number\dimexpr\chemicalparameter\c!offset\relax,
- unit = \number\dimexpr\chemicalparameter\c!unit\relax,
- factor = \number\chemicalparameter\c!factor,
- } }%
+ \clf_startchemical
+ width {\chemicalparameter\c!width}%
+ height {\chemicalparameter\c!height}%
+ left {\chemicalparameter\c!left}%
+ right {\chemicalparameter\c!right}%
+ top {\chemicalparameter\c!top}%
+ bottom {\chemicalparameter\c!bottom}%
+ scale {\chemicalparameter\c!scale}%
+ rotation {\chemicalparameter\c!rotation}%
+ symalign {\chemicalparameter\c!symalign}%
+ axis {\chemicalparameter\c!axis}%
+ framecolor {\MPcolor{\chemicalparameter\c!framecolor}}%
+ rulethickness {\number\dimexpr\chemicalparameter\c!rulethickness}%
+ offset {\number\dimexpr\chemicalparameter\c!offset}%
+ unit {\number\dimexpr\chemicalparameter\c!unit}%
+ factor {\number\chemicalparameter\c!factor}%
+ \relax
\startnointerference}
\unexpanded\def\stopchemical
{\stopnointerference
- \ctxcommand{stopchemical()}%
+ \clf_stopchemical
\egroup
\d_chem_width \wd\b_chem_result
\d_chem_height\ht\b_chem_result
@@ -222,17 +222,21 @@
\def\strc_chem_indeed_three[#1][#2][#3]%
{\writestatus\m!chemicals{hyperlinked chemicals not yet supported}% todo reference, for the moment ignored
- \ctxcommand{chemicalcomponent(\!!bs#2\!!es, \!!bs\detokenize{#3}\!!es, { % maybe also pass first two args this way
- rulethickness = "\the\dimexpr\chemicalparameter\c!rulethickness\relax", % todo: scaled points
- rulecolor = "\MPcolor{\chemicalparameter\c!rulecolor}" % we can precalculate this for speedup
- } ) }%
+ \clf_chemicalcomponent
+ {#2}%
+ {\detokenize{#3}}%
+ {\the\dimexpr\chemicalparameter\c!rulethickness}% todo: scaled points
+ {\MPcolor{\chemicalparameter\c!rulecolor}}% % we can precalculate this for speedup
+ \relax
\ignorespaces}
\def\strc_chem_indeed_two[#1][#2]%
- {\ctxcommand{chemicalcomponent(\!!bs#1\!!es,\!!bs\detokenize{#2}\!!es, { % maybe also pass first two args this way
- rulethickness = "\the\dimexpr\chemicalparameter\c!rulethickness\relax", % todo: scaled points
- rulecolor = "\MPcolor{\chemicalparameter\c!rulecolor}" % we can precalculate this for speedup
- } ) }%
+ {\clf_chemicalcomponent
+ {#1}%
+ {\detokenize{#2}}%
+ {\the\dimexpr\chemicalparameter\c!rulethickness}% todo: scaled points
+ {\MPcolor{\chemicalparameter\c!rulecolor}}% % we can precalculate this for speedup
+ \relax
\ignorespaces}
\appendtoks
@@ -311,8 +315,8 @@
\def\chem_arrow_construct#1#2#3%
{\enspace
\mathematics{#1%
- {\strut\hbox \s!spread 2\emwidth{\hss\ctxcommand{inlinechemical(\!!bs#3\!!es)}\hss}}% {\strut\hbox \s!spread 2em{\hss#3\hss}}}%
- {\strut\hbox \s!spread 2\emwidth{\hss\ctxcommand{inlinechemical(\!!bs#2\!!es)}\hss}}}% {\strut\hbox \s!spread 2em{\hss#2\hss}}%
+ {\strut\hbox \s!spread 2\emwidth{\hss\clf_inlinechemical{#3}\hss}}% {\strut\hbox \s!spread 2em{\hss#3\hss}}}%
+ {\strut\hbox \s!spread 2\emwidth{\hss\clf_inlinechemical{#2}\hss}}}% {\strut\hbox \s!spread 2em{\hss#2\hss}}%
\enspace}
% special macros (probably needs some more work)
@@ -505,7 +509,7 @@
\begingroup
\scratchcounter\normalmathstyle
\usechemicalstyleandcolor\c!style\c!color
- \hbox{\mathematics{\tf\triggermathstyle\scratchcounter\ctxcommand{inlinechemical(\!!bs#1\!!es)}}}%
+ \hbox{\mathematics{\tf\triggermathstyle\scratchcounter\clf_inlinechemical{#1}}}%
\endgroup}
\unexpanded\def\displaychemical
@@ -531,7 +535,7 @@
\unexpanded\def\inlinechemical#1%
{\dontleavehmode
- \hbox{\usechemicalstyleandcolor\c!style\c!color\ctxcommand{inlinechemical(\!!bs#1\!!es)}}}
+ \hbox{\usechemicalstyleandcolor\c!style\c!color\clf_inlinechemical{#1}}}
\unexpanded\def\chemicalbondrule
{\hbox{\vrule\s!height.75\exheight\s!depth-\dimexpr.75\exheight-\linewidth\relax\s!width\emwidth\relax}}
@@ -705,14 +709,14 @@
\setupchemical
[\c!frame=,
- \c!width=\v!fit, % or unitless number, multiplies scale*EmWidth
- \c!height=\v!fit, % or unitless number, multiplies scale*EmWidth
- \c!left=\v!fit, % or unitless number, multiplies scale*EmWidth
- \c!right=\v!fit, % or unitless number, multiplies scale*EmWidth
- \c!top=\v!fit, % or unitless number, multiplies scale*EmWidth
- \c!bottom=\v!fit, % or unitless number, multiplies scale*EmWidth
+ \c!width=\v!fit, % or unitless number, multiplies scale*unit
+ \c!height=\v!fit, % or unitless number, multiplies scale*unit
+ \c!left=\v!fit, % or unitless number, multiplies scale*unit
+ \c!right=\v!fit, % or unitless number, multiplies scale*unit
+ \c!top=\v!fit, % or unitless number, multiplies scale*unit
+ \c!bottom=\v!fit, % or unitless number, multiplies scale*unit
\c!bodyfont=,
- \c!scale=\v!normal, % small, normal or medium, big, or unitless number (multiplies EmWidth)
+ \c!scale=\v!normal, % small, normal or medium, big, or unitless number (multiplies unit)
\c!size=\v!medium,
\c!textsize=\v!big, % how is textsize used??
\c!axis=\v!off,
@@ -720,7 +724,7 @@
\c!rotation=0, % unitless number (interpreted as degrees)
\c!symalign=\v!auto,
\c!location=, % not yet used (was interaction related in mkii)
- \c!offset=.25em,
+ \c!offset=.25\emwidth,
\c!unit=\emwidth,
\c!factor=3,
\c!color=,
diff --git a/tex/context/base/cldf-bas.lua b/tex/context/base/cldf-bas.lua
index b982fc364..1067a17d5 100644
--- a/tex/context/base/cldf-bas.lua
+++ b/tex/context/base/cldf-bas.lua
@@ -24,10 +24,10 @@ if not modules then modules = { } end modules ['cldf-bas'] = {
-- maybe use context.generics
-local type = type
-local format = string.format
-local utfchar = utf.char
-local concat = table.concat
+local type = type
+local format = string.format
+local utfchar = utf.char
+local concat = table.concat
local context = context
local generics = context.generics
@@ -49,13 +49,23 @@ function context.char(k) -- used as escape too, so don't change to utf
elseif n > 0 then
context([[\char%s\relax]],concat(k,[[\relax\char]]))
end
- elseif k then
- context([[\char%s\relax]],k)
+ else
+ if type(k) == "string" then
+ k = tonumber(k)
+ end
+ if type(k) == "number" then
+ context([[\char%s\relax]],k)
+ end
end
end
function context.utfchar(k)
- context(utfchar(k))
+ if type(k) == "string" then
+ k = tonumber(k)
+ end
+ if type(k) == "number" then
+ context(utfchar(k))
+ end
end
-- plain variants
diff --git a/tex/context/base/cldf-ini.lua b/tex/context/base/cldf-ini.lua
index b29db4090..da284ba5e 100644
--- a/tex/context/base/cldf-ini.lua
+++ b/tex/context/base/cldf-ini.lua
@@ -23,21 +23,33 @@ if not modules then modules = { } end modules ['cldf-ini'] = {
-- todo: context("%bold{total: }%s",total)
-- todo: context.documentvariable("title")
-local tex = tex
+-- during the crited project we ran into the situation that luajittex was 10-20 times
+-- slower than luatex ... after 3 days of testing and probing we finally figured out that
+-- the differences between the lua and luajit hashers can lead to quite a slowdown
+-- in some cases.
-context = context or { }
-local context = context
+-- context(lpeg.match(lpeg.patterns.texescape,"${}"))
+-- context(string.formatters["%!tex!"]("${}"))
+-- context("%!tex!","${}")
-local format, gsub, validstring, stripstring = string.format, string.gsub, string.valid, string.strip
-local next, type, tostring, tonumber, setmetatable, unpack, select = next, type, tostring, tonumber, setmetatable, unpack, select
+local format, validstring, stripstring = string.format, string.valid, string.strip
+local next, type, tostring, tonumber, setmetatable, unpack, select, rawset = next, type, tostring, tonumber, setmetatable, unpack, select, rawset
local insert, remove, concat = table.insert, table.remove, table.concat
local lpegmatch, lpegC, lpegS, lpegP, lpegV, lpegCc, lpegCs, patterns = lpeg.match, lpeg.C, lpeg.S, lpeg.P, lpeg.V, lpeg.Cc, lpeg.Cs, lpeg.patterns
-local formatters = string.formatters -- using formatteds is slower in this case
+local formatters = string.formatters -- using formatters is slower in this case
+
+context = context or { }
+commands = commands or { }
+interfaces = interfaces or { }
+
+local context = context
+local commands = commands
+local interfaces = interfaces
local loaddata = io.loaddata
+local tex = tex
local texsprint = tex.sprint
-local textprint = tex.tprint
local texprint = tex.print
local texwrite = tex.write
local texgetcount = tex.getcount
@@ -64,72 +76,388 @@ local report_cld = logs.reporter("cld","stack")
local processlines = true -- experiments.register("context.processlines", function(v) processlines = v end)
--- for tracing it's easier to have two stacks
+-- In earlier experiments the function table was referred to as lua.calls and the
+-- primitive \luafunction was \luacall.
-local _stack_f_, _n_f_ = { }, 0
-local _stack_n_, _n_n_ = { }, 0
+local luafunctions = lua.get_functions_table and lua.get_functions_table()
+local usedstack = nil
+local showstackusage = false
-local function _store_f_(ti)
- _n_f_ = _n_f_ + 1
- _stack_f_[_n_f_] = ti
- return _n_f_
-end
+-- luafunctions = false
-local function _store_n_(ti)
- _n_n_ = _n_n_ + 1
- _stack_n_[_n_n_] = ti
- return _n_n_
-end
+trackers.register("context.stack",function(v) showstackusage = v end)
+
+local storefunction, flushfunction
+local storenode, flushnode
+local registerfunction, unregisterfunction, reservefunction, knownfunctions, callfunctiononce
+
+-- if luafunctions then
+
+ local freed, nofused, noffreed = { }, 0, 0 -- maybe use the number of @@trialtypesetting
+
+ usedstack = function()
+ return nofused, noffreed
+ end
+
+ flushfunction = function(slot,arg)
+ if arg() then
+ -- keep
+ elseif texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
+ noffreed = noffreed + 1
+ freed[noffreed] = slot
+ luafunctions[slot] = false
+ else
+ -- keep
+ end
+ end
+
+ storefunction = function(arg)
+ local f = function(slot) flushfunction(slot,arg) end
+ if noffreed > 0 then
+ local n = freed[noffreed]
+ freed[noffreed] = nil
+ noffreed = noffreed - 1
+ luafunctions[n] = f
+ return n
+ else
+ nofused = nofused + 1
+ luafunctions[nofused] = f
+ return nofused
+ end
+ end
+
+ flushnode = function(slot,arg)
+ if texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
+ writenode(arg)
+ noffreed = noffreed + 1
+ freed[noffreed] = slot
+ luafunctions[slot] = false
+ else
+ writenode(copynodelist(arg))
+ end
+ end
+
+ storenode = function(arg)
+ local f = function(slot) flushnode(slot,arg) end
+ if noffreed > 0 then
+ local n = freed[noffreed]
+ freed[noffreed] = nil
+ noffreed = noffreed - 1
+ luafunctions[n] = f
+ return n
+ else
+ nofused = nofused + 1
+ luafunctions[nofused] = f
+ return nofused
+ end
+ end
+
+ -- registerfunction = function(f)
+ -- if type(f) == "string" then
+ -- f = loadstring(f)
+ -- end
+ -- if type(f) ~= "function" then
+ -- f = function() report_cld("invalid function %A",f) end
+ -- end
+ -- if noffreed > 0 then
+ -- local n = freed[noffreed]
+ -- freed[noffreed] = nil
+ -- noffreed = noffreed - 1
+ -- luafunctions[n] = f
+ -- return n
+ -- else
+ -- nofused = nofused + 1
+ -- luafunctions[nofused] = f
+ -- return nofused
+ -- end
+ -- end
+
+ storage.storedfunctions = storage.storedfunctions or { }
+ local storedfunctions = storage.storedfunctions
+ local initex = environment.initex
+
+ storage.register("storage/storedfunctions", storedfunctions, "storage.storedfunctions")
+
+ local f_resolve = nil
+ local p_resolve = ((1-lpegP("."))^1 / function(s) f_resolve = f_resolve[s] end * lpegP(".")^0)^1
+
+ function resolvestoredfunction(str)
+ f_resolve = global
+ lpegmatch(p_resolve,str)
+ return f_resolve
+ end
+
+ local function expose(slot,f,...) -- so we can register yet undefined functions
+ local func = resolvestoredfunction(f)
+ if not func then
+ func = function() report_cld("beware: unknown function %i called: %s",slot,f) end
+ end
+ luafunctions[slot] = func
+ return func(...)
+ end
-local function _flush_f_(n)
- local sn = _stack_f_[n]
- if not sn then
- report_cld("data with id %a cannot be found on stack",n)
+ if initex then
+ -- todo: log stored functions
else
- local tn = type(sn)
- if tn == "function" then
- if not sn() and texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
- _stack_f_[n] = nil
- else
- -- keep, beware, that way the stack can grow
+ local slots = table.sortedkeys(storedfunctions)
+ local last = #slots
+ if last > 0 then
+ -- we restore the references
+ for i=1,last do
+ local slot = slots[i]
+ local data = storedfunctions[slot]
+ luafunctions[slot] = function(...)
+ return expose(slot,data,...)
+ end
+ end
+ -- we now know how many are defined
+ nofused = slots[last]
+ -- normally there are no holes in the list yet
+ for i=1,nofused do
+ if not luafunctions[i] then
+ noffreed = noffreed + 1
+ freed[noffreed] = i
+ end
end
+ -- report_cld("%s registered functions, %s freed slots",last,noffreed)
+ end
+ end
+
+ registerfunction = function(f,direct) -- either f=code or f=namespace,direct=name
+ local slot, func
+ if noffreed > 0 then
+ slot = freed[noffreed]
+ freed[noffreed] = nil
+ noffreed = noffreed - 1
else
- if texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
- writenode(sn)
- _stack_f_[n] = nil
+ nofused = nofused + 1
+ slot = nofused
+ end
+ if direct then
+ if initex then
+ func = function(...)
+ expose(slot,f,...)
+ end
+ if initex then
+ storedfunctions[slot] = f
+ end
else
- writenode(copynodelist(sn))
- -- keep, beware, that way the stack can grow
+ func = resolvestoredfunction(f)
+ end
+ if type(func) ~= "function" then
+ func = function() report_cld("invalid resolve %A",f) end
end
+ elseif type(f) == "string" then
+ func = loadstring(f)
+ if type(func) ~= "function" then
+ func = function() report_cld("invalid code %A",f) end
+ end
+ elseif type(f) == "function" then
+ func = f
+ else
+ func = function() report_cld("invalid function %A",f) end
+ end
+ luafunctions[slot] = func
+ return slot
+ end
+
+ -- do
+ -- commands.test = function(str) report_cld("test function: %s", str) end
+ -- if initex then
+ -- registerfunction("commands.test") -- number 1
+ -- end
+ -- luafunctions[1]("okay")
+ -- end
+
+ unregisterfunction = function(slot)
+ if luafunctions[slot] then
+ noffreed = noffreed + 1
+ freed[noffreed] = slot
+ luafunctions[slot] = false
+ else
+ report_cld("invalid function slot %A",slot)
end
end
+
+ reservefunction = function()
+ if noffreed > 0 then
+ local n = freed[noffreed]
+ freed[noffreed] = nil
+ noffreed = noffreed - 1
+ return n
+ else
+ nofused = nofused + 1
+ return nofused
+ end
+ end
+
+ callfunctiononce = function(slot)
+ luafunctions[slot](slot)
+ noffreed = noffreed + 1
+ freed[noffreed] = slot
+ luafunctions[slot] = false
+ end
+
+ table.setmetatablecall(luafunctions,function(t,n) return luafunctions[n](n) end)
+
+ knownfunctions = luafunctions
+
+ -- The next hack is a convenient way to define scanners at the Lua end and
+ -- get them available at the TeX end. There is some dirty magic needed to
+ -- prevent overload during format loading.
+
+ -- interfaces.scanners.foo = function() context("[%s]",tokens.scanners.string()) end : \scan_foo
+
+ interfaces.storedscanners = interfaces.storedscanners or { }
+ local storedscanners = interfaces.storedscanners
+
+
+ storage.register("interfaces/storedscanners", storedscanners, "interfaces.storedscanners")
+
+ local interfacescanners = table.setmetatablenewindex(function(t,k,v)
+ if storedscanners[k] then
+ -- report_cld("warning: scanner %a is already set",k)
+ -- os.exit()
+ -- \scan_<k> is already in the format
+ -- report_cld("using interface scanner: %s",k)
+ else
+ -- todo: allocate slot here and pass it
+ storedscanners[k] = true
+ -- report_cld("installing interface scanner: %s",k)
+ context("\\installctxscanner{clf_%s}{interfaces.scanners.%s}",k,k)
+ end
+ rawset(t,k,v)
+ end)
+
+ interfaces.scanners = interfacescanners
+
+-- else -- by now this is obsolete
+--
+-- local luafunctions, noffunctions = { }, 0
+-- local luanodes, nofnodes = { }, 0
+--
+-- usedstack = function()
+-- return noffunctions + nofnodes, 0
+-- end
+--
+-- flushfunction = function(n)
+-- local sn = luafunctions[n]
+-- if not sn then
+-- report_cld("data with id %a cannot be found on stack",n)
+-- elseif not sn() and texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
+-- luafunctions[n] = nil
+-- end
+-- end
+--
+-- storefunction = function(ti)
+-- noffunctions = noffunctions + 1
+-- luafunctions[noffunctions] = ti
+-- return noffunctions
+-- end
+--
+-- -- freefunction = function(n)
+-- -- luafunctions[n] = nil
+-- -- end
+--
+-- flushnode = function(n)
+-- local sn = luanodes[n]
+-- if not sn then
+-- report_cld("data with id %a cannot be found on stack",n)
+-- elseif texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
+-- writenode(sn)
+-- luanodes[n] = nil
+-- else
+-- writenode(copynodelist(sn))
+-- end
+-- end
+--
+-- storenode = function(ti)
+-- nofnodes = nofnodes + 1
+-- luanodes[nofnodes] = ti
+-- return nofnodes
+-- end
+--
+-- _cldf_ = flushfunction -- global
+-- _cldn_ = flushnode -- global
+-- -- _cldl_ = function(n) return luafunctions[n]() end -- luafunctions(n)
+-- _cldl_ = luafunctions
+--
+-- registerfunction = function(f)
+-- if type(f) == "string" then
+-- f = loadstring(f)
+-- end
+-- if type(f) ~= "function" then
+-- f = function() report_cld("invalid function %A",f) end
+-- end
+-- noffunctions = noffunctions + 1
+-- luafunctions[noffunctions] = f
+-- return noffunctions
+-- end
+--
+-- unregisterfunction = function(slot)
+-- if luafunctions[slot] then
+-- luafunctions[slot] = nil
+-- else
+-- report_cld("invalid function slot %A",slot)
+-- end
+-- end
+--
+-- reservefunction = function()
+-- noffunctions = noffunctions + 1
+-- return noffunctions
+-- end
+--
+-- callfunctiononce = function(slot)
+-- luafunctions[slot](slot)
+-- luafunctions[slot] = nil
+-- end
+--
+-- table.setmetatablecall(luafunctions,function(t,n) return luafunctions[n](n) end)
+--
+-- knownfunctions = luafunctions
+--
+-- end
+
+context.registerfunction = registerfunction
+context.unregisterfunction = unregisterfunction
+context.reservefunction = reservefunction
+context.knownfunctions = knownfunctions
+context.callfunctiononce = callfunctiononce _cldo_ = callfunctiononce
+context.storenode = storenode -- private helper
+
+function commands.ctxfunction(code,namespace)
+ context(registerfunction(code,namespace))
end
-local function _flush_n_(n)
- local sn = _stack_n_[n]
- if not sn then
- report_cld("data with id %a cannot be found on stack",n)
- elseif texgetcount("@@trialtypesetting") == 0 then -- @@trialtypesetting is private!
- writenode(sn)
- _stack_n_[n] = nil
- else
- writenode(copynodelist(sn))
- -- keep, beware, that way the stack can grow
+function commands.ctxscanner(name,code,namespace)
+ local n = registerfunction(code,namespace)
+ if storedscanners[name] then
+ storedscanners[name] = n
end
+ context(n)
end
-function context.restart()
- _stack_f_, _n_f_ = { }, 0
- _stack_n_, _n_n_ = { }, 0
+local function dummy() end
+
+function commands.ctxresetter(name)
+ return function()
+ if storedscanners[name] then
+ rawset(interfacescanners,name,dummy)
+ context.resetctxscanner("clf_" .. name)
+ end
+ end
end
-context._stack_f_ = _stack_f_
-context._store_f_ = _store_f_
-context._flush_f_ = _flush_f_ _cldf_ = _flush_f_
+function context.trialtypesetting()
+ return texgetcount("@@trialtypesetting") ~= 0
+end
-context._stack_n_ = _stack_n_
-context._store_n_ = _store_n_
-context._flush_n_ = _flush_n_ _cldn_ = _flush_n_
+-- local f_cldo = formatters["_cldo_(%i)"]
+-- local latelua_node = nodes.pool.latelua
+--
+-- function context.lateluafunctionnnode(f)
+-- return latelua_node(f_cldo(registerfunction(f)))
+-- end
-- Should we keep the catcodes with the function?
@@ -359,98 +687,210 @@ end
local containseol = patterns.containseol
-local function writer(parent,command,first,...) -- already optimized before call
- local t = { first, ... }
- flush(currentcatcodes,command) -- todo: ctx|prt|texcatcodes
- local direct = false
- for i=1,#t do
- local ti = t[i]
- local typ = type(ti)
- if direct then
- if typ == "string" or typ == "number" then
- flush(currentcatcodes,ti)
- else -- node.write
- report_context("error: invalid use of direct in %a, only strings and numbers can be flushed directly, not %a",command,typ)
- end
- direct = false
- elseif ti == nil then
- -- nothing
- elseif ti == "" then
- flush(currentcatcodes,"{}")
- elseif typ == "string" then
- -- is processelines seen ?
- if processlines and lpegmatch(containseol,ti) then
- flush(currentcatcodes,"{")
- local flushlines = parent.__flushlines or flushlines
- flushlines(ti)
- flush(currentcatcodes,"}")
- elseif currentcatcodes == contentcatcodes then
+local writer
+
+if luafunctions then
+
+ writer = function (parent,command,first,...) -- already optimized before call
+ local t = { first, ... }
+ flush(currentcatcodes,command) -- todo: ctx|prt|texcatcodes
+ local direct = false
+ for i=1,#t do
+ local ti = t[i]
+ local typ = type(ti)
+ if direct then
+ if typ == "string" or typ == "number" then
+ flush(currentcatcodes,ti)
+ else -- node.write
+ report_context("error: invalid use of direct in %a, only strings and numbers can be flushed directly, not %a",command,typ)
+ end
+ direct = false
+ elseif ti == nil then
+ -- nothing
+ elseif ti == "" then
+ flush(currentcatcodes,"{}")
+ elseif typ == "string" then
+                -- is processlines seen?
+ if processlines and lpegmatch(containseol,ti) then
+ flush(currentcatcodes,"{")
+ local flushlines = parent.__flushlines or flushlines
+ flushlines(ti)
+ flush(currentcatcodes,"}")
+ elseif currentcatcodes == contentcatcodes then
+ flush(currentcatcodes,"{",ti,"}")
+ else
+ flush(currentcatcodes,"{")
+ flush(contentcatcodes,ti)
+ flush(currentcatcodes,"}")
+ end
+ elseif typ == "number" then
+ -- numbers never have funny catcodes
flush(currentcatcodes,"{",ti,"}")
- else
- flush(currentcatcodes,"{")
- flush(contentcatcodes,ti)
- flush(currentcatcodes,"}")
- end
- elseif typ == "number" then
- -- numbers never have funny catcodes
- flush(currentcatcodes,"{",ti,"}")
- elseif typ == "table" then
- local tn = #ti
- if tn == 0 then
- local done = false
- for k, v in next, ti do
- if done then
- if v == "" then
- flush(currentcatcodes,",",k,'=')
+ elseif typ == "table" then
+ local tn = #ti
+ if tn == 0 then
+ local done = false
+ for k, v in next, ti do
+ if done then
+ if v == "" then
+ flush(currentcatcodes,",",k,'=')
+ else
+ flush(currentcatcodes,",",k,"={",v,"}")
+ end
else
- flush(currentcatcodes,",",k,"={",v,"}")
+ if v == "" then
+ flush(currentcatcodes,"[",k,"=")
+ else
+ flush(currentcatcodes,"[",k,"={",v,"}")
+ end
+ done = true
end
+ end
+ if done then
+ flush(currentcatcodes,"]")
+ else
+ flush(currentcatcodes,"[]")
+ end
+ elseif tn == 1 then -- some 20% faster than the next loop
+ local tj = ti[1]
+ if type(tj) == "function" then
+ flush(currentcatcodes,"[\\cldl",storefunction(tj),"]")
else
- if v == "" then
- flush(currentcatcodes,"[",k,"=")
+ flush(currentcatcodes,"[",tj,"]")
+ end
+ else -- is concat really faster than flushes here? probably needed anyway (print artifacts)
+ flush(currentcatcodes,"[")
+ for j=1,tn do
+ local tj = ti[j]
+ if type(tj) == "function" then
+ if j == tn then
+ flush(currentcatcodes,"\\cldl",storefunction(tj),"]")
+ else
+ flush(currentcatcodes,"\\cldl",storefunction(tj),",")
+ end
else
- flush(currentcatcodes,"[",k,"={",v,"}")
+ if j == tn then
+ flush(currentcatcodes,tj,"]")
+ else
+ flush(currentcatcodes,tj,",")
+ end
end
- done = true
end
end
- if done then
- flush(currentcatcodes,"]")
+ elseif typ == "function" then
+ flush(currentcatcodes,"{\\cldl ",storefunction(ti),"}") -- todo: ctx|prt|texcatcodes
+ elseif typ == "boolean" then
+ if ti then
+ flushdirect(currentcatcodes,"\r")
else
- flush(currentcatcodes,"[]")
+ direct = true
+ end
+ elseif typ == "thread" then
+ report_context("coroutines not supported as we cannot yield across boundaries")
+ elseif isnode(ti) then -- slow
+ flush(currentcatcodes,"{\\cldl",storenode(ti),"}")
+ else
+ report_context("error: %a gets a weird argument %a",command,ti)
+ end
+ end
+ end
+
+else
+
+ writer = function (parent,command,first,...) -- already optimized before call
+ local t = { first, ... }
+ flush(currentcatcodes,command) -- todo: ctx|prt|texcatcodes
+ local direct = false
+ for i=1,#t do
+ local ti = t[i]
+ local typ = type(ti)
+ if direct then
+ if typ == "string" or typ == "number" then
+ flush(currentcatcodes,ti)
+ else -- node.write
+ report_context("error: invalid use of direct in %a, only strings and numbers can be flushed directly, not %a",command,typ)
end
- elseif tn == 1 then -- some 20% faster than the next loop
- local tj = ti[1]
- if type(tj) == "function" then
- flush(currentcatcodes,"[\\cldf{",_store_f_(tj),"}]")
+ direct = false
+ elseif ti == nil then
+ -- nothing
+ elseif ti == "" then
+ flush(currentcatcodes,"{}")
+ elseif typ == "string" then
+                -- is processlines seen?
+ if processlines and lpegmatch(containseol,ti) then
+ flush(currentcatcodes,"{")
+ local flushlines = parent.__flushlines or flushlines
+ flushlines(ti)
+ flush(currentcatcodes,"}")
+ elseif currentcatcodes == contentcatcodes then
+ flush(currentcatcodes,"{",ti,"}")
else
- flush(currentcatcodes,"[",tj,"]")
+ flush(currentcatcodes,"{")
+ flush(contentcatcodes,ti)
+ flush(currentcatcodes,"}")
end
- else -- is concat really faster than flushes here? probably needed anyway (print artifacts)
- for j=1,tn do
- local tj = ti[j]
+ elseif typ == "number" then
+ -- numbers never have funny catcodes
+ flush(currentcatcodes,"{",ti,"}")
+ elseif typ == "table" then
+ local tn = #ti
+ if tn == 0 then
+ local done = false
+ for k, v in next, ti do
+ if done then
+ if v == "" then
+ flush(currentcatcodes,",",k,'=')
+ else
+ flush(currentcatcodes,",",k,"={",v,"}")
+ end
+ else
+ if v == "" then
+ flush(currentcatcodes,"[",k,"=")
+ else
+ flush(currentcatcodes,"[",k,"={",v,"}")
+ end
+ done = true
+ end
+ end
+ if done then
+ flush(currentcatcodes,"]")
+ else
+ flush(currentcatcodes,"[]")
+ end
+ elseif tn == 1 then -- some 20% faster than the next loop
+ local tj = ti[1]
if type(tj) == "function" then
- ti[j] = "\\cldf{" .. _store_f_(tj) .. "}"
+ flush(currentcatcodes,"[\\cldf{",storefunction(tj),"}]")
+ else
+ flush(currentcatcodes,"[",tj,"]")
end
+ else -- is concat really faster than flushes here? probably needed anyway (print artifacts)
+ for j=1,tn do
+ local tj = ti[j]
+ if type(tj) == "function" then
+ ti[j] = "\\cldf{" .. storefunction(tj) .. "}"
+ end
+ end
+ flush(currentcatcodes,"[",concat(ti,","),"]")
end
- flush(currentcatcodes,"[",concat(ti,","),"]")
- end
- elseif typ == "function" then
- flush(currentcatcodes,"{\\cldf{",_store_f_(ti),"}}") -- todo: ctx|prt|texcatcodes
- elseif typ == "boolean" then
- if ti then
- flushdirect(currentcatcodes,"\r")
+ elseif typ == "function" then
+ flush(currentcatcodes,"{\\cldf{",storefunction(ti),"}}") -- todo: ctx|prt|texcatcodes
+ elseif typ == "boolean" then
+ if ti then
+ flushdirect(currentcatcodes,"\r")
+ else
+ direct = true
+ end
+ elseif typ == "thread" then
+ report_context("coroutines not supported as we cannot yield across boundaries")
+ elseif isnode(ti) then -- slow
+ flush(currentcatcodes,"{\\cldn{",storenode(ti),"}}")
else
- direct = true
+ report_context("error: %a gets a weird argument %a",command,ti)
end
- elseif typ == "thread" then
- report_context("coroutines not supported as we cannot yield across boundaries")
- elseif isnode(ti) then -- slow
- flush(currentcatcodes,"{\\cldn{",_store_n_(ti),"}}")
- else
- report_context("error: %a gets a weird argument %a",command,ti)
end
end
+
end
local generics = { } context.generics = generics
@@ -507,70 +947,154 @@ end
function context.constructcsonly(k) -- not much faster than the next but more mem efficient
local c = "\\" .. tostring(generics[k] or k)
- rawset(context, k, function()
+ local v = function()
flush(prtcatcodes,c)
- end)
+ end
+ rawset(context,k,v)
+ return v
end
function context.constructcs(k)
local c = "\\" .. tostring(generics[k] or k)
- rawset(context, k, function(first,...)
+ local v = function(first,...)
if first == nil then
flush(prtcatcodes,c)
else
return writer(context,c,first,...)
end
- end)
+ end
+ rawset(context,k,v)
+ return v
end
-local function caller(parent,f,a,...)
- if not parent then
- -- so we don't need to test in the calling (slower but often no issue)
- elseif f ~= nil then
- local typ = type(f)
- if typ == "string" then
- if a then
- flush(contentcatcodes,formatters[f](a,...)) -- was currentcatcodes
- elseif processlines and lpegmatch(containseol,f) then
- local flushlines = parent.__flushlines or flushlines
- flushlines(f)
- else
- flush(contentcatcodes,f)
- end
- elseif typ == "number" then
- if a then
- flush(currentcatcodes,f,a,...)
+-- local splitformatters = utilities.strings.formatters.new(true) -- not faster (yet)
+
+local caller
+
+if luafunctions then
+
+ caller = function(parent,f,a,...)
+ if not parent then
+ -- so we don't need to test in the calling (slower but often no issue)
+ elseif f ~= nil then
+ local typ = type(f)
+ if typ == "string" then
+ if f == "" then
+ -- new, can save a bit sometimes
+ -- if trace_context then
+ -- report_context("empty argument to context()")
+ -- end
+ elseif a then
+ flush(contentcatcodes,formatters[f](a,...)) -- was currentcatcodes
+ -- flush(contentcatcodes,splitformatters[f](a,...)) -- was currentcatcodes
+ elseif processlines and lpegmatch(containseol,f) then
+ local flushlines = parent.__flushlines or flushlines
+ flushlines(f)
+ else
+ flush(contentcatcodes,f)
+ end
+ elseif typ == "number" then
+ if a then
+ flush(currentcatcodes,f,a,...)
+ else
+ flush(currentcatcodes,f)
+ end
+ elseif typ == "function" then
+ -- ignored: a ...
+ flush(currentcatcodes,"{\\cldl",storefunction(f),"}") -- todo: ctx|prt|texcatcodes
+ elseif typ == "boolean" then
+ if f then
+ if a ~= nil then
+ local flushlines = parent.__flushlines or flushlines
+ flushlines(a)
+ else
+ flushdirect(currentcatcodes,"\n") -- no \r, else issues with \startlines ... use context.par() otherwise
+ end
+ else
+ if a ~= nil then
+ -- no command, same as context(a,...)
+ writer(parent,"",a,...)
+ else
+ -- ignored
+ end
+ end
+ elseif typ == "thread" then
+ report_context("coroutines not supported as we cannot yield across boundaries")
+ elseif isnode(f) then -- slow
+ -- writenode(f)
+ flush(currentcatcodes,"\\cldl",storenode(f)," ")
else
- flush(currentcatcodes,f)
+ report_context("error: %a gets a weird argument %a","context",f)
end
- elseif typ == "function" then
- -- ignored: a ...
- flush(currentcatcodes,"{\\cldf{",_store_f_(f),"}}") -- todo: ctx|prt|texcatcodes
- elseif typ == "boolean" then
- if f then
- if a ~= nil then
+ end
+ end
+
+ function context.flushnode(n)
+ flush(currentcatcodes,"\\cldl",storenode(n)," ")
+ end
+
+else
+
+ caller = function(parent,f,a,...)
+ if not parent then
+ -- so we don't need to test in the calling (slower but often no issue)
+ elseif f ~= nil then
+ local typ = type(f)
+ if typ == "string" then
+ if f == "" then
+ -- new, can save a bit sometimes
+ -- if trace_context then
+ -- report_context("empty argument to context()")
+ -- end
+ elseif a then
+ flush(contentcatcodes,formatters[f](a,...)) -- was currentcatcodes
+ -- flush(contentcatcodes,splitformatters[f](a,...)) -- was currentcatcodes
+ elseif processlines and lpegmatch(containseol,f) then
local flushlines = parent.__flushlines or flushlines
- flushlines(a)
+ flushlines(f)
else
- flushdirect(currentcatcodes,"\n") -- no \r, else issues with \startlines ... use context.par() otherwise
+ flush(contentcatcodes,f)
end
- else
- if a ~= nil then
- -- no command, same as context(a,...)
- writer(parent,"",a,...)
+ elseif typ == "number" then
+ if a then
+ flush(currentcatcodes,f,a,...)
+ else
+ flush(currentcatcodes,f)
+ end
+ elseif typ == "function" then
+ -- ignored: a ...
+ flush(currentcatcodes,"{\\cldf{",storefunction(f),"}}") -- todo: ctx|prt|texcatcodes
+ elseif typ == "boolean" then
+ if f then
+ if a ~= nil then
+ local flushlines = parent.__flushlines or flushlines
+ flushlines(a)
+ else
+ flushdirect(currentcatcodes,"\n") -- no \r, else issues with \startlines ... use context.par() otherwise
+ end
else
- -- ignored
+ if a ~= nil then
+ -- no command, same as context(a,...)
+ writer(parent,"",a,...)
+ else
+ -- ignored
+ end
end
+ elseif typ == "thread" then
+ report_context("coroutines not supported as we cannot yield across boundaries")
+ elseif isnode(f) then -- slow
+ -- writenode(f)
+ flush(currentcatcodes,"\\cldn{",storenode(f),"}")
+ else
+ report_context("error: %a gets a weird argument %a","context",f)
end
- elseif typ == "thread" then
- report_context("coroutines not supported as we cannot yield across boundaries")
- elseif isnode(f) then -- slow
- -- writenode(f)
- flush(currentcatcodes,"\\cldn{",_store_n_(f),"}")
- else
- report_context("error: %a gets a weird argument %a","context",f)
end
end
+
+ function context.flushnode(n)
+ flush(currentcatcodes,"\\cldn{",storenode(n),"}")
+ end
+
end
local defaultcaller = caller
@@ -642,8 +1166,12 @@ local visualizer = lpeg.replacer {
}
statistics.register("traced context", function()
+ local used, freed = usedstack()
+ local unreachable = used - freed
if nofwriters > 0 or nofflushes > 0 then
- return format("writers: %s, flushes: %s, maxstack: %s",nofwriters,nofflushes,_n_f_)
+        return format("writers: %s, flushes: %s, maxstack: %s, freed: %s, unreachable: %s",nofwriters,nofflushes,used,freed,unreachable)
+ elseif showstackusage or unreachable > 0 then
+ return format("maxstack: %s, freed: %s, unreachable: %s",used,freed,unreachable)
end
end)
@@ -1019,7 +1547,8 @@ local function caller(parent,f,a,...)
end
elseif typ == "function" then
-- ignored: a ...
- flush(currentcatcodes,mpdrawing,"{\\cldf{",store_(f),"}}")
+-- flush(currentcatcodes,mpdrawing,"{\\cldf{",store_(f),"}}")
+ flush(currentcatcodes,mpdrawing,"{\\cldl",store_(f),"}")
elseif typ == "boolean" then
-- ignored: a ...
if f then
diff --git a/tex/context/base/cldf-ini.mkiv b/tex/context/base/cldf-ini.mkiv
index 258409d7a..12ada1383 100644
--- a/tex/context/base/cldf-ini.mkiv
+++ b/tex/context/base/cldf-ini.mkiv
@@ -36,6 +36,15 @@
\def\cldf#1{\directlua{_cldf_(#1)}} % global (functions)
\def\cldn#1{\directlua{_cldn_(#1)}} % global (nodes)
+\ifx\luafunction\undefined
+ \def\luafunction#1{\directlua{_cldl_(#1)}}
+\fi
+
+\let\cldl\luafunction
+
+% \catcodetable\ctxcatcodes \catcode`^=\superscriptcatcode\catcode1=\activecatcode \global\let^^A=\cldf
+% \catcodetable\ctxcatcodes \catcode`^=\superscriptcatcode\catcode2=\activecatcode \global\let^^B=\cldn
+
\normalprotected\def\cldprocessfile#1{\directlua{context.runfile("#1")}}
\def\cldloadfile #1{\directlua{context.loadfile("#1")}}
\def\cldcontext #1{\directlua{context(#1)}}
diff --git a/tex/context/base/cldf-prs.lua b/tex/context/base/cldf-prs.lua
index 160d30b19..3c3836a3e 100644
--- a/tex/context/base/cldf-prs.lua
+++ b/tex/context/base/cldf-prs.lua
@@ -12,45 +12,61 @@ local lpegmatch, patterns = lpeg.match, lpeg.patterns
local P, R, V, Cc, Cs = lpeg.P, lpeg.R, lpeg.V, lpeg.Cc, lpeg.Cs
local format = string.format
-local cpatterns = patterns.context or { }
-patterns.context = cpatterns
-
-local backslash = P("\\")
-local csname = backslash * P(1) * (1-backslash)^0
-local sign = P("+") / "\\textplus "
- + P("-") / "\\textminus "
-local leftbrace = P("{")
-local rightbrace = P("}")
-local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
-local subscript = P("_")
-local superscript = P("^")
-local utf8char = patterns.utf8char
-local cardinal = patterns.cardinal
-
--- local scripts = P { "start",
--- start = V("csname") + V("lowfirst") + V("highfirst"),
--- csname = csname,
--- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)),
--- lowfirst = subscript * ( Cc("\\lohi{%s}{%s}") * V("content") * superscript + Cc("\\low{%s}" ) ) * V("content") / format,
--- highfirst = superscript * ( Cc("\\hilo{%s}{%s}") * V("content") * subscript + Cc("\\high{%s}") ) * V("content") / format,
--- }
-
-local scripts = P { "start",
- start = V("csname") + V("lowfirst") + V("highfirst"),
- csname = csname,
- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8char)),
- lowfirst = (subscript /"") * ( Cc("\\lohi{") * V("content") * Cc("}{") * (superscript/"") + Cc("\\low{" ) ) * V("content") * Cc("}"),
- highfirst = (superscript/"") * ( Cc("\\hilo{") * V("content") * Cc("}{") * (subscript /"") + Cc("\\high{") ) * V("content") * Cc("}"),
- }
-
-local scripted = Cs((csname + scripts + utf8char)^0)
-
-cpatterns.scripts = scripts
-cpatterns.csname = csname
-cpatterns.scripted = scripted
-cpatterns.nested = nested
-
--- inspect(scripted)
+local cpatterns = patterns.context or { }
+patterns.context = cpatterns
+
+local utf8character = patterns.utf8character
+local cardinal = patterns.cardinal
+
+local leftbrace = P("{")
+local rightbrace = P("}")
+local backslash = P("\\")
+local csname = backslash * P(1) * (1-backslash-leftbrace)^0 * P(" ")^0
+local sign = P("+") / "\\textplus "
+ + P("-") / "\\textminus "
+local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
+local subscript = P("_")
+local superscript = P("^")
+
+-- local scripts = P { "start",
+-- start = V("csname") + V("lowfirst") + V("highfirst"),
+-- csname = csname,
+-- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8character)),
+-- lowfirst = subscript * ( Cc("\\lohi{%s}{%s}") * V("content") * superscript + Cc("\\low{%s}" ) ) * V("content") / format,
+-- highfirst = superscript * ( Cc("\\hilo{%s}{%s}") * V("content") * subscript + Cc("\\high{%s}") ) * V("content") / format,
+-- }
+
+-- local scripts = P { "start",
+-- start = (V("csname") + V("lowfirst") + V("highfirst"))^1,
+-- csname = csname,
+-- content = Cs(V("csname") + nested + sign^-1 * (cardinal + utf8character)),
+-- lowfirst = (subscript /"") * ( Cc("\\lohi{") * V("content") * Cc("}{") * (superscript/"") + Cc("\\low{" ) ) * V("content") * Cc("}"),
+-- highfirst = (superscript/"") * ( Cc("\\hilo{") * V("content") * Cc("}{") * (subscript /"") + Cc("\\high{") ) * V("content") * Cc("}"),
+-- }
+
+local scripted = Cs { "start",
+ start = (V("csname") + V("nested") + V("lowfirst") + V("highfirst") + V("character"))^1,
+ rest = V("csname") + V("nested") + V("lowfirst") + V("highfirst"),
+ csname = csname,
+ character = utf8character,
+ -- nested = leftbrace * (V("rest") + (V("character")-rightbrace))^0 * rightbrace,
+ nested = leftbrace * (V("start") -rightbrace)^0 * rightbrace,
+ -- content = Cs(V("nested") + sign^-1 * (cardinal + V("character"))),
+ content = V("nested") + sign^-1 * (cardinal + V("character")) + sign,
+ lowfirst = (subscript /"") * ( Cc("\\lohi{") * V("content") * Cc("}{") * (superscript/"") + Cc("\\low{" ) ) * V("content") * Cc("}"),
+ highfirst = (superscript/"") * ( Cc("\\hilo{") * V("content") * Cc("}{") * (subscript /"") + Cc("\\high{") ) * V("content") * Cc("}"),
+ }
+
+cpatterns.csname = csname
+cpatterns.scripted = scripted
+cpatterns.nested = nested
+
-- print(lpegmatch(scripted,"10^-3_x"))
+-- print(lpegmatch(scripted,"\\L {C_5}"))
+-- print(lpegmatch(scripted,"\\SL{}"))
+-- print(lpegmatch(scripted,"\\SL{C_5}"))
+-- print(lpegmatch(scripted,"\\SL{C_5}"))
+-- print(lpegmatch(scripted,"{C_5}"))
+-- print(lpegmatch(scripted,"{\\C_5}"))
-- print(lpegmatch(scripted,"10^-a"))
diff --git a/tex/context/base/cldf-scn.lua b/tex/context/base/cldf-scn.lua
new file mode 100644
index 000000000..5f7e0c74b
--- /dev/null
+++ b/tex/context/base/cldf-scn.lua
@@ -0,0 +1,163 @@
+if not modules then modules = { } end modules ['cldf-scn'] = {
+ version = 1.001,
+ comment = "companion to cldf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local load, type = load, type
+
+local formatters = string.formatters
+local char = string.char
+local concat = table.concat
+
+local lpegmatch = lpeg.match
+local p_unquoted = lpeg.Cs(lpeg.patterns.unquoted)
+
+local f_action_f = formatters["action%s(%s)"]
+local f_action_s = formatters["local action%s = action[%s]"]
+local f_command = formatters["local action = tokens._action\n%\nt\nreturn function(%s) return %s end"]
+
+local interfaces = interfaces
+local commands = commands
+local scanners = interfaces.scanners
+
+local compile = tokens.compile or function() end
+
+local dummy = function() end
+
+local report = logs.reporter("interfaces","implementor")
+
+function interfaces.implement(specification)
+ local actions = specification.actions
+ local name = specification.name
+ local arguments = specification.arguments
+ local private = specification.scope == "private"
+ local onlyonce = specification.onlyonce
+ if not actions then
+ if name then
+ report("error: no actions for %a",name)
+ else
+ report("error: no actions and no name")
+ end
+ return
+ end
+ if name == "" then
+ name = nil
+ end
+ local scanner
+ local resetter = onlyonce and name and commands.ctxresetter(name)
+ if resetter then
+ local scan = compile(specification)
+ if private then
+ scanner = function()
+ resetter()
+ return scan()
+ end
+ else
+ scanner = function()
+ commands[name] = dummy
+ resetter()
+ return scan()
+ end
+ end
+ else
+ scanner = compile(specification)
+ end
+ if not name then
+ return scanner
+ end
+ if scanners[name] and not specification.overload then
+ report("warning: 'scanners.%s' is redefined",name)
+ end
+ scanners[name] = scanner
+ if private then
+ return
+ end
+ local command
+ if onlyonce then
+ if type(actions) == "function" then
+ actions = { actions }
+ elseif #actions == 1 then
+ actions = { actions[1] }
+ end
+ end
+ if type(actions) == "function" then
+ command = actions
+ elseif #actions == 1 then
+ command = actions[1]
+ else
+ -- this one is not yet complete .. compare tokens
+ tokens._action = actions
+ local f = { }
+ local args
+ if not arguments then
+ args = ""
+ elseif type(arguments) == "table" then
+ local a = { }
+ for i=1,#arguments do
+ local v = arguments[i]
+ local t = type(v)
+ if t == "boolean" then
+ a[i] = tostring(v)
+ elseif t == "number" then
+ a[i] = tostring(v)
+ elseif t == "string" then
+ local s = lpegmatch(p_unquoted,v)
+ if s and v ~= s then
+ a[i] = v -- a string, given as "'foo'" or '"foo"'
+ else
+ a[i] = char(96+i)
+ end
+ else
+ -- nothing special for tables
+ a[i] = char(96+i)
+ end
+ end
+ args = concat(a,",")
+ else
+ args = "a"
+ end
+ command = args
+ for i=1,#actions do
+ command = f_action_f(i,command)
+ f[#f+1] = f_action_s(i,i)
+ end
+ command = f_command(f,args,command)
+ command = load(command)
+ if command then
+ if resetter then
+ local cmd = command()
+ command = function()
+ commands[name] = dummy
+ resetter()
+ cmd()
+ end
+ else
+ command = command()
+ end
+ end
+ tokens._action = nil
+ end
+ if commands[name] and not specification.overload then
+ report("warning: 'commands.%s' is redefined",name)
+ end
+ commands[name] = command
+ -- return scanner, command
+end
+
+-- it's convenient to have copies here:
+
+interfaces.setmacro = tokens.setters.macro
+interfaces.setcount = tokens.setters.count
+interfaces.setdimen = tokens.setters.dimen
+
+interfaces.strings = table.setmetatableindex(function(t,k)
+ local v = { }
+ for i=1,k do
+ v[i] = "string"
+ end
+ t[k] = v
+ return v
+end)
diff --git a/tex/context/base/cldf-ver.lua b/tex/context/base/cldf-ver.lua
index b48fd253a..66432eb1c 100644
--- a/tex/context/base/cldf-ver.lua
+++ b/tex/context/base/cldf-ver.lua
@@ -56,16 +56,18 @@ function context.tocontext(first,...)
end
end
-function context.tobuffer(name,str)
- context.startbuffer { name }
- context.pushcatcodes("verbatim")
- local lines = (type(str) == "string" and find(str,"[\n\r]") and splitlines(str)) or str
- for i=1,#lines do
- context(lines[i] .. " ")
- end
- context.stopbuffer()
- context.popcatcodes()
-end
+-- function context.tobuffer(name,str)
+-- context.startbuffer { name }
+-- context.pushcatcodes("verbatim")
+-- local lines = (type(str) == "string" and find(str,"[\n\r]") and splitlines(str)) or str
+-- for i=1,#lines do
+-- context(lines[i] .. " ")
+-- end
+-- context.stopbuffer()
+-- context.popcatcodes()
+-- end
+
+context.tobuffer = buffers.assign -- (name,str,catcodes)
function context.tolines(str)
local lines = type(str) == "string" and splitlines(str) or str
diff --git a/tex/context/base/colo-ext.mkiv b/tex/context/base/colo-ext.mkiv
index 8878da485..c076fda9e 100644
--- a/tex/context/base/colo-ext.mkiv
+++ b/tex/context/base/colo-ext.mkiv
@@ -29,13 +29,13 @@
%D
%D will negate the colors in box zero.
-\unexpanded\def\negatecolorbox#1%
- {\setbox#1\hbox
- {\startnegative % might change
- % \startcolor[\s!white]\vrule\s!height\ht#1\s!depth\dp#1\s!width\wd#1\stopcolor
- \blackrule[\c!color=\s!white,\c!height=\ht#1,\c!depth=\dp#1,\c!width=\wd#1]%
- \hskip-\wd#1%
- \box#1%
+\unexpanded\def\negatecolorbox#1% or just set attr of #1
+ {\setbox#1\hbox to \wd#1%
+ {\scratchdimen\wd#1\relax
+ \startnegative % might change
+ \blackrule[\c!color=\s!white,\c!height=\ht#1,\c!depth=\dp#1,\c!width=\scratchdimen]%
+ \hskip-\scratchdimen
+ \leaders\box#1\hfill% this triggers application to the box .. a real dirty hack!
\stopnegative}}
%D There are in principle two ways to handle overprint: bound to colors
@@ -47,10 +47,10 @@
\installcorenamespace{colorintent}
\unexpanded\def\registercolorintent#1#2%
- {\setevalue{\??colorintent#1}{\attribute\colorintentattribute\ctxcommand{registercolorintent('#2')} }}
+ {\setevalue{\??colorintent#1}{\attribute\colorintentattribute\clf_registercolorintent{#2}}}
\unexpanded\def\colo_intents_set
- {\ctxcommand{enablecolorintents()}%
+ {\clf_enablecolorintents
\unexpanded\gdef\colo_intents_set##1{\csname\??colorintent##1\endcsname}%
\colo_intents_set}
diff --git a/tex/context/base/colo-grp.mkiv b/tex/context/base/colo-grp.mkiv
index 11b759062..e296bbc33 100644
--- a/tex/context/base/colo-grp.mkiv
+++ b/tex/context/base/colo-grp.mkiv
@@ -65,7 +65,7 @@
\processcommalist[#3]{\colo_groups_define_entry{#1}{#2}}}
\def\colo_groups_define_checked[#1][#2][#3]%
- {\doifinstringelse{:}{#2}
+ {\doifelseinstring{:}{#2}
{\colo_groups_define_normal[#1][\v!rgb][#2]}
{\doloop % inherited
{\ifcsname\??colorgroup#2:\recurselevel\endcsname
diff --git a/tex/context/base/colo-imp-rgb.mkiv b/tex/context/base/colo-imp-rgb.mkiv
index 9bc6befba..d7b691fcc 100644
--- a/tex/context/base/colo-imp-rgb.mkiv
+++ b/tex/context/base/colo-imp-rgb.mkiv
@@ -53,8 +53,8 @@
\definecolor [darkmagenta] [r=.4, g=0, b=.4]
\definecolor [darkyellow] [r=.4, g=.4, b=0]
-\definecolor [darkgray] [s=.60]
-\definecolor [middlegray] [s=.725]
+\definecolor [darkgray] [s=.40]
+\definecolor [middlegray] [s=.625]
\definecolor [lightgray] [s=.85]
%D These colors are mapped to interface dependant colornames.
@@ -268,6 +268,8 @@
%D Bonus (needed for FO test):
-\definecolor [orange] [r=1,g=.5]
+\definecolor [orange] [r=1, g=.5]
+\definecolor [middleorange] [r=.6,g=.3]
+\definecolor [darkorange] [r=.4,g=.2]
\endinput
diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua
index 535ee71b8..81adfa680 100644
--- a/tex/context/base/colo-ini.lua
+++ b/tex/context/base/colo-ini.lua
@@ -14,11 +14,18 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local formatters = string.formatters
local trace_define = false trackers.register("colors.define",function(v) trace_define = v end)
+local trace_pgf = false trackers.register("colors.pgf", function(v) trace_pgf = v end)
local report_colors = logs.reporter("colors","defining")
+local report_pgf = logs.reporter("colors","pgf")
-local attributes, backends, storage = attributes, backends, storage
-local context, commands = context, commands
+local attributes = attributes
+local backends = backends
+local storage = storage
+local context = context
+local commands = commands
+
+local implement = interfaces.implement
local settings_to_hash_strict = utilities.parsers.settings_to_hash_strict
@@ -26,8 +33,8 @@ local colors = attributes.colors
local transparencies = attributes.transparencies
local colorintents = attributes.colorintents
local registrations = backends.registrations
-local settexattribute = tex.setattribute
-local gettexattribute = tex.getattribute
+local texsetattribute = tex.setattribute
+local texgetattribute = tex.getattribute
local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
@@ -48,7 +55,7 @@ storage.register("attributes/colors/sets",colorsets,"attributes.colors.sets")
local stack = { }
-function colors.pushset(name)
+local function pushset(name)
insert(stack,colorset)
colorset = colorsets[name]
if not colorset then
@@ -57,32 +64,49 @@ function colors.pushset(name)
end
end
-function colors.popset(name)
+local function popset()
colorset = remove(stack)
end
-function colors.setlist(name)
+local function setlist(name)
return table.sortedkeys(name and name ~= "" and colorsets[name] or colorsets.default or {})
end
+colors.pushset = pushset
+colors.popset = popset
+colors.setlist = setlist
+
+local context_colordefagc = context.colordefagc
+local context_colordefagt = context.colordefagt
+local context_colordefalc = context.colordefalc
+local context_colordefalt = context.colordefalt
+local context_colordeffgc = context.colordeffgc
+local context_colordeffgt = context.colordeffgt
+local context_colordefflc = context.colordefflc
+local context_colordefflt = context.colordefflt
+local context_colordefrgc = context.colordefrgc
+local context_colordefrgt = context.colordefrgt
+local context_colordefrlc = context.colordefrlc
+local context_colordefrlt = context.colordefrlt
+
local function definecolor(name, ca, global)
if ca and ca > 0 then
if global then
if trace_define then
report_colors("define global color %a with attribute %a",name,ca)
end
- context.colordefagc(name,ca)
+ context_colordefagc(name,ca)
else
if trace_define then
report_colors("define local color %a with attribute %a",name,ca)
end
- context.colordefalc(name,ca)
+ context_colordefalc(name,ca)
end
else
if global then
- context.colordefrgc(name)
+ context_colordefrgc(name)
else
- context.colordefrlc(name)
+ context_colordefrlc(name)
end
end
colorset[name] = true-- maybe we can store more
@@ -94,18 +118,18 @@ local function inheritcolor(name, ca, global)
if trace_define then
report_colors("inherit global color %a with attribute %a",name,ca)
end
- context.colordeffgc(name,ca) -- some day we will set the macro directly
+ context_colordeffgc(name,ca) -- some day we will set the macro directly
else
if trace_define then
report_colors("inherit local color %a with attribute %a",name,ca)
end
- context.colordefflc(name,ca)
+ context_colordefflc(name,ca)
end
else
if global then
- context.colordefrgc(name)
+ context_colordefrgc(name)
else
- context.colordefrlc(name)
+ context_colordefrlc(name)
end
end
colorset[name] = true-- maybe we can store more
@@ -117,18 +141,18 @@ local function definetransparent(name, ta, global)
if trace_define then
report_colors("define global transparency %a with attribute %a",name,ta)
end
- context.colordefagt(name,ta)
+ context_colordefagt(name,ta)
else
if trace_define then
report_colors("define local transparency %a with attribute %a",name,ta)
end
- context.colordefalt(name,ta)
+ context_colordefalt(name,ta)
end
else
if global then
- context.colordefrgt(name)
+ context_colordefrgt(name)
else
- context.colordefrlt(name)
+ context_colordefrlt(name)
end
end
end
@@ -139,18 +163,18 @@ local function inherittransparent(name, ta, global)
if trace_define then
report_colors("inherit global transparency %a with attribute %a",name,ta)
end
- context.colordeffgt(name,ta)
+ context_colordeffgt(name,ta)
else
if trace_define then
report_colors("inherit local transparency %a with attribute %a",name,ta)
end
- context.colordefflt(name,ta)
+ context_colordefflt(name,ta)
end
else
if global then
- context.colordefrgt(name)
+ context_colordefrgt(name)
else
- context.colordefrlt(name)
+ context_colordefrlt(name)
end
end
end
@@ -229,18 +253,22 @@ colors.forcedmodel = forcedmodel
colors.couple = true
-function colors.definetransparency(name,n)
+local function definetransparency(name,n)
transparent[name] = n
end
+colors.definetransparency = definetransparency
+
local registered = { }
-local function do_registerspotcolor(parent,name,parentnumber,e,f,d,p)
+local function do_registerspotcolor(parent,parentnumber,e,f,d,p)
if not registered[parent] then
local v = colorvalues[parentnumber]
if v then
local model = colors.default -- else problems with shading etc
- if model == 1 then model = v[1] end
+ if model == 1 then
+ model = v[1]
+ end
if e and e ~= "" then
registrations.spotcolorname(parent,e) -- before registration of the color
end
@@ -256,23 +284,25 @@ local function do_registerspotcolor(parent,name,parentnumber,e,f,d,p)
end
end
-local function do_registermultitonecolor(parent,name,parentnumber,e,f,d,p) -- same as spot but different template
- if not registered[parent] then
- local v = colorvalues[parentnumber]
- if v then
- local model = colors.default -- else problems with shading etc
- if model == 1 then model = v[1] end
- if model == 2 then
- registrations.grayindexcolor(parent,f,d,p,v[2])
- elseif model == 3 then
- registrations.rgbindexcolor (parent,f,d,p,v[3],v[4],v[5])
- elseif model == 4 then
- registrations.cmykindexcolor(parent,f,d,p,v[6],v[7],v[8],v[9])
- end
- end
- registered[parent] = true
- end
-end
+-- local function do_registermultitonecolor(parent,name,parentnumber,e,f,d,p) -- same as spot but different template
+-- if not registered[parent] then
+-- local v = colorvalues[parentnumber]
+-- if v then
+-- local model = colors.default -- else problems with shading etc
+-- if model == 1 then
+-- model = v[1]
+-- end
+-- if model == 2 then
+-- registrations.grayindexcolor(parent,f,d,p,v[2])
+-- elseif model == 3 then
+-- registrations.rgbindexcolor (parent,f,d,p,v[3],v[4],v[5])
+-- elseif model == 4 then
+-- registrations.cmykindexcolor(parent,f,d,p,v[6],v[7],v[8],v[9])
+-- end
+-- end
+-- registered[parent] = true
+-- end
+-- end
function colors.definesimplegray(name,s)
return register_color(name,'gray',s) -- we still need to get rid of 'color'
@@ -310,17 +340,19 @@ directives.register("colors.pgf",function(v)
end
end)
-function colors.defineprocesscolor(name,str,global,freeze) -- still inconsistent color vs transparent
+local defineintermediatecolor
+
+local function defineprocesscolor(name,str,global,freeze) -- still inconsistent color vs transparent
local what, one, two, three = lpegmatch(specialcolor,str)
if what == "H" then
-- for old times sake (if we need to feed from xml or so)
definecolor(name, register_color(name,'rgb',one,two,three),global)
elseif what == "M" then
-- intermediate
- return colors.defineintermediatecolor(name,one,l_color[two],l_color[three],l_transparency[two],l_transparency[three],"",global,freeze)
+ return defineintermediatecolor(name,one,l_color[two],l_color[three],l_transparency[two],l_transparency[three],"",global,freeze)
elseif what == "P" then
-- pgf for tikz
- return colors.defineintermediatecolor(name,two,l_color[one],l_color[three],l_transparency[one],l_transparency[three],"",global,freeze)
+ return defineintermediatecolor(name,two,l_color[one],l_color[three],l_transparency[one],l_transparency[three],"",global,freeze)
else
local settings = settings_to_hash_strict(str)
if settings then
@@ -376,23 +408,25 @@ function colors.defineprocesscolor(name,str,global,freeze) -- still inconsistent
colorset[name] = true-- maybe we can store more
end
-function colors.isblack(ca) -- maybe commands
+local function isblack(ca) -- maybe commands
local cv = ca > 0 and colorvalues[ca]
return (cv and cv[2] == 0) or false
end
-function colors.definespotcolor(name,parent,str,global)
- if parent == "" or find(parent,"=") then
- colors.registerspotcolor(name, parent)
+colors.isblack = isblack
+
+local function definespotcolor(name,parent,str,global)
+ if parent == "" or find(parent,"=",1,true) then
+ colors.registerspotcolor(name, parent) -- does that work? no attr
elseif name ~= parent then
local cp = attributes_list[a_color][parent]
if cp then
local t = settings_to_hash_strict(str)
if t then
local tp = tonumber(t.p) or 1
- do_registerspotcolor(parent, name, cp, t.e, 1, "", tp) -- p not really needed, only diagnostics
+ do_registerspotcolor(parent,cp,t.e,1,"",tp) -- p not really needed, only diagnostics
if name and name ~= "" then
- definecolor(name, register_color(name,'spot', parent, 1, "", tp), true)
+ definecolor(name,register_color(name,'spot',parent,1,"",tp),true)
local ta, tt = t.a, t.t
if ta and tt then
definetransparent(name, transparencies.register(name,transparent[ta] or tonumber(ta) or 1,tonumber(tt) or 1), global)
@@ -415,13 +449,56 @@ function colors.registerspotcolor(parent, str)
local t = settings_to_hash_strict(str)
e = (t and t.e) or ""
end
- do_registerspotcolor(parent, "dummy", cp, e, 1, "", 1) -- p not really needed, only diagnostics
+ do_registerspotcolor(parent, cp, e, 1, "", 1) -- p not really needed, only diagnostics
+ end
+end
+
+local function f(i,colors,fraction)
+ local otf = 0
+ for c=1,#colors do
+ otf = otf + (tonumber(fraction[c]) or 1) * colors[c][i]
+ end
+ if otf > 1 then
+ otf = 1
+ end
+ return otf
+end
+
+local function definemixcolor(makename,name,fractions,cs,global,freeze)
+ local values = { }
+ for i=1,#cs do -- do fraction in here
+ local v = colorvalues[cs[i]]
+ if not v then
+ return
+ end
+ values[i] = v
+ end
+ if #values > 0 then
+ local csone = values[1][1] -- keep this local as in the old code
+ local ca
+ if csone == 2 then
+ ca = register_color(name,'gray',f(2,values,fractions))
+ elseif csone == 3 then
+ ca = register_color(name,'rgb', f(3,values,fractions),
+ f(4,values,fractions),
+ f(5,values,fractions))
+ elseif csone == 4 then
+ ca = register_color(name,'cmyk',f(6,values,fractions),
+ f(7,values,fractions),
+ f(8,values,fractions),
+ f(9,values,fractions))
+ else
+ ca = register_color(name,'gray',f(2,values,fractions))
+ end
+ definecolor(name,ca,global,freeze)
+ else
+ report_colors("invalid specification of components for color %a",makename)
end
end
-function colors.definemultitonecolor(name,multispec,colorspec,selfspec)
+local function definemultitonecolor(name,multispec,colorspec,selfspec)
local dd, pp, nn, max = { }, { }, { }, 0
- for k,v in gmatch(multispec,"(%a+)=([^%,]*)") do -- use settings_to_array
+ for k,v in gmatch(multispec,"([^=,]+)=([^%,]*)") do -- use settings_to_array
max = max + 1
dd[max] = k
pp[max] = v
@@ -432,17 +509,17 @@ function colors.definemultitonecolor(name,multispec,colorspec,selfspec)
local parent = gsub(lower(nn),"[^%d%a%.]+","_")
if not colorspec or colorspec == "" then
local cc = { } for i=1,max do cc[i] = l_color[dd[i]] end
- colors.definemixcolor(parent,pp,cc,global,freeze) -- can become local
+ definemixcolor(name,parent,pp,cc,global,freeze) -- can become local
else
if selfspec ~= "" then
colorspec = colorspec .. "," .. selfspec
end
- colors.defineprocesscolor(parent,colorspec,true,true)
+ defineprocesscolor(parent,colorspec,true,true)
end
local cp = attributes_list[a_color][parent]
dd, pp = concat(dd,','), concat(pp,',')
if cp then
- do_registerspotcolor(parent, name, cp, "", max, dd, pp)
+ do_registerspotcolor(parent, cp, "", max, dd, pp)
definecolor(name, register_color(name, 'spot', parent, max, dd, pp), true)
local t = settings_to_hash_strict(selfspec)
if t and t.a and t.t then
@@ -456,8 +533,13 @@ function colors.definemultitonecolor(name,multispec,colorspec,selfspec)
colorset[name] = true-- maybe we can store more
end
--- will move to mlib-col as colors in m,p are somewhat messy due to the fact
--- that we cannot cast
+colors.defineprocesscolor = defineprocesscolor
+colors.definespotcolor = definespotcolor
+colors.definemultitonecolor = definemultitonecolor
+
+-- will move to mlib-col as colors in mp are somewhat messy due to the fact
+-- that we cannot cast .. so we really need to use (s,s,s) for gray in order
+-- to be able to map onto 'color'
local function mpcolor(model,ca,ta,default)
local cv = colorvalues[ca]
@@ -475,7 +557,8 @@ local function mpcolor(model,ca,ta,default)
elseif model == 4 then
return formatters["transparent(%s,%s,cmyk(%s,%s,%s,%s))"](tv[1],tv[2],cv[6],cv[7],cv[8],cv[9])
elseif model == 5 then
- return formatters['transparent(%s,%s,multitonecolor("%s",%s,"%s","%s"))'](tv[1],tv[2],cv[10],cv[11],cv[12],cv[13])
+ -- return formatters['transparent(%s,%s,multitonecolor("%s",%s,"%s","%s"))'](tv[1],tv[2],cv[10],cv[11],cv[12],cv[13])
+ return formatters['transparent(%s,%s,namedcolor("%s"))'](tv[1],tv[2],cv[10])
else -- see ** in meta-ini.mkiv: return formatters["transparent(%s,%s,(%s))"](tv[1],tv[2],cv[2])
return formatters["transparent(%s,%s,(%s,%s,%s))"](tv[1],tv[2],cv[3],cv[4],cv[5])
end
@@ -487,7 +570,8 @@ local function mpcolor(model,ca,ta,default)
elseif model == 4 then
return formatters["cmyk(%s,%s,%s,%s)"](cv[6],cv[7],cv[8],cv[9])
elseif model == 5 then
- return formatters['multitonecolor("%s",%s,"%s","%s")'](cv[10],cv[11],cv[12],cv[13])
+ -- return formatters['multitonecolor("%s",%s,"%s","%s")'](cv[10],cv[11],cv[12],cv[13])
+ return formatters['namedcolor("%s")'](cv[10])
else -- see ** in meta-ini.mkiv: return formatters["%s"]((cv[2]))
return formatters["(%s,%s,%s)"](cv[3],cv[4],cv[5])
end
@@ -499,7 +583,7 @@ local function mpcolor(model,ca,ta,default)
end
local function mpnamedcolor(name)
- return mpcolor(gettexattribute(a_colorspace),l_color[name] or l_color.black)
+ return mpcolor(texgetattribute(a_colorspace),l_color[name] or l_color.black)
end
local function mpoptions(model,ca,ta,default) -- will move to mlib-col
@@ -669,7 +753,7 @@ local function complement(one,fraction,i)
return otf
end
-function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,specs,global,freeze)
+defineintermediatecolor = function(name,fraction,c_one,c_two,a_one,a_two,specs,global,freeze)
fraction = tonumber(fraction) or 1
local one, two = colorvalues[c_one], colorvalues[c_two]
if one then
@@ -725,48 +809,17 @@ function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,sp
end
end
-local function f(i,colors,fraction)
- local otf = 0
- for c=1,#colors do
- otf = otf + (tonumber(fraction[c]) or 1) * colors[c][i]
- end
- if otf > 1 then
- otf = 1
- end
- return otf
-end
-
-function colors.definemixcolor(name,fractions,cs,global,freeze)
- local values = { }
- for i=1,#cs do -- do fraction in here
- local v = colorvalues[cs[i]]
- if not v then
- return
- end
- values[i] = v
- end
- local csone = values[1][1]
- local ca
- if csone == 2 then
- ca = register_color(name,'gray',f(2,values,fractions))
- elseif csone == 3 then
- ca = register_color(name,'rgb', f(3,values,fractions),
- f(4,values,fractions),
- f(5,values,fractions))
- elseif csone == 4 then
- ca = register_color(name,'cmyk',f(6,values,fractions),
- f(7,values,fractions),
- f(8,values,fractions),
- f(9,values,fractions))
- else
- ca = register_color(name,'gray',f(2,values,fractions))
- end
- definecolor(name,ca,global,freeze)
-end
+colors.defineintermediatecolor = defineintermediatecolor
-- for the moment downward compatible
-local patterns = { "colo-imp-%s.mkiv", "colo-imp-%s.tex", "colo-%s.mkiv", "colo-%s.tex" }
+local patterns = {
+ "colo-imp-%s.mkiv",
+ "colo-imp-%s.tex",
+ -- obsolete:
+ "colo-%s.mkiv",
+ "colo-%s.tex"
+}
local function action(name,foundname)
-- could be one command
@@ -783,7 +836,7 @@ local function failure(name)
report_colors("unknown library %a",name)
end
-function colors.usecolors(name)
+local function usecolors(name)
commands.uselibrary {
category = "color definition",
name = name,
@@ -794,52 +847,121 @@ function colors.usecolors(name)
}
end
--- interface (todo: use locals)
+colors.usecolors = usecolors
-local setcolormodel = colors.setmodel
+-- backend magic
-function commands.setcolormodel(model,weight)
- settexattribute(a_colorspace,setcolormodel(model,weight))
-end
+local currentpagecolormodel
--- function commands.setrastercolor(name,s)
--- settexattribute(a_color,colors.definesimplegray(name,s))
--- end
+function colors.setpagecolormodel(model)
+ currentpagecolormodel = model
+end
-function commands.registermaintextcolor(a)
- colors.main = a
+function colors.getpagecolormodel()
+ return currentpagecolormodel
end
-commands.defineprocesscolor = colors.defineprocesscolor
-commands.definespotcolor = colors.definespotcolor
-commands.definemultitonecolor = colors.definemultitonecolor
-commands.definetransparency = colors.definetransparency
-commands.defineintermediatecolor = colors.defineintermediatecolor
+-- interface
-function commands.spotcolorname (a) context(spotcolorname (a)) end
-function commands.spotcolorparent (a) context(spotcolorparent (a)) end
-function commands.spotcolorvalue (a) context(spotcolorvalue (a)) end
-function commands.colorcomponents (a,s) context(colorcomponents (a,s)) end
-function commands.transparencycomponents(a,s) context(transparencycomponents(a,s)) end
-function commands.processcolorcomponents(a,s) context(processcolorcomponents(a,s)) end
-function commands.formatcolor (...) context(formatcolor (...)) end
-function commands.formatgray (...) context(formatgray (...)) end
+local setcolormodel = colors.setmodel
-function commands.mpcolor(model,ca,ta,default)
- context(mpcolor(model,ca,ta,default))
-end
+implement {
+ name = "setcolormodel",
+ arguments = { "string", "boolean" },
+ actions = function(model,weight)
+ texsetattribute(a_colorspace,setcolormodel(model,weight))
+ end
+}
-function commands.mpoptions(model,ca,ta,default)
- context(mpoptions(model,ca,ta,default))
-end
+implement {
+ name = "setpagecolormodel",
+ actions = colors.setpagecolormodel,
+ arguments = { "string" },
+}
-function commands.doifblackelse(a)
- commands.doifelse(colors.isblack(a))
-end
+implement {
+ name = "defineprocesscolorlocal",
+ actions = defineprocesscolor,
+ arguments = { "string", "string", false, "boolean" }
+}
-function commands.doifdrawingblackelse()
- commands.doifelse(colors.isblack(gettexattribute(a_color)))
-end
+implement {
+ name = "defineprocesscolorglobal",
+ actions = defineprocesscolor,
+ arguments = { "string", "string", true, "boolean" }
+}
+
+implement {
+ name = "defineprocesscolordummy",
+ actions = defineprocesscolor,
+ arguments = { "'d_u_m_m_y'", "string", false, false }
+}
+
+implement {
+ name = "definespotcolorglobal",
+ actions = definespotcolor,
+ arguments = { "string", "string", "string", true }
+}
+
+implement {
+ name = "definemultitonecolorglobal",
+ actions = definemultitonecolor,
+ arguments = { "string", "string", "string", "string", true }
+}
+
+implement {
+ name = "registermaintextcolor",
+ actions = function(main)
+ colors.main = main
+ end,
+ arguments = { "integer" }
+}
+
+implement {
+ name = "definetransparency",
+ actions = definetransparency,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "defineintermediatecolor",
+ actions = defineintermediatecolor,
+ arguments = { "string", "string", "integer", "integer", "integer", "integer", "string", false, "boolean" }
+}
+
+implement { name = "spotcolorname", actions = { spotcolorname, context }, arguments = "integer" }
+implement { name = "spotcolorparent", actions = { spotcolorparent, context }, arguments = "integer" }
+implement { name = "spotcolorvalue", actions = { spotcolorvalue, context }, arguments = "integer" }
+implement { name = "colorcomponents", actions = { colorcomponents, context }, arguments = "integer" }
+implement { name = "transparencycomponents", actions = { transparencycomponents, context }, arguments = "integer" }
+implement { name = "processcolorcomponents", actions = { processcolorcomponents, context }, arguments = "integer" }
+implement { name = "formatcolor", actions = { formatcolor, context }, arguments = { "integer", "string" } }
+implement { name = "formatgray", actions = { formatgray, context }, arguments = { "integer", "string" } }
+
+implement {
+ name = "mpcolor",
+ actions = { mpcolor, context },
+ arguments = { "integer", "integer", "integer" }
+}
+
+implement {
+ name = "mpoptions",
+ actions = { mpoptions, context },
+ arguments = { "integer", "integer", "integer" }
+}
+
+local ctx_doifelse = commands.doifelse
+
+implement {
+ name = "doifelsedrawingblack",
+ actions = function() ctx_doifelse(isblack(texgetattribute(a_color))) end
+}
+
+implement {
+ name = "doifelseblack",
+ actions = { isblack, ctx_doifelse },
+ arguments = "integer"
+}
-- function commands.withcolorsinset(name,command)
-- local set
@@ -859,51 +981,50 @@ end
-- end
-- end
-commands.startcolorset = colors.pushset
-commands.stopcolorset = colors.popset
-
-commands.usecolors = colors.usecolors
+implement { name = "startcolorset", actions = pushset, arguments = "string" }
+implement { name = "stopcolorset", actions = popset }
+implement { name = "usecolors", actions = usecolors, arguments = "string" }
-- bonus
-function commands.pgfxcolorspec(ca) -- {}{}{colorspace}{list}
- -- local cv = attributes.colors.values[ca]
- local cv = colorvalues[ca]
- if cv then
- local model = cv[1]
- if model == 2 then
- context("{gray}{%1.3f}",cv[2])
- elseif model == 3 then
- context("{rgb}{%1.3f,%1.3f,%1.3f}",cv[3],cv[4],cv[5])
- elseif model == 4 then
- context("{cmyk}{%1.3f,%1.3f,%1.3f,%1.3f}",cv[6],cv[7],cv[8],cv[9])
+do
+
+ local function pgfxcolorspec(model,ca) -- {}{}{colorspace}{list}
+ -- local cv = attributes.colors.values[ca]
+ local cv = colorvalues[ca]
+ local str
+ if cv then
+ if model and model ~= 0 then
+ model = model
+ else
+ model = forcedmodel(texgetattribute(a_colorspace))
+ if model == 1 then
+ model = cv[1]
+ end
+ end
+ if model == 3 then
+ str = formatters["{rgb}{%1.3f,%1.3f,%1.3f}"](cv[3],cv[4],cv[5])
+ elseif model == 4 then
+ str = formatters["{cmyk}{%1.3f,%1.3f,%1.3f,%1.3f}"](cv[6],cv[7],cv[8],cv[9])
+ else -- there is no real gray
+ str = formatters["{rgb}{%1.3f,%1.3f,%1.3f}"](cv[2],cv[2],cv[2])
+ end
else
- context("{gray}{%1.3f}",cv[2])
+ str = "{rgb}{0,0,0}"
end
- else
- context("{gray}{0}")
+ if trace_pgf then
+ report_pgf("model %a, string %a",model,str)
+ end
+ return str
end
-end
--- function commands.pgfregistercolor(name,attribute)
--- local cv = colorvalues[ca]
--- context.pushcatcodes('prt')
--- if cv then
--- local model = forcedmodel(cv[1])
--- if model == 2 then
--- context["pgfutil@definecolor"]("{%s}{gray}{%1.3f}",name,cv[2])
--- elseif model == 3 then
--- context["pgfutil@definecolor"]("{%s}{rgb}{%1.3f,%1.3f,%1.3f}",name,cv[3],cv[4],cv[5])
--- elseif model == 4 then
--- context["pgfutil@definecolor"]("{%s}{cmyk}{%1.3f,%1.3f,%1.3f,%1.3f}",name,cv[6],cv[7],cv[8],cv[9])
--- else
--- context["pgfutil@definecolor"]("{%s}{gray}{0}",name)
--- end
--- else
--- context["pgfutil@definecolor"]("{%s}{gray}{0}",name)
--- end
--- context.popcatcodes()
--- end
+ implement {
+ name = "pgfxcolorspec",
+ actions = { pgfxcolorspec, context },
+ arguments = { "integer", "integer" }
+ }
+
+end
-- handy
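
The recurring pattern in this file: a `commands.foo` function reachable through `\ctxcommand{foo(...)}` becomes an `interfaces.implement` registration that declares its argument types and can chain actions, so `{ isblack, ctx_doifelse }` means "scan an integer, feed it to isblack, pass the result on to doifelse". A condensed, self-contained sketch of that chaining (the real implement builds a token scanner; the toy version below only mimics the declarative surface):

local colorvalues = { [5] = { 2, 0 } }            -- pretend attribute 5 is a black gray

local function isblack(ca)
    local cv = ca > 0 and colorvalues[ca]
    return (cv and cv[2] == 0) or false
end

local function ctx_doifelse(b)
    print(b and "first branch" or "second branch")
end

local registry = { }

local function implement(specification)           -- toy stand-in for interfaces.implement
    local actions = specification.actions
    if type(actions) == "table" then
        local chain = actions
        actions = function(...)                    -- the output of one action feeds the next
            local result = chain[1](...)
            for i=2,#chain do
                result = chain[i](result)
            end
            return result
        end
    end
    registry[specification.name] = actions
end

implement {
    name      = "doifelseblack",                   -- reachable as \clf_doifelseblack at the TeX end
    actions   = { isblack, ctx_doifelse },
    arguments = "integer",
}

registry.doifelseblack(5)                          -- prints "first branch"
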
diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv
index 6aa51b218..1eb3d8d7e 100644
--- a/tex/context/base/colo-ini.mkiv
+++ b/tex/context/base/colo-ini.mkiv
@@ -203,9 +203,9 @@
%D \usecolors[dem]
%D \stoptyping
-\unexpanded\def\startcolorset[#1]{\ctxcommand{startcolorset("#1")}}
-\unexpanded\def\stopcolorset {\ctxcommand{stopcolorset()}}
-\unexpanded\def\usecolors [#1]{\ctxcommand{usecolors("#1")}}
+\unexpanded\def\startcolorset[#1]{\clf_startcolorset{#1}}
+\unexpanded\def\stopcolorset {\clf_stopcolorset}
+\unexpanded\def\usecolors [#1]{\clf_usecolors{#1}}
\let\setupcolor\usecolors
@@ -255,14 +255,14 @@
\colo_helpers_set_current_model
\ifproductionrun
\edef\p_pagecolormodel{\directcolorsparameter\c!pagecolormodel}%
- \ctxcommand{synchronizecolormodel("\ifx\p_pagecolormodel\v!auto \currentcolormodel \else \p_pagecolormodel \fi")}%
+ \clf_setpagecolormodel{\ifx\p_pagecolormodel\v!auto\currentcolormodel\else\p_pagecolormodel\fi}%
\fi
\to \everysetupcolors
\appendtoks
\setupcolors[\c!state=\v!start]%
- \ctxcommand{enablecolor()}% % this can as well happen when
- \ctxcommand{enabletransparency()}% % the handler is defined in lua
+ \clf_enablecolor % this can as well happen when
+ \clf_enabletransparency % the handler is defined in lua
\let\colo_helpers_show_message\showmessage
\to \everyjob
@@ -367,7 +367,7 @@
{\dodoubleargument\colo_palets_define}
\unexpanded\def\colo_palets_define[#1][#2]% todo
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{%\colo_helpers_show_message\m!colors6{#1}%
\let\m_colo_palets_tmp\empty
\setevalue{\??colorpaletspecification#1}{#2}%
@@ -382,7 +382,7 @@
% {\colo_palets_define_two{#1}[#2]}%
\def\colo_palets_define_one#1#2% get rid of { }
- {\doifassignmentelse{#2} % catch empty entries
+ {\doifelseassignment{#2} % catch empty entries
{\colo_palets_define_two{#1}[#2]}
{\colo_palets_define_three{#1}{#2}}}
@@ -563,7 +563,7 @@
\def\colo_helpers_set_model#1% direct
{\edef\currentcolormodel{#1}%
- \ctxcommand{setcolormodel('\currentcolormodel',\v_colo_weight_state)}} % sets attribute at lua end
+ \clf_setcolormodel{\currentcolormodel}\v_colo_weight_state\relax} % sets attribute at lua end
\colo_helpers_set_model\s!all
@@ -688,24 +688,42 @@
\attribute\colorattribute\attributeunsetvalue
\attribute\transparencyattribute\attributeunsetvalue}
+% todo: check if color is overloading a non-color command
+
+\newcount\c_colo_protection
+
+\unexpanded\def\startprotectedcolors
+ {\advance\c_colo_protection\plusone}
+
+\unexpanded\def\stopprotectedcolors
+ {\advance\c_colo_protection\minusone}
+
\def\colo_basics_define[#1][#2]%
- {\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_defineprocesscolorlocal{#1}{#2}\v_colo_freeze_state\relax
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_global[#1][#2]%
- {\ctxcommand{defineprocesscolor("#1","#2",true,\v_colo_freeze_state)}%
- \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_defineprocesscolorglobal{#1}{#2}\v_colo_freeze_state\relax
+ \ifcase\c_colo_protection
+ \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_named[#1][#2]% currently same as define
- {\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_defineprocesscolorlocal{#1}{#2}\v_colo_freeze_state\relax
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\dodefinefastcolor[#1][#2]% still not fast but ok (might change)
- {\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_defineprocesscolorlocal{#1}{#2}\v_colo_freeze_state\relax
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_defined_and_activated#1%
- {\ctxcommand{defineprocesscolor("\v_colo_dummy_name","#1",false,false)}%
+ {\clf_defineprocesscolordummy{#1}%
\colo_helpers_activate_dummy}
\def\colo_basics_define_process
@@ -716,12 +734,16 @@
\fi}
\def\colo_basics_define_process_yes[#1][#2][#3]%
- {\ctxcommand{defineprocesscolor("#1","\processcolorcomponents{#2},#3",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_defineprocesscolorlocal{#1}{\processcolorcomponents{#2},#3}\v_colo_freeze_state\relax
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_process_nop[#1][#2][#3]%
- {\ctxcommand{defineprocesscolor("#1","#2",false,\v_colo_freeze_state)}%
- \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_defineprocesscolorlocal{#1}{#2}\v_colo_freeze_state\relax
+ \ifcase\c_colo_protection
+ \unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
% Spotcolors used setxvalue but that messes up currentcolor
% and probably no global is needed either but they are global
@@ -729,17 +751,21 @@
% spot colors often are a document wide property
\def\colo_basics_define_spot[#1][#2][#3]%
- {\ctxcommand{definespotcolor("#1","#2","#3",true)}%
- \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_definespotcolorglobal{#1}{#2}{#3}%
+ \ifcase\c_colo_protection
+ \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
\def\colo_basics_define_multitone[#1][#2][#3][#4]%
- {\ctxcommand{definemultitonecolor("#1","#2","#3","#4",true)}%
- \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}}
+ {\clf_definemultitonecolorglobal{#1}{#2}{#3}{#4}%
+ \ifcase\c_colo_protection
+ \unexpanded\setgvalue{#1}{\colo_helpers_activate{#1}}%
+ \fi}
%D Transparencies (only):
\def\colo_basics_define_transpancy[#1][#2]%
- {\ctxcommand{definetransparency("#1",#2)}}
+ {\clf_definetransparency{#1}#2\relax}
% A goodie that replaces the startMPcolor hackery
@@ -791,10 +817,15 @@
{\colo_basics_define_intermediate_indeed[#1][#2][#3]}
\def\colo_basics_define_intermediate_indeed[#1][#2,#3,#4][#5]%
- {\ctxcommand{defineintermediatecolor("#1","#2",
- \thecolorattribute{#3},\thecolorattribute{#4},
- \thetransparencyattribute{#3},\thetransparencyattribute{#4},
- "#5",false,\v_colo_freeze_state)}% not global
+ {\clf_defineintermediatecolor % not global
+ {#1}{#2}%
+ \thecolorattribute{#3} %
+ \thecolorattribute{#4} %
+ \thetransparencyattribute{#3} %
+ \thetransparencyattribute{#4} %
+ {#5}%
+ \v_colo_freeze_state
+ \relax
\unexpanded\setvalue{#1}{\colo_helpers_activate{#1}}}
%D Here is a more efficient helper for pgf:
@@ -833,8 +864,10 @@
%D
%D \stopmode
-\def\pgf@context@registercolor#1% bonus macro
- {\setevalue{\string\color@#1}{\noexpand\xcolor@{}{}\ctxcommand{pgfxcolorspec(\thecolorattribute{#1})}}}
+\def\pgf@context@registercolor#1{\setevalue{\string\color@#1}{\noexpand\xcolor@{}{}\clf_pgfxcolorspec\zerocount\thecolorattribute{#1}}}
+\def\pgf@context@registergray #1{\setevalue{\string\color@#1}{\noexpand\xcolor@{}{}\clf_pgfxcolorspec\plustwo \thecolorattribute{#1}}}
+\def\pgf@context@registerrgb #1{\setevalue{\string\color@#1}{\noexpand\xcolor@{}{}\clf_pgfxcolorspec\plusthree\thecolorattribute{#1}}}
+\def\pgf@context@registercmyk #1{\setevalue{\string\color@#1}{\noexpand\xcolor@{}{}\clf_pgfxcolorspec\plusfour \thecolorattribute{#1}}}
%D \starttyping
%D \ifdefined\pgf@context@registercolor
@@ -849,7 +882,7 @@
%D command. Later on we will explain the use of palets. We
%D define ourselves a color conditional first.
-\def\doifcolorelse#1%
+\def\doifelsecolor#1%
{\ifcsname\??colorattribute\currentcolorprefix#1\endcsname
\expandafter\firstoftwoarguments
\else\ifcsname\??colorattribute#1\endcsname
@@ -858,6 +891,8 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
+\let\doifcolorelse\doifelsecolor
+
\def\doifcolor#1%
{\ifcsname\??colorattribute\currentcolorprefix#1\endcsname
\expandafter\firstofoneargument
@@ -916,7 +951,7 @@
{\let\maintextcolor\s!themaintextcolor
\definecolor[\maintextcolor][#1]% can be fast one
\colo_helpers_activate\maintextcolor
- \ctxcommand{registermaintextcolor(\thecolorattribute\maintextcolor)}}
+ \clf_registermaintextcolor\thecolorattribute\maintextcolor\relax}
\unexpanded\def\starttextcolor[#1]%
{\doifsomething{#1}
@@ -958,7 +993,7 @@
\to \everysetupcolors
\def\colo_palets_define_set#1#2#3%
- {\doifassignmentelse{#3}% \definepalet[test][xx={y=.4}]
+ {\doifelseassignment{#3}% \definepalet[test][xx={y=.4}]
{\definecolor[\??colorpalet#1:#2][#3]%
\colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_palet_ca{#1}{#2}}%
\colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_palet_cs{#1}{#2}}%
@@ -1029,27 +1064,43 @@
\let\colorformatseparator\space
-\def\MPcolor #1{\ctxcommand{mpcolor(\number\attribute\colormodelattribute,\number\colo_helpers_inherited_current_ca{#1},\number\colo_helpers_inherited_current_ta{#1})}}
-\def\MPoptions #1{\ctxcommand{mpoptions(\number\attribute\colormodelattribute,\number\colo_helpers_inherited_current_ca{#1},\number\colo_helpers_inherited_current_ta{#1})}}
+\def\MPcolor#1%
+ {\clf_mpcolor
+ \attribute\colormodelattribute
+ \colo_helpers_inherited_current_ca{#1} %
+ \colo_helpers_inherited_current_ta{#1} }
+
+
+\def\MPoptions#1%
+ {\clf_mpoptions
+ \attribute\colormodelattribute
+ \colo_helpers_inherited_current_ca{#1} %
+ \colo_helpers_inherited_current_ta{#1} }
+
\def\thecolorattribute #1{\number\csname\??colorattribute \ifcsname\??colorattribute \currentcolorprefix#1\endcsname\currentcolorprefix#1\else\ifcsname\??colorattribute #1\endcsname#1\fi\fi\endcsname}
\def\thetransparencyattribute#1{\number\csname\??transparencyattribute\ifcsname\??transparencyattribute\currentcolorprefix#1\endcsname\currentcolorprefix#1\else\ifcsname\??transparencyattribute#1\endcsname#1\fi\fi\endcsname}
-
\def\thecolormodelattribute {\the\attribute\colormodelattribute}
-\def\internalspotcolorname #1{\ctxcommand{spotcolorname(\thecolorattribute{#1})}}
-\def\internalspotcolorparent #1{\ctxcommand{spotcolorparent(\thecolorattribute{#1})}}
-\def\internalspotcolorsize #1{\ctxcommand{spotcolorvalue(\thecolorattribute{#1})}}
+\def\internalspotcolorname #1{\clf_spotcolorname \thecolorattribute{#1} }
+\def\internalspotcolorparent #1{\clf_spotcolorparent\thecolorattribute{#1} }
+\def\internalspotcolorsize #1{\clf_spotcolorvalue \thecolorattribute{#1} }
+
+\def\colorcomponents #1{\clf_colorcomponents \thecolorattribute {#1} }
+\def\transparencycomponents #1{\clf_transparencycomponents\thetransparencyattribute{#1} }
+\def\processcolorcomponents #1{\clf_processcolorcomponents\thecolorattribute {#1} }
+
+\def\colorvalue #1{\clf_formatcolor\thecolorattribute{#1}{\colorformatseparator}}
+\def\grayvalue #1{\clf_formatgray \thecolorattribute{#1}{\colorformatseparator}}
-\def\colorcomponents #1{\ctxcommand{colorcomponents(\thecolorattribute{#1})}}
-\def\transparencycomponents #1{\ctxcommand{transparencycomponents(\thetransparencyattribute{#1})}}
-\def\processcolorcomponents #1{\ctxcommand{processcolorcomponents(\thecolorattribute{#1},",")}}
+\def\doifelseblack #1{\clf_doifelseblack\thecolorattribute{#1} }
+\def\doifelsedrawingblack {\clf_doifelsedrawingblack}
-\def\colorvalue #1{\ctxcommand{formatcolor(\thecolorattribute{#1},"\colorformatseparator")}}
-\def\grayvalue #1{\ctxcommand{formatgray (\thecolorattribute{#1},"\colorformatseparator")}}
+\let\doifblackelse \doifelseblack
+\let\doifdrawingblackelse \doifelsedrawingblack
-\def\doifblackelse #1{\ctxcommand{doifblackelse(\thecolorattribute{#1})}}
-\def\doifdrawingblackelse {\ctxcommand{doifdrawingblackelse()}}
+\let\doifblackelse \doifelseblack
+\let\doifdrawingblackelse\doifelsedrawingblack
%D \macros
%D {forcecolorhack}
@@ -1071,7 +1122,7 @@
% \normal added else fails in metafun manual (leaders do a hard scan)
-\unexpanded\def\forcecolorhack{\leaders\hrule\hskip\zeropoint}
+\unexpanded\def\forcecolorhack{\leaders\hrule\hskip\zeropoint\relax} % relax is needed !
%D We default to the colors defined in \type {colo-imp-rgb} and
%D support both \RGB\ and \CMYK\ output. Transparencies are defined
diff --git a/tex/context/base/colo-run.mkiv b/tex/context/base/colo-run.mkiv
index 5084fdd35..16f54c3b5 100644
--- a/tex/context/base/colo-run.mkiv
+++ b/tex/context/base/colo-run.mkiv
@@ -46,7 +46,7 @@
\gdef\colo_show_palet[#1][#2]%
{\ifcsname\??colorpalet#1\endcsname
- \doifinsetelse\v!vertical{#2} \colo_palets_show_vertical \colo_palets_show_horizontal [#1][#2]%
+ \doifelseinset\v!vertical{#2} \colo_palets_show_vertical \colo_palets_show_horizontal [#1][#2]%
\fi}
\gdef\colo_palets_show_vertical[#1][#2]%
@@ -58,7 +58,7 @@
\setuppalet[#1]
\tabskip\zeropoint
\def\colo_palets_show_palet##1%
- {\doifinsetelse\v!number{#2}{##1\hskip.5em}{}&
+ {\doifelseinset\v!number{#2}{##1\hskip.5em}{}&
\color[##1]{\vrule\s!width3em\s!height\strutht\s!depth\strutdp}%
\graycolor[##1]{\vrule\s!width3em\s!height\strutht\s!depth\strutdp}&
\doifinset\v!value{#2}{\hskip.5em\colorvalue{##1}}\crcr}
@@ -155,7 +155,7 @@
\gdef\colo_groups_show[#1][#2]%
{\doifcolor{#1:1}
- {\doifinsetelse\v!vertical{#2} \colo_groups_show_vertical \colo_groups_show_horizontal [#1][#2]}}
+ {\doifelseinset\v!vertical{#2} \colo_groups_show_vertical \colo_groups_show_horizontal [#1][#2]}}
\gdef\colo_groups_show_horizontal[#1][#2]%
{\vbox
@@ -177,7 +177,7 @@
\hbox
{\doifinset\v!name{#2}
{\strut
- \doifinsetelse\v!value{#2}
+ \doifelseinset\v!value{#2}
{\raise3\lineheight\hbox{#1\hskip.5em}}
{#1}%
\hskip.5em}%
diff --git a/tex/context/base/colo-xwi.mkii b/tex/context/base/colo-xwi.mkii
index 557e9c57c..13d04759e 100644
--- a/tex/context/base/colo-xwi.mkii
+++ b/tex/context/base/colo-xwi.mkii
@@ -8,11 +8,12 @@
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
%D I've forgotten where I got these definitions from, but maybe
-%D they can be of use.
+%D they can be of use. Watch out, these colors are in the rgb
+%D color space, so cyan is not defined in cmyk!
\definecolor [aliceblue] [r=0.94,g=0.97,b=1.00]
\definecolor [antiquewhite] [r=0.98,g=0.92,b=0.84]
diff --git a/tex/context/base/cont-fil.mkii b/tex/context/base/cont-fil.mkii
index e05d8eac4..604ea233d 100644
--- a/tex/context/base/cont-fil.mkii
+++ b/tex/context/base/cont-fil.mkii
@@ -124,4 +124,6 @@
\definefilesynonym [letter] [cor-01]
\definefilesynonym [memo] [cor-02]
+\definefilesynonym [syn-01] [syntax]
+
\endinput
diff --git a/tex/context/base/cont-fil.mkiv b/tex/context/base/cont-fil.mkiv
index 8e9be155b..a1159b7f5 100644
--- a/tex/context/base/cont-fil.mkiv
+++ b/tex/context/base/cont-fil.mkiv
@@ -116,5 +116,6 @@
\definefilesynonym [mat-11] [math-characters]
\definefilesynonym [fnt-11] [fonts-system]
\definefilesynonym [fnt-23] [fonts-shapes]
+\definefilesynonym [syn-01] [syntax]
\endinput
diff --git a/tex/context/base/cont-log.mkiv b/tex/context/base/cont-log.mkiv
index 99e08450b..598140d32 100644
--- a/tex/context/base/cont-log.mkiv
+++ b/tex/context/base/cont-log.mkiv
@@ -13,10 +13,9 @@
\writestatus{loading}{ConTeXt TeX Logos}
-%D The system that is used to typeset this text is called \TEX,
-%D typeset with an lowered~E. From te beginning of \TEX,
-%D authors of macro packages adapted this raising and lowering
-%D style. In this module we define some of those logos.
+%D The system that is used to typeset this text is called \TEX, typeset with a
+%D lowered~E. From the beginning of \TEX, authors of macro packages adapted this
+%D raising and lowering style. In this module we define some of those logos.
\unprotect
@@ -26,7 +25,7 @@
\setbox\scratchbox\hbox{M}%
T%
\kern-.1667\wd\scratchbox
- \lower.5ex\hbox{E}%
+ \lower.5\exheight\hbox{E}%
\kern-.125\wd\scratchbox
X%
\endgroup}
@@ -48,40 +47,40 @@
\unexpanded\def\TaBlE
{T%
- \kern-.27em\lower.5ex\hbox{A}%
- \kern-.18emB%
- \kern-.1em\lower.5ex\hbox{L}%
- \kern-.075emE}
+ \kern-.27\emwidth\lower.5\exheight\hbox{A}%
+ \kern-.18\emwidth B%
+ \kern-.1\emwidth\lower.5\exheight\hbox{L}%
+ \kern-.075\emwidth E}
\unexpanded\def\PiCTeX
{P%
- \kern-.12em\lower.5ex\hbox{I}%
- \kern-.075em C%
- \kern-.11em\TeX}
+ \kern-.12\emwidth\lower.5\exheight\hbox{I}%
+ \kern-.075\emwidth C%
+ \kern-.11\emwidth\TeX}
\def\syst_logos_ams_script#1%
- {$\cal\ifdim\bodyfontsize>1.1em\scriptstyle\fi#1$}
+ {$\cal\ifdim\bodyfontsize>1.1\emwidth\scriptstyle\fi#1$}
\unexpanded\def\AmSTeX
{\dontleavehmode
\syst_logos_ams_script A%
- \kern-.1667em\lower.5ex\hbox{\syst_logos_ams_script M}%
- \kern-.125em\syst_logos_ams_script S%
+ \kern-.1667\emwidth\lower.5\exheight\hbox{\syst_logos_ams_script M}%
+ \kern-.125\emwidth\syst_logos_ams_script S%
-\TeX}
\unexpanded\def\LamSTeX
{L%
- \kern-.4em\raise.3ex\hbox{\syst_logos_ams_script A}%
- \kern-.25em\lower.4ex\hbox{\syst_logos_ams_script M}%
- \kern-.1em{\syst_logos_ams_script S}%
+ \kern-.4\emwidth\raise.3\exheight\hbox{\syst_logos_ams_script A}%
+ \kern-.25\emwidth\lower.4\exheight\hbox{\syst_logos_ams_script M}%
+ \kern-.1\emwidth{\syst_logos_ams_script S}%
-\TeX}
-\unexpanded\def\AmSLaTeX
- {\dontleavehmode
- \syst_logos_ams_script A%
- \kern-.1667em\lower.5ex\hbox{\syst_logos_ams_script M}%
- \kern-.125em\AMSswitch S%
- -\LaTeX}
+% \unexpanded\def\AmSLaTeX
+% {\dontleavehmode
+% \syst_logos_ams_script A%
+% \kern-.1667\emwidth\lower.5\exheight\hbox{\syst_logos_ams_script M}%
+% \kern-.125\emwidth{\syst_logos_ams_script S}%
+% -\LaTeX}
%D Alternative \CONTEXT\ logo, first Idris S.~Hamid's version:
%D
@@ -94,31 +93,31 @@
\unexpanded\def\Context % wrong usage of \getscaledglyph
{\dontleavehmode
\begingroup
- C\kern -.0667em\getscaledglyph{.8}\empty{O\kern -.0667emN\kern
- -.0549emT\doifitalicelse{\kern-.1em}{\kern-.1667em}\lower.5ex\hbox
- {E}\doifitalicelse\empty{\kern-.11em}X\kern-.055emT}%
+ C\kern -.0667\emwidth\getscaledglyph{.8}\empty{O\kern -.0667\emwidth N\kern
+ -.0549\emwidth T\doifelseitalic{\kern-.1\emwidth}{\kern-.1667\emwidth}\lower.5\exheight\hbox
+ {E}\doifelseitalic\empty{\kern-.11em}X\kern-.055\emwidth T}%
\endgroup}
-%D The \METAFONT\ and \METAPOST\ logos adapt themselves to the
-%D current fontsize, an ugly but usefull hack.
+%D The \METAFONT\ and \METAPOST\ logos adapt themselves to the current fontsize, an
+%D ugly but useful hack.
\let\logofont\nullfont
-\loadmapfile[original-base.map] % stil needed?
+\loadmapfile[original-base.map] % still needed? not if we assume afm
\unexpanded\def\setMFPfont% more sensitive for low level changes
{\font\logofont=logo%
- \ifx\fontalternative\s!bf\else
- \ifx\fontalternative\s!it\else
- \ifx\fontalternative\s!sl\else
- \ifx\fontalternative\s!bi\else
- \ifx\fontalternative\s!bs\else
+ \ifx\fontalternative\s!bf\s!bf\else
+ \ifx\fontalternative\s!it\s!sl\else
+ \ifx\fontalternative\s!sl\s!sl\else
+ \ifx\fontalternative\s!bi\s!bf\else
+ \ifx\fontalternative\s!bs\s!bf\else
\fi\fi\fi\fi\fi
10 at \currentfontscale\bodyfontsize % there is no afm in the minimals yet
\logofont}
\def\syst_logos_meta_hyphen % there is no hyphenchar in this font
- {\discretionary{\vrule\s!height.33em\s!depth-.27em\s!width.33em}{}{}}
+ {\discretionary{\vrule\s!height.33\emwidth\s!depth-.27\emwidth\s!width.33\emwidth}{}{}}
\unexpanded\def\MetaFont
{\dontleavehmode
@@ -146,9 +145,8 @@
%D CONTEXT, PPCHTEX,
%D AMSTEX, LATEX, LAMSTEX}
%D
-%D We define the funny written ones as well as the less
-%D error prone upper case names (in \CONTEXT\ we tend to
-%D write all user defined commands, like abbreviations, in
+%D We define the funny written ones as well as the less error prone upper case names
+%D (in \CONTEXT\ we tend to write all user defined commands, like abbreviations, in
%D uppercase.)
\unexpanded\def\METAFONT {\MetaFont}
@@ -165,18 +163,18 @@
\unexpanded\def\LAMSTEX {\LamSTeX}
\unexpanded\def\INRSTEX {inrs\TeX}
-%D And this is how they show up: \TeX, \MetaFont, \MetaPost,
-%D \PiCTeX, \TaBlE, \ConTeXt, \PPCHTeX, \AmSTeX, \LaTeX,
-%D \LamSTeX.
+%D And this is how they show up: \TeX, \MetaFont, \MetaPost, \PiCTeX, \TaBlE, \ConTeXt,
+%D \PPCHTeX, \AmSTeX, \LaTeX, \LamSTeX.
%D Some placeholders:
-\unexpanded\def\eTeX {\mathematics{\varepsilon}-\TeX}
-\unexpanded\def\pdfTeX {pdf\TeX}
-\unexpanded\def\pdfeTeX{pdfe-\TeX}
-\unexpanded\def\luaTeX {lua\TeX}
-\unexpanded\def\metaTeX{meta\TeX}
-\unexpanded\def\XeTeX {X\lower.5ex\hbox{\kern-.15em\mirror{E}}\kern-.1667em\TeX}
+\unexpanded\def\eTeX {\mathematics{\varepsilon}-\TeX}
+\unexpanded\def\pdfTeX {pdf\TeX}
+\unexpanded\def\pdfeTeX {pdfe-\TeX}
+\unexpanded\def\luaTeX {lua\TeX}
+\unexpanded\def\luajitTeX{luajit\TeX}
+\unexpanded\def\metaTeX {meta\TeX}
+\unexpanded\def\XeTeX {X\lower.5\exheight\hbox{\kern-.15\emwidth\mirror{E}}\kern-.1667\emwidth\TeX}
% Adapted from a patch by Mojca:
@@ -185,9 +183,9 @@
\raise\dimexpr\ht\scratchbox+\dp\scratchbox\relax\hbox{\rotate[\c!rotation=180]{\box\scratchbox}}}
\unexpanded\def\XeTeX
- {X\lower.5ex
+ {X\lower.5\exheight
\hbox
- {\kern-.15em
+ {\kern-.15\emwidth
\iffontchar\font"018E\relax
\char"018E%
\else
@@ -198,14 +196,15 @@
\ifx\fontalternative\s!bs\syst_logos_xetex_e\else
\mirror{E}\fi\fi\fi\fi\fi
\fi}%
- \kern-.1667em\TeX}
+ \kern-.1667\emwidth\TeX}
-\let\ETEX \eTeX
-\let\PDFTEX \pdfTeX
-\let\PDFETEX\pdfeTeX
-\let\LUATEX \luaTeX
-\let\LuaTeX \luaTeX
-\let\XETEX \XeTeX
+\let\ETEX \eTeX
+\let\PDFTEX \pdfTeX
+\let\PDFETEX \pdfeTeX
+\let\LUATEX \luaTeX
+\let\LUAJITTEX\luajitTeX
+\let\LuaTeX \luaTeX
+\let\XETEX \XeTeX
% \unexpanded\def\MkApproved % joke, not used so it might move
% {\dontleavehmode\rotate
@@ -265,22 +264,36 @@
\TeX
\endgroup}
-\let\luaTeX \LuaTeX
-\let\LUATEX \LuaTeX
+\unexpanded\def\LuajitTeX
+ {\dontleavehmode
+ \begingroup
+ % at this moment there is no real need for kerning tT
+ Luajit\kern\zeropoint\TeX
+ \endgroup}
+
+\let\luaTeX \LuaTeX
+\let\luajitTeX\LuajitTeX
+\let\LUATEX \LuaTeX
+\let\LUAJITTEX\LuajitTeX
\unexpanded\def\MKII{MkII}
\unexpanded\def\MKIV{MkIV}
\unexpanded\def\MKVI{MkVI}
+\unexpanded\def\MKIX{MkIX}
+\unexpanded\def\MKXI{MkXI}
\unexpanded\def\MPII{MpII}
\unexpanded\def\MPIV{MpIV}
+\unexpanded\def\MPVI{MpVI}
\appendtoks
- \def\ConTeXt {ConTeXt}%
- \def\MetaPost{MetaPost}%
- \def\MetaFont{MetaFont}%
- \def\MetaFun {MetaFun}%
- \def\TeX {TeX}%
+ \def\ConTeXt {ConTeXt}%
+ \def\MetaPost {MetaPost}%
+ \def\MetaFont {MetaFont}%
+ \def\MetaFun {MetaFun}%
+ \def\TeX {TeX}%
+ \def\LuaTeX {LuaTeX}%
+ \def\LuajitTeX{LuajitTeX}%
\to \everysimplifycommands
\protect \endinput
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 134b1f08a..e9f259376 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.01.03 00:40}
+\newcontextversion{2015.05.15 23:03}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
@@ -24,6 +24,71 @@
%D Maybe:
+% \appendtoks
+% \inheritmaintextcolor
+% \to \everybeforenoteinsert
+
+% \appendtoks
+% \inheritmaintextcolor
+% \to \everymargindatacontent
+
+% This is experimental; if this changes we need to adapt the mb-mp style too. It's
+% not in the core yet.
+%
+% \def\ActionY{\blank\inleftmargin{\shownofparlines}}
+% \def\ActionN{\inleftmargin{\shownofparlines}}
+%
+% \saveparnumber
+% ...
+% \checknofparlines
+% \ifnum\nofparlines<2
+% \ActionY
+% \else
+% \ActionN
+% \fi
+% ...
+
+\newcount\c_typo_par_current
+\newcount\c_typo_par_saved
+\newcount\c_typo_par_lines
+\newcount\c_typo_par_temp
+
+\appendtoks
+ \advance\c_typo_par_current\plusone % local
+ %\llap{\infofont[\the\c_typo_par_saved:\the\c_typo_par_current]\quad}%
+\to \everypar
+
+\unexpanded\def\saveparnumber % the upcoming number
+ {\c_typo_par_saved\c_typo_par_current} % local
+
+\def\savedparnumber {\number\c_typo_par_saved}
+\def\currentparnumber{\number\c_typo_par_current}
+\def\nofparlines {\number\c_typo_par_lines}
+\let\savedparstate \empty
+
+\unexpanded\def\shownofparlines
+ {\dontleavehmode\hbox\bgroup
+ \infofont[\savedparstate]%
+ \egroup}
+
+\unexpanded\def\checknofparlines
+ {\c_typo_par_temp\numexpr\c_typo_par_saved+\plusone\relax
+ \ifhmode
+ \c_typo_par_lines\zerocount
+ \edef\savedparstate{\number\c_typo_par_temp:\number\c_typo_par_current\space-}%
+ \else\ifnum\c_typo_par_current=\c_typo_par_temp
+ \c_typo_par_lines\prevgraf
+ \edef\savedparstate{\number\c_typo_par_temp:\number\c_typo_par_current\space\number\prevgraf}%
+ \else\ifnum\c_typo_par_temp>\c_typo_par_current
+ \c_typo_par_lines\zerocount
+ \edef\savedparstate{\number\c_typo_par_temp:\number\c_typo_par_current\space-}%
+ \else
+ \c_typo_par_lines\maxdimen
+ \edef\savedparstate{\number\c_typo_par_temp:\number\c_typo_par_current\space+}%
+ \fi\fi\fi}
+
+%D Maybe:
+
\unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox}
\unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop}
diff --git a/tex/context/base/cont-run.lua b/tex/context/base/cont-run.lua
new file mode 100644
index 000000000..08bbe5c3c
--- /dev/null
+++ b/tex/context/base/cont-run.lua
@@ -0,0 +1,252 @@
+if not modules then modules = { } end modules ['cont-run'] = {
+ version = 1.001,
+ comment = "companion to cont-yes.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- When a style is loaded there is a good chance that we never enter
+-- this code.
+
+local report = logs.reporter("system")
+
+local type, tostring = type, tostring
+
+local report = logs.reporter("sandbox","call")
+local fastserialize = table.fastserialize
+local quoted = string.quoted
+local possiblepath = sandbox.possiblepath
+
+local context = context
+local implement = interfaces.implement
+
+local qualified = { }
+local writeable = { }
+local readable = { }
+local blocked = { }
+local trace_files = false
+local trace_calls = false
+local nofcalls = 0
+local nofrejected = 0
+local logfilename = "sandbox.log"
+
+local function registerstats()
+ statistics.register("sandboxing", function()
+ if trace_files then
+ return string.format("%s calls, %s rejected, logdata in '%s'",nofcalls,nofrejected,logfilename)
+ else
+ return string.format("%s calls, %s rejected",nofcalls,nofrejected)
+ end
+ end)
+ registerstats = false
+end
+
+local function logsandbox(details)
+ local comment = details.comment
+ local result = details.result
+ local arguments = details.arguments
+ for i=1,#arguments do
+ local argument = arguments[i]
+ local t = type(argument)
+ if t == "string" then
+ arguments[i] = quoted(argument)
+ if trace_files and possiblepath(argument) then
+ local q = qualified[argument]
+ if q then
+ local c = q[comment]
+ if c then
+ local r = c[result]
+ if r then
+ c[result] = r + 1
+ else
+ c[result] = 1 -- first hit for this result
+ end
+ else
+ q[comment] = {
+ [result] = 1
+ }
+ end
+ else
+ qualified[argument] = {
+ [comment] = {
+ [result] = 1
+ }
+ }
+ end
+ end
+ elseif t == "table" then
+ arguments[i] = fastserialize(argument)
+ else
+ arguments[i] = tostring(argument)
+ end
+ end
+ if trace_calls then
+ report("%s(%,t) => %l",details.comment,arguments,result)
+ end
+ nofcalls = nofcalls + 1
+ if not result then
+ nofrejected = nofrejected + 1
+ end
+end
+
+local ioopen = sandbox.original(io.open)
+
+local function logsandboxfiles(name,what,asked,okay)
+ -- we're only interested in permitted access
+ if not okay then
+ blocked [asked] = (blocked [asked] or 0) + 1 -- parentheses needed, 'or' binds weaker than '+'
+ elseif what == "*" or what == "w" then
+ writeable[asked] = (writeable[asked] or 0) + 1
+ else
+ readable [asked] = (readable [asked] or 0) + 1
+ end
+end
+
+function sandbox.logcalls()
+ if not trace_calls then
+ trace_calls = true
+ sandbox.setlogger(logsandbox)
+ if registerstats then
+ registerstats()
+ end
+ end
+end
+
+function sandbox.logfiles()
+ if not trace_files then
+ trace_files = true
+ sandbox.setlogger(logsandbox)
+ sandbox.setfilenamelogger(logsandboxfiles)
+ luatex.registerstopactions(function()
+ table.save(logfilename,{
+ calls = {
+ nofcalls = nofcalls,
+ nofrejected = nofrejected,
+ filenames = qualified,
+ },
+ checkednames = {
+ readable = readable,
+ writeable = writeable,
+ blocked = blocked,
+ },
+ })
+ end)
+ if registerstats then
+ registerstats()
+ end
+ end
+end
+
+trackers.register("sandbox.tracecalls",sandbox.logcalls)
+trackers.register("sandbox.tracefiles",sandbox.logfiles)
+
+local sandboxing = environment.arguments.sandbox
+
+if sandboxing then
+
+ report("enabling sandbox")
+
+ sandbox.enable()
+
+ if type(sandboxing) == "string" then
+ sandboxing = utilities.parsers.settings_to_hash(sandboxing)
+ if sandboxing.calls then
+ sandbox.logcalls()
+ end
+ if sandboxing.files then
+ sandbox.logfiles()
+ end
+ end
+
+ -- Nicer would be if we could just disable write 18 and keep os.execute
+ -- which in fact we can do by defining write18 as macro instead of
+ -- primitive ... todo.
+
+ -- We block some potential escapes from protection.
+
+ context [[
+ \let\primitive \relax
+ \let\normalprimitive\relax
+ \let\normalwrite \relax
+ ]]
+
+end
+
+local function processjob()
+
+ environment.initializefilenames() -- todo: check if we really need to pre-prep the filename
+
+ local arguments = environment.arguments
+ local suffix = environment.suffix
+ local filename = environment.filename -- hm, not inputfilename !
+
+ if not filename or filename == "" then
+ -- skip
+ elseif suffix == "xml" or arguments.forcexml then
+
+ -- Maybe we should move the preamble parsing here as it
+ -- can be part of (any) loaded (sub) file. The \starttext
+ -- wrapping might go away.
+
+ report("processing as xml: %s",filename)
+
+ context.starttext()
+ context.xmlprocess("main",filename,"")
+ context.stoptext()
+
+ elseif suffix == "cld" or arguments.forcecld then
+
+ report("processing as cld: %s",filename)
+
+ context.runfile(filename)
+
+ elseif suffix == "lua" or arguments.forcelua then
+
+ -- The wrapping might go away. Why is it there in the
+ -- first place?
+
+ report("processing as lua: %s",filename)
+
+ context.starttext()
+ context.ctxlua(string.format('dofile("%s")',filename))
+ context.stoptext()
+
+ elseif suffix == "mp" or arguments.forcemp then
+
+ report("processing as metapost: %s",filename)
+
+ context.starttext()
+ context.processMPfigurefile(filename)
+ context.stoptext()
+
+ -- elseif suffix == "prep" then
+ --
+ -- -- Why do we wrap here. Because it can be xml? Let's get rid
+ -- -- of prepping in general.
+ --
+ -- context.starttext()
+ -- context.input(filename)
+ -- context.stoptext()
+
+ else
+
+ -- \writestatus{system}{processing as tex}
+ -- We have a regular tex file so no \starttext yet as we can
+ -- load fonts.
+
+ -- context.enabletrackers { "resolvers.*" }
+ context.input(filename)
+ -- context.disabletrackers { "resolvers.*" }
+
+ end
+
+ context.finishjob()
+
+end
+
+implement {
+ name = "processjob",
+ onlyonce = true,
+ actions = processjob,
+}
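
The file-name logger above keeps three tallies (readable, writeable, blocked) and dumps them with table.save at the end of the run. A small self-contained sketch of that bookkeeping, without the sandbox hooks; note the parentheses around `t[name] or 0`, which the counting needs since `or` binds weaker than `+`:

local readable, writeable, blocked = { }, { }, { }

local function tally(t,name)
    t[name] = (t[name] or 0) + 1
end

local function logaccess(name,mode,okay)
    if not okay then
        tally(blocked,name)
    elseif mode == "*" or mode == "w" then
        tally(writeable,name)
    else
        tally(readable,name)
    end
end

logaccess("notes.tex","r",true)
logaccess("notes.tex","r",true)
logaccess("secret.lua","w",false)

print(readable["notes.tex"], blocked["secret.lua"])   -- 2   1
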
diff --git a/tex/context/base/cont-run.mkiv b/tex/context/base/cont-run.mkiv
new file mode 100644
index 000000000..fcca7b581
--- /dev/null
+++ b/tex/context/base/cont-run.mkiv
@@ -0,0 +1,20 @@
+%D \module
+%D [ file=cont-run,
+%D version=2014.12.26,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Runner,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Core Macros / Runner}
+
+\unprotect
+
+\registerctxluafile{cont-run}{1.001}
+
+\protect \endinput
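
The Lua side registered `processjob` with `onlyonce = true`; presumably that marks the command to run a single time and then become a no-op, which is what a job dispatcher wants. A toy sketch of such a flag under that assumption (the real mechanism lives in the scanner/implement layer, not here):

local function once(action)
    local done = false
    return function(...)
        if not done then
            done = true
            return action(...)
        end
        -- later calls are silently ignored, like a command replaced by a dummy
    end
end

local processjob = once(function()
    print("dispatching on suffix or --force* flags ...")
end)

processjob()   -- runs
processjob()   -- does nothing
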
diff --git a/tex/context/base/cont-yes.mkiv b/tex/context/base/cont-yes.mkiv
index 1a10fc30e..0b7f3231b 100644
--- a/tex/context/base/cont-yes.mkiv
+++ b/tex/context/base/cont-yes.mkiv
@@ -15,79 +15,85 @@
% wraping as we can assume proper styling. It's a left-over from
% mkii that we need to get rid of.
-\startluacode
-
- -- When a style is loaded there is a good change that we never enter
- -- this code.
-
- local report = logs.reporter("system")
-
- environment.initializefilenames() -- todo: check if we really need to pre-prep the filename
-
- local arguments = environment.arguments
- local suffix = environment.suffix
- local filename = environment.filename -- hm, not inputfilename !
-
- if suffix == "xml" or arguments.forcexml then
-
- -- Maybe we should move the preamble parsing here as it
- -- can be part of (any) loaded (sub) file. The \starttext
- -- wrapping might go away.
-
- report("processing as xml: %s",filename)
-
- context.starttext()
- context.xmlprocess("main",filename,"")
- context.stoptext()
-
- elseif suffix == "cld" or arguments.forcecld then
-
- report("processing as cld: %s",filename)
-
- context.runfile(filename)
-
- elseif suffix == "lua" or arguments.forcelua then
-
- -- The wrapping might go away. Why is is it there in the
- -- first place.
-
- report("processing as lua: %s",filename)
-
- context.starttext()
- context.ctxlua(string.format('dofile("%s")',filename))
- context.stoptext()
-
- elseif suffix == "mp" or arguments.forcemp then
-
- report("processing as metapost: %s",filename)
-
- context.starttext()
- context.processMPfigurefile(filename)
- context.stoptext()
-
- -- elseif suffix == "prep" then
- --
- -- -- Why do we wrap here. Because it can be xml? Let's get rid
- -- -- of prepping in general.
- --
- -- context.starttext()
- -- context.input(filename)
- -- context.stoptext()
-
- else
-
- -- \writestatus{system}{processing as tex}
- -- We have a regular tex file so no \starttext yet as we can
- -- load fonts.
-
- -- context.enabletrackers { "resolvers.*" }
- context.input(filename)
- -- context.disabletrackers { "resolvers.*" }
-
- end
-
- context.finishjob()
-
-\stopluacode
+% now moved to cont-run.lua
+%
+% \startluacode
+%
+% -- When a style is loaded there is a good chance that we never enter
+% -- this code.
+%
+% local report = logs.reporter("system")
+%
+% environment.initializefilenames() -- todo: check if we really need to pre-prep the filename
+%
+% local arguments = environment.arguments
+% local suffix = environment.suffix
+% local filename = environment.filename -- hm, not inputfilename !
+%
+% if suffix == "xml" or arguments.forcexml then
+%
+% -- Maybe we should move the preamble parsing here as it
+% -- can be part of (any) loaded (sub) file. The \starttext
+% -- wrapping might go away.
+%
+% report("processing as xml: %s",filename)
+%
+% context.starttext()
+% context.xmlprocess("main",filename,"")
+% context.stoptext()
+%
+% elseif suffix == "cld" or arguments.forcecld then
+%
+% report("processing as cld: %s",filename)
+%
+% context.runfile(filename)
+%
+% elseif suffix == "lua" or arguments.forcelua then
+%
+% -- The wrapping might go away. Why is it there in the
+% -- first place?
+%
+% report("processing as lua: %s",filename)
+%
+% context.starttext()
+% context.ctxlua(string.format('dofile("%s")',filename))
+% context.stoptext()
+%
+% elseif suffix == "mp" or arguments.forcemp then
+%
+% report("processing as metapost: %s",filename)
+%
+% context.starttext()
+% context.processMPfigurefile(filename)
+% context.stoptext()
+%
+% -- elseif suffix == "prep" then
+% --
+% -- -- Why do we wrap here. Because it can be xml? Let's get rid
+% -- -- of prepping in general.
+% --
+% -- context.starttext()
+% -- context.input(filename)
+% -- context.stoptext()
+%
+% else
+%
+% -- \writestatus{system}{processing as tex}
+% -- We have a regular tex file so no \starttext yet as we can
+% -- load fonts.
+%
+% -- context.enabletrackers { "resolvers.*" }
+% context.input(filename)
+% -- context.disabletrackers { "resolvers.*" }
+%
+% end
+%
+% context.finishjob()
+%
+% \stopluacode
+
+% We don't want to be in protected mode!
+
+\getvalue{clf_processjob} % from cont-run.lua
\endinput
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 30d18d29b..1145082b4 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png
index 67edf8a53..39c348e48 100644
--- a/tex/context/base/context-version.png
+++ b/tex/context/base/context-version.png
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 0214c2bfa..389d784e4 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -11,6 +11,17 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% Here is some timing (2015-04-01) luajittex format generation:
+%
+% 2.6 sec : normal make
+% 2.0 sec : with terminal output piped to file
+% 0.2 sec : format dumped
+% 0.2 sec : char-def/char-ini (no bytecode)
+%
+% So a format still saves some 1.5 seconds (with luajittex) startup, and
+% on network shares, or when no files are cached by the os, it's of course
+% much worse. A zero run is .27 sec with luajittex.
+
% Welcome to context, pronounced as kontekst (rather dutch) and not as
% conτεχt.
@@ -28,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.01.03 00:40}
+\edef\contextversion{2015.05.15 23:03}
\edef\contextkind {current}
%D For those who want to use this:
@@ -99,6 +110,9 @@
% From here on we have \unexpanded being \normalprotected, as we already had
% \unexpanded long before etex came around.
+\loadmarkfile{luat-ini}
+\loadmarkfile{toks-ini}
+
\loadmarkfile{syst-aux}
\loadmarkfile{syst-lua}
\loadmarkfile{syst-con}
@@ -106,15 +120,20 @@
\loadmarkfile{syst-fnt}
\loadmarkfile{syst-rtp}
+\loadmarkfile{luat-usr}
+
+% \loadmarkfile{luat-ini} % moved up
+% \loadmarkfile{toks-ini} % moved up
+
\loadmkvifile{file-ini}
\loadmkvifile{file-res}
\loadmkvifile{file-lib}
\loadmarkfile{supp-dir}
-\loadmarkfile{char-ini}
-\loadmarkfile{char-utf}
-\loadmarkfile{char-act}
+\loadmarkfile{char-utf} % generic code (i.e. not much tex) ... could become unic-ini
+\loadmarkfile{char-ini} % tex / context specific
+\loadmarkfile{char-act} % even more specific
\loadmarkfile{mult-ini}
\loadmarkfile{mult-sys}
@@ -126,9 +145,10 @@
\loadmarkfile{cldf-int} % interface
-\loadmarkfile{luat-ini}
+% \loadmarkfile{luat-ini}
-\loadmarkfile{toks-ini}
+\loadmarkfile{toks-tra}
+%loadmarkfile{toks-map} % obsolete, never used
\loadmarkfile{attr-ini}
@@ -143,7 +163,6 @@
\loadmarkfile{node-fin}
\loadmarkfile{node-mig}
-\loadmarkfile{typo-bld} % par builders
%loadmarkfile{node-pag}
\loadmarkfile{back-ini}
@@ -171,6 +190,9 @@
%loadmarkfile{supp-num} % obsolete
\loadmarkfile{typo-ini}
+\loadmarkfile{typo-bld} % par builders
+
+\loadmarkfile{typo-inj}
\loadmkvifile{file-syn}
\loadmkvifile{file-mod}
@@ -184,6 +206,7 @@
\loadmarkfile{hand-ini}
\loadmarkfile{lang-ini}
+\loadmarkfile{lang-hyp}
\loadmarkfile{lang-lab}
\loadmarkfile{unic-ini}
@@ -221,6 +244,8 @@
\loadmkvifile{typo-prc}
+\loadmarkfile{anch-pos}
+
\loadmkvifile{strc-ini}
\loadmarkfile{strc-tag}
\loadmarkfile{strc-doc}
@@ -234,7 +259,7 @@
\loadmarkfile{strc-xml}
\loadmarkfile{strc-def} % might happen later
\loadmkvifile{strc-ref}
-\loadmarkfile{strc-reg}
+%loadmarkfile{strc-reg}
\loadmkvifile{strc-lev} % experiment
\loadmarkfile{spac-ali}
@@ -246,9 +271,14 @@
\loadmarkfile{spac-par}
%loadmarkfile{spac-adj} % no longer needed
\loadmarkfile{spac-def}
+
+\doiffileelse{spac-prf.mkvi}
+ {\loadmkvifile{spac-prf}}
+ {\loadmkivfile{spac-prf}}
+
\loadmarkfile{spac-grd}
-\loadmarkfile{anch-pos}
+%loadmarkfile{anch-pos}
\loadmkvifile{scrn-ini}
\loadmkvifile{scrn-ref}
@@ -296,7 +326,9 @@
\loadmarkfile{pack-pos}
\loadmkvifile{page-mak}
-\loadmarkfile{page-lin}
+\loadmarkfile{strc-reg} % uses mixed columns
+
+\loadmkvifile{page-lin}
\loadmarkfile{page-par}
\loadmarkfile{typo-pag}
\loadmarkfile{typo-mar}
@@ -357,6 +389,7 @@
\loadmkvifile{font-sel}
\loadmarkfile{typo-tal}
+\loadmarkfile{typo-par} % par builders (uses fonts)
\loadmarkfile{tabl-com}
\loadmarkfile{tabl-pln}
@@ -383,6 +416,7 @@
\loadmarkfile{trac-jus}
\loadmarkfile{typo-cln}
+\loadmarkfile{typo-wrp}
\loadmarkfile{typo-spa}
\loadmarkfile{typo-krn}
\loadmkvifile{typo-itc}
@@ -394,6 +428,9 @@
\loadmkvifile{typo-txt}
\loadmarkfile{typo-drp}
\loadmarkfile{typo-fln}
+\loadmarkfile{typo-sus}
+\loadmarkfile{typo-lig}
+\loadmarkfile{typo-chr}
\loadmkvifile{type-ini}
\loadmarkfile{type-set}
@@ -401,11 +438,11 @@
\loadmarkfile{scrp-ini}
\loadmarkfile{lang-wrd} % can be optional (discussion with mm sideeffect)
-%loadmarkfile{lang-rep} % can be optional (bt 2013 side effect)
+\loadmarkfile{lang-rep} % can be optional (bt 2013 side effect)
\loadmarkfile{prop-ini} % only for downward compatibility
-\loadmarkfile{mlib-ctx}
+\loadmarkfile{mlib-ctx} % messy order
\loadmarkfile{meta-ini}
\loadmarkfile{meta-tex}
@@ -427,7 +464,7 @@
\loadmarkfile{anch-bar}
%loadmarkfile{anch-snc} % when needed this one will be redone
-\loadmarkfile{math-ini}
+\loadmarkfile{math-ini} % way after font-pre !
\loadmarkfile{math-pln}
\loadmarkfile{math-for}
\loadmarkfile{math-def} % also saves some meanings
@@ -476,8 +513,17 @@
\loadmarkfile{lang-spa} % will become obsolete
-\loadmarkfile{bibl-bib}
-\loadmarkfile{bibl-tra}
+% old bibtex support: (will be m-oldbibtex.mkiv)
+
+% \loadmarkfile{bibl-bib}
+% \loadmarkfile{bibl-tra}
+
+% new bibtex support:
+
+\loadmarkfile{publ-ini}
+\loadmarkfile{publ-tra}
+\loadmarkfile{publ-xml}
+\loadmarkfile{publ-old}
%loadmarkfile{x-xtag} % no longer preloaded
@@ -508,6 +554,8 @@
\loadmarkfile{back-exp}
+\loadmarkfile{cont-run} % the main runner (used in cont-yes.mkiv)
+
\setupcurrentlanguage[\defaultlanguagetag]
\prependtoks
diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua
index dad24a7d4..37942ae71 100644
--- a/tex/context/base/core-con.lua
+++ b/tex/context/base/core-con.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['core-con'] = {
license = "see context related readme files"
}
+-- todo: split into char-lan.lua and core-con.lua
+
--[[ldx--
<p>This module implements a bunch of conversions. Some are more
efficient than their <l n='tex'/> counterpart, some are even
@@ -14,14 +16,15 @@ slower but look nicer this way.</p>
<p>Some code may move to a module in the language namespace.</p>
--ldx]]--
-local command, context = commands, context
-
local floor, date, time, concat = math.floor, os.date, os.time, table.concat
local lower, rep, match = string.lower, string.rep, string.match
local utfchar, utfbyte = utf.char, utf.byte
local tonumber, tostring = tonumber, tostring
+local P, C, Cs, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.match
local context = context
+local commands = commands
+local implement = interfaces.implement
local settings_to_array = utilities.parsers.settings_to_array
local allocate = utilities.storage.allocate
@@ -37,9 +40,8 @@ local languages = languages
converters.number = tonumber
converters.numbers = tonumber
-function commands.number(n) context(n) end
-
-commands.numbers = commands.number
+implement { name = "number", actions = context }
+implement { name = "numbers", actions = context }
-- to be reconsidered ... languages namespace here, might become local plus a register command
@@ -59,6 +61,14 @@ local counters = allocate {
0x006F, 0x0070, 0x0072, 0x0073, 0x0161,
0x0074, 0x0075, 0x0076, 0x007A, 0x017E
},
+ ['spanish'] = {
+ 0x0061, 0x0062, 0x0063, 0x0064, 0x0065,
+ 0x0066, 0x0067, 0x0068, 0x0069, 0x006A,
+ 0x006B, 0x006C, 0x006D, 0x006E, 0x00F1,
+ 0x006F, 0x0070, 0x0071, 0x0072, 0x0073,
+ 0x0074, 0x0075, 0x0076, 0x0077, 0x0078,
+ 0x0079, 0x007A
+ },
['greek'] = { -- this should be the lowercase table
-- 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
-- 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
@@ -126,13 +136,30 @@ local counters = allocate {
languages.counters = counters
-counters['ar'] = counters['arabic']
-counters['gr'] = counters['greek']
-counters['g'] = counters['greek']
-counters['sl'] = counters['slovenian']
-counters['kr'] = counters['korean']
-counters['kr-p'] = counters['korean-parent']
-counters['kr-c'] = counters['korean-circle']
+counters['ar'] = counters['arabic']
+counters['gr'] = counters['greek']
+counters['g'] = counters['greek']
+counters['sl'] = counters['slovenian']
+counters['es'] = counters['spanish']
+counters['kr'] = counters['korean']
+counters['kr-p'] = counters['korean-parent']
+counters['kr-c'] = counters['korean-circle']
+
+counters['thainumerals'] = counters['thai']
+counters['devanagarinumerals'] = counters['devanagari']
+counters['gurmurkhinumerals'] = counters['gurmurkhi']
+counters['gujaratinumerals'] = counters['gujarati']
+counters['tibetannumerals'] = counters['tibetan']
+counters['greeknumerals'] = counters['greek']
+counters['arabicnumerals'] = counters['arabic']
+counters['persiannumerals'] = counters['persian']
+counters['arabicexnumerals'] = counters['persian']
+counters['koreannumerals'] = counters['korean']
+counters['koreanparentnumerals'] = counters['korean-parent']
+counters['koreancirclenumerals'] = counters['korean-circle']
+
+counters['sloveniannumerals'] = counters['slovenian']
+counters['spanishnumerals'] = counters['spanish']
local fallback = utfbyte('0')
@@ -177,6 +204,8 @@ converters.maxchrs = maxchrs
local lowercharacter = characters.lcchars
local uppercharacter = characters.ucchars
+local defaultcounter = counters.default
+
local function do_alphabetic(n,mapping,mapper,t) -- todo: make zero based variant (initial n + 1)
if not t then
t = { }
@@ -193,14 +222,17 @@ local function do_alphabetic(n,mapping,mapper,t) -- todo: make zero based varian
end
end
-function converters.alphabetic(n,code)
- return do_alphabetic(n,counters[code] or counters.default,lowercharacter)
+local function alphabetic(n,code)
+ return do_alphabetic(n,code and code ~= "" and counters[code] or defaultcounter,lowercharacter)
end
-function converters.Alphabetic(n,code)
- return do_alphabetic(n,counters[code] or counters.default,uppercharacter)
+local function Alphabetic(n,code)
+ return do_alphabetic(n,code and code ~= "" and counters[code] or defaultcounter,uppercharacter)
end
+converters.alphabetic = alphabetic
+converters.Alphabetic = Alphabetic
+
local lower_offset = 96
local upper_offset = 64
@@ -209,12 +241,18 @@ function converters.Character (n) return chr (n,upper_offset) end
function converters.characters(n) return chrs(n,lower_offset) end
function converters.Characters(n) return chrs(n,upper_offset) end
-function commands.alphabetic(n,c) context(do_alphabetic(n,counters[c],lowercharacter)) end
-function commands.Alphabetic(n,c) context(do_alphabetic(n,counters[c],uppercharacter)) end
-function commands.character (n) context(chr (n,lower_offset)) end
-function commands.Character (n) context(chr (n,upper_offset)) end
-function commands.characters(n) context(chrs(n,lower_offset)) end
-function commands.Characters(n) context(chrs(n,upper_offset)) end
+converters['a'] = converters.characters
+converters['A'] = converters.Characters
+converters['AK'] = converters.Characters
+converters['KA'] = converters.Characters
+
+implement { name = "alphabetic", actions = { alphabetic, context }, arguments = { "integer", "string" } }
+implement { name = "Alphabetic", actions = { Alphabetic, context }, arguments = { "integer", "string" } }
+
+implement { name = "character", actions = { chr, context }, arguments = { "integer", lower_offset } }
+implement { name = "Character", actions = { chr, context }, arguments = { "integer", upper_offset } }
+implement { name = "characters", actions = { chrs, context }, arguments = { "integer", lower_offset } }
+implement { name = "Characters", actions = { chrs, context }, arguments = { "integer", upper_offset } }
local weekday = os.weekday -- moved to l-os
local isleapyear = os.isleapyear -- moved to l-os
@@ -240,20 +278,22 @@ converters.leapyear = leapyear
converters.nofdays = nofdays
converters.textime = textime
-function commands.weekday (day,month,year) context(weekday (day,month,year)) end
-function commands.leapyear(year) context(leapyear(year)) end -- rather useless, only for ifcase
-function commands.nofdays (year,month) context(nofdays (year,month)) end
-
-function commands.year () context(date("%Y")) end
-function commands.month () context(date("%m")) end
-function commands.hour () context(date("%H")) end
-function commands.minute () context(date("%M")) end
-function commands.second () context(date("%S")) end
-function commands.textime() context(textime()) end
-
-function commands.doifleapyearelse(year)
- commands.doifelse(isleapyear(year))
-end
+implement { name = "weekday", actions = { weekday, context }, arguments = { "integer", "integer", "integer" } }
+implement { name = "leapyear", actions = { leapyear, context }, arguments = { "integer" } }
+implement { name = "nofdays", actions = { nofdays, context }, arguments = { "integer", "integer" } }
+
+implement { name = "year", actions = { date, context }, arguments = "'%Y'" }
+implement { name = "month", actions = { date, context }, arguments = "'%m'" }
+implement { name = "hour", actions = { date, context }, arguments = "'%H'" }
+implement { name = "minute", actions = { date, context }, arguments = "'%M'" }
+implement { name = "second", actions = { date, context }, arguments = "'%S'" }
+implement { name = "textime", actions = { textime, context } }
+
+implement {
+ name = "doifelseleapyear",
+ actions = { isleapyear, commands.doifelse },
+ arguments = "integer"
+}
local roman = {
{ [0] = '', 'I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX' },
@@ -273,8 +313,24 @@ converters.toroman = toroman
converters.Romannumerals = toroman
converters.romannumerals = function(n) return lower(toroman(n)) end
-function commands.romannumerals(n) context(lower(toroman(n))) end
-function commands.Romannumerals(n) context( toroman(n)) end
+converters['i'] = converters.romannumerals
+converters['I'] = converters.Romannumerals
+converters['r'] = converters.romannumerals
+converters['R'] = converters.Romannumerals
+converters['KR'] = converters.Romannumerals
+converters['RK'] = converters.Romannumerals
+
+implement {
+ name = "romannumerals",
+ actions = { toroman, lower, context },
+ arguments = "integer",
+}
+
+implement {
+ name = "Romannumerals",
+ actions = { toroman, context },
+ arguments = "integer",
+}
--~ local small = {
--~ 0x0627, 0x066E, 0x062D, 0x062F, 0x0647, 0x0648, 0x0631
@@ -327,8 +383,17 @@ converters.toabjad = toabjad
function converters.abjadnumerals (n) return toabjad(n,false) end
function converters.abjadnodotnumerals(n) return toabjad(n,true ) end
-function commands.abjadnumerals (n) context(toabjad(n,false)) end
-function commands.abjadnodotnumerals(n) context(toabjad(n,true )) end
+implement {
+ name = "abjadnumerals",
+ actions = { toabjad, context },
+ arguments = { "integer", false }
+}
+
+implement {
+ name = "abjadnodotnumerals",
+ actions = { toabjad, context },
+ arguments = { "integer", true }
+}
local vector = {
normal = {
@@ -470,32 +535,59 @@ end
converters.tochinese = tochinese
-function converters.chinesenumerals (n) return tochinese(n,"normal") end
-function converters.chinesecapnumerals(n) return tochinese(n,"cap" ) end
-function converters.chineseallnumerals(n) return tochinese(n,"all" ) end
+function converters.chinesenumerals (n,how) return tochinese(n,how or "normal") end
+function converters.chinesecapnumerals(n) return tochinese(n,"cap") end
+function converters.chineseallnumerals(n) return tochinese(n,"all") end
+
+converters['cn'] = converters.chinesenumerals
+converters['cn-c'] = converters.chinesecapnumerals
+converters['cn-a'] = converters.chineseallnumerals
+
+implement {
+ name = "chinesenumerals",
+ actions = { tochinese, context },
+ arguments = { "integer", "string" }
+}
+
+-- this is a temporary solution: we need a better solution when we have
+-- more languages
+
+function converters.spanishnumerals(n) return alphabetic(n,"es") end
+function converters.Spanishnumerals(n) return Alphabetic(n,"es") end
+function converters.sloveniannumerals(n) return alphabetic(n,"sl") end
+function converters.Sloveniannumerals(n) return Alphabetic(n,"sl") end
+
+converters['characters:es'] = converters.spanishnumerals
+converters['characters:sl'] = converters.sloveniannumerals
-function commands.chinesenumerals (n) context(tochinese(n,"normal")) end
-function commands.chinesecapnumerals(n) context(tochinese(n,"cap" )) end
-function commands.chineseallnumerals(n) context(tochinese(n,"all" )) end
+converters['Characters:es'] = converters.Spanishnumerals
+converters['Characters:sl'] = converters.Sloveniannumerals
converters.sequences = converters.sequences or { }
local sequences = converters.sequences
storage.register("converters/sequences", sequences, "converters.sequences")
-function converters.define(name,set)
+function converters.define(name,set) -- ,language)
+ -- if language then
+ -- name = name .. ":" .. language
+ -- end
sequences[name] = settings_to_array(set)
end
-commands.defineconversion = converters.define
+implement {
+ name = "defineconversion",
+ actions = converters.define,
+ arguments = { "string", "string" }
+}
-local function convert(method,n) -- todo: language
- local converter = converters[method]
+local function convert(method,n,language)
+ local converter = language and converters[method..":"..language] or converters[method]
if converter then
return converter(n)
else
local lowermethod = lower(method)
- local linguistic = counters[lowermethod]
+ local linguistic = counters[lowermethod]
if linguistic then
return do_alphabetic(n,linguistic,lowermethod == method and lowercharacter or uppercharacter)
end
@@ -507,18 +599,29 @@ local function convert(method,n) -- todo: language
else
return sequence[n]
end
- else
- return n
end
+ return n
end
end
converters.convert = convert
-function commands.checkedconversion(method,n)
- context(convert(method,n))
+local function valid(method,language)
+ return converters[method..":"..language] or converters[method] or sequences[method]
end
+implement {
+ name = "doifelseconverter",
+ actions = { valid, commands.doifelse },
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "checkedconversion",
+ actions = { convert, context },
+ arguments = { "string", "integer" }
+}
+
-- Well, since the one asking for this didn't test it the following code is not
-- enabled.
--
@@ -692,7 +795,7 @@ function converters.ordinal(n,language)
return t and t(n)
end
-function commands.ordinal(n,language)
+local function ctxordinal(n,language)
local t = language and ordinals[language]
local o = t and t(n)
context(n)
@@ -701,7 +804,11 @@ function commands.ordinal(n,language)
end
end
--- verbose numbers
+implement {
+ name = "ordinal",
+ actions = ctxordinal,
+ arguments = { "integer", "string" }
+}
-- verbose numbers
@@ -865,7 +972,7 @@ local words = {
[900] = "novecientos",
[1000] = "mil",
[1000^2] = "millón",
- [1000^3] = "mil millónes",
+ [1000^3] = "mil millones",
[1000^4] = "billón",
}
@@ -945,11 +1052,43 @@ function converters.verbose.translate(n,language)
return t and t.translate(n) or n
end
-function commands.verbose(n,language)
+local function verbose(n,language)
local t = language and data[language]
context(t and t.translate(n) or n)
end
+implement {
+ name = "verbose",
+ actions = verbose,
+ arguments = { "integer", "string" }
+}
+
+-- These are just helpers but not really for the tex end. Do we have to
+-- use translate here?
+
+local whitespace = lpeg.patterns.whitespace
+local word = lpeg.patterns.utf8uppercharacter^-1 * (1-whitespace)^1
+local pattern_one = Cs( whitespace^0 * word^-1 * P(1)^0)
+local pattern_all = Cs((whitespace^1 + word)^1)
+
+function converters.word (s) return s end -- dummies for typos
+function converters.words(s) return s end -- dummies for typos
+function converters.Word (s) return lpegmatch(pattern_one,s) or s end
+function converters.Words(s) return lpegmatch(pattern_all,s) or s end
+
+converters.upper = characters.upper
+converters.lower = characters.lower
+
+-- print(converters.Word("foo bar"))
+-- print(converters.Word(" foo bar"))
+-- print(converters.Word("123 foo bar"))
+-- print(converters.Word(" 123 foo bar"))
+
+-- print(converters.Words("foo bar"))
+-- print(converters.Words(" foo bar"))
+-- print(converters.Words("123 foo bar"))
+-- print(converters.Words(" 123 foo bar"))
+
-- --
local v_day = variables.day
@@ -986,33 +1125,60 @@ local months = { -- not variables.january
"december",
}
-function commands.dayname(n)
- context.labeltext(days[n] or "unknown")
+local function dayname(n)
+ return days[n] or "unknown"
end
-function commands.weekdayname(day,month,year)
- context.labeltext(days[weekday(day,month,year)] or "unknown")
+local function weekdayname(day,month,year)
+ return days[weekday(day,month,year)] or "unknown"
end
-function commands.monthname(n)
- context.labeltext(months[n] or "unknown")
+local function monthname(n)
+ return months[n] or "unknown"
end
-function commands.monthmnem(n)
+local function monthmnem(n)
local m = months[n]
- context.labeltext(m and (m ..":mnem") or "unknown")
+ return m and (m ..":mnem") or "unknown"
end
+implement {
+ name = "dayname",
+ actions = { dayname, context.labeltext },
+ arguments = "integer",
+}
+
+implement {
+ name = "weekdayname",
+ actions = { weekdayname, context.labeltext },
+ arguments = { "integer", "integer", "integer" }
+}
+
+implement {
+ name = "monthname",
+ actions = { monthname, context.labeltext },
+ arguments = { "integer" }
+}
+
+implement {
+ name = "monthmnem",
+ actions = { monthmnem, context.labeltext },
+ arguments = { "integer" }
+}
+
-- a prelude to a function that we can use at the lua end
-- day:ord month:mmem
-- j and jj obsolete
-function commands.currentdate(str,currentlanguage) -- second argument false : no label
+local function currentdate(str,currentlanguage) -- second argument false : no label
local list = utilities.parsers.settings_to_array(str)
local splitlabel = languages.labels.split or string.itself -- we need to get the loading order right
local year, month, day = tex.year, tex.month, tex.day
local auto = true
+ if currentlanguage == "" then
+ currentlanguage = false
+ end
for i=1,#list do
local entry = list[i]
local tag, plus = splitlabel(entry)
@@ -1038,9 +1204,9 @@ function commands.currentdate(str,currentlanguage) -- second argument false : no
if currentlanguage == false then
context(months[month] or "unknown")
elseif mnemonic then
- commands.monthmnem(month)
+ context.labeltext(monthmnem(month))
else
- commands.monthname(month)
+ context.labeltext(monthname(month))
end
elseif tag == "mm" then
context("%02i",month)
@@ -1050,7 +1216,7 @@ function commands.currentdate(str,currentlanguage) -- second argument false : no
if currentlanguage == false then
context(days[day] or "unknown")
else
- context.convertnumber(v_day,day)
+ context.convertnumber(v_day,day) -- why not direct
end
whatordinal = day
elseif tag == "dd" then
@@ -1064,7 +1230,7 @@ function commands.currentdate(str,currentlanguage) -- second argument false : no
if currentlanguage == false then
context(days[wd] or "unknown")
else
- commands.dayname(wd)
+ context.labeltext(days[wd] or "unknown")
end
elseif tag == "W" then
context(weekday(day,month,year))
@@ -1087,6 +1253,20 @@ function commands.currentdate(str,currentlanguage) -- second argument false : no
end
end
-function commands.rawdate(str)
- commands.currentdate(str,false)
-end
+implement {
+ name = "currentdate",
+ actions = currentdate,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "rawdate",
+ actions = currentdate,
+ arguments = { "string", false }
+}
+
+implement {
+ name = "unihex",
+ actions = { formatters["U+%05X"], context },
+ arguments = "integer"
+}
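
For readers new to the implement convention used throughout this file: a small sketch, under the assumption that an actions list is applied as a left-to-right pipeline, with each function's result handed to the next and the final context call typesetting it. The helper names below all occur in the patch; the composed function is only illustrative.

    -- roughly what actions = { toroman, lower, context } amounts to
    local function romannumerals(n)
        return context(string.lower(converters.toroman(n)))
    end
    -- romannumerals(14) would typeset "xiv", like the registered \clf_romannumerals
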
diff --git a/tex/context/base/core-con.mkiv b/tex/context/base/core-con.mkiv
index a43473ced..f7d718f44 100644
--- a/tex/context/base/core-con.mkiv
+++ b/tex/context/base/core-con.mkiv
@@ -67,44 +67,43 @@
%D \showsetup{romannumerals}
%D \showsetup{Romannumerals}
-\def\romannumerals#1{\ctxcommand{romannumerals(\number#1)}}
-\def\Romannumerals#1{\ctxcommand{Romannumerals(\number#1)}}
+\def\romannumerals#1{\clf_romannumerals\numexpr#1\relax}
+\def\Romannumerals#1{\clf_Romannumerals\numexpr#1\relax}
%D Arabic etc:
-\def\abjadnumerals #1{\ctxcommand{abjadnumerals (\number#1)}}
-\def\abjadnodotnumerals#1{\ctxcommand{abjadnodotnumerals(\number#1)}}
-\def\abjadnaivenumerals#1{\ctxcommand{arabicnumerals (\number#1)}}
+\def\abjadnumerals #1{\clf_abjadnumerals \numexpr#1\relax}
+\def\abjadnodotnumerals#1{\clf_abjadnodotnumerals\numexpr#1\relax}
+\def\abjadnaivenumerals#1{\clf_alphabetic \numexpr#1\relax{arabic}} % okay?
-\def\languagecharacters#1{\ctxcommand{alphabetic(\number#1,"\currentlanguage")}} % new
-\def\languageCharacters#1{\ctxcommand{Alphabetic(\number#1,"\currentlanguage")}} % new
+\def\languagecharacters#1{\clf_alphabetic\numexpr#1\relax{\currentlanguage}} % new
+\def\languageCharacters#1{\clf_Alphabetic\numexpr#1\relax{\currentlanguage}} % new
-% we could use an auxiliary macro to save some bytes in the format
-%
-% \def\dolanguagecharacters#1#2{\ctxcommand{alphabetic(\number#2,"#1")}}
-
-\def\thainumerals #1{\ctxcommand{alphabetic(\number#1,"thai")}}
-\def\devanagarinumerals#1{\ctxcommand{alphabetic(\number#1,"devanagari")}}
-\def\gurmurkhinumerals #1{\ctxcommand{alphabetic(\number#1,"gurmurkhi")}}
-\def\gujaratinumerals #1{\ctxcommand{alphabetic(\number#1,"gujarati")}}
-\def\tibetannumerals #1{\ctxcommand{alphabetic(\number#1,"tibetan")}}
-\def\greeknumerals #1{\ctxcommand{alphabetic(\number#1,"greek")}}
-\def\Greeknumerals #1{\ctxcommand{Alphabetic(\number#1,"greek")}}
-\def\arabicnumerals #1{\ctxcommand{alphabetic(\number#1,"arabic")}}
-\def\persiannumerals #1{\ctxcommand{alphabetic(\number#1,"persian")}}
+\def\alphabeticnumerals#1{\clf_alphabetic\numexpr#1\relax{}}
+\def\Alphabeticnumerals#1{\clf_Alphabetic\numexpr#1\relax{}}
+
+\def\thainumerals #1{\clf_alphabetic\numexpr#1\relax{thai}}
+\def\devanagarinumerals#1{\clf_alphabetic\numexpr#1\relax{devanagari}}
+\def\gurmurkhinumerals #1{\clf_alphabetic\numexpr#1\relax{gurmurkhi}}
+\def\gujaratinumerals #1{\clf_alphabetic\numexpr#1\relax{gujarati}}
+\def\tibetannumerals #1{\clf_alphabetic\numexpr#1\relax{tibetan}}
+\def\greeknumerals #1{\clf_alphabetic\numexpr#1\relax{greek}}
+\def\Greeknumerals #1{\clf_Alphabetic\numexpr#1\relax{greek}}
+\def\arabicnumerals #1{\clf_alphabetic\numexpr#1\relax{arabic}}
+\def\persiannumerals #1{\clf_alphabetic\numexpr#1\relax{persian}}
\let\arabicexnumerals \persiannumerals
-\def\koreannumerals #1{\ctxcommand{alphabetic(\number#1,"korean")}}
-\def\koreannumeralsp #1{\ctxcommand{alphabetic(\number#1,"korean-parent")}}
-\def\koreannumeralsc #1{\ctxcommand{alphabetic(\number#1,"korean-circle")}}
+\def\koreannumerals #1{\clf_alphabetic\numexpr#1\relax{korean}}
+\def\koreannumeralsp #1{\clf_alphabetic\numexpr#1\relax{korean-parent}}
+\def\koreannumeralsc #1{\clf_alphabetic\numexpr#1\relax{korean-circle}}
\let\koreanparentnumerals\koreannumeralsp
\let\koreancirclenumerals\koreannumeralsc
-\def\chinesenumerals #1{\ctxcommand{chinesenumerals (\number#1)}}
-\def\chinesecapnumerals#1{\ctxcommand{chinesecapnumerals(\number#1,"cap")}}
-\def\chineseallnumerals#1{\ctxcommand{chineseallnumerals(\number#1,"all")}}
+\def\chinesenumerals #1{\clf_chinesenumerals\numexpr#1\relax{normal}}
+\def\chinesecapnumerals#1{\clf_chinesenumerals\numexpr#1\relax{cap}}
+\def\chineseallnumerals#1{\clf_chinesenumerals\numexpr#1\relax{all}}
%D \macros
%D {character,Character}
@@ -124,8 +123,8 @@
\def\unknowncharacter{-} % else in lists \relax
-\def\character#1{\ctxcommand{character(\number#1)}}
-\def\Character#1{\ctxcommand{Character(\number#1)}}
+\def\character#1{\clf_character\numexpr#1\relax}
+\def\Character#1{\clf_Character\numexpr#1\relax}
%D \macros
%D {characters,Characters}
@@ -136,8 +135,8 @@
%D \showsetup{characters}
%D \showsetup{Characters}
-\def\characters#1{\ctxcommand{characters(\number#1)}}
-\def\Characters#1{\ctxcommand{Characters(\number#1)}}
+\def\characters#1{\clf_characters\numexpr#1\relax}
+\def\Characters#1{\clf_Characters\numexpr#1\relax}
%D \macros
%D {greeknumerals,Greeknumerals}
@@ -206,8 +205,8 @@
%D
%D Anyhow, the conversion looks like:
-\unexpanded\def\monthlong #1{\ctxcommand{monthname(#1)}}
-\unexpanded\def\monthshort#1{\ctxcommand{monthmnem(#1)}}
+\unexpanded\def\monthlong #1{\clf_monthname\numexpr#1\relax}
+\unexpanded\def\monthshort#1{\clf_monthmnem\numexpr#1\relax}
\let\convertmonth\monthlong % for old times sake
@@ -240,16 +239,16 @@
%D \showsetup{weekday}
%D \showsetup{WEEKDAY}
-\unexpanded\def\weekday#1{\ctxcommand{day(#1)}}
-\unexpanded\def\WEEKDAY#1{\WORD{\weekday{#1}}}
+\unexpanded\def\weekday#1{\clf_day\numexpr#1\relax}
+\unexpanded\def\WEEKDAY#1{\WORD{\clf_day\numexpr#1\relax}}
%D \macros
%D {getdayoftheweek, dayoftheweek}
\newcount\normalweekday
- \def\dayoftheweek #1#2#3{\ctxcommand{weekdayname(\number#1,\number#2,\number#3)}} % name
-\unexpanded\def\getdayoftheweek#1#2#3{\normalweekday\ctxcommand{weekday(\number#1,\number#2,\number#3)}\relax} % number
+ \def\dayoftheweek #1#2#3{\clf_weekdayname\numexpr#1\relax\numexpr#2\relax,\numexpr#3\relax} % name
+\unexpanded\def\getdayoftheweek#1#2#3{\normalweekday\clf_weekday\numexpr#1\relax\numexpr#2\relax,\numexpr#3\relax\relax} % number
%D Using this macro in
%D
@@ -294,14 +293,16 @@
%D
%D The number of days is available in the macro \type {\numberofdays}.
-\def\doifleapyearelse#1%
- {\ctxcommand{doifleapyearelse(\number#1)}}
+\def\doifelseleapyear#1%
+ {\clf_doifelseleapyear\numexpr#1\relax}
+
+\let\doifleapyearelse\doifelseleapyear
\unexpanded\def\getdayspermonth#1#2%
- {\edef\numberofdays{\ctxcommand{nofdays(\number#1,\number#2)}}}
+ {\edef\numberofdays{\clf_nofdays\numexpr#1\relax\numexpr#2\relax}}
\def\dayspermonth#1#2%
- {\ctxcommand{nofdays(\number#1,\number#2)}}
+ {\clf_nofdays\numexpr#1\relax\numexpr#2\relax}
% \dayoftheweek{2006}{9}{15}
% \doifleapyearelse{2000}{OK}{NOT OK}
@@ -413,7 +414,7 @@
{\begingroup
\the\everycurrentdate
\doifsomething{#1}{\edef\currentdatespecification{#1}}%
- \ctxcommand{currentdate(\!!bs\currentdatespecification\!!es,"\labellanguage")}%
+ \clf_currentdate{\currentdatespecification}{\labellanguage}%
\endgroup}
\unexpanded\def\date
@@ -434,7 +435,7 @@
\endgroup}
\def\rawdate[#1]% expandable and no labels
- {\ctxcommand{rawdate(\!!bs\currentdatespecification\!!es)}}
+ {\clf_rawdate{\currentdatespecification}}
%D \macros
%D {currenttime}
@@ -443,9 +444,9 @@
%D to the previous date macro using the keys \type {h}, \type {m} and a separator.
\unexpanded\def\calculatecurrenttime
- {\edef\currenthour {\ctxcommand{hour ()}}%
- \edef\currentminute{\ctxcommand{minute()}}%
- \edef\currentsecond{\ctxcommand{second()}}}
+ {\edef\currenthour {\clf_hour }%
+ \edef\currentminute{\clf_minute}%
+ \edef\currentsecond{\clf_second}}
\let\currenthour \!!plusone
\let\currentminute\!!plusone
@@ -454,7 +455,7 @@
\def\currenttimespecification{h,:,m}
\unexpanded\def\currenttime
- {\doifnextoptionalelse\syst_converters_current_time_yes\syst_converters_current_time_nop}
+ {\doifelsenextoptional\syst_converters_current_time_yes\syst_converters_current_time_nop}
\unexpanded\def\syst_converters_current_time_yes[#1]%
{\calculatecurrenttime
@@ -520,6 +521,13 @@
\installcorenamespace {conversionarguments}
\installcorenamespace {conversionwords}
+%D It might be better to move more to lua as we also need conversion there
+%D and duplicating logic doesn't make things cleaner. It means that all
+%D conversions will get a language argument too. However, we permit definitions
+%D at the \TEX\ end so we have to provide some hybrid method.
+
+% checkedconversion(method,n,language)
+
\unexpanded\def\defineconversion
{\dotripleempty\syst_converters_define_conversion}
@@ -531,13 +539,13 @@
\fi}
\def\syst_converters_define_conversion_indeed#1#2#3%
- {\doifinstringelse{,}{\detokenize{#3}}
- {\ctxcommand{defineconversion("#2",\!!bs\detokenize{#3}\!!es)}%
+ {\doifelseinstring{,}{\detokenize{#3}}
+ {\clf_defineconversion{#2}{\detokenize{#3}}% a set e.g. of symbols
\setgvalue{\??conversion#1}{\syst_converters_checked_conversion{#2}}}
{\setgvalue{\??conversion#1}{#3}}}
\def\syst_converters_checked_conversion#1#2%
- {\ctxcommand{checkedconversion("#1",#2)}}
+ {\clf_checkedconversion{#1}\numexpr#2\relax}
%D If a conversion is just a font switch then we need to make sure that the
%D number is indeed ends up as number in the input, so we need to handle the
@@ -557,7 +565,11 @@
\setvalue{\??conversionarguments2}#1{\csname\??conversion #1\endcsname}
\letvalue{\??conversionarguments3}\syst_converters_checked_conversion
-\def\doifconversiondefinedelse#1%
+% we can also add a \ctxcommand{doifelseconversion("#1","\currentlanguage")} to check
+% if we have definitions that are not matched at the lua end .. eventually we might do
+% that when more shows up
+
+\def\doifelseconversiondefined#1%
{\ifcsname\??conversion\currentlanguage#1\endcsname
\expandafter\firstoftwoarguments
\else\ifcsname\??conversion#1\endcsname
@@ -573,6 +585,9 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifconversiondefinedelse\doifelseconversiondefined
+\let\doifconversionnumberelse \doifelseconversionnumber
+
%D Handy.
\setvalue{\??conversionwords\v!one }{1}
@@ -595,10 +610,10 @@
\def\highordinalstr #1{\high{\notsmallcapped{#1}}}
\def\ordinalstr #1{\notsmallcapped{#1}}
-\def\ordinaldaynumber #1{\ctxcommand{ordinal(#1,"\currentlanguage")}}
+\def\ordinaldaynumber #1{\clf_ordinal\numexpr#1\relax{\currentlanguage}}
-\def\verbosenumber #1{\ctxcommand{verbose(#1,"\currentlanguage")}}
-\def\VerboseNumber #1{\Words{\ctxcommand{verbose(#1,"\currentlanguage")}}}
+\def\verbosenumber #1{\clf_verbose\numexpr#1\relax{\currentlanguage}}
+\def\VerboseNumber #1{\Words{\clf_verbose\numexpr#1\relax{\currentlanguage}}}
%D As longs as symbols are linked to levels or numbers, we can also use the
%D conversion mechanism, but in for instance the itemization macros, we prefer
@@ -627,6 +642,9 @@
\defineconversion [AK] [\smallcappedcharacters]
\defineconversion [KA] [\smallcappedcharacters]
+\defineconversion [\v!alphabetic] [\alphabeticnumerals]
+\defineconversion [\v!Alphabetic] [\Alphabeticnumerals]
+
\defineconversion [\v!number] [\numbers]
\defineconversion [\v!numbers] [\numbers]
\defineconversion [\v!Numbers] [\Numbers]
@@ -662,6 +680,9 @@
\defineconversion [g] [\greeknumerals]
\defineconversion [G] [\Greeknumerals]
+%defineconversion [ñ] [\spanishnumerals]
+%defineconversion [Ñ] [\Spanishnumerals]
+
\defineconversion [abjadnumerals] [\abjadnumerals]
\defineconversion [abjadnodotnumerals] [\adjadnodotnumerals]
\defineconversion [abjadnaivenumerals] [\adjadnaivenumerals]
@@ -686,13 +707,53 @@
\defineconversion [kr-c] [\koreancirclenumerals]
\defineconversion [chinesenumerals] [\chinesenumerals]
-\defineconversion [chinesecapnumeralscn] [\chinesecapnumerals]
-\defineconversion [chineseallnumeralscn] [\chineseallnumerals]
+\defineconversion [chinesecapnumerals] [\chinesecapnumerals]
+\defineconversion [chineseallnumerals] [\chineseallnumerals]
\defineconversion [cn] [\chinesenumerals]
\defineconversion [cn-c] [\chinesecapnumerals]
\defineconversion [cn-a] [\chineseallnumerals]
+%D Moved from lang-def.mkiv:
+%D
+%D Define these as the general character enumeration when language is Slovenian. If
+%D you feel uncomfortable with this, mail Mojca, since she promised to take the
+%D heat. Pablo was next to request this. We changed characters to numerals for this
+%D feature. We do need these definitions for mechanisms like itemize that check
+%D for converters.
+
+\def\sloveniannumerals#1{\clf_alphabetic\numexpr#1\relax{sl}}
+\def\slovenianNumerals#1{\clf_Alphabetic\numexpr#1\relax{sl}}
+
+\def\spanishnumerals #1{\clf_alphabetic\numexpr#1\relax{es}}
+\def\spanishNumerals #1{\clf_Alphabetic\numexpr#1\relax{es}}
+
+\defineconversion [\s!sl] [character] [\sloveniannumerals]
+\defineconversion [\s!sl] [Character] [\slovenianNumerals]
+\defineconversion [\s!sl] [characters] [\sloveniannumerals]
+\defineconversion [\s!sl] [Characters] [\slovenianNumerals]
+
+\defineconversion [\s!sl] [a] [\sloveniannumerals]
+\defineconversion [\s!sl] [A] [\slovenianNumerals]
+\defineconversion [\s!sl] [AK] [\smallcapped\sloveniannumerals]
+\defineconversion [\s!sl] [KA] [\smallcapped\sloveniannumerals]
+
+\defineconversion [\s!es] [character] [\spanishnumerals]
+\defineconversion [\s!es] [Character] [\spanishNumerals]
+\defineconversion [\s!es] [characters] [\spanishnumerals]
+\defineconversion [\s!es] [Characters] [\spanishNumerals]
+
+\defineconversion [\s!es] [a] [\spanishnumerals]
+\defineconversion [\s!es] [A] [\spanishNumerals]
+\defineconversion [\s!es] [AK] [\smallcapped\spanishnumerals]
+\defineconversion [\s!es] [KA] [\smallcapped\spanishnumerals]
+
+\defineconversion [sloveniannumerals] [\sloveniannumerals]
+\defineconversion [slovenianNumerals] [\slovenianNumerals]
+
+\defineconversion [spanishnumerals] [\spanishnumerals]
+\defineconversion [spanishNumerals] [\spanishNumerals]
+
%D In case a font has no greek (WS):
\defineconversion [mathgreek]
@@ -701,6 +762,10 @@
\m{ν},\m{ξ},\m{ο},\m{π},\m{ρ},\m{σ},
\m{τ},\m{υ},\m{φ},\m{χ},\m{ψ},\m{ω}]
+%D Handy too (expanded!):
+
+\def\unihex#1{\clf_unihex\numexpr#1\relax}
+
%D Symbol sets:
\ifdefined\symbol \else \def\symbol[#1]{#1} \fi % todo
diff --git a/tex/context/base/core-ctx.lua b/tex/context/base/core-ctx.lua
index 18978a530..c20691cd7 100644
--- a/tex/context/base/core-ctx.lua
+++ b/tex/context/base/core-ctx.lua
@@ -39,12 +39,12 @@ local xmltext = xml.text
local report_prepfiles = logs.reporter("system","prepfiles")
-commands = commands or { }
-local commands = commands
+local commands = commands
+local implement = interfaces.implement
-ctxrunner = ctxrunner or { }
+ctxrunner = ctxrunner or { }
-ctxrunner.prepfiles = utilities.storage.allocate()
+ctxrunner.prepfiles = utilities.storage.allocate()
local function dontpreparefile(t,k)
return k -- we only store when we have a prepper
@@ -254,7 +254,7 @@ function ctxrunner.load(ctxname)
for i=1,#runners do
local command = runners[i]
report_prepfiles("command: %s",command)
- local result = os.spawn(command) or 0
+ local result = os.execute(command) or 0
-- if result > 0 then
-- report_prepfiles("error, return code: %s",result)
-- end
@@ -301,35 +301,55 @@ local function resolve(name) -- used a few times later on
return ctxrunner.prepfiles[file.collapsepath(name)] or false
end
-local processfile = commands.processfile
-local doifinputfileelse = commands.doifinputfileelse
-
-function commands.processfile(name,maxreadlevel) -- overloaded
- local prepname = resolve(name)
- if prepname then
- return processfile(prepname,0)
- end
- return processfile(name,maxreadlevel)
+function ctxrunner.preparedfile(name)
+ return resolve(name) or name
end
-function commands.doifinputfileelse(name,depth)
- local prepname = resolve(name)
- if prepname then
- return doifinputfileelse(prepname,0)
+local processfile = commands.processfile
+local doifelseinputfile = commands.doifelseinputfile
+
+implement {
+ name = "processfile",
+ overload = true,
+ arguments = { "string", "integer" },
+ actions = function(name,maxreadlevel) -- overloaded
+ local prepname = resolve(name)
+ if prepname then
+ return processfile(prepname,0)
+ end
+ return processfile(name,maxreadlevel)
end
- return doifinputfileelse(name,depth)
-end
+}
-function commands.preparedfile(name)
- return resolve(name) or name
-end
+implement {
+ name = "doifelseinputfile",
+ overload = true,
+ arguments = { "string", "integer" },
+ actions = function(name,depth)
+ local prepname = resolve(name)
+ if prepname then
+ return doifelseinputfile(prepname,0)
+ end
+ return doifelseinputfile(name,depth)
+ end
+}
-function commands.getctxfile()
- local ctxfile = document.arguments.ctx or ""
- if ctxfile ~= "" then
- ctxrunner.load(ctxfile) -- do we need to locate it?
+-- implement {
+-- name = "preparedfile", -- not used
+-- arguments = "string",
+-- actions = { ctxrunner.preparedfile, context }
+-- }
+
+implement {
+ name = "setdocumentctxfile",
+ onlyonce = true,
+ actions = function()
+ local ctxfile = document.arguments.ctx or ""
+ if ctxfile ~= "" then
+ ctxrunner.load(ctxfile) -- do we need to locate it?
+ end
end
-end
+}
function ctxrunner.resolve(name) -- used a few times later on
local collapsedname = file.collapsepath(name,".")
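
One way to picture the prep file resolution that the overloaded processfile above relies on: a sketch with made-up file names, using only ctxrunner.preparedfile, ctxrunner.prepfiles and file.collapsepath from this file.

    -- made-up names, purely illustrative
    ctxrunner.prepfiles[file.collapsepath("somefile.xml")] = "somefile.xml.prep"
    print(ctxrunner.preparedfile("somefile.xml")) -- "somefile.xml.prep", the prepped variant wins
    print(ctxrunner.preparedfile("plain.tex"))    -- no prepped variant, the name comes back as-is
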
diff --git a/tex/context/base/core-ctx.mkiv b/tex/context/base/core-ctx.mkiv
index 59115621a..e6372d099 100644
--- a/tex/context/base/core-ctx.mkiv
+++ b/tex/context/base/core-ctx.mkiv
@@ -17,11 +17,13 @@
\unprotect
-\unexpanded\def\job_options_get_commandline {\ctxcommand{getcommandline()}}
-\unexpanded\def\job_options_get_ctxfile {\ctxcommand{getctxfile()}}
-\unexpanded\def\job_options_set_modes {\ctxcommand{setdocumentmodes()}}
-\unexpanded\def\job_options_set_modules {\ctxcommand{setdocumentmodules()}}
-\unexpanded\def\job_options_set_environments{\ctxcommand{setdocumentenvironments()}}
-\unexpanded\def\job_options_set_filenames {\ctxcommand{setdocumentfilenames()}}
+% called directly in core-def.mkiv
+%
+% \unexpanded\def\job_options_get_commandline {\clf_setdocumentcommandline}
+% \unexpanded\def\job_options_get_ctxfile {\clf_setdocumentctxfile}
+% \unexpanded\def\job_options_set_modes {\clf_setdocumentmodes}
+% \unexpanded\def\job_options_set_modules {\clf_setdocumentmodules}
+% \unexpanded\def\job_options_set_environments{\clf_setdocumentenvironments}
+% \unexpanded\def\job_options_set_filenames {\clf_setdocumentfilenames}
\protect \endinput
diff --git a/tex/context/base/core-dat.lua b/tex/context/base/core-dat.lua
index 242d362d0..ca6ec4373 100644
--- a/tex/context/base/core-dat.lua
+++ b/tex/context/base/core-dat.lua
@@ -13,7 +13,8 @@ replaces the twopass data mechanism.</p>
local tonumber, tostring, type = tonumber, tostring, type
-local context, commands = context, commands
+local context = context
+local commands = commands
local trace_datasets = false trackers.register("job.datasets" , function(v) trace_datasets = v end)
local trace_pagestates = false trackers.register("job.pagestates", function(v) trace_pagestates = v end)
@@ -33,6 +34,8 @@ local v_yes = interfaces.variables.yes
local new_latelua = nodes.pool.latelua
+local implement = interfaces.implement
+
local collected = allocate()
local tobesaved = allocate()
@@ -138,7 +141,7 @@ function datasets.getdata(name,tag,key,default)
return default
end
-function commands.setdataset(settings)
+local function setdataset(settings)
settings.convert = true
local name, tag = setdata(settings)
if settings.delay ~= v_yes then
@@ -150,7 +153,7 @@ function commands.setdataset(settings)
end
end
-function commands.datasetvariable(name,tag,key)
+local function datasetvariable(name,tag,key)
local t = collected[name]
if t == nil then
if trace_datasets then
@@ -175,6 +178,25 @@ function commands.datasetvariable(name,tag,key)
end
end
+implement {
+ name = "setdataset",
+ actions = setdataset,
+ arguments = {
+ {
+ { "name" },
+ { "tag" },
+ { "delay" },
+ { "data" },
+ }
+ }
+}
+
+implement {
+ name = "datasetvariable",
+ actions = datasetvariable,
+ arguments = { "string", "string", "string" }
+}
+
--[[ldx--
<p>We also provide an efficient variant for page states.</p>
--ldx]]--
@@ -245,7 +267,7 @@ function pagestates.realpage(name,tag,default)
return default
end
-function commands.setpagestate(settings)
+local function setpagestate(settings)
local name, tag, data = setstate(settings)
if type(tag) == "number" then
context(new_latelua(formatters["job.pagestates.extend(%q,%i)"](name,tag)))
@@ -254,7 +276,7 @@ function commands.setpagestate(settings)
end
end
-function commands.pagestaterealpage(name,tag)
+local function pagestaterealpage(name,tag)
local t = collected[name]
t = t and (t[tag] or t[tonumber(tag)])
if t then
@@ -262,8 +284,32 @@ function commands.pagestaterealpage(name,tag)
end
end
-function commands.setpagestaterealpageno(name,tag)
+local function setpagestaterealpageno(name,tag)
local t = collected[name]
t = t and (t[tag] or t[tonumber(tag)])
texsetcount("realpagestateno",t or texgetcount("realpageno"))
end
+
+implement {
+ name = "setpagestate",
+ actions = setpagestate,
+ arguments = {
+ {
+ { "name" },
+ { "tag" },
+ { "delay" },
+ }
+ }
+}
+
+implement {
+ name = "pagestaterealpage",
+ actions = pagestaterealpage,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "setpagestaterealpageno",
+ actions = setpagestaterealpageno,
+ arguments = { "string", "string" }
+}
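
A brief note on the nested arguments specification used for setdataset and setpagestate above: the scanner is assumed to deliver the keyword/value pairs from the TeX end as a single Lua table, so the function sees something like the sketch below (values are made up).

    -- made-up values; this is the shape setdataset receives, not code from the patch
    local settings = {
        name  = "mydata",
        tag   = "first",
        delay = "yes",
        data  = "some payload",
    }
    -- the tex side builds it with: \clf_setdataset name {mydata} tag {first} ... \relax
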
diff --git a/tex/context/base/core-dat.mkiv b/tex/context/base/core-dat.mkiv
index 5aef39d69..26b1f14b5 100644
--- a/tex/context/base/core-dat.mkiv
+++ b/tex/context/base/core-dat.mkiv
@@ -51,26 +51,26 @@
\def\syst_datasets_set_named[#1][#2][#3]%
{\begingroup
\edef\currentdataset{#1}%
- \ctxcommand{setdataset{
- name = "\currentdataset",
- tag = \!!bs#2\!!es,
- delay = "\datasetparameter\c!delay",
- data = \!!bs#3\!!es
- }}%
+ \clf_setdataset
+ name {\currentdataset}%
+ tag {#2}%
+ delay {\datasetparameter\c!delay}%
+ data {#3}%
+ \relax
\endgroup}
\def\syst_datasets_set_indexed[#1][#2][#3]%
{\begingroup
\edef\currentdataset{#1}%
- \ctxcommand{setdataset{
- name = "\currentdataset",
- delay = "\datasetparameter\c!delay",
- data = \!!bs#2\!!es
- }}%
+ \clf_setdataset
+ name {\currentdataset}%
+ delay {\datasetparameter\c!delay}%
+ data {#2}%
+ \relax
\endgroup}
\def\datasetvariable#1#2#3%
- {\ctxcommand{datasetvariable("#1","#2","#3")}}
+ {\clf_datasetvariable{#1}{#2}{#3}}
\installcorenamespace{pagestate}
@@ -85,17 +85,17 @@
\def\syst_pagestates_set[#1][#2]%
{\begingroup
\edef\currentpagestate{#1}%
- \ctxcommand{setpagestate{
- name = "\currentpagestate",
- tag = \!!bs#2\!!es,
- delay = "\pagestateparameter\c!delay",
- }}%
+ \clf_setpagestate
+ name {\currentpagestate}%
+ tag {#2}%
+ delay {\pagestateparameter\c!delay}%
+ \relax
\endgroup}
\def\pagestaterealpage#1#2%
- {\ctxcommand{pagestaterealpage("#1","#2")}}
+ {\clf_pagestaterealpage{#1}{#2}}
\def\setpagestaterealpageno#1#2%
- {\ctxcommand{setpagestaterealpageno("#1","#2")}}
+ {\clf_setpagestaterealpageno{#1}{#2}}
\protect
diff --git a/tex/context/base/core-def.mkiv b/tex/context/base/core-def.mkiv
index 99bed6d34..acd411fac 100644
--- a/tex/context/base/core-def.mkiv
+++ b/tex/context/base/core-def.mkiv
@@ -107,17 +107,17 @@
\initializenewlinechar
\calculatecurrenttime
\syst_files_load
- \job_options_get_commandline % expands some commands
- \job_options_get_ctxfile % might expand some commands
- \job_options_set_filenames
+ \clf_setdocumentcommandline
+ \clf_setdocumentctxfile
+ \clf_setdocumentfilenames
\font_preloads_at_every_job
\settopskip % brrr
\initializemainlanguage
\initializepagebackgrounds
\initializepagecounters
- \job_options_set_modes
- \job_options_set_modules
- \job_options_set_environments
+ \clf_setdocumentmodes
+ \clf_setdocumentmodules
+ \clf_setdocumentenvironments
\to \everyjob
\appendtoks
diff --git a/tex/context/base/core-env.lua b/tex/context/base/core-env.lua
index a4d1fdd92..a3a87b7f7 100644
--- a/tex/context/base/core-env.lua
+++ b/tex/context/base/core-env.lua
@@ -13,91 +13,232 @@ if not modules then modules = { } end modules ['core-env'] = {
local P, C, S, Cc, lpegmatch, patterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc, lpeg.match, lpeg.patterns
-local csname_id = token.csname_id
-local create = token.create
local texgetcount = tex.getcount
local texsetcount = tex.setcount
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local setmetatablecall = table.setmetatablecall
local context = context
-local undefined = csname_id("*undefined*crap*")
-local iftrue = create("iftrue")[2] -- inefficient hack
-
tex.modes = allocate { }
tex.systemmodes = allocate { }
tex.constants = allocate { }
tex.conditionals = allocate { }
tex.ifs = allocate { }
+tex.isdefined = allocate { }
local modes = { }
local systemmodes = { }
-setmetatableindex(tex.modes, function(t,k)
- local m = modes[k]
- if m then
- return m()
- else
- local n = "mode>" .. k
- if csname_id(n) == undefined then
- return false
+-- if newtoken then -- we keep the old code for historic reasons
+
+ -- undefined: mode == 0 or cmdname = "undefined_cs"
+
+ local create = newtoken.create
+
+ local cache = table.setmetatableindex(function(t,k)
+ local v = create(k)
+ t[k] = v
+ return v
+ end)
+
+ -- we can have a modes cache too
+
+ local iftrue = cache["iftrue"].mode
+ local undefined = cache["*undefined*crap*"].mode -- is this ok?
+
+ setmetatableindex(tex.modes, function(t,k)
+ local m = modes[k]
+ if m then
+ return m()
else
- modes[k] = function() return texgetcount(n) >= 1 end
- return texgetcount(n) >= 1
+ local n = "mode>" .. k
+ if cache[n].mode == 0 then
+ return false
+ else
+ modes[k] = function() return texgetcount(n) == 1 end
+ return texgetcount(n) == 1 -- 2 is prevented
+ end
end
- end
-end)
+ end)
-setmetatableindex(tex.systemmodes, function(t,k)
- local m = systemmodes[k]
- if m then
- return m()
- else
- local n = "mode>*" .. k
- if csname_id(n) == undefined then
- return false
+ setmetatableindex(tex.systemmodes, function(t,k)
+ local m = systemmodes[k]
+ if m then
+ return m()
else
- systemmodes[k] = function() return texgetcount(n) >= 1 end
- return texgetcount(n) >= 1
+ local n = "mode>*" .. k
+ if cache[n].mode == 0 then
+ return false
+ else
+ systemmodes[k] = function() return texgetcount(n) == 1 end
+ return texgetcount(n) == 1 -- 2 is prevented
+ end
end
+ end)
+
+ setmetatableindex(tex.constants, function(t,k)
+ return cache[k].mode ~= 0 and texgetcount(k) or 0
+ end)
+
+ setmetatableindex(tex.conditionals, function(t,k) -- 0 == true
+ return cache[k].mode ~= 0 and texgetcount(k) == 0
+ end)
+
+ table.setmetatableindex(tex.ifs, function(t,k)
+ -- local mode = cache[k].mode
+ -- if mode == 0 then
+ -- return nil
+ -- else
+ -- return mode == iftrue
+ -- end
+ return cache[k].mode == iftrue
+ end)
+
+ setmetatableindex(tex.isdefined, function(t,k)
+ return k and cache[k].mode ~= 0
+ end)
+
+ setmetatablecall(tex.isdefined, function(t,k)
+ return k and cache[k].mode ~= 0
+ end)
+
+ local dimencode = cache["scratchdimen" ].command
+ local countcode = cache["scratchcounter"].command
+ local tokencode = cache["scratchtoken" ].command
+ local skipcode = cache["scratchskip" ].command
+
+ local types = {
+ [dimencode] = "dimen",
+ [countcode] = "count",
+ [tokencode] = "token",
+ [skipcode ] = "skip",
+ }
+
+ function tex.isdimen(name)
+ return cache[name].command == dimencode
end
-end)
-setmetatableindex(tex.constants, function(t,k)
- return csname_id(k) ~= undefined and texgetcount(k) or 0
-end)
+ function tex.iscount(name)
+ return cache[name].command == countcode
+ end
-setmetatableindex(tex.conditionals, function(t,k) -- 0 == true
- return csname_id(k) ~= undefined and texgetcount(k) == 0
-end)
+ function tex.istoken(name)
+ return cache[name].command == tokencode
+ end
-setmetatableindex(tex.ifs, function(t,k)
- -- k = "if" .. k -- better not
- return csname_id(k) ~= undefined and create(k)[2] == iftrue -- inefficient, this create, we need a helper
-end)
+ function tex.isskip(name)
+ return cache[name].command == skipcode
+ end
--- todo : global
+ function tex.type(name)
+ return types[cache[name].command] or "macro"
+ end
--- not possible as we let at the tex end to zerocount and plusone
+-- else
+--
+-- local csname_id = token.csname_id
+-- local create = token.create
+--
+-- local undefined = csname_id("*undefined*crap*")
+-- local iftrue = create("iftrue")[2] -- inefficient hack
+--
+-- setmetatableindex(tex.modes, function(t,k)
+-- local m = modes[k]
+-- if m then
+-- return m()
+-- else
+-- local n = "mode>" .. k
+-- if csname_id(n) == undefined then
+-- return false
+-- else
+-- modes[k] = function() return texgetcount(n) == 1 end
+-- return texgetcount(n) == 1 -- 2 is prevented
+-- end
+-- end
+-- end)
+--
+-- setmetatableindex(tex.systemmodes, function(t,k)
+-- local m = systemmodes[k]
+-- if m then
+-- return m()
+-- else
+-- local n = "mode>*" .. k
+-- if csname_id(n) == undefined then
+-- return false
+-- else
+-- systemmodes[k] = function() return texgetcount(n) == 1 end
+-- return texgetcount(n) == 1 -- 2 is prevented
+-- end
+-- end
+-- end)
+--
+-- setmetatableindex(tex.constants, function(t,k)
+-- return csname_id(k) ~= undefined and texgetcount(k) or 0
+-- end)
+--
+-- setmetatableindex(tex.conditionals, function(t,k) -- 0 == true
+-- return csname_id(k) ~= undefined and texgetcount(k) == 0
+-- end)
+--
+-- setmetatableindex(tex.ifs, function(t,k)
+-- -- k = "if" .. k -- better not
+-- return csname_id(k) ~= undefined and create(k)[2] == iftrue -- inefficient, this create, we need a helper
+-- end)
+--
+-- setmetatableindex(tex.isdefined, function(t,k)
+-- return k and csname_id(k) ~= undefined
+-- end)
+-- setmetatablecall(tex.isdefined, function(t,k)
+-- return k and csname_id(k) ~= undefined
+-- end)
--
--- function tex.settrue(name,glob)
--- if glob then
--- texsetcount("global",name,0)
--- else
--- texsetcount(name,0)
+-- local lookuptoken = token.lookup
+--
+-- local dimencode = lookuptoken("scratchdimen" )[1]
+-- local countcode = lookuptoken("scratchcounter")[1]
+-- local tokencode = lookuptoken("scratchtoken" )[1]
+-- local skipcode = lookuptoken("scratchskip" )[1]
+--
+-- local types = {
+-- [dimencode] = "dimen",
+-- [countcode] = "count",
+-- [tokencode] = "token",
+-- [skipcode ] = "skip",
+-- }
+--
+-- function tex.isdimen(name)
+-- return lookuptoken(name)[1] == dimencode
+-- end
+--
+-- function tex.iscount(name)
+-- return lookuptoken(name)[1] == countcode
+-- end
+--
+-- function tex.istoken(name)
+-- return lookuptoken(name)[1] == tokencode
-- end
--- end
--
--- function tex.setfalse(name,glob)
--- if glob then
--- texsetcount("global",name,1)
--- else
--- texsetcount(name,1)
+-- function tex.isskip(name)
+-- return lookuptoken(name)[1] == skipcode
-- end
+--
+-- function tex.type(name)
+-- return types[lookuptoken(name)[1]] or "macro"
+-- end
+--
-- end
+function context.setconditional(name,value)
+ if value then
+ context.settruevalue(name)
+ else
+ context.setfalsevalue(name)
+ end
+end
+
---- arg = P("{") * C(patterns.nested) * P("}") + Cc("")
local sep = S("), ")
@@ -112,45 +253,9 @@ local pattern = (
+ sep^1
)^1
-function commands.autosetups(str)
- lpegmatch(pattern,str)
-end
-
--- new (inefficient)
-
-local lookuptoken = token.lookup
-
-local dimencode = lookuptoken("scratchdimen" )[1]
-local countcode = lookuptoken("scratchcounter")[1]
-local tokencode = lookuptoken("scratchtoken" )[1]
-local skipcode = lookuptoken("scratchskip" )[1]
-
-local types = {
- [dimencode] = "dimen",
- [countcode] = "count",
- [tokencode] = "token",
- [skipcode ] = "skip",
+interfaces.implement {
+ name = "autosetups",
+ actions = function(str) lpegmatch(pattern,str) end,
+ arguments = "string"
}
-function tex.isdimen(name)
- return lookuptoken(name)[1] == dimencode
-end
-
-function tex.iscount(name)
- return lookuptoken(name)[1] == countcode
-end
-
-function tex.istoken(name)
- return lookuptoken(name)[1] == tokencode
-end
-
-function tex.isskip(name)
- return lookuptoken(name)[1] == skipcode
-end
-
-function tex.type(name)
- return types[lookuptoken(name)[1]] or "macro"
-end
-
--- inspect(tex.isdimen("xxxxxxxxxxxxxxx"))
--- inspect(tex.isdimen("textwidth"))
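
The probes defined above can be tried directly from Lua during a run; a minimal sketch, assuming a MkIV session (the register names are just the usual scratch registers plus \textwidth):

    print(tex.isdimen("textwidth"))       -- true: \textwidth is a dimen register
    print(tex.iscount("scratchcounter"))  -- true: a count register
    print(tex.type("scratchskip"))        -- "skip"
    print(tex.type("relax"))              -- "macro": the fallback for anything else
    print(tex.isdefined["nosuchthing"])   -- false: the csname is not defined
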
diff --git a/tex/context/base/core-env.mkiv b/tex/context/base/core-env.mkiv
index 1c92a371c..bebc1bef0 100644
--- a/tex/context/base/core-env.mkiv
+++ b/tex/context/base/core-env.mkiv
@@ -46,8 +46,6 @@
\installcorenamespace{modestack}
-% todo: check prevent mode, also at the lua end
-
\setnewconstant\disabledmode \zerocount
\setnewconstant\enabledmode \plusone
\setnewconstant\preventedmode\plustwo
@@ -59,7 +57,7 @@
\def\syst_modes_new#1%
{\expandafter\newcount\csname\??mode#1\endcsname}
-\unexpanded\def\newmode#1%
+\unexpanded\def\newmode#1% so, no change of already set modes !
{\ifcsname\??mode#1\endcsname\else\syst_modes_new{#1}\fi}
\unexpanded\def\setmode#1%
@@ -113,8 +111,6 @@
\unexpanded\def\globalenablemode {\let\syst_mode_prefix\global\unprotect\syst_modes_enable }
\unexpanded\def\globaldisablemode{\let\syst_mode_prefix\global\unprotect\syst_modes_disable}
-\let\definemode\disablemode % nicer
-
\def\syst_modes_prevent[#1]{\protect\rawprocesscommacommand[#1]\syst_modes_prevent_indeed\let\syst_mode_prefix\relax}
\def\syst_modes_enable [#1]{\protect\rawprocesscommacommand[#1]\syst_modes_enable_indeed \let\syst_mode_prefix\relax}
\def\syst_modes_disable[#1]{\protect\rawprocesscommacommand[#1]\syst_modes_disable_indeed\let\syst_mode_prefix\relax}
@@ -135,10 +131,47 @@
\syst_mode_prefix\csname\??mode#1\endcsname\disabledmode
\fi}
+%D If you do a lot of mode testing, it makes sense to define modes (or disable
+%D them explicitly) when they are unset. This makes testing twice as fast. Often
+%D one enables modes beforehand, in which case \type {\definemode} would reset the
+%D mode. The optional second argument \type {keep} prevents changing an already
+%D set mode but still defines it when it is undefined.
+
+\unexpanded\def\definemode
+ {\unprotect
+ \dodoubleempty\syst_modes_define}
+
+\def\syst_modes_define[#1][#2]%
+ {\protect
+ \edef\m_modes_asked{#2}%
+ \rawprocesscommacommand[#1]\syst_modes_define_indeed}
+
+\def\syst_modes_define_indeed#1%
+ {\ifcsname\??mode#1\endcsname
+ % already set
+ \else
+ \syst_modes_new{#1}
+ \fi
+ \ifx\m_modes_asked\v!keep
+ % no change, stays disabled when it was undefined
+ \else
+ \csname\??mode#1\endcsname\ifx\m_modes_asked\v!yes\enabledmode\else\disabledmode\fi
+ \fi}
+
% handy for mp
\def\booleanmodevalue#1%
- {\ifcsname\??mode#1\endcsname\ifcase\csname\??mode#1\endcsname\s!false\else\s!true\fi\else\s!false\fi}
+ {\ifcsname\??mode#1\endcsname
+ \ifcase\csname\??mode#1\endcsname
+ \s!false
+ \or
+ \s!true
+ \else
+ \s!false
+ \fi
+ \else
+ \s!false
+ \fi}
% check macros
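
A small usage sketch of the \definemode variants documented above (the mode names are made up):

    \enablemode[metadata]
    \definemode[metadata][keep]  % already set, so it stays enabled
    \definemode[export]          % newly defined and explicitly disabled
    \definemode[draft][yes]      % newly defined and enabled
    \doifelsemode{draft}{enabled}{disabled} % yields: enabled
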
@@ -148,6 +181,27 @@
\newconditional\c_checked_mode
+% one
+
+% \def\syst_modes_check_indeed#1%
+% {\ifcsname\??mode#1\endcsname
+% \ifcase\csname\??mode#1\endcsname\else
+% \let\syst_modes_check_step\gobbleoneargument
+% \fi
+% \fi}
+%
+% \def\syst_modes_check#1#2#3%
+% {\let\syst_modes_check_step\syst_modes_check_indeed
+% \rawprocesscommacommand[#3]\syst_modes_check_step
+% \ifx\syst_modes_check_step\gobbleoneargument
+% \expandafter#1%
+% \else
+% \expandafter#2%
+% \fi}
+
+% modes .. twice as fast on defined modes .. we could use definers and make it even faster
+% if needed
+
\def\syst_modes_check_indeed#1%
{\ifcsname\??mode#1\endcsname
\ifcase\csname\??mode#1\endcsname\else
@@ -155,7 +209,7 @@
\fi
\fi}
-\def\syst_modes_check#1#2#3%
+\def\syst_modes_check_nop#1#2#3%
{\let\syst_modes_check_step\syst_modes_check_indeed
\rawprocesscommacommand[#3]\syst_modes_check_step
\ifx\syst_modes_check_step\gobbleoneargument
@@ -164,10 +218,32 @@
\expandafter#2%
\fi}
+\def\syst_modes_check_yes#1#2#3%
+ {\ifcase\csname\??mode#3\endcsname
+ \expandafter#2%
+ \or
+ \expandafter#1%
+ \else
+ \expandafter#2%
+ \fi}
+
+\def\syst_modes_check#1#2#3%
+ {\ifcsname\??mode#3\endcsname
+ \expandafter\syst_modes_check_yes
+ \else
+ \expandafter\syst_modes_check_nop
+ \fi#1#2{#3}}
+
+% all
+
\def\syst_modes_check_all_indeed#1%
{\ifcsname\??mode#1\endcsname
\ifcase\csname\??mode#1\endcsname
\let\syst_modes_check_all_step\gobbleoneargument
+ \or
+ % enabled
+ \else
+ \let\syst_modes_check_all_step\gobbleoneargument
\fi
\else
\let\syst_modes_check_all_step\gobbleoneargument
@@ -182,17 +258,20 @@
\expandafter#1%
\fi}
-\unexpanded\def\doifmodeelse {\syst_modes_check\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelsemode {\syst_modes_check\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifmode {\syst_modes_check\firstofoneargument\gobbleoneargument}
\unexpanded\def\doifnotmode {\syst_modes_check\gobbleoneargument\firstofoneargument}
\unexpanded\def\startmode [#1]{\syst_modes_check\donothing\syst_modes_stop_yes{#1}}
\unexpanded\def\startnotmode [#1]{\syst_modes_check\syst_modes_stop_nop\donothing{#1}}
-\unexpanded\def\doifallmodeselse {\syst_modes_check_all\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelseallmodes {\syst_modes_check_all\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifallmodes {\syst_modes_check_all\firstofoneargument\gobbleoneargument}
\unexpanded\def\doifnotallmodes {\syst_modes_check_all\gobbleoneargument\firstofoneargument}
\unexpanded\def\startallmodes [#1]{\syst_modes_check_all\donothing\syst_modes_stop_all_yes{#1}}
\unexpanded\def\startnotallmodes[#1]{\syst_modes_check_all\syst_modes_stop_all_nop\donothing{#1}}
+\let\doifmodeelse \doifelsemode
+\let\doifallmodeselse \doifelseallmodes
+
\unexpanded\def\stopmode {} % no relax
\unexpanded\def\stopnotmode {} % no relax
\unexpanded\def\stopallmodes {} % no relax
@@ -232,21 +311,21 @@
%D \starttyping
%D \enablemode[two]
%D
-%D \startmodes
+%D \startmodeset
%D [one] {1}
%D [two] {2}
%D [two] {2}
%D [three] {3}
%D [default] {?}
-%D \stopmodes
+%D \stopmodeset
%D
-%D \startmodes
+%D \startmodeset
%D [one] {1}
%D [three] {3}
%D [default] {?}
-%D \stopmodes
+%D \stopmodeset
%D
-%D \startmodes
+%D \startmodeset
%D [one] {
%D \input tufte
%D }
@@ -265,15 +344,15 @@
%D [default] {
%D \input ward
%D }
-%D \stopmodes
+%D \stopmodeset
%D \stoptyping
\newconditional\c_syst_modes_set_done % conditionals can be pushed/popped
\unexpanded\def\startmodeset
{\pushmacro\c_syst_modes_set_done
- \setfalse\conditionalfalse
- \doifnextoptionalelse\syst_modes_set_start\syst_modes_set_quit}
+ \setfalse\c_syst_modes_set_done
+ \doifelsenextoptionalcs\syst_modes_set_start\syst_modes_set_quit}
\def\syst_modes_set_start[#1]%
{\edef\m_mode_case{#1}%
@@ -293,10 +372,10 @@
\def\syst_modes_set_yes#1%
{\settrue\c_syst_modes_set_done
#1%
- \doifnextoptionalelse\syst_modes_set_start\syst_modes_set_quit}
+ \doifelsenextoptionalcs\syst_modes_set_start\syst_modes_set_quit}
\def\syst_modes_set_nop#1%
- {\doifnextoptionalelse\syst_modes_set_start\syst_modes_set_quit}
+ {\doifelsenextoptionalcs\syst_modes_set_start\syst_modes_set_quit}
\def\syst_modes_set_quit#1\stopmodeset
{\popmacro\c_syst_modes_set_done}
@@ -314,16 +393,16 @@
\unexpanded\def\startsetups{} % to please dep checker
\unexpanded\def\stopsetups {} % to please dep checker
-\expanded % will become obsolete
- {\def\expandafter\noexpand\csname\e!start\v!setups\endcsname
- {\begingroup\noexpand\doifnextoptionalelse
- {\noexpand\dostartsetupsA\expandafter\noexpand\csname\e!stop\v!setups\endcsname}
- {\noexpand\dostartsetupsB\expandafter\noexpand\csname\e!stop\v!setups\endcsname}}}
-
-\letvalue{\e!stop\v!setups}\relax
+% \expanded % will become obsolete
+% {\def\expandafter\noexpand\csname\e!start\v!setups\endcsname
+% {\begingroup\noexpand\doifnextoptionalcselse
+% {\noexpand\dostartsetupsA\expandafter\noexpand\csname\e!stop\v!setups\endcsname}
+% {\noexpand\dostartsetupsB\expandafter\noexpand\csname\e!stop\v!setups\endcsname}}}
+%
+% \letvalue{\e!stop\v!setups}\relax
-\unexpanded\def\setups{\doifnextbgroupelse\syst_setups_a\syst_setups_b} % {..} or [..]
-\unexpanded\def\setup {\doifnextbgroupelse\syst_setups \syst_setups_c} % {..} or [..]
+\unexpanded\def\setups{\doifelsenextbgroup\syst_setups_a\syst_setups_b} % {..} or [..]
+\unexpanded\def\setup {\doifelsenextbgroup\syst_setups \syst_setups_c} % {..} or [..]
\def\syst_setups_a #1{\processcommacommand[#1]\syst_setups} % {..}
\def\syst_setups_b[#1]{\processcommacommand[#1]\syst_setups} % [..]
@@ -331,15 +410,57 @@
\letvalue{\??setup:\letterpercent}\gobbleoneargument
+% \def\syst_setups#1% the grid option will be extended to other main modes
+% {\csname\??setup
+% \ifgridsnapping
+% \ifcsname\??setup\v!grid:#1\endcsname\v!grid:#1\else\ifcsname\??setup:#1\endcsname:#1\else:\letterpercent\fi\fi
+% \else
+% \ifcsname\??setup:#1\endcsname:#1\else:\letterpercent\fi
+% \fi
+% \endcsname\empty} % takes one argument
+
\def\syst_setups#1% the grid option will be extended to other main modes
{\csname\??setup
\ifgridsnapping
- \ifcsname\??setup\v!grid:#1\endcsname\v!grid:#1\else\ifcsname\??setup:#1\endcsname:#1\else:\letterpercent\fi\fi
+ \ifcsname\??setup\v!grid:#1\endcsname\v!grid:#1\else:\ifcsname\??setup:#1\endcsname#1\else\letterpercent\fi\fi
\else
- \ifcsname\??setup:#1\endcsname:#1\else:\letterpercent\fi
+ :\ifcsname\??setup:#1\endcsname#1\else\letterpercent\fi
\fi
\endcsname\empty} % takes one argument
+% not faster but less tracing sometimes:
+%
+% \def\syst_setups% the grid option will be extended to other main modes
+% {\csname\??setup\ifgridsnapping\expandafter\syst_setups_grid\else\expandafter\syst_setups_normal\fi}
+%
+% \def\syst_setups_grid#1%
+% {\ifcsname\??setup\v!grid:#1\endcsname\v!grid:#1\else\ifcsname\??setup:#1\endcsname:#1\else:\letterpercent\fi\fi\endcsname\empty} % takes one argument
+%
+% \def\syst_setups_normal#1%
+% {:\ifcsname\??setup:#1\endcsname#1\else\letterpercent\fi\endcsname\empty} % takes one argument
+%
+% only makes sense with many setups
+%
+% \def\syst_setups% the grid option will be extended to other main modes
+% {\ifgridsnapping
+% \expandafter\syst_setups_grid
+% \else
+% \expandafter\syst_setups_normal
+% \fi}
+%
+% \def\syst_setups_normal#1% the grid option will be extended to other main modes
+% {\csname\??setup
+% :\ifcsname\??setup:#1\endcsname#1\else\letterpercent\fi
+% \endcsname\empty} % takes one argument
+%
+% \def\syst_setups_grid#1% the grid option will be extended to other main modes
+% {\csname\??setup
+% \ifcsname\??setup\v!grid:#1\endcsname\v!grid:#1\else:\ifcsname\??setup:#1\endcsname#1\else\letterpercent\fi\fi
+% \endcsname\empty} % takes one argument
+%
+% \let\directsetup\syst_setups
+% \let\texsetup \syst_setups % nicer than \directsetup and more en par with xmlsetup and luasetup
+
% We can consider:
%
% \setvalue{\??setup->\v!auto}#1{\ctxcommand{autosetup("#1")}}
@@ -349,6 +470,12 @@
% but it won't work out well with multiple setups (intercepted at the
% lua end) that then get only one argument.
+% no checking and we assume it being defined:
+
+\def\fastsetup #1{\csname\??setup:#1\endcsname\empty}
+\def\fastsetupwithargument #1#2{\csname\??setup:#2\endcsname{#1}}
+\def\fastsetupwithargumentswapped #1{\csname\??setup:#1\endcsname}
+
% the next one is meant for \c!setups situations, hence the check for
% a shortcut
@@ -375,11 +502,11 @@
% setups={S1,lua(S2),xml(test{123}),S3}
\def\syst_setups_process_local
- {\ctxcommand{autosetups("\m_syst_setups_asked")}%
+ {\clf_autosetups{\m_syst_setups_asked}%
\relax} % let's prevent lookahead
\def\autosetups#1%
- {\ctxcommand{autosetups("#1")}}
+ {\clf_autosetups{#1}}
\edef\setupwithargument#1% saves a few expansions
{\noexpand\csname\??setup:\noexpand\ifcsname\??setup:#1\endcsname#1\noexpand\else\letterpercent\noexpand\fi\endcsname}
@@ -390,13 +517,15 @@
\let\directsetup\syst_setups
\let\texsetup \syst_setups % nicer than \directsetup and more en par with xmlsetup and luasetup
-\unexpanded\def\doifsetupselse#1% to be done: grid
+\unexpanded\def\doifelsesetups#1% to be done: grid
{\ifcsname\??setup:#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifsetupselse\doifelsesetups
+
\unexpanded\def\doifsetups#1% to be done: grid
{\ifcsname\??setup:#1\endcsname
\expandafter\firstofoneargument
@@ -467,17 +596,11 @@
% Is doglobal still relevant? Maybe always global? Or never? Anyway, it will become obsolete.
-\unexpanded\def\startluasetups {\begingroup\doifnextoptionalelse\syst_setups_start_lua_a\syst_setups_start_lua_b}
-\unexpanded\def\startxmlsetups {\begingroup\doifnextoptionalelse\syst_setups_start_xml_a\syst_setups_start_xml_b}
-\unexpanded\def\startrawsetups {\begingroup\doifnextoptionalelse\syst_setups_start_raw_a\syst_setups_start_raw_b}
-\unexpanded\def\startlocalsetups{\begingroup\doifnextoptionalelse\syst_setups_start_loc_a\syst_setups_start_loc_b}
-\unexpanded\def\startsetups {\begingroup\doifnextoptionalelse\syst_setups_start_tex_a\syst_setups_start_tex_b}
-
-\let\stopluasetups \relax
-\let\stopxmlsetups \relax
-\let\stoprawsetups \relax
-\let\stoplocalsetups \relax
-\let\stopsetups \relax
+\unexpanded\def\startluasetups {\begingroup\doifelsenextoptionalcs\syst_setups_start_lua_a\syst_setups_start_lua_b} \let\stopluasetups \relax
+\unexpanded\def\startxmlsetups {\begingroup\doifelsenextoptionalcs\syst_setups_start_xml_a\syst_setups_start_xml_b} \let\stopxmlsetups \relax
+\unexpanded\def\startrawsetups {\begingroup\doifelsenextoptionalcs\syst_setups_start_raw_a\syst_setups_start_raw_b} \let\stoprawsetups \relax
+\unexpanded\def\startlocalsetups{\begingroup\doifelsenextoptionalcs\syst_setups_start_loc_a\syst_setups_start_loc_b} \let\stoplocalsetups \relax
+\unexpanded\def\startsetups {\begingroup\doifelsenextoptionalcs\syst_setups_start_tex_a\syst_setups_start_tex_b} \let\stopsetups \relax
\def\syst_setups_start_lua_indeed#1#2#3\stopluasetups {\endgroup\dodoglobal\expandafter\def\csname\??setup#1:#2\expandafter\endcsname\expandafter##\expandafter1\expandafter{#3}}
\def\syst_setups_start_xml_indeed#1#2#3\stopxmlsetups {\endgroup\dodoglobal\expandafter\def\csname\??setup#1:#2\expandafter\endcsname\expandafter##\expandafter1\expandafter{#3}}
@@ -525,14 +648,14 @@
\def\systemsetups#1{\syst_setups{\systemsetupsprefix#1}}
-\def\resetsetups[#1]% see x-fo for usage
+\unexpanded\def\resetsetups[#1]% see x-fo for usage
{\ifcsname\??setup\ifgridsnapping\v!grid\fi:#1\endcsname
\dodoglobal\letbeundefined{\??setup\ifgridsnapping\v!grid\fi:#1}%
\else
\dodoglobal\letbeundefined{\??setup:#1}%
\fi}
-\def\showsetupsdefinition[#1]%
+\unexpanded\def\showsetupsdefinition[#1]%
{\showvalue{\??setup:#1}} % temp hack for debugging
%D \macros
@@ -619,6 +742,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifvariableelse\doifelsevariable
+
\unexpanded\def\doifvariable#1#2%
{\ifcsname\??variables#1:#2\endcsname
\expandafter\firstofoneargument
@@ -633,18 +758,52 @@
\expandafter\firstofoneargument
\fi}
+%D A few more (we could use a public test variable so that we only need
+%D to expand once, assuming expandable variables):
+
+\letvalue{\??variables:}\empty
+
+\unexpanded\def\doifelseemptyvariable#1#2%
+ {\edef\m_syst_string_one{\csname\??variables\ifcsname\??variables#1:#2\endcsname#1:#2\else:\fi\endcsname}%
+ \ifx\m_syst_string_one\empty
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\let\doifemptyvariableelse\doifelseemptyvariable
+
+\unexpanded\def\doifemptyvariable#1#2%
+ {\edef\m_syst_string_one{\csname\??variables\ifcsname\??variables#1:#2\endcsname#1:#2\else:\fi\endcsname}%
+ \ifx\m_syst_string_one\empty
+ \expandafter\firstofoneargument
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\unexpanded\def\doifnotemptyvariable#1#2%
+ {\edef\m_syst_string_one{\csname\??variables\ifcsname\??variables#1:#2\endcsname#1:#2\else:\fi\endcsname}%
+ \ifx\m_syst_string_one\empty
+ \expandafter\gobbleoneargument
+ \else
+ \expandafter\firstofoneargument
+ \fi}
+
+
\def\getvariabledefault#1#2% #3% can be command, so no ifcsname here
{\executeifdefined{\??variables#1:#2}}% {#3}
\unexpanded\def\setupenv
{\dotripleargument\syst_variables_set[\getrawparameters][\s!environment]}
-\unexpanded\def\doifenvelse{\doifelsevariable \s!environment}
+\unexpanded\def\doifelseenv{\doifelsevariable \s!environment}
\unexpanded\def\doifenv {\doifvariable \s!environment}
\unexpanded\def\doifnotenv {\doifnotvariable \s!environment}
\def\env {\getvariable \s!environment}
\def\envvar {\getvariabledefault\s!environment}
+\let\doifenvelse\doifelseenv
+
%D \macros
%D {defineselector,setupselector,select,directselect}
%D
diff --git a/tex/context/base/core-ini.mkiv b/tex/context/base/core-ini.mkiv
index 1682bed1b..c338f6f81 100644
--- a/tex/context/base/core-ini.mkiv
+++ b/tex/context/base/core-ini.mkiv
@@ -58,14 +58,21 @@
\newtoks \everyforgetall
\newtoks \everycleanupfeatures
\newtoks \everysimplifycommands
+\newtoks \everypreroll
\let\simplifiedcommands\everysimplifycommands % backward compatible, will stay as it's used in styles
+\newconditional\simplifyingcommands % public
+
\unexpanded\def\forgetall {\the\everyforgetall}
\unexpanded\def\cleanupfeatures {\the\everycleanupfeatures}
\unexpanded\def\simplifycommands{\the\everysimplifycommands}
\appendtoks
+ \settrue\simplifyingcommands
+\to \everysimplifycommands
+
+\appendtoks
\everypar\emptytoks % pretty important
\to \everyforgetall
@@ -165,6 +172,9 @@
\newif \ifdoingblocks
\newif \ifgridsnapping
+\newconstant\pageduplexmode % 0 single 1 double 2 mix
+\newconstant\pagebodymode % 0 not 1 normal pagebody 2 spread
+
%D \macros
%D {ifproductionrun}
%D
diff --git a/tex/context/base/core-sys.lua b/tex/context/base/core-sys.lua
index 009ec16ea..cf74d68f4 100644
--- a/tex/context/base/core-sys.lua
+++ b/tex/context/base/core-sys.lua
@@ -9,46 +9,13 @@ if not modules then modules = { } end modules ['core-sys'] = {
local lower, format, gsub = string.lower, string.format, string.gsub
local suffixonly, basename, removesuffix = file.suffix, file.basename, file.removesuffix
-local environment = environment
+local environment = environment
+local context = context
+local implement = interfaces.implement
local report_files = logs.reporter("system","files")
--- function commands.updatefilenames(jobname,fulljobname,inputfilename,outputfilename)
--- --
--- environment.jobname = jobname
--- --
--- local jobfilename = gsub(fulljobname or jobname or inputfilename or tex.jobname or "","%./","")
--- --
--- environment.jobfilename = jobfilename
--- environment.jobfilesuffix = lower(suffixonly(environment.jobfilename))
--- --
--- local inputfilename = gsub(inputfilename or "","%./","")
--- environment.inputfilename = inputfilename
--- environment.inputfilebarename = removesuffix(basename(inputfilename))
--- --
--- local inputfilerealsuffix = suffixonly(inputfilename)
--- environment.inputfilerealsuffix = inputfilerealsuffix
--- --
--- local inputfilesuffix = inputfilerealsuffix == "" and "tex" or lower(inputfilerealsuffix)
--- environment.inputfilesuffix = inputfilesuffix
--- --
--- local outputfilename = outputfilename or environment.inputfilebarename or ""
--- environment.outputfilename = outputfilename
--- --
--- local runpath = resolvers.cleanpath(lfs.currentdir())
--- environment.runpath = runpath
--- --
--- statistics.register("running on path", function()
--- return environment.runpath
--- end)
--- --
--- statistics.register("job file properties", function()
--- return format("jobname %a, input %a, suffix %a",jobfilename,inputfilename,inputfilesuffix)
--- end)
--- --
--- end
-
-function environment.initializefilenames() -- commands.updatefilenames(jobname,fulljobname,input,result)
+function environment.initializefilenames()
local arguments = environment.arguments
@@ -90,12 +57,31 @@ function environment.initializefilenames() -- commands.updatefilenames(jobname,f
function environment.initializefilenames() end
end
+-- we could set a macro (but will that work when we're expanding? needs testing!)
+
+implement { name = "operatingsystem", actions = function() context(os.platform) end }
+implement { name = "jobfilename", actions = function() context(environment.jobfilename) end }
+implement { name = "jobfilesuffix", actions = function() context(environment.jobfilesuffix) end }
+implement { name = "inputfilebarename", actions = function() context(environment.inputfilebarename) end }
+implement { name = "inputfilerealsuffix", actions = function() context(environment.inputfilerealsuffix) end }
+implement { name = "inputfilesuffix", actions = function() context(environment.inputfilesuffix) end }
+implement { name = "inputfilename", actions = function() context(environment.inputfilename) end }
+implement { name = "outputfilename", actions = function() context(environment.outputfilename) end }
+
statistics.register("result saved in file", function()
-- suffix will be fetched from backend
local outputfilename = environment.outputfilename or environment.jobname or tex.jobname or "<unset>"
if tex.pdfoutput > 0 then
- return format("%s.%s, compresslevel %s, objectcompreslevel %s",outputfilename,"pdf",tex.pdfcompresslevel, tex.pdfobjcompresslevel)
+ return format("%s.%s, compresslevel %s, objectcompresslevel %s",outputfilename,"pdf",tex.pdfcompresslevel, tex.pdfobjcompresslevel)
else
return format("%s.%s",outputfilename,"dvi") -- hard to imagine
end
end)
+
+implement {
+ name = "systemlog",
+ arguments = { "string", "string", "string" },
+ actions = function(whereto,category,text)
+ logs.system(whereto,"context",tex.jobname,category,text)
+ end,
+}
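
The implement calls above all follow the same pattern: a Lua function gets registered under a name and becomes reachable from the TeX end as a \clf_... macro. A minimal sketch of that pattern, using a hypothetical name:

    local implement = interfaces.implement
    local context   = context

    implement {
        name      = "showplatform",        -- hypothetical; surfaces as \clf_showplatform
        arguments = "string",              -- one braced argument from the TeX end
        actions   = function(prefix)
            context("%s: %s",prefix,os.platform) -- pipe the result back into the input
        end,
    }
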
diff --git a/tex/context/base/core-sys.mkiv b/tex/context/base/core-sys.mkiv
index 8f56b6f16..3d31dee53 100644
--- a/tex/context/base/core-sys.mkiv
+++ b/tex/context/base/core-sys.mkiv
@@ -35,24 +35,24 @@
%D End of lines to the output. \TEX\ will map this onto the platform specific
%D line ending. I hate this mess.
-\edef\operatingsystem {\cldcontext{os.platform}}
+\let\operatingsystem\clf_operatingsystem
%D The jobname is what gets loaded by the cont-yes stub file. This name
%D also determines the name of tuc etc files.
-\def \jobfilename {\cldcontext{environment.jobfilename or ""}}
-\def \jobfilesuffix {\cldcontext{environment.jobfilesuffix or ""}}
+\let\jobfilename \clf_jobfilename
+\let\jobfilesuffix\clf_jobfilesuffix
%D However, that one can itself load another file.
-\def \inputfilebarename {\cldcontext{environment.inputfilebarename or ""}}
-\def \inputfilerealsuffix{\cldcontext{environment.inputfilerealsuffix or ""}}
-\def \inputfilesuffix {\cldcontext{environment.inputfilesuffix or ""}}
-\def \inputfilename {\cldcontext{environment.inputfilename or ""}}
+\let\inputfilebarename \clf_inputfilebarename
+\let\inputfilerealsuffix\clf_inputfilerealsuffix
+\let\inputfilesuffix \clf_inputfilesuffix
+\let\inputfilename \clf_inputfilename
%D The output name is only used for some checking.
-\def \outputfilename {\cldcontext{environment.outputfilename or ""}}
+\let\outputfilename\clf_outputfilename
\installcorenamespace{system}
@@ -89,6 +89,8 @@
% \ctxcommand{updatefilenames("\jobame","\inputfilename","\outputfilename")}%
% \to \everysetupsystem
+\newconditional\prerollrun % when true it means that we have a forced number of runs
+
% Some mechanisms (see x-res-01) use either \jobfilename or
% \jobfilename.somesuffix, in which case we need to use the
% full name if given or a default (like \jobfilename.xml);
@@ -206,27 +208,31 @@
\to \everydefinestartstop
\unexpanded\def\syst_startstop_start#1%
- {\namedstartstopparameter{#1}\c!before
+ {\namedstartstopparameter{#1}\c!before\relax
\bgroup
\def\currentstartstop{#1}%
- \startstopparameter\c!commands
+ \startstopparameter\c!commands\relax % better: setups, so that it will show up soon
\dostarttagged\t!construct\currentstartstop
\usestartstopstyleandcolor\c!style\c!color}
\unexpanded\def\syst_startstop_stop#1%
{\dostoptagged
\egroup
- \namedstartstopparameter{#1}\c!after}
+ \namedstartstopparameter{#1}\c!after\relax}
\unexpanded\def\syst_startstop_indeed#1%
{\groupedcommand
{\def\currentstartstop{#1}%
- \startstopparameter\c!commands % will become setups
+ \startstopparameter\c!commands\relax % better: setups, so that it will show up soon
\dostarttagged\t!construct\currentstartstop
- \usestartstopstyleandcolor\c!style\c!color}
- {\def\currentstartstop{#1}%
+ \usestartstopstyleandcolor\c!style\c!color
+ \startstopparameter\c!left\relax}
+ {\def\currentstartstop{#1}% safeguard, not really needed
+ \startstopparameter\c!right\relax
\dostoptagged
- \startstopparameter\c!inbetween}}
+ \startstopparameter\c!inbetween\relax}}
+
+% \definestartstop[tracing][\c!style=\tt]
% \unexpanded\def\ignorestartstop[#1]%
% {\unexpanded\expandafter\def\csname\e!start#1\expandafter\endcsname\expandafter
@@ -238,8 +244,14 @@
\installcommandhandler \??highlight {highlight} \??highlight % we could do with less
+\setuphighlight
+ [\c!command=\v!yes]
+
\appendtoks
- \setuevalue\currenthighlight{\typo_highlights_indeed{\currenthighlight}}%
+ \edef\p_command{\highlightparameter\c!command}%
+ \ifx\p_command\v!yes
+ \setuevalue\currenthighlight{\typo_highlights_indeed{\currenthighlight}}%
+ \fi
\to \everydefinehighlight
\ifdefined\dotaghighlight \else \let\dotaghighlight\relax \fi
@@ -252,13 +264,29 @@
\dotaghighlight}
{\dostoptagged}}
+\unexpanded\def\highlight[#1]%
+ {\typo_highlights_indeed{#1}}
+
+\unexpanded\def\starthighlight[#1]%
+ {\begingroup
+ \def\currenthighlight{#1}%
+ \dostarttagged\t!highlight\currenthighlight
+ \usehighlightstyleandcolor\c!style\c!color
+ \dotaghighlight}
+
+\unexpanded\def\stophighlight
+ {\dostoptagged
+ \endgroup}
+
+\let\directhighlight\typo_highlights_indeed
+
\unexpanded\def\defineexpandable
- {\doifnextoptionalelse
+ {\doifelsenextoptional
{\syst_basics_define_yes\def}%
{\syst_basics_define_nop\def}}
\unexpanded\def\define
- {\doifnextoptionalelse
+ {\doifelsenextoptional
{\syst_basics_define_yes{\unexpanded\def}}%
{\syst_basics_define_nop{\unexpanded\def}}}
@@ -397,5 +425,31 @@
% \processcommalist[#1]\docommand
% \endgroup}
+\unexpanded\def\syst_log_indeed#1#2#3%
+ {\ctxcommand{systemlog("#1","#2",\!!bs#3\!!es)}}
+
+\let\systemlog\syst_log_indeed
+
+\unexpanded\def\systemlogfirst
+ {\ifcase\directsystemparameter\c!n\relax
+ \expandafter\syst_log_indeed
+ \or
+ \expandafter\syst_log_indeed
+ \else
+ \expandafter\gobblethreearguments
+ \fi}
+
+\unexpanded\def\systemloglast
+ {\ifcase\directsystemparameter\c!n\relax
+ \expandafter\syst_log_indeed
+ \or
+ \expandafter\gobblethreearguments
+ \or
+ \expandafter\gobblethreearguments
+ \or
+ \expandafter\gobblethreearguments
+ \or
+ \expandafter\syst_log_indeed
+ \fi}
\protect \endinput
diff --git a/tex/context/base/core-two.lua b/tex/context/base/core-two.lua
index d6e006e04..bb1afa7db 100644
--- a/tex/context/base/core-two.lua
+++ b/tex/context/base/core-two.lua
@@ -42,9 +42,13 @@ end
jobpasses.define = allocate
-function jobpasses.save(id,str)
+function jobpasses.save(id,str,index)
local jti = allocate(id)
- jti[#jti+1] = str
+ if index then
+ jti[index] = str
+ else
+ jti[#jti+1] = str
+ end
end
function jobpasses.savetagged(id,tag,str)
@@ -54,7 +58,7 @@ end
function jobpasses.getdata(id,index,default)
local jti = collected[id]
- local value = jit and jti[index]
+ local value = jti and jti[index]
return value ~= "" and value or default or ""
end
@@ -140,18 +144,36 @@ jobpasses.inlist = inlist
-- interface
-function commands.gettwopassdata (id) local r = get (id) if r then context(r) end end
-function commands.getfirsttwopassdata(id) local r = first(id) if r then context(r) end end
-function commands.getlasttwopassdata (id) local r = last (id) if r then context(r) end end
-function commands.findtwopassdata (id,n) local r = find (id,n) if r then context(r) end end
-function commands.gettwopassdatalist (id) local r = list (id) if r then context(r) end end
-function commands.counttwopassdata (id) local r = count(id) if r then context(r) end end
-function commands.checktwopassdata (id) local r = check(id) if r then context(r) end end
+local implement = interfaces.implement
-commands.definetwopasslist = jobpasses.define
-commands.savetwopassdata = jobpasses.save
-commands.savetaggedtwopassdata = jobpasses.savetagged
+implement { name = "gettwopassdata", actions = { get , context }, arguments = "string" }
+implement { name = "getfirsttwopassdata",actions = { first, context }, arguments = "string" }
+implement { name = "getlasttwopassdata", actions = { last , context }, arguments = "string" }
+implement { name = "findtwopassdata", actions = { find , context }, arguments = { "string", "string" } }
+implement { name = "gettwopassdatalist", actions = { list , context }, arguments = "string" }
+implement { name = "counttwopassdata", actions = { count, context }, arguments = "string" }
+implement { name = "checktwopassdata", actions = { check, context }, arguments = "string" }
-function commands.doifelseintwopassdata(id,str)
- commands.doifelse(inlist(id,str))
-end
+implement {
+ name = "definetwopasslist",
+ actions = jobpasses.define,
+ arguments = "string"
+}
+
+implement {
+ name = "savetwopassdata",
+ actions = jobpasses.save,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "savetaggedtwopassdata",
+ actions = jobpasses.savetagged,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "doifelseintwopassdata",
+ actions = { inlist, commands.doifelse },
+ arguments = { "string", "string" }
+}
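
A rough sketch of the two pass flow these helpers serve, assuming job.passes is the public name of the jobpasses table and using a made-up list name:

    local passes = job.passes                  -- assumption: jobpasses is published here
    passes.define("demo")
    passes.save("demo","first value")          -- appended at the next free slot
    passes.save("demo","pinned value",3)       -- new in this patch: explicit index
    -- on the next run the values come back via the collected (tuc) data:
    print(passes.getdata("demo",3,"fallback"))
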
diff --git a/tex/context/base/core-two.mkiv b/tex/context/base/core-two.mkiv
index b08914ef4..5a791dea2 100644
--- a/tex/context/base/core-two.mkiv
+++ b/tex/context/base/core-two.mkiv
@@ -73,10 +73,10 @@
\registerctxluafile{core-two}{1.001}
-\def\immediatesavetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxcommand {savetwopassdata('#1',"#3")}}}
+\def\immediatesavetwopassdata #1#2#3{\normalexpanded{\noexpand\clf_savetwopassdata{#1}{#3}}}
\def\savetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata('#1',"#3")}}}
\def\lazysavetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata('#1',"#3")}}}
-\def\savetaggedtwopassdata #1#2#3#4{\normalexpanded{\noexpand\ctxcommand {savetaggedtwopassdata('#1','#3',"#4")}}}
+\def\savetaggedtwopassdata #1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}}
\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata('#1','#3',"#4")}}}
% temp hack: needs a proper \starteverytimeluacode
@@ -87,7 +87,7 @@
\newif \iftwopassdatafound % obsolete, will go
-\def\syst_twopass_check
+\def\syst_twopass_check % can be delegated to lua once obsolete is gone
{\ifx\twopassdata\empty
\twopassdatafoundfalse % obsolete
\setfalse\twopassdatafound
@@ -104,18 +104,19 @@
{\twopassdatafoundtrue % obsolete
\settrue\twopassdatafound}
-\unexpanded\def\definetwopasslist #1{\ctxcommand{definetwopasslist('#1')}}
-\unexpanded\def\gettwopassdata #1{\edef\twopassdata {\ctxcommand{gettwopassdata ("#1")}}\syst_twopass_check}
-\unexpanded\def\checktwopassdata #1{\edef\twopassdata {\ctxcommand{checktwopassdata ("#1")}}\syst_twopass_check}
-\unexpanded\def\findtwopassdata #1#2{\edef\twopassdata {\ctxcommand{findtwopassdata("#1","#2")}}\syst_twopass_check}
-\unexpanded\def\getfirsttwopassdata #1{\edef\twopassdata {\ctxcommand{getfirsttwopassdata ("#1")}}\syst_twopass_check}
-\unexpanded\def\getlasttwopassdata #1{\edef\twopassdata {\ctxcommand{getlasttwopassdata ("#1")}}%
- \edef\noftwopassitems{\ctxcommand{counttwopassdata ("#1")}}\syst_twopass_check}
-\unexpanded\def\getnamedtwopassdatalist#1#2{\edef #1{\ctxcommand{gettwopassdatalist ("#2")}}}
-\unexpanded\def\gettwopassdatalist #1{\edef\twopassdatalist{\ctxcommand{gettwopassdatalist ("#1")}}}
+\unexpanded\def\definetwopasslist #1{\clf_definetwopasslist{#1}}
+\unexpanded\def\gettwopassdata #1{\edef\twopassdata {\clf_gettwopassdata {#1}}\syst_twopass_check}
+\unexpanded\def\checktwopassdata #1{\edef\twopassdata {\clf_checktwopassdata {#1}}\syst_twopass_check}
+\unexpanded\def\findtwopassdata #1#2{\edef\twopassdata {\clf_findtwopassdata {#1}{#2}}\syst_twopass_check}
+\unexpanded\def\getfirsttwopassdata #1{\edef\twopassdata {\clf_getfirsttwopassdata {#1}}\syst_twopass_check}
+\unexpanded\def\getlasttwopassdata #1{\edef\twopassdata {\clf_getlasttwopassdata {#1}}%
+ \edef\noftwopassitems{\clf_counttwopassdata {#1}}\syst_twopass_check}
+\unexpanded\def\getnamedtwopassdatalist#1#2{\edef #1{\clf_gettwopassdatalist {#2}}}
+\unexpanded\def\gettwopassdatalist #1{\edef\twopassdatalist{\clf_gettwopassdatalist {#1}}}
-\unexpanded\def\doifelseintwopassdata #1#2{\ctxcommand{doifelseintwopassdata("#1","#2")}}
+\unexpanded\def\doifelseintwopassdata #1#2{\clf_doifelseintwopassdata{#1}{#2}}
+ \let\doifintwopassdataelse \doifelseintwopassdata
\let\getfromtwopassdata \findtwopassdata
\protect \endinput
diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua
index ef792c1d8..a4b747680 100644
--- a/tex/context/base/core-uti.lua
+++ b/tex/context/base/core-uti.lua
@@ -29,6 +29,8 @@ local packers = utilities.packers
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
+local implement = interfaces.implement
+
local texgetcount = tex.getcount
local report_passes = logs.reporter("job","passes")
@@ -36,7 +38,7 @@ local report_passes = logs.reporter("job","passes")
job = job or { }
local job = job
-job.version = 1.24
+job.version = 1.30
job.packversion = 1.02
-- some day we will implement loading of other jobs and then we need
@@ -51,30 +53,50 @@ directly access the variable using a <l n='lua'/> call.</p>
local savelist, comment = { }, { }
function job.comment(key,value)
- comment[key] = value
+ if type(key) == "table" then
+ for k, v in next, key do
+ comment[k] = v
+ end
+ else
+ comment[key] = value
+ end
end
job.comment("version",job.version)
-local enabled = true
+local enabled = true
+local initialized = false
directives.register("job.save",function(v) enabled = v end)
+----------.register("job.keep",function(v) kept = v end)
function job.disablesave() -- can be command
enabled = false
end
function job.initialize(loadname,savename)
- job.load(loadname) -- has to come after structure is defined !
- luatex.registerstopactions(function()
- if enabled and not status.lasterrorstring or status.lasterrorstring == "" then
- job.save(savename)
+ if not initialized then
+ if not loadname or loadname == "" then
+ loadname = tex.jobname .. ".tuc"
+ end
+ if not savename or savename == "" then
+ savename = tex.jobname .. ".tua"
end
- end)
+ job.load(loadname) -- has to come after structure is defined !
+ luatex.registerstopactions(function()
+ if enabled and not status.lasterrorstring or status.lasterrorstring == "" then
+ -- if kept then
+ -- job.keep(loadname) -- could move to mtx-context instead
+ -- end
+ job.save(savename)
+ end
+ end)
+ initialized = true
+ end
end
-function job.register(collected, tobesaved, initializer, finalizer)
- savelist[#savelist+1] = { collected, tobesaved, initializer, finalizer }
+function job.register(collected, tobesaved, initializer, finalizer, serializer)
+ savelist[#savelist+1] = { collected, tobesaved, initializer, finalizer, serializer }
end
-- as an example we implement variables
@@ -100,7 +122,7 @@ job.register('job.variables.checksums', 'job.variables.checksums', initializer)
local rmethod, rvalue
-local setxvalue = context.setxvalue
+local ctx_setxvalue = context.setxvalue
local function initializer()
tobesaved = jobvariables.tobesaved
@@ -116,7 +138,7 @@ local function initializer()
end
tobesaved.randomseed = rvalue
for cs, value in next, collected do
- setxvalue(cs,value)
+ ctx_setxvalue(cs,value)
end
end
@@ -126,6 +148,10 @@ function jobvariables.save(cs,value)
tobesaved[cs] = value
end
+function jobvariables.restore(cs)
+ return collected[cs] or tobesaved[cs]
+end
+
-- checksums
function jobvariables.getchecksum(tag)
@@ -171,10 +197,12 @@ function job.save(filename) -- we could return a table but it can get pretty lar
f:write("local utilitydata = { }\n\n")
f:write(serialize(comment,"utilitydata.comment",true),"\n\n")
for l=1,#savelist do
- local list = savelist[l]
- local target = format("utilitydata.%s",list[1])
- local data = list[2]
- local finalizer = list[4]
+ -- f:write("do\n\n") -- no solution for the jit limitation either
+ local list = savelist[l]
+ local target = format("utilitydata.%s",list[1])
+ local data = list[2]
+ local finalizer = list[4]
+ local serializer = list[5]
if type(data) == "string" then
data = utilities.tables.accesstable(data)
end
@@ -185,11 +213,18 @@ function job.save(filename) -- we could return a table but it can get pretty lar
packers.pack(data,jobpacker,true)
end
local definer, name = definetable(target,true,true) -- no first and no last
- f:write(definer,"\n\n",serialize(data,name,true),"\n\n")
+ if serializer then
+ f:write(definer,"\n\n",serializer(data,name,true),"\n\n")
+ else
+ f:write(definer,"\n\n",serialize(data,name,true),"\n\n")
+ end
+ -- f:write("end\n\n")
end
if job.pack then
packers.strip(jobpacker)
+ -- f:write("do\n\n")
f:write(serialize(jobpacker,"utilitydata.job.packed",true),"\n\n")
+ -- f:write("end\n\n")
end
f:write("return utilitydata")
f:close()
@@ -210,8 +245,9 @@ local function load(filename)
return data
end
else
- os.remove(filename) -- probably a bad file
- report_passes("removing stale job data file %a, restart job",filename)
+ os.remove(filename) -- probably a bad file (or luajit overflow as it cannot handle large tables well)
+ report_passes("removing stale job data file %a, restart job, message: %s%s",filename,tostring(data),
+ jit and " (try luatex instead of luajittex)" or "")
os.exit(true) -- trigger second run
end
end
@@ -263,6 +299,28 @@ function job.loadother(filename)
statistics.stoptiming(_load_)
end
+-- function job.keep(filename)
+-- local suffix = file.suffix(filename)
+-- local base = file.removesuffix(filename)
+-- if suffix == "" then
+-- suffix = "tuc"
+-- end
+-- for i=1,10 do
+-- local tmpname = format("%s-%s-%02d.tmp",base,suffix,i)
+-- if lfs.isfile(tmpname) then
+-- os.remove(tmpname)
+-- report_passes("removing %a",tmpname)
+-- end
+-- end
+-- if lfs.isfile(filename) then
+-- local tmpname = format("%s-%s-%02d.tmp",base,suffix,environment.currentrun or 1)
+-- report_passes("copying %a into %a",filename,tmpname)
+-- file.copy(filename,tmpname)
+-- else
+-- report_passes("no file %a, nothing kept",filename)
+-- end
+-- end
+
-- eventually this will end up in strc-ini
statistics.register("startup time", function()
@@ -319,16 +377,37 @@ function statistics.formatruntime(runtime)
if shipped > 0 or pages > 0 then
local persecond = shipped / runtime
if pages == 0 then pages = shipped end
-if jit then
-local saved = watts_per_core * runtime * kg_per_watt_per_second / speedup_by_other_engine
-local saved = used_wood_factor * runtime
--- return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f kg tree saved by using luajittex",runtime,pages,shipped,persecond,saved)
- return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f g tree saved by using luajittex",runtime,pages,shipped,persecond,saved*1000)
-else
- return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second",runtime,pages,shipped,persecond)
-end
+ -- if jit then
+ -- local saved = watts_per_core * runtime * kg_per_watt_per_second / speedup_by_other_engine
+ -- local saved = used_wood_factor * runtime
+ -- return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second, %f mg tree saved by using luajittex",runtime,pages,shipped,persecond,saved*1000*1000)
+ -- else
+ return format("%s seconds, %i processed pages, %i shipped pages, %.3f pages/second",runtime,pages,shipped,persecond)
+ -- end
else
return format("%s seconds",runtime)
end
end
end
+
+implement {
+ name = "savevariable",
+ actions = job.variables.save,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "setjobcomment",
+ actions = job.comment,
+ arguments = { { "*" } }
+}
+
+implement {
+ name = "initializejob",
+ actions = job.initialize
+}
+
+implement {
+ name = "disablejobsave",
+ actions = job.disablesave
+}
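
The extended job.comment now also accepts a table of key/value pairs, which is what the new \clf_setjobcomment call in core-uti.mkiv below relies on; a minimal sketch:

    -- the classic one-pair form still works
    job.comment("format","ConTeXt MkIV")

    -- and a whole set of comments can be passed at once
    job.comment {
        format = "ConTeXt MkIV",
        stamp  = "2015.05.15",
    }
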
diff --git a/tex/context/base/core-uti.mkiv b/tex/context/base/core-uti.mkiv
index 527b90445..cdd8958ff 100644
--- a/tex/context/base/core-uti.mkiv
+++ b/tex/context/base/core-uti.mkiv
@@ -18,23 +18,19 @@
\registerctxluafile{core-uti}{1.001}
\def\savecurrentvalue#1#2% immediate, so not \unexpanded
- {\ctxlua{job.variables.save("\strippedcsname#1","#2")}}
+ {\clf_savevariable{\strippedcsname#1}{#2}}
\appendtoks
- \ctxlua {
- % job.comment("file","\jobname")
- job.comment("file",tex.jobname)
- job.comment("format","\contextformat")
- job.comment("stamp","\contextversion")
- job.comment("escape","\!!bs\space...\space\!!es")
- }%
+ \clf_setjobcomment
+ file {tex.jobname}%
+ format {\contextformat}%
+ stamp {\contextversion}%
+ escape {\!!bs\space...\space\!!es}%
+ \relax
\to \everystarttext
\appendtoks
- \ctxlua {
- % job.initialize("\jobname.tuc","\jobname.tua")
- job.initialize(tex.jobname .. ".tuc",tex.jobname .. ".tua")
- }%
+ \clf_initializejob
\to \everyjob
\protect \endinput
diff --git a/tex/context/base/data-aux.lua b/tex/context/base/data-aux.lua
index b969e6070..dae96ce62 100644
--- a/tex/context/base/data-aux.lua
+++ b/tex/context/base/data-aux.lua
@@ -16,7 +16,8 @@ local resolvers = resolvers
local report_scripts = logs.reporter("resolvers","scripts")
function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
- local scriptpath = "scripts/context/lua"
+ -- local scriptpath = "scripts/context/lua"
+ local scriptpath = "context/lua"
newname = file.addsuffix(newname,"lua")
local oldscript = resolvers.cleanpath(oldname)
if trace_locating then
diff --git a/tex/context/base/data-crl.lua b/tex/context/base/data-crl.lua
index 445bd5b0a..fba5a6230 100644
--- a/tex/context/base/data-crl.lua
+++ b/tex/context/base/data-crl.lua
@@ -28,7 +28,7 @@ local function runcurl(specification)
if not io.exists(cachename) then
cached[original] = cachename
local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
- os.spawn(command)
+ os.execute(command)
end
if io.exists(cachename) then
cached[original] = cachename
diff --git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua
index 58a081506..1d7728c22 100644
--- a/tex/context/base/data-env.lua
+++ b/tex/context/base/data-env.lua
@@ -18,11 +18,13 @@ local formats = allocate()
local suffixes = allocate()
local dangerous = allocate()
local suffixmap = allocate()
+local usertypes = allocate()
resolvers.formats = formats
resolvers.suffixes = suffixes
resolvers.dangerous = dangerous
resolvers.suffixmap = suffixmap
+resolvers.usertypes = usertypes
local luasuffixes = utilities.lua.suffixes
@@ -92,11 +94,13 @@ local relations = allocate { -- todo: handlers also here
names = { "mp" },
variable = 'MPINPUTS',
suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
+ usertype = true,
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
+ suffixes = { "tex", "mkvi", "mkiv", "mkii", "cld", "lfg", "xml" }, -- known suffixes have less lookups
+ usertype = true,
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -112,6 +116,7 @@ local relations = allocate { -- todo: handlers also here
names = { "lua" },
variable = 'LUAINPUTS',
suffixes = { luasuffixes.lua, luasuffixes.luc, luasuffixes.tma, luasuffixes.tmc },
+ usertype = true,
},
lib = {
names = { "lib" },
@@ -120,11 +125,15 @@ local relations = allocate { -- todo: handlers also here
},
bib = {
names = { 'bib' },
+ variable = 'BIBINPUTS',
suffixes = { 'bib' },
+ usertype = true,
},
bst = {
names = { 'bst' },
+ variable = 'BSTINPUTS',
suffixes = { 'bst' },
+ usertype = true,
},
fontconfig = {
names = { 'fontconfig', 'fontconfig file', 'fontconfig files' },
@@ -210,8 +219,9 @@ function resolvers.updaterelations()
for name, relation in next, categories do
local rn = relation.names
local rv = relation.variable
- local rs = relation.suffixes
if rn and rv then
+ local rs = relation.suffixes
+ local ru = relation.usertype
for i=1,#rn do
local rni = lower(gsub(rn[i]," ",""))
formats[rni] = rv
@@ -223,8 +233,9 @@ function resolvers.updaterelations()
end
end
end
- end
- if rs then
+ if ru then
+ usertypes[name] = true
+ end
end
end
end
diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua
index c67e97bb1..0a7396171 100644
--- a/tex/context/base/data-exp.lua
+++ b/tex/context/base/data-exp.lua
@@ -11,16 +11,20 @@ local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, Carg, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.P, lpeg.C, lpeg.S
local type, next = type, next
+local isdir = lfs.isdir
local ostype = os.type
-local collapsepath = file.collapsepath
+local collapsepath, joinpath, basename = file.collapsepath, file.join, file.basename
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+local trace_globbing = true trackers.register("resolvers.globbing", function(v) trace_globbing = v end)
local report_expansions = logs.reporter("resolvers","expansions")
+local report_globbing = logs.reporter("resolvers","globbing")
-local resolvers = resolvers
+local resolvers = resolvers
+local resolveprefix = resolvers.resolve
-- As this bit of code is somewhat special it gets its own module. After
-- all, when working on the main resolver code, I don't want to scroll
@@ -123,7 +127,7 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
local old = str
str = lpegmatch(l_rest, str)
until old == str
- until old == str -- or not find(str,"{")
+ until old == str -- or not find(str,"{",1,true)
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
@@ -177,34 +181,28 @@ end
-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-local cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
-}
+local usedhomedir = nil
+local donegation = (P("!") /"" )^0
+local doslashes = (P("\\")/"/" + 1)^0
-function resolvers.cleanpath(str) -- tricky, maybe only simple paths
- local doslashes = (P("\\")/"/" + 1)^0
- local donegation = (P("!") /"" )^0
- local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
- if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if not str or find(str,"~") then
- return "" -- special case
- else
- return lpegmatch(cleanup,str)
+local function expandedhome()
+ if not usedhomedir then
+ usedhomedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if usedhomedir == "~" or usedhomedir == "" or not isdir(usedhomedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent path using current path")
end
- end
- else
- local dohome = ((P("~")+P("$HOME"))/homedir)^0
- local cleanup = Cs(donegation * dohome * doslashes)
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str) or ""
+ usedhomedir = "."
end
end
- return resolvers.cleanpath(str)
+ return usedhomedir
+end
+
+local dohome = ((P("~")+P("$HOME")+P("%HOME%"))/expandedhome)^0
+local cleanup = Cs(donegation * dohome * doslashes)
+
+resolvers.cleanpath = function(str)
+ return str and lpegmatch(cleanup,str) or ""
end
-- print(resolvers.cleanpath(""))
@@ -216,11 +214,18 @@ end
-- This one strips quotes and funny tokens.
-local expandhome = P("~") / "$HOME" -- environment.homedir or "home:"
+-- we have several options here:
+--
+-- expandhome = P("~") / "$HOME" : relocatable
+-- expandhome = P("~") / "home:" : relocatable
+-- expandhome = P("~") / environment.homedir : frozen but unexpanded
+-- expandhome = P("~") = dohome : frozen and expanded
-local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
-local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
-local dostring = (expandhome + 1 )^0
+local expandhome = P("~") / "$HOME"
+
+local dodouble = P('"') / "" * (expandhome + (1 - P('"')))^0 * P('"') / ""
+local dosingle = P("'") / "" * (expandhome + (1 - P("'")))^0 * P("'") / ""
+local dostring = (expandhome + 1 )^0
local stripper = Cs(
lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
@@ -285,7 +290,7 @@ end
function resolvers.joinpath(str)
if type(str) == 'table' then
- return file.joinpath(str)
+ return joinpath(str)
else
return str
end
@@ -293,25 +298,25 @@ end
-- The next function scans directories and returns a hash where the
-- entries are either strings or tables.
-
+--
-- starting with . or .. etc or funny char
-
---~ local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = P(" ")
---~ local l_character = lpegpatterns.utf8
---~ local l_dangerous = P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1)
---~ ----- l_normal = l_normal * Cc(true) + Cc(false)
-
---~ local function test(str)
---~ print(str,lpegmatch(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
+--
+-- local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+-- local l_confusing = P(" ")
+-- local l_character = lpegpatterns.utf8
+-- local l_dangerous = P(".")
+--
+-- local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1)
+-- ----- l_normal = l_normal * Cc(true) + Cc(false)
+--
+-- local function test(str)
+-- print(str,lpegmatch(l_normal,str))
+-- end
+-- test("ヒラギノ明朝 Pro W3")
+-- test("..ヒラギノ明朝 Pro W3")
+-- test(":ヒラギノ明朝 Pro W3;")
+-- test("ヒラギノ明朝 /Pro W3;")
+-- test("ヒラギノ明朝 Pro W3")
-- a lot of this caching can be stripped away when we have ssd's everywhere
--
@@ -319,41 +324,67 @@ end
local attributes, directory = lfs.attributes, lfs.dir
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-local timer = { }
-local scanned = { }
-local nofscans = 0
-local scancache = { }
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local lessweird = P(".")^1 + lpeg.anywhere(S("~`#$%^&*:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+local fullcache = { }
+----- simplecache = { }
+local nofsharedscans = 0
+
+-- So, we assume either a lowercase name or a mixed case one, but only one such case,
+-- as having Foo fOo foo FoO FOo etc on the system is braindead in any sane project.
-local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
+local function scan(files,remap,spec,path,n,m,r,onlyone,tolerant)
+ local full = path == "" and spec or (spec .. path .. '/')
local dirs = { }
local nofdirs = 0
+ local pattern = tolerant and lessweird or weird
for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
+ if not lpegmatch(pattern,name) then
+ local mode = attributes(full..name,"mode")
+ if mode == "file" then
n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
+ local lower = lower(name)
+ local paths = files[lower]
+ if paths then
+ if onlyone then
+ -- forget about it
else
- f[#f+1] = path
+ if type(paths) == "string" then
+ files[lower] = { paths, path }
+ else
+ paths[#paths+1] = path
+ end
+ if name ~= lower then
+ local rl = remap[lower]
+ if not rl then
+ remap[lower] = name
+ r = r + 1
+ elseif trace_globbing and rl ~= name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
+ end
end
else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
+ files[lower] = path
if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
+ local rl = remap[lower]
+ if not rl then
+ remap[lower] = name
+ r = r + 1
+ elseif trace_globbing and rl ~= name then
+ report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl)
+ end
end
end
- elseif mode == 'directory' then
+ elseif mode == "directory" then
m = m + 1
nofdirs = nofdirs + 1
if path ~= "" then
- dirs[nofdirs] = path..'/'..name
+ dirs[nofdirs] = path .. "/" .. name
else
dirs[nofdirs] = name
end
@@ -363,113 +394,72 @@ local function scan(files,spec,path,n,m,r)
if nofdirs > 0 then
sort(dirs)
for i=1,nofdirs do
- files, n, m, r = scan(files,spec,dirs[i],n,m,r)
+ files, remap, n, m, r = scan(files,remap,spec,dirs[i],n,m,r,onlyone,tolerant)
end
end
scancache[sub(full,1,-2)] = files
- return files, n, m, r
+ return files, remap, n, m, r
end
-local fullcache = { }
-
-function resolvers.scanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
+function resolvers.scanfiles(path,branch,usecache,onlyonce,tolerant)
+ local realpath = resolveprefix(path)
if usecache then
- local files = fullcache[realpath]
- if files then
+ local content = fullcache[realpath]
+ if content then
if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
+ report_expansions("using cached scan of path %a, branch %a",path,branch or path)
end
- return files
+ nofsharedscans = nofsharedscans + 1
+ return content
end
end
+ --
+ statistics.starttiming(timer)
if trace_locating then
report_expansions("scanning path %a, branch %a",path,branch or path)
end
- local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
- if trace_locating then
- report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ local content
+ if isdir(realpath) then
+ local files, remap, n, m, r = scan({ },{ },realpath .. '/',"",0,0,0,onlyonce,tolerant)
+ content = {
+ metadata = {
+ path = path, -- can be selfautoparent:texmf-whatever
+ files = n,
+ directories = m,
+ remappings = r,
+ },
+ files = files,
+ remap = remap,
+ }
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
+ end
+ else
+ content = {
+ metadata = {
+ path = path, -- can be selfautoparent:texmf-whatever
+ files = 0,
+ directories = 0,
+ remappings = 0,
+ },
+ files = { },
+ remap = { },
+ }
+ if trace_locating then
+ report_expansions("invalid path %a",realpath)
+ end
end
if usecache then
scanned[#scanned+1] = realpath
- fullcache[realpath] = files
+ fullcache[realpath] = content
end
nofscans = nofscans + 1
statistics.stoptiming(timer)
- return files
+ return content
end
-local function simplescan(files,spec,path) -- first match only, no map and such
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- local nofdirs = 0
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if not files[name] then
- -- only first match
- files[name] = path
- end
- elseif mode == 'directory' then
- nofdirs = nofdirs + 1
- if path ~= "" then
- dirs[nofdirs] = path..'/'..name
- else
- dirs[nofdirs] = name
- end
- end
- end
- end
- if nofdirs > 0 then
- sort(dirs)
- for i=1,nofdirs do
- files = simplescan(files,spec,dirs[i])
- end
- end
- return files
-end
-
-local simplecache = { }
-local nofsharedscans = 0
-
function resolvers.simplescanfiles(path,branch,usecache)
- statistics.starttiming(timer)
- local realpath = resolvers.resolve(path) -- no shortcut
- if usecache then
- local files = simplecache[realpath]
- if not files then
- files = scancache[realpath]
- if files then
- nofsharedscans = nofsharedscans + 1
- end
- end
- if files then
- if trace_locating then
- report_expansions("using caches scan of path %a, branch %a",path,branch or path)
- end
- return files
- end
- end
- if trace_locating then
- report_expansions("scanning path %a, branch %a",path,branch or path)
- end
- local files = simplescan({ },realpath .. '/',"")
- if trace_locating then
- report_expansions("%s files found",table.count(files))
- end
- if usecache then
- scanned[#scanned+1] = realpath
- simplecache[realpath] = files
- end
- nofscans = nofscans + 1
- statistics.stoptiming(timer)
- return files
+ return resolvers.scanfiles(path,branch,usecache,true,true) -- onlyonce
end
function resolvers.scandata()
@@ -482,4 +472,57 @@ function resolvers.scandata()
}
end
---~ print(table.serialize(resolvers.scanfiles("t:/sources")))
+function resolvers.get_from_content(content,path,name) -- or (content,name)
+ if not content then
+ return
+ end
+ local files = content.files
+ if not files then
+ return
+ end
+ local remap = content.remap
+ if not remap then
+ return
+ end
+ if name then
+ -- this one resolves a remapped name
+ local used = lower(name)
+ return path, remap[used] or used
+ else
+ -- this one does a lookup and resolves a remapped name
+ local name = path
+ local used = lower(name)
+ local path = files[used]
+ if path then
+ return path, remap[used] or used
+ end
+ end
+end
+
+local nothing = function() end
+
+function resolvers.filtered_from_content(content,pattern)
+ if content and type(pattern) == "string" then
+ local pattern = lower(pattern)
+ local files = content.files
+ local remap = content.remap
+ if files and remap then
+ local n = next(files)
+ local function iterator()
+ while n do
+ local k = n
+ n = next(files,k)
+ if find(k,pattern) then
+ return files[k], remap and remap[k] or k
+ end
+ end
+ end
+ return iterator
+ end
+ end
+ return nothing
+end
+
+
+-- inspect(resolvers.simplescanfiles("e:/temporary/mb-mp"))
+-- inspect(resolvers.scanfiles("e:/temporary/mb-mp"))
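
For readers following the change, a minimal usage sketch of the new content table and the two
accessors defined above; the scanned path and file names below are invented for illustration:

    -- hypothetical: scan a tree once and query it afterwards
    local content = resolvers.scanfiles("/opt/texmf-project",false,true)
    print(content.metadata.files, content.metadata.directories)

    -- lookup by basename: returns the subpath(s) plus the original-case name
    -- (the first value is a string, or a table when the name occurs more than once)
    local path, name = resolvers.get_from_content(content,"MyStyle.mkiv")

    -- iterate over all entries whose lowercased name matches a lua pattern
    for path, name in resolvers.filtered_from_content(content,"%.mkiv$") do
        print(path, name)
    end
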
diff --git a/tex/context/base/data-fil.lua b/tex/context/base/data-fil.lua
index 09129e03c..b699fc9e3 100644
--- a/tex/context/base/data-fil.lua
+++ b/tex/context/base/data-fil.lua
@@ -10,7 +10,8 @@ local trace_locating = false trackers.register("resolvers.locating", function(v
local report_files = logs.reporter("resolvers","files")
-local resolvers = resolvers
+local resolvers = resolvers
+local resolveprefix = resolvers.resolve
local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
@@ -18,35 +19,34 @@ local locators, hashers, generators, concatinators = resolvers.locators, resolve
local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
function locators.file(specification)
- local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
+ local filename = specification.filename
+ local realname = resolveprefix(filename) -- no shortcut
if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_files("file locator %a found as %a",name,realname)
+ report_files("file locator %a found as %a",filename,realname)
end
- resolvers.appendhash('file',name,true) -- cache
+ resolvers.appendhash('file',filename,true) -- cache
elseif trace_locating then
- report_files("file locator %a not found",name)
+ report_files("file locator %a not found",filename)
end
end
function hashers.file(specification)
- local name = specification.filename
- local content = caches.loadcontent(name,'files')
- resolvers.registerfilehash(name,content,content==nil)
+ local pathname = specification.filename
+ local content = caches.loadcontent(pathname,'files')
+ resolvers.registerfilehash(pathname,content,content==nil)
end
function generators.file(specification)
- local path = specification.filename
- local content = resolvers.scanfiles(path,false,true) -- scan once
---~ inspect(content)
- resolvers.registerfilehash(path,content,true)
+ local pathname = specification.filename
+ local content = resolvers.scanfiles(pathname,false,true) -- scan once
+ resolvers.registerfilehash(pathname,content,true)
end
concatinators.file = file.join
function finders.file(specification,filetype)
- local filename = specification.filename
+ local filename = specification.filename
local foundname = resolvers.findfile(filename,filetype)
if foundname and foundname ~= "" then
if trace_locating then
diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua
index 201c6a2d7..c74ff2e04 100644
--- a/tex/context/base/data-ini.lua
+++ b/tex/context/base/data-ini.lua
@@ -6,10 +6,12 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
+local next, type, getmetatable, rawset = next, type, getmetatable, rawset
local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
-local next, type = next, type
-
local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join
+local ostype, osname, osuname, ossetenv, osgetenv = os.type, os.name, os.uname, os.setenv, os.getenv
+
+local P, S, R, C, Cs, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.match
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
@@ -17,11 +19,9 @@ local trace_expansions = false trackers.register("resolvers.expansions", functi
local report_initialization = logs.reporter("resolvers","initialization")
-local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-
--- The code here used to be part of a data-res but for convenience
--- we now split it over multiple files. As this file is now the
--- starting point we introduce resolvers here.
+-- The code here used to be part of data-res but for convenience we now split it over multiple
+-- files. As this file is now the starting point we introduce resolvers here. We also put some
+-- helpers here that can later be reimplemented or extended.
resolvers = resolvers or { }
local resolvers = resolvers
@@ -217,7 +217,7 @@ end
environment.texroot = file.collapsepath(texroot)
-if profiler then
+if type(profiler) == "table" and not jit then
directives.register("system.profile",function()
profiler.start("luatex-profile.log")
end)
@@ -225,8 +225,136 @@ end
-- a forward definition
-if not resolvers.resolve then
- function resolvers.resolve (s) return s end
- function resolvers.unresolve(s) return s end
- function resolvers.repath (s) return s end
+-- Because we use resolvers.resolve a lot later on, we will implement the basics here and
+-- add more later.
+
+local prefixes = utilities.storage.allocate()
+resolvers.prefixes = prefixes
+
+local resolved = { }
+local abstract = { }
+local dynamic = { }
+
+function resolvers.resetresolve(str)
+ resolved, abstract = { }, { }
+end
+
+function resolvers.allprefixes(separator)
+ local all = table.sortedkeys(prefixes)
+ if separator then
+ for i=1,#all do
+ all[i] = all[i] .. ":"
+ end
+ end
+ return all
+end
+
+local function _resolve_(method,target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
+ else
+ return method .. ":" .. target
+ end
+end
+
+function resolvers.unresolve(str)
+ return abstract[str] or str
+end
+
+function resolvers.setdynamic(str)
+ dynamic[str] = true
+end
+
+-- home:xx;selfautoparent:xx;
+
+local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0)
+
+local prefix = C(R("az")^2) * P(":")
+local target = C((1-S(" \"\';,"))^1)
+local notarget = (#S(";,") + P(-1)) * Cc("")
+
+local p_resolve = Cs(((prefix * (target + notarget)) / _resolve_ + P(1))^0)
+local p_simple = prefix * P(-1)
+
+local function resolve(str) -- use schemes, this one is then for the commandline only
+ if type(str) == "table" then
+ local res = { }
+ for i=1,#str do
+ res[i] = resolve(str[i])
+ end
+ return res
+ end
+ -- already resolved
+ local res = resolved[str]
+ if res then
+ return res
+ end
+ -- simple resolving of (dynamic) methods
+ local simple = lpegmatch(p_simple,str)
+ local action = prefixes[simple]
+ if action then
+ local res = action(res)
+ if not dynamic[simple] then
+ resolved[simple] = res
+ abstract[res] = simple
+ end
+ return res
+ end
+ -- more extensive resolving (multiple too)
+ res = lpegmatch(p_resolve,str)
+ resolved[str] = res
+ abstract[res] = str
+ return res
+end
+
+resolvers.resolve = resolve
+
+if type(osuname) == "function" then
+
+ for k, v in next, osuname() do
+ if not prefixes[k] then
+ prefixes[k] = function() return v end
+ end
+ end
+
+end
+
+if ostype == "unix" then
+
+    -- We need to distinguish between a prefix and something else: so we
+ -- have a special repath variant for linux. Also, when a new prefix is
+ -- defined, we need to remake the matcher.
+
+ local pattern
+
+ local function makepattern(t,k,v)
+ if t then
+ rawset(t,k,v)
+ end
+        local colon = P(":")
+        local p -- local accumulator for the prefix alternation
+ for k, v in table.sortedpairs(prefixes) do
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ pattern = Cs((p * colon + colon/";" + P(1))^0)
+ end
+
+ makepattern()
+
+ table.setmetatablenewindex(prefixes,makepattern)
+
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+
+else -- already the default:
+
+ function resolvers.repath(str)
+ return str
+ end
+
end
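
To make the prefix mechanism introduced above concrete, a small sketch; the "demo" prefix and
the strings are invented, and on unix adding the prefix also triggers a rebuild of the repath
matcher via the metatable hook:

    -- register a hypothetical prefix
    resolvers.prefixes.demo = function(str)
        return "/data/demo/" .. (str or "")
    end

    print(resolvers.resolve("demo:fonts/test.otf"))  -- "/data/demo/fonts/test.otf"
    print(resolvers.resolve("foo:bar"))              -- unknown prefix, returned as-is

    -- prefixes marked dynamic are not cached when resolved on their own
    resolvers.setdynamic("demo")
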
diff --git a/tex/context/base/data-lst.lua b/tex/context/base/data-lst.lua
index 8996fa251..e4621a6e1 100644
--- a/tex/context/base/data-lst.lua
+++ b/tex/context/base/data-lst.lua
@@ -8,12 +8,16 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
-local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local rawget, type, next = rawget, type, next
+
+local find, concat, upper = string.find, table.concat, string.upper
local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
-resolvers.listers = resolvers.listers or { }
+local resolvers = resolvers
+local listers = resolvers.listers or { }
+resolvers.listers = listers
-local resolvers = resolvers
+local resolveprefix = resolvers.resolve
local report_lists = logs.reporter("resolvers","lists")
@@ -25,7 +29,7 @@ local function tabstr(str)
end
end
-function resolvers.listers.variables(pattern)
+function listers.variables(pattern)
local instance = resolvers.instance
local environment = instance.environment
local variables = instance.variables
@@ -46,10 +50,10 @@ function resolvers.listers.variables(pattern)
for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset")
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset")
end
end
instance.environment = fastcopy(env)
@@ -59,15 +63,15 @@ end
local report_resolved = logs.reporter("system","resolved")
-function resolvers.listers.configurations()
+function listers.configurations()
local configurations = resolvers.instance.specification
for i=1,#configurations do
- report_resolved("file : %s",resolvers.resolve(configurations[i]))
+ report_resolved("file : %s",resolveprefix(configurations[i]))
end
report_resolved("")
local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
for i=1,#list do
- local li = resolvers.resolve(list[i])
+ local li = resolveprefix(list[i])
if lfs.isdir(li) then
report_resolved("path - %s",li)
else
diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua
index 0e7c81181..7c12a5940 100644
--- a/tex/context/base/data-lua.lua
+++ b/tex/context/base/data-lua.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['data-lua'] = {
-- This is now a plug in into l-lua (as we also use the extra paths elsewhere).
-local resolvers, package = resolvers, package
+local package, lpeg = package, lpeg
local gsub = string.gsub
local concat = table.concat
@@ -16,18 +16,21 @@ local addsuffix = file.addsuffix
local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match
-local luasuffixes = { 'tex', 'lua' }
-local libsuffixes = { 'lib' }
-local luaformats = { 'TEXINPUTS', 'LUAINPUTS' }
-local libformats = { 'CLUAINPUTS' }
-local helpers = package.helpers or { }
-local methods = helpers.methods or { }
+local luasuffixes = { 'tex', 'lua' }
+local libsuffixes = { 'lib' }
+local luaformats = { 'TEXINPUTS', 'LUAINPUTS' }
+local libformats = { 'CLUAINPUTS' }
+local helpers = package.helpers or { }
+local methods = helpers.methods or { }
+
+local resolvers = resolvers
+local resolveprefix = resolvers.resolve
+
+helpers.report = logs.reporter("resolvers","libraries")
trackers.register("resolvers.libraries", function(v) helpers.trace = v end)
trackers.register("resolvers.locating", function(v) helpers.trace = v end)
-helpers.report = logs.reporter("resolvers","libraries")
-
helpers.sequence = {
"already loaded",
"preload table",
@@ -44,7 +47,7 @@ helpers.sequence = {
local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0)
function helpers.cleanpath(path) -- hm, don't we have a helper for this?
- return resolvers.resolve(lpegmatch(pattern,path))
+ return resolveprefix(lpegmatch(pattern,path))
end
local loadedaslib = helpers.loadedaslib
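
The rewritten cleanpath helper above first strips the leading "!" markers and collapses repeated
slashes with the lpeg pattern, and only then applies prefix resolving; a rough illustration,
assuming the helpers table is exposed as package.helpers and using an invented input:

    -- "!!texmf//fonts///data/"  ->  "texmf/fonts/data/"  (resolveprefix is a no-op here)
    print(package.helpers.cleanpath("!!texmf//fonts///data/"))
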
diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua
index ee9de3fd9..4e8a48f50 100644
--- a/tex/context/base/data-met.lua
+++ b/tex/context/base/data-met.lua
@@ -36,9 +36,7 @@ local function splitmethod(filename) -- todo: filetype in specification
end
filename = file.collapsepath(filename,".") -- hm, we should keep ./ in some cases
- -- filename = gsub(filename,"^%./",getcurrentdir().."/") -- we will merge dir.expandname and collapse some day
-
- if not find(filename,"://") then
+ if not find(filename,"://",1,true) then
return { scheme = "file", path = filename, original = filename, filename = filename }
end
local specification = url.hashed(filename)
diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua
index f2f5bddc4..1c5016f86 100644
--- a/tex/context/base/data-pre.lua
+++ b/tex/context/base/data-pre.lua
@@ -6,63 +6,61 @@ if not modules then modules = { } end modules ['data-pre'] = {
license = "see context related readme files"
}
--- It could be interesting to hook the resolver in the file
--- opener so that unresolved prefixes travel around and we
--- get more abstraction.
+local resolvers = resolvers
+local prefixes = resolvers.prefixes
--- As we use this beforehand we will move this up in the chain
--- of loading.
+local cleanpath = resolvers.cleanpath
+local findgivenfile = resolvers.findgivenfile
+local expansion = resolvers.expansion
+local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex"))
+local basename = file.basename
+local dirname = file.dirname
+local joinpath = file.join
-local resolvers = resolvers
-local prefixes = utilities.storage.allocate()
-resolvers.prefixes = prefixes
-
-local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
-local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
-local P, S, R, C, Cs, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.match
-local joinpath, basename, dirname = file.join, file.basename, file.dirname
-local getmetatable, rawset, type = getmetatable, rawset, type
-
--- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+local isfile = lfs.isfile
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
-prefixes.relative = function(str,n) -- lfs.isfile
- if io.exists(str) then
- -- nothing
- elseif io.exists("./" .. str) then
- str = "./" .. str
- else
- local p = "../"
- for i=1,n or 2 do
- if io.exists(p .. str) then
- str = p .. str
- break
- else
- p = p .. "../"
+local function relative(str,n)
+ if not isfile(str) then
+ local pstr = "./" .. str
+ if isfile(pstr) then
+ str = pstr
+ else
+ local p = "../"
+ for i=1,n or 2 do
+ local pstr = p .. str
+ if isfile(pstr) then
+ str = pstr
+ break
+ else
+ p = p .. "../"
+ end
end
end
end
return cleanpath(str)
end
+local function locate(str)
+ local fullname = findgivenfile(str) or ""
+ return cleanpath(fullname ~= "" and fullname or str)
+end
+
+prefixes.relative = relative
+prefixes.locate = locate
+
prefixes.auto = function(str)
- local fullname = prefixes.relative(str)
- if not lfs.isfile(fullname) then
- fullname = prefixes.locate(str)
+ local fullname = relative(str)
+ if not isfile(fullname) then
+ fullname = locate(str)
end
return fullname
end
-prefixes.locate = function(str)
- local fullname = findgivenfile(str) or ""
- return cleanpath((fullname ~= "" and fullname) or str)
-end
-
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
@@ -89,25 +87,6 @@ prefixes.home = function(str)
return cleanpath(joinpath(getenv('HOME'),str))
end
-local function toppath()
- local inputstack = resolvers.inputstack -- dependency, actually the code should move but it's
- if not inputstack then -- more convenient to keep it here
- return "."
- end
- local pathname = dirname(inputstack[#inputstack] or "")
- if pathname == "" then
- return "."
- else
- return pathname
- end
-end
-
-resolvers.toppath = toppath
-
-prefixes.toppath = function(str)
- return cleanpath(joinpath(toppath(),str))
-end
-
prefixes.env = prefixes.environment
prefixes.rel = prefixes.relative
prefixes.loc = prefixes.locate
@@ -116,131 +95,48 @@ prefixes.full = prefixes.locate
prefixes.file = prefixes.filename
prefixes.path = prefixes.pathname
-function resolvers.allprefixes(separator)
- local all = table.sortedkeys(prefixes)
- if separator then
- for i=1,#all do
- all[i] = all[i] .. ":"
- end
- end
- return all
-end
+-- This one assumes that inputstack is set (used in the tex loader). The resolved path is only
+-- momentary, as the top of the input stack changes.
-local function _resolve_(method,target)
- local action = prefixes[method]
- if action then
- return action(target)
- else
- return method .. ":" .. target
+local function toppath()
+ local inputstack = resolvers.inputstack -- dependency, actually the code should move but it's
+ if not inputstack then -- more convenient to keep it here
+ return "."
end
-end
-
-local resolved, abstract = { }, { }
-
-function resolvers.resetresolve(str)
- resolved, abstract = { }, { }
-end
-
--- todo: use an lpeg (see data-lua for !! / stripper)
-
--- local function resolve(str) -- use schemes, this one is then for the commandline only
--- if type(str) == "table" then
--- local t = { }
--- for i=1,#str do
--- t[i] = resolve(str[i])
--- end
--- return t
--- else
--- local res = resolved[str]
--- if not res then
--- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added)
--- resolved[str] = res
--- abstract[res] = str
--- end
--- return res
--- end
--- end
-
--- home:xx;selfautoparent:xx;
-
-local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0)
-
-local prefix = C(R("az")^2) * P(":")
-local target = C((1-S(" \"\';,"))^1)
-local notarget = (#S(";,") + P(-1)) * Cc("")
-
-local pattern = Cs(((prefix * (target + notarget)) / _resolve_ + P(1))^0)
-
-local function resolve(str) -- use schemes, this one is then for the commandline only
- if type(str) == "table" then
- local t = { }
- for i=1,#str do
- t[i] = resolve(str[i])
- end
- return t
+ local pathname = dirname(inputstack[#inputstack] or "")
+ if pathname == "" then
+ return "."
else
- local res = resolved[str]
- if not res then
- res = lpegmatch(pattern,str)
- resolved[str] = res
- abstract[res] = str
- end
- return res
+ return pathname
end
end
-local function unresolve(str)
- return abstract[str] or str
-end
-
-resolvers.resolve = resolve
-resolvers.unresolve = unresolve
+-- The next variant is similar but bound to explicitly registered paths. Practice should
+-- show if that gives the same results as the previous one. It is meant for a project
+-- structure.
-if type(os.uname) == "function" then
-
- for k, v in next, os.uname() do
- if not prefixes[k] then
- prefixes[k] = function() return v end
- end
+local function jobpath()
+ local path = resolvers.stackpath()
+ if not path or path == "" then
+ return "."
+ else
+ return path
end
-
end
-if os.type == "unix" then
-
- -- We need to distringuish between a prefix and something else : so we
- -- have a special repath variant for linux. Also, when a new prefix is
- -- defined, we need to remake the matcher.
-
- local pattern
-
- local function makepattern(t,k,v)
- if t then
- rawset(t,k,v)
- end
- local colon = P(":")
- for k, v in table.sortedpairs(prefixes) do
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- pattern = Cs((p * colon + colon/";" + P(1))^0)
- end
+resolvers.toppath = toppath
+resolvers.jobpath = jobpath
- makepattern()
+-- These hooks plug into the resolver:
- getmetatable(prefixes).__newindex = makepattern
+prefixes.toppath = function(str) return cleanpath(joinpath(toppath(),str)) end -- str can be nil or empty
+prefixes.jobpath = function(str) return cleanpath(joinpath(jobpath(),str)) end -- str can be nil or empty
- function resolvers.repath(str)
- return lpegmatch(pattern,str)
- end
+resolvers.setdynamic("toppath")
+resolvers.setdynamic("jobpath")
-else -- already the default:
+-- for a while (obsolete):
- function resolvers.repath(str)
- return str
- end
+prefixes.jobfile = prefixes.jobpath
-end
+resolvers.setdynamic("jobfile")
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index 64c38f82c..cbda21ce2 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -18,7 +18,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local gsub, find, lower, upper, match, gmatch = string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
+local concat, insert, remove, sortedkeys, sortedhash = table.concat, table.insert, table.remove, table.sortedkeys, table.sortedhash
local next, type, rawget = next, type, rawget
local os = os
@@ -29,34 +29,48 @@ local formatters = string.formatters
local filedirname = file.dirname
local filebasename = file.basename
local suffixonly = file.suffixonly
+local addsuffix = file.addsuffix
+local removesuffix = file.removesuffix
local filejoin = file.join
local collapsepath = file.collapsepath
local joinpath = file.joinpath
+local is_qualified_path = file.is_qualified_path
+
local allocate = utilities.storage.allocate
local settings_to_array = utilities.parsers.settings_to_array
+
+local getcurrentdir = lfs.currentdir
+local isfile = lfs.isfile
+local isdir = lfs.isdir
+
local setmetatableindex = table.setmetatableindex
local luasuffixes = utilities.lua.suffixes
-local getcurrentdir = lfs.currentdir
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
-local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+local trace_locating = false trackers .register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers .register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers .register("resolvers.expansions", function(v) trace_expansions = v end)
+local trace_paths = false trackers .register("resolvers.paths", function(v) trace_paths = v end)
+local resolve_otherwise = true directives.register("resolvers.otherwise", function(v) resolve_otherwise = v end)
local report_resolving = logs.reporter("resolvers","resolving")
-local resolvers = resolvers
+local resolvers = resolvers
local expandedpathfromlist = resolvers.expandedpathfromlist
local checkedvariable = resolvers.checkedvariable
local splitconfigurationpath = resolvers.splitconfigurationpath
local methodhandler = resolvers.methodhandler
+local filtered = resolvers.filtered_from_content
+local lookup = resolvers.get_from_content
+local cleanpath = resolvers.cleanpath
+local resolveprefix = resolvers.resolve
-local initializesetter = utilities.setters.initialize
+local initializesetter = utilities.setters.initialize
local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
-resolvers.cacheversion = '1.0.1'
-resolvers.configbanner = ''
+resolvers.cacheversion = "1.100"
+resolvers.configbanner = ""
resolvers.homedir = environment.homedir
resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
resolvers.luacnfname = "texmfcnf.lua"
@@ -133,6 +147,7 @@ local unset_variable = "unset"
local formats = resolvers.formats
local suffixes = resolvers.suffixes
+local usertypes = resolvers.usertypes
local dangerous = resolvers.dangerous
local suffixmap = resolvers.suffixmap
@@ -154,7 +169,7 @@ function resolvers.setenv(key,value,raw)
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
     -- sure that prefixes are resolved
- ossetenv(key,raw and value or resolvers.resolve(value))
+ ossetenv(key,raw and value or resolveprefix(value))
end
end
@@ -177,7 +192,7 @@ resolvers.env = getenv
-- We are going to use some metatable trickery where we backtrack from
-- expansion to variable to environment.
-local function resolve(k)
+local function resolvevariable(k)
return instance.expansions[k]
end
@@ -190,12 +205,12 @@ local somekey = C(R("az","AZ","09","__","--")^1)
local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ P(";") * (P(";") / "")
+ P(1)
-local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
+local variableexpander = Cs( (somevariable * (somekey/resolvevariable) + somethingelse)^1 )
local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
local variablecleaner = Cs((cleaner + P(1))^0)
-local somevariable = R("az","AZ","09","__","--")^1 / resolve
+local somevariable = R("az","AZ","09","__","--")^1 / resolvevariable
local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))
local variableresolver = Cs((variable + P(1))^0)
@@ -205,9 +220,12 @@ end
function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
- if trace_locating then
+ -- normally we only need one instance but for special cases we can (re)load one so
+ -- we stick to this model.
+
+ if trace_locating then
report_resolving("creating instance")
- end
+ end
local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
@@ -222,6 +240,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
foundintrees = allocate(),
hashes = allocate(),
hashed = allocate(),
+ pathlists = false,-- delayed
specification = allocate(),
lists = allocate(),
data = allocate(), -- only for loading
@@ -234,6 +253,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
savelists = true,
pattern = nil, -- lists
force_suffixes = true,
+ pathstack = { },
}
setmetatableindex(variables,function(t,k)
@@ -290,8 +310,14 @@ function resolvers.reset()
end
local function reset_hashes()
- instance.lists = { }
- instance.found = { }
+ instance.lists = { }
+ instance.pathlists = false
+ instance.found = { }
+end
+
+local function reset_caches()
+ instance.lists = { }
+ instance.pathlists = false
end
local slash = P("/")
@@ -355,14 +381,14 @@ local function identify_configuration_files()
for i=1,#cnfpaths do
local filepath = cnfpaths[i]
local filename = collapsepath(filejoin(filepath,luacnfname))
- local realname = resolvers.resolve(filename) -- can still have "//" ... needs checking
+ local realname = resolveprefix(filename) -- can still have "//" ... needs checking
-- todo: environment.skipweirdcnfpaths directive
if trace_locating then
- local fullpath = gsub(resolvers.resolve(collapsepath(filepath)),"//","/")
- local weirdpath = find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c")
+ local fullpath = gsub(resolveprefix(collapsepath(filepath)),"//","/")
+ local weirdpath = find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c",1,true)
report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath)
end
- if lfs.isfile(realname) then
+ if isfile(realname) then
specification[#specification+1] = filename -- unresolved as we use it in matching, relocatable
if trace_locating then
report_resolving("found configuration file %a",realname)
@@ -385,7 +411,7 @@ local function load_configuration_files()
local filename = specification[i]
local pathname = filedirname(filename)
local filename = filejoin(pathname,luacnfname)
- local realname = resolvers.resolve(filename) -- no shortcut
+ local realname = resolveprefix(filename) -- no shortcut
local blob = loadfile(realname)
if blob then
local setups = instance.setups
@@ -393,7 +419,7 @@ local function load_configuration_files()
local parent = data and data.parent
if parent then
local filename = filejoin(pathname,parent)
- local realname = resolvers.resolve(filename) -- no shortcut
+ local realname = resolveprefix(filename) -- no shortcut
local blob = loadfile(realname)
if blob then
local parentdata = blob()
@@ -418,7 +444,7 @@ local function load_configuration_files()
elseif variables[k] == nil then
if trace_locating and not warning then
report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable",
- k,resolvers.resolve(filename))
+ k,resolveprefix(filename))
warning = true
end
variables[k] = v
@@ -491,7 +517,7 @@ local function locate_file_databases()
local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
if stripped ~= "" then
local runtime = stripped == path
- path = resolvers.cleanpath(path)
+ path = cleanpath(path)
local spec = resolvers.splitmethod(stripped)
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
@@ -557,8 +583,8 @@ function resolvers.renew(hashname)
report_resolving("identifying tree %a",hashname)
end
end
- local realpath = resolvers.resolve(hashname)
- if lfs.isdir(realpath) then
+ local realpath = resolveprefix(hashname)
+ if isdir(realpath) then
if trace_locating then
report_resolving("using path %a",realpath)
end
@@ -680,25 +706,62 @@ function resolvers.unexpandedpath(str)
return joinpath(resolvers.unexpandedpathlist(str))
end
+function resolvers.pushpath(name)
+ local pathstack = instance.pathstack
+ local lastpath = pathstack[#pathstack]
+ local pluspath = filedirname(name)
+ if lastpath then
+ lastpath = collapsepath(filejoin(lastpath,pluspath))
+ else
+ lastpath = collapsepath(pluspath)
+ end
+ insert(pathstack,lastpath)
+ if trace_paths then
+ report_resolving("pushing path %a",lastpath)
+ end
+end
+
+function resolvers.poppath()
+ local pathstack = instance.pathstack
+ if trace_paths and #pathstack > 0 then
+ report_resolving("popping path %a",pathstack[#pathstack])
+ end
+ remove(pathstack)
+end
+
+function resolvers.stackpath()
+ local pathstack = instance.pathstack
+ local currentpath = pathstack[#pathstack]
+ return currentpath ~= "" and currentpath or nil
+end
+
local done = { }
function resolvers.resetextrapath()
local ep = instance.extra_paths
if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
+ done = { }
+ instance.extra_paths = { }
elseif #ep > 0 then
- instance.lists, done = { }, { }
+ done = { }
+ reset_caches()
end
end
function resolvers.registerextrapath(paths,subpaths)
- paths = settings_to_array(paths)
- subpaths = settings_to_array(subpaths)
- local ep = instance.extra_paths or { }
- local oldn = #ep
- local newn = oldn
- local nofpaths = #paths
+ if not subpaths or subpaths == "" then
+        if not paths or paths == "" then
+ return -- invalid spec
+ elseif done[paths] then
+ return -- already done
+ end
+ end
+ local paths = settings_to_array(paths)
+ local subpaths = settings_to_array(subpaths)
+ local ep = instance.extra_paths or { }
+ local oldn = #ep
+ local newn = oldn
+ local nofpaths = #paths
local nofsubpaths = #subpaths
if nofpaths > 0 then
if nofsubpaths > 0 then
@@ -709,7 +772,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps = p .. "/" .. s
if not done[ps] then
newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
+ ep[newn] = cleanpath(ps)
done[ps] = true
end
end
@@ -719,7 +782,7 @@ function resolvers.registerextrapath(paths,subpaths)
local p = paths[i]
if not done[p] then
newn = newn + 1
- ep[newn] = resolvers.cleanpath(p)
+ ep[newn] = cleanpath(p)
done[p] = true
end
end
@@ -731,7 +794,7 @@ function resolvers.registerextrapath(paths,subpaths)
local ps = ep[i] .. "/" .. s
if not done[ps] then
newn = newn + 1
- ep[newn] = resolvers.cleanpath(ps)
+ ep[newn] = cleanpath(ps)
done[ps] = true
end
end
@@ -740,57 +803,80 @@ function resolvers.registerextrapath(paths,subpaths)
if newn > 0 then
instance.extra_paths = ep -- register paths
end
- if newn > oldn then
- instance.lists = { } -- erase the cache
+ if newn ~= oldn then
+ reset_caches()
end
end
-local function made_list(instance,list)
- local ep = instance.extra_paths
- if not ep or #ep == 0 then
- return list
+function resolvers.pushextrapath(path)
+ local paths = settings_to_array(path)
+ if instance.extra_stack then
+ insert(instance.extra_stack,1,paths)
else
- local done, new, newn = { }, { }, 0
- -- honour . .. ../.. but only when at the start
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- if find(v,"^[%.%/]$") then
- done[v] = true
- newn = newn + 1
- new[newn] = v
- else
- break
- end
- end
- end
- -- first the extra paths
- for k=1,#ep do
- local v = ep[k]
+ instance.extra_stack = { paths }
+ end
+ reset_caches()
+end
+
+function resolvers.popextrapath()
+ if instance.extra_stack then
+ reset_caches()
+ return remove(instance.extra_stack,1)
+ end
+end
+
+local function made_list(instance,list,extra_too)
+ local done = { }
+ local new = { }
+ local newn = 0
+ -- a helper
+ local function add(p)
+ for k=1,#p do
+ local v = p[k]
if not done[v] then
done[v] = true
newn = newn + 1
new[newn] = v
end
end
- -- next the formal paths
- for k=1,#list do
- local v = list[k]
- if not done[v] then
- done[v] = true
- newn = newn + 1
- new[newn] = v
+ end
+ -- honour . .. ../.. but only when at the start
+ for k=1,#list do
+ local v = list[k]
+ if done[v] then
+ -- skip
+ elseif find(v,"^[%.%/]$") then
+ done[v] = true
+ newn = newn + 1
+ new[newn] = v
+ else
+ break
+ end
+ end
+ if extra_too then
+ -- first the stacked paths
+ local es = instance.extra_stack
+ if es and #es > 0 then
+ for k=1,#es do
+ add(es[k])
end
end
- return new
+ -- then the extra paths
+ local ep = instance.extra_paths
+ if ep and #ep > 0 then
+ add(ep)
+ end
end
+ -- last the formal paths
+ add(list)
+ return new
end
function resolvers.cleanpathlist(str)
local t = resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i] = collapsepath(resolvers.cleanpath(t[i]))
+ t[i] = collapsepath(cleanpath(t[i]))
end
end
return t
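
To illustrate the lookup order that made_list now produces when extra paths are enabled (stacked
paths first, then registered extra paths, then the formal paths), a hypothetical sequence with
invented directories, using TEXINPUTS only as an example variable:

    resolvers.registerextrapath("/data/styles")       -- permanent extra path
    resolvers.pushextrapath("/data/projects/demo")    -- temporary, stacked on top

    local list = resolvers.expandedpathlist("TEXINPUTS",true)  -- extra_too enabled

    resolvers.popextrapath()                          -- drop the stacked path again
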
@@ -800,22 +886,22 @@ function resolvers.expandpath(str)
return joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expandedpathlist(str)
+function resolvers.expandedpathlist(str,extra_too)
if not str then
return { }
- elseif instance.savelists then
+ elseif instance.savelists then -- hm, what if two cases, with and without extra_too
str = lpegmatch(dollarstripper,str)
local lists = instance.lists
local lst = lists[str]
if not lst then
- local l = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ local l = made_list(instance,resolvers.splitpath(resolvers.expansion(str)),extra_too)
lst = expandedpathfromlist(l)
lists[str] = lst
end
return lst
else
local lst = resolvers.splitpath(resolvers.expansion(str))
- return made_list(instance,expandedpathfromlist(lst))
+ return made_list(instance,expandedpathfromlist(lst),extra_too)
end
end
@@ -829,6 +915,14 @@ function resolvers.expandpathfromvariable(str)
return joinpath(resolvers.expandedpathlistfromvariable(str))
end
+function resolvers.cleanedpathlist(v) -- can be cached if needed
+ local t = resolvers.expandedpathlist(v)
+ for i=1,#t do
+ t[i] = resolvers.resolve(resolvers.cleanpath(t[i]))
+ end
+ return t
+end
+
function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
-- local ori = resolvers.variable(str)
-- if ori == "" then
@@ -850,7 +944,7 @@ function resolvers.registerfilehash(name,content,someerror)
end
local function isreadable(name)
- local readable = lfs.isfile(name) -- not file.is_readable(name) asit can be a dir
+    local readable = isfile(name) -- not file.is_readable(name) as it can be a dir
if trace_detail then
if readable then
report_resolving("file %a is readable",name)
@@ -861,75 +955,59 @@ local function isreadable(name)
return readable
end
--- name
--- name/name
+-- name | name/name
-local function collect_files(names)
- local filelist, noffiles = { }, 0
+local function collect_files(names) -- potential files .. sort of too much when asking for just one file
+ local filelist = { } -- but we need it for pattern matching later on
+ local noffiles = 0
+ local function check(hash,root,pathname,path,name)
+ if not pathname or find(path,pathname) then
+ local variant = hash.type
+ local search = filejoin(root,path,name) -- funny no concatinator
+ local result = methodhandler('concatinators',variant,root,path,name)
+ if trace_detail then
+ report_resolving("match: variant %a, search %a, result %a",variant,search,result)
+ end
+ noffiles = noffiles + 1
+ filelist[noffiles] = { variant, search, result }
+ end
+ end
for k=1,#names do
- local fname = names[k]
+ local filename = names[k]
if trace_detail then
- report_resolving("checking name %a",fname)
+ report_resolving("checking name %a",filename)
end
- local bname = filebasename(fname)
- local dname = filedirname(fname)
- if dname == "" or find(dname,"^%.") then
- dname = false
+ local basename = filebasename(filename)
+ local pathname = filedirname(filename)
+ if pathname == "" or find(pathname,"^%.") then
+ pathname = false
else
- dname = gsub(dname,"%*",".*")
- dname = "/" .. dname .. "$"
+ pathname = gsub(pathname,"%*",".*")
+ pathname = "/" .. pathname .. "$"
end
local hashes = instance.hashes
for h=1,#hashes do
- local hash = hashes[h]
- local blobpath = hash.name
- local files = blobpath and instance.files[blobpath]
- if files then
+ local hash = hashes[h]
+ local hashname = hash.name
+ local content = hashname and instance.files[hashname]
+ if content then
if trace_detail then
- report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname)
+ report_resolving("deep checking %a, base %a, pattern %a",hashname,basename,pathname)
end
- local blobfile = files[bname]
- if not blobfile then
- local rname = "remap:"..bname
- blobfile = files[rname]
- if blobfile then
- bname = files[rname]
- blobfile = files[bname]
- end
- end
- if blobfile then
- local blobroot = files.__path__ or blobpath
- if type(blobfile) == 'string' then
- if not dname or find(blobfile,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,blobfile,bname)
- local search = filejoin(blobroot,blobfile,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
+ local path, name = lookup(content,basename)
+ if path then
+ local metadata = content.metadata
+ local realroot = metadata and metadata.path or hashname
+ if type(path) == "string" then
+ check(hash,realroot,pathname,path,name)
else
- for kk=1,#blobfile do
- local vv = blobfile[kk]
- if not dname or find(vv,dname) then
- local variant = hash.type
- -- local search = filejoin(blobpath,vv,bname)
- local search = filejoin(blobroot,vv,bname)
- local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
- if trace_detail then
- report_resolving("match: variant %a, search %a, result %a",variant,search,result)
- end
- noffiles = noffiles + 1
- filelist[noffiles] = { variant, search, result }
- end
+ for i=1,#path do
+ check(hash,realroot,pathname,path[i],name)
end
end
end
elseif trace_locating then
- report_resolving("no match in %a (%s)",blobpath,bname)
+ report_resolving("no match in %a (%s)",hashname,basename)
end
end
end
@@ -960,7 +1038,7 @@ end
local function can_be_dir(name) -- can become local
local fakepaths = instance.fakepaths
if not fakepaths[name] then
- if lfs.isdir(name) then
+ if isdir(name) then
fakepaths[name] = 1 -- directory
else
fakepaths[name] = 2 -- no directory
@@ -986,10 +1064,11 @@ local function find_analyze(filename,askedformat,allresults)
if askedformat == "" then
if ext == "" or not suffixmap[ext] then
local defaultsuffixes = resolvers.defaultsuffixes
+ local formatofsuffix = resolvers.formatofsuffix
for i=1,#defaultsuffixes do
local forcedname = filename .. '.' .. defaultsuffixes[i]
wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ filetype = formatofsuffix(forcedname)
if trace_locating then
report_resolving("forcing filetype %a",filetype)
end
@@ -1027,11 +1106,11 @@ local function find_direct(filename,allresults)
end
local function find_wildcard(filename,allresults)
- if find(filename,'%*') then
+ if find(filename,'*',1,true) then
if trace_locating then
report_resolving("checking wildcard %a", filename)
end
- local method, result = resolvers.findwildcardfiles(filename)
+ local result = resolvers.findwildcardfiles(filename)
if result then
return "wildcard", result
end
@@ -1039,7 +1118,7 @@ local function find_wildcard(filename,allresults)
end
local function find_qualified(filename,allresults,askedformat,alsostripped) -- this one will be split too
- if not file.is_qualified_path(filename) then
+ if not is_qualified_path(filename) then
return
end
if trace_locating then
@@ -1135,50 +1214,91 @@ local function check_subpath(fname)
end
end
-local function find_intree(filename,filetype,wantedfiles,allresults)
+-- this caching is not really needed (seldom accessed) but more readable
+-- we could probably move some to a higher level but then we need to adapt
+-- more code ... maybe some day
+
+local function makepathlist(list,filetype)
local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- local method = "intree"
+ local pathlist = resolvers.expandedpathlist(typespec,filetype and usertypes[filetype]) -- only extra path with user files
+ local entry = { }
if pathlist and #pathlist > 0 then
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local prescanned = find(path,'^!!')
+            local recursive  = find(path,'//$')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local expression = makepathexpression(pathname)
+ local barename = gsub(pathname,"/+$","")
+ barename = resolveprefix(barename)
+ local scheme = url.hasscheme(barename)
+ local schemename = gsub(barename,"%.%*$",'') -- after scheme
+ -- local prescanned = path ~= pathname -- ^!!
+ -- local resursive = find(pathname,'//$')
+ entry[k] = {
+ path = path,
+ pathname = pathname,
+ prescanned = prescanned,
+ recursive = recursive,
+ expression = expression,
+ barename = barename,
+ scheme = scheme,
+ schemename = schemename,
+ }
+ end
+ entry.typespec = typespec
+ list[filetype] = entry
+ else
+ list[filetype] = false
+ end
+ return entry
+end
+
+-- pathlist : resolved
+-- dirlist : unresolved or resolved
+-- filelist : unresolved
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local pathlists = instance.pathlists
+ if not pathlists then
+ pathlists = setmetatableindex(allocate(),makepathlist)
+ instance.pathlists = pathlists
+ end
+ local pathlist = pathlists[filetype]
+ if pathlist then
-- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
+ local method = "intree"
+ local filelist = collect_files(wantedfiles) -- okay, a bit over the top when we just look relative to the current path
+ local dirlist = { }
+ local result = { }
if filelist then
for i=1,#filelist do
dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- report_resolving("checking filename %a",filename)
+ report_resolving("checking filename %a in tree",filename)
end
- local resolve = resolvers.resolve
- local result = { }
- -- pathlist : resolved
- -- dirlist : unresolved or resolved
- -- filelist : unresolved
for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname,'//$') then
- doscan = false -- we check directly on the path
- end
- local done = false
+ local entry = pathlist[k]
+ local path = entry.path
+ local pathname = entry.pathname
+ local done = false
-- using file list
if filelist then -- database
-- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
+ local expression = entry.expression
if trace_detail then
report_resolving("using pattern %a for path %a",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
+ local f = fl[2]
+ local d = dirlist[k]
-- resolve is new:
- if find(d,expression) or find(resolve(d),expression) then
+ if find(d,expression) or find(resolveprefix(d),expression) then
-- todo, test for readable
- result[#result+1] = resolve(fl[3]) -- no shortcut
+ result[#result+1] = resolveprefix(fl[3]) -- no shortcut
done = true
if allresults then
if trace_detail then
@@ -1198,61 +1318,74 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
if done then
method = "database"
else
- method = "filesystem" -- bonus, even when !! is specified
- pathname = gsub(pathname,"/+$","")
- pathname = resolve(pathname)
- local scheme = url.hasscheme(pathname)
+ -- beware: we don't honor allresults here in a next attempt (done false)
+ -- but that is kind of special anyway
+ method = "filesystem" -- bonus, even when !! is specified
+ local scheme = entry.scheme
if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
+ local pname = entry.schemename
+ if not find(pname,"*",1,true) then
if can_be_dir(pname) then
- -- quick root scan first
- for k=1,#wantedfiles do
- local w = wantedfiles[k]
- local fname = check_subpath(filejoin(pname,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
+ -- hm, rather useless as we don't go deeper and if we would we could also
+ -- auto generate the file database .. however, we need this for extra paths
+ -- that are not hashed (like sources on my machine) .. so, this is slightly
+ -- out of order but at least fast (and we seldom end up here, only when a file
+                            -- is not already found)
+-- inspect(entry)
+ if not done and not entry.prescanned then
+ if trace_detail then
+ report_resolving("quick root scan for %a",pname)
end
- end
- if not done and doscan then
- -- collect files in path (and cache the result)
- local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = files[w]
- if not subpath or subpath == "" then
- -- rootscan already done
- elseif type(subpath) == "string" then
- local fname = check_subpath(filejoin(pname,subpath,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
- end
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
end
- else
- for i=1,#subpath do
- local sp = subpath[i]
- if sp == "" then
- -- roottest already done
- else
- local fname = check_subpath(filejoin(pname,sp,w))
- if fname then
- result[#result+1] = fname
- done = true
- if not allresults then
- break
+ end
+ end
+ if not done and entry.recursive then -- maybe also when allresults
+ -- collect files in path (and cache the result)
+ if trace_detail then
+ report_resolving("scanning filesystem for %a",pname)
+ end
+ local files = resolvers.simplescanfiles(pname,false,true)
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
end
end
end
- end
- if done and not allresults then
- break
+ if done and not allresults then
+ break
+ end
end
end
end
@@ -1261,6 +1394,20 @@ local function find_intree(filename,filetype,wantedfiles,allresults)
else
-- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
+ else
+ -- we can have extra_paths that are urls
+ for k=1,#wantedfiles do
+ -- independent url scanner
+ local pname = entry.barename
+ local fname = methodhandler('finders',pname .. "/" .. wantedfiles[k])
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
end
end
-- todo recursive scanning
@@ -1298,7 +1445,7 @@ local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other
local filelist = collect_files(wantedfiles)
local fl = filelist and filelist[1]
if fl then
- return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ return "otherwise", { resolveprefix(fl[3]) } -- filename
end
end
@@ -1306,6 +1453,9 @@ end
-- always analyze .. todo: use url split
collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ if not filename or filename == "" then
+ return { }
+ end
askedformat = askedformat or ""
filename = collapsepath(filename,".")
@@ -1343,7 +1493,11 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
else
local method, result, stamp, filetype, wantedfiles
if instance.remember then
- stamp = formatters["%s--%s"](filename,askedformat)
+ if askedformat == "" then
+ stamp = formatters["%s::%s"](suffixonly(filename),filename)
+ else
+ stamp = formatters["%s::%s"](askedformat,filename)
+ end
result = stamp and instance.found[stamp]
if result then
if trace_locating then
@@ -1362,7 +1516,8 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
method, result = find_intree(filename,filetype,wantedfiles)
if not result then
method, result = find_onpath(filename,filetype,wantedfiles)
- if not result then
+ if resolve_otherwise and not result then
+ -- this will search everywhere in the tree
method, result = find_otherwise(filename,filetype,wantedfiles)
end
end
@@ -1378,7 +1533,7 @@ collect_instance_files = function(filename,askedformat,allresults) -- uses neste
end
if stamp then
if trace_locating then
- report_resolving("remembering file %a",filename)
+ report_resolving("remembering file %a using hash %a",filename,stamp)
end
instance.found[stamp] = result
end
@@ -1389,6 +1544,9 @@ end
-- -- -- end of main file search routing -- -- --
local function findfiles(filename,filetype,allresults)
+ if not filename or filename == "" then
+ return { }
+ end
local result, status = collect_instance_files(filename,filetype or "",allresults)
if not result or #result == 0 then
local lowered = lower(filename)
@@ -1412,44 +1570,39 @@ function resolvers.findpath(filename,filetype)
end
local function findgivenfiles(filename,allresults)
- local bname, result = filebasename(filename), { }
- local hashes = instance.hashes
- local noffound = 0
- for k=1,#hashes do
- local hash = hashes[k]
- local files = instance.files[hash.name] or { }
- local blist = files[bname]
- if not blist then
- local rname = "remap:"..bname
- blist = files[rname]
- if blist then
- bname = files[rname]
- blist = files[bname]
- end
+ local base = filebasename(filename)
+ local result = { }
+ local hashes = instance.hashes
+ --
+ local function okay(hash,path,name)
+ local found = methodhandler('concatinators',hash.type,hash.name,path,name)
+ if found and found ~= "" then
+ result[#result+1] = resolveprefix(found)
+ return not allresults
end
- if blist then
- if type(blist) == 'string' then
- local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then
- break
- end
+ end
+ --
+ for k=1,#hashes do
+ local hash = hashes[k]
+ local content = instance.files[hash.name]
+ if content then
+ local path, name = lookup(content,base)
+ if not path then
+ -- no match
+ elseif type(path) == "string" then
+ if okay(hash,path,name) then
+ return result
end
else
- for kk=1,#blist do
- local vv = blist[kk]
- local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
- if found ~= "" then
- noffound = noffound + 1
- result[noffound] = resolvers.resolve(found)
- if not allresults then break end
+ for i=1,#path do
+ if okay(hash,path[i],name) then
+ return result
end
end
end
end
end
+ --
return result
end
@@ -1461,37 +1614,6 @@ function resolvers.findgivenfile(filename)
return findgivenfiles(filename,false)[1] or ""
end
-local function doit(path,blist,bname,tag,variant,result,allresults)
- local done = false
- if blist and variant then
- local resolve = resolvers.resolve -- added
- if type(blist) == 'string' then
- -- make function and share code
- if find(lower(blist),path) then
- local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- end
- else
- for kk=1,#blist do
- local vv = blist[kk]
- if find(lower(vv),path) then
- local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
- result[#result+1] = resolve(full)
- done = true
- if not allresults then break end
- end
- end
- end
- end
- return done
-end
-
---~ local makewildcard = Cs(
---~ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
---~ + (P("^")^0 * P("/") / "") * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0
---~ )
-
local makewildcard = Cs(
(P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
@@ -1501,37 +1623,80 @@ function resolvers.wildcardpattern(pattern)
return lpegmatch(makewildcard,pattern) or pattern
end
-local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
- result = result or { }
---~ local path = lower(lpegmatch(makewildcard,filedirname (filename)))
---~ local name = lower(lpegmatch(makewildcard,filebasename(filename)))
- local base = filebasename(filename)
- local dirn = filedirname(filename)
- local path = lower(lpegmatch(makewildcard,dirn) or dirn)
- local name = lower(lpegmatch(makewildcard,base) or base)
- local files, done = instance.files, false
- if find(name,"%*") then
+-- we use more function calls than before but we also have smaller trees so
+-- why bother
+
+local function findwildcardfiles(filename,allresults,result)
+ local result = result or { }
+ local base = filebasename(filename)
+ local dirn = filedirname(filename)
+ local path = lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name = lower(lpegmatch(makewildcard,base) or base)
+ local files = instance.files
+ --
+ if find(name,"*",1,true) then
local hashes = instance.hashes
+ local function okay(found,path,base,hashname,hashtype)
+ if find(found,path) then
+ local full = methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full ~= "" then
+ result[#result+1] = resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- for kk, hh in next, files[hashname] do
- if not find(kk,"^remap:") then
- if find(lower(kk),name) then
- if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
- if done and not allresults then break end
+ local hash = hashes[k]
+ local hashname = hash.name
+ local hashtype = hash.type
+ if hashname and hashtype then
+ for found, base in filtered(files[hashname],name) do
+ if type(found) == 'string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
end
end
end
end
else
+ local function okay(found,path,base,hashname,hashtype) -- same helper as in the wildcard branch above
+ if find(found,path) then
+ local full = methodhandler('concatinators',hashtype,hashname,found,base)
+ if full and full ~= "" then
+ result[#result+1] = resolveprefix(full)
+ return not allresults
+ end
+ end
+ end
+ --
local hashes = instance.hashes
--- inspect(hashes)
for k=1,#hashes do
- local hash = hashes[k]
- local hashname, hashtype = hash.name, hash.type
- if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done = true end
- if done and not allresults then break end
+ local hash = hashes[k]
+ local hashname = hash.name
+ local hashtype = hash.type
+ if hashname and hashtype and files[hashname] then
+ local found, base = lookup(files[hashname],base)
+ if not found then
+ -- nothing
+ elseif type(found) == 'string' then
+ if okay(found,path,base,hashname,hashtype) then
+ break
+ end
+ else
+ for i=1,#found do
+ if okay(found[i],path,base,hashname,hashtype) then
+ break
+ end
+ end
+ end
+ end
end
end
-- we can consider also searching the paths not in the database, but then
@@ -1624,7 +1789,7 @@ end
function resolvers.dowithpath(name,func)
local pathlist = resolvers.expandedpathlist(name)
for i=1,#pathlist do
- func("^"..resolvers.cleanpath(pathlist[i]))
+ func("^"..cleanpath(pathlist[i]))
end
end
@@ -1634,23 +1799,23 @@ end
function resolvers.locateformat(name)
local engine = environment.ownmain or "luatex"
- local barename = file.removesuffix(name)
- local fullname = file.addsuffix(barename,"fmt")
+ local barename = removesuffix(name)
+ local fullname = addsuffix(barename,"fmt")
local fmtname = caches.getfirstreadablefile(fullname,"formats",engine) or ""
if fmtname == "" then
fmtname = resolvers.findfile(fullname)
- fmtname = resolvers.cleanpath(fmtname)
+ fmtname = cleanpath(fmtname)
end
if fmtname ~= "" then
- local barename = file.removesuffix(fmtname)
- local luaname = file.addsuffix(barename,luasuffixes.lua)
- local lucname = file.addsuffix(barename,luasuffixes.luc)
- local luiname = file.addsuffix(barename,luasuffixes.lui)
- if lfs.isfile(luiname) then
+ local barename = removesuffix(fmtname)
+ local luaname = addsuffix(barename,luasuffixes.lua)
+ local lucname = addsuffix(barename,luasuffixes.luc)
+ local luiname = addsuffix(barename,luasuffixes.lui)
+ if isfile(luiname) then
return barename, luiname
- elseif lfs.isfile(lucname) then
+ elseif isfile(lucname) then
return barename, lucname
- elseif lfs.isfile(luaname) then
+ elseif isfile(luaname) then
return barename, luaname
end
end
@@ -1669,35 +1834,29 @@ end
function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
local instance = resolvers.instance
- local hashes = instance.hashes
+ local hashes = instance.hashes
for i=1,#hashes do
- local hash = hashes[i]
+ local hash = hashes[i]
local blobtype = hash.type
local blobpath = hash.name
- if blobpath then
+ if blobtype and blobpath then
+ local total = 0
+ local checked = 0
+ local done = 0
if before then
before(blobtype,blobpath,pattern)
end
- local files = instance.files[blobpath]
- local total, checked, done = 0, 0, 0
- if files then
- for k, v in table.sortedhash(files) do -- next, files do, beware: this is not the resolve order
- total = total + 1
- if find(k,"^remap:") then
- -- forget about these
- elseif find(k,pattern) then
- if type(v) == "string" then
- checked = checked + 1
- if handle(blobtype,blobpath,v,k) then
- done = done + 1
- end
- else
- checked = checked + #v
- for i=1,#v do
- if handle(blobtype,blobpath,v[i],k) then
- done = done + 1
- end
- end
+ for path, name in filtered(instance.files[blobpath],pattern) do
+ if type(path) == "string" then
+ checked = checked + 1
+ if handle(blobtype,blobpath,path,name) then
+ done = done + 1
+ end
+ else
+ checked = checked + #path
+ for i=1,#path do
+ if handle(blobtype,blobpath,path[i],name) then
+ done = done + 1
end
end
end
@@ -1709,8 +1868,8 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move,
end
end
-resolvers.obsolete = resolvers.obsolete or { }
-local obsolete = resolvers.obsolete
+local obsolete = resolvers.obsolete or { }
+resolvers.obsolete = obsolete
resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
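The rewritten findgivenfiles and findwildcardfiles above replace the old doit helper with a local okay closure and a single content lookup per hash, where the looked-up value can be either one path or a list of paths. The snippet below is only an editor's sketch of that string-or-list dispatch in plain Lua; the table layout and names are invented for illustration and are not part of the patch.

    -- minimal standalone sketch (not ConTeXt code): a lookup can return either a
    -- single path (string) or a list of paths (table); one helper handles both
    local function collect(content,name,allresults)
        local result = { }
        local function okay(path)          -- mirrors the local okay closure in the patch
            result[#result+1] = path .. "/" .. name
            return not allresults          -- stop after the first hit unless all results are wanted
        end
        local path = content[name]         -- stand-in for the real content lookup
        if not path then
            -- no match
        elseif type(path) == "string" then
            okay(path)
        else
            for i=1,#path do
                if okay(path[i]) then break end
            end
        end
        return result
    end

    -- print(table.concat(collect({ ["foo.tex"] = { "a/b", "c/d" } },"foo.tex",true)," "))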
diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua
index 41b941c5a..d79e0c7ef 100644
--- a/tex/context/base/data-sch.lua
+++ b/tex/context/base/data-sch.lua
@@ -31,8 +31,18 @@ function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+-- function cleaners.strip(specification)
+-- -- todo: only keep suffix periods, so after the last
+-- return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+-- end
+
+function cleaners.strip(specification) -- keep suffixes
+ local path, name = file.splitbase(specification.original)
+ if path == "" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-") .. "-" .. name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
@@ -54,8 +64,8 @@ end
local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { }
local function runcurl(name,cachename) -- we use sockets instead of the curl library when possible
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name
- os.spawn(command)
+ local command = "curl --silent --insecure --create-dirs --output " .. cachename .. " " .. name
+ os.execute(command)
end
local function fetch(specification)
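The reworked cleaners.strip above splits the original specification into a path and a name so that only the name keeps its suffix period; periods in the path are flattened along with the other separators. A rough worked example (an editor's sketch; the url and the assumption that file.splitbase returns the directory part plus the base name are illustrative only):

    -- hypothetical input, for illustration only
    local path = "http://www.pragma-ade.com"   -- assumed directory part from file.splitbase
    local name = "show-gra.pdf"                 -- assumed base name from file.splitbase
    local cleaned = string.gsub(string.gsub(path,"%.","-") .. "-" .. name,"[^%a%d%.]+","-")
    print(cleaned) -- http-www-pragma-ade-com-show-gra.pdf : flat cache name, suffix period kept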
diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua
index f5c986d77..b6b97a0a9 100644
--- a/tex/context/base/data-tex.lua
+++ b/tex/context/base/data-tex.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-tex'] = {
license = "see context related readme files"
}
-local char = string.char
+local char, find = string.char, string.find
local insert, remove = table.insert, table.remove
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -15,10 +15,11 @@ local report_tex = logs.reporter("resolvers","tex")
local resolvers = resolvers
-local sequencers = utilities.sequencers
-local methodhandler = resolvers.methodhandler
-local splitlines = string.splitlines
-local utffiletype = utf.filetype
+local sequencers = utilities.sequencers
+local methodhandler = resolvers.methodhandler
+local splitlines = string.splitlines
+local utffiletype = utf.filetype
+local setmetatableindex = table.setmetatableindex
-- local fileprocessor = nil
-- local lineprocessor = nil
@@ -76,13 +77,13 @@ function helpers.textopener(tag,filename,filehandle,coding)
report_tex("%a opener: %a opened using method %a",tag,filename,coding)
end
if coding == "utf-16-be" then
- lines = utf.utf16_to_utf8_be(lines)
+ lines = utf.utf16_to_utf8_be_t(lines)
elseif coding == "utf-16-le" then
- lines = utf.utf16_to_utf8_le(lines)
+ lines = utf.utf16_to_utf8_le_t(lines)
elseif coding == "utf-32-be" then
- lines = utf.utf32_to_utf8_be(lines)
+ lines = utf.utf32_to_utf8_be_t(lines)
elseif coding == "utf-32-le" then
- lines = utf.utf32_to_utf8_le(lines)
+ lines = utf.utf32_to_utf8_le_t(lines)
else -- utf8 or unknown (could be a mkvi file)
local runner = textfileactions.runner
if runner then
@@ -99,10 +100,11 @@ function helpers.textopener(tag,filename,filehandle,coding)
end
logs.show_open(filename)
insert(inputstack,filename)
- return {
+ local currentline, noflines = 0, noflines
+ local t = {
filename = filename,
noflines = noflines,
- currentline = 0,
+ -- currentline = 0,
close = function()
if trace_locating then
report_tex("%a closer: %a closed",tag,filename)
@@ -113,12 +115,12 @@ function helpers.textopener(tag,filename,filehandle,coding)
end,
reader = function(self)
self = self or t
- local currentline, noflines = self.currentline, self.noflines
+ -- local currentline, noflines = self.currentline, self.noflines
if currentline >= noflines then
return nil
else
currentline = currentline + 1
- self.currentline = currentline
+ -- self.currentline = currentline
local content = lines[currentline]
if not content then
return nil
@@ -137,6 +139,14 @@ function helpers.textopener(tag,filename,filehandle,coding)
end
end
}
+ setmetatableindex(t,function(t,k)
+ if k == "currentline" then
+ return currentline
+ else
+ -- no such key
+ end
+ end)
+ return t
end
function resolvers.findtexfile(filename,filetype)
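The textopener change above keeps currentline in an upvalue, which is cheaper to bump in the reader loop than a table field, and exposes it read-only through an __index metamethod for code that still asks for t.currentline. A generic sketch of that pattern (using plain setmetatable here, since table.setmetatableindex is a ConTeXt helper not shown in this hunk):

    local function newcounter()
        local current = 0                          -- fast upvalue, bumped in the hot path
        local t = {
            step = function() current = current + 1 end,
        }
        setmetatable(t, { __index = function(_,k)  -- expose the upvalue to occasional readers
            if k == "current" then
                return current
            end
        end })
        return t
    end

    local c = newcounter()
    c.step() c.step()
    print(c.current) -- 2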
diff --git a/tex/context/base/data-tmf.lua b/tex/context/base/data-tmf.lua
index c52225193..e0ccac257 100644
--- a/tex/context/base/data-tmf.lua
+++ b/tex/context/base/data-tmf.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-tmf'] = {
license = "see context related readme files"
}
-local resolvers = resolvers
+local resolvers = resolvers
local report_tds = logs.reporter("resolvers","tds")
diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua
index 3e109dcfe..9e1515a26 100644
--- a/tex/context/base/data-tmp.lua
+++ b/tex/context/base/data-tmp.lua
@@ -23,7 +23,8 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
-local concat, serialize, serializetofile = table.concat, table.serialize, table.tofile
+----- serialize, serializetofile = table.serialize, table.tofile -- overloaded so no local
+local concat = table.concat
local mkdirs, isdir, isfile = dir.mkdirs, lfs.isdir, lfs.isfile
local addsuffix, is_writable, is_readable = file.addsuffix, file.is_writable, file.is_readable
local formatters = string.formatters
@@ -35,6 +36,7 @@ local report_caches = logs.reporter("resolvers","caches")
local report_resolvers = logs.reporter("resolvers","caching")
local resolvers = resolvers
+local cleanpath = resolvers.cleanpath
-- intermezzo
@@ -72,7 +74,7 @@ local writable, readables, usedreadables = nil, { }, { }
local function identify()
-- Combining the loops makes it messy. First we check the format cache path
-- and when the last component is not present we try to create it.
- local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE")
+ local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") -- forward ref
if texmfcaches then
for k=1,#texmfcaches do
local cachepath = texmfcaches[k]
@@ -348,15 +350,11 @@ local saveoptions = { compact = true }
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,serialize(data,true,saveoptions))
+ file.savedata(tmaname,table.serialize(data,true,saveoptions))
else
- serializetofile(tmaname,data,true,saveoptions)
+ table.tofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -369,10 +367,12 @@ function caches.contentstate()
return content_state or { }
end
-function caches.loadcontent(cachename,dataname)
- local name = caches.hashed(cachename)
- local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename = file.join(path,name)
+function caches.loadcontent(cachename,dataname,filename)
+ if not filename then
+ local name = caches.hashed(cachename)
+ local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename = file.join(path,name)
+ end
local blob = loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua))
if blob then
local data = blob()
@@ -406,10 +406,12 @@ function caches.collapsecontent(content)
end
end
-function caches.savecontent(cachename,dataname,content)
- local name = caches.hashed(cachename)
- local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
- local filename = file.join(path,name) -- is full
+function caches.savecontent(cachename,dataname,content,filename)
+ if not filename then
+ local name = caches.hashed(cachename)
+ local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees")
+ filename = file.join(path,name) -- is full
+ end
local luaname = addsuffix(filename,luasuffixes.lua)
local lucname = addsuffix(filename,luasuffixes.luc)
if trace_locating then
@@ -424,7 +426,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,serialize(data,true))
+ local ok = io.savedata(luaname,table.serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category %a, cachename %a saved in %a",dataname,cachename,luaname)
diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua
index 0a8b00d9b..4388731f9 100644
--- a/tex/context/base/data-tre.lua
+++ b/tex/context/base/data-tre.lua
@@ -6,50 +6,89 @@ if not modules then modules = { } end modules ['data-tre'] = {
license = "see context related readme files"
}
--- \input tree://oeps1/**/oeps.tex
+-- tree://e:/temporary/mb-mp/**/drawing.jpg
+-- tree://e:/temporary/mb-mp/**/Drawing.jpg
+-- tree://t:./**/tufte.tex
+-- tree://t:/./**/tufte.tex
+-- tree://t:/**/tufte.tex
+-- dirlist://e:/temporary/mb-mp/**/drawing.jpg
+-- dirlist://e:/temporary/mb-mp/**/Drawing.jpg
+-- dirlist://e:/temporary/mb-mp/**/just/some/place/drawing.jpg
+-- dirlist://e:/temporary/mb-mp/**/images/drawing.jpg
+-- dirlist://e:/temporary/mb-mp/**/images/drawing.jpg?option=fileonly
+-- dirlist://///storage-2/resources/mb-mp/**/drawing.jpg
+-- dirlist://e:/**/drawing.jpg
-local find, gsub, format = string.find, string.gsub, string.format
+local find, gsub, lower = string.find, string.gsub, string.lower
+local basename, dirname, joinname = file.basename, file.dirname, file.join
+local globdir, isdir, isfile = dir.glob, lfs.isdir, lfs.isfile
+local P, lpegmatch = lpeg.P, lpeg.match
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-local report_trees = logs.reporter("resolvers","trees")
+local report_trees = logs.reporter("resolvers","trees")
-local resolvers = resolvers
+local resolvers = resolvers
+local resolveprefix = resolvers.resolve
+local notfound = resolvers.finders.notfound
+local lookup = resolvers.get_from_content
-local done, found, notfound = { }, { }, resolvers.finders.notfound
+-- A tree search is rather dumb ... there is some basic caching of searched trees
+-- but nothing is cached over runs ... it's also a wildcard one so we cannot use
+-- the normal scanner.
-function resolvers.finders.tree(specification)
+local collectors = { }
+local found = { }
+
+function resolvers.finders.tree(specification) -- to be adapted to new formats
local spec = specification.filename
- local fnd = found[spec]
- if fnd == nil then
+ local okay = found[spec]
+ if okay == nil then
if spec ~= "" then
- local path, name = file.dirname(spec), file.basename(spec)
- if path == "" then path = "." end
- local hash = done[path]
- if not hash then
- local pattern = path .. "/*" -- we will use the proper splitter
- hash = dir.glob(pattern)
- done[path] = hash
+ local path = dirname(spec)
+ local name = basename(spec)
+ if path == "" then
+ path = "."
+ end
+ local names = collectors[path]
+ if not names then
+ local pattern = find(path,"/%*+$") and path or (path .. "/*")
+ names = globdir(pattern)
+ collectors[path] = names
end
local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
- for k=1,#hash do
- local v = hash[k]
- if find(v,pattern) then
- found[spec] = v
- return v
+ for i=1,#names do
+ local fullname = names[i]
+ if find(fullname,pattern) then
+ found[spec] = fullname
+ return fullname
+ end
+ end
+ -- let's be nice:
+ local pattern = lower(pattern)
+ for i=1,#names do
+ local fullname = lower(names[i])
+ if find(fullname,pattern) then
+ if isfile(fullname) then
+ found[spec] = fullname
+ return fullname
+ else
+ -- no os name mapping
+ break
+ end
end
end
end
- fnd = notfound() -- false
- found[spec] = fnd
+ okay = notfound() -- false
+ found[spec] = okay
end
- return fnd
+ return okay
end
function resolvers.locators.tree(specification)
local name = specification.filename
- local realname = resolvers.resolve(name) -- no shortcut
- if realname and realname ~= '' and lfs.isdir(realname) then
+ local realname = resolveprefix(name) -- no shortcut
+ if realname and realname ~= '' and isdir(realname) then
if trace_locating then
report_trees("locator %a found",realname)
end
@@ -61,15 +100,134 @@ end
function resolvers.hashers.tree(specification)
local name = specification.filename
- if trace_locating then
- report_trees("analysing %a",name)
- end
+ -- if trace_locating then
+ report_trees("analyzing %a",name)
+ -- end
resolvers.methodhandler("hashers",name)
resolvers.generators.file(specification)
end
-resolvers.concatinators.tree = resolvers.concatinators.file
-resolvers.generators.tree = resolvers.generators.file
-resolvers.openers.tree = resolvers.openers.file
-resolvers.loaders.tree = resolvers.loaders.file
+-- This is a variation on tree lookups but this time we do cache in the given
+-- root. We use a similar hasher as the resolvers because we have to deal with,
+-- for instance, trees with 50K xml files plus a similar amount of resources,
+-- and we don't want too much overhead.
+
+local collectors = { }
+local splitter = lpeg.splitat("/**/")
+local stripper = lpeg.replacer { [P("/") * P("*")^1 * P(-1)] = "" }
+
+table.setmetatableindex(collectors, function(t,k)
+ local rootname = lpegmatch(stripper,k)
+ local dataname = joinname(rootname,"dirlist")
+ local content = caches.loadcontent(dataname,"files",dataname)
+ if not content then
+ -- path branch usecache onlyonce tolerant
+ content = resolvers.scanfiles(rootname,nil,nil,false,true) -- so we accept crap
+ caches.savecontent(dataname,"files",content,dataname)
+ end
+ t[k] = content
+ return content
+end)
+
+
+local function checked(root,p,n)
+ if p then
+ if type(p) == "table" then
+ for i=1,#p do
+ local fullname = joinname(root,p[i],n)
+ if isfile(fullname) then -- safeguard
+ return fullname
+ end
+ end
+ else
+ local fullname = joinname(root,p,n)
+ if isfile(fullname) then -- safeguard
+ return fullname
+ end
+ end
+ end
+ return notfound()
+end
+
+-- no funny characters allowed in the path, but they are permitted in the filename .. sigh
+
+local function resolve(specification) -- can be called directly too
+ local filename = specification.filename
+ -- inspect(specification)
+ if filename ~= "" then
+ local root, rest = lpegmatch(splitter,filename)
+ if root and rest then
+ local path, name = dirname(rest), basename(rest)
+ if name ~= rest then
+ local content = collectors[root]
+ local p, n = lookup(content,name)
+ if not p then
+ return notfound()
+ end
+ local pattern = ".*/" .. path .. "$"
+ local istable = type(p) == "table"
+ if istable then
+ for i=1,#p do
+ local pi = p[i]
+ if pi == path or find(pi,pattern) then
+ local fullname = joinname(root,pi,n)
+ if isfile(fullname) then -- safeguard
+ return fullname
+ end
+ end
+ end
+ elseif p == path or find(p,pattern) then
+ local fullname = joinname(root,p,n)
+ if isfile(fullname) then -- safeguard
+ return fullname
+ end
+ end
+ local queries = specification.queries
+ if queries and queries.option == "fileonly" then
+ return checked(root,p,n)
+ else
+ return notfound()
+ end
+ end
+ end
+ local path, name = dirname(filename), basename(filename)
+ local root = lpegmatch(stripper,path)
+ local content = collectors[path]
+ local p, n = lookup(content,name)
+ if p then
+ return checked(root,p,n)
+ end
+ end
+ return notfound()
+end
+
+resolvers.finders .dirlist = resolve
+resolvers.locators .dirlist = resolvers.locators .tree
+resolvers.hashers .dirlist = resolvers.hashers .tree
+resolvers.generators.dirlist = resolvers.generators.file
+resolvers.openers .dirlist = resolvers.openers .file
+resolvers.loaders .dirlist = resolvers.loaders .file
+
+function resolvers.finders.dirfile(specification)
+ local queries = specification.queries
+ if queries then
+ queries.option = "fileonly"
+ else
+ specification.queries = { option = "fileonly" }
+ end
+ return resolve(specification)
+end
+
+resolvers.locators .dirfile = resolvers.locators .dirlist
+resolvers.hashers .dirfile = resolvers.hashers .dirlist
+resolvers.generators.dirfile = resolvers.generators.dirlist
+resolvers.openers .dirfile = resolvers.openers .dirlist
+resolvers.loaders .dirfile = resolvers.loaders .dirlist
+
+-- local locate = collectors[ [[E:\temporary\mb-mp]] ]
+-- local locate = collectors( [[\\storage-2\resources\mb-mp]] )
+
+-- print(resolvers.findtexfile("tree://e:/temporary/mb-mp/**/VB_wmf_03_vw_01d_ant.jpg"))
+-- print(resolvers.findtexfile("tree://t:/**/tufte.tex"))
+-- print(resolvers.findtexfile("dirlist://e:/temporary/mb-mp/**/VB_wmf_03_vw_01d_ant.jpg"))
diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua
index 9c15263bb..7598506e4 100644
--- a/tex/context/base/data-use.lua
+++ b/tex/context/base/data-use.lua
@@ -57,7 +57,7 @@ statistics.register("used cache path", function() return caches.usedpaths() end
-- experiment (code will move)
function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
- local enginebanner = status.list().banner
+ local enginebanner = status.banner
if formatbanner and enginebanner and sourcefile then
local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv
local luvdata = {
@@ -75,7 +75,7 @@ end
-- a remake
function statistics.checkfmtstatus(texname)
- local enginebanner = status.list().banner
+ local enginebanner = status.banner
if enginebanner and texname then
local luvname = file.replacesuffix(texname,"luv") -- utilities.lua.suffixes.luv
if lfs.isfile(luvname) then
diff --git a/tex/context/base/data-vir.lua b/tex/context/base/data-vir.lua
index 48fec54e0..7e25c822d 100644
--- a/tex/context/base/data-vir.lua
+++ b/tex/context/base/data-vir.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-vir'] = {
license = "see context related readme files"
}
-local format, validstrings = string.format, string.valid
+local formatters, validstrings = string.formatters, string.valid
local trace_virtual = false
local report_virtual = logs.reporter("resolvers","virtual")
@@ -18,14 +18,14 @@ local resolvers = resolvers
local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
-local data = { }
-local n = 0 -- hm, number can be query
-local template = "virtual://%s.%s"
+local data = { }
+local n = 0 -- hm, number can be query
+local f_virtual = formatters["virtual://%s.%s"]
function savers.virtual(specification,content)
n = n + 1 -- one number for all namespaces
local path = specification.path
- local filename = format(template,path ~= "" and path or "virtualfile",n)
+ local filename = f_virtual(path ~= "" and path or "virtualfile",n)
if trace_virtual then
report_virtual("saver: file %a saved",filename)
end
diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua
index 5db69670c..2be88e0fc 100644
--- a/tex/context/base/data-zip.lua
+++ b/tex/context/base/data-zip.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-zip'] = {
license = "see context related readme files"
}
--- partly redone .. needs testing
+-- really old code ... partly redone .. needs testing due to the changes, and a decent overhaul as well
local format, find, match = string.format, string.find, string.match
@@ -37,18 +37,6 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
-local limited = false
-
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- zip.open = i_limiter.protect(zip.open)
- limited = true
- end
- end
-end)
-
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -64,7 +52,7 @@ function zip.openarchive(name)
local arch = archives[name]
if not arch then
local full = resolvers.findfile(name) or ""
- arch = (full ~= "" and zip.open(full)) or false
+ arch = full ~= "" and zip.open(full) or false
archives[name] = arch
end
return arch
@@ -235,30 +223,42 @@ function resolvers.usezipfile(archive)
end
function resolvers.registerzipfile(z,tree)
- local files, filter = { }, ""
- if tree == "" then
- filter = "^(.+)/(.-)$"
- else
- filter = format("^%s/(.+)/(.-)$",tree)
- end
+ local names = { }
+ local files = { } -- somewhat overkill .. todo
+ local remap = { } -- somewhat overkill .. todo
+ local n = 0
+ local filter = tree == "" and "^(.+)/(.-)$" or format("^%s/(.+)/(.-)$",tree)
+ local register = resolvers.registerfile
if trace_locating then
report_zip("registering: using filter %a",filter)
end
- local register, n = resolvers.registerfile, 0
for i in z:files() do
- local path, name = match(i.filename,filter)
- if path then
- if name and name ~= '' then
- register(files, name, path)
- n = n + 1
- else
- -- directory
+ local filename = i.filename
+ local path, name = match(filename,filter)
+ if not path then
+ n = n + 1
+ register(names,filename,"")
+ local usedname = lower(filename)
+ files[usedname] = ""
+ if usedname ~= filename then
+ remap[usedname] = filename
end
- else
- register(files, i.filename, '')
+ elseif name and name ~= "" then
n = n + 1
+ register(names,name,path)
+ local usedname = lower(name)
+ files[usedname] = path
+ if usedname ~= name then
+ remap[usedname] = name
+ end
+ else
+ -- directory
end
end
report_zip("registering: %s files registered",n)
- return files
+ return {
+ -- metadata = { },
+ files = files,
+ remap = remap,
+ }
end
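The registerzipfile rewrite above returns a files table keyed on lowercased names plus a remap table that points back at the original spelling, so later lookups can be case-insensitive without losing the real entry name. A small standalone sketch of that idea (an editor's illustration, not the zip reader itself):

    local files, remap = { }, { }

    local function register(name,path)
        local used = string.lower(name)
        files[used] = path
        if used ~= name then
            remap[used] = name                -- remember the original casing
        end
    end

    local function locate(name)
        local used = string.lower(name)
        local path = files[used]
        if path then
            return path, remap[used] or name  -- hand back the real name when it differs
        end
    end

    register("Drawing.JPG","images")
    print(locate("drawing.jpg")) -- images  Drawing.JPG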
diff --git a/tex/context/base/enco-ini.mkiv b/tex/context/base/enco-ini.mkiv
index 77fcbe483..ef0ebef4e 100644
--- a/tex/context/base/enco-ini.mkiv
+++ b/tex/context/base/enco-ini.mkiv
@@ -55,6 +55,8 @@
\unexpanded\def\enco_define_accent#1#2#3% no spaces, used low level
{\setvalue{\??accents\string#1\string#2\empty}{#3}}
+% the following dirty trick is needed to catch \asciimath{\^{1/5}log}:
+
\def\enco_handle_accent#1#2% expandable because we want them in the tuc file
{\csname\??accents
\ifcsname\??accents\string#1#2\empty\endcsname
@@ -62,16 +64,22 @@
\else\ifcsname\??accents\string#1\string#2\empty\endcsname
\string#1\string#2\empty
\else
- \empty
+ \endcsname#2\csname\??accents % very dirty trick: ignore accent but keep char
\fi\fi
\endcsname}
+\letvalue{\??accents}\empty
+
+\unexpanded\def\enco_define_command#1#2%
+ {\setuvalue{\string#1}{#2}}
+
\let\dohandleaccent \enco_handle_accent % maybe useful
\let\dodefineaccent \enco_define_accent % used at the lua end
\let\dodefineaccentcommand\enco_define_accent_command % used at the lua end
+\let\dodefinecommand \enco_define_command % used at the lua end
\unexpanded\def\definecharacter#1 #2 %
- {\doifnumberelse{\string#2}
+ {\doifelsenumber{\string#2}
{\setevalue{\string#1}{\utfchar{#2}}} % or {\expandafter\chardef\csname#1\endcsname#2\relax}
{\setuvalue{\string#1}{#2}}}
@@ -87,17 +95,26 @@
%D Accent handling (try to avoid this):
+% \buildtextaccent\greekdasia\greekalphamacron
+% \buildtextaccent\textacute q
+
\newbox\b_enco_accent
\def\buildmathaccent#1%
{\mathaccent#1 }
+% \unexpanded\def\buildtextaccent#1#2% we could do all at the lua end
+% {\begingroup % but that's no fun (yet)
+% \setbox\b_enco_accent\hbox{#1}%
+% \scratchcounter\cldcontext{nodes.firstcharinbox(\number\b_enco_accent)}\relax
+% \ifcase\scratchcounter\else\accent\scratchcounter\fi
+% \relax#2%
+% \endgroup}
+
\unexpanded\def\buildtextaccent#1#2% we could do all at the lua end
{\begingroup % but that's no fun (yet)
\setbox\b_enco_accent\hbox{#1}%
- \scratchcounter\cldcontext{nodes.firstcharinbox(\number\b_enco_accent)}\relax
- \ifcase\scratchcounter\else\accent\scratchcounter\fi
- \relax#2%
+ \clf_buildtextaccent\b_enco_accent#2%
\endgroup}
\unexpanded\def\bottomaccent#1#2#3#4#5% down right slantcorrection accent char
@@ -199,11 +216,42 @@
\def\pound {£} \def\sterling{£}
\def\promille {‰} \def\permille{‰}
-% tex specific (obsolete)
+% tex specific
+
+\ifdefined\textpercent
+ \let\percent \textpercent
+ \let\procent \textpercent
+ \let\ampersand \textampersand
+ \let\dollar \textdollar
+ \let\hash \texthash
+\else
+ \def\percent {\textpercent}
+ \def\procent {\textpercent}
+ \def\ampersand {\textampersand}
+ \def\dollar {\textdollar}
+ \def\hash {\texthash}
+\fi
+
+% from enco-mis:
-\def\procent {\percent}
-\def\dollar {\textdollar}
-\def\permine {\fakepermine}
+\unexpanded\def\fakepercent
+ {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25\emwidth/\kern-.2\emwidth\normalsubscript{\scriptscriptstyle0}}}
+
+\unexpanded\def\fakeperthousand
+ {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25\emwidth/\kern-.2\emwidth\normalsubscript{\scriptscriptstyle00}}}
+
+\unexpanded\def\fakepermine
+ {\dontleavehmode
+ \bgroup
+ \setbox\scratchbox\hbox
+ {\mathematics{+}}%
+ \hbox to \wd\scratchbox
+ {\hss
+ \mathematics{\normalsuperscript{\scriptscriptstyle-}\kern-.4\emwidth/\kern-.3\emwidth\normalsubscript{\scriptscriptstyle-}}%
+ \hss}%
+ \egroup}
+
+\def\permine{\fakepermine}
% some more
@@ -241,9 +289,27 @@
\chardef\textcontrolspace"2423
-\unexpanded\def\fallbackcontrolspace
- %{\getglyph{ComputerModernMono}\textcontrolspace}
- {\getglyph{LMTypewriter-Regular}\textcontrolspace}
+\installcorenamespace{controlspace}
+
+% \unexpanded\def\fallbackcontrolspace % beware: non-matching widths
+% {\hbox to \interwordspace{\hss\getglyph{LMTypewriter-Regular}\textcontrolspace\hss}%
+
+\unexpanded\def\fallbackcontrolspace % beware, current font, we also need to honor color
+ {\hbox to \interwordspace \bgroup
+ \hss
+ \ifcsname\??controlspace\number\interwordspace\endcsname
+ \csname\??controlspace\number\interwordspace\endcsname
+ \else
+ \enco_fast_control_space_define % only regular
+ \fi
+ \textcontrolspace
+ \hss
+ \egroup}
+
+\unexpanded\def\enco_fast_control_space_define
+ {\scratchdimen\interwordspace
+ \definedfont[LMTypewriter-Regular at \the\dimexpr\currentfontbodyscale\dimexpr\fontbody]% see font-sym.mkiv
+ \expandafter\glet\csname\??controlspace\number\scratchdimen\endcsname\lastrawfontcall}
\unexpanded\def\normalcontrolspace
{\iffontchar\font\textcontrolspace
@@ -254,29 +320,35 @@
\let\textvisiblespace\normalcontrolspace
-\unexpanded\def\fastcontrolspace % no glyph resolving after first (use grouped)
- {\enco_fast_control_space}
-
-\def\enco_fast_control_space
+\unexpanded\def\optionalcontrolspace
{\iffontchar\font\textcontrolspace
- \enco_fast_control_space_nop
+ \textcontrolspace
\else
- \enco_fast_control_space_yes
- \fi
- \enco_fast_control_space}
-
-\newbox\b_enco_control_space
-
-\def\enco_fast_control_space_nop
- {\let\enco_fast_control_space\textcontrolspace}
-
-\def\enco_fast_control_space_yes
- {\setbox\b_enco_control_space\hbox{\space}%
- \setbox\b_enco_control_space\hbox to \wd\b_enco_control_space{\hss\fallbackcontrolspace\hss}%
- \let\enco_fast_control_space\flushcontrolspacebox}
+ \asciispacechar % used for export !
+ \fi}
-\def\flushcontrolspacebox
- {\copy\b_enco_control_space}
+% \unexpanded\def\fastcontrolspace % no glyph resolving after first (use grouped)
+% {\enco_fast_control_space}
+%
+% \def\enco_fast_control_space
+% {\iffontchar\font\textcontrolspace
+% \enco_fast_control_space_nop
+% \else
+% \enco_fast_control_space_yes
+% \fi
+% \enco_fast_control_space}
+%
+% \newbox\b_enco_control_space
+%
+% \def\enco_fast_control_space_nop
+% {\let\enco_fast_control_space\textcontrolspace}
+%
+% \def\enco_fast_control_space_yes
+% {\setbox\b_enco_control_space\fallbackcontrolspace
+% \let\enco_fast_control_space\flushcontrolspacebox}
+%
+% \def\flushcontrolspacebox
+% {\copy\b_enco_control_space}
% a few defaults (\<whatever>{}), we really need the verbose \empty as it will be
% stringified .. anyhow, we define this at the lua end now but keep it here as a
@@ -296,26 +368,7 @@
% \defineaccent u {\empty} {\textbreve}
% \defineaccent v {\empty} {\textcaron}
-\ctxlua{characters.tex.defineaccents()}
-
-% from enco-mis:
-
-\unexpanded\def\fakepercent
- {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25\emwidth/\kern-.2\emwidth\normalsubscript{\scriptscriptstyle0}}}
-
-\unexpanded\def\fakeperthousand
- {\mathematics{\normalsuperscript{\scriptscriptstyle0}\kern-.25\emwidth/\kern-.2\emwidth\normalsubscript{\scriptscriptstyle00}}}
-
-\unexpanded\def\fakepermine
- {\dontleavehmode
- \bgroup
- \setbox\scratchbox\hbox
- {\mathematics{+}}%
- \hbox to \wd\scratchbox
- {\hss
- \mathematics{\normalsuperscript{\scriptscriptstyle-}\kern-.4\emwidth/\kern-.3\emwidth\normalsubscript{\scriptscriptstyle-}}%
- \hss}%
- \egroup}
+\clf_defineaccents % one time
%D A smaller and bolder variant, more like the math and monospaced ones.
@@ -356,37 +409,105 @@
\unexpanded\def\inch {\mathematics{\prime\prime}} % was: \hbox{\rm\char125\relax}
\unexpanded\def\fraction#1#2{\mathematics{#1\over#2}}
-\def\periodswidth {.5em}
-\def\periodsdefault{3} % was 5, but now it's like \unknown
-
-\unexpanded\def\periods
- {\dosingleempty\enco_periods}
-
+% \def\periodswidth {.5em}
+% \def\periodsdefault{3} % was 5, but now it's like \unknown
+%
+% \unexpanded\def\periods
+% {\dosingleempty\enco_periods}
+%
% \def\doperiods[#1]% todo: also n=,width= or maybe just #1,#2
% {\dontleavehmode
% \begingroup
% \scratchdimen\periodswidth
% \hbox to \iffirstargument#1\else\periodsdefault\fi \scratchdimen
-% {\leaders\hbox to \scratchdimen{\hss.\hss}\hss}%
+% {\leaders\hbox to \scratchdimen{\hss\periodsymbol\hss}\hss}%
% \endgroup}
%
% better for export:
+%
+% \unexpanded\def\enco_periods[#1]% todo: also n=,width= or maybe just #1,#2
+% {\dontleavehmode
+% \hbox\bgroup
+% \setbox\scratchbox\hbox to \periodswidth{\hss\periodsymbol\hss}%
+% \dorecurse{\iffirstargument#1\else\periodsdefault\fi}{\copy\scratchbox}%
+% \egroup}
+%
+% \unexpanded\def\unknown
+% {\periods\relax} % relax prevents lookahead for []
+%
+% per request:
+
+%D \startbuffer
+%D \startlines
+%D x\periods x
+%D x\periods[10]x
+%D x\periods[n=10,symbol={,}]x
+%D x\periods[n=4,symbol={!!},width=1em]x
+%D x\periods[n=4,symbol={!!},width=fit]x
+%D x\periods[n=4,symbol={!!},width=fit,distance=1em]x
+%D x\unknown x
+%D \stoplines
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\def\periodswidth {.5\emwidth} % downward compatible
+\def\periodsdefault{3} % downward compatible
+
+\installcorenamespace {periods}
+
+\installsetuponlycommandhandler \??periods {periods}
+
+\setupperiods
+ [\c!n=\periodsdefault,
+ \c!width=\periodswidth, % can also be \v!fit
+ \c!distance=.25\emwidth,
+ \c!symbol=.]
-\unexpanded\def\enco_periods[#1]% todo: also n=,width= or maybe just #1,#2
+\unexpanded\def\periods
{\dontleavehmode
\hbox\bgroup
- \setbox\scratchbox\hbox to \periodswidth{\hss.\hss}%
- \dorecurse{\iffirstargument#1\else\periodsdefault\fi}{\copy\scratchbox}%
+ \doifelsenextoptional\enco_periods_yes\enco_periods_nop}
+
+\unexpanded\def\enco_periods_yes[#1]%
+ {\doifelseassignment{#1}
+ {\setupcurrentperiods[#1]%
+ \scratchcounter\periodsparameter\c!n}
+ {\doifelsenothing{#1}
+ {\scratchcounter\periodsparameter\c!n}%
+ {\scratchcounter#1}}%
+ \enco_periods_finish}
+
+\unexpanded\def\enco_periods_nop
+ {\scratchcounter\periodsparameter\c!n
+ \enco_periods_finish}
+
+\unexpanded\def\enco_periods_finish
+ {\edef\p_width{\periodsparameter\c!width}%
+ \ifx\p_width\v!fit
+ \enco_periods_finish_fit
+ \else
+ \enco_periods_finish_width
+ \fi
\egroup}
+\unexpanded\def\enco_periods_finish_width
+ {\setbox\scratchbox\hbox to \p_width
+ {\hss\periodsparameter\c!symbol\hss}%
+ \dorecurse\scratchcounter{\copy\scratchbox}}
+
+\unexpanded\def\enco_periods_finish_fit
+ {\edef\p_symbol{\periodsparameter\c!symbol}%
+ \scratchdistance\periodsparameter\c!distance
+ \hskip\scratchdistance
+ \dorecurse\scratchcounter{\p_symbol\hskip\scratchdistance}}
+
\unexpanded\def\unknown
- {\periods\relax} % relax prevents lookahead for []
+ {\dontleavehmode
+ \hbox\bgroup
+ \enco_periods_nop}
-% Example by Wolfgang Schuster on the context list:
-%
-% \unexpanded\def\fourdots{{\def\periodswidth{.3em}\periods[4]}}
-%
-% Hello\fourdots\ World\fourdots \par Hello\fourdots\ World.
+%D Left-overs:
\appendtoks
\let\buildtextaccent\secondoftwoarguments
diff --git a/tex/context/base/export-example.css b/tex/context/base/export-example.css
index 06d51c587..f44755892 100644
--- a/tex/context/base/export-example.css
+++ b/tex/context/base/export-example.css
@@ -7,65 +7,100 @@
*/
+/*
+
+ Because empty div elements are not seen, we put a comment in them so that empty
+ table cells etc work ok.
+
+ We can style individual elements in several ways: this css file supports the
+ meaningful elements as well as their div counterparts. If needed you can use the
+ chain field, as well as chain elements in the div class, for styling groups.
+
+ \definefloat[myfloata]
+ \definefloat[myfloatb][myfloatbs][figure]
+
+ context|div.float.myfloata { } float[detail='myfloata'] { }
+ context|div.float.myfloatb { } float[detail='myfloatb'] { }
+ context|div.float.figure { } float[detail='figure'] { }
+ context|div.float.figure.myfloatb { } float[chain~='figure'][detail='myfloatb'] { }
+ context|div.myfloata { } *[detail='myfloata'] { }
+ context|div.myfloatb { } *[detail='myfloatb'] { }
+ context|div.figure { } *[chain~='figure'] { }
+ context|div.figure.myfloatb { } *[chain~='figure'][detail='myfloatb'] { }
+
+*/
+
+@namespace context url('http://www.pragma-ade.com/context/export') ;
+
/* ignore : mixed */
/* metadata: display */
-ignore {
+ignore, context|div.ignore {
+ display : none ;
+}
+
+ignore, context|div.private {
display : none ;
}
-xmetadata {
+xmetadata, context|div.xmetadata {
display : none ;
}
-xmetavariable {
+xmetavariable, context|div.xmetavariable {
display : none ;
}
/* document : display */
-document:before {
+document:before, context|div.document:before {
content : attr(title) ;
font-size : 44pt ;
font-weight : bold ;
margin-bottom : 1em ;
}
-document {
+document, context|div.document {
font-family : "DejaVu Serif", "Lucida Bright", serif ;
font-size : 12pt ;
+ line-height : 14.4pt;
max-width : 50em ;
padding : 1em ;
/* text-align : justify ; */
+ text-align : justify ;
/* hyphens : manual ; */
/* text-justify : inter-word ; */
}
-document>metadata {
- font-family : "Lucida Console", "DejaVu Sans Mono", monospace ;
+document>metadata, context|div.document context|div.metadata {
+ font-family : "DejaVu Sans Mono", "Lucida Console", monospace ;
margin-bottom : 2em ;
}
-document>metadata>metavariable[name="title"]:before {
+document>metadata>metavariable[name="title"]:before,
+ context|div.document context|div.metadata context|div.metavariable.name-title:before {
content : "title\00A0\00A0\00A0:\00A0" ;
}
-document>metadata>metavariable[name="author"]:before {
- content : "author\00A0\00A0:\00A0" ;
+document>metadata>metavariable[name="author"]:before,
+ context|div.document context|div.metadata context|div.metavariable.name-author:before {
+ content : "author\00A0\00A0:\00A0" ;
}
-document>metadata>metavariable[name="version"]:before {
+document>metadata>metavariable[name="version"]:before,
+ context|div.document context|div.metadata context|div.metavariable.name-version:before {
content : "version\00A0:\00A0" ;
}
-document>metadata>metavariable[name="title"], document>metadata>metavariable[name="author"], document>metadata>metavariable[name="version"] {
+document>metadata>metavariable[name="title"], document>metadata>metavariable[name="author"], document>metadata>metavariable[name="version"],
+ context|div.document context|div.metadata context|div.metavariable.name-title, context|div.document context|div.metadata context|div.metavariable.name-author, context|div.document context|div.metadata context|div.metavariable.name-version {
display : block ;
}
/* paragraph : mixed */
/* p : mixed */
-paragraph, p {
+paragraph, p, context|div.paragraph, context|div.p {
display : block ;
margin-top : 0.5em ;
margin-bottom : 0.5em ;
@@ -73,7 +108,7 @@ paragraph, p {
/* break : display */
-break {
+break, context|div.break {
display : block ;
margin-bottom : 0.5em ;
}
@@ -81,14 +116,15 @@ break {
/* construct : inline */
/* highlight : inline */
-construct {
+construct, context|div.construct {
}
-construct[detail="important"] {
+construct[detail="important"], context|div.construct.important {
font-weight : bold ;
}
-highlight { /* todo: style and color */
+highlight, context|div.highlight { /* todo: style and color */
+ display : inline ;
}
/* section : display */
@@ -96,76 +132,95 @@ highlight { /* todo: style and color */
/* sectionnumber : mixed */
/* sectioncontent : display */
-section {
+section, context|div.section {
display : block ;
}
-sectioncontent {
+sectioncontent, context|div.sectioncontent {
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
}
-section[detail="chapter"], section[detail="title"] {
- margin-top : 3em ;
- margin-bottom : 2em ;
+section[detail="chapter"], section[detail="title"],
+ context|div.section.chapter, context|div.section.title {
+ page-break-before : always ;
+ page-break-after : avoid ;
+ margin-top : 3em ;
+ margin-bottom : 2em ;
}
-section[detail="section"], section[detail="subject"] {
- margin-top : 2.5em ;
- margin-bottom : 2.5em ;
+section[detail="section"], section[detail="subject"],
+ context|div.section.section, context|div.section.subject {
+ page-break-after : avoid ;
+ margin-top : 2.5em ;
+ margin-bottom : 2.5em ;
}
-section[detail="subsection"], section[detail="subsubject"] {
- margin-top : 2em ;
- margin-bottom : 2em ;
+section[detail="subsection"], section[detail="subsubject"],
+ context|div.section.subsection, context|div.section.subsubject {
+ page-break-after : avoid ;
+ margin-top : 2em ;
+ margin-bottom : 2em ;
}
-section[detail="subsubsection"], section[detail="subsubsubject"] {
- margin-top : 1em ;
- margin-bottom : 0em ;
+section[detail="subsubsection"], section[detail="subsubsubject"],
+ context|div.section.subsubsection, context|div.section.subsubsubject {
+ page-break-after : avoid ;
+ margin-top : 1em ;
+ margin-bottom : 0em ;
}
-section[detail="summary"], section[detail="subsummary"] {
+section[detail="summary"], section[detail="subsummary"],
+ context|div.section.summary, context|div.section.subsummary {
margin-top : 1em ;
margin-bottom : 1em ;
}
-section[detail="chapter"]>sectionnumber {
+section[detail="chapter"]>sectionnumber,
+ context|div.section.chapter context|div.sectionnumber {
display : inline-block ;
margin-right : 1em ;
font-size : 3em ;
font-weight : bold ;
}
-section[detail="chapter"]>sectiontitle, section[detail="title"]>sectiontitle {
+section[detail="chapter"]>sectiontitle, section[detail="title"]>sectiontitle,
+ context|div.section.chapter context|div.sectiontitle, context|div.section.title context|div.sectiontitle {
+ display : inline-block ;
font-size : 3em ;
font-weight : bold ;
}
-section[detail="section"]>sectiontitle, section[detail="subject"]>sectiontitle {
+section[detail="section"]>sectiontitle, section[detail="subject"]>sectiontitle,
+ context|div.section.section context|div.sectiontitle, context|div.section.subject context|div.sectiontitle {
+ display : inline-block ;
font-size : 2.5em ;
font-weight : bold ;
}
-section[detail="subsection"]>sectiontitle, section[detail="subsubject"]>sectiontitle{
+section[detail="subsection"]>sectiontitle, section[detail="subsubject"]>sectiontitle,
+ context|div.section.subsection context|div.sectiontitle, context|div.section.subsubject context|div.sectiontitle {
+ display : inline-block ;
font-size : 2em ;
font-weight : bold ;
}
-section[detail="subsubsection"]>sectiontitle, section[detail="subsubsubject"]>sectiontitle{
+section[detail="subsubsection"]>sectiontitle, section[detail="subsubsubject"]>sectiontitle,
+ context|div.section.subsubsection context|div.sectiontitle, context|div.section.subsubsubject context|div.sectiontitle {
+ display : inline-block ;
font-size : 1em ;
font-weight : bold ;
}
-section[detail="section"]>sectionnumber {
+section[detail="section"]>sectionnumber, context|div.section.section context|div.sectionnumber {
display : inline-block ;
margin-right : 1em ;
font-size : 2.5em ;
font-weight : bold ;
}
-section[detail="summary"]>sectiontitle {
+section[detail="summary"]>sectiontitle, context|div.section.summary context|div.sectiontitle {
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
@@ -175,21 +230,14 @@ section[detail="summary"]>sectiontitle {
border-width : .15em;
}
-section[detail="subsection"]>sectionnumber {
- display : inline-block ;
- margin-right : 1em ;
- font-size : 2em ;
- font-weight : bold ;
-}
-
-section[detail="subsection"]>sectionnumber {
+section[detail="subsection"]>sectionnumber, context|div.section.subsection context|div.sectionnumber {
display : inline-block ;
margin-right : 1em ;
font-size : 1em ;
font-weight : bold ;
}
-section[detail="subsummary"]>sectiontitle {
+section[detail="subsummary"]>sectiontitle, context|div.section.subsummary context|div.sectiontitle {
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
@@ -217,37 +265,37 @@ section[detail="subsummary"]>sectiontitle {
glyphs : 'Α' 'B' 'Γ' 'Δ' 'Ε' 'Ζ' 'Η' 'Θ' 'Ι' 'Κ' 'Λ' 'Μ' 'Ν' 'Ξ' 'Ο' 'Π' 'Ρ' 'Σ' 'Τ' 'Υ' 'Φ' 'Χ' 'Ψ' 'Ω' ;
}
-itemgroup {
+itemgroup, context|div.itemgroup {
display : block ;
margin-bottom : 0.5em ;
margin-top : 0.5em ;
}
-itemgroup[symbol="1"] { list-style-type : disc ; }
-itemgroup[symbol="2"] { list-style-type : square ; }
-itemgroup[symbol="3"] { list-style-type : square ; }
-itemgroup[symbol="4"] { list-style-type : square ; }
-itemgroup[symbol="5"] { list-style-type : circ ; }
-itemgroup[symbol="a"] { list-style-type : lower-alpha ; }
-itemgroup[symbol="A"] { list-style-type : alpha ; }
-itemgroup[symbol="r"] { list-style-type : lower-roman ; }
-itemgroup[symbol="R"] { list-style-type : upper-roman ; }
-itemgroup[symbol="n"] { list-style-type : decimal ; }
-itemgroup[symbol="g"] { list-style-type : lower-greek ; }
-itemgroup[symbol="G"] { list-style-type : upper-greek ; }
-
-item {
+itemgroup[symbol="1"], context|div.itemgroup.symbol-1 { list-style-type : disc ; }
+itemgroup[symbol="2"], context|div.itemgroup.symbol-2 { list-style-type : square ; }
+itemgroup[symbol="3"], context|div.itemgroup.symbol-3 { list-style-type : square ; }
+itemgroup[symbol="4"], context|div.itemgroup.symbol-4 { list-style-type : square ; }
+itemgroup[symbol="5"], context|div.itemgroup.symbol-5 { list-style-type : circ ; }
+itemgroup[symbol="a"], context|div.itemgroup.symbol-a { list-style-type : lower-alpha ; }
+itemgroup[symbol="A"], context|div.itemgroup.symbol-A { list-style-type : alpha ; }
+itemgroup[symbol="r"], context|div.itemgroup.symbol-r { list-style-type : lower-roman ; }
+itemgroup[symbol="R"], context|div.itemgroup.symbol-R { list-style-type : upper-roman ; }
+itemgroup[symbol="n"], context|div.itemgroup.symbol-n { list-style-type : decimal ; }
+itemgroup[symbol="g"], context|div.itemgroup.symbol-g { list-style-type : lower-greek ; }
+itemgroup[symbol="G"], context|div.itemgroup.symbol-G { list-style-type : upper-greek ; }
+
+item, context|div.item {
display : list-item ;
margin-left : 1em ;
margin-bottom : 0.5em ;
margin-top : 0.5em ;
}
-itemtag {
+itemtag, context|div.itemtag {
display: none ;
}
-itemcontent {
+itemcontent, context|div.itemcontent {
}
/* description : display */
@@ -255,13 +303,14 @@ itemcontent {
/* descriptioncontent : mixed */
/* descriptionsymbol : inline */
-description {
+description, context|div.description {
display : block ;
margin-bottom : 1em ;
margin-top : 1em ;
}
-descriptiontag {
+descriptiontag, context|div.descriptiontag {
+ display : inline ;
float : left ;
clear : left ;
margin-right : 1em ;
@@ -269,10 +318,11 @@ descriptiontag {
font-weight : bold ;
}
-descriptioncontent {
+descriptioncontent, context|div.descriptioncontent {
}
-descriptionsymbol {
+descriptionsymbol, context|div.descriptionsymbol {
+ display : inline ;
}
/* verbatimblock : display */
@@ -280,46 +330,47 @@ descriptionsymbol {
/* verbatimline : mixed */
/* verbatim : inline */
-verbatimblock {
+verbatimblock, context|div.verbatimblock {
background-color : rgb(50%,50%,100%) ;
display : block ;
padding : 1em ;
margin-bottom : 1em ;
margin-top : 1em ;
- font-family : "Lucida Console", "DejaVu Sans Mono", monospace ;
+ font-family : "DejaVu Sans Mono", "Lucida Console", monospace ;
}
-verbatimlines+verbatimlines {
+verbatimlines+verbatimlines, context|div.verbatimlines+context|div.verbatimlines {
display : block ;
margin-top : 1em ;
}
-verbatimline {
+verbatimline, context|div.verbatimline {
display : block ;
white-space : pre-wrap ;
}
-verbatim {
+verbatim, context|div.verbatim {
+ display : inline ;
white-space : pre-wrap ;
color : rgb(60%,60%,0%) ;
- font-family : "Lucida Console", "DejaVu Sans Mono", monospace ;
+ font-family : "DejaVu Sans Mono", "Lucida Console", monospace ;
}
/* lines : display */
/* line : mixed */
-lines {
+lines, context|div.lines {
display : block ;
margin-bottom : 1em ;
margin-top : 1em ;
}
-lines+lines {
+lines+lines, context|div.lines+context|div.lines {
display : block ;
margin-top : 1em ;
}
-line {
+line, context|div.line {
display : block ;
white-space : pre-wrap ;
}
@@ -327,7 +378,7 @@ line {
/* synonym : inline */
/* sorting : inline */
-sorting, synonym {
+sorting, synonym, context|div.sorting, context|div.synonym {
display : inline ;
font-variant : small-caps ;
}
@@ -342,41 +393,54 @@ sorting, synonym {
/* registerpage : inline */
/* registerpagerange : mixed */
-register {
+register, context|div.register {
display: none ;
}
+registerlocation, context|div.registerlocation {
+ display: inline ;
+}
+
+registerlocation:after, context|div.registerlocation:after {
+ content : "\25B6\00A0\00A0" ;
+ color : rgb(40%,40%,40%) ;
+ font-size : x-small ;
+ line-height : 0 ;
+ padding-top : 0.5ex ;
+ vertical-align : top ;
+}
+
/* table : display */
/* tablerow : display */
/* tablecell : mixed */
-table {
+table, context|div.table {
display : table ;
}
-tablerow {
+tablerow, context|div.tablerow {
display : table-row ;
}
-tablecell[align="middle"] {
+tablecell[align="middle"], context|div.tablecell.align-middle {
display : table-cell ;
text-align : center ;
padding : .1em ;
}
-tablecell[align="flushleft"] {
+tablecell[align="flushleft"], context|div.tablecell.align-flushleft {
display : table-cell ;
text-align : left ;
padding : .1em ;
}
-tablecell[align="flushright"] {
+tablecell[align="flushright"], context|div.tablecell.align-flushright {
display : table-cell ;
text-align : right ;
padding : .1em ;
}
-tablecell {
+tablecell, context|div.tablecell {
display : table-cell ;
text-align : left ;
padding : .1em ;
@@ -386,36 +450,40 @@ tablecell {
/* tabulaterow : display */
/* tabulatecell : mixed */
-tabulate {
+tabulate, context|div.tabulate {
display : table ;
margin-top : 1em ;
margin-bottom : 1em ;
margin-left : 2.5em ;
}
-tabulaterow {
+floatcontent>tabulate, context|div.floatcontent context|div.tabulate {
+ margin-left : 0em ;
+}
+
+tabulaterow, context|div.tabulaterow {
display : table-row ;
}
-tabulatecell[align="middle"] {
+tabulatecell[align="middle"], context|div.tabulatecell.align-middle {
display : table-cell ;
text-align : center ;
padding-right : 1em ;
}
-tabulatecell[align="flushleft"] {
+tabulatecell[align="flushleft"], context|div.tabulatecell.align-flushleft {
display : table-cell ;
text-align : left ;
padding-right : 1em ;
}
-tabulatecell[align="flushright"] {
+tabulatecell[align="flushright"], context|div.tabulatecell.align-flushright {
display : table-cell ;
text-align : right ;
padding-right : 1em ;
}
-tabulatecell {
+tabulatecell, context|div.tabulatecell {
display : table-cell ;
text-align : left ;
padding-right : 1em ;
@@ -426,23 +494,23 @@ tabulatecell {
/* combinationcontent : mixed */
/* combinationcaption : mixed */
-combination {
+combination, context|div.combination {
display : table ;
margin-top : 0em ;
margin-bottom : 0em ;
}
-combinationpair {
+combinationpair, context|div.combinationpair {
display : table-cell ;
padding-right : 1em ;
}
-combinationcontent {
+combinationcontent, context|div.combinationcontent {
display : table-row ;
text-align : center ;
}
-combinationcaption {
+combinationcaption, context|div.combinationcaption {
display : table-row ;
padding-top : 1ex ;
text-align : center ;
@@ -456,36 +524,39 @@ combinationcaption {
/* listdata : mixed */
/* listpage : mixed */
-list {
+list, context|div.list {
display : block ;
}
-listitem[detail="chapter"] {
+listitem[detail="chapter"], context|div.listitem.chapter {
display : block ;
margin-top : 1em ;
font-weight : bold ;
}
-listitem[detail="section"] {
+listitem[detail="section"], context|div.listitem.section {
display : block ;
}
-listitem[detail="subsection"] {
+listitem[detail="subsection"], context|div.listitem.subsection {
display : block ;
+ display : inline-block ;
}
-listtag {
+listtag, context|div.listtag {
display : inline-block ;
width : 5em ;
}
-listcontent {
+listcontent, context|div.listcontent {
+ display : inline-block ;
}
-listdata {
+listdata, context|div.listdata {
+ display : inline-block ;
}
-listpage {
+listpage, context|div.listpage {
display : none ;
}
@@ -495,35 +566,43 @@ listpage {
/* :lang(en) */
-delimited[detail="quotation"]:before, delimitedblock[detail="quotation"]:before {
- content : "\201C" ;
+delimited[detail="quotation"]:before, delimitedblock[detail="quotation"]:before,
+ context|div.delimited.quotation:before, context|div.delimitedblock.quotation:before {
+ /* content : "\201C" ; */
+ font-style : italic ;
}
-delimited[detail="quotation"]:after, delimitedblock[detail="quotation"]:after {
- content : "\201D" ;
+delimited[detail="quotation"]:after, delimitedblock[detail="quotation"]:after,
+ context|div.delimited.quotation:after, context|div.delimitedblock.quotation:after {
+ /* content : "\201D" ; */
+ font-style : italic ;
}
-delimited[detail="quote"]:before, delimitedblock[detail="quote"]:before {
- content : "\2018" ;
+delimited[detail="quote"]:before, delimitedblock[detail="quote"]:before,
+ context|div.delimited.quote:before, context|div.delimitedblock.quote:before {
+ /* content : "\2018" ; */
+ font-style : italic ;
}
-delimited[detail="quote"]:after, delimitedblock[detail="quote"]:after {
- content : "\2019" ;
+delimited[detail="quote"]:after, delimitedblock[detail="quote"]:after,
+ context|div.delimited.quote:after, context|div.delimitedblock.quote:after {
+ /* content : "\2019" ; */
+ font-style : italic ;
}
-delimited {
+delimited, context|div.delimited {
display : inline
}
-delimitedblock {
+delimitedblock, context|div.delimitedblock {
display : block
}
-subsentence:before, subsentence:after {
+subsentence:before, subsentence:after, context|div.subsentence:before, context|div.subsentence:after {
content : "\2014" ;
}
-subsentence {
+subsentence, context|div.subsentence {
display : inline
}
@@ -537,32 +616,36 @@ subsentence {
/* floattext : mixed */
/* floatcontent : mixed */
-float {
+float, context|div.float {
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
margin-left : 2.5em ;
}
-floatcaption {
+floatcaption, context|div.floatcaption {
display : block ;
margin-top : 0.5em ;
color : rgb(60%,60%,0%) ;
}
-floatlabel {
+floatlabel, context|div.floatlabel {
+ display : inline-block ;
font-weight : bold ;
- margin-right : 1em ;
+ margin-right : 0.25em ;
}
-floatnumber {
- font-weight : bold ;
+floatnumber, context|div.floatnumber {
+ display : inline ;
+ font-weight : bold ;
+ margin-right : 0.25em ;
}
-floattext {
+floattext, context|div.floattext {
+ display : inline ;
}
-floatcontent {
+floatcontent, context|div.floatcontent {
}
/* image : mixed */
@@ -576,11 +659,11 @@ floatcontent {
height : 5.994cm ;
} */
-mpgraphic:before { /* does not work with empty element */
+mpgraphic:before, context|div.mpgraphic:before { /* does not work with empty element */
content : "[runtime metapost graphic]" ;
}
-mpgraphic {
+mpgraphic, context|div.mpgraphic {
display : inline ;
}
@@ -592,52 +675,68 @@ mpgraphic {
/* formulanumber : mixed */
/* formulacontent : display */
-formula {
+formula, context|div.formula {
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
margin-left : 2.5em ;
}
-subformula { /* todo */
+subformula, context|div.subformula { /* todo */
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
margin-left : 2.5em ;
}
-formulaset { /* todo */
+formulaset, context|div.formulaset { /* todo */
display : block ;
margin-top : 1em ;
margin-bottom : 1em ;
margin-left : 2.5em ;
}
-formulacaption { /* todo */
+formulacaption, context|div.formulacaption { /* todo */
display : block ;
margin-top : 0.5em ;
color : rgb(60%,60%,0%) ;
}
-formulalabel {
+formulalabel, context|div.formulalabel {
+ display : inline ;
font-weight : bold ;
- margin-right : 1em ;
+ margin-right : .25em ;
}
-formulanumber {
+formulanumber, context|div.formulanumber {
+ display : inline ;
font-weight : bold ;
}
-formulacontent {
+formulacontent, context|div.formulacontent {
display : block ;
}
-/* link : inline */
+link, context|div.link {
+ display : inline ;
+}
/* margintextblock : inline */
/* margintext : inline */
-/* math : inline */
+margintext, context|div.margintext {
+ display : block ;
+ font-weight : bold ;
+ margin-top : 1em ;
+ margin-bottom : 1em ;
+}
+
+margintext:before, context|div.margintext:before {
+ content : "\25B6\00A0\00A0" ;
+ color : rgb(40%,40%,40%) ;
+}
+
+/* math : inline | display */
/* mn : mixed */
/* mi : mixed */
/* mo : mixed */
@@ -660,52 +759,86 @@ formulacontent {
/* mtr : display */
/* mtd : display */
+context|div.math-inline {
+ display : inline ;
+ vertical-align : 0 ; /* this will be set directly */
+}
+
+context|div.math-display {
+ display : block ;
+ margin : 1ex 0ex 1em 3em ;
+}
+
/* quantity : inline */
/* unit : inline */
/* number : inline */
-quantity {
+quantity, context|div.quantity {
+ display : inline-block ;
}
-quantity>unit {
+quantity>unit, context|div.quantity>context|div.unit {
+ display : inline ;
}
-quantity>number {
+quantity>number, context|div.quantity>context|div.number {
+ display : inline ;
}
/* sub : inline */
/* sup : inline */
/* subsup : inline */
-sup {
+sup, context|div.sup {
+ display : inline-block ;
font-size : xx-small ;
- line-height : 0 ;
- vertical-align : top ;
+ vertical-align : super ;
}
-sub {
+sub, context|div.sub {
+ display : inline-block ;
font-size : xx-small ;
- line-height : 0 ;
- vertical-align : bottom ;
+ vertical-align : sub ;
}
-
-subsup>sup {
+subsup>sup, context|div.subsup>context|div.sup {
+ display : inline ;
vertical-align : top ;
}
-subsup>sub {
+subsup>sub, context|div.subsup>context|div.sub {
+ display : inline ;
vertical-align : bottom ;
}
-/* xhtml */
+/* links */
+
+context|div[href]:hover {
+ color : rgb(50%,0%,0%) ;
+ background-color : rgb(85%,85%,85%) ;
+}
+
+/* setups */
-a[href] {
- text-decoration : none ;
- color : inherit ;
+setup, context|div.setup {
+ display : block ;
+}
+
+comment, context|div.comment {
+ background-color : rgb(50%,75%,100%) ;
+ display : block ;
+ padding : 1em ;
+ margin-bottom : 2ex ;
+ margin-top : 2ex ;
+ font-family : "DejaVu Sans Mono", "Lucida Console", monospace ;
}
-a[href]:hover {
- color : rgb(50%,0%,0%) ;
- text-decoration : underline ;
+/* special */
+
+c, context|div.c {
+ display : inline ;
+}
+
+warning, context|div.warning {
+ display : none ;
}
diff --git a/tex/context/base/export-example.tex b/tex/context/base/export-example.tex
index 3a70b92fd..32cb79c5e 100644
--- a/tex/context/base/export-example.tex
+++ b/tex/context/base/export-example.tex
@@ -9,9 +9,7 @@
hyphen=yes]
\setupbackend
- [export=export-example.xml,
- xhtml=export-example.xhtml,
- css=export-example.css]
+ [export=yes]
% \setupalign
% [flushleft]
diff --git a/tex/context/base/file-ini.lua b/tex/context/base/file-ini.lua
index 2bc742a1f..2a0271a9d 100644
--- a/tex/context/base/file-ini.lua
+++ b/tex/context/base/file-ini.lua
@@ -11,27 +11,42 @@ if not modules then modules = { } end modules ['file-ini'] = {
<l n='tex'/>. These methods have counterparts at the <l n='tex'/> end.</p>
--ldx]]--
-resolvers.jobs = resolvers.jobs or { }
-
-local texsetcount = tex.setcount
-local setvalue = context.setvalue
-
-function commands.splitfilename(fullname)
- local t = file.nametotable(fullname)
- local path = t.path
- texsetcount("splitoffkind",(path == "" and 0) or (path == '.' and 1) or 2)
- setvalue("splitofffull",fullname)
- setvalue("splitoffpath",path)
- setvalue("splitoffname",t.name)
- setvalue("splitoffbase",t.base)
- setvalue("splitofftype",t.suffix)
+
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+local setcount = interfaces.setcount
+
+resolvers.jobs = resolvers.jobs or { }
+
+local filenametotable = file.nametotable
+local findtexfile = resolvers.findtexfile
+
+local commands_doifelse = commands.doifelse
+
+local function splitfilename(full)
+ local split = filenametotable(full)
+ local path = split.path
+ setcount("splitoffkind",(path == "" and 0) or (path == '.' and 1) or 2)
+ setmacro("splitofffull",full or "")
+ setmacro("splitoffpath",path or "")
+ setmacro("splitoffname",split.name or "")
+ setmacro("splitoffbase",split.base or "")
+ setmacro("splitofftype",split.suffix or "")
end
-function commands.doifparentfileelse(n)
- commands.doifelse(n == environment.jobname or n == environment.jobname .. '.tex' or n == environment.outputfilename)
+local function isparentfile(name)
+ return
+ name == environment.jobname
+ or name == environment.jobname .. '.tex'
+ or name == environment.outputfilename
end
-function commands.doiffileexistelse(name)
- local foundname = resolvers.findtexfile(name)
- commands.doifelse(foundname and foundname ~= "")
+local function istexfile(name)
+ local name = name and findtexfile(name)
+ return name ~= "" and name
end
+
+implement { name = "splitfilename", actions = splitfilename, arguments = "string" }
+implement { name = "doifelseparentfile", actions = { isparentfile, commands_doifelse }, arguments = "string" }
+implement { name = "doifelsepathexist", actions = { lfs.isdir, commands_doifelse }, arguments = "string" }
+implement { name = "doifelsefileexist", actions = { istexfile, commands_doifelse }, arguments = "string" }
diff --git a/tex/context/base/file-ini.mkvi b/tex/context/base/file-ini.mkvi
index 989241dd1..cb9f18cf9 100644
--- a/tex/context/base/file-ini.mkvi
+++ b/tex/context/base/file-ini.mkvi
@@ -103,11 +103,11 @@
\the\everystartreadingfile
\pushcatcodetable % saveguard
\setcatcodetable\ctxcatcodes
- \ctxcommand{pushregime()}}% temporarily this way
+ \clf_pushregime}% temporarily this way
\unexpanded\def\stopreadingfile
{\popcatcodetable % saveguard
- \ctxcommand{popregime()}% temporarily this way
+ \clf_popregime % temporarily this way
\the\everystopreadingfile
\global\advance\readingfilelevel\minusone}
@@ -125,7 +125,7 @@
\def\inputgivenfile#name{\normalinput{#name}}
%D \macros
-%D {doiffileelse}
+%D {doifelsefile}
%D
%D The next alternative only looks if a file is present. No
%D loading is done. This one obeys the standard \TEX\ lookup.
@@ -134,11 +134,19 @@
%D \doiffileelse {filename} {found} {not found}
%D \stoptyping
-\def\doiffileexistselse#name{\ctxcommand{doiffileexistelse([[#name]])}}
+\unexpanded\def\doifelsefile {\clf_doifelsefileexist}
+\unexpanded\def\doifelsepath {\clf_doifelsepathexist}
+\unexpanded\def\doiffile #name{\clf_doifelsefileexist{#name}\firstofoneargument\gobbleoneargument}
+\unexpanded\def\doifnotfile #name{\clf_doifelsefileexist{#name}\gobbleoneargument\firstofoneargument}
-\def\doiffileelse {\doiffileexistselse}
-\def\doiffile #name{\doiffileexistselse{#name}\firstofoneargument\gobbleoneargument}
-\def\doifnotfile #name{\doiffileexistselse{#name}\gobbleoneargument\firstofoneargument}
+\let\doiffileelse\doifelsefile
+\let\doifpathelse\doifelsepath
+
+\let\doifelsefileexists\doifelsefile
+\let\doifelsepathexists\doifelsepath
+
+\let\doiffileexistselse\doifelsefileexists
+\let\doifpathexistselse\doifelsepathexists
%D \macros
%D {doifparentfileelse}
@@ -149,7 +157,9 @@
\ifx\outputfilename\undefined \def\outputfilename{\jobname} \fi
-\def\doifparentfileelse#name{\ctxcommand{doifparentfileelse([[#name]])}}
+\unexpanded\def\doifelseparentfile{\clf_doifelseparentfile}
+
+\let\doifparentfileelse\doifelseparentfile
%D \macros
%D {splitfilename}
@@ -185,7 +195,7 @@
\let\splitoffname\empty
\let\splitofftype\empty
-\def\splitfilename#name{\ctxcommand{splitfilename([[#name]])}}
+\unexpanded\def\splitfilename{\clf_splitfilename}
%D \macros
%D {doonlyonce, doinputonce, doendinputonce}
@@ -216,7 +226,7 @@
\fi}
\unexpanded\def\doinputonce#name%
- {\doonlyonce{#name}{\doiffileelse{#name}{\inputgivenfile{#name}}\donothing}}
+ {\doonlyonce{#name}{\doifelsefile{#name}{\inputgivenfile{#name}}\donothing}}
\unexpanded\def\doendinputonce#name%
{\ifcsname\??fileonce#name\endcsname
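Side note on the splitfilename conversion above: the splitoffkind count distinguishes 0 (no path at all), 1 (just the current directory) and 2 (a real path), mirroring the expression in file-ini.lua. A stand-alone sketch follows; the nametotable below is a simplified approximation, not ConTeXt's file.nametotable.

-- simplified splitter: path, base, name and suffix of a file name
local function nametotable(fullname)
    local path, base = fullname:match("^(.*)/([^/]-)$")
    if not path then path, base = "", fullname end
    local name, suffix = base:match("^(.*)%.([^.]-)$")
    if not name then name, suffix = base, "" end
    return { path = path, base = base, name = name, suffix = suffix }
end

local function splitoffkind(fullname)
    local path = nametotable(fullname).path
    return (path == "" and 0) or (path == "." and 1) or 2
end

print(splitoffkind("foo.tex"))     -- 0 : no path given
print(splitoffkind("./foo.tex"))   -- 1 : explicit current directory
print(splitoffkind("bar/foo.tex")) -- 2 : a real (sub)path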
diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua
index 288a690d2..3a034f0a3 100644
--- a/tex/context/base/file-job.lua
+++ b/tex/context/base/file-job.lua
@@ -11,15 +11,20 @@ if not modules then modules = { } end modules ['file-job'] = {
local gsub, match, find = string.gsub, string.match, string.find
local insert, remove, concat = table.insert, table.remove, table.concat
-local validstring = string.valid
+local validstring, formatters = string.valid, string.formatters
local sortedhash = table.sortedhash
-local formatters = string.formatters
-local commands, resolvers, context = commands, resolvers, context
+local commands = commands
+local resolvers = resolvers
+local context = context
-local trace_jobfiles = false trackers.register("system.jobfiles", function(v) trace_jobfiles = v end)
+local ctx_doifelse = commands.doifelse
-local report_jobfiles = logs.reporter("system","jobfiles")
+local implement = interfaces.implement
+
+local trace_jobfiles = false trackers.register("system.jobfiles", function(v) trace_jobfiles = v end)
+
+local report_jobfiles = logs.reporter("system","jobfiles")
local texsetcount = tex.setcount
local elements = interfaces.elements
@@ -42,6 +47,16 @@ local is_qualified_path = file.is_qualified_path
local cleanpath = resolvers.cleanpath
local inputstack = resolvers.inputstack
+local resolveprefix = resolvers.resolve
+
+local hasscheme = url.hasscheme
+
+local jobresolvers = resolvers.jobs
+
+local registerextrapath = resolvers.registerextrapath
+local resetextrapath = resolvers.resetextrapath
+local pushextrapath = resolvers.pushextrapath
+local popextrapath = resolvers.popextrapath
local v_outer = variables.outer
local v_text = variables.text
@@ -56,7 +71,7 @@ local c_prefix = variables.prefix
local function findctxfile(name) -- loc ? any ?
if is_qualified_path(name) then -- maybe when no suffix do some test for tex
return name
- elseif not url.hasscheme(name) then
+ elseif not hasscheme(name) then
return resolvers.finders.byscheme("loc",name) or ""
else
return resolvers.findtexfile(name) or ""
@@ -65,44 +80,94 @@ end
resolvers.findctxfile = findctxfile
-function commands.processfile(name)
- name = findctxfile(name)
- if name ~= "" then
- context.input(name)
+implement {
+ name = "processfile",
+ arguments = "string",
+ actions = function(name)
+ name = findctxfile(name)
+ if name ~= "" then
+ context.input(name)
+ end
end
-end
+}
-function commands.doifinputfileelse(name)
- commands.doifelse(findctxfile(name) ~= "")
-end
+implement {
+ name = "doifelseinputfile",
+ arguments = "string",
+ actions = function(name)
+ ctx_doifelse(findctxfile(name) ~= "")
+ end
+}
-function commands.locatefilepath(name)
- context(dirname(findctxfile(name)))
-end
+implement {
+ name = "locatefilepath",
+ arguments = "string",
+ actions = function(name)
+ context(dirname(findctxfile(name)))
+ end
+}
-function commands.usepath(paths)
- resolvers.registerextrapath(paths)
-end
+implement {
+ name = "usepath",
+ arguments = "string",
+ actions = function(paths)
+ report_jobfiles("using path: %s",paths)
+ registerextrapath(paths)
+ end
+}
-function commands.usesubpath(subpaths)
- resolvers.registerextrapath(nil,subpaths)
-end
+implement {
+ name = "pushpath",
+ arguments = "string",
+ actions = function(paths)
+ report_jobfiles("pushing path: %s",paths)
+ pushextrapath(paths)
+ end
+}
-function commands.allinputpaths()
- context(concat(resolvers.instance.extra_paths or { },","))
-end
+implement {
+ name = "poppath",
+ actions = function(paths)
+ popextrapath()
+ report_jobfiles("popping path")
+ end
+}
-function commands.setdocumentfilenames()
- environment.initializefilenames()
-end
+implement {
+ name = "usesubpath",
+ arguments = "string",
+ actions = function(subpaths)
+ report_jobfiles("using subpath: %s",subpaths)
+ registerextrapath(nil,subpaths)
+ end
+}
-function commands.usezipfile(name,tree)
- if tree and tree ~= "" then
- resolvers.usezipfile(formatters["zip:///%s?tree=%s"](name,tree))
- else
- resolvers.usezipfile(formatters["zip:///%s"](name))
+implement {
+ name = "resetpath",
+ actions = function()
+ report_jobfiles("resetting path")
+ resetextrapath()
end
-end
+}
+
+implement {
+ name = "allinputpaths",
+ actions = function()
+ context(concat(resolvers.instance.extra_paths or { },","))
+ end
+}
+
+implement {
+ name = "usezipfile",
+ arguments = { "string", "string" },
+ actions = function(name,tree)
+ if tree and tree ~= "" then
+ resolvers.usezipfile(formatters["zip:///%s?tree=%s"](name,tree))
+ else
+ resolvers.usezipfile(formatters["zip:///%s"](name))
+ end
+ end
+}
local report_system = logs.reporter("system")
@@ -113,7 +178,7 @@ local luapatterns = { "%s" .. utilities.lua.suffixes.luc, "%s.lua" }
local cldpatterns = { "%s.cld" }
local xmlpatterns = { "%s.xml" }
-local uselibrary = commands.uselibrary
+local uselibrary = resolvers.uselibrary
local input = context.input
-- status
@@ -124,23 +189,36 @@ local processstack = { }
local processedfile = ""
local processedfiles = { }
-function commands.processedfile()
- context(processedfile)
-end
+implement {
+ name = "processedfile",
+ actions = function()
+ context(processedfile)
+ end
+}
-function commands.processedfiles()
- context(concat(processedfiles,","))
-end
+implement {
+ name = "processedfiles",
+ actions = function()
+ context(concat(processedfiles,","))
+ end
+}
-function commands.dostarttextfile(name)
- insert(processstack,name)
- processedfile = name
- insert(processedfiles,name)
-end
+implement {
+ name = "dostarttextfile",
+ arguments = "string",
+ actions = function(name)
+ insert(processstack,name)
+ processedfile = name
+ insert(processedfiles,name)
+ end
+}
-function commands.dostoptextfile()
- processedfile = remove(processstack) or ""
-end
+implement {
+ name = "dostoptextfile",
+ actions = function()
+ processedfile = remove(processstack) or ""
+ end
+}
local function startprocessing(name,notext)
if not notext then
@@ -158,6 +236,11 @@ end
--
+local typestack = { }
+local currenttype = v_text
+
+--
+
local action = function(name,foundname) input(foundname) end
local failure = function(name,foundname) report_jobfiles("unknown %s file %a","tex",name) end
@@ -216,11 +299,6 @@ local function usexmlfile(name,onlyonce,notext)
stopprocessing(notext)
end
-commands.usetexfile = usetexfile
-commands.useluafile = useluafile
-commands.usecldfile = usecldfile
-commands.usexmlfile = usexmlfile
-
local suffixes = {
mkvi = usetexfile,
mkiv = usetexfile,
@@ -233,21 +311,36 @@ local suffixes = {
}
local function useanyfile(name,onlyonce)
- local s = suffixes[file.suffix(name)]
+ local s = suffixes[suffixonly(name)]
+ context(function() resolvers.pushpath(name) end)
if s then
- s(removesuffix(name),onlyonce)
+ -- s(removesuffix(name),onlyonce)
+ s(name,onlyonce) -- so, first with suffix, then without
else
usetexfile(name,onlyonce) -- e.g. ctx file
---~ resolvers.readfilename(name)
+ -- resolvers.readfilename(name)
end
+ context(resolvers.poppath)
end
-commands.useanyfile = useanyfile
+implement { name = "usetexfile", actions = usetexfile, arguments = "string" }
+implement { name = "useluafile", actions = useluafile, arguments = "string" }
+implement { name = "usecldfile", actions = usecldfile, arguments = "string" }
+implement { name = "usexmlfile", actions = usexmlfile, arguments = "string" }
-function resolvers.jobs.usefile(name,onlyonce,notext)
- local s = suffixes[file.suffix(name)]
+implement { name = "usetexfileonce", actions = usetexfile, arguments = { "string", true } }
+implement { name = "useluafileonce", actions = useluafile, arguments = { "string", true } }
+implement { name = "usecldfileonce", actions = usecldfile, arguments = { "string", true } }
+implement { name = "usexmlfileonce", actions = usexmlfile, arguments = { "string", true } }
+
+implement { name = "useanyfile", actions = useanyfile, arguments = "string" }
+implement { name = "useanyfileonce", actions = useanyfile, arguments = { "string", true } }
+
+function jobresolvers.usefile(name,onlyonce,notext)
+ local s = suffixes[suffixonly(name)]
if s then
- s(removesuffix(name),onlyonce,notext)
+ -- s(removesuffix(name),onlyonce,notext)
+ s(name,onlyonce,notext) -- so, first with suffix, then without
end
end
@@ -262,6 +355,8 @@ local function startstoperror()
startstoperror = dummyfunction
end
+local stopped
+
local function starttext()
if textlevel == 0 then
if trace_jobfiles then
@@ -275,73 +370,86 @@ local function starttext()
end
local function stoptext()
- if textlevel == 0 then
- startstoperror()
- elseif textlevel > 0 then
- textlevel = textlevel - 1
- end
- texsetcount("global","textlevel",textlevel)
- if textlevel <= 0 then
- if trace_jobfiles then
- report_jobfiles("stopping text")
+ if not stopped then
+ if textlevel == 0 then
+ startstoperror()
+ elseif textlevel > 0 then
+ textlevel = textlevel - 1
+ end
+ texsetcount("global","textlevel",textlevel)
+ if textlevel <= 0 then
+ if trace_jobfiles then
+ report_jobfiles("stopping text")
+ end
+ context.dostoptext()
+ -- registerfileinfo[end]jobfilename
+ context.finalend()
+ stopped = true
end
- context.dostoptext()
- -- registerfileinfo[end]jobfilename
- context.finalend()
- commands.stoptext = dummyfunction
end
end
-commands.starttext = starttext
-commands.stoptext = stoptext
+implement { name = "starttext", actions = starttext }
+implement { name = "stoptext", actions = stoptext }
-function commands.forcequitjob(reason)
- if reason then
- report_system("forcing quit: %s",reason)
- else
- report_system("forcing quit")
+implement {
+ name = "forcequitjob",
+ arguments = "string",
+ actions = function(reason)
+ if reason then
+ report_system("forcing quit: %s",reason)
+ else
+ report_system("forcing quit")
+ end
+ context.batchmode()
+ while textlevel >= 0 do
+ context.stoptext()
+ end
end
- context.batchmode()
- while textlevel >= 0 do
+}
+
+implement {
+ name = "forceendjob",
+ actions = function()
+ report_system([[don't use \end to finish a document]])
context.stoptext()
end
-end
-
-function commands.forceendjob()
- report_system([[don't use \end to finish a document]])
- context.stoptext()
-end
+}
-function commands.autostarttext()
- if textlevel == 0 then
- report_system([[auto \starttext ... \stoptext]])
+implement {
+ name = "autostarttext",
+ actions = function()
+ if textlevel == 0 then
+ report_system([[auto \starttext ... \stoptext]])
+ end
+ context.starttext()
end
- context.starttext()
-end
+}
-commands.autostoptext = stoptext
+implement {
+ name = "autostoptext",
+ actions = stoptext
+}
-- project structure
-function commands.processfilemany(name)
- useanyfile(name,false)
-end
-
-function commands.processfileonce(name)
- useanyfile(name,true)
-end
-
-function commands.processfilenone(name)
- -- skip file
-end
-
---
+implement {
+ name = "processfilemany",
+ arguments = { "string", false },
+ actions = useanyfile
+}
-local typestack = { }
-local pathstack = { }
+implement {
+ name = "processfileonce",
+ arguments = { "string", true },
+ actions = useanyfile
+}
-local currenttype = v_text
-local currentpath = "."
+implement {
+ name = "processfilenone",
+ arguments = "string",
+ actions = dummyfunction,
+}
local tree = { type = "text", name = "", branches = { } }
local treestack = { }
@@ -401,10 +509,11 @@ luatex.registerstopactions(function()
logspoptarget()
end)
-job.structure = job.structure or { }
-job.structure.collected = job.structure.collected or { }
-job.structure.tobesaved = root
-job.structure.components = { }
+local jobstructure = job.structure or { }
+job.structure = jobstructure
+jobstructure.collected = jobstructure.collected or { }
+jobstructure.tobesaved = root
+jobstructure.components = { }
local function initialize()
local function collect(root,result)
@@ -420,7 +529,7 @@ local function initialize()
end
return result
end
- job.structure.components = collect(job.structure.collected,{})
+ jobstructure.components = collect(jobstructure.collected,{})
end
job.register('job.structure.collected',root,initialize)
@@ -432,48 +541,67 @@ local context_processfilemany = context.processfilemany
local context_processfileonce = context.processfileonce
local context_processfilenone = context.processfilenone
+-- we need a plug in the nested loader: push/pop a pseudo current dir
+
+local function processfilecommon(name,action)
+ -- experiment, might go away
+-- if not hasscheme(name) then
+-- local path = dirname(name)
+-- if path ~= "" then
+-- registerextrapath(path)
+-- report_jobfiles("adding search path %a",path)
+-- end
+-- end
+ -- till here
+ action(name)
+end
+
+local function processfilemany(name) processfilecommon(name,context_processfilemany) end
+local function processfileonce(name) processfilecommon(name,context_processfileonce) end
+local function processfilenone(name) processfilecommon(name,context_processfilenone) end
+
local processors = utilities.storage.allocate {
-- [v_outer] = {
- -- [v_text] = { "many", context_processfilemany },
- -- [v_project] = { "once", context_processfileonce },
- -- [v_environment] = { "once", context_processfileonce },
- -- [v_product] = { "once", context_processfileonce },
- -- [v_component] = { "many", context_processfilemany },
+ -- [v_text] = { "many", processfilemany },
+ -- [v_project] = { "once", processfileonce },
+ -- [v_environment] = { "once", processfileonce },
+ -- [v_product] = { "once", processfileonce },
+ -- [v_component] = { "many", processfilemany },
-- },
[v_text] = {
- [v_text] = { "many", context_processfilemany },
- [v_project] = { "once", context_processfileonce }, -- dubious
- [v_environment] = { "once", context_processfileonce },
- [v_product] = { "many", context_processfilemany }, -- dubious
- [v_component] = { "many", context_processfilemany },
+ [v_text] = { "many", processfilemany },
+ [v_project] = { "once", processfileonce }, -- dubious
+ [v_environment] = { "once", processfileonce },
+ [v_product] = { "many", processfilemany }, -- dubious
+ [v_component] = { "many", processfilemany },
},
[v_project] = {
- [v_text] = { "many", context_processfilemany },
- [v_project] = { "none", context_processfilenone },
- [v_environment] = { "once", context_processfileonce },
- [v_product] = { "none", context_processfilenone },
- [v_component] = { "none", context_processfilenone },
+ [v_text] = { "many", processfilemany },
+ [v_project] = { "none", processfilenone },
+ [v_environment] = { "once", processfileonce },
+ [v_product] = { "none", processfilenone },
+ [v_component] = { "none", processfilenone },
},
[v_environment] = {
- [v_text] = { "many", context_processfilemany },
- [v_project] = { "none", context_processfilenone },
- [v_environment] = { "once", context_processfileonce },
- [v_product] = { "none", context_processfilenone },
- [v_component] = { "none", context_processfilenone },
+ [v_text] = { "many", processfilemany },
+ [v_project] = { "none", processfilenone },
+ [v_environment] = { "once", processfileonce },
+ [v_product] = { "none", processfilenone },
+ [v_component] = { "none", processfilenone },
},
[v_product] = {
- [v_text] = { "many", context_processfilemany },
- [v_project] = { "once", context_processfileonce },
- [v_environment] = { "once", context_processfileonce },
- [v_product] = { "many", context_processfilemany },
- [v_component] = { "many", context_processfilemany },
+ [v_text] = { "many", processfilemany },
+ [v_project] = { "once", processfileonce },
+ [v_environment] = { "once", processfileonce },
+ [v_product] = { "many", processfilemany },
+ [v_component] = { "many", processfilemany },
},
[v_component] = {
- [v_text] = { "many", context_processfilemany },
- [v_project] = { "once", context_processfileonce },
- [v_environment] = { "once", context_processfileonce },
- [v_product] = { "none", context_processfilenone },
- [v_component] = { "many", context_processfilemany },
+ [v_text] = { "many", processfilemany },
+ [v_project] = { "once", processfileonce },
+ [v_environment] = { "once", processfileonce },
+ [v_product] = { "none", processfilenone },
+ [v_component] = { "many", processfilemany },
}
}
@@ -493,7 +621,7 @@ local stop = {
[v_component] = context.stoptext,
}
-resolvers.jobs.processors = processors
+jobresolvers.processors = processors
local function topofstack(what)
local stack = stacks[what]
@@ -520,22 +648,22 @@ local function justacomponent()
end
end
-resolvers.jobs.productcomponent = productcomponent
-resolvers.jobs.justacomponent = justacomponent
+jobresolvers.productcomponent = productcomponent
+jobresolvers.justacomponent = justacomponent
-function resolvers.jobs.currentproject () return topofstack(v_project ) end
-function resolvers.jobs.currentproduct () return topofstack(v_product ) end
-function resolvers.jobs.currentcomponent () return topofstack(v_component ) end
-function resolvers.jobs.currentenvironment() return topofstack(v_environment) end
+function jobresolvers.currentproject () return topofstack(v_project ) end
+function jobresolvers.currentproduct () return topofstack(v_product ) end
+function jobresolvers.currentcomponent () return topofstack(v_component ) end
+function jobresolvers.currentenvironment() return topofstack(v_environment) end
local done = { }
-local tolerant = false -- too messy, mkii user with the wrong sructure should adapt
+local tolerant = false -- too messy, mkii user with the wrong structure should adapt
local function process(what,name)
local depth = #typestack
local process
--
- name = resolvers.resolve(name)
+ name = resolveprefix(name)
--
-- if not tolerant then
-- okay, would be best but not compatible with mkii
@@ -589,10 +717,10 @@ local function process(what,name)
end
end
-function commands.useproject (name) process(v_project, name) end
-function commands.useenvironment(name) process(v_environment,name) end
-function commands.useproduct (name) process(v_product, name) end
-function commands.usecomponent (name) process(v_component, name) end
+implement { name = "useproject", actions = function(name) process(v_project, name) end, arguments = "string" }
+implement { name = "useenvironment", actions = function(name) process(v_environment,name) end, arguments = "string" }
+implement { name = "useproduct", actions = function(name) process(v_product, name) end, arguments = "string" } -- will be overloaded
+implement { name = "usecomponent", actions = function(name) process(v_component, name) end, arguments = "string" }
-- todo: setsystemmode to currenttype
-- todo: make start/stop commands at the tex end
@@ -614,9 +742,7 @@ local stop = {
local function gotonextlevel(what,name) -- todo: something with suffix name
insert(stacks[what],name)
insert(typestack,currenttype)
- insert(pathstack,currentpath)
currenttype = what
- currentpath = dirname(name)
pushtree(what,name)
if start[what] then
start[what]()
@@ -628,7 +754,6 @@ local function gotopreviouslevel(what)
stop[what]()
end
poptree()
- currentpath = remove(pathstack) or "."
currenttype = remove(typestack) or v_text
remove(stacks[what]) -- not currenttype ... weak recovery
-- context.endinput() -- does not work
@@ -642,20 +767,20 @@ local function autoname(name)
return name
end
-function commands.startproject (name) gotonextlevel(v_project, autoname(name)) end
-function commands.startproduct (name) gotonextlevel(v_product, autoname(name)) end
-function commands.startcomponent (name) gotonextlevel(v_component, autoname(name)) end
-function commands.startenvironment(name) gotonextlevel(v_environment,autoname(name)) end
+implement { name = "startproject", actions = function(name) gotonextlevel(v_project, autoname(name)) end, arguments = "string" }
+implement { name = "startproduct", actions = function(name) gotonextlevel(v_product, autoname(name)) end, arguments = "string" }
+implement { name = "startcomponent", actions = function(name) gotonextlevel(v_component, autoname(name)) end, arguments = "string" }
+implement { name = "startenvironment", actions = function(name) gotonextlevel(v_environment,autoname(name)) end, arguments = "string" }
-function commands.stopproject () gotopreviouslevel(v_project ) end
-function commands.stopproduct () gotopreviouslevel(v_product ) end
-function commands.stopcomponent () gotopreviouslevel(v_component ) end
-function commands.stopenvironment() gotopreviouslevel(v_environment) end
+implement { name = "stopproject", actions = function() gotopreviouslevel(v_project ) end }
+implement { name = "stopproduct", actions = function() gotopreviouslevel(v_product ) end }
+implement { name = "stopcomponent", actions = function() gotopreviouslevel(v_component ) end }
+implement { name = "stopenvironment", actions = function() gotopreviouslevel(v_environment) end }
-function commands.currentproject () context(topofstack(v_project )) end
-function commands.currentproduct () context(topofstack(v_product )) end
-function commands.currentcomponent () context(topofstack(v_component )) end
-function commands.currentenvironment() context(topofstack(v_environment)) end
+implement { name = "currentproject", actions = function() context(topofstack(v_project )) end }
+implement { name = "currentproduct", actions = function() context(topofstack(v_product )) end }
+implement { name = "currentcomponent", actions = function() context(topofstack(v_component )) end }
+implement { name = "currentenvironment", actions = function() context(topofstack(v_environment)) end }
-- -- -- this will move -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
--
@@ -684,7 +809,7 @@ local function convertexamodes(str)
end
end
-function commands.loadexamodes(filename)
+function environment.loadexamodes(filename)
if not filename or filename == "" then
filename = removesuffix(tex.jobname)
end
@@ -697,6 +822,12 @@ function commands.loadexamodes(filename)
end
end
+implement {
+ name = "loadexamodes",
+ actions = environment.loadexamodes,
+ arguments = "string"
+}
+
-- changed in mtx-context
-- code moved from luat-ini
@@ -739,20 +870,50 @@ function document.setfilename(i,name)
end
end
-function document.getargument(key,default) -- commands
+function document.getargument(key,default)
local v = document.arguments[key]
if type(v) == "boolean" then
v = (v and "yes") or "no"
document.arguments[key] = v
end
- context(v or default or "")
+ return v or default or ""
end
-function document.getfilename(i) -- commands
- context(document.files[tonumber(i)] or "")
+function document.getfilename(i)
+ return document.files[tonumber(i)] or ""
end
-function commands.getcommandline() -- has to happen at the tex end in order to expand
+implement {
+ name = "setdocumentargument",
+ actions = document.setargument,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "setdocumentdefaultargument",
+ actions = document.setdefaultargument,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "setdocumentfilename",
+ actions = document.setfilename,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "getdocumentargument",
+ actions = { document.getargument, context },
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "getdocumentfilename",
+ actions = { document.getfilename, context },
+ arguments = "integer"
+}
+
+function document.setcommandline() -- has to happen at the tex end in order to expand
-- the document[arguments|files] tables are copies
@@ -801,23 +962,45 @@ function commands.getcommandline() -- has to happen at the tex end in order to e
inputfile = basename(inputfile)
end
+ local forcedruns = arguments.forcedruns
local kindofrun = arguments.kindofrun
- local currentrun = arguments.maxnofruns
- local maxnofruns = arguments.currentrun
+ local currentrun = arguments.currentrun
+ local maxnofruns = arguments.maxnofruns or arguments.runs
+
+ -- context.setupsystem {
+ -- [constants.directory] = validstring(arguments.setuppath),
+ -- [constants.inputfile] = inputfile,
+ -- [constants.file] = validstring(arguments.result),
+ -- [constants.random] = validstring(arguments.randomseed),
+ -- -- old:
+ -- [constants.n] = validstring(kindofrun),
+ -- [constants.m] = validstring(currentrun),
+ -- }
context.setupsystem {
- [constants.directory] = validstring(arguments.setuppath),
- [constants.inputfile] = inputfile,
- [constants.file] = validstring(arguments.result),
- [constants.random] = validstring(arguments.randomseed),
+ directory = validstring(arguments.setuppath),
+ inputfile = inputfile,
+ file = validstring(arguments.result),
+ random = validstring(arguments.randomseed),
-- old:
- [constants.n] = validstring(kindofrun),
- [constants.m] = validstring(currentrun),
+ n = validstring(kindofrun),
+ m = validstring(currentrun),
}
- environment.kindofrun = tonumber(kindofrun) or 0
- environment.maxnofruns = tonumber(maxnofruns) or 0
- environment.currentrun = tonumber(currentrun) or 0
+ forcedruns = tonumber(forcedruns) or 0
+ kindofrun = tonumber(kindofrun) or 0
+ maxnofruns = tonumber(maxnofruns) or 0
+ currentrun = tonumber(currentrun) or 0
+
+ local prerollrun = forcedruns > 0 and currentrun > 0 and currentrun < forcedruns
+
+ environment.forcedruns = forcedruns
+ environment.kindofrun = kindofrun
+ environment.maxnofruns = maxnofruns
+ environment.currentrun = currentrun
+ environment.prerollrun = prerollrun
+
+ context.setconditional("prerollrun",prerollrun)
if validstring(arguments.arguments) then
context.setupenv { arguments.arguments }
@@ -869,21 +1052,36 @@ local function apply(list,action)
end
end
-function commands.setdocumentmodes() -- was setup: *runtime:modes
+function document.setmodes() -- was setup: *runtime:modes
apply(document.options.ctxfile .modes,context.enablemode)
apply(document.options.commandline.modes,context.enablemode)
end
-function commands.setdocumentmodules() -- was setup: *runtime:modules
+function document.setmodules() -- was setup: *runtime:modules
apply(document.options.ctxfile .modules,context.usemodule)
apply(document.options.commandline.modules,context.usemodule)
end
-function commands.setdocumentenvironments() -- was setup: *runtime:environments
+function document.setenvironments() -- was setup: *runtime:environments
apply(document.options.ctxfile .environments,context.environment)
apply(document.options.commandline.environments,context.environment)
end
+function document.setfilenames()
+ local initialize = environment.initializefilenames
+ if initialize then
+ initialize()
+ else
+ -- fatal error
+ end
+end
+
+implement { name = "setdocumentcommandline", actions = document.setcommandline, onlyonce = true }
+implement { name = "setdocumentmodes", actions = document.setmodes, onlyonce = true }
+implement { name = "setdocumentmodules", actions = document.setmodules, onlyonce = true }
+implement { name = "setdocumentenvironments", actions = document.setenvironments, onlyonce = true }
+implement { name = "setdocumentfilenames", actions = document.setfilenames, onlyonce = true }
+
local report_files = logs.reporter("system","files")
local report_options = logs.reporter("system","options")
local report_file = logs.reporter("used file")
@@ -914,16 +1112,24 @@ luatex.registerstopactions(function()
logsnewline()
report_options("start commandline options")
logsnewline()
- for argument, value in sortedhash(arguments) do
- report_option("%s=%A",argument,value)
+ if arguments and next(arguments) then
+ for argument, value in sortedhash(arguments) do
+ report_option("%s=%A",argument,value)
+ end
+ else
+ report_file("no arguments")
end
logsnewline()
report_options("stop commandline options")
logsnewline()
report_options("start commandline files")
logsnewline()
- for i=1,#files do
- report_file("% 4i: %s",i,files[i])
+ if files and #files > 0 then
+ for i=1,#files do
+ report_file("% 4i: %s",i,files[i])
+ end
+ else
+ report_file("no files")
end
logsnewline()
report_options("stop commandline files")
@@ -987,15 +1193,20 @@ if environment.initex then
end
-function commands.doifelsecontinuewithfile(inpname,basetoo)
- local inpnamefull = addsuffix(inpname,"tex")
- local inpfilefull = addsuffix(environment.inputfilename,"tex")
- local continue = inpnamefull == inpfilefull
- if basetoo and not continue then
- continue = inpnamefull == basename(inpfilefull)
- end
- if continue then
- report_system("continuing input file %a",inpname)
+implement {
+ name = "doifelsecontinuewithfile",
+ arguments = "string",
+ actions = function(inpname,basetoo)
+ local inpnamefull = addsuffix(inpname,"tex")
+ local inpfilefull = addsuffix(environment.inputfilename,"tex")
+ local continue = inpnamefull == inpfilefull
+ -- if basetoo and not continue then
+ if not continue then
+ continue = inpnamefull == basename(inpfilefull)
+ end
+ if continue then
+ report_system("continuing input file %a",inpname)
+ end
+ ctx_doifelse(continue)
end
- commands.doifelse(continue)
-end
+}
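The processors table above decides, per current structure type, whether a referenced file of another type is processed "once", "many" times or not at all. A stand-alone sketch of that lookup, with plain strings instead of the v_* interface variables and only the method names kept; the "many" fallback is an assumption of this sketch, not taken from the code above.

-- simplified stand-in for the processors matrix: current type -> referenced type -> method
local processors = {
    product = {
        text        = "many",
        project     = "once",
        environment = "once", -- environments are loaded once
        product     = "many",
        component   = "many", -- components may be processed repeatedly
    },
    component = {
        text        = "many",
        project     = "once",
        environment = "once",
        product     = "none", -- a component never pulls in its product
        component   = "many",
    },
}

local function howtoprocess(currenttype, what)
    local row = processors[currenttype]
    return (row and row[what]) or "many" -- fallback assumed for this sketch
end

print(howtoprocess("product",   "component")) -- many
print(howtoprocess("component", "product"))   -- none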
diff --git a/tex/context/base/file-job.mkvi b/tex/context/base/file-job.mkvi
index ce0d54ece..7814aba3f 100644
--- a/tex/context/base/file-job.mkvi
+++ b/tex/context/base/file-job.mkvi
@@ -21,29 +21,59 @@
% processing
-\unexpanded\def\doifinputfileelse#name%
- {\ctxcommand{doifinputfileelse("#name")}}
+\unexpanded\def\doifelseinputfile#name%
+ {\clf_doifelseinputfile{#name}}
+
+\let\doifinputfileelse\doifelseinputfile
\unexpanded\def\processfile#name%
- {\ctxcommand{processfile("#name")}}
+ {\clf_processfile{#name}}
% path control
+%D In the following example the lookup order is \type {.}, \type {foo}, \type
+%D {foo/bar} as one can always explicitly prefix a wanted file.
+%D
+%D \starttyping
+%D \pushpath[foo]
+%D \pushpath[foo/bar]
+%D \input test
+%D \poppath
+%D \poppath
+%D \stoptyping
+
\unexpanded\def\usepath[#path]%
- {\ctxcommand{usepath("#path")}}
+ {\clf_usepath{#path}}
\unexpanded\def\usesubpath[#path]%
- {\ctxcommand{usesubpath("#path")}}
+ {\clf_usesubpath{#path}}
+
+\unexpanded\def\pushpath[#path]%
+ {\clf_pushpath{#path}}
+
+\unexpanded\def\poppath
+ {\clf_poppath}
+
+\unexpanded\def\resetpath % not the pushed paths
+ {\clf_resetpath}
\def\allinputpaths
- {\ctxcommand{allinputpaths()}}
+ {\clf_allinputpaths}
% helper (not really needed nowadays)
\let\locatedfilepath\empty
\unexpanded\def\locatefilepath#name% no [] ?
- {\edef\locatedfilepath{\ctxcommand{locatefilepath("#name")}}}
+ {\edef\locatedfilepath{\clf_locatefilepath{#name}}}
+
+% % maybe:
+%
+% % \dirlist{c:/data/temporary/foo}{images/bar.jpg}
+% % \dirfile{c:/data/temporary/foo}{images/bar.jpg}
+%
+% \def\dirlist#1#2{dirlist://#1/**/#2}
+% \def\dirfile#1#2{dirlist://#1/**/#2?option=fileonly}
% zip files are tree'd
@@ -51,7 +81,7 @@
{\dodoubleempty\syst_files_use_zip_file}
\def\syst_files_use_zip_file[#zipname][#subtree]%
- {\ctxcommand{usezipfile("#zipname","#subtree")}}
+ {\clf_usezipfile{#zipname}{#subtree}}
% exa stuff might become obsolete:
@@ -59,7 +89,7 @@
{\dosingleempty\syst_files_load_exa_modes}
\def\syst_files_load_exa_modes[#name]%
- {\ctxcommand{loadexamodes("#name")}}
+ {\clf_loadexamodes{#name}}
% runtime files (maybe also do this in lua?)
@@ -77,14 +107,6 @@
\def\syst_files_load_indeed#name% from now on we assume a suffix to be part of the name
{\readsysfile{#name}{\showmessage\m!system2{#name}}\donothing}
-% obsolete, but we keep it as reference of what happened
-%
-% \unexpanded\def\loadoptionfile
-% {\readjobfile{\jobname.\f!optionextension}
-% {\writestatus\m!system{\jobname.\f!optionextension\space loaded}%
-% \ctxcommand{copyfiletolog("\jobname.\f!optionextension")}}%
-% {\writestatus\m!system{no \jobname.\f!optionextension}}}
-
% document structure
\ifdefined\textlevel \else \newcount\textlevel \fi % might go away
@@ -105,15 +127,15 @@
\global\everybye\emptytoks
\the\everygoodbye
\global\everygoodbye\emptytoks
- \doifsometokselse\everynotabene{\writeline\the\everynotabene\writeline}\donothing
+ \doifelsesometoks\everynotabene{\writeline\the\everynotabene\writeline}\donothing
\normalend} % tex's \end
-\unexpanded\def\starttext {\ctxcommand{starttext()}}
-\unexpanded\def\stoptext {\ctxcommand{stoptext()}}
-\unexpanded\def\forcequitjob {\ctxcommand{forcequitjob()}}
-\unexpanded\def\end {\ctxcommand{forceendjob()}}
-\unexpanded\def\autostarttext{\ctxcommand{autostarttext()}}
-\unexpanded\def\autostoptext {\ctxcommand{autostoptext()}}
+\unexpanded\def\starttext {\clf_starttext}
+\unexpanded\def\stoptext {\clf_stoptext}
+\unexpanded\def\forcequitjob {\clf_forcequitjob}
+\unexpanded\def\end {\clf_forceendjob}
+\unexpanded\def\autostarttext{\clf_autostarttext}
+\unexpanded\def\autostoptext {\clf_autostoptext}
\unexpanded\def\finishjob{\stoptext} % nicer in luatex call commandline
@@ -125,39 +147,39 @@
% protect structure
-\unexpanded\def\processfilemany#name{\ctxcommand{processfilemany("#name")}}
-\unexpanded\def\processfileonce#name{\ctxcommand{processfileonce("#name")}}
-\unexpanded\def\processfilenone#name{\ctxcommand{processfilenone("#name")}}
+\unexpanded\def\processfilemany#name{\clf_processfilemany{#name}}
+\unexpanded\def\processfileonce#name{\clf_processfileonce{#name}}
+\unexpanded\def\processfilenone#name{\clf_processfilenone{#name}}
-\unexpanded\def\project {\doifnextoptionalelse\useproject \syst_structure_arg_project}
-\unexpanded\def\product {\doifnextoptionalelse\useproduct \syst_structure_arg_product}
-\unexpanded\def\component {\doifnextoptionalelse\usecomponent \syst_structure_arg_component}
-\unexpanded\def\environment{\doifnextoptionalelse\useenvironment\syst_structure_arg_environment}
+\unexpanded\def\project {\doifelsenextoptionalcs\useproject \syst_structure_arg_project}
+\unexpanded\def\product {\doifelsenextoptionalcs\useproduct \syst_structure_arg_product}
+\unexpanded\def\component {\doifelsenextoptionalcs\usecomponent \syst_structure_arg_component}
+\unexpanded\def\environment{\doifelsenextoptionalcs\useenvironment\syst_structure_arg_environment}
-\def\syst_structure_arg_project #name {\ctxcommand{useproject ("#name")}}
-\def\syst_structure_arg_product #name {\ctxcommand{useproduct ("#name")}}
-\def\syst_structure_arg_component #name {\ctxcommand{usecomponent ("#name")}}
-\def\syst_structure_arg_environment#name {\ctxcommand{useenvironment("#name")}}
+\def\syst_structure_arg_project #name {\clf_useproject {#name}}
+\def\syst_structure_arg_product #name {\clf_useproduct {#name}}
+\def\syst_structure_arg_component #name {\clf_usecomponent {#name}}
+\def\syst_structure_arg_environment#name {\clf_useenvironment{#name}}
-\unexpanded\def\startproject {\doifnextoptionalelse\syst_structure_start_opt_project \syst_structure_start_arg_project }
-\unexpanded\def\startproduct {\doifnextoptionalelse\syst_structure_start_opt_product \syst_structure_start_arg_product }
-\unexpanded\def\startcomponent {\doifnextoptionalelse\syst_structure_start_opt_component \syst_structure_start_arg_component }
-\unexpanded\def\startenvironment{\doifnextoptionalelse\syst_structure_start_opt_environment\syst_structure_start_arg_environment}
+\unexpanded\def\startproject {\doifelsenextoptionalcs\syst_structure_start_opt_project \syst_structure_start_arg_project }
+\unexpanded\def\startproduct {\doifelsenextoptionalcs\syst_structure_start_opt_product \syst_structure_start_arg_product }
+\unexpanded\def\startcomponent {\doifelsenextoptionalcs\syst_structure_start_opt_component \syst_structure_start_arg_component }
+\unexpanded\def\startenvironment{\doifelsenextoptionalcs\syst_structure_start_opt_environment\syst_structure_start_arg_environment}
-\def\syst_structure_start_arg_project #name {\ctxcommand{startproject ("#name")}}
-\def\syst_structure_start_arg_product #name {\ctxcommand{startproduct ("#name")}}
-\def\syst_structure_start_arg_component #name {\ctxcommand{startcomponent ("#name")}}
-\def\syst_structure_start_arg_environment#name {\ctxcommand{startenvironment("#name")}}
+\def\syst_structure_start_arg_project #name {\clf_startproject {#name}}
+\def\syst_structure_start_arg_product #name {\clf_startproduct {#name}}
+\def\syst_structure_start_arg_component #name {\clf_startcomponent {#name}}
+\def\syst_structure_start_arg_environment#name {\clf_startenvironment{#name}}
-\unexpanded\def\useproject [#name]{\ctxcommand{useproject ("#name")}}
-\unexpanded\def\useproduct [#name]{\ctxcommand{useproduct ("#name")}}
-\unexpanded\def\usecomponent [#name]{\ctxcommand{usecomponent ("#name")}}
-\unexpanded\def\useenvironment[#name]{\ctxcommand{useenvironment("#name")}}
+\unexpanded\def\useproject [#name]{\clf_useproject {#name}}
+\unexpanded\def\useproduct [#name]{\clf_useproduct {#name}}
+\unexpanded\def\usecomponent [#name]{\clf_usecomponent {#name}}
+\unexpanded\def\useenvironment[#name]{\clf_useenvironment{#name}}
-\unexpanded\def\syst_structure_start_opt_project [#name]{\ctxcommand{startproject ("#name")}}
-\unexpanded\def\syst_structure_start_opt_product [#name]{\ctxcommand{startproduct ("#name")}}
-\unexpanded\def\syst_structure_start_opt_component [#name]{\ctxcommand{startcomponent ("#name")}}
-\unexpanded\def\syst_structure_start_opt_environment[#name]{\ctxcommand{startenvironment("#name")}}
+\unexpanded\def\syst_structure_start_opt_project [#name]{\clf_startproject {#name}}
+\unexpanded\def\syst_structure_start_opt_product [#name]{\clf_startproduct {#name}}
+\unexpanded\def\syst_structure_start_opt_component [#name]{\clf_startcomponent {#name}}
+\unexpanded\def\syst_structure_start_opt_environment[#name]{\clf_startenvironment{#name}}
\newconditional\endofinput % hack, needed because \endinput happens one level down in the input when we write from lua
@@ -170,34 +192,35 @@
\endinput
\fi}
-\unexpanded\def\stopproject {\ctxcommand{stopproject ()}\honorendofinput}
-\unexpanded\def\stopproduct {\ctxcommand{stopproduct ()}\honorendofinput}
-\unexpanded\def\stopcomponent {\ctxcommand{stopcomponent ()}\honorendofinput}
-\unexpanded\def\stopenvironment {\ctxcommand{stopenvironment()}\honorendofinput}
+\unexpanded\def\stopproject {\clf_stopproject \honorendofinput}
+\unexpanded\def\stopproduct {\clf_stopproduct \honorendofinput}
+\unexpanded\def\stopcomponent {\clf_stopcomponent \honorendofinput}
+\unexpanded\def\stopenvironment {\clf_stopenvironment\honorendofinput}
-\def\currentproject {\ctxcommand{currentproject ()}}
-\def\currentproduct {\ctxcommand{currentproduct ()}}
-\def\currentcomponent {\ctxcommand{currentcomponent ()}}
-\def\currentenvironment {\ctxcommand{currentenvironment()}}
-\def\processedfile {\ctxcommand{processedfile()}}
-\def\processedfiles {\ctxcommand{processedfiles()}}
+ \let\currentproject \clf_currentproject
+ \let\currentproduct \clf_currentproduct
+ \let\currentcomponent \clf_currentcomponent
+ \let\currentenvironment \clf_currentenvironment
+ \let\processedfile \clf_processedfile
+ \let\processedfiles \clf_processedfiles
-\unexpanded\def\dostarttextfile #name{\ctxcommand{dostarttextfile(name)}}
-\unexpanded\def\dostoptextfile {\ctxcommand{dostoptextfile()}}
+\unexpanded\def\dostarttextfile #name{\clf_dostarttextfile{#name}}
+\unexpanded\def\dostoptextfile {\clf_dostoptextfile}
-\unexpanded\def\loadtexfile [#name]{\ctxcommand{usetexfile("#name")}}
-\unexpanded\def\loadluafile [#name]{\ctxcommand{useluafile("#name")}}
-\unexpanded\def\loadcldfile [#name]{\ctxcommand{usecldfile("#name")}}
-\unexpanded\def\loadanyfile [#name]{\ctxcommand{useanyfile("#name")}}
+\unexpanded\def\loadtexfile [#name]{\clf_usetexfile{#name}}
+\unexpanded\def\loadluafile [#name]{\clf_useluafile{#name}}
+\unexpanded\def\loadcldfile [#name]{\clf_usecldfile{#name}}
+\unexpanded\def\loadanyfile [#name]{\clf_useanyfile{#name}}
-\unexpanded\def\loadtexfileonce [#name]{\ctxcommand{usetexfile("#name",true)}}
-\unexpanded\def\loadluafileonce [#name]{\ctxcommand{useluafile("#name",true)}}
-\unexpanded\def\loadcldfileonce [#name]{\ctxcommand{usecldfile("#name",true)}}
-\unexpanded\def\loadanyfileonce [#name]{\ctxcommand{useanyfile("#name",true)}}
+\unexpanded\def\loadtexfileonce [#name]{\clf_usetexfileonce{#name}}
+\unexpanded\def\loadluafileonce [#name]{\clf_useluafileonce{#name}}
+\unexpanded\def\loadcldfileonce [#name]{\clf_usecldfileonce{#name}}
+\unexpanded\def\loadanyfileonce [#name]{\clf_useanyfileonce{#name}}
%D Handy for modules that have a test/demo appended (true added).
-\unexpanded\def\continueifinputfile#name{\ctxcommand{doifelsecontinuewithfile("#name",true)}\relax\endinput} % we cannot do \endinput via lua
+\unexpanded\def\continueifinputfile#name%
+ {\clf_doifelsecontinuewithfile{#name}\relax\endinput} % we cannot do \endinput via lua
% \startproject test
% 1: \startmode[*project] project \stopmode \endgraf
@@ -216,7 +239,7 @@
% {\letvalue{\e!stop\v!localenvironment}\relax}
% {\grabuntil{\e!stop\v!localenvironment}\gobbleoneargument}}
%
-% \setvalue{\v!localenvironment}{\doifnextoptionalelse\uselocalenvironment\redolocalenvironment}
+% \setvalue{\v!localenvironment}{\doifnextoptionalcselse\uselocalenvironment\redolocalenvironment}
%
% \def\redolocalenvironment#1 {\uselocalenvironment[#1]}
% \def\uselocalenvironment[#1]{\doexecutefileonce{#1}}
@@ -233,13 +256,13 @@
\newsystemmode\v!environment
\unexpanded\def\startprojectindeed
- {\starttext
+ {%starttext
\pushsystemmode\v!project
\setsystemmode\v!project}
\unexpanded\def\stopprojectindeed
{\popsystemmode\v!project
- \stoptext
+ %stoptext
\signalendofinput\v!project}
\unexpanded\def\startproductindeed
@@ -274,7 +297,7 @@
%D Relatively new (might move as it depends on setups):
-\newtoks\everysetupdocument
+%newtoks\everysetupdocument
\unexpanded\def\startdocument % todo: dostarttagged\t!document
{\dosingleargument\syst_structure_document_start}
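The start/stop commands above end up in gotonextlevel and gotopreviouslevel in file-job.lua, which after this commit only track the structure type on a stack (the separate path stack was dropped in favour of pushpath/poppath). A stand-alone sketch of that bookkeeping; the real code also pushes a tree node and fires the start/stop hooks, and the string type names here replace the v_* variables.

local typestack   = { }
local currenttype = "text"

local function gotonextlevel(what)
    typestack[#typestack+1] = currenttype -- remember where we came from
    currenttype = what
end

local function gotopreviouslevel()
    currenttype = table.remove(typestack) or "text" -- fall back to text, as the original does
end

gotonextlevel("product")
gotonextlevel("component")
print(currenttype) -- component
gotopreviouslevel()
print(currenttype) -- product
gotopreviouslevel()
print(currenttype) -- text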
diff --git a/tex/context/base/file-lib.lua b/tex/context/base/file-lib.lua
index 3311321c5..361608ea3 100644
--- a/tex/context/base/file-lib.lua
+++ b/tex/context/base/file-lib.lua
@@ -9,10 +9,18 @@ if not modules then modules = { } end modules ['file-lib'] = {
-- todo: check all usage of truefilename at the tex end and remove
-- files there (and replace definitions by full names)
-local format = string.format
+local format, gsub = string.format, string.gsub
-local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
-local report_files = logs.reporter("files","readfile")
+local trace_libraries = false trackers.register("resolvers.libraries", function(v) trace_libraries = v end)
+----- trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
+
+local report_library = logs.reporter("files","library")
+----- report_files = logs.reporter("files","readfile")
+
+local suffixonly = file.suffix
+local removesuffix = file.removesuffix
+
+local getreadfilename = resolvers.getreadfilename
local loaded = { }
local defaultpatterns = { "%s" }
@@ -25,7 +33,7 @@ local function defaultfailure(name)
report_files("asked name %a, not found",name)
end
-function commands.uselibrary(specification) -- todo; reporter
+function resolvers.uselibrary(specification) -- todo: reporter
local name = specification.name
if name and name ~= "" then
local patterns = specification.patterns or defaultpatterns
@@ -34,32 +42,49 @@ function commands.uselibrary(specification) -- todo; reporter
local onlyonce = specification.onlyonce
local files = utilities.parsers.settings_to_array(name)
local truename = environment.truefilename
- local done = false
+ local function found(filename)
+ local somename = truename and truename(filename) or filename
+ local foundname = getreadfilename("any",".",somename) -- maybe some day also an option not to backtrack .. and ../.. (or block global)
+ return foundname ~= "" and foundname
+ end
for i=1,#files do
local filename = files[i]
- if not loaded[filename] then
+ if loaded[filename] then
+ -- next one
+ else
if onlyonce then
loaded[filename] = true -- todo: base this on return value
end
- for i=1,#patterns do
- local somename = format(patterns[i],filename)
- if truename then
- somename = truename(somename)
+ local foundname = nil
+ local barename = removesuffix(filename)
+ -- direct search (we have an explicit suffix)
+ if barename ~= filename then
+ foundname = found(filename)
+ if trace_libraries then
+ report_library("checking %a: %s",filename,foundname or "not found")
end
- local foundname = resolvers.getreadfilename("any",".",somename) or ""
- if foundname ~= "" then
- action(name,foundname)
- done = true
- break
+ end
+ if not foundname then
+ -- pattern based search
+ for i=1,#patterns do
+ local wanted = format(patterns[i],barename)
+ foundname = found(wanted)
+ if trace_libraries then
+ report_library("checking %a as %a: %s",filename,wanted,foundname or "not found")
+ end
+ if foundname then
+ break
+ end
end
end
- if done then
- break
+ if foundname then
+ action(name,foundname)
+ elseif failure then
+ failure(name)
end
end
end
- if failure and not done then
- failure(name)
- end
end
end
+
+commands.uselibrary = resolvers.uselibrary -- for the moment
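The reworked resolvers.uselibrary above first tries the name as given (when it carries an explicit suffix) and only then falls back to the configured patterns. A stand-alone sketch of that order; the known table stands in for resolvers.getreadfilename and the pattern list is an invented illustration, not the one built in file-mod.lua.

-- pretend file database standing in for the real lookup
local known = {
    ["m-fancybreak.mkiv"] = "/texmf/tex/context/modules/m-fancybreak.mkiv",
}

local function found(name)
    return known[name]
end

local patterns = { "m-%s.mkiv", "m-%s.tex" } -- illustration only

local function uselibrary(name)
    -- 1: direct search when an explicit suffix is present
    if name:match("%.%a+$") then
        local hit = found(name)
        if hit then return hit end
    end
    -- 2: pattern based search on the bare name
    local bare = name:gsub("%.%a+$", "")
    for i = 1, #patterns do
        local hit = found(string.format(patterns[i], bare))
        if hit then return hit end
    end
end

print(uselibrary("m-fancybreak.mkiv")) -- direct hit
print(uselibrary("fancybreak"))        -- found via the m-%s.mkiv pattern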
diff --git a/tex/context/base/file-mod.lua b/tex/context/base/file-mod.lua
index 822f37c86..7f3763c5d 100644
--- a/tex/context/base/file-mod.lua
+++ b/tex/context/base/file-mod.lua
@@ -18,24 +18,41 @@ if not modules then modules = { } end modules ['file-mod'] = {
at the <l n='tex'/> side.</p>
--ldx]]--
-local format, concat, tonumber = string.format, table.concat, tonumber
+local format, find, concat, tonumber = string.format, string.find, table.concat, tonumber
-local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
+local trace_modules = false trackers .register("modules.loading", function(v) trace_modules = v end)
+local permit_unprefixed = false directives.register("modules.permitunprefixed", function(v) permit_unprefixed = v end)
-local report_modules = logs.reporter("resolvers","modules")
+local report_modules = logs.reporter("resolvers","modules")
-commands = commands or { }
-local commands = commands
+local commands = commands
+local context = context
+local implement = interfaces.implement
-local context = context
-
-local findbyscheme = resolvers.finders.byscheme -- use different one
-local iterator = utilities.parsers.iterator
+local findbyscheme = resolvers.finders.byscheme -- use different one
+local iterator = utilities.parsers.iterator
-- modules can have a specific suffix or can specify one
-local prefixes = { "m", "p", "s", "x", "v", "t" }
-local suffixes = { "mkvi", "mkiv", "tex", "cld", "lua" } -- order might change and how about cld
+local prefixes = {
+ "m", -- module, extends functionality
+ "p", -- private code
+ "s", -- styles
+ "x", -- xml specific modules
+ -- "v", -- an old internal one for examples
+ "t", -- third party extensions
+}
+
+-- the order might change and how about cld
+
+local suffixes = {
+ "mkvi", -- proprocessed mkiv files
+ "mkiv", -- mkiv files
+ "tex", -- normally source code files
+ "cld", -- context lua documents (often stand alone)
+ "lua", -- lua files
+}
+
local modstatus = { }
local function usemodule(name,hasscheme)
@@ -79,11 +96,11 @@ local function usemodule(name,hasscheme)
end
end
-function commands.usemodules(prefix,askedname,truename)
+function environment.usemodules(prefix,askedname,truename)
local truename = truename or environment.truefilename(askedname)
local hasprefix = prefix and prefix ~= ""
local hashname = ((hasprefix and prefix) or "*") .. "-" .. truename
- local status = modstatus[hashname]
+ local status = modstatus[hashname] or false -- yet unset
if status == 0 then
-- not found
elseif status == 1 then
@@ -117,7 +134,12 @@ function commands.usemodules(prefix,askedname,truename)
end
if status then
-- ok, don't change
- elseif usemodule(truename) then
+ elseif find(truename,"%-") and usemodule(truename) then
+ -- assume a user namespace
+ report_modules("using user prefixed file %a",truename)
+ status = 1
+ elseif permit_unprefixed and usemodule(truename) then
+ report_modules("using unprefixed file %a",truename)
status = 1
else
status = 0
@@ -161,23 +183,60 @@ end)
-- moved from syst-lua.lua:
-local splitter = lpeg.tsplitter(lpeg.S(". "),tonumber)
+local lpegmatch = lpeg.match
+local splitter = lpeg.tsplitter(lpeg.S(". "),tonumber)
-function commands.doifolderversionelse(one,two) -- one >= two
- if not two then
+local function comparedversion(one,two) -- one >= two
+ if not two or two == "" then
one, two = environment.version, one
elseif one == "" then
one = environment.version
end
- one = lpeg.match(splitter,one)
- two = lpeg.match(splitter,two)
+ one = lpegmatch(splitter,one)
+ two = lpegmatch(splitter,two)
one = (one[1] or 0) * 10000 + (one[2] or 0) * 100 + (one[3] or 0)
two = (two[1] or 0) * 10000 + (two[2] or 0) * 100 + (two[3] or 0)
- commands.doifelse(one>=two)
+ if one < two then
+ return -1
+ elseif one > two then
+ return 1
+ else
+ return 0
+ end
end
-function commands.useluamodule(list)
+environment.comparedversion = comparedversion
+
+
+function environment.useluamodule(list)
for filename in iterator(list) do
environment.loadluafile(filename)
end
end
+
+local strings = interfaces.strings
+
+implement {
+ name = "usemodules",
+ actions = environment.usemodules,
+ arguments = strings[2]
+}
+
+implement {
+ name = "doifelseolderversion",
+ actions = function(one,two) commands.doifelse(comparedversion(one,two) >= 0) end,
+ arguments = strings[2]
+}
+
+implement {
+ name = "useluamodule",
+ actions = environment.useluamodule,
+ arguments = "string"
+}
+
+implement {
+ name = "loadluamodule",
+ actions = function(name) dofile(resolvers.findctxfile(name)) end, -- hack
+ arguments = "string"
+}
+
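The comparedversion helper above folds a yyyy.mm.dd string (mm and dd optional) into one integer so that comparing versions becomes plain arithmetic with a -1/0/1 result. The same folding without the lpeg splitter, as a sketch rather than the library code:

-- fold "yyyy.mm.dd" into a single comparable number
local function foldversion(s)
    local y, m, d = s:match("^(%d+)%.?(%d*)%.?(%d*)$")
    return (tonumber(y) or 0) * 10000 + (tonumber(m) or 0) * 100 + (tonumber(d) or 0)
end

local function comparedversion(one,two) -- one >= two ?
    local a, b = foldversion(one), foldversion(two)
    return a < b and -1 or a > b and 1 or 0
end

print(comparedversion("2015.05.16","2015.04")) --> 1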
diff --git a/tex/context/base/file-mod.mkvi b/tex/context/base/file-mod.mkvi
index 00966a442..f043a68e8 100644
--- a/tex/context/base/file-mod.mkvi
+++ b/tex/context/base/file-mod.mkvi
@@ -37,39 +37,6 @@
\let\usemodule \usemodules
\let\usetexmodule\usemodules
-% \def\strc_modules_use[#category][#name][#parameters]%
-% {\pushmacro\currentmodule
-% \pushmacro\currentmoduleparameters
-% \let\currentmoduleparameters\empty
-% \ifthirdargument
-% \singleexpandafter\strc_modules_use_a
-% \else\ifsecondargument
-% \doubleexpandafter\strc_modules_use_b
-% \else
-% \doubleexpandafter\strc_modules_use_c
-% \fi\fi
-% % here we load
-% \popmacro\currentmoduleparameters
-% \popmacro\currentmodule}
-%
-% \def\strc_modules_use_a[#category][#name][#parameters]%
-% {\doifsomething{#name}
-% {\def\currentmoduleparameters{#parameters}%
-% \processcommalist[#name]{\strc_modules_use_indeed{#category}}}}
-%
-% \def\strc_modules_use_b[#category][#name][#parameters]%
-% {\doifsomething{#name}
-% {\doifassignmentelse{#name}
-% {\def\currentmoduleparameters{#name}%
-% \processcommalist[#category]{\strc_modules_use_indeed\empty}}
-% {\processcommalist[#name]{\strc_modules_use_indeed{#category}}}}}
-%
-% \def\strc_modules_use_c[#category][#name][#parameters]%
-% {\processcommalist[#category]{\strc_modules_use_indeed\empty}}
-%
-% \def\strc_modules_use_indeed#category#name%
-% {\ctxcommand{usemodules("#category","#name","\truefilename{#name}")}}
-
\def\strc_modules_use[#category][#name][#parameters]% category=t|m|x|p|...
{\pushmacro\currentmodule
\pushmacro\currentmodulecategory
@@ -79,7 +46,7 @@
\edef\currentmodule {#name}%
\def \currentmoduleparameters{#parameters}%
\else\ifsecondargument
- \doifassignmentelse{#name}
+ \doifelseassignment{#name}
{\let\currentmodulecategory \empty
\edef\currentmodule {#category}%
\edef\currentmoduleparameters{#name}}
@@ -96,8 +63,8 @@
\popmacro\currentmodulecategory
\popmacro\currentmodule}
-\def\strc_modules_use_indeed#category#name% truefilename moved to lua end
- {\ctxcommand{usemodules("#category","#name")}} % ,"\truefilename{#name}")}}
+\def\strc_modules_use_indeed#category#name%
+ {\clf_usemodules{#category}{#name}}
\installcorenamespace{module}
@@ -107,7 +74,7 @@
\newtoks\everysetupmodule
\unexpanded\def\startmodule
- {\doifnextoptionalelse\syst_modules_start_yes\syst_modules_start_nop}
+ {\doifelsenextoptionalcs\syst_modules_start_yes\syst_modules_start_nop}
\def\syst_modules_start_yes[#name]%
{\pushmacro\currentmodule
@@ -157,8 +124,8 @@
\def\currentmoduleparameter{\moduleparameter\currentmodule} % no need for inlining
-\unexpanded\def\useluamodule [#name]{\ctxlua{dofile(resolvers.findctxfile("#name"))}}
-\unexpanded\def\luaenvironment #name {\ctxlua{dofile(resolvers.findctxfile("#name"))}}
+\unexpanded\def\useluamodule [#name]{\clf_loadluamodule{#name}} % why not use useluamodule
+\unexpanded\def\luaenvironment #name {\clf_loadluamodule{#name}}
% \usemodule[newmml]
% \usemodule[newmml][a=b]
@@ -238,10 +205,13 @@
%D
%D The version pattern is \type {yyyy.mm.dd} (with mm and dd being optional).
-\unexpanded\def\doifolderversionelse#parent#child{\ctxcommand{doifolderversionelse("#parent","#child")}}
-\unexpanded\def\doifoldercontextelse #child{\ctxcommand{doifolderversionelse("#child")}}
+\unexpanded\def\doifelseolderversion#parent#child{\clf_doifelseolderversion{#parent}{#child}}
+\unexpanded\def\doifelseoldercontext #child{\clf_doifelseolderversion{#child}{}}
+
+\let\doifolderversionelse\doifelseolderversion
+\let\doifoldercontextelse\doifelseoldercontext
-%D Relatively new:
+%D Relatively new (no need for a speedup here):
\unexpanded\def\syst_modules_direct_lua#1#2%
{\edef\m_module_command_command {#1}%
@@ -284,8 +254,8 @@
\ctxlua{#2(\!!bs#3\!!es,\!!bs#4\!!es)}%
\directsetup{module:#1:stop}}
-\def\installmodulecommandluaone #1#2{\unexpanded\def#1{\normalexpanded{\syst_modules_one_lua {\strippedcsname#1}{#2}}}}
-\def\installmodulecommandluatwo #1#2{\unexpanded\def#1{\normalexpanded{\syst_modules_two_lua {\strippedcsname#1}{#2}}}}
+\def\installmodulecommandluaone#1#2{\unexpanded\def#1{\normalexpanded{\syst_modules_one_lua{\strippedcsname#1}{#2}}}}
+\def\installmodulecommandluatwo#1#2{\unexpanded\def#1{\normalexpanded{\syst_modules_two_lua{\strippedcsname#1}{#2}}}}
% obsolete
%
@@ -299,6 +269,6 @@
% new:
-\unexpanded\def\useluamodule[#1]{\ctxcommand{useluamodule("#1")}}
+\unexpanded\def\useluamodule[#1]{\clf_useluamodule{#1}}
\protect \endinput
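The \clf_usemodules and \clf_useluamodule calls above are the TeX-side face of the implement blocks registered in file-mod.lua: interfaces.implement couples a name, an action and an argument recipe, and arguments are scanned at the TeX end instead of being serialized into a \ctxcommand Lua string. A toy model of the registration shape only (made-up content, not the real mechanism):

local registry = { }

local function implement(spec)                -- stand-in for interfaces.implement
    registry[spec.name] = spec
end

implement {
    name      = "usemodules",
    actions   = function(prefix,name) print("use module",prefix,name) end,
    arguments = { "string", "string" },
}

-- conceptually what \clf_usemodules{m}{mymodule} ends up calling
registry.usemodules.actions("m","mymodule")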
diff --git a/tex/context/base/file-res.lua b/tex/context/base/file-res.lua
index 8e65ba4c7..08a3e22af 100644
--- a/tex/context/base/file-res.lua
+++ b/tex/context/base/file-res.lua
@@ -6,23 +6,30 @@ if not modules then modules = { } end modules ['file-res'] = {
license = "see context related readme files"
}
-local format = string.format
+local format, find = string.format, string.find
local isfile = lfs.isfile
local is_qualified_path = file.is_qualified_path
-local hasscheme = url.hasscheme
+local hasscheme, urlescape = url.hasscheme, url.escape
-local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
-local report_files = logs.reporter("files","readfile")
+local trace_files = false trackers.register("resolvers.readfile", function(v) trace_files = v end)
+local trace_details = false trackers.register("resolvers.readfile.details", function(v) trace_details = v end)
+local report_files = logs.reporter("files","readfile")
resolvers.maxreadlevel = 2
-directives.register("resolvers.maxreadlevel", function(v) resolvers.maxreadlevel = tonumber(v) or resolvers.maxreadlevel end)
+directives.register("resolvers.maxreadlevel", function(v)
+ -- resolvers.maxreadlevel = (v == false and 0) or (v == true and 2) or tonumber(v) or 2
+ resolvers.maxreadlevel = v == false and 0 or tonumber(v) or 2
+end)
local finders, loaders, openers = resolvers.finders, resolvers.loaders, resolvers.openers
local found = { } -- can best be done in the resolver itself
local function readfilename(specification,backtrack,treetoo)
+ if trace_details then
+ report_files(table.serialize(specification,"specification"))
+ end
local name = specification.filename
local fnd = name and found[name]
if not fnd then
@@ -127,29 +134,39 @@ openers.fix = openers.file loaders.fix = loaders.file
openers.set = openers.file loaders.set = loaders.file
openers.any = openers.file loaders.any = loaders.file
-function getreadfilename(scheme,path,name) -- better do a split and then pass table
+local function getreadfilename(scheme,path,name) -- better do a split and then pass table
local fullname
if hasscheme(name) or is_qualified_path(name) then
fullname = name
else
+ if not find(name,"%",1,true) then
+ name = urlescape(name) -- if no % in names
+ end
fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name)
end
---~ print(">>>",fullname)
return resolvers.findtexfile(fullname) or "" -- can be more direct
end
resolvers.getreadfilename = getreadfilename
-function commands.getreadfilename(scheme,path,name)
- context(getreadfilename(scheme,path,name))
-end
-
-- a name belonging to the run but also honoring qualified
-function commands.locfilename(name)
- context(getreadfilename("loc",".",name))
-end
+local implement = interfaces.implement
-function commands.doiflocfileelse(name)
- commands.doifelse(isfile(getreadfilename("loc",".",name)))
-end
+implement {
+ name = "getreadfilename",
+ actions = { getreadfilename, context },
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "locfilename",
+ actions = { getreadfilename, context },
+ arguments = { "'loc'","'.'", "string" },
+}
+
+implement {
+ name = "doifelselocfile",
+ actions = { getreadfilename, isfile, commands.doifelse },
+ arguments = { "'loc'","'.'", "string" },
+}
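The implement entries above chain several functions in their actions list: for doifelselocfile the result of getreadfilename feeds isfile, whose boolean feeds commands.doifelse, and the quoted entries such as "'loc'" stand for constant arguments, which matches the removed commands.doiflocfileelse body. A standalone sketch of such a pipeline, with illustrative helpers:

local unpack = table.unpack or unpack        -- Lua 5.1/5.2 compatibility

-- feed the result of each action into the next one
local function chain(actions,...)
    local result = { ... }
    for i=1,#actions do
        result = { actions[i](unpack(result)) }
    end
    return unpack(result)
end

-- stand-ins for getreadfilename and isfile
local function getreadfilename(scheme,path,name) return path .. "/" .. name end
local function isfile(name) return name == "./rules.tex" end

print(chain({ getreadfilename, isfile },"loc",".","rules.tex")) --> true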
diff --git a/tex/context/base/file-res.mkvi b/tex/context/base/file-res.mkvi
index 5162e63be..17f4cf44c 100644
--- a/tex/context/base/file-res.mkvi
+++ b/tex/context/base/file-res.mkvi
@@ -70,7 +70,7 @@
\let\readfilename\empty
\def\syst_files_read_file#protocol#path#name% #true #false
- {\edef\readfilename{\ctxcommand{getreadfilename("#protocol","#path","#name")}}%
+ {\edef\readfilename{\clf_getreadfilename{#protocol}{#path}{#name}}%
\ifx\readfilename\empty
\expandafter\secondoftwoarguments
\else
@@ -145,7 +145,9 @@
%D \doiflocfileelse {filename} {before loading} {not found}
%D \stoptyping
-\unexpanded\def\doiflocfileelse#name{\ctxcommand{doiflocfileelse([[#name]])}}
- \def\locfilename #name{\ctxcommand{locfilename([[#name]])}}
+\unexpanded\def\doifelselocfile#name{\clf_doifelselocfile{#name}}
+ \def\locfilename #name{\clf_locfilename {#name}}
+
+\let\doiflocfileelse\doifelselocfile
\protect \endinput
diff --git a/tex/context/base/file-syn.lua b/tex/context/base/file-syn.lua
index c1f9a5f48..b6ad27c83 100644
--- a/tex/context/base/file-syn.lua
+++ b/tex/context/base/file-syn.lua
@@ -6,13 +6,16 @@ if not modules then modules = { } end modules ['file-syn'] = {
license = "see context related readme files"
}
-local report_files = logs.reporter("files")
environment.filesynonyms = environment.filesynonyms or { }
local filesynonyms = environment.filesynonyms
-local settings_to_array = utilities.parsers.settings_to_array
-local findfile = resolvers.findfile
+local settings_to_array = utilities.parsers.settings_to_array
+local findfile = resolvers.findfile
+
+local implement = interfaces.implement
+
+local report_files = logs.reporter("files")
storage.register("environment/filesynonyms", filesynonyms, "environment.filesynonyms")
@@ -27,11 +30,7 @@ end
environment.truefilename = truefilename
-function commands.truefilename(name)
- context(truefilename(name))
-end
-
-function commands.definefilesynonym(name,realname)
+function environment.definefilesynonym(name,realname)
local names = settings_to_array(name)
for i=1,#names do
local name = names[i]
@@ -43,7 +42,7 @@ function commands.definefilesynonym(name,realname)
end
end
-function commands.definefilefallback(name,alternatives)
+function environment.definefilefallback(name,alternatives)
local names = settings_to_array(alternatives)
for i=1,#names do
local realname = findfile(names[i])
@@ -53,3 +52,21 @@ function commands.definefilefallback(name,alternatives)
end
end
end
+
+implement {
+ name = "truefilename",
+ actions = { truefilename, context },
+ arguments = "string"
+}
+
+implement {
+ name = "definefilesynonym",
+ actions = environment.definefilesynonym,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "definefilefallback",
+ actions = environment.definefilefallback,
+ arguments = { "string", "string" }
+}
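The synonym and fallback definitions above only fill the environment.filesynonyms table; truefilename then resolves a name through it, possibly in several hops, since nesting is supported (as the mkvi comment below also notes). A minimal sketch of that resolution loop, not the stored/serialized version ConTeXt uses:

local filesynonyms = { }

local function definefilesynonym(name,realname)
    filesynonyms[name] = realname
end

local function truefilename(name)
    local seen = { }
    while filesynonyms[name] and not seen[name] do   -- follow nested synonyms, avoid cycles
        seen[name] = true
        name = filesynonyms[name]
    end
    return name
end

definefilesynonym("myfile","myfile-2015")
definefilesynonym("myfile-2015","myfile-2015.tex")
print(truefilename("myfile")) --> myfile-2015.tex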
diff --git a/tex/context/base/file-syn.mkvi b/tex/context/base/file-syn.mkvi
index b61bd8ae4..52ecd0d71 100644
--- a/tex/context/base/file-syn.mkvi
+++ b/tex/context/base/file-syn.mkvi
@@ -46,8 +46,8 @@
\unexpanded\def\definefilesynonym {\dodoubleempty \syst_files_define_synonym }
\unexpanded\def\definefilefallback{\dodoubleargument\syst_files_define_fallback} % still used?
-\def\syst_files_define_synonym [#name][#realname]{\ctxcommand{definefilesynonym ("#name","#realname")}}
-\def\syst_files_define_fallback[#name][#alternatives]{\ctxcommand{definefilefallback("#name","#alternatives")}}
+\def\syst_files_define_synonym [#name][#realname]{\clf_definefilesynonym {#name}{#realname}}
+\def\syst_files_define_fallback[#name][#alternatives]{\clf_definefilefallback{#name}{#alternatives}}
%D \macros
%D {truefilename}
@@ -61,6 +61,6 @@
%D
%D The implementation shows that nesting is supported.
-\def\truefilename#1{\ctxcommand{truefilename("#1")}}
+\def\truefilename#1{\clf_truefilename{#1}}
\protect \endinput
diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua
index adb4281b2..a96c6686e 100644
--- a/tex/context/base/font-afm.lua
+++ b/tex/context/base/font-afm.lua
@@ -15,6 +15,14 @@ n='otf'/>.</p>
<p>The following code still has traces of intermediate font support
where we handled font encodings. Eventually font encoding goes
away.</p>
+
+<p>Embedding a font involves creating temporary files, and depending
+on your system setup that can fail. It took more than a day to figure
+out why embedding sometimes failed in mingw luatex: running from a
+real path like c:\... failed, while running from say e:\..., which was
+a link, worked fine. The native windows binaries don't have this
+issue.</p>
+
--ldx]]--
local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
@@ -32,19 +40,26 @@ local trace_defining = false trackers.register("fonts.defining", function(v
local report_afm = logs.reporter("fonts","afm loading")
+local setmetatableindex = table.setmetatableindex
+
local findbinfile = resolvers.findbinfile
local definers = fonts.definers
local readers = fonts.readers
local constructors = fonts.constructors
+local fontloader = fontloader
+local font_to_table = fontloader.to_table
+local open_font = fontloader.open
+local close_font = fontloader.close
+
local afm = constructors.newhandler("afm")
local pfb = constructors.newhandler("pfb")
local afmfeatures = constructors.newfeatures("afm")
local registerafmfeature = afmfeatures.register
-afm.version = 1.410 -- incrementing this number one up will force a re-cache
+afm.version = 1.500 -- incrementing this number one up will force a re-cache
afm.cache = containers.define("fonts", "afm", afm.version, true)
afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
@@ -54,6 +69,8 @@ afm.addligatures = true -- best leave this set to true
afm.addtexligatures = true -- best leave this set to true
afm.addkerns = true -- best leave this set to true
+local overloads = fonts.mappings.overloads
+
local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
local function setmode(tfmdata,value)
@@ -210,10 +227,10 @@ end
local function get_indexes(data,pfbname)
data.resources.filename = resolvers.unresolve(pfbname) -- no shortcut
- local pfbblob = fontloader.open(pfbname)
+ local pfbblob = open_font(pfbname)
if pfbblob then
local characters = data.characters
- local pfbdata = fontloader.to_table(pfbblob)
+ local pfbdata = font_to_table(pfbblob)
if pfbdata then
local glyphs = pfbdata.glyphs
if glyphs then
@@ -221,6 +238,7 @@ local function get_indexes(data,pfbname)
report_afm("getting index data from %a",pfbname)
end
for index, glyph in next, glyphs do
+ -- for index, glyph in table.sortedhash(glyphs) do
local name = glyph.name
if name then
local char = characters[name]
@@ -238,7 +256,7 @@ local function get_indexes(data,pfbname)
elseif trace_loading then
report_afm("no data in pfb file %a",pfbname)
end
- fontloader.close(pfbblob)
+ close_font(pfbblob)
elseif trace_loading then
report_afm("invalid pfb file %a",pfbname)
end
@@ -306,7 +324,7 @@ by adding ligatures and kern information to the afm derived data. That
way we can set them faster when defining a font.</p>
--ldx]]--
-local addkerns, addligatures, addtexligatures, unify, normalize -- we will implement these later
+local addkerns, addligatures, addtexligatures, unify, normalize, fixnames -- we will implement these later
function afm.load(filename)
-- hm, for some reasons not resolved yet
@@ -336,6 +354,7 @@ function afm.load(filename)
get_indexes(data,pfbname)
elseif trace_loading then
report_afm("no pfb file for %a",filename)
+ -- data.resources.filename = "unset" -- better than loading the afm file
end
report_afm("unifying %a",filename)
unify(data,filename)
@@ -352,6 +371,7 @@ function afm.load(filename)
addkerns(data)
end
normalize(data)
+ fixnames(data)
report_afm("add tounicode data")
fonts.mappings.addtounicode(data,filename)
data.size = size
@@ -359,6 +379,7 @@ function afm.load(filename)
data.pfbsize = pfbsize
data.pfbtime = pfbtime
report_afm("saving %a in cache",name)
+ data.resources.unicodes = nil -- consistent with otf but here we save not much
data = containers.write(afm.cache, name, data)
data = containers.read(afm.cache,name)
end
@@ -410,7 +431,7 @@ unify = function(data, filename)
if unicode then
krn[unicode] = kern
else
- print(unicode,name)
+ -- print(unicode,name)
end
end
description.kerns = krn
@@ -422,13 +443,29 @@ unify = function(data, filename)
resources.filename = resolvers.unresolve(filename) -- no shortcut
resources.unicodes = unicodes -- name to unicode
resources.marks = { } -- todo
- resources.names = names -- name to index
+ -- resources.names = names -- name to index
resources.private = private
end
normalize = function(data)
end
+fixnames = function(data)
+ for k, v in next, data.descriptions do
+ local n = v.name
+ local r = overloads[n]
+ if r then
+ local name = r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name = name
+ v.unicode = r.unicode
+ end
+ end
+end
+
+
--[[ldx--
<p>These helpers extend the basic table with extra ligatures, texligatures
and extra kerns. This saves quite some lookups later.</p>
@@ -439,7 +476,7 @@ local addthem = function(rawdata,ligatures)
local descriptions = rawdata.descriptions
local resources = rawdata.resources
local unicodes = resources.unicodes
- local names = resources.names
+ -- local names = resources.names
for ligname, ligdata in next, ligatures do
local one = descriptions[unicodes[ligname]]
if one then
@@ -598,8 +635,8 @@ local function copytotfm(data)
local filename = constructors.checkedfilename(resources)
local fontname = metadata.fontname or metadata.fullname
local fullname = metadata.fullname or metadata.fontname
- local endash = unicodes['space']
- local emdash = unicodes['emdash']
+ local endash = 0x0020 -- space
+ local emdash = 0x2014
local spacer = "space"
local spaceunits = 500
--
@@ -659,7 +696,7 @@ local function copytotfm(data)
parameters.x_height = charxheight
else
-- same as otf
- local x = unicodes['x']
+ local x = 0x0078 -- x
if x then
local x = descriptions[x]
if x then
@@ -719,7 +756,34 @@ function afm.setfeatures(tfmdata,features)
end
end
-local function checkfeatures(specification)
+local function addtables(data)
+ local resources = data.resources
+ local lookuptags = resources.lookuptags
+ local unicodes = resources.unicodes
+ if not lookuptags then
+ lookuptags = { }
+ resources.lookuptags = lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v = type(k) == "number" and ("lookup " .. k) or k
+ t[k] = v
+ return v
+ end)
+ if not unicodes then
+ unicodes = { }
+ resources.unicodes = unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u, d in next, data.descriptions do
+ local n = d.name
+ if n then
+ t[n] = u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes) -- do we really need this?
end
local function afmtotfm(specification)
@@ -749,6 +813,7 @@ local function afmtotfm(specification)
if not tfmdata then
local rawdata = afm.load(afmname)
if rawdata and next(rawdata) then
+ addtables(rawdata)
adddimensions(rawdata)
tfmdata = copytotfm(rawdata)
if tfmdata and next(tfmdata) then
@@ -798,6 +863,7 @@ those that make sense for this format.</p>
local function prepareligatures(tfmdata,ligatures,value)
if value then
local descriptions = tfmdata.descriptions
+ local hasligatures = false
for unicode, character in next, tfmdata.characters do
local description = descriptions[unicode]
local dligatures = description.ligatures
@@ -813,17 +879,20 @@ local function prepareligatures(tfmdata,ligatures,value)
type = 0
}
end
+ hasligatures = true
end
end
+ tfmdata.properties.hasligatures = hasligatures
end
end
local function preparekerns(tfmdata,kerns,value)
if value then
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local unicodes = resources.unicodes
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local unicodes = resources.unicodes
local descriptions = tfmdata.descriptions
+ local haskerns = false
for u, chr in next, tfmdata.characters do
local d = descriptions[u]
local newkerns = d[kerns]
@@ -839,8 +908,10 @@ local function preparekerns(tfmdata,kerns,value)
kerns[uk] = v
end
end
+ haskerns = true
end
end
+ tfmdata.properties.haskerns = haskerns
end
end
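The addtables helper above installs a self-removing index metamethod on resources.unicodes: the first lookup of a glyph name fills the whole name-to-unicode table from the descriptions and then unhooks the handler, so every later lookup is a plain table access. The pattern in isolation, with a plain-Lua stand-in for table.setmetatableindex and made-up descriptions:

-- plain-Lua stand-in for table.setmetatableindex
local function setmetatableindex(t,f)
    return setmetatable(t,{ __index = f })
end

local descriptions = { [0x0041] = { name = "A" }, [0x0042] = { name = "B" } }
local unicodes     = { }

setmetatableindex(unicodes,function(t,k)
    setmetatableindex(unicodes,nil)      -- unhook: from now on normal lookups
    for u, d in next, descriptions do    -- fill name -> unicode once
        if d.name then
            t[d.name] = u
        end
    end
    return rawget(t,k)
end)

print(unicodes.B) --> 66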
diff --git a/tex/context/base/font-agl.lua b/tex/context/base/font-agl.lua
index 5ee34b028..122d1adc2 100644
--- a/tex/context/base/font-agl.lua
+++ b/tex/context/base/font-agl.lua
@@ -656,12 +656,18 @@ end
-- We load this table only when needed. We could use a loading mechanism and
-- return the table, but there are no more vectors like this, so why bother.
+--
+-- Well, we currently have this table preloaded anyway.
-fonts.encodings = fonts.encodings or { }
-
-fonts.encodings.agl = {
+local agl = {
names = names, -- unicode -> name
unicodes = unicodes, -- name -> unicode
synonyms = synonyms, -- merged into the other two
extras = extras, -- merged into the other two
}
+
+fonts = fonts or { }
+fonts.encodings = fonts.encodings or { }
+fonts.encodings.agl = agl
+
+return agl
diff --git a/tex/context/base/font-chk.lua b/tex/context/base/font-chk.lua
index 6dc1667bb..41205ce5e 100644
--- a/tex/context/base/font-chk.lua
+++ b/tex/context/base/font-chk.lua
@@ -9,12 +9,16 @@ if not modules then modules = { } end modules ['font-chk'] = {
-- possible optimization: delayed initialization of vectors
-- move to the nodes namespace
+local next = next
+
local formatters = string.formatters
local bpfactor = number.dimenfactors.bp
local fastcopy = table.fastcopy
local report_fonts = logs.reporter("fonts","checking")
+local allocate = utilities.storage.allocate
+
local fonts = fonts
fonts.checkers = fonts.checkers or { }
@@ -32,6 +36,8 @@ local getprivatenode = helpers.getprivatenode
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
+local afmfeatures = fonts.constructors.newfeatures("afm")
+local registerafmfeature = afmfeatures.register
local is_character = characters.is_character
local chardata = characters.data
@@ -40,10 +46,21 @@ local tasks = nodes.tasks
local enableaction = tasks.enableaction
local disableaction = tasks.disableaction
+local implement = interfaces.implement
+
local glyph_code = nodes.nodecodes.glyph
-local traverse_id = node.traverse_id
-local remove_node = nodes.remove
-local insert_node_after = node.insert_after
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local setfield = nuts.setfield
+
+local traverse_id = nuts.traverse_id
+local remove_node = nuts.remove
+local insert_node_after = nuts.insert_after
-- maybe in fonts namespace
-- deletion can be option
@@ -75,7 +92,7 @@ end
fonts.loggers.onetimemessage = onetimemessage
-local mapping = { -- this is just an experiment to illustrate some principles elsewhere
+local mapping = allocate { -- this is just an experiment to illustrate some principles elsewhere
lu = "placeholder uppercase red",
ll = "placeholder lowercase red",
lt = "placeholder uppercase red",
@@ -100,9 +117,15 @@ local mapping = { -- this is just an experiment to illustrate some principles el
so = "placeholder lowercase yellow",
}
-table.setmetatableindex(mapping,function(t,k) v = "placeholder unknown gray" t[k] = v return v end)
+table.setmetatableindex(mapping,
+ function(t,k)
+ v = "placeholder unknown gray"
+ t[k] = v
+ return v
+ end
+)
-local fakes = {
+local fakes = allocate {
{
name = "lowercase",
code = ".025 -.175 m .425 -.175 l .425 .525 l .025 .525 l .025 -.175 l .025 0 l .425 0 l .025 -.175 m h S",
@@ -140,7 +163,7 @@ local fakes = {
},
}
-local variants = {
+local variants = allocate {
{ tag = "gray", r = .6, g = .6, b = .6 },
{ tag = "red", r = .6, g = 0, b = 0 },
{ tag = "green", r = 0, g = .6, b = 0 },
@@ -150,7 +173,7 @@ local variants = {
{ tag = "yellow", r = .6, g = .6, b = 0 },
}
-local pdf_blob = "pdf: q %0.6f 0 0 %0.6f 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0.05 w %s Q"
+local pdf_blob = "pdf: q %0.6F 0 0 %0.6F 0 0 cm %s %s %s rg %s %s %s RG 10 M 1 j 1 J 0.05 w %s Q"
local cache = { } -- saves some tables but not that impressive
@@ -203,11 +226,34 @@ function commands.getplaceholderchar(name)
context(helpers.getprivatenode(fontdata[id],name))
end
+local function placeholder(font,char)
+ local tfmdata = fontdata[font]
+ local properties = tfmdata.properties
+ local privates = properties.privates
+ local category = chardata[char].category
+ local fakechar = mapping[category]
+ local p = privates and privates[fakechar]
+ if not p then
+ addmissingsymbols(tfmdata)
+ p = properties.privates[fakechar]
+ end
+ if properties.lateprivates then
+ -- frozen already
+ return "node", getprivatenode(tfmdata,fakechar)
+ else
+ -- good, we have \definefontfeature[default][default][missing=yes]
+ return "char", p
+ end
+end
+
+checkers.placeholder = placeholder
+
function checkers.missing(head)
local lastfont, characters, found = nil, nil, nil
+ head = tonut(head)
for n in traverse_id(glyph_code,head) do -- faster than while loop so we delay removal
- local font = n.font
- local char = n.char
+ local font = getfont(n)
+ local char = getchar(n)
if font ~= lastfont then
characters = fontcharacters[font]
lastfont = font
@@ -235,42 +281,34 @@ function checkers.missing(head)
end
elseif action == "replace" then
for i=1,#found do
- local n = found[i]
- local font = n.font
- local char = n.char
- local tfmdata = fontdata[font]
- local properties = tfmdata.properties
- local privates = properties.privates
- local category = chardata[char].category
- local fakechar = mapping[category]
- local p = privates and privates[fakechar]
- if not p then
- addmissingsymbols(tfmdata)
- p = properties.privates[fakechar]
- end
- if properties.lateprivates then -- .frozen
- -- bad, we don't have them at the tex end
- local fake = getprivatenode(tfmdata,fakechar)
- insert_node_after(head,n,fake)
- head = remove_node(head,n,true)
+ local node = found[i]
+ local kind, char = placeholder(getfont(node),getchar(node))
+ if kind == "node" then
+ insert_node_after(head,node,tonut(char))
+ head = remove_node(head,node,true)
+ elseif kind == "char" then
+ setfield(node,"char",char)
else
- -- good, we have \definefontfeature[default][default][missing=yes]
- n.char = p
+ -- error
end
end
else
-- maybe write a report to the log
end
- return head, false
+ return tonode(head), false
end
-local relevant = { "missing (will be deleted)", "missing (will be flagged)", "missing" }
+local relevant = {
+ "missing (will be deleted)",
+ "missing (will be flagged)",
+ "missing"
+}
-function checkers.getmissing(id)
+local function getmissing(id)
if id then
- local list = checkers.getmissing(font.current())
+ local list = getmissing(font.current())
if list then
- local _, list = next(checkers.getmissing(font.current()))
+ local _, list = next(getmissing(font.current()))
return list
else
return { }
@@ -300,6 +338,8 @@ function checkers.getmissing(id)
end
end
+checkers.getmissing = getmissing
+
local tracked = false
trackers.register("fonts.missing", function(v)
@@ -315,24 +355,6 @@ trackers.register("fonts.missing", function(v)
action = v
end)
-function commands.checkcharactersinfont()
- enableaction("processors","fonts.checkers.missing")
- tracked = true
-end
-
-function commands.removemissingcharacters()
- enableaction("processors","fonts.checkers.missing")
- action = "remove"
- tracked = true
-end
-
-function commands.replacemissingcharacters()
- enableaction("processors","fonts.checkers.missing")
- action = "replace"
- otffeatures.defaults.missing = true
- tracked = true
-end
-
local report_characters = logs.reporter("fonts","characters")
local report_character = logs.reporter("missing")
@@ -393,3 +415,46 @@ local function expandglyph(characters,index,done)
end
helpers.expandglyph = expandglyph
+
+-- should not be needed as we add .notdef in the engine
+
+local dummyzero = {
+ -- width = 0,
+ -- height = 0,
+ -- depth = 0,
+ commands = { { "special", "" } },
+}
+
+local function adddummysymbols(tfmdata,...)
+ local characters = tfmdata.characters
+ if not characters[0] then
+ characters[0] = dummyzero
+ end
+ -- if not characters[1] then
+ -- characters[1] = dummyzero -- test only
+ -- end
+end
+
+registerotffeature {
+ name = "dummies",
+ description = "dummy symbols",
+ default = true,
+ manipulators = {
+ base = adddummysymbols,
+ node = adddummysymbols,
+ }
+}
+
+registerafmfeature {
+ name = "dummies",
+ description = "dummy symbols",
+ default = true,
+ manipulators = {
+ base = adddummysymbols,
+ node = adddummysymbols,
+ }
+}
+
+-- callback.register("char_exists",function(f,c) -- to slow anyway as called often so we should flag in tfmdata
+-- return true
+-- end)
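The checkers.missing pass above walks the glyph nodes and, for characters the font does not provide, either removes them, flags them, or swaps in a placeholder chosen by the character's Unicode category through the mapping table, with a metatable default for unknown categories. The selection logic reduced to plain Lua (placeholder names here are illustrative, node handling left out):

-- category -> placeholder, with a default for categories not listed
local mapping = {
    lu = "placeholder uppercase red",
    ll = "placeholder lowercase red",
}
setmetatable(mapping,{ __index = function(t,k)
    local v = "placeholder unknown gray"
    t[k] = v
    return v
end })

local action = "replace"             -- the tracker value: remove, replace or flag

local function handlemissing(category)
    if action == "remove" then
        return nil                   -- drop the glyph
    elseif action == "replace" then
        return mapping[category]     -- swap in the category placeholder
    else
        return "flagged"             -- only report it
    end
end

print(handlemissing("lu")) --> placeholder uppercase red
print(handlemissing("sm")) --> placeholder unknown gray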
diff --git a/tex/context/base/font-chk.mkiv b/tex/context/base/font-chk.mkiv
index d436388de..4c8967532 100644
--- a/tex/context/base/font-chk.mkiv
+++ b/tex/context/base/font-chk.mkiv
@@ -15,8 +15,15 @@
\registerctxluafile{font-chk}{1.001}
-\unexpanded\def\checkcharactersinfont {\ctxcommand{checkcharactersinfont()}}
-\unexpanded\def\removemissingcharacters {\ctxcommand{removemissingcharacters()}}
-\unexpanded\def\replacemissingcharacters{\ctxcommand{replacemissingcharacters()}}
+\tracinglostchars\zerocount
+
+% Use this instead:
+%
+% \definefontfeature[default][default][missing=yes]
+% \enabletrackers[fonts.missing=replace]
+
+\unexpanded\def\checkcharactersinfont {\enabletrackers[fonts.missing]}
+\unexpanded\def\removemissingcharacters {\enabletrackers[fonts.missing=remove]}
+\unexpanded\def\replacemissingcharacters{\enabletrackers[fonts.missing=replace]}
\endinput
diff --git a/tex/context/base/font-cid.lua b/tex/context/base/font-cid.lua
index e4b565313..0eaacdfbd 100644
--- a/tex/context/base/font-cid.lua
+++ b/tex/context/base/font-cid.lua
@@ -74,7 +74,7 @@ local function loadcidfile(filename)
ordering = ordering,
filename = filename,
unicodes = unicodes,
- names = names
+ names = names,
}
end
end
@@ -112,15 +112,28 @@ function cid.getmap(specification)
report_otf("invalid cidinfo specification, table expected")
return
end
- local registry = specification.registry
- local ordering = specification.ordering
+ local registry = specification.registry
+ local ordering = specification.ordering
local supplement = specification.supplement
- -- check for already loaded file
- local filename = format(registry,ordering,supplement)
- local found = cidmap[lower(filename)]
+ local filename = format(registry,ordering,supplement)
+ local lowername = lower(filename)
+ local found = cidmap[lowername]
if found then
return found
end
+ if ordering == "Identity" then
+ local found = {
+ supplement = supplement,
+ registry = registry,
+ ordering = ordering,
+ filename = filename,
+ unicodes = { },
+ names = { },
+ }
+ cidmap[lowername] = found
+ return found
+ end
+ -- check for already loaded file
if trace_loading then
report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
end
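The new branch in cid.getmap above short-circuits the Identity ordering: instead of locating and parsing a cidmap file it caches an empty map keyed on the lowercased registry/ordering/supplement combination. A sketch of that caching shape (the filename template is illustrative):

local cidmap = { }

local function getmap(registry,ordering,supplement)
    local filename  = string.format("%s-%s-%s",registry,ordering,supplement)
    local lowername = filename:lower()
    local found     = cidmap[lowername]
    if found then
        return found
    end
    if ordering == "Identity" then            -- identity mappings need no file at all
        found = {
            registry   = registry,
            ordering   = ordering,
            supplement = supplement,
            filename   = filename,
            unicodes   = { },
            names      = { },
        }
        cidmap[lowername] = found
        return found
    end
    -- otherwise a cidmap file would be searched, parsed and cached here
end

print(getmap("Adobe","Identity",0).filename) --> Adobe-Identity-0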
diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua
index f5e17f1da..cbc1953f4 100644
--- a/tex/context/base/font-col.lua
+++ b/tex/context/base/font-col.lua
@@ -17,7 +17,12 @@ local type, next, toboolean = type, next, toboolean
local gmatch = string.gmatch
local fastcopy = table.fastcopy
-local traverse_id = nodes.traverse_id
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local setfield = nuts.setfield
+local traverse_id = nuts.traverse_id
local settings_to_hash = utilities.parsers.settings_to_hash
@@ -35,11 +40,14 @@ local vectors = collections.vectors or { }
collections.vectors = vectors
local fontdata = fonts.hashes.identifiers
+local chardata = fonts.hashes.characters
local glyph_code = nodes.nodecodes.glyph
local currentfont = font.current
local fontpatternhassize = fonts.helpers.fontpatternhassize
+local implement = interfaces.implement
+
local list = { }
local current = 0
local enabled = false
@@ -199,7 +207,7 @@ end
--
-- if lpegmatch(okay,name) then
-function collections.prepare(name) -- we can do this in lua now
+function collections.prepare(name) -- we can do this in lua now .. todo
current = currentfont()
if vectors[current] then
return
@@ -244,23 +252,23 @@ end
function collections.process(head) -- this way we keep feature processing
local done = false
- for n in traverse_id(glyph_code,head) do
- local v = vectors[n.font]
+ for n in traverse_id(glyph_code,tonut(head)) do
+ local v = vectors[getfont(n)]
if v then
- local id = v[n.char]
+ local id = v[getchar(n)]
if id then
if type(id) == "table" then
local newid, newchar = id[1], id[2]
if trace_collecting then
report_fonts("remapping character %C in font %a to character %C in font %a",getchar(n),getfont(n),newchar,newid)
end
- n.font = newid
- n.char = newchar
+ setfield(n,"font",newid)
+ setfield(n,"char",newchar)
else
if trace_collecting then
report_fonts("remapping font %a to %a for character %C",getfont(n),id,getchar(n))
end
- n.font = id
+ setfield(n,"font",id)
end
end
end
@@ -268,11 +276,58 @@ function collections.process(head) -- this way we keep feature processing
return head, done
end
+function collections.found(font,char) -- this way we keep feature processing
+ if not char then
+ font, char = currentfont(), font
+ end
+ if chardata[font][char] then
+ return true -- in normal font
+ else
+ local v = vectors[font]
+ return v and v[char] and true or false
+ end
+end
+
-- interface
-commands.fontcollectiondefine = collections.define
-commands.fontcollectionreset = collections.reset
-commands.fontcollectionprepare = collections.prepare
-commands.fontcollectionreport = collections.report
-commands.fontcollectionregister = collections.registermain
-commands.fontcollectionclone = collections.clonevector
+implement {
+ name = "fontcollectiondefine",
+ actions = collections.define,
+ arguments = { "string", "string", "string", "string" }
+}
+
+implement {
+ name = "fontcollectionreset",
+ actions = collections.reset,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "fontcollectionprepare",
+ actions = collections.prepare,
+ arguments = "string"
+}
+
+implement {
+ name = "fontcollectionreport",
+ actions = collections.report,
+ arguments = "string"
+}
+
+implement {
+ name = "fontcollectionregister",
+ actions = collections.registermain,
+ arguments = "string"
+}
+
+implement {
+ name = "fontcollectionclone",
+ actions = collections.clonevector,
+ arguments = "string"
+}
+
+implement {
+ name = "doifelsecharinfont",
+ actions = { collections.found, commands.doifelse },
+ arguments = { "integer" }
+}
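collections.found above answers whether a character is available for a font, either in the font itself or through one of its fallback vectors; the doifelsecharinfont scanner exposes that test to TeX. Its lookup logic, with stand-in hash tables:

local chardata = { [1] = { [0x61] = true } }   -- font id 1 provides "a" itself
local vectors  = { [1] = { [0x3B1] = true } }  -- a fallback vector adds greek alpha

local function found(font,char)
    if chardata[font] and chardata[font][char] then
        return true                             -- present in the font itself
    end
    local v = vectors[font]
    return v and v[char] and true or false      -- or provided by a fallback
end

print(found(1,0x61), found(1,0x3B1), found(1,0x4E00)) --> true true false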
diff --git a/tex/context/base/font-col.mkvi b/tex/context/base/font-col.mkvi
index 34a1b04a8..bc8e8151e 100644
--- a/tex/context/base/font-col.mkvi
+++ b/tex/context/base/font-col.mkvi
@@ -34,13 +34,13 @@
\unexpanded\def\resetfontfallback {\dodoubleempty \font_fallbacks_reset }
\def\font_fallbacks_define[#name][#font][#ranges][#settings]%
- {\ctxcommand{fontcollectiondefine("#name","#font",\!!bs#ranges\!!es,\!!bs#settings\!!es)}}
+ {\clf_fontcollectiondefine{#name}{#font}{#ranges}{#settings}}
\def\font_fallbacks_reset[#name][#font]%
- {\ctxcommand{fontcollectionreset("#name","#font")}}
+ {\clf_fontcollectionreset{#name}{#font}}
\def\font_fallbacks_prepare#name%
- {\ctxcommand{fontcollectionprepare("#name")}} % add fallbacks to last font
+ {\clf_fontcollectionprepare{#name}} % add fallbacks to last font
% we might as well move the handling to lua but then we need to pass the
% fallbacks, skewchar etc.
@@ -77,7 +77,7 @@
{\endgroup}
\def\font_fallbacks_clone_unique#specification#scale% kind of dododefinefont
- {\ctxcommand{fontcollectionreport("defining #specification (relative scale: #scale)")}% brrr
+ {\clf_fontcollectionreport{defining #specification (relative scale: #scale)}% brrr
\setfalse\c_font_auto_size
\let\lastfontidentifier\s!dummy
\def\v_font_size_relative{#scale}%
@@ -89,8 +89,8 @@
\def\font_fallbacks_clone_inherited#font% also a second argument
{\font_fallbacks_clone_unique{#font \savedfontspec}}
-\def\font_fallbacks_register_main #name{\ctxcommand{fontcollectionregister("#name")}}
-\def\font_fallbacks_prepare_clone_vectors#name{\ctxcommand{fontcollectionclone("#name")}}
+\def\font_fallbacks_register_main #name{\clf_fontcollectionregister{#name}}
+\def\font_fallbacks_prepare_clone_vectors#name{\clf_fontcollectionclone{#name}}
% check : only replace when present in replacement font (default: no)
% force : force replacement even when basefont has glyph (default: yes)
diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua
index 09293895e..72fbb5c0d 100644
--- a/tex/context/base/font-con.lua
+++ b/tex/context/base/font-con.lua
@@ -45,6 +45,8 @@ constructors.cache = containers.define("fonts", "constructors", constr
constructors.privateoffset = 0xF0000 -- 0x10FFFF
+constructors.cacheintex = true -- so we see the original table in fonts.font
+
-- Some experimental helpers (handy for tracing):
--
-- todo: extra:
@@ -290,14 +292,15 @@ constructors.nofsharedfonts = 0
local sharednames = { }
function constructors.trytosharefont(target,tfmdata)
- if constructors.sharefonts then
+ if constructors.sharefonts then -- not robust !
local characters = target.characters
local n = 1
local t = { target.psname }
local u = sortedkeys(characters)
for i=1,#u do
+ local k = u[i]
n = n + 1 ; t[n] = k
- n = n + 1 ; t[n] = characters[u[i]].index or k
+ n = n + 1 ; t[n] = characters[k].index or k
end
local h = md5.HEX(concat(t," "))
local s = sharednames[h]
@@ -393,7 +396,8 @@ function constructors.scale(tfmdata,specification)
targetparameters.forcedsize = forcedsize -- context specific
targetparameters.extrafactor = extrafactor -- context specific
--
- local tounicode = resources.tounicode
+ local tounicode = fonts.mappings.tounicode
+ --
local defaultwidth = resources.defaultwidth or 0
local defaultheight = resources.defaultheight or 0
local defaultdepth = resources.defaultdepth or 0
@@ -435,6 +439,7 @@ function constructors.scale(tfmdata,specification)
target.tounicode = 1
target.cidinfo = properties.cidinfo
target.format = properties.format
+ target.cache = constructors.cacheintex and "yes" or "renew"
--
local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on
local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although
@@ -452,7 +457,6 @@ function constructors.scale(tfmdata,specification)
target.psname = psname
target.name = name
--
- -- inspect(properties)
--
properties.fontname = fontname
properties.fullname = fullname
@@ -501,7 +505,9 @@ function constructors.scale(tfmdata,specification)
local autoitalicamount = properties.autoitalicamount
local stackmath = not properties.nostackmath
local nonames = properties.noglyphnames
- local nodemode = properties.mode == "node"
+ local haskerns = properties.haskerns or properties.mode == "base" -- we can have afm in node mode
+ local hasligatures = properties.hasligatures or properties.mode == "base" -- we can have afm in node mode
+ local realdimensions = properties.realdimensions
--
if changed and not next(changed) then
changed = false
@@ -595,39 +601,45 @@ function constructors.scale(tfmdata,specification)
-- we can have a dumb mode (basemode without math etc) that skips most
--
for unicode, character in next, characters do
- local chr, description, index, touni
+ local chr, description, index
if changed then
- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria)
local c = changed[unicode]
if c then
description = descriptions[c] or descriptions[unicode] or character
character = characters[c] or character
index = description.index or c
- if tounicode then
- touni = tounicode[index] -- nb: index!
- if not touni then -- goodie
- local d = descriptions[unicode] or characters[unicode]
- local i = d.index or unicode
- touni = tounicode[i] -- nb: index!
- end
- end
else
description = descriptions[unicode] or character
index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
end
else
description = descriptions[unicode] or character
index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
end
local width = description.width
local height = description.height
local depth = description.depth
+ if realdimensions then
+ -- this is mostly for checking issues
+ if not height or height == 0 then
+ local bb = description.boundingbox
+ local ht = bb[4]
+ if ht ~= 0 then
+ height = ht
+ end
+ if not depth or depth == 0 then
+ local dp = -bb[2]
+ if dp ~= 0 then
+ depth = dp
+ end
+ end
+ elseif not depth or depth == 0 then
+ local dp = -description.boundingbox[2]
+ if dp ~= 0 then
+ depth = dp
+ end
+ end
+ end
if width then width = hdelta*width else width = scaledwidth end
if height then height = vdelta*height else height = scaledheight end
-- if depth then depth = vdelta*depth else depth = scaleddepth end
@@ -666,8 +678,10 @@ function constructors.scale(tfmdata,specification)
}
end
end
- if touni then
- chr.tounicode = touni
+ local isunicode = description.unicode
+ if isunicode then
+ chr.unicode = isunicode
+ chr.tounicode = tounicode(isunicode)
end
if hasquality then
-- we could move these calculations elsewhere (saves calculations)
@@ -764,7 +778,7 @@ function constructors.scale(tfmdata,specification)
end
end
end
- if not nodemode then
+ if haskerns then
local vk = character.kerns
if vk then
local s = sharedkerns[vk]
@@ -775,13 +789,15 @@ function constructors.scale(tfmdata,specification)
end
chr.kerns = s
end
+ end
+ if hasligatures then
local vl = character.ligatures
if vl then
if true then
chr.ligatures = vl -- shared
else
local tt = { }
- for i,l in next, vl do
+ for i, l in next, vl do
tt[i] = l
end
chr.ligatures = tt
@@ -826,7 +842,6 @@ function constructors.scale(tfmdata,specification)
end
targetcharacters[unicode] = chr
end
-
--
constructors.aftercopyingcharacters(target,tfmdata)
--
@@ -963,6 +978,8 @@ function constructors.finalize(tfmdata)
tfmdata.slant = nil
tfmdata.units_per_em = nil
--
+ tfmdata.cache = nil
+ --
properties.finalized = true
--
return tfmdata
@@ -1360,3 +1377,50 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
end
end
end
+
+function constructors.addcoreunicodes(unicodes) -- maybe make this a metatable if used at all
+ if not unicodes then
+ unicodes = { }
+ end
+ unicodes.space = 0x0020
+ unicodes.hyphen = 0x002D
+ unicodes.zwj = 0x200D
+ unicodes.zwnj = 0x200C
+ return unicodes
+end
+
+-- -- keep for a while: old tounicode code
+--
+-- if changed then
+-- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria)
+-- local c = changed[unicode]
+-- if c then
+-- -- local ligatures = character.ligatures -- the original ligatures (as we cannot rely on remapping)
+-- description = descriptions[c] or descriptions[unicode] or character
+-- character = characters[c] or character
+-- index = description.index or c
+-- if tounicode then
+-- touni = tounicode[index] -- nb: index!
+-- if not touni then -- goodie
+-- local d = descriptions[unicode] or characters[unicode]
+-- local i = d.index or unicode
+-- touni = tounicode[i] -- nb: index!
+-- end
+-- end
+-- -- if ligatures and not character.ligatures then
+-- -- character.ligatures = ligatures -- the original targets (for now at least.. see libertine smallcaps)
+-- -- end
+-- else
+-- description = descriptions[unicode] or character
+-- index = description.index or unicode
+-- if tounicode then
+-- touni = tounicode[index] -- nb: index!
+-- end
+-- end
+-- else
+-- description = descriptions[unicode] or character
+-- index = description.index or unicode
+-- if tounicode then
+-- touni = tounicode[index] -- nb: index!
+-- end
+-- end
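The realdimensions branch added to constructors.scale derives a missing height or depth from the glyph bounding box (ury for the height, -lly for the depth) instead of leaving it at zero. The arithmetic on its own, as a sketch:

-- fall back on the bounding box when a glyph reports no height/depth
local function realdimensions(description)
    local height = description.height or 0
    local depth  = description.depth  or 0
    local bb     = description.boundingbox       -- { llx, lly, urx, ury }
    if bb then
        if height == 0 and bb[4] ~= 0 then height =  bb[4] end
        if depth  == 0 and bb[2] ~= 0 then depth  = -bb[2] end
    end
    return height, depth
end

print(realdimensions { boundingbox = { 20, -150, 480, 700 } }) --> 700  150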
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index b08a6aed2..81db31652 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -21,6 +21,7 @@ local settings_to_hash, hash_to_string = utilities.parsers.settings_to_hash, uti
local formatcolumns = utilities.formatters.formatcolumns
local mergehashes = utilities.parsers.mergehashes
local formatters = string.formatters
+local basename = file.basename
local tostring, next, type, rawget, tonumber = tostring, next, type, rawget, tonumber
local utfchar, utfbyte = utf.char, utf.byte
@@ -44,6 +45,8 @@ local report_mapfiles = logs.reporter("fonts","mapfiles")
local setmetatableindex = table.setmetatableindex
+local implement = interfaces.implement
+
local fonts = fonts
local handlers = fonts.handlers
local otf = handlers.otf -- brrr
@@ -57,6 +60,21 @@ local helpers = fonts.helpers
local hashes = fonts.hashes
local currentfont = font.current
+local encodings = fonts.encodings
+----- aglunicodes = encodings.agl.unicodes
+local aglunicodes = nil -- delayed loading
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
local texgetdimen = tex.getdimen
@@ -67,18 +85,22 @@ local texdefinefont = tex.definefont
local texsp = tex.sp
local fontdata = hashes.identifiers
-local characters = hashes.chardata
+local characters = hashes.characters
local descriptions = hashes.descriptions
local properties = hashes.properties
local resources = hashes.resources
+local unicodes = hashes.unicodes
local csnames = hashes.csnames
-local marks = hashes.markdata
local lastmathids = hashes.lastmathids
local exheights = hashes.exheights
local emwidths = hashes.emwidths
+local parameters = hashes.parameters
local designsizefilename = fontgoodies.designsizes.filename
+local context_char = context.char
+local context_getvalue = context.getvalue
+
local otffeatures = otf.features
local otftables = otf.tables
@@ -123,12 +145,23 @@ end
-- this will move elsewhere ...
-function fonts.helpers.name(tfmdata)
- return file.basename(type(tfmdata) == "number" and properties[tfmdata].name or tfmdata.properties.name)
+local function getfontname(tfmdata)
+ return basename(type(tfmdata) == "number" and properties[tfmdata].name or tfmdata.properties.name)
end
-utilities.strings.formatters.add(formatters,"font:name", [["'"..fonts.helpers.name(%s).."'"]])
-utilities.strings.formatters.add(formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]])
+fonts.helpers.name = getfontname
+
+if _LUAVERSION < 5.2 then
+
+ utilities.strings.formatters.add(formatters,"font:name", [["'"..fontname(%s).."'"]], "local fontname = fonts.helpers.name")
+ utilities.strings.formatters.add(formatters,"font:features",[["'"..sequenced(%s," ",true).."'"]],"local sequenced = table.sequenced")
+
+else
+
+ utilities.strings.formatters.add(formatters,"font:name", [["'"..fontname(%s).."'"]], { fontname = fonts.helpers.name })
+ utilities.strings.formatters.add(formatters,"font:features",[["'"..sequenced(%s," ",true).."'"]],{ sequenced = table.sequenced })
+
+end
-- ... like font-sfm or so
@@ -145,52 +178,54 @@ local hashes = { }
function constructors.trytosharefont(target,tfmdata)
constructors.noffontsloaded = constructors.noffontsloaded + 1
if constructors.sharefonts then
- local properties = target.properties
- local fullname = target.fullname
local fonthash = target.specification.hash
- local sharedname = hashes[fonthash]
- if sharedname then
- -- this is ok for context as we know that only features can mess with font definitions
- -- so a similar hash means that the fonts are similar too
- if trace_defining then
- report_defining("font %a uses backend resources of font %a (%s)",target.fullname,sharedname,"common hash")
- end
- target.fullname = sharedname
- properties.sharedwith = sharedname
- constructors.nofsharedfonts = constructors.nofsharedfonts + 1
- constructors.nofsharedhashes = constructors.nofsharedhashes + 1
- else
- -- the one takes more time (in the worst case of many cjk fonts) but it also saves
- -- embedding time
- local characters = target.characters
- local n = 1
- local t = { target.psname }
- local u = sortedkeys(characters)
- for i=1,#u do
- n = n + 1 ; t[n] = k
- n = n + 1 ; t[n] = characters[u[i]].index or k
- end
- local checksum = md5.HEX(concat(t," "))
- local sharedname = shares[checksum]
+ if fonthash then
+ local properties = target.properties
local fullname = target.fullname
+ local sharedname = hashes[fonthash]
if sharedname then
+ -- this is ok for context as we know that only features can mess with font definitions
+ -- so a similar hash means that the fonts are similar too
if trace_defining then
- report_defining("font %a uses backend resources of font %a (%s)",fullname,sharedname,"common vector")
+ report_defining("font %a uses backend resources of font %a (%s)",target.fullname,sharedname,"common hash")
end
- fullname = sharedname
- properties.sharedwith= sharedname
+ target.fullname = sharedname
+ properties.sharedwith = sharedname
constructors.nofsharedfonts = constructors.nofsharedfonts + 1
- constructors.nofsharedvectors = constructors.nofsharedvectors + 1
+ constructors.nofsharedhashes = constructors.nofsharedhashes + 1
else
- shares[checksum] = fullname
+ -- the one takes more time (in the worst case of many cjk fonts) but it also saves
+ -- embedding time
+ local characters = target.characters
+ local n = 1
+ local t = { target.psname }
+ local u = sortedkeys(characters)
+ for i=1,#u do
+ local k = u[i]
+ n = n + 1 ; t[n] = k
+ n = n + 1 ; t[n] = characters[k].index or k
+ end
+ local checksum = md5.HEX(concat(t," "))
+ local sharedname = shares[checksum]
+ local fullname = target.fullname
+ if sharedname then
+ if trace_defining then
+ report_defining("font %a uses backend resources of font %a (%s)",fullname,sharedname,"common vector")
+ end
+ fullname = sharedname
+ properties.sharedwith= sharedname
+ constructors.nofsharedfonts = constructors.nofsharedfonts + 1
+ constructors.nofsharedvectors = constructors.nofsharedvectors + 1
+ else
+ shares[checksum] = fullname
+ end
+ target.fullname = fullname
+ hashes[fonthash] = fullname
end
- target.fullname = fullname
- hashes[fonthash] = fullname
end
end
end
-
directives.register("fonts.checksharing",function(v)
if not v then
report_defining("font sharing in backend is disabled")
@@ -198,19 +233,6 @@ directives.register("fonts.checksharing",function(v)
constructors.sharefonts = v
end)
-local limited = false
-
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- fontloader.open = i_limiter.protect(fontloader.open)
- fontloader.info = i_limiter.protect(fontloader.info)
- limited = true
- end
- end
-end)
-
function definers.resetnullfont()
-- resetting is needed because tikz misuses nullfont
local parameters = fonts.nulldata.parameters
@@ -228,7 +250,17 @@ function definers.resetnullfont()
definers.resetnullfont = function() end
end
-commands.resetnullfont = definers.resetnullfont
+implement {
+ name = "resetnullfont",
+ onlyonce = true,
+ actions = function()
+ for i=1,7 do
+ -- we have no direct method yet
+ context([[\fontdimen%s\nullfont\zeropoint]],i)
+ end
+ definers.resetnullfont()
+ end
+}
-- this cannot be a feature initializer as there is no auto namespace
-- so we never enter the loop then; we can store the defaults in the tma
@@ -483,7 +515,7 @@ local function definecontext(name,t) -- can be shared
end
local function presetcontext(name,parent,features) -- will go to con and shared
- if features == "" and find(parent,"=") then
+ if features == "" and find(parent,"=",1,true) then
features = parent
parent = ""
end
@@ -800,7 +832,7 @@ local function splitcontext(features) -- presetcontext creates dummy here
local sf = setups[features]
if not sf then
local n -- number
- if find(features,",") then
+ if find(features,",",a,true) then
-- let's assume a combination which is not yet defined but just specified (as in math)
n, sf = presetcontext(features,features,"")
else
@@ -817,13 +849,13 @@ end
-- local setup = setups[features]
-- if setup then
-- return setup
--- elseif find(features,",") then
+-- elseif find(features,",",1,true) then
-- -- This is not that efficient but handy anyway for quick and dirty tests
-- -- beware, due to the way of caching setups you can get the wrong results
-- -- when components change. A safeguard is to nil the cache.
-- local merge = nil
-- for feature in gmatch(features,"[^, ]+") do
--- if find(feature,"=") then
+-- if find(feature,"=",1,true) then
-- local k, v = lpegmatch(splitter,feature)
-- if k and v then
-- if not merge then
@@ -855,7 +887,10 @@ end
specifiers.splitcontext = splitcontext
function specifiers.contexttostring(name,kind,separator,yes,no,strict,omit) -- not used
- return hash_to_string(mergedtable(handlers[kind].features.defaults or {},setups[name] or {}),separator,yes,no,strict,omit)
+ return hash_to_string(
+ mergedtable(handlers[kind].features.defaults or {},setups[name] or {}),
+ separator, yes, no, strict, omit or { "number" }
+ )
end
local function starred(features) -- no longer fallbacks here
@@ -877,11 +912,11 @@ local separator = S(";,")
local equal = P("=")
local spaces = space^0
local sometext = C((1-equal-space-separator)^1)
-local truevalue = P("+") * spaces * sometext * Cc(true) -- "yes"
-local falsevalue = P("-") * spaces * sometext * Cc(false) -- "no"
+local truevalue = P("+") * spaces * sometext * Cc(true)
+local falsevalue = P("-") * spaces * sometext * Cc(false)
+local somevalue = sometext * spaces * Cc(true)
local keyvalue = sometext * spaces * equal * spaces * sometext
-local somevalue = sometext * spaces * Cc(true) -- "yes"
-local pattern = Cf(Ct("") * (space + separator + Cg(keyvalue + falsevalue + truevalue + somevalue))^0, rawset)
+local pattern = Cf(Ct("") * (space + separator + Cg(falsevalue + truevalue + keyvalue + somevalue))^0, rawset)
local function colonized(specification)
specification.features.normal = normalize_features(lpegmatch(pattern,specification.detail))
@@ -931,319 +966,381 @@ local getspecification = definers.getspecification
-- we can make helper macros which save parsing (but normally not
-- that many calls, e.g. in mk a couple of 100 and in metafun 3500)
-local setdefaultfontname = context.fntsetdefname
-local setsomefontname = context.fntsetsomename
-local setemptyfontsize = context.fntsetnopsize
-local setsomefontsize = context.fntsetsomesize
-local letvaluerelax = context.letvaluerelax
-
-function commands.definefont_one(str)
- statistics.starttiming(fonts)
- if trace_defining then
- report_defining("memory usage before: %s",statistics.memused())
- report_defining("start stage one: %s",str)
- end
- local fullname, size = lpegmatch(splitpattern,str)
- local lookup, name, sub, method, detail = getspecification(fullname)
- if not name then
- report_defining("strange definition %a",str)
- setdefaultfontname()
- elseif name == "unknown" then
- setdefaultfontname()
- else
- setsomefontname(name)
- end
- -- we can also use a count for the size
- if size and size ~= "" then
- local mode, size = lpegmatch(sizepattern,size)
- if size and mode then
- texsetcount("scaledfontmode",mode)
- setsomefontsize(size)
+do -- else too many locals
+
+ ----- ctx_setdefaultfontname = context.fntsetdefname
+ ----- ctx_setsomefontname = context.fntsetsomename
+ ----- ctx_setemptyfontsize = context.fntsetnopsize
+ ----- ctx_setsomefontsize = context.fntsetsomesize
+ ----- ctx_letvaluerelax = context.letvaluerelax
+
+ local starttiming = statistics.starttiming
+ local stoptiming = statistics.stoptiming
+
+ local scanners = tokens.scanners
+ local scanstring = scanners.string
+ local scaninteger = scanners.integer
+ local scanboolean = scanners.boolean
+
+ local setmacro = tokens.setters.macro
+
+ local scanners = interfaces.scanners
+
+ -- function commands.definefont_one(str)
+
+ scanners.definefont_one = function()
+ local str = scanstring()
+
+ starttiming(fonts)
+ if trace_defining then
+ report_defining("memory usage before: %s",statistics.memused())
+ report_defining("start stage one: %s",str)
+ end
+ local fullname, size = lpegmatch(splitpattern,str)
+ local lookup, name, sub, method, detail = getspecification(fullname)
+ if not name then
+ report_defining("strange definition %a",str)
+ -- ctx_setdefaultfontname()
+ elseif name == "unknown" then
+ -- ctx_setdefaultfontname()
+ else
+ -- ctx_setsomefontname(name)
+ setmacro("somefontname",name,"global")
+ end
+ -- we can also use a count for the size
+ if size and size ~= "" then
+ local mode, size = lpegmatch(sizepattern,size)
+ if size and mode then
+ texsetcount("scaledfontmode",mode)
+ -- ctx_setsomefontsize(size)
+ setmacro("somefontsize",size)
+ else
+ texsetcount("scaledfontmode",0)
+ -- ctx_setemptyfontsize()
+ end
+ elseif true then
+ -- so we don't need to check in tex
+ texsetcount("scaledfontmode",2)
+ -- ctx_setemptyfontsize()
else
texsetcount("scaledfontmode",0)
- setemptyfontsize()
+ -- ctx_setemptyfontsize()
+ end
+ specification = definers.makespecification(str,lookup,name,sub,method,detail,size)
+ if trace_defining then
+ report_defining("stop stage one")
end
- elseif true then
- -- so we don't need to check in tex
- texsetcount("scaledfontmode",2)
- setemptyfontsize()
- else
- texsetcount("scaledfontmode",0)
- setemptyfontsize()
end
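-- [editor's sketch, not part of the patch] with the scanner interface the TeX
-- end presumably calls the registered function directly, roughly like this
-- (the \clf_ name is an assumption, by analogy with \clf_definefontfeature in
-- the font-fea.mkvi hunk below; the argument string is whatever the \definefont
-- macros pass on):
--
--   \clf_definefont_one{Serif at 12pt}
--
-- instead of the former \ctxcommand{definefont_one("Serif at 12pt")} round trip.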
- specification = definers.makespecification(str,lookup,name,sub,method,detail,size)
- if trace_defining then
- report_defining("stop stage one")
+
+ local n = 0
+
+ -- we can also move rscale to here (more consistent)
+ -- the argument list will become a table
+
+ local function nice_cs(cs)
+ return (gsub(cs,".->", ""))
end
-end
-local n = 0
+ -- function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
+ -- mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize,scaledfontmode)
--- we can also move rscale to here (more consistent)
--- the argument list will become a table
+ scanners.definefont_two = function()
-local function nice_cs(cs)
- return (gsub(cs,".->", ""))
-end
+ local global = scanboolean() -- \ifx\fontclass\empty\s!false\else\s!true\fi
+ local cs = scanstring () -- {#csname}%
+ local str = scanstring () -- \somefontfile
+ local size = scaninteger() -- \d_font_scaled_font_size
+ local inheritancemode = scaninteger() -- \c_font_feature_inheritance_mode
+ local classfeatures = scanstring () -- \m_font_class_features
+ local fontfeatures = scanstring () -- \m_font_features
+ local classfallbacks = scanstring () -- \m_font_class_fallbacks
+ local fontfallbacks = scanstring () -- \m_font_fallbacks
+ local mathsize = scaninteger() -- \fontface
+ local textsize = scaninteger() -- \d_font_scaled_text_face
+ local relativeid = scaninteger() -- \relativefontid
+ local classgoodies = scanstring () -- \m_font_class_goodies
+ local goodies = scanstring () -- \m_font_goodies
+ local classdesignsize = scanstring () -- \m_font_class_designsize
+ local fontdesignsize = scanstring () -- \m_font_designsize
+ local scaledfontmode = scaninteger() -- \scaledfontmode
-function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
- mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize,scaledfontmode)
- if trace_defining then
- report_defining("start stage two: %s (size %s)",str,size)
- end
- -- name is now resolved and size is scaled cf sa/mo
- local lookup, name, sub, method, detail = getspecification(str or "")
- -- new (todo: inheritancemode)
- local designsize = fontdesignsize ~= "" and fontdesignsize or classdesignsize or ""
- local designname = designsizefilename(name,designsize,size)
- if designname and designname ~= "" then
- if trace_defining or trace_designsize then
- report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname)
- end
- -- we don't catch detail here
- local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname)
- if o_lookup and o_lookup ~= "" then lookup = o_lookup end
- if o_method and o_method ~= "" then method = o_method end
- if o_detail and o_detail ~= "" then detail = o_detail end
- name = o_name
- sub = o_sub
- end
- -- so far
- -- some settings can have been overloaded
- if lookup and lookup ~= "" then
- specification.lookup = lookup
- end
- if relativeid and relativeid ~= "" then -- experimental hook
- local id = tonumber(relativeid) or 0
- specification.relativeid = id > 0 and id
- end
- --
- specification.name = name
- specification.size = size
- specification.sub = (sub and sub ~= "" and sub) or specification.sub
- specification.mathsize = mathsize
- specification.textsize = textsize
- specification.goodies = goodies
- specification.cs = cs
- specification.global = global
- specification.scalemode = scaledfontmode -- context specific
- if detail and detail ~= "" then
- specification.method = method or "*"
- specification.detail = detail
- elseif specification.detail and specification.detail ~= "" then
- -- already set
- elseif inheritancemode == 0 then
- -- nothing
- elseif inheritancemode == 1 then
- -- fontonly
- if fontfeatures and fontfeatures ~= "" then
- specification.method = "*"
- specification.detail = fontfeatures
- end
- if fontfallbacks and fontfallbacks ~= "" then
- specification.fallbacks = fontfallbacks
- end
- elseif inheritancemode == 2 then
- -- classonly
- if classfeatures and classfeatures ~= "" then
- specification.method = "*"
- specification.detail = classfeatures
- end
- if classfallbacks and classfallbacks ~= "" then
- specification.fallbacks = classfallbacks
- end
- elseif inheritancemode == 3 then
- -- fontfirst
- if fontfeatures and fontfeatures ~= "" then
- specification.method = "*"
- specification.detail = fontfeatures
- elseif classfeatures and classfeatures ~= "" then
- specification.method = "*"
- specification.detail = classfeatures
- end
- if fontfallbacks and fontfallbacks ~= "" then
- specification.fallbacks = fontfallbacks
- elseif classfallbacks and classfallbacks ~= "" then
- specification.fallbacks = classfallbacks
- end
- elseif inheritancemode == 4 then
- -- classfirst
- if classfeatures and classfeatures ~= "" then
- specification.method = "*"
- specification.detail = classfeatures
- elseif fontfeatures and fontfeatures ~= "" then
- specification.method = "*"
- specification.detail = fontfeatures
- end
- if classfallbacks and classfallbacks ~= "" then
- specification.fallbacks = classfallbacks
- elseif fontfallbacks and fontfallbacks ~= "" then
- specification.fallbacks = fontfallbacks
- end
- end
- local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?)
- --
- local lastfontid = 0
- if not tfmdata then
- report_defining("unable to define %a as %a",name,nice_cs(cs))
- lastfontid = -1
- letvaluerelax(cs) -- otherwise the current definition takes the previous one
- elseif type(tfmdata) == "number" then
- if trace_defining then
- report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a",
- name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize)
- end
- csnames[tfmdata] = specification.cs
- texdefinefont(global,cs,tfmdata)
- -- resolved (when designsize is used):
- local size = fontdata[tfmdata].parameters.size or 0
- setsomefontsize(size .. "sp")
- texsetcount("scaledfontsize",size)
- lastfontid = tfmdata
- else
- -- setting the extra characters will move elsewhere
- local characters = tfmdata.characters
- local parameters = tfmdata.parameters
- -- we use char0 as signal; cf the spec pdf can handle this (no char in slot)
- characters[0] = nil
- -- characters[0x00A0] = { width = parameters.space }
- -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure
- -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period
- --
- constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
- local id = font.define(tfmdata)
- csnames[id] = specification.cs
- tfmdata.properties.id = id
- definers.register(tfmdata,id) -- to be sure, normally already done
- texdefinefont(global,cs,id)
- constructors.cleanuptable(tfmdata)
- constructors.finalize(tfmdata)
if trace_defining then
- report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
- name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ report_defining("start stage two: %s (size %s)",str,size)
end
- -- resolved (when designsize is used):
- local size = tfmdata.parameters.size or 655360
- setsomefontsize(size .. "sp")
- texsetcount("scaledfontsize",size)
- lastfontid = id
- end
- if trace_defining then
- report_defining("memory usage after: %s",statistics.memused())
- report_defining("stop stage two")
- end
- --
- texsetcount("global","lastfontid",lastfontid)
- if not mathsize then
- -- forget about it
- elseif mathsize == 0 then
- lastmathids[1] = lastfontid
- else
- lastmathids[mathsize] = lastfontid
- end
- --
- statistics.stoptiming(fonts)
-end
-
-function definers.define(specification)
- --
- local name = specification.name
- if not name or name == "" then
- return -1
- else
- statistics.starttiming(fonts)
- --
- -- following calls expect a few properties to be set:
- --
- local lookup, name, sub, method, detail = getspecification(name or "")
- --
- specification.name = (name ~= "" and name) or specification.name
- --
- specification.lookup = specification.lookup or (lookup ~= "" and lookup) or "file"
- specification.size = specification.size or 655260
- specification.sub = specification.sub or (sub ~= "" and sub) or ""
- specification.method = specification.method or (method ~= "" and method) or "*"
- specification.detail = specification.detail or (detail ~= "" and detail) or ""
- --
- if type(specification.size) == "string" then
- specification.size = texsp(specification.size) or 655260
+ -- name is now resolved and size is scaled cf sa/mo
+ local lookup, name, sub, method, detail = getspecification(str or "")
+ -- new (todo: inheritancemode)
+ local designsize = fontdesignsize ~= "" and fontdesignsize or classdesignsize or ""
+ local designname = designsizefilename(name,designsize,size)
+ if designname and designname ~= "" then
+ if trace_defining or trace_designsize then
+ report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname)
+ end
+ -- we don't catch detail here
+ local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname)
+ if o_lookup and o_lookup ~= "" then lookup = o_lookup end
+ if o_method and o_method ~= "" then method = o_method end
+ if o_detail and o_detail ~= "" then detail = o_detail end
+ name = o_name
+ sub = o_sub
+ end
+ -- so far
+ -- some settings can have been overloaded
+ if lookup and lookup ~= "" then
+ specification.lookup = lookup
+ end
+ if relativeid and relativeid ~= "" then -- experimental hook
+ local id = tonumber(relativeid) or 0
+ specification.relativeid = id > 0 and id
end
--
- specification.specification = "" -- not used
- specification.resolved = ""
- specification.forced = ""
- specification.features = { } -- via detail, maybe some day
- --
- -- we don't care about mathsize textsize goodies fallbacks
- --
- local cs = specification.cs
- if cs == "" then
- cs = nil
- specification.cs = nil
- specification.global = false
- elseif specification.global == nil then
- specification.global = false
+ specification.name = name
+ specification.size = size
+ specification.sub = (sub and sub ~= "" and sub) or specification.sub
+ specification.mathsize = mathsize
+ specification.textsize = textsize
+ specification.goodies = goodies
+ specification.cs = cs
+ specification.global = global
+ specification.scalemode = scaledfontmode -- context specific
+ if detail and detail ~= "" then
+ specification.method = method or "*"
+ specification.detail = detail
+ elseif specification.detail and specification.detail ~= "" then
+ -- already set
+ elseif inheritancemode == 0 then
+ -- nothing
+ elseif inheritancemode == 1 then
+ -- fontonly
+ if fontfeatures and fontfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = fontfeatures
+ end
+ if fontfallbacks and fontfallbacks ~= "" then
+ specification.fallbacks = fontfallbacks
+ end
+ elseif inheritancemode == 2 then
+ -- classonly
+ if classfeatures and classfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = classfeatures
+ end
+ if classfallbacks and classfallbacks ~= "" then
+ specification.fallbacks = classfallbacks
+ end
+ elseif inheritancemode == 3 then
+ -- fontfirst
+ if fontfeatures and fontfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = fontfeatures
+ elseif classfeatures and classfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = classfeatures
+ end
+ if fontfallbacks and fontfallbacks ~= "" then
+ specification.fallbacks = fontfallbacks
+ elseif classfallbacks and classfallbacks ~= "" then
+ specification.fallbacks = classfallbacks
+ end
+ elseif inheritancemode == 4 then
+ -- classfirst
+ if classfeatures and classfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = classfeatures
+ elseif fontfeatures and fontfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = fontfeatures
+ end
+ if classfallbacks and classfallbacks ~= "" then
+ specification.fallbacks = classfallbacks
+ elseif fontfallbacks and fontfallbacks ~= "" then
+ specification.fallbacks = fontfallbacks
+ end
end
+ local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?)
--
- local tfmdata = definers.read(specification,specification.size)
+ local lastfontid = 0
if not tfmdata then
- return -1, nil
+ report_defining("unable to define %a as %a",name,nice_cs(cs))
+ lastfontid = -1
+ texsetcount("scaledfontsize",0)
+ -- ctx_letvaluerelax(cs) -- otherwise the current definition takes the previous one
elseif type(tfmdata) == "number" then
- if cs then
- texdefinefont(specification.global,cs,tfmdata)
- csnames[tfmdata] = cs
+ if trace_defining then
+ report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a",
+ name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize)
end
- return tfmdata, fontdata[tfmdata]
+ csnames[tfmdata] = specification.cs
+ texdefinefont(global,cs,tfmdata)
+ -- resolved (when designsize is used):
+ local size = fontdata[tfmdata].parameters.size or 0
+ -- ctx_setsomefontsize(size .. "sp")
+ setmacro("somefontsize",size.."sp")
+ texsetcount("scaledfontsize",size)
+ lastfontid = tfmdata
else
+ -- setting the extra characters will move elsewhere
+ local characters = tfmdata.characters
+ local parameters = tfmdata.parameters
+ -- we use char0 as signal; cf the spec pdf can handle this (no char in slot)
+ characters[0] = nil
+ -- characters[0x00A0] = { width = parameters.space }
+ -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure
+ -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period
+ --
constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
local id = font.define(tfmdata)
+ csnames[id] = specification.cs
tfmdata.properties.id = id
- definers.register(tfmdata,id)
- if cs then
- texdefinefont(specification.global,cs,id)
- csnames[id] = cs
- end
+ definers.register(tfmdata,id) -- to be sure, normally already done
+ texdefinefont(global,cs,id)
constructors.cleanuptable(tfmdata)
constructors.finalize(tfmdata)
- return id, tfmdata
+ if trace_defining then
+ report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
+ name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ end
+ -- resolved (when designsize is used):
+ local size = tfmdata.parameters.size or 655360
+ setmacro("somefontsize",size.."sp")
+ -- ctx_setsomefontsize(size .. "sp")
+ texsetcount("scaledfontsize",size)
+ lastfontid = id
end
- statistics.stoptiming(fonts)
+ if trace_defining then
+ report_defining("memory usage after: %s",statistics.memused())
+ report_defining("stop stage two")
+ end
+ --
+ texsetcount("global","lastfontid",lastfontid)
+ if not mathsize then
+ -- forget about it
+ elseif mathsize == 0 then
+ lastmathids[1] = lastfontid
+ else
+ lastmathids[mathsize] = lastfontid
+ end
+ --
+ stoptiming(fonts)
end
-end
--- local id, cs = fonts.definers.internal { }
--- local id, cs = fonts.definers.internal { number = 2 }
--- local id, cs = fonts.definers.internal { name = "dejavusans" }
-
-local n = 0
-
-function definers.internal(specification,cs)
- specification = specification or { }
- local name = specification.name
- local size = specification.size and number.todimen(specification.size) or texgetdimen("bodyfontsize")
- local number = tonumber(specification.number)
- local id = nil
- if number then
- id = number
- elseif name and name ~= "" then
- local cs = cs or specification.cs
- if not cs then
- n = n + 1 -- beware ... there can be many and they are often used once
- -- cs = formatters["internal font %s"](n)
- cs = "internal font " .. n
+ --
+
+ function definers.define(specification)
+ --
+ local name = specification.name
+ if not name or name == "" then
+ return -1
else
- specification.cs = cs
+ starttiming(fonts)
+ --
+ -- following calls expect a few properties to be set:
+ --
+ local lookup, name, sub, method, detail = getspecification(name or "")
+ --
+ specification.name = (name ~= "" and name) or specification.name
+ --
+ specification.lookup = specification.lookup or (lookup ~= "" and lookup) or "file"
+ specification.size = specification.size or 655260
+ specification.sub = specification.sub or (sub ~= "" and sub) or ""
+ specification.method = specification.method or (method ~= "" and method) or "*"
+ specification.detail = specification.detail or (detail ~= "" and detail) or ""
+ --
+ if type(specification.size) == "string" then
+ specification.size = texsp(specification.size) or 655260
+ end
+ --
+ specification.specification = "" -- not used
+ specification.resolved = ""
+ specification.forced = ""
+ specification.features = { } -- via detail, maybe some day
+ --
+ -- we don't care about mathsize textsize goodies fallbacks
+ --
+ local cs = specification.cs
+ if cs == "" then
+ cs = nil
+ specification.cs = nil
+ specification.global = false
+ elseif specification.global == nil then
+ specification.global = false
+ end
+ --
+ local tfmdata = definers.read(specification,specification.size)
+ if not tfmdata then
+ return -1, nil
+ elseif type(tfmdata) == "number" then
+ if cs then
+ texdefinefont(specification.global,cs,tfmdata)
+ csnames[tfmdata] = cs
+ end
+ return tfmdata, fontdata[tfmdata]
+ else
+ constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
+ local id = font.define(tfmdata)
+ tfmdata.properties.id = id
+ definers.register(tfmdata,id)
+ if cs then
+ texdefinefont(specification.global,cs,id)
+ csnames[id] = cs
+ end
+ constructors.cleanuptable(tfmdata)
+ constructors.finalize(tfmdata)
+ return id, tfmdata
+ end
+ stoptiming(fonts)
+ end
+ end
+
+ -- local id, cs = fonts.definers.internal { }
+ -- local id, cs = fonts.definers.internal { number = 2 }
+ -- local id, cs = fonts.definers.internal { name = "dejavusans" }
+
+ local n = 0
+
+ function definers.internal(specification,cs)
+ specification = specification or { }
+ local name = specification.name
+ local size = tonumber(specification.size)
+ local number = tonumber(specification.number)
+ local id = nil
+ if not size then
+ size = texgetdimen("bodyfontsize")
end
- id = definers.define {
- name = name,
- size = size,
- cs = cs,
- }
+ if number then
+ id = number
+ elseif name and name ~= "" then
+ local cs = cs or specification.cs
+ if not cs then
+ n = n + 1 -- beware ... there can be many and they are often used once
+ -- cs = formatters["internal font %s"](n)
+ cs = "internal font " .. n
+ else
+ specification.cs = cs
+ end
+ id = definers.define {
+ name = name,
+ size = size,
+ cs = cs,
+ }
+ end
+ if not id then
+ id = currentfont()
+ end
+ return id, csnames[id]
end
- if not id then
- id = currentfont()
+
+ -- here
+
+ local infofont = 0
+
+ function fonts.infofont()
+ if infofont == 0 then
+ infofont = definers.define { name = "dejavusansmono", size = tex.sp("6pt") }
+ end
+ return infofont
end
- return id, csnames[id]
+
end
local enable_auto_r_scale = false
@@ -1275,7 +1372,7 @@ function constructors.calculatescale(tfmdata,scaledpoints,relativeid,specificati
end
end
end
- scaledpoints, delta = calculatescale(tfmdata,scaledpoints)
+ local scaledpoints, delta = calculatescale(tfmdata,scaledpoints)
-- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific (we need to hash rscale then)
-- local relativedata = fontdata[relativeid]
-- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled
@@ -1364,10 +1461,10 @@ end
local mappings = fonts.mappings
local loaded = { -- prevent loading (happens in cont-sys files)
- ["original-base.map" ] = true,
- ["original-ams-base.map" ] = true,
- ["original-ams-euler.map"] = true,
- ["original-public-lm.map"] = true,
+ -- ["original-base.map" ] = true,
+ -- ["original-ams-base.map" ] = true,
+ -- ["original-ams-euler.map"] = true,
+ -- ["original-public-lm.map"] = true,
}
function mappings.loadfile(name)
@@ -1405,20 +1502,178 @@ end
mappings.reset() -- resets the default file
+implement {
+ name = "loadmapfile",
+ actions = mappings.loadfile,
+ arguments = "string"
+}
+
+implement {
+ name = "loadmapline",
+ actions = mappings.loadline,
+ arguments = "string"
+}
+
+implement {
+ name = "resetmapfiles",
+ actions = mappings.reset,
+ arguments = "string"
+}
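-- [editor's sketch, not part of the patch] at the TeX end these three presumably
-- surface as \clf_ macros replacing the former \ctxcommand wrappers (the names
-- are assumptions, by analogy with \clf_definefontfeature further down):
--
--   \clf_loadmapfile{original-base.map}
--   \clf_loadmapline{...}
--   \clf_resetmapfiles{}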
+
-- we need an 'do after the banner hook'
-- => commands
local function nametoslot(name)
local t = type(name)
+ local s = nil
if t == "string" then
- return resources[true].unicodes[name]
+ local slot = unicodes[true][name]
+ if slot then
+ return slot
+ end
+ if not aglunicodes then
+ aglunicodes = encodings.agl.unicodes
+ end
+ slot = aglunicodes[name]
+ if characters[true][slot] then
+ return slot
+ else
+ -- not in font
+ end
elseif t == "number" then
- return n
+ if characters[true][name] then
+            return name
+ else
+ -- not in font
+ end
+ end
+end
+
+local function indextoslot(index)
+ local r = resources[true]
+ if r then
+ local indices = r.indices
+ if not indices then
+ indices = { }
+ local c = characters[true]
+ for unicode, data in next, c do
+ local di = data.index
+ if di then
+ indices[di] = unicode
+ end
+ end
+ r.indices = indices
+ end
+ return indices[tonumber(index)]
end
end
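-- [editor's sketch, not part of the patch] indextoslot builds the index->unicode
-- map lazily on first use; intended use is along these lines (the index value is
-- illustrative only):
--
--   local u = indextoslot(123)          -- unicode of the glyph with font index 123
--   if u then context(utfchar(u)) end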
-helpers.nametoslot = nametoslot
+do -- else too many locals
+
+ local entities = characters.entities
+ local lowered = { } -- delayed initialization
+
+ setmetatableindex(lowered,function(t,k)
+ for k, v in next, entities do
+ local l = lower(k)
+ if not entities[l] then
+ lowered[l] = v
+ end
+ end
+ setmetatableindex(lowered,nil)
+ return lowered[k]
+ end)
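-- [editor's sketch, not part of the patch] the index metamethod above fills the
-- lowercase map once, on the first miss, and then removes itself; so, assuming
-- entities.Eacute exists while entities.eacute does not:
--
--   lowered["eacute"]   -- == entities["Eacute"]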
+
+ local methods = {
+ -- entity
+ e = function(name)
+ return entities[name] or lowered[name] or name
+ end,
+ -- hexadecimal unicode
+ x = function(name)
+ local n = tonumber(name,16)
+ return n and utfchar(n) or name
+ end,
+ -- decimal unicode
+ d = function(name)
+ local n = tonumber(name)
+ return n and utfchar(n) or name
+ end,
+ -- hexadecimal index (slot)
+ s = function(name)
+ local n = tonumber(name,16)
+ local n = n and indextoslot(n)
+ return n and utfchar(n) or name
+ end,
+ -- decimal index
+ i = function(name)
+ local n = tonumber(name)
+ local n = n and indextoslot(n)
+ return n and utfchar(n) or name
+ end,
+ -- name
+ n = function(name)
+ local n = nametoslot(name)
+ return n and utfchar(n) or name
+ end,
+ -- char
+ c = function(name)
+ return name
+ end,
+ }
+
+ -- -- nicer:
+ --
+ -- setmetatableindex(methods,function(t,k) return methods.c end)
+ --
+ -- local splitter = (C(1) * P(":") + Cc("c")) * C(P(1)^1) / function(method,name)
+ -- return methods[method](name)
+ -- end
+ --
+ -- -- more efficient:
+
+ local splitter = C(1) * P(":") * C(P(1)^1) / function(method,name)
+ local action = methods[method]
+ return action and action(name) or name
+ end
+
+ local function tochar(str)
+ local t = type(str)
+ if t == "number" then
+ return utfchar(str)
+ elseif t == "string" then
+ return lpegmatch(splitter,str) or str
+ else
+ return str
+ end
+ end
+
+ helpers.nametoslot = nametoslot
+ helpers.indextoslot = indextoslot
+ helpers.tochar = tochar
+
+ -- interfaces:
+
+ implement {
+ name = "fontchar",
+ actions = { nametoslot, context_char },
+ arguments = "string",
+ }
+
+ implement {
+ name = "fontcharbyindex",
+ actions = { indextoslot, context_char },
+ arguments = "integer",
+ }
+
+ implement {
+ name = "tochar",
+ actions = { tochar, context },
+ arguments = "string",
+ }
+
+end
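-- [editor's sketch, not part of the patch] the method prefixes accepted by
-- tochar, with illustrative results (actual output depends on the current font
-- and on the entity table):
--
--   helpers.tochar("d:233")      -- utfchar(233), decimal unicode
--   helpers.tochar("x:2713")     -- utfchar(0x2713), hexadecimal unicode
--   helpers.tochar("n:eacute")   -- by glyph name, via nametoslot
--   helpers.tochar("i:123")      -- by decimal glyph index, via indextoslot
--   helpers.tochar("s:7B")       -- by hexadecimal glyph index
--   helpers.tochar("e:amp")      -- by entity name
--   helpers.tochar("c:@")        -- the character itself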
-- this will change ...
@@ -1509,6 +1764,11 @@ local Shapes = {
mono = "Mono",
}
+local ctx_startfontclass = context.startfontclass
+local ctx_stopfontclass = context.stopfontclass
+local ctx_definefontsynonym = context.definefontsynonym
+local ctx_dofastdefinetypeface = context.dofastdefinetypeface
+
function fonts.definetypeface(name,t)
if type(name) == "table" then
-- {name=abc,k=v,...}
@@ -1536,16 +1796,22 @@ function fonts.definetypeface(name,t)
local normalwidth = t.normalwidth or t.width or p.normalwidth or p.width or "normal"
local boldwidth = t.boldwidth or t.width or p.boldwidth or p.width or "normal"
Shape = Shapes[shape] or "Serif"
- context.startfontclass { name }
- context.definefontsynonym( { format("%s", Shape) }, { format("spec:%s-%s-regular-%s", fontname, normalweight, normalwidth) } )
- context.definefontsynonym( { format("%sBold", Shape) }, { format("spec:%s-%s-regular-%s", fontname, boldweight, boldwidth ) } )
- context.definefontsynonym( { format("%sBoldItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, boldweight, boldwidth ) } )
- context.definefontsynonym( { format("%sItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, normalweight, normalwidth) } )
- context.stopfontclass()
+ ctx_startfontclass { name }
+ ctx_definefontsynonym( { formatters["%s"] (Shape) }, { formatters["spec:%s-%s-regular-%s"] (fontname, normalweight, normalwidth) } )
+ ctx_definefontsynonym( { formatters["%sBold"] (Shape) }, { formatters["spec:%s-%s-regular-%s"] (fontname, boldweight, boldwidth ) } )
+ ctx_definefontsynonym( { formatters["%sBoldItalic"](Shape) }, { formatters["spec:%s-%s-italic-%s"] (fontname, boldweight, boldwidth ) } )
+ ctx_definefontsynonym( { formatters["%sItalic"] (Shape) }, { formatters["spec:%s-%s-italic-%s"] (fontname, normalweight, normalwidth) } )
+ ctx_stopfontclass()
local settings = sequenced({ features= t.features },",")
- context.dofastdefinetypeface(name, shortcut, shape, size, settings)
+ ctx_dofastdefinetypeface(name, shortcut, shape, size, settings)
end
+implement {
+ name = "definetypeface",
+ actions = fonts.definetypeface,
+ arguments = { "string", "string" }
+}
+
function fonts.current() -- todo: also handle name
return fontdata[currentfont()] or fontdata[0]
end
@@ -1554,96 +1820,6 @@ function fonts.currentid()
return currentfont() or 0
end
--- interfaces
-
-function commands.fontchar(n)
- n = nametoslot(n)
- if n then
- context.char(n)
- end
-end
-
-function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash
- local f = fontdata[currentfont()]
- f = f and f.shared
- f = f and f.rawdata
- f = f and f.resources
- f = f and f.features
- commands.doifelse(f and (f.gpos[name] or f.gsub[name]))
-end
-
-local p, f = 1, formatters["%0.1fpt"] -- normally this value is changed only once
-
-local stripper = lpeg.patterns.stripzeros
-
-function commands.nbfs(amount,precision)
- if precision ~= p then
- p = precision
- f = formatters["%0." .. p .. "fpt"]
- end
- context(lpegmatch(stripper,f(amount/65536)))
-end
-
-function commands.featureattribute(tag)
- context(contextnumber(tag))
-end
-
-function commands.setfontfeature(tag)
- texsetattribute(0,contextnumber(tag))
-end
-
-function commands.resetfontfeature()
- texsetattribute(0,0)
-end
-
--- function commands.addfs(tag) withset(tag, 1) end
--- function commands.subfs(tag) withset(tag,-1) end
--- function commands.addff(tag) withfnt(tag, 2) end -- on top of font features
--- function commands.subff(tag) withfnt(tag,-2) end -- on top of font features
-
-function commands.cleanfontname (name) context(names.cleanname(name)) end
-
-function commands.fontlookupinitialize (name) names.lookup(name) end
-function commands.fontlookupnoffound () context(names.noflookups()) end
-function commands.fontlookupgetkeyofindex(key,index) context(names.getlookupkey(key,index)) end
-function commands.fontlookupgetkey (key) context(names.getlookupkey(key)) end
-
--- this might move to a runtime module:
-
-function commands.showchardata(n)
- local tfmdata = fontdata[currentfont()]
- if tfmdata then
- if type(n) == "string" then
- n = utfbyte(n)
- end
- local chr = tfmdata.characters[n]
- if chr then
- report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false))
- end
- end
-end
-
-function commands.showfontparameters(tfmdata)
- -- this will become more clever
- local tfmdata = tfmdata or fontdata[currentfont()]
- if tfmdata then
- local parameters = tfmdata.parameters
- local mathparameters = tfmdata.mathparameters
- local properties = tfmdata.properties
- local hasparameters = parameters and next(parameters)
- local hasmathparameters = mathparameters and next(mathparameters)
- if hasparameters then
- report_status("%s @ %s => text parameters => %s",properties.fullname,parameters.size,serialize(parameters,false))
- end
- if hasmathparameters then
- report_status("%s @ %s => math parameters => %s",properties.fullname,parameters.size,serialize(mathparameters,false))
- end
- if not hasparameters and not hasmathparameters then
- report_status("%s @ %s => no text parameters and/or math parameters",properties.fullname,parameters.size)
- end
- end
-end
-
-- for the moment here, this will become a chain of extras that is
-- hooked into the ctx registration (or scaler or ...)
@@ -1741,14 +1917,15 @@ end
-- redefinition
-local quads = hashes.quads
-local xheights = hashes.xheights
+-- local hashes = fonts.hashes
+-- local emwidths = hashes.emwidths
+-- local exheights = hashes.exheights
setmetatableindex(dimenfactors, function(t,k)
if k == "ex" then
- return 1/xheights[currentfont()]
+ return 1/exheights[currentfont()]
elseif k == "em" then
- return 1/quads[currentfont()]
+ return 1/emwidths[currentfont()]
elseif k == "pct" or k == "%" then
return 1/(texget("hsize")/100)
else
@@ -1824,101 +2001,213 @@ end
-- end
-- end
-function commands.setfontofid(id)
- context.getvalue(csnames[id])
-end
+do
--- more interfacing:
+ -- local scanners = tokens.scanners
+ -- local scanstring = scanners.string
+ -- local scaninteger = scanners.integer
+ -- local scandimen = scanners.dimen
+ -- local scanboolean = scanners.boolean
-commands.definefontfeature = presetcontext
+ -- local scanners = interfaces.scanners
-local cache = { }
+ local setmacro = tokens.setters.macro
-local hows = {
- ["+"] = "add",
- ["-"] = "subtract",
- ["="] = "replace",
-}
+ function constructors.currentfonthasfeature(n)
+ local f = fontdata[currentfont()]
+ if not f then return end f = f.shared
+ if not f then return end f = f.rawdata
+ if not f then return end f = f.resources
+ if not f then return end f = f.features
+ return f and (f.gpos[n] or f.gsub[n])
+ end
-function commands.feature(how,parent,name,font) -- 0/1 test temporary for testing
- if not how or how == 0 then
- if trace_features and texgetattribute(0) ~= 0 then
- report_cummulative("font %!font:name!, reset",fontdata[font or true])
+ implement {
+ name = "doifelsecurrentfonthasfeature",
+        actions   = { constructors.currentfonthasfeature, commands.doifelse },
+ arguments = "string"
+ }
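-- [editor's sketch, not part of the patch] a typical TeX-side use, assuming a
-- wrapper macro of the same name is defined at the macro end:
--
--   \doifelsecurrentfonthasfeature{smcp}{has smallcaps}{no smallcaps}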
+
+ -- local p, f = 1, formatters["%0.1fpt"] -- normally this value is changed only once
+ --
+ -- local stripper = lpeg.patterns.stripzeros
+ --
+ -- function commands.nbfs(amount,precision)
+ -- if precision ~= p then
+ -- p = precision
+ -- f = formatters["%0." .. p .. "fpt"]
+ -- end
+ -- context(lpegmatch(stripper,f(amount/65536)))
+ -- end
+
+ local f_strip = formatters["%0.2fpt"] -- normally this value is changed only once
+ local stripper = lpeg.patterns.stripzeros
+
+ -- scanners.nbfs = function()
+ -- context(lpegmatch(stripper,f_strip(scandimen()/65536)))
+ -- end
+
+ implement {
+ name = "nbfs",
+ arguments = "dimen",
+ actions = function(d)
+ context(lpegmatch(stripper,f_strip(d/65536)))
end
- texsetattribute(0,0)
- elseif how == true or how == 1 then
- local hash = "feature > " .. parent
- local done = cache[hash]
- if trace_features and done then
- report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]])
+ }
+
+ -- commands.featureattribute = function(tag) context(contextnumber(tag)) end
+ -- commands.setfontfeature = function(tag) texsetattribute(0,contextnumber(tag)) end
+ -- commands.resetfontfeature = function() texsetattribute(0,0) end
+ -- commands.setfontofid = function(id) context_getvalue(csnames[id]) end
+ -- commands.definefontfeature = presetcontext
+
+ -- scanners.featureattribute = function() context(contextnumber(scanstring())) end
+ -- scanners.setfontfeature = function() texsetattribute(0,contextnumber(scanstring())) end
+ -- scanners.resetfontfeature = function() texsetattribute(0,0) end
+ -- scanners.setfontofid = function() context_getvalue(csnames[scaninteger()]) end
+ -- scanners.definefontfeature = function() presetcontext(scanstring(),scanstring(),scanstring()) end
+
+ implement {
+ name = "featureattribute",
+ arguments = "string",
+ actions = { contextnumber, context }
+ }
+
+ implement {
+ name = "setfontfeature",
+ arguments = "string",
+ actions = function(tag) texsetattribute(0,contextnumber(tag)) end
+ }
+
+ implement {
+ name = "resetfontfeature",
+ arguments = { 0, 0 },
+ actions = texsetattribute,
+ }
+
+ implement {
+ name = "setfontofid",
+ arguments = "integer",
+ actions = function(id)
+ context_getvalue(csnames[id])
end
- texsetattribute(0,done or 0)
- else
- local full = parent .. how .. name
- local hash = "feature > " .. full
- local done = cache[hash]
- if not done then
- local n = setups[full]
- if n then
- -- already defined
- else
- n = mergecontextfeatures(parent,name,how,full)
+ }
+
+ implement {
+ name = "definefontfeature",
+ arguments = { "string", "string", "string" },
+ actions = presetcontext
+ }
+
+ local cache = { }
+
+ local hows = {
+ ["+"] = "add",
+ ["-"] = "subtract",
+ ["="] = "replace",
+ }
+
+ local function setfeature(how,parent,name,font) -- 0/1 test temporary for testing
+ if not how or how == 0 then
+ if trace_features and texgetattribute(0) ~= 0 then
+ report_cummulative("font %!font:name!, reset",fontdata[font or true])
end
- done = registercontextfeature(hash,full,how)
- cache[hash] = done
- if trace_features then
- report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]])
+ texsetattribute(0,0)
+ elseif how == true or how == 1 then
+ local hash = "feature > " .. parent
+ local done = cache[hash]
+ if trace_features and done then
+ report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]])
end
+ texsetattribute(0,done or 0)
+ else
+ local full = parent .. how .. name
+ local hash = "feature > " .. full
+ local done = cache[hash]
+ if not done then
+ local n = setups[full]
+ if n then
+ -- already defined
+ else
+ n = mergecontextfeatures(parent,name,how,full)
+ end
+ done = registercontextfeature(hash,full,how)
+ cache[hash] = done
+ if trace_features then
+ report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]])
+ end
+ end
+ texsetattribute(0,done)
end
- texsetattribute(0,done)
end
-end
-function commands.featurelist(...)
- context(fonts.specifiers.contexttostring(...))
-end
+ local function resetfeature()
+ if trace_features and texgetattribute(0) ~= 0 then
+ report_cummulative("font %!font:name!, reset",fontdata[true])
+ end
+ texsetattribute(0,0)
+ end
-function commands.registerlanguagefeatures()
- local specifications = languages.data.specifications
- for i=1,#specifications do
- local specification = specifications[i]
- local language = specification.opentype
- if language then
- local script = specification.opentypescript or specification.script
- if script then
- local context = specification.context
- if type(context) == "table" then
- for i=1,#context do
- definecontext(context[i], { language = language, script = script})
+ local function registerlanguagefeatures()
+ local specifications = languages.data.specifications
+ for i=1,#specifications do
+ local specification = specifications[i]
+ local language = specification.opentype
+ if language then
+ local script = specification.opentypescript or specification.script
+ if script then
+ local context = specification.context
+ if type(context) == "table" then
+ for i=1,#context do
+ definecontext(context[i], { language = language, script = script})
+ end
+ elseif type(context) == "string" then
+ definecontext(context, { language = language, script = script})
end
- elseif type(context) == "string" then
- definecontext(context, { language = language, script = script})
end
end
end
end
+
+ implement { name = "resetfeature", actions = resetfeature }
+ implement { name = "addfeature", actions = setfeature, arguments = { "'+'", "string", "string" } }
+ implement { name = "subtractfeature", actions = setfeature, arguments = { "'-'", "string", "string" } }
+ implement { name = "replacefeature", actions = setfeature, arguments = { "'='", "string", "string" } }
+ implement { name = "revivefeature", actions = setfeature, arguments = { true, "string" } }
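-- [editor's sketch, not part of the patch] the quoted entries ("'+'", "'-'",
-- "'='", true) are constants injected before the scanned strings, so a call
-- like (macro name assumed):
--
--   \clf_addfeature{default}{smallcaps}
--
-- ends up as setfeature("+","default","smallcaps").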
+
+ implement {
+ name = "featurelist",
+ actions = { fonts.specifiers.contexttostring, context },
+ arguments = { "string", "'otf'", "string", "'yes'", "'no'", true }
+ }
+
+ implement {
+ name = "registerlanguagefeatures",
+ actions = registerlanguagefeatures,
+ }
+
end
-- a fontkern plug:
-local copy_node = node.copy
-local kern = nodes.pool.register(nodes.pool.kern())
+local copy_node = nuts.copy
+local kern = nuts.pool.register(nuts.pool.kern())
-node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
+setattr(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
nodes.injections.installnewkern(function(k)
local c = copy_node(kern)
- c.kern = k
+ setfield(c,"kern",k)
return c
end)
-directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end)
+directives.register("nodes.injections.fontkern", function(v) setfield(kern,"subtype",v and 0 or 1) end)
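-- [editor's sketch, not part of the patch] the directive can be toggled at run
-- time, e.g.:
--
--   \enabledirectives [nodes.injections.fontkern]   % injected kerns get subtype 0
--   \disabledirectives[nodes.injections.fontkern]   % injected kerns get subtype 1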
--- here
+-- here (todo: closure)
local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-local otffeatures = fonts.constructors.newfeatures("otf")
+----- otffeatures = constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
local analyzers = fonts.analyzers
@@ -1926,7 +2215,7 @@ local methods = analyzers.methods
local unsetvalue = attributes.unsetvalue
-local traverse_by_id = node.traverse_id
+local traverse_by_id = nuts.traverse_id
local a_color = attributes.private('color')
local a_colormodel = attributes.private('colormodel')
@@ -1937,7 +2226,7 @@ local glyph_code = nodes.nodecodes.glyph
local states = analyzers.states
-local names = {
+local colornames = {
[states.init] = "font:1",
[states.medi] = "font:2",
[states.fina] = "font:3",
@@ -1953,16 +2242,17 @@ local names = {
local function markstates(head)
if head then
- local model = head[a_colormodel] or 1
+ head = tonut(head)
+ local model = getattr(head,a_colormodel) or 1
for glyph in traverse_by_id(glyph_code,head) do
- local a = glyph[a_state]
+ local a = getprop(glyph,a_state)
if a then
- local name = names[a]
+ local name = colornames[a]
if name then
local color = m_color[name]
if color then
- glyph[a_colormodel] = model
- glyph[a_color] = color
+ setattr(glyph,a_colormodel,model)
+ setattr(glyph,a_color,color)
end
end
end
@@ -2005,9 +2295,137 @@ registerotffeature { -- adapts
function methods.nocolor(head,font,attr)
for n in traverse_by_id(glyph_code,head) do
- if not font or n.font == font then
- n[a_color] = unsetvalue
+ if not font or getfont(n) == font then
+ setattr(n,a_color,unsetvalue)
end
end
return head, true
end
+
+local function purefontname(name)
+ if type(name) == "number" then
+ name = getfontname(name)
+ end
+ if type(name) == "string" then
+ return basename(name)
+ end
+end
+
+implement {
+ name = "purefontname",
+ actions = { purefontname, context },
+ arguments = "string",
+}
+
+local list = storage.shared.bodyfontsizes or { }
+storage.shared.bodyfontsizes = list
+
+implement {
+ name = "registerbodyfontsize",
+ arguments = "string",
+ actions = function(size)
+ list[size] = true
+ end
+}
+
+implement {
+ name = "getbodyfontsizes",
+ arguments = "string",
+ actions = function(separator)
+ context(concat(sortedkeys(list),separator))
+ end
+}
+
+implement {
+ name = "processbodyfontsizes",
+ arguments = "string",
+ actions = function(command)
+ local keys = sortedkeys(list)
+ if command then
+ local action = context[command]
+ for i=1,#keys do
+ action(keys[i])
+ end
+ else
+ context(concat(keys,","))
+ end
+ end
+}
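-- [editor's sketch, not part of the patch] illustrative use of the three helpers
-- above (the \clf_ macro names and \somecommand are assumptions):
--
--   \clf_registerbodyfontsize{11pt}
--   \clf_getbodyfontsizes{,}              % -> 10pt,11pt,12pt (whatever is registered)
--   \clf_processbodyfontsizes{somecommand}% expands \somecommand{<size>} per entry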
+
+implement {
+ name = "cleanfontname",
+ actions = { names.cleanname, context },
+ arguments = "string"
+}
+
+implement {
+ name = "fontlookupinitialize",
+ actions = names.lookup,
+ arguments = "string",
+}
+
+implement {
+ name = "fontlookupnoffound",
+ actions = { names.noflookups, context },
+}
+
+implement {
+ name = "fontlookupgetkeyofindex",
+ actions = { names.getlookupkey, context },
+ arguments = { "string", "integer"}
+}
+
+implement {
+ name = "fontlookupgetkey",
+ actions = { names.getlookupkey, context },
+ arguments = "string"
+}
+
+-- this might move to a runtime module:
+
+function commands.showchardata(n)
+ local tfmdata = fontdata[currentfont()]
+ if tfmdata then
+ if type(n) == "string" then
+ n = utfbyte(n)
+ end
+ local chr = tfmdata.characters[n]
+ if chr then
+ report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false))
+ end
+ end
+end
+
+function commands.showfontparameters(tfmdata)
+ -- this will become more clever
+ local tfmdata = tfmdata or fontdata[currentfont()]
+ if tfmdata then
+ local parameters = tfmdata.parameters
+ local mathparameters = tfmdata.mathparameters
+ local properties = tfmdata.properties
+ local hasparameters = parameters and next(parameters)
+ local hasmathparameters = mathparameters and next(mathparameters)
+ if hasparameters then
+ report_status("%s @ %s => text parameters => %s",properties.fullname,parameters.size,serialize(parameters,false))
+ end
+ if hasmathparameters then
+ report_status("%s @ %s => math parameters => %s",properties.fullname,parameters.size,serialize(mathparameters,false))
+ end
+ if not hasparameters and not hasmathparameters then
+ report_status("%s @ %s => no text parameters and/or math parameters",properties.fullname,parameters.size)
+ end
+ end
+end
+
+implement {
+ name = "currentdesignsize",
+ actions = function()
+ context(parameters[currentfont()].designsize)
+ end
+}
+
+implement {
+ name = "doifelsefontpresent",
+ actions = { names.exists, commands.doifelse },
+ arguments = "string"
+}
diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua
index e5c5d990c..fdded3c36 100644
--- a/tex/context/base/font-def.lua
+++ b/tex/context/base/font-def.lua
@@ -433,8 +433,8 @@ function definers.read(specification,size,id) -- id can be optional, name can al
elseif trace_defining and type(tfmdata) == "table" then
local properties = tfmdata.properties or { }
local parameters = tfmdata.parameters or { }
- report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
- properties.format, id, properties.name, parameters.size, properties.encodingbytes,
+ report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format or "unknown", id, properties.name, parameters.size, properties.encodingbytes,
properties.encodingname, properties.fullname, file.basename(properties.filename))
end
statistics.stoptiming(fonts)
diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua
index 5305f0736..2e8b722de 100644
--- a/tex/context/base/font-enc.lua
+++ b/tex/context/base/font-enc.lua
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['font-enc'] = {
-- this module is obsolete
+local next = next
local match, gmatch, gsub = string.match, string.gmatch, string.gsub
local setmetatableindex = table.setmetatableindex
@@ -125,7 +126,12 @@ function encodings.make_unicode_vector()
end
end
for name, code in next, characters.synonyms do
- vector[code], hash[name] = name, code
+ if not vector[code] then
+ vector[code] = name
+ end
+ if not hash[name] then
+ hash[name] = code
+ end
end
return containers.write(encodings.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
end
diff --git a/tex/context/base/font-enh.lua b/tex/context/base/font-enh.lua
index 2bf0741f5..3439a434a 100644
--- a/tex/context/base/font-enh.lua
+++ b/tex/context/base/font-enh.lua
@@ -114,24 +114,24 @@ local registerotffeature = otffeatures.register
-- unicodes = {
-- a1 = 0x2701,
-local tosixteen = fonts.mappings.tounicode16
+----- tosixteen = fonts.mappings.tounicode16
local function initializeunicoding(tfmdata)
local goodies = tfmdata.goodies
local newcoding = nil
- local tounicode = false
+ -- local tounicode = false
for i=1,#goodies do
local remapping = goodies[i].remapping
if remapping and remapping.unicodes then
- newcoding = remapping.unicodes -- names to unicodes
- tounicode = remapping.tounicode
+ newcoding = remapping.unicodes -- names to unicodes
+ -- tounicode = remapping.tounicode -- not used
end
end
if newcoding then
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local oldcoding = tfmdata.resources.unicodes
- local tounicodes = tfmdata.resources.tounicode -- index to unicode
+ -- local tounicodes = tfmdata.resources.tounicode -- index to unicode
local originals = { }
for name, newcode in next, newcoding do
local oldcode = oldcoding[name]
@@ -153,15 +153,15 @@ local function initializeunicoding(tfmdata)
else
oldcoding[name] = newcode
end
- if tounicode then
- local description = descriptions[newcode]
- if description then
- local index = description.index
- if not tounicodes[index] then
- tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable)
- end
- end
- end
+ -- if tounicode then
+ -- local description = descriptions[newcode]
+ -- if description then
+ -- local index = description.index
+ -- if not tounicodes[index] then
+ -- tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable)
+ -- end
+ -- end
+ -- end
if trace_unicoding then
if oldcode then
report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode)
diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua
index ede2151d6..b7817c140 100644
--- a/tex/context/base/font-ext.lua
+++ b/tex/context/base/font-ext.lua
@@ -7,19 +7,17 @@ if not modules then modules = { } end modules ['font-ext'] = {
}
local next, type, byte = next, type, string.byte
-local gmatch, concat, format = string.gmatch, table.concat, string.format
-local utfchar = utf.char
-local commands, context = commands, context
-local fonts, utilities = fonts, utilities
+local context = context
+local fonts = fonts
+local utilities = utilities
-local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
-local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
+local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
+local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
local report_expansions = logs.reporter("fonts","expansions")
local report_protrusions = logs.reporter("fonts","protrusions")
-
--- todo: byte(..) => 0xHHHH
+local report_opbd = logs.reporter("fonts","otf opbd")
--[[ldx--
<p>When we implement functions that deal with features, most of them
@@ -42,6 +40,8 @@ local getparameters = utilities.parsers.getparameters
local setmetatableindex = table.setmetatableindex
+local implement = interfaces.implement
+
-- -- -- -- -- --
-- shared
-- -- -- -- -- --
@@ -79,10 +79,6 @@ expansions.vectors = vectors
classes.preset = { stretch = 2, shrink = 2, step = .5, factor = 1 }
-function commands.setupfontexpansion(class,settings)
- getparameters(classes,class,'preset',settings)
-end
-
classes['quality'] = {
stretch = 2, shrink = 2, step = .5, vector = 'default', factor = 1
}
@@ -172,7 +168,11 @@ registerafmfeature {
fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end)
-local report_opbd = logs.reporter("fonts","otf opbd")
+implement {
+ name = "setupfontexpansion",
+ arguments = { "string", "string" },
+ actions = function(class,settings) getparameters(classes,class,'preset',settings) end
+}
-- -- -- -- -- --
-- protrusion
@@ -191,10 +191,6 @@ local vectors = protrusions.vectors
classes.preset = { factor = 1, left = 1, right = 1 }
-function commands.setupfontprotrusion(class,settings)
- getparameters(classes,class,'preset',settings)
-end
-
classes['pure'] = {
vector = 'pure', factor = 1
}
@@ -328,8 +324,10 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local properties = tfmdata.properties
+ local resources = tfmdata.resources
local rawdata = tfmdata.shared.rawdata
local lookuphash = rawdata.lookuphash
+ local lookuptags = resources.lookuptags
local script = properties.script
local language = properties.language
local done, factor, left, right = false, 1, 1, 1
@@ -349,14 +347,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local data = lookuphash[lookup]
if data then
if trace_protrusion then
- report_protrusions("setting left using lfbd lookup %a",lookup)
+ report_protrusions("setting left using lfbd lookup %a",lookuptags[lookup])
end
for k, v in next, data do
-- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
local p = - (v[1] / 1000) * factor * left
characters[k].left_protruding = p
if trace_protrusion then
- report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
+ report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookuptags[lookup],k,p,v)
end
end
done = true
@@ -372,14 +370,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd)
local data = lookuphash[lookup]
if data then
if trace_protrusion then
- report_protrusions("setting right using rtbd lookup %a",lookup)
+ report_protrusions("setting right using rtbd lookup %a",lookuptags[lookup])
end
for k, v in next, data do
-- local p = v[3] / descriptions[k].width -- or 3
local p = (v[1] / 1000) * factor * right
characters[k].right_protruding = p
if trace_protrusion then
- report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
+ report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookuptags[lookup],k,p,v)
end
end
end
@@ -487,6 +485,12 @@ registerafmfeature {
fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) end)
+implement {
+ name = "setupfontprotrusion",
+ arguments = { "string", "string" },
+ actions = function(class,settings) getparameters(classes,class,'preset',settings) end
+}
+
-- -- --
local function initializenostackmath(tfmdata,value)
@@ -502,6 +506,19 @@ registerotffeature {
}
}
+local function initializerealdimensions(tfmdata,value)
+ tfmdata.properties.realdimensions = value and true
+end
+
+registerotffeature {
+ name = "realdimensions",
+    description = "accept negative dimensions",
+ initializers = {
+ base = initializerealdimensions,
+ node = initializerealdimensions,
+ }
+}
+
local function initializeitlc(tfmdata,value) -- hm, always value
if value then
-- the magic 40 and it formula come from Dohyun Kim but we might need another guess
@@ -887,11 +904,17 @@ registerotffeature {
-- a handy helper (might change or be moved to another namespace)
-local new_special = nodes.pool.special
-local new_glyph = nodes.pool.glyph
+local nodepool = nodes.pool
+
+local new_special = nodepool.special
+local new_glyph = nodepool.glyph
+local new_rule = nodepool.rule
local hpack_node = node.hpack
-function fonts.helpers.addprivate(tfmdata,name,characterdata)
+local helpers = fonts.helpers
+local currentfont = font.current
+
+function helpers.addprivate(tfmdata,name,characterdata)
local properties = tfmdata.properties
local privates = properties.privates
local lastprivate = properties.lastprivate
@@ -915,19 +938,19 @@ function fonts.helpers.addprivate(tfmdata,name,characterdata)
return lastprivate
end
-function fonts.helpers.getprivatenode(tfmdata,name)
+local function getprivatenode(tfmdata,name)
local properties = tfmdata.properties
local privates = properties and properties.privates
if privates then
local p = privates[name]
if p then
- local char = tfmdata.characters[p]
+ local char = tfmdata.characters[p]
local commands = char.commands
if commands then
- local fake = hpack_node(new_special(commands[1][2]))
- fake.width = char.width
+ local fake = hpack_node(new_special(commands[1][2]))
+ fake.width = char.width
fake.height = char.height
- fake.depth = char.depth
+ fake.depth = char.depth
return fake
else
-- todo: set current attributes
@@ -937,12 +960,18 @@ function fonts.helpers.getprivatenode(tfmdata,name)
end
end
-function fonts.helpers.hasprivate(tfmdata,name)
+helpers.getprivatenode = getprivatenode
+
+function helpers.hasprivate(tfmdata,name)
local properties = tfmdata.properties
local privates = properties and properties.privates
return privates and privates[name] or false
end
-function commands.getprivatechar(name)
- context(fonts.helpers.getprivatenode(fontdata[font.current()],name))
-end
+implement {
+ name = "getprivatechar",
+ arguments = "string",
+ actions = function(name)
+ context(getprivatenode(fontdata[currentfont()],name))
+ end
+}
diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua
index 48e2167e6..22644f046 100644
--- a/tex/context/base/font-fbk.lua
+++ b/tex/context/base/font-fbk.lua
@@ -19,18 +19,20 @@ local trace_combining_define = false trackers.register("fonts.composing.defi
trackers.register("fonts.combining", "fonts.composing.define") -- for old times sake (and manuals)
trackers.register("fonts.combining.all", "fonts.composing.*") -- for old times sake (and manuals)
-local report_combining = logs.reporter("fonts","combining")
+local report_combining = logs.reporter("fonts","combining")
-local force_combining = false -- just for demo purposes (see mk)
+local force_combining = false -- just for demo purposes (see mk)
-local allocate = utilities.storage.allocate
+local allocate = utilities.storage.allocate
local fonts = fonts
local handlers = fonts.handlers
local constructors = fonts.constructors
-local registerotffeature = handlers.otf.features.register
-local registerafmfeature = handlers.afm.features.register
+local otf = handlers.otf
+local afm = handlers.afm
+local registerotffeature = otf.features.register
+local registerafmfeature = afm.features.register
local unicodecharacters = characters.data
local unicodefallbacks = characters.fallbacks
@@ -302,3 +304,32 @@ end
-- {'special', 'pdf: /Fm\XX\space Do'},
-- {'special', 'pdf: Q'},
-- {'special', 'pdf: Q'},
+
+-- new and experimental
+
+local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
+local noflags = { }
+
+local char_specification = {
+ type = "ligature",
+ features = everywhere,
+ data = characters.splits.char,
+ order = { "char-ligatures" },
+ flags = noflags,
+ prepend = true,
+}
+
+local compat_specification = {
+ type = "ligature",
+ features = everywhere,
+ data = characters.splits.compat,
+ order = { "compat-ligatures" },
+ flags = noflags,
+ prepend = true,
+}
+
+otf.addfeature("char-ligatures", char_specification) -- xlig (extra)
+otf.addfeature("compat-ligatures",compat_specification) -- plig (pseudo)
+
+registerotffeature { name = 'char-ligatures', description = 'unicode char specials to ligatures' }
+registerotffeature { name = 'compat-ligatures', description = 'unicode compat specials to ligatures' }
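+
+-- A minimal usage sketch, assuming the regular feature interface: once the two
+-- features above are registered they can be enabled like any other feature,
+-- for instance with
+--
+--   \definefontfeature[default][default][char-ligatures=yes,compat-ligatures=yes]
+--
+-- so that the unicode char/compat specials are mapped onto ligatures when a
+-- font is defined.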
diff --git a/tex/context/base/font-fea.mkvi b/tex/context/base/font-fea.mkvi
index 8d985b411..5f6f1b5d5 100644
--- a/tex/context/base/font-fea.mkvi
+++ b/tex/context/base/font-fea.mkvi
@@ -123,13 +123,13 @@
{\dotripleargument\font_basics_define_font_feature}
\def\font_basics_define_font_feature[#featureset][#parent][#settings]%
- {\ctxcommand{definefontfeature("#featureset","#parent","#settings")}}
+ {\clf_definefontfeature{#featureset}{#parent}{#settings}}
\unexpanded\def\fontfeatureslist
{\dodoubleargument\font_basics_features_list}
\def\font_basics_features_list[#name][#separator]% todo: arg voor type
- {\cldcommand{featurelist("#name","otf","\luaescapestring{#separator}","yes","no",true,{"number"})}}
+ {\clf_featurelist{#name}{\detokenize{#separator}}}
\attribute\zerocount\zerocount % first in list, so fast match
@@ -143,11 +143,11 @@
% hashing at this end is slower
-\unexpanded\def\addfeature {\doifnextoptionalelse\font_feature_add_yes \font_feature_add_nop }
-\unexpanded\def\subtractfeature {\doifnextoptionalelse\font_feature_subtract_yes \font_feature_subtract_nop }
-\unexpanded\def\replacefeature {\doifnextoptionalelse\font_feature_replace_yes \font_feature_replace_nop }
-\unexpanded\def\resetandaddfeature{\doifnextoptionalelse\font_feature_reset_add_yes\font_feature_reset_add_nop}
-\unexpanded\def\feature {\doifnextoptionalelse\font_feature_yes \font_feature_nop }
+\unexpanded\def\addfeature {\doifelsenextoptionalcs\font_feature_add_yes \font_feature_add_nop }
+\unexpanded\def\subtractfeature {\doifelsenextoptionalcs\font_feature_subtract_yes \font_feature_subtract_nop }
+\unexpanded\def\replacefeature {\doifelsenextoptionalcs\font_feature_replace_yes \font_feature_replace_nop }
+\unexpanded\def\resetandaddfeature{\doifelsenextoptionalcs\font_feature_reset_add_yes\font_feature_reset_add_nop}
+\unexpanded\def\feature {\doifelsenextoptionalcs\font_feature_yes \font_feature_nop }
\unexpanded\def\font_feature_add_yes [#feature]{\edef\m_font_feature_asked{#feature}\font_feature_add}
\unexpanded\def\font_feature_add_nop #feature{\edef\m_font_feature_asked{#feature}\font_feature_add}
@@ -176,7 +176,7 @@
\fi}
\unexpanded\def\font_feature_add_indeed
- {\ctxcommand{feature("+","\m_font_feature_list","\m_font_feature_asked")}%
+ {\clf_addfeature{\m_font_feature_list}{\m_font_feature_asked}%
\edef\m_font_feature_list{\m_font_feature_list+\m_font_feature_asked}% also + at the lua end
\c_font_feature_state\plusone
\let\currentfeature\m_font_feature_asked}
@@ -191,7 +191,7 @@
\fi}
\unexpanded\def\font_feature_subtract_indeed
- {\ctxcommand{feature("-","\m_font_feature_list","\m_font_feature_asked")}%
+ {\clf_subtractfeature{\m_font_feature_list}{\m_font_feature_asked}%
\edef\m_font_feature_list{\m_font_feature_list-\m_font_feature_asked}% also - at the lua end
\c_font_feature_state\minusone
\let\currentfeature\m_font_feature_asked}
@@ -206,7 +206,7 @@
\fi}
\unexpanded\def\font_feature_replace_indeed
- {\ctxcommand{feature("=","\m_font_feature_list","\m_font_feature_asked")}%
+ {\clf_replacefeature{\m_font_feature_list}{\m_font_feature_asked}%
\edef\m_font_feature_list{\m_font_feature_list=\m_font_feature_asked}% also = at the lua end
\c_font_feature_state\zerocount
\let\currentfeature\m_font_feature_asked}
@@ -220,7 +220,7 @@
{\let\m_font_feature_asked\empty
\let\currentfeature \s!current
\let\m_font_feature_list \s!current
- \ctxcommand{feature(false)}}
+ \clf_resetfeature}
\unexpanded\def\revivefeature
{\ifx\currentfeature\s!current \else
@@ -228,7 +228,7 @@
\fi}
\unexpanded\def\font_feature_revive_indeed
- {\ctxcommand{feature(true,"\m_font_feature_list")}}
+ {\clf_revivefeature{\m_font_feature_list}}
\unexpanded\def\font_feature_reset_add
{\ifnum\c_font_feature_state=\plusone
@@ -240,7 +240,7 @@
\fi}
\unexpanded\def\font_feature_reset_add_indeed
- {\ctxcommand{feature("+","\s!current","\m_font_feature_asked")}%
+ {\clf_addfeature{\s!current}{\m_font_feature_asked}%
\edef\m_font_feature_list{\s!current+\m_font_feature_asked}% also + at the lua end
\c_font_feature_state\plusone
\let\currentfeature\m_font_feature_asked}
@@ -284,12 +284,12 @@
% just for old times sake:
\unexpanded\def\featureattribute#feature%
- {\ctxcommand{featureattribute("#feature")}}
+ {\clf_featureattribute{#feature}}
\unexpanded\def\setfontfeature#feature%
{\edef\currentfeature{#feature}%
\let\m_font_feature_list\currentfeature
- \ctxcommand{setfontfeature("\currentfeature")}}
+ \clf_setfontfeature{\currentfeature}}
\let\resetfontfeature\resetfeature
@@ -324,11 +324,13 @@
% \doifelsecurrentfonthasfeature{kern}{YES}{NO}
\def\doifelsecurrentfonthasfeature#feature%
- {\ctxcommand{doifelsecurrentfonthasfeature("#feature")}}
+ {\clf_doifelsecurrentfonthasfeature{#feature}}
+
+\let\doifcurrentfonthasfeatureelse\doifelsecurrentfonthasfeature
% new:
-\ctxcommand{registerlanguagefeatures()}
+\clf_registerlanguagefeatures
% also new
diff --git a/tex/context/base/font-fil.mkvi b/tex/context/base/font-fil.mkvi
index dcb298619..1a20d1cd8 100644
--- a/tex/context/base/font-fil.mkvi
+++ b/tex/context/base/font-fil.mkvi
@@ -89,11 +89,11 @@
\def\font_basics_define_font_synonym_nop
{\expandafter\let\csname\??fontfile\m_font_name\endcsname\m_font_file
- \doifnextoptionalelse\font_basics_define_font_synonym_nop_opt\font_basics_define_font_synonym_nop_nil}
+ \doifelsenextoptionalcs\font_basics_define_font_synonym_nop_opt\font_basics_define_font_synonym_nop_nil}
\def\font_basics_define_font_synonym_yes
{\expandafter\let\csname\??fontfile\fontclass\m_font_name\endcsname\m_font_file
- \doifnextoptionalelse\font_basics_define_font_synonym_yes_opt\font_basics_define_font_synonym_yes_nil}
+ \doifelsenextoptionalcs\font_basics_define_font_synonym_yes_opt\font_basics_define_font_synonym_yes_nil}
\def\font_basics_define_font_synonym_nop_opt[#specification]%
{\let\p_features \undefined
@@ -241,6 +241,8 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
+\let\doiffontsynonymelse\doifelsefontsynonym
+
%D \macros
%D {tracedfontname}
%D
@@ -290,75 +292,75 @@
% resolve
\def\font_helpers_set_features_yes#name%
- {\ifcsname\??fontfile\fontclass#name\s!features \endcsname\expandafter\let\expandafter\m_font_features % class + symbolic_name
- \csname\??fontfile\fontclass#name\s!features \endcsname\else
- \ifcsname\??fontfile #name\s!features \endcsname\expandafter\let\expandafter\m_font_features % symbolic_name
- \csname\??fontfile #name\s!features \endcsname\else
+ {\ifcsname\??fontfile\fontclass#name\s!features \endcsname \edef\m_font_features % class + symbolic_name
+ {\csname\??fontfile\fontclass#name\s!features \endcsname}\else
+ \ifcsname\??fontfile #name\s!features \endcsname \edef\m_font_features % symbolic_name
+ {\csname\??fontfile #name\s!features \endcsname}\else
\ifcsname\??fontfile\fontclass #name\endcsname\expandafter\font_helpers_set_features_yes % class + parent_name
- \csname\??fontfile\fontclass #name\endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_features_yes % parent_name
- \csname\??fontfile #name\endcsname\else
+ \csname\??fontfile\fontclass #name\endcsname \else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_features_yes % parent_name
+ \csname\??fontfile #name\endcsname \else
\let\m_font_features\empty\fi\fi\fi\fi}
\def\font_helpers_set_fallbacks_yes#name%
- {\ifcsname\??fontfile\fontclass#name\s!fallbacks\endcsname\expandafter\let\expandafter\m_font_fallbacks
- \csname\??fontfile\fontclass#name\s!fallbacks\endcsname\else
- \ifcsname\??fontfile #name\s!fallbacks\endcsname\expandafter\let\expandafter\m_font_fallbacks
- \csname\??fontfile #name\s!fallbacks\endcsname\else
- \ifcsname\??fontfile\fontclass #name\endcsname\expandafter\font_helpers_set_fallbacks_yes
- \csname\??fontfile\fontclass #name\endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_fallbacks_yes
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile\fontclass#name\s!fallbacks\endcsname \edef\m_font_fallbacks
+ {\csname\??fontfile\fontclass#name\s!fallbacks\endcsname}\else
+ \ifcsname\??fontfile #name\s!fallbacks\endcsname \edef\m_font_fallbacks
+ {\csname\??fontfile #name\s!fallbacks\endcsname}\else
+ \ifcsname\??fontfile\fontclass #name\endcsname \expandafter\font_helpers_set_fallbacks_yes
+ \csname\??fontfile\fontclass #name\endcsname \else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_fallbacks_yes
+ \csname\??fontfile #name\endcsname \else
\let\m_font_fallbacks\empty\fi\fi\fi\fi}
\def\font_helpers_set_goodies_yes#name%
- {\ifcsname\??fontfile\fontclass#name\s!goodies \endcsname\expandafter\let\expandafter\m_font_goodies
- \csname\??fontfile\fontclass#name\s!goodies \endcsname\else
- \ifcsname\??fontfile #name\s!goodies \endcsname\expandafter\let\expandafter\m_font_goodies
- \csname\??fontfile #name\s!goodies \endcsname\else
- \ifcsname\??fontfile\fontclass #name\endcsname\expandafter\font_helpers_set_goodies_yes
- \csname\??fontfile\fontclass #name\endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_goodies_yes
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile\fontclass#name\s!goodies \endcsname \edef\m_font_goodies
+ {\csname\??fontfile\fontclass#name\s!goodies \endcsname}\else
+ \ifcsname\??fontfile #name\s!goodies \endcsname \edef\m_font_goodies
+ {\csname\??fontfile #name\s!goodies \endcsname}\else
+ \ifcsname\??fontfile\fontclass #name\endcsname \expandafter\font_helpers_set_goodies_yes
+ \csname\??fontfile\fontclass #name\endcsname \else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_goodies_yes
+ \csname\??fontfile #name\endcsname \else
\let\m_font_goodies\empty\fi\fi\fi\fi}
\def\font_helpers_set_designsize_yes#name%
- {\ifcsname\??fontfile\fontclass#name\s!designsize\endcsname\expandafter\let\expandafter\m_font_designsize
- \csname\??fontfile\fontclass#name\s!designsize\endcsname\else
- \ifcsname\??fontfile #name\s!designsize\endcsname\expandafter\let\expandafter\m_font_designsize
- \csname\??fontfile #name\s!designsize\endcsname\else
- \ifcsname\??fontfile\fontclass #name\endcsname\expandafter\font_helpers_set_designsize_yes
- \csname\??fontfile\fontclass #name\endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_designsize_yes
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile\fontclass#name\s!designsize\endcsname \edef\m_font_designsize
+ {\csname\??fontfile\fontclass#name\s!designsize\endcsname}\else
+ \ifcsname\??fontfile #name\s!designsize\endcsname \edef\m_font_designsize
+ {\csname\??fontfile #name\s!designsize\endcsname}\else
+ \ifcsname\??fontfile\fontclass #name\endcsname \expandafter\font_helpers_set_designsize_yes
+ \csname\??fontfile\fontclass #name\endcsname \else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_designsize_yes
+ \csname\??fontfile #name\endcsname \else
\let\m_font_designsize\empty\fi\fi\fi\fi}
\def\font_helpers_set_features_nop#name%
- {\ifcsname\??fontfile#name\s!features \endcsname\expandafter\let\expandafter\m_font_features
- \csname\??fontfile#name\s!features \endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_features_nop
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile#name\s!features \endcsname \edef\m_font_features
+ {\csname\??fontfile#name\s!features \endcsname}\else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_features_nop
+ \csname\??fontfile #name\endcsname \else
\let\m_font_features\empty\fi\fi}
\def\font_helpers_set_fallbacks_nop#name%
- {\ifcsname\??fontfile#name\s!fallbacks\endcsname\expandafter\let\expandafter\m_font_fallbacks
- \csname\??fontfile#name\s!fallbacks\endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_fallbacks_nop
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile#name\s!fallbacks\endcsname \edef\m_font_fallbacks
+ {\csname\??fontfile#name\s!fallbacks\endcsname}\else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_fallbacks_nop
+ \csname\??fontfile #name\endcsname \else
\let\m_font_fallbacks\empty\fi\fi}
\def\font_helpers_set_goodies_nop#name%
- {\ifcsname\??fontfile#name\s!goodies \endcsname\expandafter\let\expandafter\m_font_goodies
- \csname\??fontfile#name\s!goodies \endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_goodies_nop
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile#name\s!goodies \endcsname \edef\m_font_goodies
+ {\csname\??fontfile#name\s!goodies \endcsname}\else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_goodies_nop
+ \csname\??fontfile #name\endcsname \else
\let\m_font_goodies\empty\fi\fi}
\def\font_helpers_set_designsize_nop#name%
- {\ifcsname\??fontfile#name\s!designsize\endcsname\expandafter\let\expandafter\m_font_designsize
- \csname\??fontfile#name\s!designsize\endcsname\else
- \ifcsname\??fontfile #name\endcsname\expandafter\font_helpers_set_designsize_nop
- \csname\??fontfile #name\endcsname\else
+ {\ifcsname\??fontfile#name\s!designsize\endcsname \edef\m_font_designsize
+ {\csname\??fontfile#name\s!designsize\endcsname}\else
+ \ifcsname\??fontfile #name\endcsname \expandafter\font_helpers_set_designsize_nop
+ \csname\??fontfile #name\endcsname \else
\let\m_font_designsize\empty\fi\fi}
\def\font_helpers_update_font_parameters_yes
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index 7131ecad5..073d9ed57 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -9,10 +9,12 @@ if not modules then modules = { } end modules ['font-gds'] = {
-- depends on ctx
local type, next, tonumber = type, next, tonumber
-local gmatch, format, lower, find, splitup = string.gmatch, string.format, string.lower, string.find, string.splitup
-local texsp = tex.sp
+local gmatch, lower, find, splitup = string.gmatch, string.lower, string.find, string.splitup
-local fonts, nodes, attributes, node = fonts, nodes, attributes, node
+local fonts = fonts
+local nodes = nodes
+local attributes = attributes
+local node = node
local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
local report_goodies = logs.reporter("fonts","goodies")
@@ -20,6 +22,11 @@ local report_goodies = logs.reporter("fonts","goodies")
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
+local implement = interfaces.implement
+
+local texsp = tex.sp
+local formatters = string.formatters
+
local otf = fonts.handlers.otf
local afm = fonts.handlers.afm
local tfm = fonts.handlers.tfm
@@ -46,7 +53,12 @@ local findfile = resolvers.findfile
local glyph_code = nodes.nodecodes.glyph
-local traverse_id = nodes.traverse_id
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local traverse_id = nuts.traverse_id
function fontgoodies.report(what,trace,goodies)
if trace_goodies or trace then
@@ -273,7 +285,7 @@ local function setcolorscheme(tfmdata,scheme)
end
elseif type(name) == "number" then
reverse[name] = i
- elseif find(name,":") then
+ elseif find(name,":",1,true) then
local start, stop = splitup(name,":")
start = tonumber(start)
stop = tonumber(stop)
@@ -311,16 +323,16 @@ local setnodecolor = nodes.tracers.colors.set
-- function colorschemes.coloring(head)
-- local lastfont, lastscheme
-- local done = false
--- for n in traverse_id(glyph_code,head) do
--- local a = n[a_colorscheme]
+-- for n in traverse_id(glyph_code,tonut(head)) do
+-- local a = getattr(n,a_colorscheme)
-- if a then
--- local f = n.font
+-- local f = getfont(n)
-- if f ~= lastfont then
-- lastscheme = fontproperties[f].colorscheme
-- lastfont = f
-- end
-- if lastscheme then
--- local sc = lastscheme[n.char]
+-- local sc = lastscheme[getchar(n)]
-- if sc then
-- done = true
-- setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow
@@ -338,21 +350,21 @@ local setnodecolor = nodes.tracers.colors.set
-- local lastattr = nil
-- local lastscheme = nil
-- local lastprefix = nil
--- local done = nil
--- for n in traverse_id(glyph_code,head) do
--- local a = n[a_colorscheme]
+-- local done = nil
+-- for n in traverse_id(glyph_code,tonut(head)) do
+-- local a = getattr(n,a_colorscheme)
-- if a then
-- if a ~= lastattr then
-- lastattr = a
-- lastprefix = "colorscheme:" .. a .. ":"
-- end
--- local f = n.font
+-- local f = getfont(n)
-- if f ~= lastfont then
-- lastfont = f
-- lastscheme = fontproperties[f].colorscheme
-- end
-- if lastscheme then
--- local sc = lastscheme[n.char]
+-- local sc = lastscheme[getchar(n)]
-- if sc then
-- setnodecolor(n,lastprefix .. sc) -- slow
-- done = true
@@ -384,10 +396,10 @@ function colorschemes.coloring(head)
local lastcache = nil
local lastscheme = nil
local done = nil
- for n in traverse_id(glyph_code,head) do
- local a = n[a_colorscheme]
+ for n in traverse_id(glyph_code,tonut(head)) do
+ local a = getattr(n,a_colorscheme)
if a then
- local f = n.font
+ local f = getfont(n)
if f ~= lastfont then
lastfont = f
lastscheme = fontproperties[f].colorscheme
@@ -397,7 +409,7 @@ function colorschemes.coloring(head)
lastcache = cache[a]
end
if lastscheme then
- local sc = lastscheme[n.char]
+ local sc = lastscheme[getchar(n)]
if sc then
setnodecolor(n,lastcache[sc]) -- we could inline this one
done = true
@@ -424,7 +436,7 @@ local function setextrafeatures(tfmdata)
addotffeature(tfmdata.shared.rawdata,feature,specification)
registerotffeature {
name = feature,
- description = format("extra: %s",feature)
+ description = formatters["extra: %s"](feature)
}
end
end
@@ -814,8 +826,17 @@ end
-- interface
-commands.loadfontgoodies = fontgoodies.load
-commands.enablefontcolorschemes = colorschemes.enable
+implement {
+ name = "loadfontgoodies",
+ actions = fontgoodies.load,
+ arguments = "string"
+}
+
+implement {
+ name = "enablefontcolorschemes",
+ onlyonce = true,
+ actions = colorschemes.enable
+}
-- weird place ... depends on math
@@ -848,7 +869,7 @@ local function setkeepligatures(tfmdata,value)
if letterspacing then
local keptligatures = letterspacing.keptligatures
if keptligatures then
- local unicodes = tfmdata.resources.unicodes
+ local unicodes = tfmdata.resources.unicodes -- so we accept names
local hash = { }
for k, v in next, keptligatures do
local u = unicodes[k]
diff --git a/tex/context/base/font-gds.mkvi b/tex/context/base/font-gds.mkvi
index 6d9798428..e9c34e807 100644
--- a/tex/context/base/font-gds.mkvi
+++ b/tex/context/base/font-gds.mkvi
@@ -46,10 +46,10 @@
\definesystemattribute[colorscheme][public]
\unexpanded\def\loadfontgoodies[#filename]%
- {\ctxcommand{loadfontgoodies("#filename")}}
+ {\clf_loadfontgoodies{#filename}}
\unexpanded\def\setfontcolorscheme % will move to the lua end
- {\ctxcommand{enablefontcolorschemes()}%
+ {\clf_enablefontcolorschemes
\unexpanded\xdef\setfontcolorscheme[##1]{\attribute\colorschemeattribute##1\relax}%
\setfontcolorscheme}
diff --git a/tex/context/base/font-hsh.lua b/tex/context/base/font-hsh.lua
index 1b0dd08b8..07acf2138 100644
--- a/tex/context/base/font-hsh.lua
+++ b/tex/context/base/font-hsh.lua
@@ -35,6 +35,8 @@ local italics = hashes.italics or allocate()
local lastmathids = hashes.lastmathids or allocate()
local dynamics = hashes.dynamics or allocate()
local unicodes = hashes.unicodes or allocate()
+local originals = hashes.originals or allocate()
+local modes = hashes.modes or allocate()
hashes.characters = characters
hashes.descriptions = descriptions
@@ -52,6 +54,8 @@ hashes.italics = italics
hashes.lastmathids = lastmathids
hashes.dynamics = dynamics
hashes.unicodes = unicodes
+hashes.originals = originals
+hashes.modes = modes
local nodepool = nodes.pool
local dummyglyph = nodepool.register(nodepool.glyph())
@@ -261,21 +265,41 @@ setmetatableindex(dynamics, function(t,k)
end
end)
-setmetatableindex(unicodes, function(t,k)
+setmetatableindex(unicodes, function(t,k) -- always a unicode
+ if k == true then
+ return unicodes[currentfont()]
+ else
+ local resources = resources[k]
+ local unicodes = resources and resources.unicodes or { }
+ t[k] = unicodes
+ return unicodes
+ end
+end)
+
+setmetatableindex(originals, function(t,k) -- always a unicode
if k == true then
return originals[currentfont()]
else
- local resources = resources[k]
- local originals = resources and resources.originals or { }
- local characters = characters[k]
- local unicodes = { }
- setmetatableindex(unicodes,function(t,k)
- local v = originals[characters[k].index] or k
- t[k] = v
+ local resolved = { }
+ setmetatableindex(resolved,function(t,name)
+ local u = unicodes[k][name]
+ local d = u and descriptions[k][u]
+ local v = d and d.unicode or u or 0 -- so we return notdef (at least for the moment)
+ t[name] = u
return v
end)
- t[k] = unicodes
- return unicodes
+ t[k] = resolved
+ return resolved
+ end
+end)
+
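+-- A usage sketch: for a given font id the originals hash resolves glyph names
+-- to unicodes on demand, so originals[id]["eacute"] (assuming a font that uses
+-- that glyph name) first looks the name up in resources.unicodes and then
+-- takes the unicode from the matching description, falling back to that value
+-- or 0 (notdef) when nothing is known.
+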
+setmetatableindex(modes, function(t,k)
+ if k == true then
+ return modes[currentfont()]
+ else
+ local mode = properties[k].mode or "base"
+ t[k] = mode
+ return mode
end
end)
diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua
index 884b22474..c547f89ac 100644
--- a/tex/context/base/font-ini.lua
+++ b/tex/context/base/font-ini.lua
@@ -29,4 +29,4 @@ fonts.readers = { }
fonts.definers = { methods = { } }
fonts.loggers = { register = function() end }
-fontloader.totable = fontloader.to_table
+fontloader.totable = fontloader.to_table -- not used
diff --git a/tex/context/base/font-ini.mkvi b/tex/context/base/font-ini.mkvi
index 521901e05..c04952a9e 100644
--- a/tex/context/base/font-ini.mkvi
+++ b/tex/context/base/font-ini.mkvi
@@ -363,7 +363,7 @@
\let\thedefinedfont\relax % not to be confused with \everydefinefont
\unexpanded\def\definedfont
- {\doifnextoptionalelse\font_basics_defined_font_yes\font_basics_defined_font_nop}
+ {\doifelsenextoptionalcs\font_basics_defined_font_yes\font_basics_defined_font_nop}
\def\font_basics_defined_font_yes[#specification]%
{\c_font_feature_inheritance_mode\c_font_feature_inheritance_fontonly
@@ -419,24 +419,39 @@
%D When we assign for instance 12pt to a \DIMENSION\ register the \type
%D {\the}'d value comes out as 12.0pt, which is often not the way users
%D specify the bodyfont size. Therefore we use normalized values. They
-%D are caches to save overhead in \LUA\ calls.
+%D are cached to save overhead in \LUA\ calls.
-\setnewconstant\fontdigits\plustwo
+% \setnewconstant\fontdigits\plustwo % from now on always 2
\installcorenamespace{fontnormalizedbody}
+% \def\normalizebodyfontsize#macro#body%
+% {\expandafter\let\expandafter#macro\csname\??fontnormalizedbody\number\fontdigits:\number\dimexpr#body\endcsname
+% \ifx#macro\relax
+% \normalizebodyfontsize_indeed#macro{#body}%
+% \fi}
+%
+% \def\normalizebodyfontsize_indeed#macro#body%
+% {\edef#macro{\ctxcommand{nbfs(\number\dimexpr#body,\number\fontdigits)}}%
+% \global\expandafter\let\csname\??fontnormalizedbody\number\fontdigits:\number\dimexpr#body\endcsname#macro}
+%
+% \def\thenormalizedbodyfontsize#body%
+% {\ctxcommand{nbfs(\number\dimexpr#body\relax,\number\fontdigits)}}
+%
+% caching is less relevant now
+
\def\normalizebodyfontsize#macro#body%
- {\expandafter\let\expandafter#macro\csname\??fontnormalizedbody\number\fontdigits:\number\dimexpr#body\endcsname
+ {\expandafter\let\expandafter#macro\csname\??fontnormalizedbody\number\dimexpr#body\endcsname
\ifx#macro\relax
\normalizebodyfontsize_indeed#macro{#body}%
\fi}
\def\normalizebodyfontsize_indeed#macro#body%
- {\edef#macro{\ctxcommand{nbfs(\number\dimexpr#body,\number\fontdigits)}}%
- \global\expandafter\let\csname\??fontnormalizedbody\number\fontdigits:\number\dimexpr#body\endcsname#macro}
+ {\edef#macro{\clf_nbfs\dimexpr#body\relax}%
+ \global\expandafter\let\csname\??fontnormalizedbody\number\dimexpr#body\endcsname#macro}
\def\thenormalizedbodyfontsize#body%
- {\ctxcommand{nbfs(\number\dimexpr#body\relax,\number\fontdigits)}}
+ {\clf_nbfs\dimexpr#body\relax}
\edef\normalizedglobalbodyfontsize{\thenormalizedbodyfontsize\bodyfontsize}
\edef\normalizedlocalbodyfontsize {\thenormalizedbodyfontsize\bodyfontsize}
@@ -723,7 +738,9 @@
\unexpanded\def\font_helpers_low_level_define#specification#csname%
{% we can now set more at the lua end
- \ctxcommand{definefont_one(\!!bs\luaescapestring{#specification}\!!es)}% the escapestring catches at \somedimen
+ \global\let\somefontname\defaultfontfile
+ \let\somefontsize\empty
+ \clf_definefont_one{\detokenize\expandafter{\normalexpanded{#specification}}}% the escapestring catches at \somedimen
% sets \scaledfontmode and \somefontname and \somefontsize
\ifcase\fontface\relax
% \let\v_font_size_absolute\textface % fontbody
@@ -774,39 +791,43 @@
\else
\d_font_scaled_text_face\textface
\fi
- \edef\somefontspec{at \number\d_font_scaled_font_size sp}% probably no longer used, needs checking
+ \edef\somefontspec{at \number\d_font_scaled_font_size sp}%
\edef\somefontfile{\truefontname\somefontname}%
\ifx\somefontfile\s!unknown
\edef\somefontfile{\defaultfontfile}%
\fi
- \ifx\somefontfile\s!unknown
- \edef\somefontfile{\defaultfontfile}%
- \fi
\font_helpers_update_font_parameters
\font_helpers_update_font_class_parameters
% \writestatus{fonts}{low level define: #csname/\somefontfile/\number\d_font_scaled_font_size/\fontface/\number\d_font_scaled_text_face}%
- \ctxcommand{definefont_two(
- \ifx\fontclass\empty false\else true\fi,
- "#csname",
- \!!bs\somefontfile\!!es,
- \number\d_font_scaled_font_size,
- \number\c_font_feature_inheritance_mode,
- "\m_font_class_features",
- "\m_font_features",
- "\m_font_class_fallbacks",
- "\m_font_fallbacks",
- \fontface, % 1/2/3: text script scriptscript 0/4/5: body x xx
- \number\d_font_scaled_text_face,
- "\number\relativefontid", % experiment
- "\m_font_class_goodies", % experiment (not yet used)
- "\m_font_goodies",
- "\m_font_class_designsize",
- "\m_font_designsize",
- \number\scaledfontmode
- )}%
- \edef\somefontspec{at \number\scaledfontsize sp}% we need the resolved designsize (for fallbacks)
- \expandafter\let\expandafter\lastrawfontcall\csname#csname\endcsname
- \the\everydefinefont
+ \clf_definefont_two
+ \ifx\fontclass\empty\s!false\else\s!true\fi
+ {#csname}%
+ {\somefontfile}%
+ \d_font_scaled_font_size
+ \c_font_feature_inheritance_mode
+ {\m_font_class_features}%
+ {\m_font_features}%
+ {\m_font_class_fallbacks}%
+ {\m_font_fallbacks}%
+ \fontface
+ \d_font_scaled_text_face
+ \relativefontid
+ {\m_font_class_goodies}%
+ {\m_font_goodies}%
+ {\m_font_class_designsize}%
+ {\m_font_designsize}%
+ \scaledfontmode
+ \relax
+ \ifcase\scaledfontsize
+ %\scaledfontsize\plusone
+ \let\somefontspec\empty
+ \let\lastrawfontcall\relax
+ \expandafter\let\csname#csname\endcsname\relax
+ \else
+ \edef\somefontspec{at \number\scaledfontsize sp}% we need the resolved designsize (for fallbacks)
+ \expandafter\let\expandafter\lastrawfontcall\csname#csname\endcsname
+ \the\everydefinefont
+ \fi
\c_font_feature_inheritance_mode\c_font_feature_inheritance_default}
\def\font_helpers_check_body_scale#fontsize% gets character (x xx a etc)
@@ -830,10 +851,10 @@
%D The following macros are used at the \LUA\ end. Watch the \type {\normal}
%D hackery: this makes the mkvi parser happy.
-\normaldef\fntsetdefname {\global\let\somefontname\defaultfontfile} % do before calling
-\normaldef\fntsetsomename{\normalgdef\somefontname} % takes argument
-\normaldef\fntsetnopsize {\let\somefontsize\empty} % do before calling
-\normaldef\fntsetsomesize{\normaldef\somefontsize} % takes argument
+% \normaldef\fntsetdefname {\global\let\somefontname\defaultfontfile} % do before calling
+% \normaldef\fntsetnopsize {\let\somefontsize\empty} % do before calling
+% \normaldef\fntsetsomename{\normalgdef\somefontname} % takes argument
+% \normaldef\fntsetsomesize{\normaldef\somefontsize} % takes argument
\newif\ifskipfontcharacteristics \skipfontcharacteristicstrue
@@ -878,7 +899,7 @@
\fi}
\def\font_basics_define_font_a[#name][#specification][#settings]% [name][spec][1.6 | line=10pt | setup_id]
- {\doifsetupselse{#settings}%
+ {\doifelsesetups{#settings}%
{\setuvalue{#name}{\font_basics_define_font_with_setups {#name}{#specification}{#settings}}}
{\setuvalue{#name}{\font_basics_define_font_with_parameters{#name}{#specification}{#settings}}}}
@@ -900,11 +921,19 @@
\unexpanded\def\definefrozenfont
{\dotripleempty\font_basics_define_frozen_font}
+% \def\font_basics_define_frozen_font[#name][#specification][#settings]%
+% {\begingroup
+% \font_basics_define_font[#name][#specification][#settings]%
+% \csname#name\endcsname
+% \expandafter\expandafter\expandafter\endgroup\expandafter\let\csname#name\endcsname\lastrawfontcall}
+
\def\font_basics_define_frozen_font[#name][#specification][#settings]%
{\begingroup
\font_basics_define_font[#name][#specification][#settings]%
\csname#name\endcsname
- \expandafter\endgroup\expandafter\let\csname#name\endcsname\lastrawfontcall}
+ \global\let\lastglobalrawfontcall\lastrawfontcall
+ \endgroup
+ \expandafter\let\csname#name\endcsname\lastglobalrawfontcall}
%D The instance namespace protection makes the switch local so that we can redefine a
%D logical name and/or change the size in between.
@@ -1158,7 +1187,9 @@
\installcorenamespace{fontenvironmentknown}
-\let\bodyfontenvironmentlist\empty % used in font-run (might change)
+% \let\bodyfontenvironmentlist\empty % used in font-run (might change)
+
+\newtoks\bodyfontenvironmentlist
\def\font_helpers_register_environment#class#body%
{\expandafter\let\csname\??fontenvironmentknown#class#body\endcsname\empty}
@@ -1179,6 +1210,12 @@
%D a bodyfont is loaded but changing them afterwards can be sort of tricky as
%D values are not consulted afterwards.
+\def\processbodyfontenvironmentlist#1% no \unexpanded as then we cannot use it in alignments
+ {\clf_processbodyfontsizes{\strippedcsname#1}}
+
+\def\bodyfontenvironmentlist
+ {\clf_getbodyfontsizes}
+
\def\font_basics_define_body_font_environment_class[#class][#body][#settings]%
{\edef\m_font_body{#body}%
\ifx\m_font_body\s!default
@@ -1187,7 +1224,8 @@
\else
\normalizebodyfontsize\m_font_body_normalized\m_font_body
\font_basics_define_body_font_environment_size[#class][\m_font_body_normalized][#settings]%
- \addtocommalist\m_font_body_normalized\bodyfontenvironmentlist
+ %\addtocommalist\m_font_body_normalized\bodyfontenvironmentlist
+ \clf_registerbodyfontsize{\m_font_body_normalized}%
\fi}
%D The empty case uses the same code but needs to ignore the current class
@@ -1367,19 +1405,19 @@
{\font_basics_define_body_font_a[bwhatever][\s!rm][]}
\unexpanded\def\font_basics_define_body_font_a[#whatever]%
- {\doifnumberelse{#whatever}%
+ {\doifelsenumber{#whatever}%
\font_basics_define_body_font_body
\font_basics_define_body_font_name
[#whatever]}
\unexpanded\def\font_basics_define_body_font_body[#body][#style][#specification]%
- {\doifassignmentelse{#specification}
+ {\doifelseassignment{#specification}
\font_basics_define_body_font_body_assignment
\font_basics_define_body_font_body_identifier
[#body][#style][#specification]}%
\unexpanded\def\font_basics_define_body_font_name[#name][#style][#specification]%
- {\doifassignmentelse{#specification}
+ {\doifelseassignment{#specification}
\font_basics_define_body_font_name_assignment
\font_basics_define_body_font_name_identifier
[#name][#style][#specification]}%
@@ -1640,13 +1678,13 @@
\fi}
\def\font_helpers_set_font_check_size#option%
- {\doifnumberelse{#option}{\font_helpers_check_bodyfont_environment{#option}{#option}}\donothing}
+ {\doifelsenumber{#option}{\font_helpers_check_bodyfont_environment{#option}{#option}}\donothing}
\def\font_helpers_set_font_set_font#method#option% method=1: set, method=2: switch
{\doifsomething{#option}{\font_helpers_set_font_set_font_option{#method}{#option}}}
\def\font_helpers_set_font_set_font_option#method#option%
- {\doifnumberelse{#option}%
+ {\doifelsenumber{#option}%
\font_helpers_set_font_set_font_option_body
\font_helpers_set_font_set_font_option_keyword
{#method}{#option}{#option}}
@@ -1988,8 +2026,29 @@
%D takes a bit more time. Keep in mind that the fontsize is
%D represented by a character or empty.
-\unexpanded\def\tx {\font_helpers_set_current_font_x_alternative \fontalternative}
-\unexpanded\def\txx{\font_helpers_set_current_font_xx_alternative\fontalternative}
+% \unexpanded\def\tx {\font_helpers_set_current_font_x_alternative \fontalternative}
+% \unexpanded\def\txx{\font_helpers_set_current_font_xx_alternative\fontalternative}
+
+\unexpanded\def\tx
+ {\ifmmode
+ \scriptstyle
+ \else
+ \let\fontface\!!plusfour
+ \let\fontalternative\fontalternative
+ \font_helpers_synchronize_font
+ \fi
+ \currentxfontsize\plusone
+ \let\tx\txx}
+
+\unexpanded\def\txx
+ {\ifmmode
+ \scriptscriptstyle
+ \else
+ \let\fontface\!!plusfive
+ \let\fontalternative\fontalternative
+ \font_helpers_synchronize_font
+ \fi
+ \currentxfontsize\plustwo}
\let\normaltx \tx
\let\normaltxx\txx
@@ -2082,7 +2141,7 @@
% \newtoks \everyswitchtobodyfont
\unexpanded\def\setupbodyfont
- {\doifnextoptionalelse\font_basics_setupbodyfont_yes\font_basics_setupbodyfont_nop}
+ {\doifelsenextoptionalcs\font_basics_setupbodyfont_yes\font_basics_setupbodyfont_nop}
\def\font_basics_setupbodyfont_nop
{\restoreglobalbodyfont
@@ -2114,6 +2173,13 @@
\unexpanded\def\switchtobodyfont[#specification]% could become an ifx
{\doifsomething{#specification}{\font_basics_switchtobodyfont{#specification}}}
+
+\unexpanded\def\usebodyfontparameter#1%
+ {\edef\m_font_bodyfont_asked{#1\c!bodyfont}%
+ \ifx\m_font_bodyfont_asked\empty\else
+ \font_basics_switchtobodyfont\m_font_bodyfont_asked
+ \fi}
+
\def\font_helpers_switch_bodyfont_step
{\font_basics_switch_points\m_font_step
\font_basics_switch_style \fontstyle}
@@ -2169,13 +2235,28 @@
%D Handy for manuals:
-\unexpanded\def\fontchar#character%
- {\ctxcommand{fontchar("#character")}}
-%D The next auxilliary macro is an alternative to \type
-%D {\fontname}.
+%D The \type {\tochar} command takes a specification:
+%D
+%D \starttabulate[|l|l|l|]
+%D \NC e \NC entity \NC e:eacute \NC \NR
+%D \NC x \NC hexadecimal unicode \NC x:013D \NC \NR
+%D \NC d \NC decimal unicode \NC d:123 \NC \NR
+%D \NC s \NC hexadecimal index (slot) \NC s:210D \NC \NR
+%D \NC i \NC decimal index \NC i:456 \NC \NR
+%D \NC n \NC name \NC n:eight \NC \NR
+%D \NC c \NC name \NC c:x \NC \NR
+%D \stoptabulate
+%D
+%D This is an expandable command!
+
+\unexpanded\def\fontchar #character{\clf_fontchar{#character}}
+\unexpanded\def\fontcharbyindex #index{\clf_fontcharbyindex#index\relax}
+ \def\tochar #specifications{\clf_tochar{#specifications}} % expanded (also used in edef)
+
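+%D A small sketch of its use, with the examples from the table above: \type
+%D {\tochar{n:eight}} expands to the glyph named \type {eight} in the current
+%D font and \type {\tochar{x:013D}} to the character \type {U+013D}; being
+%D expandable it can also be used inside an \type {\edef}.
+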
+%D The next auxiliary macro is an alternative to \type {\fontname}.
-\def\purefontname#font{\ctxlua{file.basename("\fontname#font"}} % will be function using id
+\def\purefontname#font{\clf_purefontname{\fontname#font}}
%D \macros
%D {switchstyleonly}
@@ -2190,7 +2271,7 @@
%D \stoptyping
\unexpanded\def\switchstyleonly
- {\doifnextoptionalelse\font_basics_switch_style_only_opt\font_basics_switch_style_only_arg}
+ {\doifelsenextoptionalcs\font_basics_switch_style_only_opt\font_basics_switch_style_only_arg}
\def\font_basics_switch_style_only_arg#name% stupid version
{\font_helpers_set_current_font_style{\csname\??fontshortstyle\checkedstrippedcsname#name\endcsname}%
@@ -2348,6 +2429,83 @@
\def\saveddefinedfontid {\number\fontid\font}
\def\saveddefinedfontname{\fontname\font}
+% yes or no:
+% \let\font_basics_check_text_bodyfont_slow\font_basics_check_text_bodyfont
+%
+% \unexpanded\def\font_basics_check_text_bodyfont
+% {\ifproductionrun
+% % not per se \s!..'s
+% \glet\font_basics_check_text_bodyfont \font_basics_check_text_bodyfont_slow
+% \glet\font_basics_check_text_bodyfont_fast\relax
+% \expandafter\font_basics_check_text_bodyfont
+% \else
+% \expandafter\font_basics_check_text_bodyfont_fast
+% \fi}
+%
+% \def\font_basics_check_text_bodyfont_fast#style#alternative#size% size can be empty (checking needed as \bf is already defined)
+% {\setugvalue{#style#size}% \rma
+% {\let\fontstyle#style%
+% \let\fontsize #size%
+% \font_helpers_check_big_math_synchronization % double? better in everymath?
+% \font_helpers_synchronize_font}%
+% \setugvalue{#alternative#size}% \sla
+% {\let\fontalternative#alternative%
+% \let\fontsize #size%
+% \font_helpers_check_big_math_synchronization % double? better in everymath?
+% \font_helpers_synchronize_font}%
+% \setugvalue{#style#alternative#size}% \rmsla
+% {\let\fontstyle #style%
+% \let\fontalternative#alternative%
+% \let\fontsize #size%
+% \font_helpers_check_big_math_synchronization % double? better in everymath?
+% \font_helpers_synchronize_font}%
+% \ifcsname\s!normal#style\endcsname % text/math check
+% \expandafter\let\csname#style\expandafter\endcsname\csname\s!normal#style\endcsname
+% \else
+% \setugvalue{#style}% \rm
+% {\let\fontstyle#style%
+% \font_typescripts_inherit_check\fontstyle
+% \ifmmode\mr\fi % otherwise \rm not downward compatible ... not adapted yet
+% \font_helpers_synchronize_font}%
+% \fi
+% \ifcsname\s!normal#alternative\endcsname % text/math check
+% \expandafter\let\csname#alternative\expandafter\endcsname\csname\s!normal#alternative\endcsname
+% \else
+% \setugvalue{#alternative}% \sl
+% {\let\fontalternative#alternative%
+% \font_helpers_synchronize_font}%
+% \fi
+% \setugvalue{#style\s!x}% \rmx
+% {\csname#style\endcsname\tx}%
+% \setugvalue{#style\s!xx}% \rmxx
+% {\csname#style\endcsname\txx}%
+% \setugvalue{#alternative\s!x}% \slx
+% {\font_helpers_check_nested_x_fontsize
+% \ifmmode
+% \scriptstyle
+% \else
+% \let\fontface\!!plusfour
+% \let\fontalternative#alternative%
+% \font_helpers_synchronize_font
+% \fi
+% \currentxfontsize\plusone
+% \let\tx\txx}%
+% \setugvalue{#alternative\s!xx}% \slxx
+% {\font_helpers_check_nested_x_fontsize
+% \ifmmode
+% \scriptscriptstyle
+% \else
+% \let\fontface\!!plusfive
+% \let\fontalternative#alternative%
+% \font_helpers_synchronize_font
+% \fi
+% \currentxfontsize\plustwo
+% \let\tx\empty
+% \let\txx\empty}%
+% \setugvalue{#style#alternative}% \rmsl
+% {\let\fontstyle #style%
+% \let\fontalternative#alternative%
+% \font_helpers_synchronize_font}}
\protect \endinput
diff --git a/tex/context/base/font-inj.lua b/tex/context/base/font-inj.lua
new file mode 100644
index 000000000..332e92033
--- /dev/null
+++ b/tex/context/base/font-inj.lua
@@ -0,0 +1,1055 @@
+if not modules then modules = { } end modules ['font-inj'] = {
+ version = 1.001,
+ comment = "companion to font-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This property based variant is not faster but looks nicer than the attribute one. We
+-- need to use rawget (which is about 4 times slower than a direct access) but we cannot
+-- get/set that one for our purpose!
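+--
+-- The resulting access pattern, used all over this file (a sketch):
+--
+--   local p = rawget(properties,n)          -- bypass a possible metatable
+--   local i = p and rawget(p,"injections")  -- the per-node injection record
+--   if i then i.leftkern = dx end           -- only touch what already exists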
+
+if not nodes.properties then return end
+
+local next, rawget = next, rawget
+local utfchar = utf.char
+local fastcopy = table.fastcopy
+
+local trace_injections = false trackers.register("fonts.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("fonts","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local newkern = nodepool.kern
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local find_tail = nuts.tail
+
+local properties = nodes.properties.data
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local nofregisteredkerns = 0
+local nofregisteredpairs = 0
+local nofregisteredmarks = 0
+local nofregisteredcursives = 0
+----- markanchors = { } -- one base can have more marks
+local keepregisteredcounts = false
+
+function injections.keepcounts()
+ keepregisteredcounts = true
+end
+
+function injections.resetcounts()
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ keepregisteredcounts = false
+end
+
+-- We need to make sure that a possible metatable will not kick in
+-- unexpectedly.
+
+function injections.reset(n)
+ local p = rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections = nil
+ end
+end
+
+function injections.copy(target,source)
+ local sp = rawget(properties,source)
+ if sp then
+ local tp = rawget(properties,target)
+ local si = rawget(sp,"injections")
+ if si then
+ si = fastcopy(si)
+ if tp then
+ tp.injections = si
+ else
+ properties[target] = {
+ injections = si,
+ }
+ end
+ else
+ if tp then
+ tp.injections = nil
+ end
+ end
+ end
+end
+
+function injections.setligaindex(n,index)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.ligaindex = index
+ else
+ p.injections = {
+ ligaindex = index
+ }
+ end
+ else
+ properties[n] = {
+ injections = {
+ ligaindex = index
+ }
+ }
+ end
+end
+
+function injections.getligaindex(n,default)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) -- hm: nuts or nodes
+ local dx = factor*(exit[1]-entry[1])
+ local dy = -factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ nofregisteredcursives = nofregisteredcursives + 1
+ if rlmode < 0 then
+ dx = -(dx + wn)
+ else
+ dx = dx - ws
+ end
+ --
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursiveanchor = true
+ else
+ p.injections = {
+ cursiveanchor = true,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ cursiveanchor = true,
+ },
+ }
+ end
+ local p = rawget(properties,nxt)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursivex = dx
+ i.cursivey = dy
+ else
+ p.injections = {
+ cursivex = dx,
+ cursivey = dy,
+ }
+ end
+ else
+ properties[nxt] = {
+ injections = {
+ cursivex = dx,
+ cursivey = dy,
+ },
+ }
+ end
+ return dx, dy, nofregisteredcursives
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2lflag & tfmchr not used
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then -- okay?
+ local yoffset = y - h
+ local leftkern = x -- both kerns are set in a pair kern compared
+ local rightkern = w - x -- to normal kerns where we set only leftkern
+ if leftkern ~= 0 or rightkern ~= 0 or yoffset ~= 0 then
+ nofregisteredpairs = nofregisteredpairs + 1
+ if rlmode and rlmode < 0 then
+ leftkern, rightkern = rightkern, leftkern
+ end
+ local p = rawget(properties,current)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ if leftkern ~= 0 then
+ i.leftkern = (i.leftkern or 0) + leftkern
+ end
+ if rightkern ~= 0 then
+ i.rightkern = (i.rightkern or 0) + rightkern
+ end
+ if yoffset ~= 0 then
+ i.yoffset = (i.yoffset or 0) + yoffset
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ p.injections = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ }
+ else
+ p.injections = {
+ yoffset = yoffset,
+ }
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ properties[current] = {
+ injections = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ },
+ }
+ else
+ properties[current] = {
+ injections = {
+ yoffset = yoffset,
+ },
+ }
+ end
+ return x, y, w, h, nofregisteredpairs
+ end
+ end
+ return x, y, w, h -- no bound
+end
+
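+-- A worked example of the pair case above (a sketch): with factor f and
+-- spec = { 100, 0, 250, 0 } we get leftkern = 100*f in front of the glyph and
+-- rightkern = 150*f after it, with a zero yoffset; a plain kern (setkern) only
+-- produces a leftkern.
+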
+-- this needs checking for rl < 0 but it is unlikely that a r2l script
+-- uses kernclasses between glyphs so we're probably safe (KE has a
+-- problematic font where marks interfere with rl < 0 in the previous
+-- case)
+
+function injections.setkern(current,factor,rlmode,x,injection)
+ local dx = factor * x
+ if dx ~= 0 then
+ nofregisteredkerns = nofregisteredkerns + 1
+ local p = rawget(properties,current)
+ if not injection then
+ injection = "injections"
+ end
+ if p then
+ local i = rawget(p,injection)
+ if i then
+ i.leftkern = dx + (i.leftkern or 0)
+ else
+ p[injection] = {
+ leftkern = dx,
+ }
+ end
+ else
+ properties[current] = {
+ [injection] = {
+ leftkern = dx,
+ },
+ }
+ end
+ return dx, nofregisteredkerns
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ nofregisteredmarks = nofregisteredmarks + 1
+ -- markanchors[nofregisteredmarks] = base
+ if rlmode >= 0 then
+ dx = tfmbase.width - dx -- see later commented ox
+ end
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.markx = dx
+ i.marky = dy
+ i.markdir = rlmode or 0
+ i.markbase = nofregisteredmarks
+ i.markbasenode = base
+ else
+ p.injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ },
+ }
+ end
+ return dx, dy, nofregisteredmarks
+end
+
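+-- In other words (a sketch): for base anchor ba = { bx, by } and mark anchor
+-- ma = { mx, my } the mark gets shifted by factor*(bx-mx) horizontally and
+-- factor*(by-my) vertically; in an l2r run (rlmode >= 0) the horizontal offset
+-- is measured from the right edge of the base (tfmbase.width - dx). The actual
+-- offsets are applied later in inject_marks.
+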
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function showchar(n,nested)
+ local char = getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+
+local function show(n,what,nested,symbol)
+ if n then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,what)
+ if i then
+ local leftkern = i.leftkern or 0
+ local rightkern = i.rightkern or 0
+ local yoffset = i.yoffset or 0
+ local markx = i.markx or 0
+ local marky = i.marky or 0
+ local markdir = i.markdir or 0
+ local markbase = i.markbase or 0 -- will be markbasenode
+ local cursivex = i.cursivex or 0
+ local cursivey = i.cursivey or 0
+ local ligaindex = i.ligaindex or 0
+ local margin = nested and 4 or 2
+ --
+ if rightkern ~= 0 or yoffset ~= 0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern ~= 0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx ~= 0 or marky ~= 0 or markbase ~= 0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase ~= 0 and "yes" or "no")
+ end
+ if cursivex ~= 0 or cursivey ~= 0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex ~= 0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n = head
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id == disc_code then
+ local pre = getfield(n,"pre")
+ local post = getfield(n,"post")
+ local replace = getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
+ end
+ if post then
+ showsub(post,"postinjections","post")
+ end
+ if replace then
+ showsub(replace,"replaceinjections","replace")
+ end
+ end
+ n = getnext(n)
+ end
+ report_injections("end run")
+end
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",getfield(current,"kern"))
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = getnext(current)
+ end
+end
+
+-- we could also check for marks here but maybe not all are registered (needs checking)
+
+local function collect_glyphs_1(head)
+ local glyphs, nofglyphs = { }, 0
+ local marks, nofmarks = { }, 0
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if getsubtype(n) < 256 then
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ end
+ -- yoffsets can influence curs steps
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ return glyphs, nofglyphs, marks, nofmarks
+end
+
+local function collect_glyphs_2(head)
+ local glyphs, nofglyphs = { }, 0
+ local marks, nofmarks = { }, 0
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n) < 256 then
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ end
+ end
+ end
+ return glyphs, nofglyphs, marks, nofmarks
+end
+
+local function inject_marks(marks,nofmarks)
+ for i=1,nofmarks do
+ local n = marks[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,"injections")
+ if pn then
+ local p = pn.markbasenode
+ if p then
+ local px = getfield(p,"xoffset")
+ local ox = 0
+ local rightkern = nil
+ local pp = rawget(properties,p)
+ if pp then
+ pp = rawget(pp,"injections")
+ if pp then
+ rightkern = pp.rightkern
+ end
+ end
+ if rightkern then -- x and w ~= 0
+ if pn.markdir < 0 then
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ ox = px - pn.markx - rightkern
+ -- report_injections("r2l case 1: %p",ox)
+ else
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ -- ox = px - getfield(p,"width") + pn.markx - pp.leftkern
+ local leftkern = pp.leftkern
+ if leftkern then
+ ox = px - pn.markx - leftkern
+ else
+ ox = px - pn.markx
+ end
+-- report_injections("l2r case 1: %p",ox)
+ end
+ else
+ -- we need to deal with fonts that have marks with width
+ -- if pn.markdir < 0 then
+ -- ox = px - pn.markx
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- else
+ -- -- ox = px - getfield(p,"width") + pn.markx
+ ox = px - pn.markx
+ -- report_injections("l2r case 3: %p",ox)
+ -- end
+ local wn = getfield(n,"width") -- in arial marks have widths
+ if wn ~= 0 then
+ -- bad: we should center
+ -- insert_node_before(head,n,newkern(-wn/2))
+ -- insert_node_after(head,n,newkern(-wn/2))
+ pn.leftkern = -wn/2
+ pn.rightkern = -wn/2
+ -- wx[n] = { 0, -wn/2, 0, -wn }
+ end
+ -- so far
+ end
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+ local oy = 0
+ if marks[p] then
+ oy = py + pn.marky
+ else
+ oy = getfield(n,"yoffset") + py + pn.marky
+ end
+ setfield(n,"yoffset",oy)
+ else
+ -- normally this can't happen (only when in trace mode which is a special case anyway)
+ -- report_injections("missing mark anchor %i",pn.markbase or 0)
+ end
+ end
+ end
+ end
+end
+
+local function inject_cursives(glyphs,nofglyphs)
+ local cursiveanchor, lastanchor = nil, nil
+ local minc, maxc, last = 0, 0, nil
+ for i=1,nofglyphs do
+ local n = glyphs[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,"injections")
+ end
+ if pn then
+ local cursivex = pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex ~= 0 then
+ pn.leftkern = (pn.leftkern or 0) + cursivex
+ end
+ if lastanchor then
+ if maxc == 0 then
+ minc = lastanchor
+ end
+ maxc = lastanchor
+ properties[cursiveanchor].cursivedy = pn.cursivey
+ end
+ last = n
+ else
+ maxc = 0
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor = n
+ lastanchor = i
+ else
+ cursiveanchor = nil
+ lastanchor = nil
+ if maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny) -- ?
+ end
+ maxc = 0
+ cursiveanchor = nil
+ lastanchor = nil
+ end
+ -- if maxc > 0 and not cursiveanchor then
+ -- local ny = getfield(n,"yoffset")
+ -- for i=maxc,minc,-1 do
+ -- local ti = glyphs[i]
+ -- ny = ny + properties[ti].cursivedy
+ -- setfield(ti,"yoffset",ny) -- why not add ?
+ -- end
+ -- maxc = 0
+ -- end
+ end
+ if last and maxc > 0 then
+ local ny = getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ end
+end
+
+local function inject_kerns(head,list,length)
+ -- todo: pre/post/replace
+ for i=1,length do
+ local n = list[i]
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern)) -- type 0/2
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern)) -- type 0/2
+ end
+ end
+ end
+ end
+end
+
+local function inject_everything(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs, nofglyphs, marks, nofmarks
+ if nofregisteredpairs > 0 then
+ glyphs, nofglyphs, marks, nofmarks = collect_glyphs_1(head)
+ else
+ glyphs, nofglyphs, marks, nofmarks = collect_glyphs_2(head)
+ end
+ if nofglyphs > 0 then
+ if nofregisteredcursives > 0 then
+ inject_cursives(glyphs,nofglyphs)
+ end
+ if nofregisteredmarks > 0 then -- and nofmarks > 0
+ inject_marks(marks,nofmarks)
+ end
+ inject_kerns(head,glyphs,nofglyphs)
+ end
+ if nofmarks > 0 then
+ inject_kerns(head,marks,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_kerns_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n = head
+ local p = nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ head = insert_node_before(head,n,newkern(leftkern))
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"preinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n) -- why can it be empty { }
+ if pn then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_pairs_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ --
+ local n = head
+ local p = nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ else
+ -- this is the most common case
+ local i = rawget(pn,"injections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"preinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"postinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"replaceinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredpairs = 0
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+function injections.handler(head,where) -- optimize for n=1 ?
+ if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs > 0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns > 0 then
+ return inject_kerns_only(head,where)
+ else
+ return head, false
+ end
+end
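
The handler above only dispatches: per call it picks the cheapest injection pass that covers what was registered, falling through to a no-op when nothing is pending. A minimal illustrative sketch of that selection logic in plain Lua (counter names taken from the handler above; the sketch is not part of the patch):

local nofregisteredkerns    = 0
local nofregisteredpairs    = 0
local nofregisteredmarks    = 0
local nofregisteredcursives = 0

local function pick_pass()
    if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
        return "everything"  -- marks and cursives need the full collect-and-inject run
    elseif nofregisteredpairs > 0 then
        return "pairs"       -- kerns plus x/y offsets
    elseif nofregisteredkerns > 0 then
        return "kerns"       -- plain kern insertion only
    else
        return "nothing"     -- the head is returned untouched
    end
end
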
diff --git a/tex/context/base/font-lib.mkvi b/tex/context/base/font-lib.mkvi
index a664d9b3a..9cc14e02f 100644
--- a/tex/context/base/font-lib.mkvi
+++ b/tex/context/base/font-lib.mkvi
@@ -19,7 +19,7 @@
\registerctxluafile{font-log}{1.001}
\registerctxluafile{font-con}{1.001}
\registerctxluafile{font-enc}{1.001}
-%registerctxluafile{font-agl}{1.001} % loaded when needed, saves 100K in format
+\registerctxluafile{font-agl}{1.001} % if needed we can comment this and delay loading
\registerctxluafile{font-cid}{1.001} % cid maps
\registerctxluafile{font-map}{1.001}
\registerctxluafile{font-syn}{1.001}
@@ -38,9 +38,14 @@
\registerctxluafile{font-ott}{1.001} % otf tables (first)
\registerctxluafile{font-otf}{1.001} % otf main
\registerctxluafile{font-otb}{1.001} % otf main base
-\registerctxluafile{node-inj}{1.001} % we might split it off
-%registerctxluafile{font-ota}{1.001} % otf analyzers
-\registerctxluafile{font-otx}{1.001} % otf analyzers
+
+% \doiffileelse{font-inj.lua}
+% {\registerctxluafile{font-inj}{1.001}} % new method (for the moment only local)
+% {\registerctxluafile{node-inj}{1.001}} % old method
+
+\registerctxluafile{font-inj}{1.001} % the old method (lacks some features)
+
+\registerctxluafile{font-ota}{1.001} % otf analyzers
\registerctxluafile{font-otn}{1.001} % otf main node
\registerctxluafile{font-otd}{1.001} % otf dynamics (does an overload)
\registerctxluafile{font-otp}{1.001} % otf pack
@@ -86,17 +91,14 @@
%D \stoptyping
% we can also move the lookups to the fonts.namespace (of commands)
+% one can also directly use the clf calls when speed is needed
-% these will be renamed but it needs synchronization with WS
-
-\def\dolookupfontbyspec #1{\ctxcommand{fontlookupinitialize("#1")}}
-\def\dolookupnoffound {\ctxcommand{fontlookupnoffound()}}
-\def\dolookupgetkeyofindex#1#2{\ctxcommand{fontlookupgetkeyofindex("#1",#2)}}
-\def\dolookupgetkey #1{\ctxcommand{fontlookupgetkey("#1")}}
-\def\cleanfontname #1{\ctxcommand{cleanfontname("#1")}}
-
-% new:
+ \def\dolookupfontbyspec #1{\clf_fontlookupinitialize{#1}}
+ \def\dolookupnoffound {\clf_fontlookupnoffound}
+ \def\dolookupgetkeyofindex#1#2{\clf_fontlookupgetkeyofindex{#1}\numexpr#2\relax}
+ \def\dolookupgetkey #1{\clf_fontlookupgetkey{#1}}
+ \def\cleanfontname #1{\clf_cleanfontname{#1}}
+\unexpanded\def\setfontofid #1{\clf_setfontofid\numexpr#1\relax}
-\unexpanded\def\setfontofid#1{\ctxcommand{setfontofid(#1)}}
\protect \endinput
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index ce724b973..449a00f2e 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -6,12 +6,13 @@ if not modules then modules = { } end modules ['font-map'] = {
license = "see context related readme files"
}
-local tonumber = tonumber
+local tonumber, next, type = tonumber, next, type
local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
local utfbyte = utf.byte
local floor = math.floor
+local formatters = string.formatters
local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end)
@@ -22,6 +23,8 @@ local fonts = fonts or { }
local mappings = fonts.mappings or { }
fonts.mappings = mappings
+local allocate = utilities.storage.allocate
+
--[[ldx--
<p>Eventually this code will disappear because map files are kind
of obsolete. Some code may move to runtime or auxiliary modules.</p>
@@ -66,26 +69,14 @@ local function makenameparser(str)
end
end
--- local parser = makenameparser("Japan1")
--- local parser = makenameparser()
--- local function test(str)
--- local b, a = lpegmatch(parser,str)
--- print((a and table.serialize(b)) or b)
--- end
--- test("a.sc")
--- test("a")
--- test("uni1234")
--- test("uni1234.xx")
--- test("uni12349876")
--- test("u123400987600")
--- test("index1234")
--- test("Japan1.123")
+local f_single = formatters["%04X"]
+local f_double = formatters["%04X%04X"]
local function tounicode16(unicode,name)
if unicode < 0x10000 then
- return format("%04X",unicode)
+ return f_single(unicode)
elseif unicode < 0x1FFFFFFFFF then
- return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
else
report_fonts("can't convert %a in %a into tounicode",unicode,name)
end
@@ -94,18 +85,46 @@ end
local function tounicode16sequence(unicodes,name)
local t = { }
for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
+ local u = unicodes[l]
+ if u < 0x10000 then
+ t[l] = f_single(u)
elseif unicode < 0x1FFFFFFFFF then
- t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ t[l] = f_double(floor(u/1024),u%1024+0xDC00)
else
- report_fonts ("can't convert %a in %a into tounicode",unicode,name)
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
end
end
return concat(t)
end
+local function tounicode(unicode,name)
+ if type(unicode) == "table" then
+ local t = { }
+ for l=1,#unicode do
+ local u = unicode[l]
+ if u < 0x10000 then
+ t[l] = f_single(u)
+ elseif u < 0x1FFFFFFFFF then
+ t[l] = f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+ else
+ if unicode < 0x10000 then
+ return f_single(unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+end
+
+
local function fromunicode16(str)
if #str == 4 then
return tonumber(str,16)
@@ -151,6 +170,7 @@ end
mappings.loadlumtable = loadlumtable
mappings.makenameparser = makenameparser
+mappings.tounicode = tounicode
mappings.tounicode16 = tounicode16
mappings.tounicode16sequence = tounicode16sequence
mappings.fromunicode16 = fromunicode16
@@ -164,6 +184,8 @@ local namesplitter = Ct(C((1 - ligseparator - varseparator)^1) * (ligseparator *
-- print(string.formatters["%s: [% t]"](name,split))
-- end
+-- maybe: ff fi fl ffi ffl => f_f f_i f_l f_f_i f_f_l
+
-- test("i.f_")
-- test("this")
-- test("this.that")
@@ -171,33 +193,54 @@ local namesplitter = Ct(C((1 - ligseparator - varseparator)^1) * (ligseparator *
-- test("such_so_more")
-- test("such_so_more.that")
+-- to be completed .. for fonts that use unicodes for ligatures which
+-- is actually a bad thing and should be avoided in the first place
+
+local overloads = allocate {
+ IJ = { name = "I_J", unicode = { 0x49, 0x4A }, mess = 0x0132 },
+ ij = { name = "i_j", unicode = { 0x69, 0x6A }, mess = 0x0133 },
+ ff = { name = "f_f", unicode = { 0x66, 0x66 }, mess = 0xFB00 },
+ fi = { name = "f_i", unicode = { 0x66, 0x69 }, mess = 0xFB01 },
+ fl = { name = "f_l", unicode = { 0x66, 0x6C }, mess = 0xFB02 },
+ ffi = { name = "f_f_i", unicode = { 0x66, 0x66, 0x69 }, mess = 0xFB03 },
+ ffl = { name = "f_f_l", unicode = { 0x66, 0x66, 0x6C }, mess = 0xFB04 },
+ fj = { name = "f_j", unicode = { 0x66, 0x6A } },
+ fk = { name = "f_k", unicode = { 0x66, 0x6B } },
+}
+
+for k, v in next, overloads do
+ local name = v.name
+ local mess = v.mess
+ if name then
+ overloads[name] = v
+ end
+ if mess then
+ overloads[mess] = v
+ end
+end
+
+mappings.overloads = overloads
+
function mappings.addtounicode(data,filename)
local resources = data.resources
local properties = data.properties
local descriptions = data.descriptions
local unicodes = resources.unicodes
+ local lookuptypes = resources.lookuptypes
if not unicodes then
return
end
-- we need to move this code
- unicodes['space'] = unicodes['space'] or 32
- unicodes['hyphen'] = unicodes['hyphen'] or 45
- unicodes['zwj'] = unicodes['zwj'] or 0x200D
- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
- -- the tounicode mapping is sparse and only needed for alternatives
+ unicodes['space'] = unicodes['space'] or 32
+ unicodes['hyphen'] = unicodes['hyphen'] or 45
+ unicodes['zwj'] = unicodes['zwj'] or 0x200D
+ unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
local private = fonts.constructors.privateoffset
- local unknown = format("%04X",utfbyte("?"))
local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
- local tounicode = { }
- local originals = { }
- resources.tounicode = tounicode
- resources.originals = originals
+ ----- namevector = fonts.encodings.agl.names -- loaded runtime in context
+ local missing = { }
local lumunic, uparser, oparser
local cidinfo, cidnames, cidcodes, usedmap
- if false then -- will become an option
- lumunic = loadlumtable(filename)
- lumunic = lumunic and lumunic.tounicode
- end
--
cidinfo = properties.cidinfo
usedmap = cidinfo and fonts.cid.getmap(cidinfo)
@@ -212,12 +255,16 @@ function mappings.addtounicode(data,filename)
for unic, glyph in next, descriptions do
local index = glyph.index
local name = glyph.name
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
+ local r = overloads[name]
+ if r then
+ -- get rid of weird ligatures
+ -- glyph.name = r.name
+ glyph.unicode = r.unicode
+ elseif unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
local unicode = lumunic and lumunic[name] or unicodevector[name]
if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode,name)
- ns = ns + 1
+ glyph.unicode = unicode
+ ns = ns + 1
end
-- cidmap heuristics, beware, there is no guarantee for a match unless
-- the chain resolves
@@ -226,9 +273,8 @@ function mappings.addtounicode(data,filename)
if foundindex then
unicode = cidcodes[foundindex] -- name to number
if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode,name)
- ns = ns + 1
+ glyph.unicode = unicode
+ ns = ns + 1
else
local reference = cidnames[foundindex] -- number to name
if reference then
@@ -236,23 +282,20 @@ function mappings.addtounicode(data,filename)
if foundindex then
unicode = cidcodes[foundindex]
if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode,name)
- ns = ns + 1
+ glyph.unicode = unicode
+ ns = ns + 1
end
end
if not unicode or unicode == "" then
local foundcodes, multiple = lpegmatch(uparser,reference)
if foundcodes then
- originals[index] = foundcodes
+ glyph.unicode = foundcodes
if multiple then
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
+ nl = nl + 1
+ unicode = true
else
- tounicode[index] = tounicode16(foundcodes,name)
- ns = ns + 1
- unicode = foundcodes
+ ns = ns + 1
+ unicode = foundcodes
end
end
end
@@ -299,11 +342,9 @@ function mappings.addtounicode(data,filename)
if n == 0 then -- done then
-- nothing
elseif n == 1 then
- originals[index] = t[1]
- tounicode[index] = tounicode16(t[1],name)
+ glyph.unicode = t[1]
else
- originals[index] = t
- tounicode[index] = tounicode16sequence(t)
+ glyph.unicode = t
end
nl = nl + 1
end
@@ -311,32 +352,163 @@ function mappings.addtounicode(data,filename)
if not unicode or unicode == "" then
local foundcodes, multiple = lpegmatch(uparser,name)
if foundcodes then
+ glyph.unicode = foundcodes
if multiple then
- originals[index] = foundcodes
- tounicode[index] = tounicode16sequence(foundcodes,name)
- nl = nl + 1
- unicode = true
+ nl = nl + 1
+ unicode = true
else
- originals[index] = foundcodes
- tounicode[index] = tounicode16(foundcodes,name)
- ns = ns + 1
- unicode = foundcodes
+ ns = ns + 1
+ unicode = foundcodes
end
end
end
- -- if not unicode then
- -- originals[index] = 0xFFFD
- -- tounicode[index] = "FFFD"
+ -- check using substitutes and alternates
+ local r = overloads[unicode]
+ if r then
+ unicode = r.unicode
+ glyph.unicode = unicode
+ end
+ --
+ if not unicode then
+ missing[name] = true
+ end
+ end
+ end
+ if next(missing) then
+ local guess = { }
+ -- helper
+ local function check(gname,code,unicode)
+ local description = descriptions[code]
+ -- no need to add a self reference
+ local variant = description.name
+ if variant == gname then
+ return
+ end
+ -- the variant already has a unicode (normally that results in a default tounicode to self)
+ local unic = unicodes[variant]
+ if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
+ -- no default mapping and therefore maybe no tounicode yet
+ else
+ return
+ end
+ -- the variant already has a tounicode
+ if descriptions[code].unicode then
+ return
+ end
+ -- add to the list
+ local g = guess[variant]
+ -- local r = overloads[unicode]
+ -- if r then
+ -- unicode = r.unicode
-- end
+ if g then
+ g[gname] = unicode
+ else
+ guess[variant] = { [gname] = unicode }
+ end
+ end
+ --
+ for unicode, description in next, descriptions do
+ local slookups = description.slookups
+ if slookups then
+ local gname = description.name
+ for tag, data in next, slookups do
+ local lookuptype = lookuptypes[tag]
+ if lookuptype == "alternate" then
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ elseif lookuptype == "substitution" then
+ check(gname,data,unicode)
+ end
+ end
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ local gname = description.name
+ for tag, list in next, mlookups do
+ local lookuptype = lookuptypes[tag]
+ if lookuptype == "alternate" then
+ for i=1,#list do
+ local data = list[i]
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ end
+ elseif lookuptype == "substitution" then
+ for i=1,#list do
+ check(gname,list[i],unicode)
+ end
+ end
+ end
+ end
+ end
+ -- resolve references
+ local done = true
+ while done do
+ done = false
+ for k, v in next, guess do
+ if type(v) ~= "number" then
+ for kk, vv in next, v do
+ if vv == -1 or vv >= private or (vv >= 0xE000 and vv <= 0xF8FF) or vv == 0xFFFE or vv == 0xFFFF then
+ local uu = guess[kk]
+ if type(uu) == "number" then
+ guess[k] = uu
+ done = true
+ end
+ else
+ guess[k] = vv
+ done = true
+ end
+ end
+ end
+ end
+ end
+ -- wrap up
+ local orphans = 0
+ local guessed = 0
+ for k, v in next, guess do
+ if type(v) == "number" then
+ descriptions[unicodes[k]].unicode = descriptions[v].unicode or v -- can also be a table
+ guessed = guessed + 1
+ else
+ local t = nil
+ local l = lower(k)
+ local u = unicodes[l]
+ if not u then
+ orphans = orphans + 1
+ elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then
+ local unicode = descriptions[u].unicode
+ if unicode then
+ descriptions[unicodes[k]].unicode = unicode
+ guessed = guessed + 1
+ else
+ orphans = orphans + 1
+ end
+ else
+ orphans = orphans + 1
+ end
+ end
+ end
+ if trace_loading and orphans > 0 or guessed > 0 then
+ report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
end
end
if trace_mapping then
for unic, glyph in table.sortedhash(descriptions) do
- local name = glyph.name
- local index = glyph.index
- local toun = tounicode[index]
- if toun then
- report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ local name = glyph.name
+ local index = glyph.index
+ local unicode = glyph.unicode
+ if unicode then
+ if type(unicode) == "table" then
+ local unicodes = { }
+ for i=1,#unicode do
+ unicodes[i] = formatters("%U",unicode[i])
+ end
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode)
+ end
else
report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
end
@@ -346,3 +518,18 @@ function mappings.addtounicode(data,filename)
report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
end
end
+
+-- local parser = makenameparser("Japan1")
+-- local parser = makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("u123400987600")
+-- test("index1234")
+-- test("Japan1.123")
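
In the font-map.lua hunk above the overloads table ends up indexed three ways: by the short ligature name, by the underscore name, and by the precomposed codepoint, so any of them resolves to the component unicodes that land in glyph.unicode. A small standalone sketch of that lookup in plain Lua (the helper name components is made up for illustration; not part of the patch):

local overloads = {
    fi  = { name = "f_i",   unicode = { 0x66, 0x69 },       mess = 0xFB01 },
    ffi = { name = "f_f_i", unicode = { 0x66, 0x66, 0x69 }, mess = 0xFB03 },
}

-- alias the underscore names and the precomposed slots to the same entries
-- (collected first so the table is not grown while it is being traversed)
local aliases = { }
for k, v in next, overloads do
    if v.name then aliases[v.name] = v end
    if v.mess then aliases[v.mess] = v end
end
for k, v in next, aliases do
    overloads[k] = v
end

local function components(key) -- key: glyph name or codepoint
    local o = overloads[key]
    return o and o.unicode
end

-- components("ffi"), components("f_f_i") and components(0xFB03)
-- all return { 0x66, 0x66, 0x69 }
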
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index e1d1ebeb9..0ec95ee1b 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,9 +22,15 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.749
+otf.version = otf.version or 2.803
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
+local fontloader = fontloader
+local font_to_table = fontloader.to_table
+local open_font = fontloader.open
+local get_font_info = fontloader.info
+local close_font = fontloader.close
+
function otf.loadcached(filename,format,sub)
-- no recache when version mismatch
local name = file.basename(file.removesuffix(filename))
@@ -54,10 +60,10 @@ function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font typ
if data and data.resources and data.resources.features then
return data.resources.features
else
- local ff = fontloader.open(filename)
+ local ff = open_font(filename)
if ff then
- local data = fontloader.to_table(ff)
- fontloader.close(ff)
+ local data = font_to_table(ff)
+ close_font(ff)
local features = { }
for k=1,#featuregroups do
local what = featuregroups[k]
diff --git a/tex/context/base/font-nod.lua b/tex/context/base/font-nod.lua
index 7fa3297d4..e2000be7e 100644
--- a/tex/context/base/font-nod.lua
+++ b/tex/context/base/font-nod.lua
@@ -11,10 +11,10 @@ if not modules then modules = { } end modules ['font-nod'] = {
might become a runtime module instead. This module will be cleaned up!</p>
--ldx]]--
-local tonumber, tostring = tonumber, tostring
+local tonumber, tostring, rawget = tonumber, tostring, rawget
local utfchar = utf.char
-local concat = table.concat
-local match, gmatch, concat, rep = string.match, string.gmatch, table.concat, string.rep
+local concat, fastcopy = table.concat, table.fastcopy
+local match, rep = string.match, string.rep
local report_nodes = logs.reporter("fonts","tracing")
@@ -41,13 +41,6 @@ tracers.characters = char_tracers
local step_tracers = tracers.steppers or { }
tracers.steppers = step_tracers
-local texsetbox = tex.setbox
-
-local copy_node_list = nodes.copy_list
-local hpack_node_list = nodes.hpack
-local free_node_list = nodes.flush_list
-local traverse_nodes = nodes.traverse
-
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -59,12 +52,33 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local rule_code = nodecodes.rule
local whatsit_code = nodecodes.whatsit
-local spec_code = nodecodes.glue_spec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+local setbox = nuts.setbox
+
+local copy_node_list = nuts.copy_list
+local hpack_node_list = nuts.hpack
+local free_node_list = nuts.flush_list
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local protect_glyphs = nuts.protect_glyphs
+
+local nodepool = nuts.pool
local new_glyph = nodepool.glyph
local formatters = string.formatters
@@ -78,18 +92,38 @@ local fontcharacters = hashes.characters
local fontproperties = hashes.properties
local fontparameters = hashes.parameters
+local properties = nodes.properties.data
+
+-- direct.set_properties_mode(true,false)
+-- direct.set_properties_mode(true,true) -- default
+
+local function freeze(h,where)
+ -- report_nodes("freezing %s",where)
+ for n in traverse_nodes(tonut(h)) do -- todo: disc but not traced anyway
+ local p = properties[n]
+ if p then
+ local i = p.injections if i then p.injections = fastcopy(i) end
+ -- local i = r.preinjections if i then p.preinjections = fastcopy(i) end
+ -- local i = r.postinjections if i then p.postinjections = fastcopy(i) end
+ -- local i = r.replaceinjections if i then p.replaceinjections = fastcopy(i) end
+ -- only injections
+ end
+ end
+end
+
function char_tracers.collect(head,list,tag,n)
+ head = tonut(head)
n = n or 0
local ok, fn = false, nil
while head do
- local id = head.id
+ local id = getid(head)
if id == glyph_code then
- local f = head.font
+ local f = getfont(head)
if f ~= fn then
ok, fn = false, f
end
- local c = head.char
- local i = fontidentifiers[f].indices[c] or 0
+ local c = getchar(head)
+ -- local i = fontidentifiers[f].indices[c] or 0 -- zero anyway as indices is nilled
if not ok then
ok = true
n = n + 1
@@ -97,13 +131,32 @@ function char_tracers.collect(head,list,tag,n)
list[n][tag] = { }
end
local l = list[n][tag]
- l[#l+1] = { c, f, i }
+ -- l[#l+1] = { c, f, i }
+ l[#l+1] = { c, f }
elseif id == disc_code then
-- skip
+-- local replace = getfield(head,"replace")
+-- if replace then
+-- for n in traverse_id(glyph_code,replace) do
+-- l[#l+1] = { c, f }
+-- end
+-- end
+-- local pre = getfield(head,"pre")
+-- if pre then
+-- for n in traverse_id(glyph_code,pre) do
+-- l[#l+1] = { c, f }
+-- end
+-- end
+-- local post = getfield(head,"post")
+-- if post then
+-- for n in traverse_id(glyph_code,post) do
+-- l[#l+1] = { c, f }
+-- end
+-- end
else
ok = false
end
- head = head.next
+ head = getnext(head)
end
end
@@ -113,7 +166,8 @@ function char_tracers.equal(ta, tb)
else
for i=1,#ta do
local a, b = ta[i], tb[i]
- if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then
+ -- if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then
+ if a[1] ~= b[1] or a[2] ~= b[2] then
return false
end
end
@@ -146,28 +200,28 @@ function char_tracers.unicodes(t,decimal)
return concat(tt," ")
end
-function char_tracers.indices(t,decimal)
- local tt = { }
- for i=1,#t do
- local n = t[i][3]
- if n == 0 then
- tt[i] = "-"
- elseif decimal then
- tt[i] = n
- else
- tt[i] = f_unicode(n)
- end
- end
- return concat(tt," ")
-end
+-- function char_tracers.indices(t,decimal)
+-- local tt = { }
+-- for i=1,#t do
+-- local n = t[i][3]
+-- if n == 0 then
+-- tt[i] = "-"
+-- elseif decimal then
+-- tt[i] = n
+-- else
+-- tt[i] = f_unicode(n)
+-- end
+-- end
+-- return concat(tt," ")
+-- end
function char_tracers.start()
- local npc = handlers.characters
+ local npc = handlers.characters -- should accept nuts too
local list = { }
function handlers.characters(head)
local n = #list
char_tracers.collect(head,list,'before',n)
- local h, d = npc(head)
+ local h, d = npc(tonode(head)) -- for the moment tonode
char_tracers.collect(head,list,'after',n)
if #list > n then
list[#list+1] = { }
@@ -234,8 +288,9 @@ end
function step_tracers.glyphs(n,i)
local c = collection[i]
if c then
- local b = hpack_node_list(copy_node_list(c)) -- multiple arguments
- texsetbox(n,b)
+ local c = copy_node_list(c)
+ local b = hpack_node_list(c) -- multiple arguments
+ setbox(n,b)
end
end
@@ -243,8 +298,8 @@ function step_tracers.features()
-- we cannot use first_glyph here as it only finds characters with subtype < 256
local f = collection[1]
while f do
- if f.id == glyph_code then
- local tfmdata, t = fontidentifiers[f.font], { }
+ if getid(f) == glyph_code then
+ local tfmdata, t = fontidentifiers[getfont(f)], { }
for feature, value in table.sortedhash(tfmdata.shared.features) do
if feature == "number" or feature == "features" then
-- private
@@ -265,22 +320,24 @@ function step_tracers.features()
end
return
end
- f = f.next
+ f = getnext(f)
end
end
function tracers.fontchar(font,char)
local n = new_glyph()
- n.font, n.char, n.subtype = font, char, 256
- context(n)
+ setfield(n,"font",font)
+ setfield(n,"char",char)
+ setfield(n,"subtype",256)
+ context(tonode(n))
end
function step_tracers.font(command)
local c = collection[1]
while c do
- local id = c.id
+ local id = getid(c)
if id == glyph_code then
- local font = c.font
+ local font = getfont(c)
local name = file.basename(fontproperties[font].filename or "unknown")
local size = fontparameters[font].size or 0
if command then
@@ -290,30 +347,73 @@ function step_tracers.font(command)
end
return
else
- c = c.next
+ c = getnext(c)
end
end
end
-function step_tracers.codes(i,command)
+local colors = {
+ pre = { "darkred" },
+ post = { "darkgreen" },
+ replace = { "darkblue" },
+}
+
+function step_tracers.codes(i,command,space)
local c = collection[i]
+
+ local function showchar(c)
+ if command then
+ local f, c = getfont(c), getchar(c)
+ local d = fontdescriptions[f]
+ local d = d and d[c]
+ context[command](f,c,d and d.class or "")
+ else
+ context("[%s:U+%04X]",getfont(c),getchar(c))
+ end
+ end
+
+ local function showdisc(d,w,what)
+ if w then
+ context.startcolor(colors[what])
+ context("%s:",what)
+ for c in traverse_nodes(w) do
+ local id = getid(c)
+ if id == glyph_code then
+ showchar(c)
+ else
+ context("[%s]",nodecodes[id])
+ end
+ end
+ context[space]()
+ context.stopcolor()
+ end
+ end
+
while c do
- local id = c.id
+ local id = getid(c)
if id == glyph_code then
- if command then
- local f, c = c.font,c.char
- local d = fontdescriptions[f]
- local d = d and d[c]
- context[command](f,c,d and d.class or "")
+ showchar(c)
+ elseif id == whatsit_code and (getsubtype(c) == localpar_code or getsubtype(c) == dir_code) then
+ context("[%s]",getfield(c,"dir"))
+ elseif id == disc_code then
+ local pre = getfield(c,"pre")
+ local post = getfield(c,"post")
+ local replace = getfield(c,"replace")
+ if pre or post or replace then
+ context("[")
+ context[space]()
+ showdisc(c,pre,"pre")
+ showdisc(c,post,"post")
+ showdisc(c,replace,"replace")
+ context[space]()
+ context("]")
else
- context("[%s:U+%04X]",c.font,c.char)
+ context("[disc]")
end
- elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then
- context("[%s]",c.dir)
else
context("[%s]",nodecodes[id])
end
- c = c.next
+ c = getnext(c)
end
end
@@ -339,9 +439,12 @@ end
function step_tracers.check(head)
if collecting then
step_tracers.reset()
- local n = copy_node_list(head)
- injections.handler(n,nil,"trace",true)
- handlers.protectglyphs(n) -- can be option
+ local h = tonut(head)
+ local n = copy_node_list(h)
+ freeze(n,"check")
+ injections.keepcounts(n) -- one-time
+ injections.handler(n,"trace")
+ protect_glyphs(n)
collection[1] = n
end
end
@@ -350,9 +453,12 @@ function step_tracers.register(head)
if collecting then
local nc = #collection+1
if messages[nc] then
- local n = copy_node_list(head)
- injections.handler(n,nil,"trace",true)
- handlers.protectglyphs(n) -- can be option
+ local h = tonut(head)
+ local n = copy_node_list(h)
+ freeze(n,"register")
+ injections.keepcounts(n) -- one-time
+ injections.handler(n,"trace")
+ protect_glyphs(n)
collection[nc] = n
end
end
@@ -375,21 +481,30 @@ local threshold = 65536
local function toutf(list,result,nofresult,stopcriterium)
if list then
- for n in traverse_nodes(list) do
- local id = n.id
+ for n in traverse_nodes(tonut(list)) do
+ local id = getid(n)
if id == glyph_code then
- local components = n.components
+ local components = getfield(n,"components")
if components then
result, nofresult = toutf(components,result,nofresult)
else
- local c = n.char
- local fc = fontcharacters[n.font]
+ local c = getchar(n)
+ local fc = fontcharacters[getfont(n)]
if fc then
- local u = fc[c].tounicode
- if u then
- for s in gmatch(u,"....") do
+ local fcc = fc[c]
+ if fcc then
+ local u = fcc.unicode
+ if not u then
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(c)
+ elseif type(u) == "table" then
+ for i=1,#u do
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(u[i])
+ end
+ else
nofresult = nofresult + 1
- result[nofresult] = utfchar(tonumber(s,16))
+ result[nofresult] = utfchar(u)
end
else
nofresult = nofresult + 1
@@ -397,23 +512,23 @@ local function toutf(list,result,nofresult,stopcriterium)
end
else
nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
+ result[nofresult] = f_unicode(c)
end
end
elseif id == disc_code then
- result, nofresult = toutf(n.replace,result,nofresult) -- needed?
+ result, nofresult = toutf(getfield(n,"replace"),result,nofresult) -- needed?
elseif id == hlist_code or id == vlist_code then
-- if nofresult > 0 and result[nofresult] ~= " " then
-- nofresult = nofresult + 1
-- result[nofresult] = " "
-- end
- result, nofresult = toutf(n.list,result,nofresult)
+ result, nofresult = toutf(getlist(n),result,nofresult)
elseif id == glue_code then
if nofresult > 0 and result[nofresult] ~= " " then
nofresult = nofresult + 1
result[nofresult] = " "
end
- elseif id == kern_code and n.kern > threshold then
+ elseif id == kern_code and getfield(n,"kern") > threshold then
if nofresult > 0 and result[nofresult] ~= " " then
nofresult = nofresult + 1
result[nofresult] = " "
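
In the toutf hunk above a glyph's unicode field can be absent, a single codepoint, or a table of codepoints (for instance a ligature). A standalone sketch of that branch, assuming Lua 5.3's utf8.char in place of the module's utf.char (illustrative only, not part of the patch):

local function append_unicode(result, slotchar, u)
    if not u then
        result[#result+1] = utf8.char(slotchar)   -- no tounicode info: fall back to the font slot
    elseif type(u) == "table" then
        for i=1,#u do
            result[#result+1] = utf8.char(u[i])   -- a ligature maps back to several codepoints
        end
    else
        result[#result+1] = utf8.char(u)
    end
    return result
end

-- append_unicode({ }, 0xFB03, { 0x66, 0x66, 0x69 }) returns { "f", "f", "i" }
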
diff --git a/tex/context/base/font-odv.lua b/tex/context/base/font-odv.lua
index 69f74dfa5..2ef1aabe7 100644
--- a/tex/context/base/font-odv.lua
+++ b/tex/context/base/font-odv.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['font-odv'] = {
license = "see context related readme files"
}
+-- One day I'll speed this up ... char swapping and properties.
+
-- A few remarks:
--
-- This code is a partial rewrite of the code that deals with devanagari. The data and logic
@@ -15,6 +17,9 @@ if not modules then modules = { } end modules ['font-odv'] = {
-- deva: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/introO.mspx
-- dev2: http://www.microsoft.com/typography/OpenType%20Dev/devanagari/intro.mspx
--
+-- Rajeesh Nambiar provided patches for the malayalam variant. Thanks to feedback from
+-- the mailing list, some aspects could be improved.
+--
-- As I touched nearly all code, reshuffled it, optimized a lot, etc. etc. (imagine how
-- much can get messed up in over a week work) it could be that I introduced bugs. There
-- is more to gain (esp in the functions applied to a range) but I'll do that when
@@ -47,7 +52,10 @@ if not modules then modules = { } end modules ['font-odv'] = {
-- Some data will move to char-def.lua (some day).
--
-- Hans Hagen, PRAGMA-ADE, Hasselt NL
-
+--
+-- We could have c_nukta, c_halant, c_ra if we know that they are never used mixed within
+-- one script .. yes or no?
+--
-- Matras: according to Microsoft typography specifications "up to one of each type:
-- pre-, above-, below- or post- base", but that does not seem to be right. It could
-- become an option.
@@ -57,9 +65,9 @@ if not modules then modules = { } end modules ['font-odv'] = {
--
-- local function ms_matra(c)
-- local prebase, abovebase, belowbase, postbase = true, true, true, true
--- local n = c.next
--- while n and n.id == glyph_code and n.subtype < 256 and n.font == font do
--- local char = n.char
+-- local n = getnext(c)
+-- while n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font do
+-- local char = getchar(n)
-- if not dependent_vowel[char] then
-- break
-- elseif pre_mark[char] and prebase then
@@ -73,7 +81,7 @@ if not modules then modules = { } end modules ['font-odv'] = {
-- else
-- return c
-- end
--- c = c.next
+-- c = getnext(c)
-- end
-- return c
-- end
@@ -100,11 +108,28 @@ local methods = fonts.analyzers.methods
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
-local insert_node_after = nodes.insert_after
-local copy_node = nodes.copy
-local free_node = nodes.free
-local remove_node = nodes.remove
-local flush_list = nodes.flush_list
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+
+local insert_node_after = nuts.insert_after
+local copy_node = nuts.copy
+local free_node = nuts.free
+local remove_node = nuts.remove
+local flush_list = nuts.flush_list
+
+local copyinjection = nodes.injections.copy -- KE: is this necessary? HH: probably not as positioning comes later and we rawget/set
local unsetvalue = attributes.unsetvalue
@@ -132,18 +157,26 @@ replace_all_nbsp = function(head) -- delayed definition
return replace_all_nbsp(head)
end
-local fontprocesses = fonts.hashes.processes
local xprocesscharacters = nil
-xprocesscharacters = function(head,font)
- xprocesscharacters = nodes.handlers.characters
- return xprocesscharacters(head,font)
+if context then
+ xprocesscharacters = function(head,font)
+ xprocesscharacters = nodes.handlers.characters
+ return xprocesscharacters(head,font)
+ end
+else
+ xprocesscharacters = function(head,font)
+ xprocesscharacters = nodes.handlers.nodepass -- generic
+ return xprocesscharacters(head,font)
+ end
end
local function processcharacters(head,font)
- return xprocesscharacters(head)
+ return tonut(xprocesscharacters(tonode(head)))
end
+-- local fontprocesses = fonts.hashes.processes
+--
-- function processcharacters(head,font)
-- local processors = fontprocesses[font]
-- for i=1,#processors do
@@ -156,6 +189,29 @@ end
-- Gurmukhi, Kannada, Malayalam, Oriya, Tamil, Telugu. Feel free to provide the
-- code points.
+-- We can assume that scripts are not mixed in the source but if that is the case
+-- we might need to have consonants etc per script and initialize a local table
+-- pointing to the right one.
+
+-- new, to be checked:
+--
+-- U+00978 : DEVANAGARI LETTER MARWARI DDA
+-- U+00980 : BENGALI ANJI
+-- U+00C00 : TELUGU SIGN COMBINING CANDRABINDU ABOVE
+-- U+00C34 : TELUGU LETTER LLLA
+-- U+00C81 : KANNADA SIGN CANDRABINDU
+-- U+00D01 : MALAYALAM SIGN CANDRABINDU
+-- U+00DE6 : SINHALA LITH DIGIT ZERO
+-- U+00DE7 : SINHALA LITH DIGIT ONE
+-- U+00DE8 : SINHALA LITH DIGIT TWO
+-- U+00DE9 : SINHALA LITH DIGIT THREE
+-- U+00DEA : SINHALA LITH DIGIT FOUR
+-- U+00DEB : SINHALA LITH DIGIT FIVE
+-- U+00DEC : SINHALA LITH DIGIT SIX
+-- U+00DED : SINHALA LITH DIGIT SEVEN
+-- U+00DEE : SINHALA LITH DIGIT EIGHT
+-- U+00DEF : SINHALA LITH DIGIT NINE
+
local consonant = {
-- devanagari
[0x0915] = true, [0x0916] = true, [0x0917] = true, [0x0918] = true,
@@ -182,6 +238,17 @@ local consonant = {
[0x0CB5] = true, [0x0CB6] = true, [0x0CB7] = true, [0x0CB8] = true,
[0x0CB9] = true,
[0x0CDE] = true, -- obsolete
+ -- malayalam
+ [0x0D15] = true, [0x0D16] = true, [0x0D17] = true, [0x0D18] = true,
+ [0x0D19] = true, [0x0D1A] = true, [0x0D1B] = true, [0x0D1C] = true,
+ [0x0D1D] = true, [0x0D1E] = true, [0x0D1F] = true, [0x0D20] = true,
+ [0x0D21] = true, [0x0D22] = true, [0x0D23] = true, [0x0D24] = true,
+ [0x0D25] = true, [0x0D26] = true, [0x0D27] = true, [0x0D28] = true,
+ [0x0D29] = true, [0x0D2A] = true, [0x0D2B] = true, [0x0D2C] = true,
+ [0x0D2D] = true, [0x0D2E] = true, [0x0D2F] = true, [0x0D30] = true,
+ [0x0D31] = true, [0x0D32] = true, [0x0D33] = true, [0x0D34] = true,
+ [0x0D35] = true, [0x0D36] = true, [0x0D37] = true, [0x0D38] = true,
+ [0x0D39] = true, [0x0D3A] = true,
}
local independent_vowel = {
@@ -198,6 +265,11 @@ local independent_vowel = {
[0x0C89] = true, [0x0C8A] = true, [0x0C8B] = true, [0x0C8C] = true,
[0x0C8D] = true, [0x0C8E] = true, [0x0C8F] = true, [0x0C90] = true,
[0x0C91] = true, [0x0C92] = true, [0x0C93] = true, [0x0C94] = true,
+ -- malayalam
+ [0x0D05] = true, [0x0D06] = true, [0x0D07] = true, [0x0D08] = true,
+ [0x0D09] = true, [0x0D0A] = true, [0x0D0B] = true, [0x0D0C] = true,
+ [0x0D0E] = true, [0x0D0F] = true, [0x0D10] = true, [0x0D12] = true,
+ [0x0D13] = true, [0x0D14] = true,
}
local dependent_vowel = { -- matra
@@ -213,6 +285,11 @@ local dependent_vowel = { -- matra
[0x0CC2] = true, [0x0CC3] = true, [0x0CC4] = true, [0x0CC5] = true,
[0x0CC6] = true, [0x0CC7] = true, [0x0CC8] = true, [0x0CC9] = true,
[0x0CCA] = true, [0x0CCB] = true, [0x0CCC] = true,
+ -- malayalam
+ [0x0D3E] = true, [0x0D3F] = true, [0x0D40] = true, [0x0D41] = true,
+ [0x0D42] = true, [0x0D43] = true, [0x0D44] = true, [0x0D46] = true,
+ [0x0D47] = true, [0x0D48] = true, [0x0D4A] = true, [0x0D4B] = true,
+ [0x0D4C] = true, [0x0D57] = true,
}
local vowel_modifier = {
@@ -224,10 +301,16 @@ local vowel_modifier = {
[0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
[0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
[0xA8F0] = true, [0xA8F1] = true,
+ -- malayalam
+ [0x0D02] = true, [0x0D03] = true,
}
local stress_tone_mark = {
[0x0951] = true, [0x0952] = true, [0x0953] = true, [0x0954] = true,
+ -- kannada
+ [0x0CCD] = true,
+ -- malayalam
+ [0x0D4D] = true,
}
local nukta = {
@@ -242,9 +325,19 @@ local halant = {
[0x094D] = true,
-- kannada
[0x0CCD] = true,
+ -- malayalam
+ [0x0D4D] = true,
+}
+
+local ra = {
+ -- devanagari
+ [0x0930] = true,
+ -- kannada
+ [0x0CB0] = true,
+ -- malayalam
+ [0x0D30] = true,
}
-local c_ra = 0x0930 -- used to be tables (also used as constant)
local c_anudatta = 0x0952 -- used to be tables
local c_nbsp = 0x00A0 -- used to be tables
local c_zwnj = 0x200C -- used to be tables
@@ -270,6 +363,8 @@ local zw_char = { -- could also be inlined
local pre_mark = {
[0x093F] = true, [0x094E] = true,
+ -- malayalam
+ [0x0D46] = true, [0x0D47] = true, [0x0D48] = true,
}
local above_mark = {
@@ -281,6 +376,8 @@ local above_mark = {
[0xA8E8] = true, [0xA8E9] = true, [0xA8EA] = true, [0xA8EB] = true,
[0xA8EC] = true, [0xA8ED] = true, [0xA8EE] = true, [0xA8EF] = true,
[0xA8F0] = true, [0xA8F1] = true,
+ -- malayalam
+ [0x0D4E] = true,
}
local below_mark = {
@@ -295,6 +392,13 @@ local post_mark = {
[0x094F] = true,
}
+local twopart_mark = {
+ -- malayalam
+ [0x0D4A] = { 0x0D46, 0x0D3E, }, -- ൊ
+ [0x0D4B] = { 0x0D47, 0x0D3E, }, -- ോ
+ [0x0D4C] = { 0x0D46, 0x0D57, }, -- ൌ
+}
+
local mark_four = { } -- As we access these frequently an extra hash is used.
for k, v in next, pre_mark do mark_four[k] = pre_mark end
@@ -346,6 +450,7 @@ local reorder_class = {
[0x0CC4] = "after subscript",
[0x0CD5] = "after subscript",
[0x0CD6] = "after subscript",
+ -- malayalam
}
-- We use some pseudo features as we need to manipulate the nodelist based
@@ -376,6 +481,7 @@ local sequence_reorder_matras = {
features = { dv01 = dev2_defaults },
flags = false_flags,
name = "dv01_reorder_matras",
+ order = { "dv01" },
subtables = { "dv01_reorder_matras" },
type = "devanagari_reorder_matras",
}
@@ -385,6 +491,7 @@ local sequence_reorder_reph = {
features = { dv02 = dev2_defaults },
flags = false_flags,
name = "dv02_reorder_reph",
+ order = { "dv02" },
subtables = { "dv02_reorder_reph" },
type = "devanagari_reorder_reph",
}
@@ -394,6 +501,7 @@ local sequence_reorder_pre_base_reordering_consonants = {
features = { dv03 = dev2_defaults },
flags = false_flags,
name = "dv03_reorder_pre_base_reordering_consonants",
+ order = { "dv03" },
subtables = { "dv03_reorder_pre_base_reordering_consonants" },
type = "devanagari_reorder_pre_base_reordering_consonants",
}
@@ -403,6 +511,7 @@ local sequence_remove_joiners = {
features = { dv04 = deva_defaults },
flags = false_flags,
name = "dv04_remove_joiners",
+ order = { "dv04" },
subtables = { "dv04_remove_joiners" },
type = "devanagari_remove_joiners",
}
@@ -426,7 +535,7 @@ local basic_shaping_forms = {
local function initializedevanagi(tfmdata)
local script, language = otf.scriptandlanguage(tfmdata,attr) -- todo: take fast variant
- if script == "deva" or script == "dev2" then
+ if script == "deva" or script == "dev2" or script =="mlym" or script == "mlm2" then
local resources = tfmdata.resources
local lookuphash = resources.lookuphash
if not lookuphash["dv01"] then
@@ -464,15 +573,20 @@ local function initializedevanagi(tfmdata)
--
if script == "deva" then
sharedfeatures["dv04"] = true -- dv04_remove_joiners
- end
- --
- if script == "dev2" then
+ elseif script == "dev2" then
sharedfeatures["dv01"] = true -- dv01_reorder_matras
sharedfeatures["dv02"] = true -- dv02_reorder_reph
sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
sharedfeatures["dv04"] = true -- dv04_remove_joiners
+ elseif script == "mlym" then
+ sharedfeatures["pstf"] = true
+ elseif script == "mlm2" then
+ sharedfeatures["pstf"] = true
+ sharedfeatures["pref"] = true
+ sharedfeatures["dv03"] = true -- dv03_reorder_pre_base_reordering_consonants
+ gsubfeatures["dv03"] = dev2_defaults -- reorder pre base reordering consonants
+ insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
end
- --
end
end
end
@@ -554,30 +668,30 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local lookuphash, reph, vattu, blwfcache = deva_initialize(font,attr) -- could be inlines but ugly
local current = start
- local n = start.next
+ local n = getnext(start)
local base = nil
local firstcons = nil
local lastcons = nil
local basefound = false
- if start.char == c_ra and halant[n.char] and reph then
+ if ra[getchar(start)] and halant[getchar(n)] and reph then
-- if syllable starts with Ra + H and script has 'Reph' then exclude Reph
-- from candidates for base consonants
if n == stop then
return head, stop, nbspaces
end
- if n.next.char == c_zwj then
+ if getchar(getnext(n)) == c_zwj then
current = start
else
- current = n.next
- start[a_state] = s_rphf
+ current = getnext(n)
+ setprop(start,a_state,s_rphf)
end
end
- if current.char == c_nbsp then
+ if getchar(current) == c_nbsp then
-- Stand Alone cluster
if current == stop then
- stop = stop.prev
+ stop = getprev(stop)
head = remove_node(head,current)
free_node(current)
return head, stop, nbspaces
@@ -586,37 +700,40 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
base = current
firstcons = current
lastcons = current
- current = current.next
+ current = getnext(current)
if current ~= stop then
- if nukta[current.char] then
- current = current.next
+ if nukta[getchar(current)] then
+ current = getnext(current)
end
- if current.char == c_zwj then
+ if getchar(current) == c_zwj then
if current ~= stop then
- local next = current.next
- if next ~= stop and halant[next.char] then
+ local next = getnext(current)
+ if next ~= stop and halant[getchar(next)] then
current = next
- next = current.next
- local tmp = next and next.next or nil -- needs checking
+ next = getnext(current)
+ local tmp = next and getnext(next) or nil -- needs checking
local changestop = next == stop
local tempcurrent = copy_node(next)
+ copyinjection(tempcurrent,next)
local nextcurrent = copy_node(current)
- tempcurrent.next = nextcurrent
- nextcurrent.prev = tempcurrent
- tempcurrent[a_state] = s_blwf
+ copyinjection(nextcurrent,current) -- KE: necessary? HH: probably not as positioning comes later and we rawget/set
+ setfield(tempcurrent,"next",nextcurrent)
+ setfield(nextcurrent,"prev",tempcurrent)
+ setprop(tempcurrent,a_state,s_blwf)
tempcurrent = processcharacters(tempcurrent,font)
- tempcurrent[a_state] = unsetvalue
- if next.char == tempcurrent.char then
+ setprop(tempcurrent,a_state,unsetvalue)
+ if getchar(next) == getchar(tempcurrent) then
flush_list(tempcurrent)
local n = copy_node(current)
- current.char = dotted_circle
+ copyinjection(n,current) -- KE: necessary? HH: probably not as positioning comes later and we rawget/set
+ setfield(current,"char",dotted_circle)
head = insert_node_after(head, current, n)
else
- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
- local freenode = current.next
- current.next = tmp
- if tmp then
- tmp.prev = current
+ setfield(current,"char",getchar(tempcurrent)) -- we assume that the result of blwf consists of one node
+ local freenode = getnext(current)
+ setfield(current,"next",tmp)
+ if tmp then
+ setfield(tmp,"prev",current)
end
free_node(freenode)
flush_list(tempcurrent)
@@ -633,83 +750,82 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
while not basefound do
-- find base consonant
- if consonant[current.char] then
- current[a_state] = s_half
+ if consonant[getchar(current)] then
+ setprop(current,a_state,s_half)
if not firstcons then
firstcons = current
end
lastcons = current
if not base then
base = current
- elseif blwfcache[current.char] then
+ elseif blwfcache[getchar(current)] then
-- consonant has below-base (or post-base) form
- current[a_state] = s_blwf
+ setprop(current,a_state,s_blwf)
else
base = current
end
end
basefound = current == stop
- current = current.next
+ current = getnext(current)
end
if base ~= lastcons then
-- if base consonant is not last one then move halant from base consonant to last one
local np = base
- local n = base.next
- if nukta[n.char] then
+ local n = getnext(base)
+ if nukta[getchar(n)] then
np = n
- n = n.next
+ n = getnext(n)
end
- if halant[n.char] then
+ if halant[getchar(n)] then
if lastcons ~= stop then
- local ln = lastcons.next
- if nukta[ln.char] then
+ local ln = getnext(lastcons)
+ if nukta[getchar(ln)] then
lastcons = ln
end
end
- -- local np = n.prev
- local nn = n.next
- local ln = lastcons.next -- what if lastcons is nn ?
- np.next = nn
- nn.prev = np
- lastcons.next = n
+ -- local np = getprev(n)
+ local nn = getnext(n)
+ local ln = getnext(lastcons) -- what if lastcons is nn ?
+ setfield(np,"next",nn)
+ setfield(nn,"prev",np)
+ setfield(lastcons,"next",n)
if ln then
- ln.prev = n
+ setfield(ln,"prev",n)
end
- n.next = ln
- n.prev = lastcons
+ setfield(n,"next",ln)
+ setfield(n,"prev",lastcons)
if lastcons == stop then
stop = n
end
end
end
- n = start.next
- -- if start.char == c_ra and halant[n.char] and not (n ~= stop and zw_char[n.next.char]) then
- if n ~= stop and start.char == c_ra and halant[n.char] and not zw_char[n.next.char] then
+ n = getnext(start)
+ if n ~= stop and ra[getchar(start)] and halant[getchar(n)] and not zw_char[getchar(getnext(n))] then
-- if syllable starts with Ra + H then move this combination so that it follows either:
-- the post-base 'matra' (if any) or the base consonant
local matra = base
if base ~= stop then
- local next = base.next
- if dependent_vowel[next.char] then
+ local next = getnext(base)
+ if dependent_vowel[getchar(next)] then
matra = next
end
end
-- [sp][start][n][nn] [matra|base][?]
-- [matra|base][start] [n][?] [sp][nn]
- local sp = start.prev
- local nn = n.next
- local mn = matra.next
+ local sp = getprev(start)
+ local nn = getnext(n)
+ local mn = getnext(matra)
if sp then
- sp.next = nn
+ setfield(sp,"next",nn)
end
- nn.prev = sp
- matra.next = start
- start.prev = matra
- n.next = mn
+ setfield(nn,"prev",sp)
+ setfield(matra,"next",start)
+ setfield(start,"prev",matra)
+ setfield(n,"next",mn)
if mn then
- mn.prev = n
+ setfield(mn,"prev",n)
end
if head == start then
head = nn
@@ -722,17 +838,17 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local current = start
while current ~= stop do
- local next = current.next
- if next ~= stop and halant[next.char] and next.next.char == c_zwnj then
- current[a_state] = unsetvalue
+ local next = getnext(current)
+ if next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwnj then
+ setprop(current,a_state,unsetvalue)
end
current = next
end
- if base ~= stop and base[a_state] then
- local next = base.next
- if halant[next.char] and not (next ~= stop and next.next.char == c_zwj) then
- base[a_state] = unsetvalue
+ if base ~= stop and getprop(base,a_state) then
+ local next = getnext(base)
+ if halant[getchar(next)] and not (next ~= stop and getchar(getnext(next)) == c_zwj) then
+ setprop(base,a_state,unsetvalue)
end
end
@@ -742,62 +858,62 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
-- classify consonants and 'matra' parts as pre-base, above-base (Reph), below-base or post-base, and group elements of the syllable (consonants and 'matras') according to this classification
local current, allreordered, moved = start, false, { [base] = true }
- local a, b, p, bn = base, base, base, base.next
- if base ~= stop and nukta[bn.char] then
+ local a, b, p, bn = base, base, base, getnext(base)
+ if base ~= stop and nukta[getchar(bn)] then
a, b, p = bn, bn, bn
end
while not allreordered do
-- current is always consonant
local c = current
- local n = current.next
+ local n = getnext(current)
local l = nil -- used ?
if c ~= stop then
- if nukta[n.char] then
+ if nukta[getchar(n)] then
c = n
- n = n.next
+ n = getnext(n)
end
if c ~= stop then
- if halant[n.char] then
+ if halant[getchar(n)] then
c = n
- n = n.next
+ n = getnext(n)
end
- while c ~= stop and dependent_vowel[n.char] do
+ while c ~= stop and dependent_vowel[getchar(n)] do
c = n
- n = n.next
+ n = getnext(n)
end
if c ~= stop then
- if vowel_modifier[n.char] then
+ if vowel_modifier[getchar(n)] then
c = n
- n = n.next
+ n = getnext(n)
end
- if c ~= stop and stress_tone_mark[n.char] then
+ if c ~= stop and stress_tone_mark[getchar(n)] then
c = n
- n = n.next
+ n = getnext(n)
end
end
end
end
- local bp = firstcons.prev
- local cn = current.next
- local last = c.next
+ local bp = getprev(firstcons)
+ local cn = getnext(current)
+ local last = getnext(c)
while cn ~= last do
-- move pre-base matras...
- if pre_mark[cn.char] then
+ if pre_mark[getchar(cn)] then
if bp then
- bp.next = cn
+ setfield(bp,"next",cn)
end
- local next = cn.next
- local prev = cn.prev
+ local next = getnext(cn)
+ local prev = getprev(cn)
if next then
- next.prev = prev
+ setfield(next,"prev",prev)
end
- prev.next = next
+ setfield(prev,"next",next)
if cn == stop then
stop = prev
end
- cn.prev = bp
- cn.next = firstcons
- firstcons.prev = cn
+ setfield(cn,"prev",bp)
+ setfield(cn,"next",firstcons)
+ setfield(firstcons,"prev",cn)
if firstcons == start then
if head == start then
head = cn
@@ -806,29 +922,29 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
end
break
end
- cn = cn.next
+ cn = getnext(cn)
end
allreordered = c == stop
- current = c.next
+ current = getnext(c)
end
if reph or vattu then
local current, cns = start, nil
while current ~= stop do
local c = current
- local n = current.next
- if current.char == c_ra and halant[n.char] then
+ local n = getnext(current)
+ if ra[getchar(current)] and halant[getchar(n)] then
c = n
- n = n.next
+ n = getnext(n)
local b, bn = base, base
while bn ~= stop do
- local next = bn.next
- if dependent_vowel[next.char] then
+ local next = getnext(bn)
+ if dependent_vowel[getchar(next)] then
b = next
end
bn = next
end
- if current[a_state] == s_rphf then
+ if getprop(current,a_state) == s_rphf then
-- position Reph (Ra + H) after post-base 'matra' (if any) since these
-- become marks on the 'matra', not on the base glyph
if b ~= current then
@@ -841,65 +957,65 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
if b == stop then
stop = c
end
- local prev = current.prev
+ local prev = getprev(current)
if prev then
- prev.next = n
+ setfield(prev,"next",n)
end
if n then
- n.prev = prev
+ setfield(n,"prev",prev)
end
- local next = b.next
- c.next = next
+ local next = getnext(b)
+ setfield(c,"next",next)
if next then
- next.prev = c
+ setfield(next,"prev",c)
end
- c.next = next
- b.next = current
- current.prev = b
+ setfield(c,"next",next)
+ setfield(b,"next",current)
+ setfield(current,"prev",b)
end
- elseif cns and cns.next ~= current then
+ elseif cns and getnext(cns) ~= current then -- todo: optimize next
-- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
- local cp, cnsn = current.prev, cns.next
+ local cp, cnsn = getprev(current), getnext(cns)
if cp then
- cp.next = n
+ setfield(cp,"next",n)
end
if n then
- n.prev = cp
+ setfield(n,"prev",cp)
end
- cns.next = current
- current.prev = cns
- c.next = cnsn
+ setfield(cns,"next",current)
+ setfield(current,"prev",cns)
+ setfield(c,"next",cnsn)
if cnsn then
- cnsn.prev = c
+ setfield(cnsn,"prev",c)
end
if c == stop then
stop = cp
break
end
- current = n.prev
+ current = getprev(n)
end
else
- local char = current.char
+ local char = getchar(current)
if consonant[char] then
cns = current
- local next = cns.next
- if halant[next.char] then
+ local next = getnext(cns)
+ if halant[getchar(next)] then
cns = next
end
elseif char == c_nbsp then
nbspaces = nbspaces + 1
cns = current
- local next = cns.next
- if halant[next.char] then
+ local next = getnext(cns)
+ if halant[getchar(next)] then
cns = next
end
end
end
- current = current.next
+ current = getnext(current)
end
end
- if base.char == c_nbsp then
+ if getchar(base) == c_nbsp then
nbspaces = nbspaces - 1
head = remove_node(head,base)
free_node(base)
@@ -919,24 +1035,24 @@ end
function handlers.devanagari_reorder_matras(head,start,kind,lookupname,replacement) -- no leak
local current = start -- we could cache attributes here
- local startfont = start.font
- local startattr = start[a_syllabe]
+ local startfont = getfont(start)
+ local startattr = getprop(start,a_syllabe)
-- can be fast loop
- while current and current.id == glyph_code and current.subtype<256 and current.font == font and current[a_syllabe] == startattr do
- local next = current.next
- if halant[current.char] and not current[a_state] then
- if next and next.id == glyph_code and next.subtype<256 and next.font == font and next[a_syllabe] == startattr and zw_char[next.char] then
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and getprop(current,a_syllabe) == startattr do
+ local next = getnext(current)
+ if halant[getchar(current)] and not getprop(current,a_state) then
+ if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == font and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
current = next
end
- local startnext = start.next
+ local startnext = getnext(start)
head = remove_node(head,start)
- local next = current.next
+ local next = getnext(current)
if next then
- next.prev = start
+ setfield(next,"prev",start)
end
- start.next = next
- current.next = start
- start.prev = current
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
start = startnext
break
end
@@ -972,98 +1088,98 @@ end
function handlers.devanagari_reorder_reph(head,start,kind,lookupname,replacement)
-- since in Devanagari reph has reordering position 'before postscript', dev2 only follows steps 2, 4, and 6,
-- the other steps are still ToDo (required for scripts other than dev2)
- local current = start.next
+ local current = getnext(start)
local startnext = nil
local startprev = nil
- local startfont = start.font
- local startattr = start[a_syllabe]
- while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 2
- if halant[current.char] and not current[a_state] then
- local next = current.next
- if next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr and zw_char[next.char] then
+ local startfont = getfont(start)
+ local startattr = getprop(start,a_syllabe)
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 2
+ if halant[getchar(current)] and not getprop(current,a_state) then
+ local next = getnext(current)
+ if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
current = next
end
- startnext = start.next
+ startnext = getnext(start)
head = remove_node(head,start)
- local next = current.next
+ local next = getnext(current)
if next then
- next.prev = start
+ setfield(next,"prev",start)
end
- start.next = next
- current.next = start
- start.prev = current
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
start = startnext
- startattr = start[a_syllabe]
+ startattr = getprop(start,a_syllabe)
break
end
- current = current.next
+ current = getnext(current)
end
if not startnext then
- current = start.next
- while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 4
- if current[a_state] == s_pstf then --post-base
- startnext = start.next
+ current = getnext(start)
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 4
+ if getprop(current,a_state) == s_pstf then --post-base
+ startnext = getnext(start)
head = remove_node(head,start)
- local prev = current.prev
- start.prev = prev
- prev.next = start
- start.next = current
- current.prev = start
+ local prev = getprev(current)
+ setfield(start,"prev",prev)
+ setfield(prev,"next",start)
+ setfield(start,"next",current)
+ setfield(current,"prev",start)
start = startnext
- startattr = start[a_syllabe]
+ startattr = getprop(start,a_syllabe)
break
end
- current = current.next
+ current = getnext(current)
end
end
-- ToDo: determine position for reph with reordering position other than 'before postscript'
-- (required for scripts other than dev2)
-- leaks
if not startnext then
- current = start.next
+ current = getnext(start)
local c = nil
- while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do --step 5
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 5
if not c then
- local char = current.char
+ local char = getchar(current)
-- todo: combine in one
if mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
c = current
end
end
- current = current.next
+ current = getnext(current)
end
-- here we can lose the old start node: maybe best to split cases
if c then
- startnext = start.next
+ startnext = getnext(start)
head = remove_node(head,start)
- local prev = c.prev
- start.prev = prev
- prev.next = start
- start.next = c
- c.prev = start
+ local prev = getprev(c)
+ setfield(start,"prev",prev)
+ setfield(prev,"next",start)
+ setfield(start,"next",c)
+ setfield(c,"prev",start)
-- end
start = startnext
- startattr = start[a_syllabe]
+ startattr = getprop(start,a_syllabe)
end
end
-- leaks
if not startnext then
current = start
- local next = current.next
- while next and next.id == glyph_code and next.subtype<256 and next.font == startfont and next[a_syllabe] == startattr do --step 6
+ local next = getnext(current)
+ while next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr do --step 6
current = next
- next = current.next
+ next = getnext(current)
end
if start ~= current then
- startnext = start.next
+ startnext = getnext(start)
head = remove_node(head,start)
- local next = current.next
+ local next = getnext(current)
if next then
- next.prev = start
+ setfield(next,"prev",start)
end
- start.next = next
- current.next = start
- start.prev = current
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
start = startnext
end
end
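The detach-and-splice sequence above (remove start, then rewire the prev/next fields so it sits directly after current) recurs in nearly every branch of these reordering handlers. A minimal sketch of that pattern, assuming the nuts accessors (getnext, setfield) and remove_node already imported in this file; the helper name is illustrative only:

    local function move_after(head,start,current)
        -- detach start and reinsert it directly after current
        local startnext = getnext(start)
        head = remove_node(head,start)
        local next = getnext(current)
        if next then
            setfield(next,"prev",start)
        end
        setfield(start,"next",next)
        setfield(current,"next",start)
        setfield(start,"prev",current)
        return head, startnext
    end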
@@ -1086,71 +1202,71 @@ function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start,k
local current = start
local startnext = nil
local startprev = nil
- local startfont = start.font
- local startattr = start[a_syllabe]
+ local startfont = getfont(start)
+ local startattr = getprop(start,a_syllabe)
-- can be fast for loop + caching state
- while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
- local next = current.next
- if halant[current.char] and not current[a_state] then
- if next and next.id == glyph_code and next.subtype<256 and next.font == font and next[a_syllabe] == startattr then
- local char = next.char
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
+ local next = getnext(current)
+ if halant[getchar(current)] and not getprop(current,a_state) then
+ if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == font and getprop(next,a_syllabe) == startattr then
+ local char = getchar(next)
if char == c_zwnj or char == c_zwj then
current = next
end
end
- startnext = start.next
+ startnext = getnext(start)
removenode(start,start)
- local next = current.next
+ local next = getnext(current)
if next then
- next.prev = start
+ setfield(next,"prev",start)
end
- start.next = next
- current.next = start
- start.prev = current
+ setfield(start,"next",next)
+ setfield(current,"next",start)
+ setfield(start,"prev",current)
start = startnext
break
end
current = next
end
if not startnext then
- current = start.next
- startattr = start[a_syllabe]
- while current and current.id == glyph_code and current.subtype<256 and current.font == startfont and current[a_syllabe] == startattr do
- if not consonant[current.char] and current[a_state] then --main
- startnext = start.next
+ current = getnext(start)
+ startattr = getprop(start,a_syllabe)
+ while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
+ if not consonant[getchar(current)] and getprop(current,a_state) then --main
+ startnext = getnext(start)
removenode(start,start)
- local prev = current.prev
- start.prev = prev
- prev.next = start
- start.next = current
- current.prev = start
+ local prev = getprev(current)
+ setfield(start,"prev",prev)
+ setfield(prev,"next",start)
+ setfield(start,"next",current)
+ setfield(current,"prev",start)
start = startnext
break
end
- current = current.next
+ current = getnext(current)
end
end
return head, start, true
end
function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
- local stop = start.next
- local startfont = start.font
- while stop and stop.id == glyph_code and stop.subtype<256 and stop.font == startfont do
- local char = stop.char
+ local stop = getnext(start)
+ local startfont = getfont(start)
+ while stop and getid(stop) == glyph_code and getsubtype(stop) < 256 and getfont(stop) == startfont do
+ local char = getchar(stop)
if char == c_zwnj or char == c_zwj then
- stop = stop.next
+ stop = getnext(stop)
else
break
end
end
if stop then
- stop.prev.next = nil
- stop.prev = start.prev
+ setfield(getfield(stop,"prev"),"next",nil)
+ setfield(stop,"prev",getprev(start))
end
- local prev = start.prev
+ local prev = getprev(start)
if prev then
- prev.next = stop
+ setfield(prev,"next",stop)
end
if head == start then
head = stop
@@ -1160,11 +1276,15 @@ function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replaceme
end
local valid = {
+ akhn = true, -- malayalam
rphf = true,
pref = true,
half = true,
blwf = true,
pstf = true,
+ pres = true, -- malayalam
+ blws = true, -- malayalam
+ psts = true, -- malayalam
}
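The valid table above now whitelists the Malayalam substitution features alongside the Devanagari ones. A hedged sketch of how a table like this might be used to keep only the relevant GSUB features when building the per-font data (the dataset slot holding the feature tag is assumed for illustration; the real initializer below does considerably more):

    -- illustrative only: filter feature datasets through the validity table
    local function keep_valid(datasets)
        local kept = { }
        for i=1,#datasets do
            local dataset = datasets[i]
            local kind    = dataset[4] -- feature tag such as "rphf" or "pres" (assumed slot)
            if kind and valid[kind] then
                kept[#kept+1] = dataset
            end
        end
        return kept
    end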
local function dev2_initialize(font,attr)
@@ -1206,16 +1326,25 @@ local function dev2_initialize(font,attr)
local reph = false
local chain = dataset[3]
if chain ~= 0 then -- rphf is the result of a chain
- --ToDo: rphf might be result of other handler/chainproc
+ -- rphf might be result of other handler/chainproc
else
- reph = lookupcache[0x0930]
- if reph then
- reph = reph[0x094D]
- if reph then
- reph = reph["ligature"]
+ -- rphf acts on consonant + halant
+ for k, v in next, ra do
+ local r = lookupcache[k]
+ if r then
+ local h = false
+ for k, v in next, halant do
+ local h = r[k]
+ if h then
+ reph = h.ligature or false
+ break
+ end
+ end
+ if reph then
+ break
+ end
end
end
- --ToDo: rphf actualy acts on consonant + halant. This consonant might not necesseraly be 0x0930 ... (but fot dev2 it is)
end
seqsubset[#seqsubset+1] = { kind, lookupcache, reph }
end
@@ -1256,32 +1385,37 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local kind = subset[1]
local lookupcache = subset[2]
if kind == "rphf" then
- -- todo: rphf might be result of other handler/chainproc
- -- todo: rphf actualy acts on consonant + halant.
- -- todo: the consonant might not necesseraly be 0x0930 ... (but for devanagari it is)
- local lookup = lookupcache[0x0930]
- if lookup then
- local hit = lookup[0x094D]
- if hit then
- reph = hit["ligature"]
+ for k, v in next, ra do
+ local r = lookupcache[k]
+ if r then
+ for k, v in next, halant do
+ local h = r[k]
+ if h then
+ reph = h.ligature or false
+ break
+ end
+ end
+ if reph then
+ break
+ end
end
end
local current = start
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do
if current ~= stop then
- local c = locl[current] or current.char
+ local c = locl[current] or getchar(current)
local found = lookupcache[c]
if found then
- local next = current.next
- local n = locl[next] or next.char
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
if found[n] then --above-base: rphf Consonant + Halant
- local afternext = next ~= stop and next.next
- if afternext and zw_char[afternext.char] then -- ZWJ and ZWNJ prevent creation of reph
+ local afternext = next ~= stop and getnext(next)
+ if afternext and zw_char[getchar(afternext)] then -- ZWJ and ZWNJ prevent creation of reph
current = next
- current = current.next
+ current = getnext(current)
elseif current == start then
- current[a_state] = s_rphf
+ setprop(current,a_state,s_rphf)
current = next
else
current = next
@@ -1289,98 +1423,111 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
end
end
- current = current.next
+ current = getnext(current)
end
elseif kind == "pref" then
-- why not global? pretty inefficient this way
-- this will move to the initializer and we will store the hash in dataset
-- todo: reph might also be result of chain
- for k, v in lookupcache[0x094D], next do
- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
+ for k, v in next, halant do
+ local h = lookupcache[k]
+ if h then
+ local found = false
+ for k, v in next, h do
+ found = v and v.ligature
+ if found then
+ pre_base_reordering_consonants[k] = found
+ break
+ end
+ end
+ if found then
+ break
+ end
+ end
end
--
local current = start
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do
if current ~= stop then
- local c = locl[current] or current.char
+ local c = locl[current] or getchar(current)
local found = lookupcache[c]
if found then
- local next = current.next
- local n = locl[next] or next.char
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
if found[n] then
- current[a_state] = s_pref
- next[a_state] = s_pref
+ setprop(current,a_state,s_pref)
+ setprop(next,a_state,s_pref)
current = next
end
end
end
- current = current.next
+ current = getnext(current)
end
elseif kind == "half" then -- half forms: half / Consonant + Halant
local current = start
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do
if current ~= stop then
- local c = locl[current] or current.char
+ local c = locl[current] or getchar(current)
local found = lookupcache[c]
if found then
- local next = current.next
- local n = locl[next] or next.char
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
if found[n] then
- if next ~= stop and next.next.char == c_zwnj then -- zwnj prevent creation of half
+ if next ~= stop and getchar(getnext(next)) == c_zwnj then -- zwnj prevent creation of half
current = next
else
- current[a_state] = s_half
+ setprop(current,a_state,s_half)
if not halfpos then
halfpos = current
end
end
- current = current.next
+ current = getnext(current)
end
end
end
- current = current.next
+ current = getnext(current)
end
elseif kind == "blwf" then -- below-base: blwf / Halant + Consonant
local current = start
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do
if current ~= stop then
- local c = locl[current] or current.char
+ local c = locl[current] or getchar(current)
local found = lookupcache[c]
if found then
- local next = current.next
- local n = locl[next] or next.char
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
if found[n] then
- current[a_state] = s_blwf
- next[a_state] = s_blwf
+ setprop(current,a_state,s_blwf)
+ setprop(next,a_state,s_blwf)
current = next
subpos = current
end
end
end
- current = current.next
+ current = getnext(current)
end
elseif kind == "pstf" then -- post-base: pstf / Halant + Consonant
local current = start
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do
if current ~= stop then
- local c = locl[current] or current.char
+ local c = locl[current] or getchar(current)
local found = lookupcache[c]
if found then
- local next = current.next
- local n = locl[next] or next.char
+ local next = getnext(current)
+ local n = locl[next] or getchar(next)
if found[n] then
- current[a_state] = s_pstf
- next[a_state] = s_pstf
+ setprop(current,a_state,s_pstf)
+ setprop(next,a_state,s_pstf)
current = next
postpos = current
end
end
end
- current = current.next
+ current = getnext(current)
end
end
end
@@ -1392,51 +1539,51 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local current, base, firstcons = start, nil, nil
- if start[a_state] == s_rphf then
+ if getprop(start,a_state) == s_rphf then
-- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
- current = start.next.next
+ current = getnext(getnext(start))
end
- local function stand_alone(is_nbsp)
+ if current ~= getnext(stop) and getchar(current) == c_nbsp then
+ -- Stand Alone cluster
if current == stop then
- stop = stop.prev
+ stop = getprev(stop)
head = remove_node(head,current)
free_node(current)
return head, stop, nbspaces
else
- if is_nbsp then
- nbspaces = nbspaces + 1
- end
- base = current
- current = current.next
+ nbspaces = nbspaces + 1
+ base = current
+ current = getnext(current)
if current ~= stop then
- local char = current.char
+ local char = getchar(current)
if nukta[char] then
- current = current.next
- char = current.char
+ current = getnext(current)
+ char = getchar(current)
end
if char == c_zwj then
- local next = current.next
- if current ~= stop and next ~= stop and halant[next.char] then
+ local next = getnext(current)
+ if current ~= stop and next ~= stop and halant[getchar(next)] then
current = next
- next = current.next
- local tmp = next.next
+ next = getnext(current)
+ local tmp = getnext(next)
local changestop = next == stop
- next.next = nil
- current[a_state] = s_pref
+ setfield(next,"next",nil)
+ setprop(current,a_state,s_pref)
current = processcharacters(current,font)
- current[a_state] = s_blwf
+ setprop(current,a_state,s_blwf)
current = processcharacters(current,font)
- current[a_state] = s_pstf
+ setprop(current,a_state,s_pstf)
current = processcharacters(current,font)
- current[a_state] = unsetvalue
- if halant[current.char] then
- current.next.next = tmp
+ setprop(current,a_state,unsetvalue)
+ if halant[getchar(current)] then
+ setfield(getnext(current),"next",tmp)
local nc = copy_node(current)
- current.char = dotted_circle
+ copyinjection(nc,current)
+ setfield(current,"char",dotted_circle)
head = insert_node_after(head,current,nc)
else
- current.next = tmp -- assumes that result of pref, blwf, or pstf consists of one node
+ setfield(current,"next",tmp) -- assumes that result of pref, blwf, or pstf consists of one node
if changestop then
stop = current
end
@@ -1445,25 +1592,17 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
end
end
- end
-
- if current ~= stop.next then
- -- Stand Alone cluster
- stand_alone()
- elseif current.char == c_nbsp then
- -- Stand Alone cluster
- stand_alone(true)
else -- not Stand Alone cluster
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do -- find base consonant
- local next = current.next
- if consonant[current.char] then
- if not (current ~= stop and next ~= stop and halant[next.char] and next.next.char == c_zwj) then
+ local next = getnext(current)
+ if consonant[getchar(current)] then
+ if not (current ~= stop and next ~= stop and halant[getchar(next)] and getchar(getnext(next)) == c_zwj) then
if not firstcons then
firstcons = current
end
-- check whether consonant has below-base or post-base form or is pre-base reordering Ra
- local a = current[a_state]
+ local a = getprop(current,a_state)
if not (a == s_pref or a == s_blwf or a == s_pstf) then
base = current
end
@@ -1477,13 +1616,13 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
if not base then
- if start[a_state] == s_rphf then
- start[a_state] = unsetvalue
+ if getprop(start,a_state) == s_rphf then
+ setprop(start,a_state,unsetvalue)
end
return head, stop, nbspaces
else
- if base[a_state] then
- base[a_state] = unsetvalue
+ if getprop(base,a_state) then
+ setprop(base,a_state,unsetvalue)
end
basepos = base
end
@@ -1501,22 +1640,33 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local moved = { }
local current = start
- local last = stop.next
+ local last = getnext(stop)
while current ~= last do
- local char, target, cn = locl[current] or current.char, nil, current.next
+ local char, target, cn = locl[current] or getchar(current), nil, getnext(current)
+-- not so efficient (needed for malayalam)
+local tpm = twopart_mark[char]
+if tpm then
+ local extra = copy_node(current)
+ copyinjection(extra,current)
+ char = tpm[1]
+ setfield(current,"char",char)
+ setfield(extra,"char",tpm[2])
+ head = insert_node_after(head,current,extra)
+end
+--
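-- aside, as a hedged sketch only: twopart_mark is assumed to map a two-part
-- dependent vowel onto its left and right halves so the pair can be positioned
-- independently, e.g. an entry of the shape
--   twopart_mark[0x0D4A] = { 0x0D46, 0x0D3E } -- MALAYALAM VOWEL SIGN O = E + AA
-- (illustrative codepoints; the real table is defined earlier in this file)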
if not moved[current] and dependent_vowel[char] then
if pre_mark[char] then -- Before first half form in the syllable
moved[current] = true
- local prev = current.prev
- local next = current.next
+ local prev = getprev(current)
+ local next = getnext(current)
if prev then
- prev.next = next
+ setfield(prev,"next",next)
end
if next then
- next.prev = prev
+ setfield(next,"prev",prev)
end
if current == stop then
- stop = current.prev
+ stop = getprev(current)
end
if halfpos == start then
if head == start then
@@ -1524,13 +1674,13 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
end
start = current
end
- local prev = halfpos.prev
+ local prev = getprev(halfpos)
if prev then
- prev.next = current
+ setfield(prev,"next",current)
end
- current.prev = prev
- halfpos.prev = current
- current.next = halfpos
+ setfield(current,"prev",prev)
+ setfield(halfpos,"prev",current)
+ setfield(current,"next",halfpos)
halfpos = current
elseif above_mark[char] then -- After main consonant
target = basepos
@@ -1552,25 +1702,25 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
postpos = current
end
if mark_above_below_post[char] then
- local prev = current.prev
+ local prev = getprev(current)
if prev ~= target then
- local next = current.next
+ local next = getnext(current)
if prev then -- not needed, already tested with target
- prev.next = next
+ setfield(prev,"next",next)
end
if next then
- next.prev = prev
+ setfield(next,"prev",prev)
end
if current == stop then
stop = prev
end
- local next = target.next
+ local next = getnext(target)
if next then
- next.prev = current
+ setfield(next,"prev",current)
end
- current.next = next
- target.next = current
- current.prev = target
+ setfield(current,"next",next)
+ setfield(target,"next",current)
+ setfield(current,"prev",target)
end
end
end
@@ -1581,7 +1731,7 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local current, c = start, nil
while current ~= stop do
- local char = current.char
+ local char = getchar(current)
if halant[char] or stress_tone_mark[char] then
if not c then
c = current
@@ -1589,33 +1739,33 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
else
c = nil
end
- local next = current.next
- if c and nukta[next.char] then
+ local next = getnext(current)
+ if c and nukta[getchar(next)] then
if head == c then
head = next
end
if stop == next then
stop = current
end
- local prev = c.prev
+ local prev = getprev(c)
if prev then
- prev.next = next
+ setfield(prev,"next",next)
end
- next.prev = prev
- local nextnext = next.next
- current.next = nextnext
- local nextnextnext = nextnext.next
+ setfield(next,"prev",prev)
+ local nextnext = getnext(next)
+ setfield(current,"next",nextnext)
+ local nextnextnext = getnext(nextnext)
if nextnextnext then
- nextnextnext.prev = current
+ setfield(nextnextnext,"prev",current)
end
- c.prev = nextnext
- nextnext.next = c
+ setfield(c,"prev",nextnext)
+ setfield(nextnext,"next",c)
end
if stop == current then break end
- current = current.next
+ current = getnext(current)
end
- if base.char == c_nbsp then
+ if getchar(base) == c_nbsp then
nbspaces = nbspaces - 1
head = remove_node(head, base)
free_node(base)
@@ -1639,30 +1789,30 @@ for k, v in next, halant do separator[k] = true end
local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowel
-- why two variants ... the comment suggests that it's the same ruleset
- local n = c.next
+ local n = getnext(c)
if not n then
return c
end
if variant == 1 then
- local v = n.id == glyph_code and n.subtype<256 and n.font == font
- if v and nukta[n.char] then
- n = n.next
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ if v and nukta[getchar(n)] then
+ n = getnext(n)
if n then
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
end
end
if n and v then
- local nn = n.next
- if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then
- local nnn = nn.next
- if nnn and nnn.id == glyph_code and nnn.subtype<256 and nnn.font == font then
- local nnc = nn.char
- local nnnc = nnn.char
+ local nn = getnext(n)
+ if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and getsubtype(nnn) < 256 and getfont(nnn) == font then
+ local nnc = getchar(nn)
+ local nnnc = getchar(nnn)
if nnc == c_zwj and consonant[nnnc] then
c = nnn
elseif (nnc == c_zwnj or nnc == c_zwj) and halant[nnnc] then
- local nnnn = nnn.next
- if nnnn and nnnn.id == glyph_code and consonant[nnnn.char] and nnnn.subtype<256 and nnnn.font == font then
+ local nnnn = getnext(nnn)
+ if nnnn and getid(nnnn) == glyph_code and consonant[getchar(nnnn)] and getsubtype(nnnn) < 256 and getfont(nnnn) == font then
c = nnnn
end
end
@@ -1670,94 +1820,94 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe
end
end
elseif variant == 2 then
- if n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then
+ if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
c = n
end
- n = c.next
- if n and n.id == glyph_code and n.subtype<256 and n.font == font then
- local nn = n.next
+ n = getnext(c)
+ if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
+ local nn = getnext(n)
if nn then
- local nv = nn.id == glyph_code and nn.subtype<256 and nn.font == font
- if nv and zw_char[n.char] then
+ local nv = getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
+ if nv and zw_char[getchar(n)] then
n = nn
- nn = nn.next
- nv = nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font
+ nn = getnext(nn)
+ nv = nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
end
- if nv and halant[n.char] and consonant[nn.char] then
+ if nv and halant[getchar(n)] and consonant[getchar(nn)] then
c = nn
end
end
end
end
-- c = ms_matra(c)
- local n = c.next
+ local n = getnext(c)
if not n then
return c
end
- local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- local char = n.char
+ local char = getchar(n)
if dependent_vowel[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if nukta[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if halant[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if vowel_modifier[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if stress_tone_mark[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if stress_tone_mark[char] then
return n
@@ -1767,38 +1917,38 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe
end
local function analyze_next_chars_two(c,font)
- local n = c.next
+ local n = getnext(c)
if not n then
return c
end
- if n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then
+ if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
c = n
end
n = c
while true do
- local nn = n.next
- if nn and nn.id == glyph_code and nn.subtype<256 and nn.font == font then
- local char = nn.char
+ local nn = getnext(n)
+ if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
+ local char = getchar(nn)
if halant[char] then
n = nn
- local nnn = nn.next
- if nnn and nnn.id == glyph_code and zw_char[nnn.char] and nnn.subtype<256 and nnn.font == font then
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and zw_char[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
n = nnn
end
elseif char == c_zwnj or char == c_zwj then
-- n = nn -- not here (?)
- local nnn = nn.next
- if nnn and nnn.id == glyph_code and halant[nnn.char] and nnn.subtype<256 and nnn.font == font then
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and halant[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
n = nnn
end
else
break
end
- local nn = n.next
- if nn and nn.id == glyph_code and consonant[nn.char] and nn.subtype<256 and nn.font == font then
+ local nn = getnext(n)
+ if nn and getid(nn) == glyph_code and consonant[getchar(nn)] and getsubtype(nn) < 256 and getfont(nn) == font then
n = nn
- local nnn = nn.next
- if nnn and nnn.id == glyph_code and nukta[nnn.char] and nnn.subtype<256 and nnn.font == font then
+ local nnn = getnext(nn)
+ if nnn and getid(nnn) == glyph_code and nukta[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
n = nnn
end
c = n
@@ -1814,114 +1964,114 @@ local function analyze_next_chars_two(c,font)
-- This shouldn't happen I guess.
return
end
- local n = c.next
+ local n = getnext(c)
if not n then
return c
end
- local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- local char = n.char
+ local char = getchar(n)
if char == c_anudatta then
c = n
- n = c.next
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if halant[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
if char == c_zwnj or char == c_zwj then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
else
-- c = ms_matra(c)
-- same as one
if dependent_vowel[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if nukta[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if halant[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
end
-- same as one
if vowel_modifier[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if stress_tone_mark[char] then
- c = c.next
- n = c.next
+ c = getnext(c)
+ n = getnext(c)
if not n then
return c
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
return c
end
- char = n.char
+ char = getchar(n)
end
if stress_tone_mark[char] then
return n
@@ -1932,10 +2082,11 @@ end
local function inject_syntax_error(head,current,mark)
local signal = copy_node(current)
+ copyinjection(signal,current)
if mark == pre_mark then
- signal.char = dotted_circle
+ setfield(signal,"char",dotted_circle)
else
- current.char = dotted_circle
+ setfield(current,"char",dotted_circle)
end
return insert_node_after(head,current,signal)
end
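inject_syntax_error pairs an orphaned mark with a dotted circle (U+25CC) so the problem stays visible in the typeset output. A hedged usage sketch, mirroring how the syllable scanners further down invoke it (mark_four is the category table they consult):

    -- sketch: called when a combining mark shows up without a valid base
    local mark = mark_four[getchar(current)]
    if mark then
        head, current = inject_syntax_error(head,current,mark)
    end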
@@ -1944,31 +2095,32 @@ end
-- a lot. Common code has been synced.
function methods.deva(head,font,attr)
+ head = tonut(head)
local current = head
local start = true
local done = false
local nbspaces = 0
while current do
- if current.id == glyph_code and current.subtype<256 and current.font == font then
+ if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
done = true
local syllablestart = current
local syllableend = nil
local c = current
- local n = c.next
- if n and c.char == c_ra and n.id == glyph_code and halant[n.char] and n.subtype<256 and n.font == font then
- local n = n.next
- if n and n.id == glyph_code and n.subtype<256 and n.font == font then
+ local n = getnext(c)
+ if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local n = getnext(n)
+ if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
c = n
end
end
- local standalone = c.char == c_nbsp
+ local standalone = getchar(c) == c_nbsp
if standalone then
- local prev = current.prev
+ local prev = getprev(current)
if not prev then
-- begin of paragraph or box
- elseif prev.id ~= glyph_code or prev.subtype>=256 or prev.font ~= font then
+ elseif getid(prev) ~= glyph_code or getsubtype(prev) >= 256 or getfont(prev) ~= font then
-- different font or language so quite certainly a different word
- elseif not separator[prev.char] then
+ elseif not separator[getchar(prev)] then
-- something that separates words
else
standalone = false
@@ -1977,61 +2129,61 @@ function methods.deva(head,font,attr)
if standalone then
-- stand alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
local syllableend = analyze_next_chars_one(c,font,2)
- current = syllableend.next
+ current = getnext(syllableend)
if syllablestart ~= syllableend then
head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- current = current.next
+ current = getnext(current)
end
else
- -- we can delay the n.subtype and n.font and test for say halant first
+ -- we can delay the getsubtype(n) and getfont(n) and test for say halant first
-- as a table access is faster than two function calls (subtype and font are
-- pseudo fields) but the code becomes messy (unless we make it a function)
- local char = current.char
+ local char = getchar(current)
if consonant[char] then
-- syllable containing consonant
local prevc = true
while prevc do
prevc = false
- local n = current.next
+ local n = getnext(current)
if not n then
break
end
- local v = n.id == glyph_code and n.subtype<256 and n.font == font
+ local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
break
end
- local c = n.char
+ local c = getchar(n)
if nukta[c] then
- n = n.next
+ n = getnext(n)
if not n then
break
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
break
end
- c = n.char
+ c = getchar(n)
end
if halant[c] then
- n = n.next
+ n = getnext(n)
if not n then
break
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
break
end
- c = n.char
+ c = getchar(n)
if c == c_zwnj or c == c_zwj then
- n = n.next
+ n = getnext(n)
if not n then
break
end
- v = n.id == glyph_code and n.subtype<256 and n.font == font
+ v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
if not v then
break
end
- c = n.char
+ c = getchar(n)
end
if consonant[c] then
prevc = true
@@ -2039,77 +2191,77 @@ function methods.deva(head,font,attr)
end
end
end
- local n = current.next
- if n and n.id == glyph_code and nukta[n.char] and n.subtype<256 and n.font == font then
+ local n = getnext(current)
+ if n and getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
-- nukta (not specified in Microsoft Devanagari OpenType specification)
current = n
- n = current.next
+ n = getnext(current)
end
syllableend = current
current = n
if current then
- local v = current.id == glyph_code and current.subtype<256 and current.font == font
+ local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
if v then
- if halant[current.char] then
+ if halant[getchar(current)] then
-- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
- local n = current.next
- if n and n.id == glyph_code and zw_char[n.char] and n.subtype<256 and n.font == font then
+ local n = getnext(current)
+ if n and getid(n) == glyph_code and zw_char[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
-- code collapsed, probably needs checking with intention
syllableend = n
- current = n.next
+ current = getnext(n)
else
syllableend = current
current = n
end
else
-- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
- local c = current.char
+ local c = getchar(current)
if dependent_vowel[c] then
syllableend = current
- current = current.next
- v = current and current.id == glyph_code and current.subtype<256 and current.font == font
+ current = getnext(current)
+ v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
if v then
- c = current.char
+ c = getchar(current)
end
end
if v and vowel_modifier[c] then
syllableend = current
- current = current.next
- v = current and current.id == glyph_code and current.subtype<256 and current.font == font
+ current = getnext(current)
+ v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
if v then
- c = current.char
+ c = getchar(current)
end
end
if v and stress_tone_mark[c] then
syllableend = current
- current = current.next
+ current = getnext(current)
end
end
end
end
if syllablestart ~= syllableend then
head, current, nbspaces = deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- current = current.next
+ current = getnext(current)
end
elseif independent_vowel[char] then
-- syllable without consonants: VO + [VM] + [SM]
syllableend = current
- current = current.next
+ current = getnext(current)
if current then
- local v = current.id == glyph_code and current.subtype<256 and current.font == font
+ local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
if v then
- local c = current.char
+ local c = getchar(current)
if vowel_modifier[c] then
syllableend = current
- current = current.next
- v = current and current.id == glyph_code and current.subtype<256 and current.font == font
+ current = getnext(current)
+ v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
if v then
- c = current.char
+ c = getchar(current)
end
end
if v and stress_tone_mark[c] then
syllableend = current
- current = current.next
+ current = getnext(current)
end
end
end
@@ -2118,11 +2270,11 @@ function methods.deva(head,font,attr)
if mark then
head, current = inject_syntax_error(head,current,mark)
end
- current = current.next
+ current = getnext(current)
end
end
else
- current = current.next
+ current = getnext(current)
end
start = false
end
@@ -2131,7 +2283,7 @@ function methods.deva(head,font,attr)
head = replace_all_nbsp(head)
end
- head = typesetters.characters.handler(head)
+ head = tonode(head)
return head, done
end
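Both script methods now follow the same wrapping pattern: convert the incoming node list to direct nodes ("nuts"), do all the work with the fast getters and setters, and convert back before returning. A minimal sketch of that shell, assuming the tonut/tonode helpers imported earlier:

    -- sketch of the tonut/tonode shell shared by methods.deva and methods.dev2
    local function wrapped(head,font,attr)
        head = tonut(head)
        local done = false
        -- ... scan syllables with getid/getchar/getnext and reorder them ...
        head = tonode(head)
        return head, done
    end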
@@ -2142,6 +2294,7 @@ end
-- handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
function methods.dev2(head,font,attr)
+ head = tonut(head)
local current = head
local start = true
local done = false
@@ -2149,18 +2302,18 @@ function methods.dev2(head,font,attr)
local nbspaces = 0
while current do
local syllablestart, syllableend = nil, nil
- if current.id == glyph_code and current.subtype<256 and current.font == font then
+ if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
done = true
syllablestart = current
local c = current
- local n = current.next
- if n and c.char == c_ra and n.id == glyph_code and halant[n.char] and n.subtype<256 and n.font == font then
- local n = n.next
- if n and n.id == glyph_code and n.subtype<256 and n.font == font then
+ local n = getnext(current)
+ if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local n = getnext(n)
+ if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
c = n
end
end
- local char = c.char
+ local char = getchar(c)
if independent_vowel[char] then
-- vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
current = analyze_next_chars_one(c,font,1)
@@ -2169,12 +2322,12 @@ function methods.dev2(head,font,attr)
local standalone = char == c_nbsp
if standalone then
nbspaces = nbspaces + 1
- local p = current.prev
+ local p = getprev(current)
if not p then
-- begin of paragraph or box
- elseif p.id ~= glyph_code or p.subtype>=256 or p.font ~= font then
+ elseif getid(p) ~= glyph_code or getsubtype(p) >= 256 or getfont(p) ~= font then
-- different font or language so quite certainly a different word
- elseif not separator[p.char] then
+ elseif not separator[getchar(p)] then
-- something that separates words
else
standalone = false
@@ -2184,7 +2337,7 @@ function methods.dev2(head,font,attr)
-- Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[<ZWJ|ZWNJ>]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
current = analyze_next_chars_one(c,font,2)
syllableend = current
- elseif consonant[current.char] then
+ elseif consonant[getchar(current)] then
-- WHY current INSTEAD OF c ?
-- Consonant syllable: {C+[N]+<H+[<ZWNJ|ZWJ>]|<ZWNJ|ZWJ>+H>} + C+[N]+[A] + [< H+[<ZWNJ|ZWJ>] | {M}+[N]+[H]>]+[SM]+[(VD)]
@@ -2196,28 +2349,33 @@ function methods.dev2(head,font,attr)
if syllableend then
syllabe = syllabe + 1
local c = syllablestart
- local n = syllableend.next
+ local n = getnext(syllableend)
while c ~= n do
- c[a_syllabe] = syllabe
- c = c.next
+ setprop(c,a_syllabe,syllabe)
+ c = getnext(c)
end
end
if syllableend and syllablestart ~= syllableend then
head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
end
- if not syllableend and current.id == glyph_code and current.subtype<256 and current.font == font and not current[a_state] then
- local mark = mark_four[current.char]
+ if not syllableend and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and not getprop(current,a_state) then
+ local mark = mark_four[getchar(current)]
if mark then
head, current = inject_syntax_error(head,current,mark)
end
end
start = false
- current = current.next
+ current = getnext(current)
end
if nbspaces > 0 then
head = replace_all_nbsp(head)
end
+ head = tonode(head)
+
return head, done
end
+
+methods.mlym = methods.deva
+methods.mlm2 = methods.dev2
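The two aliases above route the Malayalam script tags through the Devanagari handlers. A hedged sketch of the dispatch this relies on (the dispatcher shown is illustrative; the real one lives in the analyzer framework):

    -- illustrative dispatch: "mlym"/"mlm2" now resolve to the deva/dev2 code
    local function analyze(head,font,attr,script)
        local method = methods[script]
        if method then
            return method(head,font,attr)
        end
        return head, false
    end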
diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua
index 9af5a3347..1f1534870 100644
--- a/tex/context/base/font-ota.lua
+++ b/tex/context/base/font-ota.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['font-ota'] = {
+if not modules then modules = { } end modules ['font-otx'] = {
version = 1.001,
comment = "companion to font-otf.lua (analysing)",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['font-ota'] = {
license = "see context related readme files"
}
--- this might become scrp-*.lua
+-- context only
local type = type
@@ -26,21 +26,35 @@ local methods = allocate()
analyzers.initializers = initializers
analyzers.methods = methods
-analyzers.useunicodemarks = false
+---------.useunicodemarks = false
local a_state = attributes.private('state')
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local traverse_node_list = nuts.traverse
+local end_of_math = nuts.end_of_math
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local math_code = nodecodes.math
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-local end_of_math = node.end_of_math
-
local fontdata = fonts.hashes.identifiers
local categories = characters and characters.categories or { } -- sorry, only in context
+local chardata = characters and characters.data
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
@@ -87,63 +101,70 @@ local features = {
pstf = s_pstf,
}
-analyzers.states = states
-analyzers.features = features
+analyzers.states = states
+analyzers.features = features
+analyzers.useunicodemarks = false
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-function analyzers.setstate(head,font)
+function analyzers.setstate(head,font) -- we can skip math
local useunicodemarks = analyzers.useunicodemarks
local tfmdata = fontdata[font]
local descriptions = tfmdata.descriptions
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
+ current = tonut(current)
while current do
- local id = current.id
- if id == glyph_code and current.font == font then
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font then
done = true
- local char = current.char
+ local char = getchar(current)
local d = descriptions[char]
if d then
- if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
+ if d.class == "mark" then
done = true
- current[a_state] = s_mark
+ setprop(current,a_state,s_mark)
+ elseif useunicodemarks and categories[char] == "mn" then
+ done = true
+ setprop(current,a_state,s_mark)
elseif n == 0 then
first, last, n = current, current, 1
- current[a_state] = s_init
+ setprop(current,a_state,s_init)
else
last, n = current, n+1
- current[a_state] = s_medi
+ setprop(current,a_state,s_medi)
end
else -- finish
if first and first == last then
- last[a_state] = s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
end
first, last, n = nil, nil, 0
end
elseif id == disc_code then
- -- always in the middle
- current[a_state] = s_medi
+ -- always in the middle .. it doesn't make much sense to assign a property
+ -- here ... we might at some point decide to flag the components when present
+ -- but even then it's kind of bogus
+ setprop(current,a_state,s_medi)
last = current
else -- finish
if first and first == last then
- last[a_state] = s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
end
first, last, n = nil, nil, 0
if id == math_code then
current = end_of_math(current)
end
end
- current = current.next
+ current = getnext(current)
end
if first and first == last then
- last[a_state] = s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state] = s_fina
+ setprop(last,a_state,s_fina)
end
return head, done
end
@@ -200,234 +221,194 @@ registerotffeature {
-- latin
methods.latn = analyzers.setstate
-
--- This info eventually can go into char-def and we will have a state
--- table for generic then (unicode recognized all states but in practice
--- only has only
---
--- isolated : isol
--- final : isol_fina
--- medial : isol_fina_medi_init
---
--- so in practice, without analyzer it's rather useless info which is
--- why having it in char-def makes only sense for special purposes (like)
--- like tracing cq. visualizing.
-
-local tatweel = 0x0640
-local zwnj = 0x200C
-local zwj = 0x200D
-
-local isolated = { -- isol
- [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
- [0x0604] = true,
- [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true,
- -- mandaic
- [0x0856] = true, [0x0858] = true, [0x0857] = true,
- -- n'ko
- [0x07FA] = true,
- -- also here:
- [zwnj] = true,
-}
-
-local final = { -- isol_fina
- [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
- [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
- [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
- [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
- [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
- [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
- [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
- [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
- [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
- [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
- [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
- [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
- [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
- [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
- [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true,
- [0x08AA] = true, [0x08AB] = true, [0x08AC] = true,
- [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true,
- -- syriac
- [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
- [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
- [0x072C] = true, [0x071E] = true,
- [0x072F] = true, [0x074D] = true,
- -- mandaic
- [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true,
- [0x084F] = true
-}
-
-local medial = { -- isol_fina_medi_init
- [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
- [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
- [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
- [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
- [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0641] = true, [0x0642] = true, [0x0643] = true,
- [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
- [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
- [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
- [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
- [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
- [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
- [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
- [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
- [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
- [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
- [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
- [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
- [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
- [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
- [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
- [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
- [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
- [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
- [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
- [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
- [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
- [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
- [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
- [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
- [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
- [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
- [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
- [0x077E] = true, [0x077F] = true,
- [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true,
- [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true,
- [0x08A7] = true, [0x08A3] = true,
- -- syriac
- [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
- [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
- [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
- [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
- [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true,
- [0x074E] = true, [0x074F] = true,
- -- mandaic
- [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true,
- [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true,
- [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true,
- [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true,
- [0x0853] = true,
- -- n'ko
- [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true,
- [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true,
- [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true,
- [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, [0x07E2] = true,
- [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true,
- [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true,
- [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true,
- [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true,
- [0x07E6] = true,
- -- also here:
- [tatweel]= true,
- [zwj] = true,
-}
+-------.dflt = analyzers.setstate -- can be an option or just the default
local arab_warned = { }
--- todo: gref
-
local function warning(current,what)
- local char = current.char
+ local char = getchar(current)
if not arab_warned[char] then
log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char] = true
end
end
--- potential optimization: local medial_final = table.merged(medial,final)
+local mappers = {
+ l = s_init, -- left
+ d = s_medi, -- double
+ c = s_medi, -- joiner
+ r = s_fina, -- right
+ u = s_isol, -- nonjoiner
+}
-local function finish(first,last)
- if last then
- if first == last then
- local fc = first.char
- if medial[fc] or final[fc] then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
+local classifiers = { } -- we can also use this trick for devanagari
+
+local first_arabic, last_arabic = characters.blockrange("arabic")
+local first_syriac, last_syriac = characters.blockrange("syriac")
+local first_mandiac, last_mandiac = characters.blockrange("mandiac")
+local first_nko, last_nko = characters.blockrange("nko")
+
+table.setmetatableindex(classifiers,function(t,k)
+ local c = chardata[k]
+ local v = false
+ if c then
+ local arabic = c.arabic
+ if arabic then
+ v = mappers[arabic]
+ if not v then
+ log.report("analyze","error in mapping arabic %C",k)
+ -- error
+ v = false
end
- else
- local lc = last.char
- if medial[lc] or final[lc] then
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- last[a_state] = s_fina
+ elseif k >= first_arabic and k <= last_arabic or k >= first_syriac and k <= last_syriac or
+ k >= first_mandiac and k <= last_mandiac or k >= first_nko and k <= last_nko then
+ if categories[k] == "mn" then
+ v = s_mark
else
- warning(last,"fina")
- last[a_state] = s_error
+ v = s_rest
end
- end
- first, last = nil, nil
- elseif first then
- -- first and last are either both set or both nil, so we never come here
- local fc = first.char
- if medial[fc] or final[fc] then
- first[a_state] = s_isol
else
- warning(first,"isol")
- first[a_state] = s_error
end
- first = nil
end
- return first, last
-end
+ t[k] = v
+ return v
+end)
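-- [editor sketch, not part of this patch] the classifiers table above is a
-- demand-driven cache: table.setmetatableindex installs an __index handler
-- that classifies a codepoint on first access and stores the result, so the
-- chardata lookup happens at most once per character. The same pattern in
-- plain Lua, with an invented classification for illustration:
local classify = setmetatable({ }, {
    __index = function(t,k)
        local v = (k % 2 == 0) and "even" or "odd" -- stand-in for the real mapper/blockrange logic
        t[k] = v                                   -- cache, so later accesses are plain indexing
        return v
    end,
})
print(classify[0x0628], classify[0x0627]) -- even  odd (both cached afterwards)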
function methods.arab(head,font,attr)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local marks = tfmdata.resources.marks
- local first, last, current, done = nil, nil, head, false
+ local first, last = nil, nil
+ local c_first, c_last = nil, nil
+ local current, done = head, false
+ current = tonut(current)
while current do
- local id = current.id
- if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
done = true
- local char = current.char
- if marks[char] or (useunicodemarks and categories[char] == "mn") then
- current[a_state] = s_mark
- elseif isolated[char] then -- can be zwj or zwnj too
- first, last = finish(first,last)
- current[a_state] = s_isol
- first, last = nil, nil
- elseif not first then
- if medial[char] then
- current[a_state] = s_init
- first, last = first or current, current
- elseif final[char] then
- current[a_state] = s_isol
+ local char = getchar(current)
+ local classifier = classifiers[char]
+ if not classifier then
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
first, last = nil, nil
- else -- no arab
- first, last = finish(first,last)
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
end
- elseif medial[char] then
- first, last = first or current, current
- current[a_state] = s_medi
- elseif final[char] then
- if not last[a_state] == s_init then
- -- tricky, we need to check what last may be !
- last[a_state] = s_medi
+ elseif classifier == s_mark then
+ setprop(current,a_state,s_mark)
+ elseif classifier == s_isol then
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
+ end
+ setprop(current,a_state,s_isol)
+ elseif classifier == s_medi then
+ if first then
+ last = current
+ c_last = classifier
+ setprop(current,a_state,s_medi)
+ else
+ setprop(current,a_state,s_init)
+ first = current
+ c_first = classifier
+ end
+ elseif classifier == s_fina then
+ if last then
+ if getprop(last,a_state) ~= s_init then
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first, last = nil, nil
+ elseif first then
+ -- if getprop(first,a_state) ~= s_init then
+ -- -- needs checking
+ -- setprop(first,a_state,s_medi)
+ -- end
+ setprop(current,a_state,s_fina)
+ first = nil
+ else
+ setprop(current,a_state,s_isol)
+ end
+ else -- classifier == s_rest
+ setprop(current,a_state,s_rest)
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
end
- current[a_state] = s_fina
- first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then -- needs checking
- current[a_state] = s_rest
- first, last = finish(first,last)
- else -- no
- first, last = finish(first,last)
end
else
- if first or last then
- first, last = finish(first,last)
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first, last = nil, nil
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
end
- if id == math_code then
+ if id == math_code then -- a bit of duplication as we test for glyphs twice
current = end_of_math(current)
end
end
- current = current.next
+ current = getnext(current)
end
- if first or last then
- finish(first,last)
+ if last then
+ if c_last == s_medi or c_last == s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ elseif first then
+ if c_first == s_medi or c_first == s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
end
return head, done
end
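-- [editor sketch, not part of this patch] a stripped-down model of the state
-- assignment done by methods.arab above: within a run of joining letters the
-- first one becomes init, inner ones medi and the last one fina, while a run
-- of length one becomes isol; non-joining characters (and anything outside
-- the script) end the run. All names below are invented for illustration.
local function assignstates(classes) -- classes: array of "medi", "fina", "isol" or "rest"
    local states, first, last = { }, nil, nil
    local function flush() -- close an open run
        if last then
            states[last] = "fina"  -- the last joining letter of the run
        elseif first then
            states[first] = "isol" -- a run of a single letter
        end
        first, last = nil, nil
    end
    for i=1,#classes do
        local c = classes[i]
        if c == "medi" then        -- double-joining letter
            if first then
                states[i] = "medi"
                last = i
            else
                states[i] = "init"
                first = i
            end
        elseif c == "fina" then    -- right-joining letter ends the run
            states[i] = (first or last) and "fina" or "isol"
            first, last = nil, nil
        elseif c == "isol" then    -- non-joining letter
            flush()
            states[i] = "isol"
        else                       -- anything else interrupts the run
            flush()
            states[i] = "rest"
        end
    end
    flush()
    return states
end
print(table.concat(assignstates { "medi", "medi", "fina" }, " ")) -- init medi fina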
diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua
index 2a7b821ea..4e955a197 100644
--- a/tex/context/base/font-otb.lua
+++ b/tex/context/base/font-otb.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['font-otb'] = {
}
local concat = table.concat
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
+local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget
local lpegmatch = lpeg.match
local utfchar = utf.char
@@ -47,13 +47,14 @@ local function gref(descriptions,n)
return f_unicode(n)
end
elseif n then
- local num, nam = { }, { }
- for i=2,#n do
+ local num, nam, j = { }, { }, 0
+ for i=1,#n do
local ni = n[i]
if tonumber(ni) then -- first is likely a key
+ j = j + 1
local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
+ num[j] = f_unicode(ni)
+ nam[j] = di and di.name or "-"
end
end
return f_unilist(num,nam)
@@ -62,40 +63,40 @@ local function gref(descriptions,n)
end
end
-local function cref(feature,lookupname)
+local function cref(feature,lookuptags,lookupname)
if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookupname)
+ return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
else
return formatters["feature %a"](feature)
end
end
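-- [editor sketch, not part of this patch] cref now goes through lookuptags so
-- that reports show readable tags; in this patch lookuptags is a table whose
-- metamethod turns numeric lookup identifiers into "lookup <n>" names (see
-- the otf.load change in font-otf.lua further down). A minimal model:
local lookuptags = setmetatable({ }, {
    __index = function(t,k)
        local v = type(k) == "number" and ("lookup " .. k) or k
        t[k] = v
        return v
    end,
})
print(lookuptags[3])           -- lookup 3
print(lookuptags["ss01 base"]) -- ss01 base (strings pass through unchanged)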
-local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
replacement and gref(descriptions,replacement),
value,
comment)
end
-local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
report_prepare("%s: base substitution %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
gref(descriptions,substitution))
end
-local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
report_prepare("%s: base ligature %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,ligature),
gref(descriptions,unicode))
end
-local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
gref(descriptions,otherunicode),
value)
@@ -169,7 +170,8 @@ end
-- pseudo names like hyphen_hyphen to endash so in practice we end
-- up with a bit too many definitions but the overhead is negligible.
--
--- Todo: if changed[first] or changed[second] then ... end
+-- We can have changed[first] or changed[second] but it quickly becomes
+-- messy if we need to take that into account.
local trace = false
@@ -179,7 +181,7 @@ local function finalize_ligatures(tfmdata,ligatures)
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local resources = tfmdata.resources
- local unicodes = resources.unicodes
+ local unicodes = resources.unicodes -- we use rawget in order to avoid building the table
local private = resources.private
local alldone = false
while not alldone do
@@ -188,8 +190,8 @@ local function finalize_ligatures(tfmdata,ligatures)
local ligature = ligatures[i]
if ligature then
local unicode, lookupdata = ligature[1], ligature[2]
- if trace then
- trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
end
local size = #lookupdata
local firstcode = lookupdata[1] -- [2]
@@ -201,8 +203,8 @@ local function finalize_ligatures(tfmdata,ligatures)
local firstdata = characters[firstcode]
if not firstdata then
firstcode = private
- if trace then
- trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
end
unicodes[firstname] = firstcode
firstdata = { intermediate = true, ligatures = { } }
@@ -215,18 +217,18 @@ local function finalize_ligatures(tfmdata,ligatures)
local secondname = firstname .. "_" .. secondcode
if i == size - 1 then
target = unicode
- if not unicodes[secondname] then
+ if not rawget(unicodes,secondname) then
unicodes[secondname] = unicode -- map final ligature onto intermediates
end
okay = true
else
- target = unicodes[secondname]
+ target = rawget(unicodes,secondname)
if not target then
break
end
end
- if trace then
- trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
end
local firstligs = firstdata.ligatures
if firstligs then
@@ -237,6 +239,8 @@ local function finalize_ligatures(tfmdata,ligatures)
firstcode = target
firstname = secondname
end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
end
if okay then
ligatures[i] = false
@@ -246,12 +250,15 @@ local function finalize_ligatures(tfmdata,ligatures)
end
alldone = done == 0
end
- if trace then
- for k, v in next, characters do
- if v.ligatures then table.print(v,k) end
+ if trace_ligatures_detail then
+ for k, v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
end
end
- tfmdata.resources.private = private
+ resources.private = private
+ return true
end
end
@@ -259,13 +266,14 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local resources = tfmdata.resources
+ local properties = tfmdata.properties
local changed = tfmdata.changed
- local unicodes = resources.unicodes
local lookuphash = resources.lookuphash
local lookuptypes = resources.lookuptypes
+ local lookuptags = resources.lookuptags
local ligatures = { }
- local alternate = tonumber(value)
+ local alternate = tonumber(value) or true and 1
local defaultalt = otf.defaultbasealternate
local trace_singles = trace_baseinit and trace_singles
@@ -273,39 +281,39 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local trace_ligatures = trace_baseinit and trace_ligatures
local actions = {
- substitution = function(lookupdata,lookupname,description,unicode)
+ substitution = function(lookupdata,lookuptags,lookupname,description,unicode)
if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
end
changed[unicode] = lookupdata
end,
- alternate = function(lookupdata,lookupname,description,unicode)
+ alternate = function(lookupdata,lookuptags,lookupname,description,unicode)
local replacement = lookupdata[alternate]
if replacement then
changed[unicode] = replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
end
elseif defaultalt == "first" then
replacement = lookupdata[1]
changed[unicode] = replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
elseif defaultalt == "last" then
replacement = lookupdata[#data]
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
else
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
end
end
end,
- ligature = function(lookupdata,lookupname,description,unicode)
+ ligature = function(lookupdata,lookuptags,lookupname,description,unicode)
if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
end
ligatures[#ligatures+1] = { unicode, lookupdata }
end,
@@ -322,7 +330,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local lookuptype = lookuptypes[lookupname]
local action = actions[lookuptype]
if action then
- action(lookupdata,lookupname,description,unicode)
+ action(lookupdata,lookuptags,lookupname,description,unicode)
end
end
end
@@ -337,24 +345,25 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local action = actions[lookuptype]
if action then
for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
+ action(lookuplist[i],lookuptags,lookupname,description,unicode)
end
end
end
end
end
end
-
- finalize_ligatures(tfmdata,ligatures)
+ properties.hasligatures = finalize_ligatures(tfmdata,ligatures)
end
local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local resources = tfmdata.resources
- local unicodes = resources.unicodes
+ local properties = tfmdata.properties
+ local lookuptags = resources.lookuptags
local sharedkerns = { }
local traceindeed = trace_baseinit and trace_kerns
+ local haskerns = false
for unicode, character in next, characters do
local description = descriptions[unicode]
local rawkerns = description.kerns -- shared
@@ -378,13 +387,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
newkerns = { [otherunicode] = value }
done = true
if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
end
elseif not newkerns[otherunicode] then -- first wins
newkerns[otherunicode] = value
done = true
if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
end
end
end
@@ -393,12 +402,14 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
if done then
sharedkerns[rawkerns] = newkerns
character.kerns = newkerns -- no empty assignments
+ haskerns = true
else
sharedkerns[rawkerns] = false
end
end
end
end
+ properties.haskerns = haskerns
end
basemethods.independent = {
@@ -428,13 +439,13 @@ local function make_1(present,tree,name)
end
end
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
for k, v in next, tree do
if k == "ligature" then
local character = characters[preceding]
if not character then
if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
end
character = makefake(tfmdata,name,present)
end
@@ -455,7 +466,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don
else
local code = present[name] or unicode
local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
end
end
end
@@ -467,9 +478,10 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local changed = tfmdata.changed
local lookuphash = resources.lookuphash
local lookuptypes = resources.lookuptypes
+ local lookuptags = resources.lookuptags
local ligatures = { }
- local alternate = tonumber(value)
+ local alternate = tonumber(value) or true and 1
local defaultalt = otf.defaultbasealternate
local trace_singles = trace_baseinit and trace_singles
@@ -483,7 +495,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for unicode, data in next, lookupdata do
if lookuptype == "substitution" then
if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
end
changed[unicode] = data
elseif lookuptype == "alternate" then
@@ -491,28 +503,28 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
if replacement then
changed[unicode] = replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
end
elseif defaultalt == "first" then
replacement = data[1]
changed[unicode] = replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
elseif defaultalt == "last" then
replacement = data[#data]
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
else
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
end
end
elseif lookuptype == "ligature" then
ligatures[#ligatures+1] = { unicode, data, lookupname }
if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
end
end
end
@@ -535,7 +547,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for i=1,nofligatures do
local ligature = ligatures[i]
local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
end
end
@@ -546,11 +558,11 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
local characters = tfmdata.characters
local descriptions = tfmdata.descriptions
local resources = tfmdata.resources
+ local properties = tfmdata.properties
local lookuphash = resources.lookuphash
+ local lookuptags = resources.lookuptags
local traceindeed = trace_baseinit and trace_kerns
-
-- check out this sharedkerns trickery
-
for l=1,#lookuplist do
local lookupname = lookuplist[l]
local lookupdata = lookuphash[lookupname]
@@ -565,7 +577,7 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
for otherunicode, kern in next, data do
if not kerns[otherunicode] and kern ~= 0 then
kerns[otherunicode] = kern
- report_kern(feature,lookup,descriptions,unicode,otherunicode,kern)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern)
end
end
else
@@ -594,8 +606,9 @@ basemethod = "independent"
local function featuresinitializer(tfmdata,value)
if true then -- value then
- local t = trace_preparing and os.clock()
- local features = tfmdata.shared.features
+ local starttime = trace_preparing and os.clock()
+ local features = tfmdata.shared.features
+ local fullname = tfmdata.properties.fullname or "?"
if features then
applybasemethod("initializehashes",tfmdata)
local collectlookups = otf.collectlookups
@@ -605,34 +618,71 @@ local function featuresinitializer(tfmdata,value)
local language = properties.language
local basesubstitutions = rawdata.resources.features.gsub
local basepositionings = rawdata.resources.features.gpos
- if basesubstitutions then
- for feature, data in next, basesubstitutions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositionings then
- for feature, data in next, basepositionings do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
+ --
+ -- if basesubstitutions then
+ -- for feature, data in next, basesubstitutions do
+ -- local value = features[feature]
+ -- if value then
+ -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ -- if validlookups then
+ -- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ -- registerbasefeature(feature,value)
+ -- end
+ -- end
+ -- end
+ -- end
+ -- if basepositionings then
+ -- for feature, data in next, basepositionings do
+ -- local value = features[feature]
+ -- if value then
+ -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ -- if validlookups then
+ -- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ -- registerbasefeature(feature,value)
+ -- end
+ -- end
+ -- end
+ -- end
+ --
+ if basesubstitutions or basepositionings then
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local sfeatures = sequence.features
+ if sfeatures then
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local feature = order[i]
+ local value = features[feature]
+ if value then
+ local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ -- skip
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
end
end
end
end
+ --
registerbasehash(tfmdata)
end
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
end
end
end
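-- [editor sketch, not part of this patch] the rewritten featuresinitializer
-- above no longer walks the gsub/gpos feature hashes (which have no defined
-- order) but the font's sequences, using each sequence's order list, so base
-- features are prepared in the order the font (or an added feature) defines.
-- A stripped-down model with invented data:
local sequences = {
    { order = { "liga" } },
    { order = { "kern" } },
}
local enabled = { liga = true, kern = true } -- what the user asked for
local gsub    = { liga = true }              -- features present as substitutions
local gpos    = { kern = true }              -- features present as positionings
for s=1,#sequences do
    local order = sequences[s].order
    if order then
        for i=1,#order do
            local feature = order[i]
            if enabled[feature] then
                if gsub[feature] then
                    print("prepare substitutions for",feature)
                elseif gpos[feature] then
                    print("prepare positionings for",feature)
                end
            end
        end
    end
end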
diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua
index 3006e47ca..f7f8e9ce2 100644
--- a/tex/context/base/font-otc.lua
+++ b/tex/context/base/font-otc.lua
@@ -70,6 +70,7 @@ local function addfeature(data,feature,specifications)
local subtables = specification.subtables or { specification.data } or { }
local featuretype = types[specification.type or "substitution"]
local featureflags = specification.flags or noflags
+ local featureorder = specification.order or { feature }
local added = false
local featurename = format("ctx_%s_%s",feature,s)
local st = { }
@@ -133,14 +134,20 @@ local function addfeature(data,feature,specifications)
askedfeatures[k] = table.tohash(v)
end
end
- sequences[#sequences+1] = {
+ local sequence = {
chain = 0,
features = { [feature] = askedfeatures },
flags = featureflags,
name = featurename,
+ order = featureorder,
subtables = st,
type = featuretype,
}
+ if specification.prepend then
+ insert(sequences,1,sequence)
+ else
+ insert(sequences,sequence)
+ end
-- register in metadata (merge as there can be a few)
if not gsubfeatures then
gsubfeatures = { }
@@ -204,7 +211,9 @@ local tlig_specification = {
type = "ligature",
features = everywhere,
data = tlig,
+ order = { "tlig" },
flags = noflags,
+ prepend = true,
}
otf.addfeature("tlig",tlig_specification)
@@ -226,7 +235,9 @@ local trep_specification = {
type = "substitution",
features = everywhere,
data = trep,
+ order = { "trep" },
flags = noflags,
+ prepend = true,
}
otf.addfeature("trep",trep_specification)
@@ -256,6 +267,7 @@ if characters.combined then
type = "ligature",
features = everywhere,
data = tcom,
+ order = { "tcom" },
flags = noflags,
initialize = initialize,
}
@@ -314,6 +326,7 @@ local anum_specification = {
{
type = "substitution",
features = { arab = { urd = true, dflt = true } },
+ order = { "anum" },
data = anum_arabic,
flags = noflags, -- { },
valid = valid,
@@ -321,6 +334,7 @@ local anum_specification = {
{
type = "substitution",
features = { arab = { urd = true } },
+ order = { "anum" },
data = anum_persian,
flags = noflags, -- { },
valid = valid,
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index 919da2379..2dd23b741 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -129,59 +129,66 @@ local default = "dflt"
-- what about analyze in local and not in font
-local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic)
+local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic,ra)
local features = sequence.features
if features then
- for kind, scripts in next, features do
- local e_e
- local a_e = a_enabled and a_enabled[kind] -- the value (location)
- if a_e ~= nil then
- e_e = a_e
- else
- e_e = s_enabled and s_enabled[kind] -- the value (font)
- end
- if e_e then
- local languages = scripts[script] or scripts[wildcard]
- if languages then
- -- local valid, what = false
- local valid = false
- -- not languages[language] or languages[default] or languages[wildcard] because we want tracing
- -- only first attribute match check, so we assume simple fina's
- -- default can become a font feature itself
- if languages[language] then
- valid = e_e -- was true
- -- what = language
- -- elseif languages[default] then
- -- valid = true
- -- what = default
- elseif languages[wildcard] then
- valid = e_e -- was true
- -- what = wildcard
- end
- if valid then
- local attribute = autofeatures[kind] or false
- -- if a_e and dynamic < 0 then
- -- valid = false
- -- end
- -- if trace_applied then
- -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
- -- report_process(
- -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
- -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
- -- end
- if trace_applied then
- report_process(
- "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
- font,attr or 0,dynamic,kind,script,language,sequence.name,valid)
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local kind = order[i] --
+ local e_e
+ local a_e = a_enabled and a_enabled[kind] -- the value (location)
+ if a_e ~= nil then
+ e_e = a_e
+ else
+ e_e = s_enabled and s_enabled[kind] -- the value (font)
+ end
+ if e_e then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages then
+ -- local valid, what = false
+ local valid = false
+ -- not languages[language] or languages[default] or languages[wildcard] because we want tracing
+ -- only first attribute match check, so we assume simple fina's
+ -- default can become a font feature itself
+ if languages[language] then
+ valid = e_e -- was true
+ -- what = language
+ -- elseif languages[default] then
+ -- valid = true
+ -- what = default
+ elseif languages[wildcard] then
+ valid = e_e -- was true
+ -- what = wildcard
+ end
+ if valid then
+ local attribute = autofeatures[kind] or false
+ -- if a_e and dynamic < 0 then
+ -- valid = false
+ -- end
+ -- if trace_applied then
+ -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
+ -- report_process(
+ -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
+ -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
+ -- end
+ if trace_applied then
+ report_process(
+ "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
+ font,attr or 0,dynamic,kind,script,language,sequence.name,valid)
+ end
+ ra[#ra+1] = { valid, attribute, sequence.chain or 0, kind, sequence }
end
- return { valid, attribute, sequence.chain or 0, kind, sequence }
end
end
end
+ -- { valid, attribute, chain, "generic", sequence } -- false anyway, could be flag instead of table
+ else
+ -- can't happen
end
- return false -- { valid, attribute, chain, "generic", sequence } -- false anyway, could be flag instead of table
else
- return false -- { false, false, chain, false, sequence } -- indirect lookup, part of chain (todo: make this a separate table)
+ -- { false, false, chain, false, sequence } -- indirect lookup, part of chain (todo: make this a separate table)
end
end
@@ -249,12 +256,16 @@ function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in specia
-- return v
-- end
-- end)
+-- for s=1,#sequences do
+-- local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic)
+-- if v then
+-- ra[#ra+1] = v
+-- end
+-- end
for s=1,#sequences do
- local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic)
- if v then
- ra[#ra+1] = v
- end
+ initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic,ra)
end
+-- table.save((jit and "tmc-" or "tma-")..font..".log",ra) -- bug in jit
end
return ra
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 51c2af00f..85eebaaeb 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -20,11 +20,13 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring
local abs = math.abs
local insert = table.insert
local lpegmatch = lpeg.match
-local reversed, concat, remove = table.reversed, table.concat, table.remove
+local reversed, concat, remove, sortedkeys = table.reversed, table.concat, table.remove, table.sortedkeys
local ioflush = io.flush
local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
local formatters = string.formatters
+local P, R, S, C, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.match
+local setmetatableindex = table.setmetatableindex
local allocate = utilities.storage.allocate
local registertracker = trackers.register
local registerdirective = directives.register
@@ -33,13 +35,16 @@ local stoptiming = statistics.stoptiming
local elapsedtime = statistics.elapsedtime
local findbinfile = resolvers.findbinfile
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
+local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
+local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
+local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
+local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
+local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
+local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
+
+local compact_lookups = true registertracker("otf.compactlookups", function(v) compact_lookups = v end)
+local purge_names = true registertracker("otf.purgenames", function(v) purge_names = v end)
local report_otf = logs.reporter("fonts","otf loading")
@@ -48,13 +53,18 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.749 -- beware: also sync font-mis.lua
+otf.version = 2.803 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
-local fontdata = fonts.hashes.identifiers
+local hashes = fonts.hashes
+local definers = fonts.definers
+local readers = fonts.readers
+local constructors = fonts.constructors
+
+local fontdata = hashes and hashes.identifiers
local chardata = characters and characters.data -- not used
-local otffeatures = fonts.constructors.newfeatures("otf")
+local otffeatures = constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
local enhancers = allocate()
@@ -62,13 +72,8 @@ otf.enhancers = enhancers
local patches = { }
enhancers.patches = patches
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
local forceload = false
local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
local packdata = true
local syncspace = true
local forcenotdef = false
@@ -80,7 +85,12 @@ local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
local wildcard = "*"
local default = "dflt"
-local fontloaderfields = fontloader.fields
+local fontloader = fontloader
+local open_font = fontloader.open
+local close_font = fontloader.close
+local font_fields = fontloader.fields
+local apply_featurefile = fontloader.apply_featurefile
+
local mainfields = nil
local glyphfields = nil -- not used yet
@@ -93,11 +103,11 @@ formats.dfont = "truetype"
registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end)
+-----------------("fonts.otf.loader.alldimensions", function(v) alldimensions = v end)
function otf.fileformat(filename)
local leader = lower(io.loadchunk(filename,4))
@@ -106,6 +116,8 @@ function otf.fileformat(filename)
return formats.otf, suffix == "otf"
elseif leader == "ttcf" then
return formats.ttc, suffix == "ttc"
+ -- elseif leader == "true" then
+ -- return formats.ttf, suffix == "ttf"
elseif suffix == "ttc" then
return formats.ttc, true
elseif suffix == "dfont" then
@@ -132,7 +144,7 @@ local function load_featurefile(raw,featurefile)
if trace_loading then
report_otf("using featurefile %a", featurefile)
end
- fontloader.apply_featurefile(raw, featurefile)
+ apply_featurefile(raw, featurefile)
end
end
@@ -201,7 +213,6 @@ local valid_fields = table.tohash {
"extrema_bound",
"familyname",
"fontname",
- "fontname",
"fontstyle_id",
"fontstyle_name",
"fullname",
@@ -237,7 +248,7 @@ local valid_fields = table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
- -- "validation_state",
+ "validation_state",
"version",
"vert_base",
"weight",
@@ -254,7 +265,7 @@ local ordered_enhancers = {
"analyze glyphs",
"analyze math",
- "prepare tounicode", -- maybe merge with prepare
+ -- "prepare tounicode",
"reorganize lookups",
"reorganize mark classes",
@@ -273,10 +284,15 @@ local ordered_enhancers = {
"check metadata",
"check extra features", -- after metadata
+ "prepare tounicode",
+
"check encoding", -- moved
"add duplicates",
"cleanup tables",
+
+ "compact lookups",
+ "purge names",
}
--[[ldx--
@@ -428,12 +444,12 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
report_otf("loading %a, hash %a",filename,hash)
local fontdata, messages
if sub then
- fontdata, messages = fontloader.open(filename,sub)
+ fontdata, messages = open_font(filename,sub)
else
- fontdata, messages = fontloader.open(filename)
+ fontdata, messages = open_font(filename)
end
if fontdata then
- mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ mainfields = mainfields or (font_fields and font_fields(fontdata))
end
if trace_loading and messages and #messages > 0 then
if type(messages) == "string" then
@@ -478,6 +494,8 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
lookuptypes = {
},
},
+ warnings = {
+ },
metadata = {
-- raw metadata, not to be used
},
@@ -490,7 +508,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
},
helpers = { -- might go away
tounicodelist = splitter,
- tounicodetable = lpeg.Ct(splitter),
+ tounicodetable = Ct(splitter),
},
}
starttiming(data)
@@ -515,7 +533,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
report_otf("preprocessing and caching time %s, packtime %s",
elapsedtime(data),packdata and elapsedtime(packtime) or 0)
end
- fontloader.close(fontdata) -- free memory
+ close_font(fontdata) -- free memory
if cleanup > 3 then
collectgarbage("collect")
end
@@ -533,6 +551,39 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
report_otf("loading from cache using hash %a",hash)
end
enhance("unpack",data,filename,nil,false)
+ --
+ local resources = data.resources
+ local lookuptags = resources.lookuptags
+ local unicodes = resources.unicodes
+ if not lookuptags then
+ lookuptags = { }
+ resources.lookuptags = lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v = type(k) == "number" and ("lookup " .. k) or k
+ t[k] = v
+ return v
+ end)
+ if not unicodes then
+ unicodes = { }
+ resources.unicodes = unicodes
+ setmetatableindex(unicodes,function(t,k)
+ -- use rawget when no table has to be built
+ setmetatableindex(unicodes,nil)
+ for u, d in next, data.descriptions do
+ local n = d.name
+ if n then
+ t[n] = u
+ -- report_otf("accessing known name %a",k)
+ else
+ -- report_otf("accessing unknown name %a",k)
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes) -- do we really need this?
+ --
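-- [editor sketch, not part of this patch] resources.unicodes is now built on
-- demand: the first lookup of a glyph name builds the whole name -> unicode
-- table from the descriptions and then removes the metamethod, while callers
-- that must not trigger that build (see the rawget calls added in font-otb.lua
-- earlier in this patch) probe the raw table instead. Self-contained model
-- with invented glyphs:
local descriptions = {
    [0x0041] = { name = "A" },
    [0x0042] = { name = "B" },
}
local unicodes = setmetatable({ }, {
    __index = function(t,k)
        setmetatable(t,nil)                 -- build once, then behave as a plain table
        for u, d in pairs(descriptions) do
            if d.name then
                t[d.name] = u
            end
        end
        return rawget(t,k)
    end,
})
print(rawget(unicodes,"A")) -- nil, probing does not trigger the build
print(unicodes.A)           -- 65,  first real lookup builds the table
print(unicodes.missing)     -- nil, no metamethod left, plain indexing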
if applyruntimefixes then
applyruntimefixes(filename,data)
end
@@ -574,31 +625,29 @@ actions["add dimensions"] = function(data,filename)
local defaultheight = resources.defaultheight or 0
local defaultdepth = resources.defaultdepth or 0
local basename = trace_markwidth and file.basename(filename)
- if usemetatables then
- for _, d in next, descriptions do
- local wd = d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
- -- d.width = -wd
- end
- setmetatable(d,mt)
+ for _, d in next, descriptions do
+ local bb, wd = d.boundingbox, d.width
+ if not wd then
+ -- or bb?
+ d.width = defaultwidth
+ elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ -- d.width = -wd
end
- else
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
- -- d.width = -wd
- end
- -- if forcenotdef and not d.name then
- -- d.name = ".notdef"
- -- end
- if bb then
- local ht, dp = bb[4], -bb[2]
+ -- if forcenotdef and not d.name then
+ -- d.name = ".notdef"
+ -- end
+ if bb then
+ local ht = bb[4]
+ local dp = -bb[2]
+ -- if alldimensions then
+ -- if ht ~= 0 then
+ -- d.height = ht
+ -- end
+ -- if dp ~= 0 then
+ -- d.depth = dp
+ -- end
+ -- else
if ht == 0 or ht < 0 then
-- not set
else
@@ -609,7 +658,7 @@ actions["add dimensions"] = function(data,filename)
else
d.depth = dp
end
- end
+ -- end
end
end
end
@@ -681,21 +730,31 @@ actions["prepare glyphs"] = function(data,filename,raw)
if includesubfonts then
metadata.subfonts[cidindex] = somecopy(subfont)
end
+ -- we have delayed loading so we cannot use next
for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0
local glyph = cidglyphs[index]
if glyph then
local unicode = glyph.unicode
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 or unicode >= criterium then
+ if unicode >= 0x00E000 and unicode <= 0x00F8FF then
+ unicode = -1
+ elseif unicode >= 0x0F0000 and unicode <= 0x0FFFFD then
+ unicode = -1
+ elseif unicode >= 0x100000 and unicode <= 0x10FFFD then
+ unicode = -1
+ end
+ local name = glyph.name or cidnames[index]
+ if not unicode or unicode == -1 then -- or unicode >= criterium then
unicode = cidunicodes[index]
end
if unicode and descriptions[unicode] then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ if trace_private then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ end
unicode = -1
end
- if not unicode or unicode == -1 or unicode >= criterium then
+ if not unicode or unicode == -1 then -- or unicode >= criterium then
if not name then
- name = format("u%06X",private)
+ name = format("u%06X.ctx",private)
end
unicode = private
unicodes[name] = private
@@ -705,14 +764,25 @@ actions["prepare glyphs"] = function(data,filename,raw)
private = private + 1
nofnames = nofnames + 1
else
+ -- if unicode > criterium then
+ -- local taken = descriptions[unicode]
+ -- if taken then
+ -- private = private + 1
+ -- descriptions[private] = taken
+ -- unicodes[taken.name] = private
+ -- indices[taken.index] = private
+ -- if trace_private then
+ -- report_otf("slot %U is moved to %U due to private in font",unicode)
+ -- end
+ -- end
+ -- end
if not name then
- name = format("u%06X",unicode)
+ name = format("u%06X.ctx",unicode)
end
unicodes[name] = unicode
nofunicodes = nofunicodes + 1
end
indices[index] = unicode -- each index is unique (at least now)
-
local description = {
-- width = glyph.width,
boundingbox = glyph.boundingbox,
@@ -721,7 +791,6 @@ actions["prepare glyphs"] = function(data,filename,raw)
index = index,
glyph = glyph,
}
-
descriptions[unicode] = description
else
-- report_otf("potential problem: glyph %U is used but empty",index)
@@ -745,7 +814,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
if glyph then
local unicode = glyph.unicode
local name = glyph.name
- if not unicode or unicode == -1 or unicode >= criterium then
+ if not unicode or unicode == -1 then -- or unicode >= criterium then
unicode = private
unicodes[name] = private
if trace_private then
@@ -753,12 +822,37 @@ actions["prepare glyphs"] = function(data,filename,raw)
end
private = private + 1
else
+ -- We have a font that uses and exposes the private area. As this is rather unreliable it's
+ -- advised not to trust slots here (better use glyphnames). Anyway, we need a double check:
+ -- we need to move already moved entries and we also need to bump the next private to after
+ -- the (currently) last slot. This could leave us with a hole but we have holes anyway.
+ if unicode > criterium then
+ -- \definedfont[file:HANBatang-LVT.ttf] \fontchar{uF0135} \char"F0135
+ local taken = descriptions[unicode]
+ if taken then
+ if unicode >= private then
+ private = unicode + 1 -- restart private (so we can have mixed now)
+ else
+ private = private + 1 -- move on
+ end
+ descriptions[private] = taken
+ unicodes[taken.name] = private
+ indices[taken.index] = private
+ if trace_private then
+ report_otf("slot %U is moved to %U due to private in font",unicode)
+ end
+ else
+ if unicode >= private then
+ private = unicode + 1 -- restart (so we can have mixed now)
+ end
+ end
+ end
unicodes[name] = unicode
end
indices[index] = unicode
- if not name then
- name = format("u%06X",unicode)
- end
+ -- if not name then
+ -- name = format("u%06X",unicode) -- u%06X.ctx
+ -- end
descriptions[unicode] = {
-- width = glyph.width,
boundingbox = glyph.boundingbox,
@@ -768,7 +862,7 @@ actions["prepare glyphs"] = function(data,filename,raw)
}
local altuni = glyph.altuni
if altuni then
- local d
+ -- local d
for i=1,#altuni do
local a = altuni[i]
local u = a.unicode
@@ -783,15 +877,15 @@ actions["prepare glyphs"] = function(data,filename,raw)
vv = { [u] = unicode }
variants[v] = vv
end
- elseif d then
- d[#d+1] = u
- else
- d = { u }
+ -- elseif d then
+ -- d[#d+1] = u
+ -- else
+ -- d = { u }
end
end
- if d then
- duplicates[unicode] = d
- end
+ -- if d then
+ -- duplicates[unicode] = d -- is this needed ?
+ -- end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -807,6 +901,10 @@ end
-- the next one is still messy but will get better when we have
-- flattened map/enc tables in the font loader
+-- the next one is not using a valid base for unicode privates
+--
+-- PsuedoEncodeUnencoded(EncMap *map,struct ttfinfo *info)
+
actions["check encoding"] = function(data,filename,raw)
local descriptions = data.descriptions
local resources = data.resources
@@ -819,9 +917,11 @@ actions["check encoding"] = function(data,filename,raw)
local mapdata = raw.map or { }
local unicodetoindex = mapdata and mapdata.map or { }
+ local indextounicode = mapdata and mapdata.backmap or { }
-- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
local encname = lower(data.enc_name or mapdata.enc_name or "")
local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
+ local privateoffset = constructors.privateoffset
-- end of messy
@@ -829,42 +929,44 @@ actions["check encoding"] = function(data,filename,raw)
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- for unicode, index in next, unicodetoindex do -- altuni already covers this
- if unicode <= criterium and not descriptions[unicode] then
- local parent = indices[index] -- why nil?
- if not parent then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ local reported = { }
+ -- we loop over the original unicode->index mapping but we
+ -- need to keep in mind that that one can have weird entries
+ -- so we need some extra checking
+ for maybeunicode, index in next, unicodetoindex do
+ if descriptions[maybeunicode] then
+ -- we ignore invalid unicodes (unicode = -1) (ff can map wrong to non private)
+ else
+ local unicode = indices[index]
+ if not unicode then
+ -- weird (cjk or so?)
+ elseif maybeunicode == unicode then
+ -- no need to add
+ elseif unicode > privateoffset then
+ -- we have a non-unicode
else
- local parentdescription = descriptions[parent]
- if parentdescription then
- local altuni = parentdescription.altuni
- if not altuni then
- altuni = { { unicode = unicode } }
- parentdescription.altuni = altuni
- duplicates[parent] = { unicode }
+ local d = descriptions[unicode]
+ if d then
+ local c = d.copies
+ if c then
+ c[maybeunicode] = true
else
- local done = false
- for i=1,#altuni do
- if altuni[i].unicode == unicode then
- done = true
- break
- end
- end
- if not done then
- -- let's assume simple cjk reuse
- insert(altuni,{ unicode = unicode })
- insert(duplicates[parent],unicode)
- end
+ d.copies = { [maybeunicode] = true }
end
- if trace_loading then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- end
- else
- report_otf("weird, unicode %U points to %U with index %H",unicode,index)
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index] = true
end
end
end
end
+ for unicode, data in next, descriptions do
+ local d = data.copies
+ if d then
+ duplicates[unicode] = sortedkeys(d)
+ data.copies = nil
+ end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -872,12 +974,15 @@ actions["check encoding"] = function(data,filename,raw)
end
if mapdata then
- mapdata.map = { } -- clear some memory
+ mapdata.map = { } -- clear some memory
+ mapdata.backmap = { } -- clear some memory
end
end
-- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only
+-- altuni so we stick to kerns only .. alternatively we can always
+-- do an indirect lookup uni_to_uni . but then we need that in
+-- all lookups
actions["add duplicates"] = function(data,filename,raw)
local descriptions = data.descriptions
@@ -888,31 +993,43 @@ actions["add duplicates"] = function(data,filename,raw)
local duplicates = resources.duplicates
for unicode, d in next, duplicates do
- for i=1,#d do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
- descriptions[u] = duplicate
- local n = 0
- for _, description in next, descriptions do
- if kerns then
+ local nofduplicates = #d
+ if nofduplicates > 4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ -- local validduplicates = { }
+ for i=1,nofduplicates do
+ local u = d[i]
+ if not descriptions[u] then
+ local description = descriptions[unicode]
+ local n = 0
+ for _, description in next, descriptions do
local kerns = description.kerns
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
+ if kerns then
+ for _, k in next, kerns do
+ local ku = k[unicode]
+ if ku then
+ k[u] = ku
+ n = n + 1
+ end
end
end
+ -- todo: lookups etc
+ end
+ if u > 0 then -- and
+ local duplicate = table.copy(description) -- else packing problem
+ duplicate.comment = format("copy of U+%05X", unicode)
+ descriptions[u] = duplicate
+ -- validduplicates[#validduplicates+1] = u
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
end
- -- todo: lookups etc
- end
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
end
end
+ -- duplicates[unicode] = #validduplicates > 0 and validduplicates or nil
end
end
end
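-- editorial sketch, not part of this patch: the duplicates table built by "check
-- encoding" and consumed by "add duplicates" maps a parent unicode to the list of
-- codepoints that reuse its glyph; leftovers are shallow-copied later in otftotfm.
-- The codepoints below are hypothetical examples.
local descriptions = { [0x03A9] = { name = "Omega", width = 600 } }
local duplicates   = { [0x03A9] = { 0x2126 } } -- OHM SIGN reusing GREEK CAPITAL LETTER OMEGA
for parent, list in next, duplicates do
    for i=1,#list do
        local u = list[i]
        if not descriptions[u] then
            descriptions[u] = descriptions[parent] -- or a real copy when it gets patched later
        end
    end
end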
@@ -1143,10 +1260,16 @@ actions["reorganize subtables"] = function(data,filename,raw)
elseif features then
-- scripts, tag, ismac
local f = { }
+ local o = { }
for i=1,#features do
local df = features[i]
local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
+ local ft = f[tag]
+ if not ft then
+ ft = { }
+ f[tag] = ft
+ o[#o+1] = tag
+ end
local dscripts = df.scripts
for i=1,#dscripts do
local d = dscripts[i]
@@ -1166,6 +1289,7 @@ actions["reorganize subtables"] = function(data,filename,raw)
subtables = subtables,
markclass = markclass,
features = f,
+ order = o,
}
else
lookups[name] = {
@@ -1221,9 +1345,9 @@ local function s_uncover(splitter,cache,cover)
local uncovered = cache[cover]
if not uncovered then
uncovered = lpegmatch(splitter,cover)
--- for i=1,#uncovered do
--- uncovered[i] = { [uncovered[i]] = true }
--- end
+ -- for i=1,#uncovered do
+ -- uncovered[i] = { [uncovered[i]] = true }
+ -- end
cache[cover] = uncovered
end
return { uncovered }
@@ -1237,9 +1361,14 @@ local function t_hashed(t,cache)
local ti = t[i]
local tih = cache[ti]
if not tih then
- tih = { }
- for i=1,#ti do
- tih[ti[i]] = true
+ local tn = #ti
+ if tn == 1 then
+ tih = { [ti[1]] = true }
+ else
+ tih = { }
+ for i=1,tn do
+ tih[ti[i]] = true
+ end
end
cache[ti] = tih
end
@@ -1255,12 +1384,17 @@ end
local function s_hashed(t,cache)
if t then
- local ht = { }
local tf = t[1]
- for i=1,#tf do
- ht[i] = { [tf[i]] = true }
+ local nf = #tf
+ if nf == 1 then
+ return { [tf[1]] = true }
+ else
+ local ht = { }
+ for i=1,nf do
+ ht[i] = { [tf[i]] = true }
+ end
+ return ht
end
- return ht
else
return nil
end
@@ -1421,12 +1555,12 @@ actions["reorganize lookups"] = function(data,filename,raw) -- we could check fo
local fore = glyphs.fore
if fore and fore ~= "" then
fore = s_uncover(splitter,s_u_cache,fore)
- rule.before = s_hashed(fore,s_h_cache)
+ rule.after = s_hashed(fore,s_h_cache)
end
local back = glyphs.back
if back then
back = s_uncover(splitter,s_u_cache,back)
- rule.after = s_hashed(back,s_h_cache)
+ rule.before = s_hashed(back,s_h_cache)
end
local names = glyphs.names
if names then
@@ -1434,6 +1568,14 @@ actions["reorganize lookups"] = function(data,filename,raw) -- we could check fo
rule.current = s_hashed(names,s_h_cache)
end
rule.glyphs = nil
+ local lookups = rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i] = "" -- fix sparse array
+ end
+ end
+ end
end
end
end
@@ -1451,7 +1593,9 @@ local function check_variants(unicode,the_variants,splitter,unicodes)
for i=1,#glyphs do
local g = glyphs[i]
if done[g] then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ if i > 1 then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ end
else
if n == 0 then
n = 1
@@ -1597,6 +1741,8 @@ actions["merge kern classes"] = function(data,filename,raw)
local lookup = subtable.lookup or subtable.name
if kernclass then -- the next one is quite slow
if #kernclass > 0 then
+ -- it's a table with one entry .. a future luatex can just
+ -- omit that level
kernclass = kernclass[1]
lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup
report_otf("fixing kernclass table of lookup %a",lookup)
@@ -1701,6 +1847,12 @@ end
-- future versions will remove _
+local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
+
+local function valid_ps_name(str)
+ return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
+end
+
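-- editorial sketch, not part of this patch: a self-contained check showing what
-- the pattern above accepts; a PostScript name must be non-empty, shorter than
-- 64 bytes, plain ASCII and free of the listed delimiters (local names here are
-- mine, chosen so they do not clash with the patch)
local lpegmatch, P, R, S = lpeg.match, lpeg.P, lpeg.R, lpeg.S
local psvalid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
local function is_valid_ps_name(str)
    return str and str ~= "" and #str < 64 and lpegmatch(psvalid,str) and true or false
end
assert(is_valid_ps_name("MinionPro-Regular") == true)
assert(is_valid_ps_name("Minion Pro Regular") == false) -- space is a delimiter
assert(is_valid_ps_name(string.rep("x",64))   == false) -- too long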
actions["check metadata"] = function(data,filename,raw)
local metadata = data.metadata
for _, k in next, mainfields do
@@ -1718,11 +1870,56 @@ actions["check metadata"] = function(data,filename,raw)
ttftables[i].data = "deleted"
end
end
+ --
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ -- the ff library does a bit too much (and wrong) checking ... so we need to catch this
+ -- at least for now
+ local function valid(what)
+ local names = raw.names
+ for i=1,#names do
+ local list = names[i]
+ local names = list.names
+ if names then
+ local name = names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname = metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname = valid(what)
+ if not newname then
+ newname = formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning = formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1] = warning
+ report_otf(warning)
+ metadata[what] = newname
+ end
+ end
+ check("fontname")
+ check("fullname")
+ end
+ --
end
actions["cleanup tables"] = function(data,filename,raw)
- data.resources.indices = nil -- not needed
- data.helpers = nil
+ local duplicates = data.resources.duplicates
+ if duplicates then
+ for k, v in next, duplicates do
+ if #v == 1 then
+ duplicates[k] = v[1]
+ end
+ end
+ end
+ data.resources.indices = nil -- not needed
+ data.resources.unicodes = nil -- delayed
+ data.helpers = nil -- tricky as we have no unicodes any more
end
-- kern: ttf has a table with kerns
@@ -1820,6 +2017,8 @@ actions["reorganize glyph lookups"] = function(data,filename,raw)
end
+local zero = { 0, 0 }
+
actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries
local descriptions = data.descriptions
for unicode, description in next, descriptions do
@@ -1828,14 +2027,37 @@ actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we rep
for class, data in next, anchors do
if class == "baselig" then
for tag, specification in next, data do
- for i=1,#specification do
- local si = specification[i]
- specification[i] = { si.x or 0, si.y or 0 }
+ -- for i=1,#specification do
+ -- local si = specification[i]
+ -- specification[i] = { si.x or 0, si.y or 0 }
+ -- end
+ -- can be sparse so we need to fill the holes
+ local n = 0
+ for k, v in next, specification do
+ if k > n then
+ n = k
+ end
+ local x, y = v.x, v.y
+ if x or y then
+ specification[k] = { x or 0, y or 0 }
+ else
+ specification[k] = zero
+ end
+ end
+ local t = { }
+ for i=1,n do
+ t[i] = specification[i] or zero
end
+ data[tag] = t -- so # is okay (nicer for packer)
end
else
for tag, specification in next, data do
- data[tag] = { specification.x or 0, specification.y or 0 }
+ local x, y = specification.x, specification.y
+ if x or y then
+ data[tag] = { x or 0, y or 0 }
+ else
+ data[tag] = zero
+ end
end
end
end
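-- editorial sketch, not part of this patch: the baselig specifications handled
-- above can be sparse arrays, so holes are filled with the shared zero entry to
-- make the length operator (and the packer) behave; hypothetical input:
local zero = { 0, 0 }
local specification = { [1] = { 10, 20 }, [3] = { 30, 40 } } -- index 2 is missing
local n = 0
for k in next, specification do
    if k > n then n = k end
end
local t = { }
for i=1,n do
    t[i] = specification[i] or zero
end
-- t is now { { 10, 20 }, zero, { 30, 40 } } and #t == 3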
@@ -1844,6 +2066,164 @@ actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we rep
end
end
+local bogusname = (P("uni") + P("u")) * R("AF","09")^4
+ + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
+local uselessname = (1-bogusname)^0 * bogusname
+
+actions["purge names"] = function(data,filename,raw) -- not used yet
+ if purge_names then
+ local n = 0
+ for u, d in next, data.descriptions do
+ if lpegmatch(uselessname,d.name) then
+ n = n + 1
+ d.name = nil
+ end
+ -- d.comment = nil
+ end
+ if n > 0 then
+ report_otf("%s bogus names removed",n)
+ end
+ end
+end
+
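-- editorial sketch, not part of this patch: which names the purge pattern above
-- flags as bogus; the pattern is redeclared locally so it can be tried standalone
local lpegmatch, P, R, S = lpeg.match, lpeg.P, lpeg.R, lpeg.S
local bogus   = (P("uni") + P("u")) * R("AF","09")^4
              + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
local useless = (1-bogus)^0 * bogus
print(lpegmatch(useless,"uniFB01"))   -- 8   : generated "uniXXXX" name, purged
print(lpegmatch(useless,"glyph127"))  -- 9   : generated "glyph<n>" name, purged
print(lpegmatch(useless,"f_f_i"))     -- nil : meaningful name, kept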
+actions["compact lookups"] = function(data,filename,raw)
+ if not compact_lookups then
+ report_otf("not compacting")
+ return
+ end
+ -- create keyhash
+ local last = 0
+ local tags = table.setmetatableindex({ },
+ function(t,k)
+ last = last + 1
+ t[k] = last
+ return last
+ end
+ )
+ --
+ local descriptions = data.descriptions
+ local resources = data.resources
+ --
+ for u, d in next, descriptions do
+ --
+ -- -- we can also compact anchors and cursives (basechar basemark baselig mark)
+ --
+ local slookups = d.slookups
+ if type(slookups) == "table" then
+ local s = { }
+ for k, v in next, slookups do
+ s[tags[k]] = v
+ end
+ d.slookups = s
+ end
+ --
+ local mlookups = d.mlookups
+ if type(mlookups) == "table" then
+ local m = { }
+ for k, v in next, mlookups do
+ m[tags[k]] = v
+ end
+ d.mlookups = m
+ end
+ --
+ local kerns = d.kerns
+ if type(kerns) == "table" then
+ local t = { }
+ for k, v in next, kerns do
+ t[tags[k]] = v
+ end
+ d.kerns = t
+ end
+ end
+ --
+ local lookups = data.lookups
+ if lookups then
+ local l = { }
+ for k, v in next, lookups do
+ local rules = v.rules
+ if rules then
+ for i=1,#rules do
+ local l = rules[i].lookups
+ if type(l) == "table" then
+ for i=1,#l do
+ l[i] = tags[l[i]]
+ end
+ end
+ end
+ end
+ l[tags[k]] = v
+ end
+ data.lookups = l
+ end
+ --
+ local lookups = resources.lookups
+ if lookups then
+ local l = { }
+ for k, v in next, lookups do
+ local s = v.subtables
+ if type(s) == "table" then
+ for i=1,#s do
+ s[i] = tags[s[i]]
+ end
+ end
+ l[tags[k]] = v
+ end
+ resources.lookups = l
+ end
+ --
+ local sequences = resources.sequences
+ if sequences then
+ for i=1,#sequences do
+ local s = sequences[i]
+ local n = s.name
+ if n then
+ s.name = tags[n]
+ end
+ local t = s.subtables
+ if type(t) == "table" then
+ for i=1,#t do
+ t[i] = tags[t[i]]
+ end
+ end
+ end
+ end
+ --
+ local lookuptypes = resources.lookuptypes
+ if lookuptypes then
+ local l = { }
+ for k, v in next, lookuptypes do
+ l[tags[k]] = v
+ end
+ resources.lookuptypes = l
+ end
+ --
+ local anchor_to_lookup = resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor, lookups in next, anchor_to_lookup do
+ local l = { }
+ for lookup, value in next, lookups do
+ l[tags[lookup]] = value
+ end
+ anchor_to_lookup[anchor] = l
+ end
+ end
+ --
+ local lookup_to_anchor = resources.lookup_to_anchor
+ if lookup_to_anchor then
+ local l = { }
+ for lookup, value in next, lookup_to_anchor do
+ l[tags[lookup]] = value
+ end
+ resources.lookup_to_anchor = l
+ end
+ --
+ tags = table.swapped(tags)
+ --
+ report_otf("%s lookup tags compacted",#tags)
+ --
+ resources.lookuptags = tags
+end
+
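-- editorial sketch, not part of this patch: the keyhash used above, written with
-- a plain metatable in case the ConTeXt table helpers (setmetatableindex and
-- swapped) are not at hand; every lookup name gets a small integer tag on first
-- access and the mapping is inverted afterwards for reporting
local last = 0
local tags = setmetatable({ }, { __index = function(t,k)
    last = last + 1
    t[k] = last
    return last
end })
assert(tags["s_s_l_1_s"] == 1) -- hypothetical lookup names
assert(tags["p_s_l_2_k"] == 2)
assert(tags["s_s_l_1_s"] == 1) -- repeated names keep their tag
local names = { }
for k, v in pairs(tags) do
    names[v] = k               -- this is what table.swapped does
end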
-- modes: node, base, none
function otf.setfeatures(tfmdata,features)
@@ -1869,6 +2249,7 @@ end
local function copytotfm(data,cache_id)
if data then
local metadata = data.metadata
+ local warnings = data.warnings
local resources = data.resources
local properties = derivetable(data.properties)
local descriptions = derivetable(data.descriptions)
@@ -1963,6 +2344,7 @@ local function copytotfm(data,cache_id)
local filename = constructors.checkedfilename(resources)
local fontname = metadata.fontname
local fullname = metadata.fullname or fontname
+ local psname = fontname or fullname
local units = metadata.units_per_em or 1000
--
if units == 0 then -- catch bugs in fonts
@@ -1982,8 +2364,8 @@ local function copytotfm(data,cache_id)
parameters.charwidth = charwidth
parameters.charxheight = charxheight
--
- local space = 0x0020 -- unicodes['space'], unicodes['emdash']
- local emdash = 0x2014 -- unicodes['space'], unicodes['emdash']
+ local space = 0x0020
+ local emdash = 0x2014
if monospaced then
if descriptions[space] then
spaceunits, spacer = descriptions[space].width, "space"
@@ -2032,7 +2414,7 @@ local function copytotfm(data,cache_id)
if charxheight then
parameters.x_height = charxheight
else
- local x = 0x78 -- unicodes['x']
+ local x = 0x0078
if x then
local x = descriptions[x]
if x then
@@ -2056,11 +2438,20 @@ local function copytotfm(data,cache_id)
properties.filename = filename
properties.fontname = fontname
properties.fullname = fullname
- properties.psname = fontname or fullname
+ properties.psname = psname
properties.name = filename or fullname
--
-- properties.name = specification.name
-- properties.sub = specification.sub
+ --
+ if warnings and #warnings > 0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
return {
characters = characters,
descriptions = descriptions,
@@ -2069,6 +2460,7 @@ local function copytotfm(data,cache_id)
resources = resources,
properties = properties,
goodies = goodies,
+ warnings = warnings,
}
end
end
@@ -2084,6 +2476,33 @@ local function otftotfm(specification)
local features = specification.features.normal
local rawdata = otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
+ local descriptions = rawdata.descriptions
+ local duplicates = rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates, nofduplicated = 0, 0
+ for parent, list in next, duplicates do
+ if type(list) == "table" then
+ local n = #list
+ for i=1,n do
+ local unicode = list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode] = descriptions[parent] -- or copy
+ nofduplicated = nofduplicated + 1
+ end
+ end
+ nofduplicates = nofduplicates + n
+ else
+ if not descriptions[list] then
+ descriptions[list] = descriptions[parent] -- or copy
+ nofduplicated = nofduplicated + 1
+ end
+ nofduplicates = nofduplicates + 1
+ end
+ end
+ if trace_otf and nofduplicated ~= nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
rawdata.lookuphash = { }
tfmdata = copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua
index c57be5f02..b1bd45304 100644
--- a/tex/context/base/font-otn.lua
+++ b/tex/context/base/font-otn.lua
@@ -6,8 +6,17 @@ if not modules then modules = { } end modules ['font-otn'] = {
license = "see context related readme files",
}
+-- todo: looks like we have a leak somewhere (probably in ligatures)
+-- todo: copy attributes to disc
+
+-- this is a context version which can contain experimental code, but when we
+-- have serious patches we also need to change the other two font-otn files
+
-- preprocessors = { "nodes" }
+-- anchor class : mark, mkmk, curs, mklg (todo)
+-- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
+
-- this is still somewhat preliminary and it will get better in due time;
-- much functionality could only be implemented thanks to the husayni font
-- of Idris Samawi Hamid to who we dedicate this module.
@@ -20,7 +29,6 @@ if not modules then modules = { } end modules ['font-otn'] = {
-- todo:
--
--- kerning is probably not yet ok for latin around dics nodes (interesting challenge)
-- extension infrastructure (for usage out of context)
-- sorting features according to vendors/renderers
-- alternative loop quitters
@@ -32,7 +40,18 @@ if not modules then modules = { } end modules ['font-otn'] = {
-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
-- remove some optimizations (when I have a faster machine)
--
--- maybe redo the lot some way (more context specific)
+-- beware:
+--
+-- we do some disc juggling where we need to keep in mind that the
+-- pre, post and replace fields can have prev pointers to a nesting
+-- node ... I wonder if that is still needed
+--
+-- not possible:
+--
+-- \discretionary {alpha-} {betagammadelta}
+-- {\discretionary {alphabeta-} {gammadelta}
+-- {\discretionary {alphabetagamma-} {delta}
+-- {alphabetagammadelta}}}
--[[ldx--
<p>This module is a bit more split up that I'd like but since we also want to test
@@ -115,7 +134,6 @@ results in different tables.</p>
-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
--
-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
-- We used to have independent hashes for lookups but as the tags are unique
-- we now use only one hash. If needed we can have multiple again but in that
@@ -123,16 +141,14 @@ results in different tables.</p>
-- Todo: make plugin feature that operates on char/glyphnode arrays
-local concat, insert, remove = table.concat, table.insert, table.remove
-local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
+local type, next, tonumber = type, next, tonumber
local random = math.random
local formatters = string.formatters
local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-local registertracker = trackers.register
+local registertracker = trackers.register
+local registerdirective = directives.register
local fonts = fonts
local otf = fonts.handlers.otf
@@ -154,12 +170,25 @@ local trace_steps = false registertracker("otf.steps", function(v
local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
+local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
+local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
+
+local quit_on_no_replacement = true -- maybe per font
+local check_discretionaries = true -- "trace"
+local zwnjruns = true
+
+registerdirective("otf.zwnjruns", function(v) zwnjruns = v end)
+registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement = value end)
+
+
local report_direct = logs.reporter("fonts","otf direct")
local report_subchain = logs.reporter("fonts","otf subchain")
local report_chain = logs.reporter("fonts","otf chain")
local report_process = logs.reporter("fonts","otf process")
local report_prepare = logs.reporter("fonts","otf prepare")
local report_warning = logs.reporter("fonts","otf warning")
+local report_run = logs.reporter("fonts","otf run")
registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
@@ -171,12 +200,35 @@ registertracker("otf.injections","nodes.injections")
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local flush_node_list = node.flush_list
-local end_of_math = node.end_of_math
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_node_tail = nuts.tail
+local flush_node_list = nuts.flush_list
+local free_node = nuts.free
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
local setmetatableindex = table.setmetatableindex
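-- editorial note, not part of this patch: the rest of the module is rewritten in
-- terms of the node.direct ("nuts") accessors imported above; the conversion is
-- mechanical, for example (LuaTeX only, glyphnode is a hypothetical node):
--
--   local n = tonut(glyphnode)    -- userdata node -> direct node
--   local c = getchar(n)          -- was: glyphnode.char
--   setfield(n,"char",0x0066)     -- was: glyphnode.char = 0x0066
--   local nn = getnext(n)         -- was: glyphnode.next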
@@ -200,36 +252,33 @@ local dir_code = whatcodes.dir
local localpar_code = whatcodes.localpar
local discretionary_code = disccodes.discretionary
+local regular_code = disccodes.regular
+local automatic_code = disccodes.automatic
local ligature_code = glyphcodes.ligature
local privateattribute = attributes.private
-- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is bases in KE's patches but there is something fishy
+-- end and one here ... this is based on KE's patches but there is something fishy
-- there as I'm pretty sure that for husayni we need some connection (as it's much
-- more complex than an average font) but I need proper examples of all cases, not
-- of only some.
local a_state = privateattribute('state')
-local a_markbase = privateattribute('markbase')
-local a_markmark = privateattribute('markmark')
-local a_markdone = privateattribute('markdone') -- assigned at the injection end
-local a_cursbase = privateattribute('cursbase')
-local a_curscurs = privateattribute('curscurs')
-local a_cursdone = privateattribute('cursdone')
-local a_kernpair = privateattribute('kernpair')
-local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
+local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
local injections = nodes.injections
local setmark = injections.setmark
local setcursive = injections.setcursive
local setkern = injections.setkern
local setpair = injections.setpair
+local resetinjection = injections.reset
+local copyinjection = injections.copy
+local setligaindex = injections.setligaindex
+local getligaindex = injections.getligaindex
-local markonce = true
local cursonce = true
-local kernonce = true
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -252,6 +301,7 @@ local currentfont = false
local lookuptable = false
local anchorlookups = false
local lookuptypes = false
+local lookuptags = false
local handlers = { }
local rlmode = 0
local featurevalue = false
@@ -306,20 +356,20 @@ end
local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
elseif chainname then
- return formatters["feature %a, chain %a"](kind,chainname)
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
else
return formatters["feature %a"](kind)
end
end
local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
end
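-- editorial note, not part of this patch: with "compact lookups" enabled the
-- lookupname passed around is a small integer, so cref and pref resolve it via
-- lookuptags (presumably filled from resources.lookuptags elsewhere in this
-- patch) to get a readable string in the log, e.g. lookuptags[1] being a
-- hypothetical "s_s_l_1_s" rather than just the number 1.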
-- We can assume that languages that use marks are not hyphenated. We can also assume
@@ -332,42 +382,93 @@ end
-- and indices.
local function copy_glyph(g) -- next and prev are untouched !
- local components = g.components
+ local components = getfield(g,"components")
if components then
- g.components = nil
+ setfield(g,"components",nil)
local n = copy_node(g)
- g.components = components
+ copyinjection(n,g) -- we need to preserve the lig indices
+ setfield(g,"components",components)
return n
else
- return copy_node(g)
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ return n
+ end
+end
+
+-- temp here (context)
+
+local function collapse_disc(start,next)
+ local replace1 = getfield(start,"replace")
+ local replace2 = getfield(next,"replace")
+ if replace1 and replace2 then
+ local pre2 = getfield(next,"pre")
+ local post2 = getfield(next,"post")
+ setfield(replace1,"prev",nil)
+ if pre2 then
+ local pre1 = getfield(start,"pre")
+ if pre1 then
+ flush_node_list(pre1)
+ end
+ local pre1 = copy_node_list(replace1)
+ local tail1 = find_node_tail(pre1)
+ setfield(tail1,"next",pre2)
+ setfield(pre2,"prev",tail1)
+ setfield(start,"pre",pre1)
+ setfield(next,"pre",nil)
+ else
+ setfield(start,"pre",nil)
+ end
+ if post2 then
+ local post1 = getfield(start,"post")
+ if post1 then
+ flush_node_list(post1)
+ end
+ setfield(start,"post",post2)
+ else
+ setfield(start,"post",nil)
+ end
+ local tail1 = find_node_tail(replace1)
+ setfield(tail1,"next",replace2)
+ setfield(replace2,"prev",tail1)
+ setfield(start,"replace",replace1)
+ setfield(next,"replace",nil)
+ --
+ local nextnext = getnext(next)
+ setfield(nextnext,"prev",start)
+ setfield(start,"next",nextnext)
+ free_node(next)
+ else
+ -- maybe remove it
end
end
-- start is a mark and we need to keep that one
local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and start.char == char then
+ if start == stop and getchar(start) == char then
return head, start
else
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base = copy_glyph(start)
if head == start then
head = base
end
- base.char = char
- base.subtype = ligature_code
- base.components = start
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
if prev then
- prev.next = base
+ setfield(prev,"next",base)
end
if next then
- next.prev = base
+ setfield(next,"prev",base)
end
- base.next = next
- base.prev = prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
return head, base
end
end
@@ -380,49 +481,124 @@ end
-- third component.
local function getcomponentindex(start)
- if start.id ~= glyph_code then
+ if getid(start) ~= glyph_code then
return 0
- elseif start.subtype == ligature_code then
+ elseif getsubtype(start) == ligature_code then
local i = 0
- local components = start.components
+ local components = getfield(start,"components")
while components do
i = i + getcomponentindex(components)
- components = components.next
+ components = getnext(components)
end
return i
- elseif not marks[start.char] then
+ elseif not marks[getchar(start)] then
return 1
else
return 0
end
end
--- eventually we will do positioning in an other way (needs addional w/h/d fields)
+local a_noligature = attributes.private("noligature")
+local prehyphenchar = languages and languages.prehyphenchar
+local posthyphenchar = languages and languages.posthyphenchar
+----- preexhyphenchar = languages and languages.preexhyphenchar
+----- postexhyphenchar = languages and languages.postexhyphenchar
+
+if prehyphenchar then
+
+ -- okay
+
+elseif context then
+
+ report_warning("no language support") os.exit()
+
+else
+
+ local newlang = lang.new
+ local getpre = lang.prehyphenchar
+ local getpost = lang.posthyphenchar
+ -- local getpreex = lang.preexhyphenchar
+ -- local getpostex = lang.postexhyphenchar
+
+ prehyphenchar = function(l) local l = newlang(l) return l and getpre (l) or -1 end
+ posthyphenchar = function(l) local l = newlang(l) return l and getpost (l) or -1 end
+ -- preexhyphenchar = function(l) local l = newlang(l) return l and getpreex (l) or -1 end
+ -- postexhyphenchar = function(l) local l = newlang(l) return l and getpostex(l) or -1 end
+
+end
+
+local function addhyphens(template,pre,post)
+ -- inserted by hyphenation algorithm
+ local l = getfield(template,"lang")
+ local p = prehyphenchar(l)
+ if p and p > 0 then
+ local c = copy_node(template)
+ setfield(c,"char",p)
+ if pre then
+ local t = find_node_tail(pre)
+ setfield(t,"next",c)
+ setfield(c,"prev",t)
+ else
+ pre = c
+ end
+ end
+ local p = posthyphenchar(l)
+ if p and p > 0 then
+ local c = copy_node(template)
+ setfield(c,"char",p)
+ if post then
+ -- post has a prev nesting node .. alternatively we could
+ local prev = getprev(post)
+ setfield(c,"next",post)
+ setfield(post,"prev",c)
+ if prev then
+ setfield(prev,"next",c)
+ setfield(c,"prev",prev)
+ end
+ else
+ post = c
+ end
+ end
+ return pre, post
+end
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if start == stop and start.char == char then
- start.char = char
+ if getattr(start,a_noligature) == 1 then
+ -- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
+ return head, start
+ end
+ if start == stop and getchar(start) == char then
+ resetinjection(start)
+ setfield(start,"char",char)
return head, start
end
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
+ -- needs testing (side effects):
+ local components = getfield(base,"components")
+ if components then
+ flush_node_list(components)
+ end
+ --
+ local prev = getprev(start)
+ local next = getnext(stop)
+ local comp = start
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local base = copy_glyph(start)
if start == head then
head = base
end
- base.char = char
- base.subtype = ligature_code
- base.components = start -- start can have components
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",comp) -- start can have components .. do we need to flush?
if prev then
- prev.next = base
+ setfield(prev,"next",base)
end
if next then
- next.prev = base
+ setfield(next,"prev",base)
end
- base.next = next
- base.prev = prev
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
if not discfound then
local deletemarks = markflag ~= "mark"
local components = start
@@ -432,35 +608,101 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
local current = base
-- first we loop over the glyphs in start .. stop
while start do
- local char = start.char
+ local char = getchar(start)
if not marks[char] then
baseindex = baseindex + componentindex
componentindex = getcomponentindex(start)
elseif not deletemarks then -- quite fishy
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
- head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ local n = copy_node(start)
+ copyinjection(n,start)
+ head, current = insert_node_after(head,current,n) -- unlikely that mark has components
elseif trace_marks then
logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
end
- start = start.next
+ start = getnext(start)
end
-- we can have one accent as part of a lookup and another following
-- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
- local start = current.next
- while start and start.id == glyph_code do
- local char = start.char
+ local start = getnext(current)
+ while start and getid(start) == glyph_code do
+ local char = getchar(start)
if marks[char] then
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
else
break
end
- start = start.next
+ start = getnext(start)
+ end
+ else
+ -- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
+ local discprev = getfield(discfound,"prev")
+ local discnext = getfield(discfound,"next")
+ if discprev and discnext then
+ local subtype = getsubtype(discfound)
+ if subtype == discretionary_code then
+ local pre = getfield(discfound,"pre")
+ local post = getfield(discfound,"post")
+ local replace = getfield(discfound,"replace")
+ if not replace then -- todo: signal simple hyphen
+ local prev = getfield(base,"prev")
+ local copied = copy_node_list(comp)
+ setfield(discnext,"prev",nil) -- also blocks funny assignments
+ setfield(discprev,"next",nil) -- also blocks funny assignments
+ if pre then
+ setfield(comp,"next",pre)
+ setfield(pre,"prev",comp)
+ end
+ pre = comp
+ if post then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",discnext)
+ setfield(discnext,"prev",tail)
+ setfield(post,"prev",nil)
+ else
+ post = discnext
+ end
+ setfield(prev,"next",discfound)
+ setfield(next,"prev",discfound)
+ setfield(discfound,"next",next)
+ setfield(discfound,"prev",prev)
+ setfield(base,"next",nil)
+ setfield(base,"prev",nil)
+ setfield(base,"components",copied)
+ setfield(discfound,"pre",pre)
+ setfield(discfound,"post",post)
+ setfield(discfound,"replace",base)
+ setfield(discfound,"subtype",discretionary_code)
+ base = prev -- restart
+ end
+          elseif subtype == regular_code then
+ -- local prev = getfield(base,"prev")
+ -- local next = getfield(base,"next")
+ local copied = copy_node_list(comp)
+ setfield(discnext,"prev",nil) -- also blocks funny assignments
+ setfield(discprev,"next",nil) -- also blocks funny assignments
+ local pre, post = addhyphens(comp,comp,discnext,subtype) -- takes from components
+ setfield(prev,"next",discfound)
+ setfield(next,"prev",discfound)
+ setfield(discfound,"next",next)
+ setfield(discfound,"prev",prev)
+ setfield(base,"next",nil)
+ setfield(base,"prev",nil)
+ setfield(base,"components",copied)
+ setfield(discfound,"pre",pre)
+ setfield(discfound,"post",post)
+ setfield(discfound,"replace",base)
+ setfield(discfound,"subtype",discretionary_code)
+ base = next -- or restart
+ else
+ -- forget about it in generic usage
+ end
end
end
return head, base
@@ -468,9 +710,10 @@ end
function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
- start.char = replacement
+ resetinjection(start)
+ setfield(start,"char",replacement)
return head, start, true
end
@@ -497,7 +740,7 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
elseif value == 0 then
- return start.char, trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
elseif value < 1 then
return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
else
@@ -509,30 +752,32 @@ end
local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples = #multiple
if nofmultiples > 0 then
- start.char = multiple[1]
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
if nofmultiples > 1 then
- local sn = start.next
+ local sn = getnext(start)
for k=2,nofmultiples do -- todo: use insert_node
-- untested:
--
--- while ignoremarks and marks[sn.char] then
--- local sn = sn.next
+-- while ignoremarks and marks[getchar(sn)] then
+-- local sn = getnext(sn)
-- end
local n = copy_node(start) -- ignore components
- n.char = multiple[k]
- n.next = sn
- n.prev = start
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
if sn then
- sn.prev = n
+ setfield(sn,"prev",n)
end
- start.next = n
+ setfield(start,"next",n)
start = n
end
end
return head, start, true
else
if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
+ logprocess("no multiple for %s",gref(getchar(start)))
end
return head, start, false
end
@@ -543,12 +788,13 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
- start.char = choice
+ resetinjection(start)
+ setfield(start,"char",choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
end
end
return head, start, true
@@ -556,23 +802,23 @@ end
function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
end
return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
+ local s, stop = getnext(start), nil
+ local startchar = getchar(start)
if marks[startchar] then
while s do
- local id = s.id
- if id == glyph_code and s.font == currentfont and s.subtype<256 then
- local lg = ligature[s.char]
+ local id = getid(s)
+ if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
+ local lg = ligature[getchar(s)]
if lg then
stop = s
ligature = lg
- s = s.next
+ s = getnext(s)
else
break
end
@@ -584,32 +830,38 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local lig = ligature.ligature
if lig then
if trace_ligatures then
- local stopchar = stop.char
+ local stopchar = getchar(stop)
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head, start = markstoligature(kind,lookupname,head,start,stop,lig)
end
- return head, start, true
+ return head, start, true, false
else
-- ok, goto next lookup
end
end
else
- local skipmark = sequence.flags[1]
+ local skipmark = sequence.flags[1]
+ local discfound = false
+ local lastdisc = nil
while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
+ local id = getid(s)
+ if id == glyph_code and getsubtype(s)<256 then -- not needed
+ if getfont(s) == currentfont then -- also not needed only when mark
+ local char = getchar(s)
if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
+ s = getnext(s)
+ else -- ligature is a tree
+ local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
if lg then
- stop = s
+ if not discfound and lastdisc then
+ discfound = lastdisc
+ lastdisc = nil
+ end
+ stop = s -- needed for fake so outside then
ligature = lg
- s = s.next
+ s = getnext(s)
else
break
end
@@ -618,54 +870,68 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
break
end
elseif id == disc_code then
- discfound = true
- s = s.next
+ lastdisc = s
+ s = getnext(s)
else
break
end
end
- local lig = ligature.ligature
+ local lig = ligature.ligature -- can't we get rid of this .ligature?
if lig then
if stop then
if trace_ligatures then
- local stopchar = stop.char
+ local stopchar = getchar(stop)
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
else
head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
- return head, start, true
else
-- weird but happens (in some arabic font)
- start.char = lig
+ resetinjection(start)
+ setfield(start,"char",lig)
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
end
- return head, start, true
end
+ return head, start, true, discfound
else
- -- weird but happens
+ -- weird but happens, pseudo ligatures ... just the components
end
end
- return head, start, false
+ return head, start, false, discfound
end
+-- function is_gsub_ligature(start,ligature) -- limited case: in disc nodes, only latin, always glyphs
+-- local s = getnext(start)
+-- while s do
+-- local lg = ligature[getchar(s)]
+-- if lg then
+-- ligature = lg
+-- s = getnext(s)
+-- else
+-- return
+-- end
+-- end
+-- return ligature and ligature.ligature
+-- end
+
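-- editorial sketch, not part of this patch: the "ligature" value walked above is
-- a small trie indexed by the following characters, with the final glyph in the
-- "ligature" field; for a latin fi/fl/ff/ffi setup the subtable reached from an
-- initial "f" could look like this (slots are the standard ligature codepoints):
local ligature = {
    [0x0066] = {                            -- second "f"
        ligature = 0xFB00,                  -- ff
        [0x0069] = { ligature = 0xFB03 },   -- ffi
    },
    [0x0069] = { ligature = 0xFB01 },       -- fi
    [0x006C] = { ligature = 0xFB02 },       -- fl
}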
--[[ldx--
<p>We get hits on a mark, but we're not sure if the it has to be applied so
we need to explicitly test for basechar, baselig and basemark entries.</p>
--ldx]]--
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -689,7 +955,7 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -717,16 +983,16 @@ end
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
-- check chainpos variant
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -738,7 +1004,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index = start[a_ligacomp]
+ local index = getligaindex(start)
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -752,7 +1018,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -785,22 +1051,22 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
if slc then -- a rather messy loop ... needs checking with husayni
while base do
- local blc = base[a_ligacomp]
+ local blc = getligaindex(base)
if blc and blc ~= slc then
- base = base.prev
+ base = getprev(base)
else
break
end
end
end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
local baseanchors = descriptions[basechar]
if baseanchors then
baseanchors = baseanchors.anchors
@@ -812,7 +1078,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -840,21 +1106,21 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and start[a_cursbase]
+ local alreadydone = cursonce and getprop(start,a_cursbase)
if not alreadydone then
local done = false
- local startchar = start.char
+ local startchar = getchar(start)
if marks[startchar] then
if trace_cursive then
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = nxt.next
+ nxt = getnext(nxt)
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -889,37 +1155,37 @@ function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head, start, false
end
end
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence,injection)
+ local startchar = getchar(start)
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,injection) -- ,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
end
return head, start, false
end
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
-- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
-- todo: kerns in components of ligatures
- local snext = start.next
+ local snext = getnext(start)
if not snext then
return head, start, false
else
local prev, done = start, false
local factor = tfmdata.parameters.factor
local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = snext.next
+ snext = getnext(snext)
else
if not krn then
-- skip
@@ -927,15 +1193,15 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
if lookuptype == "pair" then -- probably not needed
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,injection) -- characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,injection) -- characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -946,7 +1212,7 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
-- if a and a ~= 0 then
-- local k = setkern(snext,factor,rlmode,a)
-- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
-- end
-- end
-- if b and b ~= 0 then
@@ -955,9 +1221,9 @@ function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
end
done = true
elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
+ local k = setkern(snext,factor,rlmode,krn,injection)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
end
done = true
end
@@ -1012,13 +1278,14 @@ end
-- itself. It is meant mostly for dealing with Urdu.
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
+ local char = getchar(start)
local replacement = replacements[char]
if replacement then
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
- start.char = replacement
+ resetinjection(start)
+ setfield(start,"char",replacement)
return head, start, true
else
return head, start, false
@@ -1047,9 +1314,9 @@ as less as needed but that would also make the code even more messy.</p>
-- -- done
-- elseif ignoremarks then
-- repeat -- start x x m x x stop => start m
--- local next = start.next
--- if not marks[next.char] then
--- local components = next.components
+-- local next = getnext(start)
+-- if not marks[getchar(next)] then
+-- local components = getfield(next,"components")
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1059,8 +1326,8 @@ as less as needed but that would also make the code even more messy.</p>
-- until next == stop
-- else -- start x x x stop => start
-- repeat
--- local next = start.next
--- local components = next.components
+-- local next = getnext(start)
+-- local components = getfield(next,"components")
-- if components then -- probably not needed
-- flush_node_list(components)
-- end
@@ -1072,8 +1339,7 @@ as less as needed but that would also make the code even more messy.</p>
-- end
--[[ldx--
-<p>Here we replace start by a single variant, First we delete the rest of the
-match.</p>
+<p>Here we replace start by a single variant.</p>
--ldx]]--
function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
@@ -1081,11 +1347,11 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
local current = start
local subtables = currentlookup.subtables
if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ logwarning("todo: check if we need to loop over the replacements: % t",subtables)
end
while current do
- if current.id == glyph_code then
- local currentchar = current.char
+ if getid(current) == glyph_code then
+ local currentchar = getchar(current)
local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
@@ -1102,14 +1368,91 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
- current.char = replacement
+ resetinjection(current)
+ if check_discretionaries then
+ -- some fonts use a chain lookup to replace e.g. an f in a fi ligature
+ -- and there can be a disc node in between ... the next code tries to catch
+ -- this
+ local next = getnext(current)
+ local prev = getprev(current) -- todo: just remember it above
+ local done = false
+ if next then
+ if getid(next) == disc_code then
+ local subtype = getsubtype(next)
+ if subtype == discretionary_code then
+ setfield(next,"prev",prev)
+ setfield(prev,"next",next)
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ local replace = getfield(next,"replace")
+ local pre = getfield(next,"pre")
+ local new = copy_node(current)
+ setfield(new,"char",replacement)
+ if replace then
+ setfield(new,"next",replace)
+ setfield(replace,"prev",new)
+ end
+ if pre then
+ setfield(current,"next",pre)
+ setfield(pre,"prev",current)
+ end
+ setfield(next,"replace",new) -- also updates tail
+ setfield(next,"pre",current) -- also updates tail
+ end
+ start = next
+ done = true
+ local next = getnext(start)
+ if next and getid(next) == disc_code then
+ collapse_disc(start,next)
+ end
+ end
+ end
+ if not done and prev then
+ if getid(prev) == disc_code then
+ local subtype = getsubtype(prev)
+ if subtype == discretionary_code then
+ setfield(next,"prev",prev)
+ setfield(prev,"next",next)
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ local replace = getfield(prev,"replace")
+ local post = getfield(prev,"post")
+ local new = copy_node(current)
+ setfield(new,"char",replacement)
+ if replace then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",new)
+ setfield(new,"prev",tail)
+ else
+ replace = new
+ end
+ if post then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",current)
+ setfield(current,"prev",tail)
+ else
+ post = current
+ end
+ setfield(prev,"replace",replace) -- also updates tail
+ setfield(prev,"post",post) -- also updates tail
+ start = prev
+ done = true
+ end
+ end
+ end
+ if not done then
+ setfield(current,"char",replacement)
+ end
+ else
+ setfield(current,"char",replacement)
+ end
end
end
return head, start, true
elseif current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
return head, start, false
@@ -1118,13 +1461,12 @@ end
chainmores.gsub_single = chainprocs.gsub_single
--[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
+<p>Here we replace start by a sequence of new glyphs.</p>
--ldx]]--
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
-- local head, n = delete_till_stop(head,start,stop)
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local replacements = lookuphash[lookupname]
@@ -1167,8 +1509,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
local subtables = currentlookup.subtables
local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
+ if getid(current) == glyph_code then -- is this check needed?
+ local currentchar = getchar(current)
local lookupname = subtables[1]
local alternatives = lookuphash[lookupname]
if not alternatives then
@@ -1183,7 +1525,8 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
- start.char = choice
+ resetinjection(start)
+ setfield(start,"char",choice)
else
if trace_alternatives then
logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
@@ -1197,7 +1540,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
elseif current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
return head, start, false
@@ -1212,7 +1555,7 @@ assume rather stupid ligatures (no complex disc nodes).</p>
--ldx]]--
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local ligatures = lookuphash[lookupname]
@@ -1227,20 +1570,30 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
end
else
- local s = start.next
+ local s = getnext(start)
local discfound = false
local last = stop
local nofreplacements = 0
local skipmark = currentlookup.flags[1]
while s do
- local id = s.id
+ local id = getid(s)
if id == disc_code then
- s = s.next
- discfound = true
+ if not discfound then
+ discfound = s
+ end
+ if s == stop then
+ break -- okay? or before the disc
+ else
+ s = getnext(s)
+ end
else
- local schar = s.char
+ local schar = getchar(s)
if skipmark and marks[schar] then -- marks
- s = s.next
+-- if s == stop then -- maybe add this
+-- break
+-- else
+ s = getnext(s)
+-- end
else
local lg = ligatures[schar]
if lg then
@@ -1248,7 +1601,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if s == stop then
break
else
- s = s.next
+ s = getnext(s)
end
else
break
@@ -1265,27 +1618,27 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,
if start == stop then
logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
end
end
head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements
+ return head, start, true, nofreplacements, discfound
elseif trace_bugs then
if start == stop then
logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
end
end
end
end
- return head, start, false, 0
+ return head, start, false, 0, false
end
chainmores.gsub_ligature = chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1294,14 +1647,14 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
markanchors = markanchors[markchar]
end
if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -1322,7 +1675,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma = markanchors[anchor]
if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -1349,7 +1702,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
local subtables = currentlookup.subtables
local lookupname = subtables[1]
@@ -1358,14 +1711,14 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
markanchors = markanchors[markchar]
end
if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
if marks[basechar] then
while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
if not marks[basechar] then
break
end
@@ -1378,7 +1731,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
-- todo: like marks a ligatures hash
- local index = start[a_ligacomp]
+ local index = getligaindex(start)
local baseanchors = descriptions[basechar].anchors
if baseanchors then
local baseanchors = baseanchors['baselig']
@@ -1390,7 +1743,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
if ma then
ba = ba[index]
if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -1418,64 +1771,59 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
+ local markchar = getchar(start)
if marks[markchar] then
- -- local alreadydone = markonce and start[a_markmark]
- -- if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
end
end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head, start, true
end
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
- -- elseif trace_marks and trace_details then
- -- logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
- -- end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
@@ -1483,9 +1831,9 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and start[a_cursbase]
+ local alreadydone = cursonce and getprop(start,a_cursbase)
if not alreadydone then
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local exitanchors = lookuphash[lookupname]
@@ -1499,12 +1847,12 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
end
else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
if marks[nextchar] then
-- should not happen (maybe warning)
- nxt = nxt.next
+ nxt = getnext(nxt)
else
local entryanchors = descriptions[nextchar]
if entryanchors then
@@ -1539,7 +1887,7 @@ function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,l
return head, start, done
else
if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
end
return head, start, false
end
@@ -1549,14 +1897,14 @@ end
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
-- untested .. needs checking for the new model
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
if kerns then
kerns = kerns[startchar] -- needed ?
if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns) -- ,characters[startchar])
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
end
@@ -1570,9 +1918,9 @@ chainmores.gpos_single = chainprocs.gpos_single -- okay?
-- when machines become faster i will make a shared function
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = start.next
+ local snext = getnext(start)
if snext then
- local startchar = start.char
+ local startchar = getchar(start)
local subtables = currentlookup.subtables
local lookupname = subtables[1]
local kerns = lookuphash[lookupname]
@@ -1582,12 +1930,12 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
local lookuptype = lookuptypes[lookupname]
local prev, done = start, false
local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
local krn = kerns[nextchar]
if not krn and marks[nextchar] then
prev = snext
- snext = snext.next
+ snext = getnext(snext)
else
if not krn then
-- skip
@@ -1595,15 +1943,15 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if lookuptype == "pair" then
local a, b = krn[2], krn[3]
if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a) -- ,characters[startchar])
if trace_kerns then
logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
end
if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b) -- ,characters[nextchar])
if trace_kerns then
logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
end
@@ -1614,7 +1962,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
if a and a ~= 0 then
local k = setkern(snext,factor,rlmode,a)
if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
end
if b and b ~= 0 then
@@ -1625,7 +1973,7 @@ function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,look
elseif krn ~= 0 then
local k = setkern(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
end
done = true
end
@@ -1677,7 +2025,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- f..l = mid string
if s == 1 then
-- never happens
- match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
@@ -1692,13 +2040,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- match = true
else
local n = f + 1
- last = last.next
+ last = getnext(last)
while n <= l do
if last then
- local id = last.id
+ local id = getid(last)
if id == glyph_code then
- if last.font == currentfont and last.subtype<256 then
- local char = last.char
+ if getfont(last) == currentfont and getsubtype(last)<256 then
+ local char = getchar(last)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1707,10 +2055,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if trace_skips then
show_skip(kind,chainname,char,ck,class)
end
- last = last.next
+ last = getnext(last)
elseif seq[n][char] then
if n < l then
- last = last.next
+ last = getnext(last)
end
n = n + 1
else
@@ -1726,7 +2074,36 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id == disc_code then
- last = last.next
+ if check_discretionaries then
+ local replace = getfield(last,"replace")
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ replace = getnext(replace)
+ if not replace then
+ break
+ elseif n > l then
+ -- match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ if not match then
+ break
+ elseif check_discretionaries == "trace" then
+ report_chain("check disc action in current")
+ end
+ else
+ last = getnext(last) -- no skipping here
+ end
+ else
+ last = getnext(last) -- no skipping here
+ end
else
match = false
break
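-- Sketch of the replace-text matching done in the disc branch above: seq[n] is
-- the set of characters allowed at position n of the rule, and the characters in
-- the disc's replace list are checked against it. Plain Lua tables, hypothetical
-- helper name.

local function match_replace(replace, seq, n, l)
    while replace do
        if seq[n] and seq[n][replace.char] then
            n = n + 1
            replace = replace.next
            if not replace or n > l then break end
        else
            return false, n
        end
    end
    return true, n
end

local r = { char = 102, next = { char = 105 } } -- "f" "i"
print(match_replace(r, { [1] = { [102] = true }, [2] = { [105] = true } }, 1, 2)) --> true  3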
@@ -1740,15 +2117,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
-- before
if match and f > 1 then
- local prev = start.prev
+ local prev = getprev(start)
if prev then
local n = f-1
while n >= 1 do
if prev then
- local id = prev.id
+ local id = getid(prev)
if id == glyph_code then
- if prev.font == currentfont and prev.subtype<256 then -- normal char
- local char = prev.char
+ if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
+ local char = getchar(prev)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1772,44 +2149,69 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id == disc_code then
- -- skip 'm
+                        -- the special case: f i where i becomes dotless i ..
+ if check_discretionaries then
+ local replace = getfield(prev,"replace")
+ if replace then
+ -- we seldom enter this branch (e.g. on brill efficient)
+ replace = find_node_tail(replace)
+ local finish = getprev(replace)
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n - 1
+ replace = getprev(replace)
+ if not replace or replace == finish then
+ break
+ elseif n < 1 then
+ -- match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ if not match then
+ break
+ elseif check_discretionaries == "trace" then
+ report_chain("check disc action in before")
+ end
+ else
+ -- skip 'm
+ end
+ else
+ -- skip 'm
+ end
elseif seq[n][32] then
n = n -1
else
match = false
break
end
- prev = prev.prev
+ prev = getprev(prev)
elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
+ n = n - 1
else
match = false
break
end
end
- elseif f == 2 then
- match = seq[1][32]
else
- for n=f-1,1 do
- if not seq[n][32] then
- match = false
- break
- end
- end
+ match = false
end
end
-- after
if match and s > l then
- local current = last and last.next
+ local current = last and getnext(last)
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
while n <= s do
if current then
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- if current.font == currentfont and current.subtype<256 then -- normal char
- local char = current.char
+ if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
+ local char = getchar(current)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
@@ -1833,14 +2235,42 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
elseif id == disc_code then
- -- skip 'm
+ if check_discretionaries then
+ local replace = getfield(current,"replace")
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ replace = getnext(replace)
+ if not replace then
+ break
+ elseif n > s then
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ if not match then
+ break
+ elseif check_discretionaries == "trace" then
+ report_chain("check disc action in after")
+ end
+ else
+ -- skip 'm
+ end
+ else
+ -- skip 'm
+ end
elseif seq[n][32] then -- brrr
n = n + 1
else
match = false
break
end
- current = current.next
+ current = getnext(current)
elseif seq[n][32] then
n = n + 1
else
@@ -1848,15 +2278,8 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
end
- elseif s-l == 1 then
- match = seq[s][32]
else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
+ match = false
end
end
end
@@ -1864,7 +2287,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
-- ck == currentcontext
if trace_contexts then
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
+ local char = getchar(start)
if ck[9] then
logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
@@ -1896,15 +2319,15 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
else
local i = 1
- repeat
+ while true do
if skipped then
while true do
- local char = start.char
+ local char = getchar(start)
local ccd = descriptions[char]
if ccd then
local class = ccd.class
if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
+ start = getnext(start)
else
break
end
@@ -1937,19 +2360,21 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
- if start then
- start = start.next
+ if i > nofchainlookups then
+ break
+ elseif start then
+ start = getnext(start)
else
-- weird
end
- until i > nofchainlookups
+ end
end
else
local replacements = ck[7]
if replacements then
head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
else
- done = true -- can be meant to be skipped
+ done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
if trace_contexts then
logprocess("%s: skipping match",cref(kind,chainname))
end
@@ -2038,14 +2463,21 @@ local autofeatures = fonts.analyzers.features -- was: constants
local function initialize(sequence,script,language,enabled)
local features = sequence.features
if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ end
end
end
+ else
+ -- can't happen
end
end
return false
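-- A minimal sketch of the order-driven check introduced above: features are
-- tested in the order stored with the sequence instead of in (unstable) hash
-- order. Names and data are illustrative.

local function first_enabled(order, features, enabled, script, language)
    for i = 1, #order do
        local kind = order[i]
        if enabled[kind] then
            local scripts   = features[kind]
            local languages = scripts[script] or scripts["*"]
            if languages and (languages[language] or languages["*"]) then
                return kind
            end
        end
    end
end

print(first_enabled(
    { "liga", "kern" },
    { liga = { latn = { dflt = true } }, kern = { ["*"] = { ["*"] = true } } },
    { kern = true },
    "latn", "dflt"
)) --> kern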
@@ -2074,19 +2506,12 @@ function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
}
rs[language] = rl
local sequences = tfmdata.resources.sequences
--- setmetatableindex(rl, function(t,k)
--- if type(k) == "number" then
--- local v = enabled and initialize(sequences[k],script,language,enabled)
--- t[k] = v
--- return v
--- end
--- end)
-for s=1,#sequences do
- local v = enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1] = v
- end
-end
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
end
return rl
end
@@ -2099,12 +2524,12 @@ end
-- if ok then
-- done = true
-- end
--- if start then start = start.next end
+-- if start then start = getnext(start) end
-- else
--- start = start.next
+-- start = getnext(start)
-- end
-- else
--- start = start.next
+-- start = getnext(start)
-- end
-- there will be a new direction parser (pre-parsed etc)
@@ -2114,10 +2539,190 @@ end
-- attr = attr or false
--
-- local a = getattr(start,0)
--- if (a == attr and (not attribute or getattr(start,a_state) == attribute)) or (not attribute or getattr(start,a_state) == attribute) then
+-- if (a == attr and (not attribute or getprop(start,a_state) == attribute)) or (not attribute or getprop(start,a_state) == attribute) then
-- -- the action
-- end
+-- assumptions:
+--
+-- * languages that use complex disc nodes
+
+-- optimization comes later ...
+
+local function kernrun(disc,run) -- we can assume that prev and next are glyphs
+ if trace_kernruns then
+ report_run("kern") -- will be more detailed
+ end
+ --
+ local prev = getprev(disc) -- todo, keep these in the main loop
+ local next = getnext(disc) -- todo, keep these in the main loop
+ --
+ local pre = getfield(disc,"pre")
+ if not pre then
+ -- go on
+ elseif prev then
+ local nest = getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ run(prev,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ run(pre,"preinjections")
+ end
+ --
+ local post = getfield(disc,"post")
+ if not post then
+ -- go on
+ elseif next then
+ local tail = find_node_tail(post)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(post,"postinjections",tail)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(post,"postinjections")
+ end
+ --
+ local replace = getfield(disc,"replace")
+ if not replace then
+        -- this should already be done by discfound
+ elseif prev and next then
+ local tail = find_node_tail(replace)
+ local nest = getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(prev,"replaceinjections",tail)
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ elseif prev then
+ local nest = getprev(replace)
+ setfield(replace,"prev",prev)
+ setfield(prev,"next",replace)
+ run(prev,"replaceinjections")
+ setfield(replace,"prev",nest)
+ setfield(prev,"next",disc)
+ elseif next then
+ local tail = find_node_tail(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ run(replace,"replaceinjections",tail)
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ else
+ run(replace,"replaceinjections")
+ end
+end
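-- A minimal sketch of the splice / run / restore pattern used in kernrun above,
-- with plain Lua tables standing in for nodes and an arbitrary run callback;
-- run_spliced is a hypothetical helper. The real code does this for the pre,
-- post and replace lists of a discretionary so that a positioning pass sees the
-- surrounding glyphs.

local function run_spliced(prev, list, run)
    if not list then return end
    local saved_prev_next = prev.next
    local saved_list_prev = list.prev
    prev.next, list.prev = list, prev                         -- temporarily link list after prev
    run(prev)                                                 -- e.g. a kerning pass over prev .. list
    prev.next, list.prev = saved_prev_next, saved_list_prev   -- restore the original links
end

-- usage with a dummy callback
local prev = { char = 65 }
local pre  = { char = 66 }
run_spliced(prev, pre, function(n) print("running from", n.char) end)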
+
+-- the 'if new' test might be dangerous as luatex will check / set some tail stuff
+-- in a temp node
+
+local function comprun(disc,run)
+ if trace_compruns then
+ report_run("comp: %s",languages.serializediscretionary(disc))
+ end
+ --
+ local pre = getfield(disc,"pre")
+ if pre then
+ local new, done = run(pre)
+ if done then
+ setfield(disc,"pre",new)
+ end
+ end
+ --
+ local post = getfield(disc,"post")
+ if post then
+ local new, done = run(post)
+ if done then
+ setfield(disc,"post",new)
+ end
+ end
+ --
+ local replace = getfield(disc,"replace")
+ if replace then
+ local new, done = run(replace)
+ if done then
+ setfield(disc,"replace",new)
+ end
+ end
+end
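-- The same idea as comprun above, reduced to its core: apply a run over each
-- sublist of a disc and store back the (possibly new) head it returns. Plain
-- Lua tables, illustrative only.

local function run_fields(disc, run)
    for _, field in ipairs { "pre", "post", "replace" } do
        local list = disc[field]
        if list then
            local new, done = run(list)
            if done then
                disc[field] = new
            end
        end
    end
end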
+
+local function testrun(disc,trun,crun)
+ local next = getnext(disc)
+ if next then
+ local replace = getfield(disc,"replace")
+ if replace then
+ local prev = getprev(disc)
+ if prev then
+ -- only look ahead
+ local tail = find_node_tail(replace)
+ -- local nest = getprev(replace)
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ if trun(replace,next) then
+ setfield(disc,"replace",nil) -- beware, side effects of nest so first
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ setfield(next,"prev",tail)
+ setfield(tail,"next",next)
+ setfield(disc,"prev",nil)
+ setfield(disc,"next",nil)
+ flush_node_list(disc)
+ return replace -- restart
+ else
+ setfield(tail,"next",nil)
+ setfield(next,"prev",disc)
+ end
+ else
+ -- weird case
+ end
+ else
+ -- no need
+ end
+ else
+ -- weird case
+ end
+ comprun(disc,crun)
+ return next
+end
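-- Sketch of the decision made in testrun above: if a ligature formed from the
-- disc's replace text would extend past the disc into the following glyphs, the
-- disc is dropped and its replace text is inlined so the normal ligature pass
-- can handle the whole run. Plain Lua tables, hypothetical spans_ligature callback.

local function maybe_inline(disc, spans_ligature)
    local replace, after = disc.replace, disc.next
    if replace and after and spans_ligature(replace, after) then
        local prev, tail = disc.prev, replace
        while tail.next do tail = tail.next end
        if prev then prev.next, replace.prev = replace, prev end
        tail.next, after.prev = after, tail
        disc.replace, disc.prev, disc.next = nil, nil, nil
        return replace       -- restart the pass at the inlined text
    end
    return after             -- otherwise continue after the disc
end

-- usage: "f" in the replace text, "i" after the disc, assumed to form a ligature
local f    = { char = 102 }
local i    = { char = 105 }
local disc = { replace = f, next = i }
print(maybe_inline(disc, function() return true end) == f) --> true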
+
+local function discrun(disc,drun,krun)
+ local next = getnext(disc)
+ local prev = getprev(disc)
+ if trace_discruns then
+ report_run("disc") -- will be more detailed
+ end
+ if next and prev then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ drun(prev)
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ --
+ local pre = getfield(disc,"pre")
+ if not pre then
+ -- go on
+ elseif prev then
+ local nest = getprev(pre)
+ setfield(pre,"prev",prev)
+ setfield(prev,"next",pre)
+ krun(prev,"preinjections")
+ setfield(pre,"prev",nest)
+ setfield(prev,"next",disc)
+ else
+ krun(pre,"preinjections")
+ end
+ return next
+end
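-- How the four helpers above are meant to be combined, mirroring the dispatch in
-- the main loop further down: positioning lookups use kernrun/discrun, ligature
-- substitutions use testrun, everything else uses comprun. The runs table and
-- argument names here are illustrative, not the real API.

local function handle_disc(disc, typ, discretionary, runs)
    if typ == "gpos_single" or typ == "gpos_pair" then
        if discretionary then
            runs.kernrun(disc, runs.k_run)
        else
            runs.discrun(disc, runs.d_run, runs.k_run)
        end
    elseif discretionary then
        if typ == "gsub_ligature" then
            return runs.testrun(disc, runs.t_run, runs.c_run) -- may restart at inlined text
        else
            runs.comprun(disc, runs.c_run)
        end
    end
    return disc.next
end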
+
+-- todo: maybe run lr and rl stretches
+
local function featuresprocessor(head,font,attr)
local lookuphash = lookuphashes[font] -- we can also check sequences here
@@ -2126,6 +2731,8 @@ local function featuresprocessor(head,font,attr)
return head, false
end
+ head = tonut(head)
+
if trace_steps then
checkstep(head)
end
@@ -2139,6 +2746,7 @@ local function featuresprocessor(head,font,attr)
anchorlookups = resources.lookup_to_anchor
lookuptable = resources.lookups
lookuptypes = resources.lookuptypes
+ lookuptags = resources.lookuptags
currentfont = font
rlmode = 0
@@ -2157,41 +2765,53 @@ local function featuresprocessor(head,font,attr)
-- Keeping track of the headnode is needed for devanagari (I generalized it a bit
-- so that multiple cases are also covered.)
+ -- todo: retain prev
+
+    -- We don't go to the next node if a disc node is created, so that we can then
+    -- treat the pre, post and replace. It's a bit of a hack but works out ok for most cases.
+
+ -- there can be less subtype and attr checking in the comprun etc helpers
+
for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
-
- local sequence = dataset[5] -- sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local kind = dataset[4]
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local typ = sequence.type
+ local gpossing = typ == "gpos_single" or typ == "gpos_pair"
+ local subtables = sequence.subtables
+ local handler = handlers[typ]
+
if chain < 0 then
-- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
-- we need to get rid of this slide! probably no longer needed in latest luatex
local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
a = a == attr
else
a = true
end
if a then
+ local char = getchar(start)
for i=1,#subtables do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[start.char]
+ -- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[start]
if lookupmatch then
- head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ -- todo: disc?
+ head, start, success = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
if success then
break
end
@@ -2200,19 +2820,18 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
end
end
- if start then start = start.prev end
+ if start then start = getprev(start) end
else
- start = start.prev
+ start = getprev(start)
end
else
- start = start.prev
+ start = getprev(start)
end
else
- start = start.prev
+ start = getprev(start)
end
end
else
- local handler = handlers[typ]
local ns = #subtables
local start = head -- local ?
rlmode = 0 -- to be checked ?
@@ -2223,125 +2842,187 @@ local function featuresprocessor(head,font,attr)
report_missing_cache(typ,lookupname)
else
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
+                local function c_run(start) -- no need to check for 256; probably the same holds for attr
local head = start
local done = false
while start do
- local id = start.id
- if id == glyph_code and start.font == font and start.subtype <256 then
- local a = start[0]
+ local id = getid(start)
+ if id ~= glyph_code then
+ -- very unlikely
+ start = getnext(start)
+ elseif getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
- local lookupmatch = lookupcache[start.char]
+ local lookupmatch = lookupcache[getchar(start)]
if lookupmatch then
                            -- sequence can go (i.e. could be removed)
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
done = true
end
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ return head, false
end
end
if done then
- success = true
- return head
+ success = true -- needed in this subrun?
end
+ return head, done
end
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
+ local function t_run(start,stop)
+ while start ~= stop do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ end
+ start = getnext(start)
else
- a = not attribute or prev[a_state] == attribute
+ break
end
- if a then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- sequence kan weg
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- success = true
+ end
+ end
+
+ local function d_run(prev) -- we can assume that prev and next are glyphs
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+                        -- sequence can go (i.e. could be removed)
+ local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ end
+
+ local function k_run(sub,injection,last)
+ local a = getattr(sub,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
+ else
+ a = not attribute or getprop(sub,a_state) == attribute
+ end
+ if a then
+                    -- sequence can go (i.e. could be removed)
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local id = getid(n)
+ if id == glyph_code then
+ local lookupmatch = lookupcache[getchar(n)]
+ if lookupmatch then
+ local h, d, ok = handler(sub,n,kind,lookupname,lookupmatch,sequence,lookuphash,1,injection)
+ if ok then
+ done = true
+ success = true
+ end
end
+ else
+ -- message
end
end
- prev.next = disc
- -- next.prev = disc
end
- return next
end
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then -- why a 256 test ...
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
- local lookupmatch = lookupcache[start.char]
+ local char = getchar(start)
+ local lookupmatch = lookupcache[char]
if lookupmatch then
                            -- sequence can go (i.e. could be removed)
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
if ok then
success = true
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ start = getnext(start)
end
elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
- end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
- end
-elseif typ == "gpos_single" or typ == "gpos_pair" then
- kerndisc(start)
- end
- start = start.next
+ local discretionary = getsubtype(start) == discretionary_code
+ if gpossing then
+ if discretionary then
+ kernrun(start,k_run)
+ else
+ discrun(start,d_run,k_run)
+ end
+ start = getnext(start)
+ elseif discretionary then
+ if typ == "gsub_ligature" then
+ start = testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
+ local subtype = getsubtype(start)
if subtype == dir_code then
- local dir = start.dir
+ local dir = getfield(start,"dir")
if dir == "+TRT" or dir == "+TLT" then
topstack = topstack + 1
dirstack[topstack] = dir
@@ -2360,7 +3041,7 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
- local dir = start.dir
+ local dir = getfield(start,"dir")
if dir == "TRT" then
rlparmode = -1
elseif dir == "TLT" then
@@ -2374,39 +3055,44 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start = start.next
+ start = getnext(start)
elseif id == math_code then
- start = end_of_math(start).next
+ start = getnext(end_of_math(start))
else
- start = start.next
+ start = getnext(start)
end
end
end
+
else
- local function subrun(start)
- -- mostly for gsub, gpos would demand a more clever approach
+ local function c_run(start)
local head = start
local done = false
while start do
- local id = start.id
- if id == glyph_code and start.id == font and start.subtype <256 then
- local a = start[0]
+ local id = getid(start)
+ if id ~= glyph_code then
+ -- very unlikely
+ start = getnext(start)
+ elseif getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
+ local char = getchar(start)
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[start.char]
+ -- local lookupmatch = lookupcache[getchar(start)]
+ local lookupmatch = lookupcache[char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
done = true
break
@@ -2419,122 +3105,201 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_missing_cache(typ,lookupname)
end
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ return head, false
end
end
if done then
success = true
- return head
end
+ return head, done
end
- local function kerndisc(disc) -- we can assume that prev and next are glyphs
- local prev = disc.prev
- local next = disc.next
- if prev and next then
- prev.next = next
- -- next.prev = prev
- local a = prev[0]
- if a then
- a = (a == attr) and (not attribute or prev[a_state] == attribute)
- else
- a = not attribute or prev[a_state] == attribute
+ local function d_run(prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ -- brr prev can be disc
+ local char = getchar(prev)
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[prev.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
+ end
+ end
+
+ local function k_run(sub,injection,last)
+ local a = getattr(sub,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
+ else
+ a = not attribute or getprop(sub,a_state) == attribute
+ end
+ if a then
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local id = getid(n)
+ if id == glyph_code then
+ local char = getchar(n)
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local h, d, ok = handler(head,n,kind,lookupname,lookupmatch,sequence,lookuphash,i,injection)
+ if ok then
+ done = true
+ break
+ end
end
+ else
+ report_missing_cache(typ,lookupname)
end
- else
- report_missing_cache(typ,lookupname)
end
+ else
+ -- message
end
end
- prev.next = disc
- -- next.prev = disc
end
- return next
+ end
+
+ local function t_run(start,stop)
+ while start ~= stop do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local char = getchar(start)
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ start = getnext(start)
+ else
+ break
+ end
+ end
end
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- a = not attribute or start[a_state] == attribute
+ a = not attribute or getprop(start,a_state) == attribute
end
if a then
for i=1,ns do
local lookupname = subtables[i]
local lookupcache = lookuphash[lookupname]
if lookupcache then
- local lookupmatch = lookupcache[start.char]
+ local char = getchar(start)
+ local lookupmatch = lookupcache[char]
if lookupmatch then
-- we could move all code inline but that makes things even more unreadable
local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
if ok then
success = true
break
elseif not start then
-- don't ask why ... shouldn't happen
break
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
+ elseif gpossing and zwnjruns and char == zwnj then
+ discrun(start,d_run)
end
else
report_missing_cache(typ,lookupname)
end
end
- if start then start = start.next end
+ if start then start = getnext(start) end
else
- start = start.next
+ start = getnext(start)
end
else
- start = start.next
+ start = getnext(start)
end
elseif id == disc_code then
- -- mostly for gsub
- if start.subtype == discretionary_code then
- local pre = start.pre
- if pre then
- local new = subrun(pre)
- if new then start.pre = new end
- end
- local post = start.post
- if post then
- local new = subrun(post)
- if new then start.post = new end
+ local discretionary = getsubtype(start) == discretionary_code
+ if gpossing then
+ if discretionary then
+ kernrun(start,k_run)
+ else
+ discrun(start,d_run,k_run)
end
- local replace = start.replace
- if replace then
- local new = subrun(replace)
- if new then start.replace = new end
+ start = getnext(start)
+ elseif discretionary then
+ if typ == "gsub_ligature" then
+ start = testrun(start,t_run,c_run)
+ else
+ comprun(start,c_run)
+ start = getnext(start)
end
-elseif typ == "gpos_single" or typ == "gpos_pair" then
- kerndisc(start)
+ else
+ start = getnext(start)
end
- start = start.next
elseif id == whatsit_code then
- local subtype = start.subtype
+ local subtype = getsubtype(start)
if subtype == dir_code then
- local dir = start.dir
+ local dir = getfield(start,"dir")
if dir == "+TRT" or dir == "+TLT" then
topstack = topstack + 1
dirstack[topstack] = dir
@@ -2553,7 +3318,7 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
end
elseif subtype == localpar_code then
- local dir = start.dir
+ local dir = getfield(start,"dir")
if dir == "TRT" then
rlparmode = -1
elseif dir == "TLT" then
@@ -2566,11 +3331,11 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
end
end
- start = start.next
+ start = getnext(start)
elseif id == math_code then
- start = end_of_math(start).next
+ start = getnext(end_of_math(start))
else
- start = start.next
+ start = getnext(start)
end
end
end
@@ -2581,7 +3346,11 @@ elseif typ == "gpos_single" or typ == "gpos_pair" then
if trace_steps then -- ?
registerstep(head)
end
+
end
+
+ head = tonode(head)
+
return head, done
end
@@ -2734,6 +3503,7 @@ local function prepare_contextchains(tfmdata)
local rawdata = tfmdata.shared.rawdata
local resources = rawdata.resources
local lookuphash = resources.lookuphash
+ local lookuptags = resources.lookuptags
local lookups = rawdata.lookups
if lookups then
for lookupname, lookupdata in next, rawdata.lookups do
@@ -2747,7 +3517,7 @@ local function prepare_contextchains(tfmdata)
report_prepare("unsupported format %a",format)
elseif not validformat[lookuptype] then
-- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
else
local contexts = lookuphash[lookupname]
if not contexts then
@@ -2803,7 +3573,7 @@ local function prepare_contextchains(tfmdata)
-- no rules
end
else
- report_prepare("missing lookuptype for lookupname %a",lookupname)
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
end
end
end
diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua
index 217bb7535..ebf36ed45 100644
--- a/tex/context/base/font-otp.lua
+++ b/tex/context/base/font-otp.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['font-otp'] = {
-- todo: pack math (but not that much to share)
--
-- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
+--
+-- unless we sort all hashes we can get a different pack order (no big deal but size can differ)
local next, type = next, type
local sort, concat = table.sort, table.concat
@@ -140,6 +142,11 @@ end
-- return b
-- end
+-- beware: we cannot unpack and repack the same table because then sharing
+-- interferes (we could catch this if needed) .. so for now: save, reload
+-- and repack in such cases (never needed anyway) .. a tricky aspect is that
+-- we then need to sort more thanks to random hashing
+
local function packdata(data)
if data then
-- stripdata(data)
@@ -407,6 +414,14 @@ local function packdata(data)
features[script] = pack_normal(feature)
end
end
+ local order = sequence.order
+ if order then
+ sequence.order = pack_indexed(order)
+ end
+ local markclass = sequence.markclass
+ if markclass then
+ sequence.markclass = pack_boolean(markclass)
+ end
end
end
local lookups = resources.lookups
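-- Minimal sketch of the pack/unpack idea touched above: identical subtables are
-- stored once in a shared pool and replaced by an index, and unpacking restores
-- them by lookup. pack_indexed / unpack_indexed here are simplified stand-ins,
-- not the real packer API.

local pool, index = { }, { }

local function pack_indexed(t)
    local key = table.concat(t, " ")
    local i = index[key]
    if not i then
        i = #pool + 1
        pool[i], index[key] = t, i
    end
    return i
end

local function unpack_indexed(i)
    return pool[i]
end

local a = pack_indexed { "liga", "kern" }
local b = pack_indexed { "liga", "kern" }
print(a == b, table.concat(unpack_indexed(a), ",")) --> true  liga,kern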
@@ -742,27 +757,28 @@ local function unpackdata(data)
rule.replacements = tv
end
end
- local fore = rule.fore
- if fore then
- local tv = tables[fore]
- if tv then
- rule.fore = tv
- end
- end
- local back = rule.back
- if back then
- local tv = tables[back]
- if tv then
- rule.back = tv
- end
- end
- local names = rule.names
- if names then
- local tv = tables[names]
- if tv then
- rule.names = tv
- end
- end
+ -- local fore = rule.fore
+ -- if fore then
+ -- local tv = tables[fore]
+ -- if tv then
+ -- rule.fore = tv
+ -- end
+ -- end
+ -- local back = rule.back
+ -- if back then
+ -- local tv = tables[back]
+ -- if tv then
+ -- rule.back = tv
+ -- end
+ -- end
+ -- local names = rule.names
+ -- if names then
+ -- local tv = tables[names]
+ -- if tv then
+ -- rule.names = tv
+ -- end
+ -- end
+ --
local lookups = rule.lookups
if lookups then
local tv = tables[lookups]
@@ -825,6 +841,20 @@ local function unpackdata(data)
end
end
end
+ local order = feature.order
+ if order then
+ local tv = tables[order]
+ if tv then
+ feature.order = tv
+ end
+ end
+ local markclass = feature.markclass
+ if markclass then
+ local tv = tables[markclass]
+ if tv then
+ feature.markclass = tv
+ end
+ end
end
end
local lookups = resources.lookups
@@ -875,3 +905,4 @@ if otf.enhancers.register then
end
otf.enhancers.unpack = unpackdata -- used elsewhere
+otf.enhancers.pack = packdata -- used elsewhere
diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua
index e3aacd0d1..1f9a1ac04 100644
--- a/tex/context/base/font-ott.lua
+++ b/tex/context/base/font-ott.lua
@@ -42,6 +42,7 @@ local scripts = allocate {
['cprt'] = 'cypriot syllabary',
['cyrl'] = 'cyrillic',
['deva'] = 'devanagari',
+ ['dev2'] = 'devanagari variant 2',
['dsrt'] = 'deseret',
['ethi'] = 'ethiopic',
['geor'] = 'georgian',
@@ -67,6 +68,7 @@ local scripts = allocate {
['linb'] = 'linear b',
['math'] = 'mathematical alphanumeric symbols',
['mlym'] = 'malayalam',
+ ['mlm2'] = 'malayalam variant 2',
['mong'] = 'mongolian',
['musc'] = 'musical symbols',
['mymr'] = 'myanmar',
@@ -631,6 +633,7 @@ local features = allocate {
['js..'] = 'justification ..',
["dv.."] = "devanagari ..",
+ ["ml.."] = "malayalam ..",
}
local baselines = allocate {
@@ -855,15 +858,18 @@ function otf.features.normalize(features)
if uv then
-- report_checks("feature value %a first seen at %a",value,key)
else
- if type(value) == "string" then
+ uv = tonumber(value) -- before boolean as there we also handle 0/1
+ if uv then
+ -- we're okay
+ elseif type(value) == "string" then
local b = is_boolean(value)
if type(b) == "nil" then
- uv = tonumber(value) or lower(value)
+ uv = lower(value)
else
uv = b
end
else
- uv = v
+ uv = value
end
if not rawget(features,k) then
k = rawget(verbosefeatures,k) or k
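-- Sketch of the normalization order introduced above: numeric strings first (so
-- "0"/"1" and "2" become numbers), then booleans, then lowercasing. is_boolean
-- here is a tiny stand-in for the real helper; illustrative only.

local lower = string.lower

local function is_boolean(s)
    s = lower(s)
    if s == "yes" or s == "on"  or s == "true"  then return true  end
    if s == "no"  or s == "off" or s == "false" then return false end
    return nil
end

local function normalize_value(value)
    local uv = tonumber(value)
    if uv ~= nil then
        return uv
    elseif type(value) == "string" then
        local b = is_boolean(value)
        if b == nil then
            return lower(value)
        else
            return b
        end
    else
        return value
    end
end

print(normalize_value("2"), normalize_value("Yes"), normalize_value("SMCP")) --> 2  true  smcp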
diff --git a/tex/context/base/font-otx.lua b/tex/context/base/font-otx.lua
deleted file mode 100644
index f39045223..000000000
--- a/tex/context/base/font-otx.lua
+++ /dev/null
@@ -1,404 +0,0 @@
-if not modules then modules = { } end modules ['font-otx'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- context only
-
-local type = type
-
-if not trackers then trackers = { register = function() end } end
-
------ trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-
-local fonts, nodes, node = fonts, nodes, node
-
-local allocate = utilities.storage.allocate
-
-local otf = fonts.handlers.otf
-
-local analyzers = fonts.analyzers
-local initializers = allocate()
-local methods = allocate()
-
-analyzers.initializers = initializers
-analyzers.methods = methods
----------.useunicodemarks = false
-
-local a_state = attributes.private('state')
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local math_code = nodecodes.math
-
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-local end_of_math = node.end_of_math
-
-local fontdata = fonts.hashes.identifiers
-local categories = characters and characters.categories or { } -- sorry, only in context
-local chardata = characters and characters.data
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
-
--- never use these numbers directly
-
-local s_init = 1 local s_rphf = 7
-local s_medi = 2 local s_half = 8
-local s_fina = 3 local s_pref = 9
-local s_isol = 4 local s_blwf = 10
-local s_mark = 5 local s_pstf = 11
-local s_rest = 6
-
-local states = {
- init = s_init,
- medi = s_medi,
- fina = s_fina,
- isol = s_isol,
- mark = s_mark,
- rest = s_rest,
- rphf = s_rphf,
- half = s_half,
- pref = s_pref,
- blwf = s_blwf,
- pstf = s_pstf,
-}
-
-local features = {
- init = s_init,
- medi = s_medi,
- fina = s_fina,
- isol = s_isol,
- -- mark = s_mark,
- -- rest = s_rest,
- rphf = s_rphf,
- half = s_half,
- pref = s_pref,
- blwf = s_blwf,
- pstf = s_pstf,
-}
-
-analyzers.states = states
-analyzers.features = features
-analyzers.useunicodemarks = false
-
--- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
--- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-
-function analyzers.setstate(head,font)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local descriptions = tfmdata.descriptions
- local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- while current do
- local id = current.id
- if id == glyph_code and current.font == font then
- done = true
- local char = current.char
- local d = descriptions[char]
- if d then
- if d.class == "mark" then
- done = true
- current[a_state] = s_mark
- elseif useunicodemarks and categories[char] == "mn" then
- done = true
- current[a_state] = s_mark
- elseif n == 0 then
- first, last, n = current, current, 1
- current[a_state] = s_init
- else
- last, n = current, n+1
- current[a_state] = s_medi
- end
- else -- finish
- if first and first == last then
- last[a_state] = s_isol
- elseif last then
- last[a_state] = s_fina
- end
- first, last, n = nil, nil, 0
- end
- elseif id == disc_code then
- -- always in the middle
- current[a_state] = s_medi
- last = current
- else -- finish
- if first and first == last then
- last[a_state] = s_isol
- elseif last then
- last[a_state] = s_fina
- end
- first, last, n = nil, nil, 0
- if id == math_code then
- current = end_of_math(current)
- end
- end
- current = current.next
- end
- if first and first == last then
- last[a_state] = s_isol
- elseif last then
- last[a_state] = s_fina
- end
- return head, done
-end
-
--- in the future we will use language/script attributes instead of the
--- font related value, but then we also need dynamic features which is
--- somewhat slower; and .. we need a chain of them
-
-local function analyzeinitializer(tfmdata,value) -- attr
- local script, language = otf.scriptandlanguage(tfmdata) -- attr
- local action = initializers[script]
- if not action then
- -- skip
- elseif type(action) == "function" then
- return action(tfmdata,value)
- else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
- end
-end
-
-local function analyzeprocessor(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language = otf.scriptandlanguage(tfmdata,attr)
- local action = methods[script]
- if not action then
- -- skip
- elseif type(action) == "function" then
- return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
- end
- return head, false
-end
-
-registerotffeature {
- name = "analyze",
- description = "analysis of character classes",
- default = true,
- initializers = {
- node = analyzeinitializer,
- },
- processors = {
- position = 1,
- node = analyzeprocessor,
- }
-}
-
--- latin
-
-methods.latn = analyzers.setstate
-
-local arab_warned = { }
-
-local function warning(current,what)
- local char = current.char
- if not arab_warned[char] then
- log.report("analyze","arab: character %C has no %a class",char,what)
- arab_warned[char] = true
- end
-end
-
-local mappers = {
- l = s_init, -- left
- d = s_medi, -- double
- c = s_medi, -- joiner
- r = s_fina, -- right
- u = s_isol, -- nonjoiner
-}
-
-local classifiers = { } -- we can also use this trick for devanagari
-
-local first_arabic, last_arabic = characters.blockrange("arabic")
-local first_syriac, last_syriac = characters.blockrange("syriac")
-local first_mandiac, last_mandiac = characters.blockrange("mandiac")
-local first_nko, last_nko = characters.blockrange("nko")
-
-table.setmetatableindex(classifiers,function(t,k)
- local c = chardata[k]
- local v = false
- if c then
- local arabic = c.arabic
- if arabic then
- v = mappers[arabic]
- if not v then
- log.report("analyze","error in mapping arabic %C",k)
- -- error
- v = false
- end
- elseif k >= first_arabic and k <= last_arabic or k >= first_syriac and k <= last_syriac or
- k >= first_mandiac and k <= last_mandiac or k >= first_nko and k <= last_nko then
- if categories[k] == "mn" then
- v = s_mark
- else
- v = s_rest
- end
- else
- end
- end
- t[k] = v
- return v
-end)
-
-function methods.arab(head,font,attr)
- local first, last = nil, nil
- local c_first, c_last = nil, nil
- local current, done = head, false
- while current do
- local id = current.id
- if id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
- done = true
- local char = current.char
- local classifier = classifiers[char]
- if not classifier then
- if last then
- if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
- else
- warning(last,"fina")
- last[a_state] = s_error
- end
- first, last = nil, nil
- elseif first then
- if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- first = nil
- end
- elseif classifier == s_mark then
- current[a_state] = s_mark
- elseif classifier == s_isol then
- if last then
- if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
- else
- warning(last,"fina")
- last[a_state] = s_error
- end
- first, last = nil, nil
- elseif first then
- if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- first = nil
- end
- current[a_state] = s_isol
- elseif classifier == s_medi then
- if first then
- last = current
- c_last = classifier
- current[a_state] = s_medi
- else
- current[a_state] = s_init
- first = current
- c_first = classifier
- end
- elseif classifier == s_fina then
- if last then
- if last[a_state] ~= s_init then
- last[a_state] = s_medi
- end
- current[a_state] = s_fina
- first, last = nil, nil
- elseif first then
- -- if first[a_state] ~= s_init then
- -- -- needs checking
- -- first[a_state] = s_medi
- -- end
- current[a_state] = s_fina
- first = nil
- else
- current[a_state] = s_isol
- end
- else -- classifier == s_rest
- current[a_state] = s_rest
- if last then
- if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
- else
- warning(last,"fina")
- last[a_state] = s_error
- end
- first, last = nil, nil
- elseif first then
- if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- first = nil
- end
- end
- else
- if last then
- if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
- else
- warning(last,"fina")
- last[a_state] = s_error
- end
- first, last = nil, nil
- elseif first then
- if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- first = nil
- end
- if id == math_code then -- a bit duplicate as we test for glyphs twice
- current = end_of_math(current)
- end
- end
- current = current.next
- end
- if last then
- if c_last == s_medi or c_last == s_fina then
- last[a_state] = s_fina
- else
- warning(last,"fina")
- last[a_state] = s_error
- end
- elseif first then
- if c_first == s_medi or c_first == s_fina then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- end
- return head, done
-end
-
-methods.syrc = methods.arab
-methods.mand = methods.arab
-methods.nko = methods.arab
-
-directives.register("otf.analyze.useunicodemarks",function(v)
- analyzers.useunicodemarks = v
-end)
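
Editor's note: the font-otx.lua analyzer removed above assigns per-glyph states: the first glyph of a run gets init, inner glyphs medi, the final glyph is re-tagged fina when the run ends, a lone glyph becomes isol, and marks get their own state without opening or closing a run. Below is a hypothetical re-creation of just that run logic over a plain Lua array; the real code walks node lists and per-font descriptions.

    -- hypothetical sketch of the init/medi/fina/isol tagging in setstate above
    local s_init, s_medi, s_fina, s_isol, s_mark = 1, 2, 3, 4, 5

    local function tagrun(classes)          -- classes[i] == "mark" for combining marks
        local states, first, last = { }, nil, nil
        for i = 1, #classes do
            if classes[i] == "mark" then
                states[i] = s_mark          -- marks never start or end a run
            elseif not first then
                first, last = i, i
                states[i] = s_init
            else
                last = i
                states[i] = s_medi
            end
        end
        if first and first == last then
            states[first] = s_isol          -- a lone glyph is isolated
        elseif last then
            states[last] = s_fina           -- the run ends in a final form
        end
        return states
    end

    -- tagrun { "base", "base", "base" } --> { 1, 2, 3 }  (init medi fina)
    -- tagrun { "base" }                 --> { 4 }        (isol)
    -- tagrun { "base", "mark", "base" } --> { 1, 5, 3 }  (marks are transparent)
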
diff --git a/tex/context/base/font-pat.lua b/tex/context/base/font-pat.lua
index 9733c9ada..049853796 100644
--- a/tex/context/base/font-pat.lua
+++ b/tex/context/base/font-pat.lua
@@ -38,7 +38,7 @@ local report = patches.report
-- library) the palatino arabic fonts don't have the mkmk features properly
-- set up.
-register("after","rehash features","^palatino.*arabic", function patch(data,filename)
+register("after","rehash features","^palatino.*arabic", function (data,filename)
local gpos = data.gpos
if gpos then
for k=1,#gpos do
diff --git a/tex/context/base/font-pre.mkiv b/tex/context/base/font-pre.mkiv
index c404771fd..157b4585a 100644
--- a/tex/context/base/font-pre.mkiv
+++ b/tex/context/base/font-pre.mkiv
@@ -100,14 +100,14 @@
features=no]
\definefontfeature
- [semetic-complete]
+ [semitic-complete]
[mode=node,analyze=yes,language=dflt,ccmp=yes,
init=yes,medi=yes,fina=yes,isol=yes,
mark=yes,mkmk=yes,kern=yes,curs=yes,
liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes]
\definefontfeature
- [semetic-simple]
+ [semitic-simple]
[mode=node,analyze=yes,language=dflt,ccmp=yes,
init=yes,medi=yes,fina=yes,isol=yes,
mark=yes,mkmk=yes,kern=yes,curs=yes,
@@ -115,22 +115,22 @@
\definefontfeature
[arabic]
- [semetic-complete]
+ [semitic-complete]
[script=arab]
\definefontfeature
[hebrew]
- [semetic-complete]
+ [semitic-complete]
[script=hebr]
\definefontfeature
[simplearabic]
- [semetic-simple]
+ [semitic-simple]
[script=arab]
\definefontfeature
[simplehebrew]
- [semetic-simple]
+ [semitic-simple]
[script=hebr]
% \definefont [DevaOne] [file:chandas.ttf*devanagari-one at 12pt]
@@ -170,11 +170,55 @@
kern=yes]
\definefontfeature
+ [malayalam-one]
+ [mode=node,
+ language=dflt,
+ script=mlym,
+ akhn=yes,
+ blwf=yes,
+ half=yes,
+ pres=yes,
+ blws=yes,
+ psts=yes,
+ haln=no]
+
+\definefontfeature
+ [malayalam-two]
+ [malayalam-one]
+ [script=mlm2]
+
+\definefontfeature
[jamoforms]
[ljmo=yes,
tjmo=yes,
vjmo=yes]
+% cjk
+
+% \definefontfeature
+% [japanese]
+% [default]
+% [language=jan]
+
+% \definefontfeature
+% [simplified-chinese]
+% [default]
+% [language=zhs]
+
+% \definefontfeature
+% [traditional-chinese]
+% [default]
+% [language=zht]
+
+% \definefontfeature
+% [chinese]
+% [simplified-chinese]
+
+% \definefontfeature
+% [korean]
+% [default]
+% [language=kor]
+
% symbols:
\definefontfeature
@@ -306,6 +350,23 @@
\definecolor[font:8] [g=.75]
\definecolor[font:9] [b=.75]
+\definecolor[f:r:t][a=1,t=.25,r=1]
+\definecolor[f:g:t][a=1,t=.25,g=1]
+\definecolor[f:b:t][a=1,t=.25,b=1]
+\definecolor[f:c:t][a=1,t=.25,c=1]
+\definecolor[f:m:t][a=1,t=.25,m=1]
+\definecolor[f:y:t][a=1,t=.25,y=1]
+\definecolor[f:k:t][a=1,t=.25,s=0]
+\definecolor[f:s:t][a=1,t=.25,s=0]
+
+\definepalet % weird place
+ [layout]
+ [grid=trace:dr,
+ page=trace:dg,
+ profile=f:s:t,
+ one=f:y:t,
+ mix=f:b:t]
+
%D Now we're up to some definitions.
\definebodyfontenvironment
@@ -478,15 +539,16 @@
\definefontsize[\s!a] \definefontsize[\s!b]
\definefontsize[\s!c] \definefontsize[\s!d]
-\definealternativestyle [\v!mediaeval] [\os] []
-\definealternativestyle [\v!normal] [\tf] []
-\definealternativestyle [\v!bold] [\bf] []
-\definealternativestyle [\v!type] [\tt] []
-\definealternativestyle [\v!mono] [\tt] []
-\definealternativestyle [\v!slanted] [\sl] []
-\definealternativestyle [\v!italic] [\it] []
-\definealternativestyle [\v!boldslanted,\v!slantedbold] [\bs] []
-\definealternativestyle [\v!bolditalic,\v!italicbold] [\bi] []
+\definealternativestyle [\v!mediaeval] [\os] []
+\definealternativestyle [\v!normal] [\tf] []
+\definealternativestyle [\v!bold] [\bf] []
+\definealternativestyle [\v!type] [\tt] []
+\definealternativestyle [\v!mono] [\tt] []
+\definealternativestyle [\v!monobold] [\tt\bf] []
+\definealternativestyle [\v!slanted] [\sl] []
+\definealternativestyle [\v!italic] [\it] []
+\definealternativestyle [\v!boldslanted,\v!slantedbold] [\bs] []
+\definealternativestyle [\v!bolditalic,\v!italicbold] [\bi] []
\definealternativestyle [\v!small,\v!smallnormal] [\setsmallbodyfont\tf] []
\definealternativestyle [\v!smallbold] [\setsmallbodyfont\bf] []
@@ -499,11 +561,17 @@
\definealternativestyle [\v!smaller] [\setsmallbodyfont\tf] []
\definealternativestyle [\v!sans,\v!sansserif] [\ss] []
-\definealternativestyle [\v!sansbold] [\ss\bf] []
\definealternativestyle [\v!roman,\v!serif,\v!regular] [\rm]
\definealternativestyle [\v!handwritten] [\hw]
\definealternativestyle [\v!calligraphic] [\cg]
+\definealternativestyle [\v!sansnormal] [\ss\tf] []
+\definealternativestyle [\v!sansbold] [\ss\bf] []
+\definealternativestyle [\v!serifnormal] [\rm\tf] []
+\definealternativestyle [\v!serifbold] [\rm\bf] []
+\definealternativestyle [\v!mononormal] [\tt\tf] []
+\definealternativestyle [\v!monobold] [\tt\bf] []
+
% % maybe we need interface neutral as well (for use in cld):
%
% \letcscsname\mediaeval \csname\v!mediaeval \endcsname
@@ -566,6 +634,7 @@
\definefontfeature[f:smallcaps][smcp=yes]
\definefontfeature[f:oldstyle] [onum=yes]
\definefontfeature[f:tabular] [tnum=yes]
+\definefontfeature[f:superiors][sups=yes]
\definealternativestyle [\v!smallcaps] [\setsmallcaps] [\setsmallcaps]
\definealternativestyle [\v!oldstyle] [\setoldstyle ] [\setoldstyle ]
@@ -573,6 +642,7 @@
\unexpanded\def\setsmallcaps{\doaddfeature{f:smallcaps}}
\unexpanded\def\setoldstyle {\doaddfeature{f:oldstyle}}
\unexpanded\def\settabular {\doaddfeature{f:tabular}}
+\unexpanded\def\setsuperiors{\doaddfeature{f:superiors}}
%D \macros
%D {tinyfont}
@@ -599,6 +669,22 @@
\definefont[infofont][dejavusansmono at 6pt] % todo \the\everybodyfont
+%D Optimization (later we overload in math):
+
+\unexpanded\def\normaltf{\let\fontalternative\s!tf\font_helpers_synchronize_font}
+\unexpanded\def\normalbf{\let\fontalternative\s!bf\font_helpers_synchronize_font}
+\unexpanded\def\normalit{\let\fontalternative\s!it\font_helpers_synchronize_font}
+\unexpanded\def\normalsl{\let\fontalternative\s!sl\font_helpers_synchronize_font}
+\unexpanded\def\normalbi{\let\fontalternative\s!bi\font_helpers_synchronize_font}
+\unexpanded\def\normalbs{\let\fontalternative\s!bs\font_helpers_synchronize_font}
+
+\let\tf\normaltf
+\let\bf\normalbf
+\let\it\normalit
+\let\sl\normalsl
+\let\bi\normalbi
+\let\bs\normalbs
+
\protect \endinput
% LM math vs CM math (analysis by Taco):
@@ -634,3 +720,30 @@
%
% /lessorequalslant
% /greaterorequalslant
+
+% \unprotect
+%
+% \definehighlight[\v!italic ][\c!command=\v!no,\c!style=\v!italic]
+% \definehighlight[\v!bold ][\c!command=\v!no,\c!style=\v!bold]
+% \definehighlight[\v!bolditalic][\c!command=\v!no,\c!style=\v!bolditalic]
+% \definehighlight[\v!mono] [\c!command=\v!no,\c!style=\v!mono]
+% \definehighlight[\v!monobold] [\c!command=\v!no,\c!style=\v!monobold]
+%
+% \definehighlight[important] [\c!command=\v!no,\c!style=\v!bold]
+% \definehighlight[unimportant] [\c!command=\v!no,\c!color=darkgray]
+% \definehighlight[warning] [\c!command=\v!no,\c!style=\v!bold,\c!color=darkblue]
+% \definehighlight[error] [\c!command=\v!no,\c!style=\v!bold,\c!color=darkred]
+%
+% \protect
+%
+% \starttext
+% \highlight[italic] {italic}
+% \highlight[bolditalic] {bolditalic}
+% \highlight[bold] {bold}
+% \highlight[mono] {mono}
+% \highlight[monobold] {monobold}
+% \highlight[important] {important}
+% \highlight[unimportant]{unimportant}
+% \highlight[warning] {warning}
+% \highlight[error] {error}
+% \stoptext
diff --git a/tex/context/base/font-run.mkiv b/tex/context/base/font-run.mkiv
index 66f1acc91..1b8843b94 100644
--- a/tex/context/base/font-run.mkiv
+++ b/tex/context/base/font-run.mkiv
@@ -12,8 +12,8 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D [This code is hooked into the core macros and saves some
-%D format space. It needs a cleanup.]
+%D [This code is hooked into the core macros and saves some format
+%D space. It needs a cleanup.]
\unprotect
@@ -100,8 +100,8 @@
&&\tttf\tx\s!text&&\tttf\tx\s!script&&\tttf\tx\s!scriptscript
&&\tttf\tx\s!x&&\tttf\tx\s!xx&&\tttf\tx\v!small&&\tttf\tx\v!big
&&\tttf\tx\c!interlinespace&\cr
- \noalign{\hrule}
- \@EA\globalprocesscommalist\@EA[\bodyfontenvironmentlist]\next}}
+ \noalign{\hrule}%
+ \processbodyfontenvironmentlist\next}}
\ifinsidefloat\else\stopbaselinecorrection\fi}
\unexpanded\gdef\showfont
@@ -311,7 +311,7 @@
\ifdefined\databox \else \newbox\databox \fi
-\unexpanded\gdef\testminimalbaseline#1%
+\unexpanded\gdef\testminimalbaseline#1% to be used in a tabulate
{\setbox\databox\ruledhbox{#1}%
\scratchdimen\ht\databox
\advance\scratchdimen\dp\databox
diff --git a/tex/context/base/font-sel.lua b/tex/context/base/font-sel.lua
index 2881917eb..c3431a213 100644
--- a/tex/context/base/font-sel.lua
+++ b/tex/context/base/font-sel.lua
@@ -20,6 +20,8 @@ local v_simplefonts = interfaces.variables.simplefonts
local v_selectfont = interfaces.variables.selectfont
local v_default = interfaces.variables.default
+local implement = interfaces.implement
+
local selectfont = fonts.select or { }
fonts.select = selectfont
@@ -174,28 +176,58 @@ local names = {
["heavyitalic"] = { "heavyitalic" },
},
["default"] = { -- weight, width, italic
- ["thin"] = { weight = { 100, 200, 300, 400, 500 }, width = 5, italic = false },
- ["extralight"] = { weight = { 200, 100, 300, 400, 500 }, width = 5, italic = false },
- ["light"] = { weight = { 300, 200, 100, 400, 500 }, width = 5, italic = false },
- ["regular"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = false },
- ["italic"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = true },
- ["medium"] = { weight = { 500, 400, 300, 200, 100 }, width = 5, italic = false },
- ["demibold"] = { weight = { 600, 700, 800, 900 }, width = 5, italic = false },
- ["bold"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = false },
- ["bolditalic"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = true },
- ["smallcaps"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = false },
- ["heavy"] = { weight = { 800, 900, 700, 600 }, width = 5, italic = false },
- ["black"] = { weight = { 900, 800, 700, 600 }, width = 5, italic = false },
+ ["thin"] = { weight = { 100, 200, 300, 400, 500 }, width = 5, italic = false },
+ ["thinitalic"] = { weight = { 100, 200, 300, 400, 500 }, width = 5, italic = true },
+ ["extralight"] = { weight = { 200, 100, 300, 400, 500 }, width = 5, italic = false },
+ ["extralightitalic"] = { weight = { 200, 100, 300, 400, 500 }, width = 5, italic = true },
+ ["light"] = { weight = { 300, 200, 100, 400, 500 }, width = 5, italic = false },
+ ["lightitalic"] = { weight = { 300, 200, 100, 400, 500 }, width = 5, italic = true },
+ ["regular"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = false },
+ ["italic"] = { weight = { 400, 500, 300, 200, 100 }, width = 5, italic = true },
+ ["medium"] = { weight = { 500, 400, 300, 200, 100 }, width = 5, italic = false },
+ ["mediumitalic"] = { weight = { 500, 400, 300, 200, 100 }, width = 5, italic = true },
+ ["demibold"] = { weight = { 600, 700, 800, 900 }, width = 5, italic = false },
+ ["demibolditalic"] = { weight = { 600, 700, 800, 900 }, width = 5, italic = true },
+ ["bold"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = false },
+ ["bolditalic"] = { weight = { 700, 600, 800, 900 }, width = 5, italic = true },
+ ["extrabold"] = { weight = { 800, 900, 700, 600 }, width = 5, italic = false },
+ ["extrabolditalic"] = { weight = { 800, 900, 700, 600 }, width = 5, italic = true },
+ ["heavy"] = { weight = { 900, 800, 700, 600 }, width = 5, italic = false },
+ ["heavyitalic"] = { weight = { 900, 800, 700, 600 }, width = 5, italic = true },
}
}
-names.simplefonts.slanted = names.simplefonts.italic
-names.simplefonts.boldslanted = names.simplefonts.bolditalic
+-- simplefonts synonyms
+
+names.simplefonts.slanted = names.simplefonts.italic
+names.simplefonts.boldslanted = names.simplefonts.bolditalic
+
+-- default synonyms
+
+names.default.ultralight = names.default.extralight
+names.default.semibold = names.default.demibold
+names.default.ultrabold = names.default.extrabold
+names.default.black = names.default.heavy
+
+names.default.ultralightitalic = names.default.extralightitalic
+names.default.semibolditalic = names.default.demibolditalic
+names.default.ultrabolditalic = names.default.extrabolditalic
+names.default.blackitalic = names.default.heavyitalic
-names.default.normal = names.default.regular
-names.default.slanted = names.default.italic
-names.default.semibold = names.default.demibold
-names.default.boldslanted = names.default.bolditalic
+names.default.thinslanted = names.default.thinitalic
+names.default.extralightslanted = names.default.extralightitalic
+names.default.ultralightslanted = names.default.extralightitalic
+names.default.lightslanted = names.default.lightitalic
+names.default.slanted = names.default.italic
+names.default.demiboldslanted = names.default.demibolditalic
+names.default.semiboldslanted = names.default.demibolditalic
+names.default.boldslanted = names.default.bolditalic
+names.default.extraboldslanted = names.default.extrabolditalic
+names.default.ultraboldslanted = names.default.extrabolditalic
+names.default.heavyslanted = names.default.heavyitalic
+names.default.blackslanted = names.default.heavyitalic
+
+names.default.smallcaps = names.default.regular
local mathsettings = {
["asanamath"] = {
@@ -269,12 +301,12 @@ local mathsettings = {
},
}
-function commands.defineselectfont(settings)
+function selectfont.define(settings)
local index = #data + 1
data[index] = settings
selectfont.searchfiles(index)
selectfont.filterinput(index)
- context(index)
+ return index
end
local function savefont(data,alternative,entries)
@@ -463,6 +495,14 @@ function selectfont.filterinput(index)
end
end
+local ctx_definefontsynonym = context.definefontsynonym
+local ctx_resetfontfallback = context.resetfontfallback
+local ctx_startfontclass = context.startfontclass
+local ctx_stopfontclass = context.stopfontclass
+local ctx_loadfontgoodies = context.loadfontgoodies
+local ctx_definefontfallback = context.definefontfallback
+local ctx_definetypeface = context.definetypeface
+
local function definefontsynonym(data,alternative,index,fallback)
local fontdata = data.fonts and data.fonts[alternative]
local style = data.metadata.style
@@ -494,22 +534,26 @@ local function definefontsynonym(data,alternative,index,fallback)
end
for _, entry in next, fontdata do
local designsize = entry["designsize"] or 100
- if designsize == 100 or designsize == 120 or designsize == 0 or #fontdata == 1 then
+ if designsize == 100 or designsize == 110 or designsize == 120 or designsize == 0 or #fontdata == 1 then
local filepath, filename = splitbase(entry["filename"])
+ if entry["format"] == "ttc" or entry["format"] == "dfont" then
+ filename = formatters["%s(%s)"](filename, entry["rawname"])
+ end
registerdesignsizes( fontfile, "default", filename )
break
end
end
if fallback then
- context.definefontsynonym( { fontname }, { fontfile }, { features = features } )
+ -- can we use
+ ctx_definefontsynonym( { fontname }, { fontfile }, { features = features } )
else
- context.definefontsynonym( { fontname }, { fontfile }, { features = features, fallbacks = fontfile, goodies = goodies } )
+ ctx_definefontsynonym( { fontname }, { fontfile }, { features = features, fallbacks = fontfile, goodies = goodies } )
end
else
if fallback then
- context.definefontsynonym( { fontname }, { fontparent }, { features = features } )
+ ctx_definefontsynonym( { fontname }, { fontparent }, { features = features } )
else
- context.definefontsynonym( { fontname }, { fontparent }, { features = features, fallbacks = fontfile, goodies = goodies } )
+ ctx_definefontsynonym( { fontname }, { fontparent }, { features = features, fallbacks = fontfile, goodies = goodies } )
end
end
end
@@ -522,11 +566,11 @@ local function definetypescript(index)
local typeface = data.metadata.typeface
local style = data.metadata.style
if entry and entry["tf"] then
- context.startfontclass( { typeface } )
+ ctx_startfontclass( { typeface } )
if goodies ~= "" then
goodies = utilities.parsers.settings_to_array(goodies)
for _, goodie in next, goodies do
- context.loadfontgoodies( { goodie } )
+ ctx_loadfontgoodies( { goodie } )
end
end
for alternative, _ in next, alternatives do
@@ -534,7 +578,7 @@ local function definetypescript(index)
definefontsynonym(data,alternative)
end
end
- context.stopfontclass()
+ ctx_stopfontclass()
else
-- regular style not available, loading aborted
end
@@ -565,20 +609,20 @@ local function definetextfontfallback(data,alternative,index)
local synonym = formatters["%s-%s-%s-fallback-%s"](typeface, style, alternative, index)
local fallback = formatters["%s-%s-%s"] (typeface, style, alternative)
if index == 1 then
- context.resetfontfallback( { fallback } )
+ ctx_resetfontfallback( { fallback } )
end
- context.definefontfallback( { fallback }, { synonym }, { range }, { rscale = rscale, check = check, force = force } )
+ ctx_definefontfallback( { fallback }, { synonym }, { range }, { rscale = rscale, check = check, force = force } )
end
local function definetextfallback(entry,index)
local data = data[index]
local typeface = data.metadata.typeface
- context.startfontclass( { typeface } )
+ ctx_startfontclass( { typeface } )
for alternative, _ in next, alternatives do
definefontsynonym (data,alternative,entry,true)
definetextfontfallback(data,alternative,entry)
end
- context.stopfontclass()
+ ctx_stopfontclass()
-- inspect(data)
end
@@ -594,14 +638,14 @@ local function definemathfontfallback(data,alternative,index)
local fontdata = data.fonts and data.fonts[alternative]
local fallback = formatters["%s-%s-%s"](typeface, style, alternative)
if index == 1 then
- context.resetfontfallback( { fallback } )
+ ctx_resetfontfallback( { fallback } )
end
if fontdata and #fontdata > 0 then
for _, entry in next, fontdata do
local filename = entry["filename"]
local designsize = entry["designsize"] or 100
- if designsize == 100 or designsize == 120 or designsize == 0 or #fontdata == 1 then
- context.definefontfallback( { fallback }, { formatters["file:%s*%s"](filename,features) }, { range }, { rscale = rscale, check = check, force = force, offset = offset } )
+ if designsize == 100 or designsize == 110 or designsize == 120 or designsize == 0 or #fontdata == 1 then
+ ctx_definefontfallback( { fallback }, { formatters["file:%s*%s"](filename,features) }, { range }, { rscale = rscale, check = check, force = force, offset = offset } )
break
end
end
@@ -612,13 +656,13 @@ local function definemathfallback(entry,index)
local data = data[index]
local typeface = data.metadata.typeface
local style = data.metadata.style
- context.startfontclass( { typeface } )
+ ctx_startfontclass( { typeface } )
for alternative, _ in next, alternatives do
if synonyms[style][alternative] then
definemathfontfallback(data,alternative,entry)
end
end
- context.stopfontclass()
+ ctx_stopfontclass()
-- inspect(data)
end
@@ -646,7 +690,7 @@ local function definetextfont(index)
local style = styles[data.metadata.style]
local designsize = data.options.opticals == v_yes and "auto" or "default"
local scale = data.options.scale ~= "" and data.options.scale or 1
- context.definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { designsize = designsize, rscale = scale } )
+ ctx_definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { designsize = designsize, rscale = scale } )
end
local function definemathfont(index)
@@ -658,9 +702,9 @@ local function definemathfont(index)
local typescript = cleanname(data.metadata.family)
local entries = data.fonts
if entries then
- context.definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { rscale = scale } )
+ ctx_definetypeface( { fontclass }, { shortstyle }, { style }, { "" }, { "default" }, { rscale = scale } )
else
- context.definetypeface( { fontclass }, { shortstyle }, { style }, { typescript }, { "default" }, { rscale = scale } )
+ ctx_definetypeface( { fontclass }, { shortstyle }, { style }, { typescript }, { "default" }, { rscale = scale } )
end
end
@@ -678,5 +722,49 @@ function selectfont.definetypeface(index)
-- inspect(data)
end
-commands.definefontfamily = selectfont.definetypeface
-commands.definefallbackfamily = selectfont.registerfallback
+local styles = {
+ { "tf" }, { "bf" }, { "it" }, { "sl" }, { "bi" }, { "bs" }, { "sc" },
+}
+
+implement {
+ name = "defineselectfont",
+ actions = { selectfont.define, context },
+ arguments = {
+ {
+ {
+ "metadata", {
+ { "typeface" },
+ { "style" },
+ { "family" },
+ }
+ },
+ {
+ "options", {
+ { "opticals" },
+ { "scale" },
+ { "goodies" },
+ { "alternative" },
+ { "range" },
+ { "offset" },
+ { "check" },
+ { "force" },
+ }
+ },
+ { "alternatives", styles },
+ { "files", styles },
+ { "features", styles },
+ }
+ }
+}
+
+implement {
+ name = "definefontfamily",
+ actions = selectfont.definetypeface,
+ arguments = "integer"
+}
+
+implement {
+ name = "definefallbackfamily",
+ actions = selectfont.registerfallback,
+ arguments = { "string", "string", "integer"}
+}
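
Editor's note: in the reworked font-sel.lua above, the synonym definitions pick one file per alternative — the first entry whose design size is 0, 100, 110 or 120 (or the only entry) — and for ttc/dfont containers the raw subfont name is appended to the file name. A hedged sketch of that selection, with a hypothetical pickfilename helper, plain string.format instead of ConTeXt's formatters, and the path splitting left out:

    -- hypothetical helper mirroring the selection loop in definefontsynonym above
    local function pickfilename(fontdata)
        for _, entry in ipairs(fontdata) do
            local designsize = entry.designsize or 100
            if designsize == 0 or designsize == 100 or designsize == 110
                or designsize == 120 or #fontdata == 1 then
                local filename = entry.filename
                if entry.format == "ttc" or entry.format == "dfont" then
                    -- containers carry several fonts: "foo.ttc(Foo Regular)"
                    filename = string.format("%s(%s)", filename, entry.rawname)
                end
                return filename
            end
        end
    end

    -- pickfilename { { filename = "texgyrepagella-regular.otf", designsize = 110 } }
    --   --> "texgyrepagella-regular.otf"
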
diff --git a/tex/context/base/font-sel.mkvi b/tex/context/base/font-sel.mkvi
index 3d4dc6807..820d211eb 100644
--- a/tex/context/base/font-sel.mkvi
+++ b/tex/context/base/font-sel.mkvi
@@ -1,6 +1,6 @@
%D \module
%D [ file=font-sel,
-%D version=2013.10.19,
+%D version=2014.07.27,
%D title=\CONTEXT\ User Module,
%D subtitle=Selectfont,
%D author=Wolfgang Schuster,
@@ -27,51 +27,49 @@
\fi
\setexpandedselectfontparameter\c!style {\expandnamespaceparameter\??selectfontstyle \selectfontparameter\c!style \s!rm }%
\setexpandedselectfontparameter\c!alternative{\expandnamespaceparameter\??selectfontalternative\selectfontparameter\c!alternative\v!default}%
- \xdef\selectfont_index{\ctxcommand{
- defineselectfont {
- metadata = {
- typeface = "\selectfontparameter\c!name",
- style = "\selectfontparameter\c!style",
- family = "\selectfontparameter\c!family",
- },
- options = {
- opticals = "\selectfontparameter\c!opticalsize",
- scale = "\selectfontparameter\c!scale",
- goodies = "\selectfontparameter\c!goodies",
- alternative = "\selectfontparameter\c!alternative",
- range = "\selectfontparameter\c!range", % fallback only
- offset = "\selectfontparameter\c!offset", % fallback only
- check = "\selectfontparameter\c!check", % fallback only
- force = "\selectfontparameter\c!force", % fallback only
- },
- alternatives = {
- ["tf"] = "\selectfontparameter\s!tf",
- ["bf"] = "\selectfontparameter\s!bf",
- ["it"] = "\selectfontparameter\s!it",
- ["sl"] = "\selectfontparameter\s!sl",
- ["bi"] = "\selectfontparameter\s!bi",
- ["bs"] = "\selectfontparameter\s!bs",
- ["sc"] = "\selectfontparameter\s!sc",
- },
- files = {
- ["tf"] = "\selectfontparameter\c!regularfont",
- ["bf"] = "\selectfontparameter\c!boldfont",
- ["it"] = "\selectfontparameter\c!italicfont",
- ["sl"] = "\selectfontparameter\c!slantedfont",
- ["bi"] = "\selectfontparameter\c!bolditalicfont",
- ["bs"] = "\selectfontparameter\c!boldslantedfont",
- ["sc"] = "\selectfontparameter\c!smallcapsfont",
- },
- features = {
- ["tf"] = "\selectfontparameter\c!regularfeatures",
- ["bf"] = "\selectfontparameter\c!boldfeatures",
- ["it"] = "\selectfontparameter\c!italicfeatures",
- ["sl"] = "\selectfontparameter\c!slantedfeatures",
- ["bi"] = "\selectfontparameter\c!bolditalicfeatures",
- ["bs"] = "\selectfontparameter\c!boldslantedfeatures",
- ["sc"] = "\selectfontparameter\c!smallcapsfeatures",
- }
- }}}%
+ \xdef\selectfont_index{\clf_defineselectfont
+ metadata {%
+ typeface {\selectfontparameter\c!name}%
+ style {\selectfontparameter\c!style}%
+ family {\selectfontparameter\c!family}%
+ }
+ options {%
+ opticals {\selectfontparameter\c!opticalsize}%
+ scale {\selectfontparameter\c!scale}%
+ goodies {\selectfontparameter\c!goodies}%
+ alternative {\selectfontparameter\c!alternative}%
+ range {\selectfontparameter\c!range}% fallback only
+ offset {\selectfontparameter\c!offset}% fallback only
+ check {\selectfontparameter\c!check}% fallback only
+ force {\selectfontparameter\c!force}% fallback only
+ }
+ alternatives {%
+ tf {\selectfontparameter\s!tf}%
+ bf {\selectfontparameter\s!bf}%
+ it {\selectfontparameter\s!it}%
+ sl {\selectfontparameter\s!sl}%
+ bi {\selectfontparameter\s!bi}%
+ bs {\selectfontparameter\s!bs}%
+ sc {\selectfontparameter\s!sc}%
+ }
+ files {%
+ tf {\selectfontparameter\c!regularfont}%
+ bf {\selectfontparameter\c!boldfont}%
+ it {\selectfontparameter\c!italicfont}%
+ sl {\selectfontparameter\c!slantedfont}%
+ bi {\selectfontparameter\c!bolditalicfont}%
+ bs {\selectfontparameter\c!boldslantedfont}%
+ sc {\selectfontparameter\c!smallcapsfont}%
+ }
+ features {%
+ tf {\selectfontparameter\c!regularfeatures}%
+ bf {\selectfontparameter\c!boldfeatures}%
+ it {\selectfontparameter\c!italicfeatures}%
+ sl {\selectfontparameter\c!slantedfeatures}%
+ bi {\selectfontparameter\c!bolditalicfeatures}%
+ bs {\selectfontparameter\c!boldslantedfeatures}%
+ sc {\selectfontparameter\c!smallcapsfeatures}%
+ }}%
\endgroup}
%D \macros
@@ -104,7 +102,7 @@
{\dodoubleargument\selectfont_preset_define}
\def\selectfont_preset_define[#name][#settings]%
- {\doifassignmentelse{#settings}
+ {\doifelseassignment{#settings}
{\setvalue{\??selectfontpreset#name}{\setupcurrentselectfont[#settings]}}
{\setvalue{\??selectfontpreset#name}{\csname\??selectfontpreset#settings\endcsname}}}
@@ -115,11 +113,74 @@
% unknown preset
\fi}
-\definefontfamilypreset [range:chinese] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,bopomofo,bopomofoextended}]
-\definefontfamilypreset [range:japanese] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,hiragana,katakana}]
-\definefontfamilypreset [range:korean] [\c!range={cjkcompatibilityforms,cjkcompatibilityideographs,cjkcompatibilityideographssupplement,cjkradicalssupplement,cjkstrokes,cjksymbolsandpunctuation,cjkunifiedideographs,cjkunifiedideographsextensiona,cjkunifiedideographsextensionb,halfwidthandfullwidthforms,verticalforms,hangulcompatibilityjamo,hanguljamo,hanguljamoextendeda,hanguljamoextendedb,hangulsyllables}]
-\definefontfamilypreset [range:cyrillic] [\c!range={cyrillic,cyrillicextendeda,cyrillicextendedb,cyrillicsupplement}]
-\definefontfamilypreset [range:greek] [\c!range={greekandcoptic,greekextended,ancientgreeknumbers}]
+\definefontfamilypreset
+ [range:chinese]
+ [\c!range={cjkcompatibilityforms,
+ cjkcompatibilityideographs,
+ cjkcompatibilityideographssupplement,
+ cjkradicalssupplement,
+ cjkstrokes,
+ cjksymbolsandpunctuation,
+ cjkunifiedideographs,
+ cjkunifiedideographsextensiona,
+ cjkunifiedideographsextensionb,
+ halfwidthandfullwidthforms,
+ verticalforms,
+ bopomofo,
+ bopomofoextended}]
+
+\definefontfamilypreset
+ [range:japanese]
+  [\c!range={cjkcompatibilityforms,
+ cjkcompatibilityideographs,
+ cjkcompatibilityideographssupplement,
+ cjkradicalssupplement,
+ cjkstrokes,
+ cjksymbolsandpunctuation,
+ cjkunifiedideographs,
+ cjkunifiedideographsextensiona,
+ cjkunifiedideographsextensionb,
+ halfwidthandfullwidthforms,
+ verticalforms,
+ hiragana,
+ katakana}]
+
+\definefontfamilypreset
+ [range:korean]
+ [\c!range={cjkcompatibilityforms,
+ cjkcompatibilityideographs,
+ cjkcompatibilityideographssupplement,
+ cjkradicalssupplement,
+ cjkstrokes,
+ cjksymbolsandpunctuation,
+ cjkunifiedideographs,
+ cjkunifiedideographsextensiona,
+ cjkunifiedideographsextensionb,
+ halfwidthandfullwidthforms,
+ verticalforms,
+ hangulcompatibilityjamo,
+ hanguljamo,
+ hanguljamoextendeda,
+ hanguljamoextendedb,
+ hangulsyllables}]
+
+\definefontfamilypreset
+ [range:cyrillic]
+ [\c!range={cyrillic,
+ cyrillicextendeda,
+ cyrillicextendedb,
+ cyrillicsupplement}]
+
+\definefontfamilypreset
+ [range:greek]
+ [\c!range={greekandcoptic,
+ greekextended,
+ ancientgreeknumbers}]
+
+\definefontfamilypreset
+ [range:hebrew]
+ [\c!range={hebrew,
+ alphabeticpresentationforms}]
\definefontfamilypreset [math:digitsnormal] [\c!range=digitsnormal]
\definefontfamilypreset [math:digitsbold] [\c!range=digitsnormal,\c!offset=digitsbold,\s!tf=style:bold]
@@ -225,7 +286,7 @@
%D the \tex{definefontfamily} command.
%D
%D \starttyping
-%D \definefallbackfamily [mainface] [serif] [DejaVu Serif] [range=cyrillic,force=yes]
+%D \definefallbackfamily [mainface] [serif] [DejaVu Serif] [range=cyrillic]
%D \definefontfamily [mainface] [serif] [TeX Gyre Pagella]
%D
%D \setupbodyfont[mainface]
@@ -264,20 +325,20 @@
{\doquadrupleempty\selectfont_family_define}
\def\selectfont_family_define[#typeface][#style][#family][#settings]%
- {\doifassignmentelse{#settings}
+ {\doifelseassignment{#settings}
{\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},#settings]}
{\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},\c!preset={#settings}]}%
- \ctxcommand{definefontfamily(\selectfont_index)}}
+ \clf_definefontfamily\selectfont_index\relax}
\unexpanded\def\definefallbackfamily
{\doquadrupleempty\selectfont_fallback_define}
\def\selectfont_fallback_define[#typeface][#style][#family][#settings]%
- {\doifassignmentelse{#settings}
+ {\doifelseassignment{#settings}
{\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},#settings]}
{\selectfont_setparameters[\c!name={#typeface},\c!style={#style},\c!family={#family},\c!preset={#settings}]}%
\edef\p_selectfont_style{\expandnamespacevalue\??selectfontstyle{#style}\s!rm}%
- \ctxcommand{definefallbackfamily("#typeface","\p_selectfont_style",\selectfont_index)}}
+ \clf_definefallbackfamily{#typeface}{\p_selectfont_style}\selectfont_index\relax}
%D \macros
%D {setupfontfamily,setupfallbackfamily}
@@ -325,7 +386,8 @@
\unexpanded\def\selectfont_set_default
{\selectfont_set_font_family[\v!serif][Latin Modern Roman][\c!opticalsize=\v!yes]%
\selectfont_set_font_family[\v!sans] [Latin Modern Sans] [\c!opticalsize=\v!yes]%
- \selectfont_set_font_family[\v!mono] [Latin Modern Mono] [\c!opticalsize=\v!yes,\c!features=\s!none]}
+ \selectfont_set_font_family[\v!mono] [Latin Modern Mono] [\c!opticalsize=\v!yes,\c!features=\s!none]%
+ \selectfont_set_font_family[\v!math] [Latin Modern Math] [\c!opticalsize=\v!yes]}
\unexpanded\def\setupfallbackfamily
{\dotripleempty\selectfont_fallback_setup}
diff --git a/tex/context/base/font-set.mkvi b/tex/context/base/font-set.mkvi
index 0e2058c18..b29545ace 100644
--- a/tex/context/base/font-set.mkvi
+++ b/tex/context/base/font-set.mkvi
@@ -39,27 +39,38 @@
% \enablemode[lmmath]
-\def\font_preloads_reset_nullfont % this is needed because some macro packages (tikz) misuse \nullfont
- {\dorecurse\plusseven{\fontdimen\recurselevel\nullfont\zeropoint}% keep en eye on this as:
- \ctxcommand{resetnullfont()}% in luatex 0.70 this will also do the previous
- \globallet\font_preloads_reset_nullfont\relax}
+\let\m_font_fallback_name\empty
+
+% keep as reference:
+%
+% \def\font_preloads_reset_nullfont % this is needed because some macro packages (tikz) misuse \nullfont
+%   {\dorecurse\plusseven{\fontdimen\recurselevel\nullfont\zeropoint}% keep an eye on this as:
+% \clf_resetnullfont % in luatex 0.70 this will also do the previous
+% \globallet\font_preloads_reset_nullfont\relax}
+
+\def\font_preload_check_mode
+ {\doifelsemode{lmmath}
+ {\def\m_font_fallback_name{modern-designsize-virtual}}% this will stay
+ {\def\m_font_fallback_name{modern-designsize}}% % this might become 'modern'
+ \glet\font_preload_check_mode\relax}
\def\font_preload_default_fonts
{\font_preloads_reset
- \doifmodeelse{lmmath}
- {\setupbodyfont[modern-designsize-virtual,\fontstyle,\fontbody]}% this will stay
- {\setupbodyfont[modern-designsize,\fontstyle,\fontbody]}% % this might become 'modern'
- \showmessage\m!fonts6{fallback modern \fontstyle\normalspace\normalizedbodyfontsize}}
+ \font_preload_check_mode
+ \setupbodyfont[\m_font_fallback_name,\fontstyle,\fontbody]%
+ \showmessage\m!fonts6{fallback \m_font_fallback_name\space \fontstyle\normalspace\normalizedbodyfontsize}}
\def\font_preload_default_fonts_mm
- {\writestatus\m!fonts{preloading latin modern fonts (math)}%
- \definetypeface[\fontclass][\s!mm][\s!math][modern][\s!default]%
- \showmessage\m!fonts6{fallback modern mm \normalizedbodyfontsize}}
+ {\font_preload_check_mode
+ \writestatus\m!fonts{preloading \m_font_fallback_name\space (math)}%
+ \definetypeface[\fontclass][\s!mm][\s!math][\m_font_fallback_name][\s!default]%
+ \showmessage\m!fonts6{fallback \m_font_fallback_name\space mm \normalizedbodyfontsize}}
\def\font_preload_default_fonts_tt
- {\writestatus\m!fonts{preloading latin modern fonts (mono)}%
- \definetypeface[\fontclass][\s!tt][\s!mono][modern][\s!default]%
- \showmessage\m!fonts6{fallback modern tt \normalizedbodyfontsize}}
+ {\font_preload_check_mode
+ \writestatus\m!fonts{preloading \m_font_fallback_name\space (mono)}%
+ \definetypeface[\fontclass][\s!tt][\s!mono][\m_font_fallback_name][\s!default]%
+ \showmessage\m!fonts6{fallback \m_font_fallback_name\space tt \normalizedbodyfontsize}}
\def\font_preloads_reset
{\glet\font_preload_default_fonts \relax
@@ -72,7 +83,8 @@
\glet\fourthstagepreloadfonts \relax
\global\everyhbox\emptytoks
\global\everyvbox\emptytoks
- \font_preloads_reset_nullfont}
+ % old: \font_preloads_reset_nullfont, new:
+ \clf_resetnullfont}
\appendtoks
\font_preloads_reset
@@ -81,7 +93,9 @@
\def\font_preloads_reset_checked
{\glet\font_preload_default_fonts_tt\relax
\glet\font_preload_default_fonts_mm\relax
- \glet\font_preloads_third_stage \relax}
+ \glet\font_preloads_third_stage \relax
+% \glet\font_preloads_fourth_stage \relax
+ }
\def\font_preloads_zero_stage_indeed
{\definedfont[\s!file:lmmono10-regular sa 1]}
@@ -92,7 +106,7 @@
\font_preloads_zero_stage}
\unexpanded\def\font_preloads_first_stage % % *nofonts -> *preloadfonts
- {\doifmodeelse{*preloadfonts}
+ {\doifelsemode{*preloadfonts}
{\font_preload_default_fonts
\writestatus\m!fonts{preloading latin modern fonts (first stage)}}
{\writestatus\m!fonts{latin modern fonts are not preloaded}}}
@@ -123,7 +137,7 @@
\unexpanded\def\font_preloads_fourth_stage
{\begingroup
%ifzeropt\fontcharwd\font\number`!\relax
- \setbox\scratchbox\hbox{checking fonts}%
+ \setbox\scratchbox\hbox{c o n t e x t}%
\ifzeropt\wd\scratchbox
\writeline
\writestatus\m!fonts{!! No bodyfont has been defined and no defaults have been}%
diff --git a/tex/context/base/font-sol.lua b/tex/context/base/font-sol.lua
index 9ccfd0588..0761724f1 100644
--- a/tex/context/base/font-sol.lua
+++ b/tex/context/base/font-sol.lua
@@ -45,22 +45,46 @@ local v_preroll = variables.preroll
local v_random = variables.random
local v_split = variables.split
+local implement = interfaces.implement
+
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local copy_nodelist = node.copy_list
-local traverse_nodes = node.traverse
-local traverse_ids = node.traverse_id
-local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
-local hpack_nodes = node.hpack
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local repack_hlist = nodes.repackhlist
+local tasks = nodes.tasks
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local free_nodelist = nuts.flush_list
+local copy_nodelist = nuts.copy_list
+local traverse_nodes = nuts.traverse
+local traverse_ids = nuts.traverse_id
+local hpack_nodes = nuts.hpack
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local protect_glyphs = nuts.protect_glyphs
+
+local repack_hlist = nuts.repackhlist
+
local nodes_to_utf = nodes.listtoutf
+----- protect_glyphs = nodes.handlers.protectglyphs
+
local setnodecolor = nodes.tracers.colors.set
local nodecodes = nodes.nodecodes
@@ -79,8 +103,7 @@ local localpar_code = whatsitcodes.localpar
local dir_code = whatsitcodes.dir
local userdefined_code = whatsitcodes.userdefined
-local nodepool = nodes.pool
-local tasks = nodes.tasks
+local nodepool = nuts.pool
local usernodeids = nodepool.userids
local new_textdir = nodepool.textdir
@@ -90,7 +113,7 @@ local new_leftskip = nodepool.leftskip
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
-local process_characters = nodes.handlers.characters
+----- process_characters = nodes.handlers.characters
local inject_kerns = nodes.injections.handler
local fonthashes = fonts.hashes
@@ -317,11 +340,12 @@ end)
function splitters.split(head)
-- quite fast
+ head = tonut(head)
local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
cache, max_less, max_more = { }, 0, 0
local function flush() -- we can move this
- local font = start.font
- local last = stop.next
+ local font = getfont(start)
+ local last = getnext(stop)
local list = last and copy_nodelist(start,last) or copy_nodelist(start)
local n = #cache + 1
if encapsulate then
@@ -332,18 +356,18 @@ function splitters.split(head)
else
local current = start
while true do
- current[a_word] = n
+ setattr(current,a_word,n)
if current == stop then
break
else
- current = current.next
+ current = getnext(current)
end
end
end
if rlmode == "TRT" or rlmode == "+TRT" then
local dirnode = new_textdir("+TRT")
- list.prev = dirnode
- dirnode.next = list
+ setfield(list,"prev",dirnode)
+ setfield(dirnode,"next",list)
list = dirnode
end
local c = {
@@ -364,11 +388,11 @@ function splitters.split(head)
start, stop, done = nil, nil, true
end
while current do -- also nextid
- local next = current.next
- local id = current.id
+ local next = getnext(current)
+ local id = getid(current)
if id == glyph_code then
- if current.subtype < 256 then
- local a = current[a_split]
+ if getsubtype(current) < 256 then
+ local a = getattr(current,a_split)
if not a then
start, stop = nil, nil
elseif not start then
@@ -384,7 +408,7 @@ function splitters.split(head)
if start then
flush()
end
- elseif start and next and next.id == glyph_code and next.subtype < 256 then
+ elseif start and next and getid(next) == glyph_code and getsubtype(next) < 256 then
-- beware: we can cross future lines
stop = next
else
@@ -394,9 +418,9 @@ function splitters.split(head)
if start then
flush()
end
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == dir_code or subtype == localpar_code then
- rlmode = current.dir
+ rlmode = getfield(current,"dir")
end
else
if start then
@@ -410,17 +434,17 @@ function splitters.split(head)
end
nofparagraphs = nofparagraphs + 1
nofwords = nofwords + #cache
- return head, done
+ return tonode(head), done
end
local function collect_words(list) -- can be made faster for attributes
local words, w, word = { }, 0, nil
if encapsulate then
for current in traverse_ids(whatsit_code,list) do
- if current.subtype == userdefined_code then -- hm
- local user_id = current.user_id
+ if getsubtype(current) == userdefined_code then -- hm
+ local user_id = getfield(current,"user_id")
if user_id == splitter_one then
- word = { current.value, current, current }
+ word = { getfield(current,"value"), current, current }
w = w + 1
words[w] = word
elseif user_id == splitter_two then
@@ -436,9 +460,9 @@ local function collect_words(list) -- can be made faster for attributes
local current, first, last, index = list, nil, nil, nil
while current do
-- todo: disc and kern
- local id = current.id
+ local id = getid(current)
if id == glyph_code or id == disc_code then
- local a = current[a_word]
+ local a = getattr(current,a_word)
if a then
if a == index then
-- same word
@@ -471,7 +495,7 @@ local function collect_words(list) -- can be made faster for attributes
report_splitters("skipped: %C",current.char)
end
end
- elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
+ elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
if first then
last = current
else
@@ -483,13 +507,13 @@ local function collect_words(list) -- can be made faster for attributes
words[w] = { index, first, last }
index = nil
first = nil
- if id == disc_node then
+ if id == disc_code then
if trace_split then
report_splitters("skipped: disc node")
end
end
end
- current = current.next
+ current = getnext(current)
end
if index then
w = w + 1
@@ -520,8 +544,8 @@ local function doit(word,list,best,width,badness,line,set,listdir)
if found then
local h, t
if encapsulate then
- h = word[2].next -- head of current word
- t = word[3].prev -- tail of current word
+ h = getnext(word[2]) -- head of current word
+ t = getprev(word[3]) -- tail of current word
else
h = word[2]
t = word[3]
@@ -536,7 +560,7 @@ local function doit(word,list,best,width,badness,line,set,listdir)
ok = true
break
else
- c = c.next
+ c = getnext(c)
end
end
if not ok then
@@ -555,19 +579,20 @@ local function doit(word,list,best,width,badness,line,set,listdir)
local first = copy_nodelist(original)
if not trace_colors then
for n in traverse_nodes(first) do -- maybe fast force so no attr needed
- n[0] = featurenumber -- this forces dynamics
+ setattr(n,0,featurenumber) -- this forces dynamics
end
elseif set == "less" then
for n in traverse_nodes(first) do
setnodecolor(n,"font:isol") -- yellow
- n[0] = featurenumber
+ setattr(n,0,featurenumber)
end
else
for n in traverse_nodes(first) do
setnodecolor(n,"font:medi") -- green
- n[0] = featurenumber
+ setattr(n,0,featurenumber)
end
end
+first = tonode(first)
local font = found.font
local setdynamics = setfontdynamics[font]
if setdynamics then
@@ -579,20 +604,21 @@ local function doit(word,list,best,width,badness,line,set,listdir)
report_solutions("fatal error, no dynamics for font %a",font)
end
first = inject_kerns(first)
- if first.id == whatsit_code then
+first = tonut(first)
+ if getid(first) == whatsit_code then
local temp = first
- first = first.next
+ first = getnext(first)
free_node(temp)
end
local last = find_node_tail(first)
-- replace [u]h->t by [u]first->last
- local prev = h.prev
- local next = t.next
- prev.next = first
- first.prev = prev
+ local prev = getprev(h)
+ local next = getnext(t)
+ setfield(prev,"next",first)
+ setfield(first,"prev",prev)
if next then
- last.next = next
- next.prev = last
+ setfield(last,"next",next)
+ setfield(next,"prev",last)
end
-- check new pack
local temp, b = repack_hlist(list,width,'exactly',listdir)
@@ -601,22 +627,22 @@ local function doit(word,list,best,width,badness,line,set,listdir)
report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
end
-- remove last insert
- prev.next = h
- h.prev = prev
+ setfield(prev,"next",h)
+ setfield(h,"prev",prev)
if next then
- t.next = next
- next.prev = t
+ setfield(t,"next",next)
+ setfield(next,"prev",t)
else
- t.next = nil
+ setfield(t,"next",nil)
end
- last.next = nil
+ setfield(last,"next",nil)
free_nodelist(first)
else
if trace_optimize then
report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
end
-- free old h->t
- t.next = nil
+ setfield(t,"next",nil)
             free_nodelist(h) -- somehow fails
if not encapsulate then
word[2] = first
@@ -697,9 +723,9 @@ variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
end
local function show_quality(current,what,line)
- local set = current.glue_set
- local sign = current.glue_sign
- local order = current.glue_order
+ local set = getfield(current,"glue_set")
+ local sign = getfield(current,"glue_sign")
+ local order = getfield(current,"glue_order")
local amount = set * ((sign == 2 and -1) or 1)
report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
end
@@ -719,20 +745,25 @@ function splitters.optimize(head)
math.setrandomseedi(randomseed)
randomseed = nil
end
- local line = 0
- local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
- tex.hbadness, tex.hfuzz = 10000, number.maxdimen
+ local line = 0
+ local tex_hbadness = tex.hbadness
+ local tex_hfuzz = tex.hfuzz
+ tex.hbadness = 10000
+ tex.hfuzz = number.maxdimen
if trace_optimize then
report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
end
- for current in traverse_ids(hlist_code,head) do
- -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
+ for current in traverse_ids(hlist_code,tonut(head)) do
line = line + 1
- local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
- if not encapsulate and list.id == glyph_code then
+ local sign = getfield(current,"glue_sign")
+ local dir = getfield(current,"dir")
+ local width = getfield(current,"width")
+ local list = getlist(current)
+ if not encapsulate and getid(list) == glyph_code then
-- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
- -- current.list, list = insert_node_before(list,list,new_glue(0))
- current.list, list = insert_node_before(list,list,new_leftskip(0))
+ -- is this assignment ok ? .. needs checking
+ list = insert_node_before(list,list,new_leftskip(0)) -- new_glue(0)
+ setfield(current,"list",list)
end
local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
if badness > 0 then
@@ -792,7 +823,7 @@ function splitters.optimize(head)
local words = collect_words(list)
for best=lastbest or 1,max do
local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
- current.list = temp
+ setfield(current,"list",temp)
if trace_optimize then
report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
end
@@ -810,15 +841,16 @@ function splitters.optimize(head)
end
end
-- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
- current.list = hpack_nodes(current.list,width,'exactly',listdir)
- -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
+ local list = hpack_nodes(getlist(current),width,'exactly',listdir)
+ setfield(current,"list",list)
end
for i=1,nc do
local ci = cache[i]
free_nodelist(ci.original)
end
cache = { }
- tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
+ tex.hbadness = tex_hbadness
+ tex.hfuzz = tex_hfuzz
stoptiming(splitters)
end
@@ -877,8 +909,52 @@ end
-- interface
-commands.definefontsolution = splitters.define
-commands.startfontsolution = splitters.start
-commands.stopfontsolution = splitters.stop
-commands.setfontsolution = splitters.set
-commands.resetfontsolution = splitters.reset
+implement {
+ name = "definefontsolution",
+ actions = splitters.define,
+ arguments = {
+ "string",
+ {
+ { "goodies" },
+ { "solution" },
+ { "less" },
+ { "more" },
+ }
+ }
+}
+
+implement {
+ name = "startfontsolution",
+ actions = splitters.start,
+ arguments = {
+ "string",
+ {
+ { "method" },
+ { "criterium" },
+ { "randomseed" },
+ }
+ }
+}
+
+implement {
+ name = "stopfontsolution",
+ actions = splitters.stop
+}
+
+implement {
+ name = "setfontsolution",
+ actions = splitters.set,
+ arguments = {
+ "string",
+ {
+ { "method" },
+ { "criterium" },
+ { "randomseed" },
+ }
+ }
+}
+
+implement {
+ name = "resetfontsolution",
+ actions = splitters.reset
+}
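
-- The hunk above replaces the plain "commands.xxx = f" assignments with
-- declarative interfaces.implement registrations that also describe the
-- expected arguments.  A minimal standalone sketch of that idea, with a
-- stub registry instead of the real ConTeXt implementer:

local registry = { }

local function implement(spec)   -- stub, not interfaces.implement
    registry[spec.name] = spec
end

implement {
    name    = "stopfontsolution",
    actions = function() print("stopping font solution") end,
}

implement {
    name      = "setfontsolution",
    actions   = function(name,settings)
        print("setting solution",name,settings.method,settings.criterium)
    end,
    arguments = { "string", { { "method" }, { "criterium" } } },
}

-- at the TeX end the generated \clf_... macros would trigger these actions
registry["setfontsolution"].actions("demo",{ method = "normal", criterium = "1" })
registry["stopfontsolution"].actions()
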
diff --git a/tex/context/base/font-sol.mkvi b/tex/context/base/font-sol.mkvi
index b40e37ced..d065b78ea 100644
--- a/tex/context/base/font-sol.mkvi
+++ b/tex/context/base/font-sol.mkvi
@@ -85,24 +85,30 @@
\let\setupfontsolutions\setupfontsolution
\appendtoks
- \ctxcommand{definefontsolution("\currentfontsolution",{ % these are frozen
- goodies = "\fontsolutionparameter\s!goodies",
- solution = "\fontsolutionparameter\c!solution",
- less = "\fontsolutionparameter\c!less",
- more = "\fontsolutionparameter\c!more",
- })}
+ \clf_definefontsolution
+ {\currentfontsolution}%
+ {% these are frozen
+ goodies {\fontsolutionparameter\s!goodies}%
+ solution {\fontsolutionparameter\c!solution}%
+ less {\fontsolutionparameter\c!less}%
+ more {\fontsolutionparameter\c!more}%
+ }%
+ \relax
\to \everydefinefontsolution
\unexpanded\def\setfontsolution[#solution]% just one
{\edef\currentfontsolution{#solution}%
- \ctxcommand{setfontsolution("\currentfontsolution",{
- method = "\fontsolutionparameter\c!method",
- criterium = "\fontsolutionparameter\c!criterium",
- % randomseed = "\fontsolutionparameter\c!random",
- })}}
+ \clf_setfontsolution
+ {\currentfontsolution}%
+ {%
+ method {\fontsolutionparameter\c!method}%
+ criterium {\fontsolutionparameter\c!criterium}%
+ % randomseed {\fontsolutionparameter\c!random}%
+ }%
+ \relax}
\unexpanded\def\resetfontsolution % resets all
- {\ctxcommand{resetfontsolution()}%
+ {\clf_resetfontsolution
\let\currentfontsolution\empty}
\unexpanded\def\startfontsolution % [#1]
@@ -111,7 +117,7 @@
\unexpanded\def\stopfontsolution
{\ifhmode\par\fi
- \ctxcommand{stopfontsolution()}%
+ \clf_stopfontsolution
\popmacro\currentfontsolution}
% We initialize this module at the \LUA\ end.
diff --git a/tex/context/base/font-sty.mkvi b/tex/context/base/font-sty.mkvi
index 03fa598c2..5924a3033 100644
--- a/tex/context/base/font-sty.mkvi
+++ b/tex/context/base/font-sty.mkvi
@@ -279,7 +279,7 @@
{\groupedcommand{\font_styles_use_defined{#name}}{}} % or {\font_styles_apply_grouped{#name}}
\setvalue{\??styleargument3}#specification%
- {\doifassignmentelse{#specification}\font_styles_assignment\font_styles_direct{#specification}}
+ {\doifelseassignment{#specification}\font_styles_assignment\font_styles_direct{#specification}}
\def\font_styles_assignment#specification{\groupedcommand{\font_styles_use_generic{#specification}}{}}
\def\font_styles_direct #specification{\groupedcommand{\definedfont[#specification]}{}}
@@ -311,7 +311,7 @@
{\font_styles_use_defined{#name}}
\setvalue{\??styleenvironment3}#specification%
- {\doifassignmentelse{#specification}\font_styles_start_assignment\font_styles_start_direct{#specification}}
+ {\doifelseassignment{#specification}\font_styles_start_assignment\font_styles_start_direct{#specification}}
\def\font_styles_start_assignment#specification{\font_styles_use_generic{#specification}}
\def\font_styles_start_direct #specification{\definedfont[#specification]\relax}
@@ -352,7 +352,7 @@
\def\font_styles_define_style_instance[#instance][#2][#3][#4]% [name] [rm|ss|tt|..] [sl|bf|...] [whatever]
{\iffirstargument
- \ifcsname#1\endcsname\else\font_styles_define_style_collection[#instance]\fi
+ \ifcsname#instance\endcsname\else\font_styles_define_style_collection[#instance]\fi
\fi
\iffourthargument
\setvalue{\??stylecollection#instance:#2:#3}{#4}%
diff --git a/tex/context/base/font-sym.mkvi b/tex/context/base/font-sym.mkvi
index c8ca49f74..a21bea0ba 100644
--- a/tex/context/base/font-sym.mkvi
+++ b/tex/context/base/font-sym.mkvi
@@ -110,12 +110,14 @@
%D The next macro can be used to make decisions based on the shape:
-\def\doifitalicelse#yes#nop%
+\def\doifelseitalic#yes#nop%
{\ifx\fontalternative\s!sl#yes\else
\ifx\fontalternative\s!it#yes\else
\ifx\fontalternative\s!bs#yes\else
\ifx\fontalternative\s!bi#yes\else#nop\fi\fi\fi\fi}
+\let\doifitalicelse\doifelseitalic
+
%D For an example of usage of the following command,
%D see \type {cont-log.tex}.
%D
@@ -167,15 +169,15 @@
\currentsymbolfont
\global\expandafter\let\csname\??symbolfont\askedsymbolfont\endcsname\lastrawfontcall}
-\unexpanded\def\getnamedglyphstyled#fontname#character{{\setstyledsymbolicfont{#fontname}\ctxcommand{fontchar("#character")}}}
-\unexpanded\def\getnamedglyphdirect#fontname#character{{\setdirectsymbolicfont{#fontname}\ctxcommand{fontchar("#character")}}}
-\unexpanded\def\getglyphstyled #fontname#character{{\setstyledsymbolicfont{#fontname}\doifnumberelse{#character}\char\donothing#character}}
-\unexpanded\def\getglyphdirect #fontname#character{{\setdirectsymbolicfont{#fontname}\doifnumberelse{#character}\char\donothing#character}}
+\unexpanded\def\getnamedglyphstyled#fontname#character{{\setstyledsymbolicfont{#fontname}\clf_fontchar{#character}}}
+\unexpanded\def\getnamedglyphdirect#fontname#character{{\setdirectsymbolicfont{#fontname}\clf_fontchar{#character}}}
+\unexpanded\def\getglyphstyled #fontname#character{{\setstyledsymbolicfont{#fontname}\doifelsenumber{#character}\char\donothing#character}}
+\unexpanded\def\getglyphdirect #fontname#character{{\setdirectsymbolicfont{#fontname}\doifelsenumber{#character}\char\donothing#character}}
% this one is wrong:
\unexpanded\def\getscaledglyph#scale#name#content%
- {{\setscaledstyledsymbolicfont\fontbody{#scale}{#name}\doifnumberelse{#content}\char\donothing#content}}
+ {{\setscaledstyledsymbolicfont\fontbody{#scale}{#name}\doifelsenumber{#content}\char\donothing#content}}
\let\getglyph \getglyphstyled % old
\let\getrawglyph \getglyphdirect % old
@@ -234,7 +236,7 @@
%D \macros{doiffontcharelse}
-\unexpanded\def\doiffontcharelse#specification#unicode% this could be a direct lua call
+\unexpanded\def\doifelsefontchar#specification#unicode% this could be a direct lua call
{\begingroup
\font_basics_define_font_without_parameters{thedefinedfont}{#specification}%
\iffontchar\font#unicode\relax
@@ -243,4 +245,6 @@
\endgroup\expandafter\secondoftwoarguments
\fi}
+\let\doiffontcharelse\doifelsefontchar
+
\protect \endinput
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 5b50ac75f..bf46c8573 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -33,7 +33,14 @@ local exists = io.exists
local findfile = resolvers.findfile
local cleanpath = resolvers.cleanpath
-local resolveresolved = resolvers.resolve
+local resolveprefix = resolvers.resolve
+
+local fontloader = fontloader
+local font_to_table = fontloader.to_table
+local open_font = fontloader.open
+local get_font_info = fontloader.info
+local close_font = fontloader.close
+local font_fields = fontloader.fields
local settings_to_hash = utilities.parsers.settings_to_hash_tolerant
@@ -50,7 +57,7 @@ using a table that has keys filtered from the font related files.</p>
fonts = fonts or { } -- also used elsewhere
-local names = font.names or allocate { }
+local names = fonts.names or allocate { }
fonts.names = names
local filters = names.filters or { }
@@ -81,7 +88,33 @@ directives.register("fonts.usesystemfonts", function(v) usesystemfonts = toboole
local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
--- what to do with 'thin'
+-- -- what to do with these -- --
+--
+-- thin -> thin
+--
+-- regu -> regular -> normal
+-- norm -> normal -> normal
+-- stan -> standard -> normal
+-- medi -> medium
+-- ultr -> ultra
+-- ligh -> light
+-- heav -> heavy
+-- blac -> black
+-- thin
+-- book
+-- verylight
+--
+-- buch -> book
+-- buchschrift -> book
+-- halb -> demi
+-- halbfett -> demi
+-- mitt -> medium
+-- mittel -> medium
+-- fett -> bold
+-- mage -> light
+-- mager -> light
+-- nord -> normal
+-- gras -> normal
local weights = Cs ( -- not extra
P("demibold")
@@ -90,6 +123,7 @@ local weights = Cs ( -- not extra
+ P("ultrabold")
+ P("extrabold")
+ P("ultralight")
+ + P("extralight")
+ P("bold")
+ P("demi")
+ P("semi")
@@ -103,6 +137,17 @@ local weights = Cs ( -- not extra
+ P("regular") / "normal"
)
+-- numeric_weights = {
+-- 200 = "extralight",
+-- 300 = "light",
+-- 400 = "book",
+-- 500 = "medium",
+-- 600 = "demi",
+-- 700 = "bold",
+-- 800 = "heavy",
+-- 900 = "black",
+-- }
+
local normalized_weights = sparse {
regular = "normal",
}
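
-- The weights pattern above (now with an "extralight" alternative) maps
-- weight keywords found in font file names onto normalized values.  A
-- simplified, runnable sketch of that idea; the real pattern is larger
-- and is embedded in the name analysis elsewhere in font-syn.lua:

local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs

local weight = P("demibold") + P("semibold")
             + P("ultralight") + P("extralight")
             + P("bold") + P("demi") + P("semi") + P("light") + P("medium")
             + P("regular") / "normal"             -- normalize on the fly

local normalizer = Cs((weight + 1)^0)

print(lpeg.match(normalizer,"texgyrepagella-regular"))   -- texgyrepagella-normal
print(lpeg.match(normalizer,"somesans-extralight"))      -- somesans-extralight
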
@@ -116,6 +161,7 @@ local styles = Cs (
+ P("roman") / "normal"
+ P("ital") / "italic" -- might be tricky
+ P("ita") / "italic" -- might be tricky
+--+ P("obli") / "oblique"
)
local normalized_styles = sparse {
@@ -129,6 +175,7 @@ local widths = Cs(
+ P("thin")
+ P("expanded")
+ P("cond") / "condensed"
+--+ P("expa") / "expanded"
+ P("normal")
+ P("book") / "normal"
)
@@ -258,22 +305,25 @@ end
but to keep the overview, we define them here.</p>
--ldx]]--
-filters.otf = fontloader.info
-filters.ttf = fontloader.info
-filters.ttc = fontloader.info
-filters.dfont = fontloader.info
+filters.otf = get_font_info
+filters.ttf = get_font_info
+filters.ttc = get_font_info
+filters.dfont = get_font_info
-- We had this as temporary solution because we needed a bit more info but in the
-- meantime it got an interesting side effect: currently luatex delays loading of e.g.
-- glyphs so here we first load and then discard which is a waste. In the past it did
-- free memory because a full load was done. One of these things that goes unnoticed.
--
--- function fontloader.fullinfo(...) -- check with taco what we get / could get
--- local ff = fontloader.open(...)
+-- missing: names, units_per_em, design_range_bottom, design_range_top, design_size,
+-- pfminfo, top_side_bearing
+
+-- local function get_full_info(...) -- check with taco what we get / could get
+-- local ff = open_font(...)
-- if ff then
--- local d = ff -- and fontloader.to_table(ff)
+-- local d = ff -- and font_to_table(ff)
-- d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
--- fontloader.close(ff)
+-- close_font(ff)
-- return d
-- else
-- return nil, "error in loading font"
@@ -283,11 +333,11 @@ filters.dfont = fontloader.info
-- Phillip suggested this faster variant but it's still a hack as fontloader.info should
-- return these keys/values (and maybe some more) but at least we close the loader which
-- might save some memory in the end.
---
--- function fontloader.fullinfo(name)
--- local ff = fontloader.open(name)
+
+-- local function get_full_info(name)
+-- local ff = open_font(name)
-- if ff then
--- local fields = table.tohash(fontloader.fields(ff),true)
+-- local fields = table.tohash(font_fields(ff),true) -- isn't that one stable
-- local d = {
-- names = fields.names and ff.names,
-- familyname = fields.familyname and ff.familyname,
@@ -301,33 +351,73 @@ filters.dfont = fontloader.info
-- design_size = fields.design_size and ff.design_size,
-- italicangle = fields.italicangle and ff.italicangle,
-- pfminfo = fields.pfminfo and ff.pfminfo,
+-- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing,
-- }
--- table.setmetatableindex(d,function(t,k)
+-- setmetatableindex(d,function(t,k)
-- report_names("warning, trying to access field %a in font table of %a",k,name)
-- end)
--- fontloader.close(ff)
+-- close_font(ff)
-- return d
-- else
-- return nil, "error in loading font"
-- end
-- end
--- As we have lazy loading anyway, this one still is full and with less code than
--- the previous one.
+-- more efficient:
+
+local fields = nil
-function fontloader.fullinfo(...)
- local ff = fontloader.open(...)
+local function get_full_info(name)
+ local ff = open_font(name)
if ff then
- local d = { } -- ff is userdata so [1] or # fails on it
- table.setmetatableindex(d,ff)
+ if not fields then
+ fields = table.tohash(font_fields(ff),true)
+ end
+ -- unfortunately luatex aborts when a field is not available
+ local d = {
+ names = fields.names and ff.names,
+ familyname = fields.familyname and ff.familyname,
+ fullname = fields.fullname and ff.fullname,
+ fontname = fields.fontname and ff.fontname,
+ weight = fields.weight and ff.weight,
+ italicangle = fields.italicangle and ff.italicangle,
+ units_per_em = fields.units_per_em and ff.units_per_em,
+ design_range_bottom = fields.design_range_bottom and ff.design_range_bottom,
+ design_range_top = fields.design_range_top and ff.design_range_top,
+ design_size = fields.design_size and ff.design_size,
+ italicangle = fields.italicangle and ff.italicangle,
+ pfminfo = fields.pfminfo and ff.pfminfo,
+ top_side_bearing = fields.top_side_bearing and ff.top_side_bearing, -- not there
+ }
+ setmetatableindex(d,function(t,k)
+ report_names("warning, trying to access field %a in font table of %a",k,name)
+ end)
+ close_font(ff)
return d
else
return nil, "error in loading font"
end
end
-filters.otf = fontloader.fullinfo
-filters.ttf = fontloader.fullinfo
+-- As we have lazy loading anyway, this one still is full and with less code than
+-- the previous one. But this depends on the garbage collector to kick in and in the
+-- current version that somehow happens not that often (on my machine I end up with
+-- some 3 GB extra before that happens).
+
+-- local function get_full_info(...)
+-- local ff = open_font(...)
+-- if ff then
+-- local d = { } -- ff is userdata so [1] or # fails on it
+-- setmetatableindex(d,ff)
+-- return d -- garbage collection will do the close_font(ff)
+-- else
+-- return nil, "error in loading font"
+-- end
+-- end
+
+fontloader.fullinfo = get_full_info
+filters .otf = get_full_info
+filters .ttf = get_full_info
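
-- The new get_full_info above copies only the fields that the fontloader
-- actually exposes (luatex aborts on unknown fields) and installs an
-- __index handler that warns when code later asks for a field that was
-- not copied.  A small standalone sketch of that pattern, with a plain
-- table standing in for the fontloader userdata:

local ff = {   -- pretend result of open_font(name)
    fontname    = "TestFont-Regular",
    familyname  = "TestFont",
    italicangle = 0,
}

local fields = { fontname = true, familyname = true, italicangle = true }

local d = {
    fontname    = fields.fontname    and ff.fontname,
    familyname  = fields.familyname  and ff.familyname,
    italicangle = fields.italicangle and ff.italicangle,
    weight      = fields.weight      and ff.weight,   -- unknown field: stays nil
}

setmetatable(d, { __index = function(t,k)
    print(("warning, trying to access field %q"):format(k))
end })

print(d.fontname)    -- TestFont-Regular
print(d.designsize)  -- triggers the warning, then nil
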
function filters.afm(name)
-- we could parse the afm file as well, and then report an error but
@@ -345,7 +435,7 @@ function filters.afm(name)
if key and #key > 0 then
hash[lower(key)] = value
end
- if find(line,"StartCharMetrics") then
+ if find(line,"StartCharMetrics",1,true) then
break
end
end
@@ -357,7 +447,7 @@ function filters.afm(name)
end
function filters.pfb(name)
- return fontloader.info(name)
+ return get_font_info(name)
end
--[[ldx--
@@ -547,7 +637,7 @@ local function check_name(data,result,filename,modification,suffix,subfont)
fullname = fullname or fontname
familyname = familyname or fontname
-- we do these sparse
- local units = result.units_per_em or 1000
+ local units = result.units_per_em or 1000 -- can be zero too
local minsize = result.design_range_bottom or 0
local maxsize = result.design_range_top or 0
local designsize = result.design_size or 0
@@ -571,7 +661,7 @@ local function check_name(data,result,filename,modification,suffix,subfont)
style = style,
width = width,
variant = variant,
- units = units ~= 1000 and unit or nil,
+ units = units ~= 1000 and units or nil,
pfmwidth = pfmwidth ~= 0 and pfmwidth or nil,
pfmweight = pfmweight ~= 0 and pfmweight or nil,
angle = angle ~= 0 and angle or nil,
@@ -580,6 +670,9 @@ local function check_name(data,result,filename,modification,suffix,subfont)
designsize = designsize ~= 0 and designsize or nil,
modification = modification ~= 0 and modification or nil,
}
+-- inspect(filename)
+-- inspect(result)
+-- inspect(specifications[#specifications])
end
local function cleanupkeywords()
@@ -1011,15 +1104,15 @@ local function analyzefiles(olddata)
resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name)
if method == "file" or method == "tree" then
local completename = root .."/" .. path .. "/" .. name
- completename = resolveresolved(completename) -- no shortcut
+ completename = resolveprefix(completename) -- no shortcut
identify(completename,name,suffix,name)
return true
end
end, function(blobtype,blobpath,pattern)
- blobpath = resolveresolved(blobpath) -- no shortcut
+ blobpath = resolveprefix(blobpath) -- no shortcut
report_names("scanning path %a for %s files",blobpath,suffix)
end, function(blobtype,blobpath,pattern,total,checked,done)
- blobpath = resolveresolved(blobpath) -- no shortcut
+ blobpath = resolveprefix(blobpath) -- no shortcut
report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
end)
end
@@ -1747,7 +1840,7 @@ local lastlookups, lastpattern = { }, ""
-- local lookups = specifications
-- if name then
-- lookups = families[name]
--- elseif not find(pattern,"=") then
+-- elseif not find(pattern,"=",1,true) then
-- lookups = families[pattern]
-- end
-- if trace_names then
@@ -1756,7 +1849,7 @@ local lastlookups, lastpattern = { }, ""
-- if lookups then
-- for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
-- local t, n = { }, 0
--- if find(value,"*") then
+-- if find(value,"*",1,true) then
-- value = topattern(value)
-- for i=1,#lookups do
-- local s = lookups[i]
@@ -1789,7 +1882,7 @@ local lastlookups, lastpattern = { }, ""
local function look_them_up(lookups,specification)
for key, value in next, specification do
local t, n = { }, 0
- if find(value,"*") then
+ if find(value,"*",1,true) then
value = topattern(value)
for i=1,#lookups do
local s = lookups[i]
@@ -1852,7 +1945,7 @@ function names.lookup(pattern,name,reload) -- todo: find
lastpattern = false
lastlookups = lookups or { }
elseif lastpattern ~= pattern then
- local lookups = first_look(name or (not find(pattern,"=") and pattern),reload)
+ local lookups = first_look(name or (not find(pattern,"=",1,true) and pattern),reload)
if lookups then
if trace_names then
report_names("starting with %s lookups for %a",#lookups,pattern)
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index 827d70586..ab0378851 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -70,16 +70,17 @@ local function read_from_tfm(specification)
properties.fontname = tfmdata.fontname
properties.psname = tfmdata.psname
properties.filename = specification.filename
+ properties.format = fonts.formats.tfm -- better than nothing
parameters.size = size
- shared.rawdata = { }
- shared.features = features
- shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil
--
tfmdata.properties = properties
tfmdata.resources = resources
tfmdata.parameters = parameters
tfmdata.shared = shared
--
+ shared.rawdata = { }
+ shared.features = features
+ shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil
parameters.slant = parameters.slant or parameters[1] or 0
parameters.space = parameters.space or parameters[2] or 0
parameters.space_stretch = parameters.space_stretch or parameters[3] or 0
@@ -114,6 +115,11 @@ local function read_from_tfm(specification)
features.encoding = encoding
end
end
+ -- let's play safe:
+ properties.haskerns = true
+ properties.hasligatures = true
+ resources.unicodes = { }
+ resources.lookuptags = { }
--
return tfmdata
end
diff --git a/tex/context/base/font-tra.mkiv b/tex/context/base/font-tra.mkiv
index 45d8a7280..3d6811a64 100644
--- a/tex/context/base/font-tra.mkiv
+++ b/tex/context/base/font-tra.mkiv
@@ -95,7 +95,9 @@
%D \doiffontpresentelse{adam-lindsay-modern-serif}{YES}{NO}
%D \stoptyping
-\unexpanded\def\doiffontpresentelse#1{\ctxcommand{doifelse(fonts.names.exists("#1"))}}
+\unexpanded\def\doifelsefontpresent#1{\clf_doifelsefontpresent{#1}}
+
+\let\doiffontpresentelse\doifelsefontpresent
% experimental, maybe this becomes a module
@@ -114,11 +116,14 @@
{\ctxlua{nodes.tracers.steppers.glyphs(\number\otfcollector,#1)}%
\unhbox\otfcollector}
-\unexpanded\def\otfstepcharcommand#1#2#3% font char class
+\unexpanded\def\otfstepspace
{\removeunwantedspaces
- \hskip.5\emwidth \s!plus .125\emwidth\relax
- \doif{#3}{mark}{\underbar}{U+\hexnumber{#2}}:\ruledhbox{\ctxlua{nodes.tracers.fontchar(#1,#2)}}%
- \hskip.5\emwidth \s!plus .125\emwidth\relax}
+ \hskip.5\emwidth \s!plus .125\emwidth \s!minus .125\emwidth\relax}
+
+\unexpanded\def\otfstepcharcommand#1#2#3% font char class
+ {\otfstepspace
+ \doif{#3}{mark}{\underbar}{U+\hexnumber{#2}}:\ruledhbox{\ctxlua{nodes.tracers.fontchar(#1,#2)}}%
+ \otfstepspace}
\unexpanded\def\otfstepfontcommand#1#2#3% id font size
{\begingroup
@@ -142,7 +147,7 @@
{\ctxlua{nodes.tracers.steppers.font("otfstepfontcommand")}}
\unexpanded\def\showotfstepchars#1%
- {\ctxlua{nodes.tracers.steppers.codes(#1,"otfstepcharcommand")}}
+ {\ctxlua{nodes.tracers.steppers.codes(#1,"otfstepcharcommand","otfstepspace")}}
\unexpanded\def\showotfstepmessages#1%
{\ctxlua{nodes.tracers.steppers.messages(#1,"otfstepmessagecommand",true)}}
@@ -275,6 +280,7 @@
% \setupcolors[\c!state=\v!start]%
\setupalign[\v!verytolerant,\v!flushleft]%
\startotfsample
+ \nohyphens
\global\setbox\otfcompositionbox\hbox{\definedfont[#1]\relax\getvalue{\??otfcompositiondir#2}\relax#3}%
\stopotfsample
\endgroup}
diff --git a/tex/context/base/font-uni.mkiv b/tex/context/base/font-uni.mkiv
index 223d27606..91a488ff2 100644
--- a/tex/context/base/font-uni.mkiv
+++ b/tex/context/base/font-uni.mkiv
@@ -13,14 +13,14 @@
\writestatus{loading}{ConTeXt Font Macros / Unicode}
-%D In \MKIV\ we only provide the \type {\uchar} macro and
-%D implement it as just an \UTF\ converter. We expand it so
-%D best not use not use it for active characters.
+%D In \MKIV\ we only provide the \type {\uchar} macro and implement it as just
+%D an \UTF\ converter. We expand it, so best not use it for active
+%D characters. In practice this command is probably never used anyway but we keep
+%D it for old styles.
\unprotect
-%def\uchar#1#2{\ctxcommand{uchar(,)}}
-\def\uchar#1#2{\cldcontext{utf.char(\number\numexpr#1*256+#2\relax)}}
+\def\uchar#1#2{\clf_uchar\numexpr#1\relax\numexpr#2\relax}
\let\uc\uchar
diff --git a/tex/context/base/font-var.mkvi b/tex/context/base/font-var.mkvi
index e50c2bad4..fb60b711c 100644
--- a/tex/context/base/font-var.mkvi
+++ b/tex/context/base/font-var.mkvi
@@ -50,4 +50,7 @@
\let\fontsize \defaultfontsize
\let\fontface \!!zerocount
+% we can use an indirect mapping for fontclasses (map string onto numbers) and indeed this
+% is somewhat more efficient but also makes the code messy ... maybe some day ...
+
\protect \endinput
diff --git a/tex/context/base/grph-epd.lua b/tex/context/base/grph-epd.lua
index 4f9d46097..17f04d593 100644
--- a/tex/context/base/grph-epd.lua
+++ b/tex/context/base/grph-epd.lua
@@ -22,4 +22,13 @@ function figures.mergegoodies(optionlist)
if all or options[variables.layer] then
codeinjections.mergeviewerlayers()
end
+ if all or options[variables.bookmark] then
+ codeinjections.mergebookmarks()
+ end
end
+
+interfaces.implement {
+ name = "figure_mergegoodies",
+ actions = figures.mergegoodies,
+ arguments = "string"
+}
diff --git a/tex/context/base/grph-epd.mkiv b/tex/context/base/grph-epd.mkiv
index 58526fd44..2df195589 100644
--- a/tex/context/base/grph-epd.mkiv
+++ b/tex/context/base/grph-epd.mkiv
@@ -17,12 +17,12 @@
\registerctxluafile{grph-epd}{1.001}
-\def\figurereference{\ctxcommand{figurestatus("reference")}} % might become private
+\def\figurereference{\clf_figurestatus{reference}{}} % might become private
\defineoverlay[system:graphics:epdf][\directsetup{system:graphics:epdf}]
\startsetups system:graphics:epdf
- \ctxlua{figures.mergegoodies("\externalfigureparameter\c!interaction")}%
+ \clf_figure_mergegoodies{\externalfigureparameter\c!interaction}%
\reference[\figurereference]{}% todo: dest area
\stopsetups
@@ -32,7 +32,7 @@
\c!offset=\v!overlay,
\c!background={\v!foreground,system:graphics:epdf}]
-\def\grph_epdf_add_overlay
+\unexpanded\def\grph_epdf_add_overlay
{\global\setbox\foundexternalfigure\vbox\bgroup
\system_graphics_epdf{\box\foundexternalfigure}%
\egroup}
diff --git a/tex/context/base/grph-fig.mkiv b/tex/context/base/grph-fig.mkiv
index 9b9333fa9..f5152874d 100644
--- a/tex/context/base/grph-fig.mkiv
+++ b/tex/context/base/grph-fig.mkiv
@@ -45,7 +45,7 @@
{\ifsecondargument
\grph_buffers_typeset_indeed[#1][#2]%
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\grph_buffers_typeset_indeed[\jobname][#1]}%
{\grph_buffers_typeset_indeed[#1][]}%
\else
@@ -53,7 +53,7 @@
\fi\fi}
\def\grph_buffers_typeset_indeed[#1][#2]% we could use the via files
- {\doifnot{#1}{*}{\xdef\lasttypesetbuffer{\ctxcommand{runbuffer("#1",true)}}}%
+ {\doifnot{#1}{*}{\xdef\lasttypesetbuffer{\clf_runbuffer{#1}}}%
\ifcase\c_grph_buffers_mode
% typesetonly
\or
@@ -94,7 +94,7 @@
\def\grph_typesetting_process_indeed#1#2#3#4% options settings-a filename settings-b
{\begingroup
- \edef\m_typesetting_name{\cldcontext{job.files.context("#3","#1")}}%
+ \edef\m_typesetting_name{\clf_runcontextjob{#3}{#1}}%
\ifx\m_typesetting_name\empty \else
\expanded{\externalfigure[\m_typesetting_name]}[#2,#4]%
\fi
diff --git a/tex/context/base/grph-fil.lua b/tex/context/base/grph-fil.lua
index 3449f1779..c1532ce25 100644
--- a/tex/context/base/grph-fil.lua
+++ b/tex/context/base/grph-fil.lua
@@ -69,3 +69,9 @@ function jobfiles.context(name,options)
return file.replacesuffix(name,"pdf")
end
end
+
+interfaces.implement {
+ name = "runcontextjob",
+ arguments = { "string", "string" },
+ actions = { jobfiles.context, context }
+}
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index 392aa58b1..8a064c8e9 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['grph-inc'] = {
license = "see context related readme files"
}
+-- todo: files are sometimes located twice
-- todo: empty filename or only suffix always false (not found)
-- lowercase types
-- mps tex tmp svg
@@ -37,6 +38,8 @@ The TeX-Lua mix is suboptimal. This has to do with the fact that we cannot
run TeX code from within Lua. Some more functionality will move to Lua.
]]--
+-- todo: store loaded pages per pdf file someplace
+
local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch
local contains = table.contains
local concat, insert, remove = table.concat, table.insert, table.remove
@@ -46,7 +49,7 @@ local formatters = string.formatters
local longtostring = string.longtostring
local expandfilename = dir.expandname
-local P, lpegmatch = lpeg.P, lpeg.match
+local P, R, S, Cc, C, Cs, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.Cc, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.match
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
@@ -56,24 +59,37 @@ local replacetemplate = utilities.templates.replace
local images = img
+local hasscheme = url.hasscheme
+local urlhashed = url.hashed
+
+local resolveprefix = resolvers.resolve
+
local texgetbox = tex.getbox
local texsetbox = tex.setbox
local hpack = node.hpack
+local new_latelua = nodes.pool.latelua
+
local context = context
+local implement = interfaces.implement
local variables = interfaces.variables
+
local codeinjections = backends.codeinjections
local nodeinjections = backends.nodeinjections
-local trace_figures = false trackers.register("graphics.locating", function(v) trace_figures = v end)
-local trace_bases = false trackers.register("graphics.bases", function(v) trace_bases = v end)
-local trace_programs = false trackers.register("graphics.programs", function(v) trace_programs = v end)
-local trace_conversion = false trackers.register("graphics.conversion", function(v) trace_conversion = v end)
-local trace_inclusion = false trackers.register("graphics.inclusion", function(v) trace_inclusion = v end)
+local trace_figures = false trackers.register ("graphics.locating", function(v) trace_figures = v end)
+local trace_bases = false trackers.register ("graphics.bases", function(v) trace_bases = v end)
+local trace_programs = false trackers.register ("graphics.programs", function(v) trace_programs = v end)
+local trace_conversion = false trackers.register ("graphics.conversion", function(v) trace_conversion = v end)
+local trace_inclusion = false trackers.register ("graphics.inclusion", function(v) trace_inclusion = v end)
+
+local extra_check = false directives.register("graphics.extracheck", function(v) extra_check = v end)
local report_inclusion = logs.reporter("graphics","inclusion")
+local report_figures = logs.reporter("system","graphics")
+local report_figure = logs.reporter("used graphic")
local f_hash_part = formatters["%s->%s->%s"]
local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"]
@@ -85,23 +101,41 @@ local v_high = variables.high
local v_global = variables["global"]
local v_local = variables["local"]
local v_default = variables.default
+local v_auto = variables.auto
local maxdimen = 2^30-1
function images.check(figure)
if figure then
- local width = figure.width
+ local width = figure.width
local height = figure.height
+ if width <= 0 or height <= 0 then
+ report_inclusion("image %a has bad dimensions (%p,%p), discarding",
+ figure.filename,width,height)
+ return false, "bad dimensions"
+ end
+ local xres = figure.xres
+ local yres = figure.yres
+ local changed = false
if height > width then
if height > maxdimen then
figure.height = maxdimen
figure.width = width * maxdimen/height
- report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"height")
+ changed = true
end
elseif width > maxdimen then
figure.width = maxdimen
figure.height = height * maxdimen/width
- report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"width")
+ changed = true
+ end
+ if changed then
+ report_inclusion("limiting natural dimensions of %a, old %p * %p, new %p * %p",
+ figure.filename,width,height,figure.width,figure.height)
+ end
+ if width >= maxdimen or height >= maxdimen then
+ report_inclusion("image %a is too large (%p,%p), discarding",
+ figure.filename,width,height)
+ return false, "dimensions too large"
end
return figure
end
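
-- images.check above now rejects images with non-positive dimensions and
-- scales oversized ones down to maxdimen while keeping the aspect ratio.
-- A standalone sketch of that clamping logic on plain numbers:

local maxdimen = 2^30 - 1   -- largest dimension accepted (in scaled points)

local function clamp(width,height)
    if width <= 0 or height <= 0 then
        return nil, "bad dimensions"
    end
    if height > width then
        if height > maxdimen then
            width, height = width * maxdimen/height, maxdimen
        end
    elseif width > maxdimen then
        width, height = maxdimen, height * maxdimen/width
    end
    return width, height
end

print(clamp(2^31,2^29))   -- scaled down, ratio kept
print(clamp(0,100))       -- nil   bad dimensions
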
@@ -160,6 +194,7 @@ end
figures = figures or { }
local figures = figures
+figures.images = images
figures.boxnumber = figures.boxnumber or 0
figures.defaultsearch = true
figures.defaultwidth = 0
@@ -228,10 +263,74 @@ local figures_magics = allocate {
{ format = "pdf", pattern = (1 - P("%PDF"))^0 * P("%PDF") },
}
+local figures_native = allocate {
+ pdf = true,
+ jpg = true,
+ jp2 = true,
+ png = true,
+}
+
figures.formats = figures_formats -- frozen
figures.magics = figures_magics -- frozen
figures.order = figures_order -- frozen
+-- name checker
+
+local okay = P("m_k_i_v_")
+
+local pattern = (R("az","AZ") * P(":"))^-1 * ( -- a-z : | A-Z :
+ (okay + R("az","09") + S("_/") - P("_")^2)^1 * (P(".") * R("az")^1)^0 * P(-1) + -- a-z | single _ | /
+ (okay + R("az","09") + S("-/") - P("-")^2)^1 * (P(".") * R("az")^1)^0 * P(-1) + -- a-z | single - | /
+ (okay + R("AZ","09") + S("_/") - P("_")^2)^1 * (P(".") * R("AZ")^1)^0 * P(-1) + -- A-Z | single _ | /
+ (okay + R("AZ","09") + S("-/") - P("-")^2)^1 * (P(".") * R("AZ")^1)^0 * P(-1) -- A-Z | single - | /
+) * Cc(false) + Cc(true)
+
+function figures.badname(name)
+ if not name then
+ -- bad anyway
+ elseif not hasscheme(name) then
+ return lpegmatch(pattern,name)
+ else
+ return lpegmatch(pattern,file.basename(name))
+ end
+end
+
+local trace_names = false
+
+trackers.register("graphics.lognames", function(v)
+ if v and not trace_names then
+ luatex.registerstopactions(function()
+ if figures.nofprocessed > 0 then
+ local report_newline = logs.newline
+ logs.pushtarget("logfile")
+ report_newline()
+ report_figures("start names")
+ for _, data in table.sortedhash(figures_found) do
+ report_newline()
+ report_figure("asked : %s",data.askedname)
+ if data.found then
+ report_figure("format : %s",data.format)
+ report_figure("found : %s",data.foundname)
+ report_figure("used : %s",data.fullname)
+ if data.badname then
+ report_figure("comment : %s","bad name")
+ elseif data.comment then
+ report_figure("comment : %s",data.comment)
+ end
+ else
+ report_figure("comment : %s","not found")
+ end
+ end
+ report_newline()
+ report_figures("stop names")
+ report_newline()
+ logs.poptarget()
+ end
+ end)
+ trace_names = true
+ end
+end)
+
-- We can set the order but only indirectly so that we can check for support.
function figures.setorder(list) -- can be table or string
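
-- figures.badname above uses an lpeg pattern to flag file names with mixed
-- case or doubled separators so that they can be reported by the new
-- graphics.lognames tracker.  A much simplified, runnable sketch of such a
-- checker; the real pattern also accepts uppercase-only names, drive
-- prefixes, slashes and the m_k_i_v_ escape:

local lpeg = require("lpeg")
local P, R, Cc = lpeg.P, lpeg.R, lpeg.Cc

-- good: lowercase letters, digits, single hyphens, an optional suffix
local good = (R("az","09") + P("-") - P("-")^2)^1
           * (P(".") * R("az")^1)^0
           * P(-1)

local badname = good * Cc(false) + Cc(true)

print(lpeg.match(badname,"cow-fence.pdf"))   -- false (name is fine)
print(lpeg.match(badname,"Cow Fence.pdf"))   -- true  (uppercase and a space)
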
@@ -330,6 +429,9 @@ end
function figures.registersuffix (suffix, target) register('list', target,suffix ) end
function figures.registerpattern(pattern,target) register('pattern',target,pattern) end
+implement { name = "registerfiguresuffix", actions = register, arguments = { "'list'", "string", "string" } }
+implement { name = "registerfigurepattern", actions = register, arguments = { "'pattern'", "string", "string" } }
+
local last_locationset = last_locationset or nil
local last_pathlist = last_pathlist or nil
@@ -367,6 +469,8 @@ function figures.setpaths(locationset,pathlist)
end
end
+implement { name = "setfigurepaths", actions = figures.setpaths, arguments = { "string", "string" } }
+
-- check conversions and handle it here
function figures.hash(data)
@@ -452,14 +556,15 @@ end
function figures.push(request)
statistics.starttiming(figures)
- local figuredata = figures.initialize(request)
+ local figuredata = figures.initialize(request) -- we could use table.sparse but we set them later anyway
insert(callstack,figuredata)
lastfiguredata = figuredata
return figuredata
end
function figures.pop()
- lastfiguredata = remove(callstack) or lastfiguredata
+ remove(callstack)
+ lastfiguredata = callstack[#callstack] or lastfiguredata
statistics.stoptiming(figures)
end
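
-- figures.pop above now sets lastfiguredata to the new top of the call
-- stack (the enclosing figure) instead of to the entry that was just
-- removed, so nested inclusion restores the outer figure's data.  A
-- standalone sketch of that stack discipline:

local callstack      = { }
local lastfiguredata = nil

local function push(data)
    callstack[#callstack+1] = data
    lastfiguredata = data
    return data
end

local function pop()
    table.remove(callstack)
    lastfiguredata = callstack[#callstack] or lastfiguredata
end

push { name = "outer.pdf" }
push { name = "inner.pdf" }
pop()
print(lastfiguredata.name)   -- outer.pdf (the old code kept inner.pdf here)
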
@@ -479,17 +584,13 @@ end
figures.get = get
-function commands.figurevariable(category,tag,default)
- context(get(category,tag,default))
-end
+implement { name = "figurestatus", actions = { get, context }, arguments = { "'status'", "string", "string" } }
+implement { name = "figurerequest", actions = { get, context }, arguments = { "'request'", "string", "string" } }
+implement { name = "figureused", actions = { get, context }, arguments = { "'used'", "string", "string" } }
-function commands.figurestatus (tag,default) context(get("status", tag,default)) end
-function commands.figurerequest(tag,default) context(get("request",tag,default)) end
-function commands.figureused (tag,default) context(get("used", tag,default)) end
-
-function commands.figurefilepath() context(file.dirname (get("used","fullname"))) end
-function commands.figurefilename() context(file.nameonly(get("used","fullname"))) end
-function commands.figurefiletype() context(file.extname (get("used","fullname"))) end
+implement { name = "figurefilepath", actions = { get, file.dirname, context }, arguments = { "'used'", "'fullname'" } }
+implement { name = "figurefilename", actions = { get, file.nameonly, context }, arguments = { "'used'", "'fullname'" } }
+implement { name = "figurefiletype", actions = { get, file.extname, context }, arguments = { "'used'", "'fullname'" } }
-- todo: local path or cache path
@@ -510,12 +611,31 @@ local function forbiddenname(filename)
end
end
+local function rejected(specification)
+ if extra_check then
+ local fullname = specification.fullname
+ if fullname and figures_native[file.suffix(fullname)] and not figures.guess(fullname) then
+ specification.comment = "probably a bad file"
+ specification.found = false
+ specification.error = true
+ report_inclusion("file %a looks bad",fullname)
+ return true
+ end
+ end
+end
+
local function register(askedname,specification)
if not specification then
- specification = { }
+ specification = { askedname = askedname, comment = "invalid specification" }
elseif forbiddenname(specification.fullname) then
- specification = { }
- else
+ specification = { askedname = askedname, comment = "forbidden name" }
+ elseif specification.internal then
+ -- no filecheck needed
+ specification.found = true
+ if trace_figures then
+ report_inclusion("format %a internally supported by engine",specification.format)
+ end
+ elseif not rejected(specification) then
local format = specification.format
if format then
local conversion = specification.conversion
@@ -552,6 +672,9 @@ local function register(askedname,specification)
report_inclusion("no converter for %a to %a",format,newformat)
end
if converter then
+ --
+ -- todo: outline as helper function
+ --
local oldname = specification.fullname
local newpath = file.dirname(oldname)
local oldbase = file.basename(oldname)
@@ -567,20 +690,23 @@ local function register(askedname,specification)
--
local fc = specification.cache or figures.cachepaths.path
if fc and fc ~= "" and fc ~= "." then
- newpath = fc
+ newpath = gsub(fc,"%*",newpath) -- so cachedir can be "/data/cache/*"
else
newbase = defaultprefix .. newbase
end
- if not file.is_writable(newpath) then
- if trace_conversion then
- report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".")
- end
- newpath = "."
- end
local subpath = specification.subpath or figures.cachepaths.subpath
if subpath and subpath ~= "" and subpath ~= "." then
newpath = newpath .. "/" .. subpath
end
+ if not lfs.isdir(newpath) then
+ dir.makedirs(newpath)
+ if not file.is_writable(newpath) then
+ if trace_conversion then
+ report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".")
+ end
+ newpath = "."
+ end
+ end
local prefix = specification.prefix or figures.cachepaths.prefix
if prefix and prefix ~= "" then
newbase = prefix .. newbase
@@ -602,7 +728,6 @@ local function register(askedname,specification)
local newbase = newbase .. "." .. newformat
--
local newname = file.join(newpath,newbase)
- dir.makedirs(newpath)
oldname = collapsepath(oldname)
newname = collapsepath(newname)
local oldtime = lfs.attributes(oldname,'modification') or 0
@@ -626,7 +751,7 @@ local function register(askedname,specification)
format = newformat
if not figures_suffixes[format] then
-- maybe the new format is lowres.png (saves entry in suffixes)
- -- so let's do thsi extra check
+ -- so let's do this extra check
local suffix = file.suffix(newformat)
if figures_suffixes[suffix] then
if trace_figures then
@@ -636,29 +761,44 @@ local function register(askedname,specification)
end
end
elseif io.exists(oldname) then
- specification.fullname = oldname -- was newname
+ report_inclusion("file %a is bugged",oldname)
+ if format and validtypes[format] then
+ specification.fullname = oldname
+ end
specification.converted = false
+ specification.bugged = true
end
end
end
- local found = figures_suffixes[format] -- validtypes[format]
- if not found then
- specification.found = false
- if trace_figures then
- report_inclusion("format %a is not supported",format)
- end
- else
- specification.found = true
- if trace_figures then
- if validtypes[format] then -- format?
+ if format then
+ local found = figures_suffixes[format] -- validtypes[format]
+ if not found then
+ specification.found = false
+ if trace_figures then
+ report_inclusion("format %a is not supported",format)
+ end
+ elseif validtypes[format] then
+ specification.found = true
+ if trace_figures then
report_inclusion("format %a natively supported by backend",format)
- else
+ end
+ else
+ specification.found = true -- else no foo.1 mps conversion
+ if trace_figures then
report_inclusion("format %a supported by output file format",format)
end
end
+ else
+ specification.askedname = askedname
+ specification.found = false
end
end
- specification.foundname = specification.foundname or specification.fullname
+ if specification.found then
+ specification.foundname = specification.foundname or specification.fullname
+ else
+ specification.foundname = nil
+ end
+ specification.badname = figures.badname(askedname)
local askedhash = f_hash_part(askedname,specification.conversion or "default",specification.resolution or "default")
figures_found[askedhash] = specification
return specification
@@ -667,13 +807,16 @@ end
local resolve_too = false -- true
local internalschemes = {
- file = true,
+ file = true,
+ tree = true,
+ dirfile = true,
+ dirtree = true,
}
local function locate(request) -- name, format, cache
-- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf
-- todo: more restricted cleanpath
- local askedname = request.name
+ local askedname = request.name or ""
local askedhash = f_hash_part(askedname,request.conversion or "default",request.resolution or "default")
local foundname = figures_found[askedhash]
if foundname then
@@ -684,11 +827,18 @@ local function locate(request) -- name, format, cache
local askedconversion = request.conversion
local askedresolution = request.resolution
--
- if request.format == "" or request.format == "unknown" then
- request.format = nil
+ local askedformat = request.format
+ if not askedformat or askedformat == "" or askedformat == "unknown" then
+ askedformat = file.suffix(askedname) or ""
+ elseif askedformat == v_auto then
+ if trace_figures then
+ report_inclusion("ignoring suffix of %a",askedname)
+ end
+ askedformat = ""
+ askedname = file.removesuffix(askedname)
end
-- protocol check
- local hashed = url.hashed(askedname)
+ local hashed = urlhashed(askedname)
if not hashed then
-- go on
elseif internalschemes[hashed.scheme] then
@@ -697,6 +847,7 @@ local function locate(request) -- name, format, cache
askedname = path
end
else
+ -- local fname = methodhandler('finders',pathname .. "/" .. wantedfiles[k])
local foundname = resolvers.findbinfile(askedname)
if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy
if trace_figures then
@@ -705,7 +856,6 @@ local function locate(request) -- name, format, cache
-- url not found
return register(askedname)
end
- local askedformat = request.format or file.suffix(askedname) or ""
local guessedformat = figures.guess(foundname)
if askedformat ~= guessedformat then
if trace_figures then
@@ -728,9 +878,8 @@ local function locate(request) -- name, format, cache
end
end
-- we could use the hashed data instead
- local askedpath= file.is_rootbased_path(askedname)
+ local askedpath = file.is_rootbased_path(askedname)
local askedbase = file.basename(askedname)
- local askedformat = request.format or file.suffix(askedname) or ""
if askedformat ~= "" then
askedformat = lower(askedformat)
if trace_figures then
@@ -750,7 +899,7 @@ local function locate(request) -- name, format, cache
end
end
if format then
- local foundname, quitscanning, forcedformat = figures.exists(askedname,format,resolve_too) -- not askedformat
+ local foundname, quitscanning, forcedformat, internal = figures.exists(askedname,format,resolve_too) -- not askedformat
if foundname then
return register(askedname, {
askedname = askedname,
@@ -760,6 +909,7 @@ local function locate(request) -- name, format, cache
-- foundname = foundname, -- no
conversion = askedconversion,
resolution = askedresolution,
+ internal = internal,
})
elseif quitscanning then
return register(askedname)
@@ -784,7 +934,7 @@ local function locate(request) -- name, format, cache
else
-- type given
for i=1,#figure_paths do
- local path = figure_paths[i]
+ local path = resolveprefix(figure_paths[i]) -- we resolve (e.g. jobfile:)
local check = path .. "/" .. askedname
-- we pass 'true' as it can be an url as well, as the type
-- is given we don't waste much time
@@ -792,7 +942,7 @@ local function locate(request) -- name, format, cache
if foundname then
return register(check, {
askedname = askedname,
- fullname = check,
+ fullname = foundname, -- check,
format = askedformat,
cache = askedcache,
conversion = askedconversion,
@@ -850,9 +1000,9 @@ local function locate(request) -- name, format, cache
-- local name = file.replacesuffix(askedbase,suffix)
local name = file.replacesuffix(askedname,suffix)
for i=1,#figure_paths do
- local path = figure_paths[i]
+ local path = resolveprefix(figure_paths[i]) -- we resolve (e.g. jobfile:)
local check = path .. "/" .. name
- local isfile = url.hashed(check).scheme == "file"
+ local isfile = internalschemes[urlhashed(check).scheme]
if not isfile then
if trace_figures then
report_inclusion("warning: skipping path %a",path)
@@ -878,7 +1028,7 @@ local function locate(request) -- name, format, cache
report_inclusion("unknown format, using path strategy")
end
for i=1,#figure_paths do
- local path = figure_paths[i]
+ local path = resolveprefix(figure_paths[i]) -- we resolve (e.g. jobfile:)
for j=1,#figures_order do
local format = figures_order[j]
local list = figures_formats[format].list or { format }
@@ -942,7 +1092,7 @@ function identifiers.default(data)
du.fullname = fullname -- can be cached
ds.fullname = foundname -- original
ds.format = l.format
- ds.status = (l.found and 10) or 0
+ ds.status = (l.bugged and 0) or (l.found and 10) or 0
end
return data
end
@@ -952,8 +1102,8 @@ function figures.identify(data)
local list = identifiers.list -- defined at the end
for i=1,#list do
local identifier = list[i]
- data = identifier(data)
- if data.status.status > 0 then
+ local data = identifier(data)
+ if data and data.status and data.status.status > 0 then
break
end
end
@@ -969,12 +1119,57 @@ function figures.check(data)
return (checkers[data.status.format] or checkers.generic)(data)
end
+local trace_usage = false
+local used_images = { }
+
+trackers.register("graphics.usage", function(v)
+ if v and not trace_usage then
+ luatex.registerstopactions(function()
+ local found = { }
+ for _, t in table.sortedhash(figures_found) do
+ found[#found+1] = t
+ for k, v in next, t do
+ if v == false or v == "" then
+ t[k] = nil
+ end
+ end
+ end
+ for i=1,#used_images do
+ local u = used_images[i]
+ local s = u.status
+ if s then
+ s.status = nil -- doesn't say much here
+ if s.error then
+ u.used = { } -- better show that it's not used
+ end
+ end
+ for _, t in next, u do
+ for k, v in next, t do
+ if v == false or v == "" then
+ t[k] = nil
+ end
+ end
+ end
+ end
+ table.save(file.nameonly(environment.jobname) .. "-figures-usage.lua",{
+ found = found,
+ used = used_images,
+ } )
+ end)
+ trace_usage = true
+ end
+end)
+
function figures.include(data)
data = data or callstack[#callstack] or lastfiguredata
+ if trace_usage then
+ used_images[#used_images+1] = data
+ end
return (includers[data.status.format] or includers.generic)(data)
end
function figures.scale(data) -- will become lua code
+ data = data or callstack[#callstack] or lastfiguredata
context.doscalefigure()
return data
end
@@ -1011,11 +1206,15 @@ end
function existers.generic(askedname,resolve)
-- not findbinfile
local result
- if lfs.isfile(askedname) then
+ if hasscheme(askedname) then
+ result = resolvers.findbinfile(askedname)
+ elseif lfs.isfile(askedname) then
result = askedname
elseif resolve then
- result = resolvers.findbinfile(askedname) or ""
- if result == "" then result = false end
+ result = resolvers.findbinfile(askedname)
+ end
+ if not result or result == "" then
+ result = false
end
if trace_figures then
if result then
@@ -1029,11 +1228,11 @@ end
function checkers.generic(data)
local dr, du, ds = data.request, data.used, data.status
- local name = du.fullname or "unknown generic"
- local page = du.page or dr.page
- local size = dr.size or "crop"
+ local name = du.fullname or "unknown generic"
+ local page = du.page or dr.page
+ local size = dr.size or "crop"
local color = dr.color or "natural"
- local mask = dr.mask or "none"
+ local mask = dr.mask or "none"
local conversion = dr.conversion
local resolution = dr.resolution
if not conversion or conversion == "" then
@@ -1053,9 +1252,18 @@ function checkers.generic(data)
}
codeinjections.setfigurecolorspace(data,figure)
codeinjections.setfiguremask(data,figure)
- figure = figure and images.check(images.scan(figure)) or false
+ if figure then
+ local f, comment = images.check(images.scan(figure))
+ if not f then
+ ds.comment = comment
+ ds.found = false
+ ds.error = true
+ end
+ figure = f
+ end
local f, d = codeinjections.setfigurealternative(data,figure)
- figure, data = f or figure, d or data
+ figure = f or figure
+ data = d or data
figures_loaded[hash] = figure
if trace_conversion then
report_inclusion("new graphic, using hash %a",hash)
@@ -1081,12 +1289,19 @@ function checkers.generic(data)
return data
end
+local nofimages = 0
+local pofimages = { }
+
+function figures.getrealpage(index)
+ return pofimages[index] or 0
+end
+
function includers.generic(data)
local dr, du, ds = data.request, data.used, data.status
-- here we set the 'natural dimensions'
- dr.width = du.width
- dr.height = du.height
- local hash = figures.hash(data)
+ dr.width = du.width
+ dr.height = du.height
+ local hash = figures.hash(data)
local figure = figures_used[hash]
-- figures.registerresource {
-- filename = du.fullname,
@@ -1102,9 +1317,17 @@ function includers.generic(data)
figures_used[hash] = figure
end
if figure then
- local nr = figures.boxnumber
- -- it looks like we have a leak in attributes here .. todo
- local box = hpack(images.node(figure)) -- images.node(figure) not longer valid
+ local nr = figures.boxnumber
+ nofimages = nofimages + 1
+ ds.pageindex = nofimages
+ local image = images.node(figure)
+ local pager = new_latelua(function()
+ pofimages[nofimages] = pofimages[nofimages] or tex.count.realpageno -- so when reused we register the first one only
+ end)
+ image.next = pager
+ pager.prev = image
+ local box = hpack(image) -- images.node(figure) not longer valid
+
indexed[figure.index] = figure
box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet)
texsetbox(nr,box)
@@ -1178,6 +1401,8 @@ includers.mov = includers.nongeneric
internalschemes.mprun = true
+-- mprun.foo.1 mprun.6 mprun:foo.2
+
local function internal(askedname)
local spec, mprun, mpnum = match(lower(askedname),"mprun([:%.]?)(.-)%.(%d+)")
if spec ~= "" then
@@ -1190,7 +1415,7 @@ end
function existers.mps(askedname)
local mprun, mpnum = internal(askedname)
if mpnum then
- return askedname
+ return askedname, true, "mps", true
else
return existers.generic(askedname)
end
@@ -1211,7 +1436,7 @@ includers.mps = includers.nongeneric
function existers.tex(askedname)
askedname = resolvers.findfile(askedname)
- return askedname ~= "" and askedname or false
+ return askedname ~= "" and askedname or false, true, "tex", true
end
function checkers.tex(data)
@@ -1225,7 +1450,7 @@ includers.tex = includers.nongeneric
function existers.buffer(askedname)
local name = file.nameonly(askedname)
local okay = buffers.exists(name)
- return okay and name, true -- always quit scanning
+ return okay and name, true, "buffer", true -- always quit scanning
end
function checkers.buffer(data)
@@ -1252,7 +1477,10 @@ includers.auto = includers.generic
-- -- -- cld -- -- --
-existers.cld = existers.tex
+function existers.cld(askedname)
+ askedname = resolvers.findfile(askedname)
+ return askedname ~= "" and askedname or false, true, "cld", true
+end
function checkers.cld(data)
return checkers_nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end)
@@ -1270,20 +1498,38 @@ end
-- programs.makeoptions = makeoptions
local function runprogram(binary,argument,variables)
- local binary = match(binary,"[%S]+") -- to be sure
+ -- move this check to the runner code
+ local found = nil
+ if type(binary) == "table" then
+ for i=1,#binary do
+ local b = binary[i]
+ found = os.which(b)
+ if found then
+ binary = b
+ break
+ end
+ end
+ if not found then
+ binary = concat(binary, " | ")
+ end
+ elseif binary then
+ found = os.which(match(binary,"[%S]+"))
+ end
if type(argument) == "table" then
argument = concat(argument," ") -- for old times sake
end
- if not os.which(binary) then
- report_inclusion("program %a is not installed, not running command: %s",binary,command)
+ if not found then
+ report_inclusion("program %a is not installed",binary or "?")
elseif not argument or argument == "" then
- report_inclusion("nothing to run, unknown program %a",binary)
+ report_inclusion("nothing to run, no arguments for program %a",binary)
else
- local command = format([["%s" %s]],binary,replacetemplate(longtostring(argument),variables))
+ -- no need to use the full found filename (found) .. we also don't quote the program
+ -- name any longer as in luatex there is too much messing with these names
+ local command = format([[%s %s]],binary,replacetemplate(longtostring(argument),variables))
if trace_conversion or trace_programs then
report_inclusion("running command: %s",command)
end
- os.spawn(command)
+ os.execute(command)
end
end
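
-- runprogram above now also accepts a table of candidate binaries (as used
-- below for ghostscript on windows: gswin64c, then gswin32c) and picks the
-- first one that is actually installed.  A standalone sketch of that
-- selection, with a stubbed 'which' and a made-up 'installed' table instead
-- of os.which, so it runs anywhere:

local installed = { gswin64c = false, gswin32c = true, gs = true }

local function which(name)   -- stand-in for os.which
    return installed[name] and name or nil
end

local function findbinary(binary)
    if type(binary) == "table" then
        for i=1,#binary do
            if which(binary[i]) then
                return binary[i]
            end
        end
        return nil, table.concat(binary," | ")   -- for the error report
    else
        return which(binary), binary
    end
end

print(findbinary { "gswin64c", "gswin32c" })   -- gswin32c
print(findbinary "gs")                         -- gs      gs
print(findbinary { "nonexistent" })            -- nil     nonexistent
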
@@ -1298,13 +1544,15 @@ local epsconverter = converters.eps or { }
converters.eps = epsconverter
converters.ps = epsconverter
+-- todo: colorspace
+
local epstopdf = {
resolutions = {
[v_low] = "screen",
[v_medium] = "ebook",
[v_high] = "prepress",
},
- command = os.type == "windows" and "gswin32c" or "gs",
+ command = os.type == "windows" and { "gswin64c", "gswin32c" } or "gs",
-- -dProcessDSCComments=false
argument = [[
-q
@@ -1315,8 +1563,10 @@ local epstopdf = {
-dAutoRotatePages=/None
-dPDFSETTINGS=/%presets%
-dEPSCrop
- -sOutputFile=%newname%
- %oldname%
+ -dCompatibilityLevel=%level%
+ -sOutputFile="%newname%"
+ %colorspace%
+ "%oldname%"
-c quit
]],
}
@@ -1324,14 +1574,66 @@ local epstopdf = {
programs.epstopdf = epstopdf
programs.gs = epstopdf
-function epsconverter.pdf(oldname,newname,resolution) -- the resolution interface might change
+local cleanups = { }
+local cleaners = { }
+
+local whitespace = lpeg.patterns.whitespace
+local quadruple = Ct((whitespace^0 * lpeg.patterns.number/tonumber * whitespace^0)^4)
+local betterbox = P("%%BoundingBox:") * quadruple
+ * P("%%HiResBoundingBox:") * quadruple
+ * P("%AI3_Cropmarks:") * quadruple
+ * P("%%CropBox:") * quadruple
+ / function(b,h,m,c)
+ return formatters["%%%%BoundingBox: %i %i %i %i\n%%%%HiResBoundingBox: %F %F %F %F\n%%%%CropBox: %F %F %F %F\n"](
+ m[1],m[2],m[3],m[4],
+ m[1],m[2],m[3],m[4],
+ m[1],m[2],m[3],m[4]
+ )
+ end
+local nocrap = P("%") / "" * (
+ (P("AI9_PrivateDataBegin") * P(1)^0) / "%%%%EOF"
+ + (P("%EOF") * whitespace^0 * P("%AI9_PrintingDataEnd") * P(1)^0) / "%%%%EOF"
+ + (P("AI7_Thumbnail") * (1-P("%%EndData"))^0 * P("%%EndData")) / ""
+ )
+local whatever = nocrap + P(1)
+local pattern = Cs((betterbox * whatever^1 + whatever)^1)
+
+directives.register("graphics.conversion.eps.cleanup.ai",function(v) cleanups.ai = v end)
+
+cleaners.ai = function(name)
+ local tmpname = name .. ".tmp"
+ io.savedata(tmpname,lpegmatch(pattern,io.loaddata(name)))
+ return tmpname
+end
+
+function epsconverter.pdf(oldname,newname,resolution,colorspace) -- the resolution interface might change
local epstopdf = programs.epstopdf -- can be changed
- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+ local presets = epstopdf.resolutions[resolution or "high"] or epstopdf.resolutions.high
+ local level = codeinjections.getformatoption("pdf_level") or "1.3"
+ local tmpname = oldname
+ if cleanups.ai then
+ tmpname = cleaners.ai(oldname)
+ end
+ if colorspace == "gray" then
+ colorspace = "-sColorConversionStrategy=Gray -sProcessColorModel=DeviceGray"
+ -- colorspace = "-sColorConversionStrategy=Gray"
+ else
+ colorspace = nil
+ end
runprogram(epstopdf.command, epstopdf.argument, {
- newname = newname,
- oldname = oldname,
- presets = presets,
+ newname = newname,
+ oldname = tmpname,
+ presets = presets,
+ level = tostring(level),
+ colorspace = colorspace,
} )
+ if tmpname ~= oldname then
+ os.remove(tmpname)
+ end
+end
+
+epsconverter["gray.pdf"] = function(oldname,newname,resolution) -- the resolution interface might change
+ epsconverter.pdf(oldname,newname,resolution,"gray")
end
epsconverter.default = epsconverter.pdf
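
-- epsconverter.pdf above fills the %...% placeholders in the ghostscript
-- argument template (presets, level, newname, oldname, colorspace) before
-- running the command.  A standalone sketch of such template filling with
-- a plain gsub standing in for utilities.templates.replace; the values
-- below are illustrative only:

local template = [[-q -dPDFSETTINGS=/%presets% -dCompatibilityLevel=%level% -sOutputFile="%newname%" %colorspace% "%oldname%" -c quit]]

local function replacetemplate(str,variables)
    return (str:gsub("%%(%a+)%%", function(key)
        return variables[key] or ""
    end))
end

print(replacetemplate(template, {
    presets = "prepress",
    level   = "1.4",
    newname = "figure.pdf",
    oldname = "figure.eps",
    -- colorspace left out: the placeholder is simply dropped
}))
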
@@ -1339,22 +1641,23 @@ epsconverter.default = epsconverter.pdf
local pdfconverter = converters.pdf or { }
converters.pdf = pdfconverter
-programs.pdftoeps = {
- command = "pdftops",
- argument = [[-eps "%oldname%" "%newname%]],
-}
-
-pdfconverter.stripped = function(oldname,newname)
- local pdftoeps = programs.pdftoeps -- can be changed
- local epstopdf = programs.epstopdf -- can be changed
- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
- local tmpname = newname .. ".tmp"
- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
- os.remove(tmpname)
-end
-
-figures.registersuffix("stripped","pdf")
+-- programs.pdftoeps = {
+-- command = "pdftops",
+-- argument = [[-eps "%oldname%" "%newname%"]],
+-- }
+--
+-- pdfconverter.stripped = function(oldname,newname)
+-- local pdftoeps = programs.pdftoeps -- can be changed
+-- local epstopdf = programs.epstopdf -- can be changed
+-- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+-- local level = codeinjections.getformatoption("pdf_level") or "1.3"
+-- local tmpname = newname .. ".tmp"
+-- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets, level = level })
+-- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets, level = level })
+-- os.remove(tmpname)
+-- end
+--
+-- figures.registersuffix("stripped","pdf")
-- -- -- svg -- -- --
@@ -1431,14 +1734,111 @@ bmpconverter.default = converter
-- todo: lowres
+-- cmyk conversion
+
+-- ecirgb_v2.icc
+-- ecirgb_v2_iccv4.icc
+-- isocoated_v2_300_eci.icc
+-- isocoated_v2_eci.icc
+-- srgb.icc
+-- srgb_v4_icc_preference.icc
+
+-- [[convert %?colorspace: -colorspace "%colorspace%" ?%]]
+
+local rgbprofile = "srgb_v4_icc_preference.icc" -- srgb.icc
+local cmykprofile = "isocoated_v2_300_eci.icc" -- isocoated_v2_eci.icc
+
+directives.register("graphics.conversion.rgbprofile", function(v) rgbprofile = type(v) == "string" and v or rgbprofile end)
+directives.register("graphics.conversion.cmykprofile",function(v) cmykprofile = type(v) == "string" and v or cmykprofile end)
+
+local function profiles()
+ if not lfs.isfile(rgbprofile) then
+ local found = resolvers.findfile(rgbprofile)
+ if found and found ~= "" then
+ rgbprofile = found
+ else
+ report_figures("unknown profile %a",rgbprofile)
+ end
+ end
+ if not lfs.isfile(cmykprofile) then
+ local found = resolvers.findfile(cmykprofile)
+ if found and found ~= "" then
+ cmykprofile = found
+ else
+ report_figures("unknown profile %a",cmykprofile)
+ end
+ end
+ return rgbprofile, cmykprofile
+end
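+
+-- A hedged example of overriding the defaults (the profile names are taken from
+-- the list above; any icc file that the resolver can locate will do):
+--
+--   context --directives="graphics.conversion.rgbprofile=srgb.icc" myfile.tex
+--
+-- or, from within a document:
+--
+--   \enabledirectives[graphics.conversion.cmykprofile=isocoated_v2_eci.icc]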
+
+programs.pngtocmykpdf = {
+ command = "gm",
+ argument = [[convert -compress Zip -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -sampling-factor 1x1 "%oldname%" "%newname%"]],
+}
+
+programs.jpgtocmykpdf = {
+ command = "gm",
+ argument = [[convert -compress JPEG -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -sampling-factor 1x1 "%oldname%" "%newname%"]],
+}
+
+programs.pngtograypdf = {
+ command = "gm",
+ argument = [[convert -colorspace gray -compress Zip -sampling-factor 1x1 "%oldname%" "%newname%"]],
+}
+
+programs.jpgtograypdf = {
+ command = "gm",
+ argument = [[convert -colorspace gray -compress Zip -sampling-factor 1x1 "%oldname%" "%newname%"]],
+}
+
+figures.converters.png = {
+ ["cmyk.pdf"] = function(oldname,newname,resolution)
+ local rgbprofile, cmykprofile = profiles()
+ runprogram(programs.pngtocmykpdf.command, programs.pngtocmykpdf.argument, {
+ -- runprogram(programs.pngtocmykpdf, {
+ rgbprofile = rgbprofile,
+ cmykprofile = cmykprofile,
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+ ["gray.pdf"] = function(oldname,newname,resolution)
+ runprogram(programs.pngtograypdf.command, programs.pngtograypdf.argument, {
+ -- runprogram(programs.pngtograypdf, {
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+}
+
+figures.converters.jpg = {
+ ["cmyk.pdf"] = function(oldname,newname,resolution)
+ local rgbprofile, cmykprofile = profiles()
+ runprogram(programs.jpgtocmykpdf.command, programs.jpgtocmykpdf.argument, {
+ -- runprogram(programs.jpgtocmykpdf, {
+ rgbprofile = rgbprofile,
+ cmykprofile = cmykprofile,
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+ ["gray.pdf"] = function(oldname,newname,resolution)
+ runprogram(programs.jpgtograypdf.command, programs.jpgtograypdf.argument, {
+ -- runprogram(programs.jpgtograypdf, {
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+}
+
-- -- -- bases -- -- --
local bases = allocate()
figures.bases = bases
-local bases_list = nil -- index => { basename, fullname, xmlroot }
-local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list
-local bases_found = nil
+local bases_list = nil -- index => { basename, fullname, xmlroot }
+local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list
+local bases_found = nil
local bases_enabled = false
local function reset()
@@ -1473,6 +1873,8 @@ function bases.use(basename)
end
end
+implement { name = "usefigurebase", actions = bases.use, arguments = "string" }
+
local function bases_find(basename,askedlabel)
if trace_bases then
report_inclusion("checking for %a in base %a",askedlabel,basename)
@@ -1485,7 +1887,7 @@ local function bases_find(basename,askedlabel)
if base[2] == nil then
 -- not yet located
for i=1,#figure_paths do
- local path = figure_paths[i]
+ local path = resolveprefix(figure_paths[i]) -- we resolve (e.g. jobfile:)
local xmlfile = path .. "/" .. basename
if io.exists(xmlfile) then
base[2] = xmlfile
@@ -1503,10 +1905,10 @@ local function bases_find(basename,askedlabel)
page = page + 1
if xml.text(e) == askedlabel then
t = {
- base = file.replacesuffix(base[2],"pdf"),
+ base = file.replacesuffix(base[2],"pdf"),
format = "pdf",
- name = xml.text(e,"../*:file"), -- to be checked
- page = page,
+ name = xml.text(e,"../*:file"), -- to be checked
+ page = page,
}
bases_found[askedlabel] = t
if trace_bases then
@@ -1541,13 +1943,13 @@ function identifiers.base(data)
local dr, du, ds = data.request, data.used, data.status
local fbl = bases_locate(dr.name or dr.label)
if fbl then
- du.page = fbl.page
- du.format = fbl.format
+ du.page = fbl.page
+ du.format = fbl.format
du.fullname = fbl.base
ds.fullname = fbl.name
- ds.format = fbl.format
- ds.page = fbl.page
- ds.status = 10
+ ds.format = fbl.format
+ ds.page = fbl.page
+ ds.status = 10
end
end
return data
@@ -1566,7 +1968,15 @@ identifiers.list = {
statistics.register("graphics processing time", function()
local nofprocessed = figures.nofprocessed
if nofprocessed > 0 then
- return format("%s seconds including tex, %s processed images", statistics.elapsedtime(figures),nofprocessed)
+ local nofnames, nofbadnames = 0, 0
+ for hash, data in next, figures_found do
+ nofnames = nofnames + 1
+ if data.badname then
+ nofbadnames = nofbadnames + 1
+ end
+ end
+ return format("%s seconds including tex, %s processed images, %s unique asked, %s bad names",
+ statistics.elapsedtime(figures),nofprocessed,nofnames,nofbadnames)
else
return nil
end
@@ -1618,4 +2028,59 @@ end
-- interfacing
-commands.setfigurelookuporder = figures.setorder
+implement {
+ name = "figure_push",
+ scope = "private",
+ actions = figures.push,
+ arguments = {
+ {
+ { "name" },
+ { "label" },
+ { "page" },
+ { "size" },
+ { "object" },
+ { "prefix" },
+ { "cache" },
+ { "format" },
+ { "preset" },
+ { "controls" },
+ { "resources" },
+ { "preview" },
+ { "display" },
+ { "mask" },
+ { "conversion" },
+ { "resolution" },
+ { "color" },
+ { "repeat" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ }
+ }
+}
+
+-- beware, we get a number passed by default
+
+implement { name = "figure_pop", scope = "private", actions = figures.pop }
+implement { name = "figure_done", scope = "private", actions = figures.done }
+implement { name = "figure_dummy", scope = "private", actions = figures.dummy }
+implement { name = "figure_identify", scope = "private", actions = figures.identify }
+implement { name = "figure_scale", scope = "private", actions = figures.scale }
+implement { name = "figure_check", scope = "private", actions = figures.check }
+implement { name = "figure_include", scope = "private", actions = figures.include }
+
+implement {
+ name = "setfigurelookuporder",
+ actions = figures.setorder,
+ arguments = "string"
+}
+
+implement {
+ name = "figure_reset",
+ scope = "private",
+ arguments = { "integer", "dimen", "dimen" },
+ actions = function(box,width,height)
+ figures.boxnumber = box
+ figures.defaultwidth = width
+ figures.defaultheight = height
+ end
+}
diff --git a/tex/context/base/grph-inc.mkiv b/tex/context/base/grph-inc.mkiv
index e8b63cc4b..6b7f2bd63 100644
--- a/tex/context/base/grph-inc.mkiv
+++ b/tex/context/base/grph-inc.mkiv
@@ -11,6 +11,11 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% \enabledirectives[graphics.conversion.eps.cleanup.ai]
+
+% \setupexternalfigures[directory=dirfile://./test/**]
+% \externalfigure[crappname(2).eps][frame=on]
+
% todo: messages
\writestatus{loading}{ConTeXt Graphic Macros / Figure Inclusion}
@@ -147,6 +152,11 @@
%D the already set parameters need to be set again or otherwise
%D the old values will be used.
+%D New: \type {method=auto}: strips the suffix and uses \quote {order}, which is
+%D handy in some of our workflows where sources are used for web and print and
+%D where the web tools need a suffix (like gif) that we don't want because we
+%D prefer a high quality format.
+
\newconditional\c_grph_include_trace_inheritance
\installtextracker
@@ -174,14 +184,14 @@
\def\grph_include_use[#1][#2][#3][#4]%
{\doifelsenothing{#1}
{\doifsomething{#2}
- {\doifassignmentelse{#3}
+ {\doifelseassignment{#3}
{\grph_include_use_indeed{#2}{#2}{#3}{#4}}
{\grph_include_use_indeed{#2}{#2}\empty{#4}}}}
{\doifelsenothing{#2}
- {\doifassignmentelse{#3}
+ {\doifelseassignment{#3}
{\grph_include_use_indeed{#1}{#1}\empty{#3}}
{\grph_include_use_indeed{#1}{#1}{#3}{#4}}}
- {\doifassignmentelse{#3}
+ {\doifelseassignment{#3}
{\grph_include_use_indeed{#1}{#2}\empty{#3}}
{\grph_include_use_indeed{#1}{#2}{#3}{#4}}}}}
@@ -300,56 +310,56 @@
\edef\p_label {\externalfigureparameter\c!label}%
%
\dostarttagged\t!image\empty
- \ctxlua{figures.push {
- name = "\p_grph_include_name",
- label = "\ifx\p_label\empty\p_grph_include_label\else\p_label\fi",
- page = "\externalfigureparameter\c!page",
- size = "\externalfigureparameter\c!size",
- object = "\externalfigureparameter\c!object",
- prefix = "\externalfigureparameter\c!prefix",
- cache = "\externalfigureparameter\c!cache",
- format = "\externalfigureparameter\c!method",
- preset = "\externalfigureparameter\c!prefix",
- controls = "\externalfigureparameter\c!controls",
- resources = "\externalfigureparameter\c!resources",
- preview = "\externalfigureparameter\c!preview",
- display = "\externalfigureparameter\c!display",
- mask = "\externalfigureparameter\c!mask",
- conversion = "\externalfigureparameter\c!conversion",
- resolution = "\externalfigureparameter\c!resolution",
- color = "\internalspotcolorparent{\externalfigureparameter\c!color}", % hack is needed
- ["repeat"] = "\externalfigureparameter\c!repeat",
- \ifx\p_width\empty \else
- width = \number\dimexpr\p_width,
- \fi
- \ifx\p_height\empty \else
- height = \number\dimexpr\p_height,
- \fi
- } }%
- \ctxlua{figures.identify()}%
- % also mode: checkpresense only
+ \clf_figure_push
+ name {\p_grph_include_name}%
+ label {\ifx\p_label\empty\p_grph_include_label\else\p_label\fi}%
+ page {\externalfigureparameter\c!page}%
+ size {\externalfigureparameter\c!size}%
+ object {\externalfigureparameter\c!object}%
+ prefix {\externalfigureparameter\c!prefix}%
+ cache {\externalfigureparameter\c!cache}%
+ format {\externalfigureparameter\c!method}%
+ preset {\externalfigureparameter\c!prefix}%
+ controls {\externalfigureparameter\c!controls}%
+ resources {\externalfigureparameter\c!resources}%
+ preview {\externalfigureparameter\c!preview}%
+ display {\externalfigureparameter\c!display}%
+ mask {\externalfigureparameter\c!mask}%
+ conversion {\externalfigureparameter\c!conversion}%
+ resolution {\externalfigureparameter\c!resolution}%
+ color {\internalspotcolorparent{\externalfigureparameter\c!color}}% hack is needed
+ repeat {\externalfigureparameter\c!repeat}%
+ \ifx\p_width\empty \else
+ width \dimexpr\p_width\relax
+ \fi
+ \ifx\p_height\empty \else
+ height \dimexpr\p_height\relax
+ \fi
+ \relax
+ \clf_figure_identify
+ \relax
\ifconditional\c_grph_include_test_only
\ifcase\figurestatus \else
- \ctxlua{figures.check()}%
- \ctxlua{figures.dummy()}%
- \ctxlua{figures.scale()}%
- \ctxlua{figures.done()}%
+ \clf_figure_check
+ \clf_figure_dummy
+ \clf_figure_scale
+ \clf_figure_done
\fi
\grph_include_set_mode
\else
\ifcase\figurestatus
- \ctxlua{figures.dummy()}%
- \ctxlua{figures.scale()}%
+ \clf_figure_dummy
+ \clf_figure_scale
\else
- \ctxlua{figures.check()}%
- \ctxlua{figures.include()}%
- \ctxlua{figures.scale()}%
+ \clf_figure_check
+ \clf_figure_include
+ \clf_figure_scale
\fi
- \ctxlua{figures.done()}%
+ \clf_figure_done
\grph_include_set_mode
\grph_include_finalize
\fi
- \ctxlua{figures.pop()}%
+ \clf_figure_pop
\dotagfigure
\naturalvbox attr \imageattribute 2 {\box\foundexternalfigure}%
\dostoptagged
@@ -416,13 +426,13 @@
{\dodoubleargument\grph_include_set_type_synonym}
\def\grph_include_set_type_synonym[#1][#2]%
- {\ctxlua{figures.registersuffix("#1","#2")}}
+ {\clf_registerfiguresuffix{#1}{#2}}
%D Additional paths can be installed with the regular setup command. The next
%D macro picks up the list.
\unexpanded\def\setfigurepathlist
- {\ctxlua{figures.setpaths("\externalfigureparameter\c!location",\!!bs\externalfigureparameter\c!directory\!!es)}}
+ {\clf_setfigurepaths{\externalfigureparameter\c!location}{\externalfigureparameter\c!directory}}
%D Variables:
@@ -432,36 +442,36 @@
\def\defaultfigurewidth {8\lineheight}
\def\defaultfigureheight {6\lineheight}
-\def\figurestatus {\numexpr\ctxcommand{figurestatus("status",0)}\relax} % number: 0 = not found
-\def\figurewidth {\ctxcommand{figurestatus("width",0)}sp}
-\def\figureheight {\ctxcommand{figurestatus("height",0)}sp}
-\def\figurexscale {\ctxcommand{figurestatus("xscale",1)}}
-\def\figureyscale {\ctxcommand{figurestatus("yscale",1)}}
-
-\def\figuresize {\ctxcommand{figurerequest("size")}}
-\def\figurelabel {\ctxcommand{figurerequest("label")}}
-\def\figurefileoriginal {\ctxcommand{figurerequest("name")}}
-\def\figurefilepage {\ctxcommand{figurerequest("page",1)}}
-\def\figurefileoptions {\ctxcommand{figurerequest("options")}}
-\def\figurefileconversion{\ctxcommand{figurerequest("conversion")}}
-\def\figurefilecache {\ctxcommand{figurerequest("cache")}}
-\def\figurefileprefix {\ctxcommand{figurerequest("prefix")}}
-
-\def\figurenaturalwidth {\ctxcommand{figureused("width", \number\dimexpr\defaultfigurewidth \relax)}sp}
-\def\figurenaturalheight {\ctxcommand{figureused("height",\number\dimexpr\defaultfigureheight\relax)}sp}
-\def\figurexresolution {\ctxcommand{figureused("xresolution",0)}}
-\def\figureyresolution {\ctxcommand{figureused("yresolution",0)}}
-\def\figurexsize {\ctxcommand{figureused("xsize",0)}}
-\def\figureysize {\ctxcommand{figureused("ysize",0)}}
-\def\figurecolordepth {\ctxcommand{figureused("colordepth",0)}}
-\def\figuredepth {\ctxcommand{figureused("depth",0)}}
-
-\def\figurefullname {\ctxcommand{figureused("fullname")}}
-\def\noffigurepages {\ctxcommand{figureused("pages",0)}}
-
-\def\figurefilepath {\ctxcommand{figurefilepath()}}
-\def\figurefilename {\ctxcommand{figurefilename()}}
-\def\figurefiletype {\ctxcommand{figurefiletype()}}
+\def\figurestatus {\numexpr\clf_figurestatus{status}{0}\relax} % number: 0 = not found
+\def\figurewidth {\clf_figurestatus{width}{0}sp}
+\def\figureheight {\clf_figurestatus{height}{0}sp}
+\def\figurexscale {\clf_figurestatus{xscale}{1}}
+\def\figureyscale {\clf_figurestatus{yscale}{1}}
+
+\def\figuresize {\clf_figurerequest{size}{}}
+\def\figurelabel {\clf_figurerequest{label}{}}
+\def\figurefileoriginal {\clf_figurerequest{name}{}}
+\def\figurefilepage {\clf_figurerequest{page}{1}}
+\def\figurefileoptions {\clf_figurerequest{options}{}}
+\def\figurefileconversion{\clf_figurerequest{conversion}{}}
+\def\figurefilecache {\clf_figurerequest{cache}{}}
+\def\figurefileprefix {\clf_figurerequest{prefix}{}}
+
+\def\figurenaturalwidth {\clf_figureused{width}{\number\dimexpr\defaultfigurewidth\relax}sp}
+\def\figurenaturalheight {\clf_figureused{height}{\number\dimexpr\defaultfigureheight\relax}sp}
+\def\figurexresolution {\clf_figureused{xresolution}{0}}
+\def\figureyresolution {\clf_figureused{yresolution}{0}}
+\def\figurexsize {\clf_figureused{xsize}{0}}
+\def\figureysize {\clf_figureused{ysize}{0}}
+\def\figurecolordepth {\clf_figureused{colordepth}{0}}
+\def\figuredepth {\clf_figureused{depth}{0}}
+
+\def\figurefullname {\clf_figureused{fullname}{}}
+\def\noffigurepages {\clf_figureused{pages}{0}}
+
+\def\figurefilepath {\clf_figurefilepath}
+\def\figurefilename {\clf_figurefilename}
+\def\figurefiletype {\clf_figurefiletype}
\let\naturalfigurewidth \figurenaturalwidth
\let\naturalfigureheight \figurenaturalheight
@@ -500,12 +510,14 @@
\fi}
\appendtoks
- \ctxlua { % figures.defaultwidth .. maybe a dimen some day
- figures.setpaths("\externalfigureparameter\c!location","\externalfigureparameter\c!directory") ;
- figures.defaultwidth = \number\dimexpr\defaultfigurewidth \relax ;
- figures.defaultheight = \number\dimexpr\defaultfigureheight\relax ;
- figures.boxnumber = \number\foundexternalfigure ;
- }%
+ \clf_setfigurepaths
+ {\externalfigureparameter\c!location}%
+ {\externalfigureparameter\c!directory}%
+ \clf_figure_reset
+ \foundexternalfigure
+ \defaultfigurewidth
+ \defaultfigureheight
+ \relax
\to \everyexternalfigureresets
\appendtoks
@@ -629,7 +641,7 @@
\externalfigure[#1][#2,\c!display=,\c!mask=,\c!object=\v!no]%
\stopnointerference}
-\unexpanded\def\doiffigureelse#1%
+\unexpanded\def\doifelsefigure#1%
{\getfiguredimensions[#1]% so data is available !
\ifcase\figurestatus
\expandafter\secondoftwoarguments
@@ -637,6 +649,8 @@
\expandafter\firstoftwoarguments
\fi}
+\let\doiffigureelse\doifelsefigure
+
% No placement, handy for preprocessing:
\unexpanded\def\registerexternalfigure
@@ -661,14 +675,14 @@
% Figure bases
\unexpanded\def\usefigurebase[#1]%
- {\ctxlua{figures.bases.use("#1")}}
+ {\clf_usefigurebase{#1}}
\appendtoks
\setfigurepathlist % the path may be used elsewhere too (as in x-res-04)
\to \everysetupexternalfigure
\appendtoks
- \ctxcommand{setfigurelookuporder("\externalfigureparameter\c!order")}%
+ \clf_setfigurelookuporder{\externalfigureparameter\c!order}%
\to \everysetupexternalfigure
\definecolor[missingfigurecolor][s=.8]
@@ -762,7 +776,7 @@
{\writestatus\m!system{the \string\showexternalfigures\space command is not (yet) implemented in mkiv}}
\unexpanded\def\overlayfigure#1%
- {\externalfigure[#1][\c!width=\overlaywidth,\c!height=\overlayheight]}
+ {\externalfigure[#1][\c!width=\d_overlay_width,\c!height=\d_overlay_height]}
% Bonus:
diff --git a/tex/context/base/grph-raw.lua b/tex/context/base/grph-raw.lua
index 4c5b031ea..62e96fcc9 100644
--- a/tex/context/base/grph-raw.lua
+++ b/tex/context/base/grph-raw.lua
@@ -40,3 +40,18 @@ function figures.bitmapimage(t)
report_bitmap("invalid specification")
end
end
+
+interfaces.implement {
+ name = "bitmapimage",
+ actions = figures.bitmapimage,
+ arguments = {
+ {
+ { "data" },
+ { "colorspace" },
+ { "width" },
+ { "height" },
+ { "xresolution" },
+ { "yresolution" },
+ }
+ }
+}
diff --git a/tex/context/base/grph-raw.mkiv b/tex/context/base/grph-raw.mkiv
index 1c6835564..8978ba267 100644
--- a/tex/context/base/grph-raw.mkiv
+++ b/tex/context/base/grph-raw.mkiv
@@ -46,14 +46,14 @@
\unexpanded\def\bitmapimage[#1]#2%
{\hbox\bgroup
\getdummyparameters[\c!color=rgb,\c!width=,\c!height=,\c!x=,\c!y=,#1]%
- \ctxlua{figures.bitmapimage { % we could pass #1 directly ... todo
- data = \!!bs#2\!!es,
- colorspace = "\directdummyparameter\c!color",
- width = "\directdummyparameter\c!width",
- height = "\directdummyparameter\c!height",
- xresolution = "\directdummyparameter\c!x",
- yresolution = "\directdummyparameter\c!y",
- }}%
+ \clf_bitmapimage
+ data {#2}%
+ colorspace {\directdummyparameter\c!color}%
+ width {\directdummyparameter\c!width}%
+ height {\directdummyparameter\c!height}%
+ xresolution {\directdummyparameter\c!x}%
+ yresolution {\directdummyparameter\c!y}%
+ \relax
\egroup}
\unexpanded\def\startbitmapimage[#1]#2\stopbitmapimage
diff --git a/tex/context/base/grph-trf.mkiv b/tex/context/base/grph-trf.mkiv
index d907c1b0c..fca5c7cf6 100644
--- a/tex/context/base/grph-trf.mkiv
+++ b/tex/context/base/grph-trf.mkiv
@@ -108,7 +108,7 @@
\edef\currentscale{#1}%
\setupcurrentscale[#2]%
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\let\currentscale\empty
\setupcurrentscale[#1]}
{\edef\currentscale{#1}}%
@@ -318,11 +318,11 @@
\def\m_grph_scale_factor_set{\v!max,\v!fit,\v!broad,\v!auto} % can be an \edef
\def\grph_scale_by_factor
- {\doifinsetelse\p_factor\m_grph_scale_factor_set
+ {\doifelseinset\p_factor\m_grph_scale_factor_set
\grph_scale_by_factor_a
- {\doifinsetelse\p_hfactor\m_grph_scale_factor_set
+ {\doifelseinset\p_hfactor\m_grph_scale_factor_set
\grph_scale_by_factor_b
- {\doifinsetelse\p_wfactor\m_grph_scale_factor_set
+ {\doifelseinset\p_wfactor\m_grph_scale_factor_set
\grph_scale_by_factor_c
\grph_scale_by_factor_d}}}
@@ -1080,18 +1080,18 @@
{\boxcursor\box\nextbox}
\setvalue{\??rotatepreset\v!left}%
- {\edef\p_rotation_rotation{\doifoddpageelse{90}{270}}}
+ {\edef\p_rotation_rotation{\doifelseoddpage{90}{270}}}
\setvalue{\??rotatepreset\v!right}%
- {\edef\p_rotation_rotation{\doifoddpageelse{270}{90}}}
+ {\edef\p_rotation_rotation{\doifelseoddpage{270}{90}}}
\setvalue{\??rotatepreset\v!inner}%
{\signalrightpage
- \doifrightpageelse{\def\p_rotation_rotation{270}}{\def\p_rotation_rotation{90}}}
+ \doifelserightpage{\def\p_rotation_rotation{270}}{\def\p_rotation_rotation{90}}}
\setvalue{\??rotatepreset\v!outer}%
{\signalrightpage
- \doifrightpageelse{\def\p_rotation_rotation{90}}{\def\p_rotation_rotation{270}}}
+ \doifelserightpage{\def\p_rotation_rotation{90}}{\def\p_rotation_rotation{270}}}
\setvalue{\??rotatepreset\v!default}%
{\edef\p_rotation_rotation{\realnumber{\p_rotation_rotation}}}% get rid of leading zeros and spaces
diff --git a/tex/context/base/hand-ini.mkiv b/tex/context/base/hand-ini.mkiv
index 450794d27..fd18e3221 100644
--- a/tex/context/base/hand-ini.mkiv
+++ b/tex/context/base/hand-ini.mkiv
@@ -42,8 +42,8 @@
\unexpanded\def\setupfontexpansion {\dodoubleargument\font_expansion_setup }
\unexpanded\def\setupfontprotrusion{\dodoubleargument\font_protrusion_setup}
-\def\font_expansion_setup [#1][#2]{\ctxcommand{setupfontexpansion ("#1","#2")}}
-\def\font_protrusion_setup[#1][#2]{\ctxcommand{setupfontprotrusion("#1","#2")}}
+\def\font_expansion_setup [#1][#2]{\clf_setupfontexpansion {#1}{#2}}
+\def\font_protrusion_setup[#1][#2]{\clf_setupfontprotrusion{#1}{#2}}
% \setupfontprotrusion[quality-upright][vector=quality]
% \setupfontprotrusion[quality-slanted][vector=quality,right=1.5]
@@ -51,10 +51,10 @@
\let\pdfadjustspacing\relax \newcount\pdfadjustspacing % a little bit protection
\let\pdfprotrudechars\relax \newcount\pdfprotrudechars % a little bit protection
-\def\font_expansion_enable {\normalpdfadjustspacing\plustwo }
-\def\font_expansion_disable {\normalpdfadjustspacing\zerocount}
-\def\font_protruding_enable {\normalpdfprotrudechars\plustwo }
-\def\font_protruding_disable{\normalpdfprotrudechars\zerocount}
+\def\font_expansion_enable {\normalpdfadjustspacing\plustwo } % these will become normal primitives
+\def\font_expansion_disable {\normalpdfadjustspacing\zerocount} % these will become normal primitives
+\def\font_protruding_enable {\normalpdfprotrudechars\plustwo } % these will become normal primitives
+\def\font_protruding_disable{\normalpdfprotrudechars\zerocount} % these will become normal primitives
\appendtoks \font_expansion_disable \to \everyforgetall % Here or not here?
\appendtoks \font_protruding_disable \to \everyforgetall % Here or not here?
diff --git a/tex/context/base/java-imp-fld.mkiv b/tex/context/base/java-imp-fld.mkiv
index aaec257f2..cbd53fffb 100644
--- a/tex/context/base/java-imp-fld.mkiv
+++ b/tex/context/base/java-imp-fld.mkiv
@@ -30,29 +30,34 @@
%D On into Yes. Also, we've changed the test for the on value
%D into !Off as we dón't know what value it gets in the reader.
+% Is this still okay? We can have unicode now, can't we? Anyway it's kind of
+% messy and unneeded in these unicode times.
+
\startluasetups javascript:pdfencoding
- local verbatim = context.verbatim
- verbatim("{\n")
- for accent, group in table.sortedhash(characters.tex.accentmapping) do
- for character, mapping in table.sortedhash(group) do
+ local ctx_verbatim = context.verbatim
+ local utfbyte = utf.byte
+ local sortedhash = table.sortedhash
+
+ ctx_verbatim("{\n")
+ for accent, group in sortedhash(characters.tex.accentmapping) do
+ for character, mapping in sortedhash(group) do
if character == "" then
character = " "
end
if accent == '"' then
- verbatim(" '\\\\%s%s' : '\\u%04X',\n",accent,character,utf.byte(mapping))
+ ctx_verbatim(" '\\\\%s%s' : '\\u%04X',\n",accent,character,utfbyte(mapping))
else
- verbatim(' "\\\\%s%s" : "\\u%04X",\n',accent,character,utf.byte(mapping))
+ ctx_verbatim(' "\\\\%s%s" : "\\u%04X",\n',accent,character,utfbyte(mapping))
end
end
end
- verbatim(" '\\\\OE' : '\\u0152',\n")
- verbatim(" '\\\\oe' : '\\u0153',\n")
- verbatim(" '\\\\AE' : '\\u00C6',\n")
- verbatim(" '\\\\ae' : '\\u00E6',\n")
- verbatim(" '\\\\<<' : '\\u00AB',\n")
- verbatim(" '\\\\>>' : '\\u00BB',\n")
- verbatim(" '\\\\ss' : '\\u00DF' \n")
- verbatim("}\n")
+ for command, mapping in sortedhash(characters.tex.commandmapping) do
+ ctx_verbatim(' "\\\\%s" : "\\u%04X",\n',command,utfbyte(mapping))
+ end
+ -- ctx_verbatim(" '\\\\<<' : '\\u00AB',\n")
+ -- ctx_verbatim(" '\\\\>>' : '\\u00BB',\n")
+ ctx_verbatim("}\n")
+
\stopluasetups
% maybe make { } tex braces in javascript code so that we can call lua
diff --git a/tex/context/base/java-ini.lua b/tex/context/base/java-ini.lua
index 673379494..069eb5ab6 100644
--- a/tex/context/base/java-ini.lua
+++ b/tex/context/base/java-ini.lua
@@ -19,7 +19,7 @@ local variables = interfaces.variables
local formatters = string.formatters
local context = context
-local commands = commands
+local implement = interfaces.implement
local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end)
@@ -196,7 +196,13 @@ function javascripts.flushpreambles()
return t
end
-local patterns = { "java-imp-%s.mkiv", "java-imp-%s.tex", "java-%s.mkiv", "java-%s.tex" }
+local patterns = {
+ "java-imp-%s.mkiv",
+ "java-imp-%s.tex",
+ -- obsolete:
+ "java-%s.mkiv",
+ "java-%s.tex"
+}
local function action(name,foundname)
context.startnointerference()
@@ -213,7 +219,7 @@ end
function javascripts.usescripts(name)
if name ~= variables.reset then -- reset is obsolete
- commands.uselibrary {
+ resolvers.uselibrary {
name = name,
patterns = patterns,
action = action,
@@ -225,8 +231,38 @@ end
-- interface
-commands.storejavascriptcode = interactions.javascripts.storecode
-commands.storejavascriptpreamble = interactions.javascripts.storepreamble
-commands.addtojavascriptpreamble = interactions.javascripts.addtopreamble
-commands.usejavascriptpreamble = interactions.javascripts.usepreamblenow
-commands.usejavascriptscripts = interactions.javascripts.usescripts
+implement {
+ name = "storejavascriptcode",
+ actions = javascripts.storecode,
+ arguments = "string"
+}
+
+implement {
+ name = "storejavascriptpreamble",
+ actions = javascripts.storepreamble,
+ arguments = "string"
+}
+
+implement {
+ name = "setjavascriptpreamble",
+ actions = javascripts.setpreamble,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "addtojavascriptpreamble",
+ actions = javascripts.addtopreamble,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "usejavascriptpreamble",
+ actions = javascripts.usepreamblenow,
+ arguments = "string"
+}
+
+implement {
+ name = "usejavascriptscripts",
+ actions = javascripts.usescripts,
+ arguments = "string"
+}
diff --git a/tex/context/base/java-ini.mkiv b/tex/context/base/java-ini.mkiv
index e4f0bf28b..d6fe4dd8a 100644
--- a/tex/context/base/java-ini.mkiv
+++ b/tex/context/base/java-ini.mkiv
@@ -105,15 +105,17 @@
%D This macro can be used to force inclusion of postponed
%D \JAVASCRIPT\ preambles.
+\def\m_java_escape_u{\letterbackslash u}
+
\unexpanded\def\startJScode
{\begingroup
\obeylualines
\obeyluatokens
- \def\u{\letterbackslash u}%
+ \let\u\m_java_escape_u
\java_start_code}
\def\java_start_code#1\stopJScode
- {\normalexpanded{\endgroup\ctxcommand{storejavascriptcode(\!!bs#1\!!es)}}}
+ {\normalexpanded{\endgroup\clf_storejavascriptcode{#1}}}
\let\stopJScode\relax
@@ -121,16 +123,16 @@
{\begingroup
\obeylualines
\obeyluatokens
- \def\u{\letterbackslash u}%
+ \let\u\m_java_escape_u
\java_start_preamble}
\def\java_start_preamble#1\stopJSpreamble
- {\normalexpanded{\endgroup\ctxcommand{storejavascriptpreamble(\!!bs#1\!!es)}}}
+ {\normalexpanded{\endgroup\clf_storejavascriptpreamble{#1}}}
\let\stopJSpreamble\relax
-\unexpanded\def\setJSpreamble #1#2{\ctxcommand{storejavascriptpreamble("#1",\!!bs#2\!!es)}}
-\unexpanded\def\addtoJSpreamble#1#2{\ctxcommand{addtojavascriptpreamble("#1",\!!bs#2\!!es)}}
+\unexpanded\def\setJSpreamble #1#2{\clf_setjavascriptpreamble {#1}{#2}}
+\unexpanded\def\addtoJSpreamble#1#2{\clf_addtojavascriptpreamble{#1}{#2}}
%D \macros
%D {useJSscripts}
@@ -147,11 +149,11 @@
%D
%D The not so complicated implementation of this macro is:
-\def\useJSscripts
+\unexpanded\def\useJSscripts
{\dodoubleempty\java_use_scripts}
\def\java_use_scripts[#1][#2]%
- {\ctxcommand{usejavascriptscripts(\!!bs#1\!!es)}% two steps as this one calls tex code
- \ctxcommand{usejavascriptpreamble("#2")}} % so this one comes later
+ {\clf_usejavascriptscripts {#1}% two steps as this one calls tex code
+ \clf_usejavascriptpreamble{#2}}% so this one comes later
\protect \endinput
diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua
index 8d11080e7..8f18d4c00 100644
--- a/tex/context/base/l-boolean.lua
+++ b/tex/context/base/l-boolean.lua
@@ -57,11 +57,11 @@ function string.booleanstring(str)
end
end
-function string.is_boolean(str,default)
+function string.is_boolean(str,default,strict)
if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" or str == "1" then
+ if str == "true" or str == "yes" or str == "on" or str == "t" or (not strict and str == "1") then
return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" or str == "0" then
+ elseif str == "false" or str == "no" or str == "off" or str == "f" or (not strict and str == "0") then
return false
end
end
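+
+-- A hedged sketch of the new strict flag (the function still falls back to the
+-- default value when nothing matches):
+--
+--   string.is_boolean("1")           -- true (loose mode accepts 0/1)
+--   string.is_boolean("1",nil,true)  -- nil  (strict mode ignores 0/1)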
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index 40081cc3b..81ac65e50 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -6,10 +6,11 @@ if not modules then modules = { } end modules ['l-dir'] = {
license = "see context related readme files"
}
--- dir.expandname will be merged with cleanpath and collapsepath
+-- todo: dir.expandname will be sped up and merged with cleanpath and collapsepath
+-- todo: keep track of currentdir (chdir, pushdir, popdir)
local type, select = type, select
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local find, gmatch, match, gsub, sub = string.find, string.gmatch, string.match, string.gsub, string.sub
local concat, insert, remove, unpack = table.concat, table.insert, table.remove, table.unpack
local lpegmatch = lpeg.match
@@ -21,29 +22,51 @@ local lfs = lfs
local attributes = lfs.attributes
local walkdir = lfs.dir
-local isdir = lfs.isdir
-local isfile = lfs.isfile
+local isdir = lfs.isdir -- not robust, will be overloaded anyway
+local isfile = lfs.isfile -- not robust, will be overloaded anyway
local currentdir = lfs.currentdir
local chdir = lfs.chdir
+local mkdir = lfs.mkdir
-local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
+local onwindows = os.type == "windows" or find(os.getenv("PATH"),";",1,true)
-- in case we load outside luatex
-if not isdir then
- function isdir(name)
- local a = attributes(name)
- return a and a.mode == "directory"
+if onwindows then
+
+ -- lfs.isdir does not like trailing /
+ -- lfs.dir accepts trailing /
+
+ local tricky = S("/\\") * P(-1)
+
+ isdir = function(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode") == "directory"
+ else
+ return attributes(name.."/.","mode") == "directory"
+ end
end
- lfs.isdir = isdir
-end
-if not isfile then
- function isfile(name)
- local a = attributes(name)
- return a and a.mode == "file"
+ isfile = function(name)
+ return attributes(name,"mode") == "file"
end
+
+ lfs.isdir = isdir
lfs.isfile = isfile
+
+else
+
+ isdir = function(name)
+ return attributes(name,"mode") == "directory"
+ end
+
+ isfile = function(name)
+ return attributes(name,"mode") == "file"
+ end
+
+ lfs.isdir = isdir
+ lfs.isfile = isfile
+
end
-- handy
@@ -52,63 +75,104 @@ function dir.current()
return (gsub(currentdir(),"\\","/"))
end
--- optimizing for no find (*) does not save time
-
---~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs
---~ local ok, scanner
---~ if path == "/" then
---~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
---~ else
---~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
---~ end
---~ if ok and type(scanner) == "function" then
---~ if not find(path,"/$") then path = path .. '/' end
---~ for name in scanner do
---~ local full = path .. name
---~ local mode = attributes(full,'mode')
---~ if mode == 'file' then
---~ if find(full,patt) then
---~ action(full)
---~ end
---~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
---~ globpattern(full,patt,recurse,action)
---~ end
---~ end
---~ end
---~ end
-
-local lfsisdir = isdir
-
-local function isdir(path)
- path = gsub(path,"[/\\]+$","")
- return lfsisdir(path)
-end
+-- somewhat optimized
-lfs.isdir = isdir
+local function glob_pattern_function(path,patt,recurse,action)
+ if isdir(path) then
+ local usedpath
+ if path == "/" then
+ usedpath = "/."
+ elseif not find(path,"/$") then
+ usedpath = path .. "/."
+ path = path .. "/"
+ else
+ usedpath = path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name ~= "." and name ~= ".." then
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if not patt or find(full,patt) then
+ action(full)
+ end
+ elseif recurse and mode == "directory" then
+ if not dirs then
+ dirs = { full }
+ else
+ dirs[#dirs+1] = full
+ end
+ end
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_function(dirs[i],patt,recurse,action)
+ end
+ end
+ end
+end
-local function globpattern(path,patt,recurse,action)
- if path == "/" then
- path = path .. "."
- elseif not find(path,"/$") then
- path = path .. '/'
+local function glob_pattern_table(path,patt,recurse,result)
+ if not result then
+ result = { }
end
- if isdir(path) then -- lfs.isdir does not like trailing /
- for name in walkdir(path) do -- lfs.dir accepts trailing /
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
+ if isdir(path) then
+ local usedpath
+ if path == "/" then
+ usedpath = "/."
+ elseif not find(path,"/$") then
+ usedpath = path .. "/."
+ path = path .. "/"
+ else
+ usedpath = path
+ end
+ local dirs
+ for name in walkdir(usedpath) do
+ if name ~= "." and name ~= ".." then
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if not patt or find(full,patt) then
+ result[#result+1] = full
+ end
+ elseif recurse and mode == "directory" then
+ if not dirs then
+ dirs = { full }
+ else
+ dirs[#dirs+1] = full
+ end
end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
+ end
+ end
+ if dirs then
+ for i=1,#dirs do
+ glob_pattern_table(dirs[i],patt,recurse,result)
end
end
end
+ return result
+end
+
+local function globpattern(path,patt,recurse,method)
+ local kind = type(method)
+ if patt and sub(patt,1,-3) == path then
+ patt = false
+ end
+ if kind == "function" then
+ return glob_pattern_function(path,patt,recurse,method)
+ elseif kind == "table" then
+ return glob_pattern_table(path,patt,recurse,method)
+ else
+ return glob_pattern_table(path,patt,recurse,{ })
+ end
end
dir.globpattern = globpattern
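+
+-- A hedged usage sketch (the path and pattern are assumptions):
+--
+--   dir.globpattern("t:/sources/","%.lua$",true,print)         -- run an action per file
+--   local t = dir.globpattern("t:/sources/","%.lua$",true,{ }) -- collect names in a table
+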
+-- never or seldom used so far:
+
local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
@@ -118,18 +182,26 @@ local function collectpattern(path,patt,recurse,result)
ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
+ if not find(path,"/$") then
+ path = path .. '/'
+ end
for name in scanner, first do
- local full = path .. name
- local attr = attributes(full)
- local mode = attr.mode
- if mode == 'file' then
- if find(full,patt) then
+ if name == "." then
+ -- skip
+ elseif name == ".." then
+ -- skip
+ else
+ local full = path .. name
+ local attr = attributes(full)
+ local mode = attr.mode
+ if mode == 'file' then
+ if find(full,patt) then
+ result[name] = attr
+ end
+ elseif recurse and mode == "directory" then
+ attr.list = collectpattern(full,patt,recurse)
result[name] = attr
end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collectpattern(full,patt,recurse)
- result[name] = attr
end
end
end
@@ -138,19 +210,14 @@ end
dir.collectpattern = collectpattern
-local separator
+local separator, pattern
if onwindows then -- we could sanitize here
--- pattern = Ct {
--- [1] = (C(P(".") + S("/\\")^1) + C(R("az","AZ") * P(":") * S("/\\")^0) + Cc("./")) * V(2) * V(3),
--- [2] = C(((1-S("*?/\\"))^0 * S("/\\"))^0),
--- [3] = C(P(1)^0)
--- }
-
local slash = S("/\\") / "/"
- pattern = Ct {
+-- pattern = Ct {
+ pattern = {
[1] = (Cs(P(".") + slash^1) + Cs(R("az","AZ") * P(":") * slash^0) + Cc("./")) * V(2) * V(3),
[2] = Cs(((1-S("*?/\\"))^0 * slash)^0),
[3] = Cs(P(1)^0)
@@ -158,7 +225,8 @@ if onwindows then -- we could sanitize here
else -- assume unix
- pattern = Ct {
+-- pattern = Ct {
+ pattern = {
[1] = (C(P(".") + P("/")^1) + Cc("./")) * V(2) * V(3),
[2] = C(((1-S("*?/"))^0 * P("/"))^0),
[3] = C(P(1)^0)
@@ -185,12 +253,11 @@ local function glob(str,t)
elseif isfile(str) then
t(str)
else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
+ local root, path, base = lpegmatch(pattern,str) -- we could use the file splitter
+ if root and path and base then
+ local recurse = find(base,"**",1,true) -- find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
globpattern(start,result,recurse,t)
end
end
@@ -209,16 +276,12 @@ local function glob(str,t)
return { str }
end
else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,action)
- return t
+ local root, path, base = lpegmatch(pattern,str) -- we could use the file splitter
+ if root and path and base then
+ local recurse = find(base,"**",1,true) -- find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ return globpattern(start,result,recurse,t)
else
return { }
end
@@ -228,11 +291,20 @@ end
dir.glob = glob
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
+-- local c = os.clock()
+-- local t = dir.glob("e:/**")
+-- local t = dir.glob("t:/sources/**")
+-- local t = dir.glob("t:/**")
+-- print(os.clock()-c,#t)
+
+-- for i=1,3000 do print(t[i]) end
+-- for i=1,10 do print(t[i]) end
+
+-- list = dir.glob("**/*.tif")
+-- list = dir.glob("/**/*.tif")
+-- list = dir.glob("./**/*.tif")
+-- list = dir.glob("oeps/**/*.tif")
+-- list = dir.glob("/oeps/**/*.tif")
local function globfiles(path,recurse,func,files) -- func == pattern or function
if type(func) == "string" then
@@ -274,27 +346,37 @@ function dir.ls(pattern)
return concat(glob(pattern),"\n")
end
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
+-- mkdirs("temp")
+-- mkdirs("a/b/c")
+-- mkdirs(".","/a/b/c")
+-- mkdirs("a","b","c")
local make_indeed = true -- false
if onwindows then
function dir.mkdirs(...)
- local str, pth = "", ""
- for i=1,select("#",...) do
- local s = select(i,...)
- if s == "" then
- -- skip
- elseif str == "" then
- str = s
- else
- str = str .. "/" .. s
+ local n = select("#",...)
+ local str
+ if n == 1 then
+ str = select(1,...)
+ if isdir(str) then
+ return str, true
+ end
+ else
+ str = ""
+ for i=1,n do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
+ end
end
end
+ local pth = ""
local drive = false
local first, middle, last = match(str,"^(//)(//*)(.*)$")
if first then
@@ -330,35 +412,44 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
return pth, (isdir(pth) == true)
end
- --~ print(dir.mkdirs("","","a","c"))
- --~ print(dir.mkdirs("a"))
- --~ print(dir.mkdirs("a:"))
- --~ print(dir.mkdirs("a:/b/c"))
- --~ print(dir.mkdirs("a:b/c"))
- --~ print(dir.mkdirs("a:/bbb/c"))
- --~ print(dir.mkdirs("/a/b/c"))
- --~ print(dir.mkdirs("/aaa/b/c"))
- --~ print(dir.mkdirs("//a/b/c"))
- --~ print(dir.mkdirs("///a/b/c"))
- --~ print(dir.mkdirs("a/bbb//ccc/"))
+ -- print(dir.mkdirs("","","a","c"))
+ -- print(dir.mkdirs("a"))
+ -- print(dir.mkdirs("a:"))
+ -- print(dir.mkdirs("a:/b/c"))
+ -- print(dir.mkdirs("a:b/c"))
+ -- print(dir.mkdirs("a:/bbb/c"))
+ -- print(dir.mkdirs("/a/b/c"))
+ -- print(dir.mkdirs("/aaa/b/c"))
+ -- print(dir.mkdirs("//a/b/c"))
+ -- print(dir.mkdirs("///a/b/c"))
+ -- print(dir.mkdirs("a/bbb//ccc/"))
else
function dir.mkdirs(...)
- local str, pth = "", ""
- for i=1,select("#",...) do
- local s = select(i,...)
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
+ local n = select("#",...)
+ local str, pth
+ if n == 1 then
+ str = select(1,...)
+ if isdir(str) then
+ return str, true
+ end
+ else
+ str = ""
+ for i=1,n do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
+ end
end
end
end
@@ -373,7 +464,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
else
@@ -381,71 +472,82 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
+ mkdir(pth)
end
end
end
return pth, (isdir(pth) == true)
end
- --~ print(dir.mkdirs("","","a","c"))
- --~ print(dir.mkdirs("a"))
- --~ print(dir.mkdirs("/a/b/c"))
- --~ print(dir.mkdirs("/aaa/b/c"))
- --~ print(dir.mkdirs("//a/b/c"))
- --~ print(dir.mkdirs("///a/b/c"))
- --~ print(dir.mkdirs("a/bbb//ccc/"))
+ -- print(dir.mkdirs("","","a","c"))
+ -- print(dir.mkdirs("a"))
+ -- print(dir.mkdirs("/a/b/c"))
+ -- print(dir.mkdirs("/aaa/b/c"))
+ -- print(dir.mkdirs("//a/b/c"))
+ -- print(dir.mkdirs("///a/b/c"))
+ -- print(dir.mkdirs("a/bbb//ccc/"))
end
dir.makedirs = dir.mkdirs
--- we can only define it here as it uses dir.current
-if onwindows then
+do
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
- if first then
- first = dir.current() .. "/" -- dir.current sanitizes
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = currentdir()
- if chdir(first) then
- first = dir.current()
+ -- we can only define it here as it uses dir.chdir and we also need to
+ -- make sure we use the non sandboxed variant because otherwise we get
+ -- into a recursive loop due to usage of expandname in the file resolver
+
+ local chdir = sandbox and sandbox.original(chdir) or chdir
+
+ if onwindows then
+
+ local xcurrentdir = dir.current
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ first = xcurrentdir() .. "/" -- xcurrentdir sanitizes
+ end
+ if not first then
+ first, last = match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first, last = match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d = currentdir() -- push / pop
+ if chdir(first) then
+ first = xcurrentdir() -- xcurrentdir sanitizes
+ end
+ chdir(d)
end
- chdir(d)
+ end
+ if not first then
+ first, last = xcurrentdir(), str
+ end
+ last = gsub(last,"//","/")
+ last = gsub(last,"/%./","/")
+ last = gsub(last,"^/*","")
+ first = gsub(first,"/*$","")
+ if last == "" or last == "." then
+ return first
+ else
+ return first .. "/" .. last
end
end
- if not first then
- first, last = dir.current(), str
- end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" or last == "." then
- return first
- else
- return first .. "/" .. last
- end
- end
-else
+ else
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- if not find(str,"^/") then
- str = currentdir() .. "/" .. str
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ if not find(str,"^/") then
+ str = currentdir() .. "/" .. str
+ end
+ str = gsub(str,"//","/")
+ str = gsub(str,"/%./","/")
+ str = gsub(str,"(.)/%.$","%1")
+ return str
end
- str = gsub(str,"//","/")
- str = gsub(str,"/%./","/")
- str = gsub(str,"(.)/%.$","%1")
- return str
+
end
end
diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua
index ebb2b39f4..7ed6370f2 100644
--- a/tex/context/base/l-file.lua
+++ b/tex/context/base/l-file.lua
@@ -15,51 +15,53 @@ if not lfs then
lfs = optionalrequire("lfs")
end
-if not lfs then
-
- lfs = {
- getcurrentdir = function()
- return "."
- end,
- attributes = function()
- return nil
- end,
- isfile = function(name)
- local f = io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir = function(name)
- print("you need to load lfs")
- return false
- end
- }
-
-elseif not lfs.isfile then
-
- local attributes = lfs.attributes
-
- function lfs.isdir(name)
- return attributes(name,"mode") == "directory"
- end
-
- function lfs.isfile(name)
- return attributes(name,"mode") == "file"
- end
-
- -- function lfs.isdir(name)
- -- local a = attributes(name)
- -- return a and a.mode == "directory"
- -- end
-
- -- function lfs.isfile(name)
- -- local a = attributes(name)
- -- return a and a.mode == "file"
- -- end
-
-end
+-- -- see later
+--
+-- if not lfs then
+--
+-- lfs = {
+-- getcurrentdir = function()
+-- return "."
+-- end,
+-- attributes = function()
+-- return nil
+-- end,
+-- isfile = function(name)
+-- local f = io.open(name,'rb')
+-- if f then
+-- f:close()
+-- return true
+-- end
+-- end,
+-- isdir = function(name)
+-- print("you need to load lfs")
+-- return false
+-- end
+-- }
+--
+-- elseif not lfs.isfile then
+--
+-- local attributes = lfs.attributes
+--
+-- function lfs.isdir(name)
+-- return attributes(name,"mode") == "directory"
+-- end
+--
+-- function lfs.isfile(name)
+-- return attributes(name,"mode") == "file"
+-- end
+--
+-- -- function lfs.isdir(name)
+-- -- local a = attributes(name)
+-- -- return a and a.mode == "directory"
+-- -- end
+--
+-- -- function lfs.isfile(name)
+-- -- local a = attributes(name)
+-- -- return a and a.mode == "file"
+-- -- end
+--
+-- end
local insert, concat = table.insert, table.concat
local match, find, gmatch = string.match, string.find, string.gmatch
@@ -72,6 +74,28 @@ local checkedsplit = string.checkedsplit
local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct
+-- better this way:
+
+local tricky = S("/\\") * P(-1)
+local attributes = lfs.attributes
+
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir, "lfs.isdir")
+end
+
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode") == "directory"
+ else
+ return attributes(name.."/.","mode") == "directory"
+ end
+end
+
+function lfs.isfile(name)
+ return attributes(name,"mode") == "file"
+end
+
local colon = P(":")
local period = P(".")
local periods = P("..")
@@ -133,8 +157,8 @@ file.suffix = suffixonly
file.suffixesonly = suffixesonly
file.suffixes = suffixesonly
-file.dirname = pathpart -- obsolete
-file.extname = suffixonly -- obsolete
+file.dirname = pathpart -- obsolete
+file.extname = suffixonly -- obsolete
-- actually these are schemes
@@ -385,31 +409,90 @@ local deslasher = lpeg.replacer(S("\\/")^1,"/")
-- then we still have to deal with urls ... anyhow, multiple // are never a real
-- problem but just ugly.
-function file.join(...)
- local lst = { ... }
- local one = lst[1]
+-- function file.join(...)
+-- local lst = { ... }
+-- local one = lst[1]
+-- if lpegmatch(isnetwork,one) then
+-- local one = lpegmatch(reslasher,one)
+-- local two = lpegmatch(deslasher,concat(lst,"/",2))
+-- if lpegmatch(hasroot,two) then
+-- return one .. two
+-- else
+-- return one .. "/" .. two
+-- end
+-- elseif lpegmatch(isroot,one) then
+-- local two = lpegmatch(deslasher,concat(lst,"/",2))
+-- if lpegmatch(hasroot,two) then
+-- return two
+-- else
+-- return "/" .. two
+-- end
+-- elseif one == "" then
+-- return lpegmatch(stripper,concat(lst,"/",2))
+-- else
+-- return lpegmatch(deslasher,concat(lst,"/"))
+-- end
+-- end
+
+function file.join(one, two, three, ...)
+ if not two then
+ return one == "" and one or lpegmatch(stripper,one)
+ end
+ if one == "" then
+ return lpegmatch(stripper,three and concat({ two, three, ... },"/") or two)
+ end
if lpegmatch(isnetwork,one) then
local one = lpegmatch(reslasher,one)
- local two = lpegmatch(deslasher,concat(lst,"/",2))
+ local two = lpegmatch(deslasher,three and concat({ two, three, ... },"/") or two)
if lpegmatch(hasroot,two) then
return one .. two
else
return one .. "/" .. two
end
elseif lpegmatch(isroot,one) then
- local two = lpegmatch(deslasher,concat(lst,"/",2))
+ local two = lpegmatch(deslasher,three and concat({ two, three, ... },"/") or two)
if lpegmatch(hasroot,two) then
return two
else
return "/" .. two
end
- elseif one == "" then
- return lpegmatch(stripper,concat(lst,"/",2))
else
- return lpegmatch(deslasher,concat(lst,"/"))
+ return lpegmatch(deslasher,concat({ one, two, three, ... },"/"))
end
end
+-- or we can use this:
+--
+-- function file.join(...)
+-- local n = select("#",...)
+-- local one = select(1,...)
+-- if n == 1 then
+-- return one == "" and one or lpegmatch(stripper,one)
+-- end
+-- if one == "" then
+-- return lpegmatch(stripper,n > 2 and concat({ ... },"/",2) or select(2,...))
+-- end
+-- if lpegmatch(isnetwork,one) then
+-- local one = lpegmatch(reslasher,one)
+-- local two = lpegmatch(deslasher,n > 2 and concat({ ... },"/",2) or select(2,...))
+-- if lpegmatch(hasroot,two) then
+-- return one .. two
+-- else
+-- return one .. "/" .. two
+-- end
+-- elseif lpegmatch(isroot,one) then
+-- local two = lpegmatch(deslasher,n > 2 and concat({ ... },"/",2) or select(2,...))
+-- if lpegmatch(hasroot,two) then
+-- return two
+-- else
+-- return "/" .. two
+-- end
+-- else
+-- return lpegmatch(deslasher,concat({ ... },"/"))
+-- end
+-- end
+
+-- print(file.join("c:/whatever"))
-- print(file.join("c:/whatever","name"))
-- print(file.join("//","/y"))
-- print(file.join("/","/y"))
diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua
index 52f166af9..a91d44d87 100644
--- a/tex/context/base/l-io.lua
+++ b/tex/context/base/l-io.lua
@@ -12,7 +12,7 @@ local concat = table.concat
local floor = math.floor
local type = type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator, io.pathseparator = "\\", ";"
else
io.fileseparator, io.pathseparator = "/" , ":"
@@ -339,11 +339,6 @@ function io.readstring(f,n,m)
return str
end
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
-
-- This works quite ok:
--
-- function io.piped(command,writer)
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 399b3ad65..55a0d8929 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -6,6 +6,12 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
+-- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check
+-- if i can use new features like capture / 2 and .B); at first sight the xml
+-- parser is some 5% slower
+
+-- lpeg.P("abc") is faster than lpeg.P("a") * lpeg.P("b") * lpeg.P("c")
+
-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-- move utf -> l-unicode
@@ -15,14 +21,15 @@ lpeg = require("lpeg")
-- The latest lpeg doesn't have print any more, and even the new ones are not
 -- available by default (only when debug mode is enabled), which is a pity as
--- as it helps bailign down bottlenecks. Performance seems comparable, although
+-- it helps nailing down bottlenecks. Performance seems comparable: some 10%
+-- slower pattern compilation, same parsing speed, although,
--
-- local p = lpeg.C(lpeg.P(1)^0 * lpeg.P(-1))
--- local a = string.rep("123",10)
+-- local a = string.rep("123",100)
-- lpeg.match(p,a)
--
--- is nearly 20% slower and also still suboptimal (i.e. a match that runs from
--- begin to end, one of the cases where string matchers win).
+-- seems slower and is also still suboptimal (i.e. a match that runs from begin
+-- to end, one of the cases where string matchers win).
if not lpeg.print then function lpeg.print(...) print(lpeg.pcode(...)) end end
@@ -74,7 +81,9 @@ local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
-- let's start with an inspector:
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
-- Beware, we predefine a bunch of patterns here and one reason for doing so
-- is that we get consistent behaviour in some of the visualizers.
@@ -100,7 +109,8 @@ local uppercase = R("AZ")
local underscore = P("_")
local hexdigit = digit + lowercase + uppercase
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
+----- newline = crlf + S("\r\n") -- cr + lf
+local newline = P("\r") * (P("\n") + P(true)) + P("\n")
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
@@ -134,8 +144,11 @@ patterns.utfbom_16_be = utfbom_16_be
patterns.utfbom_16_le = utfbom_16_le
patterns.utfbom_8 = utfbom_8
-patterns.utf_16_be_nl = P("\000\r\000\n") + P("\000\r") + P("\000\n")
-patterns.utf_16_le_nl = P("\r\000\n\000") + P("\r\000") + P("\n\000")
+patterns.utf_16_be_nl = P("\000\r\000\n") + P("\000\r") + P("\000\n") -- P("\000\r") * (P("\000\n") + P(true)) + P("\000\n")
+patterns.utf_16_le_nl = P("\r\000\n\000") + P("\r\000") + P("\n\000") -- P("\r\000") * (P("\n\000") + P(true)) + P("\n\000")
+
+patterns.utf_32_be_nl = P("\000\000\000\r\000\000\000\n") + P("\000\000\000\r") + P("\000\000\000\n")
+patterns.utf_32_le_nl = P("\r\000\000\000\n\000\000\000") + P("\r\000\000\000") + P("\n\000\000\000")
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
@@ -169,14 +182,32 @@ patterns.whitespace = whitespace
patterns.nonspacer = nonspacer
patterns.nonwhitespace = nonwhitespace
-local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
+local stripper = spacer ^0 * C((spacer ^0 * nonspacer ^1)^0) -- from example by roberto
+local fullstripper = whitespace^0 * C((whitespace^0 * nonwhitespace^1)^0)
----- collapser = Cs(spacer^0/"" * ((spacer^1 * endofstring / "") + (spacer^1/" ") + P(1))^0)
local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
+local b_collapser = Cs( whitespace^0 /"" * (nonwhitespace^1 + whitespace^1/" ")^0)
+local e_collapser = Cs((whitespace^1 * P(-1)/"" + nonwhitespace^1 + whitespace^1/" ")^0)
+local m_collapser = Cs( (nonwhitespace^1 + whitespace^1/" ")^0)
+
+local b_stripper = Cs( spacer^0 /"" * (nonspacer^1 + spacer^1/" ")^0)
+local e_stripper = Cs((spacer^1 * P(-1)/"" + nonspacer^1 + spacer^1/" ")^0)
+local m_stripper = Cs( (nonspacer^1 + spacer^1/" ")^0)
+
patterns.stripper = stripper
+patterns.fullstripper = fullstripper
patterns.collapser = collapser
+patterns.b_collapser = b_collapser
+patterns.m_collapser = m_collapser
+patterns.e_collapser = e_collapser
+
+patterns.b_stripper = b_stripper
+patterns.m_stripper = m_stripper
+patterns.e_stripper = e_stripper
+
patterns.lowercase = lowercase
patterns.uppercase = uppercase
patterns.letter = patterns.lowercase + patterns.uppercase
@@ -215,9 +246,12 @@ patterns.integer = sign^-1 * digit^1
patterns.unsigned = digit^0 * period * digit^1
patterns.float = sign^-1 * patterns.unsigned
patterns.cunsigned = digit^0 * comma * digit^1
+patterns.cpunsigned = digit^0 * (period + comma) * digit^1
patterns.cfloat = sign^-1 * patterns.cunsigned
+patterns.cpfloat = sign^-1 * patterns.cpunsigned
patterns.number = patterns.float + patterns.integer
patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.cpnumber = patterns.cpfloat + patterns.integer
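+
+-- the cp* variants accept either a period or a comma as decimal separator, so
+-- both lpegmatch(patterns.cpnumber,"1.23") and lpegmatch(patterns.cpnumber,"1,23") match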
patterns.oct = zero * octdigit^1
patterns.octal = patterns.oct
patterns.HEX = zero * P("X") * (digit+uppercase)^1
@@ -469,7 +503,7 @@ end
-- local pattern1 = P(1-P(pattern))^0 * P(pattern) : test for not nil
-- local pattern2 = (P(pattern) * Cc(true) + P(1))^0 : test for true (could be faster, but not much)
-function lpeg.finder(lst,makefunction) -- beware: slower than find with 'patternless finds'
+function lpeg.finder(lst,makefunction,isutf) -- beware: slower than find with 'patternless finds'
local pattern
if type(lst) == "table" then
pattern = P(false)
@@ -485,7 +519,11 @@ function lpeg.finder(lst,makefunction) -- beware: slower than find with 'pattern
else
pattern = P(lst)
end
- pattern = (1-pattern)^0 * pattern
+ if isutf then
+ pattern = ((utf8char or 1)-pattern)^0 * pattern
+ else
+ pattern = (1-pattern)^0 * pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -798,44 +836,185 @@ end
-- experiment:
+local p_false = P(false)
+local p_true = P(true)
+
local function make(t)
- local p
+ local function making(t)
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
+ else
+ p = p + P(k) * making(v)
+ end
+ end
+ end
+ if t[""] then
+ p = p + p_true
+ end
+ return p
+ end
+ local p = p_false
local keys = sortedkeys(t)
for i=1,#keys do
local k = keys[i]
- local v = t[k]
- if not p then
- if next(v) then
- p = P(k) * make(v)
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
else
- p = P(k)
+ p = p + P(k) * making(v)
end
- else
- if next(v) then
- p = p + P(k) * make(v)
+ end
+ end
+ return p
+end
+
+local function collapse(t,x)
+ if type(t) ~= "table" then
+ return t, x
+ else
+ local n = next(t)
+ if n == nil then
+ return t, x
+ elseif next(t,n) == nil then
+ -- one entry
+ local k = n
+ local v = t[k]
+ if type(v) == "table" then
+ return collapse(v,x..k)
else
- p = p + P(k)
+ return v, x .. k
end
+ else
+ local tt = { }
+ for k, v in next, t do
+ local vv, kk = collapse(v,k)
+ tt[kk] = vv
+ end
+ return tt, x
end
end
- return p
end
function lpeg.utfchartabletopattern(list) -- goes to util-lpg
local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
+ local n = #list
+ if n == 0 then
+ for s in next, list do
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
+ end
+ end
+ else
+ for i=1,n do
+ local s = list[i]
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
end
- t = t[c]
end
end
+-- collapse(tree,"") -- needs testing, maybe optional, slightly faster because P("x")*P("X") seems slower than P("xX") (why)
+-- inspect(tree)
return make(tree)
end
--- inspect ( lpeg.utfchartabletopattern {
+-- local t = { "start", "stoep", "staart", "paard" }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/string.upper + 1)^1)
+
+-- local t = { "a", "abc", "ac", "abe", "abxyz", "xy", "bef","aa" }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/string.upper + 1)^1)
+
+-- inspect(lpegmatch(p,"a"))
+-- inspect(lpegmatch(p,"aa"))
+-- inspect(lpegmatch(p,"aaaa"))
+-- inspect(lpegmatch(p,"ac"))
+-- inspect(lpegmatch(p,"bc"))
+-- inspect(lpegmatch(p,"zzbczz"))
+-- inspect(lpegmatch(p,"zzabezz"))
+-- inspect(lpegmatch(p,"ab"))
+-- inspect(lpegmatch(p,"abc"))
+-- inspect(lpegmatch(p,"abe"))
+-- inspect(lpegmatch(p,"xa"))
+-- inspect(lpegmatch(p,"bx"))
+-- inspect(lpegmatch(p,"bax"))
+-- inspect(lpegmatch(p,"abxyz"))
+-- inspect(lpegmatch(p,"foobarbefcrap"))
+
+-- local t = { ["^"] = 1, ["^^"] = 2, ["^^^"] = 3, ["^^^^"] = 4 }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/t + 1)^1)
+-- inspect(lpegmatch(p," ^ ^^ ^^^ ^^^^ ^^^^^ ^^^^^^ ^^^^^^^ "))
+
+-- local t = { ["^^"] = 2, ["^^^"] = 3, ["^^^^"] = 4 }
+-- local p = lpeg.Cs((lpeg.utfchartabletopattern(t)/t + 1)^1)
+-- inspect(lpegmatch(p," ^ ^^ ^^^ ^^^^ ^^^^^ ^^^^^^ ^^^^^^^ "))
+
+-- lpeg.utfchartabletopattern {
-- utfchar(0x00A0), -- nbsp
-- utfchar(0x2000), -- enquad
-- utfchar(0x2001), -- emquad
@@ -851,7 +1030,7 @@ end
-- utfchar(0x200B), -- zerowidthspace
-- utfchar(0x202F), -- narrownobreakspace
-- utfchar(0x205F), -- math thinspace
--- } )
+-- }
-- a few handy ones:
--
@@ -920,3 +1099,75 @@ lpeg.patterns.stripzeros = stripper
-- lpegmatch(stripper,str)
-- print(#str, os.clock()-ts, lpegmatch(stripper,sample))
+-- for practical reasons we keep this here:
+
+local byte_to_HEX = { }
+local byte_to_hex = { }
+local byte_to_dec = { } -- for md5
+local hex_to_byte = { }
+
+for i=0,255 do
+ local H = format("%02X",i)
+ local h = format("%02x",i)
+ local d = format("%03i",i)
+ local c = char(i)
+ byte_to_HEX[c] = H
+ byte_to_hex[c] = h
+ byte_to_dec[c] = d
+ hex_to_byte[h] = c
+ hex_to_byte[H] = c
+end
+
+local hextobyte = P(2)/hex_to_byte
+local bytetoHEX = P(1)/byte_to_HEX
+local bytetohex = P(1)/byte_to_hex
+local bytetodec = P(1)/byte_to_dec
+local hextobytes = Cs(hextobyte^0)
+local bytestoHEX = Cs(bytetoHEX^0)
+local bytestohex = Cs(bytetohex^0)
+local bytestodec = Cs(bytetodec^0)
+
+patterns.hextobyte = hextobyte
+patterns.bytetoHEX = bytetoHEX
+patterns.bytetohex = bytetohex
+patterns.bytetodec = bytetodec
+patterns.hextobytes = hextobytes
+patterns.bytestoHEX = bytestoHEX
+patterns.bytestohex = bytestohex
+patterns.bytestodec = bytestodec
+
+function string.toHEX(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+
+function string.tohex(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+
+function string.todec(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+
+function string.tobytes(s)
+ if not s or s == "" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
+
+-- local h = "ADFE0345"
+-- local b = lpegmatch(patterns.hextobytes,h)
+-- print(h,b,string.tohex(b),string.toHEX(b))
diff --git a/tex/context/base/l-lua.lua b/tex/context/base/l-lua.lua
index fc05afa67..1a2a98723 100644
--- a/tex/context/base/l-lua.lua
+++ b/tex/context/base/l-lua.lua
@@ -6,6 +6,17 @@ if not modules then modules = { } end modules ['l-lua'] = {
license = "see context related readme files"
}
+-- potential issues with 5.3:
+
+-- i'm not sure yet if the int/float change is good for luatex
+
+-- math.min
+-- math.max
+-- tostring
+-- tonumber
+-- utf.*
+-- bit32
+
-- compatibility hacks and helpers
local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
@@ -148,3 +159,20 @@ function optionalrequire(...)
return result
end
end
+
+-- nice for non ascii scripts (this might move):
+
+if lua then
+ lua.mask = load([[τεχ = 1]]) and "utf" or "ascii"
+end
+
+local flush = io.flush
+
+if flush then
+
+ local execute = os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec = os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn = os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen = io.popen if popen then function io.popen (...) flush() return popen (...) end end
+
+end
diff --git a/tex/context/base/l-math.lua b/tex/context/base/l-math.lua
index 43f60b56b..ec62919b4 100644
--- a/tex/context/base/l-math.lua
+++ b/tex/context/base/l-math.lua
@@ -8,6 +8,10 @@ if not modules then modules = { } end modules ['l-math'] = {
local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
+if not math.ceiling then
+ math.ceiling = math.ceil
+end
+
if not math.round then
function math.round(x) return floor(x + 0.5) end
end
diff --git a/tex/context/base/l-md5.lua b/tex/context/base/l-md5.lua
index 8ac20a5a5..00272c873 100644
--- a/tex/context/base/l-md5.lua
+++ b/tex/context/base/l-md5.lua
@@ -19,48 +19,38 @@ if not md5 then
end
local md5, file = md5, file
-local gsub, format, byte = string.gsub, string.format, string.byte
-local md5sum = md5.sum
+local gsub = string.gsub
-local function convert(str,fmt)
- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-
--- local P, Cs, lpegmatch = lpeg.P, lpeg.Cs,lpeg.match
---
--- if not md5.HEX then
--- local function remap(chr) return format("%02X",byte(chr)) end
--- function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
--- end
+-- local gsub, format, byte = string.gsub, string.format, string.byte
--
--- if not md5.hex then
--- local function remap(chr) return format("%02x",byte(chr)) end
--- function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
+-- local function convert(str,fmt)
+-- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-- end
--
--- if not md5.dec then
--- local function remap(chr) return format("%03i",byte(chr)) end
--- function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
--- end
+-- if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
+-- if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
+-- if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
--- if not md5.HEX then
--- local pattern_HEX = Cs( ( P(1) / function(chr) return format("%02X",byte(chr)) end)^0 )
--- function md5.HEX(str) return lpegmatch(pattern_HEX,md5.sum(str)) end
--- end
---
--- if not md5.hex then
--- local pattern_hex = Cs( ( P(1) / function(chr) return format("%02x",byte(chr)) end)^0 )
--- function md5.hex(str) return lpegmatch(pattern_hex,md5.sum(str)) end
--- end
---
--- if not md5.dec then
--- local pattern_dec = Cs( ( P(1) / function(chr) return format("%02i",byte(chr)) end)^0 )
--- function md5.dec(str) return lpegmatch(pattern_dec,md5.sum(str)) end
--- end
+do
+
+ local patterns = lpeg and lpeg.patterns
+
+ if patterns then
+
+ local bytestoHEX = patterns.bytestoHEX
+ local bytestohex = patterns.bytestohex
+ local bytestodec = patterns.bytestodec
+
+ local lpegmatch = lpeg.match
+ local md5sum = md5.sum
+
+ if not md5.HEX then function md5.HEX(str) if str then return lpegmatch(bytestoHEX,md5sum(str)) end end end
+ if not md5.hex then function md5.hex(str) if str then return lpegmatch(bytestohex,md5sum(str)) end end end
+ if not md5.dec then function md5.dec(str) if str then return lpegmatch(bytestodec,md5sum(str)) end end end
+
+ end
+
+end
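+
+-- a quick check against the usual md5 test vector (illustration only, the values
+-- are the well known ones):
+--
+-- md5.hex("abc") -- 900150983cd24fb0d6963f7d28e17f72
+-- md5.HEX("abc") -- 900150983CD24FB0D6963F7D28E17F72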
function file.needsupdating(oldname,newname,threshold) -- size modification access change
local oldtime = lfs.attributes(oldname,"modification")
diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua
index bfafa4f95..0a86ea6d6 100644
--- a/tex/context/base/l-os.lua
+++ b/tex/context/base/l-os.lua
@@ -25,8 +25,6 @@ if not modules then modules = { } end modules ['l-os'] = {
-- os.sleep() => socket.sleep()
-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
--- maybe build io.flush in os.execute
-
local os = os
local date, time = os.date, os.time
local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
@@ -118,15 +116,11 @@ end
-- end of environment hack
-local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
-
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
+local execute = os.execute
+local iopopen = io.popen
function os.resultof(command)
- local handle = io.popen(command,"r")
+ local handle = iopopen(command,"r") -- already has flush
if handle then
local result = handle:read("*all") or ""
handle:close()
@@ -137,7 +131,7 @@ function os.resultof(command)
end
if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
+ if find(os.getenv("PATH"),";",1,true) then
io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
else
io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
@@ -160,7 +154,7 @@ local launchers = {
}
function os.launch(str)
- os.execute(format(launchers[os.name] or launchers.unix,str))
+ execute(format(launchers[os.name] or launchers.unix,str))
end
if not os.times then -- ?
@@ -236,7 +230,7 @@ elseif os.type == "windows" then
function resolvers.platform(t,k)
local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
+ if find(architecture,"AMD64",1,true) then
-- platform = "mswin-64"
platform = "win64"
else
@@ -252,9 +246,9 @@ elseif name == "linux" then
function resolvers.platform(t,k)
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform = "linux-64"
- elseif find(architecture,"ppc") then
+ elseif find(architecture,"ppc",1,true) then
platform = "linux-ppc"
else
platform = "linux"
@@ -285,9 +279,9 @@ elseif name == "macosx" then
if architecture == "" then
-- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
platform = "osx-intel"
- elseif find(architecture,"i386") then
+ elseif find(architecture,"i386",1,true) then
platform = "osx-intel"
- elseif find(architecture,"x86_64") then
+ elseif find(architecture,"x86_64",1,true) then
platform = "osx-64"
else
platform = "osx-ppc"
@@ -301,7 +295,7 @@ elseif name == "sunos" then
function resolvers.platform(t,k)
local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
+ if find(architecture,"sparc",1,true) then
platform = "solaris-sparc"
else -- if architecture == 'i86pc'
platform = "solaris-intel"
@@ -315,7 +309,7 @@ elseif name == "freebsd" then
function resolvers.platform(t,k)
local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
+ if find(architecture,"amd64",1,true) then
platform = "freebsd-amd64"
else
platform = "freebsd"
@@ -330,7 +324,7 @@ elseif name == "kfreebsd" then
function resolvers.platform(t,k)
-- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
+ if find(architecture,"x86_64",1,true) then
platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
@@ -355,8 +349,10 @@ else
end
+os.newline = name == "windows" and "\013\010" or "\010" -- crlf or lf
+
function resolvers.bits(t,k)
- local bits = find(os.platform,"64") and 64 or 32
+ local bits = find(os.platform,"64",1,true) and 64 or 32
os.bits = bits
return bits
end
diff --git a/tex/context/base/l-sandbox.lua b/tex/context/base/l-sandbox.lua
new file mode 100644
index 000000000..f7901379c
--- /dev/null
+++ b/tex/context/base/l-sandbox.lua
@@ -0,0 +1,271 @@
+if not modules then modules = { } end modules ['l-sandbox'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We use string names instead of function variables, so 'io.open' instead of io.open. That
+-- way we can still intercept repetitive overloads. One complication is that when we use
+-- sandboxed functions in helpers in the sandbox checkers, we can get a recursion loop, so
+-- for that reason we need to keep the originals around till we enable the sandbox.
+
+-- if sandbox then return end
+
+local global = _G
+local next = next
+local unpack = unpack or table.unpack
+local type = type
+local tprint = texio.write_nl or print
+local tostring = tostring
+local format = string.format -- no formatters yet
+local concat = table.concat
+local sort = table.sort
+local gmatch = string.gmatch
+
+sandbox = { }
+local sandboxed = false
+local overloads = { }
+local skiploads = { }
+local initializers = { }
+local finalizers = { }
+local originals = { }
+local comments = { }
+local trace = false
+local logger = false
+
+-- this comes real early, so that we can still alias
+
+local function report(...)
+ tprint("sandbox ! " .. format(...)) -- poor mans tracer
+end
+
+sandbox.report = report
+
+function sandbox.setreporter(r)
+ report = r
+ sandbox.report = r
+end
+
+function sandbox.settrace(v)
+ trace = v
+end
+
+function sandbox.setlogger(l)
+ logger = type(l) == "function" and l or false
+end
+
+local function register(func,overload,comment)
+ if type(func) == "function" then
+ if type(overload) == "string" then
+ comment = overload
+ overload = nil
+ end
+ local function f(...)
+ if sandboxed then
+ local overload = overloads[f]
+ if overload then
+ if logger then
+ local result = { overload(func,...) }
+ logger {
+ comment = comments[f] or tostring(f),
+ arguments = { ... },
+ result = result[1] and true or false,
+ }
+ return unpack(result)
+ else
+ return overload(func,...)
+ end
+ else
+ -- ignored, maybe message
+ end
+ else
+ return func(...)
+ end
+ end
+ if comment then
+ comments[f] = comment
+ if trace then
+ report("registering function: %s",comment)
+ end
+ end
+ overloads[f] = overload or false
+ originals[f] = func
+ return f
+ end
+end
+
+local function redefine(func,comment)
+ if type(func) == "function" then
+ skiploads[func] = comment or comments[func] or "unknown"
+ if overloads[func] == false then
+ overloads[func] = nil -- not initialized anyway
+ end
+ end
+end
+
+sandbox.register = register
+sandbox.redefine = redefine
+
+function sandbox.original(func)
+ return originals and originals[func] or func
+end
+
+function sandbox.overload(func,overload,comment)
+ comment = comment or comments[func] or "?"
+ if type(func) ~= "function" then
+ if trace then
+ report("overloading unknown function: %s",comment)
+ end
+ elseif type(overload) ~= "function" then
+ if trace then
+ report("overloading function with bad overload: %s",comment)
+ end
+ elseif overloads[func] == nil then
+ if trace then
+ report("function is not registered: %s",comment)
+ end
+ elseif skiploads[func] then
+ if trace then
+ report("function is skipped: %s",comment)
+ end
+ else
+ if trace then
+ report("overloading function: %s",comment)
+ end
+ overloads[func] = overload
+ end
+ return func
+end
+
+function sandbox.initializer(f)
+ if not sandboxed then
+ initializers[#initializers+1] = f
+ elseif trace then
+ report("already enabled, discarding initializer")
+ end
+end
+
+function sandbox.finalizer(f)
+ if not sandboxed then
+ finalizers[#finalizers+1] = f
+ elseif trace then
+ report("already enabled, discarding finalizer")
+ end
+end
+
+function sandbox.enable()
+ if not sandboxed then
+ for i=1,#initializers do
+ initializers[i]()
+ end
+ for i=1,#finalizers do
+ finalizers[i]()
+ end
+ local nnot = 0
+ local nyes = 0
+ local cnot = { }
+ local cyes = { }
+ local skip = { }
+ for k, v in next, overloads do
+ local c = comments[k]
+ if v then
+ if c then
+ cyes[#cyes+1] = c
+ else -- if not skiploads[k] then
+ nyes = nyes + 1
+ end
+ else
+ if c then
+ cnot[#cnot+1] = c
+ else -- if not skiploads[k] then
+ nnot = nnot + 1
+ end
+ end
+ end
+ for k, v in next, skiploads do
+ skip[#skip+1] = v
+ end
+ if #cyes > 0 then
+ sort(cyes)
+ report(" overloaded known : %s",concat(cyes," | "))
+ end
+ if nyes > 0 then
+ report(" overloaded unknown : %s",nyes)
+ end
+ if #cnot > 0 then
+ sort(cnot)
+ report("not overloaded known : %s",concat(cnot," | "))
+ end
+ if nnot > 0 then
+ report("not overloaded unknown : %s",nnot)
+ end
+ if #skip > 0 then
+ sort(skip)
+ report("not overloaded redefined : %s",concat(skip," | "))
+ end
+ initializers = nil
+ finalizers = nil
+ originals = nil
+ sandboxed = true
+ end
+end
+
+-- we sandbox some of the built-in functions now:
+
+-- todo: require
+-- todo: load
+
+local function supported(library)
+ local l = _G[library]
+ -- if l then
+ -- for k, v in next, l do
+ -- report("%s.%s",library,k)
+ -- end
+ -- end
+ return l
+end
+
+-- io.tmpfile : we don't know where that one ends up but it probably is user temp space
+-- os.tmpname : no need to deal with this: outputs rubbish anyway (\s9v0. \s9v0.1 \s9v0.2 etc)
+-- os.tmpdir : not needed either (luatex.vob000 luatex.vob000 etc)
+
+-- os.setenv : maybe
+-- require : maybe (normally taken from tree)
+-- http etc : maybe (all schemes that go outside)
+
+loadfile = register(loadfile,"loadfile")
+
+if supported("io") then
+ io.open = register(io.open, "io.open")
+ io.popen = register(io.popen, "io.popen") -- needs checking
+ io.lines = register(io.lines, "io.lines")
+ io.output = register(io.output, "io.output")
+ io.input = register(io.input, "io.input")
+end
+
+if supported("os") then
+ os.execute = register(os.execute, "os.execute")
+ os.spawn = register(os.spawn, "os.spawn")
+ os.exec = register(os.exec, "os.exec")
+ os.rename = register(os.rename, "os.rename")
+ os.remove = register(os.remove, "os.remove")
+end
+
+if supported("lfs") then
+ lfs.chdir = register(lfs.chdir, "lfs.chdir")
+ lfs.mkdir = register(lfs.mkdir, "lfs.mkdir")
+ lfs.rmdir = register(lfs.rmdir, "lfs.rmdir")
+ lfs.isfile = register(lfs.isfile, "lfs.isfile")
+ lfs.isdir = register(lfs.isdir, "lfs.isdir")
+ lfs.attributes = register(lfs.attributes, "lfs.attributes")
+ lfs.dir = register(lfs.dir, "lfs.dir")
+ lfs.lock_dir = register(lfs.lock_dir, "lfs.lock_dir")
+ lfs.touch = register(lfs.touch, "lfs.touch")
+ lfs.link = register(lfs.link, "lfs.link")
+ lfs.setmode = register(lfs.setmode, "lfs.setmode")
+ lfs.readlink = register(lfs.readlink, "lfs.readlink")
+ lfs.shortname = register(lfs.shortname, "lfs.shortname")
+ lfs.symlinkattributes = register(lfs.symlinkattributes,"lfs.symlinkattributes")
+end
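+
+-- a minimal usage sketch (the overload below is hypothetical, not something this
+-- module installs by itself):
+--
+-- sandbox.initializer(function()
+--     sandbox.overload(io.open,function(original,name,mode)
+--         sandbox.report("opening: %s",name)
+--         return original(name,mode)
+--     end,"io.open")
+-- end)
+--
+-- sandbox.enable() -- from here on the registered functions consult their overloads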
diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua
index 9b079b00a..70c66f661 100644
--- a/tex/context/base/l-string.lua
+++ b/tex/context/base/l-string.lua
@@ -70,6 +70,7 @@ function string.limit(str,n,sentinel) -- not utf proof
end
local stripper = patterns.stripper
+local fullstripper = patterns.fullstripper
local collapser = patterns.collapser
local longtostring = patterns.longtostring
@@ -77,6 +78,10 @@ function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
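+
+-- a small illustration (assuming spacer covers spaces and tabs while whitespace
+-- also covers newlines, as in l-lpeg):
+--
+-- string.strip(" x \n ")     -- "x \n" : only spaces and tabs at the edges go
+-- string.fullstrip(" x \n ") -- "x"    : newlines go too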
+
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -89,7 +94,7 @@ end
-- return not find(str,"%S")
-- end
-local pattern = P(" ")^0 * P(-1)
+local pattern = P(" ")^0 * P(-1) -- maybe also newlines
-- patterns.onlyspaces = pattern
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index f361f3d20..b02f210cb 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -39,7 +39,7 @@ end
function table.keys(t)
if t then
local keys, k = { }, 0
- for key, _ in next, t do
+ for key in next, t do
k = k + 1
keys[k] = key
end
@@ -49,35 +49,127 @@ function table.keys(t)
end
end
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb and ta == "number" then
+-- return a < b
+-- else
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+-- end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb and (ta == "number" or ta == "string") then
+-- return a < b
+-- else
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+-- end
+
+-- local function sortedkeys(tab)
+-- if tab then
+-- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+-- for key in next, tab do
+-- s = s + 1
+-- srt[s] = key
+-- if category == 3 then
+-- -- no further check
+-- else
+-- local tkey = type(key)
+-- if tkey == "string" then
+-- category = (category == 2 and 3) or 1
+-- elseif tkey == "number" then
+-- category = (category == 1 and 3) or 2
+-- else
+-- category = 3
+-- end
+-- end
+-- end
+-- if category == 0 or category == 3 then
+-- sort(srt,compare)
+-- else
+-- sort(srt)
+-- end
+-- return srt
+-- else
+-- return { }
+-- end
+-- end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb and (ta == "number" or ta == "string") then
+-- return a < b
+-- else
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+-- end
+
+-- local function compare(a,b)
+-- local ta = type(a) -- needed, else 11 < 2
+-- if ta == "number" or ta == "string" then
+-- local tb = type(b) -- needed, else 11 < 2
+-- if ta == tb then
+-- return a < b
+-- end
+-- end
+-- return tostring(a) < tostring(b) -- not that efficient
+-- end
+
local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
+ local ta = type(a) -- needed, else 11 < 2
+ if ta == "number" then
+ local tb = type(b) -- needed, else 11 < 2
+ if ta == tb then
+ return a < b
+ elseif tb == "string" then
+ return tostring(a) < b
+ end
+ elseif ta == "string" then
+ local tb = type(b) -- needed, else 11 < 2
+ if ta == tb then
+ return a < b
+ else
+ return a < tostring(b)
+ end
end
+ return tostring(a) < tostring(b) -- not that efficient
end
local function sortedkeys(tab)
if tab then
local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
+ for key in next, tab do
s = s + 1
srt[s] = key
if category == 3 then
-- no further check
+ elseif category == 1 then
+ if type(key) ~= "string" then
+ category = 3
+ end
+ elseif category == 2 then
+ if type(key) ~= "number" then
+ category = 3
+ end
else
local tkey = type(key)
if tkey == "string" then
- category = (category == 2 and 3) or 1
+ category = 1
elseif tkey == "number" then
- category = (category == 1 and 3) or 2
+ category = 2
else
category = 3
end
end
end
- if category == 0 or category == 3 then
+ if s < 2 then
+ -- nothing to sort
+ elseif category == 3 then
sort(srt,compare)
else
sort(srt)
@@ -88,16 +180,54 @@ local function sortedkeys(tab)
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt, s = { }, 0
+ for key in next, tab do
+ if type(key) == "string" then
+ s = s + 1
+ srt[s] = key
+ end
+ end
+ if s > 1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
+ end
+end
+
+local function sortedindexonly(tab)
+ if tab then
+ local srt, s = { }, 0
+ for key in next, tab do
+ if type(key) == "number" then
+ s = s + 1
+ srt[s] = key
+ end
+ end
+ if s > 1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
+ end
+end
+
local function sortedhashkeys(tab,cmp) -- fast one
if tab then
local srt, s = { }, 0
- for key,_ in next, tab do
+ for key in next, tab do
if key then
s= s + 1
srt[s] = key
end
end
- sort(srt,cmp)
+ if s > 1 then
+ sort(srt,cmp)
+ end
return srt
else
return { }
@@ -107,15 +237,17 @@ end
function table.allkeys(t)
local keys = { }
for k, v in next, t do
- for k, v in next, v do
+ for k in next, v do
keys[k] = true
end
end
return sortedkeys(keys)
end
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
+table.sortedkeys = sortedkeys
+table.sortedhashonly = sortedhashonly
+table.sortedindexonly = sortedindexonly
+table.sortedhashkeys = sortedhashkeys
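+
+-- a small illustration of the two new helpers (the table is made up):
+--
+-- local t = { "a", "b", x = 1, y = 2 }
+-- table.sortedindexonly(t) -- { 1, 2 }     : only the numeric keys, sorted
+-- table.sortedhashonly(t)  -- { "x", "y" } : only the string keys, sorted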
local function nothing() end
@@ -128,19 +260,21 @@ local function sortedhash(t,cmp)
else
s = sortedkeys(t) -- the robust one
end
- local n = 0
local m = #s
- local function kv(s)
- if n < m then
- n = n + 1
- local k = s[n]
- return k, t[k]
+ if m == 1 then
+ return next, t
+ elseif m > 0 then
+ local n = 0
+ return function()
+ if n < m then
+ n = n + 1
+ local k = s[n]
+ return k, t[k]
+ end
end
end
- return kv, s
- else
- return nothing
end
+ return nothing
end
table.sortedhash = sortedhash
@@ -284,7 +418,7 @@ end
local function copy(t, tables) -- taken from lua wiki, slightly adapted
tables = tables or { }
- local tcopy = {}
+ local tcopy = { }
if not tables[t] then
tables[t] = tcopy
end
@@ -344,7 +478,7 @@ function table.fromhash(t)
return hsh
end
-local noquotes, hexify, handle, reduce, compact, inline, functions
+local noquotes, hexify, handle, compact, inline, functions
local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
@@ -352,33 +486,67 @@ local reserved = table.tohash { -- intercept a language inconvenience: no reserv
'NaN', 'goto',
}
+-- local function simple_table(t)
+-- if #t > 0 then
+-- local n = 0
+-- for _,v in next, t do
+-- n = n + 1
+-- end
+-- if n == #t then
+-- local tt, nt = { }, 0
+-- for i=1,#t do
+-- local v = t[i]
+-- local tv = type(v)
+-- if tv == "number" then
+-- nt = nt + 1
+-- if hexify then
+-- tt[nt] = format("0x%X",v)
+-- else
+-- tt[nt] = tostring(v) -- tostring not needed
+-- end
+-- elseif tv == "string" then
+-- nt = nt + 1
+-- tt[nt] = format("%q",v)
+-- elseif tv == "boolean" then
+-- nt = nt + 1
+-- tt[nt] = v and "true" or "false"
+-- else
+-- return nil
+-- end
+-- end
+-- return tt
+-- end
+-- end
+-- return nil
+-- end
+
local function simple_table(t)
- if #t > 0 then
+ local nt = #t
+ if nt > 0 then
local n = 0
for _,v in next, t do
n = n + 1
+ -- if type(v) == "table" then
+ -- return nil
+ -- end
end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
+ if n == nt then
+ local tt = { }
+ for i=1,nt do
local v = t[i]
local tv = type(v)
if tv == "number" then
- nt = nt + 1
if hexify then
- tt[nt] = format("0x%04X",v)
+ tt[i] = format("0x%X",v)
else
- tt[nt] = tostring(v) -- tostring not needed
+ tt[i] = tostring(v) -- tostring not needed
end
elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
+ tt[i] = format("%q",v)
elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = v and "true" or "false"
+ tt[i] = v and "true" or "false"
else
- tt = nil
- break
+ return nil
end
end
return tt
@@ -417,7 +585,7 @@ local function do_serialize(root,name,depth,level,indexed)
local tn = type(name)
if tn == "number" then
if hexify then
- handle(format("%s[0x%04X]={",depth,name))
+ handle(format("%s[0x%X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
@@ -435,15 +603,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
-- we could check for k (index) being number (cardinal)
- if root and next(root) then
- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- -- if compact then
- -- -- NOT: for k=1,#root do (we need to quit at nil)
- -- for k,v in ipairs(root) do -- can we use next?
- -- if not first then first = k end
- -- last = last + 1
- -- end
- -- end
+ if root and next(root) ~= nil then
local first, last = nil, 0
if compact then
last = #root
@@ -459,27 +619,21 @@ local function do_serialize(root,name,depth,level,indexed)
end
local sk = sortedkeys(root)
for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local tv, tk = type(v), type(k)
+ local k = sk[i]
+ local v = root[k]
+ local tv = type(v)
+ local tk = type(k)
if compact and first and tk == "number" and k >= first and k <= last then
if tv == "number" then
if hexify then
- handle(format("%s 0x%04X,",depth,v))
+ handle(format("%s 0x%X,",depth,v))
else
handle(format("%s %s,",depth,v)) -- %.99g
end
elseif tv == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
+ handle(format("%s %q,",depth,v))
elseif tv == "table" then
- if not next(v) then
+ if next(v) == nil then
handle(format("%s {},",depth))
elseif inline then -- and #t > 0
local st = simple_table(v)
@@ -509,64 +663,48 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv == "number" then
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
end
elseif tk == "boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
else
handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) -- %.99g
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
+ handle(format("%s %s=0x%X,",depth,k,v))
else
handle(format("%s %s=%s,",depth,k,v)) -- %.99g
end
else
if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
+ handle(format("%s [%q]=0x%X,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
end
end
elseif tv == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
else
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
+ handle(format("%s [%q]=%q,",depth,k,v))
end
elseif tv == "table" then
- if not next(v) then
+ if next(v) == nil then
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
+ handle(format("%s [0x%X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
@@ -582,7 +720,7 @@ local function do_serialize(root,name,depth,level,indexed)
if st then
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
@@ -602,7 +740,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv == "boolean" then
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
else
handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
@@ -619,7 +757,7 @@ local function do_serialize(root,name,depth,level,indexed)
-- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v) -- maybe strip
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
else
handle(format("%s [%s]=load(%q),",depth,k,f))
end
@@ -634,7 +772,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
if tk == "number" then
if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
@@ -646,10 +784,9 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s [%q]=%q,",depth,k,tostring(v)))
end
end
- --~ end
end
end
- if level > 0 then
+ if level > 0 then
handle(format("%s},",depth))
end
end
@@ -663,7 +800,6 @@ local function serialize(_handle,root,name,specification) -- handle wins
noquotes = specification.noquotes
hexify = specification.hexify
handle = _handle or specification.handle or print
- reduce = specification.reduce or false
functions = specification.functions
compact = specification.compact
inline = specification.inline and compact
@@ -680,7 +816,6 @@ local function serialize(_handle,root,name,specification) -- handle wins
noquotes = false
hexify = false
handle = _handle or print
- reduce = false
compact = true
inline = true
functions = true
@@ -693,7 +828,7 @@ local function serialize(_handle,root,name,specification) -- handle wins
end
elseif tname == "number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("[0x%X]={",name))
else
handle("[" .. name .. "]={")
end
@@ -714,7 +849,7 @@ local function serialize(_handle,root,name,specification) -- handle wins
root._w_h_a_t_e_v_e_r_ = nil
end
-- Let's forget about empty tables.
- if next(root) then
+ if next(root) ~= nil then
do_serialize(root,name,"",0)
end
end
@@ -754,15 +889,6 @@ end
table.tohandle = serialize
--- sometimes tables are real use (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
local maxtab = 2*1024
function table.tofile(filename,root,name,specification)
@@ -888,16 +1014,27 @@ end
table.identical = identical
table.are_equal = are_equal
--- maybe also make a combined one
-
-function table.compact(t) -- remove empty tables, assumes subtables
- if t then
- for k, v in next, t do
- if not next(v) then -- no type checking
- t[k] = nil
+local function sparse(old,nest,keeptables)
+ local new = { }
+ for k, v in next, old do
+ if not (v == "" or v == false) then
+ if nest and type(v) == "table" then
+ v = sparse(v,nest)
+ if keeptables or next(v) ~= nil then
+ new[k] = v
+ end
+ else
+ new[k] = v
end
end
end
+ return new
+end
+
+table.sparse = sparse
+
+function table.compact(t)
+ return sparse(t,true,true)
end
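+
+-- a sketch of the difference (values are illustrative):
+--
+-- local t = { a = "", b = false, c = "kept", d = { e = "" } }
+-- table.sparse(t)      -- { c = "kept", d = { e = "" } } : drops "" and false at the top level
+-- table.sparse(t,true) -- { c = "kept" }                 : recurses, drops subtables that end up empty
+-- table.compact(t)     -- { c = "kept", d = { } }        : recurses but keeps the emptied subtables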
function table.contains(t, v)
@@ -1006,7 +1143,9 @@ function table.print(t,...)
end
end
-setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+end
-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
@@ -1019,11 +1158,11 @@ end
-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
function table.is_empty(t)
- return not t or not next(t)
+ return not t or next(t) == nil
end
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and next(t,next(t)) == nil
end
-- new
@@ -1078,3 +1217,49 @@ function table.values(t,s) -- optional sort flag
return { }
end
end
+
+-- maybe this will move to util-tab.lua
+
+-- for k, v in table.filtered(t,pattern) do ... end
+-- for k, v in table.filtered(t,pattern,true) do ... end
+-- for k, v in table.filtered(t,pattern,true,cmp) do ... end
+
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern) == "string" then
+ if sort then
+ local s
+ if cmp then
+ -- it would be nice if the sort function would accept a third argument (or nicer, an optional first)
+ s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s = sortedkeys(t) -- the robust one
+ end
+ local n = 0
+ local m = #s
+ local function kv(s)
+ while n < m do
+ n = n + 1
+ local k = s[n]
+ if find(k,pattern) then
+ return k, t[k]
+ end
+ end
+ end
+ return kv, s
+ else
+ local n = next(t)
+ local function iterator()
+ while n ~= nil do
+ local k = n
+ n = next(t,k)
+ if find(k,pattern) then
+ return k, t[k]
+ end
+ end
+ end
+ return iterator, t
+ end
+ else
+ return nothing
+ end
+end
diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua
index 6601a4c62..70b60324a 100644
--- a/tex/context/base/l-unicode.lua
+++ b/tex/context/base/l-unicode.lua
@@ -6,7 +6,14 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
--- this module will be reorganized
+-- in lua 5.3:
+
+-- utf8.char(···) : concatenated
+-- utf8.charpatt : "[\0-\x7F\xC2-\xF4][\x80-\xBF]*"
+-- utf8.codes(s) : for p, c in utf8.codes(s) do body end
+-- utf8.codepoint(s [, i [, j]])
+-- utf8.len(s [, i])
+-- utf8.offset(s, n [, i])
-- todo: utf.sub replacement (used in syst-aux)
-- we put these in the utf namespace:
@@ -27,20 +34,23 @@ local type = type
local char, byte, format, sub, gmatch = string.char, string.byte, string.format, string.sub, string.gmatch
local concat = table.concat
local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-local bytepairs = string.bytepairs
+local lpegmatch = lpeg.match
+local patterns = lpeg.patterns
+local tabletopattern = lpeg.utfchartabletopattern
+
+local bytepairs = string.bytepairs
-local finder = lpeg.finder
-local replacer = lpeg.replacer
+local finder = lpeg.finder
+local replacer = lpeg.replacer
-local utfvalues = utf.values
-local utfgmatch = utf.gmatch -- not always present
+local utfvalues = utf.values
+local utfgmatch = utf.gmatch -- not always present
local p_utftype = patterns.utftype
local p_utfstricttype = patterns.utfstricttype
local p_utfoffset = patterns.utfoffset
-local p_utf8char = patterns.utf8char
+local p_utf8char = patterns.utf8character
local p_utf8byte = patterns.utf8byte
local p_utfbom = patterns.utfbom
local p_newline = patterns.newline
@@ -408,9 +418,11 @@ if not utf.sub then
end
end
- local pattern_zero = Cmt(p_utf8char,slide_zero)^0
- local pattern_one = Cmt(p_utf8char,slide_one )^0
- local pattern_two = Cmt(p_utf8char,slide_two )^0
+ local pattern_zero = Cmt(p_utf8char,slide_zero)^0
+ local pattern_one = Cmt(p_utf8char,slide_one )^0
+ local pattern_two = Cmt(p_utf8char,slide_two )^0
+
+ local pattern_first = C(patterns.utf8character)
function utf.sub(str,start,stop)
if not start then
@@ -453,7 +465,9 @@ if not utf.sub then
end
end
end
- if start > stop then
+ if start == 1 and stop == 1 then
+ return lpegmatch(pattern_first,str) or ""
+ elseif start > stop then
return ""
elseif start > 1 then
b, e, n, first, last = 0, 0, 0, start - 1, stop
@@ -503,22 +517,70 @@ end
-- a replacement for simple gsubs:
-function utf.remapper(mapping)
- local pattern = Cs((p_utf8char/mapping)^0)
- return function(str)
- if not str or str == "" then
- return ""
+-- function utf.remapper(mapping)
+-- local pattern = Cs((p_utf8char/mapping)^0)
+-- return function(str)
+-- if not str or str == "" then
+-- return ""
+-- else
+-- return lpegmatch(pattern,str)
+-- end
+-- end, pattern
+-- end
+
+function utf.remapper(mapping,option) -- static also returns a pattern
+ local variant = type(mapping)
+ if variant == "table" then
+ if option == "dynamic" then
+ local pattern = false
+ table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern = false end)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ if not pattern then
+ pattern = Cs((tabletopattern(mapping)/mapping + p_utf8char)^0)
+ end
+ return lpegmatch(pattern,str)
+ end
+ end
+ elseif option == "pattern" then
+ return Cs((tabletopattern(mapping)/mapping + p_utf8char)^0)
+ -- elseif option == "static" then
+ else
+ local pattern = Cs((tabletopattern(mapping)/mapping + p_utf8char)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+ end
+ elseif variant == "function" then
+ if option == "pattern" then
+ return Cs((p_utf8char/mapping + p_utf8char)^0)
else
- return lpegmatch(pattern,str)
+ local pattern = Cs((p_utf8char/mapping + p_utf8char)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
end
- end, pattern
+ else
+ -- is actually an error
+ return function(str)
+ return str or ""
+ end
+ end
end
-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
-- print(remap("abcd 1234 abcd"))
---
-
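+-- a sketch of the new optional second argument (mapping is an illustrative table):
+--
+-- local mapping = { ["a"] = "A" }
+-- local static  = utf.remapper(mapping)            -- pattern built once from the table
+-- local dynamic = utf.remapper(mapping,"dynamic")  -- pattern rebuilt when the table is extended
+-- local pattern = utf.remapper(mapping,"pattern")  -- returns just the lpeg pattern
+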
function utf.replacer(t) -- no precheck, always string builder
local r = replacer(t,false,false,true)
return function(str)
@@ -624,285 +686,359 @@ end
local utf16_to_utf8_be, utf16_to_utf8_le
local utf32_to_utf8_be, utf32_to_utf8_le
-local utf_16_be_linesplitter = patterns.utfbom_16_be^-1 * lpeg.tsplitat(patterns.utf_16_be_nl)
-local utf_16_le_linesplitter = patterns.utfbom_16_le^-1 * lpeg.tsplitat(patterns.utf_16_le_nl)
+local utf_16_be_getbom = patterns.utfbom_16_be^-1
+local utf_16_le_getbom = patterns.utfbom_16_le^-1
+local utf_32_be_getbom = patterns.utfbom_32_be^-1
+local utf_32_le_getbom = patterns.utfbom_32_le^-1
+
+local utf_16_be_linesplitter = utf_16_be_getbom * lpeg.tsplitat(patterns.utf_16_be_nl)
+local utf_16_le_linesplitter = utf_16_le_getbom * lpeg.tsplitat(patterns.utf_16_le_nl)
+local utf_32_be_linesplitter = utf_32_be_getbom * lpeg.tsplitat(patterns.utf_32_be_nl)
+local utf_32_le_linesplitter = utf_32_le_getbom * lpeg.tsplitat(patterns.utf_32_le_nl)
+
+-- we have several possibilities: bytepairs (using tables), gmatch (using tables), gsub and
+-- lpeg. Bytepairs are the fastest, but as soon as we need to remove boms the gain shrinks
+-- due to the extra testing. Also, we seldom have to convert utf16, so we don't care too
+-- much about a few milliseconds more runtime. The lpeg variant is up to 20% slower but
+-- still pretty fast.
+--
+-- for historic reasons we keep the bytepairs variants around .. beware, they don't grab the
+-- bom like the lpegs do, so they're not drop-ins for the functions that follow
+--
+-- utf16_to_utf8_be = function(s)
+-- if not s then
+-- return nil
+-- elseif s == "" then
+-- return ""
+-- end
+-- local result, r, more = { }, 0, 0
+-- for left, right in bytepairs(s) do
+-- if right then
+-- local now = 256*left + right
+-- if more > 0 then
+-- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+-- more = 0
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- elseif now >= 0xD800 and now <= 0xDBFF then
+-- more = now
+-- else
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- end
+-- end
+-- end
+-- return concat(result)
+-- end
+--
+-- local utf16_to_utf8_be_t = function(t)
+-- if not t then
+-- return nil
+-- elseif type(t) == "string" then
+-- t = lpegmatch(utf_16_be_linesplitter,t)
+-- end
+-- local result = { } -- we reuse result
+-- for i=1,#t do
+-- local s = t[i]
+-- if s ~= "" then
+-- local r, more = 0, 0
+-- for left, right in bytepairs(s) do
+-- if right then
+-- local now = 256*left + right
+-- if more > 0 then
+-- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+-- more = 0
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- elseif now >= 0xD800 and now <= 0xDBFF then
+-- more = now
+-- else
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- end
+-- end
+-- end
+-- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+-- end
+-- end
+-- return t
+-- end
+--
+-- utf16_to_utf8_le = function(s)
+-- if not s then
+-- return nil
+-- elseif s == "" then
+-- return ""
+-- end
+-- local result, r, more = { }, 0, 0
+-- for left, right in bytepairs(s) do
+-- if right then
+-- local now = 256*right + left
+-- if more > 0 then
+-- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+-- more = 0
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- elseif now >= 0xD800 and now <= 0xDBFF then
+-- more = now
+-- else
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- end
+-- end
+-- end
+-- return concat(result)
+-- end
+--
+-- local utf16_to_utf8_le_t = function(t)
+-- if not t then
+-- return nil
+-- elseif type(t) == "string" then
+-- t = lpegmatch(utf_16_le_linesplitter,t)
+-- end
+-- local result = { } -- we reuse result
+-- for i=1,#t do
+-- local s = t[i]
+-- if s ~= "" then
+-- local r, more = 0, 0
+-- for left, right in bytepairs(s) do
+-- if right then
+-- local now = 256*right + left
+-- if more > 0 then
+-- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+-- more = 0
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- elseif now >= 0xD800 and now <= 0xDBFF then
+-- more = now
+-- else
+-- r = r + 1
+-- result[r] = utfchar(now)
+-- end
+-- end
+-- end
+-- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+-- end
+-- end
+-- return t
+-- end
+--
+-- local utf32_to_utf8_be_t = function(t)
+-- if not t then
+-- return nil
+-- elseif type(t) == "string" then
+-- t = lpegmatch(utflinesplitter,t)
+-- end
+-- local result = { } -- we reuse result
+-- for i=1,#t do
+-- local r, more = 0, -1
+-- for a,b in bytepairs(t[i]) do
+-- if a and b then
+-- if more < 0 then
+-- more = 256*256*256*a + 256*256*b
+-- else
+-- r = r + 1
+-- result[t] = utfchar(more + 256*a + b)
+-- more = -1
+-- end
+-- else
+-- break
+-- end
+-- end
+-- t[i] = concat(result,"",1,r)
+-- end
+-- return t
+-- end
+--
+-- local utf32_to_utf8_le_t = function(t)
+-- if not t then
+-- return nil
+-- elseif type(t) == "string" then
+-- t = lpegmatch(utflinesplitter,t)
+-- end
+-- local result = { } -- we reuse result
+-- for i=1,#t do
+-- local r, more = 0, -1
+-- for a,b in bytepairs(t[i]) do
+-- if a and b then
+-- if more < 0 then
+-- more = 256*b + a
+-- else
+-- r = r + 1
+-- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
+-- more = -1
+-- end
+-- else
+-- break
+-- end
+-- end
+-- t[i] = concat(result,"",1,r)
+-- end
+-- return t
+-- end
+
+local more = 0
+
+local p_utf16_to_utf8_be = C(1) * C(1) /function(left,right)
+ local now = 256*byte(left) + byte(right)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ return utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ return "" -- else the c's end up in the stream
+ else
+ return utfchar(now)
+ end
+end
--- we have three possibilities:
+local p_utf16_to_utf8_le = C(1) * C(1) /function(right,left)
+ local now = 256*byte(left) + byte(right)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ return utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ return "" -- else the c's end up in the stream
+ else
+ return utfchar(now)
+ end
+end
+local p_utf32_to_utf8_be = C(1) * C(1) * C(1) * C(1) /function(a,b,c,d)
+ return utfchar(256*256*256*byte(a) + 256*256*byte(b) + 256*byte(c) + byte(d))
+end
--- bytepairs: 0.048
--- gmatch : 0.069
--- lpeg : 0.089 (match time captures)
+local p_utf32_to_utf8_le = C(1) * C(1) * C(1) * C(1) /function(a,b,c,d)
+ return utfchar(256*256*256*byte(d) + 256*256*byte(c) + 256*byte(b) + byte(a))
+end
-if bytepairs then
+p_utf16_to_utf8_be = P(true) / function() more = 0 end * utf_16_be_getbom * Cs(p_utf16_to_utf8_be^0)
+p_utf16_to_utf8_le = P(true) / function() more = 0 end * utf_16_le_getbom * Cs(p_utf16_to_utf8_le^0)
+p_utf32_to_utf8_be = P(true) / function() more = 0 end * utf_32_be_getbom * Cs(p_utf32_to_utf8_be^0)
+p_utf32_to_utf8_le = P(true) / function() more = 0 end * utf_32_le_getbom * Cs(p_utf32_to_utf8_le^0)
- -- with a little bit more code we could include the linesplitter
+patterns.utf16_to_utf8_be = p_utf16_to_utf8_be
+patterns.utf16_to_utf8_le = p_utf16_to_utf8_le
+patterns.utf32_to_utf8_be = p_utf32_to_utf8_be
+patterns.utf32_to_utf8_le = p_utf32_to_utf8_le
- utf16_to_utf8_be = function(t)
- if type(t) == "string" then
- t = lpegmatch(utf_16_be_linesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*left + right
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
- end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
- end
- return t
+utf16_to_utf8_be = function(s)
+ if s and s ~= "" then
+ return lpegmatch(p_utf16_to_utf8_be,s)
+ else
+ return s
end
+end
- utf16_to_utf8_le = function(t)
- if type(t) == "string" then
- t = lpegmatch(utf_16_le_linesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*right + left
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
- end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+local utf16_to_utf8_be_t = function(t)
+ if not t then
+ return nil
+ elseif type(t) == "string" then
+ t = lpegmatch(utf_16_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s = t[i]
+ if s ~= "" then
+ t[i] = lpegmatch(p_utf16_to_utf8_be,s)
end
- return t
end
+ return t
+end
- utf32_to_utf8_be = function(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*256*256*a + 256*256*b
- else
- r = r + 1
- result[t] = utfchar(more + 256*a + b)
- more = -1
- end
- else
- break
- end
- end
- t[i] = concat(result,"",1,r)
- end
- return t
+utf16_to_utf8_le = function(s)
+ if s and s ~= "" then
+ return lpegmatch(p_utf16_to_utf8_le,s)
+ else
+ return s
end
+end
- utf32_to_utf8_le = function(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*b + a
- else
- r = r + 1
- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
- more = -1
- end
- else
- break
- end
- end
- t[i] = concat(result,"",1,r)
+local utf16_to_utf8_le_t = function(t)
+ if not t then
+ return nil
+ elseif type(t) == "string" then
+ t = lpegmatch(utf_16_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s = t[i]
+ if s ~= "" then
+ t[i] = lpegmatch(p_utf16_to_utf8_le,s)
end
- return t
end
+ return t
+end
-else
-
- utf16_to_utf8_be = function(t)
- if type(t) == "string" then
- t = lpegmatch(utf_16_be_linesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in gmatch(t[i],"(.)(.)") do
- if left == "\000" then -- experiment
- r = r + 1
- result[r] = utfchar(byte(right))
- elseif right then
- local now = 256*byte(left) + byte(right)
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
- end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
- end
- return t
+utf32_to_utf8_be = function(s)
+ if s and s ~= "" then
+ return lpegmatch(p_utf32_to_utf8_be,s)
+ else
+ return s
end
+end
- utf16_to_utf8_le = function(t)
- if type(t) == "string" then
- t = lpegmatch(utf_16_le_linesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in gmatch(t[i],"(.)(.)") do
- if right == "\000" then
- r = r + 1
- result[r] = utfchar(byte(left))
- elseif right then
- local now = 256*byte(right) + byte(left)
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
- end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+local utf32_to_utf8_be_t = function(t)
+ if not t then
+ return nil
+ elseif type(t) == "string" then
+ t = lpegmatch(utf_32_be_linesplitter,t)
+ end
+ for i=1,#t do
+ local s = t[i]
+ if s ~= "" then
+ t[i] = lpegmatch(p_utf32_to_utf8_be,s)
end
- return t
end
+ return t
+end
- utf32_to_utf8_le = function() return { } end -- never used anyway
- utf32_to_utf8_be = function() return { } end -- never used anyway
-
- -- the next one is slightly slower
-
- -- local result, lines, r, more = { }, { }, 0, 0
- --
- -- local simple = Cmt(
- -- C(1) * C(1), function(str,p,left,right)
- -- local now = 256*byte(left) + byte(right)
- -- if more > 0 then
- -- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- -- more = 0
- -- r = r + 1
- -- result[r] = utfchar(now)
- -- elseif now >= 0xD800 and now <= 0xDBFF then
- -- more = now
- -- else
- -- r = r + 1
- -- result[r] = utfchar(now)
- -- end
- -- return p
- -- end
- -- )
- --
- -- local complex = Cmt(
- -- C(1) * C(1), function(str,p,left,right)
- -- local now = 256*byte(left) + byte(right)
- -- if more > 0 then
- -- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- -- more = 0
- -- r = r + 1
- -- result[r] = utfchar(now)
- -- elseif now >= 0xD800 and now <= 0xDBFF then
- -- more = now
- -- else
- -- r = r + 1
- -- result[r] = utfchar(now)
- -- end
- -- return p
- -- end
- -- )
- --
- -- local lineend = Cmt (
- -- patterns.utf_16_be_nl, function(str,p)
- -- lines[#lines+1] = concat(result,"",1,r)
- -- r, more = 0, 0
- -- return p
- -- end
- -- )
- --
- -- local be_1 = patterns.utfbom_16_be^-1 * (simple + complex)^0
- -- local be_2 = patterns.utfbom_16_be^-1 * (lineend + simple + complex)^0
- --
- -- utf16_to_utf8_be = function(t)
- -- if type(t) == "string" then
- -- local s = t
- -- lines, r, more = { }, 0, 0
- -- lpegmatch(be_2,s)
- -- if r > 0 then
- -- lines[#lines+1] = concat(result,"",1,r)
- -- end
- -- result = { }
- -- return lines
- -- else
- -- for i=1,#t do
- -- r, more = 0, 0
- -- lpegmatch(be_1,t[i])
- -- t[i] = concat(result,"",1,r)
- -- end
- -- result = { }
- -- return t
- -- end
- -- end
+utf32_to_utf8_le = function(s)
+ if s and s ~= "" then
+ return lpegmatch(p_utf32_to_utf8_le,s)
+ else
+ return s
+ end
+end
+local utf32_to_utf8_le_t = function(t)
+ if not t then
+ return nil
+ elseif type(t) == "string" then
+ t = lpegmatch(utf_32_le_linesplitter,t)
+ end
+ for i=1,#t do
+ local s = t[i]
+ if s ~= "" then
+ t[i] = lpegmatch(p_utf32_to_utf8_le,s)
+ end
+ end
+ return t
end
-utf.utf16_to_utf8_le = utf16_to_utf8_le
-utf.utf16_to_utf8_be = utf16_to_utf8_be
-utf.utf32_to_utf8_le = utf32_to_utf8_le
-utf.utf32_to_utf8_be = utf32_to_utf8_be
+utf.utf16_to_utf8_le_t = utf16_to_utf8_le_t
+utf.utf16_to_utf8_be_t = utf16_to_utf8_be_t
+utf.utf32_to_utf8_le_t = utf32_to_utf8_le_t
+utf.utf32_to_utf8_be_t = utf32_to_utf8_be_t
-function utf.utf8_to_utf8(t)
+utf.utf16_to_utf8_le = utf16_to_utf8_le
+utf.utf16_to_utf8_be = utf16_to_utf8_be
+utf.utf32_to_utf8_le = utf32_to_utf8_le
+utf.utf32_to_utf8_be = utf32_to_utf8_be
+
+function utf.utf8_to_utf8_t(t)
return type(t) == "string" and lpegmatch(utflinesplitter,t) or t
end
-function utf.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+function utf.utf16_to_utf8_t(t,endian)
+ return endian and utf16_to_utf8_be_t(t) or utf16_to_utf8_le_t(t) or t
end
-function utf.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+function utf.utf32_to_utf8_t(t,endian)
+ return endian and utf32_to_utf8_be_t(t) or utf32_to_utf8_le_t(t) or t
end
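+-- Editorial sketch (not part of the patch, names as defined above): the plain
+-- converters take and return a single string, the _t variants take a table of
+-- lines (or a string that first gets split into lines) and convert the entries
+-- in place. Roughly:
+--
+-- local s = utf.utf16_to_utf8_be("\000H\000i")          -- "Hi"
+-- local t = utf.utf16_to_utf8_t({ "\000H\000i" }, true) -- { "Hi" }, true means big endian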
-local function little(c)
- local b = byte(c)
+local function little(b)
if b < 0x10000 then
return char(b%256,b/256)
else
@@ -912,8 +1048,7 @@ local function little(c)
end
end
-local function big(c)
- local b = byte(c)
+local function big(b)
if b < 0x10000 then
return char(b/256,b%256)
else
@@ -923,18 +1058,10 @@ local function big(c)
end
end
--- function utf.utf8_to_utf16(str,littleendian)
--- if littleendian then
--- return char(255,254) .. utfgsub(str,".",little)
--- else
--- return char(254,255) .. utfgsub(str,".",big)
--- end
--- end
-
-local _, l_remap = utf.remapper(little)
-local _, b_remap = utf.remapper(big)
+local l_remap = Cs((p_utf8byte/little+P(1)/"")^0)
+local b_remap = Cs((p_utf8byte/big +P(1)/"")^0)
-function utf.utf8_to_utf16_be(str,nobom)
+local function utf8_to_utf16_be(str,nobom)
if nobom then
return lpegmatch(b_remap,str)
else
@@ -942,7 +1069,7 @@ function utf.utf8_to_utf16_be(str,nobom)
end
end
-function utf.utf8_to_utf16_le(str,nobom)
+local function utf8_to_utf16_le(str,nobom)
if nobom then
return lpegmatch(l_remap,str)
else
@@ -950,11 +1077,14 @@ function utf.utf8_to_utf16_le(str,nobom)
end
end
+utf.utf8_to_utf16_be = utf8_to_utf16_be
+utf.utf8_to_utf16_le = utf8_to_utf16_le
+
function utf.utf8_to_utf16(str,littleendian,nobom)
if littleendian then
- return utf.utf8_to_utf16_le(str,nobom)
+ return utf8_to_utf16_le(str,nobom)
else
- return utf.utf8_to_utf16_be(str,nobom)
+ return utf8_to_utf16_be(str,nobom)
end
end
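+-- Editorial sketch (not part of the patch): going the other way a bom is
+-- prepended unless nobom is given, and toeight below sniffs that bom to get
+-- back to utf-8. Roughly:
+--
+-- local s16 = utf.utf8_to_utf16("abc") -- big endian, with bom
+-- local s8  = utf.toeight(s16)         -- "abc" again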
@@ -985,16 +1115,16 @@ function utf.xstring(s)
end
function utf.toeight(str)
- if not str then
+ if not str or str == "" then
return nil
end
local utftype = lpegmatch(p_utfstricttype,str)
if utftype == "utf-8" then
- return sub(str,4)
- elseif utftype == "utf-16-le" then
- return utf16_to_utf8_le(str)
+ return sub(str,4) -- remove the bom
elseif utftype == "utf-16-be" then
- return utf16_to_utf8_ne(str)
+ return utf16_to_utf8_be(str) -- bom gets removed
+ elseif utftype == "utf-16-le" then
+ return utf16_to_utf8_le(str) -- bom gets removed
else
return str
end
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
index 7b7910fa7..b189ec5bb 100644
--- a/tex/context/base/l-url.lua
+++ b/tex/context/base/l-url.lua
@@ -26,6 +26,8 @@ local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replac
-- | ___________|____________ |
-- / \ / \ |
-- urn:example:animal:ferret:nose interpretable as extension
+--
+-- also nice: http://url.spec.whatwg.org/ (maybe some day ...)
url = url or { }
local url = url
@@ -43,7 +45,7 @@ local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
-local escaped = (plus / " ") + escapedchar
+local escaped = (plus / " ") + escapedchar -- so no loc://foo++.tex
local noslash = P("/") / ""
@@ -143,19 +145,25 @@ local splitquery = Cf ( Ct("") * P { "sequence",
-- hasher
local function hashed(str) -- not yet ok (/test?test)
- if str == "" then
+ if not str or str == "" then
return {
scheme = "invalid",
original = str,
}
end
- local s = split(str)
- local rawscheme = s[1]
- local rawquery = s[4]
- local somescheme = rawscheme ~= ""
- local somequery = rawquery ~= ""
+ local detailed = split(str)
+ local rawscheme = ""
+ local rawquery = ""
+ local somescheme = false
+ local somequery = false
+ if detailed then
+ rawscheme = detailed[1]
+ rawquery = detailed[4]
+ somescheme = rawscheme ~= ""
+ somequery = rawquery ~= ""
+ end
if not somescheme and not somequery then
- s = {
+ return {
scheme = "file",
authority = "",
path = str,
@@ -165,31 +173,38 @@ local function hashed(str) -- not yet ok (/test?test)
noscheme = true,
filename = str,
}
- else -- not always a filename but handy anyway
- local authority, path, filename = s[2], s[3]
- if authority == "" then
- filename = path
- elseif path == "" then
- filename = ""
- else
- filename = authority .. "/" .. path
- end
- s = {
- scheme = rawscheme,
- authority = authority,
- path = path,
- query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
- queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
- fragment = s[5],
- original = str,
- noscheme = false,
- filename = filename,
- }
end
- return s
+ -- not always a filename but handy anyway
+ local authority = detailed[2]
+ local path = detailed[3]
+ local filename = nil
+ if authority == "" then
+ filename = path
+ elseif path == "" then
+ filename = ""
+ else
+ filename = authority .. "/" .. path
+ end
+ return {
+ scheme = rawscheme,
+ authority = authority,
+ path = path,
+ query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
+ queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
+ fragment = detailed[5],
+ original = str,
+ noscheme = false,
+ filename = filename,
+ }
end
--- inspect(hashed("template://test"))
+-- inspect(hashed())
+-- inspect(hashed(""))
+-- inspect(hashed("template:///test"))
+-- inspect(hashed("template:///test++.whatever"))
+-- inspect(hashed("template:///test%2B%2B.whatever"))
+-- inspect(hashed("template:///test%x.whatever"))
+-- inspect(hashed("tem%2Bplate:///test%x.whatever"))
-- Here we assume:
--
@@ -241,7 +256,7 @@ function url.construct(hash) -- dodo: we need to escape !
return lpegmatch(escaper,concat(fullurl))
end
-local pattern = Cs(noslash * R("az","AZ") * (S(":|")/":") * noslash * P(1)^0)
+local pattern = Cs(slash^-1/"" * R("az","AZ") * ((S(":|")/":") + P(":")) * slash * P(1)^0)
function url.filename(filename)
local spec = hashed(filename)
@@ -251,6 +266,7 @@ end
-- print(url.filename("/c|/test"))
-- print(url.filename("/c/test"))
+-- print(url.filename("file:///t:/sources/cow.svg"))
local function escapestring(str)
return lpegmatch(escaper,str)
diff --git a/tex/context/base/lang-def.mkiv b/tex/context/base/lang-def.mkiv
index 18f572039..c2fb9640c 100644
--- a/tex/context/base/lang-def.mkiv
+++ b/tex/context/base/lang-def.mkiv
@@ -271,37 +271,6 @@
\installlanguage [\s!slovenian] [\s!sl]
\installlanguage [slovene] [\s!sl] % both possible (mojca: still needed?)
-\def\doconvertsloveniancharacters{\dodoconvertcharacters{25}}
-
-\def\sloveniancharacters{\doconvertsloveniancharacters\sloveniancharacter}
-\def\slovenianCharacters{\doconvertsloveniancharacters\slovenianCharacter}
-
-%D Define general-purpose macros for Slovenian character enumerations:
-
-\defineconversion [sloveniancharacter] [\sloveniancharacter]
-\defineconversion [slovenianCharacter] [\slovenianCharacter]
-
-\defineconversion [sloveniancharacters] [\sloveniancharacters]
-\defineconversion [slovenianCharacters] [\slovenianCharacters]
-
-%D Define these as the general character enumeration when
-%D language is Slovenian. If you feel uncomfortable with this,
-%D mail Mojca, since she promised to take the heat.
-
-\defineconversion [\s!sl] [character] [\sloveniancharacter]
-\defineconversion [\s!sl] [Character] [\slovenianCharacter]
-
-\defineconversion [\s!sl] [characters] [\sloveniancharacters]
-\defineconversion [\s!sl] [Characters] [\slovenianCharacters]
-
-\defineconversion [\s!sl] [a] [\sloveniancharacters]
-\defineconversion [\s!sl] [A] [\slovenianCharacters]
-\defineconversion [\s!sl] [AK] [\smallcapped\sloveniancharacters]
-\defineconversion [\s!sl] [KA] [\smallcapped\sloveniancharacters]
-
-\def\sloveniancharacters#1{\ctxcommand{alphabetic(\number#1,"sl")}}
-\def\slovenianCharacters#1{\ctxcommand{Alphabetic(\number#1,"sl")}}
-
% Cyrillic Languages
\installlanguage
@@ -510,7 +479,8 @@
\c!rightquote=’,
\c!leftquotation=“,
\c!rightquotation=”,
- \c!date={서기,\v!year,년,\v!month,월,\v!day,일}]
+ % \c!date={서기,\space,\v!year,\labeltext{\v!year},\space,\v!month,\labeltext{\v!month},\space,\v!day,\labeltext{\v!day}}]
+ \c!date={\v!year,\labeltext{\v!year},\space,\v!month,\labeltext{\v!month},\space,\v!day,\labeltext{\v!day}}]
% Greek
@@ -634,6 +604,10 @@
\c!rightquotation=\upperrightdoubleninequote,
\c!date={\v!year,\space,\v!month,\space,\v!day}]
+\installlanguage[\s!pt-br][\c!default=\s!pt] % Brazil
+\installlanguage[\s!es-es][\c!default=\s!es] % Spain
+\installlanguage[\s!es-la][\c!default=\s!es] % Latin America
+
\installlanguage
[\s!ro]
[\c!spacing=\v!packed,
diff --git a/tex/context/base/lang-dis.lua b/tex/context/base/lang-dis.lua
new file mode 100644
index 000000000..db19a0fc5
--- /dev/null
+++ b/tex/context/base/lang-dis.lua
@@ -0,0 +1,203 @@
+if not modules then modules = { } end modules ['lang-dis'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local concat = table.concat
+
+local nodes = nodes
+
+local tasks = nodes.tasks
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getattr = nuts.getattr
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local copy_node = nuts.copy
+local free_node = nuts.free
+local remove_node = nuts.remove
+local traverse_id = nuts.traverse_id
+local traverse_nodes = nuts.traverse
+
+local nodecodes = nodes.nodecodes
+local disccodes = nodes.disccodes
+
+local disc_code = nodecodes.disc
+local glyph_code = nodecodes.glyph
+local discretionary_code = disccodes.discretionary
+
+local a_visualize = attributes.private("visualizediscretionary")
+local setattribute = tex.setattribute
+
+local getlanguagedata = languages.getdata
+
+local expanders = {
+ [disccodes.discretionary] = function(d,template)
+ -- \discretionary
+ return template
+ end,
+ [disccodes.explicit] = function(d,template)
+ -- \-
+ local pre = getfield(d,"pre")
+ if pre and getid(pre) == glyph_code and getchar(pre) <= 0 then
+ setfield(d,"pre",nil)
+ end
+ local post = getfield(d,"post")
+ if post and getid(post) == glyph_code and getchar(post) <= 0 then
+ setfield(d,"post",nil)
+ end
+-- setfield(d,"subtype",discretionary_code) -- to be checked
+ return template
+ end,
+ [disccodes.automatic] = function(d,template)
+ -- following a - : the pre and post chars are already appended and set
+ -- so we have pre=preex and post=postex .. however, the previous
+ -- hyphen is already injected ... downside: the font handler sees this
+ -- so this is another argument for doing a hyphenation pass in context
+ if getfield(d,"pre") then
+ -- we have a preex character and want that one to replace the
+ -- character in front, which is the trigger
+ if not template then
+ -- can there be font kerns already?
+ template = getprev(d)
+ if template and getid(template) ~= glyph_code then
+ template = getnext(d)
+ if template and getid(template) ~= glyph_code then
+ template = nil
+ end
+ end
+ end
+ if template then
+ local pseudohead = getprev(template)
+ if pseudohead then
+ while template ~= d do
+ pseudohead, template, removed = remove_node(pseudohead,template)
+ setfield(d,"replace",removed)
+ -- break ?
+ end
+ else
+ -- can't happen
+ end
+ setfield(d,"subtype",discretionary_code)
+ else
+ -- print("lone regular discretionary ignored")
+ end
+ end
+ return template
+ end,
+ [disccodes.regular] = function(d,template)
+ -- simple
+ if not template then
+ -- can there be font kerns already?
+ template = getprev(d)
+ if template and getid(template) ~= glyph_code then
+ template = getnext(d)
+ if template and getid(template) ~= glyph_code then
+ template = nil
+ end
+ end
+ end
+ if template then
+ local language = template and getfield(template,"lang")
+ local data = getlanguagedata(language)
+ local prechar = data.prehyphenchar
+ local postchar = data.posthyphenchar
+ if prechar and prechar > 0 then
+ local c = copy_node(template)
+ setfield(c,"char",prechar)
+ setfield(d,"pre",c)
+ end
+ if postchar and postchar > 0 then
+ local c = copy_node(template)
+ setfield(c,"char",postchar)
+ setfield(d,"post",c)
+ end
+ setfield(d,"subtype",discretionary_code)
+ else
+ -- print("lone regular discretionary ignored")
+ end
+ return template
+ end,
+ [disccodes.first] = function()
+ -- forget about them
+ end,
+ [disccodes.second] = function()
+ -- forget about them
+ end,
+}
+
+languages.expanders = expanders
+
+function languages.expand(d,template,subtype)
+ if not subtype then
+ subtype = getsubtype(d)
+ end
+ if subtype ~= discretionary_code then
+ return expanders[subtype](d,template)
+ end
+end
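+-- Editorial sketch (not part of the patch): a typical caller runs over the
+-- discretionaries in a (nut) list and lets the matching expander fill in or
+-- clean up the pre/post/replace fields, much like the expanded method in
+-- lang-hyp.lua does:
+--
+-- for d in traverse_id(disc_code,tonut(head)) do
+--     languages.expand(d)
+-- end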
+
+local setlistcolor = nodes.tracers.colors.setlist
+
+function languages.visualizediscretionaries(head)
+ for d in traverse_id(disc_code,tonut(head)) do
+ if getattr(d,a_visualize) then
+ local pre = getfield(d,"pre")
+ local post = getfield(d,"post")
+ local replace = getfield(d,"replace")
+ if pre then
+ setlistcolor(pre,"darkred")
+ end
+ if post then
+ setlistcolor(post,"darkgreen")
+ end
+ if replace then
+ setlistcolor(replace,"darkblue")
+ end
+ end
+ end
+end
+
+local enabled = false
+
+function languages.showdiscretionaries(v)
+ if v == false then
+ setattribute(a_visualize,unsetvalue)
+ else -- also nil
+ if not enabled then
+ nodes.tasks.enableaction("processors","languages.visualizediscretionaries")
+ enabled = true
+ end
+ setattribute(a_visualize,1)
+ end
+end
+
+interfaces.implement {
+ name = "showdiscretionaries",
+ actions = languages.showdiscretionaries
+}
+
+local toutf = nodes.listtoutf
+
+function languages.serializediscretionary(d) -- will move to tracer
+ return string.formatters["{%s}{%s}{%s}"](
+ toutf(getfield(d,"pre")) or "",
+ toutf(getfield(d,"post")) or "",
+ toutf(getfield(d,"replace")) or ""
+ )
+end
+
diff --git a/tex/context/base/lang-hyp.lua b/tex/context/base/lang-hyp.lua
new file mode 100644
index 000000000..496cfd19b
--- /dev/null
+++ b/tex/context/base/lang-hyp.lua
@@ -0,0 +1,1648 @@
+if not modules then modules = { } end modules ['lang-hyp'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: hyphenate over range if needed
+
+-- to be considered: reset dictionary.hyphenated when a pattern is added
+-- or maybe an explicit reset of the cache
+
+-- In an automated workflow hyphenation of long titles can be somewhat problematic,
+-- especially when demands conflict. For that reason I played a bit with a Lua based
+-- variant of the traditional hyphenation machinery. This mechanism has been extended
+-- several times in projects, of which a good description can be found in TUGboat,
+-- Volume 27 (2006), No. 2 — Proceedings of EuroTEX2006: Automatic non-standard
+-- hyphenation in OpenOffice.org by László Németh.
+--
+-- Being the result of two days of experimenting, the following implementation is probably
+-- not completely okay yet. If there is demand I might add some more features and plugs.
+-- The performance is quite okay but can probably be improved a bit, although this is not
+-- the most critical code.
+--
+-- . a l g o r i t h m .
+-- 4l1g4
+-- l g o3
+-- 1g o
+-- 2i t h
+-- 4h1m
+-- ---------------------
+-- 4 1 4 3 2 0 4 1
+-- a l-g o-r i t h-m
+
+-- . a s s z o n n y a l .
+-- s1s z/sz=sz,1,3
+-- n1n y/ny=ny,1,3
+-- -----------------------
+-- 0 1 0 0 0 1 0 0 0/sz=sz,2,3,ny=ny,6,3
+-- a s-s z o n-n y a l/sz=sz,2,3,ny=ny,6,3
+--
+-- ab1cd/ef=gh,2,2 : acd - efd (pattern/replacement,start,length)
+--
+-- In the process of wrapping up (for the ctx conference proceedings) I cleaned up
+-- and extended the code a bit.
+
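+-- A hedged illustration (editorial, not part of the patch) of how such patterns
+-- end up in the dictionary: registering "4l1g4" strips the digits for the hash
+-- key and keeps the weights as a list, so effectively
+--
+-- patterns["lg"] = { 4, 1, 4 }
+--
+-- When a word gets hyphenated the highest weight seen at a position wins and an
+-- odd final weight permits a break there, which is what the summary lines in
+-- the diagrams above show.
+--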
+local type, rawset, tonumber, next = type, rawset, tonumber, next
+
+local P, R, S, Cg, Cf, Ct, Cc, C, Carg, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cc, lpeg.C, lpeg.Carg, lpeg.Cs
+local lpegmatch = lpeg.match
+
+local concat = table.concat
+local insert = table.insert
+local remove = table.remove
+local formatters = string.formatters
+local utfchar = utf.char
+local utfbyte = utf.byte
+
+if not characters then
+ require("char-ini")
+end
+
+local setmetatableindex = table.setmetatableindex
+
+-- \enabletrackers[hyphenator.steps=silent] will not write to the terminal
+
+local trace_steps = false trackers.register("hyphenator.steps", function(v) trace_steps = v end)
+local trace_visualize = false trackers.register("hyphenator.visualize",function(v) trace_visualize = v end)
+
+local report = logs.reporter("hyphenator")
+
+local implement = interfaces and interfaces.implement or function() end
+
+languages = languages or { }
+local hyphenators = languages.hyphenators or { }
+languages.hyphenators = hyphenators
+local traditional = hyphenators.traditional or { }
+hyphenators.traditional = traditional
+
+local dictionaries = setmetatableindex(function(t,k)
+ local v = {
+ patterns = { },
+ hyphenated = { },
+ specials = { },
+ exceptions = { },
+ loaded = false,
+ }
+ t[k] = v
+ return v
+end)
+
+hyphenators.dictionaries = dictionaries
+
+local character = lpeg.patterns.utf8character
+local digit = R("09")
+local weight = digit/tonumber + Cc(0)
+local fence = P(".")
+local hyphen = P("-")
+local space = P(" ")
+local char = character - space
+local validcharacter = (character - S("./"))
+local keycharacter = character - S("/")
+----- basepart = Ct( (Cc(0) * fence)^-1 * (weight * validcharacter)^1 * weight * (fence * Cc(0))^-1)
+local specpart = (P("/") * Cf ( Ct("") *
+ Cg ( Cc("before") * C((1-P("="))^1) * P("=") ) *
+ Cg ( Cc("after") * C((1-P(","))^1) ) *
+ ( P(",") *
+ Cg ( Cc("start") * ((1-P(","))^1/tonumber) * P(",") ) *
+ Cg ( Cc("length") * ((1-P(-1) )^1/tonumber) )
+ )^-1
+ , rawset))^-1
+
+local make_hashkey_p = Cs((digit/"" + keycharacter)^1)
+----- make_pattern_p = basepart * specpart
+local make_hashkey_e = Cs((hyphen/"" + keycharacter)^1)
+local make_pattern_e = Ct(P(char) * (hyphen * Cc(true) * P(char) + P(char) * Cc(false))^1) -- catch . and char after -
+
+-- local make_hashkey_c = Cs((digit + keycharacter/"")^1)
+-- local make_pattern_c = Ct((P(1)/tonumber)^1)
+
+-- local cache = setmetatableindex(function(t,k)
+-- local n = lpegmatch(make_hashkey_c,k)
+-- local v = lpegmatch(make_pattern_c,n)
+-- t[k] = v
+-- return v
+-- end)
+--
+-- local weight_n = digit + Cc("0")
+-- local basepart_n = Cs( (Cc("0") * fence)^-1 * (weight * validcharacter)^1 * weight * (fence * Cc("0"))^-1) / cache
+-- local make_pattern_n = basepart_n * specpart
+
+local make_pattern_c = Ct((P(1)/tonumber)^1)
+
+-- us + nl: 17664 entries -> 827 unique (saves some 3M)
+
+local cache = setmetatableindex(function(t,k)
+ local v = lpegmatch(make_pattern_c,k)
+ t[k] = v
+ return v
+end)
+
+local weight_n = digit + Cc("0")
+local fence_n = fence / "0"
+local char_n = validcharacter / ""
+local basepart_n = Cs(fence_n^-1 * (weight_n * char_n)^1 * weight_n * fence_n^-1) / cache
+local make_pattern_n = basepart_n * specpart
+
+local function register_pattern(patterns,specials,str,specification)
+ local k = lpegmatch(make_hashkey_p,str)
+ -- local v1, v2 = lpegmatch(make_pattern_p,str)
+ local v1, v2 = lpegmatch(make_pattern_n,str)
+ patterns[k] = v1 -- is this key still ok for complex patterns
+ if specification then
+ specials[k] = specification
+ elseif v2 then
+ specials[k] = v2
+ end
+end
+
+local function unregister_pattern(patterns,specials,str)
+ local k = lpegmatch(make_hashkey_p,str)
+ patterns[k] = nil
+ specials[k] = nil
+end
+
+local p_lower = lpeg.patterns.utf8lower
+
+local function register_exception(exceptions,str,specification)
+ local l = lpegmatch(p_lower,str)
+ local k = lpegmatch(make_hashkey_e,l)
+ local v = lpegmatch(make_pattern_e,l)
+ exceptions[k] = v
+end
+
+local p_pattern = ((Carg(1) * Carg(2) * C(char^1)) / register_pattern + 1)^1
+local p_exception = ((Carg(1) * C(char^1)) / register_exception + 1)^1
+local p_split = Ct(C(character)^1)
+
+function traditional.loadpatterns(language,filename)
+ local dictionary = dictionaries[language]
+ if not dictionary.loaded then
+ if not filename or filename == "" then
+ filename = "lang-" .. language
+ end
+ filename = file.addsuffix(filename,"lua")
+ local fullname = resolvers.findfile(filename)
+ if fullname and fullname ~= "" then
+ local specification = dofile(fullname)
+ if specification then
+ local patterns = specification.patterns
+ if patterns then
+ local data = patterns.data
+ if data and data ~= "" then
+ lpegmatch(p_pattern,data,1,dictionary.patterns,dictionary.specials)
+ end
+ end
+ local exceptions = specification.exceptions
+ if exceptions then
+ local data = exceptions.data
+ if data and data ~= "" then
+ lpegmatch(p_exception,data,1,dictionary.exceptions)
+ end
+ end
+ end
+ end
+ dictionary.loaded = true
+ end
+ return dictionary
+end
+
+local lcchars = characters.lcchars
+local uccodes = characters.uccodes
+local categories = characters.categories
+local nofwords = 0
+local nofhashed = 0
+
+local steps = nil
+local f_show = formatters["%w%s"]
+
+local function show_log()
+ if trace_steps == true then
+ report()
+ local w = #steps[1][1]
+ for i=1,#steps do
+ local s = steps[i]
+ report("%s%w%S %S",s[1],w - #s[1] + 3,s[2],s[3])
+ end
+ report()
+ end
+end
+
+local function show_1(wsplit)
+ local u = concat(wsplit," ")
+ steps = { { f_show(0,u), f_show(0,u) } }
+end
+
+local function show_2(c,m,wsplit,done,i,spec)
+ local s = lpegmatch(p_split,c)
+ local t = { }
+ local n = #m
+ local w = #wsplit
+ for j=1,n do
+ t[#t+1] = m[j]
+ t[#t+1] = s[j]
+ end
+ local m = 2*i-2
+ local l = #t
+ local s = spec and table.sequenced(spec) or ""
+ if m == 0 then
+ steps[#steps+1] = { f_show(m, concat(t,"",2)), f_show(1,concat(done," ",2,#done),s) }
+ elseif i+1 == w then
+ steps[#steps+1] = { f_show(m-1,concat(t,"",1,#t-1)), f_show(1,concat(done," ",2,#done),s) }
+ else
+ steps[#steps+1] = { f_show(m-1,concat(t)), f_show(1,concat(done," ",2,#done),s) }
+ end
+end
+
+local function show_3(wsplit,done)
+ local t = { }
+ local h = { }
+ local n = #wsplit
+ for i=1,n do
+ local w = wsplit[i]
+ if i > 1 then
+ local d = done[i]
+ t[#t+1] = i > 2 and d % 2 == 1 and "-" or " "
+ h[#h+1] = d
+ end
+ t[#t+1] = w
+ h[#h+1] = w
+ end
+ steps[#steps+1] = { f_show(0,concat(h)), f_show(0,concat(t)) }
+ show_log()
+end
+
+local function show_4(wsplit,done)
+ steps = { { concat(wsplit," ") } }
+ show_log()
+end
+
+function traditional.lasttrace()
+ return steps
+end
+
+-- We could reuse the w table but as we cache the resolved words
+-- there is not much gain in that complication.
+--
+-- Beware: word can be a table and when n is passed we can
+-- assume reuse, so we need to honor that n then.
+
+-- todo: a fast variant for tex ... fewer lookups (we could check if the
+-- dictionary has changed) ... although due to caching the already
+-- done words, we don't do much here
+
+local function hyphenate(dictionary,word,n) -- odd is okay
+ nofwords = nofwords + 1
+ local hyphenated = dictionary.hyphenated
+ local isstring = type(word) == "string"
+ if isstring then
+ local done = hyphenated[word]
+ if done ~= nil then
+ return done
+ end
+ elseif n then
+ local done = hyphenated[concat(word,"",1,n)]
+ if done ~= nil then
+ return done
+ end
+ else
+ local done = hyphenated[concat(word)]
+ if done ~= nil then
+ return done
+ end
+ end
+ local key
+ if isstring then
+ key = word
+ word = lpegmatch(p_split,word)
+ if not n then
+ n = #word
+ end
+ else
+ if not n then
+ n = #word
+ end
+ key = concat(word,"",1,n)
+ end
+ local l = 1
+ local w = { "." }
+ for i=1,n do
+ local c = word[i]
+ l = l + 1
+ w[l] = lcchars[c] or c
+ end
+ l = l + 1
+ w[l] = "."
+ local c = concat(w,"",2,l-1)
+ --
+ local done = hyphenated[c]
+ if done ~= nil then
+ hyphenated[key] = done
+ nofhashed = nofhashed + 1
+ return done
+ end
+ --
+ local exceptions = dictionary.exceptions
+ local exception = exceptions[c]
+ if exception then
+ if trace_steps then
+ show_4(w,exception)
+ end
+ hyphenated[key] = exception
+ nofhashed = nofhashed + 1
+ return exception
+ end
+ --
+ if trace_steps then
+ show_1(w)
+ end
+ --
+ local specials = dictionary.specials
+ local patterns = dictionary.patterns
+ --
+-- inspect(specials)
+ local spec
+ for i=1,l do
+ for j=i,l do
+ local c = concat(w,"",i,j)
+ local m = patterns[c]
+ if m then
+ local s = specials[c]
+ if not done then
+ done = { }
+ spec = nil
+ -- the string that we resolve has explicit fences (.) so
+ -- done starts at the first fence and runs up to the last
+ -- one so we need one slot less
+ for i=1,l do
+ done[i] = 0
+ end
+ end
+ -- we run over the pattern that always has a (zero) value for
+ -- each character plus one more as we look at both sides
+ for k=1,#m do
+ local new = m[k]
+ if not new then
+ break
+ elseif new == true then
+ report("fatal error")
+ break
+ elseif new > 0 then
+ local pos = i + k - 1
+ local old = done[pos]
+ if not old then
+ -- break ?
+ elseif new > old then
+ done[pos] = new
+ if s then
+ local b = i + (s.start or 1) - 1
+ if b > 0 then
+ local e = b + (s.length or 2) - 1
+ if e > 0 then
+ if pos >= b and pos <= e then
+ if spec then
+ spec[pos] = { s, k - 1 }
+ else
+ spec = { [pos] = { s, k - 1 } }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if trace_steps and done then
+ show_2(c,m,w,done,i,s)
+ end
+ end
+ end
+ end
+ if trace_steps and done then
+ show_3(w,done)
+ end
+ if done then
+ local okay = false
+ for i=3,#done do
+ if done[i] % 2 == 1 then
+ done[i-2] = spec and spec[i] or true
+ okay = true
+ else
+ done[i-2] = false
+ end
+ end
+ if okay then
+ done[#done] = nil
+ done[#done] = nil
+ else
+ done = false
+ end
+ else
+ done = false
+ end
+ hyphenated[key] = done
+ nofhashed = nofhashed + 1
+ return done
+end
+
+function traditional.gettrace(language,word)
+ if not word or word == "" then
+ return
+ end
+ local dictionary = dictionaries[language]
+ if dictionary then
+ local hyphenated = dictionary.hyphenated
+ hyphenated[word] = nil
+ hyphenate(dictionary,word)
+ return steps
+ end
+end
+
+local methods = setmetatableindex(function(t,k) local v = hyphenate t[k] = v return v end)
+
+function traditional.installmethod(name,f)
+ if rawget(methods,name) then
+ report("overloading %a is not permitted",name)
+ else
+ methods[name] = f
+ end
+end
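+-- Editorial sketch (not part of the patch): an alternative method registered
+-- this way gets the same arguments as the default hyphenate function and can
+-- later be selected with the "alternative" key of a featureset.
+--
+-- traditional.installmethod("nohyphens",function(dictionary,word,n)
+--     return false -- pretend nothing can be hyphenated
+-- end)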
+
+local s_detail_1 = "-"
+local f_detail_2 = formatters["%s-%s"]
+local f_detail_3 = formatters["{%s}{%s}{}"]
+local f_detail_4 = formatters["{%s%s}{%s%s}{%s}"]
+
+function traditional.injecthyphens(dictionary,word,specification)
+ if not word then
+ return false
+ end
+ if not specification then
+ return word
+ end
+ local hyphens = hyphenate(dictionary,word)
+ if not hyphens then
+ return word
+ end
+
+ -- the following code is similar to code later on but here we have
+ -- strings while there we have hyphen specs
+
+ local word = lpegmatch(p_split,word)
+ local size = #word
+
+ local leftmin = specification.leftcharmin or 2
+ local rightmin = size - (specification.rightcharmin or leftmin)
+ local leftchar = specification.leftchar
+ local rightchar = specification.rightchar
+
+ local result = { }
+ local rsize = 0
+ local position = 1
+
+ while position <= size do
+ if position >= leftmin and position <= rightmin then
+ local hyphen = hyphens[position]
+ if not hyphen then
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ elseif hyphen == true then
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ rsize = rsize + 1
+ if leftchar and rightchar then
+ result[rsize] = f_detail_3(rightchar,leftchar)
+ else
+ result[rsize] = s_detail_1
+ end
+ position = position + 1
+ else
+ local o, h = hyphen[2]
+ if o then
+ h = hyphen[1]
+ else
+ h = hyphen
+ o = 1
+ end
+ local b = position - o + (h.start or 1)
+ local e = b + (h.length or 2) - 1
+ if b > 0 and e >= b then
+ for i=1,b-position do
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ end
+ rsize = rsize + 1
+ if leftchar and rightchar then
+ result[rsize] = f_detail_4(h.before,rightchar,leftchar,h.after,concat(word,"",b,e))
+ else
+ result[rsize] = f_detail_2(h.before,h.after)
+ end
+ position = e + 1
+ else
+ -- error
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ end
+ end
+ else
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ end
+ end
+ return concat(result)
+end
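+-- Editorial sketch (not part of the patch, output only indicative): outside
+-- context the plain string interface can be used directly:
+--
+-- local dictionary = traditional.loadpatterns("us")
+-- print(traditional.injecthyphens(dictionary,"algorithm",{ }))
+-- -- al-go-rithm (given the us patterns)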
+
+do
+
+ local word = C((1-space)^1)
+ local spaces = space^1
+
+ local u_pattern = (Carg(1) * Carg(2) * word / unregister_pattern + spaces)^1
+ local r_pattern = (Carg(1) * Carg(2) * word * Carg(3) / register_pattern + spaces)^1
+ local e_pattern = (Carg(1) * word / register_exception + spaces)^1
+
+ function traditional.registerpattern(language,str,specification)
+ local dictionary = dictionaries[language]
+ if specification == false then
+ lpegmatch(u_pattern,str,1,dictionary.patterns,dictionary.specials)
+ -- unregister_pattern(dictionary.patterns,dictionary.specials,str)
+ else
+ lpegmatch(r_pattern,str,1,dictionary.patterns,dictionary.specials,type(specification) == "table" and specification or false)
+ -- register_pattern(dictionary.patterns,dictionary.specials,str,specification)
+ end
+ end
+
+ function traditional.registerexception(language,str)
+ lpegmatch(e_pattern,str,1,dictionaries[language].exceptions)
+ end
+
+end
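+-- Editorial sketch (not part of the patch): extra patterns and exceptions can
+-- be registered at runtime with the same syntax as shown at the top of this
+-- file; passing false as specification unregisters a pattern again.
+--
+-- traditional.registerpattern("hu","s1sz/sz=sz,1,3")
+-- traditional.registerexception("us","ta-ble")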
+
+-- todo: unicodes or utfhash ?
+
+if context then
+
+ local nodecodes = nodes.nodecodes
+ local disccodes = nodes.disccodes
+
+ local glyph_code = nodecodes.glyph
+ local disc_code = nodecodes.disc
+ local math_code = nodecodes.math
+ local hlist_code = nodecodes.hlist
+
+ local discretionary_code = disccodes.discretionary
+ local explicit_code = disccodes.explicit
+ local regular_code = disccodes.regular
+ local automatic_code = disccodes.automatic
+
+ local nuts = nodes.nuts
+ local tonut = nodes.tonut
+ local tonode = nodes.tonode
+ local nodepool = nuts.pool
+
+ local new_disc = nodepool.disc
+ local new_glyph = nodepool.glyph
+
+ local setfield = nuts.setfield
+ local getfield = nuts.getfield
+ local getfont = nuts.getfont
+ local getchar = nuts.getchar
+ local getid = nuts.getid
+ local getattr = nuts.getattr
+ local getnext = nuts.getnext
+ local getprev = nuts.getprev
+ local getsubtype = nuts.getsubtype
+ local getlist = nuts.getlist
+ local insert_before = nuts.insert_before
+ local insert_after = nuts.insert_after
+ local copy_node = nuts.copy
+ local remove_node = nuts.remove
+ local end_of_math = nuts.end_of_math
+ local node_tail = nuts.tail
+ local traverse_id = nuts.traverse_id
+
+ local setcolor = nodes.tracers.colors.set
+
+ local variables = interfaces.variables
+ local v_reset = variables.reset
+ local v_yes = variables.yes
+ local v_all = variables.all
+
+ local settings_to_array = utilities.parsers.settings_to_array
+
+ local unsetvalue = attributes.unsetvalue
+ local texsetattribute = tex.setattribute
+
+ local prehyphenchar = lang.prehyphenchar
+ local posthyphenchar = lang.posthyphenchar
+ local preexhyphenchar = lang.preexhyphenchar
+ local postexhyphenchar = lang.postexhyphenchar
+
+ local lccodes = characters.lccodes
+
+ local a_hyphenation = attributes.private("hyphenation")
+
+ function traditional.loadpatterns(language)
+ return dictionaries[language]
+ end
+
+ setmetatableindex(dictionaries,function(t,k) -- for the moment we use an independent data structure
+ if type(k) == "string" then
+ -- this will force a load if not yet loaded (we need a nicer way)
+ -- for the moment that will do (needed for examples that register
+ -- a pattern specification)
+ languages.getnumber(k)
+ end
+ local specification = languages.getdata(k)
+ local dictionary = {
+ patterns = { },
+ exceptions = { },
+ hyphenated = { },
+ specials = { },
+ instance = false,
+ characters = { },
+ unicodes = { },
+ }
+ if specification then
+ local resources = specification.resources
+ if resources then
+ local characters = dictionary.characters or { }
+ local unicodes = dictionary.unicodes or { }
+ for i=1,#resources do
+ local r = resources[i]
+ if not r.in_dictionary then
+ r.in_dictionary = true
+ local patterns = r.patterns
+ if patterns then
+ local data = patterns.data
+ if data then
+ -- regular patterns
+ lpegmatch(p_pattern,data,1,dictionary.patterns,dictionary.specials)
+ end
+ local extra = patterns.extra
+ if extra then
+ -- special patterns
+ lpegmatch(p_pattern,extra,1,dictionary.patterns,dictionary.specials)
+ end
+ end
+ local exceptions = r.exceptions
+ if exceptions then
+ local data = exceptions.data
+ if data and data ~= "" then
+ lpegmatch(p_exception,data,1,dictionary.exceptions)
+ end
+ end
+ local usedchars = lpegmatch(p_split,patterns.characters)
+ for i=1,#usedchars do
+ local char = usedchars[i]
+ local code = utfbyte(char)
+ local upper = uccodes[code]
+ characters[char] = code
+ unicodes [code] = char
+ if type(upper) == "table" then
+ for i=1,#upper do
+ local u = upper[i]
+ unicodes[u] = utfchar(u)
+ end
+ else
+ unicodes[upper] = utfchar(upper)
+ end
+ end
+ end
+ end
+ dictionary.characters = characters
+ dictionary.unicodes = unicodes
+ setmetatableindex(characters,function(t,k) local v = k and utfbyte(k) t[k] = v return v end)
+ end
+ t[specification.number] = dictionary
+ dictionary.instance = specification.instance -- needed for hyphenchars
+ end
+ t[k] = dictionary
+ return dictionary
+ end)
+
+ -- Beware: left and right min don't mean that in a 1 mm hsize there can be snippets
+ -- with fewer characters than either of them! This could be an option but such a narrow
+ -- hsize doesn't make sense anyway.
+
+ -- We assume that featuresets are defined globally ... local definitions
+ -- (also mid paragraph) don't make much sense anyway. For the moment we
+ -- assume no predefined sets so we don't need to store them. Nor do we
+ -- need to hash them in order to save space ... no sane user will define
+ -- many of them.
+
+ local featuresets = hyphenators.featuresets or { }
+ hyphenators.featuresets = featuresets
+
+ storage.shared.noflanguagesfeaturesets = storage.shared.noflanguagesfeaturesets or 0
+
+ local noffeaturesets = storage.shared.noflanguagesfeaturesets
+
+ storage.register("languages/hyphenators/featuresets",featuresets,"languages.hyphenators.featuresets")
+
+ ----- hash = table.sequenced(featureset,",") -- no need now
+
+ local function register(name,featureset)
+ noffeaturesets = noffeaturesets + 1
+ featureset.attribute = noffeaturesets
+ featuresets[noffeaturesets] = featureset -- access by attribute
+ featuresets[name] = featureset -- access by name
+ storage.shared.noflanguagesfeaturesets = noffeaturesets
+ return noffeaturesets
+ end
+
+ local function makeset(...) -- a bit overkill, supporting variants but who cares
+ local set = { }
+ for i=1,select("#",...) do
+ local list = select(i,...)
+ local kind = type(list)
+ local used = nil
+ if kind == "string" then
+ if list == v_all then
+ -- not ok ... now all get ignored
+ return setmetatableindex(function(t,k) local v = utfchar(k) t[k] = v return v end)
+ elseif list ~= "" then
+ used = lpegmatch(p_split,list)
+ set = set or { }
+ for i=1,#used do
+ local char = used[i]
+ set[utfbyte(char)] = char
+ end
+ end
+ elseif kind == "table" then
+ if next(list) then
+ set = set or { }
+ for byte, char in next, list do
+ set[byte] = char == true and utfchar(byte) or char
+ end
+ elseif #list > 0 then
+ set = set or { }
+ for i=1,#list do
+ local l = list[i]
+ if type(l) == "number" then
+ set[l] = utfchar(l)
+ else
+ set[utfbyte(l)] = l
+ end
+ end
+ end
+ end
+ end
+ return set
+ end
+
+ local defaulthyphens = {
+ [0x2D] = true, -- hyphen
+ [0xAD] = true, -- soft hyphen
+ }
+
+ local defaultjoiners = {
+ [0x200C] = true, -- nzwj
+ [0x200D] = true, -- zwj
+ }
+
+ local function somehyphenchar(c)
+ c = tonumber(c)
+ return c ~= 0 and c or nil
+ end
+
+ local function definefeatures(name,featureset)
+ local extrachars = featureset.characters -- "[]()"
+ local hyphenchars = featureset.hyphens
+ local joinerchars = featureset.joiners
+ local alternative = featureset.alternative
+ local rightwordmin = tonumber(featureset.rightwordmin)
+ local charmin = tonumber(featureset.charmin)
+ local leftcharmin = tonumber(featureset.leftcharmin)
+ local rightcharmin = tonumber(featureset.rightcharmin)
+ local rightedge = featureset.rightedge
+ local leftchar = somehyphenchar(featureset.leftchar)
+ local rightchar = somehyphenchar(featureset.rightchar)
+ --
+ joinerchars = joinerchars == v_yes and defaultjoiners or joinerchars
+ hyphenchars = hyphenchars == v_yes and defaulthyphens or hyphenchars
+ -- not yet ok: extrachars have to be ignored so it cannot be all
+ featureset.extrachars = makeset(joinerchars or "",extrachars or "")
+ featureset.hyphenchars = makeset(hyphenchars or "")
+ featureset.alternative = alternative or "hyphenate"
+ featureset.rightwordmin = rightwordmin and rightwordmin > 0 and rightwordmin or nil
+ featureset.charmin = charmin and charmin > 0 and charmin or nil
+ featureset.leftcharmin = leftcharmin and leftcharmin > 0 and leftcharmin or nil
+ featureset.rightcharmin = rightcharmin and rightcharmin > 0 and rightcharmin or nil
+ featureset.leftchar = leftchar
+ featureset.rightchar = rightchar
+ featureset.strict = rightedge == 'tex'
+ --
+ return register(name,featureset)
+ end
+
+ local function setfeatures(n)
+ if not n or n == v_reset then
+ n = false
+ else
+ local f = featuresets[n]
+ if not f and type(n) == "string" then
+ local t = settings_to_array(n)
+ local s = { }
+ for i=1,#t do
+ local ti = t[i]
+ local fs = featuresets[ti]
+ if fs then
+ for k, v in next, fs do
+ s[k] = v
+ end
+ end
+ end
+ n = register(n,s)
+ else
+ n = f and f.attribute
+ end
+ end
+ texsetattribute(a_hyphenation,n or unsetvalue)
+ end
+
+ traditional.definefeatures = definefeatures
+ traditional.setfeatures = setfeatures
+
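+ -- Editorial sketch (not part of the patch, key names as handled in
+ -- definefeatures above): defining and activating a featureset from Lua;
+ -- normally this happens via the interface commands implemented below.
+ --
+ -- definefeatures("demo", {
+ --     hyphens      = "yes", -- also permit breaks at explicit (soft) hyphens
+ --     rightwordmin = 2,     -- roughly: leave the last words of a paragraph alone
+ --     leftcharmin  = 3,
+ --     rightcharmin = 3,
+ -- })
+ -- setfeatures("demo")
+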
+ implement {
+ name = "definehyphenationfeatures",
+ actions = definefeatures,
+ arguments = {
+ "string",
+ {
+ { "characters" },
+ { "hyphens" },
+ { "joiners" },
+ { "rightwordmin", "integer" },
+ { "charmin", "integer" },
+ { "leftcharmin", "integer" },
+ { "rightcharmin", "integer" },
+ { "leftchar", "integer" },
+ { "rightchar", "integer" },
+ { "alternative" },
+ { "rightedge" },
+ }
+ }
+ }
+
+ implement {
+ name = "sethyphenationfeatures",
+ actions = setfeatures,
+ arguments = "string"
+ }
+
+ implement {
+ name = "registerhyphenationpattern",
+ actions = traditional.registerpattern,
+ arguments = { "string", "string", "boolean" }
+ }
+
+ implement {
+ name = "registerhyphenationexception",
+ actions = traditional.registerexception,
+ arguments = { "string", "string" }
+ }
+
+ -- This is a relatively large function with local variables and local
+ -- functions. A previous implementation had the functions outside but
+ -- this is cleaner and as efficient. The test runs 100 times over
+ -- tufte.tex, knuth.tex, zapf.tex, ward.tex and darwin.tex in lower
+ -- and uppercase with a 1mm hsize.
+ --
+ -- language=0 language>0 4 | 3 * slower
+ --
+ -- tex 2.34 | 1.30 2.55 | 1.45 0.21 | 0.15
+ -- lua 2.42 | 1.38 3.30 | 1.84 0.88 | 0.46
+ --
+ -- Of course we have extra overhead (virtual Lua machine) but we also
+ -- check attributes and support specific local options. The test puts
+ -- the typeset text in boxes and discards it. If we also flush the
+ -- runtime is 4.31|2.56 and 4.99|2.94 seconds so the relative difference
+ -- is (somehow) smaller. The test has 536 pages. There is a little bit
+ -- of extra overhead because we store the patterns in a different way.
+ --
+ -- As usual I will look for speedups. Some 0.01 seconds could be gained
+ -- by sharing patterns which is not impressive but it does save some
+ -- 3M memory on this test. (Some optimizations already brought the 3.30
+ -- seconds down to 3.14 but it all depends on aggressive caching.)
+
+ -- As we kick in the hyphenator before fonts get handled, we don't look
+ -- at implicit (font) kerns or ligatures.
+
+ local starttiming = statistics.starttiming
+ local stoptiming = statistics.stoptiming
+
+ local strictids = {
+ [nodecodes.hlist] = true,
+ [nodecodes.vlist] = true,
+ [nodecodes.rule] = true,
+ [nodecodes.disc] = true,
+ [nodecodes.accent] = true,
+ [nodecodes.math] = true,
+ }
+
+ function traditional.hyphenate(head)
+
+ local first = tonut(head)
+ local tail = nil
+ local last = nil
+ local current = first
+ local dictionary = nil
+ local instance = nil
+ local characters = nil
+ local unicodes = nil
+ local exhyphenchar = tex.exhyphenchar
+ local extrachars = nil
+ local hyphenchars = nil
+ local language = nil
+ local start = nil
+ local stop = nil
+ local word = { } -- we reuse this table
+ local size = 0
+ local leftchar = false
+ local rightchar = false -- utfbyte("-")
+ local leftexchar = false
+ local rightexchar = false -- utfbyte("-")
+ local leftmin = 0
+ local rightmin = 0
+ local charmin = 1
+ local leftcharmin = nil
+ local rightcharmin = nil
+ ----- leftwordmin = nil
+ local rightwordmin = nil
+ local leftchar = nil
+ local rightchar = nil
+ local attr = nil
+ local lastwordlast = nil
+ local hyphenated = hyphenate
+ local strict = nil
+
+ -- We cannot use an 'enabled' boolean (false when no characters or extras) because we
+ -- can have plugins that set a characters metatable and so on ... it doesn't save much
+ -- anyway. Using (unicodes and unicodes[code]) and a nil table when no characters also
+ -- doesn't save much. So there is not that much to gain for languages that don't hyphenate.
+ --
+ -- enabled = (unicodes and (next(unicodes) or getmetatable(unicodes))) or (extrachars and next(extrachars))
+ --
+ -- This can be used to not add characters i.e. keep size 0 but then we need to check for
+ -- attributes that change it, which costs time too. Not much to gain there.
+
+ starttiming(traditional)
+
+ local function synchronizefeatureset(a)
+ local f = a and featuresets[a]
+ if f then
+ hyphenated = methods[f.alternative or "hyphenate"]
+ extrachars = f.extrachars
+ hyphenchars = f.hyphenchars
+ rightwordmin = f.rightwordmin
+ charmin = f.charmin
+ leftcharmin = f.leftcharmin
+ rightcharmin = f.rightcharmin
+ leftchar = f.leftchar
+ rightchar = f.rightchar
+ strict = f.strict and strictids
+ if rightwordmin and rightwordmin > 0 and lastwordlast ~= rightwordmin then
+ -- so we can change mid paragraph but it's kind of unpredictable then
+ if not tail then
+ tail = node_tail(first)
+ end
+ last = tail
+ local inword = false
+ while last and rightwordmin > 0 do
+ local id = getid(last)
+ if id == glyph_code then
+ inword = true
+ if trace_visualize then
+ setcolor(last,"darkred")
+ end
+ elseif inword then
+ inword = false
+ rightwordmin = rightwordmin - 1
+ end
+ last = getprev(last)
+ end
+ lastwordlast = rightwordmin
+ end
+ if not charmin or charmin == 0 then
+ charmin = 1
+ end
+ else
+ hyphenated = methods.hyphenate
+ extrachars = false
+ hyphenchars = false
+ rightwordmin = false
+ charmin = 1
+ leftcharmin = false
+ rightcharmin = false
+ leftchar = false
+ rightchar = false
+ strict = false
+ end
+
+ return a
+ end
+
+ local function flush(hyphens) -- todo: no need for result
+
+ local rightmin = size - rightmin
+ local result = { }
+ local rsize = 0
+ local position = 1
+
+ -- todo: remember the last disc and don't go back to before that (plus
+ -- message) .. for simplicity we also assume that we don't start
+ -- with a disc node
+ --
+ -- there can be a conflict: if we backtrack then we can end up in
+ -- another disc and get out of sync (dup chars and so on)
+
+ while position <= size do
+ if position >= leftmin and position <= rightmin then
+ local hyphen = hyphens[position]
+ if not hyphen then
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ elseif hyphen == true then
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ rsize = rsize + 1
+ result[rsize] = true
+ position = position + 1
+ else
+ local o, h = hyphen[2]
+ if o then
+ -- { hyphen, offset }
+ h = hyphen[1]
+ else
+ -- hyphen
+ h = hyphen
+ o = 1
+ end
+ local b = position - o + (h.start or 1)
+ local e = b + (h.length or 2) - 1
+ if b > 0 and e >= b then
+ for i=1,b-position do
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ end
+ rsize = rsize + 1
+ result[rsize] = {
+ h.before or "", -- pre
+ h.after or "", -- post
+ concat(word,"",b,e), -- replace
+ h.right, -- optional after pre
+ h.left, -- optional before post
+ }
+ position = e + 1
+ else
+ -- error
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ end
+ end
+ else
+ rsize = rsize + 1
+ result[rsize] = word[position]
+ position = position + 1
+ end
+ end
+
+ local function serialize(replacement,leftchar,rightchar)
+ if not replacement then
+ return
+ elseif replacement == true then
+ local glyph = copy_node(stop)
+ setfield(glyph,"char",leftchar or rightchar)
+ return glyph
+ end
+ local head = nil
+ local current = nil
+ if leftchar then
+ head = copy_node(stop)
+ current = head
+ setfield(head,"char",leftchar)
+ end
+ local rsize = #replacement
+ if rsize == 1 then
+ local glyph = copy_node(stop)
+ setfield(glyph,"char",characters[replacement])
+ if head then
+ insert_after(current,current,glyph)
+ else
+ head = glyph
+ end
+ current = glyph
+ elseif rsize > 0 then
+ local list = lpegmatch(p_split,replacement) -- this is an utf split (could be cached)
+ for i=1,#list do
+ local glyph = copy_node(stop)
+ setfield(glyph,"char",characters[list[i]])
+ if head then
+ insert_after(current,current,glyph)
+ else
+ head = glyph
+ end
+ current = glyph
+ end
+ end
+ if rightchar then
+ local glyph = copy_node(stop)
+ insert_after(current,current,glyph)
+ setfield(glyph,"char",rightchar)
+ end
+ return head
+ end
+
+ local current = start
+
+ local attributes = getfield(start,"attr") -- todo: just copy the last disc .. faster
+
+ for i=1,rsize do
+ local r = result[i]
+ if r == true then
+ local disc = new_disc()
+ if rightchar then
+ setfield(disc,"pre",serialize(true,rightchar))
+ end
+ if leftchar then
+ setfield(disc,"post",serialize(true,leftchar))
+ end
+ if attributes then
+ setfield(disc,"attr",attributes)
+ end
+ -- could be a replace as well
+ insert_before(first,current,disc)
+ elseif type(r) == "table" then
+ local disc = new_disc()
+ local pre = r[1]
+ local post = r[2]
+ local replace = r[3]
+ local right = r[4] ~= false and rightchar
+ local left = r[5] ~= false and leftchar
+ if pre and pre ~= "" then
+ setfield(disc,"pre",serialize(pre,false,right))
+ end
+ if post and post ~= "" then
+ setfield(disc,"post",serialize(post,left,false))
+ end
+ if replace and replace ~= "" then
+ setfield(disc,"replace",serialize(replace))
+ end
+ if attributes then
+ setfield(disc,"attr",attributes)
+ end
+ insert_before(first,current,disc)
+ else
+ setfield(current,"char",characters[r])
+ if i < rsize then
+ current = getnext(current)
+ end
+ end
+ end
+ if current and current ~= stop then
+ local current = getnext(current)
+ local last = getnext(stop)
+ while current ~= last do
+ first, current = remove_node(first,current,true)
+ end
+ end
+
+ end
+
+ local function inject(leftchar,rightchar,code,attributes)
+ if first ~= current then
+ local disc = new_disc()
+ first, current, glyph = remove_node(first,current)
+ first, current = insert_before(first,current,disc)
+ if trace_visualize then
+ setcolor(glyph,"darkred") -- these get checked
+ setcolor(disc,"darkgreen") -- in the colorizer
+ end
+ setfield(disc,"replace",glyph)
+ if not leftchar then
+ leftchar = code
+ end
+ if rightchar then
+ local glyph = copy_node(glyph)
+ setfield(glyph,"char",rightchar)
+ setfield(disc,"pre",glyph)
+ end
+ if leftchar then
+ local glyph = copy_node(glyph)
+ setfield(glyph,"char",leftchar)
+ setfield(disc,"post",glyph)
+ end
+ if attributes then
+ setfield(disc,"attr",attributes)
+ end
+ end
+ return current
+ end
+
+ local a = getattr(first,a_hyphenation)
+ if a ~= attr then
+ attr = synchronizefeatureset(a)
+ end
+
+ -- The first attribute in a word determines the way a word gets hyphenated
+ -- and if relevant, other properties are also set then. We could optimize for
+ -- silly one-char cases but it has no priority as the code is still not that
+ -- much slower than the native hyphenator and this variant also provides room
+ -- for extensions.
+
+ while current and current ~= last do -- and current
+ local id = getid(current)
+ if id == glyph_code then
+ local code = getchar(current)
+ local lang = getfield(current,"lang")
+ if lang ~= language then
+ if dictionary and size > charmin and leftmin + rightmin <= size then
+ if categories[word[1]] == "lu" and getfield(start,"uchyph") < 0 then
+ -- skip
+ else
+ local hyphens = hyphenated(dictionary,word,size)
+ if hyphens then
+ flush(hyphens)
+ end
+ end
+ end
+ language = lang
+ if language > 0 then
+ --
+ dictionary = dictionaries[language]
+ instance = dictionary.instance
+ characters = dictionary.characters
+ unicodes = dictionary.unicodes
+ --
+ local a = getattr(current,a_hyphenation)
+ attr = synchronizefeatureset(a)
+ leftchar = leftchar or (instance and posthyphenchar (instance)) -- we can make this more
+ rightchar = rightchar or (instance and prehyphenchar (instance)) -- efficient if needed
+ leftexchar = (instance and preexhyphenchar (instance))
+ rightexchar = (instance and postexhyphenchar(instance))
+ leftmin = leftcharmin or getfield(current,"left")
+ rightmin = rightcharmin or getfield(current,"right")
+ if not leftchar or leftchar < 0 then
+ leftchar = false
+ end
+ if not rightchar or rightchar < 0 then
+ rightchar = false
+ end
+ --
+ local char = unicodes[code] or (extrachars and extrachars[code])
+ if char then
+ word[1] = char
+ size = 1
+ start = current
+ else
+ size = 0
+ end
+ else
+ size = 0
+ end
+ elseif language <= 0 then
+ --
+ elseif size > 0 then
+ local char = unicodes[code] or (extrachars and extrachars[code])
+ if char then
+ size = size + 1
+ word[size] = char
+ elseif dictionary then
+ if size > charmin and leftmin + rightmin <= size then
+ if categories[word[1]] == "lu" and getfield(start,"uchyph") < 0 then
+ -- skip
+ else
+ local hyphens = hyphenated(dictionary,word,size)
+ if hyphens then
+ flush(hyphens)
+ end
+ end
+ end
+ size = 0
+ -- maybe also a strict mode here: no hyphenation before hyphenchars and skip
+ -- the next set (but then, strict is an option)
+ if code == exhyphenchar then
+ current = inject(leftexchar,rightexchar,code,getfield(current,"attr"))
+ elseif hyphenchars and hyphenchars[code] then
+ current = inject(leftchar,rightchar,code,getfield(current,"attr"))
+ end
+ end
+ else
+ local a = getattr(current,a_hyphenation)
+ if a ~= attr then
+ attr = synchronizefeatureset(a) -- influences extrachars
+ leftchar = leftchar or (instance and posthyphenchar (instance)) -- we can make this more
+ rightchar = rightchar or (instance and prehyphenchar (instance)) -- efficient if needed
+ leftexchar = (instance and preexhyphenchar (instance))
+ rightexchar = (instance and postexhyphenchar(instance))
+ leftmin = leftcharmin or getfield(current,"left")
+ rightmin = rightcharmin or getfield(current,"right")
+ if not leftchar or leftchar < 0 then
+ leftchar = false
+ end
+ if not rightchar or rightchar < 0 then
+ rightchar = false
+ end
+ end
+ --
+ local char = unicodes[code] or (extrachars and extrachars[code])
+ if char then
+ word[1] = char
+ size = 1
+ start = current
+ end
+ end
+ stop = current
+ current = getnext(current)
+ else
+ if id == disc_code then
+ local subtype = getsubtype(current)
+ if subtype == discretionary_code then -- \discretionary
+ size = 0
+ current = getnext(current)
+ elseif subtype == explicit_code then -- \- => only here
+ size = 0
+ current = getnext(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code or id == disc_code then
+ current = getnext(current)
+ else
+ break
+ end
+ end
+ -- todo: change to discretionary_code
+ else
+ -- automatic (-) : the hyphenator turns an exhyphen into glyph+disc
+ -- first : done by the hyphenator
+ -- second : done by the hyphenator
+ -- regular : done by the hyphenator
+ size = 0
+ current = getnext(current)
+ end
+ elseif strict and strict[id] then
+ current = id == math_code and getnext(end_of_math(current)) or getnext(current)
+ size = 0
+ else
+ current = id == math_code and getnext(end_of_math(current)) or getnext(current)
+ end
+ if size > 0 then
+ if dictionary and size > charmin and leftmin + rightmin <= size then
+ if categories[word[1]] == "lu" and getfield(start,"uchyph") < 0 then
+ -- skip
+ else
+ local hyphens = hyphenated(dictionary,word,size)
+ if hyphens then
+ flush(hyphens)
+ end
+ end
+ end
+ size = 0
+ end
+ end
+ end
+ -- we may have quit due to reaching last, so we need to flush the last seen word; we could
+ -- move this into the loop and test for current but ... messy
+ if dictionary and size > charmin and leftmin + rightmin <= size then
+ if categories[word[1]] == "lu" and getfield(start,"uchyph") < 0 then
+ -- skip
+ else
+ local hyphens = hyphenated(dictionary,word,size)
+ if hyphens then
+ flush(hyphens)
+ end
+ end
+ end
+
+ stoptiming(traditional)
+
+ return head, true
+ end
+
+ statistics.register("hyphenation",function()
+ if nofwords > 0 or statistics.elapsed(traditional) > 0 then
+ return string.format("%s words hyphenated, %s unique, used time %s",
+ nofwords,nofhashed,statistics.elapsedseconds(traditional) or 0)
+ end
+ end)
+
+ local texmethod = "builders.kernel.hyphenation"
+ local oldmethod = texmethod
+ local newmethod = texmethod
+
+ -- local newmethod = "languages.hyphenators.traditional.hyphenate"
+ --
+ -- nodes.tasks.prependaction("processors","words",newmethod)
+ -- nodes.tasks.disableaction("processors",oldmethod)
+ --
+ -- nodes.tasks.replaceaction("processors","words",oldmethod,newmethod)
+
+ -- \enabledirectives[hyphenators.method=traditional]
+ -- \enabledirectives[hyphenators.method=builtin]
+
+ -- push / pop ? check first attribute
+
+ -- local replaceaction = nodes.tasks.replaceaction -- no longer overload this way (too many local switches)
+
+ local hyphenate = lang.hyphenate
+ local expanders = languages.expanders
+ local methods = { }
+ local usedmethod = false
+ local stack = { }
+
+ local function original(head)
+ local done = hyphenate(head)
+ return head, done
+ end
+
+ local function expanded(head)
+ local done = hyphenate(head)
+ if done then
+ for d in traverse_id(disc_code,tonut(head)) do
+ local s = getsubtype(d)
+ if s ~= discretionary_code then
+ expanders[s](d,template)
+ done = true
+ end
+ end
+ end
+ return head, done
+ end
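+
+ -- Added comment (not in the original patch): "original" only calls the built-in
+ -- hyphenator, while "expanded" additionally runs the lang-dis expanders on the
+ -- automatic and explicit discretionaries so that their pre/post/replace fields
+ -- end up filled in explicitly.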
+
+ local getcount = tex.getcount
+
+ hyphenators.methods = methods
+ hyphenators.optimize = false
+
+ function hyphenators.handler(head,groupcode)
+ if usedmethod then
+ if groupcode == "hbox" and hyphenators.optimize then
+ if getcount("hyphenstate") > 0 then
+ forced = false
+ return usedmethod(head)
+ else
+ return head, false
+ end
+ else
+ return usedmethod(head)
+ end
+ else
+ return head, false
+ end
+ end
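+
+ -- Sketch (assumption, not part of the original patch; hyphenators is assumed to
+ -- be languages.hyphenators): the optimization can be switched on from the Lua end,
+ -- in which case \hbox content is only hyphenated when \hyphenstate is positive:
+ --
+ -- languages.hyphenators.optimize = true
+ -- tex.setcount("hyphenstate",1) -- permit hyphenation inside boxes again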
+
+ methods.tex = original
+ methods.original = original
+ methods.expanded = expanded
+ methods.traditional = languages.hyphenators.traditional.hyphenate
+ methods.none = false -- function(head) return head, false end
+
+ usedmethod = original
+
+ local function setmethod(method)
+ usedmethod = type(method) == "string" and methods[method]
+ if usedmethod == nil then
+ usedmethod = methods.tex
+ end
+ end
+ local function pushmethod(method)
+ insert(stack,usedmethod)
+ usedmethod = type(method) == "string" and methods[method]
+ if usedmethod == nil then
+ usedmethod = methods.tex
+ end
+ end
+ local function popmethod()
+ usedmethod = remove(stack) or methods.tex
+ end
+
+ hyphenators.setmethod = setmethod
+ hyphenators.pushmethod = pushmethod
+ hyphenators.popmethod = popmethod
+
+ directives.register("hyphenators.method",setmethod)
+
+ function hyphenators.setup(specification)
+ local method = specification.method
+ if method then
+ setmethod(method)
+ end
+ end
+
+ implement { name = "sethyphenationmethod", actions = setmethod, arguments = "string" }
+ implement { name = "pushhyphenation", actions = pushmethod, arguments = "string" }
+ implement { name = "pophyphenation", actions = popmethod }
+
+ -- can become a runtime loaded one:
+
+ local context = context
+ local ctx_NC = context.NC
+ local ctx_NR = context.NR
+ local ctx_verbatim = context.verbatim
+
+ function hyphenators.showhyphenationtrace(language,word)
+ if not word or word == "" then
+ return
+ end
+ local saved = trace_steps
+ trace_steps = "silent"
+ local steps = traditional.gettrace(language,word)
+ trace_steps = saved
+ if steps then
+ local n = #steps
+ if n > 0 then
+ context.starttabulate { "|r|l|l|l|" }
+ for i=1,n do
+ local s = steps[i]
+ ctx_NC() if i > 1 and i < n then context(i-1) end
+ ctx_NC() ctx_verbatim(s[1])
+ ctx_NC() ctx_verbatim(s[2])
+ ctx_NC() ctx_verbatim(s[3])
+ ctx_NC()
+ ctx_NR()
+ end
+ context.stoptabulate()
+ end
+ end
+ end
+
+ implement {
+ name = "showhyphenationtrace",
+ actions = hyphenators.showhyphenationtrace,
+ arguments = { "string", "string" }
+ }
+
+ function nodes.stripdiscretionaries(head)
+ local h = tonut(head)
+ for l in traverse_id(hlist_code,h) do
+ for d in traverse_id(disc_code,getlist(l)) do
+ remove_node(h,d,true)
+ end
+ end
+ return tonode(h)
+ end
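+
+ -- Added comment (not in the original patch): this strips every discretionary from
+ -- the horizontal lists inside head, for instance to clean up an already typeset
+ -- paragraph before its content is measured or reused.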
+
+
+else
+
+-- traditional.loadpatterns("nl","lang-nl")
+-- traditional.loadpatterns("de","lang-de")
+-- traditional.loadpatterns("us","lang-us")
+
+-- traditional.registerpattern("nl","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
+-- traditional.registerpattern("nl","oo7ë", { start = 2, length = 3, before = "o", after = "e" } )
+-- traditional.registerpattern("de","qqxc9xkqq",{ start = 3, length = 4, before = "ab", after = "cd" } )
+
+-- local specification = {
+-- leftcharmin = 2,
+-- rightcharmin = 2,
+-- leftchar = "<",
+-- rightchar = ">",
+-- }
+
+-- print("reëel", traditional.injecthyphens(dictionaries.nl,"reëel", specification),"r{e>}{<e}{eë}el")
+-- print("reeëel", traditional.injecthyphens(dictionaries.nl,"reeëel", specification),"re{e>}{<e}{eë}el")
+-- print("rooëel", traditional.injecthyphens(dictionaries.nl,"rooëel", specification),"r{o>}{<e}{ooë}el")
+
+-- print( "qxcxkq", traditional.injecthyphens(dictionaries.de, "qxcxkq", specification),"")
+-- print( "qqxcxkqq", traditional.injecthyphens(dictionaries.de, "qqxcxkqq", specification),"")
+-- print( "qqqxcxkqqq", traditional.injecthyphens(dictionaries.de, "qqqxcxkqqq", specification),"")
+-- print("qqqqxcxkqqqq",traditional.injecthyphens(dictionaries.de,"qqqqxcxkqqqq",specification),"")
+
+-- print("kunstmatig", traditional.injecthyphens(dictionaries.nl,"kunstmatig", specification),"")
+-- print("kunststofmatig", traditional.injecthyphens(dictionaries.nl,"kunststofmatig", specification),"")
+-- print("kunst[stof]matig", traditional.injecthyphens(dictionaries.nl,"kunst[stof]matig", specification),"")
+
+-- traditional.loadpatterns("us","lang-us")
+
+-- local specification = {
+-- leftcharmin = 2,
+-- rightcharmin = 2,
+-- leftchar = false,
+-- rightchar = false,
+-- }
+
+-- trace_steps = true
+
+-- print("components", traditional.injecthyphens(dictionaries.us,"components", specification),"")
+-- print("single", traditional.injecthyphens(dictionaries.us,"single", specification),"sin-gle")
+-- print("everyday", traditional.injecthyphens(dictionaries.us,"everyday", specification),"every-day")
+-- print("associate", traditional.injecthyphens(dictionaries.us,"associate", specification),"as-so-ciate")
+-- print("philanthropic", traditional.injecthyphens(dictionaries.us,"philanthropic", specification),"phil-an-thropic")
+-- print("projects", traditional.injecthyphens(dictionaries.us,"projects", specification),"projects")
+-- print("Associate", traditional.injecthyphens(dictionaries.us,"Associate", specification),"As-so-ciate")
+-- print("Philanthropic", traditional.injecthyphens(dictionaries.us,"Philanthropic", specification),"Phil-an-thropic")
+-- print("Projects", traditional.injecthyphens(dictionaries.us,"Projects", specification),"Projects")
+
+end
+
diff --git a/tex/context/base/lang-hyp.mkiv b/tex/context/base/lang-hyp.mkiv
new file mode 100644
index 000000000..927f5a057
--- /dev/null
+++ b/tex/context/base/lang-hyp.mkiv
@@ -0,0 +1,267 @@
+%D \module
+%D [ file=lang-ini,
+%D version=2014.08.10,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Experimental Patterns,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is an experimental module. We often have to deal with titles
+%D that have conflicting demands:
+%D
+%D \startitemize
+%D \startitem They go into a dedicated space (often a graphic). \stopitem
+%D \startitem The words cannot be hyphenated. \stopitem
+%D \startitem But as an escape they can get hyphenated. \stopitem
+%D \startitem In that case we want at least an extra word on the last line. \stopitem
+%D \stopitemize
+%D
+%D These and maybe more cases can be dealt with using dedicated hyphenation
+%D mechanisms. At the same time we want to experiment with more extensive patterns
+%D as discussed in {\em TUGboat, Volume 27 (2006), No. 2—Proceedings of EuroTEX2006}.
+
+% lua: 5.341 5.354
+% tex: 5.174 5.262
+
+\writestatus{loading}{ConTeXt Language Macros / Initialization}
+
+\registerctxluafile{lang-dis}{1.001}
+\registerctxluafile{lang-hyp}{1.001}
+
+\unprotect
+
+\definesystemattribute[hyphenation][public]
+
+%D This command can change! At some point we will keep the setting with the
+%D paragraph and then the \type {\par} can go.
+
+% \unexpanded\def\atleastoneword#1%
+% {\begingroup
+% \enabledirectives[hyphenators.method=traditional]%
+% \enabledirectives[hyphenators.rightwordsmin=1]%
+% \lefthyphenmin \plusfour
+% \righthyphenmin\plusfour
+% #1\par
+% \disabledirectives[hyphenators.rightwordsmin]%
+% \enabledirectives[hyphenators.method]%
+% \endgroup}
+
+
+% \exhyphenchar \hyphenasciicode
+% \preexhyphenchar \lessthanasciicode
+% \postexhyphenchar\morethanasciicode
+
+%D Here is the real way:
+
+\installcorenamespace{hyphenation}
+\installcorenamespace{hyphenationfeatures}
+
+\installparameterhandler \??hyphenation {hyphenation}
+\installsetuphandler \??hyphenation {hyphenation}
+
+\setuphyphenation
+ [\c!method=\s!default,
+ \s!righthyphenchar=0, % number, eventually g: etc
+ \s!lefthyphenchar=0] % number
+
+\appendtoks
+ \clf_sethyphenationmethod{\hyphenationparameter\c!method}%
+\to \everysetuphyphenation
+
+%D These are mostly meant for manuals:
+
+\unexpanded\def\starthyphenation[#1]%
+ {\begingroup
+ \clf_pushhyphenation{#1}}
+
+\unexpanded\def\stophyphenation
+ {\ifhmode\par\fi
+ \clf_pophyphenation
+ \endgroup}
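+
+% Usage sketch (not part of the original patch): temporarily switch to another
+% hyphenation method and restore the previous one afterwards:
+%
+% \starthyphenation[traditional]
+%     some text that is now hyphenated by the Lua hyphenator \par
+% \stophyphenation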
+
+% This is a global setting, so we need to disable it when needed. However, as
+% we are (hopefully) compatible and attribute driven one can also just keep it
+% enabled.
+%
+% \setuphyphenation
+% [\c!method=\s!traditional] % no translations
+
+\unexpanded\def\definehyphenationfeatures
+ {\dodoubleargument\lang_hyphenation_define_features}
+
+\unexpanded\def\lang_hyphenation_define_features[#1][#2]%
+ {\begingroup
+ \letdummyparameter\c!characters\empty % maybe \s!
+ \letdummyparameter\c!hyphens\empty % maybe \s!
+ \letdummyparameter\c!joiners\empty % maybe \s!
+ \letdummyparameter\c!rightwords\zerocount % maybe \s!
+ \letdummyparameter\s!lefthyphenmin\zerocount
+ \letdummyparameter\s!righthyphenmin\zerocount
+ \letdummyparameter\s!hyphenmin\zerocount
+ \letdummyparameter\s!lefthyphenchar\zerocount
+ \letdummyparameter\s!righthyphenchar\zerocount
+ \letdummyparameter\c!alternative\empty
+ \letdummyparameter\c!rightedge\empty
+ \getdummyparameters[#2]%
+ \clf_definehyphenationfeatures
+ {#1}%
+ {
+ characters {\dummyparameter\c!characters}%
+ hyphens {\dummyparameter\c!hyphens}%
+ joiners {\dummyparameter\c!joiners}%
+ rightwordmin \numexpr\dummyparameter\c!rightwords\relax
+ charmin \numexpr\dummyparameter\s!hyphenmin\relax
+ leftcharmin \numexpr\dummyparameter\s!lefthyphenmin\relax
+ rightcharmin \numexpr\dummyparameter\s!righthyphenmin\relax
+ leftchar \numexpr\dummyparameter\s!lefthyphenchar\relax
+ rightchar \numexpr\dummyparameter\s!righthyphenchar\relax
+ alternative {\dummyparameter\c!alternative}%
+ rightedge {\dummyparameter\c!rightedge}%
+ }%
+ \relax
+ \endgroup}
+
+\unexpanded\def\sethyphenationfeatures[#1]%
+ {\clf_sethyphenationfeatures{#1}}
+
+% todo: \start ... \stop too
+
+\unexpanded\def\registerhyphenationpattern
+ {\dodoubleempty\lang_hyphenation_register_pattern}
+
+\def\lang_hyphenation_register_pattern[#1][#2]%
+ {\clf_registerhyphenationpattern\ifsecondargument{#1}{#2}\else{\currentlanguage}{#1}\fi\s!true\relax}
+
+\unexpanded\def\unregisterhyphenationpattern
+ {\dodoubleempty\lang_hyphenation_unregister_pattern}
+
+\def\lang_hyphenation_unregister_pattern[#1][#2]%
+ {\clf_registerhyphenationpattern\ifsecondargument{#1}{#2}\else{\currentlanguage}{#1}\fi\s!false\relax}
+
+\unexpanded\def\registerhyphenationexception
+ {\dodoubleempty\lang_hyphenation_register_exception}
+
+\def\lang_hyphenation_register_exception[#1][#2]%
+ {\clf_registerhyphenationexception\ifsecondargument{#1}{#2}\else{\currentlanguage}{#1}\fi\relax}
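+
+% Usage sketch (not part of the original patch; the pattern strings are taken from
+% the test code elsewhere in this file):
+%
+% \registerhyphenationpattern [nl] [e1ë]
+% \registerhyphenationpattern      [oo1ë] % uses \currentlanguage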
+
+\unexpanded\def\showhyphenationtrace
+ {\dodoubleempty\lang_hyphenation_show_trace}
+
+\def\lang_hyphenation_show_trace[#1][#2]%
+ {\begingroup
+ \tt
+ \clf_showhyphenationtrace\ifsecondargument{#1}{#2}\else{\currentlanguage}{#1}\fi\relax
+ \endgroup}
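+
+% For instance (sketch, not part of the original patch):
+%
+% \showhyphenationtrace [us] [philanthropic]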
+
+% For old times sake:
+
+\unexpanded\def\atleastoneword#1%
+ {\begingroup
+ \starthyphenation[traditional]% this might become default or a faster switch
+ \sethyphenationfeatures[words]%
+ #1\par
+ \stophyphenation
+ \endgroup}
+
+%D For me:
+
+\unexpanded\def\showdiscretionaries
+ {\clf_showdiscretionaries}
+
+%D These are (at least now) not cumulative:
+
+\definehyphenationfeatures % just an example
+ [fences]
+ [\c!characters={[]()}]
+
+\definehyphenationfeatures
+ [words]
+ [\c!rightwords=1,
+ \s!lefthyphenmin=4,
+ \s!righthyphenmin=4]
+
+\definehyphenationfeatures
+ [default]
+ [%c!rightedge=\v!tex,
+ \c!hyphens=\v!yes,
+ \c!joiners=\v!yes]
+
+\definehyphenationfeatures
+ [strict]
+ [\c!rightedge=\s!tex]
+
+% \sethyphenationfeatures
+% [fences]
+
+% \sethyphenationfeatures
+% [default,fences]
+
+% \setuphyphenation % will be default
+% [method=expanded]
+
+\protect \endinput
+
+% \starttext
+%
+% \enabledirectives[hyphenators.method=traditional]
+%
+% % \dorecurse{1000}{\input tufte \par}
+%
+% \setupalign[verytolerant,flushleft]
+% \setuplayout[width=140pt] \showframe
+%
+% longword longword long word longword longwordword \blank
+%
+% \enabledirectives[hyphenators.rightwordsmin=1]
+%
+% longword longword long word longword longwordword\blank
+%
+% \disabledirectives[hyphenators.rightwordsmin]
+%
+% longword longword long word longword longwordword\blank
+%
+% \atleastoneword{longword longword long word longword longwordword}
+%
+% \enabledirectives[hyphenators.method=traditional]
+%
+% \stoptext
+
+% \startluacode
+% -- e1ë/e=e reëel re-eel
+% -- a1atje./a=t,1,3 omaatje oma-tje
+% -- schif1f/ff=f,5,2 Schiffahrt Schiff-fahrt
+%
+% languages.hyphenators.traditional.registerpattern("en","a1b", { start = 1, length = 2, before = "CD", after = "EF" } )
+% languages.hyphenators.traditional.registerpattern("en","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
+% languages.hyphenators.traditional.registerpattern("en","oo1ë", { start = 2, length = 2, before = "o", after = "e" } )
+% languages.hyphenators.traditional.registerpattern("en","qqxc9xkqq",{ start = 3, length = 4, before = "ab", after = "cd" } ) -- replacement start length
+%
+% -- print("reëel", injecthyphens(dictionaries.nl,"reëel", 2,2))
+% -- print("reeëel", injecthyphens(dictionaries.nl,"reeëel", 2,2))
+% -- print("rooëel", injecthyphens(dictionaries.nl,"rooëel", 2,2))
+% -- print( "QXcXkQ", injecthyphens(dictionaries.de, "QXcXkQ", 2,2))
+% -- print( "QQXcXkQQ", injecthyphens(dictionaries.de, "QQXcXkQQ", 2,2))
+% -- print( "QQQXcXkQQQ", injecthyphens(dictionaries.de, "QQQXcXkQQQ", 2,2))
+% -- print("QQQQXcXkQQQQ",injecthyphens(dictionaries.de,"QQQQXcXkQQQQ",2,2))
+% --
+% -- print( "QQXcXkQQ QQXcXkQQ", injecthyphens(dictionaries.de, "QQXcXkQQ QQXcXkQQ", 2,2))
+% \stopluacode
+%
+% \starttext
+%
+% \blank
+%
+% xreëel rooëel \par xxabxx xxxabxxx \par
+%
+% \hsize1mm \lefthyphenmin2 \righthyphenmin2
+%
+% \blank Capacity \blank capacity \blank xyabxy \blank xreëel \blank rooëel \blank
+%
+% xy\discretionary{CD}{EF}{ab}xy % xxacceedxxx
+%
+% \stoptext
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index a9f428caa..d75a665e2 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -21,16 +21,22 @@ local utfbyte = utf.byte
local format, gsub = string.format, string.gsub
local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs
+local context = context
+local commands = commands
+local implement = interfaces.implement
+
local settings_to_array = utilities.parsers.settings_to_array
local trace_patterns = false trackers.register("languages.patterns", function(v) trace_patterns = v end)
local report_initialization = logs.reporter("languages","initialization")
-local prehyphenchar = lang.prehyphenchar -- global per language
-local posthyphenchar = lang.posthyphenchar -- global per language
-local lefthyphenmin = lang.lefthyphenmin
-local righthyphenmin = lang.righthyphenmin
+local prehyphenchar = lang.prehyphenchar -- global per language
+local posthyphenchar = lang.posthyphenchar -- global per language
+local preexhyphenchar = lang.preexhyphenchar -- global per language
+local postexhyphenchar = lang.postexhyphenchar -- global per language
+local lefthyphenmin = lang.lefthyphenmin
+local righthyphenmin = lang.righthyphenmin
local lang = lang
lang.exceptions = lang.hyphenation
@@ -53,9 +59,9 @@ local numbers = languages.numbers
languages.data = languages.data or { }
local data = languages.data
-storage.register("languages/numbers", numbers, "languages.numbers")
storage.register("languages/registered",registered,"languages.registered")
storage.register("languages/associated",associated,"languages.associated")
+storage.register("languages/numbers", numbers, "languages.numbers")
storage.register("languages/data", data, "languages.data")
local nofloaded = 0
@@ -73,6 +79,9 @@ local function resolve(tag)
end
local function tolang(what) -- returns lang object
+ if not what then
+ what = tex.language
+ end
local tag = numbers[what]
local data = tag and registered[tag] or registered[what]
if data then
@@ -85,6 +94,14 @@ local function tolang(what) -- returns lang object
end
end
+function languages.getdata(tag) -- or number
+ if tag then
+ return registered[tag] or registered[numbers[tag]]
+ else
+ return registered[numbers[tex.language]]
+ end
+end
+
-- languages.tolang = tolang
-- patterns=en
@@ -115,7 +132,10 @@ local function loaddefinitions(tag,specification)
if trace_patterns then
report_initialization("pattern specification for language %a: %s",tag,specification.patterns)
end
- local dataused, ok = data.used, false
+ local dataused = data.used
+ local ok = false
+ local resources = data.resources or { }
+ data.resources = resources
for i=1,#definitions do
local definition = definitions[i]
if definition == "" then
@@ -137,13 +157,15 @@ local function loaddefinitions(tag,specification)
report_initialization("loading definition %a for language %a from %a",definition,tag,fullname)
end
local suffix, gzipped = gzip.suffix(fullname)
- local defs = table.load(fullname,gzipped and gzip.load)
- if defs then -- todo: version test
+ local loaded = table.load(fullname,gzipped and gzip.load)
+ if loaded then -- todo: version test
ok, nofloaded = true, nofloaded + 1
- -- instance:patterns (defs.patterns and defs.patterns .data or "")
- -- instance:hyphenation(defs.exceptions and defs.exceptions.data or "")
- instance:patterns (validdata(defs.patterns, "patterns", tag) or "")
- instance:hyphenation(validdata(defs.exceptions,"exceptions",tag) or "")
+ -- instance:patterns (loaded.patterns and resources.patterns .data or "")
+ -- instance:hyphenation(loaded.exceptions and resources.exceptions.data or "")
+ instance:patterns (validdata(loaded.patterns, "patterns", tag) or "")
+ instance:hyphenation(validdata(loaded.exceptions,"exceptions",tag) or "")
+ resources[#resources+1] = loaded -- so we can use them otherwise
+
else
report_initialization("invalid definition %a for language %a in %a",definition,tag,filename)
end
@@ -286,10 +308,12 @@ end
-- not that usefull, global values
-function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) end
-function languages.posthyphenchar(what) return posthyphenchar(tolang(what)) end
-function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end
-function languages.righthyphenmin(what) return righthyphenmin(tolang(what)) end
+function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) end
+function languages.posthyphenchar (what) return posthyphenchar (tolang(what)) end
+function languages.preexhyphenchar (what) return preexhyphenchar (tolang(what)) end
+function languages.postexhyphenchar(what) return postexhyphenchar(tolang(what)) end
+function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end
+function languages.righthyphenmin (what) return righthyphenmin (tolang(what)) end
-- e['implementer']= 'imple{m}{-}{-}menter'
-- e['manual'] = 'man{}{}{}'
@@ -351,7 +375,7 @@ languages.associate('fr','latn','fra')
statistics.register("loaded patterns", function()
local result = languages.logger.report()
if result ~= "none" then
--- return result
+ -- return result
return format("%s, load time: %s",result,statistics.elapsedtime(languages))
end
end)
@@ -363,17 +387,58 @@ end)
-- interface
-local getnumber = languages.getnumber
+implement {
+ name = "languagenumber",
+ actions = { languages.getnumber, context },
+ arguments = { "string", "string", "string" }
+}
-function commands.languagenumber(tag,default,patterns)
- context(getnumber(tag,default,patterns))
-end
+implement {
+ name = "installedlanguages",
+ actions = { languages.installed, context },
+}
-function commands.installedlanguages(separator)
- context(languages.installed(separator))
-end
+implement {
+ name = "definelanguage",
+ actions = languages.define,
+ arguments = { "string", "string" }
+}
-commands.definelanguage = languages.define
-commands.setlanguagesynonym = languages.setsynonym
-commands.unloadlanguage = languages.unload
-commands.setlanguageexceptions = languages.setexceptions
+implement {
+ name = "setlanguagesynonym",
+ actions = languages.setsynonym,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "unloadlanguage",
+ actions = languages.unload,
+ arguments = { "string" }
+}
+
+implement {
+ name = "setlanguageexceptions",
+ actions = languages.setexceptions,
+ arguments = { "string", "string" }
+}
+
+
+implement {
+ name = "currentprehyphenchar",
+ actions = function()
+ local c = prehyphenchar(tolang())
+ if c and c > 0 then
+ context.char(c)
+ end
+ end
+}
+
+implement {
+ name = "currentposthyphenchar",
+ actions = function()
+ local c = posthyphenchar(tolang())
+ if c and c > 0 then
+ context.char(c)
+ end
+ end
+}
diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv
index 17d00033b..335d6d1c9 100644
--- a/tex/context/base/lang-ini.mkiv
+++ b/tex/context/base/lang-ini.mkiv
@@ -111,6 +111,44 @@
\csname\??language\s!default#2\endcsname
\fi\fi\fi}
+\def\mainlanguageparameter#1%
+ {\ifcsname\??language\currentmainlanguage#1\endcsname
+ \csname\??language\currentmainlanguage#1\endcsname
+ \else\ifcsname\??language\currentmainlanguage\s!default\endcsname
+ \expandafter\specificlanguageparameter\csname\??language\currentmainlanguage\s!default\endcsname{#1}%
+ \else\ifcsname\??language\s!default#1\endcsname
+ \csname\??language\s!default#1\endcsname
+ \fi\fi\fi}
+
+\def\currentusedlanguage{\currentlanguage}
+
+\let\usedlanguageparameter\languageparameter
+
+\def\askedlanguageparameter#1% assumes \currentusedlanguage being set
+ {\ifcsname\??language\currentusedlanguage#1\endcsname
+ \csname\??language\currentusedlanguage#1\endcsname
+ \else\ifcsname\??language\currentusedlanguage\s!default\endcsname
+ \expandafter\specificlanguageparameter\csname\??language\currentusedlanguage\s!default\endcsname{#1}%
+ \else\ifcsname\??language\s!default#1\endcsname
+ \csname\??language\s!default#1\endcsname
+ \fi\fi\fi}
+
+\unexpanded\def\setlanguageparameter#1%
+ {\edef\currentusedlanguage{\reallanguagetag{#1\c!language}}%
+ %\let\setlanguageparameter\gobbleoneargument
+ \ifx\currentusedlanguage\empty
+ \let\currentusedlanguage \currentlanguage
+ \let\usedlanguageparameter\languageparameter
+ \else\ifx\currentusedlanguage\v!global
+ \let\currentusedlanguage \currentmainlanguage
+ \let\usedlanguageparameter\mainlanguageparameter
+ \else\ifx\currentusedlanguage\v!local
+ \let\currentusedlanguage \currentlanguage
+ \let\usedlanguageparameter\languageparameter
+ \else
+ \let\usedlanguageparameter\askedlanguageparameter
+ \fi\fi\fi}
+
\unexpanded\def\setupcurrentlanguage[#1]%
{\setcurrentlanguage\currentmainlanguage{#1}}
@@ -162,7 +200,6 @@
%D implementations support run time addition of patterns to a
%D preloaded format).
-
%D \macros
%D {preloadlanguages}
%D
@@ -174,15 +211,17 @@
\newtoks \everysetuplanguage
-\def\installedlanguages{\ctxcommand{installedlanguages()}}
+\def\installedlanguages{\clf_installedlanguages}
-\unexpanded\def\doiflanguageelse#1%
+\unexpanded\def\doifelselanguage#1%
{\ifcsname\??language#1\c!state\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doiflanguageelse\doifelselanguage
+
\def\reallanguagetag#1%
{\ifcsname\??languagelinked#1\endcsname\csname\??languagelinked#1\endcsname\else#1\fi}
@@ -192,17 +231,17 @@
{\dodoubleargument\lang_basics_install}
\def\lang_basics_install[#1][#2]%
- {\doifassignmentelse{#2}
- {\doiflanguageelse{#1}
+ {\doifelseassignment{#2}
+ {\doifelselanguage{#1}
{\getparameters[\??language#1][#2]}
{\setvalue{\??languagelinked#1}{#1}%
\lang_basics_install_indeed{#1}{#1}%
\getparameters[\??language#1][\c!state=\v!start,#2]}%
\edef\currentsetuplanguage{#1}%
- \ctxcommand{definelanguage("#1","\specificlanguageparameter{#1}\s!default")}%
+ \clf_definelanguage{#1}{\specificlanguageparameter{#1}\s!default}%
\the\everysetuplanguage}
{\setvalue{\??languagelinked#1}{#2}%
- \ctxcommand{setlanguagesynonym("#1","#2")}%
+ \clf_setlanguagesynonym{#1}{#2}%
\lang_basics_install_indeed{#1}{#2}}}
\def\lang_basics_install_indeed#1#2%
@@ -213,7 +252,7 @@
%D used dutch mnemonics in the dutch version, but nowadays
%D conform a standard.
-\unexpanded\def\doifpatternselse#1%
+\unexpanded\def\doifelsepatterns#1%
{\begingroup % will change
\lang_basics_set_current[#1]%
\ifnum\normallanguage>\zerocount
@@ -222,6 +261,8 @@
\endgroup\expandafter\secondoftwoarguments
\fi}
+\let\doifpatternselse\doifelsepatterns
+
%D \macros
%D {setuplanguage}
%D
@@ -254,7 +295,7 @@
\lang_basics_synchronize}
\appendtoks
- \ctxcommand{unloadlanguage("\currentsetuplanguage")}%
+ \clf_unloadlanguage{\currentsetuplanguage}%
\to \everysetuplanguage
\setuplanguage
@@ -354,13 +395,58 @@
\newtoks \everylanguage
+% \def\lang_basics_synchronize% assumes that \currentlanguage is set % % % use different name as complex
+% {\normallanguage\ctxcommand{languagenumber(%
+% "\currentlanguage",%
+% "\defaultlanguage\currentlanguage",%
+% "\languageparameter\s!patterns"%
+% )}\relax
+% \the\everylanguage\relax}
+
+% (some 20%) faster but code jungle (the publication code can have excessive
+% switching
+
+\installcorenamespace{languagenumbers}
+
+\appendtoks
+ % we need to reassign the number because new patterns can be defined later on
+ % so let's hope not that many \setups happen during a run
+ \global\expandafter\let\csname\??languagenumbers\currentlanguage\endcsname\undefined
+\to \everysetuplanguage
+
+\def\lang_basics_synchronize_yes
+ {\zerocount % see below
+ \global\expandafter\chardef\csname\??languagenumbers\currentlanguage\endcsname
+ \clf_languagenumber
+ {\currentlanguage}%
+ {\defaultlanguage\currentlanguage}%
+ {\languageparameter\s!patterns}%
+ \relax
+ \normallanguage\csname\??languagenumbers\currentlanguage\endcsname}
+
+\let\lang_basics_synchronize_nop\zerocount % not loaded anyway
+
+\letvalue{\??languagenumbers}\lang_basics_synchronize_nop % initime
+
+\appendtoks
+ \letvalue{\??languagenumbers}\lang_basics_synchronize_yes % runtime
+\to \everydump
+
\def\lang_basics_synchronize% assumes that \currentlanguage is set % % % use different name as complex
- {\normallanguage\ctxcommand{languagenumber(%
- "\currentlanguage",%
- "\defaultlanguage\currentlanguage",%
- "\languageparameter\s!patterns"%
- )}\relax
- \the\everylanguage\relax}
+ {\normallanguage\csname\??languagenumbers
+ \ifcsname\??languagenumbers\currentlanguage\endcsname
+ \currentlanguage
+ \fi
+ \endcsname
+ \relax
+ \the\everylanguage
+ \relax}
+
+% experimental
+
+\newcount\hyphenstate
+
+% so far
\newcount\hyphenminoffset
@@ -479,7 +565,11 @@
\lang_basics_switch_asked}
\unexpanded\def\language
- {\doifnextoptionalelse\lang_basics_set_current\normallanguage}
+ {\doifelsenextoptionalcs\lang_basics_set_current\normallanguage}
+
+\let\setlanguage\language % we make these synonyms
+
+\let\patterns\gobbleoneargument
\newcount\mainlanguagenumber
@@ -505,7 +595,7 @@
\normallanguage\mainlanguagenumber
\to \everybeforepagebody
-%D New (see nomarking and nolist):
+%D Used at all?
\def\splitsequence#1#2%
{\doifelse{#1}\v!no{#2}{\doifelse{#1}\v!yes{\languageparameter\c!limittext}{#1}}}
@@ -647,7 +737,7 @@
\ifx\askedlanguage\empty
\let\askedlanguage\currentlanguage
\fi
- \ctxcommand{setlanguageexceptions("\askedlanguage",\!!bs#2\!!es)}%
+ \clf_setlanguageexceptions{\askedlanguage}{#2}%
\endgroup}
%D For the moment here:
@@ -655,4 +745,18 @@
\uchyph 1 % also treat uppercase
\exhyphenchar 45 % to permit breaking at explicit hyphens
+%D New:
+
+\unexpanded\def\traceddiscretionary#1#2#3%
+ {\dontleavehmode
+ \discretionary{\darkred#1}{\darkgreen#2}{\darkblue#3}}
+
+\unexpanded\def\samplediscretionary
+ {\traceddiscretionary
+ {pre\clf_currentprehyphenchar}%
+ {\clf_currentposthyphenchar post}%
+ {replace}}
+
+% todo: make this configurable
+
\protect \endinput
diff --git a/tex/context/base/lang-lab.lua b/tex/context/base/lang-lab.lua
index 91c258418..e90bee017 100644
--- a/tex/context/base/lang-lab.lua
+++ b/tex/context/base/lang-lab.lua
@@ -19,6 +19,9 @@ local report_labels = logs.reporter("languages","labels")
languages.labels = languages.labels or { }
local labels = languages.labels
+local context = context
+local implement = interfaces.implement
+
local variables = interfaces.variables
local settings_to_array = utilities.parsers.settings_to_array
@@ -30,19 +33,27 @@ end
labels.split = split
-local contextsprint = context.sprint
+local contextsprint = context.sprint
+
+local f_setlabeltextpair = formatters["\\setlabeltextpair{%s}{%s}{%s}{%s}{%s}"]
+local f_key_key = formatters["\\v!%s:\\v!%s"]
+local f_key_raw = formatters["\\v!%s:%s"]
+local f_raw_key = formatters["%s:\\v!%s"]
+local f_raw_raw = formatters["%s:%s"]
+local f_key = formatters["\\v!%s"]
+local f_raw = formatters["%s"]
local function definelanguagelabels(data,class,tag,rawtag)
for language, text in next, data.labels do
if text == "" then
-- skip
elseif type(text) == "table" then
- contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text[1],"}{",text[2],"}")
+ contextsprint(prtcatcodes,f_setlabeltextpair(class,language,tag,text[1],text[2]))
if trace_labels then
report_labels("language %a, defining label %a as %a and %a",language,rawtag,text[1],text[2])
end
else
- contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text,"}{}")
+ contextsprint(prtcatcodes,f_setlabeltextpair(class,language,tag,text,""))
if trace_labels then
report_labels("language %a, defining label %a as %a",language,rawtag,text)
end
@@ -55,6 +66,7 @@ function labels.define(class,name,prefixed)
if list then
report_labels("defining label set %a",name)
for tag, data in next, list do
+ tag = variables[tag] or tag
if data.hidden then
-- skip
elseif prefixed then
@@ -62,17 +74,17 @@ function labels.define(class,name,prefixed)
if second then
if rawget(variables,first) then
if rawget(variables,second) then
- definelanguagelabels(data,class,formatters["\\v!%s:\\v!%s"](first,second),tag)
+ definelanguagelabels(data,class,f_key_key(first,second),tag)
else
- definelanguagelabels(data,class,formatters["\\v!%s:%s"](first,second),tag)
+ definelanguagelabels(data,class,f_key_raw(first,second),tag)
end
elseif rawget(variables,second) then
- definelanguagelabels(data,class,formatters["%s:\\v!%s"](first,second),tag)
+ definelanguagelabels(data,class,f_raw_key(first,second),tag)
else
- definelanguagelabels(data,class,formatters["%s:%s"](first,second),tag)
+ definelanguagelabels(data,class,f_raw_raw(first,second),tag)
end
elseif rawget(variables,rawtag) then
- definelanguagelabels(data,class,formatters["\\v!%s"](tag),tag)
+ definelanguagelabels(data,class,f_key(tag),tag)
else
definelanguagelabels(data,class,tag,tag)
end
@@ -102,7 +114,11 @@ end
-- interface
-commands.definelabels = labels.define
+interfaces.implement {
+ name = "definelabels",
+ actions = labels.define,
+ arguments = { "string", "string", "boolean" }
+}
-- function commands.setstrippedtextprefix(str)
-- context(string.strip(str))
@@ -115,7 +131,7 @@ commands.definelabels = labels.define
-- text : "a,b,c"
-- separators : "{, },{ and }"
-function commands.concatcommalist(settings) -- it's too easy to forget that this one is there
+local function concatcommalist(settings) -- it's too easy to forget that this one is there
local list = settings.list or settings_to_array(settings.text or "")
local size = #list
local command = settings.command and context[settings.command] or context
@@ -140,3 +156,16 @@ function commands.concatcommalist(settings) -- it's too easy to forget that this
command(list[size])
end
end
+
+implement {
+ name = "concatcommalist",
+ actions = concatcommalist,
+ arguments = {
+ {
+ { "text" },
+ { "separators" },
+ { "first" },
+ { "second" },
+ }
+ }
+}
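+
+-- Sketch (assumption, not part of the original patch): with the "and-1" and "and-2"
+-- label texts set up in lang-lab.mkiv, text = "1,2,3" is typeset as "1, 2 and 3"
+-- in English and "1, 2 en 3" in Dutch.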
diff --git a/tex/context/base/lang-lab.mkiv b/tex/context/base/lang-lab.mkiv
index 1ddb44cbb..fbe3346dc 100644
--- a/tex/context/base/lang-lab.mkiv
+++ b/tex/context/base/lang-lab.mkiv
@@ -84,6 +84,7 @@
\unexpanded\def\lang_labels_define_class_indeed#1#2#3#4#5#6#7#8#9%
{\setuvalue{setup#1text}{\protecttextprefixes#2\def\currenttextprefixclass{#1}\dodoubleempty\lang_labels_text_prefix_setup}%
\setuvalue{preset#1text}{\protecttextprefixes1\def\currenttextprefixclass{#1}\dodoubleempty\lang_labels_text_prefix_setup}%
+ \setuvalue{copy#1text}{\protecttextprefixes1\def\currenttextprefixclass{#1}\dodoubleempty\lang_labels_text_prefix_copy}%
\setuvalue{start#1text}{\protecttextprefixes1\def\currenttextprefixclass{#1}\dotripleempty\lang_labels_text_prefix_start[#1]}%
\letvalue{stop#1text}\relax
\def#4{\reallanguagetag{\defaultlanguage\currentmainlanguage}}%
@@ -94,8 +95,10 @@
\csname\??label\currentlabelcategory#1:##1:##2\endcsname
\else\ifcsname\??label#1:##1:##2\endcsname
\csname\??label#1:##1:##2\endcsname
- \else\ifcsname\??language#4\s!default\endcsname
- \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
+% \else\ifcsname\??language#4\s!default\endcsname
+% \expandafter#5\csname\??language#4\s!default\endcsname{##2}%
+ \else\ifcsname\??language##1\s!default\endcsname
+ \expandafter#5\csname\??language##1\s!default\endcsname{##2}%
\else\ifcsname\??label\currentlabelcategory#1:##2\endcsname
\csname\??label\currentlabelcategory#1:##2\endcsname
\else\ifcsname\??label#1:##2\endcsname
@@ -178,7 +181,8 @@
\grabuntil{stop#1text}\lang_labels_text_prefix_start_indeed}
\def\lang_labels_text_prefix_start_indeed#1% text (not special checking done here yet, only for long texts anyway)
- {\expandafter\edef\csname\??label\currenttextprefixclass:\currenttextprefixtag:\currenttextprefixname\endcsname{{\ctxlua{context(string.strip(\!!bs#1\!!es))}}\empty}}
+ {\expandafter\edef\csname\??label\currenttextprefixclass:\currenttextprefixtag:\currenttextprefixname\endcsname%
+ {{\clf_strip{#1}}\empty}}
\def\lang_labels_text_prefix_setup[#1][#2]%
{\ifsecondargument
@@ -234,17 +238,38 @@
{\expandafter\def\csname\??label\currenttextprefixclass:\currenttextprefixtag:#1\endcsname{#2}}
\unexpanded\def\setlabeltextpair#1#2#3#4#5% a fast one for usage at the Lua end
- {\expandafter\def\csname\??label#1:\reallanguagetag{#2}:#3\endcsname{{#4}{#5}}} % class tag key left right
+ {%\writestatus{!!!!}{#1:\reallanguagetag{#2}:#3}%
+ \expandafter\def\csname\??label#1:\reallanguagetag{#2}:#3\endcsname{{#4}{#5}}} % class tag key left right
+
+\def\lang_labels_text_prefix_copy[#1][#2]%
+ {\ifsecondargument
+ \edef\currenttextprefixtag{\reallanguagetag{#1}}%
+ \processcommalist[#2]\lang_labels_text_prefix_copy_indeed
+ \else
+ \edef\currenttextprefixtag{\reallanguagetag\currentmainlanguage}%
+ \processcommalist[#1]\lang_labels_text_prefix_copy_indeed
+ \fi}
+
+\def\lang_labels_text_prefix_copy_indeed#1%
+ {\lang_labels_text_prefix_copy_pair[#1]}
+
+\def\lang_labels_text_prefix_copy_pair[#1=#2]%
+ {\lang_labels_text_prefix_copy_pair_indeed{#1}[#2,,]}
+
+\def\lang_labels_text_prefix_copy_pair_indeed#1[#2,#3]%
+ {\expandafter\let
+ \csname\??label\currenttextprefixclass:\currenttextprefixtag:#1\expandafter\endcsname
+ \csname\??label\currenttextprefixclass:\currenttextprefixtag:#2\endcsname}
\definelabelclass [head] [0] % titles
\definelabelclass [label] [0] % texts
\definelabelclass [mathlabel] [0] % functions
\definelabelclass [taglabel] [2] % tags
-\ctxcommand{definelabels("head", "titles", true )}
-\ctxcommand{definelabels("label", "texts", true )}
-\ctxcommand{definelabels("mathlabel", "functions",false)}
-\ctxcommand{definelabels("taglabel", "tags", false)}
+\clf_definelabels{head}{titles}\s!true\relax
+\clf_definelabels{label}{texts}\s!true\relax
+\clf_definelabels{mathlabel}{functions}\s!false\relax
+\clf_definelabels{taglabel}{tags}\s!false\relax
%D \macros
%D {translate}
@@ -296,7 +321,7 @@
%D \assigntranslation[en=something,nl=iets]\to\command
%D \stoptyping
-\def\assigntranslation[#1]\to#2%
+\unexpanded\def\assigntranslation[#1]\to#2% bad, this \to
{\getparameters[\??translation][#1]%
\edef#2{\csname\??translation\currentlanguage\endcsname}}
@@ -324,12 +349,12 @@
{\dodoubleempty\typo_helpers_concat_comma_list}
\def\typo_helpers_concat_comma_list[#1][#2]%
- {\ctxcommand{concatcommalist{
- text = \!!bs#1\!!es,
- separators = \!!bs#2\!!es,
- first = \!!bs\labeltext{and-1}\!!es,
- second = \!!bs\labeltext{and-2}\!!es
- }}}
+ {\clf_concatcommalist
+ text {#1}%
+ separators {#2}%
+ first {\detokenize\expandafter{\normalexpanded{\labeltext{and-1}}}}%
+ second {\detokenize\expandafter{\normalexpanded{\labeltext{and-2}}}}%
+ \relax}
\setuplabeltext [\s!nl] [and-1={{, }}, and-2={{ en }}] % 1, 2 en 3
\setuplabeltext [\s!en] [and-1={{, }}, and-2={{, }}] % 1, 2, 3
diff --git a/tex/context/base/lang-mis.mkiv b/tex/context/base/lang-mis.mkiv
index 0c4bc3ac4..2b886b3ce 100644
--- a/tex/context/base/lang-mis.mkiv
+++ b/tex/context/base/lang-mis.mkiv
@@ -20,35 +20,28 @@
\unprotect
-%D One of \TEX's strong points in building paragraphs is the way
-%D hyphenations are handled. Although for real good hyphenation
-%D of non||english languages some extensions to the program are
-%D needed, fairly good results can be reached with the standard
-%D mechanisms and an additional macro, at least in Dutch.
-
-%D \CONTEXT\ originates in the wish to typeset educational
-%D materials, especially in a technical environment. In
-%D production oriented environments, a lot of compound words
-%D are used. Because the Dutch language poses no limits on
-%D combining words, we often favor putting dashes between those
-%D words, because it facilitates reading, at least for those
-%D who are not that accustomed to it.
-%D
-%D In \TEX\ compound words, separated by a hyphen, are not
-%D hyphenated at all. In spite of the multiple pass paragraph
-%D typesetting this can lead to parts of words sticking into
-%D the margin. The solution lays in saying \type
-%D {spoelwater||terugwinunit} instead of \type
-%D {spoelwater-terugwinunit}. By using a one character command
-%D like \type {|}, delimited by the same character \type {|},
-%D we get ourselves both a decent visualization (in \TEXEDIT\
-%D and colored verbatim we color these commands yellow) and an
-%D efficient way of combining words.
-%D
-%D The sequence \type{||} simply leads to two words connected by
-%D a hyphen. Because we want to distinguish such a hyphen from
-%D the one inserted when \TEX\ hyphenates a word, we use a bit
-%D longer one.
+%D One of \TEX's strong points in building paragraphs is the way hyphenations are
+%D handled. Although for real good hyphenation of non||english languages some
+%D extensions to the program are needed, fairly good results can be reached with the
+%D standard mechanisms and an additional macro, at least in Dutch.
+%D
+%D \CONTEXT\ originates in the wish to typeset educational materials, especially in
+%D a technical environment. In production oriented environments, a lot of compound
+%D words are used. Because the Dutch language poses no limits on combining words, we
+%D often favor putting dashes between those words, because it facilitates reading,
+%D at least for those who are not that accustomed to it.
+%D
+%D In \TEX\ compound words, separated by a hyphen, are not hyphenated at all. In
+%D spite of the multiple pass paragraph typesetting this can lead to parts of words
+%D sticking into the margin. The solution lies in saying \type
+%D {spoelwater||terugwinunit} instead of \type {spoelwater-terugwinunit}. By using a
+%D one character command like \type {|}, delimited by the same character \type {|},
+%D we get ourselves both a decent visualization (in \TEXEDIT\ and colored verbatim
+%D we color these commands yellow) and an efficient way of combining words.
+%D
+%D The sequence \type{||} simply leads to two words connected by a hyphen. Because
+%D we want to distinguish such a hyphen from the one inserted when \TEX\ hyphenates
+%D a word, we use a bit longer one.
%D
%D \hyphenation {spoel-wa-ter te-rug-win-unit}
%D
@@ -56,9 +49,8 @@
%D \test {spoelwater||terugwinunit}
%D \stoptest
%D
-%D As we already said, the \type{|} is a command. This commands
-%D accepts an optional argument before it's delimiter, which is
-%D also a \type{|}.
+%D As we already said, the \type{|} is a command. This command accepts an optional
+%D argument before its delimiter, which is also a \type{|}.
%D
%D \hyphenation {po-ly-meer che-mie}
%D
@@ -66,8 +58,8 @@
%D \test {polymeer|*|chemie}
%D \stoptest
%D
-%D Arguments like \type{*} are not interpreted and inserted
-%D directly, in contrary to arguments like:
+%D Arguments like \type{*} are not interpreted but inserted directly, contrary to
+%D arguments like:
%D
%D \starttest
%D \test {polymeer|~|chemie}
@@ -75,22 +67,21 @@
%D \test {polymeer|(|chemie|)| }
%D \stoptest
%D
-%D Although such situations seldom occur |<|we typeset thousands
-%D of pages before we encountered one that forced us to enhance
-%D this mechanism|>| we also have to take care of comma's.
+%D Although such situations seldom occur |<|we typeset thousands of pages before we
+%D encountered one that forced us to enhance this mechanism|>| we also have to take
+%D care of commas.
%D
-%D \hyphenation {uit-stel-len}
+%D \hyphenation {uit-stel-len}
%D
-%D \starttest
-%D \test {op||, in|| en uitstellen}
-%D \stoptest
+%D \starttest
+%D \test {op||, in|| en uitstellen}
+%D \stoptest
%D
-%D The next special case (concerning quotes) was brought to my
-%D attention by Piet Tutelaers, one of the driving forces
-%D behind rebuilding hyphenation patterns for the dutch
-%D language.\footnote{In 1996 the spelling of the dutch
-%D language has been slightly reformed which made this topic
-%D actual again.} We'll also take care of this case.
+%D The next special case (concerning quotes) was brought to my attention by Piet
+%D Tutelaers, one of the driving forces behind rebuilding hyphenation patterns for
+%D the dutch language.\footnote{In 1996 the spelling of the dutch language has been
+%D slightly reformed, which made this topic relevant again.} We'll also take care of
+%D this case.
%D
%D \starttest
%D \test {AOW|'|er}
@@ -99,8 +90,7 @@
%D \test {rock|-|'n|-|roller}
%D \stoptest
%D
-%D Tobias Burnus pointed out that I should also support
-%D something like
+%D Tobias Burnus pointed out that I should also support something like
%D
%D \starttest
%D \test {well|_|known}
@@ -117,11 +107,10 @@
%D \macros
%D {installdiscretionaries}
%D
-%D The mechanism described here is one of the older inner parts
-%D of \CONTEXT. The most recent extensions concerns some
-%D special cases as well as the possibility to install other
-%D characters as delimiters. The prefered way of specifying
-%D compound words is using \type{||}, which is installed by:
+%D The mechanism described here is one of the older inner parts of \CONTEXT. The
+%D most recent extensions concern some special cases as well as the possibility to
+%D install other characters as delimiters. The preferred way of specifying compound
+%D words is using \type{||}, which is installed by:
%D
%D \starttyping
%D \installdiscretionary | -
@@ -153,20 +142,18 @@
%D {compoundhyphen,
%D beginofsubsentence,endofsubsentence}
%D
-%D Now let's go to the macros. First we define some variables.
-%D In the main \CONTEXT\ modules these can be tuned by a setup
-%D command. Watch the (maybe) better looking compound hyphen.
+%D Now let's go to the macros. First we define some variables. In the main \CONTEXT\
+%D modules these can be tuned by a setup command. Watch the (maybe) better looking
+%D compound hyphen.
\ifx\compoundhyphen \undefined \unexpanded\def\compoundhyphen {\hbox{-\kern-.25ex-}} \fi
\ifx\beginofsubsentence\undefined \unexpanded\def\beginofsubsentence{\hbox{\emdash}} \fi
\ifx\endofsubsentence \undefined \unexpanded\def\endofsubsentence {\hbox{\emdash}} \fi
-%D The last two variables are needed for subsentences
-%D |<|like this one|>| which we did not yet mention.
-%D
-%D We want to enable breaking but at the same time don't want
-%D compound characters like |-| or || to be separated from the
-%D words. \TEX\ hackers will recognise the next two macro's:
+%D The last two variables are needed for subsentences |<|like this one|>| which we
+%D did not yet mention. We want to enable breaking but at the same time don't want
+%D compound characters like |-| or || to be separated from the words. \TEX\ hackers
+%D will recognise the next two macros:
\ifx\prewordbreak \undefined \unexpanded\def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax} \fi
\ifx\postwordbreak\undefined \unexpanded\def\postwordbreak {\penalty\zerocount \hskip\zeropoint\relax} \fi
@@ -177,19 +164,17 @@
%D \macros
%D {beginofsubsentencespacing,endofsubsentencespacing}
%D
-%D In the previous macros we provided two hooks which can be
-%D used to support nested sub||sentences. In \CONTEXT\ these
-%D hooks are used to insert a small space when needed.
+%D In the previous macros we provided two hooks which can be used to support nested
+%D sub||sentences. In \CONTEXT\ these hooks are used to insert a small space when
+%D needed.
\ifx\beginofsubsentencespacing\undefined \let\beginofsubsentencespacing\relax \fi
\ifx\endofsubsentencespacing \undefined \let\endofsubsentencespacing \relax \fi
-%D The following piece of code is a torture test compound
-%D hndling. The \type {\relax} before the \type {\ifmmode} is
-%D needed because of the alignment scanner (in \ETEX\ this
-%D problem is not present because there a protected macro is
-%D not expanded. Thanks to Tobias Burnus for providing this
-%D example.
+%D The following piece of code is a torture test for compound handling. The \type
+%D {\relax} before the \type {\ifmmode} is needed because of the alignment scanner
+%D (in \ETEX\ this problem is not present because there a protected macro is not
+%D expanded). Thanks to Tobias Burnus for providing this example.
%D
%D \startformula
%D \left|f(x_n)-{1\over2}\right| =
@@ -234,10 +219,9 @@
\def\lang_discretionaries_process_none#1%
{\detokenize{#1}}
-%D The macro \type{\lang_discretionaries_check_before} takes care
-%D of loners like \type{||word}, while it counterpart \type
-%D {\lang_discretionaries_check_after} is responsible for handling
-%D the comma.
+%D The macro \type{\lang_discretionaries_check_before} takes care of loners like
+%D \type{||word}, while its counterpart \type {\lang_discretionaries_check_after} is
+%D responsible for handling the comma.
\newsignal\compoundbreakpoint
@@ -327,8 +311,8 @@
%D \macros
%D {directdiscretionary}
%D
-%D In those situations where the nature of characters is
-%D less predictable, we can use the more direct approach:
+%D In those situations where the nature of characters is less predictable, we can
+%D use the more direct approach:
\unexpanded\def\directdiscretionary
{\csname
@@ -450,10 +434,9 @@
%D \macros
%D {fakecompoundhyphen}
%D
-%D In headers and footers as well as in active pieces of text
-%D we need a dirty hack. Try to imagine what is needed to
-%D savely break the next text across a line and at the same
-%D time make the words interactive.
+%D In headers and footers as well as in active pieces of text we need a dirty hack.
+%D Try to imagine what is needed to safely break the next text across a line and at
+%D the same time make the words interactive.
%D
%D \starttyping
%D \goto{Some||Long||Word}
@@ -471,10 +454,9 @@
%D {midworddiscretionary}
%D
%D If needed, one can add a discretionary hyphen using \type
-%D {\midworddiscretionary}. This macro does the same as
-%D \PLAIN\ \TEX's \type {\-}, but, like the ones implemented
-%D earlier, this one also looks ahead for spaces and grouping
-%D tokens.
+%D {\midworddiscretionary}. This macro does the same as \PLAIN\ \TEX's \type {\-},
+%D but, like the ones implemented earlier, this one also looks ahead for spaces and
+%D grouping tokens.
\unexpanded\def\midworddiscretionary
{\futurelet\nexttoken\lang_discretionaries_mid_word}
@@ -489,15 +471,13 @@
%D \macros
%D {installcompoundcharacter}
%D
-%D When Tobias Burnus started translating the dutch manual of
-%D \PPCHTEX\ into german, he suggested to let \CONTEXT\ support
-%D the \type{german.sty} method of handling compound
-%D characters, especially the umlaut. This package is meant for
-%D use with \PLAIN\ \TEX\ as well as \LATEX.
+%D When Tobias Burnus started translating the dutch manual of \PPCHTEX\ into german,
+%D he suggested to let \CONTEXT\ support the \type{german.sty} method of handling
+%D compound characters, especially the umlaut. This package is meant for use with
+%D \PLAIN\ \TEX\ as well as \LATEX.
%D
-%D I decided to implement compound character support as
-%D versatile as possible. As a result one can define his own
-%D compound character support, like:
+%D I decided to implement compound character support as versatile as possible. As a
+%D result one can define his own compound character support, like:
%D
%D \starttyping
%D \installcompoundcharacter "a {\"a}
@@ -515,25 +495,24 @@
%D \installcompoundcharacter "ff {\discretionary{ff-}{f}{ff}}
%D \stoptyping
%D
-%D The support is not limited to alphabetic characters, so the
-%D next definition is also valid.
+%D The support is not limited to alphabetic characters, so the next definition is
+%D also valid.
%D
%D \starttyping
%D \installcompoundcharacter ". {.\doifnextcharelse{\spacetoken}{}{\kern.125em}}
%D \stoptyping
%D
-%D The implementation looks familiar and uses the same tricks as
-%D mentioned earlier in this module. We take care of two
-%D arguments, which complicates things a bit.
+%D The implementation looks familiar and uses the same tricks as mentioned earlier
+%D in this module. We take care of two arguments, which complicates things a bit.
\installcorenamespace{compoundnormal}
\installcorenamespace{compoundsingle}
\installcorenamespace{compoundmultiple}
\installcorenamespace{compounddefinition}
-%D When we started working on MK IV code, we needed a different
-%D approach for defining the active character itself. In MK II as
-%D well as in MK IV we now use the catcode vectors.
+%D When I started working on \MKIV\ code, we needed a different approach for
+%D defining the active character itself. In \MKII\ as well as in \MKIV\ we now use
+%D the catcode vectors.
\setnewconstant\compoundcharactermode\plusone
@@ -551,22 +530,19 @@
\expandafter\letcatcodecommand\expandafter\ctxcatcodes\expandafter\c_lang_compounds_character\csname\??compounddefinition\detokenize{#1}\endcsname
\fi}
-%D We can also ignore definitions (needed in for instance \XML). Beware,
-%D this macro is supposed to be used grouped!
+%D We can also ignore definitions (needed in for instance \XML). Beware, this macro
+%D is supposed to be used grouped!
\def\ignorecompoundcharacter
{\compoundcharactermode\zerocount}
-%D In handling the compound characters we have to take care of
-%D \type{\bgroup} and \type{\egroup} tokens, so we end up with
-%D a multi||step interpretation macro. We look ahead for a
-%D \type{\bgroup}, \type{\egroup} or \type{\blankspace}. Being
-%D no user of this mechanism, the credits for testing them goes
-%D to Tobias Burnus, the first german user of \CONTEXT.
+%D In handling the compound characters we have to take care of \type {\bgroup} and
+%D \type {\egroup} tokens, so we end up with a multi||step interpretation macro. We
+%D look ahead for a \type {\bgroup}, \type {\egroup} or \type {\blankspace}. Being
+%D no user of this mechanism, the credits for testing it go to Tobias Burnus,
+%D the first german user of \CONTEXT.
%D
-%D We define these macros as \type{\long} because we can
-%D expect \type{\par} tokens. We need to look into the future
-%D with \type{\futurelet} to prevent spaces from
+%D We need to look into the future with \type{\futurelet} to prevent spaces from
%D disappearing.
\def\lang_compounds_handle_character#1%
@@ -604,12 +580,11 @@
\tripleexpandafter\lang_compounds_handle_character_two
\fi\fi\fi}
-%D Besides taken care of the grouping and space tokens, we have
-%D to deal with three situations. First we look if the next
-%D character equals the first one, if so, then we just insert
-%D the original. Next we look if indeed a compound character is
-%D defined. We either execute the compound character or just
-%D insert the first. So we have
+%D Besides taking care of the grouping and space tokens, we have to deal with three
+%D situations. First we look if the next character equals the first one; if so, we
+%D just insert the original. Next we look if indeed a compound character is
+%D defined. We either execute the compound character or just insert the first. So we
+%D have
%D
%D \starttyping
%D <key><key> <key><known> <key><unknown>
@@ -639,8 +614,8 @@
\fi\fi\fi
\next}
-%D For very obscure applications (see for an application \type
-%D {lang-sla.tex}) we provide:
+%D For very obscure applications (see for an application \type {lang-sla.tex}) we
+%D provide:
\def\simplifiedcompoundcharacter#1#2%
{\ifcsname\??compoundsingle\string#1\string#2\endcsname
@@ -652,8 +627,8 @@
%D \macros
%D {disablediscretionaries,disablecompoundcharacter}
%D
-%D Occasionally we need to disable this mechanism. For the
-%D moment we assume that \type {|} is used.
+%D Occasionally we need to disable this mechanism. For the moment we assume that
+%D \type {|} is used.
\let\disablediscretionaries \ignorediscretionaries
\let\disablecompoundcharacters\ignorecompoundcharacter
@@ -668,9 +643,8 @@
%D \macros
%D {compound}
%D
-%D We will overload the already active \type {|} so we have
-%D to save its meaning in order to be able to use this handy
-%D macro.
+%D We will overload the already active \type {|} so we have to save its meaning in
+%D order to be able to use this handy macro.
%D
%D \starttyping
%D so test\compound{}test can be used instead of test||test
@@ -688,8 +662,7 @@
\egroup
-%D Here we hook some code into the clean up mechanism needed
-%D for verbatim data.
+%D Here we hook some code into the clean up mechanism needed for verbatim data.
\appendtoks
\disablecompoundcharacters
diff --git a/tex/context/base/lang-rep.lua b/tex/context/base/lang-rep.lua
index 31ae36e6d..95a5e545a 100644
--- a/tex/context/base/lang-rep.lua
+++ b/tex/context/base/lang-rep.lua
@@ -7,9 +7,21 @@ if not modules then modules = { } end modules ['lang-rep'] = {
}
-- A BachoTeX 2013 experiment, probably not that useful. Eventually I used a simpler
--- more generic example.
+-- more generic example. I'm sure no one ever notices or even needs this code.
+--
+-- As a follow up on a question by Alan about special treatment of dropped caps I wonder
+-- if I can make this one more clever (probably in a few more dev steps). For instance
+-- injecting nodes or replacing nodes. It's a prelude to a kind of lpeg for nodes,
+-- although (given experiences so far) we don't really need that. After all, each problem
+-- is somewhat unique.
+local type, tonumber = type, tonumber
local utfbyte, utfsplit = utf.byte, utf.split
+local P, C, U, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.match
+local find = string.find
+
+local grouped = P("{") * ( Ct((U/utfbyte-P("}"))^1) + Cc(false) ) * P("}")-- grouped
+local splitter = Ct((Ct(Cc("discretionary") * grouped * grouped * grouped) + U/utfbyte)^1)
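+
+-- Added comment (not in the original patch): a replacement string such as
+-- "x{ab-}{c}{d}y" is split into plain utf byte codes, with each braced triple
+-- collapsed into a { "discretionary", pre, post, replace } entry; an empty group
+-- like {} yields false for that slot.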
local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end)
local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end)
@@ -18,15 +30,34 @@ local report_replacement = logs.reporter("languages","replacements")
local glyph_code = nodes.nodecodes.glyph
-local insert_node_before = nodes.insert_before
-local remove_node = nodes.remove
-local copy_node = nodes.copy
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local flush_list = nuts.flush_list
+local insert_after = nuts.insert_after
+
+local nodepool = nuts.pool
+local new_glyph = nodepool.glyph
+local new_disc = nodepool.disc
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
+local implement = interfaces.implement
+
local replacements = languages.replacements or { }
languages.replacements = replacements
@@ -46,23 +77,32 @@ table.setmetatableindex(lists,function(lists,name)
return data
end)
+lists[v_reset].attribute = unsetvalue -- so we discard 0
+
+-- todo: glue kern
+
local function add(root,word,replacement)
local list = utfsplit(word,true)
- for i=1,#list do
+ local size = #list
+ for i=1,size do
local l = utfbyte(list[i])
if not root[l] then
root[l] = { }
end
- if i == #list then
- local newlist = utfsplit(replacement,true)
- for i=1,#newlist do
- newlist[i] = utfbyte(newlist[i])
- end
+ if i == size then
+ -- local newlist = utfsplit(replacement,true)
+ -- for i=1,#newlist do
+ -- newlist[i] = utfbyte(newlist[i])
+ -- end
+ local special = find(replacement,"{",1,true)
+ local newlist = lpegmatch(splitter,replacement)
+ --
root[l].final = {
word = word,
replacement = replacement,
- oldlength = #list,
+ oldlength = size,
newcodes = newlist,
+ special = special,
}
end
root = root[l]
@@ -83,13 +123,13 @@ end
local function hit(a,head)
local tree = trees[a]
if tree then
- local root = tree[head.char]
+ local root = tree[getchar(head)]
if root then
- local current = head.next
+ local current = getnext(head)
local lastrun = false
local lastfinal = false
- while current and current.id == glyph_code do
- local newroot = root[current.char]
+ while current and getid(current) == glyph_code do
+ local newroot = root[getchar(current)]
if not newroot then
return lastrun, lastfinal
else
@@ -104,7 +144,7 @@ local function hit(a,head)
root = newroot
end
end
- current = current.next
+ current = getnext(current)
end
if lastrun then
return lastrun, lastfinal
@@ -113,11 +153,27 @@ local function hit(a,head)
end
end
+local function tonodes(list,template)
+ local head, current
+ for i=1,#list do
+ local new = copy_node(template)
+ setfield(new,"char",list[i])
+ if head then
+ head, current = insert_after(head,current,new)
+ else
+ head, current = new, new
+ end
+ end
+ return head
+end
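+-- The tonodes helper above clones the template glyph for every code so that font
+-- and attributes are inherited; as a sketch, tonodes({ 99, 100 },glyph) gives a
+-- fresh two glyph list rendering "cd" (assuming glyph is a suitable template).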
+
+
function replacements.handler(head)
+ head = tonut(head)
local current = head
local done = false
while current do
- if current.id == glyph_code then
+ if getid(current) == glyph_code then
local a = getattr(current,a_replacements)
if a then
local last, final = hit(a,current)
@@ -125,46 +181,90 @@ function replacements.handler(head)
local oldlength = final.oldlength
local newcodes = final.newcodes
local newlength = #newcodes
- if report_replacement then
+ if trace_replacements then
report_replacement("replacing word %a by %a",final.word,final.replacement)
end
- if oldlength == newlength then -- #old == #new
+ if final.special then
+ -- easier is to delete and insert (a simple callout to tex would be more efficient)
+ -- maybe just walk over a replacement string instead
+ local prev = getprev(current)
+ local next = getnext(last)
+ local list = current
+ setfield(last,"next",nil)
+ setfield(prev,"next",next)
+ if next then
+ setfield(next,"prev",prev)
+ end
+ current = prev
+ if not current then
+ head = nil
+ end
+ for i=1,newlength do
+ local codes = newcodes[i]
+ local new = nil
+ if type(codes) == "table" then
+ local method = codes[1]
+ if method == "discretionary" then
+ local pre, post, replace = codes[2], codes[3], codes[4]
+ new = new_disc()
+ if pre then
+ setfield(new,"pre",tonodes(pre,last))
+ end
+ if post then
+ setfield(new,"post",tonodes(post,last))
+ end
+ if replace then
+ setfield(new,"replace",tonodes(replace,last))
+ end
+ else
+ -- todo
+ end
+ else
+ new = copy_node(last)
+ setfield(new,"char",codes)
+ end
+ if new then
+ head, current = insert_after(head,current,new)
+ end
+ end
+ flush_list(list)
+ elseif oldlength == newlength then -- #old == #new
for i=1,newlength do
- current.char = newcodes[i]
- current = current.next
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
end
elseif oldlength < newlength then -- #old < #new
for i=1,newlength-oldlength do
local n = copy_node(current)
- n.char = newcodes[i]
+ setfield(n,"char",newcodes[i])
head, current = insert_node_before(head,current,n)
- current = current.next
+ current = getnext(current)
end
for i=newlength-oldlength+1,newlength do
- current.char = newcodes[i]
- current = current.next
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
end
else -- #old > #new
for i=1,oldlength-newlength do
head, current = remove_node(head,current,true)
end
for i=1,newlength do
- current.char = newcodes[i]
- current = current.next
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
end
end
done = true
end
end
end
- current = current.next
+ current = getnext(current)
end
- return head, done
+ return tonode(head), done
end
local enabled = false
-function replacements.set(n) -- number or 'reset'
+function replacements.set(n)
if n == v_reset then
n = unsetvalue
else
@@ -182,8 +282,14 @@ end
-- interface
-commands.setreplacements = replacements.set
-commands.addreplacements = replacements.add
+implement {
+ name = "setreplacements",
+ actions = replacements.set,
+ arguments = "string"
+}
-nodes.tasks.prependaction("processors","words","languages.replacements.handler")
-nodes.tasks.disableaction("processors","languages.replacements.handler")
+implement {
+ name = "addreplacements",
+ actions = replacements.add,
+ arguments = { "string", "string", "string" }
+}
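+-- A usage sketch (assumption, mirroring the \clf_* calls in lang-rep.mkiv below):
+--
+--   languages.replacements.add("basics", { ["aap"] = "monkey" })
+--
+-- or, at the TeX end, \clf_setreplacements{basics} respectively
+-- \clf_addreplacements{basics}{aap}{monkey}.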
diff --git a/tex/context/base/lang-rep.mkiv b/tex/context/base/lang-rep.mkiv
new file mode 100644
index 000000000..a98d51f6c
--- /dev/null
+++ b/tex/context/base/lang-rep.mkiv
@@ -0,0 +1,75 @@
+%D \module
+%D [ file=lang-rep,
+%D version=2013.04.28,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Substitution,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D As I needed an example of messing with nodes for the Bacho\TEX\ tutorial,
+%D I cooked this up. In the end I decided to stick to a simpler example and
+%D just finished this off in case someone really needs it.
+
+\writestatus{loading}{ConTeXt Language Macros / Replacements}
+
+\unprotect
+
+\registerctxluafile{lang-rep}{1.001}
+
+\definesystemattribute[replacements][public]
+
+%D \startluacode
+%D
+%D -- todo: other nodes (prelude to more experiments with auto dropped caps)
+%D
+%D languages.replacements.add("basics", {
+%D ["aap"] = "monkey",
+%D ["noot"] = "nut",
+%D ["never"] = "forever",
+%D ["newer"] = "cooler",
+%D ["new"] = "cool",
+%D -- ["special"] = "veryspe{>>>}{<<<}{=}cial",
+%D })
+%D
+%D \stopluacode
+%D
+%D \replaceword[more][this][that]
+%D \replaceword[more][crap][support]
+%D \replaceword[more][---][—]
+%D \replaceword[basics][special][veryspe{>>>}{<<<}{=}cial]
+%D
+%D \starttyping
+%D \start \setreplacements[basics] What the heck, it's now or never, isn't it new? \par \stop
+%D \start \setreplacements[more] Do we --- {\it really} --- need this kind of crap? \par \stop
+%D \start \setreplacements[basics] All kinds of special thingies! \par \stop
+%D \start \setreplacements[basics] \hsize1mm special \par \stop
+%D \stoptyping
+
+\unexpanded\def\setreplacements[#1]%
+ {\clf_setreplacements{#1}}
+
+\unexpanded\def\resetreplacements
+ {\attribute\replacementsattribute\attributeunsetvalue}
+
+\unexpanded\def\replaceword
+ {\dotripleargument\languages_replacements_replace}
+
+\unexpanded\def\languages_replacements_replace[#1][#2][#3]%
+ {\ifthirdargument
+ \clf_addreplacements{#1}{#2}{#3}%
+ \fi}
+
+\appendtoks
+ \resetreplacements
+\to \everyresettypesetting
+
+\appendtoks
+ \resetreplacements
+\to \everyinitializeverbatim
+
+\protect \endinput
diff --git a/tex/context/base/lang-txt.lua b/tex/context/base/lang-txt.lua
index 4c3a3a985..2938550ee 100644
--- a/tex/context/base/lang-txt.lua
+++ b/tex/context/base/lang-txt.lua
@@ -181,6 +181,11 @@ data.labels={
sk="cotg",
},
},
+ diff={
+ labels={
+ en="d",
+ },
+ },
deg={
labels={
cz="deg",
@@ -415,6 +420,27 @@ data.labels={
},
},
texts={
+ ["year"]={
+ labels={
+ en="year",
+ nl="jaar",
+ kr="년",
+ },
+ },
+ ["month"]={
+ labels={
+ en="month",
+ nl="maand",
+ kr="월",
+ },
+ },
+ ["day"]={
+ labels={
+ en="day",
+ nl="dag",
+ kr="일",
+ },
+ },
["and"]={
labels={
af="",
@@ -506,7 +532,7 @@ data.labels={
hu="április",
it="aprile",
ja="4",
- kr="4월",
+ kr="4",
la="Aprilis",
lt="balandžio",
nb="april",
@@ -613,7 +639,7 @@ data.labels={
hu="augusztus",
it="agosto",
ja="8",
- kr="8월",
+ kr="8",
la="Augustus",
lt="rugpjūčio",
nb="august",
@@ -681,7 +707,7 @@ data.labels={
fr="Chapitre ",
gr="Κεφάλαιο",
hr="Poglavlje ",
- hu=",. fejezet:",
+ hu={""," fejezet"},
it="",
ja={"第","章"},
kr={"제","장"},
@@ -756,7 +782,7 @@ data.labels={
hu="december",
it="dicembre",
ja="12",
- kr="12월",
+ kr="12",
la="December",
lt="gruodžio",
nb="desember",
@@ -828,7 +854,7 @@ data.labels={
hu="február",
it="febbraio",
ja="2",
- kr="2월",
+ kr="2",
la="Februarius",
lt="vasario",
nb="februar",
@@ -895,12 +921,12 @@ data.labels={
fr="Figure ",
gr="Σχήμα",
hr="Slika ",
- hu=",. ábra:",
+ hu={""," ábra"},
it="Fig. ",
ja="図",
- kr="그림",
+ kr="그림 ",
la="Imago ",
- lt=", pav.",
+ lt={""," pav."},
nb="Figur ",
nl="Figuur ",
nn="Figur ",
@@ -969,7 +995,7 @@ data.labels={
fr="Illustration ",
gr="Γραφικό",
hr="Slika ",
- hu=",. kép:",
+ hu={""," kép"},
it="Grafico ",
ja="イラスト",
la="Typus ",
@@ -990,6 +1016,18 @@ data.labels={
vi="Đồ thị",
},
},
+ precedingpage={
+ labels={
+ en="on a preceding page",
+ nl="op een voorgaande bladzijde",
+ },
+ },
+ followingpage={
+ labels={
+ en="on a following page",
+ nl="op een volgende bladzijde",
+ },
+ },
hencefore={
labels={
af="",
@@ -1041,6 +1079,7 @@ data.labels={
hr="vidi ispod",
hu="lásd lejjebb",
it="come mostrato sotto",
+ kr="이후로",
la="",
lt="kaip parodyta žemiau",
nb="som vist under",
@@ -1074,7 +1113,7 @@ data.labels={
fr="Intermède ",
gr="Παύση",
hr="Intermeco ",
- hu=",. intermezzo:",
+ hu={""," intermezzo"},
it="Intermezzo ",
ja="間奏曲",
kr="간주곡",
@@ -1116,7 +1155,7 @@ data.labels={
hu="január",
it="gennaio",
ja="1",
- kr="1월",
+ kr="1",
la="Ianuarius",
lt="sausio",
nb="januar",
@@ -1189,7 +1228,7 @@ data.labels={
hu="július",
it="luglio",
ja="7",
- kr="7월",
+ kr="7",
la="Iulius",
lt="liepos",
nb="juli",
@@ -1261,7 +1300,7 @@ data.labels={
hu="június",
it="giugno",
ja="6",
- kr="6월",
+ kr="6",
la="Iunius",
lt="birželio",
nb="juni",
@@ -1328,7 +1367,7 @@ data.labels={
fr="ligne ",
gr="Γραμμή",
hr="redak ",
- hu=",. sor:",
+ hu={""," sor"},
it="riga ",
ja="線",
kr="행",
@@ -1406,7 +1445,7 @@ data.labels={
hu="március",
it="marzo",
ja="3",
- kr="3월",
+ kr="3",
la="Martius",
lt="kovo",
nb="mars",
@@ -1479,7 +1518,7 @@ data.labels={
hu="május",
it="maggio",
ja="5",
- kr="5월",
+ kr="5",
la="Maius",
lt="gegužės",
nb="mai",
@@ -1588,7 +1627,7 @@ data.labels={
hu="november",
it="novembre",
ja="11",
- kr="11월",
+ kr="11",
la="November",
lt="lapkričio",
nb="november",
@@ -1659,7 +1698,7 @@ data.labels={
hu="október",
it="ottobre",
ja="10",
- kr="10월",
+ kr="10",
la="October",
lt="spalio",
nb="oktober",
@@ -1761,12 +1800,12 @@ data.labels={
fr="Partie ",
gr="Μέρος",
hr="Dio ",
- hu=",. rész:",
+ hu={""," rész"},
it="Parte ",
ja={"第","パート"},
kr={"제","부"},
la="Pars ",
- lt=", dalis",
+ lt={""," dalis"},
nb="Del",
nl="Deel ",
nn="Del",
@@ -1913,7 +1952,7 @@ data.labels={
hu="szeptember",
it="settembre",
ja="9",
- kr="9월",
+ kr="9",
la="September",
lt="rugsėjo",
nb="september",
@@ -2128,12 +2167,12 @@ data.labels={
fr="Tableau ",
gr="Πίνακας",
hr="Tablica ",
- hu=",. táblázat:",
+ hu={""," táblázat"},
it="Tabella ",
ja="表",
- kr="표",
+ kr="표 ",
la="Tabula ",
- lt=", lentelė.",
+ lt={""," lentelė."},
nb="Tabell ",
nl="Tabel ",
nn="Tabell ",
@@ -2355,7 +2394,7 @@ data.labels={
hu="Ábrák",
it="Figure",
ja="図",
- kr="그림",
+ kr="그림 ",
la="Imagines",
lt="Iliustracijos",
nb="Figurer",
@@ -2392,7 +2431,7 @@ data.labels={
hu="Grafikák",
it="Grafici",
ja="グラフ",
- kr="그래픽",
+ kr="그래픽 ",
la="Typi",
lt="Graphics",
nb="Bilde",
@@ -2429,7 +2468,7 @@ data.labels={
hu="Index",
it="Indice",
ja="目次",
- kr="색인",
+ kr="찾아보기",
la="Indices",
lt="Rodyklė",
nb="Register",
@@ -2537,6 +2576,7 @@ data.labels={
hr="Literatura",
hu="Bibliográfia",
it="Bibliografia",
+ kr="참고문헌",
la="",
lt="Literatūra",
nb="",
@@ -2573,7 +2613,7 @@ data.labels={
hu="Táblázatok",
it="Tabelle",
ja="机",
- kr="표",
+ kr="표 ",
la="Tabulae",
lt="Lentelės",
nb="Tabeller",
@@ -2630,4 +2670,135 @@ data.labels={
},
},
},
+ btx = {
+ ["mastersthesis"] = {
+ labels = {
+ en = "Master's thesis",
+ fr = "Thèse de master (DEA, DESS, master)",
+ de = "Masterarbeit",
+ },
+ },
+ ["phdthesis"] = {
+ labels = {
+ en = "PhD thesis",
+ fr = "Thèse de doctorat",
+ de = "Dissertation",
+ },
+ },
+ ["technicalreport"] = {
+ labels = {
+ en = "Technical report",
+ fr = "Rapport technique",
+ de = "Technischer Bericht",
+ },
+ },
+ --
+ ["editor"] = {
+ labels = {
+ en = "editor",
+ fr = "éditeur",
+ de = "Herausgeber",
+ },
+ },
+ ["editors"] = {
+ labels = {
+ en = "editors",
+ fr = "éditeurs",
+ de = "Herausgeber",
+ },
+ },
+ ["edition"] = {
+ labels = {
+ en = "edition",
+ fr = "édition",
+ de = "Auflage",
+ },
+ },
+ --
+ ["volume"] = {
+ labels = {
+ en = "volume",
+ de = "Band",
+ },
+ },
+ ["Volume"] = {
+ labels = {
+ en = "Volume",
+ de = "Band",
+ },
+ },
+ ["number"] = {
+ labels = {
+ en = "number",
+ fr = "numéro",
+ de = "Numer",
+ },
+ },
+ ["Number"] = {
+ labels = {
+ en = "Number",
+ fr = "Numéro",
+ de = "Numer",
+ },
+ },
+ ["in"] = {
+ labels = {
+ en = "in",
+ fr = "dans",
+ de = "in",
+ },
+ },
+ ["of"] = {
+ labels = {
+ en = "of",
+ fr = "de",
+ de = "von",
+ },
+ },
+ --
+ ["In"] = {
+ labels = {
+ en = "In",
+ fr = "Dans",
+ de = "In",
+ },
+ },
+ --
+ ["p"] = {
+ labels = {
+ en = "p",
+ de = "S",
+ },
+ },
+ ["pp"] = {
+ labels = {
+ en = "pp",
+ de = "S",
+ },
+ },
+ ["pages"] = {
+ labels = {
+ en = "pages",
+ de = "Seiten",
+ },
+ },
+ --
+ ["and"] = {
+ labels = {
+ en = "and",
+ de = "und",
+ },
+ },
+ ["others"] = {
+ labels = {
+ en = "et al.",
+ },
+ },
+ }
}
+
+local functions = data.labels.functions
+
+functions.asin = functions.arcsin
+functions.acos = functions.arccos
+functions.atan = functions.arctan
diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua
index 4ed5cdea1..39418beef 100644
--- a/tex/context/base/lang-url.lua
+++ b/tex/context/base/lang-url.lua
@@ -8,10 +8,16 @@ if not modules then modules = { } end modules ['lang-url'] = {
local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
-commands = commands or { }
-local commands = commands
+local commands = commands
+local context = context
-context = context
+local implement = interfaces.implement
+
+local variables = interfaces.variables
+local v_before = variables.before
+local v_after = variables.after
+
+local is_letter = characters.is_letter
--[[
<p>Hyphenating <l n='url'/>'s is somewhat tricky and a matter of taste. I did
@@ -24,39 +30,39 @@ commands.hyphenatedurl = commands.hyphenatedurl or { }
local hyphenatedurl = commands.hyphenatedurl
local characters = utilities.storage.allocate {
- ["!"] = 1,
- ["\""] = 1,
- ["#"] = 1,
- ["$"] = 1,
- ["%"] = 1,
- ["&"] = 1,
- ["("] = 1,
- ["*"] = 1,
- ["+"] = 1,
- [","] = 1,
- ["-"] = 1,
- ["."] = 1,
- ["/"] = 1,
- [":"] = 1,
- [";"] = 1,
- ["<"] = 1,
- ["="] = 1,
- [">"] = 1,
- ["?"] = 1,
- ["@"] = 1,
- ["["] = 1,
- ["\\"] = 1,
- ["^"] = 1,
- ["_"] = 1,
- ["`"] = 1,
- ["{"] = 1,
- ["|"] = 1,
- ["~"] = 1,
-
- ["'"] = 2,
- [")"] = 2,
- ["]"] = 2,
- ["}"] = 2,
+ ["!"] = "before",
+ ["\""] = "before",
+ ["#"] = "before",
+ ["$"] = "before",
+ ["%"] = "before",
+ ["&"] = "before",
+ ["("] = "before",
+ ["*"] = "before",
+ ["+"] = "before",
+ [","] = "before",
+ ["-"] = "before",
+ ["."] = "before",
+ ["/"] = "before",
+ [":"] = "before",
+ [";"] = "before",
+ ["<"] = "before",
+ ["="] = "before",
+ [">"] = "before",
+ ["?"] = "before",
+ ["@"] = "before",
+ ["["] = "before",
+ ["\\"] = "before",
+ ["^"] = "before",
+ ["_"] = "before",
+ ["`"] = "before",
+ ["{"] = "before",
+ ["|"] = "before",
+ ["~"] = "before",
+
+ ["'"] = "after",
+ [")"] = "after",
+ ["]"] = "after",
+ ["}"] = "after",
}
local mapping = utilities.storage.allocate {
@@ -72,27 +78,81 @@ hyphenatedurl.discretionary = nil
-- more fun is to write nodes .. maybe it's nicer to do this
-- in an attribute handler anyway
+-- local ctx_a = context.a
+-- local ctx_b = context.b
+-- local ctx_d = context.d
+-- local ctx_n = context.n
+-- local ctx_s = context.s
+
+-- local function action(hyphenatedurl,str,left,right,disc)
+-- local n = 0
+-- local b = math.max( left or hyphenatedurl.lefthyphenmin, 2)
+-- local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str)
+-- local d = disc or hyphenatedurl.discretionary
+-- local p = nil
+-- for s in utfcharacters(str) do
+-- n = n + 1
+-- s = mapping[s] or s
+-- if n > 1 then
+-- ctx_s() -- can be option
+-- end
+-- if s == d then
+-- ctx_d(utfbyte(s))
+-- else
+-- local c = characters[s]
+-- if not c or n <= b or n >= e then
+-- ctx_n(utfbyte(s))
+-- elseif c == 1 then
+-- ctx_b(utfbyte(s))
+-- elseif c == 2 then
+-- ctx_a(utfbyte(s))
+-- end
+-- end
+-- p = s
+-- end
+-- end
+
+local ctx_a = context.a
+local ctx_b = context.b
+local ctx_d = context.d
+local ctx_c = context.c
+local ctx_l = context.l
+local ctx_C = context.C
+local ctx_L = context.L
+
local function action(hyphenatedurl,str,left,right,disc)
local n = 0
local b = math.max( left or hyphenatedurl.lefthyphenmin, 2)
local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str)
local d = disc or hyphenatedurl.discretionary
+ local p = nil
for s in utfcharacters(str) do
n = n + 1
s = mapping[s] or s
- if n > 1 then
- context.s() -- can be option
- end
if s == d then
- context.d(utfbyte(s))
+ ctx_d(utfbyte(s))
else
local c = characters[s]
- if not c or n<=b or n>=e then
- context.n(utfbyte(s))
- elseif c == 1 then
- context.b(utfbyte(s))
- elseif c == 2 then
- context.a(utfbyte(s))
+ if c == v_before then
+ p = false
+ ctx_b(utfbyte(s))
+ elseif c == v_after then
+ p = false
+ ctx_a(utfbyte(s))
+ else
+ local l = is_letter[s]
+ if n <= b or n >= e then
+ if p and l then
+ ctx_L(utfbyte(s))
+ else
+ ctx_C(utfbyte(s))
+ end
+ elseif p and l then
+ ctx_l(utfbyte(s))
+ else
+ ctx_c(utfbyte(s))
+ end
+ p = l
end
end
end
@@ -106,8 +166,21 @@ table.setmetatablecall(hyphenatedurl,action) -- watch out: a caller
function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after
for s in utfcharacters(str) do
- characters[s] = value or 1
+ characters[s] = value or v_before
end
end
-- .hyphenatedurl.setcharacters("')]}",2)
+
+implement {
+ name = "sethyphenatedurlcharacters",
+ actions = hyphenatedurl.setcharacters,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "hyphenatedurl",
+ scope = "private",
+ actions = function(...) action(hyphenatedurl,...) end,
+ arguments = { "string", "integer", "integer", "string" }
+}
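+-- A note on the control flow (sketch, not from the sources): the "hyphenatedurl"
+-- scanner feeds action() which pipes every character back to TeX through the
+-- \a \b \c \l \C \L \d macros that lang-url.mkiv (below) binds to \lang_url_*.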
diff --git a/tex/context/base/lang-url.mkiv b/tex/context/base/lang-url.mkiv
index 1bbe16838..e7d62ba01 100644
--- a/tex/context/base/lang-url.mkiv
+++ b/tex/context/base/lang-url.mkiv
@@ -44,18 +44,21 @@
\newtoks\everyhyphenatedurl
\appendtoks
- \let\&\letterampersand
- \let\#\letterhash
- \let\~\lettertilde
- \let\\\letterbackslash
- \let\$\letterdollar
- \let\^\letterhat
- \let\_\letterunderscore
- \let\{\letterleftbrace
- \let\}\letterrightbrace
- \let\|\letterbar
- \let~=\lettertilde
- \let|=\letterbar
+ \resetfontfeature
+ \resetcharacterspacing
+ %
+ \let\&\letterampersand
+ \let\#\letterhash
+ \let\~\lettertilde
+ \let\\\letterbackslash
+ \let\$\letterdollar
+ \let\^\letterhat
+ \let\_\letterunderscore
+ \let\{\letterleftbrace
+ \let\}\letterrightbrace
+ \let\|\letterbar
+ \let~=\lettertilde
+ \let|=\letterbar
\to \everyhyphenatedurl
\let\hyphenatedurlseparator \empty % \periodcentered
@@ -64,47 +67,54 @@
\setnewconstant\hyphenatedurllefthyphenmin \plusthree
\setnewconstant\hyphenatedurlrighthyphenmin\plusthree
-\def\lang_url_space {\nobreak\hskip\zeropoint plus\onepoint\nobreak}
-\def\lang_url_after #1{\char#1\discretionary{}{\hyphenatedurlseparator}{}}
-\def\lang_url_before #1{\discretionary{\hyphenatedurlseparator}{}{}\char#1\relax}
-\def\lang_url_normal #1{\char#1\relax}
-\def\lang_url_disc #1{\discretionary{}{}{}}
-
-\def\lang_url_space_trace {\nobreak\begingroup\darkyellow\ruledhskip\zeropoint plus\onepoint\endgroup\nobreak}
-\def\lang_url_after_trace #1{\char#1\hsmash{\darkblue\vl}\discretionary{}{\hyphenatedurlseparator}{}}
-\def\lang_url_before_trace#1{\discretionary{\hyphenatedurlseparator}{}{}\hsmash{\darkred\vl}\char#1\relax}
-\def\lang_url_normal_trace#1{\char#1\relax}
-\def\lang_url_disc_trace #1{\discretionary{\hsmash{\darkgreen\vl}}{\hsmash{\darkgreen\vl}}{\hsmash{\darkgreen\vl}}}
-
-\def\showhyphenatedurlbreaks
- {\let\lang_url_space \lang_url_space_trace
- \let\lang_url_after \lang_url_after_trace
- \let\lang_url_before\lang_url_before_trace
- \let\lang_url_normal\lang_url_normal_trace
- \let\lang_url_disc \lang_url_disc_trace}
-
-\def\sethyphenatedurlnormal#1{\ctxcommand{hyphenatedurl.setcharacters(\!!bs#1\!!es,0)}} % Lua names will change
-\def\sethyphenatedurlbefore#1{\ctxcommand{hyphenatedurl.setcharacters(\!!bs#1\!!es,1)}} % Lua names will change
-\def\sethyphenatedurlafter #1{\ctxcommand{hyphenatedurl.setcharacters(\!!bs#1\!!es,2)}} % Lua names will change
+\def\lang_url_more{\penalty\plustenthousand\hskip\scratchskipone\penalty\plustenthousand}
+\def\lang_url_less{\penalty\plustenthousand\hskip\scratchskiptwo\penalty\plustenthousand}
+\def\lang_url_trac{\penalty\plustenthousand\hsmash{\darkred\vl}\penalty\plustenthousand}
+
+\def\lang_url_a#1{\lang_url_more\char#1\lang_url_show\discretionary{}{\hyphenatedurlseparator}{}}
+\def\lang_url_b#1{\lang_url_more\discretionary{\hyphenatedurlseparator}{}{}\lang_url_show\char#1\relax}
+\def\lang_url_d#1{\lang_url_more\discretionary{\lang_url_show}{\lang_url_show}{\lang_url_show}}
+\def\lang_url_l#1{\lang_url_less\char#1\relax}
+\def\lang_url_c#1{\lang_url_more\char#1\relax}
+\def\lang_url_L#1{\lang_url_less\char#1\relax}
+\def\lang_url_C#1{\lang_url_more\char#1\relax}
+
+\unexpanded\def\showhyphenatedurlbreaks % for old times sake
+ {\let\lang_url_show\lang_url_trac}
+
+\installtextracker
+ {languages.url}
+ {\let\lang_url_show\lang_url_trac}
+ {\let\lang_url_show\relax}
+
+\let\lang_url_show\relax
+
+\unexpanded\def\sethyphenatedurlnormal#1{\clf_sethyphenatedurlcharacters{#1}{\v!normal}}
+\unexpanded\def\sethyphenatedurlbefore#1{\clf_sethyphenatedurlcharacters{#1}{\v!before}}
+\unexpanded\def\sethyphenatedurlafter #1{\clf_sethyphenatedurlcharacters{#1}{\v!after }}
% \sethyphenatedurlafter{ABCDEF}
-\unexpanded \def\hyphenatedurl#1%
+\unexpanded\def\hyphenatedurl#1%
{\dontleavehmode
\begingroup
\the\everyhyphenatedurl
\normallanguage\zerocount
- \let\s\lang_url_space
- \let\a\lang_url_after
- \let\b\lang_url_before
- \let\n\lang_url_normal
- \let\d\lang_url_disc
- \normalexpanded{\noexpand\ctxcommand{hyphenatedurl(
- \!!bs\noexpand\detokenize{#1}\!!es,
- \number\hyphenatedurllefthyphenmin,
- \number\hyphenatedurlrighthyphenmin,
- "\hyphenatedurldiscretionary"
- )}}%
+ \let\a\lang_url_a % after
+ \let\b\lang_url_b % before
+ \let\d\lang_url_d % disc
+ \let\l\lang_url_l % letter
+ \let\c\lang_url_c % character
+ \let\L\lang_url_L % letter_nobreak
+ \let\C\lang_url_C % character_nobreak
+ \scratchskipone\zeropoint\s!plus\dimexpr\emwidth/12\s!minus\dimexpr\emwidth/24\relax % 1.00pt @ 12pt
+ \scratchskiptwo\zeropoint\s!plus\dimexpr\emwidth/48\s!minus\dimexpr\emwidth/96\relax % 0.25pt @ 12pt
+ \normalexpanded{\noexpand\clf_hyphenatedurl
+ {\noexpand\detokenize{#1}}%
+ \hyphenatedurllefthyphenmin
+ \hyphenatedurlrighthyphenmin
+ {\hyphenatedurldiscretionary}%
+ }%
\endgroup}
%D \macros
@@ -131,3 +141,31 @@
% \dorecurse{100}{\test{a} \test{ab} \test{abc} \test{abcd} \test{abcde} \test{abcdef}}
\protect \endinput
+
+% \setuppapersize[A7]
+%
+% \unexpanded\def\WhateverA#1%
+% {\dontleavehmode
+% \begingroup
+% \prehyphenchar"B7\relax
+% \setbox\scratchbox\hbox{\tttf#1}%
+% \prehyphenchar`-\relax
+% \unhbox\scratchbox
+% \endgroup}
+%
+% \unexpanded\def\WhateverB#1%
+% {\dontleavehmode
+% \begingroup
+% \tttf
+% \prehyphenchar\minusone
+% % \localrightbox{\llap{_}}%
+% \localrightbox{\llap{\smash{\lower1.5ex\hbox{\char"2192}}}}%
+% \setbox\scratchbox\hbox{#1}%
+% \prehyphenchar`-\relax
+% \unhbox\scratchbox
+% \endgroup}
+%
+% \begingroup \hsize1cm
+% \WhateverA{thisisaboringandverylongcommand}\par
+% \WhateverB{thisisaboringandverylongcommand}\par
+% \endgroup
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index bf066fc09..b564a02ae 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -14,7 +14,10 @@ local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
local report_words = logs.reporter("languages","words")
-local nodes, node, languages = nodes, node, languages
+local nodes = nodes
+local languages = languages
+
+local implement = interfaces.implement
languages.words = languages.words or { }
local words = languages.words
@@ -26,7 +29,18 @@ words.threshold = 4
local numbers = languages.numbers
local registered = languages.registered
-local traverse_nodes = node.traverse
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+
local wordsdata = words.data
local chardata = characters.data
local tasks = nodes.tasks
@@ -96,7 +110,7 @@ end
-- there is an n=1 problem somewhere in nested boxes
local function mark_words(head,whenfound) -- can be optimized and shared
- local current, language, done = head, nil, nil, 0, false
+ local current, language, done = tonut(head), nil, nil, 0, false
local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
local function action()
if s > 0 then
@@ -112,9 +126,9 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n, s = 0, 0
end
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local a = current.lang
+ local a = getfield(current,"lang")
if a then
if a ~= language then
if s > 0 then
@@ -126,16 +140,16 @@ local function mark_words(head,whenfound) -- can be optimized and shared
action()
language = a
end
- local components = current.components
+ local components = getfield(current,"components")
if components then
n = n + 1
nds[n] = current
for g in traverse_nodes(components) do
s = s + 1
- str[s] = utfchar(g.char)
+ str[s] = utfchar(getchar(g))
end
else
- local code = current.char
+ local code = getchar(current)
local data = chardata[code]
if is_letter[data.category] then
n = n + 1
@@ -151,12 +165,12 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n = n + 1
nds[n] = current
end
- elseif id == kern_code and current.subtype == kerning_code and s > 0 then
+ elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
-- ok
elseif s > 0 then
action()
end
- current = current.next
+ current = getnext(current)
end
if s > 0 then
action()
@@ -176,6 +190,8 @@ local enabled = false
function words.check(head)
if enabled then
return methods[wordmethod](head)
+ elseif not head then
+ return head, false
else
return head, false
end
@@ -207,7 +223,7 @@ table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
else
c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
end
- local v = c and function(n) n[a_color] = c end or false
+ local v = c and function(n) setattr(n,a_color,c) end or false
t[k] = v
return v
end)
@@ -226,7 +242,7 @@ end
methods[1] = function(head)
for n in traverse_nodes(head) do
- n[a_color] = unsetvalue -- hm, not that selective (reset color)
+ setattr(n,a_color,unsetvalue) -- hm, not that selective (reset color)
end
return mark_words(head,sweep)
end
@@ -327,7 +343,7 @@ end
methods[3] = function(head)
for n in traverse_nodes(head) do
- n[a_color] = unsetvalue
+ setattr(n,a_color,unsetvalue)
end
return mark_words(head,sweep)
end
@@ -348,6 +364,24 @@ end
-- interface
-commands.enablespellchecking = words.enable
-commands.disablespellchecking = words.disable
-commands.loadspellchecklist = words.load
+implement {
+ name = "enablespellchecking",
+ actions = words.enable,
+ arguments = {
+ {
+ { "method" },
+ { "list" }
+ }
+ }
+}
+
+implement {
+ name = "disablespellchecking",
+ actions = words.disable
+}
+
+implement {
+ name = "loadspellchecklist",
+ arguments = { "string", "string" },
+ actions = words.load
+}
diff --git a/tex/context/base/lang-wrd.mkiv b/tex/context/base/lang-wrd.mkiv
index 682489912..8c544773b 100644
--- a/tex/context/base/lang-wrd.mkiv
+++ b/tex/context/base/lang-wrd.mkiv
@@ -40,15 +40,18 @@
\appendtoks
\doifelse{\directspellcheckingparameter\c!state}\v!start
- {\ctxcommand{enablespellchecking { method = "\directspellcheckingparameter\c!method", list = "\directspellcheckingparameter\c!list" }}}
- {\ctxcommand{disablespellchecking()}}%
+ {\clf_enablespellchecking
+ method {\directspellcheckingparameter\c!method}%
+ list {\directspellcheckingparameter\c!list}%
+ \relax}
+ {\clf_disablespellchecking}%
\to \everysetupspellchecking
\unexpanded\def\loadspellchecklist
{\dodoubleempty\lang_spellchecking_load_list}
\def\lang_spellchecking_load_list[#1][#2]%
- {\ctxcommand{loadspellchecklist("#1","#2")}}
+ {\clf_loadspellchecklist{#1}{#2}}
\setupspellchecking
[\c!state=\v!stop,
diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua
index 3f0e718b3..f5e320a00 100644
--- a/tex/context/base/lpdf-ano.lua
+++ b/tex/context/base/lpdf-ano.lua
@@ -10,46 +10,66 @@ if not modules then modules = { } end modules ['lpdf-ano'] = {
-- todo: /AA << WC << ... >> >> : WillClose actions etc
-local next, tostring = next, tostring
-local rep, format = string.rep, string.format
+-- internal references are indicated by a number (and turned into <autoprefix><number>)
+-- we only flush internal destinations that are referred to
+
+local next, tostring, tonumber, rawget = next, tostring, tonumber, rawget
+local rep, format, find = string.rep, string.format, string.find
+local min = math.min
local lpegmatch = lpeg.match
local formatters = string.formatters
local backends, lpdf = backends, lpdf
-local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
-local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
+local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+
+local log_destinations = false directives.register("destinations.log", function(v) log_destinations = v end)
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_bookmark = logs.reporter("backend","bookmarks")
+local report_reference = logs.reporter("backend","references")
+local report_destination = logs.reporter("backend","destinations")
+local report_bookmark = logs.reporter("backend","bookmarks")
local variables = interfaces.variables
-local constants = interfaces.constants
+local v_auto = variables.auto
+local v_page = variables.page
+
+local factor = number.dimenfactors.bp
local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
+local getpos = codeinjections.getpos
+local gethpos = codeinjections.gethpos
+local getvpos = codeinjections.getvpos
+
local javascriptcode = interactions.javascripts.code
local references = structures.references
local bookmarks = structures.bookmarks
+local flaginternals = references.flaginternals
+local usedinternals = references.usedinternals
+local usedviews = references.usedviews
+
local runners = references.runners
local specials = references.specials
local handlers = references.handlers
local executers = references.executers
-local getinnermethod = references.getinnermethod
local nodepool = nodes.pool
-local pdfannotation_node = nodepool.pdfannotation
-local pdfdestination_node = nodepool.pdfdestination
-local latelua_node = nodepool.latelua
+----- pdfannotation_node = nodepool.pdfannotation
+----- pdfdestination_node = nodepool.pdfdestination
+----- latelua_node = nodepool.latelua
+local latelua_function_node = nodepool.lateluafunction -- still node ... todo
local texgetcount = tex.getcount
@@ -63,7 +83,12 @@ local pdfshareobjectreference = lpdf.shareobjectreference
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
local pdfdelayedobject = lpdf.delayedobject
-local pdfregisterannotation = lpdf.registerannotation
+local pdfregisterannotation = lpdf.registerannotation -- forward definition (for the moment)
+local pdfnull = lpdf.null
+local pdfaddtocatalog = lpdf.addtocatalog
+local pdfaddtonames = lpdf.addtonames
+local pdfaddtopageattributes = lpdf.addtopageattributes
+local pdfrectangle = lpdf.rectangle
-- todo: 3dview
@@ -79,102 +104,439 @@ local pdf_t = pdfconstant("T")
local pdf_fit = pdfconstant("Fit")
local pdf_named = pdfconstant("Named")
-local pdf_border = pdfarray { 0, 0, 0 }
+local autoprefix = "#"
-local cache = { }
+-- Bah, I hate this kind of feature .. anyway, as we have delayed resolving we
+-- only support a document-wide setup and it has to be set before the first one
+-- is used. Also, we default to a non-intrusive gray and the outline is kept
+-- thin without dashed lines. This is as far as I'm prepared to go. This way
+-- it can also be used as a debug feature.
-local function pagedestination(n) -- only cache fit
- if n > 0 then
- local pd = cache[n]
- if not pd then
- local a = pdfarray {
- pdfreference(pdfpagereference(n)),
- pdf_fit,
- }
- pd = pdfshareobjectreference(a)
- cache[n] = pd
+local pdf_border_style = pdfarray { 0, 0, 0 } -- radius radius linewidth
+local pdf_border_color = nil
+local set_border = false
+
+local function pdfborder()
+ set_border = true
+ return pdf_border_style, pdf_border_color
+end
+
+lpdf.border = pdfborder
+
+directives.register("references.border",function(v)
+ if v and not set_border then
+ if type(v) == "string" then
+ local m = attributes.list[attributes.private('color')] or { }
+ local c = m and m[v]
+ local v = c and attributes.colors.value(c)
+ if v then
+ local r, g, b = v[3], v[4], v[5]
+ -- if r == g and g == b then
+ -- pdf_border_color = pdfarray { r } -- reduced, but not used ... buggy viewers
+ -- else
+ pdf_border_color = pdfarray { r, g, b } -- always rgb
+ -- end
+ end
+ end
+ if not pdf_border_color then
+ pdf_border_color = pdfarray { .6, .6, .6 } -- not reduced to { 0.6 } as there are buggy viewers out there
end
- return pd
+ pdf_border_style = pdfarray { 0, 0, .5 } -- < 0.5 is not shown by acrobat (at least not in my version)
end
-end
+end)
+
+-- the used and flag code here is somewhat messy in the sense
+-- that it belongs in strc-ref but at the same time depends on
+-- the backend so we keep it here
+
+-- the caching is somewhat memory intense on the one hand but
+-- it saves many small temporary tables so it might pay off
+
+local pagedestinations = allocate()
+local pagereferences = allocate() -- annots are cached themselves
+
+setmetatableindex(pagedestinations, function(t,k)
+ k = tonumber(k)
+ if not k or k <= 0 then
+ return pdfnull()
+ end
+ local v = rawget(t,k)
+ if v then
+ -- report_reference("page number expected, got %s: %a",type(k),k)
+ return v
+ end
+ local v = k > 0 and pdfarray {
+ pdfreference(pdfpagereference(k)),
+ pdf_fit,
+ } or pdfnull()
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(pagereferences,function(t,k)
+ k = tonumber(k)
+ if not k or k <= 0 then
+ return nil
+ end
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ local v = pdfdictionary { -- can be cached
+ S = pdf_goto,
+ D = pagedestinations[k],
+ }
+ t[k] = v
+ return v
+end)
-lpdf.pagedestination = pagedestination
+lpdf.pagereferences = pagereferences -- table
+lpdf.pagedestinations = pagedestinations -- table
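+-- A sketch of what the two tables above memoize (assumption, based on the code):
+--
+--   pagedestinations[3]  ->  [ <reference to page 3> /Fit ]
+--   pagereferences[3]    ->  << /S /GoTo /D [ <reference to page 3> /Fit ] >>
+--
+-- while non-positive or non-numeric keys fall back to the pdf null object
+-- respectively nil.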
local defaultdestination = pdfarray { 0, pdf_fit }
-local function link(url,filename,destination,page,actions)
- if filename and filename ~= "" then
- if file.basename(filename) == tex.jobname then
- return false
- else
- filename = file.addsuffix(filename,"pdf")
+-- fit is default (see lpdf-nod)
+
+local destinations = { } -- to be used soon
+
+local function pdfregisterdestination(name,reference)
+ local d = destinations[name]
+ if d then
+ report_destination("ignoring duplicate destination %a with reference %a",name,reference)
+ else
+ destinations[name] = reference
+ end
+end
+
+lpdf.registerdestination = pdfregisterdestination
+
+local maxslice = 32 -- could be made configurable ... 64 is also ok
+
+luatex.registerstopactions(function()
+ if log_destinations and next(destinations) then
+ local logsnewline = logs.newline
+ local log_destinations = logs.reporter("system","references")
+ local log_destination = logs.reporter("destination")
+ logs.pushtarget("logfile")
+ logsnewline()
+ log_destinations("start used destinations")
+ logsnewline()
+ local n = 0
+ for destination, pagenumber in table.sortedhash(destinations) do
+ log_destination("% 4i : %-5s : %s",pagenumber,usedviews[destination] or defaultview,destination)
+ n = n + 1
+ end
+ logsnewline()
+ log_destinations("stop used destinations")
+ logsnewline()
+ logs.poptarget()
+ report_destination("%s destinations saved in log file",n)
+ end
+end)
+
+
+local function pdfnametree(destinations)
+ local slices = { }
+ local sorted = table.sortedkeys(destinations)
+ local size = #sorted
+
+ if size <= 1.5*maxslice then
+ maxslice = size
+ end
+
+ for i=1,size,maxslice do
+ local amount = min(i+maxslice-1,size)
+ local names = pdfarray { }
+ for j=i,amount do
+ local destination = sorted[j]
+ local pagenumber = destinations[destination]
+ names[#names+1] = tostring(destination) -- tostring is a safeguard
+ names[#names+1] = pdfreference(pagenumber)
+ end
+ local first = sorted[i]
+ local last = sorted[amount]
+ local limits = pdfarray {
+ first,
+ last,
+ }
+ local d = pdfdictionary {
+ Names = names,
+ Limits = limits,
+ }
+ slices[#slices+1] = {
+ reference = pdfreference(pdfflushobject(d)),
+ limits = limits,
+ }
+ end
+ local function collectkids(slices,first,last)
+ local k = pdfarray()
+ local d = pdfdictionary {
+ Kids = k,
+ Limits = pdfarray {
+ slices[first].limits[1],
+ slices[last ].limits[2],
+ },
+ }
+ for i=first,last do
+ k[#k+1] = slices[i].reference
+ end
+ return d
+ end
+ if #slices == 1 then
+ return slices[1].reference
+ else
+ while true do
+ if #slices > maxslice then
+ local temp = { }
+ local size = #slices
+ for i=1,size,maxslice do
+ local kids = collectkids(slices,i,min(i+maxslice-1,size))
+ temp[#temp+1] = {
+ reference = pdfreference(pdfflushobject(kids)),
+ limits = kids.Limits,
+ }
+ end
+ slices = temp
+ else
+ return pdfreference(pdfflushobject(collectkids(slices,1,#slices)))
+ end
+ end
+ end
+end
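+-- The resulting structure is a standard pdf name tree (sketch, object numbers made
+-- up): each leaf holds at most maxslice destinations,
+--
+--   << /Names [ (name1) 12 0 R (name2) 13 0 R ... ] /Limits [ (name1) (nameN) ] >>
+--
+-- and, when there is more than one slice, nodes with /Kids and /Limits are stacked
+-- until a single root remains, which is then registered under Dests (see
+-- pdfdestinationspecification below).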
+
+local function pdfdestinationspecification()
+ if next(destinations) then -- safeguard
+ local r = pdfnametree(destinations)
+ -- pdfaddtocatalog("Dests",r)
+ pdfaddtonames("Dests",r)
+ if not log_destinations then
+ destinations = nil
+ end
+ end
+end
+
+lpdf.nametree = pdfnametree
+lpdf.destinationspecification = pdfdestinationspecification
+
+lpdf.registerdocumentfinalizer(pdfdestinationspecification,"collect destinations")
+
+-- todo
+
+local destinations = { }
+
+local f_xyz = formatters["<< /D [ %i 0 R /XYZ %0.3F %0.3F null ] >>"]
+local f_fit = formatters["<< /D [ %i 0 R /Fit ] >>"]
+local f_fitb = formatters["<< /D [ %i 0 R /FitB ] >>"]
+local f_fith = formatters["<< /D [ %i 0 R /FitH %0.3F ] >>"]
+local f_fitv = formatters["<< /D [ %i 0 R /FitV %0.3F ] >>"]
+local f_fitbh = formatters["<< /D [ %i 0 R /FitBH %0.3F ] >>"]
+local f_fitbv = formatters["<< /D [ %i 0 R /FitBV %0.3F ] >>"]
+local f_fitr = formatters["<< /D [ %i 0 R /FitR %0.3F %0.3F %0.3F %0.3F ] >>"]
+
+local v_standard = variables.standard
+local v_frame = variables.frame
+local v_width = variables.width
+local v_minwidth = variables.minwidth
+local v_height = variables.height
+local v_minheight = variables.minheight
+local v_fit = variables.fit
+local v_tight = variables.tight
+
+-- it would be nicer to create dictionaries and set properties but it's a bit overkill
+
+-- The problem with the following settings is that they are guesses: we never know
+-- if a box is part of something larger that needs to be in view, or whether we are
+-- dealing with a vbox or vtop, so the used h/d values cannot be trusted in a tight
+-- view. Of course some decent additional offset would be nice, so maybe I'll add
+-- that some day. I never use anything other than 'fit' anyway, as I think that the
+-- document should fit the device (and vice versa). In fact, with today's swipe
+-- and finger zooming this whole view is rather useless, and as with any zooming
+-- one loses the overview and keeps zooming.
+
+local destinationactions = {
+ -- [v_standard] = function(r,w,h,d) return f_xyz (r,pdfrectangle(w,h,d)) end, -- local left,top with zoom (0 in our case)
+ [v_standard] = function(r,w,h,d) return f_xyz (r,gethpos()*factor,(getvpos()+h)*factor) end, -- local left,top with no zoom
+ [v_frame] = function(r,w,h,d) return f_fitr (r,pdfrectangle(w,h,d)) end, -- fit rectangle in window
+ -- [v_width] = function(r,w,h,d) return f_fith (r,gethpos()*factor) end, -- top coordinate, fit width of page in window
+ [v_width] = function(r,w,h,d) return f_fith (r,(getvpos()+h)*factor) end, -- top coordinate, fit width of page in window
+ -- [v_minwidth] = function(r,w,h,d) return f_fitbh(r,gethpos()*factor) end, -- top coordinate, fit width of content in window
+ [v_minwidth] = function(r,w,h,d) return f_fitbh(r,(getvpos()+h)*factor) end, -- top coordinate, fit width of content in window
+ -- [v_height] = function(r,w,h,d) return f_fitv (r,(getvpos()+h)*factor) end, -- left coordinate, fit height of page in window
+ [v_height] = function(r,w,h,d) return f_fitv (r,gethpos()*factor) end, -- left coordinate, fit height of page in window
+ -- [v_minheight] = function(r,w,h,d) return f_fitbv(r,(getvpos()+h)*factor) end, -- left coordinate, fit height of content in window
+ [v_minheight] = function(r,w,h,d) return f_fitbv(r,gethpos()*factor) end, -- left coordinate, fit height of content in window
+ [v_fit] = f_fit, -- fit page in window
+ [v_tight] = f_fitb, -- fit content in window
+}
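+-- For instance (sketch, numbers made up): with page object 10 and a top position of
+-- 700bp, the v_width action produces the string
+--
+--   << /D [ 10 0 R /FitH 700.000 ] >>
+--
+-- while the default v_fit view maps to f_fit, i.e. << /D [ 10 0 R /Fit ] >>.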
+
+local mapping = {
+ [v_standard] = v_standard, xyz = v_standard,
+ [v_frame] = v_frame, fitr = v_frame,
+ [v_width] = v_width, fith = v_width,
+ [v_minwidth] = v_minwidth, fitbh = v_minwidth,
+ [v_height] = v_height, fitv = v_height,
+ [v_minheight] = v_minheight, fitbv = v_minheight,
+ [v_fit] = v_fit, fit = v_fit,
+ [v_tight] = v_tight, fitb = v_tight,
+}
+
+local defaultview = v_fit
+local defaultaction = destinationactions[defaultview]
+
+-- A complication is that we need to use named destinations when we have views, so we
+-- end up with a mix. A previous version just output multiple destinations, but now
+-- that we have moved it all to here we can be more sparse.
+
+local pagedestinations = { }
+
+table.setmetatableindex(pagedestinations,function(t,k)
+ local v = pdfdelayedobject(f_fit(k))
+ t[k] = v
+ return v
+end)
+
+local function flushdestination(width,height,depth,names,view)
+ local r = pdfpagereference(texgetcount("realpageno"))
+ if view == defaultview or not view or view == "" then
+ r = pagedestinations[r]
+ else
+ local action = view and destinationactions[view] or defaultaction
+ r = pdfdelayedobject(action(r,width,height,depth))
+ end
+ for n=1,#names do
+ local name = names[n]
+ if name then
+ pdfregisterdestination(name,r)
end
end
- if url and url ~= "" then
- if filename and filename ~= "" then
- if destination and destination ~= "" then
- url = file.join(url,filename).."#"..destination
+end
+
+function nodeinjections.destination(width,height,depth,names,view)
+ -- todo check if begin end node / was comment
+ view = view and mapping[view] or defaultview
+ if trace_destinations then
+ report_destination("width %p, height %p, depth %p, names %|t, view %a",width,height,depth,names,view)
+ end
+ local method = references.innermethod
+ local noview = view == defaultview
+ local doview = false
+ -- we could save some aut's by using a name when given but it doesn't pay off apart
+ -- from making the code messy and tracing hard .. we only save some destinations
+ -- which we already share anyway
+ for n=1,#names do
+ local name = names[n]
+ if usedviews[name] then
+ -- already done, maybe a warning
+ elseif type(name) == "number" then
+ if noview then
+ usedviews[name] = view
+ names[n] = false
+ elseif method == v_page then
+ usedviews[name] = view
+ names[n] = false
else
- url = file.join(url,filename)
+ local used = usedinternals[name]
+ if used and used ~= defaultview then
+ usedviews[name] = view
+ names[n] = autoprefix .. name
+ doview = true
+ else
+ -- names[n] = autoprefix .. name
+ names[n] = false
+ end
end
+ elseif method == v_page then
+ usedviews[name] = view
+ else
+ usedviews[name] = view
+ doview = true
end
- return pdfdictionary {
- S = pdf_uri,
- URI = url,
- }
- elseif filename and filename ~= "" then
- -- no page ?
- if destination == "" then
+ end
+ if doview then
+ return latelua_function_node(function() flushdestination(width,height,depth,names,view) end)
+ end
+end
+
+-- we could share dictionaries ... todo
+
+local function somedestination(destination,internal,page) -- no view anyway
+ if references.innermethod ~= v_page then
+ if type(destination) == "number" then
+ if not internal then
+ internal = destination
+ end
destination = nil
end
- if not destination and page then
- destination = pdfarray { page - 1, pdf_fit }
+ if internal then
+ flaginternals[internal] = true -- for bookmarks and so
+ local used = usedinternals[internal]
+ if used == defaultview or used == true then
+ return pagereferences[page]
+ end
+ if type(destination) ~= "string" then
+ destination = autoprefix .. internal
+ end
+ return pdfdictionary {
+ S = pdf_goto,
+ D = destination,
+ }
end
- return pdfdictionary {
- S = pdf_gotor, -- can also be pdf_launch
- F = filename,
- D = destination or defaultdestination, -- D is mandate
- NewWindow = (actions.newwindow and true) or nil,
- }
- elseif destination and destination ~= "" then
- return pdfdictionary { -- can be cached
- S = pdf_goto,
- D = destination,
- }
- else
- local p = tonumber(page)
- if p and p > 0 then
- return pdfdictionary { -- can be cached
+ if destination then
+ -- hopefully this one is flushed
+ return pdfdictionary {
S = pdf_goto,
- D = pdfarray {
- pdfreference(pdfpagereference(p)),
- pdf_fit,
- }
+ D = destination,
}
- elseif trace_references then
- report_reference("invalid page reference %a",page)
end
end
- return false
+ return pagereferences[page]
end
-lpdf.link = link
+-- annotations
-function lpdf.launch(program,parameters)
- if program and program ~= "" then
- local d = pdfdictionary {
- S = pdf_launch,
- F = program,
- D = ".",
- }
- if parameters and parameters ~= "" then
- d.P = parameters
- end
- return d
+local pdflink = somedestination
+
+local function pdffilelink(filename,destination,page,actions)
+ if not filename or filename == "" or file.basename(filename) == tex.jobname then
+ return false
+ end
+ filename = file.addsuffix(filename,"pdf")
+ if not destination or destination == "" then
+ destination = pdfarray { (page or 0) - 1, pdf_fit }
+ end
+ return pdfdictionary {
+ S = pdf_gotor, -- can also be pdf_launch
+ F = filename,
+ D = destination or defaultdestination, -- D is mandate
+ NewWindow = actions.newwindow and true or nil,
+ }
+end
+
+local function pdfurllink(url,destination,page)
+ if not url or url == "" then
+ return false
+ end
+ if destination and destination ~= "" then
+ url = url .. "#" .. destination
end
+ return pdfdictionary {
+ S = pdf_uri,
+ URI = url,
+ }
+end
+
+local function pdflaunch(program,parameters)
+ if not program or program == "" then
+ return false
+ end
+ return pdfdictionary {
+ S = pdf_launch,
+ F = program,
+ D = ".",
+ P = parameters ~= "" and parameters or nil
+ }
end
-function lpdf.javascript(name,arguments)
+local function pdfjavascript(name,arguments)
local script = javascriptcode(name,arguments) -- make into object (hash)
if script then
return pdfdictionary {
@@ -200,7 +562,9 @@ local function pdfaction(actions)
if what then
what = what(a,actions)
end
- if what then
+ if action == what then
+ -- ignore this one, else we get a loop
+ elseif what then
action.Next = what
action = what
else
@@ -219,9 +583,11 @@ function codeinjections.prerollreference(actions) -- share can become option
if actions then
local main, n = pdfaction(actions)
if main then
- main = pdfdictionary {
+ local bs, bc = pdfborder()
+ main = pdfdictionary {
Subtype = pdf_link,
- Border = pdf_border,
+ Border = bs,
+ C = bc,
H = (not actions.highlight and pdf_n) or nil,
A = pdfshareobjectreference(main),
F = 4, -- print (mandate in pdf/a)
@@ -231,157 +597,146 @@ function codeinjections.prerollreference(actions) -- share can become option
end
end
-local function use_normal_annotations()
-
- local function reference(width,height,depth,prerolled) -- keep this one
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- return pdfannotation_node(width,height,depth,prerolled)
- end
- end
-
- local function finishreference()
- end
-
- return reference, finishreference
-
-end
+-- local function use_normal_annotations()
+--
+-- local function reference(width,height,depth,prerolled) -- keep this one
+-- if prerolled then
+-- if trace_references then
+-- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+-- end
+-- return pdfannotation_node(width,height,depth,prerolled)
+-- end
+-- end
+--
+-- local function finishreference()
+-- end
+--
+-- return reference, finishreference
+--
+-- end
-- eventually we can do this for special refs only
-local hashed, nofunique, nofused = { }, 0, 0
-
-local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"]
-local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"]
+local hashed = { }
+local nofunique = 0
+local nofused = 0
+local nofspecial = 0
+local share = true
-local function use_shared_annotations()
+local f_annot = formatters["<< /Type /Annot %s /Rect [ %0.3F %0.3F %0.3F %0.3F ] >>"]
- local factor = number.dimenfactors.bp
+directives.register("refences.sharelinks", function(v) share = v end)
- local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
- local h, v = pdf.h, pdf.v
- local llx, lly = h*factor, (v - depth)*factor
- local urx, ury = (h + width)*factor, (v + height)*factor
- local annot = f_annot(prerolled,llx,lly,urx,ury)
- local n = hashed[annot]
- if not n then
- n = pdfdelayedobject(annot)
- hashed[annot] = n
- nofunique = nofunique + 1
- end
- nofused = nofused + 1
- pdfregisterannotation(n)
+table.setmetatableindex(hashed,function(t,k)
+ local v = pdfdelayedobject(k)
+ if share then
+ t[k] = v
end
+ nofunique = nofunique + 1
+ return v
+end)
+
+local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
+ local annot = hashed[f_annot(prerolled,pdfrectangle(width,height,depth))]
+ nofused = nofused + 1
+ return pdfregisterannotation(annot)
+end
- _bpnf_ = finishreference
-
- local function reference(width,height,depth,prerolled)
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- local luacode = f_bpnf(width,height,depth,prerolled)
- return latelua_node(luacode)
- end
+local function finishannotation(width,height,depth,prerolled,r)
+ local annot = f_annot(prerolled,pdfrectangle(width,height,depth))
+ if r then
+ pdfdelayedobject(annot,r)
+ else
+ r = pdfdelayedobject(annot)
end
+ nofspecial = nofspecial + 1
+ return pdfregisterannotation(r)
+end
- statistics.register("pdf annotations", function()
- if nofused > 0 then
- return format("%s embedded, %s unique",nofused,nofunique)
- else
- return nil
+function nodeinjections.reference(width,height,depth,prerolled)
+ if prerolled then
+ if trace_references then
+ report_reference("link: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- end)
-
-
- return reference, finishreference
-
+ return latelua_function_node(function() finishreference(width,height,depth,prerolled) end)
+ end
end
-local lln = latelua_node() if node.has_field(lln,'string') then
-
- directives.register("refences.sharelinks", function(v)
- if v then
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
- else
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+function nodeinjections.annotation(width,height,depth,prerolled,r)
+ if prerolled then
+ if trace_references then
+ report_reference("special: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- end)
+ return latelua_function_node(function() finishannotation(width,height,depth,prerolled,r or false) end)
+ end
+end
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
+-- beware, we register during a latelua sweep so we have to make sure that
+-- we finalize after that (also in a latelua for the moment as we have no
+-- callback yet)
-else
+local annotations = nil
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+function lpdf.registerannotation(n)
+ if annotations then
+ annotations[#annotations+1] = pdfreference(n)
+ else
+ annotations = pdfarray { pdfreference(n) } -- no need to use lpdf.array cum suis
+ end
+end
-end node.free(lln)
+pdfregisterannotation = lpdf.registerannotation
--- -- -- --
--- -- -- --
+function lpdf.annotationspecification()
+ if annotations then
+ local r = pdfdelayedobject(tostring(annotations)) -- delayed so okay in latelua
+ pdfaddtopageattributes("Annots",pdfreference(r))
+ annotations = nil
+ end
+end
-local done = { } -- prevent messages
+lpdf.registerpagefinalizer(lpdf.annotationspecification,"finalize annotations")
-function nodeinjections.destination(width,height,depth,name,view)
- if not done[name] then
- done[name] = true
- if trace_destinations then
- report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view)
- end
- return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node
+statistics.register("pdf annotations", function()
+ if nofused > 0 or nofspecial > 0 then
+ return format("%s links (%s unique), %s special",nofused,nofunique,nofspecial)
+ else
+ return nil
end
-end
+end)
-- runners and specials
--- runners["inner"] = function(var,actions)
--- if getinnermethod() == "names" then
--- local vi = var.i
--- if vi then
--- local vir = vi.references
--- if vir then
--- local internal = vir.internal
--- if internal then
--- var.inner = "aut:" .. internal
--- end
--- end
--- end
--- else
--- var.inner = nil
--- end
--- local prefix = var.p
--- local inner = var.inner
--- if inner and prefix and prefix ~= "" then
--- inner = prefix .. ":" .. inner -- might not always be ok
--- end
--- return link(nil,nil,inner,var.r,actions)
--- end
-
runners["inner"] = function(var,actions)
local internal = false
- if getinnermethod() == "names" then
+ local inner = nil
+ if references.innermethod == v_auto then
local vi = var.i
if vi then
local vir = vi.references
if vir then
-- todo: no need for it when we have a real reference
+ local reference = vir.reference
+ if reference and reference ~= "" then
+ var.inner = reference
+ local prefix = var.p
+ if prefix and prefix ~= "" then
+ var.prefix = prefix
+ inner = prefix .. ":" .. reference
+ else
+ inner = reference
+ end
+ end
internal = vir.internal
if internal then
- var.inner = "aut:" .. internal
+ flaginternals[internal] = true
end
end
end
else
var.inner = nil
end
- local prefix = var.p
- local inner = var.inner
- if not internal and inner and prefix and prefix ~= "" then
- -- no prefix with e.g. components
- inner = prefix .. ":" .. inner
- end
- return link(nil,nil,inner,var.r,actions)
+ return pdflink(inner,internal,var.r)
end
runners["inner with arguments"] = function(var,actions)
@@ -391,12 +746,15 @@ end
runners["outer"] = function(var,actions)
local file, url = references.checkedfileorurl(var.outer,var.outer)
- return link(url,file,var.arguments,nil,actions)
+ if file then
+ return pdffilelink(file,var.arguments,nil,actions)
+ elseif url then
+ return pdfurllink(url,var.arguments,nil,actions)
+ end
end
runners["outer with inner"] = function(var,actions)
- local file = references.checkedfile(var.outer) -- was var.f but fails ... why
- return link(nil,file,var.inner,var.r,actions)
+ return pdffilelink(references.checkedfile(var.outer),var.inner,var.r,actions)
end
runners["special outer with operation"] = function(var,actions)
@@ -442,13 +800,10 @@ function specials.internal(var,actions) -- better resolve in strc-ref
local v = i and references.internals[i]
if not v then
-- error
- report_reference("no internal reference %a",i)
- elseif getinnermethod() == "names" then
- -- named
- return link(nil,nil,"aut:"..i,v.references.realpage,actions)
+ report_reference("no internal reference %a",i or "<unset>")
else
- -- page
- return link(nil,nil,nil,v.references.realpage,actions)
+ flaginternals[i] = true
+ return pdflink(nil,i,v.references.realpage)
end
end
@@ -461,8 +816,7 @@ local pages = references.pages
function specials.page(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
local p = var.r
if not p then -- todo: call special from reference code
@@ -472,29 +826,24 @@ function specials.page(var,actions)
else
p = references.realpageofpage(tonumber(p))
end
- -- if p then
- -- var.r = p
- -- end
end
- return link(nil,nil,nil,p or var.operation,actions)
+ return pdflink(nil,nil,p or var.operation)
end
end
function specials.realpage(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
- return link(nil,nil,nil,var.operation,actions)
+ return pdflink(nil,nil,var.operation)
end
end
function specials.userpage(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
local p = var.r
if not p then -- todo: call special from reference code
@@ -506,7 +855,7 @@ function specials.userpage(var,actions)
-- var.r = p
-- end
end
- return link(nil,nil,nil,p or var.operation,actions)
+ return pdflink(nil,nil,p or var.operation)
end
end
@@ -514,7 +863,7 @@ function specials.deltapage(var,actions)
local p = tonumber(var.operation)
if p then
p = references.checkedrealpage(p + texgetcount("realpageno"))
- return link(nil,nil,nil,p,actions)
+ return pdflink(nil,nil,p)
end
end
@@ -554,27 +903,29 @@ function specials.order(var,actions) -- references.specials !
end
function specials.url(var,actions)
- local url = references.checkedurl(var.operation)
- return link(url,nil,var.arguments,nil,actions)
+ return pdfurllink(references.checkedurl(var.operation),var.arguments,nil,actions)
end
function specials.file(var,actions)
- local file = references.checkedfile(var.operation)
- return link(nil,file,var.arguments,nil,actions)
+ return pdffilelink(references.checkedfile(var.operation),var.arguments,nil,actions)
end
function specials.fileorurl(var,actions)
local file, url = references.checkedfileorurl(var.operation,var.operation)
- return link(url,file,var.arguments,nil,actions)
+ if file then
+ return pdffilelink(file,var.arguments,nil,actions)
+ elseif url then
+ return pdfurllink(url,var.arguments,nil,actions)
+ end
end
function specials.program(var,content)
local program = references.checkedprogram(var.operation)
- return lpdf.launch(program,var.arguments)
+ return pdflaunch(program,var.arguments)
end
function specials.javascript(var)
- return lpdf.javascript(var.operation,var.arguments)
+ return pdfjavascript(var.operation,var.arguments)
end
specials.JS = specials.javascript
@@ -698,61 +1049,109 @@ function specials.action(var)
end
end
---~ entry.A = pdfdictionary {
---~ S = pdf_goto,
---~ D = ....
---~ }
-
-local function build(levels,start,parent,method)
- local startlevel = levels[start][1]
+local function build(levels,start,parent,method,nested)
+ local startlevel = levels[start].level
local i, n = start, 0
local child, entry, m, prev, first, last, f, l
while i and i <= #levels do
- local li = levels[i]
- local level, title, reference, open = li[1], li[2], li[3], li[4]
- if level < startlevel then
- pdfflushobject(child,entry)
- return i, n, first, last
- elseif level == startlevel then
- if trace_bookmarks then
- report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title)
- end
- local prev = child
- child = pdfreserveobject()
- if entry then
- entry.Next = child and pdfreference(child)
- pdfflushobject(prev,entry)
- end
- entry = pdfdictionary {
- Title = pdfunicode(title),
- Parent = parent,
- Prev = prev and pdfreference(prev),
- }
- if method == "internal" then
- entry.Dest = "aut:" .. reference.internal
- else -- if method == "page" then
- entry.Dest = pagedestination(reference.realpage)
- end
- if not first then first, last = child, child end
- prev = child
- last = prev
- n = n + 1
+ local current = levels[i]
+ if current.usedpage == false then
+ -- safeguard
i = i + 1
- elseif i < #levels and level > startlevel then
- i, m, f, l = build(levels,i,pdfreference(child),method)
- entry.Count = (open and m) or -m
- if m > 0 then
- entry.First, entry.Last = pdfreference(f), pdfreference(l)
- end
else
- -- missing intermediate level but ok
- i, m, f, l = build(levels,i,pdfreference(child),method)
- entry.Count = (open and m) or -m
- if m > 0 then
- entry.First, entry.Last = pdfreference(f), pdfreference(l)
+ local level = current.level
+ local title = current.title
+ local reference = current.reference
+ local opened = current.opened
+ local reftype = type(reference)
+ local variant = "unknown"
+ local realpage = nil -- shared by the branches below and the action selection
+ if reftype == "table" then
+ -- we're okay
+ variant = "list"
+ elseif reftype == "string" then
+ local resolved = references.identify("",reference)
+ realpage = resolved and structures.references.setreferencerealpage(resolved) or 0
+ if realpage > 0 then
+ variant = "realpage"
+ end
+ elseif reftype == "number" then
+ if reference > 0 then
+ variant = "realpage"
+ realpage = reference
+ end
+ else
+ -- error
+ end
+ if variant == "unknown" then
+ -- error, ignore
+ i = i + 1
+ elseif level <= startlevel then
+ if level < startlevel then
+ if nested then -- could be an option but otherwise we quit too soon
+ if entry then
+ pdfflushobject(child,entry)
+ else
+ report_bookmark("error 1")
+ end
+ return i, n, first, last
+ else
+ report_bookmark("confusing level change at level %a around %a",level,title)
+ end
+ end
+ if trace_bookmarks then
+ report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(opened and "+") or "-",title)
+ end
+ local prev = child
+ child = pdfreserveobject()
+ if entry then
+ entry.Next = child and pdfreference(child)
+ pdfflushobject(prev,entry)
+ end
+ local action = nil
+ if variant == "list" then
+ action = somedestination(reference.internal,reference.internal,reference.realpage)
+ elseif variant == "realpage" then
+ action = pagereferences[realpage]
+ end
+ entry = pdfdictionary {
+ Title = pdfunicode(title),
+ Parent = parent,
+ Prev = prev and pdfreference(prev),
+ A = action,
+ }
+ -- entry.Dest = somedestination(reference.internal,reference.internal,reference.realpage)
+ if not first then first, last = child, child end
+ prev = child
+ last = prev
+ n = n + 1
+ i = i + 1
+ elseif i < #levels and level > startlevel then
+ i, m, f, l = build(levels,i,pdfreference(child),method,true)
+ if entry then
+ entry.Count = (opened and m) or -m
+ if m > 0 then
+ entry.First = pdfreference(f)
+ entry.Last = pdfreference(l)
+ end
+ else
+ report_bookmark("error 2")
+ end
+ else
+ -- missing intermediate level but ok
+ i, m, f, l = build(levels,i,pdfreference(child),method,true)
+ if entry then
+ entry.Count = (opened and m) or -m
+ if m > 0 then
+ entry.First = pdfreference(f)
+ entry.Last = pdfreference(l)
+ end
+ pdfflushobject(child,entry)
+ else
+ report_bookmark("error 3")
+ end
+ return i, n, first, last
end
- pdfflushobject(child,entry)
- return i, n, first, last
end
end
pdfflushobject(child,entry)
@@ -760,10 +1159,9 @@ local function build(levels,start,parent,method)
end
function codeinjections.addbookmarks(levels,method)
- if #levels > 0 then
- structures.bookmarks.flatten(levels) -- dirty trick for lack of structure
+ if levels and #levels > 0 then
local parent = pdfreserveobject()
- local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal")
+ local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal",false)
local dict = pdfdictionary {
Type = pdfconstant("Outlines"),
First = pdfreference(first),
@@ -771,10 +1169,10 @@ function codeinjections.addbookmarks(levels,method)
Count = m,
}
pdfflushobject(parent,dict)
- lpdf.addtocatalog("Outlines",lpdf.reference(parent))
+ pdfaddtocatalog("Outlines",lpdf.reference(parent))
end
end
-- this could also be hooked into the frontend finalizer
-lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks")
+lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks") -- hm, why indirect call
diff --git a/tex/context/base/lpdf-col.lua b/tex/context/base/lpdf-col.lua
index b358d0820..877c01a1c 100644
--- a/tex/context/base/lpdf-col.lua
+++ b/tex/context/base/lpdf-col.lua
@@ -14,42 +14,50 @@ local formatters = string.formatters
local backends, lpdf, nodes = backends, lpdf, nodes
-local allocate = utilities.storage.allocate
-local formatters = string.formatters
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
-
-local pdfconstant = lpdf.constant
-local pdfstring = lpdf.string
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfverbose = lpdf.verbose
-local pdfflushobject = lpdf.flushobject
-local pdfflushstreamobject = lpdf.flushstreamobject
-
-local colors = attributes.colors
-local transparencies = attributes.transparencies
-local registertransparancy = transparencies.register
-local registercolor = colors.register
-local colorsvalue = colors.value
-local transparenciesvalue = transparencies.value
-local forcedmodel = colors.forcedmodel
-
-local c_transparency = pdfconstant("Transparency")
-
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
+local allocate = utilities.storage.allocate
+local formatters = string.formatters
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
+
+local pdfconstant = lpdf.constant
+local pdfstring = lpdf.string
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfverbose = lpdf.verbose
+local pdfflushobject = lpdf.flushobject
+local pdfdelayedobject = lpdf.delayedobject
+local pdfflushstreamobject = lpdf.flushstreamobject
+
+local pdfshareobjectreference = lpdf.shareobjectreference
+
+local addtopageattributes = lpdf.addtopageattributes
+local adddocumentcolorspace = lpdf.adddocumentcolorspace
+local adddocumentextgstate = lpdf.adddocumentextgstate
+
+local colors = attributes.colors
+local transparencies = attributes.transparencies
+local registertransparancy = transparencies.register
+local registercolor = colors.register
+local colorsvalue = colors.value
+local transparenciesvalue = transparencies.value
+local forcedmodel = colors.forcedmodel
+local getpagecolormodel = colors.getpagecolormodel
+
+local c_transparency = pdfconstant("Transparency")
+
+local f_gray = formatters["%.3F g %.3F G"]
+local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
+local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
local f_tr = formatters["Tr%s"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_cm = formatters["q %F %F %F %F %F %F cm"]
local f_effect = formatters["%s Tc %s w %s Tr"]
local f_tr_gs = formatters["/Tr%s gs"]
local f_num_1 = tostring
@@ -76,11 +84,13 @@ lpdf.transparencygroups = transparencygroups
table.setmetatableindex(transparencygroups, function(transparencygroups,colormodel)
local cs = colorspaceconstants[colormodel]
if cs then
- local g = pdfreference(pdfflushobject(pdfdictionary {
+ local d = pdfdictionary {
S = c_transparency,
CS = cs,
I = true,
- }))
+ }
+ -- local g = pdfreference(pdfflushobject(tostring(d)))
+ local g = pdfreference(pdfdelayedobject(tostring(d)))
transparencygroups[colormodel] = g
return g
else
@@ -89,26 +99,18 @@ table.setmetatableindex(transparencygroups, function(transparencygroups,colormod
end
end)
-local currentgroupcolormodel
-
local function addpagegroup()
- if currentgroupcolormodel then
- local g = transparencygroups[currentgroupcolormodel]
+ local model = getpagecolormodel()
+ if model then
+ local g = transparencygroups[model]
if g then
- lpdf.addtopageattributes("Group",g)
+ addtopageattributes("Group",g)
end
end
end
lpdf.registerpagefinalizer(addpagegroup,3,"pagegroup")
-local function synchronizecolormodel(model)
- currentgroupcolormodel = model
-end
-
-backends.codeinjections.synchronizecolormodel = synchronizecolormodel
-commands.synchronizecolormodel = synchronizecolormodel
-
-- injection code (needs a bit reordering)
-- color injection
@@ -175,7 +177,7 @@ local f_gray_function = formatters["%s mul"]
local documentcolorspaces = pdfdictionary()
-local spotcolorhash = { } -- not needed
+local spotcolorhash = { } -- not needed
local spotcolornames = { }
local indexcolorhash = { }
local delayedindexcolors = { }
@@ -193,7 +195,7 @@ end
-- This should become delayed i.e. only flush when used; in that case we need
-- need to store the specification and then flush them when accesssomespotcolor
-- is called. At this moment we assume that splotcolors that get defined are
--- also used which keeps the overhad small anyway.
+-- also used which keeps the overhead small anyway. Tricky for mp ...
local processcolors
@@ -224,13 +226,14 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
local mr = pdfreference(m)
spotcolorhash[name] = m
documentcolorspaces[name] = mr
- lpdf.adddocumentcolorspace(name,mr)
+ adddocumentcolorspace(name,mr)
else
local cnames = pdfarray()
local domain = pdfarray()
local colorants = pdfdictionary()
for n in gmatch(names,"[^,]+") do
local name = spotcolornames[n] or n
+ -- the cmyk names assume that they are indeed these colors
if n == "cyan" then
name = "Cyan"
elseif n == "magenta" then
@@ -241,9 +244,15 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
name = "Black"
else
local sn = spotcolorhash[name] or spotcolorhash[n]
+ if not sn then
+ report_color("defining %a as colorant",name)
+ colors.definespotcolor("",name,"p=1",true)
+ sn = spotcolorhash[name] or spotcolorhash[n]
+ end
if sn then
colorants[name] = pdfreference(sn)
else
+ -- maybe some day generate colorants (spot colors for multi) automatically
report_color("unknown colorant %a, using black instead",name or n)
name = "Black"
end
@@ -280,13 +289,13 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
cnames,
colorspace,
pdfreference(calculation),
- lpdf.shareobjectreference(tostring(channels)), -- optional but needed for shades
+ pdfshareobjectreference(tostring(channels)), -- optional but needed for shades
}
local m = pdfflushobject(array)
local mr = pdfreference(m)
spotcolorhash[name] = m
documentcolorspaces[name] = mr
- lpdf.adddocumentcolorspace(name,mr)
+ adddocumentcolorspace(name,mr)
end
end
@@ -336,7 +345,7 @@ local function registersomeindexcolor(name,noffractions,names,p,colorspace,range
end
vector = pdfverbose { "<", concat(vector, " "), ">" }
local n = pdfflushobject(pdfarray{ pdf_indexed, a, 255, vector })
- lpdf.adddocumentcolorspace(format("%s_indexed",name),pdfreference(n))
+ adddocumentcolorspace(format("%s_indexed",name),pdfreference(n))
return n
end
@@ -455,7 +464,7 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[0] = m
documenttransparencies[0] = mr
- lpdf.adddocumentextgstate("Tr0",mr)
+ adddocumentextgstate("Tr0",mr)
done = true
end
if n > 0 and not transparencyhash[n] then
@@ -470,13 +479,16 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[n] = m
documenttransparencies[n] = mr
- lpdf.adddocumentextgstate(f_tr(n),mr)
+ adddocumentextgstate(f_tr(n),mr)
end
end
statistics.register("page group warning", function()
- if done and not transparencygroups[currentgroupcolormodel] then
- return "transparencies are used but no pagecolormodel is set"
+ if done then
+ local model = getpagecolormodel()
+ if model and not transparencygroups[model] then
+ return "transparencies are used but no pagecolormodel is set"
+ end
end
end)
@@ -520,6 +532,12 @@ end
lpdf.color = lpdfcolor
+interfaces.implement {
+ name = "lpdf_color",
+ actions = { lpdfcolor, context },
+ arguments = "integer"
+}
+
function lpdf.colorspec(model,ca,default)
if ca and ca > 0 then
local cv = colors.value(ca)
@@ -544,7 +562,7 @@ function lpdf.colorspec(model,ca,default)
end
function lpdf.pdfcolor(attribute) -- bonus, for pgf and friends
- context(lpdfcolor(1,attribute))
+ return lpdfcolor(1,attribute)
end
function lpdf.transparency(ct,default) -- kind of overlaps with transparencycode
@@ -689,7 +707,7 @@ end
-- this will move to lpdf-spe.lua
-local f_slant = formatters["pdf: q 1 0 %f 1 0 0 cm"]
+local f_slant = formatters["pdf: q 1 0 %F 1 0 0 cm"]
backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua
index 61d57b8d3..dd5ecc609 100644
--- a/tex/context/base/lpdf-epa.lua
+++ b/tex/context/base/lpdf-epa.lua
@@ -10,30 +10,43 @@ if not modules then modules = { } end modules ['lpdf-epa'] = {
-- change.
local type, tonumber = type, tonumber
-local format, gsub = string.format, string.gsub
+local format, gsub, lower = string.format, string.gsub, string.lower
local formatters = string.formatters
----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
-local report_link = logs.reporter("backend","merging")
+local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
+local trace_outlines = false trackers.register("figures.outlines", function(v) trace_outlines = v end)
+local report_link = logs.reporter("backend","link")
+local report_outline = logs.reporter("backend","outline")
+
+local epdf = epdf
local backends = backends
local lpdf = lpdf
local context = context
+local loadpdffile = lpdf.epdf.load
+
+local nameonly = file.nameonly
+
local variables = interfaces.variables
local codeinjections = backends.pdf.codeinjections
----- urlescaper = lpegpatterns.urlescaper
----- utftohigh = lpegpatterns.utftohigh
local escapetex = characters.filters.utf.private.escape
+local bookmarks = structures.bookmarks
+
local layerspec = { -- predefining saves time
"epdflinks"
}
+local f_namespace = formatters["lpdf-epa-%s-"]
+
local function makenamespace(filename)
- return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename)))
+ filename = gsub(lower(nameonly(filename)),"[^%a%d]+","-")
+ return f_namespace(filename)
end
local function add_link(x,y,w,h,destination,what)
@@ -71,7 +84,7 @@ local function link_goto(x,y,w,h,document,annotation,pagedata,namespace)
if type(destination) == "string" then
local destinations = document.destinations
local wanted = destinations[destination]
- destination = wanted and wanted.D
+ destination = wanted and wanted.D -- is this ok? isn't destination already a string?
if destination then what = "named" end
end
local pagedata = destination and destination[1]
@@ -94,10 +107,17 @@ local function link_uri(x,y,w,h,document,annotation)
end
end
+-- The rules in PDF on what a 'file specification' is, is in fact quite elaborate
+-- (see section 3.10 in the 1.7 reference) so we need to test for string as well
+-- as a table. TH/20140916
+
local function link_file(x,y,w,h,document,annotation)
local a = annotation.A
if a then
local filename = a.F
+ if type(filename) == "table" then
+ filename = filename.F
+ end
if filename then
filename = escapetex(filename)
local destination = a.D
@@ -124,7 +144,7 @@ function codeinjections.mergereferences(specification)
end
if specification then
local fullname = specification.fullname
- local document = lpdf.epdf.load(fullname)
+ local document = loadpdffile(fullname) -- costs time
if document then
local pagenumber = specification.page or 1
local xscale = specification.yscale or 1
@@ -132,22 +152,31 @@ function codeinjections.mergereferences(specification)
local size = specification.size or "crop" -- todo
local pagedata = document.pages[pagenumber]
local annotations = pagedata and pagedata.Annots
+ local namespace = makenamespace(fullname)
+ local reference = namespace .. pagenumber
if annotations and annotations.n > 0 then
- local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname)))
- local reference = namespace .. pagenumber
- local mediabox = pagedata.MediaBox
- local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4]
- local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
+ local mediabox = pagedata.MediaBox
+ local llx = mediabox[1]
+ local lly = mediabox[2]
+ local urx = mediabox[3]
+ local ury = mediabox[4]
+ local width = xscale * (urx - llx) -- \\overlaywidth, \\overlayheight
+ local height = yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
for i=1,annotations.n do
local annotation = annotations[i]
if annotation then
- local subtype = annotation.Subtype
+ local subtype = annotation.Subtype
local rectangle = annotation.Rect
- local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4]
- local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly)
- local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly)
- if subtype == "Link" then
+ local a_llx = rectangle[1]
+ local a_lly = rectangle[2]
+ local a_urx = rectangle[3]
+ local a_ury = rectangle[4]
+ local x = xscale * (a_llx - llx)
+ local y = yscale * (a_lly - lly)
+ local w = xscale * (a_urx - a_llx)
+ local h = yscale * (a_ury - a_lly)
+ if subtype == "Link" then
local a = annotation.A
if a then
local linktype = a.S
@@ -161,7 +190,7 @@ function codeinjections.mergereferences(specification)
report_link("unsupported link annotation %a",linktype)
end
else
- report_link("mising link annotation")
+ report_link("missing link annotation")
end
elseif trace_links then
report_link("unsupported annotation %a",subtype)
@@ -171,21 +200,21 @@ function codeinjections.mergereferences(specification)
end
end
context.flushlayer { "epdflinks" }
- -- context("\\gdef\\figurereference{%s}",reference) -- global
- context.setgvalue("figurereference",reference) -- global
- if trace_links then
- report_link("setting figure reference to %a",reference)
- end
- specification.reference = reference
- return namespace
end
+ -- moved outside previous test
+ context.setgvalue("figurereference",reference) -- global
+ if trace_links then
+ report_link("setting figure reference to %a",reference)
+ end
+ specification.reference = reference
+ return namespace
end
end
return ""-- no namespace, empty, not nil
end
function codeinjections.mergeviewerlayers(specification)
- -- todo: parse included page for layers
+ -- todo: parse included page for layers .. or only for whole document inclusion
if true then
return
end
@@ -195,9 +224,9 @@ function codeinjections.mergeviewerlayers(specification)
end
if specification then
local fullname = specification.fullname
- local document = lpdf.epdf.load(fullname)
+ local document = loadpdffile(fullname)
if document then
- local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname)))
+ local namespace = makenamespace(fullname)
local layers = document.layers
if layers then
for i=1,layers.n do
@@ -225,3 +254,160 @@ function codeinjections.mergeviewerlayers(specification)
end
end
+-- new: for taco
+
+-- Beware, bookmarks can be in pdfdoc encoding or in unicode. However, in mkiv we
+-- write out the strings in unicode (hex). When we read them in, we check for a bom
+-- and convert to utf.
+
+function codeinjections.getbookmarks(filename)
+
+ -- The first version built a nested tree and flattened that afterwards ... but I decided
+ -- to keep it simple and flat.
+
+ local list = bookmarks.extras.get(filename)
+
+ if list then
+ return list
+ else
+ list = { }
+ end
+
+ local document = nil
+
+ if lfs.isfile(filename) then
+ document = loadpdffile(filename)
+ else
+ report_outline("unknown file %a",filename)
+ bookmarks.extras.register(filename,list)
+ return list
+ end
+
+ local outlines = document.Catalog.Outlines
+ local pages = document.pages
+ local nofpages = pages.n -- we need to access once in order to initialize
+ local destinations = document.destinations
+
+ -- I need to check this destination analyzer with the one in annotations .. best share
+ -- code (and note that it's inconsistent). On the todo list ...
+
+ local function setdestination(current,entry)
+ local destination = nil
+ local action = current.A
+ if action then
+ local subtype = action.S
+ if subtype == "GoTo" then
+ destination = action.D
+ if type(destination) == "string" then
+ entry.destination = destination
+ destination = destinations[destination]
+ local pagedata = destination and destination[1]
+ if pagedata then
+ entry.realpage = pagedata.number
+ end
+ else
+ -- maybe
+ end
+ else
+ -- maybe
+ end
+ else
+ local destination = current.Dest
+ if destination then
+ if type(destination) == "string" then
+ local wanted = destinations[destination]
+ destination = wanted and wanted.D
+ if destination then
+ entry.destination = destination
+ end
+ else
+ local pagedata = destination and destination[1]
+ if pagedata and pagedata.Type == "Page" then
+ entry.realpage = pagedata.number
+ end
+ end
+ end
+ end
+ end
+
+ local function traverse(current,depth)
+ while current do
+ -- local title = current.Title
+ local title = current("Title") -- can be pdfdoc or unicode
+ if title then
+ local entry = {
+ level = depth,
+ title = title,
+ }
+ list[#list+1] = entry
+ setdestination(current,entry)
+ if trace_outlines then
+ report_outline("%w%s",2*depth,title)
+ end
+ end
+ local first = current.First
+ if first then
+ local current = first
+ while current do
+ local title = current.Title
+ if title and trace_outlines then
+ report_outline("%w%s",2*depth,title)
+ end
+ local entry = {
+ level = depth,
+ title = title,
+ }
+ setdestination(current,entry)
+ list[#list+1] = entry
+ traverse(current.First,depth+1)
+ current = current.Next
+ end
+ end
+ current = current.Next
+ end
+ end
+
+ if outlines then
+ if trace_outlines then
+ report_outline("outline of %a:",document.filename)
+ report_outline()
+ end
+ traverse(outlines,0)
+ if trace_outlines then
+ report_outline()
+ end
+ elseif trace_outlines then
+ report_outline("no outline in %a",document.filename)
+ end
+
+ bookmarks.extras.register(filename,list)
+
+ return list
+
+end
+
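+-- A minimal usage sketch (the file name is hypothetical); entries carry level,
+-- title and, when resolvable, destination and realpage:
+--
+--   local list = codeinjections.getbookmarks("somefile.pdf")
+--   for i=1,#list do
+--       local b = list[i]
+--       report_outline("%w%s (page %s)",2*b.level,b.title,b.realpage or "-")
+--   end
+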
+function codeinjections.mergebookmarks(specification)
+ -- codeinjections.getbookmarks(document)
+ if not specification then
+ specification = figures and figures.current()
+ specification = specification and specification.status
+ end
+ if specification then
+ local fullname = specification.fullname
+ local bookmarks = backends.codeinjections.getbookmarks(fullname)
+ local realpage = tonumber(specification.page) or 1
+ for i=1,#bookmarks do
+ local b = bookmarks[i]
+ if not b.usedpage then
+ if b.realpage == realpage then
+ if trace_options then
+ report_outline("using %a at page %a of file %a",b.title,realpage,fullname)
+ end
+ b.usedpage = true
+ b.section = structures.sections.currentsectionindex()
+ b.pageindex = specification.pageindex
+ end
+ end
+ end
+ end
+end
diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua
index a7399f6b4..1dc20bc26 100644
--- a/tex/context/base/lpdf-epd.lua
+++ b/tex/context/base/lpdf-epd.lua
@@ -6,124 +6,287 @@ if not modules then modules = { } end modules ['lpdf-epd'] = {
license = "see context related readme files"
}
--- This is an experimental layer around the epdf library. The reason for
--- this layer is that I want to be independent of the library (which
--- implements a selection of what a file provides) and also because I
--- want an interface closer to Lua's table model while the API stays
--- close to the original xpdf library. Of course, after prototyping a
--- solution, we can optimize it using the low level epdf accessors.
-
--- It will be handy when we have a __length and __next that can trigger
--- the resolve till then we will provide .n as #.
-
--- As there can be references to the parent we cannot expand a tree. I
--- played with some expansion variants but it does to pay off.
-
--- Maybe we need a close().
--- We cannot access all destinations in one run.
-
-local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber
-local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub
+-- This is an experimental layer around the epdf library. The reason for this layer is that
+-- I want to be independent of the library (which implements a selection of what a file
+-- provides) and also because I want an interface closer to Lua's table model while the API
+-- stays close to the original xpdf library. Of course, after prototyping a solution, we can
+-- optimize it using the low level epdf accessors. However, not all are accessible (this will
+-- be fixed).
+--
+-- It will be handy when we have a __length and __next that can trigger the resolve; till then
+-- we will provide .n as #; maybe in Lua 5.3 or later.
+--
+-- As there can be references to the parent we cannot expand a tree. I played with some
+-- expansion variants but it does not pay off; adding extra checks is not worth the trouble.
+--
+-- The document stays open. In order to free memory one has to explicitly unload the loaded
+-- document.
+--
+-- We have much more checking than needed in the prepare functions because occasionally
+-- we run into bugs in poppler or the epdf interface. It took us a while to realize that
+-- there was a long standing gc issue where long runs including many pages could
+-- crash the analyzer.
+--
+-- Normally a value is fetched by key, as in foo.Title but as it can be in pdfdoc encoding
+-- a safer bet is foo("Title") which will return a decoded string (or the original if it
+-- already was unicode).
+
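+-- A minimal usage sketch (the file name is hypothetical) of the access styles
+-- described above; lpdf_epdf.load and lpdf_epdf.unload are defined further down:
+--
+--   local document = lpdf_epdf.load("somefile.pdf")
+--   if document then
+--       local safe  = document.Info("Title") -- also decodes pdfdoc encoded strings
+--       local plain = document.Info.Title    -- fine when the string is known unicode
+--       lpdf_epdf.unload("somefile.pdf")
+--   end
+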
+local setmetatable, rawset, rawget, type = setmetatable, rawset, rawget, type
+local tostring, tonumber = tostring, tonumber
+local lower, match, char, byte, find = string.lower, string.match, string.char, string.byte, string.find
+local abs = math.abs
local concat = table.concat
-local toutf = string.toutf
+local toutf, toeight, utfchar = string.toutf, utf.toeight, utf.char
+
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, C, S, R, Ct, Cc, V, Carg, Cs, Cf, Cg = lpeg.P, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cc, lpeg.V, lpeg.Carg, lpeg.Cs, lpeg.Cf, lpeg.Cg
+
+local epdf = epdf
+ lpdf = lpdf or { }
+local lpdf = lpdf
+local lpdf_epdf = { }
+lpdf.epdf = lpdf_epdf
+
+local pdf_open = epdf.open
+
+local report_epdf = logs.reporter("epdf")
+
+local getDict, getArray, getReal, getNum, getString, getBool, getName, getRef, getRefNum
+local getType, getTypeName
+local dictGetLength, dictGetVal, dictGetValNF, dictGetKey
+local arrayGetLength, arrayGetNF, arrayGet
+local streamReset, streamGetDict, streamGetChar
+
+do
+ local object = epdf.Object()
+ --
+ getDict = object.getDict
+ getArray = object.getArray
+ getReal = object.getReal
+ getNum = object.getNum
+ getString = object.getString
+ getBool = object.getBool
+ getName = object.getName
+ getRef = object.getRef
+ getRefNum = object.getRefNum
+ --
+ getType = object.getType
+ getTypeName = object.getTypeName
+ --
+ streamReset = object.streamReset
+ streamGetDict = object.streamGetDict
+ streamGetChar = object.streamGetChar
+ --
+end
-local report_epdf = logs.reporter("epdf")
+local function initialize_methods(xref)
+ local dictionary = epdf.Dict(xref)
+ local array = epdf.Array(xref)
+ --
+ dictGetLength = dictionary.getLength
+ dictGetVal = dictionary.getVal
+ dictGetValNF = dictionary.getValNF
+ dictGetKey = dictionary.getKey
+ --
+ arrayGetLength = array.getLength
+ arrayGetNF = array.getNF
+ arrayGet = array.get
+ --
+ initialize_methods = function()
+ -- already done
+ end
+end
--- a bit of protection
+local typenames = { [0] =
+ "boolean",
+ "integer",
+ "real",
+ "string",
+ "name",
+ "null",
+ "array",
+ "dictionary",
+ "stream",
+ "ref",
+ "cmd",
+ "error",
+ "eof",
+ "none",
+ "integer64",
+}
-local limited = false
+local typenumbers = table.swapped(typenames)
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- epdf.open = i_limiter.protect(epdf.open)
- limited = true
- end
- end
-end)
+local null_code = typenumbers.null
+local ref_code = typenumbers.ref
---
+local function fatal_error(...)
+ report_epdf(...)
+ report_epdf("aborting job in order to avoid crash")
+ os.exit()
+end
+
+-- epdf is the built-in library
function epdf.type(o)
local t = lower(match(tostring(o),"[^ :]+"))
return t or "?"
end
-lpdf = lpdf or { }
-local lpdf = lpdf
+local checked_access
+
+-- dictionaries (can be optimized: ... resolve and redefine when all locals set)
-lpdf.epdf = { }
+local frompdfdoc = lpdf.frompdfdoc
-local checked_access
+local function get_flagged(t,f,k)
+ local fk = f[k]
+ if not fk then
+ return t[k]
+ elseif fk == "rawtext" then
+ return frompdfdoc(t[k])
+ else -- no other flags yet
+ return t[k]
+ end
+end
-local function prepare(document,d,t,n,k,mt)
+local function prepare(document,d,t,n,k,mt,flags)
for i=1,n do
- local v = d:getVal(i)
- local r = d:getValNF(i)
- if r:getTypeName() == "ref" then
- r = r:getRef().num
- local c = document.cache[r]
- if c then
- --
+ local v = dictGetVal(d,i)
+ if v then
+ local r = dictGetValNF(d,i)
+ local kind = getType(v)
+ if kind == null_code then
+ -- ignore
else
- c = checked_access[v:getTypeName()](v,document,r)
- if c then
- document.cache[r] = c
- document.xrefs[c] = r
+ local key = dictGetKey(d,i)
+ if kind then
+ if r and getType(r) == ref_code then
+ local objnum = getRefNum(r)
+ local cached = document.__cache__[objnum]
+ if not cached then
+ cached = checked_access[kind](v,document,objnum,mt)
+ if cached then
+ document.__cache__[objnum] = cached
+ document.__xrefs__[cached] = objnum
+ end
+ end
+ t[key] = cached
+ else
+ local v, flag = checked_access[kind](v,document)
+ t[key] = v
+ if flag and flags then
+ flags[key] = flag -- flags
+ end
+ end
+ else
+ report_epdf("warning: nil value for key %a in dictionary",key)
end
end
- t[d:getKey(i)] = c
else
- t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
+ fatal_error("error: invalid value at index %a in dictionary of %a",i,document.filename)
end
end
- getmetatable(t).__index = nil -- ?? weird
-setmetatable(t,mt)
+ if mt then
+ setmetatable(t,mt)
+ else
+ getmetatable(t).__index = nil
+ end
return t[k]
end
-local function some_dictionary(d,document,r,mt)
- local n = d and d:getLength() or 0
+local function some_dictionary(d,document)
+ local n = d and dictGetLength(d) or 0
+ if n > 0 then
+ local t = { }
+ local f = { }
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,d,t,n,k,_,f)
+ end,
+ __call = function(t,k)
+ return get_flagged(t,f,k)
+ end,
+ } )
+ return t
+ end
+end
+
+local function get_dictionary(object,document,r,mt)
+ local d = getDict(object)
+ local n = d and dictGetLength(d) or 0
if n > 0 then
local t = { }
- setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k,mt) end } )
+ local f = { }
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,d,t,n,k,mt,f)
+ end,
+ __call = function(t,k)
+ return get_flagged(t,f,k)
+ end,
+ } )
return t
end
end
-local done = { }
+-- arrays (can be optimized: ... resolve and redefine when all locals set)
local function prepare(document,a,t,n,k)
for i=1,n do
- local v = a:get(i)
- local r = a:getNF(i)
- if v:getTypeName() == "null" then
- -- TH: weird, but appears possible
- elseif r:getTypeName() == "ref" then
- r = r:getRef().num
- local c = document.cache[r]
- if c then
- --
+ local v = arrayGet(a,i)
+ if v then
+ local kind = getType(v)
+ if kind == null_code then
+ -- ignore
+ elseif kind then
+ local r = arrayGetNF(a,i)
+ if r and getType(r) == ref_code then
+ local objnum = getRefNum(r)
+ local cached = document.__cache__[objnum]
+ if not cached then
+ cached = checked_access[kind](v,document,objnum)
+ document.__cache__[objnum] = cached
+ document.__xrefs__[cached] = objnum
+ end
+ t[i] = cached
+ else
+ t[i] = checked_access[kind](v,document)
+ end
else
- c = checked_access[v:getTypeName()](v,document,r)
- document.cache[r] = c
- document.xrefs[c] = r
+ report_epdf("warning: nil value for index %a in array",i)
end
- t[i] = c
else
- t[i] = checked_access[v:getTypeName()](v,document)
+ fatal_error("error: invalid value at index %a in array of %a",i,document.filename)
end
end
getmetatable(t).__index = nil
return t[k]
end
-local function some_array(a,document,r)
- local n = a and a:getLength() or 0
+local function some_array(a,document)
+ local n = a and arrayGetLength(a) or 0
if n > 0 then
local t = { n = n }
- setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } )
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,a,t,n,k)
+ end
+ } )
+ return t
+ end
+end
+
+local function get_array(object,document)
+ local a = getArray(object)
+ local n = a and arrayGetLength(a) or 0
+ if n > 0 then
+ local t = { n = n }
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,a,t,n,k)
+ end
+ } )
return t
end
end
@@ -131,9 +294,9 @@ end
local function streamaccess(s,_,what)
if not what or what == "all" or what == "*all" then
local t, n = { }, 0
- s:streamReset()
+ streamReset(s)
while true do
- local c = s:streamGetChar()
+ local c = streamGetChar(s)
if c < 0 then
break
else
@@ -145,56 +308,96 @@ local function streamaccess(s,_,what)
end
end
-local function some_stream(d,document,r)
+local function get_stream(d,document)
if d then
- d:streamReset()
- local s = some_dictionary(d:streamGetDict(),document,r)
+ streamReset(d)
+ local s = some_dictionary(streamGetDict(d),document)
getmetatable(s).__call = function(...) return streamaccess(d,...) end
return s
end
end
--- we need epdf.boolean(v) in addition to v:getBool() [dictionary, array, stream, real, integer, string, boolean, name, ref, null]
-
-checked_access = {
- dictionary = function(d,document,r)
- return some_dictionary(d:getDict(),document,r)
- end,
- array = function(a,document,r)
- return some_array(a:getArray(),document,r)
- end,
- stream = function(v,document,r)
- return some_stream(v,document,r)
- end,
- real = function(v)
- return v:getReal()
- end,
- integer = function(v)
- return v:getNum()
- end,
- string = function(v)
- return toutf(v:getString())
- end,
- boolean = function(v)
- return v:getBool()
- end,
- name = function(v)
- return v:getName()
- end,
- ref = function(v)
- return v:getRef()
- end,
- null = function()
- return nil
- end,
-}
+-- We need to convert the string from utf16 although there is no way to
+-- check if we have a regular string starting with a bom. So, we have
+-- a dilemma here: a pdf doc encoded string can be invalid utf.
--- checked_access.real = epdf.real
--- checked_access.integer = epdf.integer
--- checked_access.string = epdf.string
--- checked_access.boolean = epdf.boolean
--- checked_access.name = epdf.name
--- checked_access.ref = epdf.ref
+-- <hex encoded> : implicit 0 appended if odd
+-- (byte encoded) : \( \) \\ escaped
+--
+-- <FE><FF> : utf16be
+--
+-- \r \r \t \b \f \( \) \\ \NNN and \<newline> : append next line
+--
+-- the getString function gives back bytes so we don't need to worry about
+-- the hex aspect.
+
+local u_pattern = lpeg.patterns.utfbom_16_be * lpeg.patterns.utf16_to_utf8_be
+----- b_pattern = lpeg.patterns.hextobytes
+
+local function get_string(v)
+ -- the toutf function only converts a utf16 string and leaves the original
+ -- untouched otherwise; one might want to apply lpdf.frompdfdoc to a
+ -- non-unicode string
+ local s = getString(v)
+ if not s or s == "" then
+ return ""
+ end
+ local u = lpegmatch(u_pattern,s)
+ if u then
+ return u -- , "unicode"
+ end
+ -- this is too tricky and fails on e.g. a reload of the url www.pragma-ade.com
+ -- local b = lpegmatch(b_pattern,s)
+ -- if b then
+ -- return b, "rawtext"
+ -- end
+ return s, "rawtext"
+end
+
+local function get_null()
+ return nil
+end
+
+-- we have dual access: by typenumber and by typename
+
+local function invalidaccess(k,document)
+ local fullname = type(document) == "table" and document.fullname
+ if fullname then
+ fatal_error("error, asking for key %a in checker of %a",k,fullname)
+ else
+ fatal_error("error, asking for key %a in checker",k)
+ end
+end
+
+checked_access = table.setmetatableindex(function(t,k)
+ return function(v,document)
+ invalidaccess(k,document)
+ end
+end)
+
+checked_access[typenumbers.boolean] = getBool
+checked_access[typenumbers.integer] = getNum
+checked_access[typenumbers.real] = getReal
+checked_access[typenumbers.string] = get_string -- getString
+checked_access[typenumbers.name] = getName
+checked_access[typenumbers.null] = get_null
+checked_access[typenumbers.array] = get_array -- d,document,r
+checked_access[typenumbers.dictionary] = get_dictionary -- d,document,r
+checked_access[typenumbers.stream] = get_stream
+checked_access[typenumbers.ref] = getRef
+
+for i=0,#typenames do
+ local checker = checked_access[i]
+ if not checker then
+ checker = function()
+ return function(v,document)
+ invalidaccess(i,document)
+ end
+ end
+ checked_access[i] = checker
+ end
+ checked_access[typenames[i]] = checker
+end
local function getnames(document,n,target) -- direct
if n then
@@ -252,7 +455,6 @@ local function getlayers(document)
local n = layers.n
for i=1,n do
local layer = layers[i]
---~ print(document.xrefs[layer])
t[i] = layer.Name
end
t.n = n
@@ -261,52 +463,39 @@ local function getlayers(document)
end
end
+local function getstructure(document)
+ -- this might become a tree
+ return document.Catalog.StructTreeRoot
+end
local function getpages(document,Catalog)
- local data = document.data
- local xrefs = document.xrefs
- local cache = document.cache
- local cata = data:getCatalog()
- local xref = data:getXRef()
- local pages = { }
- local nofpages = cata:getNumPages()
--- local function getpagestuff(pagenumber,k)
--- if k == "MediaBox" then
--- local pageobj = cata:getPage(pagenumber)
--- local pagebox = pageobj:getMediaBox()
--- return { pagebox.x1, pagebox.y1, pagebox.x2, pagebox.y2 }
--- elseif k == "CropBox" then
--- local pageobj = cata:getPage(pagenumber)
--- local pagebox = pageobj:getMediaBox()
--- return { pagebox.x1, pagebox.y1, pagebox.x2, pagebox.y2 }
--- elseif k == "Resources" then
--- print("todo page resources from parent")
--- -- local pageobj = cata:getPage(pagenumber)
--- -- local resources = pageobj:getResources()
--- end
--- end
--- for pagenumber=1,nofpages do
--- local mt = { __index = function(t,k)
--- local v = getpagestuff(pagenumber,k)
--- if v then
--- t[k] = v
--- end
--- return v
--- end }
- local mt = { __index = Catalog.Pages }
+ local __data__ = document.__data__
+ local __xrefs__ = document.__xrefs__
+ local __cache__ = document.__cache__
+ local __xref__ = document.__xref__
+ --
+ local catalog = __data__:getCatalog()
+ local pages = { }
+ local nofpages = catalog:getNumPages()
+ local metatable = { __index = Catalog.Pages }
+ --
for pagenumber=1,nofpages do
- local pagereference = cata:getPageRef(pagenumber).num
- local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference,mt)
+ local pagereference = catalog:getPageRef(pagenumber).num
+ local pageobject = __xref__:fetch(pagereference,0)
+ local pagedata = get_dictionary(pageobject,document,pagereference,metatable)
if pagedata then
- pagedata.number = pagenumber
- pages[pagenumber] = pagedata
- xrefs[pagedata] = pagereference
- cache[pagereference] = pagedata
+ -- rawset(pagedata,"number",pagenumber)
+ pagedata.number = pagenumber
+ pages[pagenumber] = pagedata
+ __xrefs__[pagedata] = pagereference
+ __cache__[pagereference] = pagedata
else
report_epdf("missing pagedata at slot %i",i)
end
end
+ --
pages.n = nofpages
+ --
return pages
end
@@ -329,23 +518,29 @@ end
local loaded = { }
-function lpdf.epdf.load(filename)
+function lpdf_epdf.load(filename)
local document = loaded[filename]
if not document then
- statistics.starttiming(lpdf.epdf)
- local data = epdf.open(filename) -- maybe resolvers.find_file
- if data then
+ statistics.starttiming(lpdf_epdf)
+ local __data__ = pdf_open(filename) -- maybe resolvers.find_file
+ if __data__ then
+ local __xref__ = __data__:getXRef()
document = {
- filename = filename,
- cache = { },
- xrefs = { },
- data = data,
+ filename = filename,
+ __cache__ = { },
+ __xrefs__ = { },
+ __fonts__ = { },
+ __data__ = __data__,
+ __xref__ = __xref__,
}
- local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document)
- local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document)
- document.Catalog = Catalog
- document.Info = Info
- -- document.catalog = Catalog
+ --
+ initialize_methods(__xref__)
+ --
+ local Catalog = some_dictionary(__xref__:getCatalog():getDict(),document)
+ local Info = some_dictionary(__xref__:getDocInfo():getDict(),document)
+ --
+ document.Catalog = Catalog
+ document.Info = Info
-- a few handy helper tables
document.pages = delayed(document,"pages", function() return getpages(document,Catalog) end)
document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end)
@@ -353,28 +548,292 @@ function lpdf.epdf.load(filename)
document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end)
document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end)
document.layers = delayed(document,"layers", function() return getlayers(document) end)
+ document.structure = delayed(document,"structure", function() return getstructure(document) end)
else
document = false
end
loaded[filename] = document
- statistics.stoptiming(lpdf.epdf)
- -- print(statistics.elapsedtime(lpdf.epdf))
+ loaded[document] = document
+ statistics.stoptiming(lpdf_epdf)
+ -- print(statistics.elapsedtime(lpdf_epdf))
+ end
+ return document or nil
+end
+
+function lpdf_epdf.unload(filename)
+ local document = loaded[filename]
+ if document then
+ loaded[document] = nil
+ loaded[filename] = nil
end
- return document
end
-- for k, v in next, expand(t) do
-function lpdf.epdf.expand(t)
+local function expand(t)
if type(t) == "table" then
local dummy = t.dummy
end
return t
end
+-- for k, v in expanded(t) do
+
+local function expanded(t)
+ if type(t) == "table" then
+ local dummy = t.dummy
+ end
+ return next, t
+end
+
+lpdf_epdf.expand = expand
+lpdf_epdf.expanded = expanded
+
+-- we could resolve the text stream in one pass if we directly handle the
+-- font but why should we complicate things
+
+local hexdigit = R("09","AF")
+local numchar = ( P("\\") * ( (R("09")^3/tonumber) + C(1) ) ) + C(1)
+local number = lpegpatterns.number / tonumber
+local spaces = lpegpatterns.whitespace^1
+local optspaces = lpegpatterns.whitespace^0
+local keyword = P("/") * C(R("AZ","az","09")^1)
+local operator = C((R("AZ","az")+P("'")+P('"'))^1)
+
+local grammar = P { "start",
+ start = (keyword + number + V("dictionary") + V("unicode") + V("string") + V("unicode")+ V("array") + spaces)^1,
+ -- keyvalue = (keyword * spaces * V("start") + spaces)^1,
+ keyvalue = optspaces * Cf(Ct("") * Cg(keyword * optspaces * V("start") * optspaces)^1,rawset),
+ array = P("[") * Ct(V("start")^1) * P("]"),
+ dictionary = P("<<") * V("keyvalue") * P(">>"),
+ unicode = P("<") * Ct(Cc("hex") * C((1-P(">"))^1)) * P(">"),
+ string = P("(") * Ct(Cc("dec") * C((V("string")+numchar)^1)) * P(")"), -- untested
+}
+
+local operation = Ct(grammar^1 * operator)
+local parser = Ct((operation + P(1))^1)
+
+-- beginbfrange : <start> <stop> <firstcode>
+-- <start> <stop> [ <firstsequence> <firstsequence> <firstsequence> ]
+-- beginbfchar : <code> <newcodes>
+
+local fromsixteen = lpdf.fromsixteen -- maybe inline the lpeg ... but not worth it
+
+local function f_bfchar(t,a,b)
+ t[tonumber(a,16)] = fromsixteen(b)
+end
+
+local function f_bfrange_1(t,a,b,c)
+ print("todo 1",a,b,c)
+ -- c is string
+ -- todo t[tonumber(a,16)] = fromsixteen(b)
+end
+
+local function f_bfrange_2(t,a,b,c)
+ print("todo 2",a,b,c)
+ -- c is table
+ -- todo t[tonumber(a,16)] = fromsixteen(b)
+end
+
+local optionals = spaces^0
+local hexstring = optionals * P("<") * C((1-P(">"))^1) * P(">")
+local bfchar = Carg(1) * hexstring * hexstring / f_bfchar
+local bfrange = Carg(1) * hexstring * hexstring * hexstring / f_bfrange_1
+ + Carg(1) * hexstring * hexstring * optionals * P("[") * Ct(hexstring^1) * optionals * P("]") / f_bfrange_2
+local fromunicode = (
+ P("beginbfchar" ) * bfchar ^1 * optionals * P("endbfchar" ) +
+ P("beginbfrange") * bfrange^1 * optionals * P("endbfrange") +
+ spaces +
+ P(1)
+)^1 * Carg(1)
+
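+-- A rough illustration (not part of this patch) of what the pattern consumes: a
+-- ToUnicode cmap fragment like
+--
+--   beginbfchar <0041> <0061> endbfchar
+--
+-- matched as lpegmatch(fromunicode,cmapdata,1,{}) yields a table mapping 0x41 to
+-- "a"; the bfrange branches above are still marked as todo.
+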
+local function analyzefonts(document,resources) -- unfinished
+ local fonts = document.__fonts__
+ if resources then
+ local fontlist = resources.Font
+ if fontlist then
+ for id, data in expanded(fontlist) do
+ if not fonts[id] then
+ -- a quick hack ... I will look into it in more detail if I find a real
+ -- application for it
+ local tounicode = data.ToUnicode()
+ if tounicode then
+ tounicode = lpegmatch(fromunicode,tounicode,1,{})
+ end
+ fonts[id] = {
+ tounicode = type(tounicode) == "table" and tounicode or { }
+ }
+ table.setmetatableindex(fonts[id],"self")
+ end
+ end
+ end
+ end
+ return fonts
+end
+
+local more = 0
+local unic = nil -- cheaper than passing each time as Carg(1)
+
+local p_hex_to_utf = C(4) / function(s) -- needs checking !
+ local now = tonumber(s,16)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ return unic[now] or utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ -- return ""
+ else
+ return unic[now] or utfchar(now)
+ end
+end
+
+local p_dec_to_utf = C(1) / function(s) -- needs checking !
+ local now = byte(s)
+ return unic[now] or utfchar(now)
+end
+
+local p_hex_to_utf = P(true) / function() more = 0 end * Cs(p_hex_to_utf^1)
+local p_dec_to_utf = P(true) / function() more = 0 end * Cs(p_dec_to_utf^1)
+
+function lpdf_epdf.getpagecontent(document,pagenumber)
+
+ local page = document.pages[pagenumber]
+
+ if not page then
+ return
+ end
+
+ local fonts = analyzefonts(document,page.Resources)
+
+ local content = page.Contents() or ""
+ local list = lpegmatch(parser,content)
+ local font = nil
+ -- local unic = nil
+
+ for i=1,#list do
+ local entry = list[i]
+ local size = #entry
+ local operator = entry[size]
+ if operator == "Tf" then
+ font = fonts[entry[1]]
+ unic = font.tounicode
+ elseif operator == "TJ" then -- { array, TJ }
+ local list = entry[1]
+ for i=1,#list do
+ local li = list[i]
+ if type(li) == "table" then
+ if li[1] == "hex" then
+ list[i] = lpegmatch(p_hex_to_utf,li[2])
+ else
+ list[i] = lpegmatch(p_dec_to_utf,li[2])
+ end
+ else
+ -- kern
+ end
+ end
+ elseif operator == "Tj" or operator == "'" or operator == '"' then -- { string, Tj } { string, ' } { n, m, string, " }
+ local list = entry[size-1]
+ if list[1] == "hex" then
+ list[2] = lpegmatch(p_hex_to_utf,list[2])
+ else
+ list[2] = lpegmatch(p_dec_to_utf,list[2])
+ end
+ end
+ end
+
+ unic = nil -- can be collected
+
+ return list
+
+end
+
+-- This is also an experiment. When I really need it I can improve it, for instance
+-- with proper position calculation. It might be useful for some search or so.
+
+local softhyphen = utfchar(0xAD) .. "$"
+local linefactor = 1.3
+
+function lpdf_epdf.contenttotext(document,list) -- maybe signal fonts
+ local last_y = 0
+ local last_f = 0
+ local text = { }
+ local last = 0
+
+ for i=1,#list do
+ local entry = list[i]
+ local size = #entry
+ local operator = entry[size]
+ if operator == "Tf" then
+ last_f = entry[2]
+ elseif operator == "TJ" then
+ local list = entry[1]
+ for i=1,#list do
+ local li = list[i]
+ if type(li) == "string" then
+ last = last + 1
+ text[last] = li
+ elseif li < -50 then
+ last = last + 1
+ text[last] = " "
+ end
+ end
+ line = concat(list)
+ elseif operator == "Tj" then
+ last = last + 1
+ text[last] = entry[size-1]
+ elseif operator == "cm" or operator == "Tm" then
+ local ty = entry[6]
+ local dy = abs(last_y - ty)
+ if dy > linefactor*last_f then
+ if last > 0 then
+ if find(text[last],softhyphen) then
+ -- ignore
+ else
+ last = last + 1
+ text[last] = "\n"
+ end
+ end
+ end
+ last_y = ty
+ end
+ end
+
+ return concat(text)
+end
+
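+-- A minimal usage sketch (the file name is hypothetical) combining the helpers above:
+--
+--   local document = lpdf_epdf.load("somefile.pdf")
+--   if document then
+--       local list = lpdf_epdf.getpagecontent(document,1)
+--       if list then
+--           report_epdf("%s",lpdf_epdf.contenttotext(document,list))
+--       end
+--   end
+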
+function lpdf_epdf.getstructure(document,list) -- just a test
+ local depth = 0
+ for i=1,#list do
+ local entry = list[i]
+ local size = #entry
+ local operator = entry[size]
+ if operator == "BDC" then
+ report_epdf("%w%s : %s",depth,entry[1] or "?",entry[2].MCID or "?")
+ depth = depth + 1
+ elseif operator == "EMC" then
+ depth = depth - 1
+ elseif operator == "TJ" then
+ local list = entry[1]
+ for i=1,#list do
+ local li = list[i]
+ if type(li) == "string" then
+ report_epdf("%w > %s",depth,li)
+ elseif li < -50 then
+ report_epdf("%w >",depth,li)
+ end
+ end
+ elseif operator == "Tj" then
+ report_epdf("%w > %s",depth,entry[size-1])
+ end
+ end
+end
+
+-- document.Catalog.StructTreeRoot.ParentTree.Nums[2][1].A.P[1])
+
-- helpers
--- function lpdf.epdf.getdestinationpage(document,name)
--- local destination = document.data:findDest(name)
+-- function lpdf_epdf.getdestinationpage(document,name)
+-- local destination = document.__data__:findDest(name)
-- return destination and destination.number
-- end
diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua
index a9b9fd72d..f0aad3623 100644
--- a/tex/context/base/lpdf-fld.lua
+++ b/tex/context/base/lpdf-fld.lua
@@ -55,7 +55,8 @@ if not modules then modules = { } end modules ['lpdf-fld'] = {
-- for printing especially when highlighting (those colorfull foregrounds) is
-- on.
-local gmatch, lower, format = string.gmatch, string.lower, string.format
+local tostring, next = tostring, next
+local gmatch, lower, format, formatters = string.gmatch, string.lower, string.format, string.formatters
local lpegmatch = lpeg.match
local utfchar = utf.char
local bpfactor, todimen = number.dimenfactors.bp, string.todimen
@@ -92,14 +93,13 @@ local pdfflushobject = lpdf.flushobject
local pdfshareobjectreference = lpdf.shareobjectreference
local pdfshareobject = lpdf.shareobject
local pdfreserveobject = lpdf.reserveobject
-local pdfreserveannotation = lpdf.reserveannotation
local pdfaction = lpdf.action
-local hpack_node = node.hpack
-
-local nodepool = nodes.pool
+local pdfcolor = lpdf.color
+local pdfcolorvalues = lpdf.colorvalues
+local pdflayerreference = lpdf.layerreference
-local pdfannotation_node = nodepool.pdfannotation
+local hpack_node = node.hpack
local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked
@@ -125,39 +125,39 @@ function codeinjections.setformsmethod(name)
end
local flag = { -- /Ff
- ReadOnly = 1, -- 1
- Required = 2, -- 2
- NoExport = 4, -- 3
- MultiLine = 4096, -- 13
- Password = 8192, -- 14
- NoToggleToOff = 16384, -- 15
- Radio = 32768, -- 16
- PushButton = 65536, -- 17
- PopUp = 131072, -- 18
- Edit = 262144, -- 19
- Sort = 524288, -- 20
- FileSelect = 1048576, -- 21
- DoNotSpellCheck = 4194304, -- 23
- DoNotScroll = 8388608, -- 24
- Comb = 16777216, -- 25
- RichText = 33554432, -- 26
- RadiosInUnison = 33554432, -- 26
- CommitOnSelChange = 67108864, -- 27
+ ReadOnly = 2^ 0, -- 1
+ Required = 2^ 1, -- 2
+ NoExport = 2^ 2, -- 3
+ MultiLine = 2^12, -- 13
+ Password = 2^13, -- 14
+ NoToggleToOff = 2^14, -- 15
+ Radio = 2^15, -- 16
+ PushButton = 2^16, -- 17
+ PopUp = 2^17, -- 18
+ Edit = 2^18, -- 19
+ Sort = 2^19, -- 20
+ FileSelect = 2^20, -- 21
+ DoNotSpellCheck = 2^22, -- 23
+ DoNotScroll = 2^23, -- 24
+ Comb = 2^24, -- 25
+ RichText = 2^25, -- 26
+ RadiosInUnison = 2^25, -- 26
+ CommitOnSelChange = 2^26, -- 27
}
local plus = { -- /F
- Invisible = 1, -- 1
- Hidden = 2, -- 2
- Printable = 4, -- 3
- Print = 4, -- 3
- NoZoom = 8, -- 4
- NoRotate = 16, -- 5
- NoView = 32, -- 6
- ReadOnly = 64, -- 7
- Locked = 128, -- 8
- ToggleNoView = 256, -- 9
- LockedContents = 512, -- 10,
- AutoView = 256, -- 288 (6+9)
+ Invisible = 2^0, -- 1
+ Hidden = 2^1, -- 2
+ Printable = 2^2, -- 3
+ Print = 2^2, -- 3
+ NoZoom = 2^3, -- 4
+ NoRotate = 2^4, -- 5
+ NoView = 2^5, -- 6
+ ReadOnly = 2^6, -- 7
+ Locked = 2^7, -- 8
+ ToggleNoView = 2^8, -- 9
+ LockedContents = 2^9, -- 10,
+ AutoView = 2^8, -- 6 + 9 ?
}
-- todo: check what is interfaced
@@ -198,43 +198,90 @@ local function fieldplus(specification) -- /F
return n
end
-local function checked(what)
- local set, bug = references.identify("",what)
- if not bug and #set > 0 then
- local r, n = pdfaction(set)
- return pdfshareobjectreference(r)
- end
-end
+-- keep:
+--
+-- local function checked(what)
+-- local set, bug = references.identify("",what)
+-- if not bug and #set > 0 then
+-- local r, n = pdfaction(set)
+-- return pdfshareobjectreference(r)
+-- end
+-- end
+--
+-- local function fieldactions(specification) -- share actions
+-- local d, a = { }, nil
+-- a = specification.mousedown
+-- or specification.clickin if a and a ~= "" then d.D = checked(a) end
+-- a = specification.mouseup
+-- or specification.clickout if a and a ~= "" then d.U = checked(a) end
+-- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
+-- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
+-- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
+-- a = specification.format if a and a ~= "" then d.F = checked(a) end
+-- a = specification.validate if a and a ~= "" then d.V = checked(a) end
+-- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
+-- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
+-- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
+-- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
+-- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
+-- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
+-- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
+-- return next(d) and pdfdictionary(d)
+-- end
+
+local mapping = {
+ mousedown = "D", clickin = "D",
+ mouseup = "U", clickout = "U",
+ regionin = "E",
+ regionout = "X",
+ afterkey = "K",
+ format = "F",
+ validate = "V",
+ calculate = "C",
+ focusin = "Fo",
+ focusout = "Bl",
+ openpage = "PO",
+ closepage = "PC",
+ -- visiblepage = "PV",
+ -- invisiblepage = "PI",
+}
local function fieldactions(specification) -- share actions
- local d, a = { }, nil
- a = specification.mousedown
- or specification.clickin if a and a ~= "" then d.D = checked(a) end
- a = specification.mouseup
- or specification.clickout if a and a ~= "" then d.U = checked(a) end
- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
- a = specification.format if a and a ~= "" then d.F = checked(a) end
- a = specification.validate if a and a ~= "" then d.V = checked(a) end
- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
- return next(d) and pdfdictionary(d)
+ local d = nil
+ for key, target in next, mapping do
+ local code = specification[key]
+ if code and code ~= "" then
+ -- local a = checked(code)
+ local set, bug = references.identify("",code)
+ if not bug and #set > 0 then
+ local a = pdfaction(set) -- r, n
+ if a then
+ local r = pdfshareobjectreference(a)
+ if d then
+ d[target] = r
+ else
+ d = pdfdictionary { [target] = r }
+ end
+ else
+ report_fields("invalid field action %a, case %s",code,2)
+ end
+ else
+ report_fields("invalid field action %a, case %s",code,1)
+ end
+ end
+ end
+ -- if d then
+ -- d = pdfshareobjectreference(d) -- not much overlap or maybe only some patterns
+ -- end
+ return d
end
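A sketch of what the mapping-driven fieldactions produces for a typical specification; the reference strings are placeholders and would have to resolve via references.identify in a real run:

local d = fieldactions {
    mouseup = "SubmitThisForm", -- placeholder reference, ends up under /U
    focusin = "FocusHandler",   -- placeholder reference, ends up under /Fo
}
-- when both references resolve, d is a pdfdictionary along the lines of
--   << /U <shared action reference> /Fo <shared action reference> >>
-- actions that fail to resolve are reported and simply skipped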
-- fonts and color
local pdfdocencodingvector, pdfdocencodingcapsule
--- The pdf doc encoding vector is needed in order to
--- trigger propper unicode. Interesting is that when
--- a glyph is not in the vector, it is still visible
--- as it is taken from some other font. Messy.
+-- The pdf doc encoding vector is needed in order to trigger proper unicode. Interestingly, when
+-- a glyph is not in the vector, it is still visible as it is taken from some other font. Messy.
-- To be checked: only when text/line fields.
@@ -285,7 +332,7 @@ local function fieldsurrounding(specification)
local fontsize = specification.fontsize or "12pt"
local fontstyle = specification.fontstyle or "rm"
local fontalternative = specification.fontalternative or "tf"
- local colorvalue = specification.colorvalue
+ local colorvalue = tonumber(specification.colorvalue)
local s = fontnames[fontstyle]
if not s then
fontstyle, s = "rm", fontnames.rm
@@ -298,16 +345,16 @@ local function fieldsurrounding(specification)
fontsize = todimen(fontsize)
fontsize = fontsize and (bpfactor * fontsize) or 12
fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp
- local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise)
+ local fontcode = formatters["%0.4f Tf %0.4f Ts"](fontsize,fontraise)
-- we could test for colorvalue being 1 (black) and omit it then
- local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space
+ local colorcode = pdfcolor(3,colorvalue) -- we force an rgb color space
if trace_fields then
report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode)
report_fields("using color, value %a, code %a",colorvalue,colorcode)
end
local stream = pdfstream {
pdfconstant(tag),
- format("%s %s",fontcode,colorcode)
+ formatters["%s %s"](fontcode,colorcode)
}
usedfonts[tag] = a -- the name
-- move up with "x.y Ts"
@@ -570,17 +617,14 @@ local function todingbat(n)
end
end
--- local zero_bc = pdfarray { 0, 0, 0 }
--- local zero_bg = pdfarray { 1, 1, 1 }
-
local function fieldrendering(specification)
local bvalue = tonumber(specification.backgroundcolorvalue)
local fvalue = tonumber(specification.framecolorvalue)
local svalue = specification.fontsymbol
if bvalue or fvalue or (svalue and svalue ~= "") then
return pdfdictionary {
- BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg,
- BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc,
+ BG = bvalue and pdfarray { pdfcolorvalues(3,bvalue) } or nil, -- or zero_bg,
+ BC = fvalue and pdfarray { pdfcolorvalues(3,fvalue) } or nil, -- or zero_bc,
CA = svalue and pdfstring (svalue) or nil,
}
end
@@ -590,7 +634,7 @@ end
local function fieldlayer(specification) -- we can move this in line
local layer = specification.layer
- return (layer and lpdf.layerreference(layer)) or nil
+ return (layer and pdflayerreference(layer)) or nil
end
-- defining
@@ -611,7 +655,7 @@ local xfdftemplate = [[
function codeinjections.exportformdata(name)
local result = { }
for k, v in table.sortedhash(fields) do
- result[#result+1] = format(" <field name='%s'><value>%s</value></field>",v.name or k,v.default or "")
+ result[#result+1] = formatters[" <field name='%s'><value>%s</value></field>"](v.name or k,v.default or "")
end
local base = file.basename(tex.jobname)
local xfdf = format(xfdftemplate,base,table.concat(result,"\n"))
@@ -912,7 +956,7 @@ local function save_parent(field,specification,d,hasopt)
end
local function save_kid(field,specification,d,optname)
- local kn = pdfreserveannotation()
+ local kn = pdfreserveobject()
field.kids[#field.kids+1] = pdfreference(kn)
if optname then
local opt = field.opt
@@ -921,7 +965,7 @@ local function save_kid(field,specification,d,optname)
end
end
local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn))
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d(),kn))
box.width, box.height, box.depth = width, height, depth -- redundant
return box
end
@@ -969,6 +1013,8 @@ local function makelinechild(name,specification)
if trace_fields then
report_fields("using child text %a",name)
end
+ -- we could save a little by not setting some key/value when it's the
+ -- same as parent but it would cost more memory to keep track of it
local d = pdfdictionary {
Subtype = pdf_widget,
Parent = pdfreference(parent.pobj),
diff --git a/tex/context/base/lpdf-fmt.lua b/tex/context/base/lpdf-fmt.lua
index b444f03c3..862c011b8 100644
--- a/tex/context/base/lpdf-fmt.lua
+++ b/tex/context/base/lpdf-fmt.lua
@@ -349,7 +349,7 @@ local filenames = {
}
local function locatefile(filename)
- local fullname = resolvers.findfile(filename,"icc")
+ local fullname = resolvers.findfile(filename,"icc",1,true)
if not fullname or fullname == "" then
fullname = resolvers.finders.byscheme("loc",filename) -- could be specific to the project
end
@@ -710,7 +710,9 @@ function codeinjections.setformat(s)
end
end
function codeinjections.setformat(noname)
- report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format)
+ if trace_format then
+ report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format)
+ end
end
else
report_backend("error, format %a is not supported",format)
@@ -732,9 +734,11 @@ directives.register("backend.format", function(v) -- table !
end
end)
-function commands.setformat(s)
- codeinjections.setformat(s)
-end
+interfaces.implement {
+ name = "setformat",
+ actions = codeinjections.setformat,
+ arguments = { { "*" } }
+}
function codeinjections.getformatoption(key)
return formatspecification and formatspecification[key]
@@ -743,7 +747,7 @@ end
function codeinjections.supportedformats()
local t = { }
for k, v in table.sortedhash(formats) do
- if find(k,"pdf") then
+ if find(k,"pdf",1,true) then
t[#t+1] = k
end
end
diff --git a/tex/context/base/lpdf-grp.lua b/tex/context/base/lpdf-grp.lua
index fed5e6a46..36c3507be 100644
--- a/tex/context/base/lpdf-grp.lua
+++ b/tex/context/base/lpdf-grp.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['lpdf-grp'] = {
license = "see context related readme files"
}
-local format, gsub = string.format, string.gsub
+local formatters, gsub = string.formatters, string.gsub
local concat = table.concat
local round = math.round
@@ -118,7 +118,7 @@ function nodeinjections.injectbitmap(t)
height = width * yresolution / xresolution
end
local image = img.new {
- stream = format(template,d(),t.data),
+ stream = formatters[template](d(),t.data),
width = width,
height = height,
bbox = { 0, 0, urx, ury },
@@ -236,7 +236,7 @@ function img.package(image) -- see lpdf-u3d **
local height = boundingbox[4]
local xform = img.scan {
attr = resources(),
- stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag),
+ stream = formatters["%F 0 0 %F 0 0 cm /%s Do"](width,height,imagetag),
bbox = { 0, 0, width/factor, height/factor },
}
img.immediatewrite(xform)
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index 23fe6c177..834f845c5 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -6,98 +6,308 @@ if not modules then modules = { } end modules ['lpdf-ini'] = {
license = "see context related readme files"
}
+-- beware of "too many locals" here
+
local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
-local utfchar, utfvalues = utf.char, utf.values
-local sind, cosd, floor = math.sind, math.cosd, math.floor
+local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values
+local sind, cosd, floor, max, min = math.sind, math.cosd, math.floor, math.max, math.min
local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
local formatters = string.formatters
+local isboolean = string.is_boolean
+
+local report_objects = logs.reporter("backend","objects")
+local report_finalizing = logs.reporter("backend","finalizing")
+local report_blocked = logs.reporter("backend","blocked")
+
+local implement = interfaces.implement
+local two_strings = interfaces.strings[2]
+
+-- In ConTeXt MkIV we use utf8 exclusively so all strings get mapped onto a hex
+-- encoded utf16 string type between <>. We could probably save some bytes by using
+-- strings between () but then we end up with escaped ()\ too.
+
+-- gethpos : used
+-- getpos : used
+-- getvpos : used
+--
+-- getmatrix : used
+-- hasmatrix : used
+--
+-- mapfile : used in font-ctx.lua
+-- mapline : used in font-ctx.lua
+--
+-- maxobjnum : not used
+-- obj : used
+-- immediateobj : used
+-- objtype : not used
+-- pageref : used
+-- print : can be used
+-- refobj : used
+-- registerannot : not to be used
+-- reserveobj : used
+
+-- pdf.catalog : used
+-- pdf.info : used
+-- pdf.trailer : used
+-- pdf.names : not to be used
+
+-- pdf.setinfo : used
+-- pdf.setcatalog : used
+-- pdf.setnames : not to be used
+-- pdf.settrailer : used
+
+-- pdf.getinfo : used
+-- pdf.getcatalog : used
+-- pdf.getnames : not to be used
+-- pdf.gettrailer : used
+
+local pdf = pdf
+local factor = number.dimenfactors.bp
+
+if pdf.setinfo then
+ -- table.setmetatablenewindex(pdf,function(t,k,v)
+ -- report_blocked("'pdf.%s' is not supported",k)
+ -- end)
+ -- the getters are harmless
+end
+
+if not pdf.setinfo then
+ function pdf.setinfo (s) pdf.info = s end
+ function pdf.setcatalog(s) pdf.catalog = s end
+ function pdf.setnames (s) pdf.names = s end
+ function pdf.settrailer(s) pdf.trailer = s end
+end
-local pdfreserveobject = pdf.reserveobj
-local pdfimmediateobject = pdf.immediateobj
-local pdfdeferredobject = pdf.obj
-local pdfreferenceobject = pdf.refobj
+if not pdf.getpos then
+ function pdf.getpos () return pdf.h, pdf.v end
+ function pdf.gethpos () return pdf.h end
+ function pdf.getvpos () return pdf.v end
+ function pdf.hasmatrix() return false end
+ function pdf.getmatrix() return 1, 0, 0, 1, 0, 0 end
+end
+
+if not pdf.setpageresources then
+ function pdf.setpageresources (s) pdf.pageresources = s end
+ function pdf.setpageattributes (s) pdf.pageattributes = s end
+ function pdf.setpagesattributes(s) pdf.pagesattributes = s end
+end
+
+local pdfsetinfo = pdf.setinfo
+local pdfsetcatalog = pdf.setcatalog
+local pdfsetnames = pdf.setnames
+local pdfsettrailer = pdf.settrailer
+
+local pdfsetpageresources = pdf.setpageresources
+local pdfsetpageattributes = pdf.setpageattributes
+local pdfsetpagesattributes = pdf.setpagesattributes
+
+local pdfgetpos = pdf.getpos
+local pdfgethpos = pdf.gethpos
+local pdfgetvpos = pdf.getvpos
+local pdfgetmatrix = pdf.getmatrix
+local pdfhasmatrix = pdf.hasmatrix
+
+local pdfreserveobject = pdf.reserveobj
+local pdfimmediateobject = pdf.immediateobj
+local pdfdeferredobject = pdf.obj
+local pdfreferenceobject = pdf.refobj
+
+-- function pdf.setinfo () report_blocked("'pdf.%s' is not supported","setinfo") end -- use lpdf.addtoinfo etc
+-- function pdf.setcatalog () report_blocked("'pdf.%s' is not supported","setcatalog") end
+-- function pdf.setnames () report_blocked("'pdf.%s' is not supported","setnames") end
+-- function pdf.settrailer () report_blocked("'pdf.%s' is not supported","settrailer") end
+-- function pdf.setpageresources () report_blocked("'pdf.%s' is not supported","setpageresources") end
+-- function pdf.setpageattributes () report_blocked("'pdf.%s' is not supported","setpageattributes") end
+-- function pdf.setpagesattributes() report_blocked("'pdf.%s' is not supported","setpagesattributes") end
+-- function pdf.registerannot () report_blocked("'pdf.%s' is not supported","registerannot") end
+
+local function pdfdisablecommand(command)
+ pdf[command] = function() report_blocked("'pdf.%s' is not supported",command) end
+end
+
+pdfdisablecommand("setinfo")
+pdfdisablecommand("setcatalog")
+pdfdisablecommand("setnames")
+pdfdisablecommand("settrailer")
+pdfdisablecommand("setpageresources")
+pdfdisablecommand("setpageattributes")
+pdfdisablecommand("setpagesattributes")
+pdfdisablecommand("registerannot")
local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end)
local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end)
local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end)
-local report_objects = logs.reporter("backend","objects")
-local report_finalizing = logs.reporter("backend","finalizing")
-
-local backends = backends
-
-backends.pdf = backends.pdf or {
+local backends = backends
+local pdfbackend = {
comment = "backend for directly generating pdf output",
nodeinjections = { },
codeinjections = { },
registrations = { },
tables = { },
}
+backends.pdf = pdfbackend
+lpdf = lpdf or { }
+local lpdf = lpdf
+
+local codeinjections = pdfbackend.codeinjections
+local nodeinjections = pdfbackend.nodeinjections
+
+codeinjections.getpos = pdfgetpos lpdf.getpos = pdfgetpos
+codeinjections.gethpos = pdfgethpos lpdf.gethpos = pdfgethpos
+codeinjections.getvpos = pdfgetvpos lpdf.getvpos = pdfgetvpos
+codeinjections.hasmatrix = pdfhasmatrix lpdf.hasmatrix = pdfhasmatrix
+codeinjections.getmatrix = pdfgetmatrix lpdf.getmatrix = pdfgetmatrix
+
+function lpdf.transform(llx,lly,urx,ury)
+ if pdfhasmatrix() then
+ local sx, rx, ry, sy = pdfgetmatrix()
+ local w, h = urx - llx, ury - lly
+ return llx, lly, llx + sy*w - ry*h, lly + sx*h - rx*w
+ else
+ return llx, lly, urx, ury
+ end
+end
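lpdf.transform maps a bounding box through the current matrix; with no matrix set the fallback pdf.hasmatrix() returns false and the box passes through unchanged, a quick sketch:

-- identity case: nothing to do
print(lpdf.transform(0,0,100,50))  -- 0 0 100 50
-- with a matrix sx rx ry sy the upper corner becomes
--   llx + sy*w - ry*h , lly + sx*h - rx*w   (w and h the original width and height)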
-lpdf = lpdf or { }
-local lpdf = lpdf
+-- function lpdf.rectangle(width,height,depth)
+-- local h, v = pdfgetpos()
+-- local llx, lly, urx, ury
+-- if pdfhasmatrix() then
+-- local sx, rx, ry, sy = pdfgetmatrix()
+-- llx = 0
+-- lly = -depth
+-- -- llx = ry * depth
+-- -- lly = -sx * depth
+-- urx = sy * width - ry * height
+-- ury = sx * height - rx * width
+-- else
+-- llx = 0
+-- lly = -depth
+-- urx = width
+-- ury = height
+-- return (h+llx)*factor, (v+lly)*factor, (h+urx)*factor, (v+ury)*factor
+-- end
+-- end
-local function tosixteen(str) -- an lpeg might be faster (no table)
- if not str or str == "" then
- return "<feff>" -- not () as we want an indication that it's unicode
+function lpdf.rectangle(width,height,depth)
+ local h, v = pdfgetpos()
+ if pdfhasmatrix() then
+ local sx, rx, ry, sy = pdfgetmatrix()
+ -- return (h+ry*depth)*factor, (v-sx*depth)*factor, (h+sy*width-ry*height)*factor, (v+sx*height-rx*width)*factor
+ return h *factor, (v- depth)*factor, (h+sy*width-ry*height)*factor, (v+sx*height-rx*width)*factor
else
- local r, n = { "<feff" }, 1
- for b in utfvalues(str) do
- n = n + 1
- if b < 0x10000 then
- r[n] = format("%04x",b)
- else
- -- r[n] = format("%04x%04x",b/1024+0xD800,b%1024+0xDC00)
- r[n] = format("%04x%04x",floor(b/1024),b%1024+0xDC00)
- end
- end
- n = n + 1
- r[n] = ">"
- return concat(r)
+ return h *factor, (v- depth)*factor, (h+ width )*factor, (v+ height )*factor
end
end
-lpdf.tosixteen = tosixteen
-
--- lpeg is some 5 times faster than gsub (in test) on escaping
+-- we could use a hash of predefined unicodes
--- local escapes = {
--- ["\\"] = "\\\\",
--- ["/"] = "\\/", ["#"] = "\\#",
--- ["<"] = "\\<", [">"] = "\\>",
--- ["["] = "\\[", ["]"] = "\\]",
--- ["("] = "\\(", [")"] = "\\)",
--- }
---
--- local escaped = Cs(Cc("(") * (S("\\/#<>[]()")/escapes + P(1))^0 * Cc(")"))
---
--- local function toeight(str)
+-- local function tosixteen(str) -- an lpeg might be faster (no table)
-- if not str or str == "" then
--- return "()"
+-- return "<feff>" -- not () as we want an indication that it's unicode
-- else
--- return lpegmatch(escaped,str)
+-- local r, n = { "<feff" }, 1
+-- for b in utfvalues(str) do
+-- n = n + 1
+-- if b < 0x10000 then
+-- r[n] = format("%04x",b)
+-- else
+-- -- r[n] = format("%04x%04x",b/1024+0xD800,b%1024+0xDC00)
+-- r[n] = format("%04x%04x",floor(b/1024),b%1024+0xDC00) --bit32.rshift(b,10)
+-- end
+-- end
+-- n = n + 1
+-- r[n] = ">"
+-- return concat(r)
-- end
-- end
---
--- -- no need for escaping .. just use unicode instead
--- \0 \t \n \r \f <space> ( ) [ ] { } / %
+local cache = table.setmetatableindex(function(t,k) -- can be made weak
+ local v = utfbyte(k)
+ if v < 0x10000 then
+ v = format("%04x",v)
+ else
+ -- v = format("%04x%04x",v/1024+0xD800,v%1024+0xDC00)
+ v = format("%04x%04x",floor(v/1024),v%1024+0xDC00)
+ end
+ t[k] = v
+ return v
+end)
+
+local escaped = Cs(Cc("(") * (S("\\()")/"\\%0" + P(1))^0 * Cc(")"))
+local unified = Cs(Cc("<feff") * (lpeg.patterns.utf8character/cache)^1 * Cc(">"))
-local function toeight(str)
- return "(" .. str .. ")"
+local function tosixteen(str) -- an lpeg might be faster (no table)
+ if not str or str == "" then
+ return "<feff>" -- not () as we want an indication that it's unicode
+ else
+ return lpegmatch(unified,str)
+ end
+end
+
+local more = 0
+
+local pattern = C(4) / function(s) -- needs checking !
+ local now = tonumber(s,16)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ return utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ return "" -- else the c's end up in the stream
+ else
+ return utfchar(now)
+ end
end
-lpdf.toeight = toeight
+local pattern = P(true) / function() more = 0 end * Cs(pattern^0)
+
+local function fromsixteen(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+end
+
+local toregime = regimes.toregime
+local fromregime = regimes.fromregime
+
+local function topdfdoc(str,default)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(escaped,toregime("pdfdoc",str,default)) -- could be combined if needed
+ end
+end
---~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0)
+local function frompdfdoc(str)
+ if not str or str == "" then
+ return ""
+ else
+ return fromregime("pdfdoc",str)
+ end
+end
---~ local function cleaned(str)
---~ return (str and str ~= "" and lpegmatch(escaped,str)) or ""
---~ end
+if not toregime then topdfdoc = function(s) return s end end
+if not fromregime then frompdfdoc = function(s) return s end end
---~ lpdf.cleaned = cleaned -- not public yet
+local function toeight(str)
+ if not str or str == "" then
+ return "()"
+ else
+ return lpegmatch(escaped,str)
+ end
+end
+
+lpdf.tosixteen = tosixteen
+lpdf.toeight = toeight
+lpdf.topdfdoc = topdfdoc
+lpdf.fromsixteen = fromsixteen
+lpdf.frompdfdoc = frompdfdoc
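A few illustrative calls for the string helpers just defined; the outputs follow directly from the lpeg patterns above:

print(lpdf.toeight("Hello (world)"))  -- (Hello \(world\))
print(lpdf.tosixteen("Hello"))        -- <feff00480065006c006c006f>
print(lpdf.fromsixteen("00480069"))   -- Hi (groups of four hex digits)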
local function merge_t(a,b)
local t = { }
@@ -106,34 +316,44 @@ local function merge_t(a,b)
return setmetatable(t,getmetatable(a))
end
+local f_key_null = formatters["/%s null"]
local f_key_value = formatters["/%s %s"]
local f_key_dictionary = formatters["/%s << % t >>"]
local f_dictionary = formatters["<< % t >>"]
local f_key_array = formatters["/%s [ % t ]"]
local f_array = formatters["[ % t ]"]
+local f_key_number = formatters["/%s %F"]
+local f_tonumber = formatters["%F"]
+
+-- local f_key_value = formatters["/%s %s"]
+-- local f_key_dictionary = formatters["/%s <<% t>>"]
+-- local f_dictionary = formatters["<<% t>>"]
+-- local f_key_array = formatters["/%s [% t]"]
+-- local f_array = formatters["[% t]"]
local tostring_a, tostring_d
tostring_d = function(t,contentonly,key)
- if not next(t) then
- if contentonly then
- return ""
- else
- return "<< >>"
- end
- else
+ if next(t) then
local r, rn = { }, 0
for k, v in next, t do
rn = rn + 1
local tv = type(v)
if tv == "string" then
r[rn] = f_key_value(k,toeight(v))
- elseif tv == "unicode" then
- r[rn] = f_key_value(k,tosixteen(v))
+ elseif tv == "number" then
+ r[rn] = f_key_number(k,v)
+ -- elseif tv == "unicode" then -- can't happen
+ -- r[rn] = f_key_value(k,tosixteen(v))
elseif tv == "table" then
local mv = getmetatable(v)
if mv and mv.__lpdftype then
- r[rn] = f_key_value(k,tostring(v))
+ -- if v == t then
+ -- report_objects("ignoring circular reference in dictionary")
+ -- r[rn] = f_key_null(k)
+ -- else
+ r[rn] = f_key_value(k,tostring(v))
+ -- end
elseif v[1] then
r[rn] = f_key_value(k,tostring_a(v))
else
@@ -150,31 +370,36 @@ tostring_d = function(t,contentonly,key)
else
return f_dictionary(r)
end
+ elseif contentonly then
+ return ""
+ else
+ return "<< >>"
end
end
tostring_a = function(t,contentonly,key)
local tn = #t
- if tn == 0 then
- if contentonly then
- return ""
- else
- return "[ ]"
- end
- else
+ if tn ~= 0 then
local r = { }
for k=1,tn do
local v = t[k]
local tv = type(v)
if tv == "string" then
r[k] = toeight(v)
- elseif tv == "unicode" then
- r[k] = tosixteen(v)
+ elseif tv == "number" then
+ r[k] = f_tonumber(v)
+ -- elseif tv == "unicode" then
+ -- r[k] = tosixteen(v)
elseif tv == "table" then
local mv = getmetatable(v)
local mt = mv and mv.__lpdftype
if mt then
- r[k] = tostring(v)
+ -- if v == t then
+ -- report_objects("ignoring circular reference in array")
+ -- r[k] = "null"
+ -- else
+ r[k] = tostring(v)
+ -- end
elseif v[1] then
r[k] = tostring_a(v)
else
@@ -191,40 +416,47 @@ tostring_a = function(t,contentonly,key)
else
return f_array(r)
end
+ elseif contentonly then
+ return ""
+ else
+ return "[ ]"
end
end
-local tostring_x = function(t) return concat(t, " ") end
-local tostring_s = function(t) return toeight(t[1]) end
-local tostring_u = function(t) return tosixteen(t[1]) end
-local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
-local tostring_c = function(t) return t[1] end -- already prefixed (hashed)
-local tostring_z = function() return "null" end
-local tostring_t = function() return "true" end
-local tostring_f = function() return "false" end
-local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R") or "NULL" end
+local tostring_x = function(t) return concat(t," ") end
+local tostring_s = function(t) return toeight(t[1]) end
+local tostring_p = function(t) return topdfdoc(t[1],t[2]) end
+local tostring_u = function(t) return tosixteen(t[1]) end
+----- tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
+local tostring_n = function(t) return f_tonumber(t[1]) end -- tostring not needed
+local tostring_c = function(t) return t[1] end -- already prefixed (hashed)
+local tostring_z = function() return "null" end
+local tostring_t = function() return "true" end
+local tostring_f = function() return "false" end
+local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R") or "null" end
local tostring_v = function(t)
local s = t[1]
if type(s) == "table" then
- return concat(s,"")
+ return concat(s)
else
return s
end
end
-local function value_x(t) return t end -- the call is experimental
-local function value_s(t,key) return t[1] end -- the call is experimental
-local function value_u(t,key) return t[1] end -- the call is experimental
-local function value_n(t,key) return t[1] end -- the call is experimental
-local function value_c(t) return sub(t[1],2) end -- the call is experimental
-local function value_d(t) return tostring_d(t,true) end -- the call is experimental
-local function value_a(t) return tostring_a(t,true) end -- the call is experimental
-local function value_z() return nil end -- the call is experimental
-local function value_t(t) return t.value or true end -- the call is experimental
-local function value_f(t) return t.value or false end -- the call is experimental
-local function value_r() return t[1] or 0 end -- the call is experimental -- NULL
-local function value_v() return t[1] end -- the call is experimental
+local function value_x(t) return t end
+local function value_s(t) return t[1] end
+local function value_p(t) return t[1] end
+local function value_u(t) return t[1] end
+local function value_n(t) return t[1] end
+local function value_c(t) return sub(t[1],2) end
+local function value_d(t) return tostring_d(t,true) end
+local function value_a(t) return tostring_a(t,true) end
+local function value_z() return nil end
+local function value_t(t) return t.value or true end
+local function value_f(t) return t.value or false end
+local function value_r() return t[1] or 0 end -- null
+local function value_v() return t[1] end
local function add_x(t,k,v) rawset(t,k,tostring(v)) end
@@ -233,6 +465,7 @@ local mt_d = { __lpdftype = "dictionary", __tostring = tostring_d, __call = valu
local mt_a = { __lpdftype = "array", __tostring = tostring_a, __call = value_a }
local mt_u = { __lpdftype = "unicode", __tostring = tostring_u, __call = value_u }
local mt_s = { __lpdftype = "string", __tostring = tostring_s, __call = value_s }
+local mt_p = { __lpdftype = "docstring", __tostring = tostring_p, __call = value_p }
local mt_n = { __lpdftype = "number", __tostring = tostring_n, __call = value_n }
local mt_c = { __lpdftype = "constant", __tostring = tostring_c, __call = value_c }
local mt_z = { __lpdftype = "null", __tostring = tostring_z, __call = value_z }
@@ -266,8 +499,12 @@ local function pdfstring(str,default)
return setmetatable({ str or default or "" },mt_s)
end
+local function pdfdocstring(str,default,defaultchar)
+ return setmetatable({ str or default or "", defaultchar or " " },mt_p)
+end
+
local function pdfunicode(str,default)
- return setmetatable({ str or default or "" },mt_u)
+ return setmetatable({ str or default or "" },mt_u) -- could be a string
end
local cache = { } -- can be weak
@@ -325,17 +562,33 @@ local function pdfboolean(b,default)
end
end
-local function pdfreference(r)
- return setmetatable({ r or 0 },mt_r)
+local r_zero = setmetatable({ 0 },mt_r)
+
+local function pdfreference(r) -- maybe make a weak table
+ if r and r ~= 0 then
+ return setmetatable({ r },mt_r)
+ else
+ return r_zero
+ end
end
+local v_zero = setmetatable({ 0 },mt_v)
+local v_empty = setmetatable({ "" },mt_v)
+
local function pdfverbose(t) -- maybe check for type
- return setmetatable({ t or "" },mt_v)
+ if t == 0 then
+ return v_zero
+ elseif t == "" then
+ return v_empty
+ else
+ return setmetatable({ t },mt_v)
+ end
end
lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE
lpdf.dictionary = pdfdictionary
lpdf.array = pdfarray
+lpdf.docstring = pdfdocstring
lpdf.string = pdfstring
lpdf.unicode = pdfunicode
lpdf.number = pdfnumber
@@ -345,37 +598,19 @@ lpdf.boolean = pdfboolean
lpdf.reference = pdfreference
lpdf.verbose = pdfverbose
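The constructors keep their old interface; a tiny sketch of how the serialization looks (key order follows the hash, so the exact output may differ):

local d = lpdf.dictionary {
    Type  = lpdf.constant("Example"),
    Count = 2,
    Kids  = lpdf.array { lpdf.reference(12), lpdf.reference(13) },
}
-- tostring(d) yields something like
--   << /Type /Example /Count 2 /Kids [ 12 0 R 13 0 R ] >>
-- numbers now go through the %F formatter instead of being passed along as strings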
--- n = pdf.obj(n, str)
--- n = pdf.obj(n, "file", filename)
--- n = pdf.obj(n, "stream", streamtext, attrtext)
--- n = pdf.obj(n, "streamfile", filename, attrtext)
-
--- we only use immediate objects
-
--- todo: tracing
-
local names, cache = { }, { }
function lpdf.reserveobject(name)
- if name == "annot" then
- -- catch misuse
- return pdfreserveobject("annot")
- else
- local r = pdfreserveobject()
- if name then
- names[name] = r
- if trace_objects then
- report_objects("reserving number %a under name %a",r,name)
- end
- elseif trace_objects then
- report_objects("reserving number %a",r)
+ local r = pdfreserveobject() -- we don't support "annot"
+ if name then
+ names[name] = r
+ if trace_objects then
+ report_objects("reserving number %a under name %a",r,name)
end
- return r
+ elseif trace_objects then
+ report_objects("reserving number %a",r)
end
-end
-
-function lpdf.reserveannotation()
- return pdfreserveobject("annot")
+ return r
end
-- lpdf.immediateobject = pdfimmediateobject
@@ -383,11 +618,29 @@ end
-- lpdf.object = pdfdeferredobject
-- lpdf.referenceobject = pdfreferenceobject
-lpdf.pagereference = pdf.pageref or tex.pdfpageref
-lpdf.registerannotation = pdf.registerannot
+local pagereference = pdf.pageref -- tex.pdfpageref is obsolete
+local nofpages = 0
+
+function lpdf.pagereference(n)
+ if nofpages == 0 then
+ nofpages = structures.pages.nofpages
+ if nofpages == 0 then
+ nofpages = 1
+ end
+ end
+ if n > nofpages then
+ return pagereference(nofpages) -- or 1, could be configureable
+ else
+ return pagereference(n)
+ end
+end
-function lpdf.delayedobject(data) -- we will get rid of this one
- local n = pdfdeferredobject(data)
+function lpdf.delayedobject(data,n)
+ if n then
+ pdfdeferredobject(n,data)
+ else
+ n = pdfdeferredobject(data)
+ end
pdfreferenceobject(n)
return n
end
@@ -484,60 +737,10 @@ function lpdf.shareobjectreference(content)
end
end
---~ local d = lpdf.dictionary()
---~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } }
---~ local f = lpdf.dictionary { ["f"] = "ABC" }
---~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } }
-
---~ print(a)
---~ os.exit()
-
---~ d["test"] = lpdf.string ("test")
---~ d["more"] = "more"
---~ d["bool"] = true
---~ d["numb"] = 1234
---~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" }
---~ d["whow"] = lpdf.array { lpdf.string("ton") }
-
---~ a[#a+1] = lpdf.string("xxx")
---~ a[#a+1] = lpdf.string("yyy")
-
---~ d.what = a
-
---~ print(e)
-
---~ local d = lpdf.dictionary()
---~ d["abcd"] = { 1, 2, 3, "test" }
---~ print(d)
---~ print(d())
-
---~ local d = lpdf.array()
---~ d[#d+1] = 1
---~ d[#d+1] = 2
---~ d[#d+1] = 3
---~ d[#d+1] = "test"
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { 1, 2, 3, "test" }
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { a=1, b=2, c=3, d="test" }
---~ print(d)
-
---~ local s = lpdf.constant("xx")
---~ print(s) -- fails somehow
---~ print(s()) -- fails somehow
-
---~ local s = lpdf.boolean(false)
---~ s.value = true
---~ print(s)
---~ print(s())
-
-- three priority levels, default=2
-local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } }
+local pagefinalizers = { { }, { }, { } }
+local documentfinalizers = { { }, { }, { } }
local pageresources, pageattributes, pagesattributes
@@ -550,9 +753,9 @@ end
resetpageproperties()
local function setpageproperties()
- pdf.pageresources = pageresources ()
- pdf.pageattributes = pageattributes ()
- pdf.pagesattributes = pagesattributes()
+ pdfsetpageresources (pageresources ())
+ pdfsetpageattributes (pageattributes ())
+ pdfsetpagesattributes(pagesattributes())
end
local function addtopageresources (k,v) pageresources [k] = v end
@@ -606,8 +809,8 @@ end
lpdf.registerpagefinalizer = registerpagefinalizer
lpdf.registerdocumentfinalizer = registerdocumentfinalizer
-function lpdf.finalizepage()
- if not environment.initex then
+function lpdf.finalizepage(shipout)
+ if shipout and not environment.initex then
-- resetpageproperties() -- maybe better before
run(pagefinalizers,"page")
setpageproperties()
@@ -625,152 +828,252 @@ function lpdf.finalizedocument()
end
end
-backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook
+-- codeinjections.finalizepage = lpdf.finalizepage -- no longer triggered at the tex end
---~ callbacks.register("finish_pdfpage", lpdf.finalizepage)
-callbacks.register("finish_pdffile", lpdf.finalizedocument)
+if not callbacks.register("finish_pdfpage", lpdf.finalizepage) then
--- some minimal tracing, handy for checking the order
+ local find_tail = nodes.tail
+ local latelua_node = nodes.pool.latelua
-local function trace_set(what,key)
- if trace_resources then
- report_finalizing("setting key %a in %a",key,what)
+ function nodeinjections.finalizepage(head)
+ local t = find_tail(head.list)
+ if t then
+ local n = latelua_node("lpdf.finalizepage(true)") -- last in the shipout
+ t.next = n
+ n.prev = t
+ end
+ return head, true
end
+
+ nodes.tasks.appendaction("shipouts","normalizers","backends.pdf.nodeinjections.finalizepage")
+
end
-local function trace_flush(what)
- if trace_resources then
- report_finalizing("flushing %a",what)
+
+callbacks.register("finish_pdffile", lpdf.finalizedocument)
+
+
+do
+
+ -- some minimal tracing, handy for checking the order
+
+ local function trace_set(what,key)
+ if trace_resources then
+ report_finalizing("setting key %a in %a",key,what)
+ end
end
-end
-lpdf.protectresources = true
+ local function trace_flush(what)
+ if trace_resources then
+ report_finalizing("flushing %a",what)
+ end
+ end
-local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
-local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
-local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
+ lpdf.protectresources = true
-local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end
-local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end
-local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end
+ local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
+ local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
+ ----- names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
-function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end
-function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end
-function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end
+ local function flushcatalog()
+ if not environment.initex then
+ trace_flush("catalog")
+ catalog.Type = nil
+ pdfsetcatalog(catalog())
+ end
+ end
-local dummy = pdfreserveobject() -- else bug in hvmd due so some internal luatex conflict
+ local function flushinfo()
+ if not environment.initex then
+ trace_flush("info")
+ info.Type = nil
+ pdfsetinfo(info())
+ end
+ end
--- Some day I will implement a proper minimalized resource management.
+ -- local function flushnames()
+ -- if not environment.initex then
+ -- trace_flush("names")
+ -- names.Type = nil
+ -- pdfsetnames(names())
+ -- end
+ -- end
+
+ function lpdf.addtocatalog(k,v)
+ if not (lpdf.protectresources and catalog[k]) then
+ trace_set("catalog",k)
+ catalog[k] = v
+ end
+ end
-local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates)
-local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces)
-local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns)
-local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades)
+ function lpdf.addtoinfo(k,v)
+ if not (lpdf.protectresources and info[k]) then
+ trace_set("info",k)
+ info[k] = v
+ end
+ end
-local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end
-local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end
-local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end
-local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end
+ -- function lpdf.addtonames(k,v)
+ -- if not (lpdf.protectresources and names[k]) then
+ -- trace_set("names",k)
+ -- names[k] = v
+ -- end
+ -- end
-local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end
-local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end
-local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end
-local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end
+ local names = pdfdictionary {
+ -- Type = pdfconstant("Names")
+ }
-function lpdf.collectedresources()
- local ExtGState = next(d_extgstates ) and p_extgstates
- local ColorSpace = next(d_colorspaces) and p_colorspaces
- local Pattern = next(d_patterns ) and p_patterns
- local Shading = next(d_shades ) and p_shades
- if ExtGState or ColorSpace or Pattern or Shading then
- local collected = pdfdictionary {
- ExtGState = ExtGState,
- ColorSpace = ColorSpace,
- Pattern = Pattern,
- Shading = Shading,
- -- ProcSet = pdfarray { pdfconstant("PDF") },
- }
- return collected()
- else
- return ""
+ local function flushnames()
+ if next(names) and not environment.initex then
+ names.Type = pdfconstant("Names")
+ trace_flush("names")
+ lpdf.addtocatalog("Names",pdfreference(pdfimmediateobject(tostring(names))))
+ end
+ end
+
+ function lpdf.addtonames(k,v)
+ if not (lpdf.protectresources and names[k]) then
+ trace_set("names", k)
+ names [k] = v
+ end
+ end
+
+ local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates)
+ local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces)
+ local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns)
+ local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades)
+
+ local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end
+ local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end
+ local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end
+ local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end
+
+ local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end
+ local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end
+ local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end
+ local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end
+
+ function lpdf.collectedresources()
+ local ExtGState = next(d_extgstates ) and p_extgstates
+ local ColorSpace = next(d_colorspaces) and p_colorspaces
+ local Pattern = next(d_patterns ) and p_patterns
+ local Shading = next(d_shades ) and p_shades
+ if ExtGState or ColorSpace or Pattern or Shading then
+ local collected = pdfdictionary {
+ ExtGState = ExtGState,
+ ColorSpace = ColorSpace,
+ Pattern = Pattern,
+ Shading = Shading,
+ -- ProcSet = pdfarray { pdfconstant("PDF") },
+ }
+ return collected()
+ else
+ return ""
+ end
end
-end
-function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end
-function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end
-function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end
-function lpdf.adddocumentshade (k,v) d_shades [k] = v end
+ function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end
+ function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end
+ function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end
+ function lpdf.adddocumentshade (k,v) d_shades [k] = v end
-registerdocumentfinalizer(flushextgstates,3,"extended graphic states")
-registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
-registerdocumentfinalizer(flushpatterns,3,"patterns")
-registerdocumentfinalizer(flushshades,3,"shades")
+ registerdocumentfinalizer(flushextgstates,3,"extended graphic states")
+ registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
+ registerdocumentfinalizer(flushpatterns,3,"patterns")
+ registerdocumentfinalizer(flushshades,3,"shades")
-registerdocumentfinalizer(flushcatalog,3,"catalog")
-registerdocumentfinalizer(flushinfo,3,"info")
-registerdocumentfinalizer(flushnames,3,"names") -- before catalog
+ registerdocumentfinalizer(flushnames,3,"names") -- before catalog
+ registerdocumentfinalizer(flushcatalog,3,"catalog")
+ registerdocumentfinalizer(flushinfo,3,"info")
-registerpagefinalizer(checkextgstates,3,"extended graphic states")
-registerpagefinalizer(checkcolorspaces,3,"color spaces")
-registerpagefinalizer(checkpatterns,3,"patterns")
-registerpagefinalizer(checkshades,3,"shades")
+ registerpagefinalizer(checkextgstates,3,"extended graphic states")
+ registerpagefinalizer(checkcolorspaces,3,"color spaces")
+ registerpagefinalizer(checkpatterns,3,"patterns")
+ registerpagefinalizer(checkshades,3,"shades")
+
+end
-- in strc-bkm: lpdf.registerdocumentfinalizer(function() structures.bookmarks.place() end,1)
function lpdf.rotationcm(a)
local s, c = sind(a), cosd(a)
- return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c)
+ return format("%0.6F %0.6F %0.6F %0.6F 0 0 cm",c,s,-s,c)
end
-- ! -> universaltime
-local timestamp = os.date("%Y-%m-%dT%X") .. os.timezone(true)
+do
-function lpdf.timestamp()
- return timestamp
-end
+ local timestamp = os.date("%Y-%m-%dT%X") .. os.timezone(true)
-function lpdf.pdftimestamp(str)
- local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
- return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
-end
+ function lpdf.timestamp()
+ return timestamp
+ end
+
+ function lpdf.pdftimestamp(str)
+ local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
+ return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
+ end
+
+ function lpdf.id()
+ return format("%s.%s",tex.jobname,timestamp)
+ end
-function lpdf.id()
- return format("%s.%s",tex.jobname,timestamp)
end
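A quick check of the timestamp helpers; the output follows from the pattern and format string above:

local ts = "2010-01-19T23:27:50+01:00"
print(lpdf.pdftimestamp(ts))         -- D:20100119232750+01'00'
print(lpdf.pdftimestamp("garbage"))  -- nil, input must match the iso-like pattern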
+-- return nil is nicer in test prints
+
function lpdf.checkedkey(t,key,variant)
local pn = t and t[key]
- if pn then
+ if pn ~= nil then
local tn = type(pn)
if tn == variant then
if variant == "string" then
- return pn ~= "" and pn or nil
+ if pn ~= "" then
+ return pn
+ end
elseif variant == "table" then
- return next(pn) and pn or nil
+ if next(pn) then
+ return pn
+ end
else
return pn
end
- elseif tn == "string" and variant == "number" then
- return tonumber(pn)
+ elseif tn == "string" then
+ if variant == "number" then
+ return tonumber(pn)
+ elseif variant == "boolean" then
+ return isboolean(pn,nil,true)
+ end
end
end
+ -- return nil
end
function lpdf.checkedvalue(value,variant) -- code not shared
- if value then
+ if value ~= nil then
local tv = type(value)
if tv == variant then
if variant == "string" then
- return value ~= "" and value
+ if value ~= "" then
+ return value
+ end
elseif variant == "table" then
- return next(value) and value
+ if next(value) then
+ return value
+ end
else
return value
end
- elseif tv == "string" and variant == "number" then
- return tonumber(value)
+ elseif tv == "string" then
+ if variant == "number" then
+ return tonumber(value)
+ elseif variant == "boolean" then
+ return isboolean(value,nil,true)
+ end
end
end
+ -- return nil
end
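The checked helpers now also coerce strings to booleans via string.is_boolean; a few illustrative calls, with results that follow from the branches above:

print(lpdf.checkedvalue("12pt","string"))    -- 12pt
print(lpdf.checkedvalue("","string"))        -- nil, empty strings are filtered out
print(lpdf.checkedvalue("3","number"))       -- 3, strings are coerced to numbers
print(lpdf.checkedvalue("yes","boolean"))    -- true
print(lpdf.checkedkey({ depth = "no" },"depth","boolean"))  -- false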
function lpdf.limited(n,min,max,default)
@@ -790,34 +1093,121 @@ function lpdf.limited(n,min,max,default)
end
end
--- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks)
+-- lpdf.addtoinfo("ConTeXt.Version", environment.version)
-- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S
-- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname)
-- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com")
+-- lpdf.addtoinfo("ConTeXt.Support", "contextgarden.net")
-if not pdfreferenceobject then
-
- local delayed = { }
+-- if not pdfreferenceobject then
+--
+-- local delayed = { }
+--
+-- local function flush()
+-- local n = 0
+-- for k,v in next, delayed do
+-- pdfimmediateobject(k,v)
+-- n = n + 1
+-- end
+-- if trace_objects then
+-- report_objects("%s objects flushed",n)
+-- end
+-- delayed = { }
+-- end
+--
+-- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
+-- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
+--
+-- function lpdf.delayedobject(data)
+-- local n = pdfreserveobject()
+-- delayed[n] = data
+-- return n
+-- end
+--
+-- end
- local function flush()
- local n = 0
- for k,v in next, delayed do
- pdfimmediateobject(k,v)
- n = n + 1
- end
- if trace_objects then
- report_objects("%s objects flushed",n)
+-- setmetatable(pdf, {
+-- __index = function(t,k)
+-- if k == "info" then return pdf.getinfo()
+-- elseif k == "catalog" then return pdf.getcatalog()
+-- elseif k == "names" then return pdf.getnames()
+-- elseif k == "trailer" then return pdf.gettrailer()
+-- elseif k == "pageattribute" then return pdf.getpageattribute()
+-- elseif k == "pageattributes" then return pdf.getpageattributes()
+-- elseif k == "pageresources" then return pdf.getpageresources()
+-- elseif
+-- return nil
+-- end
+-- end,
+-- __newindex = function(t,k,v)
+-- if k == "info" then return pdf.setinfo(v)
+-- elseif k == "catalog" then return pdf.setcatalog(v)
+-- elseif k == "names" then return pdf.setnames(v)
+-- elseif k == "trailer" then return pdf.settrailer(v)
+-- elseif k == "pageattribute" then return pdf.setpageattribute(v)
+-- elseif k == "pageattributes" then return pdf.setpageattributes(v)
+-- elseif k == "pageresources" then return pdf.setpageresources(v)
+-- else
+-- rawset(t,k,v)
+-- end
+-- end,
+-- })
+
+
+-- The next variant of ActualText is what Taco and I could come up with
+-- eventually. As of September 2013 Acrobat copies okay, Sumatra copies a
+-- question mark, pdftotext injects an extra space and Okular adds a
+-- newline plus space.
+
+-- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [<feff>] TJ % t EMC ET"](code)
+
+do
+
+ local f_actual_text_one = formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ %s EMC ET"]
+ local f_actual_text_two = formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ %s EMC ET"]
+ local f_actual_text = formatters["/Span <</ActualText %s >> BDC"]
+
+ local context = context
+ local pdfdirect = nodes.pool.pdfdirect
+
+ function codeinjections.unicodetoactualtext(unicode,pdfcode)
+ if unicode < 0x10000 then
+ return f_actual_text_one(unicode,pdfcode)
+ else
+ return f_actual_text_two(unicode/1024+0xD800,unicode%1024+0xDC00,pdfcode)
end
- delayed = { }
end
- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
+ implement {
+ name = "startactualtext",
+ arguments = "string",
+ actions = function(str)
+ context(pdfdirect(f_actual_text(tosixteen(str))))
+ end
+ }
- function lpdf.delayedobject(data)
- local n = pdfreserveobject()
- delayed[n] = data
- return n
- end
+ implement {
+ name = "stopactualtext",
+ actions = function()
+ context(pdfdirect("EMC"))
+ end
+ }
end
+
+-- interface
+
+local lpdfverbose = lpdf.verbose
+
+implement { name = "lpdf_collectedresources", actions = { lpdf.collectedresources, context } }
+implement { name = "lpdf_addtocatalog", arguments = two_strings, actions = lpdf.addtocatalog }
+implement { name = "lpdf_addtoinfo", arguments = two_strings, actions = lpdf.addtoinfo }
+implement { name = "lpdf_addtonames", arguments = two_strings, actions = lpdf.addtonames }
+implement { name = "lpdf_addpageattributes", arguments = two_strings, actions = lpdf.addtopageattributes }
+implement { name = "lpdf_addpagesattributes", arguments = two_strings, actions = lpdf.addtopagesattributes }
+implement { name = "lpdf_addpageresources", arguments = two_strings, actions = lpdf.addtopageresources }
+implement { name = "lpdf_adddocumentextgstate", arguments = two_strings, actions = function(a,b) lpdf.adddocumentextgstate (a,lpdfverbose(b)) end }
+implement { name = "lpdf_adddocumentcolorspace", arguments = two_strings, actions = function(a,b) lpdf.adddocumentcolorspace(a,lpdfverbose(b)) end }
+implement { name = "lpdf_adddocumentpattern", arguments = two_strings, actions = function(a,b) lpdf.adddocumentpattern (a,lpdfverbose(b)) end }
+implement { name = "lpdf_adddocumentshade", arguments = two_strings, actions = function(a,b) lpdf.adddocumentshade (a,lpdfverbose(b)) end }
+
diff --git a/tex/context/base/lpdf-mis.lua b/tex/context/base/lpdf-mis.lua
index 174d17427..a1b12d8c0 100644
--- a/tex/context/base/lpdf-mis.lua
+++ b/tex/context/base/lpdf-mis.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['lpdf-mis'] = {
-- course there are a couple of more changes.
local next, tostring = next, tostring
-local format, gsub = string.format, string.gsub
+local format, gsub, formatters = string.format, string.gsub, string.formatters
local texset = tex.set
local backends, lpdf, nodes = backends, lpdf, nodes
@@ -41,8 +41,17 @@ local pdfverbose = lpdf.verbose
local pdfstring = lpdf.string
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
+local pdfaction = lpdf.action
+
+local formattedtimestamp = lpdf.pdftimestamp
+local adddocumentextgstate = lpdf.adddocumentextgstate
+local addtocatalog = lpdf.addtocatalog
+local addtoinfo = lpdf.addtoinfo
+local addtopageattributes = lpdf.addtopageattributes
+local addtonames = lpdf.addtonames
local variables = interfaces.variables
+local v_stop = variables.stop
local positive = register(pdfliteral("/GSpositive gs"))
local negative = register(pdfliteral("/GSnegative gs"))
@@ -59,8 +68,8 @@ local function initializenegative()
}
local negative = pdfdictionary { Type = g, TR = pdfreference(pdfflushstreamobject("{ 1 exch sub }",d)) }
local positive = pdfdictionary { Type = g, TR = pdfconstant("Identity") }
- lpdf.adddocumentextgstate("GSnegative", pdfreference(pdfflushobject(negative)))
- lpdf.adddocumentextgstate("GSpositive", pdfreference(pdfflushobject(positive)))
+ adddocumentextgstate("GSnegative", pdfreference(pdfflushobject(negative)))
+ adddocumentextgstate("GSpositive", pdfreference(pdfflushobject(positive)))
initializenegative = nil
end
@@ -68,8 +77,8 @@ local function initializeoverprint()
local g = pdfconstant("ExtGState")
local knockout = pdfdictionary { Type = g, OP = false, OPM = 0 }
local overprint = pdfdictionary { Type = g, OP = true, OPM = 1 }
- lpdf.adddocumentextgstate("GSknockout", pdfreference(pdfflushobject(knockout)))
- lpdf.adddocumentextgstate("GSoverprint", pdfreference(pdfflushobject(overprint)))
+ adddocumentextgstate("GSknockout", pdfreference(pdfflushobject(knockout)))
+ adddocumentextgstate("GSoverprint", pdfreference(pdfflushobject(overprint)))
initializeoverprint = nil
end
@@ -91,8 +100,6 @@ function nodeinjections.negative()
return copy_node(negative)
end
---
-
-- function codeinjections.addtransparencygroup()
-- -- png: /CS /DeviceRGB /I true
-- local d = pdfdictionary {
@@ -100,7 +107,7 @@ end
-- I = true,
-- K = true,
-- }
--- lpdf.registerpagefinalizer(function() lpdf.addtopageattributes("Group",d) end) -- hm
+-- lpdf.registerpagefinalizer(function() addtopageattributes("Group",d) end) -- hm
-- end
-- actions (todo: store and update when changed)
@@ -125,10 +132,10 @@ end
local function flushdocumentactions()
if opendocument then
- lpdf.addtocatalog("OpenAction",lpdf.action(opendocument))
+ addtocatalog("OpenAction",pdfaction(opendocument))
end
if closedocument then
- lpdf.addtocatalog("CloseAction",lpdf.action(closedocument))
+ addtocatalog("CloseAction",pdfaction(closedocument))
end
end
@@ -136,12 +143,12 @@ local function flushpageactions()
if openpage or closepage then
local d = pdfdictionary()
if openpage then
- d.O = lpdf.action(openpage)
+ d.O = pdfaction(openpage)
end
if closepage then
- d.C = lpdf.action(closepage)
+ d.C = pdfaction(closepage)
end
- lpdf.addtopageattributes("AA",d)
+ addtopageattributes("AA",d)
end
end
@@ -168,37 +175,37 @@ local function setupidentity()
if not title or title == "" then
title = tex.jobname
end
- lpdf.addtoinfo("Title", pdfunicode(title), title)
+ addtoinfo("Title", pdfunicode(title), title)
local subtitle = identity.subtitle or ""
if subtitle ~= "" then
- lpdf.addtoinfo("Subject", pdfunicode(subtitle), subtitle)
+ addtoinfo("Subject", pdfunicode(subtitle), subtitle)
end
local author = identity.author or ""
if author ~= "" then
- lpdf.addtoinfo("Author", pdfunicode(author), author) -- '/Author' in /Info, 'Creator' in XMP
+ addtoinfo("Author", pdfunicode(author), author) -- '/Author' in /Info, 'Creator' in XMP
end
local creator = identity.creator or ""
if creator ~= "" then
- lpdf.addtoinfo("Creator", pdfunicode(creator), creator) -- '/Creator' in /Info, 'CreatorTool' in XMP
+ addtoinfo("Creator", pdfunicode(creator), creator) -- '/Creator' in /Info, 'CreatorTool' in XMP
end
- lpdf.addtoinfo("CreationDate", pdfstring(lpdf.pdftimestamp(lpdf.timestamp())))
+ local currenttimestamp = lpdf.timestamp()
+ addtoinfo("CreationDate", pdfstring(formattedtimestamp(currenttimestamp)))
local date = identity.date or ""
- local pdfdate = lpdf.pdftimestamp(date)
+ local pdfdate = formattedtimestamp(date)
if pdfdate then
- lpdf.addtoinfo("ModDate", pdfstring(pdfdate), date)
+ addtoinfo("ModDate", pdfstring(pdfdate), date)
else
-- users should enter the date in 2010-01-19T23:27:50+01:00 format
-- and if not provided that way we use the creation time instead
- date = lpdf.timestamp()
- lpdf.addtoinfo("ModDate", pdfstring(lpdf.pdftimestamp(date)), date)
+ addtoinfo("ModDate", pdfstring(formattedtimestamp(currenttimestamp)), currenttimestamp)
end
local keywords = identity.keywords or ""
if keywords ~= "" then
keywords = gsub(keywords, "[%s,]+", " ")
- lpdf.addtoinfo("Keywords",pdfunicode(keywords), keywords)
+ addtoinfo("Keywords",pdfunicode(keywords), keywords)
end
local id = lpdf.id()
- lpdf.addtoinfo("ID", pdfstring(id), id) -- needed for pdf/x
+ addtoinfo("ID", pdfstring(id), id) -- needed for pdf/x
done = true
else
-- no need for a message
@@ -225,7 +232,7 @@ local function flushjavascripts()
a[#a+1] = pdfstring(name)
a[#a+1] = pdfreference(pdfflushobject(j))
end
- lpdf.addtonames("JavaScript",pdfreference(pdfflushobject(pdfdictionary{ Names = a })))
+ addtonames("JavaScript",pdfreference(pdfflushobject(pdfdictionary{ Names = a })))
end
end
@@ -234,67 +241,93 @@ lpdf.registerdocumentfinalizer(flushjavascripts,"javascripts")
-- -- --
local pagespecs = {
- [variables.max] = { "FullScreen", false, false },
- [variables.bookmark] = { "UseOutlines", false, false },
- [variables.fit] = { "UseNone", false, true },
- [variables.doublesided] = { "UseNone", "TwoColumnRight", true },
- [variables.singlesided] = { "UseNone", false, false },
- [variables.default] = { "UseNone", "auto", false },
- [variables.auto] = { "UseNone", "auto", false },
- [variables.none] = { false, false, false },
+ [variables.max] = { mode = "FullScreen", layout = false, fit = false, fixed = false, duplex = false },
+ [variables.bookmark] = { mode = "UseOutlines", layout = false, fit = false, fixed = false, duplex = false },
+ [variables.fit] = { mode = "UseNone", layout = false, fit = true, fixed = false, duplex = false },
+ [variables.doublesided] = { mode = "UseNone", layout = "TwoColumnRight", fit = true, fixed = false, duplex = false },
+ [variables.singlesided] = { mode = "UseNone", layout = false, fit = false, fixed = false, duplex = false },
+ [variables.default] = { mode = "UseNone", layout = "auto", fit = false, fixed = false, duplex = false },
+ [variables.auto] = { mode = "UseNone", layout = "auto", fit = false, fixed = false, duplex = false },
+ [variables.none] = { mode = false, layout = false, fit = false, fixed = false, duplex = false },
+ -- new
+ [variables.fixed] = { mode = "UseNone", layout = "auto", fit = false, fixed = true, duplex = false }, -- noscale
+ [variables.landscape] = { mode = "UseNone", layout = "auto", fit = false, fixed = true, duplex = "DuplexFlipShortEdge" },
+ [variables.portrait] = { mode = "UseNone", layout = "auto", fit = false, fixed = true, duplex = "DuplexFlipLongEdge" },
+
}
local pagespec, topoffset, leftoffset, height, width, doublesided = "default", 0, 0, 0, 0, false
+local pdfpaperheight = tex.pdfpageheight
+local pdfpaperwidth = tex.pdfpagewidth
+
function codeinjections.setupcanvas(specification)
local paperheight = specification.paperheight
local paperwidth = specification.paperwidth
local paperdouble = specification.doublesided
if paperheight then
texset('global','pdfpageheight',paperheight)
+ pdfpaperheight = paperheight
end
if paperwidth then
texset('global','pdfpagewidth',paperwidth)
+ pdfpaperwidth = paperwidth
end
pagespec = specification.mode or pagespec
topoffset = specification.topoffset or 0
leftoffset = specification.leftoffset or 0
- height = specification.height or tex.pdfpageheight
- width = specification.width or tex.pdfpagewidth
+ height = specification.height or pdfpaperheight
+ width = specification.width or pdfpaperwidth
if paperdouble ~= nil then
doublesided = paperdouble
end
end
local function documentspecification()
+ if not pagespec or pagespec == "" then
+ pagespec = variables.default
+ end
+ -- local settings = utilities.parsers.settings_to_array(pagespec)
+ -- local spec = pagespecs[variables.default]
+ -- for i=1,#settings do
+ -- local s = pagespecs[settings[i]]
+ -- if s then
+ -- for k, v in next, s do
+ -- spec[k] = v
+ -- end
+ -- end
+ -- end
local spec = pagespecs[pagespec] or pagespecs[variables.default]
- if spec then
- local mode, layout, fit = spec[1], spec[2], spec[3]
- if layout == variables.auto then
- if doublesided then
- spec = pagespecs[variables.doublesided] -- to be checked voor interfaces
- if spec then
- mode, layout, fit = spec[1], spec[2], spec[3]
- end
- else
- layout = false
+ if spec.layout == "auto" then
+ if doublesided then
+ local s = pagespecs[variables.doublesided] -- to be checked for interfaces
+ for k, v in next, s do
+ spec[k] = v
end
+ else
+ spec.layout = false
end
- mode = mode and pdfconstant(mode)
- layout = layout and pdfconstant(layout)
- fit = fit and pdfdictionary { FitWindow = true }
- if layout then
- lpdf.addtocatalog("PageLayout",layout)
- end
- if mode then
- lpdf.addtocatalog("PageMode",mode)
- end
- if fit then
- lpdf.addtocatalog("ViewerPreferences",fit)
- end
- lpdf.addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
- lpdf.addtocatalog("Version", pdfconstant(format("1.%s",tex.pdfminorversion)))
end
+ local layout = spec.layout
+ local mode = spec.mode
+ local fit = spec.fit
+ local fixed = spec.fixed
+ local duplex = spec.duplex
+ if layout then
+ addtocatalog("PageLayout",pdfconstant(layout))
+ end
+ if mode then
+ addtocatalog("PageMode",pdfconstant(mode))
+ end
+ if fit or fixed or duplex then
+ addtocatalog("ViewerPreferences",pdfdictionary {
+ FitWindow = fit and true or nil,
+ PrintScaling = fixed and pdfconstant("None") or nil,
+ Duplex = duplex and pdfconstant(duplex) or nil,
+ })
+ end
+ addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
+ addtocatalog("Version", pdfconstant(format("1.%s",tex.pdfminorversion)))
end
-- temp hack: the mediabox is not under our control and has a precision of 4 digits
@@ -302,21 +335,21 @@ end
local factor = number.dimenfactors.bp
local function boxvalue(n) -- we could share them
- return pdfverbose(format("%0.4f",factor * n))
+ return pdfverbose(formatters["%0.4F"](factor * n))
end
local function pagespecification()
- local pageheight = tex.pdfpageheight
+ local pageheight = pdfpaperheight
local box = pdfarray { -- can be cached
boxvalue(leftoffset),
boxvalue(pageheight+topoffset-height),
boxvalue(width-leftoffset),
boxvalue(pageheight-topoffset),
}
- lpdf.addtopageattributes("CropBox",box) -- mandate for rendering
- lpdf.addtopageattributes("TrimBox",box) -- mandate for pdf/x
- -- lpdf.addtopageattributes("BleedBox",box)
- -- lpdf.addtopageattributes("ArtBox",box)
+ addtopageattributes("CropBox",box) -- mandate for rendering
+ addtopageattributes("TrimBox",box) -- mandate for pdf/x
+ -- addtopageattributes("BleedBox",box)
+ -- addtopageattributes("ArtBox",box)
end
lpdf.registerpagefinalizer(pagespecification,"page specification")
@@ -337,34 +370,85 @@ local map = {
characters = "a",
}
+-- local function featurecreep()
+-- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
+-- local getstructureset = structures.sets.get
+-- for i=1,#pages do
+-- local p = pages[i]
+-- if not p then
+-- return -- fatal error
+-- else
+-- local numberdata = p.numberdata
+-- if numberdata then
+-- local conversionset = numberdata.conversionset
+-- if conversionset then
+-- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
+-- if conversion ~= lastconversion then
+-- lastconversion = conversion
+-- list[#list+1] = i - 1 -- pdf starts numbering at 0
+-- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
+-- end
+-- end
+-- end
+-- if not lastconversion then
+-- lastconversion = "numbers"
+-- list[#list+1] = i - 1 -- pdf starts numbering at 0
+-- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
+-- end
+-- end
+-- end
+-- addtocatalog("PageLabels", pdfdictionary { Nums = list })
+-- end
+
local function featurecreep()
- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
- local getstructureset = structures.sets.get
+ local pages = structures.pages.tobesaved
+ local list = pdfarray()
+ local getset = structures.sets.get
+ local stopped = false
+ local oldlabel = nil
+ local oldconversion = nil
for i=1,#pages do
local p = pages[i]
if not p then
return -- fatal error
+ end
+ local label = p.viewerprefix or ""
+ if p.status == v_stop then
+ if not stopped then
+ list[#list+1] = i - 1 -- pdf starts numbering at 0
+ list[#list+1] = pdfdictionary {
+ P = pdfunicode(label),
+ }
+ stopped = true
+ end
+ oldlabel = nil
+ oldconversion = nil
+ stopped = false
else
local numberdata = p.numberdata
+ local conversion = nil
+ local number = p.number
if numberdata then
local conversionset = numberdata.conversionset
if conversionset then
- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
- if conversion ~= lastconversion then
- lastconversion = conversion
- list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
- end
+ conversion = getset("structure:conversions",p.block,conversionset,1,"numbers")
end
end
- if not lastconversion then
- lastconversion = "numbers"
+ conversion = conversion and map[conversion] or map.numbers
+ if number == 1 or oldlabel ~= label or oldconversion ~= conversion then
list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
+ list[#list+1] = pdfdictionary {
+ S = pdfconstant(conversion),
+ St = number,
+ P = label ~= "" and pdfunicode(label) or nil,
+ }
end
+ oldlabel = label
+ oldconversion = conversion
+ stopped = false
end
end
- lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
+ addtocatalog("PageLabels", pdfdictionary { Nums = list })
end
lpdf.registerdocumentfinalizer(featurecreep,"featurecreep")
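A note on the reworked featurecreep finalizer above (not part of the diff): for a document whose front matter uses lowercase roman numbering and whose body restarts at 1 with arabic numbers, it would flush a /PageLabels number tree roughly like this (an assumed example, not generated output):

    /PageLabels << /Nums [ 0 << /S /r /St 1 >>
                           4 << /S /D /St 1 >> ] >>

that is, zero-based pdf page indices paired with dictionaries holding the numbering style (S), the start value (St) and, when a viewer prefix is given, the prefix string (P).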
diff --git a/tex/context/base/lpdf-mov.lua b/tex/context/base/lpdf-mov.lua
index 41db97e0c..87375e4ce 100644
--- a/tex/context/base/lpdf-mov.lua
+++ b/tex/context/base/lpdf-mov.lua
@@ -11,10 +11,10 @@ local format = string.format
local lpdf = lpdf
local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
local pdfconstant = lpdf.constant
local pdfdictionary = lpdf.dictionary
local pdfarray = lpdf.array
+local pdfborder = lpdf.border
local write_node = node.write
function nodeinjections.insertmovie(specification)
@@ -31,14 +31,16 @@ function nodeinjections.insertmovie(specification)
ShowControls = (specification.controls and true) or false,
Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil,
}
+ local bs, bc = pdfborder()
local action = pdfdictionary {
Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
+ Border = bs,
+ C = bc,
T = format("movie %s",specification.label),
Movie = moviedict,
A = controldict,
}
- write_node(pdfannotation_node(width,height,0,action())) -- test: context(...)
+ write_node(nodeinjections.annotation(width,height,0,action())) -- test: context(...)
end
function nodeinjections.insertsound(specification)
@@ -51,13 +53,15 @@ function nodeinjections.insertsound(specification)
local sounddict = pdfdictionary {
F = soundclip.filename
}
+ local bs, bc = pdfborder()
local action = pdfdictionary {
Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
+ Border = bs,
+ C = bc,
T = format("sound %s",specification.label),
Movie = sounddict,
A = controldict,
}
- write_node(pdfannotation_node(0,0,0,action())) -- test: context(...)
+ write_node(nodeinjections.annotation(0,0,0,action())) -- test: context(...)
end
end
diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua
index 6b104d2fa..6295947d0 100644
--- a/tex/context/base/lpdf-nod.lua
+++ b/tex/context/base/lpdf-nod.lua
@@ -6,21 +6,29 @@ if not modules then modules = { } end modules ['lpdf-nod'] = {
license = "see context related readme files"
}
-local formatters = string.formatters
+local type = type
-local copy_node = node.copy
-local new_node = node.new
+local formatters = string.formatters
-local nodepool = nodes.pool
-local register = nodepool.register
local whatsitcodes = nodes.whatsitcodes
local nodeinjections = backends.nodeinjections
-local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local setfield = nuts.setfield
+
+local copy_node = nuts.copy
+local new_node = nuts.new
+
+local nodepool = nuts.pool
+local register = nodepool.register
+
+local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfliteral,"mode",1)
local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
-local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
+local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) setfield(pdfdest,"named_id",1) -- xyz_zoom untouched
local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
local variables = interfaces.variables
@@ -38,14 +46,14 @@ local views = { -- beware, we do support the pdf keys but this is *not* official
function nodepool.pdfliteral(str)
local t = copy_node(pdfliteral)
- t.data = str
+ setfield(t,"data",str)
return t
end
function nodepool.pdfdirect(str)
local t = copy_node(pdfliteral)
- t.data = str
- t.mode = 1
+ setfield(t,"data",str)
+ setfield(t,"mode",1)
return t
end
@@ -57,16 +65,10 @@ function nodepool.pdfrestore()
return copy_node(pdfrestore)
end
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
- local t = copy_node(pdfsetmatrix)
- t.data = formatters["%s %s %s %s"](rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
- return t
-end
-
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
local t = copy_node(pdfsetmatrix)
if type(rx) == "string" then
- t.data = rx
+ setfield(t,"data",rx)
else
if not rx then
rx = 1
@@ -86,12 +88,12 @@ function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
end
if sx == 0 and sy == 0 then
if rx == 1 and ry == 1 then
- t.data = "1 0 0 1"
+ setfield(t,"data","1 0 0 1")
else
- t.data = formatters["%0.6f 0 0 %0.6f"](rx,ry)
+ setfield(t,"data",formatters["%0.6F 0 0 %0.6F"](rx,ry))
end
else
- t.data = formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry)
+ setfield(t,"data",formatters["%0.6F %0.6F %0.6F %0.6F"](rx,sx,sy,ry))
end
end
return t
@@ -101,24 +103,28 @@ nodeinjections.save = nodepool.pdfsave
nodeinjections.restore = nodepool.pdfrestore
nodeinjections.transform = nodepool.pdfsetmatrix
+-- the next one is implemented differently, using latelua
+
function nodepool.pdfannotation(w,h,d,data,n)
- local t = copy_node(pdfannot)
- if w and w ~= 0 then
- t.width = w
- end
- if h and h ~= 0 then
- t.height = h
- end
- if d and d ~= 0 then
- t.depth = d
- end
- if n then
- t.objnum = n
- end
- if data and data ~= "" then
- t.data = data
- end
- return t
+ report("don't use node based annotations!")
+ os.exit()
+-- local t = copy_node(pdfannot)
+-- if w and w ~= 0 then
+-- setfield(t,"width",w)
+-- end
+-- if h and h ~= 0 then
+-- setfield(t,"height",h)
+-- end
+-- if d and d ~= 0 then
+-- setfield(t,"depth",d)
+-- end
+-- if n then
+-- setfield(t,"objnum",n)
+-- end
+-- if data and data ~= "" then
+-- setfield(t,"data",data)
+-- end
+-- return t
end
-- (!) The next code in pdfdest.w is wrong:
@@ -135,40 +141,43 @@ end
-- so we need to force a matrix.
function nodepool.pdfdestination(w,h,d,name,view,n)
- local t = copy_node(pdfdest)
- local hasdimensions = false
- if w and w ~= 0 then
- t.width = w
- hasdimensions = true
- end
- if h and h ~= 0 then
- t.height = h
- hasdimensions = true
- end
- if d and d ~= 0 then
- t.depth = d
- hasdimensions = true
- end
- if n then
- t.objnum = n
- end
- view = views[view] or view or 1 -- fit is default
- t.dest_id = name
- t.dest_type = view
- if hasdimensions and view == 0 then -- xyz
- -- see (!) s -> m -> t -> r
- local s = copy_node(pdfsave)
- local m = copy_node(pdfsetmatrix)
- local r = copy_node(pdfrestore)
- m.data = "1 0 0 1"
- s.next = m
- m.next = t
- t.next = r
- m.prev = s
- t.prev = m
- r.prev = t
- return s -- a list
- else
- return t
- end
+ report("don't use node based destinations!")
+ os.exit()
+-- local t = copy_node(pdfdest)
+-- local hasdimensions = false
+-- if w and w ~= 0 then
+-- setfield(t,"width",w)
+-- hasdimensions = true
+-- end
+-- if h and h ~= 0 then
+-- setfield(t,"height",h)
+-- hasdimensions = true
+-- end
+-- if d and d ~= 0 then
+-- setfield(t,"depth",d)
+-- hasdimensions = true
+-- end
+-- if n then
+-- setfield(t,"objnum",n)
+-- end
+-- view = views[view] or view or 1 -- fit is default
+-- setfield(t,"dest_id",name)
+-- setfield(t,"dest_type",view)
+-- if hasdimensions and view == 0 then -- xyz
+-- -- see (!) s -> m -> t -> r
+-- -- linked
+-- local s = copy_node(pdfsave)
+-- local m = copy_node(pdfsetmatrix)
+-- local r = copy_node(pdfrestore)
+-- setfield(m,"data","1 0 0 1")
+-- setfield(s,"next",m)
+-- setfield(m,"next",t)
+-- setfield(t,"next",r)
+-- setfield(m,"prev",s)
+-- setfield(t,"prev",m)
+-- setfield(r,"prev",t)
+-- return s -- a list
+-- else
+-- return t
+-- end
end
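A minimal sketch (not part of the diff) of the access model the rewrite above switches to: nodes.nuts operates on direct nodes, so fields are read and written through accessor functions rather than by indexing a userdata node. The helper name below is invented.

    local nuts     = nodes.nuts
    local getfield = nuts.getfield
    local setfield = nuts.setfield

    local function copydata(target,source) -- both assumed to be nuts (direct nodes)
        setfield(target,"data",getfield(source,"data"))
    end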
diff --git a/tex/context/base/lpdf-pda.xml b/tex/context/base/lpdf-pda.xml
index 2d8e7b6f5..3f6b969c0 100644
--- a/tex/context/base/lpdf-pda.xml
+++ b/tex/context/base/lpdf-pda.xml
@@ -3,15 +3,20 @@
<!-- lpdf-pda.xml -->
<x:xmpmeta xmlns:x="adobe:ns:meta/">
+ <!-- http://www.pdfa.org/wp-content/uploads/2011/08/tn0008_predefined_xmp_properties_in_pdfa-1_2008-03-20.pdf -->
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
<rdf:Description rdf:about="" xmlns:dc="http://purl.org/dc/elements/1.1/">
<dc:format>application/pdf</dc:format>
<dc:creator>
<rdf:Seq>
- <rdf:li/>
+ <rdf:li xml:lang="x-default"/>
</rdf:Seq>
</dc:creator>
- <dc:description/>
+ <dc:description>
+ <rdf:Alt>
+ <rdf:li xml:lang="x-default"/>
+ </rdf:Alt>
+ </dc:description>
<dc:title>
<rdf:Alt>
<rdf:li xml:lang="x-default"/>
diff --git a/tex/context/base/lpdf-pdx.xml b/tex/context/base/lpdf-pdx.xml
index 42e11650e..d55e1fdf3 100644
--- a/tex/context/base/lpdf-pdx.xml
+++ b/tex/context/base/lpdf-pdx.xml
@@ -8,10 +8,14 @@
<dc:format>application/pdf</dc:format>
<dc:creator>
<rdf:Seq>
- <rdf:li/>
+ <rdf:li xml:lang="x-default"/>
</rdf:Seq>
</dc:creator>
- <dc:description/>
+ <dc:description>
+ <rdf:Alt>
+ <rdf:li xml:lang="x-default"/>
+ </rdf:Alt>
+ </dc:description>
<dc:title>
<rdf:Alt>
<rdf:li xml:lang="x-default"/>
diff --git a/tex/context/base/lpdf-ren.lua b/tex/context/base/lpdf-ren.lua
index 6af65f9de..61676d5a8 100644
--- a/tex/context/base/lpdf-ren.lua
+++ b/tex/context/base/lpdf-ren.lua
@@ -15,47 +15,66 @@ local settings_to_array = utilities.parsers.settings_to_array
local backends, lpdf, nodes, node = backends, lpdf, nodes, node
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-local viewerlayers = attributes.viewerlayers
-
-local references = structures.references
-
-references.executers = references.executers or { }
-local executers = references.executers
-
-local variables = interfaces.variables
-
-local v_no = variables.no
-local v_yes = variables.yes
-local v_start = variables.start
-local v_stop = variables.stop
-local v_reset = variables.reset
-local v_auto = variables.auto
-local v_random = variables.random
-
-local pdfconstant = lpdf.constant
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
-
-local pdf_ocg = pdfconstant("OCG")
-local pdf_ocmd = pdfconstant("OCMD")
-local pdf_off = pdfconstant("OFF")
-local pdf_on = pdfconstant("ON")
-local pdf_toggle = pdfconstant("Toggle")
-local pdf_setocgstate = pdfconstant("SetOCGState")
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+local viewerlayers = attributes.viewerlayers
+
+local references = structures.references
+
+references.executers = references.executers or { }
+local executers = references.executers
+
+local variables = interfaces.variables
+
+local v_no = variables.no
+local v_yes = variables.yes
+local v_start = variables.start
+local v_stop = variables.stop
+local v_reset = variables.reset
+local v_auto = variables.auto
+local v_random = variables.random
+
+local pdfconstant = lpdf.constant
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+
+local addtopageattributes = lpdf.addtopageattributes
+local addtopageresources = lpdf.addtopageresources
+local addtocatalog = lpdf.addtocatalog
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
+
+local pdf_ocg = pdfconstant("OCG")
+local pdf_ocmd = pdfconstant("OCMD")
+local pdf_off = pdfconstant("OFF")
+local pdf_on = pdfconstant("ON")
+local pdf_view = pdfconstant("View")
+local pdf_design = pdfconstant("Design")
+local pdf_toggle = pdfconstant("Toggle")
+local pdf_setocgstate = pdfconstant("SetOCGState")
+
+local copy_node = node.copy
+
+local pdf_print = {
+ [v_yes] = pdfdictionary { PrintState = pdf_on },
+ [v_no ] = pdfdictionary { PrintState = pdf_off },
+}
-local copy_node = node.copy
+local pdf_intent = {
+ [v_yes] = pdf_view,
+ [v_no] = pdf_design,
+}
-local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } }
+local pdf_export = {
+ [v_yes] = pdf_on,
+ [v_no] = pdf_off,
+}
-- We can have references to layers before they are placed, for instance from
-- hide and vide actions. This is why we need to be able to force usage of layers
@@ -95,10 +114,13 @@ local function useviewerlayer(name) -- move up so that we can use it as local
local nn = pdfreserveobject()
local nr = pdfreference(nn)
local nd = pdfdictionary {
- Type = pdf_ocg,
- Name = specification.title or "unknown",
- Intent = ((specification.editable ~= v_no) and pdf_design) or nil, -- disable layer hiding by user
- Usage = ((specification.printable == v_no) and lpdf_usage) or nil, -- printable or not
+ Type = pdf_ocg,
+ Name = specification.title or "unknown",
+ Usage = {
+ Intent = pdf_intent[specification.editable or v_yes], -- disable layer hiding by user (useless)
+ Print = pdf_print [specification.printable or v_yes], -- printable or not
+ Export = pdf_export[specification.export or v_yes], -- export or not
+ },
}
cache[#cache+1] = { nn, nd }
pdfln[tag] = nr -- was n
@@ -161,9 +183,17 @@ local function flushtextlayers()
ON = videlayers,
OFF = hidelayers,
BaseState = pdf_on,
+
+AS = pdfarray {
+ pdfdictionary {
+ Category = pdfarray { pdfconstant("Print") },
+ Event = pdfconstant("Print"),
+ OCGs = (viewerlayers.hasorder and sortedlayers) or nil,
+ }
+},
},
}
- lpdf.addtocatalog("OCProperties",d)
+ addtocatalog("OCProperties",d)
textlayers = nil
end
end
@@ -171,7 +201,7 @@ end
local function flushpagelayers() -- we can share these
if pagelayers then
- lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
+ addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
end
end
@@ -342,8 +372,8 @@ function codeinjections.setpagetransition(specification)
end
delay = tonumber(delay)
if delay and delay > 0 then
- lpdf.addtopageattributes("Dur",delay)
+ addtopageattributes("Dur",delay)
end
- lpdf.addtopageattributes("Trans",d)
+ addtopageattributes("Trans",d)
end
end
diff --git a/tex/context/base/lpdf-swf.lua b/tex/context/base/lpdf-swf.lua
index 12c80036f..88cdcc4ec 100644
--- a/tex/context/base/lpdf-swf.lua
+++ b/tex/context/base/lpdf-swf.lua
@@ -28,8 +28,6 @@ local checkedkey = lpdf.checkedkey
local codeinjections = backends.pdf.codeinjections
local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
-
local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end)
local report_swf = logs.reporter("backend","swf")
@@ -302,5 +300,5 @@ function backends.pdf.nodeinjections.insertswf(spec)
-- factor = spec.factor,
-- label = spec.label,
}
- context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
+ context(nodeinjections.annotation(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
end
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index 29ffcd207..79ccfe075 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -6,70 +6,107 @@ if not modules then modules = { } end modules ['lpdf-tag'] = {
license = "see context related readme files"
}
+local next = next
local format, match, concat = string.format, string.match, table.concat
-local lpegmatch = lpeg.match
+local lpegmatch, P, S, C = lpeg.match, lpeg.P, lpeg.S, lpeg.C
local utfchar = utf.char
+local settings_to_hash = utilities.parsers.settings_to_hash
+local formatters = string.formatters
local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
local report_tags = logs.reporter("backend","tags")
-local backends, lpdf, nodes = backends, lpdf, nodes
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-
-local tasks = nodes.tasks
-
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfboolean = lpdf.boolean
-local pdfconstant = lpdf.constant
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-local pdfpagereference = lpdf.pagereference
-
-local texgetcount = tex.getcount
-
-local nodepool = nodes.pool
-
-local pdfliteral = nodepool.pdfliteral
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
-
-local a_tagged = attributes.private('tagged')
-local a_image = attributes.private('image')
-
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local tosequence = nodes.tosequence
-local copy_node = node.copy
-local slide_nodelist = node.slide
-
-local structure_stack = { }
-local structure_kids = pdfarray()
-local structure_ref = pdfreserveobject()
-local parent_ref = pdfreserveobject()
-local root = { pref = pdfreference(structure_ref), kids = structure_kids }
-local tree = { }
-local elements = { }
-local names = pdfarray()
-local taglist = structures.tags.taglist
-local usedlabels = structures.tags.labels
-local properties = structures.tags.properties
-local usedmapping = { }
-
-local colonsplitter = lpeg.splitat(":")
-local dashsplitter = lpeg.splitat("-")
-
-local add_ids = false -- true
+local backends = backends
+local lpdf = lpdf
+local nodes = nodes
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+
+local tasks = nodes.tasks
+
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfboolean = lpdf.boolean
+local pdfconstant = lpdf.constant
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+local pdfpagereference = lpdf.pagereference
+
+local addtocatalog = lpdf.addtocatalog
+local addtopageattributes = lpdf.addtopageattributes
+
+local texgetcount = tex.getcount
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+
+local a_tagged = attributes.private('tagged')
+local a_image = attributes.private('image')
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local nodepool = nuts.pool
+local pdfliteral = nodepool.pdfliteral
+
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getlist = nuts.getlist
+local setfield = nuts.setfield
+
+local traverse_nodes = nuts.traverse
+local tosequence = nuts.tosequence
+local copy_node = nuts.copy
+local slide_nodelist = nuts.slide
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+
+local structure_stack = { }
+local structure_kids = pdfarray()
+local structure_ref = pdfreserveobject()
+local parent_ref = pdfreserveobject()
+local root = { pref = pdfreference(structure_ref), kids = structure_kids }
+local tree = { }
+local elements = { }
+local names = pdfarray()
+
+local structurestags = structures.tags
+local taglist = structurestags.taglist
+local specifications = structurestags.specifications
+local usedlabels = structurestags.labels
+local properties = structurestags.properties
+local lasttaginchain = structurestags.lastinchain
+
+local usedmapping = { }
+
+----- tagsplitter = structurestags.patterns.splitter
+
+-- local embeddedtags = false -- true will id all, for tracing
+-- local f_tagid = formatters["%s-%04i"]
+-- local embeddedfilelist = pdfarray() -- /AF crap
+--
+-- directives.register("structures.tags.embedmath",function(v)
+-- if not v then
+-- -- only enable
+-- elseif embeddedtags == true then
+-- -- already all tagged
+-- elseif embeddedtags then
+-- embeddedtags.math = true
+-- else
+-- embeddedtags = { math = true }
+-- end
+-- end)
-- function codeinjections.maptag(original,target,kind)
-- mapping[original] = { target, kind or "inline" }
@@ -79,14 +116,15 @@ local function finishstructure()
if #structure_kids > 0 then
local nums, n = pdfarray(), 0
for i=1,#tree do
- n = n + 1 ; nums[n] = i-1
+ n = n + 1 ; nums[n] = i - 1
n = n + 1 ; nums[n] = pdfreference(pdfflushobject(tree[i]))
end
local parenttree = pdfdictionary {
Nums = nums
}
-- we need to split names into smaller parts (e.g. alphabetic or so)
- if add_ids then
+ -- we already have code for that somewhere
+ if #names > 0 then
local kids = pdfdictionary {
Limits = pdfarray { names[1], names[#names-1] },
Names = names,
@@ -106,18 +144,19 @@ local function finishstructure()
Type = pdfconstant("StructTreeRoot"),
K = pdfreference(pdfflushobject(structure_kids)),
ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)),
- IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil,
+ IDTree = #names > 0 and pdfreference(pdfflushobject(idtree)) or nil,
RoleMap = rolemap,
}
pdfflushobject(structure_ref,structuretree)
- lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref))
+ addtocatalog("StructTreeRoot",pdfreference(structure_ref))
--
local markinfo = pdfdictionary {
Marked = pdfboolean(true),
-- UserProperties = pdfboolean(true),
-- Suspects = pdfboolean(true),
+ -- AF = #embeddedfilelist > 0 and pdfreference(pdfflushobject(embeddedfilelist)) or nil,
}
- lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
+ addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
--
for fulltag, element in next, elements do
pdfflushobject(element.knum,element.kids)
@@ -133,49 +172,110 @@ local pdf_mcr = pdfconstant("MCR")
local pdf_struct_element = pdfconstant("StructElem")
local function initializepage()
- index = 0
+ index = 0
pagenum = texgetcount("realpageno")
pageref = pdfreference(pdfpagereference(pagenum))
- list = pdfarray()
+ list = pdfarray()
tree[pagenum] = list -- we can flush after done, todo
end
local function finishpage()
-- flush what can be flushed
- lpdf.addtopageattributes("StructParents",pagenum-1)
+ addtopageattributes("StructParents",pagenum-1)
end
-- here we can flush and free elements that are finished
+local pdf_userproperties = pdfconstant("UserProperties")
+
+local function makeattribute(t)
+ if t and next(t) then
+ local properties = pdfarray()
+ for k, v in next, t do
+ properties[#properties+1] = pdfdictionary {
+ N = pdfunicode(k),
+ V = pdfunicode(v),
+ }
+ end
+ return pdfdictionary {
+ O = pdf_userproperties,
+ P = properties,
+ }
+ end
+end
+
local function makeelement(fulltag,parent)
- local tag, n = lpegmatch(dashsplitter,fulltag)
- local tg, detail = lpegmatch(colonsplitter,tag)
- local k, r = pdfarray(), pdfreserveobject()
- usedmapping[tg] = true
- tg = usedlabels[tg] or tg
+ local specification = specifications[fulltag]
+ local tag = specification.tagname
+ if tag == "ignore" then
+ return false
+ elseif tag == "mstackertop" or tag == "mstackerbot" or tag == "mstackermid"then
+ -- TODO
+ return true
+ end
+ --
+ local detail = specification.detail
+ local userdata = specification.userdata
+ --
+ usedmapping[tag] = true
+ --
+ -- specification.attribute is unique
+ --
+ local id = nil
+ -- local af = nil
+ -- if embeddedtags then
+ -- local tagname = specification.tagname
+ -- local tagindex = specification.tagindex
+ -- if embeddedtags == true or embeddedtags[tagname] then
+ -- id = f_tagid(tagname,tagindex)
+ -- af = job.fileobjreferences.collected[id]
+ -- if af then
+ -- local r = pdfreference(af)
+ -- af = pdfarray { r }
+ -- -- embeddedfilelist[#embeddedfilelist+1] = r
+ -- end
+ -- end
+ -- end
+ --
+ local k = pdfarray()
+ local r = pdfreserveobject()
+ local t = usedlabels[tag] or tag
local d = pdfdictionary {
Type = pdf_struct_element,
- S = pdfconstant(tg),
- ID = (add_ids and fulltag) or nil,
+ S = pdfconstant(t),
+ ID = id,
T = detail and detail or nil,
P = parent.pref,
Pg = pageref,
K = pdfreference(r),
+ A = userdata and makeattribute(userdata) or nil,
-- Alt = " Who cares ",
-- ActualText = " Hi Hans ",
+ AF = af,
}
local s = pdfreference(pdfflushobject(d))
- if add_ids then
- names[#names+1] = fulltag
+ if id then
+ names[#names+1] = id
names[#names+1] = s
end
local kids = parent.kids
kids[#kids+1] = s
- elements[fulltag] = { tag = tag, pref = s, kids = k, knum = r, pnum = pagenum }
+ local e = {
+ tag = t,
+ pref = s,
+ kids = k,
+ knum = r,
+ pnum = pagenum
+ }
+ elements[fulltag] = e
+ return e
end
-local function makecontent(parent,start,stop,slist,id)
- local tag, kids = parent.tag, parent.kids
+local f_BDC = formatters["/%s <</MCID %s>> BDC"]
+
+local function makecontent(parent,id)
+ local tag = parent.tag
+ local kids = parent.kids
local last = index
if id == "image" then
local d = pdfdictionary {
@@ -197,109 +297,304 @@ local function makecontent(parent,start,stop,slist,id)
kids[#kids+1] = d
end
--
- local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
- local prev = start.prev
- if prev then
- prev.next, bliteral.prev = bliteral, prev
- end
- start.prev, bliteral.next = bliteral, start
- if slist and slist.list == start then
- slist.list = bliteral
- elseif not prev then
- report_tags("this can't happen: injection in front of nothing")
- end
- --
- local eliteral = pdfliteral("EMC")
- local next = stop.next
- if next then
- next.prev, eliteral.next = eliteral, next
- end
- stop.next, eliteral.prev = eliteral, stop
- --
index = index + 1
- list[index] = parent.pref
- return bliteral, eliteral
+ list[index] = parent.pref -- page related list
+ --
+ return f_BDC(tag,last)
end
--- -- --
-
-local level, last, ranges, range = 0, nil, { }, nil
-
-local function collectranges(head,list)
- for n in traverse_nodes(head) do
- local id = n.id -- 14: image, 8: literal (mp)
- if id == glyph_code then
- local at = n[a_tagged]
- if not at then
- range = nil
- elseif last ~= at then
- range = { at, "glyph", n, n, list } -- attr id start stop list
- ranges[#ranges+1] = range
- last = at
- elseif range then
- range[4] = n -- stop
- end
- elseif id == hlist_code or id == vlist_code then
- local at = n[a_image]
- if at then
- local at = n[a_tagged]
+-- no need to adapt head, as we always operate on lists
+
+function nodeinjections.addtags(head)
+
+ local last = nil
+ local ranges = { }
+ local range = nil
+ local head = tonut(head)
+
+ local function collectranges(head,list)
+ for n in traverse_nodes(head) do
+ local id = getid(n) -- 14: image, 8: literal (mp)
+ if id == glyph_code then
+ local at = getattr(n,a_tagged)
if not at then
range = nil
+ elseif last ~= at then
+ range = { at, "glyph", n, n, list } -- attr id start stop list
+ ranges[#ranges+1] = range
+ last = at
+ elseif range then
+ range[4] = n -- stop
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local at = getattr(n,a_image)
+ if at then
+ local at = getattr(n,a_tagged)
+ if not at then
+ range = nil
+ else
+ ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+ end
+ last = nil
else
- ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+ local nl = getlist(n)
+ -- slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
+ collectranges(nl,n)
end
- last = nil
- else
- local nl = n.list
- slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
- collectranges(nl,n)
end
end
end
-end
-function nodeinjections.addtags(head)
- -- no need to adapt head, as we always operate on lists
- level, last, ranges, range = 0, nil, { }, nil
initializepage()
+
collectranges(head)
+
if trace_tags then
for i=1,#ranges do
local range = ranges[i]
- local attr, id, start, stop = range[1], range[2], range[3], range[4]
- local tags = taglist[attr]
+ local attr = range[1]
+ local id = range[2]
+ local start = range[3]
+ local stop = range[4]
+ local tags = taglist[attr]
if tags then -- not ok ... only first lines
- report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags)
+ report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags.taglist)
end
end
end
+
+ local top = nil
+ local noftop = 0
+
for i=1,#ranges do
- local range = ranges[i]
- local attr, id, start, stop, list = range[1], range[2], range[3], range[4], range[5]
- local tags = taglist[attr]
- local prev = root
- local noftags, tag = #tags, nil
- for j=1,noftags do
- local tag = tags[j]
- if not elements[tag] then
- makeelement(tag,prev)
+ local range = ranges[i]
+ local attr = range[1]
+ local id = range[2]
+ local start = range[3]
+ local stop = range[4]
+ local list = range[5]
+ local specification = taglist[attr]
+ local taglist = specification.taglist
+ local noftags = #taglist
+ local common = 0
+
+ if top then
+ for i=1,noftags >= noftop and noftop or noftags do
+ if top[i] == taglist[i] then
+ common = i
+ else
+ break
+ end
+ end
+ end
+
+ local prev = common > 0 and elements[taglist[common]] or root
+
+ for j=common+1,noftags do
+ local tag = taglist[j]
+ local prv = elements[tag] or makeelement(tag,prev)
+ if prv == false then
+ -- ignore this one
+ prev = false
+ break
+ elseif prv == true then
+ -- skip this one
+ else
+ prev = prv
end
- prev = elements[tag]
end
- local b, e = makecontent(prev,start,stop,list,id)
- if start == head then
- report_tags("this can't happen: parent list gets tagged")
- head = b
+
+ if prev then
+ -- use insert instead:
+ local literal = pdfliteral(makecontent(prev,id))
+ local prev = getprev(start)
+ if prev then
+ setfield(prev,"next",literal)
+ setfield(literal,"prev",prev)
+ end
+ setfield(start,"prev",literal)
+ setfield(literal,"next",start)
+ if list and getlist(list) == start then
+ setfield(list,"list",literal)
+ end
+ -- use insert instead:
+ local literal = pdfliteral("EMC")
+ local next = getnext(stop)
+ if next then
+ setfield(next,"prev",literal)
+ setfield(literal,"next",next)
+ end
+ setfield(stop,"next",literal)
+ setfield(literal,"prev",stop)
end
+ top = taglist
+ noftop = noftags
end
+
finishpage()
- -- can be separate feature
- --
- -- injectspans(head) -- does to work yet
- --
+
+ head = tonode(head)
return head, true
+
end
+-- variant: more structure but funny collapsing in viewer
+
+-- function nodeinjections.addtags(head)
+--
+-- local last, ranges, range = nil, { }, nil
+--
+-- local function collectranges(head,list)
+-- for n in traverse_nodes(head) do
+-- local id = getid(n) -- 14: image, 8: literal (mp)
+-- if id == glyph_code then
+-- local at = getattr(n,a_tagged)
+-- if not at then
+-- range = nil
+-- elseif last ~= at then
+-- range = { at, "glyph", n, n, list } -- attr id start stop list
+-- ranges[#ranges+1] = range
+-- last = at
+-- elseif range then
+-- range[4] = n -- stop
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local at = getattr(n,a_image)
+-- if at then
+-- local at = getattr(n,a_tagged)
+-- if not at then
+-- range = nil
+-- else
+-- ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+-- end
+-- last = nil
+-- else
+-- local nl = getlist(n)
+-- -- slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
+-- collectranges(nl,n)
+-- end
+-- end
+-- end
+-- end
+--
+-- initializepage()
+--
+-- head = tonut(head)
+-- collectranges(head)
+--
+-- if trace_tags then
+-- for i=1,#ranges do
+-- local range = ranges[i]
+-- local attr = range[1]
+-- local id = range[2]
+-- local start = range[3]
+-- local stop = range[4]
+-- local tags = taglist[attr]
+-- if tags then -- not ok ... only first lines
+-- report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags.taglist)
+-- end
+-- end
+-- end
+--
+-- local top = nil
+-- local noftop = 0
+-- local last = nil
+--
+-- for i=1,#ranges do
+-- local range = ranges[i]
+-- local attr = range[1]
+-- local id = range[2]
+-- local start = range[3]
+-- local stop = range[4]
+-- local list = range[5]
+-- local specification = taglist[attr]
+-- local taglist = specification.taglist
+-- local noftags = #taglist
+-- local tag = nil
+-- local common = 0
+-- -- local prev = root
+--
+-- if top then
+-- for i=1,noftags >= noftop and noftop or noftags do
+-- if top[i] == taglist[i] then
+-- common = i
+-- else
+-- break
+-- end
+-- end
+-- end
+--
+-- local result = { }
+-- local r = noftop - common
+-- if r > 0 then
+-- for i=1,r do
+-- result[i] = "EMC"
+-- end
+-- end
+--
+-- local prev = common > 0 and elements[taglist[common]] or root
+--
+-- for j=common+1,noftags do
+-- local tag = taglist[j]
+-- local prv = elements[tag] or makeelement(tag,prev)
+-- -- if prv == false then
+-- -- -- ignore this one
+-- -- prev = false
+-- -- break
+-- -- elseif prv == true then
+-- -- -- skip this one
+-- -- else
+-- prev = prv
+-- r = r + 1
+-- result[r] = makecontent(prev,id)
+-- -- end
+-- end
+--
+-- if r > 0 then
+-- local literal = pdfliteral(concat(result,"\n"))
+-- -- use insert instead:
+-- local literal = pdfliteral(result)
+-- local prev = getprev(start)
+-- if prev then
+-- setfield(prev,"next",literal)
+-- setfield(literal,"prev",prev)
+-- end
+-- setfield(start,"prev",literal)
+-- setfield(literal,"next",start)
+-- if list and getlist(list) == start then
+-- setfield(list,"list",literal)
+-- end
+-- end
+--
+-- top = taglist
+-- noftop = noftags
+-- last = stop
+--
+-- end
+--
+-- if last and noftop > 0 then
+-- local result = { }
+-- for i=1,noftop do
+-- result[i] = "EMC"
+-- end
+-- local literal = pdfliteral(concat(result,"\n"))
+-- -- use insert instead:
+-- local next = getnext(last)
+-- if next then
+-- setfield(next,"prev",literal)
+-- setfield(literal,"next",next)
+-- end
+-- setfield(last,"next",literal)
+-- setfield(literal,"prev",last)
+-- end
+--
+-- finishpage()
+--
+-- head = tonode(head)
+-- return head, true
+--
+-- end
+
-- this belongs elsewhere (export is not pdf related)
function codeinjections.enabletags(tg,lb)
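A note on the tagging pass above (not part of the diff): the literal built by f_BDC is injected before each tagged range and a bare EMC literal after it, so the page stream ends up with marked content fragments along these lines (example values assumed):

    /section <</MCID 0>> BDC
      ... the content of the range ...
    EMC

The MCID indexes the parent tree entry that finishpage() registers through the StructParents page attribute, which is how a viewer maps marked content back to the structure elements created by makeelement.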
diff --git a/tex/context/base/lpdf-u3d.lua b/tex/context/base/lpdf-u3d.lua
index 33269486c..c9f4a0369 100644
--- a/tex/context/base/lpdf-u3d.lua
+++ b/tex/context/base/lpdf-u3d.lua
@@ -17,7 +17,8 @@ if not modules then modules = { } end modules ['lpdf-u3d'] = {
-- point we will end up with a reimplementation. For instance
-- it makes sense to add the same activation code as with swf.
-local format, find = string.format, string.find
+local tonumber = tonumber
+local formatters, find = string.formatters, string.find
local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs
local backends, lpdf = backends, lpdf
@@ -38,8 +39,6 @@ local pdfflushstreamfileobject = lpdf.flushstreamfileobject
local checkedkey = lpdf.checkedkey
local limited = lpdf.limited
-local pdfannotation_node = nodes.pool.pdfannotation
-
local schemes = table.tohash {
"Artwork", "None", "White", "Day", "Night", "Hard",
"Primary", "Blue", "Red", "Cube", "CAD", "Headlamp",
@@ -429,13 +428,13 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
local preview = checkedkey(param,"preview","string")
if preview then
activationdict.A = pdfconstant("XA")
- local tag = format("%s:%s:%s",label,stream,preview)
+ local tag = formatters["%s:%s:%s"](label,stream,preview)
local ref = stored_pr[tag]
if not ref then
local figure = img.immediatewrite {
filename = preview,
- width = width,
- height = height
+ width = width,
+ height = height
}
ref = figure.objnum
stored_pr[tag] = ref
@@ -462,7 +461,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
},
ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
}
- local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw)
+ local pwd = pdfflushstreamobject(formatters["q /GS gs %F 0 0 %F 0 0 cm /IM Do Q"](factor*width,factor*height),pw)
annot.AP = pdfdictionary {
N = pdfreference(pwd)
}
@@ -484,5 +483,5 @@ function nodeinjections.insertu3d(spec)
controls = spec.controls,
label = spec.label,
}
- node.write(pdfannotation_node(spec.width,spec.height,0,annotation()))
+ node.write(nodeinjections.annotation(spec.width,spec.height,0,annotation()))
end
diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua
index 11ac82a08..22971c2b7 100644
--- a/tex/context/base/lpdf-wid.lua
+++ b/tex/context/base/lpdf-wid.lua
@@ -46,20 +46,18 @@ local pdfcolorspec = lpdf.colorspec
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
local pdfflushstreamfileobject = lpdf.flushstreamfileobject
-local pdfreserveannotation = lpdf.reserveannotation
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
local pdfshareobjectreference = lpdf.shareobjectreference
+local pdfaction = lpdf.action
+local pdfborder = lpdf.border
-local nodepool = nodes.pool
-
-local pdfannotation_node = nodepool.pdfannotation
+local pdftransparencyvalue = lpdf.transparencyvalue
+local pdfcolorvalues = lpdf.colorvalues
local hpack_node = node.hpack
local write_node = node.write -- test context(...) instead
-local pdf_border = pdfarray { 0, 0, 0 } -- can be shared
-
-- symbols
local presets = { } -- xforms
@@ -117,8 +115,8 @@ codeinjections.presetsymbollist = presetsymbollist
-- }
local attachment_symbols = {
- Graph = pdfconstant("GraphPushPin"),
- Paperclip = pdfconstant("PaperclipTag"),
+ Graph = pdfconstant("Graph"),
+ Paperclip = pdfconstant("Paperclip"),
Pushpin = pdfconstant("PushPin"),
}
@@ -170,19 +168,36 @@ end
local function analyzecolor(colorvalue,colormodel)
local cvalue = colorvalue and tonumber(colorvalue)
local cmodel = colormodel and tonumber(colormodel) or 3
- return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil
+ return cvalue and pdfarray { pdfcolorvalues(cmodel,cvalue) } or nil
end
local function analyzetransparency(transparencyvalue)
local tvalue = transparencyvalue and tonumber(transparencyvalue)
- return tvalue and lpdf.transparencyvalue(tvalue) or nil
+ return tvalue and pdftransparencyvalue(tvalue) or nil
end
-- Attachments
+local nofattachments = 0
+local attachments = { }
+local filestreams = { }
+local referenced = { }
+local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+local tobesavedobjrefs = utilities.storage.allocate()
+local collectedobjrefs = utilities.storage.allocate()
+
+local fileobjreferences = {
+ collected = collectedobjrefs,
+ tobesaved = tobesavedobjrefs,
+}
-local nofattachments, attachments, filestreams, referenced = 0, { }, { }, { }
+job.fileobjreferences = fileobjreferences
+
+local function initializer()
+ collectedobjrefs = job.fileobjreferences.collected or { }
+ tobesavedobjrefs = job.fileobjreferences.tobesaved or { }
+end
-local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+job.register('job.fileobjreferences.collected', tobesavedobjrefs, initializer)
local function flushembeddedfiles()
if next(filestreams) then
@@ -211,6 +226,7 @@ function codeinjections.embedfile(specification)
local hash = specification.hash or filename
local keepdir = specification.keepdir -- can change
local usedname = specification.usedname
+ local filetype = specification.filetype
if filename == "" then
filename = nil
end
@@ -248,11 +264,20 @@ function codeinjections.embedfile(specification)
end
end
end
- usedname = usedname ~= "" and usedname or filename
+ -- needs to be cleaned up:
+ usedname = usedname ~= "" and usedname or filename or name
local basename = keepdir == true and usedname or file.basename(usedname)
-local basename = gsub(basename,"%./","")
- local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file
- local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") }
+ local basename = gsub(basename,"%./","")
+ local savename = name ~= "" and name or basename
+ if not filetype or filetype == "" then
+ filetype = name and (filename and file.suffix(filename)) or "txt"
+ end
+ savename = file.addsuffix(savename,filetype) -- a suffix is mandatory for proper handling in the viewer
+ local mimetype = specification.mimetype
+ local a = pdfdictionary {
+ Type = pdfconstant("EmbeddedFile"),
+ Subtype = mimetype and mimetype ~= "" and pdfconstant(mimetype) or nil,
+ }
local f
if data then
f = pdfflushstreamobject(data,a)
@@ -267,6 +292,7 @@ local basename = gsub(basename,"%./","")
UF = pdfstring(savename),
EF = pdfdictionary { F = pdfreference(f) },
Desc = title ~= "" and pdfunicode(title) or nil,
+ -- AFRelationship = pdfconstant("Source"), -- some day maybe, not mandatory
}
local r = pdfreference(pdfflushobject(d))
filestreams[hash] = r
@@ -320,6 +346,10 @@ function nodeinjections.attachfile(specification)
aref = codeinjections.embedfile(specification)
attachments[registered] = aref
end
+ local reference = specification.reference
+ if reference and aref then
+ tobesavedobjrefs[reference] = aref[1]
+ end
if not aref then
report_attachment("skipping attachment, registered %a",registered)
-- already reported
@@ -342,7 +372,7 @@ function nodeinjections.attachfile(specification)
OC = analyzelayer(specification.layer),
}
local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
box.width, box.height, box.depth = width, height, depth
return box
end
@@ -427,19 +457,19 @@ function nodeinjections.comment(specification) -- brrr: seems to be done twice
local box
if usepopupcomments then
-- rather useless as we can hide/vide
- local nd = pdfreserveannotation()
- local nc = pdfreserveannotation()
+ local nd = pdfreserveobject()
+ local nc = pdfreserveobject()
local c = pdfdictionary {
Subtype = pdfconstant("Popup"),
Parent = pdfreference(nd),
}
d.Popup = pdfreference(nc)
box = hpack_node(
- pdfannotation_node(0,0,0,d(),nd),
- pdfannotation_node(width,height,depth,c(),nc)
+ nodeinjections.annotation(0,0,0,d(),nd),
+ nodeinjections.annotation(width,height,depth,c(),nc)
)
else
- box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
end
box.width, box.height, box.depth = width, height, depth -- redundant
return box
@@ -484,7 +514,7 @@ end
local ms, mu, mf = { }, { }, { }
local function delayed(label)
- local a = pdfreserveannotation()
+ local a = pdfreserveobject()
mu[label] = a
return pdfreference(a)
end
@@ -504,23 +534,25 @@ local function insertrenderingwindow(specification)
local actions = nil
if openpage or closepage then
actions = pdfdictionary {
- PO = (openpage and lpdf.action(openpage )) or nil,
- PC = (closepage and lpdf.action(closepage)) or nil,
+ PO = (openpage and pdfaction(openpage )) or nil,
+ PC = (closepage and pdfaction(closepage)) or nil,
}
end
local page = tonumber(specification.page) or texgetcount("realpageno") -- todo
- local r = mu[label] or pdfreserveannotation() -- why the reserve here?
+ local r = mu[label] or pdfreserveobject() -- why the reserve here?
local a = pdfdictionary {
S = pdfconstant("Rendition"),
R = mf[label],
OP = 0,
AN = pdfreference(r),
}
+ local bs, bc = pdfborder()
local d = pdfdictionary {
Subtype = pdfconstant("Screen"),
P = pdfreference(pdfpagereference(page)),
A = a, -- needed in order to make the annotation clickable (i.e. don't bark)
- Border = pdf_border,
+ Border = bs,
+ C = bc,
AA = actions,
}
local width = specification.width or 0
@@ -528,7 +560,7 @@ local function insertrenderingwindow(specification)
if height == 0 or width == 0 then
-- todo: sound needs no window
end
- write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref
+ write_node(nodeinjections.annotation(width,height,0,d(),r)) -- save ref
return pdfreference(r)
end
@@ -539,7 +571,7 @@ local function insertrendering(specification)
local option = settings_to_hash(specification.option)
if not mf[label] then
local filename = specification.filename
- local isurl = find(filename,"://")
+ local isurl = find(filename,"://",1,true)
-- local start = pdfdictionary {
-- Type = pdfconstant("MediaOffset"),
-- S = pdfconstant("T"), -- time
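A minimal sketch, not part of the patch itself, of the pattern these hunks converge on: both the border style and the border colour now come from pdfborder(), objects are reserved with the generic pdfreserveobject, and the widget node is produced through nodeinjections.annotation instead of the old pdfannotation_node helper.

    local bs, bc = pdfborder()
    local d = pdfdictionary {
        Subtype = pdfconstant("Screen"),
        Border  = bs,                          -- border style
        C       = bc,                          -- border colour
    }
    local r = pdfreserveobject()               -- instead of pdfreserveannotation
    write_node(nodeinjections.annotation(0,0,0,d(),r)) -- zero size, as in the popup case above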
diff --git a/tex/context/base/lpdf-xmp.lua b/tex/context/base/lpdf-xmp.lua
index 061ed0757..b1a795c4b 100644
--- a/tex/context/base/lpdf-xmp.lua
+++ b/tex/context/base/lpdf-xmp.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['lpdf-xmp'] = {
comment = "with help from Peter Rolf",
}
+local tostring = tostring
local format, random, char, gsub, concat = string.format, math.random, string.char, string.gsub, table.concat
local xmlfillin = xml.fillin
@@ -25,7 +26,7 @@ local pdfconstant = lpdf.constant
local pdfreference = lpdf.reference
local pdfflushstreamobject = lpdf.flushstreamobject
--- I wonder why this begin end is empty / w (no time now to look into it)
+-- I wonder why this begin end is empty / w (no time now to look into it) / begin can also be "?"
local xpacket = [[
<?xpacket begin="" id="%s"?>
@@ -49,7 +50,7 @@ local mapping = {
-- Dublin Core schema
["Author"] = "rdf:Description/dc:creator/rdf:Seq/rdf:li",
["Format"] = "rdf:Description/dc:format", -- optional, but nice to have
- ["Subject"] = "rdf:Description/dc:description",
+ ["Subject"] = "rdf:Description/dc:description/rdf:Alt/rdf:li",
["Title"] = "rdf:Description/dc:title/rdf:Alt/rdf:li",
-- XMP Basic schema
["CreateDate"] = "rdf:Description/xmp:CreateDate",
@@ -90,7 +91,12 @@ local function setxmpfile(name)
end
codeinjections.setxmpfile = setxmpfile
-commands.setxmpfile = setxmpfile
+
+interfaces.implement {
+ name = "setxmpfile",
+ arguments = "string",
+ actions = setxmpfile
+}
local function valid_xmp()
if not xmp then
@@ -104,7 +110,7 @@ local function valid_xmp()
if xmpfile ~= "" then
report_xmp("using file %a",xmpfile)
end
- local xmpdata = (xmpfile ~= "" and io.loaddata(xmpfile)) or ""
+ local xmpdata = xmpfile ~= "" and io.loaddata(xmpfile) or ""
xmp = xml.convert(xmpdata)
end
return xmp
@@ -119,16 +125,16 @@ end
-- redefined
-local addtoinfo = lpdf.addtoinfo
-local addxmpinfo = lpdf.addxmpinfo
+local pdfaddtoinfo = lpdf.addtoinfo
+local pdfaddxmpinfo = lpdf.addxmpinfo
function lpdf.addtoinfo(tag,pdfvalue,strvalue)
- addtoinfo(tag,pdfvalue)
+ pdfaddtoinfo(tag,pdfvalue)
local value = strvalue or gsub(tostring(pdfvalue),"^%((.*)%)$","%1") -- hack
if trace_info then
report_info("set %a to %a",tag,value)
end
- addxmpinfo(tag,value)
+ pdfaddxmpinfo(tag,value)
end
-- for the do-it-yourselvers
@@ -146,7 +152,8 @@ end
local t = { } for i=1,24 do t[i] = random() end
local function flushxmpinfo()
- commands.freezerandomseed(os.clock()) -- hack
+ commands.pushrandomseed()
+ commands.setrandomseed(os.time())
local t = { } for i=1,24 do t[i] = char(96 + random(26)) end
local packetid = concat(t)
@@ -156,23 +163,22 @@ local function flushxmpinfo()
local producer = format("LuaTeX-%0.2f.%s",tex.luatexversion/100,tex.luatexrevision)
local creator = "LuaTeX + ConTeXt MkIV"
local time = lpdf.timestamp()
- local fullbanner = tex.pdftexbanner
- -- local fullbanner = gsub(tex.pdftexbanner,"kpse.*","")
-
- addxmpinfo("DocumentID", documentid)
- addxmpinfo("InstanceID", instanceid)
- addxmpinfo("Producer", producer)
- addxmpinfo("CreatorTool", creator)
- addxmpinfo("CreateDate", time)
- addxmpinfo("ModifyDate", time)
- addxmpinfo("MetadataDate", time)
- addxmpinfo("PTEX.Fullbanner", fullbanner)
-
- addtoinfo("Producer", producer)
- addtoinfo("Creator", creator)
- addtoinfo("CreationDate", time)
- addtoinfo("ModDate", time)
--- addtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
+ local fullbanner = status.banner
+
+ pdfaddxmpinfo("DocumentID", documentid)
+ pdfaddxmpinfo("InstanceID", instanceid)
+ pdfaddxmpinfo("Producer", producer)
+ pdfaddxmpinfo("CreatorTool", creator)
+ pdfaddxmpinfo("CreateDate", time)
+ pdfaddxmpinfo("ModifyDate", time)
+ pdfaddxmpinfo("MetadataDate", time)
+ pdfaddxmpinfo("PTEX.Fullbanner", fullbanner)
+
+ pdfaddtoinfo("Producer", producer)
+ pdfaddtoinfo("Creator", creator)
+ pdfaddtoinfo("CreationDate", time)
+ pdfaddtoinfo("ModDate", time)
+ -- pdfaddtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
local blob = xml.tostring(xml.first(xmp or valid_xmp(),"/x:xmpmeta"))
local md = pdfdictionary {
@@ -196,7 +202,7 @@ local function flushxmpinfo()
local r = pdfflushstreamobject(blob,md,false) -- uncompressed
lpdf.addtocatalog("Metadata",pdfreference(r))
- commands.defrostrandomseed() -- hack
+ commands.poprandomseed() -- hack
end
-- this will be enabled when we can inhibit compression for a stream at the lua end
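A hedged usage sketch of the renamed wrappers: with the redefinition above a single lpdf.addtoinfo call feeds both the PDF /Info dictionary and, through the mapping table, the XMP packet, while lpdf.addxmpinfo only touches the packet. The keys and values below are purely illustrative.

    lpdf.addtoinfo ("Trapped", pdfconstant("False"), "False")   -- hypothetical /Info entry, mirrored into XMP
    lpdf.addxmpinfo("Subject", "a short abstract")               -- now lands in an rdf:Alt/rdf:li node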
diff --git a/tex/context/base/luat-bas.mkiv b/tex/context/base/luat-bas.mkiv
index a38912716..cb00d8f55 100644
--- a/tex/context/base/luat-bas.mkiv
+++ b/tex/context/base/luat-bas.mkiv
@@ -13,7 +13,8 @@
\writestatus{loading}{ConTeXt Lua Macros / Basic Lua Libraries}
-\registerctxluafile{l-lua} {1.001}
+\registerctxluafile{l-lua} {1.001} % before sandbox
+\registerctxluafile{l-sandbox} {1.001}
\registerctxluafile{l-package} {1.001}
\registerctxluafile{l-lpeg} {1.001}
\registerctxluafile{l-function}{1.001}
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index 4f044f9ac..8c224ad2c 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -118,7 +118,7 @@ end
function callbacks.freeze(name,freeze)
freeze = type(freeze) == "string" and freeze
- if find(name,"%*") then
+ if find(name,"*",1,true) then
local pattern = name
for name, _ in next, list do
if find(name,pattern) then
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index 3672c603e..0f6b8598f 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -7,7 +7,9 @@ if not modules then modules = { } end modules ['luat-cnf'] = {
}
local type, next, tostring, tonumber = type, next, tostring, tonumber
-local format, concat, find = string.format, table.concat, string.find
+local format, concat, find, lower, gsub = string.format, table.concat, string.find, string.lower, string.gsub
+
+local report = logs.reporter("system")
local allocate = utilities.storage.allocate
@@ -17,36 +19,22 @@ texconfig.shell_escape = 't'
luatex = luatex or { }
local luatex = luatex
-texconfig.error_line = 79 -- 79 -- obsolete
-texconfig.half_error_line = 50 -- 50 -- obsolete
+texconfig.max_print_line = 100000 -- frozen
+texconfig.max_in_open = 127 -- frozen
+texconfig.error_line = 79 -- frozen
+texconfig.half_error_line = 50 -- frozen
texconfig.expand_depth = 10000 -- 10000
texconfig.hash_extra = 100000 -- 0
texconfig.nest_size = 1000 -- 50
-texconfig.max_in_open = 500 -- 15
+texconfig.max_in_open = 500 -- 15 -- in fact it's limited to 127
texconfig.max_print_line = 10000 -- 79
texconfig.max_strings = 500000 -- 15000
texconfig.param_size = 25000 -- 60
texconfig.save_size = 50000 -- 4000
+texconfig.save_size = 100000 -- 4000
texconfig.stack_size = 10000 -- 300
--- local function initialize()
--- local t, variable = allocate(), resolvers.variable
--- for name, default in next, variablenames do
--- local name = variablenames[i]
--- local value = variable(name)
--- value = tonumber(value)
--- if not value or value == "" or value == 0 then
--- value = default
--- end
--- texconfig[name], t[name] = value, value
--- end
--- initialize = nil
--- return t
--- end
---
--- luatex.variables = initialize()
-
local stub = [[
-- checking
@@ -76,7 +64,7 @@ function texconfig.init()
"string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
},
basictex = { -- noad
- "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token",
+ "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token", "newtoken"
},
extralua = {
"gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "profiler", "unicode", "utf",
@@ -87,6 +75,7 @@ function texconfig.init()
obsolete = {
"fontforge", -- can be filled by luat-log
"kpse",
+ "token",
},
functions = {
"assert", "pcall", "xpcall", "error", "collectgarbage",
@@ -134,13 +123,14 @@ function texconfig.init()
-- shortcut and helper
+ local bytecode = lua.bytecode
+
local function init(start)
- local b = lua.bytecode
local i = start
local t = os.clock()
- while b[i] do
- b[i]() ;
- b[i] = nil ;
+ while bytecode[i] do
+ bytecode[i]() ;
+ bytecode[i] = nil ;
i = i + 1
-- collectgarbage('step')
end
@@ -159,6 +149,8 @@ function texconfig.init()
end
end
+ texconfig.init = function() end
+
end
-- we provide a qualified path
@@ -172,26 +164,55 @@ end)
]]
local variablenames = {
- "error_line", "half_error_line",
- "expand_depth", "hash_extra", "nest_size",
- "max_in_open", "max_print_line", "max_strings",
- "param_size", "save_size", "stack_size",
+ error_line = false,
+ half_error_line = false,
+ max_print_line = false,
+ max_in_open = false,
+ expand_depth = true,
+ hash_extra = true,
+ nest_size = true,
+ max_strings = true,
+ param_size = true,
+ save_size = true,
+ stack_size = true,
}
local function makestub()
name = name or (environment.jobname .. ".lui")
+ report("creating stub file %a using directives:",name)
+ report()
firsttable = firsttable or lua.firstbytecode
local t = {
"-- this file is generated, don't change it\n",
"-- configuration (can be overloaded later)\n"
}
- for _,v in next, variablenames do
+ for v, permitted in table.sortedhash(variablenames) do
+ local d = "luatex." .. gsub(lower(v),"[^%a]","")
+ local dv = directives.value(d)
local tv = texconfig[v]
- if tv and tv ~= "" then
+ if dv then
+ if not tv then
+ report(" %s = %s (%s)",d,dv,"configured")
+ tv = dv
+ elseif not permitted then
+ report(" %s = %s (%s)",d,tv,"frozen")
+ elseif tonumber(dv) >= tonumber(tv) then
+ report(" %s = %s (%s)",d,dv,"overloaded")
+ tv = dv
+ else
+ report(" %s = %s (%s)",d,tv,"preset kept")
+ end
+ elseif tv then
+ report(" %s = %s (%s)",d,tv,permitted and "preset" or "frozen")
+ else
+ report(" %s = <unset>",d)
+ end
+ if tv then
t[#t+1] = format("texconfig.%s=%s",v,tv)
end
end
io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable)))
+ logs.newline()
end
lua.registerfinalizer(makestub,"create stub file")
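A hedged sketch of what the new stub logic boils down to for one parameter; the directive name follows the lowercased, underscore-stripped scheme used above, and the numbers are just the presets from this file. A luatex.* directive may raise a permitted value but never lower it, while parameters flagged false in variablenames stay frozen.

    local preset    = texconfig.save_size                           -- 100000 in the presets above
    local requested = tonumber(directives.value("luatex.savesize")) -- directive-supplied value, if any
    if requested and requested >= preset then
        texconfig.save_size = requested                             -- reported as "overloaded"
    end
    -- smaller or absent values keep the preset ("preset kept"), and entries
    -- such as max_in_open are reported as "frozen" whatever the directive says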
diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua
index 8b015477f..c436ee6d7 100644
--- a/tex/context/base/luat-cod.lua
+++ b/tex/context/base/luat-cod.lua
@@ -51,6 +51,9 @@ function lua.registercode(filename,version)
bytecode[n] = code
lua.lastbytecode = n
end
+ elseif environment.initex then
+ texio.write_nl("\nerror loading file: " .. filename .. " (aborting)")
+ os.exit()
end
end
end
@@ -85,7 +88,7 @@ local environment = environment
-- no string.unquoted yet
local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1")
-local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or ""
+local sourcepath = find(sourcefile,"/",1,true) and gsub(sourcefile,"/[^/]+$","") or ""
local targetpath = "."
-- delayed (via metatable):
@@ -159,7 +162,7 @@ local function target_file(name)
return targetpath .. "/" .. name
end
-local function find_read_file (id,name)
+local function find_read_file(id,name)
return source_file(name)
end
diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua
index 5558e0303..5f2a0d281 100644
--- a/tex/context/base/luat-env.lua
+++ b/tex/context/base/luat-env.lua
@@ -102,14 +102,20 @@ function environment.luafilechunk(filename,silent) -- used for loading lua bytec
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
- if trace_locating then
+-- if trace_locating then
+-- report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+-- elseif not silent then
+-- texio.write("<",data and "+ " or "- ",fullname,">")
+-- end
+ if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
- if trace_locating then
+-- if trace_locating then
+-- report_lua("unknown file %a",filename)
+-- end
+ if not silent then
report_lua("unknown file %a",filename)
end
return nil
diff --git a/tex/context/base/luat-exe.lua b/tex/context/base/luat-exe.lua
index a57a5a006..d8d954a30 100644
--- a/tex/context/base/luat-exe.lua
+++ b/tex/context/base/luat-exe.lua
@@ -6,121 +6,68 @@ if not modules then modules = { } end modules ['luat-exe'] = {
license = "see context related readme files"
}
--- this module needs checking (very old and never really used, not even enabled)
+if not sandbox then require("l-sandbox") require("util-sbx") end -- for testing
-local match, find, gmatch = string.match, string.find, string.gmatch
-local concat = table.concat
-local select = select
+local type = type
-local report_executers = logs.reporter("system","executers")
+local executers = resolvers.executers or { }
+resolvers.executers = executers
-resolvers.executers = resolvers.executers or { }
-local executers = resolvers.executers
+local disablerunners = sandbox.disablerunners
+local registerbinary = sandbox.registerbinary
+local registerroot = sandbox.registerroot
-local permitted = { }
+local lpegmatch = lpeg.match
-local osexecute = os.execute
-local osexec = os.exec
-local osspawn = os.spawn
-local iopopen = io.popen
+local sc_splitter = lpeg.tsplitat(";")
+local cm_splitter = lpeg.tsplitat(",")
-local execute = osexecute
-local exec = osexec
-local spawn = osspawn
-local popen = iopopen
-
-local function register(...)
- for k=1,select("#",...) do
- local v = select(k,...)
- permitted[#permitted+1] = v == "*" and ".*" or v
- end
-end
+local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end)
+local execution_list directives.register("system.executionlist", function(v) execution_list = v end)
+local root_list directives.register("system.rootlist", function(v) root_list = v end)
-local function prepare(...)
- -- todo: make more clever first split
- local t = { ... }
- local n = #n
- local one = t[1]
- if n == 1 then
- if type(one) == 'table' then
- return one, concat(t," ",2,n)
- else
- local name, arguments = match(one,"^(.-)%s+(.+)$")
- if name and arguments then
- return name, arguments
- else
- return one, ""
+sandbox.initializer(function()
+ if execution_mode == "none" then
+ -- will be done later
+ elseif execution_mode == "list" then
+ if type(execution_list) == "string" then
+ execution_list = lpegmatch(cm_splitter,execution_list)
+ end
+ if type(execution_list) == "table" then
+ for i=1,#execution_list do
+ registerbinary(execution_list[i])
end
end
else
- return one, concat(t," ",2,n)
+ -- whatever else we have configured
end
-end
+end)
-local function executer(action)
- return function(...)
- local name, arguments = prepare(...)
- for k=1,#permitted do
- local v = permitted[k]
- if find(name,v) then
- return action(name .. " " .. arguments)
- else
- report_executers("not permitted: %s %s",name,arguments)
+sandbox.initializer(function()
+ if type(root_list) == "string" then
+ root_list = lpegmatch(sc_splitter,root_list)
+ end
+ if type(root_list) == "table" then
+ for i=1,#root_list do
+ local entry = root_list[i]
+ if entry ~= "" then
+ registerroot(entry)
end
end
- return action("")
end
-end
+end)
-local function finalize() -- todo: os.exec, todo: report ipv print
- execute = executer(osexecute)
- exec = executer(osexec)
- spawn = executer(osspawn)
- popen = executer(iopopen)
- finalize = function()
- report_executers("already finalized")
- end
- register = function()
- report_executers("already finalized, no registration permitted")
- end
- os.execute = execute
- os.exec = exec
- os.spawn = spawn
- io.popen = popen
-end
-
-executers.finalize = function(...) return finalize(...) end
-executers.register = function(...) return register(...) end
-executers.execute = function(...) return execute (...) end
-executers.exec = function(...) return exec (...) end
-executers.spawn = function(...) return spawn (...) end
-executers.popen = function(...) return popen (...) end
-
-local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end)
-local execution_list directives.register("system.executionlist", function(v) execution_list = v end)
-
-function executers.check()
+sandbox.finalizer(function()
if execution_mode == "none" then
- finalize()
- elseif execution_mode == "list" and execution_list ~= "" then
- for s in gmatch("[^%s,]",execution_list) do
- register(s)
- end
- finalize()
- else
- -- all
+ disablerunners()
end
-end
-
---~ resolvers.executers.register('.*')
---~ resolvers.executers.register('*')
---~ resolvers.executers.register('dir','ls')
---~ resolvers.executers.register('dir')
+end)
---~ resolvers.executers.finalize()
---~ resolvers.executers.execute('dir',"*.tex")
---~ resolvers.executers.execute("dir *.tex")
---~ resolvers.executers.execute("ls *.tex")
---~ os.execute('ls')
+-- Let's prevent abuse of these libraries (built-in support still works).
---~ resolvers.executers.check()
+sandbox.finalizer(function()
+ mplib = nil
+ epdf = nil
+ zip = nil
+ fontloader = nil
+end)
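A minimal sketch, not from the patch, of what the "list" branch amounts to once the directives system.executionmode=list and system.executionlist=gs,curl have been configured: each listed name is handed to the sandbox, and with system.executionmode=none the finalizer above simply calls disablerunners().

    sandbox.initializer(function()
        registerbinary("gs")     -- hypothetical allowed binary
        registerbinary("curl")   -- hypothetical allowed binary
    end)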
diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua
index 20a4a8fcd..92c1dd6c4 100644
--- a/tex/context/base/luat-fmt.lua
+++ b/tex/context/base/luat-fmt.lua
@@ -95,7 +95,7 @@ function environment.make_format(name)
-- generate format
local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
report_format("running command: %s\n",command)
- os.spawn(command)
+ os.execute(command)
-- remove related mem files
local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
-- report_format("removing related mplib format with pattern %a", pattern)
@@ -133,7 +133,7 @@ function environment.run_format(name,data,more)
else
local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
report_format("running command: %s",command)
- os.spawn(command)
+ os.execute(command)
end
end
end
diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua
index 587214b93..34e83e7bb 100644
--- a/tex/context/base/luat-ini.lua
+++ b/tex/context/base/luat-ini.lua
@@ -6,14 +6,6 @@ if not modules then modules = { } end modules ['luat-ini'] = {
license = "see context related readme files"
}
--- rather experimental down here ... adapted to lua 5.2 ... but still
--- experimental
-
-local debug = require("debug")
-
-local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
-local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable
-
--[[ldx--
<p>We cannot load anything yet. However, what we will do is reserve a few tables.
These can be used for runtime user data or third party modules and will not be
@@ -29,178 +21,6 @@ parametersets = parametersets or { } -- experimental for team
table.setmetatableindex(moduledata,table.autokey)
table.setmetatableindex(thirddata, table.autokey)
---[[ldx--
-<p>Please create a namespace within these tables before using them!</p>
-
-<typing>
-userdata ['my.name'] = { }
-thirddata['tricks' ] = { }
-</typing>
---ldx]]--
-
---[[ldx--
-<p>We could cook up a readonly model for global tables but it makes more sense
-to invite users to use one of the predefined namespaces. One can redefine the
-protector. After all, it's just a lightweight suggestive system, not a
-watertight one.</p>
---ldx]]--
-
-local global = _G
-global.global = global
-
-local dummy = function() end
-
---[[ldx--
-<p>Another approach is to freeze tables by using a metatable, this will be
-implemented stepwise.</p>
---ldx]]--
-
--- moduledata : no need for protection (only for developers)
--- isolatedata : full protection
--- userdata : protected
--- thirddata : protected
-
---[[ldx--
-<p>We could have a metatable that automaticaly creates a top level namespace.</p>
---ldx]]--
-
-local luanames = lua.name -- luatex itself
-
-lua.numbers = lua.numbers or { } local numbers = lua.numbers
-lua.messages = lua.messages or { } local messages = lua.messages
-
-storage.register("lua/numbers", numbers, "lua.numbers" )
-storage.register("lua/messages", messages, "lua.messages")
-
-local setfenv = setfenv or debug.setfenv -- < 5.2
-
-if setfenv then
-
- local protected = {
- -- global table
- global = global,
- -- user tables
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- -- reserved
- protect = dummy,
- unprotect = dummy,
- -- luatex
- tex = tex,
- -- lua
- string = string,
- table = table,
- lpeg = lpeg,
- math = math,
- io = io,
- file = file,
- bit32 = bit32,
- --
- context = context,
- }
-
- local protect_full = function(name)
- local t = { }
- for k, v in next, protected do
- t[k] = v
- end
- return t
- end
-
- local protect_part = function(name) -- adds
- local t = rawget(global,name)
- if not t then
- t = { }
- for k, v in next, protected do
- t[k] = v
- end
- rawset(global,name,t)
- end
- return t
- end
-
- protect = function(name)
- if name == "isolateddata" then
- setfenv(2,protect_full(name))
- else
- setfenv(2,protect_part(name or "shareddata"))
- end
- end
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- -- initialize once
- if name ~= "isolateddata" then
- protect_full(name or "shareddata")
- end
- end
-
-elseif libraries then -- assume >= 5.2
-
- local shared
-
- protect = function(name)
- if not shared then
- -- e.g. context is not yet known
- local public = {
- global = global,
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- protect = dummy,
- unprotect = dummy,
- context = context,
- }
- --
- for k, v in next, libraries.builtin do public[k] = v end
- for k, v in next, libraries.functions do public[k] = v end
- for k, v in next, libraries.obsolete do public[k] = nil end
- --
- shared = { __index = public }
- protect = function(name)
- local t = global[name] or { }
- setmetatable(t,shared) -- set each time
- return t
- end
- end
- return protect(name)
- end
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- end
-
-else
-
- protect = dummy
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- end
-
+if not global then
+ global = _G
end
-
diff --git a/tex/context/base/luat-ini.mkiv b/tex/context/base/luat-ini.mkiv
index a3a590311..b455a4158 100644
--- a/tex/context/base/luat-ini.mkiv
+++ b/tex/context/base/luat-ini.mkiv
@@ -34,7 +34,7 @@
% the \type {\normalexpanded} around \type {\directlua}. Something to discuss
% in the team.
-\unexpanded\def\startlua % \stoplua
+\normalprotected\def\startlua % \stoplua
{\begingroup
\obeylualines
\luat_start_lua_indeed}
@@ -42,7 +42,7 @@
\def\luat_start_lua_indeed#1\stoplua
{\normalexpanded{\endgroup\noexpand\directlua{#1}}} % \zerocount is default
-\unexpanded\def\startluacode % \stopluacode
+\normalprotected\def\startluacode % \stopluacode
{\begingroup
\obeylualines
\obeyluatokens
@@ -77,7 +77,7 @@
\edef\lua_letter_seven {\string\7} \edef\lua_letter_eight {\string\8}
\edef\lua_letter_nine {\string\9} \edef\lua_letter_zero {\string\0}
-\appendtoks
+\everyluacode {% \appendtoks
\let\\\lua_letter_backslash
\let\|\lua_letter_bar \let\-\lua_letter_dash
\let\(\lua_letter_lparent \let\)\lua_letter_rparent
@@ -92,85 +92,15 @@
\let\5\lua_letter_five \let\6\lua_letter_six
\let\7\lua_letter_seven \let\8\lua_letter_eight
\let\9\lua_letter_nine \let\0\lua_letter_zero
-\to \everyluacode
+} % \to \everyluacode
-\unexpanded\def\obeyluatokens
+\normalprotected\def\obeyluatokens
{\setcatcodetable\luacatcodes
\the\everyluacode}
-
\edef\luamajorversion{\ctxwrite{_MINORVERSION}}
\edef\luaminorversion{\ctxwrite{_MAJORVERSION}}
-%D \macros
-%D {definenamedlua}
-%D
-%D We provide an interface for defining instances:
-
-\def\s!lua{lua} \def\v!code{code} \def\!!name{name} \def\s!data{data}
-
-%D Beware: because \type {\expanded} is een convert command, the error
-%D message will show \type{<inserted text>} as part of the message.
-
-\installcorenamespace{luacode}
-
-\unexpanded\def\luat_start_named_lua_code#1%
- {\begingroup
- \obeylualines
- \obeyluatokens
- \csname\??luacode#1\endcsname}
-
-\unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet / we could use numbers instead (more efficient)
- {\ifcsname\??luacode#1\endcsname \else
- \scratchcounter\ctxlua{lua.registername("#1","#3")}%
- \normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\e!stop#1\v!code\endcsname}%
- %{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
- {\noexpand\normalexpanded{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}}%
- \expandafter\edef\csname\e!start#1\v!code\endcsname {\luat_start_named_lua_code{#1}}%
- \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
- \fi}
-
-%D We predefine a few.
-
-% \definenamedlua[module][module instance] % not needed
-
-\definenamedlua[user] [private user instance]
-\definenamedlua[third] [third party module instance]
-\definenamedlua[isolated][isolated instance]
-
-%D In practice this works out as follows:
-%D
-%D \startbuffer
-%D \startluacode
-%D context("LUA")
-%D \stopluacode
-%D
-%D \startusercode
-%D global.context("USER 1")
-%D context.par()
-%D context("USER 2")
-%D context.par()
-%D if characters then
-%D context("ACCESS directly")
-%D elseif global.characters then
-%D context("ACCESS via global")
-%D else
-%D context("NO ACCESS at all")
-%D end
-%D context.par()
-%D if bogus then
-%D context("ACCESS directly")
-%D elseif global.bogus then
-%D context("ACCESS via global")
-%D else
-%D context("NO ACCESS at all")
-%D end
-%D context.par()
-%D \stopusercode
-%D \stopbuffer
-%D
-%D \typebuffer
-
%D We need a way to pass strings safely to \LUA\ without the
%D need for tricky escaping. Compare:
%D
@@ -188,23 +118,28 @@
%D for it:
%D
%D \starttyping
-%D \long\edef\luaescapestring#1{\!!bs#1\!!es}
+%D \edef\luaescapestring#1{\!!bs#1\!!es}
%D \stoptyping
-\def\setdocumentfilename #1#2{\ctxlua{document.setfilename(#1,"#2")}}
-\def\setdocumentargument #1#2{\ctxlua{document.setargument("#1","#2")}}
-\def\setdocumentargumentdefault#1#2{\ctxlua{document.setdefaultargument("#1","#2")}}
-\def\getdocumentfilename #1{\ctxlua{document.getfilename("#1")}}
-\def\getdocumentargument #1{\ctxlua{document.getargument("#1")}}
-\def\getdocumentargumentdefault#1#2{\ctxlua{document.getargument("#1","#2")}}
-\def\doifdocumentargumentelse #1{\doifsomethingelse{\getdocumentargument{#1}}}
-\def\doifdocumentargument #1{\doifsomething {\getdocumentargument{#1}}}
-\def\doifnotdocumentargument #1{\doifnothing {\getdocumentargument{#1}}}
-\def\doifdocumentfilenameelse #1{\doifsomethingelse{\getdocumentfilename{#1}}}
-\def\doifdocumentfilename #1{\doifsomething {\getdocumentfilename{#1}}}
-\def\doifnotdocumentfilename #1{\doifnothing {\getdocumentfilename{#1}}}
-
-\let\doifelsedocumentargument\doifdocumentargumentelse
+\def\setdocumentfilename #1#2{\clf_setdocumentfilename\numexpr#1\relax{#2}}
+\def\setdocumentargument #1#2{\clf_setdocumentargument{#1}{#2}}
+\def\setdocumentargumentdefault#1#2{\clf_setdocumentdefaultargument{#1}{#2}}
+\def\getdocumentfilename #1{\clf_getdocumentfilename\numexpr#1\relax}
+\def\getdocumentargument #1{\clf_getdocumentargument{#1}{}}
+\def\setdocumentargument #1#2{\clf_setdocumentargument{#1}{#2}}
+\def\getdocumentargumentdefault#1#2{\clf_getdocumentargument{#1}{#2}}
+
+% seldom used so no need for speedy variants:
+
+\def\doifelsedocumentargument #1{\doifelsesomething{\clf_getdocumentargument{#1}}}
+\def\doifdocumentargument #1{\doifsomething {\clf_getdocumentargument{#1}}}
+\def\doifnotdocumentargument #1{\doifnothing {\clf_getdocumentargument{#1}}}
+\def\doifelsedocumentfilename #1{\doifelsesomething{\clf_getdocumentfilename\numexpr#1\relax}}
+\def\doifdocumentfilename #1{\doifsomething {\clf_getdocumentfilename\numexpr#1\relax}}
+\def\doifnotdocumentfilename #1{\doifnothing {\clf_getdocumentfilename\numexpr#1\relax}}
+
+\let\doifdocumentargumentelse\doifelsedocumentargument
+\let\doifdocumentfilenameelse\doifelsedocumentfilename
%D A handy helper:
@@ -212,7 +147,7 @@
%D Experimental:
-\unexpanded\def\startluaparameterset[#1]%
+\normalprotected\def\startluaparameterset[#1]%
{\begingroup
\obeylualines
\obeyluatokens
@@ -251,7 +186,7 @@
%D \ctxluacode{context("%0.5f",1/3)}
%D \stoptyping
-\unexpanded\def\ctxluacode
+\normalprotected\def\ctxluacode
{\begingroup
\obeylualines
\obeyluatokens
@@ -266,4 +201,83 @@
\def\luat_lua_code
{\normalexpanded{\endgroup\noexpand\directlua\expandafter{\the\scratchtoks}}} % \zerocount is default
+% \startctxfunction MyFunctionA
+% context(" A1 ")
+% \stopctxfunction
+%
+% \startctxfunctiondefinition MyFunctionB
+% context(" B2 ")
+% \stopctxfunctiondefinition
+%
+% \starttext
+% \dorecurse{10000}{\ctxfunction{MyFunctionA}} \page
+% \dorecurse{10000}{\MyFunctionB} \page
+% \dorecurse{10000}{\ctxlua{context(" C3 ")}} \page
+% \stoptext
+
+\installsystemnamespace{ctxfunction}
+
+\normalprotected\def\startctxfunctiondefinition #1 %
+ {\begingroup \obeylualines \obeyluatokens \luat_start_lua_function_definition_indeed{#1}}
+
+\installsystemnamespace{luafunction}
+
+\def\luat_start_lua_function_definition_indeed#1#2\stopctxfunctiondefinition
+ {\endgroup
+ \expandafter\chardef\csname\??luafunction#1\endcsname\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax
+ \expandafter\edef\csname#1\endcsname{\noexpand\luafunction\csname\??luafunction#1\endcsname}}
+
+\normalprotected\def\setctxluafunction#1#2% experiment
+ {\expandafter\chardef\csname\??luafunction#1\endcsname#2\relax
+ \expandafter\edef\csname#1\endcsname{\noexpand\luafunction\csname\??luafunction#1\endcsname}}
+
+\let\stopctxfunctiondefinition\relax
+
+\normalprotected\def\startctxfunction #1 %
+ {\begingroup \obeylualines \obeyluatokens \luat_start_lua_function_indeed{#1}}
+
+\def\luat_start_lua_function_indeed#1#2\stopctxfunction
+ {\endgroup\expandafter\edef\csname\??ctxfunction#1\endcsname{\noexpand\luafunction\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax}}
+
+\let\stopctxfunction\relax
+
+\def\ctxfunction#1%
+ {\csname\??ctxfunction#1\endcsname}
+
+% In theory this is faster due to the call not being wrapped in a function but in
+% practice the speedup can't be noticed. The actions called for often have lots of
+% lookups so an extra one doesn't matter much. The kind of calls differs a lot per
+% document and often there are other ways to optimize a style. For instance we can
+% gain a lot when defining a font, but when a frozen definition is used that gain
+% gets completely lost. For some calls (take list writers) it can get worse if only
+% because readability gets worse and passing is already efficient due to selective
+% flushing, while with the token scanners one has to scan all of them.
+
+% \startctxfunctiondefinition foo commands.foo() \stopctxfunctiondefinition
+%
+% \installctxfunction\foo{commands.foo}
+
+% This is a forward definition:
+
+\def\checkedstrippedcsname#1% this permits \strippedcsname{\xxx} and \strippedcsname{xxx}
+ {\expandafter\syst_helpers_checked_stripped_csname\string#1}
+
+\def\syst_helpers_checked_stripped_csname#1%
+ {\if\noexpand#1\letterbackslash\else#1\fi}
+
+\normalprotected\def\installctxfunction#1#2%
+ {\edef\m_syst_name{\checkedstrippedcsname#1}%
+ \global\expandafter\chardef\csname\??luafunction\m_syst_name\endcsname\ctxcommand{ctxfunction("#2",true)}\relax
+ \expandafter\xdef\csname\m_syst_name\endcsname{\noexpand\luafunction\csname\??luafunction\m_syst_name\endcsname}}
+
+\normalprotected\def\installctxscanner#1#2%
+ {\edef\m_syst_name{\checkedstrippedcsname#1}%
+ \global\expandafter\chardef\csname\??luafunction\m_syst_name\endcsname\ctxcommand{ctxscanner("\m_syst_name","#2",true)}\relax
+ \expandafter\xdef\csname\m_syst_name\endcsname{\noexpand\luafunction\csname\??luafunction\m_syst_name\endcsname}}
+
+\normalprotected\def\resetctxscanner#1%
+ {\edef\m_syst_name{\checkedstrippedcsname#1}%
+ \global\expandafter\chardef\csname\??luafunction\m_syst_name\endcsname\zerocount
+ \global\expandafter\let\csname\m_syst_name\endcsname\relax}
+
\protect \endinput
diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua
index 52f14683e..e1772af4e 100644
--- a/tex/context/base/luat-iop.lua
+++ b/tex/context/base/luat-iop.lua
@@ -6,190 +6,19 @@ if not modules then modules = { } end modules ['luat-iop'] = {
license = "see context related readme files"
}
--- this paranoid stuff in web2c ... we cannot hook checks into the
--- input functions because one can always change the callback but
--- we can feed back specific patterns and paths into the next
--- mechanism
-
--- os.execute os.exec os.spawn io.fopen
--- os.remove lfs.chdir lfs.mkdir
--- io.open zip.open epdf.open mlib.new
-
--- cache
-
-local topattern, find = string.topattern, string.find
-
-local report_limiter = logs.reporter("system","limiter")
-
--- the basic methods
-
-local function match(ruleset,name)
- local n = #ruleset
- if n > 0 then
- for i=1,n do
- local r = ruleset[i]
- if find(name,r[1]) then
- return r[2]
- end
- end
- return false
- else
- -- nothing defined (or any)
- return true
- end
-end
-
-local function protect(ruleset,proc)
- return function(name,...)
- if name == "" then
- -- report_limiter("no access permitted: <no name>") -- can happen in mplib code
- return nil, "no name given"
- elseif match(ruleset,name) then
- return proc(name,...)
- else
- report_limiter("no access permitted for %a",name)
- return nil, name .. ": no access permitted"
- end
- end
-end
-
-function io.limiter(preset)
- preset = preset or { }
- local ruleset = { }
- for i=1,#preset do
- local p = preset[i]
- local what, spec = p[1] or "", p[2] or ""
- if spec == "" then
- -- skip 'm
- elseif what == "tree" then
- resolvers.dowithpath(spec, function(r)
- local spec = resolvers.resolve(r) or ""
- if spec ~= "" then
- ruleset[#ruleset+1] = { topattern(spec,true), true }
- end
- end)
- elseif what == "permit" then
- ruleset[#ruleset+1] = { topattern(spec,true), true }
- elseif what == "forbid" then
- ruleset[#ruleset+1] = { topattern(spec,true), false }
- end
- end
- if #ruleset > 0 then
- return {
- match = function(name) return match (ruleset,name) end,
- protect = function(proc) return protect(ruleset,proc) end,
- }
- else
- return {
- match = function(name) return true end,
- protect = proc,
- }
- end
-end
-
--- a few handlers
-
-io.i_limiters = { }
-io.o_limiters = { }
-
-function io.i_limiter(v)
- local i = io.i_limiters[v]
- if i then
- local i_limiter = io.limiter(i)
- function io.i_limiter()
- return i_limiter
- end
- return i_limiter
- end
-end
-
-function io.o_limiter(v)
- local o = io.o_limiters[v]
- if o then
- local o_limiter = io.limiter(o)
- function io.o_limiter()
- return o_limiter
- end
- return o_limiter
- end
-end
-
--- the real thing (somewhat fuzzy as we need to know what gets done)
-
-local i_opener, i_limited = io.open, false
-local o_opener, o_limited = io.open, false
-
-local function i_register(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- local protect = i_limiter.protect
- i_opener = protect(i_opener)
- i_limited = true
- report_limiter("input mode set to %a",v)
- end
- end
-end
-
-local function o_register(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- local protect = o_limiter.protect
- o_opener = protect(o_opener)
- o_limited = true
- report_limiter("output mode set to %a",v)
- end
- end
-end
-
-function io.open(name,method)
- if method and find(method,"[wa]") then
- return o_opener(name,method)
- else
- return i_opener(name,method)
- end
-end
-
-directives.register("system.inputmode", i_register)
-directives.register("system.outputmode", o_register)
-
-local i_limited = false
-local o_limited = false
-
-local function i_register(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- local protect = i_limiter.protect
- lfs.chdir = protect(lfs.chdir) -- needs checking
- i_limited = true
- end
- end
-end
-
-local function o_register(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- local protect = o_limiter.protect
- os.remove = protect(os.remove) -- rather okay
- lfs.chdir = protect(lfs.chdir) -- needs checking
- lfs.mkdir = protect(lfs.mkdir) -- needs checking
- o_limited = true
- end
- end
-end
-
-directives.register("system.inputmode", i_register)
-directives.register("system.outputmode", o_register)
-
--- the definitions
-
-local limiters = resolvers.variable("limiters")
-
-if limiters then
- io.i_limiters = limiters.input or { }
- io.o_limiters = limiters.output or { }
-end
-
+local cleanedpathlist = resolvers.cleanedpathlist
+local registerroot = sandbox.registerroot
+
+sandbox.initializer(function()
+ local function register(str,mode)
+ local trees = cleanedpathlist(str)
+ for i=1,#trees do
+ registerroot(trees[i],mode)
+ end
+ end
+ register("TEXMF","read")
+ register("TEXINPUTS","read")
+ register("MPINPUTS","read")
+ -- register("TEXMFCACHE","write")
+ registerroot(".","write")
+end)
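A short, hedged illustration of the same root registration with modes: the TEXMF and input trees above are opened read-only while the job directory stays writable; the extra tree below is a hypothetical path.

    sandbox.initializer(function()
        registerroot("/data/project/sources","read")   -- hypothetical extra tree
        registerroot(".","write")                      -- as registered above
    end)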
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index 3f72e780e..24f9da415 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -36,6 +36,8 @@
\registerctxluafile{util-sta}{1.001}
+\registerctxluafile{util-sbx}{1.001} % needs tracker and templates
+
\registerctxluafile{data-ini}{1.001}
\registerctxluafile{data-exp}{1.001}
\registerctxluafile{data-env}{1.001}
@@ -71,8 +73,8 @@
\registerctxluafile{luat-ini}{1.001}
\registerctxluafile{util-env}{1.001}
\registerctxluafile{luat-env}{1.001}
-\registerctxluafile{luat-exe}{1.001}
-\registerctxluafile{luat-iop}{1.001}
+\registerctxluafile{luat-exe}{1.001} % simplified
+\registerctxluafile{luat-iop}{1.001} % simplified
\registerctxluafile{luat-bwc}{1.001}
\registerctxluafile{trac-lmx}{1.001} % might become l-lmx or luat-lmx
\registerctxluafile{luat-mac}{1.001}
diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua
index 282dc8ce3..6214e591e 100644
--- a/tex/context/base/luat-mac.lua
+++ b/tex/context/base/luat-mac.lua
@@ -92,7 +92,7 @@ local nolong = 1 - longleft - longright
local utf8character = P(1) * R("\128\191")^1 -- unchecked but fast
local name = (R("AZ","az") + utf8character)^1
-local csname = (R("AZ","az") + S("@?!_") + utf8character)^1
+local csname = (R("AZ","az") + S("@?!_:-*") + utf8character)^1
local longname = (longleft/"") * (nolong^1) * (longright/"")
local variable = P("#") * Cs(name + longname)
local escapedname = escape * csname
@@ -144,7 +144,10 @@ local grammar = { "converter",
* V("texbody")
* stopcode
* poplocal,
- texbody = ( V("definition")
+ texbody = (
+leadingcomment -- new per 2015-03-03 (ugly)
++
+ V("definition")
+ identifier
+ V("braced")
+ (1 - stopcode)
@@ -201,7 +204,7 @@ end
function macros.convertfile(oldname,newname) -- beware, no testing on oldname == newname
local data = resolvers.loadtexfile(oldname)
- data = interfaces.preprocessed(data) or ""
+ data = interfaces.preprocessed(data) or "" -- interfaces not yet defined
io.savedata(newname,data)
end
@@ -385,7 +388,7 @@ end
-- \normalexpanded
-- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}%
-- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}%
--- \doifnextoptionalelse\yes\nop}
+-- \doifelsenextoptional\yes\nop}
-- ]]))
--
-- print(macros.preprocessed([[
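A small sketch, assuming macros.preprocessed behaves as in the commented tests above: with ":-*" added to the csname pattern, control sequences containing colons, dashes or stars are now recognized by the preprocessor when named #arguments are rewritten.

    -- hypothetical input; the exact rewritten output is not taken from the patch
    print(macros.preprocessed([[\def\my:cmd#content{<#content>}]]))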
diff --git a/tex/context/base/luat-run.lua b/tex/context/base/luat-run.lua
index 719a6f7c9..65cf0f338 100644
--- a/tex/context/base/luat-run.lua
+++ b/tex/context/base/luat-run.lua
@@ -6,8 +6,8 @@ if not modules then modules = { } end modules ['luat-run'] = {
license = "see context related readme files"
}
-local format = string.format
-local insert = table.insert
+local format, find = string.format, string.find
+local insert, remove = table.insert, table.remove
-- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics
@@ -130,31 +130,102 @@ luatex.registerstopactions(luatex.cleanuptempfiles)
-- for the moment here
-local synctex = false
-
local report_system = logs.reporter("system")
+local synctex = 0
directives.register("system.synctex", function(v)
- synctex = v
- if v then
- report_system("synctex functionality is enabled!")
+ synctex = tonumber(v) or (toboolean(v,true) and 1) or (v == "zipped" and 1) or (v == "unzipped" and -1) or 0
+ if synctex ~= 0 then
+ report_system("synctex functionality is enabled (%s)!",tostring(synctex))
else
report_system("synctex functionality is disabled!")
end
- synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false
- -- currently this is bugged:
- tex.synctex = synctex
- -- so for the moment we need:
- context.normalsynctex()
- if synctex then
- context.plusone()
- else
- context.zerocount()
- end
+ tex.normalsynctex = synctex
end)
statistics.register("synctex tracing",function()
- if synctex or tex.synctex ~= 0 then
+ if synctex ~= 0 then
return "synctex has been enabled (extra log file generated)"
end
end)
+
+-- filenames
+
+local types = {
+ "data",
+ "font map",
+ "image",
+ "font subset",
+ "full font",
+}
+
+local report_open = logs.reporter("open source")
+local report_close = logs.reporter("close source")
+local report_load = logs.reporter("load resource")
+
+local register = callbacks.register
+
+local level = 0
+local total = 0
+local stack = { }
+local all = false
+
+local function report_start(left,name)
+ if not left then
+ -- skip
+ elseif left ~= 1 then
+ if all then
+ report_load("%s > %s",types[left],name or "?")
+ end
+ elseif find(name,"virtual://") then
+ insert(stack,false)
+ else
+ insert(stack,name)
+ total = total + 1
+ level = level + 1
+ report_open("%i > %i > %s",level,total,name or "?")
+ end
+end
+
+local function report_stop(right)
+ if level == 1 or not right or right == 1 then
+ local name = remove(stack)
+ if name then
+ report_close("%i > %i > %s",level,total,name or "?")
+ level = level - 1
+ end
+ end
+end
+
+local function report_none()
+end
+
+register("start_file",report_start)
+register("stop_file", report_stop)
+
+directives.register("system.reportfiles", function(v)
+ if v == "noresources" then
+ all = false
+ register("start_file",report_start)
+ register("stop_file", report_stop)
+ elseif toboolean(v) or v == "all" then
+ all = true
+ register("start_file",report_start)
+ register("stop_file", report_stop)
+ elseif v == "traditional" then
+ register("start_file",nil)
+ register("stop_file", nil)
+ else
+ register("start_file",report_none)
+ register("stop_file", report_none)
+ end
+end)
+
+-- start_run doesn't work
+
+-- luatex.registerstartactions(function()
+-- if environment.arguments.sandbox then
+-- sandbox.enable()
+-- end
+-- end)
+
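A hedged illustration of the directive mapping used above, pulled out as a function for clarity; the sample outcomes are assumptions based on that expression: "zipped" gives 1, "unzipped" gives -1, a plain true-ish value gives 1, and anything else disables synctex.

    local function synctexvalue(v)
        return tonumber(v) or (toboolean(v,true) and 1) or (v == "zipped" and 1)
            or (v == "unzipped" and -1) or 0
    end
    -- synctexvalue("zipped")  --> 1      synctexvalue("unzipped") --> -1
    -- synctexvalue("yes")     --> 1      synctexvalue("0")        --> 0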
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 7a11b7f5e..b04d655c2 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -13,6 +13,7 @@ local gmatch, format = string.gmatch, string.format
local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
local bytecode = lua.bytecode
local strippedloadstring = utilities.lua.strippedloadstring
+local formatters = string.formatters
local trace_storage = false
local report_storage = logs.reporter("system","storage")
@@ -48,38 +49,71 @@ function storage.register(...)
return t
end
-local n = 0
-local function dump()
- local max = storage.max
- for i=1,#data do
- local d = data[i]
- local message, original, target = d[1], d[2] ,d[3]
- local c, code, name = 0, { }, nil
- -- we have a nice definer for this
- for str in gmatch(target,"([^%.]+)") do
- if name then
- name = name .. "." .. str
+local n = 0 -- is that one used ?
+
+if environment.initex then
+
+ -- local function dump()
+ -- local max = storage.max
+ -- for i=1,#data do
+ -- local d = data[i]
+ -- local message, original, target = d[1], d[2] ,d[3]
+ -- local c, code, name = 0, { }, nil
+ -- -- we have a nice definer for this
+ -- for str in gmatch(target,"([^%.]+)") do
+ -- if name then
+ -- name = name .. "." .. str
+ -- else
+ -- name = str
+ -- end
+ -- c = c + 1 ; code[c] = formatters["%s = %s or { }"](name,name)
+ -- end
+ -- max = max + 1
+ -- if trace_storage then
+ -- c = c + 1 ; code[c] = formatters["print('restoring %s from slot %s')"](message,max)
+ -- end
+ -- c = c + 1 ; code[c] = serialize(original,name)
+ -- if trace_storage then
+ -- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ -- end
+ -- -- we don't need tracing in such tables
+ -- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
+ -- collectgarbage("step")
+ -- end
+ -- storage.max = max
+ -- end
+
+ local function dump()
+ local max = storage.max
+ local strip = storage.strip
+ for i=1,#data do
+ max = max + 1
+ local tabledata = data[i]
+ local message = tabledata[1]
+ local original = tabledata[2]
+ local target = tabledata[3]
+ local definition = utilities.tables.definetable(target,false,true)
+ local comment = formatters["restoring %s from slot %s"](message,max)
+ if trace_storage then
+ comment = formatters["print('%s')"](comment)
else
- name = str
+ comment = formatters["-- %s"](comment)
end
- c = c + 1 ; code[c] = format("%s = %s or { }",name,name)
- end
- max = max + 1
- if trace_storage then
- c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max)
- end
- c = c + 1 ; code[c] = serialize(original,name)
- if trace_storage then
- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ local dumped = serialize(original,target)
+ if trace_storage then
+ report_storage('saving %a in slot %a, size %s',message,max,#dumped)
+ end
+ -- we don't need tracing in such tables
+ dumped = concat({ definition, comment, dumped },"\n")
+ bytecode[max] = strippedloadstring(dumped,strip,formatters["slot %s (%s)"](max,target))
+ collectgarbage("step")
end
- -- we don't need tracing in such tables
- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
- collectgarbage("step")
+ storage.max = max
end
- storage.max = max
-end
-lua.registerfinalizer(dump,"dump storage")
+ lua.registerfinalizer(dump,"dump storage")
+
+end
-- to be tested with otf caching:
@@ -115,31 +149,14 @@ statistics.register("stored bytecode data", function()
local tofmodules = storage.tofmodules or 0
local tofdumps = storage.toftables or 0
if environment.initex then
- local luautilities = utilities.lua
- local nofstrippedbytes = luautilities.nofstrippedbytes
- local nofstrippedchunks = luautilities.nofstrippedchunks
- if nofstrippedbytes > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks,
- nofstrippedbytes
- )
- elseif nofstrippedchunks > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks
- )
- else
- return format("%s modules, %s tables, %s chunks",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps
- )
- end
+ local luautilities = utilities.lua
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ luautilities.nofstrippedchunks or 0,
+ luautilities.nofstrippedbytes or 0
+ )
else
return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)",
nofmodules, tofmodules,
@@ -163,6 +180,7 @@ storage.register("storage/shared", storage.shared, "storage.shared")
local mark = storage.mark
if string.patterns then mark(string.patterns) end
+if string.formatters then mark(string.formatters) end
if lpeg.patterns then mark(lpeg.patterns) end
if os.env then mark(os.env) end
if number.dimenfactors then mark(number.dimenfactors) end
diff --git a/tex/context/base/luat-usr.lua b/tex/context/base/luat-usr.lua
new file mode 100644
index 000000000..071e3bf5b
--- /dev/null
+++ b/tex/context/base/luat-usr.lua
@@ -0,0 +1,192 @@
+if not modules then modules = { } end modules ['luat-usr'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local global = global
+
+local moduledata = moduledata
+local thirddata = thirddata
+local userdata = userdata
+local documentdata = documentdata
+
+local context = context
+local tostring = tostring
+local tonumber = tonumber
+local print = print
+
+local string = string
+local table = table
+local lpeg = lpeg
+local math = math
+local io = io
+local os = os
+local lpeg = lpeg
+
+local luanames = lua.name -- luatex itself
+
+local setmetatableindex = table.setmetatableindex
+local load = load
+local xpcall = xpcall
+local instance_banner = string.formatters["=[instance: %s]"] -- the = controls the lua error / see: lobject.c
+local tex_errormessage = context.errmessage
+
+local implement = interfaces.implement
+local reporter = logs.reporter
+
+local report_instance = reporter("lua instance")
+local report_script = reporter("lua script")
+local report_thread = reporter("lua thread")
+local newline = logs.newline
+
+lua.numbers = lua.numbers or { }
+lua.messages = lua.messages or { }
+
+local numbers = lua.numbers
+local messages = lua.messages
+
+storage.register("lua/numbers", numbers, "lua.numbers" )
+storage.register("lua/messages", messages, "lua.messages")
+
+-- First we implement a pure lua version of directlua and a persistent
+-- variant of it:
+
+local function runscript(code)
+ local done, message = loadstring(code)
+ if done then
+ done()
+ else
+ newline()
+ report_script("error : %s",message or "unknown")
+ report_script()
+ report_script("code : %s",code)
+ newline()
+ end
+end
+
+local threads = setmetatableindex(function(t,k)
+ local v = setmetatableindex({},global)
+ t[k] = v
+ return v
+end)
+
+local function runthread(name,code)
+ if not code or code == "" then
+ threads[name] = nil
+ else
+ local thread = threads[name]
+ local done, message = loadstring(code,nil,nil,thread)
+ if done then
+ done()
+ else
+ newline()
+ report_thread("thread: %s",name)
+ report_thread("error : %s",message or "unknown")
+ report_thread()
+ report_thread("code : %s",code)
+ newline()
+ end
+ end
+end
+
+interfaces.implement {
+ name = "luascript",
+ actions = runscript,
+ arguments = "string"
+}
+
+interfaces.implement {
+ name = "luathread",
+ actions = runthread,
+ arguments = { "string", "string" }
+}
+
+-- local scanners = interfaces.scanners
+--
+-- local function ctxscanner(name)
+-- local scanner = scanners[name]
+-- if scanner then
+-- scanner()
+-- else
+-- report("unknown scanner: %s",name)
+-- end
+-- end
+--
+-- interfaces.implement {
+-- name = "clfscanner",
+-- actions = ctxscanner,
+-- arguments = "string",
+-- }
+
+local function registername(name,message)
+ if not name or name == "" then
+ report_instance("no valid name given")
+ return
+ end
+ if not message or message == "" then
+ message = name
+ end
+ local lnn = numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = instance_banner(message)
+ local report = reporter("lua instance",message)
+ local proxy = {
+ -- we can access all via:
+ global = global, -- or maybe just a metatable
+ -- some protected data
+ moduledata = setmetatableindex(moduledata),
+ thirddata = setmetatableindex(thirddata),
+ -- less protected data
+ userdata = userdata,
+ documentdata = documentdata,
+ -- always there fast
+ context = context,
+ tostring = tostring,
+ tonumber = tonumber,
+ -- standard lua modules
+ string = string,
+ table = table,
+ lpeg = lpeg,
+ math = math,
+ io = io,
+ os = os,
+ lpeg = lpeg,
+ --
+ print = print,
+ report = report,
+ }
+ return function(code)
+ local code, message = load(code,nil,nil,proxy)
+ if not code then
+ report_instance("error: %s",message or code)
+ elseif not xpcall(code,report) then
+ tex_errormessage("hit return to continue or quit this run")
+ end
+ end
+end
+
+lua.registername = registername
+
+implement {
+ name = "registernamedlua",
+ arguments = { "string", "string", "string" },
+ actions = function(name,message,csname)
+ if csname and csname ~= "" then
+ implement {
+ name = csname,
+ arguments = "string",
+ actions = registername(name,message) or report,
+ scope = "private",
+ }
+ else
+ report_instance("unvalid csname for %a",message or name or "?")
+ end
+ end
+}
diff --git a/tex/context/base/luat-usr.mkiv b/tex/context/base/luat-usr.mkiv
new file mode 100644
index 000000000..92d40010c
--- /dev/null
+++ b/tex/context/base/luat-usr.mkiv
@@ -0,0 +1,126 @@
+%D \module
+%D [ file=luat-usr,
+%D version=2005.08.11,% moved from luat-ini
+%D title=\CONTEXT\ Lua Macros,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Lua Macros / Userspace}
+
+\unprotect
+
+\registerctxluafile{luat-usr}{1.001}
+
+%D A few goodies:
+%D
+%D \startbuffer
+%D \luascript { context("foo 1:") context(i) }
+%D \luathread {test} { i = 10 context("bar 1:") context(i) }
+%D \luathread {test} { context("bar 2:") context(i) }
+%D \luathread {test} {}
+%D \luathread {test} { context("bar 3:") context(i) }
+%D \luascript { context("foo 2:") context(i) }
+%D \stopbuffer
+%D
+%D \typebuffer \startlines \getbuffer \stoplines
+
+\let\luascript \clf_luascript
+\let\luathread \clf_luathread
+%let\clfscanner\clf_clfscanner
+%def\clfscanner#1{\csname clf_#1\endcsname}
+
+%D \macros
+%D {definenamedlua}
+%D
+%D We provide an interface for defining instances:
+
+\def\s!lua {lua}
+\def\s!code {code}
+\def\s!data {data}
+%def\s!start{start}
+%def\s!stop {stop}
+
+%D Beware: because \type {\expanded} is a convert command, the error
+%D message will show \type{<inserted text>} as part of the message.
+
+\installsystemnamespace{luacode}
+
+\unexpanded\def\definenamedlua
+ {\bgroup
+ \dodoubleempty\syst_definenamedlua}
+
+\unexpanded\def\syst_definenamedlua[#1][#2]% no optional arg handling here yet / we could use numbers instead (more efficient)
+ {\iffirstargument
+ \ifcsname\??luacode#1\endcsname \else
+ %
+ \edef\fullname{lua_code_#1}%
+ %
+ \clf_registernamedlua{#1}{#2}{\fullname}%
+ %
+ \unexpanded\expandafter\xdef\csname\s!start#1\s!code\endcsname
+ {\begingroup
+ \obeylualines
+ \obeyluatokens
+ \csname\??luacode#1\endcsname}%
+ %
+ \global\expandafter\let\csname\s!stop#1\s!code\endcsname\relax
+ %
+ \normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\s!stop#1\s!code\endcsname}%
+ {\noexpand\expandafter\endgroup
+ \noexpand\expandafter
+ \expandafter\noexpand\csname clf_\fullname\endcsname
+ \noexpand\expandafter{\noexpand\normalexpanded{##1}}}%
+ %
+ \global\expandafter\let\csname#1\s!code\expandafter\endcsname\csname clf_\fullname\endcsname
+ \fi
+ \fi
+ \egroup}
+
+%D We predefine a few.
+
+% \definenamedlua[module][module instance] % not needed
+
+\definenamedlua[user] [private user]
+\definenamedlua[third] [third party module]
+\definenamedlua[isolated][isolated]
+
+%D In practice this works out as follows:
+%D
+%D \startbuffer
+%D \startluacode
+%D context("LUA")
+%D \stopluacode
+%D
+%D \startusercode
+%D global.context("USER 1")
+%D context.par()
+%D context("USER 2")
+%D context.par()
+%D if characters then
+%D context("ACCESS directly")
+%D elseif global.characters then
+%D context("ACCESS via global")
+%D else
+%D context("NO ACCESS at all")
+%D end
+%D context.par()
+%D if bogus then
+%D context("ACCESS directly")
+%D elseif global.bogus then
+%D context("ACCESS via global")
+%D else
+%D context("NO ACCESS at all")
+%D end
+%D context.par()
+%D \stopusercode
+%D \stopbuffer
+%D
+%D \typebuffer
+
+\protect \endinput
diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua
index 0fffe261a..8eedade39 100644
--- a/tex/context/base/lxml-aux.lua
+++ b/tex/context/base/lxml-aux.lua
@@ -10,20 +10,23 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
-- compatibility reasons
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+local trace_inclusions = false trackers.register("lxml.inclusions", function(v) trace_inclusions = v end)
local report_xml = logs.reporter("xml")
local xml = xml
-local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
+local xmlcopy, xmlname = xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
local xmlfilter = xml.filter
-local type, setmetatable, getmetatable = type, setmetatable, getmetatable
+local type, next, setmetatable, getmetatable = type, next, setmetatable, getmetatable
local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local striplinepatterns = utilities.strings.striplinepatterns
local function report(what,pattern,c,e)
report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -83,13 +86,15 @@ end
function xml.each(root,pattern,handle,reverse)
local collected = xmlapplylpath(root,pattern)
if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
+ if handle then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
end
return collected
@@ -162,6 +167,8 @@ local function redo_ni(d)
end
end
+xml.reindex = redo_ni
+
local function xmltoelement(whatever,root)
if not whatever then
return nil
@@ -221,8 +228,18 @@ function xml.delete(root,pattern)
report('deleting',pattern,c,e)
end
local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
+ local ni = e.ni
+ if ni <= #d then
+ if false then
+ p.dt[ni] = ""
+ else
+ -- what if multiple deleted in one set
+ remove(d,ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ -- disturbing
+ end
end
end
end
@@ -353,46 +370,71 @@ xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
xml.injectafter = inject_element
xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
-local function include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or io.loaddata
+local function include(xmldata,pattern,attribute,recursive,loaddata,level)
+ -- attribute = attribute or 'href'
+ pattern = pattern or 'include'
+ loaddata = loaddata or io.loaddata
local collected = xmlapplylpath(xmldata,pattern)
if collected then
+ if not level then
+ level = 1
+ end
for c=1,#collected do
local ek = collected[c]
local name = nil
local ekdt = ek.dt
local ekat = ek.at
- local epdt = ek.__p__.dt
+ local ekrt = ek.__p__
+ local epdt = ekrt.dt
if not attribute or attribute == "" then
name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
end
if not name then
for a in gmatch(attribute or "href","([^|]+)") do
name = ekat[a]
- if name then break end
+ if name then
+ break
+ end
+ end
+ end
+ local data = nil
+ if name and name ~= "" then
+ data = loaddata(name) or ""
+ if trace_inclusions then
+ report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
+ if not data or data == "" then
epdt[ek.ni] = "" -- xml.empty(d,k)
elseif ekat["parse"] == "text" then
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
else
if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
+ include(xi,pattern,attribute,recursive,loaddata,level+1)
+ end
+ local child = xml.body(xi) -- xml.assign(d,k,xi)
+ child.__p__ = ekrt
+ child.__f__ = name -- handy for tracing
+ epdt[ek.ni] = child
+ local inclusions = xmldata.settings.inclusions
+ if inclusions then
+ inclusions[#inclusions+1] = name
+ else
+ xmldata.settings.inclusions = { name }
+ end
+ if child.er then
+ local badinclusions = xmldata.settings.badinclusions
+ if badinclusions then
+ badinclusions[#badinclusions+1] = name
+ else
+ xmldata.settings.badinclusions = { name }
+ end
end
- epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
end
end
end
@@ -401,68 +443,108 @@ end
xml.include = include
+function xml.inclusion(e,default)
+ while e do
+ local f = e.__f__
+ if f then
+ return f
+ else
+ e = e.__p__
+ end
+ end
+ return default
+end
+
+local function getinclusions(key,e,sorted)
+ while e do
+ local settings = e.settings
+ if settings then
+ local inclusions = settings[key]
+ if inclusions then
+ inclusions = table.unique(inclusions) -- a copy
+ if sorted then
+ table.sort(inclusions) -- so we sort the copy
+ end
+ return inclusions -- and return the copy
+ else
+ e = e.__p__
+ end
+ else
+ e = e.__p__
+ end
+ end
+end
+
+function xml.inclusions(e,sorted)
+ return getinclusions("inclusions",e,sorted)
+end
+
+function xml.badinclusions(e,sorted)
+ return getinclusions("badinclusions",e,sorted)
+end
+
+local b_collapser = lpeg.patterns.b_collapser
+local m_collapser = lpeg.patterns.m_collapser
+local e_collapser = lpeg.patterns.e_collapser
+
+local b_stripper = lpeg.patterns.b_stripper
+local m_stripper = lpeg.patterns.m_stripper
+local e_stripper = lpeg.patterns.e_stripper
+
+local lpegmatch = lpeg.match
+
local function stripelement(e,nolines,anywhere)
local edt = e.dt
if edt then
- if anywhere then
- local t, n = { }, 0
- for e=1,#edt do
+ local n = #edt
+ if n == 0 then
+ return e -- convenient
+ elseif anywhere then
+ local t = { }
+ local m = 0
+ for e=1,n do
local str = edt[e]
if type(str) ~= "string" then
- n = n + 1
- t[n] = str
+ m = m + 1
+ t[m] = str
elseif str ~= "" then
- -- todo: lpeg for each case
if nolines then
- str = gsub(str,"%s+"," ")
+ str = lpegmatch((n == 1 and b_collapser) or (n == m and e_collapser) or m_collapser,str)
+ else
+ str = lpegmatch((n == 1 and b_stripper) or (n == m and e_stripper) or m_stripper,str)
end
- str = gsub(str,"^%s*(.-)%s*$","%1")
if str ~= "" then
- n = n + 1
- t[n] = str
+ m = m + 1
+ t[m] = str
end
end
end
e.dt = t
else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
+ local str = edt[1]
+ if type(str) == "string" then
+ if str ~= "" then
+ str = lpegmatch(nolines and b_collapser or b_stripper,str)
+ end
+ if str == "" then
remove(edt,1)
+ n = n - 1
else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
+ edt[1] = str
end
end
- local nedt = #edt
- if nedt > 0 then
- -- strip end
- local str = edt[nedt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
+ if n > 0 then
+ str = edt[n]
+ if type(str) == "string" then
if str == "" then
remove(edt)
else
- edt[nedt] = str
+ str = lpegmatch(nolines and e_collapser or e_stripper,str)
+ if str == "" then
+ remove(edt)
+ else
+ edt[n] = str
+ end
end
end
end
@@ -702,8 +784,8 @@ function xml.finalizers.xml.cdata(collected)
return ""
end
-function xml.insertcomment(e,str,n) -- also insertcdata
- table.insert(e.dt,n or 1,{
+function xml.insertcomment(e,str,n)
+ insert(e.dt,n or 1,{
tg = "@cm@",
ns = "",
special = true,
@@ -712,7 +794,27 @@ function xml.insertcomment(e,str,n) -- also insertcdata
})
end
-function xml.setcdata(e,str) -- also setcomment
+function xml.insertcdata(e,str,n)
+ insert(e.dt,n or 1,{
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcomment(e,str,n)
+ e.dt = { {
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
+function xml.setcdata(e,str)
e.dt = { {
tg = "@cd@",
ns = "",
@@ -790,7 +892,7 @@ local function recurse(e,action)
for i=1,#edt do
local str = edt[i]
if type(str) ~= "string" then
- recurse(str,action,recursive)
+ recurse(str,action) -- ,recursive
elseif str ~= "" then
edt[i] = action(str)
end
@@ -809,3 +911,91 @@ function helpers.recursetext(collected,action,recursive)
end
end
end
+
+-- on request ... undocumented ...
+--
+-- _tag : element name
+-- _type : node type (_element can be an option)
+-- _namespace : only if given
+--
+-- [1..n] : text or table
+-- key : value or attribute 'key'
+--
+-- local str = [[
+-- <?xml version="1.0" ?>
+-- <a one="1">
+-- <!-- rubbish -->
+-- <b two="1"/>
+-- <b two="2">
+-- c &gt; d
+-- </b>
+-- </a>
+-- ]]
+--
+-- inspect(xml.totable(xml.convert(str)))
+-- inspect(xml.totable(xml.convert(str),true))
+-- inspect(xml.totable(xml.convert(str),true,true))
+
+local specials = {
+ ["@rt@"] = "root",
+ ["@pi@"] = "instruction",
+ ["@cm@"] = "comment",
+ ["@dt@"] = "declaration",
+ ["@cd@"] = "cdata",
+}
+
+local function convert(x,strip,flat)
+ local ns = x.ns
+ local tg = x.tg
+ local at = x.at
+ local dt = x.dt
+ local node = flat and {
+ [0] = (not x.special and (ns ~= "" and ns .. ":" .. tg or tg)) or nil,
+ } or {
+ _namespace = ns ~= "" and ns or nil,
+ _tag = not x.special and tg or nil,
+ _type = specials[tg] or "_element",
+ }
+ if at then
+ for k, v in next, at do
+ node[k] = v
+ end
+ end
+ local n = 0
+ for i=1,#dt do
+ local di = dt[i]
+ if type(di) == "table" then
+ if flat and di.special then
+ -- ignore
+ else
+ di = convert(di,strip,flat)
+ if di then
+ n = n + 1
+ node[n] = di
+ end
+ end
+ elseif strip then
+ di = lpegmatch(strip,di)
+ if di ~= "" then
+ n = n + 1
+ node[n] = di
+ end
+ else
+ n = n + 1
+ node[n] = di
+ end
+ end
+ if next(node) then
+ return node
+ end
+end
+
+function xml.totable(x,strip,flat)
+ if type(x) == "table" then
+ if strip then
+ strip = striplinepatterns[strip]
+ end
+ return convert(x,strip,flat)
+ end
+end
+
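The new bookkeeping above (settings.inclusions, settings.badinclusions and the __f__ marker) can be queried after loading. A minimal sketch, not part of the patch, with hypothetical file names:

    -- sketch only: exercise the inclusion tracking added above
    local main = xml.convert([[
    <doc>
      <include href="chapter-1.xml"/>
      <include href="chapter-2.xml"/>
    </doc>
    ]])

    xml.include(main,"include","href",true,io.loaddata) -- recursive, default loader

    local loaded = xml.inclusions(main,true)    -- sorted unique copy of settings.inclusions
    local failed = xml.badinclusions(main,true) -- inclusions whose conversion set the er flag

    if loaded then
        for i=1,#loaded do
            print("included:",loaded[i])
        end
    end
    if failed then
        print("bad inclusions:",table.concat(failed,", "))
    end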
diff --git a/tex/context/base/lxml-css.lua b/tex/context/base/lxml-css.lua
index 0deaea4d3..fa921b24f 100644
--- a/tex/context/base/lxml-css.lua
+++ b/tex/context/base/lxml-css.lua
@@ -146,7 +146,19 @@ local pattern = Cf( Ct("") * (
+ (C("sansserif") + C("sans")) / "sans-serif" -- match before serif
+ C("serif")
)
- ) + P(1)
+ )
+--+ P("\\") * (
+-- P("bf") * ( Cg ( Cc("weight") * Cc("bold") ) )
+-- + P("bi") * ( Cg ( Cc("weight") * Cc("bold") )
+-- * Cg ( Cc("style") * Cc("italic") ) )
+-- + P("bs") * ( Cg ( Cc("weight") * Cc("bold") )
+-- * Cg ( Cc("style") * Cc("oblique") ) )
+-- + P("it") * ( Cg ( Cc("style") * Cc("italic") ) )
+-- + P("sl") * ( Cg ( Cc("style") * Cc("oblique") ) )
+-- + P("sc") * ( Cg ( Cc("variant") * Cc("small-caps") ) )
+-- + P("tt") * ( Cg ( Cc("family") * Cc("monospace") ) )
+--)
+ + P(1)
)^0 , rawset)
function css.fontspecification(str)
diff --git a/tex/context/base/lxml-ini.lua b/tex/context/base/lxml-ini.lua
new file mode 100644
index 000000000..115403395
--- /dev/null
+++ b/tex/context/base/lxml-ini.lua
@@ -0,0 +1,142 @@
+if not modules then modules = { } end modules ['lxml-ini'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local xml = xml
+local lxml = lxml
+
+-- this defines an extra scanner lxmlid:
+
+local scanners = tokens.scanners
+local scanstring = scanners.string
+local getid = lxml.id
+
+scanners.lxmlid = function() return getid(scanstring()) end
+
+local implement = interfaces.implement
+
+-- lxml.id
+
+implement { name = "lxmlid", actions = lxml.getid, arguments = "string" }
+
+implement { name = "xmldoif", actions = lxml.doif, arguments = { "string", "string" } }
+implement { name = "xmldoifnot", actions = lxml.doifnot, arguments = { "string", "string" } }
+implement { name = "xmldoifelse", actions = lxml.doifelse, arguments = { "string", "string" } }
+implement { name = "xmldoiftext", actions = lxml.doiftext, arguments = { "string", "string" } }
+implement { name = "xmldoifnottext", actions = lxml.doifnottext, arguments = { "string", "string" } }
+implement { name = "xmldoifelsetext", actions = lxml.doifelsetext, arguments = { "string", "string" } }
+
+implement { name = "xmldoifempty", actions = lxml.doifempty, arguments = { "string", "string" } }
+implement { name = "xmldoifnotempty", actions = lxml.doifnotempty, arguments = { "string", "string" } }
+implement { name = "xmldoifelseempty", actions = lxml.doifelseempty, arguments = { "string", "string" } }
+implement { name = "xmldoifselfempty", actions = lxml.doifempty, arguments = "string" }
+implement { name = "xmldoifnotselfempty", actions = lxml.doifnotempty, arguments = "string" }
+implement { name = "xmldoifelseselfempty", actions = lxml.doifelseempty, arguments = "string" }
+
+--------- { name = "xmlcontent", actions = lxml.content, arguments = "string" }
+--------- { name = "xmlflushstripped", actions = lxml.strip, arguments = { "string", true } }
+implement { name = "xmlall", actions = lxml.all, arguments = { "string", "string" } }
+implement { name = "xmlatt", actions = lxml.att, arguments = { "string", "string" } }
+implement { name = "xmlattdef", actions = lxml.att, arguments = { "string", "string", "string" } }
+implement { name = "xmlattribute", actions = lxml.attribute, arguments = { "string", "string", "string" } }
+implement { name = "xmlattributedef", actions = lxml.attribute, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlchainatt", actions = lxml.chainattribute, arguments = { "string", "'/'", "string" } }
+implement { name = "xmlchainattdef", actions = lxml.chainattribute, arguments = { "string", "'/'", "string", "string" } }
+implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
+implement { name = "xmlchecknamespace", actions = xml.checknamespace, arguments = { "lxmlid", "string", "string" } }
+implement { name = "xmlcommand", actions = lxml.command, arguments = { "string", "string", "string" } }
+implement { name = "xmlconcat", actions = lxml.concat, arguments = { "string", "string", "string" } } -- \detokenize{#3}
+implement { name = "xmlconcatrange", actions = lxml.concatrange, arguments = { "string", "string", "string", "string", "string" } } -- \detokenize{#5}
+implement { name = "xmlcontext", actions = lxml.context, arguments = { "string", "string" } }
+implement { name = "xmlcount", actions = lxml.count, arguments = { "string", "string" } }
+implement { name = "xmldelete", actions = lxml.delete, arguments = { "string", "string" } }
+implement { name = "xmldirect", actions = lxml.direct, arguments = "string" }
+implement { name = "xmldirectives", actions = lxml.directives.setup, arguments = "string" }
+implement { name = "xmldirectivesafter", actions = lxml.directives.after, arguments = "string" }
+implement { name = "xmldirectivesbefore", actions = lxml.directives.before, arguments = "string" }
+implement { name = "xmldisplayverbatim", actions = lxml.displayverbatim, arguments = "string" }
+implement { name = "xmlelement", actions = lxml.element, arguments = { "string", "string" } } -- could be integer but now we can alias
+implement { name = "xmlfilter", actions = lxml.filter, arguments = { "string", "string" } }
+implement { name = "xmlfilterlist", actions = lxml.filterlist, arguments = { "string", "string" } }
+implement { name = "xmlfirst", actions = lxml.first, arguments = { "string", "string" } }
+implement { name = "xmlflush", actions = lxml.flush, arguments = "string" }
+implement { name = "xmlflushcontext", actions = lxml.context, arguments = "string" }
+implement { name = "xmlflushlinewise", actions = lxml.flushlinewise, arguments = "string" }
+implement { name = "xmlflushspacewise", actions = lxml.flushspacewise, arguments = "string" }
+implement { name = "xmlfunction", actions = lxml.applyfunction, arguments = { "string", "string" } }
+implement { name = "xmlinclude", actions = lxml.include, arguments = { "string", "string", "string", true } }
+implement { name = "xmlincludeoptions", actions = lxml.include, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlinclusion", actions = lxml.inclusion, arguments = "string" }
+implement { name = "xmlinclusions", actions = lxml.inclusions, arguments = "string" }
+implement { name = "xmlbadinclusions", actions = lxml.badinclusions, arguments = "string" }
+implement { name = "xmlindex", actions = lxml.index, arguments = { "string", "string", "string" } } -- can be integer but now we can alias
+implement { name = "xmlinfo", actions = lxml.info, arguments = "string" }
+implement { name = "xmlinlineverbatim", actions = lxml.inlineverbatim, arguments = "string" }
+implement { name = "xmllast", actions = lxml.last, arguments = "string" }
+implement { name = "xmlload", actions = lxml.load, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlloadbuffer", actions = lxml.loadbuffer, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlloaddata", actions = lxml.loaddata, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlloaddirectives", actions = lxml.directives.load, arguments = "string" }
+implement { name = "xmlloadregistered", actions = lxml.loadregistered, arguments = { "string", "string", "string" } }
+implement { name = "xmlmain", actions = lxml.main, arguments = "string" }
+implement { name = "xmlmatch", actions = lxml.match, arguments = "string" }
+implement { name = "xmlname", actions = lxml.name, arguments = "string" }
+implement { name = "xmlnamespace", actions = lxml.namespace, arguments = "string" }
+implement { name = "xmlnonspace", actions = lxml.nonspace, arguments = { "string", "string" } }
+implement { name = "xmlpos", actions = lxml.pos, arguments = "string" }
+implement { name = "xmlraw", actions = lxml.raw, arguments = { "string", "string" } }
+implement { name = "xmlregisterns", actions = xml.registerns, arguments = { "string", "string" } }
+implement { name = "xmlremapname", actions = xml.remapname, arguments = { "lxmlid", "string","string","string" } }
+implement { name = "xmlremapnamespace", actions = xml.renamespace, arguments = { "lxmlid", "string", "string" } }
+implement { name = "xmlsave", actions = lxml.save, arguments = { "string", "string" } }
+--------- { name = "xmlsetfunction", actions = lxml.setaction, arguments = { "string", "string", "string" } }
+implement { name = "xmlsetsetup", actions = lxml.setsetup, arguments = { "string", "string", "string" } }
+implement { name = "xmlsnippet", actions = lxml.snippet, arguments = { "string", "string" } }
+implement { name = "xmlstrip", actions = lxml.strip, arguments = { "string", "string" } }
+implement { name = "xmlstripanywhere", actions = lxml.strip, arguments = { "string", "string", true, true } }
+implement { name = "xmlstripnolines", actions = lxml.strip, arguments = { "string", "string", true } }
+implement { name = "xmlstripped", actions = lxml.stripped, arguments = { "string", "string" } }
+implement { name = "xmlstrippednolines", actions = lxml.stripped, arguments = { "string", "string", true } }
+implement { name = "xmltag", actions = lxml.tag, arguments = "string" }
+implement { name = "xmltext", actions = lxml.text, arguments = { "string", "string" } }
+implement { name = "xmltobuffer", actions = lxml.tobuffer, arguments = { "string", "string", "string" } }
+implement { name = "xmltobufferverbose", actions = lxml.tobuffer, arguments = { "string", "string", "string", true } }
+implement { name = "xmltofile", actions = lxml.tofile, arguments = { "string", "string", "string" } }
+implement { name = "xmltoparameters", actions = lxml.toparameters, arguments = "string" }
+implement { name = "xmlverbatim", actions = lxml.verbatim, arguments = "string" }
+
+implement { name = "xmlstartraw", actions = lxml.startraw }
+implement { name = "xmlstopraw", actions = lxml.stopraw }
+
+implement { name = "xmlprependsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } } -- 2:*
+implement { name = "xmlappendsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } } -- 2:*
+implement { name = "xmlbeforesetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } } -- 2:*
+implement { name = "xmlaftersetup", actions = lxml.installsetup, arguments = { 4, "string", "string", "string" } } -- 2:*
+implement { name = "xmlprependdocumentsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } }
+implement { name = "xmlappenddocumentsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } }
+implement { name = "xmlbeforedocumentsetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } }
+implement { name = "xmlafterdocumentsetup", actions = lxml.installsetup, arguments = { 4, "string", "string" } }
+implement { name = "xmlremovesetup", actions = lxml.removesetup, arguments = { "string", "string" } } -- 1:*
+implement { name = "xmlremovedocumentsetup", actions = lxml.removesetup, arguments = { "string", "string" } }
+implement { name = "xmlflushdocumentsetups", actions = lxml.flushsetups, arguments = { "string", "string", "string" } } -- 2:*
+implement { name = "xmlresetdocumentsetups", actions = lxml.resetsetups, arguments = "string" }
+
+implement { name = "xmlgetindex", actions = lxml.getindex, arguments = { "string", "string" } }
+implement { name = "xmlwithindex", actions = lxml.withindex, arguments = { "string", "string", "string" } }
+
+implement { name = "xmlsetentity", actions = xml.registerentity, arguments = { "string", "string" } }
+implement { name = "xmltexentity", actions = lxml.registerentity, arguments = { "string", "string" } }
+
+implement { name = "xmlsetcommandtotext", actions = lxml.setcommandtotext, arguments = "string" }
+implement { name = "xmlsetcommandtonone", actions = lxml.setcommandtonone, arguments = "string" }
+
+implement { name = "xmlstarttiming", actions = function() statistics.starttiming(lxml) end }
+implement { name = "xmlstoptiming", actions = function() statistics.stoptiming (lxml) end }
+
+-- kind of special (3rd argument is a function)
+
+commands.xmlsetfunction = lxml.setaction
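All of the above follows one pattern: a Lua function is registered with interfaces.implement, which exposes it to TeX as \clf_<name>, and lxml-ini.mkiv then aliases the public command to that macro. A rough sketch of that pattern with a made-up helper (not part of this file):

    -- hypothetical helper, only to illustrate the implement / clf_ pattern above
    local report_xml = logs.reporter("xml")

    local function xmlreportroot(id)
        local root = lxml.getid(id) -- same resolver that backs the lxmlid scanner
        if root then
            report_xml("document %a is loaded",id)
        else
            report_xml("document %a is not loaded",id)
        end
    end

    interfaces.implement {
        name      = "xmlreportroot",
        actions   = xmlreportroot,
        arguments = "string",
    }

    -- at the TeX end this would surface as \clf_xmlreportroot, typically aliased:
    --
    --   \let\xmlreportroot\clf_xmlreportroot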
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index cfa0114d0..fab644fdb 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -12,6 +12,7 @@
%C details.
%D Todo: auto apply setups (manage at lua end)
+%D Todo: manual: \xmlinclusion \xmlinclusions
\writestatus{loading}{ConTeXt XML Support / Initialization}
@@ -20,100 +21,203 @@
%registerctxluafile{lxml-xml}{1.001} % xml finalizers
%registerctxluafile{lxml-aux}{1.001} % extras using parser
%registerctxluafile{lxml-mis}{1.001} % extras independent of parser
-\registerctxluafile{char-ent}{1.001}
\registerctxluafile{lxml-ent}{1.001} % entity hacks
\registerctxluafile{lxml-tex}{1.001} % tex finalizers
\registerctxluafile{lxml-dir}{1.001} % ctx hacks
+\registerctxluafile{lxml-ini}{1.001} % interface
\unprotect % todo \!!bs \!!es where handy (slower)
-\def\ctxlxml #1{\ctxlua{lxml.#1}}
-
-\def\xmlmain #1{\ctxlxml{main("#1")}}
-\def\xmlmatch #1{\ctxlxml{match("#1")}}
-\def\xmlall #1#2{\ctxlxml{all("#1","#2")}}
-\def\xmlatt #1#2{\ctxlxml{att("#1","#2")}}
-\def\xmlattdef #1#2#3{\ctxlxml{att("#1","#2","#3")}}
-\def\xmlchainatt #1#2{\ctxlxml{chainattribute("#1","/","#2")}}
-\def\xmlchainattdef #1#2#3{\ctxlxml{chainattribute("#1","/","#2","#3")}}
-\def\xmlattribute #1#2#3{\ctxlxml{attribute("#1","#2","#3")}}
-\def\xmlattributedef #1#2#3#4{\ctxlxml{attribute("#1","#2","#3","#4")}}
-\def\xmlcommand #1#2#3{\ctxlxml{command("#1","#2","#3")}}
-\def\xmlconcat #1#2#3{\ctxlxml{concat("#1","#2",[[\detokenize{#3}]])}}
-\def\xmlconcatrange#1#2#3#4#5{\ctxlxml{concatrange("#1","#2","#3","#4",[[\detokenize{#5}]])}}
-\def\xmlcount #1#2{\ctxlxml{count("#1","#2")}}
-\def\xmldelete #1#2{\ctxlxml{delete("#1","#2")}}
-\def\xmldirectives #1{\ctxlxml{directives.setup("#1")}}
-\def\xmldirectivesbefore #1{\ctxlxml{directives.before("#1")}}
-\def\xmldirectivesafter #1{\ctxlxml{directives.after("#1")}}
-\def\xmlfilter #1#2{\ctxlxml{filter("#1",\!!bs#2\!!es)}}
-\def\xmlfilterlist #1#2{\ctxlxml{filterlist("#1",\!!bs#2\!!es)}}
-\def\xmlfunction #1#2{\ctxlxml{applyfunction("#1",\!!bs#2\!!es)}}
-\def\xmlfirst #1#2{\ctxlxml{first("#1","#2")}}
-\def\xmlflush #1{\ctxlxml{flush("#1")}}
-\def\xmlflushlinewise #1{\ctxlxml{flushlinewise("#1")}}
-\def\xmlflushspacewise #1{\ctxlxml{flushspacewise("#1")}}
-%def\xmlcontent #1{\ctxlxml{content("#1")}}
-%def\xmlflushstripped #1{\ctxlxml{strip("#1",true)}}
-\def\xmldirect #1{\ctxlxml{direct("#1")}} % in loops, not dt but root
-\def\xmlidx #1#2#3{\ctxlxml{idx("#1","#2",\number#3)}}
-\def\xmlinclude #1#2#3{\ctxlxml{include("#1","#2","#3",true)}}
-\def\xmlindex #1#2#3{\ctxlxml{index("#1","#2",\number#3)}}
-\def\xmlinfo #1{\hbox{\ttxx[\ctxlxml{info("#1")}]}}
-\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
-\def\xmllast #1#2{\ctxlxml{last("#1","#2")}}
-\def\xmlname #1{\ctxlxml{name("#1")}}
-\def\xmlnamespace #1{\ctxlxml{namespace("#1")}}
-\def\xmlnonspace #1#2{\ctxlxml{nonspace("#1","#2")}}
-\def\xmlraw #1#2{\ctxlxml{raw("#1","#2")}}
-\def\xmlcontext #1#2{\ctxlxml{context("#1","#2")}}
-\def\xmlflushcontext #1{\ctxlxml{context("#1")}}
-\def\xmlsnippet #1#2{\ctxlxml{snippet("#1",#2)}}
-\def\xmlelement #1#2{\ctxlxml{element("#1",#2)}}
-\def\xmlregisterns #1#2{\ctxlua{xml.registerns("#1","#2")}} % document
-\def\xmlremapname #1#2#3#4{\ctxlua{xml.remapname(lxml.id("#1"),"#2","#3","#4")}} % element
-\def\xmlremapnamespace #1#2#3{\ctxlua{xml.renamespace(lxml.id("#1"),"#2","#3")}} % document
-\def\xmlchecknamespace #1#2#3{\ctxlua{xml.checknamespace(lxml.id("#1"),"#2","#3")}} % element
-\def\xmlsetfunction #1#2#3{\ctxlxml{setaction("#1",\!!bs#2\!!es,#3)}}
-\def\xmlsetsetup #1#2#3{\ctxlxml{setsetup("#1",\!!bs#2\!!es,"#3")}}
-\def\xmlstrip #1#2{\ctxlxml{strip("#1","#2")}}
-\def\xmlstripnolines #1#2{\ctxlxml{strip("#1","#2",true)}}
-\def\xmlstripanywhere #1#2{\ctxlxml{strip("#1","#2",true,true)}}
-\def\xmlstripped #1#2{\ctxlxml{stripped("#1","#2")}}
-\def\xmlstrippednolines #1#2{\ctxlxml{stripped("#1","#2",true)}}
-\def\xmltag #1{\ctxlxml{tag("#1")}}
-\def\xmltext #1#2{\ctxlxml{text("#1","#2")}}
-\def\xmlverbatim #1{\ctxlxml{verbatim("#1")}}
-\def\xmldisplayverbatim #1{\ctxlxml{displayverbatim("#1")}}
-\def\xmlinlineverbatim #1{\ctxlxml{inlineverbatim("#1")}}
-
-\def\xmlload #1#2{\ctxlxml{load("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloadbuffer #1#2{\ctxlxml{loadbuffer("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloaddata #1#2{\ctxlxml{loaddata("#1",\!!bs#2\!!es,"\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloadregistered #1#2{\ctxlxml{loadregistered("#1","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloaddirectives #1{\ctxlxml{directives.load("any:///#1")}}
-\def\xmlpos #1{\ctxlxml{pos("#1")}}
-
-\def\xmltoparameters #1{\ctxlxml{toparameters("#1")}}
-
-\def\xmltofile #1#2#3{\ctxlxml{tofile("#1","#2","#3")}} % id pattern filename
+% todo: { } mandate
+% avoid #
+
+\def\ctxlxml #1{\ctxlua{lxml.#1}}
+
+%def\xmlall #1#2{\clf_xmlall {#1}{#2}}
+%def\xmlatt #1#2{\clf_xmlatt {#1}{#2}}
+%def\xmlattdef #1#2#3{\clf_xmlattdef {#1}{#2}{#3}}
+%def\xmlattribute #1#2#3{\clf_xmlattribute {#1}{#2}{#3}}
+%def\xmlattributedef #1#2#3#4{\clf_xmlattributedef {#1}{#2}{#3}{#4}}
+%def\xmlchainatt #1#2{\clf_xmlchainatt {#1}{#2}}
+%def\xmlchainattdef #1#2#3{\clf_xmlchainattdef {#1}{#2}{#3}}
+%def\xmlrefatt #1#2{\clf_xmlrefatt {#1}{#2}}
+%def\xmlchecknamespace #1#2#3{\clf_xmlchecknamespace {#1}{#2}{#3}} % element
+%def\xmlcommand #1#2#3{\clf_xmlcommand {#1}{#2}{#3}}
+\def\xmlconcat #1#2#3{\clf_xmlconcat {#1}{#2}{\detokenize{#3}}}
+\def\xmlconcatrange #1#2#3#4#5{\clf_xmlconcatrange {#1}{#2}{#3}{#4}{\detokenize{#5}}}
+%def\xmlcontext #1#2{\clf_xmlcontext {#1}{#2}}
+%def\xmlcount #1#2{\clf_xmlcount {#1}{#2}}
+%def\xmldelete #1#2{\clf_xmldelete {#1}{#2}}
+%def\xmldirect #1{\clf_xmldirect {#1}} % in loops, not dt but root
+%def\xmldirectives #1{\clf_xmldirectives {#1}}
+%def\xmldirectivesafter #1{\clf_xmldirectivesafter {#1}}
+%def\xmldirectivesbefore #1{\clf_xmldirectivesbefore {#1}}
+%def\xmldisplayverbatim #1{\clf_xmldisplayverbatim {#1}}
+%def\xmlelement #1#2{\clf_xmlelement {#1}{#2}}
+%def\xmlfilter #1#2{\clf_xmlfilter {#1}{#2}}
+%def\xmlfilterlist #1#2{\clf_xmlfilterlist {#1}{#2}}
+%def\xmlfirst #1#2{\clf_xmlfirst {#1}{#2}}
+%def\xmlflush #1{\clf_xmlflush {#1}}
+%def\xmlflushcontext #1{\clf_xmlflushcontext {#1}}
+%def\xmlflushlinewise #1{\clf_xmlflushlinewise {#1}}
+%def\xmlflushspacewise #1{\clf_xmlflushspacewise {#1}}
+%def\xmlfunction #1#2{\clf_xmlfunction {#1}{#2}}
+%def\xmlinclude #1#2#3{\clf_xmlinclude {#1}{#2}{#3}}
+%def\xmlincludeoptions#1#2#3#4{\clf_xmlincludeoptions {#1}{#2}{#3}{#4}}
+%def\xmlinclusion #1{\clf_xmlinclusion {#1}}
+%def\xmlinclusions #1{\clf_xmlinclusions {#1}}
+%def\xmlbadinclusions #1{\clf_xmlbadinclusions {#1}}
+%def\xmlindex #1#2#3{\clf_xmlindex {#1}{#2}{#3}}
+%let\xmlposition \xmlindex
+%def\xmlinlineverbatim #1{\clf_xmlinlineverbatim {#1}}
+%def\xmllast #1#2{\clf_xmllast {#1}{#2}}
+\def\xmlload #1#2{\clf_xmlload {#1}{#2}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+\def\xmlloadbuffer #1#2{\clf_xmlloadbuffer {#1}{#2}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+\def\xmlloaddata #1#2{\clf_xmlloaddata {#1}{#2}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+%def\xmlloaddirectives #1{\clf_xmlloaddirectives {#1}}
+\def\xmlloadregistered #1#2{\clf_xmlloadregistered {#1}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+%def\xmlmain #1{\clf_xmlmain {#1}}
+%def\xmlmatch #1{\clf_xmlmatch {#1}}
+%def\xmlname #1{\clf_xmlname {#1}}
+%def\xmlnamespace #1{\clf_xmlnamespace {#1}}
+%def\xmlnonspace #1#2{\clf_xmlnonspace {#1}{#2}}
+%def\xmlpos #1{\clf_xmlpos {#1}}
+%def\xmlraw #1#2{\clf_xmlraw {#1}{#2}}
+%def\xmlregisterns #1#2{\clf_xmlregisterns {#1}{#2}} % document
+%def\xmlremapname #1#2#3#4{\clf_xmlremapname {#1}{#2}{#3}{#4}} % element
+%def\xmlremapnamespace #1#2#3{\clf_xmlremapnamespace {#1}{#2}{#3}} % document
+%def\xmlsave #1#2{\clf_xmlsave {#1}{#2}}
+%def\xmlsetfunction #1#2#3{\clf_xmlsetfunction {#1}{#2}{#3}}
+%def\xmlsetsetup #1#2#3{\clf_xmlsetsetup {#1}{#2}{#3}}
+%def\xmlsnippet #1#2{\clf_xmlsnippet {#1}{#2}}
+%def\xmlstrip #1#2{\clf_xmlstrip {#1}{#2}}
+%def\xmlstripanywhere #1#2{\clf_xmlstripanywhere {#1}{#2}}
+%def\xmlstripnolines #1#2{\clf_xmlstripnolines {#1}{#2}}
+%def\xmlstripped #1#2{\clf_xmlstripped {#1}{#2}}
+%def\xmlstrippednolines #1#2{\clf_xmlstrippednolines {#1}{#2}}
+%def\xmltag #1{\clf_xmltag {#1}}
+%def\xmltext #1#2{\clf_xmltext {#1}{#2}}
+%def\xmltobuffer #1#2#3{\clf_xmltobuffer {#1}{#2}{#3}} % id pattern name
+%def\xmltobufferverbose #1#2#3{\clf_xmltobufferverbose {#1}{#2}{#3}} % id pattern name
+%def\xmltofile #1#2#3{\clf_xmltofile {#1}{#2}{#3}} % id pattern filename
+%def\xmltoparameters #1{\clf_xmltoparameters {#1}}
+%def\xmlverbatim #1{\clf_xmlverbatim {#1}}
+
+% experiment:
+\let\xmlall \clf_xmlall
+\let\xmlatt \clf_xmlatt
+\let\xmlattdef \clf_xmlattdef
+\let\xmlattribute \clf_xmlattribute
+\let\xmlattributedef \clf_xmlattributedef
+\let\xmlchainatt \clf_xmlchainatt
+\let\xmlchainattdef \clf_xmlchainattdef
+\let\xmlrefatt \clf_xmlrefatt
+\let\xmlchecknamespace \clf_xmlchecknamespace
+\let\xmlcommand \clf_xmlcommand
+% \xmlconcat
+% \xmlconcatrange
+\let\xmlcontext \clf_xmlcontext
+\let\xmlcount \clf_xmlcount
+\let\xmldelete \clf_xmldelete
+\let\xmldirect \clf_xmldirect % in loops, not dt but root
+\let\xmldirectives \clf_xmldirectives
+\let\xmldirectivesafter \clf_xmldirectivesafter
+\let\xmldirectivesbefore \clf_xmldirectivesbefore
+\let\xmldisplayverbatim \clf_xmldisplayverbatim
+\let\xmlelement \clf_xmlelement
+\let\xmlfilter \clf_xmlfilter
+\let\xmlfilterlist \clf_xmlfilterlist
+\let\xmlfirst \clf_xmlfirst
+\let\xmlflush \clf_xmlflush
+\let\xmlflushcontext \clf_xmlflushcontext
+\let\xmlflushlinewise \clf_xmlflushlinewise
+\let\xmlflushspacewise \clf_xmlflushspacewise
+\let\xmlfunction \clf_xmlfunction
+\let\xmlinclude \clf_xmlinclude
+\let\xmlincludeoptions \clf_xmlincludeoptions
+\let\xmlinclusion \clf_xmlinclusion
+\let\xmlinclusions \clf_xmlinclusions
+\let\xmlbadinclusions \clf_xmlbadinclusions
+\let\xmlindex \clf_xmlindex
+\let\xmlposition \clf_xmlindex
+\let\xmlinlineverbatim \clf_xmlinlineverbatim
+\let\xmllast \clf_xmllast
+% \xmlload
+% \xmlloadbuffer
+% \xmlloaddata
+\let\xmlloaddirectives \clf_xmlloaddirectives
+% \xmlloadregistered
+\let\xmlmain \clf_xmlmain
+\let\xmlmatch \clf_xmlmatch
+\let\xmlname \clf_xmlname
+\let\xmlnamespace \clf_xmlnamespace
+\let\xmlnonspace \clf_xmlnonspace
+\let\xmlpos \clf_xmlpos
+\let\xmlraw \clf_xmlraw
+\let\xmlregisterns \clf_xmlregisterns % document
+\let\xmlremapname \clf_xmlremapname % element
+\let\xmlremapnamespace \clf_xmlremapnamespace % document
+\let\xmlsave \clf_xmlsave
+%let\xmlsetfunction \clf_xmlsetfunction
+\let\xmlsetsetup \clf_xmlsetsetup
+\let\xmlsnippet \clf_xmlsnippet
+\let\xmlstrip \clf_xmlstrip
+\let\xmlstripanywhere \clf_xmlstripanywhere
+\let\xmlstripnolines \clf_xmlstripnolines
+\let\xmlstripped \clf_xmlstripped
+\let\xmlstrippednolines \clf_xmlstrippednolines
+\let\xmltag \clf_xmltag
+\let\xmltext \clf_xmltext
+\let\xmltobuffer \clf_xmltobuffer % id pattern name
+\let\xmltobufferverbose \clf_xmltobufferverbose % id pattern name
+\let\xmltofile \clf_xmltofile % id pattern filename
+\let\xmltoparameters \clf_xmltoparameters
+\let\xmlverbatim \clf_xmlverbatim
+
+\def\xmlinfo #1{\hbox{\ttxx[\clf_xmlinfo{#1}]}}
+\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
+
+% we need to pass the last argument as a function, so
+
+\def\xmlsetfunction#1#2#3{\ctxcommand{xmlsetfunction("#1",\!!bs#2\!!es,#3)}}
+
+% goodie:
+
+\unexpanded\def\xmlprettyprint#1#2%
+ {\xmltobufferverbose{#1}{.}{xml-temp}%
+ \ifdefined\scitebuffer
+ \scitebuffer[#2][xml-temp]%
+ \else
+ \typebuffer[xml-temp][\c!option=#2]%
+ \fi}
% kind of special:
-\def\xmlstartraw{\ctxlxml{startraw()}}
-\def\xmlstopraw {\ctxlxml{stopraw()}}
+%def\xmlstartraw{\clf_xmlstartraw}
+%def\xmlstopraw {\clf_xmlstopraw}
+
+\let\xmlstartraw\clf_xmlstartraw
+\let\xmlstopraw \clf_xmlstopraw
-% todo: \xmldoifelseattribute
+% these are expandable! todo: \xmldoifelseattribute
-\def\xmldoif #1#2{\ctxlxml{doif (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifnot #1#2{\ctxlxml{doifnot (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifelse #1#2{\ctxlxml{doifelse (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoiftext #1#2{\ctxlxml{doiftext (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifnottext #1#2{\ctxlxml{doifnottext (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifelsetext #1#2{\ctxlxml{doifelsetext(\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
+\let\xmldoif \clf_xmldoif
+\let\xmldoifnot \clf_xmldoifnot
+\let\xmldoifelse \clf_xmldoifelse
+\let\xmldoiftext \clf_xmldoiftext
+\let\xmldoifnottext \clf_xmldoifnottext
+\let\xmldoifelsetext \clf_xmldoifelsetext
-%def\xmldoifelseempty #1#2{\ctxlxml{doifelseempty("#1","#2")}} % #2, "*" or "" == self not yet implemented
-%def\xmldoifelseselfempty #1{\ctxlxml{doifelseempty("#1")}}
+\let\xmldoifempty \clf_xmldoifempty
+\let\xmldoifnotempty \clf_xmldoifnotempty
+\let\xmldoifelseempty \clf_xmldoifelseempty
+\let\xmldoifselfempty \clf_xmldoifselfempty
+\let\xmldoifnotselfempty \clf_xmldoifnotselfempty
+\let\xmldoifelseselfempty \clf_xmldoifelseselfempty
+
+\let\xmldoiftextelse \xmldoifelsetext
+\let\xmldoifemptyelse \xmldoifelseempty
+\let\xmldoifselfemptyelse \xmldoifelseselfempty
% \startxmlsetups xml:include
% \xmlinclude{main}{include}{filename|href}
@@ -127,28 +231,28 @@
%\ef\xmlsetup#1#2{\setupwithargument{#2}{#1}}
\let\xmlsetup\setupwithargumentswapped
-\let\xmls\setupwithargumentswapped
-\let\xmlw\setupwithargument
+\let\xmls\setupwithargumentswapped % hardly any faster
+\let\xmlw\setupwithargument % hardly any faster
\newtoks \registeredxmlsetups
% todo: 1:xml:whatever always before 3:xml:something
-\unexpanded\def\xmlprependsetup #1{\ctxlxml{installsetup(1,"*","#1")}}
-\unexpanded\def\xmlappendsetup #1{\ctxlxml{installsetup(2,"*","#1")}}
-\unexpanded\def\xmlbeforesetup #1#2{\ctxlxml{installsetup(3,"*","#1","#2")}}
-\unexpanded\def\xmlaftersetup #1#2{\ctxlxml{installsetup(4,"*","#1","#2")}}
+\unexpanded\def\xmlprependsetup #1{\clf_xmlprependsetup {*}{#1}}
+\unexpanded\def\xmlappendsetup #1{\clf_xmlappendsetup {*}{#1}}
+\unexpanded\def\xmlbeforesetup #1#2{\clf_xmlbeforesetup {*}{#1}{#2}}
+\unexpanded\def\xmlaftersetup #1#2{\clf_xmlaftersetup {*}{#1}{#2}}
-\unexpanded\def\xmlprependdocumentsetup #1#2{\ctxlxml{installsetup(1,"#1","#2")}}
-\unexpanded\def\xmlappenddocumentsetup #1#2{\ctxlxml{installsetup(2,"#1","#2")}}
-\unexpanded\def\xmlbeforedocumentsetup#1#2#3{\ctxlxml{installsetup(3,"#1","#2","#3")}}
-\unexpanded\def\xmlafterdocumentsetup #1#2#3{\ctxlxml{installsetup(4,"#1","#2","#3")}}
+\unexpanded\def\xmlprependdocumentsetup #1#2{\clf_xmlprependdocumentsetup{#1}{#2}}
+\unexpanded\def\xmlappenddocumentsetup #1#2{\clf_xmlappenddocumentsetup {#1}{#2}}
+\unexpanded\def\xmlbeforedocumentsetup #1#2#3{\clf_xmlbeforedocumentsetup {#1}{#2}{#3}}
+\unexpanded\def\xmlafterdocumentsetup #1#2#3{\clf_xmlafterdocumentsetup {#1}{#2}{#3}}
-\unexpanded\def\xmlremovesetup #1{\ctxlxml{removesetup("*","#1")}}
-\unexpanded\def\xmlremovedocumentsetup #1#2{\ctxlxml{removesetup("#1","#2")}}
+\unexpanded\def\xmlremovesetup #1{\clf_xmlremovesetup {*}{#1}}
+\unexpanded\def\xmlremovedocumentsetup #1#2{\clf_xmlremovedocumentsetup {#1}{#2}}
-\unexpanded\def\xmlflushdocumentsetups #1#2{\ctxlxml{flushsetups("#1","*","#2")}} % #1 == id where to apply * and #2
-\unexpanded\def\xmlresetdocumentsetups #1{\ctxlxml{resetsetups("#1")}}
+\unexpanded\def\xmlflushdocumentsetups #1#2{\clf_xmlflushdocumentsetups {#1}{*}{#2}} % #1 == id where to apply * and #2
+\unexpanded\def\xmlresetdocumentsetups #1{\clf_xmlresetdocumentsetups {#1}}
\let\xmlregistersetup \xmlappendsetup
\let\xmlregisterdocumentsetup\xmlappenddocumentsetup
@@ -168,8 +272,8 @@
\xmldefaulttotext{#1}% after include
\xmlstoptiming}
-\unexpanded\def\xmlstarttiming{\ctxlua{statistics.starttiming(lxml)}}
-\unexpanded\def\xmlstoptiming {\ctxlua{statistics.stoptiming (lxml)}}
+\unexpanded\def\xmlstarttiming{\clf_xmlstarttiming}
+\unexpanded\def\xmlstoptiming {\clf_xmlstoptiming}
\def\lxml_process#1#2#3#4#5% flag \loader id name what initializersetup
{\begingroup
@@ -227,11 +331,6 @@
% \xmlsetfunction{main}{verbatim}{lxml.displayverbatim}
% \xmlsetfunction{main}{verb} {lxml.inlineverbatim}
-% \unexpanded\def\startxmldisplayverbatim[#1]{}
-% \unexpanded\def\stopxmldisplayverbatim {}
-% \unexpanded\def\startxmlinlineverbatim [#1]{}
-% \unexpanded\def\stopxmlinlineverbatim {}
-
% we use an xml: namespace so one has to define a suitable verbatim, say
%
% \definetyping[xml:verbatim][typing]
@@ -250,7 +349,7 @@
\unexpanded\def\startxmlinlineverbatim[#1]%
{\begingroup
\edef\currenttype{xml:#1}%
- \let\stopxmldisplayverbatim\endgroup
+ \let\stopxmlinlineverbatim\endgroup
\doinitializeverbatim}
% will move but is developed for xml
@@ -269,14 +368,13 @@
\def\inlinemessage #1{\dontleavehmode{\tttf#1}}
\def\displaymessage#1{\blank\inlinemessage{#1}\blank}
-% \def\xmltraceentities % settextcleanup is not defined
-% {\ctxlua{xml.settextcleanup(lxml.trace_text_entities)}%
-% \appendtoks\ctxlxml{showtextentities()}\to\everygoodbye}
-
% processing instructions
-\def\xmlcontextdirective#1% kind class key value
- {\executeifdefined{xml#1directive}\gobblethreearguments}
+\unexpanded\def\xmlinstalldirective#1#2%
+ {\clf_xmlinstalldirective{#1}{\checkedstrippedcsname#2}}
+
+% \def\xmlcontextdirective#1% kind class key value
+% {\executeifdefined{xml#1directive}\gobblethreearguments}
% setting up xml:
%
@@ -299,15 +397,15 @@
\letvalue{\??xmldefaults\v!text }\plusone
\letvalue{\??xmldefaults\v!hidden}\plustwo
-\unexpanded\def\xmldefaulttotext#1%
+\unexpanded\def\xmldefaulttotext
{\ifcase\xmlprocessingmode
- % unset
+ \expandafter\gobbleoneargument % unset
\or
- \ctxlxml{setcommandtotext("#1")}% 1
+ \expandafter\clf_xmlsetcommandtotext % 1
\or
- \ctxlxml{setcommandtonone("#1")}% 2
+ \expandafter\clf_xmlsetcommandtonone % 2
\else
- % unset
+ \expandafter\gobbleoneargument % unset
\fi}
\appendtoks
@@ -324,22 +422,23 @@
%def\xmlvalue #1#2{\ifcsname\??xmlmapvalue#1:#2\endcsname\csname\??xmlmapvalue#1:#2\expandafter\expandafter\gobbleoneargument\expandafter\endcsname\else\expandafter\firstofoneargument\fi}
\def\xmldoifelsevalue #1#2{\ifcsname\??xmlmapvalue#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\let\xmldoifvalueelse\xmldoifelsevalue
+
\let\xmlmapval\xmlmapvalue
\let\xmlval \xmlvalue
%D Experimental:
-\def\xmlgetindex #1{\ctxlxml{getindex("\xmldocument","#1")}}
-\def\xmlrawindex #1{\ctxlxml{rawindex("#1")}}
-\def\xmlwithindex #1#2{\ctxlxml{withindex("\xmldocument","#1","#2")}}
+\def\xmlgetindex #1{\clf_xmlgetindex {\xmldocument}{#1}}
+\def\xmlwithindex #1#2{\clf_xmlwithindex{\xmldocument}{#1}{#2}}
\def\xmlreference #1#2{\string\xmlwithindex{#1}{#2}}
%D Entities (might change):
\setnewconstant\xmlautoentities\plusone % 0=off, 1=upper, 2=upper,lower
-\def\xmlsetentity#1#2{\ctxlua{xml.registerentity('#1',\!!bs\detokenize{#2}\!!es)}}
-\def\xmltexentity#1#2{\ctxlua{lxml.registerentity('#1',\!!bs\detokenize{#2}\!!es)}}
+\unexpanded\def\xmlsetentity#1#2{\clf_xmlsetentity{#1}{\detokenize{#2}}}
+\unexpanded\def\xmltexentity#1#2{\clf_xmltexentity{#1}{\detokenize{#2}}}
% \xmlsetentity{tex}{\TEX{}} % {} needed
@@ -391,6 +490,16 @@
\let\processxmlfile \processXMLfile
\let\xmldata \XMLdata
+\unexpanded\def\xmlsetinjectors[#1]%
+ {\clf_xmlsetinjectors{#1}}
+
+\unexpanded\def\xmlresetinjectors
+ {\clf_xmlresetinjectors{}}
+
+\def\xmlinjector#1{\executeifdefined{#1}\donothing}
+
+\let\xmlapplyselectors\clf_xmlapplyselectors
+
\protect \endinput
% \newcount\charactersactiveoffset \charactersactiveoffset="10000
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index 51ab321b9..decb6567b 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -505,6 +505,9 @@ local function apply_expression(list,expression,order)
return collected
end
+-- this one can be made faster but there are not that many conversions so it doesn't
+-- really pay off
+
local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
local spaces = S(" \n\r\t\f")^0
@@ -541,12 +544,11 @@ local lp_builtin = P (
local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
--- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
--- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
-
-lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
-lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+----- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+----- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+local lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+local lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
@@ -806,7 +808,7 @@ local function nodesettostring(set,nodetest)
if not ns or ns == "" then ns = "*" end
if not tg or tg == "" then tg = "*" end
tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i] = (directive and tg) or format("not(%s)",tg)
+ t[#t+1] = (directive and tg) or format("not(%s)",tg)
end
if nodetest == false then
return format("not(%s)",concat(t,"|"))
@@ -1039,37 +1041,6 @@ local function normal_apply(list,parsed,nofparsed,order)
return collected
end
---~ local function applylpath(list,pattern)
---~ -- we avoid an extra call
---~ local parsed = cache[pattern]
---~ if parsed then
---~ lpathcalls = lpathcalls + 1
---~ lpathcached = lpathcached + 1
---~ elseif type(pattern) == "table" then
---~ lpathcalls = lpathcalls + 1
---~ parsed = pattern
---~ else
---~ parsed = lpath(pattern) or pattern
---~ end
---~ if not parsed then
---~ return
---~ end
---~ local nofparsed = #parsed
---~ if nofparsed == 0 then
---~ return -- something is wrong
---~ end
---~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
---~ if not one then
---~ return -- something is wrong
---~ elseif not trace_lpath then
---~ return normal_apply(list,parsed,nofparsed,one.mi)
---~ elseif trace_lprofile then
---~ return profiled_apply(list,parsed,nofparsed,one.mi)
---~ else
---~ return traced_apply(list,parsed,nofparsed,one.mi)
---~ end
---~ end
-
local function applylpath(list,pattern)
if not list then
return
@@ -1163,7 +1134,6 @@ expressions.print = function(...)
return true
end
-expressions.contains = find
expressions.find = find
expressions.upper = upper
expressions.lower = lower
@@ -1187,6 +1157,10 @@ function expressions.contains(str,pattern)
return false
end
+function xml.expressions.idstring(str)
+ return type(str) == "string" and gsub(str,"^#","") or ""
+end
+
-- user interface
local function traverse(root,pattern,handle)
@@ -1384,8 +1358,13 @@ function xml.elements(root,pattern,reverse) -- r, d, k
local collected = applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c = #collected + 1
+ end
+ local n = #collected
+ if n == 0 then
+ return dummy
+ end
+ if reverse then
+ local c = n + 1
return function()
if c > 1 then
c = c - 1
@@ -1395,7 +1374,7 @@ function xml.elements(root,pattern,reverse) -- r, d, k
end
end
else
- local n, c = #collected, 0
+ local c = 0
return function()
if c < n then
c = c + 1
@@ -1411,8 +1390,13 @@ function xml.collected(root,pattern,reverse) -- e
local collected = applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c = #collected + 1
+ end
+ local n = #collected
+ if n == 0 then
+ return dummy
+ end
+ if reverse then
+ local c = n + 1
return function()
if c > 1 then
c = c - 1
@@ -1420,7 +1404,7 @@ function xml.collected(root,pattern,reverse) -- e
end
end
else
- local n, c = #collected, 0
+ local c = 0
return function()
if c < n then
c = c + 1
@@ -1441,7 +1425,7 @@ end
-- texy (see xfdf):
-local function split(e)
+local function split(e) -- todo: use helpers / lpeg
local dt = e.dt
if dt then
for i=1,#dt do
diff --git a/tex/context/base/lxml-sor.mkiv b/tex/context/base/lxml-sor.mkiv
index 0ee1f16f3..0d8eb6ba1 100644
--- a/tex/context/base/lxml-sor.mkiv
+++ b/tex/context/base/lxml-sor.mkiv
@@ -19,10 +19,13 @@
\unprotect
+% the flusher is expandable so that it can be used in tables (noalign
+% interferences)
+
\unexpanded\def\xmlresetsorter #1{\ctxlxml{sorters.reset("#1")}}
\unexpanded\def\xmladdsortentry#1#2#3{\ctxlxml{sorters.add("#1","#2",\!!bs#3\!!es)}}
\unexpanded\def\xmlshowsorter #1{\ctxlxml{sorters.show("#1")}}
-\unexpanded\def\xmlflushsorter #1#2{\ctxlxml{sorters.flush("#1","#2")}}
+ \def\xmlflushsorter #1#2{\ctxlxml{sorters.flush("#1","#2")}}
\unexpanded\def\xmlsortentries #1{\ctxlxml{sorters.sort("#1")}}
\protect \endinput
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 3e10eb96d..47e2cac61 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -42,10 +42,10 @@ local xml = xml
--~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
+local type, next, setmetatable, getmetatable, tonumber, rawset = type, next, setmetatable, getmetatable, tonumber, rawset
local lower, find, match, gsub = string.lower, string.find, string.match, string.gsub
local utfchar = utf.char
-local lpegmatch = lpeg.match
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
local formatters = string.formatters
@@ -243,8 +243,10 @@ local function add_end(spacing, namespace, tag)
top = stack[#stack]
if #stack < 1 then
errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
elseif toclose.tg ~= tag then -- no namespace check
errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
end
dt = top.dt
dt[#dt+1] = toclose
@@ -254,11 +256,38 @@ local function add_end(spacing, namespace, tag)
end
end
+-- local function add_text(text)
+-- if cleanup and #text > 0 then
+-- dt[#dt+1] = cleanup(text)
+-- else
+-- dt[#dt+1] = text
+-- end
+-- end
+
local function add_text(text)
+ local n = #dt
if cleanup and #text > 0 then
- dt[#dt+1] = cleanup(text)
+ if n > 0 then
+ local s = dt[n]
+ if type(s) == "string" then
+ dt[n] = s .. cleanup(text)
+ else
+ dt[n+1] = cleanup(text)
+ end
+ else
+ dt[1] = cleanup(text)
+ end
else
- dt[#dt+1] = text
+ if n > 0 then
+ local s = dt[n]
+ if type(s) == "string" then
+ dt[n] = s .. text
+ else
+ dt[n+1] = text
+ end
+ else
+ dt[1] = text
+ end
end
end
@@ -297,8 +326,11 @@ local function attribute_specification_error(str)
return str
end
+local badentity = "&error;"
+local badentity = "&"
+
xml.placeholders = {
- unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&%s;"](str) end,
+ unknown_dec_entity = function(str) return str == "" and badentity or formatters["&%s;"](str) end,
unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end,
unknown_any_entity = function(str) return formatters["&#x%s;"](str) end,
}
@@ -325,12 +357,13 @@ end
-- one level expansion (simple case), no checking done
-local rest = (1-P(";"))^0
-local many = P(1)^0
+local p_rest = (1-P(";"))^0
+local p_many = P(1)^0
+local p_char = lpegpatterns.utf8character
local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+ P("&") * (P("#x")*(p_rest/fromhex) + P("#")*(p_rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(p_many/fromhex) + P("#")*(p_many/fromdec))
-- parsing in the xml file
@@ -367,7 +400,41 @@ local privates_n = {
-- keeps track of defined ones
}
-local escaped = utf.remapper(privates_u)
+-- -- local escaped = utf.remapper(privates_u) -- can't be used as it freezes
+-- -- local unprivatized = utf.remapper(privates_p) -- can't be used as it freezes
+--
+-- local p_privates_u = false
+-- local p_privates_p = false
+--
+-- table.setmetatablenewindex(privates_u,function(t,k,v) rawset(t,k,v) p_privates_u = false end)
+-- table.setmetatablenewindex(privates_p,function(t,k,v) rawset(t,k,v) p_privates_p = false end)
+--
+-- local function escaped(str)
+-- if not str or str == "" then
+-- return ""
+-- else
+-- if not p_privates_u then
+-- p_privates_u = Cs((lpeg.utfchartabletopattern(privates_u)/privates_u + p_char)^0)
+-- end
+-- return lpegmatch(p_privates_u,str)
+-- end
+-- end
+--
+-- local function unprivatized(str)
+-- if not str or str == "" then
+-- return ""
+-- else
+-- if not p_privates_p then
+-- p_privates_p = Cs((lpeg.utfchartabletopattern(privates_p)/privates_p + p_char)^0)
+-- end
+-- return lpegmatch(p_privates_p,str)
+-- end
+-- end
+
+local escaped = utf.remapper(privates_u,"dynamic")
+local unprivatized = utf.remapper(privates_p,"dynamic")
+
+xml.unprivatized = unprivatized
local function unescaped(s)
local p = privates_n[s]
@@ -382,10 +449,7 @@ local function unescaped(s)
return p
end
-local unprivatized = utf.remapper(privates_p)
-
xml.privatetoken = unescaped
-xml.unprivatized = unprivatized
xml.privatecodes = privates_n
local function handle_hex_entity(str)
@@ -484,7 +548,7 @@ local function handle_any_entity(str)
report_xml("keeping entity &%s;",str)
end
if str == "" then
- a = "&error;"
+ a = badentity
else
a = "&" .. str .. ";"
end
@@ -513,7 +577,7 @@ local function handle_any_entity(str)
if trace_entities then
report_xml("invalid entity &%s;",str)
end
- a = "&error;"
+ a = badentity
acache[str] = a
else
if trace_entities then
@@ -528,8 +592,19 @@ local function handle_any_entity(str)
end
end
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
+-- local function handle_end_entity(chr)
+-- report_xml("error in entity, %a found instead of %a",chr,";")
+-- end
+
+local function handle_end_entity(str)
+ report_xml("error in entity, %a found without ending %a",str,";")
+ return str
+end
+
+local function handle_crap_error(chr)
+ report_xml("error in parsing, unexpected %a found ",chr)
+ add_text(chr)
+ return chr
end
local space = S(' \r\n\t')
@@ -546,18 +621,20 @@ local valid = R('az', 'AZ', '09') + S('_-.')
local name_yes = C(valid^1) * colon * C(valid^1)
local name_nop = C(P(true)) * C(valid^1)
local name = name_yes + name_nop
-local utfbom = lpeg.patterns.utfbom -- no capture
+local utfbom = lpegpatterns.utfbom -- no capture
local spacing = C(space^0)
----- entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close)^0
+local anyentitycontent = (1-open-semicolon-space-close-ampersand)^0
local hexentitycontent = R("AF","af","09")^0
local decentitycontent = R("09")^0
local parsedentity = P("#")/"" * (
P("x")/"" * (hexentitycontent/handle_hex_entity) +
(decentitycontent/handle_dec_entity)
) + (anyentitycontent/handle_any_entity)
-local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
+----- entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
+local entity = (ampersand/"") * parsedentity * (semicolon/"")
+ + ampersand * (anyentitycontent / handle_end_entity)
local text_unparsed = C((1-open)^1)
local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
@@ -590,6 +667,8 @@ local emptyelement = (spacing * open * name * attributes * optionals
local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
+-- todo: combine the opens in:
+
local begincomment = open * P("!--")
local endcomment = P("--") * close
local begininstruction = open * P("?")
@@ -635,6 +714,14 @@ local comment = (spacing * begincomment * somecomment * endcomm
local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
+-- local text_unparsed = C((1-open)^1)
+-- local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
+
+local crap_parsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata - ampersand
+local crap_unparsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata
+local parsedcrap = Cs((crap_parsed^1 + entity)^1) / handle_crap_error
+local unparsedcrap = Cs((crap_unparsed )^1) / handle_crap_error
+
-- nicer but slower:
--
-- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
@@ -651,13 +738,13 @@ local trailer = space^0 * (text_unparsed/set_message)^0
local grammar_parsed_text = P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
parent = beginelement * V("children")^0 * endelement,
- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
}
local grammar_unparsed_text = P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
parent = beginelement * V("children")^0 * endelement,
- children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction + unparsedcrap,
}
-- maybe we will add settings to result as well
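The reworked entity pattern above no longer stalls on a stray ampersand: an entity without a terminating semicolon is reported and kept as literal text, and unexpected material in the element grammar is routed through handle_crap_error instead of aborting the parse. A minimal standalone sketch of the recovery idea for entities, using plain lpeg only (the messages and the bracket replacement are illustrative, not the ConTeXt code):

local lpeg = require("lpeg")
local P, R, C, Cs = lpeg.P, lpeg.R, lpeg.C, lpeg.Cs

local function handle_end_entity(s)
    print(("error in entity, %q found without ending %q"):format(s,";"))
    return "&" .. s -- keep the raw text so the result stays usable
end

local ampersand  = P("&")
local semicolon  = P(";")
local entityname = R("az","AZ","09")^1

local entity_ok  = (ampersand * C(entityname) * semicolon) / function(s) return "[" .. s .. "]" end
local entity_bad = (ampersand * C(entityname)) / handle_end_entity
local entity     = entity_ok + entity_bad

local parsed = Cs(((1 - ampersand)^1 + entity)^1)

print(lpeg.match(parsed,"a &amp; b")) --> a [amp] b
print(lpeg.match(parsed,"a &amp b"))  --> reported, kept as "a &amp b"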
@@ -697,7 +784,7 @@ local function _xmlconvert_(data, settings)
errorstr = "empty xml file"
elseif utfize or resolve then
if lpegmatch(grammar_parsed_text,data) then
- errorstr = ""
+ -- errorstr = "" can be set!
else
errorstr = "invalid xml file - parsed text"
end
@@ -713,6 +800,8 @@ local function _xmlconvert_(data, settings)
local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
+setmetatable(result, mt)
+setmetatable(result.dt[1], mt)
setmetatable(stack, mt)
local errorhandler = settings.error_handler
if errorhandler == false then
@@ -746,8 +835,11 @@ local function _xmlconvert_(data, settings)
end
if errorstr and errorstr ~= "" then
result.error = true
+ else
+ errorstr = nil
end
result.statistics = {
+ errormessage = errorstr,
entities = {
decimals = dcache,
hexadecimals = hcache,
@@ -765,7 +857,7 @@ end
-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
-- in a protector:
-function xmlconvert(data,settings)
+local function xmlconvert(data,settings)
local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
if ok then
return result
@@ -916,14 +1008,18 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers) -- options
+local f_attribute = formatters['%s=%q']
+
+local function verbose_element(e,handlers,escape) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
+ local n = 0
for k,v in next, eat do
- ats[#ats+1] = formatters['%s=%q'](k,escaped(v))
+ n = n + 1
+ ats[n] = f_attribute(k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -1016,25 +1112,27 @@ local function verbose_document(e,handlers)
end
local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
+ if e then
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
+ end
+ end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers) -- dc ?
+ end
+ if finalize then
+ return finalize()
end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
end
end
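The serialize rewrite above is mostly a guard: when the function is handed nil (for instance after a failed conversion) it now returns quietly instead of indexing a nil value. A reduced sketch of that pattern with a hypothetical handler table:

local handlers = {
    functions = {
        ["@el@"] = function(e) print("element: " .. e.tg) end,
        ["@dc@"] = function(e) print("document") end,
    },
}

local function serialize(e,handlers)
    if not e then
        return -- nil input: nothing to do, no error
    end
    local functions = handlers.functions
    if e.tg then
        (functions[e.tg] or functions["@el@"])(e,handlers)
    else
        functions["@dc@"](e,handlers)
    end
end

serialize({ tg = "chapter" },handlers) -- element: chapter
serialize(nil,handlers)                -- silently ignored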
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index 2cbdfc886..550a06a18 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['lxml-tex'] = {
-- be a cldf-xml helper library.
local utfchar = utf.char
-local concat, insert, remove = table.concat, table.insert, table.remove
+local concat, insert, remove, sortedkeys = table.concat, table.insert, table.remove, table.sortedkeys
local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select
local lpegmatch = lpeg.match
@@ -19,40 +19,66 @@ local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
local tex, xml = tex, xml
local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
+local basename, dirname, joinfile = file.basename, file.dirname, file.join
lxml = lxml or { }
local lxml = lxml
-local catcodenumbers = catcodes.numbers
-local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
-local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
-
-local commands = commands
-local context = context
-local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
-
-local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty
-local xmlwithelements = xml.withelements
-local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring
-local xmlapplylpath = xml.applylpath
-local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes
-
-local variables = (interfaces and interfaces.variables) or { }
-
-local insertbeforevalue, insertaftervalue = utilities.tables.insertbeforevalue, utilities.tables.insertaftervalue
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end)
-local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end)
-local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
-local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_lxml = logs.reporter("xml","tex")
-local report_xml = logs.reporter("xml","tex")
-
-local forceraw, rawroot = false, nil
+local catcodenumbers = catcodes.numbers
+local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
+local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
+
+local commands = commands
+local context = context
+local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
+
+local implement = interfaces.implement
+
+local xmlelements = xml.elements
+local xmlcollected = xml.collected
+local xmlsetproperty = xml.setproperty
+local xmlwithelements = xml.withelements
+local xmlserialize = xml.serialize
+local xmlcollect = xml.collect
+local xmltext = xml.text
+local xmltostring = xml.tostring
+local xmlapplylpath = xml.applylpath
+local xmlunprivatized = xml.unprivatized
+local xmlprivatetoken = xml.privatetoken
+local xmlprivatecodes = xml.privatecodes
+local xmlstripelement = xml.stripelement
+local xmlinclusion = xml.inclusion
+local xmlinclusions = xml.inclusions
+local xmlbadinclusions = xml.badinclusions
+local xmlcontent = xml.content
+
+local variables = interfaces and interfaces.variables or { }
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+local settings_to_set = utilities.parsers.settings_to_set
+local options_to_hash = utilities.parsers.options_to_hash
+local options_to_array = utilities.parsers.options_to_array
+
+local insertbeforevalue = utilities.tables.insertbeforevalue
+local insertaftervalue = utilities.tables.insertaftervalue
+
+local resolveprefix = resolvers.resolve
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end)
+local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end)
+local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
+local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
+local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local trace_selectors = false trackers.register("lxml.selectors",function(v) trace_selectors = v end)
+
+local report_lxml = logs.reporter("lxml","tex")
+local report_xml = logs.reporter("xml","tex")
+
+local forceraw = false
+local rawroot  = nil
-- tex entities
--
@@ -62,7 +88,7 @@ lxml.entities = lxml.entities or { }
storage.register("lxml/entities",lxml.entities,"lxml.entities")
---~ xml.placeholders.unknown_any_entity = nil -- has to be per xml
+-- xml.placeholders.unknown_any_entity = nil -- has to be per xml
local xmlentities = xml.entities
local texentities = lxml.entities
@@ -351,7 +377,7 @@ end
function lxml.checkindex(name)
local root = getid(name)
- return (root and root.index) or 0
+ return root and root.index or 0
end
function lxml.withindex(name,n,command) -- will change as name is always there now
@@ -414,7 +440,7 @@ function lxml.convert(id,data,entities,compress,currentresource)
end
function lxml.load(id,filename,compress,entities)
- filename = commands.preparedfile(filename) -- not commands!
+ filename = ctxrunner.preparedfile(filename)
if trace_loading then
report_lxml("loading file %a as %a",filename,id)
end
@@ -433,16 +459,43 @@ function lxml.register(id,xmltable,filename)
return xmltable
end
-function lxml.include(id,pattern,attribute,recurse)
+-- recurse prepare rootpath resolve basename
+
+local options_true = { "recurse", "prepare", "rootpath" }
+local options_nil = { "prepare", "rootpath" }
+
+function lxml.include(id,pattern,attribute,options)
starttiming(xml)
local root = getid(id)
- xml.include(root,pattern,attribute,recurse,function(filename)
+ if options == true then
+ -- downward compatible
+ options = options_true
+ elseif not options then
+ -- downward compatible
+ options = options_nil
+ else
+ options = settings_to_hash(options) or { }
+ end
+ xml.include(root,pattern,attribute,options.recurse,function(filename)
if filename then
- filename = commands.preparedfile(filename)
- if file.dirname(filename) == "" and root.filename then
- local dn = file.dirname(root.filename)
- if dn ~= "" then
- filename = file.join(dn,filename)
+ -- preprocessing
+ if options.prepare then
+ filename = commands.preparedfile(filename)
+ end
+ -- handy if we have a flattened structure
+ if options.basename then
+ filename = basename(filename)
+ end
+ if options.resolve then
+ filename = resolveprefix(filename) or filename
+ end
+ -- some protection
+ if options.rootpath then
+ if dirname(filename) == "" and root.filename then
+ local dn = dirname(root.filename)
+ if dn ~= "" then
+ filename = joinfile(dn,filename)
+ end
end
end
if trace_loading then
@@ -457,6 +510,31 @@ function lxml.include(id,pattern,attribute,recurse)
stoptiming(xml)
end
+function lxml.inclusion(id,default)
+ local inclusion = xmlinclusion(getid(id),default)
+ if inclusion then
+ context(inclusion)
+ end
+end
+
+function lxml.inclusions(id,sorted)
+ local inclusions = xmlinclusions(getid(id),sorted)
+ if inclusions then
+ context(concat(inclusions,","))
+ end
+end
+
+function lxml.badinclusions(id,sorted)
+ local badinclusions = xmlbadinclusions(getid(id),sorted)
+ if badinclusions then
+ context(concat(badinclusions,","))
+ end
+end
+
+function lxml.save(id,name)
+ xml.save(getid(id),name)
+end
+
function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed
if not name or name == "" then
name = tex.jobname
@@ -538,30 +616,49 @@ local function tex_element(e,handlers)
end
end
+-- <?context-directive foo ... ?>
+-- <?context-foo-directive ... ?>
+
local pihandlers = { } xml.pihandlers = pihandlers
-local category = P("context-") * C((1-P("-"))^1) * P("-directive")
local space = S(" \n\r")
local spaces = space^0
local class = C((1-space)^0)
local key = class
+local rest = C(P(1)^0)
local value = C(P(1-(space * -1))^0)
-
-local parser = category * spaces * class * spaces * key * spaces * value
-
-pihandlers[#pihandlers+1] = function(str)
- if str then
- local a, b, c, d = lpegmatch(parser,str)
- if d then
- contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}")
+local category = P("context-") * (
+ C((1-P("-"))^1) * P("-directive")
+ + P("directive") * spaces * key
+ )
+
+local c_parser = category * spaces * value -- rest
+local k_parser = class * spaces * key * spaces * rest --value
+
+implement {
+ name = "xmlinstalldirective",
+ arguments = { "string", "string" },
+ actions = function(name,csname)
+ if csname then
+ local keyvalueparser = k_parser / context[csname]
+ local keyvaluechecker = function(category,rest,e)
+ lpegmatch(keyvalueparser,rest)
+ end
+ pihandlers[name] = keyvaluechecker
end
end
-end
+}
local function tex_pi(e,handlers)
local str = e.dt[1]
- for i=1,#pihandlers do
- pihandlers[i](str)
+ if str and str ~= "" then
+ local category, rest = lpegmatch(c_parser,str)
+ if category and rest and #rest > 0 then
+ local handler = pihandlers[category]
+ if handler then
+ handler(category,rest,e)
+ end
+ end
end
end
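A standalone sketch of the directive dispatch introduced above: both spellings of the processing instruction reduce to a category plus the remaining text, and that text is passed to whatever handler is registered under the category. Only lpeg is assumed; the category name and directive text below are invented for the example:

local lpeg = require("lpeg")
local P, S, C = lpeg.P, lpeg.S, lpeg.C
local lpegmatch = lpeg.match

local space  = S(" \n\r")
local spaces = space^0
local key    = C((1 - space)^0)
local value  = C(P(1)^0)

local category = P("context-") * (
                     C((1 - P("-"))^1) * P("-directive")
                   + P("directive") * spaces * key
                 )

local c_parser   = category * spaces * value
local pihandlers = { }

pihandlers["mathematics"] = function(category,rest)
    print("mathematics directive:",rest)
end

local function handle_pi(str)
    local category, rest = lpegmatch(c_parser,str)
    if category and rest and rest ~= "" then
        local handler = pihandlers[category]
        if handler then
            handler(category,rest)
        end
    end
end

handle_pi("context-mathematics-directive italics yes")
handle_pi("context-directive mathematics italics yes")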
@@ -915,16 +1012,18 @@ function lxml.setsetup(id,pattern,setup)
end
end
end
+ elseif setup == "-" then
+ for c=1,nc do
+ collected[c].command = false
+ end
+ elseif setup == "+" then
+ for c=1,nc do
+ collected[c].command = true
+ end
else
for c=1,nc do
local e = collected[c]
- if setup == "-" then
- e.command = false
- elseif setup == "+" then
- e.command = true
- else
- e.command = e.tg
- end
+ e.command = e.tg
end
end
elseif trace_setups then
@@ -967,16 +1066,18 @@ function lxml.setsetup(id,pattern,setup)
end
end
end
+ elseif b == "-" then
+ for c=1,nc do
+ collected[c].command = false
+ end
+ elseif b == "+" then
+ for c=1,nc do
+ collected[c].command = true
+ end
else
for c=1,nc do
local e = collected[c]
- if b == "-" then
- e.command = false
- elseif b == "+" then
- e.command = true
- else
- e.command = a .. e.tg
- end
+ e.command = a .. e.tg
end
end
elseif trace_setups then
@@ -1112,11 +1213,13 @@ local function command(collected,cmd,otherwise)
local e = collected[c]
local ix = e.ix
local name = e.name
- if not ix then
+ if name and not ix then
lxml.addindex(name,false,true)
ix = e.ix
end
- if wildcard then
+ if not ix or not name then
+ report_lxml("no valid node index for element %a using command %s",name or "?",cmd)
+ elseif wildcard then
contextsprint(ctxcatcodes,"\\xmlw{",(gsub(cmd,"%*",e.tg)),"}{",name,"::",ix,"}")
else
contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}")
@@ -1186,7 +1289,7 @@ local function stripped(collected) -- tricky as we strip in place
local nc = #collected
if nc > 0 then
for c=1,nc do
- cprint(xml.stripelement(collected[c]))
+ cprint(xmlstripelement(collected[c]))
end
end
end
@@ -1311,10 +1414,11 @@ function texfinalizers.name(collected,n)
c = collected[nc-n+1]
end
if c then
- if c.ns == "" then
+ local ns = c.ns
+ if not ns or ns == "" then
contextsprint(ctxcatcodes,c.tg)
else
- contextsprint(ctxcatcodes,c.ns,":",c.tg)
+ contextsprint(ctxcatcodes,ns,":",c.tg)
end
end
end
@@ -1327,11 +1431,11 @@ function texfinalizers.tags(collected,nonamespace)
if nc > 0 then
for c=1,nc do
local e = collected[c]
- local ns, tg = e.ns, e.tg
- if nonamespace or ns == "" then
- contextsprint(ctxcatcodes,tg)
+ local ns = e.ns
+ if nonamespace or (not ns or ns == "") then
+ contextsprint(ctxcatcodes,e.tg)
else
- contextsprint(ctxcatcodes,ns,":",tg)
+ contextsprint(ctxcatcodes,ns,":",e.tg)
end
end
end
@@ -1341,11 +1445,10 @@ end
--
local function verbatim(id,before,after)
- local root = getid(id)
- if root then
- if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
- lxml.toverbatim(xmltostring(root.dt))
---~ lxml.toverbatim(xml.totext(root.dt))
+ local e = getid(id)
+ if e then
+ if before then contextsprint(ctxcatcodes,before,"[",e.tg or "?","]") end
+ lxml.toverbatim(xmltostring(e.dt)) -- lxml.toverbatim(xml.totext(e.dt))
if after then contextsprint(ctxcatcodes,after) end
end
end
@@ -1429,7 +1532,7 @@ end
lxml.content = text
function lxml.position(id,pattern,n)
- position(xmlapplylpath(getid(id),pattern),n)
+ position(xmlapplylpath(getid(id),pattern),tonumber(n))
end
function lxml.chainattribute(id,pattern,a,default)
@@ -1445,72 +1548,136 @@ function lxml.concat(id,pattern,separator,lastseparator,textonly)
end
function lxml.element(id,n)
- position(xmlapplylpath(getid(id),"/*"),n)
+ position(xmlapplylpath(getid(id),"/*"),tonumber(n)) -- tonumber handy
end
lxml.index = lxml.position
function lxml.pos(id)
- local root = getid(id)
- contextsprint(ctxcatcodes,(root and root.ni) or 0)
-end
+ local e = getid(id)
+ contextsprint(ctxcatcodes,e and e.ni or 0)
+end
+
+-- function lxml.att(id,a,default)
+-- local root = getid(id)
+-- if root then
+-- local at = root.at
+-- local str = (at and at[a]) or default
+-- if str and str ~= "" then
+-- contextsprint(notcatcodes,str)
+-- end
+-- elseif default then
+-- contextsprint(notcatcodes,default)
+-- end
+-- end
+--
+-- no need for an assignment so:
function lxml.att(id,a,default)
- local root = getid(id)
- if root then
- local at = root.at
- local str = (at and at[a]) or default
- if str and str ~= "" then
- contextsprint(notcatcodes,str)
+ local e = getid(id)
+ if e then
+ local at = e.at
+ if at then
+ -- normally always true
+ local str = at[a]
+ if not str then
+ if default and default ~= "" then
+ contextsprint(notcatcodes,default)
+ end
+ elseif str ~= "" then
+ contextsprint(notcatcodes,str)
+ else
+ -- explicit empty is valid
+ end
+ elseif default and default ~= "" then
+ contextsprint(notcatcodes,default)
end
- elseif default then
+ elseif default and default ~= "" then
contextsprint(notcatcodes,default)
end
end
+function lxml.refatt(id,a)
+ local e = getid(id)
+ if e then
+ local at = e.at
+ if at then
+ local str = at[a]
+ if str and str ~= "" then
+ str = gsub(str,"^#+","")
+ if str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ end
+ end
+ end
+end
+
function lxml.name(id) -- or remapped name? -> lxml.info, combine
- local r = getid(id)
- local ns = r.rn or r.ns or ""
- if ns ~= "" then
- contextsprint(ctxcatcodes,ns,":",r.tg)
- else
- contextsprint(ctxcatcodes,r.tg)
+ local e = getid(id)
+ if e then
+ local ns = e.rn or e.ns
+ if ns and ns ~= "" then
+ contextsprint(ctxcatcodes,ns,":",e.tg)
+ else
+ contextsprint(ctxcatcodes,e.tg)
+ end
end
end
function lxml.match(id) -- or remapped name? -> lxml.info, combine
- contextsprint(ctxcatcodes,getid(id).mi or 0)
+ local e = getid(id)
+ contextsprint(ctxcatcodes,e and e.mi or 0)
end
function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
- contextsprint(ctxcatcodes,getid(id).tg or "")
+ local e = getid(id)
+ if e then
+ local tg = e.tg
+ if tg and tg ~= "" then
+ contextsprint(ctxcatcodes,tg)
+ end
+ end
end
function lxml.namespace(id) -- or remapped name?
- local root = getid(id)
- contextsprint(ctxcatcodes,root.rn or root.ns or "")
+ local e = getid(id)
+ if e then
+ local ns = e.rn or e.ns
+ if ns and ns ~= "" then
+ contextsprint(ctxcatcodes,ns)
+ end
+ end
end
function lxml.flush(id)
- id = getid(id)
- local dt = id and id.dt
- if dt then
- xmlsprint(dt)
+ local e = getid(id)
+ if e then
+ local dt = e.dt
+ if dt then
+ xmlsprint(dt)
+ end
end
end
function lxml.snippet(id,i)
local e = getid(id)
if e then
- local edt = e.dt
- if edt then
- xmlsprint(edt[i])
+ local dt = e.dt
+ if dt then
+ local dti = dt[i]
+ if dti then
+ xmlsprint(dti)
+ end
end
end
end
function lxml.direct(id)
- xmlsprint(getid(id))
+ local e = getid(id)
+ if e then
+ xmlsprint(e)
+ end
end
function lxml.command(id,pattern,cmd)
@@ -1562,7 +1729,20 @@ function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) e
-- special case: "*" and "" -> self else lpath lookup
---~ function lxml.doifelseempty(id,pattern) doifelse(isempty(getid(id),pattern ~= "" and pattern ~= nil)) end -- not yet done, pattern
+local function checkedempty(id,pattern)
+ local e = getid(id)
+ if not pattern or pattern == "" then
+ local dt = e.dt
+ local nt = #dt
+ return (nt == 0) or (nt == 1 and dt[1] == "")
+ else
+ return isempty(getid(id),pattern)
+ end
+end
+
+function lxml.doifempty (id,pattern) doif (checkedempty(id,pattern)) end
+function lxml.doifnotempty (id,pattern) doifnot (checkedempty(id,pattern)) end
+function lxml.doifelseempty(id,pattern) doifelse(checkedempty(id,pattern)) end
-- status info
@@ -1690,3 +1870,213 @@ end
texfinalizers.upperall = xmlfinalizers.upperall
texfinalizers.lowerall = xmlfinalizers.lowerall
+
+function lxml.tobuffer(id,pattern,name,unescaped)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ if unescaped then
+ collected = xmlcontent(collected[1]) -- expanded entities !
+ else
+ collected = tostring(collected[1])
+ end
+ buffers.assign(name,collected)
+ else
+ buffers.erase(name)
+ end
+end
+
+-- relatively new:
+
+local permitted = nil
+local ctx_xmlinjector = context.xmlinjector
+
+xml.pihandlers["injector"] = function(category,rest,e)
+ local options = options_to_array(rest)
+ local action = options[1]
+ if not action then
+ return
+ end
+ local n = #options
+ if n > 1 then
+ local category = options[2]
+ if category == "*" then
+ ctx_xmlinjector(action)
+ elseif permitted then
+ if n == 2 then
+ if permitted[category] then
+ ctx_xmlinjector(action)
+ end
+ else
+ for i=2,n do
+ local category = options[i]
+ if category == "*" or permitted[category] then
+ ctx_xmlinjector(action)
+ return
+ end
+ end
+ end
+ end
+ else
+ ctx_xmlinjector(action)
+ end
+end
+
+local pattern = P("context-") * C((1-lpeg.patterns.whitespace)^1) * C(P(1)^1)
+
+function lxml.applyselectors(id)
+ local root = getid(id)
+ local function filter(e)
+ local dt = e.dt
+ local ndt = #dt
+ local done = false
+ local i = 1
+ while i <= ndt do
+ local dti = dt[i]
+ if type(dti) == "table" then
+ if dti.tg == "@pi@" then
+ local text = dti.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "select" then
+ local categories = options_to_hash(rest)
+ if categories["begin"] then
+ local okay = false
+ for k, v in next, permitted do
+ if categories[k] then
+ okay = k
+ break
+ end
+ end
+ if not trace_selectors then
+ -- skip
+ elseif okay then
+ report_lxml("accepting selector: %s",okay)
+ else
+ categories.begin = false
+ report_lxml("rejecting selector: % t",sortedkeys(categories))
+ end
+ for j=i,ndt do
+ local dtj = dt[j]
+ if type(dtj) == "table" then
+ local tg = dtj.tg
+ if tg == "@pi@" then
+ local text = dtj.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "select" then
+ local categories = options_to_hash(rest)
+ if categories["end"] then
+ i = j
+ break
+ else
+ -- error
+ end
+ end
+ elseif not okay then
+ dtj.tg = "@cm@"
+ end
+ else
+-- dt[j] = "" -- okay ?
+ end
+ end
+ end
+ elseif what == "include" then
+ local categories = options_to_hash(rest)
+ if categories["begin"] then
+ local okay = false
+ for k, v in next, permitted do
+ if categories[k] then
+ okay = k
+ break
+ end
+ end
+ if not trace_selectors then
+ -- skip
+ elseif okay then
+ report_lxml("accepting include: %s",okay)
+ else
+ categories.begin = false
+ report_lxml("rejecting include: % t",sortedkeys(categories))
+ end
+ if okay then
+ for j=i,ndt do
+ local dtj = dt[j]
+ if type(dtj) == "table" then
+ local tg = dtj.tg
+ if tg == "@cm@" then
+ local content = dtj.dt[1]
+ local element = root and xml.toelement(content,root)
+ dt[j] = element
+ element.__p__ = dt -- needs checking
+ done = true
+ elseif tg == "@pi@" then
+ local text = dtj.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "include" then
+ local categories = options_to_hash(rest)
+ if categories["end"] then
+ i = j
+ break
+ else
+ -- error
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ else
+ filter(dti)
+ end
+ end
+ if done then
+ -- probably not needed
+ xml.reindex(dt)
+ end
+ end
+ i = i + 1
+ end
+ end
+ xmlwithelements(root,filter)
+end
+
+function xml.setinjectors(set)
+ local s = settings_to_set(set)
+ if permitted then
+ for k, v in next, s do
+ permitted[k] = true
+ end
+ else
+ permitted = s
+ end
+end
+
+function xml.resetinjectors(set)
+ if permitted and set and set ~= "" then
+ local s = settings_to_set(set)
+ for k, v in next, s do
+ if v then
+ permitted[k] = nil
+ end
+ end
+ else
+ permitted = nil
+ end
+end
+
+implement {
+ name = "xmlsetinjectors",
+ actions = xml.setinjectors,
+ arguments = "string"
+}
+
+implement {
+ name = "xmlresetinjectors",
+ actions = xml.resetinjectors,
+ arguments = "string"
+}
+
+implement {
+ name = "xmlapplyselectors",
+ actions = lxml.applyselectors,
+ arguments = "string"
+}
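The injector and selector machinery above hinges on one small permitted set. Here is a runnable sketch of that bookkeeping outside ConTeXt, with utilities.parsers.settings_to_set replaced by a trivial stand-in (the real helper handles more syntax):

local permitted = nil

local function settings_to_set(str) -- stand-in for utilities.parsers.settings_to_set
    local s = { }
    for k in str:gmatch("[^, ]+") do
        s[k] = true
    end
    return s
end

local function setinjectors(set)
    local s = settings_to_set(set)
    if permitted then
        for k in next, s do
            permitted[k] = true
        end
    else
        permitted = s
    end
end

local function resetinjectors(set)
    if permitted and set and set ~= "" then
        for k in next, settings_to_set(set) do
            permitted[k] = nil
        end
    else
        permitted = nil -- no argument: disable everything again
    end
end

setinjectors("web,print")
print(permitted.web, permitted.print) --> true  true
resetinjectors("print")
print(permitted.web, permitted.print) --> true  nil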
diff --git a/tex/context/base/m-chart.lua b/tex/context/base/m-chart.lua
index 2b9869379..f1e7f4cb9 100644
--- a/tex/context/base/m-chart.lua
+++ b/tex/context/base/m-chart.lua
@@ -19,8 +19,6 @@ local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
local report_chart = logs.reporter("chart")
-local points = number.points -- we can use %p instead
-
local variables = interfaces.variables
local v_yes = variables.yes
@@ -229,6 +227,8 @@ function commands.flow_start_cell(settings)
settings = settings,
x = 1,
y = 1,
+ realx = 1,
+ realy = 1,
name = "",
}
end
@@ -325,9 +325,13 @@ local function inject(includedata,data,hash)
if si.include then
inject(si,data,hash)
else
+ local x = si.x + xoffset
+ local y = si.y + yoffset
local t = {
- x = si.x + xoffset,
- y = si.y + yoffset,
+ x = x,
+ y = y,
+ realx = x,
+ realy = y,
settings = settings,
}
setmetatableindex(t,si)
@@ -451,10 +455,12 @@ function commands.flow_set_location(x,y)
else
y = tonumber(y)
end
- temp.x = x or 1
- temp.y = y or 1
- last_x = x or last_x
- last_y = y or last_y
+ temp.x = x or 1
+ temp.y = y or 1
+ temp.realx = x or 1
+ temp.realy = y or 1
+ last_x = x or last_x
+ last_y = y or last_y
end
function commands.flow_set_connection(location,displacement,name)
@@ -499,17 +505,17 @@ local function process_cells(chart,xoffset,yoffset)
local linesettings = settings.line
context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color)
context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(linesettings.rulethickness))
+ context("flow_shape_line_width := %p ; ", linesettings.rulethickness)
elseif focus[cell.focus] or focus[cell.name] then
local focussettings = settings.focus
context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor)
context("flow_shape_fill_color := \\MPcolor{%s} ;", focussettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(focussettings.rulethickness))
+ context("flow_shape_line_width := %p ; ", focussettings.rulethickness)
else
local shapesettings = settings.shape
context("flow_shape_line_color := \\MPcolor{%s} ;", shapesettings.framecolor)
context("flow_shape_fill_color := \\MPcolor{%s} ;", shapesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; " , points(shapesettings.rulethickness))
+ context("flow_shape_line_width := %p ; " , shapesettings.rulethickness)
end
context("flow_peepshape := false ;") -- todo
context("flow_new_shape(%s,%s,%s) ;",cell.x+xoffset,cell.y+yoffset,shapedata.number)
@@ -580,7 +586,7 @@ local function process_connections(chart,xoffset,yoffset)
context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false")
context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0)
context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color)
- context("flow_connection_line_width := %s ;",points(linesettings.rulethickness))
+ context("flow_connection_line_width := %p ;",linesettings.rulethickness)
context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other)
context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;")
end
@@ -686,6 +692,7 @@ local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
print("no such chart",chartname)
return
end
+-- chart = table.copy(chart)
chart = expanded(chart,settings)
local chartsettings = chart.settings.chart
local autofocus = chart.settings.chart.autofocus
@@ -746,8 +753,8 @@ local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
-- relocate cells
for i=1,#data do
local cell = data[i]
- cell.x = cell.x - minx + 1
- cell.y = cell.y - miny + 1
+ cell.x = cell.realx - minx + 1
+ cell.y = cell.realy - miny + 1
end
chart.from_x = 1
chart.from_y = 1
@@ -756,7 +763,9 @@ local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
chart.nx = nx
chart.ny = ny
--
- -- inspect(chart)
+ chart.shift_x = minx + 1
+ chart.shift_y = miny + 1
+ --
return chart
end
@@ -792,14 +801,14 @@ local function makechart(chart)
local labeloffset = chartsettings.labeloffset
local exitoffset = chartsettings.exitoffset
local commentoffset = chartsettings.commentoffset
- context("flow_grid_width := %s ;", points(gridwidth))
- context("flow_grid_height := %s ;", points(gridheight))
- context("flow_shape_width := %s ;", points(shapewidth))
- context("flow_shape_height := %s ;", points(shapeheight))
- context("flow_chart_offset := %s ;", points(chartoffset))
- context("flow_label_offset := %s ;", points(labeloffset))
- context("flow_exit_offset := %s ;", points(exitoffset))
- context("flow_comment_offset := %s ;", points(commentoffset))
+ context("flow_grid_width := %p ;", gridwidth)
+ context("flow_grid_height := %p ;", gridheight)
+ context("flow_shape_width := %p ;", shapewidth)
+ context("flow_shape_height := %p ;", shapeheight)
+ context("flow_chart_offset := %p ;", chartoffset)
+ context("flow_label_offset := %p ;", labeloffset)
+ context("flow_exit_offset := %p ;", exitoffset)
+ context("flow_comment_offset := %p ;", commentoffset)
--
local radius = settings.line.radius
local rulethickness = settings.line.rulethickness
@@ -814,10 +823,10 @@ local function makechart(chart)
radius = dy
end
end
- context("flow_connection_line_width := %s ;", points(rulethickness))
- context("flow_connection_smooth_size := %s ;", points(radius))
- context("flow_connection_arrow_size := %s ;", points(radius))
- context("flow_connection_dash_size := %s ;", points(radius))
+ context("flow_connection_line_width := %p ;", rulethickness)
+ context("flow_connection_smooth_size := %p ;", radius)
+ context("flow_connection_arrow_size := %p ;", radius)
+ context("flow_connection_dash_size := %p ;", radius)
--
local offset = chartsettings.offset -- todo: pass string
if offset == v_none or offset == v_overlay or offset == "" then
@@ -825,7 +834,7 @@ local function makechart(chart)
elseif offset == v_standard then
offset = radius -- or rulethickness?
end
- context("flow_chart_offset := %s ;",points(offset))
+ context("flow_chart_offset := %p ;",offset)
--
context("flow_reverse_y := true ;")
process_cells(chart,0,0)
@@ -854,7 +863,7 @@ local function splitchart(chart)
local delta_x = splitsettings.dx or 0
local delta_y = splitsettings.dy or 0
--
- report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)",
+ report_chart("spliting %a from (%s,%s) upto (%s,%s) with steps (%s,%s) and overlap (%s,%s)",
name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y)
--
local part_x = 0
@@ -866,6 +875,9 @@ local function splitchart(chart)
if done then
last_x = to_x
end
+-- if first_x >= to_x then
+-- break
+-- end
local part_y = 0
local first_y = from_y
while true do
@@ -875,14 +887,31 @@ local function splitchart(chart)
if done then
last_y = to_y
end
+-- if first_y >= to_y then
+-- break
+-- end
--
+local data = chart.data
+for i=1,#data do
+ local cell = data[i]
+-- inspect(cell)
+ local cx, cy = cell.x, cell.y
+ if cx >= first_x and cx <= last_x then
+ if cy >= first_y and cy <= last_y then
report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y)
- local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1
+ local x = first_x
+ local y = first_y
+ local nx = last_x - first_x + 1
+ local ny = last_y - first_y + 1
context.beforeFLOWsplit()
context.handleFLOWsplit(function()
makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings !
end)
context.afterFLOWsplit()
+ break
+ end
+ end
+end
--
if done then
break
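The switch from points(...) to the %p directive above moves the dimension-to-string conversion into ConTeXt's formatters. As a rough stand-in (not ConTeXt's own implementation, which is smarter about precision and trailing zeros), %p essentially renders a scaled-point value as a pt measure:

local function topoints(sp) -- illustrative stand-in for the %p directive
    return string.format("%.5fpt", sp/65536)
end

print(topoints(65536)) --> 1.00000pt
print(topoints(26214)) --> 0.39999pt, roughly 0.4pt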
diff --git a/tex/context/base/m-chart.mkvi b/tex/context/base/m-chart.mkvi
index 2b1a7447c..a0c8b2244 100644
--- a/tex/context/base/m-chart.mkvi
+++ b/tex/context/base/m-chart.mkvi
@@ -97,7 +97,6 @@
[\c!framecolor=FLOWfocuscolor,
\c!background=\FLOWshapeparameter\c!background,
\c!backgroundcolor=\FLOWshapeparameter\c!backgroundcolor,
- \c!backgroundscreen=\FLOWshapeparameter\c!backgroundscreen,
\c!rulethickness=\FLOWshapeparameter\c!rulethickness,
\c!offset=\FLOWshapeparameter\c!offset]
@@ -177,7 +176,7 @@
corner = "\FLOWlineparameter\c!corner",
dash = "\FLOWlineparameter\c!dash",
arrow = "\FLOWlineparameter\c!arrow",
- offset = "\FLOWlineparameter\c!offset",
+ offset = \number\dimexpr\FLOWlineparameter\c!offset,
},
} }%
\endgroup}
@@ -193,7 +192,7 @@
\insidefloattrue
\dontcomplain
\setupFLOWchart[#settings]%
- \setupbodyfont[\FLOWchartparameter\c!bodyfont]%
+ \usebodyfontparameter\FLOWchartparameter
\ctxcommand{flow_make_chart {
chart = {
name = "#name",
@@ -259,8 +258,9 @@
\def\FLOW_charts[#name][#settings]
{\begingroup
- \setupFLOWsplit[\c!state=\v!start,#settings]%
- \FLOW_chart[#name][]%
+ \setupFLOWchart[\c!split=\v!yes]%
+ \setupFLOWsplit[#settings]%
+ \module_charts_process[#name][]% \FLOWchart...
\endgroup}
\appendtoks
diff --git a/tex/context/base/m-hemistich.mkiv b/tex/context/base/m-hemistich.mkiv
index 55fde7b92..7a849d415 100644
--- a/tex/context/base/m-hemistich.mkiv
+++ b/tex/context/base/m-hemistich.mkiv
@@ -33,7 +33,7 @@
\unexpanded\def\dodohemistiches#1[#2]#3#4%
{\dontleavehmode
\begingroup
- \doifassignmentelse{#2}
+ \doifelseassignment{#2}
{\edef\currenthemistich{#1}%
\setupcurrenthemistich[#2]}
{\def\currenthemistich{#2}}%
@@ -41,21 +41,29 @@
{\scratchwidth\availablehsize}
{\scratchwidth\hemistichparameter\c!width\relax}%
\spaceskip\zeropoint\s!plus\plusone\s!fill\relax
+ \dostarttagged\t!division\currenthemistich
\hbox to \scratchwidth\bgroup
\scratchwidth.5\dimexpr\scratchwidth-\hemistichparameter\c!distance\relax
\hbox to \scratchwidth\bgroup
+ \dostarttagged\t!construct\c!lefttext
\usehemistichstyleandcolor\c!leftstyle\c!leftcolor#3%
+ \dostoptagged
\egroup
\hss
\begingroup
+ \dostarttagged\t!construct\c!separator
\usehemistichstyleandcolor\c!separatorstyle\c!separatorcolor
\hemistichparameter\c!separator
+ \dostoptagged
\endgroup
\hss
\hbox to \scratchwidth\bgroup
+ \dostarttagged\t!construct\c!righttext
\usehemistichstyleandcolor\c!rightstyle\c!rightcolor#4%
+ \dostoptagged
\egroup
\egroup
+ \dostoptagged
\endgroup}
\unexpanded\def\hemistichescaesura#1#2#3%
diff --git a/tex/context/base/m-matrix.mkiv b/tex/context/base/m-matrix.mkiv
new file mode 100644
index 000000000..ccb376e39
--- /dev/null
+++ b/tex/context/base/m-matrix.mkiv
@@ -0,0 +1,495 @@
+%D \module
+%D [ file=m-matrix,
+%D version=2014.11.04, % already a year older
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=Matrices,
+%D author={Jeong Dalyoung \& Hans Hagen},
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This code is based on a post by Dalyoung on the context list. After that
+%D we turned it into a module and improved the code a bit. Feel free to ask
+%D us for more. Once we're satisfied, a more general helper l-matrix could
+%D be made. Dalyoung does the clever bits, and Hans only cleans up and
+%D optimizes a bit.
+
+% \registerctxluafile{l-matrix}{1.001} % not yet
+
+\startmodule[matrix]
+
+\startluacode
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+local formatters = string.formatters
+local copy = table.copy
+local insert = table.insert
+local remove = table.remove
+
+local matrix = { }
+moduledata.matrix = matrix
+
+local f_matrix_slot = formatters["%s_{%s%s}"]
+
+function matrix.symbolic(sym, x, y, nx, ny) -- symMatrix("a", "m", "n")
+ local nx = nx or 2
+ local ny = ny or nx
+ local function filled(i,y)
+ local mrow = { }
+ for j=1,nx do
+ mrow[#mrow+1] = f_matrix_slot(sym,i,j)
+ end
+ mrow[#mrow+1] = "\\cdots"
+ mrow[#mrow+1] = f_matrix_slot(sym,i,y)
+ return mrow
+ end
+ local function dummy()
+ local mrow = { }
+ for j=1,nx do
+ mrow[#mrow+1] = "\\vdots"
+ end
+ mrow[#mrow+1] = "\\ddots"
+ mrow[#mrow+1] = "\\vdots"
+ return mrow
+ end
+ --
+ local mm = { }
+ for i=1,ny do
+ mm[i] = filled(i,y)
+ end
+ mm[#mm+1] = dummy()
+ mm[#mm+1] = filled(x,y)
+ return mm
+end
+
+-- todo: define a matrix at the tex end so that we have more control
+
+local fences_p = {
+ left = "\\left(\\,",
+ right = "\\,\\right)",
+}
+
+local fences_b = {
+ left = "\\left[\\,",
+ right = "\\,\\right]",
+}
+
+function matrix.typeset(m,options)
+ local options = settings_to_hash(options or "")
+ context.startmatrix(options.determinant and fences_b or fences_p)
+ for i=1, #m do
+ local mi = m[i]
+ for j=1,#mi do
+ context.NC(mi[j])
+ end
+ context.NR()
+ end
+ context.stopmatrix()
+end
+
+-- interchange two rows (i-th, j-th)
+
+function matrix.swap(t,i,j)
+ t[i], t[j] = t[j], t[i]
+end
+
+-- replace i-th row with factor * (i-th row)
+
+function matrix.multiply(m,i,factor)
+ local mi = m[i]
+ for k=1,#mi do
+ mi[k] = factor * mi[k]
+ end
+ return m
+end
+
+-- scalar product "factor * m"
+
+function matrix.scalar(m, factor)
+ for i=1,#m do
+ local mi = m[i]
+ for j=1,#mi do
+ mi[j] = factor * mi[j]
+ end
+ end
+ return m
+end
+
+-- replace i-th row with i-th row + factor * (j-th row)
+
+function matrix.sumrow(m,i,j,factor)
+ local mi = m[i]
+ local mj = m[j]
+ for k=1,#mi do
+ mi[k] = mi[k] + factor * mj[k]
+ end
+end
+
+-- transpose of a matrix
+
+function matrix.transpose(m)
+ local t = { }
+ for j=1,#m[1] do
+ local r = { }
+ for i=1,#m do
+ r[i] = m[i][j]
+ end
+ t[j] = r
+ end
+ return t
+end
+
+-- inner product of two vectors
+
+function matrix.inner(u,v)
+ local nu = #u
+ if nu == 0 then
+ return 0
+ end
+ local nv = #v
+ if nv ~= nu then
+ return 0
+ end
+ local result = 0
+ for i=1,nu do
+ result = result + u[i] * v[i]
+ end
+ return result
+end
+
+-- product of two matrices
+
+function matrix.product(m1,m2)
+ local product = { }
+ if #m1[1] == #m2 then
+ for i=1,#m1 do
+ local m1i = m1[i]
+ local mrow = { }
+ for j=1,#m2[1] do
+ local temp = 0
+ for k=1,#m1[1] do
+ temp = temp + m1i[k] * m2[k][j]
+ end
+ mrow[j] = temp
+ end
+ product[i] = mrow
+ end
+ end
+ return product
+end
+
+local function uppertri(m,sign)
+ local temp = copy(m)
+ for i=1,#temp-1 do
+ local pivot = temp[i][i]
+ if pivot == 0 then
+ local pRow = i +1
+ while temp[pRow][i] == 0 do
+ pRow = pRow + 1
+ if pRow > #temp then -- if there is no nonzero number
+ return temp
+ end
+ end
+ temp[i], temp[pRow] = temp[pRow], temp[i]
+ if sign then
+ sign = -sign
+ end
+ end
+ local mi = temp[i]
+ for k=i+1, #temp do
+ local factor = -temp[k][i]/mi[i]
+ local mk = temp[k]
+ for l=i,#mk do
+ mk[l] = mk[l] + factor * mi[l]
+ end
+ end
+ end
+ if sign then
+ return temp, sign
+ else
+ return temp
+ end
+end
+
+matrix.uppertri = uppertri
+
+function matrix.determinant(m)
+ if #m == #m[1] then
+ local d = 1
+ local t, s = uppertri(m,1)
+ for i=1,#t do
+ d = d * t[i][i]
+ end
+ return s*d
+ else
+ return 0
+ end
+end
+
+local function rowechelon(m,r)
+ local temp = copy(m)
+ local pRow = 1
+ local pCol = 1
+ while pRow <= #temp do
+ local pivot = temp[pRow][pCol]
+ if pivot == 0 then
+ local i = pRow
+ local n = #temp
+ while temp[i][pCol] == 0 do
+ i = i + 1
+ if i > n then
+ -- no nonzero number in a column
+ pCol = pCol + 1
+ if pCol > #temp[pRow] then
+ -- there is no nonzero number in a row
+ return temp
+ end
+ i = pRow
+ end
+ end
+ temp[pRow], temp[i] = temp[i], temp[pRow]
+ end
+ local row = temp[pRow]
+ pivot = row[pCol]
+ for l=pCol,#row do
+ row[l] = row[l]/pivot
+ end
+
+ if r == 1 then
+ -- make the "reduced row echelon form"
+ local row = temp[pRow]
+ for k=1,pRow-1 do
+ local current = temp[k]
+ local factor = -current[pCol]
+ local mk = current
+ for l=pCol,#mk do
+ mk[l] = mk[l] + factor * row[l]
+ end
+ end
+ end
+ -- just make the row echelon form
+ local row = temp[pRow]
+ for k=pRow+1, #temp do
+ local current = temp[k]
+ local factor = -current[pCol]
+ local mk = current
+ for l=pCol,#mk do
+ mk[l] = mk[l] + factor * row[l]
+ end
+ end
+ pRow = pRow + 1
+ pCol = pCol + 1
+
+ if pRow > #temp or pCol > #temp[1] then
+ pRow = #temp + 1
+ end
+ end
+ return temp
+end
+
+matrix.rowechelon = rowechelon
+matrix.rowEchelon = rowechelon
+
+-- solve the linear equation m X = c
+
+local function solve(m,c)
+ local n = #m
+ if n ~= #c then
+ return copy(m)
+ end
+ local newm = copy(m)
+ local temp = copy(c)
+ for i=1,n do
+ insert(newm[i],temp[i])
+ end
+ return rowechelon(newm,1)
+end
+
+matrix.solve = solve
+
+-- find the inverse matrix of m
+
+local function inverse(m)
+ local n = #m
+ local temp = copy(m)
+ if n ~= #m[1] then
+ return temp
+ end
+ for i=1,n do
+ for j=1,n do
+ insert(temp[i],j == i and 1 or 0)
+ end
+ end
+ temp = rowechelon(temp,1)
+ for i=1,n do
+ for j=1,n do
+ remove(temp[i], 1)
+ end
+ end
+ return temp
+end
+
+matrix.inverse = inverse
+
+\stopluacode
+
+\stopmodule
+
+\unexpanded\def\ctxmodulematrix#1{\ctxlua{moduledata.matrix.#1}}
+
+\continueifinputfile{m-matrix.mkiv}
+
+\starttext
+
+\startluacode
+document.DemoMatrixA = {
+ { 0, 2, 4, -4, 1 },
+ { 0, 0, 2, 3, 4 },
+ { 2, 2, -6, 2, 4 },
+ { 2, 0, -6, 9, 7 },
+ { 2, 3, 4, 5, 6 },
+ { 6, 6, -6, 6, 6 },
+}
+
+document.DemoMatrixB = {
+ { 0, 2, 4, -4, 1 },
+ { 0, 0, 2, 3, 4 },
+ { 2, 2, -6, 2, 4 },
+ { 2, 0, -6, 9, 7 },
+ { 2, 2, -6, 2, 4 },
+ { 2, 2, -6, 2, 4 },
+}
+\stopluacode
+
+\startsubject[title={A symbolic matrix}]
+
+\ctxmodulematrix{typeset(moduledata.matrix.symbolic("a", "m", "n"))}
+\ctxmodulematrix{typeset(moduledata.matrix.symbolic("a", "m", "n", 4, 8))}
+
+\stopsubject
+
+\startsubject[title={Swap two rows (2 and 4)}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.swap(document.DemoMatrixA, 2, 4)
+context.blank()
+moduledata.matrix.typeset(document.DemoMatrixA)
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Multiply $3 \times r_2$}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.typeset(moduledata.matrix.multiply(document.DemoMatrixA, 2, 3))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Row 2 + $4 \times r_3$}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.sumrow(document.DemoMatrixA, 2, 3, 4)
+context.blank()
+moduledata.matrix.typeset(document.DemoMatrixA)
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Transpose a matrix}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.typeset(moduledata.matrix.transpose(document.DemoMatrixA))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={The inner product of two vectors}]
+
+\startluacode
+context(moduledata.matrix.inner({ 1, 2, 3 }, { 3, 1, 2 }))
+context.blank()
+context(moduledata.matrix.inner({ 1, 2, 3 }, { 3, 1, 2, 4 }))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={The product of two matrices}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.typeset(moduledata.matrix.product(document.DemoMatrixA,document.DemoMatrixA))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={An Upper Triangular Matrix}]
+
+\ctxmodulematrix{typeset(moduledata.matrix.uppertri(document.DemoMatrixB))}
+
+\stopsubject
+
+\startsubject[title={A determinant}]
+
+\startluacode
+local m = {
+ { 1, 2, 4 },
+ { 0, 0, 2 },
+ { 2, 2, -6 },
+}
+context(moduledata.matrix.determinant(m))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Row echelon form}]
+
+\startluacode
+local m = {
+ { 1, 3, -2, 0, 2, 0, 0 },
+ { 2, 6, -5, -2, 4, -3, -1 },
+ { 0, 0, 5, 10, 0, 15, 5 },
+ { 2, 6, 0, 8, 4, 18, 6 },
+}
+
+moduledata.matrix.typeset(m)
+moduledata.matrix.typeset(moduledata.matrix.rowechelon(m,1))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Solving linear equation}]
+
+\startluacode
+local m = {
+ { 1, 3, -2, 0 },
+ { 2, 0, 1, 2 },
+ { 6, -5, -2, 4 },
+ { -3, -1, 5, 10 },
+}
+
+local c = { 5, 2, 6, 8 }
+
+moduledata.matrix.typeset(moduledata.matrix.solve(m,c))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Inverse matrix}]
+
+\startcombination[2*1]
+ {\ctxlua{moduledata.matrix.typeset { { 1, 1, 1 }, { 0, 2, 3 }, { 3, 2, 1 } }}} {}
+ {\ctxlua{moduledata.matrix.typeset(moduledata.matrix.inverse { { 1, 1, 1 }, { 0, 2, 3 }, { 3, 2, 1 } })}} {}
+\stopcombination
+
+\stopsubject
+
+\stoptext
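For readers who want to try the elimination logic outside ConTeXt, this is a self-contained sketch of the determinant-by-row-reduction idea behind matrix.uppertri and matrix.determinant above, including the sign flip on row swaps; it is not the module code itself:

local function determinant(m)
    local n = #m
    local t = { } -- work on a copy so the input stays untouched
    for i=1,n do
        local row = { }
        for j=1,n do
            row[j] = m[i][j]
        end
        t[i] = row
    end
    local sign = 1
    for i=1,n-1 do
        if t[i][i] == 0 then
            local p = i + 1
            while p <= n and t[p][i] == 0 do
                p = p + 1
            end
            if p > n then
                return 0 -- a zero column: the determinant vanishes
            end
            t[i], t[p] = t[p], t[i]
            sign = -sign
        end
        for k=i+1,n do
            local factor = -t[k][i]/t[i][i]
            for l=i,n do
                t[k][l] = t[k][l] + factor * t[i][l]
            end
        end
    end
    local d = sign
    for i=1,n do
        d = d * t[i][i]
    end
    return d
end

print(determinant { { 1, 2, 4 }, { 0, 0, 2 }, { 2, 2, -6 } }) --> 4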
diff --git a/tex/context/base/m-morse.mkvi b/tex/context/base/m-morse.mkvi
deleted file mode 100644
index a2c20dff7..000000000
--- a/tex/context/base/m-morse.mkvi
+++ /dev/null
@@ -1,273 +0,0 @@
-%D \module
-%D [ file=m-morse,
-%D version=2010.12.10,
-%D title=\CONTEXT\ Extra Modules,
-%D subtitle=Morse,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% todo: act upon the node list
-% make it a buffer operation
-% nice in cld manual
-
-\startluacode
-
-moduledata.morse = moduledata.morse or { }
-local morse = moduledata.morse
-
-local utfcharacters, gsub = string.utfcharacters, string.gsub
-local ucchars, shchars = characters.ucchars, characters.shchars
-
-local codes = {
-
- ["A"] = "·—",
- ["B"] = "—···",
- ["C"] = "—·—·",
- ["D"] = "—··",
- ["E"] = "·",
- ["F"] = "··—·",
- ["G"] = "——·",
- ["H"] = "····",
- ["I"] = "··",
- ["J"] = "·———",
- ["K"] = "—·—",
- ["L"] = "·—··",
- ["M"] = "——",
- ["N"] = "—·",
- ["O"] = "———",
- ["P"] = "·——·",
- ["Q"] = "——·—",
- ["R"] = "·—·",
- ["S"] = "···",
- ["T"] = "—",
- ["U"] = "··—",
- ["V"] = "···—",
- ["W"] = "·——",
- ["X"] = "—··—",
- ["Y"] = "—·——",
- ["Z"] = "——··",
-
- ["0"] = "—————",
- ["1"] = "·————",
- ["2"] = "··———",
- ["3"] = "···——",
- ["4"] = "····—",
- ["5"] = "·····",
- ["6"] = "—····",
- ["7"] = "——···",
- ["8"] = "———··",
- ["9"] = "————·",
-
- ["."] = "·—·—·—",
- [","] = "——··——",
- [":"] = "———···",
- [";"] = "—·—·—",
-
- ["?"] = "··——··",
- ["!"] = "—·—·——",
-
- ["-"] = "—····—",
- ["/"] = "—··—· ",
-
- ["("] = "—·——·",
- [")"] = "—·——·—",
-
- ["="] = "—···—",
- ["@"] = "·——·—·",
-
- ["'"] = "·————·",
- ['"'] = "·—··—·",
-
- ["À"] = "·——·—",
- ["Å"] = "·——·—",
- ["Ä"] = "·—·—",
- ["Æ"] = "·—·—",
- ["Ç"] = "—·—··",
- ["É"] = "··—··",
- ["È"] = "·—··—",
- ["Ñ"] = "——·——",
- ["Ö"] = "———·",
- ["Ø"] = "———·",
- ["Ü"] = "··——",
- ["ß"] = "··· ···",
-
-}
-
-morse.codes = codes
-
-local fallbackself = false
-
-local function codefallback(t,k)
- if k then
- local u = ucchars[k]
- local v = rawget(t,u) or rawget(t,shchars[u]) or false
- t[k] = v
- return v
- elseif fallbackself then
- return k
- else
- return false
- end
-end
-
-table.setmetatableindex(codes,codefallback)
-
-local MorseBetweenWords = context.MorseBetweenWords
-local MorseBetweenCharacters = context.MorseBetweenCharacters
-local MorseLong = context.MorseLong
-local MorseShort = context.MorseShort
-local MorseSpace = context.MorseSpace
-local MorseUnknown = context.MorseUnknown
-
-local function toverbose(str)
- str = gsub(str,"%s*+%s*","+")
- str = gsub(str,"%s+"," ")
- local done = false
- for m in utfcharacters(str) do
- if done then
- MorseBetweenCharacters()
- end
- if m == "·" or m == "." then
- MorseShort()
- done = true
- elseif m == "—" or m == "-" then
- MorseLong()
- done = true
- elseif m == " " then
- if done then
- MorseBetweenCharacters()
- end
- done = false
- elseif m == "+" then
- MorseBetweenWords()
- done = false
- else
- MorseUnknown(m)
- end
- end
-end
-
-local function toregular(str)
- local inmorse = false
- for s in utfcharacters(str) do
- local m = codes[s]
- if m then
- if inmorse then
- MorseBetweenWords()
- else
- inmorse = true
- end
- local done = false
- for m in utfcharacters(m) do
- if done then
- MorseBetweenCharacters()
- else
- done = true
- end
- if m == "·" then
- MorseShort()
- elseif m == "—" then
- MorseLong()
- elseif m == " " then
- MorseBetweenCharacters()
- end
- end
- inmorse = true
- elseif s == "\n" or s == " " then
- MorseSpace()
- inmorse = false
- else
- if inmorse then
- MorseBetweenWords()
- else
- inmorse = true
- end
- MorseUnknown(s)
- end
- end
-end
-
-local function tomorse(str,verbose)
- if verbose then
- toverbose(str)
- else
- toregular(str)
- end
-end
-
-morse.tomorse = tomorse
-
-function morse.filetomorse(name,verbose)
- tomorse(resolvers.loadtexfile(name),verbose)
-end
-
-function morse.showtable()
- context.starttabulate { "|l|l|" } -- { "|l|l|l|" }
- for k, v in table.sortedpairs(codes) do
- context.NC() context(k)
- -- context.NC() context(v)
- context.NC() tomorse(v,true)
- context.NC() context.NR()
- end
- context.stoptabulate()
-end
-
-\stopluacode
-
-\unprotect
-
-% todo: \setupmorse, but probably it's not worth the trouble.
-
-\def\MorseWidth {0.4em}
-\def\MorseHeight {0.2em}
-%def\MorseShort {\dontleavehmode\blackrule[\c!height=\MorseHeight,\c!width=\dimexpr\MorseWidth]}
-%def\MorseLong {\dontleavehmode\blackrule[\c!height=\MorseHeight,\c!width=3\dimexpr\MorseWidth]}
-\def\MorseShort {\dontleavehmode\vrule\!!width \dimexpr\MorseWidth\!!height\MorseHeight\!!depth\zeropoint\relax}
-\def\MorseLong {\dontleavehmode\vrule\!!width3\dimexpr\MorseWidth\!!height\MorseHeight\!!depth\zeropoint\relax}
-\def\MorseBetweenCharacters {\kern\MorseWidth}
-\def\MorseBetweenWords {\hskip3\dimexpr\MorseWidth\relax}
-\def\MorseSpace {\hskip7\dimexpr\MorseWidth\relax}
-\def\MorseUnknown #text{[\detokenize{#text}]}
-
-\unexpanded\def\MorseCode #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es,true)}}
-\unexpanded\def\MorseString #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es)}}
-\unexpanded\def\MorseFile #text{\ctxlua{moduledata.morse.filetomorse("#text")}}
-\unexpanded\def\MorseTable {\ctxlua{moduledata.morse.showtable()}}
-
-\let\Morse \MorseString
-
-%def\MorseShort {·}
-%def\MorseLong {—}
-
-\protect
-
-\continueifinputfile{m-morse.mkvi}
-
-\starttext
-
-\MorseTable
-
-\startlines
-\MorseCode{—·—· ——— —· — · —··— —+—— —·— ·· ···—}
-\MorseCode{—·—· ——— —· — · —··— — + —— —·— ·· ···—}
-\Morse{ÀÁÂÃÄÅàáâãäå}
-\Morse{ÆÇæç}
-\Morse{ÈÉÊËèéêë}
-\Morse{ÌÍÎÏìíîï}
-\Morse{Ññ}
-\Morse{ÒÓÔÕÖòóôõö}
-\Morse{Øø}
-\Morse{ÙÚÛÜùúû}
-\Morse{Ýýÿ}
-\Morse{ß}
-\Morse{Ţţ}
-\stoplines
-
-\Morse{A B C D E F G H I J K L M N O P Q R S T U V W X Y Z}
-
-\stoptext
diff --git a/tex/context/base/m-oldbibtex.mkiv b/tex/context/base/m-oldbibtex.mkiv
new file mode 100644
index 000000000..08c23e7cc
--- /dev/null
+++ b/tex/context/base/m-oldbibtex.mkiv
@@ -0,0 +1,16 @@
+%D \module
+%D [ file=m-oldbibtex,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=Fallback on old method,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\loadmarkfile{bibl-bib}
+\loadmarkfile{bibl-tra}
+
+\endinput
diff --git a/tex/context/base/m-oldfun.mkiv b/tex/context/base/m-oldfun.mkiv
index 1c5a1d29d..3f2ec0263 100644
--- a/tex/context/base/m-oldfun.mkiv
+++ b/tex/context/base/m-oldfun.mkiv
@@ -131,7 +131,7 @@
\unexpanded\def\DroppedCaps#1#2#3#4#5#6#7% does not yet handle accented chars
{\defconvertedargument\asciia{#7}%
\defconvertedcommand \asciib{\DroppedString}%
- \doifinstringelse\asciia\asciib
+ \doifelseinstring\asciia\asciib
{\noindentation
\dontleavehmode
\checkindentation % redo this one
diff --git a/tex/context/base/m-oldnum.mkiv b/tex/context/base/m-oldnum.mkiv
index efc0af472..382c56eb6 100644
--- a/tex/context/base/m-oldnum.mkiv
+++ b/tex/context/base/m-oldnum.mkiv
@@ -73,8 +73,8 @@
\chardef\digitoutputmode=1 % 0..6
\chardef\digitsignmode =0 % 0..3
-\def\setdigitmode{\chardef\digitoutputmode}
-\def\setdigitsign{\chardef\digitsignmode}
+\unexpanded\def\setdigitmode{\chardef\digitoutputmode}
+\unexpanded\def\setdigitsign{\chardef\digitsignmode}
%D The digit modes are:
%D
@@ -100,7 +100,7 @@
\unexpanded\def\digits
{\bgroup
\let~@%
- \doifnextbgroupelse\dodigits{\doifnextcharelse\normalmathshift\domathdigits\grabdigit}}
+ \doifelsenextbgroup\dodigits{\doifelsenextchar\normalmathshift\domathdigits\grabdigit}}
\def\dodigits#1%
{\grabdigit#1\relax}
@@ -118,7 +118,7 @@
\ifx\normalmathshift\undefined \let\normalmathshift=$ \fi
-\def\scandigit
+\unexpanded\def\scandigit
{\ifx\next\blankspace
\let\next\handledigits
\else\ifx\next\nextobeyedline % the indirect one
@@ -139,10 +139,10 @@
%D typeset it in superscript. The space placeholders are
%D replaced by a \type {@}.
-\def\savedigit#1#2%
+\unexpanded\def\savedigit#1#2%
{\edef#1{#1\saveddigits#2}\let\saveddigits\empty}
-\long\def\collectdigit#1%
+\unexpanded\def\collectdigit#1%
{\ifx#1~%
\savedigit\collecteddigits @%
\else\if#1_% tricky as can be several catcodes ... will become lua code anyway
@@ -158,13 +158,13 @@
\chardef\powerdigits\plusone
\else
\savedigit\collecteddigits#1%
- %\doifnumberelse{#1}
+ %\doifelsenumber{#1}
% {\savedigit\collecteddigits#1}
% {\def\saveddigits{#1}}%
\fi\fi\fi
\else
\savedigit\savedpowerdigits#1%
- %\doifnumberelse{#1}
+ %\doifelsenumber{#1}
% {\savedigit\savedpowerdigits#1}
% {\def\saveddigits{#1}}%
\fi\fi\fi\fi
@@ -173,7 +173,7 @@
\let\handlemathdigits\firstofoneargument
\let\handletextdigits\mathematics
-\def\handledigits
+\unexpanded\def\handledigits
{%\ifcase\powerdigits
% \edef\collecteddigits{\collecteddigits\saveddigits}%
%\else
@@ -235,7 +235,7 @@
% 0,- is invalid, should be =
% 0,-- is invalid, should be ==
-\def\digitzeroamount
+\unexpanded\def\digitzeroamount
{\digitsgn\zeroamount
\def\digitzeroamount
{\hphantom
@@ -243,7 +243,7 @@
\hskip-\wd\scratchbox}%
\let\digitzeroamount\empty}}
-\def\scandigits#1%
+\unexpanded\def\scandigits#1%
{\if#1.\digitsep1\else
\if#1,\digitsep2\else
\if#1@\digitnop \else
@@ -261,7 +261,7 @@
\newbox\digitsepbox \chardef\autodigitmode=1
-\def\digitsep#1%
+\unexpanded\def\digitsep#1%
{\ifcase\autodigitmode
\doscandigit#1%
\else
@@ -277,7 +277,7 @@
%
% while this works
-\def\digitnop
+\unexpanded\def\digitnop
{\hbox{\hphantom{\box\digitsepbox}}%
\hphantom{0}\chardef\skipdigit1\relax}
@@ -287,7 +287,7 @@
% {\hphantom{\box\digitsepbox0}%
% \chardef\skipdigit1\relax}
-\def\digitsgn#1%
+\unexpanded\def\digitsgn#1%
{\ifcase\digitsignmode#1\else
\hbox
{\setbox\scratchbox\hbox{0}%
@@ -404,11 +404,11 @@
%D \digittemplate 12.000.000,00 % \digittemplate .,
%D \stoptyping
-\def\digittemplate #1 %
+\unexpanded\def\digittemplate #1 %
{\chardef\digitinputmode\zerocount
\handletokens#1\with\scandigittemplate}
-\def\scandigittemplate#1%
+\unexpanded\def\scandigittemplate#1%
{\if #1.\ifcase\digitinputmode\chardef\digitinputmode\plusone \fi% period
\else\if#1,\ifcase\digitinputmode\chardef\digitinputmode\plustwo \fi% comma
\fi\fi}
diff --git a/tex/context/base/m-pipemode.mkiv b/tex/context/base/m-pipemode.mkiv
new file mode 100644
index 000000000..e96394c43
--- /dev/null
+++ b/tex/context/base/m-pipemode.mkiv
@@ -0,0 +1,7 @@
+% For Mojca: context --global m-pipemode.mkiv
+
+\disabledirectives[system.errorcontext]
+
+\starttext
+
+\let\stoptext\relax
diff --git a/tex/context/base/m-pstricks.mkii b/tex/context/base/m-pstricks.mkii
index bdcf13b24..d41f19871 100644
--- a/tex/context/base/m-pstricks.mkii
+++ b/tex/context/base/m-pstricks.mkii
@@ -43,7 +43,7 @@
{\input multido \relax
\input pstricks \relax
\input pst-plot \relax
- \loadpstrickscolors{colo-rgb}}
+ \loadpstrickscolors{colo-rgb.mkii}}
{\writestatus{pstricks}{using indirect method; enable write18}}
\catcode`\|=\oldbarcode
diff --git a/tex/context/base/m-punk.mkiv b/tex/context/base/m-punk.mkiv
index 6bf92e4c0..c8021a92f 100644
--- a/tex/context/base/m-punk.mkiv
+++ b/tex/context/base/m-punk.mkiv
@@ -162,6 +162,7 @@ function fonts.handlers.vf.combiner.commands.metafont(g,v)
end
g.properties.virtualized = true
g.variants = list
+ print(g)
end
fonts.definers.methods.install( "punk", {
@@ -177,14 +178,29 @@ fonts.definers.methods.install( "punkboldslanted", {
{ "metafont", "mfplain", "punkfont-boldslanted.mp", 10 },
} )
-typesetters.cases.register("RandomPunk", function(current)
- local used = fontdata[current.font].variants
+-- typesetters.cases.register("RandomPunk", function(current)
+-- local used = fontdata[current].variants
+-- if used then
+-- local f = math.random(1,#used)
+-- current.font = used[f]
+-- return current, true
+-- else
+-- return current, false
+-- end
+-- end)
+
+local getfont = nodes.nuts.getfont
+local setfield = nodes.nuts.setfield
+local random = math.random
+
+typesetters.cases.register("RandomPunk", function(start)
+ local used = fontdata[getfont(start)].variants
if used then
- local f = math.random(1,#used)
- current.font = used[f]
- return current, true
+ local f = random(1,#used)
+ setfield(start,"font",used[f])
+ return start, true
else
- return current, false
+ return start, false
end
end)
@@ -214,7 +230,7 @@ end)
\unexpanded\def\StartRandomPunk {\begingroup\EnableRandomPunk}
\unexpanded\def\StopRandomPunk {\endgroup}
-\starttypescript [serif] [punk] [default]
+\starttypescript [serif] [punk]
\definefontsynonym [Serif] [demo@punk]
\definefontsynonym [SerifBold] [demobold@punkbold]
\definefontsynonym [SerifSlanted] [demoslanted@punkslanted]
diff --git a/tex/context/base/m-scite.mkiv b/tex/context/base/m-scite.mkiv
new file mode 100644
index 000000000..7a8e8b06e
--- /dev/null
+++ b/tex/context/base/m-scite.mkiv
@@ -0,0 +1,275 @@
+%D \module
+%D [ file=m-scite,
+%D version=2014.04.28,
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=\SCITE\ lexers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% We can simplify the scite lexers, as long as we're able to return the
+% lexed result table and provide a lexer module with the functions that
+% the lexer expects (so I need to decipher the cxx file).
+%
+% lexer._TOKENSTYLES : table
+% lexer._CHILDREN : flag
+% lexer._EXTRASTYLES : table
+% lexer._GRAMMAR : flag
+%
+% lexers.load : function
+% lexers.lex : function
+%
+% And some properties that map styles onto scintilla styling. I get the
+% impression that we end up with something simpler, a hybrid between the
+% scite lexing and the current context way, so we get an intermediate
+% step, with some penalty for context, but at least I don't have to
+% maintain two sets (three sets as we also have a line based series).
+
+% TODO: as these files are in tds we can locate them and set the lexer root
+% to that one. Currently we're on context/documents.
+
+% This is an experiment: eventually we need to hook it into the verbatim code
+% and deal with widow lines and so on.
+
+\startluacode
+
+-- todo: merge with collapse
+-- todo: prehash whitespaces
+
+-- todo: hook into the pretty print code
+-- todo: a simple catcode regime with only \ { }
+
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = require("util-sci")
+buffers.scite = scite
+
+-- context output:
+
+local f_def_color = formatters["\\definecolor[slxc%s][h=%s%s%s]%%"]
+local f_fore_none = formatters["\\def\\slx%s#1{{\\slxc%s#1}}%%"]
+local f_fore_bold = formatters["\\def\\slx%s#1{{\\slxc%s\\bf#1}}%%"]
+local f_none_bold = formatters["\\def\\slx%s#1{{\\bf#1}}%%"]
+local f_none_none = formatters["\\def\\slx%s#1{{#1}}%%"]
+local f_texstyled = formatters["\\slx%s{%s}"]
+
+local f_mapping = [[
+\let\string\slxL\string\letterleftbrace
+\let\string\slxR\string\letterrightbrace
+\let\string\slxM\string\letterdollar
+\let\string\slxV\string\letterbar
+\let\string\slxU\string\letterhat
+\let\string\slxD\string\letterunderscore
+\let\string\slxH\string\letterhash
+\let\string\slxB\string\letterbackslash
+\let\string\slxP\string\letterpercent
+\let\string\slxS\string\fixedspace
+%]]
+
+local replacer = lpeg.replacer {
+ ["{"] = "\\slxL ",
+ ["}"] = "\\slxR ",
+ ["$"] = "\\slxM ",
+ ["^"] = "\\slxU ",
+ ["_"] = "\\slxD ",
+ ["|"] = "\\slxV ",
+ ["#"] = "\\slxH ",
+ ["\\"] = "\\slxB ",
+ ["%"] = "\\slxP ",
+ [" "] = "\\slxS ",
+}
+
+local colors = nil
+
+local function exportcolors()
+ if not colors then
+ scite.loadscitelexer()
+ local function black(f)
+ return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+ end
+ local result, r = { f_mapping }, 1
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local fore = v.fore
+ if fore and not black(fore) then
+ r = r + 1
+ result[r] = f_def_color(k,fore[1],fore[2],fore[3])
+ end
+ end
+ r = r + 1
+ result[r] = "%"
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local bold = v.bold
+ local fore = v.fore
+ r = r + 1
+ if fore and not black(fore) then
+ if bold then
+ result[r] = f_fore_bold(k,k)
+ else
+ result[r] = f_fore_none(k,k)
+ end
+ else
+ if bold then
+ result[r] = f_none_bold(k)
+ else
+ result[r] = f_none_none(k)
+ end
+ end
+ end
+ colors = concat(result,"\n")
+ end
+ return colors
+end
+
+local function exportwhites()
+ return setmetatableindex(function(t,k)
+ local v = find(k,"white") and true or false
+ t[k] = v
+ return v
+ end)
+end
+
+local function exportstyled(lexer,text)
+ local result = lexer.lex(lexer,text,0)
+ local start = 1
+ local whites = exportwhites()
+ local buffer = { }
+ for i=1,#result,2 do
+ local style = result[i]
+ local position = result[i+1]
+ local txt = sub(text,start,position-1)
+ txt = lpegmatch(replacer,txt)
+ if whites[style] then
+ buffer[#buffer+1] = txt
+ else
+ buffer[#buffer+1] = f_texstyled(style,txt)
+ end
+ start = position
+ end
+ buffer = concat(buffer)
+ return buffer
+end
+
+function scite.installcommands()
+ context(exportcolors())
+end
+
+local function lexdata(data,lexname)
+ buffers.assign("lex",exportstyled(scite.loadedlexers[lexname],data or ""))
+end
+
+scite.lexdata = lexdata
+
+function scite.lexbuffer(name,lexname)
+ lexdata(buffers.getcontent(name) or "",lexname or "tex")
+end
+
+function scite.lexfile(filename,lexname)
+ lexdata(io.loaddata(filename) or "",lexname or file.suffix(filename))
+end
+
+-- html output
+
+\stopluacode
+
+% This is a preliminary interface.
+
+\unprotect
+
+\unexpanded\def\installscitecommands
+ {\ctxlua{buffers.scite.installcommands()}%
+ \let\installscitecommands\relax}
+
+\unexpanded\def\startscite{\startlines}
+\unexpanded\def\stopscite {\stoplines}
+
+\unexpanded\def\scitefile
+ {\dosingleargument\module_scite_file}
+
+\unexpanded\def\module_scite_file[#1]%
+ {\start
+ \ctxlua{buffers.scite.lexfile("#1")}%
+ \installscitecommands
+ \tt
+ \dontcomplain
+ \setcatcodetable\ctxcatcodes % needed in xml
+ \startscite
+ \getbuffer[lex]%
+ \stopscite
+ \stop}
+
+\unexpanded\def\scitebuffer
+ {\dodoubleargument\module_scite_buffer}
+
+\unexpanded\def\module_scite_buffer[#1][#2]%
+ {\start
+ \ifsecondargument
+ \ctxlua{buffers.scite.lexbuffer("#2","#1")}%
+ \else
+ \ctxlua{buffers.scite.lexbuffer("#1","tex")}%
+ \fi
+ \installscitecommands
+ \tt
+ \dontcomplain
+ \setcatcodetable\ctxcatcodes % needed in xml
+ \startscite
+ \getbuffer[lex]%
+ \stopscite
+ \stop}
+
+\protect
+
+\continueifinputfile{m-scite.mkiv}
+
+\setupbodyfont[dejavu,8pt]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ header=1cm,
+ footer=1cm,
+ topspace=1cm,
+ bottomspace=1cm,
+ backspace=1cm]
+
+\startbuffer[demo]
+\startsubsubject[title={oeps}]
+
+\startMPcode
+ draw fullcircle
+ scaled 2cm
+ withpen pencircle scaled 1mm
+ withcolor .5green;
+ draw textext (
+ lua (
+ "local function f(s) return string.upper(s) end mp.quoted(f('foo'))"
+ )
+ ) withcolor .5red ;
+\stopMPcode
+
+\startluacode
+ context("foo")
+\stopluacode
+
+\stopsubsubject
+\stopbuffer
+
+\starttext
+
+% \scitefile[../lexers/scite-context-lexer.lua] \page
+% \scitefile[t:/manuals/about/about-metafun.tex] \page
+% \scitefile[t:/sources/strc-sec.mkiv] \page
+% \scitefile[e:/tmp/mp.w] \page
+% \scitefile[t:/manuals/hybrid/tugboat.bib] \page
+\scitefile[e:/tmp/test.bib] \page
+
+% \getbuffer[demo] \scitebuffer[demo]
+
+\stoptext
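Editor's note: as the header comments of m-scite.mkiv above describe, lexers.lex is expected to return a flat table of alternating style names and positions, which exportstyled then cuts into styled snippets. A minimal standalone sketch of that consumption loop, with made-up style names and positions (not output of the real scite-context lexer):

    -- hypothetical lexed result: alternating (style name, position where the next token
    -- starts), exactly the shape that exportstyled walks in steps of two
    local text   = "\\bold {hi}"
    local result = { "command", 6, "default", 7, "grouping", 8, "text", 10, "grouping", 11 }

    local buffer = { }
    local start  = 1
    for i = 1, #result, 2 do
        local style    = result[i]
        local position = result[i+1]
        local snippet  = string.sub(text, start, position - 1)
        -- the real module first escapes { } \ etc. via the lpeg replacer and skips
        -- whitespace styles; here we simply wrap every snippet as f_texstyled does
        buffer[#buffer+1] = string.format("\\slx%s{%s}", style, snippet)
        start = position
    end
    print(table.concat(buffer))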
diff --git a/tex/context/base/m-spreadsheet.lua b/tex/context/base/m-spreadsheet.lua
index f329acf9a..1b3c5cb34 100644
--- a/tex/context/base/m-spreadsheet.lua
+++ b/tex/context/base/m-spreadsheet.lua
@@ -129,10 +129,10 @@ function datacell(a,b,...)
end
local function checktemplate(s)
- if find(s,"%%") then
+ if find(s,"%",1,true) then
-- normal template
return s
- elseif find(s,"@") then
+ elseif find(s,"@",1,true) then
-- tex specific template
return gsub(s,"@","%%")
else
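Editor's note: the checktemplate change above switches from pattern searches to plain substring searches. In a Lua pattern "%%" is the escape for one literal percent sign, so find(s,"%",1,true) finds the same character without invoking the pattern matcher; the "@" case is rewritten the same way, presumably for consistency and speed. A small plain-Lua illustration:

    local s1 = "cell %s has value %s"  -- a "normal" template, percent directives
    local s2 = "cell @s has value @s"  -- a tex-friendly template, @ directives

    -- pattern search: "%%" matches one literal percent sign
    print(string.find(s1, "%%"))         --> 6   6
    -- plain search: the fourth argument turns pattern magic off, same result, less work
    print(string.find(s1, "%", 1, true)) --> 6   6
    print(string.find(s2, "@", 1, true)) --> 6   6

    -- the @ form is then turned into a normal template, as checktemplate does
    print((string.gsub(s2, "@", "%%")))  --> cell %s has value %s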
diff --git a/tex/context/base/m-spreadsheet.mkiv b/tex/context/base/m-spreadsheet.mkiv
index 5e0499184..914a2b57a 100644
--- a/tex/context/base/m-spreadsheet.mkiv
+++ b/tex/context/base/m-spreadsheet.mkiv
@@ -88,6 +88,8 @@
\unexpanded\def\doifelsespreadsheetcell
{\dosingleempty\module_spreadsheet_doifelse_cell}
+\let\doifspreadsheetcellelse\doifelsespreadsheetcell
+
\unexpanded\def\module_spreadsheet_doifelse_cell[#1]#2#3%
{\ctxlua{moduledata.spreadsheets.doifelsecell("#1",\number#2,\number#3)}}
@@ -118,7 +120,7 @@
\let\stoprow \module_spreadsheet_row_stop
\let\startcell\module_spreadsheet_cell_start
\let\stopcell \module_spreadsheet_cell_stop
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\module_spreadsheet_start
\directsetup{spreadsheet:before:\currentspreadsheet}%
\bTABLE[\c!align=\v!flushright,#1]}
@@ -136,7 +138,7 @@
\unexpanded\def\module_spreadsheet_row_stop {\eTR}
\unexpanded\def\module_spreadsheet_cell_start
- {\doifnextoptionalelse\module_spreadsheet_cell_start_yes\module_spreadsheet_cell_start_nop}
+ {\doifelsenextoptional\module_spreadsheet_cell_start_yes\module_spreadsheet_cell_start_nop}
\unexpanded\def\module_spreadsheet_cell_start_yes[#1]#2\stopcell
{\bTD[#1]\getspr{#2}\eTD}
diff --git a/tex/context/base/m-steps.lua b/tex/context/base/m-steps.lua
index 97759b799..8eb481550 100644
--- a/tex/context/base/m-steps.lua
+++ b/tex/context/base/m-steps.lua
@@ -10,7 +10,6 @@ if not modules then modules = { } end modules ['x-flow'] = {
moduledata.steps = moduledata.steps or { }
-local points = number.points -- number.pt
local variables = interfaces.variables
local trace_charts = false
@@ -100,22 +99,22 @@ function commands.step_make_chart(settings)
end
--
context("text_line_color := \\MPcolor{%s} ;", textsettings.framecolor)
- context("text_line_width := %s ;", points(textsettings.rulethickness))
+ context("text_line_width := %p ;", textsettings.rulethickness)
context("text_fill_color := \\MPcolor{%s} ;", textsettings.backgroundcolor)
- context("text_offset := %s ;", points(textsettings.offset))
- context("text_distance_set := %s ;", points(textsettings.distance))
+ context("text_offset := %p ;", textsettings.offset)
+ context("text_distance_set := %p ;", textsettings.distance)
--
context("cell_line_color := \\MPcolor{%s} ;", cellsettings.framecolor)
- context("cell_line_width := %s ;", points(cellsettings.rulethickness))
+ context("cell_line_width := %p ;", cellsettings.rulethickness)
context("cell_fill_color := \\MPcolor{%s} ;", cellsettings.backgroundcolor)
- context("cell_offset := %s ;", points(cellsettings.offset))
- context("cell_distance_x := %s ;", points(cellsettings.dx))
- context("cell_distance_y := %s ;", points(cellsettings.dy))
+ context("cell_offset := %p ;", cellsettings.offset)
+ context("cell_distance_x := %p ;", cellsettings.dx)
+ context("cell_distance_y := %p ;", cellsettings.dy)
--
context("line_line_color := \\MPcolor{%s} ;", linesettings.color)
- context("line_line_width := %s ;", points(linesettings.rulethickness))
- context("line_distance := %s ;", points(linesettings.distance))
- context("line_offset := %s ;", points(linesettings.offset))
+ context("line_line_width := %p ;", linesettings.rulethickness)
+ context("line_distance := %p ;", linesettings.distance)
+ context("line_offset := %p ;", linesettings.offset)
--
for i=1,#steps do
local step = steps[i]
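Editor's note: the m-steps.lua hunk above drops the explicit number.points() conversion in favour of the %p directive of ConTeXt's extended formatters, which renders a dimension given in scaled points directly as a points value. A hedged sketch, meaningful only inside a ConTeXt MkIV Lua run (context and the extended formatters are not plain Lua):

    -- both calls produce something like "text_offset := 2.5pt ;"
    local d = 65536 * 2.5                        -- 2.5pt expressed in scaled points

    -- old style: convert first, then pass a plain string
    context("text_offset := %s ;", number.points(d))

    -- new style: let the formatter do the conversion
    context("text_offset := %p ;", d)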
diff --git a/tex/context/base/m-steps.mkvi b/tex/context/base/m-steps.mkvi
index a07ece3ae..c9c5a0636 100644
--- a/tex/context/base/m-steps.mkvi
+++ b/tex/context/base/m-steps.mkvi
@@ -98,7 +98,7 @@
\def\module_steps_start_chart[#name][#settings]%
{\startnointerference
\iffirstargument
- \doifassignmentelse{#name}
+ \doifelseassignment{#name}
{\let\currentSTEPchart\empty
\xdef\module_steps_flush_chart{\module_steps_chart[][#name]}}
{\edef\currentSTEPchart{#name}%
diff --git a/tex/context/base/m-translate.mkiv b/tex/context/base/m-translate.mkiv
index f36f9a9fb..2e6cbe950 100644
--- a/tex/context/base/m-translate.mkiv
+++ b/tex/context/base/m-translate.mkiv
@@ -22,12 +22,34 @@
local compiled, list = nil, nil
+ -- function translators.register(from,to)
+ -- local l = lpeg.P(from)/to
+ -- if not list then
+ -- list = l
+ -- else
+ -- list = list + l
+ -- end
+ -- compiled = nil
+ -- end
+ --
+ -- function translators.translate(s)
+ -- if list then
+ -- if not compiled then
+ -- compiled = lpeg.Cs((list + lpeg.P(1))^0)
+ -- end
+ -- return compiled:match(s)
+ -- else
+ -- return s
+ -- end
+ -- end
+
+ -- local function prepare()
+
function translators.register(from,to)
- local l = lpeg.P(from)/to
if not list then
- list = l
+ list = { [from] = to }
else
- list = list + l
+ list[from] = to
end
compiled = nil
end
@@ -35,7 +57,8 @@
function translators.translate(s)
if list then
if not compiled then
- compiled = lpeg.Cs((list + lpeg.P(1))^0)
+ local tree = lpeg.utfchartabletopattern(list)
+ compiled = lpeg.Cs((tree/list + lpeg.patterns.utf8character)^0 * lpeg.P(-1)) -- the P(1) is needed in order to accept non utf
end
return compiled:match(s)
else
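Editor's note: the m-translate rework above keeps register/translate as the public interface but stores the replacements in a table and compiles them into one utf-aware pattern with lpeg.utfchartabletopattern. A hedged sketch of the same technique in isolation; it assumes ConTeXt's lpeg extensions (utfchartabletopattern and lpeg.patterns.utf8character), and the ij example is made up:

    -- collect replacements in a table, as the new translators.register does
    local list = { ["ij"] = "ĳ", ["IJ"] = "Ĳ" }

    -- build one tree pattern for all keys and substitute via the table
    local tree     = lpeg.utfchartabletopattern(list)
    local compiled = lpeg.Cs((tree/list + lpeg.patterns.utf8character)^0 * lpeg.P(-1))

    print(compiled:match("IJsselmeer and ijs"))  --> Ĳsselmeer and ĳs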
diff --git a/tex/context/base/m-visual.mkiv b/tex/context/base/m-visual.mkiv
index 504c0d0c5..d50215966 100644
--- a/tex/context/base/m-visual.mkiv
+++ b/tex/context/base/m-visual.mkiv
@@ -161,7 +161,7 @@
{\freezerandomseed
\let\endstrut\relax
\let\begstrut\relax
- \doifinsetelse{#1}{\v!left,\v!right}
+ \doifelseinset{#1}{\v!left,\v!right}
{\fakewords{2}{4}}
{\fakewords{4}{10}}}%
{\doifinset{#1}{\v!left,\v!right}
@@ -192,8 +192,8 @@
{\dimen0\zeropoint
\getrandomcount\scratchcounter{3}{6}%
\dorecurse\scratchcounter
- {\getrandomdimen\scratchdimen{1em}{3em}%
- \mathinner{\red\fakerule\scratchdimen}%
+ {\getrandomdimen\scratchdimen{0.5em}{1.5em}%
+ \mathord{\red\fakerule\scratchdimen}%
\ifnum\recurselevel<\scratchcounter+\fi
\advance\scratchdimen\dimen0}%
=\mathinner{\red\fakerule\scratchdimen}}
@@ -762,7 +762,7 @@
\startoverlay
{\copy\scratchbox}
{\dodotagbox{#1}\scratchbox{\framed
- [\c!background=\v!screen,\c!backgroundscreen=1]{#2}}}
+ [\c!background=\v!color,\c!backgroundcolor=\v!gray]{#2}}}
\stopoverlay
\egroup
\nextboxwd\the\wd\scratchbox
diff --git a/tex/context/base/math-act.lua b/tex/context/base/math-act.lua
index 879480dce..d0ea78990 100644
--- a/tex/context/base/math-act.lua
+++ b/tex/context/base/math-act.lua
@@ -90,6 +90,8 @@ end
sequencers.appendaction("mathparameters","system","mathematics.scaleparameters")
+-- AccentBaseHeight vs FlattenedAccentBaseHeight
+
function mathematics.checkaccentbaseheight(target,original)
local mathparameters = target.mathparameters
if mathparameters and mathparameters.AccentBaseHeight == 0 then
@@ -103,15 +105,23 @@ function mathematics.checkprivateparameters(target,original)
local mathparameters = target.mathparameters
if mathparameters then
local parameters = target.parameters
+ local properties = target.properties
if parameters then
- if not mathparameters.FractionDelimiterSize then
- mathparameters.FractionDelimiterSize = 1.01 * parameters.size
- end
- if not mathparameters.FractionDelimiterDisplayStyleSize then
- mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * parameters.size
+ local size = parameters.size
+ if size then
+ if not mathparameters.FractionDelimiterSize then
+ mathparameters.FractionDelimiterSize = 1.01 * size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * size
+ end
+ elseif properties then
+ report_math("invalid parameters in font %a",properties.fullname or "?")
+ else
+ report_math("invalid parameters in font")
end
- elseif target.properties then
- report_math("no parameters in font %a",target.properties.fullname or "?")
+ elseif properties then
+ report_math("no parameters in font %a",properties.fullname or "?")
else
report_math("no parameters and properties in font")
end
@@ -465,21 +475,17 @@ setmetatableindex(extensibles,function(extensibles,font)
return codes
end)
-function mathematics.extensiblecode(family,unicode)
+local function extensiblecode(family,unicode)
return extensibles[family_font(family or 0)][unicode][1]
end
-function commands.extensiblecode(family,unicode)
- context(extensibles[family_font(family or 0)][unicode][1])
-end
-
-- left : [head] ...
-- right : ... [head]
-- horizontal : [head] ... [head]
--
-- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width
-function commands.horizontalcode(family,unicode)
+local function horizontalcode(family,unicode)
local font = family_font(family or 0)
local data = extensibles[font][unicode]
local kind = data[1]
@@ -503,13 +509,30 @@ function commands.horizontalcode(family,unicode)
loffset = abs((left ["start"] or 0) - (left ["end"] or 0))
roffset = abs((right["start"] or 0) - (right["end"] or 0))
end
- else
end
- texsetdimen("scratchleftoffset",loffset)
- texsetdimen("scratchrightoffset",roffset)
- context(kind)
+ return kind, loffset, roffset
end
+mathematics.extensiblecode = extensiblecode
+mathematics.horizontalcode = horizontalcode
+
+interfaces.implement {
+ name = "extensiblecode",
+ arguments = { "integer", "integer" },
+ actions = { extensiblecode, context }
+}
+
+interfaces.implement {
+ name = "horizontalcode",
+ arguments = { "integer", "integer" },
+ actions = function(family,unicode)
+ local kind, loffset, roffset = horizontalcode(family,unicode)
+ texsetdimen("scratchleftoffset", loffset)
+ texsetdimen("scratchrightoffset",roffset)
+ context(kind)
+ end
+}
+
-- experiment
-- check: when true, only set when present in font
@@ -517,85 +540,6 @@ end
local blocks = characters.blocks -- this will move to char-ini
-blocks["uppercasenormal"] = { first = 0x00041, last = 0x0005A }
-blocks["uppercasebold"] = { first = 0x1D400, last = 0x1D419 }
-blocks["uppercaseitalic"] = { first = 0x1D434, last = 0x1D44D }
-blocks["uppercasebolditalic"] = { first = 0x1D468, last = 0x1D481 }
-blocks["uppercasescript"] = { first = 0x1D49C, last = 0x1D4B5 }
-blocks["uppercaseboldscript"] = { first = 0x1D4D0, last = 0x1D4E9 }
-blocks["uppercasefraktur"] = { first = 0x1D504, last = 0x1D51D }
-blocks["uppercasedoublestruck"] = { first = 0x1D538, last = 0x1D551 }
-blocks["uppercaseboldfraktur"] = { first = 0x1D56C, last = 0x1D585 }
-blocks["uppercasesansserifnormal"] = { first = 0x1D5A0, last = 0x1D5B9 }
-blocks["uppercasesansserifbold"] = { first = 0x1D5D4, last = 0x1D5ED }
-blocks["uppercasesansserifitalic"] = { first = 0x1D608, last = 0x1D621 }
-blocks["uppercasesansserifbolditalic"] = { first = 0x1D63C, last = 0x1D655 }
-blocks["uppercasemonospace"] = { first = 0x1D670, last = 0x1D689 }
-blocks["uppercasegreeknormal"] = { first = 0x00391, last = 0x003AA }
-blocks["uppercasegreekbold"] = { first = 0x1D6A8, last = 0x1D6C1 }
-blocks["uppercasegreekitalic"] = { first = 0x1D6E2, last = 0x1D6FB }
-blocks["uppercasegreekbolditalic"] = { first = 0x1D71C, last = 0x1D735 }
-blocks["uppercasegreeksansserifbold"] = { first = 0x1D756, last = 0x1D76F }
-blocks["uppercasegreeksansserifbolditalic"] = { first = 0x1D790, last = 0x1D7A9 }
-
-blocks["lowercasenormal"] = { first = 0x00061, last = 0x0007A }
-blocks["lowercasebold"] = { first = 0x1D41A, last = 0x1D433 }
-blocks["lowercaseitalic"] = { first = 0x1D44E, last = 0x1D467 }
-blocks["lowercasebolditalic"] = { first = 0x1D482, last = 0x1D49B }
-blocks["lowercasescript"] = { first = 0x1D4B6, last = 0x1D4CF }
-blocks["lowercaseboldscript"] = { first = 0x1D4EA, last = 0x1D503 }
-blocks["lowercasefraktur"] = { first = 0x1D51E, last = 0x1D537 }
-blocks["lowercasedoublestruck"] = { first = 0x1D552, last = 0x1D56B }
-blocks["lowercaseboldfraktur"] = { first = 0x1D586, last = 0x1D59F }
-blocks["lowercasesansserifnormal"] = { first = 0x1D5BA, last = 0x1D5D3 }
-blocks["lowercasesansserifbold"] = { first = 0x1D5EE, last = 0x1D607 }
-blocks["lowercasesansserifitalic"] = { first = 0x1D622, last = 0x1D63B }
-blocks["lowercasesansserifbolditalic"] = { first = 0x1D656, last = 0x1D66F }
-blocks["lowercasemonospace"] = { first = 0x1D68A, last = 0x1D6A3 }
-blocks["lowercasegreeknormal"] = { first = 0x003B1, last = 0x003CA }
-blocks["lowercasegreekbold"] = { first = 0x1D6C2, last = 0x1D6DB }
-blocks["lowercasegreekitalic"] = { first = 0x1D6FC, last = 0x1D715 }
-blocks["lowercasegreekbolditalic"] = { first = 0x1D736, last = 0x1D74F }
-blocks["lowercasegreeksansserifbold"] = { first = 0x1D770, last = 0x1D789 }
-blocks["lowercasegreeksansserifbolditalic"] = { first = 0x1D7AA, last = 0x1D7C3 }
-
-blocks["digitsnormal"] = { first = 0x00030, last = 0x00039 }
-blocks["digitsbold"] = { first = 0x1D7CE, last = 0x1D7D8 }
-blocks["digitsdoublestruck"] = { first = 0x1D7D8, last = 0x1D7E2 }
-blocks["digitssansserifnormal"] = { first = 0x1D7E2, last = 0x1D7EC }
-blocks["digitssansserifbold"] = { first = 0x1D7EC, last = 0x1D805 }
-blocks["digitsmonospace"] = { first = 0x1D7F6, last = 0x1D80F }
-
-blocks["mathematicaloperators"] = { first = 0x02200, last = 0x022FF }
-blocks["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF }
-blocks["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF }
-blocks["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF }
-blocks["letterlikesymbols"] = { first = 0x02100, last = 0x0214F }
-blocks["miscellaneoustechnical"] = { first = 0x02308, last = 0x0230B }
-blocks["geometricshapes"] = { first = 0x025A0, last = 0x025FF }
-blocks["miscellaneoussymbolsandarrows"] = { first = 0x02B30, last = 0x02B4C }
-blocks["mathematicalalphanumericsymbols"] = { first = 0x00400, last = 0x1D7FF }
-
-blocks["digitslatin"] = { first = 0x00030, last = 0x00039 }
-blocks["digitsarabicindic"] = { first = 0x00660, last = 0x00669 }
-blocks["digitsextendedarabicindic"] = { first = 0x006F0, last = 0x006F9 }
-------["digitsdevanagari"] = { first = 0x00966, last = 0x0096F }
-------["digitsbengali"] = { first = 0x009E6, last = 0x009EF }
-------["digitsgurmukhi"] = { first = 0x00A66, last = 0x00A6F }
-------["digitsgujarati"] = { first = 0x00AE6, last = 0x00AEF }
-------["digitsoriya"] = { first = 0x00B66, last = 0x00B6F }
-------["digitstamil"] = { first = 0x00030, last = 0x00039 } -- no zero
-------["digitstelugu"] = { first = 0x00C66, last = 0x00C6F }
-------["digitskannada"] = { first = 0x00CE6, last = 0x00CEF }
-------["digitsmalayalam"] = { first = 0x00D66, last = 0x00D6F }
-------["digitsthai"] = { first = 0x00E50, last = 0x00E59 }
-------["digitslao"] = { first = 0x00ED0, last = 0x00ED9 }
-------["digitstibetan"] = { first = 0x00F20, last = 0x00F29 }
-------["digitsmyanmar"] = { first = 0x01040, last = 0x01049 }
-------["digitsethiopic"] = { first = 0x01369, last = 0x01371 }
-------["digitskhmer"] = { first = 0x017E0, last = 0x017E9 }
-------["digitsmongolian"] = { first = 0x01810, last = 0x01809 }
-
-- operators : 0x02200
-- symbolsa : 0x02701
-- symbolsb : 0x02901
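Editor's note: several files in this commit (math-act.lua above, math-dir.lua and math-frc.lua further down) replace commands.* entry points with interfaces.implement, which registers a scanner-backed command: arguments lists the argument types to scan and actions is one function or a chain whose return values feed the next entry, so in extensiblecode the numeric result is handed straight to context. A minimal hedged sketch of the pattern; the name and function are invented for illustration and not part of the commit:

    local function doubled(n)
        return 2 * n
    end

    -- scan one integer, pass it through doubled, typeset the result; the command is
    -- typically reachable at the TeX end under a clf_ prefixed name (here clf_doubledemo)
    interfaces.implement {
        name      = "doubledemo",        -- hypothetical, for illustration only
        arguments = { "integer" },
        actions   = { doubled, context },
    }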
diff --git a/tex/context/base/math-ali.mkiv b/tex/context/base/math-ali.mkiv
index 6bfde57b6..bba55ba72 100644
--- a/tex/context/base/math-ali.mkiv
+++ b/tex/context/base/math-ali.mkiv
@@ -31,6 +31,8 @@
\newtoks\c_math_align_b
\newtoks\c_math_align_c
+\def\displayopenupvalue{.25\bodyfontsize}
+
\def\math_build_eqalign
{\scratchtoks\emptytoks
\dorecurse{\mathalignmentparameter\c!m}\math_build_eqalign_step
@@ -38,19 +40,25 @@
\def\math_build_eqalign_step
{\ifnum\recurselevel>\plusone
- %\appendtoks
- % \tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint
- %\to\scratchtoks
\scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint}%
\fi
\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}%
\dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax}
{\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_b}}}}
-\def\math_math_in_eqalign#1{$\tabskip\zeropoint\everycr\emptytoks\displaystyle{{}#1{}}$}
-\def\math_text_in_eqalign#1{$\tabskip\zeropoint\everycr\emptytoks#1$}
+\def\math_math_in_eqalign#1%
+ {\startforceddisplaymath
+ \tabskip\zeropoint
+ \everycr\emptytoks
+ {{}#1{}}%
+ \stopforceddisplaymath}
-\def\displayopenupvalue{.25\bodyfontsize}
+\def\math_text_in_eqalign#1%
+ {\startimath
+ \tabskip\zeropoint
+ \everycr\emptytoks
+ #1%
+ \stopimath}
\def\eqalign#1% why no halign here, probably because of displaywidth
{\emptyhbox
@@ -58,7 +66,15 @@
\vcenter
{\math_openup\displayopenupvalue % was: \openup\jot
\mathsurround\zeropoint
- \ialign{\strut\hfil$\displaystyle{\alignmark\alignmark}$\aligntab$\displaystyle{{}\alignmark\alignmark{}}$\hfil\crcr#1\crcr}}%
+ \ialign{%
+ \strut
+ \hfil
+ \startforceddisplaymath{\alignmark\alignmark}\stopforceddisplaymath
+ \aligntab
+ \startforceddisplaymath{{}\alignmark\alignmark{}}\stopforceddisplaymath
+ \hfil\crcr
+ #1\crcr}%
+ }%
\mskip\thinmuskip}
% preamble is scanned for tabskips so we need the span to prevent an error message
@@ -257,7 +273,7 @@
%
-\def\numberedeqalign
+\unexpanded\def\numberedeqalign
{\doifelse{\formulaparameter\c!location}\v!left
\math_handle_eqalign_no_l_aligned
\math_handle_eqalign_no_r_aligned}
@@ -605,16 +621,27 @@
{\iffirstargument
\setupcurrentmathmatrix[#1]%
\fi
- \emptyhbox
+ % \emptyhbox % noted at 25-05-2014: what was that one doing here? it messed up spacing
\math_matrix_align_method_analyze
\mathmatrixleft
+ % new per 13-10-2014
+ \edef\p_strut{\mathmatrixparameter\c!strut}%
+ \ifx\p_strut\v!no
+ \let\m_matrix_strut\relax
+ \else
+ \let\m_matrix_strut\strut
+ \ifx\p_strut\v!yes\else
+ \spacing\p_strut
+ \fi
+ \fi
+ %
\mathmatrixbox\bgroup
\pushmacro\math_matrix_NC
\let\endmath\relax
\def\NC{\math_matrix_NC}%
\def\MC{\math_matrix_NC\ifmmode\else\startimath\let\endmath\stopimath\fi}%
\global\let\math_matrix_NC\math_matrix_NC_indeed
- \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\crcr}%
+ \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\m_matrix_strut \crcr}%
\normalbaselines
\mathsurround\zeropoint
\everycr\emptytoks
@@ -700,7 +727,7 @@
%D
%D \typebuffer \getbuffer
%D
-%D \definemathmatrix[bmatrix][left={\left[\mskip\thinmuskip},right={\mskip\thinmuskip\right]}]
+%D \definemathmatrix[bmatrix][left={\left[\mskip\thinmuskip},right={\mskip\thinmuskip\right]},strut=1.25]
%D
%D \startbuffer
%D \placeformula \startformula[-] \startbmatrix
@@ -1024,8 +1051,7 @@
\strc_formulas_place_number
\setbox\scratchbox\math_hbox to \displaywidth\bgroup
\mathinnerstrut
- $%
- \displaystyle
+ \startforceddisplaymath
\ifcase\mathraggedstatus\or\hfill\or\hfill\fi}
\def\math_box_llapped_math_no
@@ -1047,7 +1073,7 @@
\fi}
\unexpanded\def\stopmathbox
- {$%
+ {\stopforceddisplaymath
\ifcase\mathraggedstatus\or\or\hfill\or\hfill\fi
\egroup
\setbox0\hbox{\unhcopy\scratchbox}%
diff --git a/tex/context/base/math-arr.mkiv b/tex/context/base/math-arr.mkiv
index 6824c362e..0e3a53f32 100644
--- a/tex/context/base/math-arr.mkiv
+++ b/tex/context/base/math-arr.mkiv
@@ -63,7 +63,7 @@
\setvalue{\??matharrowsettings\v!big }{\def\m_math_arrows_extra{20}}
\setvalue{\??matharrowsettings\v!normal }{}
\setvalue{\??matharrowsettings }{}
-\setvalue{\??matharrowsettings\s!unknown}{\doifnumberelse\p_math_spacing{\let\m_math_arrows_extra\p_math_spacing}\donothing}
+\setvalue{\??matharrowsettings\s!unknown}{\doifelsenumber\p_math_spacing{\let\m_math_arrows_extra\p_math_spacing}\donothing}
\def\math_arrows_construct#1#2#3#4#5% hm, looks like we do a double mathrel (a bit cleaned up .. needs checking)
{\begingroup
diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv
index 250986959..8247ac008 100644
--- a/tex/context/base/math-def.mkiv
+++ b/tex/context/base/math-def.mkiv
@@ -30,6 +30,9 @@
\definemathcommand [arccos] [nolop] {\mfunctionlabeltext{arccos}}
\definemathcommand [arcsin] [nolop] {\mfunctionlabeltext{arcsin}}
\definemathcommand [arctan] [nolop] {\mfunctionlabeltext{arctan}}
+\definemathcommand [acos] [nolop] {\mfunctionlabeltext{acos}}
+\definemathcommand [asin] [nolop] {\mfunctionlabeltext{asin}}
+\definemathcommand [atan] [nolop] {\mfunctionlabeltext{atan}}
\definemathcommand [arg] [nolop] {\mfunctionlabeltext{arg}}
\definemathcommand [cosh] [nolop] {\mfunctionlabeltext{cosh}}
\definemathcommand [cos] [nolop] {\mfunctionlabeltext{cos}}
@@ -64,6 +67,7 @@
\definemathcommand [sup] [limop] {\mfunctionlabeltext{sup}}
\definemathcommand [tanh] [nolop] {\mfunctionlabeltext{tanh}}
\definemathcommand [tan] [nolop] {\mfunctionlabeltext{tan}}
+\definemathcommand [diff] {\mfunctionlabeltext{diff}}
\let\normalmatharg\arg % todo: maybe automatically
diff --git a/tex/context/base/math-dir.lua b/tex/context/base/math-dir.lua
index 507a24e41..0f871beed 100644
--- a/tex/context/base/math-dir.lua
+++ b/tex/context/base/math-dir.lua
@@ -23,8 +23,19 @@ local trace_directions = false trackers.register("typesetters.directions.math
local report_directions = logs.reporter("typesetting","math directions")
-local insert_node_before = nodes.insert_before
-local insert_node_after = nodes.insert_after
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getlist = nuts.getlist
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
@@ -33,7 +44,7 @@ local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_textdir = nodepool.textdir
@@ -61,9 +72,9 @@ local function processmath(head)
stop = nil
end
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local char = current.char
+ local char = getchar(current)
local cdir = chardirections[char]
if cdir == "en" or cdir == "an" then -- we could check for mathclass punctuation
if not start then
@@ -83,7 +94,7 @@ local function processmath(head)
if mirror then
local class = charclasses[char]
if class == "open" or class == "close" then
- current.char = mirror
+ setfield(current,"char",mirror)
if trace_directions then
report_directions("mirrored: %C to %C",char,mirror)
end
@@ -94,6 +105,13 @@ local function processmath(head)
end
elseif not start then
-- nothing
+if id == hlist_code or id == vlist_code then
+ local list, d = processmath(getlist(current))
+ setfield(current,"list",list)
+ if d then
+ done = true
+ end
+end
elseif start == stop then
start = nil
else
@@ -101,14 +119,14 @@ local function processmath(head)
-- math can pack things into hlists .. we need to make sure we don't process
-- too often: needs checking
if id == hlist_code or id == vlist_code then
- local list, d = processmath(current.list)
- current.list = list
+ local list, d = processmath(getlist(current))
+ setfield(current,"list",list)
if d then
done = true
end
end
end
- current = current.next
+ current = getnext(current)
end
if not start then
-- nothing
@@ -124,9 +142,11 @@ local enabled = false
function directions.processmath(head) -- style, penalties
if enabled then
- local a = head[a_mathbidi]
+ local h = tonut(head)
+ local a = getattr(h,a_mathbidi)
if a and a > 0 then
- return processmath(head)
+ local head, done = processmath(h)
+ return tonode(head), done
end
end
return head, false
@@ -142,4 +162,8 @@ function directions.setmath(n)
end
end
-commands.setmathdirection = directions.setmath
+interfaces.implement {
+ name = "setmathdirection",
+ actions = directions.setmath,
+ arguments = "integer"
+}
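Editor's note: the math-dir.lua changes above (like the RandomPunk rewrite in m-punk.mkiv earlier) all follow the nuts pattern: convert the incoming node list once with tonut, traverse it with the accessor functions (getid, getchar, getnext, setfield) instead of indexing node userdata, and convert back with tonode before returning it to TeX. A hedged skeleton of that pattern, with a purely illustrative traversal body:

    local nuts       = nodes.nuts
    local tonut      = nuts.tonut
    local tonode     = nuts.tonode
    local getnext    = nuts.getnext
    local getid      = nuts.getid
    local getchar    = nuts.getchar

    local glyph_code = nodes.nodecodes.glyph

    local function inspect_glyphs(head)          -- head is a regular node
        local h       = tonut(head)              -- switch to the fast accessors once
        local current = h
        while current do
            if getid(current) == glyph_code then
                -- do something with getchar(current) here
            end
            current = getnext(current)
        end
        return tonode(h), false                  -- hand a regular node (and a done flag) back
    end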
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index bd9a1d315..76dd1ad9b 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -20,7 +20,6 @@ local virtualcharacters = { }
local identifiers = fonts.hashes.identifiers
local lastmathids = fonts.hashes.lastmathids
-local tounicode16 = fonts.mappings.tounicode16
-- we need a trick (todo): if we define scriptscript, script and text in
-- that order we could use their id's .. i.e. we could always add a font
@@ -133,10 +132,8 @@ function fallbacks.apply(target,original)
else
-- something else
end
- if trace_fallbacks then
- if characters[k] then
- report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
- end
+ if trace_fallbacks and characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
end
end
end
@@ -182,22 +179,22 @@ end
-- virtualcharacters[0x208B] = 0x002B
virtualcharacters[0x207A] = function(data)
- data.replacement = 0x2212
+ data.replacement = 0x002B
return raised(data)
end
virtualcharacters[0x207B] = function(data)
- data.replacement = 0x002B
+ data.replacement = 0x2212
return raised(data)
end
virtualcharacters[0x208A] = function(data)
- data.replacement = 0x2212
+ data.replacement = 0x002B
return raised(data,true)
end
virtualcharacters[0x208B] = function(data)
- data.replacement = 0x002B
+ data.replacement = 0x2212
return raised(data,true)
end
@@ -332,11 +329,12 @@ end
-- we could move the defs from math-act here
-local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
+local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset,unicode)
local characters = target.characters
- local addprivate = fonts.helpers.addprivate
local olddata = characters[oldchr]
- if olddata and not olddata.commands then
+ -- brrr ... pagella has only next
+ if olddata and not olddata.commands and olddata.width > 0 then
+ local addprivate = fonts.helpers.addprivate
if swap then
swap = characters[swap]
height = swap.depth
@@ -351,6 +349,7 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
width = olddata.width,
height = height,
depth = depth,
+ unicode = unicode,
}
local glyphdata = newdata
local nextglyph = olddata.next
@@ -399,9 +398,9 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
end
end
- return glyphdata
+ return glyphdata, true
else
- return olddata
+ return olddata, false
end
end
@@ -416,7 +415,7 @@ virtualcharacters[0x203E] = function(data) -- could be FE33E instead
height = target.parameters.xheight/4
depth = height
end
- return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth)
+ return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth,nil,nil,0x203E)
end
virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
@@ -427,38 +426,40 @@ local function smashed(data,unicode,swap,private)
local original = data.original
local chardata = target.characters[unicode]
if chardata and chardata.height > target.parameters.xheight then
- return accent_to_extensible(target,private,original,unicode,0,0,swap)
+ return accent_to_extensible(target,private,original,unicode,0,0,swap,nil,unicode)
else
return original.characters[unicode]
end
end
-addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end
virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end
virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end
-addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
-virtualcharacters[0xFE3DF] = function(data) return data.original.characters[0x23DF] end
-virtualcharacters[0xFE3DD] = function(data) return data.original.characters[0x23DD] end
-virtualcharacters[0xFE3B5] = function(data) return data.original.characters[0x23B5] end
+virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.unicode = 0x23DF return c end end
+virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.unicode = 0x23DD return c end end
+virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x23B5] if c then c.unicode = 0x23B5 return c end end
-- todo: add some more .. numbers might change
-addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h" } )
-addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h" } )
+addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h", mathclass = "topaccent" } )
local function smashed(data,unicode,private)
local target = data.target
local height = target.parameters.xheight / 2
- local c = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
- c.top_accent = nil
+ local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height,unicode)
+ if done then
+ c.top_accent = nil -- or maybe also all the others
+ end
return c
end
@@ -466,15 +467,21 @@ virtualcharacters[0xFE302] = function(data) return smashed(data,0x0302,0xFE302)
virtualcharacters[0xFE303] = function(data) return smashed(data,0x0303,0xFE303) end
-- another crazy hack .. doesn't work as we define scrscr first .. we now have smaller
--- primes so we have smaller primes for the moment, big ones will become an option
+-- primes so we have smaller primes for the moment, big ones will become an option ..
+-- these primes in fonts are a real mess .. kind of a dead end, so don't wonder about
+-- the values below
+
+-- todo: check tounicodes
local function smashed(data,unicode,optional)
local oldchar = data.characters[unicode]
if oldchar then
- local height = 1.2 * data.target.parameters.xheight
+ local xheight = data.target.parameters.xheight
+ local height = 1.2 * xheight
+ local shift = oldchar.height - height
local newchar = {
commands = {
- { "down", oldchar.height - height },
+ { "down", shift },
{ "char", unicode },
},
height = height,
@@ -486,6 +493,30 @@ local function smashed(data,unicode,optional)
end
end
+-- -- relocate all but less flexible so not used .. instead some noad hackery plus
+-- -- the above
+--
+-- local function smashed(data,unicode,optional)
+-- local oldchar = data.characters[unicode]
+-- if oldchar then
+-- local xheight = data.target.parameters.xheight
+-- local height = oldchar.height
+-- local shift = oldchar.height < 1.5*xheight and -(1.8*xheight-height) or 0
+-- local newchar = {
+-- commands = {
+-- { "down", shift },
+-- { "char", unicode },
+-- },
+-- unicode = unicode,
+-- height = height,
+-- width = oldchar.width,
+-- }
+-- return newchar
+-- elseif not optional then
+-- report_fallbacks("missing %U prime in font %a",unicode,data.target.properties.fullname)
+-- end
+-- end
+
addextra(0xFE932, { description="SMASHED PRIME 0x02032", unicodeslot=0xFE932 } )
addextra(0xFE933, { description="SMASHED PRIME 0x02033", unicodeslot=0xFE933 } )
addextra(0xFE934, { description="SMASHED PRIME 0x02034", unicodeslot=0xFE934 } )
@@ -511,7 +542,7 @@ addextra(0xFE940, { category = "mn", description="SMALL ANNUITY SYMBOL", unicode
local function actuarian(data)
local characters = data.target.characters
local parameters = data.target.parameters
- local basechar = characters[0x0078] -- x (0x0058 X)
+ local basechar = characters[0x0078] -- x (0x0058 X) or 0x1D431
local linewidth = parameters.xheight / 10
local basewidth = basechar.width
local baseheight = basechar.height
@@ -519,7 +550,7 @@ local function actuarian(data)
-- todo: add alttext
-- compromise: lm has large hooks e.g. \actuarial{a}
width = basewidth + 4 * linewidth,
- tounicode = tounicode16(0x20E7),
+ unicode = 0x20E7,
commands = {
{ "right", 2 * linewidth },
{ "down", - baseheight - 3 * linewidth },
diff --git a/tex/context/base/math-fen.mkiv b/tex/context/base/math-fen.mkiv
index 94d93e4af..9570eac83 100644
--- a/tex/context/base/math-fen.mkiv
+++ b/tex/context/base/math-fen.mkiv
@@ -131,6 +131,14 @@
\definemathfence [angle] [\c!left="27E8,\c!right="27E9]
\definemathfence [doubleangle] [\c!left="27EA,\c!right="27EB]
\definemathfence [solidus] [\c!left="2044,\c!right="2044]
+\definemathfence [ceil] [\c!left="2308,\c!right="2309]
+\definemathfence [floor] [\c!left="230A,\c!right="230B]
+\definemathfence [moustache] [\c!left="23B0,\c!right="23B1]
+\definemathfence [uppercorner] [\c!left="231C,\c!right="231D]
+\definemathfence [lowercorner] [\c!left="231E,\c!right="231F]
+\definemathfence [group] [\c!left="27EE,\c!right="27EF]
+\definemathfence [openbracket] [\c!left="27E6,\c!right="27E7]
+
\definemathfence [nothing]
\definemathfence [mirrored] % \v!mirrored
@@ -144,36 +152,54 @@
\definemathfence [mirroredangle] [mirrored] [\c!right="27E8,\c!left="27E9]
\definemathfence [mirroreddoubleangle] [mirrored] [\c!right="27EA,\c!left="27EB]
\definemathfence [mirroredsolidus] [mirrored] [\c!right="2044,\c!left="2044]
-\definemathfence [mirrorednothing] [mirorred]
+\definemathfence [mirroredceil] [mirrored] [\c!right="2308,\c!left="2309]
+\definemathfence [mirroredfloor] [mirrored] [\c!right="230A,\c!left="230B]
+\definemathfence [mirroredmoustache] [mirrored] [\c!right="23B0,\c!left="23B1]
+\definemathfence [mirroreduppercorner] [mirrored] [\c!right="231C,\c!left="231D]
+\definemathfence [mirroredlowercorner] [mirrored] [\c!right="231E,\c!left="231F]
+\definemathfence [mirroredgroup] [mirrored] [\c!right="27EE,\c!left="27EF]
+\definemathfence [mirroredopenbracket] [mirrored] [\c!right="27E6,\c!left="27E7]
+
+\definemathfence [mirrorednothing] [mirrored]
%D A bonus:
-\unexpanded\def\Lparent {\math_fenced_fenced_start{parenthesis}} \unexpanded\def\Rparent {\math_fenced_fenced_stop{parenthesis}}
-\unexpanded\def\Lbracket {\math_fenced_fenced_start{bracket}} \unexpanded\def\Rbracket {\math_fenced_fenced_stop{bracket}}
-\unexpanded\def\Lbrace {\math_fenced_fenced_start{brace}} \unexpanded\def\Rbrace {\math_fenced_fenced_stop{brace}}
-\unexpanded\def\Langle {\math_fenced_fenced_start{angle}} \unexpanded\def\Rangle {\math_fenced_fenced_stop{angle}}
-\unexpanded\def\Ldoubleangle {\math_fenced_fenced_start{doubleangle}} \unexpanded\def\Rdoubleangle {\math_fenced_fenced_stop{doubleangle}}
-\unexpanded\def\Lbar {\math_fenced_fenced_start{bar}} \unexpanded\def\Rbar {\math_fenced_fenced_stop{bar}}
-\unexpanded\def\Ldoublebar {\math_fenced_fenced_start{doublebar}} \unexpanded\def\Rdoublebar {\math_fenced_fenced_stop{doublebar}}
-\unexpanded\def\Ltriplebar {\math_fenced_fenced_start{triplebar}} \unexpanded\def\Rtriplebar {\math_fenced_fenced_stop{triplebar}}
-\unexpanded\def\Lsolidus {\math_fenced_fenced_start{solidus}} \unexpanded\def\Rsolidus {\math_fenced_fenced_stop{solidus}}
-\unexpanded\def\Lnothing {\math_fenced_fenced_start{nothing}} \unexpanded\def\Rnothing {\math_fenced_fenced_stop{nothing}}
-
-\unexpanded\def\Lparentmirrored {\math_fenced_fenced_stop{mirroredparenthesis}} \unexpanded\def\Rparentmirrored {\math_fenced_fenced_start{mirroredparenthesis}}
-\unexpanded\def\Lbracketmirrored {\math_fenced_fenced_stop{mirroredbracket}} \unexpanded\def\Rbracketmirrored {\math_fenced_fenced_start{mirroredbracket}}
-\unexpanded\def\Lbracemirrored {\math_fenced_fenced_stop{mirroredbrace}} \unexpanded\def\Rbracemirrored {\math_fenced_fenced_start{mirroredbrace}}
-\unexpanded\def\Langlemirrored {\math_fenced_fenced_stop{mirroredangle}} \unexpanded\def\Ranglemirrored {\math_fenced_fenced_start{mirroredangle}}
-\unexpanded\def\Ldoubleanglemirrored {\math_fenced_fenced_stop{mirroreddoubleangle}} \unexpanded\def\Rdoubleanglemirrored {\math_fenced_fenced_start{mirroreddoubleangle}}
-\unexpanded\def\Lbarmirrored {\math_fenced_fenced_stop{mirroredbar}} \unexpanded\def\Rbarmirrored {\math_fenced_fenced_start{mirroredbar}}
-\unexpanded\def\Ldoublebarmirrored {\math_fenced_fenced_stop{mirroreddoublebar}} \unexpanded\def\Rdoublebarmirrored {\math_fenced_fenced_start{mirroreddoublebar}}
-\unexpanded\def\Ltriplebarmirrored {\math_fenced_fenced_stop{mirroredtriplebar}} \unexpanded\def\Rtriplebarmirrored {\math_fenced_fenced_start{mirroredtriplebar}}
-\unexpanded\def\Lsolidusmirrored {\math_fenced_fenced_stop{mirroredsolidus}} \unexpanded\def\Rsolidusmirrored {\math_fenced_fenced_start{mirroredsolidus}}
-\unexpanded\def\Lnothingmirrored {\math_fenced_fenced_stop{mirrorednothing}} \unexpanded\def\Rnothingmirrored {\math_fenced_fenced_start{mirrorednothing}}
-
-%D And another one:
+\unexpanded\def\Lparent {\math_fenced_fenced_start{parenthesis}} \unexpanded\def\Rparent {\math_fenced_fenced_stop {parenthesis}}
+\unexpanded\def\Lbracket {\math_fenced_fenced_start{bracket}} \unexpanded\def\Rbracket {\math_fenced_fenced_stop {bracket}}
+\unexpanded\def\Lbrace {\math_fenced_fenced_start{brace}} \unexpanded\def\Rbrace {\math_fenced_fenced_stop {brace}}
+\unexpanded\def\Langle {\math_fenced_fenced_start{angle}} \unexpanded\def\Rangle {\math_fenced_fenced_stop {angle}}
+\unexpanded\def\Ldoubleangle {\math_fenced_fenced_start{doubleangle}} \unexpanded\def\Rdoubleangle {\math_fenced_fenced_stop {doubleangle}}
+\unexpanded\def\Lbar {\math_fenced_fenced_start{bar}} \unexpanded\def\Rbar {\math_fenced_fenced_stop {bar}}
+\unexpanded\def\Ldoublebar {\math_fenced_fenced_start{doublebar}} \unexpanded\def\Rdoublebar {\math_fenced_fenced_stop {doublebar}}
+\unexpanded\def\Ltriplebar {\math_fenced_fenced_start{triplebar}} \unexpanded\def\Rtriplebar {\math_fenced_fenced_stop {triplebar}}
+\unexpanded\def\Lsolidus {\math_fenced_fenced_start{solidus}} \unexpanded\def\Rsolidus {\math_fenced_fenced_stop {solidus}}
+\unexpanded\def\Lfloor {\math_fenced_fenced_start{floor}} \unexpanded\def\Rfloor {\math_fenced_fenced_stop {floor}}
+\unexpanded\def\Lceil {\math_fenced_fenced_start{ceil}} \unexpanded\def\Rceil {\math_fenced_fenced_stop {ceil}}
+\unexpanded\def\Lmoustache {\math_fenced_fenced_start{moustache}} \unexpanded\def\Rmoustache {\math_fenced_fenced_stop {moustache}}
+\unexpanded\def\Luppercorner {\math_fenced_fenced_start{uppercorner}} \unexpanded\def\Ruppercorner {\math_fenced_fenced_stop {uppercorner}}
+\unexpanded\def\Llowercorner {\math_fenced_fenced_start{lowercorner}} \unexpanded\def\Rlowercorner {\math_fenced_fenced_stop {lowercorner}}
+\unexpanded\def\Lgroup {\math_fenced_fenced_start{group}} \unexpanded\def\Rgroup {\math_fenced_fenced_stop {group}}
+\unexpanded\def\Lopenbracket {\math_fenced_fenced_start{openbracket}} \unexpanded\def\Ropenbracket {\math_fenced_fenced_stop {openbracket}}
+\unexpanded\def\Lnothing {\math_fenced_fenced_start{nothing}} \unexpanded\def\Rnothing {\math_fenced_fenced_stop {nothing}}
+
+\unexpanded\def\Lparentmirrored {\math_fenced_fenced_stop {mirroredparenthesis}} \unexpanded\def\Rparentmirrored {\math_fenced_fenced_start{mirroredparenthesis}}
+\unexpanded\def\Lbracketmirrored {\math_fenced_fenced_stop {mirroredbracket}} \unexpanded\def\Rbracketmirrored {\math_fenced_fenced_start{mirroredbracket}}
+\unexpanded\def\Lbracemirrored {\math_fenced_fenced_stop {mirroredbrace}} \unexpanded\def\Rbracemirrored {\math_fenced_fenced_start{mirroredbrace}}
+\unexpanded\def\Langlemirrored {\math_fenced_fenced_stop {mirroredangle}} \unexpanded\def\Ranglemirrored {\math_fenced_fenced_start{mirroredangle}}
+\unexpanded\def\Ldoubleanglemirrored {\math_fenced_fenced_stop {mirroreddoubleangle}} \unexpanded\def\Rdoubleanglemirrored {\math_fenced_fenced_start{mirroreddoubleangle}}
+\unexpanded\def\Lbarmirrored {\math_fenced_fenced_stop {mirroredbar}} \unexpanded\def\Rbarmirrored {\math_fenced_fenced_start{mirroredbar}}
+\unexpanded\def\Ldoublebarmirrored {\math_fenced_fenced_stop {mirroreddoublebar}} \unexpanded\def\Rdoublebarmirrored {\math_fenced_fenced_start{mirroreddoublebar}}
+\unexpanded\def\Ltriplebarmirrored {\math_fenced_fenced_stop {mirroredtriplebar}} \unexpanded\def\Rtriplebarmirrored {\math_fenced_fenced_start{mirroredtriplebar}}
+\unexpanded\def\Lsolidusmirrored {\math_fenced_fenced_stop {mirroredsolidus}} \unexpanded\def\Rsolidusmirrored {\math_fenced_fenced_start{mirroredsolidus}}
+\unexpanded\def\Lfloormirrored {\math_fenced_fenced_stop {mirroredfloor}} \unexpanded\def\Rfloormirrored {\math_fenced_fenced_start{mirroredfloor}}
+\unexpanded\def\Lceilmirrored {\math_fenced_fenced_stop {mirroredceil}} \unexpanded\def\Rceilmirrored {\math_fenced_fenced_start{mirroredceil}}
+\unexpanded\def\Lmoustachemirrored {\math_fenced_fenced_stop {mirroredmoustache}} \unexpanded\def\Rmoustachemirrored {\math_fenced_fenced_start{mirroredmoustache}}
+\unexpanded\def\Luppercornermirrored {\math_fenced_fenced_stop {mirroreduppercorner}} \unexpanded\def\Ruppercornermirrored {\math_fenced_fenced_start{mirroreduppercorner}}
+\unexpanded\def\Llowercornermirrored {\math_fenced_fenced_stop {mirroredlowercorner}} \unexpanded\def\Rlowercornermirrored {\math_fenced_fenced_start{mirroredlowercorner}}
+\unexpanded\def\Lgroupmirrored {\math_fenced_fenced_stop {mirroredgroup}} \unexpanded\def\Rgroupmirrored {\math_fenced_fenced_start{mirroredgroup}}
+\unexpanded\def\Lopenbracketmirrored {\math_fenced_fenced_stop {mirroredopenbracket}} \unexpanded\def\Ropenbracketmirrored {\math_fenced_fenced_start{mirroredopenbracket}}
+\unexpanded\def\Lnothingmirrored {\math_fenced_fenced_stop {mirrorednothing}} \unexpanded\def\Rnothingmirrored {\math_fenced_fenced_start{mirrorednothing}}
-% \setupmathfences[color=darkgreen]
-%
% \startformula
% \left{ \frac{1}{a} \right}
% \left[ \frac{1}{b} \right]
@@ -193,6 +219,8 @@
\newconditional\c_math_fenced_done
\newconditional\c_math_fenced_unknown \settrue\c_math_fenced_unknown
+% maybe use \detokenize ...
+
\def\math_left
{\settrue\c_math_fenced_done
\edef\m_math_left{\meaning\nexttoken}%
@@ -208,6 +236,21 @@
\edef\m_math_middle{\meaning\nexttoken}%
\csname\??mathmiddle\ifcsname\??mathmiddle\m_math_middle\endcsname\m_math_middle\else\s!unknown\fi\endcsname}
+\unexpanded\def\lfence#1%
+ {\settrue\c_math_fenced_done
+ \edef\m_math_left{\meaning#1}%
+ \csname\??mathleft\ifcsname\??mathleft\m_math_left\endcsname\m_math_left\else\s!unknown\fi\endcsname}
+
+\unexpanded\def\rfence#1%
+ {\settrue\c_math_fenced_done
+ \edef\m_math_right{\meaning#1}%
+ \csname\??mathright\ifcsname\??mathright\m_math_right\endcsname\m_math_right\else\s!unknown\fi\endcsname}
+
+\unexpanded\def\mfence#1%
+ {\settrue\c_math_fenced_done
+ \edef\m_math_middle{\meaning#1}%
+ \csname\??mathmiddle\ifcsname\??mathmiddle\m_math_middle\endcsname\m_math_middle\else\s!unknown\fi\endcsname}
+
\setvalue{\??mathleft \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalleft \nexttoken\fi}
\setvalue{\??mathright \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalright \nexttoken\fi}
\setvalue{\??mathmiddle\s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalmiddle\nexttoken\fi}
@@ -216,67 +259,79 @@
{\expandafter\let\csname\??mathleft \meaning#1\endcsname#2%
\expandafter\let\csname\??mathright\meaning#3\endcsname#4}
-\expandafter\let\csname\??mathleft \meaning [\endcsname\Lbracket
-\expandafter\let\csname\??mathleft \meaning (\endcsname\Lparent
-\expandafter\let\csname\??mathleft \meaning <\endcsname\Langle
-\expandafter\let\csname\??mathleft \meaning ⟨\endcsname\Langle
-\expandafter\let\csname\??mathleft \meaning ⟪\endcsname\Ldoubleangle
-\expandafter\let\csname\??mathleft \meaning {\endcsname\Lbrace
-\expandafter\let\csname\??mathleft \meaning |\endcsname\Lbar
-\expandafter\let\csname\??mathleft \meaning ‖\endcsname\Ldoublebar
-\expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Ltriplebar
-\expandafter\let\csname\??mathleft \meaning /\endcsname\Lsolidus
-\expandafter\let\csname\??mathleft \meaning .\endcsname\Lnothing
-
-\expandafter\let\csname\??mathright\meaning ]\endcsname\Rbracket
-\expandafter\let\csname\??mathright\meaning )\endcsname\Rparent
-\expandafter\let\csname\??mathright\meaning >\endcsname\Rangle
-\expandafter\let\csname\??mathright\meaning ⟩\endcsname\Rangle
-\expandafter\let\csname\??mathright\meaning ⟫\endcsname\Rdoubleangle
-\expandafter\let\csname\??mathright\meaning }\endcsname\Rbrace
-\expandafter\let\csname\??mathright\meaning |\endcsname\Rbar
-\expandafter\let\csname\??mathright\meaning ⦀\endcsname\Rtriplebar
-\expandafter\let\csname\??mathright\meaning /\endcsname\Rsolidus
-\expandafter\let\csname\??mathright\meaning .\endcsname\Rnothing
-
-\expandafter\let\csname\??mathright\meaning [\endcsname\Lbracketmirrored
-\expandafter\let\csname\??mathright\meaning (\endcsname\Lparentmirrored
-\expandafter\let\csname\??mathright\meaning <\endcsname\Langlemirrored
-\expandafter\let\csname\??mathright\meaning ⟨\endcsname\Langlemirrored
-\expandafter\let\csname\??mathright\meaning ⟪\endcsname\Ldoubleanglemirrored
-\expandafter\let\csname\??mathright\meaning {\endcsname\Lbracemirrored
-%expandafter\let\csname\??mathright\meaning |\endcsname\Lbarmirrored
-%expandafter\let\csname\??mathright\meaning ‖\endcsname\Ldoublebarmirrored
-%expandafter\let\csname\??mathright\meaning ⦀\endcsname\Ltriplebarmirrored
-\expandafter\let\csname\??mathright\meaning /\endcsname\Lsolidusmirrored
-\expandafter\let\csname\??mathright\meaning .\endcsname\Lnothingmirrored
-
-\expandafter\let\csname\??mathleft \meaning ]\endcsname\Rbracketmirrored
-\expandafter\let\csname\??mathleft \meaning )\endcsname\Rparentmirrored
-\expandafter\let\csname\??mathleft \meaning >\endcsname\Ranglemirrored
-\expandafter\let\csname\??mathleft \meaning ⟩\endcsname\Ranglemirrored
-\expandafter\let\csname\??mathleft \meaning ⟫\endcsname\Rdoubleanglemirrored
-\expandafter\let\csname\??mathleft \meaning }\endcsname\Rbracemirrored
-%expandafter\let\csname\??mathleft \meaning |\endcsname\Rbarmirrored
-%expandafter\let\csname\??mathleft \meaning ‖\endcsname\Rdoublebarmirrored
-%expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Rtriplebarmirrored
-\expandafter\let\csname\??mathleft \meaning /\endcsname\Rsolidusmirrored
-\expandafter\let\csname\??mathleft \meaning .\endcsname\Rnothingmirrored
+\normalexpanded{\installmathfencepair {|\detokenize {|}} \Ldoublebar {|\detokenize {|}} \Rdoublebar}
+\normalexpanded{\installmathfencepair {|\detokenize{||}} \Ltriplebar {|\detokenize{||}} \Rtriplebar}
+
+\installmathfencepair \bgroup \Lbrace \egroup \Rbrace
+\installmathfencepair \egroup \Rbracemirrored \bgroup \Lbracemirrored
+
+\installmathfencepair . \Lnothing . \Rnothing
+\installmathfencepair . \Rnothingmirrored . \Lnothingmirrored
+
+\installmathfencepair [ \Lbracket ] \Rbracket
+\installmathfencepair ] \Rbracketmirrored [ \Lbracketmirrored
+
+\installmathfencepair ( \Lparent ) \Rparent
+\installmathfencepair ) \Rparentmirrored ( \Lparentmirrored
+
+\installmathfencepair < \Langle > \Rangle
+\installmathfencepair > \Ranglemirrored < \Langlemirrored
+
+\installmathfencepair / \Lsolidus / \Rsolidus
+%installmathfencepair / \Rsolidusmirrored / \Lsolidusmirrored
+
+\installmathfencepair | \Lbar | \Rbar
+%installmathfencepair | \Rbarmirrored | \Lbarmirrored
+
+\installmathfencepair ⌊ \Lfloor ⌋ \Rfloor
+\installmathfencepair ⌋ \Rfloormirrored ⌊ \Lfloormirrored
+\installmathfencepair ⌈ \Lceil ⌉ \Rceil
+\installmathfencepair ⌉ \Rceilmirrored ⌈ \Lceilmirrored
+
+\installmathfencepair ⟨ \Langle ⟩ \Rangle
+\installmathfencepair ⟩ \Ranglemirrored ⟨ \Langlemirrored
+
+\installmathfencepair ⟪ \Ldoubleangle ⟫ \Rdoubleangle
+\installmathfencepair ⟫ \Rdoubleanglemirrored ⟪ \Ldoubleanglemirrored
+
+\installmathfencepair ‖ \Ldoublebar ‖ \Rdoublebar
+%installmathfencepair ‖ \Rdoublebarmirrored ‖ \Ldoublebarmirrored
+
+\installmathfencepair ⦀ \Ltriplebar ⦀ \Rtriplebar
+%installmathfencepair ⦀ \Rtriplebarmirrored ⦀ \Ltriplebarmirrored
+
+% \installmathfencepair { \Lbrace } \Rbrace
+% \installmathfencepair } \Rbracemirrored { \Lbracemirrored
+
+\appendtoks
+ \ignorediscretionaries % so $\mtext{a|b}$ works, this is ok because it's an \hbox
+\to \everymathematics
% todo paren parent
\let\lbrack\lbracket
\let\rbrack\rbracket
-\installmathfencepair \lbrace \Lbrace \rbrace \Rbrace
-\installmathfencepair \lbracket \Lbracket \rbracket \Rbracket
-\installmathfencepair \lparen \Lparen \rparen \Rparen
-\installmathfencepair \lparent \Lparent \rparent \Rparent
-\installmathfencepair \langle \Langle \rangle \Rangle
-%installmathfencepair \lrangle \Ldoubleangle \rrangle \Rdoubleangle
-%installmathfencepair \lbar \Lbar \rbar \Rbar
-\installmathfencepair \vert \Lbar \vert \Rbar
-\installmathfencepair \solidus \Lsolidus \solidus \Rsolidus
+\installmathfencepair \lbrace \Lbrace \rbrace \Rbrace
+\installmathfencepair \lbracket \Lbracket \rbracket \Rbracket
+\installmathfencepair \lparen \Lparen \rparen \Rparen
+\installmathfencepair \lparent \Lparent \rparent \Rparent
+\installmathfencepair \langle \Langle \rangle \Rangle
+\installmathfencepair \lrangle \Ldoubleangle \rrangle \Rdoubleangle
+\installmathfencepair \lbar \Lbar \rbar \Rbar
+\installmathfencepair \lVert \Ldoublebar \rVert \Rdoublebar
+\installmathfencepair \vert \Lbar \vert \Rbar
+\installmathfencepair \solidus \Lsolidus \solidus \Rsolidus
+\installmathfencepair \lfloor \Lfloor \rfloor \Rfloor
+\installmathfencepair \lceil \Lceil \rceil \Rceil
+
+\installmathfencepair \ulcorner \Luppercorner \ulcorner \Ruppercorner
+\installmathfencepair \llcorner \Llowercorner \llcorner \Rlowercorner
+\installmathfencepair \lmoustache \Lmoustache \lmoustache \Rmoustache
+\installmathfencepair \llbracket \Lopenbracket \llbracket \Ropenbracket
+\installmathfencepair \lgroup \Lgroup \lgroup \Rgroup
+
+% \setupmathfences[color=darkgreen]
\unexpanded\def\{{\mathortext\lbrace \letterleftbrace } % or maybe a chardef
\unexpanded\def\}{\mathortext\rbrace \letterrightbrace } % or maybe a chardef
diff --git a/tex/context/base/math-frc.lua b/tex/context/base/math-frc.lua
index 4f531a530..639edc94b 100644
--- a/tex/context/base/math-frc.lua
+++ b/tex/context/base/math-frc.lua
@@ -25,13 +25,13 @@ table.setmetatableindex(resolved, function(t,k)
return v
end)
-local normalatop = context.normalatop
-local normalover = context.normalover
+local ctx_normalatop = context.normalatop
+local ctx_normalover = context.normalover
-function commands.math_frac(how,left,right,width)
- if how == v_no then
+local function mathfraction(how,left,right,width) -- of course we could use the scanners directly here which
+ if how == v_no then -- is faster but also less abstract ... maybe some day
if left == 0x002E and right == 0x002E then
- normalatop()
+ ctx_normalatop()
else
context("\\atopwithdelims%s%s",resolved[left],resolved[right])
end
@@ -43,9 +43,15 @@ function commands.math_frac(how,left,right,width)
end
else -- v_auto
if left == 0x002E and right == 0x002E then
- normalover()
+ ctx_normalover()
else
context("\\overwithdelims%s%s",resolved[left],resolved[right])
end
end
end
+
+interfaces.implement {
+ name = "mathfraction",
+ actions = mathfraction,
+ arguments = { "string", "number", "number", "dimen" }
+}
diff --git a/tex/context/base/math-frc.mkiv b/tex/context/base/math-frc.mkiv
index 65fa30942..cbe342b66 100644
--- a/tex/context/base/math-frc.mkiv
+++ b/tex/context/base/math-frc.mkiv
@@ -15,7 +15,14 @@
\unprotect
-%D todo: struts ... depends on demand
+%D I need to check it all again as there was a bug in luatex with dimensions that could
+%D have resulted in side effects that made me mess with spacing.
+
+\unexpanded\def\topstrut{\vrule\s!width\zeropoint\s!height\strutht\s!depth\zeropoint\relax}
+\unexpanded\def\botstrut{\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\strutdp\relax}
+
+\unexpanded\def\mathtopstrut{\setbox\scratchbox\mathstylehbox{(}\vrule\s!width\zeropoint\s!height\ht\scratchbox\s!depth\zeropoint\relax}
+\unexpanded\def\mathbotstrut{\setbox\scratchbox\mathstylehbox{(}\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\dp\scratchbox\relax}
%D This module is reimplemented in \MKIV\ style.
@@ -106,19 +113,97 @@
\c!rulethickness=.25\exheight,
\c!left=0x2E,
\c!right=0x2E,
+ \c!strut=\v!yes,
+ \c!topdistance=,
+ \c!bottomdistance=,
\c!rule=\v!auto]
\appendtoks
\setuevalue{\currentmathfraction}{\math_frac{\currentmathfraction}}%
\to \everydefinemathfraction
+% Sometimes users want control over the distances:
+
+\let\math_fraction_set_distance\relax
+
+\appendtoks
+ \math_fraction_set_distance
+\to \everymathematics
+
+% why only displaystyle .. a bit weak
+
+\unexpanded\def\math_fraction_set_distance_top
+ {\Umathfractionnumup \displaystyle\m_math_fraction_distance_top
+ \relax}
+
+\unexpanded\def\math_fraction_set_distance_bot
+ {\Umathfractiondenomdown\displaystyle\m_math_fraction_distance_bot
+ \relax}
+
+\unexpanded\def\math_fraction_set_distance_all
+ {\Umathfractionnumup \displaystyle\m_math_fraction_distance_top
+ \Umathfractiondenomdown\displaystyle\m_math_fraction_distance_bot
+ \relax}
+
+\appendtoks
+ \ifx\currentmathfraction\empty
+ \edef\m_math_fraction_distance_top{\mathfractionparameter\c!topdistance}%
+ \edef\m_math_fraction_distance_bot{\mathfractionparameter\c!bottomdistance}%
+ \ifx\m_math_fraction_distance_top\empty
+ \ifx\m_math_fraction_distance_bot\empty
+ \let\math_fraction_set_distance\relax
+ \else
+ \let\math_fraction_set_distance\math_fraction_set_distance_bot
+ \fi
+ \else
+ \ifx\m_math_fraction_distance_bot\empty
+ \let\math_fraction_set_distance\math_fraction_set_distance_top
+ \else
+ \let\math_fraction_set_distance\math_fraction_set_distance_all
+ \fi
+ \fi
+ \fi
+\to \everysetupmathfraction
+
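+% For instance (an untested usage sketch with made-up values; only display style is
+% adapted, as noted above):
+%
+% \setupmathfractions
+%   [topdistance=2\exheight,
+%    bottomdistance=1.5\exheight]
+%
+% \startformula \frac{a}{b} \stopformula
+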
+% So far for control.
+
+\installcorenamespace{mathfractionstrut}
+
+\setvalue{\??mathfractionstrut\v!yes}%
+ {\let\m_fractions_strut_top\mathstrut
+ \let\m_fractions_strut_bot\mathstrut}
+
+\setvalue{\??mathfractionstrut\v!math}%
+ {\let\m_fractions_strut_top\mathstrut
+ \let\m_fractions_strut_bot\mathstrut}
+
+\setvalue{\??mathfractionstrut\v!no}%
+ {\let\m_fractions_strut_top\relax
+ \let\m_fractions_strut_bot\relax}
+
+\setvalue{\??mathfractionstrut\v!tight}%
+ {\let\m_fractions_strut_top\mathbotstrut % indeed swapped name
+ \let\m_fractions_strut_bot\mathtopstrut} % indeed swapped name
+
+\let\m_fractions_strut_top\relax
+\let\m_fractions_strut_bot\relax
+
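+% For instance (an untested sketch; the strut key takes yes, math, no or tight):
+%
+% \setupmathfractions[strut=tight]
+%
+% \startformula \frac{1}{2} + \frac{a+1}{b-1} \stopformula
+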
\newdimen\d_math_fraction_margin
\unexpanded\def\math_frac#1%
{\begingroup
\edef\currentmathfraction{#1}%
+ %
\d_math_fraction_margin\mathfractionparameter\c!margin
+ %
\edef\p_math_fractions_color{\mathfractionparameter\c!color}%
+ %
+ \edef\p_math_fractions_strut{\mathfractionparameter\c!strut}%
+ \csname
+ \??mathfractionstrut
+ \ifcsname\??mathfractionstrut\p_math_fractions_strut\endcsname\p_math_fractions_strut\else\v!no\fi
+ \endcsname
+ %
\ifx\p_math_fractions_color\empty
\expandafter\math_frac_normal
\else
@@ -136,18 +221,17 @@
% we use utfchar anyway so we can as well do all at the lua end
\def\math_frac_command
- {\ctxcommand{math_frac(%
- "\mathfractionparameter\c!rule",%
- \number\mathfractionparameter\c!left,%
- \number\mathfractionparameter\c!right,%
- \number\dimexpr\mathfractionparameter\c!rulethickness%
- )}}
-
-% Having a \withmarginornot{#1}{#2} makes not much sense nor do
-% 4 tests or 4 redundant kerns (longer node lists plus possible
-% interference). A split in normal and margin also makes testing
-% easier. When left and right margins are needed we might merge
-% the variants again. After all, these are not real installers.
+ {\clf_mathfraction
+ {\mathfractionparameter\c!rule}%
+ \mathfractionparameter\c!left\space
+ \mathfractionparameter\c!right\space
+ \dimexpr\mathfractionparameter\c!rulethickness\relax
+ \relax}
+
+% Having a \withmarginornot{#1}{#2} does not make much sense, nor do 4 tests or 4 redundant
+% kerns (longer node lists plus possible interference). A split in normal and margin
+% also makes testing easier. When left and right margins are needed we might merge the
+% variants again. After all, these are not real installers.
\setvalue{\??mathfractionalternative\v!inner}%
{\ifcase\d_math_fraction_margin
@@ -156,24 +240,6 @@
\expandafter\math_fraction_inner_margin
\fi}
-\def\math_fraction_inner_normal#1#2%
- {\Ustack{%
- {\usemathstyleparameter\mathfractionparameter{#1}}% we should store this one
- \math_frac_command
- {\usemathstyleparameter\mathfractionparameter{#2}}% and reuse it here
- }\endgroup}
-
-\def\math_fraction_inner_margin#1#2%
- {\Ustack{%
- {\kern\d_math_fraction_margin
- \usemathstyleparameter\mathfractionparameter{#1}% we should store this one
- \kern\d_math_fraction_margin}%
- \math_frac_command
- {\kern\d_math_fraction_margin
- \usemathstyleparameter\mathfractionparameter{#2}% and reuse it here
- \kern\d_math_fraction_margin}%
- }\endgroup}
-
\setvalue{\??mathfractionalternative\v!outer}%
{\ifcase\d_math_fraction_margin
\expandafter\math_fraction_outer_normal
@@ -181,21 +247,84 @@
\expandafter\math_fraction_outer_margin
\fi}
+\setvalue{\??mathfractionalternative\v!both}%
+ {\ifcase\d_math_fraction_margin
+ \expandafter\math_fraction_both_normal
+ \else
+ \expandafter\math_fraction_both_margin
+ \fi}
+
+% todo: store first state and reuse second time
+
+\def\math_fraction_inner_normal#1#2%
+ {\Ustack{%
+ {%
+ {\usemathstyleparameter\mathfractionparameter{\m_fractions_strut_top#1}}%
+ \math_frac_command
+ {\usemathstyleparameter\mathfractionparameter{\m_fractions_strut_bot#2}}%
+ }%
+ }\endgroup}
+
\def\math_fraction_outer_normal#1#2%
{\Ustack{%
\usemathstyleparameter\mathfractionparameter
- {{#1}\math_frac_command{#2}}%
+ {%
+ {\m_fractions_strut_top#1}%
+ \math_frac_command
+ {\m_fractions_strut_bot#2}%
+ }%
+ }\endgroup}
+
+\def\math_fraction_both_normal#1#2%
+ {\Ustack{%
+ \usemathstyleparameter\mathfractionparameter
+ {%
+ {\usemathstyleparameter\mathfractionparameter\m_fractions_strut_top#1}%
+ \math_frac_command
+ {\usemathstyleparameter\mathfractionparameter\m_fractions_strut_bot#2}%
+ }%
+ }\endgroup}
+
+\def\math_fraction_inner_margin#1#2%
+ {\Ustack{%
+ {%
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter{\m_fractions_strut_top#1}%
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter{\m_fractions_strut_bot#2}%
+ \kern\d_math_fraction_margin}%
+ }%
}\endgroup}
\def\math_fraction_outer_margin#1#2%
{\Ustack{%
\usemathstyleparameter\mathfractionparameter
- {{\kern\d_math_fraction_margin#1\kern\d_math_fraction_margin}%
- \math_frac_command
- {\kern\d_math_fraction_margin#2\kern\d_math_fraction_margin}}%
+ {%
+ {\kern\d_math_fraction_margin
+ \m_fractions_strut_top#1%
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \m_fractions_strut_bot#2%
+ \kern\d_math_fraction_margin}%
+ }%
}\endgroup}
-\definemathfraction[frac][\c!mathstyle=]
+\def\math_fraction_both_margin#1#2%
+ {\Ustack{%
+ \usemathstyleparameter\mathfractionparameter
+ {%
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter\m_fractions_strut_top#1%
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter\m_fractions_strut_bot#2%
+ \kern\d_math_fraction_margin}%
+ }%
+ }\endgroup}
\unexpanded\def\xfrac {\begingroup\let\xfrac\xxfrac\math_frac_alternative\scriptstyle}
\unexpanded\def\xxfrac{\begingroup \math_frac_alternative\scriptscriptstyle}
@@ -206,6 +335,40 @@
{\begingroup
\math_frac_alternative\scriptscriptstyle{#1}{\raise.25\exheight\hbox{$\scriptscriptstyle#2$}}}
+%D Spacing:
+
+\unexpanded\def\nomathfractiongaps {\normalexpanded{\math_no_fraction_gaps \triggermathstyle\mathstyle}} % maybe collect settings
+\unexpanded\def\overlaymathfractiongaps{\normalexpanded{\math_overlay_fraction_gaps\triggermathstyle\mathstyle}} % maybe collect settings
+
+\unexpanded\def\math_no_fraction_gaps#1%
+ {\Umathfractionnumup #1\zeropoint
+ \Umathfractiondenomdown#1\zeropoint}
+
+\unexpanded\def\math_overlay_fraction_gaps#1%
+ {\Umathfractionnumup #1\zeropoint
+ \Umathfractionnumvgap #1\zeropoint
+ %Umathfractionrule #1\zeropoint
+ \Umathfractiondenomvgap#1\zeropoint
+ \Umathfractiondenomdown#1\zeropoint}
+
+\installcorenamespace{mathfractiondistance}
+
+\letvalue{\??mathfractiondistance\v!none }\nomathfractiongaps
+\letvalue{\??mathfractiondistance\v!no }\nomathfractiongaps
+\letvalue{\??mathfractiondistance\v!overlay}\overlaymathfractiongaps
+
+\setupmathfractions
+ [\c!distance=\v!none]
+
+\appendtoks
+ \edef\p_distance{\rootmathfractionparameter\c!distance}%
+ \ifx\p_distance\empty\else
+ \ifcsname\??mathfractiondistance\p_distance\endcsname
+ \csname\??mathfractiondistance\p_distance\endcsname
+ \fi
+ \fi
+\to \everymathematics
+
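+% For instance (an untested sketch using the values installed above):
+%
+% \setupmathfractions[distance=overlay] % none | no | overlay
+%
+% \startformula \frac{x}{y} \stopformula
+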
%D \macros
%D {dfrac, tfrac, frac, dbinom, tbinom, binom}
%D
@@ -232,9 +395,36 @@
% \unexpanded\def\dfrac #1#2{{\displaystyle {{#1}\normalover {#2}}}}
% \unexpanded\def\tfrac #1#2{{\textstyle {{#1}\normalover {#2}}}}
-\definemathfraction[dfrac][\c!alternative=\v!outer,\c!mathstyle=\s!display]
-\definemathfraction[tfrac][\c!alternative=\v!outer,\c!mathstyle=\s!text]
-\definemathfraction[sfrac][\c!alternative=\v!outer,\c!mathstyle=\s!script]
+\definemathfraction[i:frac] [\c!alternative=\v!inner,\c!mathstyle=] % was script and then small but nothing needed
+\definemathfraction[i:tfrac][\c!alternative=\v!inner,\c!mathstyle=\s!text] % was script (before luatex fix)
+\definemathfraction[i:sfrac][\c!alternative=\v!inner,\c!mathstyle=\s!scriptscript]
+\definemathfraction[i:dfrac][\c!alternative=\v!inner,\c!mathstyle=\s!display]
+
+\definemathfraction[d:frac] [\c!alternative=\v!inner,\c!mathstyle=\s!cramped] % was cramped,text
+\definemathfraction[d:tfrac][\c!alternative=\v!both ,\c!mathstyle={\s!cramped,\s!text}] % was cramped,script (before luatex fix)
+\definemathfraction[d:sfrac][\c!alternative=\v!both ,\c!mathstyle={\s!cramped,\s!scriptscript}]
+\definemathfraction[d:dfrac][\c!alternative=\v!inner,\c!mathstyle=\s!display]
+
+%D \unexpanded\def\ShowMathFractions#1#2%
+%D {\dontleavehmode
+%D \begingroup
+%D \showmathstruts
+%D \mathematics{x+\tfrac{#1}{#2}+1+\frac{#1}{#2}+2+\sfrac{#1}{#2}+g}%
+%D \endgroup}
+%D
+%D The default \type {tfrac}, \type {frac} and \type {sfrac} look like this:
+%D
+%D \blank
+%D \ShowMathFractions{a}{a}\par
+%D \ShowMathFractions{1}{x}\par
+%D \ShowMathFractions{a}{b}\par
+%D \ShowMathFractions{1}{b}\par
+%D \blank
+
+\unexpanded\def\frac {\csname\inlineordisplaymath id:frac\endcsname}
+\unexpanded\def\tfrac{\csname\inlineordisplaymath id:tfrac\endcsname}
+\unexpanded\def\sfrac{\csname\inlineordisplaymath id:sfrac\endcsname}
+\unexpanded\def\dfrac{\csname\inlineordisplaymath id:dfrac\endcsname}
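+
+% So inline $\frac{1}{x}$ now resolves to the i:frac instance, while the same \frac in
+% \startformula ... \stopformula resolves to d:frac (a sketch of the dispatch above).
+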
% \definemathfraction[ddfrac][\c!mathstyle=\s!display]
% \definemathfraction[ttfrac][\c!mathstyle=\s!text]
@@ -274,7 +464,7 @@
%D \getbuffer
\unexpanded\def\cfrac
- {\doifnextoptionalelse\math_cfrac_yes\math_cfrac_nop}
+ {\doifelsenextoptionalcs\math_cfrac_yes\math_cfrac_nop}
\def\math_cfrac_nop {\math_cfrac_indeed[cc]}
\def\math_cfrac_yes[#1]{\math_cfrac_indeed[#1cc]}
@@ -319,6 +509,11 @@
\unexpanded\def\splitdfrac#1#2%
{{\displaystyle{{ #1\quad\hfill}\normalabove\zeropoint{ \hfill\quad\mathstrut#2}}}}
+%D For testing:
+
+% \unexpanded\def\ShowMathFractions#1#2%
+% {\mathematics{x+\tfrac{#1}{#2}+1+\frac{#1}{#2}+2+\sfrac{#1}{#2}+g}}
+
\protect \endinput
% I have no clue what \mthfrac and \mthsqrt are supposed to do but
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 6be06e634..e6a35c39e 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -18,30 +18,33 @@ if not modules then modules = { } end modules ['math-ini'] = {
local formatters, find = string.formatters, string.find
local utfchar, utfbyte, utflength = utf.char, utf.byte, utf.length
local floor = math.floor
+local toboolean = toboolean
-local context = context
-local commands = commands
+local context = context
+local commands = commands
+local implement = interfaces.implement
-local contextsprint = context.sprint
-local contextfprint = context.fprint -- a bit inefficient
+local context_sprint = context.sprint
+----- context_fprint = context.fprint -- a bit inefficient
+local ctx_doifelsesomething = commands.doifelsesomething
-local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
+local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
-local report_math = logs.reporter("mathematics","initializing")
+local report_math = logs.reporter("mathematics","initializing")
-mathematics = mathematics or { }
-local mathematics = mathematics
+mathematics = mathematics or { }
+local mathematics = mathematics
-mathematics.extrabase = 0xFE000 -- here we push some virtuals
-mathematics.privatebase = 0xFF000 -- here we push the ex
+mathematics.extrabase = 0xFE000 -- here we push some virtuals
+mathematics.privatebase = 0xFF000 -- here we push the ex
-local unsetvalue = attributes.unsetvalue
-local allocate = utilities.storage.allocate
-local chardata = characters.data
+local unsetvalue = attributes.unsetvalue
+local allocate = utilities.storage.allocate
+local chardata = characters.data
-local texsetattribute = tex.setattribute
-local setmathcode = tex.setmathcode
-local setdelcode = tex.setdelcode
+local texsetattribute = tex.setattribute
+local setmathcode = tex.setmathcode
+local setdelcode = tex.setdelcode
local families = allocate {
mr = 0,
@@ -213,28 +216,28 @@ local f_char = formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ]
local setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing
if class == classes.accent then
- contextsprint(f_accent(name,family,slot))
+ context_sprint(f_accent(name,family,slot))
elseif class == classes.topaccent then
- contextsprint(f_topaccent(name,family,slot))
+ context_sprint(f_topaccent(name,family,slot))
elseif class == classes.botaccent then
- contextsprint(f_botaccent(name,family,slot))
+ context_sprint(f_botaccent(name,family,slot))
elseif class == classes.over then
- contextsprint(f_over(name,family,slot))
+ context_sprint(f_over(name,family,slot))
elseif class == classes.under then
- contextsprint(f_under(name,family,slot))
+ context_sprint(f_under(name,family,slot))
elseif class == open_class or class == close_class or class == middle_class then
setdelcode("global",slot,{family,slot,0,0})
- contextsprint(f_fence(name,class,family,slot))
+ context_sprint(f_fence(name,class,family,slot))
elseif class == classes.delimiter then
setdelcode("global",slot,{family,slot,0,0})
- contextsprint(f_delimiter(name,family,slot))
+ context_sprint(f_delimiter(name,family,slot))
elseif class == classes.radical then
- contextsprint(f_radical(name,family,slot))
+ context_sprint(f_radical(name,family,slot))
elseif class == classes.root then
- contextsprint(f_root(name,family,slot))
+ context_sprint(f_root(name,family,slot))
else
-- beware, open/close and other specials should not end up here
- contextsprint(f_char(name,class,family,slot))
+ context_sprint(f_char(name,class,family,slot))
end
end
@@ -350,10 +353,12 @@ local utf8byte = lpeg.patterns.utf8byte * lpeg.P(-1)
local somechar = { }
table.setmetatableindex(somechar,function(t,k)
- local b = lpegmatch(utf8byte,k)
- local v = b and chardata[b] or false
- t[k] = v
- return v
+ if k then
+ local b = lpegmatch(utf8byte,k)
+ local v = b and chardata[b] or false
+ t[k] = v
+ return v
+ end
end)
local function utfmathclass(chr, default)
@@ -462,7 +467,7 @@ end
local function utfmathfiller(chr, default)
local cd = somechar[chr]
- local cmd = cd and (cd.mathfiller or cd.mathname)
+ local cmd = cd and cd.mathfiller -- or cd.mathname
return cmd or default or ""
end
@@ -470,23 +475,76 @@ mathematics.utfmathclass = utfmathclass
mathematics.utfmathstretch = utfmathstretch
mathematics.utfmathcommand = utfmathcommand
mathematics.utfmathfiller = utfmathfiller
+mathematics.utfmathaccent = utfmathaccent
-- interfaced
-function commands.utfmathclass (...) context(utfmathclass (...)) end
-function commands.utfmathstretch(...) context(utfmathstretch(...)) end
-function commands.utfmathcommand(...) context(utfmathcommand(...)) end
-function commands.utfmathfiller (...) context(utfmathfiller (...)) end
+implement {
+ name = "utfmathclass",
+ actions = { utfmathclass, context },
+ arguments = "string"
+}
-function commands.doifelseutfmathaccent(chr,asked)
- commands.doifelse(utfmathaccent(chr,nil,asked))
-end
+implement {
+ name = "utfmathstretch",
+ actions = { utfmathstretch, context },
+ arguments = "string"
+}
-function commands.utfmathcommandabove(asked) context(utfmathcommand(asked,nil,"topaccent","over" )) end
-function commands.utfmathcommandbelow(asked) context(utfmathcommand(asked,nil,"botaccent","under")) end
+implement {
+ name = "utfmathcommand",
+ actions = { utfmathcommand, context },
+ arguments = "string"
+}
-function commands.doifelseutfmathabove(chr) commands.doifelse(utfmathaccent(chr,nil,"topaccent","over" )) end
-function commands.doifelseutfmathbelow(chr) commands.doifelse(utfmathaccent(chr,nil,"botaccent","under")) end
+implement {
+ name = "utfmathfiller",
+ actions = { utfmathfiller, context },
+ arguments = "string"
+}
+
+implement {
+ name = "utfmathcommandabove",
+ actions = { utfmathcommand, context },
+ arguments = { "string", false, "'topaccent'","'over'" }
+}
+
+implement {
+ name = "utfmathcommandbelow",
+ actions = { utfmathcommand, context },
+ arguments = { "string", false, "'botaccent'","'under'" }
+}
+implement {
+ name = "utfmathcommandfiller",
+ actions = { utfmathfiller, context },
+ arguments = "string"
+}
+
+-- todo: make this a helper:
+
+implement {
+ name = "doifelseutfmathabove",
+ actions = { utfmathaccent, ctx_doifelsesomething },
+ arguments = { "string", false, "'topaccent'", "'over'" }
+}
+
+implement {
+ name = "doifelseutfmathbelow",
+ actions = { utfmathaccent, ctx_doifelsesomething },
+ arguments = { "string", false, "'botaccent'", "'under'" }
+}
+
+implement {
+ name = "doifelseutfmathaccent",
+ actions = { utfmathaccent, ctx_doifelsesomething },
+ arguments = "string",
+}
+
+implement {
+ name = "doifelseutfmathfiller",
+ actions = { utfmathfiller, ctx_doifelsesomething },
+ arguments = "string",
+}
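+
+-- A possible shape for such a helper (an untested sketch, not used here):
+--
+-- local function implementdoifelse(specification)
+--     specification.actions = { specification.actions, ctx_doifelsesomething }
+--     implement(specification)
+-- end
+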
-- helpers
--
@@ -596,9 +654,20 @@ local noffunctions = 1000 -- offset
categories.functions = functions
-function commands.taggedmathfunction(tag,label,apply)
- local delta = apply and 1000 or 0
- if label then
+implement {
+ name = "tagmfunctiontxt",
+ arguments = { "string", "conditional" },
+ actions = function(tag,apply)
+ local delta = apply and 1000 or 0
+ texsetattribute(a_mathcategory,1000 + delta)
+ end
+}
+
+implement {
+ name = "tagmfunctionlab",
+ arguments = { "string", "conditional" },
+ actions = function(tag,apply)
+ local delta = apply and 1000 or 0
local n = functions[tag]
if not n then
noffunctions = noffunctions + 1
@@ -608,18 +677,14 @@ function commands.taggedmathfunction(tag,label,apply)
else
texsetattribute(a_mathcategory,n + delta)
end
- context.mathlabeltext(tag)
- else
- texsetattribute(a_mathcategory,1000 + delta)
- context(tag)
end
-end
+}
--
local list
-function commands.resetmathattributes()
+function mathematics.resetattributes()
if not list then
list = { }
for k, v in next, attributes.numbers do
@@ -632,3 +697,16 @@ function commands.resetmathattributes()
texsetattribute(list[i],unsetvalue)
end
end
+
+implement {
+ name = "resetmathattributes",
+ actions = mathematics.resetattributes
+}
+
+-- weird to do this here but it's a side effect of math anyway
+
+interfaces.implement {
+ name = "enableasciimode",
+ onlyonce = true,
+ actions = resolvers.macros.enablecomment,
+}
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index bf9f5278c..83c7554eb 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -13,6 +13,18 @@
\writestatus{loading}{ConTeXt Math Macros / Initializations}
+% Todo in luatex maincontrol.w: also accept a number here:
+%
+% case set_math_param_cmd:
+% p = cur_chr;
+% get_token();
+% if (cur_cmd != math_style_cmd) {
+%
+% plus two new math styles: larger/smaller
+%
+% \unexpanded\def\Umathshow#1%
+% {\hbox{\infofont(\string#1:\the#1\textstyle,\the#1\scriptstyle,\the#1\scriptscriptstyle)}}
+
%D This module provides namespaces for math fonts, thereby permitting mixed usage of
%D math fonts. Although not strictly needed, we also provide a family name mapping
%D mechanism as used in the (original) AMS math definition files, but here these
@@ -26,6 +38,15 @@
%D restore a changed mathstyle so best avoid that one. However, there are cases where
%D we really need to use such grouping.
+% Weird, these fail, maybe amp is solved in a later state from char noads (needs a
+% fix in luatex):
+%
+% $\char"26$
+% $\a$
+% $\string&$
+
+% mathop applied to a character centers it vertically
+
\unprotect
%D We move these definitions into the format:
@@ -80,7 +101,35 @@
\setnewconstant\defaultmathfamily \zerocount % 255
-\unexpanded\def\resetmathattributes{\ctxcommand{resetmathattributes()}}
+\unexpanded\def\resetmathattributes{\clf_resetmathattributes}
+
+% handy
+
+\newconditional\indisplaymath
+
+\appendtoks
+ \setfalse\indisplaymath
+\to \everymath
+
+\appendtoks
+ \settrue\indisplaymath
+\to \everydisplay
+
+\def\inlineordisplaymath{\ifconditional\indisplaymath\expandafter\secondoftwoarguments\else\expandafter\firstoftwoarguments\fi}
+
+\unexpanded\def\forcedisplaymath
+ {\ifmmode
+ \displaystyle
+ \settrue\indisplaymath
+ \fi}
+
+\unexpanded\def\startforceddisplaymath
+ {\startimath
+ \displaystyle
+ \settrue\indisplaymath}
+
+\unexpanded\def\stopforceddisplaymath
+ {\stopimath}
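+
+% For instance (an untested sketch):
+%
+% compare $\sum_{i=1}^{n}x_i$ with $\forcedisplaymath\sum_{i=1}^{n}x_i$ and with
+% \startforceddisplaymath \sum_{i=1}^{n}x_i \stopforceddisplaymath, while
+% \inlineordisplaymath{a}{b} expands to a in inline and to b in display math.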
% \unexpanded\def\rawmathcharacter#1% slow but only for tracing
% {\begingroup
@@ -94,9 +143,9 @@
\unexpanded\def\rawmathematics#1% slow but only for tracing
{\begingroup
\ifmmode
- \resetmathattributes#1%
+ \clf_resetmathattributes#1%
\else
- \startimath\resetmathattributes#1\stopimath
+ \startimath\clf_resetmathattributes#1\stopimath
\fi
\endgroup}
@@ -117,7 +166,7 @@
\installswitchcommandhandler \??mathematics {mathematics} \??mathematics
\unexpanded\def\startmathematics % no grouping, if ever then also an optional second
- {\doifnextoptionalelse\math_mathematics_start_yes\math_mathematics_start_nop}
+ {\doifelsenextoptionalcs\math_mathematics_start_yes\math_mathematics_start_nop}
\unexpanded\def\math_mathematics_start_yes[#1]%
{\pushmacro\currentmathematics
@@ -135,22 +184,149 @@
\definemathematics[\v!default] % not needed, but nicer when nesting back to normal
-% Normally this is applied to only one character.
+% Now we redefine \type {\mathematics} and \type {\m}:
+
+\unexpanded\def\mathematics
+ {\doifelsenextoptionalcs\math_m_yes\math_m_nop}
+
+\def\math_m_yes
+ {\relax
+ \ifmmode
+ \expandafter\math_m_yes_math
+ \else
+ \expandafter\math_m_yes_text
+ \fi}
+
+\def\math_m_yes_math[#1]#2%
+ {#2}
+
+\def\math_m_yes_text[#1]%
+ {\begingroup
+ \edef\currentmathematics{#1}% check for valid
+ \edef\p_openup{\mathematicsparameter\c!openup}%
+ \ifx\p_openup\v!yes
+ \expandafter\math_m_yes_text_openedup
+ \else
+ \expandafter\math_m_yes_text_normal
+ \fi}
+
+\def\math_m_yes_text_openedup#1%
+ {\setbox\scratchbox\hbox\bgroup
+ \normalstartimath
+ \the\everyswitchmathematics\relax
+ #1%
+ \normalstopimath
+ \egroup
+ \ifdim\ht\scratchbox>\strutht
+ \math_inline_openup_start_yes
+ \else\ifdim\dp\scratchbox>\strutdp
+ \math_inline_openup_start_yes
+ \else
+ \math_inline_openup_start_nop
+ \fi\fi
+ \unhbox\scratchbox % \normalstartimath#1\normalstopimath
+ \math_inline_openup_stop
+ \endgroup}
+
+\def\math_m_yes_text_normal#1%
+ {\normalstartimath
+ \the\everyswitchmathematics\relax
+ #1%
+ \normalstopimath
+ \endgroup}
+
+\def\math_m_nop#1%
+ {\relax
+ \ifmmode
+ #1%
+ \else
+ \normalstartimath
+ #1%
+ \normalstopimath
+ \fi}
+
+\let\m\mathematics
+
+% e.g.: \definemathematics[i:mp][setups=i:tight,openup=yes]
+
+\newmuskip\defaultthickmuskip \defaultthickmuskip 5mu plus 5mu
+\newmuskip\defaultmedmuskip \defaultmedmuskip 4mu plus 2mu minus 4mu
+\newmuskip\defaultthinmuskip \defaultthinmuskip 3mu
+
+\newmuskip\halfthickmuskip \halfthickmuskip 2.5mu plus 2.5mu
+\newmuskip\halfmedmuskip \halfmedmuskip 2.0mu plus 1.0mu minus 2.0mu
+\newmuskip\halfthinmuskip \halfthinmuskip 1.5mu
+
+\newcount \defaultrelpenalty \defaultrelpenalty 500
+\newcount \defaultbinoppenalty \defaultbinoppenalty 700
+
+
+\startsetups math:spacing:default
+ \thickmuskip \defaultthickmuskip
+ \medmuskip \defaultmedmuskip
+ \thinmuskip \defaultthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+\stopsetups
+
+\startsetups math:spacing:half
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+\stopsetups
+
+\startsetups math:spacing:tight
+ \ifcase\raggedstatus
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \else
+ \thickmuskip 1\halfthickmuskip
+ \medmuskip 1\halfmedmuskip
+ \thinmuskip 1\halfthinmuskip
+ \fi
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \maxdimen
+\stopsetups
+
+\startsetups math:spacing:fixed
+ \ifcase\raggedstatus
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \else
+ \thickmuskip 1\halfthickmuskip
+ \medmuskip 1\halfmedmuskip
+ \thinmuskip 1\halfthinmuskip
+ \fi
+ \relpenalty \maxdimen
+ \binoppenalty \maxdimen
+\stopsetups
+
+% \dorecurse{80}{test \m[i:tight]{\red \fakeformula} test }
+
+\definemathematics[i:default][\c!setups=math:spacing:equal]
+\definemathematics[i:half] [\c!setups=math:spacing:half]
+\definemathematics[i:tight] [\c!setups=math:spacing:tight]
+\definemathematics[i:fixed] [\c!setups=math:spacing:fixed]
+
+% Normally the next is applied to only one character.
%
% $ABC$ $\cal ABC$ $\mathaltcal ABC$
% todo: only in mmode
-
% these commands are semi-public but should not be used directly (lua names will change)
-\unexpanded\def\math_set_attribute #1#2{\ifmmode\ctxcommand{setmathattribute("#1","#2")}\fi}
-\unexpanded\def\math_set_alphabet #1{\ifmmode\ctxcommand{setmathalphabet("#1")}\fi}
-\unexpanded\def\math_set_font_style #1{\ifmmode\ctxcommand{setmathstyle("#1")}\fi}
-\unexpanded\def\math_set_font_alternate#1{\ifmmode\ctxcommand{setmathalternate(\number\defaultmathfamily,"#1")}\fi}
+\unexpanded\def\math_set_attribute #1#2{\ifmmode\clf_setmathattribute{#1}{#2}\fi}
+\unexpanded\def\math_set_alphabet #1{\ifmmode\clf_setmathalphabet{#1}\fi}
+\unexpanded\def\math_set_font_style #1{\ifmmode\clf_setmathstyle{#1}\fi}
+\unexpanded\def\math_set_font_alternate#1{\ifmmode\clf_setmathalternate\defaultmathfamily{#1}\fi}
\installcorenamespace{mathstylealternate} % might become a setuphandler
-\unexpanded\def\math_set_font_style_alterternate#1%
+\unexpanded\def\math_set_font_style_alternate#1%
{\ifcsname\??mathstylealternate\fontclass:#1\endcsname
\expandafter\math_set_font_alternate\csname\??mathstylealternate\fontclass:#1\endcsname
\else\ifcsname\??mathstylealternate#1\endcsname
@@ -169,31 +345,31 @@
\unexpanded\def\mathaltcal{\math_set_font_alternate{cal}\cal} % ss01 in xits
-\let\setmathattribute \math_set_attribute
-\let\setmathalphabet \math_set_alphabet
-\let\setmathfontstyle \math_set_font_style
-\let\setmathfontalternate \math_set_font_alternate
-\let\setmathfontstylealterternate\math_set_font_style_alterternate
+\let\setmathattribute \math_set_attribute
+\let\setmathalphabet \math_set_alphabet
+\let\setmathfontstyle \math_set_font_style
+\let\setmathfontalternate \math_set_font_alternate
+\let\setmathfontstylealternate\math_set_font_style_alternate
-\let\mathalternate \math_set_font_alternate % obsolete
+\let\mathalternate \math_set_font_alternate % obsolete
-\unexpanded\def\mathupright {\math_set_attribute\s!regular\s!tf\math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathdefault {\math_set_attribute\s!regular\s!it\math_set_font_style_alterternate\s!it}
-\unexpanded\def\mathscript {\math_set_alphabet \s!script \math_set_font_style_alterternate\s!script}
-\unexpanded\def\mathfraktur {\math_set_alphabet \s!fraktur \math_set_font_style_alterternate\s!fraktur}
-\unexpanded\def\mathblackboard{\math_set_alphabet \s!blackboard \math_set_font_style_alterternate\s!blackboard}
+\unexpanded\def\mathupright {\math_set_attribute\s!regular\s!tf\math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathdefault {\math_set_attribute\s!regular\s!it\math_set_font_style_alternate\s!it}
+\unexpanded\def\mathscript {\math_set_alphabet \s!script \math_set_font_style_alternate\s!script}
+\unexpanded\def\mathfraktur {\math_set_alphabet \s!fraktur \math_set_font_style_alternate\s!fraktur}
+\unexpanded\def\mathblackboard{\math_set_alphabet \s!blackboard \math_set_font_style_alternate\s!blackboard}
-\unexpanded\def\mathrm {\math_set_attribute\s!rm\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathss {\math_set_attribute\s!ss\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathtt {\math_set_attribute\s!tt\s!tf \math_set_font_style_alterternate\s!tf}
+\unexpanded\def\mathrm {\math_set_attribute\s!rm\s!tf \math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathss {\math_set_attribute\s!ss\s!tf \math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathtt {\math_set_attribute\s!tt\s!tf \math_set_font_style_alternate\s!tf}
-\unexpanded\def\mathtf {\math_set_font_style\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathsl {\math_set_font_style\s!it \math_set_font_style_alterternate\s!it} % no sl
-\unexpanded\def\mathit {\math_set_font_style\s!it \math_set_font_style_alterternate\s!it}
+\unexpanded\def\mathtf {\math_set_font_style\s!tf \math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathsl {\math_set_font_style\s!it \math_set_font_style_alternate\s!it} % no sl
+\unexpanded\def\mathit {\math_set_font_style\s!it \math_set_font_style_alternate\s!it}
-\unexpanded\def\mathbf {\math_set_font_style\s!bf \math_set_font_style_alterternate\s!bf}
-\unexpanded\def\mathbs {\math_set_font_style\s!bi \math_set_font_style_alterternate\s!bi} % no sl
-\unexpanded\def\mathbi {\math_set_font_style\s!bi \math_set_font_style_alterternate\s!bi}
+\unexpanded\def\mathbf {\math_set_font_style\s!bf \math_set_font_style_alternate\s!bf}
+\unexpanded\def\mathbs {\math_set_font_style\s!bi \math_set_font_style_alternate\s!bi} % no sl
+\unexpanded\def\mathbi {\math_set_font_style\s!bi \math_set_font_style_alternate\s!bi}
\let\tfmath\mathtf % maybe a grouped command
\let\slmath\mathsl
@@ -216,12 +392,12 @@
\unexpanded\def\mathfrak#1{{\mathfraktur #1}} % for AMS compatibility
\unexpanded\def\mathbb #1{{\mathblackboard#1}} % for AMS compatibility
-\let\normaltf\tf \unexpanded\def\tf{\ifmmode\mathtf\else\normaltf\fi}
-\let\normalbf\bf \unexpanded\def\bf{\ifmmode\mathbf\else\normalbf\fi}
-\let\normalit\it \unexpanded\def\it{\ifmmode\mathit\else\normalit\fi}
-\let\normalsl\sl \unexpanded\def\sl{\ifmmode\mathsl\else\normalsl\fi}
-\let\normalbi\bi \unexpanded\def\bi{\ifmmode\mathbi\else\normalbi\fi}
-\let\normalbs\bs \unexpanded\def\bs{\ifmmode\mathbs\else\normalbs\fi}
+\ifdefined\normaltf\else\let\normaltf\tf\fi \unexpanded\def\tf{\ifmmode\mathtf\else\normaltf\fi}
+\ifdefined\normalbf\else\let\normalbf\bf\fi \unexpanded\def\bf{\ifmmode\mathbf\else\normalbf\fi}
+\ifdefined\normalit\else\let\normalit\it\fi \unexpanded\def\it{\ifmmode\mathit\else\normalit\fi}
+\ifdefined\normalsl\else\let\normalsl\sl\fi \unexpanded\def\sl{\ifmmode\mathsl\else\normalsl\fi}
+\ifdefined\normalbi\else\let\normalbi\bi\fi \unexpanded\def\bi{\ifmmode\mathbi\else\normalbi\fi}
+\ifdefined\normalbs\else\let\normalbs\bs\fi \unexpanded\def\bs{\ifmmode\mathbs\else\normalbs\fi}
\let\normalrm\rm \unexpanded\def\rm{\ifmmode\mathrm\else\normalrm\fi}
\let\normalss\ss \unexpanded\def\ss{\ifmmode\mathss\else\normalss\fi}
@@ -230,6 +406,12 @@
\ifdefined\mr \else \let\mr\relax \fi
\ifdefined\mb \else \let\mb\relax \fi
+% 1: $\setmathattribute{ss}{bf}3$
+% 2: $\setmathattribute{ss}{bf}\setmathfontstylealternate{bf}3$
+% 3: $\setmathattribute{ss}{bf}\setmathfontstyle{bf}3$
+% 4: $\setmathattribute{ss}{bf}\setmathfontstyle{bf}\setmathfontstylealternate{bf}3$
+% 5: $e=mc^2 \quad \mb e=mc^2$
+
\prependtoks
\mathdefault
\to \everymathematics
@@ -254,7 +436,7 @@
\newcount\c_math_renderings_attribute
\appendtoks
- \c_math_renderings_attribute\ctxcommand{mathrenderset("\mathematicsparameter\c!symbolset")}\relax
+ \c_math_renderings_attribute\clf_mathrenderset{\mathematicsparameter\c!symbolset}\relax
\to \everysetupmathematics % only in mathematics
\appendtoks
@@ -276,24 +458,26 @@
\unexpanded\def\boldsymbol
{\mathortext\mathboldsymbol\bold}
-%D Helpers:
-
-\def\utfmathclass #1{\ctxcommand{utfmathclass (\!!bs#1\!!es)}}
-\def\utfmathstretch#1{\ctxcommand{utfmathstretch(\!!bs#1\!!es)}}
-\def\utfmathcommand#1{\ctxcommand{utfmathcommand(\!!bs#1\!!es)}}
-\def\utfmathfiller #1{\ctxcommand{utfmathfiller (\!!bs#1\!!es)}}
+%D Helpers
-\def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}}
-\def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}}
+\def\utfmathclass #1{\clf_utfmathclass {#1}}
+\def\utfmathstretch#1{\clf_utfmathstretch{#1}}
+\def\utfmathcommand#1{\clf_utfmathcommand{#1}}
+\def\utfmathfiller #1{\clf_utfmathfiller {#1}}
-\def\utfmathcommandabove#1{\ctxcommand{utfmathcommandabove(\!!bs#1\!!es)}}
-\def\utfmathcommandbelow#1{\ctxcommand{utfmathcommandbelow(\!!bs#1\!!es)}}
+\def\utfmathcommandabove #1{\clf_utfmathcommandabove {#1}}
+\def\utfmathcommandbelow #1{\clf_utfmathcommandbelow {#1}}
+\def\utfmathcommandfiller#1{\clf_utfmathcommandfiller{#1}}
-\unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}}
-\unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}}
+\unexpanded\def\doifelseutfmathaccent#1{\clf_doifelseutfmathaccent{#1}}
+\unexpanded\def\doifelseutfmathabove #1{\clf_doifelseutfmathabove {#1}}
+\unexpanded\def\doifelseutfmathbelow #1{\clf_doifelseutfmathbelow {#1}}
+\unexpanded\def\doifelseutfmathfiller#1{\clf_doifelseutfmathfiller{#1}}
-\unexpanded\def\doifelseutfmathabove #1{\ctxcommand{doifelseutfmathabove(\!!bs#1\!!es)}}
-\unexpanded\def\doifelseutfmathbelow #1{\ctxcommand{doifelseutfmathbelow(\!!bs#1\!!es)}}
+\let\doifutfmathaccentelse \doifelseutfmathaccent
+\let\doifutfmathaboveelse \doifelseutfmathabove
+\let\doifutfmathbelowelse \doifelseutfmathbelow
+\let\doifutfmathfillerelse \doifelseutfmathfiller
%D Not used that much:
@@ -366,10 +550,39 @@
%D Let's define a few comands here:
-\definemathcommand [mathstrut] {\vphantom{(}}
+%definemathcommand [mathstrut] {\vphantom{(}}
%definemathcommand [joinrel] {\mathrel{\mkern-3mu}}
\definemathcommand [joinrel] [rel] {\mkern-3mu}
+\chardef\c_math_strut"28
+
+\unexpanded\def\math_strut_htdp#1%
+ {\s!height\fontcharht#1\c_math_strut
+ \s!depth \fontchardp#1\c_math_strut}
+
+\unexpanded\def\math_strut_normal
+ {\vrule
+ \normalexpanded{\math_strut_htdp{\mathstylefont\normalmathstyle}}%
+ \s!width \zeropoint
+ \relax}
+
+\unexpanded\def\math_strut_visual
+ {\hskip-.01\emwidth
+ \vrule
+ \normalexpanded{\math_strut_htdp{\mathstylefont\normalmathstyle}}%
+ \s!width .02\emwidth
+ \relax
+ \hskip-.01\emwidth}
+
+\unexpanded\def\showmathstruts % let's not overload \math_strut_normal
+ {\let\math_strut\math_strut_visual}
+
+\let\math_strut\math_strut_normal
+
+% \unexpanded\def\mathstrut{\mathcodecommand{nothing}{\math_strut}}
+
+\definemathcommand [mathstrut] {\math_strut}
+
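+% For instance (an untested sketch):
+%
+% \showmathstruts
+% \startformula \frac{\mathstrut a}{\mathstrut b} \stopformula
+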
%D We could have an arg variant \unknown\ but not now.
\unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
@@ -390,13 +603,14 @@
\unexpanded\def\mathop
{\normalmathop
\bgroup
- \let\rm\mf
+ % no: \let\rm\mf
\let\nexttoken=}
% this one too: \letvalue{\??mathcodecommand op}\mathop ?
\unexpanded\def\normalmbox
- {\normalhbox\bgroup\mf
+ {\normalhbox\bgroup
+ \usemathematicsstyleandcolor\c!textstyle\c!textcolor % new
\dowithnextboxcs\math_mbox_finish\normalhbox}
\def\math_mbox_finish
@@ -421,10 +635,13 @@
\startimath#1\stopimath
\egroup
\ht\scratchbox\strutht
- \dp\scratchbox\strutht
+ \dp\scratchbox\strutdp
\box\scratchbox
\endgroup}
+\unexpanded\def\mtext#1%
+ {\text{\usemathematicsstyleandcolor\c!textstyle\c!textcolor#1}}
+
%D The next hack is needed for sine, cosine etc.
\let\mathfunction\firstofoneunexpanded
@@ -438,19 +655,69 @@
\unexpanded\def\math_tags_mn#1{\begingroup\mathupright#1\endgroup}
\unexpanded\def\math_tags_ms#1{\begingroup\mathupright#1\endgroup}
-\unexpanded\def\mfunction #1{{\mathupright\math_tags_function{#1}}}
-\unexpanded\def\mfunctionlabeltext#1{{\mathupright\math_tags_functionlabeltext{#1}}}
-
% Once this is stable we can store the number at the tex end which is
% faster. Functions get numbers >= 1000.
-\expanded\def\math_tags_mathfunction_indeed #1{\ctxcommand{taggedmathfunction("#1",false,\ifconditional\c_apply_function true\else false\fi)}}
-\expanded\def\math_tags_mathfunctionlabeltext_indeed#1{\ctxcommand{taggedmathfunction("#1",true ,\ifconditional\c_apply_function true\else false\fi)}}
+\setupmathematics
+ [\c!textstyle=, % rm ss etc i.e. known alternatives, otherwise math
+ \c!textcolor=,
+ \c!functionstyle=, % rm ss etc i.e. known alternatives, otherwise math
+ \c!functioncolor=]
+
+% \unexpanded\def\math_mfunction_styled
+% {\edef\m_math_text_choice_face{\textstyleface\normalmathstyle}%
+% \dowithnextbox
+% {\mathop{\box\nextbox}}%
+% \hbox\bgroup
+% \usemathematicsstyleandcolor\c!functionstyle\c!functioncolor
+% \m_math_text_choice_face
+% \let\next}
+
+\unexpanded\def\math_mfunction_styled
+ {\begingroup
+ \usemathematicscolorparameter\c!functioncolor
+ \edef\p_functionstyle{\mathematicsparameter\c!functionstyle}%
+ \ifx\p_functionstyle\empty
+ \expandafter\math_mfunction_styled_none
+ \else\ifcsname\??alternativestyles\p_functionstyle\endcsname
+ \doubleexpandafter\math_mfunction_styled_text
+ \else
+ \doubleexpandafter\math_mfunction_styled_math
+ \fi\fi}
-\expanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
-\expanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
-\expanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
-\expanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup}
+\unexpanded\def\math_mfunction_styled_text#1%
+ {\mathoptext{\csname\??alternativestyles\p_functionstyle\endcsname#1}%
+ \endgroup}
+
+\unexpanded\def\math_mfunction_styled_math#1%
+ {\p_functionstyle
+ #1%
+ \endgroup}
+
+\unexpanded\def\math_mfunction_styled_none#1%
+ {\mathupright
+ #1%
+ \endgroup}
+
+\unexpanded\def\mfunction#1%
+ {\begingroup
+ \math_tags_mfunctiontxt{#1}\c_apply_function
+ \math_mfunction_styled{#1}%
+ \endgroup}
+
+\unexpanded\def\mfunctionlabeltext#1%
+ {\begingroup
+ \math_tags_mfunctionlab{#1}\c_apply_function
+ \math_mfunction_styled{\mathlabeltext{#1}}%
+ \endgroup}
+
+\let\math_tags_mfunctiontxt\gobbletwoarguments
+\let\math_tags_mfunctionlab\gobbletwoarguments
+
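+% For instance (an untested sketch using the parameters set above):
+%
+% \setupmathematics[functionstyle=ss,functioncolor=darkblue]
+%
+% $2\mfunction{sin}x$
+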
+\unexpanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
+\unexpanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
+\unexpanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
+\unexpanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup} % todo: mathoptext
\newconditional\c_apply_function
@@ -465,12 +732,12 @@
\endgroup}
\appendtoks
- \let\math_tags_function \math_tags_mathfunction_indeed
- \let\math_tags_functionlabeltext\math_tags_mathfunctionlabeltext_indeed
- \let\math_tags_mo \math_tags_mo_indeed
- \let\math_tags_mi \math_tags_mi_indeed
- \let\math_tags_mn \math_tags_mn_indeed
- \let\math_tags_ms \math_tags_ms_indeed
+ \let\math_tags_mfunctiontxt\clf_tagmfunctiontxt
+ \let\math_tags_mfunctionlab\clf_tagmfunctionlab
+ \let\math_tags_mo \math_tags_mo_indeed
+ \let\math_tags_mi \math_tags_mi_indeed
+ \let\math_tags_mn \math_tags_mn_indeed
+ \let\math_tags_ms \math_tags_ms_indeed
\to \everyenableelements
\appendtoks
@@ -484,21 +751,21 @@
% \def\mlimitsfunction #1{\mathlimopcomm{{\mr#1}}
% \def\mnolimitsfunction#1{\mathnolopcomm{{\mr#1}}
-%D Taco posted this solution as response to a mail by Olivier, so let's integrate
-%D it here.
-
-\def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
-
-\unexpanded\def\math_function_style_opnolimits #1{\mathop{\mscaledtext{#1}}\nolimits}
-\unexpanded\def\math_function_style_mfunction #1{\mscaledtext{\math_tags_function{#1}}}
-\unexpanded\def\math_function_style_mfunctionlabeltext#1{\mscaledtext{\math_tags_functionlabeltext{#1}}}
-
-\unexpanded\def\setmathfunctionstyle#1% rm ss tt (can be made faster if needed)
- {\doifsomething{#1}
- {\def\currentmscaledstyle{#1}%
- \let\mathopnolimits \math_function_style_opnolimits
- \let\mfunction \math_function_style_mfunction
- \let\mfunctionlabeltext\math_function_style_mfunctionlabeltext}}
+% %D Taco posted this solution as response to a mail by Olivier, so let's integrate
+% %D it here.
+%
+% \def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
+%
+% \unexpanded\def\math_function_style_opnolimits #1{\mathop{\mscaledtext{#1}}\nolimits}
+% \unexpanded\def\math_function_style_mfunction #1{\mscaledtext{\math_tags_function{#1}}}
+% \unexpanded\def\math_function_style_mfunctionlabeltext#1{\mscaledtext{\math_tags_functionlabeltext{#1}}}
+%
+% \unexpanded\def\setmathfunctionstyle#1% rm ss tt (can be made faster if needed)
+% {\doifsomething{#1}
+% {\def\currentmscaledstyle{#1}%
+% \let\mathopnolimits \math_function_style_opnolimits
+% \let\mfunction \math_function_style_mfunction
+% \let\mfunctionlabeltext\math_function_style_mfunctionlabeltext}}
\unexpanded\def\mscaledtext#1%
{\mathchoice
@@ -507,6 +774,9 @@
{\hbox{\csname\currentmscaledstyle\endcsname\tfx #1}}
{\hbox{\csname\currentmscaledstyle\endcsname\tfxx#1}}}
+\unexpanded\def\setmathfunctionstyle#1%
+ {\setupmathematics[\c!functionstyle=#1]} % for old times sake
+
%D We can force the way functions are typeset by manipulating the text option:
%D
%D \starttyping
@@ -788,17 +1058,16 @@
% \ifconditional\knuthmode\else\donknuthmode\fi}
\unexpanded\def\enableasciimode
- {\ctxlua{resolvers.macros.enablecomment()}%
- \glet\enableasciimode\relax}
+ {\clf_enableasciimode} % relaxes itself
\unexpanded\def\asciimode
{\catcodetable\txtcatcodes
- \enableasciimode}
+ \clf_enableasciimode}
\unexpanded\def\startasciimode
{\pushcatcodetable
\catcodetable\txtcatcodes
- \enableasciimode}
+ \clf_enableasciimode}
\unexpanded\def\stopasciimode
{\popcatcodetable}
@@ -816,7 +1085,7 @@
\to \everysetupmathematics
\setupmathematics
- [\c!compact=no]
+ [\c!compact=\v!no]
% \enabletrackers[typesetters.directions.math]
@@ -867,8 +1136,8 @@
\newcount\c_math_bidi
-\setvalue{\??mathbidi\v!no }{\ctxcommand{setmathdirection(0)}\c_math_bidi\attributeunsetvalue}
-\setvalue{\??mathbidi\v!yes}{\ctxcommand{setmathdirection(1)}\c_math_bidi\plusone}
+\setvalue{\??mathbidi\v!no }{\clf_setmathdirection\zerocount\relax\c_math_bidi\attributeunsetvalue}
+\setvalue{\??mathbidi\v!yes}{\clf_setmathdirection\plusone \relax\c_math_bidi\plusone}
\appendtoks
\edef\p_bidi{\mathematicsparameter\c!bidi}%
@@ -950,7 +1219,7 @@
\def\math_italics_initialize
{\ifnum\c_math_italics_attribute=\attributeunsetvalue \else
- \ctxcommand{setmathitalics()}% one time
+ \clf_setmathitalics % one time
\global\let\math_italics_initialize\relax
\fi}
@@ -1236,6 +1505,19 @@
\expandafter#3\else
\expandafter#1\fi}
+% \def\textstyleface#1% #1 is number (\normalmathstyle)
+% {\ifcase\numexpr#1\relax
+% \tf \or
+% \tf \or
+% \tf \or
+% \tf \or
+% \tfx \or
+% \tfx \or
+% \tfxx \or
+% \tfxx \else
+% \tf
+% \fi}
+
\unexpanded\def\verbosemathstyle#1% #1 is number (\normalmathstyle)
{{\normalexpanded{\relax\darkgray\ttxx[\number#1:\ifcase\numexpr#1\relax
display\or % 0
@@ -1267,7 +1549,9 @@
%D
%D \typebuffer \getbuffer
-\unexpanded\def\mathstylehbox#1%
+% to be tested: {#1} but it could have side effects
+
+\unexpanded\def\mathstylehbox#1% sensitive for: a \over b => {a\over b} or \frac{a}{b}
{\normalexpanded{\hbox\bgroup
\startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
@@ -1454,6 +1738,16 @@
\crampedscriptstyle \or
\fi}
+\newcount\c_math_saved_style
+
+\unexpanded\def\pushmathstyle % assumes begingroup .. endgroup
+ {\c_math_saved_style\mathstyle}
+
+\unexpanded\def\popmathstyle
+ {\ifnum\mathstyle=\c_math_saved_style\else
+ \triggermathstyle\c_math_saved_style
+ \fi}
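+
+% For instance (an untested sketch; assumes an enclosing group, as noted above):
+%
+% $x+\begingroup\pushmathstyle\scriptstyle y+z\popmathstyle\endgroup+x$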
+
\installcorenamespace{mathstylecommand}
\installcorenamespace{mathstylecache}
@@ -1789,7 +2083,14 @@
\ifdefined\text\else \let\text\hbox \fi
-\unexpanded\def\mathoptext#1{\mathop{\text{#1}}}
+% \unexpanded\def\mathoptext#1{\mathop{\text{#1}}}
+
+\unexpanded\def\mathoptext
+ {\normalizebodyfontsize\m_math_text_choice_face{\mathstyleface\normalmathstyle}%
+ %\showmathstyle
+ \dowithnextbox
+ {\mathop{\box\nextbox}}%
+ \hbox\bgroup\font_basics_switchtobodyfont\m_math_text_choice_face\let\next}
% new:
@@ -1814,11 +2115,140 @@
% this should be a primitive:
-% \def\mathextensiblecode#1#2%
-% {\cldcontext{mathematics.extensiblecode(\number#1,\number#2)}}
+\def\mathextensiblecode#1#2{\clf_extensiblecode\numexpr#1\relax\numexpr#2\relax}
+\def\mathhorizontalcode#1#2{\clf_horizontalcode\numexpr#1\relax\numexpr#2\relax}
+
+% experimental:
+
+%D \starttyping
+%D \enabletrackers[math.openedup]
+%D
+%D \dorecurse{10}{\dorecurse{#1}{whatever }}
+%D
+%D \startitemize[packed]
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \stopitemize
+%D \startitemize[packed,columns]
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \stopitemize
+%D
+%D \dorecurse{5}{\dorecurse{#1}{whatever }\openedupimath{\frac{1}{2}} }
+%D
+%D \startitemize[packed,columns]
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \stopitemize
+%D
+%D \dorecurse{5}{\dorecurse{#1}{whatever }\openedupimath{1+2} }
+%D
+%D \startitemize[packed]
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \stopitemize
+%D
+%D \dorecurse{10}{whatever }
+%D \dorecurse {5}{\dorecurse{#1}{whatever }\openedupimath{\frac{1}{2}} }
+%D \dorecurse{10}{whatever }
+%D \stoptyping
+
+\def\m_math_inline_openup_ht{\dimexpr\ifinsidecolumns\strutdp\else\lineheight\fi/\plusfour\relax}
+\def\m_math_inline_openup_dp{\dimexpr\ifinsidecolumns\strutdp\else\lineheight\fi/\plusfour\relax}
+
+% \def\m_math_inline_openup_ht{\dimexpr\lineheight/\ifinsidecolumns\pluseight\else\plusfour\fi\relax}
+% \def\m_math_inline_openup_dp{\dimexpr\lineheight/\ifinsidecolumns\pluseight\else\plusfour\fi\relax}
+
+\installtextracker
+ {math.openedup}
+ {\let\math_inline_openup_start_yes\math_inline_openup_traced_start}
+ {\let\math_inline_openup_start_yes\math_inline_openup_normal_start}
+
+\unexpanded\def\math_inline_openup_normal_start
+ {\scratchheight\dimexpr\ht\scratchbox+\m_math_inline_openup_ht\relax
+ \scratchdepth \dimexpr\dp\scratchbox+\m_math_inline_openup_dp\relax
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax
+ \begingroup
+ \let\math_inline_openup_stop\math_inline_openup_normal_stop}
+
+\unexpanded\def\math_inline_openup_normal_stop
+ {\endgroup
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax}
+
+\unexpanded\def\math_inline_openup_traced_start
+ {\scratchtopoffset \ht\scratchbox
+ \scratchbottomoffset\dp\scratchbox
+ \scratchheight \dimexpr\scratchtopoffset +\m_math_inline_openup_ht\relax
+ \scratchdepth \dimexpr\scratchbottomoffset+\m_math_inline_openup_dp\relax
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax
+ \begingroup
+ \dofastcoloractivation{darkred}%
+ \vrule\s!width\emwidth\s!height\scratchheight\s!depth-\scratchtopoffset\relax
+ \endgroup
+ \kern-\emwidth
+ \begingroup
+ \let\math_inline_openup_stop\math_inline_openup_traced_stop}
+
+\unexpanded\def\math_inline_openup_traced_stop
+ {\endgroup
+ \kern-\emwidth
+ \begingroup
+ \dofastcoloractivation{darkblue}%
+ \vrule\s!width\emwidth\s!height-\scratchbottomoffset\s!depth\scratchdepth\relax
+ \endgroup
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax}
+
+\let\math_inline_openup_start_yes\math_inline_openup_normal_start
+\let\math_inline_openup_stop \relax
+
+\def\math_inline_openup_start_nop
+ {\let\math_inline_openup_stop\relax}
-\def\mathextensiblecode#1#2{\ctxcommand{extensiblecode(\number#1,\number#2)}}
-\def\mathhorizontalcode#1#2{\ctxcommand{horizontalcode(\number#1,\number#2)}}
+\unexpanded\def\openedupimath
+ {\dontleavehmode
+ \begingroup
+ \ifmmode
+ \expandafter\openedupimath_math
+ \else
+ \expandafter\openedupimath_text
+ \fi}
+
+\unexpanded\def\openedupimath_math#1%
+ {\setbox\scratchbox\mathstylehbox{#1}%
+ \ifdim\ht\scratchbox>\strutht
+ \math_inline_openup_start_yes
+ \else\ifdim\dp\scratchbox>\strutdp
+ \math_inline_openup_start_yes
+ \else
+ \math_inline_openup_start_nop
+ \fi\fi
+ #1%
+ \math_inline_openup_stop
+ \endgroup}
+
+\unexpanded\def\openedupimath_text#1%
+ {\setbox\scratchbox\hbox{\startimath#1\stopimath}%
+ \ifdim\ht\scratchbox>\strutht
+ \math_inline_openup_start_yes
+ \else\ifdim\dp\scratchbox>\strutdp
+ \math_inline_openup_start_yes
+ \else
+ \math_inline_openup_start_nop
+ \fi\fi
+ \startimath
+ #1%
+ \stopimath
+ \math_inline_openup_stop
+ \endgroup}
\protect \endinput
diff --git a/tex/context/base/math-int.mkiv b/tex/context/base/math-int.mkiv
index 6b480961b..6b65738ff 100644
--- a/tex/context/base/math-int.mkiv
+++ b/tex/context/base/math-int.mkiv
@@ -13,6 +13,8 @@
\writestatus{loading}{ConTeXt Math Macros / Integrals}
+% todo: int and sum etc can be stackers
+
\unprotect
%D \startbuffer
diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua
index 8d301ac33..add6afa4c 100644
--- a/tex/context/base/math-map.lua
+++ b/tex/context/base/math-map.lua
@@ -44,8 +44,8 @@ local registerotffeature = otffeatures.register
local setmetatableindex = table.setmetatableindex
-local texgetattribute = tex.getattribute
-local texsetattribute = tex.setattribute
+local texgetattribute = tex.getattribute
+local texsetattribute = tex.setattribute
local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
local report_remapping = logs.reporter("mathematics","remapping")
@@ -53,13 +53,60 @@ local report_remapping = logs.reporter("mathematics","remapping")
mathematics = mathematics or { }
local mathematics = mathematics
+local implement = interfaces.implement
+
-- Unfortunately some alphabets have gaps (thereby troubling all applications that
-- need to deal with math). Somewhat strange considering all those weird symbols that
-- were added afterwards. The following trickery (and data) is only to be used for
-- diagnostics and quick and dirty alphabet tracing (s-mat-10.mkiv) as we deal with
-- it otherwise.
-mathematics.gaps = {
+-- todo: allocate
+
+mathematics.styles = allocate { "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard" }
+mathematics.alternatives = allocate { "normal", "bold", "italic", "bolditalic" }
+mathematics.sets = allocate { "ucletters", "lcletters", "digits", "ucgreek", "lcgreek", "symbols" }
+
+mathematics.charactersets = allocate {
+ ucletters = {
+ 0x00041, 0x00042, 0x00043, 0x00044, 0x00045,
+ 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A,
+ 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F,
+ 0x00050, 0x00051, 0x00052, 0x00053, 0x00054,
+ 0x00055, 0x00056, 0x00057, 0x00058, 0x00059,
+ 0x0005A,
+ },
+ lcletters = {
+ 0x00061, 0x00062, 0x00063, 0x00064, 0x00065,
+ 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A,
+ 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F,
+ 0x00070, 0x00071, 0x00072, 0x00073, 0x00074,
+ 0x00075, 0x00076, 0x00077, 0x00078, 0x00079,
+ 0x0007A,
+ },
+ digits = {
+ 0x00030, 0x00031, 0x00032, 0x00033, 0x00034,
+ 0x00035, 0x00036, 0x00037, 0x00038, 0x00039,
+ },
+ ucgreek = {
+ 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
+ 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
+ 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
+ 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5,
+ 0x03A6, 0x03A7, 0x03A8, 0x03A9
+ },
+ lcgreek = {
+ 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5,
+ 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA,
+ 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
+ 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4,
+ 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9,
+ 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1,
+ 0x03F4, 0x03F5
+ },
+}
+
+mathematics.gaps = allocate {
[0x1D455] = 0x0210E, -- ℎ h
[0x1D49D] = 0x0212C, -- ℬ script B
[0x1D4A0] = 0x02130, -- ℰ script E
@@ -112,9 +159,10 @@ registerotffeature {
-- following approach permits easier remapping of a-z, A-Z and 0-9 to
-- fallbacks; symbols is currently mostly greek
-local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
-local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
-local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
+local function todigit (n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
+local function toupper (n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
+local function tolower (n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
+local function tovector(t) return t end
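+
+-- for now tovector just passes the table through; it marks the places where a vector is
+-- shared between styles, presumably so that the sharing can later be replaced by real copies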
local regular_tf = {
digits = todigit(0x00030),
@@ -138,12 +186,12 @@ local regular_tf = {
},
symbols = {
[0x2202]=0x2202, [0x2207]=0x2207,
- [0x0027]=0x2032, -- prime
+ [0x0027]=0x2032, -- prime
},
}
local regular_it = {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = toupper(0x1D434),
lcletters = { -- H
[0x00061]=0x1D44E, [0x00062]=0x1D44F, [0x00063]=0x1D450, [0x00064]=0x1D451, [0x00065]=0x1D452,
@@ -202,7 +250,7 @@ local regular_bf= {
}
local regular_bi = {
- digits = regular_bf.digits,
+ digits = tovector(regular_bf.digits),
ucletters = toupper(0x1D468),
lcletters = tolower(0x1D482),
ucgreek = {
@@ -238,18 +286,18 @@ local sansserif_tf = {
digits = todigit(0x1D7E2),
ucletters = toupper(0x1D5A0),
lcletters = tolower(0x1D5BA),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local sansserif_it = {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = toupper(0x1D608),
lcletters = tolower(0x1D622),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local sansserif_bf = {
@@ -279,7 +327,7 @@ local sansserif_bf = {
}
local sansserif_bi = {
- digits = sansserif_bf.digits,
+ digits = tovector(sansserif_bf.digits),
ucletters = toupper(0x1D63C),
lcletters = tolower(0x1D656),
ucgreek = {
@@ -315,16 +363,20 @@ local monospaced_tf = {
digits = todigit(0x1D7F6),
ucletters = toupper(0x1D670),
lcletters = tolower(0x1D68A),
- lcgreek = sansserif_tf.lcgreek,
- ucgreek = sansserif_tf.ucgreek,
- symbols = sansserif_tf.symbols,
+ lcgreek = tovector(sansserif_tf.lcgreek),
+ ucgreek = tovector(sansserif_tf.ucgreek),
+ symbols = tovector(sansserif_tf.symbols),
}
+local monospaced_it = tovector(sansserif_it)
+local monospaced_bf = tovector(sansserif_bf)
+local monospaced_bi = tovector(sansserif_bi)
+
local monospaced = {
tf = monospaced_tf,
- it = sansserif_tf,
- bf = sansserif_tf,
- bi = sansserif_bf,
+ it = monospaced_tf,
+ bf = monospaced_tf,
+ bi = monospaced_bf,
}
local blackboard_tf = {
@@ -362,7 +414,7 @@ local blackboard = {
}
local fraktur_tf= {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = { -- C H I R Z
[0x00041]=0x1D504, [0x00042]=0x1D505, [0x00043]=0x0212D, [0x00044]=0x1D507, [0x00045]=0x1D508,
[0x00046]=0x1D509, [0x00047]=0x1D50A, [0x00048]=0x0210C, [0x00049]=0x02111, [0x0004A]=0x1D50D,
@@ -372,18 +424,18 @@ local fraktur_tf= {
[0x0005A]=0x02128,
},
lcletters = tolower(0x1D51E),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local fraktur_bf = {
- digits = regular_bf.digits,
+ digits = tovector(regular_bf.digits),
ucletters = toupper(0x1D56C),
lcletters = tolower(0x1D586),
- lcgreek = regular_bf.lcgreek,
- ucgreek = regular_bf.ucgreek,
- symbols = regular_bf.symbols,
+ lcgreek = tovector(regular_bf.lcgreek),
+ ucgreek = tovector(regular_bf.ucgreek),
+ symbols = tovector(regular_bf.symbols),
}
local fraktur = { -- ok
@@ -394,7 +446,7 @@ local fraktur = { -- ok
}
local script_tf = {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = { -- B E F H I L M R -- P 2118
[0x00041]=0x1D49C, [0x00042]=0x0212C, [0x00043]=0x1D49E, [0x00044]=0x1D49F, [0x00045]=0x02130,
[0x00046]=0x02131, [0x00047]=0x1D4A2, [0x00048]=0x0210B, [0x00049]=0x02110, [0x0004A]=0x1D4A5,
@@ -411,18 +463,18 @@ local script_tf = {
[0x00075]=0x1D4CA, [0x00076]=0x1D4CB, [0x00077]=0x1D4CC, [0x00078]=0x1D4CD, [0x00079]=0x1D4CE,
[0x0007A]=0x1D4CF,
},
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local script_bf = {
- digits = regular_bf.digits,
+ digits = tovector(regular_bf.digits),
ucletters = toupper(0x1D4D0),
lcletters = tolower(0x1D4EA),
- lcgreek = regular_bf.lcgreek,
- ucgreek = regular_bf.ucgreek,
- symbols = regular_bf.symbols,
+ lcgreek = tovector(regular_bf.lcgreek),
+ ucgreek = tovector(regular_bf.ucgreek),
+ symbols = tovector(regular_bf.symbols),
}
local script = {
@@ -441,80 +493,83 @@ local alphabets = allocate {
script = script,
}
-mathematics.alphabets = alphabets
+alphabets.tt = tovector(monospaced)
+alphabets.ss = tovector(sansserif)
+alphabets.rm = tovector(regular)
+alphabets.bb = tovector(blackboard)
+alphabets.fr = tovector(fraktur)
+alphabets.sr = tovector(script)
-local boldmap = { }
-
-local function remap(tf,bf)
- for _, alphabet in next, alphabets do
- local tfdata = alphabet[tf]
- local bfdata = alphabet[bf]
- if tfdata then
- for k, tfd in next, tfdata do
- if type(tfd) == "table" then
- local bfd = bfdata[k]
- if bfd then
- for n, u in next, tfd do
- local bn = bfd[n]
- if bn then
- boldmap[u] = bn
- end
- end
- end
- end
- end
- end
- end
-end
+monospaced.normal = tovector(monospaced_tf)
+monospaced.italic = tovector(monospaced_it)
+monospaced.bold = tovector(monospaced_bf)
+monospaced.bolditalic = tovector(monospaced_bi)
+
+sansserif.normal = tovector(sansserif_tf)
+sansserif.italic = tovector(sansserif_it)
+sansserif.bold = tovector(sansserif_bf)
+sansserif.bolditalic = tovector(sansserif_bi)
+
+regular.normal = tovector(regular_tf)
+regular.italic = tovector(regular_it)
+regular.bold = tovector(regular_bf)
+regular.bolditalic = tovector(regular_bi)
-remap("tf","bf")
-remap("it","bi")
+alphabets.serif = tovector(regular)
+alphabets.type = tovector(monospaced)
+alphabets.teletype = tovector(monospaced)
+
+mathematics.alphabets = alphabets
-mathematics.boldmap = boldmap
+local mathremap = allocate { }
+mathematics.mapremap = mathremap
-local mathremap = allocate { }
+local boldmap = allocate { }
+mathematics.boldmap = boldmap
+
+-- all math (a bit of redundancy here)
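+-- each alphabet/style combination gets a small descriptor (attribute, alphabet, style) that
+-- inherits the actual character vectors from the original data table via its metatable index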
for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing
for style, data in next, styles do
-- let's keep the long names (for tracing)
local n = #mathremap + 1
- data.attribute = n
- data.alphabet = alphabet
- data.style = style
- mathremap[n] = data
+ local d = {
+ attribute = n,
+ alphabet = alphabet,
+ style = style,
+ }
+ styles[style] = d
+ setmetatableindex(d,data) -- we could use an alphadata table
+ mathremap[n] = d
end
end
-mathematics.mapremap = mathremap
-
--- beware, these are shared tables (no problem since they're not
--- in unicode)
-
-alphabets.tt = monospaced
-alphabets.ss = sansserif
-alphabets.rm = regular
-alphabets.bb = blackboard
-alphabets.fr = fraktur
-alphabets.sr = script
-
-alphabets.serif = regular
-alphabets.type = monospaced
-alphabets.teletype = monospaced
-
-regular.normal = regular_tf
-regular.italic = regular_it
-regular.bold = regular_bf
-regular.bolditalic = regular_bi
+-- bold math
+
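+-- remapbold pairs each character in a regular vector (tf or it) with its counterpart in the
+-- bold vector (bf or bi) and stores the result in boldmap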
+local function remapbold(tf,bf)
+ local styles = mathematics.styles
+ local sets = mathematics.sets
+ for i=1,#styles do
+ for j=1,#sets do
+ local one = styles[i]
+ local two = sets[j]
+ local a = alphabets[one]
+ local tf = a[tf][two]
+ local bf = a[bf][two]
+ if tf and bf then
+ for k, v in next, tf do
+ boldmap[v] = bf[k]
+ end
+ end
+ end
+ end
+end
-sansserif.normal = sansserif_tf
-sansserif.italic = sansserif_it
-sansserif.bold = sansserif_bf
-sansserif.bolditalic = sansserif_bi
+remapbold("tf","bf")
+remapbold("it","bi")
-monospaced.normal = monospaced_tf
-monospaced.italic = monospaced_it
-monospaced.bold = monospaced_bf
-monospaced.bolditalic = monospaced_bi
+-- table.save("e:/tmp/a.lua",alphabets)
+-- table.save("e:/tmp/b.lua",boldmap)
function mathematics.tostyle(attribute)
local r = mathremap[attribute]
@@ -564,6 +619,39 @@ function mathematics.syncname(alphabet)
texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
end
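+-- the interface to the TeX end: set alphabet and style at once, only the style, or only the
+-- alphabet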
+implement {
+ name = "setmathattribute",
+ arguments = { "string", "string" },
+ actions = function(alphabet,style)
+ local data = alphabets[alphabet] or regular
+ data = data[style] or data.tf
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
+ end
+}
+
+implement {
+ name = "setmathstyle",
+ arguments = "string",
+ actions = function(style)
+ local r = mathremap[texgetattribute(mathalphabet)]
+ local alphabet = r and r.alphabet or "regular"
+ local data = alphabets[alphabet][style]
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
+ end
+}
+
+implement {
+ name = "setmathalphabet",
+ arguments = "string",
+ actions = function(alphabet)
+ -- local r = mathremap[mathalphabet]
+ local r = mathremap[texgetattribute(mathalphabet)]
+ local style = r and r.style or "tf"
+ local data = alphabets[alphabet][style]
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
+ end
+}
+
local islcgreek = regular_tf.lcgreek
local isucgreek = regular_tf.ucgreek
local issygreek = regular_tf.symbols
@@ -689,9 +777,3 @@ function mathematics.addfallbacks(main)
checkedcopy(characters,regular.bi.ucgreek,regular.it.ucgreek)
checkedcopy(characters,regular.bi.lcgreek,regular.it.lcgreek)
end
-
--- interface
-
-commands.setmathattribute = mathematics.syncboth
-commands.setmathalphabet = mathematics.syncname
-commands.setmathstyle = mathematics.syncstyle
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index f3987c12f..cdbbe36c3 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -28,44 +28,67 @@ local otf = fonts.handlers.otf
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
-local trace_remapping = false trackers.register("math.remapping", function(v) trace_remapping = v end)
-local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end)
-local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end)
-local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end)
-local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end)
-local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end)
-local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end)
-local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end)
-local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end)
-local trace_families = false trackers.register("math.families", function(v) trace_families = v end)
-
-local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end)
-
-local report_processing = logs.reporter("mathematics","processing")
-local report_remapping = logs.reporter("mathematics","remapping")
-local report_normalizing = logs.reporter("mathematics","normalizing")
-local report_collapsing = logs.reporter("mathematics","collapsing")
-local report_goodies = logs.reporter("mathematics","goodies")
-local report_variants = logs.reporter("mathematics","variants")
-local report_alternates = logs.reporter("mathematics","alternates")
-local report_italics = logs.reporter("mathematics","italics")
-local report_families = logs.reporter("mathematics","families")
-
-local a_mathrendering = attributes.private("mathrendering")
-local a_exportstatus = attributes.private("exportstatus")
-
-local mlist_to_hlist = node.mlist_to_hlist
-local font_of_family = node.family_font
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local free_node = node.free
-local new_node = node.new -- todo: pool: math_noad math_sub
-local copy_node = node.copy
+local privateattribute = attributes.private
+local registertracker = trackers.register
+local registerdirective = directives.register
+local logreporter = logs.reporter
+
+local trace_remapping = false registertracker("math.remapping", function(v) trace_remapping = v end)
+local trace_processing = false registertracker("math.processing", function(v) trace_processing = v end)
+local trace_analyzing = false registertracker("math.analyzing", function(v) trace_analyzing = v end)
+local trace_normalizing = false registertracker("math.normalizing", function(v) trace_normalizing = v end)
+local trace_collapsing = false registertracker("math.collapsing", function(v) trace_collapsing = v end)
+local trace_patching = false registertracker("math.patching", function(v) trace_patching = v end)
+local trace_goodies = false registertracker("math.goodies", function(v) trace_goodies = v end)
+local trace_variants = false registertracker("math.variants", function(v) trace_variants = v end)
+local trace_alternates = false registertracker("math.alternates", function(v) trace_alternates = v end)
+local trace_italics = false registertracker("math.italics", function(v) trace_italics = v end)
+local trace_families = false registertracker("math.families", function(v) trace_families = v end)
+
+local check_coverage = true registerdirective("math.checkcoverage", function(v) check_coverage = v end)
+
+local report_processing = logreporter("mathematics","processing")
+local report_remapping = logreporter("mathematics","remapping")
+local report_normalizing = logreporter("mathematics","normalizing")
+local report_collapsing = logreporter("mathematics","collapsing")
+local report_patching = logreporter("mathematics","patching")
+local report_goodies = logreporter("mathematics","goodies")
+local report_variants = logreporter("mathematics","variants")
+local report_alternates = logreporter("mathematics","alternates")
+local report_italics = logreporter("mathematics","italics")
+local report_families = logreporter("mathematics","families")
+
+local a_mathrendering = privateattribute("mathrendering")
+local a_exportstatus = privateattribute("exportstatus")
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+local tonut = nuts.tonut
+local nutstring = nuts.tostring
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local free_node = nuts.free
+local new_node = nuts.new -- todo: pool: math_noad math_sub
+local copy_node = nuts.copy
+
+local mlist_to_hlist = nodes.mlist_to_hlist
-local new_kern = nodes.pool.kern
-local new_rule = nodes.pool.rule
+local font_of_family = node.family_font
-local topoints = number.points
+local new_kern = nodepool.kern
+local new_rule = nodepool.rule
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -126,23 +149,23 @@ local function process(start,what,n,parent)
if n then n = n + 1 else n = 0 end
local prev = nil
while start do
- local id = start.id
+ local id = getid(start)
if trace_processing then
if id == math_noad then
- report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
+ report_processing("%w%S, class %a",n*2,nutstring(start),noadcodes[getsubtype(start)])
elseif id == math_char then
- local char = start.char
- local fam = start.fam
+ local char = getchar(start)
+ local fam = getfield(start,"fam")
local font = font_of_family(fam)
- report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
else
- report_processing("%w%S",n*2,start)
+ report_processing("%w%S",n*2,nutstring(start))
end
end
local proc = what[id]
if proc then
-- report_processing("start processing")
- local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
+ local done, newstart = proc(start,what,n,parent) -- prev is bugged: or getprev(start)
if newstart then
start = newstart
-- report_processing("stop processing (new start)")
@@ -154,60 +177,102 @@ local function process(start,what,n,parent)
elseif id == math_noad then
if prev then
-- we have no proper prev in math nodes yet
- start.prev = prev
+ setfield(start,"prev",prev)
end
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
+
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
elseif id == math_box or id == math_sub then
- -- local noad = start.list if noad then process(noad,what,n,start) end -- list
- local noad = start.head if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"list") if noad then process(noad,what,n,start) end -- list (not getlist !)
elseif id == math_fraction then
- local noad = start.num if noad then process(noad,what,n,start) end -- list
- noad = start.denom if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.right if noad then process(noad,what,n,start) end -- delimiter
+ local noad = getfield(start,"num") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"denom") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
+ noad = getfield(start,"right") if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_choice then
- local noad = start.display if noad then process(noad,what,n,start) end -- list
- noad = start.text if noad then process(noad,what,n,start) end -- list
- noad = start.script if noad then process(noad,what,n,start) end -- list
- noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"display") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"text") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"script") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"scriptscript") if noad then process(noad,what,n,start) end -- list
elseif id == math_fence then
- local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
+ local noad = getfield(start,"delim") if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_radical then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.degree if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
+ noad = getfield(start,"degree") if noad then process(noad,what,n,start) end -- list
elseif id == math_accent then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.accent if noad then process(noad,what,n,start) end -- list
- noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"accent") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"bot_accent") if noad then process(noad,what,n,start) end -- list
elseif id == math_style then
-- has a next
else
-- glue, penalty, etc
end
prev = start
- start = start.next
+ start = getnext(start)
end
end
local function processnoads(head,actions,banner)
if trace_processing then
report_processing("start %a",banner)
- process(head,actions)
+ process(tonut(head),actions)
report_processing("stop %a",banner)
else
- process(head,actions)
+ process(tonut(head),actions)
end
end
noads.process = processnoads
+--
+
+local unknowns = { }
+local checked = { } -- simple case
+local tracked = false trackers.register("fonts.missing", function(v) tracked = v end)
+local cached = table.setmetatableindex("table") -- complex case
+
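+-- errorchar counts a missing math character (reported in the statistics at the end); with
+-- the "fonts.missing" tracker set it returns a cached placeholder glyph, otherwise it falls
+-- back to a question mark (0x3F)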
+local function errorchar(font,char)
+ local done = unknowns[char]
+ if done then
+ unknowns[char] = done + 1
+ else
+ unknowns[char] = 1
+ end
+ if tracked then
+ -- slower as we check each font too, and we always replace because math has
+ -- more demands than text
+ local fake = cached[font][char]
+ if fake then
+ return fake
+ else
+ local kind, fake = fonts.checkers.placeholder(font,char)
+ if not fake or kind ~= "char" then
+ fake = 0x3F
+ end
+ cached[font][char] = fake
+ return fake
+ end
+ else
+ -- only simple checking and a report at the end, so one should take
+ -- action anyway ... we can miss a few cases but that is ok
+ -- as at least one gets reported
+ if not checked[char] then
+ if trace_normalizing then
+ report_normalizing("character %C is not available",char)
+ end
+ checked[char] = true
+ end
+ return 0x3F
+ end
+end
+
-- experiment (when not present fall back to fam 0) -- needs documentation
-- 0-2 regular
@@ -218,7 +283,7 @@ noads.process = processnoads
-- might as well do this
local families = { }
-local a_mathfamily = attributes.private("mathfamily")
+local a_mathfamily = privateattribute("mathfamily")
local boldmap = mathematics.boldmap
local familymap = { [0] =
@@ -234,36 +299,36 @@ local familymap = { [0] =
}
families[math_char] = function(pointer)
- if pointer.fam == 0 then
- local a = pointer[a_mathfamily]
+ if getfield(pointer,"fam") == 0 then
+ local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
- pointer[a_mathfamily] = 0
+ setattr(pointer,a_mathfamily,0)
if a > 5 then
- local char = pointer.char
+ local char = getchar(pointer)
local bold = boldmap[char]
local newa = a - 3
if not bold then
if trace_families then
report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
- pointer.fam = newa
- elseif not fontcharacters[font_of_family(newa)][bold] then
+ setfield(pointer,"fam",newa)
+ elseif not fontcharacters[font_of_family(newa)][bold] then
if trace_families then
report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
if newa > 3 then
- pointer.fam = newa - 3
+ setfield(pointer,"fam",newa-3)
end
else
- pointer[a_exportstatus] = char
- pointer.char = bold
+ setattr(pointer,a_exportstatus,char)
+ setfield(pointer,"char",bold)
if trace_families then
report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
end
- pointer.fam = newa
+ setfield(pointer,"fam",newa)
end
else
- local char = pointer.char
+ local char = getchar(pointer)
if not fontcharacters[font_of_family(a)][char] then
if trace_families then
report_families("no bold replacement for %C",char)
@@ -272,7 +337,7 @@ families[math_char] = function(pointer)
if trace_families then
report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
end
- pointer.fam = a
+ setfield(pointer,"fam",a)
end
end
end
@@ -280,31 +345,31 @@ families[math_char] = function(pointer)
end
families[math_delim] = function(pointer)
- if pointer.small_fam == 0 then
- local a = pointer[a_mathfamily]
+ if getfield(pointer,"small_fam") == 0 then
+ local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
- pointer[a_mathfamily] = 0
+ setattr(pointer,a_mathfamily,0)
if a > 5 then
-- no bold delimiters in unicode
a = a - 3
end
- local char = pointer.small_char
+ local char = getfield(pointer,"small_char")
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- pointer.small_fam = a
+ setfield(pointer,"small_fam",a)
elseif a > 2 then
- pointer.small_fam = a - 3
+ setfield(pointer,"small_fam",a-3)
end
- local char = pointer.large_char
+ local char = getfield(pointer,"large_char")
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- pointer.large_fam = a
+ setfield(pointer,"large_fam",a)
elseif a > 2 then
- pointer.large_fam = a - 3
+ setfield(pointer,"large_fam",a-3)
end
else
- pointer.small_fam = 0
- pointer.large_fam = 0
+ setfield(pointer,"small_fam",0)
+ setfield(pointer,"large_fam",0)
end
end
end
@@ -318,8 +383,8 @@ end
-- character remapping
-local a_mathalphabet = attributes.private("mathalphabet")
-local a_mathgreek = attributes.private("mathgreek")
+local a_mathalphabet = privateattribute("mathalphabet")
+local a_mathgreek = privateattribute("mathgreek")
processors.relocate = { }
@@ -332,8 +397,8 @@ local fallbackstyleattr = mathematics.fallbackstyleattr
local setnodecolor = nodes.tracers.colors.set
local function checked(pointer)
- local char = pointer.char
- local fam = pointer.fam
+ local char = getchar(pointer)
+ local fam = getfield(pointer,"fam")
local id = font_of_family(fam)
local tc = fontcharacters[id]
if not tc[char] then
@@ -346,37 +411,37 @@ local function checked(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer[a_exportstatus] = char -- testcase: exponentiale
- pointer.char = newchar
+ setattr(pointer,a_exportstatus,char) -- testcase: exponentiale
+ setfield(pointer,"char",newchar)
return true
end
end
end
processors.relocate[math_char] = function(pointer)
- local g = pointer[a_mathgreek] or 0
- local a = pointer[a_mathalphabet] or 0
+ local g = getattr(pointer,a_mathgreek) or 0
+ local a = getattr(pointer,a_mathalphabet) or 0
+ local char = getchar(pointer)
+ local fam = getfield(pointer,"fam")
+ local font = font_of_family(fam)
+ local characters = fontcharacters[font]
if a > 0 or g > 0 then
if a > 0 then
- pointer[a_mathgreek] = 0
+ setattr(pointer,a_mathgreek,0)
end
if g > 0 then
- pointer[a_mathalphabet] = 0
+ setattr(pointer,a_mathalphabet,0)
end
- local char = pointer.char
local newchar = remapalphabets(char,a,g)
if newchar then
- local fam = pointer.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
if characters[newchar] then
if trace_remapping then
- report_remap("char",id,char,newchar)
+ report_remap("char",font,char,newchar)
end
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer.char = newchar
+ setfield(pointer,"char",newchar)
return true
else
local fallback = fallbackstyleattr(a)
@@ -385,25 +450,28 @@ processors.relocate[math_char] = function(pointer)
if newchar then
if characters[newchar] then
if trace_remapping then
- report_remap("char",id,char,newchar," (fallback remapping used)")
+ report_remap("char",font,char,newchar," (fallback remapping used)")
end
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer.char = newchar
+ setfield(pointer,"char",newchar)
return true
elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback character)")
+ report_remap("char",font,char,newchar," fails (no fallback character)")
end
elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback remap character)")
+ report_remap("char",font,char,newchar," fails (no fallback remap character)")
end
elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback style)")
+ report_remap("char",font,char,newchar," fails (no fallback style)")
end
end
end
end
+ if not characters[char] then
+ setfield(pointer,"char",errorchar(font,char))
+ end
if trace_analyzing then
setnodecolor(pointer,"font:medi")
end
@@ -436,19 +504,19 @@ processors.render = { }
local rendersets = mathematics.renderings.numbers or { } -- store
processors.render[math_char] = function(pointer)
- local attr = pointer[a_mathrendering]
+ local attr = getattr(pointer,a_mathrendering)
if attr and attr > 0 then
- local char = pointer.char
+ local char = getchar(pointer)
local renderset = rendersets[attr]
if renderset then
local newchar = renderset[char]
if newchar then
- local fam = pointer.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
+ local fam = getfield(pointer,"fam")
+ local font = font_of_family(fam)
+ local characters = fontcharacters[font]
if characters and characters[newchar] then
- pointer.char = newchar
- pointer[a_exportstatus] = char
+ setfield(pointer,"char",newchar)
+ setattr(pointer,a_exportstatus,char)
end
end
end
@@ -470,24 +538,24 @@ end
-- todo: just replace the character by an ord noad
-- and remove the right delimiter as well
-local mathsize = attributes.private("mathsize")
+local mathsize = privateattribute("mathsize")
local resize = { } processors.resize = resize
resize[math_fence] = function(pointer)
- local subtype = pointer.subtype
+ local subtype = getsubtype(pointer)
if subtype == left_fence_code or subtype == right_fence_code then
- local a = pointer[mathsize]
+ local a = getattr(pointer,mathsize)
if a and a > 0 then
local method, size = div(a,100), a % 100
- pointer[mathsize] = 0
- local delimiter = pointer.delim
- local chr = delimiter.small_char
+ setattr(pointer,mathsize,0)
+ local delimiter = getfield(pointer,"delim")
+ local chr = getfield(delimiter,"small_char")
if chr > 0 then
- local fam = delimiter.small_fam
+ local fam = getfield(delimiter,"small_fam")
local id = font_of_family(fam)
if id > 0 then
- delimiter.small_char = mathematics.big(fontdata[id],chr,size,method)
+ setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
end
end
end
@@ -499,148 +567,23 @@ function handlers.resize(head,style,penalties)
return true
end
-
-local collapse = { } processors.collapse = collapse
-
-local mathpairs = characters.mathpairs
-
-mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
-mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
-mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
-
-mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime)
-mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime)
-
-mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
-mathpairs[0x222C] = { [0x222B] = 0x222D }
-
-mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
-mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
-
-local movesub = {
- -- primes
- [0x2032] = 0xFE932,
- [0x2033] = 0xFE933,
- [0x2034] = 0xFE934,
- [0x2057] = 0xFE957,
- -- reverse primes
- [0x2035] = 0xFE935,
- [0x2036] = 0xFE936,
- [0x2037] = 0xFE937,
-}
-
-local validpair = {
- [noad_rel] = true,
- [noad_ord] = true,
- [noad_opdisplaylimits] = true,
- [noad_oplimits] = true,
- [noad_opnolimits] = true,
-}
-
-local function movesubscript(parent,current_nucleus,current_char)
- local prev = parent.prev
- if prev and prev.id == math_noad then
- if not prev.sup and not prev.sub then
- current_nucleus.char = movesub[current_char or current_nucleus.char]
- -- {f} {'}_n => f_n^'
- local nucleus = parent.nucleus
- local sub = parent.sub
- local sup = parent.sup
- prev.sup = nucleus
- prev.sub = sub
- local dummy = copy_node(nucleus)
- dummy.char = 0
- parent.nucleus = dummy
- parent.sub = nil
- if trace_collapsing then
- report_collapsing("fixing subscript")
- end
- end
- end
-end
-
-local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
- if parent then
- if validpair[parent.subtype] then
- local current_nucleus = parent.nucleus
- if current_nucleus.id == math_char then
- local current_char = current_nucleus.char
- if not parent.sub and not parent.sup then
- local mathpair = mathpairs[current_char]
- if mathpair then
- local next_noad = parent.next
- if next_noad and next_noad.id == math_noad then
- if validpair[next_noad.subtype] then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- local newchar = mathpair[next_char]
- if newchar then
- local fam = current_nucleus.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
- if characters and characters[newchar] then
- if trace_collapsing then
- report_collapsing("%U + %U => %U",current_char,next_char,newchar)
- end
- current_nucleus.char = newchar
- local next_next_noad = next_noad.next
- if next_next_noad then
- parent.next = next_next_noad
- next_next_noad.prev = parent
- else
- parent.next = nil
- end
- parent.sup = next_noad.sup
- parent.sub = next_noad.sub
- next_noad.sup = nil
- next_noad.sub = nil
- free_node(next_noad)
- collapsepair(pointer,what,n,parent,true)
- if not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus)
- end
- end
- end
- end
- end
- end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
- end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
- end
- end
- end
- end
-end
-
-collapse[math_char] = collapsepair
-
-function noads.handlers.collapse(head,style,penalties)
- processnoads(head,collapse,"collapse")
- return true
-end
-
-- normalize scripts
-local unscript = { } noads.processors.unscript = unscript
-
+local unscript = { } noads.processors.unscript = unscript
local superscripts = characters.superscripts
local subscripts = characters.subscripts
-
-local replaced = { }
+local fractions = characters.fractions
+local replaced = { }
local function replace(pointer,what,n,parent)
pointer = parent -- we're following the parent list (chars trigger this)
- local next = pointer.next
+ local next = getnext(pointer)
local start_super, stop_super, start_sub, stop_sub
local mode = "unset"
- while next and next.id == math_noad do
- local nextnucleus = next.nucleus
- if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
- local char = nextnucleus.char
+ while next and getid(next) == math_noad do
+ local nextnucleus = getfield(next,"nucleus")
+ if nextnucleus and getid(nextnucleus) == math_char and not getfield(next,"sub") and not getfield(next,"sup") then
+ local char = getchar(nextnucleus)
local s = superscripts[char]
if s then
if not start_super then
@@ -650,8 +593,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_super = next
- next = next.next
- nextnucleus.char = s
+ next = getnext(next)
+ setfield(nextnucleus,"char",s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("superscript %C becomes %C",char,s)
@@ -666,8 +609,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_sub = next
- next = next.next
- nextnucleus.char = s
+ next = getnext(next)
+ setfield(nextnucleus,"char",s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("subscript %C becomes %C",char,s)
@@ -682,29 +625,29 @@ local function replace(pointer,what,n,parent)
end
if start_super then
if start_super == stop_super then
- pointer.sup = start_super.nucleus
+ setfield(pointer,"sup",getfield(start_super,"nucleus"))
else
local list = new_node(math_sub) -- todo attr
- list.head = start_super
- pointer.sup = list
+ setfield(list,"list",start_super)
+ setfield(pointer,"sup",list)
end
if mode == "super" then
- pointer.next = stop_super.next
+ setfield(pointer,"next",getnext(stop_super))
end
- stop_super.next = nil
+ setfield(stop_super,"next",nil)
end
if start_sub then
if start_sub == stop_sub then
- pointer.sub = start_sub.nucleus
+ setfield(pointer,"sub",getfield(start_sub,"nucleus"))
else
local list = new_node(math_sub) -- todo attr
- list.head = start_sub
- pointer.sub = list
+ setfield(list,"list",start_sub)
+ setfield(pointer,"sub",list)
end
if mode == "sub" then
- pointer.next = stop_sub.next
+ setfield(pointer,"next",getnext(stop_sub))
end
- stop_sub.next = nil
+ setfield(stop_sub,"next",nil)
end
-- we could return stop
end
@@ -713,18 +656,27 @@ unscript[math_char] = replace -- not noads as we need to recurse
function handlers.unscript(head,style,penalties)
processnoads(head,unscript,"unscript")
+-- processnoads(head,checkers,"checkers")
return true
end
-statistics.register("math script replacements", function()
- if next(replaced) then
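+-- collected turns a character usage table into a printable summary (the sorted characters
+-- plus the total count)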
+local function collected(list)
+ if list and next(list) then
local n, t = 0, { }
- for k, v in table.sortedpairs(replaced) do
+ for k, v in table.sortedpairs(list) do
n = n + v
t[#t+1] = formatters["%C"](k)
end
return formatters["% t (n=%s)"](t,n)
end
+end
+
+statistics.register("math script replacements", function()
+ return collected(replaced)
+end)
+
+statistics.register("unknown math characters", function()
+ return collected(unknowns)
end)
-- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$)
@@ -770,7 +722,7 @@ registerotffeature {
local getalternate = otf.getalternate
-local a_mathalternate = attributes.private("mathalternate")
+local a_mathalternate = privateattribute("mathalternate")
local alternate = { } -- processors.alternate = alternate
@@ -785,20 +737,20 @@ function mathematics.setalternate(fam,tag)
end
alternate[math_char] = function(pointer)
- local a = pointer[a_mathalternate]
+ local a = getattr(pointer,a_mathalternate)
if a and a > 0 then
- pointer[a_mathalternate] = 0
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ setattr(pointer,a_mathalternate,0)
+ local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
if mathalternatesattributes then
local what = mathalternatesattributes[a]
- local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
+ local alt = getalternate(tfmdata,getchar(pointer),what.feature,what.value)
if alt then
if trace_alternates then
report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
- tostring(what.feature),tostring(what.value),pointer.char,alt)
+ tostring(what.feature),tostring(what.value),getchar(pointer),alt)
end
- pointer.char = alt
+ setfield(pointer,"char",alt)
end
end
end
@@ -814,7 +766,12 @@ end
-- = we check for correction first because accessing nodes is slower
-- = the actual glyph is not that important (we can control it with numbers)
-local a_mathitalics = attributes.private("mathitalics")
+-- Italic correction in luatex math is a mess. There are all kinds of assumptions based on
+-- old and new fonts. Eventually there should be a flag that can signal to ignore all those
+-- heuristics. We want to deal with it ourselves, also in the perspective of mixed math
+-- and text.
+
+local a_mathitalics = privateattribute("mathitalics")
local italics = { }
local default_factor = 1/20
@@ -882,21 +839,22 @@ local function getcorrection(method,font,char) -- -- or character.italic -- (thi
end
+local setcolor = nodes.tracers.colors.set
+local resetcolor = nodes.tracers.colors.reset
+local italic_kern = new_kern
+local c_positive_d = "trace:db"
+local c_negative_d = "trace:dr"
+
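+-- insert_kern packs the kern and the original script material into a sub list so that the
+-- italic correction ends up in front of the sub- or superscript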
local function insert_kern(current,kern)
local sub = new_node(math_sub) -- todo: pool
local noad = new_node(math_noad) -- todo: pool
- sub.head = kern
- kern.next = noad
- noad.nucleus = current
+ setfield(sub,"list",kern)
+ setfield(kern,"next",noad)
+ setfield(noad,"nucleus",current)
return sub
end
-local setcolor = nodes.tracers.colors.set
-local italic_kern = new_kern
-local c_positive_d = "trace:db"
-local c_negative_d = "trace:dr"
-
-trackers.register("math.italics", function(v)
+registertracker("math.italics.visualize", function(v)
if v then
italic_kern = function(k,font)
local ex = 1.5 * fontexheights[font]
@@ -913,44 +871,46 @@ trackers.register("math.italics", function(v)
end)
italics[math_char] = function(pointer,what,n,parent)
- local method = pointer[a_mathitalics]
+ local method = getattr(pointer,a_mathitalics)
if method and method > 0 then
- local char = pointer.char
- local font = font_of_family(pointer.fam) -- todo: table
+ local char = getchar(pointer)
+ local font = font_of_family(getfield(pointer,"fam")) -- todo: table
local correction, visual = getcorrection(method,font,char)
if correction then
- local pid = parent.id
+ local pid = getid(parent)
local sub, sup
if pid == math_noad then
- sup = parent.sup
- sub = parent.sub
+ sup = getfield(parent,"sup")
+ sub = getfield(parent,"sub")
end
if sup or sub then
- local subtype = parent.subtype
+ local subtype = getsubtype(parent)
if subtype == noad_oplimits then
if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
+ setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
end
end
if sub then
local correction = - correction
- parent.sub = insert_kern(sub,italic_kern(correction,font))
+ setfield(parent,"sub",insert_kern(sub,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
end
end
- else
- if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
+ elseif sup then
+ if pointer ~= sub then
+ setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
end
+ else
+ -- otherwise we inject twice
end
end
else
- local next_noad = parent.next
+ local next_noad = getnext(parent)
if not next_noad then
if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
if trace_italics then
@@ -958,12 +918,12 @@ italics[math_char] = function(pointer,what,n,parent)
end
insert_node_after(parent,parent,italic_kern(correction,font))
end
- elseif next_noad.id == math_noad then
- local next_subtype = next_noad.subtype
+ elseif getid(next_noad) == math_noad then
+ local next_subtype = getsubtype(next_noad)
if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
+ local next_nucleus = getfield(next_noad,"nucleus")
+ if getid(next_nucleus) == math_char then
+ local next_char = getchar(next_nucleus)
local next_data = chardata[next_char]
local visual = next_data.visual
if visual == "it" or visual == "bi" then
@@ -1026,6 +986,147 @@ function mathematics.resetitalics()
texsetattribute(a_mathitalics,unsetvalue)
end
+-- primes and such
+
+local collapse = { } processors.collapse = collapse
+
+local mathpairs = characters.mathpairs
+
+mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
+mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
+mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
+
+mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime)
+mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime)
+
+mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
+mathpairs[0x222C] = { [0x222B] = 0x222D }
+
+mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
+mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
+
+local movesub = {
+ -- primes
+ [0x2032] = 0xFE932,
+ [0x2033] = 0xFE933,
+ [0x2034] = 0xFE934,
+ [0x2057] = 0xFE957,
+ -- reverse primes
+ [0x2035] = 0xFE935,
+ [0x2036] = 0xFE936,
+ [0x2037] = 0xFE937,
+}
+
+local validpair = {
+ [noad_rel] = true,
+ [noad_ord] = true,
+ [noad_opdisplaylimits] = true,
+ [noad_oplimits] = true,
+ [noad_opnolimits] = true,
+}
+
+local function movesubscript(parent,current_nucleus,current_char)
+ local prev = getfield(parent,"prev")
+ if prev and getid(prev) == math_noad then
+ if not getfield(prev,"sup") and not getfield(prev,"sub") then
+ -- {f} {'}_n => f_n^'
+ setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+ local nucleus = getfield(parent,"nucleus")
+ local sub = getfield(parent,"sub")
+ local sup = getfield(parent,"sup")
+ setfield(prev,"sup",nucleus)
+ setfield(prev,"sub",sub)
+ local dummy = copy_node(nucleus)
+ setfield(dummy,"char",0)
+ setfield(parent,"nucleus",dummy)
+ setfield(parent,"sub",nil)
+ if trace_collapsing then
+ report_collapsing("fixing subscript")
+ end
+ elseif not getfield(prev,"sup") then
+ -- {f} {'}_n => f_n^'
+ setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+ local nucleus = getfield(parent,"nucleus")
+ local sup = getfield(parent,"sup")
+ setfield(prev,"sup",nucleus)
+ local dummy = copy_node(nucleus)
+ setfield(dummy,"char",0)
+ setfield(parent,"nucleus",dummy)
+ if trace_collapsing then
+ report_collapsing("fixing subscript")
+ end
+ end
+ end
+end
+
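+-- collapsepair merges adjacent characters that form a pair in mathpairs (for instance prime
+-- plus prime becomes double prime) and, via movesubscript, moves a primed subscript onto the
+-- preceding noad so that {f} {'}_n effectively becomes f_n^'
+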
+local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
+ if parent then
+ if validpair[getsubtype(parent)] then
+ local current_nucleus = getfield(parent,"nucleus")
+ if getid(current_nucleus) == math_char then
+ local current_char = getchar(current_nucleus)
+ if not getfield(parent,"sub") and not getfield(parent,"sup") then
+ local mathpair = mathpairs[current_char]
+ if mathpair then
+ local next_noad = getnext(parent)
+ if next_noad and getid(next_noad) == math_noad then
+ if validpair[getsubtype(next_noad)] then
+ local next_nucleus = getfield(next_noad,"nucleus")
+ local next_char = getchar(next_nucleus)
+ if getid(next_nucleus) == math_char then
+ local newchar = mathpair[next_char]
+ if newchar then
+ local fam = getfield(current_nucleus,"fam")
+ local id = font_of_family(fam)
+ local characters = fontcharacters[id]
+ if characters and characters[newchar] then
+ if trace_collapsing then
+ report_collapsing("%U + %U => %U",current_char,next_char,newchar)
+ end
+ setfield(current_nucleus,"char",newchar)
+ local next_next_noad = getnext(next_noad)
+ if next_next_noad then
+ setfield(parent,"next",next_next_noad)
+ setfield(next_next_noad,"prev",parent)
+ else
+ setfield(parent,"next",nil)
+ end
+ setfield(parent,"sup",getfield(next_noad,"sup"))
+ setfield(parent,"sub",getfield(next_noad,"sub"))
+ setfield(next_noad,"sup",nil)
+ setfield(next_noad,"sub",nil)
+ free_node(next_noad)
+ collapsepair(pointer,what,n,parent,true)
+ -- if not nested and movesub[current_char] then
+ -- movesubscript(parent,current_nucleus,current_char)
+ -- end
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ end
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ end
+ end
+ end
+end
+
+collapse[math_char] = collapsepair
+
+function noads.handlers.collapse(head,style,penalties)
+ processnoads(head,collapse,"collapse")
+ return true
+end
+
-- variants
local variants = { }
@@ -1047,15 +1148,15 @@ local validvariants = { -- fast check on valid
}
variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = pointer.char
+ local char = getchar(pointer)
local selector = validvariants[char]
if selector then
- local next = parent.next
- if next and next.id == math_noad then
- local nucleus = next.nucleus
- if nucleus and nucleus.id == math_char and nucleus.char == selector then
+ local next = getnext(parent)
+ if next and getid(next) == math_noad then
+ local nucleus = getfield(next,"nucleus")
+ if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
local variant
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
mathvariants = mathvariants[selector]
@@ -1064,8 +1165,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
end
end
if variant then
- pointer.char = variant
- pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
+ setfield(pointer,"char",variant)
+ setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
if trace_variants then
report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
end
@@ -1074,8 +1175,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
report_variants("no variant (%U,%U)",char,selector)
end
end
- next.prev = pointer
- parent.next = next.next
+ setfield(next,"prev",pointer)
+ setfield(parent,"next",getnext(next))
free_node(next)
end
end
@@ -1108,7 +1209,7 @@ local colors = {
}
classes[math_char] = function(pointer,what,n,parent)
- local color = colors[parent.subtype]
+ local color = colors[getsubtype(parent)]
if color then
setcolor(pointer,color)
else
@@ -1121,7 +1222,7 @@ function handlers.classes(head,style,penalties)
return true
end
-trackers.register("math.classes",function(v) tasks.setaction("math","noads.handlers.classes",v) end)
+registertracker("math.classes",function(v) tasks.setaction("math","noads.handlers.classes",v) end)
-- just for me
@@ -1129,7 +1230,7 @@ function handlers.showtree(head,style,penalties)
inspect(nodes.totree(head))
end
-trackers.register("math.showtree",function(v) tasks.setaction("math","noads.handlers.showtree",v) end)
+registertracker("math.showtree",function(v) tasks.setaction("math","noads.handlers.showtree",v) end)
-- the normal builder
@@ -1184,6 +1285,20 @@ end)
-- interface
-commands.setmathalternate = mathematics.setalternate
-commands.setmathitalics = mathematics.setitalics
-commands.resetmathitalics = mathematics.resetitalics
+local implement = interfaces.implement
+
+implement {
+ name = "setmathalternate",
+ actions = mathematics.setalternate,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "setmathitalics",
+ actions = mathematics.setitalics
+}
+
+implement {
+ name = "resetmathitalics",
+ actions = mathematics.resetitalics
+}
diff --git a/tex/context/base/math-rad.mkvi b/tex/context/base/math-rad.mkvi
index c6053071e..23e056c1f 100644
--- a/tex/context/base/math-rad.mkvi
+++ b/tex/context/base/math-rad.mkvi
@@ -28,13 +28,13 @@
\def\root#1\of{\rootradical{#1}} % #2
-\unexpanded\def\sqrt{\doifnextoptionalelse\rootwithdegree\rootwithoutdegree}
+\unexpanded\def\sqrt{\doifelsenextoptionalcs\rootwithdegree\rootwithoutdegree}
-\def\styledrootradical#1#2% so that \text works ok ... \rootradical behaves somewhat weird
+\unexpanded\def\styledrootradical#1#2% so that \text works ok ... \rootradical behaves somewhat weird
{\normalexpanded{\rootradical{\normalunexpanded{#1}}{\noexpand\triggermathstyle{\normalmathstyle}\normalunexpanded{#2}}}}
-\def\rootwithdegree[#1]{\rootradical{#1}}
-\def\rootwithoutdegree {\rootradical {}}
+\unexpanded\def\rootwithdegree[#1]{\rootradical{#1}}
+\unexpanded\def\rootwithoutdegree {\rootradical {}}
%D Even older stuff:
@@ -62,7 +62,7 @@
\unexpanded\def\math_radical_handle#tag%
{\begingroup
\edef\currentmathradical{#tag}%
- \doifnextoptionalelse\math_radical_degree_yes\math_radical_degree_nop}
+ \doifelsenextoptionalcs\math_radical_degree_yes\math_radical_degree_nop}
\def\math_radical_alternative{\csname\??mathradicalalternative\mathradicalparameter\c!alternative\endcsname}
@@ -74,8 +74,8 @@
\def\math_radical_indeed#body%
{\math_radical_alternative{#body}\endgroup}
-\setvalue{\??mathradicalalternative\v!default}% #1%
- {\rootradical{\currentmathradicaldegree}}
+\setvalue{\??mathradicalalternative\v!default}% #body%
+ {\rootradical{\currentmathradicaldegree}} % {#body}}
\setvalue{\??mathradicalalternative\v!normal}#body%
{\edef\p_color{\mathradicalparameter\c!color}%
@@ -173,11 +173,11 @@
\setbox\nextbox\mathstylehbox{#body}%
% we use the \overlay variables as these are passed around anyway and
% it's more efficient than using parameters
- \edef\overlaywidth {\the\wd\nextbox}%
- \edef\overlayheight {\the\ht\nextbox}%
- \edef\overlaydepth {\the\dp\nextbox}%
- \edef\overlayoffset {\the\scratchoffset}%
- \edef\overlaylinewidth{\the\linewidth}%
+ \d_overlay_width \wd\nextbox
+ \d_overlay_height \ht\nextbox
+ \d_overlay_depth \dp\nextbox
+ \d_overlay_offset \scratchoffset
+ \d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathradicalparameter\c!color}%
%
\edef\p_mp{\mathradicalparameter\c!mp}%
@@ -233,11 +233,11 @@
{\begingroup
\scratchoffset\mathornamentparameter\c!mpoffset
\setbox\nextbox\mathstylehbox{#body}%
- \edef\overlaywidth {\the\wd\nextbox}%
- \edef\overlayheight {\the\ht\nextbox}%
- \edef\overlaydepth {\the\dp\nextbox}%
- \edef\overlayoffset {\the\scratchoffset}%
- \edef\overlaylinewidth{\the\linewidth}%
+ \d_overlay_width \wd\nextbox
+ \d_overlay_height \ht\nextbox
+ \d_overlay_depth \dp\nextbox
+ \d_overlay_offset \scratchoffset
+ \d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathornamentparameter\c!color}%
\edef\p_mp{\mathornamentparameter\c!mp}%
% the width of the graphic determines the width of the final result
diff --git a/tex/context/base/math-ren.lua b/tex/context/base/math-ren.lua
index 5c4c13369..4628ffe55 100644
--- a/tex/context/base/math-ren.lua
+++ b/tex/context/base/math-ren.lua
@@ -60,6 +60,8 @@ end
mathematics.renderset = renderset
-function commands.mathrenderset(list)
- context(renderset(list))
-end
+interfaces.implement {
+ name = "mathrenderset",
+ actions = { renderset, context },
+ arguments = "string",
+}
diff --git a/tex/context/base/math-stc.mkvi b/tex/context/base/math-stc.mkvi
index 76a07db5c..a879d157f 100644
--- a/tex/context/base/math-stc.mkvi
+++ b/tex/context/base/math-stc.mkvi
@@ -16,6 +16,12 @@
\unprotect
+%D WARNING: If the code here changes, the export needs to be checked! Stackers are rather
+%D special because the order in mathml matters, so we flush in [base under over] order. We
+%D also do some analysis at the \TEX\ end (passing the right variant). This is easy to deal
+%D with in the export, but in the pdf stream it is less trivial as we don't actually analyze
+%D there.
+
%D At some point the \MKII\ arrow mechanism has been converted to \MKIV, but we kept
%D most of the logic. We now have a more generic variant dealing with extensibles.
%D There are a few demands that we need to meet:
@@ -78,7 +84,7 @@
{\mathstylehbox{\usemathstackerscolorparameter\c!color
\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
-% these delimiters are a unuseable as theu don't center for small arguments:
+% these delimiters are unusable as they don't center for small arguments:
%
% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par
% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par
@@ -121,6 +127,18 @@
\def\math_stackers_skip_indeed#amount%
{\filledhboxk{\unsetteststrut\strut\hskip#amount}} % \dontshowstruts
+\let\math_stackers_start_tagged_mid\relax
+\let\math_stackers_start_tagged_top\relax
+\let\math_stackers_start_tagged_bot\relax
+\let\math_stackers_stop_tagged \relax
+
+\appendtoks
+ \def\math_stackers_start_tagged_mid{\dostarttagged\t!mathstackermid\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_top{\dostarttagged\t!mathstackertop\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_bot{\dostarttagged\t!mathstackerbot\empty\hbox\bgroup}%
+ \def\math_stackers_stop_tagged {\egroup\dostoptagged}%
+\to \everysetuptagging
+
%D We define a full featured command handler.
\installcorenamespace {mathstackers}
@@ -139,9 +157,10 @@
\c!mpoffset=.25\exheight,
\c!voffset=.25\exheight,
\c!hoffset=.5\emwidth,
+ \c!distance=\mathstackersparameter\c!voffset, % distance between symbol and base (can be different from voffset)
\c!minheight=\exheight,
\c!mindepth=\zeropoint,
- \c!minwidth=\emwidth,
+ \c!minwidth=.5\emwidth,
\c!order=\v!normal,
\c!strut=,
\c!color=, % todo: when I need it
@@ -203,11 +222,11 @@
\setvalue{\??mathstackersalternative\v!mp}%
{\hbox\bgroup % todo: add code key + tag
- \edef\overlaywidth {\the\scratchwidth}%
- \edef\overlayheight {\the\dimexpr\mathstackersparameter\c!mpheight}%
- \edef\overlaydepth {\the\dimexpr\mathstackersparameter\c!mpdepth}%
- \edef\overlayoffset {\the\dimexpr\mathstackersparameter\c!mpoffset}%
- \edef\overlaylinewidth{\the\linewidth}%
+ \d_overlay_width \scratchwidth
+ \d_overlay_height \dimexpr\mathstackersparameter\c!mpheight
+ \d_overlay_depth \dimexpr\mathstackersparameter\c!mpdepth
+ \d_overlay_offset \dimexpr\mathstackersparameter\c!mpoffset
+ \d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathstackersparameter\c!color}%
\edef\p_mp{\mathstackersparameter\c!mp}%
\uniqueMPgraphic{\p_mp}%
@@ -261,9 +280,11 @@
\fi}
\unexpanded\def\math_stackers_triplet#method#category#codepoint#toptext#bottomtext%
+ %{\math_stackers_start_group{#category}%
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
@@ -311,7 +332,11 @@
\fi
\scratchwidth\wd
\ifdim\wd\scratchboxone>\wd\scratchboxtwo
- \scratchboxone
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
\else\ifdim\wd\scratchboxtwo>\wd\scratchboxthree
\scratchboxtwo
\else
@@ -327,7 +352,9 @@
\advance\scratchwidth2\scratchhoffset
%
\ifcase#method\relax
+ \dostarttagged\t!mathstackermid\empty
\setbox\scratchboxthree\csname\??mathstackersalternative\p_alternative\endcsname
+ \dostoptagged
\fi
%
\ifdim\wd\scratchboxone<\scratchwidth
@@ -370,30 +397,54 @@
\fi
%
\math_stackers_normalize_three
- %
- \math_stackers_middle\bgroup
- \box\scratchboxthree
- \egroup
- %
- \ifdim\htdp\scratchboxone>\zeropoint
- \scratchoffset\dimexpr\scratchvoffset
- \kern-\scratchwidth
- \math_stackers_top\bgroup
- \raise\dimexpr\dp\scratchboxone+\scratchheight+\scratchoffset+\scratchtopoffset\relax
- \box\scratchboxone
+ % analysis
+ \ifdim\htdp\scratchboxtwo>\zeropoint
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \dosettagproperty\s!subtype\t!munderover
+ \else
+ \dosettagproperty\s!subtype\t!munder
+ \fi
+ \else
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \dosettagproperty\s!subtype\t!mover
+ \else
+ % brrr
+ \fi
+ \fi
+ % base
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
\egroup
+ \math_stackers_stop_tagged
+ % under
+ \ifdim\htdp\scratchboxtwo>\zeropoint
+ \math_stackers_start_tagged_bot
+ \scratchoffset\scratchvoffset
+ \kern-\scratchwidth
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
+ \box\scratchboxtwo
+ \egroup
+ \math_stackers_stop_tagged
+ \fi
+ % over
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \math_stackers_start_tagged_top
+ \scratchoffset\scratchvoffset
+ \kern-\scratchwidth
+ \math_stackers_top\bgroup
+ \raise\dimexpr\dp\scratchboxone+\scratchheight+\scratchoffset+\scratchtopoffset\relax
+ \box\scratchboxone
+ \egroup
+ \math_stackers_stop_tagged
\fi
%
- \ifdim\htdp\scratchboxtwo>\zeropoint
- \scratchoffset\dimexpr\scratchvoffset
- \kern-\scratchwidth
- \math_stackers_bottom\bgroup
- \lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
- \box\scratchboxtwo
- \egroup
- \fi}%
+ }%
+ \dostoptagged
\mathstackersparameter\c!right\relax
\endgroup}
+ %\math_stackers_start_group}
\unexpanded\def\definemathextensible
{\dotripleempty\math_stackers_define_normal}
@@ -449,13 +500,25 @@
\def\math_class_by_parameter_indeed#1%
{\csname\??mathclasses\ifcsname\??mathclasses#1\endcsname#1\fi\endcsname}
-\unexpanded\def\math_stackers_make_double#top#bottom#category#codepoint#codeextra#text%
+% 1 0 name n 0 | 0 1 name n 0 | 1 1 name n n
+
+\unexpanded\def\math_stackers_start_group#category%
{\begingroup
\edef\currentmathstackers{#category}%
+ \edef\p_limits{\mathstackersparameter\c!mathlimits}%
+ \ifx\p_limits\v!yes
+ \def\math_stackers_stop_group{\egroup\endgroup\limits}%
+ \mathop\bgroup
+ \else
+ \let\math_stackers_stop_group\endgroup
+ \fi}
+
+\unexpanded\def\math_stackers_make_double#top#bottom#category#codepoint#codeextra#text%
+ {\math_stackers_start_group{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
- {\edef\currentmathstackers{#category}%
- \edef\m_math_stackers_text_middle {#text}%
+ {\edef\m_math_stackers_text_middle {#text}%
%
\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
@@ -467,7 +530,7 @@
%
\math_stackers_check_unicode{#codepoint}%
%
- \ifx\currentmathtext\empty
+ \ifx\math_stackers_middle\empty
\setbox\scratchboxthree\emptyhbox
\else
\setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
@@ -480,44 +543,74 @@
\fi
\advance\scratchwidth2\scratchhoffset
%
- \setbox\scratchboxtwo \csname\??mathstackersalternative\p_alternative\endcsname
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
\setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
%
- \math_stackers_normalize_three
+ \scratchunicode#codeextra\relax
+ \ifcase\scratchunicode\else
+ \setbox\scratchboxone\csname\??mathstackersalternative\p_alternative\endcsname
+ \fi
%
- \math_stackers_middle\bgroup
- \box\scratchboxthree
- \egroup
+ \math_stackers_normalize_three
+ % analysis
+ \ifcase#bottom\relax
+ \ifcase#top\relax
+ \dosettagproperty\s!subtype\t!munderover
+ \else
+ \dosettagproperty\s!subtype\t!mover
+ \fi
+ \else
+ \ifcase#top\relax
+ \dosettagproperty\s!subtype\t!munder
+ \else
+ % brrr
+ \fi
+ \fi
+ % base
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ \math_stackers_stop_tagged
%
\ifdim\htdp\scratchboxtwo>\zeropoint
- \kern-\scratchwidth
- \ifcase#top\else
- \math_stackers_top\bgroup
- % \raise\dimexpr\scratchheight+\scratchtopoffset\relax
- \raise\dimexpr\scratchheight+\mathstackersparameter\c!voffset\relax
- \box\scratchboxtwo
- \egroup
- \fi
- \scratchunicode#codeextra\relax
- \ifcase\scratchunicode\else
+ \ifcase#bottom\else
\kern-\scratchwidth
- \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ % under
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
+ \ifcase#top\relax
+ \box\scratchboxtwo
+ \else
+ \box\scratchboxone
+ \fi
+ \egroup
+ \math_stackers_stop_tagged
\fi
- \ifcase#bottom\else
- \math_stackers_bottom\bgroup
- % \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
- \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\mathstackersparameter\c!voffset\relax
- \box\scratchboxtwo
- \egroup
+ \ifcase#top\else
+ \kern-\scratchwidth
+ % over
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
+ \box\scratchboxtwo
+ \egroup
+ \math_stackers_stop_tagged
\fi
+ %
\fi}%
+ \dostoptagged
\mathstackersparameter\c!right\relax
- \edef\p_limits{\mathstackersparameter\c!mathlimits}%
- \ifx\p_limits\v!yes
- \expandafter\endgroup\expandafter\limits
- \else
- \expandafter\endgroup
- \fi}
+ \math_stackers_stop_group}
\unexpanded\def\definemathoverextensible {\dotripleempty \math_extensibles_define_over }
\unexpanded\def\definemathunderextensible {\dotripleempty \math_extensibles_define_under}
@@ -551,14 +644,173 @@
\def\math_stackers_handle_over[#category]%
{\math_stackers_direct_double\plusone\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
-\def\math_stackers_handle_under[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_under[#category]%
{\math_stackers_direct_double\zerocount\plusone{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
-\def\math_stackers_handle_double[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_double[#category]%
{\math_stackers_direct_double\plusone\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
\def\math_stackers_direct_double#top#bottom#category#codepoint#text%
- {\math_stackers_make_double#top#bottom{#category}{#codepoint}{#text}%
+ {\math_stackers_make_double#top#bottom{#category}{#codepoint}{0}{#text}%
+ \endgroup}
+
+%D A relative new one is a combination of accents and text (as needed in mathml):
+
+\unexpanded\def\math_stackers_make_double_text#where#category#codepoint#text#extra%
+ {\math_stackers_start_group{#category}%
+ \mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
+ {\edef\currentmathstackers{#category}%
+ %
+ \edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location {\mathstackersparameter\c!location}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_alternative{\mathstackersparameter\c!alternative}%
+ %
+ \scratchleftoffset \zeropoint
+ \scratchrightoffset\zeropoint
+ %
+ \edef\m_math_stackers_text_middle{#text}%
+ \math_stackers_check_unicode{#codepoint}%
+ \scratchunicode#codepoint\relax
+ %
+ \ifx\math_stackers_middle\empty
+ \setbox\scratchboxthree\emptyhbox
+ \else
+ \setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
+ \fi
+ %
+ \ifcase#where\relax
+ \edef\m_math_stackers_text_top{#extra}%
+ \ifx\math_stackers_top\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_toptext}%
+ \fi
+ \else
+ \edef\m_math_stackers_text_bottom{#extra}%
+ \ifx\math_stackers_bottom\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_bottomtext}%
+ \fi
+ \fi
+ %
+ \scratchwidth\wd
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
+ \relax
+ \scratchdimen\mathstackersparameter\c!minwidth\relax
+ \ifdim\scratchwidth<\scratchdimen
+ \scratchwidth\scratchdimen
+ \fi
+ \advance\scratchwidth2\scratchhoffset
+ %
+ \ifdim\wd\scratchboxone<\scratchwidth
+ \setbox\scratchboxone\hbox to \scratchwidth{\hss\unhbox\scratchboxone\hss}%
+ \fi
+ \ifdim\wd\scratchboxthree<\scratchwidth
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\unhbox\scratchboxthree\hss}%
+ \fi
+ %
+ \math_stackers_normalize_three
+ % analysis
+ \dosettagproperty\s!subtype\t!munderover
+ % base
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ \math_stackers_stop_tagged
+ %
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ \kern-\scratchwidth
+ \ifcase#where\relax
+ % under
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ % over
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % toptext
+ \egroup
+ \math_stackers_stop_tagged
+ \else
+ % under
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % bottext
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ % over
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \fi
+ }%
+ \dostoptagged
+ \mathstackersparameter\c!right\relax
+ \math_stackers_stop_group}
+
+\unexpanded\def\definemathovertextextensible {\dotripleempty\math_extensibles_define_over_text }
+\unexpanded\def\definemathundertextextensible{\dotripleempty\math_extensibles_define_under_text}
+
+\def\math_extensibles_define_over_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\plusone {#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\plusone \noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\def\math_extensibles_define_under_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\zerocount{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\zerocount\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\mathovertext {\begingroup\dosingleempty\math_stackers_handle_over_text }
+\unexpanded\def\mathundertext{\begingroup\dosingleempty\math_stackers_handle_under_text }
+
+\def\math_stackers_handle_over_text[#category]%
+ {\math_stackers_direct_double_text\plusone {\iffirstargument#category\else\v!top \fi}} % will be defined later on
+
+\def\math_stackers_handle_under_text[#category]%
+ {\math_stackers_direct_double_text\zerocount{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+
+\def\math_stackers_direct_double_text#where#category#codepoint#text#extra%%
+ {\math_stackers_make_double_text#where{#category}{#codepoint}{#text}{#extra}%
\endgroup}
%D Here is a bonus macro that takes three texts. It can be used to get consistent
@@ -654,11 +906,23 @@
[\v!both]
\definemathstackers
- [vfenced]
+ [\v!vfenced]
[\v!both]
[\c!mathclass=\s!ord,
\c!mathlimits=\v!yes]
+% these are needed for mathml:
+
+% \setupmathstackers
+% [\v!both]
+% [\c!hoffset=1pt,
+% \c!voffset=1pt]
+
+\definemathstackers
+ [\v!bothtext]
+ [\v!both]
+ [\c!strut=\v!yes]
+
% These are compatibility definitions, math only.
% todo: top= bottom= middle= is nicer (compare math-fen)
@@ -761,6 +1025,24 @@
\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC]
\definemathextensible [\v!mathematics] [mtriplerel] ["2261]
+\definemathextensible [\v!mathematics] [eleftarrowfill] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [erightarrowfill] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [eleftrightarrowfill] ["27F7]
+\definemathextensible [\v!mathematics] [etwoheadrightarrowfill] ["27F9]
+\definemathextensible [\v!mathematics] [eleftharpoondownfill] ["21BD]
+\definemathextensible [\v!mathematics] [eleftharpoonupfill] ["21BC]
+\definemathextensible [\v!mathematics] [erightharpoondownfill] ["21C1]
+\definemathextensible [\v!mathematics] [erightharpoonupfill] ["21C0]
+
+\definemathextensible [\v!mathematics] [eoverbarfill] ["FE33E]
+\definemathextensible [\v!mathematics] [eunderbarfill] ["FE33F]
+\definemathextensible [\v!mathematics] [eoverbracefill] ["FE3DE]
+\definemathextensible [\v!mathematics] [eunderbracefill] ["FE3DF]
+\definemathextensible [\v!mathematics] [eoverparentfill] ["FE3DC]
+\definemathextensible [\v!mathematics] [eunderparentfill] ["FE3DD]
+\definemathextensible [\v!mathematics] [eoverbracketfill] ["FE3B4]
+\definemathextensible [\v!mathematics] [eunderbracketfill] ["FE3B5]
+
\definemathextensible [\v!text] [trel] ["002D]
\definemathextensible [\v!text] [tequal] ["003D]
\definemathextensible [\v!text] [tmapsto] ["21A6]
@@ -819,23 +1101,39 @@
% alternatively we can move the original to FE*
\definemathoverextensible [vfenced] [overbar] ["FE33E] % ["203E]
-\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
+\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
\definemathdoubleextensible [vfenced] [doublebar] ["FE33E] ["FE33F]
\definemathoverextensible [vfenced] [overbrace] ["FE3DE] % ["023DE]
-\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
+\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
\definemathdoubleextensible [vfenced] [doublebrace] ["FE3DE] ["FE3DF]
\definemathoverextensible [vfenced] [overparent] ["FE3DC] % ["023DC]
-\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
+\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
\definemathdoubleextensible [vfenced] [doubleparent] ["FE3DC] ["FE3DD]
\definemathoverextensible [vfenced] [overbracket] ["FE3B4] % ["023B4]
-\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
+\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
\definemathdoubleextensible [vfenced] [doublebracket] ["FE3B4] ["FE3B5]
% \unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
+%D For mathml:
+
+\definemathdoubleextensible [both] [overbarunderbar] ["FE33E] ["FE33F]
+\definemathdoubleextensible [both] [overbraceunderbrace] ["FE3DE] ["FE3DF]
+\definemathdoubleextensible [both] [overparentunderparent] ["FE3DC] ["FE3DD]
+\definemathdoubleextensible [both] [overbracketunderbracket] ["FE3B4] ["FE3B5]
+
+\definemathovertextextensible [bothtext] [overbartext] ["FE33E]
+\definemathundertextextensible [bothtext] [underbartext] ["FE33F]
+\definemathovertextextensible [bothtext] [overbracetext] ["FE3DE]
+\definemathundertextextensible [bothtext] [underbracetext] ["FE3DF]
+\definemathovertextextensible [bothtext] [overparenttext] ["FE3DC]
+\definemathundertextextensible [bothtext] [underparenttext] ["FE3DD]
+\definemathovertextextensible [bothtext] [overbrackettext] ["FE3B4]
+\definemathundertextextensible [bothtext] [underbrackettext] ["FE3B5]
+
%D Some bonus ones (for the moment here):
\definemathstackers
@@ -927,6 +1225,15 @@
\defineextensiblefiller [Leftrightarrowfill] ["27FA]
\defineextensiblefiller [Leftrightarrowfill] ["27FA]
+%defineextensiblefiller [overbarfill] ["FE33E] % untested
+%defineextensiblefiller [underbarfill] ["FE33F] % untested
+\defineextensiblefiller [overbracefill] ["FE3DE] % untested
+\defineextensiblefiller [underbracefill] ["FE3DF] % untested
+\defineextensiblefiller [overparentfill] ["FE3DC] % untested
+\defineextensiblefiller [underparentfill] ["FE3DD] % untested
+\defineextensiblefiller [overbracketfill] ["FE3B4] % untested
+\defineextensiblefiller [underbracketfill] ["FE3B5] % untested
+
%D Extra:
\unexpanded\edef\singlebond{\mathematics{\mathsurround\zeropoint\char\number"002D}}
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index ab5902dd4..0d900b3a1 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -6,15 +6,30 @@ if not modules then modules = { } end modules ['math-tag'] = {
license = "see context related readme files"
}
+-- todo: have a local list with local tags that then get appended
+
-- use lpeg matchers
local find, match = string.find, string.match
-local insert, remove = table.insert, table.remove
+local insert, remove, concat = table.insert, table.remove, table.concat
+
+local attributes = attributes
+local nodes = nodes
-local attributes, nodes = attributes, nodes
+local nuts = nodes.nuts
+local tonut = nuts.tonut
-local set_attributes = nodes.setattributes
-local traverse_nodes = node.traverse
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local getsubtype = nuts.getsubtype
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local set_attributes = nuts.setattributes
+local traverse_nodes = nuts.traverse
local nodecodes = nodes.nodecodes
@@ -31,15 +46,32 @@ local math_style_code = nodecodes.style -- attr style
local math_choice_code = nodecodes.choice -- attr display text script scriptscript
local math_fence_code = nodecodes.fence -- attr subtype
+local accentcodes = nodes.accentcodes
+
+local math_fixed_top = accentcodes.fixedtop
+local math_fixed_bottom = accentcodes.fixedbottom
+local math_fixed_both = accentcodes.fixedboth
+
+local kerncodes = nodes.kerncodes
+
+local fontkern_code = kerncodes.fontkern
+
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local math_code = nodecodes.math
+
+local processnoads = noads.process
local a_tagged = attributes.private('tagged')
+local a_taggedpar = attributes.private('taggedpar')
local a_exportstatus = attributes.private('exportstatus')
local a_mathcategory = attributes.private('mathcategory')
local a_mathmode = attributes.private('mathmode')
+local a_fontkern = attributes.private('fontkern')
local tags = structures.tags
@@ -55,31 +87,53 @@ local mathcodes = mathematics.codes
local ordinary_code = mathcodes.ordinary
local variable_code = mathcodes.variable
+local fromunicode16 = fonts.mappings.fromunicode16
+local font_of_family = node.family_font
+local fontcharacters = fonts.hashes.characters
+
+local report_tags = logs.reporter("structure","tags")
+
local process
local function processsubsup(start)
-- At some point we might need to add an attribute signaling the
-- super- and subscripts because TeX and MathML use a different
- -- order.
- local nucleus, sup, sub = start.nucleus, start.sup, start.sub
+ -- order. The mrows are needed to keep mn's separated.
+ local nucleus = getfield(start,"nucleus")
+ local sup = getfield(start,"sup")
+ local sub = getfield(start,"sub")
if sub then
if sup then
- start[a_tagged] = start_tagged("msubsup")
+ setattr(start,a_tagged,start_tagged("msubsup"))
+ -- start_tagged("mrow")
process(nucleus)
+ -- stop_tagged()
+ start_tagged("mrow")
process(sub)
+ stop_tagged()
+ start_tagged("mrow")
process(sup)
stop_tagged()
+ stop_tagged()
else
- start[a_tagged] = start_tagged("msub")
+ setattr(start,a_tagged,start_tagged("msub"))
+ -- start_tagged("mrow")
process(nucleus)
+ -- stop_tagged()
+ start_tagged("mrow")
process(sub)
stop_tagged()
+ stop_tagged()
end
elseif sup then
- start[a_tagged] = start_tagged("msup")
+ setattr(start,a_tagged,start_tagged("msup"))
+ -- start_tagged("mrow")
process(nucleus)
+ -- stop_tagged()
+ start_tagged("mrow")
process(sup)
stop_tagged()
+ stop_tagged()
else
process(nucleus)
end
@@ -90,254 +144,407 @@ end
-- todo: variants -> original
local actionstack = { }
+local fencesstack = { }
+
+-- glyph nodes and such can happen in under and over stuff
+
+local function getunicode(n) -- instead of getchar
+ local char = getchar(n)
+ local font = font_of_family(getfield(n,"fam")) -- font_of_family
+ local data = fontcharacters[font][char]
+ return data.unicode or char
+end
+
+-------------------
+
+local content = { }
+local found = false
+
+content[math_char_code] = function() found = true end
+
+local function hascontent(head)
+ found = false
+ processnoads(head,content,"content")
+ return found
+end
+
+--------------------
+
+local function showtag(n,id)
+ local attr = getattr(n,a_tagged)
+ report_tags("%s = %s",nodecodes[id or getid(n)],attr and taglist[attr].tagname or "?")
+end
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
+ local mtexttag = nil
while start do
- local id = start.id
- if id == math_char_code then
- local char = start.char
- -- check for code
- local a = start[a_mathcategory]
- if a then
- a = { detail = a }
+ local id = getid(start)
+
+-- showtag(start,id)
+
+ if id == glyph_code or id == disc_code then
+ if not mtexttag then
+ mtexttag = start_tagged("mtext")
end
- local code = getmathcode(char)
- if code then
- code = code[1]
+ setattr(start,a_tagged,mtexttag)
+ elseif mtexttag and id == kern_code and (getsubtype(start) == fontkern_code or getattr(start,a_fontkern)) then
+ setattr(start,a_tagged,mtexttag)
+ else
+ if mtexttag then
+ stop_tagged()
+ mtexttag = nil
end
- local tag
- if code == ordinary_code or code == variable_code then
- local ch = chardata[char]
- local mc = ch and ch.mathclass
- if mc == "number" then
- tag = "mn"
- elseif mc == "variable" or not mc then -- variable is default
- tag = "mi"
+ if id == math_char_code then
+ local char = getchar(start)
+ local code = getmathcode(char)
+ if code then
+ code = code[1]
+ end
+ local tag
+ if code == ordinary_code or code == variable_code then
+ local ch = chardata[char]
+ local mc = ch and ch.mathclass
+ if mc == "number" then
+ tag = "mn"
+ elseif mc == "variable" or not mc then -- variable is default
+ tag = "mi"
+ else
+ tag = "mo"
+ end
else
tag = "mo"
end
- else
- tag = "mo"
- end
- start[a_tagged] = start_tagged(tag,a)
- stop_tagged()
- break -- okay?
- elseif id == math_textchar_code then
- -- check for code
- local a = start[a_mathcategory]
- if a then
- start[a_tagged] = start_tagged("ms",{ detail = a })
- else
- start[a_tagged] = start_tagged("ms")
- end
- stop_tagged()
- break
- elseif id == math_delim_code then
- -- check for code
- start[a_tagged] = start_tagged("mo")
- stop_tagged()
- break
- elseif id == math_style_code then
- -- has a next
- elseif id == math_noad_code then
- processsubsup(start)
- elseif id == math_box_code or id == hlist_code or id == vlist_code then
- -- keep an eye on math_box_code and see what ends up in there
- local attr = start[a_tagged]
- local last = attr and taglist[attr]
- if last and find(last[#last],"formulacaption[:%-]") then
- -- leave alone, will nicely move to the outer level
- else
- local text = start_tagged("mtext")
- start[a_tagged] = text
- local list = start.list
- if not list then
- -- empty list
- elseif not attr then
- -- box comes from strange place
- set_attributes(list,a_tagged,text)
+ local a = getattr(start,a_mathcategory)
+ if a then
+ setattr(start,a_tagged,start_tagged(tag,{ mathcategory = a }))
else
- -- Beware, the first node in list is the actual list so we definitely
- -- need to nest. This approach is a hack, maybe I'll make a proper
- -- nesting feature to deal with this at another level. Here we just
- -- fake structure by enforcing the inner one.
- local tagdata = taglist[attr]
- local common = #tagdata + 1
- local function runner(list) -- quite inefficient
- local cache = { } -- we can have nested unboxed mess so best local to runner
- for n in traverse_nodes(list) do
- local id = n.id
- local aa = n[a_tagged]
- if aa then
- local ac = cache[aa]
- if not ac then
- local tagdata = taglist[aa]
- local extra = #tagdata
- if common <= extra then
- for i=common,extra do
- ac = restart_tagged(tagdata[i]) -- can be made faster
- end
- for i=common,extra do
- stop_tagged() -- can be made faster
+ setattr(start,a_tagged,start_tagged(tag)) -- todo: a_mathcategory
+ end
+ stop_tagged()
+ break -- okay?
+ elseif id == math_textchar_code then -- or id == glyph_code
+ -- check for code
+ local a = getattr(start,a_mathcategory)
+ if a then
+ setattr(start,a_tagged,start_tagged("ms",{ mathcategory = a })) -- mtext
+ else
+ setattr(start,a_tagged,start_tagged("ms")) -- mtext
+ end
+ stop_tagged()
+ break
+ elseif id == math_delim_code then
+ -- check for code
+ setattr(start,a_tagged,start_tagged("mo"))
+ stop_tagged()
+ break
+ elseif id == math_style_code then
+ -- has a next
+ elseif id == math_noad_code then
+ processsubsup(start)
+ elseif id == math_box_code or id == hlist_code or id == vlist_code then
+ -- keep an eye on math_box_code and see what ends up in there
+ local attr = getattr(start,a_tagged)
+ local specification = taglist[attr]
+ local tag = specification.tagname
+ if tag == "formulacaption" then
+ -- skip
+ elseif tag == "mstacker" then
+ local list = getfield(start,"list")
+ if list then
+ process(list)
+ end
+ else
+ if tag ~= "mstackertop" and tag ~= "mstackermid" and tag ~= "mstackerbot" then
+ tag = "mtext"
+ end
+ local text = start_tagged(tag)
+ setattr(start,a_tagged,text)
+ local list = getfield(start,"list")
+ if not list then
+ -- empty list
+ elseif not attr then
+ -- box comes from strange place
+ set_attributes(list,a_tagged,text) -- only the first node ?
+ else
+ -- Beware, the first node in list is the actual list so we definitely
+ -- need to nest. This approach is a hack, maybe I'll make a proper
+ -- nesting feature to deal with this at another level. Here we just
+ -- fake structure by enforcing the inner one.
+ --
+ -- todo: have a local list with local tags that then get appended
+ --
+ local tagdata = specification.taglist
+ local common = #tagdata + 1
+ local function runner(list,depth) -- quite inefficient
+ local cache = { } -- we can have nested unboxed mess so best local to runner
+ local keep = nil
+                            -- local keep = { } -- in case we might need to move keep outside
+ for n in traverse_nodes(list) do
+ local id = getid(n)
+ local mth = id == math_code and getsubtype(n)
+ if mth == 0 then
+ -- insert(keep,text)
+ keep = text
+ text = start_tagged("mrow")
+ common = common + 1
+ end
+ local aa = getattr(n,a_tagged)
+ if aa then
+ local ac = cache[aa]
+ if not ac then
+ local tagdata = taglist[aa].taglist
+ local extra = #tagdata
+ if common <= extra then
+ for i=common,extra do
+ ac = restart_tagged(tagdata[i]) -- can be made faster
+ end
+ for i=common,extra do
+ stop_tagged() -- can be made faster
+ end
+ else
+ ac = text
end
- else
- ac = text
+ cache[aa] = ac
end
- cache[aa] = ac
+ setattr(n,a_tagged,ac)
+ else
+ setattr(n,a_tagged,text)
+ end
+
+ if id == hlist_code or id == vlist_code then
+ runner(getlist(n),depth+1)
+ elseif id == glyph_code then
+ runner(getfield(n,"components"),depth+1) -- this should not be needed
+ elseif id == disc_code then
+ runner(getfield(n,"pre"),depth+1) -- idem
+ runner(getfield(n,"post"),depth+1) -- idem
+ runner(getfield(n,"replace"),depth+1) -- idem
+ end
+ if mth == 1 then
+ stop_tagged()
+ -- text = remove(keep)
+ text = keep
+ common = common - 1
end
- n[a_tagged] = ac
- else
- n[a_tagged] = text
end
- if id == hlist_code or id == vlist_code then
- runner(n.list)
+ end
+ runner(list,0)
+ end
+ stop_tagged()
+ end
+ elseif id == math_sub_code then -- normally a hbox
+ local list = getfield(start,"list")
+ if list then
+ local attr = getattr(start,a_tagged)
+ local last = attr and taglist[attr]
+ if last then
+ local tag = last.tagname
+ local detail = last.detail
+ if tag == "maction" then
+ if detail == "" then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ elseif actionstack[#actionstack] == action then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ else
+ insert(actionstack,action)
+ setattr(start,a_tagged,start_tagged("mrow",{ detail = action }))
+ process(list)
+ stop_tagged()
+ remove(actionstack)
end
+ elseif tag == "mstacker" then -- or tag == "mstackertop" or tag == "mstackermid" or tag == "mstackerbot" then
+ -- looks like it gets processed twice
+-- do we still end up here ?
+ setattr(start,a_tagged,restart_tagged(attr)) -- so we just reuse the attribute
+ process(list)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
end
+ else -- never happens, we're always document
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
end
- runner(list)
end
+ elseif id == math_fraction_code then
+ local num = getfield(start,"num")
+ local denom = getfield(start,"denom")
+ local left = getfield(start,"left")
+ local right = getfield(start,"right")
+ if left then
+ setattr(left,a_tagged,start_tagged("mo"))
+ process(left)
+ stop_tagged()
+ end
+ setattr(start,a_tagged,start_tagged("mfrac"))
+ process(num)
+ process(denom)
stop_tagged()
- end
- elseif id == math_sub_code then
- local list = start.list
- if list then
- local attr = start[a_tagged]
- local last = attr and taglist[attr]
- local action = last and match(last[#last],"maction:(.-)%-")
- if action and action ~= "" then
- if actionstack[#actionstack] == action then
- start[a_tagged] = start_tagged("mrow")
- process(list)
+ if right then
+ setattr(right,a_tagged,start_tagged("mo"))
+ process(right)
+ stop_tagged()
+ end
+ elseif id == math_choice_code then
+ local display = getfield(start,"display")
+ local text = getfield(start,"text")
+ local script = getfield(start,"script")
+ local scriptscript = getfield(start,"scriptscript")
+ if display then
+ process(display)
+ end
+ if text then
+ process(text)
+ end
+ if script then
+ process(script)
+ end
+ if scriptscript then
+ process(scriptscript)
+ end
+ elseif id == math_fence_code then
+ local delim = getfield(start,"delim")
+ local subtype = getfield(start,"subtype")
+ if subtype == 1 then
+ -- left
+ local properties = { }
+ insert(fencesstack,properties)
+ setattr(start,a_tagged,start_tagged("mfenced",{ properties = properties })) -- needs checking
+ if delim then
+ start_tagged("ignore")
+ local chr = getfield(delim,"small_char")
+ if chr ~= 0 then
+ properties.left = chr
+ end
+ process(delim)
stop_tagged()
- else
- insert(actionstack,action)
- start[a_tagged] = start_tagged("mrow",{ detail = action })
- process(list)
+ end
+ start_tagged("mrow") -- begin of subsequence
+ elseif subtype == 2 then
+ -- middle
+ if delim then
+ start_tagged("ignore")
+ local top = fencesstack[#fencesstack]
+ local chr = getfield(delim,"small_char")
+ if chr ~= 0 then
+ local mid = top.middle
+ if mid then
+ mid[#mid+1] = chr
+ else
+ top.middle = { chr }
+ end
+ end
+ process(delim)
stop_tagged()
- remove(actionstack)
end
- else
- start[a_tagged] = start_tagged("mrow")
- process(list)
+ stop_tagged() -- end of subsequence
+ start_tagged("mrow") -- begin of subsequence
+ elseif subtype == 3 then
+ local properties = remove(fencesstack)
+ if not properties then
+ report_tags("missing right fence")
+ properties = { }
+ end
+ if delim then
+ start_tagged("ignore")
+ local chr = getfield(delim,"small_char")
+ if chr ~= 0 then
+ properties.right = chr
+ end
+ process(delim)
+ stop_tagged()
+ end
+ stop_tagged() -- end of subsequence
stop_tagged()
+ else
+ -- can't happen
end
- end
- elseif id == math_fraction_code then
- local num, denom, left, right = start.num, start.denom, start.left, start.right
- if left then
- left[a_tagged] = start_tagged("mo")
- process(left)
- stop_tagged()
- end
- start[a_tagged] = start_tagged("mfrac")
- process(num)
- process(denom)
- stop_tagged()
- if right then
- right[a_tagged] = start_tagged("mo")
- process(right)
- stop_tagged()
- end
- elseif id == math_choice_code then
- local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
- if display then
- process(display)
- end
- if text then
- process(text)
- end
- if script then
- process(script)
- end
- if scriptscript then
- process(scriptscript)
- end
- elseif id == math_fence_code then
- local delim = start.delim
- local subtype = start.subtype
- if subtype == 1 then
- -- left
- start[a_tagged] = start_tagged("mfenced")
- if delim then
- start[a_tagged] = start_tagged("mleft")
- process(delim)
+ elseif id == math_radical_code then
+ local left = getfield(start,"left")
+ local degree = getfield(start,"degree")
+ if left then
+ start_tagged("ignore")
+ process(left) -- root symbol, ignored
stop_tagged()
end
- elseif subtype == 2 then
- -- middle
- if delim then
- start[a_tagged] = start_tagged("mmiddle")
- process(delim)
+ if degree and hascontent(degree) then
+ setattr(start,a_tagged,start_tagged("mroot"))
+ processsubsup(start)
+ process(degree)
stop_tagged()
- end
- elseif subtype == 3 then
- if delim then
- start[a_tagged] = start_tagged("mright")
- process(delim)
+ else
+ setattr(start,a_tagged,start_tagged("msqrt"))
+ processsubsup(start)
stop_tagged()
end
- stop_tagged()
- else
- -- can't happen
- end
- elseif id == math_radical_code then
- local left, degree = start.left, start.degree
- if left then
- start_tagged("")
- process(left) -- root symbol, ignored
- stop_tagged()
- end
- if degree then -- not good enough, can be empty mlist
- start[a_tagged] = start_tagged("mroot")
- processsubsup(start)
- process(degree)
- stop_tagged()
- else
- start[a_tagged] = start_tagged("msqrt")
- processsubsup(start)
- stop_tagged()
- end
- elseif id == math_accent_code then
- local accent, bot_accent = start.accent, start.bot_accent
- if bot_accent then
- if accent then
- start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
+ elseif id == math_accent_code then
+ local accent = getfield(start,"accent")
+ local bot_accent = getfield(start,"bot_accent")
+ local subtype = getsubtype(start)
+ if bot_accent then
+ if accent then
+ setattr(start,a_tagged,start_tagged("munderover", {
+ accent = true,
+ top = getunicode(accent),
+ bottom = getunicode(bot_accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ process(accent)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("munder", {
+ accent = true,
+ bottom = getunicode(bot_accent),
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ stop_tagged()
+ end
+ elseif accent then
+ setattr(start,a_tagged,start_tagged("mover", {
+ accent = true,
+ top = getunicode(accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ }))
processsubsup(start)
- process(bot_accent)
process(accent)
stop_tagged()
else
- start[a_tagged] = start_tagged("munder",{ detail = "accent" })
processsubsup(start)
- process(bot_accent)
- stop_tagged()
end
- elseif accent then
- start[a_tagged] = start_tagged("mover",{ detail = "accent" })
- processsubsup(start)
- process(accent)
+ elseif id == glue_code then
+ -- local spec = getfield(start,"spec")
+ -- setattr(start,a_tagged,start_tagged("mspace",{ width = getfield(spec,"width") }))
+ setattr(start,a_tagged,start_tagged("mspace"))
stop_tagged()
else
- processsubsup(start)
+            setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[id] }))
+ stop_tagged()
end
- elseif id == glue_code then
- start[a_tagged] = start_tagged("mspace")
- stop_tagged()
- else
- start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
- stop_tagged()
end
- start = start.next
+ start = getnext(start)
+ end
+ if mtexttag then
+ stop_tagged()
end
end
function noads.handlers.tags(head,style,penalties)
- local v_math = start_tagged("math")
- local v_mrow = start_tagged("mrow")
- local v_mode = head[a_mathmode]
- head[a_tagged] = v_math
- head[a_tagged] = v_mrow
- tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
+ head = tonut(head)
+ local v_mode = getattr(head,a_mathmode)
+ local v_math = start_tagged("math", { mode = v_mode == 1 and "display" or "inline" })
+ setattr(head,a_tagged,start_tagged("mrow"))
process(head)
stop_tagged()
stop_tagged()
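
Most of the math-tag.lua changes above migrate from userdata node access (start.sup, start[a_tagged] = ...) to the direct node ("nuts") accessors: tonut at the entry point, then getfield, getnext, getattr and setattr instead of indexing. A minimal sketch of that access pattern, assuming a LuaTeX run where nodes.nuts is available; the helper itself is illustrative:

    local nuts    = nodes.nuts
    local tonut   = nuts.tonut
    local getnext = nuts.getnext
    local setattr = nuts.setattr

    -- set one attribute on every node of a list (illustrative helper)
    local function tagall(head,attribute,value)
        local current = tonut(head)            -- userdata node -> direct node
        while current do
            setattr(current,attribute,value)   -- instead of current[attribute] = value
            current = getnext(current)         -- instead of current.next
        end
    end
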
diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua
index 6d9a9f903..a683e02cf 100644
--- a/tex/context/base/math-vfu.lua
+++ b/tex/context/base/math-vfu.lua
@@ -41,6 +41,8 @@ local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
+local chardata = characters.data
+
local mathencodings = allocate()
fonts.encodings.math = mathencodings -- better is then: fonts.encodings.vectors
local vfmath = allocate()
@@ -432,6 +434,14 @@ local function repeated(main,characters,id,size,unicode,u,n,private,fraction) --
end
end
+local function cloned(main,characters,id,size,source,target)
+ local data = characters[source]
+ if data then
+ characters[target] = data
+ return true
+ end
+end
+
-- we use the fact that context defines the smallest sizes first .. a real dirty and ugly hack
local data_of_smaller = nil
@@ -451,10 +461,11 @@ function vfmath.addmissing(main,id,size)
-- here id is the index in fonts (normally 14 or so) and that slot points to self
- local characters = main.characters
- local shared = main.shared
- local variables = main.goodies.mathematics and main.goodies.mathematics.variables or { }
+ local characters = main.characters
+ local shared = main.shared
+ local variables = main.goodies.mathematics and main.goodies.mathematics.variables or { }
local joinrelfactor = variables.joinrelfactor or 3
+
for i=0x7A,0x7D do
make(main,characters,id,size,i,1)
end
@@ -536,23 +547,24 @@ function vfmath.addmissing(main,id,size)
-- 21CB leftrightharpoon
-- 21CC rightleftharpoon
- stack (main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
- jointwo (main,characters,id,size,0x22C8,0x022B3,joinrelfactor,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
- jointwo (main,characters,id,size,0x22A7,0x0007C,joinrelfactor,0x0003D) -- \mathrel|\joinrel=
- jointwo (main,characters,id,size,0x2260,0x00338,0,0x0003D) -- \not\equal
- jointwo (main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
- jointwo (main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
- jointwo (main,characters,id,size,0x2209,0x00338,0,0x02208) -- \not\in
- jointwo (main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔)
+ stack(main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
+
+ jointwo(main,characters,id,size,0x22C8,0x022B3,joinrelfactor,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
+ jointwo(main,characters,id,size,0x22A7,0x0007C,joinrelfactor,0x0003D) -- \mathrel|\joinrel=
+ jointwo(main,characters,id,size,0x2260,0x00338,0,0x0003D) -- \not\equal
+ jointwo(main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
+ jointwo(main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
+ jointwo(main,characters,id,size,0x2209,0x00338,0,0x02208) -- \not\in
+ jointwo(main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔)
repeated(main,characters,id,size,0x222C,0x222B,2,0xFF800,1/3)
repeated(main,characters,id,size,0x222D,0x222B,3,0xFF810,1/3)
- characters[0xFE325] = fastcopy(characters[0x2032])
-
- raise (main,characters,id,size,0x02032,0xFE325,1,id_of_smaller) -- prime
- raise (main,characters,id,size,0x02033,0xFE325,2,id_of_smaller) -- double prime
- raise (main,characters,id,size,0x02034,0xFE325,3,id_of_smaller) -- triple prime
+ if cloned(main,characters,id,size,0x2032,0xFE325) then
+ raise(main,characters,id,size,0x2032,0xFE325,1,id_of_smaller) -- prime
+ raise(main,characters,id,size,0x2033,0xFE325,2,id_of_smaller) -- double prime
+ raise(main,characters,id,size,0x2034,0xFE325,3,id_of_smaller) -- triple prime
+ end
-- there are more (needs discussion first):
@@ -882,7 +894,7 @@ function vfmath.define(specification,set,goodies)
local ru = rv[unicode]
if not ru then
if trace_virtual then
- report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname)
+ report_virtual("unicode slot %U has no index %H in vector %a for font %a (%S)",unicode,index,vectorname,fontname,chardata[unicode].description)
elseif not already_reported then
report_virtual("the mapping is incomplete for %a at %p",name,size)
already_reported = true
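
The new cloned helper guards the prime handling in math-vfu.lua: the raised variants are only built when the source glyph (U+2032) could actually be copied into the private slot, so fonts lacking it no longer get bogus entries. Restated compactly, with the unused size arguments dropped (characters maps code points to glyph descriptions, as above):

    local function cloned(characters,source,target)
        local data = characters[source]
        if data then
            characters[target] = data   -- share the description of the source glyph
            return true
        end
        -- falls through to nil when the source glyph is missing
    end

    -- if cloned(characters,0x2032,0xFE325) then
    --     -- safe to derive the raised prime variants from the 0xFE325 copy
    -- end
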
diff --git a/tex/context/base/meta-fig.mkiv b/tex/context/base/meta-fig.mkiv
index 46dc4cffc..bf37aa7bf 100644
--- a/tex/context/base/meta-fig.mkiv
+++ b/tex/context/base/meta-fig.mkiv
@@ -47,6 +47,9 @@
\c!command=\meta_process_graphic_instance{\fittingpageparameter\c!instance},
\c!instance=]
+\unexpanded\def\setupMPpage
+ {\setupfittingpage[MPpage]}
+
%D \macros
%D {MPfigure}
%D
@@ -56,7 +59,7 @@
{\bgroup
\getfiguredimensions[#1]% [\c!object=\v!no] already set
\startMPcode
- externalfigure "#1"
+ draw externalfigure "#1"
xscaled \the\dimexpr\figurewidth \relax\space % must be points
yscaled \the\dimexpr\figureheight\relax\space % must be points
#2 ;
diff --git a/tex/context/base/meta-fnt.lua b/tex/context/base/meta-fnt.lua
index cf47f0c92..95bdfa6d9 100644
--- a/tex/context/base/meta-fnt.lua
+++ b/tex/context/base/meta-fnt.lua
@@ -21,32 +21,18 @@ mpfonts.version = mpfonts.version or 1.20
mpfonts.inline = true
mpfonts.cache = containers.define("fonts", "mp", mpfonts.version, true)
-metapost.fonts = metapost.fonts or { }
+metapost.fonts = metapost.fonts or { }
+
+local function unicodetoactualtext(...)
+ unicodetoactualtext = backends.codeinjections.unicodetoactualtext
+ return unicodetoactualtext(...)
+end
-- a few glocals
local characters, descriptions = { }, { }
local factor, code, slot, width, height, depth, total, variants, bbox, llx, lly, urx, ury = 100, { }, 0, 0, 0, 0, 0, 0, true, 0, 0, 0, 0
--- The next variant of ActualText is what Taco and I could come up with
--- eventually. As of September 2013 Acrobat copies okay, Summatra copies a
--- question mark, pdftotext injects an extra space and Okular adds a
--- newline plus space.
-
--- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [<feff>] TJ % t EMC ET"](code)
-
-local function topdf(n,code)
- if n < 0x10000 then
- return formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ % t EMC ET"](n,code)
- else
- return formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ % t EMC ET"](n/1024+0xD800,n%1024+0xDC00,code)
- end
-end
-
--- local function topdf(n,code)
--- return formatters["/Span << /ActualText (CTX) >> BDC % t EMC"](code)
--- end
-
local flusher = {
startfigure = function(_chr_,_llx_,_lly_,_urx_,_ury_)
code = { }
@@ -68,6 +54,7 @@ local flusher = {
end,
stopfigure = function()
local cd = chardata[n]
+ local code = unicodetoactualtext(slot,concat(code," ")) or ""
descriptions[slot] = {
-- unicode = slot,
name = cd and cd.adobename,
@@ -79,7 +66,7 @@ local flusher = {
if inline then
characters[slot] = {
commands = {
- { "special", "pdf: " .. topdf(slot,code) },
+ { "special", "pdf:" .. code },
}
}
else
@@ -88,13 +75,14 @@ local flusher = {
{
"image",
{
- stream = topdf(slot,code),
+ stream = code,
bbox = { 0, -depth * 65536, width * 65536, height * 65536 }
},
},
}
}
end
+ code = nil -- no need to keep that
end
}
@@ -261,7 +249,16 @@ function metapost.fonts.define(specification)
} )
end
-commands.definemetafont = metapost.fonts.define
+interfaces.implement {
+ name = "definemetafont",
+ actions = metapost.fonts.define,
+ arguments = {
+ {
+ { "fontname" },
+ { "filename" },
+ }
+ }
+}
-- metapost.fonts.define {
-- fontname = "bidi",
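
The old topdf formatter is gone; the ActualText wrapper now comes from backends.codeinjections.unicodetoactualtext, bound lazily: the local stub overwrites itself with the backend function on its first call, so meta-fnt.lua can load before the backend does. A self-contained sketch of that rebinding idiom (the provider table is a placeholder, not a ConTeXt API):

    local someprovider = {                       -- stands in for a late-available module
        convert = function(s) return "<" .. s .. ">" end,
    }

    local function convert(...)
        convert = someprovider.convert           -- rebind on first use
        return convert(...)                      -- and delegate this call as well
    end

    print(convert("a"), convert("b"))            -- both calls end up in the real function
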
diff --git a/tex/context/base/meta-fnt.mkiv b/tex/context/base/meta-fnt.mkiv
index 603fcf14d..e54c0be0a 100644
--- a/tex/context/base/meta-fnt.mkiv
+++ b/tex/context/base/meta-fnt.mkiv
@@ -21,11 +21,11 @@
{\dotripleempty\meta_font_define}
\def\meta_font_define[#1][#2][#3]%
- {\ctxcommand{definemetafont {
- fontname = "#1",
- filename = "#2"
+ {\clf_definemetafont
+ fontname {#1}%
+ filename {#2}%
% no #3 settings yet (compose, instances)
- }}}
+ \relax}
% \startluacode
% metapost.fonts.define { fontname = "bidi-symbols", filename = "bidi-symbols.mp" }
diff --git a/tex/context/base/meta-fun.lua b/tex/context/base/meta-fun.lua
index 78ee25baf..7aaaf2818 100644
--- a/tex/context/base/meta-fun.lua
+++ b/tex/context/base/meta-fun.lua
@@ -26,7 +26,7 @@ function metafun.topath(t,connector)
if type(ti) == "string" then
context(ti)
else
- context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
+ context("(%F,%F)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
end
end
else
@@ -47,7 +47,7 @@ function metafun.interpolate(f,b,e,s,c)
else
done = true
end
- context("(%s,%s)",i,d(i))
+ context("(%F,%F)",i,d(i))
end
end
if not done then
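
The template change from (%s,%s) to (%F,%F) is about number rendering: with %s the coordinates go through tostring, which can yield scientific notation that MetaPost cannot parse, while a float directive keeps them as plain decimals. The underlying issue, shown with stock string.format rather than ConTeXt's formatter (so the directive here is not the %F used above):

    local x = 0.00001
    print(string.format("(%s,%s)",     x, 2*x))    -- (1e-05,2e-05)       not valid MetaPost numbers
    print(string.format("(%.6f,%.6f)", x, 2*x))    -- (0.000010,0.000020) plain decimals
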
diff --git a/tex/context/base/meta-imp-dum.mkiv b/tex/context/base/meta-imp-dum.mkiv
index 1daff57ac..e6ccc234c 100644
--- a/tex/context/base/meta-imp-dum.mkiv
+++ b/tex/context/base/meta-imp-dum.mkiv
@@ -60,6 +60,8 @@
% \stopuseMPgraphic
\startuseMPgraphic{figure:placeholder}{width,height,reduction,color}
+ begingroup ;
+ save w, h, d, r, p, c, b ;
numeric w, h, d, r ; path p ;
if cmykcolor \MPvar{color} :
cmykcolor c, b ; b := (0,0,0,0)
@@ -80,6 +82,7 @@
withcolor r[c randomized(.3,.9),b] ;
endfor ;
clip currentpicture to p ;
+ endgroup ;
\stopuseMPgraphic
\defineoverlay
diff --git a/tex/context/base/meta-imp-tab.mkiv b/tex/context/base/meta-imp-tab.mkiv
new file mode 100644
index 000000000..a4affbea4
--- /dev/null
+++ b/tex/context/base/meta-imp-tab.mkiv
@@ -0,0 +1,73 @@
+%D \module
+%D [ file=meta-tab,
+%D version=2003.03.21, % very old but now with splitter
+%D title=\METAPOST\ Graphics,
+%D subtitle=Dummy (External) Graphics,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startuseMPgraphic{TallyBar}
+ height := (10/12) * LineHeight ;
+ span := ( 4/10) * LineHeight ;
+ drift := ( 1/10) * LineHeight ;
+ def d = (uniformdeviate drift) enddef ;
+ for i := 1 upto \MPvar{n} :
+ draw
+ if (i mod 5)=0 :
+ ((-d-4.5span,d)--(+d-0.5span,height-d))
+ else :
+ ((-d,+d)--(+d,height-d))
+ fi
+ shifted (span*i,d-drift) withpen pencircle ;
+ endfor ;
+ currentpicture := currentpicture scaled .75 ;
+\stopuseMPgraphic
+
+\setupMPvariables
+ [TallyBar]
+ [n=0]
+
+\unexpanded\def\tallynumeral#1%
+ {\dontleavehmode
+ \lower.25\exheight\hbox{\useMPgraphic{TallyBar}{n=#1}}}
+
+\unexpanded\def\tallynumerals#1%
+ {\dontleavehmode
+ \begingroup
+ \scratchcounter#1\relax
+ \doloop
+ {\ifnum\scratchcounter>\plusfive
+ \lower.25\exheight\hbox{\useMPgraphic{TallyBar}{n=5}}%
+ \advance\scratchcounter-\plusfive
+ \space
+ \else
+ \lower.25\exheight\hbox{\useMPgraphic{TallyBar}{n=\the\scratchcounter}}%
+ \exitloop
+ \fi}%
+ \endgroup}
+
+\let\FunnyBar\tallynumeral
+
+\defineconversion[tally][\tallynumerals]
+
+\continueifinputfile{meta-imp-tab.mkiv}
+
+\starttext
+
+ \starttabulate[|pr|c|]
+ \NC \tallynumerals {24} \NC \NR
+ \NC \tallynumerals {12} \NC \times \NR
+ \HL
+ \NC \tallynumerals{288} \NC = \NR
+ \stoptabulate
+
+ \tallynumerals{"FFFF}
+
+\stoptext
+
+\endinput
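
\tallynumerals above peels off full groups of five marks in a \doloop and draws the remainder in a final pass. The same bookkeeping, sketched in Lua just to make the grouping explicit (not part of the module):

    -- full groups of five tally marks plus one partial group for the rest
    local function tallygroups(n)
        local groups = { }
        while n > 5 do
            groups[#groups+1] = 5
            n = n - 5
        end
        groups[#groups+1] = n
        return groups
    end

    -- tallygroups(12) --> { 5, 5, 2 }, i.e. 5 + 5 + 2 strokes
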
diff --git a/tex/context/base/meta-imp-txt.mkiv b/tex/context/base/meta-imp-txt.mkiv
index bcfc5513f..7069d21a4 100644
--- a/tex/context/base/meta-imp-txt.mkiv
+++ b/tex/context/base/meta-imp-txt.mkiv
@@ -104,7 +104,7 @@
\unexpanded\def\getshapecharacteristics
{\doglobal\increment\currentshapetext
- \doifdefinedelse{parlines:\currentshapetext}
+ \doifelsedefined{parlines:\currentshapetext}
{\global\parlines \getvalue{parlines:\currentshapetext}%
\global\chardef\parfirst \getvalue{parfirst:\currentshapetext}%
\global\parvoffset \getvalue{parvoffset:\currentshapetext}%
@@ -152,7 +152,7 @@
%%%%%%% RotFont still needs to be defined
-\doifundefined{RotFont}{\definefont[RotFont][RegularBold]}
+\doifundefined{RotFont}{\definefont[RotFont][RegularBold*default]}
\unexpanded\def\processfollowingtoken#1% strut added
{\appendtoks#1\to\MPtoks
@@ -174,9 +174,7 @@
{\vbox\bgroup
\forgetall
\dontcomplain
- \startMPenvironment
- \doifundefined{RotFont}{\definefont[RotFont][RegularBold]}%
- \stopMPenvironment
+ \doifundefined{RotFont}{\definefont[RotFont][RegularBold*default]}%
\MPtoks\emptytoks
\resetMPdrawing
\startMPdrawing
@@ -225,6 +223,7 @@
withpen pencircle scaled .50pt withcolor green ;
fi ;
endfor ;
+% fill boundingbox currentpicture ;
\stopMPdrawing
\MPdrawingdonetrue
\getMPdrawing
diff --git a/tex/context/base/meta-ini.lua b/tex/context/base/meta-ini.lua
index 713ba3d5d..8f7131263 100644
--- a/tex/context/base/meta-ini.lua
+++ b/tex/context/base/meta-ini.lua
@@ -15,34 +15,6 @@ local context = context
metapost = metapost or { }
--- for the moment downward compatible
-
-local report_metapost = logs.reporter ("metapost")
-local status_metapost = logs.messenger("metapost")
-
-local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible
-
-local function action(name,foundname)
- status_metapost("library %a is loaded",name)
- context.startreadingfile()
- context.input(foundname)
- context.stopreadingfile()
-end
-
-local function failure(name)
- report_metapost("library %a is unknown or invalid",name)
-end
-
-function commands.useMPlibrary(name)
- commands.uselibrary {
- name = name,
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = true,
- }
-end
-
-- experimental
local colorhash = attributes.list[attributes.private('color')]
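
The block removed above was the downward-compatible commands.useMPlibrary, a thin wrapper around commands.uselibrary: try a list of filename patterns, run an action on the first match, report a failure otherwise, and do it only once per name. A generic sketch of that loader shape (the resolver below simply probes the file system and stands in for the real one):

    local loaded = { }

    local function findfile(name)                -- stand-in resolver, not resolvers.findfile
        local f = io.open(name,"r")
        if f then f:close() return name end
    end

    local function uselibrary(specification)
        local name = specification.name
        if specification.onlyonce and loaded[name] then
            return
        end
        for i=1,#specification.patterns do
            local foundname = findfile(string.format(specification.patterns[i],name))
            if foundname then
                loaded[name] = true
                return specification.action(name,foundname)
            end
        end
        return specification.failure(name)
    end
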
diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv
index 28ba9e901..299f37cef 100644
--- a/tex/context/base/meta-ini.mkiv
+++ b/tex/context/base/meta-ini.mkiv
@@ -106,7 +106,7 @@
{\dosinglegroupempty\meta_start_extensions}
\def\meta_start_extensions#1#2\stopMPextensions % we could use buffers instead
- {\ctxlua{metapost.setextensions("#1",\!!bs#2\!!es)}}
+ {\clf_setmpextensions{#1}{#2}}
\let\stopMPextensions\relax
@@ -155,12 +155,12 @@
\global\t_meta_inclusions\expandafter{\the\t_meta_inclusions#2}%
\let\currentMPinstance\m_meta_saved_instance}
-\def\meta_preset_definitions
- {\edef\overlaywidth {\overlaywidth \space}%
- \edef\overlayheight {\overlayheight \space}%
- \edef\overlaylinewidth{\overlaylinewidth\space}%
- \edef\currentwidth {\the\hsize \space}%
- \edef\currentheight {\the\vsize \space}}
+% \def\meta_preset_definitions
+% {\edef\overlaywidth {\overlaywidth \space}%
+% \edef\overlayheight {\overlayheight \space}%
+% \edef\overlaylinewidth{\overlaylinewidth\space}}
+
+\let\meta_preset_definitions\relax
\installcommandhandler \??mpinstance {MPinstance} \??mpinstance
@@ -217,13 +217,22 @@
\def\currentMPinstance {\defaultMPinstance}
\def\currentMPformat {\currentMPinstance}
-\defineMPinstance[metafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[extrafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[doublefun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!double]
-\defineMPinstance[decimalfun][\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!decimal]
-\defineMPinstance[mprun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[metapost] [\s!format=mpost]
-\defineMPinstance[nofun] [\s!format=mpost]
+\defineMPinstance[metafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[extrafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[lessfun] [\s!format=metafun]
+\defineMPinstance[doublefun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!double]
+\defineMPinstance[binaryfun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!binary]
+\defineMPinstance[decimalfun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!decimal]
+
+\defineMPinstance[mprun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+
+\defineMPinstance[metapost] [\s!format=mpost]
+\defineMPinstance[nofun] [\s!format=mpost]
+\defineMPinstance[doublepost] [\s!format=mpost,\c!method=\s!double]
+\defineMPinstance[binarypost] [\s!format=mpost,\c!method=\s!binary]
+\defineMPinstance[decimalpost][\s!format=mpost,\c!method=\s!decimal]
+
+%defineMPinstance[megapost] [\s!format=mpost,\c!method=\s!decimal]
\newconditional\c_meta_include_initializations
@@ -264,7 +273,10 @@
\ifx\p_setups\empty \else
\setups[\p_setups]%
\fi
- \useMPinstancestyleandcolor\c!textstyle\c!textcolor}
+ \useMPinstancestyleparameter\c!textstyle}
+
+\def\meta_set_current_color
+ {\useMPinstancecolorparameter\c!textcolor}
\def\meta_stop_current_graphic
{\global\t_meta_definitions\emptytoks
@@ -291,20 +303,19 @@
\forgetall
\edef\p_extensions{\MPinstanceparameter\s!extensions}%
\meta_process_graphic_start
- \normalexpanded{\noexpand\ctxlua{metapost.graphic {
- instance = "\currentMPinstance",
- format = "\currentMPformat",
- data = \!!bs#1;\!!es,
- initializations = \!!bs\meta_flush_current_initializations\!!es,
-% useextensions = "\MPinstanceparameter\s!extensions",
-\ifx\p_extensions\v!yes
- extensions = \!!bs\ctxcommand{getmpextensions("\currentMPinstance")}\!!es,
-\fi
- inclusions = \!!bs\meta_flush_current_inclusions\!!es,
- definitions = \!!bs\meta_flush_current_definitions\!!es,
- figure = "\MPaskedfigure",
- method = "\MPinstanceparameter\c!method",
- }}}%
+ \normalexpanded{\noexpand\clf_mpgraphic
+ instance {\currentMPinstance}%
+ format {\currentMPformat}%
+ data {#1;}%
+ initializations {\meta_flush_current_initializations}%
+ \ifx\p_extensions\v!yes
+ extensions {\clf_getmpextensions{\currentMPinstance}}% goes through tex again
+ \fi
+ inclusions {\meta_flush_current_inclusions}%
+ definitions {\meta_flush_current_definitions}%
+ figure {\MPaskedfigure}%
+ method {\MPinstanceparameter\c!method}%
+ \relax}%
\meta_process_graphic_stop
\meta_stop_current_graphic}
@@ -325,23 +336,43 @@
\newif\ifsetMPrandomseed \setMPrandomseedtrue % false by default
+\let\theMPrandomseed\empty
+
\def\setMPrandomseed
- {\let\theMPrandomseed\empty
- \ifsetMPrandomseed \ifx\getrandomnumber\undefined \else
- \getrandomnumber\localMPseed\zerocount{4095}%
- \def\theMPrandomseed{randomseed:=\localMPseed}%
- \fi\fi}
+ {\ifsetMPrandomseed
+ \def\theMPrandomseed{randomseed:=\mprandomnumber;}%
+ \else
+ \let\theMPrandomseed\empty
+ \fi}
%D Calling up previously defined graphics.
+% \def\includeMPgraphic#1% gets expanded !
+% {\ifcsname\??mpgraphic#1\endcsname
+% \csname\??mpgraphic#1\endcsname ; % ; is safeguard
+% \fi}
+%
+% \unexpanded\def\meta_enable_include % public
+% {\let\meta_handle_use_graphic \thirdofthreearguments
+% \let\meta_handle_reusable_graphic\thirdofthreearguments}
+%
+% but ... we want this too:
+%
+% \startuseMPgraphic{x}
+% draw textext("\externalfigure[foo.pdf]") ;
+% \stopuseMPgraphic
+%
+% \useMPgraphic{x}
+%
+% so we cannot overload unless we \let back the original meanings for
+% each graphic ... a better solution is:
+
\def\includeMPgraphic#1% gets expanded !
{\ifcsname\??mpgraphic#1\endcsname
- \csname\??mpgraphic#1\endcsname ; % ; is safeguard
+ \doubleexpandafter\fourthoffourarguments\csname\??mpgraphic#1\endcsname ; % ; is safeguard
\fi}
-\unexpanded\def\meta_enable_include % public
- {\let\meta_handle_use_graphic \thirdofthreearguments
- \let\meta_handle_reusable_graphic\thirdofthreearguments}
+\let\meta_enable_include\relax
%D Drawings (stepwise built):
@@ -397,20 +428,13 @@
\unexpanded\def\meta_grab_clip_path#1#2#3#4% #4 is alternative (called in backend code)
{\begingroup
- \edef\width {#2\space}\let\overlaywidth \width
- \edef\height{#3\space}\let\overlayheight\height
- \ifcsname\??mpclip#1\endcsname
- \meta_start_current_graphic
- \xdef\MPclippath{\normalexpanded{\noexpand\ctxlua{metapost.theclippath {
- instance = "\currentMPinstance",
- format = "\currentMPformat",
- data = \!!bs\getvalue{\??mpclip#1}\!!es,
- initializations = \!!bs\meta_flush_current_initializations\!!es,
- useextensions = "\MPinstanceparameter\s!extensions",
- inclusions = \!!bs\meta_flush_current_inclusions\!!es,
- method = "\MPinstanceparameter\c!method",
- }}}}%
- \meta_stop_current_graphic
+ \edef\width {#2}% \let\overlaywidth \width
+ \edef\height{#3}% \let\overlayheight\height
+ \d_overlay_width #2\onebasepoint
+ \d_overlay_height#3\onebasepoint
+ \edef\currentMPclip{#1}%
+ \ifcsname\??mpclip\currentMPclip\endcsname
+ \meta_grab_clip_path_indeed
\ifx\MPclippath\empty
\xdef\MPclippath{#4}%
\fi
@@ -419,6 +443,19 @@
\fi
\endgroup}
+\def\meta_grab_clip_path_indeed
+ {\meta_start_current_graphic
+ \normalexpanded{\noexpand\clf_mpsetclippath
+ instance {\currentMPinstance}%
+ format {\currentMPformat}%
+ data {\csname\??mpclip\currentMPclip\endcsname}%
+ initializations {\meta_flush_current_initializations}%
+ useextensions {\MPinstanceparameter\s!extensions}%
+ inclusions {\meta_flush_current_inclusions}%
+ method {\MPinstanceparameter\c!method}%
+ \relax}%
+ \meta_stop_current_graphic}
+
%D Since we want labels to follow the document settings, we
%D also set the font related variables.
@@ -426,7 +463,7 @@
{\dontleavehmode
\begingroup
\definedfont[#1]%
- \hskip\cldcontext{fonts.hashes.parameters[font.current()].designsize}sp\relax
+ \hskip\clf_currentdesignsize\scaledpoint\relax
\endgroup}
\definefontsynonym[MetafunDefault][Regular*default]
@@ -434,7 +471,7 @@
\startMPinitializations % scale is not yet ok
defaultfont:="\truefontname{MetafunDefault}";
% defaultscale:=\the\bodyfontsize/10pt; % only when hard coded 10pt
- defaultscale:=1;
+ % defaultscale:=1;
\stopMPinitializations
% watch out, this is a type1 font because mp can only handle 8 bit fonts
@@ -567,7 +604,7 @@
\def\meta_prepare_variable_yes
{\defconvertedcommand\ascii\m_meta_current_variable % otherwise problems
- \doifcolorelse \ascii % with 2\bodyfontsize
+ \doifelsecolor \ascii % with 2\bodyfontsize
{\meta_prepare_variable_color}
{\begingroup
\setbox\b_meta_variable_box\hbox{\scratchdimen\m_meta_current_variable sp}%
@@ -596,7 +633,13 @@
%D \stoptyping
\def\overlaystamp % watch the \MPcolor, since colors can be redefined
- {\overlaywidth:\overlayheight:\overlaydepth:\overlayoffset:\overlaylinewidth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
+ {\the\d_overlay_width :%
+ \the\d_overlay_height :%
+ \the\d_overlay_depth :%
+ \the\d_overlay_offset :%
+ \the\d_overlay_linewidth:%
+ \MPcolor\overlaycolor :% % todo, expand once \m_overlaycolor
+ \MPcolor\overlaylinecolor} % todo, expand once \m_overlaylinecolor
%D A better approach is to let additional variables play a role
%D in determining the uniqueness. In the next macro, the
@@ -779,19 +822,25 @@
\let\reuseMPgraphic \useMPgraphic % we can save a setup here if needed
\let\reusableMPgraphic\reuseMPgraphic % we can save a setup here if needed
-\unexpanded\def\meta_enable_include
- {\let\meta_handle_use_graphic \thirdofthreearguments
- \let\meta_handle_reusable_graphic\thirdofthreearguments}
-
%D \macros
%D {startuniqueMPpagegraphic,uniqueMPpagegraphic}
%D
%D Experimental.
-\def\m_meta_page_prefix{\doifoddpageelse oe}
+\def\m_meta_page_prefix{\doifelseoddpage oe}
+
+% \def\overlaypagestamp
+% {\m_meta_page_prefix:\overlaywidth:\overlayheight:\overlaydepth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
\def\overlaypagestamp
- {\m_meta_page_prefix:\overlaywidth:\overlayheight:\overlaydepth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
+ {\m_meta_page_prefix :%
+ \the\d_overlay_width :%
+ \the\d_overlay_height :%
+ \the\d_overlay_depth :%
+ \the\d_overlay_offset :%
+ \the\d_overlay_linewidth:%
+ \MPcolor\overlaycolor :%
+ \MPcolor\overlaylinecolor}
\unexpanded\def\startuniqueMPpagegraphic
{\dodoublegroupempty\meta_start_unique_page_graphic}
@@ -855,12 +904,21 @@
% makempy.registerfile(filename)
-\startMPinitializations
- boolean collapse_data; collapse_data:=true;
- def data_mpd_file = "\MPdataMPDfile" enddef ;
- def data_mpo_file = "\MPdataMPOfile" enddef ;
- def data_mpy_file = "\MPdataMPYfile" enddef ;
-\stopMPinitializations
+% % hm. extensions get expanded so the wrong names then end up in format
+%
+% \startMPinitializations
+% boolean collapse_data; collapse_data:=true ; % will be obsolete
+% def data_mpd_file = "\MPdataMPDfile" enddef ; % will go via lua
+% def data_mpo_file = "\MPdataMPOfile" enddef ;
+% def data_mpy_file = "\MPdataMPYfile" enddef ;
+% \stopMPinitializations
+
+\startMPextensions
+ boolean collapse_data; collapse_data:=true ; % will be obsolete
+ def data_mpd_file = "\noexpand\MPdataMPDfile" enddef ; % will go via lua
+ def data_mpo_file = "\noexpand\MPdataMPOfile" enddef ;
+ def data_mpy_file = "\noexpand\MPdataMPYfile" enddef ;
+\stopMPextensions
\unexpanded\def\getMPdata
{\let\MPdata\secondoftwoarguments
@@ -880,7 +938,7 @@
\def\meta_process_buffer[#1]%
{\meta_begin_graphic_group{#1}%
- \meta_process_graphic{\ctxcommand{feedback("\currentMPgraphicname")}}%
+ \meta_process_graphic{\clf_feedback{\currentMPgraphicname}}%
\meta_end_graphic_group}
\unexpanded\def\runMPbuffer
@@ -924,15 +982,15 @@
\else\ifx\m_meta_option\!!plustoken
#2% % use in main doc too
\fi\fi\fi
- \ctxlua{metapost.tex.set(\!!bs\detokenize{#2}\!!es)}}
+ \clf_mptexset{\detokenize{#2}}}
\let\stopMPenvironment\relax
\unexpanded\def\resetMPenvironment
- {\ctxlua{metapost.tex.reset()}}
+ {\clf_mptexreset}
\unexpanded\def\useMPenvironmentbuffer[#1]%
- {\ctxlua{metapost.tex.set(buffers.content("#1"))}}
+ {\clf_mpsetfrombuffer{#1}}
%D This command takes \type {[reset]} as optional
%D argument.
@@ -1054,7 +1112,7 @@
%D Loading specific \METAPOST\ related definitions is
%D accomplished by:
-\unexpanded\def\useMPlibrary[#1]{\ctxcommand{useMPlibrary(\!!bs#1\!!es)}}
+\unexpanded\def\useMPlibrary[#1]{\clf_useMPlibrary{#1}}
%D \macros
%D {setMPtext, MPtext, MPstring, MPbetex}
@@ -1084,46 +1142,62 @@
% \setupcolors[state=stop,conversion=never] % quite tricky ... type mismatch
-% A dirty trick, ** in colo-ini.lua (mpcolor).
+% \startMPextensions
+% color OverlayColor,OverlayLineColor;
+% \stopMPextensions
-\def\m_meta_colo_initializations{% no vardef, goes wrong with spot colors
- def OverlayLineColor=\MPcolor{\overlaylinecolor} enddef;
- def OverlayColor=\MPcolor{\overlaycolor} enddef;
-}
+\startMPinitializations
+ CurrentLayout:="\currentlayout";
+\stopMPinitializations
-\startMPextensions
- color OverlayColor,OverlayLineColor;
-\stopMPextensions
+% \startMPinitializations
+% OverlayWidth:=\overlaywidth;
+% OverlayHeight:=\overlayheight;
+% OverlayDepth:=\overlaydepth;
+% OverlayLineWidth:=\overlaylinewidth;
+% OverlayOffset:=\overlayoffset;
+% \stopMPinitializations
+
+% A dirty trick, ** in colo-ini.lua (mpcolor). We cannot use a vardef, because
+% that fails with spot colors.
\startMPinitializations
- CurrentLayout:="\currentlayout";
- OverlayWidth:=\overlaywidth;
- OverlayHeight:=\overlayheight;
- OverlayDepth:=\overlaydepth;
- OverlayLineWidth:=\overlaylinewidth;
- OverlayOffset:=\overlayoffset;
- %
- \m_meta_colo_initializations
- %
- BaseLineSkip:=\the\baselineskip;
- LineHeight:=\the\baselineskip;
- BodyFontSize:=\the\bodyfontsize;
- %
- TopSkip:=\the\topskip;
- StrutHeight:=\strutheight;
- StrutDepth:=\strutdepth;
- %
- CurrentWidth:=\the\hsize;
- CurrentHeight:=\the\vsize;
- %
- EmWidth:=\the\emwidth;
- ExHeight:=\the\exheight;
- %
- PageNumber:=\the\pageno;
- RealPageNumber:=\the\realpageno;
- LastPageNumber:= \lastpage;
+ def OverlayLineColor=\ifx\overlaylinecolor\empty black \else\MPcolor{\overlaylinecolor} \fi enddef;
+ def OverlayColor =\ifx\overlaycolor \empty black \else\MPcolor{\overlaycolor} \fi enddef;
\stopMPinitializations
+% \newcount\c_overlay_colormodel
+% \newcount\c_overlay_color
+% \newcount\c_overlay_transparency
+% \newcount\c_overlay_linecolor
+% \newcount\c_overlay_linetransparency
+
+% \appendtoks
+% \c_overlay_colormodel \attribute\colormodelattribute
+% \c_overlay_color \colo_helpers_inherited_current_ca\overlaycolor
+% \c_overlay_transparency \colo_helpers_inherited_current_ta\overlaycolor
+% \c_overlay_linecolor \colo_helpers_inherited_current_ca\overlaylinecolor
+% \c_overlay_linetransparency\colo_helpers_inherited_current_ta\overlaylinecolor
+% \to \everyMPgraphic
+
+% \startMPinitializations
+% BaseLineSkip:=\the\baselineskip;
+% LineHeight:=\the\baselineskip;
+% BodyFontSize:=\the\bodyfontsize;
+% %
+% TopSkip:=\the\topskip;
+% StrutHeight:=\strutheight;
+% StrutDepth:=\strutdepth;
+% %
+% CurrentWidth:=\the\hsize;
+% CurrentHeight:=\the\vsize;
+% HSize:=\the\hsize ;
+% VSize:=\the\vsize ;
+% %
+% EmWidth:=\the\emwidth;
+% ExHeight:=\the\exheight;
+% \stopMPinitializations
+
\appendtoks
\disablediscretionaries
\disablecompoundcharacters
@@ -1153,11 +1227,6 @@
\let \} \letterclosebrace
\to \everyMPgraphic
-\startMPinitializations
- prologues:=0;
- mpprocset:=1;
-\stopMPinitializations
-
%D \macros
%D {PDFMPformoffset}
%D
@@ -1184,21 +1253,36 @@
enddef;
\stopMPextensions
-\startMPinitializations
- HSize:=\the\hsize ;
- VSize:=\the\vsize ;
-\stopMPinitializations
-
\startMPextensions
vardef ForegroundBox =
unitsquare xysized(HSize,VSize)
enddef ;
- PageFraction := 1 ;
\stopMPextensions
-\startMPinitializations
- PageFraction := if \lastpage>1: (\realfolio-1)/(\lastpage-1) else: 1 fi ;
-\stopMPinitializations
+% \startMPextensions
+% PageFraction := 1 ;
+% \stopMPextensions
+
+% \startMPinitializations
+% PageFraction := if \lastpage>1: (\realfolio-1)/(\lastpage-1) else: 1 fi ;
+% \stopMPinitializations
+
+\startMPdefinitions {metapost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+\startMPdefinitions {binarypost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+\startMPdefinitions {decimalpost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+\startMPdefinitions {doublepost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+
+% \startMPdefinitions {nofun}
+% if unknown context_bare : input mp-bare.mpiv ; fi ;
+% \stopMPdefinitions
%D And some more. These are not really needed since we
%D don't use the normal figure inclusion macros any longer.
@@ -1359,12 +1443,16 @@
\newconstant\MPcolormethod
\appendtoks
- \ctxlua{metapost.setoutercolor(\number\MPcolormethod,\number\attribute\colormodelattribute,\number\attribute\colorattribute,\number\dogetattribute{transparency})}%
+ \clf_mpsetoutercolor
+ \MPcolormethod\space
+ \attribute\colormodelattribute\space
+ \attribute\colorattribute\space
+ \dogetattribute{transparency}\relax
\to \everyMPgraphic
-\startMPinitializations
- defaultcolormodel := \ifcase\MPcolormethod1\or1\or3\else3\fi;
-\stopMPinitializations
+% \startMPinitializations
+% defaultcolormodel := \ifcase\MPcolormethod1\or1\or3\else3\fi;
+% \stopMPinitializations
%D macros
%D {mprunvar,mpruntab,mprunset}
@@ -1390,9 +1478,9 @@
%D $(x,y) = (\MPrunset{point}{,})$
%D \stoptyping
-\def\MPrunvar #1{\ctxcommand{mprunvar("#1")}} \let\mprunvar\MPrunvar
-\def\MPruntab#1#2{\ctxcommand{mprunvar("#1",\number#2)}} \let\mpruntab\MPruntab
-\def\MPrunset#1#2{\ctxcommand{mprunvar("#1","#2")}} \let\mprunset\MPrunset
+\def\MPrunvar #1{\clf_mprunvar{#1}} \let\mprunvar\MPrunvar
+\def\MPruntab#1#2{\clf_mpruntab{#1}#2\relax} \let\mpruntab\MPruntab % #2 is number
+\def\MPrunset#1#2{\clf_mprunset{#1}{#2}} \let\mprunset\MPrunset
%D We also provide an outputless run:
@@ -1461,7 +1549,7 @@
\edef\p_mpy{\directMPgraphicsparameter{mpy}}%
\ifx\p_mpy\empty \else
\let\MPdataMPYfile\p_mpy
- \ctxlua{metapost.makempy.registerfile("\p_mpy")}%
+ \clf_registermpyfile{\p_mpy}%
\fi
\to \everysetupMPgraphics
diff --git a/tex/context/base/meta-pag.mkiv b/tex/context/base/meta-pag.mkiv
index a25353b18..6b6abd211 100644
--- a/tex/context/base/meta-pag.mkiv
+++ b/tex/context/base/meta-pag.mkiv
@@ -23,82 +23,76 @@
%D pagebody looks.
\startMPextensions
- boolean PageStateAvailable,OnRightPage,InPageBody;
+ boolean PageStateAvailable;
PageStateAvailable:=true;
\stopMPextensions
-\startMPinitializations
- OnRightPage:=true;
- InPageBody:=\ifinpagebody true \else false \fi;
-\stopMPinitializations
-
-\startMPinitializations
- CurrentColumn:=\number\mofcolumns;
- NOfColumns:=\number\nofcolumns;
- % todo: ColumnDistance
-\stopMPinitializations
-
% maybe always set as frozen anyway
-\startMPinitializations
- % def LoadPageState =
- OnRightPage:=\MPonrightpage;
- OnOddPage:=\MPonoddpage;
- RealPageNumber:=\the\realpageno;
- PageNumber:=\the\pageno;
- NOfPages:=\lastpage;
- PaperHeight:=\the\paperheight;
- PaperWidth:=\the\paperwidth;
- PrintPaperHeight:=\the\printpaperheight;
- PrintPaperWidth:=\the\printpaperwidth;
- TopSpace:=\the\topspace;
- BottomSpace:=\the\bottomspace;
- BackSpace:=\the\backspace;
- CutSpace:=\the\cutspace;
- MakeupHeight:=\the\makeupheight;
- MakeupWidth:=\the\makeupwidth;
- TopHeight:=\the\topheight;
- TopDistance:=\the\topdistance;
- HeaderHeight:=\the\headerheight;
- HeaderDistance:=\the\headerdistance;
- TextHeight:=\the\textheight;
- FooterDistance:=\the\footerdistance;
- FooterHeight:=\the\footerheight;
- BottomDistance:=\the\bottomdistance;
- BottomHeight:=\the\bottomheight;
- LeftEdgeWidth:=\the\leftedgewidth;
- LeftEdgeDistance:=\the\leftedgedistance;
- LeftMarginWidth:=\the\leftmarginwidth;
- LeftMarginDistance:=\the\leftmargindistance;
- TextWidth:=\the\textwidth ;
- RightMarginDistance:=\the\rightmargindistance;
- RightMarginWidth:=\the\rightmarginwidth;
- RightEdgeDistance:=\the\rightedgedistance;
- RightEdgeWidth:=\the\rightedgewidth;
- InnerMarginDistance:=\the\innermargindistance;
- InnerMarginWidth:=\the\innermarginwidth;
- OuterMarginDistance:=\the\outermargindistance;
- OuterMarginWidth:=\the\outermarginwidth;
- InnerEdgeDistance:=\the\inneredgedistance;
- InnerEdgeWidth:=\the\inneredgewidth;
- OuterEdgeDistance:=\the\outeredgedistance;
- OuterEdgeWidth:=\the\outeredgewidth;
- PageOffset:=\the\pagebackgroundoffset;
- PageDepth:=\the\pagebackgrounddepth;
- LayoutColumns:=\the\layoutcolumns;
- LayoutColumnDistance:=\the\layoutcolumndistance;
- LayoutColumnWidth:=\the\layoutcolumnwidth;
- def LoadPageState =
- % now always set .. this dummy can move to the mp code
- enddef;
-\stopMPinitializations
+% \startMPinitializations
+% PaperHeight:=\the\paperheight;
+% PaperWidth:=\the\paperwidth;
+% PrintPaperHeight:=\the\printpaperheight;
+% PrintPaperWidth:=\the\printpaperwidth;
+% TopSpace:=\the\topspace;
+% BottomSpace:=\the\bottomspace;
+% BackSpace:=\the\backspace;
+% CutSpace:=\the\cutspace;
+% MakeupHeight:=\the\makeupheight;
+% MakeupWidth:=\the\makeupwidth;
+% TopHeight:=\the\topheight;
+% TopDistance:=\the\topdistance;
+% HeaderHeight:=\the\headerheight;
+% HeaderDistance:=\the\headerdistance;
+% TextHeight:=\the\textheight;
+% FooterDistance:=\the\footerdistance;
+% FooterHeight:=\the\footerheight;
+% BottomDistance:=\the\bottomdistance;
+% BottomHeight:=\the\bottomheight;
+% LeftEdgeWidth:=\the\leftedgewidth;
+% LeftEdgeDistance:=\the\leftedgedistance;
+% LeftMarginWidth:=\the\leftmarginwidth;
+% LeftMarginDistance:=\the\leftmargindistance;
+% TextWidth:=\the\textwidth;
+% RightMarginDistance:=\the\rightmargindistance;
+% RightMarginWidth:=\the\rightmarginwidth;
+% RightEdgeDistance:=\the\rightedgedistance;
+% RightEdgeWidth:=\the\rightedgewidth;
+% InnerMarginDistance:=\the\innermargindistance;
+% InnerMarginWidth:=\the\innermarginwidth;
+% OuterMarginDistance:=\the\outermargindistance;
+% OuterMarginWidth:=\the\outermarginwidth;
+% InnerEdgeDistance:=\the\inneredgedistance;
+% InnerEdgeWidth:=\the\inneredgewidth;
+% OuterEdgeDistance:=\the\outeredgedistance;
+% OuterEdgeWidth:=\the\outeredgewidth;
+% PageOffset:=\the\pagebackgroundoffset;
+% PageDepth:=\the\pagebackgrounddepth;
+% LayoutColumns:=\the\layoutcolumns;
+% LayoutColumnDistance:=\the\layoutcolumndistance;
+% LayoutColumnWidth:=\the\layoutcolumnwidth;
+% %
+% boolean OnRightPage,OnOddPage,InPageBody;
+% %
+% OnRightPage:=\MPonrightpage;
+% OnOddPage:=\MPonoddpage;
+% InPageBody:=\ifinpagebody true \else false \fi;
+% %
+% RealPageNumber:=\the\realpageno;
+% PageNumber:=\the\pageno;
+% NOfPages:=\lastpage;
+% LastPageNumber:=\lastpage;
+% %
+% CurrentColumn:=\number\mofcolumns;
+% NOfColumns:=\number\nofcolumns;
+% \stopMPinitializations
\def\MPonrightpage{true}
\def\MPonoddpage {true}
\def\freezeMPpagelayout
{\edef\MPonrightpage{\doifbothsides {tru}{tru}{fals}e}%
- \edef\MPonoddpage {\doifoddpageelse {tru}{fals}e}}
+ \edef\MPonoddpage {\doifelseoddpage {tru}{fals}e}}
%D We need to freeze the pagelayout before the backgrounds
%D are build, because the overlay will temporarily become
@@ -121,7 +115,7 @@
% \to \everyMPgraphic
\prependtoks
- \calculatereducedvsizes % this is really needed
+ \calculatereducedvsizes % bah, this is really needed
\to \everyMPgraphic
\protect \endinput
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 46e20ad31..3cbff63b1 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -38,8 +38,8 @@ local mptopdf = metapost.mptopdf
mptopdf.nofconverted = 0
-local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues
-local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues
+local f_translate = formatters["1 0 0 0 1 %F %F cm"] -- no %s due to 1e-035 issues
+local f_concat = formatters["%F %F %F %F %F %F cm"] -- no %s due to 1e-035 issues
local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false
@@ -329,7 +329,7 @@ handlers[50] = function() report_mptopdf("skipping special %s",50) end
--end of not supported
function mps.setrgbcolor(r,g,b) -- extra check
- r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
+ r, g, b = tonumber(r), tonumber(g), tonumber(b) -- needed when we use lpeg
if r == 0.0123 and g < 0.1 then
g, b = round(g*10000), round(b*10000)
local s = specials[b]
@@ -411,7 +411,8 @@ function mps.fshow(str,font,scale) -- lpeg parser
mps.textext(font,scale,lpegmatch(package,str))
end
-local cnumber = lpegC(number)
+----- cnumber = lpegC(number)
+local cnumber = number/tonumber -- we now expect numbers (feeds into %F)
local cstring = lpegC(nonspace)
local specials = (lpegP("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials
@@ -572,3 +573,11 @@ statistics.register("mps conversion time",function()
return nil
end
end)
+
+-- interface
+
+interfaces.implement {
+ name = "convertmpstopdf",
+ arguments = "string",
+ actions = mptopdf.convertmpstopdf
+}
diff --git a/tex/context/base/meta-pdf.mkiv b/tex/context/base/meta-pdf.mkiv
index 3469419d4..50eb1dd72 100644
--- a/tex/context/base/meta-pdf.mkiv
+++ b/tex/context/base/meta-pdf.mkiv
@@ -46,7 +46,7 @@
\forgetall
\offinterlineskip
\setbox\MPbox\vbox\bgroup
- \ctxlua{metapost.mptopdf.convertmpstopdf("\MPfilename")}%
+ \clf_convertmpstopdf{\MPfilename}%
\removeunwantedspaces % not that needed
\egroup
\finalizeMPbox
diff --git a/tex/context/base/meta-pdh.mkiv b/tex/context/base/meta-pdh.mkiv
index b65fe6ac6..f3db5b388 100644
--- a/tex/context/base/meta-pdh.mkiv
+++ b/tex/context/base/meta-pdh.mkiv
@@ -374,17 +374,17 @@
%D
%D \startbuffer
%D \startuniqueMPgraphic{CircularShade}
-%D path p ; p := unitsquare xscaled \overlaywidth yscaled \overlayheight ;
+%D path p ; p := unitsquare xscaled OverlayWidth yscaled OverlayHeight ;
%D circular_shade(p,0,.2red,.9red) ;
%D \stopuniqueMPgraphic
%D
%D \startuniqueMPgraphic{LinearShade}
-%D path p ; p := unitsquare xscaled \overlaywidth yscaled \overlayheight ;
+%D path p ; p := unitsquare xscaled OverlayWidth yscaled OverlayHeight ;
%D linear_shade(p,0,.2blue,.9blue) ;
%D \stopuniqueMPgraphic
%D
%D \startuniqueMPgraphic{DuotoneShade}
-%D path p ; p := unitsquare xscaled \overlaywidth yscaled \overlayheight ;
+%D path p ; p := unitsquare xscaled OverlayWidth yscaled OverlayHeight ;
%D linear_shade(p,2,.5green,.5red) ;
%D \stopuniqueMPgraphic
%D \stopbuffer
@@ -429,8 +429,8 @@
%D
%D \def\SomeShade#1#2#3#4#5%
%D {\startuniqueMPgraphic{Shade-#1}
-%D width := \overlaywidth ;
-%D height := \overlayheight ;
+%D width := OverlayWidth ;
+%D height := OverlayHeight ;
%D path p ; p := unitsquare xscaled width yscaled height ;
%D #2_shade(p,#3,#4,#5) ;
%D \stopuniqueMPgraphic
diff --git a/tex/context/base/meta-tex.lua b/tex/context/base/meta-tex.lua
index 7a4123abb..1008e45c0 100644
--- a/tex/context/base/meta-tex.lua
+++ b/tex/context/base/meta-tex.lua
@@ -6,12 +6,15 @@ if not modules then modules = { } end modules ['meta-tex'] = {
license = "see context related readme files"
}
+local tostring = tostring
local format, gsub, find, match = string.format, string.gsub, string.find, string.match
local formatters = string.formatters
local P, S, R, C, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.match
metapost = metapost or { }
+local implement = interfaces.implement
+
-- local left = P("[")
-- local right = P("]")
-- local space = P(" ")
@@ -39,6 +42,12 @@ function metapost.escaped(str)
context(lpegmatch(pattern,str))
end
+implement {
+ name = "metapostescaped",
+ actions = metapost.escaped,
+ arguments = "string"
+}
+
local simplify = true
-- local function strip(n,e)
@@ -108,10 +117,6 @@ local enumber = number * S("eE") * number
local cleaner = Cs((P("@@")/"@" + P("@")/"%%" + P(1))^0)
-function format_n(fmt,...)
- return
-end
-
context = context or { exponent = function(...) print(...) end }
function metapost.format_string(fmt,...)
@@ -142,6 +147,9 @@ function metapost.nvformat(fmt,str)
metapost.format_number(fmt,metapost.untagvariable(str,false))
end
+implement { name = "metapostformatted", actions = metapost.svformat, arguments = { "string", "string" } }
+implement { name = "metapostgraphformat", actions = metapost.nvformat, arguments = { "string", "string" } }
+
-- local function test(fmt,n)
-- logs.report("mp format test","fmt: %s, n: %s, result: %s, \\exponent{%s}{%s}",fmt,n,
-- formatters[lpegmatch(cleaner,fmt)](n),
@@ -167,3 +175,34 @@ end
-- test("@j","1.2e+102")
-- test("@j","1.23e+102")
-- test("@j","1.234e+102")
+
+local f_textext = formatters[ [[textext("%s")]] ]
+local f_mthtext = formatters[ [[textext("\mathematics{%s}")]] ]
+local f_exptext = formatters[ [[textext("\mathematics{%s\times10^{%s}}")]] ]
+
+local mpprint = mp.print
+
+function mp.format(fmt,str)
+ fmt = lpegmatch(cleaner,fmt)
+ mpprint(f_textext(formatters[fmt](metapost.untagvariable(str,false))))
+end
+
+function mp.formatted(fmt,num) -- svformat
+ fmt = lpegmatch(cleaner,fmt)
+ mpprint(f_textext(formatters[fmt](tonumber(num) or num)))
+end
+
+function mp.graphformat(fmt,num) -- nvformat
+ fmt = lpegmatch(cleaner,fmt)
+ local number = tonumber(num)
+ if number then
+ local base, exponent = lpegmatch(enumber,number)
+ if base and exponent then
+ mpprint(f_exptext(base,exponent))
+ else
+ mpprint(f_mthtext(num))
+ end
+ else
+ mpprint(f_textext(tostring(num)))
+ end
+end
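+
+-- a hedged usage sketch (the call form is an assumption, the behaviour follows
+-- the code above): from the MetaPost end these helpers run through the lua()
+-- interface, e.g.
+--
+--   draw lua("mp.graphformat('@j','1.23e+102')") ;
+--
+-- a value with an exponent comes back as the f_exptext form
+-- (textext("\mathematics{...\times10^{...}}")), other numbers as the f_mthtext
+-- form, and non-numbers as a plain textext of the string.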
diff --git a/tex/context/base/meta-tex.mkiv b/tex/context/base/meta-tex.mkiv
index deac883c8..0f5a27ff8 100644
--- a/tex/context/base/meta-tex.mkiv
+++ b/tex/context/base/meta-tex.mkiv
@@ -28,7 +28,7 @@
\let\stopTeXtexts\relax
-\def\TeXtext
+\unexpanded\def\TeXtext
{\dosingleempty\meta_textext}
\def\meta_textext[#1]#2#3% contrary to mkii we don't process yet but we do expand
@@ -68,7 +68,7 @@
\unexpanded\def\definetextext[#1]%
{\def\currenttextext{#1}%
- \doifnextoptionalelse\meta_textext_define_one\meta_textext_define_zero}
+ \doifelsenextoptionalcs\meta_textext_define_one\meta_textext_define_zero}
\def\meta_textext_define_one {\setvalue{\??graphictexarguments1:\currenttextext}}
\def\meta_textext_define_zero{\setvalue{\??graphictexarguments0:\currenttextext}}
@@ -76,10 +76,10 @@
\def\sometxt#1#{\meta_some_txt{#1}}
\def\meta_some_txt#1#2% we need to capture embedded quotes (a bit messy as later on textext is filtered)
- {textext.drt("\mpsometxt#1{\ctxlua{metapost.escaped(\!!bs#2\!!es)}}")}
+ {textext.drt("\mpsometxt#1{\clf_metapostescaped{#2}}")}
\unexpanded\def\mpsometxt % no _ catcode
- {\doifnextoptionalelse\meta_some_txt_indeed_yes\meta_some_txt_indeed_nop}
+ {\doifelsenextoptionalcs\meta_some_txt_indeed_yes\meta_some_txt_indeed_nop}
\def\meta_some_txt_indeed_yes[#1]%
{\def\currenttextext{#1}%
@@ -116,8 +116,8 @@
%
% \unexpanded\def\definetextext[#1]#2{\setvalue{@@st@@#1}{#2}}
%
-% \long\def\sometxt #1#{\dosometxt{#1}} % grab optional [args]
-% \long\def\dosometxt #1#2{\ctxlua{metapost.sometxt(\!!bs#1\!!es,\!!bs#2\!!es)}}
+% \def\sometxt #1#{\dosometxt{#1}} % grab optional [args]
+% \def\dosometxt #1#2{\ctxlua{metapost.sometxt(\!!bs#1\!!es,\!!bs#2\!!es)}}
%
% \def\sometxta #1{textext.drt("#1")}
% \def\sometxtb #1#2{textext.drt("\getvalue{@@st@@#1}{#2}")}
@@ -139,7 +139,7 @@
% {\showstruts\useMPgraphic{testgraphic}}
\unexpanded\def\MPexponent #1#2{\mathematics{#1\times10^{#2}}}
-\unexpanded\def\MPformatted #1#2{\ctxlua{metapost.svformat("#1","#2")}}
-\unexpanded\def\MPgraphformat#1#2{\ctxlua{metapost.nvformat("#1","#2")}}
+\unexpanded\def\MPformatted #1#2{\clf_metapostformatted{#1}{#2}}
+\unexpanded\def\MPgraphformat#1#2{\clf_metapostgraphformat{#1}{#2}}
\protect \endinput
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index a1a4e645a..b437e1212 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -6,21 +6,28 @@ if not modules then modules = { } end modules ['mlib-ctx'] = {
license = "see context related readme files",
}
--- todo
+-- for the moment we have the scanners here but they might be moved to
+-- the other modules
+local type, tostring = type, tostring
local format, concat = string.format, table.concat
local settings_to_hash = utilities.parsers.settings_to_hash
local report_metapost = logs.reporter("metapost")
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
-local mplib = mplib
+local mplib = mplib
-metapost = metapost or {}
-local metapost = metapost
+metapost = metapost or {}
+local metapost = metapost
-local v_no = interfaces.variables.no
+local setters = tokens.setters
+local setmacro = setters.macro
+local implement = interfaces.implement
+
+local v_no = interfaces.variables.no
metapost.defaultformat = "metafun"
metapost.defaultinstance = "metafun"
@@ -78,15 +85,155 @@ function metapost.getextensions(instance,state)
end
end
-function commands.getmpextensions(instance,state)
- context(metapost.getextensions(instance,state))
+-- function commands.getmpextensions(instance,state)
+-- context(metapost.getextensions(instance,state))
+-- end
+
+implement {
+ name = "setmpextensions",
+ actions = metapost.setextensions,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "getmpextensions",
+ actions = { metapost.getextensions, context } ,
+ arguments = "string"
+}
+
+local report_metapost = logs.reporter ("metapost")
+local status_metapost = logs.messenger("metapost")
+
+local patterns = {
+ "meta-imp-%s.mkiv",
+ "meta-imp-%s.tex",
+ -- obsolete:
+ "meta-%s.mkiv",
+ "meta-%s.tex"
+}
+
+local function action(name,foundname)
+ status_metapost("library %a is loaded",name)
+ context.startreadingfile()
+ context.input(foundname)
+ context.stopreadingfile()
+end
+
+local function failure(name)
+ report_metapost("library %a is unknown or invalid",name)
end
+implement {
+ name = "useMPlibrary",
+ arguments = "string",
+ actions = function(name)
+ resolvers.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+}
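+
+-- a hedged sketch of what a lookup amounts to: \useMPlibrary[outlines] (the
+-- TeX side is redefined further down in this patch) ends up as
+--
+-- resolvers.uselibrary {
+--     name     = "outlines",   -- hypothetical library name, for illustration only
+--     patterns = patterns,     -- meta-imp-*.mkiv/.tex first, the old meta-*.* names last
+--     action   = action,       -- inputs the found file between start/stopreadingfile
+--     failure  = failure,      -- reports an unknown or invalid library
+--     onlyonce = true,         -- a library is loaded at most once
+-- }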
+
+-- metapost.variables = { } -- to be stacked
+
+implement {
+ name = "mprunvar",
+ arguments = "string",
+ actions = function(name)
+ local value = metapost.variables[name]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ context(concat(value," "))
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+ end
+}
+
+implement {
+ name = "mpruntab",
+ arguments = { "string", "integer" },
+ actions = function(name,n)
+ local value = metapost.variables[name]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ context(value[n])
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+ end
+}
+
+implement {
+ name = "mprunset",
+ actions = function(name,connector)
+ local value = metapost.variables[name]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ context(concat(value,connector))
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+ end
+}
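+
+-- a hedged example of the TeX side (the \MPrunvar, \MPruntab and \MPrunset
+-- macros are redefined near the end of meta-ini.mkiv in this patch): assuming
+-- a previous graphic stored metapost.variables.point = { 1, 2 }, then
+-- \MPrunvar{point} delivers "1 2", \MPruntab{point}{1} delivers "1", and
+-- \MPrunset{point}{,} delivers "1,2".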
+
+-- we need to move more from pps to here as pps is the plugin .. the order is a mess
+-- or just move the scanners to pps
+
function metapost.graphic(specification)
- setmpsformat(specification)
- metapost.graphic_base_pass(specification)
+ metapost.graphic_base_pass(setmpsformat(specification))
end
+implement {
+ name = "mpgraphic",
+ actions = metapost.graphic,
+ arguments = {
+ {
+ { "instance" },
+ { "format" },
+ { "data" },
+ { "initializations" },
+ { "extensions" },
+ { "inclusions" },
+ { "definitions" },
+ { "figure" },
+ { "method" },
+ }
+ }
+}
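+
+-- a hedged sketch of the specification this scanner builds (matching the
+-- \clf_mpgraphic call in meta-ini.mkiv earlier in this patch); the values are
+-- illustrative only, the remaining keys (initializations, extensions,
+-- inclusions, definitions, figure, method) are optional strings:
+--
+-- metapost.graphic {
+--     instance = "metafun",
+--     format   = "metafun",
+--     data     = "draw fullcircle scaled 1cm ;",
+-- }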
+
+implement {
+ name = "mpsetoutercolor",
+ actions = function(...) metapost.setoutercolor(...) end, -- not yet implemented
+ arguments = { "integer", "integer", "integer", "integer" }
+}
+
+implement {
+ name = "mpflushreset",
+ actions = function() metapost.flushreset() end -- not yet implemented
+}
+
+implement {
+ name = "mpflushliteral",
+ actions = function(str) metapost.flushliteral(str) end, -- not yet implemented
+ arguments = "string",
+}
+
function metapost.getclippath(specification) -- why not a special instance for this
setmpsformat(specification)
local mpx = specification.mpx
@@ -135,20 +282,42 @@ end
function metapost.theclippath(...)
local result = metapost.getclippath(...)
if result then -- we could just print the table
- result = concat(metapost.flushnormalpath(result),"\n")
- context(result)
+-- return concat(metapost.flushnormalpath(result),"\n")
+ return concat(metapost.flushnormalpath(result)," ")
+ else
+ return ""
end
end
+implement {
+ name = "mpsetclippath",
+ actions = function(specification)
+ setmacro("MPclippath",metapost.theclippath(specification),"global")
+ end,
+ arguments = {
+ {
+ { "instance" },
+ { "format" },
+ { "data" },
+ { "initializations" },
+ { "useextensions" },
+ { "inclusions" },
+ { "method" },
+ },
+ }
+}
+
statistics.register("metapost processing time", function()
local n = metapost.n
if n and n > 0 then
local nofconverted = metapost.makempy.nofconverted
local elapsedtime = statistics.elapsedtime
local elapsed = statistics.elapsed
- local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s",
+ local instances, memory = metapost.getstatistics(true)
+ local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s, instances: %i, memory: %0.3f M",
elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n,
- elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n))
+ elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n),
+ instances, memory/(1024*1024))
if nofconverted > 0 then
return format("%s, external: %s (%s calls)",
str, elapsedtime(metapost.makempy), nofconverted)
@@ -163,17 +332,44 @@ end)
-- only used in graphictexts
metapost.tex = metapost.tex or { }
+local mptex = metapost.tex
local environments = { }
-function metapost.tex.set(str)
+function mptex.set(str)
environments[#environments+1] = str
end
-function metapost.tex.reset()
- environments = { }
+function mptex.setfrombuffer(name)
+ environments[#environments+1] = buffers.content(name)
end
-function metapost.tex.get()
+function mptex.get()
return concat(environments,"\n")
end
+
+function mptex.reset()
+ environments = { }
+end
+
+implement {
+ name = "mptexset",
+ arguments = "string",
+ actions = mptex.set
+}
+
+implement {
+ name = "mptexsetfrombuffer",
+ arguments = "string",
+ actions = mptex.setfrombuffer
+}
+
+implement {
+ name = "mptexget",
+ actions = { mptex.get, context }
+}
+
+implement {
+ name = "mptexreset",
+ actions = mptex.reset
+}
diff --git a/tex/context/base/mlib-ctx.mkiv b/tex/context/base/mlib-ctx.mkiv
index 75ff45488..a7bb612c8 100644
--- a/tex/context/base/mlib-ctx.mkiv
+++ b/tex/context/base/mlib-ctx.mkiv
@@ -18,6 +18,8 @@
\registerctxluafile{mlib-run}{1.001}
\registerctxluafile{mlib-ctx}{1.001}
+\registerctxluafile{mlib-lua}{1.001}
+\registerctxluafile{mlib-int}{1.001} % here ?
\unprotect
diff --git a/tex/context/base/mlib-int.lua b/tex/context/base/mlib-int.lua
new file mode 100644
index 000000000..6d219fe04
--- /dev/null
+++ b/tex/context/base/mlib-int.lua
@@ -0,0 +1,153 @@
+if not modules then modules = { } end modules ['mlib-int'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local factor = number.dimenfactors.bp
+local mpprint = mp.print
+local mpboolean = mp.boolean
+local mpquoted = mp.quoted
+local getdimen = tex.getdimen
+local getcount = tex.getcount
+local get = tex.get
+local mpcolor = attributes.colors.mpcolor
+local emwidths = fonts.hashes.emwidths
+local exheights = fonts.hashes.exheights
+
+function mp.PaperHeight () mpprint(getdimen("paperheight") *factor) end
+function mp.PaperWidth () mpprint(getdimen("paperwidth") *factor) end
+function mp.PrintPaperHeight () mpprint(getdimen("printpaperheight") *factor) end
+function mp.PrintPaperWidth () mpprint(getdimen("printpaperwidth") *factor) end
+function mp.TopSpace () mpprint(getdimen("topspace") *factor) end
+function mp.BottomSpace () mpprint(getdimen("bottomspace") *factor) end
+function mp.BackSpace () mpprint(getdimen("backspace") *factor) end
+function mp.CutSpace () mpprint(getdimen("cutspace") *factor) end
+function mp.MakeupHeight () mpprint(getdimen("makeupheight") *factor) end
+function mp.MakeupWidth () mpprint(getdimen("makeupwidth") *factor) end
+function mp.TopHeight () mpprint(getdimen("topheight") *factor) end
+function mp.TopDistance () mpprint(getdimen("topdistance") *factor) end
+function mp.HeaderHeight () mpprint(getdimen("headerheight") *factor) end
+function mp.HeaderDistance () mpprint(getdimen("headerdistance") *factor) end
+function mp.TextHeight () mpprint(getdimen("textheight") *factor) end
+function mp.FooterDistance () mpprint(getdimen("footerdistance") *factor) end
+function mp.FooterHeight () mpprint(getdimen("footerheight") *factor) end
+function mp.BottomDistance () mpprint(getdimen("bottomdistance") *factor) end
+function mp.BottomHeight () mpprint(getdimen("bottomheight") *factor) end
+function mp.LeftEdgeWidth () mpprint(getdimen("leftedgewidth") *factor) end
+function mp.LeftEdgeDistance () mpprint(getdimen("leftedgedistance") *factor) end
+function mp.LeftMarginWidth () mpprint(getdimen("leftmarginwidth") *factor) end
+function mp.LeftMarginDistance () mpprint(getdimen("leftmargindistance") *factor) end
+function mp.TextWidth () mpprint(getdimen("textwidth") *factor) end
+function mp.RightMarginDistance () mpprint(getdimen("rightmargindistance") *factor) end
+function mp.RightMarginWidth () mpprint(getdimen("rightmarginwidth") *factor) end
+function mp.RightEdgeDistance () mpprint(getdimen("rightedgedistance") *factor) end
+function mp.RightEdgeWidth () mpprint(getdimen("rightedgewidth") *factor) end
+function mp.InnerMarginDistance () mpprint(getdimen("innermargindistance") *factor) end
+function mp.InnerMarginWidth () mpprint(getdimen("innermarginwidth") *factor) end
+function mp.OuterMarginDistance () mpprint(getdimen("outermargindistance") *factor) end
+function mp.OuterMarginWidth () mpprint(getdimen("outermarginwidth") *factor) end
+function mp.InnerEdgeDistance () mpprint(getdimen("inneredgedistance") *factor) end
+function mp.InnerEdgeWidth () mpprint(getdimen("inneredgewidth") *factor) end
+function mp.OuterEdgeDistance () mpprint(getdimen("outeredgedistance") *factor) end
+function mp.OuterEdgeWidth () mpprint(getdimen("outeredgewidth") *factor) end
+function mp.PageOffset () mpprint(getdimen("pagebackgroundoffset")*factor) end
+function mp.PageDepth () mpprint(getdimen("pagebackgrounddepth") *factor) end
+function mp.LayoutColumns () mpprint(getcount("layoutcolumns")) end
+function mp.LayoutColumnDistance() mpprint(getdimen("layoutcolumndistance")*factor) end
+function mp.LayoutColumnWidth () mpprint(getdimen("layoutcolumnwidth") *factor) end
+function mp.SpineWidth () mpprint(getdimen("spinewidth") *factor) end
+function mp.PaperBleed () mpprint(getdimen("paperbleed") *factor) end
+
+function mp.PageNumber () mpprint(getcount("pageno")) end
+function mp.RealPageNumber () mpprint(getcount("realpageno")) end
+function mp.NOfPages () mpprint(getcount("lastpageno")) end
+
+function mp.CurrentColumn () mpprint(getcount("mofcolumns")) end
+function mp.NOfColumns () mpprint(getcount("nofcolumns")) end
+
+function mp.BaseLineSkip () mpprint(getdimen("baselineskip") *factor) end
+function mp.LineHeight () mpprint(getdimen("lineheight") *factor) end
+function mp.BodyFontSize () mpprint(getdimen("bodyfontsize") *factor) end
+
+function mp.TopSkip () mpprint(getdimen("topskip") *factor) end
+function mp.StrutHeight () mpprint(getdimen("strutht") *factor) end
+function mp.StrutDepth () mpprint(getdimen("strutdp") *factor) end
+
+function mp.PageNumber () mpprint(getcount("pageno")) end
+function mp.RealPageNumber () mpprint(getcount("realpageno")) end
+function mp.NOfPages () mpprint(getcount("lastpageno")) end
+
+function mp.CurrentWidth () mpprint(get("hsize") *factor) end
+function mp.CurrentHeight () mpprint(get("vsize") *factor) end
+
+function mp.EmWidth () mpprint(emwidths [false]*factor) end
+function mp.ExHeight () mpprint(exheights[false]*factor) end
+
+mp.HSize = mp.CurrentWidth
+mp.VSize = mp.CurrentHeight
+mp.LastPageNumber = mp.NOfPages
+
+function mp.PageFraction ()
+ local lastpage = getcount("lastpageno")
+ if lastpage > 1 then
+ mpprint((getcount("realpageno")-1)/(lastpage-1))
+ else
+ mpprint(1)
+ end
+end
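+
+-- a hedged note: these helpers replace the big \startMPinitializations block
+-- that gets commented out in meta-ini.mkiv and meta-pag.mkiv in this patch;
+-- on the MetaPost end they are reached through the lua() interface, presumably
+-- wrapped along the lines of
+--
+--   vardef PaperHeight = lua("mp.PaperHeight()") enddef ;
+--
+-- (the wrapping itself is an assumption, it lives in the metafun macros, not
+-- in this file)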
+
+-- locals
+
+local on_right = structures.pages.on_right
+local is_odd = structures.pages.is_odd
+local in_body = structures.pages.in_body
+
+mp.OnRightPage = function() mpprint(on_right()) end -- needs checking
+mp.OnOddPage = function() mpprint(is_odd ()) end -- needs checking
+mp.InPageBody = function() mpprint(in_body ()) end -- needs checking
+
+-- mp.CurrentLayout : \currentlayout
+
+function mp.OverlayWidth () mpprint(getdimen("d_overlay_width") *factor) end
+function mp.OverlayHeight () mpprint(getdimen("d_overlay_height") *factor) end
+function mp.OverlayDepth () mpprint(getdimen("d_overlay_depth") *factor) end
+function mp.OverlayLineWidth () mpprint(getdimen("d_overlay_linewidth")*factor) end
+function mp.OverlayOffset () mpprint(getdimen("d_overlay_offset") *factor) end
+
+function mp.defaultcolormodel()
+ local colormethod = getcount("MPcolormethod")
+ -- if colormethod == 0 then
+ -- return 1
+ -- elseif colormethod == 1 then
+ -- return 1
+ -- elseif colormethod == 2 then
+ -- return 3
+ -- else
+ -- return 3
+ -- end
+ return (colormethod == 0 or colormethod == 1) and 1 or 3
+end
+
+-- not much difference (even with 10000 calls in a graphic), as expansion seems to win
+-- over defining the macro etc, so let's not waste counters then
+
+-- function mp.OverlayColor()
+-- local c = mpcolor(
+-- getcount("c_overlay_colormodel"),
+-- getcount("c_overlay_color"),
+-- getcount("c_overlay_transparency")
+-- )
+-- mpquoted(c)
+-- end
+--
+-- function mp.OverlayLineColor()
+-- local c = mpcolor(
+-- getcount("c_overlay_colormodel"),
+-- getcount("c_overlay_linecolor"),
+-- getcount("c_overlay_linetransparency")
+-- )
+-- mpquoted(c)
+-- end
diff --git a/tex/context/base/mlib-lua.lua b/tex/context/base/mlib-lua.lua
new file mode 100644
index 000000000..7e0568463
--- /dev/null
+++ b/tex/context/base/mlib-lua.lua
@@ -0,0 +1,383 @@
+if not modules then modules = { } end modules ['mlib-lua'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very preliminary code!
+
+-- maybe we need mplib.model, but how with instances
+
+local type, tostring, select, loadstring = type, tostring, select, loadstring
+local find, gsub = string.find, string.gsub
+
+local formatters = string.formatters
+local concat = table.concat
+local lpegmatch = lpeg.match
+
+local P, S, Ct = lpeg.P, lpeg.S, lpeg.Ct
+
+local report_luarun = logs.reporter("metapost","lua")
+
+local trace_luarun = false trackers.register("metapost.lua",function(v) trace_luarun = v end)
+local trace_enabled = true
+
+local be_tolerant = true directives.register("metapost.lua.tolerant",function(v) be_tolerant = v end)
+
+mp = mp or { } -- system namespace
+MP = MP or { } -- user namespace
+
+local buffer, n, max = { }, 0, 10 -- we reuse upto max
+
+function mp._f_()
+ if trace_enabled and trace_luarun then
+ local result = concat(buffer," ",1,n)
+ if n > max then
+ buffer = { }
+ end
+ n = 0
+ report_luarun("data: %s",result)
+ return result
+ else
+ if n == 0 then
+ return ""
+ end
+ local result
+ if n == 1 then
+ result = buffer[1]
+ else
+ result = concat(buffer," ",1,n)
+ end
+ if n > max then
+ buffer = { }
+ end
+ n = 0
+ return result
+ end
+end
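+
+-- the flush protocol, roughly: the mp helpers below append strings to the
+-- buffer and metapost.runscript (further down) appends "return mp._f_()" to
+-- the user code, so for instance lua("mp.pair(1,2)") expands on the MetaPost
+-- side to (1.0000000000000000,2.0000000000000000); multiple buffered entries
+-- are joined with a space.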
+
+local f_code = formatters["%s return mp._f_()"]
+
+local f_numeric = formatters["%.16f"]
+local f_pair = formatters["(%.16f,%.16f)"]
+local f_triplet = formatters["(%.16f,%.16f,%.16f)"]
+local f_quadruple = formatters["(%.16f,%.16f,%.16f,%.16f)"]
+
+function mp.print(...)
+ for i=1,select("#",...) do
+ local value = select(i,...)
+ if value ~= nil then
+ n = n + 1
+ local t = type(value)
+ if t == "number" then
+ buffer[n] = f_numeric(value)
+ elseif t == "string" then
+ buffer[n] = value
+ elseif t == "table" then
+ buffer[n] = "(" .. concat(value,",") .. ")"
+ else -- boolean or whatever
+ buffer[n] = tostring(value)
+ end
+ end
+ end
+end
+
+function mp.boolean(b) -- the argument, not the buffer index
+ n = n + 1
+ buffer[n] = b and "true" or "false"
+end
+
+function mp.numeric(f) -- the argument, not the buffer index
+ n = n + 1
+ buffer[n] = f and f_numeric(f) or "0"
+end
+
+function mp.pair(x,y)
+ n = n + 1
+ if type(x) == "table" then
+ buffer[n] = f_pair(x[1],x[2])
+ else
+ buffer[n] = f_pair(x,y)
+ end
+end
+
+function mp.triplet(x,y,z)
+ n = n + 1
+ if type(x) == "table" then
+ buffer[n] = f_triplet(x[1],x[2],x[3])
+ else
+ buffer[n] = f_triplet(x,y,z)
+ end
+end
+
+function mp.quadruple(w,x,y,z)
+ n = n + 1
+ if type(w) == "table" then
+ buffer[n] = f_quadruple(w[1],w[2],w[3],w[4])
+ else
+ buffer[n] = f_quadruple(w,x,y,z)
+ end
+end
+
+function mp.path(t,connector,cycle)
+ if type(t) == "table" then
+ local tn = #t
+ if tn > 0 then
+ if connector == true then
+ connector = "--"
+ cycle = true
+ elseif not connector then
+ connector = "--"
+ end
+ local ti = t[1]
+ n = n + 1 ; buffer[n] = f_pair(ti[1],ti[2])
+ for i=2,tn do
+ local ti = t[i]
+ n = n + 1 ; buffer[n] = connector
+ n = n + 1 ; buffer[n] = f_pair(ti[1],ti[2])
+ end
+ if cycle then
+ n = n + 1 ; buffer[n] = connector
+ n = n + 1 ; buffer[n] = "cycle"
+ end
+ end
+ end
+end
+
+function mp.size(t)
+ n = n + 1
+ buffer[n] = type(t) == "table" and f_numeric(#t) or "0"
+end
+
+-- experiment: names can change
+
+local datasets = { }
+mp.datasets = datasets
+
+function datasets.load(tag,filename)
+ if not filename then
+ tag, filename = file.basename(tag), tag
+ end
+ local data = mp.dataset(io.loaddata(filename) or "")
+ datasets[tag] = {
+ Data = data,
+ Line = function(n) mp.path(data[n or 1]) end,
+ Size = function() mp.size(data) end,
+ }
+end
+
+--
+
+local replacer = lpeg.replacer("@","%%")
+
+function mp.format(fmt,...)
+ n = n + 1
+ if not find(fmt,"%%") then
+ fmt = lpegmatch(replacer,fmt)
+ end
+ buffer[n] = formatters[fmt](...)
+end
+
+function mp.quoted(fmt,s,...)
+ n = n + 1
+ if s then
+ if not find(fmt,"%%") then
+ fmt = lpegmatch(replacer,fmt)
+ end
+ buffer[n] = '"' .. formatters[fmt](s,...) .. '"'
+ else
+ buffer[n] = '"' .. fmt .. '"'
+ end
+end
+
+function mp.n(t)
+ return type(t) == "table" and #t or 0
+end
+
+local whitespace = lpeg.patterns.whitespace
+local newline = lpeg.patterns.newline
+local setsep = newline^2
+local comment = (S("#%") + P("--")) * (1-newline)^0 * (whitespace - setsep)^0
+local value = (1-whitespace)^1 / tonumber
+local entry = Ct( value * whitespace * value)
+local set = Ct((entry * (whitespace-setsep)^0 * comment^0)^1)
+local series = Ct((set * whitespace^0)^1)
+
+local pattern = whitespace^0 * series
+
+function mp.dataset(str)
+ return lpegmatch(pattern,str)
+end
+
+-- \startluacode
+-- local str = [[
+-- 10 20 20 20
+-- 30 40 40 60
+-- 50 10
+--
+-- 10 10 20 30
+-- 30 50 40 50
+-- 50 20 -- the last one
+--
+-- 10 20 % comment
+-- 20 10
+-- 30 40 # comment
+-- 40 20
+-- 50 10
+-- ]]
+--
+-- MP.myset = mp.dataset(str)
+--
+-- inspect(MP.myset)
+-- \stopluacode
+--
+-- \startMPpage
+-- color c[] ; c[1] := red ; c[2] := green ; c[3] := blue ;
+-- for i=1 upto lua("mp.print(mp.n(MP.myset))") :
+-- draw lua("mp.path(MP.myset[" & decimal i & "])") withcolor c[i] ;
+-- endfor ;
+-- \stopMPpage
+
+-- function metapost.runscript(code)
+-- local f = loadstring(f_code(code))
+-- if f then
+-- local result = f()
+-- if result then
+-- local t = type(result)
+-- if t == "number" then
+-- return f_numeric(result)
+-- elseif t == "string" then
+-- return result
+-- else
+-- return tostring(result)
+-- end
+-- end
+-- end
+-- return ""
+-- end
+
+local cache, n = { }, 0 -- todo: when > n then reset cache or make weak
+
+function metapost.runscript(code)
+ local trace = trace_enabled and trace_luarun
+ if trace then
+ report_luarun("code: %s",code)
+ end
+ local f
+ if n > 100 then
+ cache = nil -- forget about caching
+ f = loadstring(f_code(code))
+ if not f and be_tolerant then
+ f = loadstring(code)
+ end
+ else
+ f = cache[code]
+ if not f then
+ f = loadstring(f_code(code))
+ if f then
+ n = n + 1
+ cache[code] = f
+ elseif be_tolerant then
+ f = loadstring(code)
+ if f then
+ n = n + 1
+ cache[code] = f
+ end
+ end
+ end
+ end
+ if f then
+ local result = f()
+ if result then
+ local t = type(result)
+ if t == "number" then
+ t = f_numeric(result)
+ elseif t == "string" then
+ t = result
+ else
+ t = tostring(result)
+ end
+ if trace then
+ report_luarun("result: %s",code)
+ end
+ return t
+ elseif trace then
+ report_luarun("no result")
+ end
+ else
+ report_luarun("no result, invalid code")
+ end
+ return ""
+end
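+
+-- a hedged note on the tolerant mode: a snippet that is itself a return
+-- statement, say lua("return 10"), gives the invalid wrapped chunk
+-- "return 10 return mp._f_()"; with the metapost.lua.tolerant directive
+-- enabled the raw code is compiled instead and its own return value (here 10,
+-- formatted with %.16f) is passed back rather than the buffer.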
+
+-- function metapost.initializescriptrunner(mpx)
+-- mp.numeric = function(s) return mpx:get_numeric(s) end
+-- mp.string = function(s) return mpx:get_string (s) end
+-- mp.boolean = function(s) return mpx:get_boolean(s) end
+-- mp.number = mp.numeric
+-- end
+
+local get_numeric = mplib.get_numeric
+local get_string = mplib.get_string
+local get_boolean = mplib.get_boolean
+local get_number = get_numeric
+
+-- function metapost.initializescriptrunner(mpx)
+-- mp.numeric = function(s) return get_numeric(mpx,s) end
+-- mp.string = function(s) return get_string (mpx,s) end
+-- mp.boolean = function(s) return get_boolean(mpx,s) end
+-- mp.number = mp.numeric
+-- end
+
+local currentmpx = nil
+
+local get = { }
+mp.get = get
+
+get.numeric = function(s) return get_numeric(currentmpx,s) end
+get.string = function(s) return get_string (currentmpx,s) end
+get.boolean = function(s) return get_boolean(currentmpx,s) end
+get.number = get.numeric
+
+function metapost.initializescriptrunner(mpx,trialrun)
+ currentmpx = mpx
+ if trace_luarun then
+ report_luarun("type of run: %s", trialrun and "trial" or "final")
+ end
+ -- trace_enabled = not trialrun blocks too much
+end
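+
+-- a hedged usage sketch: from within a graphic one can fetch a variable of the
+-- instance currently being run back into Lua, e.g.
+--
+--   lua("mp.print(mp.get.numeric('w'))")
+--
+-- asks mplib for the numeric w of the current instance and pushes its value
+-- into the buffer again ('w' is just a placeholder name here).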
+
+-- texts:
+
+local factor = 65536*(7227/7200)
+local textexts = nil
+local mptriplet = mp.triplet
+
+function mp.tt_initialize(tt)
+ textexts = tt
+end
+
+-- function mp.tt_wd(n)
+-- local box = textexts and textexts[n]
+-- mpprint(box and box.width/factor or 0)
+-- end
+-- function mp.tt_ht(n)
+-- local box = textexts and textexts[n]
+-- mpprint(box and box.height/factor or 0)
+-- end
+-- function mp.tt_dp(n)
+-- local box = textexts and textexts[n]
+-- mpprint(box and box.depth/factor or 0)
+-- end
+
+function mp.tt_dimensions(n)
+ local box = textexts and textexts[n]
+ if box then
+ -- could be made faster with nuts but not critical
+ mptriplet(box.width/factor,box.height/factor,box.depth/factor)
+ else
+ mptriplet(0,0,0)
+ end
+end
diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua
index 6bb08bd1d..8de09f42a 100644
--- a/tex/context/base/mlib-pdf.lua
+++ b/tex/context/base/mlib-pdf.lua
@@ -19,13 +19,19 @@ local report_metapost = logs.reporter("metapost")
local trace_variables = false trackers.register("metapost.variables",function(v) trace_variables = v end)
-local mplib, context = mplib, context
+local mplib = mplib
+local context = context
local allocate = utilities.storage.allocate
local copy_node = node.copy
local write_node = node.write
+local pen_info = mplib.pen_info
+local object_fields = mplib.fields
+
+local save_table = false
+
metapost = metapost or { }
local metapost = metapost
@@ -33,7 +39,6 @@ metapost.flushers = metapost.flushers or { }
local pdfflusher = { }
metapost.flushers.pdf = pdfflusher
-metapost.multipass = false -- to be stacked
metapost.n = 0
metapost.optimize = true -- false
local experiment = true -- uses context(node) that already does delayed nodes
local savedliterals = nil -- needs checking
local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1
-local pdfliteral = function(s)
+local f_f = formatters["%F"]
+
+local f_m = formatters["%F %F m"]
+local f_c = formatters["%F %F %F %F %F %F c"]
+local f_l = formatters["%F %F l"]
+local f_cm = formatters["%F %F %F %F %F %F cm"]
+local f_M = formatters["%F M"]
+local f_j = formatters["%i j"]
+local f_J = formatters["%i J"]
+local f_d = formatters["[%s] %F d"]
+local f_w = formatters["%F w"]
+
+directives.register("metapost.savetable",function(v)
+ if type(v) == "string" then
+ save_table = file.addsuffix(v,"mpl")
+ elseif v then
+ save_table = file.addsuffix(environment.jobname .. "-graphic","mpl")
+ else
+ save_table = false
+ end
+end)
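+
+-- a hedged usage note: this directive is meant to be enabled from the command
+-- line, e.g.
+--
+--   context --directives=metapost.savetable=mygraphic test.tex
+--
+-- after which the parsed result is dumped with table.save to mygraphic.mpl
+-- (or to <jobname>-graphic.mpl when no name is given).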
+
+local pdfliteral = function(pdfcode)
local literal = copy_node(mpsliteral)
- literal.data = s
+ literal.data = pdfcode
return literal
end
@@ -52,18 +79,19 @@ end
-- get a new result table and the stored objects are forgotten. Otherwise they
-- are reused.
-local function getobjects(result,figure,f)
+local function getobjects(result,figure,index)
if metapost.optimize then
- local objects = result.objects
- if not objects then
- result.objects = { }
+ local robjects = result.objects
+ if not robjects then
+ robjects = { }
+ result.objects = robjects
end
- objects = result.objects[f]
- if not objects then
- objects = figure:objects()
- result.objects[f] = objects
+ local fobjects = robjects[index or 1]
+ if not fobjects then
+ fobjects = figure:objects()
+ robjects[index] = fobjects
end
- return objects
+ return fobjects
else
return figure:objects()
end
@@ -71,15 +99,20 @@ end
function metapost.convert(result, trialrun, flusher, multipass, askedfig)
if trialrun then
- metapost.multipass = false
- metapost.parse(result, askedfig)
- if multipass and not metapost.multipass and metapost.optimize then
- metapost.flush(result, flusher, askedfig) -- saves a run
+ local multipassindeed = metapost.parse(result,askedfig)
+ if multipass and not multipassindeed and metapost.optimize then
+ if save_table then
+ table.save(save_table,metapost.totable(result,1)) -- direct
+ end
+ metapost.flush(result,flusher,askedfig) -- saves a run
else
return false
end
else
- metapost.flush(result, flusher, askedfig)
+ if save_table then
+ table.save(save_table,metapost.totable(result,1)) -- direct
+ end
+ metapost.flush(result,flusher,askedfig)
end
return true -- done
end
@@ -119,7 +152,7 @@ end
function pdfflusher.startfigure(n,llx,lly,urx,ury,message)
savedliterals = nil
metapost.n = metapost.n + 1
- context.startMPLIBtoPDF(llx,lly,urx,ury)
+ context.startMPLIBtoPDF(f_f(llx),f_f(lly),f_f(urx),f_f(ury))
if message then pdfflusher.comment(message) end
end
@@ -156,8 +189,6 @@ local bend_tolerance = 131/65536
local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1
-local pen_info = mplib.pen_info
-
local function pen_characteristics(object)
local t = pen_info(object)
rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty
@@ -192,11 +223,11 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord)
+ t[nt] = f_m(pth.x_coord,pth.y_coord)
elseif curved(ith,pth) then
- t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
+ t[nt] = f_c(ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
else
- t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord)
+ t[nt] = f_l(pth.x_coord,pth.y_coord)
end
ith = pth
end
@@ -204,15 +235,15 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
local one = path[1]
if curved(pth,one) then
- t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
+ t[nt] = f_c(pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
else
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ t[nt] = f_l(one.x_coord,one.y_coord)
end
elseif #path == 1 then
-- special case .. draw point
local one = path[1]
nt = nt + 1
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ t[nt] = f_l(one.x_coord,one.y_coord)
end
return t
end
@@ -226,18 +257,18 @@ local function flushconcatpath(path, t, open)
nt = 0
end
nt = nt + 1
- t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty)
+ t[nt] = f_cm(sx,rx,ry,sy,tx,ty)
for i=1,#path do
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = f_m(mpconcat(pth.x_coord,pth.y_coord))
elseif curved(ith,pth) then
local a, b = mpconcat(ith.right_x,ith.right_y)
local c, d = mpconcat(pth.left_x,pth.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = f_c(a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
else
- t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord))
+ t[nt] = f_l(mpconcat(pth.x_coord, pth.y_coord))
end
ith = pth
end
@@ -247,15 +278,15 @@ local function flushconcatpath(path, t, open)
if curved(pth,one) then
local a, b = mpconcat(pth.right_x,pth.right_y)
local c, d = mpconcat(one.left_x,one.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord))
+ t[nt] = f_c(a,b,c,d,mpconcat(one.x_coord, one.y_coord))
else
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
elseif #path == 1 then
-- special case .. draw point
nt = nt + 1
local one = path[1]
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
return t
end
@@ -311,32 +342,8 @@ local variable =
local pattern_lst = (variable * newline^0)^0
-metapost.variables = { } -- to be stacked
-metapost.llx = 0 -- to be stacked
-metapost.lly = 0 -- to be stacked
-metapost.urx = 0 -- to be stacked
-metapost.ury = 0 -- to be stacked
-
-function commands.mprunvar(key,n) -- should be defined in another lib
- local value = metapost.variables[key]
- if value ~= nil then
- local tvalue = type(value)
- if tvalue == "table" then
- local ntype = type(n)
- if ntype == "number" then
- context(value[n])
- elseif ntype == "string" then
- context(concat(value,n))
- else
- context(concat(value," "))
- end
- elseif tvalue == "number" or tvalue == "boolean" then
- context(tostring(value))
- elseif tvalue == "string" then
- context(value)
- end
- end
-end
+metapost.variables = { } -- to be stacked
+metapost.properties = { } -- to be stacked
function metapost.untagvariable(str,variables) -- will be redone
if variables == false then
@@ -362,6 +369,30 @@ function metapost.processspecial(str)
end
end
+local function setproperties(figure)
+ local boundingbox = figure:boundingbox()
+ local properties = {
+ llx = boundingbox[1],
+ lly = boundingbox[2],
+ urx = boundingbox[3],
+ ury = boundingbox[4],
+ slot = figure:charcode(),
+ width = figure:width(),
+ height = figure:height(),
+ depth = figure:depth(),
+ italic = figure:italcorr(),
+ number = figure:charcode() or 0,
+ }
+ metapost.properties = properties
+ return properties
+end
+
+local function setvariables(figure)
+ local variables = { }
+ metapost.variables = variables
+ return variables
+end
+
function metapost.flush(result,flusher,askedfig)
if result then
local figures = result.fig
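-- Editor's sketch (not part of the patch): metapost.properties, as filled by
-- setproperties above, is what plugins consult while the current figure is
-- being flushed; the ps_process plugin further down uses it exactly like this
-- to express a point relative to the top-left corner of the bounding box.

local function topleftrelative(x,y)
    local properties = metapost.properties -- only meaningful during a flush
    return x - properties.llx, properties.ury - y
end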
@@ -376,42 +407,27 @@ function metapost.flush(result,flusher,askedfig)
local flushfigure = flusher.flushfigure
local textfigure = flusher.textfigure
local processspecial = flusher.processspecial or metapost.processspecial
- for f=1,#figures do
- local figure = figures[f]
- local objects = getobjects(result,figure,f)
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local t = { }
+ local variables = setvariables(figure) -- also resets them in case of not found
+ for index=1,#figures do
+ local figure = figures[index]
+ local properties = setproperties(figure)
+ if askedfig == "direct" or askedfig == "all" or askedfig == properties.number then
+ local objects = getobjects(result,figure,index)
+ local result = { }
local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
- local bbox = figure:boundingbox()
- local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
- local variables = { }
- metapost.variables = variables
- metapost.properties = {
- llx = llx,
- lly = lly,
- urx = urx,
- ury = ury,
- slot = figure:charcode(),
- width = figure:width(),
- height = figure:height(),
- depth = figure:depth(),
- italic = figure:italcorr(),
- }
- -- replaced by the above
- metapost.llx = llx
- metapost.lly = lly
- metapost.urx = urx
- metapost.ury = ury
+ local llx = properties.llx
+ local lly = properties.lly
+ local urx = properties.urx
+ local ury = properties.ury
if urx < llx then
-- invalid
- startfigure(fignum,0,0,0,0,"invalid",figure)
+ startfigure(properties.number,0,0,0,0,"invalid",figure)
stopfigure()
else
- startfigure(fignum,llx,lly,urx,ury,"begin",figure)
- t[#t+1] = "q"
+ startfigure(properties.number,llx,lly,urx,ury,"begin",figure)
+ result[#result+1] = "q"
if objects then
- resetplugins(t) -- we should move the colorinitializer here
+ resetplugins(result) -- we should move the colorinitializer here
for o=1,#objects do
local object = objects[o]
local objecttype = object.type
@@ -422,20 +438,21 @@ function metapost.flush(result,flusher,askedfig)
processspecial(object.prescript)
end
elseif objecttype == "start_clip" then
- t[#t+1] = "q"
- flushnormalpath(object.path,t,false)
- t[#t+1] = "W n"
+ local evenodd = not object.istext and object.postscript == "evenodd"
+ result[#result+1] = "q"
+ flushnormalpath(object.path,result,false)
+ result[#result+1] = evenodd and "W* n" or "W n"
elseif objecttype == "stop_clip" then
- t[#t+1] = "Q"
- miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ result[#result+1] = "Q"
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, "" -- was false
elseif objecttype == "text" then
- t[#t+1] = "q"
+ result[#result+1] = "q"
local ot = object.transform -- 3,4,5,6,1,2
- t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot))
- flushfigure(t) -- flush accumulated literals
- t = { }
+ result[#result+1] = f_cm(ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%F %F m %F %F %F %F 0 0 cm"](unpack(ot))
+ flushfigure(result) -- flush accumulated literals
+ result = { }
textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
else
-- we use an indirect table as we want to overload
-- entries but this is not possible in userdata
@@ -451,32 +468,32 @@ function metapost.flush(result,flusher,askedfig)
local before, after = processplugins(object)
local objecttype = object.type -- can have changed
if before then
- t = pluginactions(before,t,flushfigure)
+ result = pluginactions(before,result,flushfigure)
end
local ml = object.miterlimit
if ml and ml ~= miterlimit then
miterlimit = ml
- t[#t+1] = formatters["%f M"](ml)
+ result[#result+1] = f_M(ml)
end
local lj = object.linejoin
if lj and lj ~= linejoin then
linejoin = lj
- t[#t+1] = formatters["%i j"](lj)
+ result[#result+1] = f_j(lj)
end
local lc = object.linecap
if lc and lc ~= linecap then
linecap = lc
- t[#t+1] = formatters["%i J"](lc)
+ result[#result+1] = f_J(lc)
end
local dl = object.dash
if dl then
- local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset)
+ local d = f_d(concat(dl.dashes or {}," "),dl.offset)
if d ~= dashed then
dashed = d
- t[#t+1] = dashed
+ result[#result+1] = d
end
- elseif dashed then
- t[#t+1] = "[] 0 d"
+ elseif dashed ~= false then -- was just dashed test
+ result[#result+1] = "[] 0 d"
dashed = false
end
local path = object.path -- newpath
@@ -486,7 +503,7 @@ function metapost.flush(result,flusher,askedfig)
if pen then
if pen.type == 'elliptical' then
transformed, penwidth = pen_characteristics(original) -- boolean, value
- t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed
+ result[#result+1] = f_w(penwidth) -- todo: only if changed
if objecttype == 'fill' then
objecttype = 'both'
end
@@ -495,58 +512,59 @@ function metapost.flush(result,flusher,askedfig)
end
end
if transformed then
- t[#t+1] = "q"
+ result[#result+1] = "q"
end
+ local evenodd = not object.istext and object.postscript == "evenodd"
if path then
if transformed then
- flushconcatpath(path,t,open)
+ flushconcatpath(path,result,open)
else
- flushnormalpath(path,t,open)
+ flushnormalpath(path,result,open)
end
if objecttype == "fill" then
- t[#t+1] = "h f"
+ result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo
elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
+ result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- t[#t+1] = "h B"
+ result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath
end
end
if transformed then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
end
local path = object.htap
if path then
if transformed then
- t[#t+1] = "q"
+ result[#result+1] = "q"
end
if transformed then
- flushconcatpath(path,t,open)
+ flushconcatpath(path,result,open)
else
- flushnormalpath(path,t,open)
+ flushnormalpath(path,result,open)
end
if objecttype == "fill" then
- t[#t+1] = "h f"
+ result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo
elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
+ result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- t[#t+1] = "h B"
+ result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath
end
if transformed then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
end
end
if after then
- t = pluginactions(after,t,flushfigure)
+ result = pluginactions(after,result,flushfigure)
end
if object.grouped then
-- can be qQ'd so changes can end up in groups
- miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, "" -- was false
end
end
end
end
- t[#t+1] = "Q"
- flushfigure(t)
+ result[#result+1] = "Q"
+ flushfigure(result)
stopfigure("end")
end
if askedfig ~= "all" then
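-- Editor's note (not part of the patch): the even-odd handling added above
-- comes down to picking the starred PDF path-painting operators; a compact
-- way to express the same choice:

local clipop = { [false] = "W n", [true] = "W* n" } -- clip
local fillop = { [false] = "h f", [true] = "h f*" } -- close and fill
local bothop = { [false] = "h B", [true] = "h B*" } -- close, fill and stroke

-- result[#result+1] = fillop[evenodd]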
@@ -562,20 +580,18 @@ function metapost.parse(result,askedfig)
if result then
local figures = result.fig
if figures then
+ local multipass = false
local analyzeplugins = metapost.analyzeplugins -- each object
- for f=1,#figures do
- local figure = figures[f]
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local bbox = figure:boundingbox()
- metapost.llx = bbox[1]
- metapost.lly = bbox[2]
- metapost.urx = bbox[3]
- metapost.ury = bbox[4]
- local objects = getobjects(result,figure,f)
+ for index=1,#figures do
+ local figure = figures[index]
+ local properties = setproperties(figure)
+ if askedfig == "direct" or askedfig == "all" or askedfig == properties.number then
+ local objects = getobjects(result,figure,index)
if objects then
for o=1,#objects do
- analyzeplugins(objects[o])
+ if analyzeplugins(objects[o]) then
+ multipass = true
+ end
end
end
if askedfig ~= "all" then
@@ -583,24 +599,24 @@ function metapost.parse(result,askedfig)
end
end
end
+ return multipass
end
end
end
-- tracing:
-local t = { }
+local result = { }
local flusher = {
startfigure = function()
- t = { }
+ result = { }
context.startnointerference()
end,
flushfigure = function(literals)
- local n = #t
- for i=1, #literals do
- n = n + 1
- t[n] = literals[i]
+ local n = #result
+ for i=1,#literals do
+ result[n+i] = literals[i]
end
end,
stopfigure = function()
@@ -610,30 +626,35 @@ local flusher = {
function metapost.pdfliterals(result)
metapost.flush(result,flusher)
- return t
+ return result
end
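-- Editor's sketch (not part of the patch): the flusher protocol that
-- metapost.flush expects, modelled on the tracing flusher used by
-- pdfliterals above; a graphic containing text objects would additionally
-- need a textfigure entry.

local collected = { }

local collectingflusher = {
    startfigure = function(n,llx,lly,urx,ury,message)
        collected = { }
    end,
    flushfigure = function(literals)
        local n = #collected
        for i=1,#literals do
            collected[n+i] = literals[i]
        end
    end,
    stopfigure = function(message)
        -- collected now holds the pdf literals of one figure
    end,
}

-- metapost.flush(result,collectingflusher,"all")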
--- so far
-
-function metapost.totable(result)
- local figure = result and result.fig and result.fig[1]
+function metapost.totable(result,askedfig)
+ local askedfig = askedfig or 1
+ local figure = result and result.fig and result.fig[1]
if figure then
- local t = { }
- local objects = figure:objects()
+ local results = { }
+ -- local objects = figure:objects()
+ local objects = getobjects(result,figure,askedfig)
for o=1,#objects do
local object = objects[o]
- local tt = { }
- local fields = mplib.fields(object)
+ local result = { }
+ local fields = object_fields(object) -- hm, is this the whole list, if so, we can get it once
for f=1,#fields do
local field = fields[f]
- tt[field] = object[field]
+ result[field] = object[field]
end
- t[o] = tt
+ results[o] = result
end
- local b = figure:boundingbox()
+ local boundingbox = figure:boundingbox()
return {
- boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] },
- objects = t
+ boundingbox = {
+ llx = boundingbox[1],
+ lly = boundingbox[2],
+ urx = boundingbox[3],
+ ury = boundingbox[4],
+ },
+ objects = results
}
else
return nil
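-- Editor's sketch (not part of the patch): the save_table branch in
-- metapost.convert above serializes exactly the table built here; dumping a
-- conversion result by hand would look like this ("demo-figure.lua" is a
-- made-up filename).

local function dumpfigure(result,filename)
    local converted = metapost.totable(result,1)
    if converted then
        table.save(filename or "demo-figure.lua",converted)
    end
end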
diff --git a/tex/context/base/mlib-pdf.mkiv b/tex/context/base/mlib-pdf.mkiv
index 0913b3699..92bf86ea9 100644
--- a/tex/context/base/mlib-pdf.mkiv
+++ b/tex/context/base/mlib-pdf.mkiv
@@ -88,7 +88,7 @@
% MPLIB specific:
-\def\MPLIBtoPDF#1{\ctxlua{metapost.flushliteral(#1)}}
+\def\MPLIBtoPDF{\clf_mpflushliteral}
\def\startMPLIBtoPDF#1#2#3#4%
{\meta_process_graphic_figure_start
@@ -109,7 +109,7 @@
\meta_process_graphic_figure_stop}
\def\MPLIBflushreset % This can (will) move to the Lua end.
- {\ctxlua{metapost.flushreset()}}
+ {\clf_mpflushreset}
%D Kind of special:
%
@@ -134,27 +134,50 @@
\unexpanded\def\directMPgraphic
{\dodoublegroupempty\mlib_direct_graphic}
+% \def\mlib_direct_graphic#1#2% makes pages (todo: make boxes)
+% {\meta_begin_graphic_group{#1}%
+% \let\startMPLIBtoPDF\directstartMPLIBtoPDF
+% \let\stopMPLIBtoPDF \directstopMPLIBtoPDF
+% \meta_start_current_graphic
+% \forgetall
+% \edef\p_extensions{\MPinstanceparameter\s!extensions}%
+% \normalexpanded{\noexpand\ctxlua{metapost.graphic {
+% instance = "\currentMPinstance",
+% format = "\currentMPformat",
+% data = \!!bs#2;\!!es,
+% initializations = \!!bs\meta_flush_current_initializations\!!es,
+% % useextensions = "\MPinstanceparameter\s!extensions",
+% \ifx\p_extensions\v!yes
+% extensions = \!!bs\clf_getmpextensions{\currentMPinstance}\!!es,
+% \fi
+% inclusions = \!!bs\meta_flush_current_inclusions\!!es,
+% definitions = \!!bs\meta_flush_current_definitions\!!es,
+% figure = "all",
+% method = "\MPinstanceparameter\c!method",
+% }}}%
+% \meta_stop_current_graphic
+% \meta_end_graphic_group}
+
\def\mlib_direct_graphic#1#2% makes pages (todo: make boxes)
{\meta_begin_graphic_group{#1}%
\let\startMPLIBtoPDF\directstartMPLIBtoPDF
\let\stopMPLIBtoPDF \directstopMPLIBtoPDF
\meta_start_current_graphic
- \forgetall
- \edef\p_extensions{\MPinstanceparameter\s!extensions}%
- \normalexpanded{\noexpand\ctxlua{metapost.graphic {
- instance = "\currentMPinstance",
- format = "\currentMPformat",
- data = \!!bs#2;\!!es,
- initializations = \!!bs\meta_flush_current_initializations\!!es,
-% useextensions = "\MPinstanceparameter\s!extensions",
-\ifx\p_extensions\v!yes
- extensions = \!!bs\ctxcommand{getmpextensions("\currentMPinstance")}\!!es,
-\fi
- inclusions = \!!bs\meta_flush_current_inclusions\!!es,
- definitions = \!!bs\meta_flush_current_definitions\!!es,
- figure = "all",
- method = "\MPinstanceparameter\c!method",
- }}}%
+ \forgetall
+ \edef\p_extensions{\MPinstanceparameter\s!extensions}%
+ \normalexpanded{\noexpand\clf_mpgraphic
+ instance {\currentMPinstance}%
+ format {\currentMPformat}%
+ data {#2;}%
+ initializations {\meta_flush_current_initializations}%
+ \ifx\p_extensions\v!yes
+ extensions {\clf_getmpextensions{\currentMPinstance}}%
+ \fi
+ inclusions {\meta_flush_current_inclusions}%
+ definitions {\meta_flush_current_definitions}%
+ figure {all}%
+ method {\MPinstanceparameter\c!method}%
+ \relax}%
\meta_stop_current_graphic
\meta_end_graphic_group}
diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua
index 385fb3ece..ab56699b9 100644
--- a/tex/context/base/mlib-pps.lua
+++ b/tex/context/base/mlib-pps.lua
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['mlib-pps'] = {
license = "see context related readme files",
}
--- todo: pass multipass nicer
-
local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
-local tonumber, type = tonumber, type
+local tonumber, type, unpack = tonumber, type, unpack
local round = math.round
local insert, remove, concat = table.insert, table.remove, table.concat
local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
@@ -18,8 +16,15 @@ local formatters = string.formatters
local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
+local context = context
+local context_setvalue = context.setvalue
+
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+
local texgetbox = tex.getbox
local texsetbox = tex.setbox
+local textakebox = tex.takebox
local copy_list = node.copy_list
local free_list = node.flush_list
local setmetatableindex = table.setmetatableindex
@@ -37,11 +42,19 @@ local report_textexts = logs.reporter("metapost","textexts")
local report_scripts = logs.reporter("metapost","scripts")
local colors = attributes.colors
+local defineprocesscolor = colors.defineprocesscolor
+local definespotcolor = colors.definespotcolor
+local definemultitonecolor = colors.definemultitonecolor
+local colorvalue = colors.value
-local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end
-local cmyktorgb = colors.cmyktorgb or function() return 0,0,0 end
-local rgbtogray = colors.rgbtogray or function() return 0 end
-local cmyktogray = colors.cmyktogray or function() return 0 end
+local transparencies = attributes.transparencies
+local registertransparency = transparencies.register
+local transparencyvalue = transparencies.value
+
+local rgbtocmyk = colors.rgbtocmyk -- or function() return 0,0,0,1 end
+local cmyktorgb = colors.cmyktorgb -- or function() return 0,0,0 end
+local rgbtogray = colors.rgbtogray -- or function() return 0 end
+local cmyktogray = colors.cmyktogray -- or function() return 0 end
metapost.makempy = metapost.makempy or { nofconverted = 0 }
local makempy = metapost.makempy
@@ -56,11 +69,6 @@ local innertransparency = nooutertransparency
local pdfcolor = lpdf.color
local pdftransparency = lpdf.transparency
-local registercolor = colors.register
-local registerspotcolor = colors.registerspotcolor
-
-local transparencies = attributes.transparencies
-local registertransparency = transparencies.register
function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattribute)
-- has always to be called before conversion
@@ -82,12 +90,19 @@ function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattri
innertransparency = outertransparency -- not yet used
end
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
+-- todo: get this from the lpdf module
+
+local f_f = formatters["%F"]
+local f_f3 = formatters["%.3F"]
+
+local f_gray = formatters["%.3F g %.3F G"]
+local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
+local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
+local f_cm = formatters["q %F %F %F %F %F %F cm"]
local f_shade = formatters["MpSh%s"]
+local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
+
local function checked_color_pair(color,...)
if not color then
return innercolor, outercolor
@@ -137,15 +152,6 @@ local function normalize(ca,cb)
end
end
--- todo: check for the same colorspace (actually a backend issue), now we can
--- have several similar resources
---
--- normalize(ca,cb) fails for spotcolors
-
-local function spotcolorconverter(parent, n, d, p)
- registerspotcolor(parent)
- return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor
-end
local commasplitter = tsplitat(",")
@@ -211,21 +217,34 @@ local function checkandconvert(ca,cb)
end
end
+-- We keep textexts in a shared list (as it's easier that way and we also had that in
+-- the beginning). Each graphic gets its own (1 based) subtable so that we can also
+-- handle multiple conversions in one go which is needed when we process mp files
+-- directly.
+
local stack = { } -- quick hack, we will pass topofstack around
local top = nil
local nofruns = 0 -- askedfig: "all", "first", number
-local function startjob(texmode)
- top = {
- textexts = { }, -- all boxes, optionally with a different color
- texslots = { }, -- references to textexts in order or usage
- texorder = { }, -- references to textexts by mp index
+local function preset(t,k)
+ -- references to textexts by mp index
+ local v = {
textrial = 0,
texfinal = 0,
- -- used by tx plugin
+ texslots = { },
+ texorder = { },
texhash = { },
+ }
+ t[k] = v
+ return v
+end
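-- Editor's sketch (not part of the patch): table.setmetatableindex with the
-- preset function above makes per-figure records appear on demand, so
-- top.texdata[number] never needs an explicit existence check.

local texdata = table.setmetatableindex({ },preset)

local data = texdata[3]            -- first access creates the record for figure 3
data.textrial = data.textrial + 1  -- and it can be updated right away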
+
+local function startjob(plugmode)
+ top = {
+ textexts = { }, -- all boxes, optionally with a different color
texlast = 0,
- texmode = texmode, -- some day we can then skip all pre/postscripts
+ texdata = setmetatableindex({},preset), -- references to textexts in order or usage
+ plugmode = plugmode, -- some day we can then skip all pre/postscripts
}
insert(stack,top)
if trace_runs then
@@ -239,7 +258,7 @@ local function stopjob()
for n, tn in next, top.textexts do
free_list(tn)
if trace_textexts then
- report_textexts("freeing box %s",n)
+ report_textexts("freeing text %s",n)
end
end
if trace_runs then
@@ -251,27 +270,41 @@ local function stopjob()
end
end
-function metapost.settextexts () end -- obsolete
-function metapost.resettextexts() end -- obsolete
+function metapost.getjobdata()
+ return top
+end
-- end of new
-function metapost.settext(box,slot)
- top.textexts[slot] = copy_list(texgetbox(box))
- texsetbox(box,nil)
- -- this will become
- -- top.textexts[slot] = texgetbox(box)
- -- unsetbox(box)
+local function settext(box,slot)
+ if top then
+ top.textexts[slot] = copy_list(texgetbox(box))
+ texsetbox(box,nil)
+ -- this can become
+ -- top.textexts[slot] = textakebox(box)
+ else
+ -- weird error
+ end
end
-function metapost.gettext(box,slot)
- texsetbox(box,copy_list(top.textexts[slot]))
- if trace_textexts then
- report_textexts("putting text %s in box %s",slot,box)
+local function gettext(box,slot)
+ if top then
+ texsetbox(box,copy_list(top.textexts[slot]))
+ if trace_textexts then
+ report_textexts("putting text %s in box %s",slot,box)
+ end
+ -- top.textexts[slot] = nil -- no, pictures can be placed several times
+ else
+ -- weird error
end
- -- top.textexts[slot] = nil -- no, pictures can be placed several times
end
+metapost.settext = settext
+metapost.gettext = gettext
+
+implement { name = "mpsettext", actions = settext, arguments = { "integer", "integer" } } -- box slot
+implement { name = "mpgettext", actions = gettext, arguments = { "integer", "integer" } } -- box slot
+
-- rather generic pdf, so this can be used elsewhere too; it no longer pays
-- off to distinguish between outline and fill (we now have both
-- too, e.g. in arrows)
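-- Editor's note (not part of the patch): interfaces.implement, as used above
-- for mpsettext and mpgettext, registers a Lua action under a name that the
-- TeX end reaches as \clf_<name>; the helper below is a made-up example, not
-- one defined in this patch.

implement {
    name      = "mpdemomessage",                        -- hypothetical name
    arguments = { "string" },
    actions   = function(s) report_metapost("demo: %s",s) end,
}

-- at the TeX end: \clf_mpdemomessage{hello}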
@@ -461,7 +494,7 @@ end
-- currently a one-liner produces less code
-- textext.*(".*") can have "'s but tricky parsing as we can have concatenated strings
--- so this is something for a boring plain or train trip and we might assume proper mp
+-- so this is something for a boring plane or train trip and we might assume proper mp
-- input anyway
local parser = Cs((
@@ -471,57 +504,95 @@ local parser = Cs((
+ 1
)^0)
+local checking_enabled = true directives.register("metapost.checktexts",function(v) checking_enabled = v end)
+
local function checktexts(str)
- found, forced = false, false
- return lpegmatch(parser,str), found, forced
+ if checking_enabled then
+ found, forced = false, false
+ return lpegmatch(parser,str), found, forced
+ else
+ return str
+ end
end
metapost.checktexts = checktexts
local factor = 65536*(7227/7200)
-function metapost.edefsxsy(wd,ht,dp) -- helper for figure
- local hd = ht + dp
- context.setvalue("sx",wd ~= 0 and factor/wd or 0)
- context.setvalue("sy",hd ~= 0 and factor/hd or 0)
-end
+-- function metapost.edefsxsy(wd,ht,dp) -- helper for figure
+-- local hd = ht + dp
+-- context_setvalue("sx",wd ~= 0 and factor/wd or 0)
+-- context_setvalue("sy",hd ~= 0 and factor/hd or 0)
+-- end
+
+implement {
+ name = "mpsetsxsy",
+ arguments = { "dimen", "dimen", "dimen" },
+ actions = function(wd,ht,dp)
+ local hd = ht + dp
+ setmacro("sx",wd ~= 0 and factor/wd or 0)
+ setmacro("sy",hd ~= 0 and factor/hd or 0)
+ end
+}
local function sxsy(wd,ht,dp) -- helper for text
local hd = ht + dp
return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
end
-local no_first_run = "mfun_first_run := false ;"
-local do_first_run = "mfun_first_run := true ;"
-local no_trial_run = "mfun_trial_run := false ;"
-local do_trial_run = "mfun_trial_run := true ;"
+-- for stock mp we need to declare the booleans first
+
+local no_first_run = "boolean mfun_first_run ; mfun_first_run := false ;"
+local do_first_run = "boolean mfun_first_run ; mfun_first_run := true ;"
+local no_trial_run = "boolean mfun_trial_run ; mfun_trial_run := false ;"
+local do_trial_run = "boolean mfun_trial_run ; mfun_trial_run := true ;"
local do_begin_fig = "; beginfig(1) ; "
local do_end_fig = "; endfig ;"
local do_safeguard = ";"
-local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
+-- local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
+--
+-- function metapost.textextsdata()
+-- local textexts = top.textexts
+-- local collected = { }
+-- local nofcollected = 0
+-- for k, data in sortedhash(top.texdata) do -- sort is nicer in trace
+-- local texorder = data.texorder
+-- for n=1,#texorder do
+-- local box = textexts[texorder[n]]
+-- if box then
+-- local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
+-- if trace_textexts then
+-- report_textexts("passed data item %s:%s > (%p,%p,%p)",k,n,wd,ht,dp)
+-- end
+-- nofcollected = nofcollected + 1
+-- collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
+-- else
+-- break
+-- end
+-- end
+-- end
+-- return collected
+-- end
function metapost.textextsdata()
- local texorder = top.texorder
- local textexts = top.textexts
- local collected = { }
- local nofcollected = 0
- for n=1,#texorder do
- local box = textexts[texorder[n]]
- if box then
- local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
- if trace_textexts then
- report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
+ local textexts = top.textexts
+ local collected = { }
+ for k, data in sortedhash(top.texdata) do -- sort is nicer in trace
+ local texorder = data.texorder
+ for n=1,#texorder do
+ local box = textexts[texorder[n]]
+ if box then
+ collected[n] = box
+ else
+ break
end
- nofcollected = nofcollected + 1
- collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
- else
- break
end
end
- return collected
+ mp.tt_initialize(collected)
end
+
metapost.intermediate = metapost.intermediate or { }
metapost.intermediate.actions = metapost.intermediate.actions or { }
@@ -552,16 +623,16 @@ local function extrapass()
if trace_runs then
report_metapost("second run of job %s, asked figure %a",top.nofruns,top.askedfig)
end
+ local textexts = metapost.textextsdata()
processmetapost(top.mpx, {
top.wrappit and do_begin_fig or "",
no_trial_run,
- concat(metapost.textextsdata()," ;\n"),
+ textexts and concat(textexts," ;\n") or "",
top.initializations,
do_safeguard,
top.data,
top.wrappit and do_end_fig or "",
}, false, nil, false, true, top.askedfig)
- -- context.MPLIBresettexts() -- must happen afterwards
end
function metapost.graphic_base_pass(specification) -- name will change (see mlib-ctx.lua)
@@ -585,17 +656,18 @@ function metapost.graphic_base_pass(specification) -- name will change (see mlib
top.nofruns = nofruns
--
local done_1, done_2, done_3, forced_1, forced_2, forced_3
- data, done_1, forced_1 = checktexts(data)
- -- we had preamble = extensions + inclusions
- if extensions == "" then
- extensions, done_2, forced_2 = "", false, false
- else
- extensions, done_2, forced_2 = checktexts(extensions)
- end
- if inclusions == "" then
- inclusions, done_3, forced_3 = "", false, false
- else
- inclusions, done_3, forced_3 = checktexts(inclusions)
+ if checking_enabled then
+ data, done_1, forced_1 = checktexts(data)
+ if extensions == "" then
+ extensions, done_2, forced_2 = "", false, false
+ else
+ extensions, done_2, forced_2 = checktexts(extensions)
+ end
+ if inclusions == "" then
+ inclusions, done_3, forced_3 = "", false, false
+ else
+ inclusions, done_3, forced_3 = checktexts(inclusions)
+ end
end
top.intermediate = false
top.multipass = false -- not needed here
@@ -680,6 +752,12 @@ function makempy.registerfile(filename)
mpyfilename = filename
end
+implement {
+ name = "registermpyfile",
+ actions = makempy.registerfile,
+ arguments = "string"
+}
+
function makempy.processgraphics(graphics)
if #graphics == 0 then
return
@@ -720,13 +798,13 @@ end
-- -- the new plugin handler -- --
-local sequencers = utilities.sequencers
-local appendgroup = sequencers.appendgroup
-local appendaction = sequencers.appendaction
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
-local resetter = nil
-local analyzer = nil
-local processor = nil
+local resetter = nil
+local analyzer = nil
+local processor = nil
local resetteractions = sequencers.new { arguments = "t" }
local analyzeractions = sequencers.new { arguments = "object,prescript" }
@@ -773,7 +851,7 @@ end
-- end
function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what
- if top.texmode then
+ if top.plugmode then -- hm, what about other features
for i=1,#what do
local wi = what[i]
if type(wi) == "function" then
@@ -790,7 +868,7 @@ function metapost.pluginactions(what,t,flushfigure) -- before/after object, depe
end
function metapost.resetplugins(t) -- initialize plugins, before figure
- if top.texmode then
+ if top.plugmode then
-- plugins can have been added
resetter = resetteractions.runner
analyzer = analyzeractions.runner
@@ -801,16 +879,18 @@ function metapost.resetplugins(t) -- intialize plugins, before figure
end
function metapost.analyzeplugins(object) -- each object (first pass)
- if top.texmode then
+ if top.plugmode then
local prescript = object.prescript -- specifications
if prescript and #prescript > 0 then
- return analyzer(object,splitprescript(prescript))
+ analyzer(object,splitprescript(prescript))
+ return top.multipass
end
end
+ return false
end
function metapost.processplugins(object) -- each object (second pass)
- if top.texmode then
+ if top.plugmode then
local prescript = object.prescript -- specifications
if prescript and #prescript > 0 then
local before = { }
@@ -854,19 +934,33 @@ end
local function tx_reset()
if top then
+ -- why ?
top.texhash = { }
top.texlast = 0
end
end
local fmt = formatters["%s %s %s % t"]
-local pat = tsplitat(":")
+----- pat = tsplitat(":")
+local pat = lpeg.tsplitter(":",tonumber) -- so that %F can do its work
+
+local f_gray_yes = formatters["s=%F,a=%F,t=%F"]
+local f_gray_nop = formatters["s=%F"]
+local f_rgb_yes = formatters["r=%F,g=%F,b=%F,a=%F,t=%F"]
+local f_rgb_nop = formatters["r=%F,g=%F,b=%F"]
+local f_cmyk_yes = formatters["c=%F,m=%F,y=%F,k=%F,a=%F,t=%F"]
+local f_cmyk_nop = formatters["c=%F,m=%F,y=%F,k=%F"]
+
+local ctx_MPLIBsetNtext = context.MPLIBsetNtext
+local ctx_MPLIBsetCtext = context.MPLIBsetCtext
+local ctx_MPLIBsettext = context.MPLIBsettext
local function tx_analyze(object,prescript) -- todo: hash content and reuse them
+ local data = top.texdata[metapost.properties.number]
local tx_stage = prescript.tx_stage
if tx_stage == "trial" then
- local tx_trial = top.textrial + 1
- top.textrial = tx_trial
+ local tx_trial = data.textrial + 1
+ data.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
local s = object.postscript or ""
local c = object.color -- only simple ones, no transparency
@@ -876,79 +970,80 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them
c = lpegmatch(pat,txc)
end
end
- local a = prescript.tr_alternative
- local t = prescript.tr_transparency
+ local a = tonumber(prescript.tr_alternative)
+ local t = tonumber(prescript.tr_transparency)
local h = fmt(tx_number,a or "-",t or "-",c or "-")
- local n = top.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
+ local n = data.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
if not n then
local tx_last = top.texlast + 1
top.texlast = tx_last
+ -- report_textexts("tex string: %s",s)
if not c then
- -- no color
+ ctx_MPLIBsetNtext(tx_last,s)
elseif #c == 1 then
if a and t then
- s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,f_gray_yes(c[1],a,t),s)
else
- s = formatters["\\directcolored[s=%f]%s"](c[1],s)
+ ctx_MPLIBsetCtext(tx_last,f_gray_nop(c[1]),s)
end
elseif #c == 3 then
if a and t then
- s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,f_rgb_yes(c[1],c[2],c[3],a,t),s)
else
- s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s)
+ ctx_MPLIBsetCtext(tx_last,f_rgb_nop(c[1],c[2],c[3]),s)
end
elseif #c == 4 then
if a and t then
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,f_cmyk_yes(c[1],c[2],c[3],c[4],a,t),s)
else
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s)
+ ctx_MPLIBsetCtext(tx_last,f_cmyk_nop(c[1],c[2],c[3],c[4]),s)
end
+ else
+ ctx_MPLIBsetNtext(tx_last,s)
end
- context.MPLIBsettext(tx_last,s)
top.multipass = true
- metapost.multipass = true -- ugly
- top.texhash[h] = tx_last
- top.texslots[tx_trial] = tx_last
- top.texorder[tx_number] = tx_last
+ data.texhash [h] = tx_last
+ data.texslots[tx_trial] = tx_last
+ data.texorder[tx_number] = tx_last
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,tx_last,h)
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a, text %a",tx_stage,tx_trial,tx_number,tx_last,h,s)
end
else
- top.texslots[tx_trial] = n
+ data.texslots[tx_trial] = n
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,n,h)
+ report_textexts("stage %a, usage %a, number %a, old %a, hash %a, text %a",tx_stage,tx_trial,tx_number,n,h,s)
end
end
elseif tx_stage == "extra" then
- local tx_trial = top.textrial + 1
- top.textrial = tx_trial
+ local tx_trial = data.textrial + 1
+ data.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
- if not top.texorder[tx_number] then
+ if not data.texorder[tx_number] then
local s = object.postscript or ""
local tx_last = top.texlast + 1
top.texlast = tx_last
- context.MPLIBsettext(tx_last,s)
+ ctx_MPLIBsettext(tx_last,s)
top.multipass = true
- metapost.multipass = true -- ugly
- top.texslots[tx_trial] = tx_last
- top.texorder[tx_number] = tx_last
+ data.texslots[tx_trial] = tx_last
+ data.texorder[tx_number] = tx_last
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,tx_trial,tx_number,tx_last)
+ report_textexts("stage %a, usage %a, number %a, extra %a, text %a",tx_stage,tx_trial,tx_number,tx_last,s)
end
end
end
end
local function tx_process(object,prescript,before,after)
- local tx_number = prescript.tx_number
+ local data = top.texdata[metapost.properties.number]
+ local tx_number = tonumber(prescript.tx_number)
if tx_number then
- tx_number = tonumber(tx_number)
local tx_stage = prescript.tx_stage
if tx_stage == "final" then
- top.texfinal = top.texfinal + 1
- local n = top.texslots[top.texfinal]
+ local tx_final = data.texfinal + 1
+ data.texfinal = tx_final
+ local n = data.texslots[tx_final]
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,top.texfinal,tx_number,n)
+ report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,tx_final,tx_number,n)
end
local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
local box = top.textexts[n]
@@ -956,12 +1051,12 @@ local function tx_process(object,prescript,before,after)
before[#before+1] = function()
-- flush always happens, we can have a special flush function injected before
context.MPLIBgettextscaledcm(n,
- format("%f",sx), -- bah ... %s no longer checks
- format("%f",rx), -- bah ... %s no longer checks
- format("%f",ry), -- bah ... %s no longer checks
- format("%f",sy), -- bah ... %s no longer checks
- format("%f",tx), -- bah ... %s no longer checks
- format("%f",ty), -- bah ... %s no longer checks
+ f_f(sx), -- bah ... %s no longer checks
+ f_f(rx), -- bah ... %s no longer checks
+ f_f(ry), -- bah ... %s no longer checks
+ f_f(sy), -- bah ... %s no longer checks
+ f_f(tx), -- bah ... %s no longer checks
+ f_f(ty), -- bah ... %s no longer checks
sxsy(box.width,box.height,box.depth))
end
else
@@ -972,8 +1067,9 @@ local function tx_process(object,prescript,before,after)
if not trace_textexts then
object.path = false -- else: keep it
end
- object.color = false
+ object.color = false
object.grouped = true
+ object.istext = true
end
end
end
@@ -996,7 +1092,6 @@ local function gt_analyze(object,prescript)
graphics[gt_index] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
top.intermediate = true
top.multipass = true
- metapost.multipass = true -- ugly
end
end
@@ -1012,9 +1107,9 @@ local function sh_process(object,prescript,before,after)
local sh_type = prescript.sh_type
if sh_type then
nofshades = nofshades + 1
- local domain = lpegmatch(domainsplitter,prescript.sh_domain)
- local centera = lpegmatch(centersplitter,prescript.sh_center_a)
- local centerb = lpegmatch(centersplitter,prescript.sh_center_b)
+ local domain = lpegmatch(domainsplitter,prescript.sh_domain or "0 1")
+ local centera = lpegmatch(centersplitter,prescript.sh_center_a or "0 0")
+ local centerb = lpegmatch(centersplitter,prescript.sh_center_b or "0 0")
--
local sh_color_a = prescript.sh_color_a or "1"
local sh_color_b = prescript.sh_color_b or "1"
@@ -1063,8 +1158,9 @@ local function sh_process(object,prescript,before,after)
local coordinates = { centera[1], centera[2], centerb[1], centerb[2] }
lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
elseif sh_type == "circular" then
- local radiusa = tonumber(prescript.sh_radius_a)
- local radiusb = tonumber(prescript.sh_radius_b)
+ local factor = tonumber(prescript.sh_factor) or 1
+ local radiusa = factor * tonumber(prescript.sh_radius_a)
+ local radiusb = factor * tonumber(prescript.sh_radius_b)
local coordinates = { centera[1], centera[2], radiusa, centerb[1], centerb[2], radiusb }
lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
else
@@ -1109,8 +1205,9 @@ local function ps_process(object,prescript,before,after)
local first, third = op[1], op[3]
local x, y = first.x_coord, first.y_coord
local w, h = third.x_coord - x, third.y_coord - y
- x = x - metapost.llx
- y = metapost.ury - y
+ local properties = metapost.properties
+ x = x - properties.llx
+ y = properties.ury - y
before[#before+1] = function()
context.MPLIBpositionwhd(ps_label,x,y,w,h)
end
@@ -1136,7 +1233,7 @@ end
-- color and transparency
local value = Cs ( (
- (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end
+ (Carg(1) * C((1-P(","))^1)) / function(a,b) return f_f3(a * tonumber(b)) end
+ P(","))^1
)
@@ -1145,6 +1242,12 @@ local value = Cs ( (
local t_list = attributes.list[attributes.private('transparency')]
local c_list = attributes.list[attributes.private('color')]
+local remappers = {
+ [1] = formatters["s=%s"],
+ [3] = formatters["r=%s,g=%s,b=%s"],
+ [4] = formatters["c=%s,m=%s,y=%s,k=%s"],
+}
+
local function tr_process(object,prescript,before,after)
-- before can be shortcut to t
local tr_alternative = prescript.tr_alternative
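-- Editor's sketch (not part of the patch): the remappers table above selects a
-- colorspec formatter by the number of components in a "c1:c2:..." value, the
-- form in which spot color values arrive from the mp end.

local components = split("0.2:0.4:0.8",":")
local remapper   = remappers[#components]
local colorspec  = remapper and remapper(unpack(components)) or "s=0"
-- colorspec is now "r=0.2,g=0.4,b=0.8", ready for defineprocesscolor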
@@ -1160,56 +1263,83 @@ local function tr_process(object,prescript,before,after)
local sp_type = prescript.sp_type
if not sp_type then
c_b, c_a = colorconverter(cs)
- elseif sp_type == "spot" or sp_type == "multitone" then
- local sp_name = prescript.sp_name or "black"
- local sp_fractions = prescript.sp_fractions or 1
- local sp_components = prescript.sp_components or ""
- local sp_value = prescript.sp_value or "1"
- local cf = cs[1]
- if cf ~= 1 then
- -- beware, we do scale the spotcolors but not the alternative representation
- sp_value = lpegmatch(value,sp_value,1,cf) or sp_value
- end
- c_b, c_a = spotcolorconverter(sp_name,sp_fractions,sp_components,sp_value)
- elseif sp_type == "named" then
- -- we might move this to another namespace .. also, named can be a spotcolor
- -- so we need to check for that too ... also we need to resolve indirect
- -- colors so we might need the second pass for this (draw dots with \MPcolor)
+ else
local sp_name = prescript.sp_name or "black"
- if not tr_alternative then
- -- todo: sp_name is not yet registered at this time
- local t = t_list[sp_name] -- string or attribute
- local v = t and attributes.transparencies.value(t)
- if v then
- before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
- after[#after+1] = "/Tr0 gs" -- outertransparency
+ if sp_type == "spot" then
+ local sp_value = prescript.sp_value or "s:1"
+ local sp_temp = formatters["mp:%s"](sp_value)
+ local s = split(sp_value,":")
+ local r = remappers[#s]
+ defineprocesscolor(sp_temp,r and r(unpack(s)) or "s=0",true,true)
+ definespotcolor(sp_name,sp_temp,"p=1",true)
+ sp_type = "named"
+ elseif sp_type == "multitone" then
+ local sp_value = prescript.sp_value or "s:1"
+ local sp_spec = { }
+ local sp_list = split(sp_value," ")
+ for i=1,#sp_list do
+ local v = sp_list[i]
+ local t = formatters["mp:%s"](v)
+ local s = split(v,":")
+ local r = remappers[#s]
+ defineprocesscolor(t,r and r(unpack(s)) or "s=0",true,true)
+ local tt = formatters["ms:%s"](v)
+ definespotcolor(tt,t,"p=1",true)
+ sp_spec[#sp_spec+1] = formatters["%s=1"](t)
end
+ sp_spec = concat(sp_spec,",")
+ definemultitonecolor(sp_name,sp_spec,"","",true)
+ sp_type = "named"
end
- local c = c_list[sp_name] -- string or attribute
- local v = c and attributes.colors.value(c)
- if v then
- -- all=1 gray=2 rgb=3 cmyk=4
- local colorspace = v[1]
- local f = cs[1]
- if colorspace == 2 then
- local s = f*v[2]
- c_b, c_a = checked_color_pair(f_gray,s,s)
- elseif colorspace == 3 then
- local r, g, b = f*v[3], f*v[4], f*v[5]
- c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
- elseif colorspace == 4 or colorspace == 1 then
- local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
- c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- else
- local s = f*v[2]
- c_b, c_a = checked_color_pair(f_gray,s,s)
+ if sp_type == "named" then
+ -- we might move this to another namespace .. also, named can be a spotcolor
+ -- so we need to check for that too ... also we need to resolve indirect
+ -- colors so we might need the second pass for this (draw dots with \MPcolor)
+ if not tr_alternative then
+ -- todo: sp_name is not yet registered at this time
+ local t = t_list[sp_name] -- string or attribute
+ local v = t and transparencyvalue(t)
+ if v then
+ before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
+ after[#after+1] = "/Tr0 gs" -- outertransparency
+ end
+ end
+ local c = c_list[sp_name] -- string or attribute
+ local v = c and colorvalue(c)
+ if v then
+ -- all=1 gray=2 rgb=3 cmyk=4
+ local colorspace = v[1]
+ local f = cs[1]
+ if colorspace == 2 then
+ local s = f*v[2]
+ c_b, c_a = checked_color_pair(f_gray,s,s)
+ elseif colorspace == 3 then
+ local r, g, b = f*v[3], f*v[4], f*v[5]
+ c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
+ elseif colorspace == 4 or colorspace == 1 then
+ local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
+ c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ elseif colorspace == 5 then
+ -- not all viewers show the fractions ok
+ local name = v[10]
+ local value = split(v[13],",")
+ if f ~= 1 then
+ for i=1,#value do
+ value[i] = f * (tonumber(value[i]) or 1)
+ end
+ end
+ value = concat(value," ")
+ c_b, c_a = checked_color_pair(f_spot,name,name,value,value)
+ else
+ local s = f*v[2]
+ c_b, c_a = checked_color_pair(f_gray,s,s)
+ end
end
end
- --
end
if c_a and c_b then
before[#before+1] = c_b
- after[#after+1] = c_a
+ after [#after +1] = c_a
end
end
end
diff --git a/tex/context/base/mlib-pps.mkiv b/tex/context/base/mlib-pps.mkiv
index e16827585..07ac84b8d 100644
--- a/tex/context/base/mlib-pps.mkiv
+++ b/tex/context/base/mlib-pps.mkiv
@@ -33,55 +33,43 @@
\newbox \MPtextbox
\newtoks\everyMPLIBsettext % not used
-% \def\MPLIBsettext#1% #2%
-% {\dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
-%
-% \def\MPLIBresettexts
-% {\ctxlua{metapost.resettextexts()}}
-%
-% \newconditional\MPLIBtextgetdone
-%
-% \def\MPLIBsettext#1% #2%
-% {\ifconditional\MPLIBtextgetdone
-% \else
-% \cldcontext{metapost.tex.get()}% MPenvironments are depricated
-% \settrue\MPLIBtextgetdone % no \global needed
-% \fi
-% \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
-%
-% \def\MPLIBresettexts
-% {\ctxlua{metapost.resettextexts()}%
-% \setfalse\MPLIBtextgetdone}
-
\def\doMPLIBflushenvironment
{%\writestatus\m!metapost{flushing environment}%
- \cldcontext{metapost.tex.get()}%
+ \clf_mptexget
\let\MPLIBflushenvironment\relax}% MPenvironments are depricated}
\let\MPLIBflushenvironment\relax}% MPenvironments are deprecated}
-\def\MPLIBsettext#1% #2%
+\unexpanded\def\MPLIBsetNtext#1% #2% box text
+ {\MPLIBflushenvironment
+ \dowithnextbox{\clf_mpsettext\nextbox #1}\hbox\bgroup
+ \meta_set_current_color
+ \let\MPLIBflushenvironment\doMPLIBflushenvironment
+ \let\next} % gobble open brace
+
+\unexpanded\def\MPLIBsetCtext#1#2% #3% box colorspec text
{\MPLIBflushenvironment
- \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox\bgroup
+ \dowithnextbox{\clf_mpsettext\nextbox #1}\hbox\bgroup
+ \directcolored[#2]%
+ \meta_set_current_color % so, textcolor wins !
\let\MPLIBflushenvironment\doMPLIBflushenvironment
\let\next} % gobble open brace
-\def\MPLIBresettexts
- {\ctxlua{metapost.resettextexts()}}
+\let\MPLIBsettext\MPLIBsetNtext
-\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
- {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
+\unexpanded\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
+ {\clf_mpgettext\MPtextbox #1%
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=#2,\c!sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\forcecolorhack\hss}}}
-\def\MPLIBfigure#1#2%
+\unexpanded\def\MPLIBfigure#1#2%
{\setbox\scratchbox\hbox{\externalfigure[#1][\c!mask=#2]}%
- \ctxlua{metapost.edefsxsy(\number\wd\scratchbox,\number\ht\scratchbox,0)}%
+ \clf_mpsetsxsy\wd\scratchbox\ht\scratchbox\zeropoint
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=\sx,\c!sy=\sy]{\box\scratchbox}\hss}}}
% horrible (we could inline scale and matrix code):
-\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
- {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
+\unexpanded\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
+ {\clf_mpgettext\MPtextbox #1%
\setbox\MPbox\hbox\bgroup
\dotransformnextbox{#2}{#3}{#4}{#5}{#6}{#7}% does push pop ... will be changed to proper lua call (avoid small numbers)
\vbox to \zeropoint\bgroup
@@ -103,7 +91,7 @@
\smashbox\MPbox
\box\MPbox}
-\def\MPLIBgraphictext#1% use at mp end
+\unexpanded\def\MPLIBgraphictext#1% use at mp end
{\startTEXpage[\c!scale=10000]#1\stopTEXpage}
%D \startbuffer
@@ -132,7 +120,7 @@
%D
%D \typebuffer \startlinecorrection \getbuffer \stoplinecorrection
-\def\MPLIBpositionwhd#1#2#3#4#5% bp !
+\unexpanded\def\MPLIBpositionwhd#1#2#3#4#5% bp !
{\dosavepositionwhd{#1}\zerocount{#2\onebasepoint}{#3\onebasepoint}{#4\onebasepoint}{#5\onebasepoint}\zeropoint}
% \def\MPLIBextrapass#1%
@@ -158,9 +146,9 @@
\box\scratchbox
\endgroup}
-\def\MPLIBstartgroup#1#2#3#4#5#6% isolated 0/1, knockout 0/1 llx lly urx ury
+\unexpanded\def\MPLIBstartgroup#1#2#3#4#5#6% isolated 0/1, knockout 0/1 llx lly urx ury
{\begingroup
\setbox\scratchbox\hbox\bgroup
- \def\MPLIBstopgroup{\doMPLIBstopgroup{#1}{#2}{#3}{#4}{#5}{#6}}}
+ \unexpanded\def\MPLIBstopgroup{\doMPLIBstopgroup{#1}{#2}{#3}{#4}{#5}{#6}}}
\protect \endinput
diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua
index f30ed0c9f..121c32ae9 100644
--- a/tex/context/base/mlib-run.lua
+++ b/tex/context/base/mlib-run.lua
@@ -44,12 +44,12 @@ local texerrormessage = logs.texerrormessage
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
+local formatters = string.formatters
+
local mplib = mplib
metapost = metapost or { }
local metapost = metapost
-local mplibone = tonumber(mplib.version()) <= 1.50
-
metapost.showlog = false
metapost.lastlog = ""
metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing
@@ -84,77 +84,74 @@ local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("bas
-- mplib has no real io interface so we have a different mechanism than
-- tex (as soon as we have more control, we will use the normal code)
-
-local finders = { }
-mplib.finders = finders
-
+--
-- for some reason mp sometimes calls this function twice which is inefficient
-- but we cannot catch this
-local function preprocessed(name)
- if not mpbasepath(name) then
- -- we could use the via file but we don't have a complete io interface yet
- local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
- if found then
- local temp = luatex.registertempfile(name,true)
- io.savedata(temp,data)
- return temp
+do
+
+ local finders = { }
+ mplib.finders = finders -- also used in meta-lua.lua
+
+ local new_instance = mplib.new
+ local resolved_file = resolvers.findfile
+
+ local function preprocessed(name)
+ if not mpbasepath(name) then
+ -- we could use the via file but we don't have a complete io interface yet
+ local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
+ if found then
+ local temp = luatex.registertempfile(name,true)
+ io.savedata(temp,data)
+ return temp
+ end
end
+ return name
end
- return name
-end
-mplib.preprocessed = preprocessed -- helper
+ mplib.preprocessed = preprocessed -- helper
-finders.file = function(specification,name,mode,ftype)
- return preprocessed(resolvers.findfile(name,ftype))
-end
+ local function validftype(ftype)
+ if ftype == "" then
+ -- whatever
+ elseif ftype == 0 then
+ -- mplib bug
+ else
+ return ftype
+ end
+ end
-local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
- local specification = url.hashed(name)
- local finder = finders[specification.scheme] or finders.file
- return finder(specification,name,mode,ftype)
-end
+ finders.file = function(specification,name,mode,ftype)
+ return preprocessed(resolvers.findfile(name,validftype(ftype)))
+ end
-local function o_finder(name,mode,ftype)
- -- report_metapost("output file %a, mode %a, ftype %a",name,mode,ftype)
- return name
-end
+ local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
+ local specification = url.hashed(name)
+ local finder = finders[specification.scheme] or finders.file
+ return finder(specification,name,mode,validftype(ftype))
+ end
-local function finder(name, mode, ftype)
- if mode == "w" then
- return o_finder(name,mode,ftype)
- else
- return i_finder(name,mode,ftype)
+ local function o_finder(name,mode,ftype)
+ return name
end
-end
-local i_limited = false
-local o_limited = false
+ o_finder = sandbox.register(o_finder,sandbox.filehandlerone,"mplib output finder")
-directives.register("system.inputmode", function(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- i_finder = i_limiter.protect(i_finder)
- i_limited = true
- end
+ local function finder(name,mode,ftype)
+ return (mode == "w" and o_finder or i_finder)(name,mode,validftype(ftype))
end
-end)
-
-directives.register("system.outputmode", function(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- o_finder = o_limiter.protect(o_finder)
- o_limited = true
- end
+
+ function mplib.new(specification)
+ specification.find_file = finder -- so we block an overload
+ return new_instance(specification)
end
-end)
--- -- --
+ mplib.finder = finder
+
+end
-metapost.finder = finder
+local new_instance = mplib.new
+local find_file = mplib.finder
function metapost.reporterror(result)
if not result then
@@ -182,173 +179,93 @@ function metapost.reporterror(result)
return true
end
-if mplibone then
-
- report_metapost("fatal error: mplib is too old")
-
- os.exit()
-
- -- local preamble = [[
- -- boolean mplib ; mplib := true ;
- -- string mp_parent_version ; mp_parent_version := "%s" ;
- -- input "%s" ; dump ;
- -- ]]
- --
- -- metapost.parameters = {
- -- hash_size = 100000,
- -- main_memory = 4000000,
- -- max_in_open = 50,
- -- param_size = 100000,
- -- }
- --
- -- function metapost.make(name, target, version)
- -- starttiming(mplib)
- -- target = file.replacesuffix(target or name, "mem") -- redundant
- -- local mpx = mplib.new ( table.merged (
- -- metapost.parameters,
- -- {
- -- ini_version = true,
- -- find_file = finder,
- -- job_name = file.removesuffix(target),
- -- }
- -- ) )
- -- if mpx then
- -- starttiming(metapost.exectime)
- -- local result = mpx:execute(format(preamble,version or "unknown",name))
- -- stoptiming(metapost.exectime)
- -- mpx:finish()
- -- end
- -- stoptiming(mplib)
- -- end
- --
- -- function metapost.load(name)
- -- starttiming(mplib)
- -- local mpx = mplib.new ( table.merged (
- -- metapost.parameters,
- -- {
- -- ini_version = false,
- -- mem_name = file.replacesuffix(name,"mem"),
- -- find_file = finder,
- -- -- job_name = "mplib",
- -- }
- -- ) )
- -- local result
- -- if not mpx then
- -- result = { status = 99, error = "out of memory"}
- -- end
- -- stoptiming(mplib)
- -- return mpx, result
- -- end
- --
- -- function metapost.checkformat(mpsinput)
- -- local mpsversion = environment.version or "unset version"
- -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
- -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
- -- local mpsbase = file.removesuffix(file.basename(mpsinput))
- -- if mpsbase ~= mpsformat then
- -- mpsformat = mpsformat .. "-" .. mpsbase
- -- end
- -- mpsformat = file.addsuffix(mpsformat, "mem")
- -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or ""
- -- if mpsformatfullname ~= "" then
- -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
- -- local mpx, result = metapost.load(mpsformatfullname)
- -- if mpx then
- -- local result = mpx:execute("show mp_parent_version ;")
- -- if not result.log then
- -- metapost.reporterror(result)
- -- else
- -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
- -- version = gsub(version,"[\'\"]","")
- -- if version ~= mpsversion then
- -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion)
- -- else
- -- return mpx
- -- end
- -- end
- -- else
- -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname)
- -- metapost.reporterror(result)
- -- end
- -- end
- -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
- -- report_metapost("making %a into %a", mpsinput, mpsformatfullname)
- -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
- -- if lfs.isfile(mpsformatfullname) then
- -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
- -- return metapost.load(mpsformatfullname)
- -- else
- -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname)
- -- end
- -- end
-
-else
-
- -- let end = relax ;
-
- local preamble = [[
- boolean mplib ; mplib := true ;
- let dump = endinput ;
- input "%s" ;
- ]]
-
- local methods = {
- double = "double",
- scaled = "scaled",
- default = "scaled",
- decimal = false, -- for the moment
- }
+local f_preamble = formatters [ [[
+ boolean mplib ; mplib := true ;
+ let dump = endinput ;
+ input "%s" ;
+]] ]
+
+local methods = {
+ double = "double",
+ scaled = "scaled",
+ binary = "binary",
+ decimal = "decimal",
+ default = "scaled",
+}
- function metapost.load(name,method)
- starttiming(mplib)
- method = method and methods[method] or "scaled"
- local mpx = mplib.new {
- ini_version = true,
- find_file = finder,
- math_mode = method,
- }
- report_metapost("initializing number mode %a",method)
- local result
- if not mpx then
- result = { status = 99, error = "out of memory"}
- else
- result = mpx:execute(format(preamble, file.addsuffix(name,"mp"))) -- addsuffix is redundant
- end
- stoptiming(mplib)
- metapost.reporterror(result)
- return mpx, result
+function metapost.runscript(code)
+ return code
+end
+
+function metapost.scripterror(str)
+ report_metapost("script error: %s",str)
+end
+
+-- todo: random_seed
+
+local f_textext = formatters[ [[rawtextext("%s")]] ]
+
+function metapost.maketext(s,mode)
+ if mode and mode == 1 then
+ -- report_metapost("ignoring verbatimtex: %s",s)
+ else
+ -- report_metapost("handling btex ... etex: %s",s)
+ s = gsub(s,'"','"&ditto&"')
+ return f_textext(s)
end
+end
- function metapost.checkformat(mpsinput,method)
- local mpsversion = environment.version or "unset version"
- local mpsinput = mpsinput or "metafun"
- local foundfile = ""
- if file.suffix(mpsinput) ~= "" then
- foundfile = finder(mpsinput) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mpvi")) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mpiv")) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mp")) or ""
- end
- if foundfile == "" then
- report_metapost("loading %a fails, format not found",mpsinput)
+function metapost.load(name,method)
+ starttiming(mplib)
+ method = method and methods[method] or "scaled"
+ local mpx = new_instance {
+ ini_version = true,
+ math_mode = method,
+ run_script = metapost.runscript,
+ script_error = metapost.scripterror,
+ make_text = metapost.maketext,
+ extensions = 1,
+ }
+ report_metapost("initializing number mode %a",method)
+ local result
+ if not mpx then
+ result = { status = 99, error = "out of memory"}
+ else
+ result = mpx:execute(f_preamble(file.addsuffix(name,"mp"))) -- addsuffix is redundant
+ end
+ stoptiming(mplib)
+ metapost.reporterror(result)
+ return mpx, result
+end
+
+function metapost.checkformat(mpsinput,method)
+ local mpsversion = environment.version or "unset version"
+ local mpsinput = mpsinput or "metafun"
+ local foundfile = ""
+ if file.suffix(mpsinput) ~= "" then
+ foundfile = find_file(mpsinput) or ""
+ end
+ if foundfile == "" then
+ foundfile = find_file(file.replacesuffix(mpsinput,"mpvi")) or ""
+ end
+ if foundfile == "" then
+ foundfile = find_file(file.replacesuffix(mpsinput,"mpiv")) or ""
+ end
+ if foundfile == "" then
+ foundfile = find_file(file.replacesuffix(mpsinput,"mp")) or ""
+ end
+ if foundfile == "" then
+ report_metapost("loading %a fails, format not found",mpsinput)
+ else
+ report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
+ local mpx, result = metapost.load(foundfile,method)
+ if mpx then
+ return mpx
else
- report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
- local mpx, result = metapost.load(foundfile,method)
- if mpx then
- return mpx
- else
- report_metapost("error in loading %a",mpsinput)
- metapost.reporterror(result)
- end
+ report_metapost("error in loading %a",mpsinput)
+ metapost.reporterror(result)
end
end
-
end
function metapost.unload(mpx)
@@ -398,27 +315,39 @@ function metapost.reset(mpx)
end
end
-local mp_inp, mp_log, mp_tag = { }, { }, 0
+local mp_tra = { }
+local mp_tag = 0
-- key/values
+if not metapost.initializescriptrunner then
+ function metapost.initializescriptrunner() end
+end
+
function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig)
local converted, result = false, { }
if type(mpx) == "string" then
mpx = metapost.format(mpx) -- goody
end
if mpx and data then
+ local tra = nil
starttiming(metapost)
+ metapost.initializescriptrunner(mpx,trialrun)
if trace_graphics then
- if not mp_inp[mpx] then
+ tra = mp_tra[mpx]
+ if not tra then
mp_tag = mp_tag + 1
local jobname = tex.jobname
- mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w")
- mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w")
+ tra = {
+ inp = io.open(formatters["%s-mplib-run-%03i.mp"] (jobname,mp_tag),"w"),
+ log = io.open(formatters["%s-mplib-run-%03i.log"](jobname,mp_tag),"w"),
+ }
+ mp_tra[mpx] = tra
end
- local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
- mp_inp[mpx]:write(banner)
- mp_log[mpx]:write(banner)
+ local banner = formatters["%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n"](
+ metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
+ tra.inp:write(banner)
+ tra.log:write(banner)
end
if type(data) == "table" then
-- this hack is needed because the library currently barks on \n\n
@@ -455,17 +384,17 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
-- d = string.gsub(d,"\r","")
if d then
if trace_graphics then
- mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i))
- mp_inp[mpx]:write(d)
- mp_inp[mpx]:write(format("\n%% end snippet %s\n",i))
+ tra.inp:write(formatters["\n%% begin snippet %s\n"](i))
+ tra.inp:write(d)
+ tra.inp:write(formatters["\n%% end snippet %s\n"](i))
end
starttiming(metapost.exectime)
- result = mpx:execute(d)
+            result = mpx:execute(d) -- some day we will use a coroutine with textexts
stoptiming(metapost.exectime)
if trace_graphics and result then
local str = result.log or result.error
if str and str ~= "" then
- mp_log[mpx]:write(str)
+ tra.log:write(str)
end
end
if not metapost.reporterror(result) then
@@ -489,7 +418,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
data = "tracingall;" .. data
end
if trace_graphics then
- mp_inp[mpx]:write(data)
+ tra.inp:write(data)
end
starttiming(metapost.exectime)
result = mpx:execute(data)
@@ -497,7 +426,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
if trace_graphics and result then
local str = result.log or result.error
if str and str ~= "" then
- mp_log[mpx]:write(str)
+ tra.log:write(str)
end
end
-- todo: error message
@@ -517,8 +446,8 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
if trace_graphics then
local banner = "\n% end graphic\n\n"
- mp_inp[mpx]:write(banner)
- mp_log[mpx]:write(banner)
+ tra.inp:write(banner)
+ tra.log:write(banner)
end
stoptiming(metapost)
end
@@ -580,7 +509,7 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
else
output = figures[v]:svg() -- (3) for prologues
end
- local outname = format("%s-%s.%s",basename,v,outputformat)
+ local outname = formatters["%s-%s.%s"](basename,v,outputformat)
report_metapost("saving %s bytes in %a",#output,outname)
io.savedata(outname,output)
end
@@ -613,7 +542,7 @@ function metapost.quickanddirty(mpxformat,data)
stopfigure = function()
end
}
- local data = format("; beginfig(1) ;\n %s\n ; endfig ;",data)
+ local data = formatters["; beginfig(1) ;\n %s\n ; endfig ;"](data)
metapost.process(mpxformat, { data }, false, flusher, false, false, "all")
if code then
return {
@@ -625,3 +554,20 @@ function metapost.quickanddirty(mpxformat,data)
report_metapost("invalid quick and dirty run")
end
end
+
+function metapost.getstatistics(memonly)
+ if memonly then
+ local n, m = 0, 0
+ for name, mpx in next, mpxformats do
+ n = n + 1
+ m = m + mpx:statistics().memory
+ end
+ return n, m
+ else
+ local t = { }
+ for name, mpx in next, mpxformats do
+ t[name] = mpx:statistics()
+ end
+ return t
+ end
+end
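
The hunk above replaces the old mem/format based loader with an ini_version run that hooks in
runscript, scripterror and maketext. Purely as an illustration (not part of the patch), a minimal
Lua sketch of driving it; the format name, the number method and the one-line figure are assumed
values, while the function names and signatures are the ones shown in the hunks above:

    local mpx = metapost.checkformat("metafun","scaled")           -- tries metafun.mpvi / .mpiv / .mp
    if mpx then
        metapost.quickanddirty(mpx,"draw fullcircle scaled 1cm ;") -- wrapped in beginfig(1) ... endfig
        print(metapost.getstatistics(true))                        -- (n, memory) for cached instances; may be zero here
    end
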
diff --git a/tex/context/base/mtx-context-arrange.tex b/tex/context/base/mtx-context-arrange.tex
index 49920293f..fb53406d8 100644
--- a/tex/context/base/mtx-context-arrange.tex
+++ b/tex/context/base/mtx-context-arrange.tex
@@ -27,10 +27,11 @@
% --printformat : 2UP, etc
% --paperformat=spec : paper*print or paperxprint
%
-% example: context --extra=arrange --printformat=2UP --paperformat=A4,A3,landscape myfile
+% example: context --extra=arrange --printformat=2UP --paperformat=A4*A3,landscape myfile
%
% end help
+
\input mtx-context-common.tex
\doifdocumentargument {paperoffset} {
@@ -46,6 +47,7 @@
\setdocumentargument{sided}{singlesided}
}
+
\setuppapersize
[\getdocumentargument{paperformat_paper}]
[\getdocumentargument{paperformat_print}]
@@ -95,16 +97,17 @@
\starttext
\startluacode
- local format = string.format
- local fprint = function(...) tex.sprint(tex.ctxcatcodes,format(...)) end
-
- if #document.files > 0 then
- if document.arguments.sort then
- table.sort(document.files)
+ local arguments = document.arguments
+ local files = document.files
+ local noffiles = #files
+ if noffiles > 0 then
+ if arguments.sort then
+ table.sort(files)
end
- local emptypages = document.arguments.addempty or ""
- local textwidth = document.arguments.textwidth or "0cm"
- for _, filename in ipairs(document.files) do
+ local emptypages = arguments.addempty or ""
+ local textwidth = arguments.textwidth or "0cm"
+ for i=1,noffiles do
+ local filename = files[i]
if not string.find(filename,"^mtx%-context%-") then
context.insertpages (
{ filename },
@@ -114,7 +117,7 @@
end
end
else
- fprint("no files given")
+ context("no files given")
end
\stopluacode
diff --git a/tex/context/base/mtx-context-listing.tex b/tex/context/base/mtx-context-listing.tex
index d69db9934..583aa2b8f 100644
--- a/tex/context/base/mtx-context-listing.tex
+++ b/tex/context/base/mtx-context-listing.tex
@@ -20,16 +20,24 @@
% --sort : sort filenames first
% --topspace=dimension : distance above first line
% --backspace=dimension : distance before left margin
-% --pretty : pretty print comform suffix (temporarily disabled)
+% --pretty                : pretty print according to the file suffix
+% --scite                 : pretty print according to the file suffix using the scite lexer
% --bodyfont=list : additional bodyfont settings
% --paperformat=spec : paper*print or paperxprint
+% --compact : small margins, small font
%
% end help
\input mtx-context-common.tex
+\doifdocumentargument {compact} {
+ \setdocumentargument{topspace} {5mm}
+ \setdocumentargument{backspace}{5mm}
+ \setdocumentargument{bodyfont} {8pt}
+}
+
\setupbodyfont
- [11pt,tt,\getdocumentargument{bodyfont}]
+ [dejavu,11pt,tt,\getdocumentargument{bodyfont}] % dejavu is more complete
\setuptyping
[lines=yes]
@@ -70,16 +78,22 @@
}
local pattern = document.arguments.pattern
+ local scite = document.arguments.scite
if pattern then
document.files = dir.glob(pattern)
end
+ if scite then
+ context.usemodule { "scite" }
+ end
+
if #document.files > 0 then
if document.arguments.sort then
table.sort(document.files)
end
- for _, filename in ipairs(document.files) do
+ for i=1,#document.files do
+ local filename = document.files[i]
if not string.find(filename,"^mtx%-context%-") then
local pretty = document.arguments.pretty
if pretty == true then
@@ -94,19 +108,24 @@
{ function() context.detokenize(pattern and filename or file.basename(filename)) return true end },
{ function() context.pagenumber() return true end }
)
- if pretty then
+ if scite then
+ context.scitefile { filename } -- here { }
+ elseif pretty then
if type(pretty) ~= "string" or pretty == "" then
context.setuptyping { option = "color" }
else
context.setuptyping { option = types[pretty] or pretty }
end
+ context.typefile(filename)
+ else
+ context.typefile(filename)
end
- context.typefile(filename)
end
end
else
context("no files given")
end
+
\stopluacode
\stoptext
diff --git a/tex/context/base/mtx-context-precache.tex b/tex/context/base/mtx-context-precache.tex
new file mode 100644
index 000000000..9cbb46cf2
--- /dev/null
+++ b/tex/context/base/mtx-context-precache.tex
@@ -0,0 +1,161 @@
+%D \module
+%D [ file=mtx-context-precache,
+%D version=2014.12.24,
+%D          title=\CONTEXT\ Extra Trickery,
+%D subtitle=Precaching Fonts,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% begin help
+%
+% usage: context --extra=precache [no options yet]
+%
+% example: context --extra=precache
+%
+% end help
+
+\startluacode
+
+local lower = string.lower
+local filesuffix = file.suffix
+local findfile = resolvers.find_file
+
+local report = logs.reporter("fonts","precache")
+
+function fonts.names.precache()
+ local handlers = fonts.handlers
+ if not handlers then
+ report("no handlers available")
+ return
+ end
+ local otfloader = handlers.otf and handlers.otf.load
+ local afmloader = handlers.afm and handlers.afm.load
+ if not (otfloader or afmloader) then
+ report("no otf or afm handler available")
+ return
+ end
+ fonts.names.load()
+ local data = fonts.names.data
+ if not data then
+ report("no font data available")
+ return
+ end
+ local specifications = data.specifications
+ if not specifications then
+ report("no font specifications available")
+ return
+ end
+ local n = 0
+ for i=1,#specifications do
+ local specification = specifications[i]
+ local filename = specification.filename
+ local cleanfilename = specification.cleanfilename
+ local foundfile = findfile(filename)
+ if foundfile and foundfile ~= "" then
+ local suffix = lower(filesuffix(foundfile))
+ if suffix == "otf" or suffix == "ttf" then
+ if otfloader then
+ report("caching otf file: %s",foundfile)
+ otfloader(foundfile) -- todo: ttc/sub
+ n = n + 1
+ end
+ elseif suffix == "afm" then
+ if afmloader then
+ report("caching afm file: %s",foundfile)
+ afmloader(foundfile)
+ n = n + 1
+ end
+ end
+ end
+ end
+ report("%s files out of %s cached",n,#specifications)
+end
+
+\stopluacode
+
+\starttext
+
+\setuppapersize
+ [A4,landscape]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ footer=0pt,
+ header=1cm,
+ headerdistance=0cm,
+ backspace=5mm,
+ topspace=5mm]
+
+\setupbodyfont
+ [dejavu,6pt,tt]
+
+\startmode[*first]
+ \startluacode
+ fonts.names.precache()
+ \stopluacode
+\stopmode
+
+\startluacode
+ fonts.names.load()
+
+ local specifications = fonts.names.data.specifications
+
+ local sorted = { }
+ local hashed = { }
+
+ for i=1,#specifications do
+ local filename = specifications[i].cleanfilename
+ sorted[i] = filename
+ hashed[filename] = i
+ end
+
+ table.sort(sorted)
+
+ local context = context
+ local basename = file.basename
+
+ local NC = context.NC
+ local NR = context.NR
+ local HL = context.HL
+ local bold = context.bold
+
+ context.starttabulate { "||||||||||" }
+ HL()
+ NC() bold("format")
+ NC() bold("cleanfilename")
+ NC() bold("filename")
+ -- NC() bold("familyname")
+ -- NC() bold("fontname")
+ NC() bold("fullname")
+ NC() bold("rawname")
+ NC() bold("style")
+ NC() bold("variant")
+ NC() bold("weight")
+ NC() bold("width")
+ NC() NR()
+ HL()
+ for i=1,#sorted do
+ local specification = specifications[hashed[sorted[i]]]
+ NC() context(specification.format)
+ NC() context(specification.cleanfilename)
+ NC() context(basename(specification.filename))
+ -- NC() context(specification.familyname)
+ -- NC() context(specification.fontname)
+ NC() context(specification.fullname)
+ NC() context(specification.rawname)
+ NC() context(specification.style)
+ NC() context(specification.variant)
+ NC() context(specification.weight)
+ NC() context(specification.width)
+ NC() NR()
+ end
+ context.stoptabulate()
+\stopluacode
+
+\stoptext
diff --git a/tex/context/base/mult-aux.lua b/tex/context/base/mult-aux.lua
index bdc626d4c..353b5e69c 100644
--- a/tex/context/base/mult-aux.lua
+++ b/tex/context/base/mult-aux.lua
@@ -54,7 +54,7 @@ function namespaces.define(namespace,settings)
if trace_namespaces then
report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent)
end
- if not find(parent,"\\") then
+ if not find(parent,"\\",1,true) then
parent = "\\" .. prefix .. parent
-- todo: check if defined
end
@@ -154,3 +154,15 @@ function namespaces.list()
local keys = { "type", "name", "comment", "version", "parent", "definition", "setup", "style" }
utilities.formatters.list(data,"namespace",keys)
end
+
+
+interfaces.implement {
+ name = "definenamespace",
+ arguments = { "string", "string" },
+ actions = namespaces.define
+}
+
+interfaces.implement {
+ name = "listnamespaces",
+ actions = namespaces.list
+}
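
These implement blocks follow the pattern used throughout this commit: a Lua function is registered
under a name and then becomes available at the TeX end as a \clf_... call (the mult-aux.mkiv hunk
below switches \definenamespace and \listnamespaces to exactly these). As a hedged sketch, a
hypothetical extra helper would be wired up the same way; the name and body below are made up for
illustration only:

    local report = logs.reporter("interface","namespaces")

    interfaces.implement {
        name      = "tracenamespace",   -- hypothetical, not part of the patch
        arguments = { "string" },
        actions   = function(name) report("tracing %a",name) end,
    }

    -- at the TeX end this would then surface as \clf_tracenamespace{...}
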
diff --git a/tex/context/base/mult-aux.mkiv b/tex/context/base/mult-aux.mkiv
index 6c44a0ec9..b69d7f370 100644
--- a/tex/context/base/mult-aux.mkiv
+++ b/tex/context/base/mult-aux.mkiv
@@ -106,10 +106,14 @@
\doubleexpandafter\gobbleoneargument
\else
\mult_interfaces_get_parameters_assign#1==\empty\_e_o_p_
- \doubleexpandafter\mult_interfaces_get_parameters_item
+ % \doubleexpandafter\mult_interfaces_get_parameters_item % saves skipping when at end
\fi\fi#2}
-\def\mult_interfaces_get_parameters_error#1#2#3%
+\def\mult_interfaces_get_parameters_error#1#2% #3%
+ {\mult_interfaces_get_parameters_error_indeed{#1}{#2}%
+ \gobbleoneargument}
+
+\def\mult_interfaces_get_parameters_error_indeed#1#2%
{\showassignerror{#2}{\the\inputlineno\space(#1)}}
\def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
@@ -118,9 +122,54 @@
\else\ifx#3\empty
\doubleexpandafter\mult_interfaces_get_parameters_error
\else
- \doubleexpandafter\dosetvalue
+ \doubleexpandafter\mult_interfaces_def
\fi\fi
- \m_mult_interfaces_namespace{#1}{#2}}
+ \m_mult_interfaces_namespace{#1}{#2}%
+ \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+\startinterface english
+
+ % some 10% faster
+
+ \let\mult_interfaces_get_parameters_error\undefined
+
+ \def\mult_interfaces_get_parameters_error_one#1\csname#2#3\endcsname#4%
+ {\mult_interfaces_get_parameters_error_indeed{#2}{#3}\iftrue}
+
+ \def\mult_interfaces_get_parameters_error_two#1\csname#2#3\endcsname#4%
+ {\mult_interfaces_get_parameters_error_indeed{#2}{#3}}
+
+ \def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
+ {\ifx\empty#1\empty
+ \mult_interfaces_get_parameters_error_one
+ \else\ifx#3\empty
+ \mult_interfaces_get_parameters_error_two
+ \else
+ \expandafter\def\csname\m_mult_interfaces_namespace#1\endcsname{#2}%
+ \fi\fi
+ \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+ % interesting but not faster
+ %
+ % \def\mult_interfaces_get_parameters_error_one#1\m_mult_interfaces_namespace#2\fi\fi%
+ % {\mult_interfaces_get_parameters_error_indeed\m_mult_interfaces_namespace{#2}\m_mult_interfaces_namespace\s!dummy\fi}
+ %
+ % \def\mult_interfaces_get_parameters_error_two#1\m_mult_interfaces_namespace#2\fi\fi%
+ % {\mult_interfaces_get_parameters_error_indeed\m_mult_interfaces_namespace{#2}\m_mult_interfaces_namespace\s!dummy\fi\fi}
+ %
+ % \def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
+ % {\expandafter\def\csname
+ % \ifx\empty#1\empty
+ % \mult_interfaces_get_parameters_error_one
+ % \else\ifx#3\empty
+ % \mult_interfaces_get_parameters_error_two
+ % \else
+ % \m_mult_interfaces_namespace#1%
+ % \fi\fi
+ % \endcsname{#2}
+ % \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+\stopinterface
\newif\ifassignment
@@ -132,6 +181,24 @@
% End of experimental code.
+\unexpanded\def\mult_interfaces_let #1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_lete#1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname\empty}
+\unexpanded\def\mult_interfaces_def #1#2{\expandafter\def \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_edef#1#2{\expandafter\edef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_gdef#1#2{\expandafter\gdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_xdef#1#2{\expandafter\xdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+
+\startinterface english
+
+ \unexpanded\def\mult_interfaces_let #1#2{\expandafter \let\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_lete#1#2{\expandafter \let\csname#1#2\endcsname\empty}
+ \unexpanded\def\mult_interfaces_def #1#2{\expandafter \def\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_edef#1#2{\expandafter\edef\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_gdef#1#2{\expandafter\gdef\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_xdef#1#2{\expandafter\xdef\csname#1#2\endcsname}
+
+\stopinterface
+
% the commented detokenized variant that backtracks ... needs testing usage first
%
% \let\whatever\relax
@@ -156,14 +223,30 @@
\def#8##1{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\s!empty\fi\endcsname}%
\def#9##1{\csname#1#2:##1\endcsname}}
+% pre-expansion can be a bit faster but has hardly any effect on a normal run, so let's go for
+% saving some memory
+%
+% \unexpanded\def\mult_interfaces_install_parameter_handler#1#2#3#4#5#6#7#8#9% inlining \csname*\endcsname is more efficient (#3 and #6 only)
+% {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
+% %\def#3##1{\csname#4{#1#2}{##1}\endcsname}%
+% \edef#3##1{\noexpand\csname\noexpand\ifcsname#1\noexpand#2:##1\endcsname#1\noexpand#2:##1\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname#1\noexpand#2:\s!parent\endcsname{##1}\noexpand\fi\endcsname}%
+% \edef#4##1##2{\noexpand\ifcsname##1:##2\endcsname##1:##2\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname##1:\s!parent\endcsname{##2}\noexpand\fi}%
+% \def #5##1##2{\ifx##1\relax\s!empty\else#4{##1}{##2}\fi}% is {} needed around ##1 ?
+% \edef#6##1##2{\noexpand\csname\noexpand\ifcsname#1##1:##2\endcsname#1##1:##2\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname#1##1:\s!parent\endcsname{##2}\noexpand\fi\endcsname}%
+% \def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1#2:##1\endcsname}}% always root, no backtrack
+% % \def#7##1{\mult_interfaces_detokenize{\csname#4{#1#2}{##1}\endcsname}}% compact version
+% % \def#7##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
+% \edef#8##1{\noexpand\csname\noexpand\ifcsname#1\noexpand#2:##1\endcsname#1\noexpand#2:##1\noexpand\else\s!empty\noexpand\fi\endcsname}%
+% \edef#9##1{\noexpand\csname#1#2:##1\endcsname}}
+
\unexpanded\def\installparameterhandler#1#2%
{\normalexpanded
{\mult_interfaces_install_parameter_handler
{\noexpand#1}% \??aa
\expandafter\noexpand\csname current#2\endcsname
\expandafter\noexpand\csname #2parameter\endcsname
- \expandafter\noexpand\csname do#2parameter\endcsname % or : #2_parameter_hash
- \expandafter\noexpand\csname do#2parentparameter\endcsname % or : #2_parent_parameter_hash
+ \expandafter\noexpand\csname do#2parameter\endcsname % or : #2_parameter
+ \expandafter\noexpand\csname do#2parentparameter\endcsname % or : #2_parent_parameter
\expandafter\noexpand\csname named#2parameter\endcsname
\expandafter\noexpand\csname detokenized#2parameter\endcsname
\expandafter\noexpand\csname strict#2parameter\endcsname % checked
@@ -207,14 +290,14 @@
% In \MKIV\ we can probably use the english variant for all other
% languages too.
-% todo: inline the \do*value
+% todo: inline the def/let
\unexpanded\def\mult_interfaces_install_parameter_set_handler#1#2#3#4#5#6%
{\ifx#2\relax\let#2\empty\fi
- \unexpanded\def#3{\dosetvalue {#1#2:}}% ##1 {##2} (braces are mandate)
- \unexpanded\def#4{\dosetevalue{#1#2:}}% ##1 {##2} (braces are mandate)
- \unexpanded\def#5{\doletvalue {#1#2:}}% ##1 ##2
- \unexpanded\def#6{\doletvalue {#1#2:}\empty}}% ##1
+   \unexpanded\def#3{\mult_interfaces_def {#1#2:}}% ##1 {##2} (braces are mandatory)
+   \unexpanded\def#4{\mult_interfaces_edef{#1#2:}}% ##1 {##2} (braces are mandatory)
+ \unexpanded\def#5{\mult_interfaces_let {#1#2:}}% ##1 ##2
+ \unexpanded\def#6{\mult_interfaces_lete{#1#2:}}}% ##1
\startinterface english
@@ -272,6 +355,11 @@
\expandafter\edef\csname#1#4:\s!parent\endcsname{#2}%
\fi \fi}
+\def\mult_interfaces_chain#1#2{\ifcsname#1#2:\s!chain\endcsname\csname#1#2:\s!chain\endcsname\space\fi}
+\def\getparentchain #1#2{\ifcsname#1#2:\s!chain\endcsname\csname#1#2:\s!chain\endcsname\fi}
+\def\getcurrentparentchain#1#2{\csname#1#2:\s!chain\endcsname} % for the moment test:
+\def\getcurrentparentchain#1#2{\ifcsname#1#2:\s!chain\endcsname\csname#1#2:\s!chain\endcsname\fi}
+
\unexpanded\def\mult_interfaces_install_define_handler#1#2#3#4#5#6#7#8#9% why is \expanded still needed in clones
{\ifx#4\relax\let#4\empty\fi % see \defineregister
\unexpanded\def#2{\dotripleempty#5}%
@@ -284,6 +372,7 @@
\the#6% predefine
\edef#8{##2}%
\mult_check_for_parent{#1}{#3}#4#8%
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{\mult_interfaces_chain#1{##2}##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#1##2}%
\mult_interfaces_get_parameters{#1#4:}[##3]%
\else\ifsecondargument
@@ -291,16 +380,19 @@
\expandafter\mult_check_for_assignment_indeed\detokenize{##2}=@@\_end_
\ifassignment
\let#8\empty
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#3}%
\mult_interfaces_get_parameters{#1#4:}[##2]%
\else
\edef#8{##2}%
\mult_check_for_parent{#1}{#3}#4#8%
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{\mult_interfaces_chain#1{##2}##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#1##2}%
\fi
\else
\the#6% predefine
\let#8\empty
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#3}%
\fi\fi
\the#7%
@@ -548,10 +640,10 @@
\expandafter\noexpand\csname everysetup#2\endcsname}}
\unexpanded\def\mult_interfaces_install_direct_parameter_set_handler#1#2#3#4#5%
- {\unexpanded\def#2{\dosetvalue #1}%
- \unexpanded\def#3{\dosetevalue#1}%
- \unexpanded\def#4{\doletvalue #1}%
- \unexpanded\def#5{\doletvalue #1\empty}}%
+ {\unexpanded\def#2{\mult_interfaces_def #1}%
+ \unexpanded\def#3{\mult_interfaces_edef#1}%
+ \unexpanded\def#4{\mult_interfaces_let #1}%
+ \unexpanded\def#5{\mult_interfaces_let #1\empty}}%
\startinterface english
@@ -629,7 +721,7 @@
\edef#2{##1}%
#3[##2]%
\else\iffirstargument
- \doifassignmentelse{##1}
+ \doifelseassignment{##1}
{\let#2\empty
#3[##1]}%
{\edef#2{##1}}%
@@ -691,12 +783,11 @@
\else
\global\advance\c_mult_interfaces_n_of_namespaces\plusone
\expandafter\edef\csname ??#1\endcsname{\v_interfaces_prefix_template}%
- \ctxcommand{registernamespace(\number\c_mult_interfaces_n_of_namespaces,"#1")}%
+ \clf_registernamespace\c_mult_interfaces_n_of_namespaces{#1}%
\fi}
-\def\mult_interfaces_get_parameters_error#1#2#3% redefined
- {\ctxcommand{showassignerror("#1","#2","#3",\the\inputlineno)}%
- \waitonfatalerror}
+\def\mult_interfaces_get_parameters_error_indeed#1#2%
+ {\clf_showassignerror{#1}{#2}\inputlineno} % no longer \waitonfatalerror
% We install two core namespaces here, as we want nice error messages. Maybe
% we will reserve the first 9.
@@ -778,10 +869,10 @@
{\dodoubleargument\mult_interfaces_define_name_space}
\def\mult_interfaces_define_name_space[#1][#2]% namespace settings
- {\ctxlua{interfaces.namespaces.define(\!!bs#1\!!es,\!!bs#2\!!es)}}
+ {\clf_definenamespace{#1}{#2}}
\def\listnamespaces
- {\ctxlua{interfaces.namespaces.list()}}
+ {\clf_listnamespaces}
%D Helper:
%D
@@ -807,6 +898,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifcommandhandlerelse\doifelsecommandhandler
+
\unexpanded\def\doifcommandhandler#1#2% namespace name
{\ifcsname#1#2:\s!parent\endcsname
\expandafter\firstofoneargument
@@ -825,10 +918,10 @@
% another set of (fast) helpers (grep for usage):
-\def\expandnamespaceparameter#1#2#3% \??xx \getp \c!xx \c!yy
+\def\expandnamespaceparameter#1#2#3% \??xx \getp \c!xx \v!yy
{\csname#1\ifcsname#1\expandafter\expandafter\expandafter\mult_aux_expand_namespace_parameter#2#3}
-\def\mult_aux_expand_namespace_parameter#1#2% \cs \c!yy
+\def\mult_aux_expand_namespace_parameter#1#2% \cs \v!yy
{#1\endcsname#1\else#2\fi\endcsname}
\def\expandnamespacemacro#1#2#3% \??xx \some_edefed_cs \c!yy
@@ -856,4 +949,302 @@
%D \edef\m_class_whatever{whatever}
%D \stoptyping
+% experiment: in principle this is faster but not that noticeable as we don't do that
+% many assignments and the mechanisms that do are also slow; the advantage is mostly nicer
+% in tracing
+
+\def\s!simple{simple}
+\def\s!single{single}
+\def\s!double{double}
+\def\s!triple{triple}
+
+\unexpanded\def\syst_helpers_double_empty#1#2#3%
+ {\syst_helpers_argument_reset
+ \doifelsenextoptional
+ {\syst_helpers_double_empty_one_yes_mult#2#3}%
+ {\syst_helpers_double_empty_one_nop_mult#1}}
+
+\def\syst_helpers_double_empty_one_yes_mult#1#2[#3]%
+ {\firstargumenttrue
+ \doifelsenextoptional
+ {\secondargumenttrue#2[{#3}]}%
+ {\syst_helpers_double_empty_two_nop_mult#1{#3}}}
+
+\def\syst_helpers_double_empty_one_nop_mult% #1%
+ {\firstargumentfalse
+ \secondargumentfalse
+ }% #1}
+
+\def\syst_helpers_double_empty_two_nop_mult
+ {\secondargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_double_empty_one_spaced_mult
+ \else
+ \expandafter\syst_helpers_double_empty_one_normal_mult
+ \fi}
+
+\def\syst_helpers_double_empty_one_spaced_mult#1#2{#1[{#2}] }
+\def\syst_helpers_double_empty_one_normal_mult#1#2{#1[{#2}]}
+
+\unexpanded\def\mult_interfaces_install_setup_handler#1#2#3#4#5#6#7#8%
+ {\ifx#3\relax\let#3\empty\fi
+ \unexpanded\def#5{\mult_interfaces_get_parameters{#1#3:}}% no every ! don't change it
+ \newtoks#4%
+ \newtoks#7%
+ \edef\m_mult_interface_setup{\strippedcsname#2_}%
+ \unexpanded\edef#2{\syst_helpers_double_empty
+ \csname\m_mult_interface_setup\s!simple\endcsname
+ \csname\m_mult_interface_setup\s!single\endcsname
+ \csname\m_mult_interface_setup\s!double\endcsname}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!double\endcsname[##1][##2]%
+ {\let#6#3%
+ \def#8####1% we will have a simple one as well
+ {\edef#3{####1}%
+ \mult_interfaces_get_parameters{#1#3:}[##2]%
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#6%
+ \the#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!single\endcsname[##1]%
+ {\let#6#3%
+ \let#3\empty
+ \mult_interfaces_get_parameters{#1:}[##1]%
+ \the#4%
+ \let#3#6%
+ \the#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!simple\endcsname%
+ {\let#6#3%
+ \let#3\empty
+ \the#4%
+ \let#3#6%
+ \the#7}}
+
+\unexpanded\def\installsetuphandler#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_setup_handler
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname setup#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2\endcsname
+ \expandafter\noexpand\csname setupcurrent#2\endcsname
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2root\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
+
+\unexpanded\def\syst_helpers_triple_empty#1#2#3#4%
+ {\syst_helpers_argument_reset
+ \doifelsenextoptional
+ {\syst_helpers_triple_empty_one_yes_mult#2#3#4}%
+ {\syst_helpers_triple_empty_one_nop_mult#1}}
+
+\def\syst_helpers_triple_empty_one_yes_mult#1#2#3[#4]%
+ {\firstargumenttrue
+ \doifelsenextoptional
+ {\syst_helpers_triple_empty_two_yes_mult#2#3{#4}}%
+ {\syst_helpers_triple_empty_two_nop_mult#1{#4}}}
+
+\def\syst_helpers_triple_empty_two_yes_mult#1#2#3[#4]%
+ {\secondargumenttrue
+ \doifelsenextoptional
+ {\thirdargumenttrue#2[{#3}][{#4}]}%
+ {\syst_helpers_triple_empty_three_nop_mult#1{#3}{#4}}}
+
+\def\syst_helpers_triple_empty_one_nop_mult % #1%
+ {\firstargumentfalse
+ \secondargumentfalse
+ \thirdargumentfalse
+ } % #1
+
+\def\syst_helpers_triple_empty_two_nop_mult
+ {\secondargumentfalse
+ \thirdargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_triple_empty_two_spaced_mult
+ \else
+ \expandafter\syst_helpers_triple_empty_two_normal_mult
+ \fi}
+
+\def\syst_helpers_triple_empty_three_nop_mult
+ {\thirdargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_triple_empty_three_spaced_mult
+ \else
+ \expandafter\syst_helpers_triple_empty_three_normal_mult
+ \fi}
+
+\def\syst_helpers_triple_empty_two_spaced_mult #1#2{#1[{#2}] }
+\def\syst_helpers_triple_empty_two_normal_mult #1#2{#1[{#2}]}
+\def\syst_helpers_triple_empty_three_spaced_mult#1#2#3{#1[{#2}][{#3}] }
+\def\syst_helpers_triple_empty_three_normal_mult#1#2#3{#1[{#2}][{#3}]}
+
+\unexpanded\def\mult_interfaces_install_auto_setup_handler#1#2#3#4#5#6#7#8%
+ {\ifx#3\relax\let#3\empty\fi
+ \unexpanded\def#5{\mult_interfaces_get_parameters{#1#3:}}%
+ \newtoks#4%
+ \edef\m_mult_interface_setup{\strippedcsname#2_}%
+ \unexpanded\edef#2{\syst_helpers_triple_empty
+ \csname\m_mult_interface_setup\s!simple\endcsname
+ \csname\m_mult_interface_setup\s!single\endcsname
+ \csname\m_mult_interface_setup\s!double\endcsname
+ \csname\m_mult_interface_setup\s!triple\endcsname}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!triple\endcsname[##1][##2][##3]%
+ {\let#7#3%
+ \def#8####1%
+ {\edef#3{####1}%
+ \expandafter\def\csname#1#3:\s!parent\endcsname{#1##2}%
+ \mult_interfaces_get_parameters{#1#3:}[##3]% always sets parent
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!double\endcsname[##1][##2]%
+ {\let#7#3%
+ \def#8####1%
+ {\edef#3{####1}%
+ #6% checks parent and sets if needed
+ \mult_interfaces_get_parameters{#1#3:}[##2]%
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!single\endcsname[##1]%
+ {\let#7#3%
+ \let#3\empty
+ \mult_interfaces_get_parameters{#1:}[##1]%
+ \the#4%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!simple\endcsname%
+ {\let#7#3%
+ \let#3\empty
+ \the#4%
+ \let#3#7}}
+
+\unexpanded\def\installautosetuphandler#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_auto_setup_handler
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname setup#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2\endcsname
+ \expandafter\noexpand\csname setupcurrent#2\endcsname
+ \expandafter\noexpand\csname check#2parent\endcsname
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
+
+% okay, we can also get rid of the #9, but this code looks pretty bad, while the previous is
+% still okay given that we can also use #6 as setup (so in fact we can save some cs again and
+% only use one extra)
+%
+% \global\advance\commalevel \plusone
+% \expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname####1,%
+% {\edef#3{####1}%
+% \mult_interfaces_get_parameters{#1#3:}[##2]%
+% \the#5%
+% \syst_helpers_do_process_comma_item}%
+% \expandafter\syst_helpers_do_do_process_comma_item\gobbleoneargument\relax##1,]\relax
+% % \syst_helpers_do_do_process_comma_item##1,]\relax
+% \global\advance\commalevel \minusone
+
+% The next one is experimental (and used in publications):
+
+\let\c_mult_set\relax
+
+\unexpanded\def\mult_interfaces_install_definition_set#1#2#3#4#5#6#7%
+ {\newcount#3%
+ \let#6\empty
+ \unexpanded\def#2%
+ {\expandafter\let\expandafter\c_mult_set\csname #1_t_#6\endcsname
+ \ifx\c_mult_set\relax
+ \expandafter\newtoks\c_mult_set
+ \expandafter\let\csname #1_t_#6\endcsname\c_mult_set
+ \fi}
+ \unexpanded\def#4##1%
+ {\pushmacro#6%
+ \advance#3\plusone
+ \edef#6{##1}%
+ \unprotect}%
+ \unexpanded\def#5%
+ {\protect
+ \advance#3\minusone
+ \popmacro#6}%
+ \unexpanded\def#7##1%
+ {\edef#6{##1}%
+ #2%
+ \the\c_mult_set\relax}}
+
+\unexpanded\def\installdefinitionset#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_definition_set
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname set_#2_toks\endcsname
+ \expandafter\noexpand\csname #2_nesting_depth\endcsname
+ \expandafter\noexpand\csname push#2\endcsname
+ \expandafter\noexpand\csname pop#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname use#2\endcsname}}
+
+\unexpanded\def\mult_interfaces_install_definition_set_member#1#2#3#4#5#6#7#8#9% no everysetups etc
+ {\let#5#2%
+ \unexpanded\def#2%
+ {\ifcase#4\relax\expandafter#5\else\expandafter#6\fi}%
+ \unexpanded\def#6%
+ {\dodoubleempty#7}%
+ \unexpanded\def#7[##1][##2]%
+ {\ifsecondargument
+ #3\c_mult_set\expandafter{\the\c_mult_set#9[##1][##2]}%
+ \else\iffirstargument
+ #3\c_mult_set\expandafter{\the\c_mult_set#8[##1]}%
+ \fi\fi}}
+
+\unexpanded\def\installdefinitionsetmember#1#2#3#4%
+ {\normalexpanded
+ {\mult_interfaces_install_definition_set_member
+ {\noexpand#3}% \??aa
+ \expandafter\noexpand\csname setup#4\endcsname
+ \expandafter\noexpand\csname set_#2_toks\endcsname
+ \expandafter\noexpand\csname #2_nesting_depth\endcsname
+ \expandafter\noexpand\csname normal_setup_#4\endcsname
+ \expandafter\noexpand\csname delayed_setup_#4\endcsname
+ \expandafter\noexpand\csname do_delayed_setup_#4\endcsname
+ \expandafter\noexpand\csname setup#4_\s!single\endcsname
+ \expandafter\noexpand\csname setup#4_\s!double\endcsname}}
+
+%D Another experiment:
+
+\unexpanded\def\mult_interfaces_install_parent_injector#1#2#3#4%
+ {\unexpanded\def#4##1%
+ {\ifx#3\empty
+ \expandafter\def\csname#1#2:\s!parent\endcsname{#1##1}%
+ \fi}}
+
+\unexpanded\def\installparentinjector#1#2%
+ {\normalexpanded{\mult_interfaces_install_parent_injector
+ {\noexpand#1}%
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname current#2parent\endcsname
+ \expandafter\noexpand\csname inject#2parent\endcsname}}
+
\protect
+
+%\unprotect
+% \installcorenamespace {test} \installcommandhandler \??test {test} \??test
+% \unexpanded\def\TestMeA[#1]%
+% {\edef\currenttest{#1}
+% \edef\p_before{\testparameter\c!before}%
+% \ifx\p_before\empty \relax \else \relax \fi}
+% \unexpanded\def\TestMeB[#1]%
+% {\edef\currenttest{#1}
+% \doifelsenothing{\testparameter\c!before}\relax\relax}
+% \unexpanded\def\TestMeC[#1]%
+% {\edef\currenttest{#1}
+% \expandafter\expandafter\expandafter\ifx\testparameter\c!before\empty \relax \else \relax \fi}
+% \unexpanded\def\TestMeD[#1]%
+% {\edef\currenttest{#1}
+% \doubleexpandafter\ifx\testparameter\c!before\empty \relax \else \relax \fi}
+% \protect
+%
+% \starttext
+% \definetest[foo] \definetest[bar][foo] \setuptest[bar][before=indeed]
+% \resettimer \dorecurse{100000}{\TestMeA[bar]} A:\elapsedtime \par % 0.502
+% \resettimer \dorecurse{100000}{\TestMeB[bar]} B:\elapsedtime \par % 0.530
+% \resettimer \dorecurse{100000}{\TestMeC[bar]} C:\elapsedtime \par % 0.487
+% \resettimer \dorecurse{100000}{\TestMeD[bar]} D:\elapsedtime \par % 0.493
+% \stoptext
diff --git a/tex/context/base/mult-chk.lua b/tex/context/base/mult-chk.lua
index 2a2dfcd4b..44a9f739f 100644
--- a/tex/context/base/mult-chk.lua
+++ b/tex/context/base/mult-chk.lua
@@ -16,7 +16,8 @@ local allocate = utilities.storage.allocate
local report_interface = logs.reporter("interface","checking")
-interfaces = interfaces or { }
+local interfaces = interfaces
+local implement = interfaces.implement
interfaces.syntax = allocate {
test = { keys = table.tohash { "a","b","c","d","e","f","g" } }
@@ -48,6 +49,18 @@ function interfaces.addvalidkeys(category,list)
end
end
+implement {
+ name = "setvalidinterfacekeys",
+ actions = interfaces.setvalidkeys,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "addvalidinterfacekeys",
+ actions = interfaces.addvalidkeys,
+ arguments = { "string", "string" }
+}
+
-- weird code, looks incomplete ... probably an experiment
local prefix, category, keys
@@ -73,4 +86,8 @@ function interfaces.getcheckedparameters(k,p,s)
end
end
--- _igcp_ = interfaces.getcheckedparameters
+implement {
+ name = "getcheckedinterfaceparameters",
+ actions = interfaces.getcheckedparameters,
+ arguments = { "string", "string", "string" }
+}
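
The same implement pattern is applied here for the key checkers. For completeness, a minimal sketch
of calling the underlying Lua functions directly, assuming the list argument is the same comma
separated string that the TeX wrappers below pass along; the category and keys are made up for the
example:

    interfaces.setvalidkeys("whatever","alpha,beta,gamma")
    interfaces.addvalidkeys("whatever","delta")
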
diff --git a/tex/context/base/mult-chk.mkiv b/tex/context/base/mult-chk.mkiv
index 1d02f166d..9208a73e1 100644
--- a/tex/context/base/mult-chk.mkiv
+++ b/tex/context/base/mult-chk.mkiv
@@ -38,8 +38,8 @@
\unexpanded\def\setvalidparameterkeys{\dodoubleargument\mult_checkers_set_valid_parameter_keys}
\unexpanded\def\addvalidparameterkeys{\dodoubleargument\mult_checkers_add_valid_parameter_keys}
-\def\mult_checkers_set_valid_parameter_keys[#1][#2]{\ctxlua{interfaces.setvalidkeys("#1",\!!bs#2\!!es)}}
-\def\mult_checkers_add_valid_parameter_keys[#1][#2]{\ctxlua{interfaces.addvalidkeys("#1",\!!bs#2\!!es)}}
+\def\mult_checkers_set_valid_parameter_keys[#1][#2]{\clf_setvalidinterfacekeys{#1}{#2}}
+\def\mult_checkers_add_valid_parameter_keys[#1][#2]{\clf_addvalidinterfacekeys{#1}{#2}}
\def\mult_checkers_get_checked_parameters_yes[#1]#2[#3]#4[#5%
{\if\noexpand#5]%
@@ -50,8 +50,7 @@
\fi{#1}{#3}#5}
\def\mult_checkers_get_checked_parameters_yes_indeed#1#2#3]%
- %{\ctxlua{_igcp_("#1","#2",\!!bs\detokenize{#3}\!!es)}}
- {\ctxlua{interfaces.getcheckedparameters("#1","#2",\!!bs\detokenize{#3}\!!es)}}
+ {\clf_getcheckedinterfaceparameters{#1}{#2}{\detokenize{#3}}}
\def\mult_checkers_get_checked_parameters_nop[#1]#2[#3]#4[#5%
{\if\noexpand#5]%
diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii
index 5f2714ce6..90aae390e 100644
--- a/tex/context/base/mult-de.mkii
+++ b/tex/context/base/mult-de.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{fusszeile}
\setinterfacevariable{footnote}{fussnote}
\setinterfacevariable{force}{zwinge}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{gespiegelt}
\setinterfacevariable{monday}{montag}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monofett}
\setinterfacevariable{month}{monat}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positiv}
\setinterfacevariable{postponing}{verschieben}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{einstellung}
\setinterfacevariable{preview}{vorschau}
\setinterfacevariable{previous}{vorig}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{kleinerabstand}
-\setinterfacevariable{setups}{impostazioni}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{kurz}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{verknuepfung}
\setinterfaceconstant{couplingway}{verkopplungsart}
\setinterfaceconstant{criterium}{kriterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{aktuell}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{strich}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{geraderand}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansion}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{datei}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{ersteseite}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{einziehen}
\setinterfaceconstant{indentnext}{ziehefolgendeein}
\setinterfaceconstant{indicator}{indikator}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{innen}
\setinterfaceconstant{innermargin}{innermargin}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nrechts}
\setinterfaceconstant{ntop}{noben}
\setinterfaceconstant{number}{nummer}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{nummernfarbe}
\setinterfaceconstant{numbercommand}{nummerbefehl}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definieretabellenvorlage}
\setinterfacecommand{definetabulate}{definieretabulator}
\setinterfacecommand{definetext}{definieretext}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definetextposition}
\setinterfacecommand{definetextvariable}{definetextvariable}
\setinterfacecommand{definetype}{definetype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{papierhoehe}
\setinterfacecommand{paperwidth}{papierbreite}
\setinterfacecommand{periods}{punkt}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{platzierebookmarks}
\setinterfacecommand{placecombinedlist}{platzierezusammengestellteliste}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placeheadtext}
\setinterfacecommand{placelegend}{platzierelegende}
\setinterfacecommand{placelist}{platziereliste}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{platzierelokalefussnoten}
\setinterfacecommand{placelogos}{platzierelogo}
\setinterfacecommand{placeongrid}{amgitterausrichten}
\setinterfacecommand{placeontopofeachother}{platziereuntereinander}
\setinterfacecommand{placepagenumber}{placepagenumber}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{platziereregister}
\setinterfacecommand{placerule}{placerule}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{stelleregisterein}
\setinterfacecommand{setuprotate}{stelledrehenein}
\setinterfacecommand{setuprule}{setuprule}
-\setinterfacecommand{setups}{einstellungen}
\setinterfacecommand{setupscreens}{stellerasterein}
\setinterfacecommand{setupsection}{stelleabschnittein}
\setinterfacecommand{setupsectionblock}{stelleabschnittsblockein}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{stelletabellenein}
\setinterfacecommand{setuptabulate}{stelletabulatorein}
\setinterfacecommand{setuptext}{stelletextein}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{setuptextposition}
\setinterfacecommand{setuptextrules}{stelletextumrissein}
\setinterfacecommand{setuptexttexts}{stelletexttexteein}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdokument}
\setinterfacecommand{startenvironment}{startumgebung}
\setinterfacecommand{startfigure}{startabbildung}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{startglobal}
\setinterfacecommand{startline}{startzeile}
\setinterfacecommand{startlinecorrection}{startzeilenkorrektur}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttabelle}
\setinterfacecommand{starttables}{starttabellen}
\setinterfacecommand{starttext}{starttext}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{starttextlinie}
\setinterfacecommand{startunpacked}{startgrosserdurchschuss}
\setinterfacecommand{startversion}{startversion}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stopkomponente}
\setinterfacecommand{stopdocument}{stopdokument}
\setinterfacecommand{stopenvironment}{stopumgebung}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stopglobal}
\setinterfacecommand{stopline}{stopzeile}
\setinterfacecommand{stoplinecorrection}{stopzeilenkorrektur}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptabelle}
\setinterfacecommand{stoptables}{stoptabellen}
\setinterfacecommand{stoptext}{stoptext}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stoptextlinie}
\setinterfacecommand{stopunpacked}{stopgrosserdurchschuss}
\setinterfacecommand{stopversion}{stopversion}
diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua
index afd466531..c0831de2d 100644
--- a/tex/context/base/mult-def.lua
+++ b/tex/context/base/mult-def.lua
@@ -1275,6 +1275,10 @@ return {
["pe"]="تعریف‌مترادفها",
["ro"]="definestesinonim",
},
+ ["placelistofsynonyms"]={
+ ["en"]="placelistofsynonyms",
+ ["nl"]="plaatslijstmetsynoniemen",
+ },
["definetabletemplate"]={
["cs"]="definujsablonutabulky",
["de"]="definieretabellenvorlage",
@@ -2905,7 +2909,7 @@ return {
["pe"]="نقطه‌ها",
["ro"]="puncte",
},
- ["plaatsruwelijst"]={
+ ["placerawlist"]={
["cs"]="placerawlist",
["de"]="placerawlist",
["en"]="placerawlist",
@@ -3055,7 +3059,7 @@ return {
["pe"]="درج‌شماره‌صفحه",
["ro"]="punenumarpagina",
},
- ["placereferencelist"]={
+ ["placereferencelist"]={ -- not in mkiv
["cs"]="placereferencelist",
["de"]="placereferencelist",
["en"]="placereferencelist",
@@ -4509,16 +4513,6 @@ return {
["pe"]="بارگذاری‌خط",
["ro"]="seteazarigla",
},
- ["setups"]={
- ["cs"]="nastaveni",
- ["de"]="einstellungen",
- ["en"]="setups",
- ["fr"]="reglages",
- ["it"]="impostazioni",
- ["nl"]="instellingen",
- ["pe"]="بارگذاریها",
- ["ro"]="setari",
- },
["setupscreens"]={
["cs"]="nastavrastr",
["de"]="stellerasterein",
@@ -5039,6 +5033,30 @@ return {
["pe"]="شروع‌تنظیم",
["ro"]="startaliniere",
},
+ ["starttextbackground"]={
+ ["en"]="starttextbackground",
+ ["nl"]="starttekstachtergrond",
+ },
+ ["stoptextbackground"]={
+ ["en"]="stoptextbackground",
+ ["nl"]="stoptekstachtergrond",
+ },
+ ["setuptextbackground"]={
+ ["en"]="setuptextbackground",
+ ["nl"]="steltekstachtergrondin",
+ },
+ ["definetextbackground"]={
+ ["en"]="definetextbackground",
+ ["nl"]="definieertekstachtergrond",
+ },
+ ["startframed"]={
+ ["en"]="startframed",
+ ["nl"]="startomlijnd",
+ },
+ ["stopframed"]={
+ ["en"]="stopframed",
+ ["nl"]="stopomlijnd",
+ },
["startbackground"]={
["cs"]="startpozadi",
["de"]="starthintergrund",
@@ -6454,6 +6472,10 @@ return {
},
},
["constants"]={
+ ["setups"]={
+ ["comment"]="no translations",
+ ["en"]="setups",
+ },
-- select/simplefonts
["regularfont"] ={ ["en"]="regularfont" },
["boldfont"] ={ ["en"]="boldfont" },
@@ -6508,8 +6530,8 @@ return {
["export"] = {
["en"]="export",
},
- ["css"] = {
- ["en"]="css",
+ ["cssfile"] = {
+ ["en"]="cssfile",
},
["xhtml"] = {
["en"]="xhtml",
@@ -6522,7 +6544,7 @@ return {
["en"]="labeloffset",
},
["exitoffset"]={
- ["en"]="labeloffset",
+ ["en"]="exitoffset",
},
["commentoffset"]={
["en"]="commentoffset",
@@ -6558,6 +6580,10 @@ return {
["en"]="headalign",
["nl"]="kopuitlijnen",
},
+ ["numberalign"]={
+ ["en"]="numberalign",
+ ["nl"]="nummeruitlijnen",
+ },
["alignsymbol"]={
["en"]="alignsymbol",
},
@@ -6613,6 +6639,9 @@ return {
["firstnamesep"]={
["en"]="firstnamesep",
},
+ ["surnamefirstnamesep"]={
+ ["en"]="surnamefirstnamesep",
+ },
["vonsep"]={
["en"]="vonsep",
},
@@ -6622,6 +6651,12 @@ return {
["surnamesep"]={
["en"]="surnamesep",
},
+ ["initialsep"]={
+ ["en"]="initialsep",
+ },
+ ["surnameinitialsep"]={
+ ["en"]="surnameinitialsep",
+ },
["lastnamesep"]={
["en"]="lastnamesep",
},
@@ -6637,6 +6672,9 @@ return {
["lastpubsep"]={
["en"]="lastpubsep",
},
+ ["finalpubsep"]={
+ ["en"]="finalpubsep",
+ },
["refcommand"]={
["en"]="refcommand",
},
@@ -8935,7 +8973,7 @@ return {
["de"]="mindepth",
["en"]="mindepth",
["fr"]="profondeurmin",
- ["it"]="mindeoth",
+ ["it"]="mindepth",
["nl"]="mindiepte",
["pe"]="کمترین‌عمق",
["ro"]="mindepth",
@@ -9702,7 +9740,7 @@ return {
["en"]="reference",
["fr"]="reference",
["it"]="riferimento",
- ["nl"]="verwijzing",
+ ["nl"]="referentie",
["pe"]="مرجع",
["ro"]="referinta",
},
@@ -10124,16 +10162,6 @@ return {
["pe"]="قراربده",
["ro"]="set",
},
- ["setups"]={
- ["cs"]="setups",
- ["de"]="setups",
- ["en"]="setups",
- ["fr"]="reglages",
- ["it"]="setups",
- ["nl"]="setups",
- ["pe"]="بارگذاریها",
- ["ro"]="setups",
- },
["shrink"]={
["en"]="shrink",
["nl"]="krimp",
@@ -10911,7 +10939,7 @@ return {
["en"]="unknownreference",
["fr"]="referenceinconnue",
["it"]="riferimentoingoto",
- ["nl"]="onbekendeverwijzing",
+ ["nl"]="onbekendereferentie",
["pe"]="مرجع‌ناشناس",
["ro"]="referintanecunoscuta",
},
@@ -11393,6 +11421,18 @@ return {
},
},
["variables"]={
+ ["setups"]={
+ ["comment"]="no translations",
+ ["en"]="setups",
+ },
+ ["followingpage"]={
+ ["en"]="followingpage",
+ ["nl"]="opvolgendepagina",
+ },
+ ["precedingpage"]={
+ ["en"]="followingpage",
+ ["nl"]="voorafgaandepagina",
+ },
["math"]={
["en"]="math",
},
@@ -11509,21 +11549,27 @@ return {
},
["maxheight"]={
["en"]="maxheight",
+ ["nl"]="maxhoogte",
},
["maxdepth"]={
["en"]="maxdepth",
+ ["nl"]="maxdiepte",
},
["maxwidth"]={
["en"]="maxwidth",
+ ["nl"]="maxbreedte",
},
["minheight"]={
["en"]="minheight",
+ ["nl"]="minhoogte",
},
["mindepth"]={
["en"]="mindepth",
+ ["nl"]="mindiepte",
},
["minwidth"]={
["en"]="minwidth",
+ ["nl"]="minbreedte",
},
["short"]={
["nl"]="kort",
@@ -14378,6 +14424,16 @@ return {
["pe"]="مونو",
["ro"]="mono",
},
+ ["monobold"]={
+ ["cs"]="monotucne",
+ ["de"]="monofett",
+ ["en"]="monobold",
+ ["fr"]="monogras",
+ ["it"]="monograssetto",
+ ["nl"]="monovet",
+ ["pe"]="monobold",
+ ["ro"]="monoaldin",
+ },
["month"]={
["cs"]="mesic",
["de"]="monat",
@@ -15475,16 +15531,6 @@ return {
["pe"]="تنگ‌هم",
["ro"]="serried",
},
- ["setups"]={
- ["cs"]="einstellungen",
- ["de"]="impostazioni",
- ["en"]="setups",
- ["fr"]="reglages",
- ["it"]="nastaveni",
- ["nl"]="instellingen",
- ["pe"]="بارگذاریها",
- ["ro"]="setari",
- },
["sheet"]={
["cs"]="sheet",
["de"]="sheet",
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 192a380ee..d547a7b81 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -30,76 +30,130 @@
% \input mult-\userinterfacetag \relax
% \input mult-m\userresponsestag \relax
-\ctxlua{interfaces.setuserinterface("\userinterfacetag","\userresponsestag")}
-
-% start todo:
-
-\def\c!fences {fences}
-\def\c!keeptogether {keeptogether}
-
-\def\c!dataset {dataset}
-\def\c!sectionblock {sectionblock}
-\def\c!language {language}
-\def\c!compressseparator{compressseparator}
-\def\c!renderingsetup {renderingsetup}
-\def\c!filler {filler}
-\def\c!resources {resources}
-\def\c!first {first}
-\def\c!last {last}
-\def\c!quotechar {quotechar}
-\def\c!commentchar {commentchar}
-\def\c!symbolcommand {symbolcommand}
-\def\c!xmlsetup {xmlsetup}
-\def\c!comma {comma}
-\def\c!period {period}
-\def\c!monthconversion {monthconversion}
-\def\c!comment {comment}
-\def\c!textalign {textalign}
-\def\c!up {up}
-\def\c!down {down}
-\def\c!instance {instance}
-\def\c!database {database}
-\def\c!group {group}
-\def\c!groupsuffix {groupsuffix}
-
-\def\v!compressseparator{compressseparator}
-\def\v!notation {notation}
-\def\v!endnote {endnote}
-\def\v!interactive {interactive}
-\def\v!autopunctuation {autopunctuation}
-\def\v!integral {integral}
-\def\v!shiftup {shiftup}
-\def\v!shiftdown {shiftdown}
-\def\v!construction {construction}
-\def\v!unframed {unframed}
-\def\v!chemical {chemical}
-\def\v!chemicals {chemicals}
-\def\v!words {words}
-\def\v!combination {combination}
-\def\v!norepeat {norepeat}
-\def\v!mixed {mixed}
-
-\def\s!lcgreek {lcgreek}
-\def\s!ucgreek {ucgreek}
-\def\s!sygreek {sygreek}
-\def\s!italics {italics}
-\def\s!integral {integral}
-\def\s!insert {insert} % maybe insertclass
-\def\s!marker {marker}
-
-\def\s!mixedcolumn {mixedcolumn}
-
-\def\s!double {double}
-\def\s!decimal {decimal}
-
-\def\s!current {current}
-
-\def\s!rel {rel}
-\def\s!ord {ord}
-
-\def\c!HL {HL}
-\def\c!VL {VL}
-\def\c!NL {NL}
+\clf_setuserinterface{\userinterfacetag}{\userresponsestag}
+
+% start todo in mult-def.lua:
+
+\def\c!openup {openup}
+
+\def\v!serifnormal {serifnormal}
+\def\v!serifbold {serifbold}
+\def\v!sansnormal {sansnormal}
+%def\v!sansbold {sansbold}
+\def\v!mononormal {mononormal}
+\def\v!monobold {monobold}
+
+\def\c!functionstyle {functionstyle}
+\def\c!functioncolor {functioncolor}
+
+\def\v!extremestretch {extremestretch}
+
+\def\v!alphabetic {alphabetic}
+\def\v!Alphabetic {Alphabetic}
+
+\def\c!svgstyle {svgstyle}
+
+\def\c!translate {translate}
+
+\def\c!nextleft {nextleft}
+\def\c!nextright {nextright}
+\def\c!nextleftquotation {nextleftquotation}
+\def\c!nextrightquotation{nextrightquotation}
+
+\def\c!profile {profile}
+
+\def\c!fences {fences}
+\def\c!words {words}
+\def\c!characters {characters}
+\def\c!hyphens {hyphens}
+\def\c!joiners {joiners}
+\def\c!leftwords {leftwords}
+\def\c!rightwords {rightwords}
+\def\c!keeptogether {keeptogether}
+\def\c!viewerprefix {viewerprefix}
+
+\def\v!display {display}
+\def\v!inline {inline}
+
+\def\v!camel {camel}
+
+\def\c!dataset {dataset}
+\def\c!sectionblock {sectionblock}
+\def\c!language {language}
+\def\c!compressseparator {compressseparator}
+\def\c!renderingsetup {renderingsetup}
+\def\c!filler {filler}
+\def\c!resources {resources}
+\def\c!first {first}
+\def\c!last {last}
+\def\c!quotechar {quotechar}
+\def\c!commentchar {commentchar}
+\def\c!symbolcommand {symbolcommand}
+\def\c!xmlsetup {xmlsetup}
+\def\c!comma {comma}
+\def\c!period {period}
+\def\c!monthconversion {monthconversion}
+\def\c!authorconversion {authorconversion}
+\def\c!comment {comment}
+\def\c!textalign {textalign}
+\def\c!up {up}
+\def\c!down {down}
+\def\c!instance {instance}
+\def\c!database {database}
+\def\c!group {group}
+\def\c!groupsuffix {groupsuffix}
+\def\c!properties {properties}
+\def\c!journalconversion {journalconversion}
+\def\c!register {register}
+\def\c!note {note}
+\def\c!field {field}
+\def\c!ignore {ignore}
+\def\c!specification {specification}
+
+\def\c!pageleft {pageleft}
+\def\c!pageright {pageright}
+\def\c!pagesep {pagesep}
+\def\c!lastpagesep {lastpagesep}
+\def\c!finalpagesep {finalpagesep}
+\def\c!pageconnector {pageconnector}
+
+\def\c!referencemethod {referencemethod} % forward both
+
+\def\v!dataset {dataset}
+\def\v!compressseparator {compressseparator}
+\def\v!notation {notation}
+\def\v!endnote {endnote}
+\def\v!interactive {interactive}
+\def\v!autopunctuation {autopunctuation}
+\def\v!integral {integral}
+\def\v!shiftup {shiftup}
+\def\v!shiftdown {shiftdown}
+\def\v!construction {construction}
+\def\v!unframed {unframed}
+\def\v!chemical {chemical}
+\def\v!chemicals {chemicals}
+\def\v!words {words}
+\def\v!combination {combination}
+\def\v!norepeat {norepeat}
+\def\v!mixed {mixed}
+\def\v!centerlast {centerlast}
+\def\v!long {long}
+\def\v!box {box}
+
+\def\v!noline {noline}
+\def\v!noheight {noheight}
+\def\v!nodepth {nodepth}
+
+\def\v!bookmark {bookmark}
+
+\def\v!vfenced {vfenced}
+\def\v!bothtext {bothtext}
+
+\def\s!traditional {traditional}
+
+\def\c!HL {HL}
+\def\c!VL {VL}
+\def\c!NL {NL}
\ifdefined\v!kerncharacters\else \def\v!kerncharacters{kerncharacters} \fi % no time now for translations should be a e! actually
\ifdefined\v!letterspacing \else \def\v!letterspacing {letterspacing} \fi % no time now for translations should be a e! actually
@@ -112,6 +166,11 @@
\def\c!etallimit {etallimit}
\def\c!etaldisplay{etaldisplay}
\def\c!etaltext {etaltext}
+\def\c!etaloption {etaloption}
+
+\ifdefined\v!simplelist\else \def\v!simplelist{simplelist} \fi
+\ifdefined\v!sorting \else \def\v!sorting {sorting} \fi
+\ifdefined\v!synonym \else \def\v!synonym {synonym} \fi
% stop todo
diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii
index 97732dab7..b08070ba0 100644
--- a/tex/context/base/mult-en.mkii
+++ b/tex/context/base/mult-en.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{footer}
\setinterfacevariable{footnote}{footnote}
\setinterfacevariable{force}{force}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{mirrored}
\setinterfacevariable{monday}{monday}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monobold}
\setinterfacevariable{month}{month}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positive}
\setinterfacevariable{postponing}{postponing}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{followingpage}
\setinterfacevariable{preference}{preference}
\setinterfacevariable{preview}{preview}
\setinterfacevariable{previous}{previous}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{coupling}
\setinterfaceconstant{couplingway}{couplingway}
\setinterfaceconstant{criterium}{criterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{current}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{dash}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{evenmargin}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansion}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{file}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{firstpage}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{indenting}
\setinterfaceconstant{indentnext}{indentnext}
\setinterfaceconstant{indicator}{indicator}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{inner}
\setinterfaceconstant{innermargin}{innermargin}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nright}
\setinterfaceconstant{ntop}{ntop}
\setinterfaceconstant{number}{number}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{numbercolor}
\setinterfaceconstant{numbercommand}{numbercommand}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definetabletemplate}
\setinterfacecommand{definetabulate}{definetabulate}
\setinterfacecommand{definetext}{definetext}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definetextposition}
\setinterfacecommand{definetextvariable}{definetextvariable}
\setinterfacecommand{definetype}{definetype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{paperheight}
\setinterfacecommand{paperwidth}{paperwidth}
\setinterfacecommand{periods}{periods}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{placebookmarks}
\setinterfacecommand{placecombinedlist}{placecombinedlist}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placeheadtext}
\setinterfacecommand{placelegend}{placelegend}
\setinterfacecommand{placelist}{placelist}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{placelocalfootnotes}
\setinterfacecommand{placelogos}{placelogos}
\setinterfacecommand{placeongrid}{placeongrid}
\setinterfacecommand{placeontopofeachother}{placeontopofeachother}
\setinterfacecommand{placepagenumber}{placepagenumber}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{placeregister}
\setinterfacecommand{placerule}{placerule}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{setupregister}
\setinterfacecommand{setuprotate}{setuprotate}
\setinterfacecommand{setuprule}{setuprule}
-\setinterfacecommand{setups}{setups}
\setinterfacecommand{setupscreens}{setupscreens}
\setinterfacecommand{setupsection}{setupsection}
\setinterfacecommand{setupsectionblock}{setupsectionblock}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{setuptables}
\setinterfacecommand{setuptabulate}{setuptabulate}
\setinterfacecommand{setuptext}{setuptext}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{setuptextposition}
\setinterfacecommand{setuptextrules}{setuptextrules}
\setinterfacecommand{setuptexttexts}{setuptexttexts}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdocument}
\setinterfacecommand{startenvironment}{startenvironment}
\setinterfacecommand{startfigure}{startfigure}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{startglobal}
\setinterfacecommand{startline}{startline}
\setinterfacecommand{startlinecorrection}{startlinecorrection}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttable}
\setinterfacecommand{starttables}{starttables}
\setinterfacecommand{starttext}{starttext}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{starttextrule}
\setinterfacecommand{startunpacked}{startunpacked}
\setinterfacecommand{startversion}{startversion}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stopcomponent}
\setinterfacecommand{stopdocument}{stopdocument}
\setinterfacecommand{stopenvironment}{stopenvironment}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stopglobal}
\setinterfacecommand{stopline}{stopline}
\setinterfacecommand{stoplinecorrection}{stoplinecorrection}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptable}
\setinterfacecommand{stoptables}{stoptables}
\setinterfacecommand{stoptext}{stoptext}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stoptextrule}
\setinterfacecommand{stopunpacked}{stopunpacked}
\setinterfacecommand{stopversion}{stopversion}
diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii
index 520f8e1a6..d76da18d9 100644
--- a/tex/context/base/mult-fr.mkii
+++ b/tex/context/base/mult-fr.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{pdp}
\setinterfacevariable{footnote}{notepdp}
\setinterfacevariable{force}{force}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{reflete}
\setinterfacevariable{monday}{lundi}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monogras}
\setinterfacevariable{month}{mois}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positif}
\setinterfacevariable{postponing}{postponing}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{followingpage}
\setinterfacevariable{preference}{preference}
\setinterfacevariable{preview}{previsualisation}
\setinterfacevariable{previous}{precedent}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{septembre}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
-\setinterfacevariable{setups}{reglages}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{couplage}
\setinterfaceconstant{couplingway}{modecouplage}
\setinterfaceconstant{criterium}{critere}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{courant}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{pointille}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{margepaire}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansion}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{fichier}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{premierepage}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{composeenalinea}
\setinterfaceconstant{indentnext}{indentesuivant}
\setinterfaceconstant{indicator}{indicateur}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{interieur}
\setinterfaceconstant{innermargin}{margeinterieure}
\setinterfaceconstant{inputfile}{fichierentree}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{ndroite}
\setinterfaceconstant{ntop}{nsup}
\setinterfaceconstant{number}{numero}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{couleurnumero}
\setinterfaceconstant{numbercommand}{commandenumero}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1001,7 +1007,7 @@
\setinterfaceconstant{separatorcolor}{separatorcolor}
\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
-\setinterfaceconstant{setups}{reglages}
+\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{cote}
\setinterfaceconstant{sidealign}{sidealign}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definittrametableau}
\setinterfacecommand{definetabulate}{definittabulation}
\setinterfacecommand{definetext}{definittexte}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definitpositiontexte}
\setinterfacecommand{definetextvariable}{definitvariabletexte}
\setinterfacecommand{definetype}{definittype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{hauteurpapier}
\setinterfacecommand{paperwidth}{largeurpapier}
\setinterfacecommand{periods}{periodes}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{placemarquespages}
\setinterfacecommand{placecombinedlist}{placelisteinmbriquee}
\setinterfacecommand{placefloat}{placeflottant}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placetextetete}
\setinterfacecommand{placelegend}{placelegende}
\setinterfacecommand{placelist}{placeliste}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{placenotespdplocales}
\setinterfacecommand{placelogos}{placelogos}
\setinterfacecommand{placeongrid}{placesurgrille}
\setinterfacecommand{placeontopofeachother}{placelesunsaudessusdesautres}
\setinterfacecommand{placepagenumber}{placenumeropage}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placelistereference}
\setinterfacecommand{placeregister}{placeregistre}
\setinterfacecommand{placerule}{placeregle}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{regleregistre}
\setinterfacecommand{setuprotate}{regleoriente}
\setinterfacecommand{setuprule}{regleregle}
-\setinterfacecommand{setups}{reglages}
\setinterfacecommand{setupscreens}{regleecrans}
\setinterfacecommand{setupsection}{reglesection}
\setinterfacecommand{setupsectionblock}{regleblocsection}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{regletableaux}
\setinterfacecommand{setuptabulate}{regletabulation}
\setinterfacecommand{setuptext}{regletexte}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{reglepositiontexte}
\setinterfacecommand{setuptextrules}{reglelignesreglestexte}
\setinterfacecommand{setuptexttexts}{regletextestexte}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{demarredocument}
\setinterfacecommand{startenvironment}{demarreenvironement}
\setinterfacecommand{startfigure}{demarrefigure}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{demarreglobal}
\setinterfacecommand{startline}{demarreligne}
\setinterfacecommand{startlinecorrection}{demarrecorrectionligne}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{demarretableau}
\setinterfacecommand{starttables}{demarretableaux}
\setinterfacecommand{starttext}{demarretexte}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{demarreligneregleetexte}
\setinterfacecommand{startunpacked}{demarredegroupe}
\setinterfacecommand{startversion}{demarreversion}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stoppecomposant}
\setinterfacecommand{stopdocument}{stoppedocument}
\setinterfacecommand{stopenvironment}{stoppeenvironement}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stoppeglobal}
\setinterfacecommand{stopline}{stoppeligne}
\setinterfacecommand{stoplinecorrection}{stoppecorrectionligne}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoppetableau}
\setinterfacecommand{stoptables}{stoppetableaux}
\setinterfacecommand{stoptext}{stoppetexte}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stoppeligneregleetexte}
\setinterfacecommand{stopunpacked}{stoppedegroupe}
\setinterfacecommand{stopversion}{stoppeversion}
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index 2101b95e9..27aa32055 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -4,26 +4,30 @@ return {
"nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel",
"shadefactor",
"textextoffset",
- "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent",
- "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent",
- "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
--- "originlength", "tickstep ", "ticklength",
--- "autoarrows", "ahfactor",
--- "angleoffset", anglelength", anglemethod",
+ "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent",
+ "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent",
+ "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent",
+ "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
+ -- "originlength", "tickstep ", "ticklength",
+ -- "autoarrows", "ahfactor",
+ -- "angleoffset", anglelength", anglemethod",
"metapostversion",
"maxdimensions",
},
commands = {
+ "transparency",
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
+ "zmod",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
"llcircle", "lrcircle", "urcircle", "ulcircle",
"tcircle", "bcircle", "lcircle", "rcircle",
"lltriangle", "lrtriangle", "urtriangle", "ultriangle",
+ "uptriangle", "downtriangle", "lefttriangle", "righttriangle", "triangle",
"smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened",
"punked", "curved", "unspiked", "simplified", "blownup", "stretched",
"enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged",
@@ -36,24 +40,32 @@ return {
"xsized", "ysized", "xysized", "sized", "xyscaled",
"intersection_point", "intersection_found", "penpoint",
"bbwidth", "bbheight",
- "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto",
- "withcircularshade", "withlinearshade",
+ "withshade", "withcircularshade", "withlinearshade", -- old but kept
+ "defineshade", "shaded",
+ -- "withshading", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor",
+ "shadedinto", "withshadecolors", "withshadedomain", "withshademethod", "withshadefactor", "withshadevector", "withshadecenter",
"cmyk", "spotcolor", "multitonecolor", "namedcolor",
"drawfill", "undrawfill",
"inverted", "uncolored", "softened", "grayed", "greyed",
"onlayer",
"along",
- "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage",
+ "graphictext", "loadfigure", "externalfigure", "figure", "register",
+ "withmask", "bitmapimage",
"colordecimals", "ddecimal", "dddecimal", "ddddecimal",
- "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign",
+ "textext", "thetextext", "rawtextext", "textextoffset",
+ "verbatim",
+ "thelabel", "label",
+ "autoalign",
"transparent", "withtransparency",
"property", "properties", "withproperties",
"asgroup",
- "infont", -- redefined usign textext
- -- "property", "withproperties", "properties", -- not yet
- "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade",
- "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade",
- "space", "CRLF",
+ "infont", -- redefined using textext
+ -- "set_linear_vector", "set_circular_vector",
+ -- "linear_shade", "circular_shade",
+ -- "define_linear_shade", "define_circular_shade",
+ -- "define_circular_linear_shade", "define_circular_linear_shade",
+ -- "define_sampled_linear_shade", "define_sampled_circular_shade",
+ "space", "crlf", "dquote", "percent", "SPACE", "CRLF", "DQUOTE", "PERCENT",
"grayscale", "greyscale", "withgray", "withgrey",
"colorpart",
"readfile",
@@ -63,10 +75,12 @@ return {
"break",
"xstretched", "ystretched", "snapped",
--
- "pathconnectors", "function", "constructedpath", "constructedpairs",
- "punkedfunction", "curvedfunction", "tightfunction",
- "punkedpath", "curvedpath", "tightpath",
- "punkedpairs", "curvedpairs", "tightpairs",
+ "pathconnectors", "function",
+ "constructedfunction", "constructedpath", "constructedpairs",
+ -- "punkedfunction", "punkedpath", "punkedpairs",
+ "straightfunction", "straightpath", "straightpairs",
+ "curvedfunction", "curvedpath", "curvedpairs",
+ -- "tightfunction", "tightpath", "tightpairs",
--
"evenly", "oddly",
--
@@ -75,26 +89,31 @@ return {
"pushcurrentpicture", "popcurrentpicture",
--
"arrowpath",
--- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
--- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
--- "showgrid",
--- "phantom",
--- "xshifted", "yshifted",
--- "drawarrowpath", "midarrowhead", "arrowheadonpath",
--- "drawxticks", "drawyticks", "drawticks",
--- "pointarrow",
--- "thefreelabel", "freelabel", "freedotlabel",
--- "anglebetween", "colorcircle",
--- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
--- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
+ -- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
+ -- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
+ -- "showgrid",
+ -- "phantom",
+ -- "xshifted", "yshifted",
+ -- "drawarrowpath", "midarrowhead", "arrowheadonpath",
+ -- "drawxticks", "drawyticks", "drawticks",
+ -- "pointarrow",
+ -- "thefreelabel", "freelabel", "freedotlabel",
+ -- "anglebetween", "colorcircle",
+ -- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
+ -- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
"tensecircle", "roundedsquare",
- "colortype", "whitecolor", "blackcolor",
+ "colortype", "whitecolor", "blackcolor", "basiccolors",
--
--- "swappointlabels",
+ -- "swappointlabels",
"normalfill", "normaldraw", "visualizepaths", "naturalizepaths",
- "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox",
- "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels",
- "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
+ "drawboundary", "drawwholepath",
+ "visualizeddraw", "visualizedfill",
+ "draworigin", "drawboundingbox",
+ "drawpath",
+ "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines",
+ "drawpointlabels",
+ "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions",
+ "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
--
"undashed",
--
@@ -102,5 +121,8 @@ return {
--
"passvariable", "passarrayvariable", "tostring", "format", "formatted",
"startpassingvariable", "stoppassingvariable",
+ --
+ "eofill", "eoclip",
+ "area",
},
}
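
Taken together, mult-fun.lua stays a plain inventory: it just returns grouped lists of names that the SciTE MetaFun lexer loads and turns into lookup sets, so adding a word in this commit is all it takes to get it recognized. A minimal sketch of that consuming step, assuming only the table layout visible above (the loader here is illustrative, not the actual lexer code):

    local metafun = dofile("mult-fun.lua")   -- the file simply returns a table of word lists
    local known   = { }
    for group, words in next, metafun do     -- e.g. the "commands" list above
        for i = 1, #words do
            known[words[i]] = group          -- map every keyword to the group it came from
        end
    end
    -- known["withshade"] == "commands"; commented-out entries never make it into the set
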
diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua
index e3ff904a6..bd3b1d38b 100644
--- a/tex/context/base/mult-ini.lua
+++ b/tex/context/base/mult-ini.lua
@@ -12,6 +12,7 @@ local serialize = table.serialize
local context = context
local commands = commands
+local implement = interfaces.implement
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
@@ -240,9 +241,17 @@ function interfaces.setuserinterface(interface,response)
end
report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations",
nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations)
+ else
+ report_interface("the language(s) can only be set when making the format")
end
end
+interfaces.implement {
+ name = "setuserinterface",
+ actions = interfaces.setuserinterface,
+ arguments = { "string", "string" }
+}
+
interfaces.cachedsetups = interfaces.cachedsetups or { }
interfaces.hashedsetups = interfaces.hashedsetups or { }
@@ -265,9 +274,15 @@ function interfaces.cachesetup(t)
end
end
-function interfaces.is_command(str)
- return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this
-end
+-- if token.lookup then
+-- interfaces.is_command = token.lookup
+-- else
+
+ function interfaces.is_command(str)
+ return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this
+ end
+
+-- end
function interfaces.interfacedcommand(name)
local command = complete.commands[name]
@@ -276,41 +291,56 @@ end
-- interface
-function commands.writestatus(category,message,...)
- local r = reporters[category]
- if r then
- r(message,...)
- end
+function interfaces.writestatus(category,message)
+ reporters[category](message) -- could also be a setmetatablecall
end
-commands.registernamespace = interfaces.registernamespace
-commands.setinterfaceconstant = interfaces.setconstant
-commands.setinterfacevariable = interfaces.setvariable
-commands.setinterfaceelement = interfaces.setelement
-commands.setinterfacemessage = interfaces.setmessage
-commands.setinterfacemessages = interfaces.setmessages
-commands.showmessage = interfaces.showmessage
+implement { name = "registernamespace", actions = interfaces.registernamespace, arguments = { "integer", "string" } }
+implement { name = "setinterfaceconstant", actions = interfaces.setconstant, arguments = { "string", "string" } }
+implement { name = "setinterfacevariable", actions = interfaces.setvariable, arguments = { "string", "string" } }
+implement { name = "setinterfaceelement", actions = interfaces.setelement, arguments = { "string", "string" } }
+implement { name = "setinterfacemessage", actions = interfaces.setmessage, arguments = { "string", "string", "string" } }
+implement { name = "setinterfacemessages", actions = interfaces.setmessages, arguments = { "string", "string" } }
+implement { name = "showmessage", actions = interfaces.showmessage, arguments = { "string", "string", "string" } }
+
+implement {
+ name = "doifelsemessage",
+ actions = { interfaces.doifelsemessage, commands.doifelse },
+ arguments = { "string", "string" },
+}
-function commands.doifelsemessage(category,tag)
- commands.doifelse(interfaces.doifelsemessage(category,tag))
-end
+implement {
+ name = "getmessage",
+ actions = { interfaces.getmessage, context },
+ arguments = { "string", "string", "string" },
+}
-function commands.getmessage(category,tag,default)
- context(interfaces.getmessage(category,tag,default))
-end
+implement {
+ name = "writestatus",
+ overload = true,
+ actions = interfaces.writestatus,
+ arguments = { "string", "string" },
+}
-function commands.showassignerror(namespace,key,value,line)
- local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)")
+local function showassignerror(namespace,key,line)
+ local ns, instance = match(namespace,"^(%d+)[^%a]+(%a*)")
if ns then
namespace = corenamespaces[tonumber(ns)] or ns
end
- if instance then
+ -- injected in the stream for timing:
+ if instance and instance ~= "" then
context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key))
else
context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key))
end
end
+implement {
+ name = "showassignerror",
+ actions = showassignerror,
+ arguments = { "string", "string", "integer" },
+}
+
-- a simple helper
local settings_to_hash = utilities.parsers.settings_to_hash
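
The mult-ini.lua hunk above replaces the old commands.* bindings with interfaces.implement declarations: each one names a callable command, lists the argument scanners, and points at the function (or chain of functions) to run. A minimal sketch of the pattern with a hypothetical command name, assuming nothing beyond what the hunk itself shows:

    interfaces.implement {
        name      = "demostatus",                -- hypothetical name, for illustration only
        arguments = { "string", "string" },      -- scanners applied to the TeX arguments, in order
        actions   = function(category, message)  -- a single function, or a chain as in doifelsemessage
            interfaces.writestatus(category, message)
        end,
    }

On the TeX side such a declaration surfaces as \clf_demostatus{...}{...}, which is exactly the calling convention the mult-ini.mkiv hunk below switches the message macros to.
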
diff --git a/tex/context/base/mult-ini.mkiv b/tex/context/base/mult-ini.mkiv
index 09fc5daf0..1dd5a696a 100644
--- a/tex/context/base/mult-ini.mkiv
+++ b/tex/context/base/mult-ini.mkiv
@@ -365,10 +365,10 @@
{\bgroup
\ifcsname\m!prefix!#2\endcsname\else\setgvalue{\m!prefix!#2}{#2}\fi
\catcode\endoflineasciicode\activecatcode
- \doifinsetelse{#1}{\currentresponses,all}\mult_messages_start_yes\mult_messages_start_nop{#2}}
+ \doifelseinset{#1}{\currentresponses,all}\mult_messages_start_yes\mult_messages_start_nop{#2}}
\def\mult_messages_start_yes#1#2\stopmessages
- {\ctxcommand{setinterfacemessages("#1",\!!bs#2\!!es)}%
+ {\clf_setinterfacemessages{#1}{#2}%
\egroup}
\def\mult_messages_start_nop#1#2\stopmessages
@@ -378,13 +378,15 @@
\unexpanded\def\setinterfacemessage#1#2#3%
{\ifcsname\m!prefix!#1\endcsname\else\setgvalue{\m!prefix!#1}{#1}\fi
- \ctxcommand{setinterfacemessage("#1","#2",\!!bs#3\!!es)}}
+ \clf_setinterfacemessage{#1}{#2}{#3}}
-\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\ctxcommand{getmessage("#1","#2")}}}
-\unexpanded\def\getmessage #1#2{\ctxcommand{getmessage("#1","#2")}}
-\unexpanded\def\doifelsemessage #1#2{\ctxcommand{doifelsemessage("#1","#2")}}
-\unexpanded\def\showmessage #1#2#3{\ctxcommand{showmessage("#1","#2",\!!bs#3\!!es)}}
-\unexpanded\def\writestatus #1#2{\ctxcommand{writestatus("#1",\!!bs#2\!!es)}}
+\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\clf_getmessage{#1}{#2}}}
+\unexpanded\def\getmessage #1#2{\clf_getmessage{#1}{#2}}
+\unexpanded\def\doifelsemessage #1#2{\clf_doifelsemessage{#1}{#2}}
+\unexpanded\def\showmessage #1#2#3{\clf_showmessage{#1}{#2}{#3}}
+\unexpanded\def\writestatus #1#2{\clf_writestatus{#1}{#2}}
+
+\let\doifmessageelse\doifelsemessage
%D \macros
%D {ifshowwarnings, ifshowmessages}
@@ -730,11 +732,11 @@
% temporary mkiv hack (we can best just store the whole table in memory)
\unexpanded\def\setinterfaceconstant#1#2%
- {\ctxcommand{setinterfaceconstant("#1","#2")}%
+ {\clf_setinterfaceconstant{#1}{#2}%
\expandafter\def\csname\c!prefix!#1\endcsname{#1}}
\unexpanded\def\setinterfacevariable#1#2%
- {\ctxcommand{setinterfacevariable("#1","#2")}%
+ {\clf_setinterfacevariable{#1}{#2}%
\expandafter\def\csname\v!prefix!#1\endcsname{#2}}
%D \macros
@@ -763,7 +765,7 @@
%D part is needed, we use a \type{-}:
\unexpanded\def\setinterfaceelement#1#2%
- {\ctxcommand{setinterfaceelement("#1","#2")}%
+ {\clf_setinterfaceelement{#1}{#2}%
\ifcsname\e!prefix!#1\endcsname
\doifnotvalue{\e!prefix!#1}{#2}{\setvalue{\e!prefix!#1}{#2}}%
\else
diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii
index 2b31e8e10..6474d93c4 100644
--- a/tex/context/base/mult-it.mkii
+++ b/tex/context/base/mult-it.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{piedipagina}
\setinterfacevariable{footnote}{notapdp}
\setinterfacevariable{force}{forza}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{riflesso}
\setinterfacevariable{monday}{lunedi}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monograssetto}
\setinterfacevariable{month}{mese}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positivo}
\setinterfacevariable{postponing}{posporre}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{followingpage}
\setinterfacevariable{preference}{preferenza}
\setinterfacevariable{preview}{anteprima}
\setinterfacevariable{previous}{precedente}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{settembre}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{vicino}
-\setinterfacevariable{setups}{nastaveni}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{accoppiamento}
\setinterfaceconstant{couplingway}{modoaccoppiamento}
\setinterfaceconstant{criterium}{criterio}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{corrente}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{dash}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{marginepari}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{espansione}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{file}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{primapagina}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{rientro}
\setinterfaceconstant{indentnext}{rientrasuccessivo}
\setinterfaceconstant{indicator}{indicatore}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{interno}
\setinterfaceconstant{innermargin}{margineinterno}
\setinterfaceconstant{inputfile}{inputfile}
@@ -833,7 +838,7 @@
\setinterfaceconstant{middletext}{testocentro}
\setinterfaceconstant{midsentence}{midsentence}
\setinterfaceconstant{min}{min}
-\setinterfaceconstant{mindepth}{mindeoth}
+\setinterfaceconstant{mindepth}{mindepth}
\setinterfaceconstant{minheight}{altezzamin}
\setinterfaceconstant{minwidth}{ampiezzamin}
\setinterfaceconstant{moffset}{moffset}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{ndestra}
\setinterfaceconstant{ntop}{ncima}
\setinterfaceconstant{number}{numero}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{colorenumero}
\setinterfaceconstant{numbercommand}{comandonumero}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definiscimodellotabella}
\setinterfacecommand{definetabulate}{definiscitabulato}
\setinterfacecommand{definetext}{definiscitesto}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definisciposizionetesto}
\setinterfacecommand{definetextvariable}{definiscivariabiletesto}
\setinterfacecommand{definetype}{definiscitype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{altezzacarta}
\setinterfacecommand{paperwidth}{ampiezzacarta}
\setinterfacecommand{periods}{punti}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{mettisegnalibro}
\setinterfacecommand{placecombinedlist}{mettielencocombinato}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{posizionatestotesta}
\setinterfacecommand{placelegend}{mettilegenda}
\setinterfacecommand{placelist}{mettielenco}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{mettinotepdplocali}
\setinterfacecommand{placelogos}{mettiloghi}
\setinterfacecommand{placeongrid}{mettiingriglia}
\setinterfacecommand{placeontopofeachother}{mettiunosullaltro}
\setinterfacecommand{placepagenumber}{mettinumeropagina}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{mettiregistro}
\setinterfacecommand{placerule}{mettilinea}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{impostaregistro}
\setinterfacecommand{setuprotate}{impostarotazione}
\setinterfacecommand{setuprule}{impostalinea}
-\setinterfacecommand{setups}{impostazioni}
\setinterfacecommand{setupscreens}{impostaschermi}
\setinterfacecommand{setupsection}{impostasezione}
\setinterfacecommand{setupsectionblock}{impostabloccosezione}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{impostatabelle}
\setinterfacecommand{setuptabulate}{impostatabulato}
\setinterfacecommand{setuptext}{impostatesto}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{impostaposizionetesto}
\setinterfacecommand{setuptextrules}{impostalineetesto}
\setinterfacecommand{setuptexttexts}{impostatestotesti}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{iniziadocumento}
\setinterfacecommand{startenvironment}{iniziaambiente}
\setinterfacecommand{startfigure}{iniziafigura}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{iniziaglobale}
\setinterfacecommand{startline}{iniziariga}
\setinterfacecommand{startlinecorrection}{iniziacorrezioneriga}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{iniziatabella}
\setinterfacecommand{starttables}{iniziatabelle}
\setinterfacecommand{starttext}{iniziatesto}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{inizialineatesto}
\setinterfacecommand{startunpacked}{iniziaunpacked}
\setinterfacecommand{startversion}{iniziaversione}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{terminacomponente}
\setinterfacecommand{stopdocument}{terminadocumento}
\setinterfacecommand{stopenvironment}{terminaambiente}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{terminaglobale}
\setinterfacecommand{stopline}{terminariga}
\setinterfacecommand{stoplinecorrection}{terminacorrezioneriga}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{terminatabella}
\setinterfacecommand{stoptables}{terminatabelle}
\setinterfacecommand{stoptext}{terminatesto}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{terminalineatesto}
\setinterfacecommand{stopunpacked}{terminaunpacked}
\setinterfacecommand{stopversion}{terminaversioni}
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index f82be039c..9a05e59d9 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -13,7 +13,7 @@ return {
--
"zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive",
"plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred",
- "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard",
+ "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone",
"zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint",
"points", "halfpoint",
"zeroskip",
@@ -31,7 +31,9 @@ return {
--
"fmtname", "fmtversion", "texengine", "texenginename", "texengineversion",
"luatexengine", "pdftexengine", "xetexengine", "unknownengine",
- "etexversion", "pdftexversion", "xetexversion", "xetexrevision",
+ -- "etexversion",
+ -- "pdftexversion", "pdftexrevision",
+ -- "xetexversion", "xetexrevision",
--
"activecatcode",
--
@@ -47,7 +49,7 @@ return {
"inicatcodes",
"ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes",
"prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes",
- "xmlcatcodes",
+ "xmlcatcodes", "ctdcatcodes",
--
"escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode",
"endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode",
@@ -60,7 +62,7 @@ return {
"lessthanasciicode", "morethanasciicode", "doublecommentsignal",
"atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode",
"doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode",
- "primeasciicode",
+ "primeasciicode", "hyphenasciicode",
--
"activemathcharcode",
--
@@ -85,23 +87,27 @@ return {
"fontexheight", "fontemwidth", "fontextraspace", "slantperpoint",
"interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace",
"mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight",
+ "muquad",
--
-- maybe a different class
--
"startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset",
- "doifmode", "doifmodeelse", "doifnotmode",
- "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes",
+ "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode",
+ "startmodeset","stopmodeset",
+ "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes",
+ "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes",
"startenvironment", "stopenvironment", "environment",
"startcomponent", "stopcomponent", "component",
"startproduct", "stopproduct", "product",
"startproject", "stopproject", "project",
"starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
"startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule","setupmodule","currentmoduleparameter","moduleparameter",
+ "everystarttext", "everystoptext",
--
"startTEXpage", "stopTEXpage",
-- "startMPpage", "stopMPpage", -- already catched by nested lexer
--
- "enablemode", "disablemode", "preventmode",
+ "enablemode", "disablemode", "preventmode", "definemode",
"globalenablemode", "globaldisablemode", "globalpreventmode",
"pushmode", "popmode",
--
@@ -119,11 +125,12 @@ return {
--
"lefttorightmark", "righttoleftmark",
--
- "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace",
+ "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace",
"ideographicspace", "ideographichalffillspace",
"twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace",
"figurespace", "punctuationspace", "hairspace",
"zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj",
+ "optionalspace", "asciispacechar",
},
["helpers"] = {
--
@@ -136,9 +143,11 @@ return {
"starttexdefinition", "stoptexdefinition",
"starttexcode", "stoptexcode",
"startcontextcode", "stopcontextcode",
+ "startcontextdefinitioncode", "stopcontextdefinitioncode",
+ "texdefinition",
--
- "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup",
- "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler",
+ "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup",
+ "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler",
--
"newmode", "setmode", "resetmode",
"newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode",
@@ -150,7 +159,7 @@ return {
"then",
"begcsname",
--
- "strippedcsname",
+ "strippedcsname","checkedstrippedcsname",
--
"firstargumentfalse", "firstargumenttrue",
"secondargumentfalse", "secondargumenttrue",
@@ -165,6 +174,8 @@ return {
--
"donetrue", "donefalse",
--
+ "inlineordisplaymath","indisplaymath","forcedisplaymath","startforceddisplaymath","stopforceddisplaymath","reqno",
+ --
"htdp",
"unvoidbox",
"hfilll", "vfilll",
@@ -196,7 +207,7 @@ return {
--
"normalbaselineskip", "normallineskip", "normallineskiplimit",
--
- "availablehsize", "localhsize", "setlocalhsize",
+ "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction",
--
"nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs",
--
@@ -218,23 +229,36 @@ return {
"scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip",
--
"doif", "doifnot", "doifelse",
- "doifinset", "doifnotinset", "doifinsetelse",
- "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse",
- "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined",
+ "doifinset", "doifnotinset",
+ "doifelseinset", "doifinsetelse",
+ "doifelsenextchar", "doifnextcharelse",
+ "doifelsenextoptional", "doifnextoptionalelse",
+ "doifelsenextoptionalcs", "doifnextoptionalcselse",
+ "doifelsefastoptionalcheck", "doiffastoptionalcheckelse",
+ "doifelsenextbgroup", "doifnextbgroupelse",
+ "doifelsenextbgroupcs", "doifnextbgroupcselse",
+ "doifelsenextparenthesis", "doifnextparenthesiselse",
+ "doifelseundefined", "doifundefinedelse",
+ "doifelsedefined", "doifdefinedelse",
+ "doifundefined", "doifdefined",
"doifelsevalue", "doifvalue", "doifnotvalue",
- "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse",
- "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing",
- "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber",
- "doifcommonelse", "doifcommon", "doifnotcommon",
- "doifinstring", "doifnotinstring", "doifinstringelse",
- "doifassignmentelse", "docheckassignment",
+ "doifnothing", "doifsomething",
+ "doifelsenothing", "doifnothingelse",
+ "doifelsesomething", "doifsomethingelse",
+ "doifvaluenothing", "doifvaluesomething",
+ "doifelsevaluenothing", "doifvaluenothingelse",
+ "doifelsedimension", "doifdimensionelse",
+ "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber",
+ "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon",
+ "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse",
+ "doifelseassignment", "doifassignmentelse", "docheckassignment",
--
"tracingall", "tracingnone", "loggingall",
--
"removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
--
- "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace",
- "obeyspaces", "obeylines", "obeyedspace", "obeyedline",
+ "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp",
+ "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage",
"normalspace",
--
"executeifdefined",
@@ -273,13 +297,17 @@ return {
"firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments",
--
"firstofoneunexpanded",
+ "firstoftwounexpanded", "secondoftwounexpanded",
+ "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded",
--
"gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments",
"gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals",
--
"dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
--
- "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
+ "newconstant", "setnewconstant", "setconstant", "setconstantvalue",
+ "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue",
+ --
"newmacro", "setnewmacro", "newfraction",
"newsignal",
--
@@ -292,7 +320,7 @@ return {
--
"modulonumber", "dividenumber",
--
- "getfirstcharacter", "doiffirstcharelse",
+ "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse",
--
"startnointerference", "stopnointerference",
--
@@ -300,7 +328,9 @@ return {
--
"leftorright",
--
- "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
+ "offinterlineskip", "oninterlineskip", "nointerlineskip",
+ --
+ "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
--
"ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
"ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing",
@@ -334,13 +364,17 @@ return {
"uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette",
"mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox",
"mathtext", "setmathsmalltextbox", "setmathtextbox",
+ "pushmathstyle", "popmathstyle",
--
"triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle",
"triggeruncrampedstyle", "triggercrampedstyle",
"triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle",
"triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle",
--
- "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse",
+ "luaexpr",
+ "expelsedoif", "expdoif", "expdoifnot",
+ "expdoifelsecommon", "expdoifcommonelse",
+ "expdoifelseinset", "expdoifinsetelse",
--
"ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport",
"ctxlua", "luacode", "lateluacode", "directluacode",
@@ -351,6 +385,8 @@ return {
"definenamedlua",
"obeylualines", "obeyluatokens",
"startluacode", "stopluacode", "startlua", "stoplua",
+ "startctxfunction","stopctxfunction","ctxfunction",
+ "startctxfunctiondefinition","stopctxfunctiondefinition", "installctxfunction",
--
"carryoverpar",
--
@@ -365,5 +401,8 @@ return {
--
"lesshyphens", "morehyphens", "nohyphens", "dohyphens",
--
+ "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath",
+ --
+ "nobreak", "allowbreak", "goodbreak",
}
}
diff --git a/tex/context/base/mult-mes.lua b/tex/context/base/mult-mes.lua
index d9ee151a8..64184f8b0 100644
--- a/tex/context/base/mult-mes.lua
+++ b/tex/context/base/mult-mes.lua
@@ -996,6 +996,21 @@ return {
["publications:7"] = {
en = "placing all entries, use 'text' to be more selective",
},
+ ["publications:10"] = {
+ en = "unknown command %a, using built-in context variant %a",
+ },
+ ["publications:11"] = {
+ en = "unknown command %a",
+ },
+ ["publications:12"] = {
+ en = "missing setup: %s",
+ },
+ ["publications:13"] = {
+ en = "no field %a for tag %a in dataset %a",
+ },
+ ["publications:14"] = {
+ en = "unknown rendering alternative %a",
+ },
["pushing level: %a"] = {
nl = "niveau omhoog: %a",
},
diff --git a/tex/context/base/mult-mps.lua b/tex/context/base/mult-mps.lua
index 104b9d42e..a6bebc266 100644
--- a/tex/context/base/mult-mps.lua
+++ b/tex/context/base/mult-mps.lua
@@ -3,7 +3,7 @@ return {
"btex", "etex", "verbatimtex",
},
shortcuts = {
- "..", "...", "--", "---", "&",
+ "..", "...", "--", "---", "&", "\\",
},
primitives = { -- to be checked
"charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing",
@@ -31,8 +31,10 @@ return {
"def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary",
"tertiary", "primarydef", "secondarydef", "tertiarydef",
"randomseed", "also", "contour", "doublepath",
- "withcolor", "withcmykcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
- "forsuffixes", "downto", "upto", "step", "until",
+ "withcolor", "withcmykcolor", "withpen",
+ "dashed",
+ "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
+ "forsuffixes", "step", "until",
"charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable",
"boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize",
"fontmaking", "charexists",
@@ -50,27 +52,34 @@ return {
"withprescript", "withpostscript",
"top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt",
--
- "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart",
+ "redpart", "greenpart", "bluepart",
+ "cyanpart", "magentapart", "yellowpart",
+ "blackpart",
"prescriptpart", "postscriptpart",
- "rgbcolor", "cmykcolor", "greycolor", "graycolor",
- "colormodel", "graypart",
+ "rgbcolor", "cmykcolor", -- "greycolor", "graycolor",
+ "colormodel", "graypart", "greypart", "greycolor", "graycolor",
"dashpart", "penpart",
-- "colorpart",
"stroked", "filled", "textual", "clipped", "bounded", "pathpart",
"expandafter",
- "minute", "hour", "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline",
+ "minute", "hour",
+ "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline",
"fontpart", "fontsize", "glyph", "restoreclipcolor", "troffmode",
+ --
+ "runscript", "maketext",
},
commands = {
+ "upto", "downto",
"beginfig", "endfig",
- "beginglyph", "endglyph", "charscale",
+ "beginglyph", "endglyph", -- actually a mult-fun one
"rotatedaround", "reflectedabout",
"arrowhead",
"currentpen", "currentpicture", "cuttings",
"defaultfont", "extra_beginfig", "extra_endfig",
"ditto", "EOF", "down",
"evenly", "fullcircle", "halfcircle", "identity", "in", "left",
- "origin", "pensquare", "quartercircle", "right",
+ "pensquare", "penrazor", "penspec",
+ "origin", "quartercircle", "right",
"unitsquare", "up", "withdots",
"abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir",
"directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod",
@@ -97,14 +106,13 @@ return {
"counterclockwise", "tensepath", "takepower", "direction",
"softjoin", -- "magstep",
"makelabel", -- "laboff",
- "rotatedabout", "flex", "superellipse", "erase", "image",
+ "rotatedabout", "flex", "superellipse", "image",
"nullpen", "savepen", "clearpen", "penpos", "penlabels", -- "clear_pen_memory",
- "range", "numtok", "thru",
+ "range", "thru",
"z", "laboff",
"bye",
--
"red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background",
- "graypart", "graycolor",
--
"mm", "pt", "dd", "bp", "cm", "pc", "cc", "in",
--
@@ -114,8 +122,41 @@ return {
--
"mitered", "rounded", "beveled", "butt", "squared",
"eps", "epsilon", "infinity",
- "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius",
+ "bboxmargin",
+ "ahlength", "ahangle",
+ "labeloffset", "dotlabeldiam",
+ "defaultpen", "defaultscale",
+ "join_radius",
+ "charscale", -- actually a mult-fun one
--
"pen_lft", "pen_rt", "pen_top", "pen_bot", -- "pen_count_",
},
+ metafont = {
+ -- :: =: =:| =:|> |=: |=:> |=:| |=:|> |=:|>> ||:
+ "autorounding", "beginchar", "blacker", "boundarychar", "capsule_def",
+ "capsule_end", "change_width", "chardp", "chardx", "chardy", "charexists",
+ "charext", "charht", "charic", "charlist", "charwd", "cull", "cullit",
+ "currenttransform", "currentwindow", "define_blacker_pixels",
+ "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels",
+ "define_horizontal_corrected_pixels", "define_pixels",
+ "define_whole_blacker_pixels", "define_whole_pixels",
+ "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels",
+ "designsize", "display", "displaying", "endchar", "extensible", "extra_beginchar",
+ "extra_endchar", "extra_setup", "fillin", "font_coding_scheme",
+ "font_extra_space", "font_identifier", "font_normal_shrink",
+ "font_normal_space", "font_normal_stretch", "font_quad", "font_size",
+ "font_slant", "font_x_height", "fontdimen", "fontmaking", "gfcorners",
+ "granularity", "grayfont", "headerbyte", "hppp", "hround", "imagerules",
+ "italcorr", "kern", "labelfont", "ligtable", "lowres_fix", "makebox",
+ "makegrid", "maketicks", "mode_def", "mode_setup", "nodisplays",
+ "notransforms", "numspecial", "o_correction", "openit", "openwindow",
+ "pixels_per_inch", "proofing", "proofoffset", "proofrule", "proofrulethickness",
+ "rulepen", "screenchars", "screenrule", "screenstrokes", "screen_cols", "screen_rows",
+ "showit", "slantfont", "smode", "smoothing", "titlefont", "totalweight",
+ "tracingedges", "tracingpens", "turningcheck", "unitpixel", "vppp", "vround",
+ "xoffset", "yoffset",
+ },
+ disabled = {
+ "verbatimtex", "troffmode"
+ }
}
diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii
index 9f91515cb..22350dc50 100644
--- a/tex/context/base/mult-nl.mkii
+++ b/tex/context/base/mult-nl.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{lijnlinks}
\setinterfacevariable{flushouter}{lijnbuiten}
\setinterfacevariable{flushright}{lijnrechts}
+\setinterfacevariable{followingpage}{opvolgendepagina}
\setinterfacevariable{footer}{voet}
\setinterfacevariable{footnote}{voetnoot}
\setinterfacevariable{force}{forceer}
@@ -287,20 +288,21 @@
\setinterfacevariable{mathematics}{wiskunde}
\setinterfacevariable{mathmatrix}{wiskundematrix}
\setinterfacevariable{max}{max}
-\setinterfacevariable{maxdepth}{maxdepth}
-\setinterfacevariable{maxheight}{maxheight}
-\setinterfacevariable{maxwidth}{maxwidth}
+\setinterfacevariable{maxdepth}{maxdiepte}
+\setinterfacevariable{maxheight}{maxhoogte}
+\setinterfacevariable{maxwidth}{maxbreedte}
\setinterfacevariable{may}{mei}
\setinterfacevariable{mediaeval}{mediaeval}
\setinterfacevariable{medium}{middel}
\setinterfacevariable{middle}{midden}
\setinterfacevariable{min}{min}
-\setinterfacevariable{mindepth}{mindepth}
+\setinterfacevariable{mindepth}{mindiepte}
\setinterfacevariable{minheight}{minhoogte}
\setinterfacevariable{minwidth}{minbreedte}
\setinterfacevariable{mirrored}{gespiegeld}
\setinterfacevariable{monday}{maandag}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monovet}
\setinterfacevariable{month}{maand}
\setinterfacevariable{more}{meer}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positief}
\setinterfacevariable{postponing}{uitstellen}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{voorafgaandepagina}
\setinterfacevariable{preference}{voorkeur}
\setinterfacevariable{preview}{preview}
\setinterfacevariable{previous}{vorige}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{aanelkaar}
-\setinterfacevariable{setups}{instellingen}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{kort}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{koppeling}
\setinterfaceconstant{couplingway}{koppelwijze}
\setinterfaceconstant{criterium}{criterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{huidige}
\setinterfaceconstant{cutspace}{snijwit}
\setinterfaceconstant{dash}{streep}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{evenmarge}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansie}
\setinterfaceconstant{export}{exporteer}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{file}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{eerstepagina}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{inspringen}
\setinterfaceconstant{indentnext}{springvolgendein}
\setinterfaceconstant{indicator}{aanduiding}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{binnen}
\setinterfaceconstant{innermargin}{binnenmarge}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nrechts}
\setinterfaceconstant{ntop}{nboven}
\setinterfaceconstant{number}{nummer}
+\setinterfaceconstant{numberalign}{nummeruitlijnen}
\setinterfaceconstant{numbercolor}{nummerkleur}
\setinterfaceconstant{numbercommand}{nummercommando}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -944,7 +950,7 @@
\setinterfaceconstant{reduction}{reductie}
\setinterfaceconstant{ref}{ref}
\setinterfaceconstant{refcommand}{refcommand}
-\setinterfaceconstant{reference}{verwijzing}
+\setinterfaceconstant{reference}{referentie}
\setinterfaceconstant{referenceprefix}{referenceprefix}
\setinterfaceconstant{referencing}{refereren}
\setinterfaceconstant{region}{gebied}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixscheider}
\setinterfaceconstant{suffixstopper}{suffixafsluiter}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1092,7 +1100,7 @@
\setinterfaceconstant{totalnumber}{totalnumber}
\setinterfaceconstant{type}{type}
\setinterfaceconstant{unit}{eenheid}
-\setinterfaceconstant{unknownreference}{onbekendeverwijzing}
+\setinterfaceconstant{unknownreference}{onbekendereferentie}
\setinterfaceconstant{urlalternative}{urlvariant}
\setinterfaceconstant{urlspace}{urlspatie}
\setinterfaceconstant{validate}{valideer}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definieertabelvorm}
\setinterfacecommand{definetabulate}{definieertabulatie}
\setinterfacecommand{definetext}{definieertekst}
+\setinterfacecommand{definetextbackground}{definieertekstachtergrond}
\setinterfacecommand{definetextposition}{definieertekstpositie}
\setinterfacecommand{definetextvariable}{definieertekstvariabele}
\setinterfacecommand{definetype}{definieertype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{papierhoogte}
\setinterfacecommand{paperwidth}{papierbreedte}
\setinterfacecommand{periods}{punten}
-\setinterfacecommand{plaatsruwelijst}{plaatsruwelijst}
\setinterfacecommand{placebookmarks}{plaatsbookmarks}
\setinterfacecommand{placecombinedlist}{plaatssamengesteldelijst}
\setinterfacecommand{placefloat}{plaatsplaatsblok}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{plaatskoptekst}
\setinterfacecommand{placelegend}{plaatslegenda}
\setinterfacecommand{placelist}{plaatslijst}
+\setinterfacecommand{placelistofsynonyms}{plaatslijstmetsynoniemen}
\setinterfacecommand{placelocalfootnotes}{plaatslokalevoetnoten}
\setinterfacecommand{placelogos}{plaatsbeeldmerken}
\setinterfacecommand{placeongrid}{plaatsopgrid}
\setinterfacecommand{placeontopofeachother}{plaatsonderelkaar}
\setinterfacecommand{placepagenumber}{plaatspaginanummer}
+\setinterfacecommand{placerawlist}{plaatsruwelijst}
\setinterfacecommand{placereferencelist}{plaatsreferentielijst}
\setinterfacecommand{placeregister}{plaatsregister}
\setinterfacecommand{placerule}{plaatslijn}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{stelregisterin}
\setinterfacecommand{setuprotate}{stelroterenin}
\setinterfacecommand{setuprule}{stellijnin}
-\setinterfacecommand{setups}{instellingen}
\setinterfacecommand{setupscreens}{stelrastersin}
\setinterfacecommand{setupsection}{stelsectiein}
\setinterfacecommand{setupsectionblock}{stelsectieblokin}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{steltabellenin}
\setinterfacecommand{setuptabulate}{steltabulatiein}
\setinterfacecommand{setuptext}{steltekstin}
+\setinterfacecommand{setuptextbackground}{steltekstachtergrondin}
\setinterfacecommand{setuptextposition}{steltekstpositiein}
\setinterfacecommand{setuptextrules}{steltekstlijnenin}
\setinterfacecommand{setuptexttexts}{stelteksttekstenin}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdocument}
\setinterfacecommand{startenvironment}{startomgeving}
\setinterfacecommand{startfigure}{startfiguur}
+\setinterfacecommand{startframed}{startomlijnd}
\setinterfacecommand{startglobal}{startglobaal}
\setinterfacecommand{startline}{startregel}
\setinterfacecommand{startlinecorrection}{startregelcorrectie}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttabel}
\setinterfacecommand{starttables}{starttabellen}
\setinterfacecommand{starttext}{starttekst}
+\setinterfacecommand{starttextbackground}{starttekstachtergrond}
\setinterfacecommand{starttextrule}{starttekstlijn}
\setinterfacecommand{startunpacked}{startvanelkaar}
\setinterfacecommand{startversion}{startversie}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stoponderdeel}
\setinterfacecommand{stopdocument}{stopdocument}
\setinterfacecommand{stopenvironment}{stopomgeving}
+\setinterfacecommand{stopframed}{stopomlijnd}
\setinterfacecommand{stopglobal}{stopglobaal}
\setinterfacecommand{stopline}{stopregel}
\setinterfacecommand{stoplinecorrection}{stopregelcorrectie}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptabel}
\setinterfacecommand{stoptables}{stoptabellen}
\setinterfacecommand{stoptext}{stoptekst}
+\setinterfacecommand{stoptextbackground}{stoptekstachtergrond}
\setinterfacecommand{stoptextrule}{stoptekstlijn}
\setinterfacecommand{stopunpacked}{stopvanelkaar}
\setinterfacecommand{stopversion}{stopversie}
diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii
index 240130cdf..32cf32db1 100644
--- a/tex/context/base/mult-pe.mkii
+++ b/tex/context/base/mult-pe.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{پمپ‌چپ}
\setinterfacevariable{flushouter}{پمپ‌خارجی}
\setinterfacevariable{flushright}{پمپ‌راست}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{ته‌برگ}
\setinterfacevariable{footnote}{پانوشت}
\setinterfacevariable{force}{اجبار}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{منعکس}
\setinterfacevariable{monday}{دوشنبه}
\setinterfacevariable{mono}{مونو}
+\setinterfacevariable{monobold}{monobold}
\setinterfacevariable{month}{ماه}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{شکست‌کلمات‌بیشتر}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{مثبت}
\setinterfacevariable{postponing}{تاخیر}
\setinterfacevariable{postscript}{پست‌اسکریپت}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{ترجیح}
\setinterfacevariable{preview}{پیش‌دید}
\setinterfacevariable{previous}{قبلی}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{سپتامبر}
\setinterfacevariable{serif}{سریف}
\setinterfacevariable{serried}{تنگ‌هم}
-\setinterfacevariable{setups}{بارگذاریها}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{ورقه}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{تزویج}
\setinterfaceconstant{couplingway}{روش‌تزویج}
\setinterfaceconstant{criterium}{criterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{جاری}
\setinterfaceconstant{cutspace}{فضای‌برش}
\setinterfaceconstant{dash}{دش}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{عرض‌یکسان}
\setinterfaceconstant{escape}{فرار}
\setinterfaceconstant{evenmargin}{حاشیه‌زوج}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{گسترش}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{پرونده}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{صفحه‌اول}
\setinterfaceconstant{focus}{تمرکز}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{تورفتگی}
\setinterfaceconstant{indentnext}{متن‌تورفته}
\setinterfaceconstant{indicator}{اندیکاتور}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{داخلی}
\setinterfaceconstant{innermargin}{حاشیه‌داخلی}
\setinterfaceconstant{inputfile}{پرونده‌ورودی}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nright}
\setinterfaceconstant{ntop}{ntop}
\setinterfaceconstant{number}{شماره}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{رنگ‌شماره}
\setinterfaceconstant{numbercommand}{فرمان‌شماره}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1001,7 +1007,7 @@
\setinterfaceconstant{separatorcolor}{separatorcolor}
\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{قراربده}
-\setinterfaceconstant{setups}{بارگذاریها}
+\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{کنار}
\setinterfaceconstant{sidealign}{تنظیم‌کنار}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{پسوند}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{تعریف‌الگوی‌جدول}
\setinterfacecommand{definetabulate}{تعریف‌جدول‌بندی}
\setinterfacecommand{definetext}{تعریف‌متن}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{تعریف‌مکان‌متن}
\setinterfacecommand{definetextvariable}{تعریف‌متغیرمتن}
\setinterfacecommand{definetype}{تعریف‌تایپ}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{ارتفاع‌برگ}
\setinterfacecommand{paperwidth}{عرض‌برگ}
\setinterfacecommand{periods}{نقطه‌ها}
-\setinterfacecommand{plaatsruwelijst}{درج‌لیست‌خام}
\setinterfacecommand{placebookmarks}{درج‌چوب‌خط}
\setinterfacecommand{placecombinedlist}{درج‌لیست‌مختلط}
\setinterfacecommand{placefloat}{درج‌شناور}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{درج‌متن‌سر}
\setinterfacecommand{placelegend}{درج‌راهنما}
\setinterfacecommand{placelist}{درج‌لیست}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{درج‌پانوشتهای‌موضعی}
\setinterfacecommand{placelogos}{درج‌آرمها}
\setinterfacecommand{placeongrid}{درج‌در‌توری}
\setinterfacecommand{placeontopofeachother}{درج‌در‌بالای‌یکدیگر}
\setinterfacecommand{placepagenumber}{درج‌شماره‌صفحه}
+\setinterfacecommand{placerawlist}{درج‌لیست‌خام}
\setinterfacecommand{placereferencelist}{درج‌لیست‌مرجع}
\setinterfacecommand{placeregister}{درج‌ثبت}
\setinterfacecommand{placerule}{درج‌خط}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{بارگذاری‌ثبت}
\setinterfacecommand{setuprotate}{بارگذاری‌دوران}
\setinterfacecommand{setuprule}{بارگذاری‌خط}
-\setinterfacecommand{setups}{بارگذاریها}
\setinterfacecommand{setupscreens}{بارگذاری‌پرده‌ها}
\setinterfacecommand{setupsection}{بارگذاری‌بخش}
\setinterfacecommand{setupsectionblock}{بارگذاری‌بلوک‌بخش}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{بارگذاری‌جدولها}
\setinterfacecommand{setuptabulate}{بارگذاری‌جدول‌بندی}
\setinterfacecommand{setuptext}{بارگذاری‌متن}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{بارگذاری‌مکان‌متن}
\setinterfacecommand{setuptextrules}{بارگذاری‌خطهای‌متن}
\setinterfacecommand{setuptexttexts}{بارگذاری‌متن‌متنها}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{شروع‌نوشتار}
\setinterfacecommand{startenvironment}{شروع‌محیط}
\setinterfacecommand{startfigure}{شروع‌شکل}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{شروع‌سراسری}
\setinterfacecommand{startline}{شروع‌خط}
\setinterfacecommand{startlinecorrection}{شروع‌تصحیح‌خط}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{شروع‌جدول}
\setinterfacecommand{starttables}{شروع‌جدولها}
\setinterfacecommand{starttext}{شروع‌متن}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{شروع‌خط‌متن}
\setinterfacecommand{startunpacked}{شروع‌غیر‌فشرده}
\setinterfacecommand{startversion}{شروع‌نسخه}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{پایان‌مولفه}
\setinterfacecommand{stopdocument}{پایان‌نوشتار}
\setinterfacecommand{stopenvironment}{پایان‌محیط}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{پایان‌سراسری}
\setinterfacecommand{stopline}{پایان‌خط}
\setinterfacecommand{stoplinecorrection}{پایان‌تصحیح‌خط}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{پایان‌جدول}
\setinterfacecommand{stoptables}{پایان‌جدولها}
\setinterfacecommand{stoptext}{پایان‌متن}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{پایان‌خط‌متن}
\setinterfacecommand{stopunpacked}{پایان‌غیرفشرده}
\setinterfacecommand{stopversion}{پایان‌نسخه}
diff --git a/tex/context/base/mult-prm.lua b/tex/context/base/mult-prm.lua
index e6fa4abcc..3b4d84356 100644
--- a/tex/context/base/mult-prm.lua
+++ b/tex/context/base/mult-prm.lua
@@ -232,9 +232,10 @@ return {
"latelua",
"luaescapestring",
"luastartup",
- "luatexdatestamp",
+ "luatexbanner",
"luatexrevision",
"luatexversion",
+ "luafunction",
"mathstyle",
"nokerns",
"noligs",
@@ -252,6 +253,7 @@ return {
"suppressifcsnameerror",
"suppresslongerror",
"suppressoutererror",
+ "suppressmathparerror",
"synctex",
},
["omega"]={
@@ -573,10 +575,10 @@ return {
"catcodetable",
"char",
"chardef",
- "chardp",
- "charht",
- "charit",
- "charwd",
+--"chardp",
+--"charht",
+--"charit",
+--"charwd",
"cleaders",
"clearmarks",
"closein",
@@ -772,7 +774,7 @@ return {
"lpcode",
"luaescapestring",
"luastartup",
- "luatexdatestamp",
+ "luatexbanner",
"luatexrevision",
"luatexversion",
"mag",
diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii
index 3b7206e44..a5d90033f 100644
--- a/tex/context/base/mult-ro.mkii
+++ b/tex/context/base/mult-ro.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{subsol}
\setinterfacevariable{footnote}{notasubsol}
\setinterfacevariable{force}{fortat}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{oglindit}
\setinterfacevariable{monday}{luni}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monoaldin}
\setinterfacevariable{month}{luna}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positiv}
\setinterfacevariable{postponing}{postponing}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{preferinta}
\setinterfacevariable{preview}{previzualizare}
\setinterfacevariable{previous}{precedent}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{septembrie}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
-\setinterfacevariable{setups}{setari}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{cuplare}
\setinterfaceconstant{couplingway}{modcuplare}
\setinterfaceconstant{criterium}{criteriu}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{curent}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{dash}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{marginepara}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansiune}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{fisier}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{primapagina}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{aliniat}
\setinterfaceconstant{indentnext}{aliniaturmator}
\setinterfaceconstant{indicator}{indicator}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{intern}
\setinterfaceconstant{innermargin}{innermargin}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{ndreapta}
\setinterfaceconstant{ntop}{nsus}
\setinterfaceconstant{number}{numar}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{culoarenumar}
\setinterfaceconstant{numbercommand}{comandanumar}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definestesablontabel}
\setinterfacecommand{definetabulate}{definestetabulatori}
\setinterfacecommand{definetext}{definestetext}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definestepozitietext}
\setinterfacecommand{definetextvariable}{definestevariabilatext}
\setinterfacecommand{definetype}{definetype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{inaltimehartie}
\setinterfacecommand{paperwidth}{latimehartie}
\setinterfacecommand{periods}{puncte}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{plaseazasemnecarte}
\setinterfacecommand{placecombinedlist}{punelistacombinata}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placeheadtext}
\setinterfacecommand{placelegend}{punelegenda}
\setinterfacecommand{placelist}{punelista}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{punenotesubsollocale}
\setinterfacecommand{placelogos}{punelogouri}
\setinterfacecommand{placeongrid}{plaseazapegrid}
\setinterfacecommand{placeontopofeachother}{punedeasuprafiecareia}
\setinterfacecommand{placepagenumber}{punenumarpagina}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{puneregistru}
\setinterfacecommand{placerule}{punerigla}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{seteazaregistru}
\setinterfacecommand{setuprotate}{seteazarotare}
\setinterfacecommand{setuprule}{seteazarigla}
-\setinterfacecommand{setups}{setari}
\setinterfacecommand{setupscreens}{seteazaecrane}
\setinterfacecommand{setupsection}{seteazasectiune}
\setinterfacecommand{setupsectionblock}{seteazablocsectiune}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{seteazatabele}
\setinterfacecommand{setuptabulate}{seteazatabulatori}
\setinterfacecommand{setuptext}{seteazatext}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{seteazapozitietext}
\setinterfacecommand{setuptextrules}{seteazarigletext}
\setinterfacecommand{setuptexttexts}{seteazatextetext}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdocument}
\setinterfacecommand{startenvironment}{startmediu}
\setinterfacecommand{startfigure}{startfigura}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{startglobal}
\setinterfacecommand{startline}{startlinie}
\setinterfacecommand{startlinecorrection}{startcorectielinie}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttabel}
\setinterfacecommand{starttables}{starttabele}
\setinterfacecommand{starttext}{starttext}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{startriglatext}
\setinterfacecommand{startunpacked}{startneimpachetat}
\setinterfacecommand{startversion}{startversiune}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stopcomponenta}
\setinterfacecommand{stopdocument}{stopdocument}
\setinterfacecommand{stopenvironment}{stopmediu}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stopblobal}
\setinterfacecommand{stopline}{stoplinie}
\setinterfacecommand{stoplinecorrection}{stopcorectielinie}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptabel}
\setinterfacecommand{stoptables}{stoptabele}
\setinterfacecommand{stoptext}{stoptext}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stopriglatext}
\setinterfacecommand{stopunpacked}{stopneimpachetat}
\setinterfacecommand{stopversion}{stopversiune}
diff --git a/tex/context/base/mult-sys.mkiv b/tex/context/base/mult-sys.mkiv
index 8c1bff2bc..b1475f2dc 100644
--- a/tex/context/base/mult-sys.mkiv
+++ b/tex/context/base/mult-sys.mkiv
@@ -124,7 +124,7 @@
\definesystemconstant {SansSlanted}
\definesystemconstant {SansBoldSlanted}
\definesystemconstant {SansCaps}
-% \definesystemconstant {SansCapsSlanted}
+%definesystemconstant {SansCapsSlanted}
\definesystemconstant {Mono}
\definesystemconstant {MonoBold}
@@ -133,8 +133,8 @@
\definesystemconstant {MonoSlanted}
\definesystemconstant {MonoBoldSlanted}
\definesystemconstant {MonoCaps}
-% \definesystemconstant {MonoCapsSlanted}
-% \definesystemconstant {MonoVariable}
+%definesystemconstant {MonoCapsSlanted}
+%definesystemconstant {MonoVariable}
\definesystemconstant {DefaultFont}
@@ -261,6 +261,20 @@
\definesystemconstant {single}
\definesystemconstant {multi}
\definesystemconstant {indeed}
+\definesystemconstant {internal}
+\definesystemconstant {current}
+\definesystemconstant {chain}
+
+% translating setups is asking for a mess so we keep them as-is:
+
+\definesystemconstant {setups}
+
+\definesystemconstant {cite}
+\definesystemconstant {nocite}
+\definesystemconstant {list}
+\definesystemconstant {register}
+\definesystemconstant {author}
+\definesystemconstant {numbering}
% \def\s!parent{->} % 1% faster / => does not work in assignments
% \def\s!child {<-} % 1% faster / <= does not work in assignments
@@ -279,12 +293,16 @@
\definesystemconstant {bold}
\definesystemconstant {italic}
\definesystemconstant {slanted}
-
-\definesystemconstant {default}
\definesystemconstant {smallcaps}
+\definesystemconstant {lcgreek}
+\definesystemconstant {ucgreek}
+\definesystemconstant {sygreek}
+\definesystemconstant {italics}
\definesystemconstant {run}
+\definesystemconstant {default}
+
\definesystemconstant {mode}
\definesystemconstant {setup}
\definesystemconstant {environment}
@@ -298,11 +316,22 @@
\definesystemconstant {uncramped}
\definesystemconstant {cramped}
+\definesystemconstant {hyphenmin}
\definesystemconstant {lefthyphenmin}
\definesystemconstant {righthyphenmin}
\definesystemconstant {lefthyphenchar}
\definesystemconstant {righthyphenchar}
+\definesystemconstant {head}
+\definesystemconstant {symbol}
+\definesystemconstant {sub}
+\definesystemconstant {margin}
+\definesystemconstant {edge}
+
+\definesystemconstant {double}
+\definesystemconstant {decimal}
+\definesystemconstant {binary}
+
%definesystemconstant {skewchar}
%definesystemconstant {hyphenchar}
\definesystemconstant {catcodes}
@@ -322,6 +351,12 @@
\definesystemconstant {designsize}
%definesystemconstant {background}
%definesystemconstant {ucmap}
+\definesystemconstant {level}
+\definesystemconstant {integral}
+\definesystemconstant {insert} % maybe insertclass
+\definesystemconstant {marker}
+
+\definesystemconstant {mixedcolumn}
%definesystemconstant {property}
%definesystemconstant {overprint}
@@ -382,8 +417,13 @@
\definesystemconstant {size}
\definesystemconstant {depth}
+\definesystemconstant {height}
+\definesystemconstant {noheight}
\definesystemconstant {nodepth}
+\definesystemconstant {rel}
+\definesystemconstant {ord}
+
%D Just to be complete we define the standard \TEX\ units.
\definesystemconstant {cm}
@@ -538,6 +578,13 @@
\defineinterfacevariable {c} {c}
\defineinterfacevariable {d} {d}
+%D For tagging:
+
+\definesystemconstant {subtype}
+\definesystemconstant {top}
+\definesystemconstant {bottom}
+\definesystemconstant {both}
+
%D Special purpose variables:
\def\v!oddeven#1{\ifodd#1\v!odd\else\v!even\fi}
diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua
index 81ae496b2..e684aeb7b 100644
--- a/tex/context/base/node-acc.lua
+++ b/tex/context/base/node-acc.lua
@@ -11,10 +11,27 @@ local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local copy_node = node.copy
-local free_nodelist = node.flush_list
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local getid = nuts.getid
+local getfield = nuts.getfield
+local getattr = nuts.getattr
+local getlist = nuts.getlist
+local getchar = nuts.getchar
+local getnext = nuts.getnext
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local copy_node = nuts.copy
+local free_nodelist = nuts.flush_list
+local insert_after = nuts.insert_after
+
+local new_gluespec = nuts.pool.gluespec -- temp hack
local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
@@ -24,62 +41,85 @@ local vlist_code = nodecodes.vlist
local a_characters = attributes.private("characters")
-local threshold = 65536
+local threshold = 65536 -- not used
+local nofreplaced = 0
-- todo: nbsp etc
-- todo: collapse kerns
+-- p_id
+
local function injectspaces(head)
- local p
+ local p, p_id
local n = head
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0)
- -- if n.spec.width > 0 then -- threshold
- if p and p.id == glyph_code then
+ -- if getfield(getfield(n,"spec"),"width") > 0 then -- threshold
+-- if p and p_id == glyph_code then
+ if p and getid(p) == glyph_code then
local g = copy_node(p)
- local c = g.components
+ local c = getfield(g,"components")
if c then -- it happens that we copied a ligature
free_nodelist(c)
- g.components = nil
- g.subtype = 256
+ setfield(g,"components",nil)
+ setfield(g,"subtype",256)
end
- local a = n[a_characters]
- local s = copy_node(n.spec)
- g.char, n.spec = 32, s
- p.next, g.prev = g, p
- g.next, n.prev = n, g
- s.width = s.width - g.width
+ local a = getattr(n,a_characters)
+ -- local s = copy_node(getfield(n,"spec"))
+ -- this will be fixed in luatex but for now a temp hack (zero test)
+ local s = getfield(n,"spec")
+ s = s == 0 and new_gluespec(0) or copy_node(s)
+ --
+ setfield(g,"char",32)
+ setfield(n,"spec",s)
+ -- insert_after(p,p,g)
+ setfield(p,"next",g)
+ setfield(g,"prev",p)
+ setfield(g,"next",n)
+ setfield(n,"prev",g)
+ setfield(s,"width",getfield(s,"width") - getfield(g,"width"))
if a then
- g[a_characters] = a
+ setattr(g,a_characters,a)
end
- s[a_characters] = 0
- n[a_characters] = 0
+ setattr(s,a_characters,0)
+ setattr(n,a_characters,0)
+ nofreplaced = nofreplaced + 1
end
-- end
elseif id == hlist_code or id == vlist_code then
- injectspaces(n.list,attribute)
+ injectspaces(getlist(n),attribute)
-- elseif id == kern_code then -- the backend already collapses
-- local first = n
-- while true do
- -- local nn = n.next
- -- if nn and nn.id == kern_code then
+ -- local nn = getnext(n)
+ -- if nn and getid(nn) == kern_code then
-- -- maybe we should delete kerns but who cares at this stage
- -- first.kern = first.kern + nn.kern
- -- nn.kern = 0
+ -- setfield(first,"kern",getfield(first,"kern") + getfield(nn,"kern")
+ -- setfield(nn,"kern",0)
-- n = nn
-- else
-- break
-- end
-- end
end
+ p_id = id
p = n
- n = n.next
+ n = getnext(n)
end
- return head, true
+ return head, true -- always done anyway
+end
+
+nodes.handlers.accessibility = function(head)
+ local head, done = injectspaces(tonut(head))
+ return tonode(head), done
end
-nodes.handlers.accessibility = injectspaces
+statistics.register("inserted spaces in output",function()
+ if nofreplaced > 0 then
+ return nofreplaced
+ end
+end)
-- todo:
@@ -90,16 +130,18 @@ nodes.handlers.accessibility = injectspaces
-- local function compact(n)
-- local t = { }
-- for n in traverse_id(glyph_code,n) do
--- t[#t+1] = utfchar(n.char) -- check for unicode
+-- t[#t+1] = utfchar(getchar(n)) -- check for unicode
-- end
-- return concat(t,"")
-- end
--
-- local function injectspans(head)
--- for n in traverse_nodes(head) do
--- local id = n.id
+-- local done = false
+-- for n in traverse_nodes(tonuts(head)) do
+-- local id = getid(n)
-- if id == disc then
--- local r, p = n.replace, n.pre
+-- local r = getfield(n,"replace")
+-- local p = getfield(n,"pre")
-- if r and p then
-- local str = compact(r)
-- local hsh = hyphenated[str]
@@ -108,13 +150,14 @@ nodes.handlers.accessibility = injectspaces
-- hyphenated[str] = hsh
-- codes[hsh] = str
-- end
--- n[a_hyphenated] = hsh
+-- setattr(n,a_hyphenated,hsh)
+-- done = true
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(n.list)
+-- injectspans(getlist(n))
-- end
-- end
--- return head, true
+-- return tonodes(head), done
-- end
--
-- nodes.injectspans = injectspans
@@ -122,19 +165,22 @@ nodes.handlers.accessibility = injectspaces
-- tasks.appendaction("processors", "words", "nodes.injectspans")
--
-- local function injectspans(head)
--- for n in traverse_nodes(head) do
--- local id = n.id
+-- local done = false
+-- for n in traverse_nodes(tonut(head)) do
+-- local id = getid(n)
-- if id == disc then
--- local a = n[a_hyphenated]
+-- local a = getattr(n,a_hyphenated)
-- if a then
-- local str = codes[a]
-- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
-- local e = new_pdfliteral("EMC")
--- node.insert_before(head,n,b)
--- node.insert_after(head,n,e)
+-- insert_before(head,n,b)
+-- insert_after(head,n,e)
+-- done = true
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(n.list)
+-- injectspans(getlist(n))
-- end
-- end
+-- return tonodes(head), done
-- end
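
-- The rewrite of node-acc.lua above follows one pattern throughout: the worker runs on
-- "nuts" (direct nodes) via the nodes.nuts accessors, and only the registered handler
-- converts at the boundary with tonut/tonode. A minimal sketch of that pattern, with a
-- hypothetical handler name ("example") and a trivial worker, not taken from the patch:

local nuts       = nodes.nuts
local tonut      = nodes.tonut
local tonode     = nodes.tonode

local getid      = nuts.getid
local getnext    = nuts.getnext

local glyph_code = nodes.nodecodes.glyph

local function count_glyphs(head) -- head is a nut here
    local count   = 0
    local current = head
    while current do
        if getid(current) == glyph_code then
            count = count + 1
        end
        current = getnext(current)
    end
    return head, count > 0 -- head plus a "done" flag, as the real handlers do
end

nodes.handlers.example = function(head) -- head is a node at this level
    local head, done = count_glyphs(tonut(head))
    return tonode(head), done
end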
diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua
index 443c78547..12da8ea8a 100644
--- a/tex/context/base/node-aux.lua
+++ b/tex/context/base/node-aux.lua
@@ -22,82 +22,153 @@ local vlist_code = nodecodes.vlist
local attributelist_code = nodecodes.attributelist -- temporary
local math_code = nodecodes.math
-local nodepool = nodes.pool
-
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local vianuts = nuts.vianuts
+
+local getbox = nuts.getbox
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local free_node = nuts.free
+local hpack_nodes = nuts.hpack
+local unset_attribute = nuts.unset_attribute
+local first_glyph = nuts.first_glyph
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_tail = nuts.tail
+local insert_node_after = nuts.insert_after
+local isnode = nuts.is_node
+local getbox = nuts.getbox
+
+local nodes_traverse_id = nodes.traverse_id
+local nodes_first_glyph = nodes.first_glyph
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_glyph = nodepool.glyph
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local free_node = node.free
-local hpack_nodes = node.hpack
-local unset_attribute = node.unset_attribute
-local first_glyph = node.first_glyph or node.first_character
-local copy_node = node.copy
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local insert_node_after = node.insert_after
-local isnode = node.is_node
-
local unsetvalue = attributes.unsetvalue
local current_font = font.current
-local texgetbox = tex.getbox
+local texsetbox = tex.setbox
local report_error = logs.reporter("node-aux:error")
-function nodes.repackhlist(list,...)
---~ nodes.showsimplelist(list)
+-- At some point we figured that copying before using was the safest bet
+-- when dealing with boxes at the tex end. This is because tex also needs
+-- to manage the grouping (i.e. savestack). However, there is an easy
+-- solution that keeps the tex end happy as tex.setbox deals with this. The
+-- overhead of one temporary list node is negligible.
+--
+-- function tex.takebox(id)
+-- local box = tex.getbox(id)
+-- if box then
+-- local copy = node.copy(box)
+-- local list = box.list
+-- copy.list = list
+-- box.list = nil
+-- tex.setbox(id,nil)
+-- return copy
+-- end
+-- end
+
+local function takebox(id)
+ local box = getbox(id)
+ if box then
+ local copy = copy_node(box)
+ local list = getlist(box)
+ setfield(copy,"list",list)
+ setfield(box,"list",nil)
+ texsetbox(id,nil)
+ return copy
+ end
+end
+
+function nodes.takebox(id)
+ local b = takebox(id)
+ if b then
+ return tonode(b)
+ end
+end
+
+nuts.takebox = takebox
+tex.takebox = nodes.takebox -- sometimes more clear
+
+-- so far
+
+local function repackhlist(list,...)
local temp, b = hpack_nodes(list,...)
- list = temp.list
- temp.list = nil
+ list = getlist(temp)
+ setfield(temp,"list",nil)
free_node(temp)
return list, b
end
+nuts.repackhlist = repackhlist
+
+function nodes.repackhlist(list,...)
+ local list, b = repackhlist(tonut(list),...)
+ return tonode(list), b
+end
+
local function set_attributes(head,attr,value)
for n in traverse_nodes(head) do
- n[attr] = value
- local id = n.id
+ setattr(n,attr,value)
+ local id = getid(n)
if id == hlist_node or id == vlist_node then
- set_attributes(n.list,attr,value)
+ set_attributes(getlist(n),attr,value)
end
end
end
local function set_unset_attributes(head,attr,value)
for n in traverse_nodes(head) do
- if not n[attr] then
- n[attr] = value
+ if not getattr(n,attr) then
+ setattr(n,attr,value)
end
- local id = n.id
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- set_unset_attributes(n.list,attr,value)
+ set_unset_attributes(getlist(n),attr,value)
end
end
end
local function unset_attributes(head,attr)
for n in traverse_nodes(head) do
- n[attr] = unsetvalue
- local id = n.id
+ setattr(n,attr,unsetvalue)
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- unset_attributes(n.list,attr)
+ unset_attributes(getlist(n),attr)
end
end
end
-nodes.setattribute = node.set_attribute
-nodes.getattribute = node.has_attribute
-nodes.unsetattribute = node.unset_attribute
-nodes.has_attribute = node.has_attribute
+-- for old times' sake
+
+nuts.setattribute = nuts.setattr nodes.setattribute = nodes.setattr
+nuts.getattribute = nuts.getattr nodes.getattribute = nodes.getattr
+nuts.unsetattribute = nuts.unset_attribute nodes.unsetattribute = nodes.unset_attribute
+nuts.has_attribute = nuts.has_attribute nodes.has_attribute = nodes.has_attribute
+nuts.firstglyph = nuts.first_glyph nodes.firstglyph = nodes.first_glyph
-nodes.firstglyph = first_glyph
-nodes.setattributes = set_attributes
-nodes.setunsetattributes = set_unset_attributes
-nodes.unsetattributes = unset_attributes
+nuts.setattributes = set_attributes nodes.setattributes = vianuts(set_attributes)
+nuts.setunsetattributes = set_unset_attributes nodes.setunsetattributes = vianuts(set_unset_attributes)
+nuts.unsetattributes = unset_attributes nodes.unsetattributes = vianuts(unset_attributes)
+-- history:
+--
-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
-- return (
-- id ~= glyph_node
@@ -106,29 +177,26 @@ nodes.unsetattributes = unset_attributes
-- or id == adjust_node
-- or id == penalty_node
-- or (id == glue_node and a.spec.writable)
--- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
--- or (id == math_node and a.surround == 0)
--- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
--- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
--- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
+-- or (id == disc_node and getfield(a,"pre") == nil and getfield(a,"post") == nil and getfield(a,"replace") == nil)
+-- or (id == math_node and getfield(a,"surround") == 0)
+-- or (id == kern_node and (getfield(a,"kern") == 0 or getsubtype(subtype) == NORMAL))
+-- or (id == hlist_node and getfield(a,"width") == 0 and getfield(a,"height") == 0 and getfield(a,"depth") == 0 and getlist(a) == nil)
+-- or (id == whatsit_node and getsubtype(a) ~= pdf_refximage_node and getsubtype(a) ~= pdf_refxform_node)
-- )
-- end
-
--- history:
---
--
-- local function glyph_width(a)
--- local ch = chardata[a.font][a.char]
+-- local ch = chardata[getfont(a)][getchar(a)]
-- return (ch and ch.width) or 0
-- end
--
-- local function glyph_total(a)
--- local ch = chardata[a.font][a.char]
+-- local ch = chardata[getfont(a)][getchar(a)]
-- return (ch and (ch.height+ch.depth)) or 0
-- end
--
-- local function non_discardable(a) -- inline
--- return a.id < math_node -- brrrr
+--     return getid(a) < math_node -- brrrr
-- end
--
-- local function calculate_badness(t,s)
@@ -183,8 +251,36 @@ nodes.unsetattributes = unset_attributes
-- return -u
-- end
-- end
+--
+-- if not node.end_of_math then
+-- function node.end_of_math(n)
+--         for n in traverse_id(math_code,getnext(n)) do
+-- return n
+-- end
+-- end
+-- end
+--
+-- nodes.endofmath = node.end_of_math
+--
+-- local function firstline(n)
+-- while n do
+-- local id = getid(n)
+-- if id == hlist_code then
+-- if getsubtype(n) == line_code then
+-- return n
+-- else
+-- return firstline(getlist(n))
+-- end
+-- elseif id == vlist_code then
+-- return firstline(getlist(n))
+-- end
+-- n = getnext(n)
+-- end
+-- end
+--
+-- nodes.firstline = firstline
-function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
if untagged then
return first_glyph(n)
else
@@ -194,43 +290,41 @@ function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
end
end
-function nodes.firstcharinbox(n)
- local l = texgetbox(n).list
+-- function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+-- if untagged then
+-- return nodes_first_glyph(n)
+-- else
+-- for g in nodes_traverse_id(glyph_code,n) do
+-- return g
+-- end
+-- end
+-- end
+
+local function firstcharinbox(n)
+ local l = getlist(getbox(n))
if l then
for g in traverse_id(glyph_code,l) do
- return g.char
+ return getchar(g)
end
end
return 0
end
-if not node.end_of_math then
- function node.end_of_math(n)
- for n in traverse_id(math_code,n.next) do
- return n
+nuts .firstcharinbox = firstcharinbox
+nodes.firstcharinbox = firstcharinbox
+nodes.firstcharacter = vianuts(nuts.firstcharacter)
+
+interfaces.implement {
+ name = "buildtextaccent",
+ arguments = "integer",
+ actions = function(n) -- Is this crap really used? Or was it an experiment?
+ local char = firstcharinbox(n)
+ if char > 0 then
+ -- context.accent(false,char)
+ context([[\accent%s\relax]],char)
end
end
-end
-
-nodes.endofmath = node.end_of_math
-
--- local function firstline(n)
--- while n do
--- local id = n.id
--- if id == hlist_code then
--- if n.subtype == line_code then
--- return n
--- else
--- return firstline(n.list)
--- end
--- elseif id == vlist_code then
--- return firstline(n.list)
--- end
--- n = n.next
--- end
--- end
-
--- nodes.firstline = firstline
+}
-- this depends on fonts, so we have a funny dependency ... will be
-- sorted out .. we could make tonodes a plugin into this
@@ -242,10 +336,8 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
local head, tail, space, fnt, template = nil, nil, nil, nil, nil
if not fnt then
fnt = current_font()
- elseif type(fnt) ~= "number" and fnt.id == "glyph" then
- fnt, template = nil, fnt
- -- else
- -- already a number
+ elseif type(fnt) ~= "number" and getid(fnt) == glyph_code then -- so it has to be a real node
+ fnt, template = nil, tonut(fnt)
end
for s in utfvalues(str) do
local n
@@ -259,12 +351,13 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
end
elseif template then
n = copy_node(template)
- n.char = s
+            setfield(n,"char",s)
else
n = new_glyph(fnt,s)
end
if attr then -- normally false when template
- n.attr = copy_node_list(attr)
+ -- setfield(n,"attr",copy_node_list(attr))
+ setfield(n,"attr",attr)
end
if head then
insert_node_after(head,tail,n)
@@ -276,69 +369,130 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
return head, tail
end
-nodes.tonodes = tonodes
+nuts.tonodes = tonodes
-local function link(list,currentfont,currentattr,head,tail)
+nodes.tonodes = function(str,fnt,attr)
+ local head, tail = tonodes(str,fnt,attr)
+ return tonode(head), tonode(tail)
+end
+
+-- local function link(list,currentfont,currentattr,head,tail)
+-- for i=1,#list do
+-- local n = list[i]
+-- if n then
+-- local tn = isnode(n)
+-- if not tn then
+-- local tn = type(n)
+-- if tn == "number" then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- local h, t = tonodes(tostring(n),currentfont,currentattr)
+-- if not h then
+-- -- skip
+-- elseif not head then
+-- head = h
+-- tail = t
+-- else
+-- setfield(tail,"next",h)
+-- setfield(h,"prev",t)
+-- tail = t
+-- end
+-- elseif tn == "string" then
+-- if #tn > 0 then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- local h, t = tonodes(n,currentfont,currentattr)
+-- if not h then
+-- -- skip
+-- elseif not head then
+-- head, tail = h, t
+-- else
+-- setfield(tail,"next",h)
+-- setfield(h,"prev",t)
+-- tail = t
+-- end
+-- end
+-- elseif tn == "table" then
+-- if #tn > 0 then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- head, tail = link(n,currentfont,currentattr,head,tail)
+-- end
+-- end
+-- elseif not head then
+-- head = n
+-- tail = find_tail(n)
+-- elseif getid(n) == attributelist_code then
+-- -- weird case
+-- report_error("weird node type in list at index %s:",i)
+-- for i=1,#list do
+-- local l = list[i]
+-- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
+-- end
+-- os.exit()
+-- else
+-- setfield(tail,"next",n)
+-- setfield(n,"prev",tail)
+-- if getnext(n) then
+-- tail = find_tail(n)
+-- else
+-- tail = n
+-- end
+-- end
+-- else
+-- -- permitting nil is convenient
+-- end
+-- end
+-- return head, tail
+-- end
+
+local function link(list,currentfont,currentattr,head,tail) -- an oldie, might be replaced
for i=1,#list do
local n = list[i]
if n then
- local tn = isnode(n)
- if not tn then
- local tn = type(n)
- if tn == "number" then
+ local tn = type(n)
+ if tn == "string" then
+ if #tn > 0 then
if not currentfont then
currentfont = current_font()
end
- local h, t = tonodes(tostring(n),currentfont,currentattr)
+ local h, t = tonodes(n,currentfont,currentattr)
if not h then
-- skip
elseif not head then
head, tail = h, t
else
- tail.next, h.prev, tail = h, t, t
- end
- elseif tn == "string" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- local h, t = tonodes(n,currentfont,currentattr)
- if not h then
- -- skip
- elseif not head then
- head, tail = h, t
- else
- tail.next, h.prev, tail = h, t, t
- end
+ setfield(tail,"next",h)
+ setfield(h,"prev",t)
+ tail = t
end
- elseif tn == "table" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- head, tail = link(n,currentfont,currentattr,head,tail)
+ end
+ elseif tn == "table" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
end
+ head, tail = link(n,currentfont,currentattr,head,tail)
end
elseif not head then
head = n
- if n.next then
- tail = slide_nodes(n)
- else
- tail = n
- end
- elseif n.id == attributelist_code then
+ tail = find_tail(n)
+ elseif getid(n) == attributelist_code then
-- weird case
report_error("weird node type in list at index %s:",i)
for i=1,#list do
local l = list[i]
- report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
+ report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
end
os.exit()
else
- tail.next = n
- n.prev = tail
- if n.next then
- tail = slide_nodes(n)
+ setfield(tail,"next",n)
+ setfield(n,"prev",tail)
+ if getnext(n) then
+ tail = find_tail(n)
else
tail = n
end
@@ -350,17 +504,22 @@ local function link(list,currentfont,currentattr,head,tail)
return head, tail
end
-nodes.link = link
+nuts.link = link
+
+nodes.link = function(list,currentfont,currentattr,head,tail)
+ local head, tail = link(list,currentfont,currentattr,tonut(head),tonut(tail))
+ return tonode(head), tonode(tail)
+end
local function locate(start,wantedid,wantedsubtype)
for n in traverse_nodes(start) do
- local id = n.id
+ local id = getid(n)
if id == wantedid then
- if not wantedsubtype or n.subtype == wantedsubtype then
+ if not wantedsubtype or getsubtype(n) == wantedsubtype then
return n
end
elseif id == hlist_code or id == vlist_code then
- local found = locate(n.list,wantedid,wantedsubtype)
+ local found = locate(getlist(n),wantedid,wantedsubtype)
if found then
return found
end
@@ -368,7 +527,12 @@ local function locate(start,wantedid,wantedsubtype)
end
end
-nodes.locate = locate
+nuts.locate = locate
+
+nodes.locate = function(start,wantedid,wantedsubtype)
+ local found = locate(tonut(start),wantedid,wantedsubtype)
+ return found and tonode(found)
+end
-- I have no use for this yet:
--
@@ -381,10 +545,12 @@ nodes.locate = locate
-- return (badness/100)^(1/3)
-- end
--
--- function tex.stretch_amount(skip,badness)
+-- function tex.stretch_amount(skip,badness) -- node no nut
-- if skip.id == gluespec_code then
-- return skip.width + (badness and (badness/100)^(1/3) or 1) * skip.stretch
-- else
-- return 0
-- end
-- end
+
+
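
-- The takebox helpers defined above move a box register's content to Lua without
-- copying the whole list: the register is voided via tex.setbox and only one temporary
-- hlist node is copied. A small usage sketch, not taken from the patch; the box
-- register number (0) and the immediate flush are only for illustration:

local taken = tex.takebox(0) -- a node (hlist/vlist), or nil when the register is void
if taken then
    -- the caller now owns the detached material; here it is simply discarded
    node.flush_list(taken)
end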
diff --git a/tex/context/base/node-bck.lua b/tex/context/base/node-bck.lua
index feaa2c684..99992de09 100644
--- a/tex/context/base/node-bck.lua
+++ b/tex/context/base/node-bck.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['node-bck'] = {
local attributes, nodes, node = attributes, nodes, node
+local tasks = nodes.tasks
+
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -19,11 +21,25 @@ local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
local cell_code = listcodes.cell
-local traverse = node.traverse
-local traverse_id = node.traverse_id
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local traverse = nuts.traverse
+local traverse_id = nuts.traverse_id
-local nodepool = nodes.pool
-local tasks = nodes.tasks
local new_rule = nodepool.rule
local new_glue = nodepool.glue
@@ -37,50 +53,50 @@ local a_alignbackground = attributes.private('alignbackground')
local function add_backgrounds(head) -- rather old code .. to be redone
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local list = current.list
+ local list = getlist(current)
if list then
local head = add_backgrounds(list)
if head then
- current.list = head
+ setfield(current,"list",head)
list = head
end
end
- local width = current.width
+ local width = getfield(current,"width")
if width > 0 then
- local background = current[a_background]
+ local background = getattr(current,a_background)
if background then
-- direct to hbox
-- colorspace is already set so we can omit that and stick to color
- local mode = current[a_colorspace]
+ local mode = getattr(current,a_colorspace)
if mode then
- local height = current.height
- local depth = current.depth
+ local height = getfield(current,"height")
+ local depth = getfield(current,"depth")
local skip = id == hlist_code and width or (height + depth)
local glue = new_glue(-skip)
local rule = new_rule(width,height,depth)
- local color = current[a_color]
- local transparency = current[a_transparency]
- rule[a_colorspace] = mode
+ local color = getattr(current,a_color)
+ local transparency = getattr(current,a_transparency)
+ setattr(rule,a_colorspace,mode)
if color then
- rule[a_color] = color
+ setattr(rule,a_color,color)
end
if transparency then
- rule[a_transparency] = transparency
+ setattr(rule,a_transparency,transparency)
end
- rule.next = glue
- glue.prev = rule
+ setfield(rule,"next",glue)
+ setfield(glue,"prev",rule)
if list then
- glue.next = list
- list.prev = glue
+ setfield(glue,"next",list)
+ setfield(list,"prev",glue)
end
- current.list = rule
+ setfield(current,"list",rule)
end
end
end
end
- current = current.next
+ current = getnext(current)
end
return head, true
end
@@ -88,16 +104,16 @@ end
local function add_alignbackgrounds(head)
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- local list = current.list
+ local list = getlist(current)
if not list then
-- no need to look
- elseif current.subtype == cell_code then
+ elseif getsubtype(current) == cell_code then
local background = nil
local found = nil
-- for l in traverse(list) do
- -- background = l[a_alignbackground]
+ -- background = getattr(l,a_alignbackground)
-- if background then
-- found = l
-- break
@@ -106,7 +122,7 @@ local function add_alignbackgrounds(head)
-- we know that it's a fake hlist (could be user node)
-- but we cannot store tables in user nodes yet
for l in traverse_id(hpack_code,list) do
- background = l[a_alignbackground]
+ background = getattr(l,a_alignbackground)
if background then
found = l
end
@@ -115,28 +131,28 @@ local function add_alignbackgrounds(head)
--
if background then
-- current has subtype 5 (cell)
- local width = current.width
+ local width = getfield(current,"width")
if width > 0 then
- local mode = found[a_colorspace]
+ local mode = getattr(found,a_colorspace)
if mode then
local glue = new_glue(-width)
- local rule = new_rule(width,current.height,current.depth)
- local color = found[a_color]
- local transparency = found[a_transparency]
- rule[a_colorspace] = mode
+ local rule = new_rule(width,getfield(current,"height"),getfield(current,"depth"))
+ local color = getattr(found,a_color)
+ local transparency = getattr(found,a_transparency)
+ setattr(rule,a_colorspace,mode)
if color then
- rule[a_color] = color
+ setattr(rule,a_color,color)
end
if transparency then
- rule[a_transparency] = transparency
+ setattr(rule,a_transparency,transparency)
end
- rule.next = glue
- glue.prev = rule
+ setfield(rule,"next",glue)
+ setfield(glue,"prev",rule)
if list then
- glue.next = list
- list.prev = glue
+ setfield(glue,"next",list)
+ setfield(list,"prev",glue)
end
- current.list = rule
+ setfield(current,"list",rule)
end
end
end
@@ -144,18 +160,37 @@ local function add_alignbackgrounds(head)
add_alignbackgrounds(list)
end
elseif id == vlist_code then
- local list = current.list
+ local list = getlist(current)
if list then
add_alignbackgrounds(list)
end
end
- current = current.next
+ current = getnext(current)
end
return head, true
end
-nodes.handlers.backgrounds = add_backgrounds
-nodes.handlers.alignbackgrounds = add_alignbackgrounds
+-- nodes.handlers.backgrounds = add_backgrounds
+-- nodes.handlers.alignbackgrounds = add_alignbackgrounds
-tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
-tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
+nodes.handlers.backgrounds = function(head) local head, done = add_backgrounds (tonut(head)) return tonode(head), done end
+nodes.handlers.alignbackgrounds = function(head) local head, done = add_alignbackgrounds(tonut(head)) return tonode(head), done end
+
+-- elsewhere: needs checking
+
+-- tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
+-- tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
+
+interfaces.implement {
+ name = "enablebackgroundboxes",
+ onlyonce = true,
+ actions = nodes.tasks.enableaction,
+ arguments = { "'shipouts'", "'nodes.handlers.backgrounds'" }
+}
+
+interfaces.implement {
+ name = "enablebackgroundalign",
+ onlyonce = true,
+ actions = nodes.tasks.enableaction,
+ arguments = { "'shipouts'", "'nodes.handlers.alignbackgrounds'" }
+}
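
The wrappers above illustrate the conversion discipline used throughout this commit: handlers work on direct nodes ("nuts") internally and get converted exactly once at the callback boundary. A minimal sketch of that idiom, where the handler name "example" is made up and tonut/tonode are the nodes.nuts converters already imported in this patch:

-- sketch only: "example" is a hypothetical handler name; tonut/tonode are
-- the nodes.nuts converters used elsewhere in this commit
local nuts   = nodes.nuts
local tonut  = nuts.tonut
local tonode = nuts.tonode

local function example_nuts(head)            -- operates purely on nuts
    -- ... walk the list with getid/getfield, inject rules and glue, etc. ...
    return head, true
end

nodes.handlers.example = function(head)
    local head, done = example_nuts(tonut(head))   -- convert once on entry
    return tonode(head), done                      -- and back once on exit
end
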
diff --git a/tex/context/base/node-bck.mkiv b/tex/context/base/node-bck.mkiv
index 25739c560..ff0de6a5d 100644
--- a/tex/context/base/node-bck.mkiv
+++ b/tex/context/base/node-bck.mkiv
@@ -21,14 +21,6 @@
\registerctxluafile{node-bck}{1.001}
-\def\node_backgrounds_boxes_initialize % will move to lua
- {\ctxlua{nodes.tasks.enableaction("shipouts","nodes.handlers.backgrounds")}%
- \glet\node_backgrounds_boxes_initialize\donothing}
-
-\def\node_backgrounds_align_initialize % will move to lua
- {\ctxlua{nodes.tasks.enableaction("shipouts","nodes.handlers.alignbackgrounds")}%
- \glet\node_backgrounds_align_initialize\donothing}
-
% \backgroundvbox[green] {\input tufte } \par
% \backgroundvbox[blue] {\input ward } \par
% \backgroundvbox[red] {\input knuth } \par
@@ -71,7 +63,7 @@
% \def\node_backgrounds_boxes_add#1[#2]%
% {\begingroup
-% \node_backgrounds_boxes_initialize
+% \clf_enablebackgroundboxes
% \dousecolorparameter{#2}%
% \normalexpanded{\endgroup#1
% attr \backgroundattribute \plusone
@@ -82,13 +74,13 @@
% more efficient:
\def\node_backgrounds_boxes_add#1[#2]%
- {\node_backgrounds_boxes_initialize
+ {\clf_enablebackgroundboxes
#1\backgroundcolorattr{#2}}
% less argument carry over:
%
% \def\node_backgrounds_boxes_add#1[#2]%
-% {\node_backgrounds_boxes_initialize#1%
+% {\clf_enablebackgroundboxes#1%
% \ifcsname\??colorattribute\currentcolorprefix#2\endcsname
% \thebackgroundcolorattr{\currentcolorprefix#2}%
% \else\ifcsname\??colorattribute#2\endcsname
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index 63a5ef83e..84c5b9a2f 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -8,42 +8,63 @@ if not modules then modules = { } end modules ['node-fin'] = {
-- this module is being reconstructed
-- local functions, only slightly slower
+--
+-- leaders are also triggers ... see colo-ext for an example (negate a box)
local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
-local copy_node = node.copy
-local find_tail = node.slide
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getleader = nuts.getleader
+local getattr = nuts.getattr
+
+local copy_node = nuts.copy
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
-local pdfliteral_code = whatcodes.pdfliteral
+local pdfliteral_code = whatcodes.pdfliteral
-local states = attributes.states
-local numbers = attributes.numbers
-local a_trigger = attributes.private('trigger')
-local triggering = false
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local loadstripped = utilities.lua.loadstripped
-local unsetvalue = attributes.unsetvalue
+local implement = interfaces.implement
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
-- these two will be like trackers
function states.enabletriggering () triggering = true end
function states.disabletriggering() triggering = false end
+implement { name = "enablestatetriggering", actions = states.enabletriggering }
+implement { name = "disablestatetriggering", actions = states.disabletriggering }
+
nodes.plugindata = nil
-- inheritance: -0x7FFFFFFF -- we can best use nil and skip !
@@ -102,14 +123,17 @@ function nodes.installattributehandler(plugin)
return loadstripped(template)()
end
--- the injectors
+-- for the moment:
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local function copied(n)
+ return copy_node(tonut(n))
+end
+
+-- the injectors
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
-local nsbegin, nsend
+local nsbegin, nsend, nsreset
function states.initialize(namespace,attribute,head)
nsdata = namespace.data
@@ -123,6 +147,7 @@ function states.initialize(namespace,attribute,head)
done = false -- todo: done cleanup
nsstep = namespace.resolve_step
if nsstep then
+ nsreset = namespace.resolve_reset
nsbegin = namespace.resolve_begin
nsend = namespace.resolve_end
nspush = namespace.push
@@ -132,23 +157,24 @@ end
function states.finalize(namespace,attribute,head) -- is this one ok?
if current > 0 and nsnone then
- local id = head.id
+ head = tonut(head)
+ local id = getid(head)
if id == hlist_code or id == vlist_code then
- local list = head.list
+ local list = getlist(head)
if list then
- head.list = insert_node_before(list,list,copy_node(nsnone))
+ list = insert_node_before(list,list,copied(nsnone)) -- two return values
+ setfield(head,"list",list)
end
else
- head = insert_node_before(head,head,copy_node(nsnone))
+ head = insert_node_before(head,head,copied(nsnone))
end
- return head, true, true
+ return tonode(head), true, true
end
return head, false, false
end
--- disc nodes can be ignored
-- we need to deal with literals too (reset as well as oval)
--- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
+-- if id == glyph_code or (id == whatsit_code and getsubtype(stack) == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
local function process(namespace,attribute,head,inheritance,default) -- one attribute
local stack = head
@@ -156,53 +182,59 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
local check = false
local leader = nil
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
+ -- elseif id == disc_code then
+ -- check = true -- no longer needed as we flatten replace
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
-- begin nested --
- local ok
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+ local list, ok = process(namespace,attribute,content,inheritance,outer)
+ setfield(stack,"list",list)
+ done = done or ok
else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
-- end nested --
- done = done or ok
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
-- much faster this way than using a check() and nested() function
if check then
- local c = stack[attribute]
+ local c = getattr(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ head = insert_node_before(head,stack,copied(nsdata[default]))
current = default
done = true
end
elseif current ~= c then
- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+ head = insert_node_before(head,stack,copied(nsdata[c]))
current = c
done = true
end
if leader then
local savedcurrent = current
- local ci = leader.id
+ local ci = getid(leader)
if ci == hlist_code or ci == vlist_code then
-- else we reset inside a box unneeded, okay, the downside is
-- that we trigger color in each repeated box, so there is room
@@ -210,41 +242,48 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = 0
end
-- begin nested --
- local ok = false
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
+ local list, ok = process(namespace,attribute,leader,inheritance,outer)
+ setfield(stack,"leader",list)
+ done = done or ok
else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
-- end nested --
- done = done or ok
current = savedcurrent
leader = false
end
elseif default and inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ head = insert_node_before(head,stack,copied(nsdata[default]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
current = 0
done = true
end
check = false
end
- stack = stack.next
+ stack = getnext(stack)
end
return head, done
end
-states.process = process
+states.process = function(namespace,attribute,head,default)
+ local head, done = process(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- we can force a selector, e.g. document wide color spaces, saves a little
-- watch out, we need to check both the selector state (like colorspace) and
@@ -258,93 +297,105 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
local check = false
local leader = nil
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
+ -- elseif id == disc_code then
+ -- check = true -- no longer needed as we flatten replace
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
- local ok = false
-- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+ local list, ok = selective(namespace,attribute,content,inheritance,outer)
+ setfield(stack,"list",list)
+ done = done or ok
else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
-- end nested
- done = done or ok
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
if check then
- local c = stack[attribute]
+ local c = getattr(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
else
- local s = stack[nsselector]
+ local s = getattr(stack,nsselector)
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = c
current_selector = s
done = true
end
end
if leader then
- local ok = false
-- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
+ local list, ok = selective(namespace,attribute,leader,inheritance,outer)
+ setfield(stack,"leader",list)
+ done = done or ok
else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
-- end nested
- done = done or ok
leader = false
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
current, current_selector, done = 0, 0, true
end
check = false
end
-
- stack = stack.next
+ stack = getnext(stack)
end
return head, done
end
-states.selective = selective
+states.selective = function(namespace,attribute,head,default)
+ local head, done = selective(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- Ideally the next one should be merged with the previous but keeping it separate is
-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
@@ -363,135 +414,145 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
-- the problem is that broken lines get the attribute which can be a later one
if nslistwise then
- local a = stack[attribute]
+ local a = getattr(stack,attribute)
if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
- current, done = a, true
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
- stack.list = stacked(namespace,attribute,content,current)
- head, stack = insert_node_after(head,stack,copy_node(nsnone))
+ current = a
+ head = insert_node_before(head,stack,copied(nsdata[a]))
+ local list = stacked(namespace,attribute,content,current) -- two return values
+ setfield(stack,"list",list)
+ done = true
+ head, stack = insert_node_after(head,stack,copied(nsnone))
current = p
else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"list",list) -- only if ok
done = done or ok
end
else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"list",list) -- only if ok
done = done or ok
end
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
if check then
- local a = stack[attribute]
+ local a = getattr(stack,attribute)
if a then
if current ~= a then
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ head = insert_node_before(head,stack,copied(nsdata[a]))
depth = depth + 1
current, done = a, true
end
if leader then
- local ok = false
- stack.leader, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,leader,current)
+ setfield(stack,"leader",list) -- only if ok
done = done or ok
leader = false
end
elseif default > 0 then
--
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
depth = depth - 1
current, done = 0, true
end
check = false
end
-
- stack = stack.next
+ stack = getnext(stack)
end
while depth > 0 do
- head = insert_node_after(head,stack,copy_node(nsnone))
+ head = insert_node_after(head,stack,copied(nsnone))
depth = depth - 1
end
return head, done
end
-states.stacked = stacked
+states.stacked = function(namespace,attribute,head,default)
+ local head, done = stacked(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- experimental
local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
- nsbegin()
+
+-- nsbegin()
+ local stacked = false
+
local current = head
local previous = head
local done = false
- local okay = false
local attrib = default or unsetvalue
local check = false
local leader = false
+
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = current.leader
+ leader = getleader(current)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = current.list
+ local content = getlist(current)
if not content then
-- skip
elseif nslistwise then
- local a = current[attribute]
+ local a = getattr(current,attribute)
if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+ head = insert_node_before(head,current,copied(nsdata[a]))
+ local list = stacker(namespace,attribute,content,a)
+ setfield(current,"list",list)
done = true
- head = insert_node_before(head,current,copy_node(nsdata[a]))
- current.list = stacker(namespace,attribute,content,a)
- head, current = insert_node_after(head,current,copy_node(nsnone))
+ head, current = insert_node_after(head,current,copied(nsnone))
else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,attrib)
+ local list, ok = stacker(namespace,attribute,content,attrib)
+ setfield(current,"list",list)
done = done or ok
end
else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,default)
+ local list, ok = stacker(namespace,attribute,content,default)
+ setfield(current,"list",list)
done = done or ok
end
elseif id == rule_code then
- check = current.width ~= 0
+ check = getfield(current,"width") ~= 0
end
if check then
- local a = current[attribute] or unsetvalue
+ local a = getattr(current,attribute) or unsetvalue
if a ~= attrib then
+ if not stacked then
+ stacked = true
+ nsbegin()
+ end
local n = nsstep(a)
if n then
- -- !!!! TEST CODE !!!!
- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
- head = insert_node_before(head,current,n) -- a
+ head = insert_node_before(head,current,tonut(n)) -- a
end
- attrib, done, okay = a, true, true
+ attrib, done = a, true
if leader then
-- tricky as a leader has to be a list so we cannot inject before
- local _, ok = stacker(namespace,attribute,leader,attrib)
+ local list, ok = stacker(namespace,attribute,leader,attrib)
done = done or ok
leader = false
end
@@ -500,20 +561,27 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
end
previous = current
- current = current.next
+ current = getnext(current)
end
- if okay then
- local n = nsend()
- if n then
- -- !!!! TEST CODE !!!!
- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
- head = insert_node_after(head,previous,n)
- end
+
+if stacked then
+
+ local n = nsend()
+ while n do
+ head = insert_node_after(head,previous,tonut(n))
+ n = nsend()
end
+
+end
+
return head, done
end
-states.stacker = stacker
+states.stacker = function(namespace,attribute,head,default)
+ local head, done = stacker(namespace,attribute,tonut(head),default)
+ nsreset()
+ return tonode(head), done
+end
-- -- --
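
The reworked stacker also replaces the unconditional nsbegin()/nsend() pair by a lazy variant: the stack is only opened when an attribute change is actually seen, and at the end every literal still pending in nsend() is flushed. A condensed sketch of that control flow, stripped of the listwise and leader handling; nsbegin/nsstep/nsend, unsetvalue and the node helpers are the locals from this file, and a_example is a made-up attribute:

-- condensed sketch of the lazy open/flush logic now used in states.stacker
local function sketch_stacker(head)
    local stacked  = false
    local attrib   = unsetvalue
    local current  = head
    local previous = head
    while current do
        local a = getattr(current,a_example) or unsetvalue  -- a_example: illustrative only
        if a ~= attrib then
            if not stacked then
                stacked = true
                nsbegin()                        -- open the stack only when really needed
            end
            local n = nsstep(a)
            if n then
                head = insert_node_before(head,current,tonut(n))
            end
            attrib = a
        end
        previous = current
        current  = getnext(current)
    end
    if stacked then                              -- flush every literal still pending
        local n = nsend()
        while n do
            head = insert_node_after(head,previous,tonut(n))
            n = nsend()
        end
    end
    return head
end
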
diff --git a/tex/context/base/node-fin.mkiv b/tex/context/base/node-fin.mkiv
index 2eb033fc1..7c95699dd 100644
--- a/tex/context/base/node-fin.mkiv
+++ b/tex/context/base/node-fin.mkiv
@@ -23,12 +23,12 @@
% we might have two variants at some point (efficiency)
-\unexpanded\def\finalizeobjectbox #1{\ctxcommand{finalizebox(\number#1)}}
-\unexpanded\def\finalizeshipoutbox#1{\ctxcommand{finalizebox(\number#1)}}
+\unexpanded\def\finalizeobjectbox #1{\clf_finalizebox#1\relax}
+\unexpanded\def\finalizeshipoutbox#1{\clf_finalizebox#1\relax}
% Experimental (for Aditya):
-\unexpanded\def\cleanupbox#1{\ctxcommand{cleanupbox(\number#1)}}
+\unexpanded\def\cleanupbox#1{\clf_cleanupbox#1\relax}
% Tricky stuff: this might become obsolete.
@@ -61,12 +61,12 @@
\stopinheritattributes}
\def\enableattributeinheritance
- {\ctxlua{attributes.states.enabletriggering()}%
+ {\clf_enablestatetriggering
\let\attributedcopy\doattributedcopy
\let\attributedbox \doattributedbox}
\def\disableattributeinheritance
- {\ctxlua{attributes.states.disabletriggering()}%
+ {\clf_disablestatetriggering
\let\attributedcopy\copy
\let\attributedbox \box}
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua
index 2f59d513c..774a68718 100644
--- a/tex/context/base/node-fnt.lua
+++ b/tex/context/base/node-fnt.lua
@@ -13,8 +13,11 @@ local concat, keys = table.concat, table.keys
local nodes, node, fonts = nodes, node, fonts
-local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end)
-local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end)
+local trace_characters = false trackers .register("nodes.characters", function(v) trace_characters = v end)
+local trace_fontrun = false trackers .register("nodes.fontrun", function(v) trace_fontrun = v end)
+
+local force_discrun = true directives.register("nodes.discrun", function(v) force_discrun = v end)
+local force_basepass = true directives.register("nodes.basepass", function(v) force_basepass = v end)
local report_fonts = logs.reporter("fonts","processing")
@@ -23,12 +26,27 @@ local fontdata = fonthashes.identifiers
local otf = fonts.handlers.otf
-local traverse_id = node.traverse_id
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
+
local nodecodes = nodes.nodecodes
local handlers = nodes.handlers
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfield = nuts.getfield
+
+local traverse_id = nuts.traverse_id
+
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
@@ -99,68 +117,153 @@ fonts.hashes.processes = fontprocesses
-- inside a run which means that we need to keep track of this which in turn complicates matters
-- in a way i don't like
+-- we need to deal with the basemode fonts here and can only run over ranges as we
+-- otherwise get luatex crashes due to all kinds of asserts in the disc/lig builder
+
+local ligaturing = node.ligaturing
+local kerning = node.kerning
+
function handlers.characters(head)
-- either next or not, but definitely no already processed list
starttiming(nodes)
- local usedfonts, attrfonts = { }, { }
- local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false
+
+ local usedfonts = { }
+ local attrfonts = { }
+ local basefonts = { }
+ local a, u, b = 0, 0, 0
+ local basefont = nil
+ local prevfont = nil
+ local prevattr = 0
+ local done = false
+
if trace_fontrun then
run = run + 1
report_fonts()
report_fonts("checking node list, run %s",run)
report_fonts()
- local n = head
+ local n = tonut(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local font = n.font
- local attr = n[0] or 0
- report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,n.char)
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0
+ report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,getchar(n))
elseif id == disc_code then
report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n))
else
report_fonts("[%s]",nodecodes[id])
end
- n = n.next
+ n = getnext(n)
end
end
- for n in traverse_id(glyph_code,head) do
- -- if n.subtype<256 then -- all are 1
- local font = n.font
- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
- if font ~= prevfont or attr ~= prevattr then
- if attr > 0 then
- local used = attrfonts[font]
- if not used then
- used = { }
- attrfonts[font] = used
+
+ local nuthead = tonut(head)
+
+ for n in traverse_id(glyph_code,nuthead) do
+ if getsubtype(n) < 256 then -- all are 1
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ if font ~= prevfont or attr ~= prevattr then
+ if basefont then
+ basefont[2] = tonode(getprev(n)) -- todo, save p
end
- if not used[attr] then
- local fd = setfontdynamics[font]
- if fd then
- used[attr] = fd[attr]
- a = a + 1
+ if attr > 0 then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local fd = setfontdynamics[font]
+ if fd then
+ used[attr] = fd[attr]
+ a = a + 1
+ elseif force_basepass then
+ b = b + 1
+ basefont = { tonode(n), nil }
+ basefonts[b] = basefont
+ end
+ end
+ else
+ local used = usedfonts[font]
+ if not used then
+ local fp = fontprocesses[font]
+ if fp then
+ usedfonts[font] = fp
+ u = u + 1
+ elseif force_basepass then
+ b = b + 1
+ basefont = { tonode(n), nil }
+ basefonts[b] = basefont
+ end
end
end
- else
- local used = usedfonts[font]
- if not used then
- local fp = fontprocesses[font]
- if fp then
- usedfonts[font] = fp
- u = u + 1
- end
+ prevfont = font
+ prevattr = attr
+ end
+ end
+ end
+
+ -- could be an optional pass : seldom needed, only for documentation as a discretionary
+ -- with pre/post/replace will normally not occur on its own
+
+ if force_discrun then
+
+ -- basefont is not supported in disc only runs ... it would mean a lot of
+ -- ranges .. we could try to run basemode as a separate processor run but
+ -- not for now (we can consider it when the new node code is tested)
+
+ -- local prevfont = nil
+ -- local prevattr = 0
+
+ for d in traverse_id(disc_code,nuthead) do
+ -- we could use first_glyph
+ local r = getfield(d,"replace") -- good enough
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ if getsubtype(n) < 256 then -- all are 1
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ if font ~= prevfont or attr ~= prevattr then
+ if attr > 0 then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local fd = setfontdynamics[font]
+ if fd then
+ used[attr] = fd[attr]
+ a = a + 1
+ end
+ end
+ else
+ local used = usedfonts[font]
+ if not used then
+ local fp = fontprocesses[font]
+ if fp then
+ usedfonts[font] = fp
+ u = u + 1
+ end
+ end
+ end
+ prevfont = font
+ prevattr = attr
+ end
+ end
+ break
end
end
- prevfont = font
- prevattr = attr
end
- -- end
+
end
+
if trace_fontrun then
report_fonts()
- report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none")
- report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
+ report_fonts("statics : %s",u > 0 and concat(keys(usedfonts)," ") or "none")
+ report_fonts("dynamics: %s",a > 0 and concat(keys(attrfonts)," ") or "none")
+ report_fonts("built-in: %s",b > 0 and b or "none")
report_fonts()
end
-- in context we always have at least 2 processors
@@ -212,6 +315,49 @@ function handlers.characters(head)
end
end
end
+ if b == 0 then
+ -- skip
+ elseif b == 1 then
+ -- only one font
+ local range = basefonts[1]
+ local start = range[1]
+ local stop = range[2]
+ local front = head == start
+ if stop then
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then -- safeguard
+ start = ligaturing(start)
+ start = kerning(start)
+ else
+ -- something bad happened
+ end
+ if front then
+ -- shouldn't happen
+ head = start
+ end
+ else
+ -- multiple fonts
+ local front = head == start
+ for i=1,b do
+ local range = basefonts[i]
+ local start = range[1]
+ local stop = range[2]
+ if stop then
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then -- safeguard
+ start = ligaturing(start)
+ start = kerning(start)
+ else
+ -- something bad happened
+ end
+ end
+ if front then
+ -- shouldn't happen
+ head = start
+ end
+ end
stoptiming(nodes)
if trace_characters then
nodes.report(head,done)
@@ -280,7 +426,9 @@ end
-- return false
-- end
-- end)
-
+--
+-- -- TODO: basepasses!
+--
-- function handlers.characters(head)
-- -- either next or not, but definitely no already processed list
-- starttiming(nodes)
@@ -391,5 +539,18 @@ end
-- return head, true
-- end
-handlers.protectglyphs = node.protect_glyphs
-handlers.unprotectglyphs = node.unprotect_glyphs
+local d_protect_glyphs = nuts.protect_glyphs
+local d_unprotect_glyphs = nuts.unprotect_glyphs
+
+handlers.protectglyphs = function(n) return d_protect_glyphs (tonut(n)) end
+handlers.unprotectglyphs = function(n) return d_unprotect_glyphs(tonut(n)) end
+
+-- function handlers.protectglyphs(h)
+-- local h = tonut(h)
+-- for n in traverse_id(disc_code,h) do
+-- local d = getfield(n,"pre") if d then d_protect_glyphs(d) end
+-- local d = getfield(n,"post") if d then d_protect_glyphs(d) end
+-- local d = getfield(n,"replace") if d then d_protect_glyphs(d) end
+-- end
+-- return d_protect_glyphs(h)
+-- end
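
The base pass added above boils down to two stages: while scanning glyphs, runs in fonts without a Lua processor are recorded as { startnode, stopnode } pairs in basefonts, and afterwards each recorded range is handed to the engine's ligature and kern builders. A reduced sketch of that second stage, using the same ligaturing/kerning locals as the patch:

-- reduced sketch of the base-mode pass; basefonts holds the { start, stop }
-- node ranges collected during the glyph scan above
local ligaturing = node.ligaturing
local kerning    = node.kerning

local function apply_basepass(basefonts)
    for i=1,#basefonts do
        local range = basefonts[i]
        local start = range[1]
        local stop  = range[2]
        if stop then
            start, stop = ligaturing(start,stop)   -- build ligatures inside the range
            start, stop = kerning(start,stop)      -- then apply font kerns
        elseif start then                          -- open-ended range: safeguard
            start = ligaturing(start)
            start = kerning(start)
        end
    end
end
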
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 652b46caf..02d4c7a3f 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -154,9 +154,9 @@ local disccodes = allocate {
[0] = "discretionary", -- \discretionary
[1] = "explicit", -- \-
[2] = "automatic", -- following a -
- [3] = "regular", -- simple
- [4] = "first", -- hard first item
- [5] = "second", -- hard second item
+ [3] = "regular", -- by hyphenator: simple
+ [4] = "first", -- by hyphenator: hard first item
+ [5] = "second", -- by hyphenator: hard second item
}
local accentcodes = allocate {
@@ -206,7 +206,7 @@ nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more offici
nodes.listcodes = listcodes
nodes.glyphcodes = glyphcodes
nodes.kerncodes = kerncodes
-nodes.penaltycodes = kerncodes
+nodes.penaltycodes = penaltycodes
nodes.mathcodes = mathcodes
nodes.fillcodes = fillcodes
nodes.margincodes = margincodes
@@ -220,6 +220,8 @@ listcodes.column = listcodes.alignment
kerncodes.italiccorrection = kerncodes.userkern
kerncodes.kerning = kerncodes.fontkern
+whatcodes.textdir = whatcodes.dir
+
nodes.codes = allocate { -- mostly for listing
glue = skipcodes,
noad = noadcodes,
diff --git a/tex/context/base/node-ini.mkiv b/tex/context/base/node-ini.mkiv
index e99653327..d04e647de 100644
--- a/tex/context/base/node-ini.mkiv
+++ b/tex/context/base/node-ini.mkiv
@@ -19,10 +19,9 @@
\registerctxluafile{node-ini}{1.001}
\registerctxluafile{node-met}{1.001}
-
-\ctxlua{if nodes.gonuts then context.registerctxluafile("node-nut","1.001") end}
-
+\registerctxluafile{node-nut}{1.001}
\registerctxluafile{node-res}{1.001}
+\registerctxluafile{node-ppt}{1.001} % experimental
\registerctxluafile{node-dir}{1.001}
\registerctxluafile{node-aux}{1.001}
\registerctxluafile{node-tst}{1.001}
@@ -36,6 +35,8 @@
\registerctxluafile{node-acc}{1.001} % experimental
%registerctxluafile{node-prp}{1.001} % makes no sense (yet)
+\doifelsefile{node-ppt.lua}{\registerctxluafile{node-ppt}{1.001}}{}
+
\newcount\c_node_tracers_show_box % box number
\unexpanded\def\shownextnodes{\afterassignment\node_tracers_show_next\c_node_tracers_show_box}
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index ae48150a6..402403529 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -8,10 +8,9 @@ if not modules then modules = { } end modules ['node-inj'] = {
-- This is very experimental (this will change when we have luatex > .50 and
-- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
+-- test fonts. Some optimizations can go away when we have faster machines.
--- todo: make a special one for context
+-- todo: ignore kerns between disc and glyph
local next = next
local utfchar = utf.char
@@ -31,12 +30,30 @@ local injections = nodes.injections
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
-local nodepool = nodes.pool
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
local newkern = nodepool.kern
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local a_kernpair = attributes.private('kernpair')
local a_ligacomp = attributes.private('ligacomp')
@@ -47,6 +64,8 @@ local a_cursbase = attributes.private('cursbase')
local a_curscurs = attributes.private('curscurs')
local a_cursdone = attributes.private('cursdone')
+local unsetvalue = attributes.unsetvalue
+
-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
@@ -67,12 +86,39 @@ local kerns = { }
-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
-- checking with husayni (volt and fontforge).
+function injections.reset(n)
+-- if getattr(n,a_kernpair) then
+-- setattr(n,a_kernpair,unsetvalue)
+-- end
+-- if getattr(n,a_markdone) then
+-- setattr(n,a_markbase,unsetvalue)
+-- setattr(n,a_markmark,unsetvalue)
+-- setattr(n,a_markdone,unsetvalue)
+-- end
+-- if getattr(n,a_cursdone) then
+-- setattr(n,a_cursbase,unsetvalue)
+-- setattr(n,a_curscurs,unsetvalue)
+-- setattr(n,a_cursdone,unsetvalue)
+-- end
+-- if getattr(n,a_ligacomp) then
+-- setattr(n,a_ligacomp,unsetvalue)
+-- end
+end
+
+function injections.setligaindex(n,index)
+ setattr(n,a_ligacomp,index)
+end
+
+function injections.getligaindex(n,default)
+ return getattr(n,a_ligacomp) or default
+end
+
function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
+ setattr(start,a_cursbase,bound)
+ setattr(nxt,a_curscurs,bound)
cursives[bound] = { rlmode, dx, dy, ws, wn }
return dx, dy, bound
end
@@ -81,14 +127,14 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
+ local bound = getattr(current,a_kernpair)
if bound then
local kb = kerns[bound]
-- inefficient but singles have less, but weird anyway, needs checking
kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
- current[a_kernpair] = bound
+ setattr(current,a_kernpair,bound)
kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
@@ -100,7 +146,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
- current[a_kernpair] = bound
+ setattr(current,a_kernpair,bound)
kerns[bound] = { rlmode, dx }
return dx, bound
else
@@ -108,9 +154,9 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
+function injections.setmark(start,base,factor,rlmode,ba,ma) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ local bound = getattr(base,a_markbase)
local index = 1
if bound then
local mb = marks[bound]
@@ -118,20 +164,19 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) --
-- if not index then index = #mb + 1 end
index = #mb + 1
mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
return dx, dy, bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
end
end
--- index = index or 1
index = index or 1
bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
+ setattr(base,a_markbase,bound)
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
+ marks[bound] = { [index] = { dx, dy, rlmode } }
return dx, dy, bound
end
@@ -142,15 +187,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if getsubtype(n) < 256 then
+ local kp = getattr(n,a_kernpair)
+ local mb = getattr(n,a_markbase)
+ local mm = getattr(n,a_markmark)
+ local md = getattr(n,a_markdone)
+ local cb = getattr(n,a_cursbase)
+ local cc = getattr(n,a_curscurs)
+ local char = getchar(n)
+ report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
if kp then
local k = kerns[kp]
if k[3] then
@@ -198,22 +243,24 @@ local function show_result(head)
local current = head
local skipping = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
skipping = false
elseif id == kern_code then
- report_injections("kern: %p",current.kern)
+ report_injections("kern: %p",getfield(current,"kern"))
skipping = false
elseif not skipping then
report_injections()
skipping = true
end
- current = current.next
+ current = getnext(current)
end
end
function injections.handler(head,where,keep)
+ head = tonut(head)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -224,17 +271,18 @@ function injections.handler(head,where,keep)
if has_kerns then -- move outside loop
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
+ if getsubtype(n) < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
end
if tm then
- mk[n] = tm[n.char]
+ mk[n] = tm[getchar(n)]
end
- local k = n[a_kernpair]
+ local k = getattr(n,a_kernpair)
if k then
local kk = kerns[k]
if kk then
@@ -254,15 +302,16 @@ function injections.handler(head,where,keep)
else
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
+ if getsubtype(n) < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
end
if tm then
- mk[n] = tm[n.char]
+ mk[n] = tm[getchar(n)]
end
end
end
@@ -272,7 +321,7 @@ function injections.handler(head,where,keep)
local cx = { }
if has_kerns and next(ky) then
for n, k in next, ky do
- n.yoffset = k
+ setfield(n,"yoffset",k)
end
end
-- todo: reuse t and use maxt
@@ -283,9 +332,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
- local n_cursbase = n[a_cursbase]
+ local n_cursbase = getattr(n,a_cursbase)
if p_cursbase then
- local n_curscurs = n[a_curscurs]
+ local n_curscurs = getattr(n,a_curscurs)
if p_cursbase == n_curscurs then
local c = cursives[n_curscurs]
if c then
@@ -310,20 +359,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset")
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ti.yoffset + ny
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny)
end
maxt = 0
end
if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset")
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ny
+ setfield(ti,"yoffset",ny) -- maybe add to current yoffset
end
maxt = 0
end
@@ -331,11 +380,11 @@ function injections.handler(head,where,keep)
end
end
if maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset") -- hm, n unset ?
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ny
+ setfield(ti,"yoffset",ny)
end
maxt = 0
end
@@ -346,57 +395,83 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p = valid[i]
- local p_markbase = p[a_markbase]
+ local p_markbase = getattr(p,a_markbase)
if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,getnext(p)) do
+ local n_markmark = getattr(n,a_markmark)
if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
+ local index = getattr(n,a_markdone) or 1
local d = mrks[index]
if d then
local rlmode = d[3]
--
local k = wx[p]
+ local px = getfield(p,"xoffset")
+ local ox = 0
if k then
local x = k[2]
local w = k[4]
if w then
if rlmode and rlmode >= 0 then
-- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ ox = px - getfield(p,"width") + d[1] - (w-x)
+ -- report_injections("l2r case 1: %p",ox)
else
-- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
+ ox = px - d[1] - x
+ -- report_injections("r2l case 1: %p",ox)
end
else
if rlmode and rlmode >= 0 then
-- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
+ ox = px - getfield(p,"width") + d[1]
+ -- report_injections("r2l case 2: %p",ox)
else
-- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
+ ox = px - d[1] - x
+ -- report_injections("r2l case 2: %p",ox)
end
end
else
+ -- if rlmode and rlmode >= 0 then
+ -- ox = px - getfield(p,"width") + d[1]
+ -- -- report_injections("l2r case 3: %p",ox)
+ -- else
+ -- ox = px - d[1]
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- end
+ --
+ -- we need to deal with fonts that have marks with width
+ --
+ local wp = getfield(p,"width")
+ local wn = getfield(n,"width") -- in arial marks have widths
if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
+ ox = px - wp + d[1]
+ -- report_injections("l2r case 3: %p",ox)
else
- n.xoffset = p.xoffset - d[1]
+ ox = px - d[1]
+ -- report_injections("r2l case 3: %p",ox)
end
- local w = n.width
- if w ~= 0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
+ if wn ~= 0 then
+ -- bad: we should center
+ insert_node_before(head,n,newkern(-wn/2))
+ insert_node_after(head,n,newkern(-wn/2))
+ -- wx[n] = { 0, -wn/2, 0, -wn }
end
+ -- so far
end
- -- --
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+ local oy = 0
if mk[p] then
- n.yoffset = p.yoffset + d[2]
+ oy = py + d[2]
else
- n.yoffset = n.yoffset + p.yoffset + d[2]
+ oy = getfield(n,"yoffset") + py + d[2]
end
+ setfield(n,"yoffset",oy)
--
if nofmarks == 1 then
break
@@ -404,6 +479,8 @@ function injections.handler(head,where,keep)
nofmarks = nofmarks - 1
end
end
+ elseif not n_markmark then
+ break -- HH: added 2013-09-12: no need to deal with non marks
else
-- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
end
@@ -465,7 +542,7 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
- return head, true
+ return tonode(head), true
elseif not keep then
kerns, cursives, marks = { }, { }, { }
end
@@ -474,14 +551,14 @@ function injections.handler(head,where,keep)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
+ if getsubtype(n) < 256 then
+ local k = getattr(n,a_kernpair)
if k then
local kk = kerns[k]
if kk then
local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
+ setfield(n,"yoffset",y) -- todo: h ?
end
if w then
-- copied from above
@@ -518,9 +595,9 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
- return head, true
+ return tonode(head), true
else
-- no tracing needed
end
- return head, false
+ return tonode(head), false
end
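
All of the injection setters above share one bookkeeping pattern: the glyph node only carries a private attribute holding an index, while the actual kern, cursive or mark data lives in a Lua table keyed by that index and is resolved later in injections.handler. A stripped-down sketch of the pattern, modelled on setkern; a_kernpair and setattr are the locals defined in this file and remember_kern is an illustrative name:

-- stripped-down sketch of the attribute-as-index bookkeeping used by the injector
local kerns = { }

local function remember_kern(current,factor,rlmode,x)
    local dx = factor*x
    if dx ~= 0 then
        local bound = #kerns + 1
        setattr(current,a_kernpair,bound)   -- the node carries only the index
        kerns[bound] = { rlmode, dx }       -- the data stays at the Lua end
        return dx, bound
    else
        return 0, 0
    end
end
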
diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua
index c52e001df..e4956f7df 100644
--- a/tex/context/base/node-ltp.lua
+++ b/tex/context/base/node-ltp.lua
@@ -18,7 +18,6 @@ if not modules then modules = { } end modules ['node-par'] = {
-- todo: add a couple of plugin hooks
-- todo: maybe split expansion code paths
-- todo: fix line numbers (cur_list.pg_field needed)
--- todo: make kerns stretch an option and disable it by default (definitely not shrink)
-- todo: check and improve protrusion
-- todo: arabic etc (we could use pretty large scales there) .. marks and cursive
@@ -73,7 +72,8 @@ if not modules then modules = { } end modules ['node-par'] = {
To be honest, I slowly start to grasp the magic here as normally I start from scratch when implementing
something (as it's the only way I can understand things). This time I had a recently acquired stack of
- Porcupine Tree disks to get me through.
+ Porcupine Tree disks to get me through, although I must admit that watching their dvd's is more fun
+ than coding.
Picking up this effort was inspired by discussions between Luigi Scarso and me about efficiency of Lua
code and we needed some stress tests to compare regular LuaTeX and LuajitTeX. One of the tests was
@@ -121,6 +121,13 @@ if not modules then modules = { } end modules ['node-par'] = {
is enabled, but in the Lua variant the extra overhead is way less significant. This means that when we
retrofit the same approach into the core, the overhead of expansion can be sort of nilled.
+ In 2013 the expansion factor method also became used at the TeX end, so I could then complete the code
+ here, and indeed, expansion works quite well now (not compatible of course because we use floats at the
+ Lua end). The Lua based variant is still slower but quite ok, especially if we go nuts.
+
+ A next iteration will provide plug-ins and more control. I will also explore the possibility to avoid the
+ redundant hpack calculations (easier now, although I've only done some quick and dirty experiments).
+
]]--
local utfchar = utf.char
@@ -180,22 +187,38 @@ local chardata = fonthashes.characters
local quaddata = fonthashes.quads
local parameters = fonthashes.parameters
-local slide_nodes = node.slide
-local new_node = node.new
-local copy_node = node.copy
-local copy_node_list = node.copy_list
-local flush_node = node.free
-local flush_node_list = node.flush_list
-local hpack_nodes = node.hpack
-local xpack_nodes = node.hpack
-local replace_node = nodes.replace
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local traverse_by_id = node.traverse_id
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+
+local slide_nodelist = nuts.slide -- get rid of this, probably ok > 78.2
+local find_tail = nuts.tail
+local new_node = nuts.new
+local copy_node = nuts.copy
+local copy_nodelist = nuts.copy_list
+local flush_node = nuts.free
+local flush_nodelist = nuts.flush_list
+local hpack_nodes = nuts.hpack
+local xpack_nodes = nuts.hpack
+local replace_node = nuts.replace
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local traverse_by_id = nuts.traverse_id
local setnodecolor = nodes.tracers.colors.set
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -287,7 +310,8 @@ local glyphdir_is_equal = nodes.glyphdir_is_equal
local dir_pops = nodes.dir_is_pop
local dir_negations = nodes.dir_negation
-local is_skipable = node.protrusion_skippable
+local is_skipable = nuts.protrusion_skippable
+
local a_fontkern = attributes.private('fontkern')
-- helpers --
@@ -308,12 +332,12 @@ local function checked_line_dir(stack,current)
local n = stack.n + 1
stack.n = n
stack[n] = current
- return current.dir
+ return getfield(current,"dir")
elseif n > 0 then
local n = stack.n
local dirnode = stack[n]
dirstack.n = n - 1
- return dirnode.dir
+ return getfield(dirnode,"dir")
else
report_parbuilders("warning: missing pop node (%a)",1) -- in line ...
end
@@ -328,8 +352,8 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
local n = stack.n
local h = nil
while start and start ~= stop do
- if start.id == whatsit_code and start.subtype == dir_code then
- if not dir_pops[start.dir] then
+ if getid(start) == whatsit_code and getsubtype(start) == dir_code then
+ if not dir_pops[getfield(start,"dir")] then -- weird, what is this #
n = n + 1
stack[n] = start
elseif n > 0 then
@@ -338,10 +362,10 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
report_parbuilders("warning: missing pop node (%a)",2) -- in line ...
end
end
- start = start.next
+ start = getnext(start)
end
for i=n,1,-1 do
- h, current = insert_node_after(current,current,new_dir(dir_negations[stack[i].dir]))
+ h, current = insert_node_after(current,current,new_dir(dir_negations[getfield(stack[i],"dir")]))
end
stack.n = n
return current
@@ -382,22 +406,8 @@ end
-- resolvers --
-local whatsiters = {
- get_width = { },
- get_dimensions = { },
-}
-
-local get_whatsit_width = whatsiters.get_width
-local get_whatsit_dimensions = whatsiters.get_dimensions
-
-local function get_width (n) return n.width end
-local function get_dimensions(n) return n.width, n.height, n.depth end
-
-get_whatsit_width[pdfrefximage_code] = get_width
-get_whatsit_width[pdfrefxform_code ] = get_width
-
-get_whatsit_dimensions[pdfrefximage_code] = get_dimensions
-get_whatsit_dimensions[pdfrefxform_code ] = get_dimensions
+local get_whatsit_width = nodes.whatsitters.getters.width
+local get_whatsit_dimensions = nodes.whatsitters.getters.dimensions
-- expansion etc --
@@ -414,13 +424,13 @@ end
local function check_shrinkage(par,n)
-- called often, so maybe move inline -- use NORMAL
- if n.shrink_order ~= 0 and n.shrink ~= 0 then
+ if getfield(n,"shrink_order") ~= 0 and getfield(n,"shrink") ~= 0 then
if par.no_shrink_error_yet then
par.no_shrink_error_yet = false
report_parbuilders("infinite glue shrinkage found in a paragraph and removed")
end
n = copy_node(n)
- n.shrink_order = 0
+ setfield(n,"shrink_order",0)
end
return n
end
@@ -467,48 +477,10 @@ setmetatableindex(expansions,function(t,font) -- we can store this in tfmdata if
end
end)
--- local function char_stretch_shrink(p)
--- local data = expansions[p.font][p.char]
--- if data then
--- return data.glyphstretch, data.glyphshrink
--- else
--- return 0, 0
--- end
--- end
---
--- local cal_margin_kern_var = char_stretch_shrink
-
--- local function kern_stretch_shrink(p,d)
--- local l = p.prev
--- if l and l.id == glyph_code then -- how about disc nodes?
--- local r = p.next
--- if r and r.id == glyph_code then
--- local lf, rf = l.font, r.font
--- if lf == rf then
--- local data = expansions[lf][l.char]
--- if data then
--- local stretch = data.stretch
--- local shrink = data.shrink
--- if stretch ~= 0 then
--- -- stretch = data.factor * (d * stretch - d)
--- stretch = data.factor * d * (stretch - 1)
--- end
--- if shrink ~= 0 then
--- -- shrink = data.factor * (d * shrink - d)
--- shrink = data.factor * d * (shrink - 1)
--- end
--- return stretch, shrink
--- end
--- end
--- end
--- end
--- return 0, 0
--- end
-
local function kern_stretch_shrink(p,d)
- local left = p.prev
- if left and left.id == glyph_code then -- how about disc nodes?
- local data = expansions[left.font][left.char]
+ local left = getprev(p)
+ if left and getid(left) == glyph_code then -- how about disc nodes?
+ local data = expansions[getfont(left)][getchar(left)]
if data then
local stretch = data.stretch
local shrink = data.shrink
@@ -526,14 +498,8 @@ local function kern_stretch_shrink(p,d)
return 0, 0
end
--- local function kern_stretch_shrink(p,d)
--- -- maybe make it an option in luatex where we also need to check for attribute fontkern but in general
--- -- it makes no sense to scale kerns
--- return 0, 0
--- end
-
local expand_kerns = false
--- local expand_kerns = "both"
+----- expand_kerns = "both"
directives.register("builders.paragraphs.adjusting.kerns",function(v)
if not v then
@@ -623,18 +589,18 @@ end
local function find(head) -- do we really want to recurse into an hlist?
while head do
- local id = head.id
+ local id = getid(head)
if id == glyph_code then
return head
elseif id == hlist_code then
- local found = find(head.list)
+ local found = find(getlist(head))
if found then
return found
else
- head = head.next
+ head = getnext(head)
end
elseif is_skipable(head) then
- head = head.next
+ head = getnext(head)
else
return head
end
@@ -643,38 +609,38 @@ local function find(head) -- do we really want to recurse into an hlist?
end
local function find_protchar_left(l) -- weird function
- local ln = l.next
- if ln and ln.id == hlist_code and not ln.list and ln.width == 0 and ln.height == 0 and ln.depth == 0 then
- l = l.next
+ local ln = getnext(l)
+ if ln and getid(ln) == hlist_code and not getlist(ln) and getfield(ln,"width") == 0 and getfield(ln,"height") == 0 and getfield(ln,"depth") == 0 then
+ l = getnext(l)
else -- if d then -- was always true
- local id = l.id
+ local id = getid(l)
while ln and not (id == glyph_code or id < math_code) do -- is there always a glyph?
l = ln
- ln = l.next
- id = ln.id
+ ln = getnext(l)
+ id = getid(ln)
end
end
- -- if l.id == glyph_code then
+ -- if getid(l) == glyph_code then
-- return l
-- end
return find(l) or l
end
local function find(head,tail)
- local tail = tail or slide_nodes(head)
+ local tail = tail or find_tail(head)
while tail do
- local id = tail.id
+ local id = getid(tail)
if id == glyph_code then
return tail
elseif id == hlist_code then
- local found = find(tail.list)
+ local found = find(getlist(tail))
if found then
return found
else
- tail = tail.prev
+ tail = getprev(tail)
end
elseif is_skipable(tail) then
- tail = tail.prev
+ tail = getprev(tail)
else
return tail
end
@@ -687,8 +653,8 @@ local function find_protchar_right(l,r)
end
local function left_pw(p)
- local font = p.font
- local prot = chardata[font][p.char].left_protruding
+ local font = getfont(p)
+ local prot = chardata[font][getchar(p)].left_protruding
if not prot or prot == 0 then
return 0
end
@@ -696,8 +662,8 @@ local function left_pw(p)
end
local function right_pw(p)
- local font = p.font
- local prot = chardata[font][p.char].right_protruding
+ local font = getfont(p)
+ local prot = chardata[font][getchar(p)].right_protruding
if not prot or prot == 0 then
return 0
end
@@ -721,17 +687,17 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
local adjust_stretch = 0
local adjust_shrink = 0
while s do
- local id = s.id
+ local id = getid(s)
if id == glyph_code then
if is_rotated[line_break_dir] then -- can be shared
- size = size + s.height + s.depth
+ size = size + getfield(s,"height") + getfield(s,"depth")
else
- size = size + s.width
+ size = size + getfield(s,"width")
end
if checked_expansion then
- local data = checked_expansion[s.font]
+ local data = checked_expansion[getfont(s)]
if data then
- data = data[s.char]
+ data = data[getchar(s)]
if data then
adjust_stretch = adjust_stretch + data.glyphstretch
adjust_shrink = adjust_shrink + data.glyphshrink
@@ -739,16 +705,16 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
end
end
elseif id == hlist_code or id == vlist_code then
- if is_parallel[s.dir][line_break_dir] then
- size = size + s.width
+ if is_parallel[getfield(s,"dir")][line_break_dir] then
+ size = size + getfield(s,"width")
else
- size = size + s.depth + s.height
+ size = size + getfield(s,"height") + getfield(s,"depth")
end
elseif id == kern_code then
- local d = s.kern
- if d ~= 0 then
- if checked_expansion and expand_kerns and (s.subtype == kerning_code or s[a_fontkern]) then
- local stretch, shrink = kern_stretch_shrink(s,d)
+ local kern = getfield(s,"kern")
+ if kern ~= 0 then
+ if checked_expansion and expand_kerns and (getsubtype(s) == kerning_code or getattr(s,a_fontkern)) then
+ local stretch, shrink = kern_stretch_shrink(s,kern)
if expand_kerns == "stretch" then
adjust_stretch = adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -758,14 +724,14 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
adjust_shrink = adjust_shrink + shrink
end
end
- size = size + d
+ size = size + kern
end
elseif id == rule_code then
- size = size + s.width
- else
+ size = size + getfield(s,"width")
+ elseif trace_unsupported then
report_parbuilders("unsupported node at location %a",6)
end
- s = s.next
+ s = getnext(s)
end
return size, adjust_stretch, adjust_shrink
end
@@ -779,14 +745,14 @@ local function compute_break_width(par,break_type,p) -- split in two
local break_size = break_width.size + disc_width.size
local break_adjust_stretch = break_width.adjust_stretch + disc_width.adjust_stretch
local break_adjust_shrink = break_width.adjust_shrink + disc_width.adjust_shrink
- local replace = p.replace
+ local replace = getfield(p,"replace")
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
break_size = break_size - size
break_adjust_stretch = break_adjust_stretch - adjust_stretch
break_adjust_shrink = break_adjust_shrink - adjust_shrink
end
- local post = p.post
+ local post = getfield(p,"post")
if post then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,post)
break_size = break_size + size
@@ -797,56 +763,56 @@ local function compute_break_width(par,break_type,p) -- split in two
break_width.adjust_stretch = break_adjust_stretch
break_width.adjust_shrink = break_adjust_shrink
if not post then
- p = p.next
+ p = getnext(p)
else
return
end
end
while p do -- skip spacing etc
- local id = p.id
+ local id = getid(p)
if id == glyph_code then
return -- happens often
elseif id == glue_code then
- local spec = p.spec
- local order = stretch_orders[spec.stretch_order]
- break_width.size = break_width.size - spec.width
- break_width[order] = break_width[order] - spec.stretch
- break_width.shrink = break_width.shrink - spec.shrink
+ local spec = getfield(p,"spec")
+ local order = stretch_orders[getfield(spec,"stretch_order")]
+ break_width.size = break_width.size - getfield(spec,"width")
+ break_width[order] = break_width[order] - getfield(spec,"stretch")
+ break_width.shrink = break_width.shrink - getfield(spec,"shrink")
elseif id == penalty_code then
-- do nothing
elseif id == kern_code then
- if p.subtype == userkern_code then
- break_width.size = break_width.size - p.kern
+ if getsubtype(p) == userkern_code then
+ break_width.size = break_width.size - getfield(p,"kern")
else
return
end
elseif id == math_code then
- break_width.size = break_width.size - p.surround
+ break_width.size = break_width.size - getfield(p,"surround")
else
return
end
- p = p.next
+ p = getnext(p)
end
end
local function append_to_vlist(par, b)
local prev_depth = par.prev_depth
if prev_depth > par.ignored_dimen then
- if b.id == hlist_code then
- local d = par.baseline_skip.width - prev_depth - b.height -- deficiency of space between baselines
- local s = d < par.line_skip_limit and new_lineskip(tex.lineskip) or new_baselineskip(d)
+ if getid(b) == hlist_code then
+ local d = getfield(par.baseline_skip,"width") - prev_depth - getfield(b,"height") -- deficiency of space between baselines
+ local s = d < par.line_skip_limit and new_lineskip(par.lineskip) or new_baselineskip(d)
-- local s = d < par.line_skip_limit
-- if s then
-- s = new_lineskip()
- -- s.spec = tex.lineskip
+ -- setfield(s,"spec",tex.lineskip)
-- else
-- s = new_baselineskip(d)
-- end
local head_field = par.head_field
if head_field then
- local n = slide_nodes(head_field)
- n.next = s
- s.prev = n
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",s)
+ setfield(s,"prev",n)
else
par.head_field = s
end
@@ -854,14 +820,14 @@ local function append_to_vlist(par, b)
end
local head_field = par.head_field
if head_field then
- local n = slide_nodes(head_field)
- n.next = b
- b.prev = n
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",b)
+ setfield(b,"prev",n)
else
par.head_field = b
end
- if b.id == hlist_code then
- local pd = b.depth
+ if getid(b) == hlist_code then
+ local pd = getfield(b,"depth")
par.prev_depth = pd
texnest[texnest.ptr].prevdepth = pd
end
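
The branch above is the usual TeX interline glue logic: the deficiency d is what remains of \baselineskip after subtracting the previous depth and the height of the incoming box; when it drops below \lineskiplimit a \lineskip is inserted instead of a baselineskip of d. A worked example with hypothetical dimensions:

    -- illustrative values in scaled points (1pt = 65536sp)
    local baseline_skip_width = 14 * 65536 -- \baselineskip 14pt
    local prev_depth          =  2 * 65536 -- depth of the previous line
    local height              = 10 * 65536 -- height of the new box
    local line_skip_limit     =  0         -- \lineskiplimit 0pt

    local d = baseline_skip_width - prev_depth - height -- 2pt left between baselines
    if d < line_skip_limit then
        -- a \lineskip glue would be used (new_lineskip in the code above)
    else
        -- a baselineskip glue of d is used (new_baselineskip(d)), here 2pt
    end
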
@@ -870,9 +836,9 @@ end
local function append_list(par, b)
local head_field = par.head_field
if head_field then
- local n = slide_nodes(head_field)
- n.next = b
- b.prev = n
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",b)
+ setfield(b,"prev",n)
else
par.head_field = b
end
@@ -884,14 +850,18 @@ end
local hztolerance = 2500
local hzwarned = false
+local function used_skip(s)
+ return s and (getfield(s,"width") ~= 0 or getfield(s,"stretch") ~= 0 or getfield(s,"shrink") ~= 0) and s or nil
+end
+
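
used_skip simply filters out skips that cannot contribute anything (zero width, stretch and shrink), so later code only has to test one value instead of three fields. It is used a bit further down when the par table is built, and post_line_break then inserts a leftskip node only when the filtered value is non-nil:

    -- as set up in initialize_line_break below
    par.used_left_skip  = used_skip(par.left_skip)
    par.used_right_skip = used_skip(par.right_skip)
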
local function initialize_line_break(head,display)
local hang_indent = tex.hangindent or 0
local hsize = tex.hsize or 0
local hang_after = tex.hangafter or 0
local par_shape_ptr = tex.parshape
- local left_skip = tex.leftskip -- nodes
- local right_skip = tex.rightskip -- nodes
+ local left_skip = tonut(tex.leftskip) -- nodes
+ local right_skip = tonut(tex.rightskip) -- nodes
local pretolerance = tex.pretolerance
local tolerance = tex.tolerance
local adjust_spacing = tex.pdfadjustspacing
@@ -899,7 +869,7 @@ local function initialize_line_break(head,display)
local last_line_fit = tex.lastlinefit
local newhead = new_temp()
- newhead.next = head
+ setfield(newhead,"next",head)
local adjust_spacing_status = adjust_spacing > 1 and -1 or 0
@@ -966,13 +936,13 @@ local function initialize_line_break(head,display)
last_line_depth = tex.pdflastlinedepth or 0, -- this will go away
ignored_dimen = tex.pdfignoreddimen or 0, -- this will go away
- baseline_skip = tex.baselineskip or 0,
- lineskip = tex.lineskip or 0,
- line_skip_limit = tex.lineskiplimit or 0,
+ baseline_skip = tonut(tex.baselineskip),
+ lineskip = tonut(tex.lineskip),
+ line_skip_limit = tex.lineskiplimit,
prev_depth = texnest[texnest.ptr].prevdepth,
- final_par_glue = slide_nodes(head), -- todo: we know tail already, slow
+ final_par_glue = slide_nodelist(head), -- todo: we know tail already, slow
par_break_dir = tex.pardir,
line_break_dir = tex.pardir,
@@ -1041,6 +1011,13 @@ local function initialize_line_break(head,display)
}
+ -- optimizers
+
+ par.used_left_skip = used_skip(par.left_skip)
+ par.used_right_skip = used_skip(par.right_skip)
+
+ -- so far
+
if adjust_spacing > 1 then
local checked_expansion = { par = par }
setmetatableindex(checked_expansion,check_expand_pars)
@@ -1062,13 +1039,13 @@ local function initialize_line_break(head,display)
local l = check_shrinkage(par,left_skip)
local r = check_shrinkage(par,right_skip)
- local l_order = stretch_orders[l.stretch_order]
- local r_order = stretch_orders[r.stretch_order]
+ local l_order = stretch_orders[getfield(l,"stretch_order")]
+ local r_order = stretch_orders[getfield(r,"stretch_order")]
- background.size = l.width + r.width
- background.shrink = l.shrink + r.shrink
- background[l_order] = l.stretch
- background[r_order] = r.stretch + background[r_order]
+ background.size = getfield(l,"width") + getfield(r,"width")
+ background.shrink = getfield(l,"shrink") + getfield(r,"shrink")
+ background[l_order] = getfield(l,"stretch")
+ background[r_order] = getfield(r,"stretch") + background[r_order]
-- this will move up so that we can assign the whole par table
@@ -1148,185 +1125,193 @@ local function initialize_line_break(head,display)
return par
end
+-- there are still all kinds of artefacts in here (a side effect, I guess, of pdftex,
+-- etex, omega and other extensions that got obscured by patching)
+
local function post_line_break(par)
local prevgraf = texnest[texnest.ptr].prevgraf
- local cur_line = prevgraf + 1 -- the current line number being justified
- local cur_p = nil
+ local current_line = prevgraf + 1 -- the current line number being justified
local adjust_spacing = par.adjust_spacing
local protrude_chars = par.protrude_chars
local statistics = par.statistics
- local p, s, k, w -- check when local
+ local stack = new_dir_stack()
+
+ local leftskip = par.used_left_skip -- used or normal ?
+ local rightskip = par.right_skip
+ local parshape = par.par_shape_ptr
+ local ignored_dimen = par.ignored_dimen
- local q = par.best_bet.break_node
- repeat -- goto first breakpoint
- local r = q
- q = q.prev_break
- r.prev_break = cur_p
- cur_p = r
- until not q
+ local adapt_width = par.adapt_width
- local stack = new_dir_stack()
+ -- reverse the links of the relevant passive nodes, goto first breakpoint
+ local current_break = nil
+
+ local break_node = par.best_bet.break_node
repeat
+ local first_break = break_node
+ break_node = break_node.prev_break
+ first_break.prev_break = current_break
+ current_break = first_break
+ until not break_node
+
+ local head = par.head
- inject_dirs_at_begin_of_line(stack,par.head)
+ -- maybe : each_...
- local q = nil
- local r = cur_p.cur_break
+ while current_break do
+
+ inject_dirs_at_begin_of_line(stack,head)
local disc_break = false
local post_disc_break = false
local glue_break = false
- if not r then
- r = slide_nodes(par.head)
- if r == par.final_par_glue then
- q = r -- q refers to the last node of the line (and paragraph)
- r = r.prev -- r refers to the node after which the dir nodes should be closed
+ local lineend = nil -- was q : refers to the last node of the line (and paragraph)
+ local lastnode = current_break.cur_break -- was r : refers to the node after which the dir nodes should be closed
+
+ if not lastnode then
+ -- only at the end
+ lastnode = slide_nodelist(head) -- todo: find_tail
+ if lastnode == par.final_par_glue then
+ lineend = lastnode
+ lastnode = getprev(lastnode)
end
- else
- local id = r.id
+ else -- todo: use insert_list_after
+ local id = getid(lastnode)
if id == glue_code then
- -- r is normal skip
- r = replace_node(r,new_rightskip(par.right_skip))
+ -- lastnode is normal skip
+ lastnode = replace_node(lastnode,new_rightskip(rightskip))
glue_break = true
- q = r -- q refers to the last node of the line
- r = r.prev -- r refers to the node after which the dir nodes should be closed
+ lineend = lastnode
+ lastnode = getprev(lastnode)
elseif id == disc_code then
- -- todo: use insert_before/after
- local prev_r = r.prev
- local next_r = r.next
- local subtype = r.subtype
- local pre = r.pre
- local post = r.post
- local replace = r.replace
+ local prevlast = getprev(lastnode)
+ local nextlast = getnext(lastnode)
+ local subtype = getsubtype(lastnode)
+ local pre = getfield(lastnode,"pre")
+ local post = getfield(lastnode,"post")
+ local replace = getfield(lastnode,"replace")
if subtype == second_disc_code then
- if not (prev_r.id == disc_code and prev_r.subtype == first_disc_code) then
+ if not (getid(prevlast) == disc_code and getsubtype(prevlast) == first_disc_code) then
report_parbuilders('unsupported disc at location %a',3)
end
if pre then
- flush_node_list(pre)
- r.pre = nil
- pre = nil -- signal
+ flush_nodelist(pre)
+ setfield(lastnode,"pre",nil)
+ pre = nil -- signal
end
if replace then
- local n = slide_nodes(replace)
- prev_r.next = replace
- replace.prev = prev_r
- n.next = r
- r.prev = n
- r.replace = nil
- replace = nil -- signal
+ local n = find_tail(replace)
+ setfield(prevlast,"next",replace)
+ setfield(replace,"prev",prevlast)
+ setfield(n,"next",lastnode)
+ setfield(lastnode,"prev",n)
+ setfield(lastnode,"replace",nil)
+ replace = nil -- signal
end
- local pre = prev_r.pre
- local post = prev_r.post
- local replace = prev_r.replace
+ local pre = getfield(prevlast,"pre")
+ local post = getfield(prevlast,"post")
+ local replace = getfield(prevlast,"replace")
if pre then
- flush_node_list(pre)
- prev_r.pre = nil
+ flush_nodelist(pre)
+ setfield(prevlast,"pre",nil)
end
if replace then
- flush_node_list(replace)
- prev_r.replace = nil
+ flush_nodelist(replace)
+ setfield(prevlast,"replace",nil)
end
if post then
- flush_node_list(post)
- prev_r.post = nil
+ flush_nodelist(post)
+ setfield(prevlast,"post",nil)
end
elseif subtype == first_disc_code then
- if not (v.id == disc_code and v.subtype == second_disc_code) then
+ -- what is v ... next probably
+ if not (getid(v) == disc_code and getsubtype(v) == second_disc_code) then
report_parbuilders('unsupported disc at location %a',4)
end
- next_r.subtype = regular_disc_code
- next_r.replace = post
- r.post = nil
+ setfield(nextlast,"subtype",regular_disc_code)
+ setfield(nextlast,"replace",post)
+ setfield(lastnode,"post",nil)
end
if replace then
- r.replace = nil -- free
- flush_node_list(replace)
+ setfield(lastnode,"replace",nil) -- free
+ flush_nodelist(replace)
end
if pre then
- local n = slide_nodes(pre)
- prev_r.next = pre
- pre.prev = prev_r
- n.next = r
- r.prev = n
- r.pre = nil
+ local n = find_tail(pre)
+ setfield(prevlast,"next",pre)
+ setfield(pre,"prev",prevlast)
+ setfield(n,"next",lastnode)
+ setfield(lastnode,"prev",n)
+ setfield(lastnode,"pre",nil)
end
if post then
- local n = slide_nodes(post)
- r.next = post
- post.prev = r
- n.next = next_r
- next_r.prev = n
- r.post = nil
+ local n = find_tail(post)
+ setfield(lastnode,"next",post)
+ setfield(post,"prev",lastnode)
+ setfield(n,"next",nextlast)
+ setfield(nextlast,"prev",n)
+ setfield(lastnode,"post",nil)
post_disc_break = true
end
disc_break = true
elseif id == kern_code then
- r.kern = 0
- elseif r.id == math_code then
- r.surround = 0
+ setfield(lastnode,"kern",0)
+ elseif getid(lastnode) == math_code then
+ setfield(lastnode,"surround",0)
end
end
- r = inject_dirs_at_end_of_line(stack,r,par.head.next,cur_p.cur_break)
- local crb = cur_p.passive_right_box
- if crb then
- local s = copy_node(crb)
- local e = r.next
- r.next = s
- s.prev = r
- s.next = e
- if e then
- e.prev = s
- end
- r = s
+ lastnode = inject_dirs_at_end_of_line(stack,lastnode,getnext(head),current_break.cur_break)
+ local rightbox = current_break.passive_right_box
+ if rightbox then
+ lastnode = insert_node_after(lastnode,lastnode,copy_node(rightbox))
end
- if not q then
- q = r
+ if not lineend then
+ lineend = lastnode
end
- if q and q ~= par.head and protrude_chars > 0 then
- local id = q.id
- local c = (disc_break and (id == glyph_code or id ~= disc_code) and q) or q.prev
- local p = find_protchar_right(par.head.next,c)
- if p and p.id == glyph_code then
+ if lineend and lineend ~= head and protrude_chars > 0 then
+ local id = getid(lineend)
+ local c = (disc_break and (id == glyph_code or id ~= disc_code) and lineend) or getprev(lineend)
+ local p = find_protchar_right(getnext(head),c)
+ if p and getid(p) == glyph_code then
local w, last_rightmost_char = right_pw(p)
if last_rightmost_char and w ~= 0 then
- -- so we inherit attributes, q is new pseudo head
- q, c = insert_node_after(q,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
+ -- so we inherit attributes, lineend is new pseudo head
+ lineend, c = insert_node_after(lineend,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
end
end
end
+ -- we finish the line
+ local r = getnext(lineend)
+ setfield(lineend,"next",nil)
if not glue_break then
- local h
- h, q = insert_node_after(q,q,new_rightskip(par.right_skip)) -- q moves on as pseudo head
- end
- r = q.next
- q.next = nil
- local phead = par.head
- q = phead.next
- phead.next = r
+ if rightskip then
+ insert_node_after(lineend,lineend,new_rightskip(rightskip)) -- lineend moves on as pseudo head
+ end
+ end
+ -- each time ?
+ local q = getnext(head)
+ setfield(head,"next",r)
if r then
- r.prev = phead
- end
- local clb = cur_p.passive_left_box
- if clb then -- here we miss some prev links
- local s = copy_node(cb)
- s = q.next
- r.next = q
- q = r
- if s and cur_line == (par.first_line + 1) and s.id == hlist_code and not s.list then
- q = q.next
- r.next = s.next
- s.next = r
+ setfield(r,"prev",head)
+ end
+ -- insert leftbox (if needed after parindent)
+ local leftbox = current_break.passive_left_box
+ if leftbox then
+ local first = getnext(q)
+ if first and current_line == (par.first_line + 1) and getid(first) == hlist_code and not getlist(first) then
+ insert_node_after(q,q,copy_node(leftbox))
+ else
+ q = insert_node_before(q,q,copy_node(leftbox))
end
end
if protrude_chars > 0 then
local p = find_protchar_left(q)
- if p and p.id == glyph_code then
+ if p and getid(p) == glyph_code then
local w, last_leftmost_char = left_pw(p)
if last_leftmost_char and w ~= 0 then
-- so we inherit attributes, q is pseudo head and moves back
@@ -1334,32 +1319,35 @@ local function post_line_break(par)
end
end
end
- local ls = par.left_skip
- if ls and (ls.width ~= 0 or ls.stretch ~= 0 or ls.shrink ~= 0) then
- q = insert_node_before(q,q,new_leftskip(ls))
+ if leftskip then
+ q = insert_node_before(q,q,new_leftskip(leftskip))
end
- local curwidth, cur_indent
- if cur_line > par.last_special_line then
+ local cur_width, cur_indent
+ if current_line > par.last_special_line then
cur_indent = par.second_indent
cur_width = par.second_width
+ elseif parshape then
+ local shape = parshape[current_line]
+ cur_indent = shape[1]
+ cur_width = shape[2]
else
- local psp = par.par_shape_ptr
- if psp then
- cur_indent = psp[cur_line][1]
- cur_width = psp[cur_line][2]
- else
- cur_indent = par.first_indent
- cur_width = par.first_width
- end
+ cur_indent = par.first_indent
+ cur_width = par.first_width
end
+
+ if adapt_width then -- extension
+ local l, r = adapt_width(par,current_line)
+ cur_indent = cur_indent + l
+ cur_width = cur_width - l - r
+ end
+
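
adapt_width is a ConTeXt extension that shows up in this patch: when present on the par table it is called once per line and has to return an extra left and right offset, which is applied here and in try_break when the line width is computed. A hypothetical callback honouring just that calling convention (how it ends up on the par table is not part of this diff):

    -- hypothetical adapt_width: indent the first three lines a bit extra;
    -- the only contract visible in this patch is: local l, r = adapt_width(par,line)
    local function my_adapt_width(par,line)
        if line <= 3 then
            return 10 * 65536, 0 -- extra left offset (10pt), no extra right offset
        else
            return 0, 0
        end
    end
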
statistics.noflines = statistics.noflines + 1
+ local finished_line = nil
if adjust_spacing > 0 then
statistics.nofadjustedlines = statistics.nofadjustedlines + 1
- -- in the built-in hpack cal_expand_ratio will later on call subst_ext_font
- -- in the alternative approach we can do both in one run
- just_box = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir) -- ,cur_p.analysis)
+ finished_line = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
else
- just_box = xpack_nodes(q,cur_width,"exactly",par.par_break_dir) -- ,cur_p.analysis)
+ finished_line = xpack_nodes(q,cur_width,"exactly",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
end
if protrude_chars > 0 then
statistics.nofprotrudedlines = statistics.nofprotrudedlines + 1
@@ -1368,39 +1356,42 @@ local function post_line_break(par)
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
--
- just_box.shift = cur_indent
- if par.each_line_height ~= par.ignored_dimen then
- just_box.height = par.each_line_height
+ setfield(finished_line,"shift",cur_indent)
+ -- this will probably go away:
+ if par.each_line_height ~= ignored_dimen then
+ setfield(finished_line,"height",par.each_line_height)
end
- if par.each_line_depth ~= par.ignored_dimen then
- just_box.depth = par.each_line_depth
+ if par.each_line_depth ~= ignored_dimen then
+ setfield(finished_line,"depth",par.each_line_depth)
end
- if par.first_line_height ~= par.ignored_dimen and (cur_line == par.first_line + 1) then
- just_box.height = par.first_line_height
+ if par.first_line_height ~= ignored_dimen and (current_line == par.first_line + 1) then
+ setfield(finished_line,"height",par.first_line_height)
end
- if par.last_line_depth ~= par.ignored_dimen and cur_line + 1 == par.best_line then
- just_box.depth = par.last_line_depth
+ if par.last_line_depth ~= ignored_dimen and current_line + 1 == par.best_line then
+ setfield(finished_line,"depth",par.last_line_depth)
end
+ --
if texlists.pre_adjust_head ~= pre_adjust_head then
append_list(par, texlists.pre_adjust_head)
texlists.pre_adjust_head = pre_adjust_head
end
- append_to_vlist(par, just_box)
+ append_to_vlist(par,finished_line)
if texlists.adjust_head ~= adjust_head then
append_list(par, texlists.adjust_head)
texlists.adjust_head = adjust_head
end
+ --
local pen
- if cur_line + 1 ~= par.best_line then
- if cur_p.passive_pen_inter then
- pen = cur_p.passive_pen_inter
+ if current_line + 1 ~= par.best_line then
+ if current_break.passive_pen_inter then
+ pen = current_break.passive_pen_inter
else
pen = par.inter_line_penalty
end
- if cur_line == prevgraf + 1 then
+ if current_line == prevgraf + 1 then
pen = pen + par.club_penalty
end
- if cur_line + 2 == par.best_line then
+ if current_line + 2 == par.best_line then
if par.display then
pen = pen + par.display_widow_penalty
else
@@ -1408,56 +1399,62 @@ local function post_line_break(par)
end
end
if disc_break then
- if cur_p.passive_pen_broken ~= 0 then
- pen = pen + cur_p.passive_pen_broken
+ if current_break.passive_pen_broken ~= 0 then
+ pen = pen + current_break.passive_pen_broken
else
pen = pen + par.broken_penalty
end
end
if pen ~= 0 then
append_to_vlist(par,new_penalty(pen))
- end
+ end
end
- cur_line = cur_line + 1
- cur_p = cur_p.prev_break
- if cur_p and not post_disc_break then
- local phead = par.head
- local r = phead
+ current_line = current_line + 1
+ current_break = current_break.prev_break
+ if current_break and not post_disc_break then
+ local current = head
+ local next = nil
while true do
- q = r.next
- if q == cur_p.cur_break or q.id == glyph_code then
+ next = getnext(current)
+ if next == current_break.cur_break or getid(next) == glyph_code then
break
end
- local id = q.id
- if not (id == whatsit_code and q.subtype == localpar_code) then
- if id < math_code or (id == kern_code and q.subtype ~= userkern_code) then
- break
- end
+ local id = getid(next)
+ local subtype = getsubtype(next)
+ if id == whatsit_code and subtype == localpar_code then
+ -- nothing
+ elseif id < math_code then
+ -- messy criterion
+ break
+ elseif id == math_code then
+ -- keep the math node
+ setfield(next,"surround",0)
+ break
+ elseif id == kern_code and (subtype ~= userkern_code and not getattr(next,a_fontkern)) then
+ -- fontkerns and accent kerns as well as otf injections
+ break
end
- r = q
+ current = next
end
- if r ~= phead then
- r.next = nil
- flush_node_list(phead.next)
- phead.next = q
- if q then
- q.prev = phead
+ if current ~= head then
+ setfield(current,"next",nil)
+ flush_nodelist(getnext(head))
+ setfield(head,"next",next)
+ if next then
+ setfield(next,"prev",head)
end
end
end
- until not cur_p
- if cur_line ~= par.best_line then -- or not par.head.next then
- report_parbuilders("line breaking")
- end
- if par.head then -- added
--- flush_node(par.head) -- the localpar_code whatsit
- par.head = nil
end
- cur_line = cur_line - 1
+ -- if current_line ~= par.best_line then
+ -- report_parbuilders("line breaking")
+ -- end
+ par.head = nil -- needs checking
+ current_line = current_line - 1
if trace_basic then
- report_parbuilders("paragraph broken into %a lines",cur_line)
+ report_parbuilders("paragraph broken into %a lines",current_line)
end
- texnest[texnest.ptr].prevgraf = cur_line
+ texnest[texnest.ptr].prevgraf = current_line
end
local function wrap_up(par)
@@ -1475,11 +1472,11 @@ local function wrap_up(par)
par.do_last_line_fit = false
else
local glue = par.final_par_glue
- local spec = copy_node(glue.spec)
- spec.width = spec.width + active_short - active_glue
- spec.stretch = 0
- -- flush_node(glue.spec) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
- glue.spec = spec
+ local spec = copy_node(getfield(glue,"spec"))
+ setfield(spec,"width",getfield(spec,"width") + active_short - active_glue)
+ setfield(spec,"stretch",0)
+ -- flush_node(getfield(glue,"spec")) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
+ setfield(glue,"spec",spec)
if trace_lastlinefit then
report_parbuilders("applying last line fit, short %a, glue %p",active_short,active_glue)
end
@@ -1487,8 +1484,8 @@ local function wrap_up(par)
end
-- we have a bunch of glue and and temp nodes not freed
local head = par.head
- if head.id == temp_code then
- par.head = head.next
+ if getid(head) == temp_code then
+ par.head = getnext(head)
flush_node(head)
end
post_line_break(par)
@@ -1498,7 +1495,8 @@ local function wrap_up(par)
end
-- we could do active nodes differently ... table instead of linked list or a list
--- with prev nodes
+-- with prev nodes but it doesn't save much (as we still need to keep indices then
+-- in next)
local function deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) -- no need for adjust if disabled
local active = par.active
@@ -1616,18 +1614,26 @@ local function lastlinecrap(shortfall,active_short,active_glue,cur_active_width,
end
end
-local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
+-- todo: statistics .. count tries and so
- if pi >= infinite_penalty then
- return -- this breakpoint is inhibited by infinite penalty
- elseif pi <= -infinite_penalty then
- pi = eject_penalty -- this breakpoint will be forced
+local trialcount = 0
+
+local function try_break(pi, break_type, par, first_p, current, checked_expansion)
+
+-- trialcount = trialcount + 1
+-- print(trialcount,pi,break_type,current,nuts.tostring(current))
+
+ if pi >= infinite_penalty then -- this breakpoint is inhibited by infinite penalty
+ local p_active = par.active
+ return p_active, p_active and p_active.next
+ elseif pi <= -infinite_penalty then -- this breakpoint will be forced
+ pi = eject_penalty
end
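
Unlike the old version, try_break now always returns the head of the active list plus its successor, even when the break is inhibited, so the flattened main loop below can keep its cached p_active / n_active pair in sync without re-reading par.active after every call. The call sites in this patch all follow the same pattern:

    p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
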
local prev_prev_r = nil -- a step behind prev_r, if type(prev_r)=delta_code
local prev_r = par.active -- stays a step behind r
local r = nil -- runs through the active list
- local no_break_yet = true -- have we found a feasible break at cur_p?
+ local no_break_yet = true -- have we found a feasible break at current?
local node_r_stays_active = false -- should node r remain in the active list?
local line_width = 0 -- the current line will be justified to this width
local line_number = 0 -- line number of current active node
@@ -1648,6 +1654,10 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
local tracing_paragraphs = par.tracing_paragraphs
-- local par_active = par.active
+ local adapt_width = par.adapt_width
+
+ local parshape = par.par_shape_ptr
+
local cur_active_width = checked_expansion and { -- distance from current active node
size = active_width.size,
stretch = active_width.stretch,
@@ -1702,8 +1712,8 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
break_width.adjust_stretch = 0
break_width.adjust_shrink = 0
end
- if cur_p then
- compute_break_width(par,break_type,cur_p)
+ if current then
+ compute_break_width(par,break_type,current)
end
end
if prev_r.id == delta_code then
@@ -1769,14 +1779,14 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
end
for fit_class = fit_very_loose_class, fit_tight_class do
if minimal_demerits[fit_class] <= minimum_demerits then
- -- insert a new active node from best_place[fit_class] to cur_p
+ -- insert a new active node from best_place[fit_class] to current
par.pass_number = par.pass_number + 1
local prev_break = best_place[fit_class]
local passive = {
id = passive_code,
subtype = nosubtype_code,
next = par.passive,
- cur_break = cur_p,
+ cur_break = current,
serial = par.pass_number,
prev_break = prev_break,
passive_pen_inter = par.internal_pen_inter,
@@ -1811,7 +1821,7 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
prev_r.next = q
prev_r = q
if tracing_paragraphs then
- diagnostics.break_node(par,q,fit_class,break_type,cur_p)
+ diagnostics.break_node(par,q,fit_class,break_type,current)
end
end
minimal_demerits[fit_class] = awful_badness
@@ -1850,7 +1860,7 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
end
end
if r == par.active then
- return
+ return r, r and r.next -- p_active, n_active
end
if line_number > par.easy_line then
old_line_number = max_halfword - 1
@@ -1859,12 +1869,16 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
old_line_number = line_number
if line_number > par.last_special_line then
line_width = par.second_width
- elseif par.par_shape_ptr then
- line_width = par.par_shape_ptr[line_number][2]
+ elseif parshape then
+ line_width = parshape[line_number][2]
else
line_width = par.first_width
end
end
+ if adapt_width then
+ local l, r = adapt_width(par,line_number)
+ line_width = line_width - l - r
+ end
end
local artificial_demerits = false -- has d been forced to zero
local shortfall = line_width - cur_active_width.size - par.internal_right_box_width -- used in badness calculations
@@ -1878,17 +1892,17 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
-- this is quite time consuming
local b = r.break_node
local l = b and b.cur_break or first_p
- local o = cur_p and cur_p.prev
- if cur_p and cur_p.id == disc_code and cur_p.pre then
- o = slide_nodes(cur_p.pre)
+ local o = current and getprev(current)
+ if current and getid(current) == disc_code and getfield(current,"pre") then
+ o = find_tail(getfield(current,"pre"))
else
o = find_protchar_right(l,o)
end
- if o and o.id == glyph_code then
+ if o and getid(o) == glyph_code then
pw, rp = right_pw(o)
shortfall = shortfall + pw
end
- local id = l.id
+ local id = getid(l)
if id == glyph_code then
-- ok ?
elseif id == disc_code and l.post then
@@ -1896,7 +1910,7 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
else
l = find_protchar_left(l)
end
- if l and l.id == glyph_code then
+ if l and getid(l) == glyph_code then
pw, lp = left_pw(l)
shortfall = shortfall + pw
end
@@ -1906,27 +1920,23 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
local margin_kern_shrink = 0
if protrude_chars > 1 then
if lp then
--- margin_kern_stretch, margin_kern_shrink = cal_margin_kern_var(lp)
-local data = expansions[lp.font][lp.char]
-if data then
- margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
-end
+ local data = expansions[getfont(lp)][getchar(lp)]
+ if data then
+ margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
+ end
end
if rp then
--- local mka, mkb = cal_margin_kern_var(rp)
--- margin_kern_stretch = margin_kern_stretch + mka
--- margin_kern_shrink = margin_kern_shrink + mkb
-local data = expansions[lp.font][lp.char]
-if data then
- margin_kern_stretch = margin_kern_stretch + data.glyphstretch
- margin_kern_shrink = margin_kern_shrink + data.glyphshrink
-end
+ local data = expansions[getfont(rp)][getchar(rp)]
+ if data then
+ margin_kern_stretch = margin_kern_stretch + data.glyphstretch
+ margin_kern_shrink = margin_kern_shrink + data.glyphshrink
+ end
end
end
local total = cur_active_width.adjust_stretch + margin_kern_stretch
if shortfall > 0 and total > 0 then
if total > shortfall then
- shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2 -- to be adapted
+ shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2
else
shortfall = shortfall - total
end
@@ -1934,7 +1944,7 @@ end
total = cur_active_width.adjust_shrink + margin_kern_shrink
if shortfall < 0 and total > 0 then
if total > - shortfall then
- shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2 -- to be adapted
+ shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2
else
shortfall = shortfall + total
end
@@ -1949,7 +1959,7 @@ end
if cur_active_width.fi ~= 0 or cur_active_width.fil ~= 0 or cur_active_width.fill ~= 0 or cur_active_width.filll ~= 0 then
if not do_last_line_fit then
-- okay
- elseif not cur_p then
+ elseif not current then
found, shortfall, fit_class, g, b = lastlinecrap(shortfall,r.active_short,r.active_glue,cur_active_width,par.fill_width,par.last_line_fit)
else
shortfall = 0
@@ -1984,7 +1994,7 @@ end
end
end
if do_last_line_fit and not found then
- if not cur_p then
+ if not current then
-- g = 0
shortfall = 0
elseif shortfall > 0 then
@@ -2032,7 +2042,7 @@ end
d = d - pi * pi
end
if break_type == hyphenated_code and r.id == hyphenated_code then
- if cur_p then
+ if current then
d = d + par.double_hyphen_demerits
else
d = d + par.final_hyphen_demerits
@@ -2044,9 +2054,9 @@ end
end
end
if tracing_paragraphs then
- diagnostics.feasible_break(par,cur_p,r,b,pi,d,artificial_demerits)
+ diagnostics.feasible_break(par,current,r,b,pi,d,artificial_demerits)
end
- d = d + r.total_demerits -- this is the minimum total demerits from the beginning to cur_p via r
+ d = d + r.total_demerits -- this is the minimum total demerits from the beginning to current via r
if d <= minimal_demerits[fit_class] then
minimal_demerits[fit_class] = d
best_place [fit_class] = r.break_node
@@ -2070,25 +2080,16 @@ end
end
end
-local function kern_break(par, cur_p, first_p, checked_expansion) -- move inline if needed
- local v = cur_p.next
- if par.auto_breaking and v.id == glue_code then
- try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
- end
- local active_width = par.active_width
- if cur_p.id ~= math_code then
- active_width.size = active_width.size + cur_p.kern
- else
- active_width.size = active_width.size + cur_p.surround
- end
-end
-
-- we can call the normal one for simple box building in the otr so we need
-- frequent enabling/disabling
+local dcolor = { [0] = "red", "green", "blue", "magenta", "cyan", "gray" }
+
local temp_head = new_temp()
function constructors.methods.basic(head,d)
+ head = tonut(head)
+
if trace_basic then
report_parbuilders("starting at %a",head)
end
@@ -2140,24 +2141,27 @@ function constructors.methods.basic(head,d)
par.passive = nil -- = 0
par.printed_node = temp_head -- only when tracing, shared
- par.printed_node.next = head
par.pass_number = 0
- par.auto_breaking = true
+-- par.auto_breaking = true
- local cur_p = head
- local first_p = cur_p
+ setfield(temp_head,"next",head)
+
+ local current = head
+ local first_p = current
+
+ local auto_breaking = true
par.font_in_short_display = 0
- if cur_p and cur_p.id == whatsit_code and cur_p.subtype == localpar_code then
- par.init_internal_left_box = cur_p.box_left
- par.init_internal_left_box_width = cur_p.box_left_width
- par.internal_pen_inter = cur_p.pen_inter
- par.internal_pen_broken = cur_p.pen_broken
+ if current and getid(current) == whatsit_code and getsubtype(current) == localpar_code then
+ par.init_internal_left_box = getfield(current,"box_left")
+ par.init_internal_left_box_width = getfield(current,"box_left_width")
+ par.internal_pen_inter = getfield(current,"pen_inter")
+ par.internal_pen_broken = getfield(current,"pen_broken")
par.internal_left_box = par.init_internal_left_box
par.internal_left_box_width = par.init_internal_left_box_width
- par.internal_right_box = cur_p.box_right
- par.internal_right_box_width = cur_p.box_right_width
+ par.internal_right_box = getfield(current,"box_right")
+ par.internal_right_box_width = getfield(current,"box_right_width")
end
-- all passes are combined in this loop so maybe we should split this into
@@ -2169,23 +2173,34 @@ function constructors.methods.basic(head,d)
local fontexps, lastfont -- we can pass fontexps to calculate width if needed
- while cur_p and par.active.next ~= par.active do
- while cur_p and cur_p.id == glyph_code do
+ -- I flattened the inner loop over glyphs .. it looks nicer and the extra p_active ~= n_active
+ -- test is fast enough (and try_break now returns the updated values); the kern helper has been
+ -- inlined as it did a double check on id so in fact we had hardly any code to share
+
+ local p_active = par.active
+ local n_active = p_active and p_active.next
+ local second_pass = par.second_pass
+
+ trialcount = 0
+
+ while current and p_active ~= n_active do
+ local id = getid(current)
+ if id == glyph_code then
if is_rotated[par.line_break_dir] then
- active_width.size = active_width.size + cur_p.height + cur_p.depth
+ active_width.size = active_width.size + getfield(current,"height") + getfield(current,"depth")
else
- active_width.size = active_width.size + cur_p.width
+ active_width.size = active_width.size + getfield(current,"width")
end
if checked_expansion then
- local data= checked_expansion[cur_p.font]
+ local currentfont = getfont(current)
+ local data= checked_expansion[currentfont]
if data then
- local currentfont = cur_p.font
if currentfont ~= lastfont then
fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
lastfont = currentfont
end
if fontexps then
- local expansion = fontexps[cur_p.char]
+ local expansion = fontexps[getchar(current)]
if expansion then
active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch
active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink
@@ -2193,51 +2208,45 @@ function constructors.methods.basic(head,d)
end
end
end
- cur_p = cur_p.next
- end
- if not cur_p then -- TODO
- report_parbuilders("problems with linebreak_tail")
- os.exit()
- end
- local id = cur_p.id
- if id == hlist_code or id == vlist_code then
- if is_parallel[cur_p.dir][par.line_break_dir] then
- active_width.size = active_width.size + cur_p.width
+ elseif id == hlist_code or id == vlist_code then
+ if is_parallel[getfield(current,"dir")][par.line_break_dir] then
+ active_width.size = active_width.size + getfield(current,"width")
else
- active_width.size = active_width.size + cur_p.depth + cur_p.height
+ active_width.size = active_width.size + getfield(current,"depth") + getfield(current,"height")
end
elseif id == glue_code then
- if par.auto_breaking then
- local prev_p = cur_p.prev
+-- if par.auto_breaking then
+ if auto_breaking then
+ local prev_p = getprev(current)
if prev_p and prev_p ~= temp_head then
- local id = prev_p.id
+ local id = getid(prev_p)
if id == glyph_code or
- (id < math_code and (id ~= whatsit_code or prev_p.subtype ~= dir_code)) or -- was: precedes_break(prev_p)
- (id == kern_code and prev_p.subtype ~= userkern_code) then
- try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
+ (id < math_code and (id ~= whatsit_code or getsubtype(prev_p) ~= dir_code)) or -- was: precedes_break(prev_p)
+ (id == kern_code and getsubtype(prev_p) ~= userkern_code) then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
end
end
end
- local spec = check_shrinkage(par,cur_p.spec)
- local order = stretch_orders[spec.stretch_order]
- cur_p.spec = spec
- active_width.size = active_width.size + spec.width
- active_width[order] = active_width[order] + spec.stretch
- active_width.shrink = active_width.shrink + spec.shrink
+ local spec = check_shrinkage(par,getfield(current,"spec"))
+ local order = stretch_orders[getfield(spec,"stretch_order")]
+ setfield(current,"spec",spec)
+ active_width.size = active_width.size + getfield(spec,"width")
+ active_width[order] = active_width[order] + getfield(spec,"stretch")
+ active_width.shrink = active_width.shrink + getfield(spec,"shrink")
elseif id == disc_code then
- local subtype = cur_p.subtype
- if subtype ~= second_disc_code then -- are there still second_disc_code in luatex
+ local subtype = getsubtype(current)
+ if subtype ~= second_disc_code then
local line_break_dir = par.line_break_dir
- if par.second_pass then -- todo: make second pass local
+ if second_pass or subtype <= automatic_disc_code then
local actual_pen = subtype == automatic_disc_code and par.ex_hyphen_penalty or par.hyphen_penalty
- local pre = cur_p.pre
+ local pre = getfield(current,"pre")
if not pre then -- trivial pre-break
disc_width.size = 0
if checked_expansion then
disc_width.adjust_stretch = 0
disc_width.adjust_shrink = 0
end
- try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
else
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = size
@@ -2251,13 +2260,13 @@ function constructors.methods.basic(head,d)
-- disc_width.adjust_stretch = 0
-- disc_width.adjust_shrink = 0
end
- try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
if subtype == first_disc_code then
- local cur_p_next = cur_p.next
- if cur_p_next.id ~= disc_code or cur_p_next.subtype ~= second_disc_code then
+ local cur_p_next = getnext(current)
+ if getid(cur_p_next) ~= disc_code or getsubtype(cur_p_next) ~= second_disc_code then
report_parbuilders("unsupported disc at location %a",1)
else
- local pre = cur_p_next.pre
+ local pre = getfield(cur_p_next,"pre")
if pre then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = disc_width.size + size
@@ -2265,16 +2274,16 @@ function constructors.methods.basic(head,d)
disc_width.adjust_stretch = disc_width.adjust_stretch + adjust_stretch
disc_width.adjust_shrink = disc_width.adjust_shrink + adjust_shrink
end
- try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
--
-- I will look into this some day ... comment in linebreak.w says that this fails,
-- maybe this is what Taco means with his comment in the luatex manual.
--
-- do_one_seven_eight(sub_disc_width_from_active_width);
-- do_one_seven_eight(reset_disc_width);
- -- s = vlink_no_break(vlink(cur_p));
+ -- s = vlink_no_break(vlink(current));
-- add_to_widths(s, line_break_dir, pdf_adjust_spacing,disc_width);
- -- ext_try_break(...,first_p,vlink(cur_p));
+ -- ext_try_break(...,first_p,vlink(current));
--
else
report_parbuilders("unsupported disc at location %a",2)
@@ -2289,7 +2298,7 @@ function constructors.methods.basic(head,d)
end
end
end
- local replace = cur_p.replace
+ local replace = getfield(current,"replace")
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
active_width.size = active_width.size + size
@@ -2300,14 +2309,20 @@ function constructors.methods.basic(head,d)
end
end
elseif id == kern_code then
- if cur_p.subtype == userkern_code then
- kern_break(par,cur_p,first_p, checked_expansion)
+ if getsubtype(current) == userkern_code then
+ local v = getnext(current)
+-- if par.auto_breaking and getid(v) == glue_code then
+ if auto_breaking and getid(v) == glue_code then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ local active_width = par.active_width
+ active_width.size = active_width.size + getfield(current,"kern")
else
- local d = cur_p.kern
- if d ~= 0 then
- active_width.size = active_width.size + d
- if checked_expansion and expand_kerns and (cur_p.subtype == kerning_code or cur_p[a_fontkern]) then
- local stretch, shrink = kern_stretch_shrink(cur_p,d)
+ local kern = getfield(current,"kern")
+ if kern ~= 0 then
+ active_width.size = active_width.size + kern
+ if checked_expansion and expand_kerns and (getsubtype(current) == kerning_code or getattr(current,a_fontkern)) then
+ local stretch, shrink = kern_stretch_shrink(current,kern)
if expand_kerns == "stretch" then
active_width.adjust_stretch = active_width.adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -2320,40 +2335,47 @@ function constructors.methods.basic(head,d)
end
end
elseif id == math_code then
- par.auto_breaking = cur_p.subtype == endmath_code
- kern_break(par,cur_p, first_p, checked_expansion)
+-- par.auto_breaking = getsubtype(current) == endmath_code
+ auto_breaking = getsubtype(current) == endmath_code
+ local v = getnext(current)
+-- if par.auto_breaking and getid(v) == glue_code then
+ if auto_breaking and getid(v) == glue_code then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ local active_width = par.active_width
+ active_width.size = active_width.size + getfield(current,"surround")
elseif id == rule_code then
- active_width.size = active_width.size + cur_p.width
+ active_width.size = active_width.size + getfield(current,"width")
elseif id == penalty_code then
- try_break(cur_p.penalty, unhyphenated_code, par, first_p, cur_p, checked_expansion)
+ p_active, n_active = try_break(getfield(current,"penalty"), unhyphenated_code, par, first_p, current, checked_expansion)
elseif id == whatsit_code then
- local subtype = cur_p.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- par.internal_pen_inter = cur_p.pen_inter
- par.internal_pen_broken = cur_p.pen_broken
- par.internal_left_box = cur_p.box_left
- par.internal_left_box_width = cur_p.box_left_width
- par.internal_right_box = cur_p.box_right
- par.internal_right_box_width = cur_p.box_right_width
+ par.internal_pen_inter = getfield(current,"pen_inter")
+ par.internal_pen_broken = getfield(current,"pen_broken")
+ par.internal_left_box = getfield(current,"box_left")
+ par.internal_left_box_width = getfield(current,"box_left_width")
+ par.internal_right_box = getfield(current,"box_right")
+ par.internal_right_box_width = getfield(current,"box_right_width")
elseif subtype == dir_code then
par.line_break_dir = checked_line_dir(dirstack) or par.line_break_dir
else
local get_width = get_whatsit_width[subtype]
if get_width then
- active_width.size = active_width.size + get_width(cur_p)
+ active_width.size = active_width.size + get_width(current,par.line_break_dir)
end
end
- elseif id == mark_code or id == ins_code or id == adjust_code then
- -- skip
- else
- report_parbuilders("node of type %a found in paragraph",type(id))
+ elseif trace_unsupported then
+ if id == mark_code or id == ins_code or id == adjust_code then
+ -- skip
+ else
+ report_parbuilders("node of type %a found in paragraph",type(id))
+ end
end
- cur_p = cur_p.next
+ current = getnext(current)
end
- if not cur_p then
- try_break(eject_penalty, hyphenated_code, par, first_p, cur_p, checked_expansion)
- local p_active = par.active
- local n_active = p_active.next
+ if not current then
+ local p_active, n_active = try_break(eject_penalty, hyphenated_code, par, first_p, current, checked_expansion)
if n_active ~= p_active then
local r = n_active
par.fewest_demerits = awful_badness
@@ -2367,7 +2389,7 @@ function constructors.methods.basic(head,d)
par.best_line = par.best_bet.line_number
local asked_looseness = par.looseness
if asked_looseness == 0 then
- return wrap_up(par)
+ return tonode(wrap_up(par))
end
local r = n_active
local actual_looseness = 0
@@ -2387,30 +2409,30 @@ function constructors.methods.basic(head,d)
end
end
r = r.next
- until r == p_active -- weird, loop list?
+ until r == p_active
par.best_line = par.best_bet.line_number
if actual_looseness == asked_looseness or par.final_pass then
- return wrap_up(par)
+ return tonode(wrap_up(par))
end
end
end
reset_meta(par) -- clean up the memory by removing the break nodes
- if not par.second_pass then
+ if not second_pass then
if tracing_paragraphs then
diagnostics.current_pass(par,"secondpass")
end
- par.threshold = par.tolerance
+ par.threshold = par.tolerance
par.second_pass = true
- par.final_pass = par.emergency_stretch <= 0
+ par.final_pass = par.emergency_stretch <= 0
else
if tracing_paragraphs then
diagnostics.current_pass(par,"emergencypass")
end
par.background.stretch = par.background.stretch + par.emergency_stretch
- par.final_pass = true
+ par.final_pass = true
end
end
- return wrap_up(par)
+ return tonode(wrap_up(par))
end
-- standard tex logging .. will be adapted ..
@@ -2435,48 +2457,58 @@ function diagnostics.current_pass(par,what)
write_nl("log",format("@%s",what))
end
-local function short_display(a,font_in_short_display)
+local verbose = false -- true
+
+local function short_display(target,a,font_in_short_display)
while a do
- local id = a.id
+ local id = getid(a)
if id == glyph_code then
- local font = a.font
+ local font = getfont(a)
if font ~= font_in_short_display then
- write("log",tex.fontidentifier(font) .. ' ')
+ write(target,tex.fontidentifier(font) .. ' ')
font_in_short_display = font
end
- if a.subtype == ligature_code then
- font_in_short_display = short_display(a.components,font_in_short_display)
+ if getsubtype(a) == ligature_code then
+ font_in_short_display = short_display(target,getfield(a,"components"),font_in_short_display)
else
- write("log",utfchar(a.char))
+ write(target,utfchar(getchar(a)))
end
--- elseif id == rule_code then
--- write("log","|")
--- elseif id == glue_code then
--- if a.spec.writable then
--- write("log"," ")
--- end
--- elseif id == math_code then
--- write("log","$")
elseif id == disc_code then
- font_in_short_display = short_display(a.pre,font_in_short_display)
- font_in_short_display = short_display(a.post,font_in_short_display)
- else -- no explicit checking
- write("log",format("[%s]",nodecodes[id]))
+ font_in_short_display = short_display(target,getfield(a,"pre"),font_in_short_display)
+ font_in_short_display = short_display(target,getfield(a,"post"),font_in_short_display)
+ elseif verbose then
+ write(target,format("[%s]",nodecodes[id]))
+ elseif id == rule_code then
+ write(target,"|")
+ elseif id == glue_code then
+ if getfield(getfield(a,"spec"),"writable") then
+ write(target," ")
+ end
+ elseif id == kern_code and (getsubtype(a) == userkern_code or getattr(a,a_fontkern)) then
+ if verbose then
+ write(target,"[|]")
+ else
+ write(target,"")
+ end
+ elseif id == math_code then
+ write(target,"$")
+ else
+ write(target,"[]")
end
- a = a.next
+ a = getnext(a)
end
return font_in_short_display
end
diagnostics.short_display = short_display
-function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ?
+function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
local passive = par.passive
local typ_ind = break_type == hyphenated_code and '-' or ""
if par.do_last_line_fit then
local s = number.toscaled(q.active_short)
local g = number.toscaled(q.active_glue)
- if cur_p then
+ if current then
write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s g=%s",
passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g))
else
@@ -2494,26 +2526,26 @@ function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ?
end
end
-function diagnostics.feasible_break(par, cur_p, r, b, pi, d, artificial_demerits)
+function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demerits)
local printed_node = par.printed_node
- if printed_node ~= cur_p then
+ if printed_node ~= current then
write_nl("log","")
- if not cur_p then
- par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
+ if not current then
+ par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
else
- local save_link = cur_p.next
- cur_p.next = nil
+ local save_link = getnext(current)
+ setfield(current,"next",nil)
write_nl("log","")
- par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
- cur_p.next = save_link
+ par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
+ setfield(current,"next",save_link)
end
- par.printed_node = cur_p
+ par.printed_node = current
end
write_nl("log","@")
- if not cur_p then
+ if not current then
write_esc("par")
else
- local id = cur_p.id
+ local id = getid(current)
if id == glue_code then
-- print nothing
elseif id == penalty_code then
@@ -2562,49 +2594,54 @@ end)
-- with the glyph.
local function glyph_width_height_depth(curdir,pdir,p)
+ local wd = getfield(p,"width")
+ local ht = getfield(p,"height")
+ local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (p.height + p.depth) / 2
- return p.width, half, half
+ local half = (ht + dp) / 2
+ return wd, half, half
else
- local half = p.width / 2
- return p.height + p.depth, half, half
+ local half = wd / 2
+ return ht + dp, half, half
end
elseif is_rotated[pdir] then
if is_parallel[curdir][pdir] then
- local half = (p.height + p.depth) / 2
- return p.width, half, half
+ local half = (ht + dp) / 2
+ return wd, half, half
else
- return p.height + p.depth, p.width, 0 -- weird
+ return ht + dp, wd, 0 -- weird
end
else
if glyphdir_is_equal[curdir][pdir] then
- return p.width, p.height, p.depth
+ return wd, ht, dp
elseif is_opposite[curdir][pdir] then
- return p.width, p.depth, p.height
+ return wd, dp, ht
else -- can this happen?
- return p.height + p.depth, p.width, 0 -- weird
+ return ht + dp, wd, 0
end
end
end
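
A quick sanity check of the mapping above with hypothetical glyph dimensions (width 5pt, height 7pt, depth 3pt): equal directions pass the triple through, opposite directions swap height and depth, and a rotated current direction centres the glyph on the axis:

    -- illustrative values only, in points for readability
    local wd, ht, dp = 5, 7, 3
    -- glyphdir equal               -> wd, ht, dp          = 5, 7, 3
    -- directions opposite          -> wd, dp, ht          = 5, 3, 7
    -- curdir rotated, parallel     -> wd, (ht+dp)/2 twice = 5, 5, 5
    -- curdir rotated, not parallel -> ht+dp, wd/2 twice   = 10, 2.5, 2.5
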
local function pack_width_height_depth(curdir,pdir,p)
+ local wd = getfield(p,"width")
+ local ht = getfield(p,"height")
+ local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (p.height + p.depth) / 2
- return p.width, half, half
+ local half = (ht + dp) / 2
+ return wd, half, half
else -- can this happen?
- local half = p.width / 2
- return p.height + p.depth, half, half
+ local half = wd / 2
+ return ht + dp, half, half
end
else
if pardir_is_equal[curdir][pdir] then
- return p.width, p.height, p.depth
+ return wd, ht, dp
elseif is_opposite[curdir][pdir] then
- return p.width, p.depth, p.height
+ return wd, dp, ht
else -- weird dimensions, can this happen?
- -- return p.width, p.depth, p.height
- return p.height + p.depth, p.width, 0
+ return ht + dp, wd, 0
end
end
end
@@ -2622,17 +2659,17 @@ end
--
-- local hlist = new_node("hlist")
--
--- hlist.list = head
--- hlist.dir = direction or tex.textdir
--- hlist.width = width
--- hlist.height = height
--- hlist.depth = depth
+-- setfield(hlist,"list",head)
+-- setfield(hlist,"dir",direction or tex.textdir)
+-- setfield(hlist,"width",width)
+-- setfield(hlist,"height",height)
+-- setfield(hlist,"depth",depth)
--
-- if delta == 0 then
--
--- hlist.glue_sign = 0
--- hlist.glue_order = 0
--- hlist.glue_set = 0
+-- setfield(hlist,"glue_sign",0)
+-- setfield(hlist,"glue_order",0)
+-- setfield(hlist,"glue_set",0)
--
-- else
--
@@ -2648,16 +2685,15 @@ end
-- else
-- local stretch = analysis.stretch
-- if stretch ~= 0 then
--- hlist.glue_sign = 1 -- stretch
--- hlist.glue_order = order
--- hlist.glue_set = delta/stretch
+-- setfield(hlist,"glue_sign",1) -- stretch
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",delta/stretch)
-- else
--- hlist.glue_sign = 0 -- nothing
--- hlist.glue_order = order
--- hlist.glue_set = 0
+-- setfield(hlist,"glue_sign",0) -- nothing
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",0)
-- end
-- end
--- print("stretch",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- else
--
@@ -2666,16 +2702,15 @@ end
-- else
-- local shrink = analysis.shrink
-- if shrink ~= 0 then
--- hlist.glue_sign = 2 -- shrink
--- hlist.glue_order = order
--- hlist.glue_set = - delta/shrink
+-- setfield(hlist,"glue_sign",2) -- shrink
+-- setfield(hlist,"glue_order",order)
+--             setfield(hlist,"glue_set",-delta/shrink)
-- else
--- hlist.glue_sign = 0 -- nothing
--- hlist.glue_order = order
--- hlist.glue_set = 0
+-- setfield(hlist,"glue_sign",0) -- nothing
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",0)
-- end
-- end
--- print("shrink",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- end
--
@@ -2689,7 +2724,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = current.id
+-- local id = getid(current)
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if stretch then
@@ -2699,12 +2734,12 @@ end
-- current.expansion_factor = font_expand_ratio * stretch
-- end
-- elseif id == kern_code then
--- local kern = current.kern
--- if kern ~= 0 and current.subtype == kerning_code then
--- current.kern = font_expand_ratio * current.kern
+-- local kern = getfield(current,"kern")
+-- if kern ~= 0 and getsubtype(current) == kerning_code then
+-- setfield(current,"kern",font_expand_ratio * kern)
-- end
-- end
--- current = current.next
+-- current = getnext(current)
-- end
-- elseif font_expand_ratio < 0 then
-- if font_expand_ratio < -1000 then
@@ -2712,7 +2747,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = current.id
+-- local id = getid(current)
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if shrink then
@@ -2722,26 +2757,31 @@ end
-- current.expansion_factor = font_expand_ratio * shrink
-- end
-- elseif id == kern_code then
--- local kern = current.kern
--- if kern ~= 0 and current.subtype == kerning_code then
--- current.kern = font_expand_ratio * current.kern
+-- local kern = getfield(current,"kern")
+-- if kern ~= 0 and getsubtype(current) == kerning_code then
+-- setfield(current,"kern",font_expand_ratio * kern)
-- end
-- end
--- current = current.next
+-- current = getnext(current)
-- end
-- end
-- return hlist, 0
-- end
-local function hpack(head,width,method,direction) -- fast version when head = nil
+local function hpack(head,width,method,direction,firstline,line) -- fast version when head = nil
-- we can pass the adjust_width and adjust_height so that we don't need to recalculate them but
- -- with the glue mess it's less trivial as we lack detail
+ -- with the glue mess it's less trivial as we lack detail .. challenge
local hlist = new_node("hlist")
+ setfield(hlist,"dir",direction)
+
if head == nil then
+ setfield(hlist,"width",width)
return hlist, 0
+ else
+ setfield(hlist,"list",head)
end
local cal_expand_ratio = method == "cal_expand_ratio" or method == "subst_ex_font"
@@ -2757,8 +2797,6 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
local font_shrink = 0
local font_expand_ratio = 0
local last_badness = 0
- local disc_stack = { }
- local disc_level = 0
local expansion_stack = cal_expand_ratio and { } -- todo: optionally pass this
local expansion_index = 0
local total_stretch = { [0] = 0, 0, 0, 0, 0 }
@@ -2768,11 +2806,8 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
- local adjust_tail = adjust_head and slide_nodes(adjust_head)
- local pre_adjust_tail = pre_adjust_head and slide_nodes(pre_adjust_head)
-
- hlist.list = head
- hlist.dir = hpack_dir
+ local adjust_tail = adjust_head and slide_nodelist(adjust_head) -- todo: find_tail
+ local pre_adjust_tail = pre_adjust_head and slide_nodelist(pre_adjust_head) -- todo: find_tail
new_dir_stack(hpack_dir)
@@ -2787,173 +2822,72 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
local fontexps, lastfont
- local current = head
+ local function process(current) -- called nested in disc replace
- while current do
- local id = current.id
- if id == glyph_code then
- if cal_expand_ratio then
- local currentfont = current.font
- if currentfont ~= lastfont then
- fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
- lastfont = currentfont
- end
- if fontexps then
- local expansion = fontexps[current.char]
- if expansion then
- font_stretch = font_stretch + expansion.glyphstretch
- font_shrink = font_shrink + expansion.glyphshrink
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = current
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ if cal_expand_ratio then
+ local currentfont = getfont(current)
+ if currentfont ~= lastfont then
+ fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
+ lastfont = currentfont
end
- end
- end
- -- use inline if no expansion
- local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- current = current.next
- elseif id == kern_code then
- local kern = current.kern
- if kern == 0 then
- -- no kern
- else
- if cal_expand_ratio and expand_kerns and current.subtype == kerning_code or current[a_fontkern] then -- check p.kern
- local stretch, shrink = kern_stretch_shrink(current,kern)
- if expand_kerns == "stretch" then
- font_stretch = font_stretch + stretch
- elseif expand_kerns == "shrink" then
- font_shrink = font_shrink + shrink
- else
- font_stretch = font_stretch + stretch
- font_shrink = font_shrink + shrink
+ if fontexps then
+ local expansion = fontexps[getchar(current)]
+ if expansion then
+ font_stretch = font_stretch + expansion.glyphstretch
+ font_shrink = font_shrink + expansion.glyphshrink
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
+ end
end
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = current
end
- natural = natural + kern
- end
- current = current.next
- elseif id == disc_code then
- if current.subtype ~= second_disc_code then
- -- we follow the end of line disc chain
- local replace = current.replace
- if replace then
- disc_level = disc_level + 1
- disc_stack[disc_level] = current.next
- current = replace
- else
- current = current.next
- end
- else
- current = current.next
- end
- elseif id == glue_code then
- local spec = current.spec
- natural = natural + spec.width
- local op = spec.stretch_order
- local om = spec.shrink_order
- total_stretch[op] = total_stretch[op] + spec.stretch
- total_shrink [om] = total_shrink [om] + spec.shrink
- if current.subtype >= leaders_code then
- local leader = current.leader
- local ht = leader.height
- local dp = leader.depth
+ -- use inline
+ local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
+ natural = natural + wd
if ht > height then
height = ht
end
if dp > depth then
depth = dp
end
- end
- current = current.next
- elseif id == hlist_code or id == vlist_code then
- local sh = current.shift
- local wd, ht, dp = pack_width_height_depth(hpack_dir,current.dir or hpack_dir,current) -- added: or pack_dir
- local hs, ds = ht - sh, dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
- end
- if ds > depth then
- depth = ds
- end
- current = current.next
- elseif id == rule_code then
- local wd = current.width
- local ht = current.height
- local dp = current.depth
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- current = current.next
- elseif id == math_code then
- natural = natural + current.surround
- current = current.next
- elseif id == unset_code then
- local wd = current.width
- local ht = current.height
- local dp = current.depth
- local sh = current.shift
- local hs = ht - sh
- local ds = dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
- end
- if ds > depth then
- depth = ds
- end
- current = current.next
- elseif id == ins_code or id == mark_code then
- local prev = current.prev
- local next = current.next
- if adjust_tail then -- todo
- if next then
- next.prev = prev
+ elseif id == kern_code then
+ local kern = getfield(current,"kern")
+ if kern == 0 then
+ -- no kern
+ elseif getsubtype(current) == kerning_code then -- check getfield(p,"kern")
+ if cal_expand_ratio then
+ local stretch, shrink = kern_stretch_shrink(current,kern)
+ font_stretch = font_stretch + stretch
+ font_shrink = font_shrink + shrink
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
+ end
+ natural = natural + kern
+ else
+ natural = natural + kern
end
- if prev then
- prev.next = next
+ elseif id == disc_code then
+ local subtype = getsubtype(current)
+ if subtype ~= second_disc_code then
+ -- todo : local stretch, shrink = char_stretch_shrink(s)
+ local replace = getfield(current,"replace")
+ if replace then
+ process(replace)
+ end
end
- current.prev = adjust_tail
- current.next = nil
- adjust_tail.next = current
- adjust_tail = current
- else
- adjust_head = current
- adjust_tail = current
- current.prev = nil
- current.next = nil
- end
- current = next
- elseif id == adjust_code then
- local list = current.list
- if adjust_tail then
- adjust_tail.next = list
- adjust_tail = slide_nodes(list)
- else
- adjust_head = list
- adjust_tail = slide_nodes(list)
- end
- current = current.next
- elseif id == whatsit_code then
- local subtype = current.subtype
- if subtype == dir_code then
- hpack_dir = checked_line_dir(stack,current) or hpack_dir
- else
- local get_dimensions = get_whatsit_dimensions[subtype]
- if get_dimensions then
- local wd, ht, dp = get_dimensions(current)
- natural = natural + wd
+ elseif id == glue_code then
+ local spec = getfield(current,"spec")
+ natural = natural + getfield(spec,"width")
+ local op = getfield(spec,"stretch_order")
+ local om = getfield(spec,"shrink_order")
+ total_stretch[op] = total_stretch[op] + getfield(spec,"stretch")
+ total_shrink [om] = total_shrink [om] + getfield(spec,"shrink")
+ if getsubtype(current) >= leaders_code then
+ local leader = getleader(current)
+ local ht = getfield(leader,"height")
+ local dp = getfield(leader,"depth")
if ht > height then
height = ht
end
@@ -2961,51 +2895,132 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
depth = dp
end
end
+ elseif id == hlist_code or id == vlist_code then
+ local sh = getfield(current,"shift")
+ local wd, ht, dp = pack_width_height_depth(hpack_dir,getfield(current,"dir") or hpack_dir,current) -- added: or pack_dir
+ local hs, ds = ht - sh, dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ elseif id == rule_code then
+ local wd = getfield(current,"width")
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ elseif id == math_code then
+ natural = natural + getfield(current,"surround")
+ elseif id == unset_code then
+ local wd = getfield(current,"width")
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local sh = getfield(current,"shift")
+ local hs = ht - sh
+ local ds = dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ elseif id == ins_code or id == mark_code then
+ local prev = getprev(current)
+ local next = getnext(current)
+ if adjust_tail then -- todo
+ if next then
+ setfield(next,"prev",prev)
+ end
+ if prev then
+ setfield(prev,"next",next)
+ end
+ setfield(current,"prev",adjust_tail)
+ setfield(current,"next",nil)
+               setfield(adjust_tail,"next",current)
+ adjust_tail = current
+ else
+ adjust_head = current
+ adjust_tail = current
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ end
+ elseif id == adjust_code then
+ local list = getlist(current)
+ if adjust_tail then
+               setfield(adjust_tail,"next",list)
+ else
+ adjust_head = list
+ end
+ adjust_tail = slide_nodelist(list) -- find_tail(list)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(current)
+ if subtype == dir_code then
+ hpack_dir = checked_line_dir(stack,current) or hpack_dir
+ else
+ local get_dimensions = get_whatsit_dimensions[subtype]
+ if get_dimensions then
+ local wd, ht, dp = get_dimensions(current,hpack_dir)
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ end
+ end
+ elseif id == marginkern_code then
+ local width = getfield(current,"width")
+ if cal_expand_ratio then
+ -- is this ok?
+ local glyph = getfield(current,"glyph")
+ local char_pw = getsubtype(current) == leftmargin_code and left_pw or right_pw
+ font_stretch = font_stretch - width - char_pw(glyph)
+ font_shrink = font_shrink - width - char_pw(glyph)
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = glyph
+ end
+ natural = natural + width
end
- current = current.next
- elseif id == marginkern_code then
- if cal_expand_ratio then
- local glyph = current.glyph
- local char_pw = current.subtype == leftmargin_code and left_pw or right_pw
- font_stretch = font_stretch - current.width - char_pw(glyph)
- font_shrink = font_shrink - current.width - char_pw(glyph)
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = glyph
- end
- natural = natural + current.width
- current = current.next
- else
- current = current.next
- end
- if not current and disc_level > 0 then
- current = disc_stack[disc_level]
- disc_level = disc_level - 1
+ current = getnext(current)
end
+
end
+
+ process(head)
+
if adjust_tail then
adjust_tail.next = nil -- todo
end
if pre_adjust_tail then
pre_adjust_tail.next = nil -- todo
end
- if mode == "additional" then
+ if method == "additional" then
width = width + natural
end
- hlist.width = width
- hlist.height = height
- hlist.depth = depth
+ setfield(hlist,"width",width)
+ setfield(hlist,"height",height)
+ setfield(hlist,"depth",depth)
local delta = width - natural
if delta == 0 then
- hlist.glue_sign = 0
- hlist.glue_order = 0
- hlist.glue_set = 0
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",0)
+ setfield(hlist,"glue_set",0)
elseif delta > 0 then
-- natural width smaller than requested width
local order = (total_stretch[4] ~= 0 and 4 or total_stretch[3] ~= 0 and 3) or
(total_stretch[2] ~= 0 and 2 or total_stretch[1] ~= 0 and 1) or 0
--- local correction = 0
if cal_expand_ratio and order == 0 and font_stretch > 0 then -- check sign of font_stretch
font_expand_ratio = delta/font_stretch
@@ -3017,41 +3032,38 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if g.id == glyph_code then
- local currentfont = g.font
+ if getid(g) == glyph_code then
+ local currentfont = getfont(g)
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[g.char]
+ local data = fontexps[getchar(g)]
if trace_expansion then
setnodecolor(g,"hz:positive")
end
e = font_expand_ratio * data.glyphstretch / 1000
--- correction = correction + (e / 1000) * g.width
else
- local kern = g.kern
+ local kern = getfield(g,"kern")
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * stretch / 1000
--- correction = correction + (e / 1000) * kern
end
- g.expansion_factor = e
+ setfield(g,"expansion_factor",e)
end
end
--- delta = delta - correction
local tso = total_stretch[order]
if tso ~= 0 then
- hlist.glue_sign = 1
- hlist.glue_order = order
- hlist.glue_set = delta/tso
+ setfield(hlist,"glue_sign",1)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",delta/tso)
else
- hlist.glue_sign = 0
- hlist.glue_order = order
- hlist.glue_set = 0
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",0)
end
if font_expand_ratio ~= 0 then
-- todo
- elseif order == 0 then -- and hlist.list then
+ elseif order == 0 then -- and getlist(hlist) then
last_badness = calculate_badness(delta,total_stretch[0])
if last_badness > tex.hbadness then
if last_badness > 100 then
@@ -3065,7 +3077,6 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
-- natural width larger than requested width
local order = total_shrink[4] ~= 0 and 4 or total_shrink[3] ~= 0 and 3
or total_shrink[2] ~= 0 and 2 or total_shrink[1] ~= 0 and 1 or 0
--- local correction = 0
if cal_expand_ratio and order == 0 and font_shrink > 0 then -- check sign of font_shrink
font_expand_ratio = delta/font_shrink
@@ -3077,65 +3088,60 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if g.id == glyph_code then
- local currentfont = g.font
+ if getid(g) == glyph_code then
+ local currentfont = getfont(g)
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[g.char]
+ local data = fontexps[getchar(g)]
if trace_expansion then
setnodecolor(g,"hz:negative")
end
e = font_expand_ratio * data.glyphshrink / 1000
- -- local d = (e / 1000) * 1000
- -- local eps = g.width - (1 + d / 1000000) * g.width
- -- correction = correction + eps
- -- e = d
--- correction = correction + (e / 1000) * g.width
else
- local kern = g.kern
+ local kern = getfield(g,"kern")
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * shrink / 1000
--- correction = correction + (e / 1000) * kern
end
- g.expansion_factor = e
+ setfield(g,"expansion_factor",e)
end
end
--- delta = delta - correction
local tso = total_shrink[order]
if tso ~= 0 then
- hlist.glue_sign = 2
- hlist.glue_order = order
- hlist.glue_set = -delta/tso
+ setfield(hlist,"glue_sign",2)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",-delta/tso)
else
- hlist.glue_sign = 0
- hlist.glue_order = order
- hlist.glue_set = 0
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",0)
end
if font_expand_ratio ~= 0 then
-- todo
- elseif tso < -delta and order == 0 then -- and hlist.list then
+ elseif tso < -delta and order == 0 then -- and getlist(hlist) then
last_badness = 1000000
- hlist.glue_set = 1
+ setfield(hlist,"glue_set",1)
local fuzz = - delta - total_shrink[0]
local hfuzz = tex.hfuzz
if fuzz > hfuzz or tex.hbadness < 100 then
local overfullrule = tex.overfullrule
if fuzz > hfuzz and overfullrule > 0 then
-- weird, is always called and no rules shows up
- slide_nodes(list).next = new_rule(overfullrule,nil,nil,hlist.dir)
+ setfield(slide_nodelist(list),"next",new_rule(overfullrule,nil,nil,hlist.dir)) -- todo: find_tail
end
diagnostics.overfull_hbox(hlist,line,-delta)
end
- elseif order == 0 and hlist.list and last_badness > tex.hbadness then
+ elseif order == 0 and getlist(hlist) and last_badness > tex.hbadness then
diagnostics.bad_hbox(hlist,line,last_badness)
end
end
return hlist, last_badness
end
-xpack_nodes = hpack -- comment this for old fashioned expansion
+xpack_nodes = hpack -- comment this for old fashioned expansion (we need to fix float mess)
+
+constructors.methods.hpack = hpack
local function common_message(hlist,line,str)
write_nl("")
@@ -3173,20 +3179,3 @@ end
function diagnostics.loose_hbox(hlist,line,b)
common_message(hlist,line,format("Loose \\hbox (badness %i)",b))
end
-
--- e = font_expand_ratio * data.glyphstretch / 1000
--- local stretch = data.stretch
--- if e >= stretch then
--- e = stretch
--- else
--- local step = 5
--- e = math.round(e/step) * step
--- end
-
--- local shrink = - data.shrink
--- if e <= shrink then
--- e = shrink
--- else
--- local step = 5
--- e = math.round(e/step) * step
--- end
diff --git a/tex/context/base/node-met.lua b/tex/context/base/node-met.lua
index c85a53c8e..335ce2a98 100644
--- a/tex/context/base/node-met.lua
+++ b/tex/context/base/node-met.lua
@@ -68,7 +68,7 @@ local nodes = nodes
nodes.gonuts = gonuts
-local nodecodes = nodes.codes
+local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -332,6 +332,28 @@ function nodes.writable_spec(n) -- not pool
return spec
end
+function nodes.copy_spec(old,free) -- also frees
+ if not old then
+ return n_new_node("glue_spec")
+ else
+ local new = n_copy_node(old)
+ if free and old.writable then
+ free_node(old)
+ end
+ return new
+ end
+end
+
+function nodes.free_spec(old)
+ if not old then
+ -- skip
+ elseif old.writable then
+ free_node(old)
+ else
+ -- skip
+ end
+end
+
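+-- A hypothetical usage sketch (not part of this patch) of the two helpers above:
+-- take a private copy of a glue spec, tweak it, hook it back in, and release the
+-- previous one when it was writable. Here "g" stands for some glue node and the
+-- factor is arbitrary.
+--
+-- local old  = g.spec
+-- local spec = nodes.copy_spec(old)  -- plain copy, nothing freed yet
+-- spec.stretch = 2 * spec.stretch
+-- g.spec = spec
+-- nodes.free_spec(old)               -- only frees it when it was writable
+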
if gonuts then
function nodes.reference(n)
@@ -668,3 +690,34 @@ end
nodes.keys = keys -- [id][subtype]
nodes.fields = nodefields -- (n)
+
+-- one issue solved in flush_node:
+--
+-- case glue_spec_node:
+-- if (glue_ref_count(p)!=null) {
+-- decr(glue_ref_count(p));
+-- return ;
+-- /*
+-- } else if (! valid_node(p)) {
+-- return ;
+-- */
+-- /*
+-- } else {
+-- free_node(p, get_node_size(type(p), subtype(p)));
+-- return ;
+-- */
+-- }
+-- break ;
+--
+-- or:
+--
+-- case glue_spec_node:
+-- if (glue_ref_count(p)!=null) {
+-- decr(glue_ref_count(p));
+-- return ;
+-- } else if (valid_node(p)) {
+-- free_node(p, get_node_size(type(p), subtype(p)));
+-- return ;
+-- } else {
+-- break ;
+-- }
diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua
index 9fc35a048..41f95be45 100644
--- a/tex/context/base/node-mig.lua
+++ b/tex/context/base/node-mig.lua
@@ -6,15 +6,32 @@ if not modules then modules = { } end modules ['node-mig'] = {
license = "see context related readme files"
}
+-- todo: insert_after
+
local format = string.format
-local attributes, nodes, node = attributes, nodes, node
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-local remove_nodes = nodes.remove
+local report_nodes = logs.reporter("nodes","migrations")
-local nodecodes = nodes.nodecodes
+local attributes = attributes
+local nodes = nodes
local tasks = nodes.tasks
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local remove_node = nuts.remove
+
+local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local insert_code = nodecodes.ins
@@ -22,10 +39,6 @@ local mark_code = nodecodes.mark
local a_migrated = attributes.private("migrated")
-local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-
-local report_nodes = logs.reporter("nodes","migrations")
-
local migrate_inserts, migrate_marks, inserts_too
local t_inserts, t_marks, t_sweeps = 0, 0, 0
@@ -33,32 +46,42 @@ local t_inserts, t_marks, t_sweeps = 0, 0, 0
local function locate(head,first,last,ni,nm)
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == vlist_code or id == hlist_code then
- current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
- current = current.next
+ local list = getlist(current)
+ if list then
+ list, first, last, ni, nm = locate(list,first,last,ni,nm)
+ setfield(current,"list",list)
+ end
+ current = getnext(current)
elseif migrate_inserts and id == insert_code then
local insert
- head, current, insert = remove_nodes(head,current)
- insert.next = nil
+ head, current, insert = remove_node(head,current)
+ setfield(insert,"next",nil)
if first then
- insert.prev, last.next = last, insert
+ setfield(insert,"prev",last)
+ setfield(last,"next",insert)
else
- insert.prev, first = nil, insert
+ setfield(insert,"prev",nil)
+ first = insert
end
- last, ni = insert, ni + 1
+ last = insert
+ ni = ni + 1
elseif migrate_marks and id == mark_code then
local mark
- head, current, mark = remove_nodes(head,current)
- mark.next = nil
+ head, current, mark = remove_node(head,current)
+ setfield(mark,"next",nil)
if first then
- mark.prev, last.next = last, mark
+ setfield(mark,"prev",last)
+ setfield(last,"next",mark)
else
- mark.prev, first = nil, mark
+ setfield(mark,"prev",nil)
+ first = mark
end
- last, nm = mark, nm + 1
+ last = mark
+ nm = nm + 1
else
- current= current.next
+ current = getnext(current)
end
end
return head, first, last, ni, nm
@@ -70,39 +93,43 @@ function nodes.handlers.migrate(head,where)
if trace_migrations then
report_nodes("migration sweep %a",where)
end
- local current = head
+ local current = tonut(head)
while current do
- local id = current.id
+ local id = getid(current)
-- inserts_too is a temp hack, we should only do them when it concerns
-- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
- current[a_migrated] = 1
+ if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not getattr(current,a_migrated) then
+ setattr(current,a_migrated,1)
t_sweeps = t_sweeps + 1
- local h = current.list
+ local h = getlist(current)
local first, last, ni, nm
while h do
- local id = h.id
+ local id = getid(h)
if id == vlist_code or id == hlist_code then
h, first, last, ni, nm = locate(h,first,last,0,0)
end
- h = h.next
+ h = getnext(h)
end
if first then
- t_inserts, t_marks = t_inserts + ni, t_marks + nm
+ t_inserts = t_inserts + ni
+ t_marks = t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
t_sweeps,nodecodes[id],ni,nm,where)
end
- -- inserts after head
- local n = current.next
+ -- inserts after head, use insert_after
+ local n = getnext(current)
if n then
- last.next, n.prev = n, last
+ setfield(last,"next",n)
+ setfield(n,"prev",last)
end
- current.next, first.prev = first, current
- done, current = true, last
+ setfield(current,"next",first)
+ setfield(first,"prev",current)
+ done = true
+ current = last
end
end
- current = current.next
+        current = getnext(current)
end
return head, done
end
diff --git a/tex/context/base/node-nut.lua b/tex/context/base/node-nut.lua
new file mode 100644
index 000000000..b133c4e74
--- /dev/null
+++ b/tex/context/base/node-nut.lua
@@ -0,0 +1,790 @@
+if not modules then modules = { } end modules ['node-met'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Here starts some more experimental code that Luigi and I use in the next stage of
+-- exploring and testing potential speedups in the engines. This code is not meant
+-- for users and can change (or be removed) at any moment. During the experiments I'll
+-- do my best to keep the code as fast as possible by using two codebases. See
+-- about-fast.pdf for some more info about impacts. Although key based access has
+-- more charm, function based is somewhat faster and has more potential for future
+-- speedups.
+
+-- This next iteration is flagged direct because we avoid user data, which has a price
+-- in allocation and metatable tagging. Although at this stage we pass numbers around,
+-- future versions might use light user data, so never depend on what the direct
+-- functions return. Using the direct approach had some speed advantages but you lose
+-- the key based access. The speed gain is only measurable in cases with lots of access.
+-- For instance when typesetting arabic with advanced fonts, we're talking about many
+-- millions of function calls and there we can get a 30\% or more speedup. On average
+-- complex \CONTEXT\ runs the gain can be 10\% to 15\%. Because mixing the two models
+-- (here we call them nodes and nuts) is not possible, you need to cast either way,
+-- which has a penalty. Also, error messages in nuts mode are less clear and \LUATEX\
+-- will often simply abort when you make mistakes or mix the models. So, development
+-- (at least in \CONTEXT) can be done in node mode and not in nuts mode. Only robust
+-- code will be turned into nuts afterwards, and quite likely not all of it. The
+-- official \LUATEX\ api to nodes is userdata!
+--
+-- Listening to 'lunatic soul' at the same time helped wrap my mind around the mixed
+-- usage of both models. Just for the record: the potential of the direct approach only
+-- became clear after experimenting for weeks and partly adapting code. It is one of those
+-- (sub)projects where you afterwards wonder if it was worth the trouble, but users that
+-- rely on lots of complex functionality and font support will probably notice the speedup.
+--
+-- luatex luajittex
+-- ------------- ----- -------------------- ---------------------------------
+-- name pages old new pct old new pct
+-- ------------- ----- -------------------- ---------------------------------
+-- fonts-mkiv 166 9.3 7.7/7.4 17.2 7.4 (37.5) 5.9/5.7 (55.6) 20.3
+-- about 60 3.3 2.7/2.6 20.4 2.5 (39.5) 2.1 (57.0) 23.4
+-- arabic-001 61 25.3 15.8 18.2 15.3 (46.7) 6.8 (54.7) 16.0
+-- torture-001 300 21.4 11.4 24.2 13.9 (35.0) 6.3 (44.7) 22.2
+--
+-- so:
+--
+-- - we run around 20% faster on documents of average complexity and gain more when
+-- dealing with scripts like arabic and such
+-- - luajittex benefits a bit more so a luajittex job can (in principle) now be much
+-- faster
+-- - if we reason backwards, and take luajittex as norm we get 1:2:3 on some jobs for
+-- luajittex direct:luatex direct:luatex normal i.e. we can be 3 times faster
+-- - keep in mind that these are tex/lua runs so the real gain at the lua end is much
+-- larger
+--
+-- Because we can fake direct mode a little bit by using the fast getfield and setfield
+-- at the cost of wrapped getid and the like, we are still running quite ok. As we could gain
+-- some 5% with fast mode, we can sacrifice some on wrappers when we use a few fast core
+-- functions. This means that simulated direct mode runs font-mkiv in 9.1 seconds (we could
+-- get down to 8.7 seconds in fast mode) and that we can migrate slowly to direct mode.
+--
+-- The following measurements are from 2013-07-05 after adapting some 47 files to nuts. Keep
+-- in mind that the old binary can fake a fast getfield and setfield but that the other
+-- getters are wrapped functions. The more we have, the slower it gets.
+--
+-- fonts about arabic
+-- old mingw, indexed plus some functions : 8.9 3.2 20.3
+-- old mingw, fake functions : 9.9 3.5 27.4
+-- new mingw, node functions : 9.0 3.1 20.8
+-- new mingw, indexed plus some functions : 8.6 3.1 19.6
+-- new mingw, direct functions : 7.5 2.6 14.4
+--
+-- \starttext \dorecurse{1000}{test\page} \stoptext :
+--
+-- luatex 560 pps
+-- luajittex 600 pps
+--
+-- \setupbodyfont[pagella]
+--
+-- \edef\zapf{\cldcontext{context(io.loaddata(resolvers.findfile("zapf.tex")))}}
+--
+-- \starttext \dorecurse{1000}{\zapf\par} \stoptext
+--
+-- luatex 3.9 sec / 54 pps
+-- luajittex 2.3 sec / 93 pps
+
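+-- As an illustration only (not something this module installs), the intended usage
+-- pattern is: cast at the boundaries, use the accessor functions inside, and cast
+-- back before handing the list to code that expects userdata nodes. The helper name
+-- below is made up for the example.
+--
+-- local nuts     = nodes.nuts
+-- local tonut    = nuts.tonut
+-- local tonode   = nuts.tonode
+-- local getid    = nuts.getid
+-- local getnext  = nuts.getnext
+-- local setfield = nuts.setfield
+--
+-- local glyph_code = nodes.nodecodes.glyph
+--
+-- local function reset_offsets(head)        -- head is a userdata node
+--     local current = tonut(head)           -- enter nuts mode once
+--     while current do
+--         if getid(current) == glyph_code then
+--             setfield(current,"xoffset",0) -- field access without metatable overhead
+--             setfield(current,"yoffset",0)
+--         end
+--         current = getnext(current)
+--     end
+--     return tonode(head)                   -- back to node mode at the boundary
+-- end
+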
+local type, rawget = type, rawget
+
+local nodes = nodes
+local gonuts = nodes.gonuts
+local direct = node.direct
+
+local fastcopy = table.fastcopy
+
+if type(direct) ~= "table" then
+ return
+elseif gonuts then
+ statistics.register("running in nuts mode", function() return "yes" end)
+else
+ statistics.register("running in nuts mode", function() return "no" end)
+ return
+end
+
+local texget = tex.get
+
+local nodecodes = nodes.nodecodes
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local nuts = nodes.nuts or { }
+nodes.nuts = nuts
+
+nodes.is_node = direct.is_node or function() return true end
+nodes.is_direct = direct.is_direct or function() return false end
+nodes.is_nut = nodes.is_direct
+
+-- casters
+
+local tonode = direct.tonode or function(n) return n end
+local tonut = direct.todirect or function(n) return n end
+
+nuts.tonode = tonode
+nuts.tonut = tonut
+
+nodes.tonode = tonode
+nodes.tonut = tonut
+
+-- getters
+
+nuts.getfield = direct.getfield
+nuts.getnext = direct.getnext
+nuts.getprev = direct.getprev
+nuts.getid = direct.getid
+nuts.getattr = direct.has_attribute or direct.getfield
+nuts.getchar = direct.getchar
+nuts.getfont = direct.getfont
+nuts.getsubtype = direct.getsubtype
+nuts.getlist = direct.getlist -- only hlist and vlist !
+nuts.getleader = direct.getleader
+
+-- local function track(name)
+-- local n = 0
+-- local f = nuts[name]
+-- function nuts[name](...)
+-- n = n + 1
+-- if n % 1000 == 0 then
+-- print(name,n)
+-- end
+-- return f(...)
+-- end
+-- end
+
+-- track("getsubtype")
+
+-- local dgf = direct.getfield function nuts.getlist(n) return dgf(n,"list") end
+
+-- setters
+
+nuts.setfield = direct.setfield
+nuts.setattr             = direct.set_attribute or direct.setfield
+
+nuts.getbox = direct.getbox
+nuts.setbox = direct.setbox
+nuts.getskip = direct.getskip or function(s) return tonut(texget(s)) end
+
+-- helpers
+
+nuts.tostring = direct.tostring
+nuts.copy = direct.copy
+nuts.copy_list = direct.copy_list
+nuts.delete = direct.delete
+nuts.dimensions = direct.dimensions
+nuts.end_of_math = direct.end_of_math
+nuts.flush_list = direct.flush_list
+nuts.flush_node = direct.flush_node
+nuts.free = direct.free
+nuts.insert_after = direct.insert_after
+nuts.insert_before = direct.insert_before
+nuts.hpack = direct.hpack
+nuts.new = direct.new
+nuts.tail = direct.tail
+nuts.traverse = direct.traverse
+nuts.traverse_id = direct.traverse_id
+nuts.slide = direct.slide
+nuts.writable_spec = direct.writable_spec
+nuts.vpack = direct.vpack
+nuts.is_node = direct.is_node
+nuts.is_direct = direct.is_direct
+nuts.is_nut = direct.is_direct
+nuts.first_glyph = direct.first_glyph
+nuts.first_character = direct.first_character
+nuts.has_glyph = direct.has_glyph or direct.first_glyph
+
+nuts.current_attr = direct.current_attr
+nuts.do_ligature_n = direct.do_ligature_n
+nuts.has_field = direct.has_field
+nuts.last_node = direct.last_node
+nuts.usedlist = direct.usedlist
+nuts.protrusion_skippable = direct.protrusion_skippable
+nuts.write = direct.write
+
+nuts.has_attribute = direct.has_attribute
+nuts.set_attribute = direct.set_attribute
+nuts.unset_attribute = direct.unset_attribute
+
+nuts.protect_glyphs = direct.protect_glyphs
+nuts.unprotect_glyphs = direct.unprotect_glyphs
+
+-- placeholders
+
+if not direct.kerning then
+
+ local n_kerning = node.kerning
+
+ function nuts.kerning(head)
+ return tonode(n_kerning(tonut(head)))
+ end
+
+end
+
+if not direct.ligaturing then
+
+ local n_ligaturing = node.ligaturing
+
+ function nuts.ligaturing(head)
+ return tonode(n_ligaturing(tonut(head)))
+ end
+
+end
+
+if not direct.mlist_to_hlist then
+
+ local n_mlist_to_hlist = node.mlist_to_hlist
+
+ function nuts.mlist_to_hlist(head)
+ return tonode(n_mlist_to_hlist(tonut(head)))
+ end
+
+end
+
+--
+
+local d_remove_node = direct.remove
+local d_free_node = direct.free
+local d_getfield = direct.getfield
+local d_setfield = direct.setfield
+local d_getnext = direct.getnext
+local d_getprev = direct.getprev
+local d_getid = direct.getid
+local d_getlist = direct.getlist
+local d_find_tail = direct.tail
+local d_insert_after = direct.insert_after
+local d_insert_before = direct.insert_before
+local d_slide = direct.slide
+local d_copy_node = direct.copy
+local d_traverse = direct.traverse
+
+local function remove(head,current,free_too)
+ local t = current
+ head, current = d_remove_node(head,current)
+ if not t then
+ -- forget about it
+ elseif free_too then
+ d_free_node(t)
+ t = nil
+ else
+ d_setfield(t,"next",nil) -- not that much needed (slows down unless we check the source on this)
+ d_setfield(t,"prev",nil) -- not that much needed (slows down unless we check the source on this)
+ end
+ return head, current, t
+end
+
+-- bad: we can have prev's being glue_spec
+
+-- local function remove(head,current,free_too) -- d_remove_node does a slide which can fail
+-- local prev = d_getprev(current) -- weird
+-- local next = d_getnext(current)
+-- if next then
+-- -- print("!!!!!!!! prev is gluespec",
+-- -- nodes.nodecodes[d_getid(current)],
+-- -- nodes.nodecodes[d_getid(next)],
+-- -- nodes.nodecodes[d_getid(prev)])
+-- d_setfield(prev,"next",next)
+-- d_setfield(next,"prev",prev)
+-- else
+-- d_setfield(prev,"next",nil)
+-- end
+-- if free_too then
+-- d_free_node(current)
+-- current = nil
+-- else
+-- d_setfield(current,"next",nil) -- use this fact !
+-- d_setfield(current,"prev",nil) -- use this fact !
+-- end
+-- if head == current then
+-- return next, next, current
+-- else
+-- return head, next, current
+-- end
+-- end
+
+nuts.remove = remove
+
+function nuts.delete(head,current)
+ return remove(head,current,true)
+end
+
+function nuts.replace(head,current,new) -- no head returned if false
+ if not new then
+ head, current, new = false, head, current
+ end
+ local prev = d_getprev(current)
+ local next = d_getnext(current)
+ if next then
+ d_setfield(new,"next",next)
+ d_setfield(next,"prev",new)
+ end
+ if prev then
+ d_setfield(new,"prev",prev)
+ d_setfield(prev,"next",new)
+ end
+ if head then
+ if head == current then
+ head = new
+ end
+ d_free_node(current)
+ return head, new
+ else
+ d_free_node(current)
+ return new
+ end
+end
+
+local function count(stack,flat)
+ local n = 0
+ while stack do
+ local id = d_getid(stack)
+        if not flat and (id == hlist_code or id == vlist_code) then
+ local list = d_getlist(stack)
+ if list then
+ n = n + 1 + count(list) -- self counts too
+ else
+ n = n + 1
+ end
+ else
+ n = n + 1
+ end
+ stack = d_getnext(stack)
+ end
+ return n
+end
+
+nuts.count = count
+
+function nuts.append(head,current,...)
+ for i=1,select("#",...) do
+ head, current = d_insert_after(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nuts.prepend(head,current,...)
+ for i=1,select("#",...) do
+ head, current = d_insert_before(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nuts.linked(...)
+ local head, last
+ for i=1,select("#",...) do
+ local next = select(i,...)
+ if next then
+ if head then
+ d_setfield(last,"next",next)
+ d_setfield(next,"prev",last)
+ else
+ head = next
+ end
+ last = d_find_tail(next) -- we could skip the last one
+ end
+ end
+ return head
+end
+
+function nuts.concat(list) -- consider tail instead of slide
+ local head, tail
+ for i=1,#list do
+ local li = list[i]
+ if li then
+ if head then
+ d_setfield(tail,"next",li)
+ d_setfield(li,"prev",tail)
+ else
+ head = li
+ end
+ tail = d_slide(li)
+ end
+ end
+ return head, tail
+end
+
+function nuts.writable_spec(n) -- not pool
+ local spec = d_getfield(n,"spec")
+ if not spec then
+ spec = d_copy_node(glue_spec)
+ d_setfield(n,"spec",spec)
+ elseif not d_getfield(spec,"writable") then
+ spec = d_copy_node(spec)
+ d_setfield(n,"spec",spec)
+ end
+ return spec
+end
+
+function nuts.reference(n)
+ return n or "<none>"
+end
+
+-- quick and dirty tracing of nuts
+
+-- for k, v in next, nuts do
+-- if string.find(k,"box") then
+-- nuts[k] = function(...) print(k,...) return v(...) end
+-- end
+-- end
+
+function nodes.vianuts (f) return function(n,...) return tonode(f(tonut (n),...)) end end
+function nodes.vianodes(f) return function(n,...) return tonut (f(tonode(n),...)) end end
+
+nuts.vianuts = nodes.vianuts
+nuts.vianodes = nodes.vianodes
+
+-- for k, v in next, nuts do
+-- if type(v) == "function" then
+-- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
+-- local f = v
+-- nuts[k] = function(...) print("d",k,...) return f(...) end
+-- end
+-- end
+-- end
+
+-- for k, v in next, nodes do
+-- if type(v) == "function" then
+-- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
+-- local f = v
+-- nodes[k] = function(...) print("n",k,...) return f(...) end
+-- end
+-- end
+-- end
+
+-- function nodes.insert_before(h,c,n)
+-- if c then
+-- if c == h then
+-- n_setfield(n,"next",h)
+-- n_setfield(n,"prev",nil)
+-- n_setfield(h,"prev",n)
+-- else
+-- local cp = n_getprev(c)
+-- n_setfield(n,"next",c)
+-- n_setfield(n,"prev",cp)
+-- if cp then
+-- n_setfield(cp,"next",n)
+-- end
+-- n_setfield(c,"prev",n)
+-- return h, n
+-- end
+-- end
+-- return n, n
+-- end
+
+-- function nodes.insert_after(h,c,n)
+-- if c then
+-- local cn = n_getnext(c)
+-- if cn then
+-- n_setfield(n,"next",cn)
+-- n_setfield(cn,"prev",n)
+-- else
+-- n_setfield(n,"next",nil)
+-- end
+-- n_setfield(c,"next",n)
+-- n_setfield(n,"prev",c)
+-- return h, n
+-- end
+-- return n, n
+-- end
+
+function nodes.insert_list_after(h,c,n)
+ local t = n_tail(n)
+ if c then
+ local cn = n_getnext(c)
+ if cn then
+ n_setfield(t,"next",cn)
+ n_setfield(cn,"prev",t)
+ else
+ n_setfield(t,"next",nil)
+ end
+ n_setfield(c,"next",n)
+ n_setfield(n,"prev",c)
+ return h, n
+ end
+ return n, t
+end
+
+-- function nuts.insert_before(h,c,n)
+-- if c then
+-- if c == h then
+-- d_setfield(n,"next",h)
+-- d_setfield(n,"prev",nil)
+-- d_setfield(h,"prev",n)
+-- else
+-- local cp = d_getprev(c)
+-- d_setfield(n,"next",c)
+-- d_setfield(n,"prev",cp)
+-- if cp then
+-- d_setfield(cp,"next",n)
+-- end
+-- d_setfield(c,"prev",n)
+-- return h, n
+-- end
+-- end
+-- return n, n
+-- end
+
+-- function nuts.insert_after(h,c,n)
+-- if c then
+-- local cn = d_getnext(c)
+-- if cn then
+-- d_setfield(n,"next",cn)
+-- d_setfield(cn,"prev",n)
+-- else
+-- d_setfield(n,"next",nil)
+-- end
+-- d_setfield(c,"next",n)
+-- d_setfield(n,"prev",c)
+-- return h, n
+-- end
+-- return n, n
+-- end
+
+function nuts.insert_list_after(h,c,n)
+ local t = d_tail(n)
+ if c then
+ local cn = d_getnext(c)
+ if cn then
+ d_setfield(t,"next",cn)
+ d_setfield(cn,"prev",t)
+ else
+ d_setfield(t,"next",nil)
+ end
+ d_setfield(c,"next",n)
+ d_setfield(n,"prev",c)
+ return h, n
+ end
+ return n, t
+end
+
+-- test code only
+
+-- collectranges and mix
+
+local report = logs.reporter("sliding")
+
+local function message(detail,head,current,previous)
+ report("error: %s, current: %s:%s, previous: %s:%s, list: %s, text: %s",
+ detail,
+ nodecodes[d_getid(current)],
+ current,
+ nodecodes[d_getid(previous)],
+ previous,
+ nodes.idstostring(head),
+ nodes.listtoutf(head)
+ )
+ utilities.debugger.showtraceback(report)
+end
+
+local function warn()
+ report()
+ report("warning: the slide tracer is enabled")
+ report()
+ warn = false
+end
+
+local function tracedslide(head)
+ if head then
+ if warn then
+ warn()
+ end
+ local next = d_getnext(head)
+ if next then
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ message("unset",head,n,prev)
+ -- break
+ elseif p ~= prev then
+ message("wrong",head,n,prev)
+ -- break
+ end
+ prev = n
+ end
+ end
+ return d_slide(head)
+ end
+end
+
+local function nestedtracedslide(head,level) -- no sliding !
+ if head then
+ if warn then
+ warn()
+ end
+ local id = d_getid(head)
+ local next = d_getnext(head)
+ if next then
+ report("%whead:%s",level or 0,nodecodes[id])
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ message("unset",head,n,prev)
+ -- break
+ elseif p ~= prev then
+ message("wrong",head,n,prev)
+ -- break
+ end
+ prev = n
+ local id = d_getid(n)
+ if id == hlist_code or id == vlist_code then
+ nestedtracedslide(d_getlist(n),(level or 0) + 1)
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ report("%wlist:%s",level or 0,nodecodes[id])
+ nestedtracedslide(d_getlist(head),(level or 0) + 1)
+ end
+ -- return d_slide(head)
+ end
+end
+
+local function untracedslide(head)
+ if head then
+ if warn then
+ warn()
+ end
+ local next = d_getnext(head)
+ if next then
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ return "unset", d_getid(n)
+ elseif p ~= prev then
+ return "wrong", d_getid(n)
+ end
+ prev = n
+ end
+ end
+ return d_slide(head)
+ end
+end
+
+nuts.tracedslide = tracedslide
+nuts.untracedslide = untracedslide
+nuts.nestedtracedslide = nestedtracedslide
+
+-- nuts.slide = tracedslide
+
+-- this might move
+
+local propertydata = direct.get_properties_table and direct.get_properties_table()
+
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+if propertydata then
+
+ nodes.properties = {
+ data = propertydata,
+ }
+
+ -- direct.set_properties_mode(true,false) -- shallow copy ... problem: in fonts we then affect the originals too
+ direct.set_properties_mode(true,true) -- create metatable, slower but needed for font-inj.lua (unless we use an intermediate table)
+
+ -- todo:
+ --
+ -- function direct.set_properties_mode()
+ -- -- we really need the set modes
+ -- end
+
+ -- experimental code with respect to copying attributes has been removed
+    -- as it doesn't pay off (most attributes are only accessed once anyway)
+
+ nuts.getprop = function(n,k)
+ local p = propertydata[n]
+ if p then
+ return p[k]
+ end
+ end
+
+ nuts.setprop = function(n,k,v)
+ if v then
+ local p = propertydata[n]
+ if p then
+ p[k] = v
+ else
+ propertydata[n] = { [k] = v }
+ end
+ end
+ end
+
+ nodes.setprop = nodes.setproperty
+ nodes.getprop = nodes.getproperty
+
+else
+
+ -- for testing and simple cases
+
+ nuts.getprop = getattr
+ nuts.setprop = setattr
+
+    nodes.setprop = setattr
+    nodes.getprop = getattr
+
+end
+
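+-- Purely as an illustration (not part of this module) of the propertydata case
+-- above: properties can carry arbitrary Lua values per node, whereas attributes
+-- are restricted to numbers. The key "linebreak" and its value are invented for
+-- the example.
+--
+-- local n = nuts.new("glyph")
+-- nuts.setprop(n,"linebreak",{ penalty = 50, reason = "test" })
+-- local p = nuts.getprop(n,"linebreak")
+-- if p then
+--     print(p.penalty,p.reason)
+-- end
+-- nuts.free(n)
+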
+function nuts.copy_properties(source,target,what)
+ local newprops = propertydata[source]
+ if not newprops then
+ -- nothing to copy
+ return
+ end
+ if what then
+ -- copy one category
+ newprops = rawget(source,what)
+ if newprops then
+ newprops = fastcopy(newprops)
+ local p = rawget(propertydata,target)
+ if p then
+ p[what] = newprops
+ else
+ propertydata[target] = {
+ [what] = newprops,
+ }
+ end
+ end
+ else
+ -- copy all properties
+ newprops = fastcopy(newprops)
+ propertydata[target] = newprops
+ end
+ return newprops -- for checking
+end
+
+-- a bit special
+
+local getwidth = { }
+local setwidth = { }
+local getdimensions = { }
+local setdimensions = { }
+
+nodes.whatsitters = {
+ getters = { width = getwidth, dimensions = getdimensions },
+ setters = { width = setwidth, dimensions = setdimensions },
+}
+
+-- this might move (in fact forms and images will become nodes)
+
+local function get_width(n,dir)
+ n = tonut(n)
+    return d_getfield(n,"width")
+end
+
+local function get_dimensions(n,dir)
+ n = tonut(n)
+    return d_getfield(n,"width"), d_getfield(n,"height"), d_getfield(n,"depth")
+end
+
+local whatcodes = nodes.whatcodes
+local pdfrefximage_code = whatcodes.pdfrefximage
+local pdfrefxform_code = whatcodes.pdfrefxform
+
+getwidth [pdfrefximage_code] = get_width
+getwidth [pdfrefxform_code ] = get_width
+
+getdimensions[pdfrefximage_code] = get_dimensions
+getdimensions[pdfrefxform_code ] = get_dimensions
+
+
diff --git a/tex/context/base/node-ppt.lua b/tex/context/base/node-ppt.lua
new file mode 100644
index 000000000..5e7abeaae
--- /dev/null
+++ b/tex/context/base/node-ppt.lua
@@ -0,0 +1,476 @@
+if not modules then modules = { } end modules ['node-ppt'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is all very experimental and likely to change.
+
+local next, type, unpack, load = next, type, table.unpack, load
+
+local serialize = table.serialize
+local formatters = string.formatters
+
+local report = logs.reporter("properties")
+local report_setting = logs.reporter("properties","setting")
+local trace_setting = false trackers.register("properties.setting", function(v) trace_setting = v end)
+
+-- report("using experimental properties")
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local flushnode     = nuts.flush_node
+local removenode = nuts.remove
+local traverse = nuts.traverse
+local traverse_id = nuts.traverse_id
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local userdefined_code = whatsitcodes.userdefined
+local localpar_code = whatsitcodes.localpar
+
+local nodepool = nodes.pool
+local new_usernumber = nodepool.usernumber
+
+local nutpool = nuts.pool
+local nut_usernumber = nutpool.usernumber
+
+local variables = interfaces.variables
+local v_before = variables.before
+local v_after = variables.after
+local v_here = variables.here
+
+local cache = { }
+local nofslots = 0
+local property_id = nodepool.userids["property"]
+
+local properties = nodes.properties
+local propertydata = properties.data
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+if not propertydata then
+ return
+end
+
+-- management
+
+local function register(where,data,...)
+ if not data then
+ data = where
+ where = v_after
+ end
+ if data then
+ local data = { where, data, ... }
+ nofslots = nofslots + 1
+ if nofslots > 1 then
+ cache[nofslots] = data
+ else
+ -- report("restarting attacher")
+ cache = { data } -- also forces collection
+ end
+ return new_usernumber(property_id,nofslots)
+ end
+end
+
+local writenode = node.write
+local flushnode = context.flushnode
+
+function commands.deferredproperty(...)
+-- context(register(...))
+ flushnode(register(...))
+end
+
+
+function commands.immediateproperty(...)
+ writenode(register(...))
+end
+
+commands.attachproperty = commands.deferredproperty
+
+local actions = { } properties.actions = actions
+
+table.setmetatableindex(actions,function(t,k)
+ report("unknown property action %a",k)
+ local v = function() end
+ return v
+end)
+
+local f_delayed = formatters["return function(target,head,where,propdata,parent) %s end"]
+local f_immediate = formatters["return function(target,head,where,propdata) %s end"]
+
+local nofdelayed = 0 -- better is to keep track of it per page ... we can have deleted nodes with properties
+
+function actions.delayed(target,head,where,propdata,code,...) -- this one is used at the tex end
+-- local kind = type(code)
+-- if kind == "string" then
+-- code, err = load(f_delayed(code))
+-- if code then
+-- code = code()
+-- end
+-- elseif kind ~= "function" then
+-- code = nil
+-- end
+ if code then
+ local delayed = propdata.delayed
+ if delayed then
+ delayed[#delayed+1] = { where, code, ... }
+ else
+ propdata.delayed = { { where, code, ... } }
+ nofdelayed = nofdelayed + 1
+ end
+ end
+end
+
+function actions.fdelayed(target,head,where,propdata,code,...) -- this one is used at the tex end
+-- local kind = type(code)
+-- if kind == "string" then
+-- code, err = load(f_delayed(code))
+-- if code then
+-- code = code()
+-- end
+-- elseif kind ~= "function" then
+-- code = nil
+-- end
+ if code then
+ local delayed = propdata.delayed
+ if delayed then
+ delayed[#delayed+1] = { false, code, ... }
+ else
+ propdata.delayed = { { false, code, ... } }
+ nofdelayed = nofdelayed + 1
+ end
+ end
+end
+
+function actions.immediate(target,head,where,propdata,code,...) -- this one is used at the tex end
+ local kind = type(code)
+ if kind == "string" then
+ local f = f_immediate(code)
+ local okay, err = load(f)
+ if okay then
+ local h = okay()(target,head,where,propdata,...)
+ if h and h ~= head then
+ return h
+ end
+ end
+ elseif kind == "function" then
+ local h = code()(target,head,where,propdata,...)
+ if h and h ~= head then
+ return h
+ end
+ end
+end
+
+-- another experiment (a table or a function closure is equally efficient); a function
+-- is easier when we want to experiment with different (compatible) implementations
+
+-- function nodes.nuts.pool.deferredfunction(...)
+-- nofdelayed = nofdelayed + 1
+-- local n = nut_usernumber(property_id,0)
+-- propertydata[n] = { deferred = { ... } }
+-- return n
+-- end
+
+-- function nodes.nuts.pool.deferredfunction(f)
+-- nofdelayed = nofdelayed + 1
+-- local n = nut_usernumber(property_id,0)
+-- propertydata[n] = { deferred = f }
+-- return n
+-- end
+
+-- maybe actions will get parent too
+
+local function delayed(head,parent) -- direct based
+ for target in traverse(head) do
+ local p = propertydata[target]
+ if p then
+ -- local deferred = p.deferred -- kind of late lua (but too soon as we have no access to pdf.h/v)
+ -- if deferred then
+ -- -- if #deferred > 0 then
+ -- -- deferred[1](unpack(deferred,2))
+ -- -- else
+ -- -- deferred[1]()
+ -- -- end
+ -- deferred()
+ -- p.deferred = false
+ -- if nofdelayed == 1 then
+ -- nofdelayed = 0
+ -- return head
+ -- else
+ -- nofdelayed = nofdelayed - 1
+ -- end
+ -- else
+ local delayed = p.delayed
+ if delayed then
+ for i=1,#delayed do
+ local d = delayed[i]
+ local code = d[2]
+ local kind = type(code)
+ if kind == "string" then
+ code, err = load(f_delayed(code))
+ if code then
+ code = code()
+ end
+ end
+ local where = d[1]
+ if where then
+ local h = code(target,where,head,p,parent,unpack(d,3)) -- target where propdata head parent
+ if h and h ~= head then
+ head = h
+ end
+ else
+ code(unpack(d,3))
+ end
+ end
+ p.delayed = nil
+ if nofdelayed == 1 then
+ nofdelayed = 0
+ return head
+ else
+ nofdelayed = nofdelayed - 1
+ end
+ end
+ -- end
+ end
+ local id = getid(target)
+ if id == hlist_code or id == vlist_code then
+ local list = getlist(target)
+ if list then
+ local done = delayed(list,parent)
+ if done then
+ setfield(target,"list",done)
+ end
+ if nofdelayed == 0 then
+ return head
+ end
+ end
+ else
+ -- maybe also some more lists? but we will only use this for some
+ -- special cases .. who knows
+ end
+ end
+ return head
+end
+
+function properties.delayed(head) --
+ if nofdelayed > 0 then
+ -- if next(propertydata) then
+ starttiming(properties)
+ head = delayed(tonut(head))
+ stoptiming(properties)
+ return tonode(head), true -- done in shipout anyway
+ -- else
+ -- delayed = 0
+ -- end
+ end
+ return head, false
+end
+
+-- more explicit ones too
+
+local anchored = {
+ [v_before] = function(n)
+ while n do
+ n = getprev(n)
+            if getid(n) == whatsit_code and getsubtype(n) == userdefined_code and getfield(n,"user_id") == property_id then
+ -- continue
+ else
+ return n
+ end
+ end
+ end,
+ [v_after] = function(n)
+ while n do
+ n = getnext(n)
+ if getid(n) == whatsit_code then
+ local subtype = getsubtype(n)
+ if (subtype == userdefined_code and getfield(n,"user_id") == property_id) then
+ -- continue
+ elseif subtype == localpar_code then
+ -- continue .. can't happen anyway as we cannot write
+ else
+ return n
+ end
+ else
+ return n
+ end
+ end
+ end,
+ [v_here] = function(n)
+ -- todo
+ end,
+}
+
+table.setmetatableindex(anchored,function(t,k)
+    local v = anchored[v_after]
+ t[k] = v
+ return v
+end)
+
+function properties.attach(head)
+
+ if nofslots <= 0 then
+ return head, false
+ end
+
+ local done = false
+ local last = nil
+ local head = tonut(head)
+
+ starttiming(properties)
+
+ for source in traverse_id(whatsit_code,head) do
+ if getsubtype(source) == userdefined_code then
+ if last then
+ removenode(head,last,true)
+ last = nil
+ end
+ if getfield(source,"user_id") == property_id then
+ local slot = getfield(source,"value")
+ local data = cache[slot]
+ if data then
+ cache[slot] = nil
+ local where = data[1]
+ local target = anchored[where](source)
+ if target then
+ local first = data[2]
+ local method = type(first)
+ local p_target = propertydata[target]
+ local p_source = propertydata[source]
+ if p_target then
+ if p_source then
+ for k, v in next, p_source do
+ p_target[k] = v
+ end
+ end
+ if method == "table" then
+ for k, v in next, first do
+ p_target[k] = v
+ end
+ elseif method == "function" then
+ first(target,head,where,p_target,unpack(data,3))
+ elseif method == "string" then
+ actions[first](target,head,where,p_target,unpack(data,3))
+ end
+ elseif p_source then
+ if method == "table" then
+ propertydata[target] = p_source
+ for k, v in next, first do
+ p_source[k] = v
+ end
+ elseif method == "function" then
+ propertydata[target] = p_source
+ first(target,head,where,p_source,unpack(data,3))
+ elseif method == "string" then
+ propertydata[target] = p_source
+ actions[first](target,head,where,p_source,unpack(data,3))
+ end
+ else
+ if method == "table" then
+ propertydata[target] = first
+ elseif method == "function" then
+ local t = { }
+ propertydata[target] = t
+ first(target,head,where,t,unpack(data,3))
+ elseif method == "string" then
+ local t = { }
+ propertydata[target] = t
+ actions[first](target,head,where,t,unpack(data,3))
+ end
+ end
+ if trace_setting then
+ report_setting("node %i, id %s, data %s",
+ target,nodecodes[getid(target)],serialize(propertydata[target],false))
+ end
+ end
+ if nofslots == 1 then
+ nofslots = 0
+ last = source
+ break
+ else
+ nofslots = nofslots - 1
+ end
+ end
+ last = source
+ end
+ end
+ end
+
+ if last then
+ removenode(head,last,true)
+ end
+
+ stoptiming(properties)
+
+ return head, done
+
+end
+
+local tasks = nodes.tasks
+
+-- maybe better hard-coded in place
+
+-- tasks.prependaction("processors","before","nodes.properties.attach")
+-- tasks.appendaction("shipouts","normalizers","nodes.properties.delayed")
+
+statistics.register("properties processing time", function()
+ return statistics.elapsedseconds(properties)
+end)
+
+-- only for development
+
+-- local function show(head,level,report)
+-- for target in traverse(head) do
+-- local p = propertydata[target]
+-- if p then
+-- report("level %i, node %i, id %s, data %s",
+-- level,target,nodecodes[getid(target)],serialize(propertydata[target],false))
+-- end
+-- local id = getid(target)
+-- if id == hlist_code or id == vlist_code then
+-- local list = getlist(target)
+-- if list then
+-- show(list,level+1,report)
+-- end
+-- else
+-- -- maybe more lists
+-- end
+-- end
+-- return head, false
+-- end
+--
+-- local report_shipout = logs.reporter("properties","shipout")
+-- local report_processors = logs.reporter("properties","processors")
+--
+-- function properties.showshipout (head) return tonode(show(tonut(head),1,report_shipout )), true end
+-- function properties.showprocessors(head) return tonode(show(tonut(head),1,report_processors)), true end
+--
+-- tasks.prependaction("shipouts","before","nodes.properties.showshipout")
+-- tasks.disableaction("shipouts","nodes.properties.showshipout")
+--
+-- trackers.register("properties.shipout",function(v)
+-- tasks.setaction("shipouts","nodes.properties.showshipout",v)
+-- end)
+--
+-- tasks.appendaction ("processors","after","nodes.properties.showprocessors")
+-- tasks.disableaction("processors","nodes.properties.showprocessors")
+--
+-- trackers.register("properties.processors",function(v)
+-- tasks.setaction("processors","nodes.properties.showprocessors",v)
+-- end)
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index aa6692d7b..27e349893 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -13,15 +13,15 @@ local trace_callbacks = false trackers.register("nodes.callbacks", function(v)
local report_nodes = logs.reporter("nodes","processors")
-local nodes, node = nodes, node
+local nodes = nodes
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local tasks = nodes.tasks
+local nuts = nodes.nuts
-local free_node = node.free
-local first_glyph = node.first_glyph or node.first_character
-local has_attribute = node.has_attribute
+local first_glyph = nodes.first_glyph
+local has_glyph = nodes.has_glyph
nodes.processors = nodes.processors or { }
local processors = nodes.processors
@@ -31,43 +31,53 @@ local processors = nodes.processors
local actions = tasks.actions("processors")
-local n = 0
+do
-local function reconstruct(head) -- we probably have a better one
- local t, n, h = { }, 0, head
- while h do
+ local tonut = nuts.tonut
+ local getid = nuts.getid
+ local getchar = nuts.getchar
+ local getnext = nuts.getnext
+
+ local n = 0
+
+ local function reconstruct(head) -- we probably have a better one
+ local t, n, h = { }, 0, tonut(head)
+ while h do
+ n = n + 1
+ local id = getid(h)
+ if id == glyph_code then -- todo: disc etc
+ t[n] = utfchar(getchar(h))
+ else
+ t[n] = "[]"
+ end
+ h = getnext(h)
+ end
+ return concat(t)
+ end
+
+ function processors.tracer(what,state,head,groupcode,before,after,show)
+ if not groupcode then
+ groupcode = "unknown"
+ elseif groupcode == "" then
+ groupcode = "mvl"
+ end
n = n + 1
- local id = h.id
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(h.char)
+ if show then
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
else
- t[n] = "[]"
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
end
- h = h.next
end
- return concat(t)
-end
-local function tracer(what,state,head,groupcode,before,after,show)
- if not groupcode then
- groupcode = "unknown"
- elseif groupcode == "" then
- groupcode = "mvl"
- end
- n = n + 1
- if show then
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
- else
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
- end
end
-processors.tracer = tracer
+local tracer = processors.tracer
processors.enabled = true -- this will become a proper state (like trackers)
function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction
- local first, found = first_glyph(head) -- they really need to be glyphs
+ -- local first, found = first_glyph(head) -- they really need to be glyphs
+ local found = has_glyph(head)
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -94,10 +104,8 @@ local enabled = true
function processors.hpack_filter(head,groupcode,size,packtype,direction)
if enabled then
- -- if not head.next and head.id ~= glyph_code then -- happens often but not faster
- -- return true
- -- end
- local first, found = first_glyph(head) -- they really need to be glyphs
+ -- local first, found = first_glyph(head) -- they really need to be glyphs
+ local found = has_glyph(head)
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -121,15 +129,36 @@ function processors.hpack_filter(head,groupcode,size,packtype,direction)
return true
end
-local hpack = node.hpack
+do
+
+ local setfield = nodes.setfield
+ local hpack = nodes.hpack
+
+ function nodes.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ setfield(hp,"prev",nil)
+ setfield(hp,"next",nil)
+ enabled = true
+ return hp, b
+ end
+
+end
+
+do
+
+ local setfield = nuts.setfield
+ local hpack = nuts.hpack
+
+ function nuts.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ setfield(hp,"prev",nil)
+ setfield(hp,"next",nil)
+ enabled = true
+ return hp, b
+ end
-function nodes.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- hp.prev = nil
- hp.next = nil
- enabled = true
- return hp, b
end
callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index aa864fb1c..97c37c74e 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -16,12 +16,13 @@ if not modules then modules = { } end modules ['node-ref'] = {
-- is grouplevel still used?
+local concat = table.concat
+
local attributes, nodes, node = attributes, nodes, node
local allocate = utilities.storage.allocate, utilities.storage.mark
local mark = utilities.storage.allocate, utilities.storage.mark
-
local nodeinjections = backends.nodeinjections
local codeinjections = backends.codeinjections
@@ -33,17 +34,38 @@ local colors = attributes.colors
local references = structures.references
local tasks = nodes.tasks
-local hpack_list = node.hpack
-local list_dimensions = node.dimensions
-
-local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
-local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+local trace_areas = false trackers.register("nodes.areas", function(v) trace_areas = v end)
+local show_references = false trackers.register("nodes.references.show", function(v) show_references = tonumber(v) or (v and 2.25 or false) end)
+local show_destinations = false trackers.register("nodes.destinations.show", function(v) show_destinations = tonumber(v) or (v and 2.00 or false) end)
local report_reference = logs.reporter("backend","references")
local report_destination = logs.reporter("backend","destinations")
local report_area = logs.reporter("backend","areas")
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local hpack_list = nuts.hpack
+local vpack_list = nuts.vpack
+local list_dimensions = nuts.dimensions
+local traverse = nuts.traverse
+local find_node_tail = nuts.tail
+
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
local whatcodes = nodes.whatcodes
@@ -52,6 +74,8 @@ local listcodes = nodes.listcodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glue_code = nodecodes.glue
+local glyph_code = nodecodes.glyph
+local rule_code = nodecodes.rule
local whatsit_code = nodecodes.whatsit
local leftskip_code = skipcodes.leftskip
@@ -63,75 +87,150 @@ local dir_code = whatcodes.dir
local line_code = listcodes.line
-local nodepool = nodes.pool
-
+local new_rule = nodepool.rule
local new_kern = nodepool.kern
-local traverse = node.traverse
-local find_node_tail = node.tail or node.slide
+local free_node = nuts.free
+
local tosequence = nodes.tosequence
--- local function dimensions(parent,start,stop)
--- stop = stop and stop.next
--- if parent then
--- if stop then
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
--- else
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
--- end
--- else
--- if stop then
--- return list_dimensions(start,stop)
--- else
--- return list_dimensions(start)
--- end
--- end
--- end
---
--- -- more compact
+local implement = interfaces.implement
+
+-- Normally a (destination) area is a box or a simple stretch of nodes, but when it
+-- is a paragraph we have a problem: we cannot calculate the height well. This happens
+-- with footnotes or content broken across a page.
-local function dimensions(parent,start,stop)
+local function vlist_dimensions(start,stop)
+ local temp
+ if stop then
+ temp = getnext(stop)
+ setfield(stop,"next",nil)
+ end
+ local v = vpack_list(start)
+ local w = getfield(v,"width")
+ local h = getfield(v,"height")
+ local d = getfield(v,"depth")
+ setfield(v,"list",nil)
+ free_node(v)
+ if temp then
+ setfield(stop,"next",temp)
+ end
+ return w, h, d
+end
+
+local function hlist_dimensions(start,stop,parent)
+ local last = stop and getnext(stop)
if parent then
- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
+ return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,last)
else
- return list_dimensions(start,stop and stop.next)
+ return list_dimensions(start,last)
+ end
+end
+
+local function dimensions(parent,start,stop) -- in principle we could move some to the caller
+ local id = getid(start)
+ if start == stop then
+ if id == hlist_code or id == vlist_code or id == glyph_code or id == rule_code then -- or image
+ if trace_areas then
+ report_area("dimensions taken of %a",nodecodes[id])
+ end
+ return getfield(start,"width"), getfield(parent,"height"), getfield(parent,"depth")
+ else
+ if trace_areas then
+ report_area("dimensions calculated of %a",nodecodes[id])
+ end
+ return hlist_dimensions(start,stop) -- one node only so simple
+ end
+ end
+ local last = stop and getnext(stop)
+ if parent then
+ -- todo: if no prev and no next and parent
+ -- todo: we need a list_dimensions for a vlist
+ if getid(parent) == vlist_code then
+ local l = getlist(parent)
+ local c = l
+ local ok = false
+ while c do
+ if c == start then
+ ok = true
+ end
+ if ok and getid(c) == hlist_code then
+ break
+ else
+ c = getnext(c)
+ end
+ end
+ if ok and c then
+ if trace_areas then
+ report_area("dimensions taken of first line in vlist")
+ end
+ return getfield(c,"width"), getfield(c,"height"), getfield(c,"depth"), c
+ else
+ if trace_areas then
+ report_area("dimensions taken of vlist (probably wrong)")
+ end
+ return hlist_dimensions(start,stop,parent)
+ end
+ else
+ if trace_areas then
+ report_area("dimensions taken of range starting with %a using parent",nodecodes[id])
+ end
+ return hlist_dimensions(start,stop,parent)
+ end
+ else
+ if trace_areas then
+ report_area("dimensions taken of range starting with %a",nodecodes[id])
+ end
+ return hlist_dimensions(start,stop)
end
end
-- is pardir important at all?
local function inject_range(head,first,last,reference,make,stack,parent,pardir,txtdir)
- local width, height, depth = dimensions(parent,first,last)
+ local width, height, depth, line = dimensions(parent,first,last)
if txtdir == "+TRT" or (txtdir == "===" and pardir == "TRT") then -- KH: textdir == "===" test added
width = - width
end
local result, resolved = make(width,height,depth,reference)
if result and resolved then
- if head == first then
- if trace_backend then
- report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ if line then
+ -- special case, we only treat the first line in a vlist
+ local l = getlist(line)
+ if trace_areas then
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","line",
+ reference,pardir or "---",txtdir or "---",tosequence(l,nil,true),width,height,depth,resolved)
end
- result.next = first
- first.prev = result
- return result, last
+ setfield(line,"list",result)
+ setfield(result,"next",l)
+ setfield(l,"prev",result)
+ return head, last
else
- if trace_backend then
- report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
- end
- local prev = first.prev
- if prev then
- result.next = first
- result.prev = prev
- prev.next = result
- first.prev = result
+ if head == first then
+ if trace_areas then
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","head",
+ reference,pardir or "---",txtdir or "---",tosequence(first,last,true),width,height,depth,resolved)
+ end
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ return result, last
else
- result.next = first
- first.prev = result
- end
- if first == head.next then
- head.next = result -- hm, weird
+ if trace_areas then
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","middle",
+ reference,pardir or "---",txtdir or "---",tosequence(first,last,true),width,height,depth,resolved)
+ end
+ local prev = getprev(first)
+ if prev then
+ setfield(prev,"next",result)
+ setfield(result,"prev",prev)
+ end
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ -- if first == getnext(head) then
+ -- setfield(head,"next",result) -- hm, weird
+ -- end
+ return head, last
end
- return head, last
end
else
return head, last
@@ -139,9 +238,12 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
end
local function inject_list(id,current,reference,make,stack,pardir,txtdir)
- local width, height, depth, correction = current.width, current.height, current.depth, 0
- local moveright = false
- local first = current.list
+ local width = getfield(current,"width")
+ local height = getfield(current,"height")
+ local depth = getfield(current,"depth")
+ local correction = 0
+ local moveright = false
+ local first = getlist(current)
if id == hlist_code then -- box_code line_code
-- can be either an explicit hbox or a line and there is no way
-- to recognize this; anyway only if ht/dp (then inline)
@@ -149,17 +251,17 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
if first then
if sr and sr[2] then
local last = find_node_tail(first)
- if last.id == glue_code and last.subtype == rightskip_code then
- local prev = last.prev
- moveright = first.id == glue_code and first.subtype == leftskip_code
- if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
- width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
+ if getid(last) == glue_code and getsubtype(last) == rightskip_code then
+ local prev = getprev(last)
+ moveright = getid(first) == glue_code and getsubtype(first) == leftskip_code
+ if prev and getid(prev) == glue_code and getsubtype(prev) == parfillskip_code then
+ width = dimensions(current,first,getprev(prev)) -- maybe not current as we already take care of it
else
- if moveright and first.writable then
- width = width - first.spec.stretch*current.glue_set * current.glue_sign
+ if moveright and getfield(first,"writable") then
+ width = width - getfield(getfield(first,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
end
- if last.writable then
- width = width - last.spec.stretch*current.glue_set * current.glue_sign
+ if getfield(last,"writable") then
+ width = width - getfield(getfield(last,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
end
end
end
@@ -180,23 +282,26 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
local result, resolved = make(width,height,depth,reference)
-- todo: only when width is ok
if result and resolved then
- if trace_backend then
- report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
+ if trace_areas then
+ report_area("%s: %04i %s %s %s: w=%p, h=%p, d=%p, c=%S","box",
+ reference,pardir or "---",txtdir or "----","[]",width,height,depth,resolved)
end
if not first then
- current.list = result
+ setfield(current,"list",result)
elseif moveright then -- brr no prevs done
-- result after first
- local n = first.next
- result.next = n
- first.next = result
- result.prev = first
- if n then n.prev = result end
+ local n = getnext(first)
+ setfield(result,"next",n)
+ setfield(first,"next",result)
+ setfield(result,"prev",first)
+ if n then
+ setfield(n,"prev",result)
+ end
else
-- first after result
- result.next = first
- first.prev = result
- current.list = result
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ setfield(current,"list",result)
end
end
end
@@ -209,45 +314,61 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
pardir = pardir or "==="
txtdir = txtdir or "==="
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local r = current[attribute]
- -- somehow reference is true so the following fails (second one not done) in
- -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
- -- so let's wait till this fails again
- -- if not reference and r and (not skip or r > skip) then -- > or ~=
- if r and (not skip or r > skip) then -- > or ~=
- inject_list(id,current,r,make,stack,pardir,txtdir)
- end
+
+ -- see dimensions: this is tricky with split off boxes like inserts
+ -- where we can end up with a first and last spanning lines
+
+ local r = getattr(current,attribute)
+ -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+ -- test \goto{\TeX}[page(2)] test \gotobox{\hbox {x} \hbox {x}}[page(2)]
+ -- if r and (not skip or r > skip) then -- maybe no > test
+ -- inject_list(id,current,r,make,stack,pardir,txtdir)
+ -- end
if r then
+ if not reference then
+ reference, first, last, firstdir = r, current, current, txtdir
+ elseif r == reference then
+ -- same link
+ last = current
+ elseif (done[reference] or 0) == 0 then
+ if not skip or r > skip then -- maybe no > test
+ head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ reference, first, last, firstdir = nil, nil, nil, nil
+ end
+ else
+ reference, first, last, firstdir = r, current, current, txtdir
+ end
done[r] = (done[r] or 0) + 1
end
- local list = current.list
+ local list = getlist(current)
if list then
- local _
- current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ local h, ok
+ h, ok , pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ setfield(current,"list",h)
end
if r then
done[r] = done[r] - 1
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- pardir = current.dir
+ pardir = getfield(current,"dir")
elseif subtype == dir_code then
- txtdir = current.dir
+ txtdir = getfield(current,"dir")
end
- elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
--
else
- local r = current[attribute]
+ local r = getattr(current,attribute)
if not r then
-- just go on, can be kerns
elseif not reference then
reference, first, last, firstdir = r, current, current, txtdir
elseif r == reference then
last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
if not skip or r > skip then -- maybe no > test
head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
reference, first, last, firstdir = nil, nil, nil, nil
@@ -256,7 +377,7 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
reference, first, last, firstdir = r, current, current, txtdir
end
end
- current = current.next
+ current = getnext(current)
end
if reference and (done[reference] or 0) == 0 then
head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
@@ -271,45 +392,39 @@ local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir)
txtdir = txtdir or "==="
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local r = current[attribute]
+ local r = getattr(current,attribute)
if r and not done[r] then
done[r] = true
inject_list(id,current,r,make,stack,pardir,txtdir)
end
- local list = current.list
+ local list = getlist(current)
if list then
- current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
+ setfield(current,"list",(inject_area(list,attribute,make,stack,done,current,pardir,txtdir)))
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- pardir = current.dir
+ pardir = getfield(current,"dir")
elseif subtype == dir_code then
- txtdir = current.dir
+ txtdir = getfield(current,"dir")
end
else
- local r = current[attribute]
+ local r = getattr(current,attribute)
if r and not done[r] then
done[r] = true
head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
end
- current = current.next
+ current = getnext(current)
end
end
return head, true
end
--- tracing
+-- tracing: todo: use predefined colors
-local nodepool = nodes.pool
-
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-
-local set_attribute = node.set_attribute
local register_color = colors.register
local a_color = attributes.private('color')
@@ -319,7 +434,32 @@ local u_transparency = nil
local u_colors = { }
local force_gray = true
-local function colorize(width,height,depth,n,reference,what)
+local function addstring(what,str,shift) -- todo: make a pluggable helper (in font-ctx)
+ if str then
+ local typesetters = nuts.typesetters
+ if typesetters then
+ local hashes = fonts.hashes
+ local infofont = fonts.infofont()
+ local emwidth = hashes.emwidths [infofont]
+ local exheight = hashes.exheights[infofont]
+ if what == "reference" then
+ str = str .. " "
+ shift = - (shift or 2.25) * exheight
+ else
+ str = str .. " "
+ shift = (shift or 2) * exheight
+ end
+ local text = typesetters.fast_hpack(str,infofont)
+ local rule = new_rule(emwidth/5,4*exheight,3*exheight)
+ setfield(text,"shift",shift)
+ return nuts.fasthpack(nuts.linked(text,rule))
+ -- local text = typesetters.fast_hpack(str,fonts.infofont())
+ -- return text
+ end
+ end
+end
+
+local function colorize(width,height,depth,n,reference,what,sr,offset)
if force_gray then n = 0 end
u_transparency = u_transparency or transparencies.register(nil,2,.65)
local ucolor = u_colors[n]
@@ -346,25 +486,49 @@ local function colorize(width,height,depth,n,reference,what)
height = 65536/2
depth = height
end
- local rule = new_rule(width,height,depth)
- rule[a_colormodel] = 1 -- gray color model
- rule[a_color] = u_color
- rule[a_transparency] = u_transparency
+ local rule = new_rule(width,height,depth) -- todo: use tracer rule
+ setattr(rule,a_colormodel,1) -- gray color model
+ setattr(rule,a_color,u_color)
+ setattr(rule,a_transparency,u_transparency)
if width < 0 then
local kern = new_kern(width)
- rule.width = -width
- kern.next = rule
- rule.prev = kern
+ setfield(rule,"width",-width)
+ setfield(kern,"next",rule)
+ setfield(rule,"prev",kern)
return kern
else
+
+if sr and sr ~= "" then
+ local text = addstring(what,sr,shift)
+ if text then
+ local kern = new_kern(-getfield(text,"width"))
+ setfield(kern,"next",text)
+ setfield(text,"prev",kern)
+ setfield(text,"next",rule)
+ setfield(rule,"prev",text)
+ return kern
+ end
+end
+
return rule
end
end
--- references:
+local function justadd(what,sr,shift)
+ if sr and sr ~= "" then
+ local text = addstring(what,sr,shift)
+ if text then
+ local kern = new_kern(-getfield(text,"width"))
+ setfield(kern,"next",text)
+ setfield(text,"prev",kern)
+ setfield(text,"next",rule)
+ setfield(rule,"prev",text)
+ return kern
+ end
+ end
+end
-local nodepool = nodes.pool
-local new_kern = nodepool.kern
+-- references:
local texsetattribute = tex.setattribute
local texsetcount = tex.setcount
@@ -397,35 +561,64 @@ function references.get(n) -- not public so functionality can change
return sn and sn[1]
end
-local function makereference(width,height,depth,reference)
+local function makereference(width,height,depth,reference) -- height and depth are of parent
local sr = stack[reference]
if sr then
if trace_references then
report_reference("resolving attribute %a",reference)
end
local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5]
+ -- logs.report("temp","child: ht=%p dp=%p, parent: ht=%p dp=%p",ht,dp,height,depth)
if ht then
if height < ht then height = ht end
if depth < dp then depth = dp end
end
+ -- logs.report("temp","used: ht=%p dp=%p",height,depth)
local annot = nodeinjections.reference(width,height,depth,set)
if annot then
+ annot = tonut(annot) -- todo
nofreferences = nofreferences + 1
- local result, current
+ local result, current, texts
+ if show_references then
+ local d = sr[1]
+ if d then
+ local r = d.reference
+ local p = d.prefix
+ if r then
+ if p then
+ texts = p .. "|" .. r
+ else
+ texts = r
+ end
+ else
+ -- t[#t+1] = d.internal or "?"
+ end
+ end
+ end
if trace_references then
local step = 65536
- result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see seperate links
- result.width = 0
+ result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference",texts,show_references)) -- step subtracted so that we can see separate links
+ setfield(result,"width",0)
current = result
+ elseif texts then
+ texts = justadd("reference",texts,show_references)
+ if texts then
+ result = hpack_list(texts)
+ setfield(result,"width",0)
+ current = result
+ end
end
if current then
- current.next = annot
+ setfield(current,"next",annot)
+ setfield(annot,"prev",current)
else
result = annot
end
references.registerpage(n)
result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
+ setfield(result,"width",0)
+ setfield(result,"height",0)
+ setfield(result,"depth",0)
if cleanupreferences then stack[reference] = nil end
return result, resolved
elseif trace_references then
@@ -436,9 +629,19 @@ local function makereference(width,height,depth,reference)
end
end
+-- function nodes.references.handler(head)
+-- if topofstack > 0 then
+-- return inject_areas(head,attribute,makereference,stack,done)
+-- else
+-- return head, false
+-- end
+-- end
+
function nodes.references.handler(head)
if topofstack > 0 then
- return inject_areas(head,attribute,makereference,stack,done)
+ head = tonut(head)
+ local head, done = inject_areas(head,attribute,makereference,stack,done)
+ return tonode(head), done
else
return head, false
end
@@ -470,49 +673,86 @@ local function makedestination(width,height,depth,reference)
if trace_destinations then
report_destination("resolving attribute %a",reference)
end
- local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5]
+ local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5] -- sr[4] will change to just internal
if ht then
if height < ht then height = ht end
if depth < dp then depth = dp end
end
- local result, current
+ local result, current, texts
+ if show_destinations then
+ if name and #name > 0 then
+ local t = { }
+ for i=1,#name do
+ local s = name[i]
+ if type(s) == "number" then
+ local d = references.internals[s]
+ if d then
+ d = d.references
+ local r = d.reference
+ local p = d.usedprefix
+ if r then
+ if p then
+ t[#t+1] = p .. "|" .. r
+ else
+ t[#t+1] = r
+ end
+ else
+ -- t[#t+1] = d.internal or "?"
+ end
+ end
+ else
+ -- in fact we have a prefix:name here
+ end
+ end
+ if #t > 0 then
+ texts = concat(t," & ")
+ end
+ end
+ end
if trace_destinations then
local step = 0
if width == 0 then
step = 4*65536
width, height, depth = 5*step, 5*step, 0
end
- for n=1,#name do
- local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
- rule.width = 0
- if not result then
- result, current = rule, rule
- else
- current.next = rule
- rule.prev = current
- current = rule
+ local rule = hpack_list(colorize(width,height,depth,3,reference,"destination",texts,show_destinations))
+ setfield(rule,"width",0)
+ if not result then
+ result, current = rule, rule
+ else
+ setfield(current,"next",rule)
+ setfield(rule,"prev",current)
+ current = rule
+ end
+ width, height = width - step, height - step
+ elseif texts then
+ texts = justadd("destination",texts,show_destinations)
+ if texts then
+ result = hpack_list(texts)
+ if result then
+ setfield(result,"width",0)
+ current = result
end
- width, height = width - step, height - step
end
end
nofdestinations = nofdestinations + 1
- for n=1,#name do
- local annot = nodeinjections.destination(width,height,depth,name[n],view)
- if annot then
- -- probably duplicate
- if not result then
- result = annot
- else
- current.next = annot
- annot.prev = current
- end
- current = find_node_tail(annot)
+ local annot = nodeinjections.destination(width,height,depth,name,view)
+ if annot then
+ annot = tonut(annot) -- obsolete soon
+ if result then
+ setfield(current,"next",annot)
+ setfield(annot,"prev",current)
+ else
+ result = annot
end
+ current = find_node_tail(annot)
end
if result then
-- some internal error
result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
+ setfield(result,"width",0)
+ setfield(result,"height",0)
+ setfield(result,"depth",0)
end
if cleanupdestinations then stack[reference] = nil end
return result, resolved
@@ -521,28 +761,42 @@ local function makedestination(width,height,depth,reference)
end
end
+-- function nodes.destinations.handler(head)
+-- if topofstack > 0 then
+-- return inject_area(head,attribute,makedestination,stack,done) -- singular
+-- else
+-- return head, false
+-- end
+-- end
+
function nodes.destinations.handler(head)
if topofstack > 0 then
- return inject_area(head,attribute,makedestination,stack,done) -- singular
+ head = tonut(head)
+ local head, done = inject_areas(head,attribute,makedestination,stack,done)
+ return tonode(head), done
else
return head, false
end
end
+
-- will move
function references.mark(reference,h,d,view)
return setdestination(tex.currentgrouplevel,h,d,reference,view)
end
-function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- todo: use currentreference is possible
+function references.inject(prefix,reference,specification) -- todo: use currentreference if possible
+-- print(prefix,reference,h,d,highlight,newwindow,layer)
local set, bug = references.identify(prefix,reference)
if bug or #set == 0 then
-- unknown ref, just don't set it and issue an error
else
-- check
- set.highlight, set.newwindow, set.layer = highlight, newwindow, layer
- setreference(h,d,set) -- sets attribute / todo: for set[*].error
+ set.highlight = specification.highlight
+ set.newwindow = specification.newwindow
+ set.layer = specification.layer
+ setreference(specification.height,specification.depth,set) -- sets attribute / todo: for set[*].error
end
end
@@ -553,8 +807,32 @@ function references.injectcurrentset(h,d) -- used inside doifelse
end
end
-commands.injectreference = references.inject
-commands.injectcurrentreference = references.injectcurrentset
+implement {
+ name = "injectreference",
+ actions = references.inject,
+ arguments = {
+ "string",
+ "string",
+ {
+ { "highlight", "boolean" },
+ { "newwindow", "boolean" },
+ { "layer" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ }
+ }
+}
+
+implement {
+ name = "injectcurrentreference",
+ actions = references.injectcurrentset,
+}
+
+implement {
+ name = "injectcurrentreferencehtdp",
+ actions = references.injectcurrentset,
+ arguments = { "dimen", "dimen" },
+}
--
@@ -583,4 +861,11 @@ end)
function references.enableinteraction()
tasks.enableaction("shipouts","nodes.references.handler")
tasks.enableaction("shipouts","nodes.destinations.handler")
+ function references.enableinteraction() end
end
+
+implement {
+ name = "enableinteraction",
+ actions = references.enableinteraction,
+ onlyonce = true
+}
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index ca9d67f91..43dd3895e 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -18,13 +18,8 @@ local report_nodes = logs.reporter("nodes","housekeeping")
local nodes, node = nodes, node
-local copy_node = node.copy
-local free_node = node.free
-local free_list = node.flush_list
-local new_node = node.new
-
nodes.pool = nodes.pool or { }
-local pool = nodes.pool
+local nodepool = nodes.pool
local whatsitcodes = nodes.whatsitcodes
local skipcodes = nodes.skipcodes
@@ -35,400 +30,549 @@ local glyph_code = nodecodes.glyph
local allocate = utilities.storage.allocate
-local texgetbox = tex.getbox
local texgetcount = tex.getcount
local reserved, nofreserved = { }, 0
-local function register_node(n)
- nofreserved = nofreserved + 1
- reserved[nofreserved] = n
- return n
-end
+-- user nodes
-pool.register = register_node
+local userids = allocate()
+local lastid = 0
-function pool.cleanup(nofboxes) -- todo
- if nodes.tracers.steppers then -- to be resolved
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- end
- local nl, nr = 0, nofreserved
- for i=1,nofreserved do
- local ri = reserved[i]
- -- if not (ri.id == glue_spec and not ri.is_writable) then
- free_node(reserved[i])
- -- end
+setmetatable(userids, {
+ __index = function(t,k)
+ if type(k) == "string" then
+ lastid = lastid + 1
+ rawset(userids,lastid,k)
+ rawset(userids,k,lastid)
+ return lastid
+ else
+ rawset(userids,k,k)
+ return k
+ end
+ end,
+ __call = function(t,k)
+ return t[k]
end
- if nofboxes then
- for i=0,nofboxes do
- local l = texgetbox(i)
- if l then
- free_node(l) -- also list ?
- nl = nl + 1
- end
+} )
+
+-- nuts overload
+
+local nuts = nodes.nuts
+local nutpool = { }
+nuts.pool = nutpool
+
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getbox = nuts.getbox
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getlist = nuts.getlist
+
+local copy_nut = nuts.copy
+local new_nut = nuts.new
+local free_nut = nuts.free
+
+local copy_node = nodes.copy
+local new_node = nodes.new
+
+-- at some point we could have a dual set (the overhead of tonut is not much larger than
+-- metatable associations at the lua/c end, especially if we also take assignments into account)
+
+-- table.setmetatableindex(nodepool,function(t,k,v)
+-- -- report_nodes("defining nodepool[%s] instance",k)
+-- local f = nutpool[k]
+-- local v = function(...)
+-- return tonode(f(...))
+-- end
+-- t[k] = v
+-- return v
+-- end)
+--
+-- -- we delay one step because that permits us a forward reference
+-- -- e.g. in pdfsetmatrix
+
+table.setmetatableindex(nodepool,function(t,k,v)
+ -- report_nodes("defining nodepool[%s] instance",k)
+ local v = function(...)
+ local f = nutpool[k]
+ local v = function(...)
+ return tonode(f(...))
end
+ t[k] = v
+ return v(...)
end
- reserved = { }
- nofreserved = 0
- return nr, nl, nofboxes -- can be nil
+ t[k] = v
+ return v
+end)
+
+local function register_nut(n)
+ nofreserved = nofreserved + 1
+ reserved[nofreserved] = n
+ return n
end
-function pool.usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
+local function register_node(n)
+ nofreserved = nofreserved + 1
+ if type(n) == "number" then -- isnut(n)
+ reserved[nofreserved] = n
+ else
+ reserved[nofreserved] = tonut(n)
end
- return t
+ return n
end
-local disc = register_node(new_node("disc"))
-local kern = register_node(new_node("kern",kerncodes.userkern))
-local fontkern = register_node(new_node("kern",kerncodes.fontkern))
-local penalty = register_node(new_node("penalty"))
-local glue = register_node(new_node("glue")) -- glue.spec = nil
-local glue_spec = register_node(new_node("glue_spec"))
-local glyph = register_node(new_node("glyph",0))
-local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
-local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
-local special = register_node(new_node("whatsit",whatsitcodes.special))
-local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
-local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
-local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
-local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
-local left_margin_kern = register_node(new_node("margin_kern",0))
-local right_margin_kern = register_node(new_node("margin_kern",1))
-local lineskip = register_node(new_node("glue",skipcodes.lineskip))
-local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
-local leftskip = register_node(new_node("glue",skipcodes.leftskip))
-local rightskip = register_node(new_node("glue",skipcodes.rightskip))
-local temp = register_node(new_node("temp",0))
-local noad = register_node(new_node("noad"))
+nodepool.userids = userids
+nodepool.register = register_node
+
+nutpool.userids = userids
+nutpool.register = register_node -- could be register_nut
+
+-- so far
+
+local disc = register_nut(new_nut("disc"))
+local kern = register_nut(new_nut("kern",kerncodes.userkern))
+local fontkern = register_nut(new_nut("kern",kerncodes.fontkern))
+local penalty = register_nut(new_nut("penalty"))
+local glue = register_nut(new_nut("glue")) -- glue.spec = nil
+local glue_spec = register_nut(new_nut("glue_spec"))
+local glyph = register_nut(new_nut("glyph",0))
+local textdir = register_nut(new_nut("whatsit",whatsitcodes.dir))
+local latelua = register_nut(new_nut("whatsit",whatsitcodes.latelua))
+local special = register_nut(new_nut("whatsit",whatsitcodes.special))
+local user_n = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_n,"type",100) -- 44
+local user_l = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_l,"type",110) -- 44
+local user_s = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_s,"type",115) -- 44
+local user_t = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_t,"type",116) -- 44
+----- user_c = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_c,"type",108) -- 44
+local left_margin_kern = register_nut(new_nut("margin_kern",0))
+local right_margin_kern = register_nut(new_nut("margin_kern",1))
+local lineskip = register_nut(new_nut("glue",skipcodes.lineskip))
+local baselineskip = register_nut(new_nut("glue",skipcodes.baselineskip))
+local leftskip = register_nut(new_nut("glue",skipcodes.leftskip))
+local rightskip = register_nut(new_nut("glue",skipcodes.rightskip))
+local temp = register_nut(new_nut("temp",0))
+local noad = register_nut(new_nut("noad"))
-- the dir field needs to be set otherwise crash:
-local rule = register_node(new_node("rule")) rule .dir = "TLT"
-local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
-local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
-
-function pool.zeroglue(n)
- local s = n.spec
- return not writable or (
- s.width == 0
- and s.stretch == 0
- and s.shrink == 0
- and s.stretch_order == 0
- and s.shrink_order == 0
- )
-end
-
-function pool.glyph(fnt,chr)
- local n = copy_node(glyph)
- if fnt then n.font = fnt end
- if chr then n.char = chr end
+local rule = register_nut(new_nut("rule")) setfield(rule, "dir","TLT")
+local hlist = register_nut(new_nut("hlist")) setfield(hlist,"dir","TLT")
+local vlist = register_nut(new_nut("vlist")) setfield(vlist,"dir","TLT")
+
+function nutpool.zeroglue(n)
+ local s = getfield(n,"spec")
+ return
+ getfield(s,"width") == 0 and
+ getfield(s,"stretch") == 0 and
+ getfield(s,"shrink") == 0 and
+ getfield(s,"stretch_order") == 0 and
+ getfield(s,"shrink_order") == 0
+end
+
+function nutpool.glyph(fnt,chr)
+ local n = copy_nut(glyph)
+ if fnt then setfield(n,"font",fnt) end
+ if chr then setfield(n,"char",chr) end
return n
end
-function pool.penalty(p)
- local n = copy_node(penalty)
- n.penalty = p
+function nutpool.penalty(p)
+ local n = copy_nut(penalty)
+ setfield(n,"penalty",p)
return n
end
-function pool.kern(k)
- local n = copy_node(kern)
- n.kern = k
+function nutpool.kern(k)
+ local n = copy_nut(kern)
+ setfield(n,"kern",k)
return n
end
-function pool.fontkern(k)
- local n = copy_node(fontkern)
- n.kern = k
+function nutpool.fontkern(k)
+ local n = copy_nut(fontkern)
+ setfield(n,"kern",k)
return n
end
-function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
+function nutpool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
+ local s = copy_nut(glue_spec)
+ if width then setfield(s,"width",width) end
+ if stretch then setfield(s,"stretch",stretch) end
+ if shrink then setfield(s,"shrink",shrink) end
+ if stretch_order then setfield(s,"stretch_order",stretch_order) end
+ if shrink_order then setfield(s,"shrink_order",shrink_order) end
return s
end
local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
- local n = copy_node(skip)
+ local n = copy_nut(skip)
if not width then
-- no spec
elseif width == false or tonumber(width) then
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
- n.spec = s
+ local s = copy_nut(glue_spec)
+ if width then setfield(s,"width",width) end
+ if stretch then setfield(s,"stretch",stretch) end
+ if shrink then setfield(s,"shrink",shrink) end
+ if stretch_order then setfield(s,"stretch_order",stretch_order) end
+ if shrink_order then setfield(s,"shrink_order",shrink_order) end
+ setfield(n,"spec",s)
else
-- shared
- n.spec = copy_node(width)
+ setfield(n,"spec",copy_nut(width))
end
return n
end
-function pool.stretch(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
+function nutpool.stretch(a,b)
+ local n = copy_nut(glue)
+ local s = copy_nut(glue_spec)
if b then
- s.stretch = a
- s.stretch_order = b
+ setfield(s,"stretch",a)
+ setfield(s,"stretch_order",b)
else
- s.stretch = 1
- s.stretch_order = a or 1
+ setfield(s,"stretch",1)
+ setfield(s,"stretch_order",a or 1)
end
- n.spec = s
+ setfield(n,"spec",s)
return n
end
-function pool.shrink(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
+function nutpool.shrink(a,b)
+ local n = copy_nut(glue)
+ local s = copy_nut(glue_spec)
if b then
- s.shrink = a
- s.shrink_order = b
+ setfield(s,"shrink",a)
+ setfield(s,"shrink_order",b)
else
- s.shrink = 1
- s.shrink_order = a or 1
+ setfield(s,"shrink",1)
+ setfield(s,"shrink_order",a or 1)
end
- n.spec = s
+ setfield(n,"spec",s)
return n
end
-
-function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.glue(width,stretch,shrink,stretch_order,shrink_order)
return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.negatedglue(glue)
+ local n = copy_nut(glue)
+ local s = copy_nut(getfield(n,"spec"))
+ local width = getfield(s,"width")
+ local stretch = getfield(s,"stretch")
+ local shrink = getfield(s,"shrink")
+ if width then setfield(s,"width", -width) end
+ if stretch then setfield(s,"stretch",-stretch) end
+ if shrink then setfield(s,"shrink", -shrink) end
+ setfield(n,"spec",s)
+ return n
+end
+
+function nutpool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.baselineskip(width,stretch,shrink)
+function nutpool.baselineskip(width,stretch,shrink)
return someskip(baselineskip,width,stretch,shrink)
end
-function pool.disc()
- return copy_node(disc)
+function nutpool.disc()
+ return copy_nut(disc)
end
-function pool.textdir(dir)
- local t = copy_node(textdir)
- t.dir = dir
+function nutpool.textdir(dir)
+ local t = copy_nut(textdir)
+ setfield(t,"dir",dir)
return t
end
-function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
- local n = copy_node(rule)
- if width then n.width = width end
- if height then n.height = height end
- if depth then n.depth = depth end
- if dir then n.dir = dir end
+function nutpool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
+ local n = copy_nut(rule)
+ if width then setfield(n,"width",width) end
+ if height then setfield(n,"height",height) end
+ if depth then setfield(n,"depth",depth) end
+ if dir then setfield(n,"dir",dir) end
+ return n
+end
+
+function nutpool.latelua(code)
+ local n = copy_nut(latelua)
+ setfield(n,"string",code)
return n
end
-if node.has_field(latelua,'string') then
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.string = code
+if context and _cldo_ then
+
+ -- a typical case where we have more nodes than nuts
+
+ local context = context
+
+ local f_cldo = string.formatters["_cldo_(%i)"]
+ local register = context.registerfunction
+
+ local latelua_node = register_node(new_node("whatsit",whatsitcodes.latelua))
+ local latelua_nut = register_nut (new_nut ("whatsit",whatsitcodes.latelua))
+
+ local setfield_node = nodes.setfield
+ local setfield_nut = nuts .setfield
+
+ function nodepool.lateluafunction(f)
+ local n = copy_node(latelua_node)
+ setfield_node(n,"string",f_cldo(register(f)))
return n
end
-else
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.data = code
+ function nutpool.lateluafunction(f)
+ local n = copy_nut(latelua_nut)
+ setfield_nut(n,"string",f_cldo(register(f)))
return n
end
+
+ -- when function in latelua:
+
+ -- function nodepool.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- return n
+ -- end
+ -- function nutpool.lateluafunction(f)
+ -- local n = copy_nut(latelua_nut)
+ -- setfield_nut(n,"string",f)
+ -- return n
+ -- end
+
+ local latefunction = nodepool.lateluafunction
+ local flushnode = context.flushnode
+
+ function context.lateluafunction(f)
+ flushnode(latefunction(f)) -- hm, quite some indirect calls
+ end
+
+ -- when function in latelua:
+
+ -- function context.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- flushnode(n)
+ -- end
+
+ -- local contextsprint = context.sprint
+ -- local ctxcatcodes = tex.ctxcatcodes
+ -- local storenode = context.storenode
+
+ -- when 0.79 is out:
+
+ -- function context.lateluafunction(f)
+ -- contextsprint(ctxcatcodes,"\\cldl",storenode(latefunction(f))," ")
+ -- end
+
+ -- when function in latelua:
+
+ -- function context.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- contextsprint(ctxcatcodes,"\\cldl",storenode(n)," ")
+ -- end
+
end
-function pool.leftmarginkern(glyph,width)
- local n = copy_node(left_margin_kern)
+function nutpool.leftmarginkern(glyph,width)
+ local n = copy_nut(left_margin_kern)
if not glyph then
report_nodes("invalid pointer to left margin glyph node")
- elseif glyph.id ~= glyph_code then
+ elseif getid(glyph) ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
else
- n.glyph = glyph
+ setfield(n,"glyph",glyph)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
return n
end
-function pool.rightmarginkern(glyph,width)
- local n = copy_node(right_margin_kern)
+function nutpool.rightmarginkern(glyph,width)
+ local n = copy_nut(right_margin_kern)
if not glyph then
report_nodes("invalid pointer to right margin glyph node")
- elseif glyph.id ~= glyph_code then
+ elseif getid(glyph) ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
else
- n.glyph = glyph
+ setfield(n,"glyph",glyph)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
return n
end
-function pool.temp()
- return copy_node(temp)
+function nutpool.temp()
+ return copy_nut(temp)
end
-function pool.noad()
- return copy_node(noad)
+function nutpool.noad()
+ return copy_nut(noad)
end
-function pool.hlist(list,width,height,depth)
- local n = copy_node(hlist)
+function nutpool.hlist(list,width,height,depth)
+ local n = copy_nut(hlist)
if list then
- n.list = list
+ setfield(n,"list",list)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
if height then
- n.height = height
+ setfield(n,"height",height)
end
if depth then
- n.depth = depth
+ setfield(n,"depth",depth)
end
return n
end
-function pool.vlist(list,width,height,depth)
- local n = copy_node(vlist)
+function nutpool.vlist(list,width,height,depth)
+ local n = copy_nut(vlist)
if list then
- n.list = list
+ setfield(n,"list",list)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
if height then
- n.height = height
+ setfield(n,"height",height)
end
if depth then
- n.depth = depth
+ setfield(n,"depth",depth)
end
return n
end
---[[
-<p>At some point we ran into a problem that the glue specification
-of the zeropoint dimension was overwritten when adapting a glue spec
-node. This is a side effect of glue specs being shared. After a
-couple of hours tracing and debugging Taco and I came to the
-conclusion that it made no sense to complicate the spec allocator
-and settled on a writable flag. This all is a side effect of the
-fact that some glues use reserved memory slots (with the zeropoint
-glue being a noticeable one). So, next we wrap this into a function
-and hide it for the user. And yes, LuaTeX now gives a warning as
-well.</p>
-]]--
-
-function nodes.writable_spec(n) -- not pool
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- n.spec = spec
- elseif not spec.writable then
- spec = copy_node(spec)
- n.spec = spec
- end
- return spec
-end
-
-- local num = userids["my id"]
-- local str = userids[num]
-local userids = allocate() pool.userids = userids
-local lastid = 0
-
-setmetatable(userids, {
- __index = function(t,k)
- if type(k) == "string" then
- lastid = lastid + 1
- rawset(userids,lastid,k)
- rawset(userids,k,lastid)
- return lastid
- else
- rawset(userids,k,k)
- return k
- end
- end,
- __call = function(t,k)
- return t[k]
- end
-} )
-
-function pool.usernumber(id,num)
- local n = copy_node(user_n)
+function nutpool.usernumber(id,num)
+ local n = copy_nut(user_n)
if num then
- n.user_id, n.value = id, num
+ setfield(n,"user_id",id)
+ setfield(n,"value",num)
elseif id then
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.userlist(id,list)
- local n = copy_node(user_l)
+function nutpool.userlist(id,list)
+ local n = copy_nut(user_l)
if list then
- n.user_id, n.value = id, list
+ setfield(n,"user_id",id)
+ setfield(n,"value",list)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.userstring(id,str)
- local n = copy_node(user_s)
+function nutpool.userstring(id,str)
+ local n = copy_nut(user_s)
if str then
- n.user_id, n.value = id, str
+ setfield(n,"user_id",id)
+ setfield(n,"value",str)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.usertokens(id,tokens)
- local n = copy_node(user_t)
+function nutpool.usertokens(id,tokens)
+ local n = copy_nut(user_t)
if tokens then
- n.user_id, n.value = id, tokens
+ setfield(n,"user_id",id)
+ setfield(n,"value",tokens)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.special(str)
- local n = copy_node(special)
- n.data = str
+-- function nutpool.usercode(id,code)
+-- local n = copy_nut(user_c)
+-- if code then
+-- setfield(n,"user_id",id)
+-- setfield(n,"value",code)
+-- else
+-- setfield(n,"value",id)
+-- end
+-- return n
+-- end
+
+function nutpool.special(str)
+ local n = copy_nut(special)
+ setfield(n,"data",str)
return n
end
+-- housekeeping
+
+local function cleanup(nofboxes) -- todo
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
+ end
+ local nl, nr = 0, nofreserved
+ for i=1,nofreserved do
+ local ri = reserved[i]
+ -- if not (getid(ri) == glue_spec and not getfield(ri,"is_writable")) then
+ free_nut(reserved[i])
+ -- end
+ end
+ if nofboxes then
+ for i=0,nofboxes do
+ local l = getbox(i)
+ if l then
+-- print(nodes.listtoutf(getlist(l)))
+ free_nut(l) -- also list ?
+ nl = nl + 1
+ end
+ end
+ end
+ reserved = { }
+ nofreserved = 0
+ return nr, nl, nofboxes -- can be nil
+end
+
+
+local function usage()
+ local t = { }
+ for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
+ t[tag] = n
+ end
+ return t
+end
+
+nutpool .cleanup = cleanup
+nodepool.cleanup = cleanup
+
+nutpool .usage = usage
+nodepool.usage = usage
+
+-- end
+
statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", pool.cleanup(texgetcount("c_syst_last_allocated_box")))
+ return format("%s nodes, %s lists of %s", cleanup(texgetcount("c_syst_last_allocated_box")))
end) -- \topofboxstack
statistics.register("node memory usage", function() -- comes after cleanup !
return status.node_mem_usage
end)
-lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
+lua.registerfinalizer(cleanup, "cleanup reserved nodes")
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 96d6bdf41..36d56a16c 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -13,12 +13,28 @@ if not modules then modules = { } end modules ['node-rul'] = {
local attributes, nodes, node = attributes, nodes, node
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local rule_code = nodecodes.rule
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local rule_code = nodecodes.rule
function nodes.striprange(first,last) -- todo: dir
if first and last then -- just to be sure
@@ -26,11 +42,11 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while first and first ~= last do
- local id = first.id
+ local id = getid(first)
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- first = first.next
+ first = getnext(first)
end
end
if not first then
@@ -39,13 +55,13 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while last and last ~= first do
- local id = last.id
+ local id = getid(last)
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
+ local prev = getprev(last) -- luatex < 0.70 has italic correction kern not prev'd
if prev then
- last = last.prev
+ last = prev
else
break
end
@@ -73,12 +89,12 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local striprange = nodes.striprange
-local list_dimensions = node.dimensions
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local list_dimensions = nuts.dimensions
+local hpack_nodes = nuts.hpack
-local hpack_nodes = node.hpack
+local striprange = nodes.striprange
local fontdata = fonts.hashes.identifiers
local variables = interfaces.variables
@@ -111,7 +127,7 @@ local dir_code = whatcodes.dir
local kerning_code = kerncodes.kern
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
@@ -141,9 +157,9 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
local f, l, a, d, i, class
local continue, done, strip, level = false, false, true, -1
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code or id == rule_code then
- local aa = n[attribute]
+ local aa = getattr(n,attribute)
if aa then
if aa == a then
if not f then -- ?
@@ -172,13 +188,13 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
f, l, a = nil, nil, nil
end
--- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
+-- elseif f and (id == disc_code or (id == kern_code and getsubtype(n) == kerning_code)) then
-- l = n
elseif id == disc_code then
if f then
l = n
end
- elseif id == kern_code and n.subtype == kerning_code then
+ elseif id == kern_code and getsubtype(n) == kerning_code then
if f then
l = n
end
@@ -187,11 +203,11 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
head, done = flush(head,f,l,d,level,parent,strip), true
f, l, a = nil, nil, nil
end
- local list = n.list
+ local list = getlist(n)
if list then
- n.list = processwords(attribute,data,flush,list,n)
+ setfield(n,"list",(processwords(attribute,data,flush,list,n))) -- watch ()
end
- elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
+ elseif checkdir and id == whatsit_code and getsubtype(n) == dir_code then -- only changes in dir, we assume proper boundaries
if f and a then
l = n
end
@@ -203,8 +219,8 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
-- l = n
elseif id == glue_code then
-- catch \underbar{a} \underbar{a} (subtype test is needed)
- local subtype = n.subtype
- if n[attribute] and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
+ local subtype = getsubtype(n)
+ if getattr(n,attribute) and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
l = n
else
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -216,7 +232,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
f, l, a = nil, nil, nil
end
end
- n = n.next
+ n = getnext(n)
end
if f then
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -227,7 +243,16 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
end
-nodes.processwords = processwords
+-- nodes.processwords = processwords
+
+nodes.processwords = function(attribute,data,flush,head,parent) -- we have hlistdir and local dir
+ head = tonut(head)
+ if parent then
+ parent = tonut(parent)
+ end
+ local head, done = processwords(attribute,data,flush,head,parent)
+ return tonode(head), done
+end
--
@@ -246,7 +271,7 @@ end
local a_viewerlayer = attributes.private("viewerlayer")
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
- if f.id ~= glyph_code then
+ if getid(f) ~= glyph_code then
-- saveguard ... we need to deal with rules and so (math)
return head
end
@@ -264,16 +289,16 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
if not f then
return head
end
- local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
+ local w = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),f,getnext(l))
local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
local rulethickness, unit = d.rulethickness, d.unit
local ma, ca, ta = d.ma, d.ca, d.ta
- local colorspace = ma > 0 and ma or f[a_colorspace] or 1
- local color = ca > 0 and ca or f[a_color]
- local transparency = ta > 0 and ta or f[a_transparency]
+ local colorspace = ma > 0 and ma or getattr(f,a_colorspace) or 1
+ local color = ca > 0 and ca or getattr(f,a_color)
+ local transparency = ta > 0 and ta or getattr(f,a_transparency)
local foreground = order == v_foreground
- local e = dimenfactor(unit,f.font) -- what if no glyph node
+ local e = dimenfactor(unit,getfont(f)) -- what if no glyph node
local rt = tonumber(rulethickness)
if rt then
@@ -281,7 +306,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
else
local n, u = splitdimen(rulethickness)
if n and u then -- we need to intercept ex and em and % and ...
- rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
+ rulethickness = n * dimenfactor(u,fontdata[getfont(f)]) / 2
else
rulethickness = 1/5
end
@@ -300,18 +325,18 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local ht = (offset+(i-1)*dy)*e + rulethickness - m
local dp = -(offset+(i-1)*dy)*e + rulethickness + m
local r = new_rule(w,ht,dp)
- local v = f[a_viewerlayer]
+ local v = getattr(f,a_viewerlayer)
-- quick hack
if v then
- r[a_viewerlayer] = v
+ setattr(r,a_viewerlayer,v)
end
--
if color then
- r[a_colorspace] = colorspace
- r[a_color] = color
+ setattr(r,a_colorspace,colorspace)
+ setattr(r,a_color,color)
end
if transparency then
- r[a_transparency] = transparency
+ setattr(r,a_transparency,transparency)
end
local k = new_kern(-w)
if foreground then
@@ -365,21 +390,27 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
if true then
first, last = striprange(first,last)
end
- local prev, next = first.prev, last.next
- first.prev, last.next = nil, nil
- local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
+ local prev = getprev(first)
+ local next = getnext(last)
+ setfield(first,"prev",nil)
+ setfield(last,"next",nil)
+ local width, height, depth = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),first,next)
local list = hpack_nodes(first,width,"exactly")
if first == head then
head = list
end
if prev then
- prev.next, list.prev = list, prev
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
end
if next then
- next.prev, list.next = list, next
+ setfield(next,"prev",list)
+ setfield(list,"next",next)
end
- local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
- list.shift, list.height, list.depth = raise, height, depth
+ local raise = data.dy * dimenfactor(data.unit,fontdata[getfont(first)])
+ setfield(list,"shift",raise)
+ setfield(list,"height",height)
+ setfield(list,"depth",depth)
if trace_shifted then
report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
end
@@ -393,3 +424,52 @@ nodes.shifts.handler = function(head) return process(a_shifted,data,flush_shifte
function nodes.shifts.enable()
tasks.enableaction("shipouts","nodes.shifts.handler")
end
+
+-- interface
+
+local implement = interfaces.implement
+
+implement {
+ name = "definerule",
+ actions = { nodes.rules.define, context },
+ arguments = {
+ {
+ { "continue" },
+ { "unit" },
+ { "order" },
+ { "method", "integer" },
+ { "offset", "number" },
+ { "rulethickness", "string" },
+ { "dy", "number" },
+ { "max", "number" },
+ { "ma", "integer" },
+ { "ca", "integer" },
+ { "ta", "integer" },
+ }
+ }
+}
+
+implement {
+ name = "enablerules",
+ onlyonce = true,
+ actions = nodes.rules.enable
+}
+
+implement {
+ name = "defineshift",
+ actions = { nodes.shifts.define, context },
+ arguments = {
+ {
+ { "continue" },
+ { "unit" },
+ { "method", "integer" },
+ { "dy", "number" },
+ }
+ }
+}
+
+implement {
+ name = "enableshifts",
+ onlyonce = true,
+ actions = nodes.shifts.enable
+}
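-- A rough sketch (not from this commit) of the call that the "definerule"
-- implement block above reaches at the Lua end; the field values here are
-- purely illustrative, the real ones come from the bar parameters scanned by
-- \clf_definerule on the TeX side.

local attr = nodes.rules.define {
    continue      = "no",
    unit          = "ex",
    order         = "foreground",
    method        = 0,
    offset        = 0,
    rulethickness = ".1ex",
    dy            = 0,
    max           = 3,
    ma            = 1,
    ca            = 2,
    ta            = 0,
}
-- attr is the attribute value that the TeX side stores for the current bar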
diff --git a/tex/context/base/node-rul.mkiv b/tex/context/base/node-rul.mkiv
index 2d2e61134..7fa0473a5 100644
--- a/tex/context/base/node-rul.mkiv
+++ b/tex/context/base/node-rul.mkiv
@@ -112,19 +112,20 @@
\unexpanded\def\node_rules_define
{\edef\p_node_rules_color{\barparameter\c!color}%
- \setevalue{\??barattribute\currentbar}{\number\ctxlua{nodes.rules.define {
- method = \barparameter\c!method,
- offset = \barparameter\c!offset,
- continue = "\barparameter\c!continue",
- dy = \barparameter\c!dy,
- rulethickness = "\barparameter\c!rulethickness",
- unit = "\barparameter\c!unit",
- order = "\barparameter\c!order",
- max = \barparameter\c!max,
- ma = \thecolormodelattribute,
- ca = \thecolorattribute\p_node_rules_color,
- ta = \thetransparencyattribute\p_node_rules_color
- }}}}
+ \setevalue{\??barattribute\currentbar}{\number
+ \clf_definerule
+ continue {\barparameter\c!continue}%
+ unit {\barparameter\c!unit}%
+ order {\barparameter\c!order}%
+ rulethickness {\barparameter\c!rulethickness}%
+ method \barparameter\c!method
+ ma \thecolormodelattribute
+ ca \thecolorattribute\p_node_rules_color
+ ta \thetransparencyattribute\p_node_rules_color
+ offset \barparameter\c!offset\space % number
+ dy \barparameter\c!dy\space % number
+ max \barparameter\c!max
+ \relax}}
\unexpanded\def\node_rules_redefine#1%
{\def\currentbar{#1}\node_rules_define}
@@ -132,13 +133,16 @@
\unexpanded\def\node_rules_direct#1%
{\groupedcommand{\node_rules_set{#1}}\relax}
-\def\node_rules_set
- {\ctxlua{nodes.rules.enable()}% will be moved to lua
- \glet\node_rules_set\node_rules_set_indeed
- \node_rules_set}
+% \unexpanded\def\node_rules_set
+% {\clf_enablerules % will be moved to lua
+% \glet\node_rules_set\node_rules_set_indeed
+% \node_rules_set}
+%
+%\unexpanded\def\node_rules_set_indeed#1% maybe reverse the 1000 (also maybe use more attributes instead of settings)
-\def\node_rules_set_indeed#1% maybe reverse the 1000 (also maybe use more attributes instead of settings)
- {\edef\currentbar{#1}%
+\unexpanded\def\node_rules_set#1% maybe reverse the 1000 (also maybe use more attributes instead of settings)
+ {\clf_enablerules % will be relaxed
+ \edef\currentbar{#1}%
\expandafter\let\expandafter\c_node_rules_index\csname\??barindex#1\endcsname
\advance\c_node_rules_index\plusone
\usebarstyleandcolor\c!foregroundstyle\c!foregroundcolor
@@ -157,6 +161,8 @@
\unexpanded\def\setbar[#1]%
{\node_rules_set{#1}}
+\let\directsetbar\node_rules_set
+
% ungrouped
\newcount\c_node_rules_nesting % todo: same as colors
@@ -279,31 +285,27 @@
\to \everydefineshift
\unexpanded\def\node_shifts_define
- {\setevalue{\??shiftattribute\currentshift}{\number\ctxlua{nodes.shifts.define {
- method = \shiftparameter\c!method,
- continue = "\shiftparameter\c!continue",
- dy = \shiftparameter\c!dy,
- unit = "\shiftparameter\c!unit",
- }}}}
+ {\setevalue{\??shiftattribute\currentshift}{\number
+ \clf_defineshift
+ continue {\shiftparameter\c!continue}%
+ unit {\shiftparameter\c!unit}%
+ method \shiftparameter\c!method
+ dy \shiftparameter\c!dy % number
+ \relax}}
\unexpanded\def\node_shifts_redefine#1%
{\def\currentshift{#1}\node_shifts_define}
-\unexpanded\def\node_shifts_set
- {\ctxlua{nodes.shifts.enable()}%
- \glet\node_shifts_set\node_shifts_set_indeed
- \node_shifts_set}
-
-% \unexpanded\def\node_shifts_direct#1%
-% {\doisolatedgroupedalign{\node_shifts_set{#1}}\donothing}
-
-\unexpanded\def\node_shifts_direct#1%
- {\groupedcommand
- {\begingroup\dostartisolation\begingroup\node_shifts_set{#1}}
- {\endgroup\dostopisolation\endgroup}}
+% \unexpanded\def\node_shifts_set
+% {\clf_enableshifts
+% \glet\node_shifts_set\node_shifts_set_indeed
+% \node_shifts_set}
+%
+% \def\node_shifts_set_indeed#1% todo: check parent !
-\def\node_shifts_set_indeed#1% todo: check parent !
- {\def\currentshift{#1}%
+\unexpanded\def\node_shifts_set#1% todo: check parent !
+ {\clf_enableshifts
+ \def\currentshift{#1}%
\expandafter\let\expandafter\c_node_shifts_index\csname\??shiftindex#1\endcsname
\advance\c_node_shifts_index\plusone
\attribute\shiftedattribute\numexpr
@@ -320,6 +322,14 @@
\unexpanded\def\stopshift
{\endgroup}
+% \unexpanded\def\node_shifts_direct#1%
+% {\doisolatedgroupedalign{\node_shifts_set{#1}}\donothing}
+
+\unexpanded\def\node_shifts_direct#1%
+ {\groupedcommand
+ {\begingroup\dostartisolation\begingroup\node_shifts_set{#1}}
+ {\endgroup\dostopisolation\endgroup}}
+
\setupshifts
[\c!method=0,
\c!continue=\v!no,
diff --git a/tex/context/base/node-shp.lua b/tex/context/base/node-shp.lua
index 6ebfd767f..42cc83b8f 100644
--- a/tex/context/base/node-shp.lua
+++ b/tex/context/base/node-shp.lua
@@ -15,6 +15,8 @@ local setmetatableindex = table.setmetatableindex
local nodecodes = nodes.nodecodes
local whatsitcodes = nodes.whatsitcodes
+local disccodes = nodes.disccodes
+
local tasks = nodes.tasks
local handlers = nodes.handlers
@@ -26,11 +28,27 @@ local kern_code = nodecodes.kern
local glue_code = nodecodes.glue
local whatsit_code = nodecodes.whatsit
+local fulldisc_code = disccodes.discretionary
+
local texgetbox = tex.getbox
-local free_node = node.free
-local remove_node = node.remove
-local traverse_nodes = node.traverse
+local implement = interfaces.implement
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local free_node = nuts.free
+local remove_node = nuts.remove
+local traverse_nodes = nuts.traverse
+local find_tail = nuts.tail
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getsubtype = nuts.getsubtype
local removables = {
[whatsitcodes.open] = true,
@@ -41,38 +59,63 @@ local removables = {
[whatsitcodes.latelua] = true,
}
-local function cleanup_redundant(head)
+-- About 10% of the nodes make no sense for the backend. By (at least)
+-- removing the replace disc nodes, we can omit extensive checking in
+-- the finalizer code (e.g. colors in disc nodes). Removing more nodes
+-- (like marks) is not saving much and removing empty boxes is even
+-- dangerous because we can rely on dimensions (e.g. in references).
+
+local wipedisc = false -- we can use them in the export ... can be an option
+
+local function cleanup_redundant(head) -- better name is: flatten_page
local start = head
while start do
- local id = start.id
+ local id = getid(start)
if id == disc_code then
- head, start = remove_node(head,start,true)
- -- elseif id == glue_code then
- -- if start.writable then
- -- start = start.next
- -- elseif some_complex_check_on_glue_spec then
- -- head, start = remove_node(head,start,true)
- -- else
- -- start = start.next
- -- end
- elseif id == kern_code then
- if start.kern == 0 then
- head, start = remove_node(head,start,true)
+ if getsubtype(start) == fulldisc_code then
+ local replace = getfield(start,"replace")
+ if replace then
+ local prev = getprev(start)
+ local next = getnext(start)
+ local tail = find_tail(replace)
+ setfield(start,"replace",nil)
+ if start == head then
+ remove_node(head,start,true)
+ head = replace
+ else
+ remove_node(head,start,true)
+ end
+ if next then
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ if prev then
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ else
+ setfield(replace,"prev",nil) -- to be sure
+ end
+ start = next
+ elseif wipedisc then
+ -- pre and post can have values
+ head, start = remove_node(head,start,true)
+ else
+ start = getnext(start)
+ end
else
- start = start.next
+ start = getnext(start)
end
- elseif id == mark_code then
- head, start = remove_node(head,start,true)
elseif id == hlist_code or id == vlist_code then
- local sl = start.list
+ local sl = getlist(start)
if sl then
- start.list = cleanup_redundant(sl)
- start = start.next
- else
- head, start = remove_node(head,start,true)
+ local rl = cleanup_redundant(sl)
+ if rl ~= sl then
+ setfield(start,"list",rl)
+ end
end
+ start = getnext(start)
else
- start = start.next
+ start = getnext(start)
end
end
return head
@@ -81,54 +124,49 @@ end
local function cleanup_flushed(head) -- rough
local start = head
while start do
- local id = start.id
- if id == whatsit_code and removables[start.subtype] then
- head, start = remove_node(head,start,true)
+ local id = getid(start)
+ if id == whatsit_code then
+ if removables[getsubtype(start)] then
+ head, start = remove_node(head,start,true)
+ else
+ start = getnext(start)
+ end
elseif id == hlist_code or id == vlist_code then
- local sl = start.list
+ local sl = getlist(start)
if sl then
- start.list = cleanup_flushed(sl)
- start = start.next
- else
- head, start = remove_node(head,start,true)
+ local rl = cleanup_flushed(sl)
+ if rl ~= sl then
+ setfield(start,"list",rl)
+ end
end
+ start = getnext(start)
else
- start = start.next
+ start = getnext(start)
end
end
return head
end
function handlers.cleanuppage(head)
- -- about 10% of the nodes make no sense for the backend
- return cleanup_redundant(head), true
+ return tonode(cleanup_redundant(tonut(head))), true
end
function handlers.cleanupbox(head)
- return cleanup_flushed(head), true
+ return tonode(cleanup_flushed(tonut(head))), true
end
-directives.register("backend.cleanup", function()
- tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
-end)
-
local actions = tasks.actions("shipouts") -- no extra arguments
function handlers.finalize(head) -- problem, attr loaded before node, todo ...
return actions(head)
end
-function commands.cleanupbox(n)
- cleanup_flushed(texgetbox(n))
-end
-
-- handlers.finalize = actions
-- interface
-function commands.finalizebox(n)
- actions(texgetbox(n))
-end
+implement { name = "cleanupbox", actions = { texgetbox, cleanup_flushed }, arguments = "integer" }
+implement { name = "finalizebox", actions = { texgetbox, actions }, arguments = "integer" }
-- just in case we want to optimize lookups:
@@ -158,12 +196,12 @@ local function count(head,data,subcategory)
-- no components, pre, post, replace .. can maybe be an option .. but
-- we use this for optimization so it makes sense to look at the
-- main node only
- for n in traverse_nodes(head) do
- local id = n.id
- local dn = data[nodecodes[n.id]]
+ for n in traverse_nodes(tonut(head)) do
+ local id = getid(n)
+ local dn = data[nodecodes[id]] -- we could use id and then later convert to nodecodes
dn[subcategory] = dn[subcategory] + 1
if id == hlist_code or id == vlist_code then
- count(n.list,data,subcategory)
+ count(getfield(n,"list"),data,subcategory)
end
end
end
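-- Sketch (not part of this commit): with the directives.register hook removed
-- above, the page flattening would be switched on directly through the task
-- mechanism, the same call the removed hook used.

nodes.tasks.enableaction("shipouts","nodes.handlers.cleanuppage")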
diff --git a/tex/context/base/node-tex.lua b/tex/context/base/node-tex.lua
index 2170e0603..c9d3091df 100644
--- a/tex/context/base/node-tex.lua
+++ b/tex/context/base/node-tex.lua
@@ -6,33 +6,32 @@ if not modules then modules = { } end modules ['node-tex'] = {
license = "see context related readme files"
}
-local format = string.format
+builders = builders or { }
+local kernel = builders.kernel or { }
+builders.kernel = kernel
-builders = builders or { }
-builders.kernel = builders.kernel or { }
-local kernel = builders.kernel
+local hyphenate = lang.hyphenate
+local ligaturing = node.ligaturing
+local kerning = node.kerning
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning
+kernel.originals = {
+ hyphenate = hyphenate,
+ ligaturing = ligaturing,
+ kerning = kerning,
+}
function kernel.hyphenation(head)
- -- starttiming(kernel)
local done = hyphenate(head)
- -- stoptiming(kernel)
return head, done
end
function kernel.ligaturing(head)
- -- starttiming(kernel)
- local head, tail, done = ligaturing(head) -- todo: check what is returned
- -- stoptiming(kernel)
+ local head, tail, done = ligaturing(head) -- we return 3 values indeed
return head, done
end
function kernel.kerning(head)
- -- starttiming(kernel)
- local head, tail, done = kerning(head) -- todo: check what is returned
- -- stoptiming(kernel)
+ local head, tail, done = kerning(head) -- we return 3 values indeed
return head, done
end
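-- Sketch (not from this commit): keeping the originals around means a module
-- that temporarily overloads the engine entry points can restore them later,
-- for instance:

node.ligaturing = builders.kernel.originals.ligaturing
node.kerning    = builders.kernel.originals.kerning
lang.hyphenate  = builders.kernel.originals.hyphenate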
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 9617f7476..a7ab7f77f 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -34,9 +34,30 @@ nodes.handlers = handlers
local injections = nodes.injections or { }
nodes.injections = injections
-local traverse_nodes = node.traverse
-local traverse_by_id = node.traverse_id
-local count_nodes = nodes.count
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+
+local setattr = nuts.setattr
+
+local flush_list = nuts.flush_list
+local count_nodes = nuts.count
+local used_nodes = nuts.usedlist
+
+local traverse_by_id = nuts.traverse_id
+local traverse_nodes = nuts.traverse
+local d_tostring = nuts.tostring
+
+local nutpool = nuts.pool
+local new_rule = nutpool.rule
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -56,10 +77,8 @@ local gluespec_code = nodecodes.gluespec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nodepool = nodes.pool
-local new_rule = nodepool.rule
-
local dimenfactors = number.dimenfactors
+local fillorders = nodes.fillcodes
local formatters = string.formatters
-- this will be reorganized:
@@ -68,15 +87,16 @@ function nodes.showlist(head, message)
if message then
report_nodes(message)
end
- for n in traverse_nodes(head) do
- report_nodes(tostring(n))
+ for n in traverse_nodes(tonut(head)) do
+ report_nodes(d_tostring(n))
end
end
function nodes.handlers.checkglyphs(head,message)
+ local h = tonut(head)
local t = { }
- for g in traverse_by_id(glyph_code,head) do
- t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
+ for g in traverse_by_id(glyph_code,h) do
+ t[#t+1] = formatters["%U:%s"](getchar(g),getsubtype(g))
end
if #t > 0 then
if message and message ~= "" then
@@ -90,12 +110,12 @@ end
function nodes.handlers.checkforleaks(sparse)
local l = { }
- local q = node.usedlist()
- for p in traverse(q) do
- local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
+ local q = used_nodes()
+ for p in traverse_nodes(q) do
+ local s = table.serialize(nodes.astable(p,sparse),nodecodes[getid(p)])
l[s] = (l[s] or 0) + 1
end
- node.flush_list(q)
+ flush_list(q)
for k, v in next, l do
report_nodes("%s * %s",v,k)
end
@@ -105,39 +125,40 @@ local f_sequence = formatters["U+%04X:%s"]
local function tosequence(start,stop,compact)
if start then
+ start = tonut(start)
+ stop = stop and tonut(stop)
local t = { }
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local c = start.char
+ local c = getchar(start)
if compact then
- if start.components then
- t[#t+1] = tosequence(start.components,nil,compact)
+ local components = getfield(start,"components")
+ if components then
+ t[#t+1] = tosequence(components,nil,compact)
else
t[#t+1] = utfchar(c)
end
else
t[#t+1] = f_sequence(c,utfchar(c))
end
- elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then
- t[#t+1] = "[" .. start.dir .. "]"
elseif id == rule_code then
if compact then
t[#t+1] = "|"
else
t[#t+1] = nodecodes[id]
end
+ elseif id == whatsit_code and getsubtype(start) == localpar_code or getsubtype(start) == dir_code then
+ t[#t+1] = "[" .. getfield(start,"dir") .. "]"
+ elseif compact then
+ t[#t+1] = "[]"
else
- if compact then
- t[#t+1] = "[]"
- else
- t[#t+1] = nodecodes[id]
- end
+ t[#t+1] = nodecodes[id]
end
if start == stop then
break
else
- start = start.next
+ start = getnext(start)
end
end
if compact then
@@ -151,23 +172,26 @@ local function tosequence(start,stop,compact)
end
nodes.tosequence = tosequence
+nuts .tosequence = tosequence
function nodes.report(t,done)
- report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
+ report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(tonut(t)))
end
function nodes.packlist(head)
local t = { }
- for n in traverse(head) do
- t[#t+1] = tostring(n)
+ for n in traverse_nodes(tonut(head)) do
+ t[#t+1] = d_tostring(n)
end
return t
end
function nodes.idstostring(head,tail)
+ head = tonut(head)
+ tail = tail and tonut(tail)
local t, last_id, last_n = { }, nil, 0
for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
- local id = n.id
+ local id = getid(n)
if not last_id then
last_id, last_n = id, 1
elseif last_id == id then
@@ -195,6 +219,8 @@ function nodes.idstostring(head,tail)
end
-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
+-- head = tonut(head)
+-- tail = tonut(tail)
-- local n = head
-- while n.next do
-- n = n.next
@@ -217,7 +243,7 @@ end
-- if n == head then
-- break
-- end
--- n = n.prev
+-- n = getprev(n)
-- end
-- if not last_id then
-- t[#t+1] = "no nodes"
@@ -230,51 +256,56 @@ end
-- end
local function showsimplelist(h,depth,n)
+ h = h and tonut(h)
while h do
report_nodes("% w%s",n,d_tostring(h))
if not depth or n < depth then
- local id = h.id
+ local id = getid(h)
if id == hlist_code or id == vlist_code then
- showsimplelist(h.list,depth,n+1)
+ showsimplelist(getlist(h),depth,n+1)
end
end
- h = h.next
+ h = getnext(h)
end
end
---~ \startluacode
---~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
---~ \stopluacode
---~ \vbox{b\footnote{n}a}
---~ \startluacode
---~ callback.register('buildpage_filter',nil)
---~ \stopluacode
+-- \startluacode
+-- callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
+-- \stopluacode
+-- \vbox{b\footnote{n}a}
+-- \startluacode
+-- callback.register('buildpage_filter',nil)
+-- \stopluacode
nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last)
- local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
local w = { }
while h do
- local id = h.id
+ local id = getid(h)
if id == glyph_code then -- always true
- local c = h.char
+ local c = getchar(h)
w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
w[#w+1] = joiner
end
elseif id == disc_code then
- local pre = h.pre
- local pos = h.post
- local rep = h.replace
+ local pre = getfield(h,"pre")
+ local pos = getfield(h,"post")
+ local rep = getfield(h,"replace")
w[#w+1] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
pos and listtoutf(pos,joiner,textonly) or "",
rep and listtoutf(rep,joiner,textonly) or ""
)
elseif textonly then
- if id == glue_code and h.spec and h.spec.width > 0 then
- w[#w+1] = " "
+ if id == glue_code then
+ local spec = getfield(h,"spec")
+ if spec and getfield(spec,"width") > 0 then
+ w[#w+1] = " "
+ end
+ elseif id == hlist_code or id == vlist_code then
+ w[#w+1] = "[]"
end
else
w[#w+1] = "[-]"
@@ -282,24 +313,32 @@ local function listtoutf(h,joiner,textonly,last)
if h == last then
break
else
- h = h.next
+ h = getnext(h)
end
end
return concat(w)
end
-nodes.listtoutf = listtoutf
+function nodes.listtoutf(h,joiner,textonly,last)
+ if h then
+ local joiner = joiner == true and utfchar(0x200C) or joiner -- zwnj
+ return listtoutf(tonut(h),joiner,textonly,last and tonut(last))
+ else
+ return ""
+ end
+end
local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
local function showboxes(n,symbol,depth)
- depth, symbol = depth or 0, symbol or "."
- for n in traverse_nodes(n) do
- local id = n.id
+ depth = depth or 0
+ symbol = symbol or "."
+ for n in traverse_nodes(tonut(n)) do
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- local s = n.subtype
+ local s = getsubtype(n)
report_nodes(rep(symbol,depth) .. (what[s] or s))
- showboxes(n.list,symbol,depth+1)
+ showboxes(getlist(n),symbol,depth+1)
end
end
end
@@ -320,70 +359,180 @@ local stripper = lpeg.patterns.stripzeros
--
-- redefined:
-local dimenfactors = number.dimenfactors
+-- local function nodetodimen(d,unit,fmt,strip)
+-- d = tonut(d) -- tricky: direct nuts are an issue
+-- if unit == true then
+-- unit = "pt"
+-- fmt = "%0.5f%s"
+-- else
+-- unit = unit or 'pt'
+-- if not fmt then
+-- fmt = "%s%s"
+-- elseif fmt == true then
+-- fmt = "%0.5f%s"
+-- end
+-- end
+-- local id = getid(d)
+-- if id == kern_code then
+-- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- end
+-- if id == glue_code then
+-- d = getfield(d,"spec")
+-- end
+-- if not d or not getid(d) == gluespec_code then
+-- local str = formatters[fmt](0,unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- end
+-- local width = getfield(d,"width")
+-- local plus = getfield(d,"stretch_order")
+-- local minus = getfield(d,"shrink_order")
+-- local stretch = getfield(d,"stretch")
+-- local shrink = getfield(d,"shrink")
+-- if plus ~= 0 then
+-- plus = " plus " .. stretch/65536 .. fillcodes[plus]
+-- elseif stretch ~= 0 then
+-- plus = formatters[fmt](stretch*dimenfactors[unit],unit)
+-- plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
+-- else
+-- plus = ""
+-- end
+-- if minus ~= 0 then
+-- minus = " minus " .. shrink/65536 .. fillcodes[minus]
+-- elseif shrink ~= 0 then
+-- minus = formatters[fmt](shrink*dimenfactors[unit],unit)
+-- minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
+-- else
+-- minus = ""
+-- end
+-- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+-- return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
+-- end
+--
+-- local function numbertodimen(d,unit,fmt,strip)
+-- if not d then
+-- local str = formatters[fmt](0,unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- end
+-- local t = type(d)
+-- if t == 'string' then
+-- return d
+-- elseif t == "number" then
+-- if unit == true then
+-- unit = "pt"
+-- fmt = "%0.5f%s"
+-- else
+-- unit = unit or 'pt'
+-- if not fmt then
+-- fmt = "%s%s"
+-- elseif fmt == true then
+-- fmt = "%0.5f%s"
+-- end
+-- end
+-- local str = formatters[fmt](d*dimenfactors[unit],unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- else
+-- return nodetodimen(d,unit,fmt,strip) -- real node
+-- end
+-- end
-local function numbertodimen(d,unit,fmt,strip)
- if not d then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local t = type(d)
- if t == 'string' then
- return d
- end
- if unit == true then
- unit = "pt"
- fmt = "%0.5f%s"
- else
- unit = unit or 'pt'
- if not fmt then
- fmt = "%s%s"
- elseif fmt == true then
- fmt = "%0.5f%s"
- end
- end
- if t == "number" then
- local str = formatters[fmt](d*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local id = d.id
+local f_f_f = formatters["%0.5Fpt plus %0.5F%s minus %0.5F%s"]
+local f_f_m = formatters["%0.5Fpt plus %0.5F%s minus %0.5Fpt"]
+local f_p_f = formatters["%0.5Fpt plus %0.5Fpt minus %0.5F%s"]
+local f_p_m = formatters["%0.5Fpt plus %0.5Fpt minus %0.5Fpt"]
+local f_f_z = formatters["%0.5Fpt plus %0.5F%s"]
+local f_p_z = formatters["%0.5Fpt plus %0.5Fpt"]
+local f_z_f = formatters["%0.5Fpt minus %0.5F%s"]
+local f_z_m = formatters["%0.5Fpt minus %0.5Fpt"]
+local f_z_z = formatters["%0.5Fpt"]
+
+local tonut = nodes.tonut
+local getfield = nodes.nuts.getfield
+
+local function nodetodimen(n)
+ n = tonut(n)
+ local id = getid(n)
if id == kern_code then
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
+ local width = getfield(n,"width")
+ if width == 0 then
+ return "0pt"
+ else
+ return f_z_z(width)
+ end
end
if id == glue_code then
- d = d.spec
+ n = getfield(n,"spec")
end
- if not d or not d.id == gluespec_code then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
+ if not n or not getid(n) == gluespec_code then
+ return "0pt"
end
- local width = d.width
- local plus = d.stretch_order
- local minus = d.shrink_order
- local stretch = d.stretch
- local shrink = d.shrink
- if plus ~= 0 then
- plus = " plus " .. stretch/65536 .. fillcodes[plus]
+ local stretch_order = getfield(n,"stretch_order")
+ local shrink_order = getfield(n,"shrink_order")
+ local stretch = getfield(n,"stretch") / 65536
+ local shrink = getfield(n,"shrink") / 65536
+ local width = getfield(n,"width") / 65536
+ if stretch_order ~= 0 then
+ if shrink_order ~= 0 then
+ return f_f_f(width,stretch,fillorders[stretch_order],shrink,fillorders[shrink_order])
+ elseif shrink ~= 0 then
+ return f_f_m(width,stretch,fillorders[stretch_order],shrink)
+ else
+ return f_f_z(width,stretch,fillorders[stretch_order])
+ end
+ elseif shrink_order ~= 0 then
+ if stretch ~= 0 then
+ return f_p_f(width,stretch,shrink,fillorders[shrink_order])
+ else
+ return f_z_f(width,shrink,fillorders[shrink_order])
+ end
elseif stretch ~= 0 then
- plus = formatters[fmt](stretch*dimenfactors[unit],unit)
- plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
+ if shrink ~= 0 then
+ return f_p_m(width,stretch,shrink)
+ else
+ return f_p_z(width,stretch)
+ end
+ elseif shrink ~= 0 then
+ return f_z_m(width,shrink)
+ elseif width == 0 then
+ return "0pt"
else
- plus = ""
+ return f_z_z(width)
end
- if minus ~= 0 then
- minus = " minus " .. shrink/65536 .. fillcodes[minus]
- elseif shrink ~= 0 then
- minus = formatters[fmt](shrink*dimenfactors[unit],unit)
- minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
+end
+
+
+-- number.todimen(123)
+-- number.todimen(123,"cm")
+-- number.todimen(123,false,"%F")
+
+local f_pt = formatters["%p"]
+local f_un = formatters["%F%s"]
+
+dimenfactors[""] = dimenfactors.pt
+
+local function numbertodimen(d,unit,fmt)
+ if not d or d == 0 then
+ if not unit or unit == "pt" then
+ return "0pt"
+ elseif fmt then
+ return formatters[fmt](0,unit)
+ else
+ return 0 .. unit
+ end
+ elseif fmt then
+ if not unit then
+ unit = "pt"
+ end
+ return formatters[fmt](d*dimenfactors[unit],unit)
+ elseif not unit or unit == "pt" then
+ return f_pt(d)
else
- minus = ""
+ return f_un(d*dimenfactors[unit],unit)
end
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
- return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
end
number.todimen = numbertodimen
+nodes .todimen = nodetodimen
function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
@@ -398,6 +547,19 @@ function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
+function nodes.topoints (n,fmt) return nodetodimen(n,"pt",fmt) end
+function nodes.toinches (n,fmt) return nodetodimen(n,"in",fmt) end
+function nodes.tocentimeters (n,fmt) return nodetodimen(n,"cm",fmt) end
+function nodes.tomillimeters (n,fmt) return nodetodimen(n,"mm",fmt) end
+function nodes.toscaledpoints(n,fmt) return nodetodimen(n,"sp",fmt) end
+function nodes.toscaledpoints(n) return n .. "sp" end
+function nodes.tobasepoints (n,fmt) return nodetodimen(n,"bp",fmt) end
+function nodes.topicas (n,fmt) return nodetodimen(n,"pc",fmt) end
+function nodes.todidots (n,fmt) return nodetodimen(n,"dd",fmt) end
+function nodes.tociceros (n,fmt) return nodetodimen(n,"cc",fmt) end
+function nodes.tonewdidots (n,fmt) return nodetodimen(n,"nd",fmt) end
+function nodes.tonewciceros (n,fmt) return nodetodimen(n,"nc",fmt) end
+
-- stop redefinition
local points = function(n)
@@ -406,7 +568,7 @@ local points = function(n)
elseif type(n) == "number" then
return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter
else
- return numbertodimen(n,"pt",true,true) -- also deals with nodes
+ return numbertodimen(n,"pt") -- also deals with nodes
end
end
@@ -416,7 +578,7 @@ local basepoints = function(n)
elseif type(n) == "number" then
return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter
else
- return numbertodimen(n,"bp",true,true) -- also deals with nodes
+ return numbertodimen(n,"bp") -- also deals with nodes
end
end
@@ -426,7 +588,7 @@ local pts = function(n)
elseif type(n) == "number" then
return format("%.5fpt",n*ptfactor) -- faster than formatter
else
- return numbertodimen(n,"pt",true) -- also deals with nodes
+ return numbertodimen(n,"pt") -- also deals with nodes
end
end
@@ -443,8 +605,13 @@ number.basepoints = basepoints
number.pts = pts
number.nopts = nopts
-local colors = { }
-tracers.colors = colors
+nodes.points = function(n) return numbertodimen(n,"pt") end
+nodes.basepoints = function(n) return numbertodimen(n,"bp") end
+nodes.pts = function(n) return numbertodimen(n,"pt") end
+nodes.nopts = function(n) return format("%.5f",n*ptfactor) end
+
+local colors = { }
+tracers.colors = colors
local unsetvalue = attributes.unsetvalue
@@ -454,36 +621,34 @@ local m_color = attributes.list[a_color] or { }
function colors.set(n,c,s)
local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
+ local nn = tonut(n)
+ if mc then
+ local mm = s or texgetattribute(a_colormodel)
+ setattr(nn,a_colormodel,mm > 0 and mm or 1)
+ setattr(nn,a_color,mc)
else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
+ setattr(nn,a_color,unsetvalue)
end
return n
end
function colors.setlist(n,c,s)
- local f = n
- while n do
- local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
- else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
- end
- n = n.next
+ local nn = tonut(n)
+ local mc = m_color[c] or unsetvalue
+ local mm = s or texgetattribute(a_colormodel)
+ if mm <= 0 then
+ mm = 1
end
- return f
+ while nn do
+ setattr(nn,a_colormodel,mm)
+ setattr(nn,a_color,mc)
+ nn = getnext(nn)
+ end
+ return n
end
function colors.reset(n)
- n[a_color] = unsetvalue
+ setattr(tonut(n),a_color,unsetvalue)
return n
end
@@ -496,31 +661,22 @@ local a_transparency = attributes.private('transparency')
local m_transparency = attributes.list[a_transparency] or { }
function transparencies.set(n,t)
- local mt = m_transparency[t]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
+ setattr(tonut(n),a_transparency,m_transparency[t] or unsetvalue)
return n
end
function transparencies.setlist(n,c,s)
- local f = n
- while n do
- local mt = m_transparency[c]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
- n = n.next
+ local nn = tonut(n)
+ local mt = m_transparency[c] or unsetvalue
+ while nn do
+ setattr(nn,a_transparency,mt)
+ nn = getnext(nn)
end
- return f
+ return n
end
function transparencies.reset(n)
- n[a_transparency] = unsetvalue
+ setattr(tonut(n),a_transparency,unsetvalue)
return n
end
@@ -537,52 +693,76 @@ end
-- although tracers are used seldom
local function setproperties(n,c,s)
+ local nn = tonut(n)
local mm = texgetattribute(a_colormodel)
- n[a_colormodel] = mm > 0 and mm or 1
- n[a_color] = m_color[c]
- n[a_transparency] = m_transparency[c]
+ setattr(nn,a_colormodel,mm > 0 and mm or 1)
+ setattr(nn,a_color,m_color[c])
+ setattr(nn,a_transparency,m_transparency[c])
return n
end
tracers.setproperties = setproperties
-function tracers.setlistv(n,c,s)
- local f = n
+function tracers.setlist(n,c,s)
+ local nn = tonut(n)
local mc = m_color[c]
local mt = m_transparency[c]
local mm = texgetattribute(a_colormodel)
if mm <= 0 then
mm = 1
end
- while n do
- n[a_colormodel] = mm
- n[a_color] = mc
- n[a_transparency] = mt
- n = n.next
+ while nn do
+ setattr(nn,a_colormodel,mm)
+ setattr(nn,a_color,mc)
+ setattr(nn,a_transparency,mt)
+ nn = getnext(nn)
end
- return f
+ return n
end
function tracers.resetproperties(n)
- n[a_color] = unsetvalue
- n[a_transparency] = unsetvalue
+ local nn = tonut(n)
+ setattr(nn,a_color,unsetvalue)
+ setattr(nn,a_transparency,unsetvalue)
return n
end
-function tracers.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
- return setproperties(new_rule(w,h,d),c,s)
-end
-
--- only nodes
+-- this one returns a nut
local nodestracerpool = { }
+local nutstracerpool = { }
tracers.pool = {
nodes = nodestracerpool,
+ nuts = nutstracerpool,
}
-function nodestracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+table.setmetatableindex(nodestracerpool,function(t,k,v)
+ local f = nutstracerpool[k]
+ local v = function(...)
+ return tonode(f(...))
+ end
+ t[k] = v
+ return v
+end)
+
+function nutstracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
return setproperties(new_rule(w,h,d),c,s)
end
tracers.rule = nodestracerpool.rule -- for a while
+
+-- local function show(head,n,message)
+-- print("START",message or "")
+-- local i = 0
+-- for current in traverse(head) do
+-- local prev = getprev(current)
+-- local next = getnext(current)
+-- i = i + 1
+-- print(i, prev and nodecodes[getid(prev)],nodecodes[getid(current)],next and nodecodes[getid(next)])
+-- if i == n then
+-- break
+-- end
+-- end
+-- print("STOP", message or "")
+-- end
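-- A few illustrative calls (not from this commit) for the simplified
-- numbertodimen above; 65536 scaled points equal one point, so:

print(number.todimen(65536))        -- "1pt" (the %p formatter renders sp as pt)
print(number.todimen(65536,"cm"))   -- the same length expressed in centimeters
print(number.topoints(3*65536))     -- "3pt"
print(number.todimen(0,"mm"))       -- "0mm"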
diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua
index dfa570b24..56a4b18ef 100644
--- a/tex/context/base/node-tsk.lua
+++ b/tex/context/base/node-tsk.lua
@@ -117,6 +117,14 @@ function tasks.disableaction(name,action)
end
end
+function tasks.replaceaction(name,group,oldaction,newaction)
+ local data = valid(name)
+ if data then
+ sequencers.replaceaction(data.list,group,oldaction,newaction)
+ data.runner = false
+ end
+end
+
function tasks.setaction(name,action,value)
if value then
tasks.enableaction(name,action)
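-- Hypothetical usage of the new replaceaction (the group and action names here
-- are invented for the example, not taken from this commit): one registered
-- handler is swapped for another without touching the rest of the task list.

nodes.tasks.replaceaction("shipouts","finishers",
    "nodes.handlers.cleanuppage",  -- assumed to sit in this group
    "mymodule.cleanuppage")        -- hypothetical replacement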
diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua
index bfe0051bd..7f5102d5f 100644
--- a/tex/context/base/node-tst.lua
+++ b/tex/context/base/node-tst.lua
@@ -24,17 +24,26 @@ local rightskip_code = skipcodes.rightskip
local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-local find_node_tail = node.tail or node.slide
+local nuts = nodes.nuts
-function nodes.leftmarginwidth(n) -- todo: three values
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getsubtype = nuts.getsubtype
+
+local find_node_tail = nuts.tail
+
+function nuts.leftmarginwidth(n) -- todo: three values
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return n.subtype == leftskip_code and n.spec.width or 0
+ return getsubtype(n) == leftskip_code and getfield(getfield(n,"spec"),"width") or 0
elseif id == whatsit_code then
- n = n.next
+ n = getnext(n)
elseif id == hlist_code then
- return n.width
+ return getfield(n,"width")
else
break
end
@@ -42,15 +51,15 @@ function nodes.leftmarginwidth(n) -- todo: three values
return 0
end
-function nodes.rightmarginwidth(n)
+function nuts.rightmarginwidth(n)
if n then
n = find_node_tail(n)
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return n.subtype == rightskip_code and n.spec.width or 0
+ return getsubtype(n) == rightskip_code and getfield(getfield(n,"spec"),"width") or 0
elseif id == whatsit_code then
- n = n.prev
+ n = getprev(n)
else
break
end
@@ -59,15 +68,15 @@ function nodes.rightmarginwidth(n)
return false
end
-function nodes.somespace(n,all)
+function nuts.somespace(n,all)
if n then
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return (all or (n.spec.width ~= 0)) and glue_code
+ return (all or (getfield(getfield(n,"spec"),"width") ~= 0)) and glue_code
elseif id == kern_code then
- return (all or (n.kern ~= 0)) and kern
+ return (all or (getfield(n,"kern") ~= 0)) and kern
elseif id == glyph_code then
- local category = chardata[n.char].category
+ local category = chardata[getchar(n)].category
-- maybe more category checks are needed
return (category == "zs") and glyph_code
end
@@ -75,12 +84,12 @@ function nodes.somespace(n,all)
return false
end
-function nodes.somepenalty(n,value)
+function nuts.somepenalty(n,value)
if n then
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
if value then
- return n.penalty == value
+ return getfield(n,"penalty") == value
else
return true
end
@@ -89,32 +98,38 @@ function nodes.somepenalty(n,value)
return false
end
-function nodes.is_display_math(head)
- local n = head.prev
+function nuts.is_display_math(head)
+ local n = getprev(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
elseif id == glue_code then
- if n.subtype == abovedisplayshortskip_code then
+ if getsubtype(n) == abovedisplayshortskip_code then
return true
end
else
break
end
- n = n.prev
+ n = getprev(n)
end
- n = head.next
+ n = getnext(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
elseif id == glue_code then
- if n.subtype == belowdisplayshortskip_code then
+ if getsubtype(n) == belowdisplayshortskip_code then
return true
end
else
break
end
- n = n.next
+ n = getnext(n)
end
return false
end
+
+nodes.leftmarginwidth = nodes.vianuts(nuts.leftmarginwidth)
+nodes.rightmarginwidth = nodes.vianuts(nuts.rightmarginwidth)
+nodes.somespace = nodes.vianuts(nuts.somespace)
+nodes.somepenalty = nodes.vianuts(nuts.somepenalty)
+nodes.is_display_math = nodes.vianuts(nuts.is_display_math)
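-- nodes.vianuts is not shown in this hunk; a plausible shape (an assumption,
-- not the actual definition) is a wrapper that converts the head argument so
-- the nodes.* entry points keep accepting ordinary nodes:
--
-- function nodes.vianuts(f)
--     return function(n,...)
--         return f(nodes.nuts.tonut(n),...)
--     end
-- end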
diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua
index 4a2ef8d49..f1aacf25a 100644
--- a/tex/context/base/node-typ.lua
+++ b/tex/context/base/node-typ.lua
@@ -8,33 +8,45 @@ if not modules then modules = { } end modules ['node-typ'] = {
-- code has been moved to blob-ini.lua
-local typesetters = nodes.typesetters or { }
-nodes.typesetters = typesetters
+local typesetters = nodes.typesetters or { }
+nodes.typesetters = typesetters
-local hpack_node_list = nodes.hpack
-local vpack_node_list = nodes.vpack
-local fast_hpack_list = nodes.fasthpack
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
-local nodepool = nodes.pool
+local setfield = nuts.setfield
+local getfont = nuts.getfont
+
+local hpack_node_list = nuts.hpack
+local vpack_node_list = nuts.vpack
+local fast_hpack_list = nuts.fasthpack
+local copy_node = nuts.copy
+
+local nodepool = nuts.pool
local new_glyph = nodepool.glyph
local new_glue = nodepool.glue
local utfvalues = utf.values
-local currentfont = font.current
-local fontparameters = fonts.hashes.parameters
+local currentfont = font.current
+local fontparameters = fonts.hashes.parameters
-local function tonodes(str,fontid,spacing) -- quick and dirty
+local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
local head, prev = nil, nil
if not fontid then
- fontid = currentfont()
+ if templateglyph then
+ fontid = getfont(templateglyph)
+ else
+ fontid = currentfont()
+ end
end
local fp = fontparameters[fontid]
local s, p, m
if spacing then
s, p, m = spacing, 0, 0
else
- s, p, m = fp.space, fp.space_stretch, fp,space_shrink
+ s, p, m = fp.space, fp.space_stretch, fp.space_shrink
end
local spacedone = false
for c in utfvalues(str) do
@@ -44,6 +56,10 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
next = new_glue(s,p,m)
spacedone = true
end
+ elseif templateglyph then
+ next = copy_node(templateglyph)
+ setfield(next,"char",c)
+ spacedone = false
else
next = new_glyph(fontid or 1,c)
spacedone = false
@@ -53,8 +69,8 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
elseif not head then
head = next
else
- prev.next = next
- next.prev = prev
+ setfield(prev,"next",next)
+ setfield(next,"prev",prev)
end
prev = next
end
@@ -77,17 +93,30 @@ end
local tovpackfast = tovpack
-typesetters.tonodes = tonodes
-typesetters.tohpack = tohpack
-typesetters.tohpackfast = tohpackfast
-typesetters.tovpack = tovpack
-typesetters.tovpackfast = tovpackfast
+local tnuts = { }
+nuts.typesetters = tnuts
+
+tnuts.tonodes = tonodes
+tnuts.tohpack = tohpack
+tnuts.tohpackfast = tohpackfast
+tnuts.tovpack = tovpack
+tnuts.tovpackfast = tovpackfast
+
+tnuts.hpack = tohpack -- obsolete
+tnuts.fast_hpack = tohpackfast -- obsolete
+tnuts.vpack = tovpack -- obsolete
+
+typesetters.tonodes = function(...) local h, b = tonodes (...) return tonode(h), b end
+typesetters.tohpack = function(...) local h, b = tohpack (...) return tonode(h), b end
+typesetters.tohpackfast = function(...) local h, b = tohpackfast(...) return tonode(h), b end
+typesetters.tovpack = function(...) local h, b = tovpack (...) return tonode(h), b end
+typesetters.tovpackfast = function(...) local h, b = tovpackfast(...) return tonode(h), b end
-typesetters.hpack = tohpack
-typesetters.fast_hpack = tohpackfast
-typesetters.vpack = tovpack
+typesetters.hpack = typesetters.tohpack -- obsolete
+typesetters.fast_hpack = typesetters.tohpackfast -- obsolete
+typesetters.vpack = typesetters.tovpack -- obsolete
-- node.write(nodes.typesetters.hpack("Hello World!"))
-- node.write(nodes.typesetters.hpack("Hello World!",1,100*1024*10))
-string.tonodes = tonodes -- quite convenient
+string.tonodes = function(...) return tonode(tonodes(...)) end -- quite convenient
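-- Usage sketch (not from this commit): the node level wrappers convert back
-- from nuts, so their results can be written out directly; the extra parens
-- drop the second (boolean) return value.

node.write((nodes.typesetters.tohpack("Hello World!")))
node.write(string.tonodes("Hello World!"))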
diff --git a/tex/context/base/pack-bck.mkvi b/tex/context/base/pack-bck.mkvi
index 72eafd282..bb4b72252 100644
--- a/tex/context/base/pack-bck.mkvi
+++ b/tex/context/base/pack-bck.mkvi
@@ -32,7 +32,7 @@
%D \starttyping
%D \setupbackground
%D [backgroundoffset=4pt,
-%D background=screen,
+%D background=color,
%D frame=on,
%D framecolor=red,
%D leftoffset=2pt]
@@ -206,14 +206,9 @@
\c!corner=\v!rectangular,
\c!frame=\v!off,
\c!depth=\zeropoint,
-% \c!color=,
-% \c!background=\v!screen,
-% \c!backgroundcolor=\backgroundparameter\c!color,
-% \c!screen=\defaultbackgroundscreen,
-%
+ % \c!color=,
\c!background=\v!color,
\c!backgroundcolor=lightgray,
-%
\c!before=,
\c!after=]
diff --git a/tex/context/base/pack-box.mkiv b/tex/context/base/pack-box.mkiv
index f8b36691c..6e3bab6f4 100644
--- a/tex/context/base/pack-box.mkiv
+++ b/tex/context/base/pack-box.mkiv
@@ -24,10 +24,10 @@
%D which in itself is ok, but can lead to loops due to rounding errors (happened
%D in demo-obv).
-\definelayer[\v!text-2][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-\definelayer[\v!text-1][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-\definelayer[\v!text+1][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-\definelayer[\v!text+2][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
+\definelayer[\v!text-2][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
+\definelayer[\v!text-1][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
+\definelayer[\v!text+1][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
+\definelayer[\v!text+2][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
\unexpanded\def\internaltextoverlay#1% will become more generic and installable
{\startoverlay % i.e. probably an overlay by itself
@@ -436,7 +436,7 @@
\def\pack_ornament_text[#1][#2]%
{\bgroup
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\letdummyparameter\c!alternative\v!a
\getdummyparameters[#1]%
\doifelse{\directdummyparameter\c!alternative}\v!a
@@ -552,7 +552,7 @@
%
\doifelse{\bleedingparameter\c!stretch}\v!yes\donetrue\donefalse
%
- \xdef\bleedwidth{\dimexpr
+ \xdef\bleedwidth{\the\dimexpr
\ifdone
\ifconditional\c_pack_boxes_l
\scratchwidth+\MPx\currentbgposition-\MPx\currentpageposition
@@ -563,8 +563,9 @@
\fi\fi
\else
\scratchwidth
- \fi+\scratchhoffset}%
- \xdef\bleedheight{\dimexpr
+ \fi+\scratchhoffset
+ \relax}%
+ \xdef\bleedheight{\the\dimexpr
\ifdone
\ifconditional\c_pack_boxes_t
\paperheight -\MPy\currentbgposition+\MPy\currentpageposition % not checked
@@ -575,7 +576,8 @@
\fi\fi
\else
\scratchheight
- \fi+\scratchvoffset}%
+ \fi+\scratchvoffset
+ \relax}%
\dowithnextboxcontentcs\pack_boxes_bleed_settings\pack_boxes_bleed_finish\hbox}
\def\pack_boxes_bleed_settings
@@ -678,7 +680,7 @@
\dowithnextboxcontent
{\forgetall
\hsize\directdummyparameter\c!width
- \normalexpanded{\setupalign[\directdummyparameter\c!align]}%
+ \usealignparameter\directdummyparameter
\dousestyleparameter{\directdummyparameter\c!style}}
{\setlayer[#1][#2]{\strut\dousecolorparameter{\directdummyparameter\c!color}\flushnextbox}% maybe expand the color
\egroup}%
@@ -977,22 +979,43 @@
\box\scratchbox
\egroup}
-% \backgroundimage{1}{\hsize}{\vsize}{\externalfigure[cow][\c!width=3cm]}
+% \backgroundimage{1}{\hsize}{\vsize}{\externalfigure[cow][\c!width=3cm]}
+
+% \framed[offset=overlay,width=6cm,height=3cm]{\backgroundimage {1}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=6cm,height=3cm]{\backgroundimage {2}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=6cm,height=3cm]{\backgroundimage {3}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+%
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[mill]}}
\unexpanded\def\backgroundimage#1#2#3% repeat hsize vsize
{\bgroup
\forgetall
- \dowithnextbox{\pack_boxes_background_image{#1}{#2}{#3}}\hbox}
+ \scratchcounter#1\relax
+ \scratchwidth #2\relax
+ \scratchheight #3\relax
+ \dowithnextboxcs\pack_boxes_background_image\hbox}
-\def\pack_boxes_background_image#1#2#3%
+\def\pack_boxes_background_image
{\offinterlineskip
- \ifcase#1\relax
+ \ifcase\scratchcounter
% just one
\else
- \scratchdimen#2\divide\scratchdimen\wd\nextbox\scratchnx\scratchdimen\advance\scratchnx\plusone\relax
- \scratchdimen#3\divide\scratchdimen\ht\nextbox\scratchny\scratchdimen\advance\scratchny\plusone\relax
+ \scratchdimen\scratchwidth \divide\scratchdimen\wd\nextbox\scratchnx\scratchdimen\advance\scratchnx\plusone\relax
+ \scratchdimen\scratchheight\divide\scratchdimen\ht\nextbox\scratchny\scratchdimen\advance\scratchny\plusone\relax
% to be considered: methods
- \ifcase#1%
+ \ifcase\scratchcounter
\or % x and y
\setbox\nextbox\hbox{\dorecurse\scratchnx{\copy\nextbox}}%
\setbox\nextbox\vbox{\dorecurse\scratchny{\copy\nextbox\endgraf}}%
@@ -1002,13 +1025,59 @@
\setbox\nextbox\vbox{\dorecurse\scratchny{\copy\nextbox\endgraf}}%
\fi
\fi
- \ifdim\wd\nextbox>#2\relax
- \setbox\nextbox\hbox to #2{\hss\box\nextbox\hss}%
- \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=#2,\c!height=\the\ht\nextbox]{\box\nextbox}}}%
+ \ifdim\wd\nextbox>\scratchwidth
+ \setbox\nextbox\hbox to \scratchwidth{\hss\box\nextbox\hss}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\scratchwidth,\c!height=\the\ht\nextbox]{\box\nextbox}}}%
+ \fi
+ \ifdim\ht\nextbox>\scratchheight
+ \setbox\nextbox\vbox to \scratchheight{\vss\box\nextbox\vss}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\wd\nextbox,\c!height=\the\scratchheight]{\box\nextbox}}}%
+ \fi
+ \box\nextbox
+ \egroup}
+
+\unexpanded\def\backgroundimagefill#1#2#3% repeat hsize vsize
+ {\bgroup
+ \forgetall
+ \scratchcounter#1\relax
+ \scratchwidth #2\relax
+ \scratchheight #3\relax
+ \dowithnextboxcs\pack_boxes_background_image_fill\hbox}
+
+\def\pack_boxes_background_image_fill
+ {\offinterlineskip
+ \setbox\nextbox\hbox\bgroup
+ \ifdim\scratchwidth>\scratchheight
+ \scale[\c!width=\the\scratchwidth]{\box\nextbox}%
+ \else
+ \scale[\c!height=\the\scratchheight]{\box\nextbox}%
+ \fi
+ \egroup
+ \ifdim\wd\nextbox>\scratchwidth
+ \setbox\nextbox\hbox to \scratchwidth
+ {\ifcase\scratchcounter
+ \hss\box\nextbox\hss
+ \or
+ \box\nextbox\hss
+ \or
+ \hss\box\nextbox
+ \else
+ \hss\box\nextbox\hss
+ \fi}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\scratchwidth,\c!height=\the\ht\nextbox]{\box\nextbox}}}%
\fi
- \ifdim\ht\nextbox>#3\relax
- \setbox\nextbox\vbox to #3{\vss\box\nextbox\vss}%
- \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\wd\nextbox,\c!height=#3]{\box\nextbox}}}%
+ \ifdim\ht\nextbox>\scratchheight
+ \setbox\nextbox\vbox to \scratchheight
+ {\ifcase\scratchcounter
+ \vss\box\nextbox\vss
+ \or
+ \box\nextbox\vss
+ \or
+ \vss\box\nextbox
+ \else
+ \vss\box\nextbox\vss
+ \fi}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\wd\nextbox,\c!height=\the\scratchheight]{\box\nextbox}}}%
\fi
\box\nextbox
\egroup}
diff --git a/tex/context/base/pack-com.mkiv b/tex/context/base/pack-com.mkiv
index 2c28d6b20..b734d6028 100644
--- a/tex/context/base/pack-com.mkiv
+++ b/tex/context/base/pack-com.mkiv
@@ -178,7 +178,7 @@
\unexpanded\def\pack_common_caption_stop {\removeunwantedspaces\egroup}
\unexpanded\def\stopcombination
- {\bgroup\normalexpanded{\egroup{}\ctxcommand{ntimes("{}{}",\number\c_pack_combinations_n)}}% brr
+ {\bgroup\normalexpanded{\egroup{}\ntimes{{}{}}\c_pack_combinations_n}% brr
\dostoptagged
\egroup
\egroup}
@@ -191,19 +191,19 @@
\edef\currentcombination{#1}%
\edef\currentcombinationspec{#2}%
\ifx\currentcombinationspec\empty
- \doifassignmentelse{#1}%
+ \doifelseassignment{#1}%
{\let\currentcombination\empty
\setupcurrentcombination[#1]%
\edef\currentcombinationspec{\combinationparameter\c!nx*\combinationparameter\c!ny*}}
- {\doifinstringelse{*}\currentcombination
+ {\doifelseinstring{*}\currentcombination
{\edef\currentcombinationspec{\currentcombination*\plusone*}%
\let\currentcombination\empty}
- {\doifnumberelse\currentcombination
+ {\doifelsenumber\currentcombination
{\edef\currentcombinationspec{\currentcombination*\plusone*}%
\let\currentcombination\empty}
{\edef\currentcombinationspec{\combinationparameter\c!nx*\combinationparameter\c!ny*}}}}%
\else
- \doifassignmentelse{#2}%
+ \doifelseassignment{#2}%
{\setupcurrentcombination[#2]%
\edef\currentcombinationspec{\combinationparameter\c!nx*\combinationparameter\c!ny*}}
{\edef\currentcombinationspec{\currentcombinationspec*\plusone*}}%
@@ -221,13 +221,12 @@
\edef\p_height {\combinationparameter\c!height}%
\edef\p_width {\combinationparameter\c!width}%
\edef\p_location{\combinationparameter\c!location}%
- \edef\p_align {\combinationparameter\c!align}%
\edef\p_distance{\combinationparameter\c!distance}%
%
\pack_combinations_location_reset
\rawprocesscommacommand[\p_location]\pack_combinations_location_step
%
- \dostarttagged\t!combination\currentcombination
+ \dostarttaggedchained\t!combination\currentcombination\??combination
\vbox \ifx\p_height\v!fit\else to \p_height \fi \bgroup
\let\combination\empty % permits \combination{}{} handy for cld
\normalexpanded{\pack_combinations_start_indeed[\currentcombinationspec]}}
@@ -295,7 +294,7 @@
\def\pack_combinations_alternative_label_indeed
{\dowithnextboxcs\pack_combinations_pickup_caption\vtop\bgroup
\hsize\wd\b_pack_combinations_content
- \ifx\p_align\empty\else\setupalign[\p_align]\fi
+ \usealignparameter\combinationparameter
\usecombinationstyleandcolor\c!style\c!color
\begstrut
\normalexpanded{\strc_labels_command{\v!combination\ifx\currentcombination\empty\else:\currentcombination\fi}}%
@@ -324,15 +323,18 @@
\def\pack_combinations_caption_second
{\ifx\nexttoken\egroup
% the caption is empty
+ \else\ifx\nexttoken\stopcaption
+ % the caption is empty (new per 2014-05-24)
\else
+ % todo: \p_pack_combinations_alternative\v!none: no style, strut etc
\hsize\wd\b_pack_combinations_content
- \ifx\p_align\empty\else\setupalign[\p_align]\fi
+ \usealignparameter\combinationparameter
\usecombinationstyleandcolor\c!style\c!color
\bgroup
\aftergroup\endstrut
\aftergroup\egroup
\begstrut
- \fi}
+ \fi\fi}
\def\pack_combinations_pickup_package_pair % we need to store the caption row
{\vbox
@@ -626,12 +628,12 @@
\unexpanded\def\placepairedbox[#1]%
{\bgroup
\edef\currentpairedbox{#1}%
- \doifnextoptionalelse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
+ \doifelsenextoptionalcs\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
\unexpanded\def\startplacepairedbox[#1]%
{\bgroup
\edef\currentpairedbox{#1}%
- \doifnextoptionalelse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
+ \doifelsenextoptionalcs\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
\unexpanded\def\stopplacepairedbox
{}
@@ -780,7 +782,7 @@
\fi}
\def\pack_pairedboxes_between
- {\switchtobodyfont[\pairedboxparameter\c!bodyfont]% split under same regime
+ {\usebodyfontparameter\pairedboxparameter
\setbox\b_pack_pairedboxes_first\box\nextbox
\ifconditional\c_pack_pairedboxes_horizontal
\pack_pairedboxes_between_horizontal
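% A minimal usage sketch of the combination specs parsed above: either an explicit
% nx*ny pair, a bare column count, or a settings list; the framed placeholders
% below are hypothetical content.
%
% \startcombination[2*1]
%   {\framed{one}} {first caption}
%   {\framed{two}} {second caption}
% \stopcombination
%
% \startcombination[nx=2,ny=2]
%   {\framed{a}} {a} {\framed{b}} {b}
%   {\framed{c}} {c} {\framed{d}} {d}
% \stopcombination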
diff --git a/tex/context/base/pack-fen.mkiv b/tex/context/base/pack-fen.mkiv
index 4253eeaa7..04a36fa46 100644
--- a/tex/context/base/pack-fen.mkiv
+++ b/tex/context/base/pack-fen.mkiv
@@ -50,12 +50,12 @@
\else
\def\setinstalledframedimensions
- {\edef\overlaywidth {\the\frameddimenwd\space}%
- \edef\overlayheight {\the\dimexpr\frameddimenht+\frameddimendp\relax\space}%
- \edef\overlaydepth {\the\frameddimendp\space}%
+ {\d_overlay_width \frameddimenwd
+ \d_overlay_height \dimexpr\frameddimenht+\frameddimendp\relax
+ \d_overlay_depth \frameddimendp
+ \d_overlay_linewidth \ruledlinewidth
\edef\overlaycolor {\framedparameter\c!backgroundcolor}%
- \edef\overlaylinecolor{\framedparameter\c!framecolor}%
- \edef\overlaylinewidth{\the\ruledlinewidth}}
+ \edef\overlaylinecolor{\framedparameter\c!framecolor}}
\fi
@@ -63,7 +63,7 @@
\def\whateverleftframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\vbox to \overlayheight{\vss#1\vss}%
+ \setbox\b_framed_rendered\vbox to \d_overlay_height{\vss#1\vss}%
\setbox\b_framed_rendered\hbox to \zeropoint{\box\b_framed_rendered\hss}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
@@ -71,7 +71,7 @@
\def\whateverrightframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\vbox to \overlayheight{\vss#1\vss}%
+ \setbox\b_framed_rendered\vbox to \d_overlay_height{\vss#1\vss}%
\setbox\b_framed_rendered\hbox to \zeropoint{\hss\box\b_framed_rendered}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
@@ -79,7 +79,7 @@
\def\whatevertopframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\hbox to \overlaywidth{\hss#1\hss}%
+ \setbox\b_framed_rendered\hbox to \d_overlay_width{\hss#1\hss}%
\setbox\b_framed_rendered\vbox to \zeropoint{\box\b_framed_rendered\vss}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
@@ -88,7 +88,7 @@
\def\whateverbottomframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\hbox to \overlaywidth{\hss#1\hss}%
+ \setbox\b_framed_rendered\hbox to \d_overlay_width{\hss#1\hss}%
\setbox\b_framed_rendered\vbox to \zeropoint{\vss\box\b_framed_rendered}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
diff --git a/tex/context/base/pack-lyr.mkiv b/tex/context/base/pack-lyr.mkiv
index a891c998d..a847dec67 100644
--- a/tex/context/base/pack-lyr.mkiv
+++ b/tex/context/base/pack-lyr.mkiv
@@ -101,9 +101,6 @@
\def\layeranchor{\currentlayer:\the\realpageno}
-\unexpanded\def\anch_mark_anchor_box#1%
- {\ctxcommand{markregionbox(\number#1,"\layeranchor")}} % needs an hbox
-
\let\p_pack_layers_doublesided\empty
\let\p_pack_layers_state \empty
\let\p_pack_layers_option \empty
@@ -229,7 +226,7 @@
\else\ifthirdargument
\pack_layers_set_indeed[#1][#2][#3]%
\else
- \doifassignmentelse{#2}
+ \doifelseassignment{#2}
{\pack_layers_set_indeed[#1][][#2]}%
{\pack_layers_set_indeed[#1][#2][]}%
\fi\fi}
@@ -323,7 +320,7 @@
{\dodoubleargument\pack_layers_define_preset}
\def\pack_layers_define_preset[#1][#2]%
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{\setvalue{\??layerpreset#1}{\setupcurrentlayer[#2]}}
{\setvalue{\??layerpreset#1}{\csname\??layerpreset#2\endcsname}}}
@@ -510,6 +507,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflayerdataelse\doifelselayerdata
+
%D \macros
%D {flushlayer}
%D
@@ -610,7 +609,7 @@
% {\setlayoutcomponentattribute{\v!layer:#2}}%
% \resetlayoutcomponentattribute
% \ifx\p_pack_layers_option\v!test \ruledvbox \else \vbox \fi \ifx\p_pack_layers_method\v!overlay to \overlayheight \fi \layoutcomponentboxattribute
-% {\hbox \ifx\p_pack_layers_method\v!overlay to \overlaywidth \fi
+% {\hbox \ifx\p_pack_layers_method\v!overlay to \d_overlay_width \fi
% {\edef\currentlayer{#2\the\realpageno}% local
% \edef\p_pack_layers_position{\layerparameter\c!position}% local
% \ifx\p_pack_layers_position\v!yes
@@ -669,15 +668,15 @@
\pack_layers_positioned_box_nop
\fi
% todo: method=offset => overlayoffset right/down (handy for backgrounds with offset)
- \doifoverlayelse{#2}%
+ \doifelseoverlay{#2}%
{\setlayoutcomponentattribute{\v!layer:#2}}%
\resetlayoutcomponentattribute
% we have conflicting demands: some mechanisms want ll anchoring .. I need to figure this out
% and maybe we will have 'origin=bottom' or so
\setbox\nextbox
- \ifx\p_pack_layers_option\v!test \ruledvbox \else \vbox \fi \ifx\p_pack_layers_method\v!overlay to \overlayheight \fi \layoutcomponentboxattribute
+ \ifx\p_pack_layers_option\v!test \ruledvbox \else \vbox \fi \ifx\p_pack_layers_method\v!overlay to \d_overlay_height \fi \layoutcomponentboxattribute
{\pack_layers_top_fill
- \hbox \ifx\p_pack_layers_method\v!overlay to \overlaywidth \fi
+ \hbox \ifx\p_pack_layers_method\v!overlay to \d_overlay_width \fi
{\box\nextbox
\hss}%
\pack_layers_bottom_fill}%
@@ -687,7 +686,7 @@
\ifx\p_pack_layers_position\v!yes
\edef\p_pack_layers_region{\layerparameter\c!region}%
\ifx\p_pack_layers_region\empty \else
- \anch_mark_anchor_box\nextbox
+ \anch_mark_tagged_box\nextbox\layeranchor
\fi
\fi
\box\nextbox
@@ -724,12 +723,20 @@
\unexpanded\def\composedlayer#1{\flushlayer[#1]}
+% \unexpanded\def\tightlayer[#1]%
+% {\hbox
+% {\def\currentlayer{#1}% todo: left/right
+% \setbox\nextbox\emptybox
+% \hsize\layerparameter\c!width
+% \vsize\layerparameter\c!height
+% \composedlayer{#1}}}
+
\unexpanded\def\tightlayer[#1]%
{\hbox
{\def\currentlayer{#1}% todo: left/right
- \setbox\nextbox\emptybox % hoogte/breedte are \wd\nextbox/\ht\nextbox
- \hsize\layerparameter\c!width % \overlaywidth = \hsize
- \vsize\layerparameter\c!height % \overlaywheight = \vsize
+ \setbox\nextbox\emptybox
+ \d_overlay_width \layerparameter\c!width
+ \d_overlay_height\layerparameter\c!height
\composedlayer{#1}}}
\let\placelayer\flushlayer
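% A minimal sketch of \tightlayer as redefined above: the layer's width and height
% now feed the overlay dimensions instead of \hsize/\vsize. The layer name and
% content below are hypothetical.
%
% \definelayer[demo][width=4cm,height=3cm]
% \setlayer[demo][x=.5cm,y=.5cm]{\framed{hello}}
% \framed[offset=overlay]{\tightlayer[demo]}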
diff --git a/tex/context/base/pack-mis.mkvi b/tex/context/base/pack-mis.mkvi
index 978cc120c..420f9440a 100644
--- a/tex/context/base/pack-mis.mkvi
+++ b/tex/context/base/pack-mis.mkvi
@@ -46,7 +46,7 @@
\unexpanded\def\pack_placement#tag%
{\bgroup
\edef\currentplacement{#tag}%
- \doifnextoptionalelse\pack_placement_yes\pack_placement_nop}
+ \doifelsenextoptionalcs\pack_placement_yes\pack_placement_nop}
\def\pack_placement_yes[#settings]%
{\setupcurrentplacement[#settings]%
@@ -67,6 +67,7 @@
%\ifinsidefloat \else
% \page_backgrounds_add_local_to_box\nextbox
%\fi
+\flushnotes % new per 2014-05-29 : todo: move them up in the mvl
\ifgridsnapping
\pack_placement_flush_grid_yes
\else
diff --git a/tex/context/base/pack-mrl.mkiv b/tex/context/base/pack-mrl.mkiv
index 7c3f08825..a97c9e6f8 100644
--- a/tex/context/base/pack-mrl.mkiv
+++ b/tex/context/base/pack-mrl.mkiv
@@ -40,7 +40,7 @@
\unexpanded\def\blackrule
{\hbox\bgroup
- \doifnextoptionalelse\pack_black_rule_pickup\pack_black_rule_indeed}
+ \doifelsenextoptionalcs\pack_black_rule_pickup\pack_black_rule_indeed}
\def\pack_black_rule_pickup[#1]%
{\setupcurrentblackrules[#1]%
@@ -96,7 +96,7 @@
\unexpanded\def\blackrules % probably never used
{\hbox\bgroup
- \doifnextoptionalelse\pack_black_rules_pickup\pack_black_rules_indeed}
+ \doifelsenextoptionalcs\pack_black_rules_pickup\pack_black_rules_indeed}
\def\pack_black_rules_pickup[#1]%
{\setupcurrentblackrules[#1]%
@@ -145,7 +145,8 @@
%D \showsetup{hl}
\unexpanded\def\pack_rule_vl_indeed#1#2#3%
- {\bgroup
+ {\dontleavehmode
+ \begingroup
\setbox\scratchbox\hbox
{\vrule
\s!width #1\linewidth
@@ -154,13 +155,14 @@
\dp\scratchbox\strutdp
\ht\scratchbox\strutht
\box\scratchbox
- \egroup}
+ \endgroup}
\def\pack_rule_vl[#1]%
{\pack_rule_vl_indeed{#1}{#1}{#1}}
\def\pack_rule_hl[#1]%
- {\hbox
+ {\dontleavehmode
+ \hbox
{\vrule
\s!width #1\emwidth
\s!height\linewidth
@@ -614,7 +616,7 @@
\fi
\doifelse{\directtextrulesparameter\c!depthcorrection}\v!on\pack_textrule_correct_depth_yes\pack_textrule_correct_depth_nop
\nointerlineskip
- \dontleavehmode\vbox
+ \noindent\vbox % was \dontleavehmode
{\color[\directtextrulesparameter\c!rulecolor]
{\hrule\s!depth\directtextrulesparameter\c!rulethickness\s!height\zeropoint\s!width\availablehsize}}}
{\pack_textrule_with_text{#1}}%
@@ -629,7 +631,7 @@
\def\pack_textrule_nop_indeed{\csname\??textrulealternatives\v!middle\endcsname}%
\csname\??textrulealternatives\v!top\endcsname{#1}%
\bgroup
- \doifsomething{\directtextrulesparameter\c!bodyfont}{\switchtobodyfont[\directtextrulesparameter\c!bodyfont]}}
+ \usebodyfontparameter\directtextrulesparameter}
\unexpanded\def\stoptextrule
{\par
diff --git a/tex/context/base/pack-obj.lua b/tex/context/base/pack-obj.lua
index 70876a346..d1cc5bafc 100644
--- a/tex/context/base/pack-obj.lua
+++ b/tex/context/base/pack-obj.lua
@@ -11,19 +11,21 @@ if not modules then modules = { } end modules ['pack-obj'] = {
reusable components.</p>
--ldx]]--
-local commands, context = commands, context
+local context = context
-local allocate = utilities.storage.allocate
+local implement = interfaces.implement
-local collected = allocate()
-local tobesaved = allocate()
+local allocate = utilities.storage.allocate
-local jobobjects = {
+local collected = allocate()
+local tobesaved = allocate()
+
+local jobobjects = {
collected = collected,
tobesaved = tobesaved,
}
-job.objects = jobobjects
+job.objects = jobobjects
local function initializer()
collected = jobobjects.collected
@@ -32,45 +34,60 @@ end
job.register('job.objects.collected', tobesaved, initializer, nil)
-function jobobjects.save(tag,number,page)
+local function saveobject(tag,number,page)
local t = { number, page }
tobesaved[tag], collected[tag] = t, t
end
-function jobobjects.set(tag,number,page)
+local function setobject(tag,number,page)
collected[tag] = { number, page }
end
-function jobobjects.get(tag)
+local function getobject(tag)
return collected[tag] or tobesaved[tag]
end
-function jobobjects.number(tag,default)
+local function getobjectnumber(tag,default)
local o = collected[tag] or tobesaved[tag]
return o and o[1] or default
end
-function jobobjects.page(tag,default)
+local function getobjectpage(tag,default)
local o = collected[tag] or tobesaved[tag]
return o and o[2] or default
end
--- interface
+jobobjects.save = saveobject
+jobobjects.set = setobject
+jobobjects.get = getobject
+jobobjects.number = getobjectnumber
+jobobjects.page = getobjectpage
-commands.saveobject = jobobjects.save
-commands.setobject = jobobjects.set
+implement {
+ name = "saveobject",
+ actions = saveobject
+}
-function commands.objectnumber(tag,default)
- local o = collected[tag] or tobesaved[tag]
- context(o and o[1] or default)
-end
+implement {
+ name = "setobject",
+ actions = setobject,
+ arguments = { "string", "integer", "integer" }
+}
-function commands.objectpage(tag,default)
- local o = collected[tag] or tobesaved[tag]
- context(o and o[2] or default)
-end
+implement {
+ name = "objectnumber",
+ actions = { getobjectnumber, context },
+ arguments = { "string", "string" },
+}
-function commands.doifobjectreferencefoundelse(tag)
- commands.doifelse(collected[tag] or tobesaved[tag])
-end
+implement {
+ name = "objectpage",
+ actions = { getobjectpage, context },
+ arguments = { "string", "string" },
+}
+implement {
+ name = "doifelseobjectreferencefound",
+ actions = { jobobjects.get, commands.doifelse },
+ arguments = "string"
+}
diff --git a/tex/context/base/pack-obj.mkiv b/tex/context/base/pack-obj.mkiv
index 356a0b7eb..605dd3b9e 100644
--- a/tex/context/base/pack-obj.mkiv
+++ b/tex/context/base/pack-obj.mkiv
@@ -366,7 +366,7 @@
% no undefined test ! ! ! ! (pdftex fails on undefined objects)
\unexpanded\def\pack_objects_register_reference#1#2#3{\normalexpanded{\noexpand\ctxlatecommand{saveobject("#1::#2",#3,\noexpand\the\realpageno)}}}
-\unexpanded\def\pack_objects_overload_reference#1#2#3{\ctxcommand{setobject("#1::#2",#3,\the\realpageno)}}
+\unexpanded\def\pack_objects_overload_reference#1#2#3{\clf_setobject{#1::#2}#3 \realpageno\relax}
\unexpanded\def\dosetobjectreference
{\ifcase\crossreferenceobject
@@ -382,8 +382,8 @@
\def\defaultobjectreference#1#2{0} % driver dependent
\def\defaultobjectpage #1#2{\realfolio}
-\unexpanded\def\dogetobjectreference #1#2#3{\xdef#3{\ctxcommand{objectnumber("#1::#2","\defaultobjectreference{#1}{#2}")}}}
-\unexpanded\def\dogetobjectreferencepage#1#2#3{\xdef#3{\ctxcommand{objectpage("#1::#2","\defaultobjectpage{#1}{#2}")}}}
+\unexpanded\def\dogetobjectreference #1#2#3{\xdef#3{\clf_objectnumber{#1::#2}{\defaultobjectreference{#1}{#2}}}}
+\unexpanded\def\dogetobjectreferencepage#1#2#3{\xdef#3{\clf_objectpage {#1::#2}{\defaultobjectpage {#1}{#2}}}}
\unexpanded\def\setobject {\driverreferenced\pack_objects_set1}
\unexpanded\def\settightobject{\driverreferenced\pack_objects_set0}
@@ -399,14 +399,17 @@
%D \doifobjectreferencefoundelse{class}{object}{do then}{do else}
%D \stoptyping
-\unexpanded\def\doifobjectfoundelse#1#2%
+\unexpanded\def\doifelseobjectfound#1#2%
{\ifcsname\??objects#1::#2\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doifobjectreferencefoundelse#1#2%
- {\ctxcommand{doifobjectreferencefoundelse("#1::#2")}}
+\unexpanded\def\doifelseobjectreferencefound#1#2%
+ {\clf_doifelseobjectreferencefound{#1::#2}}
+
+\let\doifobjectfoundelse \doifelseobjectfound
+\let\doifobjectreferencefoundelse\doifelseobjectreferencefound
\protect \endinput
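% A minimal sketch of the renamed object tests defined above; the class and name
% used here are hypothetical.
%
% \setobject{demo}{1}\hbox{stored content}
% \doifelseobjectfound{demo}{1}{known}{unknown}
% \doifelseobjectreferencefound{demo}{1}{referenced}{not (yet) referenced}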
diff --git a/tex/context/base/pack-pos.mkiv b/tex/context/base/pack-pos.mkiv
index f92ceb78a..c52c01ca4 100644
--- a/tex/context/base/pack-pos.mkiv
+++ b/tex/context/base/pack-pos.mkiv
@@ -62,7 +62,7 @@
\edef\currentpositioning{#1}%
\setupcurrentpositioning[#2]%
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\let\currentpositioning\empty
\setupcurrentpositioning[#1]}%
{\edef\currentpositioning{#1}}%
diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua
index 329ea63b8..151642c3a 100644
--- a/tex/context/base/pack-rul.lua
+++ b/tex/context/base/pack-rul.lua
@@ -14,6 +14,14 @@ if not modules then modules = { } end modules ['pack-rul'] = {
-- challenge: adapt glue_set
-- setfield(h,"glue_set", getfield(h,"glue_set") * getfield(h,"width")/maxwidth -- interesting ... doesn't matter much
+-- \framed[align={lohi,middle}]{$x$}
+-- \framed[align={lohi,middle}]{$ $}
+-- \framed[align={lohi,middle}]{\hbox{ }}
+-- \framed[align={lohi,middle}]{\hbox{}}
+-- \framed[align={lohi,middle}]{$\hskip2pt$}
+
+local type = type
+
local hlist_code = nodes.nodecodes.hlist
local vlist_code = nodes.nodecodes.vlist
local box_code = nodes.listcodes.box
@@ -21,15 +29,26 @@ local line_code = nodes.listcodes.line
local texsetdimen = tex.setdimen
local texsetcount = tex.setcount
-local texgetbox = tex.getbox
-local hpack = nodes.hpack
-local free = nodes.free
-local copy = nodes.copy_list
-local traverse_id = nodes.traverse_id
-local node_dimensions = nodes.dimensions
-function commands.doreshapeframedbox(n)
- local box = texgetbox(n)
+local implement = interfaces.implement
+
+local nuts = nodes.nuts
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+
+local hpack = nuts.hpack
+local traverse_id = nuts.traverse_id
+local node_dimensions = nuts.dimensions
+
+local function doreshapeframedbox(n)
+ local box = getbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
@@ -38,27 +57,27 @@ function commands.doreshapeframedbox(n)
local maxwidth = 0
local totalwidth = 0
local averagewidth = 0
- local boxwidth = box.width
+ local boxwidth = getfield(box,"width")
if boxwidth ~= 0 then -- and h.subtype == vlist_code
- local list = box.list
+ local list = getlist(box)
if list then
local function check(n,repack)
if not firstheight then
- firstheight = n.height
+ firstheight = getfield(n,"height")
end
- lastdepth = n.depth
+ lastdepth = getfield(n,"depth")
noflines = noflines + 1
- local l = n.list
+ local l = getlist(n)
if l then
if repack then
- local subtype = n.subtype
+ local subtype = getsubtype(n)
if subtype == box_code or subtype == line_code then
- lastlinelength = node_dimensions(l,n.dir) -- used to be: hpack(copy(l)).width
+ lastlinelength = node_dimensions(l,getfield(n,"dir")) -- used to be: hpack(copy(l)).width
else
- lastlinelength = n.width
+ lastlinelength = getfield(n,"width")
end
else
- lastlinelength = n.width
+ lastlinelength = getfield(n,"width")
end
if lastlinelength > maxwidth then
maxwidth = lastlinelength
@@ -80,33 +99,34 @@ function commands.doreshapeframedbox(n)
-- vdone = true
end
if not firstheight then
- -- done
+ -- done)
elseif maxwidth ~= 0 then
if hdone then
for h in traverse_id(hlist_code,list) do
- local l = h.list
+ local l = getlist(h)
if l then
- local subtype = h.subtype
+ local subtype = getsubtype(h)
if subtype == box_code or subtype == line_code then
- h.list = hpack(l,maxwidth,'exactly',h.dir)
- h.shift = 0 -- needed for display math
+ l = hpack(l,maxwidth,'exactly',getfield(h,"dir")) -- multiple return values
+ setfield(h,"list",l)
+ setfield(h,"shift",0) -- needed for display math, so no width check possible
end
- h.width = maxwidth
+ setfield(h,"width",maxwidth)
end
end
- box.width = maxwidth -- moved
- averagewidth = noflines > 0 and totalwidth/noflines or 0
end
-- if vdone then
-- for v in traverse_id(vlist_code,list) do
- -- local width = n.width
+ -- local width = getfield(n,"width")
-- if width > maxwidth then
- -- v.width = maxwidth
+ -- setfield(v,"width",maxwidth)
-- end
-- end
-- end
- box.width = maxwidth
+ setfield(box,"width",maxwidth)
averagewidth = noflines > 0 and totalwidth/noflines or 0
+ else -- e.g. empty math {$ $} or \hbox{} or ...
+setfield(box,"width",0)
end
end
end
@@ -118,19 +138,19 @@ function commands.doreshapeframedbox(n)
texsetdimen("global","framedaveragewidth",averagewidth)
end
-function commands.doanalyzeframedbox(n)
- local box = texgetbox(n)
+local function doanalyzeframedbox(n)
+ local box = getbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
- if box.width ~= 0 then
- local list = box.list
+ if getfield(box,"width") ~= 0 then
+ local list = getlist(box)
if list then
local function check(n)
if not firstheight then
- firstheight = n.height
+ firstheight = getfield(n,"height")
end
- lastdepth = n.depth
+ lastdepth = getfield(n,"depth")
noflines = noflines + 1
end
for h in traverse_id(hlist_code,list) do
@@ -145,3 +165,46 @@ function commands.doanalyzeframedbox(n)
texsetdimen("global","framedfirstheight",firstheight or 0)
texsetdimen("global","framedlastdepth",lastdepth or 0)
end
+
+implement { name = "doreshapeframedbox", actions = doreshapeframedbox, arguments = "integer" }
+implement { name = "doanalyzeframedbox", actions = doanalyzeframedbox, arguments = "integer" }
+
+function nodes.maxboxwidth(box)
+ local boxwidth = getfield(box,"width")
+ if boxwidth == 0 then
+ return 0
+ end
+ local list = getlist(box)
+ if not list then
+ return 0
+ end
+ if getid(box) == hlist_code then
+ return boxwidth
+ end
+ local lastlinelength = 0
+ local maxwidth = 0
+ local function check(n,repack)
+ local l = getlist(n)
+ if l then
+ if repack then
+ local subtype = getsubtype(n)
+ if subtype == box_code or subtype == line_code then
+ lastlinelength = node_dimensions(l,getfield(n,"dir"))
+ else
+ lastlinelength = getfield(n,"width")
+ end
+ else
+ lastlinelength = getfield(n,"width")
+ end
+ if lastlinelength > maxwidth then
+ maxwidth = lastlinelength
+ end
+ end
+ end
+ for h in traverse_id(hlist_code,list) do -- no dir etc needed
+ check(h,true)
+ end
+ for v in traverse_id(vlist_code,list) do -- no dir etc needed
+ check(v,false)
+ end
+end
diff --git a/tex/context/base/pack-rul.mkiv b/tex/context/base/pack-rul.mkiv
index 377d39499..5f72a1113 100644
--- a/tex/context/base/pack-rul.mkiv
+++ b/tex/context/base/pack-rul.mkiv
@@ -43,23 +43,23 @@
\def\pack_framed_setup_line_width[#1]%
{\assigndimension{#1}\linewidth{.2\points}{.4\points}{.6\points}}
-%D \macros
-%D {setupscreens}
-%D
-%D Sort of obsolete:
-%D
-%D \showsetup{setupscreens}
-
-\installcorenamespace{screens}
-
-\installsetuponlycommandhandler \??screens {screens}
-
-\appendtoks
- \edef\defaultbackgroundscreen{\directscreensparameter\c!screen}
-\to \everysetupscreens
-
-\setupscreens
- [\c!screen=.90] % was .95 but that's hardly visible
+% %D \macros
+% %D {setupscreens}
+% %D
+% %D Sort of obsolete:
+% %D
+% %D \showsetup{setupscreens}
+%
+% \installcorenamespace{screens}
+%
+% \installsetuponlycommandhandler \??screens {screens}
+%
+% \appendtoks
+% \edef\defaultbackgroundscreen{\directscreensparameter\c!screen}
+% \to \everysetupscreens
+%
+% \setupscreens
+% [\c!screen=.90] % was .95 but that's hardly visible
%D The parameter handler:
@@ -155,8 +155,6 @@
%\c!foregroundcolor=,
%\c!foregroundstyle=,
%\c!background=,
- %\c!backgroundscreen=,
- \c!backgroundscreen=\defaultbackgroundscreen,
%\c!backgroundcolor=,
\c!backgroundoffset=\zeropoint,
%\c!framecolor=,
@@ -238,7 +236,6 @@
\let\p_framed_lines \empty
\let\p_framed_empty \empty
\let\p_framed_backgroundcolor \empty
-\let\p_framed_backgroundscreen\empty
\let\p_framed_framecolor \empty
\let\p_framed_component \empty
\let\p_framed_region \empty
@@ -352,14 +349,14 @@
%D The oval box is drawn using a special macro, depending on
%D the driver in use.
-\def\pack_framed_background_box_gray % avoid black rules when no gray
- {\edef\p_framed_backgroundscreen{\framedparameter\c!backgroundscreen}%
- \ifx\p_framed_backgroundscreen\empty \else
- \pack_framed_background_box_gray_indeed
- \fi}
-
-\def\pack_framed_background_box_gray_indeed % can be more direct but who cares, just compatibility
- {\colored[s=\p_framed_backgroundscreen]{\pack_framed_filled_box}}
+% \def\pack_framed_background_box_gray % avoid black rules when no gray
+% {\edef\p_framed_backgroundscreen{\framedparameter\c!backgroundscreen}%
+% \ifx\p_framed_backgroundscreen\empty \else
+% \pack_framed_background_box_gray_indeed
+% \fi}
+%
+% \def\pack_framed_background_box_gray_indeed % can be more direct but who cares, just compatibility
+% {\colored[s=\p_framed_backgroundscreen]{\pack_framed_filled_box}}
%D It won't be a surprise that we not only provide gray boxes, but also colored
%D ones. Here it is:
@@ -429,14 +426,36 @@
%D
%D The resulting box is lowered to the right depth.
-\def\overlaywidth {\the\hsize\space} % We preset the variables
-\def\overlayheight {\the\vsize\space} % to some reasonable default
-\def\overlaydepth {0pt } % values. The attributes
-\let\overlayoffset \overlaydepth % of the frame can be (are)
-\let\overlaylinewidth \overlaydepth % set somewhere else.
+%def\overlaywidth {\the\hsize\space} % We preset the variables
+%def\overlayheight {\the\vsize\space} % to some reasonable default
+%def\overlaydepth {0pt } % values. The attributes
+%let\overlayoffset \overlaydepth % of the frame can be (are)
+%let\overlaylinewidth \overlaydepth % set somewhere else.
\let\overlaycolor \empty
\let\overlaylinecolor \empty
+\newdimen\d_overlay_width
+\newdimen\d_overlay_height
+\newdimen\d_overlay_depth
+\newdimen\d_overlay_offset
+\newdimen\d_overlay_linewidth
+
+% expandable ... in a future version the space will go (in mp one can use Overlay*)
+
+\def\overlaywidth {\the\d_overlay_width \space} % We preset the variables
+\def\overlayheight {\the\d_overlay_height \space} % to some reasonable default
+\def\overlaydepth {\the\d_overlay_depth \space} % values.
+\def\overlayoffset {\the\d_overlay_offset \space} % of the frame can be (are)
+\def\overlaylinewidth {\the\d_overlay_linewidth\space} % set somewhere else.
+
+% public but kind of protected
+
+\def\usedoverlaywidth {\dimexpr\d_overlay_width \relax}
+\def\usedoverlayheight {\dimexpr\d_overlay_height \relax}
+\def\usedoverlaydepth {\dimexpr\d_overlay_depth \relax}
+\def\usedoverlayoffset {\dimexpr\d_overlay_offset \relax}
+\def\usedoverlaylinewidth{\dimexpr\d_overlay_linewidth\relax}
+
%D The next register is used to initialize overlays.
\newtoks\everyoverlay
@@ -452,8 +471,8 @@
\to \everyoverlay
\prependtoks
- \hsize\overlaywidth
- \vsize\overlayheight
+ \hsize\d_overlay_width
+ \vsize\d_overlay_height
\to \everyoverlay
\unexpanded\def\defineoverlay
@@ -475,8 +494,8 @@
\egroup
\setlayoutcomponentattribute{\v!overlay:#1}%
\setbox\scratchbox\hbox \layoutcomponentboxattribute
- {\kern -.5\dimexpr\wd\scratchbox-\d_framed_target_wd\relax % was \overlaywidth
- \raise-.5\dimexpr\ht\scratchbox-\d_framed_target_ht\relax % not \overlayheight !
+ {\kern -.5\dimexpr\wd\scratchbox-\d_framed_target_wd\relax % was \d_overlay_width
+ \raise-.5\dimexpr\ht\scratchbox-\d_framed_target_ht\relax % not \d_overlay_height !
\box\scratchbox}%
\wd\scratchbox\d_framed_target_wd
\ht\scratchbox\d_framed_target_ht
@@ -490,19 +509,21 @@
\unexpanded\def\overlayfakebox
{\hbox
{\setbox\scratchbox\emptyhbox
- \wd\scratchbox\overlaywidth
- \ht\scratchbox\overlayheight
+ \wd\scratchbox\d_overlay_width
+ \ht\scratchbox\d_overlay_height
\box\scratchbox}}
%D For testing we provide:
-\def\doifoverlayelse#1% only tests external overlays
+\def\doifelseoverlay#1% only tests external overlays
{\ifcsname\??overlay#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifoverlayelse\doifelseoverlay
+
%D The content of the box will be (temporary) saved in a box. We also have an
%D extra box for backgrounds.
@@ -599,16 +620,28 @@
\hss
\egroup}}
+% \def\pack_framed_overlay_initialize_indeed
+% {\edef\overlaywidth {\the\d_framed_target_wd\space}%
+% \edef\overlayheight {\the\dimexpr\d_framed_target_ht+\d_framed_target_dp\relax\space}%
+% \edef\overlaydepth {\the\d_framed_target_dp\space}%
+% \edef\overlaycolor {\framedparameter\c!backgroundcolor}% let ?
+% \edef\overlaylinecolor{\framedparameter\c!framecolor}% only needed for layers
+% \edef\overlaylinewidth{\the\d_framed_linewidth\space}%
+% %\edef\overlaycorner {\framedparameter\c!backgroundcorner}%
+% %\edef\overlayradius {\framedparameter\c!backgroundradius}%
+% \edef\overlayoffset {\the\framedbackgroundoffset\space}% \backgroundoffset % we steal this one
+% \let\pack_framed_overlay_initialize\relax}
+
\def\pack_framed_overlay_initialize_indeed
- {\edef\overlaywidth {\the\d_framed_target_wd\space}%
- \edef\overlayheight {\the\dimexpr\d_framed_target_ht+\d_framed_target_dp\relax\space}%
- \edef\overlaydepth {\the\d_framed_target_dp\space}%
+ {\d_overlay_width \d_framed_target_wd
+ \d_overlay_height \dimexpr\d_framed_target_ht+\d_framed_target_dp\relax
+ \d_overlay_depth \d_framed_target_dp
+ \d_overlay_linewidth \d_framed_linewidth
+ \d_overlay_offset \framedbackgroundoffset\relax
\edef\overlaycolor {\framedparameter\c!backgroundcolor}% let ?
\edef\overlaylinecolor{\framedparameter\c!framecolor}% only needed for layers
- \edef\overlaylinewidth{\the\d_framed_linewidth\space}%
%\edef\overlaycorner {\framedparameter\c!backgroundcorner}%
%\edef\overlayradius {\framedparameter\c!backgroundradius}%
- \edef\overlayoffset {\the\framedbackgroundoffset\space}% \backgroundoffset % we steal this one
\let\pack_framed_overlay_initialize\relax}
%D One can explicitly insert the foreground box. For that purpose we introduce the
@@ -778,6 +811,35 @@
\newcount\c_pack_framed_nesting
+% to be tested (slightly more efficient):
+%
+% \unexpanded\def\pack_frame_common % #1 #2
+% {\bgroup
+% \advance\c_pack_framed_nesting\plusone
+% \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+% \edef\currentframed{>\the\c_pack_framed_nesting}%
+% \pack_framed_initialize
+% \bgroup
+% \doifnextoptionalcselse} % #1 #2
+%
+% \unexpanded\def\framed {\pack_frame_common\pack_framed_process_framed_pickup\pack_framed_process_indeed}
+% \unexpanded\def\startframed{\pack_frame_common\pack_framed_start_framed_pickup \pack_framed_start_indeed }
+%
+% \def\pack_framed_process_framed_pickup[#1]%
+% {\setupcurrentframed[#1]%
+% \pack_framed_process_indeed}
+%
+% \def\pack_framed_start_framed_pickup[#1]%
+% {\setupcurrentframed[#1]% here !
+% \secondargumenttrue % dirty trick
+% \pack_framed_start_framed_indeed}
+%
+% \def\pack_framed_start_framed_indeed
+% {\pack_framed_process_indeed
+% \bgroup}
+%
+% no longer .. we also accept \startframed[tag]
+
\unexpanded\def\pack_framed_process_framed[#1]%
{\bgroup
\iffirstargument % faster
@@ -793,20 +855,63 @@
\pack_framed_initialize
\dosingleempty\pack_framed_process_framed}
+% \unexpanded\def\startframed
+% {\dosingleempty\pack_framed_start_framed}
+%
+% \def\pack_framed_start_framed[#1]%
+% {\bgroup
+% \advance\c_pack_framed_nesting\plusone
+% \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+% \edef\currentframed{>\the\c_pack_framed_nesting}%
+% \pack_framed_initialize
+% \bgroup
+% \iffirstargument
+% \secondargumenttrue % dirty trick
+% \setupcurrentframed[#1]% here !
+% \fi
+% \pack_framed_process_indeed
+% \bgroup
+% \ignorespaces}
+
\unexpanded\def\startframed
{\dosingleempty\pack_framed_start_framed}
\def\pack_framed_start_framed[#1]%
{\bgroup
- \advance\c_pack_framed_nesting\plusone
+ \doifelseassignment{#1}\pack_framed_start_framed_yes\pack_framed_start_framed_nop{#1}}
+
+\def\pack_framed_start_framed_yes#1%
+ {\advance\c_pack_framed_nesting\plusone
\expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
\iffirstargument\secondargumenttrue\fi % dirty trick
\edef\currentframed{>\the\c_pack_framed_nesting}%
\pack_framed_initialize
- \pack_framed_process_framed[#1]% can be inlined
- \bgroup}
+ \bgroup
+ \iffirstargument
+ \secondargumenttrue % dirty trick
+ \setupcurrentframed[#1]% here !
+ \fi
+ \pack_framed_process_indeed
+ \bgroup
+ \ignorespaces}
+
+\def\pack_framed_start_framed_nop#1%
+ {\edef\currentframed{#1}%
+ \dosingleempty\pack_framed_start_framed_nop_indeed}
-\let\stopframed\egroup
+\def\pack_framed_start_framed_nop_indeed[#1]%
+ {\pack_framed_initialize
+ \bgroup
+ \setupcurrentframed[#1]% here !
+ \pack_framed_process_indeed
+ \bgroup
+ \ignorespaces}
+
+% till here
+
+\unexpanded\def\stopframed
+ {\removeunwantedspaces
+ \egroup}
\unexpanded\def\normalframedwithsettings[#1]%
{\bgroup
@@ -1304,10 +1409,19 @@
\def\pack_framed_restart
{\aftergroup\pack_framed_finish}
-\def\pack_framed_do_top {\raggedtopcommand\framedparameter\c!top}
-\def\pack_framed_do_bottom{\framedparameter\c!bottom\raggedbottomcommand}
+\def\pack_framed_do_top
+ {\raggedtopcommand
+ \framedparameter\c!top
+ \edef\p_blank{\framedparameter\c!blank}%
+ \ifx\p_blank\v!yes\else % auto or no
+ \doinhibitblank
+ \fi}
-%D Carefull analysis of this macro will learn us that not all branches in the last
+\def\pack_framed_do_bottom
+ {\framedparameter\c!bottom
+ \raggedbottomcommand}
+
+%D Careful analysis of this macro will teach us that not all branches in the last
%D conditionals can be encountered, that is, some assignments to \type{\next} will
%D never occur. Nevertheless we implement the whole scheme, if only for future
%D extensions.
@@ -1384,10 +1498,20 @@
\pack_framed_reshape_reset
\fi}
+\def\pack_framed_profile_box
+ {\profilegivenbox\p_profile\b_framed_normal
+ \setbox\b_framed_normal\vbox{\unvbox\b_framed_normal}}
+
\unexpanded\def\pack_framed_finish
- {\pack_framed_stop_orientation % hm, wrong place ! should rotate the result (after reshape)
+ {%\pack_framed_stop_orientation % hm, wrong place ! should rotate the result (after reshape) .. moved down
\pack_framed_locator_before\p_framed_location
\ifconditional\c_framed_has_format
+ \ifconditional\c_framed_has_height \else
+ \edef\p_profile{\framedparameter\c!profile}%
+ \ifx\p_profile\empty\else
+ \pack_framed_profile_box
+ \fi
+ \fi
\ifx\p_framed_autowidth\v!force
\pack_framed_finish_a
\else\ifx\localwidth\v!fit
@@ -1417,6 +1541,7 @@
\ifx\p_framed_empty\v!yes
\pack_framed_fake_box
\fi
+ \pack_framed_stop_orientation % moved here at 2014-05-25
\iftrialtypesetting \else
\edef\p_framed_region{\framedparameter\c!region}%
\ifx\p_framed_region\v!yes % maybe later named
@@ -1910,10 +2035,10 @@
%D \stoplinecorrection
%D
%D \startbuffer
-%D \framed[strut=nee,offset=.5cm] {rule based learning}
-%D \framed[strut=nee,offset=0cm] {rule based learning}
-%D \framed[strut=nee,offset=none] {rule based learning}
-%D \framed[strut=nee,offset=overlay]{rule based learning}
+%D \framed[strut=no,offset=.5cm] {rule based learning}
+%D \framed[strut=no,offset=0cm] {rule based learning}
+%D \framed[strut=no,offset=none] {rule based learning}
+%D \framed[strut=no,offset=overlay]{rule based learning}
%D \stopbuffer
%D
%D \typebuffer
@@ -1923,9 +2048,9 @@
%D \stoplinecorrection
%D
%D \startbuffer
-%D \framed[width=3cm,align=left] {rule\\based\\learning}
-%D \framed[width=3cm,align=middle] {rule\\based\\learning}
-%D \framed[width=3cm,align=right] {rule\\based\\learning}
+%D \framed[width=3cm,align=left] {rule\\based\\learning}
+%D \framed[width=3cm,align=middle] {rule\\based\\learning}
+%D \framed[width=3cm,align=right] {rule\\based\\learning}
%D \framed[width=fit,align=middle] {rule\\based\\learning}
%D \stopbuffer
%D
@@ -1992,7 +2117,6 @@
% \vbox{\hbox{x}}
% \stopTEXpage
-
% \def\pack_framed_forgetall{\forgetall}
\def\pack_framed_set_foregroundcolor
@@ -2001,14 +2125,15 @@
\def\pack_framed_do_setups
{\ifx\p_framed_setups\empty \else
- \setups[\p_framed_setups]% \texsetup
+ \setups[\p_framed_setups]% \texsetup (or only one!)
+ % \fastsetup\p_framed_setup % singular would have been better
\fi}
\def\pack_framed_format_format_yes
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2019,7 +2144,7 @@
\raggedcommand
\pack_framed_do_top
\bgroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\pack_framed_do_bottom
@@ -2030,7 +2155,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2041,7 +2166,7 @@
\raggedcenter
\vss
\bgroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\vss
@@ -2052,7 +2177,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2064,7 +2189,7 @@
\aftergroup\localendstrut
\aftergroup\vss
\aftergroup\egroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\doformatonelinerbox}
@@ -2072,7 +2197,7 @@
{\vbox
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2082,7 +2207,7 @@
\raggedcommand
\pack_framed_do_top
\bgroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\pack_framed_do_bottom
@@ -2093,7 +2218,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2106,7 +2231,7 @@
\hbox
\bgroup
\aftergroup\egroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localstrut
\doformatonelinerbox}
@@ -2114,13 +2239,13 @@
{\hbox to \d_framed_width
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
\pack_framed_do_setups
\hss
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localstrut
\bgroup
\aftergroup\hss
@@ -2135,7 +2260,7 @@
\fi
\let\postprocessframebox\relax
\pack_framed_do_setups
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localstrut
\doformatonelinerbox}
@@ -2215,8 +2340,8 @@
\framedmaxwidth \zeropoint
\framedaveragewidth\zeropoint}
-\def\pack_framed_reshape_process{\ifvbox\b_framed_normal\ctxcommand{doreshapeframedbox(\number\b_framed_normal)}\fi}
-\def\pack_framed_reshape_analyze{\ifvbox\b_framed_normal\ctxcommand{doanalyzeframedbox(\number\b_framed_normal)}\fi}
+\def\pack_framed_reshape_process{\ifvbox\b_framed_normal\clf_doreshapeframedbox\b_framed_normal\relax\fi}
+\def\pack_framed_reshape_analyze{\ifvbox\b_framed_normal\clf_doanalyzeframedbox\b_framed_normal\relax\fi}
% torture test / strange case (much depth) / method 2 needed
%
@@ -2514,7 +2639,6 @@
%\c!foregroundstyle=,
%\c!background=,
%\c!backgroundcolor=,
- \c!backgroundscreen=\defaultbackgroundscreen,
\c!linecorrection=\v!on,
\c!depthcorrection=\v!on,
\c!margin=\v!standard]
@@ -2544,7 +2668,7 @@
\dodoubleempty\pack_framed_text_start_indeed}
\def\pack_framed_text_start_indeed[#1][#2]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\pack_framed_text_start_continue\empty{#1}}
{\pack_framed_text_start_continue{#1}{#2}}}
@@ -2559,15 +2683,30 @@
\startboxedcontent
\hsize\localhsize
% \insidefloattrue % ? better
- \normalexpanded{\switchtobodyfont[\framedtextparameter\c!bodyfont]}%
+ \usebodyfontparameter\framedtextparameter
\letframedtextparameter\c!strut\v!no
\inheritedframedtextframed\bgroup
\let\\=\endgraf
\framedtextparameter\c!inner % old stuff
- \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_start_depth_correction
+ \edef\p_framed_text_depthcorrection{\framedtextparameter\c!depthcorrection}%
+ \ifx\p_framed_text_depthcorrection\v!on
+ \pack_framed_text_start_depth_correction
+ \else
+ \bgroup
+ \fi
+ \vskip-\strutdp % brrr why is this needed ... needs to be sorted out, see testcase 1
\doinhibitblank
- \setupindenting[\framedtextparameter\c!indenting]%
- \useframedtextstyleandcolor\c!style\c!color}
+ \useindentingparameter\framedtextparameter
+ \useframedtextstyleandcolor\c!style\c!color
+ \ignorespaces}
+
+% testcase 1:
+%
+% \showstruts
+% \startframedtext[align={normal,tolerant},offset=0pt] \input tufte \stopframedtext
+% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \input tufte \stopframedtext
+% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \inframed{x} \stopframedtext
+% \framed[align={normal,tolerant},offset=0pt]{\input tufte }
%D The \type {none} option is handy for nested usage, as in the presentation
%D styles, where we don't want interference.
@@ -2577,7 +2716,11 @@
\unexpanded\def\pack_framed_text_stop % no \baselinecorrection, see faq docs
{\endgraf
\removelastskip
- \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_stop_depth_correction
+ \ifx\p_framed_text_depthcorrection\v!on
+ \pack_framed_text_stop_depth_correction
+ \else
+ \egroup
+ \fi
\stopboxedcontent
\ifconditional\c_framed_text_location_none
\egroup
@@ -2656,7 +2799,7 @@
\dosingleempty\pack_framed_text_start_direct}
\def\pack_framed_text_start_direct[#1]%
- {\normalexpanded{\switchtobodyfont[\framedtextparameter\c!bodyfont]}%
+ {\usebodyfontparameter\framedtextparameter
\letframedtextparameter\c!strut\v!no
\iffirstargument
\setupcurrentframedtext[#1]%
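% A minimal sketch of the two \startframed call forms handled above: a bare
% settings list, or a named instance followed by optional settings. The instance
% name myframed is hypothetical and assumes a prior \defineframed.
%
% \startframed[frame=on,width=4cm] some text \stopframed
%
% \defineframed[myframed][width=4cm,align=middle]
% \startframed[myframed][background=color,backgroundcolor=gray] some text \stopframed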
diff --git a/tex/context/base/page-app.mkiv b/tex/context/base/page-app.mkiv
index e4858d48f..2e81f7537 100644
--- a/tex/context/base/page-app.mkiv
+++ b/tex/context/base/page-app.mkiv
@@ -139,6 +139,9 @@
[TEXpage]
[\c!align=\v!normal] % needed, else problems !
+\unexpanded\def\setupTEXpage
+ {\setupfittingpage[TEXpage]}
+
%D For Mojca:
%D
%D \starttyping
diff --git a/tex/context/base/page-bck.mkiv b/tex/context/base/page-bck.mkiv
index 0246e8eb6..01de48e8c 100644
--- a/tex/context/base/page-bck.mkiv
+++ b/tex/context/base/page-bck.mkiv
@@ -55,7 +55,7 @@
%D This is the only spot where we have a low-level dependency on the way
%D parent chains are defined, but we want the speed.
-\def\page_backgrounds_check_background
+\unexpanded\def\page_backgrounds_check_background
{\ifcsname\??framed\currentotrbackground:\c!background\endcsname
\edef\page_background_temp{\csname\??framed\currentotrbackground:\c!background\endcsname}%
\ifx\page_background_temp\empty
@@ -140,13 +140,15 @@
{\edef\currentotrbackground{\??layoutbackgrounds#1}%
\page_backgrounds_check_background}
-\def\doifsomebackgroundelse#1%
+\def\doifelsesomebackground#1%
{\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifsomebackgroundelse\doifelsesomebackground
+
\def\doifsomebackground#1%
{\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\firstofoneargument
@@ -277,7 +279,7 @@
\page_backgrounds_set_boxes
\setbox#1\vbox
{\offinterlineskip
- \doifmarginswapelse{\copy\leftbackground}{\copy\rightbackground}%
+ \doifelsemarginswap{\copy\leftbackground}{\copy\rightbackground}%
\box#1}%
\fi}
@@ -343,7 +345,7 @@
{\dontcomplain
\swapmargins
\ifconditional\swapbackgroundmargins
- \doifmarginswapelse \donothing
+ \doifelsemarginswap \donothing
{\swapmacros\v!rightmargin\v!leftmargin
\swapmacros\v!rightedge \v!leftedge}%
\fi
@@ -501,7 +503,7 @@
\unexpanded\def\page_backgrounds_setup_double[#1][#2][#3]% if needed we can speed this up
{\global\settrue\c_page_backgrounds_some
\def\page_backgrounds_setup_step##1%
- {\doifinsetelse{##1}\v_page_backgrounds_double_set
+ {\doifelseinset{##1}\v_page_backgrounds_double_set
{\page_backgrounds_setup_and_check{##1}{#3}}
{\def\page_backgrounds_setup_step_nested####1{\page_backgrounds_setup_and_check{##1####1}{#3}}%
\processcommacommand[#2]\page_backgrounds_setup_step_nested}}%
@@ -510,7 +512,7 @@
\unexpanded\def\page_backgrounds_setup_single[#1][#2][#3]%
{\global\settrue\c_page_backgrounds_some
- \doifcommonelse{#1}\v_page_backgrounds_single_set
+ \doifelsecommon{#1}\v_page_backgrounds_single_set
{\def\page_backgrounds_setup_step##1{\page_backgrounds_setup_and_check{##1}{#2}}%
\processcommacommand[#1]\page_backgrounds_setup_step
\the\everybackgroundssetup}%
@@ -607,6 +609,48 @@
\setfalse\c_page_backgrounds_some
+%D Sometimes you have a document-wide (page) background but need to overload it
+%D locally. In such a case (at least in my experience) the only values that get set
+%D are the background and backgroundcolor (if set at all). A full inheritance chain
+%D would complicate things because then we need to use named backgrounds, which in
+%D turn will make this mechanism slower. I considered independent local backgrounds,
+%D but that also complicates the code (not that much), and isolation means that we
+%D need to set more parameters each time. The following simple approach probably
+%D suits most usage.
+%D
+%D \starttyping
+%D \starttext
+%D \setupbackgrounds[page][background=color,backgroundcolor=red]
+%D \input tufte \page
+%D \setupbackgrounds[page][background=,backgroundcolor=]
+%D \input tufte \page
+%D \setupbackgrounds[page][background=color,backgroundcolor=red]
+%D \input tufte \page
+%D \pushbackground[page]
+%D \setupbackgrounds[page][background=color,backgroundcolor=green]
+%D \input tufte \page
+%D \popbackground
+%D \input tufte \page
+%D \stoptext
+%D \stoptyping
+
+\unexpanded\def\pushbackground[#1]%
+ {\pushmacro\popbackground
+ \edef\currentotrbackground{\??layoutbackgrounds#1}%
+ \unexpanded\edef\popbackground
+ {\setupframed
+ [\currentotrbackground]
+ [\c!background=\namedframedparameter{\currentotrbackground}\c!background,
+ \c!backgroundcolor=\namedframedparameter{\currentotrbackground}\c!backgroundcolor]%
+ \page_backgrounds_check_background
+ \popmacro\popbackground}%
+ \setupframed
+ [\currentotrbackground]
+ [\c!background=,\c!backgroundcolor=]%
+ \page_backgrounds_check_background}
+
+\let\popbackground\relax
+
\protect \endinput
% %D The next series is used in local (for instance floating) backgrounds.
diff --git a/tex/context/base/page-brk.mkiv b/tex/context/base/page-brk.mkiv
index cc9a9b4d2..f9c933052 100644
--- a/tex/context/base/page-brk.mkiv
+++ b/tex/context/base/page-brk.mkiv
@@ -112,13 +112,13 @@
\endgroup
\fi}
-\def\resetpagebreak % used elsewhere too
+\unexpanded\def\resetpagebreak % used elsewhere too
{\global\settrue\c_page_breaks_enabled}
-\def\simplifypagebreak % to be used grouped !
+\unexpanded\def\simplifypagebreak % to be used grouped !
{\def\page_breaks_process[##1]{\goodbreak}}
-\def\disablepagebreaks % to be used grouped !
+\unexpanded\def\disablepagebreaks % to be used grouped !
{\def\page_breaks_process[##1]{}}
\installpagebreakmethod \s!dummy
@@ -134,11 +134,11 @@
\endgroup}
\installpagebreakmethod \s!unknown
- {\doifinstringelse{+}\page_breaks_current_option
+ {\doifelseinstring{+}\page_breaks_current_option
{\page_otr_flush_all_floats
\page_otr_command_next_page
\dorecurse\page_breaks_current_option\page_otr_insert_dummy_page}
- {\doifnumberelse\page_breaks_current_option
+ {\doifelsenumber\page_breaks_current_option
{\page_otr_flush_all_floats
\page_otr_command_next_page
\doloop
@@ -178,6 +178,8 @@
\global\pageornamentstate\plusone
\fi}
+% also needed: \page \doifoddpageelse\relax{\page[\v!blank,\v!right]
+
\installpagebreakmethod \v!no
{\ifconditional\c_page_breaks_enabled
\dosomebreak\nobreak
@@ -220,11 +222,11 @@
\installpagebreakmethod \v!even
{\page
- \doifoddpageelse\page_reset_marks_and_insert_dummy\donothing}
+ \doifelseoddpage\page_reset_marks_and_insert_dummy\donothing}
\installpagebreakmethod \v!odd
{\page
- \doifoddpageelse\donothing\page_reset_marks_and_insert_dummy}
+ \doifelseoddpage\donothing\page_reset_marks_and_insert_dummy}
\installpagebreakmethod \v!quadruple % not yet ok inside columnsets
{\ifdoublesided
@@ -316,75 +318,204 @@
%D Test page breaks.
-\newdimen \d_page_tests_test
-\newconstant\c_page_tests_mode
-
-\newconstant\testpagemethod % todo: \testnewpage[method=,lines=,voffset=]
-\newconstant\testpagetrigger
+% \newdimen \d_page_tests_test
+% \newconstant\c_page_tests_mode
-\unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
-\unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
-\unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+\newconstant\testpagemethod % old
+\newconstant\testpagetrigger % old
-\def\page_tests_test[#1][#2]% don't change, only add more methods
+% \unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
+% \unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
+% \unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+%
+% \def\page_tests_test[#1][#2]% don't change, only add more methods
+% {\relax % needed before \if
+% \ifconditional\c_page_breaks_enabled
+% % new from here
+% \ifcase\testpagetrigger
+% \endgraf
+% \or\ifvmode
+% \dosomebreak\allowbreak
+% \else % indeed?
+% \vadjust{\allowbreak}%
+% \endgraf
+% \fi\fi
+% % till here
+% \ifdim\pagegoal<\maxdimen \relax
+% \ifdim\pagetotal<\pagegoal \relax
+% \d_page_tests_test\dimexpr
+% #1\lineheight
+% +\pagetotal
+% \ifdim\lastskip<\parskip+\parskip\fi
+% \ifsecondargument+#2\fi
+% \relax
+% \ifcase\testpagemethod
+% \ifdim\d_page_tests_test>.99\pagegoal
+% \penalty-\plustenthousand
+% \fi
+% \or
+% \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
+% \penalty-\plustenthousand
+% \fi
+% \or
+% \getnoflines\pagegoal
+% \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
+% \penalty-\plustenthousand
+% \fi
+% \or % same as 0 but more accurate
+% \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
+% \penalty-\plustenthousand
+% \fi
+% \fi
+% \else\ifnum\c_page_tests_mode=\plusthree
+% \page_tests_flush_so_far
+% \fi\fi
+% \else\ifnum\c_page_tests_mode=\plusone
+% \goodbreak
+% \fi\fi
+% \else
+% \endgraf
+% \fi}
+%
+% \def\page_tests_flush_so_far
+% {\endgraf
+% \ifdim\pagetotal>\pagegoal
+% \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+% \goodbreak
+% \else
+% \page
+% \fi
+% \fi}
+
+\installcorenamespace {pagechecker}
+\installcorenamespace {pagecheckermethod}
+
+\installcommandhandler \??pagechecker {pagechecker} \??pagechecker
+
+\setuppagechecker
+ [\c!method=1,
+ \c!before=,
+ \c!after=,
+ \c!inbetween=,
+ \c!lines=\plusthree,
+ \c!offset=\zeropoint]
+
+\def\page_check_amount
+ {\dimexpr
+ \pagecheckerparameter\c!lines\lineheight
+ +\pagetotal
+ \ifdim\lastskip<\parskip+\parskip\fi
+ +\pagecheckerparameter\c!offset
+ \relax}
+
+\unexpanded\def\checkpage
+ {\dodoubleempty\page_check}
+
+\def\page_check[#1][#2]%
{\relax % needed before \if
+ \endgraf
\ifconditional\c_page_breaks_enabled
- % new from here
- \ifcase\testpagetrigger
- \endgraf
- \or\ifvmode
- \dosomebreak\allowbreak
- \else % indeed?
- \vadjust{\allowbreak}%
- \endgraf
- \fi\fi
- % till here
- \ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \d_page_tests_test\dimexpr
- #1\lineheight
- +\pagetotal
- \ifdim\lastskip<\parskip+\parskip\fi
- \ifsecondargument+#2\fi
- \relax
- \ifcase\testpagemethod
- \ifdim\d_page_tests_test>.99\pagegoal
- \penalty-\plustenthousand
- \fi
- \or
- \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
- \penalty-\plustenthousand
- \fi
- \or
- \getnoflines\pagegoal
- \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
- \penalty-\plustenthousand
- \fi
- \or % same as 0 but more accurate
- \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
- \penalty-\plustenthousand
- \fi
- \fi
- \else\ifnum\c_page_tests_mode=\plusthree
- \page_tests_flush_so_far
- \fi\fi
- \else\ifnum\c_page_tests_mode=\plusone
- \goodbreak
- \fi\fi
+ \begingroup
+ \edef\currentpagechecker{#1}%
+ \ifsecondargument\setupcurrentpagechecker[#2]\fi
+ \csname\??pagecheckermethod\pagecheckerparameter\c!method\endcsname
+ \endgroup
+ \fi}
+
+\setvalue{\??pagecheckermethod 0}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\page_check_amount>.99\pagegoal
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi}
+
+\setvalue{\??pagecheckermethod 1}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\dimexpr\page_check_amount-\pagegoal\relax>-\lineheight
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
\else
- \endgraf
+ \goodbreak
+ \pagecheckerparameter\c!inbetween
\fi}
-\def\page_tests_flush_so_far
- {\endgraf
- \ifdim\pagetotal>\pagegoal
- \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
- \goodbreak
+\setvalue{\??pagecheckermethod 2}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \getnoflines\pagegoal
+ \ifdim\dimexpr\page_check_amount-\noflines\lineheight\relax>-\lineheight
+          \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
\else
- \page
+ \pagecheckerparameter\c!inbetween
\fi
+ \else
+ \pagecheckerparameter\c!inbetween
\fi}
+\setvalue{\??pagecheckermethod 3}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\dimexpr\page_check_amount-10\scaledpoint\relax>\pagegoal
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \ifdim\pagetotal>\pagegoal
+ \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+ \goodbreak
+ \pagecheckerparameter\c!inbetween
+ \else
+ \pagecheckerparameter\c!before
+ \page
+ \pagecheckerparameter\c!after
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi}
+
+\definepagechecker[\s!unknown:0] [\c!method=0,\c!before=,\c!after=,\c!inbetween=]
+\definepagechecker[\s!unknown:1][\s!unknown:0][\c!method=1]
+\definepagechecker[\s!unknown:2][\s!unknown:0][\c!method=2]
+\definepagechecker[\s!unknown:3][\s!unknown:0][\c!method=3]
+
+\def\page_tests_test_a[#1][#2]{\normalexpanded{\checkpage[\s!unknown:1][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+\def\page_tests_test_b[#1][#2]{\normalexpanded{\checkpage[\s!unknown:2][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+\def\page_tests_test_c[#1][#2]{\normalexpanded{\checkpage[\s!unknown:3][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+
+\unexpanded\def\testpage {\dodoubleempty\page_tests_test_a} %
+\unexpanded\def\testpageonly{\dodoubleempty\page_tests_test_b} % no penalties added to the mvl
+\unexpanded\def\testpagesync{\dodoubleempty\page_tests_test_c} % force sync
+
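The block above replaces the old ad-hoc \testpage logic by a parameterized page checker. A minimal usage sketch, assuming nothing beyond what is defined above (the checker name and values are made up for illustration):

    \definepagechecker
      [mychecker]
      [\c!method=1,
       \c!lines=4]

    \checkpage[mychecker]               % break the page when fewer than 4 lines fit
    \checkpage[mychecker][\c!lines=6]   % same checker, locally asking for 6 lines
    \testpage[3]                        % old command, now mapped onto \checkpage[\s!unknown:1][\c!lines=3,...]

The \c!before and \c!after hooks are injected around the forced break and \c!inbetween is executed when enough room is left; the four methods only differ in how the needed amount is compared against \pagegoal.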
%D Test column breaks.
\unexpanded\def\testcolumn
diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua
index 11aa2be21..e91285d0a 100644
--- a/tex/context/base/page-flt.lua
+++ b/tex/context/base/page-flt.lua
@@ -21,20 +21,23 @@ local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
-- we use floatbox, floatwidth, floatheight
-- text page leftpage rightpage (todo: top, bottom, margin, order)
-local copy_node_list = node.copy_list
+local copy_node_list = node.copy_list
+local flush_node_list = node.flush_list
+local copy_node = node.copy
-local setdimen = tex.setdimen
-local setcount = tex.setcount
-local texgetbox = tex.getbox
-local texsetbox = tex.setbox
+local setdimen = tex.setdimen
+local setcount = tex.setcount
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
+local textakebox = nodes.takebox
-floats = floats or { }
-local floats = floats
+floats = floats or { }
+local floats = floats
-local noffloats = 0
-local last = nil
-local default = "text"
-local pushed = { }
+local noffloats = 0
+local last = nil
+local default = "text"
+local pushed = { }
local function initialize()
return {
@@ -105,21 +108,20 @@ end
function floats.save(which,data)
which = which or default
- local b = texgetbox("floatbox")
+ local b = textakebox("floatbox")
if b then
local stack = stacks[which]
noffloats = noffloats + 1
- local w, h, d = b.width, b.height, b.depth
local t = {
n = noffloats,
data = data or { },
- box = copy_node_list(b),
+ box = b,
}
- texsetbox("floatbox",nil)
insert(stack,t)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",
+ which,noffloats,#stack,b.width,b.height,b.depth)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -132,14 +134,13 @@ function floats.resave(which)
if last then
which = which or default
local stack = stacks[which]
- local b = texgetbox("floatbox")
- local w, h, d = b.width, b.height, b.depth
- last.box = copy_node_list(b)
- texsetbox("floatbox",nil)
+ local b = textakebox("floatbox")
+ last.box = b
insert(stack,1,last)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",
+ which,noffloats,#stack,b.width,b.height,b.depth)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -153,9 +154,10 @@ function floats.flush(which,n,bylabel)
local stack = stacks[which]
local t, b, n = get(stack,n or 1,bylabel)
if t then
- local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d)
+ local w, h, d = setdimensions(b) -- ?
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",
+ which,t.n,n,w,h,d)
else
interfaces.showmessage("floatblocks",3,t.n)
end
@@ -173,9 +175,10 @@ function floats.consult(which,n)
local stack = stacks[which]
local t, b, n = get(stack,n)
if t then
- local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d)
+ local w, h, d = setdimensions(b)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",
+ which,t.n,n,w,h,d)
end
return t, b, n
else
@@ -239,10 +242,14 @@ function floats.checkedpagefloat(packed)
end
end
-function floats.nofstacked()
+function floats.nofstacked(which)
return #stacks[which or default] or 0
end
+function floats.hasstacked(which)
+ return (#stacks[which or default] or 0) > 0
+end
+
-- todo: check for digits !
local method = C((1-S(", :"))^1)
@@ -270,27 +277,101 @@ end
-- interface
-local context = context
-local setvalue = context.setvalue
-
-commands.flushfloat = floats.flush
-commands.savefloat = floats.save
-commands.resavefloat = floats.resave
-commands.pushfloat = floats.push
-commands.popfloat = floats.pop
-commands.consultfloat = floats.consult
-commands.collectfloat = floats.collect
-
-function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end
-function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end
-
-function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end
-function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(...)>0) end
-
-function commands.analysefloatmethod(str) -- currently only one method
- local method, label, row, column = floats.analysemethod(str)
- setvalue("floatmethod",method or "")
- setvalue("floatlabel", label or "")
- setvalue("floatrow", row or "")
- setvalue("floatcolumn",column or "")
-end
+local context = context
+local commands = commands
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+
+implement {
+ name = "flushfloat",
+ actions = floats.flush,
+ arguments = { "string", "integer" },
+}
+
+implement {
+ name = "flushlabeledfloat",
+ actions = floats.flush,
+ arguments = { "string", "string", true },
+}
+
+implement {
+ name = "savefloat",
+ actions = floats.save,
+ arguments = "string"
+}
+
+implement {
+ name = "savespecificfloat",
+ actions = floats.save,
+ arguments = {
+ "string",
+ {
+ { "specification" },
+ { "label" },
+ }
+ }
+}
+
+implement {
+ name = "resavefloat",
+ actions = floats.resave,
+ arguments = "string"
+}
+
+implement {
+ name = "pushfloat",
+ actions = floats.push
+}
+
+implement {
+ name = "popfloat",
+ actions = floats.pop
+}
+
+implement {
+ name = "consultfloat",
+ actions = floats.consult,
+ arguments = "string",
+}
+
+implement {
+ name = "collectfloat",
+ actions = floats.collect,
+ arguments = { "string", "dimen", "dimen" }
+}
+
+implement {
+ name = "getfloatvariable",
+ actions = { floats.getvariable, context },
+ arguments = "string"
+}
+
+implement {
+ name = "checkedpagefloat",
+ actions = { floats.checkedpagefloat, context },
+ arguments = "string"
+}
+
+implement {
+ name = "nofstackedfloats",
+ actions = { floats.nofstacked, context },
+ arguments = "string"
+}
+
+implement {
+ name = "doifelsestackedfloats",
+ actions = { floats.hasstacked, commands.doifelse },
+ arguments = "string"
+}
+
+implement {
+ name = "analysefloatmethod",
+ actions = function(str)
+ local method, label, row, column = floats.analysemethod(str)
+ setmacro("floatmethod",method or "")
+ setmacro("floatlabel", label or "")
+ setmacro("floatrow", row or "")
+ setmacro("floatcolumn",column or "")
+ end,
+ arguments = "string"
+}
diff --git a/tex/context/base/page-flt.mkiv b/tex/context/base/page-flt.mkiv
index d641e1c7d..16c427139 100644
--- a/tex/context/base/page-flt.mkiv
+++ b/tex/context/base/page-flt.mkiv
@@ -83,49 +83,49 @@
\to \everyfloatscheck
\unexpanded\def\page_floats_flush#1#2%
- {\ctxcommand{flushfloat("#1",\number#2)}%
+ {\clf_flushfloat{#1}#2\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_flush_by_label#1#2%
- {\ctxcommand{flushfloat("#1","#2",true)}%
+ {\clf_flushlabeledfloat{#1}{#2}\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_save#1%
- {\ctxcommand{savefloat("#1")}%
+ {\clf_savefloat{#1}\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_resave#1%
- {\ctxcommand{resavefloat("#1")}%
+ {\clf_resavefloat{#1}\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_push_saved
- {\ctxcommand{pushfloat()}%
+ {\clf_pushfloat
\the\everyfloatscheck}
\unexpanded\def\page_floats_pop_saved
- {\ctxcommand{popfloat()}%
+ {\clf_popfloat
\the\everyfloatscheck}
\unexpanded\def\page_floats_get_info#1%
- {\ctxcommand{consultfloat("#1")}}
+ {\clf_consultfloat{#1}}
\unexpanded\def\page_floats_if_else#1%
- {\ctxcommand{doifelsesavedfloat("#1")}}
+ {\clf_doifelsestackedfloats{#1}}
\unexpanded\def\page_floats_collect#1#2#3%
- {\ctxcommand{collectfloat("#1",\number\dimexpr#2,\number\dimexpr#3)}}
+ {\clf_collectfloat{#1}\dimexpr#2\relax\dimexpr#3\relax}
-\unexpanded\def\nofstackedfloatincategory#1%
- {\ctxcommand{nofstackedfloats("#1")}}
+\def\nofstackedfloatincategory#1%
+ {\clf_nofstackedfloats{#1}}
\let\page_floats_column_push_saved\page_floats_push_saved % overloaded in page-mul
\let\page_floats_column_pop_saved \page_floats_pop_saved % overloaded in page-mul
\unexpanded\def\page_floats_save_page_float#1#2%
- {\ctxcommand{savefloat("#1", { specification = "#2" })}}
+ {\clf_savespecificfloat{#1}{specification{#2}}\relax}
\unexpanded\def\page_floats_save_somewhere_float#1#2% #1=method
- {\ctxcommand{savefloat("#1", { specification = "#2", label = "\floatlabel" })}}
+ {\clf_savespecificfloat{#1}{specification{#2}label{\floatlabel}}\relax}
%D This is an experimental new feature (for Alan Braslau), a prelude to more:
%D
@@ -256,7 +256,7 @@
\def\page_floats_flush_page_floats_indeed#1% future releases can do more clever things
{\page_floats_flush{#1}\plusone
- \edef\floatspecification{\ctxcommand{getfloatvariable("specification")}}% Is this okay?
+ \edef\floatspecification{\clf_getfloatvariable{specification}}% Is this okay?
\the\everybeforeflushedpagefloat
\vbox to \textheight
{\doifnotinset\v!high\floatspecification\vfill
@@ -265,7 +265,7 @@
\page_otr_fill_and_eject_page}
\unexpanded\def\page_floats_flush_page_floats % used in postpone
- {\edef\m_page_otf_checked_page_float{\ctxcommand{checkedpagefloat()}}% (true) for packed
+ {\edef\m_page_otf_checked_page_float{\clf_checkedpagefloat}%
\ifx\m_page_otf_checked_page_float\empty
% nothing
\else\ifx\m_page_otf_checked_page_float\v!empty
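Here and in the files below, the string-based \ctxcommand calls are replaced by generated \clf_... commands whose argument scanners are declared with interfaces.implement at the Lua end (see the page-flt.lua section above). A sketch of the correspondence, not part of the patch (the category and label values are illustrative only):

    % Lua side (above):  arguments = { "string", { { "specification" }, { "label" } } }
    % TeX side        :  \clf_savespecificfloat{text}{specification{here}label{fig:demo}}\relax

The braced groups are picked up by token scanners, so the call no longer has to be assembled and parsed as a Lua string at each invocation.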
diff --git a/tex/context/base/page-flw.mkiv b/tex/context/base/page-flw.mkiv
index ec1fa636d..56fe32e5b 100644
--- a/tex/context/base/page-flw.mkiv
+++ b/tex/context/base/page-flw.mkiv
@@ -80,14 +80,14 @@
\def\textflowcollector#1%
{\csname\??textflowbox#1\endcsname}
-\unexpanded\def\doiftextflowcollectorelse#1%
+\unexpanded\def\doifelsetextflowcollector#1%
{\ifcsname\??textflowbox#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doiftextflowelse#1%
+\unexpanded\def\doifelsetextflow#1%
{\ifcsname\??textflowbox#1\endcsname
\ifvoid\csname\??textflowbox#1\endcsname
\doubleexpandafter\secondoftwoarguments
@@ -98,6 +98,9 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiftextflowcollectorelse\doifelsetextflowcollector
+\let\doiftextflowelse \doifelsetextflow
+
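The same rename pattern, \doif...else becoming \doifelse... with the old name kept as a \let synonym, recurs in most of the files below, so existing documents and modules keep working. A trivial sketch, not part of the patch:

    \doifelsetextflowcollector {intro}
      {collector intro exists}
      {no such collector}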
% \unexpanded\def\doiftextflow#1%
% {\doiftextflowelse{#1}\firstofoneargument\gobbleoneargument}
diff --git a/tex/context/base/page-grd.mkiv b/tex/context/base/page-grd.mkiv
index 281d0bfbe..e70414b66 100644
--- a/tex/context/base/page-grd.mkiv
+++ b/tex/context/base/page-grd.mkiv
@@ -15,22 +15,19 @@
\unprotect
-\definepalet
- [layout]
- [grid=red,
- page=green]
-
\newconstant\c_page_grids_location
\newconstant\c_page_grids_line_mode
\newconstant\c_page_grids_lineno_mode
+\newconstant\c_page_grids_columns_mode
\unexpanded\def\showgrid
{\dosingleempty\page_grids_show}
\def\page_grids_show[#1]%
- {\c_page_grids_location \plusone % downward compatible default
- \c_page_grids_line_mode \plusone
- \c_page_grids_lineno_mode\plusone
+ {\c_page_grids_location \plusone % downward compatible default
+ \c_page_grids_line_mode \plusone
+ \c_page_grids_lineno_mode \plusone
+ \c_page_grids_columns_mode\plusone
\processallactionsinset
[#1]%
[ \v!reset=>\c_page_grids_location \zerocount,
@@ -48,14 +45,24 @@
\let\page_grids_add_to_box\gobbleoneargument
\else % 1=bottom 2=top
\let\page_grids_add_to_box\page_grids_add_to_box_indeed
+ \fi
+ \ifcase\c_page_grids_columns_mode
+ \let\page_grids_add_to_one\gobbleoneargument
+ \let\page_grids_add_to_mix\gobbleoneargument
+ \else
+ \let\page_grids_add_to_one\page_grids_add_to_one_indeed
+ \let\page_grids_add_to_mix\page_grids_add_to_mix_indeed
\fi}
% if really needed for speed we can cache the grid
\let\page_grids_add_to_box\gobbleoneargument
+\let\page_grids_add_to_one\gobbleoneargument
+\let\page_grids_add_to_mix\gobbleoneargument
\def\page_grids_add_to_box_indeed#1% to be checked for color and layer ..... use mp
{\startcolor[layout:grid]%
+ \resetvisualizers
\gridboxlinemode \c_page_grids_line_mode
\gridboxlinenomode\c_page_grids_lineno_mode
\setgridbox\scratchbox\makeupwidth\textheight % todo: check color
@@ -82,4 +89,16 @@
\ifcase\c_page_grids_location\or\hskip-\makeupwidth\box#1\fi}%
\stopcolor}
+\def\page_grids_add_to_one_indeed#1%
+ {\begingroup
+ \resetvisualizers
+ \global\setbox#1\vbox{\backgroundline[layout:one]{\box#1}}%
+ \endgroup}
+
+\def\page_grids_add_to_mix_indeed#1%
+ {\begingroup
+ \resetvisualizers
+ \global\setbox#1\vbox{\backgroundline[layout:mix]{\box#1}}%
+ \endgroup}
+
\protect \endinput
diff --git a/tex/context/base/page-imp.mkiv b/tex/context/base/page-imp.mkiv
index cfa535ab2..198a98229 100644
--- a/tex/context/base/page-imp.mkiv
+++ b/tex/context/base/page-imp.mkiv
@@ -41,7 +41,7 @@
\prependtoks
\page_shipouts_flush_text_data
\to \everylastshipout
-
+
% Problem: we need to apply the finalizers to a to be shipped out page (as
% we can have positioning involved). However, we can also add stuff in the
% imposition, like cropmarks. Fortunately we do that with metapost so
@@ -163,7 +163,7 @@
\donetrue
\fi
\else % testen, aangepast / expanded nodig ?
- \normalexpanded{\doifinsetelse{\the\shippedoutpages}{\pagestoshipout}}\donetrue\donefalse
+ \normalexpanded{\doifelseinset{\the\shippedoutpages}{\pagestoshipout}}\donetrue\donefalse
\fi
\ifdone
\setbox\shipoutscratchbox\hbox{#1}%
@@ -288,7 +288,7 @@
\fi
\setuppapersize
\ifarrangingpages
- \ctxlua{job.disablesave()}%
+ \clf_disablejobsave
%\disabledirective[job.save]%
\fi
\fi}
@@ -355,14 +355,14 @@
{\dosetuparrangement{2}{2}{4}{3}{3}%
\pusharrangedpageSIXTEENTWO\poparrangedpagesAtoD\relax}
-\installpagearrangement 2*2*4 % onother one of Willy Egger
+\installpagearrangement 2*2*4 % another one of Willy Egger
{\dosetuparrangement{2}{1}{8}{3}{2}%
\pusharrangedpageSIXTEENFOUR\poparrangedpagesAtoH\relax}
\installpagearrangement 2TOPSIDE
{\dosetuparrangement{1}{2}{4}{2}{3}%
\pusharrangedpageTWOTOPSIDE\poparrangedpagesTWOTOPSIDE\handlearrangedpageTOP}
-
+
\def\filluparrangedpages % beware: \realpageno is 1 ahead
{\ifarrangingpages
\scratchcounter\numexpr\realpageno-\plusone\relax
@@ -748,7 +748,7 @@
\poparrangedpages
\fi}
-%D Might be used if a printer is printing from a rol or creating mini-books from A4:
+%D Might be used if a printer is printing from a roll or creating mini-books from A4:
%D This section has 16 pages. The folding scheme is first a Z-fold and at the end
%D a final fold in the spine.
%D Coding: [2*8*Z]
@@ -1045,7 +1045,7 @@
%D There should be arrangements for sections made of heavy and thick paper. i.e. the heavier the paper
%D the fewer pages per section:
-%D Section with 8 pages put on to sheets of paper. Each sheet carries recto 2 and verso 2 pages.
+%D Section with 8 pages put on two sheets of paper. Each sheet carries recto 2 and verso 2 pages.
%D Coding: [2*2*2]
\installpagearrangement 2*2*2
@@ -1351,7 +1351,7 @@
\doifelse{#1}\v!page {\let\page_boxes_apply_shift_print\page_boxes_apply_shift}{\let\page_boxes_apply_shift_print\gobbleoneargument}%
\doifelse{#1}\v!paper{\let\page_boxes_apply_shift_paper\page_boxes_apply_shift}{\let\page_boxes_apply_shift_paper\gobbleoneargument}%
\else\ifsecondargument
- \doifinsetelse{#1}{\v!page,\v!paper}
+ \doifelseinset{#1}{\v!page,\v!paper}
{\setuppageshift[#1][#2][#2]}
{\setuppageshift[\v!page][#1][#2]}%
\else\iffirstargument
diff --git a/tex/context/base/page-ini.mkiv b/tex/context/base/page-ini.mkiv
index fdffa552d..15783a99b 100644
--- a/tex/context/base/page-ini.mkiv
+++ b/tex/context/base/page-ini.mkiv
@@ -102,6 +102,7 @@
\page_otr_check_for_pending_inserts
% but does not hurt either (we're still in the otr!)
\inpagebodytrue % needed for enabling \blank ! brrr
+ \pagebodymode\plusone % todo: \plustwo when spread
\page_otr_command_flush_saved_floats
\page_otr_command_set_vsize % this is needed for interacting components, like floats and multicolumns
\strc_pagenumbers_increment_counters % should hook into an every
@@ -143,9 +144,15 @@
\newconstant\pageornamentstate % 0=on 1=one-off 2=always-off
+% \appendtoks
+% \ifcase\pageornamentstate \or
+% \pageornamentstate\zerocount
+% \fi
+% \to \everyaftershipout
+
\appendtoks
\ifcase\pageornamentstate \or
- \pageornamentstate\zerocount
+ \global\pageornamentstate\zerocount
\fi
\to \everyaftershipout
@@ -229,7 +236,7 @@
\the\everyafterpagebody
\egroup}
-\def\doiftopofpageelse
+\def\doifelsetopofpage
{\ifdim\pagegoal=\maxdimen
\expandafter\firstoftwoarguments
\else\ifdim\pagegoal=\vsize
@@ -238,6 +245,8 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
+\let\doiftopofpageelse\doifelsetopofpage
+
% %D Idea:
%
% \newinsert\thispageinsert % <- installinsertion
diff --git a/tex/context/base/page-inj.lua b/tex/context/base/page-inj.lua
index 56e5a234e..fd66ead08 100644
--- a/tex/context/base/page-inj.lua
+++ b/tex/context/base/page-inj.lua
@@ -16,10 +16,9 @@ pagebuilders.injections = injections
local report = logs.reporter("pagebuilder","injections")
local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end)
-local variables = interfaces.variables
-
local context = context
-local commands = commands
+local implement = interfaces.implement
+local variables = interfaces.variables
local texsetcount = tex.setcount
@@ -103,6 +102,24 @@ function injections.flushafter() -- maybe not public, just commands.*
end
end
-commands.page_injections_save = injections.save
-commands.page_injections_flush_after = injections.flushafter
-commands.page_injections_flush_before = injections.flushbefore
+implement {
+ name = "savepageinjections",
+ actions = injections.save,
+ arguments = {
+ {
+ { "name" },
+ { "state" },
+ { "userdata" }
+ }
+ }
+}
+
+implement {
+ name = "flushpageinjectionsbefore",
+ actions = injections.flushbefore
+}
+
+implement {
+ name = "flushpageinjectionsafter",
+ actions = injections.flushafter
+}
diff --git a/tex/context/base/page-inj.mkvi b/tex/context/base/page-inj.mkvi
index bee564683..03472fe55 100644
--- a/tex/context/base/page-inj.mkvi
+++ b/tex/context/base/page-inj.mkvi
@@ -29,8 +29,8 @@
\installframedcommandhandler \??pageinjection {pageinjection} \??pageinjection
\installcommandhandler \??pageinjectionalternative {pageinjectionalternative} \??pageinjectionalternative
-\def\page_boxes_flush_before{\ctxcommand{page_injections_flush_before()}}
-\def\page_boxes_flush_after {\ctxcommand{page_injections_flush_after ()}}
+\let\page_boxes_flush_before\clf_flushpageinjectionsbefore
+\let\page_boxes_flush_after \clf_flushpageinjectionsafter
\def\page_injections_flush_saved#name#parameters%
{\begingroup
@@ -47,11 +47,11 @@
\dodoubleempty\page_injections_direct}
\def\page_injections_direct[#1][#2]% name parameters | settings parameters | name | parameters
- {\doifassignmentelse{#1}
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#1}
+ {\doifelseassignment{#2}
{\page_injections_direct_settings_parameters{#1}{#2}}
{\page_injections_direct_parameters {#1}}}
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{\page_injections_direct_name_parameters {#1}{#2}}
{\page_injections_direct_name {#1}}}}
@@ -87,11 +87,11 @@
\fi}
\def\page_injections_direct_indeed_yes#parameters%
- {\ctxcommand{page_injections_save{
- name = "\currentpageinjection",
- state = "\p_page_injections_state",
- userdata = \!!bs\normalunexpanded{#parameters}\!!es
- }}%
+ {\clf_savepageinjections
+ name {\currentpageinjection}%
+ state {\p_page_injections_state}%
+ userdata {\normalunexpanded{#parameters}}%
+ \relax
\endgroup}
\def\page_injections_direct_indeed_nop#parameters%
diff --git a/tex/context/base/page-ins.lua b/tex/context/base/page-ins.lua
index 7f870735d..235f586c6 100644
--- a/tex/context/base/page-ins.lua
+++ b/tex/context/base/page-ins.lua
@@ -4,19 +4,9 @@ if not modules then modules = { } end modules ['page-ins'] = {
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
- -- public = {
- -- functions = {
- -- "inserts.define",
- -- "inserts.getdata",
- -- },
- -- commands = {
- -- "defineinsertion",
- -- "inserttionnumber",
- -- }
- -- }
}
--- Maybe we should only register in lua and forget about the tex end.
+local next = next
structures = structures or { }
structures.inserts = structures.inserts or { }
@@ -36,6 +26,9 @@ local v_firstcolumn = variables.firstcolumn
local v_lastcolumn = variables.lastcolumn
local v_text = variables.text
+local context = context
+local implement = interfaces.implement
+
storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
local data = inserts.data
@@ -49,7 +42,7 @@ end
function inserts.define(name,specification)
specification.name= name
local number = specification.number or 0
- data[name] = specification
+ data[name] = specification
data[number] = specification
-- only needed at runtime as this get stored in a bytecode register
stored[name] = specification
@@ -90,8 +83,37 @@ end
-- interface
-commands.defineinsertion = inserts.define
-commands.setupinsertion = inserts.setup
-commands.setinsertionlocation = inserts.setlocation
-commands.insertionnumber = function(name) context(data[name].number or 0) end
+implement {
+ name = "defineinsertion",
+ actions = inserts.define,
+ arguments = {
+ "string",
+ {
+ { "number", "integer" }
+ }
+ }
+}
+
+implement {
+ name = "setupinsertion",
+ actions = inserts.setup,
+ arguments = {
+ "string",
+ {
+ { "location" }
+ }
+ }
+}
+
+implement {
+ name = "setinsertionlocation",
+ actions = inserts.setlocation,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "insertionnumber",
+ actions = function(name) context(data[name].number or 0) end,
+ arguments = "string"
+}
diff --git a/tex/context/base/page-ins.mkiv b/tex/context/base/page-ins.mkiv
index a63de0b26..c91073a14 100644
--- a/tex/context/base/page-ins.mkiv
+++ b/tex/context/base/page-ins.mkiv
@@ -82,13 +82,15 @@
\insert#1{\unvbox#1}%
\fi}
-\unexpanded\def\doifinsertionelse#1%
+\unexpanded\def\doifelseinsertion#1%
{\ifcsname\??insertionnumber#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifinsertionelse\doifelseinsertion
+
% \unexpanded\def\startinsertion[#1]%
% {\insert\csname\??insertionnumber#1\endcsname\bgroup}
%
@@ -108,7 +110,12 @@
\else
\expandafter\newinsert\csname\??insertionnumber\currentinsertion\endcsname
\page_inserts_synchronize_registers
- \ctxcommand{defineinsertion("\currentinsertion",{ number = \number\currentinsertionnumber })}%
+ \clf_defineinsertion
+ {\currentinsertion}%
+ {%
+ number \currentinsertionnumber
+ }%
+ \relax
\t_page_inserts_list\expandafter\expandafter\expandafter
{\expandafter\the\expandafter\t_page_inserts_list
\expandafter\page_inserts_process\csname\??insertionnumber\currentinsertion\endcsname}%
@@ -123,13 +130,16 @@
\to \everydefineinsertion
\appendtoks
- \ctxcommand{setupinsertion("\currentinsertion",{
- location = "\insertionparameter\c!location",
- })}%
+ \clf_setupinsertion
+ {\currentinsertion}
+ {%
+ location {\insertionparameter\c!location}%
+ }%
+ \relax
\to \everysetupinsertion
\unexpanded\def\page_inserts_set_location#1#2% fast one
- {\ctxcommand{setinsertionlocation("#1","#2")}}
+ {\clf_setinsertionlocation{#1}{#2}}
%D Auxiliary macros:
diff --git a/tex/context/base/page-lay.mkiv b/tex/context/base/page-lay.mkiv
index 81eb0423c..d1328bb6b 100644
--- a/tex/context/base/page-lay.mkiv
+++ b/tex/context/base/page-lay.mkiv
@@ -30,6 +30,8 @@
\newdimen\paperheight \paperheight = 297mm
\newdimen\paperwidth \paperwidth = 210mm
\newdimen\paperoffset \paperoffset = \zeropoint
+\newdimen\paperbleed \paperbleed = \zeropoint
+\newdimen\spinewidth \spinewidth = \zeropoint
\newdimen\printpaperheight \printpaperheight = \paperheight
\newdimen\printpaperwidth \printpaperwidth = \paperwidth
@@ -154,13 +156,15 @@
\fi
\to \everysetuplayout
-\def\doiflayoutdefinedelse#1%
+\def\doifelselayoutdefined#1%
{\ifcsname\namedlayouthash{#1}\c!state\endcsname % maybe a helper
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doiflayoutdefinedelse\doifelselayoutdefined
+
\def\layoutdistance#1#2{\ifdim\zeropoint<#1#2\else\zeropoint\fi}
\def\page_layouts_set_dimensions
@@ -365,7 +369,7 @@
\ifx\currentlayouttarget\empty
% invalid target
\else
- \doifassignmentelse{#2}
+ \doifelseassignment{#2}
{\definelayouttarget[#1][#2]}
{\setevalue{\??layoutpaper#1}{#2}%
\setevalue{\??layoutprint#1}{#3}}%
@@ -390,9 +394,9 @@
\unexpanded\def\page_paper_setup_size[#1][#2]%
{\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\page_paper_setup_size_settings[#1]}
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{\page_paper_setup_size_settings_by_name[#1][#2]}
{\page_paper_setup_size_change_size[#1][#2]}}%
\else
@@ -1026,12 +1030,12 @@
\unexpanded\def\startlayout[#1]%
{\page
- \pushmacro\currentlayout
- \doiflayoutdefinedelse{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
+ \globalpushmacro\currentlayout
+ \doifelselayoutdefined{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
\unexpanded\def\stoplayout
{\page
- \popmacro\currentlayout
+ \globalpopmacro\currentlayout
\setuplayout[\currentlayout]}
% NOG EENS NAGAAN WANNEER NU GLOBAL EN WANNEER NIET
@@ -1121,13 +1125,15 @@
% #single #left #right
-\def\doifoddpageelse
+\def\doifelseoddpage
{\ifodd\pagenoshift
\expandafter\page_layouts_if_odd_else_yes
\else
\expandafter\page_layouts_if_odd_else_nop
\fi}
+\let\doifoddpageelse\doifelseoddpage
+
\def\page_layouts_if_odd_else_yes
{\ifodd\realpageno
\expandafter\secondoftwoarguments
@@ -1142,9 +1148,7 @@
\expandafter\secondoftwoarguments
\fi}
-\let\doifonevenpaginaelse\doifoddpageelse
-
-\def\page_layouts_if_odd_else_again#1{\doifoddpageelse}
+\def\page_layouts_if_odd_else_again#1{\doifelseoddpage}
\def\doifbothsidesoverruled
{\ifdoublesided
@@ -1171,7 +1175,7 @@
\def\settexthoffset % name will change
{\texthoffset\doifbothsides\backspace\backspace{\dimexpr\paperwidth-\backspace-\makeupwidth\relax}}
-
+
% The next hack is too tricky as we may shipout more pages:
%
% \def\freezepagestatechecks
@@ -1199,11 +1203,13 @@
\def\goleftonpage % name will change (we could cache)
{\hskip-\dimexpr\leftmargindistance+\leftmarginwidth+\leftedgedistance+\leftedgewidth\relax}
-\def\doifmarginswapelse#1#2%
+\def\doifelsemarginswap#1#2%
{\doifbothsides{#1}{#1}{#2}}
+\let\doifmarginswapelse\doifelsemarginswap
+
\def\swapmargins % name will change
- {\doifmarginswapelse\relax\doswapmargins}
+ {\doifelsemarginswap\relax\doswapmargins}
\def\doswapmargins % name will change
{\let\swapmargins \relax % to prevent local swapping
@@ -1221,7 +1227,7 @@
{\ifsinglesided
\expandafter\firstoftwoarguments
\else
- \expandafter\doifoddpageelse
+ \expandafter\doifelseoddpage
\fi}
\def\outermarginwidth {\rightorleftpageaction\rightmarginwidth \leftmarginwidth }
@@ -1275,7 +1281,7 @@
{\globalpopmacro\currentlayout
\globalpopmacro\page_paper_restore
\page_paper_restore
- \setuplayout\relax}
+ \setuplayout[\currentlayout]\relax} % explicit !
%D \macros
%D {showprint, showframe, showlayout, showsetups}
@@ -1377,6 +1383,9 @@
\definepapersize [A9] [\c!width=37mm,\c!height=52mm]
\definepapersize [A10] [\c!width=26mm,\c!height=37mm]
+\definepapersize [A4/2][\c!width=\dimexpr297mm/2\relax,\c!height=210mm] % 148.5mm
+%definepapersize [2A5] [\c!width=296mm,\c!height=210mm] % doublewide
+
\definepapersize [B0] [\c!width=1000mm,\c!height=1414mm]
\definepapersize [B1] [\c!width=707mm,\c!height=1000mm]
\definepapersize [B2] [\c!width=500mm,\c!height=707mm]
@@ -1424,6 +1433,7 @@
\definepapersize [SW] [\c!width=800pt,\c!height=450pt] % wide
\definepapersize [HD] [\c!width=1920pt,\c!height=1080pt]
\definepapersize [HD+] [\c!width=1920pt,\c!height=1200pt]
+\definepapersize [HD-] [\c!width=960pt,\c!height=540pt]
%D These are handy too:
@@ -1491,6 +1501,11 @@
\definepapersize [A3plus] [\c!width=329mm,\c!height=483mm]
+%D For Alan:
+
+\definepapersize [business] [\c!width=85mm,\c!height=55mm]
+\definepapersize [businessUS] [\c!width=3.5in,\c!height=2in]
+
%D We can now default to a reasonable size. We match the print
%D paper size with the typeset paper size. This setting should
%D come after the first layout specification (already done).
@@ -1505,15 +1520,18 @@
% [ \c!width=\paperwidth,
% \c!height=\paperheight]
+\setuppapersize
+ [\c!distance=1.5cm] % offset is already taken
+
\definepapersize
[oversized]
- [ \c!width=\dimexpr\paperwidth +1.5cm\relax,
- \c!height=\dimexpr\paperheight+1.5cm\relax]
+ [ \c!width=\dimexpr\paperwidth +\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr\paperheight+\layouttargetparameter\c!distance\relax]
\definepapersize
[undersized]
- [ \c!width=\dimexpr\paperwidth -1.5cm\relax,
- \c!height=\dimexpr\paperheight-1.5cm\relax]
+ [ \c!width=\dimexpr\paperwidth -\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr\paperheight-\layouttargetparameter\c!distance\relax]
\definepapersize
[doublesized]
@@ -1522,8 +1540,13 @@
\definepapersize
[doubleoversized]
- [ \c!width=\dimexpr \paperheight+1.5cm\relax,
- \c!height=\dimexpr2\paperwidth +1.5cm\relax]
+ [ \c!width=\dimexpr \paperheight+\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr2\paperwidth +\layouttargetparameter\c!distance\relax]
+
+\definepapersize
+ [doublewide]
+ [ \c!width=\dimexpr2\paperwidth \relax,
+ \c!height=\dimexpr \paperheight\relax]
% \setuppapersize
% [A4][A4]
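The fixed 1.5cm used for the oversized and undersized print sizes is now taken from a distance parameter (preset to 1.5cm above), and a doublewide size is added. A usage sketch, not part of the patch and with illustrative values; it assumes that \setuppapersize[\c!distance=...] indeed feeds the \layouttargetparameter read in the definitions above:

    \setuppapersize [\c!distance=2cm]   % extra room for crop marks
    \setuppapersize [A4] [oversized]    % print paper 2cm larger in width and height
    \setuppapersize [A5] [doublewide]   % print paper twice as wide, cf. the 2A5 comment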
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index 7e8e9ad8a..5a447c458 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -8,37 +8,49 @@ if not modules then modules = { } end modules ['page-lin'] = {
-- experimental -> will become builders
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
+-- if there is demand for it, we can support multiple numbering streams
+-- and use more than one attribute
-local report_lines = logs.reporter("lines")
+local next, tonumber = next, tonumber
-local attributes, nodes, node, context = attributes, nodes, node, context
+local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
+local report_lines = logs.reporter("lines")
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
+local attributes = attributes
+local nodes = nodes
+local context = context
-local texgetbox = tex.getbox
+local implement = interfaces.implement
-lines.scratchbox = lines.scratchbox or 0
+nodes.lines = nodes.lines or { }
+local lines = nodes.lines
-local leftmarginwidth = nodes.leftmarginwidth
+lines.data = lines.data or { } -- start step tag
+local data = lines.data
+local last = #data
-storage.register("lines/data", lines.data, "nodes.lines.data")
+lines.scratchbox = lines.scratchbox or 0
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attibute
+storage.register("lines/data", data, "nodes.lines.data")
local variables = interfaces.variables
+local v_next = variables.next
+local v_page = variables.page
+local v_no = variables.no
+
local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local whatsit_code = nodecodes.whatsit
+local glue_code = nodecodes.glue
+local glyph_code = nodecodes.glyph
+local leftskip_code = skipcodes.leftskip
+local textdir_code = whatcodes.dir
local a_displaymath = attributes.private('displaymath')
local a_linenumber = attributes.private('linenumber')
@@ -49,12 +61,32 @@ local current_list = { }
local cross_references = { }
local chunksize = 250 -- not used in boxed
-local traverse_id = node.traverse_id
-local traverse = node.traverse
-local copy_node = node.copy
-local hpack_node = node.hpack
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
+local nuts = nodes.nuts
+
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getnext = nuts.getnext
+local getattr = nuts.getattr
+local getlist = nuts.getlist
+local getbox = nuts.getbox
+local getfield = nuts.getfield
+
+local setfield = nuts.setfield
+
+local traverse_id = nuts.traverse_id
+local traverse = nuts.traverse
+local copy_node = nuts.copy
+local hpack_node = nuts.hpack
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local is_display_math = nuts.is_display_math
+local leftmarginwidth = nuts.leftmarginwidth
+
+local negated_glue = nuts.pool.negatedglue
+local new_hlist = nuts.pool.hlist
+
+local ctx_convertnumber = context.convertnumber
+local ctx_makelinenumber = context.makelinenumber
-- cross referencing
@@ -67,16 +99,16 @@ end
local function resolve(n,m) -- we can now check the 'line' flag (todo)
while n do
- local id = n.id
+ local id = getid(n)
if id == whatsit_code then -- why whatsit
- local a = n[a_linereference]
+ local a = getattr(n,a_linereference)
if a then
cross_references[a] = m
end
elseif id == hlist_code or id == vlist_code then
- resolve(n.list,m)
+ resolve(getlist(n),m)
end
- n = n.next
+ n = getnext(n)
end
end
@@ -105,7 +137,7 @@ filters.line = filters.line or { }
function filters.line.default(data)
-- helpers.title(data.entries.linenumber or "?",data.metadata)
- context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
+ ctx_convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
end
function filters.line.page(data,prefixspec,pagespec) -- redundant
@@ -133,9 +165,19 @@ function boxed.register(configuration)
return last
end
-function commands.registerlinenumbering(configuration)
- context(boxed.register(configuration))
-end
+implement {
+ name = "registerlinenumbering",
+ actions = { boxed.register, context },
+ arguments = {
+ {
+ { "continue" },
+ { "start", "integer" },
+ { "step", "integer" },
+ { "method" },
+ { "tag" },
+ }
+ }
+}
function boxed.setup(n,configuration)
local d = data[n]
@@ -155,7 +197,20 @@ function boxed.setup(n,configuration)
return n
end
-commands.setuplinenumbering = boxed.setup
+implement {
+ name = "setuplinenumbering",
+ actions = boxed.setup,
+ arguments = {
+ "integer",
+ {
+ { "continue" },
+ { "start", "integer" },
+ { "step", "integer" },
+ { "method" },
+ { "tag" },
+ }
+ }
+}
local function check_number(n,a,skip,sameline)
local d = data[a]
@@ -165,20 +220,20 @@ local function check_number(n,a,skip,sameline)
if sameline then
skipflag = 0
if trace_numbers then
- report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
elseif not skip and s % d.step == 0 then
skipflag, d.start = 1, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
else
skipflag, d.start = 0, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
end
- context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
+ ctx_makelinenumber(tag,skipflag,s,getfield(n,"shift"),getfield(n,"width"),leftmarginwidth(getlist(n)),getfield(n,"dir"))
end
end
@@ -189,26 +244,27 @@ end
local function identify(list)
if list then
for n in traverse_id(hlist_code,list) do
- if n[a_linenumber] then
- return list
+ local a = getattr(n,a_linenumber)
+ if a then
+ return list, a
end
end
local n = list
while n do
- local id = n.id
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- local ok = identify(n.list)
+ local ok, a = identify(getlist(n))
if ok then
- return ok
+ return ok, a
end
end
- n = n.next
+ n = getnext(n)
end
end
end
function boxed.stage_zero(n)
- return identify(texgetbox(n).list)
+ return identify(getlist(getbox(n)))
end
-- reset ranges per page
@@ -217,66 +273,143 @@ end
function boxed.stage_one(n,nested)
current_list = { }
- local box = texgetbox(n)
+ local box = getbox(n)
if box then
- local list = box.list
- if nested then
- list = identify(list)
+ local found = nil
+ local list = getlist(box)
+ if list and nested then
+ list, found = identify(list)
end
- local last_a, last_v, skip = nil, -1, false
- for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
- if n.height == 0 and n.depth == 0 then
- -- skip funny hlists -- todo: check line subtype
- else
- local list = n.list
- local a = list[a_linenumber]
- if a and a > 0 then
- if last_a ~= a then
- local da = data[a]
- local ma = da.method
- if ma == variables.next then
- skip = true
- elseif ma == variables.page then
- da.start = 1 -- eventually we will have a normal counter
- end
- last_a = a
- if trace_numbers then
- report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
+ if list then
+ local last_a, last_v, skip = nil, -1, false
+ for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
+ if getfield(n,"height") == 0 and getfield(n,"depth") == 0 then
+ -- skip funny hlists -- todo: check line subtype
+ else
+ local list = getlist(n)
+ local a = getattr(list,a_linenumber)
+ if not a or a == 0 then
+ local n = getnext(list)
+ while n do
+ local id = getid(n)
+ if id == whatsit_code and getsubtype(n) == textdir_code then
+ n = getnext(n)
+ elseif id == glue_code and getsubtype(n) == leftskip_code then
+ n = getnext(n)
+ else
+if id == glyph_code then
+ break
+else
+ -- can be hlist or skip (e.g. footnote line)
+ n = getnext(n)
+end
+ end
end
+ a = n and getattr(n,a_linenumber)
end
- if n[a_displaymath] then
- if nodes.is_display_math(n) then
- check_number(n,a,skip)
+ if a and a > 0 then
+ if last_a ~= a then
+ local da = data[a]
+ local ma = da.method
+ if ma == v_next then
+ skip = true
+ elseif ma == v_page then
+ da.start = 1 -- eventually we will have a normal counter
+ end
+ last_a = a
+ if trace_numbers then
+ report_lines("starting line number range %s: start %s, continue %s",a,da.start,da.continue or v_no)
+ end
end
- else
- local v = list[a_verbatimline]
- if not v or v ~= last_v then
- last_v = v
- check_number(n,a,skip)
+ if getattr(n,a_displaymath) then
+ if is_display_math(n) then
+ check_number(n,a,skip)
+ end
else
- check_number(n,a,skip,true)
+ local v = getattr(list,a_verbatimline)
+ if not v or v ~= last_v then
+ last_v = v
+ check_number(n,a,skip)
+ else
+ check_number(n,a,skip,true)
+ end
end
+ skip = false
end
- skip = false
end
end
end
end
end
+-- [dir][leftskip][content]
+
+function boxed.stage_two(n,m)
+ if #current_list > 0 then
+ m = m or lines.scratchbox
+ local t, tn = { }, 0
+ for l in traverse_id(hlist_code,getlist(getbox(m))) do
+ tn = tn + 1
+ t[tn] = copy_node(l) -- use take_box instead
+ end
+ for i=1,#current_list do
+ local li = current_list[i]
+ local n, m, ti = li[1], li[2], t[i]
+ if ti then
+ local l = getlist(n)
+ -- we want to keep leftskip at the start
+-- local id = getid(l)
+-- if id == whatsit_code and getsubtype(l) == textdir_code then
+-- l = getnext(l)
+-- id = getid(l)
+-- end
+-- if getid(l) == glue_code and getsubtype(l) == leftskip_code then
+-- -- [leftskip] [number] [rest]
+-- local forward = copy_node(l)
+-- local backward = negated_glue(l)
+-- local next = getnext(l)
+-- setfield(l,"next",backward)
+-- setfield(backward,"prev",l)
+-- setfield(backward,"next",ti)
+-- setfield(ti,"prev",backward)
+-- setfield(ti,"next",forward)
+-- setfield(forward,"prev",ti)
+-- setfield(forward,"next",next)
+-- setfield(next,"prev",forward)
+-- else
+ -- [number] [rest]
+ setfield(ti,"next",l)
+ setfield(l,"prev",ti)
+ setfield(n,"list",ti)
+-- end
+ resolve(n,m)
+ else
+ report_lines("error in linenumbering (1)")
+ return
+ end
+ end
+ end
+end
+
function boxed.stage_two(n,m)
if #current_list > 0 then
m = m or lines.scratchbox
local t, tn = { }, 0
- for l in traverse_id(hlist_code,texgetbox(m).list) do
+ for l in traverse_id(hlist_code,getlist(getbox(m))) do
tn = tn + 1
- t[tn] = copy_node(l)
+ t[tn] = copy_node(l) -- use take_box instead
end
for i=1,#current_list do
local li = current_list[i]
local n, m, ti = li[1], li[2], t[i]
if ti then
- ti.next, n.list = n.list, ti
+ local l = getlist(n)
+ setfield(ti,"next",l)
+ setfield(l,"prev",ti)
+ local h = copy_node(n)
+ setfield(h,"dir","TLT")
+ setfield(h,"list",ti)
+ setfield(n,"list",h)
resolve(n,m)
else
report_lines("error in linenumbering (1)")
@@ -286,5 +419,14 @@ function boxed.stage_two(n,m)
end
end
-commands.linenumbersstageone = boxed.stage_one
-commands.linenumbersstagetwo = boxed.stage_two
+implement {
+ name = "linenumbersstageone",
+ actions = boxed.stage_one,
+ arguments = { "integer", "boolean" }
+}
+
+implement {
+ name = "linenumbersstagetwo",
+ actions = boxed.stage_two,
+ arguments = { "integer", "integer" }
+}
diff --git a/tex/context/base/page-lin.mkiv b/tex/context/base/page-lin.mkiv
deleted file mode 100644
index ae293091c..000000000
--- a/tex/context/base/page-lin.mkiv
+++ /dev/null
@@ -1,573 +0,0 @@
-%D \module
-%D [ file=page-lin,
-%D version=2007.11.29,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Line Numbering,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% generic or not ... maybe not bother too much and simplify to mkiv only
-% get rid of \mk* (left over from experimental times)
-%
-% to be redone (was experiment) .. can be hooked into margin code
-
-\writestatus{loading}{ConTeXt Core Macros / Line Numbering}
-
-\unprotect
-
-% todo: save settings
-%
-% low level interface
-%
-% we should use normal counters but then we need to sync settings
-
-% some line
-%
-% \startlocallinenumbering
-% some source code 1\par
-% some source code 2\par
-% some source code 3\par
-% \stoplocallinenumbering
-%
-% some line
-
-\registerctxluafile{page-lin}{1.001}
-
-\definesystemattribute[linenumber] [public]
-\definesystemattribute[linereference][public]
-
-\appendtoksonce
- \attribute\linenumberattribute\attributeunsetvalue
-\to \everyforgetall
-
-\newcount \linenumber % not used
-\newbox \b_page_lines_scratch
-\newcount \c_page_lines_reference
-\newconstant\c_page_lines_nesting
-
-\newconditional\tracelinenumbering % we keep this for old times sake
-
-\installtextracker
- {lines.numbers.show}
- {\settrue \tracelinenumbering}
- {\setfalse\tracelinenumbering}
-
-% id nr shift width leftskip dir
-
-\installcorenamespace{linenumberinginstance}
-
-\let\makelinenumber\gobblesevenarguments % used at lua end
-
-\newconditional\page_postprocessors_needed_box
-
-\unexpanded\def\page_postprocessors_linenumbers_page #1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \zerocount}
-\unexpanded\def\page_postprocessors_linenumbers_box #1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \zerocount}
-\unexpanded\def\page_postprocessors_linenumbers_deepbox#1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \plusone }
-\unexpanded\def\page_postprocessors_linenumbers_column #1{\page_lines_add_numbers_to_box{#1}\currentcolumn\nofcolumns\zerocount}
-
-\def\page_lines_parameters_regular
- {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi",
- start = \number\linenumberingparameter\c!start,
- step = \number\linenumberingparameter\c!step,
- method = "\linenumberingparameter\c!method",
- tag = "\currentlinenumbering"}
-
-\def\page_lines_parameters_update
- {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi"}
-
-\def\page_lines_start_define
- {\setxvalue{\??linenumberinginstance\currentlinenumbering}{\ctxcommand{registerlinenumbering({\page_lines_parameters_regular})}}}
-
-\def\page_lines_start_update
- {\ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_update})}}
-
-\def\page_lines_setup
- {\ifcsname \??linenumberinginstance\currentlinenumbering\endcsname
- \ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_regular})}%
- \fi}
-
-% we could make this a bit more efficient by putting the end reference
-% in the same table as the start one but why make things complex ...
-
-\let\dofinishlinereference\dofinishfullreference % at lua end
-
-\unexpanded\def\page_lines_some_reference#1#2#3%
- {\dontleavehmode\begingroup
- \global\advance\c_page_lines_reference\plusone
- \attribute\linereferenceattribute\c_page_lines_reference
- #3%
- % for the moment we use a simple system i.e. no prefixes etc .. todo: store as number
- \normalexpanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberingparameter\c!conversion}{\the\c_page_lines_reference}}% kind labels userdata text
- \endgroup}
-
-% \def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{}} % reimplemented later
-% \def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{}} % reimplemented later
-
-% \def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced/ not used
-% \def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced/ not used
-
-\newif\ifnumberinglines % will change
-\newif\iftypesettinglines % will change
-
-\installcorenamespace{linenumbering}
-
-\installcommandhandler \??linenumbering {linenumbering} \??linenumbering
-
-\setnewconstant\c_page_lines_mode \plusone % 0=continue, 1=restart
-\setnewconstant\c_page_lines_location \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
-\setnewconstant\c_page_lines_alignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
-
-\newdimen\d_page_lines_width
-\newdimen\d_page_lines_distance
-
-\newevery \beforeeverylinenumbering \relax
-\newevery \aftereverylinenumbering \relax
-\newevery \everylinenumber \relax
-
-\appendtoks
- \page_lines_setup
-\to \everysetuplinenumbering
-
-\appendtoks
- \page_lines_start_define
-\to \everydefinelinenumbering
-
-\setuplinenumbering
- [\c!conversion=\v!numbers,
- \c!start=1,
- \c!step=1,
- \c!method=\v!first,
- \c!continue=\v!no,
- \c!location=\v!left,
- \c!style=,
- \c!color=,
- \c!width=2\emwidth,
- \c!left=,
- \c!right=,
- \c!command=,
- \c!distance=\zeropoint,
- \c!align=\v!auto]
-
-\definelinenumbering
- []
-
-% no intermediate changes in values, define a class, otherwise each range
-% would need a number
-
-% todo: text
-
-\installcorenamespace{linenumberinglocation}
-\installcorenamespace{linenumberingalternative}
-
-\expandafter\let\csname\??linenumberinglocation\v!middle \endcsname \zerocount
-\expandafter\let\csname\??linenumberinglocation\v!left \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!margin \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!inmargin \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!inleft \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!right \endcsname \plustwo
-\expandafter\let\csname\??linenumberinglocation\v!inright \endcsname \plustwo
-\expandafter\let\csname\??linenumberinglocation\v!inner \endcsname \plusthree
-\expandafter\let\csname\??linenumberinglocation\v!outer \endcsname \plusfour
-\expandafter\let\csname\??linenumberinglocation\v!text \endcsname \plusfive
-\expandafter\let\csname\??linenumberinglocation\v!begin \endcsname \plussix
-\expandafter\let\csname\??linenumberinglocation\v!end \endcsname \plusseven
-
-\expandafter\let\csname\??linenumberingalternative\v!middle \endcsname \zerocount
-\expandafter\let\csname\??linenumberingalternative\v!right \endcsname \plusone
-\expandafter\let\csname\??linenumberingalternative\v!flushleft \endcsname \plusone
-\expandafter\let\csname\??linenumberingalternative\v!left \endcsname \plustwo
-\expandafter\let\csname\??linenumberingalternative\v!flushright\endcsname \plustwo
-\expandafter\let\csname\??linenumberingalternative\v!auto \endcsname \plusfive
-
-% \startlinenumbering[<startvalue>|continue|settings|name]
-% \startlinenumbering[name][<startvalue>|continue|settings]
-
-\unexpanded\def\startlinenumbering
- {\dodoubleempty\page_lines_start}
-
-\def\page_lines_start % we stay downward compatible
- {\begingroup
- \ifsecondargument
- \expandafter\page_lines_start_two
- \else\iffirstargument
- \doubleexpandafter\page_lines_start_one
- \else
- \doubleexpandafter\page_lines_start_zero
- \fi\fi}
-
-\def\page_lines_start_zero[#1][#2]%
- {\edef\m_argument{\linenumberingparameter\c!continue}%
- \ifx\m_argument\v!continue
- \c_page_lines_mode\zerocount
- \else
- \c_page_lines_mode\plusone
- \fi
- \page_lines_start_followup}
-
-\def\page_lines_start_one[#1][#2]% [continue|<number>|settings] % historic
- {\edef\m_argument{#1}%
- \ifx\m_argument\v!continue
- \c_page_lines_mode\zerocount
- \let\currentlinenumbering\empty
- \else
- \c_page_lines_mode\plusone
- \ifx\m_argument\v!empty
- \let\currentlinenumbering\empty
- \else
- \doifassignmentelse{#1}
- {\let\currentlinenumbering\empty
- \setupcurrentlinenumbering[#1]}
- {\doifnumberelse\m_argument
- {\let\currentlinenumbering\empty
- \letlinenumberingparameter\c!start\m_argument}
- {\let\currentlinenumbering\m_argument}}%
- \fi
- \edef\p_continue{\linenumberingparameter\c!continue}%
- \ifx\p_continue\v!yes
- \c_page_lines_mode\zerocount
- \fi
- \fi
- \page_lines_start_followup}
-
-\def\page_lines_start_two[#1][#2]% [tag][continue|<number>|settings]
- {\edef\currentlinenumbering{#1}%
- \edef\m_argument{#2}%
- \ifx\m_argument\v!continue
- \c_page_lines_mode\zerocount
- \else
- \c_page_lines_mode\plusone
- \ifx\m_argument\v!empty \else
- \doifassignmentelse{#2}
- {\setupcurrentlinenumbering[#2]}
- {\doifnumber\m_argument
- {\letlinenumberingparameter\c!start\m_argument}}%
- \fi
- \edef\p_continue{\linenumberingparameter\c!continue}%
- \ifx\p_continue\v!yes
- \c_page_lines_mode\zerocount
- \fi
- \fi
- \page_lines_start_followup}
-
-\def\page_lines_start_followup
- {\numberinglinestrue
- \the\beforeeverylinenumbering
- \globallet\page_postprocessors_page \page_postprocessors_linenumbers_page
- \globallet\page_postprocessors_column\page_postprocessors_linenumbers_column
- \global\settrue\page_postprocessors_needed_box % see core-rul.mkiv
- \ifcase\c_page_lines_mode\relax
- \page_lines_start_update % continue
- \or
- \page_lines_start_define % only when assignment
- \fi
- \attribute\linenumberattribute\getvalue{\??linenumberinginstance\currentlinenumbering}\relax}
-
-\unexpanded\def\stoplinenumbering
- {\attribute\linenumberattribute\attributeunsetvalue
- \the\aftereverylinenumbering
- \endgroup}
-
-% number placement .. will change into (the new) margin code
-
-\def\page_lines_number_inner_indeed{\doifoddpageelse\page_lines_number_left_indeed\page_lines_number_right_indeed}
-\def\page_lines_number_outer_indeed{\doifoddpageelse\page_lines_number_right_indeed\page_lines_number_left_indeed}
-
-\def\page_lines_number_left
- {\ifcase\c_page_lines_location
- \expandafter\page_lines_number_left_indeed
- \or
- \expandafter\page_lines_number_left_indeed
- \or
- \expandafter\page_lines_number_left_indeed
- \or
- \expandafter\page_lines_number_inner_indeed
- \or
- \expandafter\page_lines_number_outer_indeed
- \or
- \expandafter\page_lines_number_text_indeed
- \or
- \expandafter\page_lines_number_begin_indeed
- \or
- \expandafter\page_lines_number_end_indeed
- \fi}
-
-\def\page_lines_number_right
- {\ifcase\c_page_lines_location
- \expandafter\page_lines_number_right_indeed
- \or
- \expandafter\page_lines_number_right_indeed
- \or
- \expandafter\page_lines_number_right_indeed
- \or
- \expandafter\page_lines_number_outer_indeed
- \or
- \expandafter\page_lines_number_inner_indeed
- \or
- \expandafter\page_lines_number_text_indeed
- \or
- \expandafter\page_lines_number_end_indeed
- \or
- \expandafter\page_lines_number_begin_indeed
- \fi}
-
-\newconditional\c_page_lines_fake_number
-\newconstant \b_page_lines_number
-\newconstant \c_page_lines_column
-\newconstant \c_page_lines_last_column
-
-\def\page_lines_add_numbers_to_box#1#2#3#4% box col max nesting
- {\bgroup
- \b_page_lines_number #1\relax
- \c_page_lines_column #2\relax
- \c_page_lines_last_column#3\relax
- \c_page_lines_nesting #4\relax
- \fullrestoreglobalbodyfont
- \let\makelinenumber\page_lines_make_number % used at lua end
- \setbox\b_page_lines_scratch\vbox
- {\forgetall
- \offinterlineskip
- \ctxcommand{linenumbersstageone(\number\b_page_lines_number,\ifcase\c_page_lines_nesting false\else true\fi)}}%
- \ctxcommand{linenumbersstagetwo(\number\b_page_lines_number,\number\b_page_lines_scratch)}% can move to lua code
- \egroup}
-
-\let\page_lines_make_number_indeed\relax
-
-\def\page_lines_make_number#1#2%
- {\edef\currentlinenumbering{#1}%
- \ifcase#2\relax
- \settrue \c_page_lines_fake_number
- \else
- \setfalse\c_page_lines_fake_number
- \fi
- \c_page_lines_location \executeifdefined{\??linenumberinglocation \linenumberingparameter\c!location}\plusone \relax % left
- \c_page_lines_alignment\executeifdefined{\??linenumberingalternative\linenumberingparameter\c!align }\plusfive\relax % auto
- \ifcase\c_page_lines_last_column\relax
- \settrue \c_page_lines_fake_number
- \or
- % one column
- \ifcase\c_page_lines_location
- \settrue \c_page_lines_fake_number
- \let\page_lines_make_number_indeed\page_lines_number_fake_indeed
- \or
- \let\page_lines_make_number_indeed\page_lines_number_left
- \or
- \let\page_lines_make_number_indeed\page_lines_number_right
- \or % inner
- \let\page_lines_make_number_indeed\page_lines_number_inner_indeed
- \or % outer
- \let\page_lines_make_number_indeed\page_lines_number_outer_indeed
- \or % text
- \let\page_lines_make_number_indeed\page_lines_number_text_indeed
- \or
- \let\page_lines_make_number_indeed\page_lines_number_begin_indeed
- \or
- \let\page_lines_make_number_indeed\page_lines_number_end_indeed
- \fi
- \else\ifcase\c_page_lines_column\relax
- \settrue \c_page_lines_fake_number
- \or
- \let\page_lines_make_number_indeed\page_lines_number_left
- \ifcase\c_page_lines_location\or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plustwo
- \else
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plusone % todo
- \or
- \c_page_lines_location\plusone % todo
- \fi
- \else
- \let\page_lines_make_number_indeed\page_lines_number_right
- \ifcase\c_page_lines_location\or
- \c_page_lines_location\plustwo
- \or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plustwo
- \or
- \c_page_lines_location\plustwo
- \or
- \c_page_lines_location\plustwo % todo
- \or
- \c_page_lines_location\plustwo % todo
- \fi
- \fi\fi
- \page_lines_make_number_indeed{#1}}
-
-\let\page_lines_number_fake_indeed\gobblesixarguments % needs checking
-
-\def\page_lines_number_text_indeed#1#2#3#4#5#6% beware, one needs so compensate for this in the \hsize
- {\hbox{\page_lines_number_construct{#1}{2}{#2}{#5}\hskip#3\scaledpoint}}
-
-\def\page_lines_number_left_indeed#1#2#3#4#5#6%
- {\naturalhbox to \zeropoint
- {\ifcase\istltdir#6\else \hskip-#4\scaledpoint \fi
- \llap{\page_lines_number_construct{#1}{2}{#2}{#5}\kern#3\scaledpoint}}}
-
-\def\page_lines_number_right_indeed#1#2#3#4#5#6%
- {\naturalhbox to \zeropoint
- {\ifcase\istltdir#6\else \hskip-#4\scaledpoint \fi
- \rlap{\hskip\dimexpr#4\scaledpoint+#3\scaledpoint\relax\page_lines_number_construct{#1}{1}{#2}{#5}}}}
-
-\def\page_lines_number_begin_indeed#1#2#3#4#5#6%
- {\ifcase\istltdir#6\relax
- \c_page_lines_location\plusone
- \expandafter\page_lines_number_left_indeed
- \else
- \c_page_lines_location\plustwo
- \expandafter\page_lines_number_left_indeed
- \fi{#1}{#2}{#3}{#4}{#5}{#6}}
-
-\def\page_lines_number_end_indeed#1#2#3#4#5#6%
- {\ifcase\istltdir#6\relax
- \c_page_lines_location\plustwo
- \expandafter\page_lines_number_left_indeed
- \else
- \c_page_lines_location\plusone
- \expandafter\page_lines_number_left_indeed
- \fi{#1}{#2}{#3}{#4}{#5}{#6}}
-
-\def\page_lines_number_construct#1#2#3#4% tag 1=left|2=right linenumber leftskip
- {\begingroup
- \def\currentlinenumbering{#1}%
- \def\linenumber{#3}% unsafe
- \doifelse{\linenumberingparameter\c!width}\v!margin
- {\d_page_lines_width\leftmarginwidth}
- {\d_page_lines_width\linenumberingparameter\c!width}%
- \d_page_lines_distance\linenumberingparameter\c!distance\relax
- \ifcase#2\relax\or\hskip\d_page_lines_distance\fi\relax
- \ifnum\c_page_lines_location=\plusfive
- \scratchdimen\dimexpr#4\scaledpoint-\d_page_lines_distance\relax
- \c_page_lines_location\plusone
- \else
- \scratchdimen\zeropoint
- \fi
- \ifcase\c_page_lines_alignment
- \c_page_lines_location\zerocount % middle
- \or
- \c_page_lines_location\plusone % left
- \or
- \c_page_lines_location\plustwo % right
- \fi
- \ifconditional\tracelinenumbering\ruledhbox\else\hbox\fi to \d_page_lines_width
- {\ifcase\c_page_lines_location
- \hss % middle
- \or
- % left
- \or
- \hss % right
- \or
- \doifoddpageelse\relax\hss % inner
- \or
- \doifoddpageelse\hss\relax % outer
- \fi
- \ifconditional\c_page_lines_fake_number
- % we need to reserve space
- \else
- \uselinenumberingstyleandcolor\c!style\c!color
- \linenumberingparameter\c!command
- {\linenumberingparameter\c!left
- \convertnumber{\linenumberingparameter\c!conversion}{#3}%
- \linenumberingparameter\c!right}%
- \fi
- \ifcase\c_page_lines_location
- \hss % middle
- \or
- \hss % left
- \or
- % right
- \or
- \doifoddpageelse\hss\relax % inner
- \or
- \doifoddpageelse\relax\hss % outer
- \fi}%
- \ifcase#2\relax
- \hskip-\scratchdimen
- \or
- \hskip-\scratchdimen
- \or
- \hskip\dimexpr\d_page_lines_distance-\scratchdimen\relax
- \fi
- \relax
- \the\everylinenumber
- \endgroup}
-
-% referencing: \permithyphenation, also removes leading spaces (new per 29-11-2013)
-
-\unexpanded\def\someline [#1]{\page_lines_reference_start{#1}\page_lines_reference_stop{#1}} % was just a def
-\unexpanded\def\startline[#1]{\page_lines_reference_start{#1}\ignorespaces}
-\unexpanded\def\stopline [#1]{\removeunwantedspaces\permithyphenation\page_lines_reference_stop{#1}}
-
-\def\page_lines_reference_show_start
- {\ifconditional\tracelinenumbering
- \expandafter\page_lines_reference_show_start_indeed
- \else
- \expandafter\gobbleoneargument
- \fi}
-
-\def\page_lines_reference_show_stop
- {\ifconditional\tracelinenumbering
- \expandafter\page_lines_reference_show_stop_indeed
- \else
- \expandafter\gobbleoneargument
- \fi}
-
-
-\def\page_lines_reference_show_start_indeed#1%
- {\setbox\scratchbox\hbox{\llap
- {\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
- \smashbox\scratchbox
- \box\scratchbox}
-
-\def\page_lines_reference_show_stop_indeed#1%
- {\setbox\scratchbox\hbox{\rlap
- {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht}}%
- \smashbox\scratchbox
- \box\scratchbox}
-
-\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{\page_lines_reference_show_start{#1}}}
-\def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{\page_lines_reference_show_stop {#1}}}
-
-% eventually we will do this in lua
-
-\def\currentreferencelinenumber{\ctxcommand{filterreference("linenumber")}}
-
-\let\m_page_lines_from\empty
-\let\m_page_lines_to \empty
-
-\unexpanded\def\doifelsesamelinereference#1#2#3%
- {\doifreferencefoundelse{lr:b:#1}
- {\edef\m_page_lines_from{\currentreferencelinenumber}%
- \doifreferencefoundelse{lr:e:#1}
- {\edef\m_page_lines_to{\currentreferencelinenumber}%
- %[\m_page_lines_from,\m_page_lines_to]
- \ifx\m_page_lines_from\m_page_lines_to#2\else#3\fi}
- {#2}}
- {#2}}
-
-\unexpanded\def\inline#1[#2]%
- {\doifelsenothing{#1}
- {\doifelsesamelinereference{#2}
- {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
- {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
- {\doifelsesamelinereference{#2}
- {\in{#1}[lr:b:#2]}
- {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
-
-\unexpanded\def\inlinerange[#1]%
- {\doifelsesamelinereference{#1}
- {\in[lr:b:#1]}
- {\in[lr:b:#1]\endash\in[lr:e:#1]}}
-
-\protect \endinput
diff --git a/tex/context/base/page-lin.mkvi b/tex/context/base/page-lin.mkvi
new file mode 100644
index 000000000..dd13a98c3
--- /dev/null
+++ b/tex/context/base/page-lin.mkvi
@@ -0,0 +1,590 @@
+%D \module
+%D [ file=page-lin,
+%D version=2007.11.29,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Line Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% generic or not ... maybe not bother too much and simplify to mkiv only
+% get rid of \mk* (left over from experimental times)
+%
+% to be redone (was experiment) .. can be hooked into margin code
+% reshuffle arguments
+
+\writestatus{loading}{ConTeXt Core Macros / Line Numbering}
+
+\unprotect
+
+% todo: save settings
+%
+% low level interface
+%
+% we should use normal counters but then we need to sync settings
+
+% some line
+%
+% \startlocallinenumbering
+% some source code 1\par
+% some source code 2\par
+% some source code 3\par
+% \stoplocallinenumbering
+%
+% some line
+
+\registerctxluafile{page-lin}{1.001}
+
+\definesystemattribute[linenumber] [public]
+\definesystemattribute[linereference][public]
+
+\appendtoksonce
+ \attribute\linenumberattribute\attributeunsetvalue
+\to \everyforgetall
+
+\newcount \linenumber % not used
+\newbox \b_page_lines_scratch
+\newcount \c_page_lines_reference
+\newconstant\c_page_lines_nesting
+
+\newconditional\tracelinenumbering % we keep this for old times sake
+
+\installtextracker
+ {lines.numbers.show}
+ {\settrue \tracelinenumbering}
+ {\setfalse\tracelinenumbering}
+
+% id nr shift width leftskip dir
+
+\installcorenamespace{linenumberinginstance}
+
+% tag skipflag s getfield(n,"shift") getfield(n,"width") leftmarginwidth(getlist(n)) getfield(n,"dir"))
+
+\let\makelinenumber\gobblesevenarguments % used at lua end
+
+\newconditional\page_postprocessors_needed_box
+
+\unexpanded\def\page_postprocessors_linenumbers_page #tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_box #tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_deepbox#tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \plusone }
+\unexpanded\def\page_postprocessors_linenumbers_column #tag{\page_lines_add_numbers_to_box{#tag}\currentcolumn\nofcolumns\zerocount}
+
+\def\page_lines_start_define
+ {\setxvalue{\??linenumberinginstance\currentlinenumbering}%
+ {\clf_registerlinenumbering
+ continue {\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi}%
+ start \linenumberingparameter\c!start
+ step \linenumberingparameter\c!step
+ method {\linenumberingparameter\c!method}%
+ tag {\currentlinenumbering}%
+ }}
+
+\def\page_lines_start_update
+ {\clf_setuplinenumbering
+ \csname\??linenumberinginstance\currentlinenumbering\endcsname
+ {%
+ continue {\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi}%
+ }%
+ \relax}
+
+\def\page_lines_setup
+ {\ifcsname \??linenumberinginstance\currentlinenumbering\endcsname
+ \clf_setuplinenumbering
+ \csname\??linenumberinginstance\currentlinenumbering\endcsname
+ {%
+ continue {\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi}%
+ start \linenumberingparameter\c!start
+ step \linenumberingparameter\c!step
+ method {\linenumberingparameter\c!method}%
+ tag {\currentlinenumbering}%
+ }%
+ \relax
+ \fi}
+
+% we could make this a bit more efficient by putting the end reference
+% in the same table as the start one but why make things complex ...
+
+\unexpanded\def\page_lines_some_reference#1#2#3%
+ {\dontleavehmode\begingroup
+ \global\advance\c_page_lines_reference\plusone
+ \attribute\linereferenceattribute\c_page_lines_reference
+ #3% todo: #3{#1} as there is no need to pass #1 as part of #3
+ % for the moment we use a simple system i.e. no prefixes etc .. todo: store as number
+ \c_strc_references_bind_state\zerocount % we don't want the prewordbreak and manage it here
+ \normalexpanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberingparameter\c!conversion}{\the\c_page_lines_reference}}% kind labels userdata text
+ \endgroup}
+
+% \def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{}} % reimplemented later
+% \def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{}} % reimplemented later
+
+% \def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced/ not used
+% \def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced/ not used
+
+\newif\ifnumberinglines % will change
+\newif\iftypesettinglines % will change
+
+\installcorenamespace{linenumbering}
+
+\installcommandhandler \??linenumbering {linenumbering} \??linenumbering
+
+\setnewconstant\c_page_lines_mode \plusone % 0=continue, 1=restart
+\setnewconstant\c_page_lines_location \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
+\setnewconstant\c_page_lines_alignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
+
+\newdimen\d_page_lines_width
+\newdimen\d_page_lines_distance
+
+\newevery \beforeeverylinenumbering \relax
+\newevery \aftereverylinenumbering \relax
+\newevery \everylinenumber \relax
+
+\appendtoks
+ \page_lines_setup
+\to \everysetuplinenumbering
+
+\appendtoks
+ \page_lines_start_define
+\to \everydefinelinenumbering
+
+\setuplinenumbering
+ [\c!conversion=\v!numbers,
+ \c!start=1,
+ \c!step=1,
+ \c!method=\v!first,
+ \c!continue=\v!no,
+ \c!style=,
+ \c!color=,
+ \c!width=2\emwidth,
+ \c!left=,
+ \c!right=,
+ \c!command=,
+ \c!margin=2.5\emwidth,
+ \c!distance=\zeropoint,
+ \c!location=\v!default, % depends on direction, columns etc
+ \c!align=\v!auto]
+
+\definelinenumbering
+ []
+
+% \startlinenumbering[<startvalue>|continue|settings|name]
+% \startlinenumbering[name][<startvalue>|continue|settings]
+
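+% a hypothetical usage sketch of the two calling conventions above (the
+% instance name 'demo' and the key values are made up for illustration):
+%
+% \startlinenumbering[step=2]           % anonymous, restart, every second line
+% some source code 1\par
+% some source code 2\par
+% \stoplinenumbering
+%
+% \definelinenumbering[demo]
+%
+% \startlinenumbering[demo][continue]   % named instance, continue numbering
+% some source code 3\par
+% \stoplinenumbering
+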
+\unexpanded\def\startlinenumbering
+ {\dodoubleempty\page_lines_start}
+
+\def\page_lines_start % we stay downward compatible
+ {\begingroup
+ \ifsecondargument
+ \expandafter\page_lines_start_two
+ \else\iffirstargument
+ \doubleexpandafter\page_lines_start_one
+ \else
+ \doubleexpandafter\page_lines_start_zero
+ \fi\fi}
+
+\def\page_lines_start_zero[#1][#2]%
+ {\edef\m_argument{\linenumberingparameter\c!continue}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_one[#1][#2]% [continue|<number>|settings] % historic
+ {\edef\m_argument{#1}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \let\currentlinenumbering\empty
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty
+ \let\currentlinenumbering\empty
+ \else
+ \doifelseassignment{#1}
+ {\let\currentlinenumbering\empty
+ \setupcurrentlinenumbering[#1]}
+ {\doifelsenumber\m_argument
+ {\let\currentlinenumbering\empty
+ \letlinenumberingparameter\c!start\m_argument}
+ {\let\currentlinenumbering\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_two[#1][#2]% [tag][continue|<number>|settings]
+ {\edef\currentlinenumbering{#1}%
+ \edef\m_argument{#2}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty \else
+ \doifelseassignment{#2}
+ {\setupcurrentlinenumbering[#2]}
+ {\doifnumber\m_argument
+ {\letlinenumberingparameter\c!start\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\newconditional\c_page_lines_auto_narrow
+
+\def\page_lines_start_followup
+ {\numberinglinestrue
+ \edef\p_location{\linenumberingparameter\c!location}%
+ \setfalse\c_page_lines_auto_narrow
+ \ifhmode \else
+ \ifx\p_location\v!text
+ \ifdim\leftskip>\zeropoint \else
+ \advance\leftskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \else\ifx\p_location\v!begin
+ \ifdim\leftskip>\zeropoint \else
+ \advance\leftskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \else\ifx\p_location\v!end
+ \ifdim\leftskip>\zeropoint \else
+ \advance\rightskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \fi\fi\fi
+ \fi
+ \the\beforeeverylinenumbering
+ \globallet\page_postprocessors_page \page_postprocessors_linenumbers_page
+ \globallet\page_postprocessors_column\page_postprocessors_linenumbers_column
+ \global\settrue\page_postprocessors_needed_box % see core-rul.mkiv
+ \ifcase\c_page_lines_mode\relax
+ \page_lines_start_update % continue
+ \or
+ \page_lines_start_define % only when assignment
+ \fi
+ \attribute\linenumberattribute\csname\??linenumberinginstance\currentlinenumbering\endcsname\relax}
+
+\unexpanded\def\stoplinenumbering
+ {\attribute\linenumberattribute\attributeunsetvalue
+ \the\aftereverylinenumbering
+ \ifconditional\c_page_lines_auto_narrow\par\fi
+ \endgroup}
+
+% number placement .. will change into (the new) margin code
+
+\newconditional\c_page_lines_fake_number
+\newconstant \b_page_lines_number
+\newconstant \c_page_lines_column
+\newconstant \c_page_lines_last_column
+\newdimen \d_page_lines_line_width
+\settrue \c_page_lines_dir_left_to_right
+
+\installcorenamespace{linenumberinghandler}
+
+\def\page_line_swap_align % can become a helper
+ {\ifx\p_align\v!inner \let\p_align\v!outer \else
+ \ifx\p_align\v!outer \let\p_align\v!inner \else
+ \ifx\p_align\v!flushleft \let\p_align\v!flushright\else
+ \ifx\p_align\v!flushright\let\p_align\v!flushleft \else
+ \ifx\p_align\v!left \let\p_align\v!right \else
+ \ifx\p_align\v!right \let\p_align\v!left \fi\fi\fi\fi\fi\fi}
+
+\def\page_lines_add_numbers_to_box#box#column#max#nesting%
+ {\bgroup
+ \b_page_lines_number #box\relax
+ \c_page_lines_column #column\relax
+ \c_page_lines_last_column#max\relax
+ \c_page_lines_nesting #nesting\relax
+ \fullrestoreglobalbodyfont
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \clf_linenumbersstageone
+ \b_page_lines_number
+ \ifcase\c_page_lines_nesting false\else true\fi
+ \relax}%
+ \clf_linenumbersstagetwo
+ \b_page_lines_number
+ \b_page_lines_scratch
+ \relax
+ \egroup}
+
+\let\page_lines_make_number_indeed\relax
+
+% \def\page_lines_rlap{\ifconditional\c_page_lines_dir_left_to_right\expandafter\rlap\else\expandafter\llap\fi}
+% \def\page_lines_llap{\ifconditional\c_page_lines_dir_left_to_right\expandafter\llap\else\expandafter\rlap\fi}
+
+\def\page_lines_add_numbers_to_box#box#column#max#nesting%
+ {\bgroup
+ \b_page_lines_number #box\relax
+ \c_page_lines_column #column\relax
+ \c_page_lines_last_column#max\relax
+ \c_page_lines_nesting #nesting\relax
+ \fullrestoreglobalbodyfont
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \clf_linenumbersstageone
+ \b_page_lines_number
+ \ifcase\c_page_lines_nesting false\else true\fi
+ \relax}%
+ \clf_linenumbersstagetwo
+ \b_page_lines_number
+ \b_page_lines_scratch
+ \relax
+ \egroup}
+
+\def\page_lines_make_number#tag#mode#linenumber#shift#width#leftskip#dir% beware, one needs to compensate for this in the \hsize
+ {\naturalhbox to \zeropoint \bgroup
+ \ifcase#mode\relax
+ % \settrue \c_page_lines_fake_number
+ \else
+ % \setfalse\c_page_lines_fake_number
+ \edef\currentlinenumbering{#tag}%
+ \def\linenumber{#linenumber}% unsafe
+ \d_page_lines_line_width#width\scaledpoint\relax
+ \d_page_lines_distance\linenumberingparameter\c!distance\relax
+ \edef\p_align{\linenumberingparameter\c!align}%
+ \edef\p_location{\linenumberingparameter\c!location}%
+ \ifcase\istltdir#dir\relax
+ \settrue \c_page_lines_dir_left_to_right
+ \else
+ \setfalse\c_page_lines_dir_left_to_right
+ \fi
+ %
+ % maybe we also need an option to ignore columns, so that we renumber
+ % once but on the other hand this assumes aligned lines
+ %
+ \ifcase\c_page_lines_last_column\relax
+ \settrue \c_page_lines_fake_number % why
+ \or
+ % one column
+ \or
+ % two columns
+ \ifx\p_location\v!default % or just margin
+ \ifcase\c_page_lines_column\relax
+ \settrue \c_page_lines_fake_number % why
+ \or
+ % one
+ \let\p_location\v!left
+ \else
+ % two
+ \let\p_location\v!right
+ % can become a helper
+ \page_line_swap_align
+ \fi
+ \fi
+ \else
+ % too fuzzy
+ \fi
+ \ifx\p_location\v!default
+ \ifconditional\c_page_lines_dir_left_to_right
+ \let\p_location\v!left
+ \else
+ \let\p_location\v!right
+ \page_line_swap_align % yes or no
+ \fi
+ \fi
+ %
+ \executeifdefined{\??linenumberinghandler\p_location}\relax
+ \fi
+ \egroup}
+
+\def\page_lines_number_inject#align#width%
+ {\edef\p_width{\linenumberingparameter\c!width}%
+ \ifx\p_width\v!margin
+ \d_page_lines_width#width%
+ \else
+ \d_page_lines_width\p_width
+ \fi
+ \relax
+ \ifdim\d_page_lines_width>\zeropoint
+% \ifconditional\c_page_lines_dir_left_to_right\else
+% \let\simplealignedbox\simplereversealignedbox
+% \fi
+ \ifconditional\tracelinenumbering
+ \ruledhbox{\simplealignedbox\d_page_lines_width#align{\page_lines_number_inject_indeed}}%
+ \else
+ \simplealignedbox\d_page_lines_width#align{\page_lines_number_inject_indeed}%
+ \fi
+ \else
+ \ifconditional\tracelinenumbering
+ \ruledhbox
+ \else
+ % \hbox
+ \fi
+ {\page_lines_number_inject_indeed}%
+ \fi}
+
+\def\page_lines_number_inject_indeed
+ {\uselinenumberingstyleandcolor\c!style\c!color
+ \linenumberingparameter\c!command
+ {\linenumberingparameter\c!left
+ \convertnumber{\linenumberingparameter\c!conversion}\linenumber
+ \linenumberingparameter\c!right}}
+
+% \def\dodorlap{\hbox to \zeropoint{\box\nextbox\normalhss}\endgroup}
+% \def\dodollap{\hbox to \zeropoint{\normalhss\box\nextbox}\endgroup}
+
+\def\page_line_handle_left#align#width#distance%
+ {\llap
+ {\page_lines_number_inject#align#width%
+ \kern\dimexpr#distance+\d_page_lines_distance\relax
+ \the\everylinenumber
+ \hss}}
+
+\def\page_line_handle_right#align#width#distance%
+ {\rlap
+ {\kern\dimexpr#distance+\d_page_lines_distance+\d_page_lines_line_width\relax
+ \page_lines_number_inject#align#width%
+ \the\everylinenumber}}
+
+\setuvalue{\??linenumberinghandler\v!left}%
+ {\page_line_handle_left\p_align\leftmarginwidth\leftmargindistance}
+
+\setuvalue{\??linenumberinghandler\v!right}%
+ {\page_line_handle_right\p_align\rightmarginwidth\rightmargindistance}
+
+\setuvalue{\??linenumberinghandler\v!inner}%
+ {\ifodd\realpageno
+ \ifx\p_align\v!inner
+ \page_line_handle_left\v!flushleft\leftmarginwidth\leftmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_left\v!flushright\leftmarginwidth\leftmargindistance
+ \else
+ \page_line_handle_left\p_align\leftmarginwidth\leftmargindistance
+ \fi\fi
+ \else
+ \ifx\p_align\v!inner
+ \page_line_handle_right\v!flushright\rightmarginwidth\rightmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_right\v!flushleft\rightmarginwidth\rightmargindistance
+ \else
+ \page_line_handle_right\p_align\rightmarginwidth\rightmargindistance
+ \fi\fi
+ \fi}
+
+\setuvalue{\??linenumberinghandler\v!outer}%
+ {\ifodd\realpageno
+ \ifx\p_align\v!inner
+ \page_line_handle_right\v!flushleft\leftmarginwidth\leftmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_right\v!flushright\leftmarginwidth\leftmargindistance
+ \else
+ \page_line_handle_right\p_align\leftmarginwidth\leftmargindistance
+ \fi\fi
+ \else
+ \ifx\p_align\v!inner
+ \page_line_handle_left\v!flushright\rightmarginwidth\rightmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_left\v!flushleft\rightmarginwidth\rightmargindistance
+ \else
+ \page_line_handle_left\p_align\rightmarginwidth\rightmargindistance
+ \fi\fi
+ \fi}
+
+\def\page_line_handle_begin#align%
+ {\rlap
+ {\kern\d_page_lines_distance
+ \page_lines_number_inject#align\zeropoint
+ \the\everylinenumber}}
+
+\def\page_line_handle_end#align%
+ {\rlap
+ {\kern\d_page_lines_line_width\relax
+ \llap
+ {\page_lines_number_inject#align\zeropoint
+ \kern\d_page_lines_distance
+ \the\everylinenumber}}}
+
+\setuvalue{\??linenumberinghandler\v!begin}{\page_line_handle_begin\p_align}
+\setuvalue{\??linenumberinghandler\v!end }{\page_line_handle_end \p_align}
+\setuvalue{\??linenumberinghandler\v!text }{\page_line_handle_begin\p_align}
+
+\setuevalue{\??linenumberinghandler\v!inleft }{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!inmargin}{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!margin }{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!inright }{\getvalue{\??linenumberinghandler\v!right}}
+
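+% the handlers above are selected through the location and align keys; a
+% hypothetical setup sketch (the values are made up for illustration):
+%
+% \setuplinenumbering[location=inright,align=flushleft,distance=.5\emwidth]
+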
+% referencing: \permithyphenation, also removes leading spaces (new per 29-11-2013)
+
+\unexpanded\def\someline [#1]{\page_lines_reference_start{#1}\page_lines_reference_stop{#1}} % was just a def
+\unexpanded\def\startline[#1]{\page_lines_reference_start{#1}\ignorespaces}
+\unexpanded\def\stopline [#1]{\removeunwantedspaces\permithyphenation\page_lines_reference_stop{#1}}
+
+\def\page_lines_reference_show_start
+ {\ifconditional\tracelinenumbering
+ \expandafter\page_lines_reference_show_start_indeed
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\def\page_lines_reference_show_stop
+ {\ifconditional\tracelinenumbering
+ \expandafter\page_lines_reference_show_stop_indeed
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\def\page_lines_reference_show_start_indeed#1%
+ {\setbox\scratchbox\hbox{\llap
+ {\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
+ \smashbox\scratchbox
+ \box\scratchbox}
+
+\def\page_lines_reference_show_stop_indeed#1%
+ {\setbox\scratchbox\hbox{\rlap
+ {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht}}%
+ \smashbox\scratchbox
+ \box\scratchbox}
+
+\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{\page_lines_reference_show_start{#1}}}
+\def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{\page_lines_reference_show_stop {#1}}}
+
+% eventually we will do this in lua
+
+\def\currentreferencelinenumber{\clf_filterreference{linenumber}}
+
+\let\m_page_lines_from\empty
+\let\m_page_lines_to \empty
+
+\unexpanded\def\doifelsesamelinereference#1#2#3%
+ {\doifelsereferencefound{lr:b:#1}
+ {\edef\m_page_lines_from{\currentreferencelinenumber}%
+ \doifelsereferencefound{lr:e:#1}
+ {\edef\m_page_lines_to{\currentreferencelinenumber}%
+ %[\m_page_lines_from,\m_page_lines_to]
+ \ifx\m_page_lines_from\m_page_lines_to#2\else#3\fi}
+ {#2}}
+ {#2}}
+
+\let\doifsamelinereferenceelse\doifelsesamelinereference
+
+\unexpanded\def\inline#1[#2]%
+ {\doifelsenothing{#1}
+ {\doifelsesamelinereference{#2}
+ {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
+ {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
+ {\doifelsesamelinereference{#2}
+ {\in{#1}[lr:b:#2]}
+ {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
+
+\unexpanded\def\inlinerange[#1]%
+ {\doifelsesamelinereference{#1}
+ {\in[lr:b:#1]}
+ {\in[lr:b:#1]\endash\in[lr:e:#1]}}
+
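+% a hypothetical referencing sketch using the commands defined above (the
+% tag 'quote' is made up for illustration):
+%
+% \startline[quote]a line worth referring to\stopline[quote]
+%
+% as discussed in \inline{}[quote] (or, numbers only, \inlinerange[quote])
+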
+\protect \endinput
diff --git a/tex/context/base/page-mak.mkvi b/tex/context/base/page-mak.mkvi
index 71af520a1..ee144f20a 100644
--- a/tex/context/base/page-mak.mkvi
+++ b/tex/context/base/page-mak.mkvi
@@ -45,7 +45,7 @@
\appendtoks
\setuevalue{\e!start\currentmakeup\e!makeup}{\startmakeup[\currentmakeup]}%
\setuevalue{\e!stop \currentmakeup\e!makeup}{\stopmakeup}%
- \doiflayoutdefinedelse\currentmakeup\donothing{\definelayout[\currentmakeup]}% new
+ \doifelselayoutdefined\currentmakeup\donothing{\definelayout[\currentmakeup]}% new
\to \everydefinemakeup
%D The \type{\start}||\type{\stop} macros are used for both
@@ -91,13 +91,59 @@
\def\page_makeup_start_yes[#name]% [#settings]%
{\doifelsecommandhandler\??makeup{#name}\page_makeup_start_indeed\page_makeup_start_nop[#name]}%
+% case 1:
+%
+% \setuplayout[height=5cm]
+%
+% case 2:
+%
+% \definelayout[crap][height=10cm]
+% \definelayout[standard][crap]
+%
+% case 3:
+%
+% \setuplayout[standard][height=15cm]
+%
+% case 4:
+%
+% \definelayout[whatever][height=2cm]
+% \setuplayout[whatever]
+
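+% a further hypothetical sketch in the same spirit: a makeup that brings
+% along a layout of the same name, which the code below has to resolve
+%
+% \definelayout[cover][height=2cm]
+% \definemakeup[cover][page=yes]
+%
+% \startcovermakeup
+%     some cover text
+% \stopcovermakeup
+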
\def\page_makeup_start_indeed[#name][#settings]%
- {\doifelsenothing{\namedmakeupparameter{#name}\c!page}
- {\page}% new, so best not have dangling mess here like references (we could capture then and flush embedded)
- {\page[\namedmakeupparameter{#name}\c!page]}%
+ {% the next grouping hack is somewhat messy:
+ \begingroup
+ % we need to figure out the current layout
+ \xdef\m_page_makeup_name{#name}%
+ \let\currentmakeup\m_page_makeup_name
+ \let\currentlayout\m_page_makeup_name
+ \xdef\m_page_makeup_layout_parent{\layoutparameter\s!parent}%
+ \setupcurrentmakeup[#settings]%
+ \edef\p_page{\makeupparameter\c!page}%
+ \ifx\p_page\empty
+ \endgroup
+ \page % new, so best not have dangling mess here like references (we could capture them and flush embedded)
+ \else\ifx\p_page\v!no
+ % nothing
+ \endgroup
+ \else
+ \normalexpanded{\endgroup\page[\p_page]}%
+ \fi\fi
+ % some dirty trickery (sorry) for determining if we have
+ % - a layout definition at all
+ % - inherit from the parent of that definition
+ % - inherit from the current layout otherwise
+ \ifx\m_page_makeup_name\currentlayout
+ % we already use the layout
+ \else\ifx\m_page_makeup_layout_parent\??layout
+ % we inherit from the current layout
+ \normalexpanded{\setuplayout[#name][\s!parent=\??layout\currentlayout]}% is remembered but checked later anyway
+ % \else
+ % we have an inherited layout
+ \fi\fi
\startlayout[#name]% includes \page
\bgroup
- \edef\currentmakeup{#name}%
+ %\edef\currentmakeup{#name}%
+ \let\currentmakeup\m_page_makeup_name
\setupcurrentmakeup[#settings]%
\setsystemmode\v!makeup
\the\t_page_makeup_every_setup
@@ -137,7 +183,12 @@
\fi \fi
\strc_pagenumbers_page_state_pop % new
\egroup
- \stoplayout} % includes \page
+ \stoplayout % includes \page
+ \ifx\m_page_makeup_name\currentlayout
+ \else\ifx\m_page_makeup_layout_parent\??layout
+ \normalexpanded{\setuplayout[\m_page_makeup_name][\s!parent=\??layout]}% is remembered but checked later anyway
+ % \else
+ \fi\fi}
\setvalue{\??makeupdoublesided\v!yes}%
{\emptyhbox
@@ -184,6 +235,7 @@
\c!headerstate=\v!stop,
\c!footerstate=\v!stop,
\c!pagestate=\v!stop] % in manual ! ! !
+% \c!pagestate=\v!start]
\definemakeup
[\v!standard]
diff --git a/tex/context/base/page-mbk.mkvi b/tex/context/base/page-mbk.mkvi
index 9e3e57193..8038771d9 100644
--- a/tex/context/base/page-mbk.mkvi
+++ b/tex/context/base/page-mbk.mkvi
@@ -98,7 +98,7 @@
\unvbox\b_page_margin_blocks
\marginblockparameter\c!inbetween
\fi
- \setupalign[\marginblockparameter\c!align]%
+ \usealignparameter\marginblockparameter
\usemarginblockstyleandcolor\c!style\c!color
\begstrut
\ignorespaces}
diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua
index 7d13d9e4e..61a4f944d 100644
--- a/tex/context/base/page-mix.lua
+++ b/tex/context/base/page-mix.lua
@@ -13,48 +13,81 @@ if not modules then modules = { } end modules ["page-mix"] = {
-- local trackers, logs, storage = trackers, logs, storage
-- local number, table = number, table
+local next, type = next, type
local concat = table.concat
-
-local nodecodes = nodes.nodecodes
-local gluecodes = nodes.gluecodes
-local nodepool = nodes.pool
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-
-local new_hlist = nodepool.hlist
-local new_vlist = nodepool.vlist
-local new_glue = nodepool.glue
-
-local hpack = node.hpack
-local vpack = node.vpack
-local freenode = node.free
-local concatnodes = nodes.concat
-
-local texgetbox = tex.getbox
-local texsetbox = tex.setbox
-local texgetskip = tex.getskip
-
-local points = number.points
-
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_columns = variables.columns
+local ceil, floor = math.ceil, math.floor
local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
local report_state = logs.reporter("mixed columns")
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+local rule_code = nodecodes.rule
+
+local topskip_code = gluecodes.topskip
+local lineskip_code = gluecodes.lineskip
+local baselineskip_code = gluecodes.baselineskip
+local userskip_code = gluecodes.userskip
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local nodetostring = nuts.tostring
+local listtoutf = nodes.listtoutf
+
+local hpack = nuts.hpack
+local vpack = nuts.vpack
+local freenode = nuts.free
+local concatnodes = nuts.concat
+local slidenodes = nuts.slide -- ok here as we mess with prev links intermediately
+local traversenodes = nuts.traverse
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+local setbox = nuts.setbox
+local getskip = nuts.getskip
+local getattribute = nuts.getattribute
+
+local nodepool = nuts.pool
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local points = number.points
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+local v_fixed = variables.fixed
+local v_auto = variables.auto
+local v_none = variables.none
+local v_more = variables.more
+local v_less = variables.less
+local v_halfline = variables.halfline
+
+local context = context
+local implement = interfaces.implement
+
pagebuilders = pagebuilders or { }
pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
local mixedcolumns = pagebuilders.mixedcolumns
@@ -77,13 +110,13 @@ local function collectinserts(result,nxt,nxtid)
local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
while nxt do
if nxtid == insert_code then
- inserttotal = inserttotal + nxt.height + nxt.depth
- local s = nxt.subtype
+ inserttotal = inserttotal + getfield(nxt,"height") + getfield(nxt,"depth")
+ local s = getsubtype(nxt)
local c = inserts[s]
if not c then
c = { }
inserts[s] = c
- local width = texgetskip(s).width
+ local width = getfield(getskip(s),"width")
if not result.inserts[s] then
currentskips = currentskips + width
end
@@ -100,9 +133,9 @@ local function collectinserts(result,nxt,nxtid)
else
break
end
- nxt = nxt.next
+ nxt = getnext(nxt)
if nxt then
- nxtid = nxt.id
+ nxtid = getid(nxt)
else
break
end
@@ -128,30 +161,30 @@ end
local function discardtopglue(current,discarded)
local size = 0
while current do
- local id = current.id
+ local id = getid(current)
if id == glue_code then
- size = size + current.spec.width
+ size = size + getfield(getfield(current,"spec"),"width")
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
elseif id == penalty_code then
- if current.penalty == forcedbreak then
+ if getfield(current,"penalty") == forcedbreak then
discarded[#discarded+1] = current
- current = current.next
- while current and current.id == glue_code do
- size = size + current.spec.width
+ current = getnext(current)
+ while current and getid(current) == glue_code do
+ size = size + getfield(getfield(current,"spec"),"width")
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
end
else
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
end
else
break
end
end
if current then
- current.prev = nil
+ setfield(current,"prev",nil) -- prevent look back
end
return current, size
end
@@ -162,13 +195,13 @@ local function stripbottomglue(results,discarded)
local r = results[i]
local t = r.tail
while t and t ~= r.head do
- local prev = t.prev
+ local prev = getprev(t)
if not prev then
break
end
- local id = t.id
+ local id = getid(t)
if id == penalty_code then
- if t.penalty == forcedbreak then
+ if getfield(t,"penalty") == forcedbreak then
break
else
discarded[#discarded+1] = t
@@ -177,7 +210,7 @@ local function stripbottomglue(results,discarded)
end
elseif id == glue_code then
discarded[#discarded+1] = t
- local width = t.spec.width
+ local width = getfield(getfield(t,"spec"),"width")
if trace_state then
report_state("columns %s, discarded bottom glue %p",i,width)
end
@@ -195,51 +228,52 @@ local function stripbottomglue(results,discarded)
return height
end
-local function setsplit(specification) -- a rather large function
+local function preparesplit(specification) -- a rather large function
local box = specification.box
if not box then
report_state("fatal error, no box")
return
end
- local list = texgetbox(box)
+ local list = getbox(box)
if not list then
report_state("fatal error, no list")
return
end
- local head = list.head or specification.originalhead
+ local head = getlist(list) or specification.originalhead
if not head then
report_state("fatal error, no head")
return
end
- local discarded = { }
- local originalhead = head
- local originalwidth = specification.originalwidth or list.width
- local originalheight = specification.originalheight or list.height
- local current = head
- local skipped = 0
- local height = 0
- local depth = 0
- local skip = 0
- local splitmethod = specification.splitmethod or false
+ slidenodes(head) -- we can have set prev's to nil to prevent backtracking
+ local discarded = { }
+ local originalhead = head
+ local originalwidth = specification.originalwidth or getfield(list,"width")
+ local originalheight = specification.originalheight or getfield(list,"height")
+ local current = head
+ local skipped = 0
+ local height = 0
+ local depth = 0
+ local skip = 0
+ local splitmethod = specification.splitmethod or false
if splitmethod == v_none then
splitmethod = false
end
- local options = settings_to_hash(specification.option or "")
+ local options = settings_to_hash(specification.option or "")
local stripbottom = specification.alternative == v_local
- local cycle = specification.cycle or 1
- local nofcolumns = specification.nofcolumns or 1
+ local cycle = specification.cycle or 1
+ local nofcolumns = specification.nofcolumns or 1
if nofcolumns == 0 then
nofcolumns = 1
end
local preheight = specification.preheight or 0
- local extra = specification.extra or 0
+ local extra = specification.extra or 0
local maxheight = specification.maxheight
- local optimal = originalheight/nofcolumns
+ local optimal = originalheight/nofcolumns
if specification.balance ~= v_yes then
optimal = maxheight
end
- local target = optimal + extra
- local overflow = target > maxheight - preheight
+ local target = optimal + extra
+ local overflow = target > maxheight - preheight
local threshold = specification.threshold or 0
if overflow then
target = maxheight - preheight
@@ -267,33 +301,73 @@ local function setsplit(specification) -- a rather large function
local rest = nil
local lastlocked = nil
local lastcurrent = nil
+ local lastcontent = nil
local backtracked = false
if trace_state then
report_state("setting collector to column %s",column)
end
+ local function unlock(penalty)
+ if lastlocked then
+ if trace_state then
+ report_state("penalty %s, unlocking in column %s",penalty or "-",column)
+ end
+ lastlocked = nil
+ end
+ lastcurrent = nil
+ lastcontent = nil
+ end
+
+ local function lock(penalty,current)
+ if trace_state then
+ report_state("penalty %s, locking in column %s",penalty,column)
+ end
+ lastlocked = penalty
+ lastcurrent = current or lastcurrent
+ lastcontent = nil
+ end
+
local function backtrack(start)
local current = start
-- first skip over glue and penalty
while current do
- local id = current.id
- if id == glue_code or id == penalty_code then
- current = current.prev
+ local id = getid(current)
+ if id == glue_code then
+ if trace_state then
+ report_state("backtracking over %s in column %s","glue",column)
+ end
+ current = getprev(current)
+ elseif id == penalty_code then
+ if trace_state then
+ report_state("backtracking over %s in column %s","penalty",column)
+ end
+ current = getprev(current)
else
break
end
end
-- then skip over content
while current do
- local id = current.id
- if id == glue_code or id == penalty_code then
+ local id = getid(current)
+ if id == glue_code then
+ if trace_state then
+ report_state("quitting at %s in column %s","glue",column)
+ end
+ break
+ elseif id == penalty_code then
+ if trace_state then
+ report_state("quitting at %s in column %s","penalty",column)
+ end
break
else
- current = current.prev
+ current = getprev(current)
end
end
if not current then
+ if trace_state then
+ report_state("no effective backtracking in column %s",column)
+ end
current = start
end
return current
@@ -310,7 +384,12 @@ local function setsplit(specification) -- a rather large function
backtracked = true
end
lastcurrent = nil
- lastlocked = nil
+ if lastlocked then
+ if trace_state then
+ report_state("unlocking in column %s",column)
+ end
+ lastlocked = nil
+ end
end
if head == lasthead then
if trace_state then
@@ -324,7 +403,7 @@ local function setsplit(specification) -- a rather large function
if current == head then
result.tail = head
else
- result.tail = current.prev
+ result.tail = getprev(current)
end
result.height = height
result.depth = depth
@@ -344,6 +423,9 @@ local function setsplit(specification) -- a rather large function
report_state("setting collector to column %s",column)
end
current, skipped = discardtopglue(current,discarded)
+ if trace_detail and skipped ~= 0 then
+ report_state("check > column 1, discarded %p",skipped)
+ end
head = current
return true, skipped
end
@@ -352,6 +434,7 @@ local function setsplit(specification) -- a rather large function
local function checked(advance,where,locked)
local total = skip + height + depth + advance
local delta = total - target
+-- - 65536*3
local state = "same"
local okay = false
local skipped = 0
@@ -366,7 +449,7 @@ local function setsplit(specification) -- a rather large function
end
end
if trace_detail then
- report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
+ report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p => %a (height %p, depth %p, skip %p)",
where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip)
end
return state, skipped
@@ -387,7 +470,7 @@ local function setsplit(specification) -- a rather large function
head = current
local function process_skip(current,nxt)
- local advance = current.spec.width
+ local advance = getfield(getfield(current,"spec"),"width")
if advance ~= 0 then
local state, skipped = checked(advance,"glue")
if trace_state then
@@ -401,17 +484,28 @@ local function setsplit(specification) -- a rather large function
end
height = height + depth + skip
depth = 0
+if advance < 0 then
+ height = height + advance
+ skip = 0
+ if height < 0 then
+ height = 0
+ end
+else
skip = height > 0 and advance or 0
+end
if trace_state then
report_state("%-7s > column %s, height %p, depth %p, skip %p","glue",column,height,depth,skip)
end
else
-- what else? ignore? treat as valid as usual?
end
+ if lastcontent then
+ unlock()
+ end
end
local function process_kern(current,nxt)
- local advance = current.kern
+ local advance = getfield(current,"kern")
if advance ~= 0 then
local state, skipped = checked(advance,"kern")
if trace_state then
@@ -434,25 +528,28 @@ local function setsplit(specification) -- a rather large function
local function process_rule(current,nxt)
-- simple variant of h|vlist
- local advance = current.height -- + current.depth
- local state, skipped = checked(advance+currentskips,"rule")
- if trace_state then
- report_state("%-7s > column %s, state %a, rule, advance %p, height %p","line",column,state,advance,inserttotal,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","rule",column,skipped)
+ local advance = getfield(current,"height") -- + getfield(current,"depth")
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"rule")
+ if trace_state then
+ report_state("%-7s > column %s, state %a, rule, advance %p, height %p","rule",column,state,advance,inserttotal,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","rule",column,skipped)
+ end
end
+ if state == "quit" then
+ return true
+ end
+ height = height + depth + skip + advance
+ -- if state == "next" then
+ -- height = height + nextskips
+ -- else
+ -- height = height + currentskips
+ -- end
+ depth = getfield(current,"depth")
+ skip = 0
end
- if state == "quit" then
- return true
- end
- height = height + depth + skip + advance
- if state == "next" then
- height = height + nextskips
- else
- height = height + currentskips
- end
- depth = current.depth
- skip = 0
+ lastcontent = current
end
-- okay, here we could do some badness like magic but we want something
@@ -462,12 +559,11 @@ local function setsplit(specification) -- a rather large function
-- [chapter] [penalty] [section] [penalty] [first line]
local function process_penalty(current,nxt)
- local penalty = current.penalty
+ local penalty = getfield(current,"penalty")
if penalty == 0 then
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
elseif penalty == forcedbreak then
- local needed = current[a_checkedbreak]
+ local needed = getattribute(current,a_checkedbreak)
local proceed = not needed or needed == 0
if not proceed then
local available = target - height
@@ -477,8 +573,7 @@ local function setsplit(specification) -- a rather large function
end
end
if proceed then
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
local okay, skipped = gotonext()
if okay then
if trace_state then
@@ -499,28 +594,26 @@ local function setsplit(specification) -- a rather large function
end
elseif penalty < 0 then
-- we don't care too much
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
elseif penalty >= 10000 then
if not lastcurrent then
- lastcurrent = current
- lastlocked = penalty
+ lock(penalty,current)
elseif penalty > lastlocked then
- lastlocked = penalty
+ lock(penalty)
end
else
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
end
end
local function process_list(current,nxt)
- local nxtid = nxt and nxt.id
+ local nxtid = nxt and getid(nxt)
line = line + 1
local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
- local advance = current.height -- + current.depth
+ local advance = getfield(current,"height")
+-- + getfield(current,"depth") -- when > strutdp
if trace_state then
- report_state("%-7s > column %s, content: %s","line",column,listtoutf(current.list,true,true))
+ report_state("%-7s > column %s, content: %s","line",column,listtoutf(getlist(current),true,true))
end
if nxt and (nxtid == insert_code or nxtid == mark_code) then
nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
@@ -541,7 +634,7 @@ local function setsplit(specification) -- a rather large function
else
height = height + currentskips
end
- depth = current.depth
+ depth = getfield(current,"depth")
skip = 0
if inserts then
-- so we already collect them ... makes backtracking tricky ... alternatively
@@ -551,12 +644,15 @@ local function setsplit(specification) -- a rather large function
if trace_state then
report_state("%-7s > column %s, height %p, depth %p, skip %p","line",column,height,depth,skip)
end
+ lastcontent = current
end
+local kept = head
+
while current do
- local id = current.id
- local nxt = current.next
+ local id = getid(current)
+ local nxt = getnext(current)
backtracked = false
@@ -602,14 +698,16 @@ local function setsplit(specification) -- a rather large function
if not current then
if trace_state then
- report_state("nilling rest")
+ report_state("nothing left")
end
- rest = nil
- elseif rest == lasthead then
+ -- needs well defined case
+ -- rest = nil
+ elseif rest == lasthead then
if trace_state then
- report_state("nilling rest as rest is lasthead")
+ report_state("rest equals lasthead")
end
- rest = nil
+ -- test case: x\index{AB} \index{AA}x \blank \placeindex
+ -- makes line disappear: rest = nil
end
if stripbottom then
@@ -629,24 +727,26 @@ local function setsplit(specification) -- a rather large function
specification.overflow = overflow
specification.discarded = discarded
- texgetbox(specification.box).list = nil
+ setfield(getbox(specification.box),"list",nil)
return specification
end
-function mixedcolumns.finalize(result)
+local function finalize(result)
if result then
- local results = result.results
- for i=1,result.nofcolumns do
+ local results = result.results
+ local columns = result.nofcolumns
+ local maxtotal = 0
+ for i=1,columns do
local r = results[i]
local h = r.head
if h then
- h.prev = nil
+ setfield(h,"prev",nil)
local t = r.tail
if t then
- t.next = nil
+ setfield(t,"next",nil)
else
- h.next = nil
+ setfield(h,"next",nil)
r.tail = h
end
for c, list in next, r.inserts do
@@ -655,16 +755,26 @@ function mixedcolumns.finalize(result)
local l = list[i]
local h = new_hlist()
t[i] = h
- h.head = l.head
- h.height = l.height
- h.depth = l.depth
- l.head = nil
+ setfield(h,"list",getfield(l,"head"))
+ setfield(h,"height",getfield(l,"height"))
+ setfield(h,"depth",getfield(l,"depth"))
+ setfield(l,"head",nil)
end
- t[1].prev = nil -- needs checking
- t[#t].next = nil -- needs checking
+ setfield(t[1],"prev",nil) -- needs checking
+ setfield(t[#t],"next",nil) -- needs checking
r.inserts[c] = t
end
end
+ local total = r.height + r.depth
+ if total > maxtotal then
+ maxtotal = total
+ end
+ r.total = total
+ end
+ result.maxtotal = maxtotal
+ for i=1,columns do
+ local r = results[i]
+ r.extra = maxtotal - r.total
end
end
end
@@ -679,12 +789,12 @@ local function report_deltas(result,str)
report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t)
end
-function mixedcolumns.setsplit(specification)
+local function setsplit(specification)
splitruns = splitruns + 1
if trace_state then
report_state("split run %s",splitruns)
end
- local result = setsplit(specification)
+ local result = preparesplit(specification)
if result then
if result.overflow then
if trace_state then
@@ -697,7 +807,7 @@ function mixedcolumns.setsplit(specification)
local cycles = specification.cycles or 100
while result.rest and cycle <= cycles do
specification.extra = cycle * step
- result = setsplit(specification) or result
+ result = preparesplit(specification) or result
if trace_state then
report_state("cycle: %s.%s, original height %p, total height %p",
splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight)
@@ -719,7 +829,7 @@ function mixedcolumns.setsplit(specification)
end
end
-function mixedcolumns.getsplit(result,n)
+local function getsplit(result,n)
if not result then
report_state("flush, column %s, no result",n)
return
@@ -733,17 +843,18 @@ function mixedcolumns.getsplit(result,n)
return new_glue(result.originalwidth)
end
- h.prev = nil -- move up
+ setfield(h,"prev",nil) -- move up
local strutht = result.strutht
local strutdp = result.strutdp
local lineheight = strutht + strutdp
+ local isglobal = result.alternative == v_global
local v = new_vlist()
- v.head = h
+ setfield(v,"list",h)
-- local v = vpack(h,"exactly",height)
- if result.alternative == v_global then -- option
+ if isglobal then -- option
result.height = result.maxheight
end
@@ -751,24 +862,69 @@ function mixedcolumns.getsplit(result,n)
local dp = 0
local wd = result.originalwidth
- local grid = result.grid
+ local grid = result.grid
+ local internalgrid = result.internalgrid
+ local httolerance = .25
+ local dptolerance = .50
+ local lineheight = internalgrid == v_halfline and lineheight/2 or lineheight
+
+ local function amount(r,s,t)
+ local l = ceil((r-t)/lineheight)
+ local a = lineheight * l
+ if a > s then
+ return a - s
+ else
+ return s
+ end
+ end
if grid then
- ht = lineheight * math.ceil(result.height/lineheight) - strutdp
- dp = strutdp
+ -- print(n,result.maxtotal,r.total,r.extra)
+ if isglobal then
+ local rh = r.height
+ -- ht = (lineheight * ceil(result.height/lineheight) - strutdp
+ ht = amount(rh,strutdp,0)
+ dp = strutdp
+ else
+ -- natural dimensions
+ local rh = r.height
+ local rd = r.depth
+ if rh > ht then
+ ht = amount(rh,strutdp,httolerance*strutht)
+ end
+ if rd > dp then
+ dp = amount(rd,strutht,dptolerance*strutdp)
+ end
+ -- forced dimensions
+ local rh = result.height or 0
+ local rd = result.depth or 0
+ if rh > ht then
+ ht = amount(rh,strutdp,httolerance*strutht)
+ end
+ if rd > dp then
+ dp = amount(rd,strutht,dptolerance*strutdp)
+ end
+ -- always one line at least
+ if ht < strutht then
+ ht = strutht
+ end
+ if dp < strutdp then
+ dp = strutdp
+ end
+ end
else
ht = result.height
dp = result.depth
end
- v.width = wd
- v.height = ht
- v.depth = dp
+ setfield(v,"width",wd)
+ setfield(v,"height",ht)
+ setfield(v,"depth",dp)
if trace_state then
- local id = h.id
+ local id = getid(h)
if id == hlist_code then
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",listtoutf(getlist(h)))
else
report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
end
@@ -777,27 +933,27 @@ function mixedcolumns.getsplit(result,n)
for c, list in next, r.inserts do
local l = concatnodes(list)
local b = vpack(l) -- multiple arguments, todo: fastvpack
- -- texsetbox("global",c,b)
- texsetbox(c,b)
+ -- setbox("global",c,b)
+ setbox(c,b)
r.inserts[c] = nil
end
return v
end
-function mixedcolumns.getrest(result)
+local function getrest(result)
local rest = result and result.rest
result.rest = nil -- to be sure
return rest
end
-function mixedcolumns.getlist(result)
+local function getlist(result)
local originalhead = result and result.originalhead
result.originalhead = nil -- to be sure
return originalhead
end
-function mixedcolumns.cleanup(result)
+local function cleanup(result)
local discarded = result.discarded
for i=1,#discarded do
freenode(discarded[i])
@@ -805,52 +961,100 @@ function mixedcolumns.cleanup(result)
result.discarded = { }
end
+mixedcolumns.setsplit = setsplit
+mixedcolumns.getsplit = getsplit
+mixedcolumns.finalize = finalize
+mixedcolumns.getrest = getrest
+mixedcolumns.getlist = getlist
+mixedcolumns.cleanup = cleanup
+
-- interface --
local result
-function commands.mixsetsplit(specification)
- if result then
- for k, v in next, specification do
- result[k] = v
+implement {
+ name = "mixsetsplit",
+ actions = function(specification)
+ if result then
+ for k, v in next, specification do
+ result[k] = v
+ end
+ result = setsplit(result)
+ else
+ result = setsplit(specification)
end
- result = mixedcolumns.setsplit(result)
- else
- result = mixedcolumns.setsplit(specification)
- end
-end
+ end,
+ arguments = {
+ {
+ { "box", "integer" },
+ { "nofcolumns", "integer" },
+ { "maxheight", "dimen" },
+ { "step", "dimen" },
+ { "cycles", "integer" },
+ { "preheight", "dimen" },
+ { "prebox", "integer" },
+ { "strutht", "dimen" },
+ { "strutdp", "dimen" },
+ { "threshold", "dimen" },
+ { "splitmethod" },
+ { "balance" },
+ { "alternative" },
+ { "internalgrid" },
+ { "grid", "boolean" },
+ }
+ }
+}
-function commands.mixgetsplit(n)
- if result then
- context(mixedcolumns.getsplit(result,n))
- end
-end
+implement {
+ name = "mixgetsplit",
+ arguments = "integer",
+ actions = function(n)
+ if result then
+ context(tonode(getsplit(result,n)))
+ end
+ end,
+}
-function commands.mixfinalize()
- if result then
- mixedcolumns.finalize(result)
+implement {
+ name = "mixfinalize",
+ actions = function()
+ if result then
+ finalize(result)
+ end
end
-end
+}
-function commands.mixflushrest()
- if result then
- context(mixedcolumns.getrest(result))
+implement {
+ name = "mixflushrest",
+ actions = function()
+ if result then
+ context(tonode(getrest(result)))
+ end
end
-end
+}
-function commands.mixflushlist()
- if result then
- context(mixedcolumns.getlist(result))
+implement {
+ name = "mixflushlist",
+ actions = function()
+ if result then
+ context(tonode(getlist(result)))
+ end
end
-end
+}
-function commands.mixstate()
- context(result and result.rest and 1 or 0)
-end
+implement {
+ name = "mixstate",
+ actions = function()
+ context(result and result.rest and 1 or 0)
+ end
+}
-function commands.mixcleanup()
- if result then
- mixedcolumns.cleanup(result)
- result = nil
+implement {
+ name = "mixcleanup",
+ actions = function()
+ if result then
+ cleanup(result)
+ result = nil
+ end
end
-end
+}
diff --git a/tex/context/base/page-mix.mkiv b/tex/context/base/page-mix.mkiv
index 5d1c54a71..6d7f144a6 100644
--- a/tex/context/base/page-mix.mkiv
+++ b/tex/context/base/page-mix.mkiv
@@ -29,6 +29,7 @@
% wide floats
% move floats
% offsets (inner ones, so we change the hsize ... needed with backgrounds
+% when no content we currently lose the page
% luatex buglet:
%
@@ -68,6 +69,7 @@
\c!maxheight=\textheight,
\c!maxwidth=\makeupwidth,
\c!grid=\v!tolerant,
+ \c!internalgrid=\v!line,
\c!step=.25\lineheight, % needs some experimenting
%\c!splitmethod=\v!fixed, % will be default
\c!method=\ifinner\s!box\else\s!otr\fi] % automatic as suggested by WS
@@ -75,7 +77,7 @@
\let\startmixedcolumns\relax % defined later
\let\stopmixedcolumns \relax % defined later
-\appendtoks
+\appendtoks % could become an option
\setuevalue{\e!start\currentmixedcolumns}{\startmixedcolumns[\currentmixedcolumns]}%
\setuevalue{\e!stop \currentmixedcolumns}{\stopmixedcolumns}%
\to \everydefinemixedcolumns
@@ -166,8 +168,18 @@
[\c!n=\itemgroupparameter\c!n,
\c!separator=\v!none,
\c!splitmethod=\v!none,
+ \c!grid=\v!tolerant,
+ \c!internalgrid=\v!halfline, % new, we may still revert to \v!line
\c!balance=\v!yes]
+% better
+
+\setupmixedcolumns
+ [\s!itemgroupcolumns]
+ [\c!splitmethod=\v!fixed,
+ \c!grid=\v!yes,
+ \c!internalgrid=\v!line]
+
\unexpanded\def\strc_itemgroups_start_columns
{\startmixedcolumns[\s!itemgroupcolumns]} % we could have a fast one
@@ -195,11 +207,6 @@
%D The interceptor is quite simple, at least for the moment.
-% \def\page_mix_routine_intercept
-% {\global\setbox\b_page_mix_preceding\vbox
-% {\page_otr_command_flush_top_insertions
-% \unvbox\normalpagebox}}
-
\def\page_mix_routine_intercept
{\ifdim\pagetotal>\pagegoal
% testcase: preceding-001 ... if we don't do this, text can disappear as
@@ -208,7 +215,7 @@
\fi
\global\setbox\b_page_mix_preceding\vbox
{\page_otr_command_flush_top_insertions
- \ifdim\ht\b_page_mix_preceding=\zeropoint \else
+ \ifdim\htdp\b_page_mix_preceding=\zeropoint \else
\writestatus\m!columns{preceding error}%
\unvbox\b_page_mix_preceding
\fi
@@ -288,18 +295,32 @@
\installcorenamespace{mixedcolumnsseparator}
-\setvalue{\??mixedcolumnsseparator\v!rule}%
- {\starttextproperties
- \usemixedcolumnscolorparameter\c!rulecolor
- \vrule\s!width\mixedcolumnsparameter\c!rulethickness
- \stoptextproperties}
+\unexpanded\def\installmixedcolumnseparator#1#2%
+ {\setvalue{\??mixedcolumnsseparator#1}{#2}}
+
+\installmixedcolumnseparator\v!rule
+ {\vrule
+ \s!width \mixedcolumnsparameter\c!rulethickness
+ \s!height\mixedcolumnseparatorheight
+ \s!depth \mixedcolumnseparatordepth
+ \relax}
\unexpanded\def\page_mix_command_inject_separator
- {\bgroup
+ {\begingroup
+ \setbox\scratchbox\hbox to \zeropoint \bgroup
+ \hss
+ \starttextproperties
+ \usemixedcolumnscolorparameter\c!rulecolor
+ \csname\??mixedcolumnsseparator\p_separator\endcsname % was \c!rule
+ \stoptextproperties
+ \hss
+ \egroup
+ \ht\scratchbox\zeropoint
+ \dp\scratchbox\zeropoint
\hss
- \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname % was \c!rule
+ \box\scratchbox
\hss
- \egroup}
+ \endgroup}
%D We've now arrived at the real code. The start command mostly sets up the
%D environment and variables that are used in the splitter. One of the last
@@ -347,13 +368,13 @@
\csname\??mixedcolumnsstart\currentmixedcolumnsmethod\endcsname}
\def\page_mix_start_columns_b[#1][#2]%
- {\doifassignmentelse{#1}%
+ {\doifelseassignment{#1}%
{\let\currentmixedcolumns\empty
\page_mix_error_b}
{\edef\currentmixedcolumns{#1}%
\firstargumentfalse}%
\edef\currentmixedcolumnsmethod{\mixedcolumnsparameter\c!method}%
- \mixedcolumnsparameter\c!before\relax % so, it doesn't list to local settings !
+ \mixedcolumnsparameter\c!before\relax % so, it doesn't listen to local settings !
\csname\??mixedcolumnsbefore\currentmixedcolumnsmethod\endcsname\relax
\begingroup
\iffirstargument
@@ -425,8 +446,7 @@
[\s!itemgroupcolumns]
[\c!grid=\itemgroupparameter\c!grid]
-\setupitemgroups
- [\c!grid=\v!tolerant]
+% better
%D The common initialization:
@@ -451,6 +471,10 @@
%
\usemixedcolumnscolorparameter\c!color
%
+ \insidecolumnstrue % new
+ %
+ \useprofileparameter\mixedcolumnsparameter % new
+ %
\nofcolumns\c_page_mix_n_of_columns} % public
%D The otr method related hooks are defined next:
@@ -464,23 +488,44 @@
\newcount\c_page_mix_otr_nesting
+% \setvalue{\??mixedcolumnsbefore\s!otr}%
+% {\par
+% \global\advance\c_page_mix_otr_nesting\plusone
+% \ifcase\c_page_mix_otr_nesting\or
+% \ifdim\pagetotal=\zeropoint \else
+% \obeydepth % we could handle this in pre material
+% \fi
+% \fi}
+
\setvalue{\??mixedcolumnsbefore\s!otr}%
{\par
\global\advance\c_page_mix_otr_nesting\plusone
\ifcase\c_page_mix_otr_nesting\or
\ifdim\pagetotal=\zeropoint \else
- \obeydepth % we could handle this in pre material
+ % make sure that whitespace and blanks are done
+ \strut
+ \vskip-\lineheight
+ % no, bad spacing: \obeydepth % we could handle this in pre material
\fi
\fi}
\setvalue{\??mixedcolumnsstart\s!otr}%
{\ifcase\c_page_mix_otr_nesting\or
+ \scratchwidth\textwidth
\setupoutputroutine[\s!mixedcolumn]%
\c_page_mix_routine\c_page_mix_routine_intercept
\page_otr_trigger_output_routine
%
\holdinginserts\maxdimen
%
+ \ifvoid\b_page_mix_preceding \else
+ % moved here, before the packaging
+ \page_postprocessors_linenumbers_deepbox\b_page_mix_preceding
+ % we need to avoid unvboxing with successive balanced on one page
+ \global\setbox\b_page_mix_preceding\vbox{\box\b_page_mix_preceding}%
+ \wd\b_page_mix_preceding\scratchwidth % \makeupwidth
+ \page_grids_add_to_one\b_page_mix_preceding
+ \fi
\global\d_page_mix_preceding_height\ht\b_page_mix_preceding
\c_page_mix_routine\c_page_mix_routine_continue
%
@@ -500,8 +545,14 @@
\setvalue{\??mixedcolumnsstop\s!otr}%
{\par
\ifcase\c_page_mix_otr_nesting\or
- \doif{\mixedcolumnsparameter\c!balance}\v!yes{\c_page_mix_routine\c_page_mix_routine_balance}%
+ \doifelse{\mixedcolumnsparameter\c!balance}\v!yes
+ {\c_page_mix_routine\c_page_mix_routine_balance}%
+ {\penalty-\plustenthousand}% weird hack, we need to trigger the otr sometimes (new per 20140306, see balancing-001.tex)
\page_otr_trigger_output_routine
+ \ifvoid\b_page_mix_preceding \else
+ % empty columns so we need to make sure pending content is flushed
+ \unvbox\b_page_mix_preceding % new per 2014.10.25
+ \fi
\fi}
\setvalue{\??mixedcolumnsafter\s!otr}%
@@ -517,54 +568,76 @@
%D footnotes. Eventually we will have multiple strategies available.
\unexpanded\def\page_mix_routine_construct#1%
- {\ctxcommand{mixsetsplit {
- box = \number\b_page_mix_collected,
- nofcolumns = \number\c_page_mix_n_of_columns,
- maxheight = \number\d_page_mix_max_height,
- step = \number\d_page_mix_balance_step,
- cycles = \number\c_page_mix_balance_cycles,
- preheight = \number\d_page_mix_preceding_height,
- prebox = \number\b_page_mix_preceding,
- strutht = \number\strutht,
- strutdp = \number\strutdp,
- threshold = \number\d_page_mix_threshold,
- splitmethod = "\mixedcolumnsparameter\c!splitmethod",
- balance = "#1",
- alternative = "\mixedcolumnsparameter\c!alternative",
- grid = \ifgridsnapping true\else false\fi,
- }}%
+ {\d_page_mix_max_height\mixedcolumnsparameter\c!maxheight % can have changed due to header=high
+ \clf_mixsetsplit
+ box \b_page_mix_collected
+ nofcolumns \c_page_mix_n_of_columns
+ maxheight \d_page_mix_max_height
+ step \d_page_mix_balance_step
+ cycles \c_page_mix_balance_cycles
+ preheight \d_page_mix_preceding_height
+ prebox \b_page_mix_preceding
+ strutht \strutht
+ strutdp \strutdp
+ threshold \d_page_mix_threshold
+ splitmethod {\mixedcolumnsparameter\c!splitmethod}%
+ balance {#1}%
+ alternative {\mixedcolumnsparameter\c!alternative}%
+ internalgrid {\mixedcolumnsparameter\c!internalgrid}%
+ grid \ifgridsnapping true\else false\fi
+ \relax
\deadcycles\zerocount}
+\newdimen\mixedcolumnseparatorheight
+\newdimen\mixedcolumnseparatordepth
+\newdimen\mixedcolumnseparatorwidth
+
+\def\page_mix_routine_package_step
+ {% needs packaging anyway
+ \setbox\scratchbox\page_mix_command_package_column
+ \page_marks_synchronize_column\plusone\c_page_mix_n_of_columns\recurselevel\scratchbox
+ % backgrounds
+ \anch_mark_column_box\scratchbox
+ % for the moment a quick and dirty patch .. we need to go into the box (hence the \plusone) .. a slowdowner
+ % moved to start: \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_mix_n_of_columns\plusone
+ % the framed needs a reset of strut, align, setups etc
+ \mixedcolumnseparatorheight\ht\scratchbox
+ \mixedcolumnseparatordepth \dp\scratchbox
+ \inheritedmixedcolumnsframedbox\currentmixedcolumns\scratchbox
+ % optional
+ \ifnum\recurselevel<\c_page_mix_n_of_columns
+ \ifcsname\??mixedcolumnsseparator\p_separator\endcsname
+ \page_mix_command_inject_separator
+ \else
+ \hss
+ \fi
+ \fi}
+
\unexpanded\def\page_mix_routine_package
- {\ctxcommand{mixfinalize()}%
+ {\clf_mixfinalize
\setbox\b_page_mix_collected\vbox \bgroup
\ifvoid\b_page_mix_preceding \else
- \box\b_page_mix_preceding
+ \page_postprocessors_linenumbers_deepbox\b_page_mix_preceding
+ \vbox\bgroup
+ \box\b_page_mix_preceding
+ \egroup
\global\d_page_mix_preceding_height\zeropoint
\nointerlineskip
+ % no no:
+ % \prevdepth\strutdepth
\fi
\hskip\d_page_mix_leftskip
\page_mix_hbox to \d_page_mix_max_width \bgroup
- \dorecurse\c_page_mix_n_of_columns{%
- % needs packaging anyway
- \setbox\scratchbox\page_mix_command_package_column
- \page_marks_synchronize_column\plusone\c_page_mix_n_of_columns\recurselevel\scratchbox
- % for the moment a quick and dirty patch .. we need to go into the box (hence the \plusone) .. a slowdowner
- \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_mix_n_of_columns\plusone
- % the framed needs a reset of strut, align, setups etc
- \inheritedmixedcolumnsframedbox\currentmixedcolumns\scratchbox
- % optional
- \ifnum\recurselevel<\c_page_mix_n_of_columns
- \page_mix_command_inject_separator
- \fi
- }%
+ \edef\p_separator{\mixedcolumnsparameter\c!separator}%
+ \mixedcolumnseparatorwidth\d_page_mix_distance % \mixedcolumnsparameter\c!rulethickness\relax
+ \dorecurse\c_page_mix_n_of_columns\page_mix_routine_package_step
\egroup
\egroup}
\unexpanded\def\page_mix_command_package_column
{\page_mix_hbox to \d_page_mix_column_width \bgroup
% maybe intercept empty
- \ctxcommand{mixgetsplit(\recurselevel)}%
+ \clf_mixgetsplit\recurselevel\relax
\hskip-\d_page_mix_column_width
\page_mix_hbox to \d_page_mix_column_width \bgroup
\placenoteinserts
@@ -580,8 +653,8 @@
\page_mix_routine_construct\v!no
\page_mix_routine_package
\page_otr_construct_and_shipout\box\b_page_mix_collected
- \ctxcommand{mixflushrest()}%
- \ctxcommand{mixcleanup()}%
+ \clf_mixflushrest
+ \clf_mixcleanup
\egroup}
\unexpanded\def\page_mix_routine_balance
@@ -592,9 +665,9 @@
\doloop
{%writestatus\m!columns{construct continue (\the\htdp\b_page_mix_collected)}%
\page_mix_routine_construct\v!no
- \ifcase\ctxcommand{mixstate()}\relax
+ \ifcase\clf_mixstate\relax
% 0 = okay, we can balance
- \setbox\b_page_mix_collected\vbox{\ctxcommand{mixflushlist()}}% we could avoid this
+ \setbox\b_page_mix_collected\vbox{\clf_mixflushlist}% we could avoid this
%writestatus\m!columns{construct balance}%
\page_mix_routine_construct\v!yes
\page_mix_routine_package
@@ -604,21 +677,22 @@
\page_otr_command_set_hsize
\par
%writestatus\m!columns{flush balance}%
+ \page_grids_add_to_mix\b_page_mix_collected % no linenumbers here
\box\b_page_mix_collected
\vskip\zeropoint % triggers recalculation of page stuff (weird that this is needed but it *is* needed, see mixed-001.tex)
\par
\nointerlineskip
\prevdepth\strutdp
- \ctxcommand{mixflushrest()}% rubish
- \ctxcommand{mixcleanup()}% rubish
+ \clf_mixflushrest% rubbish
+ \clf_mixcleanup % rubbish
\exitloop
\or
% 1 = we have stuff left, so flush and rebalance
%writestatus\m!columns{flush continue}%
\page_mix_routine_package
\page_otr_construct_and_shipout\box\b_page_mix_collected
- \setbox\b_page_mix_collected\vbox{\ctxcommand{mixflushrest()}}% we could avoid this
- \ctxcommand{mixcleanup()}%
+ \setbox\b_page_mix_collected\vbox{\clf_mixflushrest}% we could avoid this
+ \clf_mixcleanup
\ifdim\ht\b_page_mix_collected=\zeropoint
\exitloop
\fi
@@ -686,11 +760,32 @@
\letvalue{\??mixedcolumnsbefore\s!box}\donothing
\letvalue{\??mixedcolumnsafter \s!box}\donothing
+% \setvalue{\??mixedcolumnsstart\s!box}%
+% {\edef\p_page_mix_strut{\mixedcolumnsparameter\c!strut}%
+% \setbox\b_page_mix_collected\vbox\bgroup
+% \let\currentoutputroutine\s!mixedcolumn % makes \column work
+% \forgetall
+% \page_mix_command_set_hsize
+% \ifx\p_page_mix_strut\v!yes
+% \begstrut
+% \ignorespaces
+% \fi}
+%
+% \setvalue{\??mixedcolumnsstop\s!box}%
+% {\ifx\p_page_mix_strut\v!yes
+% \removeunwantedspaces
+% \endstrut
+% \fi
+% \egroup
+% \page_mix_box_balance}
+
\setvalue{\??mixedcolumnsstart\s!box}%
{\edef\p_page_mix_strut{\mixedcolumnsparameter\c!strut}%
- \setbox\b_page_mix_collected\vbox\bgroup
+ \setbox\b_page_mix_collected\vbox \bgroup
\let\currentoutputroutine\s!mixedcolumn % makes \column work
\forgetall
+ \usegridparameter\mixedcolumnsparameter
+ % \useprofileparameter\mixedcolumnsparameter
\page_mix_command_set_hsize
\ifx\p_page_mix_strut\v!yes
\begstrut
@@ -703,6 +798,16 @@
\endstrut
\fi
\egroup
+ \edef\p_profile{\mixedcolumnsparameter\c!profile}%
+ \ifx\p_profile\empty \else
+ % this can never be ok because we cheat with depth and height
+ % and glue in between and when we're too large we run into issues
+ % so maybe best limit correction to one line
+ \profilegivenbox\p_profile\b_page_mix_collected
+ \setbox\b_page_mix_collected\vbox{\unvbox\b_page_mix_collected}%
+ % tracing
+ % \addprofiletobox\b_page_mix_collected
+ \fi
\page_mix_box_balance}
%D The related balancer is only a few lines:
@@ -713,8 +818,8 @@
\page_mix_routine_construct\v!yes
\page_mix_routine_package
\dontleavehmode\box\b_page_mix_collected
- \ctxcommand{mixflushrest()}%
- \ctxcommand{mixcleanup()}%
+ \clf_mixflushrest
+ \clf_mixcleanup
\egroup}
%D As usual, floats complicates matters and this is where experimental code
@@ -755,11 +860,11 @@
% \unexpanded\def\page_mix_command_flush_top_insertions
% {\page_one_command_flush_top_insertions}
-% \unexpanded\def\page_mix_place_float_top
-% {\showmessage\m!columns4\empty\page_one_place_float_here}
+\unexpanded\def\page_mix_place_float_top
+ {\showmessage\m!columns4\empty\page_one_place_float_here}
-% \unexpanded\def\page_mix_place_float_bottom
-% {\showmessage\m!columns5\empty\page_one_place_float_here}
+\unexpanded\def\page_mix_place_float_bottom
+ {\showmessage\m!columns5\empty\page_one_place_float_here}
\unexpanded\def\page_mix_place_float_here
{\page_one_place_float_here}
diff --git a/tex/context/base/page-mul.mkiv b/tex/context/base/page-mul.mkiv
index a874cd116..8db5a4773 100644
--- a/tex/context/base/page-mul.mkiv
+++ b/tex/context/base/page-mul.mkiv
@@ -960,7 +960,7 @@
\ifnum\c_page_mul_balance_tries>\c_page_mul_balance_tries_max\relax
\showmessage\m!columns7\empty
\else
- \showmessage\m!columns8{\the\c_page_mul_balance_tries\space}%
+ \showmessage\m!columns8{\the\c_page_mul_balance_tries}%
\fi
\egroup}
@@ -1430,7 +1430,7 @@
% \stopcolumns
% \def\backgroundfinishcolumnbox
- % {\doifinsetelse\@@kloffset{\v!none,\v!overlay}
+ % {\doifelseinset\@@kloffset{\v!none,\v!overlay}
% {\let\@@kloffset\!!zeropoint}
% {\scratchdimen\@@kloffset
% \advance\scratchdimen -\@@klrulethickness
@@ -1605,9 +1605,11 @@
\else
\balancecolumnsfalse
\fi
- \installalign\v!yes {\page_columns_align_option_yes }%
- \installalign\v!no {\page_columns_align_option_no }%
- \installalign\v!text{\page_columns_align_option_text}%
+ % % this won't work (blocked by check for overloading; too fuzzy anyway)
+ % \installalign\v!yes {\page_columns_align_option_yes }% \stretchcolumnstrue \inheritcolumnsfalse
+ % \installalign\v!no {\page_columns_align_option_no }% \stretchcolumnsfalse\inheritcolumnsfalse
+ % \installalign\v!text{\page_columns_align_option_text}% \stretchcolumnsfalse\inheritcolumnstrue
+ % %
\stretchcolumnsfalse
\inheritcolumnstrue
\edef\p_align{\columnsparameter\c!align}%
diff --git a/tex/context/base/page-one.mkiv b/tex/context/base/page-one.mkiv
index 3f9dcd7c6..4cf59da2d 100644
--- a/tex/context/base/page-one.mkiv
+++ b/tex/context/base/page-one.mkiv
@@ -454,10 +454,13 @@
\fi\fi
\fi}
+
\def\page_one_place_float_here_indeed
- {%\ifgridsnapping \else
- \baselinecorrection
- %\fi
+ {\ifgridsnapping
+ % otherwise real bad outcome
+ \else
+ \baselinecorrection % this has to be done better (and definitely not in column mode)
+ \fi
\doplacefloatbox
\page_floats_report_total
\dohandlenextfloatindent}
@@ -496,12 +499,12 @@
\def\page_one_place_float_bottom {\page_one_place_float_otherwise}
\def\page_one_place_float_otherwise
- {\doifinsetelse\v!here\floatlocationmethod
+ {\doifelseinset\v!here\floatlocationmethod
\page_one_place_float_otherwise_here
\page_one_place_float_otherwise_else}
\def\page_one_place_float_otherwise_here
- {\doifinsetelse\v!always\floatlocationmethod
+ {\doifelseinset\v!always\floatlocationmethod
{\page[\v!preference]%
\page_otr_command_check_if_float_fits
\ifconditional\c_page_floats_room
@@ -525,7 +528,7 @@
\fi}}
\def\page_one_place_float_otherwise_else
- {\doifinsetelse\v!always\floatlocationmethod
+ {\doifelseinset\v!always\floatlocationmethod
{\page_otr_command_check_if_float_fits
\ifconditional\c_page_floats_room
\page_one_place_float_auto_top_bottom
diff --git a/tex/context/base/page-pst.lua b/tex/context/base/page-pst.lua
index 50580ae33..472bdbabe 100644
--- a/tex/context/base/page-pst.lua
+++ b/tex/context/base/page-pst.lua
@@ -8,16 +8,18 @@ if not modules then modules = { } end modules ['page-pst'] = {
-- todo: adapt message
-local tonumber, next = tonumber, next
-local format, validstring = string.format, string.valid
-local sortedkeys = table.sortedkeys
+local tonumber, next, type = tonumber, next, type
+local find, validstring = string.find, string.valid
-local context = context
-local commands = commands
+local context = context
+local implement = interfaces.implement
local texgetcount = tex.getcount
local texsetcount = tex.setcount
+local sortedkeys = table.sortedkeys
+local formatters = string.formatters
+
local cache = { }
local function flush(page)
@@ -25,7 +27,7 @@ local function flush(page)
if c then
for i=1,#c do
-- characters.showstring(c[i])
- context.viafile(c[i],format("page.%s",validstring(page,"nopage")))
+ context.viafile(c[i],formatters["page.%s"](validstring(page,"nopage")))
end
cache[page] = nil
end
@@ -43,11 +45,11 @@ local function setnextpage()
texsetcount("global","c_page_postponed_blocks_next_page",n)
end
-function commands.flushpostponedblocks(page)
+local function flushpostponedblocks(specification)
-- we need to flush previously pending pages as well and the zero
-- slot is the generic one so that one is always flushed
local t = sortedkeys(cache)
- local p = tonumber(page) or texgetcount("realpageno") or 0
+ local p = tonumber(specification.page) or texgetcount("realpageno") or 0
for i=1,#t do
local ti = t[i]
if ti <= p then
@@ -59,9 +61,19 @@ function commands.flushpostponedblocks(page)
setnextpage()
end
-function commands.registerpostponedblock(page)
+implement {
+ name = "flushpostponedblocks",
+ actions = flushpostponedblocks,
+ arguments = {
+ {
+ { "page" }
+ }
+ }
+}
+
+local function registerpostponedblock(page)
if type(page) == "string" then
- if string.find(page,"^+") then
+ if find(page,"^+") then
page = texgetcount("realpageno") + (tonumber(page) or 1) -- future delta page
else
page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion
@@ -80,7 +92,14 @@ function commands.registerpostponedblock(page)
if page == 0 then
interfaces.showmessage("layouts",3,#c)
else
- interfaces.showmessage("layouts",3,string.format("%s (realpage: %s)",#c,page))
+ interfaces.showmessage("layouts",3,formatters["%s (realpage: %s)"](#c,page))
end
setnextpage()
end
+
+implement {
+ name = "registerpostponedblock",
+ actions = registerpostponedblock,
+ arguments = "string"
+}
+
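-- registerpostponedblock above accepts either an absolute page number or the "+<n>" form,
-- which is resolved against the current realpageno. A minimal sketch of just that
-- normalisation step (the helper name resolvepage is hypothetical):

local texgetcount = tex.getcount

local function resolvepage(page)
    if type(page) == "string" then
        if string.find(page,"^+") then
            -- relative: "+2" means two pages after the current real page
            return texgetcount("realpageno") + (tonumber(page) or 1)
        else
            -- absolute: 0 means the first possible occasion
            return tonumber(page) or 0
        end
    end
    return page or 0
end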
diff --git a/tex/context/base/page-pst.mkiv b/tex/context/base/page-pst.mkiv
index 704289246..f12663f66 100644
--- a/tex/context/base/page-pst.mkiv
+++ b/tex/context/base/page-pst.mkiv
@@ -57,10 +57,10 @@
\unexpanded\setvalue{\e!start\v!postponing}%
{\bgroup
\obeylines
- \doifnextoptionalelse{\egroup\page_postponed_blocks_start}{\egroup\page_postponed_blocks_start[0]}}
+ \doifelsenextoptional{\egroup\page_postponed_blocks_start}{\egroup\page_postponed_blocks_start[0]}}
\unexpanded\setvalue{\e!stop\v!postponing}%
- {\ctxcommand{registerpostponedblock("\currentpostponedpage")}\relax}
+ {\clf_registerpostponedblock{\currentpostponedpage}\relax}
\def\page_postponed_blocks_start[#1]%
{\edef\currentpostponedpage{#1}%
@@ -80,7 +80,8 @@
\setnormalcatcodes % postponing in verbatim
\uncatcodespacetokens % postponing in startlines
\restoreglobalbodyfont % otherwise problems inside split verbatim
- \ctxcommand{flushpostponedblocks()}%
+ \clf_flushpostponedblocks
+ % page {123}
\relax
\page_otr_command_flush_floats % new but potential dangerous, maybe we need a classification
\endgroup} % of blocks: with and without flush
diff --git a/tex/context/base/page-run.mkiv b/tex/context/base/page-run.mkiv
index dabf37252..9adcb23c7 100644
--- a/tex/context/base/page-run.mkiv
+++ b/tex/context/base/page-run.mkiv
@@ -74,18 +74,34 @@
\startluacode
local format, concat = string.format, table.concat
+local todimen = number.todimen
+local texdimen = tex.dimen
-local function todimen(name,unit,fmt)
- return number.todimen(tex.dimen[name],unit,fmt)
+local function asdimen(name,unit)
+ return todimen(texdimen[name],unit,"%0.4f") -- 4 is more than enough, even 3 would be okay
end
-function commands.showlayoutvariables(options)
-
- if options == "" then
+local function checkedoptions(options)
+ if type(options) == "table" then
+ return options
+ elseif not options or options == "" then
options = "pt,cm"
end
+ options = utilities.parsers.settings_to_hash(options)
+ local n = 4
+ for k, v in table.sortedhash(options) do
+ local m = tonumber(k)
+ if m then
+ n = m
+ end
+ end
+ options.n = n
+ return options
+end
+
+function commands.showlayoutvariables(options)
- local options = utilities.parsers.settings_to_hash(options)
+ options = checkedoptions(options)
local dimensions = { "pt", "bp", "cm", "mm", "dd", "cc", "pc", "nd", "nc", "sp", "in" }
@@ -108,7 +124,7 @@ function commands.showlayoutvariables(options)
for i=1,#dimensions do
local d = dimensions[i]
if options[d] then
- context("%s%s",todimen(name,d,"%0.4f"),d)
+ context("%s%s",asdimen(name,d),d)
context.NC()
end
end
@@ -129,7 +145,7 @@ function commands.showlayoutvariables(options)
for i=1,#dimensions do
local d = dimensions[i]
if options[d] then
- result[#result+1] = format("%12s%s",todimen(name,d,"%0.4f"),d)
+ result[#result+1] = format("%12s%s",asdimen(name,d),d)
end
end
commands.writestatus("layout",format("%-24s %s",interfaces.interfacedcommand(name),concat(result," ")))
@@ -215,6 +231,8 @@ end
function commands.showlayout(options)
+ options = checkedoptions(options)
+
if tex.count.textlevel == 0 then
commands.showlayoutvariables(options)
@@ -225,7 +243,7 @@ function commands.showlayout(options)
context.bgroup()
context.showframe()
context.setuplayout { marking = interfaces.variables.on }
- for i=1,4 do
+ for i=1,(options.n or 4) do
commands.showlayoutvariables(options)
context.page()
end
@@ -281,8 +299,8 @@ end
\unexpanded\gdef\showmargins
{\starttabulate
- \NC asynchrone \NC \doifoddpageelse {odd} {even} \NC \NR
- \NC synchrone \NC \doifrightpageelse {right} {left} \NC \NR
+ \NC asynchrone \NC \doifelseoddpage {odd} {even} \NC \NR
+ \NC synchrone \NC \doifelserightpage {right} {left} \NC \NR
\NC right margin \NC \the\rightmarginwidth \NC \NR
\NC left margin \NC \the\leftmarginwidth \NC \NR
\NC outer margin \NC \the\outermarginwidth \NC \NR
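-- The new checkedoptions function above lets a numeric entry in the option list override
-- the number of sample pages, so an options string like "pt,cm,6" requests six pages of
-- layout variables instead of the default four. A minimal sketch of just the numeric-key
-- scan (the helper name pagecount is hypothetical):

local function pagecount(options)
    local n = 4 -- the historical default
    for k in table.sortedhash(options) do
        local m = tonumber(k)
        if m then
            n = m -- any numeric key overrides the default
        end
    end
    return n
end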
diff --git a/tex/context/base/page-sel.mkvi b/tex/context/base/page-sel.mkvi
index cb9bcb509..ee25a37db 100644
--- a/tex/context/base/page-sel.mkvi
+++ b/tex/context/base/page-sel.mkvi
@@ -56,7 +56,7 @@
{\dotripleempty\page_selectors_insert}
\def\page_selectors_insert[#filename][#emptylist][#settings]%
- {\doifassignmentelse{#emptylist}
+ {\doifelseassignment{#emptylist}
{\page_selectors_insert_indeed[#filename][][#emptylist]}
{\page_selectors_insert_indeed[#filename][#emptylist][#settings]}}
diff --git a/tex/context/base/page-set.mkiv b/tex/context/base/page-set.mkiv
index 9c232f535..a5afb92e9 100644
--- a/tex/context/base/page-set.mkiv
+++ b/tex/context/base/page-set.mkiv
@@ -387,7 +387,7 @@
{\advance\scratchcounter\plusone}}%
\popmacro\columnmaxcells}
-\long\def\OTRSETrecurseRL#1%
+\def\OTRSETrecurseRL#1%
{\dostepwiserecurse\nofcolumns\plusone\minusone
{#1\hskip\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}}
@@ -2184,11 +2184,11 @@
\def\dodefinecolumntextarea[#1][#2][#3]% y=0 is mogelijke en handig !
{\ifthirdargument
- \doifinsetelse{#2}{\v!both,\v!fixed}
+ \doifelseinset{#2}{\v!both,\v!fixed}
{\definecolumntextarea[#1][\v!left ][\c!type=#2,#3]%
\definecolumntextarea[#1][\v!right][\c!type=#2,#3]}
{\doifelse{#2}\v!next
- {\doifoddpageelse
+ {\doifelseoddpage
{\definecolumntextarea[#1][\v!right][\c!type=#2,#3]}
{\definecolumntextarea[#1][\v!left ][\c!type=#2,#3]}}
{\presetlocalframed
@@ -2214,7 +2214,7 @@
{\setupcolumntextarea[#1][\v!left ][#3]%
\setupcolumntextarea[#1][\v!right][#3]}
{\doifelse{#2}\v!next
- {\doifoddpageelse
+ {\doifelseoddpage
{\setupcolumntextarea[#1][\v!right][#3]}
{\setupcolumntextarea[#1][\v!left][#3]}}
{\getparameters[\??mt#1#2][#3]}}%
@@ -2379,13 +2379,13 @@
\unexpanded\def\setupcolumntextareatext
{\dotripleempty\dosetupcolumntextareatext}
-\long\def\dosetupcolumntextareatext[#1][#2][#3]%
+\def\dosetupcolumntextareatext[#1][#2][#3]%
{\ifthirdargument
\doifelse{#2}\v!both
{\setvalue{\??mt#1\v!left }{#3}%
\setvalue{\??mt#1\v!right}{#3}}
{\doifelse{#2}\v!next
- {\doifoddpageelse
+ {\doifelseoddpage
{\setvalue{\??mt#1\v!right}{#3}}%
{\setvalue{\??mt#1\v!left }{#3}}}%
{\setvalue{\??mt#1#2}{#3}}}%
@@ -2645,8 +2645,6 @@
% \chapter{thuan} \dorecurse{25}{\input thuan \endgraf\placefigure{}{}}
% \stopcolumnset
-\unprotect
-
% only in columnsets
% \def\cornerfigure
diff --git a/tex/context/base/page-sid.mkiv b/tex/context/base/page-sid.mkiv
index f7a2357bf..cbee4da20 100644
--- a/tex/context/base/page-sid.mkiv
+++ b/tex/context/base/page-sid.mkiv
@@ -15,21 +15,18 @@
\unprotect
-% These macro deal with side floats. We started with Daniel
-% Comenetz macros as published in TUGBoat Volume 14 (1993),
-% No.\ 1: Anchored Figures at Either Margin. I extended and
-% patched the macros to suite our needs which results in a
-% messy module. Therefore, this module badly needs an update
-% because it's now a mixture of old and new macros.
+% These macros deal with side floats. We started with Daniel Comenetz's macros as published
+% in TUGBoat Volume 14 (1993), No.\ 1: Anchored Figures at Either Margin. I extended and
+% patched the macros to suit our needs which results in a messy module. Therefore, this
+% module badly needs an update because it's now a mixture of old and new macros.
% Interesting cases where it goes wrong:
%
% \placefigure[left]{}{} \dorecurse{3}{\input ward } {\par} \input ward
%
-% Here we get an unwanted carried over hangindent and parindent. A
-% solution is to associate it with the local par node instead. This
-% is something to discuss with Taco as it could be a new luatex
-% feature: explicitly set par properties.
+% Here we get an unwanted carried over hangindent and parindent. A solution is to associate
+% it with the local par node instead. This is something to discuss with Taco as it could be
+% a new luatex/mkiv feature: explicitly set par properties.
% Maybe I should just rewrite the lot.
@@ -202,17 +199,72 @@
\global\setfalse\c_page_sides_short
\global\setfalse\c_page_sides_flag}
+\unexpanded\def\doifelsesidefloat
+ {\par
+ \ifnum\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\let\doifsidefloatelse\doifelsesidefloat
+
+% \def\page_sides_flush_floats_indeed
+% {\global\advance\d_page_sides_vsize-\d_page_sides_bottomskip
+% \begingroup
+% \let\page_sides_flush_floats\relax
+% \forgetall
+% \doloop
+% {\strut
+% \iftracesidefloats
+% \color[darkgray]{\ruledhbox{\strut\kern\d_page_sides_width}}%
+% \fi
+% \par
+% % \ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
+% \ifdim\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax>\zeropoint
+% \ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
+% \exitloop
+% \fi
+% \else
+% \exitloop
+% \fi}%
+% \endgroup
+% \ifdim\parskip>\zeropoint % why this test ?
+% \ifdim\d_page_sides_bottomskip>\parskip
+% % \nowhitespace
+% % \vskip\d_page_sides_bottomskip
+% \blank[\v!nowhite,\the\dimexpr\d_page_sides_bottomskip]
+% \fi
+% \else
+% \blank[\the\d_page_sides_bottomskip]% new, so needs checking
+% \fi}
+
+\installcorenamespace{sidefloatsteps}
+
+\setvalue{\??sidefloatsteps\v!line }{\strut}
+\setvalue{\??sidefloatsteps\v!big }{\strut}
+\setvalue{\??sidefloatsteps\v!medium}{\halfstrut}
+\setvalue{\??sidefloatsteps\v!small }{\quarterstrut}
+\setvalue{\??sidefloatsteps\v!depth }{\depthstrut}
+
+% we don't officially know what kind of float we flush
+
\def\page_sides_flush_floats_indeed
{\global\advance\d_page_sides_vsize-\d_page_sides_bottomskip
\begingroup
\let\page_sides_flush_floats\relax
+ \edef\m_pages_strut{\executeifdefined{\??sidefloatsteps\rootfloatparameter\c!step}\strut}%
\forgetall
\doloop
- {\strut
- \iftracesidefloats
- \color[darkgray]{\ruledhbox{\strut\kern\d_page_sides_width}}%
+ {\iftracesidefloats
+ \dontleavehmode
+ \ruledhbox{\m_pages_strut\kern\d_page_sides_width}%
+ \else
+ \m_pages_strut
\fi
\par
+ \nointerlineskip
+ % \ifdim\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax>\zeropoint
\ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
\ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
\exitloop
@@ -223,11 +275,28 @@
\endgroup
\ifdim\parskip>\zeropoint % why this test ?
\ifdim\d_page_sides_bottomskip>\parskip
- \nowhitespace
- \vskip\d_page_sides_bottomskip
+ % \nowhitespace
+ % \vskip\d_page_sides_bottomskip
+% \blank[\v!nowhite,\the\dimexpr\d_page_sides_bottomskip]
+ \blank[\v!nowhite,\rootfloatparameter\c!sidespaceafter]
\fi
+ \else
+% \blank[\the\d_page_sides_bottomskip]% new, so needs checking
+ \blank[\rootfloatparameter\c!sidespaceafter]% new, so needs checking
\fi}
+% alternative method (unsnapped)
+%
+% \def\page_sides_flush_floats_indeed
+% {\scratchdimen\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax
+% \ifdim\parskip>\zeropoint % why this test ?
+% \ifdim\scratchdimen>\parskip
+% \blank[\v!nowhite,\the\scratchdimen] % better in stages
+% \fi
+% \else
+% \blank[\the\scratchdimen]
+% \fi}
+
\def\page_sides_check_floats_after_par
{\page_sides_check_floats_indeed
\ifdim\oldpagetotal=\pagetotal \else
@@ -294,10 +363,10 @@
\page_otr_sides_pop_penalties}
\def\page_sides_output_routine_yes % we need to rework this ... add pagediscards and such
- {\unvbox\normalpagebox
+ {\unvbox\normalpagebox % bah, and the discards?
\setbox\b_page_sides_bottom\lastbox
\ifdim\wd\b_page_sides_bottom>\d_page_sides_hsize
- \penalty-201
+ \penalty-201 % hm, i really need to write this from scratch
\box\b_page_sides_bottom
\else\ifvoid\b_page_sides_bottom
\else
@@ -592,14 +661,39 @@
\global\settrue\c_page_floats_room
\fi}
+% \def\page_sides_prepare_space
+% {\par
+% % no longer needed \whitespace
+% \begingroup
+% \forgetall
+% \reseteverypar
+% \verticalstrut
+% \vskip-\struttotal
+% \endgroup}
+
+\installtextracker
+ {sidefloats.anchor}
+ {\let\page_sides_anchor\page_sides_anchor_yes}
+ {\let\page_sides_anchor\page_sides_anchor_nop}
+
+\def\page_sides_anchor_yes
+ {\darkred
+ \hskip-5\emwidth
+ \vrule\s!height.05\exheight\s!depth.05\exheight\s!width10\emwidth}
+
+\def\page_sides_anchor_nop
+ {\strut}
+
+\let\page_sides_anchor\page_sides_anchor_nop
+
\def\page_sides_prepare_space
{\par
- \whitespace
\begingroup
- \forgetall
\reseteverypar
- \verticalstrut
+ \dontleavehmode\hbox to \zeropoint{\page_sides_anchor\hss\strut}%
+ \vskip-\parskip
\vskip-\struttotal
+ \inhibitblank
\endgroup}
\def\page_sides_handle_float#1% grid (4) is rather experimental
@@ -702,13 +796,15 @@
\fi}
\def\page_sides_inject_dummy_lines
- {\scratchcounter\pageshrink
+ {\begingroup
+ \scratchcounter\pageshrink
\divide\scratchcounter \baselineskip
\advance\scratchcounter \plusone
\parskip\zeropoint
\dorecurse\scratchcounter{\hbox to \hsize{}}%
\kern-\scratchcounter\baselineskip
- \penalty\zerocount}
+ \penalty\zerocount
+ \endgroup}
% Display math
%
@@ -764,7 +860,7 @@
\def\checksidefloat {\page_sides_check_floats}
\def\flushsidefloats {\page_sides_flush_floats}
\def\flushsidefloatsafterpar{\page_sides_flush_floats_after_par}
-%def\forgetsidefloats {\page_sides_forget_floats}
+\def\forgetsidefloats {\page_sides_forget_floats}
%def\synchronizesidefloats {\page_sides_synchronize_floats}
\protect \endinput
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
index 35ce85609..56c6167aa 100644
--- a/tex/context/base/page-str.lua
+++ b/tex/context/base/page-str.lua
@@ -17,10 +17,12 @@ local nodes, node = nodes, node
local nodepool = nodes.pool
local tasks = nodes.tasks
+local implement = interfaces.implement
+
local new_kern = nodepool.kern
local new_glyph = nodepool.glyph
-local find_tail = node.slide
+local slide_nodelist = node.slide
local write_node = node.write
local free_node = node.free
local copy_nodelist = node.copy_list
@@ -73,7 +75,7 @@ function streams.collect(head,where)
end
local last = dana[#dana]
if last then
- local tail = find_tail(last)
+ local tail = slide_nodelist(last)
tail.next, head.prev = head, tail
elseif last == false then
dana[#dana] = head
@@ -202,7 +204,7 @@ function streams.synchronize(list) -- this is an experiment !
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
-- so we might need to do some splitting or whatever
- local tail = vbox.list and find_tail(vbox.list)
+ local tail = vbox.list and slide_nodelist(vbox.list)
local n, delta = 0, delta_height -- for tracing
while delta > 0 do
-- we need to add some interline penalties
@@ -235,6 +237,60 @@ tasks.disableaction("mvlbuilders", "streams.collect")
function streams.initialize()
tasks.enableaction ("mvlbuilders", "streams.collect")
+ function streams.initialize() end
end
-- todo: remove empty last { }'s
+-- todo: better names, enable etc
+
+implement {
+ name = "initializestream",
+ actions = streams.initialize,
+ onlyonce = true,
+}
+
+implement {
+ name = "enablestream",
+ actions = streams.enable,
+ arguments = "string"
+}
+
+implement {
+ name = "disablestream",
+ actions = streams.disable
+}
+
+implement {
+ name = "startstream",
+ actions = streams.start,
+ arguments = "string"
+}
+
+implement {
+ name = "stopstream",
+ actions = streams.stop
+}
+
+implement {
+ name = "flushstream",
+ actions = streams.flush,
+ arguments = "string"
+}
+
+implement {
+ name = "flushstreamcopy",
+ actions = streams.flush,
+ arguments = { "string", true }
+}
+
+implement {
+ name = "synchronizestream",
+ actions = streams.synchronize,
+ arguments = "string"
+}
+
+implement {
+ name = "pushstream",
+ actions = streams.push,
+ arguments = "string"
+}
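-- Note how "flushstream" and "flushstreamcopy" above share one action: the literal true in
-- the argument list is passed as a constant, so only the stream name is scanned from TeX
-- (compare the old \ctxlua{streams.flush("#1",true)} being replaced in page-str.mkiv
-- below). A minimal sketch of the same idiom, with hypothetical names:

local implement = interfaces.implement
local report    = logs.reporter("demo")

local function flush(name,copy)
    report("flushing %s (copy: %s)",name,tostring(copy))
end

implement { name = "demoflush",     actions = flush, arguments = "string" }           -- \clf_demoflush{foo}
implement { name = "demoflushcopy", actions = flush, arguments = { "string", true } } -- \clf_demoflushcopy{foo}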
diff --git a/tex/context/base/page-str.mkiv b/tex/context/base/page-str.mkiv
index 200a71377..e4b2fa229 100644
--- a/tex/context/base/page-str.mkiv
+++ b/tex/context/base/page-str.mkiv
@@ -29,14 +29,12 @@
%D
%D Remark: marknotes are gone, at least for a while.
-\writestatus{loading}{ConTeXt Page Macros / Page Streams}
-
\registerctxluafile{page-str}{1.001}
\unprotect
\let \currentoutputstream \empty
-\newif \ifinoutputstream % will becoem a conditional or mode
+\newif \ifinoutputstream % will become a conditional or mode
\newtoks \everyenableoutputstream
\appendtoks
@@ -44,7 +42,7 @@
\to \everyenableoutputstream
\unexpanded\def\initializeoutputstreams
- {\ctxlua{streams.initialize()}%
+ {\clf_initializestream
\glet\initializeoutputstreams\relax}
\unexpanded\def\enableoutputstream[#1]% could be \startoutputsubstream
@@ -52,12 +50,12 @@
\the\everyenableoutputstream
\inoutputstreamtrue
\xdef\currentoutputstream{#1}%
- \ctxlua{streams.enable("#1")}}
+ \clf_enablestream{#1}}
\unexpanded\def\disableoutputstream
{\inoutputstreamfalse
\global\let\currentoutputstream\s!default
- \ctxlua{streams.disable()}}
+ \clf_disablestream}
\unexpanded\def\startoutputstream[#1]%
{\begingroup
@@ -65,10 +63,10 @@
\the\everyenableoutputstream
\inoutputstreamtrue
\xdef\currentoutputstream{#1}%
- \ctxlua{streams.start("#1")}}
+ \clf_startstream{#1}}
\unexpanded\def\stopoutputstream
- {\ctxlua{streams.stop()}%
+ {\clf_stopstream
\endgroup}
\unexpanded\def\startoutputsubstream[#1]% just push/pop instead
@@ -79,13 +77,13 @@
{\globalpopmacro\currentoutputstream
\enableoutputstream[\currentoutputstream]}
-\def\flushoutputstream [#1]{\ctxlua{streams.flush("#1")}}
-\def\outputstreamcopy [#1]{\vbox{\ctxlua{streams.flush("#1",true)}}}
-\def\outputstreambox [#1]{\vbox{\ctxlua{streams.flush("#1")}}}
-\def\outputstreamunvcopy[#1]{\ctxlua{streams.flush("#1",true)}}
-\def\outputstreamunvbox [#1]{\ctxlua{streams.flush("#1")}}
-\def\synchronizestreams [#1]{\ctxlua{streams.synchronize("#1")}}
-\def\dopushoutputstream [#1]{\ctxlua{streams.push("#1")}}
+\def\flushoutputstream [#1]{\clf_flushstream{#1}}
+\def\outputstreambox [#1]{\vbox{\clf_flushstream{#1}}}
+\def\outputstreamcopy [#1]{\vbox{\clf_flushstreamcopy{#1}}}
+\def\outputstreamunvbox [#1]{\clf_flushstream{#1}}
+\def\outputstreamunvcopy[#1]{\clf_flushstreamcopy{#1}}
+\def\synchronizestreams [#1]{\clf_synchronizestream{#1}}
+\def\dopushoutputstream [#1]{\clf_pushstream{#1}}
\unexpanded\def\pushoutputstream
{\dosingleempty\dopushoutputstream}
diff --git a/tex/context/base/page-txt.mkvi b/tex/context/base/page-txt.mkvi
index 240f0e00b..76143a018 100644
--- a/tex/context/base/page-txt.mkvi
+++ b/tex/context/base/page-txt.mkvi
@@ -142,11 +142,11 @@
%D \showsetup{noheaderandfooterlines}
%D \showsetup{notopandbottomlines}
-\def\noheaderandfooterlines
+\unexpanded\def\noheaderandfooterlines
{\setuplayoutelement[\v!header][\c!state=\v!empty]%
\setuplayoutelement[\v!footer][\c!state=\v!empty]}
-\def\notopandbottomlines
+\unexpanded\def\notopandbottomlines
{\setuplayoutelement[\v!top ][\c!state=\v!empty]%
\setuplayoutelement[\v!bottom][\c!state=\v!empty]}
@@ -253,7 +253,7 @@
%D only when double sided typesetting is enabled.
\unexpanded\def\page_layouts_process_element_double
- {\doifoddpageelse
+ {\doifelseoddpage
\page_layouts_process_element_double_odd
\page_layouts_process_element_double_even}
@@ -378,6 +378,9 @@
\doubleexpandafter\firstoftwoarguments
\fi\fi}
+\let\doiflayouttextlineelse\doifelselayouttextline
+\let\doiflayoutsomelineelse\doifelselayoutsomeline
+
\newconditional\resyncaftertextline
\setvalue{\??layouttextsline\v!normal}{\page_layouts_place_text_line_indeed}
@@ -418,7 +421,7 @@
%D The following macro has to be called after a page
%D is flushed.
-\def\resetlayouttextlines % public
+\unexpanded\def\resetlayouttextlines % public
{\csname\??layouttextsreset\v!top \endcsname
\csname\??layouttextsreset\v!header\endcsname
\csname\??layouttextsreset\v!text \endcsname
@@ -435,17 +438,17 @@
% \settext[header][text][middle][xxx][yyy]
-\def\settextcontent
+\unexpanded\def\settextcontent
{\doquintupleempty\page_layouts_set_text_content}
\def\page_layouts_set_text_content[#vertical][#horizontal][#one][#two][#three]% header text middle text/text
{\iffifthargument
- \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\c!text:#one}\c!middletext}%
+ \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\v!text:#one}\c!middletext}%
{\page_layouts_process_element_double
\c!leftstyle \c!leftcolor \c!leftwidth {#two}%
\c!rightstyle\c!rightcolor\c!rightwidth{#three}}%
\else\iffourthargument
- \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\c!text:#one}\c!middletext}%
+ \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\v!text:#one}\c!middletext}%
{\page_layouts_process_element_double
\c!leftstyle \c!leftcolor \c!leftwidth {#two}%
\c!rightstyle\c!rightcolor\c!rightwidth{#two}}%
@@ -456,22 +459,22 @@
\c!rightstyle\c!rightcolor\c!rightwidth{#one}}%
\fi\fi\fi}
-\def\resettextcontent
+\unexpanded\def\resettextcontent
{\dotripleempty\page_layouts_reset_text_content}
\def\page_layouts_reset_text_content[#vertical][#horizontal][#tag]% header text middle
{\edef\currentlayoutelement{#vertical:#horizontal}%
\ifthirdargument
- \letvalueempty{\layoutelementhash\executeifdefined{\??layouttextcontent\c!text:#tag}\c!middletext}%
+ \letvalueempty{\layoutelementhash\executeifdefined{\??layouttextcontent\v!text:#tag}\c!middletext}%
\else\ifsecondargument
\resetlayoutelementparameter\c!lefttext
\resetlayoutelementparameter\c!middletext
\resetlayoutelementparameter\c!righttext
\fi\fi}
-\letvalue{\??layouttextcontent\c!middle:\c!text}\c!middletext
-\letvalue{\??layouttextcontent\c!left :\c!text}\c!lefttext
-\letvalue{\??layouttextcontent\c!right :\c!text}\c!righttext
+\letvalue{\??layouttextcontent\c!middle:\v!text}\c!middletext
+\letvalue{\??layouttextcontent\c!left :\v!text}\c!lefttext
+\letvalue{\??layouttextcontent\c!right :\v!text}\c!righttext
%D The placement of a whole line is handled by the next two
%D macros. These are hooked into the general purpose token
@@ -756,10 +759,12 @@
\page_layouts_reset_page_number_location
\ifx\p_strc_pagenumbers_location\empty
% set otherwise
+ \else\ifx\p_strc_pagenumbers_location\v!none
+ % set otherwise
\else
\page_layouts_identify_page_number_location
\page_layouts_set_page_number_location
- \fi
+ \fi\fi
\fi}
\def\page_layouts_place_page_number_left % historic
diff --git a/tex/context/base/pdfr-def.mkii b/tex/context/base/pdfr-def.mkii
index 7554bda9e..b3f67b93f 100644
--- a/tex/context/base/pdfr-def.mkii
+++ b/tex/context/base/pdfr-def.mkii
@@ -1,4 +1,4 @@
-% filename : pdfr-def.tex
+% filename : pdfr-def.mkii
% comment : generated by mtxrun --script chars --pdf
% author : Hans Hagen, PRAGMA-ADE, Hasselt NL
% copyright: PRAGMA ADE / ConTeXt Development Team
diff --git a/tex/context/base/phys-dim.lua b/tex/context/base/phys-dim.lua
index e40d1eabb..7430b62d7 100644
--- a/tex/context/base/phys-dim.lua
+++ b/tex/context/base/phys-dim.lua
@@ -39,6 +39,7 @@ if not modules then modules = { } end modules ['phys-dim'] = {
-- RevPerSec = [[RPS]],
-- RevPerMin = [[RPM]],
+local rawset, next = rawset, next
local V, P, S, R, C, Cc, Cs, matchlpeg = lpeg.V, lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.match
local format, lower = string.format, string.lower
local appendlpeg = lpeg.append
@@ -49,12 +50,11 @@ local utfchar = utf.char
physics = physics or { }
physics.units = physics.units or { }
-local variables = interfaces.variables
-local v_reverse = variables.reverse
local allocate = utilities.storage.allocate
local context = context
local commands = commands
+local implement = interfaces.implement
local trace_units = false
local report_units = logs.reporter("units")
@@ -172,8 +172,8 @@ local p_c = (ddigitspace^1 * dskipperiod)^0 -- ___.
local p_c_dparser = math_one + math_two + dleader * p_c * dtrailer * dfinal
local c_p_dparser = math_one + math_two + dleader * c_p * dtrailer * dfinal
-function commands.digits(str,p_c)
- if p_c == v_reverse then
+local function makedigits(str,reverse)
+ if reverse then
matchlpeg(p_c_dparser,str)
else
matchlpeg(c_p_dparser,str)
@@ -286,18 +286,29 @@ local long_units = {
-- synonyms
- ["Metric Ton"] = "tonne",
+ MetricTon = "tonne",
Litre = "liter",
+ ["Metric Ton"] = "tonne",
+
-- non-SI units whose values must be obtained experimentally (Table 7)
- ["Electron Volt"] = "electronvolt",
+ AtomicMassUnit = "atomicmassunit",
+ AstronomicalUnit = "astronomicalunit",
+ ElectronVolt = "electronvolt",
Dalton = "dalton",
+
["Atomic Mass Unit"] = "atomicmassunit",
["Astronomical Unit"] = "astronomicalunit",
+ ["Electron Volt"] = "electronvolt",
-- special cases (catch doubles, okay, a bit over the top)
+ DegreesCelsius = "celsius",
+ DegreesFahrenheit = "fahrenheit",
+ DegreeCelsius = "celsius",
+ DegreeFahrenheit = "fahrenheit",
+
["Degrees Celsius"] = "celsius",
["Degrees Fahrenheit"] = "fahrenheit",
["Degree Celsius"] = "celsius",
@@ -322,12 +333,14 @@ local long_units = {
Hg = "mercury",
-- ["Millimetre Of Mercury"] = [[mmHg]],
Angstrom = "angstrom", -- strictly Ångström
- ["Nautical Mile"] = "nauticalmile",
+ NauticalMile = "nauticalmile",
Barn = "barn",
Knot = "knot",
Neper = "neper",
Bel = "bel", -- in practice only decibel used
+ ["Nautical Mile"] = "nauticalmile",
+
-- other non-SI units from CGS system (Table 9)
Erg = "erg",
@@ -506,20 +519,20 @@ local packaged_units = {
-- rendering:
-local unitsPUS = context.unitsPUS
-local unitsPU = context.unitsPU
-local unitsPS = context.unitsPS
-local unitsP = context.unitsP
-local unitsUS = context.unitsUS
-local unitsU = context.unitsU
-local unitsS = context.unitsS
-local unitsO = context.unitsO
-local unitsN = context.unitsN
-local unitsC = context.unitsC
-local unitsQ = context.unitsQ
-local unitsNstart = context.unitsNstart
-local unitsNstop = context.unitsNstop
-local unitsNspace = context.unitsNspace
+local ctx_unitsPUS = context.unitsPUS
+local ctx_unitsPU = context.unitsPU
+local ctx_unitsPS = context.unitsPS
+local ctx_unitsP = context.unitsP
+local ctx_unitsUS = context.unitsUS
+local ctx_unitsU = context.unitsU
+local ctx_unitsS = context.unitsS
+local ctx_unitsO = context.unitsO
+local ctx_unitsN = context.unitsN
+local ctx_unitsC = context.unitsC
+local ctx_unitsQ = context.unitsQ
+local ctx_unitsNstart = context.unitsNstart
+local ctx_unitsNstop = context.unitsNstop
+local ctx_unitsNspace = context.unitsNspace
local labels = languages.data.labels
@@ -600,7 +613,7 @@ labels.units = allocate {
electronvolt = { labels = { en = [[eV]] } },
dalton = { labels = { en = [[Da]] } },
atomicmassunit = { labels = { en = [[u]] } },
- astronomicalunit = { labels = { en = [[ua]] } },
+ astronomicalunit = { labels = { en = [[au]] } },
bar = { labels = { en = [[bar]] } },
angstrom = { labels = { en = [[Å]] } }, -- strictly Ångström
nauticalmile = { labels = { en = [[M]] } },
@@ -664,28 +677,28 @@ local function dimpus(p,u,s)
if p ~= "" then
if u ~= "" then
if s ~= "" then
- unitsPUS(p,u,s)
+ ctx_unitsPUS(p,u,s)
else
- unitsPU(p,u)
+ ctx_unitsPU(p,u)
end
elseif s ~= "" then
- unitsPS(p,s)
+ ctx_unitsPS(p,s)
else
- unitsP(p)
+ ctx_unitsP(p)
end
else
if u ~= "" then
if s ~= "" then
- unitsUS(u,s)
+ ctx_unitsUS(u,s)
-- elseif c then
- -- unitsC(u)
+ -- ctx_unitsC(u)
else
- unitsU(u)
+ ctx_unitsU(u)
end
elseif s ~= "" then
- unitsS(s)
+ ctx_unitsS(s)
else
- unitsP(p)
+ ctx_unitsP(p)
end
end
end
@@ -699,7 +712,7 @@ local function dimop(o)
report_units("operator %a",o)
end
if o then
- unitsO(o)
+ ctx_unitsO(o)
end
end
@@ -709,7 +722,7 @@ local function dimsym(s)
end
s = symbol_units[s] or s
if s then
- unitsC(s)
+ ctx_unitsC(s)
end
end
@@ -719,7 +732,7 @@ local function dimpre(p)
end
p = packaged_units[p] or p
if p then
- unitsU(p)
+ ctx_unitsU(p)
end
end
@@ -789,7 +802,7 @@ local function update_parsers() -- todo: don't remap utf sequences
* (V("packaged") / dimpre)
* V("somespace"),
-- someunknown = V("somespace")
- -- * (V("nospace")/unitsU)
+ -- * (V("nospace")/ctx_unitsU)
-- * V("somespace"),
--
combination = V("longprefix") * V("longunit") -- centi meter
@@ -798,22 +811,32 @@ local function update_parsers() -- todo: don't remap utf sequences
+ V("nothing") * V("shortunit")
+ V("longprefix") * V("shortunit") -- centi m
+ V("shortprefix") * V("longunit"), -- c meter
+
+-- combination = ( V("longprefix") -- centi meter
+-- + V("nothing")
+-- ) * V("longunit")
+-- + ( V("shortprefix") -- c m
+-- + V("nothing")
+-- + V("longprefix")
+-- ) * V("shortunit") -- centi m
+-- + ( V("shortprefix") -- c meter
+-- ) * V("longunit"),
+
+
dimension = V("somespace")
* (
V("packaged") / dimpre
+ (V("longsuffix") * V("combination")) / dimspu
+ (V("combination") * (V("shortsuffix") + V("nothing"))) / dimpus
)
- * (V("qualifier") / unitsQ)^-1
+ * (V("qualifier") / ctx_unitsQ)^-1
* V("somespace"),
operator = V("somespace")
* ((V("longoperator") + V("shortoperator")) / dimop)
* V("somespace"),
snippet = V("dimension")
+ V("somesymbol"),
- unit = (
- V("snippet")
- * (V("operator") * V("snippet"))^0
+ unit = ( V("snippet") * (V("operator") * V("snippet"))^0
+ V("somepackaged")
)^1,
}
@@ -824,13 +847,13 @@ local function update_parsers() -- todo: don't remap utf sequences
local number = Cs( P("$") * (1-P("$"))^1 * P("$")
+ P([[\m{]]) * (1-P("}"))^1 * P("}")
+ (1-R("az","AZ")-P(" "))^1 -- todo: catch { } -- not ok
- ) / unitsN
+ ) / ctx_unitsN
- local start = Cc(nil) / unitsNstart
- local stop = Cc(nil) / unitsNstop
- local space = Cc(nil) / unitsNspace
+ local start = Cc(nil) / ctx_unitsNstart
+ local stop = Cc(nil) / ctx_unitsNstop
+ local space = Cc(nil) / ctx_unitsNspace
- -- todo: avoid \unitsNstart\unitsNstop (weird that it can happen .. now catched at tex end)
+ -- todo: avoid \ctx_unitsNstart\ctx_unitsNstop (weird that it can happen .. now caught at the tex end)
local p_c_combinedparser = P { "start",
number = start * dleader * (p_c_dparser + number) * stop,
@@ -853,7 +876,7 @@ local p_c_parser = nil
local c_p_parser = nil
local dirty = true
-function commands.unit(str,p_c)
+local function makeunit(str,reverse)
if dirty then
if trace_units then
report_units("initializing parser")
@@ -862,7 +885,7 @@ function commands.unit(str,p_c)
dirty = false
end
local ok
- if p_c == v_reverse then
+ if reverse then
ok = matchlpeg(p_c_parser,str)
else
ok = matchlpeg(c_p_parser,str)
@@ -908,7 +931,7 @@ local mapping = {
packaged = "packaged",
}
-function commands.registerunit(category,list)
+local function registerunit(category,list)
if not list or list == "" then
list = category
category = "unit"
@@ -921,3 +944,11 @@ function commands.registerunit(category,list)
end
-- inspect(tables)
end
+
+physics.units.registerunit = registerunit
+
+implement { name = "digits_normal", actions = makedigits, arguments = "string" }
+implement { name = "digits_reverse", actions = makedigits, arguments = { "string", true } }
+implement { name = "unit_normal", actions = makeunit, arguments = "string"}
+implement { name = "unit_reverse", actions = makeunit, arguments = { "string", true } }
+implement { name = "registerunit", actions = registerunit, arguments = { "string", "string" } }
diff --git a/tex/context/base/phys-dim.mkiv b/tex/context/base/phys-dim.mkiv
index 3de6b2344..232edc2fc 100644
--- a/tex/context/base/phys-dim.mkiv
+++ b/tex/context/base/phys-dim.mkiv
@@ -246,12 +246,12 @@
\unexpanded\def\phys_digits_indeed#1%
{\dontleavehmode
\begingroup
- \ctxcommand{digits(\!!bs\detokenize{#1}\!!es,"\ifcase\c_phys_digits_order \v!normal\else\v!reverse\fi")}%
+ \ifcase\c_phys_digits_order\expandafter\clf_digits_normal\else\expandafter\clf_digits_reverse\fi{\detokenize{#1}}%
\endgroup
\settrue\c_phys_units_dospace}
\unexpanded\def\digits
- {\doifnextbgroupelse\phys_digits_argument\phys_digits_spaced}
+ {\doifelsenextbgroup\phys_digits_argument\phys_digits_spaced}
\def\phys_digits_argument#1%
{\phys_digits_indeed{#1}}
@@ -500,7 +500,8 @@
\to \everyunits
\unexpanded\def\phys_units_indeed#1%
- {\ctxcommand{unit(\!!bs\detokenize{#1}\!!es,"\unitparameter\c!order")}}
+ {\edef\p_order{\unitparameter\c!order}%
+ \ifx\p_order\v!reverse\expandafter\clf_unit_reverse\else\expandafter\clf_unit_normal\fi{\detokenize{#1}}}
\unexpanded\def\unitsPUS#1#2#3{\phys_units_next\prefixtext{#1}\unittext{#2}\unitsraise{\suffixtext{#3}}\c_phys_units_state\plusone} % suffix
\unexpanded\def\unitsPU #1#2{\phys_units_next\prefixtext{#1}\unittext{#2}\c_phys_units_state\plusthree} % unit
@@ -578,7 +579,7 @@
\let\unitsNstartindeed\unitsNstart
\unexpanded\def\unitsNstart
- {\doifnextcharelse\unitsNstop\gobbleoneargument\unitsNstartindeed}
+ {\doifelsenextchar\unitsNstop\gobbleoneargument\unitsNstartindeed}
% End of hack.
@@ -682,10 +683,10 @@
\definelabelclass [prefix] [2]
\definelabelclass [suffix] [2] % This is only a label because we want to show them in a table.
-\ctxcommand{definelabels("prefix", "prefixes" )}
-\ctxcommand{definelabels("unit", "units" )}
-\ctxcommand{definelabels("operator","operators")}
-\ctxcommand{definelabels("suffix", "suffixes" )}
+\clf_definelabels{prefix}{prefixes}\s!false\relax
+\clf_definelabels{unit}{units}\s!false\relax
+\clf_definelabels{operator}{operators}\s!false\relax
+\clf_definelabels{suffix}{suffixes}\s!false\relax
%D You can define additional units:
%D
@@ -716,7 +717,7 @@
{\dodoubleempty\phys_units_register}
\def\phys_units_register[#1][#2]%
- {\ctxcommand{registerunit(\!!bs#1\!!es,\!!bs#2\!!es)}}
+ {\clf_registerunit{#1}{#2}}
%D You can generate a list as follows:
%D
diff --git a/tex/context/base/ppchtex.mkiv b/tex/context/base/ppchtex.mkiv
index 0f42f91ce..d1167d414 100644
--- a/tex/context/base/ppchtex.mkiv
+++ b/tex/context/base/ppchtex.mkiv
@@ -100,11 +100,11 @@
\newconstant\chemicaldrawingmode
-\doifdefinedelse{beginpicture} % PiCTeX
- {\doifdefinedelse{startMPdrawing}
+\doifelsedefined{beginpicture} % PiCTeX
+ {\doifelsedefined{startMPdrawing}
{\chemicaldrawingmode\plustwo } % MetaPost
{\chemicaldrawingmode\zerocount}} % raw
- {\doifdefinedelse{psaxes}
+ {\doifelsedefined{psaxes}
{\chemicaldrawingmode\plusone } % PSTricks
{\chemicaldrawingmode\plusthree}} % unknown
@@ -387,7 +387,7 @@
{\def\maxchemical{#1}}
\def\doifchemicalnumber#1#2#3%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\ifnum#1>\maxchemical\relax
\writestatus{ppchtex}{number #1 is skipped}%
\else
@@ -682,7 +682,7 @@
\edef\@@chemicaltop {\the\!!countc}%
\edef\@@chemicalbottom{\the\!!countd}%
%
- \doifinsetelse\v!on{\@@chemicalframe,\@@chemicalaxis}
+ \doifelseinset\v!on{\@@chemicalframe,\@@chemicalaxis}
{\def\@@chemicalborder{\chemicalframe}}
{\def\@@chemicalborder{\normalchemicalframe}}%
%
@@ -867,14 +867,14 @@
\def\chemicalrepeat {1}
\def\redoprocesschemical[#1#2]%
- {\doifinstringelse{#1}{0123456789.}
+ {\doifelseinstring{#1}{0123456789.}
{\edef\chemicalrepeat{\chemicalrepeat#1}%
\redoprocesschemical[#2]}
{\processchemical[#1#2]%
\def\chemicalrepeat{1}}}
\def\doprocesschemical[#1#2]#3%
- {\doifinstringelse{#1}{0123456789.}
+ {\doifelseinstring{#1}{0123456789.}
{\def\chemicalrepeat{#1}%
\redoprocesschemical[#2]}
{#3}}
@@ -891,9 +891,9 @@
\divide\dimen0 by \@@localchemicalscale
\!!counta=\dimen0
\def\doprocess[##1##2]%
- {\doifinstringelse{##1}{128}
+ {\doifelseinstring{##1}{128}
{\edef\chemicaloffset{\the\!!counta}}
- {\doifinstringelse{##1}{456}
+ {\doifelseinstring{##1}{456}
{\edef\chemicaloffset{-\the\!!counta}}
{\doifelse{##1}{0}
{\edef\chemicaloffset{0}}
@@ -917,9 +917,9 @@
\dimen0=.25\wd0
\divide\dimen0 by \@@localchemicalscale
\!!counta=\dimen0
- \doifinstringelse{#1}{128}
+ \doifelseinstring{#1}{128}
{\edef\chemicaloffset{\the\!!counta}}
- {\doifinstringelse{#1}{456}
+ {\doifelseinstring{#1}{456}
{\edef\chemicaloffset{-\the\!!counta}}
{\doifelse{#1}{0}
{\edef\chemicaloffset{0}}
@@ -959,7 +959,7 @@
\setvalue{\s!angle4.#1}{\dosetchemicalangle{#5}}}
\def\chemicalrotate[#1]%
- {\doifdefinedelse{\s!mirror#1}
+ {\doifelsedefined{\s!mirror#1}
{\getvalue{\s!rotate\chemicalrotation.#1\getvalue{\s!mirror#1}}%
\getvalue{\s!angle\chemicalrotation.#1\getvalue{\s!mirror#1}}}
{\getvalue{\s!rotate\chemicalrotation.#1}%
@@ -984,7 +984,7 @@
\def\processchemicalrotation#1%
{\def\doprocess[##1##2]%
- {\doifnumberelse{##1}
+ {\doifelsenumber{##1}
{\def\chemicalrotation{##1}}
{\unknownchemical{ROT#1}}}%
\doprocess[#1]}
@@ -1067,9 +1067,11 @@
\def\dodoifsinglelocation#1#2\\#3%
{\ifx#2\relax#3\fi}
-\def\doifsinglelocationelse#1%
+\def\doifelsesinglelocation#1%
{\expandafter\dodoifsinglelocationelse#1\relax\\}
+\let\doifsinglelocationelse\doifelsesinglelocation
+
\def\putchemicaltext#1#2%
{\enablechemicalspecials
\ifchemicalpicture
@@ -1706,7 +1708,7 @@
\newif\ifinnerchemical
\def\dosimplechemical#1#2#3%
- {\doifdefinedelse{\??chemical\c!location}
+ {\doifelsedefined{\??chemical\c!location}
{\writestatus{ppchtex}{the {}{}-alternative is not permitted here}}
{\ifinnerchemical
\let\chemicalsign = \chemicalinnersign
@@ -2065,7 +2067,7 @@
HIGH=>\sethighsubscripts,
LOW=>\setlowsubscripts,
\s!default=>,
- \s!unknown=>\doifdefinedelse{\s!executechemical#1}
+ \s!unknown=>\doifelsedefined{\s!executechemical#1}
{\def\chemicalrotation{1}%
\def\chemicaloffset{0}%
\doifdefined{\s!executechemical#1}
@@ -2260,9 +2262,9 @@
{\dosingleargument\dodefinechemical}
\def\getpredefinedchemical#1%
- {\doifdefinedelse{\??chemical#1}
+ {\doifelsedefined{\??chemical#1}
{\getvalue{\??chemical#1}}
- {\doifdefinedelse{#1}
+ {\doifelsedefined{#1}
{\getvalue{#1}}
{\writestatus{ppchtex}{unknown chemical definition #1}}}}
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
new file mode 100644
index 000000000..5a9d48551
--- /dev/null
+++ b/tex/context/base/publ-aut.lua
@@ -0,0 +1,876 @@
+if not modules then modules = { } end modules ['publ-aut'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+end
+
+local lpeg = lpeg
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local utfchar = utf.char
+local utfsub = utf.sub
+local formatters = string.formatters
+
+local P, S, C, V, Cs, Ct, Cg, Cf, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Cc
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local context = context
+----- commands = commands
+
+local implement = interfaces.implement
+local ctx_setmacro = interfaces.setmacro
+
+local publications = publications
+
+local datasets = publications.datasets
+local getcasted = publications.getcasted
+
+local allocate = utilities.storage.allocate
+
+local chardata = characters.data
+
+local trace_hashing = false trackers.register("publications.authorhash", function(v) trace_hashing = v end)
+
+local report = logs.reporter("publications","authors")
+local report_cite = logs.reporter("publications","cite")
+
+local v_last = interfaces.variables.last
+
+-- local function makesplitter(separator)
+-- return Ct { "start",
+-- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
+-- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
+-- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
+-- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
+-- }
+-- end
+
+-- authorlist = { authorspec and authorspec and authorspec }
+-- authorspec = composedname
+-- authorspec = surnames, firstnames
+-- authorspec = von, surnames, firstnames
+-- authorspec = von, surnames, jr, firstnames
+-- authorspec = von, surnames, jr, firstnames, initials
+
+local space = lpegpatterns.whitespace
+local comma = P(",")
+local period = P(".")
+local dash = P("-")
+local firstcharacter = lpegpatterns.utf8byte
+local utf8character = lpegpatterns.utf8character
+local p_and = space^1 * (P("and") + P("&&") + P("++")) * space^1
+local p_comma = space^0 * comma * space^0
+local p_space = space^1
+local p_shortone = C((utf8character -dash-period)^1)
+local p_longone = C( utf8character) * (1-dash-period)^0
+
+local p_empty = P("{}")/"" * #(p_space^0 * (P(-1) + P(",")))
+
+local andsplitter = Ct { "start",
+ start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local commasplitter = Ct { "start",
+ start = Cs(V("outer")) + (p_empty + Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local spacesplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local p_initial = p_shortone * period * dash^0
+ + p_longone * (period + dash + P(-1))
+local initialsplitter = p_initial * P(-1) + Ct((p_initial)^1)
+
+local optionsplitter = Cf(Ct("") * Cg(C((1-space)^1) * space^0 * Cc(true))^1,rawset)
+
+local function is_upper(str)
+ local first = lpegmatch(firstcharacter,str)
+ local okay = chardata[first]
+ return okay and okay.category == "lu"
+end
+
+-- local cleaner = Cs( ( P("{}")/"" + P(1) )^1 )
+
+local cache = allocate() -- 33% reuse on tugboat.bib
+local nofhits = 0
+local nofused = 0
+
+publications.authorcache = cache
+
+local function makeinitials(firstnames)
+ if firstnames and #firstnames > 0 then
+ local initials = { }
+ for i=1,#firstnames do
+ initials[i] = lpegmatch(initialsplitter,firstnames[i])
+ end
+ return initials
+ end
+end
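+
+-- An illustrative sketch (not part of the module): makeinitials reduces first
+-- names to initials, keeping composed names as sublists, e.g.
+--
+--   makeinitials { "Jean-Pierre", "Hans" }   -- -> { { "J", "P" }, "H" }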
+
+local authormap = allocate()
+publications.authormap = authormap
+
+local function splitauthor(author)
+ local detail = cache[author]
+ if detail then
+ return detail
+ end
+ local remapped = authormap[author]
+ if remapped then
+ report("remapping %a to %a",author,remapped)
+ local detail = cache[remapped]
+ if detail then
+ cache[author] = detail
+ return detail
+ end
+ end
+ local author = remapped or author
+ local firstnames, vons, surnames, initials, juniors, options
+ local split = lpegmatch(commasplitter,author)
+ local n = #split
+ detail = {
+ original = author,
+ snippets = n,
+ }
+ if n == 1 then
+ -- {First Middle von Last}
+ local words = lpegmatch(spacesplitter,author)
+ firstnames, vons, surnames = { }, { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ if i <= n then
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ elseif #vons == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ else
+ -- mess
+ end
+ if #surnames == 0 then
+ -- safeguard
+ firstnames = { }
+ vons = { }
+ surnames = { author }
+ else
+ initials = makeinitials(firstnames)
+ end
+ elseif n == 2 then
+ -- {Last, First}
+ -- {von Last, First}
+ firstnames, vons, surnames = { }, { }, { }
+ local words = lpegmatch(spacesplitter,split[1])
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ --
+ local words = lpegmatch(spacesplitter,split[2])
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ vons[#vons+1], i = words[i], i + 1
+ end
+ if surnames and firstnames and #surnames == 0 then
+ -- safeguard
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ end
+ initials = makeinitials(firstnames)
+ elseif n == 3 then
+ -- {von Last, First, Jr}
+ surnames = lpegmatch(spacesplitter,split[1])
+ juniors = lpegmatch(spacesplitter,split[2])
+ firstnames = lpegmatch(spacesplitter,split[3])
+ initials = makeinitials(firstnames)
+ elseif n == 4 then
+ -- {Von, Last, First, Jr}
+ vons = lpegmatch(spacesplitter,split[1])
+ surnames = lpegmatch(spacesplitter,split[2])
+ juniors = lpegmatch(spacesplitter,split[3])
+ firstnames = lpegmatch(spacesplitter,split[4])
+ initials = makeinitials(firstnames)
+ elseif n >= 5 then
+ -- {Von, Last, First, Jr, F.}
+ -- {Von, Last, First, Jr, Fr., options}
+ vons = lpegmatch(spacesplitter,split[1])
+ surnames = lpegmatch(spacesplitter,split[2])
+ juniors = lpegmatch(spacesplitter,split[3])
+ firstnames = lpegmatch(spacesplitter,split[4])
+ initials = lpegmatch(spacesplitter,split[5])
+ options = split[6]
+ if options then
+ options = lpegmatch(optionsplitter,options)
+ end
+ end
+ if firstnames and #firstnames > 0 then detail.firstnames = firstnames end
+ if vons and #vons > 0 then detail.vons = vons end
+ if surnames and #surnames > 0 then detail.surnames = surnames end
+ if initials and #initials > 0 then detail.initials = initials end
+ if juniors and #juniors > 0 then detail.juniors = juniors end
+ if options and next(options) then detail.options = options end
+ cache[author] = detail
+ nofhits = nofhits + 1
+ return detail
+end
+
+local function splitauthorstring(str)
+ if not str or str == "" then
+ return
+ end
+ nofused = nofused + 1
+
+ local remapped = authormap[str]
+ if remapped then
+ local detail = cache[remapped]
+ if detail then
+ cache[str] = detail
+ return { detail }
+ end
+ end
+
+ local authors = cache[str]
+ if authors then
+ return { authors } -- we assume one author
+ end
+
+ -- we could cache these too but it can become messy .. leave that for later
+
+ local authors = lpegmatch(andsplitter,str) or { } -- maybe fake an author
+ local nofauthors = #authors
+ for i=1,nofauthors do
+ authors[i] = splitauthor(authors[i])
+ end
+ if nofauthors > 1 and authors[nofauthors].original == "others" then
+ -- only the last one is looked at
+ authors[nofauthors] = nil
+ authors.others = true
+ end
+ return authors
+end
+
+publications.splitoneauthor = splitauthor
+publications.splitauthor = splitauthorstring
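+
+-- Illustrative usage (a sketch, not part of this file): splitting a typical
+-- bibtex author field gives one detail table per author, e.g.
+--
+--   local list = publications.splitauthor("Knuth, Donald E. and others")
+--   -- list[1].surnames   -> { "Knuth" }
+--   -- list[1].firstnames -> { "Donald", "E." }
+--   -- list[1].initials   -> { "D", "E" }
+--   -- list.others        -> true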
+
+local function the_initials(initials,symbol,connector)
+ if not symbol then
+ symbol = "."
+ end
+ if not connector then
+ connector = "-"
+ end
+ local result, r = { }, 0
+ for i=1,#initials do
+ local initial = initials[i]
+ if type(initial) == "table" then
+ local set, s = { }, 0
+ for i=1,#initial do
+ if i > 1 then
+ s = s + 1 ; set[s] = connector
+ end
+ s = s + 1 ; set[s] = initial[i]
+ s = s + 1 ; set[s] = symbol
+ end
+ r = r + 1 ; result[r] = concat(set)
+ else
+ r = r + 1 ; result[r] = initial .. symbol
+ end
+ end
+ return result
+end
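+
+-- An illustrative sketch: composed initials get the connector, plain ones just
+-- the symbol, e.g.
+--
+--   the_initials { { "J", "P" }, "K" }   -- -> { "J.-P.", "K." }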
+
+local ctx_btxsetconcat = context.btxsetconcat
+local ctx_btxsetoverflow = context.btxsetoverflow
+local ctx_btxsetinitials = context.btxsetinitials
+local ctx_btxsetfirstnames = context.btxsetfirstnames
+local ctx_btxsetvons = context.btxsetvons
+local ctx_btxsetsurnames = context.btxsetsurnames
+local ctx_btxsetjuniors = context.btxsetjuniors
+local ctx_btxciteauthorsetup = context.btxciteauthorsetup
+local ctx_btxlistauthorsetup = context.btxlistauthorsetup
+local ctx_btxsetauthorvariant = context.btxsetauthorvariant
+local ctx_btxstartauthor = context.btxstartauthor
+local ctx_btxstopauthor = context.btxstopauthor
+
+local concatstate = publications.concatstate
+local f_invalid = formatters["<invalid %s: %s>"]
+
+local currentauthordata = nil
+local currentauthorsymbol = nil
+
+local manipulators = typesetters.manipulators
+local splitmanipulation = manipulators.splitspecification
+local applymanipulation = manipulators.applyspecification
+local manipulatormethods = manipulators.methods
+
+local function value(i,field)
+ if currentauthordata then
+ local entry = currentauthordata[i]
+ if entry then
+ local value = entry[field]
+ if value and #value > 0 then
+ return value
+ end
+ end
+ end
+end
+
+implement { name = "btxcurrentfirstnames", arguments = "integer", actions = function(i) local v = value(i,"firstnames") if v then context(concat(v," ")) end end }
+implement { name = "btxcurrentinitials", arguments = "integer", actions = function(i) local v = value(i,"initials") if v then context(concat(the_initials(v,currentauthorsymbol))) end end }
+implement { name = "btxcurrentjuniors", arguments = "integer", actions = function(i) local v = value(i,"juniors") if v then context(concat(v," ")) end end }
+implement { name = "btxcurrentsurnames", arguments = "integer", actions = function(i) local v = value(i,"surnames") if v then context(concat(v," ")) end end }
+implement { name = "btxcurrentvons", arguments = "integer", actions = function(i) local v = value(i,"vons") if v then context(concat(v," ")) end end }
+
+local function btxauthorfield(i,field)
+ if currentauthordata then
+ local entry = currentauthordata[i]
+ if entry then
+ local manipulator, field = splitmanipulation(field)
+ local value = entry[field]
+ if not value or #value == 0 then
+ -- no value, no need for a message
+ elseif manipulator then
+ for i=1,#value do
+ if i > 1 then
+ context(" ")
+ end
+ context(applymanipulation(manipulator,value) or value)
+ end
+ elseif field == "initials" then
+ context(concat(the_initials(value,currentauthorsymbol)))
+ else
+ context(concat(value," "))
+ end
+ end
+ end
+end
+
+-- This is somewhat tricky: an author is not always an author but
+-- can also be a title or key, depending on the (optional) set it's
+-- in. Also, authors can be combined with years and so on, and they
+-- might be called upon mixed with other calls.
+
+local function btxauthor(dataset,tag,field,settings)
+ local split, usedfield, kind = getcasted(dataset,tag,field)
+ if kind == "author" then
+ local max = split and #split or 0
+ if max == 0 then
+ return
+ -- error
+ end
+ local absmax = max
+ local etallimit = tonumber(settings.etallimit) or 1000
+ local etaldisplay = tonumber(settings.etaldisplay) or etallimit
+ local etaloption = settings_to_hash(settings.etaloption or "")
+ local etallast = etaloption[v_last]
+ local combiner = settings.combiner
+ local symbol = settings.symbol
+ local index = settings.index
+ if not combiner or combiner == "" then
+ combiner = "normal"
+ end
+ if not symbol then
+ symbol = "."
+ end
+ local ctx_btxsetup = settings.kind == "cite" and ctx_btxciteauthorsetup or ctx_btxlistauthorsetup
+ if max > etallimit and (etaldisplay+(etallast and 1 or 0)) < max then
+ max = etaldisplay
+ else
+ etallast = false
+ end
+ currentauthordata = split
+ currentauthorsymbol = symbol
+
+ local function oneauthor(i,last,justone)
+ local author = split[i]
+ if index then
+ ctx_btxstartauthor(i,1,0)
+ elseif last then
+ ctx_btxstartauthor(i,1,0)
+ ctx_btxsetconcat(0)
+ ctx_btxsetauthorvariant(combiner)
+ else
+ local state = author.state or 0
+ ctx_btxstartauthor(i,max,state)
+ ctx_btxsetconcat(concatstate(i,max))
+ ctx_btxsetauthorvariant(combiner)
+ end
+ local initials = author.initials
+ if initials and #initials > 0 then
+ ctx_btxsetinitials() -- (concat(the_initials(initials,symbol)," "))
+ end
+ local firstnames = author.firstnames
+ if firstnames and #firstnames > 0 then
+ ctx_btxsetfirstnames() -- (concat(firstnames," "))
+ end
+ local vons = author.vons
+ if vons and #vons > 0 then
+ ctx_btxsetvons() -- (concat(vons," "))
+ end
+ local surnames = author.surnames
+ if surnames and #surnames > 0 then
+ ctx_btxsetsurnames() -- (concat(surnames," "))
+ end
+ local juniors = author.juniors
+ if juniors and #juniors > 0 then
+ ctx_btxsetjuniors() -- (concat(juniors," "))
+ end
+ if not index and i == max then
+ if split.others then
+ ctx_btxsetoverflow(1)
+ else
+ local overflow = #split - max
+ if overflow > 0 then
+ ctx_btxsetoverflow(overflow)
+ end
+ end
+ end
+ ctx_btxsetup(combiner)
+ ctx_btxstopauthor()
+ end
+ if index then
+ oneauthor(index)
+ elseif max == 1 then
+ oneauthor(1,false,true)
+ else
+ for i=1,max do
+ oneauthor(i)
+ end
+ if etallast then
+ oneauthor(absmax,true)
+ end
+ end
+ else
+ report("ignored field %a of tag %a, used field %a is no author",field,tag,usedfield)
+ end
+end
+
+implement {
+ name = "btxauthorfield",
+ actions = btxauthorfield,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "btxauthor",
+ actions = btxauthor,
+ arguments = {
+ "string",
+ "string",
+ "string",
+ {
+ { "combiner" },
+ { "kind" },
+ { "etallimit" },
+ { "etaldisplay" },
+ { "etaloption" },
+ { "symbol" },
+ }
+ }
+}
+
+local function components(snippet,short)
+ local vons = snippet.vons
+ local surnames = snippet.surnames
+ local initials = snippet.initials
+ local firstnames = not short and snippet.firstnames
+ local juniors = snippet.juniors
+ return
+ vons and #vons > 0 and concat(vons," ") or "",
+ surnames and #surnames > 0 and concat(surnames," ") or "",
+ initials and #initials > 0 and concat(the_initials(initials)," ") or "",
+ firstnames and #firstnames > 0 and concat(firstnames," ") or "",
+ juniors and #juniors > 0 and concat(juniors, " ") or ""
+end
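+
+-- An illustrative sketch: given a split author like
+--
+--   { vons = { "van" }, surnames = { "Gogh" }, initials = { "V" }, firstnames = { "Vincent" } }
+--
+-- the call components(author,false) returns the five strings "van", "Gogh",
+-- "V.", "Vincent" and "", while a true second argument drops the firstnames.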
+
+local collapsers = allocate { }
+
+publications.authorcollapsers = collapsers
+
+local function default(author) -- one author
+ local hash = author.hash
+ if hash then
+ return hash
+ end
+ local original = author.original
+ local vons = author.vons
+ local surnames = author.surnames
+ local initials = author.initials
+ local firstnames = author.firstnames
+ local juniors = author.juniors
+ local result = { }
+ local nofresult = 0
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if initials and #initials > 0 then
+ initials = the_initials(initials)
+ for j=1,#initials do
+ nofresult = nofresult + 1
+ result[nofresult] = initials[j]
+ end
+ end
+ if firstnames and #firstnames > 0 then
+ for j=1,#firstnames do
+ nofresult = nofresult + 1
+ result[nofresult] = firstnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ local hash = concat(result," ")
+ if trace_hashing then
+ report("hash: %s -> %s",original,hash)
+ end
+ author.hash = hash
+ return hash
+end
+
+local authorhashers = { }
+publications.authorhashers = authorhashers
+
+-- todo: some hashing
+
+local function name(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local surnames = author.surnames
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+table.setmetatableindex(authorhashers,function(t,k)
+ t[k] = name
+ return name
+end)
+
+authorhashers.normal = function(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local firstnames = author.firstnames
+ local juniors = author.juniors
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if firstnames and #firstnames > 0 then
+ for j=1,#firstnames do
+ nofresult = nofresult + 1
+ result[nofresult] = firstnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+authorhashers.normalshort = function(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local initials = author.initials
+ local juniors = author.juniors
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if initials and #initials > 0 then
+ initials = the_initials(initials)
+ for j=1,#initials do
+ nofresult = nofresult + 1
+ result[nofresult] = initials[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+authorhashers.normalinverted = authorhashers.normal
+authorhashers.invertedshort = authorhashers.normalshort
+
+local p_clean = Cs ( (
+ P("\\btxcmd") / "" -- better keep the argument
+ + S("`~!@#$%^&*()_-+={}[]:;\"\'<>,.?/|\\") / ""
+ + lpeg.patterns.utf8character
+ )^1)
+
+authorhashers.short = function(authors)
+ -- a short is a really dumb hardcoded kind of tag and we only support
+ -- this one because some users might expect it, not because it makes
+ -- sense
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return "unk"
+ elseif n == 1 then
+ local surnames = authors[1].surnames
+ if not surnames or #surnames == 0 then
+ return "err"
+ else
+ local s = surnames[1]
+ local c = lpegmatch(p_clean,s)
+ if s ~= c then
+ report_cite("name %a cleaned to %a for short construction",s,c)
+ end
+ return utfsub(c,1,3)
+ end
+ else
+ local t = { }
+ for i=1,n do
+ if i > 3 then
+ t[#t+1] = "+" -- indeed
+ break
+ end
+ local surnames = authors[i].surnames
+ if not surnames or #surnames == 0 then
+ t[#t+1] = "?"
+ else
+ local s = surnames[1]
+ local c = lpegmatch(p_clean,s)
+ if s ~= c then
+ report_cite("name %a cleaned to %a for short construction",s,c)
+ end
+ t[#t+1] = utfsub(c,1,1)
+ end
+ end
+ return concat(t)
+ end
+ else
+ return utfsub(authors,1,3)
+ end
+end
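+
+-- An illustrative sketch: the short hash takes three characters of a single
+-- surname and otherwise one character per author (at most three, then a "+"):
+--
+--   authorhashers.short { { surnames = { "Knuth" } } }                              -- -> "Knu"
+--   authorhashers.short { { surnames = { "Knuth" } }, { surnames = { "Plass" } } }  -- -> "KP"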
+
+collapsers.default = default
+
+local function authorwriter(key,index)
+ if not key then
+ return ""
+ end
+ if type(key) == "string" then
+ return key
+ end
+ local n = #key
+ if n == 0 then
+ return ""
+ end
+ if index then
+ if not key[index] then
+ return ""
+ end
+ elseif n == 1 then
+ index = 1
+ end
+ if index then
+ local author = key[index]
+ local options = author.options
+ if options then
+ for option in next, options do
+ local collapse = collapsers[option]
+ if collapse then
+ return collapse(author)
+ end
+ end
+ end
+ local hash = default(author)
+ -- if trace_hashing then
+ -- report("hash: %s",hash)
+ -- end
+ return hash
+ end
+ local t = { }
+ local s = 0
+ for i=1,n do
+ local author = key[i]
+ local options = author.options
+ s = s + 1
+ if options then
+ local done = false
+ for option in next, options do
+ local collapse = collapsers[option]
+ if collapse then
+ t[s] = collapse(author)
+ done = true
+ end
+ end
+ if not done then
+ t[s] = default(author)
+ end
+ else
+ t[s] = default(author)
+ end
+ end
+ local hash = concat(t," & ")
+ -- if trace_hashing then
+ -- report("hash: %s",hash)
+ -- end
+ return hash
+end
+
+local function writer(key)
+ return authorwriter(key) -- discard extra arguments in the caller
+end
+
+publications.writers .author = writer
+publications.casters .author = splitauthorstring
+publications.components.author = components
+
+-- sharedmethods.author = {
+-- { field = "key", default = "", unknown = "" },
+-- { field = "author", default = "", unknown = "" },
+-- { field = "title", default = "", unknown = "" },
+-- }
+
+-- Analysis of the APA by Alan:
+--
+-- first : key author editor publisher title journal volume number pages
+-- second: year suffix title month day journal volume number
+
+publications.sortmethods.authoryear = {
+ sequence = {
+ -- { field = "key", default = "ZZZZ", unknown = "ZZZZ" },
+ { field = "author", default = "", unknown = "" },
+ { field = "year", default = "9998", unknown = "9999" },
+ -- { field = "suffix", default = " ", unknown = " " },
+ { field = "month", default = "13", unknown = "14" },
+ { field = "day", default = "32", unknown = "33" },
+ { field = "journal", default = "", unknown = "" },
+ { field = "volume", default = "", unknown = "" },
+ -- { field = "number", default = "", unknown = "" },
+ { field = "pages", default = "", unknown = "" },
+ { field = "title", default = "", unknown = "" },
+ { field = "index", default = "", unknown = "" },
+ },
+}
+
+implement {
+ name = "btxremapauthor",
+ arguments = { "string", "string" },
+ actions = function(k,v)
+ publications.authormap[k] = v
+ end
+}
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
new file mode 100644
index 000000000..36ba15000
--- /dev/null
+++ b/tex/context/base/publ-dat.lua
@@ -0,0 +1,1194 @@
+if not modules then modules = { } end modules ['publ-dat'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
+-- todo: store bibroot and bibrootdt
+-- todo: dataset = datasets[dataset] => current = datasets[dataset]
+-- todo: maybe split this file
+
+--[[ldx--
+<p>This is a prelude to integrated bibliography support. This file just loads
+bibtex files and converts them to xml so that we can access the content
+in a convenient way. Actual handling of the data takes place elsewhere.</p>
+--ldx]]--
+
+if not characters then
+ dofile(resolvers.findfile("char-utf.lua"))
+ dofile(resolvers.findfile("char-tex.lua"))
+end
+
+local chardata = characters.data
+local lowercase = characters.lower
+
+local lower, find, sub = string.lower, string.find, string.sub
+local concat, copy, tohash = table.concat, table.copy, table.tohash
+local next, type, rawget = next, type, rawget
+local utfchar = utf.char
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local textoutf = characters and characters.tex.toutf
+local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
+local formatters = string.formatters
+local sortedkeys, sortedhash, keys = table.sortedkeys, table.sortedhash, table.keys
+local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xml.convert
+local setmetatableindex = table.setmetatableindex
+
+-- todo: more allocate
+
+local P, R, S, V, C, Cc, Cs, Ct, Carg, Cmt, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg, lpeg.Cmt, lpeg.Cp
+
+local p_whitespace = lpegpatterns.whitespace
+local p_utf8character = lpegpatterns.utf8character
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local trace_duplicates = true trackers.register("publications.duplicates", function(v) trace_duplicates = v end)
+
+local report = logs.reporter("publications")
+local report_duplicates = logs.reporter("publications","duplicates")
+
+local allocate = utilities.storage.allocate
+
+local commands = commands
+local implement = interfaces.implement
+
+publications = publications or { }
+local publications = publications
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+local writers = publications.writers or { }
+publications.writers = writers
+
+local tables = publications.tables or { }
+publications.tables = tables
+
+publications.statistics = publications.statistics or { }
+local publicationsstats = publications.statistics
+
+local loaders = publications.loaders or { }
+publications.loaders = loaders
+
+local casters = { }
+publications.casters = casters
+
+-- local sorters = { }
+-- publications.sorters = sorters
+--
+-- local indexers = { }
+-- publications.indexers = indexers
+
+local components = { }
+publications.components = components -- register components
+
+local enhancers = publications.enhancers or { }
+publications.enhancers = enhancers
+
+local enhancer = publications.enhancer or utilities.sequencers.new { arguments = "dataset" }
+publications.enhancer = enhancer
+
+utilities.sequencers.appendgroup(enhancer,"system") -- private
+
+publicationsstats.nofbytes = 0
+publicationsstats.nofdefinitions = 0
+publicationsstats.nofshortcuts = 0
+publicationsstats.nofdatasets = 0
+
+local privates = allocate {
+ category = true,
+ tag = true,
+ index = true,
+ suffix = true,
+ specification = true,
+}
+
+local specials = allocate {
+ key = true,
+ crossref = true,
+ keywords = true,
+ language = true,
+ comment = true,
+}
+
+local implicits = allocate {
+ category = "implicit",
+ tag = "implicit",
+ key = "implicit",
+ keywords = "implicit",
+ language = "implicit",
+ crossref = "implicit",
+}
+
+local origins = allocate {
+ "optional",
+ "extra",
+ "required",
+ "virtual",
+}
+
+local virtuals = allocate {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+}
+
+local defaulttypes = allocate {
+ author = "author",
+ editor = "author",
+ publisher = "author",
+ page = "pagenumber",
+ pages = "pagenumber",
+ keywords = "keyword",
+ doi = "url",
+ url = "url",
+}
+
+local defaultsets = allocate {
+ page = { "page", "pages" },
+}
+
+tables.implicits = implicits
+tables.origins = origins
+tables.virtuals = virtuals
+tables.types = defaulttypes
+tables.sets = defaultsets
+tables.privates = privates
+tables.specials = specials
+
+local variables = interfaces and interfaces.variables or setmetatableindex("self")
+
+local v_all = variables.all
+local v_default = variables.default
+
+if not publications.usedentries then
+ function publications.usedentries()
+ return { }
+ end
+end
+
+local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
+
+local defaultshortcuts = allocate {
+ jan = "1",
+ feb = "2",
+ mar = "3",
+ apr = "4",
+ may = "5",
+ jun = "6",
+ jul = "7",
+ aug = "8",
+ sep = "9",
+ oct = "10",
+ nov = "11",
+ dec = "12",
+}
+
+local space = p_whitespace^0
+local separator = space * "+" * space
+local l_splitter = lpeg.tsplitat(separator)
+local d_splitter = lpeg.splitat (separator)
+
+local unknownfield = function(t,k)
+ local v = "extra"
+ t[k] = v
+ return v
+end
+
+local unknowncategory = function(t,k)
+ local v = {
+ required = false,
+ optional = false,
+ virtual = false,
+ fields = setmetatableindex(unknownfield), -- this will remember them
+ types = unknowntypes,
+ sets = setmetatableindex(defaultsets), -- new, but rather small
+ }
+ t[k] = v
+ return v
+end
+
+local unknowntype = function(t,k)
+ local v = "string"
+ t[k] = v
+ return v
+end
+
+local default = {
+ name = name,
+ version = "1.00",
+ comment = "unknown specification.",
+ author = "anonymous",
+ copyright = "no one",
+ categories = setmetatableindex(unknowncategory),
+ types = setmetatableindex(defaulttypes,unknowntype),
+}
+
+-- maybe at some point we can have a handlers table with a found, fetch, ...
+-- method per field
+
+local function checkfield(specification,category,data)
+ local list = setmetatableindex({},implicits)
+ data.fields = list
+ data.category = category
+ local sets = data.sets or { }
+ for i=1,#origins do
+ local t = origins[i]
+ local d = data[t]
+ if d then
+ for i=1,#d do
+ local di = d[i]
+ di = sets[di] or di
+ if type(di) == "table" then
+ for i=1,#di do
+ list[di[i]] = t
+ end
+ else
+ list[di] = t
+ end
+ end
+ else
+ data[t] = { }
+ end
+ end
+ return data
+end
+
+local specifications = setmetatableindex(function(t,name)
+ if not name then
+ return default -- initializer
+ end
+ local filename = formatters["publ-imp-%s.lua"](name)
+ local fullname = resolvers.findfile(filename) or ""
+ if fullname == "" then
+ report("no data definition file %a for %a",filename,name)
+ return default
+ end
+ local specification = table.load(fullname)
+ if not specification then
+ report("invalid data definition file %a for %a",fullname,name)
+ return default
+ end
+ --
+ local categories = specification.categories
+ if not categories then
+ categories = { }
+ specification.categories = categories
+ end
+ setmetatableindex(categories,unknowncategory)
+ --
+ local types = specification.types
+ if not types then
+ types = defaulttypes
+ specification.types = types
+ end
+ setmetatableindex(types,unknowntype)
+ --
+ local fields = setmetatableindex(unknownfield)
+ specification.fields = fields
+ --
+ local virtual = specification.virtual
+ if virtual == nil then -- so false is valid
+ virtual = { }
+ elseif virtual == false then
+ virtual = { }
+ elseif type(virtual) ~= "table" then
+ virtual = virtuals
+ end
+ specification.virtual = virtual
+ specification.virtualfields = tohash(virtual)
+ --
+ for category, data in next, categories do
+ categories[category] = checkfield(specification,category,copy(data)) -- we make sure we have no clones
+ end
+ --
+ t[name] = specification
+ --
+ return specification
+end)
+
+publications.specifications = specifications
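+
+-- A rough sketch (an assumption, derived from how the loader above reads such a
+-- file) of what a publ-imp-<name>.lua specification can look like:
+--
+-- return {
+--     name       = "test",
+--     version    = "1.00",
+--     categories = {
+--         article = {
+--             required = { "author", "title", "journal", "year" },
+--             optional = { "volume", "number", "pages" },
+--         },
+--     },
+-- }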
+
+function publications.setcategory(target,category,data)
+ local specification = specifications[target]
+ specification.categories[category] = checkfield(specification,category,data)
+end
+
+function publications.parenttag(dataset,tag)
+ if not dataset or not tag then
+ report("error in specification, dataset %a, tag %a",dataset,tag)
+ elseif find(tag,"%+") then
+ local tags = lpegmatch(l_splitter,tag)
+ local parent = tags[1]
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local first = luadata[parent]
+ if first then
+ local detail = details[parent]
+ local children = detail.children
+ if not children then
+ children = { }
+ detail.children = children
+ end
+ -- add new ones but only once
+ for i=2,#tags do
+ local tag = tags[i]
+ for j=1,#children do
+ if children[j] == tag then
+ tag = false
+ end
+ end
+ if tag then
+ local entry = luadata[tag]
+ if entry then
+ local detail = details[tag]
+ children[#children+1] = tag
+ if detail.parent then
+ report("error in combination, dataset %a, tag %a, parent %a, ignored %a",dataset,tag,detail.parent,parent)
+ else
+ report("combining, dataset %a, tag %a, parent %a",dataset,tag,parent)
+ detail.parent = parent
+ end
+ end
+ end
+ end
+ return parent
+ end
+ end
+ return tag or ""
+end
+
+function publications.new(name)
+ publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
+ local dataset = {
+ name = name or "dataset " .. publicationsstats.nofdatasets,
+ nofentries = 0,
+ shortcuts = { },
+ luadata = { },
+ suffixes = { },
+ xmldata = xmlconvert(xmlplaceholder),
+ details = { },
+ ordered = { },
+ nofbytes = 0,
+ entries = nil, -- empty == all
+ sources = { },
+ loaded = { },
+ fields = { },
+ userdata = { },
+ used = { },
+ commands = { }, -- for statistical purposes
+ status = {
+ resources = false,
+ userdata = false,
+ },
+ specifications = {
+ -- used specifications
+ },
+ suffixed = false,
+ }
+ -- we delay details till we need them (maybe we should just delay the
+ -- individual fields but that is tricky as there can be some
+ -- dependencies)
+ return dataset
+end
+
+setmetatableindex(datasets,function(t,k)
+ if type(k) == "table" then
+ return k -- so we can use this accessor as checker
+ else
+ local v = publications.new(k)
+ datasets[k] = v
+ return v
+ end
+end)
+
+local function getindex(dataset,luadata,tag)
+ local found = luadata[tag]
+ if found then
+ local index = found.index or 0
+ dataset.ordered[tag] = index
+ return index
+ else
+ local index = dataset.nofentries + 1
+ dataset.nofentries = index
+ dataset.ordered[index] = tag
+ return index
+ end
+end
+
+publications.getindex = getindex
+
+do
+
+ -- we apply some normalization
+
+ local space = S(" \t\n\r\f") -- / " "
+ local collapsed = space^1/" "
+ ----- csletter = R("az","AZ")
+ local csletter = lpegpatterns.csletter
+
+ ----- command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
+ ----- command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ ----- command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ local command = P("\\") * (Carg(1) * C(csletter^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ local whatever = P("\\") * P(" ")^1 / " "
+ + P("\\") * ( P("hbox") + P("raise") ) -- bah
+ local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
+ ----- character = lpegpatterns.utf8character
+ local any = P(1)
+ local done = P(-1)
+ -- local one_l = P("{") / ""
+ -- local one_r = P("}") / ""
+ -- local two_l = P("{{") / ""
+ -- local two_r = P("}}") / ""
+ local zero_l_r = P("{}") / "" * #P(1)
+ local special = P("#") / "\\letterhash "
+
+ local filter_0 = S('\\{}#')
+ local filter_1 = (1-filter_0)^0 * filter_0
+ local filter_2 = Cs(
+ -- {{...}} ... {{...}}
+ -- two_l * (command + special + any - two_r - done)^0 * two_r * done +
+ -- one_l * (command + special + any - one_r - done)^0 * one_r * done +
+ (
+ somemath +
+ whatever +
+ command +
+ special +
+ collapsed +
+ zero_l_r +
+ any
+ )^0
+ )
+
+ -- Currently we expand shortcuts and for large ones (like the acknowledgements
+ -- in tugboat.bib) this is not that efficient. However, eventually strings get
+ -- hashed again.
+
+ local function do_shortcut(key,value,dataset)
+ publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
+ dataset.shortcuts[key] = value
+ end
+
+ -- todo: categories : metatable that lowers and also counts
+ -- todo: fields : metatable that lowers
+
+ local tags = table.setmetatableindex("table")
+
+ local function do_definition(category,tag,tab,dataset)
+ publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
+ if tag == "" then
+ tag = "no-tag-set"
+ end
+ local fields = dataset.fields
+ local luadata = dataset.luadata
+ local hashtag = tag
+ if luadata[tag] then
+ local t = tags[tag]
+ local d = dataset.name
+ local n = (t[d] or 0) + 1
+ t[d] = n
+ hashtag = tag .. "-" .. n
+ if trace_duplicates then
+ local p = { }
+ for k, v in sortedhash(t) do
+ p[#p+1] = formatters["%s:%s"](k,v)
+ end
+ report_duplicates("tag %a is present multiple times: % t, assigning hashtag %a",tag,p,hashtag)
+ end
+ end
+ local index = getindex(dataset,luadata,hashtag)
+ local entries = {
+ category = lower(category),
+ tag = tag,
+ index = index,
+ }
+ for i=1,#tab,2 do
+ local original = tab[i]
+ local normalized = fields[original]
+ if not normalized then
+ normalized = lower(original) -- we assume ascii fields
+ fields[original] = normalized
+ end
+ -- if entries[normalized] then
+ if rawget(entries,normalized) then
+ if trace_duplicates then
+ report_duplicates("redundant field %a is ignored for tag %a in dataset %a",normalized,tag,dataset.name)
+ end
+ else
+ local value = tab[i+1]
+ value = textoutf(value)
+ if lpegmatch(filter_1,value) then
+ value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ end
+ if normalized == "crossref" then
+ local parent = luadata[value]
+ if parent then
+ setmetatableindex(entries,parent)
+ else
+ -- warning
+ end
+ end
+ entries[normalized] = value
+ end
+ end
+ luadata[hashtag] = entries
+ end
+
+ local function resolve(s,dataset)
+ return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
+ end
+
+ local pattern = p_whitespace^0
+ * C(P("message") + P("warning") + P("error") + P("comment")) * p_whitespace^0 * P(":")
+ * p_whitespace^0
+ * C(P(1)^1)
+
+ local function do_comment(s,dataset)
+ local how, what = lpegmatch(pattern,s)
+ if how and what then
+ local t = string.splitlines(utilities.strings.striplines(what))
+ local b = file.basename(dataset.fullname or dataset.name or "unset")
+ for i=1,#t do
+ report("%s > %s : %s",b,how,t[i])
+ end
+ end
+ end
+
+ local percent = P("%")
+ local start = P("@")
+ local comma = P(",")
+ local hash = P("#")
+ local escape = P("\\")
+ local single = P("'")
+ local double = P('"')
+ local left = P('{')
+ local right = P('}')
+ local both = left + right
+ local lineending = S("\n\r")
+ local space = S(" \t\n\r\f") -- / " "
+ local spacing = space^0
+ local equal = P("=")
+ ----- collapsed = (space^1)/ " "
+ local collapsed = p_whitespace^1/" "
+ local nospaces = p_whitespace^1/""
+
+ local p_left = (p_whitespace^0 * left) / ""
+ local p_right = (right * p_whitespace^0) / ""
+
+ local balanced = P {
+ [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right))^1 + V(2))^0,
+ [2] = left * V(1) * right,
+ }
+
+ -- local unbalanced = P {
+ -- [1] = left * V(2) * right,
+ -- [2] = ((escape * (left+right)) + (collapsed + 1 - (left+right))^1 + V(1))^0,
+ -- }
+
+ local unbalanced = (left/"") * balanced * (right/"") * P(-1)
+
+ local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
+ local key = C((1-space-equal)^1)
+ local tag = C((1-space-comma)^0)
+ local reference = keyword
+ local category = C((1-space-left)^1)
+ local s_quoted = ((escape*single) + collapsed + (1-single))^0
+ local d_quoted = ((escape*double) + collapsed + (1-double))^0
+
+ local b_value = p_left * balanced * p_right
+ -- local u_value = p_left * unbalanced * p_right -- get rid of outer { }
+ -- local s_value = (single/"") * (u_value + s_quoted) * (single/"")
+ -- local d_value = (double/"") * (u_value + d_quoted) * (double/"")
+ local s_value = (single/"") * (unbalanced + s_quoted) * (single/"")
+ local d_value = (double/"") * (unbalanced + d_quoted) * (double/"")
+ local r_value = reference * Carg(1) /resolve
+
+ local somevalue = d_value + b_value + s_value + r_value
+ local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
+
+ value = value / function(s) return lpegmatch(lpegpatterns.stripper,s) end
+
+ local forget = percent^1 * (1-lineending)^0
+ local spacing = spacing * forget^0 * spacing
+ local assignment = spacing * key * spacing * equal * spacing * value * spacing
+ local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
+
+ local crapword = C((1-space-left)^1)
+ local shortcut = Cmt(crapword,function(_,p,s) return lower(s) == "string" and p end) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
+ local comment = Cmt(crapword,function(_,p,s) return lower(s) == "comment" and p end) * spacing * lpegpatterns.argument * Carg(1) / do_comment
+
+ local casecrap = #S("sScC") * (shortcut + comment)
+
+ local bibtotable = (space + forget + P("@") * (casecrap + definition) + 1)^0
+
+ -- todo \%
+
+ -- loadbibdata -> dataset.luadata
+ -- loadtexdata -> dataset.luadata
+ -- loadluadata -> dataset.luadata
+
+ -- converttoxml -> dataset.xmldata from dataset.luadata
+
+ function publications.loadbibdata(dataset,content,source,kind)
+ if not source then
+ report("invalid source for dataset %a",dataset)
+ return
+ end
+ local current = datasets[dataset]
+ local size = #content
+ if size == 0 then
+ report("empty source %a for dataset %a",source,current.name)
+ else
+ report("adding bib data to set %a from source %a",current.name,source)
+ end
+ statistics.starttiming(publications)
+ publicationsstats.nofbytes = publicationsstats.nofbytes + size
+ current.nofbytes = current.nofbytes + size
+ if source then
+ table.insert(current.sources, { filename = source, checksum = md5.HEX(content) })
+ current.loaded[source] = kind or true
+ end
+ current.newtags = #current.luadata > 0 and { } or current.newtags
+ lpegmatch(bibtotable,content or "",1,current)
+ statistics.stoptiming(publications)
+ end
+
+end
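+
+-- An illustrative sketch (not part of the module): feeding a small snippet of
+-- bib data ends up as a lua table in the dataset, roughly like this:
+--
+--   publications.loadbibdata("test", [[@article{knuth1984, title = {Literate Programming}, year = 1984}]], "inline", "bib")
+--   -- publications.datasets.test.luadata.knuth1984 is then roughly:
+--   --   { category = "article", tag = "knuth1984", index = 1,
+--   --     title = "Literate Programming", year = "1984" }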
+
+do
+
+ -- we could use xmlescape again
+
+ local cleaner_0 = S('<>&')
+ local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
+ local cleaner_2 = Cs ( (
+ P("<") / "&lt;" +
+ P(">") / "&gt;" +
+ P("&") / "&amp;" +
+ P(1)
+ )^0)
+
+ local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+
+ function publications.converttoxml(dataset,nice,dontstore,usedonly,subset) -- we have fields !
+ local current = datasets[dataset]
+ local luadata = subset or (current and current.luadata)
+ if luadata then
+ statistics.starttiming(publications)
+ --
+ local result, r, n = { }, 0, 0
+ local usedonly = usedonly and publications.usedentries()
+ --
+ r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+ r = r + 1 ; result[r] = "<bibtex>"
+ --
+ if nice then -- will be default
+ local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
+ local s_entry_stop = " </entry>"
+ local f_field = formatters[" <field name='%s'>%s</field>"]
+ for tag, entry in sortedhash(luadata) do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
+ for key, value in sortedhash(entry) do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = s_entry_stop
+ n = n + 1
+ end
+ end
+ else
+ local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
+ local s_entry_stop = "</entry>"
+ local f_field = formatters["<field name='%s'>%s</field>"]
+ for tag, entry in next, luadata do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
+ for key, value in next, entry do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = s_entry_stop
+ n = n + 1
+ end
+ end
+ end
+ --
+ r = r + 1 ; result[r] = "</bibtex>"
+ --
+ result = concat(result,nice and "\n" or nil)
+ --
+ if dontstore then
+ -- indeed
+ else
+ statistics.starttiming(xml)
+ current.xmldata = xmlconvert(result, {
+ resolve_entities = true,
+ resolve_predefined_entities = true, -- in case we have escaped entities
+ -- unify_predefined_entities = true, -- &#038; -> &amp;
+ utfize_entities = true,
+ } )
+ statistics.stoptiming(xml)
+ if lxml then
+ lxml.register(formatters["btx:%s"](current.name),current.xmldata)
+ end
+ end
+ statistics.stoptiming(publications)
+ return result, n
+ end
+ end
+
+end
+
+do
+
+ local function resolvedname(dataset,filename)
+ local current = datasets[dataset]
+ if type(filename) ~= "string" then
+ report("invalid filename %a",tostring(filename))
+ end
+ local fullname = resolvers.findfile(filename,"bib")
+ if fullname == "" then
+ fullname = resolvers.findfile(filename) -- let's not be too picky
+ end
+ if not fullname or fullname == "" then
+ report("no file %a",filename)
+ current.fullname = filename
+ return current, false
+ else
+ current.fullname = fullname
+ return current, fullname
+ end
+ end
+
+ publications.resolvedname = resolvedname
+
+ local cleaner = false
+ local cleaned = false
+
+ function loaders.registercleaner(what,fullname)
+ if not fullname or fullname == "" then
+ report("no %s file %a",what,fullname)
+ return
+ end
+ local list = table.load(fullname)
+ if not list then
+ report("invalid %s file %a",what,fullname)
+ return
+ end
+ list = list.replacements
+ if not list then
+ report("no replacement table in %a",fullname)
+ return
+ end
+ if cleaned then
+ report("adding replacements from %a",fullname)
+ for k, v in next, list do
+ cleaned[k] = v
+ end
+ else
+ report("using replacements from %a",fullname)
+ cleaned = list
+ end
+ cleaner = true
+ end
+
+ function loaders.bib(dataset,filename,kind)
+ local dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ local data = io.loaddata(fullname) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",fullname)
+ return
+ end
+ if cleaner == true then
+ cleaner = Cs((lpeg.utfchartabletopattern(keys(cleaned)) / cleaned + p_utf8character)^1)
+ end
+ if cleaner ~= false then
+ data = lpegmatch(cleaner,data)
+ end
+ if trace then
+ report("loading file %a",fullname)
+ end
+ publications.loadbibdata(dataset,data,fullname,kind)
+ end
+
+ function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ local current, data, fullname
+ if type(filename) == "table" then
+ current = datasets[dataset]
+ data = filename
+ else
+ dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ current = datasets[dataset]
+ data = table.load(fullname)
+ end
+ if data then
+ local luadata = current.luadata
+ -- we want the same index each run
+ for tag, entry in sortedhash(data) do
+ if type(entry) == "table" then
+ entry.index = getindex(current,luadata,tag)
+ entry.tag = tag
+ luadata[tag] = entry -- no cleaning yet
+ end
+ end
+ end
+ end
+
+ function loaders.buffer(dataset,name) -- if filename is a table we load that one
+ local current = datasets[dataset]
+ local barename = file.removesuffix(name)
+ local data = buffers.getcontent(barename) or ""
+ if data == "" then
+ report("empty buffer %a, nothing loaded",barename)
+ return
+ end
+ if trace then
+ report("loading buffer",barename)
+ end
+ publications.loadbibdata(current,data,barename,"bib")
+ end
+
+ function loaders.xml(dataset,filename)
+ local dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local root = xml.load(fullname)
+ for bibentry in xmlcollected(root,"/bibtex/entry") do
+ local attributes = bibentry.at
+ local tag = attributes.tag
+ local entry = {
+ category = attributes.category,
+ tag = tag, -- afterwards also set, to prevent overload
+ index = 0, -- prelocated
+ }
+ for field in xmlcollected(bibentry,"/field") do
+ entry[field.at.name] = field.dt[1] -- no cleaning yet | xmltext(field)
+ end
+ entry.index = getindex(current,luadata,tag)
+ entry.tag = tag
+ luadata[tag] = entry
+ end
+ end
+
+ setmetatableindex(loaders,function(t,filetype)
+ local v = function(dataset,filename)
+ report("no loader for file %a with filetype %a",filename,filetype)
+ end
+ t[filetype] = v
+ return v
+ end)
+
+ function publications.load(specification)
+ local current = datasets[specification.dataset or v_default]
+ local files = settings_to_array(specification.filename)
+ local kind = specification.kind
+ local dataspec = specification.specification
+ statistics.starttiming(publications)
+ for i=1,#files do
+ local filetype, filename = string.splitup(files[i],"::")
+ if not filename then
+ filename = filetype
+ filetype = file.suffix(filename)
+ end
+ if filename then
+ if not filetype or filetype == "" then
+ filetype = "bib"
+ end
+ if file.suffix(filename) == "" then
+ filename = file.addsuffix(filename,filetype)
+ end
+ loaders[filetype](current,filename)
+ if kind then
+ current.loaded[current.fullname or filename] = kind
+ end
+ if dataspec then
+ current.specifications[dataspec] = true
+ end
+ end
+ end
+ local runner = enhancer.runner
+ if runner then
+ runner(current)
+ end
+ statistics.stoptiming(publications)
+ return current
+ end
+
+end
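+
+-- Illustrative usage (assumed): a load specification names the dataset and the
+-- file, and the suffix (or an explicit "filetype::name" prefix) picks the loader:
+--
+--   publications.load { dataset = "standard", filename = "mybibliography.bib" }
+--   publications.load { dataset = "standard", filename = "lua::mydata" }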
+
+do
+
+ function enhancers.order(dataset)
+ local luadata = dataset.luadata
+ local ordered = dataset.ordered
+ for i=1,#ordered do
+ local tag = ordered[i]
+ if type(tag) == "string" then
+ ordered[i] = luadata[tag]
+ end
+ end
+ end
+
+ function enhancers.details(dataset)
+ local luadata = dataset.luadata
+ local details = dataset.details
+ for tag, entry in next, luadata do
+ if not details[tag] then
+ details[tag] = { }
+ end
+ end
+ end
+
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.order")
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.details")
+
+end
+
+do
+
+ local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
+ local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
+
+ function publications.analyze(dataset)
+ local current = datasets[dataset]
+ local data = current.luadata
+ local categories = { }
+ local fields = { }
+ local commands = { }
+ for k, v in next, data do
+ categories[v.category] = (categories[v.category] or 0) + 1
+ for k, v in next, v do
+ fields[k] = (fields[k] or 0) + 1
+ lpegmatch(checktex,v,1,commands)
+ end
+ end
+ current.analysis = {
+ categories = categories,
+ fields = fields,
+ commands = commands,
+ }
+ end
+
+end
+
+function publications.tags(dataset)
+ return sortedkeys(datasets[dataset].luadata)
+end
+
+function publications.sortedentries(dataset)
+ return sortedhash(datasets[dataset].luadata)
+end
+
+-- a helper:
+
+function publications.concatstate(i,n)
+ if i == 0 then
+ return 0
+ elseif i == 1 then
+ return 1
+ elseif i == 2 and n == 2 then
+ return 4
+ elseif i == n then
+ return 3
+ else
+ return 2
+ end
+end
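+
+-- An illustrative sketch of the states that the typesetter gets back:
+--
+--   publications.concatstate(1,3)   -- 1 : first of several
+--   publications.concatstate(2,3)   -- 2 : somewhere in the middle
+--   publications.concatstate(3,3)   -- 3 : the last one
+--   publications.concatstate(2,2)   -- 4 : the second of exactly two
+--   publications.concatstate(0,3)   -- 0 : nothing to concatenate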
+
+-- savers
+
+do
+
+ local savers = { }
+
+ local s_preamble = [[
+% this is an export from context mkiv
+
+@preamble{
+ \ifdefined\btxcmd
+ % we're probably in context
+ \else
+ \def\btxcmd#1{\csname#1\endcsname}
+ \fi
+}
+
+]]
+
+ function savers.bib(dataset,filename,tobesaved)
+ local f_start = formatters["@%s{%s,\n"]
+ local f_field = formatters[" %s = {%s},\n"]
+ local s_stop = "}\n\n"
+ local result = { s_preamble }
+ local n, r = 0, 1
+ for tag, data in sortedhash(tobesaved) do
+ r = r + 1 ; result[r] = f_start(data.category or "article",tag)
+ for key, value in sortedhash(data) do
+ if not privates[key] then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ r = r + 1 ; result[r] = s_stop
+ n = n + 1
+ end
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ io.savedata(filename,concat(result))
+ end
+
+ function savers.lua(dataset,filename,tobesaved)
+ local list = { }
+ local n = 0
+ for tag, data in next, tobesaved do
+ local t = { }
+ for key, value in next, data do
+ if not privates[key] then
+ t[key] = value
+ end
+ end
+ list[tag] = t
+ n = n + 1
+ end
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ table.save(filename,list)
+ end
+
+ function savers.xml(dataset,filename,tobesaved)
+ local result, n = publications.converttoxml(dataset,true,true,false,tobesaved)
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ io.savedata(filename,result)
+ end
+
+ function publications.save(specification)
+ local dataset = specification.dataset
+ local filename = specification.filename
+ local filetype = specification.filetype
+ local criterium = specification.criterium
+ statistics.starttiming(publications)
+ if not filename or filename == "" then
+ report("no filename for saving given")
+ return
+ end
+ if not filetype or filetype == "" then
+ filetype = file.suffix(filename)
+ end
+ if not criterium or criterium == "" then
+ criterium = v_all
+ end
+ local saver = savers[filetype]
+ if saver then
+ local current = datasets[dataset]
+ local luadata = current.luadata or { }
+ local tobesaved = { }
+ local result = structures.lists.filter({criterium = criterium, names = "btx"}) or { }
+ for i=1,#result do
+ local userdata = result[i].userdata
+ if userdata then
+ local set = userdata.btxset or v_default
+ if set == dataset then
+ local tag = userdata.btxref
+ if tag then
+ tobesaved[tag] = luadata[tag]
+ end
+ end
+ end
+ end
+ saver(dataset,filename,tobesaved)
+ else
+ report("unknown format %a for saving %a",filetype,dataset)
+ end
+ statistics.stoptiming(publications)
+ return dataset
+ end
+
+ implement {
+ name = "btxsavedataset",
+ actions = publications.save,
+ arguments = {
+ {
+ { "dataset" },
+ { "filename" },
+ { "filetype" },
+ { "criterium" },
+ }
+ }
+ }
+
+end
+
+-- casters
+
+do
+
+ publications.detailed = setmetatableindex(function(detailed,kind)
+ local values = setmetatableindex(function(values,value)
+ local caster = casters[kind]
+ local cast = caster and caster(value) or value
+ values[value] = cast
+ return cast
+ end)
+ detailed[kind] = values
+ return values
+ end)
+
+ local keywordsplitter = utilities.parsers.groupedsplitat(";,")
+
+ casters.keyword = function(str)
+ return lpegmatch(keywordsplitter,str)
+ end
+
+
+ writers.keyword = function(k)
+ if type(k) == "table" then
+ return concat(k,";")
+ else
+ return k
+ end
+ end
+
+ local pagessplitter = lpeg.splitat(P("-")^1)
+
+ casters.range = function(str)
+ local first, last = lpegmatch(pagessplitter,str)
+ return first and last and { first, last } or str
+ end
+
+ writers.range = function(p)
+ if type(p) == "table" then
+ return concat(p,"-")
+ else
+ return p
+ end
+ end
+
+ casters.pagenumber = casters.range
+ writers.pagenumber = writers.range
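+
+ -- An illustrative sketch: casters turn raw field strings into structured
+ -- values and writers turn them back, e.g.
+ --
+ --   casters.range("123-145")         -- -> { "123", "145" }
+ --   writers.range { "123", "145" }   -- -> "123-145"
+ --   casters.keyword("tex;luatex")    -- -> { "tex", "luatex" }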
+
+end
diff --git a/tex/context/base/publ-fnd.lua b/tex/context/base/publ-fnd.lua
new file mode 100644
index 000000000..32d0c11be
--- /dev/null
+++ b/tex/context/base/publ-fnd.lua
@@ -0,0 +1,298 @@
+if not modules then modules = { } end modules ['publ-fnd'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-utf.lua"))
+end
+
+-- this tracker is only for real debugging and not for the average user
+
+local trace_match = false trackers.register("publications.match", function(v) trace_match = v end)
+
+local publications = publications
+
+local tonumber, next, type = tonumber, next, type
+local find = string.find
+local P, R, S, C, Cs, Cp, Cc, Carg, Ct, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Carg, lpeg.Ct, lpeg.V
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local concat = table.concat
+
+local formatters = string.formatters
+local lowercase = characters.lower
+local topattern = string.topattern
+
+publications = publications or { } -- for testing
+
+local report = logs.reporter("publications","match")
+
+local colon = P(":")
+local dash = P("-")
+local lparent = P("(")
+local rparent = P(")")
+local space = lpegpatterns.whitespace
+local utf8char = lpegpatterns.utf8character
+local valid = 1 - colon - space - lparent - rparent
+----- key = C(valid^1)
+local key = C(R("az","AZ")^1)
+local wildcard = C("*")
+local word = Cs(lpegpatterns.unquoted + lpegpatterns.argument + valid^1)
+local simple = C(valid^1)
+local number = C(valid^1)
+
+local key = C(R("az","AZ")^1)
+local contains = S(":~")
+local exact = P("=")
+local valid = (1 - space - lparent -rparent)^1
+local wildcard = P("*") / ".*"
+local single = P("?") / "."
+local dash = P("-") / "%."
+local percent = P("-") / "%%"
+local word = Cs(lpegpatterns.unquoted + lpegpatterns.argument + valid)
+local range = P("<") * space^0 * C((1-space)^1) * space^1 * C((1-space- P(">"))^1) * space^0 * P(">")
+
+local f_key_fld = formatters[" local kf_%s = get(entry,%q) \n if kf_%s then kf_%s = lower(kf_%s) end"]
+local f_key_set = formatters[" local ks_%s = get(entry,%q,categories)\n if ks_%s then ks_%s = lower(ks_%s) end"]
+local f_number_fld = formatters[" local nf_%s = tonumber(get(entry,%q))"]
+local f_number_set = formatters[" local ns_%s = tonumber(get(entry,%q,categories))"]
+
+local f_fld_exact = formatters["(kf_%s == %q)"]
+local f_set_exact = formatters["(ks_%s == %q)"]
+local f_fld_contains = formatters["(kf_%s and find(kf_%s,%q))"]
+local f_set_contains = formatters["(ks_%s and find(ks_%s,%q))"]
+local f_fld_between = formatters["(nf_%s and nf_%s >= %s and nf_%s <= %s)"]
+local f_set_between = formatters["(ns_%s and ns_%s >= %s and ns_%s <= %s)"]
+
+local f_all_match = formatters["anywhere(entry,%q)"]
+
+local function test_key_value(keys,where,key,first,last)
+ if not key or key == "" then
+ return "(false)"
+ elseif key == "*" then
+ last = "^.*" .. topattern(lowercase(last)) .. ".*$" -- todo: make an lpeg
+ return f_all_match(last)
+ elseif first == false then
+ -- exact
+ last = lowercase(last)
+ if where == "set" then
+ keys[key] = f_key_set(key,key,key,key,key)
+ return f_set_exact(key,last)
+ else
+ keys[key] = f_key_fld(key,key,key,key,key)
+ return f_fld_exact(key,last)
+ end
+ elseif first == true then
+ -- contains
+ last = "^.*" .. topattern(lowercase(last)) .. ".*$"
+ if where == "set" then
+ keys[key] = f_key_set(key,key,key,key,key)
+ return f_set_contains(key,key,last)
+ else
+ keys[key] = f_key_fld(key,key,key,key,key)
+ return f_fld_contains(key,key,last)
+ end
+ else
+ -- range
+ if where == "set" then
+ keys[key] = f_number_set(key,key)
+ return f_set_between(key,key,tonumber(first),key,tonumber(last))
+ else
+ keys[key] = f_number_fld(key,key)
+ return f_fld_between(key,key,tonumber(first),key,tonumber(last))
+ end
+ end
+end
+
+local p_compare = P { "all",
+ all = (V("one") + V("operator") + V("nested") + C(" "))^1,
+ nested = C("(") * V("all") * C(")"), -- C really needed?
+ operator = C("and")
+ + C("or")
+ + C("not"),
+ one = Carg(1)
+ * V("where")
+ * V("key")
+ * (V("how") * V("word") + V("range"))
+ / test_key_value,
+ key = key
+ + C("*"),
+ where = C("set") * P(":")
+ + Cc(""),
+ how = contains * Cc(true)
+ + exact * Cc(false),
+ word = word,
+ range = range,
+}
+
+-- local p_combine = space^0 * (P(",")/" or ") * space^0
+
+-- local pattern = Cs((P("match")/"" * space^0 * p_compare + p_combine)^1)
+
+local comma = P(",")
+local p_spaces = space^0
+local p_combine = p_spaces * comma * p_spaces / " or "
+local p_expression = P("match")/"" * Cs(p_compare)
+ + Carg(1)
+ * Cc("")
+ * Cc("tag")
+ * Cc(false)
+ * (
+ P("tag") * p_spaces * P("(") * Cs((1-S(")")-space)^1) * p_spaces * P(")")
+ + p_spaces * Cs((1-space-comma)^1) * p_spaces
+ ) / test_key_value
+
+local pattern = Cs {
+ [1] = V(2) * (p_combine * V(2))^0,
+ [2] = p_expression,
+}
+
+-- -- -- -- -- -- -- -- -- -- -- -- --
+-- -- -- -- -- -- -- -- -- -- -- -- --
+
+function publications.anywhere(entry,str) -- helpers
+ for k, v in next, entry do
+ if find(lowercase(v),str) then
+ return true
+ end
+ end
+end
+
+-- todo: use an environment instead of
+
+-- table={
+-- { "match", "((kf_editor and find(kf_editor,\"^.*braslau.*$\")))" },
+-- { "hash", "foo1234" },
+-- { "tag", "bar5678" },
+-- }
+
+local f_template = formatters[ [[
+local find = string.find
+local lower = characters.lower
+local anywhere = publications.anywhere
+local get = publications.getfuzzy
+local specification = publications.currentspecification
+local categories = specification and specification.categories
+return function(entry)
+%s
+ return %s and true or false
+end
+]] ]
+
+local function compile(dataset,expr)
+ local keys = { }
+ -- local expression = lpegmatch(pattern,expr,start,keys)
+ local expression = lpegmatch(pattern,expr,1,keys)
+ if trace_match then
+ report("compiling expression: %s",expr)
+ end
+ local definitions = { }
+ for k, v in next, keys do
+ definitions[#definitions+1] = v
+ end
+ if #definitions == 0 then
+ report("invalid expression: %s",expr)
+ elseif trace_match then
+ for i=1,#definitions do
+ report("% 3i : %s",i,definitions[i])
+ end
+ end
+ definitions = concat(definitions,"\n")
+ local code = f_template(definitions,expression)
+ if trace_match then
+ report("generated code: %s",code)
+ end
+ local finder = loadstring(code) -- use an environment
+ if type(finder) == "function" then
+ finder = finder()
+ if type(finder) == "function" then
+ return finder, code
+ end
+ end
+ report("invalid expression: %s",expr)
+ return false
+end
+
+-- local function test(str)
+-- local keys = { }
+-- local definitions = { }
+-- local expression = lpegmatch(pattern,str,1,keys)
+-- for k, v in next, keys do
+-- definitions[#definitions+1] = v
+-- end
+-- definitions = concat(definitions,"\n")
+-- print(f_template(definitions,expression))
+-- end
+
+-- test("match(foo:bar and (foo:bar or foo:bar))")
+-- test("match(foo=bar and (foo=bar or foo=bar))")
+-- test("match(set:foo:bar),match(set:foo:bar)")
+-- test("match(set:foo=bar)")
+-- test("match(foo:{bar bar})")
+-- test("match(foo={bar bar})")
+-- test("match(set:foo:'bar bar')")
+-- test("match(set:foo='bar bar')")
+-- test("match(set:foo<1000 2000>)")
+-- test("match(set:foo<1000 2000>)")
+-- test("match(*:foo)")
+-- test("match(*:*)")
+
+local trigger = (P("match") + P("tag")) * p_spaces * P("(")
+local check = (1-trigger)^0 * trigger
+
+local function finder(dataset,expression)
+ local found = lpegmatch(check,expression) and compile(dataset,expression) or false
+ if found then
+ local okay, message = pcall(found,{})
+ if not okay then
+ found = false
+ report("error in match: %s",message)
+ end
+ end
+ return found
+end
+
+-- finder("match(author:foo)")
+-- finder("match(author:foo and author:bar)")
+-- finder("match(author:foo or (author:bar and page:123))")
+-- finder("match(author:foo),match(author:foo)")
+
+publications.finder = finder
+
+function publications.search(dataset,expression)
+ local find = finder(dataset,expression)
+ if find then
+ local ordered = dataset.ordered
+ local target = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ if not target[tag] then
+ -- we always take the first
+ target[tag] = entry
+ end
+ end
+ end
+ return target
+ else
+ return { } -- { dataset.luadata[expression] } -- ?
+ end
+end
+
+-- local d = publications.datasets.default
+--
+-- local d = publications.load {
+-- dataset = "default",
+-- filename = "t:/manuals/mkiv/hybrid/tugboat.bib"
+-- }
+--
+-- inspect(publications.search(d,[[match(author:hagen)]]))
+-- inspect(publications.search(d,[[match(author:hagen and author:hoekwater and year:1990-2010)]]))
+-- inspect(publications.search(d,[[match(author:"Bogusław Jackowski")]]))
+-- inspect(publications.search(d,[[match(author:"Bogusław Jackowski" and (tonumber(field:year) or 0) > 2000)]]))
+-- inspect(publications.search(d,[[Hagen:TB19-3-304]]))
diff --git a/tex/context/base/publ-imp-apa.lua b/tex/context/base/publ-imp-apa.lua
new file mode 100644
index 000000000..1d894f261
--- /dev/null
+++ b/tex/context/base/publ-imp-apa.lua
@@ -0,0 +1,523 @@
+local specification = {
+ --
+ -- metadata
+ --
+ name = "apa",
+ version = "1.00",
+ comment = "APA specification",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ --
+ -- derived (combinations of) fields (all share the same default set)
+ --
+ virtual = {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+ },
+ --
+ -- special datatypes
+ --
+ types = {
+ --
+ -- list of fields that are interpreted as names: "NAME [and NAME]" where
+ -- NAME is one of the following:
+ --
+ -- First vons Last
+ -- vons Last, First
+ -- vons Last, Jrs, First
+ -- Vons, Last, Jrs, First
+ --
+ author = "author", -- interpreted as name(s)
+ editor = "author",
+ artist = "author",
+ composer = "author",
+ producer = "author",
+ director = "author",
+ doi = "url", -- an external link
+ url = "url",
+ page = "pagenumber", -- number or range: f--t
+ pages = "pagenumber",
+ volume = "range",
+ number = "range",
+ keywords = "keyword", -- comma|-|separated list
+ year = "number",
+ },
+ --
+ -- categories with their specific fields
+ --
+ categories = {
+ --
+ -- categories are added below
+ --
+ },
+}
+
+local generic = {
+ --
+ -- A set returns the first field (in order of position below) that is found
+ -- present in an entry. A set having the same name as a field conditionally
+ -- allows the substitution of an alternate field.
+ --
+ -- note that anything can get assigned a doi or be available online.
+ doi = { "doi", "url" },
+ editionset = { "edition", "volume", "number", "pages" },
+}
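+
+-- A hypothetical illustration of how such a set resolves: given an entry that
+-- provides url but no doi, say
+--
+-- local entry = { author = "An Author", url = "http://www.pragma-ade.com" }
+--
+-- the set doi = { "doi", "url" } makes a request for doi fall back to the url
+-- field, while editionset yields the first of edition, volume, number or pages
+-- that happens to be present.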
+
+-- Definition of recognized categories and the fields that they contain.
+-- Required fields should be present; optional fields may also be rendered;
+-- all other fields will be ignored.
+
+-- Sets contain either/or in order of precedence.
+--
+-- For a category *not* defined here yet present in the dataset, *all* fields
+-- are taken as optional. This allows for flexibility in the addition of new
+-- categories.
+
+local categories = specification.categories
+
+-- an article from a journal
+
+categories.article = {
+ sets = {
+ author = { "author", "editor", "title" },
+ doi = generic.doi,
+ },
+ required = {
+ "author"
+ },
+ optional = {
+ "year",
+ "subtitle", "type", "file",
+ "journal", "volume", "number", "pages",
+ "doi", "note",
+ },
+}
+
+-- an article from a magazine
+
+categories.magazine = {
+ sets = categories.article.sets,
+ required = {
+ "author",
+ "year",
+ "journal",
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "number",
+ "month", "day",
+ "doi", "note",
+ },
+}
+
+categories.newspaper = categories.magazine
+
+-- (from jabref) to be identified and setup ...
+
+categories.periodical = {
+ sets = {
+ author = { "editor", "publisher" },
+ doi = generic.doi,
+ },
+ required = {
+ "title",
+ "year",
+ },
+ optional = {
+ "author",
+ "subtitle", "file",
+ "series", "volume", "number", "month",
+ "organization",
+ "doi", "note",
+ },
+}
+
+-- (from jabref) to be identified and setup ...
+
+categories.standard = {
+ sets = {
+ author = { "author", "institution", "organization" },
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "year",
+ "title", "subtitle",
+ "doi", "note",
+ },
+ optional = {
+ },
+}
+
+-- a book with an explicit publisher.
+
+categories.book = {
+ sets = {
+ author = { "author", "editor", "publisher", "title" },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = { "author" },
+ optional = {
+ "year", "month", "day",
+ "subtitle", "type", "file",
+ "editionset", "series",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- a part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+
+categories.inbook = {
+ sets = {
+ author = { "author", "editor", "publisher", "title", },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "year" ,
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "booktitle",
+ -- APA ignores this: "chapter",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- a book having its own title as part of a collection.
+-- (like inbook, but we here make booktitle required)
+
+categories.incollection = {
+ sets = {
+ author = { "author", "editor", "publisher", "title", },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "booktitle",
+ "year",
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "editionset", "series",
+ -- APA ignores this: "chapter",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- a work that is printed and bound, but without a named publisher or sponsoring institution.
+
+categories.booklet = {
+ sets = {
+ author = { "author", "title", },
+ publisher = { "howpublished" }, -- no "publisher"!
+ doi = generic.doi,
+ },
+ required = {
+ "author"
+ },
+ optional = {
+ "publisher",
+ "year", "month",
+ "subtitle", "type", "file",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- the proceedings of a conference.
+
+categories.proceedings = {
+ sets = {
+ author = { "editor", "organization", "publisher", "title" }, -- no "author"!
+ publisher = { "publisher", "organization" },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "year"
+ },
+ optional = {
+ "publisher",
+ "subtitle", "file",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- an article in a conference proceedings.
+
+categories.inproceedings = {
+ sets = categories.incollection.sets,
+ required = categories.incollection.required,
+ optional = {
+ "subtitle", "type", "file",
+ "month",
+ "edition", "series",
+ "address", "organization",
+ "doi", "note",
+ },
+}
+
+categories.conference = categories.inproceedings
+
+-- a thesis (of course).
+
+categories.thesis = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "school",
+ "year",
+ "type"
+ },
+ optional = {
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+categories.mastersthesis = {
+ sets = categories.thesis.sets,
+ required = {
+ "author",
+ "title",
+ "school",
+ "year"
+ },
+ optional = {
+ "type",
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+categories.phdthesis = categories.mastersthesis
+
+-- a report published by a school or other institution, usually numbered within a series.
+
+categories.techreport = {
+ sets = {
+ author = { "author", "institution", "publisher", "title" },
+ publisher = { "publisher", "institution", },
+ editionset = { "type", "volume", "number", "pages" }, -- no "edition"!
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "institution",
+ "year"
+ },
+ optional = {
+ "publisher",
+ "address",
+ "subtitle", "file",
+ "editionset",
+ "month",
+ "doi", "note",
+ },
+}
+
+-- technical documentation.
+
+categories.manual = {
+ sets = {
+ author = { "author", "organization", "publisher", "title" },
+ publisher = { "publisher", "organization", },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "title"
+ },
+ optional = {
+ "author", "publisher",
+ "address",
+ "subtitle", "file",
+ "editionset", "month", "year",
+ "doi", "note",
+-- "abstract",
+ },
+}
+
+-- a patent (of course).
+
+categories.patent = {
+ sets = {
+ author = { "author", "assignee", },
+ publisher = { "publisher", "assignee", },
+ year = { "year", "yearfiled", },
+ month = { "month", "monthfiled", },
+ day = { "day", "dayfiled", },
+ doi = generic.doi,
+ },
+ required = {
+ "nationality",
+ "number",
+ "year",
+ },
+ optional = {
+ "type",
+ --check this: "language",
+ "author", "publisher",
+ "title", "subtitle", "file",
+ "address",
+ "day", "month",
+ "doi", "note"
+ },
+}
+
+-- a document having an author and title, but not formally published.
+
+categories.unpublished = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "note"
+ },
+ optional = {
+ "subtitle", "file",
+ "year", "month",
+ "doi"
+ },
+}
+
+-- like misc below but includes organization.
+
+categories.electronic = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "title"
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "year", "month",
+ "author",
+ "address",
+ "organization",
+ "howpublished",
+ "doi", "note"
+ },
+}
+
+-- not bibtex categories...
+
+categories.film = {
+ sets = {
+ doi = generic.doi,
+ author = { "producer", "director", },
+ },
+ required = {
+ "author",
+ "title",
+ "year",
+ "address", "publisher", -- aka studio
+ },
+ optional = {
+ "type",
+ "note",
+ "doi",
+ },
+}
+
+categories.music = {
+ sets = {
+ doi = generic.doi,
+ author = { "composer", "artist", "title", "album" },
+ title = { "title", "album", },
+ },
+ required = {
+ "author",
+ "title",
+ "year",
+ "address", "publisher", -- aka label
+ },
+ optional = {
+ "type",
+ "note",
+ "doi",
+ },
+}
+
+-- use this type when nothing else fits.
+
+categories.misc = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ -- nothing is really important here
+ },
+ optional = {
+ "author",
+ "title", "subtitle", "file",
+ "year", "month",
+ "howpublished",
+ "doi", "note",
+ },
+}
+
+-- other (whatever jabref does not know!)
+
+categories.other = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "year"
+ },
+ optional = {
+ "subtitle", "file",
+ "doi", "note",
+ },
+}
+
+-- if all else fails to match:
+
+categories.literal = {
+ sets = {
+ author = { "key" },
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "text"
+ },
+ optional = {
+ "doi", "note"
+ },
+ virtual = false,
+}
+
+-- done
+
+return specification
diff --git a/tex/context/base/publ-imp-apa.mkvi b/tex/context/base/publ-imp-apa.mkvi
new file mode 100644
index 000000000..9ab624bf8
--- /dev/null
+++ b/tex/context/base/publ-imp-apa.mkvi
@@ -0,0 +1,1465 @@
+%D \module
+%D [ file=publ-imp-apa,
+%D version=2013.12.12,
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\startbtxrenderingdefinitions[apa]
+
+\ifdefined\c!translate \else \def\c!translate{translate} \fi
+
+%D Reference:
+%D \startTEX
+%D @Book{APA2010,
+%D title ={Publication Manual of the American Psychological Association},
+%D year ={2010},
+%D edition ={Sixth},
+%D address ={Washington, DC},
+%D publisher={American Psychological Association},
+%D Xpages ={291},
+%D url ={http://www.apa.org/books/},
+%D }
+%D \stopTEX
+
+% set ALL specific APA compliant values
+
+\definebtx
+ [apa]
+ [\c!default=default,
+ \c!specification=apa,
+ \c!otherstext={\space\btxlabeltext{apa:others}},
+ %c!journalconversion=\v!normal,
+ \c!monthconversion=\v!month]
+
+% First, define list and rendering parameters
+
+% The APA style sorts the unnumbered rendered list by authoryear
+
+\definebtxrendering
+ [apa]
+ [\c!specification=apa,
+ \c!sorttype=authoryear,
+ \c!numbering=\v!no]
+
+\setupbtxlist
+ [apa]
+ [\c!alternative=\v!paragraph,
+ %\c!width=\v!fit,
+ %\c!distance=.5\emwidth,
+ \c!margin=3.5\emwidth]
+
+\definebtx
+ [apa:\s!list]
+ [apa]
+ [\c!otherstext={,\nobreakspace\textellipsis\space},
+ \c!etallimit=7,
+ \c!etaldisplay=6,
+ \c!etaloption=last,
+ \c!authorconversion=invertedshort,
+ \c!separator:names:2={,\space}, % aka namesep - in this namespace
+ \c!separator:names:3={,\nobreakspace\textampersand\space}, % comma separated list
+ \c!separator:names:4={\nobreakspace\textampersand\space}] % last of two, no comma!
+
+% First, we define a namespace for a few special fields
+
+\definebtx
+ [apa:\s!list:author]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:editor]
+ [apa:\s!list:author]
+
+\definebtx
+ [apa:\s!list:suffix]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:url]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:doi]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:\s!page]
+ [apa:\s!list]
+ [\c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space},
+ \c!separator:4={\space\btxlabeltext{apa:and}\space},
+ \c!left={\btxleftparenthesis},
+ \c!right={\btxrightparenthesis},
+ \c!command={\wordright}]
+
+\definebtx
+ [apa:\s!list:numbering]
+ [apa:\s!list]
+ [\c!right={\space}]
+
+\definebtx
+ [apa:\s!list:numbering:default]
+ [apa:\s!list:numbering]
+
+\definebtx
+ [apa:\s!list:numbering:num]
+ [apa:\s!list:numbering]
+ [\c!stopper={.}]
+
+\definebtx
+ [apa:\s!list:numbering:short]
+ [apa:\s!list:numbering]
+
+\definebtx
+ [apa:\s!list:numbering:tag]
+ [apa:\s!list:numbering]
+
+\definebtx
+ [apa:\s!list:numbering:index]
+ [apa:\s!list:numbering]
+
+% Next, we define a namespace for each category
+
+%D In order to be able to get journals expanded (or normalized or abbreviated) you need
+%D to load a list:
+%D
+%D \starttyping
+%D \btxloadjournallist[journals.txt] % the jabref list
+%D \stoptyping
+
+\definebtx
+ [apa:\s!list:journal]
+ [apa:\s!list]
+ [\c!style=\v!italic]
+ %command=\btxexpandedjournal] % btxabbreviatedjournal
+
+\definebtx
+ [apa:\s!list:volume]
+ [apa:\s!list]
+ [\c!style=\v!italic]
+
+\definebtx
+ [apa:\s!list:title]
+ [apa:\s!list]
+ [\c!style=\v!italic,
+ \c!command=\Word,
+ \c!translate=\v!yes]
+
+\definebtx
+ [apa:\s!list:title:article]
+ [apa:\s!list:title]
+ [\c!style=] % journal is set in italics
+
+\definebtx
+ [apa:\s!list:title:magazine]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:newspaper]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:periodical]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:standard]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:book]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:inbook]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:incollection]
+ [apa:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [apa:\s!list:title:proceedings]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:inproceedings]
+ [apa:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [apa:\s!list:title:conference]
+ [apa:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [apa:\s!list:title:thesis]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:phdthesis]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:mastersthesis]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:booklet]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:manual]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:techreport]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:unpublished]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:patent]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:electronic]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:music]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:film]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:other]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:misc]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:literal]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:type]
+ [\c!command=\Word]
+
+% Then define and set cite parameters.
+
+\definebtx
+ [apa:\s!cite]
+ [apa]
+ [\c!alternative=authoryear,
+ \c!otherstext={,\space\btxlabeltext{apa:others}},
+ \c!etallimit=5,
+ \c!etaldisplay=1, % TODO: when 2-4, show all first time, etaldisplay subsequently...
+ \c!authorconversion=\v!name,
+ \c!sorttype=normal,
+ \c!compress=\v!yes, % note that cite sorts only work with compress=yes.
+ \c!separator:names:2={,\space},
+ \c!separator:names:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:names:4={\space\btxlabeltext{apa:and}\space}] % not \textampersand
+
+\definebtx
+ [apa:\s!cite:author]
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:authoryear]
+ [apa:\s!cite:author]
+ [\c!left={(},
+ \c!right={)},
+ \c!inbetween={,\space}]
+
+\definebtx
+ [apa:\s!cite:default]
+ [apa:\s!cite:authoryear]
+
+\definebtx
+ [apa:\s!cite:authoryears]
+ [apa:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space}]
+
+\definebtx
+ [apa:\s!cite:authornum]
+ [apa:\s!cite:author]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [apa:\s!cite:author:num] % todo
+ [apa:\s!cite:authornum]
+ [\c!left={\space[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:author:year] % todo
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:author:years] % todo
+ [apa:\s!cite:authoryears]
+ [\c!inbetween=,
+ \c!left={\space(},
+ \c!right={)}]
+
+\definebtx
+ [apa:\s!cite:year]
+ [apa:\s!cite]
+ [\c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}] % not \textampersand
+
+\definebtx
+ [apa:\s!cite:title]
+ [apa:\s!cite]
+ [\c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!sorttype=none,
+ \c!style=\v!italic]
+
+\definebtx
+ [apa:\s!cite:booktitle]
+ [apa:\s!cite:title]
+
+\definebtx
+ [apa:\s!cite:tag]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:index]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:page]
+ [apa:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}] % not \textampersand
+
+\definebtx
+ [apa:\s!cite:pages]
+ [apa:\s!cite:page]
+
+\definebtx
+ [apa:\s!cite:keywords]
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:short]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:category]
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:url]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:doi]
+ [apa:\s!cite:url]
+
+\definebtx
+ [apa:\s!cite:num]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]},
+ \c!separator:2={,}, % no space
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+\definebtx
+ [apa:\s!cite:textnum]
+ [apa:\s!cite:num]
+ [\c!left={Ref.\nbsp},
+ \c!right=,
+ \c!separator:2={,\space},
+ \c!separator:3={\space\btxlabeltext{apa:and}\space},
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}]
+
+\definebtx
+ [apa:\s!cite:entry]
+ [apa:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space},
+ \c!separator:2={;\space},
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+% Now we setup for the details of the renderings
+
+%D Sometimes we have verbose injections in an entry and these can be language
+%D dependent, so we use labels.
+%D
+%D Because we want to mix rendering (in the manual) we need a namespace in label
+%D texts:
+
+\setupbtxlabeltext
+ [en]
+ [apa:and=and,
+ apa:number={no.},
+ apa:edition={ed.},
+ apa:Editor={Ed.},
+ apa:Editors={Eds.},
+ apa:Volume={Vol.},
+ apa:Volumes={Vols.},
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={n.d.}, % no date
+ apa:mastersthesis={Master's thesis},
+ apa:phdthesis={Doctoral dissertation},
+ apa:technicalreport={Tech. Rep.}, % Technical report
+ apa:supplement={Suppl.}, % Supplement (not used?)
+ apa:patent=Patent,
+ apa:MotionPicture={Motion picture},
+ apa:Producer=Producer,
+ apa:Producers=Producers,
+ apa:Director=Director,
+ apa:Directors=Directors,
+ apa:Recordedby={Recorded by},
+ apa:Author=Author,
+ apa:Translator={Trans.}, % Translator(s) (not used?)
+ apa:Advanced={Advanced online publication},
+ apa:Retrieved={Available from}, % {Retrieved from},
+ apa:In=In]
+
+\setupbtxlabeltext
+ [nl]
+ [apa:and=en,
+ apa:number={nr.},
+ apa:edition={ed.}, % editie
+ apa:Editor=Editor, % Ed./Eds.
+ apa:Editors=Editors,
+ apa:Volume={Vol.},
+ apa:Volumes={Vols.},
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={g.d.}, % geen datum
+ apa:mastersthesis=Masterproef,
+ apa:phdthesis=Proefschrift,
+ apa:technicalreport={Technisch rapport}, % Technical report
+ apa:supplement=Supplement,
+ apa:patent=Octrooi,
+ apa:MotionPicture=Film, % ?
+ apa:Producer=Producent, % ?
+ apa:Producers=Producenten, % ?
+ apa:Director=Directeur,
+ apa:Directors=Directeurs,
+ apa:Recordedby={Opgenomen door}, % ?
+ apa:Author=Auteur,
+ apa:Translator=Vertaler,
+ apa:Advanced={Geavanceerde online publicatie},
+ apa:Retrieved={Beschikbaar vanaf}, % {Ontvangen van},
+ apa:In=In]
+
+\setupbtxlabeltext
+ [fr]
+ [apa:and=et,
+ apa:number={n\high{o}},
+ apa:edition={édition},
+ apa:Editor=Éditeur,
+ apa:Editors=Éditeurs,
+ apa:Volume=Volume,
+ apa:Volumes=Volumes,
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={s.d.}, % sans date
+ apa:mastersthesis={Thèse de master (DEA, DESS, master)},
+ apa:phdthesis={Thèse de doctorat},
+ apa:technicalreport={Rapport technique},
+ apa:supplement=Supplément,
+ apa:patent=Brevet,
+ apa:MotionPicture={Film cinématographique},
+ apa:Producer=Producteur,
+ apa:Producers=Producteurs,
+ apa:Director=Réalisateur,
+ apa:Directors=Réalisateurs,
+ apa:Recordedby={Enregistré par},
+ apa:Author=Auteur,
+ apa:Translator=Traducteur,
+ apa:Advanced={Publication en ligne anticipée},
+ apa:Retrieved={Disponible à}, % {Téléchargé de},
+ apa:In=Dans]
+
+\setupbtxlabeltext
+ [de]
+ [apa:and=und,
+ apa:number={nr.},
+ apa:edition=Auf\/lage,
+ apa:Editor=Herausgeber, % Hrsg./Hg.
+ apa:Editors=Herausgeber,
+ apa:Volume=Band, % Bd.
+ apa:Volumes={Bände},
+ apa:others={et al.},
+ apa:page={S.},
+ apa:pages={S.},
+ apa:nd={o.D.}, % ohne Datum (mostly: o.J. / ohne Jahr)
+ apa:mastersthesis={Masterarbeit},
+ apa:phdthesis={Dissertation},
+ apa:technicalreport={Technischer Bericht},
+ apa:supplement={Beilage}, % Supplement
+ apa:patent=Patent,
+ apa:MotionPicture=Kinofilm, % ?
+ apa:Producer=Producer, % ?
+ apa:Producers=Produzenten, % ?
+ apa:Director=Director, % ?
+ apa:Directors=Directors, % ?
+ apa:Recordedby={per Einschreiben}, % ?
+ apa:Author=Autor,
+ apa:Translator={Übersetzer}, % Übers.
+ apa:Advanced={Erweiterte Online-Publikation},
+ apa:Retrieved={heruntergeladen von},
+ apa:In=In]
+
+% thanks: Andrea Valle
+
+\setupbtxlabeltext
+ [it]
+ [apa:and=e,
+ apa:number={nº},
+ apa:edition={ed.}, % edizione
+ apa:Editor={A cura di},
+ apa:Editors={A cura di},
+ apa:Volume={Vol.}, % Volume
+ apa:Volumes={Vol.}, % Volumi
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={s.d.}, % senza data
+ apa:mastersthesis={Tesi di laurea},
+ apa:phdthesis={Tesi di dottorato},
+ apa:technicalreport={Relazione tecnica},
+ apa:supplement={Supplemento},
+ apa:patent=Brevetto,
+ apa:MotionPicture=Film, % ?
+ apa:Producer=Produttore,
+ apa:Producers=Produttori,
+ apa:Director=Direttore,
+ apa:Directors=Direttori,
+ apa:Recordedby={Registrato da},
+ apa:Author=Autore,
+ apa:Translator={Trad.}, % Translator(s)
+ apa:Advanced={Pre-pubblicazione on line},
+ apa:Retrieved={Accessible online},
+ apa:In=In]
+
+\setupbtxlabeltext
+ [es]
+ [apa:and=y,
+ apa:number={nº},
+ apa:edition={ed.}, % edición
+ apa:Editor=Editor, % Ed./Eds.
+ apa:Editors=Editores,
+ apa:Volume={Vol.}, % Volumen
+ apa:Volumes={Vols.}, % Volúmenes
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={s.f.}, % sin fecha
+ apa:mastersthesis={Tesis de maestría},
+ apa:phdthesis={Tesis doctoral},
+ apa:technicalreport={Informe técnico},
+ apa:supplement=Suplemento,
+ apa:patent=Patente,
+ apa:MotionPicture=Cinematográfica,
+ apa:Producer=Productor,
+ apa:Producers=Productores,
+ apa:Director=Director,
+ apa:Directors=Directores,
+ apa:Recordedby={Grabada por},
+ apa:Author=Autor,
+ apa:Translator=Traductor,
+ apa:Advanced={Publicación en línea avanzada},
+ apa:Retrieved={Disponible desde}, % {Obtenido de},
+ apa:In=En]
+
+% cite setups
+
+% The following differs from the default returning n.d. if year is empty
+
+\startsetups btx:apa:nd
+ \btxlabeltext{apa:nd}
+\stopsetups
+
+\startsetups btx:apa:cite:author:year
+ \texdefinition{\s!btx:\s!cite:concat}
+ %\btxparameter\c!left
+ \ifx\currentbtxfirst\empty
+ \fastsetup{btx:apa:nd}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \currentbtxfirst
+ }
+ \ifx\currentbtxsecond\empty \else
+ \btxparameter\c!range
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \currentbtxsecond
+ }
+ \fi
+ \btxflushsuffix
+ \fi
+ %\btxparameter\c!right
+\stopsetups
+
+\startsetups btx:apa:cite:author:years
+ \fastsetup{btx:apa:cite:author:year}
+\stopsetups
+
+% used in publ-imp-page.mkvi
+
+\startsetups [btx:apa:list:page-or-pages]
+ \ifx\currentbtxlastpage\empty
+ \btxlabeltext{apa:page}
+ \else
+ \btxlabeltext{apa:pages}
+ \fi
+ \btxnbsp
+\stopsetups
+
+% The sameauthor feature may not be APA compliant
+% (there is nothing in the manual cited above).
+% It can be removed using the command:
+% \resetsetups [apa:list:sameauthor]
+
+% :rule, :empty or :ditto ...
+
+\startsetups apa:list:sameauthor
+ \fastsetup{apa:list:sameauthor:rule}
+\stopsetups
+
+\startsetups apa:list:sameauthor:rule
+ \blackrule
+ [\c!width=\dimexpr\listparameter\c!margin-\interwordspace\relax,
+ \c!height=1.5\linewidth]% \linewidth is just too thin with respect to font strokes...
+\stopsetups
+
+\startsetups [apa:list:sameauthor:\v!empty]
+ \kern\dimexpr\listparameter\c!margin-\interwordspace\relax
+\stopsetups
+
+% horrible !
+
+\startsetups apa:list:sameauthor:ditto
+ \inframed
+ [\c!width=\dimexpr\listparameter\c!margin-\interwordspace\relax,
+ \c!frame=\v!off,
+ \c!align=\v!middle]
+ {\doubleprime}
+\stopsetups
+
+%D Instead of texdefinitions without arguments, we could have used setups but in my
+%D editor (hh, scite) the commands stand out better. It also saves an additional
+%D component in the name (e.g. common:) because commands and setups have a different
+%D namespace, so similar calls don't clash. Performance of definitions is somewhat
+%D better.
+
+%D We use "texdefinitions" (with eventual arguments) for helpers that are used
+%D in the rendering "setups" defined for each category below.
+
+%D Note that \btxdoif... and \btxflush rely on the definitions in
+%D publ-imp-apa.lua: fields that are not listed as required nor optional are
+%D IGNORED. We also make heavy use of the notion of sets - comma-separated lists
+%D of alternative fields to be used in hierarchical order. For example:
+%D author = { "author", "editor", "publisher", "title" }, will return the
+%D author field if it exists; if not, the editor field will be returned, if it
+%D exists; if not, the publisher field will be returned, if it exists; if not,
+%D the title field will be returned, if it exists; if not, nothing will be
+%D returned. In lua syntax, it can be understood as
+%D author or editor or publisher or title or ""
+
+% #title can be title or booktitle
+
+\starttexdefinition btx:apa:translated-title #title
+ \ifx\currentbtxlanguage\empty
+ % no need for an extra
+ \else\ifx\mainbtxlanguage\currentbtxlanguage
+ % no need for an extra
+ \else
+ \btxdoif {#title:\mainbtxlanguage} {
+ \begingroup
+ \language[\mainbtxlanguage]
+ \btxleftbracket
+ \btxusecommand [apa:\s!list:title:\currentbtxcategory] {
+ \btxflush{#title:\mainbtxlanguage}
+ }
+ \btxrightbracket
+ \endgroup
+ }
+ \fi\fi
+\stoptexdefinition
+
+\starttexdefinition btx:apa:composed-title #title
+ \btxstartstyleandcolor [apa:\s!list:title:\currentbtxcategory]
+ \begingroup
+ \language[\currentbtxlanguage]
+ \btxusecommand [apa:\s!list:title:\currentbtxcategory] {
+ \btxflush{#title}
+ \btxdoif {sub#title} {
+ \btxcolon
+ \btxflush{sub#title}
+ }
+ }
+ \endgroup
+ % which namespace?
+ %\doif{\btxparameter{translate}}\v!yes {
+ \texdefinition{btx:apa:translated-title}{#title}
+ %}
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+\starttexdefinition btx:apa:title
+ \setmode{btx:apa:title-placed}
+ % we make the title active, opening "file"
+ \btxdoifelse {file} {
+ \texdefinition{btx:format:inject}
+ {url(file:\btxflush{file})}
+ {
+ \texdefinition{btx:apa:composed-title}{title}
+ }
+ } {
+ \texdefinition{btx:apa:composed-title}{title}
+ }
+ \btxdoif {title} {
+ \btxperiod
+ % TODO: this period may NOT be wanted, as in: Title (2nd ed.).
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:title-if-not-placed
+ \doifelsemode {btx:apa:title-placed} {
+ \resetmode{btx:apa:title-placed}
+ } {
+ \btxdoif {title} {
+ \btxspace
+ \texdefinition {btx:apa:title}
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:suffixedyear
+ \btxdoifelse {year} {
+ \btxflush{year}
+ \btxflushsuffix
+ } {
+ \btxlabeltext{apa:nd}
+ }
+\stoptexdefinition
+
+% #author may be author(set) or editor
+
+\starttexdefinition btx:apa:author-or-editor #author
+ \btxdoif {#author} {
+ \btxflush{#author}
+ \doifelse {\btxfoundname{#author}} {editor} {
+ \btxleftparenthesis
+ \btxsingularorplural {editor} {
+ \btxlabeltext{apa:Editor}
+ } {
+ \btxlabeltext{apa:Editors}
+ }
+ \btxrightparenthesisperiod
+ } {
+ \doifelse {\btxfoundname{#author}} {producer} {
+ \btxleftparenthesis
+ \btxsingularorplural {producer} {
+ \btxlabeltext{apa:Producer}
+ } {
+ \btxlabeltext{apa:Producers}
+ }
+ \btxrightparenthesis
+ \btxdoifelse {director} {
+ \removeunwantedspaces
+ \btxparameter{\c!separator:names:3}
+ \btxflush{director}
+ \btxleftparenthesis
+ \btxsingularorplural {director} {
+ \btxlabeltext{apa:Director}
+ } {
+ \btxlabeltext{apa:Directors}
+ }
+ \btxrightparenthesisperiod
+ } {
+ \btxperiod
+ }
+ } {
+ \doif {\btxfoundname{#author}} {director} {
+ \btxleftparenthesis
+ \btxsingularorplural {director} {
+ \btxlabeltext{apa:Director}
+ } {
+ \btxlabeltext{apa:Directors}
+ }
+ \btxrightparenthesisperiod
+ }
+ }
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:authoryear
+ % we make the authoryear active, pointing to the citation
+ \texdefinition{btx:format:inject}
+ {internal(\currentbtxinternal)}
+ {
+ \doifelsesetups{apa:list:sameauthor} {
+ \btxdoifelsesameasprevious {author} {
+ \fastsetup{apa:list:sameauthor}
+ } {
+ \texdefinition{btx:apa:author-or-editor} {author}
+ }
+ } {
+ \texdefinition{btx:apa:author-or-editor} {author}
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:apa:suffixedyear}
+ \btxrightparenthesis
+ }
+ % outside of interaction
+ \btxperiod
+ \doif {\btxfoundname{author}} {title} {
+ \setmode{btx:apa:title-placed}
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:editor-in
+ \btxdoif {booktitle} {
+ \btxlabeltext{apa:In}
+ \doifnot {\btxfoundname{author}} {editor} {
+ \btxspace
+ \texdefinition{btx:apa:author-or-editor} {editor}
+ }
+ \btxspace
+ \texdefinition{btx:apa:composed-title} {booktitle}
+ \btxperiod
+ }
+\stoptexdefinition
+
+% TODO: The title is terminated with period. However,
+% we probably don't want this before the parenthesis.
+
+\starttexdefinition btx:apa:leftparenthesis-or-comma
+ \doifelsemode {btx:apa:editionset-is-empty} {
+ \btxleftparenthesis
+ \resetmode{btx:apa:editionset-is-empty}
+ } {
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:editionset
+ \setmode{btx:apa:editionset-is-empty}
+ \doif {\currentbtxcategory} {techreport} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxdoifelse {type} {
+ \btxusecommand [apa:\s!list:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{apa:technicalreport}
+ }
+ }
+ \btxdoif {volume} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxoneorrange {volume} {
+ \btxlabeltext{apa:Volume}
+ } {
+ \btxlabeltext{apa:Volumes}
+ }
+ \btxspace
+ \btxflush{volume}
+ }
+ \btxdoif {number} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxlabeltext{apa:number}
+ \btxspace
+ \btxflush{number}
+ }
+ \btxdoif {edition} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxflush{edition}
+ \btxspace
+ \btxlabeltext{apa:edition}
+ }
+ \btxdoif {pages} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxoneorrange {pages} {
+ \btxlabeltext{apa:page}
+ } {
+ \btxlabeltext{apa:pages}
+ }
+ \btxnbsp
+ \btxflush{pages}
+ }
+ \doifnotmode {btx:apa:editionset-is-empty} {
+ \btxrightparenthesisperiod
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:journal
+ \btxstartstyleandcolor[apa:\s!list:journal]
+ \btxusecommand [apa:\s!list:journal] {
+ \btxflush{journal}
+ }
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+\starttexdefinition btx:apa:volume
+ \btxstartstyleandcolor[apa:\s!list:volume]
+ \btxflush{volume}
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+ % this could be simplified!
+
+\starttexdefinition btx:apa:journal-volume-number-pages
+ \btxdoif {journal} {
+ \btxspace
+ \texdefinition{btx:apa:journal}
+ \btxdoifelse {volume} {
+ \btxcomma
+ \texdefinition{btx:apa:volume}
+ \btxdoif {number} {
+ %\btxleftparenthesis
+ (\btxflush{number}
+ \btxrightparenthesis
+ }
+ } {
+ \btxdoif {number} {
+ \btxcomma
+ \btxleftparenthesis
+ \btxflush{number}
+ \btxrightparenthesis
+ }
+ }
+ \btxdoif {pages} {
+ \btxcomma
+ \doif {\currentbtxcategory} {newspaper} {
+ \btxoneorrange {pages} {
+ \btxlabeltext{apa:page}
+ } {
+ \btxlabeltext{apa:pages}
+ }
+ \btxnbsp
+ }
+ \btxflush{pages}
+ }
+ \btxperiod
+ \doifnot {\currentbtxcategory} {newspaper} {
+ \btxdoifnot {volume} {
+ \btxdoifnot {number} {
+ \btxdoifnot {pages} {
+ \btxdoif {doi} {%set: doi or url
+ \btxspace
+ \btxlabeltext{apa:Advanced}
+ \btxperiod
+ }
+ }
+ }
+ }
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:wherefrom-publisher
+ \btxdoifelse {address} {
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ \btxcolon
+ } {
+ \btxdoif {country} {
+ \btxflush{country}
+ \btxcolon
+ }
+ }
+ \doifelse {\btxfoundname{author}} {\btxfoundname{publisher}} {
+ \btxlabeltext{apa:Author}
+ } {
+ \btxdoifelse {publisher} {
+ \btxflush{publisher}
+ } {
+ \btxlabeltext{apa:Author}
+ }
+ }
+ \btxperiod
+\stoptexdefinition
+
+\definebreakpoints[doi]
+\definebreakpoint [doi][:][nleft=3,type=1]
+\definebreakpoint [doi][/][nleft=3,type=1]
+\definebreakpoint [doi][-][nleft=3,type=1]
+\definebreakpoint [doi][.][nleft=3,type=1]
+
+% use \btxentry here?
+
+\starttexdefinition btx:apa:url
+ \btxspace
+ \btxlabeltext{apa:Retrieved}
+ \btxspace
+ \begingroup
+ \setbreakpoints[doi]
+ \ifconditional\btxinteractive
+ \goto {
+ \hyphenatedurl{\btxflush{url}}
+ } [
+ url(\btxflush{url})
+ ]
+ \else
+ \hyphenatedurl{\btxflush{url}}
+ \fi
+ \endgroup
+\stoptexdefinition
+
+% use \btxentry here?
+
+\starttexdefinition btx:apa:doi
+ \btxspace
+ \begingroup
+ \setbreakpoints[doi]
+ \ifconditional\btxinteractive
+ \goto {
+ \hyphenatedurl{doi:\btxflush{doi}}
+ } [
+ url(http://dx.doi.org/\btxflush{doi})
+ ]
+ \else
+ \hyphenatedurl{doi:\btxflush{doi}}
+ \fi
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:apa:note
+ \btxdoif {note} {
+ \btxleftparenthesis
+ \btxflush{note}
+ \btxrightparenthesis
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:url-doi-note
+ \doif {\btxfoundname{doi}} {url} {
+ \texdefinition{btx:apa:url}
+ }
+ \doif {\btxfoundname{doi}} {doi} {
+ \texdefinition{btx:apa:doi}
+ }
+ \texdefinition{btx:apa:note}
+ \removeunwantedspaces
+\stoptexdefinition
+
+% Then setups, by category
+
+% An article from a journal
+% Required fields: author or editor or title, journal, (year).
+% Optional fields: volume, number, pages, type, doi, url, note.
+% Note that bibtex (and tools) do not include editor (e.g. special issue or section)
+
+\startsetups btx:apa:list:article
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxdoif {type} {
+ \btxleftbracket
+ \btxflush{type}
+ \btxrightbracketperiod
+ }
+ \texdefinition{btx:apa:journal-volume-number-pages}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% An article from a magazine.
+% Required fields: author or title, journal, (year).
+% Optional fields: number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:apa:list:magazine
+ \fastsetup{btx:apa:list:article}
+\stopsetups
+
+% An article from a newspaper.
+% Required fields: author or title, journal, (year).
+% Optional fields: volume, number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:apa:list:newspaper
+ \fastsetup{btx:apa:list:article}
+\stopsetups
+
+% A complete issue of a periodical, such as a special issue of a journal.
+% Required fields: title, year
+% Optional fields: editor, publisher, subtitle, series, volume, number, month, organization, doi, url, issn, note
+
+% needs to be tuned...
+
+\startsetups btx:apa:list:periodical
+ \fastsetup{btx:apa:list:article}
+\stopsetups
+
+% National and international standards issued by a standards body
+% Required fields: author, institution, or organization, year, title
+% Optional fields: subtitle, doi, url, note
+
+\startsetups btx:apa:list:standard
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% A book with an explicit publisher.
+% Required fields: author or editor or publisher, title, (year).
+% Optional fields: volume or number, series, address, edition, month, day, note.
+% APA ignores: month, day
+
+% todo: series?
+
+\startsetups btx:apa:list:book
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:editionset}
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% There is some debate about how inbook should differ from incollection
+
+% A part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+% (note that inbook is handled differently by bibtex and biblatex)
+% Required fields: author or editor, title, chapter and/or pages, publisher, year.
+% Optional fields: volume or number, series, type, address, edition, month, note.
+% We add optional: booktitle.
+% APA ignores: chapter, month
+
+\startsetups btx:apa:list:inbook
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:editor-in}
+ \texdefinition{btx:apa:editionset}
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% A part of a book having its own title.
+% Required fields: author, title, booktitle, publisher, year.
+% Optional fields: editor, volume or number, series, type, chapter, pages, address, edition, month, note.
+% APA ignores: chapter, month
+
+\startsetups btx:apa:list:incollection
+ \fastsetup{btx:apa:list:inbook}
+\stopsetups
+
+% The proceedings of a conference.
+% Required fields: title, year.
+% Optional fields: editor, volume or number, series, address, month, organization, publisher, note.
+% todo: series?
+
+\startsetups btx:apa:list:proceedings
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% An article in a conference proceedings.
+% Required fields: author, title, booktitle, year.
+% Optional fields: editor, volume or number, series, pages, address, month, organization, publisher, note.
+
+\startsetups btx:apa:list:inproceedings
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:editor-in}
+ \texdefinition{btx:apa:editionset}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxcomma
+ }
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+\startsetups btx:apa:list:conference
+ \fastsetup{btx:apa:list:inproceedings}
+\stopsetups
+
+% A thesis.
+% Required fields: author, title, school, year.
+% Optional fields: type, address, month, note.
+
+\startsetups btx:apa:list:thesis
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxleftparenthesis
+ \btxdoifelse {type} {
+ \btxusecommand [apa:\s!list:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{apa:\currentbtxcategory}
+ }
+ \btxrightparenthesis
+ \btxdoif {school} {
+ \btxperiod
+ \btxflush{school}
+ }
+ \btxdoif {address} {
+ \btxdoifelse {school} {
+ \btxcomma
+ } {
+ \btxperiod
+ }
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ }
+ \btxperiod
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+\startsetups btx:apa:list:phdthesis
+ \fastsetup{btx:apa:list:thesis}
+\stopsetups
+
+\startsetups btx:apa:list:mastersthesis
+ \fastsetup{btx:apa:list:thesis}
+\stopsetups
+
+% A work that is printed and bound, but without a named publisher or sponsoring institution.
+% Required field: title.
+% Optional fields: author, howpublished, address, month, year, note.
+
+\startsetups btx:apa:list:booklet
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% Technical documentation.
+% Required field: title.
+% Optional fields: author, organization, address, edition, month, year, note.
+
+\startsetups btx:apa:list:manual
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% A report published by a school or other institution, usually numbered within a series.
+% Required fields: author, title, institution, year.
+% Optional fields: type, number, address, month, note.
+
+\startsetups btx:apa:list:techreport
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% A document having an author and title, but not formally published.
+% Required fields: author, title, note.
+% Optional fields: month, year.
+
+\startsetups btx:apa:list:unpublished
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% A patent. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: nationality, number, year, yearfiled
+% Optional fields: author, title, assignee, address, type, number, day, dayfiled, month, monthfiled, note, url
+% Also optional: publisher
+
+% todo: yearfiled, monthfiled, dayfiled
+
+\startsetups btx:apa:list:patent
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \begingroup
+ \it
+ \btxdoif {nationality} {
+ \btxspace
+ \btxflush{nationality}
+ }
+ \btxspace
+ \btxlabeltext{apa:patent}
+ \btxdoif {number} {
+ \btxspace
+ \btxlabeltext{apa:number}
+ \btxspace
+ \btxflush{number}
+ }
+ \btxperiod
+ \italiccorrection
+ \endgroup
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url}
+ \texdefinition{btx:apa:note}
+\stopsetups
+
+% Electronic. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: title
+% Optional fields: address, author, howpublished, month, note, organization, url, year, doi
+% Also optional: type
+
+% Like Misc below but includes organization.
+
+\startsetups btx:apa:list:electronic
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxperiod
+ }
+ \btxdoif {howpublished} {
+ \btxspace
+ \btxflush{howpublished}
+ \btxperiod
+ }
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% Film. Note that this category was not defined with BIBTEX.
+% Required fields: producer, director, title, year, address, publisher
+% Optional fields: subtitle, type, note, url, doi
+
+\startsetups btx:apa:list:film
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition {btx:apa:title}
+ \btxleftbracket
+ \btxdoifelse {type} {
+ \btxflush{type}
+ } {
+ \btxlabeltext{apa:MotionPicture}
+ }
+ \btxrightbracketperiod
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% Music. Note that this category was not defined with BIBTEX.
+% Required fields: composer, artist, title, album, year, address, publisher
+% Optional fields: subtitle, type, note, url, doi
+
+\startsetups btx:apa:list:music
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \doifnot {\btxfoundname{author}} {artist} {
+ \btxdoif {artist} {
+ \btxleftbracket
+ \btxlabeltext{apa:Recordedby}
+ \btxspace
+ \btxflush{artist}
+ \btxrightbracketperiod
+ }
+ }
+ \doifnot {\btxfoundname{title}} {album} {
+ \btxdoif {album} {
+ \btxlabeltext{apa:In}
+ \btxspace
+ \btxflush{album}
+ \btxperiod
+ }
+ }
+ \btxdoif {type} {
+ \btxleftbracket
+ \btxflush{type}
+ \btxrightbracketperiod
+ }
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% Other. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: author or title, year
+% Optional fields: note, doi, url
+
+\startsetups btx:apa:list:other
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% Use this type when nothing else fits.
+% Required fields: none.
+% Optional fields: author, title, howpublished, month, year, note.
+
+\startsetups btx:apa:list:misc
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxdoif {howpublished} {
+ \btxspace
+ \btxflush{howpublished}
+ \btxperiod
+ }
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% If all else fails to match:
+
+\startsetups btx:apa:list:literal
+ %\btxleftparenthesis
+ \removeunwantedspaces(
+ \btxflush{tag}
+ \btxrightparenthesis
+ \btxdoif {text} {
+ \btxflush{text}
+ }
+\stopsetups
+
+% HH: an example of setting up translations using a sub rendering. Keep it here
+% till we find another spot as otherwise I forget about it and I don't want to
+% waste hours reinventing a wheel when something like this is needed.
+%
+% \definebtx
+% [apa:cite:title:translated]
+% [apa:cite:title]
+% [left=\btxleftbracket,
+% right=\btxrightbracket,
+% style=\v!bolditalic]
+%
+% \startsetups btx:apa:cite:title
+% % need to add concat, etc.
+% \btxcitereference
+% \currentbtxfirst
+% \doifmode {btx:apa:translatedtitles} {
+% \ifx\currentbtxlanguage\empty
+% % no need for an extra
+% \else\ifx\mainbtxlanguage\currentbtxlanguage
+% % no need for an extra
+% \else
+% \btxdoif {title:\mainbtxlanguage} {
+% \btxstartciterendering[title:translated]
+% \language[\mainbtxlanguage]
+% \btxflush{title:\mainbtxlanguage}
+% \btxstopciterendering
+% }
+% \fi\fi
+% }
+% \stopsetups
+
+\stopbtxrenderingdefinitions
diff --git a/tex/context/base/publ-imp-aps.lua b/tex/context/base/publ-imp-aps.lua
new file mode 100644
index 000000000..c15ffe918
--- /dev/null
+++ b/tex/context/base/publ-imp-aps.lua
@@ -0,0 +1,479 @@
+local specification = {
+ --
+ -- metadata
+ --
+ name = "aps",
+ version = "1.00",
+ comment = "APS specification",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ --
+ -- derived (combinations of) fields (all share the same default set)
+ --
+ virtual = {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+ },
+ --
+ -- special datatypes
+ --
+ types = {
+ --
+ -- list of fields that are interpreted as names: "NAME [and NAME]" where
+ -- NAME is one of the following:
+ --
+ -- First vons Last
+ -- vons Last, First
+ -- vons Last, Jrs, First
+ -- Vons, Last, Jrs, First
+ --
+ author = "author", -- interpreted as name(s)
+ editor = "author",
+ doi = "url", -- an external link
+ url = "url",
+ page = "pagenumber", -- number or range: f--t
+ pages = "pagenumber",
+ volume = "range",
+ number = "range",
+ keywords = "keyword", -- comma|-|separated list
+ year = "number",
+ },
+ --
+ -- categories with their specific fields
+ --
+ categories = {
+ --
+ -- categories are added below
+ --
+ },
+}
+
+local generic = {
+ --
+ -- A set returns the first field (in order of position below) that is found
+ -- present in an entry. A set having the same name as a field conditionally
+ -- allows the substitution of an alternate field.
+ --
+ -- note that anything can get assigned a doi or be available online.
+ editionset = { "edition", "volume", "number", "pages" },
+}
+
+-- Note that the APS specification allows an additional field "collaboration"
+-- to be rendered following the author list (if the collaboration name appears
+-- in the byline of the cited article).
+
+-- Definition of recognized categories and the fields that they contain.
+-- Required fields should be present; optional fields may also be rendered;
+-- all other fields will be ignored.
+
+-- Sets contain either/or in order of precedence.
+--
+-- For a category *not* defined here yet present in the dataset, *all* fields
+-- are taken as optional. This allows for flexibility in the addition of new
+-- categories.
+
+local categories = specification.categories
+
+-- an article from a journal
+
+categories.article = {
+ sets = {
+ author = { "author", "editor" },
+ },
+ required = {
+ "author",
+ },
+ optional = {
+ "collaboration",
+ "year",
+ "title", "subtitle", "type", "file",
+ "journal", "volume", "number", "pages",
+ "doi", "url", "note",
+ },
+}
+
+-- an article from a magazine
+
+categories.magazine = {
+ sets = categories.article.sets,
+ required = {
+ "author",
+ "year",
+ "journal",
+ },
+ optional = {
+ "collaboration",
+ "title", "subtitle", "type", "file",
+ "number", "pages",
+ "month", "day",
+ "doi", "url", "note",
+ },
+}
+
+categories.newspaper = categories.magazine
+
+-- (from jabref) to be identified and setup ...
+
+categories.periodical = {
+ sets = {
+ author = { "editor", "publisher" },
+ },
+ required = {
+ "title",
+ "year",
+ },
+ optional = {
+ "author",
+ "collaboration",
+ "subtitle", "file",
+ "series", "volume", "number", "month",
+ "organization",
+ "doi", "url", "note",
+ },
+}
+
+-- (from jabref) to be identified and setup ...
+
+categories.standard = {
+ sets = {
+ author = { "author", "institution", "organization" },
+ },
+ required = {
+ "author",
+ "year",
+ "title", "subtitle",
+ "doi", "note",
+ },
+ optional = {
+ "collaboration",
+ "url",
+ },
+}
+
+-- a book with an explicit publisher.
+
+categories.book = {
+ sets = {
+ author = { "author", "editor", "publisher" },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "title",
+ },
+ optional = {
+ "collaboration",
+ "year", "month", "day",
+ "title", "subtitle", "type", "file",
+ "editionset", "series",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- a part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+
+categories.inbook = {
+ sets = {
+ author = { "author", "editor", "publisher", },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "year" ,
+ "title",
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "type", "file",
+ "booktitle",
+ -- "chapter",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- a book having its own title as part of a collection.
+-- (like inbook, but we here make booktitle required)
+
+categories.incollection = {
+ sets = {
+ author = { "author", "editor", "publisher" },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "booktitle",
+ "year",
+ },
+ optional = {
+ "collaboration",
+ "title", "subtitle", "type", "file",
+ "editionset", "series",
+ "chapter",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- a work that is printed and bound, but without a named publisher or sponsoring institution.
+
+categories.booklet = {
+ sets = {
+ publisher = { "howpublished" }, -- no "publisher"!
+ },
+ required = {
+ "author",
+ "title",
+ },
+ optional = {
+ "publisher",
+ "collaboration",
+ "year", "month",
+ "subtitle", "type", "file",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- the proceedings of a conference.
+
+categories.proceedings = {
+ sets = {
+ author = { "editor", "organization", "publisher" }, -- no "author"!
+ publisher = { "publisher", "organization" },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "publisher",
+ "title", "subtitle", "file",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- an article in a conference proceedings.
+
+categories.inproceedings = {
+ sets = categories.incollection.sets,
+ required = categories.incollection.required,
+ optional = {
+ "collaboration",
+ "title", "subtitle", "type", "file",
+ "month",
+ "edition", "series",
+ "address", "organization",
+ "doi", "url", "note",
+ },
+}
+
+categories.conference = categories.inproceedings
+
+-- a thesis (of course).
+
+categories.thesis = {
+ required = {
+ "author",
+ "title",
+ "school",
+ "year",
+ "type"
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+categories.mastersthesis = {
+ sets = categories.thesis.sets,
+ required = {
+ "author",
+ "title",
+ "school",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "type",
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+categories.phdthesis = categories.mastersthesis
+
+-- a report published by a school or other institution, usually numbered within a series.
+
+categories.techreport = {
+ sets = {
+ author = { "author", "institution", "publisher" },
+ publisher = { "publisher", "institution", },
+ editionset = { "type", "volume", "number", "pages" }, -- no "edition"!
+ },
+ required = {
+ "author",
+ "title",
+ "institution",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "publisher",
+ "address",
+ "subtitle", "file",
+ "editionset",
+ "month",
+ "doi", "url", "note",
+ },
+}
+
+-- technical documentation.
+
+categories.manual = {
+ sets = {
+ author = { "author", "organization", "publisher" },
+ publisher = { "publisher", "organization", },
+ editionset = generic.editionset,
+ },
+ required = {
+ "title"
+ },
+ optional = {
+ "author", "publisher",
+ "collaboration",
+ "address",
+ "subtitle", "file",
+ "editionset", "month", "year",
+ "doi", "url", "note",
+ },
+}
+
+-- a patent (of course).
+
+categories.patent = {
+ sets = {
+ author = { "author", "assignee", },
+ publisher = { "publisher", "assignee", },
+ year = { "year", "yearfiled", },
+ month = { "month", "monthfiled", },
+ day = { "day", "dayfiled", },
+ },
+ required = {
+ "nationality",
+ "number",
+ "year",
+ },
+ optional = {
+ "type",
+ --check this: "language",
+ "author", "publisher",
+ "collaboration",
+ "title", "subtitle", "file",
+ "address",
+ "day", "month",
+ "doi", "url", "note",
+ },
+}
+
+-- a document having an author and title, but not formally published.
+
+categories.unpublished = {
+ required = {
+ "author",
+ "title",
+ "note"
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "file",
+ "year", "month",
+ "doi", "url",
+ },
+}
+
+-- like misc below but includes organization.
+
+categories.electronic = {
+ required = {
+ "title"
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "year", "month",
+ "author",
+ "collaboration",
+ "address",
+ "organization",
+ "howpublished",
+ "doi", "url", "note",
+ },
+}
+
+-- use this type when nothing else fits.
+
+categories.misc = {
+ required = {
+ -- nothing is really important here
+ },
+ optional = {
+ "author",
+ "collaboration",
+ "title", "subtitle", "file",
+ "year", "month",
+ "howpublished",
+ "doi", "url", "note",
+ },
+}
+
+-- other (whatever jabref does not know!)
+
+categories.other = {
+ required = {
+ "author",
+ "title",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "file",
+ "doi", "url", "note",
+ },
+}
+
+-- if all else fails to match:
+
+categories.literal = {
+ sets = {
+ author = { "tag" }, -- need to check this!
+ },
+ required = {
+ "text"
+ },
+ optional = {
+ "author",
+ "doi", "url", "note"
+ },
+ virtual = false,
+}
+
+-- done
+
+return specification
diff --git a/tex/context/base/publ-imp-aps.mkvi b/tex/context/base/publ-imp-aps.mkvi
new file mode 100644
index 000000000..28af82e74
--- /dev/null
+++ b/tex/context/base/publ-imp-aps.mkvi
@@ -0,0 +1,1110 @@
+%D \module
+%D [ file=publ-imp-aps,
+%D version=2015.03.22,
+%D title=APS bibliography style,
+%D subtitle=Publications,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\startbtxrenderingdefinitions[aps]
+
+%D Reference:
+%D \startTEX
+%D @Book{APS2011,
+%D title ={Physical Review Style and Notation Guide},
+%D year ={2011},
+%D month ={June},
+%D edition ={Revised},
+%D editor ={Waldron, A and Judd, P. and Miller, V.},
+%D address ={Ridge, NY},
+%D publisher={American Physical Society},
+%D Xpages ={26},
+%D url ={http://journals.aps.org/files/styleguide-pr.pdf}
+%D }
+%D \stopTEX
+
+% set ALL specific APS compliant values
+
+\definebtx
+ [aps]
+ [\c!default=default,
+ \c!specification=aps,
+ \c!otherstext={\space{\it\btxlabeltext{aps:others}}},
+ \c!etallimit=10,
+ \c!etaldisplay=\btxparameter\c!etallimit,
+ %c!journalconversion=\v!normal,
+ \c!monthconversion=\v!month,
+ \c!title=\v!yes,
+ \c!separator:names:2={,\space},
+ \c!separator:names:3={,\space\btxlabeltext{aps:and}\space}, % not \textampersand
+ \c!separator:names:4= {\space\btxlabeltext{aps:and}\space}] % not \textampersand
+
+% First, define and set list and rendering parameters
+
+\definebtxrendering
+ [aps]
+ [\c!specification=aps,
+ \c!sorttype=\v!default,
+ \c!numbering=num]
+
+\setupbtxlist
+ [aps]
+ [\c!alternative=b] % spaces
+
+\definebtx
+ [aps:\s!list]
+ [aps]
+ [\c!authorconversion=normalshort]
+
+\definebtx
+ [aps:\s!list:author]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:editor]
+ [aps:\s!list:author]
+
+\definebtx
+ [aps:\s!list:suffix]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:url]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:doi]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:\s!page]
+ [aps:\s!list]
+ [\c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{aps:and}\space},
+ \c!separator:4={\space\btxlabeltext{aps:and}\space},
+ \c!left={\btxleftparenthesis},
+ \c!right={\btxrightparenthesis},
+ \c!command={\wordright}]
+
+\definebtx
+ [aps:\s!list:numbering]
+ [aps:\s!list]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!list:numbering:default]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:num]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:short]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:tag]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:index]
+ [aps:\s!list:numbering]
+
+%D In order to be able to get journals expanded (or normalized or abbreviated) you need
+%D to load a list:
+%D
+%D \starttyping
+%D \btxloadjournallist[journals.txt] % the jabref list
+%D \stoptyping
+
+% TODO
+
+\definebtx
+ [aps:\s!list:journal]
+ [aps:\s!list]
+ [\c!style=\v!italic]
+ %command=\btxexpandedjournal] % btxabbreviatedjournal
+
+\definebtx
+ [aps:\s!list:volume]
+ [aps:\s!list]
+ [\c!style=\v!bold]
+
+\definebtx
+ [aps:\s!list:title]
+ [aps:\s!list]
+ [\c!style=\v!italic,
+ \c!command=\Word]
+
+\definebtx
+ [aps:\s!list:title:article]
+ [aps:\s!list:title]
+ [\c!style=] % journal is set in italics
+
+\definebtx
+ [aps:\s!list:title:magazine]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:newspaper]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:periodical]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:standard]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:book]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:inbook]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:incollection]
+ [aps:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [aps:\s!list:title:proceedings]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:inproceedings]
+ [aps:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [aps:\s!list:title:conference]
+ [aps:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [aps:\s!list:title:thesis]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:phdthesis]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:mastersthesis]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:booklet]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:manual]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:techreport]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:unpublished]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:patent]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:electronic]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:other]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:misc]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:literal]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:type]
+ [\c!command=\Word]
+
+% Then define and set all cite parameters
+
+\definebtx
+ [aps:\s!cite]
+ [aps]
+ [\c!authorconversion=\v!name,
+ \c!compress=\v!yes,
+ \c!sorttype=normal]
+
+\definebtx
+ [aps:\s!cite:author]
+ [aps:\s!cite]
+
+\definebtx
+ [aps:\s!cite:authoryear]
+ [aps:\s!cite:author]
+ [\c!left={(},
+ \c!right={)},
+ \c!inbetween={,\space}]
+
+\definebtx
+ [aps:\s!cite:authoryears]
+ [aps:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space}]
+
+\definebtx
+ [aps:\s!cite:authornum]
+ [aps:\s!cite:author]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [aps:\s!cite:author:num] % todo
+ [aps:\s!cite:authornum]
+ [\c!left={\space[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:author:year] % todo
+ [aps:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=]
+
+\definebtx
+ [aps:\s!cite:author:years] % todo
+ [aps:\s!cite:authoryears]
+ [\c!inbetween=,
+ \c!left={\space(},
+ \c!right={)}]
+
+\definebtx
+ [aps:\s!cite:year]
+ [aps:\s!cite]
+
+\definebtx
+ [aps:\s!cite:title]
+ [aps:\s!cite]
+ [\c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!sorttype=none,
+ \c!style=\v!italic]
+
+\definebtx
+ [aps:\s!cite:booktitle]
+ [aps:\s!cite:title]
+
+\definebtx
+ [aps:\s!cite:tag]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:index]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:page]
+ [aps:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{aps:and}\space}, % not \textampersand
+ \c!separator:4= {\space\btxlabeltext{aps:and}\space}] % not \textampersand
+
+\definebtx
+ [aps:\s!cite:pages]
+ [aps:\s!cite:page]
+
+\definebtx
+ [aps:\s!cite:keywords]
+ [aps:\s!cite]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [aps:\s!cite:short]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:category]
+ [aps:\s!cite]
+
+\definebtx
+ [aps:\s!cite:url]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:doi]
+ [aps:\s!cite:url]
+
+\definebtx
+ [aps:\s!cite:num]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]},
+ %\c!left=, % TODO: PRB uses superscript references...
+ %\c!right=, % and after punctuation, PRA, C, D, E, and L are before!
+ %\c!command={\high},
+ \c!separator:2={\btxcommabreak},
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+\definebtx
+ [aps:\s!cite:default]
+ [aps:\s!cite:num]
+
+\definebtx
+ [aps:\s!cite:textnum]
+ [aps:\s!cite:num]
+ [\c!left={Ref.\nbsp},
+ \c!command=,
+ \c!separator:2={,\space},
+ \c!separator:3={\space\btxlabeltext{aps:and}\space},
+ \c!separator:4={\space\btxlabeltext{aps:and}\space}]
+
+\definebtx
+ [aps:\s!cite:entry]
+ [aps:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space},
+ \c!separator:2={;\space},
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+%D Sometimes we have verbose injections in an entry and these can be language
+%D dependent, so we use labels.
+%D
+%D Because we want to mix renderings (in the manual) we need a namespace in the
+%D label texts:
+
+\setupbtxlabeltext
+ [en]
+ [aps:and=and,
+ aps:number={no.},
+ aps:edition={ed.},
+ aps:Editor={Ed.},
+ aps:Editors={Eds.},
+ aps:Volume={Vol.},
+ aps:Volumes={Vols.},
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Master's thesis},
+ aps:phdthesis={Doctoral dissertation},
+ aps:technicalreport={Tech. Rep.}, % Technical report
+ aps:supplement={Suppl.}, % Supplement (not used?)
+ aps:patent=Patent,
+ aps:inpress={in press},
+ aps:tobe={to be published},
+ aps:unpublished={unpublished},
+ aps:In=In]
+
+% Check this (google translate!!):
+
+\setupbtxlabeltext
+ [nl]
+ [aps:and=en,
+ aps:number={nr.},
+ aps:edition={ed.}, % editie
+ aps:Editor=Editor, % Ed./Eds.
+ aps:Editors=Editors,
+ aps:Volume={Vol.},
+ aps:Volumes={Vols.},
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis=Masterproef,
+ aps:phdthesis=Proefschrift,
+ aps:technicalreport={Technisch rapport}, % Technical report
+ aps:supplement=Supplement,
+ aps:patent=Octrooi,
+ aps:inpress={in press}, % CHECK THESE!
+ aps:tobe={worden gepubliceerd},
+ aps:unpublished={onuitgegeven},
+ aps:In=In]
+
+\setupbtxlabeltext
+ [fr]
+ [aps:and=et,
+ aps:number={n\high{o}},
+ aps:edition={édition},
+ aps:Editor=Éditeur,
+ aps:Editors=Éditeurs,
+ aps:Volume=Volume,
+ aps:Volumes=Volumes,
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Thèse de master (DEA, DESS, master)},
+ aps:phdthesis={Thèse de doctorat},
+ aps:technicalreport={Rapport technique},
+ aps:supplement=Supplément,
+ aps:patent=Brevet,
+ aps:inpress={sous impression},
+ aps:tobe={à paraître},
+ aps:unpublished={inédit}, % pour un livre
+ aps:In=Dans]
+
+\setupbtxlabeltext
+ [de]
+ [aps:and=und,
+ aps:number={nr.},
+ aps:edition=Auf\/lage,
+ aps:Editor=Herausgeber, % Hrsg./Hg.
+ aps:Editors=Herausgeber,
+ aps:Volume=Band, % Bd.
+ aps:Volumes={Bände},
+ aps:others={et al.},
+ aps:page={S.},
+ aps:pages={S.},
+ aps:mastersthesis={Masterarbeit},
+ aps:phdthesis={Dissertation},
+ aps:technicalreport={Technischer Bericht},
+ aps:supplement={Beilage}, % Supplement
+ aps:patent=Patent,
+ aps:inpress={in der Presse}, % CHECK THESE!
+ aps:tobe={veröffentlicht werden},
+ aps:unpublished={unveröffentlicht},
+ aps:In=In]
+
+% thanks: Andrea Valle
+
+\setupbtxlabeltext
+ [it]
+ [aps:and=e,
+ aps:number={nº},
+ aps:edition={ed.}, % edizione
+ aps:Editor={A cura di},
+ aps:Editors={A cura di},
+ aps:Volume={Vol.}, % Volume
+ aps:Volumes={Vol.}, % Volumi
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Tesi di laurea},
+ aps:phdthesis={Tesi di dottorato},
+ aps:technicalreport={Relazione tecnica},
+ aps:supplement={Supplemento},
+ aps:patent=Brevetto,
+ aps:inpress={in press}, % CHECK THESE!
+ aps:tobe={da pubblicare},
+ aps:unpublished={inedito},
+ aps:In=In]
+
+\setupbtxlabeltext
+ [es]
+ [aps:and=y,
+ aps:number={nº},
+ aps:edition={ed.}, % edición
+ aps:Editor=Editor, % Ed./Eds.
+ aps:Editors=Editores,
+ aps:Volume={Vol.}, % Volumen
+ aps:Volumes={Vols.}, % Volúmenes
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Tesis de maestría},
+ aps:phdthesis={Tesis doctoral},
+ aps:technicalreport={Informe técnico},
+ aps:supplement=Suplemento,
+ aps:patent=Patente,
+ aps:inpress={en prensa}, % CHECK THESE!
+ aps:tobe={que se publicará},
+ aps:unpublished={inédito},
+ aps:In=En]
+
+% cite setups
+
+\startsetups btx:aps:nd
+ \doifelse {\currentbtxcategory} {article} {
+ \btxlabeltext{aps:tobe}
+ } {
+ \doifelse {\currentbtxcategory} {book} {
+ \btxlabeltext{aps:inpress}
+ } {
+ \btxlabeltext{aps:unpublished}
+ }
+ }
+\stopsetups
+
+\startsetups btx:aps:cite:author:year
+ \texdefinition{\s!btx:\s!cite:concat}
+ \ifx\currentbtxfirst\empty
+ \fastsetup{btx:aps:nd}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \currentbtxfirst
+ }
+ \ifx\currentbtxsecond\empty \else
+ \btxparameter\c!inbetween
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \currentbtxsecond
+ }
+ \fi
+ \btxflushsuffix
+ \fi
+\stopsetups
+
+\startsetups btx:aps:cite:author:years
+ \fastsetup{btx:aps:cite:author:year}
+\stopsetups
+
+% used in publ-imp-page.mkvi
+
+\startsetups [btx:aps:list:page-or-pages]
+ \ifx\currentbtxlastpage\empty
+ \btxlabeltext{aps:page}
+ \else
+ \btxlabeltext{aps:pages}
+ \fi
+ \btxnbsp
+\stopsetups
+
+%D Instead of texdefinitions without arguments, we could have used setups, but in
+%D my editor (hh, scite) the commands stand out better. It also saves an additional
+%D component in the name (e.g. common:) because commands and setups have different
+%D namespaces, so similar calls don't clash. Performance of definitions is somewhat
+%D better.
+%D
+%D We use "texdefinitions" (possibly with arguments) for helpers that are used
+%D in the rendering "setups" defined for each category below.
+
+%D Note that \btxdoif... and \btxflush rely on the definitions in
+%D publ-imp-aps.lua: fields that are listed as neither required nor optional
+%D are IGNORED. We also make heavy use of the notion of sets: comma-separated
+%D lists of alternative fields to be used in hierarchical order. For example,
+%D author = { "author", "editor", "publisher", "title" } will return the
+%D author field if it exists; if not, the editor field will be returned, if it
+%D exists; if not, the publisher field will be returned, if it exists; if not,
+%D the title field will be returned, if it exists; if not, nothing will be
+%D returned. In Lua syntax, it can be understood as
+%D author or editor or publisher or title or ""
+
+\starttexdefinition btx:aps:composed-title #title
+ \btxstartstyleandcolor [aps:\s!list:title:\currentbtxcategory]
+ \begingroup
+ \language[\currentbtxlanguage]
+ \btxusecommand [aps:\s!list:title:\currentbtxcategory] {
+ \btxflush{#title}
+ \btxdoif {sub#title} {
+ \btxcolon
+ \btxflush{sub#title}
+ }
+ }
+ \endgroup
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+\starttexdefinition btx:aps:title
+ \btxdoif {title} {
+ % we make the title active, opening file
+ \btxdoifelse {file} {
+ \texdefinition{btx:format:inject}
+ {url(file:\btxflush{file})}
+ {
+ \texdefinition{btx:aps:composed-title}{title}
+ }
+ } {
+ \texdefinition{btx:aps:composed-title}{title}
+ }
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:optional-title
+ \doif{\btxparameter{\c!title}}\v!yes {
+ \texdefinition {btx:aps:title}
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:year
+ \btxdoifelse {year} {
+ \btxflush{year}
+ } {
+ \fastsetup{btx:aps:nd}
+ }
+\stoptexdefinition
+
+% #author may be author(set) or editor
+
+\starttexdefinition btx:aps:author-or-editor #author
+ \btxdoif {#author} {
+ \btxflush{#author}
+ \doifelse {\btxfoundname{#author}} {editor} {
+ \btxleftparenthesis
+ \btxsingularorplural {editor} {
+ \btxlabeltext{aps:Editor}
+ } {
+ \btxlabeltext{aps:Editors}
+ }
+ \btxrightparenthesis
+ } {
+ \btxdoif {collaboration} {
+ \btxleftparenthesis
+ \btxflush{collaboration}
+ \btxrightparenthesis
+ }
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:author
+ \btxflush{author}
+ \btxcomma
+\stoptexdefinition
+
+\starttexdefinition btx:aps:editor-in
+ \btxdoif {booktitle} {
+ \btxlabeltext{aps:In}
+ \doifnot {\btxfoundname{author}} {editor} {
+ \btxspace
+ \texdefinition{btx:aps:author-or-editor} {editor}
+ }
+ \btxspace
+ \texdefinition{btx:aps:composed-title} {booktitle}
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:editionset
+ \doif {\currentbtxcategory} {techreport} {
+ \btxdoifelse {type} {
+ \btxusecommand [\currentbtx:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{aps:technicalreport}
+ }
+ \btxcomma
+ }
+ \btxdoif {volume} {
+ \btxoneorrange {volume} {
+ \btxlabeltext{aps:Volume}
+ } {
+ \btxlabeltext{aps:Volumes}
+ }
+ \btxspace
+ \btxflush{volume}
+ \btxcomma
+ }
+ \btxdoif {number} {
+ \btxlabeltext{aps:number}
+ \btxspace
+ \btxflush{number}
+ \btxcomma
+ }
+ \btxdoif {edition} {
+ \btxflush{edition}
+ \btxspace
+ \btxlabeltext{aps:edition}
+ \btxcomma
+ }
+ \btxdoif {pages} {
+ \btxoneorrange {pages} {
+ \btxlabeltext{aps:page}
+ } {
+ \btxlabeltext{aps:pages}
+ }
+ \btxnbsp
+ \btxflush{pages}
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:journal-volume-year
+ \btxdoif {journal} {
+ \btxstartstyleandcolor [aps:\s!list:journal]
+ % expandedjournal abbreviatedjournal
+ \btxflush{expandedjournal -> journal}
+ \btxstopstyleandcolor
+ \btxdoifelse {volume} {
+ \btxspace
+ \btxstartstyleandcolor [aps:\s!list:volume]
+ \btxflush{volume}
+ \btxstopstyleandcolor
+ \btxdoif {number} {
+ \removeunwantedspaces(
+ \btxflush{number}
+ \btxrightparenthesiscomma
+ }
+ } {
+ \btxdoif {number} {
+ \btxspace
+ \btxflush{number}
+ \btxcomma
+ }
+ }
+ \btxdoif {pages} {
+ \btxspace
+ \btxflush{pages}
+ }
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+\stoptexdefinition
+
+\starttexdefinition btx:aps:publisher-wherefrom-year
+ \removeunwantedspaces
+ \removepunctuation
+ \btxleftparenthesis
+ \btxflush{publisher}
+ \btxdoifelse {address} {
+ \btxdoif {publisher} {
+ \btxcomma
+ }
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ \btxcomma
+ } {
+ \btxdoif {publisher} {
+ \btxcomma
+ }
+ }
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+\stoptexdefinition
+
+\starttexdefinition btx:aps:note
+ \btxperiod
+ \btxdoif {note} {
+ \btxleftparenthesis
+ \btxflush{note}
+ \btxrightparenthesis
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:doi-url #text
+ \ifconditional\btxinteractive
+ \btxdoifelse {doi} {
+ \goto {#text} [url(http://dx.doi.org/\btxflush{doi})]
+ } {
+ \btxdoifelse {url} {
+ \goto {#text} [url(\btxflush{url})]
+ } {
+ #text
+ }
+ }
+ \else
+ #text
+ \fi
+
+\stoptexdefinition
+
+% Then setups, by category
+
+% An article from a journal
+% Required fields: author or editor or title, journal, (year).
+% Optional fields: volume, number, pages, type, doi, url, note.
+% Note that bibtex (and tools) do not include editor (e.g. special issue or section)
+
+\startsetups btx:aps:list:article
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:optional-title}
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:journal-volume-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% An article from a magazine.
+% Required fields: author or title, journal, (year).
+% Optional fields: number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:aps:list:magazine
+ \fastsetup{btx:aps:list:article}
+\stopsetups
+
+% An article from a newspaper.
+% Required fields: author or title, journal, (year).
+% Optional fields: number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:aps:list:newspaper
+ \fastsetup{btx:aps:list:article}
+\stopsetups
+
+% A complete issue of a periodical, such as a special issue of a journal.
+% Required fields: title, year
+% Optional fields: editor, publisher, subtitle, series, volume, number, month, organization, doi, url, issn, note
+
+% needs to be tuned...
+\startsetups btx:aps:list:periodical
+ \fastsetup{btx:aps:list:article}
+\stopsetups
+
+% National and international standards issued by a standards body
+% Required fields: author, institution, or organization, year, title
+% Optional fields: subtitle, doi, url, note
+
+\startsetups btx:aps:list:standard
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:note}
+\stopsetups
+% year?
+
+% A book with an explicit publisher.
+% Required fields: author or editor or publisher, title, (year).
+% Optional fields: volume or number, series, address, edition, month, day, note.
+% APS? ignores: month, day
+
+% todo: series?
+
+\startsetups btx:aps:list:book
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:editionset}
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% There is some debate about how inbook should differ from incollection
+
+% A part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+% (note that inbook is handled differently by bibtex and biblatex)
+% Required fields: author or editor, title, chapter and/or pages, publisher, year.
+% Optional fields: volume or number, series, type, address, edition, month, note.
+% We add optional: booktitle.
+% APS? ignores: chapter, month
+
+\startsetups btx:aps:list:inbook
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:editor-in}
+ \texdefinition{btx:aps:editionset}
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+% chapter?
+
+% A part of a book having its own title.
+% Required fields: author, title, booktitle, publisher, year.
+% Optional fields: editor, volume or number, series, type, chapter, pages, address, edition, month, note.
+% APS? ignores: chapter, month
+
+\startsetups btx:aps:list:incollection
+ \fastsetup{btx:aps:list:inbook}
+\stopsetups
+
+% The proceedings of a conference.
+% Required fields: title, year.
+% Optional fields: editor, volume or number, series, address, month, organization, publisher, note.
+% todo: series?
+
+\startsetups btx:aps:list:proceedings
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% An article in a conference proceedings.
+% Required fields: author, title, booktitle, year.
+% Optional fields: editor, volume or number, series, pages, address, month, organization, publisher, note.
+
+\startsetups btx:aps:list:inproceedings
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:editor-in}
+ \texdefinition{btx:aps:editionset}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxcomma
+ }
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+\startsetups btx:aps:list:conference
+ \fastsetup{btx:aps:list:inproceedings}
+\stopsetups
+
+% A thesis.
+% Required fields: author, title, school, year.
+% Optional fields: type, address, month, note.
+
+\startsetups btx:aps:list:thesis
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \btxleftparenthesis
+ \btxdoifelse {type} {
+ \btxusecommand [aps:list:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{aps:\currentbtxcategory}
+ }
+ \btxrightparenthesis
+ \btxdoif {school} {
+ \btxperiod
+ \btxflush{school}
+ }
+ \btxdoif {address} {
+ \btxdoifelse {school} {
+ \btxcomma
+ } {
+ \btxperiod
+ }
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+\startsetups btx:aps:list:phdthesis
+ \fastsetup{btx:aps:list:thesis}
+\stopsetups
+
+\startsetups btx:aps:list:mastersthesis
+ \fastsetup{btx:aps:list:thesis}
+\stopsetups
+
+% A work that is printed and bound, but without a named publisher or sponsoring institution.
+% Required field: title.
+% Optional fields: author, howpublished, address, month, year, note.
+
+\startsetups btx:aps:list:booklet
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% Technical documentation.
+% Required field: title.
+% Optional fields: author, organization, address, edition, month, year, note.
+
+\startsetups btx:aps:list:manual
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% A report published by a school or other institution, usually numbered within a series.
+% Required fields: author, title, institution, year.
+% Optional fields: type, number, address, month, note.
+
+\startsetups btx:aps:list:techreport
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% A document having an author and title, but not formally published.
+% Required fields: author, title, note.
+% Optional fields: month, year.
+
+\startsetups btx:aps:list:unpublished
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% A patent. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: nationality, number, year, yearfiled
+% Optional fields: author, title, assignee, address, type, number, day, dayfiled, month, monthfiled, note, url
+% Also optional: publisher
+
+% todo: yearfiled, monthfiled, dayfiled
+
+\startsetups btx:aps:list:patent
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \begingroup
+ \it
+ \btxdoif {nationality} {
+ \btxspace
+ \btxflush{nationality}
+ }
+ \btxspace
+ \btxlabeltext{aps:patent}
+ \btxdoif {number} {
+ \btxspace
+ \btxlabeltext{aps:number}
+ \btxspace
+ \btxflush{number}
+ }
+ \btxperiod
+ \italiccorrection
+ \endgroup
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:url}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% Electronic. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: title
+% Optional fields: address, author, howpublished, month, note, organization, url, year, doi
+% Also optional: type
+
+% Like Misc below but includes organization.
+
+\startsetups btx:aps:list:electronic
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxperiod
+ }
+ \btxdoif {howpublished} {
+ \btxspace
+ \btxflush{howpublished}
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% Other. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: author or title, year
+% Optional fields: note, doi, url
+
+\startsetups btx:aps:list:other
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% Use this type when nothing else fits.
+% Required fields: none.
+% Optional fields: author, title, howpublished, month, year, note.
+
+\startsetups btx:aps:list:misc
+ \fastsetup{btx:aps:list:electronic}
+ % Note that organization is an "ignored" field.
+\stopsetups
+
+% If all else fails to match:
+
+\startsetups btx:aps:list:literal
+ %\btxleftparenthesis
+ \removeunwantedspaces(
+ \btxflush{tag}
+ \btxrightparenthesis
+ \btxdoif {text} {
+ \btxflush{text}
+ }
+\stopsetups
+
+\stopbtxrenderingdefinitions
diff --git a/tex/context/base/publ-imp-author.mkvi b/tex/context/base/publ-imp-author.mkvi
new file mode 100644
index 000000000..7529c7aa9
--- /dev/null
+++ b/tex/context/base/publ-imp-author.mkvi
@@ -0,0 +1,303 @@
+% TODO: MAKE default
+
+%D \module
+%D [ file=publ-imp-author,
+%D version=2014.06.23,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Authors,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+% these can be used instead of the macros and they accept manipulator prefixes
+%
+% \currentbtxinitials : \btxauthorfield{initials}
+% \currentbtxfirstnames : \btxauthorfield{firstnames}
+% \currentbtxvons : \btxauthorfield{vons}
+% \currentbtxsurnames : \btxauthorfield{surnames}
+% \currentbtxjuniors : \btxauthorfield{juniors}
+
+\starttexdefinition \s!btx:\s!cite:\s!author:\s!de
+ \ifx\currentbtxlanguage\s!de
+ \setmode{\s!btx:\s!de}
+ \fi
+\stoptexdefinition
+
+\startsetups \s!btx:\s!cite:\s!author:concat
+ \ifcase\currentbtxoverflow
+ \btxparameter{\c!separator:names:\number\currentbtxconcat}
+ \else\ifnum\currentbtxauthorindex>\plusone
+ \btxparameter{\c!separator:names:2}
+ \fi\fi
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:others
+ \ifcase\currentbtxoverflow \else
+ \btxparameter\c!otherstext
+ \fi
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:normal
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxfirstnames\empty \else
+ \currentbtxfirstnames
+ \btxparameter{\c!separator:firstnames}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:normalshort
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxinitials\empty \else
+ \currentbtxinitials
+ \btxparameter{\c!separator:initials}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:inverted
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxfirstnames\empty
+ % firstnames are optional
+ \else
+ \btxparameter{\c!separator:invertedfirstnames}
+ \currentbtxfirstnames
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifnotmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:invertedshort
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxinitials\empty
+ % initials are optional
+ \else
+ \btxparameter{\c!separator:invertedinitials}
+ \currentbtxinitials
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:name
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ % is this treated differently in german?
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ \fi
+ \currentbtxsurnames
+ \ifcase\currentbtxauthorstate \else
+ % potential clash of names so we force initials
+ \ifx\currentbtxinitials\empty \else
+ \btxparameter{\c!separator:invertedinitials}
+ \currentbtxinitials
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+% list (mostly the same)
+
+\startsetups \s!btx:\s!list:\s!author:concat
+ \ifcase\currentbtxoverflow
+ \btxparameter{\c!separator:names:\number\currentbtxconcat}
+ \else\ifnum\currentbtxauthorindex>\plusone
+ \btxparameter{\c!separator:names:2}
+ \fi\fi
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:others
+ \ifcase\currentbtxoverflow \else
+ \btxparameter\c!otherstext
+ \fi
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:normal
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxfirstnames\empty \else
+ \currentbtxfirstnames
+ \btxparameter{\c!separator:firstnames}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:normalshort
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxinitials\empty \else
+ \currentbtxinitials
+ \btxparameter{\c!separator:initials}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:inverted
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxfirstnames\empty
+ % firstnames are optional
+ \else
+ \btxparameter{\c!separator:invertedfirstnames}
+ \currentbtxfirstnames
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:invertedshort
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxinitials\empty
+ % initials are optional
+ \else
+ \btxparameter{\c!separator:invertedinitials}
+ \currentbtxinitials
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:name
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ % is this treated differently in german?
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ \fi
+ \currentbtxsurnames
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-cite.mkvi b/tex/context/base/publ-imp-cite.mkvi
new file mode 100644
index 000000000..7ce9ea0da
--- /dev/null
+++ b/tex/context/base/publ-imp-cite.mkvi
@@ -0,0 +1,281 @@
+%D \module
+%D [ file=publ-imp-cite,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Citations,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\starttexdefinition btx:cite:inject #content
+ \ifconditional\btxinteractive
+ \ifx\currentbtxinternal\empty
+ #content
+ \else
+ \goto {
+ #content
+ } [
+ \s!internal(\currentbtxinternal)
+ ]
+ \fi
+ \else
+ #content
+ \fi
+\stoptexdefinition
+
+\starttexdefinition btx:cite:checkconcat
+ \ifnum\currentbtxcount>\zerocount
+ \let\currentbtxinternal\empty
+ \let\currentbtxbacklink\empty
+ \fi
+\stoptexdefinition
+
+% The null case:
+
+\startsetups btx:cite:none
+ \btxcitereference
+ % dummy
+\stopsetups
+
+\startsetups btx:cite:nocite
+ \dontleavehmode
+ \btxcitereference
+\stopsetups
+
+\startsetups btx:cite:unknown
+ \begingroup
+ \tttf
+ \btxcitereference
+ \currentbtxfirst
+ \endgroup
+\stopsetups
+
+\startsetups btx:cite:empty
+ \btxcitereference
+ <empty>
+\stopsetups
+
+\startsetups btx:cite:invalid
+ \btxcitereference
+ {\tt <\currentbtxreference>}
+\stopsetups
+
+\starttexdefinition btx:cite:concat
+ \btxparameter{\c!separator:\number\currentbtxconcat}
+\stoptexdefinition
+
+% when we have an author-year combination, the first and second are not
+% field data but something more complex (which itself calls for a setup)
+
+% \startsetups btx:cite:normal
+% \texdefinition{\s!btx:\s!cite:concat}
+% \currentbtxlefttext
+% \ifx\currentbtxfirst\empty
+% \fastsetup{\s!btx:\s!cite:\s!empty}
+% \else
+% \texdefinition {\s!btx:\s!cite:inject} {
+% \btxcitereference
+% \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+% \currentbtxfirst
+% }
+% }
+% \ifx\currentbtxsecond\empty
+% \btxflushsuffix
+% \else
+% \btxparameter\c!inbetween
+% \texdefinition {\s!btx:\s!cite:inject} {
+% \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+% \currentbtxsecond
+% }
+% }
+% % quite likely an error
+% \btxflushsuffix
+% \fi
+% \fi
+% \currentbtxrighttext
+% \stopsetups
+
+\startsetups btx:cite:normal
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else\ifx\currentbtxsecond\empty
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ \currentbtxfirst
+ \btxflushsuffix
+ }
+ }
+ \else
+ % \texdefinition {\s!btx:\s!cite:inject} {
+ % \btxcitereference
+ % \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ % \currentbtxfirst
+ % }
+ % }
+ % \btxparameter\c!inbetween
+ % \texdefinition {\s!btx:\s!cite:inject} {
+ % \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ % \currentbtxsecond
+ % }
+ % }
+ % \btxflushsuffix
+ %
+ % probably better:
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ \currentbtxfirst
+ \btxparameter\c!inbetween
+ \currentbtxsecond
+ \btxflushsuffix
+ }
+ }
+ \fi\fi
+ \currentbtxrighttext
+\stopsetups
+
+
+\startsetups btx:cite:range
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ \currentbtxfirst
+ \ifx\currentbtxsecond\empty \else
+ \btxparameter\c!range
+ \currentbtxsecond
+ \fi
+ \btxflushsuffix
+ }
+ }
+ \fi
+ \currentbtxrighttext
+\stopsetups
+
+% somehow related to keywords:
+
+\startsetups btx:cite:listelement
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \currentbtxfirst
+ }
+ \fi
+ \currentbtxrighttext
+\stopsetups
+
+\startsetups \s!btx:\s!cite:entry
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \btxhandleciteentry
+ \currentbtxrighttext
+\stopsetups
+
+% these three are goodies to get something rendered but are not really set up,
+% as it makes no sense to have a root setup for combinations like this
+% (especially since one gets the default anyway)
+
+% AB: not so sure about that. Why define them in default rather than here?
+
+\startsetups \s!btx:\s!cite:author
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:authoryear
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:authoryears
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:authornum
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:num
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:default
+ \fastsetup{\s!btx:\s!cite:num}
+\stopsetups
+\startsetups \s!btx:\s!cite:textnum
+ \fastsetup{\s!btx:\s!cite:num}
+\stopsetups
+\startsetups \s!btx:\s!cite:year
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:author:num
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:author:year
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:author:years
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:index
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:category
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:short
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:tag
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+
+% the following correspond to fields, but can be used in many renderings
+
+\startsetups \s!btx:\s!cite:keywords
+ \fastsetup{\s!btx:\s!cite:list}
+\stopsetups
+\startsetups \s!btx:\s!cite:title
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:pages
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+
+% is the next one used?
+% Yes, bibtex is a mess and one can have pages or sometimes page
+
+\startsetups \s!btx:\s!cite:page
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+
+% the following is kind of specific, but can be used in many renderings
+
+\startsetups btx:cite:url
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else\ifconditional\btxinteractive
+ \goto {
+ \btxcitereference
+ \hyphenatedurl{\doif{\currentbtxcitealternative}{doi}{doi:}\currentbtxfirst}
+ } [
+ url(\doif{\currentbtxcitealternative}{doi}{http://dx.doi.org/}\currentbtxfirst)
+ ]
+ \else
+ \btxcitereference
+ \hyphenatedurl{\doif{\currentbtxcitealternative}{doi}{doi:}\currentbtxfirst}
+ \fi\fi
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-commands.mkvi b/tex/context/base/publ-imp-commands.mkvi
new file mode 100644
index 000000000..14e2dbae1
--- /dev/null
+++ b/tex/context/base/publ-imp-commands.mkvi
@@ -0,0 +1,15 @@
+\unprotect
+
+% for tugboat
+
+\definebtxcommand\hbox {\hbox}
+\definebtxcommand\vbox {\vbox}
+\definebtxcommand\llap {\llap}
+\definebtxcommand\rlap {\rlap}
+\definebtxcommand\url #1{\hyphenatedurl{#1}}
+\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
+
+\let\<<
+\let\<>
+
+\protect \endinput
diff --git a/tex/context/base/publ-imp-default.lua b/tex/context/base/publ-imp-default.lua
new file mode 100644
index 000000000..61b08f30c
--- /dev/null
+++ b/tex/context/base/publ-imp-default.lua
@@ -0,0 +1,124 @@
+-- For the moment I put this here as an example. When writing the publication modules
+-- we explored several approaches: pure tex, pure lua, a mix with xml, etc. In the end
+-- each has advantages and drawbacks, so we ended up with readable tex plus helpers in
+-- lua. Anyway, here is a lua variant of a setup ... it doesn't look nicer. An
+-- alternative would be to build a table with characters, but then we need to pass
+-- left, right and other separators, so again there is no real gain.
+
+-- function publications.maybe.default.journal(currentdataset,currenttag)
+-- if publications.okay(currentdataset,currenttag,"journal") then
+-- context.btxspace()
+-- context.startbtxstyle("italic")
+-- commands.btxflush(currentdataset,currenttag,"expandedjournal -> journal")
+-- context.stopbtxstyle()
+-- if publications.okay(currentdataset,currenttag,"volume") then
+-- context.btxspace()
+-- commands.btxflush(currentdataset,currenttag,"volume")
+-- if publications.okay(currentdataset,currenttag,"number") then
+-- context.ignorespaces()
+-- context.btxleftparenthesis()
+-- commands.btxflush(currentdataset,currenttag,"number")
+-- context.btxrightparenthesis()
+-- end
+-- elseif publications.okay(currentdataset,currenttag,"number") then
+-- context.btxlabeltext("default:number")
+-- context.btxspace()
+-- commands.btxflush(currentdataset,currenttag,"number")
+-- end
+-- if publications.okay(currentdataset,currenttag,"pages") then
+-- context.btxcomma()
+-- commands.btxflush(currentdataset,currenttag,"pages")
+-- end
+-- context.btxcomma()
+-- end
+-- end
+
+return {
+ --
+ -- metadata
+ --
+ name = "default",
+ version = "1.00",
+ comment = "DEFAULT specification",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ --
+ -- derived (combinations of) fields (all share the same default set)
+ --
+ virtual = {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+ },
+ --
+ -- special datatypes
+ --
+ types = {
+ author = "author", -- interpreted as name(s)
+ editor = "author", -- interpreted as name(s)
+ page = "pagenumber", -- number or range: f--t -- maybe just range
+ pages = "pagenumber", -- number or range: f--t -- maybe just range
+ volume = "range", -- number or range: f--t
+ number = "range", -- number or range: f--t
+ keywords = "keyword", -- comma|-|separated list
+ },
+ --
+ -- categories with their specific fields
+ --
+ categories = {
+ --
+ -- the following fields are for documentation and testing purposes
+ --
+ ["demo-a"] = {
+ sets = {
+ author = { "author", "institution", "organization" },
+ },
+ required = { "author", "title", "year" },
+ optional = { "subtitle" },
+ },
+ ["demo-b"] = {
+ sets = {
+ authors = { "author", "institution", "organization" },
+ },
+ required = { "authors", "title", "year" },
+ optional = { "subtitle" },
+ },
+ --
+ -- we only provide article and book (maybe a few more later) and we keep it
+ -- real simple. See the apa and aps definitions for more extensive examples
+ --
+ article = {
+ sets = {
+ author = { "author", "editor" },
+ },
+ required = {
+ "author", -- a set
+ "year",
+ },
+ optional = {
+ "title",
+ "keywords",
+ "journal", "volume", "number", "pages",
+ },
+ },
+ book = {
+ sets = {
+ author = { "author", "editor", },
+ editionset = { "edition", "volume", "number" },
+ },
+ required = {
+ "author", -- a set
+ "title",
+ "year",
+ },
+ optional = {
+ "subtitle",
+ "keywords",
+ "publisher", "address",
+ "editionset",
+ },
+ },
+ },
+}
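+
+-- As a rough illustration (the tag and the field values are made up), an entry
+-- that fits the simple article category above could look like this in a bib file:
+--
+-- @article{demo,
+--     author  = {A. N. Author and A. N. Other},
+--     year    = {2014},
+--     title   = {Some title},
+--     journal = {Some Journal},
+--     volume  = {1},
+--     number  = {2},
+--     pages   = {1--10},
+-- }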
diff --git a/tex/context/base/publ-imp-default.mkvi b/tex/context/base/publ-imp-default.mkvi
new file mode 100644
index 000000000..c7c88d74a
--- /dev/null
+++ b/tex/context/base/publ-imp-default.mkvi
@@ -0,0 +1,482 @@
+%D \module
+%D [ file=publ-imp-default,
+%D version=2014.02.06,
+%D title=Default bibliography style,
+%D subtitle=Publications,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+%D This default style defines only a few categories: book and article.
+%D If you want more, you need to load a more complete style such as \type {apa},
+%D \type {aps}, etc. The default style is used in the manuals that ship with
+%D \CONTEXT. This file is always loaded.
+
+\startbtxrenderingdefinitions[\s!default]
+
+\definebtxrendering
+ [\s!default]
+ [\c!specification=\s!default,
+ \c!sorttype=\v!default,
+ \c!numbering=num]
+
+\definebtx
+ [\s!default]
+ [\c!default=, % we do not want to fall|-|back on ourselves.
+ \c!otherstext={\space\btxlabeltext{default:others}},
+ %c!journalconversion=\v!normal,
+ \c!monthconversion=\v!number,
+ \c!separator:names:2={,\space},
+ \c!separator:names:3={\space\btxlabeltext{default:and}\space},
+ \c!separator:names:4={\space\btxlabeltext{default:and}\space}]
+
+\definebtx
+ [\s!default:\s!list]
+ [\s!default]
+ [\c!authorconversion=normalshort]
+
+\definebtx
+ [\s!default:\s!cite]
+ [\s!default]
+ [\c!alternative=num,
+ [\c!compress=\v!yes,
+ \c!sorttype=normal,
+ \c!authorconversion=\v!name]
+
+% List variants, some having specific settings:
+
+\definebtx
+ [\s!default:\s!list:\s!page]
+ [\s!default:\s!list]
+ [\c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{default:and}\space},
+ \c!separator:4={\space\btxlabeltext{default:and}\space},
+ \c!left={\btxleftparenthesis},
+ \c!right={\btxrightparenthesis}]
+
+\definebtx
+ [\s!default:\s!list:numbering]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:numbering:default]
+ [\s!default:\s!list:numbering]
+
+\definebtx
+ [\s!default:\s!list:numbering:num]
+ [\s!default:\s!list:numbering]
+
+\definebtx
+ [\s!default:\s!list:numbering:short]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:numbering:tag]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:numbering:index]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:author]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:editor]
+ [\s!default:\s!list:author]
+
+\definebtx
+ [\s!default:\s!list:url]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:doi]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:short]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:journal]
+ [\s!default:\s!list]
+ [\c!style=\v!italic]
+
+\definebtx
+ [\s!default:\s!list:title]
+ [\s!default:\s!list]
+ [\c!style=\v!italic,
+ \c!command=\Word]
+
+\definebtx
+ [\s!default:\s!list:title:article]
+ [\s!default:\s!list:title]
+ [\c!style=, % journal is set in italics
+ \c!command={\quotation\Word}]
+
+\definebtx
+ [\s!default:\s!list:title:book]
+ [\s!default:\s!list:title]
+
+% Citation variants, some having specific settings :
+
+\definebtx
+ [\s!default:\s!cite:author]
+ [\s!default:\s!cite]
+
+\definebtx
+ [\s!default:\s!cite:authornum]
+ [\s!default:\s!cite:author]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [\s!default:\s!cite:authoryear]
+ [\s!default:\s!cite:author]
+ [\c!left={(},
+ \c!right={)},
+ \c!inbetween={,\space}]
+
+\definebtx
+ [\s!default:\s!cite:authoryears]
+ [\s!default:\s!cite:author]
+
+\definebtx
+ [\s!default:\s!cite:author:num] % todo
+ [\s!default:\s!cite:authornum]
+ [\c!left={\space[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:author:year] % todo
+ [\s!default:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=]
+
+\definebtx
+ [\s!default:\s!cite:author:years] % todo
+ [\s!default:\s!cite:authoryears]
+ [\c!inbetween=,
+ \c!left={\space(},
+ \c!right={)}]
+
+\definebtx
+ [\s!default:\s!cite:year]
+ [\s!default:\s!cite]
+
+\definebtx
+ [\s!default:\s!cite:title]
+ [\s!default:\s!cite]
+ [\c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!style=\v!italic]
+
+\definebtx
+ [\s!default:\s!cite:tag]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:index]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:page]
+ [\s!default:\s!cite]
+ [\c!left=,
+ \c!right=]
+
+\definebtx
+ [\s!default:\s!cite:pages]
+ [\s!default:\s!cite:page]
+
+\definebtx
+ [\s!default:\s!cite:keywords]
+ [\s!default:\s!cite]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [\s!default:\s!cite:short]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:category]
+ [\s!default:\s!cite]
+
+\definebtx
+ [\s!default:\s!cite:url]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:doi]
+ [\s!default:\s!cite:url]
+
+\definebtx
+ [\s!default:\s!cite:num]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]},
+ \c!separator:2=\btxcommabreak,
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+\definebtx
+ [\s!default:\s!cite:default]
+ [\s!default:\s!cite:num]
+
+\definebtx
+ [\s!default:\s!cite:textnum]
+ [\s!default:\s!cite:num]
+ [\c!left=, % in apa: {Ref.\nbsp} or so
+ \c!right=,
+ \c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{default:and}\space},
+ \c!separator:4= {\space\btxlabeltext{default:and}\space}]
+
+\definebtx
+ [\s!default:\s!cite:entry]
+ [\s!default:\s!cite]
+ [\c!left={(},
+ \c!right={)}]
+
+% Multilingual text strings
+
+\setupbtxlabeltext
+ [en]
+ [\s!default:and=and,
+ \s!default:number={no.},
+ \s!default:edition={ed.},
+ \s!default:Editor=Editor, % Ed./Eds.
+ \s!default:Editors=Editors,
+ \s!default:Volume={Vol.},
+ \s!default:Volumes={Vols.},
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [nl]
+ [\s!default:and=en,
+ \s!default:number={nr.},
+ \s!default:edition={ed.}, % editie
+ \s!default:Editor=Editor, % Ed./Eds.
+ \s!default:Editors=Editors,
+ \s!default:Volume={Vol.},
+ \s!default:Volumes={Vols.},
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [fr]
+ [\s!default:and=et,
+ \s!default:number={n\high{o}},
+ \s!default:edition={édition},
+ \s!default:Editor=Éditeur,
+ \s!default:Editors=Éditeurs,
+ \s!default:Volume=Volume,
+ \s!default:Volumes=Volumes,
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [de]
+ [\s!default:and=und,
+ \s!default:number={nr.},
+ \s!default:edition=Auf\/lage,
+ \s!default:Editor=Herausgeber, % Hrsg./Hg.
+ \s!default:Editors=Herausgeber,
+ \s!default:Volume=Band, % Bd.
+ \s!default:Volumes={Bände},
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [it]
+ [\s!default:and=e,
+ \s!default:number={nº},
+ \s!default:edition={ed.}, % edizione
+ \s!default:Editor={A cura di},
+ \s!default:Editors={A cura di},
+ \s!default:Volume={Vol.}, % Volume
+ \s!default:Volumes={Vol.}, % Volumi
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [es]
+ [\s!default:and=y,
+ \s!default:number={nº},
+ \s!default:edition={ed.}, % edición
+ \s!default:Editor=Editor, % Ed./Eds.
+ \s!default:Editors=Editores,
+ \s!default:Volume={Vol.}, % Volumen
+ \s!default:Volumes={Vols.}, % Volúmenes
+ \s!default:others={et al.}]
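+
+% As an illustration only (not part of this specification), a style that needs yet
+% another language can extend these strings in the same way; the [pt] values below
+% are hypothetical:
+%
+% \setupbtxlabeltext
+%   [pt]
+%   [\s!default:and=e,
+%    \s!default:others={et al.}]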
+
+% First some helpers:
+
+\starttexdefinition btx:default:composed-title
+ \begingroup
+ \language[\currentbtxlanguage]
+ \btxflush{title}
+ \btxdoif {subtitle} {
+ \btxcolon
+ \btxflush{subtitle}
+ }
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:default:title
+ \btxdoif {title} {
+ \btxspace
+ \btxstartstyleandcolor [default:list:title:\currentbtxcategory]
+ \btxusecommand [default:list:title:\currentbtxcategory] {
+ \texdefinition{btx:default:composed-title}
+ }
+ \btxstopstyleandcolor
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:author
+ \btxdoif {author} {
+ \btxflush{author}
+ \doif {\btxfoundname{author}} {editor} {
+ \btxcomma
+ \btxsingularorplural {editor} {
+ \btxlabeltext{default:Editor}
+ } {
+ \btxlabeltext{default:Editors}
+ }
+ }
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:year
+ \btxflush{year}
+ \btxflushsuffix
+\stoptexdefinition
+
+\starttexdefinition btx:default:journal
+ \btxdoif {journal} {
+ \btxspace
+ \btxstartstyleandcolor [default:list:journal]
+ \btxusecommand [default:list:journal] {
+ \btxflush{journal}
+ }
+ \btxstopstyleandcolor
+ \btxdoifelse {volume} {
+ \btxspace
+ \btxflush{volume}
+ \btxdoif {number} {
+ \ignorespaces % brrr
+ \btxleftparenthesis
+ \btxflush{number}
+ \btxrightparenthesis
+ }
+
+ } {
+ \btxdoif {number} {
+ \btxlabeltext{default:number}
+ \btxspace
+ \btxflush{number}
+ }
+ }
+ \btxdoif {pages} {
+ \btxcomma
+ \btxflush{pages}
+ }
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:editionset
+ \btxdoif {editionset} {
+ \removeunwantedspaces
+ \removepunctuation
+ \btxleftparenthesis
+ \doif {\btxfoundname{editionset}} {edition} {
+ \btxflush{edition}
+ \btxspace
+ \btxlabeltext{default:edition}
+ \btxcomma
+ }
+ \btxdoif {volume} {
+ \btxoneorrange {volume} {
+ \btxlabeltext{default:Volume}
+ } {
+ \btxlabeltext{default:Volumes}
+ }
+ \btxspace
+ \btxflush{volume}
+ \btxcomma
+ }
+ \btxdoifelse {number} {
+ \btxlabeltext{default:number}
+ \btxspace
+ \btxflush{number}
+ } {
+ \removeunwantedspaces
+ \removepunctuation
+ }
+ \btxrightparenthesiscomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:publisher
+ \btxdoif {publisher} {
+ \btxflush{publisher}
+ \btxcomma
+ }
+ \btxdoif {address} {
+ \btxflush{address}
+ \btxcomma
+ }
+\stoptexdefinition
+
+% Then a minimal number of setups:
+
+\startsetups btx:default:list:article
+ \texdefinition{btx:default:author}
+ \texdefinition{btx:default:title}
+ \texdefinition{btx:default:journal}
+ \texdefinition{btx:default:year}
+ \removeunwantedspaces
+ \removepunctuation
+ \btxperiod
+\stopsetups
+
+\startsetups btx:default:list:book
+ \texdefinition{btx:default:author}
+ \texdefinition{btx:default:title}
+ \texdefinition{btx:default:editionset}
+ \texdefinition{btx:default:publisher}
+ \texdefinition{btx:default:year}
+ \removeunwantedspaces
+ \removepunctuation
+ \btxperiod
+\stopsetups
+
+\startsetups btx:default:list:unknown
+ \currentbtxcategory\btxcolon
+ \btxshowentryinline
+\stopsetups
+
+%D Experiment:
+
+\startsetups btx:default:lefttext
+ \currentbtxlefttext
+\stopsetups
+
+\startsetups btx:default:righttext
+ \currentbtxrighttext
+\stopsetups
+
+\stopbtxrenderingdefinitions
diff --git a/tex/context/base/publ-imp-definitions.mkvi b/tex/context/base/publ-imp-definitions.mkvi
new file mode 100644
index 000000000..8dfa931b3
--- /dev/null
+++ b/tex/context/base/publ-imp-definitions.mkvi
@@ -0,0 +1,123 @@
+%D \module
+%D [ file=publ-imp-def,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Definitions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Here we collect some helper setups. We assume that checking of a field
+%D happens in the calling setup, if only because that is the place where
+%D fences are also dealt with.
+
+% These will become texdefinitions
+
+\unprotect
+
+\startxmlsetups btx:format:crossref
+ \cite[\btxfield{crossref}]
+\stopxmlsetups
+
+\startxmlsetups btx:format:key
+ \btxfield{short}
+\stopxmlsetups
+
+\starttexdefinition btx:format:inject #link #content
+ \ifx\currentbtxinternal\empty
+ #content
+ \else\ifconditional\btxinteractive
+ \goto {#content} [#link]
+ \else
+ #content
+ \fi\fi
+\stoptexdefinition
+
+\startxmlsetups btx:format:doi
+ \edef\currentbtxfielddoi{\btxfield{doi}}
+ \ifx\currentbtxfielddoi\empty
+ {\tttf no-doi}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
+ \else
+ \hyphenatedurl{\currentbtxfielddoi}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:url
+ \edef\currentbtxfieldurl{\btxfield{url}}
+ \ifx\currentbtxfieldurl\empty
+ {\tttf no-url}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
+ \else
+ \hyphenatedurl{\currentbtxfieldurl}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:year
+ \edef\currentbtxfieldyear{\btxfield{year}}
+ \ifx\currentbtxfieldyear\empty
+ \btxlabeltext{\currentbtxspecification:nd}
+ \fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:month
+ \edef\currentbtxfieldmonth{\btxfield{month}}
+ \ifx\currentbtxfieldmonth\empty
+ {\tttf no-month}
+ \else
+ \edef\p_monthconversion{\btxparameter\c!monthconversion}
+ \ifx\p_monthconversion\empty % month month:mnem
+ \currentbtxfieldmonth
+ \else
+ \doifelsenumber \currentbtxfieldmonth {
+ \convertnumber\p_monthconversion\currentbtxfieldmonth
+ } {
+ \currentbtxfieldmonth
+ }
+ \fi
+ \fi
+\stopxmlsetups
+
+% macros:
+
+\starttexdefinition btx:style:italic #content
+ \dontleavehmode
+ \begingroup
+ \it
+ #content
+ \italiccorrection
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:style:bold #content
+ \dontleavehmode
+ \begingroup
+ \bf
+ #content
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:style:quote #content
+ \dontleavehmode
+ \startquote
+ #content
+ \stopquote
+\stoptexdefinition
+
+\starttexdefinition btx:style #style #content
+ \doifelsedefined {btx:style:#style} {
+ \texdefinition{btx:style:#style} {
+ #content
+ }
+ } {
+ #content
+ }
+\stoptexdefinition
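+
+%D As a purely illustrative usage sketch: the dispatcher above is called with a
+%D style name and some content; unknown style names simply pass the content
+%D through untouched.
+%D
+%D \starttyping
+%D \texdefinition{btx:style}{italic}{Some Title}
+%D \texdefinition{btx:style}{smallcaps}{falls back to the bare content}
+%D \stoptyping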
+
+\protect
diff --git a/tex/context/base/publ-imp-list.mkvi b/tex/context/base/publ-imp-list.mkvi
new file mode 100644
index 000000000..23256de33
--- /dev/null
+++ b/tex/context/base/publ-imp-list.mkvi
@@ -0,0 +1,96 @@
+%D \module
+%D [ file=publ-imp-list,
+%D version=2014.06.16,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Lists,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\starttexdefinition btx:list:inject #content
+ \ifconditional\btxinteractive
+ \ifx\currentbtxinternal\empty
+ #content
+ \else
+ \goto {
+ #content
+ } [
+ \s!internal(\currentbtxinternal)
+ ]
+ \fi
+ \else
+ #content
+ \fi
+\stoptexdefinition
+
+\starttexdefinition btx:list:helpers:concat
+ \space
+\stoptexdefinition
+
+\startsetups[\s!btx:\s!list:\s!page]
+ \texdefinition{\s!btx:\s!list:concat}
+ \texdefinition{\s!btx:\s!list:inject} {
+ % real pagenumber: todo, userpage
+ \currentbtxfirst
+ % order
+ \high{\currentbtxsecond}
+ }
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering]
+ \texdefinition {\s!btx:\s!list:inject} {
+ \currentbtxfirst
+ \btxparameter\c!stopper
+ }
+\stopsetups
+
+% Hans: can the following setups be condensed to one using some variable?
+
+\startsetups[\s!btx:\s!list:\s!numbering:default]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:default]
+ \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:default] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:num]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:num]
+ \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:num] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:short]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:short]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:short] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ \btxflushsuffix
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:tag]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:tag]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:tag] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:index]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:index]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:index] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-page.mkvi b/tex/context/base/publ-imp-page.mkvi
new file mode 100644
index 000000000..2d9040b10
--- /dev/null
+++ b/tex/context/base/publ-imp-page.mkvi
@@ -0,0 +1,51 @@
+%D \module
+%D [ file=publ-imp-page,
+%D version=2014.11.05,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Page numbers,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\startsetups \s!btx:\s!list:\s!page:concat
+ \ifcase\currentbtxoverflow
+ \btxparameter{\c!separator:\number\currentbtxconcat}
+ \fi
+\stopsetups
+
+% for the moment we have only one variant
+
+\startsetups [\s!btx:\s!list:\s!page]
+ \fastsetup{\s!btx:\s!list:\s!page:concat}
+ \fastsetup{\s!btx:\currentbtxspecification:\s!list:page-or-pages}
+ \ifconditional\btxinteractive
+ \goto {
+ \currentbtxfirstpage
+ } [
+ internal(\currentbtxfirstinternal)
+ ]
+ \ifx\currentbtxlastpage\empty \else
+ %\btxparameter\c!pageconnector
+ \btxparameter\c!range
+ \goto {
+ \currentbtxlastpage
+ } [
+ internal(\currentbtxlastinternal)
+ ]
+ \fi
+ \else
+ \currentbtxfirstpage
+ \ifx\currentbtxlastpage\empty \else
+ \btxparameter\c!range
+ \currentbtxlastpage
+ \fi
+ \fi
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-replacements.lua b/tex/context/base/publ-imp-replacements.lua
new file mode 100644
index 000000000..e0ac249fc
--- /dev/null
+++ b/tex/context/base/publ-imp-replacements.lua
@@ -0,0 +1,23 @@
+-- Many bibtex databases are polluted. This is a side effect of 7 bit encoding on the
+-- one hand and of tweaking the outcome on the other. The worst examples are the use
+-- of \rlap on whole names. We found that trying to cope with it all can drive one
+-- insane, so we stopped at some point. Clean up your mess or pay the price. But you
+-- can load this file (and similar ones) to help you out. There is simply no reward
+-- in trying to deal with it ourselves.
+
+return {
+ name = "replacements",
+ version = "1.00",
+ comment = "Good riddance",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ replacements = {
+ [ [[\emdash]] ] = "—",
+ [ [[\endash]] ] = "–",
+ [ [[{\emdash}]] ] = "—",
+ [ [[{\endash}]] ] = "–",
+ [ [[Th\^e\llap{\raise 0.5ex\hbox{\'{\relax}}}]] ] = "Thánh",
+ [ [[Th{\^e}\llap{\raise0.5ex\hbox{\'{\relax}}}]] ] = "Thánh",
+ [ [[Th{\^e}\llap{\raise 0.5ex\hbox{\'{\relax}}}]] ] = "Thánh",
+ },
+}
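+
+-- As a purely illustrative sketch (not part of this file), a user specific companion
+-- file with the same structure could map further local mess onto proper characters;
+-- the entries below are hypothetical:
+--
+-- return {
+--     name         = "my-replacements",
+--     version      = "1.00",
+--     comment      = "local cleanup",
+--     author       = "A User",
+--     replacements = {
+--         [ [[\ldots]] ] = "…",
+--     },
+-- }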
diff --git a/tex/context/base/publ-imp-test.bib b/tex/context/base/publ-imp-test.bib
new file mode 100644
index 000000000..0d81b49ff
--- /dev/null
+++ b/tex/context/base/publ-imp-test.bib
@@ -0,0 +1,294 @@
+% This is a test bibliography for developing publ-imp-XXX files. Although
+% meant for testing apa styles, it can also be used for other styles.
+%
+% All publication categories have an example entry here. Most have an entry
+% with a "-min" key, containing only the minimal set of fields.
+%
+% Maintained by: Alan Braslau, Hans Hagen, Robin Kirkham
+
+@article {test-article-min,
+ author = "An Author",
+ title = "Title-article",
+ journal = "Journal",
+ year = "Year"
+}
+
+@article {test-article,
+ author = "An Author",
+ title = "Title-article",
+ journal = "Journal",
+ year = "Year",
+ volume = "Volume",
+ number = "Number",
+ pages = "Pages",
+ month = "Month",
+ note = "Note"
+}
+
+% author, editor, or both
+
+@book {test-book-min,
+ author = "An Author",
+ title = "Title-book",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+% author, editor, or both
+% volume, number, or both
+
+@book {test-book,
+ author = "An Author",
+ editor = "An Editor",
+ title = "Title-book",
+ publisher = "Publisher",
+ year = "Year",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ note = "Note"
+}
+
+% no author
+
+@book {test-book-edited,
+ editor = "An Editor",
+ title = "Title-book-edited",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+@booklet {test-booklet-min,
+ title = "Title-booklet"
+}
+
+@booklet {test-booklet,
+ title = "Title-booklet",
+ author = "An Author",
+ howpublished = "How-published",
+ address = "Address",
+ month = "Month",
+ year = "Year",
+ note = "Note"
+}
+
+% author, editor, or both
+% chapter, pages, or both
+
+@inbook {test-inbook-min,
+ author = "An Author",
+ editor = "An Editor",
+ title = "Title-inbook",
+ chapter = "Chapter",
+ pages = "Pages",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+% author, editor, or both
+% chapter, pages, or both
+% volume, number, or both
+
+@inbook {test-inbook,
+ author = "An Author",
+ editor = "An Editor",
+ title = "Title-inbook",
+ chapter = "Chapter",
+ pages = "Pages",
+ publisher = "Publisher",
+ year = "Year",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ type = "Type",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ note = "Note"
+}
+
+@incollection {test-incollection-min,
+ author = "An Author",
+ title = "Title-incollection",
+ booktitle = "Booktitle",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+% volume, number, or both
+
+@incollection {test-incollection,
+ author = "An Author",
+ title = "Title-incollection",
+ booktitle = "Booktitle",
+ publisher = "Publisher",
+ year = "Year",
+ editor = "An Editor",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ type = "Type",
+ chapter = "Chapter",
+ pages = "Pages",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ note = "Note"
+}
+
+@inproceedings {test-inproceedings-min,
+ author = "An Author",
+ title = "Title-inproceedings",
+ booktitle = "Booktitle",
+ year = "Year"
+}
+
+% volume, number, or both
+
+@inproceedings {test-inproceedings,
+ author = "An Author",
+ title = "Title-inproceedings",
+ booktitle = "Booktitle",
+ year = "Year",
+ editor = "An Editor",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ pages = "Pages",
+ address = "Address",
+ month = "Month",
+ organization = "Organization",
+ publisher = "Publisher",
+ note = "Note"
+}
+
+@manual {test-manual-min,
+ title = "Title-manual"
+}
+
+@manual {test-manual,
+ title = "Title-manual",
+ author = "An Author",
+ organization = "Organization",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+    year = "Year",
+ note = "Note"
+}
+
+@mastersthesis {test-mastersthesis-min,
+ author = "An Author",
+ title = "Title-mastersthesis",
+ school = "School",
+ year = "Year",
+}
+
+@mastersthesis {test-mastersthesis,
+ author = "An Author",
+ title = "Title-mastersthesis",
+ school = "School",
+ year = "Year",
+ type = "Type",
+ address = "Address",
+ month = "Month",
+ note = "Note"
+}
+
+@proceedings {test-proceedings-min,
+ title = "Title-proceedings",
+ year = "Year",
+}
+
+% volume, number, or both
+
+@proceedings {test-proceedings,
+ title = "Title-proceedings",
+ year = "Year",
+ editor = "An Editor",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ address = "Address",
+ month = "Month",
+ organization = "Organization",
+ publisher = "Publisher",
+ note = "Note"
+}
+
+@phdthesis {test-phdthesis-min,
+ author = "An Author",
+ title = "Title-phdthesis",
+ school = "School",
+ year = "Year",
+}
+
+@phdthesis {test-phdthesis,
+ author = "An Author",
+ title = "Title-phdthesis",
+ school = "School",
+ year = "Year",
+ type = "Type",
+ address = "Address",
+ month = "Month",
+ note = "Note"
+}
+
+@techreport {test-techreport-min,
+ author = "An Author",
+ title = "Title-techreport",
+ institution = "Institution",
+ year = "Year",
+}
+
+@techreport {test-techreport,
+ author = "An Author",
+ title = "Title-techreport",
+ institution = "Institution",
+ year = "Year",
+ type = "Type",
+ number = "Number",
+ address = "Address",
+ month = "Month",
+ note = "Note"
+}
+
+@misc {test-misc,
+ author = "An Author",
+ title = "Title-misc",
+ howpublished = "How-published",
+ month = "Month",
+ year = "Year",
+ note = "Note"
+}
+
+@unpublished {test-unpublished-min,
+ author = "An Author",
+ title = "Title-unpublished",
+ note = "Note"
+}
+
+@unpublished {test-unpublished,
+ author = "An Author",
+ title = "Title-unpublished",
+ note = "Note",
+ month = "Month",
+ year = "Year"
+}
+
+% some other test entries
+
+@misc {test-manyauthor,
+ author = "A Author and B Author and C Author and D Author and
+ E Author and F Author and G Author and H Author and
+ I Author and J Author and K Author and L Author and
+ M Author and N Author and O Author and P Author and
+ Q Author and R Author and S Author and T Author and
+ U Author and V Author and W Author and X Author and
+ Y Author and Z Author",
+ title = "Title-Many Authors"
+}
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
new file mode 100644
index 000000000..34eeaec18
--- /dev/null
+++ b/tex/context/base/publ-ini.lua
@@ -0,0 +1,3295 @@
+if not modules then modules = { } end modules ['publ-ini'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- bah .. this 200 locals limit again ... so we need to split things up, as adding
+-- more do ... end blocks only makes it messier
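+--
+-- (purely editorial note:) each of the do ... end blocks below scopes its own set
+-- of locals, for instance:
+--
+-- do
+--     local only_visible_here = true -- goes out of scope at the matching end,
+--     -- ... code using it ...       -- so the main chunk stays below the limit
+-- end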
+
+-- plug the list sorted in the list mechanism (specification.sortorder)
+
+-- If we define two datasets with the same bib file we can consider
+-- sharing the data but that means that we need to have a parent which
+-- in turn makes things messy if we start manipulating entries in
+-- different ways (future) .. not worth the trouble as we will seldom
+-- load big bib files many times and even then ... fonts are larger.
+
+-- A potential optimization is to work with current_dataset, current_tag when
+-- fetching fields but the code becomes really messy that way (many currents). The
+-- gain is not that large anyway because not much publication stuff is flushed.
+
+local next, rawget, type, tostring, tonumber = next, rawget, type, tostring, tonumber
+local match, find, gsub = string.match, string.find, string.gsub
+local concat, sort, tohash = table.concat, table.sort, table.tohash
+local utfsub = utf.sub
+local mod = math.mod
+local formatters = string.formatters
+local allocate = utilities.storage.allocate
+local settings_to_array, settings_to_set = utilities.parsers.settings_to_array, utilities.parsers.settings_to_set
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local setmetatableindex = table.setmetatableindex
+local lpegmatch = lpeg.match
+local P, S, C, Ct, Cs, R, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.R, lpeg.Carg
+local upper = utf.upper
+
+local report = logs.reporter("publications")
+local report_cite = logs.reporter("publications","cite")
+local report_list = logs.reporter("publications","list")
+local report_reference = logs.reporter("publications","reference")
+local report_suffix = logs.reporter("publications","suffix")
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local trace_cite = false trackers.register("publications.cite", function(v) trace_cite = v end)
+local trace_missing = false trackers.register("publications.cite.missing", function(v) trace_missing = v end)
+local trace_references = false trackers.register("publications.cite.references", function(v) trace_references = v end)
+local trace_detail = false trackers.register("publications.detail", function(v) trace_detail = v end)
+local trace_suffixes = false trackers.register("publications.suffixes", function(v) trace_suffixes = v end)
+
+publications = publications or { }
+local datasets = publications.datasets
+local writers = publications.writers
+local casters = publications.casters
+local detailed = publications.detailed
+local enhancer = publications.enhancer
+local enhancers = publications.enhancers
+
+local tracers = publications.tracers or { }
+publications.tracers = tracers
+
+local variables = interfaces.variables
+
+local v_local = variables["local"]
+local v_global = variables["global"]
+
+local v_force = variables.force
+local v_normal = variables.normal
+local v_reverse = variables.reverse
+local v_none = variables.none
+local v_yes = variables.yes
+local v_no = variables.no
+local v_all = variables.all
+local v_always = variables.always
+local v_doublesided = variables.doublesided
+local v_default = variables.default
+local v_dataset = variables.dataset
+
+local conditionals = tex.conditionals
+
+local numbertochar = converters.characters
+
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+
+local isdefined = tex.isdefined
+
+----- basicsorter = sorters.basicsorter -- (a,b)
+----- sortstripper = sorters.strip
+----- sortsplitter = sorters.splitters.utf
+
+local manipulators = typesetters.manipulators
+local splitmanipulation = manipulators.splitspecification
+local applymanipulation = manipulators.applyspecification
+local manipulatormethods = manipulators.methods
+
+-- this might move elsewhere
+
+manipulatormethods.Word = converters.Word
+manipulatormethods.WORD = converters.WORD
+manipulatormethods.Words = converters.Words
+manipulatormethods.WORDS = converters.WORDS
+
+local context = context
+local commands = commands
+local implement = interfaces.implement
+local ctx_setmacro = interfaces.setmacro
+
+local ctx_doifelse = commands.doifelse
+local ctx_doif = commands.doif
+local ctx_doifnot = commands.doifnot
+local ctx_gobbletwoarguments = context.gobbletwoarguments
+
+local ctx_btxdirectlink = context.btxdirectlink
+local ctx_btxhandlelistentry = context.btxhandlelistentry
+local ctx_btxhandlelisttextentry = context.btxhandlelisttextentry
+local ctx_btxhandlecombientry = context.btxhandlecombientry
+local ctx_btxchecklistentry = context.btxchecklistentry
+local ctx_btxchecklistcombi = context.btxchecklistcombi
+
+local ctx_btxsetdataset = context.btxsetdataset
+local ctx_btxsettag = context.btxsettag
+local ctx_btxsetnumber = context.btxsetnumber
+local ctx_btxsetlanguage = context.btxsetlanguage
+local ctx_btxsetcombis = context.btxsetcombis
+local ctx_btxsetcategory = context.btxsetcategory
+local ctx_btxcitesetup = context.btxcitesetup
+local ctx_btxsubcitesetup = context.btxsubcitesetup
+local ctx_btxnumberingsetup = context.btxnumberingsetup
+local ctx_btxpagesetup = context.btxpagesetup
+local ctx_btxsetfirst = context.btxsetfirst
+local ctx_btxsetsecond = context.btxsetsecond
+----- ctx_btxsetthird = context.btxsetthird
+local ctx_btxsetsuffix = context.btxsetsuffix
+local ctx_btxsetinternal = context.btxsetinternal
+local ctx_btxsetlefttext = context.btxsetlefttext
+local ctx_btxsetrighttext = context.btxsetrighttext
+local ctx_btxsetbefore = context.btxsetbefore
+local ctx_btxsetafter = context.btxsetafter
+local ctx_btxsetbacklink = context.btxsetbacklink
+local ctx_btxsetbacktrace = context.btxsetbacktrace
+local ctx_btxsetcount = context.btxsetcount
+local ctx_btxsetconcat = context.btxsetconcat
+local ctx_btxsetoveflow = context.btxsetoverflow
+local ctx_btxsetfirstpage = context.btxsetfirstpage
+local ctx_btxsetlastpage = context.btxsetlastpage
+local ctx_btxsetfirstinternal = context.btxsetfirstinternal
+local ctx_btxsetlastinternal = context.btxsetlastinternal
+local ctx_btxstartcite = context.btxstartcite
+local ctx_btxstopcite = context.btxstopcite
+local ctx_btxstartciteauthor = context.btxstartciteauthor
+local ctx_btxstopciteauthor = context.btxstopciteauthor
+local ctx_btxstartsubcite = context.btxstartsubcite
+local ctx_btxstopsubcite = context.btxstopsubcite
+local ctx_btxstartlistentry = context.btxstartlistentry
+local ctx_btxstoplistentry = context.btxstoplistentry
+local ctx_btxstartcombientry = context.btxstartcombientry
+local ctx_btxstopcombientry = context.btxstopcombientry
+local ctx_btxlistsetup = context.btxlistsetup
+local ctx_btxflushauthor = context.btxflushauthor
+local ctx_btxsetnoflistentries = context.btxsetnoflistentries
+local ctx_btxsetcurrentlistentry = context.btxsetcurrentlistentry
+local ctx_btxsetcurrentlistindex = context.btxsetcurrentlistindex
+
+languages.data = languages.data or { }
+local data = languages.data
+
+local specifications = publications.specifications
+local currentspecification = specifications[false]
+local ignoredfields = { }
+publications.currentspecification = currentspecification
+
+local function setspecification(name)
+ currentspecification = specifications[name]
+ if trace then
+ report("setting specification %a",type(name) == "string" and name or "anything")
+ end
+ publications.currentspecification = currentspecification
+end
+
+publications.setspecification = setspecification
+
+implement {
+ name = "btxsetspecification",
+ actions = setspecification,
+ arguments = "string",
+}
+
+local optionalspace = lpeg.patterns.whitespace^0
+local prefixsplitter = optionalspace * lpeg.splitat(optionalspace * P("::") * optionalspace)
+
+statistics.register("publications load time", function()
+ local publicationsstats = publications.statistics
+ local nofbytes = publicationsstats.nofbytes
+ if nofbytes > 0 then
+ return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
+ statistics.elapsedtime(publications),
+ nofbytes,
+ publicationsstats.nofdefinitions or 0,
+ publicationsstats.nofshortcuts or 0
+ )
+ else
+ return nil
+ end
+end)
+
+luatex.registerstopactions(function()
+ local done = false
+ for name, dataset in sortedhash(datasets) do
+ for command, n in sortedhash(dataset.commands) do
+ if not done then
+ logspushtarget("logfile")
+ logsnewline()
+ report("start used btx commands")
+ logsnewline()
+ done = true
+ end
+ if isdefined[command] then
+ report("%-20s %-20s % 5i %s",name,command,n,"known")
+ elseif isdefined[upper(command)] then
+ report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
+ else
+ report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ end
+ end
+ end
+ if done then
+ logsnewline()
+ report("stop used btx commands")
+ logsnewline()
+ logspoptarget()
+ end
+end)
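+
+-- For illustration only (dataset name, command names and counts are hypothetical),
+-- the resulting block in the log file looks roughly like this:
+--
+--   start used btx commands
+--   default              em                       3 known
+--   default              MyMacro                  1 unknown
+--   stop used btx commands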
+
+-- multipass: we need to sort because hashing is random per run and not per
+-- version (not the most fortunate change in lua)
+
+local collected = allocate()
+local tobesaved = allocate()
+
+do
+
+ local function serialize(t)
+ local f_key_table = formatters[" [%q] = {"]
+ local f_key_string = formatters[" %s = %q,"]
+ local r = { "return {" }
+ local m = 1
+ for tag, entry in sortedhash(t) do
+ m = m + 1
+ r[m] = f_key_table(tag)
+ local s = sortedkeys(entry)
+ for i=1,#s do
+ local k = s[i]
+ m = m + 1
+ r[m] = f_key_string(k,entry[k])
+ end
+ m = m + 1
+ r[m] = " },"
+ end
+ r[m] = "}"
+ return concat(r,"\n")
+ end
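+
+    -- For illustration only: given the formatters above, the saved user data file
+    -- has roughly this shape (the tag and field names are hypothetical):
+    --
+    -- return {
+    --  ["foo2015"] = {
+    --   btxref = "foo2015",
+    --   btxset = "default",
+    --  },
+    -- }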
+
+ local function finalizer()
+ local prefix = tex.jobname -- or environment.jobname
+ local setnames = sortedkeys(datasets)
+ for i=1,#setnames do
+ local name = setnames[i]
+ local dataset = datasets[name]
+ local userdata = dataset.userdata
+ local checksum = nil
+ local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
+ if userdata and next(userdata) then
+ if job.passes.first then
+ local newdata = serialize(userdata)
+ checksum = md5.HEX(newdata)
+ io.savedata(username,newdata)
+ end
+ else
+ os.remove(username)
+ username = nil
+ end
+ local loaded = dataset.loaded
+ local sources = dataset.sources
+ local used = { }
+ for i=1,#sources do
+ local source = sources[i]
+ -- if loaded[source.filename] ~= "previous" then -- needs checking
+ if loaded[source.filename] ~= "previous" or loaded[source.filename] == "current" then
+ used[#used+1] = source
+ end
+ end
+ tobesaved[name] = {
+ usersource = {
+ filename = username,
+ checksum = checksum,
+ },
+ datasources = used,
+ }
+ end
+ end
+
+ local function initializer()
+ statistics.starttiming(publications)
+ for name, state in sortedhash(collected) do
+ local dataset = datasets[name]
+ local datasources = state.datasources
+ local usersource = state.usersource
+ if datasources then
+ for i=1,#datasources do
+ local filename = datasources[i].filename
+ publications.load {
+ dataset = dataset,
+ filename = filename,
+ kind = "previous"
+ }
+ end
+ end
+ if usersource then
+ dataset.userdata = table.load(usersource.filename) or { }
+ end
+ end
+ statistics.stoptiming(publications)
+ function initializer() end -- will go, for now, runtime loaded
+ end
+
+ job.register('publications.collected',tobesaved,initializer,finalizer)
+
+end
+
+-- we want to minimize references as there can be many (at least
+-- when testing)
+
+local nofcitations = 0
+local usedentries = nil
+local citetolist = nil
+local listtocite = nil
+local listtolist = nil
+
+do
+
+ local initialize = nil
+
+ initialize = function(t)
+ usedentries = allocate { }
+ citetolist = allocate { }
+ listtocite = allocate { }
+ listtolist = allocate { }
+ local names = { }
+ local internals = structures.references.internals
+ local p_collect = (C(R("09")^1) * Carg(1) / function(s,entry) listtocite[tonumber(s)] = entry end + P(1))^0
+ local nofunique = 0
+ local nofreused = 0
+ for i=1,#internals do
+ local entry = internals[i]
+ if entry then
+ local metadata = entry.metadata
+ if metadata then
+ local kind = metadata.kind
+ if kind == "full" then
+ -- reference (in list)
+ local userdata = entry.userdata
+ if userdata then
+ local tag = userdata.btxref
+ if tag then
+ local set = userdata.btxset or v_default
+ local s = usedentries[set]
+ if s then
+ local u = s[tag]
+ if u then
+ u[#u+1] = entry
+ else
+ s[tag] = { entry }
+ end
+ nofreused = nofreused + 1
+ else
+ usedentries[set] = { [tag] = { entry } }
+ nofunique = nofunique + 1
+ end
+ -- alternative: collect prev in group
+ local bck = userdata.btxbck
+ if bck then
+ lpegmatch(p_collect,bck,1,entry) -- for s in string.gmatch(bck,"[^ ]+") do listtocite[tonumber(s)] = entry end
+ local lst = tonumber(userdata.btxlst)
+ if lst then
+ listtolist[lst] = entry
+ end
+ else
+ local int = tonumber(userdata.btxint)
+ if int then
+ listtocite[int] = entry
+ end
+ end
+ local detail = datasets[set].details[tag]
+-- todo: these have to be pluggable
+ if detail then
+ local author = detail.author
+ if author then
+ for i=1,#author do
+ local a = author[i]
+ local s = a.surnames
+ if s then
+ local c = concat(s,"+")
+ local n = names[c]
+ if n then
+ n[#n+1] = a
+ break
+ else
+ names[c] = { a }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ elseif kind == "btx" or kind == "userdata" then -- will go: kind == "userdata"
+ -- list entry (each cite)
+ local userdata = entry.userdata
+ if userdata then
+ local int = tonumber(userdata.btxint)
+ if int then
+ citetolist[int] = entry
+ end
+ end
+ end
+ end
+ else
+ -- weird
+ end
+ end
+ for k, v in sortedhash(names) do
+ local n = #v
+ if n > 1 then
+ local original = v[1].original
+ for i=2,n do
+ if original ~= v[i].original then
+ report("potential clash in name %a",k)
+ for i=1,n do
+ v[i].state = 1
+ end
+ break
+ end
+ end
+ end
+ end
+ if trace_detail then
+ report("%s unique bibentries: %s reused entries",nofunique,nofreused)
+ end
+ initialize = nil
+ end
+
+ usedentries = setmetatableindex(function(_,k) if initialize then initialize() end return usedentries[k] end)
+ citetolist = setmetatableindex(function(_,k) if initialize then initialize() end return citetolist [k] end)
+ listtocite = setmetatableindex(function(_,k) if initialize then initialize() end return listtocite [k] end)
+ listtolist = setmetatableindex(function(_,k) if initialize then initialize() end return listtolist [k] end)
+
+ function publications.usedentries()
+ if initialize then
+ initialize()
+ end
+ return usedentries
+ end
+
+end
+
+-- match:
+--
+-- [current|previous|following] section
+-- [current|previous|following] block
+-- [current|previous|following] component
+--
+-- by prefix
+-- by dataset
+
+local findallused do
+
+ local reported = { }
+ local finder = publications.finder
+
+ findallused = function(dataset,reference,internal)
+ local current = datasets[dataset]
+ local finder = publications.finder -- for the moment, not yet in all betas
+ local find = finder and finder(current,reference)
+ local tags = not find and settings_to_array(reference)
+ local todo = { }
+ local okay = { } -- only if mark
+ local allused = usedentries[dataset] or { }
+ local luadata = current.luadata
+ local details = current.details
+ local ordered = current.ordered
+ if set then
+ local registered = { }
+ local function register(tag)
+ local entry = allused[tag]
+ if not entry then
+ local parent = details[tag].parent
+ if parent then
+ entry = allused[parent]
+ end
+ if entry then
+ report("using reference of parent %a for %a",parent,tag)
+ tag = parent
+ end
+ end
+ if registered[tag] then
+ return
+ end
+ if entry then
+ -- only once in a list but at some point we can have more (if we
+ -- decide to duplicate)
+ if #entry == 1 then
+ entry = entry[1]
+ else
+ -- same block and section
+ local done = false
+ if internal and internal > 0 then
+ -- first following in list
+ for i=1,#entry do
+ local e = entry[i]
+ if e.references.internal > internal then
+ done = e
+ break
+ end
+ end
+ if not done then
+ -- last preceding in list
+ for i=1,#entry do
+ local e = entry[i]
+ if e.references.internal < internal then
+ done = e
+ else
+ break
+ end
+ end
+ end
+ end
+ if done then
+ entry = done
+ else
+ entry = entry[1]
+ end
+ end
+ okay[#okay+1] = entry
+ end
+ todo[tag] = true
+ registered[tag] = true
+ return tag
+ end
+ if reference == "*" then
+ tags = { }
+ for i=1,#ordered do
+ local tag = ordered[i].tag
+ tag = register(tag)
+ tags[#tags+1] = tag
+ end
+ elseif find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ tag = register(tag)
+ tags[#tags+1] = tag
+ end
+ end
+ if #tags == 0 and not reported[reference] then
+ tags[1] = reference
+ reported[reference] = true
+ end
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ if luadata[tag] then
+ tag = register(tag)
+ tags[i] = tag
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
+ end
+ end
+ end
+ else
+ if find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ local parent = details[tag].parent
+ if parent then
+ tag = parent
+ end
+ tags[#tags+1] = tag
+ todo[tag] = true
+ end
+ end
+ if #tags == 0 and not reported[reference] then
+ tags[1] = reference
+ reported[reference] = true
+ end
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ local parent = details[tag].parent
+ if parent then
+ tag = parent
+ tags[i] = tag
+ end
+ if luadata[tag] then
+ todo[tag] = true
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
+ end
+ end
+ end
+ end
+ return okay, todo, tags
+ end
+
+end
+
+local function unknowncite(reference)
+ ctx_btxsettag(reference)
+ if trace_detail then
+ report("expanding %a cite setup %a","unknown","unknown")
+ end
+ ctx_btxcitesetup("unknown")
+end
+
+local concatstate = publications.concatstate
+
+local tobemarked = nil
+
+local function marknocite(dataset,tag,nofcitations,setup)
+ ctx_btxstartcite()
+ ctx_btxsetdataset(dataset)
+ ctx_btxsettag(tag)
+ ctx_btxsetbacklink(nofcitations)
+ if trace_detail then
+ report("expanding cite setup %a",setup)
+ end
+ ctx_btxcitesetup(setup)
+ ctx_btxstopcite()
+end
+
+local function markcite(dataset,tag,flush)
+ if not tobemarked then
+ return 0
+ end
+ local citation = tobemarked[tag]
+ if not citation then
+ return 0
+ end
+ if citation == true then
+ nofcitations = nofcitations + 1
+ if trace_cite then
+ report_cite("mark, dataset: %s, tag: %s, number: %s, state: %s",dataset,tag,nofcitations,"cited")
+ end
+ if flush then
+ marknocite(dataset,tag,nofcitations,"nocite")
+ end
+ tobemarked[tag] = nofcitations
+ return nofcitations
+ else
+ return citation
+ end
+end
+
+local marked_dataset = nil
+local marked_list = nil
+
+local function flushmarked(dataset,list,todo)
+ marked_dataset = dataset
+ marked_list = list
+end
+
+local function btxflushmarked()
+ if marked_list and tobemarked then
+ for i=1,#marked_list do
+ -- keep order
+ local tag = marked_list[i]
+ local tbm = tobemarked[tag]
+ if tbm == true or not tbm then
+ nofcitations = nofcitations + 1
+ marknocite(marked_dataset,tag,nofcitations,tbm and "nocite" or "invalid")
+ if trace_cite then
+ report_cite("mark, dataset: %s, tag: %s, number: %s, state: %s",marked_dataset,tag,nofcitations,tbm and "unset" or "invalid")
+ end
+ end
+ end
+ end
+ tobemarked = nil
+ marked_dataset = nil
+ marked_list = nil
+end
+
+implement { name = "btxflushmarked", actions = btxflushmarked }
+
+-- basic access
+
+local function getfield(dataset,tag,name) -- for the moment quick and dirty
+ local d = datasets[dataset].luadata[tag]
+ return d and d[name]
+end
+
+local function getdetail(dataset,tag,name) -- for the moment quick and dirty
+ local d = datasets[dataset].details[tag]
+ return d and d[name]
+end
+
+local function getcasted(dataset,tag,field,specification)
+ local current = datasets[dataset]
+ if current then
+ local data = current.luadata[tag]
+ if data then
+ local category = data.category
+ if not specification then
+ specification = currentspecification
+ end
+ local catspec = specification.categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = specification.types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ end
+ end
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = specification.types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ local data = current.details[tag]
+ if data then
+ local kind = specification.types[field]
+ return data[field], field, kind -- no check
+ end
+ end
+ end
+end
+
+local function getfaster(current,data,details,field,categories,types)
+ local category = data.category
+ local catspec = categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ end
+ end
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = types[field]
+ return detailed[kind][value]
+ end
+ end
+ if details then
+ local kind = types[field]
+ return details[field]
+ end
+end
+
+local function getdirect(dataset,data,field,catspec) -- no field check, no dataset check
+ local catspec = (catspec or currentspecification).categories[data.category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return value
+ end
+ end
+ end
+ end
+ return fields[field] and data[field] or nil -- redundant check
+ end
+end
+
+local function getfuzzy(data,field,categories) -- no field check, no dataset check
+ local catspec
+ if categories then
+ local category = data.category
+ if category then
+ catspec = categories[data.category]
+ end
+ end
+ if not field then
+ return
+ elseif not catspec then
+ return data[field]
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return value
+ end
+ end
+ end
+ end
+ return fields[field] and data[field] or nil -- redundant check
+ end
+end
+
+publications.getfield = getfield
+publications.getdetail = getdetail
+publications.getcasted = getcasted
+publications.getfaster = getfaster
+publications.getdirect = getdirect
+publications.getfuzzy = getfuzzy
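+
+-- As a usage sketch (the dataset and tag names here are hypothetical): other modules
+-- and styles normally fetch values through these accessors instead of poking into
+-- the datasets directly, e.g.
+--
+-- local year   = publications.getfield ("default","foo2015","year")
+-- local author = publications.getcasted("default","foo2015","author")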
+
+-- this needs to be checked: a specific type should have a checker
+
+-- author pagenumber keyword url
+
+-- function commands.btxsingularorplural(dataset,tag,name)
+-- local d = getcasted(dataset,tag,name)
+-- if type(d) == "table" then
+-- d = #d <= 1
+-- else
+-- d = true
+-- end
+-- ctx_doifelse(d)
+-- end
+
+-- function commands.oneorrange(dataset,tag,name)
+-- local d = datasets[dataset].luadata[tag] -- details ?
+-- if d then
+-- d = d[name]
+-- end
+-- if type(d) == "string" then
+-- d = find(d,"%-")
+-- else
+-- d = false
+-- end
+-- ctx_doifelse(not d) -- so singular is default
+-- end
+
+-- function commands.firstofrange(dataset,tag,name)
+-- local d = datasets[dataset].luadata[tag] -- details ?
+-- if d then
+-- d = d[name]
+-- end
+-- if type(d) == "string" then
+-- context(match(d,"([^%-]+)"))
+-- end
+-- end
+
+local inspectors = allocate()
+local nofmultiple = allocate()
+local firstandlast = allocate()
+
+publications.inspectors = inspectors
+inspectors.nofmultiple = nofmultiple
+inspectors.firstandlast = firstandlast
+
+function nofmultiple.author(d)
+ return type(d) == "table" and #d or 0
+end
+
+function publications.singularorplural(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = nofmultiple[kind]
+ if test then
+ local n = test(data)
+ return not n or n < 2
+ end
+ end
+ return true
+end
+
+function firstandlast.range(d)
+ if type(d) == "table" then
+ return d[1], d[2]
+ end
+end
+
+firstandlast.pagenumber = firstandlast.range
+
+function publications.oneorrange(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = firstandlast[kind]
+ if test then
+ local first, last = test(data)
+ return not (first and last)
+ end
+ end
+ return nil -- nothing at all
+end
+
+function publications.firstofrange(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = firstandlast[kind]
+ if test then
+ local first = test(data)
+ if first then
+ return first
+ end
+ end
+ end
+end
+
+function publications.lastofrange(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = firstandlast[kind]
+ if test then
+ local first, last = test(data)
+ if last then
+ return last
+ end
+ end
+ end
+end
+
+local three_strings = { "string", "string", "string" }
+
+implement {
+ name = "btxsingularorplural",
+ actions = { publications.singularorplural, ctx_doifelse },
+ arguments = three_strings
+}
+
+implement {
+ name = "btxoneorrange",
+ actions = { publications.oneorrange, function(b) if b == nil then ctx_gobbletwoarguments() else ctx_doifelse(b) end end },
+ arguments = three_strings
+}
+
+implement {
+ name = "btxfirstofrange",
+ actions = { publications.firstofrange, context },
+ arguments = three_strings
+}
+
+implement {
+ name = "btxlastofrange",
+ actions = { publications.lastofrange, context },
+ arguments = three_strings
+}
+
+-- basic loading
+
+function publications.usedataset(specification)
+ specification.kind = "current"
+ publications.load(specification)
+end
+
+implement {
+ name = "btxusedataset",
+ actions = publications.usedataset,
+ arguments = {
+ {
+ { "specification" },
+ { "dataset" },
+ { "filename" },
+ }
+ }
+}
+
+implement {
+ name = "convertbtxdatasettoxml",
+ arguments = { "string", true },
+ actions = publications.converttoxml
+}
+
+-- enhancing
+
+do
+
+ -- maybe not redo when already done
+
+ local function shortsorter(a,b)
+ local ay, by = a[2], b[2] -- year
+ if ay ~= by then
+ return ay < by
+ end
+ local ay, by = a[3], b[3] -- suffix
+ if ay ~= by then
+ -- bah, bah, bah
+ local an, bn = tonumber(ay), tonumber(by)
+ if an and bn then
+ return an < bn
+ else
+ return ay < by
+ end
+ end
+ return a[4] < b[4]
+ end
+
+    -- We could avoid loops by combining enhancers but that only makes things
+    -- messier, and for documents that use publications the few extra milliseconds
+    -- are irrelevant (there is for sure more to gain by proper coding of the
+    -- source and/or style).
+
+ local f_short = formatters["%s%02i"]
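+
+    -- So, for illustration, an author hashing to "Knuth" with a 1984 publication
+    -- ends up with the short key "Knuth84":
+    --
+    -- local short = f_short("Knuth",mod(1984,100)) -- "Knuth84"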
+
+ function publications.enhancers.suffixes(dataset)
+ if not dataset then
+ return -- bad news
+ else
+ report("analyzing previous publication run for %a",dataset.name)
+ end
+ dataset.suffixed = true
+ --
+ local used = usedentries[dataset.name]
+ if not used then
+ return -- probably a first run
+ end
+ local luadata = dataset.luadata
+ local details = dataset.details
+ local ordered = dataset.ordered
+ if not luadata or not details or not ordered then
+ report("nothing to be analyzed in %a",dataset.name)
+ return -- also bad news
+ end
+ -- we have two suffixes: author (dependent of type) and short
+ local kind = dataset.authorconversion or "name"
+ local field = "author" -- currently only author
+ local shorts = { }
+ local authors = { }
+ local hasher = publications.authorhashers[kind]
+ local shorter = publications.authorhashers.short
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if entry then
+ local tag = entry.tag
+ if tag then
+ local use = used[tag]
+ if use then
+ -- use is a table of used list entries (so there can be more) and we just look at
+ -- the first one for btx properties
+ local listentry = use[1]
+ local userdata = listentry.userdata
+ local btxspc = userdata and userdata.btxspc
+ if btxspc then
+ -- we could act on the 3rd arg returned by getcasted but in general any string will do
+ -- so we deal with it in the author hashers ... maybe some day ...
+ local author = getcasted(dataset,tag,field,specifications[btxspc])
+ local kind = type(author)
+ if kind == "table" or kind == "string" then
+ if u then
+ u = listentry.entries.text -- hm
+ else
+ u = "0"
+ end
+ local year = tonumber(entry.year) or 9999
+ local data = { tag, year, u, i }
+ -- authors
+ local hash = hasher(author)
+ local found = authors[hash]
+ if not found then
+ authors[hash] = { data }
+ else
+ found[#found+1] = data
+ end
+ -- shorts
+ local hash = shorter(author)
+ local short = f_short(hash,mod(year,100))
+ local found = shorts[short]
+ if not found then
+ shorts[short] = { data }
+ else
+ found[#found+1] = data
+ end
+ --
+ else
+ report("author typecast expected for field %a",field)
+ end
+ else
+ --- no spec so let's forget about it
+ end
+ end
+ end
+ end
+ end
+ local function addsuffix(hashed,key,suffixkey)
+ for hash, tags in sortedhash(hashed) do -- ordered ?
+ local n = #tags
+ if n == 0 then
+ -- skip
+ elseif n == 1 then
+ local tagdata = tags[1]
+ local tag = tagdata[1]
+ local detail = details[tag]
+ local entry = luadata[tag]
+ local year = entry.year
+ detail[key] = hash
+ elseif n > 1 then
+ sort(tags,shortsorter) -- or take first -- todo: proper utf sorter
+ local lastyear = nil
+ local suffix = nil
+ local previous = nil
+ for i=1,n do
+ local tagdata = tags[i]
+ local tag = tagdata[1]
+ local detail = details[tag]
+ local entry = luadata[tag]
+ local year = entry.year
+ detail[key] = hash
+ if year ~= lastyear then
+ lastyear = year
+ suffix = 1
+ else
+ if previous and suffix == 1 then
+ previous[suffixkey] = suffix
+ end
+ suffix = suffix + 1
+ detail[suffixkey] = suffix
+ end
+ previous = detail
+ end
+ end
+ if trace_suffixes then
+ for i=1,n do
+ local tag = tags[i][1]
+ local year = luadata[tag].year
+ local suffix = details[tag].suffix
+ if suffix then
+ report_suffix("%s: tag %a, hash %a, year %a, suffix %a",key,tag,hash,year or '',suffix or '')
+ else
+ report_suffix("%s: tag %a, hash %a, year %a",key,tag,hash,year or '')
+ end
+ end
+ end
+ end
+ end
+ addsuffix(shorts, "shorthash", "shortsuffix") -- todo: shorthash
+ addsuffix(authors,"authorhash","authorsuffix")
+ end
+
+ -- utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.suffixes")
+
+end
+
+implement {
+ name = "btxaddentry",
+ actions = function(name,settings,content)
+ local dataset = datasets[name]
+ if dataset then
+ publications.addtexentry(dataset,settings,content)
+ end
+ end,
+ arguments = { "string", "string", "string" }
+}
+
+function publications.checkeddataset(name,default)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ return name
+ elseif default and default ~= "" then
+ return default
+ else
+ report("unknown dataset %a, forcing %a",name,v_default)
+ return v_default
+ end
+end
+
+implement {
+ name = "btxsetdataset",
+ actions = { publications.checkeddataset, context },
+ arguments = { "string", "string"}
+}
+
+implement {
+ name = "btxsetentry",
+ actions = function(name,tag)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ if dataset.luadata[tag] then
+ context(tag)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end,
+ arguments = { "string", "string" },
+}
+
+-- rendering of fields
+
+do
+
+ local typesetters = { }
+ publications.typesetters = typesetters
+
+ local function defaulttypesetter(field,value,manipulator)
+ if value and value ~= "" then
+ value = tostring(value)
+ context(manipulator and applymanipulation(manipulator,value) or value)
+ end
+ end
+
+ setmetatableindex(typesetters,function(t,k)
+ local v = defaulttypesetter
+ t[k] = v
+ return v
+ end)
+
+ function typesetters.string(field,value,manipulator)
+ if value and value ~= "" then
+ context(manipulator and applymanipulation(manipulator,value) or value)
+ end
+ end
+
+ function typesetters.author(field,value,manipulator)
+ ctx_btxflushauthor(field)
+ end
+
+ -- function typesetters.url(field,value,manipulator)
+ -- ....
+ -- end
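+
+    -- As a sketch of how a style could plug in its own renderer for a field type
+    -- (the "isbn" type name is hypothetical here):
+    --
+    -- function typesetters.isbn(field,value,manipulator)
+    --     context.hyphenatedurl(value)
+    -- end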
+
+ -- if there is no specification then we're in trouble but there is
+ -- always a default anyway
+ --
+ -- there's also always a fields table but it can be empty due to
+ -- lack of specifications
+ --
+ -- then there can be cases where we have no specification for instance
+ -- when we have a special kind of database
+
+ local splitter = lpeg.splitat(":")
+
+ local function permitted(category,field)
+ local catspec = currentspecification.categories[category]
+ if not catspec then
+ report("invalid category %a, %s",category,"no specification") -- can't happen
+ return false
+ end
+ local fields = catspec.fields
+ if not fields then
+ report("invalid category %a, %s",category,"no fields") -- can't happen
+ return false
+ end
+ if ignoredfields and ignoredfields[field] then
+ return false
+ end
+ local virtualfields = currentspecification.virtualfields
+ if virtualfields and virtualfields[field] then
+ return true
+ end
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ return set
+ end
+ end
+ if fields[field] then
+ return true
+ end
+ local f, l = lpegmatch(splitter,field)
+ if f and l and fields[f] then
+ return true -- language specific one
+ end
+ end
+
+ local function found(dataset,tag,field,valid,fields)
+ if valid == true then
+ -- local fields = dataset.luadata[tag]
+ local okay = fields[field]
+ if okay then
+ return field, okay
+ end
+ local details = dataset.details[tag]
+ local value = details[field]
+ if value then
+ return field, value
+ end
+ elseif valid then
+ -- local fields = dataset.luadata[tag]
+ for i=1,#valid do
+ local field = valid[i]
+ local value = fields[field]
+ if value then
+ return field, value
+ end
+ end
+ local details = dataset.details[tag]
+            for i=1,#valid do
+                local field = valid[i]
+                local value = details[field]
+ if value then
+ return field, value
+ end
+ end
+ end
+ end
+
+ local function get(dataset,tag,field,what,check,catspec) -- somewhat more extensive
+ local current = rawget(datasets,dataset)
+ if current then
+ local data = current.luadata[tag]
+ if data then
+ local category = data.category
+ local catspec = (catspec or currentspecification).categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ if check then
+ for i=1,#set do
+ local field = set[i]
+ local kind = (not check or data[field]) and fields[field]
+ if kind then
+ return what and kind or field
+ end
+ end
+ elseif what then
+ local t = { }
+ for i=1,#set do
+ t[i] = fields[set[i]] or "unknown"
+ end
+ return concat(t,",")
+ else
+ return concat(set,",")
+ end
+ end
+ end
+ local kind = (not check or data[field]) and fields[field]
+ if kind then
+ return what and kind or field
+ end
+ end
+ end
+ end
+ return ""
+ end
+
+ publications.permitted = permitted
+ publications.found = found
+ publications.get = get
+
+ local function btxflush(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local manipulator, field = splitmanipulation(field)
+ local category = fields.category
+ local valid = permitted(category,field)
+ if valid then
+ local name, value = found(dataset,tag,field,valid,fields)
+ if value then
+ typesetters[currentspecification.types[name]](field,value,manipulator)
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","unknown","entry",field,category,tag,name)
+ end
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","invalid","entry",field,category,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function btxfield(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local category = fields.category
+ local manipulator, field = splitmanipulation(field)
+ if permitted(category,field) then
+ local value = fields[field]
+ if value then
+ typesetters[currentspecification.types[field]](field,value,manipulator)
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","unknown","field",field,category,tag,name)
+ end
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","invalid","field",field,category,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function btxdetail(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local details = dataset.details[tag]
+ if details then
+ local category = fields.category
+ local manipulator, field = splitmanipulation(field)
+ if permitted(category,field) then
+ local value = details[field]
+ if value then
+ typesetters[currentspecification.types[field]](field,value,manipulator)
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","unknown","detail",field,category,tag,name)
+ end
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","invalid","detail",field,category,tag,name)
+ end
+ else
+ report("no details for tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function btxdirect(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local manipulator, field = splitmanipulation(field)
+ local value = fields[field]
+ if value then
+ context(typesetters.default(field,value,manipulator))
+ elseif trace_detail then
+ report("field %a of tag %a in dataset %a has no value",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function okay(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local category = fields.category
+ local valid = permitted(category,field)
+ if valid then
+ local foundname = found(dataset,tag,field,valid,fields)
+ return foundname and foundname ~= ""
+ end
+ end
+ end
+ end
+
+ publications.okay = okay
+
+ implement { name = "btxfield", actions = btxfield, arguments = { "string", "string", "string" } }
+ implement { name = "btxdetail", actions = btxdetail, arguments = { "string", "string", "string" } }
+ implement { name = "btxflush", actions = btxflush, arguments = { "string", "string", "string" } }
+ implement { name = "btxdirect", actions = btxdirect, arguments = { "string", "string", "string" } }
+
+ implement { name = "btxfieldname", actions = { get, context }, arguments = { "string", "string", "string", false, false } }
+ implement { name = "btxfieldtype", actions = { get, context }, arguments = { "string", "string", "string", true, false } }
+ implement { name = "btxfoundname", actions = { get, context }, arguments = { "string", "string", "string", false, true } }
+ implement { name = "btxfoundtype", actions = { get, context }, arguments = { "string", "string", "string", true, true } }
+
+ implement { name = "btxdoifelse", actions = { okay, ctx_doifelse }, arguments = { "string", "string", "string" } }
+ implement { name = "btxdoif", actions = { okay, ctx_doif }, arguments = { "string", "string", "string" } }
+ implement { name = "btxdoifnot", actions = { okay, ctx_doifnot }, arguments = { "string", "string", "string" } }
+
+end
+
+-- -- alternative approach: keep data at the tex end
+
+function publications.singularorplural(singular,plural)
+ if lastconcatsize and lastconcatsize > 1 then
+ context(plural)
+ else
+ context(singular)
+ end
+end
+
+-- loading
+
+do
+
+ local patterns = {
+ "publ-imp-%s.mkvi",
+ "publ-imp-%s.mkiv",
+ "publ-imp-%s.tex",
+ }
+
+ local function failure(name)
+ report("unknown library %a",name)
+ end
+
+ local function action(name,foundname)
+ context.input(foundname)
+ end
+
+ function publications.loaddefinitionfile(name) -- a more specific name
+ resolvers.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+
+ local patterns = {
+ "publ-imp-%s.lua",
+ }
+
+ function publications.loadreplacementfile(name) -- a more specific name
+ resolvers.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = publications.loaders.registercleaner,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+
+ implement { name = "btxloaddefinitionfile", actions = publications.loaddefinitionfile, arguments = "string" }
+ implement { name = "btxloadreplacementfile", actions = publications.loadreplacementfile, arguments = "string" }
+
+end
+
+-- lists
+
+do
+
+ publications.lists = publications.lists or { }
+ local lists = publications.lists
+
+ local context = context
+ local structures = structures
+
+ local references = structures.references
+ local sections = structures.sections
+
+ -- per rendering
+
+ local renderings = { } --- per dataset
+
+ setmetatableindex(renderings,function(t,k)
+ local v = {
+ list = { },
+ done = { },
+ alldone = { },
+ used = { },
+ registered = { },
+ ordered = { },
+ shorts = { },
+ method = v_none,
+ texts = setmetatableindex("table"),
+ currentindex = 0,
+ }
+ t[k] = v
+ return v
+ end)
+
+ -- helper
+
+ function lists.register(dataset,tag,short) -- needs checking now that we split
+ local r = renderings[dataset]
+ if not short or short == "" then
+ short = tag
+ end
+ if trace then
+ report("registering publication entry %a with shortcut %a",tag,short)
+ end
+ local top = #r.registered + 1
+ -- do we really need these
+ r.registered[top] = tag
+ r.ordered [tag] = top
+ r.shorts [tag] = short
+ end
+
+ function lists.nofregistered(dataset)
+ return #renderings[dataset].registered
+ end
+
+ local function validkeyword(dataset,tag,keyword,specification) -- todo: pass specification
+ local kw = getcasted(dataset,tag,"keywords",specification)
+ if kw then
+ for i=1,#kw do
+ if keyword[kw[i]] then
+ return true
+ end
+ end
+ end
+ end
+
+ local function registerpage(pages,tag,result,listindex)
+ local p = pages[tag]
+ local r = result[listindex].references
+ if p then
+ local last = p[#p][2]
+ local real = last.realpage
+ if real ~= r.realpage then
+ p[#p+1] = { listindex, r }
+ end
+ else
+ pages[tag] = { { listindex, r } }
+ end
+ end
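+
+ -- registerpage collects, per tag, the places where an entry shows up in the list, and
+ -- only appends a slot when the real page changes; a sketch (hypothetical tag, indices):
+ --
+ -- pages["knuth1984"] = {
+ -- { 12, r12 }, -- listindex plus its references table
+ -- { 34, r34 }, -- appended because r34.realpage differs from r12.realpage
+ -- }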
+
+
+ -- tag | listindex | reference | userdata | dataindex
+
+ local methods = { }
+ lists.methods = methods
+
+ methods[v_dataset] = function(dataset,rendering,keyword)
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local list = rendering.list
+ for tag, data in sortedhash(luadata) do
+ if not keyword or validkeyword(dataset,tag,keyword) then
+ local index = data.index or 0
+ list[#list+1] = { tag, index, 0, false, index }
+ end
+ end
+ end
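+
+ -- a collected entry follows the tag | listindex | reference | userdata | dataindex
+ -- layout documented above; for the dataset method there is no userdata and both the
+ -- listindex and dataindex slots come from the entry index, e.g. (hypothetical tag):
+ --
+ -- list[#list+1] = { "knuth1984", 3, 0, false, 3 }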
+
+ -- todo: names = { "btx" }
+
+ methods[v_force] = function (dataset,rendering,keyword)
+ -- only for checking, can have duplicates, todo: collapse page numbers, although
+ -- we would then also need deferred writes
+ local result = structures.lists.filter(rendering.specifications) or { }
+ local list = rendering.list
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata -- better check on metadata.kind == "btx"
+ if u then
+ local set = u.btxset or v_default
+ if set == dataset then
+ local tag = u.btxref
+ if tag and (not keyword or validkeyword(dataset,tag,keyword)) then
+ local data = luadata[tag]
+ list[#list+1] = { tag, listindex, 0, u, data and data.index or 0 }
+ end
+ end
+ end
+ end
+ lists.result = result
+ end
+
+ -- local : if tag and done[tag] ~= section then ...
+ -- global : if tag and not alldone[tag] and done[tag] ~= section then ...
+
+ methods[v_local] = function(dataset,rendering,keyword)
+ local result = structures.lists.filter(rendering.specifications) or { }
+ local section = sections.currentid()
+ local list = rendering.list
+ local repeated = rendering.repeated == v_yes
+ local r_done = rendering.done
+ local r_alldone = rendering.alldone
+ local done = repeated and { } or r_done
+ local alldone = repeated and { } or r_alldone
+ local doglobal = rendering.method == v_global
+ local traced = { } -- todo: only if interactive (backlinks) or when tracing
+ local pages = { }
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ -- handy for tracing :
+ rendering.result = result
+ --
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u then -- better check on metadata.kind == "btx"
+ local set = u.btxset or v_default
+ if set == dataset then
+-- inspect(structures.references.internals[tonumber(u.btxint)])
+ local tag = u.btxref
+ if not tag then
+ -- problem
+ elseif done[tag] == section then -- a bit messy for global and all and so
+ -- skip
+ elseif doglobal and alldone[tag] then
+ -- skip
+ elseif not keyword or validkeyword(dataset,tag,keyword) then
+ if traced then
+ local l = traced[tag]
+ if l then
+ l[#l+1] = u.btxint
+ else
+ local data = luadata[tag]
+ local l = { tag, listindex, 0, u, data and data.index or 0 }
+ list[#list+1] = l
+ traced[tag] = l
+ end
+ else
+ done[tag] = section
+ alldone[tag] = true
+ local data = luadata[tag]
+ list[#list+1] = { tag, listindex, 0, u, data and data.index or 0 }
+ end
+ end
+ if tag then
+ registerpage(pages,tag,result,listindex)
+ end
+ end
+ end
+ end
+ if traced then
+ for tag in next, traced do
+ done[tag] = section
+ alldone[tag] = true
+ end
+ end
+ lists.result = result
+ structures.lists.result = result
+ rendering.pages = pages -- or list.pages
+ end
+
+ methods[v_global] = methods[v_local]
+
+ function lists.collectentries(specification)
+ local dataset = specification.dataset
+ if not dataset then
+ return
+ end
+ local rendering = renderings[dataset]
+ if not rendering then
+ return
+ end
+ local method = specification.method or v_none
+ local ignored = specification.ignored or ""
+ rendering.method = method
+ rendering.ignored = ignored ~= "" and settings_to_set(ignored) or nil
+ rendering.list = { }
+ rendering.done = { }
+ rendering.sorttype = specification.sorttype or v_default
+ rendering.criterium = specification.criterium or v_none
+ rendering.repeated = specification.repeated or v_no
+ rendering.group = specification.group or ""
+ rendering.specifications = specification
+ local filtermethod = methods[method]
+ if not filtermethod then
+ report_list("invalid method %a",method or "")
+ return
+ end
+ report_list("collecting entries using method %a and sort order %a",method,rendering.sorttype)
+ lists.result = { } -- kind of reset
+ local keyword = specification.keyword
+ if keyword and keyword ~= "" then
+ keyword = settings_to_set(keyword)
+ else
+ keyword = nil
+ end
+ filtermethod(dataset,rendering,keyword)
+ local list = rendering.list
+ ctx_btxsetnoflistentries(list and #list or 0)
+ end
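+
+ -- a minimal usage sketch; normally the specification comes from the tex end via the
+ -- btxcollectlistentries call below, and the values shown here assume the english
+ -- interface constants:
+ --
+ -- lists.collectentries {
+ -- dataset = "default",
+ -- method = "local", -- one of the methods defined above
+ -- criterium = "all",
+ -- keyword = "physics", -- optional keyword filter
+ -- }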
+
+ -- for determining width
+
+ local groups = setmetatableindex("number")
+
+ function lists.prepareentries(dataset)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local used = rendering.used
+ local forceall = rendering.criterium == v_all
+ local repeated = rendering.repeated == v_yes
+ local sorttype = rendering.sorttype or v_default
+ local group = rendering.group or ""
+ local sorter = lists.sorters[sorttype]
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local newlist = { }
+ local lastreferencenumber = groups[group] -- current.lastreferencenumber or 0
+ for i=1,#list do
+ local li = list[i]
+ local tag = li[1]
+ local entry = luadata[tag]
+ if entry then
+ if forceall or repeated or not used[tag] then
+ newlist[#newlist+1] = li
+ -- already here:
+ if not repeated then
+ used[tag] = true -- beware we keep the old state (one can always use criterium=all)
+ end
+ end
+ end
+ end
+ if type(sorter) == "function" then
+ list = sorter(dataset,rendering,newlist,sorttype) or newlist
+ else
+ list = newlist
+ end
+ local newlist = { }
+ local tagtolistindex = { }
+ rendering.tagtolistindex = tagtolistindex
+ for i=1,#list do
+ local li = list[i]
+ local tag = li[1]
+ local entry = luadata[tag]
+ if entry then
+ local detail = details[tag]
+ if not detail then
+ -- fatal error
+ report("fatal error, missing details for tag %a in dataset %a (enhanced: %s)",tag,dataset,current.enhanced and "yes" or "no")
+ -- lastreferencenumber = lastreferencenumber + 1
+ -- details[tag] = { referencenumber = lastreferencenumber }
+ -- li[3] = lastreferencenumber
+ -- tagtolistindex[tag] = i
+ -- newlist[#newlist+1] = li
+ elseif detail.parent then
+ -- skip this one
+ else
+ local referencenumber = detail.referencenumber
+ if not referencenumber then
+ lastreferencenumber = lastreferencenumber + 1
+ referencenumber = lastreferencenumber
+ detail.referencenumber = lastreferencenumber
+ end
+ li[3] = referencenumber
+ tagtolistindex[tag] = i
+ newlist[#newlist+1] = li
+ end
+ end
+ end
+ groups[group] = lastreferencenumber
+ rendering.list = newlist
+ end
+
+ function lists.fetchentries(dataset)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ if list then
+ for i=1,#list do
+ local li = list[i]
+ ctx_btxsettag(li[1])
+ ctx_btxsetnumber(li[3])
+ ctx_btxchecklistentry()
+ end
+ end
+ end
+
+ -- for rendering
+
+ -- setspecification
+
+ local function btxflushpages(dataset,tag)
+ -- todo: interaction
+ local rendering = renderings[dataset]
+ local pages = rendering.pages
+ if not pages then
+ return
+ else
+ pages = pages[tag]
+ end
+ if not pages then
+ return
+ end
+ local nofpages = #pages
+ if nofpages == 0 then
+ return
+ end
+ local first_p = nil
+ local first_r = nil
+ local last_p = nil
+ local last_r = nil
+ local ranges = { }
+ local nofdone = 0
+ local function flush()
+ if last_r and first_r ~= last_r then
+ ranges[#ranges+1] = { first_p, last_p }
+ else
+ ranges[#ranges+1] = { first_p }
+ end
+ end
+ for i=1,nofpages do
+ local next_p = pages[i]
+ local next_r = next_p[2].realpage
+ if not first_r then
+ first_p = next_p
+ first_r = next_r
+ elseif last_r + 1 == next_r then
+ -- continue
+ elseif first_r then
+ flush()
+ first_p = next_p
+ first_r = next_r
+ end
+ last_p = next_p
+ last_r = next_r
+ end
+ if first_r then
+ flush()
+ end
+ local nofranges = #ranges
+ for i=1,nofranges do
+ local r = ranges[i]
+ ctx_btxsetconcat(concatstate(i,nofranges))
+ local first, last = r[1], r[2]
+ ctx_btxsetfirstinternal(first[2].internal)
+ ctx_btxsetfirstpage(first[1])
+ if last then
+ ctx_btxsetlastinternal(last[2].internal)
+ ctx_btxsetlastpage(last[1])
+ end
+ if trace_detail then
+ report("expanding page setup")
+ end
+ ctx_btxpagesetup("") -- nothing yet
+ end
+ end
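+
+ -- the collected pages of a tag are collapsed into ranges of consecutive real pages
+ -- before flushing, so (hypothetical) real pages 8, 9, 10 and 12 give two ranges: one
+ -- with a first and last page (8-10) and one with only a first page (12)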
+
+ implement {
+ name = "btxflushpages",
+ actions = btxflushpages,
+ arguments = { "string", "string" }
+ }
+
+ function lists.sameasprevious(dataset,i,name,order,method)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local n = tonumber(i)
+ if n and n > 1 and n <= #list then
+ local luadata = datasets[dataset].luadata
+ local p_index = list[n-1][1]
+ local c_index = list[n ][1]
+ local previous = getdirect(dataset,luadata[p_index],name)
+ local current = getdirect(dataset,luadata[c_index],name)
+
+ -- authors are a special case
+
+ -- if not order then
+ -- order = gettexcounter("c_btx_list_reference")
+ -- end
+ if order and order > 0 and (method == v_always or method == v_doublesided) then
+ local clist = listtolist[order]
+ local plist = listtolist[order-1]
+ if clist and plist then
+ local crealpage = clist.references.realpage
+ local prealpage = plist.references.realpage
+ if crealpage ~= prealpage then
+ if method == v_always or not conditionals.layoutisdoublesided then
+ if trace_detail then
+ report("previous %a, current %a, different page",previous,current)
+ end
+ return false
+ elseif crealpage % 2 == 0 then
+ if trace_detail then
+ report("previous %a, current %a, different page",previous,current)
+ end
+ return false
+ end
+ end
+ end
+ end
+ local sameentry = false
+ if current and current == previous then
+ sameentry = true
+ else
+ local p_casted = getcasted(dataset,p_index,name)
+ local c_casted = getcasted(dataset,c_index,name)
+ if c_casted and c_casted == p_casted then
+ sameentry = true
+ elseif type(c_casted) == "table" and type(p_casted) == "table" then
+ sameentry = table.identical(c_casted,p_casted)
+ end
+ end
+ if trace_detail then
+ if sameentry then
+ report("previous %a, current %a, same entry",previous,current)
+ else
+ report("previous %a, current %a, different entry",previous,current)
+ end
+ end
+ return sameentry
+ else
+ return false
+ end
+ end
+
+ function lists.combiinlist(dataset,tag)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local toindex = rendering.tagtolistindex
+ return toindex and toindex[tag]
+ end
+
+ function lists.flushcombi(dataset,tag)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local toindex = rendering.tagtolistindex
+ local listindex = toindex and toindex[tag]
+ if listindex then
+ local li = list[listindex]
+ if li then
+ local data = datasets[dataset]
+ local luadata = data.luadata
+ local details = data.details
+ local tag = li[1]
+ local listindex = li[2]
+ local n = li[3]
+ local entry = luadata[tag]
+ local detail = details[tag]
+ ctx_btxstartcombientry()
+ ctx_btxsetcurrentlistindex(listindex)
+ ctx_btxsetcategory(entry.category or "unknown")
+ ctx_btxsettag(tag)
+ ctx_btxsetnumber(n)
+ local language = entry.language
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ local authorsuffix = detail.authorsuffix
+ if authorsuffix then
+ ctx_btxsetsuffix(authorsuffix)
+ end
+ ctx_btxhandlecombientry()
+ ctx_btxstopcombientry()
+ end
+ end
+ end
+
+ function lists.flushentry(dataset,i,textmode)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local li = list[i]
+ if li then
+ local data = datasets[dataset]
+ local luadata = data.luadata
+ local details = data.details
+ local tag = li[1]
+ local listindex = li[2]
+ local n = li[3]
+ local entry = luadata[tag]
+ local detail = details[tag]
+ --
+ ctx_btxstartlistentry()
+ ctx_btxsetcurrentlistentry(i) -- redundant
+ ctx_btxsetcurrentlistindex(listindex or 0)
+ local children = detail.children
+ local language = entry.language
+ if children then
+ ctx_btxsetcombis(concat(children,","))
+ end
+ ctx_btxsetcategory(entry.category or "unknown")
+ ctx_btxsettag(tag)
+ ctx_btxsetnumber(n)
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ local userdata = li[4]
+ if userdata then
+ local b = userdata.btxbtx
+ local a = userdata.btxatx
+ if b then
+ ctx_btxsetbefore(b)
+ end
+ if a then
+ ctx_btxsetafter(a)
+ end
+ local bl = userdata.btxint
+ if bl and bl ~= "" then
+ ctx_btxsetbacklink(bl)
+ end
+ end
+ local authorsuffix = detail.authorsuffix
+ if authorsuffix then
+ ctx_btxsetsuffix(authorsuffix)
+ end
+ rendering.userdata = userdata
+ if textmode then
+ ctx_btxhandlelisttextentry()
+ else
+ ctx_btxhandlelistentry()
+ end
+ ctx_btxstoplistentry()
+ --
+ -- context(function()
+ -- -- wrapup
+ -- rendering.ignoredfields = nil
+ -- end)
+ end
+ end
+
+ local function getuserdata(dataset,key)
+ local rendering = renderings[dataset]
+ if rendering then
+ local userdata = rendering.userdata
+ if userdata then
+ local value = userdata[key]
+ if value and value ~= "" then
+ return value
+ end
+ end
+ end
+ end
+
+ lists.uservariable = getuserdata
+
+ function lists.filterall(dataset)
+ local r = renderings[dataset]
+ local list = r.list
+ local registered = r.registered
+ for i=1,#registered do
+ list[i] = { registered[i], i, 0, false, false }
+ end
+ end
+
+ implement {
+ name = "btxuservariable",
+ actions = { getuserdata, context },
+ arguments = { "string", "string" }
+ }
+
+ implement {
+ name = "btxdoifelseuservariable",
+ actions = { getuserdata, ctx_doifelse },
+ arguments = { "string", "string" }
+ }
+
+ -- implement {
+ -- name = "btxresolvelistreference",
+ -- actions = lists.resolve,
+ -- arguments = { "string", "string" }
+ -- }
+
+ implement {
+ name = "btxcollectlistentries",
+ actions = lists.collectentries,
+ arguments = {
+ {
+ { "names" },
+ { "criterium" },
+ { "reference" },
+ { "method" },
+ { "dataset" },
+ { "keyword" },
+ { "sorttype" },
+ { "repeated" },
+ { "ignored" },
+ { "group" },
+ }
+ }
+ }
+
+ implement {
+ name = "btxpreparelistentries",
+ actions = lists.prepareentries,
+ arguments = { "string" },
+ }
+
+ implement {
+ name = "btxfetchlistentries",
+ actions = lists.fetchentries,
+ arguments = { "string" },
+ }
+
+ implement {
+ name = "btxflushlistentry",
+ actions = lists.flushentry,
+ arguments = { "string", "integer" }
+ }
+
+ implement {
+ name = "btxflushlistcombi",
+ actions = lists.flushcombi,
+ arguments = { "string", "string" }
+ }
+
+ implement {
+ name = "btxdoifelsesameasprevious",
+ actions = { lists.sameasprevious, ctx_doifelse },
+ arguments = { "string", "integer", "string", "integer", "string" }
+ }
+
+ implement {
+ name = "btxdoifelsecombiinlist",
+ actions = { lists.combiinlist, ctx_doifelse },
+ arguments = { "string", "string" }
+ }
+
+end
+
+do
+
+ local citevariants = { }
+ publications.citevariants = citevariants
+
+ local function btxhandlecite(specification)
+ local dataset = specification.dataset or v_default
+ local reference = specification.reference
+ local variant = specification.variant
+ if not variant or variant == "" then
+ variant = "default"
+ end
+ if not reference or reference == "" then
+ return
+ end
+ --
+ local data = datasets[dataset]
+ if not data.suffixed then
+ data.authorconversion = specification.authorconversion
+ publications.enhancers.suffixes(data)
+ end
+ --
+ specification.variant = variant
+ specification.compress = specification.compress
+ specification.markentry = specification.markentry ~= false
+ --
+ if specification.sorttype == v_yes then
+ specification.sorttype = v_normal
+ end
+ --
+ local prefix, rest = lpegmatch(prefixsplitter,reference)
+ if prefix and rest then
+ dataset = prefix
+ specification.dataset = prefix
+ specification.reference = rest
+ end
+ --
+ if trace_cite then
+ report_cite("inject, dataset: %s, tag: %s, variant: %s, compressed",
+ specification.dataset or "-",
+ specification.reference,
+ specification.variant
+ )
+ end
+ --
+ ctx_btxsetdataset(dataset)
+ --
+ citevariants[variant](specification) -- we always fall back on default
+ end
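+
+ -- the reference can select a dataset with a prefix (assuming the usual :: syntax that
+ -- prefixsplitter matches), so a cite of "secondary::knuth1984" switches the dataset to
+ -- "secondary" and cites the tag "knuth1984"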
+
+ local function btxhandlenocite(specification)
+ local dataset = specification.dataset or v_default
+ local reference = specification.reference
+ if not reference or reference == "" then
+ return
+ end
+ --
+ local markentry = specification.markentry ~= false
+ local internal = specification.internal or ""
+ --
+ local prefix, rest = lpegmatch(prefixsplitter,reference)
+ if rest then
+ dataset = prefix
+ reference = rest
+ end
+ --
+ if trace_cite then
+ report_cite("mark, dataset: %s, tags: %s",dataset or "-",reference)
+ end
+ --
+ local reference = publications.parenttag(dataset,reference)
+ --
+ local found, todo, list = findallused(dataset,reference,internal)
+ --
+ tobemarked = markentry and todo
+ if found and tobemarked then
+ flushmarked(dataset,list)
+ btxflushmarked() -- here (could also be done in caller)
+ end
+ end
+
+ implement {
+ name = "btxhandlecite",
+ actions = btxhandlecite,
+ arguments = {
+ {
+ { "dataset" },
+ { "reference" },
+ { "markentry", "boolean" },
+ { "variant" },
+ { "sorttype" },
+ { "compress" },
+ { "authorconversion" },
+ { "author" },
+ { "lefttext" },
+ { "righttext" },
+ { "before" },
+ { "after" },
+ }
+ }
+ }
+
+ implement {
+ name = "btxhandlenocite",
+ actions = btxhandlenocite,
+ arguments = {
+ {
+ { "dataset" },
+ { "reference" },
+ { "markentry", "boolean" },
+ }
+ }
+ }
+
+ -- sorter
+
+ local keysorter = function(a,b)
+ local ak = a.sortkey
+ local bk = b.sortkey
+ if ak == bk then
+ local as = a.suffix -- numeric
+ local bs = b.suffix -- numeric
+ if as and bs then
+ return (as or 0) < (bs or 0)
+ else
+ return false
+ end
+ else
+ return ak < bk
+ end
+ end
+
+ local revsorter = function(a,b)
+ return keysorter(b,a)
+ end
+
+ local function compresslist(source,specification)
+ if specification.sorttype == v_normal then
+ sort(source,keysorter)
+ elseif specification.sorttype == v_reverse then
+ sort(source,revsorter)
+ end
+ if specification and specification.compress == v_yes and specification.numeric then
+ local first, last, firstr, lastr
+ local target, noftarget, tags = { }, 0, { }
+ local oldvalue = nil
+ local function flushrange()
+ noftarget = noftarget + 1
+ if last > first + 1 then
+ target[noftarget] = {
+ first = firstr,
+ last = lastr,
+ tags = tags,
+ }
+ else
+ target[noftarget] = firstr
+ if last > first then
+ noftarget = noftarget + 1
+ target[noftarget] = lastr
+ end
+ end
+ tags = { }
+ end
+ for i=1,#source do
+ local entry = source[i]
+ local current = entry.sortkey -- so we need a sortkey !
+ if entry.suffix then
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
+ else
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ elseif current == last + 1 then
+ last, lastr = current, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
+ end
+ tags[#tags+1] = entry.tag
+ end
+ if first and last then
+ flushrange()
+ end
+ return target
+ else
+ local target, noftarget = { }, 0
+ for i=1,#source do
+ local entry = source[i]
+ noftarget = noftarget + 1
+ target[noftarget] = {
+ first = entry,
+ tags = { entry.tag },
+ }
+ end
+ return target
+ end
+ end
+
+ -- local source = {
+ -- { tag = "one", internal = 1, value = "foo", page = 1 },
+ -- { tag = "two", internal = 2, value = "bar", page = 2 },
+ -- { tag = "three", internal = 3, value = "gnu", page = 3 },
+ -- }
+ --
+ -- local target = compresslist(source)
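+ --
+ -- assuming compress=yes, a numeric variant, and a numeric sortkey per entry (say 1, 2,
+ -- 3 and 5 for the tags one, two, three and five), consecutive keys collapse into one
+ -- range while a loner is passed along as-is:
+ --
+ -- target = {
+ -- { first = <entry one>, last = <entry three>, tags = { "one", "two", "three" } },
+ -- <entry five>,
+ -- }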
+
+ local numberonly = R("09")^1 / tonumber + P(1)^0
+ local f_missing = formatters["<%s>"]
+
+ -- maybe also sparse (e.g. pages)
+
+ -- a bit redundant access to datasets
+
+ local function processcite(presets,specification)
+ --
+ if specification then
+ setmetatableindex(specification,presets)
+ else
+ specification = presets
+ end
+ --
+ local dataset = specification.dataset
+ local reference = specification.reference
+ local internal = specification.internal
+ local setup = specification.variant
+ local compress = specification.compress
+ local sorttype = specification.sorttype
+ local getter = specification.getter
+ local setter = specification.setter
+ local compressor = specification.compressor
+ --
+ local reference = publications.parenttag(dataset,reference)
+ --
+ local found, todo, list = findallused(dataset,reference,internal)
+ tobemarked = specification.markentry and todo
+ --
+ if not found or #found == 0 then
+ report("no entry %a found in dataset %a",reference,dataset)
+ elseif not setup then
+ report("invalid reference for %a",reference)
+ else
+ if trace_cite then
+ report("processing reference %a",reference)
+ end
+ local source = { }
+ local luadata = datasets[dataset].luadata
+ for i=1,#found do
+ local entry = found[i]
+-- inspect(entry)
+ local tag = entry.userdata.btxref
+ local ldata = luadata[tag]
+ local data = {
+ internal = entry.references.internal,
+ language = ldata.language,
+ dataset = dataset,
+ tag = tag,
+ -- combis = entry.userdata.btxcom,
+ -- luadata = ldata,
+ }
+ setter(data,dataset,tag,entry)
+ if type(data) == "table" then
+ source[#source+1] = data
+ else
+ report("error in cite rendering %a",setup or "?")
+ end
+ end
+
+ local lefttext = specification.lefttext
+ local righttext = specification.righttext
+ local before = specification.before
+ local after = specification.after
+
+ if lefttext and lefttext ~= "" then lefttext = settings_to_array(lefttext) end
+ if righttext and righttext ~= "" then righttext = settings_to_array(righttext) end
+ if before and before ~= "" then before = settings_to_array(before) end
+ if after and after ~= "" then after = settings_to_array(after) end
+
+ local function flush(i,n,entry,last)
+ local tag = entry.tag
+ local currentcitation = markcite(dataset,tag)
+ --
+ ctx_btxstartcite()
+ ctx_btxsettag(tag)
+ ctx_btxsetcategory(entry.category or "unknown")
+ --
+ if lefttext then local text = lefttext [i] ; if text and text ~= "" then ctx_btxsetlefttext (text) end end
+ if righttext then local text = righttext[i] ; if text and text ~= "" then ctx_btxsetrighttext(text) end end
+ if before then local text = before [i] ; if text and text ~= "" then ctx_btxsetbefore (text) end end
+ if after then local text = after [i] ; if text and text ~= "" then ctx_btxsetafter (text) end end
+ --
+ ctx_btxsetbacklink(currentcitation)
+ local bl = listtocite[currentcitation]
+ if bl then
+ -- we refer to a coming list entry
+ ctx_btxsetinternal(bl.references.internal or "")
+ else
+ -- we refer to a previous list entry
+ ctx_btxsetinternal(entry.internal or "")
+ end
+ local language = entry.language
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ -- local combis = entry.combis
+ -- if combis then
+ -- ctx_btxsetcombis(combis)
+ -- end
+ if not getter(entry,last,nil,specification) then
+ ctx_btxsetfirst("") -- (f_missing(tag))
+ end
+ ctx_btxsetconcat(concatstate(i,n))
+ if trace_detail then
+ report("expanding cite setup %a",setup)
+ end
+ ctx_btxcitesetup(setup)
+ ctx_btxstopcite()
+ end
+ if sorttype == v_normal or sorttype == v_reverse then
+ local target = (compressor or compresslist)(source,specification)
+ local nofcollected = #target
+ if nofcollected == 0 then
+ local nofcollected = #source
+ if nofcollected == 0 then
+ unknowncite(reference)
+ else
+ for i=1,nofcollected do
+ flush(i,nofcollected,source[i])
+ end
+ end
+ else
+ for i=1,nofcollected do
+ local entry = target[i]
+ local first = entry.first
+ if first then
+ flush(i,nofcollected,first,entry.last)
+ else
+ flush(i,nofcollected,entry)
+ end
+ end
+ end
+ else
+ local nofcollected = #source
+ if nofcollected == 0 then
+ unknowncite(reference)
+ else
+ for i=1,nofcollected do
+ flush(i,nofcollected,source[i])
+ end
+ end
+ end
+ end
+ if tobemarked then
+ flushmarked(dataset,list)
+ btxflushmarked() -- here (could also be done in caller)
+ end
+ end
+
+ --
+
+ local function simplegetter(first,last,field,specification)
+ local value = first[field]
+ if value then
+ ctx_btxsetfirst(value)
+ if last then
+ ctx_btxsetsecond(last[field])
+ end
+ return true
+ end
+ end
+
+ local setters = setmetatableindex({},function(t,k)
+ local v = function(data,dataset,tag,entry)
+ local value = getcasted(dataset,tag,k)
+ data.value = value -- not really needed
+ data[k] = value
+ data.sortkey = value
+ data.sortfld = k
+ end
+ t[k] = v
+ return v
+ end)
+
+ local getters = setmetatableindex({},function(t,k)
+ local v = function(first,last,_,specification)
+ return simplegetter(first,last,k,specification) -- maybe _ or k
+ end
+ t[k] = v
+ return v
+ end)
+
+ setmetatableindex(citevariants,function(t,k)
+ local p = defaultvariant or "default"
+ local v = rawget(t,p)
+ report_cite("variant %a falls back on %a setter and getter with setup %a",k,p,k)
+ t[k] = v
+ return v
+ end)
+
+ function citevariants.default(presets)
+ local variant = presets.variant
+ processcite(presets,{
+ setup = variant,
+ setter = setters[variant],
+ getter = getters[variant],
+ })
+ end
+
+ -- category
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ data.category = getfield(dataset,tag,"category")
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"category",specification)
+ end
+
+ function citevariants.category(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+
+ -- entry (we could provide a generic one)
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ -- nothing
+ end
+
+ local function getter(first,last,_,specification) -- last not used
+ ctx_btxsetfirst(first.tag)
+ end
+
+ function citevariants.entry(presets)
+ processcite(presets,{
+ compress = false,
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- short
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local short = getdetail(dataset,tag,"shorthash")
+ local suffix = getdetail(dataset,tag,"shortsuffix")
+ data.short = short
+ data.sortkey = short
+ data.suffix = suffix
+ end
+
+ local function getter(first,last,_,specification) -- last not used
+ local short = first.short
+ if short then
+ local suffix = first.suffix
+ ctx_btxsetfirst(short)
+ if suffix then
+ ctx_btxsetsuffix(suffix) -- watch out: third
+ end
+ return true
+ end
+ end
+
+ function citevariants.short(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- pages (no compress)
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ data.pages = getcasted(dataset,tag,"pages")
+ end
+
+ local function getter(first,last,_,specification)
+ local pages = first.pages
+ if pages then
+ if type(pages) == "table" then
+ ctx_btxsetfirst(pages[1])
+ ctx_btxsetsecond(pages[2])
+ else
+ ctx_btxsetfirst(pages)
+ end
+ return true
+ end
+ end
+
+ function citevariants.page(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- num
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local entries = entry.entries
+ local text = entries and entries.text or "?"
+ data.num = text
+ data.sortkey = tonumber(text) or text
+ end
+
+ local function getter(first,last,tag,specification)
+ return simplegetter(first,last,"num",specification)
+ end
+
+ function citevariants.num(presets)
+ processcite(presets,{
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ citevariants.textnum = citevariants.num -- should not be needed
+
+ end
+
+ -- year
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local year = getfield (dataset,tag,"year")
+ local suffix = getdetail(dataset,tag,"authorsuffix")
+ data.year = year
+ data.suffix = suffix
+ data.sortkey = tonumber(year) or 9999
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"year",specification)
+ end
+
+ function citevariants.year(presets)
+ processcite(presets,{
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- index
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local index = getfield(dataset,tag,"index")
+ data.index = index
+ data.sortkey = index
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"index",specification)
+ end
+
+ function citevariants.index(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ numeric = true,
+ })
+ end
+
+ end
+
+ -- tag
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ data.tag = tag
+ data.sortkey = tag
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"tag",specification)
+ end
+
+ function citevariants.tag(presets)
+ return processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- keyword
+
+ do
+
+ local function listof(list)
+ local size = type(list) == "table" and #list or 0
+ if size > 0 then
+ return function()
+ for i=1,size do
+ ctx_btxsetfirst(list[i])
+ ctx_btxsetconcat(concatstate(i,size))
+ ctx_btxcitesetup("listelement")
+ end
+ return true
+ end
+ else
+ return "?" -- unknown
+ end
+ end
+
+ local function setter(data,dataset,tag,entry)
+ data.keywords = getcasted(dataset,tag,"keywords")
+ end
+
+ local function getter(first,last,_,specification)
+ context(listof(first.keywords))
+ end
+
+ function citevariants.keywords(presets)
+ return processcite(presets,{
+ variant = "keywords",
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- authors
+
+ do
+
+ -- is this good enough?
+
+ local keysorter = function(a,b)
+ local ak = a.authorhash
+ local bk = b.authorhash
+ if ak == bk then
+ local as = a.authorsuffix -- numeric
+ local bs = b.authorsuffix -- numeric
+ if as and bs then
+ return (as or 0) < (bs or 0)
+ else
+ return false
+ end
+ elseif ak and bk then
+ return ak < bk
+ else
+ return false
+ end
+ end
+
+ local revsorter = function(a,b)
+ return keysorter(b,a)
+ end
+
+ local currentbtxciteauthor = function()
+ context.currentbtxciteauthor()
+ return true -- needed?
+ end
+
+ local function authorcompressor(found,specification)
+ -- HERE
+ if specification.sorttype == v_normal then
+ sort(found,keysorter)
+ elseif specification.sorttype == v_reverse then
+ sort(found,revsorter)
+ end
+ local result = { }
+ local entries = { }
+ for i=1,#found do
+ local entry = found[i]
+ local author = entry.authorhash
+ if author then
+ local aentries = entries[author]
+ if aentries then
+ aentries[#aentries+1] = entry
+ else
+ entries[author] = { entry }
+ end
+ end
+ end
+ -- beware: we use tables as hash so we get a cycle when inspecting (unless we start
+ -- hashing with strings)
+ for i=1,#found do
+ local entry = found[i]
+ local author = entry.authorhash
+ if author then
+ local aentries = entries[author]
+ if not aentries then
+ result[#result+1] = entry
+ elseif aentries == true then
+ -- already done
+ else
+ result[#result+1] = entry
+ entry.entries = aentries
+ entries[author] = true
+ end
+ end
+ end
+ return result
+ end
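+
+ -- a sketch of the effect (hypothetical entries): with found = { e1, e2, e3 } where e1
+ -- and e3 share an authorhash, the result becomes { e1, e2 } with e1.entries = { e1, e3 }
+ -- and e2.entries = { e2 }; authorconcat then renders such a group as one citation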
+
+ local function authorconcat(target,key,setup)
+ ctx_btxstartsubcite(setup)
+ local nofcollected = #target
+ if nofcollected == 0 then
+ unknowncite(tag)
+ else
+ for i=1,nofcollected do
+ local entry = target[i]
+ local first = entry.first
+ local tag = entry.tag
+ local currentcitation = markcite(entry.dataset,tag)
+ ctx_btxstartciteauthor()
+ ctx_btxsettag(tag)
+ ctx_btxsetbacklink(currentcitation)
+ local bl = listtocite[currentcitation]
+ ctx_btxsetinternal(bl and bl.references.internal or "")
+ if first then
+ ctx_btxsetfirst(first[key] or "") -- f_missing(first.tag))
+ local suffix = entry.suffix
+ local last = entry.last
+ local value = last and last[key]
+ if value then
+ ctx_btxsetsecond(value)
+ end
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ else
+ local suffix = entry.suffix
+ local value = entry[key] or "" -- f_missing(tag)
+ ctx_btxsetfirst(value)
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ end
+ ctx_btxsetconcat(concatstate(i,nofcollected))
+ if trace_detail then
+ report("expanding %a cite setup %a","multiple author",setup)
+ end
+ ctx_btxsubcitesetup(setup)
+ ctx_btxstopciteauthor()
+ end
+ end
+ ctx_btxstopsubcite()
+ end
+
+ local function authorsingle(entry,key,setup)
+ ctx_btxstartsubcite(setup)
+ ctx_btxstartciteauthor()
+ local tag = entry.tag
+ ctx_btxsettag(tag)
+ -- local currentcitation = markcite(entry.dataset,tag)
+ -- ctx_btxsetbacklink(currentcitation)
+ -- local bl = listtocite[currentcitation]
+ -- ctx_btxsetinternal(bl and bl.references.internal or "")
+ ctx_btxsetfirst(entry[key] or "") -- f_missing(tag)
+ local suffix = entry.suffix
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ if trace_detail then
+ report("expanding %a cite setup %a","single author",setup)
+ end
+ ctx_btxcitesetup(setup)
+ ctx_btxstopciteauthor()
+ ctx_btxstopsubcite()
+ end
+
+ local partialinteractive = false
+
+ local function authorgetter(first,last,key,specification) -- only first
+ -- ctx_btxsetfirst(first.author) -- unformatted
+ -- ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ if first.type == "author" then
+ ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ else
+ ctx_btxsetfirst(first.author) -- unformatted
+ end
+ local entries = first.entries
+ -- alternatively we can use a concat with one ... so that we can only make the
+ -- year interactive, as with the concat
+ if partialinteractive and not entries then
+ entries = { first }
+ end
+ if entries then
+ -- happens with year
+ local c = compresslist(entries,specification)
+ local f = function() authorconcat(c,key,specification.setup or "author") return true end -- indeed return true?
+ ctx_btxsetcount(#c)
+ ctx_btxsetsecond(f)
+ elseif first then
+ -- happens with num
+ local f = function() authorsingle(first,key,specification.setup or "author") return true end -- indeed return true?
+ ctx_btxsetcount(0)
+ ctx_btxsetsecond(f)
+ end
+ return true
+ end
+
+ -- author
+
+ local function setter(data,dataset,tag,entry)
+ data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ -- no numeric sortkey here: the author compressor sorts on authorhash
+ data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
+ end
+
+ local function getter(first,last,_,specification)
+ if first.type == "author" then
+ ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ else
+ ctx_btxsetfirst(first.author) -- unformatted
+ end
+ return true
+ end
+
+ function citevariants.author(presets)
+ processcite(presets,{
+ variant = "author",
+ setup = "author",
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ -- authornum
+
+ local function setter(data,dataset,tag,entry)
+ local entries = entry.entries
+ local text = entries and entries.text or "?"
+ data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
+ data.num = text
+ data.sortkey = text and lpegmatch(numberonly,text)
+ end
+
+ local function getter(first,last,_,specification)
+ authorgetter(first,last,"num",specification)
+ return true
+ end
+
+ function citevariants.authornum(presets)
+ processcite(presets,{
+ variant = "authornum",
+ setup = "author:num",
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ -- authoryear | authoryears
+
+ local function setter(data,dataset,tag,entry)
+ data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
+ local year = getfield (dataset,tag,"year")
+ local suffix = getdetail(dataset,tag,"authorsuffix")
+ data.year = year
+ data.suffix = suffix
+ data.sortkey = tonumber(year) or 9999
+ end
+
+ local function getter(first,last,_,specification)
+ authorgetter(first,last,"year",specification)
+ return true
+ end
+
+ function citevariants.authoryear(presets)
+ processcite(presets,{
+ variant = "authoryear",
+ setup = "author:year",
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ local function getter(first,last,_,specification)
+ authorgetter(first,last,"year",specification)
+ return true
+ end
+
+ function citevariants.authoryears(presets)
+ processcite(presets,{
+ variant = "authoryears",
+ setup = "author:years",
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ end
+
+end
+
+-- List variants
+
+do
+
+ local listvariants = { }
+ publications.listvariants = listvariants
+
+ local function btxlistvariant(dataset,block,tag,variant,listindex)
+ local action = listvariants[variant] or listvariants.default
+ if action then
+ action(dataset,block,tag,variant,tonumber(listindex) or 0)
+ end
+ end
+
+ implement {
+ name = "btxlistvariant",
+ actions = btxlistvariant,
+ arguments = { "string", "string", "string", "string", "string" } -- not integer here
+ }
+
+ function listvariants.default(dataset,block,tag,variant)
+ ctx_btxsetfirst("?")
+ if trace_detail then
+ report("expanding %a list setup %a","default",variant)
+ end
+ ctx_btxnumberingsetup("default")
+ end
+
+ function listvariants.num(dataset,block,tag,variant,listindex)
+ ctx_btxsetfirst(listindex)
+ if trace_detail then
+ report("expanding %a list setup %a","num",variant)
+ end
+ ctx_btxnumberingsetup(variant or "num")
+ end
+
+ -- listvariants[v_yes] = listvariants.num
+
+ function listvariants.index(dataset,block,tag,variant,listindex)
+ local index = getdetail(dataset,tag,"index")
+ ctx_btxsetfirst(index or "?")
+ if trace_detail then
+ report("expanding %a list setup %a","index",variant)
+ end
+ ctx_btxnumberingsetup(variant or "index")
+ end
+
+ function listvariants.tag(dataset,block,tag,variant,listindex)
+ ctx_btxsetfirst(tag)
+ if trace_detail then
+ report("expanding %a list setup %a","tag",variant)
+ end
+ ctx_btxnumberingsetup(variant or "tag")
+ end
+
+ function listvariants.short(dataset,block,tag,variant,listindex)
+ local short = getdetail(dataset,tag,"shorthash")
+ local suffix = getdetail(dataset,tag,"shortsuffix")
+ if short then
+ ctx_btxsetfirst(short)
+ end
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ if trace_detail then
+ report("expanding %a list setup %a","short",variant)
+ end
+ ctx_btxnumberingsetup(variant or "short")
+ end
+
+ function listvariants.page(dataset,block,tag,variant,listindex)
+ local rendering = renderings[dataset]
+ local specification = rendering.list[listindex]
+ for i=3,#specification do
+ local backlink = tonumber(specification[i])
+ if backlink then
+ local citation = citetolist[backlink]
+ if citation then
+ local references = citation.references
+ if references then
+ local internal = references.internal
+ local realpage = references.realpage
+ if internal and realpage then
+ ctx_btxsetconcat(i-2)
+ ctx_btxsetfirst(realpage)
+ ctx_btxsetsecond(backlink)
+ if trace_detail then
+ report("expanding %a list setup %a","page",variant)
+ end
+ ctx_btxlistsetup(variant)
+ end
+ end
+ end
+ end
+ end
+ end
+
+end
+
+-- a helper
+
+do
+
+ -- local context = context
+ -- local lpegmatch = lpeg.match
+ local splitter = lpeg.tsplitat(":")
+
+ interfaces.implement {
+ name = "checkinterfacechain",
+ arguments = { "string", "string" },
+ actions = function(str,command)
+ local chain = lpegmatch(splitter,str)
+ if #chain > 0 then
+ local command = context[command]
+ local parent = ""
+ local child = chain[1]
+ command(child,parent)
+ for i=2,#chain do
+ parent = child
+ child = child .. ":" .. chain[i]
+ command(child,parent)
+ end
+ end
+ end
+ }
+
+end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
new file mode 100644
index 000000000..233734bb6
--- /dev/null
+++ b/tex/context/base/publ-ini.mkiv
@@ -0,0 +1,1813 @@
+%D \module
+%D [ file=publ-ini,
+%D version=2013.05.12,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% TODO: s! vs v! for default and neutral key/values
+% todo: too many refs in list
+
+% todo: no need for all these %'s
+
+% todo: tagging
+% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
+% todo: \v!cite => \s!cite
+% todo: interface with (ml)bibtex (export -> call -> import)
+% todo: check if 'all' etc are ok ... either use list or use other criterium
+% todo: \the\everysetupbtxciteplacement probably too often
+
+% \definecolor[btx:field] [darkred]
+% \definecolor[btx:crossref][darkblue]
+% \definecolor[btx:key] [darkgreen]
+% \definecolor[btx:todo] [darkyellow]
+
+%D We operate on several axes:
+%D
+%D \startitemize[packed]
+%D \startitem we can have several databases (or combinations) \stopitem
+%D \startitem we can add entries to them if needed (coded in tex) \stopitem
+%D \startitem we can have several lists each using one of the databases \stopitem
+%D \startitem we can render each list or citation independently \stopitem
+%D \stopitemize
+%D
+%D We assume that the rendering of a list entry is consistent in a document,
+%D although one can redefine properties if needed. Adding more granularity would
+%D complicate the user interface beyond comprehension.
+
+\writestatus{loading}{ConTeXt Publication Support / Initialization}
+
+\registerctxluafile{publ-dat}{1.001}
+\registerctxluafile{publ-ini}{1.001}
+\registerctxluafile{publ-sor}{1.001}
+\registerctxluafile{publ-aut}{1.001}
+\registerctxluafile{publ-usr}{1.001}
+\registerctxluafile{publ-oth}{1.001} % this could become an option
+\registerctxluafile{publ-fnd}{1.001} % new method (for the moment only local)
+\registerctxluafile{publ-jrn}{1.001}
+\registerctxluafile{publ-reg}{1.001}
+
+\unprotect
+
+\startcontextdefinitioncode
+
+\def\s!btx {btx}
+
+\def\v!btxcite {btxcite}
+\def\v!btxlist {btxlist}
+\def\v!btxrendering {btxrendering}
+
+\def\s!btxset {btxset}
+\def\s!btxref {btxref}
+\def\s!btxint {btxint}
+\def\s!btxbck {btxbck}
+\def\s!btxltx {btxltx}
+\def\s!btxrtx {btxrtx}
+\def\s!btxatx {btxatx}
+\def\s!btxbtx {btxbtx}
+\def\s!btxspc {btxspc}
+\def\s!btxlst {btxlst}
+\def\s!btxcom {btxcom}
+
+\definelabelclass[btxlabel][2]
+
+% It is not that trivial to come up with a proper organization of setup
+% and control commands for publications. This is because we have complex
+% inline as well as extensive list rendering. The rules are partially
+% driven by somewhat archaic bibtex specifications and evolving journal
+% (or field) specific demands. The logic in the standards is often so
+% complex that it looks like manual rendering is assumed. But we want to
+% automate the process as much as possible.
+%
+% Another complication is that in manuals we want to demonstrate different
+% properties of the implementation and therefore we need a way to handle
+% independent standards, databases, etc. This has resulted in the following
+% organization:
+%
+% - general setup (rather minimal)
+% - list setup (rendering)
+% - cite setup
+% - dataset setup
+%
+% The rendering is mostly driven by setups. In there we can call for fields
+% in the database but also for virtual fields or combinations.
+
+% The main rendering style (standard driven).
+
+%D We assume that a specification is either global or used grouped. It doesn't make much
+%D sense to split between cite and list here, as that only complicates matters (timing)
+%D and is confusing as well.
+
+\let\currentbtxspecification\empty
+
+\unexpanded\def\startbtxrenderingdefinitions[#1]%
+ {\unprotect
+ \pushmacro\currentbtxspecification
+ \edef\currentbtxspecification{#1}}
+
+\unexpanded\def\stopbtxrenderingdefinitions
+ {\popmacro\currentbtxspecification
+ \protect}
+
+\unexpanded\def\loadbtxdefinitionfile [#1]{\clf_btxloaddefinitionfile {#1}}
+\unexpanded\def\loadbtxreplacementfile[#1]{\clf_btxloadreplacementfile{#1}}
+
+\unexpanded\def\publ_specification_push#1%
+ {\pushmacro\currentbtxspecification
+ \pushmacro\currentbtxspecificationfallback
+ \edef\currentbtxspecification{#1}%
+ \edef\currentbtxspecificationfallback{\namedbtxparameter\currentbtxspecification\c!default}%
+ \ifx\currentbtxspecificationfallback\currentbtxspecification
+ \let\currentbtxspecificationfallback\empty
+ \fi
+ \clf_btxsetspecification{\currentbtxspecification}}
+
+\unexpanded\def\publ_specification_pop
+ {\popmacro\currentbtxspecificationfallback
+ \popmacro\currentbtxspecification
+ \clf_btxsetspecification{\currentbtxspecification}}
+
+\unexpanded\def\publ_specification_set#1% beware: is global
+ {\edef\currentbtxspecification{#1}%
+ \edef\currentbtxspecificationfallback{\namedbtxparameter\currentbtxspecification\c!default}%
+ \ifx\currentbtxspecificationfallback\currentbtxspecification
+ \let\currentbtxspecificationfallback\empty
+ \fi
+ % has to be done explicitly: \loadbtxdefinitionfile[\currentbtxspecification]%
+ \ifx\currentbtxspecification\empty
+ % we set default at the end
+ \else
+ \clf_btxsetspecification{\currentbtxspecification}%
+ \fi}% todo: ,true == also load
+
+\installcorenamespace {btx}
+
+\installswitchcommandhandler \??btx {btx} \??btx
+
+% because we have lots of setups we provide a checker for sloppy users
+
+\unexpanded\def\btx_check_chain#1#2#3%
+ {\doifelsesomething{#3}
+ {\writestatus{btx #1}{defining\space"#2"\space as\space descendant\space of\space"#3"}% we're in definition regime (no space)
+ \definebtx[#2][#3]}
+ {\writestatus{btx #1}{defining\space"#2"}%
+ \definebtx[#2]}}
+
+% \unexpanded\def\btxcheckdefine#1#2{\doifelsecommandhandler\??btx{#1}\donothing{\btx_check_chain{define}{#1}{#2}}}
+% \unexpanded\def\btxchecksetup #1#2{\doifelsecommandhandler\??btx{#1}\donothing{\btx_check_chain {setup}{#1}{#2}}}
+
+\unexpanded\def\btxcheckdefine#1{\doifelsecommandhandler\??btx{#1}\gobbleoneargument{\btx_check_chain{define}{#1}}} % {#2}
+\unexpanded\def\btxchecksetup #1{\doifelsecommandhandler\??btx{#1}\gobbleoneargument{\btx_check_chain {setup}{#1}}} % {#2}
+
+% for the moment experimental:
+
+\unexpanded\def\btxenableautodefine
+ {\prependtoks
+ \clf_checkinterfacechain{\currentbtx}{btxcheckdefine}%
+ \to \everydefinebtx
+ \prependtoks
+ \ifnum\btxsetupmode=\doingrootsetupnamed
+ \clf_checkinterfacechain{\currentbtx}{btxchecksetup}%
+ \fi
+ \to \everysetupbtx
+ \let\btxenableautodefine\relax}
+
+\appendtoks
+ \ifnum\btxsetupmode=\doingrootsetuproot
+ \publ_specification_set{\btxparameter\c!specification}%
+ \else\ifnum\btxsetupmode=\doingrootsetupnamed
+ \doifelsecommandhandler\??btx\currentbtx
+ {\publ_specification_set{\btxparameter\c!specification}}%
+ {}% maybe a warning
+ \fi\fi
+\to \everysetupbtx
+
+\appendtoks
+ \ifnum\btxsetupmode=\doingrootsetuproot
+ \edef\currentbtxdataset{\clf_btxsetdataset{\btxparameter\c!dataset}{\currentbtxdataset}}%
+ \fi
+\to \everysetupbtx
+
+\appendtoks
+ \publ_specification_set{\btxparameter\c!specification}%
+\to \everyjob
+
+\unexpanded\def\startusingbtxspecification[#1]%
+ {\publ_specification_push{#1}}
+
+\let\stopusingbtxspecification\publ_specification_pop
+
+% \setupbtxlist[alternative=paragraph,width=auto,distance=\emwidth]
+% \setupbtxlist[alternative=paragraph,width=auto,distance=\emwidth,margin=2em] % useless
+% \setupbtxlist[alternative=paragraph,width=fit,distance=\emwidth]
+% \setupbtxlist[alternative=paragraph,width=fit,distance=\emwidth,margin=2em]
+
+% here starts the bib stuff
+
+\installcorenamespace {btxdataset}
+\installcorenamespace {btxrendering}
+\installcorenamespace {btxregister}
+\installcorenamespace {btxcommand}
+\installcorenamespace {btxrenderingdefinition}
+
+\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
+\installcommandhandler \??btxregister {btxregister} \??btxregister
+\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
+
+\let\currentbtxcitealternative\empty
+
+\let\currentbtxspecificationfallback\empty
+
+\unexpanded\def\setbtxparameterset#1#2%
+ {\edef\currentbtx
+ {\ifcsname\??btx\currentbtxspecification:#1:#2:\s!parent\endcsname
+ \currentbtxspecification:%
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:#1:#2:\s!parent\endcsname
+ \currentbtxspecificationfallback:%
+ \fi\fi\fi#1:#2}}
+
+\unexpanded\def\setbtxparametersetroot#1%
+ {\edef\currentbtx
+ {\ifcsname\??btx\currentbtxspecification:#1:\s!parent\endcsname
+ \currentbtxspecification:#1%
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:#1:\s!parent\endcsname
+ \currentbtxspecificationfallback:#1%
+ \fi\fi\fi}}
+
+\unexpanded\def\setbtxrendering
+ {\edef\currentbtxrendering
+ {\ifcsname\??btx\currentbtxspecification:\s!parent\endcsname
+ \currentbtxspecification
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:\s!parent\endcsname
+ \currentbtxspecificationfallback
+ \fi\fi\fi}}
+
+\unexpanded\def\setbtxlist % maybe simplify this one, always list=rendering?
+ {\edef\currentbtxlist
+ {\ifcsname\??btx\currentbtxrendering:\s!parent\endcsname
+ \currentbtxrendering
+ \else\ifcsname\??btx\currentbtxspecification:\s!parent\endcsname
+ \currentbtxspecification
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:\s!parent\endcsname
+ \currentbtxspecificationfallback
+ \fi\fi\fi\fi}%
+ \edef\currentlist{\s!btx:\currentbtxlist}}
+
+\unexpanded\def\usebtxdataset
+ {\begingroup
+ \dotripleempty\publ_use_dataset}
+
+\def\publ_use_dataset[#1][#2][#3]%
+ {\getdummyparameters[\c!specification=\currentbtxspecification,#3]%
+ \ifsecondargument
+ \clf_btxusedataset
+ specification {\dummyparameter\c!specification}%
+ dataset {#1}%
+ filename {#2}%
+ \relax
+ \else\iffirstargument
+ \clf_btxusedataset
+ specification {\dummyparameter\c!specification}%
+ dataset {\v!default}%
+ filename {#1}%
+ \relax
+ \fi\fi
+ \endgroup}
+
+\definebtxdataset
+ [\v!default]
+% [\c!language=] % nothing set so use current
+
+% \usebtxdataset
+% [default]
+% [mybibs.bib]
+
+\let\startpublication\relax
+\let\stoppublication \relax
+
+\unexpanded\def\startpublication
+ {\dodoubleempty\publ_set_publication}
+
+\def\publ_set_publication[#1][#2]%
+ {\begingroup
+ \catcode\commentasciicode\othercatcode
+ \ifsecondargument
+ \expandafter\publ_set_publication_indeed
+ \else\iffirstargument
+ \doubleexpandafter\publ_set_publication_checked
+ \else
+ \doubleexpandafter\publ_set_publication_default
+ \fi\fi{#1}{#2}}
+
+\def\publ_set_publication_default#1#2%
+ {\publ_set_publication_indeed\v!default{#1}}
+
+\def\publ_set_publication_checked#1#2%
+ {\doifelseassignment{#1}
+ {\publ_set_publication_indeed\v!default{#1}}
+ {\publ_set_publication_indeed{#1}{}}}
+
+\def\publ_set_publication_indeed#1#2#3\stoppublication
+ {\clf_btxaddentry{#1}{#2}{\detokenize{#3}}%
+ \endgroup
+ \ignorespaces}
+
+% commands
+
+\unexpanded\def\btxcommand#1%
+ {\ifcsname\??btxcommand#1\endcsname
+ \expandafter\publ_command_yes
+ \else
+ \expandafter\publ_command_nop
+ \fi{#1}}
+
+\let\btxcmd\btxcommand
+
+\def\publ_command_yes#1%
+ {\csname\??btxcommand#1\endcsname}
+
+\def\publ_command_nop#1%
+ {\ifcsname#1\endcsname
+ \showmessage\m!publications{10}{#1,#1}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
+ \else\ifcsname\utfupper{#1}\endcsname
+ \showmessage\m!publications{10}{#1,\utfupper{#1}}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
+ \else
+ \showmessage\m!publications{11}{#1}%
+ \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
+ \fi\fi
+ \publ_command_yes{#1}}
+
+\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
+ {\setuvalue{\??btxcommand\strippedcsname#1}}%
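+
+% for instance (hypothetical names, mimicking commands sometimes found in bib files):
+%
+% \definebtxcommand\TUB {TUGboat}
+% \definebtxcommand\sortnoop#1{}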
+
+% tracing
+
+\installtextracker
+ {publications.crosslinks}
+ {\let\btx_trace_list_cross\strc_references_tracer}
+ {\let\btx_trace_list_cross\gobbletwoarguments}
+
+\let\btx_trace_list_cross\gobbletwoarguments
+
+% access
+
+\let\currentbtxtag \empty
+\let\currentbtxdataset\v!default
+
+\unexpanded\def\setbtxentry[#1]% or maybe btxsetentry
+ {\edef\currentbtxtag{\clf_btxsetentry{\currentbtxdataset}{#1}}}
+
+% \let\btxsetdataset\setbtxdataset
+% \let\btxsetentry \setbtxentry
+
+% todo: no need for the currents as we can keep them at the lua end so we will have
+%
+% \btxfield : current
+% \btxspecificfield : dataset,tag,key
+
+\def\btxfield #1{\clf_btxfield {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdetail #1{\clf_btxdetail {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxflush #1{\clf_btxflush {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdirect #1{\clf_btxdirect {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfieldname #1{\clf_btxfieldname {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfieldtype #1{\clf_btxfieldtype {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfoundname #1{\clf_btxfoundname {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfoundtype #1{\clf_btxfoundtype {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxauthorfield#1{\clf_btxauthorfield \currentbtxauthorindex{#1}}
+\def\btxdoifelse #1{\clf_btxdoifelse {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdoif #1{\clf_btxdoif {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdoifnot #1{\clf_btxdoifnot {\currentbtxdataset}{\currentbtxtag}{#1}}
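+
+% so a list or cite setup can use these along the lines of (just a sketch, the
+% field name is only an example):
+%
+% \btxdoif{title}{\btxflush{title}\btxperiod}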
+
+\let\btxsetup\fastsetup
+
+%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
+%D with another one? I think this assumption is okay. For manuals we might want to
+%D mix but we can work around it.
+
+%D Rendering.
+
+\unexpanded\def\btxspace {\removeunwantedspaces\space}
+\unexpanded\def\btxnobreakspace {\removeunwantedspaces\nobreakspace} % these two are
+\unexpanded\def\btxnbsp {\removeunwantedspaces\nbsp} % the same anyway
+\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
+\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
+\unexpanded\def\btxcommabreak {\removeunwantedspaces,\hskip\zeropoint plus .5\emwidth\relax}
+\unexpanded\def\btxcolon {\removeunwantedspaces:\space}
+\unexpanded\def\btxsemicolon {\removeunwantedspaces;\space}
+\unexpanded\def\btxlparent {\removeunwantedspaces\space(} % obsolete
+\unexpanded\def\btxrparent {\removeunwantedspaces)\space} % obsolete
+\unexpanded\def\btxleftparenthesis {\removeunwantedspaces\space(}
+\unexpanded\def\btxrightparenthesis {\removeunwantedspaces)\space}
+\unexpanded\def\btxrightparenthesisperiod{\removeunwantedspaces).\space}
+\unexpanded\def\btxrightparenthesiscomma {\removeunwantedspaces),\space}
+\unexpanded\def\btxleftbracket {\removeunwantedspaces\space[}
+\unexpanded\def\btxrightbracket {\removeunwantedspaces]\space}
+\unexpanded\def\btxrightbracketperiod {\removeunwantedspaces].\space}
+\unexpanded\def\btxrightbracketcomma {\removeunwantedspaces],\space}
+
+%D Variables:
+
+\let\currentbtxbacklink \empty \unexpanded\def\btxsetbacklink {\def\currentbtxbacklink}
+\let\currentbtxbacktrace \empty \unexpanded\def\btxsetbacktrace {\def\currentbtxbacktrace}
+\let\currentbtxcategory \empty \unexpanded\def\btxsetcategory {\def\currentbtxcategory}
+\let\currentbtxcombis \empty \unexpanded\def\btxsetcombis {\def\currentbtxcombis}
+\let\currentbtxdataset \empty \unexpanded\def\btxsetdataset {\def\currentbtxdataset}
+\let\currentbtxfirst \empty \unexpanded\def\btxsetfirst {\def\currentbtxfirst}
+\let\currentbtxsecond \empty \unexpanded\def\btxsetsecond {\def\currentbtxsecond}
+%let\currentbtxthird \empty \unexpanded\def\btxsetthird {\def\currentbtxthird}
+\let\currentbtxsuffix \empty \unexpanded\def\btxsetsuffix {\def\currentbtxsuffix}
+\let\currentbtxinternal \empty \unexpanded\def\btxsetinternal {\def\currentbtxinternal}
+\let\currentbtxlefttext \empty \unexpanded\def\btxsetlefttext {\def\currentbtxlefttext}
+\let\currentbtxrighttext \empty \unexpanded\def\btxsetrighttext {\def\currentbtxrighttext}
+\let\currentbtxbefore \empty \unexpanded\def\btxsetbefore {\def\currentbtxbefore}
+\let\currentbtxafter \empty \unexpanded\def\btxsetafter {\def\currentbtxafter}
+\let\currentbtxlanguage \empty \unexpanded\def\btxsetlanguage {\def\currentbtxlanguage}
+\let\currentbtxtag \empty \unexpanded\def\btxsettag {\def\currentbtxtag}
+\let\currentbtxnumber \empty \unexpanded\def\btxsetnumber {\def\currentbtxnumber}
+\let\currentbtxauthorvariant\v!normal \unexpanded\def\btxsetauthorvariant{\def\currentbtxauthorvariant}
+
+\let\currentbtxfirstnames \empty \unexpanded\def\btxsetfirstnames{\let\currentbtxfirstnames\currentbtxfirstnames_indeed}
+\let\currentbtxinitials \empty \unexpanded\def\btxsetinitials {\let\currentbtxinitials \currentbtxinitials_indeed }
+\let\currentbtxjuniors \empty \unexpanded\def\btxsetjuniors {\let\currentbtxjuniors \currentbtxjuniors_indeed }
+\let\currentbtxsurnames \empty \unexpanded\def\btxsetsurnames {\let\currentbtxsurnames \currentbtxsurnames_indeed }
+\let\currentbtxvons \empty \unexpanded\def\btxsetvons {\let\currentbtxvons \currentbtxvons_indeed }
+
+\newconstant\currentbtxoverflow \unexpanded\def\btxsetoverflow #1{\currentbtxoverflow #1\relax}
+\newconstant\currentbtxconcat \unexpanded\def\btxsetconcat #1{\currentbtxconcat #1\relax}
+\newconstant\currentbtxcount \unexpanded\def\btxsetcount #1{\currentbtxcount #1\relax}
+\newconstant\currentbtxauthorindex %unexpanded\def\btxsetauthorindex#1{\currentbtxauthorindex#1\relax} % passed directly
+\newconstant\currentbtxauthorcount %unexpanded\def\btxsetauthorcount#1{\currentbtxauthorcount#1\relax} % passed directly
+\newconstant\currentbtxauthorstate \unexpanded\def\btxsetauthorstate#1{\currentbtxauthorstate#1\relax}
+
+\unexpanded\def\currentbtxfirstnames_indeed{\clf_btxcurrentfirstnames\numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxinitials_indeed {\clf_btxcurrentinitials \numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxjuniors_indeed {\clf_btxcurrentjuniors \numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxsurnames_indeed {\clf_btxcurrentsurnames \numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxvons_indeed {\clf_btxcurrentvons \numexpr\currentbtxauthorindex\relax}
+
+\let\currentbtxfirstpage \empty \unexpanded\def\btxsetfirstpage #1{\def\currentbtxfirstpage{\btx_page_number{#1}}}
+\let\currentbtxlastpage \empty \unexpanded\def\btxsetlastpage #1{\def\currentbtxlastpage {\btx_page_number{#1}}}
+\let\currentbtxfirstinternal\empty \unexpanded\def\btxsetfirstinternal {\def\currentbtxfirstinternal}
+\let\currentbtxlastinternal \empty \unexpanded\def\btxsetlastinternal {\def\currentbtxlastinternal}
+
+\def\currentbtxauthorvariant{normal}
+
+\unexpanded\def\btx_reset_list % not needed as we're grouped
+ {\let\currentbtxcombis \empty
+ \let\currentbtxcategory \empty
+ \let\currentbtxinternal \empty
+ \let\currentbtxlefttext \empty
+ \let\currentbtxrighttext\empty
+ \let\currentbtxbefore \empty
+ \let\currentbtxafter \empty
+ \let\currentbtxbacklink \empty
+ \let\currentbtxbacktrace\empty
+ \let\currentbtxlanguage \empty
+ \let\currentbtxtag \empty
+ \let\currentbtxsuffix \empty
+ \let\currentbtxnumber \empty
+ \let\currentbtxdataset \empty}
+
+\unexpanded\def\btx_reset_cite % check for less .. not all resets needed when we're grouped (only subcites)
+ {\let \currentbtxfirst \empty
+ \let \currentbtxsecond \empty
+ \let \currentbtxsuffix \empty
+ \let \currentbtxinternal \empty
+ \let \currentbtxlefttext \empty
+ \let \currentbtxrighttext \empty
+ \let \currentbtxbefore \empty
+ \let \currentbtxafter \empty
+ \let \currentbtxbacklink \empty
+ \let \currentbtxbacktrace \empty % not used here
+ \let \currentbtxlanguage \empty
+ \let \currentbtxdataset \empty
+ \let \currentbtxtag \empty
+ \let \currentbtxnumber \empty
+ \setconstant\currentbtxoverflow \zerocount
+ \setconstant\currentbtxconcat \zerocount
+ \setconstant\currentbtxcount \zerocount}
+
+\unexpanded\def\btx_reset_page % probably not needed
+ {\let \currentbtxfirstpage \empty
+ \let \currentbtxlastpage \empty
+ \let \currentbtxfirstinternal\empty
+ \let \currentbtxlastinternal \empty
+ \setconstant\currentbtxoverflow \zerocount
+ \setconstant\currentbtxconcat \zerocount
+ \setconstant\currentbtxcount \zerocount}
+
+\unexpanded\def\btx_reset_numbering % probably not needed
+ {\let \currentbtxfirst \empty
+ \let \currentbtxsecond\empty
+ \let \currentbtxsuffix\empty
+ \setconstant\currentbtxconcat\zerocount}
+
+%D Pages:
+
+\unexpanded\def\btx_page_number#1%
+ {\def\currentlistindex{#1}%
+ \structurelistpagenumber}
+
+%D Language:
+
+\def\mainbtxlanguage{\currentmainlanguage}
+
+\unexpanded\def\btx_check_language
+ {\let\mainbtxlanguage\currentlanguage
+ \ifx\currentbtxlanguage\empty
+ \let\currentbtxlanguage\currentlanguage
+ \else
+ \btx_check_language_indeed
+ \fi}
+
+\unexpanded\def\btx_check_language_indeed
+ {\edef\currentbtxlanguage{\reallanguagetag\currentbtxlanguage}%
+ \ifx\currentbtxlanguage\empty
+ \let\currentbtxlanguage\currentlanguage
+ \else\ifx\currentbtxlanguage\currentlanguage\else
+ \setcurrentlanguage\currentmainlanguage\currentbtxlanguage
+ \fi\fi}
+
+%D Tracing
+
+\newconditional\c_btx_trace % not used yet
+
+\installtextracker
+ {btxrendering}
+ {\settrue \c_btx_trace}
+ {\setfalse\c_btx_trace}
+
+%D Rendering lists and citations.
+
+\unexpanded\def\btxtodo#1%
+ {[#1]}
+
+%D Lists:
+
+\newdimen\d_publ_number_width
+
+\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
+\ifdefined\btxcitecounter \else \newcount\btxcitecounter \fi % maybe pass this to lua
+
+\newtoks \everysetupbtxlistplacement % name will change
+\newtoks \everysetupbtxciteplacement % name will change
+
+\definelist % only used for selecting
+ [\s!btx]
+
+\setuplist
+ [\s!btx]
+ [\c!prefixstopper=:,
+ \c!state=\v!start,
+ \c!alternative=a,
+ %\c!alternative=\v!paragraph,
+ %\c!width=\v!auto,
+ %\c!distance=\emwidth,
+ \c!before=\blank,
+ \c!after=\blank]
+
+\unexpanded\def\setupbtxlist
+ {\dodoubleempty\publ_setup_list}
+
+\unexpanded\def\publ_setup_list[#1][#2]%
+ {\ifsecondargument
+ \setuplist[\s!btx:#1][#2]%
+ \else\iffirstargument
+ \setuplist[\s!btx][#1]%
+ \fi\fi}
+
+\appendtoks
+ \ifx\currentbtxrenderingparent\empty
+ \definelist
+ [\s!btx:\currentbtxrendering]%
+ [\s!btx]%
+ \else\ifx\currentbtxrenderingparent\s!btx
+ \definelist
+ [\s!btx:\currentbtxrendering]%
+ [\s!btx]%
+ \else
+ \definelist
+ [\s!btx:\currentbtxrendering]%
+ [\s!btx:\currentbtxrenderingparent]%
+ \fi\fi
+\to \everydefinebtxrendering
+
+\newconditional\c_btx_list_texts
+
+\appendtoks
+ \doifelse{\btxrenderingparameter\c!textstate}\v!start
+ \settrue\setfalse\c_btx_list_texts
+\to \everysetupbtxlistplacement
+
+\newconditional\c_btx_list_pages
+
+\appendtoks
+ \doifelse{\btxrenderingparameter\c!pagestate}\v!start
+ \settrue\setfalse\c_btx_list_pages
+\to \everysetupbtxlistplacement
+
+\unexpanded\def\btx_entry_inject_pages % for the moment only normal
+ {\dontleavehmode
+ \begingroup
+ \setbtxlist % probably already set
+ \btx_reset_page
+ \setbtxparameterset\s!list\s!page
+ \btxparameter\c!command
+ {\usebtxstyleandcolor\c!style\c!color
+ \btxparameter\c!left
+ \clf_btxflushpages{\currentbtxdataset}{\currentbtxtag}%
+ \btxparameter\c!right}%
+ \endgroup}
+
+\unexpanded\def\btxpagesetup#1% there will be no left|right|command|style at this inner level
+ {\begingroup
+ \publ_fast_setup\plusfive\s!list\s!page
+ \endgroup
+ \btx_reset_page} % probably not needed
+
+\unexpanded\def\btxnumberingsetup#1%
+ {\begingroup
+ \setbtxparameterset{\c!list:\s!numbering}\currentbtxnumbering % brrrr \setbtxlist
+ \btxparameter\c!left
+ % \btxparameter\c!command{\publ_fast_setup\plusthree{\s!list:\s!numbering}{#1}}%
+ \publ_fast_setup\plusthree{\s!list:\s!numbering}{#1}%
+ \btxparameter\c!right
+ \endgroup
+ \btx_reset_numbering} % probably not needed
+
+% end of page stuff
+
+\unexpanded\def\btx_entry_inject
+ {\begingroup
+ {\redoconvertfont % see (**) in strc-lst, this will become a configuration option
+ \edef\currentbtxcategory{\btxfield{category}}%
+ \ignorespaces
+ \ifconditional\c_btx_list_texts
+ \currentbtxbefore
+ \fi
+ \begingroup
+ \usebtxstyleandcolor\c!style\c!color
+ \ignorespaces
+ \publ_fast_setup\plusfour\s!list\currentbtxcategory
+ \removeunwantedspaces
+ \endgroup
+ \ifx\currentbtxcombis\empty \else
+ \btxrenderingparameter\c!separator
+ % maybe move this loop to lua
+ \begingroup
+ \processcommacommand[\currentbtxcombis]\btx_entry_inject_combi
+ \endgroup
+ \fi
+ \ifconditional\c_btx_list_pages
+ \btx_entry_inject_pages
+ \fi
+ \ifconditional\c_btx_list_texts
+ \currentbtxafter
+ \fi
+ \endgroup}
+
+\unexpanded\def\btxshowentryinline
+ {\dodoubleempty\btx_entry_show_inline}
+
+\unexpanded\def\btx_entry_show_inline[#1][#2]%
+ {\ifsecondargument
+ \ctxcommand{showbtxentry("#1","#2")}
+ \else\iffirstargument
+ \ctxcommand{showbtxentry("\currentbtxdataset","#1")}
+ \else
+ \ctxcommand{showbtxentry("\currentbtxdataset","\currentbtxtag")}
+ \fi\fi}
+
+\unexpanded\def\btxstartcombientry
+ {\begingroup}
+
+\unexpanded\def\btxstopcombientry
+ {\endgroup}
+
+\unexpanded\def\btxhandlecombientry
+ {\btx_reference_indeed}
+
+\def\btx_entry_inject_combi#1%
+ {\begingroup
+ \def\currentbtxtag{#1}%
+ \ignorespaces
+ \publ_fast_setup\plusfour\s!list\currentbtxcategory
+ \removeunwantedspaces
+ \endgroup}
+
+% uses reference when set
+
+% \def\btx_entry_inject_combi#1%
+% {\begingroup
+% \def\currentbtxtag{#1}%
+% \ignorespaces
+% \btxdoifelsecombiinlist\currentbtxdataset\currentbtxtag
+% {\clf_btxflushlistcombi{\currentbtxdataset}{\currentbtxtag}}
+% {\publ_fast_setup\plusfour\s!list\currentbtxcategory}%
+% \removeunwantedspaces
+% \endgroup}
+
+\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
+\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
+
+\let\completelistofpublications\completebtxrendering
+\let\placelistofpublications \placebtxrendering
+
+\newtoks\everybtxlistrendering
+
+\appendtoks
+ \setbtxlist
+ %
+ \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}% \v!cite will become \s!cite
+ \ifx\currentbtxcriterium\empty
+ \let\currentbtxcriterium\v!previous
+ \else\ifx\currentbtxcriterium\v!cite
+ \let\currentbtxcriterium\v!here
+ \fi\fi
+ %
+ \iflocation
+ \letinteractionparameter\c!style\empty
+ \fi
+\to \everybtxlistrendering
+
+\def\nofbtxlistentries {0}
+\def\currentbtxlistentry{0}
+\def\currentbtxlistindex{0} % only for internal use (points back to big list)
+
+\newconditional\c_publ_prefixed
+
+\unexpanded\def\btxsetnoflistentries #1{\edef\nofbtxlistentries {#1}}
+\unexpanded\def\btxsetcurrentlistentry#1{\edef\currentbtxlistentry{#1}}
+\unexpanded\def\btxsetcurrentlistindex#1{\edef\currentbtxlistindex{#1}}
+
+\unexpanded\def\btxdoifelsesameaspreviouschecked#1#2% #1 == always | doublesided
+ {\clf_btxdoifelsesameasprevious
+ {\currentbtxdataset}%
+ \currentbtxlistentry%
+ {#2}%
+ \c_btx_list_reference
+ {#1}}
+
+\unexpanded\def\btxdoifelsesameasprevious
+ {\btxdoifelsesameaspreviouschecked\v!doublesided}
+
+\unexpanded\def\btxdoifelsecombiinlist#1#2%
+ {\clf_btxdoifelsecombiinlist{#1}{#2}}
+
+\let\btxdoifsameaspreviouscheckedelse\btxdoifelsesameaspreviouschecked
+\let\btxdoifsameaspreviouselse \btxdoifelsesameasprevious
+\let\btxdoifcombiinlistelse \btxdoifelsecombiinlist
+
+\def\publ_place_list_indeed#1[#2][#3]%
+ {\begingroup
+ \ifsecondargument
+ % [rendering] [settings]
+ \edef\currentbtxrendering{#2}%
+ \setupcurrentbtxrendering[#3]%
+ \edef\p_specification{\btxrenderingparameter\c!specification}%
+ \ifx\p_specification\empty\else
+ \let\currentbtxspecification\p_specification
+ \fi
+ \else\iffirstargument
+ \doifelseassignment{#2}
+ {% [settings]
+ \let\currentbtxrendering\currentbtxspecification
+ \setupcurrentbtxrendering[#2]%
+ \edef\p_specification{\btxrenderingparameter\c!specification}%
+ \ifx\p_specification\empty\else
+ \let\currentbtxspecification\p_specification
+ \let\currentbtxrendering\currentbtxspecification % tricky
+ \fi}
+ {\edef\currentbtxrendering{#2}%
+ \edef\p_specification{\btxrenderingparameter\c!specification}%
+ \ifx\p_specification\empty\else
+ \let\currentbtxspecification\p_specification
+ \fi}%
+ \else
+ \let\currentbtxrendering\currentbtxspecification
+ \fi\fi
+ \setbtxparameterset\currentbtxspecification\s!list
+ \the\everybtxlistrendering
+ \ifconditional#1\relax
+ \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
+ \ifx\currentbtxrenderingtitle\empty
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
+ \else
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
+ \fi
+ \fi
+ \ifx\currentbtxrendering\empty
+ \setbtxrendering % hm
+ \fi
+ \btxrenderingparameter\c!before
+ \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
+ \uselanguageparameter\btxdatasetparameter % new
+ \setbtxlist
+ \the\everystructurelist
+ \the\everysetupbtxlistplacement
+ \forgetall
+ % why not pass this with collect .. todo
+ % here we just collect items
+ \clf_btxcollectlistentries
+ names {\s!btx}%
+ criterium {\currentbtxcriterium}%
+ reference {\btxrenderingparameter\c!reference}%
+ method {\btxrenderingparameter\c!method}%
+ dataset {\currentbtxdataset}%
+ keyword {\btxrenderingparameter\c!keyword}%
+ sorttype {\btxrenderingparameter\c!sorttype}%
+ repeated {\btxrenderingparameter\c!repeat}%
+ ignored {\btxrenderingparameter\c!ignore}%
+ group {\btxrenderingparameter\c!group}%
+ \relax
+ \ifnum\nofbtxlistentries>\zerocount
+ \startpacked[\v!blank]%
+ % sorting and so
+ \clf_btxpreparelistentries{\currentbtxdataset}% could be put in collect
+ % next we analyze the width
+ \ifx\currentbtxnumbering\empty \else
+ \edef\p_width{\listparameter\c!width}%
+ \ifx\p_width\v!auto
+ \setbox\scratchbox\vbox \bgroup
+ \settrialtypesetting
+ \clf_btxfetchlistentries{\currentbtxdataset}%
+ \egroup
+ \d_publ_number_width\wd\scratchbox
+ \letlistparameter\c!width\d_publ_number_width
+ \fi
+ \fi
+ \doifelse{\listparameter\c!prefix}\v!yes\settrue\setfalse\c_publ_prefixed
+ % this actually typesets them, we loop here as otherwise the whole
+ % bunch gets flushed at once
+ \dorecurse\nofbtxlistentries
+ {\let\currentbtxlistentry\recurselevel
+ \clf_btxflushlistentry{\currentbtxdataset}\currentbtxlistentry\relax}%
+ \stoppacked
+ \fi
+ \btxrenderingparameter\c!after
+ \global\advance\btxblock\plusone
+ \ifconditional#1\relax
+ \stopnamedsection
+ \fi
+ \endgroup}
+
+\def\publ_place_list_complete{\publ_place_list_indeed\conditionaltrue}
+\def\publ_place_list_standard{\publ_place_list_indeed\conditionalfalse}
+
+\def\currentbtxblock{\number\btxblock}
+
+% called at the lua end, for determining the width
+
+\unexpanded\def\btxchecklistentry
+ {\begingroup
+ % todo, switch to font
+ \hbox{\btx_reference_checked}%
+ \par
+ \endgroup}
+
+% called at the lua end, the real rendering
+
+% we could have a yes and no where no nils the btx_reference_indeed ... saves a check there
+
+\installstructurelistprocessor{\s!btx}
+ {\let\currentlistentrynumber \btx_reference_indeed
+ \let\currentlistentrytitle \btx_entry_indeed
+ \let\currentlistentrypagenumber\btx_page_indeed
+ \strc_lists_apply_renderingsetup}
+
+\def\btx_entry_indeed
+ {\btx_list_reference_inject
+ \btx_entry_inject}
+
+\def\btx_page_indeed
+ {}
+
+\unexpanded\def\btxhandlelistentry
+ {\strc_lists_entry_process}
+
+\unexpanded\def\btxstartlistentry % maybe pass i
+ {\begingroup
+ \global\advance\c_btx_list_reference\plusone}
+
+\unexpanded\def\btxstoplistentry
+ {\iftrialtypesetting
+ \global\advance\c_btx_list_reference\minusone
+ \fi
+ \endgroup}
+
+\newtoks\everybtxlistentry
+
+\unexpanded\def\btxlistsetup#1% used for the reference in the list
+ {\the\everybtxlistentry
+ \everybtxlistentry\emptytoks % so only once per entry to be sure
+ \publ_fast_setup\plusfour\s!list{#1}}
+
+\appendtoks
+ \btx_check_language
+\to \everybtxlistentry
+
+\unexpanded\def\btx_reference_indeed
+ {\begingroup
+ % redundant, will go away:
+ \setbtxparameterset{\c!list:\s!numbering}\currentbtxnumbering
+ %
+ \ifx\currentbtxnumbering\empty
+ % nothing
+ \else\ifx\currentbtxnumbering\v!no
+ % nothing
+ \else
+ \usebtxstyleandcolor\c!style\c!color % new, needed?
+ \ifconditional\c_publ_prefixed\btxlistprefixednumber\fi
+ \clf_btxlistvariant % some can go
+ {\currentbtxdataset}%
+ {\currentbtxblock}%
+ {\currentbtxtag}%
+ {\currentbtxnumbering}%
+ {\currentbtxnumber}%
+ \relax
+ \fi\fi
+ \endgroup}
+
+\unexpanded\def\btxlistprefixednumber % hack but alan needs it
+ {\clf_listprefixednumber
+ {\currentlist}%
+ \currentbtxlistindex
+ {%
+ prefix {\listparameter\c!prefix}%
+ separatorset {\listparameter\c!prefixseparatorset}%
+ conversionset {\listparameter\c!prefixconversionset}%
+ starter {\listparameter\c!prefixstarter}%
+ stopper {\listparameter\c!prefixstopper}%
+ set {\listparameter\c!prefixset}%
+ segments {\listparameter\c!prefixsegments}%
+ connector {\listparameter\c!prefixconnector}%
+ }%
+ \relax}
+
+\unexpanded\def\btx_reference_checked
+ {\dontleavehmode\hbox\bgroup
+ \btx_reference_indeed
+ \egroup}
+
+\newcount\c_btx_list_reference
+
+\unexpanded\def\btx_list_reference_inject
+ {\dontleavehmode\begingroup % no box
+ \iftrialtypesetting\else
+ \btx_list_reference_inject_now
+ \fi
+ % \btx_reference_indeed % else double entry in list
+ \endgroup}
+
+\def\btx_list_reference_inject_now
+ {\btx_trace_list_cross\empty\currentbtxbacktrace
+ \strc_references_direct_full_user
+ {\ifx\currentbtxdataset\v!default\else\s!btxset=\currentbtxdataset,\fi%
+ \s!btxref=\currentbtxtag,%
+ \s!btxspc=\currentbtxspecification,%
+ \s!btxlst=\number\c_btx_list_reference,% check if needed
+ %\ifx\currentbtxcombis\empty\else\s!btxcom={\currentbtxcombis},\fi%
+ \ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
+ \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
+ \ifx\currentbtxbacklink\currentbtxbacktrace\s!btxint=\currentbtxbacklink\else\s!btxbck=\currentbtxbacktrace\fi}%
+ {\s!btx::\v!list::\number\c_btx_list_reference}%
+ {\currentbtxnumber}}
+
+\newconditional\c_btx_cite_reference_injected
+
+\unexpanded\def\btx_cite_reference_inject
+ {\ifconditional\c_btx_cite_reference_injected
+ \else
+ \dontleavehmode
+ \iftrialtypesetting \else
+ \ifx\currentbtxbacklink\empty
+ % can be made empty when combining author / year
+ \else
+ \btx_cite_reference_inject_indeed
+ \settrue\c_btx_cite_reference_injected
+ \fi
+ \fi
+ \fi}
+
+\newtoks\t_btx_reference_inject
+
+\def\btx_cite_reference_inject_indeed
+ {\btx_trace_list_cross\currentbtxbacklink\empty
+ \the\t_btx_reference_inject
+ \strc_lists_inject_direct % todo: make like \btx_list_reference_inject_now with { }
+ [\s!btx]%
+ [\c!type=\s!btx]% \c!location=\v!none
+ [\ifx\currentbtxdataset\v!default\else\s!btxset=\currentbtxdataset,\fi%
+ \s!btxref=\currentbtxtag,%
+ %\ifx\currentbtxcombis\empty\else\s!btxcom={\currentbtxcombis},\fi%
+ \ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
+ \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
+ \s!btxint=\number\currentbtxbacklink
+ \ifx\currentbtxciteuservariables\empty\else,\currentbtxciteuservariables\fi]}
+
+\def\currentbtxuservariable #1{\clf_btxuservariable {\currentbtxdataset}{#1}}
+\def\btxdoifelseuservariable#1{\clf_btxdoifelseuservariable{\currentbtxdataset}{#1}}
+
+\let\btxdoifuservariableelse\btxdoifelseuservariable
+
+\let\btxcitereference\btx_cite_reference_inject
+
+\let\currentbtxnumbering\empty
+
+\appendtoks
+ \edef\currentbtxnumbering{\btxrenderingparameter\c!numbering}%
+ \ifx\currentbtxnumbering\v!yes
+ \def\currentbtxnumbering{num}% convenient alias
+ \letbtxrenderingparameter\c!numbering\currentbtxnumbering
+ \letlistparameter\c!headnumber\v!always
+ \else\ifx\currentbtxnumbering\v!no
+ \letlistparameter\c!headnumber\v!no
+ \let\currentbtxnumbering\empty
+ % \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
+ \letlistparameter\c!symbol \v!none
+ \letlistparameter\c!aligntitle \v!yes
+ \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
+ \else
+ \letlistparameter\c!headnumber\v!always
+ \fi\fi
+ \let\currentlistmethod\s!btx
+\to \everysetupbtxlistplacement
+
+\unexpanded\def\btxremapauthor
+ {\dodoubleargument\btx_remap_author}
+
+\def\btx_remap_author[#1][#2]%
+ {\clf_btxremapauthor{#1}{#2}}
+
+\unexpanded\def\btxflushauthor
+ {\doifelsenextoptionalcs\btx_flush_author_yes\btx_flush_author_nop}
+
+\unexpanded\def\btxflushsuffix
+ {\ifx\currentbtxsuffix\empty
+ % nothing
+ \else
+ \characters{\currentbtxsuffix}% todo : rendering specific converter
+ \fi}
+
+\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
+\def\btx_flush_author_nop {\btx_flush_author{\btxparameter\c!authorconversion}}
+
+\unexpanded\def\btx_flush_author#1#2%
+ {\begingroup
+ \edef\currentbtxfield{#2}%
+ \setbtxparameterset\s!list\currentbtxfield
+% \let\currentbtxlistvariant\currentbtxfield
+ \clf_btxauthor
+ {\currentbtxdataset}%
+ {\currentbtxtag}%
+ {\currentbtxfield}%
+ {%
+ combiner {#1}%
+ kind {list}%
+ etallimit {\btxparameter\c!etallimit}%
+ etaldisplay {\btxparameter\c!etaldisplay}%
+ etaloption {\btxparameter\c!etaloption}%
+ symbol {\btxparameter{\c!stopper:initials}}%
+ }%
+ \relax
+ \endgroup}
+
+% yes or no: maybe just \flushauthor{...}{...}
+
+\unexpanded\def\btxflushauthorname {\btx_flush_author{name}} % #1
+\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
+\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
+\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
+\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
+
+\unexpanded\def\currentbtxciteauthor % always author
+ {\begingroup
+ \setbtxparameterset\s!cite\s!author
+ \clf_btxauthor
+ {\currentbtxdataset}%
+ {\currentbtxtag}%
+ {\s!author}%
+ {%
+ combiner {\btxparameter\c!authorconversion}%
+ kind {cite}%
+ etallimit {\btxparameter\c!etallimit}%
+ etaldisplay {\btxparameter\c!etaldisplay}%
+ etaloption {\btxparameter\c!etaloption}%
+ symbol {\btxparameter{\c!stopper:initials}}%
+ }%
+ \relax
+ \endgroup}
+
+\unexpanded\def\btxstartauthor#1#2#3% a state > 0 signals that some authors can clash
+ {\begingroup
+ \currentbtxauthorindex#1\relax
+ \currentbtxauthorcount#2\relax
+ \currentbtxauthorstate#3\relax}
+
+\unexpanded\def\btxstopauthor
+ {\endgroup}
+
+\unexpanded\def\btxciteauthorsetup#1{\fastsetup{\s!btx:\s!cite:\s!author:#1}}
+\unexpanded\def\btxlistauthorsetup#1{\fastsetup{\s!btx:\s!list:\s!author:#1}}
+
+% \btxflushauthor{author}
+% \btxflushauthor{editor}
+%
+% \btxflushauthor[name]{author}
+% \btxflushauthor[normal]{author}
+% \btxflushauthor[normalshort]{author}
+% \btxflushauthor[inverted]{author}
+% \btxflushauthor[invertedshort]{author}
+
+% Interaction
+
+\newconditional\btxinteractive
+\newconditional\btx_interactive
+
+% or maybe modes?
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \setfalse\btxinteractive
+ \else
+ \settrue\btxinteractive
+ \ifx\p_interaction\v!all
+ \settrue\btx_interactive
+ \else
+ \setfalse\btx_interactive
+ \fi
+ \fi
+ \else
+ \setfalse\btxinteractive
+ \setfalse\btx_interactive
+ \fi
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \setfalse\btxinteractive
+ \else
+ \settrue\btxinteractive
+ \fi
+ \else
+ \setfalse\btxinteractive
+ \fi
+\to \everysetupbtxciteplacement
+
+%D When a publication is cited, we need to signal that somehow. This is done with the
+%D following (non-user) command. We could tag without injecting a node but this way
+%D we also store the location, which makes it possible to ask for local lists.
+
+%D \macros{cite,nocite,citation,nocitation,usecitation}
+%D
+%D The inline \type {\cite} command creates an (often short) reference to a publication
+%D and for historic reasons uses a strict test for brackets. This means, at least
+%D in the default case, that spaces are ignored in the argument scanner. The \type
+%D {\citation} command is more liberal but also gobbles following spaces. Both
+%D commands insert a reference as well as a visual clue.
+%D
+%D The \type {no} commands all do the same (they are synonyms): they make sure that
+%D a reference is injected but show nothing. However, they do create a node, so it is
+%D best to attach them to some text in order to avoid spacing interference. A slightly
+%D less efficient alternative is \type {\cite[none][tag]}.
+
+% [tags]
+% [settings|variant][tags]
+% [base::tags]
+% [settings|variant][base::tags]
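+
+% for instance (tag, variant and settings are hypothetical):
+%
+% \cite[hagen2013]
+% \cite[authoryear][hagen2013]
+% \cite[alternative=authoryear,righttext={, page 12}][hagen2013]
+% \nocite[hagen2013]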
+
+% these need to be sort of protected:
+
+\let\p_publ_cite_before \empty
+\let\p_publ_cite_after \empty
+\let\p_publ_cite_lefttext \empty
+\let\p_publ_cite_righttext\empty
+
+\let\currentbtxciteuservariables\empty
+
+\unexpanded\def\btxhybridcite % so one can alias the old
+ {\dontleavehmode
+ \begingroup
+ \strictdoifelsenextoptional\publ_cite_tags_options\publ_cite_tags_indeed}
+
+\unexpanded\def\publ_cite_tags_options[#1]%
+ {\strictdoifelsenextoptional{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
+
+\unexpanded\def\publ_cite_tags_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \setbtxparametersetroot\s!cite % we need to get the default
+ \edef\currentbtxcitealternative{\btxparameter\c!alternative}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \edef\currentbtxcitetag{#1}%
+ \the\everysetupbtxciteplacement
+ \publ_cite_variant
+ \endgroup}
+
+\unexpanded\def\publ_cite_tags_options_indeed#1%
+ {\doifelseassignment{#1}\publ_cite_tags_settings_indeed\publ_cite_tags_variants_indeed{#1}}
+
+\def\publ_cite_tags_settings_indeed#1[#2]%
+ {\letinteractionparameter\c!style\empty
+ %\letinteractionparameter\c!color\empty
+ \letdummyparameter\c!reference \empty
+ \letdummyparameter\c!alternative\empty
+ \letdummyparameter\c!before \empty
+ \letdummyparameter\c!after \empty
+ \letdummyparameter\c!lefttext \empty
+ \letdummyparameter\c!righttext \empty
+ \getdummyparameters[#1]%
+ \edef\p_reference{\dummyparameter\c!reference}%
+ \ifx\p_reference\empty
+ \edef\currentbtxcitetag{#2}%
+ \else
+ \let\currentbtxcitetag\p_reference
+ \edef\currentbtxciteuservariables{#2}%
+ \fi
+ \edef\p_alternative{\dummyparameter\c!alternative}%
+ \ifx\p_alternative\empty
+ \setbtxparametersetroot\s!cite
+ \edef\currentbtxcitealternative{\btxparameter\c!alternative}%
+ \else
+ \let\currentbtxcitealternative\p_alternative
+ \fi
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \setupcurrentbtx[#1]%
+ %
+ \edef\p_publ_cite_before {\dummyparameter\c!before}%
+ \edef\p_publ_cite_after {\dummyparameter\c!after}%
+ \edef\p_publ_cite_lefttext {\dummyparameter\c!lefttext}%
+ \edef\p_publ_cite_righttext{\dummyparameter\c!righttext}%
+ %
+ \the\everysetupbtxciteplacement
+ \publ_cite_variant
+ \endgroup}
+
+\def\publ_cite_tags_variants_indeed#1[#2]%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitealternative{#1}%
+ \edef\currentbtxcitetag{#2}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \the\everysetupbtxciteplacement
+ \publ_cite_variant
+ \endgroup}
+
+\newconditional\btxcitecompress
+
+\let\currentbtxreference\empty
+
+\def\publ_cite_variant
+ {\begingroup
+ \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
+
+\unexpanded\def\publ_cite_handle_variant#1%
+ {\begingroup
+ \edef\currentbtxcitealternative{#1}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \the\everysetupbtxciteplacement
+ \dosingleargument\publ_cite_handle_variant_indeed}
+
+\unexpanded\def\publ_cite_handle_variant_blob
+ {\clf_btxhandlecite
+ dataset {\currentbtxdataset}%
+ reference {\currentbtxreference}%
+ markentry \iftrialtypesetting\s!false\else\s!true\fi\space
+ variant {\currentbtxcitealternative}%
+ sorttype {\btxparameter\c!sorttype}%
+ compress {\btxparameter\c!compress}%
+ author {\btxparameter\c!author}%
+ authorconversion {\btxparameter\c!authorconversion}%
+ lefttext {\p_publ_cite_lefttext}%
+ righttext {\p_publ_cite_righttext}%
+ before {\p_publ_cite_before}%
+ after {\p_publ_cite_after}%
+ \relax
+ \clf_btxflushmarked} % maybe: \iftrialtypesetting\else ... \fi
+
+\let\dobtxcitevariantblob\publ_cite_handle_variant_blob % command can use it via lua
+
+\def\publ_cite_handle_variant_indeed[#1]%
+ {\letbtxparameter\c!alternative\currentbtxcitealternative
+ \edef\currentbtxreference{#1}%
+ \usebtxstyleandcolor\c!style\c!color
+ \uselanguageparameter\btxdatasetparameter % new
+ \btxparameter\c!left
+ \btxparameter\c!command{\dobtxcitevariantblob}% {\publ_cite_handle_variant_blob}%
+ \btxparameter\c!right
+ \endgroup}
+
+\unexpanded\def\btxcitation
+ {\dontleavehmode
+ \begingroup
+ \dodoubleempty\publ_citation}
+
+\def\publ_citation[#1][#2]% could be made more efficient but not now
+ {\ifsecondargument
+ \publ_cite_tags_options_indeed{#1}[#2]%
+ \else
+ \publ_cite_tags_indeed{#1}%
+ \fi}
+
+\unexpanded\def\btxnocitation
+ {\dosingleempty\publ_cite_no}
+
+\unexpanded\def\publ_cite_no[#1]%
+ {\iftrialtypesetting \else
+ \begingroup
+ \edef\currentbtxreference{#1}%
+ \clf_btxhandlenocite
+ dataset {\currentbtxdataset}%
+ reference {\currentbtxreference}%
+ markentry true%
+ \relax
+ % \clf_btxflushmarked
+ \endgroup
+ \fi}
+
+\unexpanded\def\btxmissing#1%
+ {\dontleavehmode{\tttf<#1>}}
+
+%D Compatibility:
+
+\let\cite \btxcitation
+\let\citation \btxcitation
+\let\nocite \btxnocitation
+\let\nocitation\btxnocitation
+
+\unexpanded\def\cite {\doifelsenextoptionalcs\btxcitation \btxdirectcite}
+\unexpanded\def\nocite{\doifelsenextoptionalcs\btxnocitation\btxdirectnocite}
+
+\unexpanded\def\btxdirectcite #1{\btxcitation [#1]\relax} % no optional arguments
+\unexpanded\def\btxdirectnocite#1{\btxnocitation[#1]\relax} % no optional arguments
+
+%D Setup helpers, beware, we need to wrap this .. now we need to know
+%D how setups are implemented.
+
+\setvalue{\??setup:\s!btx:\s!unknown}#1{\inframed{\tttf#1}}
+
+\def\publ_fast_setup_yes#1#2%
+ {\csname\??setup:\s!btx:%
+ \ifcsname\??setup:\s!btx:\currentbtxspecification:#1:#2\endcsname
+ \currentbtxspecification:#1:#2%
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback:#1:#2\endcsname
+ \currentbtxspecificationfallback:#1:#2%
+ \else\ifcsname\??setup:\s!btx:#1:#2\endcsname
+ #1:#2%
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecification:#1:\s!unknown\endcsname
+ \currentbtxspecification:#1:\s!unknown
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback:#1:\s!unknown\endcsname
+ \currentbtxspecificationfallback:#1:\s!unknown
+ \else
+ #1:\s!unknown
+ \fi\fi\fi\fi\fi
+ \endcsname{#2}}
+
+\def\publ_fast_setup_nop#1#2%
+ {\csname\??setup:\s!btx:%
+ \ifcsname\??setup:\s!btx:\currentbtxspecification:#1:#2\endcsname
+ \currentbtxspecification:#1:#2%
+ \else\ifcsname\??setup:\s!btx:#1:#2\endcsname
+ #1:#2%
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecification:#1:\s!unknown\endcsname
+ \currentbtxspecification:#1:\s!unknown
+ \else
+ #1:\s!unknown
+ \fi\fi\fi
+ \endcsname{#2}}
+
+\newconstant\btxsetuptype
+
+% 0 = unknown darkred
+% 1 = cite darkblue
+% 2 = subcite darkgreen
+% 3 = numbering darkorange
+% 4 = list darkcyan
+% 5 = page darkmagenta
+% 6 = unknown darkred
+
+\unexpanded\def\publ_fast_btx_setup_chain_inbetween
+ {\allowbreak->\allowbreak}
+
+\unexpanded\def\publ_fast_btx_setup_chain_yes#1#2%
+ {\dontleavehmode\begingroup
+ \infofont
+ \ifcase\btxsetuptype\darkred\or\darkblue\or\darkgreen\or\darkcyan\or\darkmagenta\else\darkred\fi
+ [%
+ \currentbtxspecification :#1:#2\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecificationfallback:#1:#2\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ #1:#2\ifcsname\??setup:\s!btx :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecification :#1:\s!unknown\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:\s!unknown\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecificationfallback:#1:\s!unknown\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback:#1:\s!unknown\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ unset\fi\fi\fi\fi\fi
+ \space @\space
+ \currentbtx
+ ]%
+ \endgroup}
+
+\unexpanded\def\publ_fast_btx_setup_chain_nop#1#2%
+ {\dontleavehmode\begingroup
+ \infofont
+ \darkred
+ [%
+ \currentbtxspecification :#1:#2\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ #1:#2\ifcsname\??setup:\s!btx :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecification :#1:\s!unknown\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:\s!unknown\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ unset\fi\fi\fi
+ \space @\space
+ \currentbtx
+ ]%
+ \endgroup}
+
+\unexpanded\def\publ_fast_btx_setup_normal#1%
+ {\btxsetuptype#1\relax
+ \ifx\currentbtxspecificationfallback\empty
+ \expandafter\publ_fast_setup_nop
+ \else
+ \expandafter\publ_fast_setup_yes
+ \fi}
+
+\unexpanded\def\publ_fast_btx_setup_visual#1#2#3%
+ {\btxsetuptype#1\relax
+ \ifx\currentbtxspecificationfallback\empty
+ \expandafter\publ_fast_btx_setup_chain_nop
+ \else
+ \expandafter\publ_fast_btx_setup_chain_yes
+ \fi{#2}{#3}%
+ \ifx\currentbtxspecificationfallback\empty
+ \expandafter\publ_fast_setup_nop
+ \else
+ \expandafter\publ_fast_setup_yes
+ \fi{#2}{#3}}
+
+\installtextracker
+ {publications.setups}
+ {\let\publ_fast_setup\publ_fast_btx_setup_visual}
+ {\let\publ_fast_setup\publ_fast_btx_setup_normal}
+
+\let\publ_fast_setup\publ_fast_btx_setup_normal
+
+%D Cite helpers:
+
+\newtoks\everybtxciteentry
+
+\prependtoks
+ \setfalse\c_btx_cite_reference_injected
+\to \everybtxciteentry
+
+\unexpanded\def\btxcitesetup#1%
+ {\the\everybtxciteentry
+ \everybtxciteentry\emptytoks % tricky maybe not when subcites
+ \publ_fast_setup\plusone\s!cite{#1}} % no \btxcitereset as we loose dataset and such
+
+\unexpanded\def\btxsubcitesetup#1%
+ {\the\everybtxciteentry
+ \everybtxciteentry\emptytoks % tricky maybe not when subcites
+ \publ_fast_setup\plustwo\s!cite{#1}} % no \btxcitereset as we loose dataset and such
+
+\appendtoks
+ \btx_check_language
+\to \everybtxciteentry
+
+\unexpanded\def\btxstartsubcite#1%
+ {\begingroup
+ \btx_reset_cite % todo: limited set
+ \def\currentbtxcitealternative{#1}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \usebtxstyleandcolor\c!style\c!color
+ \btxparameter\c!left
+ \relax}
+
+\unexpanded\def\btxstopsubcite
+ {\relax
+ \btxparameter\c!right
+ \endgroup}
+
+\unexpanded\def\btxstartciterendering[#1]%
+ {\begingroup
+ \edef\currentbtxcitealternative{#1}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \usebtxstyleandcolor\c!style\c!color
+ \btxparameter\c!left
+ \relax}
+
+\unexpanded\def\btxstopciterendering
+ {\relax
+ \btxparameter\c!right
+ \endgroup}
+
+\let\btxstartcite \begingroup
+\let\btxstopcite \endgroup
+\let\btxstartciteauthor\begingroup
+\let\btxstopciteauthor \endgroup
+
+%D Whatever helpers:
+
+\unexpanded\def\btxsingularplural#1{\clf_btxsingularorplural{\currentbtxdataset}{\currentbtxtag}{#1}}
+\unexpanded\def\btxoneorrange #1{\clf_btxoneorrange {\currentbtxdataset}{\currentbtxtag}{#1}}
+\unexpanded\def\btxfirstofrange #1{\clf_btxfirstofrange {\currentbtxdataset}{\currentbtxtag}{#1}}
+
+\let\btxsingularorplural\btxsingularplural
+
+\stopcontextdefinitioncode
+
+%D Journals
+
+\unexpanded\def\btxloadjournallist [#1]{\clf_btxloadjournallist{#1}}
+\unexpanded\def\btxsavejournallist [#1]{\clf_btxsavejournallist{#1}}
+\unexpanded\def\btxaddjournal [#1][#2]{\clf_btxaddjournal{#1}{#2}}
+ \def\btxexpandedjournal #1{\clf_btxexpandedjournal{#1}} % \unexpanded ?
+ \def\btxabbreviatedjournal#1{\clf_btxabbreviatedjournal{#1}} % \unexpanded ?
+
+% \installcorenamespace{btxjournal}
+%
+% \letvalue{\s!btxjournal\v!long }\btxexpandedjournal
+% \letvalue{\s!btxjournal\v!short }\btxabbreviatedjournal
+% \letvalue{\s!btxjournal\v!normal}\firstofoneargument
+%
+% \unexpanded\def\btxcheckedjournal
+% {\expandnamespaceparameter\s!btxjournal\btxrenderingparameter\c!journalconversion}
+
+% \btxloadjournallist[list.txt] % Foo Journal of Bars = FBJ \n ....
+%
+% \btxexpandedjournal[fbj]
+% \btxabbreviatedjournal[foo journal of bars]
+
+%D Saving data:
+
+\unexpanded\def\savebtxdataset
+ {\dotripleargument\publ_save_dataset}
+
+\unexpanded\def\publ_save_dataset[#1][#2][#3]%
+ {\ifthirdargument
+ \publ_save_dataset_indeed[#1][#2][#3]%
+ \else\ifsecondargument
+ \doifelseassignment{#2}%
+ {\publ_save_dataset_indeed[\s!default][#1][#2]}%
+ {\publ_save_dataset_indeed[#1][#2][]}%
+ \else\iffirstargument
+ \doifelseassignment{#1}%
+ {\publ_save_dataset_indeed[\s!default][\jobname-saved.bib][#1]}%
+ {\publ_save_dataset_indeed[\s!default][#1][]}%
+ % \else
+ % % bad news
+ \fi\fi\fi}
+
+\unexpanded\def\publ_save_dataset_indeed[#1][#2][#3]%
+ {\begingroup
+ \getdummyparameters
+ [\c!criterium=\v!all,%
+ \c!type=,%
+ \c!dataset=#1,%
+ \c!file=#2,%
+ #3]% % all or used
+ \clf_btxsavedataset
+ dataset {\dummyparameter\c!dataset}%
+ filename {\dummyparameter\c!file}%
+ filetype {\dummyparameter\c!type}%
+ criterium {\dummyparameter\c!criterium}%
+ \relax
+ \endgroup}
+
+% \savebtxdataset[default][e:/tmp/foo.bib]
+% \savebtxdataset[default][e:/tmp/foo.lua]
+% \savebtxdataset[default][e:/tmp/foo.xml]
+
+%D In-text entries:
+
+\unexpanded\def\placecitation{\citation[entry]} % [#1]
+
+\unexpanded\def\btxhandleciteentry
+ {\dontleavehmode
+ \begingroup
+ \def\currentbtxcitealternative{entry}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative % needs checking
+ \btxcitereference
+ \btx_entry_inject
+ \endgroup}
+
+%D Registers
+
+% \setupbtxregister
+% [\c!state=\v!start,
+% \c!dataset=\v!all,
+% \c!method=\v!always]
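+%
+% a hypothetical register definition (field and register names are only illustrative):
+%
+% \definebtxregister
+%   [authors]
+%   [\c!field=author,
+%    \c!register=index,
+%    \c!dataset=\v!default]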
+
+\unexpanded\def\publ_registers_set
+ {\ifx\currentbtxregister\empty \else
+ \clf_btxsetregister
+ specification {\currentbtxspecification}%
+ name {\currentbtxregister}%
+ state {\btxregisterparameter\c!state}%
+ dataset {\btxregisterparameter\c!dataset}%
+ field {\btxregisterparameter\c!field}%
+ register {\btxregisterparameter\c!register}%
+ method {\btxregisterparameter\c!method}%
+ alternative {\btxregisterparameter\c!alternative}%
+ \relax
+ \fi}
+
+\appendtoks
+ \publ_registers_set
+\to \everydefinebtxregister
+
+\appendtoks
+ \publ_registers_set
+\to \everysetupbtxregister
+
+\appendtoks
+ \normalexpanded{%
+ \defineprocessor
+ [\s!btx:r:\currentbtxregister]%
+ [\c!style=\noexpand\namedbtxregisterparameter{\currentbtxregister}\noexpand\c!style,
+ \c!color=\noexpand\namedbtxregisterparameter{\currentbtxregister}\noexpand\c!color]}%
+\to \everydefinebtxregister
+
+\appendtoks
+ \clf_btxtoregister{\currentbtxdataset}{\currentbtxtag}%
+\to \t_btx_reference_inject
+
+\unexpanded\def\btxindexedauthor#1#2#3#4#5#6% alternative von last initials first junior
+ {\begingroup
+ \def\currentbtxcitealternative{#1}%
+ \ifx\currentbtxcitealternative\empty
+ \edef\currentbtxcitealternative{invertedshort}% maybe we need some default here too?
+ \fi
+ %let\currentbtxlistvariant\currentbtxcitealternative % we inherit
+ \the\everysetupbtxciteplacement
+ \def\currentbtxvons {#2}%
+ \def\currentbtxsurnames {#3}%
+ \def\currentbtxinitials {#4}%
+ \def\currentbtxfirstnames {#5}%
+ \def\currentbtxjuniors {#6}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \fastsetup{\s!btx:\s!cite:\s!author:\currentbtxcitealternative}%
+ \endgroup}
+
+\unexpanded\def\btxregisterauthor
+ {\doifelsenextoptionalcs\publ_register_author_yes\publ_register_author_nop}
+
+\def\publ_register_author_yes[#1]#2%
+ {\clf_btxauthortoregister{#1}{#2}\relax}
+
+\def\publ_register_author_nop#1%
+ {\clf_btxauthortoregister{\currentbtxdataset}{#1}\relax}
+
+
+%D We hook some setters in the definition sets:
+
+% \installdefinitionsetmember \??btx {btxspecification} \??btxcitevariant {btxcitevariant}
+% \installdefinitionsetmember \??btx {btxspecification} \??btxlistvariant {btxlistvariant}
+% \installdefinitionsetmember \??btx {btxspecification} \??btxlist {btxlist}
+% \installdefinitionsetmember \??btx {btxspecification} \??btxrendering {btxrendering}
+% \installdefinitionsetmember \??btx {btxspecification} \??btx {btx}
+
+%D And more helpers ... a never ending story these publications:
+
+% \definebtx
+% [btx:apa:list:article:title]
+% [style=bolditalic,
+% command=\WORD]
+%
+% \btxstartstyle[btx:apa:list:article:title]
+% \btxusecommand[btx:apa:list:article:title]{foo}
+% \btxstopstyle
+
+\let\savedcurrentbtx\empty
+
+\unexpanded\def\btxstartstyle[#1]%
+ {\begingroup
+ \let\savedcurrentbtx\currentbtx
+ \def\currentbtx{#1}%
+ \usebtxstyle\c!style
+ \let\currentbtx\savedcurrentbtx}
+
+\unexpanded\def\btxstartcolor[#1]%
+ {\begingroup
+ \let\savedcurrentbtx\currentbtx
+ \def\currentbtx{#1}%
+ \usebtxcolor\c!color
+ \let\currentbtx\savedcurrentbtx}
+
+\unexpanded\def\btxstartstyleandcolor[#1]%
+ {\begingroup
+ \let\savedcurrentbtx\currentbtx
+ \def\currentbtx{#1}%
+ \usebtxstyleandcolor\c!style\c!color
+ \let\currentbtx\savedcurrentbtx}
+
+\let\btxstopstyle \endgroup
+\let\btxstopcolor \endgroup
+\let\btxstopstyleandcolor\endgroup
+
+\unexpanded\def\btxusecommand[#1]#2% using #2 permits space after []
+ {\namedbtxparameter{#1}\c!command{#2}}
+
+%D Defaults:
+
+\setupbtxrendering
+ [\c!interaction=\v!start, % \v!all
+ \c!specification=\btxparameter\c!specification,
+ \c!dataset=\v!default,
+ \c!repeat=\v!no,
+ \c!continue=\v!no,
+ \c!method=\v!global,
+ % \c!setups=btx:\btxrenderingparameter\c!alternative:initialize, % not the same usage as cite !
+ \c!sorttype=\v!default,
+ \c!criterium=\v!text,
+ \c!refcommand=authoryears, % todo
+ \c!numbering=\v!yes,
+ %\c!saveinlist=\v!no, % maybe for before/after
+ \c!textstate=\v!start,
+ \c!width=\v!auto,
+ \c!separator={;\space},
+ \c!distance=1.5\emwidth]
+
+% Quite some interpunction and labels are the same or at least consistent within
+% a standard when citations and list entries are involved. We assume that each
+% standard defines its own set but it can fall back on these defaults.
+
+\setupbtx
+ [\c!interaction=\v!start,
+ \c!alternative=num, % default cite form
+ \c!inbetween=\space,
+ \c!range=\endash, % separator:range?
+ \c!compress=\v!yes, % was no?
+ \c!authorconversion=normal,
+ \c!sorttype=normal, % normal, reverse or none
+ \c!etallimit=3,
+ \c!etaldisplay=\btxparameter\c!etallimit,
+ \c!otherstext={\space et al.},
+ \c!separator:firstnames={\space},
+ \c!separator:juniors={\space},
+ \c!separator:vons={\space},
+ \c!separator:initials={\space},
+ \c!stopper:initials={.},
+ %\c!surnamesep={,\space}, % is this used anywhere?
+ \c!separator:invertedinitials={,\space},
+ \c!separator:invertedfirstnames={,\space},
+ \c!separator:names:2={,\space}, % separates multiple names
+ \c!separator:names:3=\btxparameter{\c!separator:2}, % before last name in a list
+ \c!separator:names:4=\btxparameter{\c!separator:2}, % between only two names
+ \c!separator:2={;\space}, % aka pubsep - separates multiple objects
+ \c!separator:3=\btxparameter{\c!separator:2}, % before last object in a list
+ \c!separator:4=\btxparameter{\c!separator:2}, % between only two objects
+ \c!pagestate=\v!stop]
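+
+% a style that prefers an ampersand between two author names could for instance
+% overload just that default (a sketch, not taken from an actual specification):
+%
+% \setupbtx
+%   [\c!separator:names:4={\space\&\space}]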
+
+% Do we want these in the format? Loading them delayed is somewhat messy.
+
+\loadbtxdefinitionfile[commands]
+\loadbtxdefinitionfile[definitions]
+
+\loadbtxdefinitionfile[cite]
+\loadbtxdefinitionfile[list]
+\loadbtxdefinitionfile[page]
+\loadbtxdefinitionfile[author]
+
+% we assume that the user sets up the right specification and if not ... well,
+% hope for the best that something shows up and consult the manual otherwise
+
+\unexpanded\def\usebtxdefinitions[#1]%
+ {\loadbtxdefinitionfile[#1]% % for hh
+ \setupbtx[\c!specification=#1]} % for ab
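+
+% for instance (assuming the apa definition files are present):
+%
+% \usebtxdefinitions[apa]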
+
+\setupbtx
+ [\c!specification=\s!default,
+ \c!dataset=\v!default,
+ \c!default=\v!default]
+
+\loadbtxdefinitionfile
+ [\s!default]
+
+\protect
diff --git a/tex/context/base/publ-jrn.lua b/tex/context/base/publ-jrn.lua
new file mode 100644
index 000000000..0cc41862f
--- /dev/null
+++ b/tex/context/base/publ-jrn.lua
@@ -0,0 +1,189 @@
+if not modules then modules = { } end modules ['publ-jrn'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- http://jabref.sourceforge.net/journals/journal_abbreviations_general.txt
+--
+-- <full name> = <abbreviation> [;shortest unique abbreviation[;frequency]].
+
+-- require("char-utf")
+
+-- Abhandlungen aus dem Westfälischen Museum für Naturkunde = Abh. Westfäl. Mus. Nat.kd.
+-- Abhandlungen der Naturforschenden Gesellschaft in Zürich = Abh. Nat.forsch. Ges. Zür.
+-- Abhandlungen des Naturwissenschaftlichen Vereins zu Bremen = Abh. Nat.wiss. Ver. Bremen
+
+local context = context
+local commands = commands
+
+local type = type
+local find = string.find
+local P, C, S, Cs, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cs, lpeg.match, lpeg.patterns
+
+local report_journals = logs.reporter("publications","journals")
+
+local publications = publications
+local journals = { }
+publications.journals = journals
+
+local lowercase = characters.lower
+
+local expansions = { }
+local abbreviations = { }
+local nofexpansions = 0
+local nofabbreviations = 0
+
+local valid = 1 - S([[ ."':;,-]])
+local pattern = Cs((valid^1 + P(1)/"")^1)
+
+local function simplify(name)
+ -- we have utf but it doesn't matter much if we lower the bytes
+ return name and lowercase(lpegmatch(pattern,name)) or name
+end
+
+local function add(expansion,abbreviation)
+ if expansion and abbreviation then
+ local se = simplify(expansion)
+ local sa = simplify(abbreviation)
+ if not expansions[sa] then
+ expansions[sa] = expansion
+ nofexpansions = nofexpansions + 1
+ end
+ if not abbreviations[se] then
+ abbreviations[se] = abbreviation
+ nofabbreviations = nofabbreviations + 1
+ end
+ end
+end
+
+-- [#%-] comment
+-- meaning = abbreviations [;.....]
+
+local whitespace = lpegpatterns.whitespace^0
+local assignment = whitespace * P("=") * whitespace
+local separator = P(";")
+local newline = lpegpatterns.newline
+local endofline = lpegpatterns.space^0 * (newline + P(-1) + separator)
+local restofline = (1-newline)^0
+local splitter = whitespace * C((1-assignment)^1) * assignment * C((1-endofline)^1) * restofline
+local comment = S("#-%") * restofline
+local pattern = (comment + splitter / add)^0
+
+function journals.load(filename)
+ if not filename then
+ return
+ end -- error
+ if file.suffix(filename,"txt") then
+ local data = io.loaddata(filename)
+ if type(data) ~= "string" then
+ return
+ elseif find(data,"=") then
+ -- expansion = abbreviation
+ lpegmatch(pattern,data)
+ end
+ elseif file.suffix(filename,"lua") then
+ local data = table.load(filename)
+ if type(data) ~= "table" then
+ return
+ else
+ local de = data.expansions
+ local da = data.abbreviations
+ if de and da then
+ -- { expansions = { a = e }, abbreviations = { e = a } }
+ if next(expansions) then
+ table.merge(expansions,de)
+ else
+ expansions = de
+ end
+ if next(abbreviations) then
+ table.merge(abbreviations,da)
+ else
+ abbreviations = da
+ end
+ elseif #data > 0 then
+ -- { expansion, abbreviation }, ... }
+ for i=1,#data do
+ local d = data[i]
+ add(d[1],d[2])
+ end
+ else
+ -- { expansion = abbreviation, ... }
+ for expansion, abbreviation in next, data do
+ add(expansion,abbreviation)
+ end
+ end
+ end
+ end
+ report_journals("file %a loaded, %s expansions, %s abbreviations",filename,nofexpansions,nofabbreviations)
+end
+
+function journals.save(filename)
+ table.save(filename,{ expansions = expansions, abbreviations = abbreviations })
+end
+
+function journals.add(expansion,abbreviation)
+ add(expansion,abbreviation)
+end
+
+function journals.expanded(name)
+ local s = simplify(name)
+ return expansions[s] or expansions[simplify(abbreviations[s])] or name
+end
+
+function journals.abbreviated(name)
+ local s = simplify(name)
+ return abbreviations[s] or abbreviations[simplify(expansions[s])] or name
+end
+
+local implement = interfaces and interfaces.implement
+
+if implement then
+
+ implement {
+ name = "btxloadjournallist",
+ arguments = "string",
+ actions = journals.load
+ }
+
+ implement {
+ name = "btxsavejournallist",
+ arguments = "string",
+ actions = journals.save
+ }
+
+ implement {
+ name = "btxaddjournal",
+ arguments = { "string", "string" },
+ actions = { journals.add, context }
+ }
+
+ implement {
+ name = "btxexpandedjournal",
+ arguments = "string",
+ actions = { journals.expanded, context },
+ }
+
+ implement {
+ name = "btxabbreviatedjournal",
+ arguments = "string",
+ actions = { journals.abbreviated, context },
+ }
+
+end
+
+-- journals.load("e:/tmp/journals.txt")
+-- journals.save("e:/tmp/journals.lua")
+
+-- inspect(journals.expanded ("Z. Ökol. Nat.schutz"))
+-- inspect(journals.abbreviated("Z. Ökol. Nat. schutz"))
+
+if typesetters then
+ typesetters.manipulators.methods.expandedjournal = journals.expanded
+ typesetters.manipulators.methods.abbreviatedjournal = journals.abbreviated
+end
+
+-- journals.load("t:/manuals/publications-mkiv/journals.txt")
+-- journals.save("t:/manuals/publications-mkiv/journals.lua")
diff --git a/tex/context/base/publ-old.mkiv b/tex/context/base/publ-old.mkiv
new file mode 100644
index 000000000..f616428e6
--- /dev/null
+++ b/tex/context/base/publ-old.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=publ-old,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Old Fashioned \BIBTEX,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+% we could use runtime commands instead
+
+\unexpanded\def\setupbibtex {\usemodule[oldbibtex]\setupbibtex}
+\unexpanded\def\setuppublications {\usemodule[oldbibtex]\setuppublications}
+\unexpanded\def\setuppublicationlist{\usemodule[oldbibtex]\setuppublicationlist}
+
+\protect
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
new file mode 100644
index 000000000..55c62c31e
--- /dev/null
+++ b/tex/context/base/publ-oth.lua
@@ -0,0 +1,154 @@
+if not modules then modules = { } end modules ['publ-oth'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local p_endofline = lpeg.patterns.newline
+
+local publications = publications
+
+local loaders = publications.loaders
+local getindex = publications.getindex
+
+local function addfield(t,k,v,fields)
+ k = fields[k]
+ if k then
+ local tk = t[k]
+ if tk then
+ t[k] = tk .. " and " .. v
+ else
+ t[k] = v
+ end
+ end
+ return t
+end
+
+local function checkfield(_,_,t,categories,all)
+ local tag = t.tag
+ if tag then
+ local category = t.category
+ t.tag = nil
+ t.category = categories[category] or category
+ all[tag] = t
+ end
+ return true
+end
+
+-- endnotes --
+
+local fields = {
+ ["@"] = "tag",
+ ["0"] = "category",
+ ["A"] = "author",
+ ["E"] = "editor",
+ ["T"] = "title",
+ ["D"] = "year",
+ ["I"] = "publisher",
+}
+
+local categories = {
+ ["Journal Article"] = "article",
+}
+
+local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+function publications.endnotes_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.endnote(dataset,filename)
+ -- we could combine the next into checkfield but let's not create too messy code
+ local dataset, fullname = publications.resolvedname(dataset,filename)
+ if fullname then
+ loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(fullname) or ""))
+ end
+end
+
+-- refman --
+
+local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+local fields = {
+ ["SN"] = "tag",
+ ["TY"] = "category",
+ ["A1"] = "author",
+ ["E1"] = "editor",
+ ["T1"] = "title",
+ ["Y1"] = "year",
+ ["PB"] = "publisher",
+}
+
+local categories = {
+ ["JOUR"] = "article",
+}
+
+function publications.refman_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.refman(dataset,filename)
+ -- we could combine the next into checkfield but let's not create too messy code
+ local dataset, fullname = publications.resolvedname(dataset,filename)
+ if fullname then
+ loaders.lua(dataset,publications.refman_to_btx(io.loaddata(fullname) or ""))
+ end
+end
+
+-- test --
+
+-- local endnote = [[
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677776
+-- %D 1994
+-- %I IEEE Computer Society
+--
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677775
+-- %D 1994
+-- %I IEEE Computer Society
+-- ]]
+--
+-- local refman = [[
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677776
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+--
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677775
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+-- ]]
+--
+-- inspect(publications.endnotes_to_btx(endnote))
+-- inspect(publications.refman_to_btx(refman))
diff --git a/tex/context/base/publ-reg.lua b/tex/context/base/publ-reg.lua
new file mode 100644
index 000000000..3f276b49a
--- /dev/null
+++ b/tex/context/base/publ-reg.lua
@@ -0,0 +1,227 @@
+if not modules then modules = { } end modules ['publ-reg'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local formatters = string.formatters
+local concat = table.concat
+local sortedhash = table.sortedhash
+local lpegmatch = lpeg.match
+
+local context = context
+
+local implement = interfaces.implement
+local variables = interfaces.variables
+
+local v_once = variables.once
+local v_stop = variables.stop
+local v_all = variables.all
+
+local publications = publications
+local datasets = publications.datasets
+local specifications = publications.specifications
+local writers = publications.writers
+local getcasted = publications.getcasted
+
+local registrations = { }
+local sequence = { }
+local flushers = table.setmetatableindex(function(t,k) local v = t.default t[k] = v return v end)
+
+local function btxsetregister(specification)
+ local name = specification.name
+ local register = specification.register
+ local dataset = specification.dataset
+ local field = specification.field
+ if not field or field == "" or not register or register == "" then
+ return
+ end
+ if not dataset or dataset == "" then
+ dataset = v_all
+ end
+ -- could be metatable magic
+ local s = registrations[register]
+ if not s then
+ s = { }
+ registrations[register] = s
+ end
+ local processor = name ~= register and name or ""
+ if processor == "" then
+ processor = nil
+ elseif processor then
+ processor = "btx:r:" .. processor
+ end
+ local datasets = utilities.parsers.settings_to_array(dataset)
+ for i=1,#datasets do
+ local dataset = datasets[i]
+ local d = s[dataset]
+ if not d then
+ d = { }
+ s[dataset] = d
+ end
+ --
+ -- check all
+ --
+ d.active = specification.state ~= v_stop
+ d.once = specification.method == v_once or false
+ d.field = field
+ d.processor = processor
+ d.alternative = d.alternative or specification.alternative
+ d.register = register
+ d.dataset = dataset
+ d.done = d.done or { }
+ end
+ --
+ sequence = { }
+ for register, s in sortedhash(registrations) do
+ for dataset, d in sortedhash(s) do
+ if d.active then
+ sequence[#sequence+1] = d
+ end
+ end
+ end
+end
+
+local function btxtoregister(dataset,tag)
+ local current = datasets[dataset]
+ for i=1,#sequence do
+ local step = sequence[i]
+ local dset = step.dataset
+ if dset == v_all or dset == dataset then
+ local done = step.done
+ if not done[tag] then
+ local value, field, kind = getcasted(current,tag,step.field,specifications[step.specification])
+ if value then
+ flushers[kind](step,field,value)
+ end
+ done[tag] = true
+ end
+ end
+ end
+end
+
+implement {
+ name = "btxsetregister",
+ actions = btxsetregister,
+ arguments = {
+ {
+ { "specification" },
+ { "name" },
+ { "state" },
+ { "dataset" },
+ { "field" },
+ { "register" },
+ { "method" },
+ { "alternative" },
+ }
+ }
+}
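+
+-- A hypothetical call, roughly what the TeX end passes on via the implement above;
+-- the field and register names are invented:
+--
+-- btxsetregister {
+--     name     = "",
+--     state    = "start",
+--     dataset  = "default",
+--     field    = "keywords",
+--     register = "index",
+--     method   = "once",
+-- }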
+
+implement {
+ name = "btxtoregister",
+ actions = btxtoregister,
+ arguments = { "string", "string" }
+}
+
+-- context.setregisterentry (
+-- { register },
+-- {
+-- ["entries:1"] = value,
+-- ["keys:1"] = value,
+-- }
+-- )
+
+local ctx_dosetfastregisterentry = context.dosetfastregisterentry -- register entry key
+
+----- p_keywords = lpeg.tsplitat(lpeg.patterns.whitespace^0 * lpeg.P(";") * lpeg.patterns.whitespace^0)
+local components = publications.components.author
+local f_author = formatters[ [[\btxindexedauthor{%s}{%s}{%s}{%s}{%s}{%s}]] ]
+
+function flushers.string(step,field,value)
+ if type(value) == "string" and value ~= "" then
+ ctx_dosetfastregisterentry(step.register,value or "","",step.processor or "","")
+ end
+end
+
+flushers.default = flushers.string
+
+local shorts = {
+ normalshort = "normalshort",
+ invertedshort = "invertedshort",
+}
+
+function flushers.author(step,field,value)
+ if type(value) == "string" then
+ value = publications.authorcache[value]
+ end
+ if type(value) == "table" and #value > 0 then
+ local register = step.register
+ local processor = step.processor
+ local alternative = shorts[step.alternative or "invertedshort"] or "invertedshort"
+ for i=1,#value do
+ local a = value[i]
+ local k = writers[field] { a }
+ local e = f_author(alternative,components(a))
+ ctx_dosetfastregisterentry(register,e,k,processor or "","")
+ end
+ end
+end
+
+function flushers.keyword(step,field,value)
+ if type(value) == "table" and #value > 0 then
+ local register = step.register
+ local processor = step.processor
+ for i=1,#value do
+ ctx_dosetfastregisterentry(register,value[i],"",processor or "","")
+ end
+ end
+end
+
+-- publications.registerflushers = flushers
+
+local function btxtoregister(dataset,tag)
+ local current = datasets[dataset]
+ for i=1,#sequence do
+ local step = sequence[i]
+ local dset = step.dataset
+ if dset == v_all or dset == dataset then
+ local done = step.done
+ if not done[tag] then
+ local value, field, kind = getcasted(current,tag,step.field,specifications[step.specification])
+ if value then
+ flushers[kind](step,field,value)
+ end
+ done[tag] = true
+ end
+ end
+ end
+end
+
+local function authortoregister(dataset,hash)
+ local author = publications.authorcache[hash]
+ if author then
+ local current = datasets[dataset]
+ for i=1,#sequence do
+ local step = sequence[i]
+ local dset = step.dataset
+ if dset == v_all or dset == dataset then
+ local register = step.register
+ local processor = step.processor
+ local alternative = shorts[step.alternative or "invertedshort"] or "invertedshort"
+ local k = writers.author { author }
+ local e = f_author(alternative,components(author,short))
+ ctx_dosetfastregisterentry(register,e,k,processor or "","")
+ end
+ end
+ end
+end
+
+publications.authortoregister = authortoregister
+
+implement {
+ name = "btxauthortoregister",
+ actions = authortoregister,
+ arguments = { "string", "string" }
+}
diff --git a/tex/context/base/publ-sor.lua b/tex/context/base/publ-sor.lua
new file mode 100644
index 000000000..b617af760
--- /dev/null
+++ b/tex/context/base/publ-sor.lua
@@ -0,0 +1,377 @@
+if not modules then modules = { } end modules ['publ-sor'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- if needed we can optimize this one: check if it's detail or something else
+-- and use direct access, but in practice it's fast enough
+
+local type = type
+local concat = table.concat
+local formatters = string.formatters
+local compare = sorters.comparers.basic -- (a,b)
+local sort = table.sort
+
+local toarray = utilities.parsers.settings_to_array
+local utfchar = utf.char
+
+local publications = publications
+local writers = publications.writers
+
+local variables = interfaces.variables
+local v_short = variables.short
+local v_default = variables.default
+local v_reference = variables.reference
+local v_dataset = variables.dataset
+local v_list = variables.list
+local v_index = variables.index
+local v_cite = variables.cite
+local v_used = variables.used
+
+local report = logs.reporter("publications","sorters")
+
+local trace_sorters trackers.register("publications.sorters",function(v) trace_sorters = v end)
+
+-- author(s) | year | journal | title | pages
+
+local template = [[
+local type, tostring = type, tostring
+
+local writers = publications.writers
+local datasets = publications.datasets
+local getter = publications.getfaster -- (current,data,details,field,categories,types)
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
+local function newsplitter(splitter)
+ return table.setmetatableindex({},function(t,k) -- could be done in the sorter but seldom that many shared
+ local v = splitter(k,true) -- in other cases
+ t[k] = v
+ return v
+ end)
+end
+
+return function(dataset,list,method) -- indexer
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local specification = publications.currentspecification
+ local categories = specification.categories
+ local types = specification.types
+ local splitted = newsplitter(splitter) -- saves mem
+ local snippets = { } -- saves mem
+ local result = { }
+
+%helpers%
+
+ for i=1,#list do
+ -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
+ local li = list[i]
+ local tag = type(li) == "string" and li or li[1]
+ local index = tostring(i)
+ local entry = luadata[tag]
+ if entry then
+ -- maybe optional: if entry.key then push the keygetter
+ -- in slot 1 and ignore (e.g. author)
+ local detail = details[tag]
+ result[i] = {
+ index = i,
+ split = {
+
+%getters%
+
+ },
+ }
+ else
+ result[i] = {
+ index = i,
+ split = {
+
+%unknowns%
+
+ },
+ }
+ end
+ end
+ return result
+end
+]]
+
+local f_getter = formatters["splitted[strip(getter(current,entry,detail,%q,categories,types) or %q)], -- %s"]
+local f_writer = formatters["splitted[strip(writer_%s(getter(current,entry,detail,%q,categories,types) or %q,snippets))], -- %s"]
+local f_helper = formatters["local writer_%s = writers[%q] -- %s: %s"]
+local f_value = formatters["splitted[%q], -- %s"]
+local s_index = "splitted[index], -- the order in the list, always added"
+
+-- there is no need to cache this in specification
+
+local sharedmethods = { }
+publications.sortmethods = sharedmethods
+
+local function sortsequence(dataset,list,sorttype)
+
+ if not list or #list == 0 then
+ return
+ end
+
+ local specification = publications.currentspecification
+ local types = specification.types
+ local sortmethods = specification.sortmethods
+ local method = sortmethods and sortmethods[sorttype] or sharedmethods[sorttype]
+ local sequence = method and method.sequence
+
+ local s_default = "<before end>"
+ local s_unknown = "<at the end>"
+
+ local c_default = utfchar(0xFFFE)
+ local c_unknown = utfchar(0xFFFF)
+
+ if not sequence and type(sorttype) == "string" then
+ local list = toarray(sorttype)
+ if #list > 0 then
+ local indexdone = false
+ sequence = { }
+ for i=1,#list do
+ local entry = toarray(list[i])
+ local field = entry[1]
+ local default = entry[2]
+ local unknown = entry[3] or default
+ sequence[i] = {
+ field = field,
+ default = default == s_default and c_default or default or c_default,
+ unknown = unknown == s_unknown and c_unknown or unknown or c_unknown,
+ }
+ if field == "index" then
+ indexdone = true
+ end
+ end
+ if not indexdone then
+ sequence[#sequence+1] = {
+ field = "index",
+ default = 0,
+ unknown = 0,
+ }
+ end
+ end
+ if trace_sorters then
+ report("creating sequence from method %a",sorttype)
+ end
+ end
+
+ if sequence then
+
+ local getters = { }
+ local unknowns = { }
+ local helpers = { }
+
+ if trace_sorters then
+ report("initializing method %a",sorttype)
+ end
+
+ for i=1,#sequence do
+ local step = sequence[i]
+ local field = step.field or "?"
+ local default = step.default or c_default
+ local unknown = step.unknown or c_unknown
+ local fldtype = types[field]
+ local fldwriter = step.writer or fldtype
+ local writer = fldwriter and writers[fldwriter]
+
+ if trace_sorters then
+ report("% 3i : field %a, type %a, default %a, unknown %a",i,field,fldtype,
+ default == c_default and s_default or default,
+ unknown == c_unknown and s_unknown or unknown
+ )
+ end
+
+ if writer then
+ local h = #helpers + 1
+ getters[i] = f_writer(h,field,default,field)
+ helpers[h] = f_helper(h,fldwriter,field,fldtype)
+ else
+ getters[i] = f_getter(field,default,field)
+ end
+ unknowns[i] = f_value(unknown,field)
+ end
+
+ unknowns[#unknowns+1] = s_index
+ getters [#getters +1] = s_index
+
+ local code = utilities.templates.replace(template, {
+ helpers = concat(helpers, "\n"),
+ getters = concat(getters, "\n"),
+ unknowns = concat(unknowns,"\n"),
+ })
+
+ -- print(code)
+
+ local action, error = loadstring(code)
+ if type(action) == "function" then
+ action = action()
+ else
+ report("error when compiling sort method %a: %s",sorttype,error or "unknown")
+ end
+ if type(action) == "function" then
+ local valid = action(dataset,list,method)
+ if valid and #valid > 0 then
+ sorters.sort(valid,compare)
+ return valid
+ else
+ report("error when applying sort method %a",sorttype)
+ end
+ else
+ report("error in sort method %a",sorttype)
+ end
+ else
+ report("invalid sort method %a",sorttype)
+ end
+
+end
+
+-- tag | listindex | reference | userdata | dataindex
+
+-- short : short + tag index
+-- dataset : index + tag
+-- list : list + index
+-- reference : tag + index
+-- used : reference + dataset
+-- authoryear : complex sort
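+
+-- So a list entry looks more or less like { "knuth:1984", 3, "ref", { }, 42 } (values
+-- invented); the sorters below only compare the slots they need and fall back on the
+-- dataset index in slot 5 when the primary key is missing.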
+
+local sorters = { }
+
+sorters[v_short] = function(dataset,rendering,list) -- should we store it
+ local shorts = rendering.shorts
+ local function compare(a,b)
+ if a and b then
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ local shorta = shorts[taga]
+ local shortb = shorts[tagb]
+ if shorta and shortb then
+ -- assumes ascii shorts ... no utf yet
+ return shorta < shortb
+ end
+ -- fall back on tag order
+ return taga < tagb
+ end
+ -- fall back on dataset order
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_dataset] = function(dataset,rendering,list) -- dataset index
+ local function compare(a,b)
+ if a and b then
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ return taga < tagb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_list] = function(dataset,rendering,list) -- list index (normally redundant)
+ local function compare(a,b)
+ if a and b then
+ local lista = a[2]
+ local listb = b[2]
+ if lista and listb then
+ return lista < listb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_reference] = function(dataset,rendering,list) -- tag
+ local function compare(a,b)
+ if a and b then
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ return taga < tagb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_used] = function(dataset,rendering,list) -- tag
+ local function compare(a,b)
+ if a and b then
+ local referencea = a[2]
+ local referenceb = b[2]
+ if referencea and referenceb then
+ return referencea < referenceb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_default] = sorters[v_list]
+sorters[""] = sorters[v_list]
+sorters[v_cite] = sorters[v_list]
+sorters[v_index] = sorters[v_dataset]
+
+local function anything(dataset,rendering,list,sorttype)
+ local valid = sortsequence(dataset,list,sorttype) -- field order
+ if valid and #valid > 0 then
+ -- hm, we have a complication here because a sortsequence doesn't know if there's a field
+ -- so there is no real catch possible here ... anyway, we add an index as last entry when
+ -- none is set so that should be good enough (needs testing)
+ for i=1,#valid do
+ local v = valid[i]
+ valid[i] = list[v.index]
+ end
+ return valid
+ end
+end
+
+table.setmetatableindex(sorters,function(t,k) return anything end)
+
+publications.lists.sorters = sorters
+
+-- publications.sortmethods.key = {
+-- sequence = {
+-- { field = "key", default = "", unknown = "" },
+-- { field = "index", default = "", unknown = "" },
+-- },
+-- }
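+
+-- Internally a plain method string, say "author,year,title", is expanded by
+-- sortsequence into such a sequence of { field, default, unknown } steps, with an
+-- index step appended automatically when none is given.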
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
new file mode 100644
index 000000000..4b03307ac
--- /dev/null
+++ b/tex/context/base/publ-tra.lua
@@ -0,0 +1,504 @@
+if not modules then modules = { } end modules ['publ-tra'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: use context.tt .. more efficient, less code
+
+local next, type = next, type
+
+local sortedhash = table.sortedhash
+local sortedkeys = table.sortedkeys
+local settings_to_array = utilities.parsers.settings_to_array
+local formatters = string.formatters
+local concat = table.concat
+
+local context = context
+local commands = commands
+
+local v_default = interfaces.variables.default
+
+local publications = publications
+local tracers = publications.tracers
+local tables = publications.tables
+local datasets = publications.datasets
+local specifications = publications.specifications
+local citevariants = publications.citevariants
+
+local getfield = publications.getfield
+local getcasted = publications.getcasted
+
+local ctx_NC, ctx_NR, ctx_HL, ctx_FL, ctx_ML, ctx_LL, ctx_EQ = context.NC, context.NR, context.HL, context.FL, context.ML, context.LL, context.EQ
+
+local ctx_starttabulate = context.starttabulate
+local ctx_stoptabulate = context.stoptabulate
+
+local ctx_formatted = context.formatted
+local ctx_bold = ctx_formatted.monobold
+local ctx_monobold = ctx_formatted.monobold
+local ctx_verbatim = ctx_formatted.verbatim
+
+local ctx_rotate = context.rotate
+local ctx_llap = context.llap
+local ctx_rlap = context.rlap
+local ctx_page = context.page
+
+local privates = tables.privates
+local specials = tables.specials
+
+local report = logs.reporter("publications","tracers")
+
+function tracers.showdatasetfields(settings)
+ local dataset = settings.dataset
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local specification = settings.specification
+ local fielddata = specification and specifications[specification] or specifications.apa
+ local categories = fielddata.categories
+ if next(luadata) then
+ ctx_starttabulate { "|lT|lT|pTl|" }
+ ctx_NC() ctx_bold("tag")
+ ctx_NC() ctx_bold("category")
+ ctx_NC() ctx_bold("fields")
+ ctx_NC() ctx_NR()
+ ctx_FL()
+ for tag, entry in sortedhash(luadata) do
+ local category = entry.category
+ local catedata = categories[category]
+ local fields = catedata and catedata.fields or { }
+ ctx_NC() context(tag)
+ ctx_NC() context(category)
+ ctx_NC() -- grouping around colors needed
+ for key, value in sortedhash(entry) do
+ if privates[key] then
+ -- skip
+ elseif specials[key] then
+ context("{\\darkblue %s} ",key)
+ else
+ local kind = fields[key]
+ if kind == "required" then
+ context("{\\darkgreen %s} ",key)
+ elseif kind == "optional" then
+ context("%s ",key)
+ else
+ context("{\\darkyellow %s} ",key)
+ end
+ end
+ end
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+ end
+end
+
+function tracers.showdatasetcompleteness(settings)
+ local dataset = settings.dataset
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local specification = settings.specification
+ local fielddata = specification and specifications[specification] or specifications.apa
+ local categories = fielddata.categories
+
+ -- local lpegmatch = lpeg.match
+ -- local texescape = lpeg.patterns.texescape
+
+ local preamble = { "|lTBw(5em)|lBTp(10em)|plT|" }
+
+ local function identified(tag,category,crossref,index)
+ ctx_NC() ctx_monobold(index)
+ ctx_NC() ctx_monobold(category)
+ ctx_NC() if crossref then
+ ctx_monobold("%s\\hfill\\darkblue => %s",tag,crossref)
+ else
+ ctx_monobold(tag)
+ end
+ ctx_NC() ctx_NR()
+ end
+
+ local function required(done,foundfields,key,value,indirect)
+ ctx_NC() if not done then ctx_monobold("required") end
+ ctx_NC() context(key)
+ ctx_NC()
+ if indirect then
+ if value then
+ context("\\darkblue")
+ ctx_verbatim(value)
+ else
+ context("\\darkred\\tttf [missing crossref]")
+ end
+ elseif value then
+ ctx_verbatim(value)
+ else
+ context("\\darkred\\tttf [missing value]")
+ end
+ ctx_NC() ctx_NR()
+ foundfields[key] = nil
+ return done or true
+ end
+
+ local function optional(done,foundfields,key,value,indirect)
+ ctx_NC() if not done then ctx_monobold("optional") end
+ ctx_NC() context(key)
+ ctx_NC()
+ if indirect then
+ context("\\darkblue")
+ ctx_verbatim(value)
+ elseif value then
+ ctx_verbatim(value)
+ end
+ ctx_NC() ctx_NR()
+ foundfields[key] = nil
+ return done or true
+ end
+
+ local function special(done,key,value)
+ ctx_NC() if not done then ctx_monobold("special") end
+ ctx_NC() context(key)
+ ctx_NC() ctx_verbatim(value)
+ ctx_NC() ctx_NR()
+ return done or true
+ end
+
+ local function extra(done,key,value)
+ ctx_NC() if not done then ctx_monobold("extra") end
+ ctx_NC() context(key)
+ ctx_NC() ctx_verbatim(value)
+ ctx_NC() ctx_NR()
+ return done or true
+ end
+
+ if next(luadata) then
+ for tag, entry in sortedhash(luadata) do
+ local category = entry.category
+ local fields = categories[category]
+ local foundfields = { }
+ for k, v in next, entry do
+ foundfields[k] = true
+ end
+ ctx_starttabulate(preamble)
+ identified(tag,category,entry.crossref,entry.index)
+ ctx_FL()
+ if fields then
+ local requiredfields = fields.required
+ local sets = fields.sets or { }
+ local done = false
+ if requiredfields then
+ for i=1,#requiredfields do
+ local r = requiredfields[i]
+ local r = sets[r] or r
+ if type(r) == "table" then
+ local okay = false
+ for i=1,#r do
+ local ri = r[i]
+ if rawget(entry,ri) then
+ done = required(done,foundfields,ri,entry[ri])
+ okay = true
+ elseif entry[ri] then
+ done = required(done,foundfields,ri,entry[ri],true)
+ okay = true
+ end
+ end
+ if not okay then
+ done = required(done,foundfields,table.concat(r," {\\letterbar} "))
+ end
+ elseif rawget(entry,r) then
+ done = required(done,foundfields,r,entry[r])
+ elseif entry[r] then
+ done = required(done,foundfields,r,entry[r],true)
+ else
+ done = required(done,foundfields,r)
+ end
+ end
+ end
+ local optionalfields = fields.optional
+ local done = false
+ if optionalfields then
+ for i=1,#optionalfields do
+ local o = optionalfields[i]
+ local o = sets[o] or o
+ if type(o) == "table" then
+ for i=1,#o do
+ local oi = o[i]
+ if rawget(entry,oi) then
+ done = optional(done,foundfields,oi,entry[oi])
+ elseif entry[oi] then
+ done = optional(done,foundfields,oi,entry[oi],true)
+ end
+ end
+ elseif rawget(entry,o) then
+ done = optional(done,foundfields,o,entry[o])
+ elseif entry[o] then
+ done = optional(done,foundfields,o,entry[o],true)
+ end
+ end
+ end
+ end
+ local done = false
+ for k, v in sortedhash(foundfields) do
+ if privates[k] then
+ -- skip
+ elseif specials[k] then
+ done = special(done,k,entry[k])
+ end
+ end
+ local done = false
+ for k, v in sortedhash(foundfields) do
+ if privates[k] then
+ -- skip
+ elseif not specials[k] then
+ done = extra(done,k,entry[k])
+ end
+ end
+ ctx_stoptabulate()
+ end
+ end
+
+end
+
+function tracers.showfields(settings)
+ local rotation = settings.rotation
+ local specification = settings.specification
+ local fielddata = specification and specifications[specification] or specifications.apa
+ local categories = fielddata.categories
+ local validfields = { }
+ for category, data in next, categories do
+ local sets = data.sets
+ local fields = data.fields
+ for name, list in next, fields do
+ validfields[name] = true
+ end
+ end
+ local s_categories = sortedkeys(categories)
+ local s_fields = sortedkeys(validfields)
+ ctx_starttabulate { "|l" .. string.rep("|c",#s_categories) .. "|" }
+ ctx_FL()
+ ctx_NC()
+ if rotation then
+ rotation = { rotation = rotation }
+ end
+ for i=1,#s_categories do
+ ctx_NC()
+ local txt = formatters["\\bf %s"](s_categories[i])
+ if rotation then
+ ctx_rotate(rotation,txt)
+ else
+ context(txt)
+ end
+ end
+ ctx_NC() ctx_NR()
+ ctx_FL()
+ for i=1,#s_fields do
+ local field = s_fields[i]
+ ctx_NC()
+ ctx_bold(field)
+ for j=1,#s_categories do
+ ctx_NC()
+ local kind = categories[s_categories[j]].fields[field]
+ if kind == "required" then
+ context("\\darkgreen*")
+ elseif kind == "optional" then
+ context("*")
+ end
+ end
+ ctx_NC() ctx_NR()
+ end
+ ctx_LL()
+ ctx_stoptabulate()
+end
+
+function tracers.showtables(settings)
+ for name, list in sortedhash(tables) do
+ ctx_starttabulate { "|Tl|Tl|" }
+ ctx_FL()
+ ctx_NC()
+ ctx_rlap(function() ctx_bold(name) end)
+ ctx_NC()
+ ctx_NC()
+ ctx_NR()
+ ctx_FL()
+ for k, v in sortedhash(list) do
+ ctx_NC()
+ context(k)
+ ctx_NC()
+ if type(v) == "table" then
+ context("% t",v)
+ else
+ context(tostring(v))
+ end
+ ctx_NC()
+ ctx_NR()
+ end
+ ctx_LL()
+ ctx_stoptabulate()
+ end
+end
+
+function tracers.showdatasetauthors(settings)
+
+ local dataset = settings.dataset
+ local field = settings.field
+
+ local sortkey = publications.writers.author
+
+ if not dataset or dataset == "" then dataset = v_default end
+ if not field or field == "" then field = "author" end
+
+ local function row(i,k,v)
+ ctx_NC()
+ if i then
+ ctx_verbatim(i)
+ end
+ ctx_NC()
+ ctx_verbatim(k)
+ ctx_EQ()
+ if type(v) == "table" then
+ local t = { }
+ for i=1,#v do
+ local vi = v[i]
+ if type(vi) == "table" then
+ t[i] = concat(vi,"-")
+ else
+ t[i] = vi
+ end
+ end
+ ctx_verbatim(concat(t, " | "))
+ else
+ ctx_verbatim(v)
+ end
+ ctx_NC()
+ ctx_NR()
+ end
+
+ local function authorrow(ai,k,i)
+ local v = ai[k]
+ if v then
+ row(i,k,v)
+ end
+ end
+
+ local function commonrow(key,value)
+ ctx_NC() ctx_rlap(function() ctx_verbatim(key) end)
+ ctx_NC()
+ ctx_EQ() ctx_verbatim(value)
+ ctx_NC() ctx_NR()
+ end
+
+ local d = datasets[dataset].luadata
+
+ local trialtypesetting = context.trialtypesetting()
+
+ for tag, entry in sortedhash(d) do
+
+ local a, f, k = getcasted(dataset,tag,field)
+
+ if type(a) == "table" and #a > 0 and k == "author" then
+ context.start()
+ context.tt()
+ ctx_starttabulate { "|B|Bl|p|" }
+ ctx_FL()
+ local original = getfield(dataset,tag,field)
+ commonrow("tag",tag)
+ commonrow("field",field)
+ commonrow("original",original)
+ commonrow("sortkey",sortkey(a))
+ for i=1,#a do
+ ctx_ML()
+ local ai = a[i]
+ if ai then
+ authorrow(ai,"original",i)
+ authorrow(ai,"snippets")
+ authorrow(ai,"initials")
+ authorrow(ai,"firstnames")
+ authorrow(ai,"vons")
+ authorrow(ai,"surnames")
+ authorrow(ai,"juniors")
+ local options = ai.options
+ if options then
+ row(false,"options",sortedkeys(options))
+ end
+ elseif not trialtypesetting then
+ report("bad author name: %s",original or "?")
+ end
+ end
+ ctx_LL()
+ ctx_stoptabulate()
+ context.stop()
+ end
+
+ end
+
+end
+
+function tracers.showentry(dataset,tag)
+ local dataset = datasets[dataset]
+ if dataset then
+ local entry = dataset.luadata[tag]
+ local done = false
+ for k, v in sortedhash(entry) do
+ if not privates[k] then
+ ctx_verbatim("%w[%s: %s]",done and 1 or 0,k,v)
+ done = true
+ end
+ end
+ end
+end
+
+local skipped = { index = true, default = true }
+
+function tracers.showvariants(dataset,pages)
+ local variants = sortedkeys(citevariants)
+ for tag in publications.sortedentries(dataset or v_default) do
+ if pages then
+ ctx_page()
+ end
+ ctx_starttabulate { "|T||" }
+ for i=1,#variants do
+ local variant = variants[i]
+ if not skipped[variant] then
+ ctx_NC() context(variant)
+ -- ctx_EQ() citevariants[variant] { dataset = v_default, reference = tag, variant = variant }
+ ctx_EQ() context.cite({variant},{dataset .. "::" .. tag})
+ ctx_NC() ctx_NR()
+ end
+ end
+ ctx_stoptabulate()
+ if pages then
+ ctx_page()
+ end
+ end
+end
+
+function tracers.showhashedauthors(dataset,pages)
+ local components = publications.components.author
+ ctx_starttabulate { "|T|T|T|T|T|T|" }
+ ctx_NC() ctx_bold("hash")
+ ctx_NC() ctx_bold("vons")
+ ctx_NC() ctx_bold("surnames")
+ ctx_NC() ctx_bold("initials")
+ ctx_NC() ctx_bold("firstnames")
+ ctx_NC() ctx_bold("juniors")
+ ctx_NC() ctx_NR() ctx_HL()
+ for hash, data in sortedhash(publications.authorcache) do
+ local vons, surnames, initials, firstnames, juniors = components(data)
+ ctx_NC() context(hash)
+ ctx_NC() context(vons)
+ ctx_NC() context(surnames)
+ ctx_NC() context(initials)
+ ctx_NC() context(firstnames)
+ ctx_NC() context(juniors)
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+end
+
+commands.showbtxdatasetfields = tracers.showdatasetfields
+commands.showbtxdatasetcompleteness = tracers.showdatasetcompleteness
+commands.showbtxfields = tracers.showfields
+commands.showbtxtables = tracers.showtables
+commands.showbtxdatasetauthors = tracers.showdatasetauthors
+commands.showbtxhashedauthors = tracers.showhashedauthors
+commands.showbtxentry = tracers.showentry
+commands.showbtxvariants = tracers.showvariants
diff --git a/tex/context/base/publ-tra.mkiv b/tex/context/base/publ-tra.mkiv
new file mode 100644
index 000000000..6ef86ca59
--- /dev/null
+++ b/tex/context/base/publ-tra.mkiv
@@ -0,0 +1,87 @@
+%D \module
+%D [ file=publ-tra,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Tracing,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% todo: make this a runtime module
+% todo: use the module interface
+
+\writestatus{loading}{ConTeXt Publication Support / Tracing}
+
+\registerctxluafile{publ-tra}{1.001}
+
+\unprotect
+
+\unexpanded\def\showbtxdatasetfields {\dosingleempty\publ_show_dataset_fields}
+\unexpanded\def\showbtxdatasetcompleteness{\dosingleempty\publ_show_dataset_completeness}
+\unexpanded\def\showbtxdatasetauthors {\dosingleempty\publ_show_dataset_authors}
+\unexpanded\def\showbtxhashedauthors {\dosingleempty\publ_show_hashed_authors}
+\unexpanded\def\showbtxfields {\dosingleempty\publ_show_fields}
+\unexpanded\def\showbtxtables {\dosingleempty\publ_show_tables}
+
+\def\publ_show_dataset_whatever#1[#2]%
+ {\begingroup
+ \letdummyparameter\c!specification\currentbtxspecification
+ \setdummyparameter\c!dataset {\currentbtxdataset}%
+ \letdummyparameter\c!field \empty
+ \iffirstargument
+ \doifelseassignment{#2}
+ {\getdummyparameters[#2]}%
+ {\setdummyparameter\c!dataset{#2}}%
+ \else
+ \getdummyparameters[#2]%
+ \fi
+ \ctxcommand{#1{
+ dataset = "\dummyparameter\c!dataset",
+ specification = "\dummyparameter\c!specification",
+ field = "\dummyparameter\c!field",
+ }}%
+ \endgroup}
+
+\def\publ_show_dataset_fields {\publ_show_dataset_whatever{showbtxdatasetfields}}
+\def\publ_show_dataset_completeness{\publ_show_dataset_whatever{showbtxdatasetcompleteness}}
+\def\publ_show_dataset_authors {\publ_show_dataset_whatever{showbtxdatasetauthors}}
+
+\def\publ_show_fields[#1]%
+ {\begingroup
+ \setdummyparameter\c!rotation{90}%
+ \doifelseassignment{#1}%
+ {\letdummyparameter\c!specification\currentbtxspecification
+ \getdummyparameters[#1]}%
+ {\doifelsenothing{#1}%
+ {\letdummyparameter\c!specification\currentbtxspecification}%
+ {\setdummyparameter\c!specification{#1}}}%
+ \ctxcommand{showbtxfields{
+ rotation = "\dummyparameter\c!rotation",
+ specification = "\dummyparameter\c!specification"
+ }}%
+ \endgroup}
+
+\def\publ_show_tables[#1]%
+ {\begingroup
+ \ctxcommand{showbtxtables{}}%
+ \endgroup}
+
+\def\publ_show_hashed_authors[#1]%
+ {\ctxcommand{showbtxhashedauthors{}}}
+
+\protect
+
+\continueifinputfile{publ-tra.mkiv}
+
+\starttext
+
+ \showbtxfields[rotation=85] \page
+ \showbtxfields[rotation=90] \page
+
+ \showbtxtables \page
+
+\stoptext
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
new file mode 100644
index 000000000..901f11629
--- /dev/null
+++ b/tex/context/base/publ-usr.lua
@@ -0,0 +1,128 @@
+if not modules then modules = { } end modules ['publ-usr'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, Cs, R, Cc, C, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.C, lpeg.Carg
+local lpegmatch = lpeg.match
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local publications = publications
+local datasets = publications.datasets
+
+local report = logs.reporter("publications")
+local trace = false trackers.register("publications",function(v) trace = v end)
+
+-- local str = [[
+-- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
+-- \artauthor[]{Alexander}[A.]{}{Berdnikov}
+-- \artauthor[]{Hans}[H.]{}{Hagen}
+-- \artauthor[]{Taco}[T.]{}{Hoekwater}
+-- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
+-- \pubyear{2000}
+-- \arttitle{{Even more MetaFun with \MP: A request for permission}}
+-- \journal{TUGboat}
+-- \issn{0896-3207}
+-- \volume{21}
+-- \issue{2}
+-- \pages{129--130}
+-- \month{6}
+-- \stoppublication
+-- ]]
+
+local lists = {
+ author = true,
+ editor = true,
+ -- artauthor = true,
+ -- arteditor = true,
+}
+
+local function registervalue(target,key,value)
+ target[key] = value
+end
+
+-- Instead of being generic we just hardcode the old stuff:
+
+local function registerauthor(target,key,juniors,firstnames,initials,vons,surnames)
+ local value = target[key]
+ target[key]= ((value and value .. " and {") or "{") ..
+ vons .. "},{" ..
+ surnames .. "},{" ..
+ juniors .. "},{" ..
+ firstnames .. "},{" ..
+ initials .. "}"
+end
+
+local leftbrace = P("{")
+local rightbrace = P("}")
+local leftbracket = P("[")
+local rightbracket = P("]")
+local backslash = P("\\")
+local letter = R("az","AZ")
+
+local skipspaces = lpeg.patterns.whitespace^0
+local key = Cs(letter^1)
+local value = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace
+local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket
+
+local authorkey = (P("artauthor") + P("author")) / "author"
+ + (P("arteditor") + P("editor")) / "editor"
+local authorvalue = (optional + Cc("{}")) * skipspaces -- [juniors]
+ * (value + Cc("{}")) * skipspaces -- {firstnames}
+ * (optional + Cc("{}")) * skipspaces -- [initials]
+ * (value + Cc("{}")) * skipspaces -- {vons}
+ * (value + Cc("{}")) * skipspaces -- {surnames}
+
+local keyvalue = Carg(1) * authorkey * skipspaces * authorvalue / registerauthor
+ + Carg(1) * key * skipspaces * value / registervalue
+
+local pattern = (backslash * keyvalue + P(1))^0
+
+local function addtexentry(dataset,settings,content)
+ local current = datasets[dataset]
+ local settings = settings_to_hash(settings)
+ local data = {
+ tag = settings.tag or settings.k or "no tag",
+ category = settings.category or settings.t or "article",
+ }
+ lpegmatch(pattern,content,1,data) -- can set tag too
+ local tag = data.tag
+ local index = publications.getindex(dataset,current.luadata,tag)
+ current.ordered[index] = data
+ current.luadata[tag] = data
+ current.userdata[tag] = data
+ current.details[tag] = nil
+ return data
+end
+
+local pattern = ( Carg(1)
+ * P("\\startpublication")
+ * skipspaces
+ * optional
+ * C((1 - P("\\stoppublication"))^1)
+ * P("\\stoppublication") / addtexentry
+ + P("%") * (1-lpeg.patterns.newline)^0
+ + P(1)
+)^0
+
+function publications.loaders.bbl(dataset,filename)
+ local dataset, fullname = publications.resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ local data = io.loaddata(fullname) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",fullname)
+ return
+ end
+ if trace then
+ report("loading file %a",fullname)
+ end
+ lpegmatch(pattern,data,1,dataset)
+end
+
+publications.addtexentry = addtexentry
+commands.addbtxentry = addtexentry
diff --git a/tex/context/base/publ-usr.mkiv b/tex/context/base/publ-usr.mkiv
new file mode 100644
index 000000000..cb078f424
--- /dev/null
+++ b/tex/context/base/publ-usr.mkiv
@@ -0,0 +1,2 @@
+% todo
+
diff --git a/tex/context/base/publ-xml.mkiv b/tex/context/base/publ-xml.mkiv
new file mode 100644
index 000000000..96375b9cc
--- /dev/null
+++ b/tex/context/base/publ-xml.mkiv
@@ -0,0 +1,114 @@
+%D \module
+%D [ file=publ-xml,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / XML}
+
+\unprotect
+
+\unexpanded\def\convertbtxdatasettoxml
+ {\dosingleempty\publ_convert_to_xml}
+
+\def\publ_convert_to_xml[#1]%
+ {\clf_convertbtxdatasettoxml{\iffirstargument#1\else\s!default\fi}} % or current when not empty
+
+% \startxmlsetups btx:initialize
+% \xmlregistereddocumentsetups{#1}{}
+% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+% \xmlmain{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:initialize
+ \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+ \xmlmain{#1}
+\stopxmlsetups
+
+% \startxmlsetups btx:entry
+% \xmlflush{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:field
+ \xmlflushcontext{#1}
+\stopxmlsetups
+
+\protect \endinput
+
+% \startxmlsetups bibtex:entry:getkeys
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
+% \stopxmlsetups
+
+% \startbuffer
+% \startxmlsetups xml:bibtex:sorter
+% \xmlresetsorter{bibtex}
+% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
+% \xmlfilter{#1}{
+% bibtex
+% /entry[@category='article']
+% /field[@name='author' and find(text(),'Knuth')]
+% /../command(bibtex:entry:getkeys)}
+% \xmlsortentries{bibtex}
+% \xmlflushsorter{bibtex}{bibtex:entry:flush}
+% \stopxmlsetups
+% \stopbuffer
+
+% \bgroup
+% \setups[bibtex-commands]
+% \getbuffer
+% \egroup
+
+% \startxmlsetups bibtex:entry:flush
+% \xmlfilter{#1}{/field[@name='author']/context()} / %
+% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
+% \xmlatt{#1}{tag}\par
+% \stopxmlsetups
+
+% \startpacked
+% \getbuffer
+% \stoppacked
+
+
+% \unexpanded\def\btx_xml_list_handle_entry
+% {\begingroup
+% \ignorespaces
+% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
+% \removeunwantedspaces
+% \endgroup}
+
+% \startxmlsetups btx:format
+% \btxlistparameter\c!before\relax % prevents lookahead
+% \edef\currentbibxmlnode {#1}
+% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
+% \edef\currentbtxcategory{\xmlatt{#1}{category}}
+% \ignorespaces
+% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
+% \removeunwantedspaces
+% \btxlistparameter\c!after\relax % prevents lookahead
+% \stopxmlsetups
+
+% \startxmlsetups btx:list
+% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
+% \stopxmlsetups
+
+% \startxmlsetups btx:btx
+% \xmlfilter{#1}{/entry/command(btx:format)}
+% \stopxmlsetups
+
+% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
+% \unexpanded\def\btx_xml_todo #1{[#1]}
+
+% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index d5d278b16..37a88fd5f 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -15,7 +15,7 @@ runtime.</p>
local commands, context = commands, context
local utfchar = utf.char
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local P, Cs, Cc, lpegmatch = lpeg.P, lpeg.Cs, lpeg.Cc, lpeg.match
local char, gsub, format, gmatch, byte, match = string.char, string.gsub, string.format, string.gmatch, string.byte, string.match
local next = next
local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
@@ -27,6 +27,9 @@ local sequencers = utilities.sequencers
local textlineactions = resolvers.openers.helpers.textlineactions
local setmetatableindex = table.setmetatableindex
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+
--[[ldx--
<p>We will hook regime handling code into the input methods.</p>
--ldx]]--
@@ -99,6 +102,8 @@ local synonyms = { -- backward compatibility list
["windows"] = "cp1252",
+ ["pdf"] = "pdfdoc",
+
}
local currentregime = "utf"
@@ -132,7 +137,7 @@ end
setmetatableindex(mapping, loadregime)
setmetatableindex(backmapping,loadreverse)
-local function translate(line,regime)
+local function fromregime(regime,line)
if line and #line > 0 then
local map = mapping[regime and synonyms[regime] or regime or currentregime]
if map then
@@ -178,12 +183,15 @@ local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
local r = c[d]
if not r then
local t = fastcopy(backmapping[vector])
- setmetatableindex(t, function(t,k)
- local v = d
- t[k] = v
- return v
- end)
- r = utf.remapper(t)
+ -- r = utf.remapper(t) -- not good for defaults here
+ local pattern = Cs((lpeg.utfchartabletopattern(t)/t + lpeg.patterns.utf8character/d + P(1)/d)^0)
+ r = function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end
c[d] = r
end
return r(str)
@@ -192,6 +200,7 @@ end
local function disable()
currentregime = "utf"
sequencers.disableaction(textlineactions,"regimes.process")
+ return currentregime
end
local function enable(regime)
@@ -202,12 +211,14 @@ local function enable(regime)
currentregime = regime
sequencers.enableaction(textlineactions,"regimes.process")
end
+ return currentregime
end
-regimes.toregime = toregime
-regimes.translate = translate
-regimes.enable = enable
-regimes.disable = disable
+regimes.toregime = toregime
+regimes.fromregime = fromregime
+regimes.translate = function(str,regime) return fromregime(regime,str) end
+regimes.enable = enable
+regimes.disable = disable
-- The following function can be used when we want to make sure that
-- utf gets passed unharmed. This is needed for modules.
@@ -216,7 +227,7 @@ local level = 0
function regimes.process(str,filename,currentline,noflines,coding)
if level == 0 and coding ~= "utf-8" then
- str = translate(str,currentregime)
+ str = fromregime(currentregime,str)
if trace_translating then
report_translating("utf: %s",str)
end
@@ -243,40 +254,62 @@ end
regimes.push = push
regimes.pop = pop
-sequencers.prependaction(textlineactions,"system","regimes.process")
-sequencers.disableaction(textlineactions,"regimes.process")
+if sequencers then
+
+ sequencers.prependaction(textlineactions,"system","regimes.process")
+ sequencers.disableaction(textlineactions,"regimes.process")
+
+end
-- interface:
-commands.enableregime = enable
-commands.disableregime = disable
+implement {
+ name = "enableregime",
+ arguments = "string",
+ actions = function(regime) setmacro("currentregime",enable(regime)) end
+}
-commands.pushregime = push
-commands.popregime = pop
+implement {
+ name = "disableregime",
+ actions = function() setmacro("currentregime",disable()) end
+}
-function commands.currentregime()
- context(currentregime)
-end
+implement {
+ name = "pushregime",
+ actions = push
+}
+
+implement {
+ name = "popregime",
+ actions = pop
+}
local stack = { }
-function commands.startregime(regime)
- insert(stack,currentregime)
- if trace_translating then
- report_translating("start using %a",regime)
+implement {
+ name = "startregime",
+ arguments = "string",
+ actions = function(regime)
+ insert(stack,currentregime)
+ if trace_translating then
+ report_translating("start using %a",regime)
+ end
+ setmacro("currentregime",enable(regime))
end
- enable(regime)
-end
+}
-function commands.stopregime()
- if #stack > 0 then
- local regime = remove(stack)
- if trace_translating then
- report_translating("stop using %a",regime)
+implement {
+ name = "stopregime",
+ actions = function()
+ if #stack > 0 then
+ local regime = remove(stack)
+ if trace_translating then
+ report_translating("stop using %a",regime)
+ end
+ setmacro("currentregime",enable(regime))
end
- enable(regime)
end
-end
+}
-- Next we provide some hacks. Unfortunately we run into crappy encoded
-- (read : mixed) encoded xml files that have these ë ä ö ü sequences
@@ -311,48 +344,82 @@ local patterns = { }
--
-- twice as fast and much less lpeg bytecode
+-- function regimes.cleanup(regime,str)
+-- if not str or str == "" then
+-- return str
+-- end
+-- local p = patterns[regime]
+-- if p == nil then
+-- regime = regime and synonyms[regime] or regime or currentregime
+-- local vector = regime ~= "utf" and regime ~= "utf-8" and mapping[regime]
+-- if vector then
+-- local utfchars = { }
+-- local firsts = { }
+-- for k, uchar in next, vector do
+-- local stream = { }
+-- local split = totable(uchar)
+-- local nofsplits = #split
+-- if nofsplits > 1 then
+-- local first
+-- for i=1,nofsplits do
+-- local u = vector[split[i]]
+-- if not first then
+-- first = firsts[u]
+-- if not first then
+-- first = { }
+-- firsts[u] = first
+-- end
+-- end
+-- stream[i] = u
+-- end
+-- local nofstream = #stream
+-- if nofstream > 1 then
+-- first[#first+1] = concat(stream,2,nofstream)
+-- utfchars[concat(stream)] = uchar
+-- end
+-- end
+-- end
+-- p = P(false)
+-- for k, v in next, firsts do
+-- local q = P(false)
+-- for i=1,#v do
+-- q = q + P(v[i])
+-- end
+-- p = p + P(k) * q
+-- end
+-- p = Cs(((p+1)/utfchars)^1)
+-- -- lpeg.print(p) -- size: 1042
+-- else
+-- p = false
+-- end
+-- patterns[regime] = p
+-- end
+-- return p and lpegmatch(p,str) or str
+-- end
+--
+-- 5 times faster:
+
function regimes.cleanup(regime,str)
+ if not str or str == "" then
+ return str
+ end
local p = patterns[regime]
if p == nil then
regime = regime and synonyms[regime] or regime or currentregime
- local vector = regime ~= "utf" and mapping[regime]
+ local vector = regime ~= "utf" and regime ~= "utf-8" and mapping[regime]
if vector then
- local utfchars = { }
- local firsts = { }
- for k, uchar in next, vector do
- local stream = { }
- local split = totable(uchar)
- local nofsplits = #split
- if nofsplits > 1 then
- local first
- for i=1,nofsplits do
- local u = vector[split[i]]
- if not first then
- first = firsts[u]
- if not first then
- first = { }
- firsts[u] = first
- end
- end
- stream[i] = u
- end
- local nofstream = #stream
- if nofstream > 1 then
- first[#first+1] = concat(stream,2,nofstream)
- utfchars[concat(stream)] = uchar
- end
+ local mapping = { }
+ for k, v in next, vector do
+ local split = totable(v)
+ for i=1,#split do
+ split[i] = utfchar(byte(split[i]))
end
- end
- p = P(false)
- for k, v in next, firsts do
- local q = P(false)
- for i=1,#v do
- q = q + P(v[i])
+ split = concat(split)
+ if v ~= split then
+ mapping[split] = v
end
- p = p + P(k) * q
end
- p = Cs(((p+1)/utfchars)^1)
- -- lpeg.print(p) -- size: 1042
+ p = Cs((lpeg.utfchartabletopattern(mapping)/mapping+P(1))^0)
else
p = false
end
@@ -361,28 +428,9 @@ function regimes.cleanup(regime,str)
return p and lpegmatch(p,str) or str
end
--- local map = require("regi-cp1252")
-- local old = [[test ë ä ö ü crap]]
--- local new = correctencoding(map,old)
---
--- print(old,new)
-
--- obsolete:
---
--- function regimes.setsynonym(synonym,target)
--- synonyms[synonym] = target
--- end
---
--- function regimes.truename(regime)
--- return regime and synonyms[regime] or regime or currentregime
--- end
---
--- commands.setregimesynonym = regimes.setsynonym
---
--- function commands.trueregimename(regime)
--- context(regimes.truename(regime))
--- end
---
--- function regimes.load(regime)
--- return mapping[synonyms[regime] or regime]
--- end
+-- local new = regimes.cleanup("cp1252",old)
+-- report_translating("%s -> %s",old,new)
+-- local old = "Pozn" .. char(0xE1) .. "mky"
+-- local new = fromregime("cp1250",old)
+-- report_translating("%s -> %s",old,new)
diff --git a/tex/context/base/regi-ini.mkiv b/tex/context/base/regi-ini.mkiv
index 651e2f13c..6190393d8 100644
--- a/tex/context/base/regi-ini.mkiv
+++ b/tex/context/base/regi-ini.mkiv
@@ -16,34 +16,18 @@
\unprotect
%D \macros
-%D {enableregime,disableregime,currentregime}
+%D {enableregime,disableregime,
+%D startregime,stopregime,
+%D currentregime}
%D
%D Beware, the enable and disable commands are global switches, so
%D best use the start|/|stop commands.
-\unexpanded\def\enableregime[#1]{\ctxcommand{enableregime("#1")}}
-\unexpanded\def\disableregime {\ctxcommand{disableregime()}}
-\unexpanded\def\startregime [#1]{\ctxcommand{startregime("#1")}}
-\unexpanded\def\stopregime {\ctxcommand{stopregime()}}
- \def\currentregime {\ctxcommand{currentregime()}}
+\let\currentregime\empty
-% D \macros
-% D {defineregimesynonym,trueregimename}
-%
-% \unexpanded\def\defineregimesynonym % more or less obsolete
-% {\dodoubleargument\dodefineregimesynonym}
-%
-% \def\dodefineregimesynonym[#1][#2]%
-% {\ctxcommand{setregimesynonym("#1","#2")}}
-%
-% \def\trueregimename#1%
-% {\ctxcommand{trueregimename("#1")}}
-
-% D \macros
-% D {useregime}
-% D
-% D This one is sort of obsolete but we keep them around.
-
-\unexpanded\def\useregime[#1]{}
+\unexpanded\def\enableregime[#1]{\clf_enableregime{#1}}
+\unexpanded\def\disableregime {\clf_disableregime}
+\unexpanded\def\startregime [#1]{\clf_startregime{#1}}
+\unexpanded\def\stopregime {\clf_stopregime}
\protect \endinput
diff --git a/tex/context/base/regi-pdfdoc.lua b/tex/context/base/regi-pdfdoc.lua
new file mode 100644
index 000000000..363d3ae0d
--- /dev/null
+++ b/tex/context/base/regi-pdfdoc.lua
@@ -0,0 +1,26 @@
+if not modules then modules = { } end modules ['regi-pdfdoc'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, 0x0010,
+ 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x02D8, 0x02C7, 0x02C6, 0x02D9, 0x02DD, 0x02DB, 0x02DA, 0x02DC, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x2022, 0x2020, 0x2021, 0x2026, 0x2014, 0x2013, 0x0192, 0x2044, 0x2039, 0x203A, 0x2212, 0x2030, 0x201E, 0x201C, 0x201D, 0x2018,
+ 0x2019, 0x201A, 0x2122, 0xFB01, 0xFB02, 0x0141, 0x0152, 0x0160, 0x0178, 0x017D, 0x0131, 0x0142, 0x0153, 0x0161, 0x017E, 0x009F,
+ 0x20AC, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0xFFFD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF,
+}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 7d8064b29..00a1a5c1e 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -34,6 +34,7 @@
%logo [FGA] {fga}
%logo [FGBBS] {fgbbs}
\logo [ACROBAT] {Acro\-bat}
+\logo [APA] {apa}
\logo [AFM] {afm}
\logo [API] {api}
\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
@@ -45,8 +46,10 @@
\logo [ARABTEX] {Arab\TeX}
\logo [ASCII] {ascii}
\logo [ASCIITEX] {ascii\TeX}
+\logo [ASCIIMATH] {AsciiMath}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
+\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
@@ -64,6 +67,8 @@
\logo [CMYK] {cmyk}
\logo [CODHOST] {CodHost}
\logo [CONTEXT] {\ConTeXt}
+\logo [CONTEXTWIKI] {\ConTeXt\ Wiki}
+\logo [CONTEXTGROUP] {\ConTeXt\ Group}
\logo [CSS] {css}
\logo [CTAN] {ctan}
\logo [CTXTOOLS] {ctxtools}
@@ -92,6 +97,7 @@
\logo [EMTEX] {em\TeX}
\logo [ENCODING] {enc}
\logo [ENCTEX] {enc\TeX}
+\logo [EPUB] {ePub}
\logo [EPS] {eps}
\logo [ETEX] {\eTeX}
\logo [EUROBACHOTEX] {EuroBacho\TeX}
@@ -233,11 +239,13 @@
\logo [SSD] {ssd}
\logo [SVG] {svg}
\logo [STIX] {Stix}
+\logo [SUMATRAPDF] {SumatraPDF}
\logo [SWIG] {swig}
\logo [SWIGLIB] {SwigLib}
\logo [TABLE] {\TaBlE}
\logo [TCPIP] {tcp/ip}
\logo [TDS] {tds} % no sc te
+\logo [TEI] {tei} % no sc te
\logo [TETEX] {te\TeX} % no sc te
\logo [TEX] {\TeX}
\logo [TEXADRES] {\TeX adress}
@@ -259,6 +267,7 @@
\logo [TEXTOOLS] {\TeX tools}
\logo [TEXUTIL] {\TeX util}
\logo [TEXWORK] {\TeX work}
+\logo [TEXWORKS] {\TeX works}
\logo [TEXXET] {\TeX\XeT} \def\XeT{XeT}
\logo [TFM] {tfm}
\logo [TIF] {tif}
diff --git a/tex/context/base/s-art-01.mkiv b/tex/context/base/s-art-01.mkiv
index e2584a357..601ee1adc 100644
--- a/tex/context/base/s-art-01.mkiv
+++ b/tex/context/base/s-art-01.mkiv
@@ -2,7 +2,7 @@
\startmodule[art-01]
-\definemeasure [article:margin] [\paperheight/15]
+\definemeasure [article:margin] [\paperheight/20] % was 15, see xtables-mkiv for results
\definemeasure [overview:margin] [\paperheight/30]
\definelayout
@@ -10,7 +10,7 @@
[\c!topspace=\measure{article:margin},
\c!bottomspace=\measure{article:margin},
\c!backspace=\measure{article:margin},
- \c!header=\measure{overview:margin},
+ \c!header=\measure{article:margin},
\c!footer=0pt,
\c!width=\v!middle,
\c!height=\v!middle]
diff --git a/tex/context/base/s-figures-names.mkiv b/tex/context/base/s-figures-names.mkiv
new file mode 100644
index 000000000..a2782efc9
--- /dev/null
+++ b/tex/context/base/s-figures-names.mkiv
@@ -0,0 +1,99 @@
+%D \module
+%D [ file=s-figures-names.mkiv,
+%D version=2017.07.17,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Figure Names,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Normally this module will be run somewhere at the end of a document in some kind of tracing
+%D mode. We could hook it into \type {\stoptext}, but it is better to leave that up to the user.
+%D Loading this module upfront will not show anything useful. The main reason for making this
+%D module was that we wanted to report weird figure names: spaces, multiple hyphens in a row,
+%D mixed hyphens and underscores, inconsistently mixed upper and lowercase, etc.
+%D
+%D If you only want info in the logfile, you can use:
+%D
+%D \starttyping
+%D \enabletrackers[graphics.lognames]
+%D \stoptyping
+%D
+%D This directive is persistent and can be issued any time before the end of the run.
+
+\startmodule[figures-names]
+
+\startluacode
+
+local context = context
+local ctx_NC = context.NC
+local ctx_NR = context.NR
+local ctx_red = context.red
+local ctx_starttabulate = context.starttabulate
+local ctx_stoptabulate = context.stoptabulate
+local ctx_hyphenatedname = context.hyphenatedfilename
+
+trackers.enable("graphics.lognames")
+
+context.start()
+
+ context.switchtobodyfont { "tt,small" }
+
+ local template = { "|Bl|p|" }
+
+ for _, data in table.sortedhash(figures.found) do
+ ctx_starttabulate(template)
+ local badname = data.badname
+ local found = data.found
+ ctx_NC()
+ context("asked name")
+ ctx_NC()
+ if badname then
+ ctx_red()
+ end
+ context(data.askedname)
+ ctx_NC() ctx_NR()
+ if found then
+ ctx_NC()
+ context("format")
+ ctx_NC()
+ context(data.format)
+ ctx_NC() ctx_NR()
+ ctx_NC()
+ context("found file")
+ ctx_NC()
+ ctx_hyphenatedname(data.foundname)
+ -- context(data.foundname)
+ ctx_NC() ctx_NR()
+ ctx_NC()
+ context("used file")
+ ctx_NC()
+ ctx_hyphenatedname(data.fullname)
+ -- context(data.fullname)
+ ctx_NC() ctx_NR()
+ if badname then
+ ctx_NC()
+ context("comment")
+ ctx_NC()
+ context("bad name")
+ ctx_NC() ctx_NR()
+ end
+ else
+ ctx_NC()
+ context("comment")
+ ctx_NC()
+ context(data.comment or "not found")
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+ end
+
+context.stop()
+
+\stopluacode
+
+\stopmodule
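A minimal usage sketch for the new s-figures-names module (illustrative only, not part of the patch; the module name comes from the file above, everything else is an assumption): since loading it upfront shows nothing useful, one would load it once at the very end of a run, after all figures have been placed.

\starttext
    % ... document body with external figures ...
    \usemodule[s-figures-names] % typesets the asked/found/used figure name overview
\stoptext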
diff --git a/tex/context/base/s-fnt-10.mkiv b/tex/context/base/s-fnt-10.mkiv
index 9b6211c2b..2fe82e079 100644
--- a/tex/context/base/s-fnt-10.mkiv
+++ b/tex/context/base/s-fnt-10.mkiv
@@ -100,7 +100,7 @@ function fonts.tracers.show_glyphs()
end
\stopluacode
-\def\ShowCompleteFont#1#2#3%
+\unexpanded\def\ShowCompleteFont#1#2#3%
{\bgroup
\page
\font\TestFont=#1 at #2
@@ -118,7 +118,7 @@ end
\page
\egroup}
-\def\ShowAllGlyphs#1#2#3%
+\unexpanded\def\ShowAllGlyphs#1#2#3%
{\bgroup
\page
\def\showglyph##1{\dontleavehmode\strut\char##1\relax\par}
diff --git a/tex/context/base/s-fnt-21.mkiv b/tex/context/base/s-fnt-21.mkiv
index 588c98016..10d5a4623 100644
--- a/tex/context/base/s-fnt-21.mkiv
+++ b/tex/context/base/s-fnt-21.mkiv
@@ -47,7 +47,7 @@
\endinput
-% \usemodule[fnt-20]
+% \usemodule[fnt-21]
%
% \starttext
%
diff --git a/tex/context/base/s-fnt-24.mkiv b/tex/context/base/s-fnt-24.mkiv
index 2c0022e5c..2e6b9a591 100644
--- a/tex/context/base/s-fnt-24.mkiv
+++ b/tex/context/base/s-fnt-24.mkiv
@@ -56,7 +56,7 @@ function fonts.analyzers.cjktest(first,second)
end
\stopluacode
-\def\ShowCombinationsKorean
+\unexpanded\def\ShowCombinationsKorean
{\dodoubleempty\doShowCombinationsKorean}
\def\doShowCombinationsKorean[#1][#2]%
@@ -68,7 +68,7 @@ end
\disabletrackers[cjk.analyzing]
\stoppacked}
-\def\ShowCombinationsChinese
+\unexpanded\def\ShowCombinationsChinese
{\dodoubleempty\doShowCombinationsChinese}
\def\doShowCombinationsChinese[#1][#2]%
diff --git a/tex/context/base/s-fonts-coverage.lua b/tex/context/base/s-fonts-coverage.lua
index db47e57c4..dd772d5f0 100644
--- a/tex/context/base/s-fonts-coverage.lua
+++ b/tex/context/base/s-fonts-coverage.lua
@@ -29,6 +29,7 @@ function moduledata.fonts.coverage.showcomparison(specification)
local present = { }
local names = { }
local files = { }
+ local chars = { }
if not pattern then
-- skip
@@ -56,6 +57,7 @@ function moduledata.fonts.coverage.showcomparison(specification)
end
names[#names+1] = fontname
files[#files+1] = fontfile
+ chars[#names] = fontdata.characters
end
end
@@ -91,7 +93,11 @@ function moduledata.fonts.coverage.showcomparison(specification)
NR()
HL()
for k, v in table.sortedpairs(present) do
- if k > 0 then
+ if k <= 0 then
+ -- ignore
+ elseif k >= 0x110000 then
+ logs.report("warning","weird character %U",k)
+ else
local description = chardata[k].description
if not pattern or (pattern == k) or (description and lpegmatch(pattern,description)) then
NC()
@@ -99,7 +105,11 @@ function moduledata.fonts.coverage.showcomparison(specification)
NC()
for i=1,#names do
getvalue(names[i])
- char(k)
+ if chars[i][k] then
+ char(k)
+ else
+ -- missing
+ end
NC()
end
context(description)
diff --git a/tex/context/base/s-fonts-ligatures.mkiv b/tex/context/base/s-fonts-ligatures.mkiv
new file mode 100644
index 000000000..e6ff2461e
--- /dev/null
+++ b/tex/context/base/s-fonts-ligatures.mkiv
@@ -0,0 +1,292 @@
+%D \module
+%D [ file=s-fonts-ligatures,
+%D version=2014.12.14,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Fonts Ligatures,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% begin info
+%
+% title : show some ligature building in fonts
+%
+% comment : we trace some ligatures that have rather different implementations in fonts
+% status : experimental, used for luatex testing
+%
+% end info
+
+\definefontfeature
+ [otfligaturetest]
+ [analyze=off,
+ ccmp=yes, % brill uses that .. not really ligatures !
+ %clig=yes,
+ script=latn,
+ language=dflt]
+
+\hyphenation{xf-fi-a}
+\hyphenation{xff-i-b}
+\hyphenation{xffi-c}
+\hyphenation{xffid}
+
+\registerhyphenationexception[xf-fi-a]
+\registerhyphenationexception[xff-i-b]
+\registerhyphenationexception[xffi-c]
+\registerhyphenationexception[xffid]
+
+\starttexdefinition showotfligaturescaption #1
+ \bTD [align={flushleft,lohi},nx=3]
+ \nohyphens
+ \ttbf
+ #1
+ \eTD
+\stoptexdefinition
+
+\starttexdefinition showotfligatureslegend #1
+ \bTD [width=6em,align={flushleft,lohi}]
+ \nohyphens \ttxx original
+ \eTD
+ \bTD [width=6em,align={flushleft,lohi}]
+ \nohyphens \ttxx expanded
+ \eTD
+ \bTD [width=6em,align={flushleft,lohi}]
+ \nohyphens \ttxx traditional
+ \eTD
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesline #1#2
+ \bTD[toffset=.5ex,frame=off]
+ \starthyphenation[#1]
+ \LigatureFont
+ \showfontkerns
+ \showdiscretionaries
+ \begstrut#2\endstrut
+ \par
+ \stophyphenation
+ \eTD
+\stoptexdefinition
+
+\def\showotfligaturescells{3}
+%def\showotfligaturesnx {12}
+%def\showotfligatureswidth{18em}
+\def\showotfligaturesnx {\the\numexpr 4*\showotfligaturescells}
+\def\showotfligatureswidth{\the\dimexpr6em*\showotfligaturescells}
+
+\starttexdefinition showotfligaturesbanner #1
+ \bTR[frame=off]
+ \bTD [nx=\showotfligaturesnx,width=\showotfligatureswidth,align={middle,lohi},height=4ex]
+ \tttf #1
+ \eTD
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturescaptions #1
+ \bTR[height=3ex,align={middle,lohi},bottomframe=off]
+ \processcommalist[#1]\showotfligaturescaption
+ \eTR
+ \bTR[height=3ex,align={middle,lohi},topframe=off]
+ \processcommalist[#1]\showotfligatureslegend
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligatureslineset #1
+ \showotfligaturesline{original} {#1}
+ \showotfligaturesline{expanded} {#1}
+ \showotfligaturesline{traditional}{#1}
+\stoptexdefinition
+
+
+\starttexdefinition showotfligaturesparagraphset #1
+ \showotfligatureslineset {
+ \hsize \zeropoint
+ \lefthyphenmin \plustwo
+ \righthyphenmin\plustwo
+ #1
+ }
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesextremeset #1
+ \showotfligatureslineset {
+ \hsize \zeropoint
+ \lefthyphenmin \plusone
+ \righthyphenmin\plusone
+ #1
+ }
+\stoptexdefinition
+
+\starttexdefinition showotfligatureslines #1
+ \bTR[height=4ex,bottomframe=off]
+ \processcommalist[#1]\showotfligatureslineset
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesparagraphs #1
+ \bTR[topframe=off]
+ \processcommalist[#1]\showotfligaturesparagraphset
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesextremes #1
+ \bTR[topframe=off]
+ \processcommalist[#1]\showotfligaturesextremeset
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesdefaults
+ \bTR
+ \bTD [nx=\showotfligaturesnx,width=\showotfligatureswidth,align=middle,height=4ex,frame=off]
+ \start \LigatureFont fb \stop \quad
+ \start \LigatureFont ff \stop \quad
+ \start \LigatureFont fi \stop \quad
+ \start \LigatureFont fk \stop \quad
+ \start \LigatureFont fl \stop \quad
+ \start \LigatureFont ft \stop \quad
+ \start \LigatureFont ffb \stop \quad
+ \start \LigatureFont fff \stop \quad
+ \start \LigatureFont ffi \stop \quad
+ \start \LigatureFont ffl \stop \quad
+ \start \LigatureFont ffk \stop \quad
+ \start \LigatureFont fft \stop
+ \eTD
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesexample #1#2
+ \showotfligaturescaptions {#1}
+ \showotfligatureslines {#2}
+ \showotfligaturesparagraphs{#2}
+ \showotfligaturesextremes {#2}
+\stoptexdefinition
+
+% todo: n -> and split in lua
+
+\starttexdefinition showotfligaturesexamples
+ \showotfligaturesexample
+ {leafing,baffling,efficient,shifffahrt}
+ {leafing,baffling,efficient,shifffahrt}
+ \showotfligaturesexample
+ {offbeat,effect,ef-fective,ef\-fective}
+ {offbeat,effect,ef-fective,ef\-fective}
+ \showotfligaturesexample
+ {xf+fi+a,xff+i+b,xffi+c,xffid}
+ {xffia, xffib, xffic, xffid}
+\stoptexdefinition
+
+\starttexdefinition showotfligatures [#1]
+ \begingroup
+ \getdummyparameters[font=Regular,features=default,#1]
+ \definefont[LigatureFont][\dummyparameter{font}*\dummyparameter{features},otfligaturetest ht 2ex]
+ \bTABLE[leftframe=off,rightframe=off]
+ \showotfligaturesbanner{\dummyparameter{font} * \dummyparameter{features}}
+ \showotfligaturesdefaults
+ \showotfligaturesexamples
+ \eTABLE
+ \endgroup
+\stoptexdefinition
+
+\continueifinputfile{s-fonts-ligatures.mkiv}
+
+\starttext
+
+ \startTEXpage[offset=10pt] \showotfligatures[font=lmroman10-regular.otf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=dejavu-serif.ttf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=minionpro.otf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=minionpro.otf, features=smallcaps] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=brill.otf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=gentiumplus-r.ttf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=cambria, features=default] \stopTEXpage
+
+\stoptext
+
+% \startluacode
+%
+% local f = fonts.hashes.identifiers[true]
+%
+% local sequences = f.resources.sequences
+% local descriptions = f.shared.rawdata.descriptions
+% local lookuptypes = f.resources.lookuptypes
+% local lookups = f.resources.lookups
+%
+% local ligatures = { "liga", "dlig", "rlig", "clig", "tlig", "ccmp" }
+% local found = { }
+%
+% for i=1,#sequences do
+% local sequence = sequences[i]
+% local features = sequence.features
+% for i=1,#ligatures do
+% local l = ligatures[i]
+% if features[l] then
+% local subtables = sequence.subtables
+% if subtables then
+% for i=1,#subtables do
+% local subtable = subtables[i]
+% local lookup = found[subtable]
+% if lookup then
+% lookup[l] = true
+% else
+% found[subtable] = { [l] = true }
+% end
+% end
+% end
+% end
+% end
+% end
+%
+% context.starttabulate { "|||T|T|T|" }
+%
+% local function flush(l,v,start,unicode,data,done)
+% local features = found[l]
+% if features then
+% local lookuptype = lookuptypes[l]
+% if lookuptype == "ligature" then
+% local t = { }
+% for i=1,#v do
+% t[i] = utf.char(v[i])
+% end
+% t = table.concat(t," ")
+% if not done[t] then
+% context.NC()
+% context(t)
+% context.NC()
+% context(utf.char(unicode))
+% context.NC()
+% context(" %t",table.sortedkeys(features))
+% context.NC()
+% local name = data.name
+% if name then
+% context(name)
+% end
+% context.NC()
+% context("%U",unicode)
+% context.NC()
+% context.NR()
+% done[t] = true
+% end
+% end
+% end
+% end
+%
+% for unicode, data in table.sortedhash(descriptions) do
+% local slookups = data.slookups
+% local mlookups = data.mlookups
+% local done = { }
+% if slookups then
+% for l, v in next, slookups do
+% flush(l,v,1,unicode,data,done)
+% end
+% end
+% if mlookups then
+% for i=1,#mlookups do
+% local v = mlookups[i]
+% flush(v[1],v,2,unicode,data,done)
+% end
+% end
+% end
+%
+% context.stoptabulate()
+%
+% \stopluacode
diff --git a/tex/context/base/s-fonts-shapes.lua b/tex/context/base/s-fonts-shapes.lua
index b387c11dd..bca860f3f 100644
--- a/tex/context/base/s-fonts-shapes.lua
+++ b/tex/context/base/s-fonts-shapes.lua
@@ -16,24 +16,32 @@ local NC, NR = context.NC, context.NR
local space, dontleavehmode, glyph, getvalue = context.space, context.dontleavehmode, context.glyph, context.getvalue
local formatters = string.formatters
-function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
- specification = interfaces.checkedspecification(specification)
- local id, cs = fonts.definers.internal(specification,"<module:fonts:shapes:font>")
- local chrs = fontdata[id].characters
- function char(k)
- dontleavehmode()
- glyph(id,k)
- end
- local function special(v)
- local specials = v.specials
- if specials and #specials > 1 then
- context("%s:",specials[1])
+function char(id,k)
+ dontleavehmode()
+ glyph(id,k)
+end
+
+local function special(id,specials)
+ if specials and #specials > 1 then
+ context("%s:",specials[1])
+ if #specials > 5 then
+ space() char(id,specials[2])
+ space() char(id,specials[3])
+ space() context("...")
+ space() char(id,specials[#specials-1])
+ space() char(id,specials[#specials])
+ else
for i=2,#specials do
- space()
- char(specials[i])
+ space() char(id,specials[i])
end
end
end
+end
+
+function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
+ specification = interfaces.checkedspecification(specification)
+ local id, cs = fonts.definers.internal(specification,"<module:fonts:shapes:font>")
+ local chrs = fontdata[id].characters
context.begingroup()
context.tt()
context.starttabulate { "|l|c|c|c|c|l|l|" }
@@ -50,11 +58,11 @@ function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
for k, v in next, characters.data do
if chrs[k] then
NC() context("0x%05X",k)
- NC() char(k) -- getvalue(cs) context.char(k)
- NC() char(v.shcode)
- NC() char(v.lccode or k)
- NC() char(v.uccode or k)
- NC() special(v)
+ NC() char(id,k) -- getvalue(cs) context.char(k)
+ NC() char(id,v.shcode)
+ NC() char(id,v.lccode or k)
+ NC() char(id,v.uccode or k)
+ NC() special(id,v.specials)
NC() context.tx(v.description)
NC() NR()
end
@@ -67,20 +75,6 @@ function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
specification = interfaces.checkedspecification(specification)
local id, cs = fonts.definers.internal(specification,"<module:fonts:shapes:font>")
local chrs = fontdata[id].characters
- function char(k)
- dontleavehmode()
- glyph(id,k)
- end
- local function special(v)
- local specials = v.specials
- if specials and #specials > 1 then
- context("%s:",specials[1])
- for i=2,#specials do
- space()
- char(specials[i])
- end
- end
- end
context.begingroup()
context.tt()
context.starttabulate { "|l|c|c|c|c|l|l|" }
@@ -97,11 +91,11 @@ function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
for k, v in next, characters.data do
if chrs[k] then
NC() context("0x%05X",k)
- NC() char(k)
- NC() char(v.shcode)
- NC() char(v.lccode or k)
- NC() char(v.uccode or k)
- NC() special(v)
+ NC() char(id,k)
+ NC() char(id,v.shcode)
+ NC() char(id,v.lccode or k)
+ NC() char(id,v.uccode or k)
+ NC() special(id,v.specials)
NC() context.tx(v.description)
NC() NR()
end
@@ -137,7 +131,8 @@ local function showglyphshape(specification)
local top_accent, bot_accent = (d.top_accent or 0)*factor, (d.bot_accent or 0)*factor
local anchors, math = d.anchors, d.math
context.startMPcode()
- context("pickup pencircle scaled .25bp ;")
+ context("numeric lw ; lw := .125bp ;")
+ context("pickup pencircle scaled lw ;")
context('picture p ; p := image(draw textext.drt("\\getuvalue{%s}\\gray\\char%s");); draw p ;',cs,charnum)
context('draw (%s,%s)--(%s,%s)--(%s,%s)--(%s,%s)--cycle withcolor green ;',llx,lly,urx,lly,urx,ury,llx,ury)
context('draw (%s,%s)--(%s,%s) withcolor green ;',llx,0,urx,0)
@@ -153,11 +148,11 @@ local function showglyphshape(specification)
l[#l+1] = formatters["((%s,%s) shifted (%s,%s))"](xsign*k*factor,ysign*h*factor,dx,dy)
end
end
- context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[1].kern*factor,lly,dx,dy,l[1])
+ context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled 1/16) withcolor .5white;", xsign*v[1].kern*factor,lly,dx,dy,l[1])
context("draw laddered (%s) withcolor .5white ;",table.concat(l,".."))
- context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[#v].kern*factor,ury,dx,dy,l[#l])
+ context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled 1/16) withcolor .5white;", xsign*v[#v].kern*factor,ury,dx,dy,l[#l])
for k, v in ipairs(l) do
- context("draw %s withcolor blue withpen pencircle scaled 1bp;",v)
+ context("draw %s withcolor blue withpen pencircle scaled 2lw ;",v)
end
end
end
@@ -203,7 +198,7 @@ local function showglyphshape(specification)
end
local function show(x,y,txt)
local xx, yy = x*factor, y*factor
- context("draw (%s,%s) withcolor blue withpen pencircle scaled 1bp;",xx,yy)
+ context("draw (%s,%s) withcolor blue withpen pencircle scaled 2lw ;",xx,yy)
context('label.top("\\type{%s}",(%s,%s-2bp)) ;',txt,xx,yy)
context('label.bot("(%s,%s)",(%s,%s+2bp)) ;',x,y,xx,yy)
end
@@ -242,9 +237,9 @@ local function showglyphshape(specification)
end
end
if italic ~= 0 then
- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width,ury,width,ury)
- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width+italic,ury,width+italic,ury)
- context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue;',width,ury,width+italic,ury)
+ context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue ;',width,ury,width,ury)
+ context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue ;',width+italic,ury,width+italic,ury)
+ context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue ;',width,ury,width+italic,ury)
context('label.lft("\\type{%s}",(%s+2bp,%s-1bp));',"italic",width,ury)
context('label.rt("%s",(%s-2bp,%s-1bp));',d.italic,width+italic,ury)
end
@@ -258,7 +253,7 @@ local function showglyphshape(specification)
context('label.top("\\type{%s}",(%s,%s-1bp));',"bot_accent",top_accent,ury)
context('label.bot("%s",(%s,%s+1bp));',d.bot_accent,bot_accent,lly)
end
- context('draw origin withcolor red withpen pencircle scaled 1bp;')
+ context('draw origin withcolor red withpen pencircle scaled 2lw;')
context("setbounds currentpicture to boundingbox currentpicture enlarged 1bp ;")
context("currentpicture := currentpicture scaled 8 ;")
context.stopMPcode()
diff --git a/tex/context/base/s-fonts-tables.lua b/tex/context/base/s-fonts-tables.lua
index 5c91d5ee7..b3dac7d06 100644
--- a/tex/context/base/s-fonts-tables.lua
+++ b/tex/context/base/s-fonts-tables.lua
@@ -21,6 +21,12 @@ local digits = {
dflt = {
dflt = "1234567890 1/2",
},
+ arab = {
+ dflt = "",
+ },
+ latn = {
+ dflt = "1234567890 1/2",
+ }
}
local punctuation = {
@@ -65,6 +71,9 @@ local lowercase = {
cyrl= {
dflt = "абвгдежзиійклмнопрстуфхцчшщъыьѣэюяѳ"
},
+ arab = {
+ dflt = "ابجدهوزحطيكلمنسعفصقرشتثخذضظغ"
+ },
}
local samples = {
diff --git a/tex/context/base/s-inf-01.mkvi b/tex/context/base/s-inf-01.mkvi
index 8263413ac..2c0c1681e 100644
--- a/tex/context/base/s-inf-01.mkvi
+++ b/tex/context/base/s-inf-01.mkvi
@@ -126,6 +126,8 @@
return max, what, function(n) return (max == 0 and 0) or (n == 0 and 0) or n/max end
end
+ local f_norm = string.formatters["%0.3f"]
+
function document.context_state_1(what)
local max, what, norm = prepare(what)
context.starttabulate { "|Tl|T|T|T|T|T|" }
@@ -154,9 +156,15 @@
(used.mkvi[k] and "vi") or "~~",
k
)
+ -- context("%s\\enspace %s\\quad %s\\quad %s",
+ -- (used.mkii[k] and "ii") or "\\quad",
+ -- (used.mkiv[k] and "iv") or "\\quad",
+ -- (used.mkvi[k] and "vi") or "\\quad",
+ -- k
+ -- )
context.NC()
for i=1,#types do
- context.Bar(types[i],v[i],c[i],norm(v[i]))
+ context.Bar(types[i],v[i],c[i],f_norm(norm(v[i])))
context.NC()
end
context.NR()
@@ -170,7 +178,7 @@
local c = (what == size and comp[k]) or nope
context.StartUp(k)
for i=1,#types do
- context.Up(types[i],norm(v[i]))
+ context.Up(types[i],f_norm(norm(v[i])))
end
context.StopUp()
end
diff --git a/tex/context/base/s-inf-03.mkiv b/tex/context/base/s-inf-03.mkiv
index 822173d00..a253bed77 100644
--- a/tex/context/base/s-inf-03.mkiv
+++ b/tex/context/base/s-inf-03.mkiv
@@ -8,7 +8,7 @@
\setupbodyfont[dejavu]
-\doifmodeelse {tablet} {
+\doifelsemode {tablet} {
\setuppapersize
[S6,landscape]
@@ -16,7 +16,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 16pt]
+ [MonoBold at 15pt]
\setupbodyfont
[tt,8pt]
@@ -25,7 +25,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 20pt]
+ [MonoBold at 18pt]
\setupbodyfont
[tt]
@@ -159,16 +159,16 @@ local skipglobal = table.tohash {
"_G", "_M", "_ENV", "",
"context", "modules", "global", "arg", "utf", 1,
"_ptbs_", "_pcol_", "_plib_", "_clib_", "_tlib_",
- "kpse",
+ "kpse", "commands",
}
local skipkeys = table.tohash {
"_pcol_", "_plib_", "_clib_", "_tlib_", "_bpnf_", "_ptbs_",
- "_cldf_", "_cldn_",
+ "_cldf_", "_cldn_", "_cldo_",
"_clmb_", "_clme_", "_clmm_", "_clmn_", "_clma_", "_clmh_",
"_G", "_M", "_ENV", "",
+ -- "global", "shortcuts",
"_VERSION", "_COPYRIGHT", "_DESCRIPTION", "_NAME", "_PACKAGE", "__unload",
-
}
local sameglobal = {
@@ -196,6 +196,8 @@ local variant = 1 -- all parents
local variant = 2 -- parent name too
local variant = 3 -- no parents
+local done = { }
+
local function childtables(key,tab,handler,depth)
depth = depth or 1
local keys = sortedkeys(tab) -- no sorted_pairs
@@ -214,6 +216,10 @@ local function childtables(key,tab,handler,depth)
t = "data"
handler(s,t,depth)
else
+if done[v] then
+ -- logs.report("inf-03","key %a in %a already done",k,v)
+else
+ done[v] = true
handler(s,t,depth)
if variant == 3 then
childtables(false,v,handler,depth+1)
@@ -223,6 +229,7 @@ local function childtables(key,tab,handler,depth)
childtables(s,v,handler,depth+1)
end
end
+end
else
handler(s,t,depth)
end
@@ -245,6 +252,7 @@ end
local function show(title,subtitle,alias,builtin,t,lib,libcolor,glo,glocolor,mark,obsolete)
-- todo: table as argument
+-- print(title,subtitle,alias,builtin,t,lib,libcolor,glo,glocolor,mark,obsolete)
local keys = sortedkeys(t) -- no sorted_pairs
if #keys > 0 then
local fulltitle = title
@@ -341,13 +349,20 @@ end
show("global","",sameglobal.global,false,_G,builtin,"darkgreen",globals,"darkblue",false,obsolete)
+-- inspect(table.sortedkeys(context))
+
for k, v in table.sortedpairs(_G) do
if not skipglobal[k] and not obsolete[k] and type(v) == "table" and not marked(v) then
+
+ -- local mt = getmetatable(v)
+ -- print("!!!!!!!!!!",k,v,mt,mt and mt.__index)
+
if basiclua[k] then show(k,"basic lua",sameglobal[k],basiclua[k],v,builtin[k],"darkred", false,false,true)
elseif extralua[k] then show(k,"extra lua",sameglobal[k],extralua[k],v,builtin[k],"darkred", false,false,true)
elseif basictex[k] then show(k,"basic tex",sameglobal[k],basictex[k],v,builtin[k],"darkred", false,false,true)
elseif extratex[k] then show(k,"extra tex",sameglobal[k],extratex[k],v,builtin[k],"darkred", false,false,true)
- else show(k,"context", sameglobal[k],false, v,builtin[k],"darkyellow",false,false,true)
+ else
+ show(k,"context", sameglobal[k],false, v,builtin[k],"darkyellow",false,false,true)
end
end
end
diff --git a/tex/context/base/s-languages-hyphenation.lua b/tex/context/base/s-languages-hyphenation.lua
index 660392f80..c5a4f91f1 100644
--- a/tex/context/base/s-languages-hyphenation.lua
+++ b/tex/context/base/s-languages-hyphenation.lua
@@ -13,8 +13,8 @@ local a_colormodel = attributes.private('colormodel')
local nodecodes = nodes.nodecodes
local nodepool = nodes.pool
-local disc = nodecodes.disc
-local glyph = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glyph_code = nodecodes.glyph
local emwidths = fonts.hashes.emwidths
local exheights = fonts.hashes.exheights
local newkern = nodepool.kern
@@ -23,8 +23,8 @@ local newglue = nodepool.glue
local insert_node_after = node.insert_after
local traverse_by_id = node.traverse_id
-local hyphenate = lang.hyphenate
-local find_tail = node.slide
+local hyphenate = languages.hyphenators.handler -- lang.hyphenate
+local find_tail = node.tail
local remove_node = nodes.remove
local tracers = nodes.tracers
@@ -36,11 +36,11 @@ local function identify(head,marked)
while current do
local id = current.id
local next = current.next
- if id == disc then
- if prev and next.id == glyph then -- catch other usage of disc
+ if id == disc_code then
+ if prev and next then -- and next.id == glyph_code then -- catch other usage of disc
marked[#marked+1] = prev
end
- elseif id == glyph then
+ elseif id == glyph_code then
prev = current
end
current = next
@@ -81,10 +81,10 @@ function moduledata.languages.hyphenation.showhyphens(head)
local m = { }
local l = langs[i]
marked[i] = m
- for n in traverse_by_id(glyph,head) do
+ for n in traverse_by_id(glyph_code,head) do
n.lang = l
end
- hyphenate(head,find_tail(head))
+ languages.hyphenators.methods.original(head)
identify(head,m)
strip(head,m)
end
diff --git a/tex/context/base/s-languages-hyphenation.mkiv b/tex/context/base/s-languages-hyphenation.mkiv
index 769c3d059..6662dbf2f 100644
--- a/tex/context/base/s-languages-hyphenation.mkiv
+++ b/tex/context/base/s-languages-hyphenation.mkiv
@@ -26,7 +26,7 @@
{\begingroup
\par
% \language\zerocount
- \setupalign[\v!nothyphenated]%
+ % \setupalign[\v!nothyphenated]%
\ctxlua{moduledata.languages.hyphenation.startcomparepatterns("#1")}}
\unexpanded\def\stopcomparepatterns
@@ -56,13 +56,15 @@
\starttext
+\def|#1|{-}
+
\startsubject{Normal text}
\input tufte
\stopsubject
\startsubject{Compare hyphenation points of \showcomparepatternslegend[en,de]}
\startcomparepatterns
- \input tufte \quad (\showcomparepatternslegend)
+ \input tufte \quad (\showcomparepatternslegend)
\stopcomparepatterns
\stopsubject
diff --git a/tex/context/base/s-languages-system.lua b/tex/context/base/s-languages-system.lua
index 5afc4d403..3b422db9f 100644
--- a/tex/context/base/s-languages-system.lua
+++ b/tex/context/base/s-languages-system.lua
@@ -10,26 +10,53 @@ moduledata.languages = moduledata.languages or { }
moduledata.languages.system = moduledata.languages.system or { }
local NC, NR, HL = context.NC, context.NR, context.HL
+local sortedhash = table.sortedhash
+local registered = languages.registered
+local context = context
+local ctx_NC = context.NC
+local ctx_NR = context.NR
+local ctx_bold = context.bold
+
+function moduledata.languages.system.loadinstalled()
+ context.start()
+ for k, v in table.sortedhash(registered) do
+ context.language{ k }
+ end
+ context.stop()
+end
function moduledata.languages.system.showinstalled()
- local numbers = languages.numbers
- local registered = languages.registered
- context.starttabulate { "|r|l|l|l|l|" }
- NC() context("id")
- NC() context("tag")
- NC() context("synonyms")
- NC() context("parent")
- NC() context("loaded")
- NC() NR() HL()
- for i=1,#numbers do
- local tag = numbers[i]
- local data = registered[tag]
- NC() context(data.number)
- NC() context(tag)
- NC() context("% t",table.sortedkeys(data.synonyms))
- NC() context(data.parent)
- NC() context("%+t",table.sortedkeys(data.used))
- NC() NR()
+ --
+ context.starttabulate { "|l|r|l|l|p(7em)|r|p|" }
+ context.FL()
+ ctx_NC() ctx_bold("tag")
+ ctx_NC() ctx_bold("n")
+ ctx_NC() ctx_bold("parent")
+ ctx_NC() ctx_bold("file")
+ ctx_NC() ctx_bold("synonyms")
+ ctx_NC() ctx_bold("patterns")
+ ctx_NC() ctx_bold("characters")
+ ctx_NC() ctx_NR()
+ context.FL()
+ for k, v in sortedhash(registered) do
+ local parent = v.parent
+ local resources = v.resources
+ local patterns = resources and resources.patterns
+ ctx_NC() context(k)
+ ctx_NC() context(v.number)
+ ctx_NC() context(v.parent)
+ ctx_NC() context(v.patterns)
+ ctx_NC() for k, v in sortedhash(v.synonyms) do context("%s\\par",k) end
+ if patterns then
+ ctx_NC() context(patterns.n)
+ ctx_NC() context("% t",utf.split(patterns.characters))
+ else
+ ctx_NC()
+ ctx_NC()
+ end
+ ctx_NC() ctx_NR()
end
+ context.LL()
context.stoptabulate()
+ --
end
diff --git a/tex/context/base/s-languages-system.mkiv b/tex/context/base/s-languages-system.mkiv
index 363720374..22991f264 100644
--- a/tex/context/base/s-languages-system.mkiv
+++ b/tex/context/base/s-languages-system.mkiv
@@ -16,6 +16,7 @@
\registerctxluafile{s-languages-system}{}
\installmodulecommandluasingle \showinstalledlanguages {moduledata.languages.system.showinstalled}
+\installmodulecommandluasingle \loadinstalledlanguages {moduledata.languages.system.loadinstalled}
\stopmodule
@@ -25,6 +26,7 @@
\starttext
+ \loadinstalledlanguages
\showinstalledlanguages
\stoptext
diff --git a/tex/context/base/s-languages-words.lua b/tex/context/base/s-languages-words.lua
new file mode 100644
index 000000000..ea7aee87b
--- /dev/null
+++ b/tex/context/base/s-languages-words.lua
@@ -0,0 +1,32 @@
+if not modules then modules = { } end modules ['s-languages-words'] = {
+ version = 1.001,
+ comment = "companion to s-languages-words.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.languages = moduledata.languages or { }
+moduledata.languages.words = moduledata.languages.words or { }
+
+function moduledata.languages.words.showwords(specification)
+ local filename = specification.filename or file.addsuffix(tex.jobname,"words")
+ if lfs.isfile(filename) then
+ local w = dofile(filename)
+ if w then
+ -- table.print(w)
+ for cname, category in table.sortedpairs(w.categories) do
+ for lname, language in table.sortedpairs(category.languages) do
+ context.bold(string.format("category: %s, language: %s, total: %s, unique: %s:",
+ cname, lname, language.total or 0, language.unique or 0)
+ )
+ for word, n in table.sortedpairs(language.list) do
+ context(" %s (%s)",word,n)
+ end
+ context.par()
+ end
+ end
+ end
+ end
+end
+
diff --git a/tex/context/base/s-languages-words.mkiv b/tex/context/base/s-languages-words.mkiv
new file mode 100644
index 000000000..4e350bf34
--- /dev/null
+++ b/tex/context/base/s-languages-words.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=s-languages-words,
+%D version=2010.10.21,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Language Environment 3,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is used in the test suite.
+
+\startmodule[languages-words]
+
+\registerctxluafile{s-languages-words}{}
+
+\installmodulecommandluasingle \showwords {moduledata.languages.words.showwords}
+
+\stopmodule
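A minimal usage sketch for the new \showwords command (illustrative only, not part of the patch; the filename key mirrors specification.filename in s-languages-words.lua above, and the explicit file name is just a placeholder):

\usemodule[s-languages-words]

\starttext
    % expects a word list from a previous checked run; defaults to \jobname.words when omitted
    \showwords[filename=myproject.words]
\stoptext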
diff --git a/tex/context/base/s-map-10.mkiv b/tex/context/base/s-map-10.mkiv
index b1218f6e2..c7541babc 100644
--- a/tex/context/base/s-map-10.mkiv
+++ b/tex/context/base/s-map-10.mkiv
@@ -172,7 +172,7 @@
\def\hfontii{\ssbf}
\def\hfontiii{\rm\it}
-\doifmodeelse{nosubsub}{%
+\doifelsemode{nosubsub}{%
\setuphead [section][%
style=\hfontii,
before={\blank[line]},
@@ -201,7 +201,7 @@
before={\blank[halfline]}%
]}
-\doifmodeelse{nosubsub}{%
+\doifelsemode{nosubsub}{%
\setuphead [subject][%
style=\hfontii,
before={\blank[halfline]},
@@ -348,10 +348,10 @@
#1]%
\doifnothing{\MapsPeriod}{%
\ifnum \normalmonth<6 \gdef\MapsPeriod{VOORJAAR}\else \gdef\MapsPeriod{NAJAAR}\fi}
- \doifinstringelse{oorjaar}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
- \doifinstringelse{pring}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
- \doifinstringelse{ajaar}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
- \doifinstringelse{utumn}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
+ \doifelseinstring{oorjaar}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
+ \doifelseinstring{pring}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
+ \doifelseinstring{ajaar}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
+ \doifelseinstring{utumn}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
\doifnothing{\MapsYear}{\gdef\MapsYear{\the\year}}%
\doifnothing{\MapsNumber}{%
\ifnum \normalmonth<6
@@ -441,9 +441,9 @@
%%%%%%%%%%%
-\doifmodeelse{onecolumn}{%
+\doifelsemode{onecolumn}{%
\setuplayout[width=340pt]
- \doifmodeelse{asym}{% one col, asymmetric
+ \doifelsemode{asym}{% one col, asymmetric
\setuplayout[backspace=187.3pt]%
\setuptyping [widetyping][oddmargin=-117pt]
\setuppagenumbering [alternative={singlesided,doublesided}]
diff --git a/tex/context/base/s-math-characters.lua b/tex/context/base/s-math-characters.lua
index b0a79fcb6..8ff3a8660 100644
--- a/tex/context/base/s-math-characters.lua
+++ b/tex/context/base/s-math-characters.lua
@@ -16,23 +16,26 @@ local lower = string.lower
local utfchar = utf.char
local round = math.round
-local fontdata = fonts.hashes.identifiers
-local chardata = characters.data
+local fontdata = fonts.hashes.identifiers
+local chardata = characters.data
+local blocks = characters.blocks
local no_description = "no description, private to font"
-local limited = true
-local fillinthegaps = true
-local upperlimit = 0x0007F
-local upperlimit = 0xF0000
+local limited = true
+local fillinthegaps = true
+local upperlimit = 0x0007F
+local upperlimit = 0xF0000
-local f_unicode = string.formatters["%U"]
-local f_slot = string.formatters["%s/%0X"]
+local f_unicode = string.formatters["%U"]
+local f_slot = string.formatters["%s/%0X"]
function moduledata.math.characters.showlist(specification)
- specification = interfaces.checkedspecification(specification)
- local id = specification.number -- or specification.id
- local list = specification.list
+ specification = interfaces.checkedspecification(specification)
+ local id = specification.number -- or specification.id
+ local list = specification.list
+ local showvirtual = specification.virtual == "all"
+ local check = specification.check == "yes"
if not id then
id = font.current()
end
@@ -70,137 +73,170 @@ function moduledata.math.characters.showlist(specification)
names[k] = (name and file.basename(name)) or id
end
end
- context.showmathcharactersstart()
- for _, unicode in next, sorted do
- if not limited or unicode < upperlimit then
- local code = gaps[unicode] or unicode
- local char = characters[code]
- local desc = descriptions[code]
- local info = chardata[code]
- if char then
- local next_sizes = char.next
- local v_variants = char.vert_variants
- local h_variants = char.horiz_variants
- local commands = char.commands
- local slookups = desc and desc.slookups
- local mlookups = desc and desc.mlookups
- local mathclass = info.mathclass
- local mathspec = info.mathspec
- local mathsymbol = info.mathsymbol
- local description = info.description or no_description
- context.showmathcharactersstartentry()
- context.showmathcharactersreference(f_unicode(unicode))
- context.showmathcharactersentryhexdectit(f_unicode(code),code,lower(description))
- context.showmathcharactersentrywdhtdpic(round(char.width or 0),round(char.height or 0),round(char.depth or 0),round(char.italic or 0))
- if virtual and commands then
- local t = { }
- for i=1,#commands do
- local ci = commands[i]
- if ci[1] == "slot" then
- local fnt, idx = ci[2], ci[3]
- t[#t+1] = f_slot(names[fnt] or fnt,idx)
+ if check then
+ for k, v in table.sortedhash(blocks) do
+ if v.math then
+ local first = v.first
+ local last = v.last
+ local f, l = 0, 0
+ if first and last then
+ for unicode=first,last do
+ local code = gaps[unicode] or unicode
+ local char = characters[code]
+ if char and not (char.commands and not showvirtual) then
+ f = unicode
+ break
end
end
- if #t > 0 then
- context.showmathcharactersentryresource(concat(t,", "))
- end
- end
- if mathclass or mathspec then
- context.showmathcharactersstartentryclassspec()
- if mathclass then
- context.showmathcharactersentryclassname(mathclass,info.mathname or "no name")
- end
- if mathspec then
- for i=1,#mathspec do
- local mi = mathspec[i]
- context.showmathcharactersentryclassname(mi.class,mi.name or "no name")
+ for unicode=last,first,-1 do
+ local code = gaps[unicode] or unicode
+ local char = characters[code]
+ if char and not (char.commands and not showvirtual) then
+ l = unicode
+ break
end
end
- context.showmathcharactersstopentryclassspec()
+ context.showmathcharacterssetrange(k,f,l)
end
- if mathsymbol then
- context.showmathcharactersentrysymbol(f_unicode(mathsymbol),mathsymbol)
- end
- if next_sizes then
- local n, done = 0, { }
- context.showmathcharactersstartnext()
- while next_sizes do
- n = n + 1
- if done[next_sizes] then
- context.showmathcharactersnextcycle(n)
- break
- else
- done[next_sizes] = true
- context.showmathcharactersnextentry(n,f_unicode(next_sizes),next_sizes)
- next_sizes = characters[next_sizes]
- v_variants = next_sizes.vert_variants or v_variants
- h_variants = next_sizes.horiz_variants or h_variants
- if next_sizes then
- next_sizes = next_sizes.next
+ end
+ end
+ else
+ context.showmathcharactersstart()
+ for _, unicode in next, sorted do
+ if not limited or unicode < upperlimit then
+ local code = gaps[unicode] or unicode
+ local char = characters[code]
+ local desc = descriptions[code]
+ local info = chardata[code]
+ if char then
+ local commands = char.commands
+ if commands and not showvirtual then
+ -- skip
+ else
+ local next_sizes = char.next
+ local v_variants = char.vert_variants
+ local h_variants = char.horiz_variants
+ local slookups = desc and desc.slookups
+ local mlookups = desc and desc.mlookups
+ local mathclass = info.mathclass
+ local mathspec = info.mathspec
+ local mathsymbol = info.mathsymbol
+ local description = info.description or no_description
+ context.showmathcharactersstartentry()
+ context.showmathcharactersreference(f_unicode(unicode))
+ context.showmathcharactersentryhexdectit(f_unicode(code),code,lower(description))
+ context.showmathcharactersentrywdhtdpic(round(char.width or 0),round(char.height or 0),round(char.depth or 0),round(char.italic or 0))
+ if virtual and commands then
+ local t = { }
+ for i=1,#commands do
+ local ci = commands[i]
+ if ci[1] == "slot" then
+ local fnt, idx = ci[2], ci[3]
+ t[#t+1] = f_slot(names[fnt] or fnt,idx)
+ end
+ end
+ if #t > 0 then
+ context.showmathcharactersentryresource(concat(t,", "))
end
end
- end
- context.showmathcharactersstopnext()
- if h_variants or v_variants then
- context.showmathcharactersbetweennextandvariants()
- end
- end
- if h_variants then
- context.showmathcharactersstarthvariants()
- for i=1,#h_variants do -- we might go top-down in the original
- local vi = h_variants[i]
- context.showmathcharactershvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
- end
- context.showmathcharactersstophvariants()
- elseif v_variants then
- context.showmathcharactersstartvvariants()
- for i=1,#v_variants do
- local vi = v_variants[#v_variants-i+1]
- context.showmathcharactersvvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
- end
- context.showmathcharactersstopvvariants()
- end
- if slookups or mlookups then
- local variants = { }
- if slookups then
- for lookupname, lookupdata in next, slookups do
- local lookuptype = lookuptypes[lookupname]
- if lookuptype == "substitution" then
- variants[lookupdata] = "sub"
- elseif lookuptype == "alternate" then
- for i=1,#lookupdata do
- variants[lookupdata[i]] = "alt"
+ if mathclass or mathspec then
+ context.showmathcharactersstartentryclassspec()
+ if mathclass then
+ context.showmathcharactersentryclassname(mathclass,info.mathname or "no name")
+ end
+ if mathspec then
+ for i=1,#mathspec do
+ local mi = mathspec[i]
+ context.showmathcharactersentryclassname(mi.class,mi.name or "no name")
end
end
+ context.showmathcharactersstopentryclassspec()
end
- end
- if mlookups then
- for lookupname, lookuplist in next, mlookups do
- local lookuptype = lookuptypes[lookupname]
- for i=1,#lookuplist do
- local lookupdata = lookuplist[i]
- local lookuptype = lookuptypes[lookupname]
- if lookuptype == "substitution" then
- variants[lookupdata] = "sub"
- elseif lookuptype == "alternate" then
- for i=1,#lookupdata do
- variants[lookupdata[i]] = "alt"
+ if mathsymbol then
+ context.showmathcharactersentrysymbol(f_unicode(mathsymbol),mathsymbol)
+ end
+ if next_sizes then
+ local n, done = 0, { }
+ context.showmathcharactersstartnext()
+ while next_sizes do
+ n = n + 1
+ if done[next_sizes] then
+ context.showmathcharactersnextcycle(n)
+ break
+ else
+ done[next_sizes] = true
+ context.showmathcharactersnextentry(n,f_unicode(next_sizes),next_sizes)
+ next_sizes = characters[next_sizes]
+ v_variants = next_sizes.vert_variants or v_variants
+ h_variants = next_sizes.horiz_variants or h_variants
+ if next_sizes then
+ next_sizes = next_sizes.next
end
end
end
+ context.showmathcharactersstopnext()
+ if h_variants or v_variants then
+ context.showmathcharactersbetweennextandvariants()
+ end
end
+ if h_variants then
+ context.showmathcharactersstarthvariants()
+ for i=1,#h_variants do -- we might go top-down in the original
+ local vi = h_variants[i]
+ context.showmathcharactershvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
+ end
+ context.showmathcharactersstophvariants()
+ elseif v_variants then
+ context.showmathcharactersstartvvariants()
+ for i=1,#v_variants do
+ local vi = v_variants[#v_variants-i+1]
+ context.showmathcharactersvvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
+ end
+ context.showmathcharactersstopvvariants()
+ end
+ if slookups or mlookups then
+ local variants = { }
+ if slookups then
+ for lookupname, lookupdata in next, slookups do
+ local lookuptype = lookuptypes[lookupname]
+ if lookuptype == "substitution" then
+ variants[lookupdata] = "sub"
+ elseif lookuptype == "alternate" then
+ for i=1,#lookupdata do
+ variants[lookupdata[i]] = "alt"
+ end
+ end
+ end
+ end
+ if mlookups then
+ for lookupname, lookuplist in next, mlookups do
+ local lookuptype = lookuptypes[lookupname]
+ for i=1,#lookuplist do
+ local lookupdata = lookuplist[i]
+ local lookuptype = lookuptypes[lookupname]
+ if lookuptype == "substitution" then
+ variants[lookupdata] = "sub"
+ elseif lookuptype == "alternate" then
+ for i=1,#lookupdata do
+ variants[lookupdata[i]] = "alt"
+ end
+ end
+ end
+ end
+ end
+ context.showmathcharactersstartlookupvariants()
+ local i = 0
+ for variant, lookuptype in table.sortedpairs(variants) do
+ i = i + 1
+ context.showmathcharacterslookupvariant(i,f_unicode(variant),variant,lookuptype)
+ end
+ context.showmathcharactersstoplookupvariants()
+ end
+ context.showmathcharactersstopentry()
end
- context.showmathcharactersstartlookupvariants()
- local i = 0
- for variant, lookuptype in table.sortedpairs(variants) do
- i = i + 1
- context.showmathcharacterslookupvariant(i,f_unicode(variant),variant,lookuptype)
- end
- context.showmathcharactersstoplookupvariants()
end
- context.showmathcharactersstopentry()
end
end
+ context.showmathcharactersstop()
end
- context.showmathcharactersstop()
end
diff --git a/tex/context/base/s-math-characters.mkiv b/tex/context/base/s-math-characters.mkiv
index 1c4159544..3b273cb6c 100644
--- a/tex/context/base/s-math-characters.mkiv
+++ b/tex/context/base/s-math-characters.mkiv
@@ -46,6 +46,7 @@
\let\showmathcharactersstartlookupvariants \relax
\let\showmathcharacterslookupvariant \gobblefourarguments
\let\showmathcharactersstoplookupvariants \relax
+ \let\showmathcharacterssetrange \gobblethreearguments
\stopsetups
@@ -126,15 +127,23 @@
\def\module_math_characters_show[#1]%
{\begingroup
- \getdummyparameters[\c!bodyfont=,\c!list=,\c!alternative=default,#1]%
+ \getdummyparameters
+ [\c!bodyfont=,
+ \c!list=,
+ \c!check=,
+ \c!alternative=default,
+ \c!option=\v!all,
+ #1]%
\directsetup{s-math-characters:\dummyparameter\c!alternative}%
\doifelsenothing{\dummyparameter\c!bodyfont}
{\definedfont[MathRoman*math-text]}
{\definedfont[\dummyparameter\c!bodyfont]}%
\dontcomplain
\ctxlua{moduledata.math.characters.showlist {
- number = false,
- list = "\dummyparameter\c!list",
+ number = false,
+ check = "\dummyparameter\c!check",
+ list = "\dummyparameter\c!list",
+ option = "\dummyparameter\c!option",
}}%
\endgroup}
diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua
index a74e24450..3c6080dc3 100644
--- a/tex/context/base/s-math-coverage.lua
+++ b/tex/context/base/s-math-coverage.lua
@@ -6,135 +6,101 @@ if not modules then modules = { } end modules ['s-math-coverage'] = {
license = "see context related readme files"
}
-moduledata.math = moduledata.math or { }
-moduledata.math.coverage = moduledata.math.coverage or { }
-
local utfchar, utfbyte = utf.char, utf.byte
local formatters, lower = string.formatters, string.lower
local concat = table.concat
+local sortedhash = table.sortedhash
-local context = context
-local NC, NR, HL = context.NC, context.NR, context.HL
-local char, getglyph, bold = context.char, context.getglyph, context.bold
+moduledata.math = moduledata.math or { }
+moduledata.math.coverage = moduledata.math.coverage or { }
-local ucgreek = {
- 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
- 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
- 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
- 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5,
- 0x03A6, 0x03A7, 0x03A8, 0x03A9
-}
+local context = context
-local lcgreek = {
- 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5,
- 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA,
- 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
- 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4,
- 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9,
- 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1,
- 0x03F4, 0x03F5
-}
+local ctx_NC = context.NC
+local ctx_NR = context.NR
+local ctx_HL = context.HL
-local ucletters = {
- 0x00041, 0x00042, 0x00043, 0x00044, 0x00045,
- 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A,
- 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F,
- 0x00050, 0x00051, 0x00052, 0x00053, 0x00054,
- 0x00055, 0x00056, 0x00057, 0x00058, 0x00059,
- 0x0005A,
-}
+local ctx_startmixedcolumns = context.startmixedcolumns
+local ctx_stopmixedcolumns = context.stopmixedcolumns
+local ctx_setupalign = context.setupalign
+local ctx_starttabulate = context.starttabulate
+local ctx_stoptabulate = context.stoptabulate
+local ctx_rawmathematics = context.formatted.rawmathematics
+local ctx_mathematics = context.formatted.mathematics
+local ctx_startimath = context.startimath
+local ctx_stopimath = context.stopimath
+local ctx_setmathattribute = context.setmathattribute
+local ctx_underbar = context.underbar
+local ctx_getglyph = context.getglyph
-local lcletters = {
- 0x00061, 0x00062, 0x00063, 0x00064, 0x00065,
- 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A,
- 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F,
- 0x00070, 0x00071, 0x00072, 0x00073, 0x00074,
- 0x00075, 0x00076, 0x00077, 0x00078, 0x00079,
- 0x0007A,
-}
+local styles = mathematics.styles
+local alternatives = mathematics.alternatives
+local charactersets = mathematics.charactersets
-local digits = {
- 0x00030, 0x00031, 0x00032, 0x00033, 0x00034,
- 0x00035, 0x00036, 0x00037, 0x00038, 0x00039,
-}
+local getboth = mathematics.getboth
+local remapalphabets = mathematics.remapalphabets
-local styles = {
- "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard"
-}
-
-local alternatives = {
- "normal", "bold", "italic", "bolditalic"
-}
-
-local alphabets = {
- ucletters, lcletters, ucgreek, lcgreek, digits,
-}
-
-local getboth = mathematics.getboth
-local remapalphabets = mathematics.remapalphabets
-
-local chardata = characters.data
-local superscripts = characters.superscripts
-local subscripts = characters.subscripts
+local chardata = characters.data
+local superscripts = characters.superscripts
+local subscripts = characters.subscripts
context.writestatus("math coverage","underline: not remapped")
function moduledata.math.coverage.showalphabets()
- context.starttabulate { "|lT|l|Tl|" }
+ ctx_starttabulate { "|lT|l|Tl|" }
for i=1,#styles do
local style = styles[i]
for i=1,#alternatives do
local alternative = alternatives[i]
- for i=1,#alphabets do
- local alphabet = alphabets[i]
- NC()
+ for _, alphabet in sortedhash(charactersets) do
+ ctx_NC()
if i == 1 then
context("%s %s",style,alternative)
end
- NC()
- context.startimath()
- context.setmathattribute(style,alternative)
+ ctx_NC()
+ ctx_startimath()
+ ctx_setmathattribute(style,alternative)
for i=1,#alphabet do
local letter = alphabet[i]
local id = getboth(style,alternative)
local unicode = remapalphabets(letter,id)
if not unicode then
- context.underbar(utfchar(letter))
+ ctx_underbar(utfchar(letter))
elseif unicode == letter then
context(utfchar(unicode))
else
context(utfchar(unicode))
end
end
- context.stopimath()
- NC()
+ ctx_stopimath()
+ ctx_NC()
local first = alphabet[1]
local last = alphabet[#alphabet]
local id = getboth(style,alternative)
local f_unicode = remapalphabets(first,id) or utfbyte(first)
local l_unicode = remapalphabets(last,id) or utfbyte(last)
context("%05X - %05X",f_unicode,l_unicode)
- NC()
- NR()
+ ctx_NC()
+ ctx_NR()
end
end
end
- context.stoptabulate()
+ ctx_stoptabulate()
end
function moduledata.math.coverage.showcharacters()
- context.startcolumns()
- context.setupalign { "nothyphenated" }
- context.starttabulate { "|T|i2|Tpl|" }
- for u, d in table.sortedpairs(chardata) do
+ ctx_startmixedcolumns { balance = "yes" }
+ ctx_setupalign { "nothyphenated" }
+ ctx_starttabulate { "|T|i2|Tpl|" }
+ for u, d in sortedhash(chardata) do
local mathclass = d.mathclass
local mathspec = d.mathspec
if mathclass or mathspec then
- NC()
+ ctx_NC()
context("%05X",u)
- NC()
- getglyph("MathRoman",u)
- NC()
+ ctx_NC()
+ ctx_getglyph("MathRoman",u)
+ ctx_NC()
if mathspec then
local t = { }
for i=1,#mathspec do
@@ -145,38 +111,87 @@ function moduledata.math.coverage.showcharacters()
else
context(mathclass)
end
- NC()
- NR()
+ ctx_NC()
+ ctx_NR()
end
end
- context.stoptabulate()
- context.stopcolumns()
+ ctx_stoptabulate()
+ ctx_stopmixedcolumns()
end
-- This is a somewhat tricky table as we need to bypass the math machinery.
function moduledata.math.coverage.showscripts()
- context.starttabulate { "|cT|c|cT|c|c|c|l|" }
- for k, v in table.sortedpairs(table.merged(superscripts,subscripts)) do
+ ctx_starttabulate { "|cT|c|cT|c|c|c|l|" }
+ for k, v in sortedhash(table.merged(superscripts,subscripts)) do
local ck = utfchar(k)
local cv = utfchar(v)
local ss = superscripts[k] and "^" or "_"
- NC()
- context("%05X",k)
- NC()
- context(ck)
- NC()
- context("%05X",v)
- NC()
- context(cv)
- NC()
- context.formatted.rawmathematics("x%s = x%s%s",ck,ss,cv)
- NC()
- context.formatted.mathematics("x%s = x%s%s",ck,ss,cv)
- NC()
- context(lower(chardata[k].description))
- NC()
- NR()
+ ctx_NC() context("%05X",k)
+ ctx_NC() context(ck)
+ ctx_NC() context("%05X",v)
+ ctx_NC() context(cv)
+ ctx_NC() ctx_rawmathematics("x%s = x%s%s",ck,ss,cv)
+ ctx_NC() ctx_mathematics("x%s = x%s%s",ck,ss,cv)
+ ctx_NC() context(lower(chardata[k].description))
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+end
+
+-- Handy too.
+
+function moduledata.math.coverage.showbold()
+ ctx_starttabulate { "|lT|cm|lT|cm|lT|" }
+ for k, v in sortedhash(mathematics.boldmap) do
+ ctx_NC() context("%U",k)
+ ctx_NC() context("%c",k)
+ ctx_NC() context("%U",v)
+ ctx_NC() context("%c",v)
+ ctx_NC() context(chardata[k].description)
+ ctx_NC() ctx_NR()
end
- context.stoptabulate()
+ ctx_stoptabulate()
end
+
+-- function moduledata.math.coverage.showentities()
+-- ctx_startmixedcolumns { balance = "yes" }
+-- ctx_starttabulate { "|Tl|c|Tl|" }
+-- for k, v in sortedhash(characters.entities) do
+-- local b = utf.byte(v)
+-- local d = chardata[b]
+-- local m = d.mathname
+-- local c = d.contextname
+-- local s = ((m and "\\"..m) or (c and "\\".. c) or v) .. "{}{}{}"
+-- ctx_NC()
+-- context("%U",b)
+-- ctx_NC()
+-- ctx_mathematics(s)
+-- ctx_NC()
+-- context(k)
+-- ctx_NC()
+-- ctx_NR()
+-- end
+-- ctx_stoptabulate()
+-- ctx_stopmixedcolumns()
+-- end
+
+function moduledata.math.coverage.showentities()
+ ctx_startmixedcolumns { balance = "yes" }
+ ctx_starttabulate { "|T||T|T|" }
+ for k, v in sortedhash(characters.entities) do
+ local d = chardata[v]
+ if d then
+ local m = d.mathclass or d.mathspec
+ local u = d.unicodeslot
+ ctx_NC() context(m and "m" or "t")
+ ctx_NC() ctx_getglyph("MathRoman",u)
+ ctx_NC() context("%05X",u)
+ ctx_NC() context(k)
+ ctx_NC() ctx_NR()
+ end
+ end
+ ctx_stoptabulate()
+ ctx_stopmixedcolumns()
+end
+
diff --git a/tex/context/base/s-math-coverage.mkiv b/tex/context/base/s-math-coverage.mkiv
index d68ffe587..e318c9eff 100644
--- a/tex/context/base/s-math-coverage.mkiv
+++ b/tex/context/base/s-math-coverage.mkiv
@@ -15,9 +15,11 @@
\registerctxluafile{s-math-coverage}{}
-\installmodulecommandluasingle \showmathalphabets {moduledata.math.coverage.showalphabets}
-\installmodulecommandluasingle \showmathcharacters {moduledata.math.coverage.showcharacters}
-\installmodulecommandluasingle \showmathscripts {moduledata.math.coverage.showscripts}
+\installmodulecommandluasingle \showmathalphabets {moduledata.math.coverage.showalphabets}
+\installmodulecommandluasingle \showmathcharacters{moduledata.math.coverage.showcharacters}
+\installmodulecommandluasingle \showmathscripts {moduledata.math.coverage.showscripts}
+\installmodulecommandluasingle \showmathbold {moduledata.math.coverage.showbold}
+\installmodulecommandluasingle \showmathentities {moduledata.math.coverage.showentities}
\stopmodule
@@ -30,5 +32,7 @@
\showmathalphabets \page
\showmathcharacters \page
\showmathscripts \page
+ \showmathbold \page
+ \showmathentities \page
\stoptext
diff --git a/tex/context/base/s-math-repertoire.mkiv b/tex/context/base/s-math-repertoire.mkiv
index a66d7fc6d..230eb513e 100644
--- a/tex/context/base/s-math-repertoire.mkiv
+++ b/tex/context/base/s-math-repertoire.mkiv
@@ -39,7 +39,8 @@
% \setuplayout
% [page]
-\setuppapersize[HD+]
+\setuppapersize
+ [HD+]
\setuplayout
[backspace=0pt,
@@ -102,6 +103,16 @@
color=textcolor,
contrastcolor=nonecolor]
+\def\showmathcharacterssetrange#1#2#3%
+ {\writestatus{range}{#1: \unihex{#2} - \unihex{#3}}%
+ \ifcase#2\relax
+ \definereference[#1][notpresent]%
+ \else\ifcase#3\relax
+ \definereference[#1][notpresent]%
+ \else
+ \normalexpanded{\definereference[#1][\unihex{#2}]}%
+ \fi\fi}
+
\startinteractionmenu[bottom]
\startgot [firstpage] first \stopgot \quad
\startgot [deltapage(-100)] -100 \stopgot \quad
@@ -159,6 +170,22 @@
\startgot [U+1D7AA] grk ss bolditalic \stopgot
\stopinteractionmenu
+% \startinteractionmenu[symbols]
+% \startgot [U+00030] dig normal \stopgot \quad
+% \startgot [U+1D7CE] dig bold \stopgot \quad
+% \startgot [U+1D7D8] dig doublestruck \stopgot \quad
+% \startgot [U+1D7E2] dig ss normal \stopgot \quad
+% \startgot [U+1D7EC] dig ss bold \stopgot \quad
+% \startgot [U+1D7F6] dig monospace \stopgot \quad
+% \startgot [U+02200] operators \stopgot \quad
+% \startgot [U+02701] symbols a \stopgot \quad
+% \startgot [U+02901] symbols b \stopgot \quad
+% \startgot [U+02A00] supplemental \stopgot \quad
+% \startgot [U+027F0] arrows a \stopgot \quad
+% \startgot [U+02900] arrows b \stopgot \quad
+% \startgot [U+1F800] arrows c \stopgot
+% \stopinteractionmenu
+
\startinteractionmenu[symbols]
\startgot [U+00030] dig normal \stopgot \quad
\startgot [U+1D7CE] dig bold \stopgot \quad
@@ -167,9 +194,13 @@
\startgot [U+1D7EC] dig ss bold \stopgot \quad
\startgot [U+1D7F6] dig monospace \stopgot \quad
\startgot [U+02200] operators \stopgot \quad
- \startgot [U+02701] symbols a \stopgot \quad
- \startgot [U+02901] symbols b \stopgot \quad
- \startgot [U+02A00] supplemental \stopgot
+ \startgot [miscellaneousmathematicalsymbolsa] symbols a \stopgot \quad
+ \startgot [miscellaneousmathematicalsymbolsb] symbols b \stopgot \quad
+ \startgot [supplementalmathematicaloperators] supplemental \stopgot \quad
+ \startgot [supplementalarrowsa] arrows a \stopgot \quad
+ \startgot [supplementalarrowsb] arrows b \stopgot \quad
+ \startgot [supplementalarrowsc] arrows c \stopgot \quad
+ \removeunwantedspaces
\stopinteractionmenu
\defineframed
@@ -227,7 +258,7 @@
\showmathcharactersmth{10}{#1}%
\endgroup
\vfilll
- \doifmodeelse{crosslink}
+ \doifelsemode{crosslink}
{\goto{\strut\textcolor\showmathcharacterstxt{#2}}[#2::#1]}%
{\strut\textcolor\showmathcharacterstxt{#2}}}%
\hskip1ex}
@@ -404,13 +435,17 @@
% main
+% this is a one-run style so we can forget about an alternative
+% just assume that the previous definitions are global
+
\unprotect
\unexpanded\def\showmathfontrepertoire
{\dosingleempty\module_math_repertoire_show}
-\def\module_math_repertoire_show[#1]% % this is a one-run style so we can forget about an alternative
- {\showmathfontcharacters[\c!alternative=,#1]} % just assume that the previous definitions are global
+\def\module_math_repertoire_show[#1]%
+ {\showmathfontcharacters[alternative=,option=,check=yes,#1]
+ \showmathfontcharacters[alternative=,option=,#1]}
\protect
@@ -418,13 +453,13 @@
\continueifinputfile{s-math-repertoire.mkiv}
-\showmathcharacterssetbodyfonts{lucidanova,cambria,xits,modern,pagella,termes,bonum}
+\showmathcharacterssetbodyfonts{lucidaot,cambria,xits,modern,pagella,termes,bonum,schola,dejavu}
\starttext
\doifelse {\getdocumentargument{bodyfont}} {} {
- \setupbodyfont[cambria, 12pt]
+ % \setupbodyfont[cambria, 12pt]
% \setupbodyfont[modern, 12pt]
% \setupbodyfont[lmvirtual, 12pt]
% \setupbodyfont[pxvirtual, 12pt]
@@ -434,9 +469,11 @@
% \setupbodyfont[stix, 12pt]
% \setupbodyfont[xits, 12pt]
% \setupbodyfont[lucida, 12pt]
- % \setupbodyfont[lucidanova,12pt]
+ % \setupbodyfont[lucidaot, 12pt]
% \setupbodyfont[pagella, 12pt]
% \setupbodyfont[bonum, 12pt]
+ % \setupbodyfont[schola, 12pt]
+ \setupbodyfont[dejavu, 12pt]
} {
diff --git a/tex/context/base/s-pre-17.mkiv b/tex/context/base/s-pre-17.mkiv
index 9505faa6b..9c46b4ed7 100644
--- a/tex/context/base/s-pre-17.mkiv
+++ b/tex/context/base/s-pre-17.mkiv
@@ -194,7 +194,7 @@
\egroup
\setbox\scratchbox\vbox\bgroup
\vskip100pt
- \doifmodeelse {SpreadPage} {
+ \doifelsemode {SpreadPage} {
\hbox spread 200pt
} {
\hbox to \wd\scratchbox
diff --git a/tex/context/base/s-references-show.mkiv b/tex/context/base/s-references-show.mkiv
new file mode 100644
index 000000000..72cccce54
--- /dev/null
+++ b/tex/context/base/s-references-show.mkiv
@@ -0,0 +1,132 @@
+%D \module
+%D [ file=s-references-show.mkiv,
+%D version=2015.04.13,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Reference Checking,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startmodule[references-show]
+
+\unprotect
+
+% \enabletrackers[nodes.references,nodes.destinations] % also shows areas
+
+\enabletrackers[nodes.references.show,nodes.destinations.show]
+
+\enablehiddenbackground % trick
+
+\edef\hiddenbackgroundlist{\hiddenbackgroundlist,trace-references}
+
+\defineoverlay
+ [trace-references]
+ [\directsetup{trace:references:onpage}]
+
+\startluacode
+ local pagelist = structures.references.tracedpages
+
+ function commands.getreferencesonpage(n)
+ n = tonumber(n)
+ if n then
+ local pagedata = pagelist[n]
+ if pagedata then
+ context("%s references",#pagedata)
+ context.blank()
+ for i=1,#pagedata do
+ local details = pagedata[i]
+ local prefix = details[1]
+ local reference = details[2]
+ -- local internal = details[3]
+ -- context("%04i = %s : %s",internal,prefix == "" and "-" or prefix,reference)
+ context("%s : %s",prefix == "" and "-" or prefix,reference)
+ context.par()
+ end
+ else
+ context("no references")
+ end
+ else
+ context("no valid page")
+ end
+ end
+
+\stopluacode
+
+\definecolor
+ [trace:references:onpage]
+ [b=.5,a=1,t=.25]
+
+\defineframed
+ [trace:references:onpage]
+ [\c!offset=2\exheight,
+ %\c!foregroundstyle=\infofont,
+ \c!frame=\v!off,
+ \c!background=\v!color,
+ \c!backgroundcolor=trace:references:onpage,
+ \c!align=\v!normal]
+
+\startsetups trace:references:onpage
+ \vbox to \vsize \bgroup
+ \infofont
+ \vskip\dimexpr-\topspace-\headerheight+2\exheight\relax
+ \hbox to \hsize \bgroup
+ \doifoddpageelse\hss{\hskip\dimexpr-\cutspace+2\exheight\relax}%
+ \directlocalframed [
+ trace:references:onpage
+ ] {
+ \ctxcommand{getreferencesonpage(\the\realpageno)}
+ }
+ \doifoddpageelse{\hskip\dimexpr-\cutspace+2\exheight\relax}\hss
+ \egroup
+ \vss
+ \egroup
+\stopsetups
+
+\protect
+
+\continueifinputfile{s-references-show.mkiv}
+
+\usemodule[art-01]
+
+\setupinteraction
+ [state=start]
+
+\setuppagenumbering
+ [alternative=doublesided]
+
+\starttext
+
+\title {Contents}
+
+\placelist[chapter]
+
+\setupreferenceprefix[zero]
+
+\chapter[crap]{foo}
+
+\setupreferenceprefix[one]
+
+test \pagereference[whatever]
+
+\dorecurse{5}{
+ \placefigure
+ [here][bar 1.#1]
+ {xx}{\framed{xx #1}}
+}
+
+\setupreferenceprefix[two]
+
+\dorecurse{5}{
+ \placefigure
+ [here][bar 2.#1]
+ {xx}{\framed{xx #1}}
+}
+
+\in{checked}[bar 1.1]
+\in{checked}[bar 2.1]
+
+\stoptext
diff --git a/tex/context/base/s-structure-sections.mkiv b/tex/context/base/s-structure-sections.mkiv
new file mode 100644
index 000000000..daaab5abc
--- /dev/null
+++ b/tex/context/base/s-structure-sections.mkiv
@@ -0,0 +1,80 @@
+%D \module
+%D [ file=s-structure-sections,
+%D version=2015.02.02,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Structure Sections,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\startluacode
+ local context = context
+ local ctx_NC, ctx_NR = context.NC, context.NR
+ local ctx_bold = context.bold
+
+ structures.tracers = structures.tracers or { }
+
+ function structures.tracers.showsections()
+
+ local list = structures.sections.registered
+ local keys = table.keys(list)
+ table.sort(keys,function(a,b)
+ local la, lb = list[a].level, list[b].level
+ if la == lb then
+ return a < b
+ else
+ return la < lb
+ end
+ end)
+ context.start()
+ context.switchtobodyfont { "tt" }
+ context.starttabulate { "||c||||" }
+ context.FL()
+ ctx_NC() ctx_bold("name")
+ ctx_NC() ctx_bold("level")
+ ctx_NC() ctx_bold("parent")
+ ctx_NC() ctx_bold("section")
+ ctx_NC() ctx_bold("coupling")
+ ctx_NC() context.NR()
+ context.ML()
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ ctx_NC() ctx_bold(k)
+ ctx_NC() context(v.level)
+ ctx_NC() context(v.parent)
+ ctx_NC() context(v.section)
+ ctx_NC() context(v.coupling)
+ ctx_NC() context.NR()
+ end
+ context.LL()
+ context.stoptabulate()
+ context.stop()
+
+ end
+\stopluacode
+
+
+\starttexdefinition showstructuresections
+
+ % no settings yet
+
+ \ctxlua{structures.tracers.showsections()}
+
+\stoptexdefinition
+
+\protect
+
+\continueifinputfile{s-structure-sections.mkiv}
+
+\starttext
+
+ \showstructuresections
+
+\stoptext
diff --git a/tex/context/base/s-syn-01.tex b/tex/context/base/s-syntax.mkii
index 01c8f6653..6d1d59697 100644
--- a/tex/context/base/s-syn-01.tex
+++ b/tex/context/base/s-syntax.mkii
@@ -1,5 +1,5 @@
%D \module
-%D [ file=s-syn-01,
+%D [ file=s-syntax, % was: s-syn-01,
%D version=0000.00.00,
%D title=\CONTEXT\ Style File,
%D subtitle=Preliminary Syntax Stuff,
diff --git a/tex/context/base/s-syntax.mkiv b/tex/context/base/s-syntax.mkiv
new file mode 100644
index 000000000..96312f771
--- /dev/null
+++ b/tex/context/base/s-syntax.mkiv
@@ -0,0 +1,96 @@
+%D \module
+%D [ file=s-syntax, % was: s-syn-01,
+%D version=0000.00.00,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Preliminary Syntax Stuff,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is needed for the \METAFUN\ manual: quite old code that I would do
+%D differently nowadays.
+
+\unprotect
+
+\unexpanded\def\module_syntax_Indent #1{\ifvmode\noindent\hbox to 2em{\hss#1}\else#1\fi}
+\unexpanded\def\module_syntax_Sugar #1{\removeunwantedspaces\kern.25em{#1}\kern.25em\ignorespaces}
+\unexpanded\def\module_syntax_Something #1{\Sugar{\mathematics{\langle\hbox{#1}\rangle}}}
+\unexpanded\def\module_syntax_Lbrace {\Sugar{\tttf\leftargument}}
+\unexpanded\def\module_syntax_Rbrace {\Sugar{\tttf\rightargument}}
+\unexpanded\def\module_syntax_Lparent {\Sugar{\tttf(}}
+\unexpanded\def\module_syntax_Rparent {\Sugar{\tttf)}}
+\unexpanded\def\module_syntax_Lbracket {\Sugar{\tttf[}}
+\unexpanded\def\module_syntax_Rbracket {\Sugar{\tttf]}}
+\unexpanded\def\module_syntax_Or {\Sugar{\mathematics{\vert}}}
+\unexpanded\def\module_syntax_Optional #1{\Sugar{\mathematics{[\hbox{#1}]}}}
+\unexpanded\def\module_syntax_Means {\Sugar{\mathematics{\rightarrow}}}
+\unexpanded\def\module_syntax_Tex #1{\Sugar{\type{#1}}}
+\unexpanded\def\module_syntax_Literal #1{\Sugar{\type{#1}}}
+\unexpanded\def\module_syntax_Syntax #1{\strut\kern-.25em{#1}\kern-.25em}
+\unexpanded\def\module_syntax_Next {\crlf\hbox to 2em{}\nobreak}
+\unexpanded\def\module_syntax_Whatever #1{\Sugar{\mathematics{(\hbox{#1})}}}
+\unexpanded\def\module_syntax_Quote #1{\Sugar{\quote{#1}}}
+\unexpanded\def\module_syntax_Or {\Sugar{\module_syntax_Indent{\mathematics{\vert}}}}
+\unexpanded\def\module_syntax_Means {\Sugar{\module_syntax_Indent{\mathematics{\rightarrow}}}}
+\unexpanded\def\module_syntax_FlaggedLiteral #1{\color[darkred]{\module_syntax_Literal{#1}}}
+\unexpanded\def\module_syntax_FlaggedSomething#1{\module_syntax_Something{\color[darkred]{#1}}}
+
+\unexpanded\def\StartSyntax
+ {\startlines
+ % formatters
+ \let\Indent \module_syntax_Indent
+ \let\Sugar \module_syntax_Sugar
+ \let\Something \module_syntax_Something
+ \let\Lbrace \module_syntax_Lbrace
+ \let\Rbrace \module_syntax_Rbrace
+ \let\Lparent \module_syntax_Lparent
+ \let\Rparent \module_syntax_Rparent
+ \let\Lbracket \module_syntax_Lbracket
+ \let\Rbracket \module_syntax_Rbracket
+ \let\Or \module_syntax_Or
+ \let\Optional \module_syntax_Optional
+ \let\Means \module_syntax_Means
+ \let\Tex \module_syntax_Tex
+ \let\Literal \module_syntax_Literal
+ \let\Syntax \module_syntax_Syntax
+ \let\Next \module_syntax_Next
+ \let\Whatever \module_syntax_Whatever
+ \let\Quote \module_syntax_Quote
+ \let\Or \module_syntax_Or
+ \let\Means \module_syntax_Means
+ \let\FlaggedLiteral \module_syntax_FlaggedLiteral
+ \let\FlaggedSomething\module_syntax_FlaggedSomething
+ % shortcuts
+ \let\FL \module_syntax_FlaggedLiteral
+ \let\FS \module_syntax_FlaggedSomething
+ \let\L \module_syntax_Literal
+ \let\S \module_syntax_Something
+ \let\M \module_syntax_Means
+ \let\O \module_syntax_Or
+ \let\Q \module_syntax_Quote
+ \let\LB \module_syntax_Lbrace
+ \let\RB \module_syntax_Rbrace
+ \let\LP \module_syntax_Lparent
+ \let\RP \module_syntax_Rparent
+ \let\LS \module_syntax_Lbracket
+ \let\RS \module_syntax_Rbracket
+ \let\{ \module_syntax_Lbrace
+ \let\} \module_syntax_Rbrace
+ \let\( \module_syntax_Lparent
+ \let\) \module_syntax_Rparent
+ \let\[ \module_syntax_Lbracket
+ \let\] \module_syntax_Rbracket
+ % precaution
+ \catcode`\#\othercatcode}
+
+\unexpanded\def\StopSyntax
+ {\stoplines}
+
+\unexpanded\def\SyntaxCommand#1%
+ {\csname module_syntax_#1\endcsname}
+
+\protect \endinput
diff --git a/tex/context/base/s-typesetting-kerning.mkiv b/tex/context/base/s-typesetting-kerning.mkiv
new file mode 100644
index 000000000..48d81ce36
--- /dev/null
+++ b/tex/context/base/s-typesetting-kerning.mkiv
@@ -0,0 +1,209 @@
+%D \module
+%D [ file=s-typesetting-kerning,
+%D version=2014.12.14,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Character Kerning,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\definecharacterkerning
+ [typesetting-kerning-demo]
+ [factor=.5]
+
+\startbuffer[showcharacterkerning:boxes]
+ \starttextrule{boxes}
+ \showfontkerns
+ \dontcomplain
+ \startlines
+ test \hbox{!} test
+ test\hbox{!} test
+ test \hbox{!}test
+ test:$x$ test
+ \setcharacterkerning[typesetting-kerning-demo]
+ test \hbox{!} test
+ test\hbox{!} test
+ test \hbox{!}test
+ test:$x$ test
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\startbuffer[showcharacterkerning:ligatures]
+ \starttextrule{ligatures}
+ \dontcomplain
+ \startlines
+ effe flink effectief efficient fietsen
+ \blank
+ \setcharacterkerning[typesetting-kerning-demo]
+ effe flink effectief efficient fietsen
+ \blank \hsize\zeropoint
+ effe
+ flink
+ effectief
+ efficient
+ fietsen
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\startbuffer[showcharacterkerning:discretionaries]
+ \starttextrule{discretionary}
+ \dontcomplain
+ \startlines
+ \hbox{\samplediscretionary}
+ \hbox{xxx\samplediscretionary}
+ \hbox{\samplediscretionary xxx}
+ \hbox{xxx\samplediscretionary xxx}
+ \blank
+ \setcharacterkerning[typesetting-kerning-demo]
+ \hbox{\samplediscretionary}
+ \hbox{xxx\samplediscretionary}
+ \hbox{\samplediscretionary xxx}
+ \hbox{xxx\samplediscretionary xxx}
+ \blank \hsize\zeropoint
+ \samplediscretionary
+ xxx\samplediscretionary
+ \samplediscretionary xxx
+ xxx\samplediscretionary xxx
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\startbuffer[showcharacterkerning:explicits]
+ \starttextrule{explicits}
+ \exhyphenchar \hyphenasciicode
+ \preexhyphenchar \lessthanasciicode
+ \postexhyphenchar\morethanasciicode
+ \def\TestDisc
+ {\discretionary
+ {\kern\emwidth<}%
+ {>\kern\emwidth}%
+ {\kern\emwidth=\kern\emwidth}%
+ }
+ \dontcomplain
+ \startlines
+ \hbox{super-charged}
+ \hbox{super\-charged}
+ \hbox{super\TestDisc charged}
+ \hbox{super\discretionary{[}{]}{[]}charged}
+ \blank
+ \setcharacterkerning[typesetting-kerning-demo]
+ \hbox{super-charged}
+ \hbox{super\-charged}
+ \hbox{super\TestDisc charged}
+ \hbox{super\discretionary{[}{]}{[]}charged}
+ \blank \hsize\zeropoint
+ super-charged
+ super\-charged
+ super\TestDisc charged
+ super\discretionary{[}{]}{[]}charged
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\starttexdefinition unexpanded showcharacterkerning
+ \getbuffer[showcharacterkerning:boxes]
+ \getbuffer[showcharacterkerning:ligatures]
+ \getbuffer[showcharacterkerning:discretionaries]
+ \getbuffer[showcharacterkerning:explicits]
+\stoptexdefinition
+
+
+\starttexdefinition showcharacterkerningstepscompared #1
+ \definecharacterkerning[crap][factor=\KerningStepFactor]%
+ \setbox0=\ruledhbox{\color[color-1]{#1}\hss}
+ \setbox2=\ruledhbox{\setcharacterkerning[crap]\color[color-2]{#1}}
+ \setbox4=\ruledhbox{\setcharacterkerning[crap]\showfontkerns\showglyphs#1}
+ \xdef\KerningStepPercentage{\ctxlua{context("\letterpercent 0.2f",(1-\number\wd0/\number\wd2)*100)}}
+ \scratchwidth\wd0
+ \vtop\bgroup
+ \hbox{\box0\hskip-\scratchwidth\box2}
+ \par
+ \box4
+ \egroup
+\stoptexdefinition
+
+\starttexdefinition showcharacterkerningsteps [#1]
+
+ \start
+
+ \getdummyparameters
+ [\s!font=Regular,
+ \c!sample={Wat een bende, rommelen met het font design!},
+ \c!text={rommelen},
+ \c!first=00,
+ \c!last=95,
+ \c!step=05,
+ \c!option=, % \v!page
+ #1]
+
+ \doif{\dummyparameter\c!option}\v!page {
+ \startTEXpage[\c!offset=1ex]
+ }
+
+ \definecolor[color-1][r=1,t=.5,a=1]
+ \definecolor[color-2][b=1,t=.5,a=1]
+
+ \definedfont[\dummyparameter\s!font*default sa 1]
+
+ \doif {\dummyparameter\c!option}\v!page {
+ \begingroup
+ \tttf \dummyparameter\s!font\space @ default
+ \endgroup
+ \blank
+ }
+
+ \starttabulate[|cT|l|cT|l|cT|]
+
+ \NC \tt\bf factor \NC \tt\bf sample \NC \tt\bf \letterpercent \NC \tt\bf text \NC \tt\bf \letterpercent \NC \NR \HL
+
+ \dostepwiserecurse {\dummyparameter\c!first} {\dummyparameter\c!last} {\dummyparameter\c!step} {
+ \NC
+ \xdef\KerningStepFactor{\ctxlua{context("\letterpercent 0.3f",####1/1000)}}
+ \KerningStepFactor
+ \NC
+ \showcharacterkerningstepscompared{\dummyparameter\c!sample}
+ \NC
+ \KerningStepPercentage
+ \NC
+ \showcharacterkerningstepscompared{\dummyparameter\c!text}
+ \NC
+ \KerningStepPercentage
+ \NC \NR
+ }
+
+ \stoptabulate
+
+ \doif{\dummyparameter\c!option}\v!page {
+ \stopTEXpage
+ }
+
+ \stop
+
+\stoptexdefinition
+
+\protect
+
+\continueifinputfile{s-typesetting-kerning.mkiv}
+
+\starttext
+
+ % \showcharacterkerning
+
+ \showcharacterkerningsteps[font=file:FuturaStd-Book.otf,option=page]
+ \showcharacterkerningsteps[font=file:FuturaStd-Medium.otf,option=page]
+ \showcharacterkerningsteps[font=file:FuturaStd-Bold.otf,option=page]
+ \showcharacterkerningsteps[font=file:FuturaStd-heavy.otf,option=page]
+
+\stoptext
+
+% {\hsize1mm efficient\discretionary{\kern1pt!\kern1pt}{\kern1pt!\kern1pt}{\kern1pt!\kern1pt}efficient\par}
+% {\hsize1mm\definedfont[Regular]\setcharacterkerning[typesetting-kerning-demo]efficient\-efficient\par}
+
diff --git a/tex/context/base/s-youless.mkiv b/tex/context/base/s-youless.mkiv
index 247eb5f64..e15973b9c 100644
--- a/tex/context/base/s-youless.mkiv
+++ b/tex/context/base/s-youless.mkiv
@@ -59,9 +59,10 @@
for y=year,year do
- local year = years[y]
- local scale = 20
- local mark = 3
+ local year = years[y]
+ local scale = 20
+ local mark = 3
+ local maxwatt = specification.maxwatt or year.maxwatt
for m=1,12 do
local month = year.months[m]
@@ -69,7 +70,7 @@
context.startMPpage { offset = "10pt" }
context("linecap := butt; pickup pencircle scaled .5")
- for i=0,(math.div(year.maxwatt,1000)+1)*1000,100 do
+ for i=0,(math.div(maxwatt,1000)+1)*1000,100 do
context("draw (%s,%s) -- (%s,%s) withcolor .6white ;",0,i/scale,31 * 24,i/scale)
end
@@ -123,7 +124,7 @@
context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset,0,xoffset,-10)
end
- local max = (math.div(year.maxwatt,1000)+1)
+ local max = (math.div(maxwatt,1000)+1)
for i=0,max*1000,1000 do
context([[draw textext.lft("%s") shifted (%s,%s) ; ]],i,-10,i/scale)
diff --git a/tex/context/base/scrn-bar.mkvi b/tex/context/base/scrn-bar.mkvi
index 1dadc26f3..8a2f9441c 100644
--- a/tex/context/base/scrn-bar.mkvi
+++ b/tex/context/base/scrn-bar.mkvi
@@ -67,7 +67,7 @@
\def\scrn_bar_direct[#tag][#settings]% somewhat messy
{\iflocation
\begingroup
- \doifassignmentelse{#tag}
+ \doifelseassignment{#tag}
{\let\currentinteractionbar\empty
\setupcurrentinteractionbar[#tag]%
\edef\currentinteractionbar{\interactionbarparameter\c!alternative}}%
diff --git a/tex/context/base/scrn-but.lua b/tex/context/base/scrn-but.lua
index 74f6e0cd9..7d883c910 100644
--- a/tex/context/base/scrn-but.lua
+++ b/tex/context/base/scrn-but.lua
@@ -6,12 +6,10 @@ if not modules then modules = { } end modules ['scrn-but'] = {
license = "see context related readme files"
}
-local commands = commands
local context = context
-
local f_two_colon = string.formatters["%s:%s"]
-function commands.registerbuttons(tag,register,language)
+local function registerbuttons(tag,register,language)
local data = sorters.definitions[language]
     local orders = data and data.orders or sorters.definitions.default.orders
local tag = tag == "" and { "" } or { tag }
@@ -20,3 +18,9 @@ function commands.registerbuttons(tag,register,language)
context.menubutton(tag,f_two_colon(register,order),order)
end
end
+
+interfaces.implement {
+ name = "registerbuttons",
+ actions = registerbuttons,
+ arguments = { "string", "string", "string" }
+}
diff --git a/tex/context/base/scrn-but.mkvi b/tex/context/base/scrn-but.mkvi
index fd2da9e08..3fdaf2c5d 100644
--- a/tex/context/base/scrn-but.mkvi
+++ b/tex/context/base/scrn-but.mkvi
@@ -93,7 +93,7 @@
[\c!state=\v!start,
\c!width=\v!fit,
\c!height=\v!broad,
- \c!offset=0.25em,
+ \c!offset=0.25\emwidth,
\c!frame=\v!on,
\c!background=,
\c!backgroundcolor=,
@@ -159,7 +159,7 @@
\attribute\referenceattribute\attributeunsetvalue
\global\setfalse\c_scrn_button_skipped
\chardef\locationboxpagestate\csname\??buttonlocation#currentparameter\c!samepage\endcsname % ?? bt: todo
- \doifreferencefoundelse{#action}\scrn_button_make_yes\scrn_button_make_nop
+ \doifelsereferencefound{#action}\scrn_button_make_yes\scrn_button_make_nop
#currentparameter%
#inheritedframed%
#letparameter%
@@ -217,12 +217,12 @@
{\global\settrue\c_scrn_button_skipped}
\def\scrn_button_make_normal#currentparameter#inheritedframed#letparameter#setparameter#text%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ {\clf_injectcurrentreference
\hbox attr \referenceattribute \lastreferenceattribute
{#inheritedframed{\ignorespaces#text\removeunwantedspaces}}}
\def\scrn_button_make_contrast#currentparameter#inheritedframed#letparameter#setparameter#text%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ {\clf_injectcurrentreference
\hbox attr \referenceattribute \lastreferenceattribute
{#setparameter\c!foregroundcolor{#currentparameter\c!contrastcolor}%
#inheritedframed{\ignorespaces#text\removeunwantedspaces}}}
@@ -343,12 +343,12 @@
\def\scrn_menu_define[#tag][#category][#settings]% category reflects location, settings can be parent
{\ifthirdargument
- \doifassignmentelse{#settings}%
+ \doifelseassignment{#settings}%
{\scrn_menu_define_original[#tag][#category][\c!category=#category,#settings]}% child definition
{\scrn_menu_define_original[#tag][#settings][\c!category=#category]}% % child definition
\scrn_menu_register{#tag}{#category}%
\else\ifsecondargument
- \doifassignmentelse{#category}%
+ \doifelseassignment{#category}%
{\scrn_menu_define_original[#tag][#category]}% % root definition
{\scrn_menu_define_original[#tag][#category][\c!category=#category]% % child definition
\scrn_menu_register{#tag}{#category}}%
@@ -368,7 +368,7 @@
%D Fill menus:
-\normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
+\normalexpanded{\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
{\def\currentinteractionmenu{#tag}%
\expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname
\setinteractionmenuparameter\c!menu{#content}}
@@ -691,7 +691,7 @@
\def\scrn_button_make_position#currentparameter#inheritedframed#letparameter#setparameter#text#action%
{\global\advance\c_scrn_menu_position\plusone
- \doifreferencefoundelse{#action}% 0=not found, 1=same page, >1=elsewhere
+ \doifelsereferencefound{#action}% 0=not found, 1=same page, >1=elsewhere
{\c_scrn_menu_page_mode\ifnum\currentreferencerealpage=\realpageno\plusone\else\plustwo\fi}%
{\c_scrn_menu_page_mode\plustwo}%
\doglobal\appendetoks
@@ -709,7 +709,8 @@
\unexpanded\def\scrn_menu_got_start[#action]#text\stopgot
{\scrn_menu_action_start
- \setupcurrentinteractionmenu[\c!frame=\v!off,\c!background=]% needs checking, was buttons
+ \letinteractionmenuparameter\c!frame\v!off
+ \letinteractionmenuparameter\c!background\empty
\scrn_button_make
\interactionmenuparameter
\inheritedinteractionmenuframed
@@ -967,7 +968,7 @@
\scrn_menu_menu_button_a
{#menutag}{#settings}{#text}{#action}%
\else
- \doifassignmentelse{#menutag}\scrn_menu_menu_button_b\scrn_menu_menu_button_c
+ \doifelseassignment{#menutag}\scrn_menu_menu_button_b\scrn_menu_menu_button_c
{#menutag}{#text}{#action}%
\fi}
@@ -1022,9 +1023,14 @@
\def\scrn_menu_register_menu_buttons[#menu][#register]%
{\ifsecondargument
- \ctxcommand{registerbuttons("menu","#register","\currentlanguage")}
+ \clf_registerbuttons{menu}{#register}{\currentlanguage}%
\else
- \ctxcommand{registerbuttons("","#menu","\currentlanguage")}
+ \clf_registerbuttons{}{#menu}{\currentlanguage}%
\fi}
+% or less readable:
+%
+% \def\scrn_menu_register_menu_buttons[#menu][#register]%
+% {\clf_registerbuttons\ifsecondargument{menu}{#register}\else{}{#menu}\fi{\currentlanguage}}
+
\protect \endinput
diff --git a/tex/context/base/scrn-fld.lua b/tex/context/base/scrn-fld.lua
index 69480b887..1563b9005 100644
--- a/tex/context/base/scrn-fld.lua
+++ b/tex/context/base/scrn-fld.lua
@@ -8,6 +8,10 @@ if not modules then modules = { } end modules ['scrn-fld'] = {
-- we should move some code from lpdf-fld to here
+local context = context
+local ctx_doifelse = commands.doifelse
+local implement = interfaces.implement
+
local variables = interfaces.variables
local v_yes = variables.yes
@@ -40,48 +44,141 @@ fields.defineset = defineset
fields.clone = clone
fields.insert = insert
-commands.definefield = define
-commands.definefieldset = defineset
-commands.clonefield = clone
+-- codeinjections are not yet defined
+
+implement {
+ name = "definefield",
+ actions = define,
+ arguments = {
+ {
+ { "name" },
+ { "alternative" },
+ { "type" },
+ { "category" },
+ { "values" },
+ { "default" },
+ }
+ }
+}
-function commands.insertfield(name,specification)
- texsetbox("b_scrn_field_body",insert(name,specification))
-end
+implement {
+ name = "definefieldset",
+ actions = defineset,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "clonefield",
+ actions = clone,
+ arguments = {
+ {
+ { "children" },
+ { "alternative" },
+ { "parent" },
+ { "category" },
+ { "values" },
+ { "default" },
+ }
+ }
+}
+
+implement {
+ name = "insertfield",
+ actions = function(name,specification)
+ texsetbox("b_scrn_field_body",insert(name,specification))
+ end,
+ arguments = {
+ "string",
+ {
+ { "title" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ { "align" },
+ { "length" },
+ { "fontstyle" },
+ { "fontalternative" },
+ { "fontsize" },
+ { "fontsymbol" },
+ { "colorvalue", "integer" },
+ { "color" },
+ { "backgroundcolorvalue", "integer" },
+ { "backgroundcolor" },
+ { "framecolorvalue", "integer" },
+ { "framecolor" },
+ { "layer" },
+ { "option" },
+ { "align" },
+ { "clickin" },
+ { "clickout" },
+ { "regionin" },
+ { "regionout" },
+ { "afterkey" },
+ { "format" },
+ { "validate" },
+ { "calculate" },
+ { "focusin" },
+ { "focusout" },
+ { "openpage" },
+ { "closepage" },
+ }
+ }
+}
 -- (for the moment) only tex interface
-function commands.getfieldcategory(name)
- local g = codeinjections.getfieldcategory(name)
- if g then
- context(g)
+implement {
+ name = "getfieldcategory",
+ arguments = "string",
+ actions = function(name)
+ local g = codeinjections.getfieldcategory(name)
+ if g then
+ context(g)
+ end
end
-end
+}
-function commands.getdefaultfieldvalue(name)
- local d = codeinjections.getdefaultfieldvalue(name)
- if d then
- context(d)
+implement {
+ name = "getdefaultfieldvalue",
+ arguments = "string",
+ actions = function(name)
+ local d = codeinjections.getdefaultfieldvalue(name)
+ if d then
+ context(d)
+ end
end
-end
+}
-function commands.exportformdata(export)
- if export == v_yes then
- codeinjections.exportformdata()
+implement {
+ name = "exportformdata",
+ arguments = "string",
+ actions = function(export)
+ if export == v_yes then
+ codeinjections.exportformdata()
+ end
end
-end
-
-function commands.setformsmethod(method)
- codeinjections.setformsmethod(method)
-end
+}
-function commands.doiffieldcategoryelse(name)
- commands.doifelse(codeinjections.validfieldcategory(name))
-end
+implement {
+ name = "setformsmethod",
+ arguments = "string",
+ actions = function(method)
+ codeinjections.setformsmethod(method)
+ end
+}
-function commands.doiffieldsetelse(tag)
- commands.doifelse(codeinjections.validfieldset(name))
-end
+implement {
+ name = "doifelsefieldcategory",
+ arguments = "string",
+ actions = function(name)
+ ctx_doifelse(codeinjections.validfieldcategory(name))
+ end
+}
-function commands.doiffieldelse(name)
- commands.doifelse(codeinjections.validfield(name))
-end
+implement {
+ name = "doiffieldsetelse",
+ arguments = "string",
+ actions = function(name)
+ ctx_doifelse(codeinjections.validfieldset(name))
+ end
+}
diff --git a/tex/context/base/scrn-fld.mkvi b/tex/context/base/scrn-fld.mkvi
index 049ac92c3..4b4c9d0ee 100644
--- a/tex/context/base/scrn-fld.mkvi
+++ b/tex/context/base/scrn-fld.mkvi
@@ -105,7 +105,7 @@
\installdirectcommandhandler \??forms {forms}
\appendtoks
- \ctxcommand{setformsmethod("\formsparameter\c!method")}%
+ \clf_setformsmethod{\formsparameter\c!method}%
\to \everysetupforms
\setupforms
@@ -113,7 +113,7 @@
\appendtoks
\iflocation
- \ctxcommand{exportformdata("\formsparameter\c!export")}%
+ \clf_exportformdata{\formsparameter\c!export}%
\fi
\to \everystoptext
@@ -127,7 +127,7 @@
{\processcommalist[#set]\scrn_symbols_preset_indeed}%
\def\scrn_symbols_preset_indeed#tag%
- {\doifobjectfoundelse{SYM}{#tag}
+ {\doifelseobjectfound{SYM}{#tag}
{}
{\settightobject{SYM}{#tag}\hbox{\symbol[#tag]}% % todo: set this as immediate xform
\page_otr_add_special_content{\hskip-\maxdimen\getobject{SYM}{#tag}}}} % and then force it into the file
@@ -177,23 +177,23 @@
\appendtoks % we cannot use parent .. maybe s!parent has to change
\ifx\currentfieldbodyparent\empty
\scrn_field_check_category
- \ctxcommand{definefield{
- name = "\currentfieldbody",
- alternative = "normal",
- type = "\fieldbodyparameter\c!type",
- category = "\fieldbodyparameter\c!category",
- values = \!!bs\fieldbodyparameter\c!values\!!es,
- default = \!!bs\fieldbodyparameter\c!default\!!es
- }}%
+ \clf_definefield
+ name {\currentfieldbody}%
+ alternative {normal}%
+ type {\fieldbodyparameter\c!type}%
+ category {\fieldbodyparameter\c!category}%
+ values {\fieldbodyparameter\c!values}%
+ default {\fieldbodyparameter\c!default}%
+ \relax
\else
- \ctxcommand{clonefield{
- children = "\currentfieldbody",
- alternative = "clone",
- parent = "\currentfieldbodyparent",
- category = "\fieldbodyparameter\c!category",
- values = \!!bs\fieldbodyparameter\c!values\!!es,
- default = \!!bs\fieldbodyparameter\c!default\!!es
- }}%
+ \clf_clonefield
+ children {\currentfieldbody}%
+ alternative {clone}%
+ parent {\currentfieldbodyparent}%
+ category {\fieldbodyparameter\c!category}%
+ values {\fieldbodyparameter\c!values}%
+ default {\fieldbodyparameter\c!default}%
+ \relax
\fi
\to \everydefinefieldbody
@@ -224,43 +224,46 @@
% == \edef\currentfieldbackgroundcolorvalue{\thecolorattribute\currentfieldbackgroundcolor}%
\fi
\usefieldbodystyleandcolor\c!style\c!color
- \ctxcommand{insertfield("\currentfieldbody", {
- title = "\currentfieldbody",
- width = \number\dimexpr\fieldbodyparameter\c!width \relax,
- height = \number\dimexpr\fieldbodyparameter\c!height\relax,
- depth = \number\dimexpr\fieldbodyparameter\c!depth \relax,
- align = "\fieldbodyparameter\c!align",
- length = "\fieldbodyparameter\c!n",
- fontstyle = "\fontstyle",
- fontalternative = "\fontalternative",
- fontsize = "\fontbody",
- fontsymbol = "\fieldbodyparameter\c!symbol",
- color = "\fieldbodyparameter\c!color",
- colorvalue = \number\attribute\colorattribute,
- \ifx\currentfieldbackgroundcolor\empty \else
- backgroundcolor = "\currentfieldbackgroundcolor",
- backgroundcolorvalue = "\currentfieldbackgroundcolorvalue",
- \fi
- \ifx\currentfieldframecolor\empty \else
- framecolor = "\currentfieldframecolor",
- framecolorvalue = "\currentfieldframecolorvalue",
- \fi
- layer = "\fieldbodyparameter\c!fieldlayer",
- option = "\fieldbodyparameter\c!option",
- align = "\fieldbodyparameter\c!align",
- clickin = "\fieldbodyparameter\c!clickin",
- clickout = "\fieldbodyparameter\c!clickout",
- regionin = "\fieldbodyparameter\c!regionin",
- regionout = "\fieldbodyparameter\c!regionout",
- afterkey = "\fieldbodyparameter\c!afterkey",
- format = "\fieldbodyparameter\c!format",
- validate = "\fieldbodyparameter\c!validate",
- calculate = "\fieldbodyparameter\c!calculate",
- focusin = "\fieldbodyparameter\c!focusin",
- focusout = "\fieldbodyparameter\c!focusout",
- openpage = "\fieldbodyparameter\c!openpage",
- closepage = "\fieldbodyparameter\c!closepage",
- })}}
+ \clf_insertfield
+ {\currentfieldbody}%
+ {%
+ title {\currentfieldbody}
+ width \dimexpr\fieldbodyparameter\c!width \relax
+ height \dimexpr\fieldbodyparameter\c!height\relax
+ depth \dimexpr\fieldbodyparameter\c!depth \relax
+ align {\fieldbodyparameter\c!align}%
+ length {\fieldbodyparameter\c!n}%
+ fontstyle {\fontstyle}%
+ fontalternative {\fontalternative}%
+ fontsize {\fontbody}%
+ fontsymbol {\fieldbodyparameter\c!symbol}%
+ color {\fieldbodyparameter\c!color}%
+ colorvalue \attribute\colorattribute
+ \ifx\currentfieldbackgroundcolor\empty \else
+ backgroundcolor {\currentfieldbackgroundcolor}%
+ backgroundcolorvalue \numexpr\currentfieldbackgroundcolorvalue\relax
+ \fi
+ \ifx\currentfieldframecolor\empty \else
+ framecolor {\currentfieldframecolor}%
+ framecolorvalue \numexpr\currentfieldframecolorvalue\relax
+ \fi
+ layer {\fieldbodyparameter\c!fieldlayer}%
+ option {\fieldbodyparameter\c!option}%
+ align {\fieldbodyparameter\c!align}%
+ clickin {\fieldbodyparameter\c!clickin}%
+ clickout {\fieldbodyparameter\c!clickout}%
+ regionin {\fieldbodyparameter\c!regionin}%
+ regionout {\fieldbodyparameter\c!regionout}%
+ afterkey {\fieldbodyparameter\c!afterkey}%
+ format {\fieldbodyparameter\c!format}%
+ validate {\fieldbodyparameter\c!validate}%
+ calculate {\fieldbodyparameter\c!calculate}%
+ focusin {\fieldbodyparameter\c!focusin}%
+ focusout {\fieldbodyparameter\c!focusout}%
+ openpage {\fieldbodyparameter\c!openpage}%
+ closepage {\fieldbodyparameter\c!closepage}%
+ }%
+ \relax}
%D The sets are used in grouped calculations.
%D
@@ -270,16 +273,20 @@
{\dodoubleempty\scrn_field_define_set}
\def\scrn_field_define_set[#tag][#list]%
- {\ctxcommand{definefieldset("#tag","#list")}}
+ {\clf_definefieldset{#tag}{#list}}
\let\dodefinefieldset\definefieldbodyset % compatibility
%D A few testing macros:
-\def\doiffieldbodyelse #tag{\ctxcommand{doiffieldelse("#tag")}}
-\def\doiffieldcategoryelse#tag{\ctxcommand{doiffieldcategoryelse("#tag")}}
+\def\doifelsefieldbody #tag{\clf_doifelsefield{#tag}}
+\def\doifelsefieldcategory#tag{\clf_doifelsefieldcategory{#tag}}
+
+\let\doiffieldbodyelse \doifelsefieldbody
+\let\doiffieldcategoryelse\doifelsefieldcategory
-\let\doiffieldelse\doiffieldbodyelse % compatibility
+\let\doiffieldelse \doifelsefieldbody % compatibility / will be dropped
+\let\doifelsefield \doifelsefieldbody % compatibility / will be dropped
%D We still support the traditional method of defining fields:
%D
@@ -365,7 +372,7 @@
\def\scrn_field_fit[#tag][#settings]%
{\iflocation
\begingroup
- \edef\currentdefaultfieldvalue{\ctxcommand{getdefaultfieldvalue("#tag")}}%
+ \edef\currentdefaultfieldvalue{\clf_getdefaultfieldvalue{#tag}}%
\setbox\b_scrn_field_fit_symbol\hbox{\symbol[\currentdefaultfieldvalue]}%
\fitfieldframed
{\fieldbody[#tag]
@@ -494,7 +501,7 @@
\scrn_field_load_scripts
\edef\currentfieldbody {#tag}%
\edef\currentfieldlabel {#label}%
- \edef\currentfieldcategory{\ctxcommand{getfieldcategory("#tag")}}%
+ \edef\currentfieldcategory{\clf_getfieldcategory{#tag}}%
\ifx\currentfieldlabel\empty
\let\currentfieldlabel\currentfieldbody
\fi
@@ -679,7 +686,7 @@
\edef\currenttooltipname{tooltip:\number\c_scrn_tooltip_n}%
\setbox\b_scrn_tooltip_anchor\hbox
{\strut#anchortext}%
- \doifassignmentelse{#settings}
+ \doifelseassignment{#settings}
{\setupcurrenttooltip[#settings]}%
{\setupcurrenttooltip[\c!location=#settings]}%
\setbox\b_scrn_tooltip_text\hbox
@@ -740,6 +747,8 @@
%D \goto{walk field}[Walk{mine}]
%D \stoptyping
+% todo: expand #symbols
+
\unexpanded\def\definefieldstack
{\dotripleargument\scrn_fieldstack_define}
@@ -868,7 +877,7 @@
\setvalue{pushbutton:#tag}{\scrn_pushbutton_handle{#tag}{#settings}}}
\def\scrn_pushbutton_define_variant#tag#variant#content%
- {\doifsymboldefinedelse{pushsymbol:#tag:#variant}
+ {\doifelsesymboldefined{pushsymbol:#tag:#variant}
\donothing
{\definesymbol[pushsymbol:#tag:#variant][{#content}]}}
@@ -975,7 +984,7 @@
\setupcurrentinteractionmenu[#settings]%
\let\scrn_rollbutton_symbol\scrn_rollbutton_symbol_m
\else
- \doifassignmentelse{#tag}
+ \doifelseassignment{#tag}
{\let\currentbutton\empty
\setupcurrentbutton[#tag]%
\let\scrn_rollbutton_symbol\scrn_rollbutton_symbol_b}%
diff --git a/tex/context/base/scrn-hlp.lua b/tex/context/base/scrn-hlp.lua
index d344ce280..99c0565a8 100644
--- a/tex/context/base/scrn-hlp.lua
+++ b/tex/context/base/scrn-hlp.lua
@@ -6,13 +6,15 @@ if not modules then modules = { } end modules ['scrn-hlp'] = {
license = "see context related readme files"
}
-local format = string.format
+local tonumber = tonumber
local help = { }
interactions.help = help
local context = context
-local commands = commands
+local implement = interfaces.implement
+
+local formatters = string.formatters
local a_help = attributes.private("help")
@@ -48,21 +50,26 @@ local helpscript = [[
local template = "javascript(Hide_All_Help{help:}),action(show{help:%s})"
-function help.register(number,name,box)
- if helpscript then
- interactions.javascripts.setpreamble("HelpTexts",helpscript)
- helpscript = false
- end
- local b = copy_nodelist(texgetbox(box))
- register_list(b)
- data[number] = b
- if name and name ~= "" then
- references[name] = number
- structures.references.define("",name,format(template,number))
+local function register(specification)
+ local number = specification.number
+ local name = specification.name
+ local box = specification.box
+ if number and name and box then
+ if helpscript then
+ interactions.javascripts.setpreamble("HelpTexts",helpscript)
+ helpscript = false
+ end
+ local b = copy_nodelist(texgetbox(box))
+ register_list(b)
+ data[number] = b
+ if name and name ~= "" then
+ references[name] = number
+ structures.references.define("",name,formatters[template](number))
+ end
end
end
-local function collect(head,used)
+local function collectused(head,used)
while head do
local id = head.id
if id == hlist_code then
@@ -74,51 +81,77 @@ local function collect(head,used)
used[#used+1] = a
end
else
- used = collect(head.list,used)
+ used = collectused(head.list,used)
end
elseif id == vlist_code then
- used = collect(head.list,used)
+ used = collectused(head.list,used)
end
head = head.next
end
return used
end
-function help.collect(box)
+local function collect(box)
if next(data) then
- return collect(texgetbox(box).list)
+ return collectused(texgetbox(box).list)
end
end
-commands.registerhelp = help.register
-
-function commands.collecthelp(box)
- local used = help.collect(box)
- if used then
- local done = { }
- context.startoverlay()
- for i=1,#used do
- local d = data[used[i]]
- if d and not done[d] then
- local box = hpack_nodelist(copy_nodelist(d))
- context(false,box)
- done[d] = true
- else
- -- error
+local function reference(name)
+ return references[name] or tonumber(name) or 0
+end
+
+help.register = register
+help.collect = collect
+help.reference = reference
+
+implement {
+ name = "registerhelp",
+ actions = register,
+ arguments = {
+ {
+ { "number", "integer" },
+ { "name" },
+ { "box" , "integer" }
+ }
+ }
+}
+
+implement {
+ name = "collecthelp",
+ arguments = "integer",
+ actions = function(box)
+ local used = collect(box)
+ if used then
+ local done = { }
+ context.startoverlay()
+ for i=1,#used do
+ local d = data[used[i]]
+ if d and not done[d] then
+ local box = hpack_nodelist(copy_nodelist(d))
+ context(false,box)
+ done[d] = true
+ else
+ -- error
+ end
end
+ context.stopoverlay()
end
- context.stopoverlay()
end
-end
-
-function help.reference(name)
- return references[name] or tonumber(name) or 0
-end
+}
-function commands.helpreference(name)
- context(references[name] or tonumber(name) or 0)
-end
+implement {
+ name = "helpreference",
+ arguments = "string",
+ actions = function(name)
+ context(reference(name))
+ end
+}
-function commands.helpaction(name)
- context(template,references[name] or tonumber(name) or 0)
-end
+implement {
+ name = "helpaction",
+ arguments = "string",
+ actions = function(name)
+ context(template,reference(name))
+ end
+}
diff --git a/tex/context/base/scrn-hlp.mkvi b/tex/context/base/scrn-hlp.mkvi
index f5a78fb08..eca79c90a 100644
--- a/tex/context/base/scrn-hlp.mkvi
+++ b/tex/context/base/scrn-hlp.mkvi
@@ -130,25 +130,31 @@
\c!values=\currenthelpname]%
\setbox\b_scrn_help_box\hbox
{\fieldbody[\currenthelpname]}%
- \ctxcommand{registerhelp(\number\c_scrn_help_n,"\currenthelpreference",\number\b_scrn_help_box)}}
+ \clf_registerhelp
+ number \c_scrn_help_n
+ name {\currenthelpreference}%
+ box \b_scrn_help_box
+ \relax}
-\def\doifelsehelp
+\unexpanded\def\doifelsehelp
{\ifcase\c_scrn_help_n
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\def\placehelp % was \helpdata
+\let\doifhelpelse\doifelsehelp
+
+\unexpanded\def\placehelp % was \helpdata
{\ifinpagebody\ifcase\c_scrn_help_n\else
- \ctxcommand{collecthelp(255)}% rather hard coded ... bad
+ \clf_collecthelp\normalpagebox
\fi\fi}
\def\helpreference#category%
- {\ctxcommand{helpreference("#category")}}
+ {\clf_helpreference{#category}}
\def\helpaction#category%
- {\ctxcommand{helpaction("#category")}}
+ {\clf_helpaction{#category}}
\unexpanded\def\helpsignal#category%
{\hbox attr \helpattribute \helpreference{#category}{}}
diff --git a/tex/context/base/scrn-ini.lua b/tex/context/base/scrn-ini.lua
index 4831408f9..ce9f9f71b 100644
--- a/tex/context/base/scrn-ini.lua
+++ b/tex/context/base/scrn-ini.lua
@@ -16,7 +16,7 @@ local codeinjections = backends.codeinjections
local identitydata = { }
-local function setupidentity(specification)
+function general.setupidentity(specification)
for k, v in next, specification do
identitydata[k] = v
end
@@ -27,6 +27,17 @@ function general.getidentity()
return identitydata
end
-general.setupidentity = setupidentity
-
-commands.setupidentity = setupidentity
+interfaces.implement {
+ name = "setupidentity",
+ actions = general.setupidentity,
+ arguments = {
+ {
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "creator" },
+ { "date" },
+ { "keywords" },
+ }
+ }
+}
diff --git a/tex/context/base/scrn-ini.mkvi b/tex/context/base/scrn-ini.mkvi
index f5b294624..2ed822c6e 100644
--- a/tex/context/base/scrn-ini.mkvi
+++ b/tex/context/base/scrn-ini.mkvi
@@ -51,6 +51,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflocationelse\doifelselocation
+
\setupinteraction
[\c!state=\v!stop]
@@ -176,14 +178,14 @@
%D Identity
\def\scrn_identity_synchronize
- {\ctxcommand{setupidentity{
- title = \!!bs\interactionparameter\c!title\!!es,
- subtitle = \!!bs\interactionparameter\c!subtitle\!!es,
- author = \!!bs\interactionparameter\c!author\!!es,
- creator = \!!bs ConTeXt - \contextversion\!!es,
- date = \!!bs\interactionparameter\c!date\!!es,
- keywords = \!!bs\interactionparameter\c!keyword\!!es,
- }}}
+ {\clf_setupidentity
+ title {\interactionparameter\c!title}%
+ subtitle {\interactionparameter\c!subtitle}%
+ author {\interactionparameter\c!author}%
+ creator { ConTeXt - \contextversion}%
+ date {\interactionparameter\c!date}%
+ keywords {\interactionparameter\c!keyword}%
+ \relax}
\appendtoks
\scrn_identity_synchronize
diff --git a/tex/context/base/scrn-pag.lua b/tex/context/base/scrn-pag.lua
index 7003d0285..4d7b388ee 100644
--- a/tex/context/base/scrn-pag.lua
+++ b/tex/context/base/scrn-pag.lua
@@ -10,18 +10,43 @@ interactions = interactions or { }
interactions.pages = interactions.pages or { }
local pages = interactions.pages
+local implement = interfaces.implement
+
local codeinjections = backends.codeinjections
-local function setupcanvas(specification)
+function pages.setupcanvas(specification)
codeinjections.setupcanvas(specification)
end
-local function setpagetransition(specification)
+function pages.setpagetransition(specification)
codeinjections.setpagetransition(specification)
end
-pages.setupcanvas = setupcanvas
-pages.setpagetransition = setpagetransition
+implement {
+ name = "setupcanvas",
+ actions = pages.setupcanvas,
+ arguments = {
+ {
+ { "mode" },
+ { "singlesided", "boolean" },
+ { "doublesided", "boolean" },
+ { "leftoffset", "dimen" },
+ { "topoffset", "dimen" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "paperwidth", "dimen" },
+ { "paperheight", "dimen" },
+ }
+ }
+}
-commands.setupcanvas = setupcanvas
-commands.setpagetransition = setpagetransition
+implement {
+ name = "setpagetransition",
+ actions = pages.setpagetransition,
+ arguments = {
+ {
+ { "n" },
+ { "delay", "integer" },
+ }
+ }
+}
diff --git a/tex/context/base/scrn-pag.mkvi b/tex/context/base/scrn-pag.mkvi
index 5bbdadda8..3dfcd65c5 100644
--- a/tex/context/base/scrn-pag.mkvi
+++ b/tex/context/base/scrn-pag.mkvi
@@ -124,30 +124,49 @@
%
% \starttext \input ward \stoptext
-\def\scrn_canvas_synchronize_simple
- {\ctxcommand{setupcanvas{
- paperwidth = \number\printpaperwidth,
- paperheight = \number\printpaperheight
- }}}
+\let\scrn_canvas_synchronize_simple \relax
+\let\scrn_canvas_synchronize_complex\relax
-\def\scrn_canvas_synchronize_complex
+\appendtoks
+ \global\let\scrn_canvas_synchronize_simple \scrn_canvas_synchronize_simple_indeed
+ \global\let\scrn_canvas_synchronize_complex\scrn_canvas_synchronize_complex_indeed
+\to \everysetuplayout
+
+\def\scrn_canvas_synchronize_simple_indeed
+ {\clf_setupcanvas
+ paperwidth \printpaperwidth
+ paperheight \printpaperheight
+ \relax
+ %\global\let\scrn_canvas_synchronize_simple \relax
+ \global\let\scrn_canvas_synchronize_complex\relax}
+
+\def\scrn_canvas_synchronize_complex_indeed
{\scrn_canvas_calculate % otherwise we need to hook it into setuppage etc
- \ctxcommand{setupcanvas{
- mode = "\interactionscreenparameter\c!option",
- singlesided = \ifsinglesided true\else false\fi,
- doublesided = \ifdoublesided true\else false\fi,
- leftoffset = \number\canvasbackoffset,
- topoffset = \number\canvastopoffset,
- width = \number\canvaswidth,
- height = \number\canvasheight,
- paperwidth = \number\canvasmaxwidth,
- paperheight = \number\canvasmaxheight
- }}}
+ \clf_setupcanvas
+ mode {\interactionscreenparameter\c!option}%
+ singlesided \ifsinglesided true\else false\fi\space
+ doublesided \ifdoublesided true\else false\fi\space
+ leftoffset \canvasbackoffset
+ topoffset \canvastopoffset
+ width \canvaswidth
+ height \canvasheight
+ paperwidth \canvasmaxwidth
+ paperheight \canvasmaxheight
+ \relax
+ %\global\let\scrn_canvas_synchronize_simple \relax
+ \global\let\scrn_canvas_synchronize_complex\relax}
\appendtoks
- \doifcommonelse{\interactionscreenparameter\c!option}{\v!max,\v!fit}%
- {\global\settrue \c_scrn_canvas_tight_page}%
- {\global\setfalse\c_scrn_canvas_tight_page}%
+ \begingroup
+ \edef\p_option{\interactionscreenparameter\c!option}%
+ \ifx\p_option\v!max
+ \global\settrue \c_scrn_canvas_tight_page
+ \else\ifx\p_option\v!fit
+ \global\settrue \c_scrn_canvas_tight_page
+ \else
+ \global\setfalse\c_scrn_canvas_tight_page
+ \fi\fi
+ \endgroup
\to \everysetupinteractionscreen
\setupinteractionscreen
@@ -159,6 +178,11 @@
\c!topspace=\topspace,
\c!option=\v!auto]
+\appendtoks
+ \global\let\scrn_canvas_synchronize_simple \scrn_canvas_synchronize_simple
+ \global\let\scrn_canvas_synchronize_complex\scrn_canvas_synchronize_complex
+\to \everysetupinteractionscreen
+
%D Conditional page breaks:
\unexpanded\def\screen
@@ -177,7 +201,10 @@
{\dosingleempty\scrn_transitions_setup}
\def\scrn_transitions_setup[#list]%
- {\edef\scrn_transitions_list{#list}}
+ {\edef\scrn_transitions_list{#list}%
+ \ifx\scrn_transitions_list\v!reset
+ \let\scrn_transitions_list\empty
+ \fi}
\def\scrn_transitions_set
{\iflocation \ifx\scrn_transitions_list\empty \else
@@ -187,10 +214,10 @@
\def\scrn_transitions_set_indeed
{\begingroup
\edef\currentinteractionscreendelay{\interactionscreenparameter\c!delay}%
- \ctxcommand{setpagetransition{
- n = "\scrn_transitions_list",
- delay = "\ifx\currentinteractionscreendelay\v!none 0\else\currentinteractionscreendelay\fi"
- }}%
+ \clf_setpagetransition
+ n {\scrn_transitions_list}%
+ delay \ifx\currentinteractionscreendelay\v!none \zerocount\else\currentinteractionscreendelay\fi
+ \relax
\endgroup}
\prependtoks
diff --git a/tex/context/base/scrn-ref.lua b/tex/context/base/scrn-ref.lua
index df71b6a97..c1fc94871 100644
--- a/tex/context/base/scrn-ref.lua
+++ b/tex/context/base/scrn-ref.lua
@@ -15,6 +15,8 @@ local codeinjections = backends.codeinjections
local expandcurrent = structures.references.expandcurrent
local identify = structures.references.identify
+local implement = interfaces.implement
+
local function check(what)
if what and what ~= "" then
local set, bug = identify("",what)
@@ -54,12 +56,12 @@ local function setclosepageaction(close)
end
end
-references.setopendocument = setopendocumentaction
-references.setclosedocument = setclosedocumentaction
-references.setopenpage = setopenpageaction
-references.setclosepage = setclosepageaction
+references.setopendocument = setopendocumentaction
+references.setclosedocument = setclosedocumentaction
+references.setopenpage = setopenpageaction
+references.setclosepage = setclosepageaction
-commands.setopendocumentaction = setopendocumentaction
-commands.setclosedocumentaction = setclosedocumentaction
-commands.setopenpageaction = setopenpageaction
-commands.setclosepageaction = setclosepageaction
+implement { name = "setopendocumentaction", arguments = "string", actions = setopendocumentaction }
+implement { name = "setclosedocumentaction", arguments = "string", actions = setclosedocumentaction }
+implement { name = "setopenpageaction", arguments = "string", actions = setopenpageaction }
+implement { name = "setclosepageaction", arguments = "string", actions = setclosepageaction }
diff --git a/tex/context/base/scrn-ref.mkvi b/tex/context/base/scrn-ref.mkvi
index a06ba1ad8..2b15b4677 100644
--- a/tex/context/base/scrn-ref.mkvi
+++ b/tex/context/base/scrn-ref.mkvi
@@ -25,7 +25,7 @@
\to \everysetupinteraction
\def\scrn_reference_enable_page_destinations % no reset
- {\ctxlua{structures.references.setinnermethod("\interactionparameter\c!page")}}
+ {\clf_setinnerreferencemethod{\interactionparameter\c!page}}
\setupinteraction % start fit page and reset form
[\c!page=\v!no,
@@ -34,7 +34,7 @@
\c!focus=\v!fit,
\c!calculate=,
% rendering:
- \c!width=1em,
+ \c!width=\emwidth,
\c!height=\zeropoint,
\c!depth=\zeropoint,
\c!symbolset=]
@@ -46,10 +46,10 @@
\edef\currentinteractionopenaction {\interactionparameter\c!openaction }%
\edef\currentinteractioncloseaction{\interactionparameter\c!closeaction}%
\ifx\currentinteractionopenaction\empty \else
- \ctxcommand{setopendocumentaction("\currentinteractionopenaction")}%
+ \clf_setopendocumentaction{\currentinteractionopenaction}%
\fi
\ifx\currentinteractioncloseaction\empty \else
- \ctxcommand{setclosedocumentaction("\currentinteractioncloseaction")}%
+ \clf_setclosedocumentaction{\currentinteractioncloseaction}%
\fi
\glet\scrn_reference_set_text_actions\relax
\fi}
@@ -59,10 +59,10 @@
\edef\currentinteractionopenpageaction {\interactionparameter\c!openpageaction }%
\edef\currentinteractionclosepageaction{\interactionparameter\c!closepageaction}%
\ifx\currentinteractionopenpageaction\empty \else
- \ctxcommand{setopenpageaction("\currentinteractionopenpageaction")}%
+ \clf_setopenpageaction{\currentinteractionopenpageaction}%
\fi
\ifx\currentinteractionclosepageaction\empty \else
- \ctxcommand{setclosepageaction("\currentinteractionclosepageaction")}%
+ \clf_setclosepageaction{\currentinteractionclosepageaction}%
\fi
\fi}
@@ -73,7 +73,7 @@
\def\scrn_reference_enable_references
{\ifproductionrun
- \ctxlua{structures.references.enableinteraction()}%
+ \clf_enableinteraction % only once anyway
\glet\scrn_reference_enable_references\relax
\fi}
diff --git a/tex/context/base/scrn-wid.lua b/tex/context/base/scrn-wid.lua
index 5b319b07e..72c9bc733 100644
--- a/tex/context/base/scrn-wid.lua
+++ b/tex/context/base/scrn-wid.lua
@@ -10,6 +10,7 @@ interactions = interactions or { }
local interactions = interactions
local context = context
+local implement = interfaces.implement
local allocate = utilities.storage.allocate
@@ -42,9 +43,13 @@ local report_attachments = logs.reporter("widgets","attachments")
-- Symbols
-function commands.presetsymbollist(list)
- codeinjections.presetsymbollist(list)
-end
+implement {
+ name = "presetsymbollist",
+ arguments = "string",
+ actions = function(list)
+ codeinjections.presetsymbollist(list)
+ end
+}
-- Attachments
--
@@ -108,11 +113,51 @@ function attachments.insert(specification)
return nodeinjections.attachfile(specification)
end
-commands.registerattachment = attachments.register
+implement {
+ name = "registerattachment",
+ actions = attachments.register,
+ arguments = {
+ {
+ { "tag" },
+ { "registered" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "file" },
+ { "name" },
+ { "buffer" },
+ }
+ }
+}
-function commands.insertattachment(specification)
- texsetbox("b_scrn_attachment_link",(attachments.insert(specification)))
-end
+implement {
+ name = "insertattachment",
+ actions = function(specification)
+ texsetbox("b_scrn_attachment_link",(attachments.insert(specification)))
+ end,
+ arguments = {
+ {
+ { "tag" },
+ { "registered" },
+ { "method" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ { "colormodel", "integer" },
+ { "colorvalue", "integer" },
+ { "color" },
+ { "transparencyvalue", "integer" },
+ { "symbol" },
+ { "layer" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "file" },
+ { "name" },
+ { "buffer" },
+ }
+ }
+}
-- Comment
@@ -124,9 +169,32 @@ function comments.insert(specification)
return nodeinjections.comment(specification)
end
-function commands.insertcomment(specification)
- texsetbox("b_scrn_comment_link",(comments.insert(specification)))
-end
+implement {
+ name = "insertcomment",
+ actions = function(specification)
+ texsetbox("b_scrn_comment_link",(comments.insert(specification)))
+ end,
+ arguments = {
+ {
+ { "tag" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ { "nx" },
+ { "ny" },
+ { "colormodel", "integer" },
+ { "colorvalue", "integer" },
+ { "transparencyvalue", "integer" },
+ { "option" },
+ { "symbol" },
+ { "buffer" },
+ { "layer" },
+ }
+ }
+}
-- Soundclips
@@ -153,8 +221,27 @@ function soundclips.insert(tag)
end
end
-commands.registersoundclip = soundclips.register
-commands.insertsoundclip = soundclips.insert
+implement {
+    name = "registersoundclip",
+ actions = soundclips.register,
+ arguments = {
+ {
+ { "tag" },
+ { "file" }
+ }
+ }
+}
+
+implement {
+    name = "insertsoundclip",
+ actions = soundclips.insert,
+ arguments = {
+ {
+ { "tag" },
+ { "repeat" }
+ }
+ }
+}
-- Renderings
@@ -175,48 +262,84 @@ function renderings.rendering(label)
end
end
-local function var(label,key)
+function renderings.var(label,key)
local rn = renderings[label]
return rn and rn[key] or ""
end
-renderings.var = var
-
-function commands.renderingvar(label,key)
- context(var(label,key))
-end
+implement {
+ name = "renderingvar",
+ actions = { renderings.var, context },
+ arguments = { "string", "string" }
+}
-commands.registerrendering = renderings.register
+implement {
+ name = "registerrendering",
+ actions = renderings.register,
+ arguments = {
+ {
+ { "type" },
+ { "label" },
+ { "mime" },
+ { "filename" },
+ { "option" },
+ }
+ }
+}
-- Rendering:
-function commands.insertrenderingwindow(specification)
- codeinjections.insertrenderingwindow(specification)
-end
+implement {
+ name = "insertrenderingwindow",
+ actions = function(specification)
+ codeinjections.insertrenderingwindow(specification)
+ end,
+ arguments = {
+ {
+ { "label" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "option" },
+ { "page", "integer" },
+ }
+ }
+}
-- Linkedlists (only a context interface)
-function commands.definelinkedlist(tag)
- -- no need
-end
+implement {
+ name = "definelinkedlist",
+ arguments = "string",
+ actions = function(tag)
+ -- no need
+ end
+}
-function commands.enhancelinkedlist(tag,n)
- local ll = jobpasses.gettobesaved(tag)
- if ll then
- ll[n] = texgetcount("realpageno")
- end
-end
+implement {
+ name = "enhancelinkedlist",
+ arguments = { "string", "integer" },
+ actions = function(tag,n)
+ local ll = jobpasses.gettobesaved(tag)
+ if ll then
+ ll[n] = texgetcount("realpageno")
+ end
+ end
+}
-function commands.addlinklistelement(tag)
- local tobesaved = jobpasses.gettobesaved(tag)
- local collected = jobpasses.getcollected(tag) or { }
- local currentlink = #tobesaved + 1
- local noflinks = #collected
- tobesaved[currentlink] = 0
- local f = collected[1] or 0
- local l = collected[noflinks] or 0
- local p = collected[currentlink-1] or f
- local n = collected[currentlink+1] or l
- context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
- -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
-end
+implement {
+ name = "addlinklistelement",
+ arguments = "string",
+ actions = function(tag)
+ local tobesaved = jobpasses.gettobesaved(tag)
+ local collected = jobpasses.getcollected(tag) or { }
+ local currentlink = #tobesaved + 1
+ local noflinks = #collected
+ tobesaved[currentlink] = 0
+ local f = collected[1] or 0
+ local l = collected[noflinks] or 0
+ local p = collected[currentlink-1] or f
+ local n = collected[currentlink+1] or l
+ context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
+ -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
+ end
+}
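
The scrn-wid.lua changes above all follow one pattern: an ad-hoc `commands.*` function reached through `\ctxcommand{...}` becomes a declarative `interfaces.implement` registration that the TeX end reaches as a `\clf_*` call. The following stand-alone sketch only mirrors that shape; the name `showwidget` and its keys are invented for illustration and are not part of this patch.

    local implement = interfaces.implement

    implement {
        -- hypothetical registration, only the shape matches the ones above
        name      = "showwidget",              -- reachable from TeX as \clf_showwidget
        arguments = {
            {
                { "tag" },                     -- plain string:  tag {...}
                { "width", "dimen" },          -- dimension:     width \dimexpr...\relax
                { "page",  "integer" },        -- integer:       page 2
            }
        },
        actions   = function(specification)
            -- the scanned key/value pairs arrive as one table
            print(specification.tag, specification.width, specification.page)
        end,
    }

On the TeX side such calls pass the declared keys with their values and are closed with \relax, as the scrn-wid.mkvi hunks below show.
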
diff --git a/tex/context/base/scrn-wid.mkvi b/tex/context/base/scrn-wid.mkvi
index fad451651..57a4be276 100644
--- a/tex/context/base/scrn-wid.mkvi
+++ b/tex/context/base/scrn-wid.mkvi
@@ -100,16 +100,16 @@
\begingroup
\def\currentattachment{_}%
\setupcurrentattachment[#settings,\s!parent=\??attachment]%
- \ctxcommand{registerattachment{
- tag = "#tag",
- registered = "#tag",
- title = "\attachmentparameter\c!title",
- subtitle = "\attachmentparameter\c!subtitle",
- author = "\attachmentparameter\c!author",
- file = "\attachmentparameter\c!file",
- name = "\attachmentparameter\c!name",
- buffer = "\attachmentparameter\c!buffer",
- }}%
+ \clf_registerattachment
+ tag {#tag}%
+ registered {#tag}%
+ title {\attachmentparameter\c!title}%
+ subtitle {\attachmentparameter\c!subtitle}%
+ author {\attachmentparameter\c!author}%
+ file {\attachmentparameter\c!file}%
+ name {\attachmentparameter\c!name}%
+ buffer {\attachmentparameter\c!buffer}%
+ \relax
\endgroup
\else
% todo
@@ -136,7 +136,7 @@
{\bgroup
\doifelsenothing{#registered}
{\scrn_attachment_inject[\v!auto][]}
- {\doifassignmentelse{#registered}
+ {\doifelseassignment{#registered}
{\scrn_attachment_inject[\v!auto][#registered]}
{\scrn_attachment_inject[#registered][#settings]}}%
\egroup}
@@ -162,7 +162,7 @@
{\bgroup
\doifelsenothing{#registered}
{\def\scrn_attachment_stop{\scrn_attachment_inject[\v!auto][\c!buffer=\v!attachment]\egroup}}%
- {\doifassignmentelse{#registered}
+ {\doifelseassignment{#registered}
{\def\scrn_attachment_stop{\scrn_attachment_inject[\v!auto][\c!buffer=\v!attachment,#registered]\egroup}}%
{\def\scrn_attachment_stop{\scrn_attachment_inject[#registered][\c!buffer=\v!attachment,#settings]\egroup}}}%
\grabbufferdatadirect\v!attachment{\e!start\currentattachment}{\e!stop\currentattachment}}
@@ -181,37 +181,37 @@
\edef\currentattachmentheight{\attachmentparameter\c!height}%
\edef\currentattachmentdepth {\attachmentparameter\c!depth }%
\ifx\currentattachmentsymbol\empty
- \ifx\currentattachmentwidth \v!fit\edef\currentattachmentwidth {.5em}\fi
- \ifx\currentattachmentheight\v!fit\edef\currentattachmentheight{.5em}\fi
+ \ifx\currentattachmentwidth \v!fit\edef\currentattachmentwidth {.5\emwidth}\fi
+ \ifx\currentattachmentheight\v!fit\edef\currentattachmentheight{.5\emwidth}\fi
\ifx\currentattachmentdepth \v!fit\let \currentattachmentdepth \zeropoint\fi
\else
- \ctxcommand{presetsymbollist("\attachmentparameter\c!symbol")}%
+ \clf_presetsymbollist{\attachmentparameter\c!symbol}%
% we cannot yet ask for the wd/ht/dp of an xform else we could use those
\setbox\b_scrn_attachment_symbol\hbox{\symbol[\lastpredefinedsymbol]}%
\ifx\currentattachmentwidth \v!fit\edef\currentattachmentwidth {\wd\b_scrn_attachment_symbol}\fi
\ifx\currentattachmentheight\v!fit\edef\currentattachmentheight{\ht\b_scrn_attachment_symbol}\fi
\ifx\currentattachmentdepth \v!fit\edef\currentattachmentdepth {\dp\b_scrn_attachment_symbol}\fi
\fi
- \ctxcommand{insertattachment{
- tag = "\currentattachment",
- registered = "\currentattachmentregistered",
- width = \number\dimexpr\currentattachmentwidth \relax,
- height = \number\dimexpr\currentattachmentheight\relax,
- depth = \number\dimexpr\currentattachmentdepth \relax,
- color = "\attachmentparameter\c!color",
- colormodel = \number\attribute\colormodelattribute,
- colorvalue = \thecolorattribute{\attachmentparameter\c!color},
- transparencyvalue = \thetransparencyattribute{\attachmentparameter\c!color},
- symbol = "\currentattachmentsymbol",
- layer = "\attachmentparameter\c!textlayer",
+ \clf_insertattachment
+ tag {\currentattachment}%
+ registered {\currentattachmentregistered}%
+ width \dimexpr\currentattachmentwidth \relax
+ height \dimexpr\currentattachmentheight\relax
+ depth \dimexpr\currentattachmentdepth \relax
+ color {\attachmentparameter\c!color}%
+ colormodel \attribute\colormodelattribute
+ colorvalue \numexpr\thecolorattribute{\attachmentparameter\c!color}\relax % or are these chardefs
+ transparencyvalue \numexpr\thetransparencyattribute{\attachmentparameter\c!color}\relax % or are these chardefs
+ symbol {\currentattachmentsymbol}%
+ layer {\attachmentparameter\c!textlayer}%
% these will be overloaded by registered when available
- title = "\attachmentparameter\c!title",
- subtitle = "\attachmentparameter\c!subtitle",
- author = "\attachmentparameter\c!author",
- file = "\attachmentparameter\c!file",
- name = "\attachmentparameter\c!name",
- buffer = "\attachmentparameter\c!buffer",
- }}%
+ title {\attachmentparameter\c!title}%
+ subtitle {\attachmentparameter\c!subtitle}%
+ author {\attachmentparameter\c!author}%
+ file {\attachmentparameter\c!file}%
+ name {\attachmentparameter\c!name}%
+ buffer {\attachmentparameter\c!buffer}%
+ \relax
\setbox\b_scrn_attachment_link\hbox{\scrn_attachment_place}%
\wd\b_scrn_attachment_link\currentattachmentwidth
\ht\b_scrn_attachment_link\currentattachmentheight
@@ -219,11 +219,11 @@
\box\b_scrn_attachment_link}
\setvalue{\??attachmentmethod\v!hidden}%
- {\ctxcommand{insertattachment{
- tag = "\currentattachment",
- registered = "\currentattachmentregistered",
- method = "\v!hidden"
- }}}
+ {\clf_insertattachment
+ tag {\currentattachment}%
+ registered {\currentattachmentregistered}%
+ method {\v!hidden}%
+ \relax}
\def\scrn_attachment_place
{\executeifdefined
@@ -398,10 +398,10 @@
\scrn_comment_argument_ignore}
\def\scrn_comment_argument_indeed[#title][#settings]#text%
- {\doifassignmentelse{#title}
+ {\doifelseassignment{#title}
{\setupcurrentcomment[#title]}
{\setupcurrentcomment[\c!title=#title,#settings]}%
- \ctxlua{buffers.assign("\v!comment",\!!bs#text\!!es)}% todo: expansion control, but expanded by default (xml)
+ \clf_assignbuffer{\v!comment}{#text}\catcodetable\relax% todo: expansion control, but expanded by default (xml)
\scrn_comment_inject
\ignorespaces}
@@ -421,7 +421,7 @@
\def\scrn_comment_start_indeed[#title][#settings]%
{\bgroup
- \doifassignmentelse{#title}
+ \doifelseassignment{#title}
{\setupcurrentcomment[#title]}
{\setupcurrentcomment[\c!title=#title,#settings]}%
\unexpanded\def\scrn_comment_stop{\scrn_comment_inject\egroup}%
@@ -447,35 +447,35 @@
\edef\currentcommentheight{\commentparameter\c!height}%
\edef\currentcommentdepth {\commentparameter\c!depth }%
\ifx\currentcommentsymbol\empty
- \ifx\currentcommentwidth \v!fit\edef\currentcommentwidth {.5em}\fi
- \ifx\currentcommentheight\v!fit\edef\currentcommentheight{.5em}\fi
+ \ifx\currentcommentwidth \v!fit\edef\currentcommentwidth {.5\emwidth}\fi
+ \ifx\currentcommentheight\v!fit\edef\currentcommentheight{.5\emwidth}\fi
\ifx\currentcommentdepth \v!fit\let \currentcommentdepth \zeropoint\fi
\else
- \ctxcommand{presetsymbollist("\commentparameter\c!symbol")}%
+ \clf_presetsymbollist{\commentparameter\c!symbol}%
% we cannot yet ask for the wd/ht/dp of an xform else we could use those
\setbox\b_scrn_comment_symbol\hbox{\symbol[\lastpredefinedsymbol]}%
\ifx\currentcommentwidth \v!fit\edef\currentcommentwidth {\wd\b_scrn_comment_symbol}\fi
\ifx\currentcommentheight\v!fit\edef\currentcommentheight{\ht\b_scrn_comment_symbol}\fi
\ifx\currentcommentdepth \v!fit\edef\currentcommentdepth {\dp\b_scrn_comment_symbol}\fi
\fi
- \ctxcommand{insertcomment{
- tag = "\currentcomment",
- title = "\commentparameter\c!title",
- subtitle = "\commentparameter\c!subtitle",
- author = "\commentparameter\c!author",
- width = \number\dimexpr\currentcommentwidth,
- height = \number\dimexpr\currentcommentheight,
- depth = \number\dimexpr\currentcommentdepth,
- nx = \commentparameter\c!nx,
- ny = \commentparameter\c!ny,
- colormodel = \number\attribute\colormodelattribute,
- colorvalue = \thecolorattribute{\commentparameter\c!color},
- transparencyvalue = \thetransparencyattribute{\commentparameter\c!color},
- option = "\commentparameter\c!option", % todo
- symbol = "\commentparameter\c!symbol",
- buffer = "\v!comment",
- layer = "\commentparameter\c!textlayer"
- }}%
+ \clf_insertcomment
+ tag {\currentcomment}%
+ title {\commentparameter\c!title}%
+ subtitle {\commentparameter\c!subtitle}%
+ author {\commentparameter\c!author}%
+ width \dimexpr\currentcommentwidth\relax
+ height \dimexpr\currentcommentheight\relax
+ depth \dimexpr\currentcommentdepth\relax
+ nx {\commentparameter\c!nx}%
+ ny {\commentparameter\c!ny}%
+ colormodel \attribute\colormodelattribute
+ colorvalue \numexpr\thecolorattribute{\commentparameter\c!color}\relax
+ transparencyvalue \numexpr\thetransparencyattribute{\commentparameter\c!color}\relax
+ option {\commentparameter\c!option}% % todo
+ symbol {\commentparameter\c!symbol}%
+ buffer {\v!comment}%
+ layer {\commentparameter\c!textlayer}%
+ \relax
\wd\b_scrn_comment_link\currentcommentwidth
\ht\b_scrn_comment_link\currentcommentheight
\dp\b_scrn_comment_link\currentcommentdepth
@@ -542,17 +542,17 @@
{\dodoubleargument\scrn_soundtrack_indeed}
\def\scrn_soundtrack_indeed[#tag][#filename]%
- {\ctxcommand{registersoundclip{
- tag = "#tag",
- file = "#filename"
- }}}
+ {\clf_registersoundclip
+ tag {#tag}%
+ file {#filename}%
+ \relax}
\def\checksoundtrack#tag% yet untested in mkiv (also move management to lua)
{\iflocation
- \ctxcommand{insertsoundclip{
- tag = "#tag",
- ["repeat"] = "\directexternalsoundtrackparameter\c!option", % todo: pass option as-is
- }}%
+ \clf_insertsoundclip
+ tag {#tag}%
+ repeat {\directexternalsoundtrackparameter\c!option}%
+ \relax
\fi}
%D Renderings (not yet tested in mkvi):
@@ -571,31 +571,31 @@
\unexpanded\def\setinternalrendering{\dodoubleempty \scrn_rendering_set}
\def\scrn_rendering_use[#tag][#mime][#file][#option]%
- {\ctxcommand{registerrendering{
- type = "external",
- label = "#tag",
- mime = "#mime",
- filename = "#file",
- option = "#option",
- }}}
+ {\clf_registerrendering
+ type {external}%
+ label {#tag}%
+ mime {#mime}%
+ filename {#file}%
+ option {#option}%
+ \relax}
\def\scrn_rendering_set[#tag][#option]% {content} % crappy
{\bgroup
\dowithnextbox
- {\ctxcommand{registerrendering{
- type = "internal",
- label = "#tag",
- mime = "IRO", % brrr
- filename = "#tag",
- option = "#option",
- }}%
+ {\clf_registerrendering
+ type {internal}%
+ label {#tag}%
+ mime {IRO}% brrr
+ filename {#tag}%
+ option {#option}%
+ \relax
\let\objectoffset\zeropoint
\setobject{IRO}{#tag}\hbox{\box\nextbox}%
\egroup}%
\hbox}
-\def\renderingtype #tag{\ctxcommand{renderingvar("#tag","type")}}
-\def\renderingoption#tag{\ctxcommand{renderingvar("#tag","option")}}
+\def\renderingtype #tag{\clf_renderingvar{#tag}{type}}
+\def\renderingoption#tag{\clf_renderingvar{#tag}{option}}
\newdimen\d_scrn_rendering_width \d_scrn_rendering_width 8cm
\newdimen\d_scrn_rendering_height \d_scrn_rendering_height 6cm
@@ -648,13 +648,14 @@
\letrenderingwindowparameter\c!offset\v!overlay
\inheritedrenderingwindowframed
{\vfill
- \ctxcommand{insertrenderingwindow {
- label = "\currentrendering",
- width = \number\d_scrn_rendering_width,
- height = \number\d_scrn_rendering_height,
- option = "\renderingoption\currentrendering",
- page = \number\m_scrn_rendering_page,
- }}\hfill}%
+ \clf_insertrenderingwindow
+ label {\currentrendering}%
+ width \d_scrn_rendering_width
+ height \d_scrn_rendering_height
+ option {\renderingoption\currentrendering}%
+ page \m_scrn_rendering_page
+ \relax
+ \hfill}%
\egroup}
%D Linkedlists (not tested in mkvi):
@@ -676,7 +677,7 @@
% \let\setupbutton\setuplinkedlists\setuplinkedlist
%
% \appendtoks
-% \ctxcommand{definelinkedlist("\currentlinkedlist")}%
+% \clf_definelinkedlist{\currentlinkedlist}%
% \to \everydefinelinkedlist
%
% \def\setlinkedlistproperties#1#2#3#4#5#6%
@@ -694,8 +695,8 @@
% \edef\currentlinkedlist{#1}%
% \ifcsname\??lk\currentlinkedlist\s!parent\endcsname
% \hskip\linkedlistparameter\c!distance
-% \ctxcommand{addlinklistelement("\currentlinkedlist")}%
-% \expanded{\ctxlatelua{commands.enhancelinkedlist("\currentlinkedlist",\currentlink)}}% can also be done at the lua end
+% \clf_addlinklistelement{\currentlinkedlist}%
+% \expanded{\ctxlatecommand{enhancelinkedlist("\currentlinkedlist",\currentlink)}}% can also be done at the lua end
% \dogotosomepage {\??lk\currentlinkedlist}\gotobegincharacter \firstlink
% \ifnum\noflinks>\plustwo
% \dogotosomepage{\??lk\currentlinkedlist}\gobackwardcharacter\previouslink
diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua
index 681fc4c43..9050da6be 100644
--- a/tex/context/base/scrp-cjk.lua
+++ b/tex/context/base/scrp-cjk.lua
@@ -14,15 +14,29 @@ if not modules then modules = { } end modules ['scrp-cjk'] = {
-- sense either because otherwise a wanted space at the end of a
-- line would have to be a hard coded ones.
-local utfchar = utf.char
-
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-local remove_node = nodes.remove
-local copy_node = nodes.copy
-local traverse_id = nodes.traverse_id
-
-local nodepool = nodes.pool
+local utfchar = utf.char
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local copy_node = nuts.copy
+local remove_node = nuts.remove
+local traverse_id = nuts.traverse_id
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
local new_penalty = nodepool.penalty
@@ -88,20 +102,20 @@ end
-- at font definition time and/or just assume a correct font
local function trace_detail(current,what)
- local prev = current.prev
- local c_id = current.id
- local p_id = prev and prev.id
+ local prev = getprev(current)
+ local c_id = getid(current)
+ local p_id = prev and getid(prev)
if c_id == glyph_code then
- local c_ch = current.char
+ local c_ch = getchar(current)
if p_id == glyph_code then
- local p_ch = p_id and prev.char
+ local p_ch = p_id and getchar(prev)
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
else
report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
end
else
if p_id == glyph_code then
- local p_ch = p_id and prev.char
+ local p_ch = p_id and getchar(prev)
report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
else
report_details("[%s]",what)
@@ -110,8 +124,8 @@ local function trace_detail(current,what)
end
local function trace_detail_between(p,n,what)
- local p_ch = p.char
- local n_ch = n.char
+ local p_ch = getchar(p)
+ local n_ch = getchar(n)
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
end
@@ -427,29 +441,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = first.prev, upcoming
+ local p, n = getprev(first), upcoming
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -495,23 +509,24 @@ scripts.installmethod {
}
function scripts.decomposehangul(head)
+ local head = tonut(head)
local done = false
for current in traverse_id(glyph_code,head) do
- local lead_consonant, medial_vowel, tail_consonant = decomposed(current.char)
+ local lead_consonant, medial_vowel, tail_consonant = decomposed(getchar(current))
if lead_consonant then
- current.char = lead_consonant
+ setfield(current,"char",lead_consonant)
local m = copy_node(current)
- m.char = medial_vowel
+ setfield(m,"char",medial_vowel)
head, current = insert_node_after(head,current,m)
if tail_consonant then
local t = copy_node(current)
- t.char = tail_consonant
+ setfield(t,"char",tail_consonant)
head, current = insert_node_after(head,current,t)
end
done = true
end
end
- return head, done
+ return tonode(head), done
end
-- nodes.tasks.prependaction("processors","normalizers","scripts.decomposehangul")
@@ -682,29 +697,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = first.prev, upcoming
+ local p, n = getprev(first), upcoming
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -904,34 +919,32 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
-
--- elseif id == math_code then
--- upcoming = end_of_math(current).next
--- previous = "start"
-
+ -- elseif id == math_code then
+ -- upcoming = getnext(end_of_math(current))
+ -- previous = "start"
else -- glue
- local p, n = first.prev, upcoming -- we should remember prev
+ local p, n = getprev(first), upcoming -- we should remember prev
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -940,17 +953,17 @@ local function process(head,first,last)
or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
previous = "start"
else -- if head ~= first then
-if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = first.spec.width
- local s = spacedata[p.font]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
- end
- remove_node(head,first,true)
- end
-end
+ if id == glue_code and getsubtype(first) == userskip_code then -- also scriptstatus check?
+ -- for the moment no distinction possible between space and userskip
+ local w = getfield(getfield(first,"spec"),"width")
+ local s = spacedata[getfont(p)]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
+ end
+ end
previous = pcjk
-- else
-- previous = pcjk
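
The scrp-cjk.lua rewrite is largely mechanical: each handler now converts the incoming list to direct nodes ("nuts") once, uses the get/set accessors instead of field indexing, and converts back on return. The sketch below shows that wrapper shape in isolation; the handler name and the toy uppercasing it performs are made up for illustration and are not part of the patch.

    local nuts        = nodes.nuts
    local tonut       = nodes.tonut
    local tonode      = nodes.tonode

    local getchar     = nuts.getchar
    local setfield    = nuts.setfield
    local traverse_id = nuts.traverse_id

    local glyph_code  = nodes.nodecodes.glyph

    -- hypothetical handler: uppercase plain ascii glyphs, just to show the wrapper
    local function demo_handler(head)
        head = tonut(head)                       -- direct nodes ("nuts") in ...
        local done = false
        for current in traverse_id(glyph_code,head) do
            local c = getchar(current)
            if c >= 0x61 and c <= 0x7A then
                setfield(current,"char",c - 0x20)
                done = true
            end
        end
        return tonode(head), done                -- ... proper nodes out again
    end

The head/done contract of a node task stays the same; only the internal traversal works on nuts.
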
diff --git a/tex/context/base/scrp-eth.lua b/tex/context/base/scrp-eth.lua
index 597afa1b5..8ecbce522 100644
--- a/tex/context/base/scrp-eth.lua
+++ b/tex/context/base/scrp-eth.lua
@@ -9,9 +9,17 @@ if not modules then modules = { } end modules ['scrp-eth'] = {
-- at some point I will review the script code but for the moment we
-- do it this way; so space settings like with cjk yet
-local insert_node_before = node.insert_before
+local nuts = nodes.nuts
-local nodepool = nodes.pool
+local getnext = nuts.getnext
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getattr = nuts.getattr
+
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_penalty = nodepool.penalty
@@ -37,13 +45,13 @@ local inter_character_stretch_factor = 1
local inter_character_shrink_factor = 1
local function space_glue(current)
- local data = numbertodataset[current[a_scriptinjection]]
+ local data = numbertodataset[getattr(current,a_scriptinjection)]
if data then
inter_character_space_factor = data.inter_character_space_factor or 1
inter_character_stretch_factor = data.inter_character_stretch_factor or 1
inter_character_shrink_factor = data.inter_character_shrink_factor or 1
end
- local font = current.font
+ local font = getfont(current)
if lastfont ~= font then
local pf = parameters[font]
space = pf.space
@@ -104,9 +112,9 @@ local function process(head,first,last)
local injector = false
local current = first
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local scriptstatus = current[a_scriptstatus]
+ local scriptstatus = getattr(current,a_scriptstatus)
local category = numbertocategory[scriptstatus]
if injector then
local action = injector[category]
@@ -121,7 +129,7 @@ local function process(head,first,last)
if current == last then
break
else
- current = current.next
+ current = getnext(current)
end
end
end
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index 56422e622..3c3517542 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -14,7 +14,7 @@ local attributes, nodes, node = attributes, nodes, node
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
local trace_splitting = false trackers.register("scripts.splitting", function(v) trace_splitting = v end)
-local trace_splitdetail = false trackers.register("scripts.splitring.detail", function(v) trace_splitdetail = v end)
+local trace_splitdetail = false trackers.register("scripts.splitting.detail", function(v) trace_splitdetail = v end)
local report_preprocessing = logs.reporter("scripts","preprocessing")
local report_splitting = logs.reporter("scripts","splitting")
@@ -22,14 +22,13 @@ local report_splitting = logs.reporter("scripts","splitting")
local utfbyte, utfsplit = utf.byte, utf.split
local gmatch = string.gmatch
-local first_glyph = node.first_glyph or node.first_character
-local traverse_id = node.traverse_id
-
local texsetattribute = tex.setattribute
local nodecodes = nodes.nodecodes
local unsetvalue = attributes.unsetvalue
+local implement = interfaces.implement
+
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
@@ -48,9 +47,23 @@ local setmetatableindex = table.setmetatableindex
local enableaction = nodes.tasks.enableaction
local disableaction = nodes.tasks.disableaction
-local insert_node_after = node.insert_after
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local first_glyph = nuts.first_glyph
+local traverse_id = nuts.traverse_id
+
+local nodepool = nuts.pool
-local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_rule = nodepool.rule
local new_penalty = nodepool.penalty
@@ -400,7 +413,7 @@ scripts.numbertocategory = numbertocategory
local function colorize(start,stop)
for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[n[a_scriptstatus]]
+ local kind = numbertocategory[getattr(n,a_scriptstatus)]
if kind then
local ac = scriptcolors[kind]
if ac then
@@ -432,16 +445,17 @@ end
-- we can have a fonts.hashes.originals
function scripts.injectors.handler(head)
+ head = tonut(head)
local start = first_glyph(head) -- we already have glyphs here (subtype 1)
if not start then
- return head, false
+ return tonode(head), false
else
local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
local done, first, last, ok = false, nil, nil, false
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local a = start[a_scriptinjection]
+ local a = getattr(start,a_scriptinjection)
if a then
if a ~= last_a then
if first then
@@ -463,23 +477,24 @@ function scripts.injectors.handler(head)
normal_process = handler.injector
end
if normal_process then
- local f = start.font
+ -- wrong: originals are indices !
+ local f = getfont(start)
if f ~= lastfont then
originals = fontdata[f].resources
if resources then
originals = resources.originals
else
- -- can't happen
+ originals = nil -- can't happen
end
lastfont = f
end
- local c = start.char
- if originals then
+ local c = getchar(start)
+ if originals and type(originals) == "number" then
c = originals[c] or c
end
local h = hash[c]
if h then
- start[a_scriptstatus] = categorytonumber[h]
+ setattr(start,a_scriptstatus,categorytonumber[h])
if not first then
first, last = start, start
else
@@ -540,7 +555,7 @@ function scripts.injectors.handler(head)
first, last = nil, nil
end
end
- start = start.next
+ start = getnext(start)
end
if ok then
if trace_analyzing then
@@ -553,7 +568,7 @@ function scripts.injectors.handler(head)
end
done = true
end
- return head, done
+ return tonode(head), done
end
end
@@ -683,11 +698,11 @@ end)
local categories = characters.categories or { }
local function hit(root,head)
- local current = head.next
+ local current = getnext(head)
local lastrun = false
local lastfinal = false
- while current and current.id == glyph_code do
- local char = current.char
+ while current and getid(current) == glyph_code do
+ local char = getchar(current)
local newroot = root[char]
if newroot then
local final = newroot.final
@@ -701,7 +716,7 @@ local function hit(root,head)
else
return lastrun, lastfinal
end
- current = current.next
+ current = getnext(current)
end
if lastrun then
return lastrun, lastfinal
@@ -710,12 +725,13 @@ end
local tree, attr, proc
-function splitters.handler(head)
+function splitters.handler(head) -- todo: also first_glyph test
+ head = tonut(head)
local current = head
local done = false
while current do
- if current.id == glyph_code then
- local a = current[a_scriptsplitting]
+ if getid(current) == glyph_code then
+ local a = getattr(current,a_scriptsplitting)
if a then
if a ~= attr then
local handler = numbertohandler[a]
@@ -724,14 +740,14 @@ function splitters.handler(head)
proc = handler.splitter
end
if proc then
- local root = tree[current.char]
+ local root = tree[getchar(current)]
if root then
-- we don't check for attributes in the hitter (yet)
local last, final = hit(root,current)
if last then
- local next = last.next
- if next and next.id == glyph_code then
- local nextchar = next.char
+ local next = getnext(last)
+ if next and getid(next) == glyph_code then
+ local nextchar = getchar(next)
if tree[nextchar] then
if trace_splitdetail then
if type(final) == "string" then
@@ -760,9 +776,9 @@ function splitters.handler(head)
end
end
end
- current = current.next
+ current = getnext(current)
end
- return head, done
+ return tonode(head), done
end
local function marker(head,current,font,color) -- could become: nodes.tracers.marker
@@ -792,8 +808,8 @@ end
local last_a, last_f, last_s, last_q
function splitters.insertafter(handler,head,first,last,detail)
- local a = first[a_scriptsplitting]
- local f = first.font
+ local a = getattr(first,a_scriptsplitting)
+ local f = getfont(first)
if a ~= last_a or f ~= last_f then
last_s = emwidths[f] * numbertodataset[a].inter_word_stretch_factor
last_a = a
@@ -870,15 +886,15 @@ setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-- playing nice
function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,head) do
- -- if n[a_scriptinjection] then
+ for n in traverse_id(glyph_code,tonut(head)) do
+ -- if getattr(n,a_scriptinjection) then
-- -- already tagged by script feature, maybe some day adapt
-- else
- local char = n.char
+ local char = getchar(n)
local script = otfscripts[char]
if script then
- local dynamic = n[0] or 0
- local font = n.font
+ local dynamic = getattr(n,0) or 0
+ local font = getfont(n)
if dynamic > 0 then
local slot = cache_yes[font]
local attr = slot[script]
@@ -904,7 +920,7 @@ function autofontfeature.handler(head)
end
end
if attr ~= 0 then
- n[0] = attr
+ setattr(n,0,attr)
-- maybe set scriptinjection when associated
end
end
@@ -924,5 +940,22 @@ function autofontfeature.disable()
disableaction("processors","scripts.autofontfeature.handler")
end
-commands.enableautofontscript = autofontfeature.enable
-commands.disableautofontscript = autofontfeature.disable
+implement {
+ name = "enableautofontscript",
+ actions = autofontfeature.enable
+}
+
+implement {
+ name = "disableautofontscript",
+ actions = autofontfeature.disable }
+
+implement {
+ name = "setscript",
+ actions = scripts.set,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "resetscript",
+ actions = scripts.reset
+}
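
The splitter code above walks a small character trie: splitters.handler looks up the current glyph in `tree`, and hit() then follows child tables keyed by character code, remembering the last node that carried a `final` payload. Below is a toy version of that lookup with an invented two-entry tree; only the table shape matches the real splitter data.

    local byte = string.byte

    -- invented tree: char-keyed subtables, optionally carrying a .final payload
    local tree = {
        [byte("a")] = {
            final = "a",                          -- "a" on its own is a hit
            [byte("b")] = {
                [byte("c")] = { final = "abc" },  -- so is "abc", but "ab" is not
            },
        },
    }

    local function lookup(tree,word)
        local root, lastfinal = tree, nil
        for i=1,#word do
            root = root[byte(word,i)]
            if not root then
                break
            end
            if root.final then
                lastfinal = root.final
            end
        end
        return lastfinal
    end

    print(lookup(tree,"ab"))   -- a   (falls back to the last final seen)
    print(lookup(tree,"abc"))  -- abc

The real hit() does the same walk over glyph nodes instead of a string and also returns the last node, so the handler can insert material at that point in the node list.
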
diff --git a/tex/context/base/scrp-ini.mkiv b/tex/context/base/scrp-ini.mkiv
index 4a27dd8e2..cd060c02b 100644
--- a/tex/context/base/scrp-ini.mkiv
+++ b/tex/context/base/scrp-ini.mkiv
@@ -41,14 +41,14 @@
\to \everydefinescript
\unexpanded\def\scripts_basics_set
- {\ctxlua{scripts.set("\currentscript","\scriptparameter\c!method","\scriptparameter\c!preset")}}
+ {\clf_setscript{\currentscript}{\scriptparameter\c!method}{\scriptparameter\c!preset}}
\unexpanded\def\setscript[#1]%
{\edef\currentscript{#1}%
\scripts_basics_set}
\unexpanded\def\resetscript
- {\ctxlua{scripts.reset()}}
+ {\clf_resetscript}
\unexpanded\def\startscript[#1]%
{\begingroup
@@ -86,8 +86,8 @@
\fi
\to \everysetupscript
-\unexpanded\def\enableautofontscript {\ctxcommand{enableautofontscript ()}}
-\unexpanded\def\disableautofontscript{\ctxcommand{disableautofontscript()}}
+\unexpanded\def\enableautofontscript {\clf_enableautofontscript }
+\unexpanded\def\disableautofontscript{\clf_disableautofontscript}
\definefontfeature[latn][script=latn]
\definefontfeature[grek][script=grek]
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index 479d1c489..9c4d5acee 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -39,19 +39,28 @@ relatively easy to do.</p>
how they map onto this mechanism. I've learned that users can come up
with any demand so nothing here is frozen.</p>
+<p>Todo: I ran into the Unicode Collation document and noticed that
+there are some similarities (like the weights) but using that method
+would still demand extra code for language specifics. One option is
+to use the allkeys.txt file for the uc vectors but then we would also
+use the collapsed key (sq, code is now commented). In fact, we could
+just hook those into the replacer code that we run beforehand.</p>
+
<p>In the future index entries will become more clever, i.e. they will
have language etc properties that then can be used.</p>
]]--
-local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat
+local gsub, rep, sub, sort, concat, tohash, format = string.gsub, string.rep, string.sub, table.sort, table.concat, table.tohash, string.format
local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values
local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset
+local P, Cs, R, S, lpegmatch = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.match
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end)
local trace_methods = false trackers.register("sorters.methods", function(v) trace_methods = v end)
+local trace_orders = false trackers.register("sorters.orders", function(v) trace_orders = v end)
local report_sorters = logs.reporter("languages","sorters")
@@ -65,7 +74,9 @@ local digitsoffset = 0x20000 -- frozen
local digitsmaximum = 0xFFFFF -- frozen
local lccodes = characters.lccodes
+local uccodes = characters.uccodes
local lcchars = characters.lcchars
+local ucchars = characters.ucchars
local shchars = characters.shchars
local fscodes = characters.fscodes
local fschars = characters.fschars
@@ -81,8 +92,8 @@ local v_after = variables.after
local v_first = variables.first
local v_last = variables.last
-local validmethods = table.tohash {
- -- "ch", -- raw character
+local validmethods = tohash {
+ "ch", -- raw character (for tracing)
"mm", -- minus mapping
"zm", -- zero mapping
"pm", -- plus mapping
@@ -101,11 +112,11 @@ local predefinedmethods = {
}
sorters = {
- comparers = comparers,
- splitters = splitters,
- definitions = definitions,
- tracers = tracers,
- constants = {
+ comparers = comparers,
+ splitters = splitters,
+ definitions = definitions,
+ tracers = tracers,
+ constants = {
ignoredoffset = ignoredoffset,
replacementoffset = replacementoffset,
digitsoffset = digitsoffset,
@@ -113,6 +124,7 @@ sorters = {
defaultlanguage = v_default,
defaultmethod = v_default,
defaultdigits = v_numbers,
+ validmethods = validmethods,
}
}
@@ -120,7 +132,7 @@ local sorters = sorters
local constants = sorters.constants
local data, language, method, digits
-local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
+local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence, usedinsequence
local thefirstofsplit
local mte = { -- todo: assign to t
@@ -156,6 +168,7 @@ local mte = { -- todo: assign to t
}
local noorder = false
+local nothing = { 0 }
local function preparetables(data)
local orders, lower, m_mappings, z_mappings, p_mappings = data.orders, data.lower, { }, { }, { }
@@ -168,12 +181,12 @@ local function preparetables(data)
__index = function(t,k)
local n, nn
if k then
- if trace_tests then
+ if trace_orders then
report_sorters("simplifing character %C",k)
end
local l = lower[k] or lcchars[k]
if l then
- if trace_tests then
+ if trace_orders then
report_sorters(" 1 lower: %C",l)
end
local ml = rawget(t,l)
@@ -184,7 +197,7 @@ local function preparetables(data)
nn = nn + 1
n[nn] = ml[i] + (t.__delta or 0)
end
- if trace_tests then
+ if trace_orders then
report_sorters(" 2 order: % t",n)
end
end
@@ -192,7 +205,7 @@ local function preparetables(data)
if not n then
local s = shchars[k] -- maybe all components?
if s and s ~= k then
- if trace_tests then
+ if trace_orders then
report_sorters(" 3 shape: %C",s)
end
n = { }
@@ -200,7 +213,7 @@ local function preparetables(data)
for l in utfcharacters(s) do
local ml = rawget(t,l)
if ml then
- if trace_tests then
+ if trace_orders then
report_sorters(" 4 keep: %C",l)
end
if ml then
@@ -212,7 +225,7 @@ local function preparetables(data)
else
l = lower[l] or lcchars[l]
if l then
- if trace_tests then
+ if trace_orders then
report_sorters(" 5 lower: %C",l)
end
local ml = rawget(t,l)
@@ -226,44 +239,45 @@ local function preparetables(data)
end
end
else
- -- -- we probably never enter this branch
- -- -- fschars returns a single char
- --
- -- s = fschars[k]
- -- if s and s ~= k then
- -- if trace_tests then
- -- report_sorters(" 6 split: %s",s)
- -- end
- -- local ml = rawget(t,s)
- -- if ml then
- -- n = { }
- -- nn = 0
- -- for i=1,#ml do
- -- nn = nn + 1
- -- n[nn] = ml[i]
- -- end
- -- end
- -- end
- local b = utfbyte(k)
- n = decomposed[b] or { b }
- if trace_tests then
- report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
+ -- this is a kind of last resort branch that we might want to revise
+ -- one day
+ --
+ -- local b = utfbyte(k)
+ -- n = decomposed[b] or { b }
+ -- if trace_tests then
+ -- report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
+ -- end
+ --
+ -- we need to move way above valid order (new per 2014-10-16) .. maybe we
+ -- need to move it even more up to get numbers right (not all have orders)
+ --
+ if k == "\000" then
+ n = nothing -- shared
+ if trace_orders then
+ report_sorters(" 6 split: space") -- todo
+ end
+ else
+ local b = 2 * #orders + utfbyte(k)
+ n = decomposed[b] or { b } -- could be shared tables
+ if trace_orders then
+ report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
+ end
end
end
if n then
- if trace_tests then
+ if trace_orders then
report_sorters(" 7 order: % t",n)
end
else
n = noorder
- if trace_tests then
+ if trace_orders then
report_sorters(" 8 order: 0")
end
end
end
else
n = noorder
- if trace_tests then
+ if trace_orders then
report_sorters(" 9 order: 0")
end
end
@@ -298,11 +312,11 @@ local function update() -- prepare parent chains, needed when new languages are
end
end
-local function setlanguage(l,m,d,u)
+local function setlanguage(l,m,d,u) -- this will become a specification table
language = (l ~= "" and l) or constants.defaultlanguage
- data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage]
- method = (m ~= "" and m) or data.method or constants.defaultmethod
- digits = (d ~= "" and d) or data.digits or constants.defaultdigits
+ data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage]
+ method = (m ~= "" and m) or (data.method ~= "" and data.method) or constants.defaultmethod
+ digits = (d ~= "" and d) or (data.digits ~= "" and data.digits) or constants.defaultdigits
if trace_tests then
report_sorters("setting language %a, method %a, digits %a",language,method,digits)
end
@@ -333,7 +347,10 @@ local function setlanguage(l,m,d,u)
report_sorters("invalid sorter method %a in %a",s,method)
end
end
+ usedinsequence = tohash(sequence)
data.sequence = sequence
+ data.usedinsequence = usedinsequence
+-- usedinsequence.ch = true -- better just store the string
if trace_tests then
report_sorters("using sort sequence: % t",sequence)
end
@@ -353,6 +370,8 @@ end
-- tricky: { 0, 0, 0 } vs { 0, 0, 0, 0 } => longer wins and mm, pm, zm can have them
+-- inlining and checking first slot first doesn't speed up (the 400K complex author sort)
+
local function basicsort(sort_a,sort_b)
if sort_a and sort_b then
local na = #sort_a
@@ -360,19 +379,27 @@ local function basicsort(sort_a,sort_b)
if na > nb then
na = nb
end
- for i=1,na do
- local ai, bi = sort_a[i], sort_b[i]
- if ai > bi then
- return 1
- elseif ai < bi then
- return -1
+ if na > 0 then
+ for i=1,na do
+ local ai, bi = sort_a[i], sort_b[i]
+ if ai > bi then
+ return 1
+ elseif ai < bi then
+ return -1
+ end
end
end
end
return 0
end
-function comparers.basic(a,b) -- trace ea and eb
+-- todo: compile compare function
+
+local function basic(a,b) -- trace ea and eb
+ if a == b then
+ -- hashed (shared) entries
+ return 0
+ end
local ea, eb = a.split, b.split
local na, nb = #ea, #eb
if na == 0 and nb == 0 then
@@ -432,25 +459,72 @@ function comparers.basic(a,b) -- trace ea and eb
end
end
-local function numify(s)
- s = digitsoffset + tonumber(s) -- alternatively we can create range
- if s > digitsmaximum then
- s = digitsmaximum
+-- if we use sq:
+--
+-- local function basic(a,b) -- trace ea and eb
+-- local ea, eb = a.split, b.split
+-- local na, nb = #ea, #eb
+-- if na == 0 and nb == 0 then
+-- -- simple variant (single word)
+-- return basicsort(ea.sq,eb.sq)
+-- else
+-- -- complex variant, used in register (multiple words)
+-- local result = 0
+-- for i=1,nb < na and nb or na do
+-- local eai, ebi = ea[i], eb[i]
+-- result = basicsort(ea.sq,eb.sq)
+-- if result ~= 0 then
+-- return result
+-- end
+-- end
+-- if result ~= 0 then
+-- return result
+-- elseif na > nb then
+-- return 1
+-- elseif nb > na then
+-- return -1
+-- else
+-- return 0
+-- end
+-- end
+-- end
+
+comparers.basic = basic
+
+function sorters.basicsorter(a,b)
+ return basic(a,b) == -1
+end
+
+local function numify(old)
+ if digits == v_numbers then -- was swapped, fixed 2014-11-10
+ local new = digitsoffset + tonumber(old) -- alternatively we can create range
+ if new > digitsmaximum then
+ new = digitsmaximum
+ end
+ return utfchar(new)
+ else
+ return old
end
- return utfchar(s)
+end
+
+local pattern = nil
+
+local function prepare()
+ pattern = Cs( (
+ characters.tex.toutfpattern()
+ + lpeg.patterns.whitespace / "\000"
+ + (P("\\") / "") * R("AZ")^0 * (P(-1) + #(1-R("AZ")))
+ + (P("\\") * P(1) * R("az","AZ")^0) / ""
+ + S("[](){}$\"'") / ""
+ + R("09")^1 / numify
+ + P(1)
+ )^0 )
+ return pattern
end
function sorters.strip(str) -- todo: only letters and such
if str and str ~= "" then
- -- todo: make a decent lpeg
- str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy
- str = gsub(str,"\\%S*","") -- the rest
- str = gsub(str,"%s","\001") -- can be option
- str = gsub(str,"[%s%[%](){}%$\"\']*","")
- if digits == v_numbers then
- str = gsub(str,"(%d+)",numify) -- sort numbers properly
- end
- return str
+ return lpegmatch(pattern or prepare(),str)
else
return ""
end
@@ -477,7 +551,7 @@ sorters.firstofsplit = firstofsplit
-- for the moment we use an inefficient bunch of tables but once
-- we know what combinations make sense we can optimize this
-function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing
+function splitters.utf(str,checked) -- we could append m and u but this is cleaner, s is for tracing
if #replacements > 0 then
-- todo make an lpeg for this
for k=1,#replacements do
@@ -518,10 +592,15 @@ function splitters.utf(str) -- we could append m and u but this is cleaner, s is
else
n = n + 1
local l = lower[sc]
- l = l and utfbyte(l) or lccodes[b]
+ l = l and utfbyte(l) or lccodes[b] or b
+ -- local u = upper[sc]
+ -- u = u and utfbyte(u) or uccodes[b] or b
if type(l) == "table" then
l = l[1] -- there are currently no tables in lccodes but it can be some, day
end
+ -- if type(u) == "table" then
+ -- u = u[1] -- there are currently no tables in lccodes but it can be some, day
+ -- end
z_case[n] = l
if l ~= b then
m_case[n] = l - 1
@@ -580,18 +659,39 @@ function splitters.utf(str) -- we could append m and u but this is cleaner, s is
-- p_mapping = { p_mappings[fs][1] }
-- end
-- end
- local t = {
- ch = char,
- uc = byte,
- mc = m_case,
- zc = z_case,
- pc = p_case,
- mm = m_mapping,
- zm = z_mapping,
- pm = p_mapping,
- }
-
- return t
+ local result
+ if checked then
+ result = {
+ ch = trace_tests and char or nil, -- not in sequence
+ uc = usedinsequence.uc and byte or nil,
+ mc = usedinsequence.mc and m_case or nil,
+ zc = usedinsequence.zc and z_case or nil,
+ pc = usedinsequence.pc and p_case or nil,
+ mm = usedinsequence.mm and m_mapping or nil,
+ zm = usedinsequence.zm and z_mapping or nil,
+ pm = usedinsequence.pm and p_mapping or nil,
+ }
+ else
+ result = {
+ ch = char,
+ uc = byte,
+ mc = m_case,
+ zc = z_case,
+ pc = p_case,
+ mm = m_mapping,
+ zm = z_mapping,
+ pm = p_mapping,
+ }
+ end
+ -- local sq, n = { }, 0
+ -- for i=1,#byte do
+ -- for s=1,#sequence do
+ -- n = n + 1
+ -- sq[n] = result[sequence[s]][i]
+ -- end
+ -- end
+ -- result.sq = sq
+ return result
end
local function packch(entry)
@@ -602,7 +702,14 @@ local function packch(entry)
local tt, li = { }, split[i].ch
for j=1,#li do
local lij = li[j]
- tt[j] = utfbyte(lij) > ignoredoffset and "[]" or lij
+ local byt = utfbyte(lij)
+ if byt > ignoredoffset then
+ tt[j] = "[]"
+ elseif byt == 0 then
+ tt[j] = " "
+ else
+ tt[j] = lij
+ end
end
t[i] = concat(tt)
end
@@ -611,7 +718,14 @@ local function packch(entry)
local t, li = { }, split.ch
for j=1,#li do
local lij = li[j]
- t[j] = utfbyte(lij) > ignoredoffset and "[]" or lij
+ local byt = utfbyte(lij)
+ if byt > ignoredoffset then
+ t[j] = "[]"
+ elseif byt == 0 then
+ t[j] = " "
+ else
+ t[j] = lij
+ end
end
return concat(t)
end
@@ -622,16 +736,16 @@ local function packuc(entry)
if #split > 0 then -- useless test
local t = { }
for i=1,#split do
- t[i] = concat(split[i].uc, " ")
+ t[i] = concat(split[i].uc, " ") -- sq
end
return concat(t," + ")
else
- return concat(split.uc," ")
+ return concat(split.uc," ") -- sq
end
end
function sorters.sort(entries,cmp)
- if trace_tests or trace_methods then
+ if trace_methods then
local nofentries = #entries
report_sorters("entries: %s, language: %s, method: %s, digits: %s",nofentries,language,method,tostring(digits))
for i=1,nofentries do
@@ -653,7 +767,9 @@ function sorters.sort(entries,cmp)
first = " "
else
s = first
- report_sorters(">> %C (%C)",first,letter)
+ if first and letter then
+ report_sorters(">> %C (%C)",first,letter)
+ end
end
report_sorters(" %s | %s",packch(entry),packuc(entry))
end
diff --git a/tex/context/base/sort-lan.lua b/tex/context/base/sort-lan.lua
index 6d16c0d80..6b0cc5007 100644
--- a/tex/context/base/sort-lan.lua
+++ b/tex/context/base/sort-lan.lua
@@ -310,7 +310,7 @@ local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11)
definitions["cz"] = {
replacements = {
- { "ch", ch }, { "CH", CH }
+ { "ch", ch }, { "Ch", ch }, { "CH", ch }
},
entries = {
["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["č"] = "č",
diff --git a/tex/context/base/spac-adj.lua b/tex/context/base/spac-adj.lua
index c87a9d17f..cdf9b5051 100644
--- a/tex/context/base/spac-adj.lua
+++ b/tex/context/base/spac-adj.lua
@@ -56,3 +56,11 @@ function nodes.handlers.graphicvadjust(head,groupcode) -- we can make an actionc
return head, false
end
end
+
+interfaces.implement {
+ name = "enablegraphicvadjust",
+ onlyonce = true,
+ actions = function()
+ nodes.tasks.enableaction("finalizers","nodes.handlers.graphicvadjust")
+ end
+}
diff --git a/tex/context/base/spac-adj.mkiv b/tex/context/base/spac-adj.mkiv
index 447dd7229..b8534303d 100644
--- a/tex/context/base/spac-adj.mkiv
+++ b/tex/context/base/spac-adj.mkiv
@@ -16,18 +16,19 @@
\unprotect
% Very nasty but needed for margin stuff inside colored
-% paragraphs. Obsolete anyway.
+% paragraphs. Obsolete for a while.
\registerctxluafile{spac-adj}{1.001}
\definesystemattribute [graphicvadjust] [public]
\unexpanded\def\enablegraphicvadjust
- {\ctxlua{nodes.tasks.enableaction("finalizers","nodes.handlers.graphicvadjust")}%
+ {\writestatus\m!systems{graphicvadjusting is no longer needed!}%
+ \clf_enablegraphicvadjust % once anyway
\glet\enablegraphicvadjust\relax}
\unexpanded\def\graphicvadjust % currently not enabled ... nasty bidi handling
- {\enablegraphicvadjust % and no longer needed anyway
+ {\clf_enablegraphicvadjust % and probably no longer needed anyway
\dowithnextboxcontentcs\forgetall\spac_vadjust_graphic_finish\vbox}
\def\spac_vadjust_graphic_finish
diff --git a/tex/context/base/spac-ali.lua b/tex/context/base/spac-ali.lua
index 25cc6cd66..880da6213 100644
--- a/tex/context/base/spac-ali.lua
+++ b/tex/context/base/spac-ali.lua
@@ -10,13 +10,26 @@ local div = math.div
local format = string.format
local tasks = nodes.tasks
-local appendaction = tasks.appendaction
-local prependaction = tasks.prependaction
-local disableaction = tasks.disableaction
local enableaction = tasks.enableaction
-local slide_nodes = node.slide
-local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
+local linked_nodes = nuts.linked
local unsetvalue = attributes.unsetvalue
@@ -27,8 +40,6 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local line_code = listcodes.line
-local nodepool = nodes.pool
-
local new_stretch = nodepool.stretch
local a_realign = attributes.private("realign")
@@ -56,10 +67,10 @@ local function handler(head,leftpage,realpageno)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- if current.subtype == line_code then
- local a = current[a_realign]
+ if getsubtype(current) == line_code then
+ local a = getattr(current,a_realign)
if not a or a == 0 then
-- skip
else
@@ -75,12 +86,12 @@ local function handler(head,leftpage,realpageno)
action = leftpage and 2 or 1
end
if action == 1 then
- current.list = hpack_nodes(current.list .. new_stretch(3),current.width,"exactly")
+ setfield(current,"list",hpack_nodes(linked_nodes(getlist(current),new_stretch(3)),getfield(current,"width"),"exactly"))
if trace_realign then
report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif action == 2 then
- current.list = hpack_nodes(new_stretch(3) .. current.list,current.width,"exactly")
+ setfield(current,"list",hpack_nodes(linked_nodes(new_stretch(3),getlist(current)),getfield(current,"width"),"exactly"))
if trace_realign then
report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno)
end
@@ -90,14 +101,14 @@ local function handler(head,leftpage,realpageno)
done = true
nofrealigned = nofrealigned + 1
end
- current[a_realign] = unsetvalue
+ setattr(current,a_realign,unsetvalue)
end
end
- handler(current.list,leftpage,realpageno)
+ handler(getlist(current),leftpage,realpageno)
elseif id == vlist_code then
- handler(current.list,leftpage,realpageno)
+ handler(getlist(current),leftpage,realpageno)
end
- current = current.next
+ current = getnext(current)
end
return head, done
end
@@ -105,7 +116,8 @@ end
function alignments.handler(head)
local leftpage = isleftpage(true,false)
local realpageno = texgetcount("realpageno")
- return handler(head,leftpage,realpageno)
+ local head, done = handler(tonut(head),leftpage,realpageno)
+ return tonode(head), done
end
local enabled = false
@@ -121,7 +133,11 @@ function alignments.set(n)
texsetattribute(a_realign,texgetcount("realpageno") * 10 + n)
end
-commands.setrealign = alignments.set
+interfaces.implement {
+ name = "setrealign",
+ actions = alignments.set,
+ arguments = "integer",
+}
statistics.register("realigning", function()
if nofrealigned > 0 then
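
The realignment handler above repacks a line with extra stretch (new_stretch(3)) on one side so that its content flushes to the other: appending the stretch after the list gives the "flushing left" case, prepending it gives "flushing right", and hpack_nodes keeps the original line width. A reduced sketch of the first case follows; the function name is invented, the accessor names are the same locals the patch introduces.

    local nuts         = nodes.nuts
    local getlist      = nuts.getlist
    local getfield     = nuts.getfield
    local setfield     = nuts.setfield
    local hpack_nodes  = nuts.hpack
    local linked_nodes = nuts.linked
    local new_stretch  = nuts.pool.stretch

    -- illustrative only: flush the content of one line (an hlist nut) to the left
    local function flushleft(line)
        local repacked = hpack_nodes(
            linked_nodes(getlist(line),new_stretch(3)),  -- content followed by stretch
            getfield(line,"width"),"exactly")            -- keep the original line width
        setfield(line,"list",repacked)
    end
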
diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv
index 9c7e81379..07d588ba7 100644
--- a/tex/context/base/spac-ali.mkiv
+++ b/tex/context/base/spac-ali.mkiv
@@ -38,8 +38,8 @@
\to \everyforgetall
\unexpanded\def\resetrealignsignal{\attribute\realignattribute\attributeunsetvalue}
-\unexpanded\def\signalinnerrealign{\ctxcommand{setrealign(2)}}
-\unexpanded\def\signalouterrealign{\ctxcommand{setrealign(1)}}
+\unexpanded\def\signalinnerrealign{\clf_setrealign\plustwo}
+\unexpanded\def\signalouterrealign{\clf_setrealign\plusone}
\installcorenamespace{aligncommand}
\installcorenamespace{alignhorizontal}
@@ -207,6 +207,9 @@
\unexpanded\def\spac_align_set_stretch
{\emergencystretch\bodyfontsize}
+\unexpanded\def\spac_align_set_extreme_stretch
+ {\emergencystretch10\bodyfontsize}
+
% Vertical
\newconstant\c_spac_align_state_vertical
@@ -562,6 +565,12 @@
\spac_align_use_indeed
\fi}
+\unexpanded\def\dousealignparameter#1% faster local variant
+ {\edef\m_spac_align_asked{#1}%
+ \ifx\m_spac_align_asked\empty\else
+ \spac_align_use_indeed
+ \fi}
+
\def\spac_align_use_indeed
{\expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname
\ifx\raggedcommand\relax
@@ -585,13 +594,36 @@
\unexpanded\def\spac_align_use_now#1%
{\csname\??alignmentnormalcache#1\endcsname}
-% The keywords:
+% Maybe we need something different in columns.
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
{\ifcsname\??aligncommand#1\endcsname \else
\setvalue{\??aligncommand#1}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
\fi}
+% beware, toks stuff and states are set at a different time, so installalign is
+% only for special options
+%
+% \setvalue{\??aligncommand whatever}%
+% {\c_spac_align_state_horizontal\plushundred
+% \t_spac_align_collected\expandafter{\the\t_spac_align_collected .....}}
+%
+% this one could deal with both
+%
+% \unexpanded\def\installalignoption#1#2%
+% {\ifcsname\??aligncommand#1\endcsname \else
+% \setvalue{\??aligncommand#1}%
+% {\spac_align_set_horizontal_none
+% \c_spac_align_state_horizontal\plushundred % don't set
+% \t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
+% \fi}
+%
+% \installalignoption
+% {whatever}
+% {}
+
+% The keywords:
+
\letvalue{\??aligncommand\empty }\empty
\setvalue{\??aligncommand\v!broad }{\c_spac_align_state_broad \plusone }
\setvalue{\??aligncommand\v!wide }{\c_spac_align_state_broad \plustwo }
@@ -652,6 +684,7 @@
\setvalue{\??aligncommand\v!tolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_tolerant}}
\setvalue{\??aligncommand\v!verytolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_very_tolerant}}
\setvalue{\??aligncommand\v!stretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_stretch}}
+\setvalue{\??aligncommand\v!extremestretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_extreme_stretch}}
%D For Wolfgang:
@@ -692,7 +725,7 @@
\forgetall
\let\\=\endgraf
\ifdoublesided\signalinnerrealign\fi
- \doifrightpageelse\spac_align_set_horizontal_right\spac_align_set_horizontal_left
+ \doifelserightpage\spac_align_set_horizontal_right\spac_align_set_horizontal_left
\let\next}
\unexpanded\def\obox#1#2#3%
@@ -700,7 +733,7 @@
\forgetall
\let\\=\endgraf
\ifdoublesided\signalouterrealign\fi
- \doifrightpageelse\c_spac_align_state_horizontal_left\spac_align_set_horizontal_right
+ \doifelserightpage\c_spac_align_state_horizontal_left\spac_align_set_horizontal_right
\let\next}
\let\raggedbox\relax
@@ -938,7 +971,7 @@
\hbox}
\def\doxcheckline % used for floats so multipass anyway
- {\signalrightpage\doifrightpageelse\donetrue\donefalse}
+ {\signalrightpage\doifelserightpage\donetrue\donefalse}
\setvalue{\??alignline\v!inner }{\doxalignline\doxcheckline++\zeropoint \relax\hss }
\setvalue{\??alignline\v!outer }{\doxalignline\doxcheckline++\zeropoint \hss \relax}
@@ -1012,15 +1045,51 @@
% \simplealignedbox{2cm}{right}{x}
\installcorenamespace{alignsimple}
-
-\setvalue{\??alignsimple\v!right }#1{{#1\hss}}
-\setvalue{\??alignsimple\v!left }#1{{\hss#1}}
-\setvalue{\??alignsimple\v!flushright}#1{{\hss#1}}
-\setvalue{\??alignsimple\v!flushleft }#1{{#1\hss}}
-\setvalue{\??alignsimple\v!middle }#1{{\hss#1\hss}}
+\installcorenamespace{alignsimplereverse}
+
+% todo: also handle \bgroup ... \egroup
+
+\unexpanded\def\spac_align_simple_left #1{{#1\hss}}
+\unexpanded\def\spac_align_simple_right #1{{\hss#1}}
+\unexpanded\def\spac_align_simple_middle#1{{\hss#1\hss}}
+
+\letvalue{\??alignsimple \v!right }\spac_align_simple_left
+\letvalue{\??alignsimple \v!outer }\spac_align_simple_left % not managed! see linenumbers
+\letvalue{\??alignsimple \v!flushleft }\spac_align_simple_left
+\letvalue{\??alignsimple \v!left }\spac_align_simple_right
+\letvalue{\??alignsimple \v!inner }\spac_align_simple_right % not managed! see linenumbers
+\letvalue{\??alignsimple \v!flushright}\spac_align_simple_right
+\letvalue{\??alignsimple \v!middle }\spac_align_simple_middle
+
+\letvalue{\??alignsimplereverse\v!right }\spac_align_simple_right
+\letvalue{\??alignsimplereverse\v!outer }\spac_align_simple_right % not managed! see linenumbers
+\letvalue{\??alignsimplereverse\v!flushleft }\spac_align_simple_right
+\letvalue{\??alignsimplereverse\v!left }\spac_align_simple_left
+\letvalue{\??alignsimplereverse\v!inner }\spac_align_simple_left % not managed! see linenumbers
+\letvalue{\??alignsimplereverse\v!flushright}\spac_align_simple_left
+\letvalue{\??alignsimplereverse\v!middle }\spac_align_simple_middle
\unexpanded\def\simplealignedbox#1#2%
- {\hbox to #1\csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\endcsname}
+ {\hbox \ifdim#1>\zeropoint to #1
+ \csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\expandafter\endcsname
+ \fi}
+
+\unexpanded\def\simplealignedboxplus#1#2#3%
+ {\hbox #3 \ifdim#1>\zeropoint to #1
+ \csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\expandafter\endcsname
+ \fi}
+
+\newconditional\alignsimplelefttoright \settrue\alignsimplelefttoright
+
+\unexpanded\def\simplereversealignedbox#1#2%
+ {\hbox \ifdim#1>\zeropoint to #1
+ \csname\??alignsimplereverse\ifcsname\??alignsimplereverse#2\endcsname#2\else\v!left\fi\expandafter\endcsname
+ \fi}
+
+\unexpanded\def\simplereversealignedboxplus#1#2#3%
+ {\hbox #3 \ifdim#1>\zeropoint to #1
+ \csname\??alignsimplereverse\ifcsname\??alignsimplereverse#2\endcsname#2\else\v!left\fi\expandafter\endcsname
+ \fi}
% \installnamespace{alignsets}
%
diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua
index db98b42a6..e3fa6d099 100644
--- a/tex/context/base/spac-chr.lua
+++ b/tex/context/base/spac-chr.lua
@@ -14,24 +14,47 @@ local byte, lower = string.byte, string.lower
-- to be redone: characters will become tagged spaces instead, as then we keep track of
-- spaceskip etc
+-- todo: only setattr when export / use properties
+
local next = next
-trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end)
+local trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end)
+local trace_nbsp = false trackers.register("typesetters.nbsp", function(v) trace_nbsp = v end)
-report_characters = logs.reporter("typesetting","characters")
+local report_characters = logs.reporter("typesetting","characters")
local nodes, node = nodes, node
-local insert_node_after = nodes.insert_after
-local remove_node = nodes.remove
-local copy_node_list = nodes.copy_list
-local traverse_id = nodes.traverse_id
+local nuts = nodes.nuts
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+
+local setcolor = nodes.tracers.colors.set
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local copy_node_list = nuts.copy_list
+local traverse_id = nuts.traverse_id
local tasks = nodes.tasks
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+local new_rule = nodepool.rule
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
@@ -41,6 +64,7 @@ local glue_code = nodecodes.glue
local space_skip_code = skipcodes["spaceskip"]
local chardata = characters.data
+local is_punctuation = characters.is_punctuation
local typesetters = typesetters
@@ -63,48 +87,56 @@ local c_zero = byte('0')
local c_period = byte('.')
local function inject_quad_space(unicode,head,current,fraction)
- local attr = current.attr
+ local attr = getfield(current,"attr")
if fraction ~= 0 then
- fraction = fraction * fontquads[current.font]
+ fraction = fraction * fontquads[getfont(current)]
end
local glue = new_glue(fraction)
--- glue.attr = copy_node_list(attr)
- glue.attr = attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_char_space(unicode,head,current,parent)
- local attr = current.attr
- local font = current.font
+ local attr = getfield(current,"attr")
+ local font = getfont(current)
local char = fontcharacters[font][parent]
local glue = new_glue(char and char.width or fontparameters[font].space)
- glue.attr = current.attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink)
- local attr = current.attr
- local glue = new_glue(space,spacestretch,spaceshrink)
+ local attr = getfield(current,"attr")
+ local glue = new_glue(space,spacestretch,spaceshrink)
local penalty = new_penalty(10000)
- glue.attr = attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode) -- bombs
head, current = insert_node_after(head,current,penalty)
+ if trace_nbsp then
+ local rule = new_rule(space)
+ local kern = new_kern(-space)
+ local penalty = new_penalty(10000)
+ setcolor(rule,"orange")
+ head, current = insert_node_after(head,current,rule)
+ head, current = insert_node_after(head,current,kern)
+ head, current = insert_node_after(head,current,penalty)
+ end
head, current = insert_node_after(head,current,glue)
return head, current
end
local function nbsp(head,current)
- local para = fontparameters[current.font]
- if current[a_alignstate] == 1 then -- flushright
+ local para = fontparameters[getfont(current)]
+ if getattr(current,a_alignstate) == 1 then -- flushright
head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
- current.subtype = space_skip_code
+ setfield(current,"subtype",space_skip_code)
else
head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
end
@@ -121,7 +153,7 @@ end
function characters.replacenbspaces(head)
for current in traverse_id(glyph_code,head) do
- if current.char == 0x00A0 then
+ if getchar(current) == 0x00A0 then
local h = nbsp(head,current)
if h then
head = remove_node(h,current,true)
@@ -146,22 +178,34 @@ local methods = {
-- The next one uses an attribute assigned to the character but still we
-- don't have the 'local' value.
+ [0x001F] = function(head,current)
+ local next = getnext(current)
+ if next and getid(next) == glyph_code then
+ local char = getchar(next)
+ head, current = remove_node(head,current,true)
+ if not is_punctuation[char] then
+ local p = fontparameters[getfont(next)]
+ head, current = insert_node_before(head,current,new_glue(p.space,p.space_stretch,p.space_shrink))
+ end
+ end
+ end,
+
[0x00A0] = function(head,current) -- nbsp
- local next = current.next
- if next and next.id == glyph_code then
- local char = next.char
+ local next = getnext(current)
+ if next and getid(next) == glyph_code then
+ local char = getchar(next)
if char == 0x200C or char == 0x200D then -- nzwj zwj
- next = next.next
- if next and nbsphash[next.char] then
+ next = getnext(next)
+ if next and nbsphash[getchar(next)] then
return false
end
elseif nbsphash[char] then
return false
end
end
- local prev = current.prev
- if prev and prev.id == glyph_code and nbsphash[prev.char] then
- return false -- kannada
+ local prev = getprev(current)
+ if prev and getid(prev) == glyph_code and nbsphash[getchar(prev)] then
+ return false
end
return nbsp(head,current)
end,
@@ -215,11 +259,11 @@ local methods = {
end,
[0x202F] = function(head,current) -- narrownobreakspace
- return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8)
+ return inject_nobreak_space(0x202F,head,current,fontquads[getfont(current)]/8)
end,
[0x205F] = function(head,current) -- math thinspace
- return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8)
+ return inject_nobreak_space(0x205F,head,current,fontparameters[getfont(current)].space/8)
end,
-- [0xFEFF] = function(head,current) -- zerowidthnobreakspace
@@ -228,14 +272,15 @@ local methods = {
}
-function characters.handler(head)
+function characters.handler(head) -- todo: use traverse_id
+ head = tonut(head)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local next = current.next
- local char = current.char
+ local next = getnext(current)
+ local char = getchar(current)
local method = methods[char]
if method then
if trace_characters then
@@ -249,8 +294,8 @@ function characters.handler(head)
end
current = next
else
- current = current.next
+ current = getnext(current)
end
end
- return head, done
+ return tonode(head), done
end
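-- Editor's sketch, not part of the patch: the nut/node wrapping pattern that the
-- rewritten characters.handler above follows, using only helpers this diff itself
-- imports (nodes.nuts.tonut/tonode, getid, getnext, getchar, nodes.nodecodes).
-- It only runs inside ConTeXt MkIV / LuaTeX; the handler converts the incoming
-- userdata list to a direct node ("nut") once, walks it with the fast accessors,
-- and converts back on return.

local nuts       = nodes.nuts
local tonut      = nuts.tonut
local tonode     = nuts.tonode
local getid      = nuts.getid
local getnext    = nuts.getnext
local getchar    = nuts.getchar

local glyph_code = nodes.nodecodes.glyph

local function sketch_handler(head)     -- head: a regular (userdata) node list
    head = tonut(head)                  -- switch to direct access once
    local current = head
    local done    = false
    while current do
        if getid(current) == glyph_code and getchar(current) == 0x00A0 then
            done = true                 -- e.g. spotted an nbsp; the real handler
        end                             -- dispatches via its methods table here
        current = getnext(current)
    end
    return tonode(head), done           -- hand a regular node back to the caller
end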
diff --git a/tex/context/base/spac-chr.mkiv b/tex/context/base/spac-chr.mkiv
index 54a25be34..562fb940c 100644
--- a/tex/context/base/spac-chr.mkiv
+++ b/tex/context/base/spac-chr.mkiv
@@ -77,6 +77,9 @@
\let\zwnj\zerowidthnonjoiner
\let\zwj \zerowidthjoiner
+\let\nbsp\nobreakspace
+
+\chardef\optionalspace"1F % will be space unless before punctuation
% Shortcuts:
diff --git a/tex/context/base/spac-def.mkiv b/tex/context/base/spac-def.mkiv
index 312483cfa..7ead3c63e 100644
--- a/tex/context/base/spac-def.mkiv
+++ b/tex/context/base/spac-def.mkiv
@@ -60,7 +60,7 @@
\c!depth=.28,
\c!top=1.0,
\c!bottom=0.4,
- \c!distance=\onepoint,
+ \c!distance=\onepoint, % \dimexpr\openlineheight/10\relax
\c!line=2.8\exheight,
\c!stretch=\zerocount,
\c!shrink=\zerocount]
diff --git a/tex/context/base/spac-hor.lua b/tex/context/base/spac-hor.lua
index c9d6e2b15..5d5a43e31 100644
--- a/tex/context/base/spac-hor.lua
+++ b/tex/context/base/spac-hor.lua
@@ -6,14 +6,15 @@ if not modules then modules = { } end modules ['spac-hor'] = {
license = "see context related readme files"
}
-local match = string.match
local utfbyte = utf.byte
+local lpegmatch, P, C = lpeg.match, lpeg.P, lpeg.C
local context = context
-local commands = commands
local chardata = characters.data
+local p_check = P("the ") * (P("letter") + P("character")) * P(" ") * lpeg.patterns.utf8byte -- is a capture already
+
local can_have_space = table.tohash {
"lu", "ll", "lt", "lm", "lo", -- letters
-- "mn", "mc", "me", -- marks
@@ -26,10 +27,18 @@ local can_have_space = table.tohash {
-- "cc", "cf", "cs", "co", "cn", -- others
}
-function commands.autonextspace(str) -- todo: use nexttoken
- local ch = match(str,"the letter (.)") or match(str,"the character (.)")
- ch = ch and chardata[utfbyte(ch)]
- if ch and can_have_space[ch.category] then
- context.space()
+local function autonextspace(str) -- todo: make a real not intrusive lookahead
+ local b = lpegmatch(p_check,str)
+ if b then
+ local d = chardata[b]
+ if d and can_have_space[d.category] then
+ context.space()
+ end
end
end
+
+interfaces.implement {
+ name = "autonextspace",
+ actions = autonextspace,
+ arguments = "string",
+}
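-- Editor's sketch, not part of the patch: what the new p_check pattern in
-- spac-hor.lua is matched against. \meaning of a character token expands to
-- something like "the letter A" or "the character 1", and the pattern strips
-- the prefix and yields the code point. For a standalone test a plain byte
-- capture stands in for ConTeXt's lpeg.patterns.utf8byte (which also handles
-- multibyte UTF-8); lpeg itself ships with LuaTeX.

local lpeg = require("lpeg")
local P, C = lpeg.P, lpeg.C

local p_check = P("the ") * (P("letter") + P("character")) * P(" ")
              * (C(1) / string.byte)               -- simplified utf8byte stand-in

print(lpeg.match(p_check, "the letter A"))         --> 65
print(lpeg.match(p_check, "the character 1"))      --> 49
print(lpeg.match(p_check, "macro:->foo"))          --> nil, not a character token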
diff --git a/tex/context/base/spac-hor.mkiv b/tex/context/base/spac-hor.mkiv
index 4cd913290..54156c3b4 100644
--- a/tex/context/base/spac-hor.mkiv
+++ b/tex/context/base/spac-hor.mkiv
@@ -32,7 +32,7 @@
{\doifoutervmode{\ifconditional\c_spac_indentation_indent_first\else\spac_indentation_variant_no\fi}}
\unexpanded\def\setupindenting
- {\doifnextoptionalelse\spac_indentation_setup_options\spac_indentation_setup_size}
+ {\doifelsenextoptionalcs\spac_indentation_setup_options\spac_indentation_setup_size}
\unexpanded\def\spac_indentation_setup_size
{\assigndimension\v_spac_indentation_current\d_spac_indentation_par{1\emwidth}{1.5\emwidth}{2\emwidth}}
@@ -64,24 +64,71 @@
\def\spac_indentation_set_everypar
{\everypar{\checkindentation}}
+\unexpanded\def\useindentingparameter#1% faster local variant
+ {\edef\m_spac_indentation_options{#1\c!indenting}%
+ \ifx\m_spac_indentation_options\empty \else
+ \spac_indentation_setup_indeed
+ \fi}
+
+% \def\spac_indentation_apply_step_one#1%
+% {\ifcsname\??indentingmethod#1\endcsname
+% % case two
+% \else
+% \edef\v_spac_indentation_current{#1}% single entry in list
+% \let\normalindentation\v_spac_indentation_current
+% \spac_indentation_setup_size
+% \fi}
+%
+% \def\spac_indentation_apply_step_two#1%
+% {\ifcsname\??indentingmethod#1\endcsname
+% \csname\??indentingmethod#1\endcsname
+% \else
+% % case one
+% \fi}
+
+% \defineindenting[whatever][yes,2cm]
+% %defineindenting[whatever][yes,-2cm]
+%
+% \setupindenting[yes,-2em] \input ward \par
+% \setupindenting[yes,2em] \input ward \par
+% \setupindenting[whatever] \input ward \par
+
+\installcorenamespace {indentingpreset}
+
+\unexpanded\def\defineindenting
+ {\dodoubleargument\spac_indenting_define}
+
+\def\spac_indenting_define[#1][#2]% todo: mixes
+ {\setevalue{\??indentingpreset#1}{#2}}
+
+\def\spac_indentation_apply_step_one_nested#1%
+ {\expandafter\processcommacommand\expandafter[\csname\??indentingpreset#1\endcsname]\spac_indentation_apply_step_one}
+
+\def\spac_indentation_apply_step_two_nested#1%
+ {\expandafter\processcommacommand\expandafter[\csname\??indentingpreset#1\endcsname]\spac_indentation_apply_step_two}
+
\def\spac_indentation_apply_step_one#1%
- {\ifcsname\??indentingmethod#1\endcsname
+ {\ifcsname\??indentingpreset#1\endcsname
+ \spac_indentation_apply_step_one_nested{#1}%
+ \else\ifcsname\??indentingmethod#1\endcsname
% case two
\else
\edef\v_spac_indentation_current{#1}% single entry in list
\let\normalindentation\v_spac_indentation_current
\spac_indentation_setup_size
- \fi}
+ \fi\fi}
\def\spac_indentation_apply_step_two#1%
- {\ifcsname\??indentingmethod#1\endcsname
+ {\ifcsname\??indentingpreset#1\endcsname
+ \spac_indentation_apply_step_two_nested{#1}%
+ \else\ifcsname\??indentingmethod#1\endcsname
\csname\??indentingmethod#1\endcsname
\else
% case one
- \fi}
+ \fi\fi}
\unexpanded\def\indenting % kind of obsolete
- {\doifnextoptionalelse\spac_indentation_setup_options\relax}
+ {\doifelsenextoptionalcs\spac_indentation_setup_options\relax}
% use \noindentation to suppress next indentation
@@ -217,7 +264,7 @@
\unexpanded\def\spac_indentation_check_next_indentation
{\global\let\dorechecknextindentation\relax
- \doifnextcharelse\par\donothing\spac_indentation_variant_no} % messy check as next is seldom \par
+ \doifelsenextchar\par\donothing\spac_indentation_variant_no} % messy check as next is seldom \par
\def\spac_indentation_variant_auto
{\global\let\dorechecknextindentation\spac_indentation_check_next_indentation}
@@ -339,7 +386,7 @@
\installspacingmethod \v!broad {\nonfrenchspacing} % more depending on what punctuation
\unexpanded\def\setupspacing
- {\doifnextoptionalelse\spac_spacecodes_setup_yes\spac_spacecodes_setup_nop}
+ {\doifelsenextoptionalcs\spac_spacecodes_setup_yes\spac_spacecodes_setup_nop}
\def\spac_spacecodes_setup_yes[#1]%
{\csname\??spacecodemethod#1\endcsname
@@ -630,7 +677,7 @@
\fi}
\def\spac_narrower_start_named_one[#1]%
- {\doifassignmentelse{#1}\spac_narrower_start_named_one_yes\spac_narrower_start_named_one_nop[#1]}
+ {\doifelseassignment{#1}\spac_narrower_start_named_one_yes\spac_narrower_start_named_one_nop[#1]}
\def\spac_narrower_start_named_one_yes[#1][#2]% [settings] []
{\setupcurrentnarrower[#1]%
@@ -641,14 +688,14 @@
\spac_narrower_start_apply{\narrowerparameter\v!default}}
\def\spac_narrower_start_named_two[#1]%
- {\doifassignmentelse{#1}\spac_narrower_start_named_settings_how\spac_narrower_start_named_tag_unknown[#1]}
+ {\doifelseassignment{#1}\spac_narrower_start_named_settings_how\spac_narrower_start_named_tag_unknown[#1]}
\def\spac_narrower_start_named_settings_how[#1][#2]% [settings] [how]
{\setupcurrentnarrower[#1]%
\spac_narrower_start_apply{#2}}
\def\spac_narrower_start_named_tag_unknown[#1][#2]% [tag] [...]
- {\doifassignmentelse{#2}\spac_narrower_start_named_tag_settings\spac_narrower_start_named_tag_how[#1][#2]}
+ {\doifelseassignment{#2}\spac_narrower_start_named_tag_settings\spac_narrower_start_named_tag_how[#1][#2]}
\def\spac_narrower_start_named_tag_settings[#1][#2]% [tag] [settings]
{\edef\currentnarrower{#1}%
@@ -692,8 +739,8 @@
\unexpanded\def\dosetleftskipadaption #1{\leftskipadaption \ifcsname\??skipadaptionleft #1\endcsname\csname\??skipadaptionleft #1\endcsname\else#1\fi\relax}
\unexpanded\def\dosetrightskipadaption#1{\rightskipadaption\ifcsname\??skipadaptionright#1\endcsname\csname\??skipadaptionright#1\endcsname\else#1\fi\relax}
-\unexpanded\def\doadaptleftskip #1{\dosetleftskipadaption {#1}\advance\leftskip \leftskipadaption }
-\unexpanded\def\doadaptrightskip#1{\dosetrightskipadaption{#1}\advance\rightskip\rightskipadaption}
+\unexpanded\def\doadaptleftskip #1{\normalexpanded{\dosetleftskipadaption {#1}}\advance\leftskip \leftskipadaption }
+\unexpanded\def\doadaptrightskip#1{\normalexpanded{\dosetrightskipadaption{#1}}\advance\rightskip\rightskipadaption}
\unexpanded\def\forgetbothskips
{\leftskip\zeropoint
@@ -739,7 +786,7 @@
{\dosingleargument\spac_tolerances_setup}
\def\spac_tolerances_setup[#1]%
- {\doifinsetelse\v!vertical{#1}%
+ {\doifelseinset\v!vertical{#1}%
{\processcommacommand[#1]\spac_tolerances_step_vertical }
{\processcommacommand[#1]\spac_tolerances_step_horizontal}}
@@ -809,31 +856,6 @@
%D In \CONTEXT\ however we save some processing time by putting
%D an extra \type{\hbox} around the \type{\strutbox}.
-% moved from page-lin.tex to here (due to visualization added
-% in august 2003)
-%
-% \unexpanded \def\crlf
-% {\ifhmode\unskip\else\strut\fi\ifcase\raggedstatus\hfil\fi\break}
-
-\unexpanded\def\crlf
- {\ifhmode
- \unskip
- \prewordbreak\crlfplaceholder
- \ifcase\raggedstatus\hfil\or\or\or\hfil\fi
- \break
- \else
- \crlfplaceholder
- \endgraf
- \fi}
-
-\unexpanded\def\crlfplaceholder
- {\strut}
-
-\unexpanded\def\settestcrlf
- {\unexpanded\def\crlfplaceholder
- {\hbox to \zeropoint
- {\strut{\infofont\kern.25em}\lohi{\infofont CR}{\infofont LF}\hss}}}
-
%D \starttyping
%D % \setuplayout[gridgrid=yes] \showgrid
%D
@@ -1024,7 +1046,7 @@
{\futurelet\nexttoken\spac_spaces_auto_insert_next}
\def\spac_spaces_auto_insert_next
- {\ctxcommand{autonextspace(\!!bs\meaning\nexttoken\!!es)}} % todo, just consult nexttoken at the lua end
+ {\clf_autonextspace{\meaning\nexttoken}} % todo, just consult nexttoken at the lua end
%D Moved from bib module:
@@ -1059,7 +1081,7 @@
%D A rather unknown one:
\unexpanded\def\widened % moved from cont-new
- {\doifnextoptionalelse\spac_widened_yes\spac_widened_nop}
+ {\doifelsenextoptionalcs\spac_widened_yes\spac_widened_nop}
\def\spac_widened_yes[#1]#2{\hbox \s!spread #1{\hss#2\hss}}
\def\spac_widened_nop #1{\hbox \s!spread \emwidth{\hss#1\hss}}
diff --git a/tex/context/base/spac-lin.mkiv b/tex/context/base/spac-lin.mkiv
index 094e18e0b..20fec5d45 100644
--- a/tex/context/base/spac-lin.mkiv
+++ b/tex/context/base/spac-lin.mkiv
@@ -88,13 +88,13 @@
\linesparameter\c!before
\pushmacro\checkindentation
\whitespace
- \dostarttagged\t!lines\currentlines
+ \dostarttaggedchained\t!lines\currentlines\??lines
\begingroup
\uselinesstyleandcolor\c!style\c!color
- \setupindenting[\linesparameter\c!indenting]%
- \setupalign[\linesparameter\c!align]%
+ \useindentingparameter\linesparameter
+ \usealignparameter\linesparameter
\typesettinglinestrue
- \setupwhitespace[\v!none]%
+ \setupwhitespace[\v!none]% todo use fast variant
%\obeylines % move upwards to keep spaces in the first line due to optional argument
\ignorespaces
\glet\spac_after_first_obeyed_line\spac_lines_after_first_obeyed_line_a
@@ -128,7 +128,7 @@
\egroup}
\def\spac_lines_between
- {\doifmeaningelse\next\obeyedline % brrr
+ {\doifelsemeaning\next\obeyedline % brrr
{\linesparameter\c!inbetween}
{\spac_after_first_obeyed_line}}
diff --git a/tex/context/base/spac-pag.mkiv b/tex/context/base/spac-pag.mkiv
index da4c8e970..1ecc31d8c 100644
--- a/tex/context/base/spac-pag.mkiv
+++ b/tex/context/base/spac-pag.mkiv
@@ -51,7 +51,7 @@
\setpagestaterealpageno{#1}{\number#2}%
\fi}
-\unexpanded\def\doifrightpagestateelse#1#2% not expandable !
+\unexpanded\def\doifelserightpagestate#1#2% not expandable !
{\ifcase\frozenpagestate
\pagestatemismatchfalse
\realpagestateno\realfolio
@@ -100,7 +100,7 @@
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doifforcedrightpagestateelse#1#2%
+\unexpanded\def\doifelseforcedrightpagestate#1#2%
{\ifcase\frozenpagestate
\pagestatemismatchfalse
\realpagestateno\realfolio
@@ -135,6 +135,9 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifrightpagestateelse \doifelserightpagestate
+\let\doifforcedrightpagestateelse\doifelseforcedrightpagestate
+
\unexpanded\def\freezepagestate {\frozenpagestate\plusone }
\unexpanded\def\defrostpagestate{\frozenpagestate\zerocount}
@@ -147,7 +150,9 @@
\definepagestate[\s!paragraph]
\unexpanded\def\signalrightpage {\dotrackpagestate \s!paragraph\nofraggedparagraphs} % use \dontleavehmode if needed
-\unexpanded\def\doifrightpageelse{\doifrightpagestateelse\s!paragraph\nofraggedparagraphs}
+\unexpanded\def\doifelserightpage{\doifelserightpagestate\s!paragraph\nofraggedparagraphs}
+
+\let\doifrightpageelse\doifelserightpage
\installcorenamespace{pagechanges}
diff --git a/tex/context/base/spac-par.mkiv b/tex/context/base/spac-par.mkiv
index 825cdca46..4dd3db243 100644
--- a/tex/context/base/spac-par.mkiv
+++ b/tex/context/base/spac-par.mkiv
@@ -203,6 +203,8 @@
% \glet\flushpostponednodedata\spac_postponed_data_flush
% \fi}
+\newtoks\everyflushatnextpar
+
\unexpanded\def\pushpostponednodedata
{\globalpushbox\b_spac_postponed_data}
@@ -214,6 +216,7 @@
\unexpanded\def\flushatnextpar
{\begingroup
+ \the\everyflushatnextpar
\glet\flushpostponednodedata\spac_postponed_data_flush
\dowithnextboxcs\spac_postponed_data_finish\hbox}
diff --git a/tex/context/base/spac-prf.mkiv b/tex/context/base/spac-prf.mkiv
new file mode 100644
index 000000000..5f1553ede
--- /dev/null
+++ b/tex/context/base/spac-prf.mkiv
@@ -0,0 +1,31 @@
+%D \module
+%D [ file=spac-prf,
+%D version=2015.11.16, % moved from test module mathplus
+%D title=\CONTEXT\ Spacing Macros,
+%D subtitle=Profiling,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Spacing Macros / Profiling}
+
+%D This is a placeholder for something to come. But as I don't want to
+%D be harassed by 'why does it work differently than a week before' this
+%D cool new feature will only end up here when stable enough. Alas.
+
+\unprotect
+
+\definesystemattribute[profilemethod][public]
+
+\unexpanded\def\setprofile [#1]{}
+\unexpanded\def\resetprofile {}
+\unexpanded\def\useprofileparameter#1{}
+\unexpanded\def\addprofiletobox #1{}
+\unexpanded\def\profilegivenbox #1#2{}
+\unexpanded\def\profiledbox {\vbox}
+
+\protect \endinput
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 0035c4119..d1cf09e17 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -8,10 +8,16 @@ if not modules then modules = { } end modules ['spac-ver'] = {
-- we also need to call the spacer for inserts!
--- todo: directly set skips
+-- somehow lists still don't always have proper prev nodes so i need to
+-- check all of the luatex code some day .. maybe i should replace the
+-- whole mvl handler by lua code .. why not
+
+-- todo: use lua nodes with lua data (>0.79)
+-- see ** can go when 0.79
-- this code dates from the beginning and is kind of experimental; it
--- will be optimized and improved soon
+-- will be optimized and improved soon .. it's way too complex now but
+-- dates from a time with fewer possibilities
--
-- the collapser will be redone with user nodes; also, we might make
-- parskip into an attribute and apply it explicitly thereby getting rid
@@ -32,14 +38,20 @@ local formatters = string.formatters
local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc
-local nodes, node, trackers, attributes, context, commands, tex = nodes, node, trackers, attributes, context, commands, tex
+local nodes = nodes
+local node = node
+local trackers = trackers
+local attributes = attributes
+local context = context
+local tex = tex
local texlists = tex.lists
local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
local texnest = tex.nest
-local texgetbox = tex.getbox
local variables = interfaces.variables
+local implement = interfaces.implement
-- vertical space handler
@@ -49,11 +61,12 @@ local trace_page_builder = false trackers.register("builders.page", fun
local trace_collect_vspacing = false trackers.register("vspacing.collect", function(v) trace_collect_vspacing = v end)
local trace_vspacing = false trackers.register("vspacing.spacing", function(v) trace_vspacing = v end)
local trace_vsnapping = false trackers.register("vspacing.snapping", function(v) trace_vsnapping = v end)
-local trace_vpacking = false trackers.register("vspacing.packing", function(v) trace_vpacking = v end)
+local trace_specials = false trackers.register("vspacing.specials", function(v) trace_specials = v end)
local report_vspacing = logs.reporter("vspacing","spacing")
local report_collapser = logs.reporter("vspacing","collapsing")
local report_snapper = logs.reporter("vspacing","snapping")
+local report_specials = logs.reporter("vspacing","specials")
local report_page_builder = logs.reporter("builders","page")
local a_skipcategory = attributes.private('skipcategory')
@@ -63,27 +76,46 @@ local a_skiporder = attributes.private('skiporder')
local a_snapmethod = attributes.private('snapmethod')
local a_snapvbox = attributes.private('snapvbox')
-local find_node_tail = node.tail
-local free_node = node.free
-local free_node_list = node.flush_list
-local copy_node = node.copy
-local traverse_nodes = node.traverse
-local traverse_nodes_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local count_nodes = nodes.count
-local nodeidstostring = nodes.idstostring
-local hpack_node = node.hpack
-local vpack_node = node.vpack
-local writable_spec = nodes.writable_spec
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+local ntostring = nuts.tostring
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local free_node_list = nuts.flush_list
+local copy_node = nuts.copy
+local traverse_nodes = nuts.traverse
+local traverse_nodes_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local count_nodes = nuts.count
+local hpack_node = nuts.hpack
+local vpack_node = nuts.vpack
+local writable_spec = nuts.writable_spec
+local nodereference = nuts.reference
+
local listtoutf = nodes.listtoutf
+local nodeidstostring = nodes.idstostring
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_penalty = nodepool.penalty
local new_kern = nodepool.kern
local new_rule = nodepool.rule
+local new_glue = nodepool.glue
local new_gluespec = nodepool.gluespec
local nodecodes = nodes.nodecodes
@@ -103,8 +135,8 @@ builders.vspacing = vspacing
local vspacingdata = vspacing.data or { }
vspacing.data = vspacingdata
-vspacingdata.snapmethods = vspacingdata.snapmethods or { }
-local snapmethods = vspacingdata.snapmethods --maybe some older code can go
+local snapmethods = vspacingdata.snapmethods or { }
+vspacingdata.snapmethods = snapmethods
storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods")
@@ -114,11 +146,13 @@ local default = {
strut = true,
hfraction = 1,
dfraction = 1,
+ bfraction = 0.25,
}
local fractions = {
minheight = "hfraction", maxheight = "hfraction",
mindepth = "dfraction", maxdepth = "dfraction",
+ box = "bfraction",
top = "tlines", bottom = "blines",
}
@@ -179,28 +213,26 @@ end
-- local rule_id = nodecodes.rule
-- local vlist_id = nodecodes.vlist
-- function nodes.makevtop(n)
--- if n.id == vlist_id then
--- local list = n.list
--- local height = (list and list.id <= rule_id and list.height) or 0
--- n.depth = n.depth - height + n.height
--- n.height = height
+-- if getid(n) == vlist_id then
+-- local list = getlist(n)
+-- local height = (list and getid(list) <= rule_id and getfield(list,"height")) or 0
+-- setfield(n,"depth",getfield(n,"depth") - height + getfield(n,"height")
+-- setfield(n,"height",height
-- end
-- end
-local reference = nodes.reference
-
local function validvbox(parentid,list)
if parentid == hlist_code then
- local id = list.id
+ local id = getid(list)
if id == whatsit_code then -- check for initial par subtype
- list = list.next
+ list = getnext(list)
if not next then
return nil
end
end
local done = nil
for n in traverse_nodes(list) do
- local id = n.id
+ local id = getid(n)
if id == vlist_code or id == hlist_code then
if done then
return nil
@@ -214,9 +246,9 @@ local function validvbox(parentid,list)
end
end
if done then
- local id = done.id
+ local id = getid(done)
if id == hlist_code then
- return validvbox(id,done.list)
+ return validvbox(id,getlist(done))
end
end
return done -- only one vbox
@@ -226,19 +258,19 @@ end
local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped
-- problem: any snapped vbox ends up in a line
if list and parentid == hlist_code then
- local id = list.id
+ local id = getid(list)
if id == whatsit_code then -- check for initial par subtype
- list = list.next
+ list = getnext(list)
if not next then
return false
end
end
--~ local i = 0
for n in traverse_nodes(list) do
- local id = n.id
---~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod])
+ local id = getid(n)
+--~ i = i + 1 print(i,nodecodes[id],getattr(n,a_snapmethod))
if id == hlist_code or id == vlist_code then
- local a = n[a_snapmethod]
+ local a = getattr(n,a_snapmethod)
if not a then
-- return true -- not snapped at all
elseif a == 0 then
@@ -276,11 +308,11 @@ end
-- check variables.none etc
local function snap_hlist(where,current,method,height,depth) -- method.strut is default
- local list = current.list
+ local list = getlist(current)
local t = trace_vsnapping and { }
if t then
t[#t+1] = formatters["list content: %s"](listtoutf(list))
- t[#t+1] = formatters["parent id: %s"](reference(current))
+ t[#t+1] = formatters["parent id: %s"](nodereference(current))
t[#t+1] = formatters["snap method: %s"](method.name)
t[#t+1] = formatters["specification: %s"](method.specification)
end
@@ -312,26 +344,58 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp)
end
end
- local h, d = height or current.height, depth or current.depth
- local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
- local tlines, blines = method.tlines or 1, method.blines or 1
- local done, plusht, plusdp = false, snapht, snapdp
+
+ local h = (method.noheight and 0) or height or getfield(current,"height")
+ local d = (method.nodepth and 0) or depth or getfield(current,"depth")
+ local hr = method.hfraction or 1
+ local dr = method.dfraction or 1
+ local br = method.bfraction or 0
+ local ch = h
+ local cd = d
+ local tlines = method.tlines or 1
+ local blines = method.blines or 1
+ local done = false
+ local plusht = snapht
+ local plusdp = snapdp
local snaphtdp = snapht + snapdp
- if method.none then
+ if method.box then
+ local br = 1 - br
+ if br < 0 then
+ br = 0
+ elseif br > 1 then
+ br = 1
+ end
+ local n = ceiled((h+d-br*snapht-br*snapdp)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.max then
+ local n = ceiled((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.min then
+ local n = floored((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.none then
plusht, plusdp = 0, 0
if t then
t[#t+1] = "none: plusht 0pt plusdp 0pt"
end
end
if method.halfline then -- extra halfline
- plusht, plusdp = plusht + snaphtdp/2, plusdp + snaphtdp/2
+ plusht = plusht + snaphtdp/2
+ plusdp = plusdp + snaphtdp/2
if t then
t[#t+1] = formatters["halfline: plusht %p plusdp %p"](plusht,plusdp)
end
end
if method.line then -- extra line
- plusht, plusdp = plusht + snaphtdp, plusdp + snaphtdp
+ plusht = plusht + snaphtdp
+ plusdp = plusdp + snaphtdp
if t then
t[#t+1] = formatters["line: plusht %p plusdp %p"](plusht,plusdp)
end
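-- Editor's sketch, not part of the patch: the rounding performed by the new
-- box/max/min branches above, in plain Lua with illustrative dimensions.
-- snapht/snapdp are the grid strut height and depth, h/d the dimensions of the
-- box being snapped; math.ceil stands in for the ceiled helper of the real code
-- (and math.floor for floored in the "min" branch).

local snapht, snapdp = 8.0, 4.0          -- one grid line = 12pt in this example
local snaphtdp       = snapht + snapdp
local h, d           = 21.5, 3.0         -- a box of 24.5pt in total

-- "max": round the total up to whole lines and distribute the excess evenly
local n      = math.ceil((h + d) / snaphtdp)   --> 3 lines
local x      = n * snaphtdp - h - d            --> 11.5pt to distribute
local plusht = h + x / 2
local plusdp = d + x / 2
print(n, plusht + plusdp)                      --> 3   36.0 (exactly 3 grid lines)

-- "box" with the new bfraction (default 0.25, so br = 1 - 0.25 below): the
-- content may exceed a whole number of lines by a fraction of a line before an
-- extra line is charged
local br = 1 - 0.25
local nb = math.ceil((h + d - br * snapht - br * snapdp) / snaphtdp)
print(nb)                                      --> 2 lines instead of 3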
@@ -339,22 +403,22 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if method.first then
local thebox = current
- local id = thebox.id
+ local id = getid(thebox)
if id == hlist_code then
- thebox = validvbox(id,thebox.list)
- id = thebox and thebox.id
+ thebox = validvbox(id,getlist(thebox))
+ id = thebox and getid(thebox)
end
if thebox and id == vlist_code then
- local list = thebox.list
+ local list = getlist(thebox)
local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh = n.height
- ld = n.depth
+ lh = getfield(n,"height")
+ ld = getfield(n,"depth")
break
end
if lh then
- local ht = thebox.height
- local dp = thebox.depth
+ local ht = getfield(thebox,"height")
+ local dp = getfield(thebox,"depth")
if t then
t[#t+1] = formatters["first line: height %p depth %p"](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -362,9 +426,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = h - lh
ch, cd = lh, delta + d
h, d = ch, cd
- local shifted = hpack_node(current.list)
- shifted.shift = delta
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",delta)
+ setfield(current,"list",shifted)
done = true
if t then
t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta)
@@ -377,20 +441,21 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
elseif method.last then
local thebox = current
- local id = thebox.id
+ local id = getid(thebox)
if id == hlist_code then
- thebox = validvbox(id,thebox.list)
- id = thebox and thebox.id
+ thebox = validvbox(id,getlist(thebox))
+ id = thebox and getid(thebox)
end
if thebox and id == vlist_code then
- local list, lh, ld = thebox.list
+ local list = getlist(thebox)
+ local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh = n.height
- ld = n.depth
+ lh = getfield(n,"height")
+ ld = getfield(n,"depth")
end
if lh then
- local ht = thebox.height
- local dp = thebox.depth
+ local ht = getfield(thebox,"height")
+ local dp = getfield(thebox,"depth")
if t then
t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -398,9 +463,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = d - ld
cd, ch = ld, delta + h
h, d = ch, cd
- local shifted = hpack_node(current.list)
- shifted.shift = delta
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",delta)
+ setfield(current,"list",shifted)
done = true
if t then
t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta)
@@ -461,25 +526,25 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if offset then
-- we need to set the attr
if t then
- t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
+ t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
end
- local shifted = hpack_node(current.list)
- shifted.shift = offset
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",offset)
+ setfield(current,"list",shifted)
if t then
- t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
+ t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
end
- shifted[a_snapmethod] = 0
- current[a_snapmethod] = 0
+ setattr(shifted,a_snapmethod,0)
+ setattr(current,a_snapmethod,0)
end
if not height then
- current.height = ch
+ setfield(current,"height",ch)
if t then
t[#t+1] = formatters["forced height: %p"](ch)
end
end
if not depth then
- current.depth = cd
+ setfield(current,"depth",cd)
if t then
t[#t+1] = formatters["forced depth: %p"](cd)
end
@@ -492,18 +557,24 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["final height: %p -> %p"](h,ch)
t[#t+1] = formatters["final depth: %p -> %p"](d,cd)
end
+-- todo:
+--
+-- if h < 0 or d < 0 then
+-- h = 0
+-- d = 0
+-- end
if t then
- report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t)
+ report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[getid(current)],t)
end
return h, d, ch, cd, lines
end
local function snap_topskip(current,method)
- local spec = current.spec
- local w = spec.width
+ local spec = getfield(current,"spec")
+ local w = getfield(spec,"width")
local wd = w
- if spec.writable then
- spec.width = 0
+ if getfield(spec,"writable") then
+ setfield(spec,"width",0)
wd = 0
end
return w, wd
@@ -518,14 +589,15 @@ local categories = allocate {
[5] = 'disable',
[6] = 'nowhite',
[7] = 'goback',
- [8] = 'together'
+ [8] = 'together', -- not used (?)
+ [9] = 'overlay',
}
vspacing.categories = categories
function vspacing.tocategories(str)
local t = { }
- for s in gmatch(str,"[^, ]") do
+ for s in gmatch(str,"[^, ]") do -- use lpeg instead
local n = tonumber(s)
if n then
t[categories[n]] = true
@@ -536,7 +608,7 @@ function vspacing.tocategories(str)
return t
end
-function vspacing.tocategory(str)
+function vspacing.tocategory(str) -- can be optimized
if type(str) == "string" then
return set.tonumber(vspacing.tocategories(str))
else
@@ -567,15 +639,15 @@ do -- todo: interface.variables
-- This will change: just node.write and we can store the values in skips which
-- then obeys grouping
- local fixedblankskip = context.fixedblankskip
- local flexibleblankskip = context.flexibleblankskip
- local setblankcategory = context.setblankcategory
- local setblankorder = context.setblankorder
- local setblankpenalty = context.setblankpenalty
- local setblankhandling = context.setblankhandling
- local flushblankhandling = context.flushblankhandling
- local addpredefinedblankskip = context.addpredefinedblankskip
- local addaskedblankskip = context.addaskedblankskip
+ local ctx_fixedblankskip = context.fixedblankskip
+ local ctx_flexibleblankskip = context.flexibleblankskip
+ local ctx_setblankcategory = context.setblankcategory
+ local ctx_setblankorder = context.setblankorder
+ local ctx_setblankpenalty = context.setblankpenalty
+ ----- ctx_setblankhandling = context.setblankhandling
+ local ctx_flushblankhandling = context.flushblankhandling
+ local ctx_addpredefinedblankskip = context.addpredefinedblankskip
+ local ctx_addaskedblankskip = context.addaskedblankskip
local function analyze(str,oldcategory) -- we could use shorter names
for s in gmatch(str,"([^ ,]+)") do
@@ -587,35 +659,35 @@ do -- todo: interface.variables
if mk then
category = analyze(mk,category)
elseif keyword == k_fixed then
- fixedblankskip()
+ ctx_fixedblankskip()
elseif keyword == k_flexible then
- flexibleblankskip()
+ ctx_flexibleblankskip()
elseif keyword == k_category then
local category = tonumber(detail)
if category then
- setblankcategory(category)
+ ctx_setblankcategory(category)
if category ~= oldcategory then
- flushblankhandling()
+ ctx_flushblankhandling()
oldcategory = category
end
end
elseif keyword == k_order and detail then
local order = tonumber(detail)
if order then
- setblankorder(order)
+ ctx_setblankorder(order)
end
elseif keyword == k_penalty and detail then
local penalty = tonumber(detail)
if penalty then
- setblankpenalty(penalty)
+ ctx_setblankpenalty(penalty)
end
else
amount = tonumber(amount) or 1
local sk = skip[keyword]
if sk then
- addpredefinedblankskip(amount,keyword)
+ ctx_addpredefinedblankskip(amount,keyword)
else -- no check
- addaskedblankskip(amount,keyword)
+ ctx_addaskedblankskip(amount,keyword)
end
end
end
@@ -623,22 +695,22 @@ do -- todo: interface.variables
return category
end
- local pushlogger = context.pushlogger
- local startblankhandling = context.startblankhandling
- local stopblankhandling = context.stopblankhandling
- local poplogger = context.poplogger
+ local ctx_pushlogger = context.pushlogger
+ local ctx_startblankhandling = context.startblankhandling
+ local ctx_stopblankhandling = context.stopblankhandling
+ local ctx_poplogger = context.poplogger
function vspacing.analyze(str)
if trace_vspacing then
- pushlogger(report_vspacing)
- startblankhandling()
+ ctx_pushlogger(report_vspacing)
+ ctx_startblankhandling()
analyze(str,1)
- stopblankhandling()
- poplogger()
+ ctx_stopblankhandling()
+ ctx_poplogger()
else
- startblankhandling()
+ ctx_startblankhandling()
analyze(str,1)
- stopblankhandling()
+ ctx_stopblankhandling()
end
end
@@ -664,18 +736,18 @@ local trace_list, tracing_info, before, after = { }, false, "", ""
local function nodes_to_string(head)
local current, t = head, { }
while current do
- local id = current.id
+ local id = getid(current)
local ty = nodecodes[id]
if id == penalty_code then
- t[#t+1] = formatters["%s:%s"](ty,current.penalty)
+ t[#t+1] = formatters["%s:%s"](ty,getfield(current,"penalty"))
elseif id == glue_code then -- or id == kern_code then -- to be tested
t[#t+1] = formatters["%s:%p"](ty,current)
elseif id == kern_code then
- t[#t+1] = formatters["%s:%p"](ty,current.kern)
+ t[#t+1] = formatters["%s:%p"](ty,getfield(current,"kern"))
else
t[#t+1] = ty
end
- current = current.next
+ current = getnext(current)
end
return concat(t," + ")
end
@@ -699,7 +771,7 @@ local function trace_info(message, where, what)
end
local function trace_node(what)
- local nt = nodecodes[what.id]
+ local nt = nodecodes[getid(what)]
local tl = trace_list[#trace_list]
if tl and tl[1] == "node" then
trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) }
@@ -709,8 +781,8 @@ local function trace_node(what)
end
local function trace_done(str,data)
- if data.id == penalty_code then
- trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) }
+ if getid(data) == penalty_code then
+ trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,getfield(data,"penalty")) }
else
trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) }
end
@@ -748,22 +820,32 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local topskip_code = skipcodes.topskip
local splittopskip_code = skipcodes.splittopskip
+-- local function free_glue_node(n)
+-- free_node(n)
+-- local s = getfield(n,"spec")
+-- if s then
+-- free_node(s)
+-- end
+-- end
+
local free_glue_node = free_node
+local free_glue_spec = function() end
+----- free_glue_spec = free_node -- can be enabled in in 0.73 (so for the moment we leak due to old luatex engine issues)
function vspacing.snapbox(n,how)
local sv = snapmethods[how]
if sv then
- local box = texgetbox(n)
- local list = box.list
+ local box = getbox(n)
+ local list = getlist(box)
if list then
- local s = list[a_snapmethod]
+ local s = getattr(list,a_snapmethod)
if s == 0 then
if trace_vsnapping then
-- report_snapper("box list not snapped, already done")
end
else
- local ht = box.height
- local dp = box.depth
+ local ht = getfield(box,"height")
+ local dp = getfield(box,"depth")
if false then -- todo: already_done
-- assume that the box is already snapped
if trace_vsnapping then
@@ -772,14 +854,14 @@ function vspacing.snapbox(n,how)
end
else
local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp)
- box.height= ch
- box.depth = cd
+ setfield(box,"height",ch)
+ setfield(box,"depth",cd)
if trace_vsnapping then
report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list))
end
- box[a_snapmethod] = 0 --
- list[a_snapmethod] = 0 -- yes or no
+ setattr(box,a_snapmethod,0) --
+ setattr(list,a_snapmethod,0) -- yes or no
end
end
end
@@ -801,8 +883,10 @@ local w, h, d = 0, 0, 0
----- w, h, d = 100*65536, 65536, 65536
local function forced_skip(head,current,width,where,trace)
- if head == current and head.subtype == baselineskip_code then
- width = width - head.spec.width
+ if head == current then
+ if getsubtype(head) == baselineskip_code then
+ width = width - getfield(getfield(head,"spec"),"width")
+ end
end
if width == 0 then
-- do nothing
@@ -825,62 +909,270 @@ end
-- penalty only works well when before skip
-local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8 -- move into function when upvalue 60 issue
+local discard = 0
+local largest = 1
+local force = 2
+local penalty = 3
+local add = 4
+local disable = 5
+local nowhite = 6
+local goback = 7
+local together = 8 -- not used (?)
+local overlay = 9
-- [whatsits][hlist][glue][glue][penalty]
local special_penalty_min = 32250
local special_penalty_max = 35000
+local special_penalty_xxx = 0
+
+-- this is rather messy and complex: we want to make sure that successive
+-- headers don't break but also make sure that we have at least a decent
+-- break when we have successive ones (often when testing)
+
+-- todo: mark headers as such so that we can recognize them
+
+local specialmethods = { }
+local specialmethod = 1
-local function specialpenalty(start,penalty)
- -- nodes.showsimplelist(texlists.page_head,1)
- local current = find_node_tail(texlists.page_head)
+local properties = nodes.properties.data
+
+specialmethods[1] = function(pagehead,pagetail,start,penalty)
+ --
+ if not pagehead or penalty < special_penalty_min or penalty > special_penalty_max then
+ return
+ end
+ local current = pagetail
+ --
+ -- nodes.showsimplelist(pagehead,0)
+ --
+ if trace_specials then
+ report_specials("checking penalty %a",penalty)
+ end
while current do
- local id = current.id
- if id == glue_code then
- current = current.prev
- elseif id == penalty_code then
- local p = current.penalty
- if p == penalty then
- if trace_vspacing then
- report_vspacing("overloading penalty %a",p)
+ local id = getid(current)
+ if id == penalty_code then
+ local p = properties[current]
+ if p then
+ local p = p.special_penalty
+ if not p then
+ if trace_specials then
+ report_specials(" regular penalty, continue")
+ end
+ elseif p == penalty then
+ if trace_specials then
+ report_specials(" context penalty %a, same level, overloading",p)
+ end
+ return special_penalty_xxx
+ elseif p > special_penalty_min and p < special_penalty_max then
+ if penalty < p then
+ if trace_specials then
+ report_specials(" context penalty %a, lower level, overloading",p)
+ end
+ return special_penalty_xxx
+ else
+ if trace_specials then
+ report_specials(" context penalty %a, higher level, quitting",p)
+ end
+ return
+ end
+ elseif trace_specials then
+ report_specials(" context penalty %a, higher level, continue",p)
end
- return current
- elseif p >= 10000 then
- current = current.prev
else
- break
+ local p = getfield(current,"penalty")
+ if p < 10000 then
+ -- assume some other mechanism kicks in so we seem to have content
+ if trace_specials then
+ report_specials(" regular penalty %a, quitting",p)
+ end
+ break
+ else
+ if trace_specials then
+ report_specials(" regular penalty %a, continue",p)
+ end
+ end
+ end
+ end
+ current = getprev(current)
+ end
+ -- none found, so no reason to be special
+ if trace_specials then
+ if pagetail then
+ report_specials(" context penalty, discarding, nothing special")
+ else
+ report_specials(" context penalty, discarding, nothing preceding")
+ end
+ end
+ return special_penalty_xxx
+end
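-- Editor's sketch, not part of the patch: the look-back of specialmethods[1]
-- with plain tables instead of nodes. In the real code every flushed penalty
-- records its original value in nodes.properties.data, so this scan can still
-- recognize ConTeXt's special (section) penalties after their numeric value has
-- been rewritten. Walking back from the page tail: an earlier special penalty
-- with an equal or larger value neutralizes the new one (returns 0), a smaller
-- one keeps it, a breakable regular penalty (< 10000) or an empty page also
-- neutralizes it; other nodes are simply skipped, as in the real scan.

local page = {                            -- mock page list, only penalties matter
    { penalty = 0,     special = 32500 }, -- earlier special penalty, already rewritten
    { penalty = 10000                  }, -- an unrelated nobreak penalty
}

local function lookback(page, sp)             -- sp: the incoming special penalty
    for i = #page, 1, -1 do
        local entry = page[i]
        if entry.special then
            if entry.special >= sp then
                return 0                      -- equal or larger special precedes: overload
            else
                return nil                    -- smaller special precedes: keep sp
            end
        elseif entry.penalty and entry.penalty < 10000 then
            return 0                          -- a real break point precedes: overload
        end                                   -- anything else: keep looking back
    end
    return 0                                  -- nothing relevant precedes: overload
end

print(lookback(page, 32500))              --> 0   (equal special on the page)
print(lookback(page, 34000))              --> nil (only a smaller special precedes)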
+
+-- specialmethods[2] : always put something before and use that as to-be-changed
+--
+-- we could inject a vadjust to force a recalculation .. a mess
+--
+-- So, the next is far from robust and okay, but for the moment this overlaying
+-- has to do. Always test this with the examples in spec-ver.mkvi!
+
+local function check_experimental_overlay(head,current)
+ local p = nil
+ local c = current
+ local n = nil
+
+ -- setfield(head,"prev",nil) -- till we have 0.79 **
+
+ local function overlay(p,n,mvl)
+ local p_ht = getfield(p,"height")
+ local p_dp = getfield(p,"depth")
+ local n_ht = getfield(n,"height")
+ local skips = 0
+ --
+ -- We deal with this at the tex end .. we don't see spacing .. enabling this code
+ -- is probably harmless but then we need to test it.
+ --
+ local c = getnext(p)
+ while c and c ~= n do
+ local id = getid(c)
+ if id == glue_code then
+ skips = skips + getfield(getfield(c,"glue_spec"),"width")
+ elseif id == kern_code then
+ skips = skips + getfield(c,"kern")
end
+ c = getnext(c)
+ end
+ --
+ local delta = n_ht + skips + p_dp
+ texsetdimen("global","d_spac_overlay",-delta) -- for tracing
+ local k = new_kern(-delta)
+ if n_ht > p_ht then
+ -- we should adapt pagetotal ! (need a hook for that) .. now we have the wrong pagebreak
+ setfield(p,"height",n_ht)
+ end
+ insert_node_before(head,n,k)
+ if p == head then
+ head = k
+ end
+ if trace_vspacing then
+ report_vspacing("overlaying, prev height: %p, prev depth: %p, next height: %p, skips: %p, move up: %p",p_ht,p_dp,n_ht,skips,delta)
+ end
+ return remove_node(head,current,true)
+ end
+
+ -- goto next line
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code or id == kern_code then
+ -- skip (actually, remove)
+ c = getnext(c)
+ elseif id == hlist_code then
+ n = c
+ break
else
- current = current.prev
+ break
+ end
+ end
+ if n then
+ -- we have a next line, goto prev line
+ c = current
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code then
+ c = getprev(c)
+ elseif id == hlist_code then
+ p = c
+ break
+ else
+ break
+ end
+ end
+ if not p then
+ if a_snapmethod == a_snapvbox then
+ -- quit, we're not on the mvl
+ else
+ local c = tonut(texlists.page_head)
+ while c and c ~= n do
+ local id = getid(c)
+ if id == hlist_code then
+ p = c
+ end
+ c = getnext(c)
+ end
+ if p and p ~= n then
+ return overlay(p,n,true)
+ end
+ end
+ elseif p ~= n then
+ return overlay(p,n,false)
end
end
+ -- in fact, we could try again later ... so then no remove (a few tries)
+ return remove_node(head, current, true)
end
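-- Editor's sketch, not part of the patch: the shift computed by the overlay()
-- helper above, in plain Lua with illustrative dimensions. To put the next line
-- on top of the previous one it has to move up by its own height plus the depth
-- of the previous line plus any glue/kern between the two; the real code injects
-- new_kern(-delta) before the line and, when the next line is taller, raises the
-- height of the previous one.

local p_ht, p_dp = 10.0, 2.5       -- previous line: height and depth (pt)
local n_ht       = 8.0             -- next line: height
local skips      = 14.0            -- accumulated glue/kern widths in between

local delta = n_ht + skips + p_dp  -- 24.5pt; a kern of -delta pulls the line up
print(delta)                       --> 24.5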
+-- This will be replaced after 0.80+ when we have a more robust look-back and
+-- can look at the bigger picture.
+
+-- todo: look back and when a special penalty is there before a list is seen, keep it
+
+-- we now look back a lot, way too often
+
local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail
if trace then
reset_tracing(head)
end
local current, oldhead = head, head
local glue_order, glue_data, force_glue = 0, nil, false
- local penalty_order, penalty_data, natural_penalty = 0, nil, nil
+ local penalty_order, penalty_data, natural_penalty, special_penalty = 0, nil, nil, nil
local parskip, ignore_parskip, ignore_following, ignore_whitespace, keep_together = nil, false, false, false, false
--
-- todo: keep_together: between headers
--
+ local pagehead = nil
+ local pagetail = nil
+
+ local function getpagelist()
+ if not pagehead then
+ pagehead = texlists.page_head
+ if pagehead then
+ pagehead = tonut(texlists.page_head)
+ pagetail = find_node_tail(pagehead) -- no texlists.page_tail yet
+ end
+ end
+ end
+ --
local function flush(why)
if penalty_data then
local p = new_penalty(penalty_data)
if trace then trace_done("flushed due to " .. why,p) end
- head = insert_node_before(head,current,p)
+ if penalty_data >= 10000 then -- or whatever threshold?
+ local prev = getprev(current)
+ if getid(prev) == glue_code then -- maybe go back more, or maybe even push back before any glue
+ -- tricky case: spacing/grid-007.tex: glue penalty glue
+ head = insert_node_before(head,prev,p)
+ else
+ head = insert_node_before(head,current,p)
+ end
+ else
+ head = insert_node_before(head,current,p)
+ end
+-- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ local props = properties[p]
+ if props then
+ props.special_penalty = special_penalty or penalty_data
+ else
+ properties[p] = {
+ special_penalty = special_penalty or penalty_data
+ }
+ end
+-- end
end
if glue_data then
- local spec = glue_data.spec
+ local spec = getfield(glue_data,"spec")
if force_glue then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head = forced_skip(head,current,spec.width,"before",trace)
+ head = forced_skip(head,current,getfield(spec,"width"),"before",trace)
free_glue_node(glue_data)
- elseif spec.writable then
+ elseif getfield(spec,"writable") then
if trace then trace_done("flushed due to " .. why,glue_data) end
head = insert_node_before(head,current,glue_data)
else
@@ -892,6 +1184,26 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
penalty_order, penalty_data, natural_penalty = 0, nil, nil
parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false
end
+ --
+
+-- quick hack, can be done nicer
+-- local nobreakfound = nil
+-- local function checknobreak()
+-- local pagehead, pagetail = getpagelist()
+-- local current = pagetail
+-- while current do
+-- local id = getid(current)
+-- if id == hlist_code or id == vlist_code then
+-- return false
+-- elseif id == penalty_code then
+-- return getfield(current,"penalty") >= 10000
+-- end
+-- current = getprev(current)
+-- end
+-- return false
+-- end
+
+ --
if trace_vsnapping then
report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p",
texgetdimen("globalbodyfontstrutheight"), texgetdimen("globalbodyfontstrutdepth"),
@@ -899,13 +1211,19 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
)
end
if trace then trace_info("start analyzing",where,what) end
+
+-- local headprev = getprev(head)
+
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
+-- if nobreakfound == nil then
+-- nobreakfound = false
+-- end
-- needs checking, why so many calls
if snap then
- local list = current.list
- local s = current[a_snapmethod]
+ local list = getlist(current)
+ local s = getattr(current,a_snapmethod)
if not s then
-- if trace_vsnapping then
-- report_snapper("mvl list not snapped")
@@ -919,8 +1237,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if sv then
-- check if already snapped
if list and already_done(id,list,a_snapmethod) then
- local ht = current.height
- local dp = current.depth
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
-- assume that the box is already snapped
if trace_vsnapping then
report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list))
@@ -935,40 +1253,60 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif trace_vsnapping then
report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- current[a_snapmethod] = 0
+ setattr(current,a_snapmethod,0)
end
else
--
end
-- tex.prevdepth = 0
flush("list")
- current = current.next
+ current = getnext(current)
elseif id == penalty_code then
- -- natural_penalty = current.penalty
+ -- natural_penalty = getfield(current,"penalty")
-- if trace then trace_done("removed penalty",current) end
-- head, current = remove_node(head, current, true)
- current = current.next
+
+-- if nobreakfound == nil then
+-- nobreakfound = checknobreak()
+-- end
+-- if nobreakfound and getfield(current,"penalty") <= 10000 then
+-- -- if trace then
+-- trace_done("removed penalty",current)
+-- -- end
+-- head, current = remove_node(head, current, true)
+-- end
+
+ current = getnext(current)
elseif id == kern_code then
- if snap and trace_vsnapping and current.kern ~= 0 then
- report_snapper("kern of %p kept",current.kern)
+ if snap and trace_vsnapping and getfield(current,"kern") ~= 0 then
+ report_snapper("kern of %p kept",getfield(current,"kern"))
end
flush("kern")
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == userskip_code then
- local sc = current[a_skipcategory] -- has no default, no unset (yet)
- local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet)
- local sp = current[a_skippenalty] -- has no default, no unset (yet)
+ local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
+ local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
+ local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
if sp and sc == penalty then
+ if where == "page" then
+ getpagelist()
+ local p = specialmethods[specialmethod](pagehead,pagetail,current,sp)
+ if p then
+ if trace then
+ trace_skip("previous special penalty %a is changed to %a using method %a",sp,p,specialmethod)
+ end
+ special_penalty = sp
+ sp = p
+ end
-if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
- local previousspecial = specialpenalty(current,sp)
- if previousspecial then
- previousspecial.penalty = 0
- sp = 0
- end
-end
+-- else
+-- if nobreakfound == nil then
+-- nobreakfound = checknobreak()
+-- end
+
+ end
if not penalty_data then
penalty_data = sp
elseif penalty_order < so then
@@ -977,43 +1315,52 @@ end
penalty_data = sp
end
if trace then trace_skip("penalty in skip",sc,so,sp,current) end
+
+-- if nobreakfound then
+-- penalty_data = 10000
+-- if trace then
+-- trace_skip("nobreak found before penalty in skip",sc,so,sp,current)
+-- end
+-- end
+
head, current = remove_node(head, current, true)
elseif not sc then -- if not sc then
if glue_data then
if trace then trace_done("flush",glue_data) end
head = insert_node_before(head,current,glue_data)
if trace then trace_natural("natural",current) end
- current = current.next
+ current = getnext(current)
else
-- not look back across head
--- todo: prev can be whatsit (latelua)
- local previous = current.prev
- if previous and previous.id == glue_code and previous.subtype == userskip_code then
- local ps = previous.spec
- if ps.writable then
- local cs = current.spec
- if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
- local pw, pp, pm = ps.width, ps.stretch, ps.shrink
- local cw, cp, cm = cs.width, cs.stretch, cs.shrink
+ -- todo: prev can be whatsit (latelua)
+ local previous = getprev(current)
+ if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
+ local ps = getfield(previous,"spec")
+ if getfield(ps,"writable") then
+ local cs = getfield(current,"spec")
+ if getfield(cs,"writable") and getfield(ps,"stretch_order") == 0 and getfield(ps,"shrink_order") == 0 and getfield(cs,"stretch_order") == 0 and getfield(cs,"shrink_order") == 0 then
+ local pw, pp, pm = getfield(ps,"width"), getfield(ps,"stretch"), getfield(ps,"shrink")
+ local cw, cp, cm = getfield(cs,"width"), getfield(cs,"stretch"), getfield(cs,"shrink")
-- ps = writable_spec(previous) -- no writable needed here
-- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
- previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
+ free_glue_spec(ps)
+ setfield(previous,"spec",new_gluespec(pw + cw, pp + cp, pm + cm)) -- else topskip can disappear
if trace then trace_natural("removed",current) end
head, current = remove_node(head, current, true)
-- current = previous
if trace then trace_natural("collapsed",previous) end
- -- current = current.next
+ -- current = getnext(current)
else
if trace then trace_natural("filler",current) end
- current = current.next
+ current = getnext(current)
end
else
if trace then trace_natural("natural (no prev spec)",current) end
- current = current.next
+ current = getnext(current)
end
else
if trace then trace_natural("natural (no prev)",current) end
- current = current.next
+ current = getnext(current)
end
end
glue_order, glue_data = 0, nil
@@ -1031,6 +1378,12 @@ end
elseif sc == discard then
if trace then trace_skip("discard",sc,so,sp,current) end
head, current = remove_node(head, current, true)
+ elseif sc == overlay then
+ -- todo: overlay following line over previous
+ if trace then trace_skip("overlay",sc,so,sp,current) end
+ -- beware: head can actually be after the affected nodes as
+ -- we look back ... some day head will be the real head
+ head, current = check_experimental_overlay(head,current,a_snapmethod)
elseif ignore_following then
if trace then trace_skip("disabled",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -1046,12 +1399,12 @@ end
elseif glue_order == so then
-- is now exclusive, maybe support goback as combi, else why a set
if sc == largest then
- local cs, gs = current.spec, glue_data.spec
- local cw, gw = cs.width, gs.width
+ local cs, gs = getfield(current,"spec"), getfield(glue_data,"spec")
+ local cw, gw = getfield(cs,"width"), getfield(gs,"width")
if cw > gw then
if trace then trace_skip("largest",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
+ head, current, glue_data = remove_node(head,current)
else
if trace then trace_skip("remove smallest",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -1059,7 +1412,7 @@ end
elseif sc == goback then
if trace then trace_skip("goback",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
+ head, current, glue_data = remove_node(head,current)
elseif sc == force then
-- last one counts, some day we can provide an accumulator and largest etc
-- but not now
@@ -1073,11 +1426,11 @@ end
head, current = remove_node(head, current, true)
elseif sc == add then
if trace then trace_skip("add",sc,so,sp,current) end
- -- local old, new = glue_data.spec, current.spec
- local old, new = writable_spec(glue_data), current.spec
- old.width = old.width + new.width
- old.stretch = old.stretch + new.stretch
- old.shrink = old.shrink + new.shrink
+ -- local old, new = glue_data.spec, getfield(current,"spec")
+ local old, new = writable_spec(glue_data), getfield(current,"spec")
+ setfield(old,"width",getfield(old,"width") + getfield(new,"width"))
+ setfield(old,"stretch",getfield(old,"stretch") + getfield(new,"stretch"))
+ setfield(old,"shrink",getfield(old,"shrink") + getfield(new,"shrink"))
-- todo: order
head, current = remove_node(head, current, true)
else
@@ -1093,12 +1446,13 @@ end
end
elseif subtype == lineskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
- if current.spec.writable then
+ setattr(current,a_snapmethod,0)
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") then
local spec = writable_spec(current)
- spec.width = 0
+ setfield(spec,"width",0)
if trace_vsnapping then
report_snapper("lineskip set to zero")
end
@@ -1111,15 +1465,16 @@ end
if trace then trace_skip("lineskip",sc,so,sp,current) end
flush("lineskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == baselineskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
- if current.spec.writable then
+ setattr(current,a_snapmethod,0)
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") then
local spec = writable_spec(current)
- spec.width = 0
+ setfield(spec,"width",0)
if trace_vsnapping then
report_snapper("baselineskip set to zero")
end
@@ -1132,17 +1487,17 @@ end
if trace then trace_skip("baselineskip",sc,so,sp,current) end
flush("baselineskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == parskip_code then
-- parskip always comes later
if ignore_whitespace then
if trace then trace_natural("ignored parskip",current) end
head, current = remove_node(head, current, true)
elseif glue_data then
- local ps = current.spec
- local gs = glue_data.spec
- if ps.writable and gs.writable and ps.width > gs.width then
- glue_data.spec = copy_node(ps)
+ local ps = getfield(current,"spec")
+ local gs = getfield(glue_data,"spec")
+ if getfield(ps,"writable") and getfield(gs,"writable") and getfield(ps,"width") > getfield(gs,"width") then
+ setfield(glue_data,"spec",copy_node(ps))
if trace then trace_natural("taking parskip",current) end
else
if trace then trace_natural("removed parskip",current) end
@@ -1154,9 +1509,9 @@ end
end
elseif subtype == topskip_code or subtype == splittopskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
+ setattr(current,a_snapmethod,0)
local sv = snapmethods[s]
local w, cw = snap_topskip(current,sv)
if trace_vsnapping then
@@ -1170,46 +1525,46 @@ end
if trace then trace_skip("topskip",sc,so,sp,current) end
flush("topskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == abovedisplayskip_code then
--
if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
flush("above display skip (normal)")
- current = current.next
+ current = getnext(current)
--
elseif subtype == belowdisplayskip_code then
--
if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
flush("below display skip (normal)")
- current = current.next
- --
+ current = getnext(current)
+ --
elseif subtype == abovedisplayshortskip_code then
--
if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
flush("above display skip (short)")
- current = current.next
+ current = getnext(current)
--
elseif subtype == belowdisplayshortskip_code then
--
if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
flush("below display skip (short)")
- current = current.next
+ current = getnext(current)
--
else -- other glue
if snap and trace_vsnapping then
- local spec = current.spec
- if spec.writable and spec.width ~= 0 then
- report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype])
- -- spec.width = 0
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") and getfield(spec,"width") ~= 0 then
+ report_snapper("glue %p of type %a kept",getfield(spec,"width"),skipcodes[subtype])
+ -- setfield(spec,"width",0)
end
end
- if trace then trace_skip(formatter["glue of type %a"](subtype),sc,so,sp,current) end
+ if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
flush("some glue")
- current = current.next
+ current = getnext(current)
end
else
- flush("something else")
- current = current.next
+ flush(formatters["node with id %a"](id))
+ current = getnext(current)
end
end
if trace then trace_info("stop analyzing",where,what) end
@@ -1225,17 +1580,28 @@ end
local p = new_penalty(penalty_data)
if trace then trace_done("result",p) end
head, tail = insert_node_after(head,tail,p)
+ -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ local props = properties[p]
+ if props then
+ props.special_penalty = special_penalty or penalty_data
+ else
+ properties[p] = {
+ special_penalty = special_penalty or penalty_data
+ }
+ end
+ -- end
end
if glue_data then
if not tail then tail = find_node_tail(head) end
if trace then trace_done("result",glue_data) end
if force_glue then
- head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace)
+ local spec = getfield(glue_data,"spec")
+ head, tail = forced_skip(head,tail,getfield(spec,"width"),"after",trace)
free_glue_node(glue_data)
else
head, tail = insert_node_after(head,tail,glue_data)
end
-texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
+ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
end
if trace then
if glue_data or penalty_data then
@@ -1243,9 +1609,16 @@ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevd
end
show_tracing(head)
if oldhead ~= head then
- trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id])
+ trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
end
end
+
+-- if headprev then
+-- setprev(head,headprev)
+-- setnext(headprev,head)
+-- end
+-- print("C HEAD",tonode(head))
+
return head, true
end
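-- A sketch, not from this patch: the special penalty stored in the properties
-- table near the end of the collapser (props.special_penalty above) can be read
-- back by a later pass over the page list; assuming the same local properties
-- alias that this file uses, such a lookup is roughly:
--
-- local function specialpenaltyof(n)
--     local props = properties[n]
--     return props and props.special_penalty -- nil when nothing was stored
-- end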
@@ -1271,16 +1644,17 @@ end
function vspacing.pagehandler(newhead,where)
-- local newhead = texlists.contrib_head
if newhead then
+ newhead = tonut(newhead)
local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
local flush = false
stackhack = true -- todo: only when grid snapping once enabled
-- todo: fast check if head = tail
for n in traverse_nodes(newhead) do -- we could just look for glue nodes
- local id = n.id
+ local id = getid(n)
if id ~= glue_code then
flush = true
- elseif n.subtype == userskip_code then
- if n[a_skipcategory] then
+ elseif getsubtype(n) == userskip_code then
+ if getattr(n,a_skipcategory) then
stackhack = true
else
flush = true
@@ -1292,35 +1666,36 @@ function vspacing.pagehandler(newhead,where)
if flush then
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end
- stacktail.next = newhead
- newhead.prev = stacktail
+ setfield(stacktail,"next",newhead)
+ setfield(newhead,"prev",stacktail)
newhead = stackhead
stackhead, stacktail = nil, nil
end
if stackhack then
stackhack = false
if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end
- -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
else
if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end
-- texlists.contrib_head = newhead
end
+ return tonode(newhead)
else
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end
- stacktail.next = newhead
- newhead.prev = stacktail
+ setfield(stacktail,"next",newhead)
+ setfield(newhead,"prev",stacktail)
else
if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end
stackhead = newhead
end
stacktail = newtail
-- texlists.contrib_head = nil
- newhead = nil
+ -- newhead = nil
end
end
- return newhead
+ return nil
end
local ignore = table.tohash {
@@ -1330,18 +1705,27 @@ local ignore = table.tohash {
}
function vspacing.vboxhandler(head,where)
- if head and not ignore[where] and head.next then
- head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
+ if head and not ignore[where] then
+ local h = tonut(head)
+ if getnext(h) then -- what if a one liner and snapping?
+ h = collapser(h,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
+ return tonode(h)
+ end
end
return head
end
-function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod
- local box = texgetbox(n)
+function vspacing.collapsevbox(n,aslist) -- for boxes but using global a_snapmethod
+ local box = getbox(n)
if box then
- local list = box.list
+ local list = getlist(box)
if list then
- box.list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod))
+ list = collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)
+ if aslist then
+ setfield(box,"list",list) -- beware, dimensions of box are wrong now
+ else
+ setfield(box,"list",vpack_node(list))
+ end
end
end
end
@@ -1352,14 +1736,65 @@ end
local outer = texnest[0]
function vspacing.resetprevdepth()
- outer.prevdepth = 0
+ if texlists.hold_head then
+ outer.prevdepth = 0
+ end
end
-- interface
-commands.vspacing = vspacing.analyze
-commands.vspacingsetamount = vspacing.setskip
-commands.vspacingdefine = vspacing.setmap
-commands.vspacingcollapse = vspacing.collapsevbox
-commands.vspacingsnap = vspacing.snapbox
-commands.resetprevdepth = vspacing.resetprevdepth
+implement {
+ name = "vspacing",
+ actions = vspacing.analyze,
+ scope = "private",
+ arguments = "string"
+}
+
+implement {
+ name = "resetprevdepth",
+ actions = vspacing.resetprevdepth,
+ scope = "private"
+}
+
+implement {
+ name = "vspacingsetamount",
+ actions = vspacing.setskip,
+ scope = "private",
+ arguments = "string",
+}
+
+implement {
+ name = "vspacingdefine",
+ actions = vspacing.setmap,
+ scope = "private",
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "vspacingcollapse",
+ actions = vspacing.collapsevbox,
+ scope = "private",
+ arguments = "integer"
+}
+
+implement {
+ name = "vspacingcollapseonly",
+ actions = vspacing.collapsevbox,
+ scope = "private",
+ arguments = { "integer", true }
+}
+
+implement {
+ name = "vspacingsnap",
+ actions = vspacing.snapbox,
+ scope = "private",
+ arguments = { "integer", "integer" }
+}
+
+implement {
+ name = "definesnapmethod",
+ actions = vspacing.definesnapmethod,
+ scope = "private",
+ arguments = { "string", "string" }
+}
+
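-- A sketch, not from this patch: each implement { ... } block above registers a
-- Lua action under a name so that the TeX end can call it as \clf_<name> with
-- scanned arguments instead of going through \ctxcommand{...}. A hypothetical
-- extra registration would look along these lines and would then be invoked
-- from TeX as \clf_mydemo{some text} (compare \clf_vspacing{...} further down):
--
-- interfaces.implement {
--     name      = "mydemo",  -- made-up name, only for illustration
--     arguments = "string",
--     actions   = function(s) logs.report("demo","got %a",s) end,
-- }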
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index afa722cfe..86a731d3c 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -17,6 +17,8 @@
\registerctxluafile{spac-ver}{1.001}
+% todo: use usernodes ?
+
% todo: itemize : intro ... only when there is one or two lines preceding and then
% keep these together i.e. \blank[intro]
@@ -120,7 +122,7 @@
\setvalue{\??interlinespacerelative\v!auto }{\let\setrelativeinterlinespace\spac_linespacing_set_relative_interlinespace}
\def\spac_linespacing_set_specified_relative_interlinespace#1% fragile?
- {\doifdimenstringelse{#1}
+ {\doifelsedimenstring{#1}
{\setupspecifiedinterlinespace[\c!line=#1]}
{\assignvalue{#1}\currentrelativeinterlinespace{1.00}{1.25}{1.50}%
\spacing\currentrelativeinterlinespace}}
@@ -140,6 +142,20 @@
\spacing\currentrelativeinterlinespace
\fi}
+\unexpanded\def\spac_linespacing_setup_use
+ {\ifcsname\namedinterlinespacehash\m_spac_interlinespace\s!parent\endcsname
+ \let\currentinterlinespace\m_spac_interlinespace
+ \spac_linespacing_setup_specified_interline_space
+ % \else
+ % we only support named interlinespaces
+ \fi}
+
+\unexpanded\def\useinterlinespaceparameter#1% see footnotes
+ {\edef\m_spac_interlinespace{#1\c!interlinespace}%
+ \ifx\m_spac_interlinespace\empty \else
+ \spac_linespacing_setup_use
+ \fi}
+
\newtoks\everysetupglobalinterlinespace
\newtoks\everysetuplocalinterlinespace
@@ -152,10 +168,14 @@
\unexpanded\def\setupinterlinespace
{\dodoubleempty\spac_linespacing_setup}
+\ifdefined\setupinterlinespace_double \else
+ \let\setupinterlinespace_double\setup_interlinespace % for a while
+\fi
+
\def\spac_linespacing_setup[#1][#2]%
{\settrue\interlinespaceisset % reset has to be done when needed
\ifsecondargument
- \setup_interlinespace[#1][#2]%
+ \setupinterlinespace_double[#1][#2]%
\else\iffirstargument
\ifcsname\namedinterlinespacehash{#1}\s!parent\endcsname
\edef\currentinterlinespace{#1}%
@@ -170,7 +190,7 @@
\fi\fi}
\def\spac_linespacing_setup_specified_or_relative[#1]%
- {\doifassignmentelse{#1}\setupspecifiedinterlinespace\setuprelativeinterlinespace[#1]%
+ {\doifelseassignment{#1}\setupspecifiedinterlinespace\setuprelativeinterlinespace[#1]%
\the\iflocalinterlinespace\everysetuplocalinterlinespace\else\everysetupglobalinterlinespace\fi}
\def\spac_linespacing_synchronize_local % adapts to the font
@@ -197,7 +217,7 @@
\fi
\popmacro\currentinterlinespace
\else
- \normalexpanded{\noexpand\doifassignmentelse{\p_spac_checked_interlinespace}%
+ \normalexpanded{\noexpand\doifelseassignment{\p_spac_checked_interlinespace}%
\setupspecifiedinterlinespace\setuprelativeinterlinespace[\p_spac_checked_interlinespace]}%
\iflocalinterlinespace
\the\everysetuplocalinterlinespace
@@ -330,7 +350,7 @@
\let\v_spac_whitespace_current\v!none
\unexpanded\def\setupwhitespace
- {\doifnextoptionalelse\spac_whitespace_setup_yes\spac_whitespace_setup_nop}
+ {\doifelsenextoptionalcs\spac_whitespace_setup_yes\spac_whitespace_setup_nop}
\def\spac_whitespace_setup_nop
{\ifx\v_spac_whitespace_current\v!none\else
@@ -542,6 +562,8 @@
\ignorespaces
\let\spac_lines_stop_correction\spac_lines_stop_correction_yes}
+% still not ok ... will move to the lua end ... needs a final solution
+
\unexpanded\def\spac_lines_stop_correction_yes
{\removeunwantedspaces
\egroup
@@ -549,6 +571,11 @@
\blank[\v!white]%
\snaptogrid\hbox{\box\scratchbox}%
\else
+\blank[\v!nowhite]%
+\ifdim\parskip>\zeropoint
+ % too fuzzy otherwise
+\else
+ % doesn't like whitespace
\ifdim\d_spac_prevdepth<\maxdimen
\unless\ifdim\d_spac_prevdepth<\zeropoint
\ifdim\d_spac_prevdepth<\strutdp \relax
@@ -562,6 +589,7 @@
\fi
\fi
\fi
+\fi
\ifdim\pagegoal<\maxdimen
\blank[\v!white,\the\d_spac_lines_correction_before]% \blank[\v!white]\dotopbaselinecorrection
\fi
@@ -853,11 +881,11 @@
\strutheightfactor\dimexpr\normallineheight
\fi
\strutdp\spacingfactor\dimexpr
- \ifdim\minimumstrutdepth>\zeropoint
- \minimumstrutdepth
- \else
- \strutdepthfactor\dimexpr\normallineheight
- \fi
+ \ifdim\minimumstrutdepth>\zeropoint
+ \minimumstrutdepth
+ \else
+ \strutdepthfactor\dimexpr\normallineheight
+ \fi
\dosetstrut}
\unexpanded\def\setcharstrut#1%
@@ -896,7 +924,9 @@
\ifabsnum\dimexpr\strutht+\strutdp-\lineheight\relax<\plustwo
% compensate rounding error /- 1sp to avoid too many
% 1sp baselineskips in for instance verbatim
- \strutht\dimexpr\lineheight-\strutdp\relax
+ % \strutht\dimexpr\lineheight-\strutdp\relax
+ % better:
+ \strutdp\dimexpr\lineheight-\strutht\relax
\struttotal\lineheight
\else
\struttotal\dimexpr\strutht+\strutdp\relax
@@ -916,15 +946,47 @@
\s!height\strutht
\s!depth \strutdp}}
+\newconstant\c_strut_visual_mode
+
\def\spac_struts_set_vide
{\setbox\strutbox\hbox % at some time this extra wrapping was needed
{\spac_struts_vide_hbox to \zeropoint
- {% \hss % new, will be option
- \vrule
- \s!width \strutwidth
- \s!height\strutht
- \s!depth \strutdp
- \hss}}}
+ {\ifcase\c_strut_visual_mode
+ \spac_struts_black
+ \or
+ \spac_struts_color
+ \else
+ \spac_struts_black
+ \fi}}}
+
+\def\spac_struts_black
+ {\vrule
+ \s!width \strutwidth
+ \s!height\strutht
+ \s!depth \strutdp
+ \hss}
+
+\def\spac_struts_color
+ {\hss % new, will be option
+ \scratchwidth.1\struthtdp
+ \begingroup
+ \directcolor[f:b:t]%
+ \vrule
+ \s!width \scratchwidth
+ \s!height\strutht
+ \s!depth \strutdp
+ \kern-\scratchwidth
+ \vrule
+ \s!width \scratchwidth
+ \s!height\zeropoint
+ \s!depth \strutdp
+ \endgroup
+ \kern-.625\scratchwidth
+ \vrule
+ \s!width .25\scratchwidth
+ \s!height\strutht
+ \s!depth \strutdp
+ \hss}
\let\spac_struts_vide_hbox\hbox % overloaded in trac-vis.mkiv
@@ -948,6 +1010,35 @@
\let\normalstrut\strut
+\unexpanded\def\halfstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht/\plustwo\relax
+ \dp\scratchbox\dimexpr\strutdp/\plustwo\relax
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\quarterstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht/\plusfour\relax
+ \dp\scratchbox\dimexpr\strutdp/\plusfour\relax
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\depthstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht-\struthtdp/\plustwo\relax % assumes that ht > lineheight/2
+ \box\scratchbox
+ \endgroup}
+
%D Sometimes a capstrut comes in handy
%D
%D \starttabulate[|Tl|l|l|]
@@ -989,7 +1080,13 @@
\fi}
\unexpanded\def\showstruts % adapts .. is wrong
- {\setteststrut
+ {\c_strut_visual_mode\zerocount
+ \setteststrut
+ \settestcrlf}
+
+\unexpanded\def\showcolorstruts % adapts .. is wrong
+ {\c_strut_visual_mode\plusone
+ \setteststrut
\settestcrlf}
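% A sketch, not from this patch: the visual mode constant and the fractional
% struts defined above can be tried out in a small test file, for instance:
%
% \starttext
%   \showcolorstruts \strut test \par % colored strut rules
%   \showstruts      \strut test \par % the traditional black rule
%   x\halfstrut x\quarterstrut x\depthstrut x
% \stoptext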
\unexpanded\def\setteststrut
@@ -1020,12 +1117,17 @@
\newbox\nostrutbox \setbox\nostrutbox\emptyhbox
+\newtoks\everysetnostrut
+
\unexpanded\def\setnostrut
- {\setbox\strutbox\copy\nostrutbox
- \let\strut\empty
- \let\endstrut\empty
- \let\begstrut\empty
- \let\crlfplaceholder\empty}
+ {\the\everysetnostrut}
+
+\appendtoks
+ \setbox\strutbox\copy\nostrutbox
+ \let\strut\empty
+ \let\endstrut\empty
+ \let\begstrut\empty
+\to \everysetnostrut
% when enabled, sigstruts will remove themselves if nothing
% goes inbetween
@@ -1154,6 +1256,10 @@
\let\normaloffinterlineskip\offinterlineskip % knuth's original
+\appendtoks
+ \ifvmode\clf_resetprevdepth\fi % a nasty hack (tested for a while now)
+\to \everyafteroutput
+
%D My own one:
\unexpanded\def\spac_helpers_push_interlineskip_yes
@@ -1325,10 +1431,20 @@
\unexpanded\def\installsnapvalues#1#2% todo: a proper define
{\edef\currentsnapper{#1:#2}%
\ifcsname\??gridsnapperattributes\currentsnapper\endcsname \else
- \setevalue{\??gridsnapperattributes\currentsnapper}{\ctxlua{builders.vspacing.definesnapmethod("#1","#2")}}%
+ \setevalue{\??gridsnapperattributes\currentsnapper}{\clf_definesnapmethod{#1}{#2}}%
\fi
\setevalue{\??gridsnappers#1}{\attribute\snapmethodattribute\csname\??gridsnapperattributes\currentsnapper\endcsname\space}}
+\unexpanded\def\usegridparameter#1% no checking here
+ {\edef\m_spac_grid_asked{#1\c!grid}%
+ \ifx\m_spac_grid_asked\empty
+ \attribute \snapvboxattribute\attributeunsetvalue
+ \else
+ \spac_grids_snap_value_set\m_spac_grid_asked
+ \attribute \snapvboxattribute\attribute\snapmethodattribute
+ \fi}
+
+
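% A sketch, not from this patch: like \useinterlinespaceparameter above,
% \usegridparameter expects a parameter fetcher, so a command handler that has a
% grid key can pass it on to the vbox snapper; with a hypothetical handler
% \myframedparameter this reads:
%
% \usegridparameter\myframedparameter % consults \myframedparameter\c!grid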
\unexpanded\def\definegridsnapping
{\dodoubleargument\spac_grids_define}
@@ -1394,6 +1510,9 @@
\definegridsnapping[\v!none] [\v!none]
\definegridsnapping[\v!line] [\v!line]
\definegridsnapping[\v!strut] [\v!strut]
+\definegridsnapping[\v!box] [\v!box] % centers a box rounded upwards (box:.5 -> tolerance)
+\definegridsnapping[\v!min] [\v!min] % centers a box rounded downwards
+\definegridsnapping[\v!max] [\v!max] % centers a box rounded upwards
\definegridsnapping[\v!max] [\v!maxdepth,\v!maxheight,\v!strut]
\definegridsnapping[\v!min] [\v!mindepth,\v!minheight,\v!strut]
@@ -1402,13 +1521,13 @@
\unexpanded\def\synchronizelocallinespecs
{\bodyfontlineheight \normallineheight
- \bodyfontstrutheight\strutheight
- \bodyfontstrutdepth \strutdepth}
+ \bodyfontstrutheight\strutht
+ \bodyfontstrutdepth \strutdp}
\unexpanded\def\synchronizegloballinespecs
{\global\globalbodyfontlineheight \normallineheight
- \global\globalbodyfontstrutheight\strutheight
- \global\globalbodyfontstrutdepth \strutdepth}
+ \global\globalbodyfontstrutheight\strutht
+ \global\globalbodyfontstrutdepth \strutdp}
\appendtoks
\synchronizegloballinespecs
@@ -1479,10 +1598,10 @@
\def\spac_grids_snap_to_finish#1%
{\ifvbox\nextbox % this will go away
- \ctxcommand{vspacingcollapse(\number\nextbox)}% isn't that already done?
+ \clf_vspacingcollapse\nextbox\relax % isn't that already done?
\fi
\doifelsenothing{#1}{\spac_grids_snap_value_set\v!normal}{\spac_grids_snap_value_set{#1}}%
- \ctxcommand{vspacingsnap(\number\nextbox,\number\attribute\snapmethodattribute)}%
+ \clf_vspacingsnap\nextbox\attribute\snapmethodattribute\relax
\ifvbox\nextbox\vbox\else\hbox\fi attr \snapmethodattribute \zerocount {\box\nextbox}%
\egroup}
@@ -1632,7 +1751,7 @@
\def\spac_vspacing_define_amount[#1][#2][#3]% can be combined
{\setvalue{\??vspacingamount#1}{\ifgridsnapping#3\else#2\fi}%
- \ctxcommand{vspacingsetamount("#1")}}
+ \clf_vspacingsetamount{#1}}
% \installcorenamespace{vspacingamountnormal}
% \installcorenamespace{vspacingamountgrid}
@@ -1644,13 +1763,13 @@
% \fi
% \csname n>#1\endcsname{#2}%
% \csname g>#1\endcsname{#3}%
-% \ctxcommand{vspacingsetamount("#1")}}
+% \clf_vspacingsetamount{#1}}
\unexpanded\def\definevspacing
{\dodoubleempty\spac_vspacing_define}
\def\spac_vspacing_define[#1][#2]%
- {\ctxcommand{vspacingdefine("#1","#2")}}
+ {\clf_vspacingdefine{#1}{#2}}
%D The injector code (generated at the \LUA\ end):
@@ -1751,7 +1870,7 @@
% The main spacer:
\unexpanded\def\vspacing
- {\doifnextoptionalelse\spac_vspacing_yes\spac_vspacing_nop}
+ {\doifelsenextoptionalcs\spac_vspacing_yes\spac_vspacing_nop}
\def\spac_vspacing_yes
{\ifinpagebody % somewhat weird
@@ -1772,21 +1891,21 @@
\fi\fi}
\def\spac_vspacing_yes_indeed[#1]%
- {\ifmmode\else\par\ctxcommand{vspacing("#1")}\fi}
+ {\ifmmode\else\par\clf_vspacing{#1}\fi}
\def\spac_vspacing_yes_ignore[#1]%
{\ifmmode\else\par\fi}
\def\spac_vspacing_nop_indeed
- {\ifmmode\else\par\ctxcommand{vspacing("\currentvspacing")}\fi}
+ {\ifmmode\else\par\clf_vspacing{\currentvspacing}\fi}
\def\spac_vspacing_nop_ignore
{\ifmmode\else\par\fi}
\def\directvspacing#1%
- {\par\ctxcommand{vspacing("#1")}}
+ {\par\clf_vspacing{#1}}
-% handy (and faste):
+% handy (and faster):
\unexpanded\def\directvpenalty#1%
{\begingroup
@@ -1807,7 +1926,7 @@
% these depend on bigskipamount cum suis so we'd better sync them
\unexpanded\def\setupvspacing
- {\doifnextoptionalelse\setupvspacing_yes\setupvspacing_nop}
+ {\doifelsenextoptionalcs\setupvspacing_yes\setupvspacing_nop}
\let\currentvspacing\s!default % hm, default, standard ...
@@ -1829,7 +1948,7 @@
% category:4 is default
-% this interface might change (into an \install, buw we will then keep this one hidden)
+% this interface might change (into an \install, but we will then keep this one hidden)
\definevspacingamount[\v!none] [\zeropoint] [\zeropoint]
\definevspacingamount[\v!big] [\bigskipamount] [\bodyfontlineheight]
@@ -1840,8 +1959,8 @@
\definevspacingamount[\v!quarterline] [.25\openlineheight] [.25\bodyfontlineheight]
\definevspacingamount[\v!formula] [\medskipamount] [.5\bodyfontlineheight]
\definevspacingamount[\v!white] [\parskip] [\bodyfontwhitespace]
-\definevspacingamount[\v!height] [\strutheight] [\bodyfontstrutheight]
-\definevspacingamount[\v!depth] [\strutdepth] [\bodyfontstrutdepth]
+\definevspacingamount[\v!height] [\strutht] [\bodyfontstrutheight]
+\definevspacingamount[\v!depth] [\strutdp] [\bodyfontstrutdepth]
\definevspacingamount[-\v!line] [-\openlineheight] [-\bodyfontlineheight]
\definevspacingamount[-\v!halfline] [-.5\openlineheight] [-.5\bodyfontlineheight]
@@ -1860,6 +1979,32 @@
\fi\fi
\relax}
+% used in itemize ... always test this
+
+\newdimen\d_spac_overlay
+
+\def\spac_overlay_lines
+ {\blank[\v!back,\v!overlay]%
+ \nointerlineskip}
+
+% \startitemize[n]
+% \item \input zapf
+% \item \startitemize[a]
+% \item \input knuth
+% \stopitemize
+% \stopitemize
+%
+% \strut \hfill first line \blank[overlay] second line \hfill \strut
+%
+% \ruledvbox {
+% \strut \hfill line 1 \blank[overlay]
+% line 2 \hfill \strut \blank[overlay]
+% \strut \hfill line 3 \hfill \strut
+% }
+%
+% \dorecurse{50}
+% {\startitemize[n] \startitem \startitemize[a] \item #1 \stopitemize \stopitem \stopitemize}
+
\definevspacing[\v!preference][penalty:-500] % goodbreak
\definevspacing[\v!samepage] [penalty:10000] % nobreak
\definevspacing[\v!max] [category:1]
@@ -1867,7 +2012,9 @@
\definevspacing[\v!disable] [category:5]
\definevspacing[\v!nowhite] [category:6]
\definevspacing[\v!back] [category:7]
-\definevspacing[\v!always] [category:0]
+% together [category:8]
+\definevspacing[\v!overlay] [category:9]
+\definevspacing[\v!always] [category:0] % hm, internally it's discard
\definevspacing[\v!weak] [order:0]
\definevspacing[\v!strong] [order:100]
@@ -1894,8 +2041,20 @@
%D \type {\blank} (we needed the first one while playing with the
%D new code).
+% We keep this one as reference
+%
+% \unexpanded\def\inhibitblank
+% {\vspacing[\v!disable]}
+%
+% but use the following more efficient variant instead:
+
\unexpanded\def\inhibitblank
- {\vspacing[\v!disable]} % can be made faster
+ {\ifvmode
+ \begingroup
+ \attribute\skipcategoryattribute\plusfive
+ \vskip\zeropoint
+ \endgroup
+ \fi}
\let\doinhibitblank\inhibitblank % keep this command, used in styles
@@ -1980,7 +2139,7 @@
\let\m_spac_hanging_location\empty
\def\spac_hanging_start[#1]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\let\m_spac_hanging_location\empty
\setupcurrenthanging[#1]}%
{\edef\m_spac_hanging_location{#1}}%
@@ -2109,7 +2268,7 @@
% as encountered in forced blank skips (see lua code)
%
% \appendtoks
-% \ifvmode\ctxcommand{resetprevdepth()}\fi
+% \ifvmode\clf_resetprevdepth\fi
% \to \everyafteroutput
%
% this should only happen when there is nothing left over (how to determine that) .. testcase:
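% A sketch, not from this patch: the vspacing keywords shown above can also be
% extended at the user level; an invented amount plus a mild break preference
% would be set up and used like this:
%
% \definevspacingamount[myskip] [12pt] [\bodyfontlineheight]
% \definevspacing      [mybreak][penalty:-200]
%
% \blank[myskip] ... \blank[mybreak]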
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index a74501e41..e43651ef8 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index a591afb75..37e62b70b 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index caa7dc16c..45c282256 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -320,7 +320,6 @@ return {
},
{
category = "mkiv",
- comment = "maybe this becomes a runtime module",
filename = "toks-ini",
loading = "always",
status = "okay",
@@ -394,6 +393,12 @@ return {
},
{
category = "mkiv",
+ filename = "typo-sus",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
filename = "node-pag",
loading = "never",
status = "okay",
@@ -542,6 +547,12 @@ return {
},
{
category = "mkiv",
+ filename = "lang-hyp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
filename = "unic-ini",
loading = "always",
status = "okay",
@@ -615,6 +626,12 @@ return {
},
{
category = "mkiv",
+ filename = "lang-hyp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
filename = "lang-frq",
loading = "on demand",
status = "okay",
@@ -2014,13 +2031,13 @@ return {
{
category = "mkiv",
filename = "bibl-bib",
- loading = "always",
+ loading = "on demand",
status = "pending",
},
{
category = "mkiv",
filename = "bibl-tra",
- loading = "always",
+ loading = "on demand",
status = "pending",
},
{
@@ -2534,6 +2551,66 @@ return {
loading = "on demand",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "publ-ini",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-old",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-tra",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-usr",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-jrn",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-xml",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-apa",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-cite",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-definitions",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-commands",
+ loading = "always",
+ status = "pending",
+ },
},
lua = {
{
@@ -2606,12 +2683,12 @@ return {
{
category = "lua",
filename = "bibl-bib",
- status = "todo",
+ loading = "on demand",
},
{
category = "lua",
filename = "bibl-tra",
- status = "todo",
+ loading = "on demand",
},
{
category = "lua",
@@ -3153,6 +3230,12 @@ return {
},
{
category = "lua",
+ filename = "font-inj",
+ loading = "font-lib",
+ status = "okay",
+ },
+ {
+ category = "lua",
filename = "font-ldr",
loading = "on demand",
status = "okay",
@@ -3430,6 +3513,18 @@ return {
},
{
category = "lua",
+ filename = "lang-dis",
+ loading = "lang-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lang-hyp",
+ loading = "lang-hyp",
+ status = "okay",
+ },
+ {
+ category = "lua",
filename = "lang-ini",
loading = "lang-ini",
status = "okay",
@@ -3442,6 +3537,12 @@ return {
},
{
category = "lua",
+ filename = "lang-hyp",
+ loading = "lang-hyp",
+ status = "okay",
+ },
+ {
+ category = "lua",
filename = "lang-txt",
loading = "lang-lab",
status = "okay",
@@ -4035,6 +4136,11 @@ return {
},
{
category = "lua",
+ filename = "node-ppt",
+ status = "todo",
+ },
+ {
+ category = "lua",
filename = "node-pro",
status = "todo",
},
@@ -4651,7 +4757,14 @@ return {
{
category = "lua",
filename = "toks-ini",
- status = "todo",
+ loading = "toks-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "toks-scn",
+ loading = "toks-ini",
+ status = "okay",
},
{
category = "lua",
@@ -4755,6 +4868,11 @@ return {
},
{
category = "lua",
+ filename = "typo-sus",
+ status = "okay",
+ },
+ {
+ category = "lua",
filename = "typo-brk",
status = "todo",
},
@@ -4842,6 +4960,11 @@ return {
},
{
category = "lua",
+ filename = "typo-man",
+ status = "todo",
+ },
+ {
+ category = "lua",
filename = "typo-prc",
status = "todo",
},
@@ -4997,6 +5120,48 @@ return {
filename = "x-mathml",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "publ-ini",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-aut",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-dat",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-oth",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-fnd",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-tra",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-usr",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
},
metafun = {
{
diff --git a/tex/context/base/strc-bkm.lua b/tex/context/base/strc-bkm.lua
index c38ab3c2e..96b68b236 100644
--- a/tex/context/base/strc-bkm.lua
+++ b/tex/context/base/strc-bkm.lua
@@ -13,14 +13,15 @@ if not modules then modules = { } end modules ['strc-bkm'] = {
-- we should hook the placement into everystoptext ... needs checking
-local format, concat, gsub = string.format, table.concat, string.gsub
+-- todo: make an lpeg for stripped
+
+local next, type = next, type
+local gsub, lower = string.gsub, string.lower
+local concat = table.concat
local utfvalues = utf.values
local settings_to_hash = utilities.parsers.settings_to_hash
-local codeinjections = backends.codeinjections
-
-local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
-
+local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
local report_bookmarks = logs.reporter("structure","bookmarks")
local structures = structures
@@ -30,13 +31,17 @@ structures.bookmarks = structures.bookmarks or { }
local bookmarks = structures.bookmarks
local sections = structures.sections
local lists = structures.lists
-
local levelmap = sections.levelmap
local variables = interfaces.variables
+local implement = interfaces.implement
+local codeinjections = backends.codeinjections
-bookmarks.method = "internal" -- or "page"
+bookmarks.method = "internal" -- or "page"
-local names, opened, forced, numbered = { }, { }, { }, { }
+local names = { }
+local opened = { }
+local forced = { }
+local numbered = { }
function bookmarks.register(settings)
local force = settings.force == variables.yes
@@ -78,8 +83,13 @@ function bookmarks.overload(name,text)
end
end
if ls then
- ls.titledata.bookmark = text
+ local titledata = ls.titledata
+ if titledata then
+ titledata.bookmark = text
+ end
end
+ -- last resort
+ -- context.writetolist({name},text,"")
end
local function stripped(str) -- kind of generic
@@ -101,54 +111,6 @@ function bookmarks.setup(spec)
end
end
--- function bookmarks.place()
--- if next(names) then
--- local list = lists.filtercollected(names,"all",nil,lists.collected,forced)
--- if #list > 0 then
--- local levels, noflevels, lastlevel = { }, 0, 1
--- for i=1,#list do
--- local li = list[i]
--- local metadata = li.metadata
--- local name = metadata.name
--- if not metadata.nolist or forced[name] then -- and levelmap[name] then
--- local titledata = li.titledata
--- if titledata then
--- local structural = levelmap[name]
--- lastlevel = structural or lastlevel
--- local title = titledata.bookmark
--- if not title or title == "" then
--- -- We could typeset the title and then convert it.
--- if not structural then
--- -- placeholder, todo: bookmarklabel
--- title = name .. ": " .. (titledata.title or "?")
--- else
--- title = titledata.title or "?"
--- end
--- end
--- if numbered[name] then
--- local sectiondata = sections.collected[li.references.section]
--- local numberdata = li.numberdata
--- if sectiondata and numberdata and not numberdata.hidenumber then
--- -- we could typeset the number and convert it
--- title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. title
--- end
--- end
--- noflevels = noflevels + 1
--- levels[noflevels] = {
--- lastlevel,
--- stripped(title), -- can be replaced by converter
--- li.references, -- has internal and realpage
--- allopen or opened[name]
--- }
--- end
--- end
--- end
--- bookmarks.finalize(levels)
--- end
--- function bookmarks.place() end -- prevent second run
--- end
--- end
-
function bookmarks.place()
if next(names) then
local levels = { }
@@ -157,26 +119,53 @@ function bookmarks.place()
local nofblocks = #lists.sectionblocks -- always >= 1
local showblocktitle = toboolean(numberspec.showblocktitle,true)
for i=1,nofblocks do
- local block = lists.sectionblocks[i]
+ local block = lists.sectionblocks[i]
local blockdone = nofblocks == 1
- local list = lists.filtercollected(names,block..":all",nil,lists.collected,forced)
+ local list = lists.filter {
+ names = names,
+ criterium = block .. ":all",
+ forced = forced,
+ }
for i=1,#list do
local li = list[i]
local metadata = li.metadata
local name = metadata.name
if not metadata.nolist or forced[name] then -- and levelmap[name] then
local titledata = li.titledata
+ --
+ if not titledata then
+ local userdata = li.userdata
+ if userdata then
+ local first = userdata.first
+ local second = userdata.second
+ if first then
+ if second then
+ titledata = { title = first .. " " .. second }
+ else
+ titledata = { title = first }
+ end
+ elseif second then
+ titledata = { title = second }
+ else
+ -- ignoring (command and so)
+ end
+ end
+ end
+ --
if titledata then
if not blockdone then
if showblocktitle then
-- add block entry
local blockdata = sections.sectionblockdata[block]
noflevels = noflevels + 1
+ local references = li.references
levels[noflevels] = {
- 1, -- toplevel
- stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block),
- li.references,
- allopen or opened[name] -- same as first entry
+ level = 1, -- toplevel
+ title = stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block),
+ reference = references,
+ opened = allopen or opened[name], -- same as first entry
+ realpage = references and references.realpage or 0, -- handy for later
+ usedpage = true,
}
end
blockdone = true
@@ -190,27 +179,36 @@ function bookmarks.place()
local title = titledata.bookmark
if not title or title == "" then
-- We could typeset the title and then convert it.
- if not structural then
- -- placeholder, todo: bookmarklabel
- title = name .. ": " .. (titledata.title or "?")
- else
+ -- if not structural then
+ -- title = titledata.title or "?"
+ -- else
title = titledata.title or "?"
- end
+ -- end
end
if numbered[name] then
local sectiondata = sections.collected[li.references.section]
local numberdata = li.numberdata
- if sectiondata and numberdata and not numberdata.hidenumber then
+ if sectiondata and numberdata then
+ if not numberdata.hidenumber then
-- we could typeset the number and convert it
- title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. title
+ local number = sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)
+ if number and #number > 0 then
+ title = concat(number) .. " " .. title
+ end
+ end
end
end
noflevels = noflevels + 1
+ local references = li.references
levels[noflevels] = {
- lastlevel,
- stripped(title), -- can be replaced by converter
- li.references, -- has internal and realpage
- allopen or opened[name]
+ level = lastlevel,
+ title = stripped(title), -- can be replaced by converter
+ reference = references, -- has internal and realpage
+ opened = allopen or opened[name],
+ realpage = references and references.realpage or 0, -- handy for later
+ usedpage = true,
+ structural = structural,
+ name = name,
}
end
end
@@ -222,47 +220,296 @@ function bookmarks.place()
end
function bookmarks.flatten(levels)
+ if not levels then
+ -- a plugin messed up
+ return { }
+ end
-- This function promotes leading structurelements with a higher level
-- to the next lower level. Such situations are the result of lack of
-- structure: a subject preceding a chapter in a sectionblock. So, the
-- following code runs over section blocks as well. (bookmarks-007.tex)
local noflevels = #levels
if noflevels > 1 then
- local skip, start, one = false, 1, levels[1]
- local first, block = one[1], one[3].block
+ local function showthem()
+ for i=1,noflevels do
+ local level = levels[i]
+ -- if level.structural then
+ -- report_bookmarks("%i > %s > %s",level.level,level.reference.block,level.title)
+ -- else
+ report_bookmarks("%i > %s > %s > %s",level.level,level.reference.block,level.name,level.title)
+ -- end
+ end
+ end
+ if trace_bookmarks then
+ report_bookmarks("checking structure")
+ showthem()
+ end
+ local skip = false
+ local done = 0
+ local start = 1
+ local one = levels[1]
+ local first = one.level
+ local block = one.reference.block
for i=2,noflevels do
- local li = levels[i]
- local new, newblock = li[1], li[3].block
+ local current = levels[i]
+ local new = current.level
+ local reference = current.reference
+ local newblock = type(reference) == "table" and current.reference.block or block
if newblock ~= block then
- first, block, start, skip = new, newblock, i, false
+ first = new
+ block = newblock
+ start = i
+ skip = false
elseif skip then
-- go on
elseif new > first then
skip = true
elseif new < first then
for j=start,i-1 do
- local lj = levels[j]
- local old = lj[1]
- lj[1] = new
+ local previous = levels[j]
+ local old = previous.level
+ previous.level = new
if trace_bookmarks then
- report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,lj[2])
+ report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,previous.title)
end
+ done = done + 1
end
skip = true
end
end
+ if trace_bookmarks then
+ if done > 0 then
+ report_bookmarks("%a entries promoted")
+ showthem()
+ else
+ report_bookmarks("nothing promoted")
+ end
+ end
+ end
+ return levels
+end
+
+local extras = { }
+local lists = { }
+local names = { }
+
+bookmarks.extras = extras
+
+local function cleanname(name)
+ return lower(file.basename(name))
+end
+
+function extras.register(name,levels)
+ if name and levels then
+ name = cleanname(name)
+ local found = names[name]
+ if found then
+ lists[found].levels = levels
+ else
+ lists[#lists+1] = {
+ name = name,
+ levels = levels,
+ }
+ names[name] = #lists
+ end
+ end
+end
+
+function extras.get(name)
+ if name then
+ local found = names[cleanname(name)]
+ if found then
+ return lists[found].levels
+ end
+ else
+ return lists
+ end
+end
+
+function extras.reset(name)
+ local l, n = { }, { }
+ if name then
+ name = cleanname(name)
+ for i=1,#lists do
+ local li = lists[i]
+ local ln = li.name
+ if name == ln then
+ -- skip
+ else
+ local m = #l + 1
+ l[m] = li
+ n[ln] = m
+ end
+ end
+ end
+ lists, names = l, n
+end
+
+local function checklists()
+ for i=1,#lists do
+ local levels = lists[i].levels
+ for j=1,#levels do
+ local entry = levels[j]
+ local pageindex = entry.pageindex
+ if pageindex then
+ entry.reference = figures.getrealpage(pageindex)
+ entry.pageindex = nil
+ end
+ end
end
end
+function extras.tosections(levels)
+ local sections = { }
+ local noflists = #lists
+ for i=1,noflists do
+ local levels = lists[i].levels
+ local data = { }
+ sections[i] = data
+ for j=1,#levels do
+ local entry = levels[j]
+ if entry.usedpage then
+ local section = entry.section
+ local d = data[section]
+ if d then
+ d[#d+1] = entry
+ else
+ data[section] = { entry }
+ end
+ end
+ end
+ end
+ return sections
+end
+
+function extras.mergesections(levels,sections)
+ if not sections or #sections == 0 then
+ return levels
+ elseif not levels then
+ return { }
+ else
+ local merge = { }
+ local noflists = #lists
+ if #levels == 0 then
+ local level = 0
+ local section = 0
+ for i=1,noflists do
+ local entries = sections[i][0]
+ if entries then
+ for i=1,#entries do
+ local entry = entries[i]
+ merge[#merge+1] = entry
+ entry.level = entry.level + level
+ end
+ end
+ end
+ else
+ for j=1,#levels do
+ local entry = levels[j]
+ merge[#merge+1] = entry
+ local section = entry.reference.section
+ local level = entry.level
+ entry.section = section -- for tracing
+ for i=1,noflists do
+ local entries = sections[i][section]
+ if entries then
+ for i=1,#entries do
+ local entry = entries[i]
+ merge[#merge+1] = entry
+ entry.level = entry.level + level
+ end
+ end
+ end
+ end
+ end
+ return merge
+ end
+end
+
+function bookmarks.merge(levels,mode)
+ return extras.mergesections(levels,extras.tosections())
+end
+
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
+
+local bookmarkactions = sequencers.new {
+ arguments = "levels,method",
+ returnvalues = "levels",
+ results = "levels",
+}
+
+appendgroup(bookmarkactions,"before") -- user
+appendgroup(bookmarkactions,"system") -- private
+appendgroup(bookmarkactions,"after" ) -- user
+
+appendaction(bookmarkactions,"system",bookmarks.flatten)
+appendaction(bookmarkactions,"system",bookmarks.merge)
+
function bookmarks.finalize(levels)
- -- This function can be overloaded by an optional converter
- -- that uses nodes.toutf on a typeset stream. This is something
- -- that we will support when the main loop has become a coroutine.
- codeinjections.addbookmarks(levels,bookmarks.method)
+ local method = bookmarks.method or "internal"
+ checklists() -- so that plugins have the adapted page number
+ levels = bookmarkactions.runner(levels,method)
+ if levels and #levels > 0 then
+ -- normally this is not needed
+ local purged = { }
+ for i=1,#levels do
+ local l = levels[i]
+ if l.usedpage ~= false then
+ purged[#purged+1] = l
+ end
+ end
+ --
+ codeinjections.addbookmarks(purged,method)
+ else
+ -- maybe a plugin messed up
+ end
+end
+
+function bookmarks.installhandler(what,where,func)
+ if not func then
+ where, func = "after", where
+ end
+ if where == "before" or where == "after" then
+ sequencers.appendaction(bookmarkactions,where,func)
+ else
+ report_tex("installing bookmark %a handlers in %a is not possible",what,tostring(where))
+ end
end
-- interface
-commands.overloadbookmark = bookmarks.overload
-commands.registerbookmark = bookmarks.register
-commands.setupbookmarks = bookmarks.setup
+implement {
+ name = "setupbookmarks",
+ actions = bookmarks.setup,
+ arguments = {
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ { "showblocktitle" },
+ }
+ }
+}
+
+implement {
+ name = "registerbookmark",
+ actions = bookmarks.register,
+ arguments = {
+ {
+ { "names" },
+ { "opened" },
+ { "force" },
+ { "number" },
+ }
+ }
+}
+
+implement {
+ name = "overloadbookmark",
+ actions = bookmarks.overload,
+ arguments = { "string", "string" }
+}
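-- A sketch, not from this patch: the new extras table lets a plugin (such as
-- the pdf figure inclusion mentioned in strc-bkm.mkiv below) register its own
-- bookmark entries under a file name. The field names are guessed from
-- checklists and tosections above (pageindex is remapped to a real page at
-- finalize time):
--
-- structures.bookmarks.extras.register("somefile.pdf", {
--     { level = 1, title = "imported title", pageindex = 1, usedpage = true },
-- })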
diff --git a/tex/context/base/strc-bkm.mkiv b/tex/context/base/strc-bkm.mkiv
index 9d2ebd796..9688a1f93 100644
--- a/tex/context/base/strc-bkm.mkiv
+++ b/tex/context/base/strc-bkm.mkiv
@@ -74,7 +74,11 @@
\def\strc_bookmarks_bookmark_yes[#1]#2%
{\begingroup
\simplifycommands
- \ctxcommand{overloadbookmark("#1",\!!bs\detokenize\expandafter{\normalexpanded{#2}}\!!es)}%
+ \ifnum\thenamedheadlevel{#1}>\zerocount
+ \clf_overloadbookmark{#1}{\detokenize\expandafter{\normalexpanded{#2}}}%
+ \else
+ \strc_lists_write_to[#1][]{#2}{}% todo: a dedicated bookmark writer
+ \fi
\endgroup}
\def\strc_bookmarks_bookmark_nop[#1]#2%
@@ -106,27 +110,59 @@
\ifthirdargument
\setupcurrentbookmark[#3]% no every so not all possible
\else\ifsecondargument
- \doifassignmentelse{#2}{\let\m_bookmarks_opened\empty\setupcurrentbookmark[#2]}\donothing
+ \doifelseassignment{#2}{\let\m_bookmarks_opened\empty\setupcurrentbookmark[#2]}\donothing
\fi\fi
- \ctxcommand{registerbookmark {
- names = "\m_bookmarks_names",
- opened = "\m_bookmarks_opened",
- force = "\bookmarkparameter\c!force",
- number = "\bookmarkparameter\c!number",
- }}%
+ \clf_registerbookmark
+ names {\m_bookmarks_names}%
+ opened {\m_bookmarks_opened}%
+ force {\bookmarkparameter\c!force}%
+ number {\bookmarkparameter\c!number}%
+ \relax
\endgroup}
\appendtoks
- \ctxcommand{setupbookmarks {
- separatorset = "\bookmarkparameter\c!numberseparatorset",
- conversionset = "\bookmarkparameter\c!numberconversionset",
- starter = \!!bs\bookmarkparameter\c!numberstarter\!!es,
- stopper = \!!bs\bookmarkparameter\c!numberstopper\!!es,
- segments = "\bookmarkparameter\c!numbersegments",
- showblocktitle = "\bookmarkparameter\c!sectionblock",
- }}%
+ \clf_setupbookmarks
+ separatorset {\bookmarkparameter\c!numberseparatorset}%
+ conversionset {\bookmarkparameter\c!numberconversionset}%
+ starter {\bookmarkparameter\c!numberstarter}%
+ stopper {\bookmarkparameter\c!numberstopper}%
+ segments {\bookmarkparameter\c!numbersegments}%
+ showblocktitle {\bookmarkparameter\c!sectionblock}%
+ \relax
\to \everysetupbookmark
+%D There is a plugin mechanism but this is for experts only. The intermediate
+%D data structures are stable.
+%D
+%D \starttyping
+%D \startluacode
+%D structures.bookmarks.installhandler("check before","before",function(levels)
+%D logs.report("extra bookmarks","before (normal bookmarks)")
+%D inspect(levels)
+%D logs.report("extra bookmarks","before (extra bookmarks)")
+%D inspect(structures.bookmarks.extras.get())
+%D return levels
+%D end)
+%D structures.bookmarks.installhandler("check after", "after", function(levels)
+%D logs.report("extra bookmarks","after (merged bookmarks)")
+%D inspect(levels)
+%D return levels
+%D end)
+%D \stopluacode
+%D \stoptyping
+%D
+%D This mechanism was added when bookmark inclusion became an (optional) part of graphic
+%D inclusion (which is needed by Taco).
+%D
+%D \starttyping
+%D \getfiguredimensions[somefile.pdf]
+%D \dorecurse {\noffigurepages} {
+%D \startTEXpage
+%D \externalfigure[somefile.pdf][interaction=bookmark,page=\recurselevel]
+%D \stopTEXpage
+%D }
+%D \stoptyping
+
\protect \endinput
% \starttext
diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua
index 935b6c061..0ababcfc0 100644
--- a/tex/context/base/strc-blk.lua
+++ b/tex/context/base/strc-blk.lua
@@ -16,6 +16,8 @@ local allocate = utilities.storage.allocate
local context = context
local commands = commands
+local implement = interfaces.implement
+
local structures = structures
structures.blocks = structures.blocks or { }
@@ -78,13 +80,19 @@ end
function blocks.select(state,name,tag,criterium)
criterium = criterium or "text"
- if find(tag,"=") then tag = "" end
- local names = settings_to_set(name)
- local all = tag == ""
- local tags = not all and settings_to_set(tag)
- local hide = state == "process"
- local n = sections.numberatdepth(criterium)
- local result = lists.filtercollected("all", criterium, n, collected, { })
+ if find(tag,"=",1,true) then
+ tag = ""
+ end
+ local names = settings_to_set(name)
+ local all = tag == ""
+ local tags = not all and settings_to_set(tag)
+ local hide = state == "process"
+ local result = lists.filter {
+ names = "all",
+ criterium = criterium,
+ number = sections.numberatdepth(criterium), -- not needed
+ collected = collected,
+ }
for i=1,#result do
local ri = result[i]
local metadata = ri.metadata
@@ -148,8 +156,7 @@ end
-- interface
-
-commands.definestructureblock = blocks.define
-commands.savestructureblock = blocks.save
-commands.selectstructureblock = blocks.select
-commands.setstructureblockstate = blocks.setstate
+implement { name = "definestructureblock", actions = blocks.define, arguments = "string" }
+implement { name = "savestructureblock", actions = blocks.save, arguments = { "string", "string" ,"string" } }
+implement { name = "selectstructureblock", actions = blocks.select, arguments = { "string", "string" ,"string", "string" } }
+implement { name = "setstructureblockstate", actions = blocks.setstate, arguments = { "string", "string" ,"string" } }
diff --git a/tex/context/base/strc-blk.mkiv b/tex/context/base/strc-blk.mkiv
index 1dd144aa9..fe259d223 100644
--- a/tex/context/base/strc-blk.mkiv
+++ b/tex/context/base/strc-blk.mkiv
@@ -33,7 +33,7 @@
\installcommandhandler \??block {block} \??block
\appendtoks
- \ctxcommand{definestructureblock("\currentblock")}%
+ \clf_definestructureblock{\currentblock}%
\setuevalue{\e!begin\currentblock}{\dodoubleempty\strc_blocks_begin[\currentblock]}%
\setuevalue{\e!end \currentblock}{}%
\to \everydefineblock
@@ -41,7 +41,8 @@
\unexpanded\def\strc_blocks_begin[#1][#2]%
{\normalexpanded{\buff_pickup{@block@}{\e!begin#1}{\e!end#1}}
{}% before
- {\ctxcommand{savestructureblock("#1","#2","@block@")}}}% after
+ {\clf_savestructureblock{#1}{#2}{@block@}}%
+ \plusone}% after
\let\strc_blocks_setup\relax
@@ -71,17 +72,17 @@
\egroup}
\def\strc_blocks_set_state[#1][#2][#3]% state name tag
- {\ctxcommand{setstructureblockstate("#1","#2","#3")}}
+ {\clf_setstructureblockstate{#1}{#2}{#3}}
\def\strc_blocks_select[#1][#2][#3][#4]% state name tag setups
{\bgroup
- \doifassignmentelse{#3}
+ \doifelseassignment{#3}
{\getparameters[\??blocktemp][\c!criterium=\v!text,#3]%
\def\strc_blocks_setup{\setupcurrentblock[#3]}%
- \ctxcommand{selectstructureblock("#1","#2","","\csname\??blocktemp\c!criterium\endcsname")}}
+ \clf_selectstructureblock{#1}{#2}{}{\csname\??blocktemp\c!criterium\endcsname}}
{\getparameters[\??blocktemp][\c!criterium=\v!text,#4]%
\def\strc_blocks_setup{\setupcurrentblock[#4]}%
- \ctxcommand{selectstructureblock("#1","#2","#3","\csname\??blocktemp\c!criterium\endcsname")}}%
+ \clf_selectstructureblock{#1}{#2}{#3}{\csname\??blocktemp\c!criterium\endcsname}}%
\egroup}
% hide : save, if [+] also hidden execute
diff --git a/tex/context/base/strc-con.mkvi b/tex/context/base/strc-con.mkvi
index 75519b8ce..11f6f758e 100644
--- a/tex/context/base/strc-con.mkvi
+++ b/tex/context/base/strc-con.mkvi
@@ -159,6 +159,7 @@
\unexpanded\def\strc_constructions_initialize#1% class instance
{\edef\currentconstruction{#1}%
+ \let\currentconstructionhash\??construction
\let\currentconstructionlistentry\!!zerocount
\expandafter\let\expandafter\currentconstructionmain \csname\??constructionmain \currentconstruction\endcsname
\expandafter\let\expandafter\currentconstructionlevel \csname\??constructionlevel\currentconstruction\endcsname
@@ -214,7 +215,7 @@
\constructionparameter\c!headcommand
{\strut
\constructionparameter\c!text
- \ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}}%
+ \clf_savedlisttitle{\currentconstructionmain}\currentconstructionlistentry\relax}%
\endgroup}
\unexpanded\def\strc_constructions_stored_start
@@ -262,10 +263,14 @@
\def\strc_constructions_ignore_head
{\constructionsheaddistance\zeropoint
- \constructionsheadwidth \zeropoint}
+ \constructionsheadwidth \zeropoint
+ % we also need to make sure that no stretch creeps in (new per 2015-02-02, for Alan)
+ \settrue\c_strc_constructions_distance_none}
+
+\let\currentconstructionhash\??construction
\unexpanded\setvalue{\??constructionstarthandler\v!construction}% this will be redone (reorganized) .. too much boxing
- {\dostarttagged\t!construction\currentconstruction
+ {\dostarttaggedchained\t!construction\currentconstruction\currentconstructionhash
\dotagsetconstruction
\constructionparameter\c!before
\begingroup
@@ -322,7 +327,7 @@
\else
\strc_constructions_preroll_head\currentconstructionsample
\ifzeropt\wd\constructionheadbox
- \strc_constructions_ignore_head
+ \strc_constructions_ignore_head
\else
\strc_constructions_set_width_and_distance
\fi
@@ -352,10 +357,6 @@
\ifx\p_strc_constructions_align\empty \else
\setupalign[\p_strc_constructions_align]% \use...
\fi
- \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}%
- \ifx\p_strc_constructions_indenting\empty \else
- \indenting[\p_strc_constructions_indenting]% \use...
- \fi
\ifcase\c_strc_constructions_nested_state
\c_strc_constructions_nested_state\plusone
\or
@@ -366,6 +367,11 @@
\edef\p_strc_constructions_headalign{\constructionparameter\c!headalign}%
%
\directsetup\p_strc_constructions_renderingsetup\relax
+ % moved to here 2014-07-03
+ \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}%
+ \ifx\p_strc_constructions_indenting\empty \else
+ \indenting[\p_strc_constructions_indenting]% \use...
+ \fi
%
\dostoptagged % tag
\dostarttagged\t!constructioncontent\empty
@@ -502,7 +508,7 @@
\setupalign[\p_strc_constructions_headalign]% use fast one
\fi
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox}%
-\setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
+ \setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
\ht\constructionheadbox\strutht
\dp\constructionheadbox\strutdp}
@@ -526,9 +532,14 @@
% The setups. These only deal with placement of the descriptor and initializing the
% environment. The wrapping happens elsewhere.
+% todo: optimize the setups with
+%
+% \ifconditional\c_strc_constructions_distance_none : no need for skip
+% \ifzeropt\wd\constructionheadbox : no need for box and skips
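+%
+% A hypothetical sketch of such a guard (editorial addition, not part of this commit),
+% mirroring the test already used in the serried "fit" rendering further down; all
+% names are taken from the surrounding code:
+%
+% \ifconditional\c_strc_constructions_distance_none \else
+%   \hskip\constructionsheaddistance\relax
+% \fi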
+
\startsetups[\??constructionrenderings:\v!left]
\edef\p_strc_constructions_hang{\constructionparameter\c!hang}%
- \doifsetupselse{\??constructionrenderings:\v!left:\p_strc_constructions_hang} {
+ \doifelsesetups{\??constructionrenderings:\v!left:\p_strc_constructions_hang} {
\directsetup{\??constructionrenderings:\v!left:\p_strc_constructions_hang}
} {
\directsetup{\??constructionrenderings:\v!left:\v!hanging}
@@ -537,7 +548,7 @@
\startsetups[\??constructionrenderings:\v!right]
\edef\p_strc_constructions_hang{\constructionparameter\c!hang}
- \doifsetupselse{\??constructionrenderings:\v!right:\p_strc_constructions_hang} {
+ \doifelsesetups{\??constructionrenderings:\v!right:\p_strc_constructions_hang} {
\directsetup{\??constructionrenderings:\v!right:\p_strc_constructions_hang}
} {
\directsetup{\??constructionrenderings:\v!right:\v!hanging}
@@ -556,6 +567,7 @@
\copy\constructionheadbox\hss
}
}
+ \nobreak
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -580,6 +592,7 @@
\copy\constructionheadbox
\hskip\rightconstructionskip
}
+ \nobreak
\advance\rightskip \constructionsheaddistance
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -634,6 +647,7 @@
\box\constructionheadbox
}
}
+ \nobreak
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -653,6 +667,7 @@
}
}
}
+ \nobreak
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -730,7 +745,7 @@
\startsetups[\??constructionrenderings:\v!serried]
\edef\p_strc_constructions_width{\constructionparameter\c!width}% CHECK ! ! ! wrong parameter namespace
- \doifsetupselse{\??constructionrenderings:\v!serried:\p_strc_constructions_width} {
+ \doifelsesetups{\??constructionrenderings:\v!serried:\p_strc_constructions_width} {
\directsetup{\??constructionrenderings:\v!serried:\p_strc_constructions_width}
} {
\directsetup{\??constructionrenderings:\v!serried:\v!wide}
@@ -741,7 +756,7 @@
\let\\=\crlf
\noindent
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover?
- \penalty\plustenthousand % new
+ \nobreak
\hskip\constructionsheaddistance\relax
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -752,8 +767,8 @@
\noindent
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover?
\ifconditional\c_strc_constructions_distance_none \else
- \penalty\plustenthousand % new
- \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
+ \nobreak
+ \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
\fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -770,6 +785,7 @@
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox
\hss
}
+ \nobreak
\hskip\constructionsheaddistance\relax
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -845,7 +861,7 @@
\let\currentconstructionlistentry\!!zerocount
\def\strc_constructions_register
- {\ctxcommand{doiflisthasentry(\currentconstructionlistentry)}%
+ {\clf_doifelselisthasentry\numexpr\currentconstructionlistentry\relax
\strc_constructions_register_nop
\strc_constructions_register_yes}
@@ -858,7 +874,7 @@
\def\strc_constructions_discard
{\iftrialtypesetting
% \writestatus{constructions}{discarding \currentconstruction: \number\currentconstructionlistentry}%
- \ctxcommand{discardfromlist(\currentconstructionlistentry)}%
+ \clf_discardfromlist\currentconstructionlistentry\relax
\fi}
\let\currentconstructionlistnumber \!!zerocount
@@ -928,62 +944,67 @@
\else
\setnextinternalreferences{construction}\currentconstructionmain % plural
\relax
- \scratchcounter\ctxcommand{addtolist{ % we can set a counter at the lua end
- metadata = {
- kind = "construction",
- name = "\currentconstructionmain",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
- % \currentdirectionparameters
- },
- references = {
- internal = \nextinternalreference,
- order = \nextinternalorderreference,
- reference = "\currentconstructionreference",
- referenceprefix = "\referenceprefix",
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- titledata = {
- label = \!!bs\detokenize\expandafter{\currentconstructionlabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentconstructiontitle }\!!es,
+ \scratchcounter\clf_addtolist
+ metadata {
+ kind {construction}
+ name {\currentconstructionmain}
+ % level structures.sections.currentlevel()
+ catcodes \catcodetable
+ % \currentdirectionparameters
+ }
+ references {
+ internal \nextinternalreference
+ order \nextinternalorderreference
+ reference {\currentconstructionreference}
+ prefix {\referenceprefix}
+ % block {\currentsectionblock}
+ % section structures.sections.currentid(),
+ }
+ titledata {
+ label {\detokenize\expandafter{\currentconstructionlabel}}
+ title {\detokenize\expandafter{\currentconstructiontitle}}
\ifx\currentconstructionbookmark\currentconstructiontitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentconstructionbookmark}\!!es,
+ bookmark {\detokenize\expandafter{\currentconstructionbookmark}}
\fi
\ifx\currentconstructionlist\currentconstructiontitle \else
- list = \!!bs\detokenize\expandafter{\currentconstructionlist }\!!es,
+ list {\detokenize\expandafter{\currentconstructionlist}}
\fi
- },
+ }
\ifconditional\c_strc_constructions_number_state
- prefixdata = {
- prefix = "\constructionparameter\c!prefix",
- separatorset = "\constructionparameter\c!prefixseparatorset",
- conversion = \!!bs\constructionparameter\c!prefixconversion\!!es,
- conversionset = "\constructionparameter\c!prefixconversionset",
- set = "\constructionparameter\c!prefixset",
- segments = "\constructionparameter\c!prefixsegments",
- connector = \!!bs\constructionparameter\c!prefixconnector\!!es,
- },
- numberdata = {
- numbers = structures.counters.compact("\currentconstructionnumber",nil,true), % ! number can be cloned
- separatorset = "\constructionparameter\c!numberseparatorset",
- conversion = "\constructionparameter\c!numberconversion",
- conversionset = "\constructionparameter\c!numberconversionset",
- starter = \!!bs\constructionparameter\c!numberstarter\!!es,
- stopper = \!!bs\constructionparameter\c!numberstopper\!!es,
- segments = "\constructionparameter\c!numbersegments",
- },
+ prefixdata {
+ prefix {\constructionparameter\c!prefix}
+ separatorset {\constructionparameter\c!prefixseparatorset}
+ conversion {\constructionparameter\c!prefixconversion}
+ conversionset {\constructionparameter\c!prefixconversionset}
+ set {\constructionparameter\c!prefixset}
+ segments {\constructionparameter\c!prefixsegments}
+ connector {\constructionparameter\c!prefixconnector}
+ }
+ numberdata {
+ numbers {\currentconstructionnumber}
+ separatorset {\constructionparameter\c!numberseparatorset}
+ conversion {\constructionparameter\c!numberconversion}
+ conversionset {\constructionparameter\c!numberconversionset}
+ starter {\constructionparameter\c!numberstarter}
+ stopper {\constructionparameter\c!numberstopper}
+ segments {\constructionparameter\c!numbersegments}
+ }
\or
% symbol
\fi
- userdata = \!!bs\detokenize{#2}\!!es % will be converted to table at the lua end
- }
- }\relax
+ userdata {\detokenize{#2}}
+ \relax
% \writestatus{constructions}{registering \currentconstruction: \number\scratchcounter}%
+ \clf_setinternalreference
+ prefix {\referenceprefix}%
+ reference {\currentconstructionreference}%
+ internal \nextinternalreference
+ view {\interactionparameter\c!focus}%
+ \relax
\normalexpanded{%
\endgroup
\edef\noexpand\currentconstructionlistentry {\the\scratchcounter}%
- \edef\noexpand\currentconstructionattribute {\ctxcommand {setinternalreference("\referenceprefix","\currentconstructionreference",\nextinternalreference,"\interactionparameter\c!focus")}}%
+ \edef\noexpand\currentconstructionattribute {\the\lastdestinationattribute}%
\edef\noexpand\currentconstructionsynchronize{\ctxlatecommand{enhancelist(\the\scratchcounter)}}%
}%
\fi}
@@ -993,7 +1014,7 @@
% macros.
\def\reinstateconstructionnumberentry#1% was xdef
- {\edef\currentconstructionattribute {\ctxcommand {getinternalreference(#1)}}%
+ {\edef\currentconstructionattribute {\clf_getinternallistreference#1}%
\edef\currentconstructionsynchronize{\ctxlatecommand{enhancelist(#1)}}}
\installstructurelistprocessor{construction}{\usestructurelistprocessor{number+title}}
diff --git a/tex/context/base/strc-def.mkiv b/tex/context/base/strc-def.mkiv
index 0738bdf29..b4d2a5fea 100644
--- a/tex/context/base/strc-def.mkiv
+++ b/tex/context/base/strc-def.mkiv
@@ -32,6 +32,7 @@
\defineresetset [\s!default] [] [1] % each level
\defineprefixset [\s!default] [section-1,section-2,section-3] []
+\defineconversionset [\v!number] [] [numbers]
\defineconversionset [\v!pagenumber] [] [numbers]
\defineprefixset [\v!all] [section-1,section-2,section-3,section-4,section-5,section-6,section-7,section-8] []
@@ -47,6 +48,9 @@
\setupuserpagenumber
[\c!numberconversionset=\v!pagenumber]
+\setupcounters
+ [\c!numberconversionset=\v!number]
+
% \startsetups defaults:frontpart:pagenumbers:roman
% \defineconversionset[\c!frontpart:\c!pagenumber][][romannumerals]
% \setupuserpagenumber[\c!way=\v!by\v!block]
@@ -220,19 +224,19 @@
[\c!before={\blank[\v!preference,\v!big]}, % sort of mkii compatible, watch columns
\c!after=\blank,
\c!label=\v!yes,
- \c!distance=1em]
+ \c!distance=\emwidth]
\setuplist
[\v!chapter]
[\c!before={\blank[\v!preference,\v!big]}, % sort of mkii compatible, watch columns
\c!after=]
-\setuplist [\v!part] [\c!width=0em]
-\setuplist [\v!chapter] [\c!width=2em]
-\setuplist [\v!section] [\c!width=3em]
-\setuplist [\v!subsection] [\c!width=4em]
-\setuplist [\v!subsubsection] [\c!width=5em]
-\setuplist [\v!subsubsubsection] [\c!width=6em]
-\setuplist [\v!subsubsubsubsection] [\c!width=7em]
+\setuplist [\v!part] [\c!width=0\emwidth]
+\setuplist [\v!chapter] [\c!width=2\emwidth]
+\setuplist [\v!section] [\c!width=3\emwidth]
+\setuplist [\v!subsection] [\c!width=4\emwidth]
+\setuplist [\v!subsubsection] [\c!width=5\emwidth]
+\setuplist [\v!subsubsubsection] [\c!width=6\emwidth]
+\setuplist [\v!subsubsubsubsection] [\c!width=7\emwidth]
\protect \endinput
diff --git a/tex/context/base/strc-des.mkvi b/tex/context/base/strc-des.mkvi
index 9c4d3fc6d..3557000f9 100644
--- a/tex/context/base/strc-des.mkvi
+++ b/tex/context/base/strc-des.mkvi
@@ -76,6 +76,7 @@
\unexpanded\setvalue{\??constructioninitializer\v!description}%
{\let\currentdescription \currentconstruction
\let\constructionparameter \descriptionparameter
+ \let\constructionnamespace \??description
\let\detokenizedconstructionparameter\detokenizeddescriptionparameter
\let\letconstructionparameter \letdescriptionparameter
\let\useconstructionstyleandcolor \usedescriptionstyleandcolor
@@ -102,10 +103,10 @@
\unexpanded\def\strc_descriptions_start#1%
{\begingroup
\strc_constructions_initialize{#1}%
- \doifnextoptionalelse\strc_descriptions_start_yes\strc_descriptions_start_nop}
+ \doifelsenextoptionalcs\strc_descriptions_start_yes\strc_descriptions_start_nop}
\unexpanded\def\strc_descriptions_start_yes[#1]%
- {\doifassignmentelse{#1}\strc_descriptions_start_yes_assignment\strc_descriptions_start_yes_reference[#1]}
+ {\doifelseassignment{#1}\strc_descriptions_start_yes_assignment\strc_descriptions_start_yes_reference[#1]}
\unexpanded\def\strc_descriptions_start_yes_assignment[#1]% todo userdata
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title=,\c!bookmark=,\c!list=,#1][]%
@@ -119,7 +120,7 @@
\fi}
\unexpanded\def\strc_descriptions_start_yes_titled[#1]%
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_start_yes_titled_indeed[#1]}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_start_yes_normal[#1]}}
@@ -140,7 +141,7 @@
\fi}
\unexpanded\def\strc_descriptions_start_nop_titled
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_start_nop_titled_indeed}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_start_nop_normal}}%
@@ -162,7 +163,7 @@
\unexpanded\def\strc_descriptions_command#1%
{\begingroup
\strc_constructions_initialize{#1}%
- \doifnextoptionalelse\strc_descriptions_yes\strc_descriptions_nop}
+ \doifelsenextoptionalcs\strc_descriptions_yes\strc_descriptions_nop}
\unexpanded\def\strc_descriptions_yes
{\ifconditional\c_strc_constructions_title_state
@@ -176,7 +177,7 @@
\csname\??constructioncommandhandler\currentconstructionhandler\endcsname}
\unexpanded\def\strc_descriptions_yes_titled[#1]%
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_yes_titled_indeed[#1]}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_yes_normal[#1]}}
@@ -197,7 +198,7 @@
\fi}
\unexpanded\def\strc_descriptions_nop_titled
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_nop_titled_indeed}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_nop_normal}}
diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua
index e3cbb02ed..029d68a9d 100644
--- a/tex/context/base/strc-doc.lua
+++ b/tex/context/base/strc-doc.lua
@@ -17,15 +17,18 @@ if not modules then modules = { } end modules ['strc-doc'] = {
local next, type, tonumber, select = next, type, tonumber, select
local format, gsub, find, gmatch, match = string.format, string.gsub, string.find, string.gmatch, string.match
-local concat, fastcopy = table.concat, table.fastcopy
+local concat, fastcopy, insert, remove = table.concat, table.fastcopy, table.insert, table.remove
local max, min = math.max, math.min
local allocate, mark, accesstable = utilities.storage.allocate, utilities.storage.mark, utilities.tables.accesstable
local setmetatableindex = table.setmetatableindex
+local lpegmatch, P, C = lpeg.match, lpeg.P, lpeg.C
local catcodenumbers = catcodes.numbers
local ctxcatcodes = catcodenumbers.ctxcatcodes
local variables = interfaces.variables
+local implement = interfaces.implement
+
local v_last = variables.last
local v_first = variables.first
local v_previous = variables.previous
@@ -59,8 +62,14 @@ local startapplyprocessor = processors.startapply
local stopapplyprocessor = processors.stopapply
local strippedprocessor = processors.stripped
+local convertnumber = converters.convert
+
local a_internal = attributes.private('internal')
+local ctx_convertnumber = context.convertnumber
+local ctx_sprint = context.sprint
+local ctx_finalizeauto = context.finalizeautostructurelevel
+
-- -- -- document -- -- --
local data -- the current state
@@ -124,28 +133,48 @@ local registered = sections.registered
storage.register("structures/sections/registered", registered, "structures.sections.registered")
+local function update(name,level,section)
+ for k, v in next, registered do
+ if k ~= name and v.coupling == name then
+ report_structure("updating section level %a to level of %a",k,name)
+ context.doredefinehead(k,name)
+ update(k,level,section)
+ end
+ end
+end
+
function sections.register(name,specification)
registered[name] = specification
+ local level = specification.level
+ local section = specification.section
+ update(name,level,section)
end
function sections.currentid()
return #tobesaved
end
+local lastsaved = 0
+
function sections.save(sectiondata)
-- local sectionnumber = helpers.simplify(section.sectiondata) -- maybe done earlier
local numberdata = sectiondata.numberdata
local ntobesaved = #tobesaved
if not numberdata or sectiondata.metadata.nolist then
- return ntobesaved
+ -- stay
else
ntobesaved = ntobesaved + 1
tobesaved[ntobesaved] = numberdata
if not collected[ntobesaved] then
collected[ntobesaved] = numberdata
end
- return ntobesaved
end
+ lastsaved = ntobesaved
+ return ntobesaved
+end
+
+function sections.currentsectionindex()
+ return lastsaved -- only for special controlled situations
end
function sections.load()
@@ -210,7 +239,7 @@ end
function sections.pushblock(name,settings)
counters.check(0) -- we assume sane usage of \page between blocks
local block = name or data.block
- data.blocks[#data.blocks+1] = block
+ insert(data.blocks,block)
data.block = block
sectionblockdata[block] = settings
documents.reset()
@@ -218,17 +247,18 @@ function sections.pushblock(name,settings)
end
function sections.popblock()
- data.blocks[#data.blocks] = nil
- local block = data.blocks[#data.blocks] or data.block
+ local block = remove(data.blocks) or data.block
data.block = block
documents.reset()
return block
end
-function sections.currentblock()
+local function getcurrentblock()
return data.block or data.blocks[#data.blocks] or "unknown"
end
+sections.currentblock = getcurrentblock
+
function sections.currentlevel()
return data.depth
end
@@ -239,18 +269,36 @@ end
local saveset = { } -- experiment, see sections/tricky-001.tex
-function sections.somelevel(given)
+function sections.setentry(given)
-- old number
local numbers = data.numbers
+ --
+ local metadata = given.metadata
+ local numberdata = given.numberdata
+ local references = given.references
+ local directives = given.directives
+ local userdata = given.userdata
+
+ if not metadata then
+ metadata = { }
+ given.metadata = metadata
+ end
+ if not numberdata then
+ numberdata = { }
+ given.numberdata = numberdata
+ end
+ if not references then
+ references = { }
+ given.references = references
+ end
local ownnumbers = data.ownnumbers
local forced = data.forced
local status = data.status
local olddepth = data.depth
- local givenname = given.metadata.name
+ local givenname = metadata.name
local mappedlevel = levelmap[givenname]
local newdepth = tonumber(mappedlevel or (olddepth > 0 and olddepth) or 1) -- hm, levelmap only works for section-*
- local directives = given.directives
local resetset = directives and directives.resetset or ""
-- local resetter = sets.getall("structure:resets",data.block,resetset)
-- a trick to permit userdata to overload title, ownnumber and reference
@@ -260,14 +308,13 @@ function sections.somelevel(given)
report_structure("name %a, mapped level %a, old depth %a, new depth %a, reset set %a",
givenname,mappedlevel,olddepth,newdepth,resetset)
end
- local u = given.userdata
- if u then
- -- kind of obsolete as we can pass them directly anyway
- if u.reference and u.reference ~= "" then given.metadata.reference = u.reference ; u.reference = nil end
- if u.ownnumber and u.ownnumber ~= "" then given.numberdata.ownnumber = u.ownnumber ; u.ownnumber = nil end
- if u.title and u.title ~= "" then given.titledata.title = u.title ; u.title = nil end
- if u.bookmark and u.bookmark ~= "" then given.titledata.bookmark = u.bookmark ; u.bookmark = nil end
- if u.label and u.label ~= "" then given.titledata.label = u.label ; u.label = nil end
+ if userdata then
+ -- kind of obsolete as we can pass them directly anyway ... NEEDS CHECKING !
+ if userdata.reference and userdata.reference ~= "" then given.metadata.reference = userdata.reference ; userdata.reference = nil end
+ if userdata.ownnumber and userdata.ownnumber ~= "" then given.numberdata.ownnumber = userdata.ownnumber ; userdata.ownnumber = nil end
+ if userdata.title and userdata.title ~= "" then given.titledata.title = userdata.title ; userdata.title = nil end
+ if userdata.bookmark and userdata.bookmark ~= "" then given.titledata.bookmark = userdata.bookmark ; userdata.bookmark = nil end
+ if userdata.label and userdata.label ~= "" then given.titledata.label = userdata.label ; userdata.label = nil end
end
-- so far for the trick
if saveset then
@@ -305,12 +352,12 @@ function sections.somelevel(given)
end
end
counters.check(newdepth)
- ownnumbers[newdepth] = given.numberdata.ownnumber or ""
- given.numberdata.ownnumber = nil
+ ownnumbers[newdepth] = numberdata.ownnumber or ""
+ numberdata.ownnumber = nil
data.depth = newdepth
-- new number
olddepth = newdepth
- if given.metadata.increment then
+ if metadata.increment then
local oldn, newn = numbers[newdepth] or 0, 0
local fd = forced[newdepth]
if fd then
@@ -340,40 +387,31 @@ function sections.somelevel(given)
v[2](k)
end
end
- local numberdata= given.numberdata
- if not numberdata then
- -- probably simplified to nothing
- numberdata = { }
- given.numberdata = numberdata
- end
-
local n = { }
for i=1,newdepth do
n[i] = numbers[i]
end
numberdata.numbers = n
--- numberdata.numbers = fastcopy(numbers)
-
+ if not numberdata.block then
+ numberdata.block = getcurrentblock() -- also in references
+ end
if #ownnumbers > 0 then
numberdata.ownnumbers = fastcopy(ownnumbers)
end
if trace_detail then
report_structure("name %a, numbers % a, own numbers % a",givenname,numberdata.numbers,numberdata.ownnumbers)
end
-
- local metadata = given.metadata
- local references = given.references
-
+ if not references.block then
+ references.block = getcurrentblock() -- also in numberdata
+ end
local tag = references.tag or tags.getid(metadata.kind,metadata.name)
if tag and tag ~= "" and tag ~= "?" then
references.tag = tag
end
-
local setcomponent = structures.references.setcomponent
if setcomponent then
setcomponent(given) -- might move to the tex end
end
-
references.section = sections.save(given)
-- given.numberdata = nil
end
@@ -456,7 +494,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
local data = data.status[depth]
local d
if data then
- if find(key,"%.") then
+ if find(key,".",1,true) then
d = accesstable(key,data)
else
d = data.titledata
@@ -468,7 +506,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
local metadata = data.metadata
local catcodes = metadata and metadata.catcodes
if catcodes then
- context.sprint(catcodes,d)
+ ctx_sprint(catcodes,d)
else
context(d)
end
@@ -477,7 +515,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
else
local catcodes = catcodenumbers[honorcatcodetable]
if catcodes then
- context.sprint(catcodes,d)
+ ctx_sprint(catcodes,d)
else
context(d)
end
@@ -512,16 +550,18 @@ function sections.current()
return data.status[data.depth]
end
-function sections.depthnumber(n)
+local function depthnumber(n)
local depth = data.depth
if not n or n == 0 then
n = depth
elseif n < 0 then
n = depth + n
end
- return context(data.numbers[n] or 0)
+ return data.numbers[n] or 0
end
+sections.depthnumber = depthnumber
+
function sections.autodepth(numbers)
for i=#numbers,1,-1 do
if numbers[i] ~= 0 then
@@ -547,10 +587,9 @@ end
-- sign=positive => also zero
-- sign=hang => llap sign
---~ todo: test this
---~
+-- this can be a local function
-local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,entry,result,preceding,done,language)
-- todo: too much (100 steps)
local number = numbers and (numbers[index] or 0)
local ownnumber = ownnumbers and ownnumbers[index] or ""
@@ -571,20 +610,20 @@ local function process(index,numbers,ownnumbers,criterium,separatorset,conversio
if ownnumber ~= "" then
result[#result+1] = ownnumber
elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) .. inherited!
- result[#result+1] = converters.convert(conversion,number)
+ result[#result+1] = convertnumber(conversion,number,language)
else
local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
- result[#result+1] = converters.convert(theconversion,number)
+ result[#result+1] = convertnumber(theconversion,number,language)
end
else
if ownnumber ~= "" then
applyprocessor(ownnumber)
elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
- context.convertnumber(conversion,number)
+ ctx_convertnumber(conversion,number)
else
local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
local data = startapplyprocessor(theconversion)
- context.convertnumber(data or "numbers",number)
+ ctx_convertnumber(data or "numbers",number)
stopapplyprocessor()
end
end
@@ -606,6 +645,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local set = ""
local segments = ""
local criterium = ""
+ local language = ""
for d=1,select("#",...) do
local data = select(d,...) -- can be multiple parametersets
if data then
@@ -619,6 +659,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if set == "" then set = data.set or "" end
if segments == "" then segments = data.segments or "" end
if criterium == "" then criterium = data.criterium or "" end
+ if language == "" then language = data.language or "" end
end
end
if separatorset == "" then separatorset = "default" end
@@ -630,6 +671,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if connector == "" then connector = nil end
if set == "" then set = "default" end
if segments == "" then segments = nil end
+ if language == "" then language = nil end
--
if criterium == v_strict then
criterium = 0
@@ -641,10 +683,10 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
criterium = 0
end
--
- local firstprefix, lastprefix = 0, 16
+ local firstprefix, lastprefix = 0, 16 -- too much, could use the max level found
if segments then
local f, l = match(tostring(segments),"^(.-):(.+)$")
- if l == "*" then
+ if l == "*" or l == v_all then
l = 100 -- new
end
if f and l then
@@ -678,7 +720,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
applyprocessor(starter)
end
end
- if prefixlist and (kind == 'section' or kind == 'prefix' or kind == 'direct') then
+ if prefixlist and (kind == "section" or kind == "prefix" or kind == "direct") then
-- find valid set (problem: for sectionnumber we should pass the level)
-- no holes
local b, e, bb, ee = 1, #prefixlist, 0, 0
@@ -722,15 +764,13 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local prefix = prefixlist[k]
local index = sections.getlevel(prefix) or k
if index >= firstprefix and index <= lastprefix then
- -- process(index,result)
- preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+ preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,entry,result,preceding,done,language)
end
end
else
-- also holes check
for index=firstprefix,lastprefix do
- -- process(index,result)
- preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+ preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,entry,result,preceding,done,language)
end
end
--
@@ -746,7 +786,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if result then
result[#result+1] = strippedprocessor(groupsuffix)
else
- applyprocessor(groupsuffix)
+ applyprocessor(groupsuffix)
end
end
if stopper then
@@ -891,34 +931,21 @@ end
function sections.getnumber(depth,what) -- redefined here
local sectiondata = sections.findnumber(depth,what)
- context((sectiondata and sectiondata.numbers[depth]) or 0)
+ local askednumber = 0
+ if sectiondata then
+ local numbers = sectiondata.numbers
+ if numbers then
+ askednumber = numbers[depth] or 0
+ end
+ end
+ context(askednumber)
end
-- experimental
local levels = { }
---~ function commands.autonextstructurelevel(level)
---~ if level > #levels then
---~ for i=#levels+1,level do
---~ levels[i] = ""
---~ end
---~ end
---~ local finish = concat(levels,"\n",level) or ""
---~ for i=level+1,#levels do
---~ levels[i] = ""
---~ end
---~ levels[level] = [[\finalizeautostructurelevel]]
---~ context(finish)
---~ end
-
---~ function commands.autofinishstructurelevels()
---~ local finish = concat(levels,"\n") or ""
---~ levels = { }
---~ context(finish)
---~ end
-
-function commands.autonextstructurelevel(level)
+local function autonextstructurelevel(level)
if level > #levels then
for i=#levels+1,level do
levels[i] = false
@@ -926,7 +953,7 @@ function commands.autonextstructurelevel(level)
else
for i=level,#levels do
if levels[i] then
- context.finalizeautostructurelevel()
+ ctx_finalizeauto()
levels[i] = false
end
end
@@ -934,39 +961,141 @@ function commands.autonextstructurelevel(level)
levels[level] = true
end
-function commands.autofinishstructurelevels()
+local function autofinishstructurelevels()
for i=1,#levels do
if levels[i] then
- context.finalizeautostructurelevel()
+ ctx_finalizeauto()
end
end
levels = { }
end
--- interface (some are actually already commands, like sections.fullnumber)
+implement {
+ name = "autonextstructurelevel",
+ actions = autonextstructurelevel,
+ arguments = "integer",
+}
-commands.structurenumber = function() sections.fullnumber() end
-commands.structuretitle = function() sections.title () end
+implement {
+ name = "autofinishstructurelevels",
+ actions = autofinishstructurelevels,
+}
-commands.structurevariable = function(name) sections.structuredata(nil,name) end
-commands.structureuservariable = function(name) sections.userdata (nil,name) end
-commands.structurecatcodedget = function(name) sections.structuredata(nil,name,nil,true) end
-commands.structuregivencatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
-commands.structureautocatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
+-- interface (some are actually already commands, like sections.fullnumber)
-commands.namedstructurevariable = function(depth,name) sections.structuredata(depth,name) end
-commands.namedstructureuservariable = function(depth,name) sections.userdata (depth,name) end
+implement {
+ name = "depthnumber",
+ actions = { depthnumber, context },
+ arguments = "integer",
+}
---
+implement { name = "structurenumber", actions = sections.fullnumber }
+implement { name = "structuretitle", actions = sections.title }
+
+implement { name = "structurevariable", actions = sections.structuredata, arguments = { false, "string" } }
+implement { name = "structureuservariable", actions = sections.userdata, arguments = { false, "string" } }
+implement { name = "structurecatcodedget", actions = sections.structuredata, arguments = { false, "string", false, true } }
+implement { name = "structuregivencatcodedget", actions = sections.structuredata, arguments = { false, "string", false, "integer" } }
+implement { name = "structureautocatcodedget", actions = sections.structuredata, arguments = { false, "string", false, "string" } }
+
+implement { name = "namedstructurevariable", actions = sections.structuredata, arguments = { "string", "string" } }
+implement { name = "namedstructureuservariable", actions = sections.userdata, arguments = { "string", "string" } }
+
+implement { name = "setstructurelevel", actions = sections.setlevel, arguments = { "string", "string" } }
+implement { name = "getstructurelevel", actions = sections.getcurrentlevel, arguments = { "string" } }
+implement { name = "setstructurenumber", actions = sections.setnumber, arguments = { "integer", "string" } }
+implement { name = "getstructurenumber", actions = sections.getnumber, arguments = { "integer" } }
+implement { name = "getsomestructurenumber", actions = sections.getnumber, arguments = { "integer", "string" } }
+implement { name = "getfullstructurenumber", actions = sections.fullnumber, arguments = { "integer" } }
+implement { name = "getsomefullstructurenumber", actions = sections.fullnumber, arguments = { "integer", "string" } }
+implement { name = "getspecificstructuretitle", actions = sections.structuredata, arguments = { "string", "'titledata.title'",false,"string" } }
+
+implement { name = "reportstructure", actions = sections.reportstructure }
+
+implement {
+ name = "registersection",
+ actions = sections.register,
+ arguments = {
+ "string",
+ {
+ { "coupling" },
+ { "section" },
+ { "level", "integer" },
+ { "parent" },
+ }
+ }
+}
-commands.setsectionblock = sections.setblock
-commands.pushsectionblock = sections.pushblock
-commands.popsectionblock = sections.popblock
+implement {
+ name = "setsectionentry",
+ actions = sections.setentry,
+ arguments = {
+ {
+ { "references", {
+ { "internal", "integer" },
+ { "block" },
+ { "backreference" },
+ { "prefix" },
+ { "reference" },
+ }
+ },
+ { "directives", {
+ { "resetset" }
+ }
+ },
+ { "metadata", {
+ { "kind" },
+ { "name" },
+ { "catcodes", "integer" },
+ { "coding" },
+ { "xmlroot" },
+ { "xmlsetup" },
+ { "nolist", "boolean" },
+ { "increment" },
+ }
+ },
+ { "titledata", {
+ { "label" },
+ { "title" },
+ { "bookmark" },
+ { "marking" },
+ { "list" },
+ }
+ },
+ { "numberdata", {
+ { "block" },
+ { "hidenumber", "boolean" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "ownnumber" },
+ { "language" },
+ },
+ },
+ { "userdata" },
+ }
+ }
+}
---
+-- os.exit()
-local byway = "^" .. v_by -- ugly but downward compatible
+implement {
+ name = "setsectionblock",
+ actions = sections.setblock,
+ arguments = { "string", { { "bookmark" } } }
+}
-function commands.way(way)
- context((gsub(way,byway,"")))
-end
+implement {
+ name = "pushsectionblock",
+ actions = sections.pushblock,
+ arguments = { "string", { { "bookmark" } } }
+}
+
+implement {
+ name = "popsectionblock",
+ actions = sections.popblock,
+}
diff --git a/tex/context/base/strc-doc.mkiv b/tex/context/base/strc-doc.mkiv
index c8dfae1e4..c453f199e 100644
--- a/tex/context/base/strc-doc.mkiv
+++ b/tex/context/base/strc-doc.mkiv
@@ -20,7 +20,13 @@
%D This will move:
\unexpanded\def\setstructuresynchronization#1% todo: use ctxcontext
- {\xdef\currentstructureattribute {\ctxlua {tex.write(structures.references.setinternalreference("\currentstructurereferenceprefix","\currentstructurereference",\nextinternalreference,"\interactionparameter\c!focus"))}}%
- \xdef\currentstructuresynchronize{\ctxlatecommand{enhancelist(#1)}}}
+ {\clf_setinternalreference
+ prefix {\currentstructurereferenceprefix}%
+ reference {\currentstructurereference}
+ internal \nextinternalreference
+ view {\interactionparameter\c!focus}%
+ \relax
+ \xdef\currentstructureattribute {\the\lastdestinationattribute}%
+ \xdef\currentstructuresynchronize{\strc_lists_inject_enhance{#1}{\nextinternalreference}}}
\protect \endinput
diff --git a/tex/context/base/strc-enu.mkvi b/tex/context/base/strc-enu.mkvi
index e369bc2e1..b76bc0067 100644
--- a/tex/context/base/strc-enu.mkvi
+++ b/tex/context/base/strc-enu.mkvi
@@ -172,7 +172,7 @@
\ifx\p_counter\empty %
\let\p_counter\currentenumeration
\fi
- \doifcounterelse\p_counter\donothing{\strc_enumerations_define_counter\p_counter}%
+ \doifelsecounter\p_counter\donothing{\strc_enumerations_define_counter\p_counter}%
\letenumerationparameter\s!counter\p_counter
%\strc_enumerations_setup_counter\currentenumeration
\to \everydefineenumeration
@@ -183,6 +183,7 @@
\unexpanded\setvalue{\??constructioninitializer\v!enumeration}%
{\let\currentenumeration \currentconstruction
\let\constructionparameter \enumerationparameter
+ \let\constructionnamespace \??enumeration
\let\detokenizedconstructionparameter\detokenizedenumerationparameter
\let\letconstructionparameter \letenumerationparameter
\let\useconstructionstyleandcolor \useenumerationstyleandcolor
@@ -283,7 +284,7 @@
\unexpanded\def\strc_enumerations_inject_extra_text
{\ifconditional\c_strc_constructions_title_state
- \ctxcommand{doiflisthastitleelse("\currentconstructionmain",\currentconstructionlistentry)}
+ \clf_doifelselisthastitle{\currentconstructionmain}\numexpr\currentconstructionlistentry\relax
\donothing
\strc_enumerations_inject_extra_text_indeed
\fi}
@@ -300,7 +301,7 @@
\useconstructionstyleandcolor\c!titlestyle\c!titlecolor
\constructionparameter\c!titlecommand
{\constructionparameter\c!titleleft
- \ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}%
+ \clf_savedlisttitle{\currentconstructionmain}\currentconstructionlistentry\relax
\constructionparameter\c!titleright}%
\endgroup}
@@ -312,7 +313,7 @@
\unexpanded\def\strc_enumerations_inject_number
{\constructionparameter\c!left
\constructionparameter\c!starter
- \ctxcommand{savedlistprefixednumber("\currentconstructionmain",\currentconstructionlistentry)}%
+ \clf_savedlistprefixednumber{\currentconstructionmain}\currentconstructionlistentry\relax
\constructionparameter\c!stopper
\constructionparameter\c!right}
@@ -370,6 +371,6 @@
\fi}
\unexpanded\def\strc_enumerations_skip_number_coupling[#tag]% e.g. for questions with no answer
- {\ctxlua{structures.references.setnextorder("construction","#tag")}}
+ {\clf_setnextreferenceorder{construction}{#tag}}
\protect \endinput
diff --git a/tex/context/base/strc-flt.mkvi b/tex/context/base/strc-flt.mkvi
index a93921317..be2958fbf 100644
--- a/tex/context/base/strc-flt.mkvi
+++ b/tex/context/base/strc-flt.mkvi
@@ -11,12 +11,31 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D This module will be redone with conditionals and everything
+
\writestatus{loading}{ConTeXt Structure Macros / Float Numbering}
\registerctxluafile{strc-flt}{1.001}
\unprotect
+% todo: a keyword for this (and then a settings->hash for speed)
+%
+% \setuplayout[width=middle,backspace=3cm]
+%
+% \appendtoks
+% \settrue\inhibitmargindata
+% \to \everyinsidefloat
+%
+% \starttext
+% \dorecurse{20}{
+% \par \inleft{\red\infofont<#1>} \par
+% \placefigure[leftmargin]{}{\framed[height=1cm,width=2cm]{}}
+% % \placefigure{#1}{\framed[height=1cm,width=2cm]{}}
+% \par line #1.1 \par line #1.2 \par
+% }
+% \stoptext
+
% todo: delay caption creation and make setups for each method instead
% so that we can have a list of methods and redo them as we can
% keep the list or even better: recreate it
@@ -91,8 +110,9 @@
% \c!stopper=\@@kostopper,
\c!suffixseparator=, % currently rather hard coded
\c!suffix=\floatcaptionsuffix,
- \c!distance=1em,
+ \c!distance=\emwidth,
\c!conversion=\v!numbers,
+ \c!maxwidth=\hsize,
\c!command=]
% we can comment some of these
@@ -133,6 +153,7 @@
\c!outermargin=\zeropoint, % idem
\c!leftmargindistance=\zeropoint,
\c!rightmargindistance=\floatparameter\c!leftmargindistance,
+ \c!step=\v!big, % the flush side float step (big=line, medium=halfline, small=quarterline, depth=halfline with normaldepth)
\c!ntop=2,
\c!nbottom=0,
\c!nlines=4, % used?
@@ -186,8 +207,8 @@
{\definefloatcaption[#1][#3]%
\definecounter[#1][#3]%
\definelist[#1][#3]%
- \presetlabeltext[#1=\Word{#3}~]%
- \presetheadtext[#2=\Word{#2}]%
+ \copylabeltext[#1=#3]%
+ %\presetheadtext[#2=\Word{#2}]%
\strc_floats_define_saved[#1][#3]%
\strc_floats_define_commands{#1}{#2}}
@@ -235,8 +256,9 @@
\namedtaggedlabeltexts
\t!floatlabel \currentfloat
\t!floatnumber\currentfloat
- {\ctxcommand{savedlistprefixednumber("\currentfloat",\currentfloatnumber)}%
- \thecurrentfloatnumbersuffix}%
+ {\floatcaptionparameter\c!numbercommand
+ {\clf_savedlistprefixednumber{\currentfloat}\currentfloatnumber\relax
+ \thecurrentfloatnumbersuffix}}%
\fi
\fi \fi}
@@ -244,7 +266,8 @@
{\ifnofloatcaption \else
\ifx\currentfloatnumber\relax\else
\dostarttagged\t!floattext\empty
- \ctxcommand{savedlisttitle("\currentfloat",\currentfloatnumber)}%
+ \floatcaptionparameter\c!textcommand
+ {\clf_savedlisttitle{\currentfloat}\currentfloatnumber\relax}%
\dostoptagged
\fi
\fi}
@@ -457,6 +480,10 @@
\ifx\currentfloat\empty
\let\currentfloat\v!figure % a bit of a hack
\fi
+ \doifelsecommandhandler\??float\currentfloat
+ \donothing
+ {\writestatus\m!floatblocks{unknown float type '\currentfloat'}%
+ \let\currentfloat\v!figure}% also a hack
\global\let\lastplacedfloat\currentfloat
\let\m_strc_floats_saved_userdata\empty
\let\currentfloatcaption\currentfloat}
@@ -489,8 +516,9 @@
\edef\floatlocation{\floatparameter\c!default}% beware of a clash between alignment locations
\fi
\strc_floats_analyze_location
+ % todo: use \lets
\setupcurrentfloatcaption[\c!reference={#reference},\c!title={#caption},\c!marking=,\c!list=,\c!bookmark=]%
- \doifinsetelse\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal}
+ \doifelseinset\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal}
\unexpanded\def\placefloat
{\flushnotes
@@ -535,7 +563,7 @@
\setupcurrentfloatuserdata[#userdata]%
\fi
\strc_floats_analyze_location
- \doifinsetelse\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal
+ \doifelseinset\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal
\bgroup
\ignorespaces}
@@ -547,7 +575,7 @@
{\flushnotes
\page_otr_command_flush_side_floats % here !
\strc_floats_begin_group
- \dodoubleempty\strc_floats_start_place_float}
+ \dotripleempty\strc_floats_start_place_float}
\def\strc_floats_start_place_float[#tag]%
{\strc_floats_set_current_tag{#tag}%
@@ -636,14 +664,14 @@
\unexpanded\def\strc_floats_place_next_box_normal
{\ifconditional\c_page_floats_some_waiting
% this was \checkwaitingfloats spread all over
- \doifinsetelse\v!always\floatlocation
+ \doifelseinset\v!always\floatlocation
{\showmessage\m!floatblocks5\empty}
- {\doifcommonelse\floatlocation\flushfloatslist\page_otr_command_flush_floats\donothing}%
+ {\doifelsecommon\floatlocation\flushfloatslist\page_otr_command_flush_floats\donothing}%
% but which should be done before using box \floatbox
\fi
\page_margin_strc_floats_before % todo: each float handler gets a before
\global\insidefloattrue
- \dostarttagged\t!float\currentfloat
+ \dostarttaggedchained\t!float\currentfloat\??float
\page_margin_strc_floats_set_hsize % todo: each float handler gets a set_hsize
\the\everyinsidefloat
\strc_floats_analyze_variables_one
@@ -681,7 +709,7 @@
{\ifinsidecolumns
\global\setfalse\c_strc_floats_par_float
\else
- \doifcommonelse\floatlocation\flushfloatslist
+ \doifelsecommon\floatlocation\flushfloatslist
{\global\settrue \c_strc_floats_par_float}
{\global\setfalse\c_strc_floats_par_float}%
\fi
@@ -720,12 +748,12 @@
180=>\global\c_strc_floats_rotation\commalistelement\relax,%
270=>\global\c_strc_floats_rotation\commalistelement\relax]%
\fi
- \doifinsetelse\v!nonumber\floatlocation
+ \doifelseinset\v!nonumber\floatlocation
{\global\nofloatnumbertrue}
{\doifelse{\floatcaptionparameter\c!number}\v!yes
{\global\nofloatnumberfalse}
{\global\nofloatnumbertrue}}%
- \doifinsetelse\v!none\floatlocation
+ \doifelseinset\v!none\floatlocation
{\global\nofloatcaptiontrue}
{\global\nofloatcaptionfalse}%
\doif{\floatcaptionparameter\c!number}\v!none % new
@@ -741,7 +769,7 @@
\ifconditional\c_page_floats_center_box_global
\settrue\c_page_floats_center_box_local
\else
- \doifinsetelse\v!local\floatlocation\settrue\setfalse\c_page_floats_center_box_local
+ \doifelseinset\v!local\floatlocation\settrue\setfalse\c_page_floats_center_box_local
\fi
\doifnotcommon{\v!always,\v!here,\v!force}\floatlocation % ! ! ! ! ! !
{\setfalse\c_page_floats_center_box_global
@@ -767,6 +795,8 @@
\expandafter\firstoftwoarguments
\fi}
+\let\doifmainfloatbodyelse\doifelsemainfloatbody
+
% todo: optional user pars
\let\currentfloatattribute\empty % to be checked
@@ -801,7 +831,7 @@
% uses:
\def\strc_floats_group_index
- {\numexpr\ctxcommand{listgroupindex("\currentfloat","\currentfloatgroup")}\relax}
+ {\numexpr\clf_listgroupindex{\currentfloat}{\currentfloatgroup}\relax}
\def\strc_floats_place_packaged_boxes
{\expandafter\strc_floats_place_packaged_boxes_indeed\expandafter{\m_strc_floats_saved_userdata}}
@@ -835,7 +865,7 @@
\relax
\relax
\relax
- [\c!name=\currentfloat,%
+ [\s!name=\currentfloat,% was c!name
\s!counter=\currentfloatcounter,%
\s!hascaption=\ifnofloatcaption \v!no\else\v!yes\fi,%
\s!hasnumber=\ifnofloatnumber \v!no\else\v!yes\fi,%
@@ -896,26 +926,29 @@
\strc_float_load_data
\to \everyinsidefloat
-\def\doifrightpagefloatelse
+\def\doifelserightpagefloat
{\ifdoublesided
\ifsinglesided
\doubleexpandafter\firstoftwoarguments
\else
- \doubleexpandafter\doifoddfloatpageelse
+ \doubleexpandafter\doifelseoddfloatpage
\fi
\else
\expandafter\firstoftwoarguments
\fi}
-\def\doifoddfloatpageelse
+\def\doifelseoddfloatpage
{\ifodd\purenumber\strc_float_realpage\space
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifrightpagefloatelse\doifelserightpagefloat
+\let\doifoddpagefloatelse \doifelseoddpagefloat
+
\appendtoks
- \let\rightorleftpageaction\doifrightpagefloatelse
+ \let\rightorleftpageaction\doifelserightpagefloat
\to \everyinsidefloat
% \let\movesidefloat\gobbleoneargument
@@ -926,7 +959,7 @@
\unexpanded\def\movesidefloat[#settings]% (-)n*line|x=,y=
{\global\d_page_sides_downshift \zeropoint
\global\d_page_sides_extrashift\zeropoint
- \doifassignmentelse{#settings}%
+ \doifelseassignment{#settings}%
{\begingroup
\setupcurrentfloat[\c!x=\zeropoint,\c!y=\zeropoint,#settings]%
\ifgridsnapping
@@ -948,6 +981,12 @@
\setvalue{\??floatmovement+\v!hang}{\strc_floats_move_down_hang\plusone}
\setvalue{\??floatmovement-\v!hang}{\strc_floats_move_down_hang\minusone}
+\setvalue{\??floatmovement-2*\v!line}{\strc_floats_move_down_line{-2}}
+\setvalue{\??floatmovement+2*\v!line}{\strc_floats_move_down_line{2}}
+\setvalue{\??floatmovement 2*\v!line}{\strc_floats_move_down_line{2}}
+
+\unexpanded\def\installfloatmovement#1#2{\setvalue{\??floatmovement#1}{#2}}
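+%
+% A hypothetical usage sketch (editorial addition, not part of this commit): following
+% the pattern of the predefined movements above, an extra step could be registered as
+%
+% \installfloatmovement{3*\v!line}{\strc_floats_move_down_line{3}}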
+
\def\strc_floats_move_down#setting%
{\csname\??floatmovement
\ifcsname\??floatmovement#setting\endcsname#setting\fi
@@ -1038,10 +1077,10 @@
\fi
\fi
% we can also support edges .. in that case no common but a fast loop
- \doifinsetelse\v!hanging\floatlocation
- {\doifcommonelse{\v!inleft,\v!leftmargin}\floatlocation
+ \doifelseinset\v!hanging\floatlocation
+ {\doifelsecommon{\v!inleft,\v!leftmargin}\floatlocation
{\let\p_maxwidth\leftmarginwidth}%
- {\doifcommonelse{\v!inright,\v!rightmargin}\floatlocation
+ {\doifelsecommon{\v!inright,\v!rightmargin}\floatlocation
{\let\p_maxwidth\rightmarginwidth}%
{\edef\p_maxwidth{\floatparameter\c!maxwidth}}}}%
{\edef\p_maxwidth{\floatparameter\c!maxwidth}}%
@@ -1050,9 +1089,9 @@
\else
\scratchwidth\p_maxwidth\relax
\ifdim\wd\floatbox>\scratchwidth
- \doifcommonelse{\v!inright,\v!rightmargin,\v!rightedge,\v!inleft,\v!leftmargin,\v!leftedge}\floatlocation
+ \doifelsecommon{\v!inright,\v!rightmargin,\v!rightedge,\v!inleft,\v!leftmargin,\v!leftedge}\floatlocation
{\global\d_page_sides_maximum\scratchwidth}
- {\doifcommonelse{\v!right,\v!left}\floatlocation
+ {\doifelsecommon{\v!right,\v!left}\floatlocation
\strc_floats_realign_floatbox_horizontal_one
\strc_floats_realign_floatbox_horizontal_two}%
\fi
@@ -1094,21 +1133,32 @@
\strc_floats_calculate_skip\d_strc_floats_bottom {\rootfloatparameter\c!spaceafter }%
\strc_floats_calculate_skip\d_page_sides_topskip {\rootfloatparameter\c!sidespacebefore}%
\strc_floats_calculate_skip\d_page_sides_bottomskip{\rootfloatparameter\c!sidespaceafter }%
- \global\d_strc_floats_margin \rootfloatparameter\c!margin
- \global\d_page_sided_leftshift \floatparameter \c!leftmargindistance
- \global\d_page_sided_rightshift\floatparameter \c!rightmargindistance
- \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
- \global\c_page_floats_n_of_bottom \rootfloatparameter\c!nbottom
+ \global\d_strc_floats_margin \rootfloatparameter\c!margin
+ \global\d_page_sided_leftshift \floatparameter \c!leftmargindistance
+ \global\d_page_sided_rightshift \floatparameter \c!rightmargindistance
+ \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
+ \global\c_page_floats_n_of_bottom\rootfloatparameter\c!nbottom
\endgroup}
\unexpanded\def\betweenfloatblanko % assumes that spaceafter is present
{\blank[\rootfloatparameter\c!spacebefore]} % or v!back,....
+% \unexpanded\def\doplacefloatbox % used elsewhere
+% {%\forgetall % NO
+% \whitespace
+% \blank[\rootfloatparameter\c!spacebefore]
+% \page_otr_command_flush_float_box
+% \blank[\rootfloatparameter\c!spaceafter]}
+
\unexpanded\def\doplacefloatbox % used elsewhere
{%\forgetall % NO
\whitespace
\blank[\rootfloatparameter\c!spacebefore]
+ \nointerlineskip
+ \flushnotes % new per 2014-05-29 : todo: move them up in the mvl
+ \nointerlineskip
\page_otr_command_flush_float_box
+ \nointerlineskip
\blank[\rootfloatparameter\c!spaceafter]}
% test case:
@@ -1126,7 +1176,7 @@
\global\floatheight \ht\floatbox % forget about the depth
\global\floattextwidth\dimexpr\hsize-\floatwidth-\rootfloatparameter\c!margin\relax
\edef\floatlocation{\floatlocationmethod}% to be sure .. why
- \doifinsetelse\v!tall\floatlocationmethod
+ \doifelseinset\v!tall\floatlocationmethod
{\floattextheight\dimexpr\pagegoal-\pagetotal-\bigskipamount\relax % ugly, this bigskip
\ifdim\floattextheight>\textheight
\floattextheight\textheight
@@ -1151,27 +1201,27 @@
{\floattextheight\ifdim\ht\floattext<\floatheight\floatheight\else\ht\floattext\fi}%
\setbox\floatbox\vbox to \floattextheight
{\hsize\floatwidth
- \doifinsetelse\v!both\floatlocation
- {\doifinsetelse\v!low\floatlocation
+ \doifelseinset\v!both\floatlocation
+ {\doifelseinset\v!low\floatlocation
{\vfill\box\floatbox}
- {\doifinsetelse\v!middle\floatlocation
+ {\doifelseinset\v!middle\floatlocation
{\vfill\box\floatbox\vfill}
{\box\floatbox\vfill}}}
{\box\floatbox\vfill}}%
\setbox\floattext\vbox to \floattextheight
{\hsize\floattextwidth
- \doifinsetelse\v!low\floatlocation
+ \doifelseinset\v!low\floatlocation
{\vfill
\box\floattext
\doifinset\c!offset\floatlocation{\whitespace\blank}}
- {\doifinsetelse\v!middle\floatlocation
+ {\doifelseinset\v!middle\floatlocation
{\vfill
\box\floattext
\vfill}
{\doifinset\v!offset\floatlocation{\whitespace\blank}%
\box\floattext
\vfill}}}%
- \doifinsetelse\v!right\floatlocation
+ \doifelseinset\v!right\floatlocation
{\setbox\floatbox\hbox to \hsize
{\box\floattext
\hfill
@@ -1190,13 +1240,13 @@
\blank[\rootfloatparameter\c!spaceafter]%
\strc_floats_end_text_group
\page_floats_report_total}
-
+
\def\borderedfloatbox
{\begingroup
\setupcurrentfloat[\c!location=\v!normal,\c!width=\v!fit,\c!height=\v!fit]%
\inheritedfloatframed{\box\floatbox}%
\endgroup}
-
+
% minwidth=fit,width=max : no overshoot, as wide as graphic
\def\strc_floats_align_content_indeed
@@ -1366,17 +1416,82 @@
\fi
\strc_floats_make_complete_caption}}
+% \def\strc_floats_prepare_stack_caption_auto
+% {\ifx\p_strc_floats_caption_align\empty \else
+% \doifnotinset\v!middle\p_strc_floats_caption_align{\let\captionovershoot\!!zeropoint}%
+% \fi
+% \edef\captionhsize{\the\wd\b_strc_floats_content}%
+% \ifdim\captionhsize>\hsize
+% % float is wider than \hsize
+% \setbox\b_strc_floats_caption\vbox
+% {\settrialtypesetting
+% \strc_floats_caption_set_align
+% \hsize\captionhsize
+% \notesenabledfalse
+% \strc_floats_make_complete_caption}%
+% \ifdim\ht\scratchbox>\lineheight % more lines
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\dimexpr\captionhsize-\captionovershoot\relax
+% \ifdim\hsize<\captionminwidth\relax
+% \hsize\captionhsize
+% \fi
+% \strc_floats_make_complete_caption}%
+% \else
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\captionhsize
+% \strc_floats_make_complete_caption}%
+% \fi
+% \else
+% % float is smaller than or equal to \hsize
+% \ifdim\captionhsize<\captionminwidth\relax
+% \scratchdimen\captionminwidth % float smaller than min width
+% \edef\captionhsize{\the\scratchdimen}%
+% \fi
+% \setbox\scratchbox\vbox % test with overshoot
+% {\settrialtypesetting
+% \scratchdimen\dimexpr\captionhsize+\captionovershoot+3\emwidth\relax % 3em is an average word length
+% \ifdim\scratchdimen<\hsize
+% \hsize\scratchdimen
+% \fi
+% \notesenabledfalse
+% \strc_floats_make_complete_caption}%
+% \ifdim\ht\scratchbox>\lineheight
+% % at least an average word longer than a line
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \scratchdimen\dimexpr\captionhsize+\captionovershoot\relax
+% \ifdim\scratchdimen<\hsize
+% \hsize\scratchdimen
+% \fi
+% \strc_floats_make_complete_caption}%
+% \else\ifx\p_strc_floats_caption_align\empty
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\captionhsize
+% \raggedcenter % overloads
+% \strc_floats_make_complete_caption}%
+% \else
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\captionhsize
+% \strc_floats_make_complete_caption}%
+% \fi\fi
+% \fi}
+
\def\strc_floats_prepare_stack_caption_auto
{\ifx\p_strc_floats_caption_align\empty \else
\doifnotinset\v!middle\p_strc_floats_caption_align{\let\captionovershoot\!!zeropoint}%
\fi
\edef\captionhsize{\the\wd\b_strc_floats_content}%
- \ifdim\captionhsize>\hsize
+ \scratchwidth\floatcaptionparameter\c!maxwidth\relax
+ \ifdim\captionhsize>\scratchwidth
% float is wider than \hsize
\setbox\b_strc_floats_caption\vbox
{\settrialtypesetting
\strc_floats_caption_set_align
- \hsize\captionhsize
+ \hsize\scratchwidth
\notesenabledfalse
\strc_floats_make_complete_caption}%
\ifdim\ht\scratchbox>\lineheight % more lines
@@ -1384,13 +1499,13 @@
{\strc_floats_caption_set_align
\hsize\dimexpr\captionhsize-\captionovershoot\relax
\ifdim\hsize<\captionminwidth\relax
- \hsize\captionhsize
+ \hsize\scratchwidth
\fi
\strc_floats_make_complete_caption}%
\else
\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
- \hsize\captionhsize
+ \hsize\scratchwidth
\strc_floats_make_complete_caption}%
\fi
\else
@@ -1456,10 +1571,10 @@
\box\b_strc_floats_content}}
\def\strc_floats_build_box_next_outer
- {\doifrightpagefloatelse\strc_floats_build_box_next_right\strc_floats_build_box_next_left}
+ {\doifelserightpagefloat\strc_floats_build_box_next_right\strc_floats_build_box_next_left}
\def\strc_floats_build_box_next_inner
- {\doifrightpagefloatelse\strc_floats_build_box_next_left\strc_floats_build_box_next_right}
+ {\doifelserightpagefloat\strc_floats_build_box_next_left\strc_floats_build_box_next_right}
\def\strc_floats_build_box_next_right_hang#1%
{\ifconditional\c_strc_floats_par_float \hbox \else \expandafter \strc_floats_align_content \fi
@@ -1510,12 +1625,12 @@
{\strc_floats_build_box_next_left_margin_indeed \leftmargindistance }
\def\strc_floats_build_box_next_outer_margin
- {\doifrightpagefloatelse
+ {\doifelserightpagefloat
{\strc_floats_build_box_next_right_margin_indeed\rightmargindistance}
{\strc_floats_build_box_next_left_margin_indeed \rightmargindistance}}
\def\strc_floats_build_box_next_inner_margin
- {\doifrightpagefloatelse
+ {\doifelserightpagefloat
{\strc_floats_build_box_next_left_margin_indeed \leftmargindistance}
{\strc_floats_build_box_next_right_margin_indeed\leftmargindistance}}
@@ -1552,29 +1667,16 @@
\def\strc_floats_flush_left_caption_hang
{\hsmash{\llap{\box\b_strc_floats_caption\dotfskip{\floatcaptionparameter\c!distance}}}}
-% \def\strc_floats_flush_caption_hang % expanded can go
-% {\expanded{\doifinsetelse{\v!righthanging}{\floatcaptionparameter\c!location}}
-% {\strc_floats_flush_right_caption_hang}
-% {\expanded{\doifinsetelse{\v!lefthanging}{\floatcaptionparameter\c!location}}
-% {\strc_floats_flush_left_caption_hang}
-% {\expanded{\doifinsetelse{\v!hang}{\floatcaptionparameter\c!location}}
-% {\expanded{\doifinsetelse{\v!outer}{\floatcaptionparameter\c!location}}
-% {\doifrightpagefloatelse{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
-% {\expanded{\doifinsetelse{\v!right}{\floatcaptiondirectives}}
-% {\strc_floats_flush_right_caption_hang}
-% {\strc_floats_flush_left_caption_hang}}}
-% {\box\b_strc_floats_caption}}}}
-
\def\strc_floats_flush_caption_hang % expanded can go
{\edef\p_strc_floats_caption_location{\floatcaptionparameter\c!location}%
- \doifinsetelse\v!righthanging\p_strc_floats_caption_location
+ \doifelseinset\v!righthanging\p_strc_floats_caption_location
{\strc_floats_flush_right_caption_hang}
- {\doifinsetelse\v!lefthanging\p_strc_floats_caption_location
+ {\doifelseinset\v!lefthanging\p_strc_floats_caption_location
{\strc_floats_flush_left_caption_hang}
- {\doifinsetelse\v!hang\p_strc_floats_caption_location
- {\doifinsetelse\v!outer\p_strc_floats_caption_location
- {\doifrightpagefloatelse{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
- {\doifinsetelse\v!right\floatcaptiondirectives
+ {\doifelseinset\v!hang\p_strc_floats_caption_location
+ {\doifelseinset\v!outer\p_strc_floats_caption_location
+ {\doifelserightpagefloat{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
+ {\doifelseinset\v!right\floatcaptiondirectives
{\strc_floats_flush_right_caption_hang}
{\strc_floats_flush_left_caption_hang}}}
{\box\b_strc_floats_caption}}}}
@@ -1637,7 +1739,7 @@
\fi}
\def\strc_floats_build_box_top_stack_normal
- {\doifinsetelse\v!overlay{\floatcaptionparameter\c!location}
+ {\doifelseinset\v!overlay{\floatcaptionparameter\c!location}
\strc_floats_build_box_top_stack_normal_overlay
\strc_floats_build_box_top_stack_normal_content}
@@ -1684,7 +1786,7 @@
{\dp\b_strc_floats_caption\strutdepth
\setbox\scratchbox\vbox
{\strc_floats_align_caption{\copy\b_strc_floats_caption}%
- \strc_floats_align_content {\copy\b_strc_floats_content }}%
+ \strc_floats_align_content{\copy\b_strc_floats_content}}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
\vbox to \noflines\lineheight
{\d_strc_float_temp_width\wd\b_strc_floats_content
@@ -1753,8 +1855,8 @@
\processallactionsinset[\floatcaptionparameter\c!location]
[ \v!left=>\let\next\strc_floats_relocate_caption_left,
\v!right=>\let\next\strc_floats_relocate_caption_right,
- \v!inner=>\doifrightpagefloatelse{\let\next\strc_floats_relocate_caption_left }{\let\next\strc_floats_relocate_caption_right},
- \v!outer=>\doifrightpagefloatelse{\let\next\strc_floats_relocate_caption_right}{\let\next\strc_floats_relocate_caption_left }]%
+ \v!inner=>\doifelserightpagefloat{\let\next\strc_floats_relocate_caption_left }{\let\next\strc_floats_relocate_caption_right},
+ \v!outer=>\doifelserightpagefloat{\let\next\strc_floats_relocate_caption_right}{\let\next\strc_floats_relocate_caption_left }]%
\next}
\installfloatboxbuilder \v!none \strc_floats_build_box_default
@@ -1987,7 +2089,7 @@
\definefloat
[\v!graphic]
[\v!graphics]
-
+
% float strategy, replaces some of the above macros
\installcorenamespace{floatmethods}
@@ -1999,7 +2101,7 @@
\let\forcedfloatmethod\empty % set by lua
\def\setfloatmethodvariables#1% \floatmethod \floatlabel \floatrow \floatcolumn
- {\ctxcommand{analysefloatmethod("#1")}}
+ {\clf_analysefloatmethod{#1}}
\def\somesomewherefloat[#1]%
{\page_floats_save_somewhere_float\s!somewhere{#1}}
@@ -2214,6 +2316,7 @@
\installfloatmethod \s!singlecolumn \v!local \somelocalfloat
\installfloatmethod \s!multicolumn \v!local \somelocalfloat
+\installfloatmethod \s!mixedcolumn \v!local \somelocalfloat
\installfloatmethod \s!columnset \v!local \somelocalfloat
\protect \endinput
diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua
index 09ed79288..f736427bb 100644
--- a/tex/context/base/strc-ini.lua
+++ b/tex/context/base/strc-ini.lua
@@ -38,14 +38,19 @@ local txtcatcodes = catcodenumbers.txtcatcodes
local context = context
local commands = commands
-local pushcatcodes = context.pushcatcodes
-local popcatcodes = context.popcatcodes
-
local trace_processors = false
local report_processors = logs.reporter("processors","structure")
trackers.register("typesetters.processors", function(v) trace_processors = v end)
+local xmlconvert = lxml.convert
+local xmlstore = lxml.store
+
+local ctx_pushcatcodes = context.pushcatcodes
+local ctx_popcatcodes = context.popcatcodes
+local ctx_xmlsetup = context.xmlsetup
+local ctx_xmlprocessbuffer = context.xmlprocessbuffer
+
-- -- -- namespace -- -- --
-- This is tricky: we have stored and initialized already some of
@@ -68,7 +73,7 @@ structures.itemgroups = structures.itemgroups or { }
structures.specials = structures.specials or { }
structures.counters = structures.counters or { }
structures.tags = structures.tags or { }
-structures.formulas = structures.formulas or { }
+structures.formulas = structures.formulas or { } -- not used but reserved
structures.sets = structures.sets or { }
structures.marks = structures.marks or { }
structures.floats = structures.floats or { }
@@ -151,11 +156,17 @@ local function simplify(d,nodefault)
for k, v in next, d do
local tv = type(v)
if tv == "table" then
- if next(v) then t[k] = simplify(v) end
+ if next(v) then
+ t[k] = simplify(v)
+ end
elseif tv == "string" then
- if v ~= "" and v ~= "default" then t[k] = v end
+ if v ~= "" then
+ t[k] = v
+ end
elseif tv == "boolean" then
- if v then t[k] = v end
+ if v then
+ t[k] = v
+ end
else
t[k] = v
end
@@ -168,6 +179,34 @@ local function simplify(d,nodefault)
end
end
+-- we only care about the tuc file so this would do too:
+--
+-- local function simplify(d,nodefault)
+-- if d then
+-- for k, v in next, d do
+-- local tv = type(v)
+-- if tv == "string" then
+-- if v == "" or v == "default" then
+-- d[k] = nil
+-- end
+-- elseif tv == "table" then
+-- if next(v) then
+-- simplify(v)
+-- end
+-- elseif tv == "boolean" then
+-- if not v then
+-- d[k] = nil
+-- end
+-- end
+-- end
+-- return d
+-- elseif nodefault then
+-- return nil
+-- else
+-- return { }
+-- end
+-- end
+
helpers.simplify = simplify
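-- For instance (a made-up entry, only to illustrate the rules above):
--
--   simplify { title = "intro", align = "", nolist = false, extras = { }, level = 2 }
--
-- returns { title = "intro", level = 2 }: empty strings, empty subtables and
-- false booleans are dropped from the copy that ends up in the tuc file, while
-- other values are kept as they are.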
function helpers.merged(...)
@@ -211,19 +250,19 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
report_processors("putting xml data in buffer: %s",xmldata)
report_processors("processing buffer with setup %a and tag %a",xmlsetup,tag)
end
- if experiment then
- -- the question is: will this be forgotten ... better store in a via file
- local xmltable = lxml.convert("temp",xmldata or "")
- lxml.store("temp",xmltable)
- context.xmlsetup("temp",xmlsetup or "")
- else
- context.xmlprocessbuffer("dummy",tag,xmlsetup or "")
- end
+ if experiment then
+ -- the question is: will this be forgotten ... better store in a via file
+ local xmltable = xmlconvert("temp",xmldata or "")
+ xmlstore("temp",xmltable)
+ ctx_xmlsetup("temp",xmlsetup or "")
+ else
+ ctx_xmlprocessbuffer("dummy",tag,xmlsetup or "")
+ end
elseif xmlsetup then -- title is reference to node (so \xmlraw should have been used)
if trace_processors then
report_processors("feeding xmlsetup %a using node %a",xmlsetup,title)
end
- context.xmlsetup(title,metadata.xmlsetup)
+ ctx_xmlsetup(title,metadata.xmlsetup)
else
local catcodes = metadata.catcodes
if catcodes == notcatcodes or catcodes == xmlcatcodes then
@@ -241,9 +280,9 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
-- doesn't work when a newline is in there \section{Test\ A} so we do
-- it this way:
--
- pushcatcodes(catcodes)
+ ctx_pushcatcodes(catcodes)
context(title)
- popcatcodes()
+ ctx_popcatcodes()
end
end
else
@@ -297,7 +336,8 @@ function sets.getall(namespace,block,name)
end
end
--- messy (will be another keyword, fixedconversion)
+-- messy (will be another keyword, fixedconversion) .. needs to be documented too
+-- maybe we should cache
local splitter = lpeg.splitat("::")
@@ -337,4 +377,8 @@ end
-- interface
-commands.definestructureset = sets.define
+interfaces.implement {
+ name = "definestructureset",
+ actions = sets.define,
+ arguments = { "string", "string", "string", "string", "boolean" }
+}
diff --git a/tex/context/base/strc-ini.mkvi b/tex/context/base/strc-ini.mkvi
index 8488d1dab..ad83cbc58 100644
--- a/tex/context/base/strc-ini.mkvi
+++ b/tex/context/base/strc-ini.mkvi
@@ -70,12 +70,12 @@
\unexpanded\def\defineconversionset{\dotripleempty\strc_sets_define_conversion_set}
\unexpanded\def\defineprefixset {\dotripleempty\strc_sets_define_prefix_set}
-% Low level versions (no optional checking). The detokenize and escaping might go away.
+% Low level versions (no optional checking). The detokenize might go away.
-\unexpanded\def\strc_sets_define_reset_set [#name][#set][#default]{\ctxcommand{definestructureset("structure:resets", "#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}",true)}}
-\unexpanded\def\strc_sets_define_separator_set [#name][#set][#default]{\ctxcommand{definestructureset("structure:separators", "#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}")}}
-\unexpanded\def\strc_sets_define_conversion_set[#name][#set][#default]{\ctxcommand{definestructureset("structure:conversions","#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}")}}
-\unexpanded\def\strc_sets_define_prefix_set [#name][#set][#default]{\ctxcommand{definestructureset("structure:prefixes", "#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}")}}
+\unexpanded\def\strc_sets_define_reset_set [#name][#set][#default]{\clf_definestructureset{structure:resets} {#name}{\detokenize{#set}}{\detokenize{#default}}true\relax}
+\unexpanded\def\strc_sets_define_separator_set [#name][#set][#default]{\clf_definestructureset{structure:separators} {#name}{\detokenize{#set}}{\detokenize{#default}}false\relax}
+\unexpanded\def\strc_sets_define_conversion_set[#name][#set][#default]{\clf_definestructureset{structure:conversions}{#name}{\detokenize{#set}}{\detokenize{#default}}false\relax}
+\unexpanded\def\strc_sets_define_prefix_set [#name][#set][#default]{\clf_definestructureset{structure:prefixes} {#name}{\detokenize{#set}}{\detokenize{#default}}false\relax}
\let\definestructureresetset \defineresetset
\let\definestructureseparatorset \defineseparatorset
diff --git a/tex/context/base/strc-itm.lua b/tex/context/base/strc-itm.lua
index 4945c282f..adec591c1 100644
--- a/tex/context/base/strc-itm.lua
+++ b/tex/context/base/strc-itm.lua
@@ -6,33 +6,58 @@ if not modules then modules = { } end modules ['strc-itm'] = {
license = "see context related readme files"
}
-local structures = structures
-local itemgroups = structures.itemgroups
-local jobpasses = job.passes
-
-local setvariable = jobpasses.save
-local getvariable = jobpasses.getfield
-
-function itemgroups.register(name,nofitems,maxwidth)
- setvariable("itemgroup", { nofitems, maxwidth })
+local structures = structures
+local itemgroups = structures.itemgroups
+local jobpasses = job.passes
+
+local implement = interfaces.implement
+
+local setvariable = jobpasses.save
+local getvariable = jobpasses.getfield
+
+local texsetcount = tex.setcount
+local texsetdimen = tex.setdimen
+
+local f_stamp = string.formatters["itemgroup:%s:%s"]
+local counts = table.setmetatableindex("number")
+
+-- We keep the counter at the Lua end so we can group the items within
+-- an itemgroup, which in turn means fewer passes when one itemgroup
+-- entry is added or removed.
+
+local trialtypesetting = context.trialtypesetting
+
+local function analyzeitemgroup(name,level)
+ local n = counts[name]
+ if level == 1 then
+ n = n + 1
+ counts[name] = n
+ end
+ local stamp = f_stamp(name,n)
+ local n = getvariable(stamp,level,1,0)
+ local w = getvariable(stamp,level,2,0)
+ texsetcount("local","c_strc_itemgroups_max_items",n)
+ texsetdimen("local","d_strc_itemgroups_max_width",w)
end
-function itemgroups.nofitems(name,index)
- return getvariable("itemgroup", index, 1, 0)
+local function registeritemgroup(name,level,nofitems,maxwidth)
+ local n = counts[name]
+ if not trialtypesetting() then
+ -- not during trial typesetting
+ setvariable(f_stamp(name,n), { nofitems, maxwidth }, level)
+ elseif level == 1 then
+ counts[name] = n - 1
+ end
end
-function itemgroups.maxwidth(name,index)
- return getvariable("itemgroup", index, 2, 0)
-end
-
--- interface (might become counter/dimension)
-
-commands.registeritemgroup = itemgroups.register
-
-function commands.nofitems(name,index)
- context(getvariable("itemgroup", index, 1, 0))
-end
+implement {
+ name = "analyzeitemgroup",
+ actions = analyzeitemgroup,
+ arguments = { "string", "integer" }
+}
-function commands.maxitemwidth(name,index)
- context(getvariable("itemgroup", index, 2, 0))
-end
+implement {
+ name = "registeritemgroup",
+ actions = registeritemgroup,
+ arguments = { "string", "integer", "integer", "dimen" }
+}
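-- A standalone sketch (plain Lua, not the ConTeXt helpers used above) of the
-- bookkeeping idea behind analyzeitemgroup/registeritemgroup: a per-name
-- counter that defaults to zero plus a "name:instance" stamp, so each instance
-- of an itemgroup gets its own slot in the multipass data and adding or
-- removing one entry only disturbs that slot.

local counts = setmetatable({ }, { __index = function() return 0 end })
local stored = { } -- stands in for the jobpasses storage

local function analyze(name,level)
    local n = counts[name]
    if level == 1 then -- a new outermost instance of this itemgroup
        n = n + 1
        counts[name] = n
    end
    return string.format("itemgroup:%s:%s",name,n)
end

local function register(stamp,level,nofitems,maxwidth)
    local s = stored[stamp] or { }
    s[level] = { nofitems, maxwidth }
    stored[stamp] = s
end

register(analyze("itemize",1),1,4,1234567) -- hypothetical values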
diff --git a/tex/context/base/strc-itm.mkvi b/tex/context/base/strc-itm.mkvi
index 8259fa38d..86fc9d9fd 100644
--- a/tex/context/base/strc-itm.mkvi
+++ b/tex/context/base/strc-itm.mkvi
@@ -15,6 +15,9 @@
\registerctxluafile{strc-itm}{1.001}
+%D As we analyze/register widths and such we could as well push and pop the
+%D numbers at the \LUA\ end (which saves a few calls).
+
%D Cleaning up this module happened around the time when Kate Bush came up
%D with the nicest numbered list of words: 50 Words For Snow. It's therefore
%D no surprise that I had that cd running several times when updating this
@@ -203,7 +206,6 @@
\newdimen \d_strc_itemgroups_max_width % multipass
\newcount \c_strc_itemgroups_max_items % multipass
-\newcount \c_strc_itemgroups_n_of_lists
\newcount \c_strc_itemgroups_n_of_items
\newcount \c_strc_itemgroups_nesting
\newcount \c_strc_itemgroups_column_depth
@@ -227,13 +229,10 @@
\let \currentitemgroupsegments \empty
\def\strc_itemgroups_register_status
- {\iftrialtypesetting \else
- \ctxcommand{registeritemgroup("\currentitemgroup",\number\c_strc_itemgroups_n_of_items,"\itemgroupparameter\c!maxwidth")}%
- \fi}
+ {\clf_registeritemgroup{\currentparentitemgroup}\c_strc_itemgroups_nesting\c_strc_itemgroups_n_of_items\dimexpr\itemgroupparameter\c!maxwidth\relax}
\def\strc_itemgroups_check_n_of_items % we could do this at the lua end and save a call (i.e. will be dimen and counter)
- {\c_strc_itemgroups_max_items\ctxcommand{nofitems("\currentitemgroup",\number\c_strc_itemgroups_n_of_lists)}\relax
- \d_strc_itemgroups_max_width\ctxcommand{maxitemwidth("\currentitemgroup",\number\c_strc_itemgroups_n_of_lists)}\scaledpoint
+ {\clf_analyzeitemgroup{\currentparentitemgroup}\c_strc_itemgroups_nesting\relax
\edef\currentnofitems{\the\c_strc_itemgroups_max_items}}
% todo: \dodosetreference -> \strc_counters_register_component (to be checked)
@@ -249,47 +248,51 @@
\fi \fi}
\def\strc_itemgroups_insert_reference_indeed % maybe we need a 'frozen counter' numberdata blob / quick hack .. move this to strc-ref
- {%\setnextinternalreference
+ {% needs testing, gave problems:
+ \setnextinternalreference
% no need to collect nodes in \b_strc_destination_nodes here ... maybe at some point
\strc_references_start_destination_nodes
- % this is somewhat over the top ... we should use the counter's reference
- \ctxcommand{setreferenceattribute("\s!full", "\referenceprefix","\currentitemreference",
- {
- metadata = {
- kind = "item",% ?
- catcodes = \the\catcodetable,
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument" \else nil \fi, % only useful when text
- },
- references = {
- % internal = \nextinternalreference, % no: this spoils references
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- prefixdata = structures.helpers.simplify {
- prefix = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefix",
- separatorset = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixseparatorset",
- conversion = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversion\!!es,
- conversionset = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversionset",
- set = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixset",
- segments = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixsegments",
- % segments = "\askedprefixsegments",
- connector = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconnector\!!es,
- },
- numberdata = structures.helpers.simplify {
- numbers = structures.counters.compact("\v_strc_itemgroups_counter",nil,true),
- separatorset = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberseparatorset",
- % conversion = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversion",
- % conversion = "\currentitemgroupconversionset",
- % conversionset = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversionset",
- % todo: fixedconversion = "\currentitemgroupconversionset", % temp hack:
- conversionset = "fixed::\currentitemgroupconversionset",
- %
- % for the moment no stopper, we need to make references configurable first
- % stopper = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!numberstopper\!!es,
- segments = "\namedcounterparameter\v_strc_itemgroups_counter\c!numbersegments",
- },
- })
+ % this is somewhat over the top ... we should use the counter's reference
+ \clf_setreferenceattribute
+ {%
+ metadata {%
+ kind {item}% was item, why?
+ \ifx\currentreferencecoding\s!xml
+ xmlroot {\xmldocument}% only useful when text
+ \fi
+ catcodes \catcodetable
+ }%
+ references {%
+ internal \nextinternalreference % no: this spoils references
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {\currentitemreference}%
+ }%
+ prefixdata {%
+ prefix {\namedcounterparameter\v_strc_itemgroups_counter\c!prefix}%
+ separatorset {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixseparatorset}%
+ conversion {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversion}%
+ conversionset {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversionset}%
+ set {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixset}%
+ segments {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixsegments}%
+ % segments {\askedprefixsegments}%
+ connector {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconnector}%
+ }%
+ numberdata {%
+ numbers {\v_strc_itemgroups_counter}%
+ separatorset {\namedcounterparameter\v_strc_itemgroups_counter\c!numberseparatorset}%
+ % conversion {\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversion}%
+ % conversionset {\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversionset}%
+ % fixedconversion {\currentitemgroupconversionset}%
+ conversionset {fixed::\currentitemgroupconversionset}% temp hack
+ %
+ % for the moment no stopper, we need to make references configurable first
+ % stopper {\namedcounterparameter\v_strc_itemgroups_counter\c!numberstopper}%
+ segments {\namedcounterparameter\v_strc_itemgroups_counter\c!numbersegments}%
+ }%
}%
+ \relax
\strc_references_stop_destination_nodes
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
% will become an option:
@@ -331,7 +334,7 @@
\def\strc_itemgroups_store_continue_state#options#settings%
{\setxvalue{\??itemgroupoption \currentitemgroup}{\strc_itemgroups_process_options{#options}}%
- \setgvalue{\??itemgroupsetting\currentitemgroup}{\setupcurrentitemgroup [#settings]}}
+ \setgvalue{\??itemgroupsetting\currentitemgroup}{\setupcurrentitemgroup[#settings]}}
\def\strc_itemgroups_fetch_continue_state
{\getvalue{\??itemgroupoption \currentitemgroup}%
@@ -467,7 +470,7 @@
\let\strc_itemgroups_margin_symbol\empty
\let\strc_itemgroups_extra_symbol\empty
%
- \global\letitemgroupparameter\c!maxwidth\!!zerocount
+ \global\letitemgroupparameter\c!maxwidth\!!zeropoint
}
\setvalue{\??itemgroupfirst\v!intro }{\settrue\c_strc_itemgroups_intro }
@@ -482,10 +485,11 @@
\csname\??itemgroupfirst#option\endcsname
\fi}
-\ifdefined\dotagsetitemgroup \else \let\dotagsetitemgroup\relax \fi
+\ifdefined\dotagsetitemgroup \else \let\dotagsetitemgroup\relax \fi
+\ifdefined\dotagsetitem \else \let\dotagsetitem \gobbleoneargument \fi
\def\strc_itemgroups_tag_start_group
- {\dostarttagged\t!itemgroup\currentitemgroup
+ {\dostarttaggedchained\t!itemgroup\currentparentitemgroup\??itemgroup
\dotagsetitemgroup}
\def\strc_itemgroups_tag_stop_group
@@ -576,7 +580,7 @@
% can be made a bit faster
\def\strc_itemgroups_setup_each#category#whatever%
- {\doifassignmentelse{#whatever}
+ {\doifelseassignment{#whatever}
{\strc_itemgroups_normal_setup[#category][#whatever]}
{\strc_itemgroups_normal_setup[#category][\c!option={#whatever}]}}
@@ -587,7 +591,7 @@
{\strc_itemgroups_normal_setup[#category:#level][\c!option={#whatever}]}
\def\strc_itemgroups_setup_list#subcategories#category#whatever%
- {\doifassignmentelse{#whatever}
+ {\doifelseassignment{#whatever}
{\processcommalist[#subcategories]{\strc_itemgroups_setup_list_level_a{#category}{#whatever}}}
{\processcommalist[#subcategories]{\strc_itemgroups_setup_list_level_b{#category}{#whatever}}}}
@@ -639,9 +643,9 @@
\def\strc_itemgroups_set_symbol#symbol%
{\edef\strc_itemgroups_tmp_symbol{#symbol}%
\ifx\strc_itemgroups_tmp_symbol\empty \else
- \doifsymboldefinedelse\strc_itemgroups_tmp_symbol
+ \doifelsesymboldefined\strc_itemgroups_tmp_symbol
\strc_itemgroups_set_symbol_symbol
- {\doifconversiondefinedelse\strc_itemgroups_tmp_symbol
+ {\doifelseconversiondefined\strc_itemgroups_tmp_symbol
\strc_itemgroups_set_symbol_conversion
\donothing}%
\fi}
@@ -706,7 +710,7 @@
\ifthirdargument
\strc_itemgroups_start_indeed[#options][#settings]%
\else\ifsecondargument
- \doifassignmentelse{#options}
+ \doifelseassignment{#options}
{\strc_itemgroups_start_indeed[][#options]}
{\strc_itemgroups_start_indeed[#options][]}%
\else
@@ -745,7 +749,6 @@
\iftrialtypesetting
\strc_counters_save\v_strc_itemgroups_counter
\fi
- \global\advance\c_strc_itemgroups_n_of_lists\plusone
\c_strc_itemgroups_n_of_items\zerocount
\strc_itemgroups_check_n_of_items
\ifx\itemgroupoptions\empty
@@ -811,7 +814,7 @@
\ifinsidecolumns\else\ifcase\c_strc_itemgroups_column_depth
\global\c_strc_itemgroups_column_depth\c_strc_itemgroups_nesting % global ?
\strc_itemgroups_before_command
- \strc_itemgroups_tag_start_group
+ %\strc_itemgroups_tag_start_group
\strc_itemgroups_start_columns
\fi\fi
\fi
@@ -941,8 +944,7 @@
\fi\fi
% new test, needed in sidefloats (surfaced in volker's proceedings)
\iftrialtypesetting
- \strc_counters_restore\v_strc_itemgroups_counter
- \global\advance\c_strc_itemgroups_n_of_lists\minusone
+ \strc_counters_restore\v_strc_itemgroups_counter % could happen in LUA
\fi
\global\advance\c_strc_itemgroups_nesting\minusone
\xdef\currentitemlevel{\number\c_strc_itemgroups_nesting}%
@@ -965,7 +967,7 @@
{\setfalse\c_strc_itemgroups_first
% \begingroup % (3)
\ifcase\c_strc_itemgroups_nesting
- % 0
+ % 0
\or
\strc_itemgroups_start_item_first_one % 1
\else
@@ -1009,8 +1011,21 @@
\strc_itemgroups_between_command
\fi}
-\unexpanded\def\strc_itemgroups_start_item[#1]% we can reuse more
- {\def\currentitemreference{#1}%
+% c_strc_itemgroups_concat:
+%
+% the problem is that we use leftskip so concat cannot reliably take the height into
+% account; it's .. rather tricky when white space is in there anyway (due to \par) .. so
+% we rely on a special blank method
+%
+% \startitemize[n]
+% \item bla
+% \item \startitemize[a]
+% \item bla $\displaystyle\int^{x^{y^4}}$ \item bla
+% \stopitemize
+% \stopitemize
+
+\unexpanded\def\strc_itemgroups_start_item[#reference]% we can reuse more
+ {\def\currentitemreference{#reference}%
\ifconditional\c_strc_itemgroups_text
% begin of item
\else
@@ -1026,13 +1041,11 @@
\strc_itemgroups_start_item_next
\fi
\ifconditional\c_strc_itemgroups_concat
- % \vskip-\dimexpr\lastskip+\lineheight\relax
- \vskip-\lastskip % we cannot use a \dimexpr here because
- \vskip-\lineheight % then we loose the stretch and shrink
- \nobreak
+ \spac_overlay_lines % see spac-ver.mkvi ... a typical potential problem
\setfalse\c_strc_itemgroups_concat
\fi
\dostarttagged\t!item\empty
+ % \dotagsetitem\empty
\dostarttagged\t!itemtag\empty
\strc_itemgroups_insert_item
\dostoptagged
@@ -1073,17 +1086,24 @@
\fi
\fi
\strc_itemgroups_start_item[#reference]%
- \groupedcommand\strc_itemgroups_start_head_indeed\strc_itemgroups_stop_head_indeed}
+ \pickupgroupedcommand
+ \strc_itemgroups_start_head_indeed
+ \strc_itemgroups_stop_head_indeed
+ \strc_itemgroups_head_body_indeed}
\unexpanded\def\stopitemgrouphead
- {\stopitemgroupitem}
+ {\dostoptagged
+ \stopitemgroupitem}
\unexpanded\def\strc_itemgroups_start_head_indeed
{\settrue\c_strc_itemgroups_head
+ \dotagsetitem\s!head% % weird place
+ \dostarttagged\t!itemhead\empty
\useitemgroupstyleandcolor\c!headstyle\c!headcolor\ignorespaces}
\unexpanded\def\strc_itemgroups_stop_head_indeed
{\removeunwantedspaces
+ \dostoptagged
\ifconditional\c_strc_itemgroups_text
\space
\ignorespaces
@@ -1092,7 +1112,10 @@
\fi
\strc_itemgroups_insert_breakno
\ifconditional\c_strc_itemgroups_pack\else\strc_itemgroups_after_head_command\fi
- \strc_itemgroups_insert_breakno
+ \strc_itemgroups_insert_breakno}
+
+\unexpanded\def\strc_itemgroups_head_body_indeed
+ {\dostarttagged\t!itembody\empty
\noindentation}
% Simple commands.
@@ -1110,30 +1133,37 @@
\unexpanded\def\strc_itemgroups_start_symbol#text%
{\def\strc_itemgroups_extra_symbol{#text}%
\settrue\c_strc_itemgroups_symbol
- \startitemgroupitem}
+ \startitemgroupitem
+ \dotagsetitem\s!symbol}
\unexpanded\def\strc_itemgroups_start_dummy
{\strc_itemgroups_start_symbol\strut\strut} % two ?
\unexpanded\def\strc_itemgroups_start_subitem
{\settrue\c_strc_itemgroups_sub
- \startitemgroupitem}
+ \startitemgroupitem
+ \dotagsetitem\s!sub}
\unexpanded\def\strc_itemgroups_start_edge#text%
{\strc_itemgroups_start_symbol
{\strc_itemgroups_calculate_list_width\c_strc_itemgroups_nesting
+ \dostarttagged\t!ignore\empty % for the moment, maybe an attribute
\hbox to \d_strc_itemgroups_list_width
- {#text\hskip\itemgroupparameter\c!distance}}}
+ {#text\hskip\itemgroupparameter\c!distance}%
+ \dostoptagged}}
\unexpanded\def\strc_itemgroups_start_margin#text%
{\def\strc_itemgroups_margin_symbol % brrr
- {\llap
+ {\dostarttagged\t!ignore\empty % for the moment, maybe an attribute
+ \llap
{\begingroup
\useitemgroupstyleandcolor\c!marstyle\c!marcolor
#text% keep em/ex local
\endgroup
- \hskip\dimexpr\leftskip+\leftmargindistance\relax}}%
- \startitemgroupitem}
+ \hskip\dimexpr\leftskip+\leftmargindistance\relax}%
+ \dostoptagged}%
+ \startitemgroupitem
+ \dotagsetitem\s!margin}
\unexpanded\def\strc_itemgroups_start_text#text%
{\def\strc_itemgroups_extra_symbol{#text}%
@@ -1207,7 +1237,9 @@
{\dontcomplain
\hbox to \d_strc_itemgroups_list_width
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
\llap{+\enspace}%
+ \dostoptagged
\fi
\strc_itemgroups_left_sym_filler
\box\b_strc_itemgroups % can already have a forced width, only factor handled here
@@ -1217,7 +1249,9 @@
\def\strc_itemgroups_handle_lapped_item_negative
{\llap
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
\llap{+\enspace}%
+ \dostoptagged
\fi
\box\b_strc_itemgroups
\hskip\leftmargindistance}}
@@ -1225,7 +1259,9 @@
\def\strc_itemgroups_handle_groups_text_item
{\hbox
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
+\enspace
+ \dostoptagged
\fi
\box\b_strc_itemgroups
\hskip\interwordspace}%
@@ -1234,7 +1270,9 @@
\def\strc_itemgroups_handle_groups_inline_item
{\hbox to \d_strc_itemgroups_list_width
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
\llap{+\enspace}%
+ \dostoptagged
\fi
\box\b_strc_itemgroups
\hss}} % was: \hfill
@@ -1270,8 +1308,8 @@
\setfalse\c_strc_itemgroups_symbol}
\def\strc_itemgroups_make_fitting_box
- {\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\scaledpoint\relax % brr, sp
- \normalexpanded{\global\setitemgroupparameter{\c!maxwidth}{\number\wd\b_strc_itemgroups}}%
+ {\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\relax
+ \normalexpanded{\global\setitemgroupparameter{\c!maxwidth}{\the\wd\b_strc_itemgroups}}%
\fi
\ifdim\d_strc_itemgroups_max_width>\zeropoint
\setbox\b_strc_itemgroups\simplealignedbox
@@ -1524,7 +1562,7 @@
{\strc_itemgroups_item_alone}
\def\strc_itemgroups_item_alone[#category]#text\par
- {\doifsomethingelse{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
+ {\doifelsesomething{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
\startitem#text\stopitem
\stopitemgroup}
@@ -1532,7 +1570,7 @@
{\strc_itemgroups_head_alone}
\def\strc_itemgroups_head_alone[#category]#head\par#body\par
- {\doifsomethingelse{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
+ {\doifelsesomething{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
\starthead{#head}#body\stophead
\stopitemgroup}
diff --git a/tex/context/base/strc-lab.mkiv b/tex/context/base/strc-lab.mkiv
index ce4cdcc5e..ac49941aa 100644
--- a/tex/context/base/strc-lab.mkiv
+++ b/tex/context/base/strc-lab.mkiv
@@ -58,10 +58,15 @@
{\normalexpanded{\defineconstruction[#1][#3][\s!handler=\v!label,\c!level=#2]}%
\setevalue{\??label#1:\s!parent}{\??label#3}}%
\ifconditional\c_strc_constructions_define_commands
- \setuevalue{\e!next #1}{\strc_labels_next {#1}{\number#2}}% obsolete
- \setuevalue{\c!reset#1}{\strc_labels_reset {#1}{\number#2}}% obsolete
- %setuevalue{\c!set #1}{\strc_labels_set {#1}{\number#2}}% obsolete
- \setuevalue {#1}{\strc_labels_command{#1}}%
+ \setuevalue{\e!next #1}{\strc_labels_next {#1}{\number#2}}% obsolete
+ \setuevalue{\v!reset #1}{\strc_labels_reset {#1}{\number#2}}% obsolete % should be \e!reset anyway
+ %setuevalue{\c!set #1}{\strc_labels_set {#1}{\number#2}}% obsolete
+ \ifcsname\v!current#1\endcsname
+ % we play safe
+ \else
+ \setuevalue{\v!current#1}{\strc_labels_current{#1}}% % obsolete % should be \e!current anyway
+ \fi
+ \setuevalue {#1}{\strc_labels_command{#1}}%
\fi}
% todo: \strc_labels_command for user
@@ -96,16 +101,19 @@
\ifx\p_counter\empty %
\let\p_counter\currentlabel
\fi
- \doifcounterelse\p_counter\donothing{\strc_labels_define_counter\p_counter}%
+ \doifelsecounter\p_counter\donothing{\strc_labels_define_counter\p_counter}%
\letlabelparameter\s!counter\p_counter
\to \everydefinelabel
\let\p_strc_constructions_title \empty
\let\p_strc_constructions_number\empty
+\newconditional\c_strc_constructions_number_keep
+
\setvalue{\??constructioninitializer\v!label}%
{\let\currentlabel \currentconstruction
\let\constructionparameter \labelparameter
+ \let\constructionnamespace \??label
\let\detokenizedconstructionparameter\detokenizedlabelparameter
\let\letconstructionparameter \letlabelparameter
\let\useconstructionstyleandcolor \uselabelstyleandcolor
@@ -117,7 +125,9 @@
\iftrialtypesetting
\strc_counters_save\currentconstructionnumber
\fi
- \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \ifconditional\c_strc_constructions_number_keep \else
+ \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \fi
\else
\setfalse\c_strc_constructions_number_state
\fi
@@ -137,11 +147,12 @@
%D Interfaces:
-\let\strc_labels_command\strc_descriptions_command
+\unexpanded\def\strc_labels_command{\setfalse\c_strc_constructions_number_keep\strc_descriptions_command}
+\unexpanded\def\strc_labels_current{\settrue \c_strc_constructions_number_keep\strc_descriptions_command}
-\unexpanded\def\strc_labels_next {\strc_constructions_next_indeed \namedlabelparameter} % #1#2
-\unexpanded\def\strc_labels_reset{\strc_constructions_reset_indeed\namedlabelparameter} % #1#2
-%unexpanded\def\strc_labels_set {\strc_constructions_set_indeed \namedlabelparameter} % #1#2
+\unexpanded\def\strc_labels_next {\strc_constructions_next_indeed \namedlabelparameter} % #1#2
+\unexpanded\def\strc_labels_reset {\strc_constructions_reset_indeed\namedlabelparameter} % #1#2
+%unexpanded\def\strc_labels_set {\strc_constructions_set_indeed \namedlabelparameter} % #1#2
% similar to enumerations
diff --git a/tex/context/base/strc-lev.lua b/tex/context/base/strc-lev.lua
index 947889e1e..d7ffd6af4 100644
--- a/tex/context/base/strc-lev.lua
+++ b/tex/context/base/strc-lev.lua
@@ -8,26 +8,30 @@ if not modules then modules = { } end modules ['strc-lev'] = {
local insert, remove = table.insert, table.remove
-local context = context
-local commands = commands
+local context = context
+local interfaces = interfaces
-local sections = structures.sections
-local default = interfaces.variables.default
+local sections = structures.sections
+local implement = interfaces.implement
-sections.levels = sections.levels or { }
+local v_default = interfaces.variables.default
-local level, levels, categories = 0, sections.levels, { }
+sections.levels = sections.levels or { }
-storage.register("structures/sections/levels", levels, "structures.sections.levels")
+local level = 0
+local levels = sections.levels
+local categories = { }
local f_two_colon = string.formatters["%s:%s"]
-function commands.definesectionlevels(category,list)
+storage.register("structures/sections/levels", levels, "structures.sections.levels")
+
+local function definesectionlevels(category,list)
levels[category] = utilities.parsers.settings_to_array(list)
end
-function commands.startsectionlevel(category)
- category = category ~= "" and category or default
+local function startsectionlevel(category)
+ category = category ~= "" and category or v_default
level = level + 1
local lc = levels[category]
if not lc or level > #lc then
@@ -38,7 +42,7 @@ function commands.startsectionlevel(category)
insert(categories,category)
end
-function commands.stopsectionlevel()
+local function stopsectionlevel()
local category = remove(categories)
if category then
local lc = levels[category]
@@ -52,3 +56,20 @@ function commands.stopsectionlevel()
-- error
end
end
+
+implement {
+ name = "definesectionlevels",
+ actions = definesectionlevels,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "startsectionlevel",
+ actions = startsectionlevel,
+ arguments = "string"
+}
+
+implement {
+ name = "stopsectionlevel",
+ actions = stopsectionlevel,
+}
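-- The migration pattern above in its smallest form, as a sketch that assumes
-- it runs inside ConTeXt where interfaces.implement is available; the name
-- "demohelper" is just a placeholder. The function is registered by name and
-- argument specification instead of being hung onto the commands table, and
-- the TeX end then calls \clf_demohelper{...} rather than \ctxcommand{...}.
--
-- local report_demo = logs.reporter("demo")
--
-- interfaces.implement {
--     name      = "demohelper",
--     actions   = function(category) report_demo("category %a",category) end,
--     arguments = "string",
-- }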
diff --git a/tex/context/base/strc-lev.mkvi b/tex/context/base/strc-lev.mkvi
index b3ec2a2d5..dae3f28e3 100644
--- a/tex/context/base/strc-lev.mkvi
+++ b/tex/context/base/strc-lev.mkvi
@@ -26,17 +26,17 @@
\unexpanded\def\definesectionlevels{\dodoubleargument\strc_levels_define}
\unexpanded\def\startsectionlevel {\dosingleempty \strc_levels_start }
-\unexpanded\def\strc_levels_define[#category][#list]{\ctxcommand{definesectionlevels("#category","#list")}}
-\unexpanded\def\strc_levels_start [#category]{\ctxcommand{startsectionlevel("#category")}}
-\unexpanded\def\stopsectionlevel {\ctxcommand{stopsectionlevel()}}
+\unexpanded\def\strc_levels_define[#category][#list]{\clf_definesectionlevels{#category}{#list}}
+\unexpanded\def\strc_levels_start [#category]{\clf_startsectionlevel{#category}}
+\unexpanded\def\stopsectionlevel {\clf_stopsectionlevel}
\unexpanded\def\nostarthead{\dotripleargument\strc_levels_start_nop} % used at the lua end
\unexpanded\def\nostophead {\dosingleargument\strc_levels_stop_nop } % used at the lua end
\unexpanded\def\strc_levels_start[#category]%
- {\doifassignmentelse{#category}
- {\ctxcommand{startsectionlevel("\v!default")}[#category]}
- {\ctxcommand{startsectionlevel("#category")}}}
+ {\doifelseassignment{#category}
+ {\clf_startsectionlevel{\v!default}[#category]}
+ {\clf_startsectionlevel{#category}}}
\unexpanded\def\strc_levels_start_nop[#category][#settings][#userdata]%
{\blank
diff --git a/tex/context/base/strc-lnt.mkvi b/tex/context/base/strc-lnt.mkvi
index f84521002..ee7d5dd88 100644
--- a/tex/context/base/strc-lnt.mkvi
+++ b/tex/context/base/strc-lnt.mkvi
@@ -92,10 +92,10 @@
\def\page_lines_in_to {\in[lr:e:\currentlinenotereference]}
\unexpanded\def\strc_linenotes_range_normal#1% order
- {\doifreferencefoundelse{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
+ {\doifelsereferencefound{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
\ifconditional\c_page_lines_current_from
\xdef\m_page_lines_current_from{\currentreferencelinenumber}%
- \doifreferencefoundelse{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
+ \doifelsereferencefound{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
\ifconditional\c_page_lines_current_to
\xdef\m_page_lines_current_to{\currentreferencelinenumber}%
\page_lines_in_from
@@ -111,10 +111,10 @@
\fi}
\unexpanded\def\strc_linenotes_range_sparse#1% order
- {\doifreferencefoundelse{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
+ {\doifelsereferencefound{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
\ifconditional\c_page_lines_current_from
\xdef\m_page_lines_current_from{\currentreferencelinenumber}%
- \doifreferencefoundelse{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
+ \doifelsereferencefound{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
\ifconditional\c_page_lines_current_to
\xdef\m_page_lines_current_to{\currentreferencelinenumber}%
\ifx\m_page_lines_previous_from\m_page_lines_current_from
diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua
index d86368b6a..fd79bbd7a 100644
--- a/tex/context/base/strc-lst.lua
+++ b/tex/context/base/strc-lst.lua
@@ -15,12 +15,16 @@ if not modules then modules = { } end modules ['strc-lst'] = {
--
-- move more to commands
-local format, gmatch, gsub = string.format, string.gmatch, string.gsub
-local tonumber = tonumber
-local concat, insert, remove = table.concat, table.insert, table.remove
+local tonumber, type = tonumber, type
+local concat, insert, remove, sort = table.concat, table.insert, table.remove, table.sort
local lpegmatch = lpeg.match
-local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash
-local allocate, checked = utilities.storage.allocate, utilities.storage.checked
+
+local setmetatableindex = table.setmetatableindex
+local sortedkeys = table.sortedkeys
+
+local settings_to_set = utilities.parsers.settings_to_set
+local allocate = utilities.storage.allocate
+local checked = utilities.storage.checked
local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end)
@@ -28,44 +32,72 @@ local report_lists = logs.reporter("structure","lists")
local context = context
local commands = commands
-
-local texgetcount = tex.getcount
+local implement = interfaces.implement
local structures = structures
local lists = structures.lists
local sections = structures.sections
local helpers = structures.helpers
local documents = structures.documents
-local pages = structures.pages
local tags = structures.tags
+local counters = structures.counters
local references = structures.references
local collected = allocate()
local tobesaved = allocate()
local cached = allocate()
local pushed = allocate()
+local kinds = allocate()
+local names = allocate()
lists.collected = collected
lists.tobesaved = tobesaved
lists.enhancers = lists.enhancers or { }
-lists.internals = allocate(lists.internals or { }) -- to be checked
+-----.internals = allocate(lists.internals or { }) -- to be checked
lists.ordered = allocate(lists.ordered or { }) -- to be checked
lists.cached = cached
lists.pushed = pushed
+lists.kinds = kinds
+lists.names = names
+
+local sorters = sorters
+local sortstripper = sorters.strip
+local sortsplitter = sorters.splitters.utf
+local sortcomparer = sorters.comparers.basic
local sectionblocks = allocate()
lists.sectionblocks = sectionblocks
references.specials = references.specials or { }
-local variables = interfaces.variables
local matchingtilldepth = sections.matchingtilldepth
local numberatdepth = sections.numberatdepth
+local getsectionlevel = sections.getlevel
+local typesetnumber = sections.typesetnumber
+local autosectiondepth = sections.autodepth
--- -- -- -- -- --
+local variables = interfaces.variables
-local function zerostrippedconcat(t,separator) -- for the moment not public
+local v_all = variables.all
+local v_reference = variables.reference
+local v_title = variables.title
+local v_number = variables.reference
+local v_command = variables.command
+local v_text = variables.text
+local v_current = variables.current
+local v_previous = variables.previous
+local v_next = variables.next
+local v_intro = variables.intro
+local v_here = variables.here
+local v_component = variables.component
+local v_reference = variables.reference
+local v_local = variables["local"]
+local v_default = variables.default
+
+-- for the moment not public --
+
+local function zerostrippedconcat(t,separator)
local f, l = 1, #t
for i=f,l do
if t[i] == 0 then
@@ -85,10 +117,11 @@ end
local function initializer()
-- create a cross reference between internal references
-- and list entries
- local collected = lists.collected
- local internals = checked(references.internals)
- local ordered = lists.ordered
- local blockdone = { }
+ local collected = lists.collected
+ local internals = checked(references.internals)
+ local ordered = lists.ordered
+ local usedinternals = references.usedinternals
+ local blockdone = { }
for i=1,#collected do
local c = collected[i]
local m = c.metadata
@@ -99,6 +132,7 @@ local function initializer()
local internal = r.internal
if internal then
internals[internal] = c
+ usedinternals[internal] = r.used
end
local block = r.block
if block and not blockdone[block] then
@@ -107,7 +141,8 @@ local function initializer()
end
end
-- access by order in list
- local kind, name = m.kind, m.name
+ local kind = m.kind
+ local name = m.name
if kind and name then
local ok = ordered[kind]
if ok then
@@ -120,6 +155,12 @@ local function initializer()
else
ordered[kind] = { [name] = { c } }
end
+ kinds[kind] = true
+ names[name] = true
+ elseif kind then
+ kinds[kind] = true
+ elseif name then
+ names[name] = true
end
end
if r then
@@ -128,9 +169,24 @@ local function initializer()
end
end
-job.register('structures.lists.collected', tobesaved, initializer)
+local function finalizer()
+ local flaginternals = references.flaginternals
+ local usedviews = references.usedviews
+ for i=1,#tobesaved do
+ local r = tobesaved[i].references
+ if r then
+ local i = r.internal
+ local f = flaginternals[i]
+ if f then
+ r.used = usedviews[i] or true
+ end
+ end
+ end
+end
+
+job.register('structures.lists.collected', tobesaved, initializer, finalizer)
-local groupindices = table.setmetatableindex("table")
+local groupindices = setmetatableindex("table")
function lists.groupindex(name,group)
local groupindex = groupindices[name]
@@ -139,15 +195,24 @@ end
-- we could use t (as hash key) in order to check for dup entries
-function lists.addto(t)
- local m = t.metadata
- local u = t.userdata
- if u and type(u) == "string" then
- t.userdata = helpers.touserdata(u) -- nicer at the tex end
- end
+function lists.addto(t) -- maybe more more here (saves parsing at the tex end)
+ local metadata = t.metadata
+ local userdata = t.userdata
local numberdata = t.numberdata
+ if userdata and type(userdata) == "string" then
+ t.userdata = helpers.touserdata(userdata)
+ end
+ if not metadata.level then
+ metadata.level = structures.sections.currentlevel() -- this is not used so it will go away
+ end
+ if numberdata then
+ local numbers = numberdata.numbers
+ if type(numbers) == "string" then
+ numberdata.numbers = counters.compact(numbers,nil,true)
+ end
+ end
local group = numberdata and numberdata.group
- local name = m.name
+ local name = metadata.name
if not group then
-- forget about it
elseif group == "" then
@@ -158,7 +223,14 @@ function lists.addto(t)
numberdata.numbers = cached[groupindex].numberdata.numbers
end
end
+ local setcomponent = references.setcomponent
+ if setcomponent then
+ setcomponent(t) -- can be inlined
+ end
local r = t.references
+ if r and not r.section then
+ r.section = structures.sections.currentid()
+ end
local i = r and r.internal or 0 -- brrr
local p = pushed[i]
if not p then
@@ -167,10 +239,6 @@ function lists.addto(t)
pushed[i] = p
r.listindex = p
end
- local setcomponent = references.setcomponent
- if setcomponent then
- setcomponent(t) -- might move to the tex end
- end
if group then
groupindices[name][group] = p
end
@@ -204,6 +272,11 @@ end
local enhanced = { }
+local synchronizepage = function(r) -- bah ... will move
+ synchronizepage = references.synchronizepage
+ return synchronizepage(r)
+end
+
function lists.enhance(n)
local l = cached[n]
if not l then
@@ -220,7 +293,7 @@ function lists.enhance(n)
-- save in the right order (happens at shipout)
lists.tobesaved[#lists.tobesaved+1] = l
-- default enhancer (cross referencing)
- references.realpage = texgetcount("realpageno")
+ synchronizepage(references)
-- tags
local kind = metadata.kind
local name = metadata.name
@@ -250,51 +323,88 @@ end
local nesting = { }
function lists.pushnesting(i)
- local parent = lists.result[i]
- local name = parent.metadata.name
+ local parent = lists.result[i]
+ local name = parent.metadata.name
local numberdata = parent and parent.numberdata
- local numbers = numberdata and numberdata.numbers
- local number = numbers and numbers[sections.getlevel(name)] or 0
- insert(nesting, { number = number, name = name, result = lists.result, parent = parent })
+ local numbers = numberdata and numberdata.numbers
+ local number = numbers and numbers[getsectionlevel(name)] or 0
+ insert(nesting, {
+ number = number,
+ name = name,
+ result = lists.result,
+ parent = parent
+ })
end
function lists.popnesting()
local old = remove(nesting)
- lists.result = old.result
+ if old then
+ lists.result = old.result
+ else
+ report_lists("nesting error")
+ end
end
--- will be split
-
-- Historically we had blocks but in the mkiv approach that could as well be a level
-- which would simplify things a bit.
-local splitter = lpeg.splitat(":")
+local splitter = lpeg.splitat(":") -- maybe also :: or have a block parameter
--- this will become filtercollected(specification) and then we'll also have sectionblock as key
-
-local sorters = {
- [variables.command] = function(a,b)
+local listsorters = {
+ [v_command] = function(a,b)
if a.metadata.kind == "command" or b.metadata.kind == "command" then
return a.references.internal < b.references.internal
else
return a.references.order < b.references.order
end
end,
- [variables.all] = function(a,b)
+ [v_all] = function(a,b)
return a.references.internal < b.references.internal
end,
+ [v_title] = function(a,b)
+ local da = a.titledata
+ local db = b.titledata
+ if da and db then
+ local ta = da.title
+ local tb = db.title
+ if ta and tb then
+ local sa = da.split
+ if not sa then
+ sa = sortsplitter(sortstripper(ta))
+ da.split = sa
+ end
+ local sb = db.split
+ if not sb then
+ sb = sortsplitter(sortstripper(tb))
+ db.split = sb
+ end
+ return sortcomparer(da,db) == -1
+ end
+ end
+ return a.references.internal < b.references.internal
+ end
}
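-- The v_title comparer above caches its split sort key on the entry (da.split)
-- so repeated comparisons don't re-split the same title. A standalone sketch of
-- that caching pattern in plain Lua, with a lowercased string standing in for
-- the sortstripper/sortsplitter/sortcomparer machinery:
--
-- local entries = {
--     { titledata = { title = "Gamma" } },
--     { titledata = { title = "alpha" } },
--     { titledata = { title = "Beta"  } },
-- }
--
-- local function sortkey(d)
--     local s = d.split
--     if not s then
--         s = string.lower(d.title)
--         d.split = s
--     end
--     return s
-- end
--
-- table.sort(entries, function(a,b)
--     return sortkey(a.titledata) < sortkey(b.titledata)
-- end)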
--- some day soon we will pass a table .. also split the function
+-- was: names, criterium, number, collected, forced, nested, sortorder
+
+local filters = setmetatableindex(function(t,k) return t[v_default] end)
-local function filtercollected(names, criterium, number, collected, forced, nested, sortorder) -- names is hash or string
- local numbers, depth = documents.data.numbers, documents.data.depth
- local result, nofresult, detail = { }, 0, nil
- local block = false -- all
- criterium = gsub(criterium or ""," ","") -- not needed
- -- new, will be applied stepwise
+local function filtercollected(specification)
+ --
+ local names = specification.names or { }
+ local criterium = specification.criterium or v_default
+ local number = 0 -- specification.number
+ local reference = specification.reference or ""
+ local collected = specification.collected or lists.collected
+ local forced = specification.forced or { }
+ local nested = specification.nested or false
+ local sortorder = specification.sortorder or specification.order
+ --
+ local numbers = documents.data.numbers
+ local depth = documents.data.depth
+ local block = false -- all
local wantedblock, wantedcriterium = lpegmatch(splitter,criterium) -- block:criterium
- if wantedblock == "" or wantedblock == variables.all or wantedblock == variables.text then
+ if wantedblock == "" or wantedblock == v_all or wantedblock == v_text then
criterium = wantedcriterium ~= "" and wantedcriterium or criterium
elseif not wantedcriterium then
block = documents.data.block
@@ -304,236 +414,393 @@ local function filtercollected(names, criterium, number, collected, forced, nest
if block == "" then
block = false
end
--- print(">>",block,criterium)
- --
- forced = forced or { } -- todo: also on other branched, for the moment only needed for bookmarks
if type(names) == "string" then
- names = settings_to_hash(names)
+ names = settings_to_set(names)
end
- local all = not next(names) or names[variables.all] or false
+ local all = not next(names) or names[v_all] or false
+ --
+ specification.names = names
+ specification.criterium = criterium
+ specification.number = 0 -- obsolete
+ specification.reference = reference -- new
+ specification.collected = collected
+ specification.forced = forced -- todo: also on other branched, for the moment only needed for bookmarks
+ specification.nested = nested
+ specification.sortorder = sortorder
+ specification.numbers = numbers
+ specification.depth = depth
+ specification.block = block
+ specification.all = all
+ --
if trace_lists then
- report_lists("filtering names %a, criterium %a, block %a, number %a",names,criterium,block or "*",number)
+ report_lists("filtering names %,t, criterium %a, block %a",sortedkeys(names), criterium, block or "*")
end
- if criterium == variables.intro then
- -- special case, no structure yet
- for i=1,#collected do
- local v = collected[i]
+ local result = filters[criterium](specification)
+ if trace_lists then
+ report_lists("criterium %a, block %a, found %a",specification.criterium, specification.block or "*", #result)
+ end
+ --
+ if sortorder then -- experiment
+ local sorter = listsorters[sortorder]
+ if sorter then
+ if trace_lists then
+ report_lists("sorting list using method %a",sortorder)
+ end
+ for i=1,#result do
+ result[i].references.order = i
+ end
+ sort(result,sorter)
+ end
+ end
+ --
+ return result
+end
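-- An illustrative call shape only (the names and values here are hypothetical):
-- the old positional arguments now travel in one specification table, anything
-- not given falls back to the defaults normalized above, and the criterium then
-- dispatches through the filters table (entries defined below).
--
-- local result = filtercollected {
--     names     = "figure, table", -- a string is turned into a set
--     criterium = "current",
--     reference = "",
--     sortorder = "title",         -- optional, see listsorters above
-- }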
+
+filters[v_intro] = function(specification)
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local all = specification.all
+ local names = specification.names
+ for i=1,#collected do
+ local v = collected[i]
+ local metadata = v.metadata
+ if metadata and (all or names[metadata.name or false]) then
local r = v.references
if r and r.section == 0 then
nofresult = nofresult + 1
result[nofresult] = v
end
end
- elseif all or criterium == variables.all or criterium == variables.text then
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r and (not block or not r.block or block == r.block) then
- local metadata = v.metadata
- if metadata then
- local name = metadata.name or false
- local sectionnumber = (r.section == 0) or sections.collected[r.section]
- if forced[name] or (sectionnumber and not metadata.nolist and (all or names[name])) then -- and not sectionnumber.hidenumber then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
- end
- end
- end
- elseif criterium == variables.current then
- if depth == 0 then
- return filtercollected(names,variables.intro,number,collected,forced,false,sortorder)
- else
+ end
+ return result
+end
+
+filters[v_reference] = function(specification)
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local names = specification.names
+ local sections = sections.collected
+ local reference = specification.reference
+ if reference ~= "" then
+ local prefix, rest = lpegmatch(references.prefixsplitter,reference) -- p::r
+ local r = prefix and rest and references.derived[prefix][rest] or references.derived[""][reference]
+ local s = r and r.numberdata -- table ref !
+ if s then
+ local depth = getsectionlevel(r.metadata.name)
+ local numbers = s.numbers
for i=1,#collected do
local v = collected[i]
local r = v.references
if r and (not block or not r.block or block == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then -- and not sectionnumber.hidenumber then
- local cnumbers = sectionnumber.numbers
- local metadata = v.metadata
- if cnumbers then
- if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers > depth then
- local ok = true
- for d=1,depth do
- local cnd = cnumbers[d]
- if not (cnd == 0 or cnd == numbers[d]) then
- ok = false
- break
- end
- end
- if ok then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
+ local metadata = v.metadata
+ if metadata and names[metadata.name or false] then
+ local sectionnumber = (r.section == 0) or sections[r.section]
+ if sectionnumber then
+ if matchingtilldepth(depth,numbers,sectionnumber.numbers) then
+ nofresult = nofresult + 1
+ result[nofresult] = v
end
end
end
end
end
- end
- elseif criterium == variables.here then
- -- this is quite dirty ... as cnumbers is not sparse we can misuse #cnumbers
- if depth == 0 then
- return filtercollected(names,variables.intro,number,collected,forced,false,sortorder)
else
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r then -- and (not block or not r.block or block == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then -- and not sectionnumber.hidenumber then
- local cnumbers = sectionnumber.numbers
- local metadata = v.metadata
- if cnumbers then
- if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
- local ok = true
- for d=1,depth do
- local cnd = cnumbers[d]
- if not (cnd == 0 or cnd == numbers[d]) then
- ok = false
- break
- end
- end
- if ok then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
- end
- end
- end
+ report_lists("unknown reference %a specified",reference)
+ end
+ else
+ report_lists("no reference specified")
+ end
+ return result
+end
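The filter above leans on matchingtilldepth to decide whether an entry's section prefix matches the resolved reference. The helper itself is defined outside this excerpt, so the following is only a minimal Lua sketch of the assumed semantics, modelled on the inline comparison loops used by the neighbouring filters (a 0 component acting as a wildcard); the real helper may differ in detail.

    -- sketch only: assumed behaviour of matchingtilldepth, mirroring the
    -- inline loops above; not the actual ConTeXt definition
    local function matchingtilldepth(depth, cnumbers, numbers)
        if not numbers then
            return true -- nothing to compare against
        end
        for d = 1, depth do
            local c = cnumbers[d]
            if not (c == 0 or c == numbers[d]) then
                return false
            end
        end
        return true
    end

    -- usage: does an entry in section 2.1.3 fall under target prefix 2.1?
    print(matchingtilldepth(2, { 2, 1, 3 }, { 2, 1 })) --> true
    print(matchingtilldepth(2, { 3, 1, 3 }, { 2, 1 })) --> false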
+
+filters[v_all] = function(specification)
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local block = specification.block
+ local all = specification.all
+ local forced = specification.forced
+ local names = specification.names
+ local sections = sections.collected
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or block == r.block) then
+ local metadata = v.metadata
+ if metadata then
+ local name = metadata.name or false
+ local sectionnumber = (r.section == 0) or sections[r.section]
+ if forced[name] or (sectionnumber and not metadata.nolist and (all or names[name])) then -- and not sectionnumber.hidenumber then
+ nofresult = nofresult + 1
+ result[nofresult] = v
end
end
end
- elseif criterium == variables.previous then
- if depth == 0 then
- return filtercollected(names,variables.intro,number,collected,forced,false,sortorder)
- else
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r and (not block or not r.block or block == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then -- and not sectionnumber.hidenumber then
- local cnumbers = sectionnumber.numbers
- local metadata = v.metadata
- if cnumbers then
- if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
- local ok = true
- for d=1,depth-1 do
- local cnd = cnumbers[d]
- if not (cnd == 0 or cnd == numbers[d]) then
- ok = false
- break
- end
- end
- if ok then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
+ end
+ return result
+end
+
+filters[v_text] = filters[v_all]
+
+filters[v_current] = function(specification)
+ if specification.depth == 0 then
+ specification.nested = false
+ specification.criterium = v_intro
+ return filters[v_intro](specification)
+ end
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local depth = specification.depth
+ local block = specification.block
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or block == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then -- and not sectionnumber.hidenumber then
+ local cnumbers = sectionnumber.numbers
+ local metadata = v.metadata
+ if cnumbers then
+ if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers > depth then
+ local ok = true
+ for d=1,depth do
+ local cnd = cnumbers[d]
+ if not (cnd == 0 or cnd == numbers[d]) then
+ ok = false
+ break
end
end
+ if ok then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
end
end
end
end
- elseif criterium == variables["local"] then -- not yet ok
- local nested = nesting[#nesting]
- if nested then
- return filtercollected(names,nested.name,nested.number,collected,forced,nested,sortorder)
- elseif sections.autodepth(documents.data.numbers) == 0 then
- return filtercollected(names,variables.all,number,collected,forced,false,sortorder)
- else
- return filtercollected(names,variables.current,number,collected,forced,false,sortorder)
- end
- elseif criterium == variables.component then
- -- special case, no structure yet
- local component = resolvers.jobs.currentcomponent() or ""
- if component ~= "" then
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- local m = v.metadata
- if r and r.component == component and (m and names[m.name] or all) then
- nofresult = nofresult + 1
- result[nofresult] = v
+ end
+ return result
+end
+
+filters[v_here] = function(specification)
+ -- this is quite dirty ... as cnumbers is not sparse we can misuse #cnumbers
+ if specification.depth == 0 then
+ specification.nested = false
+ specification.criterium = v_intro
+ return filters[v_intro](specification)
+ end
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local depth = specification.depth
+ local block = specification.block
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r then -- and (not block or not r.block or block == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then -- and not sectionnumber.hidenumber then
+ local cnumbers = sectionnumber.numbers
+ local metadata = v.metadata
+ if cnumbers then
+ if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
+ local ok = true
+ for d=1,depth do
+ local cnd = cnumbers[d]
+ if not (cnd == 0 or cnd == numbers[d]) then
+ ok = false
+ break
+ end
+ end
+ if ok then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
+ end
end
end
end
- else -- sectionname, number
- -- not the same as register
- local depth = sections.getlevel(criterium)
- local number = tonumber(number) or numberatdepth(depth) or 0
- if trace_lists then
- local t = sections.numbers()
- detail = format("depth %s, number %s, numbers %s, startset %s",depth,number,(#t>0 and concat(t,".",1,depth)) or "?",#collected)
- end
- if number > 0 then
- local pnumbers = nil
- local pblock = block
- local parent = nested and nested.parent
- if parent then
- pnumbers = parent.numberdata.numbers or pnumbers -- so local as well as nested
- pblock = parent.references.block or pblock
- end
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r and (not block or not r.block or pblock == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then
- local metadata = v.metadata
- local cnumbers = sectionnumber.numbers
- if cnumbers then
- if (all or names[metadata.name or false]) and #cnumbers >= depth and matchingtilldepth(depth,cnumbers,pnumbers) then
- nofresult = nofresult + 1
- result[nofresult] = v
+ end
+ return result
+end
+
+filters[v_previous] = function(specification)
+ if specification.depth == 0 then
+ specification.nested = false
+ specification.criterium = v_intro
+ return filters[v_intro](specification)
+ end
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local block = specification.block
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ local depth = specification.depth
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or block == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then -- and not sectionnumber.hidenumber then
+ local cnumbers = sectionnumber.numbers
+ local metadata = v.metadata
+ if cnumbers then
+ if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
+ local ok = true
+ for d=1,depth-1 do
+ local cnd = cnumbers[d]
+ if not (cnd == 0 or cnd == numbers[d]) then
+ ok = false
+ break
end
end
+ if ok then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
end
end
end
end
end
- if trace_lists then
- report_lists("criterium %a, block %a, found %a, detail %a",criterium,block or "*",#result,detail)
+ return result
+end
+
+filters[v_local] = function(specification)
+ local numbers = specification.numbers
+ local nested = nesting[#nesting]
+ if nested then
+ return filtercollected {
+ names = specification.names,
+ criterium = nested.name,
+ collected = specification.collected,
+ forced = specification.forced,
+ nested = nested,
+ sortorder = specification.sortorder,
+ }
+ else
+ specification.criterium = autosectiondepth(numbers) == 0 and v_all or v_current
+ specification.nested = false
+ return filtercollected(specification) -- rechecks, so better (for determining all)
end
+end
- if sortorder then -- experiment
- local sorter = sorters[sortorder]
- if sorter then
- if trace_lists then
- report_lists("sorting list using method %a",sortorder)
- end
- for i=1,#result do
- result[i].references.order = i
+
+filters[v_component] = function(specification)
+ -- special case, no structure yet
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local all = specification.all
+ local names = specification.names
+ local component = resolvers.jobs.currentcomponent() or ""
+ if component ~= "" then
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ local m = v.metadata
+ if r and r.component == component and (m and names[m.name] or all) then
+ nofresult = nofresult + 1
+ result[nofresult] = v
end
- table.sort(result,sorter)
end
end
-
return result
end
-lists.filtercollected = filtercollected
+-- local number = tonumber(number) or numberatdepth(depth) or 0
+-- if number > 0 then
+-- ...
+-- end
-function lists.filter(specification)
- return filtercollected(
- specification.names,
- specification.criterium,
- specification.number,
- lists.collected,
- specification.forced,
- false,
- specification.order
- )
+filters[v_default] = function(specification) -- is named
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ ----- depth = specification.depth
+ local block = specification.block
+ local criterium = specification.criterium
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ local reference = specification.reference
+ local nested = specification.nested
+ --
+ if reference then
+ reference = tonumber(reference)
+ end
+ --
+ local depth = getsectionlevel(criterium)
+ local pnumbers = nil
+ local pblock = block
+ local parent = nested and nested.parent
+ --
+ if parent then
+ pnumbers = parent.numberdata.numbers or pnumbers -- so local as well as nested
+ pblock = parent.references.block or pblock
+ if trace_lists then
+ report_lists("filtering by block %a and section %a",pblock,criterium)
+ end
+ end
+ --
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or pblock == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then
+ local metadata = v.metadata
+ local cnumbers = sectionnumber.numbers
+ if cnumbers then
+ if all or names[metadata.name or false] then
+ if reference then
+ -- filter by number
+ if reference == cnumbers[depth] then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
+ else
+ if #cnumbers >= depth and matchingtilldepth(depth,cnumbers,pnumbers) then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ return result
end
+-- names, criterium, number, collected, forced, nested, sortorder) -- names is hash or string
+
+lists.filter = filtercollected
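With the per-criterium code split into the filters table, lists.filter becomes the specification-driven entry point. The actual filtercollected is defined earlier in the patch (only its tail is visible at the top of this hunk), but a hedged sketch of the dispatch it presumably performs could look like this; all names below are assumptions, not the ConTeXt definitions.

    -- sketch only: dispatch a specification to the matching filter,
    -- falling back to a default handler for unknown criteria
    local filters = { }

    filters.default = function(specification)
        return specification.collected or { }
    end

    local function filtercollected(specification)
        local filter = filters[specification.criterium] or filters.default
        return filter(specification)
    end

    -- usage: unknown criteria end up in the default filter
    local result = filtercollected { criterium = "whatever", collected = { } }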
+
lists.result = { }
+function lists.getresult(r)
+ return lists.result[r]
+end
+
function lists.process(specification)
- lists.result = lists.filter(specification)
- local specials = utilities.parsers.settings_to_hash(specification.extras or "")
+ lists.result = filtercollected(specification)
+ local specials = settings_to_set(specification.extras or "")
specials = next(specials) and specials or nil
for i=1,#lists.result do
local r = lists.result[i]
@@ -544,7 +811,7 @@ function lists.process(specification)
end
function lists.analyze(specification)
- lists.result = lists.filter(specification)
+ lists.result = filtercollected(specification)
end
function lists.userdata(name,r,tag) -- to tex (todo: xml)
@@ -584,7 +851,7 @@ function lists.sectionnumber(name,n,spec)
local data = lists.result[n]
local sectiondata = sections.collected[data.references.section]
-- hm, prefixnumber?
- sections.typesetnumber(sectiondata,"prefix",spec,sectiondata) -- data happens to contain the spec too
+ typesetnumber(sectiondata,"prefix",spec,sectiondata) -- data happens to contain the spec too
end
-- some basics (todo: helpers for pages)
@@ -661,18 +928,18 @@ function lists.number(name,n,spec)
if data then
local numberdata = data.numberdata
if numberdata then
- sections.typesetnumber(numberdata,"number",spec or false,numberdata or false)
+ typesetnumber(numberdata,"number",spec or false,numberdata or false)
end
end
end
-function lists.prefixednumber(name,n,prefixspec,numberspec)
+function lists.prefixednumber(name,n,prefixspec,numberspec,forceddata)
local data = lists.result[n]
if data then
helpers.prefix(data,prefixspec)
- local numberdata = data.numberdata
+ local numberdata = data.numberdata or forceddata
if numberdata then
- sections.typesetnumber(numberdata,"number",numberspec or false,numberdata or false)
+ typesetnumber(numberdata,"number",numberspec or false,numberdata or false)
end
end
end
@@ -701,29 +968,175 @@ end
-- interface (maybe strclistpush etc)
-commands.pushlist = lists.pushnesting
-commands.poplist = lists.popnesting
-commands.enhancelist = lists.enhance
-commands.processlist = lists.process
-commands.analyzelist = lists.analyze
-commands.listtitle = lists.title
-commands.listprefixednumber = lists.prefixednumber
-commands.listprefixedpage = lists.prefixedpage
+if not lists.reordered then
+ function lists.reordered(data)
+ return data.numberdata
+ end
+end
+
+implement { name = "pushlist", actions = lists.pushnesting, arguments = "integer" }
+implement { name = "poplist", actions = lists.popnesting }
+
+implement {
+ name = "addtolist",
+ actions = { lists.addto, context },
+ arguments = {
+ {
+ { "references", {
+ { "internal", "integer" },
+ { "block" },
+ { "section", "integer" },
+ { "location" },
+ { "prefix" },
+ { "reference" },
+ { "order", "integer" },
+ }
+ },
+ { "metadata", {
+ { "kind" },
+ { "name" },
+ { "level", "integer" },
+ { "catcodes", "integer" },
+ { "coding" },
+ { "xmlroot" },
+ { "setup" },
+ }
+ },
+ { "userdata" },
+ { "titledata", {
+ { "label" },
+ { "title" },
+ { "bookmark" },
+ { "marking" },
+ { "list" },
+ }
+ },
+ { "prefixdata", {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ }
+ },
+ { "numberdata", {
+ { "numbers" },
+ { "groupsuffix" },
+ { "group" },
+ { "counter" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ }
+ }
+ }
+ }
+}
+
+implement {
+ name = "enhancelist",
+ actions = lists.enhance,
+ arguments = "integer"
+}
+implement {
+ name = "processlist",
+ actions = lists.process,
+ arguments = {
+ {
+ { "names" },
+ { "criterium" },
+ { "reference" },
+ { "extras" },
+ { "order" },
+ }
+ }
+}
-function commands.addtolist (...) context(lists.addto (...)) end
-function commands.listsize (...) context(lists.size (...)) end
-function commands.listlocation (...) context(lists.location (...)) end
-function commands.listlabel (...) context(lists.label (...)) end
-function commands.listrealpage (...) context(lists.realpage (...)) end
-function commands.listgroupindex (...) context(lists.groupindex(...)) end
+implement {
+ name = "analyzelist",
+ actions = lists.analyze,
+ arguments = {
+ {
+ { "names" },
+ { "criterium" },
+ { "reference" },
+ }
+ }
+}
-function commands.currentsectiontolist()
- context(lists.addto(sections.current()))
-end
+implement {
+ name = "listtitle",
+ actions = lists.title,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "listprefixednumber",
+ actions = lists.prefixednumber,
+ arguments = {
+ "string",
+ "integer",
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ }
+ }
+}
+
+implement {
+ name = "listprefixedpage",
+ actions = lists.prefixedpage,
+ arguments = {
+ "string",
+ "integer",
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "prefix" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ }
+ }
+}
+
+implement { name = "listsize", actions = { lists.size, context } }
+implement { name = "listlocation", actions = { lists.location, context }, arguments = "integer" }
+implement { name = "listlabel", actions = { lists.label, context }, arguments = { "integer", "string" } }
+implement { name = "listrealpage", actions = { lists.realpage, context }, arguments = { "string", "integer" } }
+implement { name = "listgroupindex", actions = { lists.groupindex, context }, arguments = { "string", "string" } }
+
+implement {
+ name = "currentsectiontolist",
+ actions = { sections.current, lists.addto, context }
+}
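The one-line implement calls above replace the old commands.* wrappers that explicitly piped a return value into context. The chaining is presumably handled by interfaces.implement when actions is a table; the composition below is a self-contained sketch of that assumption (the real scanner code is more involved), with stand-in definitions for lists.size and context.

    -- sketch only: compose an actions chain, feeding each result to the next
    local function chain(actions)
        return function(...)
            local result = actions[1](...)
            for i = 2, #actions do
                result = actions[i](result)
            end
            return result
        end
    end

    -- stand-ins for the names used in the patch
    local lists   = { size = function() return 42 end }
    local context = print

    local clf_listsize = chain { lists.size, context }
    clf_listsize() -- roughly what actions = { lists.size, context } amounts to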
-function commands.listuserdata(...)
- local str, metadata = lists.userdata(...)
+local function userdata(name,r,tag)
+ local str, metadata = lists.userdata(name,r,tag)
if str then
-- local catcodes = metadata and metadata.catcodes
-- if catcodes then
@@ -735,25 +1148,21 @@ function commands.listuserdata(...)
end
end
+implement {
+ name = "listuserdata",
+ actions = userdata,
+ arguments = { "string", "integer", "string" }
+}
+
-- we could also set variables .. names will change (when this module is done)
-- maybe strc_lists_savedtitle etc
-function commands.doiflisthastitleelse (...) commands.doifelse(lists.hastitledata (...)) end
-function commands.doiflisthaspageelse (...) commands.doifelse(lists.haspagedata (...)) end
-function commands.doiflisthasnumberelse(...) commands.doifelse(lists.hasnumberdata(...)) end
-function commands.doiflisthasentry (n) commands.doifelse(lists.iscached (n )) end
+implement { name = "doifelselisthastitle", actions = { lists.hastitledata, commands.doifelse }, arguments = { "string", "integer" } }
+implement { name = "doifelselisthaspage", actions = { lists.haspagedata, commands.doifelse }, arguments = { "string", "integer" } }
+implement { name = "doifelselisthasnumber", actions = { lists.hasnumberdata, commands.doifelse }, arguments = { "string", "integer" } }
+implement { name = "doifelselisthasentry", actions = { lists.iscached, commands.doifelse }, arguments = { "integer" } }
-function commands.savedlistnumber(name,n)
- local data = cached[tonumber(n)]
- if data then
- local numberdata = data.numberdata
- if numberdata then
- sections.typesetnumber(numberdata,"number",numberdata or false)
- end
- end
-end
-
-function commands.savedlisttitle(name,n,tag)
+local function savedlisttitle(name,n,tag)
local data = cached[tonumber(n)]
if data then
local titledata = data.titledata
@@ -763,39 +1172,56 @@ function commands.savedlisttitle(name,n,tag)
end
end
--- function commands.savedlistprefixednumber(name,n)
--- local data = cached[tonumber(n)]
--- if data then
--- local numberdata = data.numberdata
--- if numberdata then
--- helpers.prefix(data,data.prefixdata)
--- sections.typesetnumber(numberdata,"number",numberdata or false)
--- end
--- end
--- end
-
-if not lists.reordered then
- function lists.reordered(data)
- return data.numberdata
+local function savedlistnumber(name,n)
+ local data = cached[tonumber(n)]
+ if data then
+ local numberdata = data.numberdata
+ if numberdata then
+ typesetnumber(numberdata,"number",numberdata or false)
+ end
end
end
-function commands.savedlistprefixednumber(name,n)
+local function savedlistprefixednumber(name,n)
local data = cached[tonumber(n)]
if data then
local numberdata = lists.reordered(data)
if numberdata then
helpers.prefix(data,data.prefixdata)
- sections.typesetnumber(numberdata,"number",numberdata or false)
+ typesetnumber(numberdata,"number",numberdata or false)
end
end
end
-commands.discardfromlist = lists.discard
+lists.savedlisttitle = savedlisttitle
+lists.savedlistnumber = savedlistnumber
+lists.savedlistprefixednumber = savedlistprefixednumber
--- new and experimental and therefore off by default
+implement {
+ name = "savedlistnumber",
+ actions = savedlistnumber,
+ arguments = { "string", "integer" }
+}
-local sort, setmetatableindex = table.sort, table.setmetatableindex
+implement {
+ name = "savedlisttitle",
+ actions = savedlisttitle,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "savedlistprefixednumber",
+ actions = savedlistprefixednumber,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "discardfromlist",
+ actions = lists.discard,
+ arguments = { "integer" }
+}
+
+-- new and experimental and therefore off by default
lists.autoreorder = false -- true
diff --git a/tex/context/base/strc-lst.mkvi b/tex/context/base/strc-lst.mkvi
index 63c3e030a..71fc09829 100644
--- a/tex/context/base/strc-lst.mkvi
+++ b/tex/context/base/strc-lst.mkvi
@@ -47,7 +47,7 @@
\c!state=\v!start,
\c!coupling=\v!off,
\c!criterium=\v!local,
- \c!number=\zerocount,
+ \c!reference=,% was number which was sort of obsolete
\c!width=3\emwidth,
%\c!maxwidth=,
\c!distance=\zeropoint,
@@ -94,6 +94,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflistelse\doifelselist
+
%D Regular list entries are bound to a specific location in order to
%D get the right pagenumber etc.\ associated. When pushing something
%D in between (in mkiv) it ends up directly in the list. This is the
@@ -125,37 +127,48 @@
\def\strc_lists_inject_nop[#dummya][#dummyb]%
{\endgroup}
-\def\strc_lists_inject_yes[#settings][#userdata]%
+\unexpanded\def\strc_lists_inject_enhance#listindex#internal%
+ {\normalexpanded{\ctxlatecommand{enhancelist(#listindex)}}}
+
+\unexpanded\def\strc_lists_inject_yes[#settings][#userdata]% can be used directly
{\setupcurrentlist[\c!type=userdata,\c!location=\v!none,#settings]% grouped (use \let...
\edef\p_location{\listparameter\c!location}%
\setnextinternalreference
- \edef\currentlistnumber{\ctxcommand{addtolist{
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock", % handy for lists, like bibl
- section = structures.sections.currentid(),
- % location = "\p_location",
- },
- metadata = {
- kind = "\listparameter\c!type",
- name = "\currentlist",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
- },
- userdata = \!!bs\detokenize{#userdata}\!!es % will be converted to table at the lua end
- }}}%
+ \scratchcounter\clf_addtolist
+ references {
+ internal \nextinternalreference
+ % block {\currentsectionblock}
+ % section structures.sections.currentid()
+ % location {\p_location}
+ }
+ metadata {
+ kind {\listparameter\c!type}
+ name {\currentlist}
+ % level structures.sections.currentlevel()
+ catcodes \catcodetable
+ }
+ userdata {\detokenize\expandafter{\normalexpanded{#userdata}}}
+ \relax
+ \edef\currentlistnumber{\the\scratchcounter}%
\ifx\p_location\v!here
% this branch injects nodes !
- \expanded{\ctxlatecommand{enhancelist(\currentlistnumber)}}%
- \ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}% will change
- \xdef\currentstructurelistattribute{\number\lastdestinationattribute}%
+ \strc_lists_inject_enhance{\currentlistnumber}{\nextinternalreference}%
+ \clf_setinternalreference internal \nextinternalreference\relax % this will change
+ \xdef\currentstructurelistattribute{\the\lastdestinationattribute}%
\dontleavehmode\hbox attr \destinationattribute \lastdestinationattribute{}% todo
\else
% and this one doesn't
- \ctxcommand{enhancelist(\currentlistnumber)}%
+ \clf_enhancelist\currentlistnumber\relax
\fi
\endgroup}
+% todo: make like \strc_references_direct_full_user ... with {}{}{}
+
+\unexpanded\def\strc_lists_inject_direct[#tag]% [#settings][#userdata]
+ {\begingroup
+ \edef\currentlist{#tag}%
+ \strc_lists_inject_yes} % [#settings][#userdata]
+
\unexpanded\def\writebetweenlist{\dodoubleempty \strc_lists_write_between}
\unexpanded\def\writedatatolist {\dotripleargument\strc_lists_write_data_to}
\unexpanded\def\writetolist {\dodoubleempty \strc_lists_write_to}
@@ -244,7 +257,7 @@
\strc_lists_place_current % maybe inline
{#list}%
{\listparameter\c!criterium}%
- {\listparameter\c!number}%
+ {\listparameter\c!reference}%
{\listparameter\c!extras}%
{\listparameter\c!order}%
% \stoplistreferences
@@ -288,7 +301,7 @@
{\dotripleempty\strc_lists_combined_define}
\def\strc_lists_combined_define[#tag][#list][#settings]%
- {\definelist[#tag][\c!criterium=\v!local,\c!number=0,\c!alternative=,\c!list={#list},#settings]% inherits from root
+ {\definelist[#tag][\c!criterium=\v!local,\c!reference=,\c!alternative=,\c!list={#list},#settings]% inherits from root
\setvalue{\e!setup #tag\e!endsetup}{\dodoubleempty\strc_lists_combined_setup [#tag]}%
\setvalue{\e!place #tag}{\dodoubleempty\strc_lists_combined_place [#tag]}%
\setvalue{\e!complete #tag}{\dodoubleempty\strc_lists_combined_complete[#tag]}}
@@ -324,42 +337,46 @@
\def\currentstructurelistnumber{0} % injection
\def\currentlistmethod {entry} % typesetting
-\def\currentlistindex {0} % typesetting
+\def\currentlistindex {0} % typesetting (maybe also a real counter)
+
+\unexpanded\def\savedlistnumber #1#2{\clf_savedlistnumber {#1}\numexpr#2\relax}
+\unexpanded\def\savedlisttitle #1#2{\clf_savedlisttitle {#1}\numexpr#2\relax}
+\unexpanded\def\savedlistprefixednumber#1#2{\clf_savedlistprefixednumber{#1}\numexpr#2\relax}
\def\structurelistlocation
- {\ctxcommand{listlocation(\currentlistindex)}}
+ {\clf_listlocation\numexpr\currentlistindex\relax}
\def\structurelistrealpagenumber
- {\ctxcommand{listrealpage("\currentlist",\currentlistindex)}}
+ {\clf_listrealpage{\currentlist}\numexpr\currentlistindex\relax}
\unexpanded\def\structurelistpagenumber
{\dostarttagged\t!listpage\empty
- \ctxcommand{listprefixedpage(
- "\currentlist",
- \currentlistindex,
- {
- separatorset = "\listparameter\c!pageprefixseparatorset",
- conversionset = "\listparameter\c!pageprefixconversionset",
- set = "\listparameter\c!pageprefixset",
- segments = "\listparameter\c!pageprefixsegments",
- connector = \!!bs\listparameter\c!pageprefixconnector\!!es,
- },
- {
- prefix = "\listparameter\c!pageprefix",
- conversionset = "\listparameter\c!pageconversionset",
- starter = \!!bs\listparameter\c!pagestarter\!!es,
- stopper = \!!bs\listparameter\c!pagestopper\!!es,
+ \clf_listprefixedpage
+ {\currentlist}
+ \currentlistindex
+ {
+ separatorset {\listparameter\c!pageprefixseparatorset}
+ conversionset {\listparameter\c!pageprefixconversionset}
+ set {\listparameter\c!pageprefixset}
+ segments {\listparameter\c!pageprefixsegments}
+ connector {\listparameter\c!pageprefixconnector}
+ }
+ {
+ prefix {\listparameter\c!pageprefix}
+ conversionset {\listparameter\c!pageconversionset}
+ starter {\listparameter\c!pagestarter}
+ stopper {\listparameter\c!pagestopper}
}
- )}%
+ \relax
\dostoptagged}
\unexpanded\def\structurelistuservariable#name%
{\dostarttagged\t!listdata{#name}%
- \ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}%
+ \clf_listuserdata{\currentlist}\currentlistindex{#name}%
\dostoptagged}
\def\rawstructurelistuservariable#name%
- {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}}
+ {\clf_listuserdata{\currentlist}\currentlistindex{#name}}
\unexpanded\def\structurelistfirst {\structurelistuservariable\s!first } % s!
\unexpanded\def\structurelistsecond{\structurelistuservariable\s!second} % s!
@@ -367,63 +384,70 @@
\def\rawstructurelistfirst {\rawstructurelistuservariable\s!first } % s! % was \unexpanded
\def\rawstructurelistsecond{\rawstructurelistuservariable\s!second} % s! % was \unexpanded
-\unexpanded\def\doifstructurelisthaspageelse
- {\ctxcommand{doiflisthaspageelse("\currentlist",\currentlistindex)}}
+\unexpanded\def\doifelsestructurelisthaspage
+ {\clf_doifelselisthaspage{\currentlist}\numexpr\currentlistindex\relax}
-\unexpanded\def\doifstructurelisthasnumberelse
- {\ctxcommand{doiflisthasnumberelse("\currentlist",\currentlistindex)}}
+\unexpanded\def\doifelsestructurelisthasnumber
+ {\clf_doifelselisthasnumber{\currentlist}\numexpr\currentlistindex\relax}
+
+\let\doifstructurelisthaspageelse \doifelsestructurelisthaspage
+\let\doifstructurelisthasnumberelse\doifelsestructurelisthasnumber
\unexpanded\def\structurelistgenerictitle
{\dostarttagged\t!listcontent\empty
- \ctxcommand{listtitle("\currentlist",\currentlistindex)}%
+ \clf_listtitle{\currentlist}\currentlistindex\relax
\dostoptagged}
\unexpanded\def\structurelistgenericnumber % tricky, we need to delay tagging as we have nested lua calls
{\dostarttagged\t!listtag\empty
- \ctxcommand{listprefixednumber("\currentlist",\currentlistindex, {
- prefix = "\listparameter\c!prefix",
- separatorset = "\listparameter\c!prefixseparatorset",
- conversionset = "\listparameter\c!prefixconversionset",
- starter = \!!bs\listparameter\c!prefixstarter\!!es,
- stopper = \!!bs\listparameter\c!prefixstopper\!!es,
- set = "\listparameter\c!prefixset",
- segments = "\listparameter\c!prefixsegments",
- connector = \!!bs\listparameter\c!prefixconnector\!!es,
- },
- {
- separatorset = "\listparameter\c!numberseparatorset",
- conversionset = "\listparameter\c!numberconversionset",
- starter = \!!bs\listparameter\c!numberstarter\!!es,
- stopper = \!!bs\listparameter\c!numberstopper\!!es,
- segments = "\listparameter\c!numbersegments",
- } )}%
+ \clf_listprefixednumber
+ {\currentlist}%
+ \currentlistindex
+ {%
+ prefix {\listparameter\c!prefix}%
+ separatorset {\listparameter\c!prefixseparatorset}%
+ conversionset {\listparameter\c!prefixconversionset}%
+ starter {\listparameter\c!prefixstarter}%
+ stopper {\listparameter\c!prefixstopper}%
+ set {\listparameter\c!prefixset}%
+ segments {\listparameter\c!prefixsegments}%
+ connector {\listparameter\c!prefixconnector}%
+ }%
+ {%
+ separatorset {\listparameter\c!numberseparatorset}%
+ conversionset {\listparameter\c!numberconversionset}%
+ starter {\listparameter\c!numberstarter}%
+ stopper {\listparameter\c!numberstopper}%
+ segments {\listparameter\c!numbersegments}%
+ }%
+ \relax
\dostoptagged}
% TODO: pass extra tag name (contents, figures, bibliography ...)
-\unexpanded\def\strc_lists_place_current#list#criterium#number#extras#order% beware, not a user command
- {\dostarttagged\t!list\empty
- \ctxcommand{processlist{
- names = "#list",
- criterium = "#criterium",
- number = "#number",
- extras = "#extras",
- order = "#order"
- }}%
+\unexpanded\def\strc_lists_place_current#list#criterium#reference#extras#order% beware, not a user command
+ {\dostarttaggedchained\t!list\empty\??list
+ \clf_processlist
+ names {#list}
+ criterium {#criterium}
+ reference {#reference}
+ extras {#extras}
+ order {#order}
+ \relax
\dostoptagged}
-\unexpanded\def\strc_lists_analyze#list#criterium#number%
- {\ctxcommand{analyzelist{
- names = "#list",
- criterium = "#criterium",
- number = "#number"
- }}}
+\unexpanded\def\strc_lists_analyze#list#criterium#reference%
+ {\clf_analyzelist
+ names {#list}
+ criterium {#criterium}
+ reference {#reference}
+ \relax}
-\def\firststructureelementinlist#list%
- {\ctxcommand{firstinset("#list")}}
+\def\firststructureelementinlist#list% expandable
+ {\clf_firstinset{#list}}
\def\structurelistsize
- {\ctxcommand{listsize()}}
+ {\clf_listsize}
%D Depending on what kind of list we have (e.g.\ a section related one)
%D processors can be defined.
@@ -451,14 +475,12 @@
\def\usestructurelistprocessor#tag%
{\csname\??structurelistprocessor#tag\endcsname}
-\unexpanded\def\strclistsentryprocess#tag#method#index#extra% This one is called at the lua end!
- {\ctxcommand{pushlist(#index)}%
- \edef\currentlist {#tag}%
- \edef\currentlistmethod{#method}%
- \edef\currentlistindex {#index}%
- \edef\currentlistextra {#extra}%
- \listextraparameter\c!before
+\let\dotaglistlocation\relax
+
+\def\strc_lists_entry_process % assume things to be set up
+ {\listextraparameter\c!before
\dostarttagged\t!listitem\currentlist
+ \dotaglistlocation
\csname\??structurelistprocessor
\ifcsname\??structurelistprocessor\currentlist:\currentlistmethod\endcsname\currentlist:\currentlistmethod\else
\ifcsname\??structurelistprocessor\currentlistmethod \endcsname\currentlistmethod \else
@@ -466,8 +488,16 @@
\s!default \fi\fi\fi
\endcsname
\dostoptagged
- \listextraparameter\c!after
- \ctxcommand{poplist()}}
+ \listextraparameter\c!after}
+
+\unexpanded\def\strclistsentryprocess#tag#method#index#extra% This one is called at the lua end!
+ {\clf_pushlist#index\relax
+ \edef\currentlist {#tag}%
+ \edef\currentlistmethod{#method}%
+ \edef\currentlistindex {#index}%
+ \edef\currentlistextra {#extra}%
+ \strc_lists_entry_process
+ \clf_poplist}
% lists that have a number/title are kind of generic and can share code
@@ -481,7 +511,7 @@
\strc_lists_apply_renderingsetup}
\installstructurelistprocessor\s!command
- {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"\s!command")}}
+ {\clf_listuserdata{\currentlist}\currentlistindex{\s!command}}
\installstructurelistprocessor{section}
{\let\currentlistentrynumber \structurelistgenericnumber
@@ -498,7 +528,7 @@
% example of usage elsewhere:
%
% \installstructurelistprocessor{pubs:userdata}
-% {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"bibref")}}
+% {\clf_listuserdata{\currentlist}\currentlistindex{bibref}}
%D List symbols are used in interactive documents where no numbers
%D are used but nevertheless structure is present. Beware, the list
@@ -520,7 +550,7 @@
\def\strc_lists_assign_dimen#dimension#key#default%
{\edef\m_strc_list_dimen{\listparameter#key}%
- \doifinsetelse\m_strc_list_dimen{\v!fit,\v!broad}{#dimension#default}{#dimension\m_strc_list_dimen}\relax}
+ \doifelseinset\m_strc_list_dimen{\v!fit,\v!broad}{#dimension#default}{#dimension\m_strc_list_dimen}\relax}
\definesymbol[\v!list][\v!none ][\strc_lists_symbol_none]
\definesymbol[\v!list][\v!one ][\strc_lists_symbol_one]
@@ -531,7 +561,7 @@
\unexpanded\def\currentlistsymbol
{\edef\p_symbol{\listparameter\c!symbol}%
- \doifinsymbolsetelse\v!list\p_symbol
+ \doifelseinsymbolset\v!list\p_symbol
{\directsymbol\v!list\p_symbol}
{\directsymbol\v!list\s!default}}
@@ -589,7 +619,7 @@
{\currentlistentrynumber}
\setvalue{\??listsymbollabels\v!yes}% auto (use value stored in tuc file)
- {\edef\currentlistlabel{\ctxcommand{listlabel(\currentlistindex,"\currentlistlabel")}}%
+ {\edef\currentlistlabel{\clf_listlabel\currentlistindex{\currentlistlabel}}%
\leftlabeltext\currentlistlabel
\listparameter\c!starter
\currentlistentrynumber
@@ -700,7 +730,7 @@
\let\p_method\v!command
\fi
\normalexpanded{\definelistalternative[#tag][\p_method]}[\c!command=\strc_lists_placement_command]%
- \doifnextbgroupelse
+ \doifelsenextbgroup
{\strc_lists_define_placement_yes{#tag}}
{\strc_lists_define_placement_nop{#tag}}}
@@ -747,7 +777,7 @@
% better is to use a special list entry but we keep this for compatibility
\let\\=\space
% so expanding this token register has to come *after* the font switch
- \dontconvertfont
+ \dontconvertfont % (**) this has to become an option (see publ)
\to \t_lists_every_renderingtext
\appendtoks
@@ -764,7 +794,7 @@
\settrue\c_lists_has_page
\settrue\c_lists_show_page
\else
- \doifstructurelisthaspageelse\settrue\setfalse\c_lists_has_page
+ \doifelsestructurelisthaspage\settrue\setfalse\c_lists_has_page
\ifx\p_pagenumber\v!yes
\settrue\c_lists_show_page
\else
@@ -777,7 +807,7 @@
\settrue\c_lists_has_number
\settrue\c_lists_show_number
\else
- \doifstructurelisthasnumberelse\settrue\setfalse\c_lists_has_number
+ \doifelsestructurelisthasnumber\settrue\setfalse\c_lists_has_number
\ifx\p_headnumber\v!yes
\settrue\c_lists_show_number
\else
@@ -818,6 +848,8 @@
% todo: provide packager via attributes
+\doinstallinjector\s!list
+
\installcorenamespace{listalternativemethods} % the general wrapper of a rendering
\startsetups[\??listrenderings:none]
@@ -845,12 +877,15 @@
% \stopsetups
\startsetups[\??listrenderings:generic]
+ \typo_injectors_check_list
\listparameter\c!before % can be \hskip
\edef\p_command{\listalternativeparameter\c!command}
\ifx\p_command\empty
\listalternativeparameter\c!before
\vbox {
\forgetall
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all {
\ifconditional\c_lists_show_number
% \ifconditional\c_lists_has_page
@@ -878,6 +913,8 @@
}
\listalternativeparameter\c!after
\else
+ \noindent % otherwise annotations are mirrored up
+\typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\p_command\currentlistentrynumber\currentlistentrytitle\currentlistentrypagenumber
}
@@ -889,6 +926,8 @@
\startsetups[\??listrenderings:abc]
\endgraf % are we grouped?
+ \typo_injectors_check_list
+ % \advance % yes or no ... \rightskip is also honored
\leftskip\listparameter\c!margin % after \endgraf !
\listparameter\c!before
\endgraf
@@ -907,7 +946,8 @@
\scratchwidth\p_width
\fi
\fi\fi
- \noindent
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\setlocalhsize
\hsize\localhsize
@@ -917,11 +957,19 @@
\scratchhsize\hsize
\ifconditional\c_lists_has_number
\ifconditional\c_lists_show_number
- \setbox\b_strc_lists_number\hbox \strc_lists_get_reference_attribute\v!number \ifdim\scratchwidth>\zeropoint to \scratchwidth \fi {
- \strc_lists_set_style_color\c!numberstyle\c!numbercolor\v!number
- \listparameter\c!numbercommand\currentlistsymbol
- \hfill
- }
+ \setbox\b_strc_lists_number
+% \hbox
+% \strc_lists_get_reference_attribute\v!number
+% \ifdim\scratchwidth>\zeropoint to \scratchwidth \fi
+ \simplealignedboxplus
+ \scratchwidth
+ {\listparameter\c!numberalign}
+ {\strc_lists_get_reference_attribute\v!number}
+ {
+ \strc_lists_set_style_color\c!numberstyle\c!numbercolor\v!number
+ \listparameter\c!numbercommand\currentlistsymbol
+% \hfill
+ }
\else
\setbox\b_strc_lists_number\emptyhbox
\fi
@@ -951,6 +999,8 @@
\hsize\scratchhsize
\usealignparameter\listparameter
\ifdim\scratchwidth<\hsize
+ % we have leftskip so we'd better just skip back instead of messing
+ % with hang*
\edef\p_hang{\listparameter\c!hang}
\hangindent\dimexpr\wd\b_strc_lists_number+\scratchdistance\relax
\hangafter\ifx\p_hang\v!no\zerocount\else\plusone\fi
@@ -963,7 +1013,7 @@
\scratchdistance\zeropoint
\fi
\parindent\zeropoint
- \dontleavehmode
+ \dontleavehmode % this nils hang: i need to figure out why
% % topaligned
%
% \scratchdimen\wd\b_strc_lists_number
@@ -1049,7 +1099,7 @@
\listparameter\c!numbercommand\currentlistsymbol
\listparameter\c!right
\endgroup
- \kern.5em
+ \kern.5\emwidth\relax
\nobreak
\fi
\fi
@@ -1068,7 +1118,7 @@
\ifconditional\c_lists_has_page
\ifconditional\c_lists_show_page
\nobreak
- \hskip.75em\relax
+ \hskip.75\emwidth\relax
\nobreak
\strc_lists_set_reference_attribute\v!pagenumber
\strc_lists_set_style_color\c!pagestyle\c!pagecolor\v!pagenumber
@@ -1088,7 +1138,9 @@
\stopsetups
\startsetups[\??listrenderings:e]
- \noindent
+ \typo_injectors_check_list
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\letlistparameter\c!depth\zeropoint
\letlistparameter\c!color\empty
@@ -1108,7 +1160,9 @@
\stopsetups
\startsetups[\??listrenderings:f]
- \noindent
+ \typo_injectors_check_list
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\dosetraggedhbox{\listparameter\c!align}%
\raggedbox {
@@ -1126,7 +1180,9 @@
\stopsetups
\startsetups[\??listrenderings:g]
- \noindent
+ \typo_injectors_check_list
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\midaligned {
\strc_lists_set_style_color\c!style\c!color\v!all
@@ -1147,19 +1203,115 @@
\definelistalternative
[\v!interactive]
- [\c!renderingsetup=\??listrenderings:interactive]
+ [\c!renderingsetup=\??listrenderings:interactive,
+ \c!before=\endgraf, % new per 2014-11-08
+ \c!after=\endgraf] % new per 2014-11-08
\startsetups[\??listrenderings:interactive]
- \edef\p_command{\listalternativeparameter\c!command}%
- \ifx\p_command\empty
- [\currentlist: \currentlistentrynumber\space -- \currentlistentrytitle\space -- \currentlistentrypagenumber]%
- \else
- \listparameter\c!before
- \hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
- \p_command\currentlistentrynumber\currentlistentrytitle\currentlistentrypagenumber
- }
- \listparameter\c!after
- \fi
+ \edef\p_command{\listalternativeparameter\c!command}%
+ \typo_injectors_check_list
+ \listparameter\c!before
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
+ \hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
+ \ifx\p_command\empty
+ [
+ \currentlist:\space
+ \currentlistentrynumber
+ \space\emdash\space
+ \currentlistentrytitle
+ \space\emdash\space
+ \currentlistentrypagenumber
+ ]
+ \else
+ \p_command\currentlistentrynumber\currentlistentrytitle\currentlistentrypagenumber
+ \fi
+ }
+ \listparameter\c!after
+\stopsetups
+
+%D One special for publications (as Alan loves to hangindent). No fonts and
+%D such (for now). No interaction either as that is dealt with elsewhere.
+%D
+%D \currentlistsymbol
+%D \currentlistentry
+%D \currentlistentrypagenumber % not really used
+
+\definelistalternative
+ [\v!paragraph]
+ [\c!filler=\hskip.25\emwidth,
+ \c!renderingsetup=\??listrenderings:\v!paragraph]
+
+\startsetups[\??listrenderings:\v!paragraph]
+ \endgraf % are we grouped?
+ \typo_injectors_check_list % ?
+ \listparameter\c!before
+ \endgraf
+ \begingroup
+ \forgetall
+ \noindent
+ \parindent\zeropoint
+ \edef\p_width{\listparameter\c!width}%
+ \edef\p_distance{\listparameter\c!distance}% we are nice for bib users
+ \edef\p_margin{\listparameter\c!margin}% we are nice for bib users
+ \ifx\p_distance\v!none
+ \scratchdistance\zeropoint
+ \else
+ \scratchdistance\p_distance
+ \fi
+ \ifx\p_margin\v!none
+ \scratchoffset\zeropoint
+ \else
+ \scratchoffset\p_margin
+ \fi
+ \ifx\p_width\v!fit
+ \scratchwidth\zeropoint
+ \leftskip\scratchoffset
+ \else
+ \scratchwidth\p_width
+ \ifdim\scratchoffset=\zeropoint
+ \leftskip\dimexpr\scratchwidth+\scratchdistance\relax
+ \else
+ \leftskip\scratchoffset
+ \fi
+ \fi
+ \usealignparameter\listparameter
+ \hskip-\leftskip
+ \ifconditional\c_lists_has_number
+ \ifconditional\c_lists_show_number
+ \setbox\scratchbox
+ \simplealignedbox\scratchwidth{\listparameter\c!numberalign}
+ \bgroup
+ \useliststyleandcolor\c!numberstyle\c!numbercolor
+ \currentlistsymbol
+ \egroup
+ \ifdim\wd\scratchbox>\zeropoint
+ \box\scratchbox
+ \hskip\scratchdistance\relax
+ \fi
+ \fi
+ \fi
+ \begingroup
+ \useliststyleandcolor\c!textstyle\c!textcolor
+ \setstrut
+ \begstrut
+ \currentlistentrytitle
+ \endstrut
+ \endgroup
+ \ifconditional\c_lists_has_page
+ \ifconditional\c_lists_show_page
+ \nobreak
+ \listalternativeparameter\c!filler\relax
+ \begingroup
+ \useliststyleandcolor\c!pagestyle\c!pagecolor
+ \currentlistentrypagenumber
+ \endgroup
+ \fi
+ \fi
+ \endgraf
+ \endgroup
+ \allowbreak
+ \listparameter\c!after
\stopsetups
%D List elements are packaged in such a way that we can click on them
@@ -1302,7 +1454,7 @@
\let\listlength\!!zerocount
\else
\setupcurrentlist[#settings]%
- \strc_lists_analyze{#list}{\listparameter\c!criterium}{\listparameter\c!number}%
+ \strc_lists_analyze{#list}{\listparameter\c!criterium}{\listparameter\c!reference}%
\normalexpanded{\endgroup\noexpand\edef\noexpand\listlength{\structurelistsize}}%
\fi
\strc_lists_set_mode}
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index b3a6e8f35..3af9113bf 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -10,23 +10,37 @@ if not modules then modules = { } end modules ['strc-mar'] = {
-- todo: only commands.* print to tex, native marks return values
local insert, concat = table.insert, table.concat
-local tostring, next, rawget = tostring, next, rawget
+local tostring, next, rawget, type = tostring, next, rawget, type
local lpegmatch = lpeg.match
local context = context
local commands = commands
+local implement = interfaces.implement
+
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local traversenodes = nodes.traverse
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local getbox = nuts.getbox
+
+local traversenodes = nuts.traverse
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local texsetattribute = tex.setattribute
-local texgetbox = tex.getbox
local a_marks = attributes.private("structure","marks")
@@ -106,9 +120,9 @@ end
local function sweep(head,first,last)
for n in traversenodes(head) do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local a = n[a_marks]
+ local a = getattr(n,a_marks)
if not a then
-- next
elseif first == 0 then
@@ -118,7 +132,7 @@ local function sweep(head,first,last)
end
elseif id == hlist_code or id == vlist_code then
if boxes_too then
- local a = n[a_marks]
+ local a = getattr(n,a_marks)
if not a then
-- next
elseif first == 0 then
@@ -127,7 +141,7 @@ local function sweep(head,first,last)
last = a
end
end
- local list = n.list
+ local list = getlist(n)
if list then
first, last = sweep(list,first,last)
end
@@ -143,9 +157,9 @@ setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s
local lasts = { }
function marks.synchronize(class,n,option)
- local box = texgetbox(n)
+ local box = getbox(n)
if box then
- local first, last = sweep(box.list,0,0)
+ local first, last = sweep(getlist(box),0,0)
if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
report_marks("action %a, class %a, box %a","retain at synchronize",class,n)
@@ -204,7 +218,11 @@ local function resolve(t,k)
end
function marks.define(name,settings)
- settings = settings or { }
+ if not settings then
+ settings = { }
+ elseif type(settings) == "string" then
+ settings = { parent = settings }
+ end
data[name] = settings
local parent = settings.parent
if parent == nil or parent == "" or parent == name then
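The new string branch in marks.define lets the TeX side pass just the parent name (which is what the \clf_definemarking call later in this patch does) instead of building a table. A minimal self-contained sketch of that normalization, with hypothetical sample markings and a local `data` table standing in for the module storage:

    -- sketch only: normalise a string argument into a settings table,
    -- as marks.define now does
    local data = { }

    local function define(name, settings)
        if not settings then
            settings = { }
        elseif type(settings) == "string" then
            settings = { parent = settings }
        end
        data[name] = settings
    end

    define("subsection", "section")        -- what \clf_definemarking passes
    define("chapter", { parent = "" })     -- the table form still works
    print(data.subsection.parent)          --> section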
@@ -699,17 +717,17 @@ end
-- interface
-commands.definemarking = marks.define
-commands.relatemarking = marks.relate
-commands.setmarking = marks.set
-commands.resetmarking = marks.reset
-commands.synchronizemarking = marks.synchronize
-commands.getmarking = marks.fetch
-commands.fetchonemark = marks.fetchonemark
-commands.fetchtwomarks = marks.fetchtwomarks
-commands.fetchallmarks = marks.fetchallmarks
-
-function commands.doifelsemarking(str) -- can be shortcut
- commands.doifelse(marks.exists(str))
-end
+implement { name = "markingtitle", actions = marks.title, arguments = { "string", "string" } }
+implement { name = "markingnumber", actions = marks.number, arguments = { "string", "string" } }
+
+implement { name = "definemarking", actions = marks.define, arguments = { "string", "string" } }
+implement { name = "relatemarking", actions = marks.relate, arguments = { "string", "string" } }
+implement { name = "setmarking", actions = marks.set, arguments = { "string", "string" } }
+implement { name = "resetmarking", actions = marks.reset, arguments = { "string" } }
+implement { name = "synchronizemarking", actions = marks.synchronize, arguments = { "string", "integer", "string" } }
+implement { name = "getmarking", actions = marks.fetch, arguments = { "string", "string", "string" } }
+implement { name = "fetchonemark", actions = marks.fetchonemark, arguments = { "string", "string", "string" } }
+implement { name = "fetchtwomarks", actions = marks.fetchtwomarks, arguments = { "string", "string" } }
+implement { name = "fetchallmarks", actions = marks.fetchallmarks, arguments = { "string", "string" } }
+implement { name = "doifelsemarking", actions = { marks.exists, commands.doifelse }, arguments = "string" }
diff --git a/tex/context/base/strc-mar.mkiv b/tex/context/base/strc-mar.mkiv
index 3685b66a7..8bd8c094e 100644
--- a/tex/context/base/strc-mar.mkiv
+++ b/tex/context/base/strc-mar.mkiv
@@ -56,35 +56,31 @@
\unexpanded\def\synchronizemarking{\dotripleargument\strc_markings_synchronize}
\appendtoks
- \ctxcommand{definemarking("\currentmarking",{ parent = "\currentmarkingparent" })}%
+ \clf_definemarking{\currentmarking}{\currentmarkingparent}%
\to \everydefinemarking
\def\strc_markings_relate[#1][#2]%
- {\ctxcommand{relatemarking("#1","#2")}}
+ {\clf_relatemarking{#1}{#2}}
\def\strc_markings_set[#1]#2%
{\ifconditional\inhibitsetmarking
% nothing
\else
\doifelse{\namedmarkingparameter{#1}\c!expansion}\v!yes
- {\ctxcommand{setmarking("#1",\!!bs#2\!!es)}}
- {\ctxcommand{setmarking("#1",\!!bs\detokenize{#2}\!!es)}}%
+ {\clf_setmarking{#1}{#2}}
+ {\clf_setmarking{#1}{\detokenize{#2}}}%
\fi}
\def\strc_markings_reset[#1]%
- {\ctxcommand{resetmarking("#1")}}
+ {\clf_resetmarking{#1}}
\def\strc_markings_synchronize[#1][#2][#3]% #1=class #2=boxnumber (some day also name) #3=options, maybe second argument table
- {\ifvoid#2\else\ctxcommand{synchronizemarking("#1",\number#2,"#3")}\fi}
+ {\ifvoid#2\else\clf_synchronizemarking{#1}#2{#3}\fi}
-% \def\doifelsemarking#1% why no \unexpanded
-% {\ctxcommand{doifelsemarking("#1")}}
+\def\doifelsemarking#1% no \noexpanded
+ {\clf_doifelsemarking{#1}}
-% \def\doifelsemarking#1%
-% {\normalexpanded{\noexpand\ctxcommand{doifelsemarking("\noexpand\detokenize{#1}")}}}
-
-\def\doifelsemarking#1%
- {\ctxcommand{doifelsemarking(\!!bs#1\!!es)}}
+\let\doifmarkingelse \doifelsemarking
% \appendtoks
% \strc_markings_synchronize[\v!page][\normalpagebox][\v!keep]% keep if no marks
@@ -116,25 +112,25 @@
\setsystemmode\v!marking
\the\everymarking
\ifthirdargument
- \ctxcommand{getmarking("#1","#2","#3")}%
+ \clf_getmarking{#1}{#2}{#3}%
\else
- \ctxcommand{getmarking("#1","\v!page","#2")}%
+ \clf_getmarking{#1}{\v!page}{#2}%
\fi
\endgroup}}
% the fetchers are fully expandable: [name][method]
-\def\fetchonemark[#1]#2[#3]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchonemark ("#1","\v!page","#2")}\fi}
-\def\fetchtwomarks [#1]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchtwomarks("#1","\v!page")}\fi}
-\def\fetchallmarks [#1]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchallmarks("#1","\v!page")}\fi}
+\def\fetchonemark[#1]#2[#3]{\ifconditional\inhibitgetmarking\else\clf_fetchonemark {#1}{\v!page}{#2}\fi}
+\def\fetchtwomarks [#1]{\ifconditional\inhibitgetmarking\else\clf_fetchtwomarks{#1}{\v!page}\fi}
+\def\fetchallmarks [#1]{\ifconditional\inhibitgetmarking\else\clf_fetchallmarks{#1}{\v!page}\fi}
\let\fetchmark\fetchonemark
% also fully expandable but here we have: [name][range][method]
-\def\fetchonemarking[#1]#2[#3]#4[#5]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchonemark ("#1","#3","#5")}\fi}
-\def\fetchtwomarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchtwomarks("#1","#3")}\fi}
-\def\fetchallmarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchallmarks("#1","#3")}\fi}
+\def\fetchonemarking[#1]#2[#3]#4[#5]{\ifconditional\inhibitgetmarking\else\clf_fetchonemark {#1}{#3}{#5}\fi}
+\def\fetchtwomarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\clf_fetchtwomarks{#1}{#3}\fi}
+\def\fetchallmarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\clf_fetchallmarks{#1}{#3}\fi}
\let\fetchmarking\fetchonemarking
diff --git a/tex/context/base/strc-mat.lua b/tex/context/base/strc-mat.lua
index 98b1e996c..87f35ed1d 100644
--- a/tex/context/base/strc-mat.lua
+++ b/tex/context/base/strc-mat.lua
@@ -6,28 +6,35 @@ if not modules then modules = { } end modules ['strc-mat'] = {
license = "see context related readme files"
}
+----- copytable = table.copy
+
local structures = structures
-local lists = structures.lists
-local sections = structures.sections
-local floats = structures.floats
-local helpers = structures.helpers
-local formulas = structures.formulas
+local lists = structures.lists
+local sections = structures.sections
+local floats = structures.floats
+local helpers = structures.helpers
+local formulas = structures.formulas -- not used but reserved
-lists.enhancers = lists.enhancers or { }
+----- context = context
+----- simplify = helpers.simplify
-- maybe we want to do clever things with formulas, the store might go away
-local formuladata = { }
-
-function formulas.store(data)
- formuladata[#formuladata+1] = data
- context(#formuladata)
-end
-
-function formulas.current()
- return formuladata[#formuladata]
-end
+-- local formuladata = { }
+--
+-- function formulas.store(data)
+-- formuladata[#formuladata+1] = data
+-- context(#formuladata)
+-- end
+--
+-- function formulas.current()
+-- return formuladata[#formuladata]
+-- end
+
+-- function formulas.simplify(entry)
+-- return simplify(copytable(entry or formuladata[#formuladata]))
+-- end
function helpers.formulanumber(data,spec)
if data then
@@ -38,10 +45,9 @@ function helpers.formulanumber(data,spec)
end
end
-function formulas.simplify(entry)
- return helpers.simplify(table.copy(entry or formuladata[#formuladata]))
-end
-
function lists.formulanumber(name,n,spec)
- helpers.formulanumber(lists.result[n])
+ local result = lists.result
+ if result then
+ helpers.formulanumber(result[n])
+ end
end
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index b9263cdb0..22fa54889 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -360,7 +360,7 @@
\def\strc_formulas_handle_sub_numbering_indeed
{\let\strc_formulas_handle_sub_numbering\relax % else error: see math/numbering-001.tex
- \doiftextelse\currentsubformulasuffix
+ \doifelsetext\currentsubformulasuffix
{\strc_counters_setown_sub\v!formula\plustwo\currentsubformulasuffix}
{\strc_counters_increment_sub\v!formula\plustwo}%
\placecurrentformulanumber}
@@ -528,7 +528,7 @@
\unexpanded\def\strc_formulas_start_formula_indeed[#1][#2]% setting leftskip adaption is slow !
{\bgroup % HERE
\def\currentformula{#1}%
- \dostarttagged\t!formula\currentformula
+ \dostarttaggedchained\t!formula\currentformula\??formula
\the\everybeforedisplayformula
\d_strc_formulas_display_skip_par\parskip\relax
%\formulastrutdp\strutdepth
@@ -564,7 +564,7 @@
\unexpanded\def\strc_formulas_start_formula_nested#1%
{\bgroup
\let\strc_formulas_stop_formula\strc_formulas_stop_formula_nested
- \dostarttagged\t!subformula}
+ \dostarttagged\t!subformula\empty}
\unexpanded\def\strc_formulas_stop_formula_nested
{\dostoptagged
@@ -630,13 +630,13 @@
% \prevdepth-\maxdimen % TeXbook page 79-80
% \fi
% \noindent % else funny hlist with funny baselineskip
-% $$% \Ustartdisplaymath
+% $$% \Ucheckedstartdisplaymath
% \setdisplaydimensions
% \startinnermath}
%
% \unexpanded\def\stopdisplaymath
% {\stopinnermath
-% $$% \Ustopdisplaymath
+% $$% \Ucheckedstopdisplaymath
% \par
% \afterdisplayspace
% \par
@@ -663,13 +663,13 @@
\fi
\fi
\noindent % else funny hlist with funny baselineskip
- $$% \Ustartdisplaymath
+ \Ucheckedstartdisplaymath
\setdisplaydimensions
\startinnermath}
\unexpanded\def\stopdisplaymath
{\stopinnermath
- $$% \Ustopdisplaymath
+ \Ucheckedstopdisplaymath
\par
\ifvmode
\ifcase\c_strc_formulas_space_model
@@ -835,14 +835,14 @@
\def\strc_formulas_place[#1]%
{\def\currentplaceformulareference{#1}%
\let\currentplaceformulasuffix\empty
- \doifnextbgroupelse\strc_formulas_place_yes\strc_formulas_place_nop\strc_formulas_place_nop} % [ref]{}
+ \doifelsenextbgroup\strc_formulas_place_yes\strc_formulas_place_nop\strc_formulas_place_nop} % [ref]{}
\def\strc_formulas_place_yes#1%
{\def\currentplaceformulasuffix{#1}%
\strc_formulas_place_nop}
\def\strc_formulas_place_nop
- {\doifnextcharelse$\strc_formulas_place_pickup\strc_formulas_place_indeed} % [ref]$$ [ref]\start
+ {\doifelsenextchar$\strc_formulas_place_pickup\strc_formulas_place_indeed} % [ref]$$ [ref]\start
\def\strc_formulas_place_indeed
{\strc_formulas_place_numbering}
diff --git a/tex/context/base/strc-not.lua b/tex/context/base/strc-not.lua
index 40b78d59f..71eccf1ce 100644
--- a/tex/context/base/strc-not.lua
+++ b/tex/context/base/strc-not.lua
@@ -38,6 +38,8 @@ local variables = interfaces.variables
local context = context
local commands = commands
+local implement = interfaces.implement
+
-- state: store, insert, postpone
local function store(tag,n)
@@ -69,9 +71,11 @@ end
notes.store = store
-function commands.storenote(tag,n)
- context(store(tag,n))
-end
+implement {
+ name = "storenote",
+ actions = { store, context },
+ arguments = { "string", "integer" }
+}
local function get(tag,n) -- tricky ... only works when defined
local nd = notedata[tag]
@@ -108,9 +112,11 @@ end
notes.listindex = listindex
-function commands.notelistindex(tag,n)
- context(listindex(tag,n))
-end
+implement {
+ name = "notelistindex",
+ actions = { listindex, context },
+ arguments = { "string", "integer" }
+}
local function setstate(tag,newkind)
local state = notestates[tag]
@@ -144,18 +150,28 @@ end
notes.setstate = setstate
notes.getstate = getstate
-commands.setnotestate = setstate
+implement {
+ name = "setnotestate",
+ actions = setstate,
+ arguments = { "string", "string" }
+}
-function commands.getnotestate(tag)
- context(getstate(tag))
-end
+implement {
+ name = "getnotestate",
+ actions = { getstate, context },
+ arguments = "string"
+}
function notes.define(tag,kind,number)
local state = setstate(tag,kind)
state.number = number
end
-commands.definenote = notes.define
+implement {
+ name = "definenote",
+ actions = notes.define,
+ arguments = { "string", "string", "integer" }
+}
function notes.save(tag,newkind)
local state = notestates[tag]
@@ -184,8 +200,8 @@ function notes.restore(tag,forcedstate)
end
end
-commands.savenote = notes.save
-commands.restorenote = notes.restore
+implement { name = "savenote", actions = notes.save, arguments = { "string", "string" } }
+implement { name = "restorenote", actions = notes.restore, arguments = { "string", "string" } }
local function hascontent(tag)
local ok = notestates[tag]
@@ -205,9 +221,11 @@ end
notes.hascontent = hascontent
-function commands.doifnotecontent(tag)
- commands.doif(hascontent(tag))
-end
+implement {
+ name = "doifnotecontent",
+ actions = { hascontent, commands.doif },
+ arguments = "string",
+}
local function internal(tag,n)
local nd = get(tag,n)
@@ -243,9 +261,11 @@ end
notes.doifonsamepageasprevious = onsamepageasprevious
-function commands.doifnoteonsamepageasprevious(tag)
- commands.doifelse(onsamepageasprevious(tag))
-end
+implement {
+ name = "doifnoteonsamepageasprevious",
+ actions = { onsamepageasprevious, commands.doifelse },
+ arguments = "string",
+}
function notes.checkpagechange(tag) -- called before increment !
local nd = notedata[tag] -- can be unset at first entry
@@ -277,7 +297,10 @@ function notes.postpone()
end
end
-commands.postponenotes = notes.postpone
+implement {
+ name = "postponenotes",
+ actions = notes.postpone
+}
function notes.setsymbolpage(tag,n,l)
local l = l or listindex(tag,n)
@@ -297,7 +320,11 @@ function notes.setsymbolpage(tag,n,l)
end
end
-commands.setnotesymbolpage = notes.setsymbolpage
+implement {
+ name = "setnotesymbolpage",
+ actions = notes.setsymbolpage,
+ arguments = { "string", "integer" }
+}
local function getsymbolpage(tag,n)
local li = internal(tag,n)
@@ -351,11 +378,11 @@ notes.getsymbolpage = getsymbolpage
notes.getnumberpage = getnumberpage
notes.getdeltapage = getdeltapage
-function commands.notesymbolpage(tag,n) context(getsymbolpage(tag,n)) end
-function commands.notenumberpage(tag,n) context(getnumberpage(tag,n)) end
-function commands.notedeltapage (tag,n) context(getdeltapage (tag,n)) end
+implement { name = "notesymbolpage", actions = { getsymbolpage, context }, arguments = { "string", "integer" } }
+implement { name = "notenumberpage", actions = { getnumberpage, context }, arguments = { "string", "integer" } }
+implement { name = "notedeltapage", actions = { getdeltapage, context }, arguments = { "string", "integer" } }
-function commands.flushnotes(tag,whatkind,how) -- store and postpone
+local function flushnotes(tag,whatkind,how) -- store and postpone
local state = notestates[tag]
local kind = state.kind
if kind == whatkind then
@@ -411,15 +438,26 @@ function commands.flushnotes(tag,whatkind,how) -- store and postpone
end
end
-function commands.flushpostponednotes()
+local function flushpostponednotes()
if trace_notes then
report_notes("flushing all postponed notes")
end
for tag, _ in next, notestates do
- commands.flushnotes(tag,"postpone")
+ flushnotes(tag,"postpone")
end
end
+implement {
+ name = "flushpostponednotes",
+ actions = flushpostponednotes
+}
+
+implement {
+ name = "flushnotes",
+ actions = flushnotes,
+ arguments = { "string", "string", "string" }
+}
+
function notes.resetpostponed()
if trace_notes then
report_notes("resetting all postponed notes")
@@ -432,13 +470,17 @@ function notes.resetpostponed()
end
end
-function commands.notetitle(tag,n)
- command.savedlisttitle(tag,notedata[tag][n])
-end
+implement {
+ name = "notetitle",
+ actions = function(tag,n) lists.savedlisttitle(tag,notedata[tag][n]) end,
+ arguments = { "string", "integer" }
+}
-function commands.noteprefixednumber(tag,n,spec)
- commands.savedlistprefixednumber(tag,notedata[tag][n])
-end
+implement {
+ name = "noteprefixednumber",
+ actions = function(tag,n) lists.savedlistprefixednumber(tag,notedata[tag][n]) end,
+ arguments = { "string", "integer" }
+}
function notes.internalid(tag,n)
local nd = get(tag,n)
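
The strc-not.lua hunks above systematically replace commands.* wrapper functions with interfaces.implement registrations; wherever the old wrapper was of the form context(f(...)), the registration lists the actions as a chain such as { store, context } or { hascontent, commands.doif }. The following is only an illustrative sketch of that chaining idea, not ConTeXt's real interfaces.implement (which also generates the \clf_... callers used in strc-not.mkvi below); the chained helper and the stubs are invented for the example.

-- Illustrative only: a hand-rolled "chain of actions", not interfaces.implement.
local notedata = { }

local function store(tag,n)              -- simplified stand-in for notes.store above
    local nd = notedata[tag] or { }
    notedata[tag] = nd
    nd[#nd+1] = n
    return #nd
end

local function context(v)                -- stand-in: the real context() pipes back to TeX
    print("-> " .. tostring(v))
end

-- feed the result of each action into the next one, as { store, context } suggests
local function chained(actions)
    return function(...)
        local result = { ... }
        for i=1,#actions do
            result = { actions[i](table.unpack(result)) }   -- Lua 5.2+: table.unpack
        end
        return table.unpack(result)
    end
end

local storenote = chained { store, context }

storenote("footnote",123)                -- prints "-> 1": first stored note for this tag
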
diff --git a/tex/context/base/strc-not.mkvi b/tex/context/base/strc-not.mkvi
index a1aecf83a..25a1072a3 100644
--- a/tex/context/base/strc-not.mkvi
+++ b/tex/context/base/strc-not.mkvi
@@ -170,7 +170,7 @@
\ifx\p_counter\empty %
\let\p_counter\currentnotation
\fi
- \doifcounterelse\p_counter\donothing{\strc_notes_define_counter\p_counter}%
+ \doifelsecounter\p_counter\donothing{\strc_notes_define_counter\p_counter}%
\letnotationparameter\s!counter\p_counter
%\strc_notes_setup_counter\currentnotation
\to \everydefinenotation
@@ -181,6 +181,7 @@
\unexpanded\setvalue{\??constructioninitializer\v!notation}%
{\let\currentnotation \currentconstruction
\let\constructionparameter \notationparameter
+ \let\constructionnamespace \??notation
\let\detokenizedconstructionparameter\detokenizednotationparameter
\let\letconstructionparameter \letnotationparameter
\let\useconstructionstyleandcolor \usenotationstyleandcolor
@@ -231,7 +232,7 @@
\ifnotesenabled
\strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
\fi
- \doifnextoptionalelse\strc_notations_command_yes\strc_notations_command_nop}
+ \doifelsenextoptionalcs\strc_notations_command_yes\strc_notations_command_nop}
\unexpanded\def\strc_notations_command_nop#title%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#title},\c!bookmark=,\c!list=][]%
@@ -240,7 +241,7 @@
\normalexpanded{\endgroup\noteparameter\c!next}}
\unexpanded\def\strc_notations_command_yes[#optional]%
- {\doifassignmentelse{#optional}\strc_notations_command_assignment\strc_notations_command_argument[#optional]}
+ {\doifelseassignment{#optional}\strc_notations_command_assignment\strc_notations_command_argument[#optional]}
\unexpanded\def\strc_notations_command_assignment[#settings]%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title=,\c!bookmark=,\c!list=,#settings][]%
@@ -265,7 +266,7 @@
% \normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
% {\normaldef\noexpand\strc_pickup_yes[##1]##2\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[##1]{##2}}%
% \normaldef\noexpand\strc_pickup_nop ##1\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {##1}}}%
-% \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+% \doifnextoptionalcselse\strc_pickup_yes\strc_pickup_nop}
\unexpanded\def\strc_notations_start#tag#stoptag%
{\begingroup
@@ -278,7 +279,7 @@
\normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
{\def\noexpand\strc_pickup_yes[#one]#two\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[#one]{#two}}%
\def\noexpand\strc_pickup_nop #one\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {#one}}}%
- \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+ \doifelsenextoptionalcs\strc_pickup_yes\strc_pickup_nop}
\unexpanded\def\strc_notations_start_yes[#reference]#title%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#reference},\c!title={#title},\c!bookmark=,\c!list=][]%
@@ -382,7 +383,7 @@
\appendtoks
\ifx\currentnoteparent\empty
- \doifinsertionelse\currentnote
+ \doifelseinsertion\currentnote
\donothing
{\defineinsertion[\currentnote]% could be an option
\normalexpanded{\t_strc_notes{\the\t_strc_notes\noexpand\strc_notes_process_list{\currentnote}}}}%
@@ -392,7 +393,7 @@
\setexpandednoteparameter\s!insert{\namednoteparameter\currentnoteparent\s!insert}%
\definenotation[\currentnote][\currentnoteparent][\c!type=\v!note]%
\fi
- \ctxcommand{definenote("\currentnote","insert",\number\currentnoteinsertionnumber)}%
+ \clf_definenote{\currentnote}{insert}\currentnoteinsertionnumber\relax
\to \everydefinenote
% maybe we will share this at some point:
@@ -437,7 +438,7 @@
\strc_notes_inject_dummy
\else
\begingroup
- \edef\currentnotenumber{\ctxcommand{storenote("\currentnote",\currentconstructionlistentry)}}%
+ \edef\currentnotenumber{\clf_storenote{\currentnote}\currentconstructionlistentry}%
\settrue\processingnote
\ifconditional\c_strc_notes_skip
\globallet\lastnotesymbol\strc_notes_inject_symbol_nop
@@ -460,7 +461,11 @@
\else\ifconditional\inlocalnotes % todo: per note class
\global\settrue\postponednote
\else
+\ifconditional\c_strc_notes_delayed
+ % probably end notes
+\else
\handlenoteinsert\currentnote\currentnotenumber % either an insert or just delayed
+\fi
\fi\fi
\endgroup
\fi
@@ -515,7 +520,7 @@
\endcsname}
\setvalue{\??noteinteractioninline\v!yes}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notenumberpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notenumberpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_get_reference_attribute_symbol{attr\referenceattribute\currentreferenceattribute}%
\let \strc_notes_set_style_color_inline \strc_notes_set_style_color_inline_yes}
@@ -535,7 +540,7 @@
% page(...) : we could have a dedicated one
\setvalue{\??noteinteractiondisplay\v!yes}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notesymbolpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_set_reference_attribute_number{\attribute\referenceattribute\currentreferenceattribute}%
\let \strc_notes_set_reference_attribute_text \donothing
\let \strc_notes_set_destination_attribute_text\donothing
@@ -548,7 +553,7 @@
\let\strc_notes_set_style_color_display \strc_notes_set_style_color_display_nop}
\setvalue{\??noteinteractiondisplay\v!all}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notesymbolpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_set_reference_attribute_text {\attribute\referenceattribute\currentreferenceattribute}%
%\strc_references_set_simple_page_reference{note:\cldcontext{structures.notes.internalid("\currentnote",\currentnotenumber)}}%
\strc_references_set_simple_page_reference{note:\currentnote:\currentnotenumber}%
@@ -557,7 +562,7 @@
\let \strc_notes_set_style_color_display \strc_notes_set_style_color_display_yes}
\setvalue{\??noteinteractiondisplay\v!text}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notesymbolpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_set_reference_attribute_text {\attribute\referenceattribute\currentreferenceattribute}%
%\strc_references_set_simple_page_reference{note:\cldcontext{structures.notes.internalid("\currentnote",\currentnotenumber)}}%
\strc_references_set_simple_page_reference{note:\currentnote:\currentnotenumber}%
@@ -582,7 +587,7 @@
{\iftrialtypesetting
% keep
\else\ifx\currentcolorparameter\empty
- \scratchcounter\ctxcommand{notedeltapage("\currentnote",\currentnotenumber)}\relax % todo calculate once
+ \scratchcounter\clf_notedeltapage{\currentnote}\currentnotenumber\relax % todo calculate once
\setlocationcolorspecified\scratchcounter
\fi\fi}
@@ -601,7 +606,7 @@
% in mkii the pointer only showed up in pagewise notes
\unexpanded\def\strc_notes_inject_pointer % todo calculate once
- {\ifcase\ctxcommand{notedeltapage("\currentnote",\currentnotenumber)}\relax
+ {\ifcase\clf_notedeltapage{\currentnote}\currentnotenumber\relax\relax
% unknown
\or
% same page
@@ -634,7 +639,7 @@
\unexpanded\def\strc_notes_inject_symbol_indeed#synchronize%
{\removeunwantedspaces
- \doifitalicelse\/\donothing % Charles IV \footnote{the fourth}
+ \doifelseitalic\/\donothing % Charles IV \footnote{the fourth}
\ifdim\lastkern=\notesignal
% \kern\noteparameter\c!distance % yes or no note font? or main text
\strc_notes_inject_separator
@@ -650,18 +655,18 @@
\hbox \strc_notes_get_reference_attribute_symbol \bgroup
\dostarttagged\t!descriptionsymbol\currentnote
\dotagsetnotesymbol
- \noteparameter\c!textcommand{\ctxcommand{noteprefixednumber("\currentnote",\currentnotenumber)}}%
+ \noteparameter\c!textcommand{\clf_noteprefixednumber{\currentnote}\currentnotenumber\relax}%
% the next one can cycle so we need to make sure it has no advance width
\doif{\noteparameter\c!indicator}\v!yes\strc_notes_inject_pointer
\dostoptagged
\egroup
\endgroup
- \dostoptagged % check
+% \dostoptagged % check
\globallet\lastnotesymbol\relax}
\unexpanded\def\strc_notes_inject_dummy % temp hack
{\removeunwantedspaces
- \doifitalicelse\/\donothing % Charles IV \footnote{the fourth}
+ \doifelseitalic\/\donothing % Charles IV \footnote{the fourth}
\ifdim\lastkern=\notesignal
% \kern\noteparameter\c!distance % yes or no note font? or main text
\strc_notes_inject_separator
@@ -756,7 +761,9 @@
%appendtoks \notesenabledfalse \to \everymarking
\appendtoks \notesenabledfalse \to \everybeforepagebody
-\appendtoks \notesenabledfalse \to \everystructurelist % quick hack
+\appendtoks \notesenabledfalse \to \everystructurelist % quick hack
+\appendtoks \notesenabledfalse \to \everysimplifycommands % quick hack
+\appendtoks \notesenabledfalse \to \everypreroll % quick hack
%D Often we need to process the whole set of notes and to make that
%D fast, we use a token register:
@@ -1023,7 +1030,7 @@
\def\strc_notes_set_location_text % we don't use inserts anyway (e.g. endnotes)
{\settrue\c_strc_notes_delayed
- \ctxcommand{setnotestate("\currentnote","store")}%
+ \clf_setnotestate{\currentnote}{store}%
\page_inserts_set_location\currentnoteinsertion\v!text % \setupinsertion[\currentnote][\c!location=\v!text]%
\global\count\currentnoteinsertionnumber\zerocount
\global\dimen\currentnoteinsertionnumber\maxdimen
@@ -1175,6 +1182,7 @@
\insert\currentnoteinsertionnumber\bgroup
\the\everyinsidenoteinsert\relax
\usesetupsparameter\noteparameter % experimental
+ \useinterlinespaceparameter\noteparameter
\doifelse{\noteparameter\c!paragraph}\v!yes
{\nointerlineskip
\startvboxtohboxseparator
@@ -1197,7 +1205,7 @@
\edef\currentnote{#tag}%
\strc_constructions_initialize{#tag}%
\strc_notes_synchronize
- \edef\currentconstructionlistentry{\ctxcommand{notelistindex("\currentnote",#id)}}% index in list cache
+ \edef\currentconstructionlistentry{\clf_notelistindex{\currentnote}#id}% index in list cache
% as we can have collected notes (e.g. in tables) we need to recover
% \currentdescriptionattribute and \currentdescriptionsynchronize
%
@@ -1211,14 +1219,14 @@
\begstrut
\strc_references_flush_destination_nodes
\strc_notes_set_destination_attribute_text
- \strc_notes_inject_text
- \endstrut
+ \strc_notes_inject_text\relax
+ \ifvmode\obeydepth\else\endstrut\fi % \obeydepth is new per 2015-01-10
\strc_constructions_stored_stop
% \endgroup
}
\unexpanded\def\strc_notes_inject_text % hm main?
- {\ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}}
+ {\clf_savedlisttitle{\currentconstructionmain}\currentconstructionlistentry\relax}
\let\startpushnote\relax
\let\stoppushnote \relax
@@ -1242,6 +1250,7 @@
\appendtoks
\doif{\noteparameter\c!scope}\v!page{\floatingpenalty\maxdimen}% experiment
\penalty\currentnotepenalty
+ %\interlinepenalty\maxdimen % todo
\forgetall
\strc_notes_set_bodyfont
\redoconvertfont % to undo \undo calls in headings etc
@@ -1290,7 +1299,7 @@
\noteparameter\c!before
\fi
% \bgroup
- % \setupalign[\noteparameter\c!align]%
+ % \usealignparameter\noteparameter
\placenoterule % only in ..mode
% \par
% \egroup
@@ -1298,6 +1307,7 @@
\strc_notes_set_bodyfont
\setbox\scratchbox\hbox
{\strc_notes_flush_inserts}%
+ \page_postprocessors_linenumbers_deepbox\scratchbox
\setbox\scratchbox\hbox
{\setupcurrentnote
[\c!location=,
@@ -1345,11 +1355,14 @@
% idea: tag with attr and then just flush them again
\def\strc_notes_flush_global
- {\doifelse{\noteparameter\c!paragraph}\v!yes
+ {\begingroup
+ \useinterlinespaceparameter\noteparameter
+ \doifelse{\noteparameter\c!paragraph}\v!yes
{\vbox\starthboxestohbox
\iftrialtypesetting\unvcopy\else\unvbox\fi\currentnoteinsertionnumber
\stophboxestohbox}
- {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}}
+ {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}%
+ \endgroup}
%D Supporting end notes is surprisingly easy. Even better, we
%D can combine this feature with solving the common \TEX\
@@ -1387,7 +1400,7 @@
{\ifconditional\postponingnotes\else
\global\settrue\postponingnotes
\global\let\flushnotes\doflushnotes
- \ctxcommand{postponenotes()}%
+ \clf_postponenotes
\fi}
\let\flushnotes\relax
@@ -1397,7 +1410,7 @@
\begingroup
\let\flushnotes \relax
\let\postponenotes\relax
- \ctxcommand{flushpostponednotes()}% this also resets the states !
+ \clf_flushpostponednotes% this also resets the states !
\global\setfalse\postponednote
\global\setfalse\postponingnotes
\global\let\flushnotes\relax
@@ -1444,20 +1457,20 @@
\strc_counters_save{#tag}%
\strc_counters_reset{#tag}%
\fi
- \ctxcommand{savenote("#tag","store")}}
+ \clf_savenote{#tag}{store}}
\def\strc_notes_local_stop_step#tag%
{\p_strc_notes_continue{\noteparameter\c!continue}%
\ifx\p_strc_notes_continue\v!yes \else
\strc_counters_restore{#tag}%
\fi
- \ctxcommand{restorenote("#tag")}}
+ \clf_restorenote{#tag}}
\unexpanded\def\placelocalnotes
{\dodoubleempty\strc_notes_local_place}
\def\strc_notes_local_place[#tag][#settings]%
- {\doif{\ctxcommand{getnotestate("#tag")}}{store}{\strc_notes_local_place_indeed{#settings}{#tag}}}
+ {\doif{\clf_getnotestate{#tag}}{store}{\strc_notes_local_place_indeed{#settings}{#tag}}}
\def\strc_notes_local_place_indeed#settings#tag%
{\begingroup
@@ -1517,7 +1530,7 @@
\def\strc_notes_place_indeed#settings#tag% settings note
{\edef\currentnote{#tag}% grouping ?
- \doifelse{\ctxcommand{getnotestate("#tag")}}{store}
+ \doifelse{\clf_getnotestate{#tag}}{store}
\strc_notes_local_place_indeed
\strc_notes_global_place_indeed
{#settings}{#tag}}
@@ -1537,7 +1550,7 @@
{\setvalue{\??notealternative#alternative}{#command}}
\unexpanded\def\doifnotescollected#tag%
- {\ctxcommand{doifnotecontent("#tag")}}
+ {\clf_doifnotecontent{#tag}}
\def\strc_notes_place_local_alternative % will be a setup (wrapper)
{\doifnotescollected\currentnote
@@ -1564,7 +1577,7 @@
% setups ?
-\def\flushlocalnotes#tag{\ctxcommand{flushnotes("#tag","store","\noteparameter\c!criterium")}}
+\def\flushlocalnotes#tag{\clf_flushnotes{#tag}{store}{\noteparameter\c!criterium}}
\installnotealternative \v!none
{\flushlocalnotes\currentnote}
@@ -1589,7 +1602,7 @@
\setupcurrentnote[\c!location=]%
\inheritednoteframed
{\edef\currentnotewidth{\noteparameter\c!width}%
- \doifdimensionelse\currentnotewidth\donothing
+ \doifelsedimension\currentnotewidth\donothing
{\edef\currentnotewidth{\the\hsize}}%
\startsimplecolumns[\c!distance=\noteparameter\c!columndistance,\c!n=\noteparameter\c!n,\c!width=\currentnotewidth]%
\flushlocalnotes\currentnote
@@ -1714,8 +1727,8 @@
\def\strc_notes_set_bodyfont
{\let\strc_notes_set_bodyfont\relax
\restoreglobalbodyfont
- \switchtobodyfont[\noteparameter\c!bodyfont]%
- \setupalign[\noteparameter\c!align]}
+ \usebodyfontparameter\noteparameter
+ \usealignparameter\noteparameter}
%D The footnote mechanism defaults to a traditional one
%D column way of showing them. By default we precede them by
@@ -1756,7 +1769,9 @@
%D }
%D \stoptyping
-\def\doifnoteonsamepageelse[#tag]{\ctxcommand{doifnoteonsamepageasprevious("#tag")}}
+\def\doifelsenoteonsamepage[#tag]{\clf_doifnoteonsamepageasprevious{#tag}}
+
+\let\doifnoteonsamepageelse\doifelsenoteonsamepage
%D New trickery:
@@ -1778,19 +1793,19 @@
{\dodoubleempty\strc_notes_symbol}
\def\strc_notes_symbol[#tag][#reference]%
- {\dontleavehmode
- \begingroup
- \edef\currentnote{#tag}%
- \usenotestyleandcolor\c!textstyle\c!textcolor
- \ifnotesenabled
+ {\ifnotesenabled
+ \dontleavehmode
+ \begingroup
+ \edef\currentnote{#tag}%
+ \usenotestyleandcolor\c!textstyle\c!textcolor
\ifsecondargument
\unskip
\noteparameter\c!textcommand{\in[#reference]}% command here?
\else
\noteparameter\c!textcommand\lastnotesymbol % check if command double
\fi
- \fi
- \endgroup}
+ \endgroup
+ \fi}
\unexpanded\def\note
{\dodoubleempty\strc_notes_note}
@@ -1821,4 +1836,24 @@
% [ownnote]
% [\ownnotesymbol]
+% tricky:
+%
+% \enabletrackers[nodes.areas]
+% \enabletrackers[nodes.references]
+% \enabletrackers[nodes.destinations]
+%
+% \setupnotes[interaction=all,rule=no]
+% \setupinteraction[state=start,focus=standard]
+%
+% \starttext
+% \goto{\input tufte\relax}[page(2)] \par
+% \ruledhbox{\gotobox{\vtop{\input tufte\relax}}[page(2)]} \par
+% \ruledhbox{\gotobox{\vbox{\input tufte\relax}}[page(2)]} \par
+% % \completecontent
+% % \chapter{Chapter}
+% % \dorecurse{5}{\input knuth}
+% a\footnote{\input tufte\par\input ward\relax}
+% \stoptext
+
\protect \endinput
+
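
On the TeX side, the strc-not.mkvi hunks above swap every \ctxcommand{...("...")} call for the corresponding \clf_... caller provided by the implement registrations in strc-not.lua, with string arguments passed in braces and counter values terminated by \relax. A rough, runnable approximation of that pairing for definenote follows; the implement stub and the clf_ prefix handling are simplified stand-ins for illustration, not the real interface code.

-- Sketch only: mimics the registration/call pairing visible in this patch.
local notes = { }

function notes.define(tag,kind,number)   -- same signature as in strc-not.lua above
    print(("note class %q uses %s %d"):format(tag,kind,number))
end

local function implement(spec)           -- crude stand-in for interfaces.implement
    -- the real one also makes \clf_<name> callable from the TeX end
    _G["clf_" .. spec.name] = spec.actions
end

implement {
    name      = "definenote",
    actions   = notes.define,
    arguments = { "string", "string", "integer" }
}

-- TeX side, as changed above:
--   old: \ctxcommand{definenote("\currentnote","insert",\number\currentnoteinsertionnumber)}
--   new: \clf_definenote{\currentnote}{insert}\currentnoteinsertionnumber\relax

clf_definenote("footnote","insert",1)    -- prints: note class "footnote" uses insert 1
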
diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua
index 67e9b1734..0203334ff 100644
--- a/tex/context/base/strc-num.lua
+++ b/tex/context/base/strc-num.lua
@@ -20,6 +20,8 @@ local setmetatableindex = table.setmetatableindex
local trace_counters = false trackers.register("structures.counters", function(v) trace_counters = v end)
local report_counters = logs.reporter("structure","counters")
+local implement = interfaces.implement
+
local structures = structures
local helpers = structures.helpers
local sections = structures.sections
@@ -199,6 +201,24 @@ local function allocate(name,i) -- can be metatable
return ci
end
+local pattern = lpeg.P(variables.by)^-1 * lpeg.C(lpeg.P(1)^1)
+local lpegmatch = lpeg.match
+
+function counters.way(way)
+ if not way or way == "" then
+ return ""
+ else
+ return lpegmatch(pattern,way)
+ end
+end
+
+implement {
+ name = "way",
+ actions = { counters.way, context },
+ arguments = "string"
+}
+
+
function counters.record(name,i)
return allocate(name,i or 1)
end
@@ -376,10 +396,8 @@ local function check(name,data,start,stop)
end
end
-counters.reset = reset
-counters.set = set
-function counters.setown(name,n,value)
+local function setown(name,n,value)
local cd = counterdata[name]
if cd then
local d = allocate(name,n)
@@ -397,14 +415,14 @@ function counters.setown(name,n,value)
end
end
-function counters.restart(name,n,newstart,noreset)
+local function restart(name,n,newstart,noreset)
local cd = counterdata[name]
if cd then
newstart = tonumber(newstart)
if newstart then
local d = allocate(name,n)
d.start = newstart
- if not noreset then
+ if not noreset then -- why / when needed ?
reset(name,n) -- hm
end
end
@@ -425,7 +443,7 @@ function counters.restore(name)
end
end
-function counters.add(name,n,delta)
+local function add(name,n,delta)
local cd = counterdata[name]
if cd and (cd.state == v_start or cd.state == "") then
local data = cd.data
@@ -494,7 +512,12 @@ local function get(name,n,key)
end
end
-counters.get = get
+counters.reset = reset
+counters.set = set
+counters.add = add
+counters.get = get
+counters.setown = setown
+counters.restart = restart
function counters.value(name,n) -- what to do with own
return get(name,n or 1,'number') or 0
@@ -553,24 +576,7 @@ end
-- interfacing
-commands.definecounter = counters.define
-commands.setcounter = counters.set
-commands.setowncounter = counters.setown
-commands.resetcounter = counters.reset
-commands.restartcounter = counters.restart
-commands.savecounter = counters.save
-commands.restorecounter = counters.restore
-commands.addcounter = counters.add
-
-commands.rawcountervalue = function(...) context(counters.raw (...)) end
-commands.countervalue = function(...) context(counters.value (...)) end
-commands.lastcountervalue = function(...) context(counters.last (...)) end
-commands.firstcountervalue = function(...) context(counters.first (...)) end
-commands.nextcountervalue = function(...) context(counters.next (...)) end
-commands.prevcountervalue = function(...) context(counters.previous(...)) end
-commands.subcountervalues = function(...) context(counters.subs (...)) end
-
-function commands.showcounter(name)
+local function showcounter(name)
local cd = counterdata[name]
if cd then
context("[%s:",name)
@@ -583,19 +589,82 @@ function commands.showcounter(name)
end
end
-function commands.doifelsecounter(name) commands.doifelse(counterdata[name]) end
-function commands.doifcounter (name) commands.doif (counterdata[name]) end
-function commands.doifnotcounter (name) commands.doifnot (counterdata[name]) end
-
-function commands.incrementedcounter(...) context(counters.add(...)) end
+-- the noreset flag is somewhat messy ... always false messes up e.g. itemize, while always true messes up the page numbers
+--
+-- if this fails I'll clean up this still somewhat experimental mechanism (but I need use cases)
-function commands.checkcountersetup(name,level,start,state)
- counters.restart(name,1,start,true) -- no reset
+local function checkcountersetup(name,level,start,state)
+ local noreset = true -- level > 0 -- was true
+ counters.restart(name,1,start,noreset) -- was true
counters.setstate(name,state)
counters.setlevel(name,level)
sections.setchecker(name,level,counters.reset)
end
+--
+
+implement { name = "addcounter", actions = add, arguments = { "string", "integer", "integer" } }
+implement { name = "setcounter", actions = set, arguments = { "string", 1, "integer" } }
+implement { name = "setowncounter", actions = setown, arguments = { "string", 1, "string" } }
+implement { name = "restartcounter", actions = restart, arguments = { "string", 1, "integer" } }
+implement { name = "resetcounter", actions = reset, arguments = { "string", 1 } }
+implement { name = "incrementcounter", actions = add, arguments = { "string", 1, 1 } }
+implement { name = "decrementcounter", actions = add, arguments = { "string", 1, -1 } }
+
+implement { name = "setsubcounter", actions = set, arguments = { "string", "integer", "integer" } }
+implement { name = "setownsubcounter", actions = setown, arguments = { "string", "integer", "string" } }
+implement { name = "restartsubcounter", actions = restart, arguments = { "string", "integer", "integer" } }
+implement { name = "resetsubcounter", actions = reset, arguments = { "string", "integer" } }
+implement { name = "incrementsubcounter", actions = add, arguments = { "string", "integer", 1 } }
+implement { name = "decrementsubcounter", actions = add, arguments = { "string", "integer", -1 } }
+
+implement { name = "rawcountervalue", actions = { counters.raw , context }, arguments = { "string", 1 } }
+implement { name = "countervalue", actions = { counters.value , context }, arguments = { "string", 1 } }
+implement { name = "lastcountervalue", actions = { counters.last , context }, arguments = { "string", 1 } }
+implement { name = "firstcountervalue", actions = { counters.first , context }, arguments = { "string", 1 } }
+implement { name = "nextcountervalue", actions = { counters.next , context }, arguments = { "string", 1 } }
+implement { name = "prevcountervalue", actions = { counters.previous, context }, arguments = { "string", 1 } }
+implement { name = "subcountervalues", actions = { counters.subs , context }, arguments = { "string", 1 } }
+
+implement { name = "rawsubcountervalue", actions = { counters.raw , context }, arguments = { "string", "integer" } }
+implement { name = "subcountervalue", actions = { counters.value , context }, arguments = { "string", "integer" } }
+implement { name = "lastsubcountervalue", actions = { counters.last , context }, arguments = { "string", "integer" } }
+implement { name = "firstsubcountervalue", actions = { counters.first , context }, arguments = { "string", "integer" } }
+implement { name = "nextsubcountervalue", actions = { counters.next , context }, arguments = { "string", "integer" } }
+implement { name = "previoussubcountervalue", actions = { counters.previous, context }, arguments = { "string", "integer" } }
+implement { name = "subsubcountervalues", actions = { counters.subs , context }, arguments = { "string", "integer" } }
+
+implement { name = "savecounter", actions = counters.save, arguments = "string" }
+implement { name = "restorecounter", actions = counters.restore, arguments = "string" }
+
+implement { name = "incrementedcounter", actions = { add, context }, arguments = { "string", 1, 1 } }
+implement { name = "decrementedcounter", actions = { add, context }, arguments = { "string", 1, -1 } }
+
+implement { name = "showcounter", actions = showcounter, arguments = "string" } -- todo
+implement { name = "checkcountersetup", actions = checkcountersetup, arguments = { "string", "integer", "integer", "string" } }
+
+table.setmetatablecall(counterdata,function(t,k) return t[k] end)
+
+implement { name = "doifelsecounter", actions = { counterdata, commands.doifelse }, arguments = "string" }
+implement { name = "doifcounter", actions = { counterdata, commands.doif }, arguments = "string" }
+implement { name = "doifnotcounter", actions = { counterdata, commands.doifnot }, arguments = "string" }
+
+implement {
+ name = "definecounter",
+ actions = counters.define,
+ arguments = {
+ {
+ { "name" } ,
+ { "start", "integer" },
+ { "counter" },
+ { "method" },
+ }
+ }
+}
+
+------------------------------------------------------------------
+------------------------------------------------------------------
+
-- -- move to strc-pag.lua
--
-- function counters.analyze(name,counterspecification)
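
The new counters.way helper above replaces the ctxcommand-based way() call in strc-num.mkiv below; its lpeg pattern strips an optional variables.by prefix so that a way value such as \v!by\v!chapter resolves to the head name that \thenamedheadlevel expects. A standalone version of that pattern, assuming the English interface where variables.by is simply the string "by":

-- Runs under texlua or any Lua with the lpeg module available.
local lpeg      = lpeg or require("lpeg")
local lpegmatch = lpeg.match

local by      = "by"                               -- stand-in for interfaces.variables.by
local pattern = lpeg.P(by)^-1 * lpeg.C(lpeg.P(1)^1)

local function way(w)
    if not w or w == "" then
        return ""
    else
        return lpegmatch(pattern,w)
    end
end

print(way("bychapter"))   -- chapter
print(way("chapter"))     -- chapter (no prefix to strip)
print(way(""))            -- empty string

The bulk of the remaining strc-num.lua hunk is a mechanical translation of the old commands.* assignments into implement registrations, following the same pattern as in strc-not.lua.
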
diff --git a/tex/context/base/strc-num.mkiv b/tex/context/base/strc-num.mkiv
index 2fa8b0e9a..58095b8e7 100644
--- a/tex/context/base/strc-num.mkiv
+++ b/tex/context/base/strc-num.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% work in progress
% to be checked: can we use the command handler code here?
% all settings will move to lua
@@ -25,7 +27,9 @@
\installcommandhandler \??counter {counter} \??counter
-\let\setupstructurecounting\setupcounter
+\let\setupcounters\setupcounter
+
+\let\setupstructurecounting\setupcounter % will disappear
\setupcounter
[\c!way=\v!by\v!chapter,
@@ -39,7 +43,7 @@
\c!state=\v!start]
\def\autostructureprefixsegments#1% todo: \c!prefixsegments=\v!auto
- {2:\thenamedheadlevel{\ctxcommand{way("#1\c!way")}}}
+ {2:\thenamedheadlevel{\clf_way{#1\c!way}}}
\appendtoks
\resetcounterparameter\s!counter
@@ -48,21 +52,27 @@
\appendtoks
\ifx\currentcounterparent\empty
\edef\p_start{\counterparameter\c!start}%
- \ctxcommand{definecounter {
- name = "\currentcounter",
- start = \ifx\p_start\empty0\else\number\p_start\fi,
- counter = "\counterparameter\s!counter",
- method = "\counterparameter\c!method",
- }}%
+ \clf_definecounter
+ name {\currentcounter}%
+ start \ifx\p_start\empty\zerocount\else\numexpr\p_start\relax\fi
+ counter {\counterparameter\s!counter}%
+ method {\counterparameter\c!method}%
+ \relax
\letcounterparameter\s!name\currentcounter
\else
- \letcounterparameter\s!name\currentcounterparent
+ % \letcounterparameter\s!name\currentcounterparent % we need a chained clone
+ \setexpandedcounterparameter\s!name{\namedcounterparameter\currentcounterparent\s!name}%
\fi
\strc_counters_check_setup
\to \everydefinecounter
\appendtoks
\ifx\currentcounter\empty \else
+ \edef\p_number{\counterparameter\c!number}%
+ \ifx\p_number\empty \else
+ \clf_setcounter{\counterparameter\s!name}\numexpr\p_number\relax
+ \letcounterparameter\c!number\empty
+ \fi
\edef\p_start{\counterparameter\c!start}%
\setexpandedcounterparameter\c!start{\ifx\p_start\empty0\else\number\p_start\fi}%
\strc_counters_check_setup
@@ -71,40 +81,28 @@
% % % %
-\def\strc_counters_way#1% slow, we need to store it at the tex end
- {\ctxcommand{way("\namedcounterparameter{#1}\c!way")}}
-
-% \def\thenamedcounterlevel#1%
-% {\thenamedheadlevel{\strc_counters_way{#1}}}
+\def\strc_counters_way#1{\clf_way{\namedcounterparameter{#1}\c!way}}
\def\thenamedcounterlevel#1%
- {\xthenamedheadlevel{\strc_counters_way{#1}}}
-
-% \def\strc_counters_check_setup#1% does it have to happen here?
-% {% this can be done at the lua end / a bit messy here ... todo ...
-% \ifcsname\??counter#1\c!number\endcsname
-% \doifelsevalue {\??counter#1\c!number}{#1} {\letbeundefined{\??counter#1\c!number}}%
-% {\doifvaluenothing{\??counter#1\c!number} {\letbeundefined{\??counter#1\c!number}}}%
-% \fi
-% \ifcsname\??counter#1\c!number\endcsname
-% % it's a clone
-% \else
-% \edef\currentcounterlevel{\thenamedcounterlevel{#1}}%
-% \edef\p_start{\counterparameter{#1}\c!start}%
-% \ctxcommand{checkcountersetup("#1",\currentcounterlevel,\ifx\p_start\empty0\else\number\p_start\fi,"\counterparameter{#1}\c!state")}%
-% \fi}
+ {\xthenamedheadlevel{\clf_way{\namedcounterparameter{#1}\c!way}}}
-\def\strc_counters_check_setup
+\unexpanded\def\strc_counters_check_setup
{\edef\p_name{\directcounterparameter\s!name}%
\ifx\currentcounter\p_name
\edef\currentcounterlevel{\thenamedcounterlevel\currentcounter}%
\edef\p_start{\counterparameter\c!start}%
- \ctxcommand{checkcountersetup("\currentcounter",\currentcounterlevel,\ifx\p_start\empty0\else\number\p_start\fi,"\counterparameter\c!state")}%
+ \clf_checkcountersetup
+ {\currentcounter}%
+ \numexpr\currentcounterlevel\relax
+ \numexpr\ifx\p_start\empty\zerocount\else\p_start\fi\relax % bug in scanner
+ {\counterparameter\c!state}%
\fi}
-\unexpanded\def\doifcounterelse #1{\ctxcommand{doifelsecounter("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\doifcounter #1{\ctxcommand{doifcounter ("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\doifnotcounter #1{\ctxcommand{doifnotcounter ("\namedcounterparameter{#1}\s!name")}}
+\unexpanded\def\doifelsecounter #1{\clf_doifelsecounter{\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\doifcounter #1{\clf_doifcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\doifnotcounter #1{\clf_doifnotcounter {\namedcounterparameter{#1}\s!name}}
+
+\let\doifcounterelse\doifelsecounter
\unexpanded\def\setcounter {\dotripleempty \strc_counters_set_interfaced}
\unexpanded\def\setcounterown {\dotripleempty \strc_counters_setown_interfaced}
@@ -245,90 +243,90 @@
\expandafter\gobbleoneoptional
\fi}
-\def\strc_counters_set_two [#1][#2][#3]{\strc_counters_set_sub {#1}{#2}{#3}}
-\def\strc_counters_setown_two [#1][#2][#3]{\strc_counters_setown_sub {#1}{#2}{#3}}
-\def\strc_counters_restart_two [#1][#2][#3]{\strc_counters_restart_sub {#1}{#2}{#3}}
-\def\strc_counters_reset_two [#1][#2]{\strc_counters_reset_sub {#1}{#2}}
-\def\strc_counters_increment_two [#1][#2]{\strc_counters_increment_sub{#1}{#2}}
-\def\strc_counters_decrement_two [#1][#2]{\strc_counters_decrement_sub{#1}{#2}}
-
-\def\strc_counters_raw_two [#1][#2]{\strc_counters_raw_sub {#1}{#2}}
-\def\strc_counters_last_two [#1][#2]{\strc_counters_last_sub {#1}{#2}}
-\def\strc_counters_first_two [#1][#2]{\strc_counters_first_sub {#1}{#2}}
-\def\strc_counters_next_two [#1][#2]{\strc_counters_next_sub {#1}{#2}}
-\def\strc_counters_prev_two [#1][#2]{\strc_counters_prev_sub {#1}{#2}}
-\def\strc_counters_subs_two [#1][#2]{\strc_counters_subs_sub {#1}{#2}}
-
-\def\strc_counters_set_one [#1][#2][#3]{\strc_counters_set_sub {#1}\plusone{#2}}
-\def\strc_counters_setown_one [#1][#2][#3]{\strc_counters_setown_sub {#1}\plusone{#2}}
-\def\strc_counters_restart_one [#1][#2][#3]{\strc_counters_restart_sub {#1}\plusone{#2}}
-\def\strc_counters_reset_one [#1][#2]{\strc_counters_reset_sub {#1}\plusone}
-\def\strc_counters_increment_one [#1][#2]{\strc_counters_increment_sub{#1}\plusone}
-\def\strc_counters_decrement_one [#1][#2]{\strc_counters_decrement_sub{#1}\plusone}
-
-\def\strc_counters_raw_one [#1][#2]{\strc_counters_raw_sub {#1}\plusone}
-\def\strc_counters_last_one [#1][#2]{\strc_counters_last_sub {#1}\plusone}
-\def\strc_counters_first_one [#1][#2]{\strc_counters_first_sub {#1}\plusone}
-\def\strc_counters_next_one [#1][#2]{\strc_counters_next_sub {#1}\plusone}
-\def\strc_counters_prev_one [#1][#2]{\strc_counters_prev_sub {#1}\plusone}
-\def\strc_counters_subs_one [#1][#2]{\strc_counters_subs_sub {#1}\plusone}
-
-\def\strc_counters_save_one [#1]{\strc_counters_save {#1}}
-\def\strc_counters_restore_one [#1]{\strc_counters_restore {#1}}
-
-\unexpanded\def\strc_counters_set #1#2{\strc_counters_set_sub {#1}\plusone{#2}}
-\unexpanded\def\strc_counters_setown #1#2{\strc_counters_setown_sub {#1}\plusone{#2}}
-\unexpanded\def\strc_counters_restart #1#2{\strc_counters_restart_sub {#1}\plusone{#2}}
-\unexpanded\def\strc_counters_reset #1{\strc_counters_reset_sub {#1}\plusone}
-\unexpanded\def\strc_counters_increment #1{\strc_counters_increment_sub{#1}\plusone}
-\unexpanded\def\strc_counters_decrement #1{\strc_counters_decrement_sub{#1}\plusone}
-
- \def\strc_counters_raw #1{\strc_counters_raw_sub {#1}\plusone}
- \def\strc_counters_last #1{\strc_counters_last_sub {#1}\plusone}
- \def\strc_counters_first #1{\strc_counters_first_sub {#1}\plusone}
- \def\strc_counters_next #1{\strc_counters_next_sub {#1}\plusone}
- \def\strc_counters_prev #1{\strc_counters_prev_sub {#1}\plusone}
- \def\strc_counters_subs #1{\strc_counters_subs_sub {#1}\plusone}
-
-\unexpanded\def\strc_counters_set_sub #1#2#3{\ctxcommand{setcounter ("\namedcounterparameter{#1}\s!name",\number#2,\number#3)}}
-\unexpanded\def\strc_counters_setown_sub #1#2#3{\ctxcommand{setowncounter ("\namedcounterparameter{#1}\s!name",\number#2,"#3")}}
-\unexpanded\def\strc_counters_restart_sub #1#2#3{\ctxcommand{restartcounter("\namedcounterparameter{#1}\s!name",\number#2,\number#3)}}
-\unexpanded\def\strc_counters_reset_sub #1#2{\ctxcommand{resetcounter ("\namedcounterparameter{#1}\s!name",\number#2)}}
-\unexpanded\def\strc_counters_increment_sub #1#2{\ctxcommand{addcounter ("\namedcounterparameter{#1}\s!name",\number#2,1)}}
-\unexpanded\def\strc_counters_decrement_sub #1#2{\ctxcommand{addcounter ("\namedcounterparameter{#1}\s!name",\number#2,-1)}}
-
- \def\strc_counters_raw_sub #1#2{\ctxcommand{countervalue ("\namedcounterparameter{#1}\s!name",\number#2)}} % maybe raw
- \def\strc_counters_last_sub #1#2{\ctxcommand{lastcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_first_sub #1#2{\ctxcommand{firstcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_next_sub #1#2{\ctxcommand{nextcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_prev_sub #1#2{\ctxcommand{previouscountervalue("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_subs_sub #1#2{\ctxcommand{subcountervalues ("\namedcounterparameter{#1}\s!name",\number#2)}}
-
-\unexpanded\def\strc_counters_save #1{\ctxcommand{savecounter ("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\strc_counters_restore #1{\ctxcommand{restorecounter("\namedcounterparameter{#1}\s!name")}}
-
-\unexpanded\def\strc_counters_incremented #1{\ctxcommand{incrementedcounter("\namedcounterparameter{#1}\s!name",1, 1)}}
-\unexpanded\def\strc_counters_decremented #1{\ctxcommand{incrementedcounter("\namedcounterparameter{#1}\s!name",1,-1)}}
-
-\unexpanded\def\showcounter [#1]{\ctxcommand{tracecounter("\namedcounterparameter{#1}\s!name")}}
-
-\unexpanded\def\incrementedcounter [#1]{\strc_counters_incremented{#1}} % expandable, no \dosingleargument
-\unexpanded\def\decrementedcounter [#1]{\strc_counters_decremented{#1}} % expandable, no \dosingleargument
+\def\strc_counters_set_two [#1][#2][#3]{\clf_setsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\def\strc_counters_setown_two [#1][#2][#3]{\clf_setownsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax{#3}}
+\def\strc_counters_restart_two [#1][#2][#3]{\clf_restartsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\def\strc_counters_reset_two [#1][#2]{\clf_resetsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_increment_two [#1][#2]{\clf_incrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_decrement_two [#1][#2]{\clf_decrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+\def\strc_counters_raw_two [#1][#2]{\clf_subcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_last_two [#1][#2]{\clf_lastsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_first_two [#1][#2]{\clf_firstsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_next_two [#1][#2]{\clf_nextsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_prev_two [#1][#2]{\clf_previoussubcountervalue{\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_subs_two [#1][#2]{\clf_subsubcountervalues {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+\def\strc_counters_set_one [#1][#2][#3]{\clf_setcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_setown_one [#1][#2][#3]{\clf_setowncounter {\namedcounterparameter{#1}\s!name}{#2}}
+\def\strc_counters_restart_one [#1][#2][#3]{\clf_restartcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_reset_one [#1][#2]{\clf_resetcounter {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_increment_one [#1][#2]{\clf_incrementcounter {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_decrement_one [#1][#2]{\clf_decrementcounter {\namedcounterparameter{#1}\s!name}}
+
+\def\strc_counters_raw_one [#1][#2]{\clf_countervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_last_one [#1][#2]{\clf_lastcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_first_one [#1][#2]{\clf_firstcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_next_one [#1][#2]{\clf_nextcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_prev_one [#1][#2]{\clf_previouscountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_subs_one [#1][#2]{\clf_subcountervalues {\namedcounterparameter{#1}\s!name}}
+
+\def\strc_counters_save_one [#1]{\clf_savecounter {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_restore_one [#1]{\clf_restorecounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\strc_counters_set #1#2{\clf_setcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_setown #1#2{\clf_setowncounter {\namedcounterparameter{#1}\s!name}{#2}}
+\unexpanded\def\strc_counters_restart #1#2{\clf_restartcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_reset #1{\clf_resetcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_increment #1{\clf_incrementcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_decrement #1{\clf_decrementcounter {\namedcounterparameter{#1}\s!name}}
+
+ \def\strc_counters_raw #1{\clf_countervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_last #1{\clf_lastcountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_first #1{\clf_firstcountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_next #1{\clf_nextcountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_prev #1{\clf_previouscountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_subs #1{\clf_subcountervalues {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\strc_counters_set_sub #1#2#3{\clf_setsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\unexpanded\def\strc_counters_setown_sub #1#2#3{\clf_setownsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax{#3}}
+\unexpanded\def\strc_counters_restart_sub #1#2#3{\clf_restartsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\unexpanded\def\strc_counters_reset_sub #1#2{\clf_resetsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_increment_sub #1#2{\clf_incrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_decrement_sub #1#2{\clf_decrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+ \def\strc_counters_raw_sub #1#2{\clf_subcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax} % maybe raw
+ \def\strc_counters_last_sub #1#2{\clf_lastsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_first_sub #1#2{\clf_firstsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_next_sub #1#2{\clf_nextsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_prev_sub #1#2{\clf_previoussubcountervalue{\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_subs_sub #1#2{\clf_subsubcountervalues {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+\unexpanded\def\strc_counters_save #1{\clf_savecounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_restore #1{\clf_restorecounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\strc_counters_incremented #1{\clf_incrementedcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_decremented #1{\clf_decrementedcounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\showcounter [#1]{\clf_showcounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\incrementedcounter [#1]{\clf_incrementedcounter {\namedcounterparameter{#1}\s!name}} % no \dosingleargument
+\unexpanded\def\decrementedcounter [#1]{\clf_decrementedcounter {\namedcounterparameter{#1}\s!name}} % no \dosingleargument
% public variants ... beware, for old cases, from now on the value variants are the
% ones that are expandable
-\def\rawcountervalue [#1]{\strc_counters_raw_sub {#1}\plusone}
-\def\lastcountervalue [#1]{\strc_counters_last_sub {#1}\plusone}
-\def\firstcountervalue[#1]{\strc_counters_first_sub{#1}\plusone}
-\def\nextcountervalue [#1]{\strc_counters_next_sub {#1}\plusone}
-\def\prevcountervalue [#1]{\strc_counters_prev_sub {#1}\plusone}
+\def\rawcountervalue [#1]{\clf_countervalue {\namedcounterparameter{#1}\s!name}}
+\def\lastcountervalue [#1]{\clf_lastcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\firstcountervalue [#1]{\clf_firstcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\nextcountervalue [#1]{\clf_nextcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\prevcountervalue [#1]{\clf_previouscountervalue{\namedcounterparameter{#1}\s!name}}
-\let\rawsubcountervalue \strc_counters_raw_two
-\let\lastsubcountervalue \strc_counters_last_two
-\let\firstsubcountervalue\strc_counters_first_two
-\let\nextsubcountervalue \strc_counters_next_two
-\let\prevsubcountervalue \strc_counters_prev_two
+\let\rawsubcountervalue \strc_counters_raw_two
+\let\lastsubcountervalue \strc_counters_last_two
+\let\firstsubcountervalue \strc_counters_first_two
+\let\nextsubcountervalue \strc_counters_next_two
+\let\prevsubcountervalue \strc_counters_prev_two
% The bypage check needs a multipass reference and therefore we only check for it when we increment
% and know that some content will be placed. We could also check for spreads.
@@ -342,7 +340,7 @@
\strc_counters_reset{#1}%
\fi
\fi
- \ctxcommand{addcounter("\namedcounterparameter{#1}\s!name",\number#2,1)}}
+ \clf_incrementsubcounter{\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
\unexpanded\def\convertedcounter
{\dodoubleempty\strc_counters_converted}
@@ -351,58 +349,58 @@
{\begingroup
\edef\currentcounter{#1}%
\ifsecondargument\setupcurrentcounter[#2]\fi
- \ctxlua{structures.sections.prefixedconverted(
- "\counterparameter\s!name",
+ \clf_prefixedconverted
+ {\counterparameter\s!name}
{
- prefix = "\counterparameter\c!prefix",
- separatorset = "\counterparameter\c!prefixseparatorset",
- conversion = "\counterparameter\c!prefixconversion",
- conversionset = "\counterparameter\c!prefixconversionset",
- starter = \!!bs\counterparameter\c!prefixstarter\!!es,
- stopper = \!!bs\counterparameter\c!prefixstopper\!!es,
- set = "\counterparameter\c!prefixset",
- segments = "\counterparameter\c!prefixsegments",
- connector = \!!bs\counterparameter\c!prefixconnector\!!es,
- },
+ prefix {\counterparameter\c!prefix}
+ separatorset {\counterparameter\c!prefixseparatorset}
+ conversion {\counterparameter\c!prefixconversion}
+ conversionset {\counterparameter\c!prefixconversionset}
+ starter {\counterparameter\c!prefixstarter}
+ stopper {\counterparameter\c!prefixstopper}
+ set {\counterparameter\c!prefixset}
+ segments {\counterparameter\c!prefixsegments}
+ connector {\counterparameter\c!prefixconnector}
+ }
{
- order = "\counterparameter\c!numberorder",
- separatorset = "\counterparameter\c!numberseparatorset",
- conversion = \!!bs\counterparameter\c!numberconversion\!!es,
- conversionset = "\counterparameter\c!numberconversionset",
- starter = \!!bs\counterparameter\c!numberstarter\!!es,
- stopper = \!!bs\counterparameter\c!numberstopper\!!es,
- segments = "\counterparameter\c!numbersegments",
- type = "\counterparameter\c!type",
- criterium = "\counterparameter\c!criterium", % might change if we also want this with sectioning
+ order {\counterparameter\c!numberorder}
+ separatorset {\counterparameter\c!numberseparatorset}
+ conversion {\counterparameter\c!numberconversion}
+ conversionset {\counterparameter\c!numberconversionset}
+ starter {\counterparameter\c!numberstarter}
+ stopper {\counterparameter\c!numberstopper}
+ segments {\counterparameter\c!numbersegments}
+ type {\counterparameter\c!type}
+ criterium {\counterparameter\c!criterium}
}
- )}%
+ \relax
\endgroup}
\def\directconvertedcounter#1#2% name, type
- {\ctxlua{structures.sections.prefixedconverted(
- "\namedcounterparameter{#1}\s!name",
+ {\clf_prefixedconverted
+ {\namedcounterparameter{#1}\s!name}
{
- prefix = "\namedcounterparameter{#1}\c!prefix",
- separatorset = "\namedcounterparameter{#1}\c!prefixseparatorset",
- conversion = "\namedcounterparameter{#1}\c!prefixconversion",
- conversionset = "\namedcounterparameter{#1}\c!prefixconversionset",
- % starter = \!!bs\namedcounterparameter{#1}\c!prefixstarter\!!es,
- % stopper = \!!bs\namedcounterparameter{#1}\c!prefixstopper\!!es,
- set = "\namedcounterparameter{#1}\c!prefixset",
- segments = "\namedcounterparameter{#1}\c!prefixsegments",
- connector = \!!bs\namedcounterparameter{#1}\c!prefixconnector\!!es,
- },
+ prefix {\namedcounterparameter{#1}\c!prefix}
+ separatorset {\namedcounterparameter{#1}\c!prefixseparatorset}
+ conversion {\namedcounterparameter{#1}\c!prefixconversion}
+ conversionset {\namedcounterparameter{#1}\c!prefixconversionset}
+ % starter {\namedcounterparameter{#1}\c!prefixstarter}
+ % stopper {\namedcounterparameter{#1}\c!prefixstopper}
+ set {\namedcounterparameter{#1}\c!prefixset}
+ segments {\namedcounterparameter{#1}\c!prefixsegments}
+ connector {\namedcounterparameter{#1}\c!prefixconnector}
+ }
{
- order = "\namedcounterparameter{#1}\c!numberorder",
- separatorset = "\namedcounterparameter{#1}\c!numberseparatorset",
- conversion = \!!bs\namedcounterparameter{#1}\c!numberconversion\!!es,
- conversionset = "\namedcounterparameter{#1}\c!numberconversionset",
- starter = \!!bs\namedcounterparameter{#1}\c!numberstarter\!!es,
- stopper = \!!bs\namedcounterparameter{#1}\c!numberstopper\!!es,
- segments = "\namedcounterparameter{#1}\c!numbersegments",
- type = "#2",
+ order {\namedcounterparameter{#1}\c!numberorder}
+ separatorset {\namedcounterparameter{#1}\c!numberseparatorset}
+ conversion {\namedcounterparameter{#1}\c!numberconversion}
+ conversionset {\namedcounterparameter{#1}\c!numberconversionset}
+ starter {\namedcounterparameter{#1}\c!numberstarter}
+ stopper {\namedcounterparameter{#1}\c!numberstopper}
+ segments {\namedcounterparameter{#1}\c!numbersegments}
+ type {#2}
}
- )}}
+ \relax}
\unexpanded\def\convertedsubcounter
{\dotripleempty\strc_counters_converted_sub}
@@ -437,7 +435,9 @@
\unexpanded\def\doifdefinedcounter {\doifcommandhandler \??counter}
\unexpanded\def\doifundefinedcounter {\doifnotcommandhandler \??counter}
-\unexpanded\def\doifdefinedcounterelse{\doifelsecommandhandler\??counter}
+\unexpanded\def\doifelsedefinedcounter{\doifelsecommandhandler\??counter}
+
+\let\doifdefinedcounterelse\doifelsedefinedcounter
%D What follows is a compatibility layer.
@@ -458,7 +458,8 @@
\let \doifdefinednumber \doifdefinedcounter % {number}{true}
\let \doifundefinednumber \doifnotdefinedcounter % {number}{true}
-\let \doifdefinednumberelse \doifdefinedcounterelse % {number}{true}{false}
+\let \doifelsedefinednumber \doifelsedefinedcounter % {number}{true}{false}
+\let \doifdefinednumberelse \doifelsedefinedcounter % {number}{true}{false}
\let \setupnumbering \setupcounter
@@ -504,119 +505,164 @@
\fi
%
\ifx\p_hascaption\v!yes
- \xdef\currentstructurecomponentname {#3\s!name}%
- \xdef\currentstructurecomponentlevel {#3\c!level}%
- \edef\currentstructurecomponentexpansion {#3\c!expansion}%
- \xdef\currentstructurecomponentxmlsetup {#3\c!xmlsetup}%
- \xdef\currentstructurecomponentcatcodes {#3\s!catcodes}%
- \xdef\currentstructurecomponentlabel {#3\c!label}%
- \xdef\currentstructurecomponentreference {#3\c!reference}%
- \xdef\currentstructurecomponentreferenceprefix{#3\c!referenceprefix}%
- \ifx\currentstructurecomponentexpansion\s!xml
- \xmlstartraw
- \xdef\currentstructurecomponenttitle {#3\c!title}%
- \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#3\c!marking}%
- \xdef\currentstructurecomponentlist {#3\c!list}%
- \xmlstopraw
- \ifx\currentstructurecomponentlist\empty
- \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
- \fi
- \globallet\currentstructurecomponentcoding\s!xml
- \else
- \ifx\currentstructurecomponentexpansion\v!yes
- \xdef\currentstructurecomponenttitle {#3\c!title}%
- \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#3\c!marking}%
- \xdef\currentstructurecomponentlist {#3\c!list}%
- \else
- \xdef\currentstructurecomponenttitle {#4\c!title}%
- \xdef\currentstructurecomponentbookmark{#4\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#4\c!marking}%
- \xdef\currentstructurecomponentlist {#4\c!list}%
- \iflocation \ifx\currentstructurecomponentbookmark\empty
- \begingroup
- \simplifycommands
- \xdef\currentstructurecomponentbookmark{\detokenize\expandafter{\normalexpanded{#3\c!title}}}%
- \endgroup
- \fi \fi
- \fi
- \ifx\currentstructurecomponentlist\empty
- \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
- \fi
- \globallet\currentstructurecomponentcoding\s!tex
+ \strc_counters_register_component_list{#1}{#3}{#4}{#9}%
+ \else\ifx\currentstructurecomponentreference\empty
+ \strc_counters_register_component_none
+ \else
+ \strc_counters_register_component_page{#3}%
+ \fi\fi
+ \endgroup}
+
+\def\strc_counters_register_component_none
+ {\glet\m_strc_counters_last_registered_index \relax
+ \glet\m_strc_counters_last_registered_attribute \attributeunsetvalue
+ \glet\m_strc_counters_last_registered_synchronize\relax}
+
+\def\strc_counters_register_component_check_prefix
+ {\ifx\currentstructurecomponentreferenceprefix\empty
+ \let\currentstructurecomponentreferenceprefix\currentstructurereferenceprefix
+ \fi
+ \ifx\currentstructurecomponentreferenceprefix\empty
+ \let\currentstructurecomponentreferenceprefix\referenceprefix
+ \fi
+ \ifdefined\currentstructurecomponentreferenceprefix\else
+ \let\currentstructurecomponentreferenceprefix\empty
+ \fi}
+
+\def\strc_counters_register_component_page#1%
+ {\xdef\currentstructurecomponentreference {#1\c!reference}%
+ \xdef\currentstructurecomponentreferenceprefix{#1\c!referenceprefix}%
+ \strc_counters_register_component_check_prefix
+ \setnextinternalreference
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}% move to lua
+ view {\interactionparameter\c!focus}%
+ prefix {\currentstructurecomponentreferenceprefix}%
+ reference {\currentstructurecomponentreference}%
+ }%
+ metadata {%
+ kind {\s!page}%
+ }%
+ }%
+ \relax
+ \xdef\m_strc_counters_last_registered_attribute {\the\lastdestinationattribute}%
+ \glet\m_strc_counters_last_registered_index \relax
+ \glet\m_strc_counters_last_registered_synchronize\relax}
+
+\def\strc_counters_register_component_list#1#2#3#4%
+ {\xdef\currentstructurecomponentname {#2\s!name}%
+ \xdef\currentstructurecomponentlevel {#2\c!level}%
+ \edef\currentstructurecomponentexpansion {#2\c!expansion}%
+ \xdef\currentstructurecomponentxmlsetup {#2\c!xmlsetup}%
+ \xdef\currentstructurecomponentcatcodes {#2\s!catcodes}%
+ \xdef\currentstructurecomponentlabel {#2\c!label}%
+ \xdef\currentstructurecomponentreference {#2\c!reference}%
+ \xdef\currentstructurecomponentreferenceprefix{#2\c!referenceprefix}%
+ \strc_counters_register_component_check_prefix
+ \ifx\currentstructurecomponentexpansion\s!xml
+ \xmlstartraw
+ \xdef\currentstructurecomponenttitle {#2\c!title}%
+ \xdef\currentstructurecomponentbookmark{#2\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#2\c!marking}%
+ \xdef\currentstructurecomponentlist {#2\c!list}%
+ \xmlstopraw
+ \ifx\currentstructurecomponentlist\empty
+ \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
\fi
- %
- \setnextinternalreference
- \xdef\m_strc_counters_last_registered_index{\ctxcommand{addtolist{
- metadata = {
- kind = "#1",
- name = "\currentname",
- level = structures.sections.currentlevel(),
- catcodes = \the\ifx\currentstructurecomponentcatcodes\empty\catcodetable\else\csname\currentstructurecomponentcatcodes\endcsname\fi,
- coding = "\currentstructurecomponentcoding",
- \ifx\currentstructurecomponentcoding\s!xml
- xmlroot = "\xmldocument",
- \fi
- \ifx\currentstructurecomponentxmlsetup\empty \else
- xmlsetup = "\currentstructurexmlsetup",
- \fi
- },
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock",
- reference = "\currentstructurecomponentreference",
- referenceprefix = "\currentstructurecomponentreferenceprefix",
- section = structures.sections.currentid(),
- },
- titledata = {
- label = \!!bs\detokenize\expandafter{\currentstructurecomponentlabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentstructurecomponenttitle }\!!es,
- \ifx\currentstructurecomponentbookmark\currentstructurecomponenttitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentstructurecomponentbookmark }\!!es,
- \fi
- \ifx\currentstructurecomponentmarking\currentstructurecomponenttitle \else
- marking = \!!bs\detokenize\expandafter{\currentstructurecomponentmarking }\!!es,
- \fi
- \ifx\currentstructurecomponentlist\currentstructurecomponenttitle \else
- list = \!!bs\detokenize\expandafter{\currentstructurecomponentlist}\!!es,
- \fi
- },
- \ifx\p_hasnumber\v!yes
- prefixdata = {
- prefix = "#3\c!prefix",
- separatorset = "#3\c!prefixseparatorset",
- conversion = \!!bs#3\c!prefixconversion\!!es,
- conversionset = "#3\c!prefixconversionset",
- set = "#3\c!prefixset",
- % segments = "#3\c!prefixsegments",
- segments = "\p_prefixsegments",
- connector = \!!bs#3\c!prefixconnector\!!es,
- },
- numberdata = { % more helpers here, like compact elsewhere
- numbers = structures.counters.compact("\currentcounter",nil,true),
- group = "#3\c!group",
- groupsuffix = \!!bs#3\c!groupsuffix\!!es,
- counter = "\currentcounter",
- separatorset = "#3\c!numberseparatorset",
- conversion = \!!bs#3\c!numberconversion\!!es,
- conversionset = "#3\c!numberconversionset",
- starter = \!!bs#3\c!numberstarter\!!es,
- stopper = \!!bs#3\c!numberstopper\!!es,
- segments = "#3\c!numbersegments",
- },
- \fi
- userdata = \!!bs\detokenize{#9}\!!es % will be converted to table at the lua end
- }
- }}%
- \xdef\m_strc_counters_last_registered_attribute {\ctxcommand {setinternalreference(nil,nil,\nextinternalreference)}}%
- \xdef\m_strc_counters_last_registered_synchronize{\ctxlatecommand{enhancelist(\m_strc_counters_last_registered_index)}}%
+ \globallet\currentstructurecomponentcoding\s!xml
\else
- \glet\m_strc_counters_last_registered_index \relax
- \glet\m_strc_counters_last_registered_attribute \attributeunsetvalue
- \glet\m_strc_counters_last_registered_synchronize\relax
+ \ifx\currentstructurecomponentexpansion\v!yes
+ \xdef\currentstructurecomponenttitle {#2\c!title}%
+ \xdef\currentstructurecomponentbookmark{#2\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#2\c!marking}%
+ \xdef\currentstructurecomponentlist {#2\c!list}%
+ \else
+ \xdef\currentstructurecomponenttitle {#3\c!title}%
+ \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#3\c!marking}%
+ \xdef\currentstructurecomponentlist {#3\c!list}%
+ \iflocation \ifx\currentstructurecomponentbookmark\empty
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurecomponentbookmark{\detokenize\expandafter{\normalexpanded{#2\c!title}}}%
+ \endgroup
+ \fi \fi
+ \fi
+ \ifx\currentstructurecomponentlist\empty
+ \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
+ \fi
+ \globallet\currentstructurecomponentcoding\s!tex
\fi
- \endgroup}
+ %
+ \setnextinternalreference
+ \scratchcounter\clf_addtolist %{
+ metadata {
+ kind {#1}
+ name {\currentname}
+ % level structures.sections.currentlevel()
+ catcodes \ifx\currentstructurecomponentcatcodes\empty\catcodetable\else\csname\currentstructurecomponentcatcodes\endcsname\fi
+ coding {\currentstructurecomponentcoding}
+ \ifx\currentstructurecomponentcoding\s!xml
+ xmlroot {\xmldocument}
+ \fi
+ \ifx\currentstructurecomponentxmlsetup\empty \else
+ xmlsetup {\currentstructurexmlsetup}
+ \fi
+ }
+ references {
+ internal \nextinternalreference
+ % block {\currentsectionblock}
+ reference {\currentstructurecomponentreference}
+ prefix {\currentstructurecomponentreferenceprefix}
+ % section structures.sections.currentid()
+ }
+ titledata {
+ label {\detokenize\expandafter{\currentstructurecomponentlabel}}
+ title {\detokenize\expandafter{\currentstructurecomponenttitle}}
+ \ifx\currentstructurecomponentbookmark\currentstructurecomponenttitle \else
+ bookmark {\detokenize\expandafter{\currentstructurecomponentbookmark}}
+ \fi
+ \ifx\currentstructurecomponentmarking\currentstructurecomponenttitle \else
+ marking {\detokenize\expandafter{\currentstructurecomponentmarking}}
+ \fi
+ \ifx\currentstructurecomponentlist\currentstructurecomponenttitle \else
+ list {\detokenize\expandafter{\currentstructurecomponentlist}}
+ \fi
+ }
+ \ifx\p_hasnumber\v!yes
+ prefixdata {
+ prefix {#2\c!prefix}
+ separatorset {#2\c!prefixseparatorset}
+ conversion {#2\c!prefixconversion}
+ conversionset {#2\c!prefixconversionset}
+ set {#2\c!prefixset}
+ % segments {#2\c!prefixsegments}
+ segments {\p_prefixsegments}
+ connector {#2\c!prefixconnector}
+ }
+ numberdata { % more helpers here, like compact elsewhere
+ numbers {\currentcounter}
+ group {#2\c!group}
+ groupsuffix {#2\c!groupsuffix}
+ counter {\currentcounter}
+ separatorset {#2\c!numberseparatorset}
+ conversion {#2\c!numberconversion}
+ conversionset {#2\c!numberconversionset}
+ starter {#2\c!numberstarter}
+ stopper {#2\c!numberstopper}
+ segments {#2\c!numbersegments}
+ }
+ \fi
+ userdata {\detokenize{#4}}
+ %}
+ \relax
+ \xdef\m_strc_counters_last_registered_index{\the\scratchcounter}%
+ \clf_setinternalreference internal \nextinternalreference\relax
+ \xdef\m_strc_counters_last_registered_attribute {\the\lastdestinationattribute}%
+ \xdef\m_strc_counters_last_registered_synchronize{\strc_lists_inject_enhance{\m_strc_counters_last_registered_index}{\nextinternalreference}}}
\let\m_strc_counters_last_registered_index \relax
\let\m_strc_counters_last_registered_attribute \relax
@@ -764,4 +810,6 @@
% \fi
% \to \everysetupcounter
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index 02ed5610f..96d26e6f6 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -25,6 +25,7 @@ local counterdata = counters.data
local variables = interfaces.variables
local context = context
local commands = commands
+local implement = interfaces.implement
local processors = typesetters.processors
local applyprocessor = processors.apply
@@ -34,34 +35,42 @@ local stopapplyprocessor = processors.stopapply
local texsetcount = tex.setcount
local texgetcount = tex.getcount
+local ctx_convertnumber = context.convertnumber
+
-- storage
local collected, tobesaved = allocate(), allocate()
pages.collected = collected
pages.tobesaved = tobesaved
+pages.nofpages = 0
local function initializer()
collected = pages.collected
tobesaved = pages.tobesaved
+ pages.nofpages = #collected
end
job.register('structures.pages.collected', tobesaved, initializer)
local specification = { } -- to be checked
-function pages.save(prefixdata,numberdata)
+function pages.save(prefixdata,numberdata,extradata)
local realpage = texgetcount("realpageno")
local userpage = texgetcount("userpageno")
if realpage > 0 then
if trace_pages then
report_pages("saving page %s.%s",realpage,userpage)
end
+ local viewerprefix = extradata.viewerprefix
+ local state = extradata.state
local data = {
- number = userpage,
- block = sections.currentblock(),
- prefixdata = prefixdata and helpers.simplify(prefixdata),
- numberdata = numberdata and helpers.simplify(numberdata),
+ number = userpage,
+ viewerprefix = viewerprefix ~= "" and viewerprefix or nil,
+ state = state ~= "" and state or nil, -- maybe let "start" be default
+ block = sections.currentblock(),
+ prefixdata = prefixdata and helpers.simplify(prefixdata),
+ numberdata = numberdata and helpers.simplify(numberdata),
}
tobesaved[realpage] = data
if not collected[realpage] then
@@ -97,11 +106,11 @@ function counters.specials.userpage()
end
end
-local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
-
-local function convertnumber(str,n)
- return f_convert(str or "numbers",n)
-end
+-- local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
+--
+-- local function convertnumber(str,n)
+-- return f_convert(str or "numbers",n)
+-- end
function pages.number(realdata,pagespec)
local userpage, block = realdata.number, realdata.block or "" -- sections.currentblock()
@@ -114,12 +123,12 @@ function pages.number(realdata,pagespec)
applyprocessor(starter)
end
if conversion ~= "" then
- context.convertnumber(conversion,userpage)
+ ctx_convertnumber(conversion,userpage)
else
if conversionset == "" then conversionset = "default" end
local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1
local data = startapplyprocessor(theconversion)
- context.convertnumber(data or "number",userpage)
+ ctx_convertnumber(data or "number",userpage)
stopapplyprocessor()
end
if stopper ~= "" then
@@ -263,6 +272,24 @@ function pages.is_odd(n)
end
end
+function pages.on_right(n)
+ local pagemode = texgetcount("pageduplexmode")
+ if pagemode == 2 or pagemode == 1 then
+ n = n or texgetcount("realpageno")
+ if texgetcount("pagenoshift") % 2 == 0 then
+ return n % 2 == 0
+ else
+ return n % 2 ~= 0
+ end
+ else
+ return true
+ end
+end
+
+function pages.in_body(n)
+ return texgetcount("pagebodymode") > 0
+end
+
-- move to strc-pag.lua
function counters.analyze(name,counterspecification)
@@ -314,3 +341,61 @@ function sections.prefixedconverted(name,prefixspec,numberspec)
counters.converted(name,numberspec)
end
end
+
+--
+
+implement {
+ name = "savepagedata",
+ actions = pages.save,
+ arguments = {
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ },
+ {
+ { "viewerprefix" },
+ { "state" },
+ }
+ }
+}
+
+implement { -- weird place
+ name = "prefixedconverted",
+ actions = sections.prefixedconverted,
+ arguments = {
+ "string",
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "order" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ { "type" },
+ { "criterium" },
+ }
+ }
+}
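
The two blocks above register Lua functions through interfaces.implement, which makes them callable from TeX as \clf_savepagedata and \clf_prefixedconverted; each nested table in the arguments field describes one braced keyword group that the scanner turns into a Lua table (the matching \clf_savepagedata call shows up in strc-pag.mkiv below). A minimal sketch of the same pattern, assuming those conventions and using a hypothetical name and fields:

local implement = interfaces.implement
local report    = logs.reporter("demo")

local function showpagedata(prefixdata,numberdata)
    -- both arguments arrive as plain Lua tables, built from the
    -- braced keyword groups that follow the \clf_ call on the TeX side
    report("prefix %a, conversion %a",prefixdata.prefix,numberdata.conversion)
end

implement {
    name      = "showpagedata", -- hypothetical name, for illustration only
    actions   = showpagedata,
    arguments = {
        { { "prefix" }, { "separatorset" } },      -- first keyword group
        { { "conversion" }, { "conversionset" } }, -- second keyword group
    }
}

-- TeX side (sketch of the call that would match this declaration):
--   \clf_showpagedata
--     { prefix {A} separatorset {default} }
--     { conversion {numbers} }
--   \relax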
diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv
index 85cfeb40f..72f0cf32a 100644
--- a/tex/context/base/strc-pag.mkiv
+++ b/tex/context/base/strc-pag.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% Allocation:
\countdef\realpageno \zerocount \realpageno \plusone
@@ -24,12 +26,14 @@
\countdef\subpageno \plustwo \subpageno \zerocount % !
\countdef\arrangeno \plusthree \arrangeno \zerocount % !
\countdef\pagenoshift\plusfour \pagenoshift\zerocount % !
+\countdef\lastpageno \plusfive \lastpageno \zerocount % !
\let\pageno\userpageno
\def\realfolio{\the\realpageno}
\def\userfolio{\the\userpageno}
\def\subfolio {\the\subpageno }
+\def\lastfolio{\the\lastpageno}
\newtoks\everyinitializepagecounters
@@ -101,27 +105,34 @@
\strc_counters_set\s!realpage\realpageno
\strc_counters_set\s!userpage\userpageno
\strc_counters_set\s!subpage \subpageno
+ \lastpageno\lastcountervalue[\s!realpage]\relax
\to \everyinitializepagecounters
\let\setuppagenumber\setupuserpagenumber
\let\resetpagenumber\resetuserpagenumber
+% invisible =
+
\def\strc_pagenumbers_page_state_save % \normalexpanded?
- {\ctxlua{structures.pages.save({
- prefix = "\namedcounterparameter\s!userpage\c!prefix",
- separatorset = "\namedcounterparameter\s!userpage\c!prefixseparatorset",
- conversion = "\namedcounterparameter\s!userpage\c!prefixconversion",
- conversionset = "\namedcounterparameter\s!userpage\c!prefixconversionset",
- set = "\namedcounterparameter\s!userpage\c!prefixset",
- segments = "\namedcounterparameter\s!userpage\c!prefixsegments",
- connector = \!!bs\namedcounterparameter\s!userpage\c!prefixconnector\!!es,
- },{
- conversion = "\namedcounterparameter\s!userpage\c!numberconversion",
- conversionset = "\namedcounterparameter\s!userpage\c!numberconversionset",
- starter = \!!bs\namedcounterparameter\s!userpage\c!numberstarter\!!es,
- stopper = \!!bs\namedcounterparameter\s!userpage\c!numberstopper\!!es,
- }
- )}}
+ {\clf_savepagedata
+ {
+ prefix {\namedcounterparameter\s!userpage\c!prefix}
+ separatorset {\namedcounterparameter\s!userpage\c!prefixseparatorset}
+ conversion {\namedcounterparameter\s!userpage\c!prefixconversion}
+ conversionset {\namedcounterparameter\s!userpage\c!prefixconversionset}
+ set {\namedcounterparameter\s!userpage\c!prefixset}
+ segments {\namedcounterparameter\s!userpage\c!prefixsegments}
+ connector {\namedcounterparameter\s!userpage\c!prefixconnector}
+ }{
+ conversion {\namedcounterparameter\s!userpage\c!numberconversion}
+ conversionset {\namedcounterparameter\s!userpage\c!numberconversionset}
+ starter {\namedcounterparameter\s!userpage\c!numberstarter}
+ stopper {\namedcounterparameter\s!userpage\c!numberstopper}
+ }{
+ viewerprefix {\namedcounterparameter\s!userpage\c!viewerprefix}
+ state {\namedcounterparameter\s!userpage\c!state}
+ }%
+ \relax}
\prependtoks
\strc_pagenumbers_page_state_save
@@ -296,6 +307,12 @@
\trackingmarginnotesfalse
\fi
\fi
+ \pageduplexmode
+ \ifsinglesided
+ \ifdoublesided\plustwo\else\zerocount\fi
+ \else
+ \ifdoublesided\plusone\else\zerocount\fi
+ \fi
\page_backgrounds_recalculate
\strc_pagenumbers_set_location
\to \everysetuppagenumbering
@@ -457,4 +474,6 @@
\initializepagecounters
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua
index 938af1ad7..2a1d0dd59 100644
--- a/tex/context/base/strc-ref.lua
+++ b/tex/context/base/strc-ref.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['strc-ref'] = {
local format, find, gmatch, match, strip = string.format, string.find, string.gmatch, string.match, string.strip
local floor = math.floor
-local rawget, tonumber = rawget, tonumber
+local rawget, tonumber, type = rawget, tonumber, type
local lpegmatch = lpeg.match
local insert, remove, copytable = table.insert, table.remove, table.copy
local formatters = string.formatters
@@ -44,19 +44,21 @@ local report_importing = logs.reporter("references","importing")
local report_empty = logs.reporter("references","empty")
local variables = interfaces.variables
-local constants = interfaces.constants
+local v_default = variables.default
+local v_url = variables.url
+local v_file = variables.file
+local v_unknown = variables.unknown
+local v_page = variables.page
+local v_auto = variables.auto
+
local context = context
local commands = commands
+local implement = interfaces.implement
local texgetcount = tex.getcount
local texsetcount = tex.setcount
local texconditionals = tex.conditionals
-local v_default = variables.default
-local v_url = variables.url
-local v_file = variables.file
-local v_unknown = variables.unknown
-local v_yes = variables.yes
local productcomponent = resolvers.jobs.productcomponent
local justacomponent = resolvers.jobs.justacomponent
@@ -75,6 +77,8 @@ local references = structures.references
local lists = structures.lists
local counters = structures.counters
+local jobpositions = job.positions
+
-- some might become local
references.defined = references.defined or allocate()
@@ -82,6 +86,7 @@ references.defined = references.defined or allocate()
local defined = references.defined
local derived = allocate()
local specials = allocate()
+local functions = allocate()
local runners = allocate()
local internals = allocate()
local filters = allocate()
@@ -91,9 +96,13 @@ local tobesaved = allocate()
local collected = allocate()
local tobereferred = allocate()
local referred = allocate()
+local usedinternals = allocate()
+local flaginternals = allocate()
+local usedviews = allocate()
references.derived = derived
references.specials = specials
+references.functions = functions
references.runners = runners
references.internals = internals
references.filters = filters
@@ -103,6 +112,9 @@ references.tobesaved = tobesaved
references.collected = collected
references.tobereferred = tobereferred
references.referred = referred
+references.usedinternals = usedinternals
+references.flaginternals = flaginternals
+references.usedviews = usedviews
local splitreference = references.splitreference
local splitprefix = references.splitcomponent -- replaces: references.splitprefix
@@ -111,6 +123,22 @@ local componentsplitter = references.componentsplitter
local currentreference = nil
+local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
+local context_delayed = context.delayed
+
+local ctx_pushcatcodes = context.pushcatcodes
+local ctx_popcatcodes = context.popcatcodes
+local ctx_dofinishreference = context.dofinishreference
+local ctx_dofromurldescription = context.dofromurldescription
+local ctx_dofromurlliteral = context.dofromurlliteral
+local ctx_dofromfiledescription = context.dofromfiledescription
+local ctx_dofromfileliteral = context.dofromfileliteral
+local ctx_expandreferenceoperation = context.expandreferenceoperation
+local ctx_expandreferencearguments = context.expandreferencearguments
+local ctx_getreferencestructureprefix = context.getreferencestructureprefix
+local ctx_convertnumber = context.convertnumber
+local ctx_emptyreference = context.emptyreference
+
storage.register("structures/references/defined", references.defined, "structures.references.defined")
local initializers = { }
@@ -119,6 +147,7 @@ local finalizers = { }
function references.registerinitializer(func) -- we could use a token register instead
initializers[#initializers+1] = func
end
+
function references.registerfinalizer(func) -- we could use a token register instead
finalizers[#finalizers+1] = func
end
@@ -129,12 +158,32 @@ local function initializer() -- can we use a tobesaved as metatable for collecte
for i=1,#initializers do
initializers[i](tobesaved,collected)
end
+ for prefix, list in next, collected do
+ for tag, data in next, list do
+ local r = data.references
+ local i = r.internal
+ if i then
+ internals[i] = data
+ usedinternals[i] = r.used
+ end
+ end
+ end
end
local function finalizer()
for i=1,#finalizers do
finalizers[i](tobesaved)
end
+ for prefix, list in next, tobesaved do
+ for tag, data in next, list do
+ local r = data.references
+ local i = r.internal
+ local f = flaginternals[i]
+ if f then
+ r.used = usedviews[i] or true
+ end
+ end
+ end
end
job.register('structures.references.collected', tobesaved, initializer, finalizer)
@@ -148,6 +197,38 @@ local function initializer() -- can we use a tobesaved as metatable for collecte
nofreferred = #referred
end
+-- no longer done this way
+
+-- references.resolvers = references.resolvers or { }
+-- local resolvers = references.resolvers
+--
+-- function resolvers.section(var)
+-- local vi = lists.collected[var.i[2]]
+-- if vi then
+-- var.i = vi
+-- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
+-- else
+-- var.i = nil
+-- var.r = 1
+-- end
+-- end
+--
+-- resolvers.float = resolvers.section
+-- resolvers.description = resolvers.section
+-- resolvers.formula = resolvers.section
+-- resolvers.note = resolvers.section
+--
+-- function resolvers.reference(var)
+-- local vi = var.i[2]
+-- if vi then
+-- var.i = vi
+-- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
+-- else
+-- var.i = nil
+-- var.r = 1
+-- end
+-- end
+
-- We make the array sparse (maybe a finalizer should optionally return a table) because
-- there can be quite some page links involved. We only store one action number per page
-- which is normally good enough for what we want (e.g. see above/below) and we do
@@ -215,8 +296,6 @@ local function referredpage(n)
return texgetcount("realpageno")
end
--- setmetatableindex(referred,function(t,k) return referredpage(k) end )
-
references.referredpage = referredpage
function references.registerpage(n) -- called in the backend code
@@ -246,16 +325,15 @@ local function setnextorder(kind,name)
texsetcount("global","locationorder",lastorder)
end
-references.setnextorder = setnextorder
-function references.setnextinternal(kind,name)
+local function setnextinternal(kind,name)
setnextorder(kind,name) -- always incremented with internal
local n = texgetcount("locationcount") + 1
texsetcount("global","locationcount",n)
return n
end
-function references.currentorder(kind,name)
+local function currentorder(kind,name)
return orders[kind] and orders[kind][name] or lastorder
end
@@ -266,43 +344,52 @@ local function setcomponent(data)
local references = data and data.references
if references then
references.component = component
+ if references.prefix == component then
+ references.prefix = nil
+ end
end
return component
end
-- but for the moment we do it here (experiment)
end
-commands.setnextinternalreference = references.setnextinternal
+references.setnextorder = setnextorder
+references.setnextinternal = setnextinternal
+references.currentorder = currentorder
+references.setcomponent = setcomponent
-function commands.currentreferenceorder(kind,name)
- context(references.currentorder(kind,name))
-end
+implement {
+ name = "setnextreferenceorder",
+ actions = setnextorder,
+ arguments = { "string", "string" }
+}
-references.setcomponent = setcomponent
+implement {
+ name = "setnextinternalreference",
+ actions = setnextinternal,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "currentreferenceorder",
+ actions = { currentorder, context },
+ arguments = { "string", "string" }
+}
-function references.set(kind,prefix,tag,data)
--- setcomponent(data)
- local pd = tobesaved[prefix] -- nicer is a metatable
+function references.set(data)
+ local references = data.references
+ local reference = references.reference
+ if not reference or reference == "" then
+ -- report_references("invalid reference") -- harmless
+ return 0
+ end
+ local prefix = references.prefix or ""
+ local pd = tobesaved[prefix] -- nicer is a metatable
if not pd then
pd = { }
tobesaved[prefix] = pd
end
local n = 0
- -- for ref in gmatch(tag,"[^,]+") do
- -- if ref ~= "" then
- -- if check_duplicates and pd[ref] then
- -- if prefix and prefix ~= "" then
- -- report_references("redundant reference %a in namespace %a",ref,prefix)
- -- else
- -- report_references("redundant reference %a",ref)
- -- end
- -- else
- -- n = n + 1
- -- pd[ref] = data
- -- context.dofinishsomereference(kind,prefix,ref)
- -- end
- -- end
- -- end
local function action(ref)
if ref == "" then
-- skip
@@ -315,145 +402,201 @@ function references.set(kind,prefix,tag,data)
else
n = n + 1
pd[ref] = data
- context.dofinishsomereference(kind,prefix,ref)
+ local r = data.references
+ ctx_dofinishreference(prefix or "",ref or "",r and r.internal or 0)
end
end
- process_settings(tag,action)
+ process_settings(reference,action)
return n > 0
end
+-- function references.enhance(prefix,tag)
+-- local l = tobesaved[prefix][tag]
+-- if l then
+-- l.references.realpage = texgetcount("realpageno")
+-- end
+-- end
+
+local getpos = function() getpos = backends.codeinjections.getpos return getpos () end
+
+local function synchronizepage(reference) -- non public helper
+ reference.realpage = texgetcount("realpageno")
+ if jobpositions.used then
+ reference.x, reference.y = getpos()
+ end
+end
+
+references.synchronizepage = synchronizepage
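
The getpos line above is a late-binding trick: backends.codeinjections.getpos is presumably not its final implementation yet when this file loads, so the first call looks it up and replaces the wrapper before delegating. The same idiom in isolation, with hypothetical names (note the forward declaration, so the wrapper overwrites the right variable):

local implementations = { } -- stand-in for a table that only gets filled later

local getposition -- forward declaration so the wrapper can replace itself
getposition = function(...)
    getposition = implementations.getposition -- swap in the real function on first use
    return getposition(...)
end

-- much later the real implementation becomes available:
implementations.getposition = function()
    return 100, 200 -- hypothetical coordinates
end

print(getposition()) -- first call resolves the real function and returns 100, 200
print(getposition()) -- later calls go to it directly, the wrapper is gone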
+
function references.enhance(prefix,tag)
local l = tobesaved[prefix][tag]
if l then
- l.references.realpage = texgetcount("realpageno")
+ synchronizepage(l.references)
end
end
-commands.enhancereference = references.enhance
+implement {
+ name = "enhancereference",
+ actions = references.enhance,
+ arguments = { "string", "string" }
+}
-- -- -- related to strc-ini.lua -- -- --
-references.resolvers = references.resolvers or { }
-local resolvers = references.resolvers
+-- no metatable here .. better be sparse
-local function getfromlist(var)
- local vi = var.i
- if vi then
- vi = vi[3] or lists.collected[vi[2]]
- if vi then
- local r = vi.references and vi.references
- if r then
- r = r.realpage
- end
- if not r then
- r = vi.pagedata and vi.pagedata
- if r then
- r = r.realpage
+local function register_from_lists(collected,derived,pages,sections)
+ local derived_g = derived[""] -- global
+ local derived_p = nil
+ local derived_c = nil
+ local prefix = nil
+ local component = nil
+ local entry = nil
+ if not derived_g then
+ derived_g = { }
+ derived[""] = derived_g
+ end
+ local function action(s)
+ if trace_referencing then
+ report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
+ end
+ if derived_p and not derived_p[s] then
+ derived_p[s] = entry
+ end
+ if derived_c and not derived_c[s] then
+ derived_c[s] = entry
+ end
+ if not derived_g[s] then
+ derived_g[s] = entry -- first wins
+ end
+ end
+ for i=1,#collected do
+ entry = collected[i]
+ local metadata = entry.metadata
+ if metadata then
+ local kind = metadata.kind -- why this check
+ if kind then
+ local references = entry.references
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local realpage = references.realpage
+ if realpage then
+ prefix = references.prefix
+ component = references.component
+ if prefix and prefix ~= "" then
+ derived_p = derived[prefix]
+ if not derived_p then
+ derived_p = { }
+ derived[prefix] = derived_p
+ end
+ end
+ if component and component ~= "" and component ~= prefix then
+ derived_c = derived[component]
+ if not derived_c then
+ derived_c = { }
+ derived[component] = derived_c
+ end
+ end
+ process_settings(reference,action)
+ end
+ end
end
end
- var.i = vi
- var.r = r or 1
- else
- var.i = nil
- var.r = 1
end
- else
- var.i = nil
- var.r = 1
end
+ -- inspect(derived)
end
--- resolvers.section = getfromlist
--- resolvers.float = getfromlist
--- resolvers.description = getfromlist
--- resolvers.formula = getfromlist
--- resolvers.note = getfromlist
-
-setmetatableindex(resolvers,function(t,k)
- local v = getfromlist
- resolvers[k] = v
- return v
-end)
-
-function resolvers.reference(var)
- local vi = var.i[2] -- check
- if vi then
- var.i = vi
- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
- else
- var.i = nil
- var.r = 1
- end
-end
+references.registerinitializer(function() register_from_lists(lists.collected,derived) end)
-local function register_from_lists(collected,derived,pages,sections)
- local g = derived[""] if not g then g = { } derived[""] = g end -- global
- for i=1,#collected do
- local entry = collected[i]
- local m, r = entry.metadata, entry.references
- if m and r then
- local reference = r.reference or ""
- local prefix = r.referenceprefix or ""
- local component = r.component and r.component or ""
- if reference ~= "" then
- local kind, realpage = m.kind, r.realpage
- if kind and realpage then
- local d = derived[prefix]
- if not d then
- d = { }
- derived[prefix] = d
+-- tracing
+
+local function collectbypage(tracedpages)
+ -- lists
+ do
+ local collected = structures.lists.collected
+ local data = nil
+ local function action(reference)
+ local prefix = data.prefix
+ local component = data.component
+ local realpage = data.realpage
+ if realpage then
+ local pagelist = rawget(tracedpages,realpage)
+ local internal = data.internal or 0
+ local prefix = (prefix ~= "" and prefix) or (component ~= "" and component) or ""
+ local pagedata = { prefix, reference, internal }
+ if pagelist then
+ pagelist[#pagelist+1] = pagedata
+ else
+ tracedpages[realpage] = { pagedata }
+ end
+ if internal > 0 then
+ data.usedprefix = prefix
+ end
+ end
+ end
+ for i=1,#collected do
+ local entry = collected[i]
+ local metadata = entry.metadata
+ if metadata and metadata.kind then
+ data = entry.references
+ if data then
+ local reference = data.reference
+ if reference and reference ~= "" then
+ process_settings(reference,action)
end
- local c = derived[component]
- if not c then
- c = { }
- derived[component] = c
+ end
+ end
+ end
+ end
+ -- references
+ do
+ for prefix, list in next, collected do
+ for reference, entry in next, list do
+ local data = entry.references
+ if data then
+ local realpage = data.realpage
+ local internal = data.internal or 0
+ local pagelist = rawget(tracedpages,realpage)
+ local pagedata = { prefix, reference, internal }
+ if pagelist then
+ pagelist[#pagelist+1] = pagedata
+ else
+ tracedpages[realpage] = { pagedata }
end
- local t = { kind, i, entry }
- -- for s in gmatch(reference,"%s*([^,]+)") do
- -- if trace_referencing then
- -- report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
- -- end
- -- c[s] = c[s] or t -- share them
- -- d[s] = d[s] or t -- share them
- -- g[s] = g[s] or t -- first wins
- -- end
- local function action(s)
- if trace_referencing then
- report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
- end
- c[s] = c[s] or t -- share them
- d[s] = d[s] or t -- share them
- g[s] = g[s] or t -- first wins
+ if internal > 0 then
+ data.usedprefix = prefix
end
- process_settings(reference,action)
end
end
end
end
--- inspect(derived)
end
-references.registerinitializer(function() register_from_lists(lists.collected,derived) end)
+references.tracedpages = table.setmetatableindex(allocate(),function(t,k)
+ if collectbypage then
+ collectbypage(t)
+ collectbypage = nil
+ end
+ return rawget(t,k)
+end)
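
references.tracedpages is filled lazily: the __index handler installed by table.setmetatableindex runs collectbypage once on the first lookup, then discards it, so later lookups are plain rawgets. The same fill-on-first-access pattern in isolation, with a hypothetical filler and payload:

local setmetatableindex = table.setmetatableindex

local fill -- forward declaration of the one-shot filler

local cache = setmetatableindex({ }, function(t,k)
    if fill then
        fill(t)    -- populate the whole table in one go
        fill = nil -- make sure the expensive collector never runs twice
    end
    return rawget(t,k) -- plain lookup afterwards (can still be nil)
end)

fill = function(t)
    for page=1,3 do
        t[page] = { "demo entry for page " .. page } -- hypothetical payload
    end
end

print(cache[2][1]) -- first access triggers fill, then returns "demo entry for page 2"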
-- urls
-references.urls = references.urls or { }
-references.urls.data = references.urls.data or { }
-
-local urls = references.urls.data
+local urls = references.urls or { }
+references.urls = urls
+local urldata = urls.data or { }
+urls.data = urldata
-function references.urls.define(name,url,file,description)
+function urls.define(name,url,file,description)
if name and name ~= "" then
- urls[name] = { url or "", file or "", description or url or file or ""}
+ urldata[name] = { url or "", file or "", description or url or file or ""}
end
end
-local pushcatcodes = context.pushcatcodes
-local popcatcodes = context.popcatcodes
-local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
-
-function references.urls.get(name)
- local u = urls[name]
+function urls.get(name)
+ local u = urldata[name]
if u then
local url, file = u[1], u[2]
if file and file ~= "" then
@@ -464,59 +607,93 @@ function references.urls.get(name)
end
end
-function commands.geturl(name)
- local url = references.urls.get(name)
+function urls.found(name)
+ return urldata[name]
+end
+
+local function geturl(name)
+ local url = urls.get(name)
if url and url ~= "" then
- pushcatcodes(txtcatcodes)
+ ctx_pushcatcodes(txtcatcodes)
context(url)
- popcatcodes()
+ ctx_popcatcodes()
end
end
--- function commands.gethyphenatedurl(name,...)
--- local url = references.urls.get(name)
--- if url and url ~= "" then
--- hyphenatedurl(url,...)
--- end
--- end
+implement {
+ name = "doifelseurldefined",
+ actions = { urls.found, commands.doifelse },
+ arguments = "string"
+}
-function commands.doifurldefinedelse(name)
- commands.doifelse(urls[name])
-end
+implement {
+ name = "useurl",
+ actions = urls.define,
+ arguments = { "string", "string", "string", "string" }
+}
-commands.useurl= references.urls.define
+implement {
+ name = "geturl",
+ actions = geturl,
+ arguments = "string",
+}
-- files
-references.files = references.files or { }
-references.files.data = references.files.data or { }
-
-local files = references.files.data
+local files = references.files or { }
+references.files = files
+local filedata = files.data or { }
+files.data = filedata
-function references.files.define(name,file,description)
+function files.define(name,file,description)
if name and name ~= "" then
- files[name] = { file or "", description or file or "" }
+ filedata[name] = { file or "", description or file or "" }
end
end
-function references.files.get(name,method,space) -- method: none, before, after, both, space: yes/no
- local f = files[name]
+function files.get(name,method,space) -- method: none, before, after, both, space: yes/no
+ local f = filedata[name]
if f then
context(f[1])
end
end
-function commands.doiffiledefinedelse(name)
- commands.doifelse(files[name])
+function files.found(name)
+ return filedata[name]
end
-commands.usefile= references.files.define
+local function getfile(name)
+ local fil = files.get(name)
+ if fil and fil ~= "" then
+ ctx_pushcatcodes(txtcatcodes)
+ context(fil)
+ ctx_popcatcodes()
+ end
+end
+
+implement {
+ name = "doifelsefiledefined",
+ actions = { files.found, commands.doifelse },
+ arguments = "string"
+}
+
+implement {
+ name = "usefile",
+ actions = files.define,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "getfile",
+ actions = getfile,
+ arguments = "string"
+}
-- helpers
function references.checkedfile(whatever) -- return whatever if not resolved
if whatever then
- local w = files[whatever]
+ local w = filedata[whatever]
if w then
return w[1]
else
@@ -527,7 +704,7 @@ end
function references.checkedurl(whatever) -- return whatever if not resolved
if whatever then
- local w = urls[whatever]
+ local w = urldata[whatever]
if w then
local u, f = w[1], w[2]
if f and f ~= "" then
@@ -543,11 +720,11 @@ end
function references.checkedfileorurl(whatever,default) -- return nil, nil if not resolved
if whatever then
- local w = files[whatever]
+ local w = filedata[whatever]
if w then
return w[1], nil
else
- local w = urls[whatever]
+ local w = urldata[whatever]
if w then
local u, f = w[1], w[2]
if f and f ~= "" then
@@ -563,25 +740,25 @@ end
-- programs
-references.programs = references.programs or { }
-references.programs.data = references.programs.data or { }
+local programs = references.programs or { }
+references.programs = programs
+local programdata = programs.data or { }
+programs.data = programdata
-local programs = references.programs.data
-
-function references.programs.define(name,file,description)
+function programs.define(name,file,description)
if name and name ~= "" then
- programs[name] = { file or "", description or file or ""}
+ programdata[name] = { file or "", description or file or ""}
end
end
-function references.programs.get(name)
- local f = programs[name]
+function programs.get(name)
+ local f = programdata[name]
return f and f[1]
end
function references.checkedprogram(whatever) -- return whatever if not resolved
if whatever then
- local w = programs[whatever]
+ local w = programdata[whatever]
if w then
return w[1]
else
@@ -590,23 +767,33 @@ function references.checkedprogram(whatever) -- return whatever if not resolved
end
end
-commands.defineprogram = references.programs.define
+implement {
+ name = "defineprogram",
+ actions = programs.define,
+ arguments = { "string", "string", "string" }
+}
-function commands.getprogram(name)
- local f = programs[name]
- if f then
- context(f[1])
+local function getprogram(name)
+ local p = programdata[name]
+ if p then
+ context(p[1])
end
end
+implement {
+ name = "getprogram",
+ actions = getprogram,
+ arguments = "string"
+}
+
-- shared by urls and files
-function references.whatfrom(name)
- context((urls[name] and v_url) or (files[name] and v_file) or v_unknown)
-end
+-- function references.whatfrom(name)
+-- context((urldata[name] and v_url) or (filedata[name] and v_file) or v_unknown)
+-- end
function references.from(name)
- local u = urls[name]
+ local u = urldata[name]
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
@@ -618,7 +805,7 @@ function references.from(name)
return url
end
else
- local f = files[name]
+ local f = filedata[name]
if f then
local file, description = f[1], f[2]
if description ~= "" then
@@ -630,34 +817,40 @@ function references.from(name)
end
end
-function commands.from(name)
- local u = urls[name]
+local function from(name)
+ local u = urldata[name]
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
- context.dofromurldescription(description)
+ ctx_dofromurldescription(description)
-- ok
elseif file and file ~= "" then
- context.dofromurlliteral(url .. "/" .. file)
+ ctx_dofromurlliteral(url .. "/" .. file)
else
- context.dofromurlliteral(url)
+ ctx_dofromurlliteral(url)
end
else
- local f = files[name]
+ local f = filedata[name]
if f then
local file, description = f[1], f[2]
if description ~= "" then
- context.dofromfiledescription(description)
+ ctx_dofromfiledescription(description)
else
- context.dofromfileliteral(file)
+ ctx_dofromfileliteral(file)
end
end
end
end
+implement {
+ name = "from",
+ actions = from,
+ arguments = "string"
+}
+
function references.define(prefix,reference,list)
local d = defined[prefix] if not d then d = { } defined[prefix] = d end
- d[reference] = { "defined", list }
+ d[reference] = list
end
function references.reset(prefix,reference)
@@ -667,44 +860,34 @@ function references.reset(prefix,reference)
end
end
-commands.definereference = references.define
-commands.resetreference = references.reset
-
--- \primaryreferencefoundaction
--- \secondaryreferencefoundaction
--- \referenceunknownaction
-
--- t.special t.operation t.arguments t.outer t.inner
+implement {
+ name = "definereference",
+ actions = references.define,
+ arguments = { "string", "string", "string" }
+}
--- to what extend do we check the non prefixed variant
+implement {
+ name = "resetreference",
+ actions = references.reset,
+ arguments = { "string", "string" }
+}
-local strict = false
+setmetatableindex(defined,"table")
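
Passing the string "table" to setmetatableindex gives defined an __index handler that creates and stores an empty subtable on first access, which is why the rewritten resolve below can write defined[prefix][ri] or defined[""][ri] without the nil guards of the old code. A minimal sketch of that auto-vivification behaviour, assuming this is what the "table" shortcut does:

local setmetatableindex = table.setmetatableindex

local defined = setmetatableindex({ }, "table")

-- no need to check whether the prefix level exists first:
defined["chap"]["intro"] = "some data" -- the "chap" subtable is created on demand
print(defined["chap"]["intro"])        -- some data
print(defined["foo"]["bar"])           -- nil, but defined["foo"] now exists as { }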
local function resolve(prefix,reference,args,set) -- we start with prefix,reference
if reference and reference ~= "" then
if not set then
set = { prefix = prefix, reference = reference }
else
- set.reference = set.reference or reference
- set.prefix = set.prefix or prefix
+ if not set.reference then set.reference = reference end
+ if not set.prefix then set.prefix = prefix end
end
local r = settings_to_array(reference)
for i=1,#r do
local ri = r[i]
- local d
- if strict then
- d = defined[prefix] or defined[""]
- d = d and d[ri]
- else
- d = defined[prefix]
- d = d and d[ri]
- if not d then
- d = defined[""]
- d = d and d[ri]
- end
- end
+ local d = defined[prefix][ri] or defined[""][ri]
if d then
- resolve(prefix,d[2],nil,set)
+ resolve(prefix,d,nil,set)
else
local var = splitreference(ri)
if var then
@@ -712,20 +895,10 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere
local vo, vi = var.outer, var.inner
if not vo and vi then
-- to be checked
- if strict then
- d = defined[prefix] or defined[""]
- d = d and d[vi]
- else
- d = defined[prefix]
- d = d and d[vi]
- if not d then
- d = defined[""]
- d = d and d[vi]
- end
- end
+ d = defined[prefix][vi] or defined[""][vi]
--
if d then
- resolve(prefix,d[2],var.arguments,set) -- args can be nil
+ resolve(prefix,d,var.arguments,set) -- args can be nil
else
if args then var.arguments = args end
set[#set+1] = var
@@ -752,35 +925,47 @@ end
references.currentset = nil
-function commands.setreferenceoperation(k,v)
+local function setreferenceoperation(k,v)
references.currentset[k].operation = v
end
-function commands.setreferencearguments(k,v)
+local function setreferencearguments(k,v)
references.currentset[k].arguments = v
end
-local expandreferenceoperation = context.expandreferenceoperation
-local expandreferencearguments = context.expandreferencearguments
-
function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex
local currentset = references.currentset
if currentset and currentset.has_tex then
for i=1,#currentset do
local ci = currentset[i]
local operation = ci.operation
- if operation and find(operation,"\\") then -- if o_has_tex then
- expandreferenceoperation(i,operation)
+ if operation and find(operation,"\\",1,true) then -- if o_has_tex then
+ ctx_expandreferenceoperation(i,operation)
end
local arguments = ci.arguments
- if arguments and find(arguments,"\\") then -- if a_has_tex then
- expandreferencearguments(i,arguments)
+ if arguments and find(arguments,"\\",1,true) then -- if a_has_tex then
+ ctx_expandreferencearguments(i,arguments)
end
end
end
end
-commands.expandcurrentreference = references.expandcurrent -- for the moment the same
+implement {
+ name = "expandcurrentreference",
+ actions = references.expandcurrent
+}
+
+implement {
+ name = "setreferenceoperation",
+ actions = setreferenceoperation,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "setreferencearguments",
+ actions = setreferencearguments,
+ arguments = { "integer", "string" }
+}
local externals = { }
@@ -824,7 +1009,7 @@ local function loadexternalreferences(name,utilitydata)
local realpage = references.realpage
if kind and realpage then
references.pagedata = pages[realpage]
- local prefix = references.referenceprefix or ""
+ local prefix = references.prefix or ""
local target = external[prefix]
if not target then
target = { }
@@ -856,8 +1041,8 @@ end
local externalfiles = { }
-table.setmetatableindex(externalfiles, function(t,k)
- local v = files[k]
+setmetatableindex(externalfiles, function(t,k)
+ local v = filedata[k]
if not v then
v = { k, k }
end
@@ -865,7 +1050,7 @@ table.setmetatableindex(externalfiles, function(t,k)
return v
end)
-table.setmetatableindex(externals,function(t,k) -- either or not automatically
+setmetatableindex(externals, function(t,k) -- either or not automatically
local filename = externalfiles[k][1] -- filename
local fullname = file.replacesuffix(filename,"tuc")
if lfs.isfile(fullname) then -- todo: use other locator
@@ -926,7 +1111,7 @@ local function loadproductreferences(productname,componentname,utilitydata)
local realpage = references.realpage
if kind and realpage then
references.pagedata = pages[realpage]
- local prefix = references.referenceprefix or ""
+ local prefix = references.prefix or ""
local component = references.component
local ctarget, ptarget
if not component or component == componentname then
@@ -952,22 +1137,6 @@ local function loadproductreferences(productname,componentname,utilitydata)
ptarget = { }
productreferences[prefix] = ptarget
end
- -- for s in gmatch(reference,"%s*([^,]+)") do
- -- if ptarget then
- -- if trace_importing then
- -- report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
- -- "product",kind,productname,prefix,s)
- -- end
- -- ptarget[s] = ptarget[s] or entry
- -- end
- -- if ctarget then
- -- if trace_importing then
- -- report_importing("registering %s reference, kind %a, name %a, prefix %a, referenc %a",
- -- "component",kind,productname,prefix,s)
- -- end
- -- ctarget[s] = ctarget[s] or entry
- -- end
- -- end
local function action(s)
if ptarget then
if trace_importing then
@@ -1062,7 +1231,7 @@ references.registerinitializer(function(tobesaved,collected)
productdata.components = componentlist(job.structure.collected) or { }
end)
-function structures.references.loadpresets(product,component) -- we can consider a special components hash
+function references.loadpresets(product,component) -- we can consider a special components hash
if product and component and product~= "" and component ~= "" and not productdata.product then -- maybe: productdata.filename ~= filename
productdata.product = product
productdata.component = component
@@ -1082,13 +1251,13 @@ function structures.references.loadpresets(product,component) -- we can consider
end
end
-structures.references.productdata = productdata
+references.productdata = productdata
local useproduct = commands.useproduct
if useproduct then
- function commands.useproduct(product)
+ local function newuseproduct(product)
useproduct(product)
if texconditionals.autocrossfilereferences then
local component = justacomponent()
@@ -1096,11 +1265,18 @@ if useproduct then
if trace_referencing or trace_importing then
report_references("loading presets for component %a of product %a",component,product)
end
- structures.references.loadpresets(product,component)
+ references.loadpresets(product,component)
end
end
end
+ implement {
+ name = "useproduct",
+ actions = newuseproduct,
+ arguments = "string",
+ overload = true,
+ }
+
end
-- productdata.firstsection.numberdata.numbers
@@ -1194,7 +1370,7 @@ local function identify_arguments(set,var,i)
local s = specials[var.inner]
if s then
-- inner{argument}
- var.kind = "special with arguments"
+ var.kind = "special operation with arguments"
else
var.error = "unknown inner or special"
end
@@ -1204,114 +1380,105 @@ local function identify_arguments(set,var,i)
return var
end
-local function identify_inner(set,var,prefix,collected,derived,tobesaved)
+-- needs checking: if we don't do too much (redundant) checking now
+-- inner ... we could move the prefix logic into the parser so that we have 'm for each entry
+-- foo:bar -> foo == prefix (first we try the global one)
+-- -:bar -> ignore prefix
+
+local function finish_inner(var,p,i)
+ var.kind = "inner"
+ var.i = i
+ var.p = p
+ var.r = (i.references and i.references.realpage) or (i.pagedata and i.pagedata.realpage) or 1
+ return var
+end
+
+local function identify_inner(set,var,prefix,collected,derived)
local inner = var.inner
- local outer = var.outer
- -- inner ... we could move the prefix logic into the parser so that we have 'm for each entry
- -- foo:bar -> foo == prefix (first we try the global one)
- -- -:bar -> ignore prefix
- local p, i = prefix, nil
- local splitprefix, splitinner
-- the next test is a safeguard when references are auto loaded from outer
- if inner then
- splitprefix, splitinner = lpegmatch(prefixsplitter,inner)
+ if not inner or inner == "" then
+ return false
end
- -- these are taken from other anonymous references
+ local splitprefix, splitinner = lpegmatch(prefixsplitter,inner)
if splitprefix and splitinner then
+ -- we check for a prefix:reference instance in the regular set of collected
+ -- references; a special case is -: which forces a lookup in the global list
if splitprefix == "-" then
- i = collected[""]
- i = i and i[splitinner]
+ local i = collected[""]
if i then
- p = ""
- end
- else
- i = collected[splitprefix]
- i = i and i[splitinner]
- if i then
- p = splitprefix
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,"",i)
+ end
end
end
- end
- -- todo: strict here
- if not i then
- i = collected[prefix]
- i = i and i[inner]
- if i then
- p = prefix
- end
- end
- if not i and prefix ~= "" then
- i = collected[""]
- i = i and i[inner]
+ local i = collected[splitprefix]
if i then
- p = ""
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,splitprefix,i)
+ end
end
- end
- if i then
- var.i = { "reference", i }
- resolvers.reference(var)
- var.kind = "inner"
- var.p = p
- elseif derived then
- -- these are taken from other data structures (like lists)
- if splitprefix and splitinner then
+ if derived then
+ -- next we look for a reference in the regular set of collected references
+ -- using the prefix that is active at this moment (so we overload the given
+ -- these are taken from other data structures (like lists)
if splitprefix == "-" then
- i = derived[""]
- i = i and i[splitinner]
+ local i = derived[""]
if i then
- p = ""
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,"",i)
+ end
end
- else
- i = derived[splitprefix]
- i = i and i[splitinner]
+ end
+ local i = derived[splitprefix]
+ if i then
+ i = i[splitinner]
if i then
- p = splitprefix
+ return finish_inner(var,splitprefix,i)
end
end
end
- if not i then
- i = derived[prefix]
- i = i and i[inner]
- if i then
- p = prefix
- end
+ end
+ -- we now ignore the split prefix and treat the whole inner as a potential
+    -- reference into the global list
+ local i = collected[prefix]
+ if i then
+ i = i[inner]
+ if i then
+ return finish_inner(var,prefix,i)
end
- if not i and prefix ~= "" then
- i = derived[""]
- i = i and i[inner]
+ end
+ if not i and derived then
+ -- and if not found we look in the derived references
+ local i = derived[prefix]
+ if i then
+ i = i[inner]
if i then
- p = ""
+ return finish_inner(var,prefix,i)
end
end
+ end
+ return false
+end
+
+local function unprefixed_inner(set,var,prefix,collected,derived,tobesaved)
+ local inner = var.inner
+ local s = specials[inner]
+ if s then
+ var.kind = "special"
+ else
+ local i = (collected and collected[""] and collected[""][inner]) or
+ (derived and derived [""] and derived [""][inner]) or
+ (tobesaved and tobesaved[""] and tobesaved[""][inner])
if i then
var.kind = "inner"
- var.i = i
- var.p = p
- local ri = resolvers[i[1]]
- if ri then
- ri(var)
- else
- -- can't happen as we catch it with a metatable now
- report_references("unknown inner resolver for %a",i[1])
- end
+ var.p = ""
+ var.i = i
+ var.r = (i.references and i.references.realpage) or (i.pagedata and i.pagedata.realpage) or 1
else
- -- no prefixes here
- local s = specials[inner]
- if s then
- var.kind = "special"
- else
- i = (collected and collected[""] and collected[""][inner]) or
- (derived and derived [""] and derived [""][inner]) or
- (tobesaved and tobesaved[""] and tobesaved[""][inner])
- if i then
- var.kind = "inner"
- var.i = { "reference", i }
- resolvers.reference(var)
- var.p = ""
- else
- var.error = "unknown inner or special"
- end
- end
+ var.error = "unknown inner or special"
end
end
return var
@@ -1322,9 +1489,8 @@ local function identify_outer(set,var,i)
local inner = var.inner
local external = externals[outer]
if external then
- local v = copytable(var)
- v = identify_inner(set,v,nil,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,nil,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1332,9 +1498,8 @@ local function identify_outer(set,var,i)
end
return v
end
- v = copytable(var)
- local v = identify_inner(set,v,v.outer,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,var.outer,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1345,8 +1510,8 @@ local function identify_outer(set,var,i)
end
local external = productdata.componentreferences[outer]
if external then
- local v = identify_inner(set,copytable(var),nil,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,nil,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1373,6 +1538,8 @@ local function identify_outer(set,var,i)
local arguments = var.arguments
local operation = var.operation
if inner then
+ -- tricky: in this case we can only use views when we're sure that all inners
+ -- are flushed in the outer document so that should become an option
if arguments then
-- outer::inner{argument}
var.kind = "outer with inner with arguments"
@@ -1380,9 +1547,9 @@ local function identify_outer(set,var,i)
-- outer::inner
var.kind = "outer with inner"
end
- var.i = { "reference", inner }
- resolvers.reference(var)
+ var.i = inner
var.f = outer
+ var.r = (inner.references and inner.references.realpage) or (inner.pagedata and inner.pagedata.realpage) or 1
if trace_identifying then
report_identify_outer(set,var,i,"2e")
end
@@ -1419,57 +1586,62 @@ local function identify_outer(set,var,i)
return var
end
+-- todo: avoid copy
+
local function identify_inner_or_outer(set,var,i)
-- here we fall back on product data
local inner = var.inner
if inner and inner ~= "" then
- local v = identify_inner(set,copytable(var),set.prefix,collected,derived,tobesaved)
- if v.i and not v.error then
- v.kind = "inner" -- check this
+
+ -- first we look up in collected and derived using the current prefix
+
+ local prefix = set.prefix
+
+ local v = identify_inner(set,var,set.prefix,collected,derived)
+ if v then
if trace_identifying then
report_identify_outer(set,v,i,"4a")
end
return v
end
- -- these get auto prefixes but are loaded in the document so they are
- -- internal .. we also set the realpage (for samepage analysis)
+        -- next we look at each component (but we can omit the already consulted one)
local components = job.structure.components
if components then
- for i=1,#components do
- local component = components[i]
- local data = collected[component]
- local vi = data and data[inner]
- if vi then
--- var = copytable(var)
--- var.kind = "inner"
--- var.i = vi
--- var.p = component
--- runners.inner(var.r = vi.references.realpage
--- if trace_identifying then
--- report_identify_outer(set,var,i,"4x")
--- end
--- return var
-local v = identify_inner(set,copytable(var),component,collected) -- is copy needed ?
-if v.i and not v.error then
- v.kind = "inner"
- if trace_identifying then
- report_identify_outer(set,var,i,"4x")
- end
- return v
-end
+ for c=1,#components do
+ local component = components[c]
+ if component ~= prefix then
+ local v = identify_inner(set,var,component,collected,derived)
+ if v then
+ if trace_identifying then
+ report_identify_outer(set,var,i,"4b")
+ end
+ return v
+ end
end
end
end
+ -- as a last resort we will consult the global lists
+
+ local v = unprefixed_inner(set,var,"",collected,derived,tobesaved)
+ if v then
+ if trace_identifying then
+ report_identify_outer(set,v,i,"4c")
+ end
+ return v
+ end
+
+        -- now it gets bad ... we need to look in external files ... keep in mind that
+ -- we can best use explicit references for this ... we might issue a warning
+
local componentreferences = productdata.componentreferences
local productreferences = productdata.productreferences
local components = productdata.components
if components and componentreferences then
- -- for component, data in next, productdata.componentreferences do -- better do this in order of processing:
- for i=1,#components do
- local component = components[i]
+ for c=1,#components do
+ local component = components[c]
local data = componentreferences[component]
if data then
local d = data[""]
@@ -1480,7 +1652,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4b")
+ report_identify_outer(set,var,i,"4d")
end
return var
end
@@ -1500,7 +1672,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4c")
+ report_identify_outer(set,var,i,"4e")
end
return var
end
@@ -1515,7 +1687,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4d")
+ report_identify_outer(set,var,i,"4f")
end
return var
end
@@ -1526,30 +1698,18 @@ end
var.error = "no inner"
end
if trace_identifying then
- report_identify_outer(set,var,i,"4e")
+ report_identify_outer(set,var,i,"4g")
end
return var
end
--- local function identify_inner_or_outer(set,var,i)
--- -- we might consider first checking with a prefix prepended and then without
--- -- which is better for fig:oeps
--- local var = do_identify_inner_or_outer(set,var,i)
--- if var.error then
--- local prefix = set.prefix
--- if prefix and prefix ~= "" then
--- var.inner = prefix .. ':' .. var.inner
--- var.error = nil
--- return do_identify_inner_or_outer(set,var,i)
--- end
--- end
--- return var
--- end
-
local function identify_inner_component(set,var,i)
-- we're in a product (maybe ignore when same as component)
local component = var.component
- identify_inner(set,var,component,collected,derived,tobesaved)
+ local v = identify_inner(set,var,component,collected,derived)
+ if not v then
+ var.error = "unknown inner in component"
+ end
if trace_identifying then
report_identify_outer(set,var,i,"5a")
end
@@ -1611,7 +1771,11 @@ local function identify(prefix,reference)
set.n = nofidentified
for i=1,#set do
local var = set[i]
- if var.special then
+ local spe = var.special
+ local fnc = functions[spe]
+ if fnc then
+ var = fnc(var) or { error = "invalid special function" }
+ elseif spe then
var = identify_special(set,var,i)
elseif var.outer then
var = identify_outer(set,var,i)
@@ -1638,7 +1802,7 @@ references.identify = identify
local unknowns, nofunknowns, f_valid = { }, 0, formatters["[%s][%s]"]
-function references.valid(prefix,reference,highlight,newwindow,layer)
+function references.valid(prefix,reference,specification)
local set, bug = identify(prefix,reference)
local unknown = bug or #set == 0
if unknown then
@@ -1653,16 +1817,28 @@ function references.valid(prefix,reference,highlight,newwindow,layer)
unknowns[str] = u + 1
end
else
- set.highlight, set.newwindow, set.layer = highlight, newwindow, layer
+ set.highlight = specification.highlight
+ set.newwindow = specification.newwindow
+ set.layer = specification.layer
currentreference = set[1]
end
-- we can do the expansion here which saves a call
return not unknown
end
-function commands.doifelsereference(prefix,reference,highlight,newwindow,layer)
- commands.doifelse(references.valid(prefix,reference,highlight,newwindow,layer))
-end
+implement {
+ name = "doifelsereference",
+ actions = { references.valid, commands.doifelse },
+ arguments = {
+ "string",
+ "string",
+ {
+ { "highlight", "boolean" },
+ { "newwindow", "boolean" },
+ { "layer" },
+ }
+ }
+}
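-- Editorial sketch (not part of the commit): calling the new three argument
-- references.valid from the lua end; the field names follow the implement
-- declaration above, the label and values are invented.
--
-- if references.valid("","fig:mess",{ highlight = true, newwindow = false, layer = "" }) then
--     -- currentreference is now set[1] and can be filtered
-- end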
function references.reportproblems() -- might become local
if nofunknowns > 0 then
@@ -1685,92 +1861,199 @@ end
luatex.registerstopactions(references.reportproblems)
-local innermethod = "names"
+-- The auto method will try to avoid named internals in a clever way which
+-- can make files smaller without sacrificing external references. Some of
+-- the housekeeping happens at the backend side.
+
+local innermethod = v_auto -- only page|auto now
+local defaultinnermethod = v_auto
+references.innermethod = innermethod -- don't mess with this one directly
function references.setinnermethod(m)
- if m then
- if m == "page" or m == "mixed" or m == "names" then
- innermethod = m
- elseif m == true or m == v_yes then
- innermethod = "page"
- end
+ if toboolean(m) or m == v_page then
+ innermethod = v_page
+ else
+ innermethod = v_auto
end
+ references.innermethod = innermethod
function references.setinnermethod()
report_references("inner method is already set and frozen to %a",innermethod)
end
end
+implement {
+ name = "setinnerreferencemethod",
+ actions = references.setinnermethod,
+ arguments = "string",
+ onlyonce = true
+}
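-- Editorial usage sketch: the setter accepts anything toboolean recognizes as
-- well as the page keyword and then freezes itself.
--
-- references.setinnermethod("page") -- or "yes" / true; anything else selects auto
-- references.setinnermethod("auto") -- now only reports that the method is frozen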
+
function references.getinnermethod()
- return innermethod or "names"
+ return innermethod or defaultinnermethod
end
-directives.register("references.linkmethod", function(v) -- page mixed names
+directives.register("references.linkmethod", function(v) -- page auto
references.setinnermethod(v)
end)
-- this is inconsistent
-function references.setinternalreference(prefix,tag,internal,view) -- needs checking
- if innermethod == "page" then
- return unsetvalue
- else
- local t, tn = { }, 0 -- maybe add to current
- if tag then
+local destinationattributes = { }
+
+local function setinternalreference(specification)
+ local internal = specification.internal
+ local destination = unsetvalue
+ if innermethod == v_auto then
+ local t, tn = { }, 0 -- maybe add to current (now only used for tracing)
+ local reference = specification.reference
+ if reference then
+ local prefix = specification.prefix
if prefix and prefix ~= "" then
prefix = prefix .. ":" -- watch out, : here
- -- for ref in gmatch(tag,"[^,]+") do
- -- tn = tn + 1
- -- t[tn] = prefix .. ref
- -- end
local function action(ref)
tn = tn + 1
t[tn] = prefix .. ref
end
- process_settings(tag,action)
+ process_settings(reference,action)
else
- -- for ref in gmatch(tag,"[^,]+") do
- -- tn = tn + 1
- -- t[tn] = ref
- -- end
local function action(ref)
tn = tn + 1
t[tn] = ref
end
- process_settings(tag,action)
+ process_settings(reference,action)
end
end
- if internal and innermethod == "names" then -- mixed or page
+ -- ugly .. later we decide to ignore it when we have a real one
+ -- but for testing we might want to see them all
+ if internal then
tn = tn + 1
- t[tn] = "aut:" .. internal
+ t[tn] = internal -- when number it's internal
end
- local destination = references.mark(t,nil,nil,view) -- returns an attribute
- texsetcount("lastdestinationattribute",destination)
- return destination
+ destination = references.mark(t,nil,nil,specification.view) -- returns an attribute
end
+ if internal then -- new
+ destinationattributes[internal] = destination
+ end
+ texsetcount("lastdestinationattribute",destination)
+ return destination
end
-function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here
- local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue
- texsetcount("lastdestinationattribute",attr)
- return attr
+local function getinternalreference(internal)
+ return destinationattributes[internal] or 0
end
-commands.setreferenceattribute = references.setandgetattribute
+references.setinternalreference = setinternalreference
+references.getinternalreference = getinternalreference
-function references.getinternalreference(n) -- n points into list (todo: registers)
- local l = lists.collected[n]
- return l and l.references.internal or n
-end
+implement {
+ name = "setinternalreference",
+ actions = setinternalreference,
+ arguments = {
+ {
+ { "prefix" },
+ { "reference" },
+ { "internal", "integer" },
+ { "view" }
+ }
+ }
+}
-function commands.setinternalreference(prefix,tag,internal,view) -- needs checking
- context(references.setinternalreference(prefix,tag,internal,view))
+-- implement {
+-- name = "getinternalreference",
+-- actions = { getinternalreference, context },
+-- arguments = "integer",
+-- }
+
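-- Editorial sketch (not part of the commit): setinternalreference records the
-- destination attribute per internal number, so a later lookup is a plain table
-- access; the internal number here is invented.
--
-- local attr = setinternalreference { reference = "intro", internal = 123, view = "" }
-- assert(getinternalreference(123) == attr)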
+function references.setandgetattribute(data) -- maybe do internal automatically here
+ local attr = unsetvalue
+ local mdat = data.metadata
+ local rdat = data.references
+ if mdat and rdat then
+ if not rdat.section then
+ rdat.section = structures.sections.currentid()
+ end
+ local ndat = data.numberdata
+ if ndat then
+ local numbers = ndat.numbers
+ if type(numbers) == "string" then
+ ndat.numbers = counters.compact(numbers,nil,true)
+ end
+ data.numberdata = helpers.simplify(ndat)
+ end
+ local pdat = data.prefixdata
+ if pdat then
+ data.prefixdata = helpers.simplify(pdat)
+ end
+ local udat = data.userdata
+ if type(udat) == "string" then
+ data.userdata = helpers.touserdata(udat)
+ end
+ if not rdat.block then
+ rdat.block = structures.sections.currentblock()
+ end
+ local done = references.set(data) -- we had kind i.e .item -> full
+ if done then
+ attr = setinternalreference {
+ prefix = rdat.prefix,
+ reference = rdat.reference,
+ internal = rdat.internal,
+ view = rdat.view
+ } or unsetvalue
+ end
+ end
+ texsetcount("lastdestinationattribute",attr)
+ return attr
end
-function commands.getinternalreference(n) -- this will also be a texcount
+implement {
+ name = "setreferenceattribute",
+ actions = references.setandgetattribute,
+ arguments = {
+ {
+ {
+ "references", {
+ { "internal", "integer" },
+ { "block" },
+ { "view" },
+ { "prefix" },
+ { "reference" },
+ },
+ },
+ {
+ "metadata", {
+ { "kind" },
+ { "xmlroot" },
+ { "catcodes", "integer" },
+ },
+ },
+ {
+ "prefixdata", { "*" }
+ },
+ {
+ "numberdata", { "*" }
+ },
+ {
+ "entries", { "*" }
+ },
+ {
+ "userdata"
+ }
+ }
+ }
+}
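-- Editorial sketch (not part of the commit): the kind of table that
-- \clf_setreferenceattribute hands to references.setandgetattribute, mirroring
-- the interface declaration above; all values are invented.
--
-- references.setandgetattribute {
--     references = { internal = 123, view = "", prefix = "", reference = "fig:mess" },
--     metadata   = { kind = "full" },
--     entries    = { text = "a caption" },
-- }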
+
+function references.getinternallistreference(n) -- n points into list (todo: registers)
local l = lists.collected[n]
- context(l and l.references.internal or n)
+ local i = l and l.references.internal
+ return i and destinationattributes[i] or 0
end
+implement {
+ name = "getinternallistreference",
+ actions = { references.getinternallistreference, context },
+ arguments = "integer"
+}
+
--
function references.getcurrentmetadata(tag)
@@ -1778,12 +2061,11 @@ function references.getcurrentmetadata(tag)
return data and data.metadata and data.metadata[tag]
end
-function commands.getcurrentreferencemetadata(tag)
- local data = references.getcurrentmetadata(tag)
- if data then
- context(data)
- end
-end
+implement {
+ name = "getcurrentreferencemetadata",
+ actions = { references.getcurrentmetadata, context },
+ arguments = "string",
+}
local function currentmetadata(tag)
local data = currentreference and currentreference.i
@@ -1793,32 +2075,58 @@ end
references.currentmetadata = currentmetadata
local function getcurrentprefixspec(default)
- -- todo: message
- return currentmetadata("kind") or "?", currentmetadata("name") or "?", default or "?"
+ local data = currentreference and currentreference.i
+ local metadata = data and data.metadata
+ return
+ metadata and metadata.kind or "?",
+ metadata and metadata.name or "?",
+ default or "?"
end
references.getcurrentprefixspec = getcurrentprefixspec
-function commands.getcurrentprefixspec(default)
- context.getreferencestructureprefix(getcurrentprefixspec(default))
-end
+-- implement {
+-- name = "getcurrentprefixspec",
+-- actions = { getcurrentprefixspec, context }, -- returns 3 arguments
+-- arguments = "string",
+-- }
+
+implement {
+ name = "getcurrentprefixspec",
+ actions = function(tag)
+ context("{%s}{%s}{%s}",getcurrentprefixspec(tag))
+ end,
+ arguments = "string",
+}
-function references.filter(name,...) -- number page title ...
+local genericfilters = { }
+local userfilters = { }
+local textfilters = { }
+local fullfilters = { }
+local sectionfilters = { }
+
+filters.generic = genericfilters
+filters.user = userfilters
+filters.text = textfilters
+filters.full = fullfilters
+filters.section = sectionfilters
+
+local function filterreference(name,prefixspec,numberspec) -- number page title ...
local data = currentreference and currentreference.i -- maybe we should take realpage from here
if data then
if name == "realpage" then
local cs = references.analyze() -- normally already analyzed but also sets state
- context(tonumber(cs.realpage) or 0) -- todo, return and in command namespace
+ context(tonumber(cs.realpage) or 0)
else -- assumes data is table
local kind = type(data) == "table" and data.metadata and data.metadata.kind
if kind then
- local filter = filters[kind] or filters.generic
- filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown)
+ local filter = filters[kind] or genericfilters
+ filter = filter and (filter[name] or filter.unknown or genericfilters[name] or genericfilters.unknown)
if filter then
if trace_referencing then
report_references("name %a, kind %a, using dedicated filter",name,kind)
end
- filter(data,name,...)
+ filter(data,name,prefixspec,numberspec)
elseif trace_referencing then
report_references("name %a, kind %a, using generic filter",name,kind)
end
@@ -1833,18 +2141,30 @@ function references.filter(name,...) -- number page title ...
end
end
-function references.filterdefault()
- return references.filter("default",getcurrentprefixspec(v_default))
+local function filterreferencedefault()
+ return filterreference("default",getcurrentprefixspec("default"))
end
-function commands.currentreferencedefault(tag)
- if not tag then tag = "default" end
- references.filter(tag,context.delayed(getcurrentprefixspec(tag)))
-end
+references.filter = filterreference
+references.filterdefault = filterreferencedefault
+
+implement {
+ name = "filterreference",
+ actions = filterreference,
+ arguments = "string",
+}
-filters.generic = { }
+implement {
+ name = "filterdefaultreference",
+ actions = filterreference,
+ arguments = {
+ "string", -- 'default'
+ { { "*" } }, -- prefixspec
+ { { "*" } }, -- numberspec
+ }
+}
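-- Editorial sketch of the prefixspec and numberspec tables (abridged; the field
-- names are taken from \getreferencestructureprefix further down in this commit,
-- the values are invented):
--
-- filterreference("default",
--     { prefix = "yes", separatorset = "default", conversionset = "default", connector = "." },
--     { separatorset = "default", conversionset = "default" })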
-function filters.generic.title(data)
+function genericfilters.title(data)
if data then
local titledata = data.titledata or data.useddata
if titledata then
@@ -1853,7 +2173,7 @@ function filters.generic.title(data)
end
end
-function filters.generic.text(data)
+function genericfilters.text(data)
if data then
local entries = data.entries or data.useddata
if entries then
@@ -1862,12 +2182,12 @@ function filters.generic.text(data)
end
end
-function filters.generic.number(data,what,prefixspec) -- todo: spec and then no stopper
+function genericfilters.number(data,what,prefixspec,numberspec)
if data then
numberdata = lists.reordered(data) -- data.numberdata
if numberdata then
helpers.prefix(data,prefixspec)
- sections.typesetnumber(numberdata,"number",numberdata)
+ sections.typesetnumber(numberdata,"number",numberspec,numberdata)
else
local useddata = data.useddata
if useddata and useddata.number then
@@ -1877,16 +2197,16 @@ function filters.generic.number(data,what,prefixspec) -- todo: spec and then no
end
end
-filters.generic.default = filters.generic.text
+genericfilters.default = genericfilters.text
-function filters.generic.page(data,prefixspec,pagespec)
+function genericfilters.page(data,prefixspec,pagespec)
local pagedata = data.pagedata
if pagedata then
local number, conversion = pagedata.number, pagedata.conversion
if not number then
-- error
elseif conversion then
- context.convertnumber(conversion,number)
+ ctx_convertnumber(conversion,number)
else
context(number)
end
@@ -1895,14 +2215,12 @@ function filters.generic.page(data,prefixspec,pagespec)
end
end
-filters.user = { }
-
-function filters.user.unknown(data,name)
+function userfilters.unknown(data,name)
if data then
local userdata = data.userdata
local userkind = userdata and userdata.kind
if userkind then
- local filter = filters[userkind] or filters.generic
+ local filter = filters[userkind] or genericfilters
filter = filter and (filter[name] or filter.unknown)
if filter then
filter(data,name)
@@ -1916,9 +2234,7 @@ function filters.user.unknown(data,name)
end
end
-filters.text = { }
-
-function filters.text.title(data)
+function textfilters.title(data)
helpers.title(data.entries.text or "?",data.metadata)
end
@@ -1928,18 +2244,14 @@ end
-- helpers.title(data.entries.text or "?",data.metadata)
-- end
-function filters.text.page(data,prefixspec,pagespec)
+function textfilters.page(data,prefixspec,pagespec)
helpers.prefixpage(data,prefixspec,pagespec)
end
-filters.full = { }
-
-filters.full.title = filters.text.title
-filters.full.page = filters.text.page
-
-filters.section = { }
+fullfilters.title = textfilters.title
+fullfilters.page = textfilters.page
-function filters.section.number(data,what,prefixspec)
+function sectionfilters.number(data,what,prefixspec)
if data then
local numberdata = data.numberdata
if not numberdata then
@@ -1951,7 +2263,7 @@ function filters.section.number(data,what,prefixspec)
local references = data.references
if trace_empty then
report_empty("reference %a has a hidden number",references.reference)
- context.emptyreference() -- maybe an option
+ ctx_emptyreference() -- maybe an option
end
else
sections.typesetnumber(numberdata,"number",prefixspec,numberdata)
@@ -1959,18 +2271,18 @@ function filters.section.number(data,what,prefixspec)
end
end
-filters.section.title = filters.generic.title
-filters.section.page = filters.generic.page
-filters.section.default = filters.section.number
+sectionfilters.title = genericfilters.title
+sectionfilters.page = genericfilters.page
+sectionfilters.default = sectionfilters.number
--- filters.note = { default = filters.generic.number }
--- filters.formula = { default = filters.generic.number }
--- filters.float = { default = filters.generic.number }
--- filters.description = { default = filters.generic.number }
--- filters.item = { default = filters.generic.number }
+-- filters.note = { default = genericfilters.number }
+-- filters.formula = { default = genericfilters.number }
+-- filters.float = { default = genericfilters.number }
+-- filters.description = { default = genericfilters.number }
+-- filters.item = { default = genericfilters.number }
setmetatableindex(filters, function(t,k) -- beware, test with rawget
- local v = { default = filters.generic.number } -- not copy as it might be extended differently
+ local v = { default = genericfilters.number } -- not copy as it might be extended differently
t[k] = v
return v
end)
@@ -1999,12 +2311,71 @@ local specials = references.testspecials
-- pretty slow (progressively). In the pagebody one can best check the reference
-- real page to determine if we need contrastlocation as that is more lightweight.
-local function checkedpagestate(n,page)
- local r = referredpage(n)
+local function checkedpagestate(n,page,actions,position,spread)
local p = tonumber(page)
if not p then
return 0
- elseif p > r then
+ end
+ if position and #actions > 0 then
+ local i = actions[1].i -- brrr
+ if i then
+ local a = i.references
+ if a then
+ local x = a.x
+ local y = a.y
+ if x and y then
+ local jp = jobpositions.collected[position]
+ if jp then
+ local px = jp.x
+ local py = jp.y
+ local pp = jp.p
+ if p == pp then
+ -- same page
+ if py > y then
+ return 5 -- above
+ elseif py < y then
+ return 4 -- below
+ elseif px > x then
+ return 4 -- below
+ elseif px < x then
+ return 5 -- above
+ else
+ return 1 -- same
+ end
+ elseif spread then
+ if pp % 2 == 0 then
+ -- left page
+ if pp > p then
+ return 2 -- before
+ elseif pp + 1 == p then
+-- return 4 -- below (on right page)
+ return 5 -- above (on left page)
+ else
+ return 3 -- after
+ end
+ else
+ -- right page
+ if pp < p then
+ return 3 -- after
+ elseif pp - 1 == p then
+-- return 5 -- above (on left page)
+ return 4 -- below (on right page)
+ else
+ return 2 -- before
+ end
+ end
+ elseif pp > p then
+ return 2 -- before
+ else
+ return 3 -- after
+ end
+ end
+ end
+ end
+ end
+ end
+ local r = referredpage(n) -- sort of obsolete
+ if p > r then
return 3 -- after
elseif p < r then
return 2 -- before
@@ -2043,11 +2414,13 @@ local function setreferencerealpage(actions)
end
end
+references.setreferencerealpage = setreferencerealpage
+
-- we store some analysis data alongside the indexed array
-- at this moment only the real reference page is analyzed
-- normally such an analysis happens in the backend code
-function references.analyze(actions)
+function references.analyze(actions,position,spread)
if not actions then
actions = references.currentset
end
@@ -2062,32 +2435,56 @@ function references.analyze(actions)
elseif actions.external then
actions.pagestate = 0
else
- actions.pagestate = checkedpagestate(actions.n,realpage)
+ actions.pagestate = checkedpagestate(actions.n,realpage,actions,position,spread)
end
end
return actions
end
-function commands.referencepagestate(actions)
- if not actions then
- actions = references.currentset
- end
+local function referencepagestate(position,detail,spread)
+ local actions = references.currentset
if not actions then
- context(0)
+ return 0
else
if not actions.pagestate then
- references.analyze(actions) -- delayed unless explicitly asked for
--- print("NO STATE",actions.reference,actions.pagestate)
+ references.analyze(actions,position,spread) -- delayed unless explicitly asked for
+ end
+ local pagestate = actions.pagestate
+ if detail then
+ return pagestate
+ elseif pagestate == 4 then
+ return 2 -- compatible
+ elseif pagestate == 5 then
+ return 3 -- compatible
+ else
+ return pagestate
end
- context(actions.pagestate)
end
end
-function commands.referencerealpage(actions)
+implement {
+ name = "referencepagestate",
+ actions = { referencepagestate, context },
+ arguments = "string"
+}
+
+implement {
+ name = "referencepagedetail",
+ actions = { referencepagestate, context },
+ arguments = { "string", "boolean", "boolean" }
+}
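-- Editorial sketch of the two entry points above; the position key follows the
-- rst:: convention used at the tex end later in this commit.
--
-- local state  = referencepagestate("rst::1")            -- old 0..3 range (4 and 5 folded back)
-- local detail = referencepagestate("rst::1",true,false) -- raw 0..5, same-page hits split by position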
+
+local function referencerealpage(actions)
actions = actions or references.currentset
- context(not actions and 0 or actions.realpage or setreferencerealpage(actions))
+ return not actions and 0 or actions.realpage or setreferencerealpage(actions)
end
+implement {
+ name = "referencerealpage",
+ actions = { referencerealpage, context },
+ arguments = "string"
+}
+
local plist, nofrealpages
local function realpageofpage(p) -- the last one counts !
@@ -2164,7 +2561,7 @@ runners["special operation with arguments"] = runners["special"]
-- check the validity.
function specials.internal(var,actions)
- local v = references.internals[tonumber(var.operation)]
+ local v = internals[tonumber(var.operation)]
local r = v and v.references.realpage
if r then
actions.realpage = r
@@ -2224,10 +2621,103 @@ function specials.section(var,actions)
end
end
--- needs a better split ^^^
+-- experimental:
+
+local p_splitter = lpeg.splitat(":")
+local p_lower = lpeg.patterns.utf8lower
+
+-- We can cache lowercased titles which saves a lot of time, but then
+-- we can better have a global cache with weak keys.
+
+-- local lowercache = table.setmetatableindex(function(t,k)
+-- local v = lpegmatch(p_lower,k)
+-- t[k] = v
+-- return v
+-- end)
-commands.filterreference = references.filter
-commands.filterdefaultreference = references.filterdefault
+local lowercache = false
+
+local function locate(list,askedkind,askedname,pattern)
+ local kinds = lists.kinds
+ local names = lists.names
+ if askedkind and not kinds[askedkind] then
+ return false
+ end
+ if askedname and not names[askedname] then
+ return false
+ end
+ for i=1,#list do
+ local entry = list[i]
+ local metadata = entry.metadata
+ if metadata then
+ local found = false
+ if askedname then
+ local name = metadata.name
+ if name then
+ found = name == askedname
+ end
+ elseif askedkind then
+ local kind = metadata.kind
+ if kind then
+ found = kind == askedkind
+ end
+ end
+ if found then
+ local titledata = entry.titledata
+ if titledata then
+ local title = titledata.title
+ if title then
+ if lowercache then
+ found = lpegmatch(pattern,lowercache[title])
+ else
+ found = lpegmatch(pattern,lpegmatch(p_lower,title))
+ end
+ if found then
+ return {
+ inner = pattern,
+ kind = "inner",
+ reference = pattern,
+ i = entry,
+ p = "",
+ r = entry.references.realpage,
+ }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+function functions.match(var,actions)
+ if not var.outer then
+ local operation = var.operation
+ if operation and operation ~= "" then
+ local operation = lpegmatch(p_lower,operation)
+ local list = lists.collected
+ local names = false
+ local kinds = false
+ local where, what = lpegmatch(p_splitter,operation)
+ if where and what then
+ local pattern = lpeg.finder(what)
+ return
+ locate(list,false,where,pattern)
+ or locate(list,where,false,pattern)
+ or { error = "no match" }
+ else
+ local pattern = lpeg.finder(operation)
+ -- todo: don't look at section and float in last pass
+ return
+ locate(list,"section",false,pattern)
+ or locate(list,"float",false,pattern)
+ or locate(list,false,false,pattern)
+ or { error = "no match" }
+ end
+ end
+ end
+end
+
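-- Editorial usage sketch (not part of the commit): a match special resolves a
-- (lowercased) title fragment against the collected lists; the operation string
-- is invented.
--
-- local var = functions.match { operation = "chapter:never ending" }
-- -- var is either a var table pointing at the matched list entry or { error = "no match" }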
+-- needs a better split ^^^
-- done differently now:
@@ -2235,24 +2725,36 @@ function references.export(usedname) end
function references.import(usedname) end
function references.load (usedname) end
-commands.exportreferences = references.export
+implement { name = "exportreferences", actions = references.export }
-- better done here .... we don't insert/remove, just use a pointer
local prefixstack = { "" }
local prefixlevel = 1
-function commands.pushreferenceprefix(prefix)
+local function pushreferenceprefix(prefix)
prefixlevel = prefixlevel + 1
prefixstack[prefixlevel] = prefix
- context(prefix)
+ return prefix
end
-function commands.popreferenceprefix()
+local function popreferenceprefix()
prefixlevel = prefixlevel - 1
if prefixlevel > 0 then
- context(prefixstack[prefixlevel])
+ return prefixstack[prefixlevel]
else
report_references("unable to pop referenceprefix")
+ return ""
end
end
+
+implement {
+ name = "pushreferenceprefix",
+ actions = { pushreferenceprefix, context }, -- we can use setmacro
+ arguments = "string",
+}
+
+implement {
+ name = "popreferenceprefix",
+ actions = { popreferenceprefix, context }, -- we can use setmacro
+}
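-- Editorial sketch: the prefix stack as driven from the tex end by the macros
-- \pushreferenceprefix and \popreferenceprefix further on; the prefix is invented.
--
-- context(pushreferenceprefix("chap")) -- prefix becomes "chap"
-- context(popreferenceprefix())        -- back to the previous prefix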
diff --git a/tex/context/base/strc-ref.mkvi b/tex/context/base/strc-ref.mkvi
index 85c6a0729..f5d0d1d78 100644
--- a/tex/context/base/strc-ref.mkvi
+++ b/tex/context/base/strc-ref.mkvi
@@ -118,6 +118,11 @@
\unexpanded\def\reference {\dosingleargument\strc_references_full_reference} % never forgotten
\unexpanded\def\setreference {\dodoubleargument\strc_references_set_reference } %
+% maybe: \let\reference\textreference
+
+\unexpanded\def\showreferences
+ {\enabletrackers[nodes.references.show,nodes.destinations.show]}
+
%D These are implemented in a low level form as:
\unexpanded\def\strc_references_text_reference [#labels]{\strc_references_set_named_reference\s!text{#labels}{}}
@@ -140,13 +145,29 @@
\newcount\lastreferenceattribute
\newcount\lastdestinationattribute
-\def\dofinishfullreference#prefix#label{\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#label")}}}
-\def\dofinishtextreference#prefix#label{\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#label",{})}}}
+\def\strc_references_finish#prefix#reference#internal%
+ {\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#reference")}}}
+
+\let\dofinishreference\strc_references_finish % used at lua end
+
+% This is somewhat tricky: we want to keep the reference with the following word but
+% that word should also hyphenate. We need to find a better way.
-\let\dofinishpagereference\dofinishfullreference
-\let\dofinishuserreference\dofinishfullreference
+% 0 = nothing
+% 1 = bind to following word
+
+\setnewconstant\c_strc_references_bind_state\plusone
+
+\def\strc_references_inject_before
+ {}
+
+\def\strc_references_inject_after
+ {\ifcase\c_strc_references_bind_state
+ % nothing
+ \or
+ \prewordbreak % to be tested: \removeunwantedspaces\permithyphenation
+ \fi}
-\def\dofinishsomereference#kind{\executeifdefined{dofinish#{kind}reference}\gobbletwoarguments}
\unexpanded\def\strc_references_set_named_reference
{\ifreferencing
@@ -193,32 +214,36 @@
\globallet\currentreferencecoding\s!tex
\fi
% beware, the structures.references.set writes a
- % \setnextinternalreference
+ \setnextinternalreference
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\currentreferencekind", "\referenceprefix","\currentreferencelabels",
- {
- references = {
- % internal = \nextinternalreference, % no need for an internal as we have an explicit
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- metadata = { % we could assume page to have no metadata
- kind = "#kind", % \currentreferencekind
- \ifx\currentreferencekind\s!page\else
- catcodes = \the\catcodetable,
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument"\else nil\fi, % only useful when text
- \fi
- },
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {\currentreferencelabels}%
+ }%
+ metadata {%
+ kind {\currentreferencekind}%
+ \ifx\currentreferencekind\s!page\else
+ \ifx\currentreferencecoding\s!xml
+ xmlroot {\xmldocument}%
+ \fi
+ catcodes \catcodetable
+ \fi
+ }%
\ifx\currentreferencedata\empty\else
- entries = {
- text = \!!bs\currentreferencedata\!!es
- },
+ entries {%
+ text {\currentreferencedata}%
+ }%
\fi
\ifx\currentreferenceuserdata\empty\else
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#userdata}\!!es)
+ userdata {\detokenize{#userdata}}%
\fi
- },"\interactionparameter\c!focus")
- }%
+ }%
+ \relax
\strc_references_stop_destination_nodes
\fi
\else
@@ -229,10 +254,11 @@
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
% will become an option:
\ifnum\lastdestinationattribute>\zerocount
+ \strc_references_inject_before % new
\dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup
\strc_references_flush_destination_nodes
\egroup
- \prewordbreak % new
+ \strc_references_inject_after % new
\fi}
\def\strc_references_set_page_only_destination_attribute#labels% could in fact be fully expandable
@@ -243,17 +269,21 @@
\lastdestinationattribute\attributeunsetvalue
\else
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","\currentreferencelabels",
- {
- references = {
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- metadata = {
- kind = "page",
- },
- },"\interactionparameter\c!focus")
- }%
+ \setnextinternalreference
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {\currentreferencelabels}%
+ }%
+ metadata {%
+ kind {page}%
+ }%
+ }%
+ \relax
\strc_references_stop_destination_nodes
\fi
\else
@@ -261,23 +291,34 @@
\lastdestinationattribute\attributeunsetvalue
\fi}
-\unexpanded\def\strc_references_direct_full#labels#text%
+\unexpanded\def\strc_references_direct_full_user#user#labels#text%
{\ifreferencing
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!full", "\referenceprefix","#labels",
- {
- references = {
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- metadata = {
- kind = "\s!full",
- },
- entries = {
- text = \!!bs#text\!!es
- },
- },"\interactionparameter\c!focus")
- }%
+ \setnextinternalreference
+ \edef\m_strc_references_user{#user}%
+ \edef\m_strc_references_text{#text}%
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {#labels}%
+ }%
+ metadata {%
+ kind {\s!full}%
+ }%
+ \ifx\m_strc_references_text\empty \else
+ entries {%
+ text {\m_strc_references_text}%
+ }%
+ \fi
+ \ifx\m_strc_references_user\empty \else
+ userdata {\m_strc_references_user}% \detokenize\expandafter{\normalexpanded{...}}
+ \fi
+ }%
+ \relax
\strc_references_stop_destination_nodes
\else
\setbox\b_strc_destination_nodes\emptyhbox
@@ -287,13 +328,17 @@
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
% will become an option:
\ifnum\lastdestinationattribute>\zerocount
+ \strc_references_inject_before % new
\dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup
\strc_references_flush_destination_nodes
\egroup
- \prewordbreak % new
+ \strc_references_inject_after % new
\fi}
-\let\dodirectfullreference\strc_references_direct_full % for at lua end
+\unexpanded\def\strc_references_direct_full
+ {\strc_references_direct_full_user\empty}
+
+\let\dodirectfullreference\strc_references_direct_full % used at the lua end (no longer)
\def\strc_references_set_page_only_destination_box_attribute#cs#labels%
{\strc_references_set_page_only_destination_attribute{#labels}%
@@ -329,17 +374,19 @@
\def\strc_references_set_simple_page_reference#label%
{\iflocation
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","#label",
- {
- references = {
- % block = "\currentsectionblock",
- % section = structures.sections.currentid(),
- },
- metadata = { % we could assume page to have no metadata
- kind = "\s!page",
- },
- },"\interactionparameter\c!focus")
- }%
+ \clf_setreferenceattribute
+ {%
+ references {%
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {#label}%
+ }%
+ metadata {%
+ kind {\s!page}%
+ }%
+ }%
+ \relax
\strc_references_stop_destination_nodes
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
\else
@@ -349,7 +396,15 @@
\def\strc_references_get_simple_page_reference#label%
{\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {%
+ height \ht\strutbox
+ depth \dp\strutbox
+ \extrareferencearguments
+ }%
+ \relax
\xdef\currentreferenceattribute{\number\lastreferenceattribute}%
\else
\xdef\currentreferenceattribute{\number\attributeunsetvalue}%
@@ -489,9 +544,6 @@
\newcount\referencehastexstate % set in backend
-\def\referencepagestate{\ctxcommand{referencepagestate()}}
-\def\referencerealpage {\ctxcommand{referencerealpage ()}}
-
% referencepagestate:
%
% 0 = no page ref, 1=same page, 2=before, 3=after
@@ -579,13 +631,13 @@
{\dodoubleempty\strc_references_define_reference}
\def\strc_references_define_reference[#name][#specification]%
- {\ctxcommand{definereference("\referenceprefix","#name",\!!bs\detokenize{#specification}\!!es)}}
+ {\clf_definereference{\referenceprefix}{#name}{\detokenize{#specification}}}
\unexpanded\def\resetreference[#name]%
- {\ctxcommand{resetreference("\referenceprefix","#name")}}
+ {\clf_resetreference{\referenceprefix}{#name}}
\def\setpagereference#name#specification% hm,. low level ?
- {\ctxcommand{definereference("","#name",\!!bs\v!page(\luaescapestring{#specification})\!!es)}}
+ {\clf_definereference{}{#name}{\v!page(\detokenize{#specification})}} % is detokenize needed here?
%D Chained references are defined as:
%D
@@ -615,17 +667,19 @@
\newconditional\gotonewwindow \setfalse\gotonewwindow
\def\expandtexincurrentreference % will happen in lua some time
- {\ifcase\referencehastexstate\else\ctxcommand{expandcurrentreference()}\fi}
+ {\ifcase\referencehastexstate\else\clf_expandcurrentreference\fi}
-\def\expandreferenceoperation#tag#content{\ctxcommand{setreferenceoperation(#tag,\!!bs#content\!!es)}}
-\def\expandreferencearguments#tag#content{\ctxcommand{setreferencearguments(#tag,\!!bs#content\!!es)}}
+\def\expandreferenceoperation#tag#content{\clf_setreferenceoperation#tag{#content}}
+\def\expandreferencearguments#tag#content{\clf_setreferencearguments#tag{#content}}
-\def\doifreferencefoundelse#labels#yes#nop%
- {\ctxcommand{doifelsereference("\referenceprefix",\!!bs#labels\!!es,\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow)}%
+\def\doifelsereferencefound#label#yes#nop%
+ {\clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
#yes}%
{#nop}}
+\let\doifreferencefoundelse \doifelsereferencefound
+
%D The tester only splits the reference in components but does
%D not look into them. The following macro does a preroll and
%D determines for instance the current real reference pagenumber.
@@ -731,14 +785,14 @@
{\global\advance\locationcount\plusone}
\def\setnextinternalreferences#kind#name% plural
- {\ctxcommand{setnextinternalreference("#kind","#name")}}
+ {\clf_setnextinternalreference{#kind}{#name}}
\def\getinternalorderreference#kind#name%
- {\ctxcommand{currentreferenceorder("#kind","#name")}}
+ {\clf_currentreferenceorder{#kind}{#name}}
\def\thisissomeinternal#kind#name% only for old time sake
{\begingroup
- \ctxcommand{setinternalreference("","#kind:#name")}%
+ \clf_setinternalreference reference {#kind:#name}\relax
\hbox attr \destinationattribute\lastdestinationattribute{}%
\endgroup}
@@ -801,7 +855,7 @@
\appendtoks
\edef\p_export{\referencingparameter\c!export}%
\ifx\p_export\v!yes
- \ctxcommand{exportreferences()}%
+ \clf_exportreferences
\fi
\to \everygoodbye
@@ -816,16 +870,16 @@
% {\popmacro\referenceprefix}
\unexpanded\def\globalpushreferenceprefix#prefix%
- {\xdef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}}
+ {\xdef\referenceprefix{\clf_pushreferenceprefix{#prefix}}}
\unexpanded\def\globalpopreferenceprefix
- {\xdef\referenceprefix{\ctxcommand{popreferenceprefix()}}}
+ {\xdef\referenceprefix{\clf_popreferenceprefix}}
\unexpanded\def\pushreferenceprefix#prefix%
- {\edef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}}
+ {\edef\referenceprefix{\clf_pushreferenceprefix{#prefix}}}
\unexpanded\def\popreferenceprefix
- {\edef\referenceprefix{\ctxcommand{popreferenceprefix()}}}
+ {\edef\referenceprefix{\clf_popreferenceprefix}}
\def\m_strc_references_prefix_yes{+}
\def\m_strc_references_prefix_nop{-}
@@ -915,15 +969,15 @@
\ifdefined\over \let\normalmathover \over \unexpanded\def\over {\mathortext\normalmathover \strc_references_about} \else \let\over \strc_references_about \fi
\to \everydump
- \def\filterreference #key{\ctxcommand{filterreference("#key")}} % no checking, expanded
-\unexpanded\def\getreferenceentry#key{\ctxcommand{filterreference("#key")}} % no checking, unexpanded
+ \def\filterreference #key{\clf_filterreference{#key}} % no checking, expanded
+\unexpanded\def\getreferenceentry#key{\clf_filterreference{#key}} % no checking, unexpanded
-\def\currentreferencenumber {\filterreference{number}}
-\def\currentreferencepage {\filterreference{page}}
-\def\currentreferencetitle {\filterreference{title}}
-\def\currentreferencetext {\filterreference{text}}
-\def\currentreferencedefault {\filterreference{default}}
-\def\currentreferencerealpage{\filterreference{realpage}}
+\def\currentreferencenumber {\clf_filterreference{number}}
+\def\currentreferencepage {\clf_filterreference{page}}
+\def\currentreferencetitle {\clf_filterreference{title}}
+\def\currentreferencetext {\clf_filterreference{text}}
+\def\currentreferencedefault {\clf_filterreference{default}}
+\def\currentreferencerealpage{\clf_filterreference{realpage}}
%D The most straightforward way of retrieving references is
%D using \type{\ref}.
@@ -933,7 +987,7 @@
\def\strc_references_get_reference[#key][#label]% #key = number page title text default realpage ...
{\ifsecondargument
- \doifreferencefoundelse{#label}{\ctxcommand{filterreference("#key")}}\dummyreference
+ \doifelsereferencefound{#label}{\clf_filterreference{#key}}\dummyreference
\else
\dummyreference
\fi}
@@ -947,11 +1001,12 @@
\begingroup
\let\crlf\space
\let\\\space
- \postponenotes
+ \postponenotes % might go
\referencingparameter\c!left
- \doifreferencefoundelse{#label}
+ \doifelsereferencefound{#label}
{\goto{\limitatetext\currentreferencetitle{\referencingparameter\c!width}\unknown}[#label]}% not so efficient (dup lookup)
{}% todo
+ \flushnotes % might go
\referencingparameter\c!right
\endgroup}
@@ -967,44 +1022,151 @@
%D ... \atpage[someref] ...
%D \stoptyping
-% 0 = unknown
-% 1 = same
-% 2 = before
-% 3 = after
+% standard detail
+%
+% 0 = unknown unknown
+% 1 = same on same page
+% 2 = before preceding page
+% 3 = after following page
+%
+% 4 = above above on same page
+% 5 = below below on same page
-\def\strc_references_by_reference_page_state#unknown#before#current#after%
- {\ifcase\referencepagestate\space#unknown\or#current\or#before\or#after\fi}
+% todo: optimize for use in pagebody
+% todo: maybe make it optional
-% \unexpanded\def\somewhere#backward#foreward#dummy[#label]% #dummy gobbles space around #foreward
-% {\doifreferencefoundelse{#label}% usage needs checking (useless)
-% {\goto{\strc_references_by_reference_page_state{#label}{#backward}{}{#foreward}}[#label]}
-% {[#label]}}
+% \setuppagenumbering[alternative=doublesided]
+% \setupreferencing [doublesided=no] % yes is default
%
-% better:
+% \somewhere{backward}{foreward}[label]
+% \someplace{preceding}{backward}{current}{foreward}{following}[label]
+% \atpage[#label]
+% \doifcheckedpagestate{label}{preceding}{backward}{current}{foreward}{following}{otherwise}
+%
+% \dorecurse {20} {
+% \placefigure[here][fig:#1]{}{\externalfigure[dummy]}
+% \dorecurse {20} {
+% ##1: \atpage[fig:##1] /
+% \doifcheckedpagestate
+% {fig:##1}
+% {preceding}{backward}{current}{foreward}{following}
+% {otherwise}
+% }
+% }
+
+\newcount \nofreferencestates
+\newconditional\pagestatespread
-\unexpanded\def\somewhere#backward#foreward#dummy[#label]% #dummy gobbles space around #foreward
- {\doifreferencefoundelse{#label}%
- {\strc_references_by_reference_page_state{#label}{\goto{#backward}[#label]}{}{\goto{#foreward}[#label]}}
- {[#label]}}
+\appendtoks
+ \doifelse{\referencingparameter\c!doublesided}\v!yes\settrue\setfalse\pagestatespread
+\to \everysetupreferencing
-\unexpanded\def\atpage[#label]% todo
- {\doifreferencefoundelse{#label}% kind of inefficient as \goto also analyzes
- {\goto
- {\strc_references_by_reference_page_state
- {\labeltexts\v!page\dummyreference}%
- {\labeltext\v!hencefore}%
- {\labeltexts\v!atpage\currentreferencepage}%
- {\labeltext\v!hereafter}}%
- [#label]}
- {[#label]}}
+\setupreferencing
+ [\c!doublesided=\v!yes]
+
+\def\referencepagestate
+ {\numexpr\clf_referencepagestate
+ {rst::\number\nofreferencestates}%
+ \relax}
+
+\def\referencepagedetail
+ {\numexpr\clf_referencepagestate
+ {rst::\number\nofreferencestates}%
+ true %
+ \ifconditional\pagestatespread false\ifdoublesided true\else false\fi\fi
+ \relax}
+
+\def\referencerealpage
+ {\clf_referencerealpage}
+
+\unexpanded\def\tracedpagestate
+ {{\blue\tttf(\ifcase\referencepagedetail unknown\or same\or previous\or next\or above\or below\else unknown\fi)}}
+
+\unexpanded\def\markreferencepage
+ {\dontleavehmode\begingroup
+ \iftrialtypesetting
+ % issue warning that not stable
+ \else
+ % needs checking ... but probably never in trialmode
+ \global\advance\nofreferencestates\plusone
+ \xypos{rst::\number\nofreferencestates}%
+ % \tracedpagestate
+ \fi
+ \endgroup}
+
+\unexpanded\def\doifcheckedpagestate#label% #preceding#backward#current#foreward#following#otherwise%
+ {\doifelsereferencefound{#label}\strc_references_handle_page_state_yes\strc_references_handle_page_state_nop}
+
+\let\strc_references_handle_page_state_nop\sixthofsixarguments
+
+\def\strc_references_handle_page_state_yes
+ {\markreferencepage
+ \ifcase\referencepagedetail
+ \expandafter\sixthofsixarguments \or
+ \expandafter\thirdofsixarguments \or
+ \expandafter\firstofsixarguments \or
+ \expandafter\fifthofsixarguments \or
+ \expandafter\secondofsixarguments\or
+ \expandafter\fourthofsixarguments\else
+ \expandafter\sixthofsixarguments \fi}
\unexpanded\def\referencesymbol
- {\hbox{\strut\high
- {\setupsymbolset[\interactionparameter\c!symbolset]%
- \symbol[\strc_references_by_reference_page_state\v!somewhere\v!nowhere\v!previous\v!next]}}}
+ {\hbox\bgroup
+ \strut
+ \markreferencepage
+ \high
+ {\setupsymbolset[\interactionparameter\c!symbolset]%
+ \symbol[\ifcase\referencepagedetail\v!somewhere\or\v!nowhere\or\v!previous\or\v!next\or\v!previous\or\v!next\else\v!somewhere\fi]}%
+ \egroup}
+
+%D Hereafter the \type {\ignorespaces} binds the state node to the next character (more
+%D likely than a preceding one) and one can always add an explicit space.
+
+\unexpanded\def\somewhere#backward#foreward#dummy[#label]% #dummy gobbles space around #foreward
+ {\doifcheckedpagestate{#label}%
+ {\goto{#backward}[#label]}%
+ {\goto{#backward}[#label]}%
+ {\ignorespaces}%
+ {\goto{#foreward}[#label]}%
+ {\goto{#foreward}[#label]}%
+ {#label}}%
+
+\unexpanded\def\someplace#preceding#backward#current#foreward#following#dummy[#label]% #dummy gobbles space around #foreward
+ {\doifcheckedpagestate{#label}%
+ {\doifelsenothing{#preceding}{\goto{#preceding}[#label]}\ignorespaces}%
+ {\doifelsenothing {#backward}{\goto {#backward}[#label]}\ignorespaces}%
+ {\doifelsenothing {#current}{\goto {#current}[#label]}\ignorespaces}%
+ {\doifelsenothing {#foreward}{\goto {#foreward}[#label]}\ignorespaces}%
+ {\doifelsenothing{#following}{\goto{#following}[#label]}\ignorespaces}%
+ {#label}}
+
+\unexpanded\def\atpage[#label]% todo
+ {\doifcheckedpagestate{#label}%
+ {\goto{\labeltext \v!precedingpage }[#label]}%
+ {\goto{\labeltext \v!hencefore }[#label]}%
+ {\ignorespaces}%
+ {\goto{\labeltext \v!hereafter }[#label]}%
+ {\goto{\labeltext \v!followingpage }[#label]}%
+ {\goto{\labeltexts\v!page\dummyreference}[#label]}}
+
+% Someone requested this but in retrospect didn't need it so we keep it as an example.
+% Beware: a node is injected which is why we add ignorespaces!
+%
+% \unexpanded\def\strc_references_conditional#action#text[#condition]#dummy[#label]%
+% {\doifcheckedpagestate{#label}%
+% {\doifelse{#condition}\v!precedingpage{#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!hencefore {#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!current {#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!hereafter {#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!followingpage{#action{#text}[#label]}\ignorespaces}%
+% {#label}}
+%
+% \unexpanded\def\conditionalat {\strc_references_conditional\at}
+% \unexpanded\def\conditionalin {\strc_references_conditional\in}
+% \unexpanded\def\conditionalabout{\strc_references_conditional\about}
-%D The other alternatives just conform their names: only the
-%D label, only the text, or the label and the text.
+%D The other alternatives just conform to their names: only the label, only the text, or
+%D the label and the text.
% \dounknownreference -> \dummyreference
@@ -1027,11 +1189,11 @@
\setvalue{\??referencinginteraction\v!all}%
{\the\leftreferencetoks
- \doifsometokselse\leftreferencetoks \leftofreferencecontent \donothing
+ \doifelsesometoks\leftreferencetoks \leftofreferencecontent \donothing
\leftofreference
\currentreferencecontent
\rightofreference
- \doifsometokselse\rightreferencetoks\rightofreferencecontent\donothing
+ \doifelsesometoks\rightreferencetoks\rightofreferencecontent\donothing
\the\rightreferencetoks}
\setvalue{\??referencinginteraction\v!label}%
@@ -1091,14 +1253,21 @@
\let\rightofreferencecontent\empty
\fi
% inefficient: double resolve
- \doifreferencefoundelse{#label} % we need to resolve the text
+ \doifelsereferencefound{#label} % we need to resolve the text
{\goto{\referencesequence}[#label]}
{\let\currentreferencecontent\dummyreference
\goto{\referencesequence}[#label]}%
\strc_references_stop_goto}
-\unexpanded\def\strc_references_in{\strc_references_start_goto\let\currentreferencecontent\currentreferencedefault\strc_references_pickup_goto}
-\unexpanded\def\strc_references_at{\strc_references_start_goto\let\currentreferencecontent\currentreferencepage \strc_references_pickup_goto}
+\unexpanded\def\strc_references_in
+ {\strc_references_start_goto
+ \let\currentreferencecontent\currentreferencedefault
+ \strc_references_pickup_goto}
+
+\unexpanded\def\strc_references_at
+ {\strc_references_start_goto
+ \let\currentreferencecontent\currentreferencepage
+ \strc_references_pickup_goto}
%D \macros
%D {definereferenceformat}
@@ -1209,7 +1378,7 @@
\def\autoreferencelabeltextflag{*} % a proper key like 'auto' or 'name' can clash with a label key
\unexpanded\def\autoreferencelabeltext
- {\ctxcommand{getcurrentreferencemetadata("name")}}
+ {\clf_getcurrentreferencemetadata{name}}
% \starttext
% \definereferenceformat[inxx] [left=(,right=),text=txt]
@@ -1254,7 +1423,9 @@
\newconditional\uselocationstrut \settrue\uselocationstrut
\def\extrareferencearguments
- {\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow,"\currentviewerlayer"}
+ {highlight \luaconditional\highlighthyperlinks\space
+ newwindow \luaconditional\gotonewwindow\space
+ layer {\currentviewerlayer}}
\unexpanded\def\directgoto
{\ifconditional\uselocationstrut
@@ -1286,7 +1457,15 @@
\attribute\referenceattribute\attributeunsetvalue
\global\lastsavedreferenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {%
+ height \ht\strutbox
+ depth \dp\strutbox
+ \extrareferencearguments
+ }%
+ \relax
\setlocationattributes
\setstrut % can be option
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1305,7 +1484,15 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {%
+ height \dimexpr\interactionparameter\c!height\relax
+ depth \dimexpr\interactionparameter\c!depth \relax
+ \extrareferencearguments
+ }%
+ \relax
\setlocationattributes
\attribute\referenceattribute\lastreferenceattribute
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1324,9 +1511,12 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
- \ctxcommand{injectcurrentreference(\number\ht\strutbox,\number\dp\strutbox)}%
+ \clf_injectcurrentreferencehtdp
+ \ht\strutbox
+ \dp\strutbox
+ \relax
\setlocationattributes
\setstrut % can be option
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1341,15 +1531,52 @@
%\egroup\unhbox\referencebox}
\endgroup}
+\unexpanded\def\startgoto[#label]%
+ {\dontleavehmode
+ \begingroup
+ \iflocation
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
+ {\expandafter\startgoto_yes}%
+ {\expandafter\startgoto_nop}%
+ \else
+ \expandafter\startgoto_nop
+ \fi}
+
+\unexpanded\def\startgoto_nop
+ {\let\stopgoto\stopgoto_nop}
+
+\unexpanded\def\stopgoto_nop
+ {\endgroup}
+
+\unexpanded\def\startgoto_yes
+ {\expandtexincurrentreference
+ \clf_injectcurrentreferencehtdp
+ \ht\strutbox
+ \dp\strutbox
+ \relax
+ \setlocationattributes
+ \setstrut % can be option
+ \global\lastsavedreferenceattribute\lastreferenceattribute
+ \attribute\referenceattribute\lastreferenceattribute
+ \dostarttagged\t!link\empty
+ \let\stopgoto\stopgoto_yes}
+
+\unexpanded\def\stopgoto_yes
+ {\dostoptagged
+ \endgroup}
+
\def\dogotohtdp#content#dummy[#label]% dummy gobbles spaces
{\dontleavehmode
\begingroup
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
- \ctxcommand{injectcurrentreference(\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax)}%
+ \clf_injectcurrentreferencehtdp
+ \dimexpr\interactionparameter\c!height\relax
+ \dimexpr\interactionparameter\c!depth \relax
+ \relax
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\attribute\referenceattribute\lastreferenceattribute
@@ -1368,7 +1595,11 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {\extrareferencearguments}%
+ \relax
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1385,7 +1616,11 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {\extrareferencearguments}%
+ \relax
\setlocationcolorspec{#resolver}% no consequence for strut
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1402,7 +1637,11 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {\extrareferencearguments}%
+ \relax
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
\hbox attr \referenceattribute \lastreferenceattribute {#content}%
@@ -1418,9 +1657,9 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
- \ctxcommand{injectcurrentreference(nil,nil)}%
+ \clf_injectcurrentreference
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1440,8 +1679,8 @@
\ht\scratchbox#height%
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
- {\ctxcommand{injectcurrentreference(nil,nil)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
+ {\clf_injectcurrentreference
\global\lastsavedreferenceattribute\lastreferenceattribute
\hbox attr \referenceattribute \lastreferenceattribute {\box\scratchbox}}
{\box\scratchbox}%
@@ -1503,13 +1742,16 @@
\let\useexternaldocument\usefile
\def\strc_references_use_url[#label][#url][#file][#description]%
- {\ctxcommand{useurl("#label",\!!bs\detokenize{#url}\!!es,\!!bs\detokenize{#file}\!!es,\!!bs\detokenize{#description}\!!es)}}
+ {\clf_useurl{#label}{\detokenize{#url}}{\detokenize{#file}}{\detokenize{#description}}}
\def\strc_references_use_file[#label][#file][#description]%
- {\ctxcommand{usefile("#label",\!!bs\detokenize{#file}\!!es,\!!bs\detokenize{#description}\!!es)}}
+ {\clf_usefile{#label}{\detokenize{#file}}{\detokenize{#description}}}
-\def\doifurldefinedelse #label{\ctxcommand{doifurldefinedelse ("#label")}}
-\def\doiffiledefinedelse#label{\ctxcommand{doiffiledefinedelse("#label")}}
+\def\doifelseurldefined #label{\clf_doifelseurldefined {#label}}
+\def\doifelsefiledefined#label{\clf_doifelsefiledefined{#label}}
+
+\let\doifurldefinedelse \doifelseurldefined
+\let\doiffiledefinedelse\doifelsefiledefined
%D \macros
%D {url,setupurl}
@@ -1540,7 +1782,7 @@
{\dontleavehmode
\begingroup
\useurlstyleandcolor\c!style\c!color
- \hyphenatedurl{\ctxcommand{geturl("#label")}}%
+ \hyphenatedurl{\clf_geturl{#label}}%
\endgroup}
%D This macro is hooked into a support macro, and thereby
@@ -1571,7 +1813,7 @@
\def\strc_references_do_special_from[#label]%
{\dontleavehmode
- \goto{\ctxcommand{from("#label")}}[fileorurl(#label)]}
+ \goto{\clf_from{#label}}[fileorurl(#label)]}
\def\dofromurldescription#content% called at the lua end
{#content}
@@ -1616,13 +1858,13 @@
{\dotripleargument\strc_references_define_program}
\def\strc_references_define_program[#name][#program][#description]%
- {\ctxcommand{defineprogram("#name",\!!bs#program\!!es,\!!bs#description\!!es)}}
+ {\clf_defineprogram{#name}{#program}{#description}}
\def\program[#name]% incompatible, more consistent, hardy used anyway
{\dontleavehmode
\begingroup
\useprogramsstyleandcolor\c!style\c!color
- \ctxcommand{getprogram("#name","\directprogramsparameter\c!alternative","\directprogramsparameter\c!space")}%
+ \clf_getprogram{#name}%
\endgroup}
%D As we can see, we directly use the special reference
@@ -1711,7 +1953,7 @@
%D
%D Only when \type {text} is not empty, a space is inserted.
-\def\dotextprefix#text%
+\unexpanded\def\dotextprefix#text%
{\begingroup
\setbox\scratchbox\hbox{#text}% to be solved some day
\ifdim\wd\scratchbox>\zeropoint
@@ -1847,14 +2089,24 @@
\installcorenamespace{referencingprefix}
\def\getreferencestructureprefix#kind#name#category% name will change
- {{
- prefix = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefix",
- separatorset = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixseparatorset",
- conversion = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversion",
- conversionset = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversionset",
- set = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixset",
- segments = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixsegments",
- connector = \!!bs\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconnector\!!es,
+ {{%
+ prefix {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefix}%
+ separatorset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixseparatorset}%
+ conversion {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversion}%
+ conversionset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversionset}%
+ starter {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixstarter}%
+ stopper {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixstopper}%
+ set {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixset}%
+ segments {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixsegments}%
+ connector {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconnector}%
+ }%
+ {%
+ separatorset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberseparatorset}%
+ conversion {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberconversion}%
+ conversionset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberconversionset}%
+ starter {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberstarter}%
+ stopper {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberstopper}%
+ segments {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numbersegments}%
}}
\unexpanded\def\setupreferencestructureprefix
@@ -1876,16 +2128,11 @@
\csname \??referencingprefix:#category#parameter\endcsname
\fi\fi\fi}
-% \def\currentreferencedefault
-% {\ctxcommand{filterdefaultreference()}}
-
-\def\currentreferencedefault
- {\ctxcommand{filterreference("\s!default",\ctxcommand{getcurrentprefixspec("\s!default")})}}
-
-% needs testing
-%
-% \def\currentreferencedefault
-% {\ctxcommand{currentreferencedefault()}}
+\def\currentreferencedefault % for some reason we need to explicitly expand
+ {\normalexpanded{\noexpand\clf_filterdefaultreference
+ {\s!default}%
+ \noexpand\getreferencestructureprefix\clf_getcurrentprefixspec{\s!default}% returns #kind#name#category
+ \relax}}
%D Not all support is visible by looking at the \TEX\ code; here is one of those:
%D
@@ -1903,6 +2150,55 @@
%D \stopinteractionmenu
%D \stoptyping
+%D Relatively new:
+%D
+%D \starttyping
+%D \chapter{The never ending story}
+%D
+%D \section{An ending story}
+%D
+%D \in{chapter}[match(complex bibliographies)]
+%D \in{chapter}[match(never ending)]
+%D \in{chapter}[match(ending)]
+%D \in{chapter}[match(chapter:never ending)]
+%D \in{chapter}[match(chapter:ending)]
+%D \in{section}[match(section:ending)]
+%D \in{figure}[match(float:mess)]
+%D \in{figure}[match(figure:mess)]
+%D \in{figure (not found)}[match(section:mess)]
+%D \in{figure (not found)}[match(section:xxxx)]
+%D \in{figure}[match(mess)]
+%D
+%D \placefigure{What a mess}{}
+%D
+%D \chapter{About complex bibliographies}
+%D
+%D \in{chapter}[match(complex bibliographies)]
+%D \in{chapter}[match(never ending)]
+%D \in{figure}[match(mess)]
+%D \stoptyping
+
+%D Tracing:
+
+\unexpanded\def\strc_references_tracer#1#2% \csleft csright
+ {\hbox to \zeropoint \bgroup
+ \hss
+ \infofont
+ \darkblue
+ \ifx#1\empty\else
+ \raise\strutht \hbox \s!to \zeropoint \bgroup
+ \hss#1\hskip.2\emwidth
+ \egroup
+ \fi
+ \vrule \s!height 1.5\strutht \s!depth \strutdp \s!width .1\emwidth
+ \ifx#2\empty\else
+ \raise\strutht \hbox \s!to \zeropoint \bgroup
+ \hskip.2\emwidth#2\hss
+ \egroup
+ \fi
+ \hss
+ \egroup}%
+
\protect \endinput
% tricky:
diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua
index b0d8a8a25..ed3292195 100644
--- a/tex/context/base/strc-reg.lua
+++ b/tex/context/base/strc-reg.lua
@@ -13,50 +13,203 @@ local utfchar = utf.char
local lpegmatch = lpeg.match
local allocate = utilities.storage.allocate
-local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
+local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
-local report_registers = logs.reporter("structure","registers")
+local report_registers = logs.reporter("structure","registers")
-local structures = structures
-local registers = structures.registers
-local helpers = structures.helpers
-local sections = structures.sections
-local documents = structures.documents
-local pages = structures.pages
-local references = structures.references
+local structures = structures
+local registers = structures.registers
+local helpers = structures.helpers
+local sections = structures.sections
+local documents = structures.documents
+local pages = structures.pages
+local references = structures.references
-local mappings = sorters.mappings
-local entries = sorters.entries
-local replacements = sorters.replacements
+local usedinternals = references.usedinternals
-local processors = typesetters.processors
-local splitprocessor = processors.split
+local mappings = sorters.mappings
+local entries = sorters.entries
+local replacements = sorters.replacements
-local texgetcount = tex.getcount
+local processors = typesetters.processors
+local splitprocessor = processors.split
-local variables = interfaces.variables
-local context = context
-local commands = commands
+local texgetcount = tex.getcount
-local matchingtilldepth = sections.matchingtilldepth
-local numberatdepth = sections.numberatdepth
+local variables = interfaces.variables
+local v_forward = variables.forward
+local v_all = variables.all
+local v_yes = variables.yes
+local v_current = variables.current
+local v_previous = variables.previous
+local v_text = variables.text
-local absmaxlevel = 5 -- \c_strc_registers_maxlevel
+local context = context
+local commands = commands
+
+local implement = interfaces.implement
+
+local matchingtilldepth = sections.matchingtilldepth
+local numberatdepth = sections.numberatdepth
+local currentlevel = sections.currentlevel
+local currentid = sections.currentid
+
+local touserdata = helpers.touserdata
+
+local internalreferences = references.internals
+local setinternalreference = references.setinternalreference
+
+local setmetatableindex = table.setmetatableindex
+local texsetattribute = tex.setattribute
+
+local a_destination = attributes.private('destination')
+
+local absmaxlevel = 5 -- \c_strc_registers_maxlevel
+
+local h_prefixpage = helpers.prefixpage
+local h_prefixlastpage = helpers.prefixlastpage
+local h_title = helpers.title
+
+local ctx_startregisteroutput = context.startregisteroutput
+local ctx_stopregisteroutput = context.stopregisteroutput
+local ctx_startregistersection = context.startregistersection
+local ctx_stopregistersection = context.stopregistersection
+local ctx_startregisterentries = context.startregisterentries
+local ctx_stopregisterentries = context.stopregisterentries
+local ctx_startregisterentry = context.startregisterentry
+local ctx_stopregisterentry = context.stopregisterentry
+local ctx_startregisterpages = context.startregisterpages
+local ctx_stopregisterpages = context.stopregisterpages
+local ctx_startregisterseewords = context.startregisterseewords
+local ctx_stopregisterseewords = context.stopregisterseewords
+local ctx_registerentry = context.registerentry
+local ctx_registerseeword = context.registerseeword
+local ctx_registerpagerange = context.registerpagerange
+local ctx_registeronepage = context.registeronepage
+
+-- possible export, but ugly code (overloads)
+--
+-- local output, section, entries, nofentries, pages, words, rawtext
+--
+-- h_title = function(a,b) rawtext = a end
+--
+-- local function ctx_startregisteroutput()
+-- output = { }
+-- section = nil
+-- entries = nil
+-- nofentries = nil
+-- pages = nil
+-- words = nil
+-- rawtext = nil
+-- end
+-- local function ctx_stopregisteroutput()
+-- inspect(output)
+-- output = nil
+-- section = nil
+-- entries = nil
+-- nofentries = nil
+-- pages = nil
+-- words = nil
+-- rawtext = nil
+-- end
+-- local function ctx_startregistersection(tag)
+-- section = { }
+-- output[#output+1] = {
+-- section = section,
+-- tag = tag,
+-- }
+-- end
+-- local function ctx_stopregistersection()
+-- end
+-- local function ctx_startregisterentries(n)
+-- entries = { }
+-- nofentries = 0
+-- section[#section+1] = entries
+-- end
+-- local function ctx_stopregisterentries()
+-- end
+-- local function ctx_startregisterentry(n) -- or subentries (nested?)
+-- nofentries = nofentries + 1
+-- entry = { }
+-- entries[nofentries] = entry
+-- end
+-- local function ctx_stopregisterentry()
+-- nofentries = nofentries - 1
+-- entry = entries[nofentries]
+-- end
+-- local function ctx_startregisterpages()
+-- pages = { }
+-- entry.pages = pages
+-- end
+-- local function ctx_stopregisterpages()
+-- end
+-- local function ctx_startregisterseewords()
+-- words = { }
+-- entry.words = words
+-- end
+-- local function ctx_stopregisterseewords()
+-- end
+-- local function ctx_registerentry(processor,internal,seeparent,text)
+-- text()
+-- entry.text = {
+-- processor = processor,
+-- internal = internal,
+-- seeparent = seeparent,
+-- text = rawtext,
+-- }
+-- end
+-- local function ctx_registerseeword(i,n,processor,internal,seeindex,seetext)
+-- seetext()
+-- entry.words[i] = {
+-- processor = processor,
+-- internal = internal,
+-- seeparent = seeparent,
+-- seetext = rawtext,
+-- }
+-- end
+-- local function ctx_registerpagerange(fprocessor,finternal,frealpage,lprocessor,linternal,lrealpage)
+-- pages[#pages+1] = {
+-- first = {
+-- processor = fprocessor,
+-- internal = finternal,
+-- realpage = frealpage,
+-- },
+-- last = {
+-- processor = lprocessor,
+-- internal = linternal,
+-- realpage = lrealpage,
+-- },
+-- }
+-- end
+-- local function ctx_registeronepage(processor,internal,realpage)
+-- pages[#pages+1] = {
+-- processor = processor,
+-- internal = internal,
+-- realpage = realpage,
+-- }
+-- end
-- some day we will share registers and lists (although there are some conceptual
-- differences in the application of keywords)
local function filtercollected(names,criterium,number,collected,prevmode)
- if not criterium or criterium == "" then criterium = variables.all end
- local data = documents.data
- local numbers, depth = data.numbers, data.depth
- local hash, result, nofresult, all, detail = { }, { }, 0, not names or names == "" or names == variables.all, nil
+ if not criterium or criterium == "" then
+ criterium = v_all
+ end
+ local data = documents.data
+ local numbers = data.numbers
+ local depth = data.depth
+ local hash = { }
+ local result = { }
+ local nofresult = 0
+ local all = not names or names == "" or names == v_all
+ local detail = nil
if not all then
for s in gmatch(names,"[^, ]+") do
hash[s] = true
end
end
- if criterium == variables.all or criterium == variables.text then
+ if criterium == v_all or criterium == v_text then
for i=1,#collected do
local v = collected[i]
if all then
@@ -70,10 +223,11 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
end
end
- elseif criterium == variables.current then
+ elseif criterium == v_current then
+ local collectedsections = sections.collected
for i=1,#collected do
local v = collected[i]
- local sectionnumber = sections.collected[v.references.section]
+ local sectionnumber = collectedsections[v.references.section]
if sectionnumber then
local cnumbers = sectionnumber.numbers
if prevmode then
@@ -108,10 +262,11 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
end
end
- elseif criterium == variables.previous then
+ elseif criterium == v_previous then
+ local collectedsections = sections.collected
for i=1,#collected do
local v = collected[i]
- local sectionnumber = sections.collected[v.references.section]
+ local sectionnumber = collectedsections[v.references.section]
if sectionnumber then
local cnumbers = sectionnumber.numbers
if (all or hash[v.metadata.name]) and #cnumbers >= depth then
@@ -141,12 +296,13 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
elseif criterium == variables["local"] then
if sections.autodepth(data.numbers) == 0 then
- return filtercollected(names,variables.all,number,collected,prevmode)
+ return filtercollected(names,v_all,number,collected,prevmode)
else
- return filtercollected(names,variables.current,number,collected,prevmode)
+ return filtercollected(names,v_current,number,collected,prevmode)
end
else -- sectionname, number
-- beware, this works ok for registers
+ -- to be redone with reference instead
local depth = sections.getlevel(criterium)
local number = tonumber(number) or numberatdepth(depth) or 0
if trace_registers then
@@ -193,81 +349,155 @@ registers.filtercollected = filtercollected
-- result table; we might do that here as well but since sorting code is
-- older we delay that decision
+-- maybe store the specification in the format (although we predefine only
+-- saved registers)
+
+local function checker(t,k)
+ local v = {
+ metadata = {
+ language = 'en',
+ sorted = false,
+ class = class,
+ },
+ entries = { },
+ }
+ t[k] = v
+ return v
+end
+
local function initializer()
tobesaved = registers.tobesaved
collected = registers.collected
- local internals = references.internals
+ setmetatableindex(tobesaved,checker)
+ setmetatableindex(collected,checker)
+ local usedinternals = references.usedinternals
for name, list in next, collected do
local entries = list.entries
- for e=1,#entries do
- local entry = entries[e]
- local r = entry.references
- if r then
- local internal = r and r.internal
- if internal then
- internals[internal] = entry
+ if not list.metadata.notsaved then
+ for e=1,#entries do
+ local entry = entries[e]
+ local r = entry.references
+ if r then
+ local internal = r and r.internal
+ if internal then
+ internalreferences[internal] = entry
+ usedinternals[internal] = r.used
+ end
end
end
end
end
end
-job.register('structures.registers.collected', tobesaved, initializer)
+local function finalizer()
+ local flaginternals = references.flaginternals
+ for k, v in next, tobesaved do
+ local entries = v.entries
+ if entries then
+ for i=1,#entries do
+ local r = entries[i].references
+ if r and flaginternals[r.internal] then
+ r.used = true
+ end
+ end
+ end
+ end
+end
-local function allocate(class)
+job.register('structures.registers.collected', tobesaved, initializer, finalizer)
+
+setmetatableindex(tobesaved,checker)
+setmetatableindex(collected,checker)
+
+local function defineregister(class,method)
local d = tobesaved[class]
- if not d then
- d = {
- metadata = {
- language = 'en',
- sorted = false,
- class = class
- },
- entries = { },
- }
- tobesaved[class] = d
+ if method == v_forward then
+ d.metadata.notsaved = true
end
- return d
end
-registers.define = allocate
+registers.define = defineregister -- 4 times is somewhat over the top but we want consistency
+registers.setmethod = defineregister -- and we might have a difference some day
+
+implement {
+ name = "defineregister",
+ actions = defineregister,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "setregistermethod",
+ actions = defineregister, -- duplicate use
+ arguments = { "string", "string" }
+}
local entrysplitter = lpeg.tsplitat('+') -- & obsolete in mkiv
local tagged = { }
+-- this whole splitting is an inheritance of mkii
+
local function preprocessentries(rawdata)
local entries = rawdata.entries
if entries then
---~ table.print(rawdata)
- local e, k = entries[1] or "", entries[2] or ""
- local et, kt, entryproc, pageproc
- if type(e) == "table" then
- et = e
- else
- entryproc, e = splitprocessor(e)
+ --
+ -- local e = entries[1] or ""
+ -- local k = entries[2] or ""
+ -- local et, kt, entryproc, pageproc
+ -- if type(e) == "table" then
+ -- et = e
+ -- else
+ -- entryproc, e = splitprocessor(e)
+ -- et = lpegmatch(entrysplitter,e)
+ -- end
+ -- if type(k) == "table" then
+ -- kt = k
+ -- else
+ -- pageproc, k = splitprocessor(k)
+ -- kt = lpegmatch(entrysplitter,k)
+ -- end
+ --
+ local processors = rawdata.processors
+ local et = entries.entries
+ local kt = entries.keys
+ local entryproc = processors and processors.entry
+ local pageproc = processors and processors.page
+ if entryproc == "" then
+ entryproc = nil
+ end
+ if pageproc == "" then
+ pageproc = nil
+ end
+ if not et then
+ local p, e = splitprocessor(entries.entry or "")
+ if p then
+ entryproc = p
+ end
et = lpegmatch(entrysplitter,e)
end
- if type(k) == "table" then
- kt = k
- else
- pageproc, k = splitprocessor(k)
+ if not kt then
+ local p, k = splitprocessor(entries.key or "")
+ if p then
+ pageproc = p
+ end
kt = lpegmatch(entrysplitter,k)
end
+ --
entries = { }
- for k=1,#et do
- entries[k] = { et[k] or "", kt[k] or "" }
- end
+ local ok = false
for k=#et,1,-1 do
- if entries[k][1] ~= "" then
- break
- else
+ local etk = et[k]
+ local ktk = kt[k]
+ if not ok and etk == "" then
entries[k] = nil
+ else
+ entries[k] = { etk or "", ktk ~= "" and ktk or nil }
+ ok = true
end
end
rawdata.list = entries
if pageproc or entryproc then
- rawdata.processors = { entryproc, pageproc }
+ rawdata.processors = { entryproc, pageproc } -- old way: indexed .. will be keys
end
rawdata.entries = nil
end
@@ -277,21 +507,74 @@ local function preprocessentries(rawdata)
end
end
-function registers.store(rawdata) -- metadata, references, entries
- local data = allocate(rawdata.metadata.name).entries
+local function storeregister(rawdata) -- metadata, references, entries
local references = rawdata.references
- references.realpage = references.realpage or 0 -- just to be sure as it can be referred to
+ local metadata = rawdata.metadata
+ -- checking
+ if not metadata then
+ metadata = { }
+ rawdata.metadata = metadata
+ end
+ --
+ if not metadata.kind then
+ metadata.kind = "entry"
+ end
+ --
+ if not metadata.catcodes then
+ metadata.catcodes = tex.catcodetable -- get
+ end
+ --
+ local name = metadata.name
+ local notsaved = tobesaved[name].metadata.notsaved
+ --
+ if not references then
+ references = { }
+ rawdata.references = references
+ end
+ --
+ local internal = references.internal
+ if not internal then
+ internal = texgetcount("locationcount") -- we assume that it has been set
+ references.internal = internal
+ end
+ --
+ if notsaved then
+ usedinternals[internal] = true -- todo view (we assume that forward references index entries are used)
+ end
+ --
+ if not references.realpage then
+ references.realpage = 0 -- just to be sure as it can be referred to
+ end
+ --
+ local userdata = rawdata.userdata
+ if userdata then
+ rawdata.userdata = touserdata(userdata)
+ end
+ --
+ references.section = currentid()
+ metadata.level = currentlevel()
+ --
+ local data = notsaved and collected[name] or tobesaved[name]
+ local entries = data.entries
+ internalreferences[internal] = rawdata
preprocessentries(rawdata)
- data[#data+1] = rawdata
+ entries[#entries+1] = rawdata
local label = references.label
- if label and label ~= "" then tagged[label] = #data end
- context(#data)
+ if label and label ~= "" then
+ tagged[label] = #entries
+ else
+ references.label = nil
+ end
+ return #entries
end
+registers.store = storeregister
+
function registers.enhance(name,n)
- local r = tobesaved[name].entries[n]
- if r then
- r.references.realpage = texgetcount("realpageno")
+ local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
+ local entry = data.entries[n]
+ if entry then
+ entry.references.realpage = texgetcount("realpageno")
end
end
@@ -300,21 +583,30 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
tag = tagged[tag]
end
if tag then
- local r = tobesaved[name].entries[tag]
- if r then
- local rr = r.references
- rr.lastrealpage = texgetcount("realpageno")
- rr.lastsection = sections.currentid()
+ local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
+ local entry = data.entries[tag]
+ if entry then
+ local references = entry.references
+ references.lastrealpage = texgetcount("realpageno")
+ references.lastsection = currentid()
if rawdata then
+ local userdata = rawdata.userdata
+ if userdata then
+ rawdata.userdata = touserdata(userdata)
+ end
if rawdata.entries then
preprocessentries(rawdata)
end
- for k,v in next, rawdata do
- if not r[k] then
- r[k] = v
+ local metadata = rawdata.metadata
+ if metadata and not metadata.catcodes then
+ metadata.catcodes = tex.catcodetable -- get
+ end
+ for k, v in next, rawdata do
+ local rk = references[k]
+ if not rk then
+ references[k] = v
else
- local rk = r[k]
- for kk,vv in next, v do
+ for kk, vv in next, v do
if type(vv) == "table" then
if next(vv) then
rk[kk] = vv
@@ -330,6 +622,71 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
end
end
+function registers.get(tag,n)
+ local list = tobesaved[tag]
+ return list and list.entries[n]
+end
+
+implement {
+ name = "enhanceregister",
+ actions = registers.enhance,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "extendregister",
+ actions = registers.extend,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "storeregister",
+ actions = function(rawdata)
+ local nofentries = storeregister(rawdata)
+ setinternalreference { internal = rawdata.references.internal }
+ context(nofentries)
+ end,
+ arguments = {
+ {
+ { "metadata", {
+ { "kind" },
+ { "name" },
+ { "coding" },
+ { "level", "integer" },
+ { "catcodes", "integer" },
+ { "own" },
+ { "xmlroot" },
+ { "xmlsetup" }
+ }
+ },
+ { "entries", {
+ { "entries", "list" },
+ { "keys", "list" },
+ { "entry" },
+ { "key" }
+ }
+ },
+ { "references", {
+ { "internal", "integer" },
+ { "section", "integer" },
+ { "label" }
+ }
+ },
+ { "seeword", {
+ { "text" }
+ }
+ },
+ { "processors", {
+ { "entry" },
+ { "key" },
+ { "page" }
+ }
+ },
+ { "userdata" },
+ }
+ }
+}
+
-- sorting and rendering
local compare = sorters.comparers.basic
@@ -339,7 +696,8 @@ function registers.compare(a,b)
if result ~= 0 then
return result
else
- local ka, kb = a.metadata.kind, b.metadata.kind
+ local ka = a.metadata.kind
+ local kb = b.metadata.kind
if ka == kb then
local page_a, page_b = a.references.realpage, b.references.realpage
if not page_a or not page_b then
@@ -364,7 +722,7 @@ end
local seeindex = 0
--- multiple loops, seewords, then words, an seewords
+-- multiple loops, seewords, then words, otherwise seewords
local function crosslinkseewords(result) -- all words
-- collect all seewords
@@ -453,17 +811,19 @@ end
function registers.prepare(data)
-- data has 'list' table
- local strip = sorters.strip
+ local strip = sorters.strip
local splitter = sorters.splitters.utf
- local result = data.result
+ local result = data.result
if result then
for i=1, #result do
- local entry, split = result[i], { }
- local list = entry.list
+ local entry = result[i]
+ local split = { }
+ local list = entry.list
if list then
for l=1,#list do
- local ll = list[l]
- local word, key = ll[1], ll[2]
+ local ll = list[l]
+ local word = ll[1]
+ local key = ll[2]
if not key or key == "" then
key = word
end
@@ -478,7 +838,11 @@ function registers.prepare(data)
end
function registers.sort(data,options)
- sorters.sort(data.result,registers.compare)
+ -- if options.pagenumber == false then
+ -- sorters.sort(data.result,compare)
+ -- else
+ sorters.sort(data.result,registers.compare)
+ -- end
end
function registers.unique(data,options)
@@ -487,7 +851,8 @@ function registers.unique(data,options)
for k=1,#dataresult do
local v = dataresult[k]
if prev then
- local pr, vr = prev.references, v.references
+ local vr = v.references
+ local pr = prev.references
if not equal(prev.list,v.list) then
-- ok
elseif pr.realpage ~= vr.realpage then
@@ -530,10 +895,11 @@ function registers.finalize(data,options) -- maps character to index (order)
if trace_registers then
report_registers("splitting at %a",tag)
end
- done, nofdone = { }, 0
+ done = { }
+ nofdone = 0
nofsplit = nofsplit + 1
+ lasttag = tag
split[nofsplit] = { tag = tag, data = done }
- lasttag = tag
end
nofdone = nofdone + 1
done[nofdone] = v
@@ -541,7 +907,7 @@ function registers.finalize(data,options) -- maps character to index (order)
data.result = split
end
-function registers.analyzed(class,options)
+local function analyzeregister(class,options)
local data = collected[class]
if data and data.entries then
options = options or { }
@@ -558,34 +924,55 @@ function registers.analyzed(class,options)
end
end
+registers.analyze = analyzeregister
+
+implement {
+ name = "analyzeregister",
+ actions = { analyzeregister, context },
+ arguments = {
+ "string",
+ {
+ { "language" },
+ { "method" },
+ { "numberorder" },
+ { "compress" },
+ { "criterium" },
+ { "pagenumber", "boolean" },
+ }
+ }
+}
+
-- todo take conversion from index
function registers.userdata(index,name)
local data = references.internals[tonumber(index)]
- data = data and data.userdata and data.userdata[name]
- if data then
- context(data)
- end
+ return data and data.userdata and data.userdata[name] or nil
end
+implement {
+ name = "registeruserdata",
+ actions = { registers.userdata, context },
+ arguments = { "integer", "string" }
+}
+
-- todo: ownnumber
local function pagerange(f_entry,t_entry,is_last,prefixspec,pagespec)
local fer, ter = f_entry.references, t_entry.references
- context.registerpagerange(
+ ctx_registerpagerange(
f_entry.processors and f_entry.processors[2] or "",
fer.internal or 0,
fer.realpage or 0,
function()
- helpers.prefixpage(f_entry,prefixspec,pagespec)
+ h_prefixpage(f_entry,prefixspec,pagespec)
end,
ter.internal or 0,
ter.lastrealpage or ter.realpage or 0,
function()
if is_last then
- helpers.prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys
+ h_prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys
else
- helpers.prefixpage (t_entry,prefixspec,pagespec)
+ h_prefixpage (t_entry,prefixspec,pagespec)
end
end
)
@@ -593,11 +980,11 @@ end
local function pagenumber(entry,prefixspec,pagespec)
local er = entry.references
- context.registeronepage(
+ ctx_registeronepage(
entry.processors and entry.processors[2] or "",
er.internal or 0,
er.realpage or 0,
- function() helpers.prefixpage(entry,prefixspec,pagespec) end
+ function() h_prefixpage(entry,prefixspec,pagespec) end
)
end
@@ -665,8 +1052,9 @@ local function collapsepages(pages)
end
function registers.flush(data,options,prefixspec,pagespec)
- local collapse_singles = options.compress == variables.yes
- local collapse_ranges = options.compress == variables.all
+ local collapse_singles = options.compress == v_yes
+ local collapse_ranges = options.compress == v_all
+ local show_page_number = options.pagenumber ~= false -- true or false
local result = data.result
local maxlevel = 0
--
@@ -684,18 +1072,19 @@ function registers.flush(data,options,prefixspec,pagespec)
report_registers("limiting level to %a",maxlevel)
end
--
- context.startregisteroutput()
-local done = { }
+ ctx_startregisteroutput()
+ local done = { }
+ local started = false
for i=1,#result do
-- ranges need checking !
local sublist = result[i]
-- local done = { false, false, false, false }
-for i=1,maxlevel do
- done[i] = false
-end
+ for i=1,maxlevel do
+ done[i] = false
+ end
local data = sublist.data
local d, n = 0, 0
- context.startregistersection(sublist.tag)
+ ctx_startregistersection(sublist.tag)
for d=1,#data do
local entry = data[d]
if entry.metadata.kind == "see" then
@@ -703,8 +1092,8 @@ end
if #list > 1 then
list[#list] = nil
else
- -- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere
- report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1])
+ -- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere .. somehow this message is wrong
+ -- report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1])
end
end
end
@@ -712,140 +1101,158 @@ end
-- but we don't want to allocate too many entries so there we go
while d < #data do
d = d + 1
- local entry = data[d]
- local e = { false, false, false }
-for i=3,maxlevel do
- e[i] = false
-end
+ local entry = data[d]
local metadata = entry.metadata
- local kind = metadata.kind
- local list = entry.list
+ local kind = metadata.kind
+ local list = entry.list
+ local e = { false, false, false }
+ for i=3,maxlevel do
+ e[i] = false
+ end
for i=1,maxlevel do
if list[i] then
e[i] = list[i][1]
end
- if e[i] ~= done[i] then
- if e[i] and e[i] ~= "" then
- done[i] = e[i]
-for j=i+1,maxlevel do
- done[j] = false
-end
- if n == i then
- context.stopregisterentries()
- context.startregisterentries(n)
- else
- while n > i do
- n = n - 1
- context.stopregisterentries()
- end
- while n < i do
- n = n + 1
- context.startregisterentries(n)
- end
+ if e[i] == done[i] then
+ -- skip
+ elseif not e[i] then
+ -- see ends up here
+ -- can't happen any more
+ done[i] = false
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ elseif e[i] == "" then
+ done[i] = false
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ else
+ done[i] = e[i]
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ if started then
+ ctx_stopregisterentry()
+ started = false
+ end
+ if n == i then
+-- ctx_stopregisterentries()
+-- ctx_startregisterentries(n)
+ else
+ while n > i do
+ n = n - 1
+ ctx_stopregisterentries()
end
- local internal = entry.references.internal or 0
- local seeparent = entry.references.seeparent or ""
- local processor = entry.processors and entry.processors[1] or ""
- -- so, we need to keep e as is (local), or we need local title = e[i] ... which might be
- -- more of a problem
- if metadata then
- context.registerentry(processor,internal,seeparent,function() helpers.title(e[i],metadata) end)
- else -- ?
- context.registerentry(processor,internal,seeindex,e[i])
+ while n < i do
+ n = n + 1
+ ctx_startregisterentries(n)
end
+ end
+ local references = entry.references
+ local processors = entry.processors
+ local internal = references.internal or 0
+ local seeparent = references.seeparent or ""
+ local processor = processors and processors[1] or ""
+ -- so, we need to keep e as is (local), or we need local title = e[i] ... which might be
+ -- more of a problem
+ ctx_startregisterentry(0) -- will become a counter
+ started = true
+ if metadata then
+ ctx_registerentry(processor,internal,seeparent,function() h_title(e[i],metadata) end)
else
- done[i] = false
-for j=i+1,maxlevel do
- done[j] = false
-end
+ -- can this happen?
+ ctx_registerentry(processor,internal,seeindex,e[i])
end
end
end
if kind == 'entry' then
- context.startregisterpages()
- if collapse_singles or collapse_ranges then
- -- we collapse ranges and keep existing ranges as they are
- -- so we get prebuilt as well as built ranges
- local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0
- while dd < #data do
- dd = dd + 1
- local next = data[dd]
- if next and next.metadata.kind == "see" then
- dd = dd - 1
- break
- else
- local el, nl = entry.list, next.list
- if not equal(el,nl) then
+ if show_page_number then
+ ctx_startregisterpages()
+ if collapse_singles or collapse_ranges then
+ -- we collapse ranges and keep existing ranges as they are
+ -- so we get prebuilt as well as built ranges
+ local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0
+ while dd < #data do
+ dd = dd + 1
+ local next = data[dd]
+ if next and next.metadata.kind == "see" then
dd = dd - 1
- --~ first = nil
break
- elseif next.references.lastrealpage then
- nofpages = nofpages + 1
- pages[nofpages] = first and { first, last or first } or { entry, entry }
- nofpages = nofpages + 1
- pages[nofpages] = { next, next }
- first, last, prev = nil, nil, nil
- elseif not first then
- first, prev = next, next
- elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ?
- last, prev = next, next
else
- nofpages = nofpages + 1
- pages[nofpages] = { first, last or first }
- first, last, prev = next, nil, next
+ local el, nl = entry.list, next.list
+ if not equal(el,nl) then
+ dd = dd - 1
+ --~ first = nil
+ break
+ elseif next.references.lastrealpage then
+ nofpages = nofpages + 1
+ pages[nofpages] = first and { first, last or first } or { entry, entry }
+ nofpages = nofpages + 1
+ pages[nofpages] = { next, next }
+ first, last, prev = nil, nil, nil
+ elseif not first then
+ first, prev = next, next
+ elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ?
+ last, prev = next, next
+ else
+ nofpages = nofpages + 1
+ pages[nofpages] = { first, last or first }
+ first, last, prev = next, nil, next
+ end
end
end
- end
- if first then
- nofpages = nofpages + 1
- pages[nofpages] = { first, last or first }
- end
- if collapse_ranges and nofpages > 1 then
- nofpages = collapsepages(pages)
- end
- if nofpages > 0 then -- or 0
- d = dd
- for p=1,nofpages do
- local first, last = pages[p][1], pages[p][2]
- if first == last then
- if first.references.lastrealpage then
- pagerange(first,first,true,prefixspec,pagespec)
+ if first then
+ nofpages = nofpages + 1
+ pages[nofpages] = { first, last or first }
+ end
+ if collapse_ranges and nofpages > 1 then
+ nofpages = collapsepages(pages)
+ end
+ if nofpages > 0 then -- or 0
+ d = dd
+ for p=1,nofpages do
+ local first, last = pages[p][1], pages[p][2]
+ if first == last then
+ if first.references.lastrealpage then
+ pagerange(first,first,true,prefixspec,pagespec)
+ else
+ pagenumber(first,prefixspec,pagespec)
+ end
+ elseif last.references.lastrealpage then
+ pagerange(first,last,true,prefixspec,pagespec)
else
- pagenumber(first,prefixspec,pagespec)
+ pagerange(first,last,false,prefixspec,pagespec)
end
- elseif last.references.lastrealpage then
- pagerange(first,last,true,prefixspec,pagespec)
- else
- pagerange(first,last,false,prefixspec,pagespec)
end
- end
- elseif entry.references.lastrealpage then
- pagerange(entry,entry,true,prefixspec,pagespec)
- else
- pagenumber(entry,prefixspec,pagespec)
- end
- else
- while true do
- if entry.references.lastrealpage then
+ elseif entry.references.lastrealpage then
pagerange(entry,entry,true,prefixspec,pagespec)
else
pagenumber(entry,prefixspec,pagespec)
end
- if d == #data then
- break
- else
- d = d + 1
- local next = data[d]
- if next.metadata.kind == "see" or not equal(entry.list,next.list) then
- d = d - 1
+ else
+ while true do
+ if entry.references.lastrealpage then
+ pagerange(entry,entry,true,prefixspec,pagespec)
+ else
+ pagenumber(entry,prefixspec,pagespec)
+ end
+ if d == #data then
break
else
- entry = next
+ d = d + 1
+ local next = data[d]
+ if next.metadata.kind == "see" or not equal(entry.list,next.list) then
+ d = d - 1
+ break
+ else
+ entry = next
+ end
end
end
end
+ ctx_stopregisterpages()
end
- context.stopregisterpages()
elseif kind == 'see' then
local t, nt = { }, 0
while true do
@@ -864,38 +1271,77 @@ end
end
end
end
- context.startregisterseewords()
+ ctx_startregisterseewords()
for i=1,nt do
local entry = t[i]
local seeword = entry.seeword
local seetext = seeword.text or ""
local processor = seeword.processor or (entry.processors and entry.processors[1]) or ""
local seeindex = entry.references.seeindex or ""
- context.registerseeword(i,n,processor,0,seeindex,seetext)
+ -- ctx_registerseeword(i,nt,processor,0,seeindex,seetext)
+ ctx_registerseeword(i,nt,processor,0,seeindex,function() h_title(seetext,metadata) end)
end
- context.stopregisterseewords()
+ ctx_stopregisterseewords()
end
end
+ if started then
+ ctx_stopregisterentry()
+ started = false
+ end
while n > 0 do
- context.stopregisterentries()
+ ctx_stopregisterentries()
n = n - 1
end
- context.stopregistersection()
+ ctx_stopregistersection()
end
- context.stopregisteroutput()
+ ctx_stopregisteroutput()
-- for now, maybe at some point we will do a multipass or so
data.result = nil
data.metadata.sorted = false
-end
-
-
-function registers.analyze(class,options)
- context(registers.analyzed(class,options))
+ -- temp hack for luajittex :
+ local entries = data.entries
+ for i=1,#entries do
+ entries[i].split = nil
+ end
+ -- collectgarbage("collect")
end
function registers.process(class,...)
- if registers.analyzed(class,...) > 0 then
- registers.flush(collected[class],...)
+ if analyzeregister(class,...) > 0 then
+ local data = collected[class]
+ registers.flush(data,...)
end
end
+implement {
+ name = "processregister",
+ actions = registers.process,
+ arguments = {
+ "string",
+ {
+ { "language" },
+ { "method" },
+ { "numberorder" },
+ { "compress" },
+ { "criterium" },
+ { "pagenumber", "boolean" },
+ },
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ }
+ }
+}
diff --git a/tex/context/base/strc-reg.mkiv b/tex/context/base/strc-reg.mkiv
index 2d28114c3..138a1486f 100644
--- a/tex/context/base/strc-reg.mkiv
+++ b/tex/context/base/strc-reg.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% todo: tag:: becomes rendering
% todo: language, character, linked, location
% todo: fonts etc at sublevels (already defined)
@@ -106,6 +108,14 @@
\c!entries=,
\c!alternative=]
+
+\definemixedcolumns
+ [\v!register]
+ [\c!n=\registerparameter\c!n,
+ \c!balance=\registerparameter\c!balance,
+ \c!align=\registerparameter\c!align,
+ \c!tolerance=\registerparameter\c!tolerance]
+
%D \starttyping
%D \setupregister[index][1][textcolor=darkred]
%D \setupregister[index][2][textcolor=darkgreen,textstyle=bold]
@@ -123,7 +133,8 @@
\appendtoks
\ifconditional\c_strc_registers_defining \else % todo: dosingle ...
\settrue\c_strc_registers_defining
- \ctxlua{structures.registers.define('\currentregister')}%
+ \definemixedcolumns[\currentregister][\v!register]% first as otherwise it overloads start/stop
+ \clf_defineregister{\currentregister}{\registerparameter\c!referencemethod}%
\normalexpanded{\presetheadtext[\currentregister=\Word{\currentregister}]}%
\setuevalue{\currentregister}{\dodoubleempty\strc_registers_insert_entry[\currentregister]}%
\setuevalue{\e!see\currentregister}{\dodoubleempty\strc_registers_insert_see[\currentregister]}%
@@ -143,6 +154,10 @@
\fi
\to \everydefineregister
+\appendtoks
+ \clf_setregistermethod{\currentregister}{\registerparameter\c!referencemethod}%
+\to \everysetupregister
+
%D Registering:
\def\strc_registers_register_page_entry
@@ -152,6 +167,52 @@
\expandafter\strc_registers_register_page_entry_indeed
\fi}
+\def\strc_registers_register_page_expand_xml_entries
+ {\xmlstartraw
+ \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
+ \xmlstopraw
+ \globallet\currentregistercoding\s!xml}
+
+\def\strc_registers_register_page_expand_yes_entries
+ {\xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_nop_entries
+ {\xdef\currentregisterentriesa{\detokenizedregisterparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\detokenizedregisterparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\detokenizedregisterparameter{\c!entries:3}}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_xml
+ {\xmlstartraw
+ \xdef\currentregisterentries{\registerparameter\c!entries}%
+ \xmlstopraw
+ \globallet\currentregistercoding\s!xml}
+
+\def\strc_registers_register_page_expand_yes
+ {\xdef\currentregisterentries{\registerparameter\c!entries}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_nop
+ {\xdef\currentregisterentries{\detokenizedregisterparameter\c!entries}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_xml_keys
+ {\xmlstartraw
+ \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
+ \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
+ \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
+ \xmlstopraw}
+
+\def\strc_registers_register_page_expand_yes_keys
+ {\xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
+ \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
+ \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}}
+
\def\strc_registers_register_page_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
@@ -165,106 +226,118 @@
\xdef\currentregisterxmlsetup {\registerparameter\c!xmlsetup}%
\ifx\currentregisterentries\empty
\ifx\currentregisterexpansion\s!xml
- \xmlstartraw
- \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
- \xmlstopraw
- \globallet\currentregistercoding\s!xml
+ \strc_registers_register_page_expand_xml_entries
+ \else\ifx\currentregisterexpansion\v!yes
+ \strc_registers_register_page_expand_yes_entries
\else
- \ifx\currentregisterexpansion\v!yes
- \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
- \else
- \xdef\currentregisterentriesa{\detokenizedregisterparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\detokenizedregisterparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\detokenizedregisterparameter{\c!entries:3}}%
- \fi
- \globallet\currentregistercoding\s!tex
- \fi
+ \strc_registers_register_page_expand_nop_entries
+ \fi\fi
\else
\ifx\currentregisterexpansion\s!xml
- \xmlstartraw
- \xdef\currentregisterentries{\registerparameter\c!entries}%
- \xmlstopraw
- \globallet\currentregistercoding\s!xml
+ \strc_registers_register_page_expand_xml
+ \else\ifx\currentregisterexpansion\v!yes
+ \strc_registers_register_page_expand_yes
\else
- \ifx\currentregisterexpansion\v!yes
- \xdef\currentregisterentries{\registerparameter\c!entries}%
- \else
- \xdef\currentregisterentries{\detokenizedregisterparameter\c!entries}%
- \fi
- \globallet\currentregistercoding\s!tex
- \fi
+ \strc_registers_register_page_expand_nop
+ \fi\fi
\fi
\ifx\currentregisterkeys\empty
\ifx\currentregistercoding\s!xml
- \xmlstartraw
- \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
- \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
- \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
- \xmlstopraw
+ \strc_registers_register_page_expand_xml_keys
\else
- \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
- \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
- \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
+ \strc_registers_register_page_expand_yes_keys
\fi
\fi
\setnextinternalreference
% we could consider storing register entries in a list which we
% could then sort
- \xdef\currentregisternumber{\ctxlua{
- structures.registers.store { % 'own' should not be in metadata
- metadata = {
- kind = "entry",
- name = "\currentregister",
- level = structures.sections.currentlevel(),
- coding = "\currentregistercoding",
- catcodes = \the\catcodetable,
+ \xdef\currentregisternumber{\clf_storeregister % 'own' should not be in metadata
+ metadata {%
+ name {\currentregister}%
+ coding {\currentregistercoding}%
\ifx\currentregisterownnumber\v!yes
- own = "\registerparameter\c!alternative", % can be used instead of pagenumber
+ own {\registerparameter\c!alternative}% can be used instead of pagenumber
+ \fi
+ \ifx\currentreferencecoding\s!xml
+ xmlroot {\xmldocument} % only useful when text
\fi
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument" \else nil \fi, % only useful when text
\ifx\currentregisterxmlsetup\empty \else
- xmlsetup = "\currentregisterxmlsetup",
+ xmlsetup {\currentregisterxmlsetup}%
\fi
- },
- references = {
- internal = \nextinternalreference,
- section = structures.sections.currentid(), % hm, why then not also lastsection the same way
- label = "\currentregisterlabel",
- },
- % \ifx\currentregisterentries\empty \else
- entries = {
+ }%
+ references {%
+ \ifx\currentregisterlabel\empty \else
+ label {\currentregisterlabel}%
+ \fi
+ }%
+ entries {%
% we need a special one for xml, this is just a single one
\ifx\currentregisterentries\empty
- { \!!bs\currentregisterentriesa\!!es, \!!bs\currentregisterentriesb\!!es, \!!bs\currentregisterentriesc\!!es },
+ entries {
+ {\currentregisterentriesa}%
+ {\currentregisterentriesb}%
+ {\currentregisterentriesc}%
+ }
\else
- \!!bs\currentregisterentries\!!es,
+ entry {\currentregisterentries}%
\fi
\ifx\currentregisterkeys\empty
- { \!!bs\currentregisterkeysa\!!es, \!!bs\currentregisterkeysb\!!es, \!!bs\currentregisterkeysc\!!es },
+ keys {
+ {\currentregisterkeysa}%
+ {\currentregisterkeysb}%
+ {\currentregisterkeysc}%
+ }
\else
- \!!bs\currentregisterkeys\!!es,
+ key {\currentregisterkeys}%
\fi
- },
- % \fi
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
- }
- }}%
- \ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}%
+ }%
+ userdata {\detokenize\expandafter{\normalexpanded{#3}}}
+ }%
+ % \clf_setinternalreference internal \nextinternalreference\relax % in previous
\ifx\currentregisterownnumber\v!yes
\glet\currentregistersynchronize\relax
\else
- \xdef\currentregistersynchronize{\ctxlatelua{structures.registers.enhance("\currentregister",\currentregisternumber)}}%
+ \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
\fi
\currentregistersynchronize % here?
% needs thinking ... bla\index{bla}. will break before the . but adding a
% penalty is also no solution
+ \dostarttagged\t!registerlocation\currentregister
\attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
+ \dotagregisterlocation
+ \dostoptagged
+ \endgroup}
+
+\unexpanded\def\dosetfastregisterentry#1#2#3#4#5% register entry key processor processor
+ {\begingroup
+ \edef\currentregister{#1}%
+ \setnextinternalreference
+ \xdef\currentregisternumber{\clf_storeregister
+ {%
+ metadata {%
+ name {\currentregister}%
+ }
+ entries {%
+ entry {#2}%
+ key {#3}%
+ }%
+ processors {%
+ entry {#4}%
+ page {#5}%
+ }%
+ }%
+ }%
+ % overlap with the above
+ \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
+ \currentregistersynchronize % here?
+ \dostarttagged\t!registerlocation\currentregister
+ \attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
+ \dotagregisterlocation
+ \dostoptagged
\endgroup}
+\let\dotagregisterlocation\relax % experiment
+
\unexpanded\def\strc_registers_insert_entry[#1][#2]%
{\def\currentregister{#1}%
\doifelse{\registerparameter\c!ownnumber}\v!yes
@@ -296,7 +369,7 @@
\fi}
\def\strc_registers_stop_entry[#1][#2]%
- {\normalexpanded{\ctxlatelua{structures.registers.extend("#1","#2")}}}
+ {\normalexpanded{\ctxlatecommand{extendregister("#1","#2")}}}
\def\setregisterentry {\dotripleempty\strc_registers_set_entry}
\def\finishregisterentry{\dotripleempty\strc_registers_finish_entry}
@@ -309,7 +382,7 @@
\def\strc_registers_finish_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
- \setupcurrentregister[\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]%
+ \setupcurrentregister[\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]% todo: fast setter
\edef\currentregisterlabel {\registerparameter\c!label}%
\edef\currentregisterexpansion{\registerparameter\c!expansion}%
\edef\currentregisterownnumber{\registerparameter\c!ownnumber}%
@@ -329,19 +402,19 @@
\fi
% I hate this kind of mess ... but it's a user request.
\ifx\currentregisterentries\empty
- \normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
+ \normalexpanded{\ctxcommand{extendregister("\currentregister","\currentregisterlabel", {
metadata = {
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
\fi
},
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
+ userdata = \!!bs\detokenize{#3}\!!es
})%
}}%
\else
- \normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
+ \normalexpanded{\ctxcommand{extendregister("\currentregister","\currentregisterlabel", {
metadata = {
- catcodes = \the\catcodetable,
+ % catcodes = \the\catcodetable,
coding = "\currentregistercoding",
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
@@ -352,7 +425,7 @@
\!!bs\currentregisterentries\!!es,
\!!bs\currentregisterkeys\!!es
},
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
+ userdata = \!!bs\detokenize{#3}\!!es
})
}}%
\fi
@@ -374,7 +447,7 @@
% \placeregister[index][n=1]
% \stoptext
-% some overlap wit previous
+% some overlap with previous
\unexpanded\def\setstructurepageregister
{\dotripleempty\strc_registers_set}
@@ -421,27 +494,28 @@
\fi
\setnextinternalreference
% we could consider storing register entries in a list
- \edef\temp{\ctxlua{ structures.registers.store {
- metadata = {
- kind = "see",
- name = "\currentregister",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
- },
- references = {
- internal = \nextinternalreference,
- section = structures.sections.currentid(),
- },
- entries = {
- % we need a special one for xml, this is just a single one
- \!!bs\currentregisterentries\!!es,
- \!!bs#2\!!es
- },
- seeword = {
- text = \!!bs\currentregisterseeword\!!es
- },
- }
+ \edef\temp{\clf_storeregister{
+ metadata {%
+ kind {see}%
+ name {\currentregister}%
+ }%
+ references {%
+ % internal = \nextinternalreference,
+ % section = structures.sections.currentid(),
+ }%
+ entries {%
+ % we need a special one for xml, this is just a single one
+ entry {\currentregisterentries}%
+ key {#2}%
+ }%
+ seeword {%
+ text {\currentregisterseeword}%
+ }%
}}%
+ \dostarttagged\t!registerlocation\currentregister
+ \attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
+ \dotagregisterlocation
+ \dostoptagged
\endgroup}
%D Rendering:
@@ -457,13 +531,17 @@
{\begingroup
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
- \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\ctxlua{structures.registers.analyze('\currentregister',{
- language = "\registerparameter\s!language",
- method = "\registerparameter\c!method",
- numberorder = "\registerparameter\c!numberorder",
- compress = "\registerparameter\c!compress",
- criterium = "\registerparameter\c!criterium",
- })}}}% brrr
+ \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\clf_analyzeregister
+ {\currentregister}%
+ {%
+ language {\registerparameter\s!language}%
+ method {\registerparameter\c!method}%
+ numberorder {\registerparameter\c!numberorder}%
+ compress {\registerparameter\c!compress}%
+ criterium {\registerparameter\c!criterium}%
+ pagenumber \ifx\registerpageseparatorsymbol\empty false\else true\fi
+ }%
+ }}%
\ifcase\utilityregisterlength\relax
\resetsystemmode\v!register
\else
@@ -486,59 +564,43 @@
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
\the\everyplaceregister
- \ifnum\registerparameter\c!n>\plusone
- \startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \strc_registers_place_indeed
- \stopcolumns
+ \ifnum\namedmixedcolumnsparameter\currentregister\c!n>\plusone
+ \startmixedcolumns[\currentregister]
+ \strc_registers_place_indeed
+ \stopmixedcolumns
\else
\strc_registers_place_indeed
\fi
\endgroup
\fi}
-\def\strc_registers_place_columns
- {\startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \startpacked[\v!blank]%
- \strc_registers_place_indeed
- \stoppacked
- \stopcolumns}
-
-\def\strc_registers_place_normal
- {\startpacked[\v!blank]%
- \strc_registers_place_indeed
- \stoppacked}
-
\def\strc_registers_place_indeed
- {\ctxlua{structures.registers.process('\currentregister',{
- language = "\registerparameter\s!language",
- method = "\registerparameter\c!method",
- numberorder = "\registerparameter\c!numberorder",
- compress = "\registerparameter\c!compress",
- criterium = "\registerparameter\c!criterium",
- },{
- separatorset = "\registerparameter\c!pageprefixseparatorset",
- conversionset = "\registerparameter\c!pageprefixconversionset",
- starter = \!!bs\registerparameter\c!pageprefixstarter\!!es,
- stopper = \!!bs\registerparameter\c!pageprefixstopper\!!es,
- set = "\registerparameter\c!pageprefixset",
- segments = "\registerparameter\c!pageprefixsegments",
- connector = \!!bs\registerparameter\c!pageprefixconnector\!!es,
- },{
- prefix = "\registerparameter\c!pageprefix",
- separatorset = "\registerparameter\c!pageseparatorset",
- conversionset = "\registerparameter\c!pageconversionset",
- starter = \!!bs\registerparameter\c!pagestarter\!!es,
- stopper = \!!bs\registerparameter\c!pagestopper\!!es,
- segments = "\registerparameter\c!pagesegments",
- })}}
+ {\clf_processregister
+ {\currentregister}%
+ {%
+ language {\registerparameter\s!language}%
+ method {\registerparameter\c!method}%
+ numberorder {\registerparameter\c!numberorder}%
+ compress {\registerparameter\c!compress}%
+ criterium {\registerparameter\c!criterium}%
+ pagenumber \ifx\registerpageseparatorsymbol\empty false\else true\fi
+ }{%
+ separatorset {\registerparameter\c!pageprefixseparatorset}%
+ conversionset {\registerparameter\c!pageprefixconversionset}%
+ starter {\registerparameter\c!pageprefixstarter}%
+ stopper {\registerparameter\c!pageprefixstopper}%
+ set {\registerparameter\c!pageprefixset}%
+ segments {\registerparameter\c!pageprefixsegments}%
+ connector {\registerparameter\c!pageprefixconnector}%
+ }{%
+ prefix {\registerparameter\c!pageprefix}%
+ separatorset {\registerparameter\c!pageseparatorset}%
+ conversionset {\registerparameter\c!pageconversionset}%
+ starter {\registerparameter\c!pagestarter}%
+ stopper {\registerparameter\c!pagestopper}%
+ segments {\registerparameter\c!pagesegments}%
+ }%
+ \relax}
\def\strc_registers_limited_entry#1%
{\limitatetext{#1}\currentregistermaxwidth\unknown}%
@@ -613,17 +675,19 @@
% a = <before> <goodbreak> <character> <par> <after> <nobreak>
-\setvalue{\??registerindicator a}#1%
+\def\strc_registers_indicator_a#1#2%
{\registerparameter\c!before
% bugged, why does leftskip get set: \vskip\lineheight\goodbreak\vskip-\lineheight
+ \typo_injectors_check_register
\begingroup
\useregisterstyleandcolor\c!style\c!color
\dontleavehmode
+ \typo_injectors_mark_register
\strut
\iflocation
\dosetdirectpagereference{\currentregister:\v!section:#1}%
\fi
- \registerparameter\c!command{#1}%
+ \registerparameter\c!command{#2}%
\endgroup
\blank[\v!samepage]%
\registerparameter\c!after
@@ -632,38 +696,55 @@
% b = <goodbreak> <before> <character> <after> <nobreak>
-\setvalue{\??registerindicator b}#1% will be shared with a
+\def\strc_registers_indicator_b#1#2%
{\registerparameter\c!before
+ \typo_injectors_check_register
\begingroup
\useregisterstyleandcolor\c!style\c!color
\dontleavehmode
+ \typo_injectors_mark_register
\strut
\iflocation
\dosetdirectpagereference{\currentregister:\v!section:#1}%
\fi
- \registerparameter\c!command{#1}%
+ \registerparameter\c!command{#2}%
\endgroup
\registerparameter\c!after
\nobreak}
-\setvalue{\??registerindicator A}#1{\getvalue{\??registerindicator a}{\WORD{#1}}}
-\setvalue{\??registerindicator B}#1{\getvalue{\??registerindicator b}{\WORD{#1}}}
+\setvalue{\??registerindicator a}#1{\strc_registers_indicator_a{#1}{#1}}
+\setvalue{\??registerindicator A}#1{\strc_registers_indicator_a{#1}{\WORD{#1}}}
+\setvalue{\??registerindicator b}#1{\strc_registers_indicator_b{#1}{#1}}
+\setvalue{\??registerindicator B}#1{\strc_registers_indicator_b{#1}{\WORD{#1}}}
%D The following macros are the interface to the rendering. These are
%D generated by \LUA. This might change.
+% \showinjector
+% \setinjector[register][2][\column]
+%
+% \starttext
+% first \index{first}
+% second \index{second}
+% third \index{third}
+% fourth \index{fourth}
+% \placeregister[index]
+% \stoptext
+
+\doinstallinjector\s!register
+
%D Beware, we get funny side effects when a dangling \index precedes a
%D placeindex as then flushing takes place inside the index. Took me hours
%D to notice that.
-\newconditional\c_strc_registers_page_done
+\newconstant \c_strc_registers_page_state % 0=nothing 1=page 2=see
\newdimen \d_strc_registers_distance
\unexpanded\def\startregisteroutput
{\endgraf
\begingroup
\d_strc_registers_distance\registerparameter\c!distance\relax
- \dostarttagged\t!register\currentregister
+ \dostarttaggedchained\t!register\currentregister\??register
\forgeteverypar
\forgetparindent
\forgetparskip}
@@ -673,17 +754,8 @@
\dostoptagged
\endgroup}
-% \unexpanded\def\startregisterentries#1% depth
-% {\endgraf
-% \begingroup
-% \dostarttagged\t!registerentries\empty
-% \let\savedcurrentregister\currentregister
-% \edef\currentregister{\currentregister:#1}%
-% \useregisterstyleandcolor\c!textstyle\c!textcolor
-% \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax
-% \hangindent\registerparameter\c!distance\relax
-% \hangafter\plusone
-% \let\currentregister\savedcurrentregister}
+\newdimen\d_strc_registers_hangindent
+\newcount\c_strc_registers_hangafter
\unexpanded\def\startregisterentries#1% depth
{\endgraf
@@ -696,8 +768,9 @@
\ifnum\scratchcounter>\plusone
\advance\leftskip\d_strc_registers_distance\relax
\fi
- \hangindent\registerparameter\c!distance\relax
- \hangafter\plusone
+ \d_strc_registers_hangindent\registerparameter\c!distance\relax
+ \c_strc_registers_hangafter \plusone
+ \blank[\v!samepage]%
\let\currentregister\savedcurrentregister}
\unexpanded\def\stopregisterentries
@@ -705,6 +778,21 @@
\dostoptagged
\endgroup}
+\unexpanded\def\startregisterentry#1% todo: level
+ {\typo_injectors_check_register
+ \begingroup
+ \dostarttagged\t!registerentry\empty
+ \global\setconstant\c_strc_registers_page_state\zerocount
+ \hangindent\d_strc_registers_hangindent
+ \hangafter \c_strc_registers_hangafter
+ \typo_injectors_mark_register}
+
+\unexpanded\def\stopregisterentry
+ {\endgraf
+ \global\setconstant\c_strc_registers_page_state\zerocount
+ \dostoptagged
+ \endgroup}
+
\unexpanded\def\startregistersection#1% title
{\dostarttagged\t!registersection\empty
\dostarttagged\t!registertag\empty
@@ -718,7 +806,6 @@
\unexpanded\def\startregisterpages
{\begingroup
\dostarttagged\t!registerpages\empty
- \setfalse\c_strc_registers_page_done
\useregisterstyleandcolor\c!pagestyle\c!pagecolor}
\unexpanded\def\stopregisterpages
@@ -726,9 +813,8 @@
\endgroup}
\unexpanded\def\startregisterseewords
- {\ifhmode\crlf\fi
+ {%\par % \ifhmode\crlf\fi % otherwise wrong level
\begingroup
- \setfalse\c_strc_registers_page_done
\dostarttagged\t!registerpage\empty
\useregisterstyleandcolor\c!pagestyle\c!pagecolor}
@@ -736,16 +822,21 @@
{\dostoptagged
\endgroup}
-\unexpanded\def\registerpageseparator% todo: , configurable
- {\ifconditional\c_strc_registers_page_done
- \registerpageseparatorsymbol
- \else
+\unexpanded\def\registerpageseparator % todo: , configurable
+ {\ifcase\c_strc_registers_page_state
\hskip\d_strc_registers_distance\relax
- \settrue\c_strc_registers_page_done
+ \or
+ \dostarttagged\t!registerseparator\empty
+ \registerpageseparatorsymbol % page
+ \dostoptagged
+ \or
+ \dostarttagged\t!registerseparator\empty
+ \registerpageseparatorsymbol % see
+ \dostoptagged
\fi}
\unexpanded\def\registeronepagerangeseparator
- {|\endash|}
+ {|\endash|} % todo use \prewordbreak
\def\withregisterpagecommand#1#2#3#4%
{\def\currentregisterpageindex{#2}%
@@ -757,12 +848,14 @@
\unexpanded\def\registeronepage#1#2#3#4% #1:processor content
{\registerpageseparator
+ \global\setconstant\c_strc_registers_page_state\plusone
\dostarttagged\t!registerpage\empty
\withregisterpagecommand{#1}{#2}{#3}{#4}%
\dostoptagged}
\unexpanded\def\registerpagerange#1#2#3#4#5#6#7% #1:processor content, content todo: -- configurable
{\registerpageseparator
+ \global\setconstant\c_strc_registers_page_state\plusone
\dostarttagged\t!registerpagerange\empty
\dostarttagged\t!registerfrompage\empty
\withregisterpagecommand{#1}{#2}{#3}{#4}%
@@ -773,12 +866,8 @@
\dostoptagged
\dostoptagged}
-\let\strc_register_injector_process\relax
-\let\strc_register_injector_show \relax
-
\unexpanded\def\defaultregisterentry#1#2#3#4% #1:processor #2:internal #3:seeindex #4:word
{\def\currentregisterpageindex{#2}%
- \strc_register_injector_process
\iflocation
\def\currentregisterseeindex{#3}%
\doifelse{\registerparameter\c!interaction}\v!text
@@ -790,10 +879,9 @@
\fi}
\unexpanded\def\doapplyregisterentrycommand#1#2% processor text
- {\dostarttagged\t!registerentry\empty
+ {\dostarttagged\t!registercontent\empty
\ifx\currentregisterseeindex\empty \else
\dontleavehmode
- \strc_register_injector_show
\dosetdirectpagereference{seeindex:\currentregisterseeindex}% maybe some day we will support an area
\fi
\applyprocessor{#1}{\registerparameter\c!textcommand{\limitedregisterentry{\registerparameter\c!deeptextcommand{#2}}}}%
@@ -808,7 +896,8 @@
\fi}
\unexpanded\def\defaultregisterseeword#1#2#3#4#5#6% i n #3:processor #4:internal #5:seeindex #6:word
- {%\registerpageseparator
+ {\registerpageseparator
+ \global\setconstant\c_strc_registers_page_state\plustwo
\def\currentregisterpageindex{#4}%
\dostarttagged\t!registersee\empty
\settrue\c_strc_registers_page_done
@@ -846,7 +935,7 @@
% \placeregister[index][n=1,pagecommand=\MyRegisterPageCommand]
% \stoptext
-\def\registerpageuserdata #1#2{\ctxlua{structures.registers.userdata(#1,"#2")}}
+\def\registerpageuserdata #1#2{\clf_registeruserdata#1{#2}}
\def\currentregisterpageuserdata {\registerpageuserdata\currentregisterpageindex} % {#1}
% not yet ok : new internal handler names
@@ -857,10 +946,10 @@
\installcorenamespace{registersymbol}
\setvalue{\??registersymbol n}%
- {\def\registerpageseparatorsymbol{, }}
+ {\def\registerpageseparatorsymbol{,\space}}
\setvalue{\??registersymbol a}%
- {\def\registerpageseparatorsymbol{, }} % now done via conversion
+ {\def\registerpageseparatorsymbol{,\space}} % now done via conversion
\setvalue{\??registersymbol\v!none}%
{\let\registerpageseparatorsymbol\empty
@@ -877,7 +966,7 @@
\def\registeronepage {\registerpagebuttonsymbol\gobblefourarguments}%
\def\registerpagerange{\registerpagebuttonsymbol\gobblesevenarguments}}
-\def\setregisterpagerendering
+\unexpanded\def\setregisterpagerendering
{\doifelse{\registerparameter\c!pagenumber}\v!no
{\let \currentregisterpagesymbol\v!none}
{\edef\currentregisterpagesymbol{\registerparameter\c!symbol}}%
@@ -904,4 +993,6 @@
[\v!index]
% [\v!indices]
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/strc-ren.mkiv b/tex/context/base/strc-ren.mkiv
index fdf8fb7f4..34903dfa0 100644
--- a/tex/context/base/strc-ren.mkiv
+++ b/tex/context/base/strc-ren.mkiv
@@ -135,7 +135,7 @@
\unexpanded\def\strc_rendering_place_head_number_and_text
{\strc_rendering_start_placement
\setheadmarking
- \doiftextelse\getheadnumber
+ \doifelsetext\getheadnumber
\dosettructureheadnumbercontent
\doresettructureheadnumbercontent
\ifconditional\c_strc_sectioning_empty
@@ -422,7 +422,7 @@
{\dodoubleargument\strc_rendering_define_placement}
\def\strc_rendering_define_placement[#1][#2]%
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_rendering_define_placement_yes[#1][#2]}%
{\strc_rendering_define_placement_nop[#1][#2]}}
@@ -516,6 +516,11 @@
\fi
\endgroup}
+\def\fakedheadnumber{\vphantom{0}} % needed for mathplus
+
+\unexpanded\def\fakeheadnumbercontent
+ {\hbox to \zeropoint{\let\getheadnumber\fakedheadnumber\headnumbercontent}}
+
\unexpanded\def\strc_rendering_inject_number_and_text
{\edef\p_command{\headparameter\c!command}% assumes \unexpanded definition
\ifx\p_command\empty
@@ -608,6 +613,7 @@
\hsize\headtextwidth
\fi
\noindent
+ \fakeheadnumbercontent % will also be done in the other ones (force consistency with numbered)
\fi
\headtextcontent
}
@@ -629,7 +635,7 @@
\hbox {
\hfill
\headnumbercontent
- \doifrightpageelse{
+ \doifelserightpage{
\scratchdistance\leftmargindistance
} {
\scratchdistance\rightmargindistance
@@ -637,6 +643,8 @@
\hskip\dimexpr\d_strc_rendering_local_leftoffset+\scratchdistance\relax
}
}
+ \else
+ \fakeheadnumbercontent % will also be done in the other ones (force consistency with numbered)
\fi
\headtextcontent
}
@@ -650,6 +658,30 @@
% \directsetup{\??headrenderings:\v!vertical:\v!inmargin}
% \stopsetups
+%D This one is for head-based numbering usage: foo 1.2 and so on:
+
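+% A usage sketch (assumed, not part of the patch):
+%
+% \setuphead[subsection][alternative=reverse]
+%
+% \subsection{Whatever} % renders as: Whatever 1.2
+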
+\defineheadalternative
+ [\v!reverse]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!reverse]
+
+\startsetups[\??headrenderings:\v!reverse]
+ \vbox {
+ \headsetupspacing
+ \noindent
+ \begstrut
+ \setfalse\headisdisplay % so a kind of mix
+ \headtextcontent
+ \ifconditional\headshownumber
+ \kern\headnumberdistance
+ \headnumbercontent
+ \else
+ \fakeheadnumbercontent
+ \fi
+ \endstrut
+ }
+\stopsetups
+
\defineheadalternative
[\v!middle]
[\c!alternative=\v!vertical,
@@ -665,6 +697,8 @@
\strut
\headnumbercontent
\par
+ \else
+ \fakeheadnumbercontent % will also be done in the other ones (force consistency with numbered)
\fi
\begstrut
\headtextcontent
diff --git a/tex/context/base/strc-rsc.lua b/tex/context/base/strc-rsc.lua
index a90f577e3..627e443b2 100644
--- a/tex/context/base/strc-rsc.lua
+++ b/tex/context/base/strc-rsc.lua
@@ -12,7 +12,8 @@ if not modules then modules = { } end modules ['strc-rsc'] = {
-- The scanner accepts nested outer, but we don't care too much, maybe
-- some day we will have both but currently the innermost wins.
-local lpegmatch, lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.match, lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.C, lpeg.Cg
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.C, lpeg.Cg
local find = string.find
local spaces = lpegP(" ")^0
@@ -34,18 +35,28 @@ local backslash = lpegP("\\")
local endofall = spaces * lpegP(-1)
-local o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient
-local a_token = 1 - rbrace
+----- o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient
+----- a_token = 1 - rbrace
local s_token = 1 - lparent - lbrace
local i_token = 1 - lparent - lbrace - endofall
local f_token = 1 - lparent - lbrace - dcolon
local c_token = 1 - lparent - lbrace - tcolon
+-- experimental
+
+local o_token = lpegpatterns.nestedparents
+ + (1 - rparent - lbrace)
+local a_token = lpegpatterns.nestedbraces
+ + (1 - rbrace)
+local q_token = lpegpatterns.unsingle
+ + lpegpatterns.undouble
+
local hastexcode = lpegCg(lpegCc("has_tex") * lpegCc(true)) -- cannot be made to work
local component = lpegCg(lpegCc("component") * lpegCs(c_token^1))
local outer = lpegCg(lpegCc("outer") * lpegCs(f_token^1))
-local operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1))
-local arguments = lpegCg(lpegCc("arguments") * lpegCs(a_token^0))
+----- operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1))
+local operation = lpegCg(lpegCc("operation") * lpegCs(q_token + o_token^1))
+local arguments = lpegCg(lpegCc("arguments") * lpegCs(q_token + a_token^0))
local special = lpegCg(lpegCc("special") * lpegCs(s_token^1))
local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1))
@@ -56,9 +67,16 @@ local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1))
inner = inner * arguments
special = special * lparent * (operation * arguments)^-1 * rparent
-local referencesplitter = spaces * lpegCf (lpegCt("") * (component + outer)^-1 * (special + inner)^-1 * endofall, rawset)
-local prefixsplitter = lpegCs(lpegP((1-scolon)^1 * scolon)) * #-scolon * lpegCs(lpegP(1)^1)
-local componentsplitter = lpegCs(lpegP((1-scolon)^1)) * scolon * #-scolon * lpegCs(lpegP(1)^1)
+local referencesplitter = spaces
+ * lpegCf (lpegCt("") * (component + outer)^-1 * (special + inner)^-1 * endofall, rawset)
+
+local prefixsplitter = lpegCs(lpegP((1-scolon)^1 * scolon))
+ * #-scolon
+ * lpegCs(lpegP(1)^1)
+
+local componentsplitter = lpegCs(lpegP((1-scolon)^1))
+ * scolon * #-scolon
+ * lpegCs(lpegP(1)^1)
prefixsplitter = componentsplitter
@@ -67,11 +85,11 @@ local function splitreference(str)
local t = lpegmatch(referencesplitter,str)
if t then
local a = t.arguments
- if a and find(a,"\\") then
+ if a and find(a,"\\",1,true) then
t.has_tex = true
else
local o = t.arguments
- if o and find(o,"\\") then
+ if o and find(o,"\\",1,true) then
t.has_tex = true
end
end
@@ -135,6 +153,8 @@ references.splitcomponent = splitcomponent
-- inspect(splitreference([[ outer :: inner { argument } ]]))
-- inspect(splitreference([[ special ( outer :: operation ) ]]))
+-- inspect(splitreference([[inner(foo,bar)]]))
+
-- inspect(splitreference([[]]))
-- inspect(splitreference([[inner]]))
-- inspect(splitreference([[special(operation{argument,argument})]]))
@@ -152,3 +172,14 @@ references.splitcomponent = splitcomponent
-- inspect(splitreference([[outer::special()]]))
-- inspect(splitreference([[outer::inner{argument}]]))
-- inspect(splitreference([[special(outer::operation)]]))
+
+-- inspect(splitreference([[special(operation)]]))
+-- inspect(splitreference([[special(operation(whatever))]]))
+-- inspect(splitreference([[special(operation{argument,argument{whatever}})]]))
+-- inspect(splitreference([[special(operation{argument{whatever}})]]))
+
+-- inspect(splitreference([[special("operation(")]]))
+-- inspect(splitreference([[special("operation(whatever")]]))
+-- inspect(splitreference([[special(operation{"argument,argument{whatever"})]]))
+-- inspect(splitreference([[special(operation{"argument{whatever"})]]))
+
diff --git a/tex/context/base/strc-sbe.mkiv b/tex/context/base/strc-sbe.mkiv
index fc48307ec..9f1d214cf 100644
--- a/tex/context/base/strc-sbe.mkiv
+++ b/tex/context/base/strc-sbe.mkiv
@@ -65,7 +65,7 @@
\resetallstructuremarks
\strc_sectionblock_get_environment\currentsectionblock
\sectionblockparameter\c!before % don't move
- \dostarttagged\t!division\currentsectionblock
+ \dostarttagged\t!division\currentsectionblock % no parents
\to \everybeforesectionblock
\appendtoks
@@ -81,7 +81,9 @@
\def\strc_sectionblock_set[#1][#2]% used to set the default
{\edef\currentsectionblock{#1}% from now on we assume a value
\setupcurrentsectionblock[#2]%
- \ctxcommand{setsectionblock("#1", { bookmark = "\sectionblockparameter\c!bookmark" })}}
+ \clf_setsectionblock{#1}
+ bookmark {\sectionblockparameter\c!bookmark}%
+ \relax}
\let\currentsectionblock\empty % was \s!unknown
@@ -93,7 +95,9 @@
\begingroup
\edef\currentsectionblock{#1}% from now on we assume a value
\setupcurrentsectionblock[#2]%
- \ctxcommand{pushsectionblock("#1", { bookmark = "\sectionblockparameter\c!bookmark" })}%
+ \clf_pushsectionblock{#1}
+ bookmark {\sectionblockparameter\c!bookmark}%
+ \relax
\csname #1true\endcsname % obsolete
\setsystemmode\currentsectionblock
\the\everybeforesectionblock\relax
@@ -102,7 +106,7 @@
\unexpanded\def\stopsectionblock
{\showmessage\m!structures2\currentsectionblock
\the\everyaftersectionblock\relax
- \ctxcommand{popsectionblock()}%
+ \clf_popsectionblock
\endgroup}
%D \starttyping
diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv
index 2962e2c49..b5a1a5ba0 100644
--- a/tex/context/base/strc-sec.mkiv
+++ b/tex/context/base/strc-sec.mkiv
@@ -15,6 +15,8 @@
\unprotect
+\startcontextdefinitioncode
+
\installcorenamespace{structure}
\installdirectcommandhandler \??structure {structure} % unchecked, so we need to initialize used parameters
@@ -101,8 +103,11 @@
{\setfalse\c_strc_bookmarks_preroll}
\def\strc_sectioning_autobookmark#1%
- {\nodestostring\tempstring{#1}%
- \globallet\currentstructurebookmark\tempstring}
+ {\begingroup
+ \the\everypreroll
+ \nodestostring\tempstring{#1}%
+ \globallet\currentstructurebookmark\tempstring
+ \endgroup}
% so it's an experiment
@@ -130,9 +135,9 @@
\xdef\currentstructuremarking {\structureparameter\c!marking}%
\xdef\currentstructurelist {\structureparameter\c!list}%
\xmlstopraw
-\iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
- \strc_sectioning_autobookmark\currentstructuretitle
-\fi \fi \fi
+ \iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark\currentstructuretitle
+ \fi \fi \fi
\ifx\currentstructurelist\empty
\globallet\currentstructurelist\currentstructuretitle
\fi
@@ -143,23 +148,23 @@
\xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
\xdef\currentstructuremarking {\structureparameter\c!marking}%
\xdef\currentstructurelist {\structureparameter\c!list}%
-\iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
- \strc_sectioning_autobookmark\currentstructuretitle
-\fi \fi \fi
+ \iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark\currentstructuretitle
+ \fi \fi \fi
\else
\xdef\currentstructuretitle {\detokenizedstructureparameter\c!title}%
\xdef\currentstructurebookmark{\detokenizedstructureparameter\c!bookmark}%
\xdef\currentstructuremarking {\detokenizedstructureparameter\c!marking}%
\xdef\currentstructurelist {\detokenizedstructureparameter\c!list}%
\iflocation \ifx\currentstructurebookmark\empty
-\ifconditional\c_strc_bookmarks_preroll
- \strc_sectioning_autobookmark{\structureparameter\c!title}%
-\else
- \begingroup
- \simplifycommands
- \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
- \endgroup
-\fi
+ \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark{\structureparameter\c!title}%
+ \else
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
+ \endgroup
+ \fi
\fi \fi
\fi
\ifx\currentstructurelist\empty
@@ -170,75 +175,72 @@
\setnextinternalreference
\storeinternalreference\currentstructurename\nextinternalreference %
\strc_sectioning_set_reference_prefix
- \xdef\currentstructurenumber{\ctxlua{ % todo: combine with next call, adapt marks accordingly
- structures.sections.somelevel {
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock",
- reference = "\currentstructurereference",
- referenceprefix = "\currentstructurereferenceprefix",
- backreference = "\currentstructurebackreference",
- },
- directives = {
- resetset = "\structureparameter\c!sectionresetset",
- },
- metadata = {
- kind = "section",
- name = "\currentstructurename",
- catcodes = \the\ifx\currentstructurecatcodes\empty\catcodetable\else\csname\currentstructurecatcodes\endcsname\fi,
- coding = "\currentstructurecoding",
- \ifx\currentstructurecoding\s!xml
- xmlroot = "\xmldocument",
- \fi
- \ifx\currentstructurexmlsetup\empty \else
- xmlsetup = "\currentstructurexmlsetup",
- \fi
- \ifx\currentstructuresaveinlist\v!no
- nolist = true,
- \fi
- \ifx\currentstructureincrementnumber\v!yes
- increment = "\currentstructureincrementnumber",
- \fi
- },
- titledata = { % we can add mark and reference
- label = \!!bs\detokenize\expandafter{\currentstructurelabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentstructuretitle }\!!es,
- \ifx\currentstructurebookmark\currentstructuretitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentstructurebookmark }\!!es,
- \fi
- \ifx\currentstructuremarking\currentstructuretitle \else
- marking = \!!bs\detokenize\expandafter{\currentstructuremarking }\!!es,
- \fi
- \ifx\currentstructuresaveinlist\v!no \else
- \ifx\currentstructurelist\currentstructuretitle \else
- list = \!!bs\detokenize\expandafter{\currentstructurelist}\!!es,
- \fi
- \fi
- },
- numberdata = {
- % needed ?
- block = "\currentsectionblock",
- hidenumber = \ifx\currentstructureshownumber\v!no true\else nil\fi, % titles
- % so far
- separatorset = "\structureparameter\c!sectionseparatorset",
- conversion = "\structureparameter\c!sectionconversion", % for good old times sake
- conversionset = "\structureparameter\c!sectionconversionset",
- starter = \!!bs\structureparameter\c!sectionstarter\!!es,
- stopper = \!!bs\structureparameter\c!sectionstopper\!!es,
- set = "\structureparameter\c!sectionset",
- segments = "\structureparameter\c!sectionsegments",
- ownnumber = "\currentstructureownnumber",
- },
- userdata = \!!bs\detokenize{#3}\!!es % will be converted to table at the lua end
+ \clf_setsectionentry
+ references {
+ internal \nextinternalreference\space
+ % block {\currentsectionblock}
+ prefix {\currentstructurereferenceprefix}
+ reference {\currentstructurereference}
+ backreference {\currentstructurebackreference}
+ }
+ directives {
+ resetset {\structureparameter\c!sectionresetset}
+ }
+ metadata {
+ kind {section}
+ name {\currentstructurename}
+ catcodes \ifx\currentstructurecatcodes\empty\catcodetable\else\csname\currentstructurecatcodes\endcsname\fi\space
+ coding {\currentstructurecoding}
+ \ifx\currentstructurecoding\s!xml
+ xmlroot {\xmldocument}
+ \fi
+ \ifx\currentstructurexmlsetup\empty \else
+ xmlsetup {\currentstructurexmlsetup}
+ \fi
+ \ifx\currentstructuresaveinlist\v!no
+ nolist \space true\space
+ \fi
+ \ifx\currentstructureincrementnumber\v!yes
+ increment {\currentstructureincrementnumber}
+ \fi
+ }
+ titledata {
+ label {\detokenize\expandafter{\currentstructurelabel}}
+ title {\detokenize\expandafter{\currentstructuretitle}}
+ \ifx\currentstructurebookmark\currentstructuretitle \else
+ bookmark {\detokenize\expandafter{\currentstructurebookmark}}
+ \fi
+ \ifx\currentstructuremarking\currentstructuretitle \else
+ marking {\detokenize\expandafter{\currentstructuremarking}}
+ \fi
+ \ifx\currentstructuresaveinlist\v!no \else
+ \ifx\currentstructurelist\currentstructuretitle \else
+ list {\detokenize\expandafter{\currentstructurelist}}
+ \fi
+ \fi
}
- }}%
- % \xdef\currentstructurelistnumber{\ctxcommand{addtolist(structures.sections.current())}}%
- \xdef\currentstructurelistnumber{\ctxcommand{currentsectiontolist()}}%
+ numberdata {
+ % block {\currentsectionblock}
+ \ifx\currentstructureshownumber\v!no
+ hidenumber \space true\space
+ \fi
+ separatorset {\structureparameter\c!sectionseparatorset}
+ conversionset {\structureparameter\c!sectionconversionset}
+ conversion {\structureparameter\c!sectionconversion}
+ starter {\structureparameter\c!sectionstarter}
+ stopper {\structureparameter\c!sectionstopper}
+ set {\structureparameter\c!sectionset}
+ segments {\structureparameter\c!sectionsegments}
+ ownnumber {\currentstructureownnumber}
+ language {\currentlanguage}% for the moment, needed for bookmarks conversion
+ }
+ userdata {\detokenize{#3}}% will be converted to table at the lua end
+ \relax
+ \xdef\currentstructurelistnumber{\clf_currentsectiontolist}%
% \currentstructuresynchronize has to be called someplace, since it introduces a node
\setstructuresynchronization\currentstructurelistnumber
\endgroup}
-\let\currentstructurenumber \!!zerocount
\let\currentsectioncountervalue \!!zerocount % redefined later
\let\previoussectioncountervalue\!!zerocount % redefined later
@@ -261,16 +263,15 @@
% todo: #1 => "#1" ... adapt lua code for name and number
-\def\structurenumber {\ctxcommand{structurenumber()}}
-\def\structuretitle {\ctxcommand{structuretitle()}}
-\def\structurevariable #1{\ctxcommand{structurevariable("#1")}}
-\def\structureuservariable #1{\ctxcommand{structureuservariable("#1")}}
-\def\structurecatcodedget #1{\ctxcommand{structurecatcodedget("#1")}} % bad name
-\def\structuregivencatcodedget #1#2{\ctxcommand{structuregivencatcodedget("#1",\number#2)}} % bad name
-\def\structureautocatcodedget #1#2{\ctxcommand{structureautocatcodedget ("#1","#2")}}
-
-\def\namedstructurevariable #1#2{\ctxcommand{namedstructurevariable ("#1","#2")}}
-\def\namedstructureuservariable#1#2{\ctxcommand{namedstructureuservariable("#1","#2")}}
+\def\structurenumber {\clf_structurenumber}
+\def\structuretitle {\clf_structuretitle}
+\def\structurevariable #1{\clf_structurevariable {#1}}
+\def\structureuservariable #1{\clf_structureuservariable {#1}}
+\def\structurecatcodedget #1{\clf_structurecatcodedget {#1}} % bad name
+\def\structuregivencatcodedget #1#2{\clf_structuregivencatcodedget {#1}#2 } % bad name
+\def\structureautocatcodedget #1#2{\clf_structureautocatcodedget {#1}{#2}}
+\def\namedstructurevariable #1#2{\clf_namedstructurevariable {#1}{#2}}
+\def\namedstructureuservariable#1#2{\clf_namedstructureuservariable{#1}{#2}}
% compatibility issue:
%
@@ -300,18 +301,14 @@
\newconditional\c_strc_rendering_continuous % not used (mkii ?)
-\def\setstructurelevel #1#2{\ctxlua{structures.sections.setlevel("#1","#2")}} % name, level|parent
-\def\getstructurelevel #1{\ctxlua{structures.sections.getcurrentlevel("#1")}}% name
-\def\setstructurenumber #1#2{\ctxlua{structures.sections.setnumber(#1,"#2")}} % level, number (+/-)
-\def\getstructurenumber #1{\ctxlua{structures.sections.getnumber(#1)}} % level
-\def\getsomestructurenumber #1#2{\ctxlua{structures.sections.getnumber(#1,"#2")}} % level, what
-\def\getfullstructurenumber #1{\ctxlua{structures.sections.fullnumber(#1)}} % level
-\def\getsomefullstructurenumber#1#2{\ctxlua{structures.sections.fullnumber(#1,"#2")}}
-\def\getspecificstructuretitle #1{\ctxlua{structures.sections.structuredata("#1","titledata.title",nil,"\headparameter\s!catcodes")}}%
-
-% will be:
-%
-% \def\getfullstructurenumber #1{\ctxcommand{structurenumber(\thenamedheadlevel{#1})}}
+\def\setstructurelevel #1#2{\clf_setstructurelevel {#1}{#2}} % name, level|parent
+\def\getstructurelevel #1{\clf_getstructurelevel {#1}} % name
+\def\setstructurenumber #1#2{\clf_setstructurenumber #1{#2}} % level, number (+/-)
+\def\getstructurenumber #1{\clf_getstructurenumber \numexpr#1\relax} % level
+\def\getsomestructurenumber #1#2{\clf_getsomestructurenumber #1{#2}} % level, what
+\def\getfullstructurenumber #1{\clf_getfullstructurenumber \numexpr#1\relax} % level
+\def\getsomefullstructurenumber#1#2{\clf_getsomefullstructurenumber #1{#2}} % level, what
+\def\getspecificstructuretitle #1{\clf_getspecificstructuretitle {#1}{\headparameter\s!catcodes}}
% structure heads (like \startchapter)
@@ -407,7 +404,7 @@
\definemarking[\currenthead] [\currentheadsection]%
\definemarking[\currenthead\v!number][\currentheadsection]%
\setupmarking [\currenthead] [\c!filtercommand=\noexpand\sectionheadmarkingtitle {\currenthead}]%
- \setupmarking [\currenthead\c!number][\c!filtercommand=\noexpand\sectionheadmarkingnumber{\currenthead}]%
+ \setupmarking [\currenthead\v!number][\c!filtercommand=\noexpand\sectionheadmarkingnumber{\currenthead}]%
}%
\doifelselist\currenthead\donothing
{\definelist[\currenthead][\c!prefix=\v!no]}%
@@ -425,9 +422,15 @@
\the\everysetuphead
\to \everydefinehead
+\newtoks\everyredefinehead
+
+\appendtoks
+ \the\everyredefinehead
+\to \everydefinehead
+
\appendtoks
\setstructurelevel\currenthead{\thenamedheadlevel\currenthead}%
-\to \everydefinehead
+\to \everyredefinehead
\appendtoks
% beware, this is a global register
@@ -435,13 +438,14 @@
\edef\currentsectionheadcoupling{\sectionheadcoupling\currenthead}%
\edef\currentsectionheadsection {\sectionheadsection \currentsectionheadcoupling}%
\edef\currentsectionlevel {\sectionlevel \currentsectionheadsection}%
- \ctxlua{structures.sections.register("\currenthead",{
- coupling = "\currentsectionheadcoupling",
- section = "\currentsectionheadsection",
- level = \currentsectionlevel,
- })}%
+ \clf_registersection {\currenthead} {
+ coupling {\currentsectionheadcoupling}
+ section {\currentsectionheadsection}
+ level \currentsectionlevel
+ parent {\currentheadparent}
+ }%
\endgroup
-\to \everydefinehead
+\to \everyredefinehead
\appendtoks
% \setevalue{\e!next \currenthead}{\donexthead [\currenthead]}%
@@ -455,6 +459,15 @@
{\setuevalue\currenthead{\strc_sectioning_handle_nop[\currenthead]}}%
\to \everysetuphead
+\unexpanded\def\doredefinehead#1#2% called at lua end
+ {\pushmacro\currenthead
+ \pushmacro\currentheadparent
+ \edef\currenthead{#1}%
+ \edef\currentheadparent{#2}%
+ \the\everyredefinehead\relax
+ \popmacro\currentheadparent
+ \popmacro\currenthead}
+
\let\currentnamedsection\empty
\unexpanded\def\startnamedsection
@@ -578,8 +591,8 @@
% head -> head
-\def\sectionheadmarkingtitle #1#2{\ctxlua{structures.marks.title("#1","#2")}}
-\def\sectionheadmarkingnumber#1#2{\ctxlua{structures.marks.number("#1","#2")}}
+\def\sectionheadmarkingtitle #1#2{\clf_markingtitle {#1}{#2}}
+\def\sectionheadmarkingnumber#1#2{\clf_markingnumber{#1}{#2}}
\def\sectionheadcoupling#1{\namedheadparameter{#1}\c!coupling}
\def\sectionheadsection #1{\namedheadparameter{#1}\c!section}
@@ -603,7 +616,7 @@
\unexpanded\def\strc_sectioning_handle_nop_indeed[#1][#2]% for taco: [key=value] variant
{\setfalse\currentstructureown
\triggerautostructurelevel
- \doifassignmentelse{#2}\strc_sectioning_handle_nop_indeed_yes\strc_sectioning_handle_nop_indeed_nop{#1}{#2}}
+ \doifelseassignment{#2}\strc_sectioning_handle_nop_indeed_yes\strc_sectioning_handle_nop_indeed_nop{#1}{#2}}
\unexpanded\def\strc_sectioning_handle_nop_indeed_yes#1#2%
{\strc_sectioning_handle{#1}{#2}{}}
@@ -763,7 +776,7 @@
\unexpanded\def\placeheadtext {\dosingleempty\strc_sectioning_place_head_text } % use with care
\unexpanded\def\placeheadnumber{\dosingleempty\strc_sectioning_place_head_number} % use with care
-\unexpanded\def\strc_sectioning_report{\ctxlua{structures.sections.reportstructure()}}
+\unexpanded\def\strc_sectioning_report{\clf_reportstructure}
\ifdefined\strc_rendering_initialize_style_and_color \else
@@ -985,15 +998,19 @@
\hskip\s_strc_sectioniong_continuous_signal\relax
\fi}
+% \let\dotagsectionlevel\relax
+
\def\strc_sectioning_before_yes
{\strc_sectioning_check_before\strc_sectioning_handle_page_yes
\headparameter\c!inbetween
- \dostarttagged\t!section\currenthead}
+ \dostarttaggedchained\t!section\currenthead\??head
+% \dotagsectionlevel
+ }
\def\strc_sectioning_before_nop
{\strc_sectioning_check_before\strc_sectioning_handle_page_nop
\headparameter\c!inbetween
- \dostarttagged\currenthead\empty}
+ \dostarttagged\currenthead\empty} % this is a weird one .. needs checking
\def\strc_sectioning_empty_correction
{\ifconditional\c_strc_sectioning_empty
@@ -1007,7 +1024,8 @@
\def\strc_sectioning_after_yes
{\ifconditional\headisdisplay
\ifconditional\c_strc_sectioning_auto_break
- \vspacing[\v!samepage-\currentheadlevel]%
+ % \vspacing[\v!samepage-\currentheadlevel]%
+\vspacing[\v!samepage]%
\fi
\strc_sectioning_empty_correction
\headparameter\c!after
@@ -1039,8 +1057,8 @@
#1%
\fi}
-\def\currentsectioncountervalue {\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead)}}
-\def\previoussectioncountervalue{\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead-1)}}
+\def\currentsectioncountervalue {\clf_depthnumber\numexpr\thenamedheadlevel\currenthead\relax}
+\def\previoussectioncountervalue{\clf_depthnumber\numexpr\thenamedheadlevel\currenthead+\minusone\relax}
\def\strc_sectioning_handle_page_nop
{\edef\p_continue{\headparameter\c!continue}%
@@ -1063,10 +1081,16 @@
\strc_sectioning_handle_page_nop
\edef\p_aligntitle{\headparameter\c!aligntitle}%
\ifx\p_aligntitle\v!float
+\ifconditional\c_strc_sectioning_auto_break
+ \vspacing[\v!samepage-\currentheadlevel]%
+\fi
\headparameter\c!before\relax
\indent
\else
\page_otr_command_flush_side_floats
+\ifconditional\c_strc_sectioning_auto_break
+ \vspacing[\v!samepage-\currentheadlevel]%
+\fi
\headparameter\c!before\relax
\fi
\global\c_strc_sectioniong_preceding_level\currentheadlevel
@@ -1089,7 +1113,7 @@
{\dodoubleargument\strc_sectioning_setup_number}
\def\strc_sectioning_setup_number[#1][#2]% todo: reset if at other level
- {\setstructurenumber{\thenamedheadlevel{#1}}{#2}}
+ {\setstructurenumber{\thenamedheadlevel{#1}}{\number#2}}
\def\currentheadnumber{0} % ==> \currentheadnumber
@@ -1119,7 +1143,7 @@
\let\sectioncountervalue\structurevalue
-\def\currentheadtext{obsolete, use marks}
+\def\currentheadtext{obsolete,\space use marks}
% list references, will be redone in lua when we need it
@@ -1136,7 +1160,7 @@
\unexpanded\def\strc_sectioning_initialize_autolevel
{\ifconditional\c_strc_sectioning_auto_levels
- \ctxcommand{autonextstructurelevel(\number\currentheadlevel)}%
+ \clf_autonextstructurelevel\currentheadlevel\relax
\global\setfalse\c_strc_sectioning_auto_levels
\fi}
@@ -1144,7 +1168,7 @@
{\global\settrue\c_strc_sectioning_auto_levels}
\unexpanded\def\finalizeautostructurelevels
- {\ctxcommand{autofinishstructurelevels()}}
+ {\clf_autofinishstructurelevels}
\unexpanded\def\finalizeautostructurelevel
{\dostoptagged
@@ -1154,4 +1178,6 @@
\finalizeautostructurelevels
\to \everystoptext
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/strc-syn.lua b/tex/context/base/strc-syn.lua
index ca4b3ac18..5f3557a69 100644
--- a/tex/context/base/strc-syn.lua
+++ b/tex/context/base/strc-syn.lua
@@ -7,10 +7,13 @@ if not modules then modules = { } end modules ['strc-syn'] = {
}
local next, type = next, type
-local format = string.format
-local allocate = utilities.storage.allocate
--- interface to tex end
+local context = context
+local implement = interfaces.implement
+
+local allocate = utilities.storage.allocate
+
+local sorters = sorters
local structures = structures
local synonyms = structures.synonyms
@@ -19,6 +22,10 @@ local tags = structures.tags
local collected = allocate()
local tobesaved = allocate()
+local firstofsplit = sorters.firstofsplit
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
synonyms.collected = collected
synonyms.tobesaved = tobesaved
@@ -37,94 +44,144 @@ job.register('structures.synonyms.collected', tobesaved, initializer, finalizer)
-- todo: allocate becomes metatable
-local function allocate(class)
- local d = tobesaved[class]
- if not d then
- d = {
- metadata = {
- language = 'en',
- sorted = false,
- class = class
- },
- entries = {
- },
- hash = {
- }
+table.setmetatableindex(tobesaved,function(t,k)
+ local v = {
+ metadata = {
+ language = 'en',
+ sorted = false,
+ class = k
+ },
+ entries = {
+ },
+ hash = {
}
- tobesaved[class] = d
- end
- return d
-end
+ }
+ t[k] = v
+ return v
+end)
function synonyms.define(class,kind)
- local data = allocate(class)
+ local data = tobesaved[class]
data.metadata.kind = kind
end
function synonyms.register(class,kind,spec)
- local data = allocate(class)
+ local data = tobesaved[class]
+ local hash = data.hash
+ local definition = spec.definition
+ local tag = definition.tag or ""
data.metadata.kind = kind -- runtime, not saved in format (yet)
- if not data.hash[spec.definition.tag or ""] then
- data.entries[#data.entries+1] = spec
- data.hash[spec.definition.tag or ""] = spec
+ if not hash[tag] then
+ if definition.used == nil then
+ definition.used = false
+ end
+ if definition.shown == nil then
+ definition.shown = false
+ end
+ local entries = data.entries
+ entries[#entries+1] = spec
+ hash[tag] = spec
end
end
function synonyms.registerused(class,tag)
- local data = allocate(class)
- local dht = data.hash[tag]
- if dht then
- dht.definition.used = true
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.used = true
+ definition.list = true
+ end
+end
+
+function synonyms.registershown(class,tag)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.shown = true
+ definition.list = true
+ end
+end
+
+function synonyms.isused(class,tag)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ return okay and okay.definition.used
+end
+
+function synonyms.isshown(class,tag)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ return okay and okay.definition.shown
+end
+
+function synonyms.resetused(class)
+ for tag, data in next, tobesaved[class].hash do
+ data.definition.used = false
+ end
+end
+
+function synonyms.resetshown(class)
+ for tag, data in next, tobesaved[class].hash do
+ data.definition.shown = false
end
end
function synonyms.synonym(class,tag)
- local data = allocate(class).hash
- local d = data[tag]
- if d then
- local de = d.definition
- de.used = true
- context(de.synonym)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.used = true
+ definition.list = true
+ context(definition.synonym)
end
end
function synonyms.meaning(class,tag)
- local data = allocate(class).hash
- local d = data[tag]
- if d then
- local de = d.definition
- de.used = true
- context(de.meaning)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.shown = true
+ definition.list = true
+ context(definition.meaning)
end
end
synonyms.compare = sorters.comparers.basic -- (a,b)
function synonyms.filter(data,options)
- local result = { }
+ local result = { }
local entries = data.entries
- local all = options and options.criterium == interfaces.variables.all
- for i=1,#entries do
- local entry = entries[i]
- if all or entry.definition.used then
- result[#result+1] = entry
+ local all = options and options.criterium == interfaces.variables.all
+ if all then
+ for i=1,#entries do
+ result[i] = entries[i]
+ end
+ else
+ for i=1,#entries do
+ local entry = entries[i]
+ local definition = entry.definition
+ if definition.list then
+ result[#result+1] = entry
+ end
end
end
data.result = result
end
function synonyms.prepare(data)
- local strip = sorters.strip
- local splitter = sorters.splitters.utf
local result = data.result
if result then
for i=1, #result do
- local r = result[i]
- local rd = r.definition
- if rd then
- local rt = rd.tag
- local sortkey = (rt and rt ~= "" and rt) or rd.synonym
- r.split = splitter(strip(sortkey))
+ local entry = result[i]
+ local definition = entry.definition
+ if definition then
+ local tag = definition.tag
+ local key = tag ~= "" and tag or definition.synonym
+ entry.split = splitter(strip(key))
end
end
end
@@ -132,21 +189,31 @@ end
function synonyms.sort(data,options)
sorters.sort(data.result,synonyms.compare)
+ data.metadata.sorted = true
end
-function synonyms.finalize(data,options)
- local result = data.result
- data.metadata.nofsorted = #result
- local split = { }
+function synonyms.finalize(data,options) -- mostly the same as registers so we will generalize it: sorters.split
+ local result = data.result
+ local split = { }
+ local nofsplit = 0
+ local lasttag = nil
+ local done = nil
+ local nofdone = 0
for k=1,#result do
- local v = result[k]
- local entry, tag = sorters.firstofsplit(v)
- local s = split[entry] -- keeps track of change
- if not s then
- s = { tag = tag, data = { } }
- split[entry] = s
+ local entry = result[k]
+ local first, tag = firstofsplit(entry)
+ if tag ~= lasttag then
+ -- if trace_registers then
+ -- report_registers("splitting at %a",tag)
+ -- end
+ done = { }
+ nofdone = 0
+ nofsplit = nofsplit + 1
+ lasttag = tag
+ split[nofsplit] = { tag = tag, data = done }
end
- s.data[#s.data+1] = v
+ nofdone = nofdone + 1
+ done[nofdone] = entry
end
data.result = split
end
@@ -154,33 +221,28 @@ end
-- for now, maybe at some point we will do a multipass or so
-- maybe pass the settings differently
+local ctx_synonymentry = context.synonymentry
+
function synonyms.flush(data,options)
- local kind = data.metadata.kind -- hack, will be done better
- -- context[format("\\start%soutput",kind)]()
local result = data.result
- local sorted = table.sortedkeys(result)
- for k=1,#sorted do
- local letter = sorted[k]
- local sublist = result[letter]
- local data = sublist.data
- -- context[format("\\start%ssection",kind)](sublist.tag)
+ for i=1,#result do
+ local sublist = result[i]
+ local letter = sublist.tag
+ local data = sublist.data
for d=1,#data do
local entry = data[d].definition
- -- context[format("\\%sentry",kind)](d,entry.tag,entry.synonym,entry.meaning or "")
- context("\\%sentry{%s}{%s}{%s}{%s}",kind,d,entry.tag,entry.synonym,entry.meaning or "")
+ ctx_synonymentry(d,entry.tag,entry.synonym,entry.meaning or "")
end
- -- context[format("\\stop%ssection",kind)]()
end
- -- context[format("\\stop%soutput",kind)]()
- data.result = nil
+ data.result = nil
data.metadata.sorted = false
end
function synonyms.analyzed(class,options)
- local data = synonyms.collected[class]
+ local data = collected[class]
if data and data.entries then
options = options or { }
- sorters.setlanguage(options.language)
+ sorters.setlanguage(options.language,options.method)
synonyms.filter(data,options) -- filters entries to result
synonyms.prepare(data,options) -- adds split table parallel to list table
synonyms.sort(data,options) -- sorts entries in result
@@ -192,7 +254,65 @@ end
function synonyms.process(class,options)
if synonyms.analyzed(class,options) then
- synonyms.flush(synonyms.collected[class],options)
+ synonyms.flush(collected[class],options)
end
end
+-- todo: local higher up
+
+implement { name = "registerusedsynonym", actions = synonyms.registerused, arguments = { "string", "string" } }
+implement { name = "registershownsynonym", actions = synonyms.registershown, arguments = { "string", "string" } }
+implement { name = "synonymmeaning", actions = synonyms.meaning, arguments = { "string", "string" } }
+implement { name = "synonymname", actions = synonyms.synonym, arguments = { "string", "string" } }
+implement { name = "resetusedsynonyms", actions = synonyms.resetused, arguments = "string" }
+implement { name = "resetshownsynonyms", actions = synonyms.resetshown, arguments = "string" }
+
+implement {
+ name = "doifelsesynonymused",
+ actions = { synonyms.isused, commands.doifelse },
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "doifelsesynonymshown",
+ actions = { synonyms.isshown, commands.doifelse },
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "registersynonym",
+ actions = synonyms.register,
+ arguments = {
+ "string",
+ "string",
+ {
+ { "metadata", {
+ { "catcodes", "integer" },
+ { "coding" },
+ { "xmlroot" }
+ }
+ },
+ {
+ "definition", {
+ { "tag" },
+ { "synonym" },
+ { "meaning" },
+ { "used", "boolean" }
+ }
+ }
+ }
+ }
+}
+
+implement {
+ name = "processsynonyms",
+ actions = synonyms.process,
+ arguments = {
+ "string",
+ {
+ { "criterium" },
+ { "language" },
+ { "method" }
+ }
+ }
+}
diff --git a/tex/context/base/strc-syn.mkiv b/tex/context/base/strc-syn.mkiv
index e0087d450..b206f8069 100644
--- a/tex/context/base/strc-syn.mkiv
+++ b/tex/context/base/strc-syn.mkiv
@@ -15,28 +15,101 @@
\registerctxluafile{strc-syn}{1.001}
+%D Although we could nowadays build this on top of regular lists we keep this
+%D more efficient variant around. Eventually we can add some options to lists
+%D that also provide such functionality but at the cost of much more overhead.
+%D
+%D We show a usage of both synonyms and sorts, which are deep down variants of
+%D so called simple lists. A definition looks like this:
+%D
+%D \startbuffer
+%D \definesynonyms
+%D [myabbreviation]
+%D
+%D \setupsynonyms
+%D [myabbreviation]
+%D [headstyle=bold,
+%D headcolor=darkred,
+%D synonymstyle=boldslanted,
+%D synonymcolor=darkblue,
+%D textstyle=slanted,
+%D textcolor=darkgreen,
+%D style=normal,
+%D color=darkyellow]
+%D
+%D \definesorting
+%D [mylogo]
+%D
+%D \setupsorting
+%D [mylogo]
+%D [style=bold,
+%D color=darkmagenta]
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D More complex definitions involve commands to call up meanings and such. The
+%D use of the defined commands is as follows:
+%D
+%D \startbuffer
+%D \myabbreviation [FIRST] {TheFirst} {The First Words}
+%D \myabbreviation [SECOND] {TheSecond} {The Second Words}
+%D \myabbreviation [THIRD] {TheThird} {The Third Words}
+%D
+%D \mylogo [FOURTH] {TheFourth}
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D By default a synonym is just typeset and flagged as being used, so that in
+%D a list it will be shown with its meaning. You can however also expand the
+%D meaning automatically at first use:
+%D
+%D \startbuffer
+%D \setupsynonyms[myabbreviation][alternative=first]
+%D
+%D We have \FIRST, \SECOND\ and also \THIRD\ but no \FOURTH.
+%D
+%D We have \FIRST, \SECOND\ and also \THIRD\ but no \FOURTH.
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D We can change the order, as demonstrated in:
+%D
+%D \startbuffer
+%D \resetshownsynonyms[myabbreviation]
+%D
+%D \setupsynonyms[myabbreviation][alternative=last]
+%D
+%D We have \FIRST\ and \THIRD\ or \FOURTH.
+%D
+%D We have \FIRST\ and \THIRD\ or \FOURTH.
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D A list is called up with:
+%D
+%D \startbuffer
+%D \placelistofsynonyms[myabbreviation]
+%D
+%D \placelistofsorts[mylogo]
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D The lists are constructions (like descriptions are) and can be set up
+%D likewise.
+
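+%D The four argument variant (an illustrative sketch, the command names here
+%D are made up) also defines commands that typeset the meaning and the synonym
+%D explicitly:
+%D
+%D \starttyping
+%D \definesynonyms[myabbreviation][myabbreviations][\myinfull][\myabbr]
+%D \stoptyping
+%D
+%D Here \type{\myinfull{FIRST}} gives the meaning and \type{\myabbr{FIRST}} the
+%D synonym itself.
+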
% todo: add 'define only' option to descriptions, then add sorting (also based on key)
% and call to definition -> replaces this module
\unprotect
-\ifdefined\dotagsynonym \else \let\dotagsynonym\relax \fi
-\ifdefined\dotagsorting \else \let\dotagsorting\relax \fi
-
-% general help, can be shared
-
-% simplifiedcommands -> flag in lua
-%
-% expansion
-% criterium -> when start, then flag in list
-% command-> wanneer?
-% state -> flagging enabled
-% conversion ?
-% todo: register xml mode etc
-
% split but common in lua
-\def\preprocessexpansion#1#2#3#4%
+\def\preprocessexpansion#1#2#3#4% do this at the lua end if still needed
{\ifx#1\s!xml
\xmlstartraw
\xdef#2{#4}%
@@ -51,13 +124,94 @@
\globallet#3\s!tex
\fi}
-\installcorenamespace{synonym}
+%D We now use a simple list variant:
+
+\installcorenamespace {simplelist}
+
+\installcommandhandler \??simplelist {simplelist} \??simplelist
+
+\let\setupsimplelists\setupsimplelist
+
+\setupsimplelists[%
+ %c!title=,
+ %c!text=,
+ %
+ %c!style=,
+ %c!color=,
+ %c!command=,
+ %c!align=,
+ %
+ %c!headstyle=,
+ %c!headcolor=,
+ %c!headalign=,
+ %
+ %c!titlestyle=,
+ %c!titlecolor=,
+ %c!titlecommand=,
+ %c!titleleft=,
+ %c!titleright=,
+ %
+ %c!closesymbol=,
+ %c!closecommand=,
+ %
+ \c!alternative=\v!left,
+ \c!display=\v!yes,
+ \c!width=7\emwidth,
+ \c!distance=\emwidth,
+ \c!titledistance=.5\emwidth,
+ %c!hang=,
+ %c!sample=,
+ \c!margin=\v!no,
+ \c!before=\blank,
+ \c!inbetween=\blank,
+ \c!after=\blank,
+ %c!indentnext=,
+ %c!indenting=,
+ %
+ \c!expansion=\v!no,
+ %c!xmlsetup=,
+ %s!catcodes=,
+ \s!language=\currentmainlanguage,
+]
+
+\appendtoks
+ \setfalse\c_strc_constructions_define_commands
+ \ifx\currentsimplelistparent\empty
+ \defineconstruction[\currentsimplelist][\s!handler=\v!simplelist,\c!level=1]%
+ \else
+ \defineconstruction[\currentsimplelist][\currentsimplelistparent][\s!handler=\v!simplelist,\c!level=1]%
+ \fi
+ \settrue\c_strc_constructions_define_commands
+\to \everydefinesimplelist
+
+\setuvalue{\??constructioninitializer\v!simplelist}%
+ {\let\currentsimplelist \currentconstruction
+ \let\constructionparameter \simplelistparameter
+ \let\constructionnamespace \??simplelist
+ \let\detokenizedconstructionparameter\detokenizedsimplelistparameter
+ \let\letconstructionparameter \letsimplelistparameter
+ \let\useconstructionstyleandcolor \usesimpleliststyleandcolor
+ \let\setupcurrentconstruction \setupcurrentsimplelist}
+
+\setuvalue{\??constructionfinalizer\v!simplelist}%
+ {}
+
+\setuvalue{\??constructiontexthandler\v!simplelist}%
+ {\begingroup
+ \useconstructionstyleandcolor\c!headstyle\c!headcolor
+ \the\everyconstruction
+ \constructionparameter\c!headcommand
+ {\strut
+ \currentsimplelistentry}%
+ \endgroup}
-\installsimplecommandhandler \??synonym {synonym} \??synonym
+% And we build on top of this.
-\let\setupsynonyms\setupsynonym
+\ifdefined\dotagsynonym \else \let\dotagsynonym\relax \fi
+\ifdefined\dotagsorting \else \let\dotagsorting\relax \fi
-\setupsynonyms
+\definesimplelist
+ [\v!synonym]
[\c!state=\v!start,
%\c!synonymstyle=,
%\c!textstyle=,
@@ -75,147 +229,231 @@
%\c!after=,
\c!indentnext=\v!no,
%\c!expansion=,
- \c!method=,
- \s!language=\currentmainlanguage]
+ \c!method=]
+
+\let\setupsynonyms\setupsimplelist
\unexpanded\def\definesynonyms
- {\doquadrupleempty\dodefinesynonyms}
+ {\doquadrupleempty\strc_synonyms_define}
-\def\dodefinesynonyms[#1][#2][#3][#4]% name plural \meaning \use
+\def\strc_synonyms_define[#1][#2][#3][#4]% name plural \meaning \use
{\edef\currentsynonym{#1}%
\iffourthargument
- \unexpanded\def#4##1{\doinsertsynonym{#1}{##1}}% name tag
+ \unexpanded\def#4##1{\strc_synonyms_insert{#1}{##1}}% name tag
\ifthirdargument
- \unexpanded\def#3##1{\doinsertsynonymmeaning{#1}{##1}}% \meaning
+ \unexpanded\def#3##1{\strc_synonyms_insert_meaning{#1}{##1}}% \meaning
\fi
\setuvalue{#1}{\definesynonym[\v!no][#1]}% \name
\else
\ifthirdargument
- \unexpanded\def#3##1{\doinsertsynonymmeaning{#1}{##1}}% \meaning
+ \unexpanded\def#3##1{\strc_synonyms_insert_meaning{#1}{##1}}% \meaning
\fi
\setuvalue{#1}{\definesynonym[\v!yes][#1]}% \name
\fi
- \checksynonymparent
- \setupcurrentsynonym[\s!single={#1},\s!multi={#2}]%
+ %
+% \checksynonymparent
+% \setupcurrentsynonym[\s!single={#1},\s!multi={#2}]%
+ \setfalse\c_strc_constructions_define_commands
+ \definesimplelist
+ [\currentsynonym]%
+ [\v!sorting]
+ [\s!single={#1},%
+ \s!multi={#2}]%
+ \settrue\c_strc_constructions_define_commands
+ %
\presetheadtext[#2=\Word{#2}]% changes the \if...argument
- \setvalue{\e!setup #2\e!endsetup}{\setupsynonym[#1]}% obsolete definition
+ %
+ \setvalue{\e!setup #2\e!endsetup}{\setupsynonyms[#1]}% obsolete definition
\setvalue{\e!place \e!listof#2}{\placelistofsynonyms[#1]}% accepts extra argument
\setvalue{\e!complete\e!listof#2}{\completelistofsynonyms[#1]}}
\unexpanded\def\definesynonym
- {\dotripleempty\dodefinesynonym}
+ {\dotripleempty\strc_synonyms_define_entry}
-\def\dodefinesynonym[#1][#2][#3]#4#5%
+\def\strc_synonyms_define_entry[#1][#2][#3]#4#5%
{\begingroup
\edef\currentsynonym{#2}%
\edef\currentsynonymtag{#3}%
+ \let\currentsimplelist\currentsynonym
\ifx\currentsynonymtag\empty
\edef\currentsynonymtag{#4}%
\fi
\ifx\currentsynonymtag\empty
% todo: error message
\else
- \edef\currentsynonymexpansion{\synonymparameter\c!expansion}%
- \preprocessexpansion\currentsynonymexpansion\currentsynonymtext \currentsynonymcoding{#4}%
- \preprocessexpansion\currentsynonymexpansion\currentsynonymmeaning\currentsynonymcoding{#5}%
- \ctxlua{structures.synonyms.register("\currentsynonym", "synonym", {
- metadata = {
- catcodes = \the\catcodetable,
- coding = "\currentsynonymcoding",
- xmlroot = \ifx\currentsynonymcoding\s!xml "\xmldocument" \else nil \fi,
- },
- definition = {
- tag = "\currentsynonymtag",
- synonym = \!!bs\currentsynonymtext\!!es,
- meaning = \!!bs\currentsynonymmeaning\!!es,
- used = false,
- }
- })}%
- \doif{#1}\v!yes{\setuxvalue\currentsynonymtag{\noexpand\doinsertsynonym{\currentsynonym}{\currentsynonymtag}}}%
+ \edef\currentsynonymexpansion{\simplelistparameter\c!expansion}%
+ \preprocessexpansion\currentsynonymexpansion\m_synonyms_text \currentsynonymcoding{#4}%
+ \preprocessexpansion\currentsynonymexpansion\m_synonyms_meaning\currentsynonymcoding{#5}%
+ \clf_registersynonym
+ {\currentsynonym}%
+ {synonym}%
+ {%
+ metadata {%
+ catcodes \catcodetable
+ coding {\currentsynonymcoding}%
+ \ifx\currentsynonymcoding\s!xml
+ xmlroot {\xmldocument}%
+ \fi
+ }%
+ definition {%
+ tag {\currentsynonymtag}%
+ synonym {\m_synonyms_text}%
+ meaning {\m_synonyms_meaning}%
+ % used false
+ }%
+ }%
+ \relax
+ \doif{#1}\v!yes{\setuxvalue\currentsynonymtag{\strc_synonyms_insert{\currentsynonym}{\currentsynonymtag}}}%
\fi
\endgroup}
\unexpanded\def\registersynonym
- {\dodoubleargument\doregistersynonym}
+ {\dodoubleargument\strc_synonyms_register}
+
+\def\strc_synonyms_register[#1][#2]%
+ {\clf_registerusedsynonym{#1}{#2}}
+
+\unexpanded\def\currentsynonymname {\clf_synonymname {\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\currentsynonymmeaning {\clf_synonymmeaning {\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\doifelsecurrentsynonymused {\clf_doifelsesynonymused {\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\doifelsecurrentsynonymshown{\clf_doifelsesynonymshown{\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\resetusedsynonyms [#1]{\clf_resetusedsynonyms {#1}}
+\unexpanded\def\resetshownsynonyms [#1]{\clf_resetshownsynonyms {#1}}
+
+\installcorenamespace{simplelistalternative} % specific ways of rendering a list
+\installcorenamespace{simplelistrenderings} % a namespace for setups (rather local)
-\def\doregistersynonym[#1][#2]%
- {\ctxlua{structures.synonyms.registerused("#1","#2")}}
+\installcommandhandler \??simplelistalternative {simplelistalternative} \??simplelistalternative
-\unexpanded\def\doinsertsynonymmeaning#1#2% name tag
+\setupsimplelist
+ [\v!synonym]
+ [\c!alternative=\v!normal]
+
+\unexpanded\def\strc_synonyms_insert_meaning#1#2% name tag
{\begingroup
- \def\currentsynonym{#1}%
- \usesynonymstyleandcolor\c!textstyle\c!textcolor
- \synonymparameter\c!textcommand{\ctxlua{structures.synonyms.meaning("#1","#2")}}%
+ \def\currentsimplelist{#1}%
+ \def\currentsynonymtag{#2}%
+ \fastsetup{\??simplelistrenderings::\v!text}%
\endgroup}
-\unexpanded\def\doinsertsynonym#1#2% name tag
+\unexpanded\def\strc_synonyms_insert#1#2% name tag
{\begingroup
- \def\currentsynonym{#1}%
- \def\currentsynonymtag{#2}%
- \dostarttagged\t!synonym\currentsynonym
- \dotagsynonym
- \usesynonymstyleandcolor\c!synonymstyle\c!synonymcolor
- \synonymparameter\c!synonymcommand{\ctxlua{structures.synonyms.synonym("#1","#2")}}%
- \dostoptagged
- \normalexpanded{\endgroup\synonymparameter\c!next}}
+ \edef\currentsimplelist{#1}%
+ \let \currentsynonym\currentsimplelist % for a while
+ \def \currentsynonymtag{#2}%
+ \edef\currentsimplelistalternative{\simplelistparameter\c!alternative}%
+ \fastsetup{\??simplelistrenderings:\v!synonym:\currentsimplelistalternative}%
+ \normalexpanded{\endgroup\simplelistparameter\c!next}}
+
+% \setupsimplelistalternative
+% [\c!command=\strictsimplelistparameter\c!command]
+
+\definesimplelistalternative
+ [\v!normal]
+ [\c!inbetween=\space,
+ \c!left=(,
+ \c!right=)]
+
+\definesimplelistalternative
+ [\v!first]
+ [\v!normal]
+
+\definesimplelistalternative
+ [\v!last]
+ [\v!normal]
+
+\startsetups[\??simplelistrenderings::\v!synonym]
+ \begingroup
+ \dostarttaggedchained\t!synonym\currentsynonym\??simplelist
+ \dotagsynonym
+ \usesimpleliststyleandcolor\c!synonymstyle\c!synonymcolor
+ \simplelistparameter\c!synonymcommand{\currentsynonymname}%
+ \dostoptagged
+ \endgroup
+\stopsetups
+
+\startsetups[\??simplelistrenderings::\v!text]
+ \begingroup
+ \usesimpleliststyleandcolor\c!textstyle\c!textcolor
+ \simplelistparameter\c!textcommand{\currentsynonymmeaning}%
+ \endgroup
+\stopsetups
+
+\startsetups[\??simplelistrenderings:\v!synonym:\v!normal]
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+\stopsetups
+
+\startsetups[\??simplelistrenderings:\v!synonym:\v!first]
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+ \doifelsecurrentsynonymshown \donothing {
+ \simplelistalternativeparameter\c!inbetween
+ \simplelistalternativeparameter\c!left
+ \fastsetup{\??simplelistrenderings::\v!text}
+ \simplelistalternativeparameter\c!right
+ }
+\stopsetups
+
+\startsetups[\??simplelistrenderings:\v!synonym:\v!last]
+ \doifelsecurrentsynonymshown {
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+ } {
+ \fastsetup{\??simplelistrenderings::\v!text}
+ \simplelistalternativeparameter\c!inbetween
+ \simplelistalternativeparameter\c!left
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+ \simplelistalternativeparameter\c!right
+ }
+\stopsetups
\unexpanded\def\placelistofsynonyms
- {\dodoubleempty\doplacelistofsynonyms}
+ {\dodoubleempty\strc_synonyms_place_list}
-\def\doplacelistofsynonyms[#1][#2]%
+\def\strc_synonyms_place_list[#1][#2]%
{\begingroup
- \def\currentsynonym{#1}%
- \definedescription % todo, per class
- [syndef]
- [\c!location=\synonymparameter\c!location,
- \c!width=\synonymparameter\c!width,
- \c!distance=\synonymparameter\c!distance,
- \c!sample=\synonymparameter\c!sample,
- \c!hang=\synonymparameter\c!hang,
- \c!align=\synonymparameter\c!align,
- \c!before=\synonymparameter\c!before,
- \c!inbetween=\synonymparameter\c!inbetween,
- \c!after=\synonymparameter\c!after,
- \c!indentnext=\synonymparameter\c!indentnext,
- \c!headstyle=\synonymparameter\c!textstyle,
- \c!headcolor=\synonymparameter\c!textcolor,
- \c!style=,
- \c!color=.
- #2]%
- \startpacked
- \ctxlua{structures.synonyms.process('#1',{
- criterium = "\synonymparameter\c!criterium",
- language = "\synonymparameter\s!language",
- method = "\synonymparameter\c!method",
- })}%
- \stoppacked
+ \edef\currentsimplelist{#1}%
+ \doifelsecommandhandler\??simplelist\currentsimplelist
+ {\strc_constructions_initialize{#1}%
+ \setupcurrentsimplelist[#2]%
+ \let\synonymentry\strc_synonym_normal
+ \startpacked
+ \clf_processsynonyms
+ {#1}%
+ {%
+ criterium {\simplelistparameter\c!criterium}%
+ language {\simplelistparameter\s!language}%
+ method {\simplelistparameter\c!method}%
+ }%
+ \relax
+ \stoppacked}%
+ {}% todo: message that invalid
\endgroup}
\def\completelistofsynonyms
- {\dodoubleempty\docompletelistofsynonyms}
+ {\dodoubleempty\strc_synonyms_complete_list}
-\def\docompletelistofsynonyms[#1][#2]%
- {\edef\currentsynonym{#1}%
- \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\synonymparameter\s!multi}},\c!reference=#1]}%
- \doplacelistofsynonyms[#1][#2]%
- \page
- \stopnamedsection}
-
-\let\startsynonymoutput \relax
-\let\stopsynonymoutput \relax
-\let\startsynonymsection\gobbleoneargument
-\let\stopsynonymsection \relax
+\def\strc_synonyms_complete_list[#1][#2]%
+ {\begingroup
+ \edef\currentsimplelist{#1}%
+ \doifelsecommandhandler\??simplelist\currentsimplelist
+ {\normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\simplelistparameter\s!multi}},\c!reference=#1]}%
+ \strc_synonyms_place_list[#1][#2]%
+ \page
+ \stopnamedsection}%
+ {}% todo: message that it is invalid
+ \endgroup}
-\unexpanded\def\synonymentry#1#2#3#4%
- {\syndef{#3}#4\par}
+\unexpanded\def\strc_synonym_normal#1#2#3#4%
+ {\begingroup
+ \def\currentsimplelistentry{#3}%
+ \csname\??constructionstarthandler\v!construction\endcsname
+ #4%
+ \csname\??constructionstophandler\v!construction\endcsname
+ \endgroup}
%D Sorting (a simplified version of synonym).
-\installcorenamespace{sorting}
-
-\installsimplecommandhandler \??sorting {sorting} \??sorting
-
-\setupsorting
+\definesimplelist
+ [\v!sorting]
[\c!state=\v!start,
%\c!command=, % we test for defined !
%\c!criterium=,
@@ -223,123 +461,179 @@
%\c!before=,
\c!after=\endgraf,
%\c!expansion=,
- \c!method=,
- \s!language=\currentmainlanguage]
+ \c!method=]
+
+\let\setupsorting\setupsimplelist
\unexpanded\def\definesorting
- {\dotripleempty\dodefinesorting}
+ {\dotripleempty\strc_sorting_define}
% if #3=\relax or \v!none, then no command but still protected
-\def\dodefinesorting[#1][#2][#3]%
+\def\strc_sorting_define[#1][#2][#3]%
{\edef\currentsorting{#1}%
\ifthirdargument
\doifnot{#3}\v!none
{\ifx#3\relax \else
- \unexpanded\def#3##1{\doinsertsort{#1}{##1}}%
+ \unexpanded\def#3##1{\strc_sorting_insert{#1}{##1}}%
\fi}%
\setuvalue{#1}{\definesort[\v!no][#1]}%
\else
\setuvalue{#1}{\definesort[\v!yes][#1]}%
\fi
- \checksortingparent
- \setupcurrentsorting[\s!multi={#2}]%
+ \setfalse\c_strc_constructions_define_commands
+ \definesimplelist
+ [\currentsorting]%
+ [\v!sorting]
+ [\s!single={#1},%
+ \s!multi={#2}]%
+ \settrue\c_strc_constructions_define_commands
+ %
\presetheadtext[#2=\Word{#2}]% after \ifthirdargument -)
+ %
\setvalue{\e!setup #2\e!endsetup}{\setupsorting[#1]}% obsolete definition
\setvalue{\e!place \e!listof#2}{\placelistofsorts[#1]}%
\setvalue{\e!complete\e!listof#2}{\completelistofsorts[#1]}}
\unexpanded\def\definesort
- {\dotripleempty\dodefinesort}
+ {\dotripleempty\strc_sorting_define_entry}
-\def\dodefinesort[#1][#2][#3]#4%
+\def\strc_sorting_define_entry[#1][#2][#3]#4%
{\begingroup
\edef\currentsorting{#2}%
\edef\currentsortingtag{#3}%
+ \let\currentsimplelist\currentsorting
\ifx\currentsortingtag\empty
\edef\currentsortingtag{#4}%
\fi
\ifx\currentsortingtag\empty
% todo: error message
\else
- \edef\currentsortingexpansion{\sortingparameter\c!expansion}%
+ \edef\currentsortingexpansion{\simplelistparameter\c!expansion}%
\preprocessexpansion\currentsortingexpansion\currentsortingtext\currentsortingcoding{#4}%
- \ctxlua{structures.synonyms.register("\currentsorting", "sorting", {
- metadata = {
- catcodes = \the\catcodetable,
- coding = "\currentsortingcoding",
- xmlroot = \ifx\currentsortingcoding\s!xml "\xmldocument" \else nil \fi,
- },
- definition = {
- tag = "\currentsortingtag",
- synonym = \!!bs\currentsortingtext\!!es,
- % used = false,
- }
- })}%
- \doif{#1}\v!yes{\setuxvalue\currentsortingtag{\noexpand\doinsertsort{\currentsorting}{\currentsortingtag}}}%
+ \clf_registersynonym
+ {\currentsorting}%
+ {sorting}%
+ {%
+ metadata {%
+ catcodes \catcodetable
+ coding {\currentsortingcoding}%
+ \ifx\currentsortingcoding\s!xml
+ xmlroot {\xmldocument}%
+ \fi
+ }%
+ definition {%
+ tag {\currentsortingtag}%
+ synonym {\currentsortingtext}%
+ % used false
+ }%
+ }%
+ \relax
+ \doif{#1}\v!yes{\setuxvalue\currentsortingtag{\strc_sorting_insert{\currentsorting}{\currentsortingtag}}}%
\fi
\endgroup}
-\unexpanded\def\doinsertsort#1#2% name tag
+\unexpanded\def\currentsortingname {\clf_synonymname {\currentsimplelist}{\currentsortingtag}}
+\unexpanded\def\doifelsecurrentsortingused {\clf_doifelsesynonymused {\currentsimplelist}{\currentsortingtag}}
+\unexpanded\def\resetusedsortings [#1]{\clf_resetusedsynonyms {#1}}
+
+\setupsimplelist
+ [\v!sorting]
+ [\c!alternative=\v!normal]
+
+\unexpanded\def\strc_sorting_insert#1#2% name tag
{\begingroup
% no cap currently, or .. we need to map cap onto WORD
\edef\currentsorting{#1}%
- \def\currentsortingtag{#2}%
- \dostarttagged\t!sorting\currentsorting
- \dotagsorting
- \usesortingstyleandcolor\c!style\c!color
- \ctxlua{structures.synonyms.synonym("#1","#2")}%
- \dostoptagged
- \normalexpanded{\endgroup\sortingparameter\c!next}}
+ \def \currentsortingtag{#2}%
+ \let \currentsimplelist\currentsorting
+ \edef\currentsimplelistalternative{\simplelistparameter\c!alternative}%
+ \fastsetup{\??simplelistrenderings:\v!sorting:\currentsimplelistalternative}%
+ \normalexpanded{\endgroup\simplelistparameter\c!next}}
+
+\startsetups [\??simplelistrenderings:\v!sorting:\v!normal]
+ \fastsetup{\??simplelistrenderings::\v!sorting}%
+\stopsetups
+
+\startsetups [\??simplelistrenderings::\v!sorting]
+ \begingroup
+ \dostarttaggedchained\t!sorting\currentsorting\??simplelist
+ \dotagsorting
+ \usesimpleliststyleandcolor\c!style\c!color
+ \currentsortingname
+ \dostoptagged
+ \endgroup
+\stopsetups
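+
+% A minimal usage sketch (illustrative only):
+%
+%   \definesorting[city][cities]
+%   \city{Hasselt} \city{London}
+%   ... \Hasselt\ ... \placelistofcities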
\unexpanded\def\registersort
- {\dodoubleargument\doregistersort}
+ {\dodoubleargument\strc_sorting_register}
-\def\doregistersort[#1][#2]%
- {\ctxlua{structures.synonyms.registerused("#1","#2")}}
+\def\strc_sorting_register[#1][#2]%
+ {\clf_registerusedsynonym{#1}{#2}}
% before after
%
% maybe just 'commandset' and then combine
\unexpanded\def\placelistofsorts
- {\dodoubleempty\doplacelistofsorts}
+ {\dodoubleempty\strc_sorting_place_list}
-\def\doplacelistofsorts[#1][#2]% STILL NEED TO MAKE A RAW VERSION WITHOUT WHITESPACE ETC ETC
+\def\strc_sorting_place_list[#1][#2]%
{\begingroup
- \def\currentsorting{#1}%
- \setupcurrentsorting[#2]%
+ \edef\currentsimplelist{#1}%
+ \strc_constructions_initialize{#1}%
+ \setupcurrentsimplelist[#2]%
+ \edef\p_simplelist_command{\simplelistparameter\c!command}%
+ \ifx\p_simplelist_command\empty
+ \let\synonymentry\strc_sorting_normal
+ \else
+ \let\synonymentry\strc_sorting_command
+ \fi
\startpacked
- \ctxlua{structures.synonyms.process('#1',{
- criterium = "\sortingparameter\c!criterium",
- language = "\sortingparameter\s!language",
- method = "\sortingparameter\c!method",
- })}%
+ \clf_processsynonyms
+ {#1}%
+ {%
+ criterium {\simplelistparameter\c!criterium}%
+ language {\simplelistparameter\s!language}%
+ method {\simplelistparameter\c!method}%
+ }%
+ \relax
\stoppacked
\endgroup}
\unexpanded\def\completelistofsorts
- {\dodoubleempty\docompletelistofsorts}
+ {\dodoubleempty\strc_sorting_complete_list}
-\def\docompletelistofsorts[#1][#2]%
- {\edef\currentsorting{#1}%
- \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\sortingparameter\s!multi}},\c!reference=#1]}%
- \doplacelistofsorts[#1][#2]%
+\def\strc_sorting_complete_list[#1][#2]%
+ {\begingroup
+ \edef\currentsimplelist{#1}%
+ \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\simplelistparameter\s!multi}},\c!reference=#1]}%
+ \strc_sorting_place_list[#1][#2]%
\page
- \stopnamedsection}
+ \stopnamedsection
+ \endgroup}
-\let\startsortingoutput \relax
-\let\stopsortingoutput \relax
-\let\startsortingsection\gobbleoneargument
-\let\stopsortingsection \relax
+\def\strc_sorting_command#1#2#3#4% #4 is meaning but empty here
+ {\p_simplelist_command{#1}{#2}{#3}}
-\def\sortingentry#1#2#3#4% #4 is meaning but empty here
- {\doifelsenothing{\sortingparameter\c!command}
- {\begingroup\usesortingstyleandcolor\c!style\c!color#3\endgroup\par} % todo
- {\sortingparameter\c!command{#1}{#2}{#3}}}
+\def\strc_sorting_normal#1#2#3#4% #4 is meaning but empty here
+ {\begingroup
+ \usesimpleliststyleandcolor\c!style\c!color
+ #3%
+ \endgroup
+ \par}
%D Presets.
+% To be considered:
+%
+% \setupsimplelist
+% [\v!sorting]
+% [\c!headstyle=\simplelistparameter\c!synonymstyle,
+% \c!headcolor=\simplelistparameter\c!synonymcolor,
+% \c!style=\simplelistparameter\c!textstyle,
+% \c!color=\simplelistparameter\c!textcolor]
+
\definesynonyms
[\v!abbreviation]
[\v!abbreviations]
diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua
index 7e5c6f993..637d74e8c 100644
--- a/tex/context/base/strc-tag.lua
+++ b/tex/context/base/strc-tag.lua
@@ -6,189 +6,219 @@ if not modules then modules = { } end modules ['strc-tag'] = {
license = "see context related readme files"
}
--- This is rather experimental code.
+-- This is rather experimental code. Tagging happens on the fly and there are two analysers
+-- involved: the pdf backend tagger and the exporter. They share data but there are subtle
+-- differences. Each tag carries a specification and these can be accessed by attribute (the
+-- tag at the end of the chain) or by a so-called fullname, which is a tag name combined with
+-- a number.
+local type, next = type, next
local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat
-local gsub, find, topattern, format = string.gsub, string.find, string.topattern, string.format
-local lpegmatch = lpeg.match
+local find, topattern, format = string.find, string.topattern, string.format
+local lpegmatch, P, S, C, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
local texattribute = tex.attribute
local allocate = utilities.storage.allocate
local settings_to_hash = utilities.parsers.settings_to_hash
+local setmetatableindex = table.setmetatableindex
local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
local report_tags = logs.reporter("structure","tags")
-local attributes, structures = attributes, structures
+local attributes = attributes
+local structures = structures
+local implement = interfaces.implement
local a_tagged = attributes.private('tagged')
local unsetvalue = attributes.unsetvalue
local codeinjections = backends.codeinjections
-local taglist = allocate()
-local properties = allocate()
-local labels = allocate()
-local stack = { }
-local chain = { }
-local ids = { }
-local enabled = false
-local tagdata = { } -- used in export
-local tagmetadata = { } -- used in export
-
-local tags = structures.tags
-tags.taglist = taglist -- can best be hidden
-tags.labels = labels
-tags.data = tagdata
-tags.metadata = tagmetadata
-
-local properties = allocate {
-
- document = { pdf = "Div", nature = "display" },
-
- division = { pdf = "Div", nature = "display" },
- paragraph = { pdf = "P", nature = "mixed" },
- p = { pdf = "P", nature = "mixed" },
- construct = { pdf = "Span", nature = "inline" },
- highlight = { pdf = "Span", nature = "inline" },
-
- section = { pdf = "Sect", nature = "display" },
- sectiontitle = { pdf = "H", nature = "mixed" },
- sectionnumber = { pdf = "H", nature = "mixed" },
- sectioncontent = { pdf = "Div", nature = "display" },
-
- itemgroup = { pdf = "L", nature = "display" },
- item = { pdf = "Li", nature = "display" },
- itemtag = { pdf = "Lbl", nature = "mixed" },
- itemcontent = { pdf = "LBody", nature = "mixed" },
-
- description = { pdf = "Div", nature = "display" },
- descriptiontag = { pdf = "Div", nature = "mixed" },
- descriptioncontent = { pdf = "Div", nature = "mixed" },
- descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference
-
- verbatimblock = { pdf = "Code", nature = "display" },
- verbatimlines = { pdf = "Code", nature = "display" },
- verbatimline = { pdf = "Code", nature = "mixed" },
- verbatim = { pdf = "Code", nature = "inline" },
-
- lines = { pdf = "Code", nature = "display" },
- line = { pdf = "Code", nature = "mixed" },
-
- synonym = { pdf = "Span", nature = "inline" },
- sorting = { pdf = "Span", nature = "inline" },
-
- register = { pdf = "Div", nature = "display" },
- registersection = { pdf = "Div", nature = "display" },
- registertag = { pdf = "Span", nature = "mixed" },
- registerentries = { pdf = "Div", nature = "display" },
- registerentry = { pdf = "Span", nature = "mixed" },
- registersee = { pdf = "Span", nature = "mixed" },
- registerpages = { pdf = "Span", nature = "mixed" },
- registerpage = { pdf = "Span", nature = "inline" },
- registerpagerange = { pdf = "Span", nature = "mixed" },
-
- table = { pdf = "Table", nature = "display" },
- tablerow = { pdf = "TR", nature = "display" },
- tablecell = { pdf = "TD", nature = "mixed" },
-
- tabulate = { pdf = "Table", nature = "display" },
- tabulaterow = { pdf = "TR", nature = "display" },
- tabulatecell = { pdf = "TD", nature = "mixed" },
-
- list = { pdf = "TOC", nature = "display" },
- listitem = { pdf = "TOCI", nature = "display" },
- listtag = { pdf = "Lbl", nature = "mixed" },
- listcontent = { pdf = "P", nature = "mixed" },
- listdata = { pdf = "P", nature = "mixed" },
- listpage = { pdf = "Reference", nature = "mixed" },
-
- delimitedblock = { pdf = "BlockQuote", nature = "display" },
- delimited = { pdf = "Quote", nature = "inline" },
- subsentence = { pdf = "Span", nature = "inline" },
-
- label = { pdf = "Span", nature = "mixed" },
- number = { pdf = "Span", nature = "mixed" },
-
- float = { pdf = "Div", nature = "display" }, -- Figure
- floatcaption = { pdf = "Caption", nature = "mixed" },
- floatlabel = { pdf = "Span", nature = "inline" },
- floatnumber = { pdf = "Span", nature = "inline" },
- floattext = { pdf = "Span", nature = "mixed" },
- floatcontent = { pdf = "P", nature = "mixed" },
-
- image = { pdf = "P", nature = "mixed" },
- mpgraphic = { pdf = "P", nature = "mixed" },
-
- formulaset = { pdf = "Div", nature = "display" },
- formula = { pdf = "Div", nature = "display" }, -- Formula
- formulacaption = { pdf = "Span", nature = "mixed" },
- formulalabel = { pdf = "Span", nature = "mixed" },
- formulanumber = { pdf = "Span", nature = "mixed" },
- formulacontent = { pdf = "P", nature = "display" },
- subformula = { pdf = "Div", nature = "display" },
-
- link = { pdf = "Link", nature = "inline" },
-
- margintextblock = { pdf = "Span", nature = "inline" },
- margintext = { pdf = "Span", nature = "inline" },
-
- math = { pdf = "Div", nature = "inline" }, -- no display
- mn = { pdf = "Span", nature = "mixed" },
- mi = { pdf = "Span", nature = "mixed" },
- mo = { pdf = "Span", nature = "mixed" },
- ms = { pdf = "Span", nature = "mixed" },
- mrow = { pdf = "Span", nature = "display" },
- msubsup = { pdf = "Span", nature = "display" },
- msub = { pdf = "Span", nature = "display" },
- msup = { pdf = "Span", nature = "display" },
- merror = { pdf = "Span", nature = "mixed" },
- munderover = { pdf = "Span", nature = "display" },
- munder = { pdf = "Span", nature = "display" },
- mover = { pdf = "Span", nature = "display" },
- mtext = { pdf = "Span", nature = "mixed" },
- mfrac = { pdf = "Span", nature = "display" },
- mroot = { pdf = "Span", nature = "display" },
- msqrt = { pdf = "Span", nature = "display" },
- mfenced = { pdf = "Span", nature = "display" },
- maction = { pdf = "Span", nature = "display" },
-
- mtable = { pdf = "Table", nature = "display" }, -- might change
- mtr = { pdf = "TR", nature = "display" }, -- might change
- mtd = { pdf = "TD", nature = "display" }, -- might change
-
- ignore = { pdf = "Span", nature = "mixed" },
- metadata = { pdf = "Div", nature = "display" },
- metavariable = { pdf = "Span", nature = "mixed" },
-
- mid = { pdf = "Span", nature = "inline" },
- sub = { pdf = "Span", nature = "inline" },
- sup = { pdf = "Span", nature = "inline" },
- subsup = { pdf = "Span", nature = "inline" },
-
- combination = { pdf = "Span", nature = "display" },
- combinationpair = { pdf = "Span", nature = "display" },
- combinationcontent = { pdf = "Span", nature = "mixed" },
- combinationcaption = { pdf = "Span", nature = "mixed" },
+local taglist = allocate() -- access by attribute
+local specifications = allocate() -- access by fulltag
+local labels = allocate()
+local stack = { }
+local chain = { }
+local ids = { }
+local enabled = false
+local tagcontext = { }
+local tagpatterns = { }
+local lasttags = { }
+local stacksize = 0
+local metadata = nil -- applied to the next element
+
+local tags = structures.tags
+tags.taglist = taglist -- can best be hidden
+tags.labels = labels
+tags.patterns = tagpatterns
+tags.specifications = specifications
+
+-- Tags are internally stored as:
+--
+-- tag>number tag>number tag>number
+
+local p_splitter = C((1-S(">"))^1) * P(">") * C(P(1)^1)
+tagpatterns.splitter = p_splitter
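+
+-- For instance (an illustrative check, not exhaustive):
+--
+--   lpegmatch(p_splitter,"section>2")  -- returns "section" and "2"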
+
+local properties = allocate {
+
+ document = { pdf = "Div", nature = "display" },
+
+ division = { pdf = "Div", nature = "display" },
+ paragraph = { pdf = "P", nature = "mixed" },
+ p = { pdf = "P", nature = "mixed" },
+ construct = { pdf = "Span", nature = "inline" },
+ highlight = { pdf = "Span", nature = "inline" },
+
+ section = { pdf = "Sect", nature = "display" },
+ sectiontitle = { pdf = "H", nature = "mixed" },
+ sectionnumber = { pdf = "H", nature = "mixed" },
+ sectioncontent = { pdf = "Div", nature = "display" },
+
+ itemgroup = { pdf = "L", nature = "display" },
+ item = { pdf = "LI", nature = "display" },
+ itemtag = { pdf = "Lbl", nature = "mixed" },
+ itemcontent = { pdf = "LBody", nature = "mixed" },
+ itemhead = { pdf = "Div", nature = "display" },
+ itembody = { pdf = "Div", nature = "display" },
+
+ description = { pdf = "Div", nature = "display" },
+ descriptiontag = { pdf = "Div", nature = "mixed" },
+ descriptioncontent = { pdf = "Div", nature = "mixed" },
+ descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference
+
+ verbatimblock = { pdf = "Code", nature = "display" },
+ verbatimlines = { pdf = "Code", nature = "display" },
+ verbatimline = { pdf = "Code", nature = "mixed" },
+ verbatim = { pdf = "Code", nature = "inline" },
+
+ lines = { pdf = "Code", nature = "display" },
+ line = { pdf = "Code", nature = "mixed" },
+
+ synonym = { pdf = "Span", nature = "inline" },
+ sorting = { pdf = "Span", nature = "inline" },
+
+ register = { pdf = "Div", nature = "display" },
+ registerlocation = { pdf = "Span", nature = "inline" },
+ registersection = { pdf = "Div", nature = "display" },
+ registertag = { pdf = "Span", nature = "mixed" },
+ registerentries = { pdf = "Div", nature = "display" },
+ registerentry = { pdf = "Div", nature = "display" },
+ registercontent = { pdf = "Span", nature = "mixed" },
+ registersee = { pdf = "Span", nature = "mixed" },
+ registerpages = { pdf = "Span", nature = "mixed" },
+ registerpage = { pdf = "Span", nature = "mixed" },
+ registerseparator = { pdf = "Span", nature = "inline" },
+ registerpagerange = { pdf = "Span", nature = "mixed" },
+
+ table = { pdf = "Table", nature = "display" },
+ tablerow = { pdf = "TR", nature = "display" },
+ tablecell = { pdf = "TD", nature = "mixed" },
+
+ tabulate = { pdf = "Table", nature = "display" },
+ tabulaterow = { pdf = "TR", nature = "display" },
+ tabulatecell = { pdf = "TD", nature = "mixed" },
+
+ list = { pdf = "TOC", nature = "display" },
+ listitem = { pdf = "TOCI", nature = "display" },
+ listtag = { pdf = "Lbl", nature = "mixed" },
+ listcontent = { pdf = "P", nature = "mixed" },
+ listdata = { pdf = "P", nature = "mixed" },
+ listpage = { pdf = "Reference", nature = "mixed" },
+
+ delimitedblock = { pdf = "BlockQuote", nature = "display" },
+ delimited = { pdf = "Quote", nature = "inline" },
+ subsentence = { pdf = "Span", nature = "inline" },
+
+ label = { pdf = "Span", nature = "mixed" },
+ number = { pdf = "Span", nature = "mixed" },
+
+ float = { pdf = "Div", nature = "display" }, -- Figure
+ floatcaption = { pdf = "Caption", nature = "mixed" },
+ floatlabel = { pdf = "Span", nature = "inline" },
+ floatnumber = { pdf = "Span", nature = "inline" },
+ floattext = { pdf = "Span", nature = "mixed" },
+ floatcontent = { pdf = "P", nature = "mixed" },
+
+ image = { pdf = "P", nature = "mixed" },
+ mpgraphic = { pdf = "P", nature = "mixed" },
+
+ formulaset = { pdf = "Div", nature = "display" },
+ formula = { pdf = "Div", nature = "display" }, -- Formula
+ formulacaption = { pdf = "Span", nature = "mixed" },
+ formulalabel = { pdf = "Span", nature = "mixed" },
+ formulanumber = { pdf = "Span", nature = "mixed" },
+ formulacontent = { pdf = "P", nature = "display" },
+ subformula = { pdf = "Div", nature = "display" },
+
+ link = { pdf = "Link", nature = "inline" },
+
+ margintextblock = { pdf = "Span", nature = "inline" },
+ margintext = { pdf = "Span", nature = "inline" },
+
+ math = { pdf = "Div", nature = "inline" }, -- no display
+ mn = { pdf = "Span", nature = "mixed" },
+ mi = { pdf = "Span", nature = "mixed" },
+ mo = { pdf = "Span", nature = "mixed" },
+ ms = { pdf = "Span", nature = "mixed" },
+ mrow = { pdf = "Span", nature = "display" },
+ msubsup = { pdf = "Span", nature = "display" },
+ msub = { pdf = "Span", nature = "display" },
+ msup = { pdf = "Span", nature = "display" },
+ merror = { pdf = "Span", nature = "mixed" },
+ munderover = { pdf = "Span", nature = "display" },
+ munder = { pdf = "Span", nature = "display" },
+ mover = { pdf = "Span", nature = "display" },
+ mtext = { pdf = "Span", nature = "mixed" },
+ mfrac = { pdf = "Span", nature = "display" },
+ mroot = { pdf = "Span", nature = "display" },
+ msqrt = { pdf = "Span", nature = "display" },
+ mfenced = { pdf = "Span", nature = "display" },
+ maction = { pdf = "Span", nature = "display" },
+
+ mstacker = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackertop = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackerbot = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackermid = { pdf = "Span", nature = "display" }, -- these are only internally used
+
+ mtable = { pdf = "Table", nature = "display" }, -- might change
+ mtr = { pdf = "TR", nature = "display" }, -- might change
+ mtd = { pdf = "TD", nature = "display" }, -- might change
+
+ ignore = { pdf = "Span", nature = "mixed" }, -- used internally
+ private = { pdf = "Span", nature = "mixed" }, -- for users (like LS) when they need it
+ metadata = { pdf = "Div", nature = "display" },
+ metavariable = { pdf = "Span", nature = "mixed" },
+
+ mid = { pdf = "Span", nature = "inline" },
+ sub = { pdf = "Span", nature = "inline" },
+ sup = { pdf = "Span", nature = "inline" },
+ subsup = { pdf = "Span", nature = "inline" },
+
+ combination = { pdf = "Span", nature = "display" },
+ combinationpair = { pdf = "Span", nature = "display" },
+ combinationcontent = { pdf = "Span", nature = "mixed" },
+ combinationcaption = { pdf = "Span", nature = "mixed" },
}
-function tags.detailedtag(tag,detail,attribute)
- if not attribute then
- attribute = texattribute[a_tagged]
- end
+tags.properties = properties
+
+local patterns = setmetatableindex(function(t,tag)
+ local v = topattern("^" .. tag .. ">")
+ t[tag] = v
+ return v
+end)
+
+function tags.locatedtag(tag)
+ local attribute = texattribute[a_tagged]
if attribute >= 0 then
- local tl = taglist[attribute]
- if tl then
- local pattern
- if detail and detail ~= "" then
- pattern = "^" .. tag .. ":".. detail .. "%-"
- else
- pattern = "^" .. tag .. "%-"
- end
- for i=#tl,1,-1 do
- local tli = tl[i]
- if find(tli,pattern) then
- return tli
+ local specification = taglist[attribute]
+ if specification then
+ local taglist = specification.taglist
+ local pattern = patterns[tag]
+ for i=#taglist,1,-1 do
+ local t = taglist[i]
+ if find(t,pattern) then
+ return t
end
end
end
@@ -198,12 +228,20 @@ function tags.detailedtag(tag,detail,attribute)
return false -- handy as bogus index
end
-tags.properties = properties
-
-local lasttags = { }
-local userdata = { }
-
-tags.userdata = userdata
+function structures.atlocation(str)
+    local specification = taglist[texattribute[a_tagged]]
+    if specification then
+        local list = specification.taglist
+        if list then
+            local pattern = patterns[str]
+            for i=#list,1,-1 do
+                if find(list[i],pattern) then
+                    return true
+                end
+            end
+        end
+    end
+end
function tags.setproperty(tag,key,value)
local p = properties[tag]
@@ -214,15 +252,18 @@ function tags.setproperty(tag,key,value)
end
end
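+
+-- Illustrative only: a property aspect can be adapted at runtime, for instance
+--
+--   structures.tags.setproperty("float","backend","Figure")
+--
+-- which is what the TeX level \setelementbackendtag[float][Figure] boils down to.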
-function tags.registerdata(data)
- local fulltag = chain[nstack]
- if fulltag then
- tagdata[fulltag] = data
+function tags.setaspect(key,value)
+ local tag = chain[stacksize]
+ if tag then
+ local p = properties[tag]
+ if p then
+ p[key] = value
+ else
+ properties[tag] = { [key] = value }
+ end
end
end
-local metadata
-
function tags.registermetadata(data)
local d = settings_to_hash(data)
if metadata then
@@ -232,75 +273,92 @@ function tags.registermetadata(data)
end
end
-local nstack = 0
-
function tags.start(tag,specification)
- local label, detail, user
- if specification then
- label, detail, user = specification.label, specification.detail, specification.userdata
- end
if not enabled then
codeinjections.enabletags()
enabled = true
end
--
---~ labels[tag] = label ~= "" and label or tag
---~ local fulltag
---~ if detail and detail ~= "" then
---~ fulltag = tag .. ":" .. detail
---~ else
---~ fulltag = tag
---~ end
+ labels[tag] = tag -- can go away
--
- local fulltag = label ~= "" and label or tag
- labels[tag] = fulltag
- if detail and detail ~= "" then
- fulltag = fulltag .. ":" .. detail
- end
+ local attribute = #taglist + 1
+ local tagindex = (ids[tag] or 0) + 1
--
- local t = #taglist + 1
- local n = (ids[fulltag] or 0) + 1
- ids[fulltag] = n
- lasttags[tag] = n
- local completetag = fulltag .. "-" .. n
- nstack = nstack + 1
- chain[nstack] = completetag
- stack[nstack] = t
- -- a copy as we can add key values for alt and actualtext if needed:
- taglist[t] = { unpack(chain,1,nstack) }
+ local completetag = tag .. ">" .. tagindex
--
- if user and user ~= "" then
- -- maybe we should merge this into taglist or whatever ... anyway there is room to optimize
- -- taglist.userdata = settings_to_hash(user)
- userdata[completetag] = settings_to_hash(user)
- end
- if metadata then
- tagmetadata[completetag] = metadata
+ ids[tag] = tagindex
+ lasttags[tag] = tagindex
+ stacksize = stacksize + 1
+ --
+ chain[stacksize] = completetag
+ stack[stacksize] = attribute
+ tagcontext[tag] = completetag
+ --
+ local tagnesting = { unpack(chain,1,stacksize) } -- a copy so we can add actualtext
+ --
+ if specification then
+ specification.attribute = attribute
+ specification.tagindex = tagindex
+ specification.taglist = tagnesting
+ specification.tagname = tag
+ if metadata then
+ specification.metadata = metadata
+ metadata = nil
+ end
+ local userdata = specification.userdata
+ if userdata ~= "" and type(userdata) == "string" then
+ specification.userdata = settings_to_hash(userdata)
+ end
+ local detail = specification.detail
+ if detail == "" then
+ specification.detail = nil
+ end
+ local parents = specification.parents
+ if parents == "" then
+ specification.parents = nil
+ end
+ else
+ specification = {
+ attribute = attribute,
+ tagindex = tagindex,
+ taglist = tagnesting,
+ tagname = tag,
+ metadata = metadata,
+ }
metadata = nil
end
- texattribute[a_tagged] = t
- return t
+ --
+ taglist[attribute] = specification
+ specifications[completetag] = specification
+ --
+ texattribute[a_tagged] = attribute
+ return attribute
end
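+
+-- An illustrative sketch of the bookkeeping: after starting "document" and then "section"
+-- the second entry is reachable both ways,
+--
+--   taglist[2] = specifications["section>1"] = {
+--       attribute = 2,
+--       tagindex  = 1,
+--       tagname   = "section",
+--       taglist   = { "document>1", "section>1" },
+--   }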
-function tags.restart(completetag)
- local t = #taglist + 1
- nstack = nstack + 1
- chain[nstack] = completetag
- stack[nstack] = t
- taglist[t] = { unpack(chain,1,nstack) }
- texattribute[a_tagged] = t
- return t
+function tags.restart(attribute)
+ stacksize = stacksize + 1
+ if type(attribute) == "number" then
+ local taglist = taglist[attribute].taglist
+ chain[stacksize] = taglist[#taglist]
+ else
+ chain[stacksize] = attribute -- a string
+ attribute = #taglist + 1
+ taglist[attribute] = { taglist = { unpack(chain,1,stacksize) } }
+ end
+ stack[stacksize] = attribute
+ texattribute[a_tagged] = attribute
+ return attribute
end
function tags.stop()
- if nstack > 0 then
- nstack = nstack -1
+ if stacksize > 0 then
+ stacksize = stacksize - 1
end
- local t = stack[nstack]
+ local t = stack[stacksize]
if not t then
- if trace_tags then
- report_tags("ignoring end tag, previous chain: %s",nstack > 0 and concat(chain[nstack],"",1,nstack) or "none")
- end
+ -- if trace_tags then
+ report_tags("ignoring end tag, previous chain: %s",stacksize > 0 and concat(chain," ",1,stacksize) or "none")
+ -- end
t = unsetvalue
end
texattribute[a_tagged] = t
@@ -308,24 +366,56 @@ function tags.stop()
end
function tags.getid(tag,detail)
- if detail and detail ~= "" then
- return ids[tag .. ":" .. detail] or "?"
- else
- return ids[tag] or "?"
- end
+ return ids[tag] or "?"
end
function tags.last(tag)
return lasttags[tag] -- or false
end
-function tags.lastinchain()
- return chain[nstack]
+function tags.lastinchain(tag)
+ if tag and tag ~= "" then
+ return tagcontext[tag]
+ else
+ return chain[stacksize]
+ end
end
-function structures.atlocation(str)
- local location = gsub(concat(taglist[texattribute[a_tagged]],"-"),"%-%d+","")
- return find(location,topattern(str)) ~= nil
+local strip = C((1-S(">"))^1)
+
+function tags.elementtag()
+ local fulltag = chain[stacksize]
+ if fulltag then
+ return lpegmatch(strip,fulltag)
+ end
+end
+
+function tags.strip(fulltag)
+ return lpegmatch(strip,fulltag)
+end
+
+function tags.setuserproperties(tag,list)
+ if not list or list == "" then
+ tag, list = chain[stacksize], tag
+ else
+ tag = tagcontext[tag]
+ end
+ if tag then -- a fulltag now
+ local l = settings_to_hash(list)
+ local s = specifications[tag]
+ if s then
+ local u = s.userdata
+ if u then
+ for k, v in next, l do
+ u[k] = v
+ end
+ else
+ s.userdata = l
+ end
+ else
+ -- error
+ end
+ end
end
function tags.handler(head) -- we need a dummy
@@ -334,8 +424,8 @@ end
statistics.register("structure elements", function()
if enabled then
- if nstack > 0 then
- return format("%s element chains identified, open chain: %s ",#taglist,concat(chain," => ",1,nstack))
+ if stacksize > 0 then
+ return format("%s element chains identified, open chain: %s ",#taglist,concat(chain," => ",1,stacksize))
else
return format("%s element chains identified",#taglist)
end
@@ -349,6 +439,65 @@ directives.register("backend.addtags", function(v)
end
end)
-commands.starttag = tags.start
-commands.stoptag = tags.stop
-commands.settagproperty = tags.setproperty
+-- interface
+
+local starttag = tags.start
+
+implement {
+ name = "starttag",
+ actions = starttag,
+ arguments = { "string" }
+}
+
+implement {
+ name = "stoptag",
+ actions = tags.stop,
+}
+
+implement {
+ name = "starttag_u",
+ scope = "private",
+ actions = function(tag,userdata) starttag(tag,{ userdata = userdata }) end,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "starttag_d",
+ scope = "private",
+ actions = function(tag,detail) starttag(tag,{ detail = detail }) end,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "starttag_c",
+ scope = "private",
+ actions = function(tag,detail,parents) starttag(tag,{ detail = detail, parents = parents }) end,
+ arguments = { "string", "string", "string" }
+}
+
+implement { name = "settagaspect", actions = tags.setaspect, arguments = { "string", "string" } }
+
+implement { name = "settagproperty", actions = tags.setproperty, arguments = { "string", "string", "string" } }
+implement { name = "settagproperty_b", actions = tags.setproperty, arguments = { "string", "'backend'", "string" }, scope = "private" }
+implement { name = "settagproperty_n", actions = tags.setproperty, arguments = { "string", "'nature'", "string" }, scope = "private" }
+
+implement { name = "getelementtag", actions = { tags.elementtag, context } }
+
+implement {
+ name = "setelementuserproperties",
+ scope = "private",
+ actions = tags.setuserproperties,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "doifelseinelement",
+ actions = { structures.atlocation, commands.testcase },
+ arguments = "string",
+}
+
+implement {
+ name = "settaggedmetadata",
+ actions = structures.tags.registermetadata,
+ arguments = "string"
+}
diff --git a/tex/context/base/strc-tag.mkiv b/tex/context/base/strc-tag.mkiv
index 6e792fd3f..f2b59c29c 100644
--- a/tex/context/base/strc-tag.mkiv
+++ b/tex/context/base/strc-tag.mkiv
@@ -11,6 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% labels: no language needed
% key/values and other names might change (and probably will)
\writestatus{loading}{ConTeXt Structure Macros / Tags}
@@ -22,123 +23,139 @@
%D Eventually these labels will either move to the modules
%D where they're used, or they will end up in mult-tag.
-\def\t!document {document} % Div
-
-\def\t!division {division} % Div
-\def\t!paragraph {paragraph} % P
-\def\t!p {p} % P
-\def\t!construct {construct} % Span
-\def\t!highlight {highlight} % Span
-
-\def\t!section {section} % Sect
-\def\t!sectiontitle {sectiontitle} % H
-\def\t!sectionnumber {sectionnumber} % H
-\def\t!sectioncontent {sectioncontent} % Div
-
-\def\t!itemgroup {itemgroup} % L
-\def\t!item {item} % Li
-\def\t!itemtag {itemtag} % Lbl
-\def\t!itemcontent {itemcontent} % LBody
-
-\def\t!description {description} % Li
-\def\t!descriptiontag {descriptiontag} % Lbl
-\def\t!descriptioncontent {descriptioncontent} % LBody
-\def\t!descriptionsymbol {descriptionsymbol} % Span
-
-\let\t!construction \t!description
-\let\t!constructiontag \t!descriptiontag
-\let\t!constructioncontent\t!descriptioncontent
-\let\t!constructionsymbol \t!descriptionsymbol
-
-\def\t!verbatimblock {verbatimblock} % Code
-\def\t!verbatimlines {verbatimlines} % Code
-\def\t!verbatimline {verbatimline} % Code
-\def\t!verbatim {verbatim} % Code
-
-\def\t!lines {lines} % Code
-\def\t!line {line} % Code
-
-\def\t!sorting {sorting} % Span
-\def\t!synonym {synonym} % Span
-
-\def\t!register {register} % Div
-\def\t!registersection {registersection} % Div
-\def\t!registertag {registertag} % Span
-\def\t!registerentries {registerentries} % Div
-\def\t!registerentry {registerentry} % Span
-\def\t!registersee {registersee} % Span
-\def\t!registerpages {registerpages} % Span
-\def\t!registerpage {registerpage} % Span
-\def\t!registerpagerange {registerpagerange} % Span
-
-\def\t!table {table} % Table
-\def\t!tablerow {tablerow} % TR
-\def\t!tablecell {tablecell} % TD
-\def\t!tabulate {tabulate} % Table
-\def\t!tabulaterow {tabulaterow} % TR
-\def\t!tabulatecell {tabulatecell} % TD
-
-\def\t!math {math} % math
-\def\t!mathtable {mtable} % Table
-\def\t!mathtablerow {mtr} % TR
-\def\t!mathtablecell {mtd} % TD
-\def\t!mathaction {maction} %
-
-\def\t!list {list} % TOC
-\def\t!listitem {listitem} % TOCI
-\def\t!listtag {listtag} % Lbl
-\def\t!listcontent {listcontent} % P
-\def\t!listdata {listdata} % P
-\def\t!listpage {listpage} % Reference
-
-\def\t!delimitedblock {delimited} % BlockQuote
-\def\t!delimited {delimited} % Quote
-\def\t!subsentence {subsentence} % Span
-
-\def\t!float {float} % Div
-\def\t!floatcaption {floatcaption} % Caption
-\def\t!floatlabel {floatlabel} % Span
-\def\t!floattext {floattext} % Span
-\def\t!floatnumber {floatnumber} % Span
-\def\t!floatcontent {floatcontent} % P
-
-\def\t!image {image} % P
-
-\def\t!mpgraphic {mpgraphic} % P
-
-\def\t!formulaset {formulaset} % Div
-\def\t!formula {formula} % Div
-\def\t!formulacaption {formulacaption} % Span
-\def\t!formulalabel {formulalabel} % Span
-\def\t!formulanumber {formulanumber} % P
-\def\t!formulacontent {formulacontent} % P
-\def\t!subformula {subformula} % Div
-
-\def\t!link {link} % Link
-
-\def\t!margintext {margintext} % Span
-\def\t!margintextblock {margintextblock} % Div
+\def\t!document {document} % Div
+
+\def\t!division {division} % Div
+\def\t!paragraph {paragraph} % P
+\def\t!p {p} % P
+\def\t!construct {construct} % Span
+\def\t!highlight {highlight} % Span
+
+\def\t!section {section} % Sect
+\def\t!sectiontitle {sectiontitle} % H
+\def\t!sectionnumber {sectionnumber} % H
+\def\t!sectioncontent {sectioncontent} % Div
+
+\def\t!itemgroup {itemgroup} % L
+\def\t!item {item} % Li
+\def\t!itemtag {itemtag} % Lbl
+\def\t!itemcontent {itemcontent} % LBody
+\def\t!itemhead {itemhead} % Div
+\def\t!itembody {itembody} % Div
+
+\def\t!description {description} % Li
+\def\t!descriptiontag {descriptiontag} % Lbl
+\def\t!descriptioncontent {descriptioncontent} % LBody
+\def\t!descriptionsymbol {descriptionsymbol} % Span
+
+\let\t!construction \t!description
+\let\t!constructiontag \t!descriptiontag
+\let\t!constructioncontent \t!descriptioncontent
+\let\t!constructionsymbol \t!descriptionsymbol
+
+\def\t!verbatimblock {verbatimblock} % Code
+\def\t!verbatimlines {verbatimlines} % Code
+\def\t!verbatimline {verbatimline} % Code
+\def\t!verbatim {verbatim} % Code
+
+\def\t!lines {lines} % Code
+\def\t!line {line} % Code
+
+\def\t!sorting {sorting} % Span
+\def\t!synonym {synonym} % Span
+
+\def\t!register {register} % Div
+\def\t!registerlocation {registerlocation} % Span
+\def\t!registersection {registersection} % Div
+\def\t!registertag {registertag} % Span
+\def\t!registerentries {registerentries} % Div
+\def\t!registerentry {registerentry} % Span
+\def\t!registercontent {registercontent} % Span
+\def\t!registersee {registersee} % Span
+\def\t!registerpages {registerpages} % Span
+\def\t!registerpage {registerpage} % Span
+\def\t!registerpagerange {registerpagerange} % Span
+\def\t!registerfrompage {registerfrompage} % Span
+\def\t!registertopage {registertopage} % Span
+\def\t!registerseparator {registerseparator} % Span
+
+\def\t!table {table} % Table
+\def\t!tablerow {tablerow} % TR
+\def\t!tablecell {tablecell} % TD
+\def\t!tabulate {tabulate} % Table
+\def\t!tabulaterow {tabulaterow} % TR
+\def\t!tabulatecell {tabulatecell} % TD
+
+\def\t!math {math} % math
+\def\t!mathtable {mtable} % Table
+\def\t!mathtablerow {mtr} % TR
+\def\t!mathtablecell {mtd} % TD
+\def\t!mathaction {maction} %
+\def\t!mathstacker {mstacker}
+\def\t!mathstackertop {mstackertop}
+\def\t!mathstackermid {mstackermid}
+\def\t!mathstackerbot {mstackerbot}
+
+\def\t!munderover {munderover} % special cases
+\def\t!munder {munder} % special cases
+\def\t!mover {mover} % special cases
+
+\def\t!list {list} % TOC
+\def\t!listitem {listitem} % TOCI
+\def\t!listtag {listtag} % Lbl
+\def\t!listcontent {listcontent} % P
+\def\t!listdata {listdata} % P
+\def\t!listpage {listpage} % Reference
+
+\def\t!delimitedblock {delimited} % BlockQuote
+\def\t!delimited {delimited} % Quote
+\def\t!subsentence {subsentence} % Span
+
+\def\t!float {float} % Div
+\def\t!floatcaption {floatcaption} % Caption
+\def\t!floatlabel {floatlabel} % Span
+\def\t!floattext {floattext} % Span
+\def\t!floatnumber {floatnumber} % Span
+\def\t!floatcontent {floatcontent} % P
+
+\def\t!image {image} % P
+
+\def\t!mpgraphic {mpgraphic} % P
+
+\def\t!formulaset {formulaset} % Div
+\def\t!formula {formula} % Div
+\def\t!formulacaption {formulacaption} % Span
+\def\t!formulalabel {formulalabel} % Span
+\def\t!formulanumber {formulanumber} % P
+\def\t!formulacontent {formulacontent} % P
+\def\t!subformula {subformula} % Div
+
+\def\t!link {link} % Link
+
+\def\t!margintext {margintext} % Span
+\def\t!margintextblock {margintextblock} % Div
% we might opt for verbose variants so this is experimental:
-\def\t!label {label} % Span
-\def\t!number {number} % Span
+\def\t!label {label} % Span
+\def\t!number {number} % Span
-\def\t!ignore {ignore} % Span
+\def\t!ignore {ignore} % Span
+\def\t!private {private} % Span
-\def\t!mid {mid} % Span
-\def\t!sub {sub} % Span
-\def\t!sup {sup} % Span
-\def\t!subsup {subsup} % Span
+\def\t!mid {mid} % Span
+\def\t!sub {sub} % Span
+\def\t!sup {sup} % Span
+\def\t!subsup {subsup} % Span
-\def\t!unit {unit} % Span
-\def\t!quantity {quantity} % Span
-\def\t!number {number} % Span
+\def\t!unit {unit} % Span
+\def\t!quantity {quantity} % Span
+\def\t!number {number} % Span
-\def\t!combination {combination} % Span
-\def\t!combinationpair {combinationpair} % Span
-\def\t!combinationcontent {combinationcontent} % Span
-\def\t!combinationcaption {combinationcaption} % Span
+\def\t!combination {combination} % Span
+\def\t!combinationpair {combinationpair} % Span
+\def\t!combinationcontent {combinationcontent} % Span
+\def\t!combinationcaption {combinationcaption} % Span
% \setuptaglabeltext
% [en]
@@ -152,8 +169,11 @@
\unexpanded\def\setelementbackendtag{\dodoubleargument\strc_tags_set_backend}
\unexpanded\def\setelementnature {\dodoubleargument\strc_tags_set_nature}
-\def\strc_tags_set_backend[#1][#2]{\ctxcommand{settagproperty("#1","backend","#2")}}
-\def\strc_tags_set_nature [#1][#2]{\ctxcommand{settagproperty("#1","nature", "#2")}}
+\def\strc_tags_set_backend[#1][#2]{\clf_settagproperty_b{#1}{#2}} % todo: ignore when no export
+\def\strc_tags_set_nature [#1][#2]{\clf_settagproperty_n{#1}{#2}} % todo: ignore when no export
+
+\unexpanded\def\strc_tags_set_aspect_nop#1#2{}
+\unexpanded\def\strc_tags_set_aspect_yes#1#2{\clf_settagaspect{#1}{#2}} % todo: ignore when no export / also \let
\installcorenamespace{tagging}
@@ -176,11 +196,14 @@
\expandafter\strc_tags_element_stop_yes
\fi}
+% it makes no sense to have labels ... maybe some day as a last 'replace' in the export,
+% which might be more efficient then ... okay, we can no longer overload but who cares
+
\unexpanded\def\strc_tags_element_start_yes_indeed_yes[#1][#2]%
- {\ctxcommand{starttag("#1",{label="\dogetupsometaglabeltext{#1}",userdata=\!!bs#2\!!es})}}
+ {\clf_starttag_u{#1}{#2}}
\unexpanded\def\strc_tags_element_stop_yes
- {\ctxcommand{stoptag()}}
+ {\clf_stoptag}
\unexpanded\def\strc_tags_element_start_nop_indeed[#1][#2]%
{}
@@ -191,19 +214,35 @@
\unexpanded\def\strc_tags_enable_elements
{\setuplanguage[\s!default][\s!righthyphenchar="AD]% for the moment here
\let\startelement\strc_tags_element_start_yes
- \let\stopelement \strc_tags_element_stop_yes}
+ \let\stopelement \strc_tags_element_stop_yes
+ \let\dosettagproperty\strc_tags_set_aspect_yes}
\unexpanded\def\strc_tags_disable_elements
{\let\startelement\strc_tags_element_start_nop
- \let\stopelement \strc_tags_element_stop_nop}
+ \let\stopelement \strc_tags_element_stop_nop
+ \let\dosettagproperty\strc_tags_set_aspect_nop}
% beware: making these unexpanded spoils tables (noalign problem)
-\def\strc_tags_enabled_start
+\def\strc_tags_enabled_start_no_detail
+ {\iftrialtypesetting
+ \expandafter\strc_tags_start_nop_no_detail
+ \else
+ \expandafter\strc_tags_start_yes_no_detail
+ \fi}
+
+\def\strc_tags_enabled_start_detail
+ {\iftrialtypesetting
+ \expandafter\strc_tags_start_nop_detail
+ \else
+ \expandafter\strc_tags_start_yes_detail
+ \fi}
+
+\def\strc_tags_enabled_start_chained
{\iftrialtypesetting
- \expandafter\strc_tags_start_nop
+ \expandafter\strc_tags_start_nop_chained
\else
- \expandafter\strc_tags_start_yes
+ \expandafter\strc_tags_start_yes_chained
\fi}
\def\strc_tags_enabled_stop
@@ -213,25 +252,66 @@
\expandafter\strc_tags_stop_yes
\fi}
-\def\strc_tags_start_yes#1#2% we could have a fast labeltext resolver
- {\ctxcommand{starttag("#1",{label="\dogetupsometaglabeltext{#1}",detail="#2"})}}
+\def\strc_tags_start_yes_no_detail #1{\clf_starttag{#1}}
+\def\strc_tags_start_yes_detail #1#2{\clf_starttag_d{#1}{#2}}
+\def\strc_tags_start_yes_chained #1#2#3{\clf_starttag_c{#1}{#2}{\getcurrentparentchain#3{#2}}}
+\def\strc_tags_stop_yes {\clf_stoptag}
-\def\strc_tags_stop_yes
- {\ctxcommand{stoptag()}}
+\let\strc_tags_start_nop_no_detail\gobbleoneargument
+\let\strc_tags_start_nop_detail \gobbletwoarguments
+\let\strc_tags_start_nop_chained \gobblethreearguments
+\let\strc_tags_stop_nop \donothing
-\def\strc_tags_start_nop#1#2%
- {}
+% more efficient:
-\def\strc_tags_stop_nop
- {}
+% \dostarttagged % {tag} {detail}
+% \dostarttaggedchained % {tag} {detail} \??hash
+% \dostarttaggednodetail % {tag}
+
+% \unexpanded\def\strc_tags_enable
+% {\let\dostarttagged\strc_tags_start_yes
+% \let\dostoptagged \strc_tags_stop_yes}
\unexpanded\def\strc_tags_enable
- {\let\dostarttagged\strc_tags_start_yes
- \let\dostoptagged \strc_tags_stop_yes}
+ {\let\dostarttagged \strc_tags_enabled_start_detail
+ \let\dostarttaggednodetail\strc_tags_enabled_start_no_detail
+ \let\dostarttaggedchained \strc_tags_enabled_start_chained
+ \let\dostoptagged \strc_tags_enabled_stop}
\unexpanded\def\strc_tags_disable
- {\let\dostarttagged\strc_tags_start_nop
- \let\dostoptagged \strc_tags_stop_nop}
+ {\let\dostarttagged \strc_tags_start_nop_detail
+ \let\dostarttaggednodetail\strc_tags_start_nop_no_detail
+ \let\dostarttaggedchained \strc_tags_start_nop_chained
+ \let\dostoptagged \strc_tags_stop_nop}
+
+% for luigi (beware: fully expandable):
+
+\def\strc_tags_get_element_tag_yes{\clf_getelementtag}
+\let\strc_tags_get_element_tag_nop\donothing
+
+\unexpanded\def\strc_tags_setup_element_user_properties_yes
+ {\dodoubleempty\strc_tags_setup_element_user_properties_indeed}
+
+\unexpanded\def\strc_tags_setup_element_user_properties_nop
+ {\dodoubleempty\strc_tags_setup_element_user_properties_indeed_nop}
+
+\def\strc_tags_setup_element_user_properties_indeed
+ {\iftrialtypesetting
+ \expandafter\strc_tags_setup_element_user_properties_indeed_nop
+ \else
+ \expandafter\strc_tags_setup_element_user_properties_indeed_yes
+ \fi}
+
+\def\strc_tags_setup_element_user_properties_indeed_nop[#1][#2]{}
+\def\strc_tags_setup_element_user_properties_indeed_yes[#1][#2]{\clf_setelementuserproperties{#1}{#2}}
+
+\unexpanded\def\strc_tags_enable_properties
+ {\let\getelementtag \strc_tags_get_element_tag_yes
+ \let\setupelementuserproperties\strc_tags_setup_element_user_properties_yes}
+
+\unexpanded\def\strc_tags_disable_properties
+ {\let\getelementtag \strc_tags_get_element_tag_nop
+ \let\setupelementuserproperties\strc_tags_setup_element_user_properties_nop}
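+
+% An illustrative sketch (keys are made up):
+%
+%   \setupelementuserproperties[float][status=draft] % a given element
+%   \setupelementuserproperties[status=draft]        % the element currently open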
%D The triggers:
@@ -240,11 +320,13 @@
\appendtoks
\strc_tags_enable_elements
+ \strc_tags_enable_properties
\doifelse{\taggingparameter\c!method}\v!auto\strc_tags_enable\strc_tags_disable
\to \everyenableelements
\appendtoks
\strc_tags_disable_elements
+ \strc_tags_disable_properties
\strc_tags_disable
\to \everydisableelements
@@ -277,41 +359,41 @@
\installcorenamespace {paragraph}
\installcommandhandler \??paragraph {paragraph} \??paragraph
-\setupparagraph % someday maybe also strut (beg/end)
+\setupparagraph % someday maybe also strut (beg/end) and align
[\c!color=,
\c!style=]
\unexpanded\def\startparagraph
- {\dodoubleempty\paragraph_start}
+ {\dodoubleempty\paragraph_start}
\def\paragraph_start
- {\iffirstargument
- \ifsecondargument
- \doubleexpandafter\paragraph_start_two
- \else
- \doubleexpandafter\paragraph_start_one
- \fi
- \else
- \expandafter\paragraph_start_zero
- \fi}
+ {\endgraf % we end before the group
+ \begingroup
+ \iffirstargument
+ \ifsecondargument
+ \doubleexpandafter\paragraph_start_two
+ \else
+ \doubleexpandafter\paragraph_start_one
+ \fi
+ \else
+ \expandafter\paragraph_start_zero
+ \fi}
\def\paragraph_start_two[#1][#2]%
- {\endgraf % we end before the group
- \begingroup
- \let\stopparagraph\paragraph_stop_indeed
- \edef\currentparagraph{#1}%
- \setupcurrentparagraph[#2]%
- \paragraph_start_indeed}
+ {\edef\currentparagraph{#1}%
+ \setupcurrentparagraph[#2]%
+ \paragraph_start_indeed}
\def\paragraph_start_one[#1][#2]%
- {\endgraf % we end before the group
- \begingroup
- \let\stopparagraph\paragraph_stop_indeed
- \doifassignmentelse{#1}
- {\let\currentparagraph\empty
- \setupcurrentparagraph[#1]}
- {\edef\currentparagraph{#1}}%
- \paragraph_start_indeed}
+ {\doifelseassignment{#1}
+ {\let\currentparagraph\empty
+ \setupcurrentparagraph[#1]}
+ {\edef\currentparagraph{#1}}%
+ \paragraph_start_indeed}
+
+\def\paragraph_start_zero[#1][#2]%
+ {\let\currentparagraph\empty
+ \paragraph_start_indeed}
\def\paragraph_start_indeed
{\useparagraphstyleandcolor\c!style\c!color
@@ -319,27 +401,32 @@
\usesetupsparameter\paragraphparameter
\dostarttagged\t!paragraph\currentparagraph}
-\def\paragraph_start_zero[#1][#2]%
- {\let\currentparagraph\empty
- \paragraph_start_indeed}
-
-\unexpanded\def\paragraph_stop_indeed
- {\dostoptagged
- \endgraf % we end inside the group
- \endgroup}
-
\unexpanded\def\stopparagraph
- {\dostoptagged}
+ {\dostoptagged
+ \endgraf % we end inside the group
+ \endgroup}
\let\startpar\startparagraph
\let\stoppar \stopparagraph
+\def\strc_tags_document_start_indeed
+ {\glet\strc_tags_document_start_indeed\relax
+ \dostarttagged\t!document\empty}
+
+\def\strc_tags_document_stop_indeed
+ {\glet\strc_tags_document_stop_indeed\relax
+ \dostoptagged}
+
\appendtoks
- \dostarttagged\t!document\empty
-\to \everystarttext
+ \strc_tags_document_start_indeed % here because otherwise products don't get a root (starttext before env)
+\to \everyenableelements
+
+% \appendtoks
+% \strc_tags_document_start_indeed
+% \to \everystarttext
\appendtoks
- \dostoptagged
+ \strc_tags_document_stop_indeed
\to \everystoptext
\appendtoks
@@ -347,12 +434,14 @@
\strc_tags_disable
\to \everybeforepagebody
-% \doifinelementelse{structure:section} {yes} {no}
-% \doifinelementelse{structure:chapter} {yes} {no}
-% \doifinelementelse{division:*-structure:chapter} {yes} {no}
+% \doifelseinelement{structure:section} {yes} {no}
+% \doifelseinelement{structure:chapter} {yes} {no}
+% \doifelseinelement{division:*-structure:chapter} {yes} {no}
+
+\unexpanded\def\doifelseinelement#1%
+ {\clf_doifelseinelement{#1}}
-\unexpanded\def\doifinelementelse#1%
- {\ctxcommand{testcase(structures.atlocation("#1"))}}
+\let\doifinelementelse\doifelseinelement
\unexpanded\def\taggedlabeltexts#1#2#3% experimental: label, numberdetail, numbercontent
{\begingroup
@@ -398,7 +487,7 @@
%D \stoptyping
\unexpanded\def\settaggedmetadata[#1]%
- {\ctxlua{structures.tags.registermetadata(\!!bs#1\!!es)}}
+ {\clf_settaggedmetadata{#1}}
%D An overload:
diff --git a/tex/context/base/supp-box.lua b/tex/context/base/supp-box.lua
index 27078f46f..7cc71a891 100644
--- a/tex/context/base/supp-box.lua
+++ b/tex/context/base/supp-box.lua
@@ -8,13 +8,16 @@ if not modules then modules = { } end modules ['supp-box'] = {
-- this is preliminary code, use insert_before etc
+local lpegmatch = lpeg.match
+
local report_hyphenation = logs.reporter("languages","hyphenation")
local tex = tex
local context = context
-local commands = commands
local nodes = nodes
+local implement = interfaces.implement
+
local splitstring = string.split
local nodecodes = nodes.nodecodes
@@ -26,110 +29,217 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
-local new_penalty = nodes.pool.penalty
-local new_hlist = nodes.pool.hlist
-local new_glue = nodes.pool.glue
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
-local free_node = nodes.free
-local copy_list = nodes.copy_list
-local copy_node = nodes.copy
-local find_tail = nodes.tail
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattribute = nuts.getattribute
+local getbox = nuts.getbox
+
+local setfield = nuts.setfield
+local setbox = nuts.setbox
+
+local free_node = nuts.free
+local flush_list = nuts.flush_list
+local copy_node = nuts.copy
+local copy_list = nuts.copy_list
+local find_tail = nuts.tail
+local traverse_id = nuts.traverse_id
+local link_nodes = nuts.linked
+
+local listtoutf = nodes.listtoutf
+
+local nodepool = nuts.pool
+local new_penalty = nodepool.penalty
+local new_hlist = nodepool.hlist
+local new_glue = nodepool.glue
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+
+local setlistcolor = nodes.tracers.colors.setlist
-local texsetbox = tex.setbox
-local texgetbox = tex.getbox
local texget = tex.get
+local texgetbox = tex.getbox
-local function hyphenatedlist(list)
- while list do
- local id, next, prev = list.id, list.next, list.prev
+local function hyphenatedlist(head,usecolor)
+ local current = head and tonut(head)
+ while current do
+ local id = getid(current)
+ local next = getnext(current)
+ local prev = getprev(current)
if id == disc_code then
- local hyphen = list.pre
- if hyphen then
- local penalty = new_penalty(-500)
- hyphen.next, penalty.prev = penalty, hyphen
- prev.next, next.prev = hyphen, penalty
- penalty.next, hyphen.prev = next, prev
- list.pre = nil
- free_node(list)
+ local pre = getfield(current,"pre")
+ local post = getfield(current,"post")
+ local replace = getfield(current,"replace")
+ if pre then
+ setfield(current,"pre",nil)
+ end
+ if post then
+ setfield(current,"post",nil)
end
+ if not usecolor then
+ -- nothing fancy done
+ elseif pre and post then
+ setlistcolor(pre,"darkmagenta")
+ setlistcolor(post,"darkcyan")
+ elseif pre then
+ setlistcolor(pre,"darkyellow")
+ elseif post then
+ setlistcolor(post,"darkyellow")
+ end
+ if replace then
+ flush_list(replace)
+ setfield(current,"replace",nil)
+ end
+ -- setfield(current,"replace",new_rule(65536)) -- new_kern(65536*2))
+ setfield(current,"next",nil)
+ setfield(current,"prev",nil)
+ local list = link_nodes (
+ pre and new_penalty(10000),
+ pre,
+ current,
+ post,
+ post and new_penalty(10000)
+ )
+ local tail = find_tail(list)
+ if prev then
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
+ end
+ if next then
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ -- free_node(current)
elseif id == vlist_code or id == hlist_code then
- hyphenatedlist(list.list)
+ hyphenatedlist(getlist(current))
end
- list = next
+ current = next
end
end
-commands.hyphenatedlist = hyphenatedlist
+implement {
+ name = "hyphenatedlist",
+ arguments = { "integer", "boolean" },
+ actions = function(n,color)
+ local b = texgetbox(n)
+ if b then
+ hyphenatedlist(b.list,color)
+ end
+ end
+}
-function commands.showhyphenatedinlist(list)
- report_hyphenation("show: %s",nodes.listtoutf(list,false,true))
-end
+-- local function hyphenatedhack(head,pre)
+-- pre = tonut(pre)
+-- for n in traverse_id(disc_code,tonut(head)) do
+-- local hyphen = getfield(n,"pre")
+-- if hyphen then
+-- flush_list(hyphen)
+-- end
+-- setfield(n,"pre",copy_list(pre))
+-- end
+-- end
+--
+-- commands.hyphenatedhack = hyphenatedhack
local function checkedlist(list)
if type(list) == "number" then
- return texgetbox(list).list
+ return getlist(getbox(tonut(list)))
else
- return list
+ return tonut(list)
end
end
-local function applytochars(list,what,nested)
- local doaction = context[what or "ruledhbox"]
- local noaction = context
- local current = checkedlist(list)
+implement {
+ name = "showhyphenatedinlist",
+ arguments = "integer",
+ actions = function(box)
+ report_hyphenation("show: %s",listtoutf(checkedlist(box),false,true))
+ end
+}
+
+local function applytochars(current,doaction,noaction,nested)
while current do
- local id = current.id
+ local id = getid(current)
if nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytochars(current.list,what,nested)
+ applytochars(getlist(current),doaction,noaction,nested)
context.endhbox()
elseif id ~= glyph_code then
- noaction(copy_node(current))
+ noaction(tonode(copy_node(current)))
else
- doaction(copy_node(current))
+ doaction(tonode(copy_node(current)))
end
- current = current.next
+ current = getnext(current)
end
end
-local function applytowords(list,what,nested)
- local doaction = context[what or "ruledhbox"]
- local noaction = context
- local current = checkedlist(list)
+local function applytowords(current,doaction,noaction,nested)
local start
while current do
- local id = current.id
+ local id = getid(current)
if id == glue_code then
if start then
- doaction(copy_list(start,current))
+ doaction(tonode(copy_list(start,current)))
start = nil
end
- noaction(copy_node(current))
+ noaction(tonode(copy_node(current)))
elseif nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytowords(current.list,what,nested)
+ applytowords(getlist(current),doaction,noaction,nested)
context.egroup()
elseif not start then
start = current
end
- current = current.next
+ current = getnext(current)
end
if start then
- doaction(copy_list(start))
+ doaction(tonode(copy_list(start)))
end
end
-commands.applytochars = applytochars
-commands.applytowords = applytowords
+local methods = {
+ char = applytochars,
+ characters = applytochars,
+ word = applytowords,
+ words = applytowords,
+}
+
+implement {
+ name = "applytobox",
+ arguments = {
+ {
+ { "box", "integer" },
+ { "command" },
+ { "method" },
+ { "nested", "boolean" },
+ }
+ },
+ actions = function(specification)
+ local list = checkedlist(specification.box)
+ local action = methods[specification.method or "char"]
+ if list and action then
+ action(list,context[specification.command or "ruledhbox"],context,specification.nested)
+ end
+ end
+}
local split_char = lpeg.Ct(lpeg.C(1)^0)
local split_word = lpeg.tsplitat(lpeg.patterns.space)
local split_line = lpeg.tsplitat(lpeg.patterns.eol)
-function commands.processsplit(str,command,how,spaced)
- how = how or "word"
- if how == "char" then
- local words = lpeg.match(split_char,str)
+local function processsplit(specification)
+ local str = specification.data or ""
+ local command = specification.command or "ruledhbox"
+ local method = specification.method or "word"
+ local spaced = specification.spaced
+ if method == "char" or method == "character" then
+ local words = lpegmatch(split_char,str)
for i=1,#words do
local word = words[i]
if word == " " then
@@ -142,8 +252,8 @@ function commands.processsplit(str,command,how,spaced)
context(word)
end
end
- elseif how == "word" then
- local words = lpeg.match(split_word,str)
+ elseif method == "word" then
+ local words = lpegmatch(split_word,str)
for i=1,#words do
local word = words[i]
if spaced and i > 1 then
@@ -155,8 +265,8 @@ function commands.processsplit(str,command,how,spaced)
context(word)
end
end
- elseif how == "line" then
- local words = lpeg.match(split_line,str)
+ elseif method == "line" then
+ local words = lpegmatch(split_line,str)
for i=1,#words do
local word = words[i]
if spaced and i > 1 then
@@ -173,63 +283,88 @@ function commands.processsplit(str,command,how,spaced)
end
end
+implement {
+ name = "processsplit",
+ actions = processsplit,
+ arguments = {
+ {
+ { "data" },
+ { "command" },
+ { "method" },
+ { "spaced", "boolean" },
+ }
+ }
+}
+
local a_vboxtohboxseparator = attributes.private("vboxtohboxseparator")
-function commands.vboxlisttohbox(original,target,inbetween)
- local current = texgetbox(original).list
- local head = nil
- local tail = nil
- while current do
- local id = current.id
- local next = current.next
- if id == hlist_code then
- local list = current.list
- if head then
- if inbetween > 0 then
- local n = new_glue(0,0,inbetween)
- tail.next = n
- n.prev = tail
- tail = n
- end
- tail.next = list
- list.prev = tail
- else
- head = list
- end
- tail = find_tail(list)
- -- remove last separator
- if tail.id == hlist_code and tail[a_vboxtohboxseparator] == 1 then
- local temp = tail
- local prev = tail.prev
- if next then
- local list = tail.list
- prev.next = list
- list.prev = prev
- tail.list = nil
- tail = find_tail(list)
+implement {
+ name = "vboxlisttohbox",
+ arguments = { "integer", "integer", "dimen" },
+ actions = function(original,target,inbetween)
+ local current = getlist(getbox(original))
+ local head = nil
+ local tail = nil
+ while current do
+ local id = getid(current)
+ local next = getnext(current)
+ if id == hlist_code then
+ local list = getlist(current)
+ if head then
+ if inbetween > 0 then
+ local n = new_glue(0,0,inbetween)
+ setfield(tail,"next",n)
+ setfield(n,"prev",tail)
+ tail = n
+ end
+ setfield(tail,"next",list)
+ setfield(list,"prev",tail)
else
- tail = prev
+ head = list
end
- free_node(temp)
+ tail = find_tail(list)
+ -- remove last separator
+ if getid(tail) == hlist_code and getattribute(tail,a_vboxtohboxseparator) == 1 then
+ local temp = tail
+ local prev = getprev(tail)
+ if next then
+ local list = getlist(tail)
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
+ setfield(tail,"list",nil)
+ tail = find_tail(list)
+ else
+ tail = prev
+ end
+ free_node(temp)
+ end
+ -- done
+ setfield(tail,"next",nil)
+ setfield(current,"list",nil)
end
- -- done
- tail.next = nil
- current.list = nil
+ current = next
end
- current = next
+ local result = new_hlist()
+ setfield(result,"list",head)
+ setbox(target,result)
end
- local result = new_hlist()
- result.list = head
- texsetbox(target,result)
-end
+}
-function commands.hboxtovbox(original)
- local b = texgetbox(original)
- local factor = texget("baselineskip").width / texget("hsize")
- b.depth = 0
- b.height = b.width * factor
-end
+implement {
+ name = "hboxtovbox",
+ arguments = "integer",
+ actions = function(n)
+ local b = getbox(n)
+ local factor = texget("baselineskip").width / texget("hsize")
+ setfield(b,"depth",0)
+ setfield(b,"height",getfield(b,"width") * factor)
+ end
+}
-function commands.boxtostring(n)
- context.puretext(nodes.toutf(tex.box[n].list)) -- helper is defined later
-end
+implement {
+ name = "boxtostring",
+ arguments = "integer",
+ actions = function(n)
+ context.puretext(nodes.toutf(texgetbox(n).list)) -- helper is defined later
+ end
+}
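The hunks above migrate supp-box.lua from the old commands.* table (reached from TeX via \ctxcommand) to interfaces.implement registrations (reached via \clf_* calls). As a rough mental model only (the registry below is hypothetical; the real interfaces.implement additionally generates argument scanners on the TeX side), here is a minimal sketch in plain Lua:

-- hypothetical sketch, not ConTeXt's actual interfaces.implement
local registry = { }

local function implement(specification)
    assert(type(specification.name) == "string", "an implemented command needs a name")
    registry[specification.name] = {
        arguments = specification.arguments, -- what the TeX end is expected to scan
        actions   = specification.actions,   -- the Lua function to run
    }
end

implement {
    name      = "double",
    arguments = "integer",
    actions   = function(n) print(2 * n) end,
}

-- the calling side; the real system collects arguments by scanning TeX tokens
registry["double"].actions(21) -- prints 42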
diff --git a/tex/context/base/supp-box.mkiv b/tex/context/base/supp-box.mkiv
index 66f373b72..54b0e2860 100644
--- a/tex/context/base/supp-box.mkiv
+++ b/tex/context/base/supp-box.mkiv
@@ -570,7 +570,7 @@
%D \doiftext {data} {then branch}
%D \stoptyping
-\unexpanded\def\doiftextelse#1%
+\unexpanded\def\doifelsetext#1%
{\begingroup
\setbox\scratchbox\hbox
{\settrialtypesetting
@@ -581,6 +581,8 @@
\endgroup\expandafter\firstoftwoarguments
\fi}
+\let\doiftextelse\doifelsetext
+
\unexpanded\def\doiftext#1%
{\begingroup
\setbox\scratchbox\hbox
@@ -658,8 +660,8 @@
%D A slower but more versatile implementation is:
%D
%D \starttyping
-%D \long\def\dowithnextbox#1#2%
-%D {\long\def\syst_boxes_with_next_box{#1}%
+%D \unexpanded\def\dowithnextbox#1#2%
+%D {\def\syst_boxes_with_next_box{#1}%
%D \ifx#2\hbox
%D \afterassignment\syst_boxes_with_next_box_indeed
%D \else\ifx#2\vbox
@@ -1063,10 +1065,11 @@
%D \showhyphens{dohyphenatedword}
%D \stoptyping
-\def\doshowhyphenatednextbox
- {\ctxcommand{showhyphenatedinlist(tex.box[\number\nextbox].list)}}
+\unexpanded\def\doshowhyphenatednextbox
+ {\clf_showhyphenatedinlist\nextbox}
-\unexpanded\def\showhyphens{\dowithnextboxcs\doshowhyphenatednextbox\hbox}
+\unexpanded\def\showhyphens
+ {\dowithnextboxcs\doshowhyphenatednextbox\hbox}
%D The following macros are seldom used but handy for tracing.
%D
@@ -1076,14 +1079,20 @@
%D \hyphenatedfile{tufte}
%D \stoptyping
-\def\dohyphenatednextbox
- {\ctxcommand{hyphenatedlist(tex.box[\number\nextbox].list)}%
+\unexpanded\def\dohyphenatednextbox
+ {\clf_hyphenatedlist\nextbox false\relax
\unhbox\nextbox}
\unexpanded\def\hyphenatedword {\dowithnextboxcs\dohyphenatednextbox\hbox}
\unexpanded\def\hyphenatedpar {\dowithnextboxcs\dohyphenatednextbox\hbox}
\unexpanded\def\hyphenatedfile#1{\dowithnextboxcs\dohyphenatednextbox\hbox{\readfile{#1}\donothing\donothing}}
+\unexpanded\def\dohyphenatednextboxcolor
+ {\clf_hyphenatedlist\nextbox true\relax
+ \unhbox\nextbox}
+
+\unexpanded\def\hyphenatedcoloredword{\dowithnextboxcs\dohyphenatednextboxcolor\hbox}
+
%D \macros
%D {processtokens}
%D
@@ -1363,7 +1372,12 @@
\begingroup
\setbox\scratchbox\hbox{\settrialtypesetting#2{\savecurrentattributes{pic}}}%
\setbox\scratchbox\hbox{\restorecurrentattributes{pic}#1}%
- \ctxcommand{applytochars(\number\scratchbox,"\strippedcsname#2",true)}%
+ \clf_applytobox
+ method {char}%
+ box \scratchbox
+ command {\checkedstrippedcsname#2}%
+ nested true%
+ \relax
\endgroup}
\unexpanded\def\processisolatedwords#1#2%
@@ -1371,34 +1385,88 @@
\begingroup
\setbox\scratchbox\hbox{\settrialtypesetting#2{\savecurrentattributes{pic}}}%
\setbox\scratchbox\hbox{\restorecurrentattributes{pic}#1}%
- \ctxcommand{applytowords(\number\scratchbox,"\strippedcsname#2",true)}%
+ \clf_applytobox
+ method {word}%
+ box \scratchbox
+ command {\checkedstrippedcsname#2}%
+ nested true%
+ \relax
\endgroup}
-\unexpanded\def\processwords#1%
- {\processisolatedwords{#1}\processword}
-
-\let\processword\relax
-
-\unexpanded\def\applytosplitstringchar#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char")}}
-\unexpanded\def\applytosplitstringword#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word")}}
-\unexpanded\def\applytosplitstringline#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line")}}
-
-\unexpanded\def\applytosplitstringcharspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char",true)}}
-\unexpanded\def\applytosplitstringwordspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word",true)}}
-\unexpanded\def\applytosplitstringlinespaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line",true)}}
-
%D A variant:
\unexpanded\def\applytocharacters#1%
{\dontleavehmode
- \dowithnextbox{\ctxcommand{applytochars(\number\nextbox,"\strippedcsname#1",true)}}%
+ \dowithnextbox{\clf_applytobox
+ method {char}%
+ box \nextbox
+ command {\checkedstrippedcsname#1}%
+ nested true%
+ \relax}%
\hbox}
\unexpanded\def\applytowords#1%
{\dontleavehmode
- \dowithnextbox{\ctxcommand{applytowords(\number\nextbox,"\strippedcsname#1",true)}}%
+ \dowithnextbox{\clf_applytobox
+ method {word}%
+ box \nextbox
+ command {\checkedstrippedcsname#1}%
+ nested true%
+ \relax}%
\hbox}
+%D The old call:
+
+\unexpanded\def\processwords#1%
+ {\processisolatedwords{#1}\processword}
+
+\let\processword\relax
+
+\unexpanded\def\applytosplitstringchar#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {char}%
+ \relax}
+
+\unexpanded\def\applytosplitstringword#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {word}%
+ \relax}
+
+\unexpanded\def\applytosplitstringline#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {line}%
+ \relax}
+
+\unexpanded\def\applytosplitstringcharspaced#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {char}%
+ spaced true%
+ \relax}
+
+\unexpanded\def\applytosplitstringwordspaced#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {word}%
+ spaced true%
+ \relax}
+
+\unexpanded\def\applytosplitstringlinespaced#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {line}%
+ spaced true%
+ \relax}
+
%D \macros
%D {sbox}
%D
@@ -1795,7 +1863,7 @@
\box\d_syst_boxes_separator
\fi
\egroup
- \ctxcommand{hboxtovbox(\number\scratchbox)}%
+ \clf_hboxtovbox\scratchbox
\box\scratchbox
\endgroup}
@@ -1849,7 +1917,7 @@
\unexpanded\def\stophboxestohbox
{\egroup
- \ctxcommand{vboxlisttohbox(\number\scratchbox,\number\nextbox,\number\dimexpr\hboxestohboxslack)}%
+ \clf_vboxlisttohbox\scratchbox\nextbox\dimexpr\hboxestohboxslack\relax
\dontleavehmode
\unhbox\nextbox
\removeunwantedspaces
@@ -2024,7 +2092,7 @@
{\bgroup
\def\syst_boxes_process_indeed{#1}% #1 can be redefined halfway
\setbox\processbox\emptybox
- \doifnextbgroupelse\syst_boxes_process_yes\syst_boxes_process_nop}
+ \doifelsenextbgroup\syst_boxes_process_yes\syst_boxes_process_nop}
\def\syst_boxes_process_yes
{\dowithnextboxcs\syst_boxes_process_content\hbox}
@@ -2032,7 +2100,7 @@
\def\syst_boxes_process_content
{\removeunwantedspaces
\syst_boxes_process_indeed % takes \nextbox makes \processbox
- \doifnextbgroupelse\syst_boxes_process_yes\syst_boxes_process_nop}
+ \doifelsenextbgroup\syst_boxes_process_yes\syst_boxes_process_nop}
\unexpanded\def\syst_boxes_process_nop
{\removeunwantedspaces
@@ -2489,13 +2557,15 @@
\copy\csname\??stackbox#1:#2\endcsname
\fi}}
-\unexpanded\def\doifboxelse#1#2#3#4%
+\unexpanded\def\doifelsebox#1#2#3#4%
{\ifcsname\??stackbox#1:#2\endcsname
\ifvoid\csname\??stackbox#1:#2\endcsname#4\else#3\fi
\else
#4%
\fi}
+\let\doifboxelse\doifelsebox
+
%D This one is cheaper (the above is no longer used that much):
\installcorenamespace {boxstack}
@@ -2525,9 +2595,6 @@
\unexpanded\def\globalpushbox{\syst_boxes_push\global}
\unexpanded\def\globalpopbox {\syst_boxes_pop \global}
-% \unexpanded\def\pushbox#1{\ctxcommand{pushbox(\number#1)}}
-% \unexpanded\def\popbox #1{\ctxcommand{popbox(\number#1)}}
-
%D \macros
%D {removedepth, obeydepth}
%D
@@ -2776,12 +2843,16 @@
% {\dowithnextbox{\edef#1{\syst_boxes_nodestostring}}\hbox}
%
% \def\syst_boxes_nodestostring
-% {\ctxcommand{boxtostring(\number\nextbox)}}
+% {\clf_boxtostring\nextbox}
\unexpanded\def\nodestostring#1#2% more tolerant for #2=\cs
{\begingroup
\setbox\nextbox\hbox{#2}%
- \normalexpanded{\endgroup\edef\noexpand#1{\ctxcommand{boxtostring(\number\nextbox)}}}}
+ \normalexpanded{\endgroup\edef\noexpand#1{\clf_boxtostring\nextbox}}}
+
+%D Even more dirty:
+
+\let\hyphenatedhbox\hbox
\protect \endinput
diff --git a/tex/context/base/supp-mat.mkiv b/tex/context/base/supp-mat.mkiv
index f77ee3454..925f25cc4 100644
--- a/tex/context/base/supp-mat.mkiv
+++ b/tex/context/base/supp-mat.mkiv
@@ -53,6 +53,36 @@
\let\normalstartdmath \Ustartdisplaymath
\let\normalstopdmath \Ustopdisplaymath
+% \unexpanded\def\Ustartdisplaymath
+% {\ifinner
+% \ifhmode
+% \normalUstartmath
+% \let\Ustopdisplaymath\normalUstopmath
+% \else
+% \normalUstartdisplaymath
+% \let\Ustopdisplaymath\normalUstopdisplaymath
+% \fi
+% \else
+% \normalUstartdisplaymath
+% \let\Ustopdisplaymath\normalUstopdisplaymath
+% \fi}
+
+\unexpanded\def\Ucheckedstartdisplaymath
+ {\ifinner
+ \ifhmode
+ \normalUstartmath
+ \let\Ucheckedstopdisplaymath\normalUstopmath
+ \else
+ \normalUstartdisplaymath
+ \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
+ \fi
+ \else
+ \normalUstartdisplaymath
+ \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
+ \fi}
+
+\let\Ucheckedstopdisplaymath\relax
+
\def\normalmathaligntab{&} % \let\normalmathaligntab\aligntab does not work well in a let to & (a def works ok)
\let\normalsuper \Usuperscript % obsolete
@@ -60,8 +90,8 @@
\let\startimath \Ustartmath
\let\stopimath \Ustopmath
-\let\startdmath \Ustartdisplaymath
-\let\stopdmath \Ustopmath
+\let\startdmath \Ustartdisplaymath % \Ucheckedstartdisplaymath
+\let\stopdmath \Ustopdisplaymath % \Ucheckedstopdisplaymath
\unexpanded\def\mathematics#1{\relax \ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\displaymath#1{\noindent \ifmmode#1\else\normalstartdmath#1\normalstopdmath\fi}
diff --git a/tex/context/base/supp-ran.lua b/tex/context/base/supp-ran.lua
index 7997db8f6..4968e8cfc 100644
--- a/tex/context/base/supp-ran.lua
+++ b/tex/context/base/supp-ran.lua
@@ -10,23 +10,31 @@ if not modules then modules = { } end modules ['supp-ran'] = {
local report_system = logs.reporter("system","randomizer")
-local math = math
-local context, commands = context, commands
+local trace_random = false trackers.register("system.randomizer", function(v) trace_random = v end)
+local trace_random_mp = false trackers.register("system.randomizer.mp",function(v) trace_random_mp = v end)
-local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1
+local insert, remove = table.insert, table.remove
-local maxcount = 2^30-1 -- 1073741823
+local math = math
+local context = context
+local implement = interfaces.implement
-local function setrandomseedi(n,comment)
- if not n then
- -- n = 0.5 -- hack
- end
+local random = math.random
+local randomseed = math.randomseed
+local round = math.round
+local stack = { }
+local last = 1
+local maxcount = 2^30-1 -- 1073741823
+
+local function setrandomseedi(n)
if n <= 1 then
n = n * maxcount
+ elseif n < 1000 then
+ n = n * 1000
end
n = round(n)
- if false then
- report_system("setting seed to %s (%s)",n,comment or "normal")
+ if trace_random then
+ report_system("setting seed to %s",n)
end
randomseed(n)
last = random(0,maxcount) -- we need an initial value
@@ -34,40 +42,66 @@ end
math.setrandomseedi = setrandomseedi
-function commands.getrandomcounta(min,max)
+local function getrandomnumber(min,max)
last = random(min,max)
- context(last)
-end
-
-function commands.getrandomcountb(min,max)
- last = random(min,max)/65536
- context(last)
+ return last
end
-function commands.setrandomseed(n)
+local function setrandomseed(n)
last = n
setrandomseedi(n)
end
-function commands.getrandomseed(n)
- context(last)
+local function getrandomseed()
+ return last
+end
+
+local function getmprandomnumber()
+ last = random(0,4095)
+ if trace_random_mp then
+ report_system("using mp seed %s",last)
+ end
+ return last
end
-- maybe stack
-function commands.freezerandomseed(n)
- if seed == false or seed == nil then
- seed = last
- setrandomseedi(seed,"freeze",seed)
+local function pushrandomseed()
+ insert(stack,last)
+ if trace_random then
+ report_system("pushing seed %s",last)
end
- if n then
- randomseed(n)
+end
+
+local function reuserandomseed(n)
+ local seed = stack[#stack]
+ if seed then
+ if trace_random then
+ report_system("reusing seed %s",last)
+ end
+ randomseed(seed)
end
end
-function commands.defrostrandomseed()
- if seed ~= false then
- setrandomseedi(seed,"defrost",seed) -- was last (bug)
- seed = false
+local function poprandomseed()
+ local seed = remove(stack)
+ if seed then
+ if trace_random then
+ report_system("popping seed %s",seed)
+ end
+ randomseed(seed)
end
end
+
+-- todo: also open up in utilities.randomizer.*
+
+implement { name = "getrandomnumber", actions = { getrandomnumber, context }, arguments = { "integer", "integer" } }
+implement { name = "getrandomdimen", actions = { getrandomnumber, context }, arguments = { "dimen", "dimen" } }
+implement { name = "getrandomfloat", actions = { getrandomnumber, context }, arguments = { "number", "number" } }
+implement { name = "getmprandomnumber", actions = { getmprandomnumber, context } }
+implement { name = "setrandomseed", actions = { setrandomseed }, arguments = { "integer" } }
+implement { name = "getrandomseed", actions = { getrandomseed, context } }
+implement { name = "pushrandomseed", actions = { pushrandomseed } }
+implement { name = "poprandomseed", actions = { poprandomseed } }
+implement { name = "reuserandomseed", actions = { reuserandomseed } }
+
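The freeze/defrost pair becomes an explicit seed stack above. The following stand-alone sketch mimics the push/reuse/pop idea in plain Lua (using math.randomseed directly instead of the module's local state) to show why reusing a pushed seed replays the same random sequence:

-- stand-alone sketch of the seed stack idea, not the module code itself
local stack = { }

local function pushseed(seed)
    table.insert(stack,seed)
    math.randomseed(seed)
end

local function reuseseed()
    local seed = stack[#stack]
    if seed then
        math.randomseed(seed) -- restart the sequence from the saved seed
    end
end

local function popseed()
    local seed = table.remove(stack)
    if seed then
        math.randomseed(seed)
    end
end

pushseed(12345)
local a = math.random(0,100)
reuseseed()
local b = math.random(0,100)
print(a == b) -- true: the same sequence was replayed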
diff --git a/tex/context/base/supp-ran.mkiv b/tex/context/base/supp-ran.mkiv
index f5466a0e1..f7cfd6e73 100644
--- a/tex/context/base/supp-ran.mkiv
+++ b/tex/context/base/supp-ran.mkiv
@@ -18,13 +18,22 @@
\registerctxluafile{supp-ran}{1.001}
-\unexpanded\def\getrandomcount #1#2#3{#1=\ctxcommand{getrandomcounta(\number#2,\number#3)}\relax}
-\unexpanded\def\getrandomdimen #1#2#3{#1=\ctxcommand{getrandomcounta(\number\dimexpr#2,\number\dimexpr#3)}\scaledpoint\relax}
-\unexpanded\def\getrandomnumber#1#2#3{\edef#1{\ctxcommand{getrandomcounta(\number#2,\number#3)}}}
-\unexpanded\def\getrandomfloat #1#2#3{\edef#1{\ctxcommand{getrandomcountb(\number\dimexpr#2\points,\number\dimexpr#3\points)}}}
-\unexpanded\def\setrandomseed #1{\ctxcommand{setrandomseed(\number#1)}}
-\unexpanded\def\getrandomseed #1{\edef#1{\ctxcommand{getrandomseed()}}}
-\unexpanded\def\freezerandomseed {\ctxcommand{freezerandomseed()}}
-\unexpanded\def\defrostrandomseed {\ctxcommand{defrostrandomseed()}}
-
-\endinput
+\unprotect
+
+\unexpanded\def\getrandomcount #1#2#3{#1=\clf_getrandomnumber#2 #3\relax}
+\unexpanded\def\getrandomdimen #1#2#3{#1=\clf_getrandomdimen#2 #3 \scaledpoint\relax}
+\unexpanded\def\getrandomnumber#1#2#3{\edef#1{\clf_getrandomnumber#2 #3}}
+\unexpanded\def\getrandomfloat #1#2#3{\edef#1{\clf_getrandomfloat#2 #3}}
+\unexpanded\def\setrandomseed #1{\clf_setrandomseed#1\relax}
+\unexpanded\def\getrandomseed #1{\edef#1{\clf_getrandomseed}}
+\unexpanded\def\pushrandomseed {\clf_pushrandomseed}
+\unexpanded\def\poprandomseed {\clf_poprandomseed}
+\unexpanded\def\reuserandomseed {\clf_reuserandomseed} % within push/pop
+
+\let\freezerandomseed \pushrandomseed
+\let\defrostrandomseed\poprandomseed
+
+\def\randomnumber #1#2{\clf_getrandomnumber\numexpr#1\relax\numexpr#2\relax}
+\let\mprandomnumber \clf_getmprandomnumber
+
+\protect \endinput
diff --git a/tex/context/base/symb-imp-cc.mkiv b/tex/context/base/symb-imp-cc.mkiv
index 45ea97732..e753d695f 100644
--- a/tex/context/base/symb-imp-cc.mkiv
+++ b/tex/context/base/symb-imp-cc.mkiv
@@ -11,8 +11,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D LS/HH: This font has changed over time so there might be differences
+%D in rendering. The otf, ttf and type1 versions can differ too.
-\doiffontpresentelse{ccicons.otf} { % redone ctan version
+\doifelsefontpresent{ccicons.otf} { % redone ctan version
\definefontfeature
[creativecommons]
[mode=base,
diff --git a/tex/context/base/symb-ini.lua b/tex/context/base/symb-ini.lua
index 9586338be..6a4a18825 100644
--- a/tex/context/base/symb-ini.lua
+++ b/tex/context/base/symb-ini.lua
@@ -6,20 +6,27 @@ if not modules then modules = { } end modules ['symb-ini'] = {
license = "see context related readme files"
}
-local context, commands = context, commands
+local context = context
+local variables = interfaces.variables
-local variables = interfaces.variables
+fonts = fonts or { } -- brrrr
-fonts = fonts or { } -- brrrr
+local symbols = fonts.symbols or { }
+fonts.symbols = symbols
-local symbols = fonts.symbols or { }
-fonts.symbols = symbols
+local listitem = utilities.parsers.listitem
+local uselibrary = resolvers.uselibrary
local report_symbols = logs.reporter ("fonts","symbols")
local status_symbols = logs.messenger("fonts","symbols")
-local patterns = { "symb-imp-%s.mkiv", "symb-imp-%s.tex", "symb-%s.mkiv", "symb-%s.tex" }
-local listitem = utilities.parsers.listitem
+local patterns = {
+ "symb-imp-%s.mkiv",
+ "symb-imp-%s.tex",
+ -- obsolete:
+ "symb-%s.mkiv",
+ "symb-%s.tex"
+}
local function action(name,foundname)
-- context.startnointerference()
@@ -37,7 +44,7 @@ end
function symbols.uselibrary(name)
if name ~= variables.reset then
for name in listitem(name) do
- commands.uselibrary {
+ uselibrary {
name = name,
patterns = patterns,
action = action,
@@ -48,4 +55,8 @@ function symbols.uselibrary(name)
end
end
-commands.usesymbols = symbols.uselibrary
+interfaces.implement {
+ name = "usesymbols",
+ actions = symbols.uselibrary,
+ arguments = "string",
+}
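For symbols.uselibrary the candidate filenames come from the patterns table above. A small stand-alone sketch (plain string.format only; the real resolvers.uselibrary also locates the files in the TEXMF tree and runs the action) of how a symbol set name expands through those patterns:

-- illustrative only: expand a set name through the filename patterns
local patterns = {
    "symb-imp-%s.mkiv",
    "symb-imp-%s.tex",
    -- obsolete:
    "symb-%s.mkiv",
    "symb-%s.tex",
}

local function candidates(name)
    local t = { }
    for i=1,#patterns do
        t[i] = string.format(patterns[i],name)
    end
    return t
end

for _, filename in ipairs(candidates("cc")) do
    print(filename) -- symb-imp-cc.mkiv, symb-imp-cc.tex, symb-cc.mkiv, symb-cc.tex
end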
diff --git a/tex/context/base/symb-ini.mkiv b/tex/context/base/symb-ini.mkiv
index e957e68c1..40a0af6d6 100644
--- a/tex/context/base/symb-ini.mkiv
+++ b/tex/context/base/symb-ini.mkiv
@@ -74,9 +74,12 @@
\letvalue{\??symbolset}\empty
-\unexpanded\def\doifinsymbolsetelse#1#2{\ifcsname\??symbol#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\unexpanded\def\doifelseinsymbolset#1#2{\ifcsname\??symbol#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
\unexpanded\def\doifinsymbolset #1#2{\ifcsname\??symbol#1:#2\endcsname\expandafter\firstofoneargument \else\expandafter\gobbleoneargument \fi}
-\unexpanded\def\doifsymbolsetelse #1{\ifcsname\??symbolset#1\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\unexpanded\def\doifelsesymbolset #1{\ifcsname\??symbolset#1\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+
+\let\doifinsymbolsetelse\doifelseinsymbolset
+\let\doifsymbolsetelse \doifelsesymbolset
\def\symbolset#1{\csname\??symbolset\ifcsname\??symbolset#1\endcsname#1\fi\endcsname} % no [#1], to be used in commalists etc
@@ -216,7 +219,7 @@
\def\symb_fetch_second#1%
{\doifinsymbolset\empty\currentsymbol{\settrue\c_symb_found}}
-\def\doifsymboldefinedelse#1%
+\unexpanded\def\doifelsesymboldefined#1%
{\begingroup
\edef\currentsymbol{#1}%
\let\symb_fetch\symb_fetch_first
@@ -233,6 +236,8 @@
\fi
\fi}
+\let\doifsymboldefinedelse\doifelsesymboldefined
+
%D \macros
%D {setupsymbolset,startsymbolset}
%D
@@ -300,7 +305,7 @@
%D
%D \showsetup{usesymbols}
-\unexpanded\def\usesymbols[#1]{\ctxcommand{usesymbols("#1")}}
+\unexpanded\def\usesymbols[#1]{\clf_usesymbols{#1}}
%D As long as symbols are linked to levels or numbers, we can
%D also use the conversion mechanism, but in for instance the
diff --git a/tex/context/base/syst-aux.lua b/tex/context/base/syst-aux.lua
index 6b5e18d16..fff9bbb4c 100644
--- a/tex/context/base/syst-aux.lua
+++ b/tex/context/base/syst-aux.lua
@@ -11,42 +11,66 @@ if not modules then modules = { } end modules ['syst-aux'] = {
-- utfmatch(str,"(.?)(.*)$")
-- utf.sub(str,1,1)
-local commands, context = commands, context
-
-local settings_to_array = utilities.parsers.settings_to_array
-local format = string.format
+local tonumber = tonumber
local utfsub = utf.sub
-local P, S, C, Cc, Cs, Carg, lpegmatch, utf8char = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char
-
+local P, S, R, C, Cc, Cs, Carg, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match
-local setvalue = context.setvalue
-
-local pattern = C(utf8char^-1) * C(P(1)^0)
+local context = context
+local implement = interfaces.implement
+local formatters = string.formatters
+local setcatcode = tex.setcatcode
+local utf8character = lpeg.patterns.utf8character
+local settings_to_array = utilities.parsers.settings_to_array
+local setmacro = interfaces.setmacro
+
+local pattern = C(utf8character^-1) * C(P(1)^0)
+
+implement {
+ name = "getfirstcharacter",
+ arguments = "string",
+ actions = function(str)
+ local first, rest = lpegmatch(pattern,str)
+ setmacro("firstcharacter",first)
+ setmacro("remainingcharacters",rest)
+ end
+}
-function commands.getfirstcharacter(str)
- local first, rest = lpegmatch(pattern,str)
- setvalue("firstcharacter",first)
- setvalue("remainingcharacters",rest)
-end
+implement {
+ name = "thefirstcharacter",
+ arguments = "string",
+ actions = function(str)
+ local first, rest = lpegmatch(pattern,str)
+ context(first)
+ end
+}
-function commands.thefirstcharacter(str)
- local first, rest = lpegmatch(pattern,str)
- context(first)
-end
-function commands.theremainingcharacters(str)
- local first, rest = lpegmatch(pattern,str)
- context(rest)
-end
+implement {
+ name = "theremainingcharacters",
+ arguments = "string",
+ actions = function(str)
+ local first, rest = lpegmatch(pattern,str)
+ context(rest)
+ end
+}
-local pattern = C(utf8char^-1)
+local pattern = C(utf8character^-1)
+local ctx_doifelse = commands.doifelse
-function commands.doiffirstcharelse(chr,str)
- commands.doifelse(lpegmatch(pattern,str) == chr)
-end
+implement {
+ name = "doifelsefirstchar",
+ arguments = { "string", "string" },
+ actions = function(str,chr)
+ ctx_doifelse(lpegmatch(pattern,str) == chr)
+ end
+}
-function commands.getsubstring(str,first,last)
- context(utfsub(str,tonumber(first),tonumber(last)))
-end
+implement {
+ name = "getsubstring",
+ arguments = { "string", "string", "string" },
+ actions = function(str,first,last)
+ context(utfsub(str,tonumber(first),tonumber(last)))
+ end
+}
-- function commands.addtocommalist(list,item)
-- if list == "" then
@@ -78,21 +102,40 @@ end
-- end
-- end
-local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1)
+local pattern = (C((1-P("%"))^1) * Carg(1)) / function(n,d)
+ return formatters["%.0fsp"](d * tonumber(n)/100) end * P("%") * P(-1) -- .0 ?
--- commands.percentageof("10%",65536*10)
+-- percentageof("10%",65536*10)
-function commands.percentageof(str,dim)
- context(lpegmatch(pattern,str,1,dim) or str)
-end
+implement {
+ name = "percentageof",
+ arguments = { "string", "dimen" },
+ actions = function(str,dim)
+ context(lpegmatch(pattern,str,1,dim) or str)
+ end
+}
-- \gdef\setpercentdimen#1#2%
-- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
-local spaces = P(" ")^0/""
+local spaces = P(" ")^0 / ""
+local nohash = 1 - P("#")
+local digit = R("09")
+local double = P("##") / "#"
+local single = P("#")
+local sentinel = spaces * (nohash^1 / "\\%0")
+local sargument = (single * digit)^1
+local dargument = (double * digit)^1
-local pattern = Cs(
- ( P("global") / "\\global" )^0
+local usespaces = nil
+local texpreamble = nil
+
+local pattern = Cs( -- ^-1
+ ( P("spaces") / function() usespaces = true return "" end )^0
+ * spaces
+ * ( P("nospaces") / function() usespaces = false return "" end )^0
+ * spaces
+ * ( P("global") / "\\global" )^0
* spaces
* ( P("unexpanded") / "\\unexpanded" )^0
* spaces
@@ -102,14 +145,85 @@ local pattern = Cs(
* spaces
* ( P((1-S(" #"))^1) / "def\\csname %0\\endcsname" )
* spaces
- * Cs( (P("##")/"#" + P(1))^0 )
+ * (
+ -- (double * digit)^1 * sentinel^-1 * double^-1
+ -- + (single * digit)^1 * sentinel^-1 * single^-1
+ ( P("[") * dargument * P("]") + dargument)^1 * sentinel^-1 * double^-1
+ + ( P("[") * sargument * P("]") + sargument)^1 * sentinel^-1 * single^-1
+ + sentinel^-1 * (double+single)^-1
+ )
)
-function commands.thetexdefinition(str)
- context(lpegmatch(pattern,str))
+local ctx_dostarttexdefinition = context.dostarttexdefinition
+
+local function texdefinition_one(str)
+ usespaces = nil
+ texpreamble = lpegmatch(pattern,str)
+ if usespaces == true then
+ setcatcode(32,10) -- space
+ setcatcode(13, 5) -- endofline
+ elseif usespaces == false then
+ setcatcode(32, 9) -- ignore
+ setcatcode(13, 9) -- ignore
+ else
+ -- this is default
+ -- setcatcode(32,10) -- space
+ -- setcatcode(13, 9) -- ignore
+ end
+ ctx_dostarttexdefinition()
+end
+
+local function texdefinition_two()
+ context(texpreamble)
end
-local upper, lower = utf.upper, utf.lower
+implement { name = "texdefinition_one", actions = texdefinition_one, scope = "private", arguments = "string" }
+implement { name = "texdefinition_two", actions = texdefinition_two, scope = "private" }
+
+implement { name = "upper", arguments = "string", actions = { utf.upper, context } }
+implement { name = "lower", arguments = "string", actions = { utf.lower, context } }
+implement { name = "strip", arguments = "string", actions = { string.strip, context } } -- or utf.strip
+
+implement {
+ name = "converteddimen",
+ arguments = { "dimen", "string" },
+ actions = function(dimen,unit)
+ context(number.todimen(dimen,unit or "pt","%0.5f")) -- no unit appended (%F)
+ end
+}
+
+-- where, not really the best spot for this:
-function commands.upper(s) context(upper(s)) end
-function commands.lower(s) context(lower(s)) end
+implement {
+ name = "immediatemessage",
+ arguments = { "'message'", "string" },
+ actions = logs.status
+}
+
+implement {
+ name = "resettimer",
+ actions = function()
+ statistics.resettiming("whatever")
+ statistics.starttiming("whatever")
+ end
+}
+
+implement {
+ name = "elapsedtime",
+ actions = function()
+ statistics.stoptiming("whatever")
+ context(statistics.elapsedtime("whatever"))
+ end
+}
+
+local accuracy = table.setmetatableindex(function(t,k)
+ local v = formatters["%0." ..k .. "f"]
+ t[k] = v
+ return v
+end)
+
+implement {
+ name = "rounded",
+ arguments = "integer",
+ actions = function(n,m) context(accuracy[n](m)) end
+}
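The rounded helper above caches one formatter per precision through a lazy __index metamethod. A stand-alone sketch with plain setmetatable and string.format (assumed here instead of ConTeXt's table.setmetatableindex and string.formatters helpers):

-- illustrative sketch of the memoized precision formatters
local accuracy = setmetatable({ }, {
    __index = function(t,k)
        local f = function(n) return string.format("%0." .. k .. "f",n) end
        t[k] = f -- cache the formatter for this precision
        return f
    end
})

print(accuracy[2](math.pi)) -- 3.14
print(accuracy[4](math.pi)) -- 3.1416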
diff --git a/tex/context/base/syst-aux.mkiv b/tex/context/base/syst-aux.mkiv
index c7be461a3..bd9b1630e 100644
--- a/tex/context/base/syst-aux.mkiv
+++ b/tex/context/base/syst-aux.mkiv
@@ -11,13 +11,17 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D There are some references to \LUA\ variants here but these concern
-%D (often old) experiments, moved from local test modules to here,
-%D cleaned up, but not really used. After all it's not that urgent
-%D and replacing helpers is a delicate process. Don't depend on it.
+%D There are some references to \LUA\ variants here but these concern (often old)
+%D experiments, moved from local test modules to here, cleaned up, but not really
+%D used. After all it's not that urgent and replacing helpers is a delicate process.
+%D Don't depend on it.
\registerctxluafile{syst-aux}{1.001}
+% A dedicated primitive \ifvoidmacro\cs == \ifx\cs\empty is some 10% faster but
+% probably not that noticeable in practice. An \ifvoidtoks might make sense but we
+% don't test that often for it (and it's more work to implement in the engine).
+
%D This is a stripped down combination of:
%D
%D \startitemize
@@ -26,20 +30,18 @@
%D \item \type {syst-new.tex}
%D \stopitemize
%D
-%D We keep them around (for \MKII) so you can find comments,
-%D experiences, intermediate versions and cleaner variants
-%D there (and also non-\ETEX\ variants).
+%D We keep them around (for \MKII) so you can find comments, experiences,
+%D intermediate versions and cleaner variants there (and also non-\ETEX\ variants).
%D
-%D Contrary to the older files, we now assume that this one
-%D is used in \CONTEXT\ and therefore we might also assume that
-%D some basic functionality is available.
+%D Contrary to the older files, we now assume that this one is used in \CONTEXT\ and
+%D therefore we might also assume that some basic functionality is available.
%D
-%D The original files contain previous implementations and notes about
-%D performance. This file will be stripped down in due time.
-
-%D Some of the macros here were only used in the bibliography module. They
-%D have been be moved to a separate syst module since the bib module is no
-%D longer using them. Some more will go away.
+%D The original files contain previous implementations and notes about performance.
+%D This file will be stripped down in due time.
+%D
+%D Some of the macros here were only used in the bibliography module. They have been
+%D moved to a separate syst module since the bib module is no longer using them.
+%D Some more will go away.
\unprotect
@@ -72,13 +74,20 @@
% %def\expunded#1{\normalexpanded\expandafter{#1}}
% \def\expunded#1{\expandafter\empty#1} % used within an edef anyway
-
%D As we don't have namespace definers yet, we use a special one:
-\newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
+\ifdefined\c_syst_helpers_n_of_namespaces
+
+ % let's plug in a better error message
+
+\else
+
+ \newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
-\def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
-%def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+ \def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
+ %def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+
+\fi
\unexpanded\def\installsystemnamespace#1% maybe move this to syst-ini
{\ifcsname ??#1\endcsname
@@ -128,10 +137,16 @@
\newif\if!!doned \newif\if!!donee \newif\if!!donef
\def\!!zerocount {0} % alongside \zerocount
-\def\!!minusone {-1} % alongside \minusone
-\def\!!plusone {1} % alongside \plusone
-\def\!!plustwo {2} % alongside \plustwo
-\def\!!plusthree {3} % alongside \plusthree
+\def\!!minusone {-1} % ...
+\def\!!plusone {1} % ...
+\def\!!plustwo {2} % ...
+\def\!!plusthree {3} % ...
+\def\!!plusfour {4} % ...
+\def\!!plusfive {5} % ...
+\def\!!plussix {6} % ...
+\def\!!plusseven {7} % ...
+\def\!!pluseight {8} % ...
+\def\!!plusnine {9} % alongside \plusnine
\setnewconstant \uprotationangle 0
\setnewconstant\rightrotationangle 90
@@ -272,12 +287,14 @@
%D {\localnext} because we don't want clashes with \type
%D {\next}.
-\unexpanded\def\doifnextcharelse#1#2#3% #1 should not be {} !
+\unexpanded\def\doifelsenextchar#1#2#3% #1 should not be {} !
{\let\charactertoken=#1% = needed here
\def\m_syst_action_yes{#2}%
\def\m_syst_action_nop{#3}%
\futurelet\nexttoken\syst_helpers_inspect_next_character}
+\let\doifnextcharelse\doifelsenextchar
+
\def\syst_helpers_inspect_next_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_character
@@ -312,18 +329,21 @@
\let\syst_helpers_next_optional_character_token=[
-\unexpanded\def\doifnextoptionalelse#1#2%
+\unexpanded\def\doifelsenextoptional#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_optional_character}
-\unexpanded\def\doifnextoptionalcselse#1#2% \cs \cs (upto 10% faster)
+\unexpanded\def\doifelsenextoptionalcs#1#2% \cs \cs (up to 10% faster)
{\let\m_syst_action_yes#1%
\let\m_syst_action_nop#2%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_optional_character}
+\let\doifnextoptionalelse \doifelsenextoptional
+\let\doifnextoptionalcselse\doifelsenextoptionalcs
+
\def\syst_helpers_inspect_next_optional_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_optional_character
@@ -340,12 +360,21 @@
\let\syst_helpers_next_bgroup_character_token\bgroup
-\unexpanded\def\doifnextbgroupelse#1#2%
+\unexpanded\def\doifelsenextbgroup#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
+\unexpanded\def\doifelsenextbgroupcs#1#2%
+ {\let\m_syst_action_yes#1%
+ \let\m_syst_action_nop#2%
+ \let\if_next_blank_space_token\iffalse
+ \futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
+
+\let\doifnextbgroupelse \doifelsenextbgroup
+\let\doifnextbgroupcselse\doifelsenextbgroupcs
+
\def\syst_helpers_inspect_next_bgroup_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_bgroup_character
@@ -362,12 +391,14 @@
\let\syst_helpers_next_parenthesis_character_token(
-\unexpanded\def\doifnextparenthesiselse#1#2%
+\unexpanded\def\doifelsenextparenthesis#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_parenthesis_character}
+\let\doifnextparenthesiselse\doifelsenextparenthesis
+
\def\syst_helpers_inspect_next_parenthesis_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_parenthesis_character
@@ -384,16 +415,19 @@
%D The next one is handy in predictable situations:
-\unexpanded\def\doiffastoptionalcheckelse#1#2%
+\unexpanded\def\doifelsefastoptionalcheck#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\futurelet\nexttoken\syst_helpers_do_if_fast_optional_check_else}
-\unexpanded\def\doiffastoptionalcheckcselse#1#2% \cs \cs
+\unexpanded\def\doifelsefastoptionalcheckcs#1#2% \cs \cs
{\let\m_syst_action_yes#1%
\let\m_syst_action_nop#2%
\futurelet\nexttoken\syst_helpers_do_if_fast_optional_check_else}
+\let\doiffastoptionalcheckelse \doifelsefastoptionalcheck
+\let\doiffastoptionalcheckcselse\doifelsefastoptionalcheckcs
+
\def\syst_helpers_do_if_fast_optional_check_else
{\ifx\nexttoken\syst_helpers_next_optional_character_token
\expandafter\m_syst_action_yes
@@ -562,12 +596,12 @@
\suppressifcsnameerror\plusone
-\def\doifundefinedelse#1%
+\def\doifelseundefined#1%
{\ifcsname#1\endcsname
\expandafter\secondoftwoarguments\else\expandafter\firstoftwoarguments
\fi}
-\def\doifdefinedelse#1%
+\def\doifelsedefined#1%
{\ifcsname#1\endcsname
\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments
\fi}
@@ -582,6 +616,9 @@
\expandafter\firstofoneargument\else\expandafter\gobbleoneargument
\fi}
+\let\doifundefinedelse\doifelseundefined
+\let\doifdefinedelse \doifelsedefined
+
%D \macros
%D {letbeundefined}
%D
@@ -605,7 +642,7 @@
%D behavior in text and math mode, which was due to these grouping subtleties. We
%D therefore decided to use \type{\begingroup} instead of \type{\bgroup}.
-\unexpanded\def\doifalldefinedelse#1%
+\unexpanded\def\doifelsealldefined#1%
{\begingroup
\donetrue % we could use a reserved one and avoid the group
\processcommalist[#1]\syst_helpers_do_if_all_defined_else
@@ -615,6 +652,8 @@
\endgroup\expandafter\secondoftwoarguments
\fi}
+\let\doifalldefinedelse\doifelsealldefined
+
\def\syst_helpers_do_if_all_defined_else#1%
{\ifcsname#1\endcsname\else
\donefalse
@@ -678,7 +717,7 @@
%D
%D This time, the string is not expanded.
-\unexpanded\def\doifemptyelse#1%
+\unexpanded\def\doifelseempty#1%
{\def\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
\expandafter\firstoftwoarguments
@@ -686,6 +725,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifemptyelse\doifelseempty
+
\unexpanded\def\doifempty#1%
{\def\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
@@ -751,7 +792,7 @@
\def\syst_helpers_re_do_if_in_set {\expandafter\syst_helpers_do_check_if_item_in_set \m_syst_string_two,],\relax}
\def\syst_helpers_re_do_if_not_in_set {\expandafter\syst_helpers_do_check_if_item_not_in_set \m_syst_string_two,],\relax}
-\unexpanded\def\doifinsetelse#1% make this two step too
+\unexpanded\def\doifelseinset#1% make this two step too
{\edef\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
\expandafter\thirdofthreearguments
@@ -759,6 +800,8 @@
\expandafter\syst_helpers_do_if_in_set_else
\fi}
+\let\doifinsetelse\doifelseinset
+
\def\syst_helpers_do_if_in_set_else#1%
{\edef\m_syst_string_two{#1}%
\ifx\m_syst_string_two\empty
@@ -949,10 +992,12 @@
\fi\fi
#1#2}
-\unexpanded\def\doifcommonelse{\syst_helpers_do_do_if_common_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelsecommon{\syst_helpers_do_do_if_common_else\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifcommon {\syst_helpers_do_do_if_common_else\firstofoneargument \gobbleoneargument }
\unexpanded\def\doifnotcommon {\syst_helpers_do_do_if_common_else\gobbleoneargument \firstofoneargument }
+\let\doifcommonelse\doifelsecommon
+
%D \macros
%D {processcommalist,processcommacommand,quitcommalist,
%D processcommalistwithparameters}
@@ -1294,10 +1339,12 @@
\let\firstcharacter \empty
\let\remainingcharacters\empty
-\unexpanded\def\getfirstcharacter #1{\ctxcommand{getfirstcharacter(\!!bs#1\!!es)}}
-\unexpanded\def\doiffirstcharelse #1#2{\ctxcommand{doiffirstcharelse(\!!bs#1\!!es,\!!bs#2\!!es)}} % chr str
-\unexpanded\def\thefirstcharacter #1{\ctxcommand{thefirstcharacter(\!!bs#1\!!es)}}
-\unexpanded\def\theremainingcharacters#1{\ctxcommand{theremainingcharacters(\!!bs#1\!!es)}}
+\unexpanded\def\getfirstcharacter #1{\clf_getfirstcharacter{#1}}
+\unexpanded\def\doifelsefirstchar #1#2{\clf_doifelsefirstchar{#1}{#2}}
+\unexpanded\def\thefirstcharacter #1{\clf_thefirstcharacter{#1}}
+\unexpanded\def\theremainingcharacters#1{\clf_theremainingcharacters{#1}}
+
+\let\doiffirstcharelse\doifelsefirstchar
%D \macros
%D {doifinstringelse, doifincsnameelse}
@@ -1311,7 +1358,7 @@
\let\m_syst_sub_string\empty
-\unexpanded\def\doifinstringelse#1%
+\unexpanded\def\doifelseinstring#1%
{\edef\m_syst_sub_string{#1}% expand #1 here
\ifx\m_syst_sub_string\empty
\expandafter\thirdofthreearguments
@@ -1319,6 +1366,8 @@
\expandafter\syst_helpers_do_if_in_string_else_indeed
\fi}
+\let\doifinstringelse\doifelseinstring
+
\unexpanded\def\syst_helpers_do_if_in_string_else_indeed#1%
{\syst_helpers_do_if_in_string_else\m_syst_sub_string{#1}%
\expandafter\firstoftwoarguments
@@ -1381,13 +1430,15 @@
{\unless\if##2@}%
\expandafter\syst_helpers_do_do_if_in_csname_else#2#1@@\_e_o_s_}
-\unexpanded\def\doifincsnameelse#1#2%
+\unexpanded\def\doifelseincsname#1#2%
{\normalexpanded{\syst_helpers_do_if_in_csname_else{#1}}{#2}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifincsnameelse\doifelseincsname
+
%D \macros
%D {doifnumberelse,doifnumber,doifnotnumber}
%D
@@ -1403,13 +1454,15 @@
%D The macro accepts \type {123}, \type {abc}, \type {{}}, \type {\getal} and
%D \type {\the\count...}. This macro is a rather dirty one.
-\def\doifnumberelse#1% does not accept counters (fully expandable)
+\def\doifelsenumber#1% does not accept counters (fully expandable)
{\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
+\let\doifnumberelse\doifelsenumber
+
\def\doifnumber#1%
{\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
\expandafter\firstofoneargument
@@ -1434,10 +1487,10 @@
%D \stoptyping
\def\percentdimen#1#2% dimen percentage (with %)
- {\dimexpr\ctxcommand{percentageof("#2",\number#1)}\relax}
+ {\dimexpr\clf_percentageof{#2}\dimexpr#1\relax}
\unexpanded\def\setpercentdimen#1#2% dimen percentage (with %)
- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
+ {#1=\clf_percentageof{#2}\dimexpr#1\relax}
%D \macros
%D {makerawcommalist,
@@ -1457,7 +1510,7 @@
%D
%D \starttyping
%D \makerawcommalist[string,string,...]\stringlist
-%D \rawdoifinsetelse{string}{string,...}{...}{...}
+%D \rawdoifelseinset{string}{string,...}{...}{...}
%D \rawprocesscommalist[string,string,...]\commando
%D \rawprocessaction[x][a=>\a,b=>\b,c=>\c]
%D \stoptyping
@@ -1492,12 +1545,12 @@
\unexpanded\def\rawprocesscommacommand[#1]% not really needed
{\normalexpanded{\rawprocesscommalist[#1]}}
-% \def\rawdoifinsetelse#1#2{\doifinstringelse{,#1,}{,#2,}}
+% \def\rawdoifelseinset#1#2{\doifinstringelse{,#1,}{,#2,}}
% \def\rawdoifinset #1#2{\doifinstring {,#1,}{,#2,}}
\def\m_syst_two_commas{,,}
-\unexpanded\def\rawdoifinsetelse#1%
+\unexpanded\def\rawdoifelseinset#1%
{\edef\m_syst_sub_string{,#1,}% expand #1 here
\ifx\m_syst_sub_string\m_syst_two_commas
\expandafter\thirdofthreearguments
@@ -1505,6 +1558,8 @@
\expandafter\syst_helpers_raw_do_if_in_set_else
\fi}
+\let\rawdoifinsetelse\rawdoifelseinset
+
\unexpanded\def\syst_helpers_raw_do_if_in_set_else#1%
{\syst_helpers_do_if_in_string_else\m_syst_sub_string{,#1,}%
\expandafter\firstoftwoarguments
@@ -2170,7 +2225,7 @@
\unexpanded\def\dosingleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\firstargumenttrue#1}%
{\syst_helpers_single_empty_one_nop#1}}
@@ -2182,13 +2237,13 @@
\unexpanded\def\dodoubleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_double_empty_one_yes#1}%
{\syst_helpers_double_empty_one_nop#1}}
\def\syst_helpers_double_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\secondargumenttrue#1[{#2}]}%
{\syst_helpers_double_empty_two_nop#1{#2}}}
@@ -2212,19 +2267,19 @@
\unexpanded\def\dotripleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_triple_empty_one_yes#1}%
{\syst_helpers_triple_empty_one_nop#1}}
\def\syst_helpers_triple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_triple_empty_two_yes#1{#2}}%
{\syst_helpers_triple_empty_two_nop#1{#2}}}
\def\syst_helpers_triple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\thirdargumenttrue#1[{#2}][{#3}]}%
{\syst_helpers_triple_empty_three_nop#1{#2}{#3}}}
@@ -2260,25 +2315,25 @@
\unexpanded\def\doquadrupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quadruple_empty_one_yes#1}%
{\syst_helpers_quadruple_empty_one_nop#1}}
\def\syst_helpers_quadruple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quadruple_empty_two_yes#1{#2}}%
{\syst_helpers_quadruple_empty_two_nop#1{#2}}}
\def\syst_helpers_quadruple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quadruple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_quadruple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_quadruple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\fourthargumenttrue#1[{#2}][{#3}][{#4}]}%
{\syst_helpers_quadruple_empty_four_nop#1{#2}{#3}{#4}}}
@@ -2327,31 +2382,31 @@
\unexpanded\def\doquintupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_one_yes#1}%
{\syst_helpers_quintuple_empty_one_nop#1}}
\def\syst_helpers_quintuple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_two_yes#1{#2}}%
{\syst_helpers_quintuple_empty_two_nop#1{#2}}}
\def\syst_helpers_quintuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_quintuple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_quintuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_four_yes#1{#2}{#3}{#4}}%
{\syst_helpers_quintuple_empty_four_nop#1{#2}{#3}{#4}}}
\def\syst_helpers_quintuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\fifthargumenttrue#1[{#2}][{#3}][{#4}][{#5}]}%
{\syst_helpers_quintuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
@@ -2414,37 +2469,37 @@
\unexpanded\def\dosixtupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_one_yes#1}
{\syst_helpers_sixtuple_empty_one_nop#1}}
\def\syst_helpers_sixtuple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_two_yes#1{#2}}%
{\syst_helpers_sixtuple_empty_two_nop#1{#2}}}
\def\syst_helpers_sixtuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_sixtuple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_sixtuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_four_yes#1{#2}{#3}{#4}}%
{\syst_helpers_sixtuple_empty_four_nop#1{#2}{#3}{#4}}}
\def\syst_helpers_sixtuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_five_yes#1{#2}{#3}{#4}{#5}}%
{\syst_helpers_sixtuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
\def\syst_helpers_sixtuple_empty_five_yes#1#2#3#4#5[#6]%
{\fifthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\sixthargumenttrue#1[{#2}][{#3}][{#4}][{#5}][{#6}]}%
{\syst_helpers_sixtuple_empty_six_nop#1{#2}{#3}{#4}{#5}{#6}}}
@@ -2522,43 +2577,43 @@
\unexpanded\def\doseventupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_one_yes#1}%
{\syst_helpers_seventuple_empty_one_nop#1}}
\def\syst_helpers_seventuple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_two_yes#1{#2}}%
{\syst_helpers_seventuple_empty_two_nop#1{#2}}}
\def\syst_helpers_seventuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_seventuple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_seventuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_four_yes#1{#2}{#3}{#4}}%
{\syst_helpers_seventuple_empty_four_nop#1{#2}{#3}{#4}}}
\def\syst_helpers_seventuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_five_yes#1{#2}{#3}{#4}{#5}}%
{\syst_helpers_seventuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
\def\syst_helpers_seventuple_empty_five_yes#1#2#3#4#5[#6]%
{\fifthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_six_yes#1{#2}{#3}{#4}{#5}{#6}}%
{\syst_helpers_seventuple_empty_six_nop#1{#2}{#3}{#4}{#5}{#6}}}
\def\syst_helpers_seventuple_empty_six_yes#1#2#3#4#5#6[#7]%
{\sixthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\seventhargumenttrue#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}]}%
{\syst_helpers_seventuple_empty_seven_nop#1{#2}{#3}{#4}{#5}{#6}{#7}}}
@@ -2714,13 +2769,13 @@
\unexpanded\def\complexorsimple#1%
{% \relax % prevents lookahead, brrr
- \doifnextoptionalelse
+ \doifelsenextoptional
{\firstargumenttrue \csname\s!complex\strippedcsname#1\endcsname}
{\firstargumentfalse\csname\s!simple \strippedcsname#1\endcsname}}
\unexpanded\def\complexorsimpleempty#1%
{% \relax % prevents lookahead, brrr
- \doifnextoptionalelse
+ \doifelsenextoptional
{\firstargumenttrue \csname\s!complex\strippedcsname#1\endcsname}
{\firstargumentfalse\csname\s!complex\strippedcsname#1\endcsname[]}}
@@ -2732,10 +2787,10 @@
%D in protection.
\unexpanded\def\syst_helpers_complex_or_simple#1#2%
- {\doifnextoptionalelse{\firstargumenttrue#1}{\firstargumentfalse#2}}
+ {\doifelsenextoptional{\firstargumenttrue#1}{\firstargumentfalse#2}}
\unexpanded\def\syst_helpers_complex_or_simple_empty#1%
- {\doifnextoptionalelse{\firstargumenttrue#1}{\firstargumentfalse#1[]}}
+ {\doifelsenextoptional{\firstargumenttrue#1}{\firstargumentfalse#1[]}}
\unexpanded\def\definecomplexorsimple#1%
{\unexpanded\edef#1{\syst_helpers_complex_or_simple
@@ -2936,7 +2991,14 @@
\def\fifthofsixarguments #1#2#3#4#5#6{#5}
\def\sixthofsixarguments #1#2#3#4#5#6{#6}
-\unexpanded\def\firstofoneunexpanded#1{#1}
+\unexpanded\def\firstofoneunexpanded #1{#1}
+
+\unexpanded\def\firstoftwounexpanded #1#2{#1}
+\unexpanded\def\secondoftwounexpanded #1#2{#2}
+
+\unexpanded\def\firstofthreeunexpanded #1#2#3{#1}
+\unexpanded\def\secondofthreeunexpanded#1#2#3{#2}
+\unexpanded\def\thirdofthreeunexpanded #1#2#3{#3}
%D \macros
%D {globalletempty,letempty,
@@ -3021,7 +3083,7 @@
%D
%D A fully expandable message:
-\def\immediatemessage#1{\ctxlua{logs.status("message","#1")}}
+\let\immediatemessage\clf_immediatemessage % {} mandate
%D \macros
%D {debuggerinfo}
@@ -3237,6 +3299,25 @@
% [here #oeps: \the\scratchcounter]
% \stoptexdefinition
+% \bgroup \obeylines
+%
+% \global\let\stoptexdefinition\relax
+%
+% \unexpanded\gdef\starttexdefinition%
+% {\bgroup%
+% \obeylines%
+% \syst_helpers_start_tex_definition_one}
+%
+% \gdef\syst_helpers_start_tex_definition_one#1
+% {\catcode\endoflineasciicode\ignorecatcode%
+% \syst_helpers_start_tex_definition_two{#1}}
+%
+% \gdef\syst_helpers_start_tex_definition_two#1#2\stoptexdefinition%
+% {\egroup%
+% \ctxcommand{thetexdefinition("#1")}{#2}}
+%
+% \egroup
+
\bgroup \obeylines
\global\let\stoptexdefinition\relax
@@ -3244,19 +3325,19 @@
\unexpanded\gdef\starttexdefinition%
{\bgroup%
\obeylines%
- \syst_helpers_start_tex_definition_one}
+ \syst_helpers_start_tex_definition}
-\gdef\syst_helpers_start_tex_definition_one#1
+\gdef\syst_helpers_start_tex_definition#1
{\catcode\endoflineasciicode\ignorecatcode%
- \syst_helpers_start_tex_definition_two{#1}}
+ \clf_texdefinition_one{#1}}
-\gdef\syst_helpers_start_tex_definition_two#1#2\stoptexdefinition%
+\gdef\dostarttexdefinition#1\stoptexdefinition%
{\egroup%
- \ctxcommand{thetexdefinition("#1")}{#2}}
+ \clf_texdefinition_two{#1}}
\egroup
-\unexpanded\def\texdefinition#1{\csname\ifcsname#1\endcsname#1\else donothing\fi\endcsname}
+\unexpanded\def\texdefinition#1{\csname\ifcsname#1\endcsname#1\else donothing\fi\endcsname} % todo: a nop cs: char 0 or some corenamespace
% This is a first variant, more might be added:
@@ -3333,14 +3414,14 @@
\def\syst_helpers_do_do_do_increment#1,#2){\dodoglobal\edef#1{\the\numexpr\ifdefined#1\ifx#1\relax\else#1\fi\fi+#2\relax}}
\def\syst_helpers_do_do_do_decrement#1,#2){\dodoglobal\edef#1{\the\numexpr\ifdefined#1\ifx#1\relax\else#1\fi\fi-#2\relax}}
-\def\syst_helpers_do_do_increment(#1{\doifnextcharelse,{\syst_helpers_do_do_do_increment#1}{\syst_helpers_do_do_do_increment#1,\plusone}}
-\def\syst_helpers_do_do_decrement(#1{\doifnextcharelse,{\syst_helpers_do_do_do_decrement#1}{\syst_helpers_do_do_do_decrement#1,\plusone}}
+\def\syst_helpers_do_do_increment(#1{\doifelsenextchar,{\syst_helpers_do_do_do_increment#1}{\syst_helpers_do_do_do_increment#1,\plusone}}
+\def\syst_helpers_do_do_decrement(#1{\doifelsenextchar,{\syst_helpers_do_do_do_decrement#1}{\syst_helpers_do_do_do_decrement#1,\plusone}}
\unexpanded\def\fastincrement#1{\dodoglobal\edef#1{\the\numexpr#1+\plusone \relax}}
\unexpanded\def\fastdecrement#1{\dodoglobal\edef#1{\the\numexpr#1+\minusone\relax}}
-\unexpanded\def\increment{\doifnextcharelse(\syst_helpers_do_do_increment\syst_helpers_do_increment}
-\unexpanded\def\decrement{\doifnextcharelse(\syst_helpers_do_do_decrement\syst_helpers_do_decrement}
+\unexpanded\def\increment{\doifelsenextchar(\syst_helpers_do_do_increment\syst_helpers_do_increment}
+\unexpanded\def\decrement{\doifelsenextchar(\syst_helpers_do_do_decrement\syst_helpers_do_decrement}
\unexpanded\def\incrementvalue#1{\expandafter\increment\csname#1\endcsname}
\unexpanded\def\decrementvalue#1{\expandafter\decrement\csname#1\endcsname}
@@ -3384,6 +3465,7 @@
{\expandafter\syst_helpers_checked_stripped_csname\string#1}
\def\syst_helpers_checked_stripped_csname#1%
+ %{\ifx#1\letterbackslash\else#1\fi}
{\if\noexpand#1\letterbackslash\else#1\fi}
%D \macros
@@ -4016,13 +4098,15 @@
\def\syst_helpers_check_if_assignment_else#1=#2#3\_e_o_p_{\if#2@}%
-\unexpanded\def\doifassignmentelse#1% expandable
+\unexpanded\def\doifelseassignment#1% expandable
{\expandafter\syst_helpers_check_if_assignment_else\detokenize{#1}=@@\_e_o_p_
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
+\let\doifassignmentelse\doifelseassignment
+
\newif\ifassignment
\unexpanded\def\docheckassignment#1%
@@ -4095,7 +4179,7 @@
%D
%D Watch the one level expansion of the second argument.
-\unexpanded\def\doifmeaningelse#1#2%
+\unexpanded\def\doifelsemeaning#1#2%
{\edef\m_syst_string_one{\meaning#1}%
\def \m_syst_string_two{#2}%
\edef\m_syst_string_two{\meaning\m_syst_string_two}%
@@ -4105,6 +4189,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifmeaningelse\doifelsemeaning
+
%D \macros
%D {doifsamestringselse,doifsamestring,doifnotsamestring}
%D
@@ -4122,10 +4208,12 @@
\edef\m_syst_string_two{\detokenize\expandafter{\normalexpanded{#4}}}%
\ifx\m_syst_string_one\m_syst_string_two\expandafter#1\else\expandafter#2\fi}
-\unexpanded\def\doifsamestringelse{\syst_helpers_if_samestring_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelsesamestring{\syst_helpers_if_samestring_else\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifsamestring {\syst_helpers_if_samestring_else\firstofoneargument \gobbleoneargument }
\unexpanded\def\doifnotsamestring {\syst_helpers_if_samestring_else\gobbleoneargument \firstofoneargument }
+\let\doifsamestringelse\doifelsesamestring
+
%D \macros
%D {ConvertToConstant,ConvertConstantAfter}
%D
@@ -4187,7 +4275,7 @@
\unexpanded\def\CheckConstantAfter#1#2%
{\expandafter\convertargument\v!prefix!\to\ascii
\convertargument#1\to#2\relax
- \doifinstringelse\ascii{#2}
+ \doifelseinstring\ascii{#2}
{\expandafter\convertargument#1\to#2}
{}}
@@ -4426,6 +4514,18 @@
{\m_syst_helpers_handle_group_after
\egroup}%
+\unexpanded\def\syst_helpers_handle_group_pickup#1#2#3% no inner group (so no kerning interference)
+ {\bgroup
+ \def\m_syst_helpers_handle_group_before{#1}%
+ \def\m_syst_helpers_handle_group_after {#2\egroup#3}%
+ \afterassignment\m_syst_helpers_handle_group_pickup_before
+ \let\next=}
+
+\def\m_syst_helpers_handle_group_pickup_before
+ {\bgroup
+ \aftergroup\m_syst_helpers_handle_group_after
+ \m_syst_helpers_handle_group_before}
+
\unexpanded\def\syst_helpers_handle_group_nop
{\ifnum\currentgrouptype=\semisimplegroupcode
\expandafter\syst_helpers_handle_group_nop_a
@@ -4458,10 +4558,13 @@
%D implementation became:
\unexpanded\def\groupedcommand#1#2%
- {\doifnextbgroupelse{\syst_helpers_handle_group_normal{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
+ {\doifelsenextbgroup{\syst_helpers_handle_group_normal{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
\unexpanded\def\simplegroupedcommand#1#2%
- {\doifnextbgroupelse{\syst_helpers_handle_group_simple{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
+ {\doifelsenextbgroup{\syst_helpers_handle_group_simple{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
+
+\unexpanded\def\pickupgroupedcommand#1#2#3%
+ {\doifelsenextbgroup{\syst_helpers_handle_group_pickup{#1}{#2}{#3}}{\syst_helpers_handle_group_nop{#1}{#2}}}
%D Users should be aware of the fact that grouping can
%D interfere with ones paragraph settings that are executed
@@ -4555,14 +4658,14 @@
\newtoks\AfterPar
\def\redowithpar\par
- {\doifnextcharelse\par\redowithpar\dodowithpar}%
+ {\doifelsenextchar\par\redowithpar\dodowithpar}%
\def\dowithpar#1#2%
{\def\dodowithpar##1\par{#1##1#2}%
\redowithpar\par}
\def\redogotopar\par
- {\doifnextcharelse\par\redogotopar\dodogotopar}%
+ {\doifelsenextchar\par\redogotopar\dodogotopar}%
\def\dogotopar#1%
{\def\dodogotopar{#1}%
@@ -4626,7 +4729,7 @@
\unexpanded\def\dowithpargument#1%
{\def\syst_helpers_next_par##1 \par{#1{##1}}%
\def\syst_helpers_next_arg##1{#1{##1}}%
- \doifnextbgroupelse\syst_helpers_next_arg{\doifnextcharelse\par{#1{}}\syst_helpers_next_par}}
+ \doifelsenextbgroup\syst_helpers_next_arg{\doifelsenextchar\par{#1{}}\syst_helpers_next_par}}
%D The \type{p} in the previous command stands for paragraph.
%D When we want to act upon words we can use the \type{w}
@@ -4656,7 +4759,7 @@
\unexpanded\def\dowithwargument#1%
{\def\syst_helpers_next_war##1 {#1{##1}}%
\def\syst_helpers_next_arg##1{#1{##1}}%
- \doifnextbgroupelse\syst_helpers_next_arg\syst_helpers_next_war}
+ \doifelsenextbgroup\syst_helpers_next_arg\syst_helpers_next_war}
%D \macros
%D {dorepeat,dorepeatwithcommand}
@@ -4751,11 +4854,13 @@
\expandafter\firstoftwoarguments
\fi}
-\def\doifstringinstringelse#1#2%
+\def\doifelsestringinstring#1#2%
{\expandafter\def\expandafter\syst_helpers_if_instring_else\expandafter##\expandafter1#1##2##3\_e_o_s_
{\syst_helpers_if_instring_else_indeed##2}%
\expandafter\expandafter\expandafter\syst_helpers_if_instring_else\expandafter#2#1@@\_e_o_s_}
+\let\doifstringinstringelse\doifelsestringinstring
+
%D \macros
%D {appendtoks,prependtoks,appendtoksonce,prependtoksonce,
%D doifintokselse,flushtoks,dotoks}
@@ -4801,19 +4906,21 @@
\def\syst_helpers_append_toks_once#1\to#2%
{\let\m_syst_helpers_scratch#2%
\t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- \doifintokselse\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_append_toks_indeed}
+ \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_append_toks_indeed}
\def\syst_helpers_prepend_toks_once#1\to#2%
{\let\m_syst_helpers_scratch#2%
\t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- \doifintokselse\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_prepend_toks_indeed}
+ \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_prepend_toks_indeed}
%D The test macro:
-\unexpanded\def\doifintokselse#1#2% #1 and #2 are toks
+\unexpanded\def\doifelseintoks#1#2% #1 and #2 are toks
{\edef\asciia{\detokenize\expandafter{\the#1}}%
\edef\asciib{\detokenize\expandafter{\the#2}}%
- \doifstringinstringelse\asciia\asciib}
+ \doifelsestringinstring\asciia\asciib}
+
+\let\doifintokselse\doifelseintoks
%D A nice one too:
@@ -5007,24 +5114,26 @@
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}
\unexpanded\def\addtocommalist#1#2% {item} \cs
- {\rawdoifinsetelse{#1}#2\resetglobal
+ {\rawdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{\ifx#2\empty\else#2,\fi#1}}}
\unexpanded\def\pretocommalist#1#2% {item} \cs
- {\rawdoifinsetelse{#1}#2\resetglobal
+ {\rawdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}}
-\unexpanded\def\robustdoifinsetelse#1#2%
+\unexpanded\def\robustdoifelseinset#1#2%
{\edef\m_syst_string_one{\detokenize\expandafter{\normalexpanded{#1}}}%
\edef\m_syst_string_two{\detokenize\expandafter{\normalexpanded{#2}}}%
- \rawdoifinsetelse\m_syst_string_one\m_syst_string_two}
+ \rawdoifelseinset\m_syst_string_one\m_syst_string_two}
+
+\let\robustdoifinsetelse\robustdoifelseinset
\unexpanded\def\robustaddtocommalist#1#2% {item} \cs
- {\robustdoifinsetelse{#1}#2\resetglobal
+ {\robustdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{\ifx#2\empty\else#2,\fi#1}}}
\unexpanded\def\robustpretocommalist#1#2% {item} \cs
- {\robustdoifinsetelse{#1}#2\resetglobal
+ {\robustdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}}
\unexpanded\def\xsplitstring#1#2% \cs {str}
@@ -5038,7 +5147,7 @@
\def\acleanedupcommalist#1,,#2\relax{#1}
\unexpanded\def\removefromcommalist#1#2% to be sped up
- {\rawdoifinsetelse{#1}#2%
+ {\rawdoifelseinset{#1}#2%
{\normalexpanded{\xsplitstring\noexpand#2{#1}}%
\dodoglobal\edef#2%
{\ifx\m_syst_string_one\empty
@@ -5276,7 +5385,7 @@
\let\popmacro \localpopmacro
%D \macros
-%D {setlocalhsize}
+%D {setlocalhsize,distributedhsize}
%D
%D Sometimes we need to work with the \type{\hsize} that is
%D corrected for indentation and left and right skips. The
@@ -5295,7 +5404,7 @@
\newdimen\localhsize
\unexpanded\def\setlocalhsize % don't change !
- {\doifnextoptionalelse
+ {\doifelsenextoptional
\syst_helpers_set_local_hsize_yes
\syst_helpers_set_local_hsize_nop}
@@ -5314,6 +5423,12 @@
\fi
\relax}
+\def\distributedhsize#1#2#3%
+ {\dimexpr(#1-\numexpr#3-1\relax\dimexpr#2\relax)/#3\relax}
+
+\def\hsizefraction#1#2%
+ {\dimexpr#1/#2\relax}
+
%D \macros
%D {doifvalue,doifnotvalue,doifelsevalue,
%D doifnothing,doifsomething,doifelsenothing,
@@ -5376,7 +5491,7 @@
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doifsomethingelse#1%
+\unexpanded\def\doifelsesomething#1%
{\edef\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
@@ -5408,18 +5523,25 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifvalueelse \doifelsevalue
+\let\doifnothingelse \doifelsenothing
+\let\doifsomethingelse \doifelsesomething
+\let\doifvaluenothingelse\doifelsevaluenothing
+
%D \macros
%D {doifemptyelsevalue, doifemptyvalue, doifnotemptyvalue}
%D
%D Also handy:
-\def\doifemptyelsevalue#1%
+\def\doifelseemptyvalue#1%
{\expandafter\ifx\csname#1\endcsname\empty
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifemptyvalueelse\doifelseemptyvalue
+
\def\doifemptyvalue#1%
{\expandafter\ifx\csname#1\endcsname\empty
\expandafter\firstofoneargument
@@ -5449,10 +5571,12 @@
\processcommalist[#3]\syst_helpers_do_common_check_all
\ifdone\expandafter#1\else\expandafter#2\fi}
-\unexpanded\def\doifallcommonelse{\syst_helpers_do_if_all_common_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelseallcommon{\syst_helpers_do_if_all_common_else\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifallcommon {\syst_helpers_do_if_all_common_else\firstofonearguments\gobbleoneargument }
\unexpanded\def\doifnotallcommon {\syst_helpers_do_if_all_common_else\gobbleoneargument \firstofonearguments }
+\let\doifallcommonelse\doifelseallcommon
+
%D \macros
%D {DOIF,DOIFELSE,DOIFNOT}
%D
@@ -5656,13 +5780,15 @@
\def\syst_helpers_if_some_space_else#1 #2#3\_e_o_s_{\if\noexpand#2@}
-\def\doifsomespaceelse#1% % #2#3%
+\def\doifelsesomespace#1% % #2#3%
{\syst_helpers_if_some_space_else#1 @ @\_e_o_s_ % #3\else#2\fi}
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
+\let\doifsomespaceelse\doifelsesomespace
+
%D \macros
%D {processseparatedlist}
%D
@@ -5912,11 +6038,11 @@
%D These macros are sort of obsolete as we never use uppercase this
%D way. But nevertheless we provide them:
-\def\utfupper#1{\ctxcommand{upper(\!!bs#1\!!es)}} % expandable
-\def\utflower#1{\ctxcommand{lower(\!!bs#1\!!es)}} % expandable
+\def\utfupper#1{\clf_upper{#1}} % expandable
+\def\utflower#1{\clf_lower{#1}} % expandable
-\unexpanded\def\uppercasestring#1\to#2{\dodoglobal\edef#2{\ctxcommand{upper(\!!bs#1\!!es)}}}
-\unexpanded\def\lowercasestring#1\to#2{\dodoglobal\edef#2{\ctxcommand{lower(\!!bs#1\!!es)}}}
+\unexpanded\def\uppercasestring#1\to#2{\dodoglobal\edef#2{\clf_upper{#1}}}
+\unexpanded\def\lowercasestring#1\to#2{\dodoglobal\edef#2{\clf_lower{#1}}}
%D \macros
%D {handletokens}
@@ -6112,24 +6238,20 @@
% \def\threedigitrounding#1%
% {\expandafter\expandafter\expandafter\dothreedigitrounding\expandafter\WITHOUTPT\the\dimexpr#1\points+.0005\points\relax0000.*00\relax}
-\def\integerrounding #1{\cldcontext{"\letterpercent 0.0f",#1}}
-\def\onedigitrounding #1{\cldcontext{"\letterpercent 0.1f",#1}}
-\def\twodigitrounding #1{\cldcontext{"\letterpercent 0.2f",#1}}
-\def\threedigitrounding#1{\cldcontext{"\letterpercent 0.3f",#1}}
+\def\integerrounding #1{\clf_rounded\zerocount\numexpr#1\relax}
+\def\onedigitrounding #1{\clf_rounded\plusone \numexpr#1\relax}
+\def\twodigitrounding #1{\clf_rounded\plustwo \numexpr#1\relax}
+\def\threedigitrounding#1{\clf_rounded\plusthree\numexpr#1\relax}
%D \macros
%D {processcontent}
%D
-%D This is the first occasion where \TEX\ and \ETEX\ are no
-%D longer compatible, although in many cases things go ok.
-%D Beware of verbatim, i.e. catcode changes.
+%D This macro is first used in the tabulation macros.
%D
%D \starttyping
%D \unexpanded\def\starthans%
%D {\processcontent{stophans}\test{\message{\test}\wait}}
%D \stoptyping
-%D
-%D This macro is first used in the tabulation macros.
\unexpanded\def\processcontent#1%
{\begingroup\expandafter\syst_helpers_process_content\csname#1\endcsname}
@@ -6195,7 +6317,7 @@
%D Not that fast I guess, but here's a way to test for token
%D registers being empty.
-\unexpanded\def\doifsometokselse#1%
+\unexpanded\def\doifelsesometoks#1%
{\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
\ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
@@ -6203,6 +6325,8 @@
\expandafter\firstoftwoarguments
\fi}
+\let\doifsometokselse\doifelsesometoks
+
\unexpanded\def\doifsometoks#1%
{\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
\ifx\m_syst_string_one\empty
@@ -6241,9 +6365,11 @@
\unexpanded\def\stopstrictinspectnextcharacter
{\let\syst_helpers_inspect_next_character\syst_helpers_normal_inspect_next_character}
-\def\strictdoifnextoptionalelse#1#2%
+\unexpanded\def\strictdoifelsenextoptional#1#2%
{\startstrictinspectnextcharacter
- \doifnextcharelse[{\stopstrictinspectnextcharacter#1}{\stopstrictinspectnextcharacter#2}}
+ \doifelsenextchar[{\stopstrictinspectnextcharacter#1}{\stopstrictinspectnextcharacter#2}}
+
+\let\strictdoifnextoptionalelse\strictdoifelsenextoptional
%D \macros
%D {gobblespacetokens}
@@ -6436,9 +6562,11 @@
%D
%D This is a dirty one: we simply append a unit and discard it when needed.
-\def\doifdimensionelse#1%
+\def\doifelsedimension#1%
{\afterassignment\syst_helpers_if_dimension_else\scratchdimen#1pt\relax}
+\let\doifdimensionelse\doifelsedimension
+
\def\syst_helpers_if_dimension_else#1%
{\ifx#1\relax
\expandafter\secondoftwoarguments
@@ -6467,7 +6595,9 @@
\installsystemnamespace{dimencheckb}
\installsystemnamespace{dimencheckc}
-\def\doifdimenstringelse#1{\normalexpanded{\noexpand\dodimenteststageone#1}\empty\empty]}
+\def\doifelsedimenstring#1{\normalexpanded{\noexpand\dodimenteststageone#1}\empty\empty]}
+
+\let\doifdimenstringelse\doifelsedimenstring
\def\dodimenteststageone #1#2{\csname \??dimenchecka\ifcsname \??dimenchecka#2\endcsname#2\else x\fi\endcsname#2}
\def\dodimenteststagetwo #1#2{\csname \??dimencheckb\ifcsname \??dimencheckb#2\endcsname#2\else x\fi\endcsname#2}
@@ -6590,10 +6720,12 @@
% sometimes handy:
-\unexpanded\def\doifhasspaceelse#1%
+\unexpanded\def\doifelsehasspace#1%
{\edef\m_syst_string_one{#1}%
\normalexpanded{\syst_helpers_if_has_space_else#1\space}\empty\relax}
+\let\doifhasspaceelse\doifelsehasspace
+
\unexpanded\def\syst_helpers_if_has_space_else#1 #2#3\relax % \space\empty\relax
{\ifx\m_syst_string_one\space
\expandafter\firstoftwoarguments
@@ -6602,7 +6734,7 @@
\else
\doubleexpandafter\firstoftwoarguments
\fi\fi}
-
+
% this will replace loadfile once and alike !!! todo
\installsystemnamespace{flag}
@@ -6621,6 +6753,8 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
+\let\doifflaggedelse\doifelseflagged
+
\def\doifnotflagged#1%
{\expandafter\ifx\csname\??flag#1\endcsname\relax
\expandafter\firstofoneargument
@@ -6629,10 +6763,10 @@
\else
\doubleexpandafter\firstofoneargument
\fi\fi}
-
+
\unexpanded\def\inheritparameter[#1]#2[#3]#4[#5]% tag tokey fromkey
{\expandafter\def\csname#1#3\expandafter\endcsname\expandafter{\csname#1#5\endcsname}}
-
+
\def\syst_helpers_if_non_zero_positive_else#1#2\end % #3#4%
{\ifx#1\relax
\ifcase\scratchcounter
@@ -6647,9 +6781,11 @@
\expandafter\secondoftwoarguments
\fi}
-\def\doifnonzeropositiveelse#1%
+\def\doifelsenonzeropositive#1%
{\begingroup\afterassignment\syst_helpers_if_non_zero_positive_else\scratchcounter=0#1\relax\empty\end}
-
+
+\let\doifnonzeropositiveelse\doifelsenonzeropositive
+
% here ?
\unexpanded\def\dosetrawvalue #1#2#3{\expandafter \def\csname#1#2\endcsname{#3}}
@@ -6693,8 +6829,8 @@
% \fi
% \def\elapsedseconds{\expandafter\withoutpt\the\dimexpr\elapsedtime sp\relax}
-\def\resettimer {\ctxcommand{resettimer()}}
-\def\elapsedtime {\ctxcommand{elapsedtime()}}
+\let\resettimer \clf_resettimer
+\let\elapsedtime \clf_elapsedtime
\let\elapsedseconds \elapsedtime
\newcount\c_syst_helpers_test_feature_n
@@ -6808,7 +6944,7 @@
%D \typebuffer \getbuffer
\unexpanded\def\ignoreimplicitspaces
- {\doifnextcharelse\relax\relax\relax}
+ {\doifelsenextchar\relax\relax\relax}
%D \macros
%D {processwords}
@@ -6871,7 +7007,7 @@
\def\syst_helpers_direct_double_empty_one_nop#1{#1[][]}
\def\syst_helpers_direct_double_empty_two_nop#1[#2]{#1[#2][]}
-%D Used in math definitions (in an type {\edef}):
+%D Used in math definitions (in an \type {\edef}):
%D \startbuffer
%D [\docheckedpair{}]
@@ -6985,8 +7121,80 @@
% expandable:
-%def\getsubstring#1#2#3{\cldcontext{utf.sub([[#3]],tonumber("#1"),tonumber("#2"))}}
-\def\getsubstring#1#2#3{\ctxcommand{getsubstring(\!!bs#3\!!es,"#1","#2")}}
+\def\getsubstring#1#2#3{\clf_getsubstring{#3}{#1}{#2}}
+
+%D Other dimensions than pt (used in mb-mp)
+
+\def\converteddimen#1#2{\clf_converteddimen\dimexpr#1\relax{#2}}
+
+%D Maybe (looks ugly):
+%D
+%D \starttyping
+%D \doifcase {foo}
+%D {bar} {BAR}
+%D {foo} {FOO}
+%D {default} {DEFAULT}
+%D
+%D \doifcase {foo}
+%D {bar} {BAR}
+%D {foo} {\doifcase {bar}
+%D {bar} {BAR}
+%D {foo} {FOO}
+%D {default} {DEFAULT}
+%D }
+%D {default} {DEFAULT}
+%D \stoptyping
+
+% \doifcase {\btxfoundname{author}}
+% {author} {\btxflush{author}}
+% {editor} {\texdefinition{btx:apa:editor-or-editors}}
+% {title} {\texdefinition{btx:apa:title-subtitle-type}}
+% {default} {\btxflush{author}}
+
+% \unexpanded\def\doifcase#1%
+% {\edef\m_case_asked{#1}%
+% \syst_aux_case}
+%
+% \def\syst_aux_case#1%
+% {\edef\m_case_temp{#1}%
+% \ifx\m_case_temp\m_case_asked
+% \expandafter\syst_aux_case_yes
+% \else\ifx\m_case_temp\s!default
+% \doubleexpandafter\firstofoneargument
+% \else
+% \doubleexpandafter\syst_aux_case_nop
+% \fi\fi}
+%
+% \def\syst_aux_skip#1#2%
+% {\edef\m_case_temp{#1}%
+% \ifx\m_case_temp\s!default
+% \expandafter\syst_aux_done
+% \else
+% \expandafter\syst_aux_skip
+% \fi}
+%
+% \def\syst_aux_case_yes#1%
+% {\def\syst_aux_done{#1}%
+% \syst_aux_skip}
+%
+% \def\syst_aux_case_nop#1%
+% {\syst_aux_case}
+
+%D \macros
+%D {ntimes}
+%D
+%D some repetition:
+%D
+%D \startbuffer
+%D \ntimes{*}{20}
+%D \stopbuffer
+%D
+%D \typebuffer \blank gives: \getbuffer \blank
+%D
+%D This is not real fast but quite okay:
+
+%def\ntimes#1#2{\ifnum#2>\zerocount#1\ntimes{#1}{\numexpr#2-\plusone\relax}\fi} % 1.72
+\def\ntimes#1#2{\clf_ntimes{#1}\numexpr#2\relax} % 0.33
\protect \endinput
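
The syst-aux hunks above mostly rename the \doif...else testers to \doifelse... (keeping the old names as \let aliases) and add sizing helpers such as \distributedhsize. A minimal usage sketch, assuming a normal ConTeXt run; the tested strings and the three-column setup are made up for illustration:

    \doifelseassignment {color=red} {\message{assignment}} {\message{no assignment}}
    \doifelsesamestring {foo} {FOO} {\message{same}} {\message{different}}
    \doifassignmentelse {color=red} {\message{old name still works}} {\message{no}}

    % (\textwidth - 2 x 1em)/3, e.g. for three columns separated by 1em of space
    \hsize=\distributedhsize{\textwidth}{1em}{3}
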
diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua
index 48f02da3a..0fa685b2d 100644
--- a/tex/context/base/syst-con.lua
+++ b/tex/context/base/syst-con.lua
@@ -6,57 +6,63 @@ if not modules then modules = { } end modules ['syst-con'] = {
license = "see context related readme files"
}
-converters = converters or { }
+local tonumber = tonumber
+local math = math
+local utfchar = utf.char
+local gsub = string.gsub
+
+converters = converters or { }
+local converters = converters
+
+local context = context
+local commands = commands
+local implement = interfaces.implement
+
+local formatters = string.formatters
--[[ldx--
<p>For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at
the top of <l n='luatex'/>'s char range but outside the unicode range.</p>
--ldx]]--
-local tonumber = tonumber
-local utfchar = utf.char
-local gsub, format = string.gsub, string.format
+function converters.hexstringtonumber(n) tonumber(n,16) end
+function converters.octstringtonumber(n) tonumber(n, 8) end
-function converters.hexstringtonumber(n) tonumber(n,16) end
-function converters.octstringtonumber(n) tonumber(n, 8) end
function converters.rawcharacter (n) utfchar(0x110000+n) end
-function converters.lchexnumber (n) format("%x" ,n) end
-function converters.uchexnumber (n) format("%X" ,n) end
-function converters.lchexnumbers (n) format("%02x",n) end
-function converters.uchexnumbers (n) format("%02X",n) end
-function converters.octnumber (n) format("%03o",n) end
-
-function commands.hexstringtonumber(n) context(tonumber(n,16)) end
-function commands.octstringtonumber(n) context(tonumber(n, 8)) end
-function commands.rawcharacter (n) context(utfchar(0x110000+n)) end
-function commands.lchexnumber (n) context("%x" ,n) end
-function commands.uchexnumber (n) context("%X" ,n) end
-function commands.lchexnumbers (n) context("%02x",n) end
-function commands.uchexnumbers (n) context("%02X",n) end
-function commands.octnumber (n) context("%03o",n) end
-
-function commands.format(fmt,...) -- used ?
- fmt = gsub(fmt,"@","%%")
- context(fmt,...)
-end
-
-local cosd, sind, tand = math.cosd, math.sind, math.tand
-local cos, sin, tan = math.cos, math.sin, math.tan
-
--- unfortunately %s spits out: 6.1230317691119e-017
---
--- function commands.sind(n) context(sind(n)) end
--- function commands.cosd(n) context(cosd(n)) end
--- function commands.tand(n) context(tand(n)) end
---
--- function commands.sin (n) context(sin (n)) end
--- function commands.cos (n) context(cos (n)) end
--- function commands.tan (n) context(tan (n)) end
-
-function commands.sind(n) context("%0.6f",sind(n)) end
-function commands.cosd(n) context("%0.6f",cosd(n)) end
-function commands.tand(n) context("%0.6f",tand(n)) end
-
-function commands.sin (n) context("%0.6f",sin (n)) end
-function commands.cos (n) context("%0.6f",cos (n)) end
-function commands.tan (n) context("%0.6f",tan (n)) end
+
+local lchexnumber = formatters["%x"]
+local uchexnumber = formatters["%X"]
+local lchexnumbers = formatters["%02x"]
+local uchexnumbers = formatters["%02X"]
+local octnumber = formatters["%03o"]
+local nicenumber = formatters["%0.6F"]
+
+converters.lchexnumber = lchexnumber
+converters.uchexnumber = uchexnumber
+converters.lchexnumbers = lchexnumbers
+converters.uchexnumbers = uchexnumbers
+converters.octnumber = octnumber
+converters.nicenumber = nicenumber
+
+implement { name = "hexstringtonumber", actions = { tonumber, context }, arguments = { "integer", 16 } }
+implement { name = "octstringtonumber", actions = { tonumber, context }, arguments = { "integer", 8 } }
+
+implement { name = "rawcharacter", actions = function(n) context(utfchar(0x110000+n)) end, arguments = "integer" }
+
+implement { name = "lchexnumber", actions = { lchexnumber, context }, arguments = "integer" }
+implement { name = "uchexnumber", actions = { uchexnumber, context }, arguments = "integer" }
+implement { name = "lchexnumbers", actions = { lchexnumbers, context }, arguments = "integer" }
+implement { name = "uchexnumbers", actions = { uchexnumbers, context }, arguments = "integer" }
+implement { name = "octnumber", actions = { octnumber, context }, arguments = "integer" }
+
+implement { name = "sin", actions = { math.sin, nicenumber, context }, arguments = "number" }
+implement { name = "cos", actions = { math.cos, nicenumber, context }, arguments = "number" }
+implement { name = "tan", actions = { math.tan, nicenumber, context }, arguments = "number" }
+
+implement { name = "sind", actions = { math.sind, nicenumber, context }, arguments = "number" }
+implement { name = "cosd", actions = { math.cosd, nicenumber, context }, arguments = "number" }
+implement { name = "tand", actions = { math.tand, nicenumber, context }, arguments = "number" }
+
+-- only as commands
+
+function commands.format(fmt,...) context((gsub(fmt,"@","%%")),...) end
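
As the hunks above suggest, each implement call registers a Lua function under a \clf_... control sequence at the TeX end; the functions in actions are applied left to right and the final context pipes the result back into the input. A minimal sketch of calling one of the registered converters directly (normally the wrapper macros in syst-con.mkiv below are used instead):

    \clf_lchexnumber \numexpr 255 \relax   % 255 -> lchexnumber -> context, typesets "ff"
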
diff --git a/tex/context/base/syst-con.mkiv b/tex/context/base/syst-con.mkiv
index de8ed597e..17c407819 100644
--- a/tex/context/base/syst-con.mkiv
+++ b/tex/context/base/syst-con.mkiv
@@ -46,10 +46,10 @@
%D [\expandafter\uchexnumber\expandafter{\the\zerocount}]
%D \stoptyping
-\def\lchexnumber #1{\ctxcommand{lchexnumber(\number#1)}}
-\def\uchexnumber #1{\ctxcommand{uchexnumber(\number#1)}}
-\def\lchexnumbers#1{\ctxcommand{lchexnumbers(\number#1)}}
-\def\uchexnumbers#1{\ctxcommand{uchexnumbers(\number#1)}}
+\def\lchexnumber #1{\clf_lchexnumber \numexpr#1\relax}
+\def\uchexnumber #1{\clf_uchexnumber \numexpr#1\relax}
+\def\lchexnumbers#1{\clf_lchexnumbers\numexpr#1\relax}
+\def\uchexnumbers#1{\clf_uchexnumbers\numexpr#1\relax}
\let\hexnumber\uchexnumber
@@ -58,7 +58,7 @@
%D
%D For unicode remapping purposes, we need octal numbers.
-\def\octnumber#1{\ctxcommand{octnumber(\number#1)}}
+\def\octnumber#1{\clf_octnumber\numexpr#1\relax}
%D \macros
%D {hexstringtonumber,octstringtonumber}
@@ -67,8 +67,8 @@
%D a decimal number, thereby taking care of lowercase characters
%D as well.
-\def\hexstringtonumber#1{\ctxcommand{hexstringtonumber("#1")}}
-\def\octstringtonumber#1{\ctxcommand{octstringtonumber("#1")}}
+\def\hexstringtonumber#1{\clf_hexstringtonumber\numexpr#1\relax}
+\def\octstringtonumber#1{\clf_octstringtonumber\numexpr#1\relax}
%D \macros
%D {rawcharacter}
@@ -76,7 +76,7 @@
%D This macro can be used to produce proper 8 bit characters
%D that we sometimes need in backends and round||trips.
-\def\rawcharacter#1{\ctxcommand{rawcharacter(\number#1)}}
+\def\rawcharacter#1{\clf_rawcharacter\numexpr#1\relax}
%D \macros
%D {twodigits, threedigits}
@@ -126,23 +126,12 @@
%D \macros
%D {setcalculatedsin,setcalculatedcos,setcalculatedtan}
-%D
-%D This saves some 2K in the format. At some point we will redo the
-%D code that calls this. Beware: in \MKII\ this is a separate module.
-
-% \let\calculatesin\gobbleoneargument
-% \let\calculatecos\gobbleoneargument
-% \let\calculatetan\gobbleoneargument
-
-% \def\setcalculatedsin#1#2{\edef#1{\cldcontext{math.sind(#2)}}} % jit-unsafe
-% \def\setcalculatedcos#1#2{\edef#1{\cldcontext{math.cosd(#2)}}} % jit-unsafe
-% \def\setcalculatedtan#1#2{\edef#1{\cldcontext{math.tand(#2)}}} % jit-unsafe
-\def\setcalculatedsin#1#2{\edef#1{\ctxcommand{sind(#2)}}}
-\def\setcalculatedcos#1#2{\edef#1{\ctxcommand{cosd(#2)}}}
-\def\setcalculatedtan#1#2{\edef#1{\ctxcommand{tand(#2)}}}
+\def\setcalculatedsin#1#2{\edef#1{\clf_sind#2}}
+\def\setcalculatedcos#1#2{\edef#1{\clf_cosd#2}}
+\def\setcalculatedtan#1#2{\edef#1{\clf_tand#2}}
- \def\formatted#1{\ctxcommand{format(#1)}}
-\unexpanded\def\format #1{\ctxcommand{format(#1)}}
+ \def\formatted#1{\ctxcommand{format(#1)}} % not clf
+\unexpanded\def\format #1{\ctxcommand{format(#1)}} % not clf
\protect \endinput
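
The conversion macros keep their old interface; only the backend moved from \ctxcommand to the \clf_... calls defined above in syst-con.lua. A quick sketch of what the unchanged front ends produce (input values chosen for illustration):

    \lchexnumber {255}   % ff
    \uchexnumber {255}   % FF
    \octnumber   {64}    % 100
    \rawcharacter{65}    % the raw (8 bit) character with code 65
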
diff --git a/tex/context/base/syst-ini.mkiv b/tex/context/base/syst-ini.mkiv
index ab1c53131..8631aed90 100644
--- a/tex/context/base/syst-ini.mkiv
+++ b/tex/context/base/syst-ini.mkiv
@@ -114,11 +114,6 @@
end
}
-%D Handy.
-
-\suppresslongerror = 1
-\suppressoutererror = 1
-
%D \ETEX\ has a not so handy way of telling you the version number, i.e. the revision
%D number has a period in it:
@@ -198,7 +193,7 @@
\countdef \c_syst_last_allocated_language = 41 \c_syst_last_allocated_language = \c_syst_min_allocated_language % not used in context
\countdef \c_syst_last_allocated_insertion = 42 \c_syst_last_allocated_insertion = \c_syst_min_allocated_insert
\countdef \c_syst_last_allocated_family = 43 \c_syst_last_allocated_family = \c_syst_min_allocated_family % not used in context
-\countdef \c_syst_last_allocated_attribute = 44 \c_syst_last_allocated_attribute = \c_syst_min_allocated_attribute
+\countdef \c_syst_last_allocated_attribute = 44 \c_syst_last_allocated_attribute = \c_syst_min_allocated_attribute % not used in context
\countdef \c_syst_min_counter_value = 125 \c_syst_min_counter_value = -"7FFFFFFF % beware, we use index 125 at the lua end
\countdef \c_syst_max_counter_value = 126 \c_syst_max_counter_value = "7FFFFFFF % beware, we use index 126 at the lua end
@@ -246,9 +241,10 @@
% Watch out, for the moment we disable the check for already being defined
% later we will revert this but first all chardefs must be replaced.
-\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
-\normalprotected\def\setnewconstant#1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
-\normalprotected\def\setconstant {} % dummy, no checking, so it warns
+\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
+\normalprotected\def\setnewconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
+\normalprotected\def\setconstant {} % dummy, no checking, so it warns
+\normalprotected\def\setconstantvalue#1#2{\csname#1\endcsname\numexpr#2\relax}
% maybe setconstant with check
@@ -300,7 +296,7 @@
%D 128-1023 are private and should not be touched.
\let\attributeunsetvalue\c_syst_min_counter_value % used to be \minusone
-\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_min_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
+\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_last_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
%D Not used by \CONTEXT\ but for instance \PICTEX\ needs it. It's a trick to force
%D strings instead of tokens that take more memory. It's a trick to trick to force
@@ -326,8 +322,8 @@
\newcount\scratchcounterone \newcount\scratchcountertwo \newcount\scratchcounterthree
\newdimen \scratchdimenone \newdimen \scratchdimentwo \newdimen \scratchdimenthree
-\newdimen \scratchskipone \newdimen \scratchskiptwo \newdimen \scratchskipthree
-\newbox \scratchmuskipone \newbox \scratchmuskiptwo \newbox \scratchmuskipthree
+\newskip \scratchskipone \newskip \scratchskiptwo \newskip \scratchskipthree
+\newmuskip\scratchmuskipone \newmuskip\scratchmuskiptwo \newmuskip\scratchmuskipthree
\newtoks \scratchtoksone \newtoks \scratchtokstwo \newtoks \scratchtoksthree
\newbox \scratchboxone \newbox \scratchboxtwo \newbox \scratchboxthree
@@ -385,6 +381,8 @@
\newmuskip\zeromuskip \zeromuskip = 0mu
\newmuskip\onemuskip \onemuskip = 1mu
+\newmuskip\muquad \muquad = 18mu
+
\let\points \onepoint
\let\halfpoint\halfapoint
@@ -416,6 +414,7 @@
\chardef \plustwentythousand = 20000
\chardef \medcard = 32768
\chardef \maxcard = 65536 % pdftex has less mathchars
+\chardef \maxcardminusone = 65535
%D \macros
%D {doubleexpandafter,tripleexpandafter,expanded,startexpanded}
@@ -630,6 +629,8 @@
%D
%D In \LUATEX\ we have ways around this.
+% no longer \errorstopmode cf. plain tex 3.141592653
+
\normalprotected\def\tracingall
{\tracingonline \plusone
\tracingcommands \plusthree
@@ -646,8 +647,7 @@
\tracingifs \plusone
\tracingscantokens\plusone
\tracingnesting \plusone
- \tracingassigns \plustwo
- \errorstopmode}
+ \tracingassigns \plustwo}
\normalprotected\def\loggingall
{\tracingall
@@ -744,6 +744,9 @@
\normalprotected\def\settrue #1{\let#1\conditionaltrue }
\normalprotected\def\setfalse#1{\let#1\conditionalfalse}
+\normalprotected\def\settruevalue #1{\expandafter\let\csname#1\endcsname\conditionaltrue }
+\normalprotected\def\setfalsevalue#1{\expandafter\let\csname#1\endcsname\conditionalfalse}
+
\let\newconditional\setfalse
\let\ifconditional \ifcase
@@ -762,8 +765,8 @@
\normalprotected\def\newfraction#1{\let#1\!!plusone}
-%D It would be handy to have a primitive \unless\ifcase because then we could
-%D use nicer values. Anyhow, this conditional code used to be in the \type
+%D It would be handy to have a primitive \type {\unless\ifcase} because then we
+%D could use nicer values. Anyhow, this conditional code used to be in the \type
%D {syst-aux} module but is now promoted to here.
%D \macros
@@ -1012,12 +1015,18 @@
% \bindprimitive ifabsnum ifpdfabsnum
%D We need to make sure that we start up in \DVI\ mode, so, after testing for running
-%D \PDFTEX, we default to \DVI.
-
-\pdfoutput \zerocount
-\pdfminorversion \plusfive
-\pdfgentounicode \plusone
-\pdfinclusioncopyfonts\plusone
+%D \PDFTEX, we default to \DVI. Why?
+
+\pdfoutput \zerocount
+\pdfminorversion \plusseven
+\pdfgentounicode \plusone
+\pdfinclusioncopyfonts \plusone
+\pdfinclusionerrorlevel \zerocount
+\pdfdecimaldigits \plusten
+\pdfhorigin 1 true in
+\pdfvorigin \pdfhorigin
+\pdfimageresolution 300
+\pdfpkresolution 600
\normalprotected\def\nopdfcompression {\pdfobjcompresslevel\zerocount \pdfcompresslevel\zerocount}
\normalprotected\def\maximumpdfcompression{\pdfobjcompresslevel\plusnine \pdfcompresslevel\plusnine }
@@ -1080,6 +1089,12 @@
\voffset\zeropoint \let\voffset\relax \newdimen\voffset % prevent messing up
\hoffset\zeropoint \let\hoffset\relax \newdimen\hoffset % prevent messing up
+%D Handy.
+
+\suppresslongerror \plusone
+\suppressoutererror \plusone
+\suppressmathparerror \plusone
+
%D While cleaning this code up a bit I was listening to Heather Nova's \CD\ Redbird.
%D The first song on that \CD\ ends with a few lines suitable for ending this
%D initialization module:
@@ -1097,4 +1112,21 @@
%D
%D So let's see what \TEX\ can do now that we've opened up the basic machinery.
+%D Now we define a few helpers that we need in a very early stage. We have no message system
+%D yet but redundant definitions are fatal anyway.
+
+\newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
+
+\def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
+%def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+
+\normalprotected\def\installsystemnamespace#1%
+ {\ifcsname ??#1\endcsname
+ \immediate\write16{fatal error: duplicate system namespace '#1'}%
+ \expandafter\normalend
+ \else
+ \global\advance\c_syst_helpers_n_of_namespaces\plusone
+ \expandafter\edef\csname ??#1\endcsname{\v_interfaces_prefix_template_system}%
+ \fi}
+
\protect \endinput
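
Two of the additions above, \setconstantvalue/\settruevalue and \installsystemnamespace, show up a lot in later modules. A small sketch of how they behave; the names mylimit, c_my_flag and demo are made up:

    \newconstant\mylimit
    \setconstantvalue{mylimit}{40+2}      % \mylimit now holds 42 (the value goes through \numexpr)

    \newconditional\c_my_flag
    \settruevalue{c_my_flag}              % same effect as \settrue\c_my_flag
    \ifconditional\c_my_flag \message{set}\else \message{unset}\fi

    \installsystemnamespace{demo}         % defines \??demo; a second call with the same
                                          % name is a fatal error and ends the run
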
diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua
index e47041444..a6665f410 100644
--- a/tex/context/base/syst-lua.lua
+++ b/tex/context/base/syst-lua.lua
@@ -6,48 +6,96 @@ if not modules then modules = { } end modules ['syst-lua'] = {
license = "see context related readme files"
}
-local format, find, match, rep = string.format, string.find, string.match, string.rep
+local find, match = string.find, string.match
local tonumber = tonumber
-local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat
+local S, C, P, lpegmatch, lpegtsplitat = lpeg.S, lpeg.C, lpeg.P, lpeg.match, lpeg.tsplitat
-commands = commands or { }
-local commands = commands
-local context = context
+commands = commands or { }
+local commands = commands
-function commands.writestatus(...) logs.status(...) end -- overloaded later
+local implement = interfaces.implement
-local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
-local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
-local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
-local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
+local two_strings = interfaces.strings[2]
--- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments
--- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments
--- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments
--- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments
+local context = context
+local csprint = context.sprint
+
+local prtcatcodes = tex.prtcatcodes
+
+implement { -- will be overloaded later
+ name = "writestatus",
+ arguments = two_strings,
+ actions = logs.status,
+}
+
+local ctx_firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
+local ctx_secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
+local ctx_firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
+local ctx_gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
+
+-- contextsprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+-- contextsprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+-- contextsprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
+-- contextsprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
function commands.doifelse(b)
if b then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
+ end
+end
+
+function commands.doifelsesomething(b)
+ if b and b ~= "" then
+ ctx_firstoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
+ else
+ ctx_secondoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
end
end
function commands.doif(b)
if b then
- firstofoneargument()
+ ctx_firstofoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+ else
+ ctx_gobbleoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+ end
+end
+
+function commands.doifsomething(b)
+ if b and b ~= "" then
+ ctx_firstofoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
else
- gobbleoneargument()
+ ctx_gobbleoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
end
end
function commands.doifnot(b)
if b then
- gobbleoneargument()
+ ctx_gobbleoneargument()
+-- csprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+ else
+ ctx_firstofoneargument()
+-- csprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+ end
+end
+
+function commands.doifnotthing(b)
+ if b and b ~= "" then
+ ctx_gobbleoneargument()
+-- csprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
else
- firstofoneargument()
+ ctx_firstofoneargument()
+-- csprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
end
end
@@ -59,16 +107,16 @@ end
function commands.doifelsespaces(str)
if find(str,"^ +$") then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
local s = lpegtsplitat(",")
local h = { }
-function commands.doifcommonelse(a,b) -- often the same test
+local function doifelsecommon(a,b) -- often the same test
local ha = h[a]
local hb = h[b]
if not ha then
@@ -84,41 +132,112 @@ function commands.doifcommonelse(a,b) -- often the same test
for i=1,na do
for j=1,nb do
if ha[i] == hb[j] then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
return
end
end
end
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
-function commands.doifinsetelse(a,b)
+local function doifelseinset(a,b)
local hb = h[b]
if not hb then hb = lpegmatch(s,b) h[b] = hb end
for i=1,#hb do
if a == hb[i] then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
return
end
end
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
+implement {
+ name = "doifelsecommon",
+ arguments = two_strings,
+ actions = doifelsecommon
+}
+
+implement {
+ name = "doifelseinset",
+ arguments = two_strings,
+ actions = doifelseinset
+}
+
local pattern = lpeg.patterns.validdimen
-function commands.doifdimenstringelse(str)
+function commands.doifelsedimenstring(str)
if lpegmatch(pattern,str) then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
-function commands.firstinset(str)
- local first = match(str,"^([^,]+),")
- context(first or str)
-end
+local p_first = C((1-P(",")-P(-1))^0)
-function commands.ntimes(str,n)
- context(rep(str,n or 1))
-end
+implement {
+ name = "firstinset",
+ arguments = "string",
+ actions = function(str) context(lpegmatch(p_first,str or "")) end
+}
+
+implement {
+ name = "ntimes",
+ arguments = { "string", "integer" },
+ actions = { string.rep, context }
+}
+
+implement {
+ name = "execute",
+ arguments = "string",
+ actions = os.execute -- wrapped in sandbox
+}
+
+-- function commands.write(n,str)
+-- if n == 18 then
+-- os.execute(str)
+-- elseif n == 16 then
+-- -- immediate
+-- logs.report(str)
+-- else
+-- -- at the tex end we can still drop the write / also delayed vs immediate
+-- context.writeviatex(n,str)
+-- end
+-- end
+
+implement {
+ name = "doifelsesame",
+ arguments = two_strings,
+ actions = function(a,b)
+ if a == b then
+ ctx_firstoftwoarguments()
+ else
+ ctx_secondoftwoarguments()
+ end
+ end
+}
+
+implement {
+ name = "doifsame",
+ arguments = two_strings,
+ actions = function(a,b)
+ if a == b then
+ ctx_firstofoneargument()
+ else
+ ctx_gobbleoneargument()
+ end
+ end
+}
+
+implement {
+ name = "doifnotsame",
+ arguments = two_strings,
+ actions = function(a,b)
+ if a == b then
+ ctx_gobbleoneargument()
+ else
+ ctx_firstofoneargument()
+ end
+ end
+}
diff --git a/tex/context/base/syst-lua.mkiv b/tex/context/base/syst-lua.mkiv
index 88a8c246e..5e82a9ea9 100644
--- a/tex/context/base/syst-lua.mkiv
+++ b/tex/context/base/syst-lua.mkiv
@@ -15,15 +15,18 @@
\unprotect
-\def\expdoifelse#1#2{\ctxcommand{doifelse(\!!bs#1\!!es==\!!bs#2\!!es)}}
-\def\expdoif #1#2{\ctxcommand{doif (\!!bs#1\!!es==\!!bs#2\!!es)}}
-\def\expdoifnot #1#2{\ctxcommand{doifnot (\!!bs#1\!!es==\!!bs#2\!!es)}}
+\def\expdoifelse#1#2{\clf_doifelsesame{#1}{#2}}
+\def\expdoif #1#2{\clf_doifsame {#1}{#2}}
+\def\expdoifnot #1#2{\clf_doifnotsame {#1}{#2}}
% \testfeatureonce{100000}{\doifelse{hello world}{here i am}{}} % 0.3
% \testfeatureonce{100000}{\expandabledoifelse{hello world}{here i am}{}} % 1.5
-\def\expdoifcommonelse#1#2{\ctxcommand{doifcommonelse("#1","#2")}}
-\def\expdoifinsetelse #1#2{\ctxcommand{doifinsetelse("#1","#2")}}
+\def\expdoifelsecommon#1#2{\clf_doifelsecommon{#1}{#2}}
+\def\expdoifelseinset #1#2{\clf_doifelseinset {#1}{#2}}
+
+\let\expdoifcommonelse\expdoifelsecommon
+\let\expdoifinsetelse \expdoifelseinset
% we define these here, just in case ...
@@ -32,7 +35,7 @@
\edef\!!bs{[\luastringsep[}
\edef\!!es{]\luastringsep]}
-\unexpanded\def\writestatus#1#2{\ctxcommand{writestatus(\!!bs#1\!!es,\!!bs#2\!!es)}}
+\unexpanded\def\writestatus#1#2{\clf_writestatus{#1}{#2}}
% A handy helper:
@@ -41,7 +44,7 @@
% We can omit the tex.ctxcatcodes here as nowadays we seldom
% change the regime at the \TEX\ end:
-\def\luaexpr#1{\directlua{tex.print(tostring(#1))}}
+\def\luaexpr#1{\ctxlua{tex.print(tostring(#1))}} % no use in shortcutting as it has to be compiled
% helpers:
@@ -50,4 +53,49 @@
\def\ui_ft#1#2{#1}
\def\ui_st#1#2{#2}
+%D Let's bring this under \LUA\ (and therefore \MKIV\ sandbox) control:
+
+% \setnewconstant\c_syst_write 18
+%
+% \unexpanded\def\write#1#% so we can handle \immediate
+% {\ifnum#1=\c_syst_write
+% \expandafter\syst_execute
+% \else
+% \normalwrite#1%
+% \fi}
+%
+% \unexpanded\def\syst_execute#1%
+% {\ctxlua{os.execute(\!!bs#1\!!es)}}
+
+%D But as we only use write 16 we could as well do all in \LUA\
+%D and ignore the rest. Okay, we still can do writes here but only
+%D when not blocked.
+
+% Nicer would be if we could just disable write 18 and keep os.execute
+% which in fact we can do by defining write18 as macro instead of
+% primitive ... todo.
+
+% \unexpanded\def\write#1#%
+% {\syst_write{#1}}
+%
+% \def\syst_write#1#2%
+% {\ctxcommand{write(\number#1,\!!bs\normalunexpanded{#2}\!!es)}}
+%
+% \unexpanded\def\writeviatex#1#2%
+% {\ifx\normalwrite\relax\else
+% \normalwrite#1{#2}%
+% \fi}
+
+% we have no way yet to pickup \immediate unless we redefine it
+% we assume immediate execution
+
+\let\syst_write_execute\clf_execute % always {...}
+
+\unexpanded\def\write#1#%
+ {\ifnum#1=18
+ \expandafter\syst_write_execute
+ \else
+ \normalwrite#1%
+ \fi}
+
\protect \endinput
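
With these changes the expandable comparisons go through the implement'ed helpers from syst-lua.lua, and \write is redefined so that stream 18 is routed to the sandboxed \clf_execute. A hedged usage sketch; the compared strings and the shell command are placeholders:

    \expdoifelse{foo}{foo}{\message{equal}}{\message{different}}
    \expdoifelseinset{b}{a,b,c}{\message{found}}{\message{missing}}

    \write18{mtxrun --version}   % intercepted and passed on to os.execute via \clf_execute
    \write16{just a log line}    % any other stream still ends up at \normalwrite
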
diff --git a/tex/context/base/syst-rtp.mkiv b/tex/context/base/syst-rtp.mkiv
index f65e599ec..82c0778b4 100644
--- a/tex/context/base/syst-rtp.mkiv
+++ b/tex/context/base/syst-rtp.mkiv
@@ -13,6 +13,6 @@
\unprotect
-\def\executesystemcommand#1{\ctxlua{os.execute([[#1]])}}
+\let\executesystemcommand\clf_execute
\protect \endinput
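
\executesystemcommand is now just another name for the same sandboxed \clf_execute, so existing calls keep working; for example (placeholder command):

    \executesystemcommand{mtxrun --generate}
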
diff --git a/tex/context/base/tabl-ltb.mkiv b/tex/context/base/tabl-ltb.mkiv
index f7fbc0390..4c892138e 100644
--- a/tex/context/base/tabl-ltb.mkiv
+++ b/tex/context/base/tabl-ltb.mkiv
@@ -138,6 +138,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflinetablecparameterelse\doifelselinetablecparameter
+
\def\linetablecparameter#1%
{\csname
\ifcsname\??lec\number\linetablecolumn#1\endcsname
@@ -614,7 +616,7 @@
{\global\linetablepage\zerocount
\ifcase\linetablerepeat \else \ifcase\linetablepage
\doif{\linetableparameter\c!repeat}\v!no
- {\global\linetablepage\doifoddpageelse\plusone\plustwo}%
+ {\global\linetablepage\doifelseoddpage\plusone\plustwo}%
\fi \fi}
\def\flushlinetablehead
diff --git a/tex/context/base/tabl-ntb.mkii b/tex/context/base/tabl-ntb.mkii
index 465ed44b2..fbf43eb1e 100644
--- a/tex/context/base/tabl-ntb.mkii
+++ b/tex/context/base/tabl-ntb.mkii
@@ -423,7 +423,9 @@
\ifcsname\@@tblprefix\c!x\positivecol\c!y\positiverow\endcsname\csname\@@tblprefix\c!x\positivecol\c!y\positiverow\endcsname\fi
\ifcsname\@@tblprefix\c!x\negativecol\c!y\negativerow\endcsname\csname\@@tblprefix\c!x\negativecol\c!y\negativerow\endcsname\fi
% done
- \global\letcscsname\@@tblsplitafter\csname\@@tbl\@@tbl\c!after\endcsname
+ \global\letcscsname\@@tblsplitafter \csname\@@tbl\@@tbl\c!after \endcsname
+ \global\letcscsname\@@tblsplitbefore \csname\@@tbl\@@tbl\c!before \endcsname
+ \global\letcscsname\@@tblsplitsamepage\csname\@@tbl\@@tbl\c!samepage\endcsname
\relax}
% we cannot use +n (checking on number/last/first would slow down too much)
@@ -439,7 +441,9 @@
% \dorecurse{10}{\bTR \dorecurse{6}{\bTD xxx \eTD} \eTR}
% \eTABLE
-\globallet\@@tblsplitafter\relax
+\globallet\@@tblsplitafter \relax
+\globallet\@@tblsplitbefore \relax
+\globallet\@@tblsplitsamepage\relax
% split + page:
%
@@ -851,7 +855,17 @@
{\noalign
{\global\advance\tblrow\plusone
\global\tblcol\zerocount
- \global\tblspn\zerocount}%
+ \global\tblspn\zerocount
+ \bgroup % protect local vars
+ \@@tblsplitbefore
+ \egroup
+ \ifx\@@tblsplitsamepage\v!before
+ \unpenalty
+ \nobreak
+ \else\ifx\@@tblsplitsamepage\v!both
+ \unpenalty
+ \nobreak
+ \fi\fi}%
\nexttblcol
\kern\dimexpr\tbltblleftmargindistance-\tbltblcolumndistance\relax}
@@ -862,7 +876,14 @@
{\nointerlineskip
\ifnum\tblrow>\noftblheadlines
\ifnum\gettblnob\tblrow=\zerocount
- \allowbreak
+ \unpenalty
+ \ifx\@@tblsplitsamepage\v!after
+ \nobreak
+ \else\ifx\@@tblsplitsamepage\v!both
+ \nobreak
+ \else
+ \allowbreak
+ \fi\fi
\fi
\else
\allowbreak % else no proper head split off
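
The MkII natural table splitter now also honours per-row before and samepage settings (values before, after or both) at the break points added above. A hedged sketch, assuming the usual per-row \setupTABLE syntax; the row number and values are only illustrative:

    \bTABLE[split=yes]
      \setupTABLE[row][4][before={\blank[big]},samepage=both] % keep row 4 glued to its neighbours
      \bTR \bTD one \eTD \bTD two \eTD \eTR
      % ... more rows ...
    \eTABLE
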
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index a1ae94712..fcf8ac312 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module as a more modern variant in xtables but as we follow a bit
+%D This module has a more modern variant in xtables but as we follow a bit
%D different approach with settings there, this mechanism will stay. In fact
%D each of them has its advantages. This module could be sped up a bit and made
%D more efficient by delegating some housekeeping to \LUA\ but it's not worth
@@ -19,8 +19,14 @@
%D real purpose in it. If needed I can squeeze out a few more percentages
%D runtime.
+% columndistance 'optimized' ... needs checking
+%
+% we don't need the alignment mechanism .. we can just pack the row in a box
+
\writestatus{loading}{ConTeXt Table Macros / Natural Tables}
+% sometimes this helps (with nc going wild): \setupTABLE[maxwidth=100cm]
+%
% bug: width 3cm is not honored and column becomes too wide
% as given width is added to distributed width
%
@@ -98,7 +104,7 @@
\to \t_tabl_ntb_cell_start
\unexpanded\def\tabl_ntb_cell_start
- {\inhibitblank
+ {% \inhibitblank
\dotagTABLEcell
%\tabl_ntb_next_level
\usenaturaltablelocalstyleandcolor\c!style\c!color
@@ -145,6 +151,9 @@
\newcount\c_tabl_ntb_maximum_row_span
\newcount\c_tabl_ntb_maximum_col_span
+\newcount\c_tabl_ntb_encountered_col
+\newcount\c_tabl_ntb_encountered_max
+
\newtoks\t_tabl_ntb
\newtoks\t_tabl_ntb_row
@@ -162,6 +171,11 @@
\newdimen\d_tabl_ntb_height
\newdimen\d_tabl_ntb_width
+\newdimen\d_tabl_ntb_leftmargindistance
+\newdimen\d_tabl_ntb_rightmargindistance
+\newdimen\d_tabl_ntb_columndistance
+\newdimen\d_tabl_ntb_maxwidth
+
\newtoks\everyTABLEpass % public
\newcount\tablecellrows % public (needs checking)
@@ -208,6 +222,8 @@
\installdirectcommandhandler \??naturaltable {naturaltable} % \??naturaltable
\installsimpleframedcommandhandler \??naturaltablelocal {naturaltablelocal} \??naturaltablelocal
+\installcorenamespace{naturaltabletal}
+\installcorenamespace{naturaltablegal}
\installcorenamespace{naturaltablenob}
\installcorenamespace{naturaltabletag}
\installcorenamespace{naturaltablecol}
@@ -219,11 +235,26 @@
\installcorenamespace{naturaltablehei}
\installcorenamespace{naturaltabledis}
\installcorenamespace{naturaltableaut}
+%installcorenamespace{naturaltablefwd} % forcedwidth
\installcorenamespace{naturaltabletxt}
\installcorenamespace{naturaltablespn}
\installcorenamespace{naturaltableref}
\installcorenamespace{naturaltableset}
\installcorenamespace{naturaltablecell}
+\installcorenamespace{naturaltablesqueeze}
+\installcorenamespace{naturaltabletok}
+
+\letvalue{\??naturaltablesqueeze }\donefalse
+\letvalue{\??naturaltablesqueeze\v!fit }\donetrue
+\letvalue{\??naturaltablesqueeze\v!fixed}\donetrue
+\letvalue{\??naturaltablesqueeze\v!broad}\donetrue
+\letvalue{\??naturaltablesqueeze\v!local}\donetrue
+
+\def\tabl_ntb_let_gal{\global\expandafter\let\csname\??naturaltablegal\m_tabl_tbl_level\endcsname}
+\def\tabl_ntb_get_gal{\csname\??naturaltablegal\m_tabl_tbl_level\endcsname}
+
+\def\tabl_ntb_let_tal#1{\global\expandafter\let\csname\??naturaltabletal\m_tabl_tbl_level:\number#1\endcsname}
+\def\tabl_ntb_get_tal#1{\csname\??naturaltabletal\m_tabl_tbl_level:\number#1\endcsname}
\def\tabl_ntb_set_nob#1{\expandafter\let\csname\??naturaltablenob\m_tabl_tbl_level:\number#1\endcsname\plusone}
\def\tabl_ntb_get_nob#1{\ifcsname\??naturaltablenob\m_tabl_tbl_level:\number#1\endcsname\plusone\else\zerocount\fi}
@@ -281,15 +312,22 @@
% \def\tabl_ntb_row_state#1#2{\ifcsname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
% \def\tabl_ntb_col_state#1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
-\def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
-\def\tabl_ntb_spn_doifelse#1{\doifelse {\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname}\!!plusone}
+%def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
+%def\tabl_ntb_spn_doifelse#1{\doifelse {\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname}\!!plusone}
-\def\tabl_ntb_set_spn #1{\setvalue {\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
-\def\tabl_ntb_spn_doifelse#1{\doifelsevalue{\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
+%def\tabl_ntb_set_spn #1{\setvalue {\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
+%def\tabl_ntb_spn_doifelse#1{\doifelsevalue{\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
-\def\tabl_ntb_let_ref#1#2{\expandafter\glet\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
-\def\tabl_ntb_set_ref#1#2{\expandafter\xdef\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
-\def\tabl_ntb_get_ref#1#2{\ifcsname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\fi}
+\def\tabl_ntb_let_ref #1#2{\expandafter\glet\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_set_ref #1#2{\expandafter\xdef\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_get_ref #1#2{\ifcsname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\fi}
+
+\def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
+\def\tabl_ntb_spn_doifelse#1{\ifcase0\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname\relax % could be inlined
+ \expandafter\secondoftwoarguments % unset
+ \else
+ \expandafter\firstoftwoarguments % a span
+ \fi}
% keep for a while:
%
@@ -317,6 +355,7 @@
\newif\ifenableTBLbreak \enableTBLbreakfalse
\newif\ifmultipleTBLheads \multipleTBLheadsfalse
\newif\iftightTBLrowspan \tightTBLrowspantrue
+\newif\iftightTBLcolspan \tightTBLcolspanfalse
\newif\iftraceTABLE \traceTABLEfalse
@@ -549,7 +588,6 @@
% local
\ifcsname\m_tabl_ntb_prefix\c!y++\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!y++\m_tabl_ntb_positive_row\endcsname\fi
% done
- \xdef\m_tabl_ntb_after_split{\naturaltablelocalparameter\c!after}% to be checked
\relax}
% we cannot use +n (checking on number/last/first would slow down too much)
@@ -565,7 +603,9 @@
% \dorecurse{10}{\bTR \dorecurse{6}{\bTD xxx \eTD} \eTR}
% \eTABLE
-\let\m_tabl_ntb_after_split\relax
+\let\m_tabl_ntb_before_split\empty
+\let\m_tabl_ntb_after_split \empty
+\let\m_tabl_ntb_same_page \empty
% split + page:
%
@@ -577,6 +617,7 @@
\unexpanded\def\tabl_ntb_tr
{\c_tabl_ntb_running_col\zerocount
+ \c_tabl_ntb_encountered_col\zerocount
\advance\c_tabl_ntb_maximum_row\plusone
\iffirstargument
\expandafter\tabl_ntb_tr_yes
@@ -591,7 +632,8 @@
\def\m_tabl_ntb_default_nc{\naturaltableparameter\c!nc}
\unexpanded\def\tabl_ntb_td
- {\iffirstargument
+ {\advance\c_tabl_ntb_encountered_col\plusone
+ \iffirstargument
\expandafter\tabl_ntb_td_yes
\else
\expandafter\tabl_ntb_td_nop
@@ -651,7 +693,10 @@
% save text
\normalexpanded
{\tabl_ntb_set_txt_process\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_maximum_row}{\the\c_tabl_ntb_running_col}}%
- {#1}{#2}}
+ {#1}{#2}%
+ \ifnum\c_tabl_ntb_encountered_col>\c_tabl_ntb_encountered_max
+ \c_tabl_ntb_encountered_max\c_tabl_ntb_encountered_col
+ \fi}
\def\tabl_ntb_td_nop[#1]#2\eTD
{\global\advance\c_tabl_ntb_spn\plusone\relax
@@ -671,7 +716,10 @@
\tabl_ntb_let_ref\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\empty
\normalexpanded
{\tabl_ntb_set_txt_process\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_maximum_row}{\the\c_tabl_ntb_running_col}}%
- {#1}{#2}}
+ {#1}{#2}%
+ \ifnum\c_tabl_ntb_encountered_col>\c_tabl_ntb_encountered_max
+ \c_tabl_ntb_encountered_max\c_tabl_ntb_encountered_col
+ \fi}
\def\tabl_ntb_td_pass_n#1%
{\scratchcounter\numexpr\m_tabl_ntb_n-\c_tabl_ntb_running_col+\minusone-\c_tabl_ntb_spn\relax
@@ -850,10 +898,20 @@
\ifhmode\kern\zeropoint\fi % blocks \removeunwantedspaces: check this on icare handelingsschema
\resetcharacteralign % new
\setupcurrentnaturaltablelocal[\c!align={\v!right,\v!broad,\v!high},#1]%
+ %
+ \d_tabl_ntb_leftmargindistance \naturaltablelocalparameter\c!leftmargindistance\relax
+ \d_tabl_ntb_rightmargindistance\naturaltablelocalparameter\c!rightmargindistance\relax
+ \d_tabl_ntb_columndistance \naturaltablelocalparameter\c!columndistance\relax
+ \d_tabl_ntb_maxwidth \naturaltablelocalparameter\c!maxwidth\relax
+ %
\usesetupsparameter\naturaltablelocalparameter
\doifelse{\naturaltablelocalparameter\c!textwidth}\v!local
{\hsize\availablehsize}
{\hsize\naturaltablelocalparameter\c!textwidth}%
+ \enableTBLbreakfalse
+ \multipleTBLheadsfalse
+ \autoTBLspreadfalse
+ \tightTBLcolspanfalse
\processaction
[\naturaltablelocalparameter\c!split]
[ \v!yes=>\enableTBLbreaktrue,
@@ -865,7 +923,8 @@
\tabl_ntb_preset_parameters
\processallactionsinset
[\naturaltablelocalparameter\c!option]
- [\v!stretch=>\autoTBLspreadtrue]%
+ [\v!stretch=>\autoTBLspreadtrue,%
+ \v!tight=>\tightTBLcolspantrue]%
\linewidth\naturaltablelocalparameter\c!rulethickness % needs to be frozen
\dontcomplain
\c_tabl_ntb_running_col \zerocount
@@ -905,10 +964,10 @@
\unexpanded\def\eTABLE % beware, we need to get rid of spurious spaces when in hmode
{% tricky and dirty order -)
- \doifsometokselse\t_tabl_ntb_head % slow, better a flag
+ \doifelsesometoks\t_tabl_ntb_head % slow, better a flag
{\the\t_tabl_ntb_head
\c_tabl_ntb_n_of_head_lines\c_tabl_ntb_maximum_row\relax
- \doifsometokselse\t_tabl_ntb_next
+ \doifelsesometoks\t_tabl_ntb_next
{\the\t_tabl_ntb_next
\c_tabl_ntb_n_of_next_lines\numexpr\c_tabl_ntb_maximum_row-\c_tabl_ntb_n_of_head_lines\relax}%
{\c_tabl_ntb_n_of_next_lines\zerocount}% was 1
@@ -1013,7 +1072,7 @@
\ifautoTBLemptycell
\normalexpanded
{\tabl_ntb_set_txt_process\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\c_tabl_ntb_current_row}{\the\c_tabl_ntb_current_col}}%
- {}{\strut}%
+ {\c!option=\v!tight}{\strut\kern\scaledpoint}% the kern forces the tight
\fi}
\def\tabl_ntb_loop_two
@@ -1045,79 +1104,179 @@
\newcount\c_tabl_prelocated_rows % \prelocateTBLrows{1000} may speed up large tables
-\def\tabl_ntb_row_start{\t_tabl_ntb_row\emptytoks}
-\def\tabl_ntb_row_stop {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\t_tabl_ntb_row\tabl_ntb_row_align_stop}}}
+% \def\tabl_ntb_row_start{\t_tabl_ntb_row\emptytoks}
+% \def\tabl_ntb_row_stop {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\t_tabl_ntb_row\tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_row_start
+ {\t_tabl_ntb_row\emptytoks}
+
+\def\tabl_ntb_row_stop
+ {\ifenableTBLbreak
+ \tabl_ntb_row_stop_split
+ \else
+ \tabl_ntb_row_stop_boxed
+ \fi}
+
+\def\tabl_ntb_row_stop_boxed
+ {% \noindent % no, else double leftskip in narrower
+ \normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ % no need for init
+ \tabl_ntb_row_align_start
+ \the\t_tabl_ntb_row
+ \tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_row_stop_split
+ {\ifcsname\??naturaltableset\m_tabl_tbl_level:\c!y++\the\c_tabl_ntb_current_row\endcsname
+ \tabl_ntb_row_stop_split_yes
+ \else
+ \tabl_ntb_row_stop_split_nop
+ \fi}
+
+\def\tabl_ntb_row_stop_split_nop
+ {\normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ \tabl_ntb_row_align_reset
+ \tabl_ntb_row_align_start
+ \the\t_tabl_ntb_row
+ \tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_row_stop_split_yes
+ {\begingroup
+ \csname\??naturaltableset\m_tabl_tbl_level:\c!y++\the\c_tabl_ntb_current_row\endcsname
+ \xdef\m_tabl_ntb_before_split{\naturaltablelocalparameter\c!before}% to be checked
+ \xdef\m_tabl_ntb_after_split {\naturaltablelocalparameter\c!after}% to be checked
+ \xdef\m_tabl_ntb_same_page {\naturaltablelocalparameter\c!samepage}%
+ \endgroup
+ \normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ \tabl_ntb_row_align_set{\m_tabl_ntb_before_split}{\m_tabl_ntb_after_split}{\m_tabl_ntb_same_page}%
+ \tabl_ntb_row_align_start
+ \the\t_tabl_ntb_row
+ \tabl_ntb_row_align_stop}}}
+
+\unexpanded\def\tabl_ntb_row_align_set#1#2#3%
+ {\xdef\m_tabl_ntb_before_split{#1}%
+ \xdef\m_tabl_ntb_after_split {#2}%
+ \xdef\m_tabl_ntb_same_page {#3}}
+
+\unexpanded\def\tabl_ntb_row_align_reset
+ {\global\let\m_tabl_ntb_before_split\empty
+ \global\let\m_tabl_ntb_after_split \empty
+ \global\let\m_tabl_ntb_same_page \empty}
\def\tabl_ntb_prelocate_error
{\writestatus\m!system{fatal error: use \string\prelocateTBLrows\space to increase table memory (now: \the\c_tabl_prelocated_rows)}}
% \prelocateTBLrows{1000} % may speed up large tables
-\installcorenamespace{naturaltabletok}
-
\def\prelocateTBLrows#1% we start at zero so we have one too many, better play safe anyway
- {\dostepwiserecurse\c_tabl_prelocated_rows{#1}\plusone{\expandafter\newtoks\csname\??naturaltabletok\recurselevel\endcsname}%
+ {\dostepwiserecurse\c_tabl_prelocated_rows{#1}\plusone
+ {\expandafter\newtoks\csname\??naturaltabletok\recurselevel\endcsname}%
\def\tabl_ntb_row_start
{\ifnum\c_tabl_ntb_row<\c_tabl_prelocated_rows\relax
- \expandafter\let\expandafter\t_tabl_ntb_row\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\t_tabl_ntb_row\emptytoks
+ \tabl_ntb_prelocate_okay
\else
\tabl_ntb_prelocate_error
\fi}%
\def\tabl_ntb_row_stop
- {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\tabl_ntb_row_align_stop}}}%
+ {\normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ \tabl_ntb_row_align_start
+ \the\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname
+ \tabl_ntb_row_align_stop}}}%
\global\c_tabl_prelocated_rows#1\relax}
+\def\tabl_ntb_prelocate_okay
+ {\expandafter\let\expandafter\t_tabl_ntb_row\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\t_tabl_ntb_row\emptytoks}
+
% We use alignments to handle the empty (skipped) columns, so
% that we don't have to (re)calculate these.
-\def\tabl_ntb_column_skip
- {\global\advance\c_tabl_ntb_col\plusone}
-
-\def\tabl_ntb_column_next
- {\global\advance\c_tabl_ntb_col\plusone
- \kern\naturaltablelocalparameter\c!columndistance
- \aligntab}
-
-\def\tabl_ntb_column_span
- {\span}
-
\let\m_tabl_ntb_saved_row\!!zerocount
\let\m_tabl_ntb_saved_col\!!zerocount
-\def\tabl_ntb_row_align_start
- {\noalign{\tabl_ntb_row_align_reset}%
- \tabl_ntb_column_next
- \kern\dimexpr\naturaltablelocalparameter\c!leftmargindistance-\naturaltablelocalparameter\c!columndistance\relax}
-
-\unexpanded\def\tabl_ntb_row_align_reset
+\unexpanded\def\tabl_ntb_row_align_start
{\global\advance\c_tabl_ntb_row\plusone
- \global\c_tabl_ntb_col\zerocount
- \global\c_tabl_ntb_spn\zerocount}
+ \global\c_tabl_ntb_col\plusone
+ \global\c_tabl_ntb_spn\zerocount
+ \tabl_ntb_row_align_start_inject
+ \dostarttagged\t!tablerow\empty
+ \hbox\bgroup
+ \kern\dimexpr\d_tabl_ntb_leftmargindistance\relax}
\unexpanded\def\tabl_ntb_row_align_stop
- {\kern\dimexpr\naturaltablelocalparameter\c!rightmargindistance-\naturaltablelocalparameter\c!columndistance\relax
- \crcr
- \noalign
- {\nointerlineskip
- \ifnum\c_tabl_ntb_row>\c_tabl_ntb_n_of_head_lines
- \ifnum\tabl_ntb_get_nob\c_tabl_ntb_row=\zerocount
- \allowbreak
- \fi
- \else
- \allowbreak % else no proper head split off
- \fi
- \bgroup % protect local vars
- \m_tabl_ntb_after_split
- \egroup
- \bgroup % protect local vars
- \scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
- \ifnum\scratchcounter>\c_tabl_ntb_n_of_hdnx_lines\relax
- \ifnum\scratchcounter<\c_tabl_ntb_maximum_row\relax
- \doifsomething{\naturaltablelocalparameter\c!spaceinbetween}
- {\blank[\naturaltablelocalparameter\c!spaceinbetween]}%
- \fi
- \fi
- \egroup}}
+ {\kern\dimexpr\d_tabl_ntb_rightmargindistance-\d_tabl_ntb_columndistance\relax
+ \egroup
+ \dostoptagged
+ \tabl_ntb_row_align_stop_inject}
+
+\unexpanded\def\tabl_ntb_before_page
+ {\ifx\m_tabl_ntb_same_page\v!before
+ % \blank[\v!samepage,\v!strong]%
+ \unpenalty
+ \nobreak
+ \else\ifx\m_tabl_ntb_same_page\v!both
+ % \blank[\v!samepage,\v!strong]%
+ \unpenalty
+ \nobreak
+ \fi\fi}
+
+\unexpanded\def\tabl_ntb_after_page
+ {\ifnum\c_tabl_ntb_row>\c_tabl_ntb_n_of_head_lines
+ \ifnum\tabl_ntb_get_nob\c_tabl_ntb_row=\zerocount
+ \unpenalty
+ \ifx\m_tabl_ntb_same_page\v!after
+ % \blank[\v!samepage,\v!strong]%
+ \nobreak
+ \else\ifx\m_tabl_ntb_same_page\v!both
+ % \blank[\v!samepage,\v!strong]%
+ \nobreak
+ \else
+ % \blank[\v!preference,\v!weak]%
+ \allowbreak
+ \fi\fi
+ \fi
+ \else
+ % \blank[\v!preference,\v!weak]%
+ \allowbreak % else no proper head split off
+ \fi}
+
+\unexpanded\def\tabl_ntb_inbetween
+ {\scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
+ \ifnum\scratchcounter>\c_tabl_ntb_n_of_hdnx_lines\relax
+ \ifnum\scratchcounter<\c_tabl_ntb_maximum_row\relax
+ \edef\p_spaceinbetween{\naturaltablelocalparameter\c!spaceinbetween}%
+ \ifx\p_spaceinbetween\empty\else
+ \blank[\p_spaceinbetween]%
+ \fi
+ \fi
+ \fi}
+
+\unexpanded\def\tabl_ntb_row_align_start_inject
+ {\bgroup % protect local vars
+ \m_tabl_ntb_before_split
+ \egroup
+ \ifenableTBLbreak
+ \tabl_ntb_before_page
+ \fi}
+
+\unexpanded\def\tabl_ntb_row_align_stop_inject
+ {\par
+ \nointerlineskip
+ \ifenableTBLbreak
+ \tabl_ntb_after_page
+ \fi
+ \bgroup % protect local vars
+ \m_tabl_ntb_after_split
+ \egroup
+ \bgroup % protect local vars
+ \tabl_ntb_inbetween
+ \egroup}
\def\tabl_ntb_flush_content
{\the\everyTABLEpass
@@ -1125,47 +1284,56 @@
\global\c_tabl_ntb_col\zerocount
\global\c_tabl_ntb_row\zerocount
\global\advance\c_tabl_ntb_row\minusone
- \tabskip\zeropoint
- \dostarttagged\t!table\empty
- \dostarttagged\t!tablerow\empty
- \appendtoks\dostoptagged\dostarttagged\t!tablerow\empty\to\everycr
- \halign\bgroup
- \registerparoptions
- % watch out: tagging the cell happens at the outer level (faster)
- \ignorespaces\alignmark\alignmark\unskip
- \aligntab\aligntab
- \ignorespaces\alignmark\alignmark\unskip
- \cr % one too many
- \the\t_tabl_ntb
- \dostoptagged
- \egroup
+ \dostarttaggedchained\t!table\empty\??naturaltable
+ %\registerparoptions % (*) triggers max hsize
+ \the\t_tabl_ntb
\dostoptagged}
+\unexpanded\def\tabl_ntb_span#1%
+ {\dorecurse{#1}
+ {\hskip\tabl_ntb_get_wid\c_tabl_ntb_col\relax
+ \global\advance\c_tabl_ntb_col\plusone}}
+
+\unexpanded\def\tabl_ntb_skip#1%
+ {\global\advance\c_tabl_ntb_col#1\relax}
+
+\unexpanded\def\tabl_ntb_plus
+ {\global\advance\c_tabl_ntb_col\plusone
+ \kern\d_tabl_ntb_columndistance}
+
+% \setvalue{\??naturaltablecell\the\c_tabl_ntb_none}#1#2%
+% {\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
+% \ifnum\scratchcounter>\zerocount
+% \normalexpanded
+% {\t_tabl_ntb_row
+% {\the\t_tabl_ntb_row
+% \tabl_ntb_span{\the\scratchcounter}%
+% \tabl_ntb_plus}}%
+% \fi}
+
\setvalue{\??naturaltablecell\the\c_tabl_ntb_none}#1#2%
{\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\ifnum\scratchcounter>\zerocount
- \advance\scratchcounter\minusone
- \ifnum\scratchcounter>\zerocount
- \tabl_ntb_span
- \fi
- \t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_next}
+ \normalexpanded
+ {\t_tabl_ntb_row
+ {\the\t_tabl_ntb_row
+ \tabl_ntb_span{\the\scratchcounter}}}%
\fi}
\setvalue{\??naturaltablecell\the\c_tabl_ntb_cell}#1#2%
{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_pass #1 #2 }% space delimited -> less tokens
\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\ifnum\scratchcounter>\zerocount
- \advance\scratchcounter\minusone
- \ifnum\scratchcounter>\zerocount
- \tabl_ntb_span
- \fi
- \t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_next}
+ \normalexpanded
+ {\t_tabl_ntb_row
+ {\the\t_tabl_ntb_row
+ \ifnum\scratchcounter=\plusone
+ \tabl_ntb_plus
+ \else
+ \tabl_ntb_skip{\the\scratchcounter}%
+ \fi}}%
\fi}
-\def\tabl_ntb_span
- {\dorecurse\scratchcounter{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_span}}%
- \dorecurse\scratchcounter{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_skip}}}
-
\unexpanded\def\tabl_ntb_cell#1#2%
{\csname\??naturaltablecell\the\tabl_ntb_get_tag{#1}{#2}\endcsname{#1}{#2}}
@@ -1183,16 +1351,22 @@
{\d_tabl_ntb_width\zeropoint
\scratchcounter\c_tabl_ntb_col
\!!counta\tabl_ntb_get_col{#1}{#2}\relax
- \dorecurse\!!counta
- {\advance\d_tabl_ntb_width\dimexpr
- \tabl_ntb_get_wid\scratchcounter
- +\naturaltablelocalparameter\c!columndistance
- \ifnum\recurselevel<\!!counta
- +\tabl_ntb_get_dis\scratchcounter
- \fi
- \relax
- \advance\scratchcounter\plusone}%
- \advance\d_tabl_ntb_width-\naturaltablelocalparameter\c!columndistance\relax
+ \ifcase\!!counta\or
+ \advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \relax
+ \advance\scratchcounter\plusone
+ \else
+ \dorecurse\!!counta
+ {\advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \ifnum\recurselevel<\!!counta
+ +\d_tabl_ntb_columndistance
+ +\tabl_ntb_get_dis\scratchcounter
+ \fi
+ \relax
+ \advance\scratchcounter\plusone}%
+ \fi
\setbox\scratchbox\hbox{\tabl_ntb_get_txt{#1}{#2}}%
\tabl_ntb_set_ht{#1}{#2}{\the\ht\scratchbox}%
\tabl_ntb_set_wd{#1}{#2}{\the\wd\scratchbox}%
@@ -1223,16 +1397,22 @@
% width
\d_tabl_ntb_width\zeropoint
\scratchcounter\c_tabl_ntb_col
- \dorecurse\!!counta
- {\advance\d_tabl_ntb_width\dimexpr
- \tabl_ntb_get_wid\scratchcounter
- +\naturaltablelocalparameter\c!columndistance
- \ifnum\recurselevel<\!!counta
- +\tabl_ntb_get_dis\scratchcounter
- \fi
- \relax
- \advance\scratchcounter\plusone}%
- \advance\d_tabl_ntb_width-\naturaltablelocalparameter\c!columndistance\relax
+ \ifcase\!!counta\or
+ \advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \relax
+ \advance\scratchcounter\plusone
+ \else
+ \dorecurse\!!counta
+ {\advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \ifnum\recurselevel<\!!counta
+ +\d_tabl_ntb_columndistance
+ +\tabl_ntb_get_dis\scratchcounter
+ \fi
+ \relax
+ \advance\scratchcounter\plusone}%
+ \fi
% cell
\setbox\scratchbox\hbox attr \taggedattribute \attribute\taggedattribute \bgroup
\dotagTABLEsignal % maybe we need to add some packaging in this case
@@ -1255,18 +1435,40 @@
\fi
\dostoptagged} % right spot
+% \def\tabl_ntb_cell_finalize
+% {\doifnotinset\localwidth{\v!fit,\v!broad}% user set
+% {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
+% \ifdim\localwidth>\scratchdimen
+% \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
+% \fi}}
+
\def\tabl_ntb_cell_finalize
- {\doifnotinset\localwidth{\v!fit,\v!broad}% user set
- {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
- \ifdim\localwidth>\scratchdimen
- \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
- \fi}}
+ {\ifx\localwidth\v!fit
+ % nothing
+ \else\ifx\localwidth\v!broad
+ % nothing
+ \else\ifx\localwidth\empty
+ % nothing (safeguard)
+ \else
+ \tabl_ntb_cell_finalize_indeed
+ \fi\fi\fi}
+
+\def\tabl_ntb_cell_finalize_indeed
+ {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
+ \ifdim\localwidth>\scratchdimen
+ \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
+ \fi}
+
+\let\tabl_ntb_preroll\relax
\def\tabl_ntb_table_stop
- {\setbox\scratchbox\hbox
- {\setupcurrentnaturaltablelocal[\c!frame=\v!off,\c!background=,\c!align=\v!no]%
- \inheritednaturaltablelocalframed{\strut}}%
- \edef\minimalcellheight{\the\ht\scratchbox}% not used
+ {\forgetall % new, here see narrower-004.tex
+ %\setbox\scratchbox\hbox
+ % {\letnaturaltablelocalparameter\c!frame\v!off
+ % \letnaturaltablelocalparameter\c!background\empty
+ % \letnaturaltablelocalparameter\c!align\v!no
+ % \inheritednaturaltablelocalframed{\strut}}%
+ %\edef\minimalcellheight{\the\ht\scratchbox}% not used
\dorecurse\c_tabl_ntb_maximum_col
{\tabl_ntb_let_aut\recurselevel\zeropoint
% new
@@ -1275,14 +1477,23 @@
{\tabl_ntb_let_wd\recurselevel\c_tabl_ntb_current_col_one\zeropoint
\tabl_ntb_let_ht\recurselevel\c_tabl_ntb_current_col_one\zeropoint}%
% till here
+ \tabl_ntb_let_tal\recurselevel\zerocount
\tabl_ntb_let_wid\recurselevel\zeropoint
\tabl_ntb_let_dis\recurselevel\zeropoint}%
\dorecurse\c_tabl_ntb_maximum_row
{\tabl_ntb_let_hei\recurselevel\maxdimen}%
+ \tabl_ntb_let_gal\zerocount
+\tabl_ntb_preroll\relax
\c_tabl_tbl_pass\plusone
\let\tabl_ntb_pass\tabl_ntb_pass_one
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_a
- \setbox0\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \ifcase\tabl_ntb_get_gal\or
+ % \c_tabl_tbl_pass\plusone
+ % \let\tabl_ntb_pass\tabl_ntb_pass_one
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_a_extra
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \fi
\tabl_ntb_let_dis\c_tabl_ntb_maximum_col\zeropoint
\ifautoTBLspread
% experimental, stretch non fixed cells to \hsize
@@ -1291,21 +1502,24 @@
\tabl_ntb_stretch_widths
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_b
\setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
- \else\ifdim\wd0>\hsize
+ \else\ifdim\wd\scratchbox>\hsize
\ifautoTBLhsize
\tabl_ntb_check_widths_one % trial run
\tabl_ntb_check_widths_two % real run
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_b
\setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\fi
- \else\ifautoTBLrowspan\ifnum\c_tabl_ntb_maximum_row_span>1 % max ?
+ \else\ifautoTBLrowspan\ifnum\c_tabl_ntb_maximum_row_span>\plusone % max ?
% added jan 2002 because nx=* did no longer work
+ \ifnum\c_tabl_ntb_encountered_max<\c_tabl_ntb_maximum_col
+ % added jun 2014 because someone had fewer columns than nx .. sigh / see *nx*
+ \writestatus\m!TABLE{missing\space\number\numexpr\c_tabl_ntb_maximum_col-\c_tabl_ntb_encountered_max\relax\space column(s), guessing widths}%
+ \fi
\edef\savedhsize{\the\hsize}%
- \hsize\wd0\relax % new per 17/04/2006
+ \hsize\wd\scratchbox\relax % new per 17/04/2006
\tabl_ntb_check_widths_one % trial run
\tabl_ntb_check_widths_two % real run
\hsize\savedhsize
- %
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_c
\setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\fi\fi\fi\fi
@@ -1319,39 +1533,56 @@
\c_tabl_tbl_pass\plusthree
\let\tabl_ntb_pass\tabl_ntb_pass_three
\ifnum\m_tabl_tbl_level>\plusone
- \expandafter\tabl_tbl_split_nop
+ \tabl_tbl_split_nop
\else\ifenableTBLbreak
- \doubleexpandafter\tabl_tbl_split_yes
+ \tabl_tbl_split_yes
\else
- \doubleexpandafter\tabl_tbl_split_nop
- \fi\fi{\tabl_ntb_flush_content}}
+ \tabl_tbl_split_nop
+ \fi\fi}
\def\tabl_ntb_stretch_widths % more variants, e.g. a max to \dimend
{\ifcase\c_tabl_ntb_maximum_col\else % else division by zero
\!!dimend\zeropoint
- \!!dimene\hsize
+ \!!dimene\dimexpr
+ \hsize
+ -\d_tabl_ntb_leftmargindistance
+ -\d_tabl_ntb_rightmargindistance
+ +\d_tabl_ntb_columndistance
+ \relax
\dorecurse\c_tabl_ntb_maximum_col
- {\advance\!!dimend\dimexpr\tabl_ntb_get_wid\recurselevel+\naturaltablelocalparameter\c!columndistance\relax
- \advance\!!dimene-\tabl_ntb_get_dis\recurselevel}%
- \advance\!!dimend\dimexpr-\naturaltablelocalparameter\c!columndistance+\naturaltablelocalparameter\c!leftmargindistance+\naturaltablelocalparameter\c!rightmargindistance\relax
+ {\advance\!!dimend\dimexpr
+ \tabl_ntb_get_wid\recurselevel
+ \relax
+ \advance\!!dimene\dimexpr
+ -\tabl_ntb_get_dis\recurselevel
+ -\d_tabl_ntb_columndistance
+ \relax}%
+ \relax
% distribute width (stretch)
\ifdim\!!dimend<\!!dimene
\advance\!!dimend-\!!dimene
- \!!dimend-\!!dimend
\divide\!!dimend\c_tabl_ntb_maximum_col
\dorecurse\c_tabl_ntb_maximum_col
- {\tabl_ntb_set_wid\recurselevel{\the\dimexpr\tabl_ntb_get_wid\recurselevel+\!!dimend\relax}}%
+ {\tabl_ntb_set_wid\recurselevel{\the\dimexpr\tabl_ntb_get_wid\recurselevel-\!!dimend\relax}}%
\fi
\fi}
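
The rewritten \tabl_ntb_stretch_widths above first sums the current column widths and the available line width (hsize minus the left and right margin distances, corrected for the inter-column distances) and then adds the same share of the leftover to every column. A minimal sketch of that arithmetic in plain Lua (illustrative names only, not part of the ConTeXt sources):

-- even-stretch distribution as in \tabl_ntb_stretch_widths (sketch only)
local function stretch_widths(widths, distances, hsize, leftmargin, rightmargin, columndistance)
    local n = #widths
    if n == 0 then
        return widths -- guard against division by zero, like \ifcase\c_tabl_ntb_maximum_col
    end
    local used      = 0
    local available = hsize - leftmargin - rightmargin + columndistance
    for c = 1, n do
        used      = used + widths[c]
        available = available - (distances[c] or 0) - columndistance
    end
    if used < available then
        local extra = (available - used) / n -- the same share for every column
        for c = 1, n do
            widths[c] = widths[c] + extra
        end
    end
    return widths
end

-- three 50pt columns stretched towards a 200pt line (margins 5pt, distance 1pt)
local w = stretch_widths({ 50, 50, 50 }, { 1, 1, 1 }, 200, 5, 5, 1)
print(w[1], w[2], w[3]) --> 61.666... each (the 35pt leftover split three ways)
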
-\def\tabl_tbl_split_nop#1%
- {\setbox\b_tabl_ntb_final\vbox{#1}%
+\def\tabl_tbl_split_nop
+ {\setbox\b_tabl_ntb_final\vbox{\tabl_ntb_flush_content}%
\postprocessTABLEbox\b_tabl_ntb_final
\beforeTABLEbox
+ % packaging prevents max hsized box
+ % \hbox{\registerparoptions\box\b_tabl_ntb_final}% (*) better here
+ % better :
+ \ifinsidefloat
+ % nothing, else we get a \hsized box
+ \else
+ \registerparoptions % (*) better here
+ \fi
\box\b_tabl_ntb_final
\afterTABLEbox}
-\def\tabl_tbl_split_yes % #1
+\def\tabl_tbl_split_yes
{\ifinsidesplitfloat
\donetrue
\else\ifinsidefloat
@@ -1369,14 +1600,15 @@
\let\extratblsplitheight\zeropoint % additional space taken by before/afterTABLEsplitbox
-\def\tabl_ntb_split_box#1%
+\def\tabl_ntb_split_box
{\resettsplit
\def\tsplitminimumfreelines{2}%
\def\tsplitminimumfreespace{\dimexpr\extratblsplitheight+\naturaltablelocalparameter\c!splitoffset\relax}%
\def\tsplitbeforeresult {\beforeTABLEsplitbox}%
\def\tsplitafterresult {\afterTABLEsplitbox}%
\def\tsplitafter {\m_tabl_ntb_after_split}%
- \setbox\tsplitcontent\vbox{#1}%
+ \def\tsplitbefore {\m_tabl_ntb_before_split}% supported ?
+ \setbox\tsplitcontent\vbox{\tabl_ntb_flush_content}%
\ifmultipleTBLheads
\dorecurse\c_tabl_ntb_n_of_head_lines
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
@@ -1385,8 +1617,10 @@
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
\setbox\tsplitnext\vbox{\unvcopy\tsplitnext\unvcopy\scratchbox}}%
\fi
- \doifsomething{\naturaltablelocalparameter\c!spaceinbetween}
- {\def\tsplitinbetween{\blank[\naturaltablelocalparameter\c!spaceinbetween]}}%
+ \edef\p_spaceinbetween{\naturaltablelocalparameter\c!spaceinbetween}%
+ \ifx\p_spaceinbetween\empty\else
+ \blank[\p_spaceinbetween]%
+ \fi
\def\postprocesstsplit{\postprocessTABLEsplitbox{\box\tsplitresult}}%
\handletsplit}
@@ -1406,15 +1640,23 @@
\def\tabl_ntb_check_widths_indeed#1%
{\iftraceTABLE\tabl_ntb_show_widths{B#1}\fi
\!!counta\zerocount
- \!!dimena\dimexpr\hsize-\naturaltablelocalparameter\c!leftmargindistance-\naturaltablelocalparameter\c!rightmargindistance-\naturaltablelocalparameter\c!columndistance\relax
+ \!!dimena\dimexpr
+ \hsize
+ -\d_tabl_ntb_leftmargindistance
+ -\d_tabl_ntb_rightmargindistance
+ -\d_tabl_ntb_columndistance
+ \relax
\dorecurse\c_tabl_ntb_maximum_col
{\scratchdimen\tabl_ntb_get_aut\recurselevel\relax
- \advance\!!dimena-\tabl_ntb_get_dis\recurselevel\relax
+ \advance\!!dimena\dimexpr
+ -\tabl_ntb_get_dis\recurselevel
+ -\d_tabl_ntb_columndistance
+ \relax
\ifdim\scratchdimen>\zeropoint\relax
\advance\!!dimena -\scratchdimen
\else
\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
- \ifdim\scratchdimen>\naturaltablelocalparameter\c!maxwidth\relax
+ \ifdim\scratchdimen>\d_tabl_ntb_maxwidth\relax
\ifcase#1\else\tabl_ntb_let_wid\recurselevel\zeropoint\fi
\advance\!!counta \plusone
\else
@@ -1423,7 +1665,9 @@
\else
% actually this should only be done when the column is spanned by a
% previous one, i.e. an extra double loop and a status variable
- \advance\!!counta \plusone
+ \ifnum\c_tabl_ntb_encountered_max=\c_tabl_ntb_maximum_col % *nx* bah
+ \advance\!!counta \plusone % setting maxwidth to a large value also works
+ \fi
\fi
\fi
\fi}%
@@ -1500,7 +1744,6 @@
\fi
\fi}
-
\def\tabl_ntb_check_heights_one
{\dorecurse\c_tabl_ntb_maximum_row
{\c_tabl_ntb_current_row_three\recurselevel\relax
@@ -1515,7 +1758,9 @@
\def\tabl_ntb_show_widths#1%
{\vbox
- {\forgetall\tttf[#1]\dorecurse\c_tabl_ntb_maximum_col
+ {\forgetall
+ \tttf[#1]%
+ \dorecurse\c_tabl_ntb_maximum_col
{\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
[\recurselevel:\the\scratchdimen]}}}
@@ -1532,46 +1777,124 @@
% \setsecondpasscharacteralign \checkalignment{#3}% {\strut#2\unskip}%
% \ignorespaces}
-\def\tabl_ntb_char_align
- {\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes
- \tabl_ntb_char_align_indeed
- \gobbletwoarguments}
+\def\tabl_ntb_char_align % called often
+ {\edef\p_characteralign{\naturaltablelocalparameter\c!aligncharacter}%
+ \ifx\p_characteralign\v!yes
+ \ifcase\c_tabl_tbl_pass\or
+ \tabl_ntb_let_tal\currentTABLEcolumn\plusone
+ \tabl_ntb_let_gal\plusone
+ \fi
+ \expandafter\tabl_ntb_char_align_indeed
+ \else
+ \expandafter\gobbletwoarguments
+ \fi}
\def\tabl_ntb_char_align_indeed#1#2% row column
{\ifcase\c_tabl_tbl_pass \or
- \setcharacteralign{#2}{\naturaltablelocalparameter\c!alignmentcharacter}%
+ \setcharacteralign{#2}{\naturaltablelocalparameter\c!alignmentcharacter}% we could store the character in tal
\fi
\typo_charalign_adapt_font
\signalcharacteralign{#2}{#1}}
+\unexpanded\def\tabl_ntb_cell_process_a_extra#1#2%
+ {\ifcase\tabl_ntb_get_tal{#2}\relax
+ \expandafter\tabl_ntb_cell_process_x
+ \else
+ \expandafter\tabl_ntb_cell_process_a
+ \fi{#1}{#2}}
+
+\unexpanded\def\tabl_ntb_cell_process_x#1#2[#3]#4%
+ {}
+
+% problem: when a span doesn't break we can have a span that is the sum of
+% cells but still too small .. a chicken-and-egg problem ... for that we should
+% also have a smallest-width run
+%
+% nilling the background makes a run up to 25% faster
+
+\def\tabl_ntb_cell_process_a_check_span_one
+ {\ifautosqueezeTBLspan
+ \edef\p_width{\naturaltablelocalparameter\c!width}%
+ \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+ \else
+ \donetrue
+ \fi
+ \ifdone % brr, 0
+ \ifnum\scratchcounter>\plusone
+ \tabl_ntb_set_spn\c_tabl_ntb_col
+ \fi
+ \fi}
+
+% \def\tabl_ntb_cell_process_a_check_span_two_yes
+% {\iftightTBLcolspan
+% \donefalse
+% \else
+% \ifnum\scratchcounter>\plusone
+% \begingroup
+% \edef\p_width{\naturaltablelocalparameter\c!width}%
+% \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+% \ifdone
+% \endgroup
+% \edef\p_option{\naturaltablelocalparameter\c!option}%
+% \ifx\p_option\v!tight\donefalse\else\donetrue\fi
+% \else
+% % a dimension
+% \endgroup
+% \donefalse
+% \fi
+% \else
+% \edef\p_option{\naturaltablelocalparameter\c!option}%
+% \ifx\p_option\v!tight\donefalse\else\donetrue\fi
+% \fi
+% \fi
+% \ifdone
+% \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+% \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+% \fi
+% \fi}
+
+% \def\tabl_ntb_cell_process_a_check_span_two_nop
+% {\ifnum\scratchcounter>\plusone
+% \edef\p_width{\naturaltablelocalparameter\c!width}%
+% \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+% \else
+% \donetrue
+% \fi
+% \ifdone
+% \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+% \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+% \fi
+% \fi}
+
+\let\tabl_ntb_cell_process_a_check_span_two_yes\relax
+
+\def\tabl_ntb_cell_process_a_check_span_two_nop
+ {\ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+ \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+ \fi}
+
\unexpanded\def\tabl_ntb_cell_process_a#1#2[#3]#4% grouping added ! ! !
{\bgroup
+ \letnaturaltablelocalparameter\c!option\empty
\tabl_ntb_setup_cell{#1}{#2}%
+ \setupcurrentnaturaltablelocal[#3]%
+ \letnaturaltablelocalparameter\c!background\empty
+ \letnaturaltablelocalparameter\c!frame\v!off
+ \scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\setbox\scratchbox\hbox
{\scratchdimen\naturaltablelocalparameter\c!distance\relax
\ifdim\scratchdimen>\tabl_ntb_get_dis{#2}\relax
\tabl_ntb_set_dis{#2}{\the\scratchdimen}%
\fi
- \setupcurrentnaturaltablelocal[#3,\c!background=,\c!frame=\v!off]% 25% faster
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop\tabl_ntb_cell_finalize}}%
\scratchdimen\tabl_ntb_get_wid\c_tabl_ntb_col\relax
\ifdim\wd\scratchbox>\scratchdimen
\ifsqueezeTBLspan
- \ifautosqueezeTBLspan
- \doifinsetelse{\naturaltablelocalparameter\c!width}{\v!fit,\v!fixed,\v!broad,\v!local}
- \donetrue \donefalse
- \else
- \donetrue
- \fi
- \ifdone % brr, 0
- \ifnum\tabl_ntb_get_col{#1}{#2}>\plusone \tabl_ntb_set_spn\c_tabl_ntb_col\fi
- \fi
+ \tabl_ntb_cell_process_a_check_span_one
\fi
\tabl_ntb_spn_doifelse\c_tabl_ntb_col
- \donothing
- {\ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
- \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
- \fi}% auto set
+ \tabl_ntb_cell_process_a_check_span_two_yes
+ \tabl_ntb_cell_process_a_check_span_two_nop
\fi
\scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
\scratchdimen\tabl_ntb_get_hei\scratchcounter\relax
@@ -1592,8 +1915,7 @@
% unless span
\tabl_ntb_set_aut\c_tabl_ntb_col{\the\wd\scratchbox}%
% to be translated
- \writestatus\m!TABLE
- {no auto width in (\number#1,\number#2)\space\the\wd\scratchbox/\the\hsize}%
+ \writestatus\m!TABLE{no auto width in (\number#1,\number#2)\space\the\wd\scratchbox/\the\hsize}%
\fi\fi
\fi
\fi
@@ -1608,7 +1930,9 @@
\unexpanded\def\tabl_ntb_cell_process_b_c#1#2#3[#4]#5%
{\setbox\scratchbox\hbox
{\tabl_ntb_setup_cell{#2}{#3}%
- \setupcurrentnaturaltablelocal[#4,#1,\c!frame=\v!off,\c!background=]%
+ \setupcurrentnaturaltablelocal[#4,#1]%
+ \letnaturaltablelocalparameter\c!background\empty
+ \letnaturaltablelocalparameter\c!frame\v!off
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start#5\tabl_ntb_cell_stop}}%
\setbox2\emptyhbox
\wd2\wd\scratchbox
@@ -1647,7 +1971,10 @@
\unexpanded\def\tabl_ntb_cell_process_d#1#2[#3]#4%
{\tabl_ntb_setup_cell{#1}{#2}%
\bgroup
- \setupcurrentnaturaltablelocal[#3,\c!width=\d_tabl_ntb_width,\c!background=,\c!frame=\v!off]% 25% faster
+ \setupcurrentnaturaltablelocal[#3]%
+ \letnaturaltablelocalparameter\c!background\empty
+ \letnaturaltablelocalparameter\c!frame\v!off
+ \setnaturaltablelocalparameter\c!width{\d_tabl_ntb_width}%
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}%
\egroup}
@@ -1656,10 +1983,11 @@
\setupcurrentnaturaltablelocal[#3]% to get the color right, the way we
\color % handle color here prevents interference due to whatsit nodes
[\naturaltablelocalparameter\c!color] % as well as permits local colors to take precedence
- {\ifdim\d_tabl_ntb_height=\zeropoint\relax % case: nc=maxcolumns
- \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width]%
+ {\letnaturaltablelocalparameter\c!color\empty
+ \setnaturaltablelocalparameter\c!width{\d_tabl_ntb_width}%
+ \ifdim\d_tabl_ntb_height=\zeropoint\relax % case: nc=maxcolumns
\else
- \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width,\c!height=\d_tabl_ntb_height]%
+ \setnaturaltablelocalparameter\c!height{\d_tabl_ntb_height}%
\fi
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}}%
\hskip\tabl_ntb_get_dis{#2}}
@@ -1667,7 +1995,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
+ % \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
@@ -1712,14 +2040,14 @@
\newconditional\resetTABLEmode \settrue\resetTABLEmode
-\def\tabl_ntb_parameters_reset
+\def\tabl_ntb_parameters_reset % we can use setters instead
{\ifnum\m_tabl_tbl_level>\plusone % in any case
\ifconditional\resetTABLEmode
% not ok yet
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
+ % \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
diff --git a/tex/context/base/tabl-tab.mkiv b/tex/context/base/tabl-tab.mkiv
index f9ac27e70..5add1c684 100644
--- a/tex/context/base/tabl-tab.mkiv
+++ b/tex/context/base/tabl-tab.mkiv
@@ -1503,9 +1503,9 @@
\global\setfalse\tableactionstatepermitted
\global\setfalse\hassometablehead
\global\setfalse\hassometabletail
- \expanded{\doifinstringelse{|}{#1}}
+ \expanded{\doifelseinstring{|}{#1}}
{\xdef\restarttable{\noexpand\dorestarttable{\noexpand\thirdstagestarttable{#1}}}}
- {\doifdefinedelse{\??tabletemplate#1}
+ {\doifelsedefined{\??tabletemplate#1}
{\gdef\restarttable{\getvalue{\??tabletemplate#1}}}
{\gdef\restarttable{\dorestarttable{\getvalue{#1}}}}}%
\egroup
@@ -1921,7 +1921,7 @@
{\global\currenttablecolumn\zerocount}
\def\dotablevrulecommand#1% global assignments
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\tablevrulethicknessfactor#1\relax
\global\multiply\tablevrulethicknessfactor\m_tabl_table_VLwidth\relax}
{\xdef\tablecurrentvrulecolor{#1}}}
@@ -1955,7 +1955,7 @@
\unexpanded\def\dotableVN#1{\global\noftablevrules#1\relax\VL}
\def\dotablehrulecommand#1% global assignments
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\tablehrulethicknessfactor#1\relax
\global\multiply\tablehrulethicknessfactor\m_tabl_table_HLheight\relax}
{\xdef\tablecurrenthrulecolor{#1}}}
@@ -2075,7 +2075,7 @@
\fi}
\def\dotabledrulecommand#1% global assignments
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\ifcase\tabledrulespan
\global\tabledrulespan#1\relax
\else
@@ -2203,7 +2203,7 @@
\def\dolocaltablesetup
{\directtablesparameter\c!commands\relax
- \doifsomething{\directtablesparameter\c!bodyfont}{\switchtobodyfont[\directtablesparameter\c!bodyfont]}%
+ \usebodyfontparameter\directtablesparameter
\tablelinethicknessunit\dimexpr\directtablesparameter\c!rulethickness/\tablelinethicknessfactor\relax
\edef\p_tabl_table_height{\directtablesparameter\c!height}%
\edef\p_tabl_table_depth{\directtablesparameter\c!depth}%
@@ -2288,8 +2288,7 @@
\c!distance=\v!medium,
\c!bodyfont=,
\c!commands=,
- \c!background=\v!screen, % huh?
- \c!backgroundscreen=\defaultbackgroundscreen,
+ \c!background=,
\c!backgroundcolor=,
\c!split=\v!auto]
diff --git a/tex/context/base/tabl-tbl.lua b/tex/context/base/tabl-tbl.lua
index 21564a472..13d899de6 100644
--- a/tex/context/base/tabl-tbl.lua
+++ b/tex/context/base/tabl-tbl.lua
@@ -9,21 +9,25 @@ if not modules then modules = { } end modules ['tabl-tbl'] = {
-- A couple of hacks ... easier to do in Lua than in regular TeX. More will
-- follow.
-local context, commands = context, commands
-
local tonumber = tonumber
local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find
local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match
+local context = context
+local commands = commands
+
local texsetcount = tex.setcount
-local separator = P("|")
-local nested = lpeg.patterns.nested
-local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
+local separator = P("|")
+local nested = lpeg.patterns.nested
+local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
+
+local ctx_settabulatelastentry = context.settabulatelastentry
+local ctx_settabulateentry = context.settabulateentry
-function commands.presettabulate(preamble)
+local function presettabulate(preamble)
preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here
- if find(preamble,"%*") then
+ if find(preamble,"*",1,true) then
-- todo: lpeg but not now
preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p)
return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1)
@@ -35,7 +39,14 @@ function commands.presettabulate(preamble)
texsetcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1)
texsetcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1)
for i=1,m,2 do
- context.settabulateentry(t[i],t[i+1])
+ ctx_settabulateentry(t[i],t[i+1])
end
- context.settabulatelastentry(t[m+1])
+ ctx_settabulatelastentry(t[m+1])
end
+
+interfaces.implement {
+ name = "presettabulate",
+ actions = presettabulate,
+ arguments = "string",
+ scope = "private",
+}
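
For reference, the presettabulate helper above expands the *{n}{spec} shorthand with a gsub before the preamble is split on the bar characters; the patch also switches to a plain (non-pattern) find for the * test. A standalone sketch of just that expansion step (plain Lua, outside ConTeXt):

local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find

local function expand(preamble)
    preamble = gsub(preamble, "~", "d")      -- same ~ normalization as above
    if find(preamble, "*", 1, true) then     -- plain find: no pattern magic needed
        preamble = gsub(preamble, "%*(%b{})(%b{})", function(n, p)
            return rep(sub(p, 2, -2), tonumber(sub(n, 2, -2)) or 1)
        end)
    end
    return preamble
end

print(expand("|l|*{3}{c|}r|")) --> |l|c|c|c|r|
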
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv
index cd5efa7f7..ae4f03825 100644
--- a/tex/context/base/tabl-tbl.mkiv
+++ b/tex/context/base/tabl-tbl.mkiv
@@ -331,7 +331,7 @@
{\dowithnextboxcs\tabl_tabulate_shaped_par_finish\vbox\bgroup}
\def\tabl_tabulate_shaped_par_finish
- {\ctxcommand{doreshapeframedbox(\number\nextbox)}%
+ {\clf_doreshapeframedbox\nextbox\relax
\ifvmode\unvbox\else\box\fi\nextbox}
\let\tabl_tabulate_shaped_par_end\egroup
@@ -429,7 +429,8 @@
\aligntab
\tabl_tabulate_column_vrule_inject
\tabl_tabulate_color_side_left
- \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
+% \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
+ \tabl_tabulate_inject_pre_skip{\the\dimexpr\s_tabl_tabulate_pre}% get rid of plus
\alignmark\alignmark
\aligntab
\tabl_tabulate_color_side_both
@@ -496,7 +497,7 @@
\egroup
\aligntab
\noexpand\dostoptagged
- \tabl_tabulate_inject_post_skip{\the\s_tabl_tabulate_post}%
+ \tabl_tabulate_inject_post_skip{\the\dimexpr\s_tabl_tabulate_post}% get rid of plus
\alignmark\alignmark
}}%
\t_tabl_tabulate_dummy\expandafter{\the\t_tabl_tabulate_dummy\NC}%
@@ -598,17 +599,17 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_preskip#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\s_tabl_tabulate_pre#1\d_tabl_tabulate_unit\tabl_tabulate_set_preamble }
{\s_tabl_tabulate_pre.5\d_tabl_tabulate_unit\tabl_tabulate_set_preamble#1}}
\def\tabl_tabulate_set_posskip#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\s_tabl_tabulate_post#1\d_tabl_tabulate_unit\tabl_tabulate_set_preamble }
{\s_tabl_tabulate_post.5\d_tabl_tabulate_unit\tabl_tabulate_set_preamble#1}}
\def\tabl_tabulate_set_preposskip#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\s_tabl_tabulate_pre#1\d_tabl_tabulate_unit\s_tabl_tabulate_post\s_tabl_tabulate_pre\tabl_tabulate_set_preamble }
{\s_tabl_tabulate_pre.5\d_tabl_tabulate_unit\s_tabl_tabulate_post\s_tabl_tabulate_pre\tabl_tabulate_set_preamble#1}}
@@ -650,7 +651,7 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_pickup_width
- {\doifnextparenthesiselse\tabl_tabulate_set_width_indeed\tabl_tabulate_set_preamble}
+ {\doifelsenextparenthesis\tabl_tabulate_set_width_indeed\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_width
{\setfalse\c_tabl_tabulate_pwidth_set
@@ -663,7 +664,7 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_paragraph
- {\doifnextparenthesiselse
+ {\doifelsenextparenthesis
{\c_tabl_tabulate_modus\plusone
\settrue\c_tabl_tabulate_pwidth_set
\tabl_tabulate_pickup_width}
@@ -748,7 +749,7 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_vrule_command#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\d_tabl_tabulate_vrulethickness#1\d_tabl_tabulate_vrulethickness_default}
{\xdef\m_tabl_tabulate_vrule_color{#1}}}
@@ -1048,23 +1049,34 @@
\def\tabl_tabulate_insert_foot_content
{\tabulatenoalign{\global\settrue\c_tabl_tabulate_someamble}%
- \csname\??tabulatehead\currenttabulation\endcsname
+ \csname\??tabulatefoot\currenttabulation\endcsname
\tabulatenoalign{\global\setfalse\c_tabl_tabulate_someamble}}%
\def\tabl_tabulate_check_full_content % - needed, else confusion with \c!header
{\ifcsname\??tabulatehead\currenttabulation\endcsname
- \let\tabl_tabulate_insert_head\tabl_tabulate_insert_head_content
+ \expandafter\ifx\csname\??tabulatehead\currenttabulation\endcsname\empty
+ \let\tabl_tabulate_insert_head\empty
+ \else
+ \let\tabl_tabulate_insert_head\tabl_tabulate_insert_head_content
+ \fi
\else
\let\tabl_tabulate_insert_head\empty
\fi
\ifcsname\??tabulatefoot\currenttabulation\endcsname
- \let\tabl_tabulate_insert_foot\tabl_tabulate_insert_foot_content
+ \expandafter\ifx\csname\??tabulatefoot\currenttabulation\endcsname\empty
+ \let\tabl_tabulate_insert_foot\empty
+ \else
+ \let\tabl_tabulate_insert_foot\tabl_tabulate_insert_head_content
+ \fi
\else
\let\tabl_tabulate_insert_foot\empty
\fi}
\def\tabl_tabulate_insert_content
{\tabl_tabulate_insert_head
+ \ifcase\c_tabl_tabulate_repeathead \else
+ \tabulatenoalign{\penalty\zerocount}% added 7/5/2014 WS mail
+ \fi
\tabl_tabulate_insert_body
\tabl_tabulate_insert_foot
\tabl_tabulate_remove_funny_line}
@@ -1075,8 +1087,10 @@
\tabulatenoalign{\kern-\lineheight}%
\fi}
-\setuvalue{\e!start\v!tabulatehead}{\doifnextoptionalelse\tabl_tabulate_start_head_yes\tabl_tabulate_start_head_nop}
-\setuvalue{\e!start\v!tabulatetail}{\doifnextoptionalelse\tabl_tabulate_start_foot_yes\tabl_tabulate_start_foot_nop}
+% todo: make footer synonym to tail
+
+\setuvalue{\e!start\v!tabulatehead}{\doifelsenextoptionalcs\tabl_tabulate_start_head_yes\tabl_tabulate_start_head_nop}
+\setuvalue{\e!start\v!tabulatetail}{\doifelsenextoptionalcs\tabl_tabulate_start_foot_yes\tabl_tabulate_start_foot_nop}
\let\m_tabl_tabulate_data\empty
@@ -1086,30 +1100,30 @@
\def\tabl_tabulate_start_foot_yes[#1]%
{\processcontent{\e!stop\v!tabulatetail}\m_tabl_tabulate_data{\letvalue{\??tabulatefoot#1}\m_tabl_tabulate_data}}
-% \def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[\v!tabulate]}
-% \def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[\v!tabulate]}
+\def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[\v!tabulate]}
+\def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[\v!tabulate]}
-\def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[]}
-\def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[]}
+% \def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[]}
+% \def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[]}
% \unexpanded\def\tabl_start_defined[#1]%
% {\bgroup
% \edef\currenttabulationparent{#1}%
% \let\currenttabulation\currenttabulationparent
-% \doifnextoptionalelse\tabl_start_defined_yes\tabl_start_defined_nop}
-%
-% \def\tabl_start_defined_yes[#1]%
-% {\edef\currenttabulation{\currenttabulation:#1}%
-% \tabl_tabulate_start_building}
-%
-% \def\tabl_start_defined_nop
-% {\tabl_tabulate_start_building}
+% \dodoubleargument\tabl_start_defined_indeed}
\unexpanded\def\tabl_start_defined[#1]%
{\bgroup
\edef\currenttabulationparent{#1}%
\let\currenttabulation\currenttabulationparent
- \dodoubleargument\tabl_start_defined_indeed}
+ \edef\p_format{\tabulationparameter\c!format}%
+ \ifx\p_format\v!none
+ % this is a special case: we need to define the generic english
+ % \starttabulate in other interfaces as well
+ \expandafter\dodoubleempty \expandafter\tabl_start_regular
+ \else
+ \expandafter\dodoubleargument\expandafter\tabl_start_defined_indeed
+ \fi}
\def\tabl_start_defined_indeed
{\iffirstargument
@@ -1123,7 +1137,7 @@
\fi}
\def\tabl_start_defined_one[#1][#2]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\setuptabulation[\currenttabulation][#1]}%
{\edef\currenttabulation{\currenttabulation:#1}}%
\tabl_tabulate_start_building}
@@ -1140,26 +1154,11 @@
\unexpanded\setuvalue{\e!start\v!tabulate}%
{\bgroup % whole thing
+ \let\currenttabulationparent\empty
\dodoubleempty\tabl_start_regular}
-% \def\tabl_start_regular[#1][#2]%
-% {%\let\currenttabulationparent\v!tabulate
-% \let\currenttabulationparent\empty
-% \let\currenttabulation\currenttabulationparent
-% \def\p_format{#1}%
-% \ifx\p_format\empty
-% \def\p_format{|l|p|}%
-% \fi
-% \lettabulationparameter\c!format\p_format
-% \ifsecondargument
-% \setupcurrenttabulation[#2]%
-% \fi
-% \tabl_tabulate_start_building}
-
\def\tabl_start_regular
- {%\let\currenttabulationparent\v!tabulate
- \let\currenttabulationparent\empty
- \let\currenttabulation\currenttabulationparent
+ {\let\currenttabulation\currenttabulationparent
\ifsecondargument
\expandafter\tabl_start_regular_two
\else
@@ -1167,7 +1166,7 @@
\fi}
\def\tabl_start_regular_one[#1][#2]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\setupcurrenttabulation[\c!format={|l|p|},#1]}
{\def\p_format{#1}%
\ifx\p_format\empty
@@ -1317,7 +1316,7 @@
\def\tabl_tabulate_set_color_column#1% overloaded
{\unskip
- \doiffastoptionalcheckelse{\tabl_tabulate_set_color_column_yes#1}{\tabl_tabulate_set_color_column_nop#1}}
+ \doifelsefastoptionalcheck{\tabl_tabulate_set_color_column_yes#1}{\tabl_tabulate_set_color_column_nop#1}}
\def\tabl_tabulate_set_color_column_nop
{\tabl_tabulate_column_normal}
@@ -1356,13 +1355,13 @@
% \fi\fi
\global\let\m_tabl_tabulate_vrule_color_local\m_tabl_tabulate_vrule_color_default
\global\d_tabl_tabulate_vrulethickness_local\d_tabl_tabulate_vrulethickness_default
- \doiffastoptionalcheckelse{\tabl_tabulate_column_vruled_yes#1}{\tabl_tabulate_column_vruled_nop#1}}
+ \doifelsefastoptionalcheck{\tabl_tabulate_column_vruled_yes#1}{\tabl_tabulate_column_vruled_nop#1}}
\def\tabl_tabulate_column_vruled_nop
{\tabl_tabulate_column_normal}
\def\tabl_tabulate_column_vruled_step#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\d_tabl_tabulate_vrulethickness_local#1\d_tabl_tabulate_vrulethickness_default}
{\xdef\m_tabl_tabulate_vrule_color_local{#1}}}
@@ -1450,15 +1449,15 @@
\def\tabl_tabulate_hrule_spec_ignore#1%
{%\global\let\currenttabulationlocalhrulecolor\empty
%\global\d_tabl_tabulate_hrulethickness_local\d_tabl_tabulate_hrulethickness_default
- \doiffastoptionalcheckelse#1#1}
+ \doifelsefastoptionalcheck#1#1}
\def\tabl_tabulate_hrule_spec_pickup#1%
{\global\let\currenttabulationlocalhrulecolor\m_tabl_tabulate_hrule_color_default
\global\d_tabl_tabulate_hrulethickness_local\d_tabl_tabulate_hrulethickness_default
- \doiffastoptionalcheckelse{\tabl_tabulate_hrule_preset#1}#1}
+ \doifelsefastoptionalcheck{\tabl_tabulate_hrule_preset#1}#1}
\def\tabl_tabulate_hrule_preset_step#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\d_tabl_tabulate_hrulethickness_local#1\d_tabl_tabulate_hrulethickness_default}
{\xdef\currenttabulationlocalhrulecolor{#1}}}
@@ -1527,7 +1526,7 @@
\unexpanded\def\tabl_tabulate_color_set#1% we could store the attributes at the cost of a lua call
{\begingroup
- \node_backgrounds_align_initialize % name might change
+ \clf_enablebackgroundalign % was \node_backgrounds_align_initialize
\global\let\tabl_tabulate_color_repeat\tabl_tabulate_color_repeat_second
\global\settrue\c_tabl_tabulate_has_colors
\ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_max_colorcolumn
@@ -2066,9 +2065,9 @@
\donetrue
\fi\fi
\ifdone
- \c_tabl_tabulate_repeathead\executeifdefined{\??tabulateheader\tabulationparameter\c!header}\zerocount
+ \global\c_tabl_tabulate_repeathead\executeifdefined{\??tabulateheader\tabulationparameter\c!header}\zerocount
\else
- \c_tabl_tabulate_repeathead\zerocount
+ \global\c_tabl_tabulate_repeathead\zerocount
\fi
%
\the\t_tabl_tabulate_initializers_first % collect more here
@@ -2102,7 +2101,7 @@
\c_tabl_tabulate_nofcolumns \zerocount
\c_tabl_tabulate_has_rule_spec_first\zerocount
\c_tabl_tabulate_has_rule_spec_last \zerocount
- \ctxcommand{presettabulate(\!!bs\detokenizedtabulationparameter\c!format\!!es)}%
+ \clf_presettabulate{\detokenizedtabulationparameter\c!format}%
%
% \edef\totaltabulatecolumns{\the\numexpr3*\c_tabl_tabulate_columns+\plusfour}%
\d_tabl_tabulate_width\zeropoint
@@ -2168,7 +2167,7 @@
\global\setbox\b_tabl_tabulate\vbox \bgroup
\fi
%
- \dostarttagged\t!tabulate\empty
+ \dostarttaggedchained\t!tabulate\empty\??tabulation
\dostarttagged\t!tabulaterow\empty
\setfalse\inhibitmargindata % new per 2012.06.13 ... really needed
\everycr\expandafter{\the\everycr\dostoptagged\dostarttagged\t!tabulaterow\empty}%
@@ -2448,4 +2447,10 @@
%D \stopwhatever
%D \stoptyping
+%D This is needed because we sometimes use the english command in
+%D tracing macros. In fact, most detailed tracing macros that
+%D are done with \LUA\ only work in the english interface anyway.
+
+\definetabulate[tabulate] \setuptabulate[tabulate][\c!format=\v!none] % so no \v! here
+
\protect \endinput
diff --git a/tex/context/base/tabl-tsp.mkiv b/tex/context/base/tabl-tsp.mkiv
index 0138697af..64ab94a67 100644
--- a/tex/context/base/tabl-tsp.mkiv
+++ b/tex/context/base/tabl-tsp.mkiv
@@ -195,7 +195,7 @@
\normalexpanded{\egroup\noexpand\edef\noexpand\extrasplitfloatlines{\the\noflines}}%
\global\settrue\usesamefloatnumber
\else
- \doifnumberelse\extrasplitfloatlines\donothing{\def\extrasplitfloatlines{1}}%
+ \doifelsenumber\extrasplitfloatlines\donothing{\def\extrasplitfloatlines{1}}%
\fi}
\unexpanded\def\doifnotinsidesplitfloat
diff --git a/tex/context/base/tabl-xnt.mkvi b/tex/context/base/tabl-xnt.mkvi
index ffa1f501e..fd2de5a13 100644
--- a/tex/context/base/tabl-xnt.mkvi
+++ b/tex/context/base/tabl-xnt.mkvi
@@ -130,6 +130,6 @@
{\bgroup
\tabl_x_prepare{#settings}%
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
- \buff_pickup\tabl_x_current_buffer{bTABLE}{eTABLE}\relax\tabl_x_process}
+ \buff_pickup\tabl_x_current_buffer{bTABLE}{eTABLE}\relax\tabl_x_process\zerocount}
\protect \endinput
diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua
index 488ef5b78..4bf8e3107 100644
--- a/tex/context/base/tabl-xtb.lua
+++ b/tex/context/base/tabl-xtb.lua
@@ -24,38 +24,57 @@ this mechanism will be improved so that it can replace its older cousin.
]]--
-- todo: use linked list instead of r/c array
+-- todo: we can use the sum of previously forced widths for column spans
-local commands, context, tex, node = commands, context, tex, node
+local tonumber, next = tonumber, next
-local texgetcount = tex.getcount
-local texsetcount = tex.setcount
-local texgetbox = tex.getbox
-local texgetdimen = tex.getdimen
-local texsetdimen = tex.setdimen
-local texget = tex.get
+local commands = commands
+local context = context
+local tex = tex
-local format = string.format
-local concat = table.concat
-local points = number.points
+local implement = interfaces.implement
+
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
+local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
+local texget = tex.get
+
+local format = string.format
+local concat = table.concat
+local points = number.points
+
+local todimen = string.todimen
-local context = context
local context_beginvbox = context.beginvbox
local context_endvbox = context.endvbox
local context_blank = context.blank
local context_nointerlineskip = context.nointerlineskip
+local context_dummyxcell = context.dummyxcell
local variables = interfaces.variables
local setmetatableindex = table.setmetatableindex
local settings_to_hash = utilities.parsers.settings_to_hash
-local copy_node_list = node.copy_list
-local hpack_node_list = node.hpack
-local vpack_node_list = node.vpack
-local slide_node_list = node.slide
-local flush_node_list = node.flush_list
+local nuts = nodes.nuts -- here nuts gain hardly nothing
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local getbox = nuts.getbox
-local nodepool = nodes.pool
+local setfield = nuts.setfield
+
+local copy_node_list = nuts.copy_list
+local hpack_node_list = nuts.hpack
+local flush_node_list = nuts.flush_list
+local takebox = nuts.takebox
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
@@ -69,6 +88,12 @@ local v_height = variables.height
local v_repeat = variables["repeat"]
local v_max = variables.max
local v_fixed = variables.fixed
+local v_auto = variables.auto
+local v_before = variables.before
+local v_after = variables.after
+local v_both = variables.both
+local v_samepage = variables.samepage
+local v_tight = variables.tight
local xtables = { }
typesetters.xtables = xtables
@@ -96,36 +121,40 @@ local stack, data = { }, nil
function xtables.create(settings)
table.insert(stack,data)
- local rows = { }
- local widths = { }
- local heights = { }
- local depths = { }
- local spans = { }
- local distances = { }
- local autowidths = { }
- local modes = { }
- local fixedrows = { }
- local fixedcolumns = { }
- local frozencolumns = { }
- local options = { }
+ local rows = { }
+ local widths = { }
+ local heights = { }
+ local depths = { }
+ local spans = { }
+ local distances = { }
+ local autowidths = { }
+ local modes = { }
+ local fixedrows = { }
+ local fixedcolumns = { }
+ -- local fixedcspans = { }
+ local frozencolumns = { }
+ local options = { }
+ local rowproperties = { }
data = {
- rows = rows,
- widths = widths,
- heights = heights,
- depths = depths,
- spans = spans,
- distances = distances,
- modes = modes,
- autowidths = autowidths,
- fixedrows = fixedrows,
- fixedcolumns = fixedcolumns,
- frozencolumns = frozencolumns,
- options = options,
- nofrows = 0,
- nofcolumns = 0,
- currentrow = 0,
- currentcolumn = 0,
- settings = settings or { },
+ rows = rows,
+ widths = widths,
+ heights = heights,
+ depths = depths,
+ spans = spans,
+ distances = distances,
+ modes = modes,
+ autowidths = autowidths,
+ fixedrows = fixedrows,
+ fixedcolumns = fixedcolumns,
+ -- fixedcspans = fixedcspans,
+ frozencolumns = frozencolumns,
+ options = options,
+ nofrows = 0,
+ nofcolumns = 0,
+ currentrow = 0,
+ currentcolumn = 0,
+ settings = settings or { },
+ rowproperties = rowproperties,
}
local function add_zero(t,k)
t[k] = 0
@@ -141,6 +170,9 @@ function xtables.create(settings)
nx = 0,
ny = 0,
list = false,
+ wd = 0,
+ ht = 0,
+ dp = 0,
}
row[c] = cell
if c > data.nofcolumns then
@@ -166,24 +198,28 @@ function xtables.create(settings)
setmetatableindex(fixedrows,add_zero)
setmetatableindex(fixedcolumns,add_zero)
setmetatableindex(options,add_table)
+ -- setmetatableindex(fixedcspans,add_table)
+ --
+ local globaloptions = settings_to_hash(settings.option)
--
- settings.columndistance = tonumber(settings.columndistance) or 0
- settings.rowdistance = tonumber(settings.rowdistance) or 0
- settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
+ settings.columndistance = tonumber(settings.columndistance) or 0
+ settings.rowdistance = tonumber(settings.rowdistance) or 0
+ settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0
- settings.options = settings_to_hash(settings.option)
- settings.textwidth = tonumber(settings.textwidth) or texget("hsize")
- settings.lineheight = tonumber(settings.lineheight) or texgetdimen("lineheight")
- settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
+ settings.options = globaloptions
+ settings.textwidth = tonumber(settings.textwidth) or texget("hsize")
+ settings.lineheight = tonumber(settings.lineheight) or texgetdimen("lineheight")
+ settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
-- if #stack > 0 then
-- settings.textwidth = texget("hsize")
-- end
data.criterium_v = 2 * data.settings.lineheight
data.criterium_h = .75 * data.settings.textwidth
-
+ --
+ data.tight = globaloptions[v_tight] and true or false
end
-function xtables.initialize_reflow_width(option)
+function xtables.initialize_reflow_width(option,width)
local r = data.currentrow
local c = data.currentcolumn + 1
local drc = data.rows[r][c]
@@ -204,8 +240,7 @@ function xtables.initialize_reflow_width(option)
data.currentcolumn = c
end
--- local function rather_fixed(n)
--- for n in node.
+-- todo: we can better set the cell values in one go
function xtables.set_reflow_width()
local r = data.currentrow
@@ -215,43 +250,111 @@ function xtables.set_reflow_width()
while row[c].span do -- can also be previous row ones
c = c + 1
end
- local tb = texgetbox("b_tabl_x")
+ local tb = getbox("b_tabl_x")
local drc = row[c]
--
drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
--
- local widths, width = data.widths, tb.width
- if width > widths[c] then
- widths[c] = width
- end
- local heights, height = data.heights, tb.height
- if height > heights[r] then
- heights[r] = height
+ local width = getfield(tb,"width")
+ local height = getfield(tb,"height")
+ local depth = getfield(tb,"depth")
+ --
+ local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
+ local cspan = drc.nx
+ if cspan < 2 then
+ if width > widths[c] then
+ widths[c] = width
+ end
+ else
+ local options = data.options[r][c]
+ if data.tight then
+ -- no check
+ elseif not options then
+ if width > widths[c] then
+ widths[c] = width
+ end
+ elseif not options[v_tight] then
+ if width > widths[c] then
+ widths[c] = width
+ end
+ end
end
- local depths, depth = data.depths, tb.depth
- if depth > depths[r] then
- depths[r] = depth
+ -- if cspan > 1 then
+ -- local f = data.fixedcspans[c]
+ -- local w = f[cspan] or 0
+ -- if width > w then
+ -- f[cspan] = width -- maybe some day a solution for autospanmax and so
+ -- end
+ -- end
+ if drc.ny < 2 then
+ if height > heights[r] then
+ heights[r] = height
+ end
+ if depth > depths[r] then
+ depths[r] = depth
+ end
end
--
+ drc.wd = width
+ drc.ht = height
+ drc.dp = depth
+ --
local dimensionstate = texgetcount("frameddimensionstate")
local fixedcolumns = data.fixedcolumns
local fixedrows = data.fixedrows
if dimensionstate == 1 then
- if width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
+ if cspan > 1 then
+ -- ignore width
+ elseif width > fixedcolumns[c] then -- how about a span here?
+ fixedcolumns[c] = width
+ end
elseif dimensionstate == 2 then
fixedrows[r] = height
elseif dimensionstate == 3 then
fixedrows[r] = height -- width
fixedcolumns[c] = width -- height
- else -- probably something frozen, like an image -- we could parse the list
- if width <= data.criterium_h and height >= data.criterium_v then
- if width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
+ elseif width <= data.criterium_h and height >= data.criterium_v then
+ -- somewhat tricky branch
+ if width > fixedcolumns[c] then -- how about a span here?
+ -- maybe an image, so let's fix
+ fixedcolumns[c] = width
end
end
+--
+-- -- this fails so not good enough predictor
+--
+-- -- \startxtable
+-- -- \startxrow
+-- -- \startxcell knuth \stopxcell
+-- -- \startxcell \input knuth \stopxcell
+-- -- \stopxrow
+--
+-- else
+-- local o = data.options[r][c]
+-- if o and o[v_auto] then -- new per 5/5/2014 - removed per 15/07/2014
+-- data.autowidths[c] = true
+-- else
+-- -- no dimensions are set in the cell
+-- if width <= data.criterium_h and height >= data.criterium_v then
+-- -- somewhat tricky branch
+-- if width > fixedcolumns[c] then -- how about a span here?
+-- -- maybe an image, so let's fix
+-- fixedcolumns[c] = width
+-- end
+-- else
+-- -- safeguard as it could be text that can be recalculated
+-- -- and the previous branch could have happened in a previous
+-- -- row and then forces a wrong one-liner in a multiliner
+-- if width > fixedcolumns[c] then
+-- data.autowidths[c] = true -- new per 5/5/2014 - removed per 15/07/2014
+-- end
+-- end
+-- end
+-- end
+--
+ --
drc.dimensionstate = dimensionstate
--
local nx, ny = drc.nx, drc.ny
@@ -306,6 +409,8 @@ function xtables.initialize_reflow_height()
elseif data.autowidths[c] then
-- width has changed so we need to recalculate the height
texsetcount("c_tabl_x_skip_mode",0)
+ elseif data.fixedcolumns[c] then
+ texsetcount("c_tabl_x_skip_mode",0) -- new
else
texsetcount("c_tabl_x_skip_mode",1)
end
@@ -319,18 +424,30 @@ function xtables.set_reflow_height()
-- while row[c].span do -- we could adapt drc.nx instead
-- c = c + 1
-- end
- local tb = texgetbox("b_tabl_x")
+ local tb = getbox("b_tabl_x")
local drc = row[c]
- if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights, height = data.heights, tb.height
- if height > heights[r] then
- heights[r] = height
- end
- local depths, depth = data.depths, tb.depth
- if depth > depths[r] then
- depths[r] = depth
+ --
+ local width = getfield(tb,"width")
+ local height = getfield(tb,"height")
+ local depth = getfield(tb,"depth")
+ --
+ if drc.ny < 2 then
+ if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
+ local heights = data.heights
+ local depths = data.depths
+ if height > heights[r] then
+ heights[r] = height
+ end
+ if depth > depths[r] then
+ depths[r] = depth
+ end
end
end
+ --
+ drc.wd = width
+ drc.ht = height
+ drc.dp = depth
+ --
-- c = c + drc.nx - 1
-- data.currentcolumn = c
end
@@ -344,23 +461,35 @@ function xtables.initialize_construct()
c = c + 1
end
data.currentcolumn = c
- local widths = data.widths
+ local widths = data.widths
local heights = data.heights
- local depths = data.depths
- local w = widths[c]
- local h = heights[r]
- local d = depths[r]
+ local depths = data.depths
+ --
local drc = row[c]
+ local wd = drc.wd
+ local ht = drc.ht
+ local dp = drc.dp
+ --
+ local width = widths[c]
+ local height = heights[r]
+ local depth = depths[r]
+ --
for x=1,drc.nx-1 do
- w = w + widths[c+x]
+ width = width + widths[c+x]
end
- for y=1,drc.ny-1 do
- h = h + heights[r+y]
- d = d + depths[r+y]
+ --
+ local total = height + depth
+ local ny = drc.ny
+ if ny > 1 then
+ for y=1,ny-1 do
+ local nxt = r + y
+ total = total + heights[nxt] + depths[nxt]
+ end
end
- texsetdimen("d_tabl_x_width",w)
- texsetdimen("d_tabl_x_height",h + d)
- texsetdimen("d_tabl_x_depth",0)
+ --
+ texsetdimen("d_tabl_x_width",width)
+ texsetdimen("d_tabl_x_height",total)
+ texsetdimen("d_tabl_x_depth",0) -- for now
end
function xtables.set_construct()
@@ -373,7 +502,7 @@ function xtables.set_construct()
-- end
local drc = row[c]
-- this will change as soon as in luatex we can reset a box list without freeing
- drc.list = copy_node_list(texgetbox("b_tabl_x"))
+ drc.list = takebox("b_tabl_x")
-- c = c + drc.nx - 1
-- data.currentcolumn = c
end
@@ -383,7 +512,7 @@ local function showwidths(where,widths,autowidths)
for i=1,#widths do
result[#result+1] = format("%12s%s",points(widths[i]),autowidths[i] and "*" or " ")
end
- return report_xtable("%s : %s",where,concat(result," "))
+ return report_xtable("%s widths: %s",where,concat(result," "))
end
function xtables.reflow_width()
@@ -406,6 +535,8 @@ function xtables.reflow_width()
local maxwidth = settings.maxwidth
-- calculate width
local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
local distances = data.distances
local autowidths = data.autowidths
local fixedcolumns = data.fixedcolumns
@@ -419,6 +550,7 @@ function xtables.reflow_width()
showwidths("stage 1",widths,autowidths)
end
local noffrozen = 0
+ -- inspect(data.fixedcspans)
if options[v_max] then
for c=1,nofcolumns do
width = width + widths[c]
@@ -541,16 +673,51 @@ function xtables.reflow_width()
--
data.currentrow = 0
data.currentcolumn = 0
+ --
+-- inspect(data)
end
function xtables.reflow_height()
data.currentrow = 0
data.currentcolumn = 0
local settings = data.settings
+ --
+ -- analyze ny
+ --
+ local nofrows = data.nofrows
+ local nofcolumns = data.nofcolumns
+ local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
+ --
+ for r=1,nofrows do
+ for c=1,nofcolumns do
+ local drc = data.rows[r][c]
+ if drc then
+ local ny = drc.ny
+ if ny > 1 then
+ local height = heights[r]
+ local depth = depths[r]
+ local total = height + depth
+ local htdp = drc.ht + drc.dp
+ for y=1,ny-1 do
+ local nxt = r + y
+ total = total + heights[nxt] + depths[nxt]
+ end
+ local delta = htdp - total
+ if delta > 0 then
+ delta = delta / ny
+ for y=0,ny-1 do
+ local nxt = r + y
+ heights[nxt] = heights[nxt] + delta
+ end
+ end
+ end
+ end
+ end
+ end
+ --
if settings.options[v_height] then
- local heights = data.heights
- local depths = data.depths
- local nofrows = data.nofrows
local totalheight = 0
local totaldepth = 0
for i=1,nofrows do
@@ -560,14 +727,16 @@ function xtables.reflow_height()
local total = totalheight + totaldepth
local leftover = settings.textheight - total
if leftover > 0 then
- local leftheight = (totalheight / total ) * leftover / #heights
- local leftdepth = (totaldepth / total ) * leftover / #depths
+ local leftheight = (totalheight / total) * leftover / #heights
+ local leftdepth = (totaldepth / total) * leftover / #depths
for i=1,nofrows do
heights[i] = heights[i] + leftheight
depths [i] = depths [i] + leftdepth
end
end
end
+ --
+-- inspect(data)
end
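The new ny analysis in reflow_height makes over-tall row spans fit by spreading their surplus evenly over the rows they cover; the existing height option then still distributes any leftover text height proportionally. A hedged sketch of the per-cell surplus step (illustrative name):

    -- spread the surplus of a cell of height ht and depth dp spanning ny rows
    local function spread_surplus(heights, depths, r, ny, ht, dp)
        local total = 0
        for y=0,ny-1 do
            total = total + heights[r+y] + depths[r+y]
        end
        local delta = (ht + dp) - total
        if delta > 0 then
            delta = delta / ny
            for y=0,ny-1 do
                heights[r+y] = heights[r+y] + delta
            end
        end
    end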
local function showspans(data)
@@ -607,6 +776,7 @@ function xtables.construct()
local rowdistance = settings.rowdistance
local leftmargindistance = settings.leftmargindistance
local rightmargindistance = settings.rightmargindistance
+ local rowproperties = data.rowproperties
-- ranges can be mixed so we collect
if trace_xtable then
@@ -646,23 +816,23 @@ function xtables.construct()
end
local list = drc.list
if list then
- list.shift = list.height + list.depth
+ setfield(list,"shift",getfield(list,"height") + getfield(list,"depth"))
-- list = hpack_node_list(list) -- is somehow needed
- -- list.width = 0
- -- list.height = 0
- -- list.depth = 0
+ -- setfield(list,"width",0)
+ -- setfield(list,"height",0)
+ -- setfield(list,"depth",0)
-- faster:
local h = new_hlist()
- h.list = list
+ setfield(h,"list",list)
list = h
--
if start then
- stop.next = list
- list.prev = stop
+ setfield(stop,"next",list)
+ setfield(list,"prev",stop)
else
start = list
end
- stop = list -- one node anyway, so not needed: slide_node_list(list)
+ stop = list
end
local step = widths[c]
if c < nofcolumns then
@@ -670,8 +840,8 @@ function xtables.construct()
end
local kern = new_kern(step)
if stop then
- stop.next = kern
- kern.prev = stop
+ setfield(stop,"next",kern)
+ setfield(kern,"prev",stop)
else -- can be first spanning next row (ny=...)
start = kern
end
@@ -680,8 +850,8 @@ function xtables.construct()
if start then
if rightmargindistance > 0 then
local kern = new_kern(rightmargindistance)
- stop.next = kern
- kern.prev = stop
+ setfield(stop,"next",kern)
+ setfield(kern,"prev",stop)
-- stop = kern
end
return start, heights[r] + depths[r], hasspan
@@ -699,14 +869,33 @@ function xtables.construct()
result[nofr][4] = true
end
nofr = nofr + 1
+ local rp = rowproperties[r]
+ -- there is a direction issue here, but hpack_node_list(list,0,"exactly","TLT") cannot be used
+ -- because we also need the resulting width
+ local hbox = hpack_node_list(list)
+ setfield(hbox,"dir","TLT")
result[nofr] = {
- hpack_node_list(list),
+ hbox,
size,
i < nofrange and rowdistance > 0 and rowdistance or false, -- might move
- false
+ false,
+ rp and rp.samepage or false,
}
end
end
+ if nofr > 0 then
+ -- the [5] slot gets the after break
+ result[1] [5] = false
+ result[nofr][5] = false
+ for i=2,nofr-1 do
+ local r = result[i][5]
+ if r == v_both or r == v_before then
+ result[i-1][5] = true
+ elseif r == v_after then
+ result[i][5] = true
+ end
+ end
+ end
return result
end
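Each collected row now carries five slots; the fifth holds the row's samepage hint and is rewritten into a break-suppression flag that inject() later turns into a samepage blank. A hedged sketch of the record (descriptive names, not from the source):

    -- shape of one collected row record
    local function make_row_record(hbox, size, rowdistance, samepage)
        return {
            hbox,                 -- [1] the packed row box
            size,                 -- [2] height + depth of the row
            rowdistance or false, -- [3] distance to the following row
            false,                -- [4] set when a vertical span continues
            samepage or false,    -- [5] v_before/v_after/v_both, later true/false
        }
    end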
local body = collect_range(ranges[body_mode])
@@ -721,7 +910,7 @@ function xtables.construct()
texsetdimen("global","d_tabl_x_final_width",0)
else
texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
+ texsetdimen("global","d_tabl_x_final_width",getfield(body[1][1],"width"))
end
end
@@ -734,22 +923,28 @@ local function inject(row,copy,package)
end
if package then
context_beginvbox()
- context(list)
- context(new_kern(row[2]))
+ context(tonode(list))
+ context(tonode(new_kern(row[2])))
context_endvbox()
context_nointerlineskip() -- figure out a better way
if row[4] then
-- nothing as we have a span
+ elseif row[5] then
+ if row[3] then
+ context_blank { v_samepage, row[3] .. "sp" }
+ else
+ context_blank { v_samepage }
+ end
elseif row[3] then
- context_blank(row[3] .. "sp") -- why blank ?
+ context_blank { row[3] .. "sp" } -- why blank ?
else
- context(new_glue(0))
+ context(tonode(new_glue(0)))
end
else
- context(list)
- context(new_kern(row[2]))
+ context(tonode(list))
+ context(tonode(new_kern(row[2])))
if row[3] then
- context(new_glue(row[3]))
+ context(tonode(new_glue(row[3])))
end
end
end
@@ -794,23 +989,23 @@ local function spanheight(body,i)
end
function xtables.flush(directives) -- todo split by size / no inbetween then .. glue list kern blank
- local vsize = directives.vsize
- local method = directives.method or v_normal
- local settings = data.settings
- local results = data.results
- local rowdistance = settings.rowdistance
- local head = results[head_mode]
- local foot = results[foot_mode]
- local more = results[more_mode]
- local body = results[body_mode]
+ local height = directives.height
+ local method = directives.method or v_normal
+ local settings = data.settings
+ local results = data.results
+ local rowdistance = settings.rowdistance
+ local head = results[head_mode]
+ local foot = results[foot_mode]
+ local more = results[more_mode]
+ local body = results[body_mode]
local repeatheader = settings.header == v_repeat
local repeatfooter = settings.footer == v_repeat
- if vsize and vsize > 0 then
+ if height and height > 0 then
context_beginvbox()
local bodystart = data.bodystart or 1
local bodystop = data.bodystop or #body
if bodystart > 0 and bodystart <= bodystop then
- local bodysize = vsize
+ local bodysize = height
local footsize = total(foot,rowdistance)
local headsize = total(head,rowdistance)
local moresize = total(more,rowdistance)
@@ -822,7 +1017,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],repeatheader)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
if not repeatheader then
results[head_mode] = { }
@@ -835,7 +1030,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(more[i],true)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
end
elseif headsize > 0 and repeatheader then -- following chunk gets head
@@ -845,7 +1040,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],true)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
end
else -- following chunk gets nothing
@@ -872,7 +1067,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- all is flushed and footer fits
if footsize > 0 then
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i])
@@ -886,7 +1081,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- todo: try to flush a few more lines
if repeatfooter and footsize > 0 then
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i],true)
@@ -897,7 +1092,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
texsetcount("global","c_tabl_x_state",2)
end
else
- if firstsize > vsize then
+ if firstsize > height then
-- get rid of the too large cell
for s=1,firstspans do
inject(body[bodystart])
@@ -921,13 +1116,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],false,true)
end
if #head > 0 and rowdistance > 0 then
- context_blank(rowdistance .. "sp")
+ context_blank { rowdistance .. "sp" }
end
for i=1,#body do
inject(body[i],false,true)
end
if #foot > 0 and rowdistance > 0 then
- context_blank(rowdistance .. "sp")
+ context_blank { rowdistance .. "sp" }
end
for i=1,#foot do
inject(foot[i],false,true)
@@ -938,13 +1133,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i])
end
if #head > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#body do
inject(body[i])
end
if #foot > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i])
@@ -964,31 +1159,92 @@ function xtables.cleanup()
flush_node_list(r[1])
end
end
+
+ -- local rows = data.rows
+ -- for i=1,#rows do
+ -- local row = rows[i]
+ -- for i=1,#row do
+ -- local cell = row[i]
+ -- local list = cell.list
+ -- if list then
+ -- cell.width = getfield(list,"width")
+ -- cell.height = getfield(list,"height")
+ -- cell.depth = getfield(list,"depth")
+ -- cell.list = true
+ -- end
+ -- end
+ -- end
+ -- data.result = nil
+ -- inspect(data)
+
data = table.remove(stack)
end
-function xtables.next_row()
+function xtables.next_row(specification)
local r = data.currentrow + 1
data.modes[r] = texgetcount("c_tabl_x_mode")
data.currentrow = r
data.currentcolumn = 0
+ data.rowproperties[r] = specification
+end
+
+function xtables.finish_row()
+ local n = data.nofcolumns - data.currentcolumn
+ if n > 0 then
+ -- message
+ for i=1,n do
+ context_dummyxcell()
+ end
+ end
end
-- eventually we might only have commands
-commands.x_table_create = xtables.create
-commands.x_table_reflow_width = xtables.reflow_width
-commands.x_table_reflow_height = xtables.reflow_height
-commands.x_table_construct = xtables.construct
-commands.x_table_flush = xtables.flush
-commands.x_table_cleanup = xtables.cleanup
-commands.x_table_next_row = xtables.next_row
-commands.x_table_init_reflow_width = xtables.initialize_reflow_width
-commands.x_table_init_reflow_height = xtables.initialize_reflow_height
-commands.x_table_init_construct = xtables.initialize_construct
-commands.x_table_set_reflow_width = xtables.set_reflow_width
-commands.x_table_set_reflow_height = xtables.set_reflow_height
-commands.x_table_set_construct = xtables.set_construct
-
-commands.x_table_r = function() context(data.currentrow or 0) end
-commands.x_table_c = function() context(data.currentcolumn or 0) end
+implement {
+ name = "x_table_create",
+ actions = xtables.create,
+ arguments = {
+ {
+ { "option" },
+ { "textwidth", "dimen" },
+ { "textheight", "dimen" },
+ { "maxwidth", "dimen" },
+ { "lineheight", "dimen" },
+ { "columndistance", "dimen" },
+ { "leftmargindistance", "dimen" },
+ { "rightmargindistance", "dimen" },
+ { "rowdistance", "dimen" },
+ { "header" },
+ { "footer" },
+ }
+ }
+}
+
+implement {
+ name = "x_table_flush",
+ actions = xtables.flush,
+ arguments = {
+ {
+ { "method" },
+ { "height", "dimen" }
+ }
+ }
+}
+
+implement { name = "x_table_reflow_width", actions = xtables.reflow_width }
+implement { name = "x_table_reflow_height", actions = xtables.reflow_height }
+implement { name = "x_table_construct", actions = xtables.construct }
+implement { name = "x_table_cleanup", actions = xtables.cleanup }
+implement { name = "x_table_next_row", actions = xtables.next_row }
+implement { name = "x_table_next_row_option", actions = xtables.next_row, arguments = "string" }
+implement { name = "x_table_finish_row", actions = xtables.finish_row }
+implement { name = "x_table_init_reflow_width", actions = xtables.initialize_reflow_width }
+implement { name = "x_table_init_reflow_height", actions = xtables.initialize_reflow_height }
+implement { name = "x_table_init_reflow_width_option", actions = xtables.initialize_reflow_width, arguments = "string" }
+implement { name = "x_table_init_reflow_height_option", actions = xtables.initialize_reflow_height, arguments = "string" }
+implement { name = "x_table_init_construct", actions = xtables.initialize_construct }
+implement { name = "x_table_set_reflow_width", actions = xtables.set_reflow_width }
+implement { name = "x_table_set_reflow_height", actions = xtables.set_reflow_height }
+implement { name = "x_table_set_construct", actions = xtables.set_construct }
+implement { name = "x_table_r", actions = function() context(data.currentrow or 0) end }
+implement { name = "x_table_c", actions = function() context(data.currentcolumn or 0) end }
diff --git a/tex/context/base/tabl-xtb.mkvi b/tex/context/base/tabl-xtb.mkvi
index 556bec5ce..73ba91045 100644
--- a/tex/context/base/tabl-xtb.mkvi
+++ b/tex/context/base/tabl-xtb.mkvi
@@ -29,6 +29,8 @@
% - maybe only tag the box
% - scale to fit
%
+% - buffers permit verbatim but are not always handy
+
%D This module started as an afternoon experiment and surprisingly could be
%D mostly finished the same evening. Of course it builds upon existing
%D functionality. The main reason for writing it is that we occasionally
@@ -78,8 +80,13 @@
\let\dotagxtablesignal\relax % names will change
\appendtoks
- \def\dotagxtablecell {\taggedctxcommand{settagtablecell(\number\tablecellrows,\number\tablecellcolumns,\number\raggedstatus)}}%
- \def\dotagxtablesignal{\char\zerocount}% not used
+ \def\dotagxtablecell
+ {\clf_settagtablecell
+ \numexpr\tablecellrows\relax
+ \numexpr\tablecellcolumns\relax
+ \numexpr\raggedstatus\relax}%
+ \def\dotagxtablesignal
+ {\char\zerocount}% not used
\to \everyenableelements
\newdimen\d_tabl_x_width
@@ -96,8 +103,8 @@
\newcount\c_tabl_x_skip_mode % 1 = skip
\newdimen\d_tabl_x_textwidth
-\def\currentxtablerow {\ctxcommand{x_table_r()}}
-\def\currentxtablecolumn{\ctxcommand{x_table_c()}}
+\let\currentxtablerow \clf_x_table_r
+\let\currentxtablecolumn\clf_x_table_c
% \setupxtable[one][parent][a=b,c=d]
% \setupxtable[one] [a=b,c=d]
@@ -159,7 +166,7 @@
{\bgroup
\tabl_x_prepare{#settings}%
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
- \buff_pickup{\tabl_x_current_buffer}{startxtable}{stopxtable}\relax\tabl_x_process}
+ \buff_pickup{\tabl_x_current_buffer}{startxtable}{stopxtable}\relax\tabl_x_process\zerocount}
\unexpanded\def\processxtablebuffer
{\dosingleempty\tabl_x_process_buffer_directly}
@@ -175,7 +182,7 @@
{\bgroup
\let\tabl_x_start_table\tabl_x_process_buffer
\edef\tabl_x_current_buffer{#name}%
- \tabl_x_get_buffer % pickup settings
+ \tabl_x_get_buffer % settings
\tabl_x_process}
\unexpanded\def\tabl_x_start_ignore[#settings]%
@@ -194,7 +201,7 @@
\unexpanded\def\tabl_x_embedded_start[#settings]#content\stopembeddedxtable
{\tabl_x_prepare{#settings}%
- \ctxcommand{assignbuffer("embedded_x_table",\!!bs\detokenize{#content}\!!es)}%
+ \clf_assignbuffer{embedded_x_table}{\detokenize{#content}}\catcodetable\relax
\bgroup
\let\tabl_x_start_table\tabl_x_process_buffer
\edef\tabl_x_current_buffer{embedded_x_table}%
@@ -216,14 +223,14 @@
\unexpanded\def\tabl_x_start_named_indeed[#settings]%
{\advance\c_tabl_x_nesting\plusone
- \dostarttagged\t!table\empty
+ \dostarttaggedchained\t!table\empty\??xtable
\iffirstargument
\setupcurrentxtable[#settings]%
\fi
\tabl_x_check_textwidth
- \forgetall
+ %\forgetall % else whitespace mess
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
- \normalexpanded{\buff_pickup{\tabl_x_current_buffer}{\e!start\currentxtable}{\e!stop\currentxtable}\relax\tabl_x_process}}
+ \normalexpanded{\buff_pickup{\tabl_x_current_buffer}{\e!start\currentxtable}{\e!stop\currentxtable}\relax\tabl_x_process\zerocount}}
\unexpanded\def\tabl_x_stop_named
{}
@@ -240,15 +247,15 @@
\unexpanded\def\tabl_x_prepare#settings% assumes \iffirstargument to be set
{\advance\c_tabl_x_nesting\plusone
- \dostarttagged\t!table\empty
+ \dostarttaggedchained\t!table\empty\??xtable
\iffirstargument
\tabl_x_set_checked{#settings}%
\fi
\tabl_x_check_textwidth
- \forgetall}
+ }% else whitespace mess
\def\tabl_x_get_buffer
- {\ctxcommand{gettexbuffer("\tabl_x_current_buffer")}}
+ {\clf_gettexbuffer{\tabl_x_current_buffer}}
\let\tabl_x_start_row_yes \relax
\let\tabl_x_start_row_nop \relax
@@ -259,23 +266,24 @@
\unexpanded\def\tabl_x_process
{\begingroup % *
- \doifsomething{\xtableparameter\c!bodyfont}
- {\setupbodyfont[\xtableparameter\c!bodyfont]}%
+ \forgetall % moved here
+ \dontcomplain % for the moment here till we figure out where we get the overflow
+ \usebodyfontparameter\xtableparameter
\setbox\scratchbox\vbox
- {\xtableparameter\c!spaceinbetween}%
- \ctxcommand{x_table_create {
- option = "\xtableparameter\c!option",
- textwidth = \number\d_tabl_x_textwidth,
- textheight = \number\dimexpr\xtableparameter\c!textheight,
- maxwidth = \number\dimexpr\xtableparameter\c!maxwidth,
- lineheight = \number\openlineheight,
- columndistance = \number\dimexpr\xtableparameter\c!columndistance,
- leftmargindistance = \number\dimexpr\xtableparameter\c!leftmargindistance,
- rightmargindistance = \number\dimexpr\xtableparameter\c!rightmargindistance,
- rowdistance = \number\ht\scratchbox,
- header = "\xtableparameter\c!header",
- footer = "\xtableparameter\c!footer",
- } }%
+ {\doifsomething{\xtableparameter\c!spaceinbetween}{\blank[\xtableparameter\c!spaceinbetween]}}%
+ \clf_x_table_create
+ option {\xtableparameter\c!option}%
+ textwidth \d_tabl_x_textwidth
+ textheight \dimexpr\xtableparameter\c!textheight\relax
+ maxwidth \dimexpr\xtableparameter\c!maxwidth\relax
+ lineheight \openlineheight
+ columndistance \dimexpr\xtableparameter\c!columndistance\relax
+ leftmargindistance \dimexpr\xtableparameter\c!leftmargindistance\relax
+ rightmargindistance \dimexpr\xtableparameter\c!rightmargindistance\relax
+ rowdistance \ht\scratchbox
+ header {\xtableparameter\c!header}%
+ footer {\xtableparameter\c!footer}%
+ \relax
%
\letxtableparameter\c!option\empty
% not so nice but needed as we use this in the setup
@@ -290,7 +298,7 @@
\let\tabl_x_stop_cell \tabl_x_stop_cell_reflow_width
\settrialtypesetting
\tabl_x_get_buffer
- \ctxcommand{x_table_reflow_width()}\relax
+ \clf_x_table_reflow_width
\endgroup
\begingroup
\let\tabl_x_start_row_yes \tabl_x_start_row_reflow_height_yes
@@ -301,7 +309,7 @@
\let\tabl_x_stop_cell \tabl_x_stop_cell_reflow_height
\settrialtypesetting
\tabl_x_get_buffer
- \ctxcommand{x_table_reflow_height()}\relax
+ \clf_x_table_reflow_height
\endgroup
\begingroup
\let\tabl_x_start_row_yes \tabl_x_start_row_construct_yes
@@ -311,7 +319,7 @@
\let\tabl_x_start_cell_nop\tabl_x_start_cell_construct_nop
\let\tabl_x_stop_cell \tabl_x_stop_cell_construct
\tabl_x_get_buffer
- \ctxcommand{x_table_construct()}\relax
+ \clf_x_table_construct
\endgroup
\endgroup % *
\ifinsidesplitfloat
@@ -321,7 +329,7 @@
\else
\tabl_x_flush_text_checked
\fi\fi
- \ctxcommand{x_table_cleanup()}%
+ \clf_x_table_cleanup
\dostoptagged
\resetbuffer[\tabl_x_current_buffer]%
\egroup}
@@ -336,19 +344,31 @@
% in text flow: headers and footers only once
\setvalue{\??xtableflushsplit\v!yes}%
- {\ctxcommand{x_table_flush{ method = "\v!split" }}}
+ {\clf_x_table_flush
+ method {\v!split}%
+ \relax}
% in text flow: headers and footers only once
\setvalue{\??xtableflushsplit\v!no}%
- {\dontleavehmode % else no leftskip etc
- \ctxcommand{x_table_flush{ method = "\v!normal" }}}
+ {% \noindent % gives extra line after table
+ % \noindentation % messes up the next indentation
+ % \dontleavehmode % no leftskip
+ \kern\zeropoint % yet another guess
+ \ignorespaces
+ \clf_x_table_flush
+ method {\v!normal}%
+ \relax
+ \removeunwantedspaces}
% in text flow: headers and footers get repeated
\setvalue{\??xtableflushsplit\v!repeat}%
{\doloop
- {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi }}%
+ {\clf_x_table_flush
+ method {\v!split}%
+ height \ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi
+ \relax
\ifcase\c_tabl_x_state
\exitloop
\else
@@ -370,7 +390,10 @@
% \stopsetups
%
% \unexpanded\def\xtablesplitflush
-% {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\dimexpr\xtablesplitvsize\relax}}\relax}
+% {\clf_x_table_flush
+% method {\v!split}%
+% height \dimexpr\xtablesplitvsize\relax
+% \relax}
%
% \def\xtablesplitvsize
% {\ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi}
@@ -380,7 +403,9 @@
\let\extratxtablesplitheight\zeropoint % might disappear so don't depend on it
\unexpanded\def\tabl_x_flush_float_normal
- {\ctxcommand{x_table_flush{ method = "\v!normal" }}}
+ {\clf_x_table_flush
+ method {\v!normal}%
+ \relax}
\unexpanded\def\tabl_x_flush_float_split
{\resetdirecttsplit
@@ -391,9 +416,12 @@
\let\tsplitdirectwidth \d_tabl_x_final_width
\handledirecttsplit}
-\unexpanded\def\tabl_x_split_splitter#vsize%
+\unexpanded\def\tabl_x_split_splitter#height%
{\setbox\tsplitresult\vbox
- {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\dimexpr#vsize }}}%
+ {\clf_x_table_flush
+ method {\v!split}%
+ height \dimexpr#height\relax
+ \relax}%
\ifcase\c_tabl_x_state
\global\setfalse\somenextsplitofffloat
\else
@@ -402,14 +430,14 @@
\unexpanded\def\startxrow
{\begingroup
- \doifnextoptionalelse\tabl_x_start_row_yes\tabl_x_start_row_nop}
+ \doifelsenextoptionalcs\tabl_x_start_row_yes\tabl_x_start_row_nop}
\unexpanded\def\tabl_x_start_row_reflow_width_yes[#settings]%
{\setupcurrentxtable[#settings]%
- \ctxcommand{x_table_next_row()}}
+ \clf_x_table_next_row}
\unexpanded\def\tabl_x_start_row_reflow_width_nop
- {\ctxcommand{x_table_next_row()}}
+ {\clf_x_table_next_row}
\unexpanded\def\tabl_x_stop_row_reflow_width
{}
@@ -421,27 +449,36 @@
\unexpanded\def\tabl_x_start_row_construct_yes[#settings]%
{\setupcurrentxtable[#settings]%
\dostarttagged\t!tablerow\empty
- \ctxcommand{x_table_next_row()}}
+ \clf_x_table_next_row_option{\xtableparameter\c!samepage}}
\unexpanded\def\tabl_x_start_row_construct_nop
{\dostarttagged\t!tablerow\empty
- \ctxcommand{x_table_next_row()}}
+ \clf_x_table_next_row}
\unexpanded\def\tabl_x_stop_row_construct
- {\dostoptagged}
+ {\clf_x_table_finish_row
+ \dostoptagged}
\unexpanded\def\stopxrow
{\tabl_x_stop_row
\endgroup}
\unexpanded\def\startxcell
- {\doifnextoptionalelse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
+ {\doifelsenextoptionalcs\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
\unexpanded\def\stopxcell
{\tabl_x_stop_cell}
+\unexpanded\def\dummyxcell#1%
+ {\tabl_x_start_cell_nop
+ \tabl_x_stop_cell}
+
+\unexpanded\def\dummyxcell
+ {\tabl_x_start_cell_nop
+ \tabl_x_stop_cell}
+
\unexpanded\def\tabl_x_begin_of_cell
- {\inhibitblank
+ {%\inhibitblank % already in framed
\everypar{\delayedbegstrut}}
\unexpanded\def\tabl_x_end_of_cell
@@ -467,8 +504,16 @@
% Although this becomes kind of messy. It already saves time that we only check
% for it when we have settings.
-\def\tabl_x_set_hsize
- {\hsize.25\maxdimen} % let's be reasonable
+% \def\tabl_x_set_hsize
+% {\hsize.25\maxdimen} % let's be reasonable
+
+% \def\tabl_x_set_hsize
+% {\edef\p_width{\xtableparameter\c!width}%
+% \ifx\p_width\empty
+% \hsize.25\maxdimen % is this really needed
+% \fi}
+
+\let\tabl_x_set_hsize\relax
\unexpanded\def\tabl_x_start_cell_reflow_width_yes[#settings]%
{\setbox\b_tabl_x\hbox\bgroup
@@ -494,7 +539,7 @@
\fi
%
\d_tabl_x_distance\xtableparameter\c!distance\relax
- \ctxcommand{x_table_init_reflow_width("\xtableparameter\c!option")}%
+ \clf_x_table_init_reflow_width_option{\xtableparameter\c!option}%
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\tabl_x_set_hsize}
@@ -508,7 +553,7 @@
\c_tabl_x_nx\plusone
\c_tabl_x_ny\plusone
\d_tabl_x_distance\xtableparameter\c!distance\relax
- \ctxcommand{x_table_init_reflow_width()}%
+ \clf_x_table_init_reflow_width
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\tabl_x_set_hsize}
@@ -517,11 +562,11 @@
{\tabl_x_end_of_cell
\egroup
\egroup
- \ctxcommand{x_table_set_reflow_width()}}
+ \clf_x_table_set_reflow_width}
\unexpanded\def\tabl_x_start_cell_reflow_height_yes[#settings]%
{\setbox\b_tabl_x\hbox\bgroup
- \ctxcommand{x_table_init_reflow_height()}%
+ \clf_x_table_init_reflow_height
\ifcase\c_tabl_x_skip_mode % can be sped up
\ifnum\c_tabl_x_nesting>\plusone
\letxtableparameter\c!height\v!fit % overloads given height
@@ -535,7 +580,7 @@
\unexpanded\def\tabl_x_start_cell_reflow_height_nop
{\setbox\b_tabl_x\hbox\bgroup
- \ctxcommand{x_table_init_reflow_height()}%
+ \clf_x_table_init_reflow_height
\ifcase\c_tabl_x_skip_mode % can be sped up
\ifnum\c_tabl_x_nesting>\plusone
\letxtableparameter\c!height\v!fit % overloads given height
@@ -552,7 +597,7 @@
\egroup
\fi
\egroup
- \ctxcommand{x_table_set_reflow_height()}}
+ \clf_x_table_set_reflow_height}
\unexpanded\def\tabl_x_start_cell_construct_yes[#settings]%
{\dostarttagged\t!tablecell\empty % can't we just tag the box
@@ -560,7 +605,7 @@
\setupcurrentxtable[#settings]%
\letxtableparameter\c!width \d_tabl_x_width % overloads given width
\letxtableparameter\c!height\d_tabl_x_height % overloads given height
- \ctxcommand{x_table_init_construct()}%
+ \clf_x_table_init_construct
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\dotagxtablecell}
@@ -569,8 +614,8 @@
{\dostarttagged\t!tablecell\empty % can't we just tag the box
\setbox\b_tabl_x\hbox\bgroup
\letxtableparameter\c!width \d_tabl_x_width % overloads given width
- \letxtableparameter\c!height\d_tabl_x_height % overloads given height
- \ctxcommand{x_table_init_construct()}%
+ \letxtableparameter\c!height\d_tabl_x_height % overloads given height (commenting it ... nice option)
+ \clf_x_table_init_construct
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\dotagxtablecell}
@@ -580,7 +625,7 @@
\egroup
\dotagxtablesignal % harmless spot
\egroup
- \ctxcommand{x_table_set_construct()}%
+ \clf_x_table_set_construct
\dostoptagged}
\unexpanded\def\startxcellgroup
@@ -677,7 +722,7 @@
\unexpanded\def\startxgroup
{\begingroup
- \doifnextoptionalelse\tabl_x_start_group_delayed_one\relax}
+ \doifelsenextoptionalcs\tabl_x_start_group_delayed_one\relax}
\unexpanded\def\stopxgroup
{\endgroup}
@@ -695,7 +740,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\setupcurrentxtable\relax}
+ \doifelsenextoptionalcs\setupcurrentxtable\relax}
\let\startxrowgroup \startxgroup
\let\stopxrowgroup \stopxgroup
@@ -706,7 +751,7 @@
\unexpanded\def\startxcell
{\begingroup
- \doifnextoptionalelse\tabl_x_start_cell_delayed_one\tabl_x_start_cell_nop}
+ \doifelsenextoptionalcs\tabl_x_start_cell_delayed_one\tabl_x_start_cell_nop}
\unexpanded\def\tabl_x_start_cell_delayed_one[#tag]%
% {\ifcsname\namedxtablehash{#tag}\s!parent\endcsname
@@ -721,7 +766,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
+ \doifelsenextoptionalcs\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
\unexpanded\def\stopxcell
{\tabl_x_stop_cell
@@ -731,7 +776,7 @@
\unexpanded\def\startxrow
{\begingroup
- \doifnextoptionalelse\tabl_x_start_row_delayed_one\tabl_x_start_row_nop}
+ \doifelsenextoptionalcs\tabl_x_start_row_delayed_one\tabl_x_start_row_nop}
\unexpanded\def\tabl_x_start_row_delayed_one[#tag]%
% {\ifcsname\namedxtablehash{#tag}\s!parent\endcsname
@@ -746,7 +791,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\tabl_x_start_row_yes\tabl_x_start_row_nop}
+ \doifelsenextoptionalcs\tabl_x_start_row_yes\tabl_x_start_row_nop}
\unexpanded\def\stopxrow
{\tabl_x_stop_row
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index 3447214bd..062f0208f 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -18,11 +18,14 @@ if not modules then modules = { } end modules ['task-ini'] = {
-- not apply the font handler, we can remove all checks for subtypes 255
local tasks = nodes.tasks
+local prependaction = tasks.prependaction
local appendaction = tasks.appendaction
local disableaction = tasks.disableaction
+local enableaction = tasks.enableaction
local freezegroup = tasks.freezegroup
local freezecallbacks = callbacks.freeze
+appendaction("processors", "normalizers", "typesetters.wrappers.handler") -- disabled
appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
@@ -35,11 +38,14 @@ appendaction("processors", "characters", "typesetters.cases.handler")
appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
-appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
+appendaction("processors", "words", "languages.replacements.handler") -- disabled
+
+appendaction("processors", "words", "languages.hyphenators.handler") -- always on
+
appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then
appendaction("processors", "words", "typesetters.initials.handler") -- disabled -- might move up
-appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move up
+appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move down
appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
@@ -55,9 +61,12 @@ appendaction("processors", "lists", "typesetters.spacings.handler")
appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled
appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling)
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-------------("processors", "lists", "typesetters.initials.handler") -- disabled
+appendaction("processors", "lists", "languages.visualizediscretionaries") -- disabled
+
+-- appendaction("processors", "lists", "typesetters.initials.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
+appendaction("shipouts", "normalizers", "builders.paragraphs.expansion.trace") -- disabled
appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled
@@ -99,27 +108,35 @@ appendaction("math", "builders", "builders.kernel.mlist_to_hlist")
------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
appendaction("math", "builders", "typesetters.directions.processmath") -- disabled (has to happen pretty late)
--- quite experimental (nodes.handlers.graphicvadjust might go away)
-
appendaction("finalizers", "lists", "builders.paragraphs.keeptogether")
-appendaction("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo
+------------("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo
appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental
appendaction("finalizers", "lists", "builders.paragraphs.tag")
-- still experimental
-appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") --
+appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate")
+
appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last !
+appendaction("mvlbuilders", "normalizers", "builders.profiling.pagehandler") -- here !
-appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") --
+appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler")
+appendaction("vboxbuilders", "normalizers", "builders.profiling.vboxhandler") -- here !
-- experimental too
-appendaction("mvlbuilders", "normalizers","typesetters.checkers.handler")
-appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
+appendaction("mvlbuilders", "normalizers", "typesetters.checkers.handler")
+appendaction("vboxbuilders", "normalizers", "typesetters.checkers.handler")
+
+-- rather special (this might get hardcoded):
+
+prependaction("processors", "before", "nodes.properties.attach") -- enabled but optimized for quick abort
+appendaction ("shipouts", "normalizers", "nodes.properties.delayed") -- enabled but optimized for quick abort
-- speedup: only kick in when used
+disableaction("processors", "typesetters.wrappers.handler")
+disableaction("processors", "languages.replacements.handler")
disableaction("processors", "typesetters.characteralign.handler")
disableaction("processors", "scripts.autofontfeature.handler")
disableaction("processors", "scripts.splitters.handler")
@@ -138,8 +155,10 @@ disableaction("processors", "typesetters.firstlines.handler")
disableaction("processors", "typesetters.spacings.handler")
disableaction("processors", "typesetters.kerns.handler")
disableaction("processors", "typesetters.italics.handler")
+disableaction("processors", "languages.visualizediscretionaries")
disableaction("processors", "nodes.handlers.stripping")
+disableaction("shipouts", "builders.paragraphs.expansion.trace")
disableaction("shipouts", "typesetters.alignments.handler")
disableaction("shipouts", "nodes.rules.handler")
disableaction("shipouts", "nodes.shifts.handler")
@@ -154,7 +173,6 @@ disableaction("shipouts", "nodes.visualizers.handler")
disableaction("shipouts", "nodes.handlers.accessibility")
disableaction("shipouts", "nodes.handlers.backgrounds")
disableaction("shipouts", "nodes.handlers.alignbackgrounds")
-disableaction("shipouts", "nodes.handlers.cleanuppage")
disableaction("shipouts", "nodes.references.handler")
disableaction("shipouts", "nodes.destinations.handler")
@@ -167,7 +185,7 @@ disableaction("processors", "builders.paragraphs.solutions.splitters.split")
disableaction("finalizers", "builders.paragraphs.keeptogether")
disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
+-------------("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
disableaction("finalizers", "builders.paragraphs.tag")
disableaction("math", "noads.handlers.showtree")
@@ -177,7 +195,10 @@ disableaction("math", "noads.handlers.classes")
disableaction("math", "typesetters.directions.processmath")
disableaction("mvlbuilders", "typesetters.checkers.handler")
+disableaction("mvlbuilders", "builders.profiling.pagehandler")
+
disableaction("vboxbuilders","typesetters.checkers.handler")
+disableaction("vboxbuilders","builders.profiling.vboxhandler")
freezecallbacks("find_.*_file", "find file using resolver")
freezecallbacks("read_.*_file", "read file at once")
@@ -206,3 +227,18 @@ freezegroup("vboxbuilders", "normalizers")
freezegroup("math", "normalizers")
freezegroup("math", "builders")
+
+-- new: disabled here
+
+disableaction("processors", "builders.kernel.ligaturing")
+disableaction("processors", "builders.kernel.kerning")
+
+directives.register("nodes.basepass", function(v)
+ if v then
+ disableaction("processors", "builders.kernel.ligaturing")
+ disableaction("processors", "builders.kernel.kerning")
+ else
+ enableaction("processors", "builders.kernel.ligaturing")
+ enableaction("processors", "builders.kernel.kerning")
+ end
+end)
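The new directive simply switches the base ligaturing and kerning passes between node mode and the main pass. A hedged usage note, assuming the standard setters API from trac-set (from the TeX end this corresponds to \enabledirectives[nodes.basepass]):

    -- flipping the directive invokes the registered function above
    directives.enable ("nodes.basepass") -- node-mode ligaturing/kerning off
    directives.disable("nodes.basepass") -- node-mode ligaturing/kerning back on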
diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua
index 0f0c016f8..f8b945890 100644
--- a/tex/context/base/toks-ini.lua
+++ b/tex/context/base/toks-ini.lua
@@ -5,338 +5,252 @@ if not modules then modules = { } end modules ['toks-ini'] = {
license = "see context related readme files"
}
-local context, commands = context, commands
-local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
-local format, gsub = string.format, string.gsub
-
---[[ldx--
-<p>This code is experimental and needs a cleanup. The visualizers will move to
-a module.</p>
---ldx]]--
-
--- 1 = command, 2 = modifier (char), 3 = controlsequence id
---
--- callback.register('token_filter', token.get_next)
---
--- token.get_next()
--- token.expand()
--- token.create()
--- token.csname_id()
--- token.csname_name(v)
--- token.command_id()
--- token.command_name(v)
--- token.is_expandable()
--- token.is_activechar()
--- token.lookup(v)
-
--- actually, we can use token registers to store tokens
-
-local token, tex = token, tex
-
-local createtoken = token.create
-local csname_id = token.csname_id
-local command_id = token.command_id
-local command_name = token.command_name
-local get_next = token.get_next
-local expand = token.expand
-local is_activechar = token.is_activechar
-local csname_name = token.csname_name
-
-tokens = tokens or { }
-local tokens = tokens
-
-tokens.vbox = createtoken("vbox")
-tokens.hbox = createtoken("hbox")
-tokens.vtop = createtoken("vtop")
-tokens.bgroup = createtoken(utfbyte("{"), 1)
-tokens.egroup = createtoken(utfbyte("}"), 2)
-
-tokens.letter = function(chr) return createtoken(utfbyte(chr), 11) end
-tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end
-
-tokens.letters = function(str)
- local t, n = { }, 0
- for chr in utfvalues(str) do
- n = n + 1
- t[n] = createtoken(chr, 11)
- end
- return t
-end
-
-tokens.collectors = tokens.collectors or { }
-local collectors = tokens.collectors
-
-collectors.data = collectors.data or { }
-local collectordata = collectors.data
+tokens = tokens or { }
+
+local tokens = tokens
+local tostring = tostring
+local utfchar = utf.char
+local char = string.char
+local printtable = table.print
+local concat = table.concat
+
+if newtoken then
+
+ if setinspector then
+
+ local istoken = newtoken.is_token
+ local simple = { letter = "letter", other_char = "other" }
+
+ local function astable(t)
+ if t and istoken(t) then
+ local cmdname = t.cmdname
+ local simple = simple[cmdname]
+ if simple then
+ return {
+ category = simple,
+ character = utfchar(t.mode) or nil,
+ }
+ else
+ return {
+ command = t.command,
+ id = t.id,
+ tok = t.tok,
+ csname = t.csname,
+ active = t.active,
+ expandable = t.expandable,
+ protected = t.protected,
+ mode = t.mode,
+ cmdname = cmdname,
+ }
+ end
+ end
+ end
-collectors.registered = collectors.registered or { }
-local registered = collectors.registered
+ tokens.istoken = istoken
+ tokens.astable = astable
-local function printlist(data)
- callbacks.push('token_filter', function ()
- callbacks.pop('token_filter') -- tricky but the nil assignment helps
- return data
- end)
-end
+ setinspector(function(v) if istoken(v) then printtable(astable(v),tostring(v)) return true end end)
-tex.printlist = printlist -- will change to another namespace
+ end
-function collectors.flush(tag)
- printlist(collectordata[tag])
-end
+ local scan_toks = newtoken.scan_toks
+ local scan_string = newtoken.scan_string
+ local scan_int = newtoken.scan_int
+ local scan_code = newtoken.scan_code
+ local scan_dimen = newtoken.scan_dimen
+ local scan_glue = newtoken.scan_glue
+ local scan_keyword = newtoken.scan_keyword
+ local scan_token = newtoken.scan_token
+ local scan_word = newtoken.scan_word
+ local scan_number = newtoken.scan_number
-function collectors.test(tag)
- printlist(collectordata[tag])
-end
+ local get_next = newtoken.get_next
-function collectors.register(name)
- registered[csname_id(name)] = name
-end
+ local set_macro = newtoken.set_macro
-local call = command_id("call")
-local letter = command_id("letter")
-local other = command_id("other_char")
-
-function collectors.install(tag,end_cs)
- local data, d = { }, 0
- collectordata[tag] = data
- local endcs = csname_id(end_cs)
- while true do
- local t = get_next()
- local a, b = t[1], t[3]
- if b == endcs then
- context["end_cs"]()
- return
- elseif a == call and registered[b] then
- expand()
+ set_macro = function(k,v,g)
+ if g == "global" then
+ context.setgvalue(k,v or '')
else
- d = d + 1
- data[d] = t
+ context.setvalue(k,v or '')
end
end
-end
-
-function collectors.handle(tag,handle,flush)
- collectordata[tag] = handle(collectordata[tag])
- if flush then
- collectors.flush(tag)
- end
-end
-local show_methods = { }
-collectors.show_methods = show_methods
+ local bits = {
+ escape = 2^ 0,
+ begingroup = 2^ 1,
+ endgroup = 2^ 2,
+ mathshift = 2^ 3,
+ alignment = 2^ 4,
+ endofline = 2^ 5,
+ parameter = 2^ 6,
+ superscript = 2^ 7,
+ subscript = 2^ 8,
+ ignore = 2^ 9,
+ space = 2^10, -- 1024
+ letter = 2^11,
+ other = 2^12,
+ active = 2^13,
+ comment = 2^14,
+ invalid = 2^15,
+ --
+ character = 2^11 + 2^12,
+ whitespace = 2^13 + 2^10, -- / needs more checking
+ --
+ open = 2^10 + 2^1, -- space + begingroup
+ close = 2^10 + 2^2, -- space + endgroup
+ }
+
+ -- for k, v in next, bits do bits[v] = k end
+
+ tokens.bits = bits
+
+ local space_bits = bits.space
+
+ -- words are space or \relax terminated and the trailing space is gobbled; a word
+ -- can contain any non-space letter/other
+
+ local t = { } -- small optimization, a shared variable that is not reset
+
+ if scan_word then
+
+ scan_number = function(base)
+ local s = scan_word()
+ if not s then
+ return nil
+ elseif base then
+ return tonumber(s,base)
+ else
+ return tonumber(s)
+ end
+ end
-function collectors.show(tag, method)
- if type(tag) == "table" then
- show_methods[method or 'a'](tag)
else
- show_methods[method or 'a'](collectordata[tag])
- end
-end
-
-function collectors.defaultwords(t,str)
- local n = #t
- n = n + 1
- t[n] = tokens.bgroup
- n = n + 1
- t[n] = createtoken("red")
- for i=1,#str do
- n = n + 1
- t[n] = tokens.other('*')
- end
- n = n + 1
- t[n] = tokens.egroup
-end
-function collectors.dowithwords(tag,handle)
- local t, w, tn, wn = { }, { }, 0, 0
- handle = handle or collectors.defaultwords
- local tagdata = collectordata[tag]
- for k=1,#tagdata do
- local v = tagdata[k]
- if v[1] == letter then
- wn = wn + 1
- w[wn] = v[2]
- else
- if wn > 0 then
- handle(t,w)
- wn = 0
+ scan_word = function()
+ local n = 0
+ while true do
+ local c = scan_code()
+ if c then
+ n = n + 1
+ t[n] = utfchar(c)
+ elseif scan_code(space_bits) then
+ if n > 0 then
+ break
+ end
+ elseif n > 0 then
+ break
+ else
+ return
+ end
end
- tn = tn + 1
- t[tn] = v
+ return concat(t,"",1,n)
end
- end
- if wn > 0 then
- handle(t,w)
- end
- collectordata[tag] = t
-end
-local function showtoken(t)
- if t then
- local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or ""
- if cmd == letter or cmd == other then
- return format("%s-> %s -> %s", name, chr, utfchar(chr))
- elseif id > 0 then
- cs = csname_name(t) or nil
- if cs then
- return format("%s-> %s", name, cs)
- elseif tonumber(chr) < 0 then
- return format("%s-> %s", name, id)
+ -- we gobble the trailing space (like scan_int); a number has to be space or non-char
+ -- terminated because we accept forms like 0xabcd, so there is no clear separator for a keyword
+
+ scan_number = function(base)
+ local n = 0
+ while true do
+ local c = scan_code()
+ if c then
+ n = n + 1
+ t[n] = char(c)
+ elseif scan_code(space_bits) then
+ if n > 0 then
+ break
+ end
+ elseif n > 0 then
+ break
+ else
+ return
+ end
+ end
+ local s = concat(t,"",1,n)
+ if base then
+ return tonumber(s,base)
else
- return format("%s-> (%s,%s)", name, chr, id)
+ return tonumber(s)
end
- else
- return format("%s", name)
end
- else
- return "no node"
- end
-end
-
-collectors.showtoken = showtoken
-
-function collectors.trace()
- local t = get_next()
- logs.report("tokenlist",showtoken(t))
- return t
-end
-
--- these might move to a runtime module
-show_methods.a = function(data) -- no need to store the table, just pass directly
- local function row(one,two,three,four,five)
- context.NC() context(one)
- context.NC() context(two)
- context.NC() context(three)
- context.NC() context(four)
- context.NC() context(five)
- context.NC() context.NR()
end
- context.starttabulate { "|T|Tr|cT|Tr|T|" }
- row("cmd","chr","","id","name")
- context.HL()
- for _,v in next, data do
- local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
- local name = gsub(command_name(v) or "","_","\\_")
- if id > 0 then
- cs = csname_name(v) or ""
- if cs ~= "" then cs = "\\string " .. cs end
- else
- id = ""
- end
- if cmd == letter or cmd == other then
- sym = "\\char " .. chr
- end
- if tonumber(chr) < 0 then
- row(name,"",sym,id,cs)
- else
- row(name,chr,sym,id,cs)
- end
- end
- context.stoptabulate()
-end
-local function show_b_c(data,swap) -- no need to store the table, just pass directly
- local function row(one,two,three)
- context.NC() context(one)
- context.NC() context(two)
- context.NC() context(three)
- context.NC() context.NR()
- end
- if swap then
- context.starttabulate { "|Tl|Tl|Tr|" }
- else
- context.starttabulate { "|Tl|Tr|Tl|" }
- end
- row("cmd","chr","name")
- context.HL()
- for _,v in next, data do
- local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
- local name = gsub(command_name(v) or "","_","\\_")
- if id > 0 then
- cs = csname_name(v) or ""
- end
- if cmd == letter or cmd == other then
- sym = "\\char " .. chr
- elseif cs == "" then
- -- okay
- elseif is_activechar(v) then
- sym = "\\string " .. cs
+ -- -- the next one cannot handle \iftrue true\else false\fi
+ --
+ -- local function scan_boolean()
+ -- if scan_keyword("true") then
+ -- return true
+ -- elseif scan_keyword("false") then
+ -- return false
+ -- else
+ -- return nil
+ -- end
+ -- end
+
+ local function scan_boolean()
+ local kw = scan_word()
+ if kw == "true" then
+ return true
+ elseif kw == "false" then
+ return false
else
- sym = "\\string\\" .. cs
- end
- if swap then
- row(name,sym,chr)
- elseif tonumber(chr) < 0 then
- row(name,"",sym)
- else
- row(name,chr,sym)
+ return nil
end
end
- context.stoptabulate()
-end
--- Even more experimental ...
+ tokens.scanners = { -- these expand
+ token = scan_token or get_next,
+ toks = scan_toks,
+ tokens = scan_toks,
+ dimen = scan_dimen,
+ dimension = scan_dimen,
+ glue = scan_glue,
+ skip = scan_glue,
+ integer = scan_int,
+ count = scan_int,
+ string = scan_string,
+ code = scan_code,
+ word = scan_word,
+ number = scan_number,
+ boolean = scan_boolean,
+ keyword = scan_keyword,
+ }
+
+ tokens.getters = { -- these don't expand
+ token = get_next,
+ count = tex.getcount,
+ dimen = tex.getdimen,
+ box = tex.getbox,
+ }
+
+ tokens.setters = {
+ macro = set_macro,
+ count = tex.setcount,
+ dimen = tex.setdimen,
+ box = tex.setbox,
+ }
-show_methods.b = function(data) show_b_c(data,false) end
-show_methods.c = function(data) show_b_c(data,true ) end
-
-local remapper = { } -- namespace
-collectors.remapper = remapper
-
-local remapperdata = { } -- user mappings
-remapper.data = remapperdata
-
-function remapper.store(tag,class,key)
- local s = remapperdata[class]
- if not s then
- s = { }
- remapperdata[class] = s
- end
- s[key] = collectordata[tag]
- collectordata[tag] = nil
end
-function remapper.convert(tag,toks)
- local data = remapperdata[tag]
- local leftbracket, rightbracket = utfbyte('['), utfbyte(']')
- local skipping = 0
- -- todo: math
- if data then
- local t, n = { }, 0
- for s=1,#toks do
- local tok = toks[s]
- local one, two = tok[1], tok[2]
- if one == 11 or one == 12 then
- if two == leftbracket then
- skipping = skipping + 1
- n = n + 1 ; t[n] = tok
- elseif two == rightbracket then
- skipping = skipping - 1
- n = n + 1 ; t[n] = tok
- elseif skipping == 0 then
- local new = data[two]
- if new then
- if #new > 1 then
- for n=1,#new do
- n = n + 1 ; t[n] = new[n]
- end
- else
- n = n + 1 ; t[n] = new[1]
- end
- else
- n = n + 1 ; t[n] = tok
- end
- else
- n = n + 1 ; t[n] = tok
- end
- else
- n = n + 1 ; t[n] = tok
- end
- end
- return t
- else
- return toks
- end
-end
+-- static int run_scan_token(lua_State * L)
+-- {
+-- saved_tex_scanner texstate;
+-- save_tex_scanner(texstate);
+-- get_x_token();
+-- make_new_token(L, cur_cmd, cur_chr, cur_cs);
+-- unsave_tex_scanner(texstate);
+-- return 1;
+-- }
+--
+-- static int run_get_future(lua_State * L)
+-- {
+-- /* saved_tex_scanner texstate; */
+-- /* save_tex_scanner(texstate); */
+-- get_token();
+-- make_new_token(L, cur_cmd, cur_chr, cur_cs);
+-- back_input();
+-- /* unsave_tex_scanner(texstate); */
+-- return 1;
+-- }
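These scanner, getter and setter tables are the foundation of the new \clf_ interfaces (see toks-scn.lua further down). A hedged sketch of direct use, for example from a \ctxlua call that is followed by "123 {hello}" in the input (the helper name is made up):

    local scanners = tokens.scanners
    local setters  = tokens.setters

    local function grab() -- hypothetical helper, not part of the module
        local n = scanners.integer()  -- expands and scans an integer
        local s = scanners.string()   -- scans a braced (or delimited) string
        setters.macro("grabbed", s)   -- defines \grabbed with that string
        return n
    end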
diff --git a/tex/context/base/toks-ini.mkiv b/tex/context/base/toks-ini.mkiv
index 4eb756b75..49625a939 100644
--- a/tex/context/base/toks-ini.mkiv
+++ b/tex/context/base/toks-ini.mkiv
@@ -14,57 +14,11 @@
\writestatus{loading}{ConTeXt Token Support / Initialization}
\registerctxluafile{toks-ini}{1.001}
+\registerctxluafile{toks-scn}{1.001}
+\registerctxluafile{cldf-scn}{1.001}
\unprotect
-%D Handy for manuals \unknown
-
-\unexpanded\def\starttokens [#1]{\ctxlua{tokens.collectors.install("#1","stoptokens")}}
- \let\stoptokens \relax
- \def\flushtokens [#1]{\ctxlua{tokens.collectors.flush("#1")}}
- \def\showtokens [#1]{\ctxlua{tokens.collectors.show("#1")}}
- \def\testtokens [#1]{\ctxlua{tokens.collectors.with_words("#1")}}
- \def\registertoken #1{\ctxlua{tokens.collectors.register("#1")}}
-
-%D Inspired by a prototype by Taco for Thomas cum suis.
-
-% \defineremapper[babelgreek]
-%
-% \remapcharacter[babelgreek][`a]{\alpha}
-% \remapcharacter[babelgreek][`b]{\beta}
-% \remapcharacter[babelgreek][`c]{\gamma}
-% \remapcharacter[babelgreek][`d]{OEPS}
-%
-% \starttext
-%
-% [\startbabelgreek
-% a b c some stuff here \blank[big] oeps b d
-% \stopbabelgreek]
-%
-% [\babelgreek{some stuff here}]
-%
-% \stoptext
-
-\unexpanded\def\defineremapper[#1]%
- {\setuevalue{\e!start#1}{\toks_start_remapper{#1}}%
- \setuevalue{\e!stop #1}{\toks_stop_remapper {#1}}%
- \letvalue{#1}\relax
- \normalexpanded{\expandafter\def\csname#1\endcsname##1{\csname\e!start#1\endcsname##1\csname\e!stop#1\endcsname}}}
-
-\unexpanded\def\toks_start_remapper#1%
- {\ctxlua{tokens.collectors.install("#1", "\e!stop#1")}}
-
-\unexpanded\def\toks_stop_remapper#1%
- {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end, true)}}
-
-\unexpanded\def\remaptokens#1%
- {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end)}}
-
-\unexpanded\def\remapcharacter
- {\dodoubleempty\toks_remap_character}
-
-\def\toks_remap_character[#1][#2]#3%
- {\ctxlua{tokens.collectors.install("store", "ctxlua")}#3%
- \ctxlua{tokens.collectors.remapper.store("store","#1",\number#2)}}
+% nothing yet
\protect \endinput
diff --git a/tex/context/base/toks-map.lua b/tex/context/base/toks-map.lua
new file mode 100644
index 000000000..9120c2084
--- /dev/null
+++ b/tex/context/base/toks-map.lua
@@ -0,0 +1,70 @@
+if not modules then modules = { } end modules ['toks-map'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Even more experimental ... this used to be part of toks-ini but, as
+-- this kind of remapping has little use, it is not loaded in the
+-- core. We just keep it here for old times' sake.
+
+-- local remapper = { } -- namespace
+-- collectors.remapper = remapper
+--
+-- local remapperdata = { } -- user mappings
+-- remapper.data = remapperdata
+--
+-- function remapper.store(tag,class,key)
+-- local s = remapperdata[class]
+-- if not s then
+-- s = { }
+-- remapperdata[class] = s
+-- end
+-- s[key] = collectordata[tag]
+-- collectordata[tag] = nil
+-- end
+--
+-- function remapper.convert(tag,toks)
+-- local data = remapperdata[tag]
+-- local leftbracket = utfbyte('[')
+-- local rightbracket = utfbyte(']')
+-- local skipping = 0
+-- -- todo: math
+-- if data then
+-- local t, n = { }, 0
+-- for s=1,#toks do
+-- local tok = toks[s]
+-- local one, two = tok[1], tok[2]
+-- if one == 11 or one == 12 then
+-- if two == leftbracket then
+-- skipping = skipping + 1
+-- n = n + 1 ; t[n] = tok
+-- elseif two == rightbracket then
+-- skipping = skipping - 1
+-- n = n + 1 ; t[n] = tok
+-- elseif skipping == 0 then
+-- local new = data[two]
+-- if new then
+-- if #new > 1 then
+-- for n=1,#new do
+-- n = n + 1 ; t[n] = new[n]
+-- end
+-- else
+-- n = n + 1 ; t[n] = new[1]
+-- end
+-- else
+-- n = n + 1 ; t[n] = tok
+-- end
+-- else
+-- n = n + 1 ; t[n] = tok
+-- end
+-- else
+-- n = n + 1 ; t[n] = tok
+-- end
+-- end
+-- return t
+-- else
+-- return toks
+-- end
+-- end
diff --git a/tex/context/base/toks-map.mkiv b/tex/context/base/toks-map.mkiv
new file mode 100644
index 000000000..f1b63a68b
--- /dev/null
+++ b/tex/context/base/toks-map.mkiv
@@ -0,0 +1,63 @@
+%D \module
+%D [ file=toks-map, % experimental moved from toks-ini
+%D version=2007.03.03,
+%D title=\CONTEXT\ Token Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% \writestatus{loading}{ConTeXt Token Support / Remapping}
+%
+% \registerctxluafile{toks-map}{1.001}
+%
+% \unprotect
+%
+% %D Inspired by a prototype by Taco for Thomas cum suis.
+% %D
+% %D \starttyping
+% %D \defineremapper[babelgreek]
+% %D
+% %D \remapcharacter[babelgreek][`a]{\alpha}
+% %D \remapcharacter[babelgreek][`b]{\beta}
+% %D \remapcharacter[babelgreek][`c]{\gamma}
+% %D \remapcharacter[babelgreek][`d]{OEPS}
+% %D
+% %D \starttext
+% %D
+% %D [\startbabelgreek
+% %D a b c some stuff here \blank[big] oeps b d
+% %D \stopbabelgreek]
+% %D
+% %D [\babelgreek{some stuff here}]
+% %D
+% %D \stoptext
+% %D \stoptyping
+%
+% \unexpanded\def\defineremapper[#1]%
+% {\setuevalue{\e!start#1}{\toks_start_remapper{#1}}%
+% \setuevalue{\e!stop #1}{\toks_stop_remapper {#1}}%
+% \letvalue{#1}\relax
+% \normalexpanded{\expandafter\def\csname#1\endcsname##1{\csname\e!start#1\endcsname##1\csname\e!stop#1\endcsname}}}
+%
+% \unexpanded\def\toks_start_remapper#1%
+% {\ctxlua{tokens.collectors.install("#1", "\e!stop#1")}}
+%
+% \unexpanded\def\toks_stop_remapper#1%
+% {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end, true)}}
+%
+% \unexpanded\def\remaptokens#1%
+% {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end)}}
+%
+% \unexpanded\def\remapcharacter
+% {\dodoubleempty\toks_remap_character}
+%
+% \def\toks_remap_character[#1][#2]#3%
+% {\ctxlua{tokens.collectors.install("store", "ctxlua")}#3%
+% \ctxlua{tokens.collectors.remapper.store("store","#1",\number#2)}}
+%
+% \protect \endinput
diff --git a/tex/context/base/toks-scn.lua b/tex/context/base/toks-scn.lua
new file mode 100644
index 000000000..84924c694
--- /dev/null
+++ b/tex/context/base/toks-scn.lua
@@ -0,0 +1,437 @@
+if not modules then modules = { } end modules ['toks-scn'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Writing this kind of code (and completing the newtoken code base) is fun. I did
+-- so with the brilliant film music from The Girl with the Dragon Tattoo running in a
+-- loop in the background (three CDs by Trent Reznor and Atticus Ross). An alien
+-- feeling helps with alien code.
+
+-- todo: more \let's at the tex end
+
+local type, next, tostring, tonumber = type, next, tostring, tonumber
+
+local formatters = string.formatters
+local concat = table.concat
+
+local scanners = tokens.scanners
+local tokenbits = tokens.bits
+
+local scanstring = scanners.string
+local scaninteger = scanners.integer
+local scannumber = scanners.number
+local scankeyword = scanners.keyword
+local scanword = scanners.word
+local scancode = scanners.code
+local scanboolean = scanners.boolean
+local scandimen = scanners.dimen
+
+local todimen = number.todimen
+local toboolean = toboolean
+
+local lpegmatch = lpeg.match
+local p_unquoted = lpeg.Cs(lpeg.patterns.unquoted)
+
+local trace_compile = false trackers.register("tokens.compile", function(v) trace_compile = v end)
+local report_compile = logs.reporter("tokens","compile")
+local report_scan = logs.reporter("tokens","scan")
+
+local open = tokenbits.open
+local close = tokenbits.close
+
+local function scanopen()
+ while true do
+ local c = scancode(open)
+ if c == 123 then
+ return true
+ -- elseif c ~= 32 then
+ elseif not c then
+ return
+ end
+ end
+end
+
+local function scanclose()
+ while true do
+ local c = scancode(close)
+ if c == 125 then
+ return true
+ -- elseif c ~= 32 then
+ elseif not c then
+ return
+ end
+ end
+end
+
+scanners.scanopen = scanopen
+scanners.scanclose = scanclose
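+
+-- Both helpers loop over the open/close bit masks: scanopen() returns true once
+-- an explicit "{" (code 123) shows up and scanclose() once a "}" (code 125) does;
+-- otherwise they return nil, so wrapping braces stay optional for callers.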
+
+local function scanlist()
+ local wrapped = scanopen()
+ local list = { }
+ local size = 0
+ while true do
+ local entry = scanstring()
+ if entry then
+ size = size + 1
+ list[size] = entry
+ else
+ break
+ end
+ end
+ if wrapped then
+ scanclose()
+ end
+ return list
+end
+
+local function scanconditional()
+ local kw = scanword()
+ if kw == "true" then
+ return true
+ end
+ if kw == "false" then
+ return false
+ end
+ local c = scaninteger()
+ if c then
+ return c == 0 -- with a conditional 0=true
+ end
+ return nil
+end
+
+scanners.list = scanlist
+scanners.conditional = scanconditional
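+
+-- scanconditional accepts the keywords "true" and "false" as well as an integer,
+-- where 0 counts as true and any other value as false.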
+
+local shortcuts = {
+ tokens = tokens,
+ bits = tokenbits,
+ open = open,
+ close = close,
+ scanners = scanners,
+ scanstring = scanstring,
+ scaninteger = scaninteger,
+ scannumber = scannumber,
+ scankeyword = scankeyword,
+ scanword = scanword,
+ scancode = scancode,
+ scanboolean = scanboolean,
+ scandimen = scandimen,
+ scandimension = scandimen,
+ scanconditional = scanconditional,
+ scanopen = scanopen,
+ scanclose = scanclose,
+ scanlist = scanlist,
+ todimen = todimen,
+ tonumber = tonumber,
+ tostring = tostring,
+ toboolean = toboolean,
+ inspect = inspect,
+ report = report_scan,
+}
+
+tokens.shortcuts = shortcuts
+
+local load = load
+local dump = string.dump
+
+local function loadstripped(code)
+ return load(code,nil,nil,shortcuts)
+ -- return load(dump(load(code),true),nil,nil,shortcuts)
+end
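+
+-- The shortcuts table is passed as the environment of the generated code: the
+-- fourth argument of load becomes its _ENV, so compiled scanners can call
+-- scanstring, todimen and friends directly instead of going through globals.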
+
+tokens.converters = {
+ tonumber = "tonumber",
+ tostring = "tostring",
+ toboolean = "toboolean",
+ todimen = "todimen",
+ toglue = "todimen",
+}
+
+local f_if = formatters[ " if scankeyword('%s') then data['%s'] = scan%s()"]
+local f_elseif = formatters[" elseif scankeyword('%s') then data['%s'] = scan%s()"]
+local f_local = formatters["local scan%s = scanners.%s"]
+local f_scan = formatters["scan%s()"]
+local f_shortcut = formatters["local %s = scanners.converters.%s"]
+
+local f_if_c = formatters[ " if scankeyword('%s') then data['%s'] = %s(scan%s())"]
+local f_elseif_c = formatters[" elseif scankeyword('%s') then data['%s'] = %s(scan%s())"]
+local f_scan_c = formatters["%s(scan%s())"]
+
+local f_any = formatters[" else local key = scanword() if key then data[key] = scan%s() else break end end"]
+local f_any_c = formatters[" else local key = scanword() if key then data[key] = %s(scan%s()) else break end end"]
+local s_done = " else break end"
+
+local f_any_all = formatters[" local key = scanword() if key then data[key] = scan%s() else break end"]
+local f_any_all_c= formatters[" local key = scanword() if key then data[key] = %s(scan%s()) else break end"]
+
+local f_table = formatters["%\nt\nreturn function()\n local data = { }\n%s\n return %s\nend\n"]
+local f_sequence = formatters["%\nt\n%\nt\n%\nt\nreturn function()\n return %s\nend\n"]
+local f_simple = formatters["%\nt\nreturn function()\n return %s\nend\n"]
+local f_string = formatters["%q"]
+local f_action_f = formatters["action%s(%s)"]
+local f_action_s = formatters["local action%s = tokens._action[%s]"]
+local f_nested = formatters["local function scan%s()\n local data = { }\n%s\n return data\nend\n"]
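+
+-- As a sketch, a specification entry like { "data", "string" } ends up in the
+-- generated scanner as
+--
+--   if scankeyword('data') then data['data'] = scanstring()
+--
+-- while a converter entry like { "compact", "string", "tonumber" } becomes
+--
+--   if scankeyword('compact') then data['compact'] = tonumber(scanstring())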
+
+-- local f_check = formatters[ [[
+-- local wrapped = false
+-- while true do
+-- local c = scancode(open)
+-- if c == 123 then
+-- wrapped = true
+-- break
+-- elseif c ~= 32 then
+-- break
+-- end
+-- end
+-- while true do
+-- ]] .. "%\nt\n" .. [[
+-- %s
+-- end
+-- if wrapped then
+-- while true do
+-- local c = scancode(close)
+-- if c == 125 then
+-- break
+-- elseif c ~= 32 then
+-- break
+-- end
+-- end
+-- end
+-- ]] ]
+
+local f_check = formatters[ [[
+ local wrapped = scanopen()
+ while true do
+ ]] .. "%\nt\n" .. [[
+ %s
+ end
+ if wrapped then
+ scanclose()
+ end
+]] ]
+
+function tokens.compile(specification)
+ local f = { }
+ local n = 0
+ local c = { }
+ local t = specification.arguments or specification
+ local a = specification.actions or nil
+ if type(a) == "function" then
+ a = { a }
+ end
+ local code
+ local function compile(t,nested)
+ local done = s_done
+ local r = { }
+ local m = 0
+ for i=1,#t do
+ local ti = t[i]
+ if ti == "*" and i == 1 then
+ done = f_any_all("string")
+ else
+ local t1 = ti[1]
+ local t2 = ti[2] or "string"
+ if type(t2) == "table" then
+ n = n + 1
+ f[n] = compile(t2,n)
+ t2 = n
+ end
+ local t3 = ti[3]
+ if type(t3) == "function" then
+ -- todo: also create shortcut
+ elseif t3 then
+ c[t3] = f_shortcut(t3,t3)
+ if t1 == "*" then
+ if i == 1 then
+ done = f_any_all_c(t3,t2)
+ break
+ else
+ done = f_any_c(t3,t2)
+ end
+ else
+ m = m + 1
+ r[m] = (m > 1 and f_elseif_c or f_if_c)(t1,t1,t3,t2)
+ end
+ else
+ if t1 == "*" then
+ if i == 1 then
+ done = f_any_all(t2)
+ break
+ else
+ done = f_any(t2)
+ end
+ else
+ m = m + 1
+ r[m] = (m > 1 and f_elseif or f_if )(t1,t1,t2)
+ end
+ end
+ end
+ end
+ local c = f_check(r,done)
+ if nested then
+ return f_nested(nested,c)
+ else
+ return c
+ end
+ end
+ local tt = type(t)
+ if tt == "string" then
+ if a then
+ local s = lpegmatch(p_unquoted,t)
+ if s and t ~= s then
+ code = t
+ else
+ code = f_scan(t)
+ end
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ code = f_simple(f,code)
+ else
+ return scanners[t]
+ end
+ elseif tt ~= "table" then
+ return
+ elseif #t == 1 then
+ local ti = t[1]
+ if type(ti) == "table" then
+ ti = compile(ti)
+ code = "data"
+ if a then
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ end
+ code = f_table(f,ti,code)
+ elseif a then
+ code = f_scan(ti)
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ code = f_simple(f,code)
+ else
+ return scanners[ti]
+ end
+ else
+ local r = { }
+ local p = { }
+ local m = 0
+ for i=1,#t do
+ local ti = t[i]
+ local tt = type(ti)
+ if tt == "table" then
+ if ti[1] == "_constant_" then
+ local v = ti[2]
+ if type(v) == "string" then
+ r[i] = f_string(v)
+ else
+ r[i] = tostring(v)
+ end
+ else
+ m = m + 1
+ p[m] = compile(ti,100+m)
+ r[i] = f_scan(100+m)
+ end
+ elseif tt == "number" then
+ r[i] = tostring(ti)
+ elseif tt == "boolean" then
+ r[i] = tostring(ti)
+ else
+ local s = lpegmatch(p_unquoted,ti)
+ if s and ti ~= s then
+ r[i] = ti -- a string, given as "'foo'" or '"foo"'
+ elseif scanners[ti] then
+ r[i] = f_scan(ti)
+ else
+ report_compile("unknown scanner %a",ti)
+ r[i] = ti
+ end
+ end
+ end
+ code = concat(r,",")
+ if a then
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ end
+ code = f_sequence(c,f,p,code)
+ end
+ if not code then
+ return
+ end
+ if trace_compile then
+ report_compile("code: %s",code)
+ end
+    local compiled, message = loadstripped(code)
+    if compiled then
+        code = compiled() -- sets action
+    else
+        report_compile("error in code: %s",code)
+        report_compile("error message: %s",message)
+        code = nil
+    end
+ if a then
+ tokens._action = nil
+ end
+ if code then
+ return code
+ end
+end
+
+-- local fetch = tokens.compile {
+-- "string",
+-- "string",
+-- {
+-- { "data", "string" },
+-- { "tab", "string" },
+-- { "method", "string" },
+-- { "foo", {
+-- { "method", "integer" },
+-- { "compact", "number" },
+-- { "nature" },
+-- { "*" }, -- any key
+-- } },
+-- { "compact", "string", "tonumber" },
+-- { "nature", "boolean" },
+-- { "escape", "string" },
+-- { "escape" },
+--     },
+-- "boolean",
+-- }
+--
+-- os.exit()
+
+function tokens.scantable(t,data)
+ if not data then
+ data = { }
+ end
+ local wrapped = scanopen()
+ while true do
+ local key = scanword()
+ if key then
+ local get = t[key]
+ if get then
+ data[key] = get()
+ else
+ -- catch all we can get
+ end
+ else
+ break
+ end
+ end
+ if wrapped then
+ scanclose()
+ end
+ return data
+end
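+
+-- A minimal usage sketch (the keys are made up; any table mapping keywords to the
+-- scanners above will do):
+--
+-- local spec = { width = scanners.dimen, align = scanners.string }
+-- local data = tokens.scantable(spec) -- scans e.g. "{ width 10pt align middle }"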
diff --git a/tex/context/base/toks-tra.lua b/tex/context/base/toks-tra.lua
new file mode 100644
index 000000000..bf2b91d38
--- /dev/null
+++ b/tex/context/base/toks-tra.lua
@@ -0,0 +1,298 @@
+if not modules then modules = { } end modules ['toks-ini'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
+local format, gsub = string.format, string.gsub
+local tostring = tostring
+
+local tokens = tokens
+local newtoken = newtoken
+local tex = tex
+local context = context
+local commands = commands
+
+tokens.collectors = tokens.collectors or { }
+local collectors = tokens.collectors
+
+collectors.data = collectors.data or { }
+local collectordata = collectors.data
+
+collectors.registered = collectors.registered or { }
+local registered = collectors.registered
+
+local report = logs.reporter("tokens","collectors")
+
+if newtoken then
+
+ -- todo:
+ --
+ -- register : macros that will be expanded (only for demo-ing)
+ -- flush : print back to tex
+ -- test : fancy stuff
+
+ local get_next = newtoken.get_next
+ local create = newtoken.create
+
+ function collectors.install(tag,end_cs)
+ local data, d = { }, 0
+ collectordata[tag] = data
+ end_cs = gsub(end_cs,"^\\","")
+ while true do
+ local t = get_next()
+ if t.csname == end_cs then
+ context[end_cs]()
+ return
+ else
+ d = d + 1
+ data[d] = t
+ end
+ end
+ end
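+
+    -- At the tex end this is wired to \starttokens[tag] ... \stoptokens (see
+    -- toks-tra.mkiv), so the collected tokens end up in collectordata[tag] as a
+    -- plain array and the stop macro is executed once it is seen.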
+
+ local simple = { letter = "letter", other_char = "other" }
+
+ function collectors.show(data)
+        -- We no longer have methods: only method a was ever used (in demos), so
+        -- there is no need to burden the core with this. We have a different
+        -- table anyway.
+ if type(data) == "string" then
+ data = collectordata[data]
+ end
+ if not data then
+ return
+ end
+ local ctx_NC = context.NC
+ local ctx_NR = context.NR
+ local ctx_bold = context.bold
+ local ctx_verbatim = context.verbatim
+ context.starttabulate { "|Tl|Tc|Tl|" }
+ ctx_NC() ctx_bold("cmd")
+ ctx_NC() ctx_bold("meaning")
+ ctx_NC() ctx_bold("properties")
+ ctx_NC() ctx_NR()
+ context.HL()
+ for i=1,#data do
+ local token = data[i]
+ local cmdname = token.cmdname
+ local simple = simple[cmdname]
+ ctx_NC()
+ ctx_verbatim(simple or cmdname)
+ ctx_NC()
+ ctx_verbatim(simple and utfchar(token.mode) or token.csname)
+ ctx_NC()
+ if token.active then context("active ") end
+ if token.expandable then context("expandable ") end
+ if token.protected then context("protected ") end
+ ctx_NC()
+ ctx_NR()
+ end
+ context.stoptabulate()
+ end
+
+ local function printlist(data)
+ if data and #data > 0 then
+ report("not supported (yet): printing back to tex")
+ end
+ end
+
+ tokens.printlist = printlist -- will change to another namespace
+
+ function collectors.flush(tag)
+ printlist(collectordata[tag])
+ end
+
+ function collectors.test(tag,handle)
+ report("not supported (yet): testing")
+ end
+
+ function collectors.register(name)
+ report("not supported (yet): registering")
+ end
+
+else
+
+ -- 1 = command, 2 = modifier (char), 3 = controlsequence id
+
+ local create = token.create
+ local csname_id = token.csname_id
+ local command_id = token.command_id
+ local command_name = token.command_name
+ local get_next = token.get_next
+ local expand = token.expand
+ local csname_name = token.csname_name
+
+ local function printlist(data)
+ if data and #data > 0 then
+ callbacks.push('token_filter', function ()
+ callbacks.pop('token_filter') -- tricky but the nil assignment helps
+ return data
+ end)
+ end
+ end
+
+ tokens.printlist = printlist -- will change to another namespace
+
+ function collectors.flush(tag)
+ printlist(collectordata[tag])
+ end
+
+ function collectors.register(name)
+ registered[csname_id(name)] = name
+ end
+
+ local call = command_id("call")
+ local letter = command_id("letter")
+ local other = command_id("other_char")
+
+ function collectors.install(tag,end_cs)
+ local data, d = { }, 0
+ collectordata[tag] = data
+ end_cs = gsub(end_cs,"^\\","")
+ local endcs = csname_id(end_cs)
+ while true do
+ local t = get_next()
+ local a, b = t[1], t[3]
+ if b == endcs then
+ context[end_cs]()
+ return
+ elseif a == call and registered[b] then
+ expand()
+ else
+ d = d + 1
+ data[d] = t
+ end
+ end
+ end
+
+ function collectors.show(data)
+        -- We no longer have methods: only method a was ever used (in demos), so
+        -- there is no need to burden the core with this.
+ if type(data) == "string" then
+ data = collectordata[data]
+ end
+ if not data then
+ return
+ end
+ local ctx_NC = context.NC
+ local ctx_NR = context.NR
+ local ctx_bold = context.bold
+ local ctx_verbatim = context.verbatim
+ context.starttabulate { "|T|Tr|cT|Tr|T|" }
+ ctx_NC() ctx_bold("cmd")
+ ctx_NC() ctx_bold("chr")
+ ctx_NC()
+ ctx_NC() ctx_bold("id")
+ ctx_NC() ctx_bold("name")
+ ctx_NC() ctx_NR()
+ context.HL()
+ for i=1,#data do
+ local token = data[i]
+ local cmd = token[1]
+ local chr = token[2]
+ local id = token[3]
+ local name = command_name(token)
+ ctx_NC()
+ ctx_verbatim(name)
+ ctx_NC()
+ if tonumber(chr) >= 0 then
+ ctx_verbatim(chr)
+ end
+ ctx_NC()
+ if cmd == letter or cmd == other then
+ ctx_verbatim(utfchar(chr))
+ end
+ ctx_NC()
+ if id > 0 then
+ ctx_verbatim(id)
+ end
+ ctx_NC()
+ if id > 0 then
+ ctx_verbatim(csname_name(token) or "")
+ end
+ ctx_NC() ctx_NR()
+ end
+ context.stoptabulate()
+ end
+
+ function collectors.test(tag,handle)
+ local t, w, tn, wn = { }, { }, 0, 0
+ handle = handle or collectors.defaultwords
+ local tagdata = collectordata[tag]
+ for k=1,#tagdata do
+ local v = tagdata[k]
+ if v[1] == letter then
+ wn = wn + 1
+ w[wn] = v[2]
+ else
+ if wn > 0 then
+ handle(t,w)
+ wn = 0
+ end
+ tn = tn + 1
+ t[tn] = v
+ end
+ end
+ if wn > 0 then
+ handle(t,w)
+ end
+ collectordata[tag] = t
+ end
+
+end
+
+-- Interfacing:
+
+commands.collecttokens = collectors.install
+commands.showtokens = collectors.show
+commands.flushtokens = collectors.flush
+commands.testtokens = collectors.test
+commands.registertoken = collectors.register
+
+-- Redundant:
+
+-- function collectors.test(tag)
+-- printlist(collectordata[tag])
+-- end
+
+-- For old times sake:
+
+collectors.dowithwords = collectors.test
+
+-- This is only used in old articles ... will move to a module:
+
+local create = newtoken and newtoken.create or token.create
+
+tokens.vbox = create("vbox")
+tokens.hbox = create("hbox")
+tokens.vtop = create("vtop")
+tokens.bgroup = create(utfbyte("{"),1)
+tokens.egroup = create(utfbyte("}"),2)
+
+tokens.letter = function(chr) return create(utfbyte(chr),11) end
+tokens.other = function(chr) return create(utfbyte(chr),12) end
+
+tokens.letters = function(str)
+ local t, n = { }, 0
+ for chr in utfvalues(str) do
+ n = n + 1
+ t[n] = create(chr, 11)
+ end
+ return t
+end
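+
+-- For instance (a sketch): tokens.letters("foo") yields three catcode 11 letter
+-- tokens, while tokens.other("*") creates a single catcode 12 token.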
+
+function collectors.defaultwords(t,str)
+ if t then
+ local n = #t
+ n = n + 1 ; t[n] = tokens.bgroup
+ n = n + 1 ; t[n] = create("red")
+ for i=1,#str do
+ n = n + 1 ; t[n] = tokens.other('*')
+ end
+ n = n + 1 ; t[n] = tokens.egroup
+ end
+end
diff --git a/tex/context/base/toks-tra.mkiv b/tex/context/base/toks-tra.mkiv
new file mode 100644
index 000000000..a3e27eaf8
--- /dev/null
+++ b/tex/context/base/toks-tra.mkiv
@@ -0,0 +1,31 @@
+%D \module
+%D [ file=toks-tra, % was toks-ini
+%D version=2007.03.03,
+%D title=\CONTEXT\ Token Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Token Support / Tracing}
+
+\registerctxluafile{toks-tra}{1.001}
+
+\unprotect
+
+%D Handy for manuals \unknown\ but not really used in practice, so it might
+%D become a runtime loaded module instead.
+
+\unexpanded\def\starttokens [#1]{\ctxcommand{collecttokens("#1","stoptokens")}}
+ \let\stoptokens \relax
+ \def\flushtokens [#1]{\ctxcommand{flushtokens("#1")}}
+ \def\showtokens [#1]{\ctxcommand{showtokens("#1")}}
+ \def\testtokens [#1]{\ctxcommand{testtokens("#1")}}
+ \def\registertoken #1{\ctxcommand{registertoken("#1")}}
+
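+%D A minimal usage sketch (the tag \type {demo} is arbitrary):
+%D
+%D \starttyping
+%D \starttokens[demo]\bf test\stoptokens
+%D \showtokens [demo]
+%D \stoptyping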
+
+\protect \endinput
diff --git a/tex/context/base/trac-ctx.lua b/tex/context/base/trac-ctx.lua
index 706e7a244..493ce7936 100644
--- a/tex/context/base/trac-ctx.lua
+++ b/tex/context/base/trac-ctx.lua
@@ -6,15 +6,15 @@ if not modules then modules = { } end modules ['trac-ctx'] = {
license = "see context related readme files"
}
-local commands = commands
-local context = context
-local register = trackers.register
+local context = context
+local implement = interfaces.implement
+local register = trackers.register
local textrackers = tex.trackers or { }
local texdirectives = tex.directives or { }
-tex.trackers = textrackers
-tex.directives = texdirectives
+tex.trackers = textrackers
+tex.directives = texdirectives
storage.register("tex/trackers", textrackers, "tex.trackers")
storage.register("tex/directives",texdirectives,"tex.directives")
@@ -39,10 +39,32 @@ local function install(category,register,tag,enable,disable)
register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
end
-function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end
-function commands.initializetexdirectives() initialize(texdirectives,directives.register) end
+implement {
+ name = "initializetextrackers",
+ actions = function()
+ initialize(textrackers,trackers.register)
+ end
+}
--- commands.install(tag,enable,disable):
+implement {
+ name = "initializetexdirectives",
+ actions = function()
+ initialize(texdirectives,directives.register)
+ end
+}
-function commands.installtextracker (...) install(textrackers ,trackers .register,...) end
-function commands.installtexdirective(...) install(texdirectives,directives.register,...) end
+implement {
+ name = "installtextracker",
+ actions = function(tag,enable,disable)
+ install(textrackers,trackers.register,tag,enable,disable)
+ end,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "installtexdirective",
+ actions = function(tag,enable,disable)
+ install(texdirectives,directives.register,tag,enable,disable)
+ end,
+ arguments = { "string", "string", "string" }
+}
diff --git a/tex/context/base/trac-ctx.mkiv b/tex/context/base/trac-ctx.mkiv
index 3baddede2..4240281a6 100644
--- a/tex/context/base/trac-ctx.mkiv
+++ b/tex/context/base/trac-ctx.mkiv
@@ -1,4 +1,3 @@
-
%D \module
%D [ file=trac-ctx,
%D version=2012.07.13,
@@ -19,14 +18,14 @@
\unprotect
\unexpanded\def\installtextracker#1#2#3%
- {\ctxcommand{installtextracker("#1",\!!bs\detokenize{#2}\!!es,\!!bs\detokenize{#3}\!!es)}}
+ {\clf_installtextracker{#1}{\detokenize{#2}}{\detokenize{#3}}}
\unexpanded\def\installtexdirective#1#2#3%
- {\ctxcommand{installtexdirective("#1",\!!bs\detokenize{#2}\!!es,\!!bs\detokenize{#3}\!!es)}}
+ {\clf_installtexdirective{#1}{\detokenize{#2}}{\detokenize{#3}}}
\appendtoks
- \ctxcommand{initializetextrackers ()}%
- \ctxcommand{initializetexdirectives()}%
+ \clf_initializetextrackers
+ \clf_initializetexdirectives
\to \everyjob
\protect \endinput
diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua
index 4cc48c4a5..d998295c1 100644
--- a/tex/context/base/trac-deb.lua
+++ b/tex/context/base/trac-deb.lua
@@ -9,27 +9,32 @@ if not modules then modules = { } end modules ['trac-deb'] = {
local lpeg, status = lpeg, status
local lpegmatch = lpeg.match
-local format, concat, match = string.format, table.concat, string.match
+local format, concat, match, find = string.format, table.concat, string.match, string.find
local tonumber, tostring = tonumber, tostring
-- maybe tracers -> tracers.tex (and tracers.lua for current debugger)
-local report_system = logs.reporter("system","tex")
+----- report_tex = logs.reporter("tex error")
+----- report_lua = logs.reporter("lua error")
+local report_nl = logs.newline
+local report_str = logs.writer
-tracers = tracers or { }
-local tracers = tracers
+tracers = tracers or { }
+local tracers = tracers
-tracers.lists = { }
-local lists = tracers.lists
+tracers.lists = { }
+local lists = tracers.lists
-tracers.strings = { }
-local strings = tracers.strings
+tracers.strings = { }
+local strings = tracers.strings
-local texgetdimen = tex.getdimen
-local texgettoks = tex.gettoks
-local texgetcount = tex.getcount
+local texgetdimen = tex.getdimen
+local texgettoks = tex.gettoks
+local texgetcount = tex.getcount
-strings.undefined = "undefined"
+local implement = interfaces.implement
+
+strings.undefined = "undefined"
lists.scratch = {
0, 2, 4, 6, 8
@@ -96,7 +101,19 @@ function tracers.knownlist(name)
return l and #l > 0
end
-function tracers.showlines(filename,linenumber,offset,errorstr)
+local savedluaerror = nil
+
+local function errorreporter(luaerror)
+ if luaerror then
+ logs.enable("lua error") --
+ return logs.reporter("lua error")
+ else
+ logs.enable("tex error")
+ return logs.reporter("tex error")
+ end
+end
+
+function tracers.showlines(filename,linenumber,offset,luaerrorline)
local data = io.loaddata(filename)
if not data or data == "" then
local hash = url.hashed(filename)
@@ -109,35 +126,21 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
local lines = data and string.splitlines(data)
if lines and #lines > 0 then
- -- This does not work completely as we cannot access the last Lua error using
- -- table.print(status.list()). This is on the agenda. Eventually we will
- -- have a sequence of checks here (tex, lua, mp) at this end.
- --
- -- Actually, in 0.75+ the lua error message is even weirder as you can
- -- get:
- --
- -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ...
- --
- -- <inserted text> \endgroup \directlua {
- --
- -- So there is some work to be done in the LuaTeX engine.
- --
- local what, where = match(errorstr,[[LuaTeX error <main (%a+) instance>:(%d+)]])
- or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet
- if where then
+ if luaerrorline and luaerrorline > 0 then
-- lua error: linenumber points to last line
local start = "\\startluacode"
local stop = "\\stopluacode"
- local where = tonumber(where)
- if lines[linenumber] == start then
- local n = linenumber
- for i=n,1,-1 do
- if lines[i] == start then
- local n = i + where
- if n <= linenumber then
- linenumber = n
- end
+ local n = linenumber
+ for i=n,1,-1 do
+ local line = lines[i]
+ if not line then
+ break
+ elseif find(line,start) then
+ n = i + luaerrorline - 1
+ if n <= linenumber then
+ linenumber = n
end
+ break
end
end
end
@@ -159,30 +162,84 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
end
-function tracers.printerror(offset)
- local inputstack = resolvers.inputstack
- local filename = inputstack[#inputstack] or status.filename
- local linenumber = tonumber(status.linenumber) or 0
+-- this will work ok in >=0.79
+
+-- todo: last tex error has ! prepended
+-- todo: some nested errors have two line numbers
+-- todo: collect errorcontext in string (after code cleanup)
+-- todo: have a separate status.lualinenumber
+
+-- todo: \starttext bla \blank[foo] bla \stoptext
+
+local function processerror(offset)
+ local inputstack = resolvers.inputstack
+ local filename = inputstack[#inputstack] or status.filename
+ local linenumber = tonumber(status.linenumber) or 0
+ --
+ -- print("[[ last tex error: " .. tostring(status.lasterrorstring) .. " ]]")
+ -- print("[[ last lua error: " .. tostring(status.lastluaerrorstring) .. " ]]")
+ -- print("[[ start errorcontext ]]")
+ -- tex.show_context()
+ -- print("\n[[ stop errorcontext ]]")
+ --
+ local lasttexerror = status.lasterrorstring or "?"
+ local lastluaerror = status.lastluaerrorstring or lasttexerror
+ local luaerrorline = match(lastluaerror,[[lua%]?:.-(%d+)]]) or (lastluaerror and find(lastluaerror,"?:0:",1,true) and 0)
+ local report = errorreporter(luaerrorline)
+ tracers.printerror {
+ filename = filename,
+ linenumber = linenumber,
+ lasttexerror = lasttexerror,
+ lastluaerror = lastluaerror,
+ luaerrorline = luaerrorline,
+ offset = tonumber(offset) or 10,
+ }
+end
+
+-- so one can overload the printer if (really) needed
+
+function tracers.printerror(specification)
+ local filename = specification.filename
+ local linenumber = specification.linenumber
+ local lasttexerror = specification.lasttexerror
+ local lastluaerror = specification.lastluaerror
+ local luaerrorline = specification.luaerrorline
+ local offset = specification.offset
+ local report = errorreporter(luaerrorline)
if not filename then
- report_system("error not related to input file: %s ...",status.lasterrorstring)
+ report("error not related to input file: %s ...",lasttexerror)
elseif type(filename) == "number" then
- report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring)
+ report("error on line %s of filehandle %s: %s ...",linenumber,lasttexerror)
else
- -- currently we still get the error message printed to the log/console so we
- -- add a bit of spacing around our variant
- texio.write_nl("\n")
- local errorstr = status.lasterrorstring or "?"
- -- inspect(status.list())
- report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error?
- texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n")
+ report_nl()
+ if luaerrorline then
+ report("error on line %s in file %s:\n\n%s",linenumber,filename,lastluaerror)
+ -- report("error on line %s in file %s:\n\n%s",linenumber,filename,lasttexerror)
+ else
+ report("error on line %s in file %s: %s",linenumber,filename,lasttexerror)
+ if tex.show_context then
+ report_nl()
+ tex.show_context()
+ end
+ end
+ report_nl()
+ report_str(tracers.showlines(filename,linenumber,offset,tonumber(luaerrorline)))
+ report_nl()
end
end
+local nop = function() end
+
directives.register("system.errorcontext", function(v)
+ local register = callback.register
if v then
- callback.register('show_error_hook', function() tracers.printerror(v) end)
+ register('show_error_message', nop)
+ register('show_error_hook', function() processerror(v) end)
+ register('show_lua_error_hook', nop)
else
- callback.register('show_error_hook', nil)
+ register('show_error_message', nil)
+ register('show_error_hook', nil)
+ register('show_lua_error_hook', nil)
end
end)
@@ -249,3 +306,20 @@ local function trace_calls(n)
end
directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
+
+implement { name = "showtrackers", actions = trackers.show }
+implement { name = "enabletrackers", actions = trackers.enable, arguments = "string" }
+implement { name = "disabletrackers", actions = trackers.disable, arguments = "string" }
+implement { name = "resettrackers", actions = trackers.reset }
+
+implement { name = "showdirectives", actions = directives.show }
+implement { name = "enabledirectives", actions = directives.enable, arguments = "string" }
+implement { name = "disabledirectives", actions = directives.disable, arguments = "string" }
+
+implement { name = "showexperiments", actions = experiments.show }
+implement { name = "enableexperiments", actions = experiments.enable, arguments = "string" }
+implement { name = "disableexperiments", actions = experiments.disable, arguments = "string" }
+
+implement { name = "showdebuginfo", actions = lmx.showdebuginfo }
+implement { name = "overloaderror", actions = lmx.overloaderror }
+implement { name = "showlogcategories", actions = logs.show }
diff --git a/tex/context/base/trac-deb.mkiv b/tex/context/base/trac-deb.mkiv
index fe5dd02dc..1e61a3512 100644
--- a/tex/context/base/trac-deb.mkiv
+++ b/tex/context/base/trac-deb.mkiv
@@ -13,25 +13,31 @@
\writestatus{loading}{ConTeXt Tracing Macros / Debugger}
+\unprotect
+
%registerctxluafile{trac-lmx}{1.001}
\registerctxluafile{trac-deb}{1.001}
-\unexpanded\def\breakpoint{\showdebuginfo\wait}
+\unexpanded\def\breakpoint {\showdebuginfo\wait}
+
+\unexpanded\def\showtrackers {\clf_showtrackers}
+\unexpanded\def\enabletrackers [#1]{\clf_enabletrackers{#1}}
+\unexpanded\def\disabletrackers [#1]{\clf_disabletrackers{#1}}
+\unexpanded\def\resettrackers {\clf_resettrackers}
+
+\unexpanded\def\showdirectives {\clf_showdirectives}
+\unexpanded\def\enabledirectives [#1]{\clf_enabledirectives{#1}}
+\unexpanded\def\disabledirectives [#1]{\clf_disabledirectives{#1}}
-\unexpanded\def\showtrackers {\ctxlua{trackers.show()}}
-\unexpanded\def\enabletrackers [#1]{\ctxlua{trackers.enable("#1")}}
-\unexpanded\def\disabletrackers [#1]{\ctxlua{trackers.disable("#1")}}
-\unexpanded\def\resettrackers {\ctxlua{trackers.reset()}}
+\unexpanded\def\showexperiments {\clf_showexperiments}
+\unexpanded\def\enableexperiments [#1]{\clf_enableexperiments{#1}}
+\unexpanded\def\disableexperiments[#1]{\clf_disableexperiments{#1}}
-\unexpanded\def\showdirectives {\ctxlua{directives.show()}}
-\unexpanded\def\enabledirectives [#1]{\ctxlua{directives.enable("#1")}}
-\unexpanded\def\disabledirectives [#1]{\ctxlua{directives.disable("#1")}}
+\unexpanded\def\showdebuginfo {\clf_showdebuginfo}
+\unexpanded\def\overloaderror {\clf_overloaderror}
-\unexpanded\def\showexperiments {\ctxlua{experiments.show()}}
-\unexpanded\def\enableexperiments [#1]{\ctxlua{experiments.enable("#1")}}
-\unexpanded\def\disableexperiments[#1]{\ctxlua{experiments.disable("#1")}}
+\unexpanded\def\showlogcategories {\clf_showlogcategories}
-\unexpanded\def\showdebuginfo {\ctxlua{lmx.showdebuginfo()}}
-\unexpanded\def\overloaderror {\ctxlua{lmx.overloaderror()}} % \enabledirectives[system.showerror]
+% \enabledirectives[system.showerror]
-\unexpanded\def\showlogcategories {\ctxlua{logs.show()}}
+\protect \endinput
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index 802f2e667..5497e54eb 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -12,7 +12,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- and rawget.
local type, tonumber, select = type, tonumber, select
-local format, lower = string.format, string.lower
+local format, lower, find = string.format, string.lower, string.find
local concat = table.concat
local clock = os.gettimeofday or os.clock -- should go in environment
@@ -123,7 +123,8 @@ function statistics.show()
-- this code will move
local register = statistics.register
register("used platform", function()
- return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
end)
register("luatex banner", function()
return lower(status.banner)
@@ -136,16 +137,25 @@ function statistics.show()
return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
end)
if jit then
- local status = { jit.status() }
- if status[1] then
- register("luajit status", function()
- return concat(status," ",2)
- end)
+ local jitstatus = { jit.status() }
+ if jitstatus[1] then
+ register("luajit options", concat(jitstatus," ",2))
end
end
-- so far
-- collectgarbage("collect")
- register("current memory usage",statistics.memused)
+ register("lua properties",function()
+ local list = status.list()
+ local hashchar = tonumber(list.luatex_hashchars)
+ local mask = lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask == "utf" and "τεχ" or "tex")
+ end)
register("runtime",statistics.runtime)
logs.newline() -- initial newline
for i=1,#statusinfo do
@@ -197,17 +207,3 @@ function statistics.tracefunction(base,tag,...)
statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
end
end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua
index 38220a752..0c92848dc 100644
--- a/tex/context/base/trac-jus.lua
+++ b/tex/context/base/trac-jus.lua
@@ -14,14 +14,30 @@ typesetters.checkers = checkers
local a_alignstate = attributes.private("alignstate")
local a_justification = attributes.private("justification")
-local tracers = nodes.tracers
-local tracedrule = tracers.rule
-
-local new_rule = nodes.pool.rule
-local new_hlist = nodes.pool.hlist
-local new_glue = nodes.pool.glue
-local new_kern = nodes.pool.kern
-local get_list_dimensions = node.dimensions
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local setlist = nuts.setlist
+
+local traverse_id = nuts.traverse_id
+local get_list_dimensions = nuts.dimensions
+local linked_nodes = nuts.linked
+local copy_node = nuts.copy
+
+local tracedrule = nodes.tracers.pool.nuts.rule
+
+local nodepool = nuts.pool
+
+local new_rule = nodepool.rule
+local new_hlist = nodepool.hlist
+local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+
local hlist_code = nodes.nodecodes.hlist
local texsetattribute = tex.setattribute
@@ -46,9 +62,10 @@ end
checkers.set = set
checkers.reset = reset
-function commands.showjustification(n)
- set(n)
-end
+interfaces.implement {
+ name = "showjustification",
+ actions = set
+}
trackers.register("visualizers.justification", function(v)
if v then
@@ -59,34 +76,35 @@ trackers.register("visualizers.justification", function(v)
end)
function checkers.handler(head)
- for current in node.traverse_id(hlist_code,head) do
- if current[a_justification] == 1 then
- current[a_justification] = 0
- local width = current.width
+ for current in traverse_id(hlist_code,tonut(head)) do
+ if getattr(current,a_justification) == 1 then
+ setattr(current,a_justification,0) -- kind of reset
+ local width = getfield(current,"width")
if width > 0 then
- local list = current.list
+ local list = getlist(current)
if list then
local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
local delta = naturalwidth - width
if naturalwidth == 0 or delta == 0 then
-- special box
elseif delta >= max_threshold then
- local rule = tracedrule(delta,naturalheight,naturaldepth,list.glue_set == 1 and "trace:dr"or "trace:db")
- current.list = list .. new_hlist(rule)
+ local rule = tracedrule(delta,naturalheight,naturaldepth,getfield(list,"glue_set") == 1 and "trace:dr" or "trace:db")
+ setfield(current,"list",linked_nodes(list,new_hlist(rule)))
elseif delta <= min_threshold then
- local alignstate = list[a_alignstate]
+ local alignstate = getattr(list,a_alignstate)
if alignstate == 1 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dc")
- current.list = new_hlist(rule) .. list
+ setfield(current,"list",linked_nodes(new_hlist(rule),list))
elseif alignstate == 2 then
- local rule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
- current.list = new_hlist(rule^1) .. list .. new_kern(delta/2) .. new_hlist(rule)
+ local lrule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
+ local rrule = copy_node(lrule)
+ setfield(current,"list",linked_nodes(new_hlist(lrule),list,new_kern(delta/2),new_hlist(rrule)))
elseif alignstate == 3 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dm")
- current.list = list .. new_kern(delta) .. new_hlist(rule)
+ setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
else
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dg")
- current.list = list .. new_kern(delta) .. new_hlist(rule)
+ setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
end
end
end
diff --git a/tex/context/base/trac-jus.mkiv b/tex/context/base/trac-jus.mkiv
index 7a5347da8..d0823d311 100644
--- a/tex/context/base/trac-jus.mkiv
+++ b/tex/context/base/trac-jus.mkiv
@@ -20,6 +20,6 @@
\definesystemattribute[justification] [public]
\unexpanded\def\showjustification
- {\ctxcommand{showjustification()}}
+ {\clf_showjustification} % currently no argument (default 1)
\protect \endinput
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index 41d930536..4f4ea62c4 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -530,7 +530,7 @@ do_nested_include = function(data) -- also used in include
return lpegmatch(pattern_1,data)
end
-function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
+local function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
data = data or ""
local known = cache[data]
if not known then
@@ -608,7 +608,7 @@ function lmx.convertfile(templatefile,variables,nocache)
return lmxresult(converter,variables)
end
-function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
+local function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
if trace_variables then -- will become templates
report_lmx("converting file %a",templatefile)
end
@@ -665,14 +665,13 @@ function lmx.color(r,g,b,a)
end
end
-
-- these can be overloaded
lmx.lmxfile = string.itself
lmx.htmfile = string.itself
lmx.popupfile = os.launch
-function lmxmake(name,variables)
+local function lmxmake(name,variables)
local lmxfile = lmx.lmxfile(name)
local htmfile = lmx.htmfile(name)
if lmxfile == htmfile then
@@ -682,7 +681,7 @@ function lmxmake(name,variables)
return htmfile
end
-lmxmake = lmx.make
+lmx.make = lmxmake
function lmx.show(name,variables)
local htmfile = lmxmake(name,variables)
diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua
index 0d0b66260..ce620e6cf 100644
--- a/tex/context/base/trac-log.lua
+++ b/tex/context/base/trac-log.lua
@@ -6,76 +6,31 @@ if not modules then modules = { } end modules ['trac-log'] = {
license = "see context related readme files"
}
--- if tex and (tex.jobname or tex.formatname) then
---
--- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
--- -- still needed for luajittex
---
--- local texio_write_nl = texio.write_nl
--- local texio_write = texio.write
--- local io_write = io.write
-
--- local write_nl = function(target,...)
--- if not io_write then
--- io_write = io.write
--- end
--- if target == "term and log" then
--- texio_write_nl("log",...)
--- texio_write_nl("term","")
--- io_write(...)
--- elseif target == "log" then
--- texio_write_nl("log",...)
--- elseif target == "term" then
--- texio_write_nl("term","")
--- io_write(...)
--- else
--- texio_write_nl("log",target,...)
--- texio_write_nl("term","")
--- io_write(target,...)
--- end
--- end
-
--- local write = function(target,...)
--- if not io_write then
--- io_write = io.write
--- end
--- if target == "term and log" then
--- texio_write("log",...)
--- io_write(...)
--- elseif target == "log" then
--- texio_write("log",...)
--- elseif target == "term" then
--- io_write(...)
--- else
--- texio_write("log",target,...)
--- io_write(target,...)
--- end
--- end
-
--- texio.write = write
--- texio.write_nl = write_nl
---
--- else
---
--- -- texlua or just lua
---
--- end
-
--- todo: less categories, more subcategories (e.g. nodes)
--- todo: split into basics and ctx specific
+-- In fact all writes could go through Lua, and then we could also write the
+-- console and terminal handlers in Lua. But maybe that's slower, so it's a no-go.
+local next, type, select, print = next, type, select, print
local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
local format, gmatch, find = string.format, string.gmatch, string.find
local concat, insert, remove = table.concat, table.insert, table.remove
local topattern = string.topattern
-local next, type, select = next, type, select
local utfchar = utf.char
+local datetime = os.date
+local openfile = io.open
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
local texgetcount = tex and tex.getcount
+-- variant is set now
+
+local variant = "default"
+-- local variant = "ansi"
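+
+-- As a sketch: calling setformatters("ansi") further down switches to the ansi
+-- variant, which maps the log file targets to "none" so that output only goes to
+-- the terminal.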
+
+-- todo: less categories, more subcategories (e.g. nodes)
+-- todo: split into basics and ctx specific
+
--[[ldx--
<p>This is a prelude to a more extensive logging module. We no longer
provide <l n='xml'/> based logging as parsing is relatively easy anyway.</p>
@@ -109,12 +64,12 @@ wiki : http://contextgarden.net
-- [[local chruni = utilities.strings.chruni]]
-- )
-utilities.strings.formatters.add (
+formatters.add (
formatters, "unichr",
[["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
)
-utilities.strings.formatters.add (
+formatters.add (
formatters, "chruni",
[[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
)
@@ -147,20 +102,127 @@ setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
local report, subreport, status, settarget, setformats, settranslations
-local direct, subdirect, writer, pushtarget, poptarget, setlogfile, settimedlog, setprocessor, setformatters
+local direct, subdirect, writer, pushtarget, poptarget, setlogfile, settimedlog, setprocessor, setformatters, newline
+
+-- We use formatters, but it's best to check for % first because we don't want
+-- that overhead for simple single messages (not that there are that many; we
+-- could have a special weak table).
if tex and (tex.jobname or tex.formatname) then
- -- local format = string.formatter
+ local function useluawrites()
+
+ -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
+ -- still needed for luajittex .. luatex should not have that ^^ mess
+
+ local texio_write_nl = texio.write_nl
+ local texio_write = texio.write
+ local io_write = io.write
+
+ write_nl = function(target,...)
+ if not io_write then
+ io_write = io.write
+ end
+ if target == "term and log" then
+ texio_write_nl("log",...)
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target == "log" then
+ texio_write_nl("log",...)
+ elseif target == "term" then
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target ~= "none" then
+ texio_write_nl("log",target,...)
+ texio_write_nl("term","")
+ io_write(target,...)
+ end
+ end
- local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
+ write = function(target,...)
+ if not io_write then
+ io_write = io.write
+ end
+ if target == "term and log" then
+ texio_write("log",...)
+ io_write(...)
+ elseif target == "log" then
+ texio_write("log",...)
+ elseif target == "term" then
+ io_write(...)
+ elseif target ~= "none" then
+ texio_write("log",target,...)
+ io_write(target,...)
+ end
+ end
- local target = "term and log"
+ texio.write = write
+ texio.write_nl = write_nl
- logs.flush = io.flush
+ useluawrites = ignore
- local formats = { } setmetatable(formats, valueiskey)
- local translations = { } setmetatable(translations,valueiskey)
+ end
+
+ -- local format = string.formatter
+
+ local whereto = "both"
+ local target = nil
+ local targets = nil
+
+ local formats = table.setmetatableindex("self")
+ local translations = table.setmetatableindex("self")
+
+ local report_yes, subreport_yes, direct_yes, subdirect_yes, status_yes
+ local report_nop, subreport_nop, direct_nop, subdirect_nop, status_nop
+
+ local variants = {
+ default = {
+ formats = {
+ report_yes = formatters["%-15s > %s\n"],
+ report_nop = formatters["%-15s >\n"],
+ direct_yes = formatters["%-15s > %s"],
+ direct_nop = formatters["%-15s >"],
+ subreport_yes = formatters["%-15s > %s > %s\n"],
+ subreport_nop = formatters["%-15s > %s >\n"],
+ subdirect_yes = formatters["%-15s > %s > %s"],
+ subdirect_nop = formatters["%-15s > %s >"],
+ status_yes = formatters["%-15s : %s\n"],
+ status_nop = formatters["%-15s :\n"],
+ },
+ targets = {
+ logfile = "log",
+ log = "log",
+ file = "log",
+ console = "term",
+ terminal = "term",
+ both = "term and log",
+ },
+ },
+ ansi = {
+ formats = {
+ report_yes = formatters["%-15s > %s\n"],
+ report_nop = formatters["%-15s >\n"],
+ direct_yes = formatters["%-15s > %s"],
+ direct_nop = formatters["%-15s >"],
+ subreport_yes = formatters["%-15s > %s > %s\n"],
+ subreport_nop = formatters["%-15s > %s >\n"],
+ subdirect_yes = formatters["%-15s > %s > %s"],
+ subdirect_nop = formatters["%-15s > %s >"],
+ status_yes = formatters["%-15s : %s\n"],
+ status_nop = formatters["%-15s :\n"],
+ },
+ targets = {
+ logfile = "none",
+ log = "none",
+ file = "none",
+ console = "term",
+ terminal = "term",
+ both = "term",
+ },
+ }
+ }
+
+ logs.flush = io.flush
writer = function(...)
write_nl(target,...)
@@ -170,13 +232,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
- local report_yes = formatters["%-15s > %s\n"]
- local report_nop = formatters["%-15s >\n"]
-
- -- we can use formatters but best check for % then because for simple messages
- -- we con't want this overhead for single messages (not that there are that
- -- many; we could have a special weak table)
-
report = function(a,b,c,...)
if c then
write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
@@ -189,9 +244,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local direct_yes = formatters["%-15s > %s"]
- local direct_nop = formatters["%-15s >"]
-
direct = function(a,b,c,...)
if c then
return direct_yes(translations[a],formatters[formats[b]](c,...))
@@ -204,9 +256,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local subreport_yes = formatters["%-15s > %s > %s\n"]
- local subreport_nop = formatters["%-15s > %s >\n"]
-
subreport = function(a,s,b,c,...)
if c then
write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
@@ -219,9 +268,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local subdirect_yes = formatters["%-15s > %s > %s"]
- local subdirect_nop = formatters["%-15s > %s >"]
-
subdirect = function(a,s,b,c,...)
if c then
return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
@@ -234,9 +280,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local status_yes = formatters["%-15s : %s\n"]
- local status_nop = formatters["%-15s :\n"]
-
status = function(a,b,c,...)
if c then
write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
@@ -249,17 +292,13 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local targets = {
- logfile = "log",
- log = "log",
- file = "log",
- console = "term",
- terminal = "term",
- both = "term and log",
- }
-
- settarget = function(whereto)
- target = targets[whereto or "both"] or targets.both
+ settarget = function(askedwhereto)
+ whereto = askedwhereto or whereto or "both"
+ target = targets[whereto]
+ if not target then
+ whereto = "both"
+ target = targets[whereto]
+ end
if target == "term" or target == "term and log" then
logs.flush = io.flush
else
@@ -295,24 +334,81 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- setformatters = function(f)
- report_yes = f.report_yes or report_yes
- report_nop = f.report_nop or report_nop
- subreport_yes = f.subreport_yes or subreport_yes
- subreport_nop = f.subreport_nop or subreport_nop
- direct_yes = f.direct_yes or direct_yes
- direct_nop = f.direct_nop or direct_nop
- subdirect_yes = f.subdirect_yes or subdirect_yes
- subdirect_nop = f.subdirect_nop or subdirect_nop
- status_yes = f.status_yes or status_yes
- status_nop = f.status_nop or status_nop
+ setformatters = function(specification)
+ local t = nil
+ local f = nil
+ local d = variants.default
+ if not specification then
+ --
+ elseif type(specification) == "table" then
+ t = specification.targets
+ f = specification.formats or specification
+ else
+ local v = variants[specification]
+ if v then
+ t = v.targets
+ f = v.formats
+ variant = specification
+ end
+ end
+ targets = t or d.targets
+ target = targets[whereto] or target
+ if f then
+ d = d.formats
+ else
+ f = d.formats
+ d = f
+ end
+ setmetatableindex(f,d)
+ report_yes = f.report_yes
+ report_nop = f.report_nop
+ subreport_yes = f.subreport_yes
+ subreport_nop = f.subreport_nop
+ direct_yes = f.direct_yes
+ direct_nop = f.direct_nop
+ subdirect_yes = f.subdirect_yes
+ subdirect_nop = f.subdirect_nop
+ status_yes = f.status_yes
+ status_nop = f.status_nop
+ if variant == "ansi" then
+ useluawrites() -- because tex escapes ^^
+ end
+ settarget(whereto)
end
+ setformatters(variant)
+
setlogfile = ignore
settimedlog = ignore
else
+ local report_yes, subreport_yes, status_yes
+ local report_nop, subreport_nop, status_nop
+
+ local variants = {
+ default = {
+ formats = {
+ report_yes = formatters["%-15s | %s"],
+ report_nop = formatters["%-15s |"],
+ subreport_yes = formatters["%-15s | %s | %s"],
+ subreport_nop = formatters["%-15s | %s |"],
+ status_yes = formatters["%-15s : %s\n"],
+ status_nop = formatters["%-15s :\n"],
+ },
+ },
+ ansi = {
+ formats = {
+ report_yes = formatters["%-15s | %s"],
+ report_nop = formatters["%-15s |"],
+ subreport_yes = formatters["%-15s | %s | %s"],
+ subreport_nop = formatters["%-15s | %s |"],
+ status_yes = formatters["%-15s : %s\n"],
+ status_nop = formatters["%-15s :\n"],
+ },
+ },
+ }
+
logs.flush = ignore
writer = function(s)
@@ -323,9 +419,6 @@ else
write_nl("\n")
end
- local report_yes = formatters["%-15s | %s"]
- local report_nop = formatters["%-15s |"]
-
report = function(a,b,c,...)
if c then
write_nl(report_yes(a,formatters[b](c,...)))
@@ -338,9 +431,6 @@ else
end
end
- local subreport_yes = formatters["%-15s | %s | %s"]
- local subreport_nop = formatters["%-15s | %s |"]
-
subreport = function(a,sub,b,c,...)
if c then
write_nl(subreport_yes(a,sub,formatters[b](c,...)))
@@ -353,9 +443,6 @@ else
end
end
- local status_yes = formatters["%-15s : %s\n"]
- local status_nop = formatters["%-15s :\n"]
-
status = function(a,b,c,...) -- not to be used in lua anyway
if c then
write_nl(status_yes(a,formatters[b](c,...)))
@@ -384,15 +471,36 @@ else
end
end
- setformatters = function(f)
- report_yes = f.report_yes or report_yes
- report_nop = f.report_nop or report_nop
- subreport_yes = f.subreport_yes or subreport_yes
- subreport_nop = f.subreport_nop or subreport_nop
- status_yes = f.status_yes or status_yes
- status_nop = f.status_nop or status_nop
+ setformatters = function(specification)
+ local f = nil
+ local d = variants.default
+ if specification then
+ if type(specification) == "table" then
+ f = specification.formats or specification
+ else
+ local v = variants[specification]
+ if v then
+ f = v.formats
+ end
+ end
+ end
+ if f then
+ d = d.formats
+ else
+ f = d.formats
+ d = f
+ end
+ setmetatableindex(f,d)
+ report_yes = f.report_yes
+ report_nop = f.report_nop
+ subreport_yes = f.subreport_yes
+ subreport_nop = f.subreport_nop
+ status_yes = f.status_yes
+ status_nop = f.status_nop
end
+ setformatters(variant)
+
setlogfile = function(name,keepopen)
if name and name ~= "" then
local localtime = os.localtime
@@ -535,9 +643,10 @@ local function setblocked(category,value)
v.state = value
end
else
- states = utilities.parsers.settings_to_hash(category)
+ states = utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
for c, _ in next, states do
- if data[c] then
+ local v = data[c]
+ if v then
v.state = value
else
c = topattern(c,true,true)
@@ -722,13 +831,13 @@ logs.simpleline = simple
-- obsolete
-function logs.setprogram () end -- obsolete
-function logs.extendbanner() end -- obsolete
-function logs.reportlines () end -- obsolete
-function logs.reportbanner() end -- obsolete
-function logs.reportline () end -- obsolete
-function logs.simplelines () end -- obsolete
-function logs.help () end -- obsolete
+logs.setprogram = ignore -- obsolete
+logs.extendbanner = ignore -- obsolete
+logs.reportlines = ignore -- obsolete
+logs.reportbanner = ignore -- obsolete
+logs.reportline = ignore -- obsolete
+logs.simplelines = ignore -- obsolete
+logs.help = ignore -- obsolete
-- applications
@@ -841,10 +950,12 @@ end
-- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
-- end
-function logs.system(whereto,process,jobname,category,...)
- local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+local f_syslog = formatters["%s %s => %s => %s => %s\r"]
+
+function logs.system(whereto,process,jobname,category,fmt,arg,...)
+ local message = f_syslog(datetime("%d/%m/%y %H:%m:%S"),process,jobname,category,arg == nil and fmt or format(fmt,arg,...))
for i=1,10 do
- local f = io.open(whereto,"a") -- we can consider keeping the file open
+ local f = openfile(whereto,"a") -- we can consider keeping the file open
if f then
f:write(message)
f:close()
diff --git a/tex/context/base/trac-par.lua b/tex/context/base/trac-par.lua
index 262a9cc33..aab57ce5c 100644
--- a/tex/context/base/trac-par.lua
+++ b/tex/context/base/trac-par.lua
@@ -1,8 +1,25 @@
--- for the moment here:
+if not modules then modules = { } end modules ['trac-par'] = {
+ version = 1.001,
+ comment = "companion to node-par.mkiv",
+ author = "Hans Hagen",
+ copyright = "ConTeXt Development Team",
+ license = "see context related readme files",
+    comment   = "a translation of the built-in parbuilder, initial conversion by Taco Hoekwater",
+}
local utfchar = utf.char
local concat = table.concat
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -42,14 +59,14 @@ local function colorize(n)
-- tricky: the built-in method creates dummy fonts and the last line normally has the
-- original font and that one then has ex.auto set
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local ne = n.expansion_factor
+ local ne = getfield(n,"expansion_factor")
if ne == 0 then
if length > 0 then flush() end
setnodecolor(n,"hz:zero")
else
- local f = n.font
+ local f = getfont(n)
if f ~= font then
if length > 0 then
flush()
@@ -79,8 +96,8 @@ local function colorize(n)
end
if trace_verbose then
length = length + 1
- list[length] = utfchar(n.char)
- width = width + n.width -- no kerning yet
+ list[length] = utfchar(getchar(n))
+ width = width + getfield(n,"width") -- no kerning yet
end
end
end
@@ -88,13 +105,13 @@ local function colorize(n)
if length > 0 then
flush()
end
- colorize(n.list,flush)
+ colorize(getlist(n),flush)
else -- nothing to show on kerns
if length > 0 then
flush()
end
end
- n = n.next
+ n = getnext(n)
end
if length > 0 then
flush()
@@ -104,14 +121,14 @@ end
builders.paragraphs.expansion = builders.paragraphs.expansion or { }
function builders.paragraphs.expansion.trace(head)
- colorize(head,true)
+ colorize(tonut(head),true)
return head
end
local tasks = nodes.tasks
-tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
-tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
+-- tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
+-- tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
local function set(v)
if v then
diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua
index d6e0d0339..897b6a15c 100644
--- a/tex/context/base/trac-pro.lua
+++ b/tex/context/base/trac-pro.lua
@@ -26,7 +26,8 @@ local registered = { }
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("reference to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("reference to %a in protected namespace %a",k,name)
end
@@ -34,7 +35,8 @@ end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("assignment to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("assignment to %a in protected namespace %a",k,name)
end
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
index 95fdc43b3..9e2bf8758 100644
--- a/tex/context/base/trac-set.lua
+++ b/tex/context/base/trac-set.lua
@@ -37,7 +37,7 @@ function setters.initialize(filename,name,values) -- filename only for diagnosti
local data = setter.data
if data then
for key, newvalue in next, values do
- local newvalue = is_boolean(newvalue,newvalue)
+ local newvalue = is_boolean(newvalue,newvalue,true) -- strict
local functions = data[key]
if functions then
local oldvalue = functions.value
@@ -97,7 +97,7 @@ local function set(t,what,newvalue)
elseif not value then
value = false -- catch nil
else
- value = is_boolean(value,value)
+ value = is_boolean(value,value,true) -- strict
end
w = topattern(w,true,true)
for name, functions in next, data do
@@ -258,6 +258,7 @@ function setters.new(name) -- we could use foo:bar syntax (but not used that oft
report = function(...) setters.report (setter,...) end,
enable = function(...) enable (setter,...) end,
disable = function(...) disable (setter,...) end,
+ reset = function(...) reset (setter,...) end, -- can be dangerous
register = function(...) register(setter,...) end,
list = function(...) list (setter,...) end,
show = function(...) show (setter,...) end,
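-- Aside (not part of the patch): both trac-set.lua hunks above pass a third
-- argument to is_boolean so that values are interpreted strictly; anything that
-- is not recognizably boolean falls back to the supplied default instead of
-- being coerced. A small illustrative sketch of the call pattern (is_boolean is
-- here assumed to be string.is_boolean, which trac-set.lua localizes; the exact
-- set of accepted strings is defined elsewhere in ConTeXt):

local is_boolean = string.is_boolean

local newvalue = "maybe"
local lenient  = is_boolean(newvalue, newvalue)        -- old call: looser interpretation
local strict   = is_boolean(newvalue, newvalue, true)  -- new call: strict, keeps the default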
diff --git a/tex/context/base/trac-tex.lua b/tex/context/base/trac-tex.lua
index 7e3406073..5fe4754cb 100644
--- a/tex/context/base/trac-tex.lua
+++ b/tex/context/base/trac-tex.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['trac-tex'] = {
-- moved from trac-deb.lua
-local format = string.format
+local next = next
local texhashtokens = tex.hashtokens
@@ -20,23 +20,70 @@ function trackers.savehash()
saved = texhashtokens()
end
-function trackers.dumphashtofile(filename,delta)
- local list, hash, command_name = { }, texhashtokens(), token.command_name
- for name, token in next, hash do
- if not delta or not saved[name] then
- -- token: cmd, chr, csid -- combination cmd,chr determines name
- local category = command_name(token)
- local dk = list[category]
- if not dk then
- -- a bit funny names but this sorts better (easier to study)
- dk = { names = { }, found = 0, code = token[1] }
- list[category] = dk
+if newtoken then
+
+ function trackers.dumphashtofile(filename,delta)
+ local list = { }
+ local hash = tex.hashtokens()
+ local create = newtoken.create
+ for name, token in next, hash do
+ if not delta or not saved[name] then
+ if token[2] ~= 0 then -- still old interface
+ local token = create(name)
+ -- inspect(token)
+ local category = token.cmdname
+ local dk = list[category]
+ if not dk then
+ dk = {
+ names = { },
+ found = 0,
+ -- code = token[1],
+ }
+ list[category] = dk
+ end
+ if token.protected then
+ if token.expandable then
+ dk.names[name] = "ep"
+ else
+ dk.names[name] = "-p"
+ end
+ else
+ if token.expandable then
+ dk.names[name] = "ep"
+ else
+ dk.names[name] = "--"
+ end
+ end
+ dk.found = dk.found + 1
+ end
end
- dk.names[name] = { token[2], token[3] }
- dk.found = dk.found + 1
end
+ table.save(filename or tex.jobname .. "-hash.log",list)
end
- io.savedata(filename or tex.jobname .. "-hash.log",table.serialize(list,true))
+
+else
+
+ function trackers.dumphashtofile(filename,delta)
+ local list = { }
+ local hash = texhashtokens()
+ local getname = token.command_name
+ for name, token in next, hash do
+ if not delta or not saved[name] then
+ -- token: cmd, chr, csid -- combination cmd,chr determines name
+ local category = getname(token)
+ local dk = list[category]
+ if not dk then
+ -- a bit funny names but this sorts better (easier to study)
+ dk = { names = { }, found = 0, code = token[1] }
+ list[category] = dk
+ end
+ dk.names[name] = { token[2], token[3] }
+ dk.found = dk.found + 1
+ end
+ end
+ table.save(filename or tex.jobname .. "-hash.log",list)
+ end
+
end
local delta = nil
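-- Aside (not part of the patch): both branches above build a table keyed by
-- command category and write it with table.save; the newtoken branch queries
-- each control sequence through newtoken.create and records whether it is
-- protected and/or expandable. A hypothetical usage, relying only on the
-- functions visible in this hunk:

trackers.savehash()
-- ... load a module or define some macros ...
trackers.dumphashtofile("after-load-hash.log", true)  -- delta: only names added since savehash()
trackers.dumphashtofile()                             -- full dump, defaults to <jobname>-hash.log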
diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua
index 15ac9bf1b..b4744291c 100644
--- a/tex/context/base/trac-tim.lua
+++ b/tex/context/base/trac-tim.lua
@@ -88,7 +88,7 @@ local function convert(name)
delta = factor/delta
end
for k=1,#s do
- s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
+ s[k] = format("(%.3f,%.3f)",k,(s[k]-b)*delta)
end
paths[tagname] = concat(s,"--")
end
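-- Aside (not part of the patch): the trac-tim.lua hunk above only changes how
-- the timing points are rendered before being joined with "--" into a path; a
-- quick comparison of the two format strings (values made up):

local format = string.format
print(format("(%s,%s)",     3, 0.123456789))  -- (3,0.123456789)
print(format("(%.3f,%.3f)", 3, 0.123456789))  -- (3.000,0.123)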
diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua
index dc8bcc5e7..a20e42d1a 100644
--- a/tex/context/base/trac-vis.lua
+++ b/tex/context/base/trac-vis.lua
@@ -32,8 +32,10 @@ local formatters = string.formatters
-- todo: global switch (so no attributes)
-- todo: maybe also xoffset, yoffset of glyph
-- todo: inline concat (more efficient)
+-- todo: tags can also be numbers (just add to hash)
local nodecodes = nodes.nodecodes
+local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
@@ -42,6 +44,7 @@ local glue_code = nodecodes.glue
local penalty_code = nodecodes.penalty
local whatsit_code = nodecodes.whatsit
local user_code = nodecodes.user
+local math_code = nodecodes.math
local gluespec_code = nodecodes.gluespec
local kerncodes = nodes.kerncodes
@@ -57,31 +60,54 @@ local leftskip_code = gluecodes.leftskip
local rightskip_code = gluecodes.rightskip
local whatsitcodes = nodes.whatsitcodes
-
-local hpack_nodes = node.hpack
-local vpack_nodes = node.vpack
-local fast_hpack_string = nodes.typesetters.fast_hpack
-local copy_node = node.copy
-local copy_list = node.copy_list
-local free_node = node.free
-local free_node_list = node.flush_list
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local fast_hpack = nodes.fasthpack
-local traverse_nodes = node.traverse
+local mathcodes = nodes.mathcodes
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getbox = nuts.getbox
+local getlist = nuts.getlist
+local getleader = nuts.getleader
+
+local hpack_nodes = nuts.hpack
+local vpack_nodes = nuts.vpack
+local copy_node = nuts.copy
+local copy_list = nuts.copy_list
+local free_node = nuts.free
+local free_node_list = nuts.flush_list
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local traverse_nodes = nuts.traverse
+local linked_nodes = nuts.linked
+
+local fast_hpack = nuts.fasthpack
+local fast_hpack_string = nuts.typesetters.fast_hpack
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
-local texgetbox = tex.getbox
+
local unsetvalue = attributes.unsetvalue
local current_font = font.current
-local exheights = fonts.hashes.exheights
-local emwidths = fonts.hashes.emwidths
+local fonthashes = fonts.hashes
+local chardata = fonthashes.characters
+local exheights = fonthashes.exheights
+local emwidths = fonthashes.emwidths
local pt_factor = number.dimenfactors.pt
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -117,6 +143,7 @@ local trace_fontkern
local trace_strut
local trace_whatsit
local trace_user
+local trace_math
local report_visualize = logs.reporter("visualize")
@@ -136,21 +163,22 @@ local modes = {
simplevbox = 1024 + 2,
simplevtop = 1024 + 4,
user = 2048,
+ math = 4096,
}
local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" }
local modes_boxes = { "hbox", "vbox" }
-local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" }
+local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user", "math" }
local usedfont, exheight, emwidth
-local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user
+local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user, l_math
local enabled = false
local layers = { }
local preset_boxes = modes.hbox + modes.vbox
local preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty
-local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user
+local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user + modes.math
function visualizers.setfont(id)
usedfont = id or current_font()
@@ -162,7 +190,7 @@ end
local function enable()
if not usedfont then
- -- we use a narrow monospaced font
+ -- we use a narrow monospaced font -- infofont ?
visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") })
end
for mode, value in next, modes do
@@ -187,6 +215,7 @@ local function enable()
l_whatsit = layers.whatsit
l_glyph = layers.glyph
l_user = layers.user
+ l_math = layers.math
nodes.tasks.enableaction("shipouts","nodes.visualizers.handler")
report_visualize("enabled")
enabled = true
@@ -251,13 +280,6 @@ function visualizers.setlayer(n)
texsetattribute(a_layer,layers[n] or unsetvalue)
end
-commands.setvisual = visualizers.setvisual
-commands.setlayer = visualizers.setlayer
-
-function commands.visual(n)
- context(setvisual(n))
-end
-
local function set(mode,v)
texsetattribute(a_visual,setvisual(mode,texgetattribute(a_visual),v))
end
@@ -266,10 +288,13 @@ for mode, value in next, modes do
trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end)
end
-trackers.register("visualizers.reset", function(v) set("reset", v) end)
-trackers.register("visualizers.all", function(v) set("all", v) end)
-trackers.register("visualizers.makeup",function(v) set("makeup",v) end)
-trackers.register("visualizers.boxes", function(v) set("boxes", v) end)
+local fraction = 10
+
+trackers .register("visualizers.reset", function(v) set("reset", v) end)
+trackers .register("visualizers.all", function(v) set("all", v) end)
+trackers .register("visualizers.makeup", function(v) set("makeup",v) end)
+trackers .register("visualizers.boxes", function(v) set("boxes", v) end)
+directives.register("visualizers.fraction", function(v) fraction = tonumber(v) or fraction end)
local c_positive = "trace:b"
local c_negative = "trace:r"
@@ -279,7 +304,9 @@ local c_space = "trace:y"
local c_skip_a = "trace:c"
local c_skip_b = "trace:m"
local c_glyph = "trace:o"
+local c_ligature = "trace:s"
local c_white = "trace:w"
+local c_math = "trace:r"
local c_positive_d = "trace:db"
local c_negative_d = "trace:dr"
@@ -289,43 +316,48 @@ local c_space_d = "trace:dy"
local c_skip_a_d = "trace:dc"
local c_skip_b_d = "trace:dm"
local c_glyph_d = "trace:do"
+local c_ligature_d = "trace:ds"
local c_white_d = "trace:dw"
+local c_math_d = "trace:dr"
-local function sometext(str,layer,color,textcolor) -- we can just paste verbatim together .. no typesteting needed
+local function sometext(str,layer,color,textcolor,lap) -- we can just paste verbatim together .. no typesetting needed
local text = fast_hpack_string(str,usedfont)
- local size = text.width
+ local size = getfield(text,"width")
local rule = new_rule(size,2*exheight,exheight/2)
local kern = new_kern(-size)
if color then
setcolor(rule,color)
end
if textcolor then
- setlistcolor(text.list,textcolor)
+ setlistcolor(getlist(text),textcolor)
end
- local info = rule .. kern .. text
+ local info = linked_nodes(rule,kern,text)
setlisttransparency(info,c_zero)
info = fast_hpack(info)
+ local width = getfield(info,"width")
+ if lap then
+ info = fast_hpack(linked_nodes(new_kern(-width),info))
+ end
if layer then
- info[a_layer] = layer
+ setattr(info,a_layer,layer)
end
- local width = info.width
- info.width = 0
- info.height = 0
- info.depth = 0
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
return info, width
end
local f_cache = { }
local function fontkern(head,current)
- local kern = current.kern
+ local kern = getfield(current,"kern")
local info = f_cache[kern]
if info then
-- print("hit fontkern")
else
local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
- local rule = new_rule(emwidth/10,6*exheight,2*exheight)
- local list = text.list
+ local rule = new_rule(emwidth/fraction,6*exheight,2*exheight)
+ local list = getlist(text)
if kern > 0 then
setlistcolor(list,c_positive_d)
elseif kern < 0 then
@@ -335,13 +367,12 @@ local function fontkern(head,current)
end
setlisttransparency(list,c_text_d)
settransparency(rule,c_text_d)
- text.shift = -5 * exheight
- info = rule .. text
- info = fast_hpack(info)
- info[a_layer] = l_fontkern
- info.width = 0
- info.height = 0
- info.depth = 0
+ setfield(text,"shift",-5 * exheight)
+ info = fast_hpack(linked_nodes(rule,text))
+ setattr(info,a_layer,l_fontkern)
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
f_cache[kern] = info
end
head = insert_node_before(head,current,copy_list(info))
@@ -349,8 +380,7 @@ local function fontkern(head,current)
end
local w_cache = { }
-
-local tags = {
+local tags = {
open = "FIC",
write = "FIW",
close = "FIC",
@@ -382,7 +412,7 @@ local tags = {
}
local function whatsit(head,current)
- local what = current.subtype
+ local what = getsubtype(current)
local info = w_cache[what]
if info then
-- print("hit whatsit")
@@ -390,22 +420,45 @@ local function whatsit(head,current)
local tag = whatsitcodes[what]
-- maybe different text colors per tag
info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white)
- info[a_layer] = l_whatsit
+ setattr(info,a_layer,l_whatsit)
w_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
return head, current
end
+local u_cache = { }
+
local function user(head,current)
- local what = current.subtype
- local info = w_cache[what]
+ local what = getsubtype(current)
+ local info = u_cache[what]
if info then
-- print("hit user")
else
info = sometext(formatters["U:%s"](what),usedfont)
- info[a_layer] = l_user
- w_cache[what] = info
+ setattr(info,a_layer,l_user)
+ u_cache[what] = info
+ end
+ head, current = insert_node_after(head,current,copy_list(info))
+ return head, current
+end
+
+local m_cache = { }
+local tags = {
+ beginmath = "B",
+ endmath = "E",
+}
+
+local function math(head,current)
+ local what = getsubtype(current)
+ local info = m_cache[what]
+ if info then
+ -- print("hit math")
+ else
+ local tag = mathcodes[what]
+ info = sometext(formatters["M:%s"](tag and tags[tag] or what),usedfont,nil,c_math_d)
+ setattr(info,a_layer,l_math)
+ m_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
return head, current
@@ -414,15 +467,15 @@ end
local b_cache = { }
local function ruledbox(head,current,vertical,layer,what,simple,previous)
- local wd = current.width
+ local wd = getfield(current,"width")
if wd ~= 0 then
- local ht = current.height
- local dp = current.depth
- local next = current.next
- local prev = previous -- current.prev ... prev can be wrong in math mode
- current.next = nil
- current.prev = nil
- local linewidth = emwidth/10
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local next = getnext(current)
+ local prev = previous -- getprev(current) ... prev can be wrong in math mode < 0.78.3
+ setfield(current,"next",nil)
+ setfield(current,"prev",nil)
+ local linewidth = emwidth/fraction
local baseline, baseskip
if dp ~= 0 and ht ~= 0 then
if wd > 20*linewidth then
@@ -430,16 +483,16 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
if not baseline then
-- due to an optimized leader color/transparency we need to set the glue node in order
-- to trigger this mechanism
- local leader = new_glue(2*linewidth) .. new_rule(6*linewidth,linewidth,0) .. new_glue(2*linewidth)
+ local leader = linked_nodes(new_glue(2*linewidth),new_rule(6*linewidth,linewidth,0),new_glue(2*linewidth))
-- setlisttransparency(leader,c_text)
leader = fast_hpack(leader)
-- setlisttransparency(leader,c_text)
baseline = new_glue(0)
- baseline.leader = leader
- baseline.subtype = cleaders_code
- local spec = baseline.spec
- spec.stretch = 65536
- spec.stretch_order = 2
+ setfield(baseline,"leader",leader)
+ setfield(baseline,"subtype",cleaders_code)
+ local spec = getfield(baseline,"spec")
+ setfield(spec,"stretch",65536)
+ setfield(spec,"stretch_order",2)
setlisttransparency(baseline,c_text)
b_cache.baseline = baseline
end
@@ -461,47 +514,49 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
this = b_cache[what]
if not this then
local text = fast_hpack_string(what,usedfont)
- this = new_kern(-text.width) .. text
+ this = linked_nodes(new_kern(-getfield(text,"width")),text)
setlisttransparency(this,c_text)
this = fast_hpack(this)
- this.width = 0
- this.height = 0
- this.depth = 0
+ setfield(this,"width",0)
+ setfield(this,"height",0)
+ setfield(this,"depth",0)
b_cache[what] = this
end
end
-- we need to trigger the right mode (else sometimes no whatits)
- local info =
- (this and copy_list(this) or nil) ..
- new_rule(linewidth,ht,dp) ..
- new_rule(wd-2*linewidth,-dp+linewidth,dp) ..
- new_rule(linewidth,ht,dp) ..
- new_kern(-wd+linewidth) ..
+ local info = linked_nodes(
+ this and copy_list(this) or nil,
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-2*linewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
new_rule(wd-2*linewidth,ht,-ht+linewidth)
+ )
if baseskip then
- info = info .. baseskip .. baseline
+ info = linked_nodes(info,baseskip,baseline)
end
setlisttransparency(info,c_text)
info = fast_hpack(info)
- info.width = 0
- info.height = 0
- info.depth = 0
- info[a_layer] = layer
- local info = current .. new_kern(-wd) .. info
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
+ setattr(info,a_layer,layer)
+ local info = linked_nodes(current,new_kern(-wd),info)
info = fast_hpack(info,wd)
if vertical then
info = vpack_nodes(info)
end
if next then
- info.next = next
- next.prev = info
+ setfield(info,"next",next)
+ setfield(next,"prev",info)
end
if prev then
- if prev.id == gluespec_code then
- -- weird, how can this happen, an inline glue-spec
+ if getid(prev) == gluespec_code then
+ report_visualize("ignoring invalid prev")
+ -- weird, how can this happen, an inline glue-spec, probably math
else
- info.prev = prev
- prev.next = info
+ setfield(info,"prev",prev)
+ setfield(prev,"next",info)
end
end
if head == current then
@@ -515,46 +570,55 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
end
local function ruledglyph(head,current,previous)
- local wd = current.width
+ local wd = getfield(current,"width")
+ -- local wd = chardata[getfield(current,"font")][getfield(current,"char")].width
if wd ~= 0 then
- local ht = current.height
- local dp = current.depth
- local next = current.next
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local next = getnext(current)
local prev = previous
- current.next = nil
- current.prev = nil
- local linewidth = emwidth/20
+ setfield(current,"next",nil)
+ setfield(current,"prev",nil)
+ local linewidth = emwidth/(2*fraction)
local baseline
- if dp ~= 0 and ht ~= 0 then
+ -- if dp ~= 0 and ht ~= 0 then
+ if (dp >= 0 and ht >= 0) or (dp <= 0 and ht <= 0) then
baseline = new_rule(wd-2*linewidth,linewidth,0)
end
local doublelinewidth = 2*linewidth
-- could be a pdf rule
- local info =
- new_rule(linewidth,ht,dp) ..
- new_rule(wd-doublelinewidth,-dp+linewidth,dp) ..
- new_rule(linewidth,ht,dp) ..
- new_kern(-wd+linewidth) ..
- new_rule(wd-doublelinewidth,ht,-ht+linewidth) ..
- new_kern(-wd+doublelinewidth) ..
+ local info = linked_nodes(
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-doublelinewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
+ new_rule(wd-doublelinewidth,ht,-ht+linewidth),
+ new_kern(-wd+doublelinewidth),
baseline
+ )
+local char = chardata[getfield(current,"font")][getfield(current,"char")]
+if char and char.tounicode and #char.tounicode > 4 then -- hack test
+ setlistcolor(info,c_ligature)
+ setlisttransparency(info,c_ligature_d)
+else
setlistcolor(info,c_glyph)
setlisttransparency(info,c_glyph_d)
+end
info = fast_hpack(info)
- info.width = 0
- info.height = 0
- info.depth = 0
- info[a_layer] = l_glyph
- local info = current .. new_kern(-wd) .. info
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
+ setattr(info,a_layer,l_glyph)
+ local info = linked_nodes(current,new_kern(-wd),info)
info = fast_hpack(info)
- info.width = wd
+ setfield(info,"width",wd)
if next then
- info.next = next
- next.prev = info
+ setfield(info,"next",next)
+ setfield(next,"prev",info)
end
if prev then
- info.prev = prev
- prev.next = info
+ setfield(info,"prev",prev)
+ setfield(prev,"next",info)
end
if head == current then
return info, info
@@ -599,9 +663,9 @@ local tags = {
-- we sometimes pass previous as we can have issues in math (not watertight for all)
local function ruledglue(head,current,vertical)
- local spec = current.spec
- local width = spec.width
- local subtype = current.subtype
+ local spec = getfield(current,"spec")
+ local width = getfield(spec,"width")
+ local subtype = getsubtype(current)
local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
local info = g_cache[amount]
if info then
@@ -629,13 +693,13 @@ local function ruledglue(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, current.next
+ return head, getnext(current)
end
local k_cache = { }
local function ruledkern(head,current,vertical)
- local kern = current.kern
+ local kern = getfield(current,"kern")
local info = k_cache[kern]
if info then
-- print("kern hit")
@@ -655,13 +719,13 @@ local function ruledkern(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, current.next
+ return head, getnext(current)
end
local p_cache = { }
local function ruledpenalty(head,current,vertical)
- local penalty = current.penalty
+ local penalty = getfield(current,"penalty")
local info = p_cache[penalty]
if info then
-- print("penalty hit")
@@ -681,10 +745,10 @@ local function ruledpenalty(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, current.next
+ return head, getnext(current)
end
-local function visualize(head,vertical)
+local function visualize(head,vertical,forced)
local trace_hbox = false
local trace_vbox = false
local trace_vtop = false
@@ -697,13 +761,14 @@ local function visualize(head,vertical)
local trace_glyph = false
local trace_simple = false
local trace_user = false
+ local trace_math = false
local current = head
local previous = nil
local attr = unsetvalue
local prev_trace_fontkern = nil
while current do
- local id = current.id
- local a = current[a_visual] or unsetvalue
+ local id = getid(current)
+ local a = forced or getattr(current,a_visual) or unsetvalue
if a ~= attr then
prev_trace_fontkern = trace_fontkern
if a == unsetvalue then
@@ -719,6 +784,7 @@ local function visualize(head,vertical)
trace_glyph = false
trace_simple = false
trace_user = false
+ trace_math = false
else -- dead slow:
trace_hbox = hasbit(a, 1)
trace_vbox = hasbit(a, 2)
@@ -732,46 +798,45 @@ local function visualize(head,vertical)
trace_glyph = hasbit(a, 512)
trace_simple = hasbit(a,1024)
trace_user = hasbit(a,2048)
+ trace_math = hasbit(a,4096)
end
attr = a
end
if trace_strut then
- current[a_layer] = l_strut
+ setattr(current,a_layer,l_strut)
elseif id == glyph_code then
if trace_glyph then
head, current = ruledglyph(head,current,previous)
end
elseif id == disc_code then
- if trace_glyph then
- local pre = current.pre
- if pre then
- current.pre = ruledglyph(pre,pre)
- end
- local post = current.post
- if post then
- current.post = ruledglyph(post,post)
- end
- local replace = current.replace
- if replace then
- current.replace = ruledglyph(replace,replace)
- end
+ local pre = getfield(current,"pre")
+ if pre then
+ setfield(current,"pre",visualize(pre,false,a))
+ end
+ local post = getfield(current,"post")
+ if post then
+ setfield(current,"post",visualize(post,false,a))
+ end
+ local replace = getfield(current,"replace")
+ if replace then
+ setfield(current,"replace",visualize(replace,false,a))
end
elseif id == kern_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
-- tricky ... we don't copy the trace attribute in node-inj (yet)
- if subtype == font_kern_code or current[a_fontkern] then
+ if subtype == font_kern_code or getattr(current,a_fontkern) then
if trace_fontkern or prev_trace_fontkern then
head, current = fontkern(head,current)
end
- elseif subtype == user_kern_code then
+ else -- if subtype == user_kern_code then
if trace_kern then
head, current = ruledkern(head,current,vertical)
end
end
elseif id == glue_code then
- local content = current.leader
+ local content = getleader(current)
if content then
- current.leader = visualize(content,false)
+ setfield(current,"leader",visualize(content,false))
elseif trace_glue then
head, current = ruledglue(head,current,vertical)
end
@@ -779,22 +844,18 @@ local function visualize(head,vertical)
if trace_penalty then
head, current = ruledpenalty(head,current,vertical)
end
- elseif id == disc_code then
- current.pre = visualize(current.pre)
- current.post = visualize(current.post)
- current.replace = visualize(current.replace)
elseif id == hlist_code then
- local content = current.list
+ local content = getlist(current)
if content then
- current.list = visualize(content,false)
+ setfield(current,"list",visualize(content,false))
end
if trace_hbox then
head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous)
end
elseif id == vlist_code then
- local content = current.list
+ local content = getlist(current)
if content then
- current.list = visualize(content,true)
+ setfield(current,"list",visualize(content,true))
end
if trace_vtop then
head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous)
@@ -806,12 +867,16 @@ local function visualize(head,vertical)
head, current = whatsit(head,current)
end
elseif id == user_code then
- if trace_whatsit then
+ if trace_user then
head, current = user(head,current)
end
+ elseif id == math_code then
+ if trace_math then
+ head, current = math(head,current)
+ end
end
previous = current
- current = current.next
+ current = getnext(current)
end
return head
end
@@ -837,28 +902,39 @@ local function cleanup()
nk, k_cache = freed(k_cache)
nw, w_cache = freed(w_cache)
nb, b_cache = freed(b_cache)
- -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
+ -- report_visualize("cache cleanup: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
end
-function visualizers.handler(head)
+local function handler(head)
if usedfont then
starttiming(visualizers)
-- local l = texgetattribute(a_layer)
-- local v = texgetattribute(a_visual)
-- texsetattribute(a_layer,unsetvalue)
-- texsetattribute(a_visual,unsetvalue)
- head = visualize(head)
+ head = visualize(tonut(head))
-- texsetattribute(a_layer,l)
-- texsetattribute(a_visual,v)
-- -- cleanup()
stoptiming(visualizers)
+ return tonode(head), true
+ else
+ return head, false
end
- return head, false
end
+visualizers.handler = handler
+
function visualizers.box(n)
- local box = texgetbox(n)
- box.list = visualizers.handler(box.list)
+ if usedfont then
+ starttiming(visualizers)
+ local box = getbox(n)
+ setfield(box,"list",visualize(getlist(box)))
+ stoptiming(visualizers)
+ return head, true
+ else
+ return head, false
+ end
end
local last = nil
@@ -872,9 +948,9 @@ local mark = {
local function markfonts(list)
for n in traverse_nodes(list) do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local font = n.font
+ local font = getfont(n)
local okay = used[font]
if not okay then
last = last + 1
@@ -883,23 +959,32 @@ local function markfonts(list)
end
setcolor(n,okay)
elseif id == hlist_code or id == vlist_code then
- markfonts(n.list)
+ markfonts(getlist(n))
end
end
end
function visualizers.markfonts(list)
last, used = 0, { }
- markfonts(type(n) == "number" and texgetbox(n).list or n)
+ markfonts(type(n) == "number" and getlist(getbox(n)) or n)
end
-function commands.markfonts(n)
- visualizers.markfonts(n)
-end
+luatex.registerstopactions(cleanup)
statistics.register("visualization time",function()
if enabled then
- cleanup() -- in case we don't don't do it each time
+ -- cleanup() -- in case we don't do it each time
return format("%s seconds",statistics.elapsedtime(visualizers))
end
end)
+
+-- interface
+
+local implement = interfaces.implement
+
+implement { name = "setvisual", arguments = "string", actions = visualizers.setvisual }
+implement { name = "getvisual", arguments = "string", actions = { setvisual, context } }
+implement { name = "setvisuallayer", arguments = "string", actions = visualizers.setlayer }
+implement { name = "markvisualfonts", arguments = "integer", actions = visualizers.markfonts }
+implement { name = "setvisualfont", arguments = "integer", actions = visualizers.setfont }
+
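-- Aside (not part of the patch): two patterns recur in the trac-vis.lua hunks
-- above. Node lists are now assembled with linked_nodes(...) instead of the
-- ".." concatenation used on userdata nodes, since the code now works on
-- direct nodes (nuts). And the old commands.* entry points are replaced by
-- interfaces.implement, whose registered names surface as \clf_... csnames in
-- trac-vis.mkiv below. A hypothetical registration in the same style:

local implement = interfaces.implement
local report    = logs.reporter("visualize")

implement {
    name      = "showsomething",        -- would become \clf_showsomething on the TeX end
    arguments = "string",
    actions   = function(s) report("asked to show %a", s) end,
}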
diff --git a/tex/context/base/trac-vis.mkiv b/tex/context/base/trac-vis.mkiv
index 694d1b09d..6ee8a6b8d 100644
--- a/tex/context/base/trac-vis.mkiv
+++ b/tex/context/base/trac-vis.mkiv
@@ -54,10 +54,10 @@
\let\syst_visualizers_vbox\vbox
\let\syst_visualizers_vtop\vtop
-\unexpanded\def\ruledhbox{\syst_visualizers_hbox attr \visualattribute \ctxcommand{visual("simplehbox")} }
-\unexpanded\def\ruledvbox{\syst_visualizers_vbox attr \visualattribute \ctxcommand{visual("simplevbox")} }
-\unexpanded\def\ruledvtop{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("simplevtop")} } % special case
-\unexpanded\def\ruledtopv{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("vtop")} }
+\unexpanded\def\ruledhbox{\syst_visualizers_hbox attr \visualattribute \clf_getvisual{simplehbox} }
+\unexpanded\def\ruledvbox{\syst_visualizers_vbox attr \visualattribute \clf_getvisual{simplevbox} }
+\unexpanded\def\ruledvtop{\syst_visualizers_vtop attr \visualattribute \clf_getvisual{simplevtop} } % special case
+\unexpanded\def\ruledtopv{\syst_visualizers_vtop attr \visualattribute \clf_getvisual{vtop} }
\unexpanded\def\ruledmbox#1{\ruledhbox{\startimath#1\stopimath}}
@@ -83,26 +83,33 @@
\to \t_syst_visualizers_optimize
\unexpanded\def\showmakeup
- {\ctxcommand{setvisual("makeup")}%
- \let\normalvtop\ruledtopv
- \let\vtop \ruledtopv}
+ {\dosingleempty\syst_visualizers_makeup}
+
+\unexpanded\def\syst_visualizers_makeup[#1]%
+ {\iffirstargument
+ \clf_setvisual{#1}%
+ \else
+ \clf_setvisual{makeup}%
+ \let\normalvtop\ruledtopv
+ \let\vtop \ruledtopv
+ \fi}
\unexpanded\def\showallmakeup
- {\ctxcommand{setvisual("all")}%
+ {\clf_setvisual{all}%
\let\normalvtop\ruledtopv
\let\vtop \ruledtopv
\showstruts}
\unexpanded\def\showboxes
- {\ctxcommand{setvisual("boxes")}%
+ {\clf_setvisual{boxes}%
\let\normalvtop\ruledtopv
\let\vtop \ruledtopv}
\unexpanded\def\showglyphs
- {\ctxcommand{setvisual("glyph")}}
+ {\clf_setvisual{glyph}}
\unexpanded\def\showfontkerns
- {\ctxcommand{setvisual("fontkern")}}
+ {\clf_setvisual{fontkern}}
\unexpanded\def\setvisualizerfont
{\dosingleempty\syst_visualizers_setfont}
@@ -110,18 +117,21 @@
\def\syst_visualizers_setfont[#1]% somename at 4pt
{\begingroup
\doifelsenothing{#1}{\definedfont[Mono at 4pt]}{\definedfont[#1]}%
- \ctxlua{nodes.visualizers.setfont()}%
+ \clf_setvisualfont\fontid\font
\endgroup}
+\unexpanded\def\resetvisualizers
+ {\attribute\visualattribute\attributeunsetvalue}
+
%D New (these might also be a visualizers):
-\definecolor[f:r:t][a=1,t=.25,r=1]
-\definecolor[f:g:t][a=1,t=.25,g=1]
-\definecolor[f:b:t][a=1,t=.25,b=1]
-\definecolor[f:c:t][a=1,t=.25,c=1]
-\definecolor[f:m:t][a=1,t=.25,m=1]
-\definecolor[f:y:t][a=1,t=.25,y=1]
-\definecolor[f:k:t][a=1,t=.25,s=0]
+% \definecolor[f:r:t][a=1,t=.25,r=1]
+% \definecolor[f:g:t][a=1,t=.25,g=1]
+% \definecolor[f:b:t][a=1,t=.25,b=1]
+% \definecolor[f:c:t][a=1,t=.25,c=1]
+% \definecolor[f:m:t][a=1,t=.25,m=1]
+% \definecolor[f:y:t][a=1,t=.25,y=1]
+% \definecolor[f:k:t][a=1,t=.25,s=0]
% \def\node_backgrounds_boxes_add#1[#2]%
% {\node_backgrounds_boxes_initialize
@@ -141,14 +151,14 @@
%D Overload:
% \def\spac_struts_vide_hbox
-% {\hbox attr \visualattribute \ctxcommand{visual("strut")} }
+% {\hbox attr \visualattribute \clf_getvisual{strut} }
%
% \def\spac_struts_vide_hbox
-% {\xdef\spac_struts_vide_hbox{\hbox attr \visualattribute \ctxcommand{visual("strut")} }%
+% {\xdef\spac_struts_vide_hbox{\hbox attr \visualattribute \clf_getvisual{strut} }%
% \spac_struts_vide_hbox}
\unexpanded\def\spac_struts_vide_hbox
- {\hbox attr \visualattribute \ctxcommand{visual("strut")} }
+ {\hbox attr \visualattribute \clf_getvisual{strut} }
\appendtoks
\normalexpanded{\unexpanded\xdef\spac_struts_vide_hbox\expandafter{\spac_struts_vide_hbox}}%
@@ -160,7 +170,7 @@
{\dontleavehmode
\begingroup
\setbox\scratchbox\hbox{\getbuffer}%
- \ctxcommand{markfonts(\number\scratchbox)}%
+ \clf_markvisualfonts\scratchbox
\unhbox\scratchbox
\endgroup}
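-- Aside (not part of the patch): each visualizer mode registered in trac-vis.lua
-- above also has a tracker ("visualizers.<mode>"), so the switches that
-- \showmakeup, \showboxes and friends reach through \clf_setvisual can, with the
-- usual caveats, also be toggled from Lua or from the command line:

trackers.enable ("visualizers.fontkern")   -- roughly what \showfontkerns arranges
trackers.enable ("visualizers.glyph")
trackers.disable("visualizers.fontkern")

-- and at run time, typically: context --trackers=visualizers.boxes myfile.tex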
diff --git a/tex/context/base/type-imp-buy.mkiv b/tex/context/base/type-imp-buy.mkiv
index 9815cc44b..dbfffe57c 100644
--- a/tex/context/base/type-imp-buy.mkiv
+++ b/tex/context/base/type-imp-buy.mkiv
@@ -11,27 +11,125 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% monotype sabon
+
\starttypescriptcollection[sabon]
-\starttypescript [serif] [sabon] [name]
-
- \definefontsynonym [Serif] [SabonMT]
- \definefontsynonym [SerifItalic] [SabonMT-Italic]
- \definefontsynonym [SerifSlanted] [SabonMT-Italic]
- \definefontsynonym [SerifBold] [SabonMT-SemiBold]
- \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifBoldSlanted] [SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
-
- \definefontsynonym[SabonMT] [sab_____]
- \definefontsynonym[SabonMT-Italic] [sabi____]
- \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
- \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
- \definefontsynonym[SabonMT-SemiBold] [sabs____]
- \definefontsynonym[SabonMT-RegularSC] [sabsc___]
- \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
- \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
-\stoptypescript
+ \starttypescript [serif] [sabon] [name]
+ %
+ \definefontsynonym [Serif] [SabonMT]
+ \definefontsynonym [SerifItalic] [SabonMT-Italic]
+ \definefontsynonym [SerifSlanted] [SabonMT-Italic]
+ \definefontsynonym [SerifBold] [SabonMT-SemiBold]
+ \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifBoldSlanted][SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
+ %
+ \definefontsynonym[SabonMT] [sab_____]
+ \definefontsynonym[SabonMT-Italic] [sabi____]
+ \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
+ \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
+ \definefontsynonym[SabonMT-SemiBold] [sabs____]
+ \definefontsynonym[SabonMT-RegularSC] [sabsc___]
+ \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
+ \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
+ %
+ \stoptypescript
\stoptypescriptcollection
+% itc stone
+
+\starttypescriptcollection[stone]
+
+ \starttypescript [sans] [stone] [name]
+ %
+ \definefontsynonym [Sans] [StoneSansITC-Medium]
+ \definefontsynonym [SansItalic] [StoneSansITC-MediumItalic]
+ \definefontsynonym [SansSlanted] [StoneSansITC-MediumItalic]
+ \definefontsynonym [SansBold] [StoneSansITC-Bold]
+ \definefontsynonym [SansBoldItalic] [StoneSansITC-BoldItalic]
+ \definefontsynonym [SansBoldSlanted][StoneSansITC-BoldItalic]
+ \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
+ %
+ \definefontsynonym[StoneSansITC-Bold] [stosnb]
+ \definefontsynonym[StoneSansITC-BoldItalic] [stosnbi]
+ \definefontsynonym[StoneSansITC-Medium] [stosnm]
+ \definefontsynonym[StoneSansITC-MediumItalic][stosnmi]
+ \definefontsynonym[StoneSansSemITC-Semi] [stosns]
+ \definefontsynonym[StoneSansSemITC-SemiIta] [stosnsi]
+ \definefontsynonym[StoneSansSCITC-Medium] [stosnscm]
+ \definefontsynonym[StoneSansSemSCITC-Semi] [stosnscs]
+ %
+ \stoptypescript
+
+ \starttypescript [serif] [stone] [name]
+ %
+ \definefontsynonym [Serif] [StoneSerifITC-Medium]
+ \definefontsynonym [SerifItalic] [StoneSerifITC-MediumItalic]
+ \definefontsynonym [SerifSlanted] [StoneSerifITC-MediumItalic]
+ \definefontsynonym [SerifBold] [StoneSerifITC-Bold]
+ \definefontsynonym [SerifBoldItalic] [StoneSerifITC-BoldItalic]
+ \definefontsynonym [SerifBoldSlanted][StoneSerifITC-BoldItalic]
+ \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
+ %
+ \definefontsynonym[StoneSerifITC-Bold] [stosfb]
+ \definefontsynonym[StoneSerifITC-BoldItalic] [stosfbi]
+ \definefontsynonym[StoneSerifITC-Medium] [stosfm]
+ \definefontsynonym[StoneSerifITC-MediumItalic][stosfmi]
+ \definefontsynonym[StoneSerifSemITC-Semi] [stosfs]
+ \definefontsynonym[StoneSerifSemITC-SemiIta] [stosfsi]
+ \definefontsynonym[StoneSerifSCITC-Medium] [stosfscm]
+ \definefontsynonym[StoneSerifSemSCITC-Semi] [stosfscs]
+ %
+ \stoptypescript
+
+ \starttypescript [sans] [stone-oldstyle] [name]
+ %
+ \definefontsynonym [Sans] [StoneSansOSITC-Medium]
+ \definefontsynonym [SansItalic] [StoneSansOSITC-MediumItalic]
+ \definefontsynonym [SansSlanted] [StoneSansOSITC-MediumItalic]
+ \definefontsynonym [SansBold] [StoneSansOSITC-Bold]
+ \definefontsynonym [SansBoldItalic] [StoneSansOSITC-BoldItalic]
+ \definefontsynonym [SansBoldSlanted][StoneSansOSITC-BoldItalic]
+ \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
+ %
+ \definefontsynonym[StoneSansOSITC-Bold] [stosnob]
+ \definefontsynonym[StoneSansOSITC-BoldItalic] [stosnobi]
+ \definefontsynonym[StoneSansOSITC-Medium] [stosnom]
+ \definefontsynonym[StoneSansOSITC-MediumItalic][stosnomi]
+ \definefontsynonym[StoneSansSemOSITC-Semi] [stosnos]
+ \definefontsynonym[StoneSansSemOSITC-SemiIta] [stosnosi]
+ %
+ \stoptypescript
+
+ \starttypescript [serif] [stone-oldstyle] [name]
+ %
+ \definefontsynonym [Serif] [StoneSerifOSITC-Medium]
+ \definefontsynonym [SerifItalic] [StoneSerifOSITC-MediumItalic]
+ \definefontsynonym [SerifSlanted] [StoneSerifOSITC-MediumItalic]
+ \definefontsynonym [SerifBold] [StoneSerifOSITC-Bold]
+ \definefontsynonym [SerifBoldItalic] [StoneSerifOSITC-BoldItalic]
+ \definefontsynonym [SerifBoldSlanted] [StoneSerifOSITC-BoldItalic]
+ \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
+ %
+ \definefontsynonym[StoneSerifOSITC-Bold] [stosfob]
+ \definefontsynonym[StoneSerifOSITC-BoldItalic] [stosfobi]
+ \definefontsynonym[StoneSerifOSITC-Medium] [stosfom]
+ \definefontsynonym[StoneSerifOSITC-MediumItalic][stosfomi]
+ \definefontsynonym[StoneSerifSemOSITC-Semi] [stosfos]
+ \definefontsynonym[StoneSerifSemOSITC-SemiIta] [stosfosi]
+ %
+ \stoptypescript
+
+\stoptypescriptcollection
+
+% linotype industria
+
+\starttypescriptcollection[industria]
+
+ \starttypescript [sans] [industria] [name]
+ \definefontsynonym[Industria-Solid][lt_50545]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-cambria.mkiv b/tex/context/base/type-imp-cambria.mkiv
index 91288b6d0..9bfa2ee5c 100644
--- a/tex/context/base/type-imp-cambria.mkiv
+++ b/tex/context/base/type-imp-cambria.mkiv
@@ -11,6 +11,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D We use Dejavu as it covers a wider range of monospaced glyphs.
+
+\loadtypescriptfile[dejavu]
+
\starttypescriptcollection[cambria]
% microsoft: cambria.ttc cambriab.ttf cambriai.ttf cambriaz.ttf
@@ -60,13 +64,13 @@
\starttypescript [cambria,cambria-m,cambria-a]
% any
\definetypeface [cambria] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
- \definetypeface [cambria] [\s!tt] [\s!mono] [modern] [\s!default]
+ \definetypeface [cambria] [\s!tt] [\s!mono] [dejavu] [\s!default]
\definetypeface [cambria] [\s!mm] [\s!math] [\typescriptone] [\s!default]
\stoptypescript
\starttypescript [cambria-x,cambria-y]
% test x
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [cambria] [\s!default]
- \definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default]
\definetypeface [\typescriptone] [\s!mm] [\s!math] [\typescriptone] [\s!default]
\stoptypescript
diff --git a/tex/context/base/type-imp-dejavu.mkiv b/tex/context/base/type-imp-dejavu.mkiv
index de1f7752c..41cf1f701 100644
--- a/tex/context/base/type-imp-dejavu.mkiv
+++ b/tex/context/base/type-imp-dejavu.mkiv
@@ -15,33 +15,6 @@
\starttypescriptcollection[dejavu]
- \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
- \setups[\s!font:\s!fallback:\s!mono]
- % \definefontsynonym [\s!Mono] [\s!name:dejavusansmonocondensed] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonoboldcondensed] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonoobliquecondensed] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldobliquecondensed] [\s!features=\s!none]
- \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=dejavu-condensed-mono]
- \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=dejavu-condensed-mono]
- \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=dejavu-condensed-mono]
- \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=dejavu-condensed-mono]
- \stoptypescript
-
- % \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
- % \setups[\s!font:\s!fallback:\s!mono]
- % \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
- % \stoptypescript
-
- \starttypescript[dejavu-condensed]
- \definetypeface [dejavu-condensed] [\s!rm] [\s!serif] [dejavu-condensed] [\s!default]
- \definetypeface [dejavu-condensed] [\s!ss] [\s!sans] [dejavu-condensed] [\s!default]
- \definetypeface [dejavu-condensed] [\s!tt] [\s!mono] [dejavu-condensed] [\s!default]
- \definetypeface [dejavu-condensed] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
- \stoptypescript
-
\starttypescript [\s!serif] [dejavu] [\s!name]
\setups[\s!font:\s!fallback:\s!serif]
\definefontsynonym [\s!Serif] [\s!name:dejavuserif] [\s!features=\s!default]
@@ -66,11 +39,17 @@
\definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
\stoptypescript
+ \starttypescript [\s!math][dejavu][\s!name]
+ %\loadfontgoodies[dejavu]
+ \definefontsynonym[\s!MathRoman][file:dejavu-math.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
\starttypescript[dejavu]
\definetypeface [dejavu] [\s!rm] [\s!serif] [dejavu] [\s!default]
\definetypeface [dejavu] [\s!ss] [\s!sans] [dejavu] [\s!default]
\definetypeface [dejavu] [\s!tt] [\s!mono] [dejavu] [\s!default]
- \definetypeface [dejavu] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+% \definetypeface [dejavu] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+ \definetypeface [dejavu] [\s!mm] [\s!math] [dejavu] [\s!default]
\stoptypescript
\starttypescript[dejavubidi]
@@ -96,4 +75,32 @@
\definefontsynonym [\s!SansBoldItalic] [\s!name:dejavusanscondensedboldoblique] [\s!features=\s!default]
\stoptypescript
+ \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
+ \setups[\s!font:\s!fallback:\s!mono]
+ % \definefontsynonym [\s!Mono] [\s!name:dejavusansmonocondensed] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonoboldcondensed] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonoobliquecondensed] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldobliquecondensed] [\s!features=\s!none]
+ \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=dejavu-condensed-mono]
+ \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=dejavu-condensed-mono]
+ \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=dejavu-condensed-mono]
+ \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=dejavu-condensed-mono]
+ \stoptypescript
+
+ % \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
+ % \setups[\s!font:\s!fallback:\s!mono]
+ % \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
+ % \stoptypescript
+
+ \starttypescript[dejavu-condensed]
+ \definetypeface [dejavu-condensed] [\s!rm] [\s!serif] [dejavu-condensed] [\s!default]
+ \definetypeface [dejavu-condensed] [\s!ss] [\s!sans] [dejavu-condensed] [\s!default]
+ \definetypeface [dejavu-condensed] [\s!tt] [\s!mono] [dejavu-condensed] [\s!default]
+% \definetypeface [dejavu-condensed] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+ \definetypeface [dejavu-condensed] [\s!mm] [\s!math] [dejavu] [\s!default]
+ \stoptypescript
+
\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-ebgaramond.mkiv b/tex/context/base/type-imp-ebgaramond.mkiv
new file mode 100644
index 000000000..838654d49
--- /dev/null
+++ b/tex/context/base/type-imp-ebgaramond.mkiv
@@ -0,0 +1,45 @@
+%D \module
+%D [ file=type-imp-ebgaramond,
+%D version=2013.06.22,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=EB Garamond,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\definefontfeature
+ [eb-garamond-normal]
+ [default]
+ [mode=node,ccmp=yes,calt=yes,
+ liga=yes,dlig=yes,hlig=yes,
+ kern=yes,mark=yes,mkmk=yes,
+ onum=yes,pnum=yes,salt=yes,
+ script=latn]
+
+\definefontfeature
+ [eb-garamond-smallcaps]
+ [eb-garamond-normal]
+ [smcp=yes,c2sc=yes]
+
+\starttypescriptcollection[ebgaramond]
+
+ \starttypescript [serif] [ebgaramond]
+ \loadfontgoodies[ebgaramond]
+ \setups[font:fallback:serif]
+ \definefontsynonym [Serif] [file:ebgaramond-regular] [features=eb-garamond-normal]
+ \definefontsynonym [SerifItalic] [file:ebgaramond-italic] [features=eb-garamond-normal]
+ \definefontsynonym [SerifBold] [file:ebgaramond-bold] [features=eb-garamond-normal]
+ \definefontsynonym [SerifCaps] [Serif] [features=eb-garamond-smallcaps]
+ \stoptypescript
+
+ \starttypescript[ebgaramond]
+ \definetypeface [ebgaramond] [rm] [serif] [ebgaramond] [default] [designsize=auto]
+ \definetypeface [ebgaramond] [tt] [mono] [dejavu] [default]
+ \definetypeface [ebgaramond] [mm] [math] [bonum] [default]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-ipaex.mkiv b/tex/context/base/type-imp-ipaex.mkiv
new file mode 100644
index 000000000..b11f96878
--- /dev/null
+++ b/tex/context/base/type-imp-ipaex.mkiv
@@ -0,0 +1,137 @@
+%D \module
+%D [ file=type-imp-ipaex,
+%D version=2015.05.01,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=IPAex,
+%D author=Yusuke KUROKI \& Hans HAGEN,
+%D date=\currentdate,
+%D copyright=Yusuke KUROKI \& Hans HAGEN]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% usage: \setupbodyfont[ipaex,10pt]
+
+\loadtypescriptfile[texgyre]
+\loadtypescriptfile[xits]
+
+\definefontfeature
+ [jp-default]
+ [script=default,lang=japanese]
+
+\definefontfeature
+ [jp-latin-default]
+ [jp-default]
+
+\definefontfeature
+ [jp-latin-slanted]
+ [jp-latin-default]
+ [slant=.2]
+
+\definefontfeature
+ [jp-latin-slanted-mono]
+ [slant=.2]
+
+\definefontfeature
+ [jp-latin-smallcaps]
+ [jp-latin-default]
+ [smcp=yes]
+
+\definefontfeature
+ [jp-latin-smallcaps-mono]
+ [cmcp=yes]
+
+\definefontfallback[jp-serif] [texgyrepagella-regular*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifbold] [texgyrepagella-bold*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifitalic] [texgyrepagella-italic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifbolditalic] [texgyrepagella-bolditalic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifslanted] [texgyrepagella-regular*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifboldslanted][texgyrepagella-bold*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifcaps] [texgyrepagella-regular*jp-latin-smallcaps] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sans] [texgyreheros-regular*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansbold] [texgyreheros-bold*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansitalic] [texgyreheros-italic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansbolditalic] [texgyreheros-bolditalic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansslanted] [texgyreheros-regular*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansboldslanted] [texgyreheros-bold*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sanscaps] [texgyreheros-regular*jp-latin-smallcaps] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-mono] [texgyrecursor-regular] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monobold] [texgyrecursor-bold] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monoitalic] [texgyrecursor-italic] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monobolditalic] [texgyrecursor-bolditalic] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monoslanted] [texgyrecursor-regular*jp-latin-slanted-mono] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monoboldslanted] [texgyrecursor-bold*jp-latin-slanted-mono] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monocaps] [texgyrecursor-regular*jp-latin-smallcaps-mono][0x0000-0x0400][force=yes,rscale=1.0406]
+
+\starttypescriptcollection[ipaex]
+ \starttypescript [\s!serif] [ipaexm]
+ \definefontsynonym [ipaexm] [\s!file:ipaexm][\s!features=jp-default,\s!fallbacks=jp-serif]
+ \definefontsynonym [ipaexmbold] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifbold]
+ \definefontsynonym [ipaexmitalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifitalic]
+ \definefontsynonym [ipaexmbolditalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifbolditalic]
+ \definefontsynonym [ipaexmslanted] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifslanted]
+ \definefontsynonym [ipaexmboldslanted][\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifboldslanted]
+ \definefontsynonym [ipaexmcaps] [\s!file:ipaexm][\s!features=jp-default,\s!fallbacks=jp-serifcaps]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [ipaexg]
+ \definefontsynonym[ipaexg] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sans]
+ \definefontsynonym[ipaexgbold] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansbold]
+ \definefontsynonym[ipaexgitalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansitalic]
+ \definefontsynonym[ipaexgbolditalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansbolditalic]
+ \definefontsynonym[ipaexgslanted] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansslanted]
+ \definefontsynonym[ipaexgboldslanted] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansboldslanted]
+ \definefontsynonym[ipaexgcaps] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sanscaps]
+ \stoptypescript
+
+ \starttypescript [\s!mono] [ipaexgmono]
+ \definefontsynonym[ipaexgmono] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-mono]
+ \definefontsynonym[ipaexgmonobold] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-monobold]
+ \definefontsynonym[ipaexgmonoitalic] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monoitalic]
+ \definefontsynonym[ipaexgmonobolditalic] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monobolditalic]
+ \definefontsynonym[ipaexgmonoslanted] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monoslanted]
+ \definefontsynonym[ipaexgmonoboldslanted] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monoboldslanted]
+ \definefontsynonym[ipaexgmonocaps] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-monocaps]
+ \stoptypescript
+
+ \starttypescript [\s!serif] [ipaexm] [\s!name]
+ \definefontsynonym[\s!Serif] [ipaexm]
+ \definefontsynonym[\s!SerifBold] [ipaexmbold]
+ \definefontsynonym[\s!SerifItalic] [ipaexmitalic]
+ \definefontsynonym[\s!SerifBoldItalic] [ipaexmbolditalic]
+ \definefontsynonym[\s!SerifSlanted] [ipaexmslanted]
+ \definefontsynonym[\s!SerifBoldSlanted][ipaexmboldslanted]
+ \definefontsynonym[\s!SerifCaps] [ipaexmcaps]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [ipaexg] [\s!name]
+ \definefontsynonym[\s!Sans] [ipaexg]
+ \definefontsynonym[\s!SansBold] [ipaexgbold]
+ \definefontsynonym[\s!SansItalic] [ipaexgitalic]
+ \definefontsynonym[\s!SansBoldItalic] [ipaexgbolditalic]
+ \definefontsynonym[\s!SansSlanted] [ipaexgslanted]
+ \definefontsynonym[\s!SansBoldSlanted] [ipaexgboldslanted]
+ \definefontsynonym[\s!SansCaps] [ipaexgcaps]
+ \stoptypescript
+
+ \starttypescript [\s!mono] [ipaexgmono] [\s!name]
+ \definefontsynonym[\s!Mono] [ipaexgmono]
+ \definefontsynonym[\s!MonoBold] [ipaexgmonobold]
+ \definefontsynonym[\s!MonoItalic] [ipaexgmonoitalic]
+ \definefontsynonym[\s!MonoBoldItalic] [ipaexgmonobolditalic]
+ \definefontsynonym[\s!MonoSlanted] [ipaexgmonoslanted]
+ \definefontsynonym[\s!MonoBoldSlanted] [ipaexgmonoboldslanted]
+ \definefontsynonym[\s!MonoCaps] [ipaexgmonocaps]
+ \stoptypescript
+
+ % xits might get replaced with a pagella once we have it
+
+ \starttypescript[ipaex]
+ \definetypeface [ipaex] [\s!rm] [\s!serif] [ipaexm]
+ \definetypeface [ipaex] [\s!ss] [\s!sans] [ipaexg]
+ \definetypeface [ipaex] [\s!tt] [\s!mono] [ipaexgmono]
+ \definetypeface [ipaex] [\s!mm] [\s!math] [xits]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-latinmodern.mkiv b/tex/context/base/type-imp-latinmodern.mkiv
index afe2c6417..fe4b669bd 100644
--- a/tex/context/base/type-imp-latinmodern.mkiv
+++ b/tex/context/base/type-imp-latinmodern.mkiv
@@ -71,11 +71,14 @@
\starttypescript [\s!math] [modern,latin-modern-designsize,latin-modern] [\s!name]
\loadfontgoodies[lm]
- \loadfontgoodies[lm-math]
\definefontsynonym [\s!MathRoman] [LMMathRoman-Regular]
\definefontsynonym [\s!MathRomanBold] [LMMathRoman-Bold]
\stoptypescript
+ \starttypescript [\s!math] [latin-modern-designsize] [\s!name]
+ \loadfontgoodies[lm-math]
+ \stoptypescript
+
\starttypescript [\s!serif] [modern-variable,latin-modern-variable-designsize,latin-modern-variable] [\s!name]
\loadfontgoodies[lm]
\definefontsynonym [\s!Serif] [LMTypewriterVarWd-Regular] [\s!features=\s!default]
diff --git a/tex/context/base/type-imp-lato.mkiv b/tex/context/base/type-imp-lato.mkiv
new file mode 100644
index 000000000..8fb8647fc
--- /dev/null
+++ b/tex/context/base/type-imp-lato.mkiv
@@ -0,0 +1,56 @@
+%D \module
+%D [ file=type-imp-lato,
+%D version=2014.05.02,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=Lato fonts,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% hai : hair / lta : italic
+% lig : light / lta : italic
+% reg : regular / lta : italic
+% bol : bold / lta : italic
+% bla : black / lta : italic
+
+\loadtypescriptfile[dejavu]
+\loadtypescriptfile[xits]
+
+\starttypescriptcollection[lato]
+
+ \starttypescript [\s!sans] [lato] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:lato-reg] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!file:lato-bol] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-reglta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-bollta] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [lato-light] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:lato-lig] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!file:lato-reg] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-liglta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-reglta] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [lato-dark] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:lato-bol] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!file:lato-bla] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-bollta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-blalta] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript[lato,lato-light,lato-dark]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [\typescriptone] [\s!default]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [dejavu] [\s!default]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-texgyre.mkiv b/tex/context/base/type-imp-texgyre.mkiv
index 24185f41d..247b4ef21 100644
--- a/tex/context/base/type-imp-texgyre.mkiv
+++ b/tex/context/base/type-imp-texgyre.mkiv
@@ -153,7 +153,7 @@
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [helvetica] [\s!default] [\s!rscale=0.9]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.05]
- \definetypeface [\typescriptone] [\s!mm] [\s!math] [times] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [termes] [\s!default]
\quittypescriptscanning
\stoptypescript
@@ -161,7 +161,7 @@
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [modern] [\s!default] [\s!rscale=1.075]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.075]
- \definetypeface [\typescriptone] [\s!mm] [\s!math] [palatino] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [pagella] [\s!default]
\quittypescriptscanning
\stoptypescript
@@ -169,7 +169,7 @@
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [modern] [\s!default] [\s!rscale=1.1]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.1]
- \definetypeface [\typescriptone] [\s!mm] [\s!math] [modern] [\s!default] [\s!rscale=1.1]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [schola] [\s!default]
\quittypescriptscanning
\stoptypescript
@@ -277,3 +277,12 @@
\stoptypescript
\stoptypescriptcollection
+
+\starttypescriptcollection[texgyre-math-schola]
+
+ \starttypescript [\s!math][schoolbook,schola][\s!all]
+ \loadfontgoodies[texgyre]
+ \definefontsynonym[\s!MathRoman][file:texgyre-schola-math-regular.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua
index 9ee97acae..cd5b32d3e 100644
--- a/tex/context/base/type-ini.lua
+++ b/tex/context/base/type-ini.lua
@@ -6,15 +6,23 @@ if not modules then modules = { } end modules ['type-ini'] = {
license = "see context related readme files"
}
+local gsub = string.gsub
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
+
-- more code will move here
-local commands, context = commands, context
+local commands = commands
+local context = context
+local implement = interfaces.implement
-local gsub = string.gsub
+local uselibrary = resolvers.uselibrary
-local report_typescripts = logs.reporter("fonts","typescripts")
+local name_one = nil
+local name_two = nil
-local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only
+local p_strip = Cs((P("type-") * (P("imp-")^0))^0/"" * P(1)^0)
+
+local report = logs.reporter("fonts","typescripts")
local function action(name,foundname)
-- context.startreadingfile()
@@ -27,50 +35,83 @@ local function action(name,foundname)
context.loadfoundtypescriptfile(foundname)
end
-local name_one, name_two
+local patterns = {
+ "type-imp-%s.mkiv",
+ "type-imp-%s.tex"
+}
-local function failure_two(name)
- report_typescripts("unknown library %a or %a",name_one,name_two)
+local function failure(name)
+ if name == "loc" then
+ -- ignore
+ else
+ report("unknown library %a",name)
+ end
end
-local function failure_one(name)
- name_two = gsub(name,"%-.*$","")
- if name_two == name then
- report_typescripts("unknown library %a",name_one)
- else
- commands.uselibrary {
- name = name_two,
+implement {
+ name = "loadtypescriptfile",
+ arguments = "string",
+ actions = function(name) -- a more specific name
+ uselibrary {
+ name = lpegmatch(p_strip,name) or name,
patterns = patterns,
action = action,
- failure = failure_two,
+ failure = failure,
onlyonce = false, -- will become true
}
end
-end
-
-function commands.doprocesstypescriptfile(name)
- name_one = gsub(name,"^type%-","")
- commands.uselibrary {
- name = name_one,
- patterns = patterns,
- action = action,
- failure = failure_one,
- onlyonce = false, -- will become true
- }
-end
+}
-local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" }
+local patterns = {
+ "type-imp-%s.mkiv",
+ "type-imp-%s.tex",
+ -- obsolete
+ "type-%s.mkiv",
+ "type-%s.tex"
+}
-local function failure(name)
- report_typescripts("unknown library %a",name)
-end
+-- local function failure_two(name)
+-- report("unknown library %a or %a",name_one,name_two)
+-- end
+--
+-- local function failure_one(name)
+-- name_two = gsub(name,"%-.*$","")
+-- if name == "loc" then
+-- -- ignore
+-- elseif name_two == name then
+-- report("unknown library %a",name_one)
+-- else
+-- commands.uselibrary {
+-- name = name_two,
+-- patterns = patterns,
+-- action = action,
+-- failure = failure_two,
+-- onlyonce = false, -- will become true
+-- }
+-- end
+-- end
+--
+-- function commands.doprocesstypescriptfile(name)
+-- name_one = lpegmatch(p_strip,name) or name
+-- uselibrary {
+-- name = name_one,
+-- patterns = patterns,
+-- action = action,
+-- failure = failure_one,
+-- onlyonce = false, -- will become true
+-- }
+-- end
-function commands.loadtypescriptfile(name) -- a more specific name
- commands.uselibrary {
- name = gsub(name,"^type%-",""),
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = false, -- will become true
- }
-end
+implement {
+ name = "doprocesstypescriptfile",
+ arguments = "string",
+ actions = function(name)
+ uselibrary {
+ name = lpegmatch(p_strip,name) or name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = false, -- will become true
+ }
+ end
+}
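
Not part of the patch: a minimal standalone sketch of the p_strip pattern introduced above, which replaces the old gsub based prefix stripping. It removes a leading "type-" (optionally followed by "imp-") from a typescript file name and keeps the rest; it should run with any Lua that provides the lpeg library that LuaTeX ships.

local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs

-- strip a leading "type-" (optionally followed by "imp-"), keep the rest
local p_strip = Cs((P("type-") * (P("imp-")^0))^0/"" * P(1)^0)

print(lpeg.match(p_strip,"type-imp-lato")) --> lato
print(lpeg.match(p_strip,"type-lato"))     --> lato
print(lpeg.match(p_strip,"lato"))          --> lato
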
diff --git a/tex/context/base/type-ini.mkvi b/tex/context/base/type-ini.mkvi
index a4d576d80..f56c5573d 100644
--- a/tex/context/base/type-ini.mkvi
+++ b/tex/context/base/type-ini.mkvi
@@ -172,7 +172,7 @@
\unexpanded\def\loadtypescriptfile[#1]%
{\pushmacro\typescriptstate
\let\typescriptstate\plustwo % assumes 2 at the outer level
- \ctxcommand{loadtypescriptfile("#1")}%
+ \clf_loadtypescriptfile{#1}%
\popmacro\typescriptstate}
\unexpanded\def\loadfoundtypescriptfile#1%
@@ -237,7 +237,7 @@
\expandafter\let\csname\??typescriptfiles\currenttypefile\endcsname\t_font_typescripts}
\def\font_typescript_process_typescript_file
- {\ctxcommand{doprocesstypescriptfile("\currenttypefile")}}
+ {\clf_doprocesstypescriptfile{\currenttypefile}}
\unexpanded\def\usetypescriptonce
{\dotripleempty\font_typescripts_use_once}
@@ -294,12 +294,12 @@
%{\appendtoks\starttypescript#definitions\stoptypescript\to\c_font_typescripts_document}
{\c_font_typescripts_document\expandafter{\the\c_font_typescripts_document\starttypescript#definitions\stoptypescript}}
-\def\font_typescripts_start_process % could be a faster \doifnextoptionalelse if needed
+\def\font_typescripts_start_process % could be a faster \doifelsenextoptional if needed
{\let\typescriptone \m_font_typescripts_one
\let\typescripttwo \m_font_typescripts_two
\let\typescriptthree\m_font_typescripts_three
\let\m_font_typescripts_match\empty
- \doifnextoptionalelse\font_typescripts_start_process_one\font_typescripts_start_process_all}
+ \doifelsenextoptionalcs\font_typescripts_start_process_one\font_typescripts_start_process_all}
\def\font_typescripts_start_process_all % could be a \let
{\ifconditional\c_font_typescripts_first_pass
@@ -333,10 +333,10 @@
{\font_typescripts_check\m_font_typescripts_three\typescriptthree\font_typescripts_start_process_again_three}
\def\font_typescripts_start_process_again_one
- {\doifnextoptionalelse\font_typescripts_start_process_two\font_typescripts_start_process_yes}
+ {\doifelsenextoptionalcs\font_typescripts_start_process_two\font_typescripts_start_process_yes}
\def\font_typescripts_start_process_again_two
- {\doifnextoptionalelse\font_typescripts_start_process_three\font_typescripts_start_process_yes}
+ {\doifelsenextoptionalcs\font_typescripts_start_process_three\font_typescripts_start_process_yes}
\let\font_typescripts_start_process_again_three\font_typescripts_start_process_yes
@@ -359,7 +359,7 @@
\donetrue
\let#target\m_font_typescripts_check
\else
- \doifcommonelse\m_font_typescripts_check#asked\donetrue\donefalse
+ \doifelsecommon\m_font_typescripts_check#asked\donetrue\donefalse
\ifdone
\let#target\commalistelement
\fi
@@ -381,17 +381,17 @@
\unexpanded\def\loadmapline{\dodoubleempty\font_map_load_line}
\def\font_map_load_file[#filename]%
- {\ctxlua{fonts.mappings.loadfile("#filename")}}
+ {\clf_loadmapfile{#filename}}
\def\font_map_load_line[#kind][#data]%
- {\ctxlua{fonts.mappings.loadline("#kind","#data")}}
+ {\clf_loadmapline{#kind}{#data}}
\unexpanded\def\forgetmapfiles
- {\ctxlua{fonts.mappings.reset()}}
+ {\clf_resetmapfiles}
-\prependtoks
- \loadmapfile[mkiv-base.map]% can't we preload this one?
-\to \everystarttext
+% \prependtoks
+% \loadmapfile[mkiv-base.map]% can't we preload this one?
+% \to \everystarttext
%D A handy shortcut:
@@ -497,7 +497,7 @@
{\font_typefaces_define_indeed[#name][#style]}
\def\font_typefaces_define_d[#name][#specification][#dummya][#dummyb][#dummyc][#dummyd]% use definitions in lfg file
- {\ctxlua{fonts.definetypeface("#name",\!!bs#specification\!!es)}}
+ {\clf_definetypeface{#name}{#specification}}
\def\font_typefaces_define_indeed[#name][#style]% saveguard against redefinition
{\doifsomething{#name}
@@ -575,12 +575,6 @@
\fi \fi \fi
\ifmmode\mr\else\tf\fi} % needed ?
-% obsolete
-%
-% \unexpanded\def\usetypefile[#type]% recurses on path ! % no storage
-% {\edef\currenttypefile{#type}%
-% \ctxcommand{doprocesstypescriptfile("\currenttypefile")}}
-
%D For Taco:
%D
%D \starttyping
diff --git a/tex/context/base/type-run.mkiv b/tex/context/base/type-run.mkiv
index 4da633371..0455d8966 100644
--- a/tex/context/base/type-run.mkiv
+++ b/tex/context/base/type-run.mkiv
@@ -21,7 +21,7 @@
\def\dochecktypescript##1##2% script use
{\doifelsenothing{##1##2}
{\donetrue}
- {\doifcommonelse{##1}{##2}\donetrue\donefalse}}
+ {\doifelsecommon{##1}{##2}\donetrue\donefalse}}
\edef\typescriptone {\truetypescript{#1}}%
\edef\typescripttwo {\truetypescript{#2}}%
\edef\typescriptthree{\truetypescript{#3}}%
@@ -30,7 +30,7 @@
\obeylines % else we lose the first line due to lookahead
\dotripleempty\dostarttypescript}
\def\dostarttypescript[##1][##2][##3]%
- {\long\def\next####1\stoptypescript{\egroup}
+ {\def\next####1\stoptypescript{\egroup}
\dochecktypescript{##1}\typescriptone \ifdone
\dochecktypescript{##2}\typescripttwo \ifdone
\dochecktypescript{##3}\typescriptthree\ifdone
diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua
index bc9f66ee4..4d0f28d9a 100644
--- a/tex/context/base/typo-bld.lua
+++ b/tex/context/base/typo-bld.lua
@@ -6,9 +6,12 @@ if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
license = "see context related readme files"
}
+-- no need for nuts in the one-line demo (that might move anyway)
+
local insert, remove = table.insert, table.remove
-local builders, nodes, node = builders, nodes, node
+builders = builders or { }
+local builders = builders
builders.paragraphs = builders.paragraphs or { }
local parbuilders = builders.paragraphs
@@ -33,11 +36,12 @@ local texsetattribute = tex.setattribute
local texnest = tex.nest
local texlists = tex.lists
+local nodes = nodes
local nodepool = nodes.pool
local new_baselineskip = nodepool.baselineskip
local new_lineskip = nodepool.lineskip
-local insert_node_before = node.insert_before
-local hpack_node = node.hpack
+local insert_node_before = nodes.insert_before
+local hpack_node = nodes.hpack
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
@@ -161,14 +165,8 @@ local function processor(head,followed_by_display)
end
end
-function constructors.enable()
- enabled = true
-end
-
-function constructors.disable()
- enabled = false
-end
-
+function constructors.enable () enabled = true end
+function constructors.disable() enabled = false end
callbacks.register('linebreak_filter', processor, "breaking paragraphs into lines")
@@ -176,15 +174,6 @@ statistics.register("linebreak processing time", function()
return statistics.elapsedseconds(parbuilders)
end)
--- interface
-
-commands.defineparbuilder = constructors.define
-commands.startparbuilder = constructors.start
-commands.stopparbuilder = constructors.stop
-commands.setparbuilder = constructors.set
-commands.enableparbuilder = constructors.enable
-commands.disableparbuilder = constructors.disable
-
-- todo: move from nodes.builders to builders
nodes.builders = nodes.builder or { }
@@ -226,7 +215,16 @@ local function report(groupcode,head)
report_page_builder(" list : %s",head and nodeidstostring(head) or "<empty>")
end
+-- use tex.[sg]etlist
+
function builders.buildpage_filter(groupcode)
+ -- -- this needs checking .. gets called too often
+ -- if group_code ~= "after_output" then
+ -- if trace_page_builder then
+ -- report(groupcode)
+ -- end
+ -- return nil, false
+ -- end
local head, done = texlists.contrib_head, false
if head then
starttiming(builders)
@@ -237,14 +235,16 @@ function builders.buildpage_filter(groupcode)
stoptiming(builders)
-- -- doesn't work here (not passed on?)
-- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom
- texlists.contrib_head = head
- return done and head or true
+ texlists.contrib_head = head or nil -- needs checking
+-- tex.setlist("contrib_head",head,head and nodes.tail(head))
+ return done and head or true -- no return value needed
else
if trace_page_builder then
report(groupcode)
end
- return nil, false
+ return nil, false -- no return value needed
end
+
end
callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc")
@@ -253,3 +253,12 @@ callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spac
statistics.register("v-node processing time", function()
return statistics.elapsedseconds(builders)
end)
+
+local implement = interfaces.implement
+
+implement { name = "defineparbuilder", actions = constructors.define, arguments = "string" }
+implement { name = "setparbuilder", actions = constructors.set, arguments = "string" }
+implement { name = "startparbuilder", actions = constructors.start, arguments = "string" }
+implement { name = "stopparbuilder", actions = constructors.stop }
+implement { name = "enableparbuilder", actions = constructors.enable }
+implement { name = "disableparbuilder", actions = constructors.disable }
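
Not part of the patch: the registrations above show the pattern used throughout this commit, where a function formerly exposed as commands.<name> and called with \ctxcommand{...} is registered through interfaces.implement, which makes a \clf_<name> caller available at the TeX end (see the companion .mkiv changes below). A minimal sketch with a made-up name and action, only meaningful inside a ConTeXt MkIV run:

local implement = interfaces.implement

-- a hypothetical action: it just echoes the scanned arguments to the log
local function showdemo(name,count)
    texio.write_nl(string.format("demo: %s (%d)",name,count))
end

implement {
    name      = "showdemo",               -- a \clf_showdemo caller appears at the TeX end
    actions   = showdemo,
    arguments = { "string", "integer" },  -- what the generated caller scans
}
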
diff --git a/tex/context/base/typo-bld.mkiv b/tex/context/base/typo-bld.mkiv
index 10502005b..69047c98b 100644
--- a/tex/context/base/typo-bld.mkiv
+++ b/tex/context/base/typo-bld.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
+\writestatus{loading}{ConTeXt Typesetting Macros / Paragraph Building}
%D This is very experimental, undocumented, subjected to changes, etc. just as
%D the underlying interfaces. But at least it's cleaned as part of the status-mkiv
@@ -38,18 +38,18 @@
\installcorenamespace {parbuilder}
\unexpanded\def\defineparbuilder[#1]%
- {\ctxcommand{defineparbuilder("#1")}}
+ {\clf_defineparbuilder{#1}}
\unexpanded\def\startparbuilder[#1]%
{\ifhmode\par\fi
- \ctxcommand{startparbuilder("#1")}}
+ \clf_startparbuilder{#1}}
\unexpanded\def\stopparbuilder
{\ifhmode\par\fi
- \ctxcommand{stopparbuilder()}}
+ \clf_stopparbuilder}
\unexpanded\def\setmainparbuilder[#1]%
- {\ctxcommand{setparbuilder("#1")}}
+ {\clf_setparbuilder{#1}}
% no high level interface, after all implementing a linebreaker is not something that
% the average user will do
@@ -58,7 +58,7 @@
\defineparbuilder[oneline] % just for testing
\defineparbuilder[basic] % just for testing
-\def\enableparbuilders {\ctxcommand{enableparbuilder()}} % hooks in otr so we need to pickup
-\def\disableparbuilders{\ctxcommand{disableparbuilder()}} % hooks in otr so we need to pickup
+\unexpanded\def\enableparbuilders {\clf_enableparbuilder } % hooks in otr so we need to pickup
+\unexpanded\def\disableparbuilders{\clf_disableparbuilder} % hooks in otr so we need to pickup
\protect \endinput
diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua
index 3558efa8e..a9d775856 100644
--- a/tex/context/base/typo-brk.lua
+++ b/tex/context/base/typo-brk.lua
@@ -20,23 +20,41 @@ local report_breakpoints = logs.reporter("typesetting","breakpoints")
local nodes, node = nodes, node
local settings_to_array = utilities.parsers.settings_to_array
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local free_node = node.free
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove -- ! nodes
-local tonodes = nodes.tonodes
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getid = nuts.getid
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local copy_node = nuts.copy
+local copy_nodelist = nuts.copy_list
+local free_node = nuts.free
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+
+local tonodes = nuts.tonodes
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local tasks = nodes.tasks
local v_reset = interfaces.variables.reset
+local implement = interfaces.implement
+
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
local new_disc = nodepool.disc
@@ -80,74 +98,86 @@ local function insert_break(head,start,before,after)
end
methods[1] = function(head,start)
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
insert_break(head,start,10000,0)
end
return head, start
end
methods[2] = function(head,start) -- ( => (-
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.replace = tmp
- local tmp, hyphen = copy_node(tmp), copy_node(tmp)
- hyphen.char = languages.prehyphenchar(tmp.lang)
- tmp.next, hyphen.prev = hyphen, tmp
- start.post = tmp
+ -- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr"))) -- just a copy will do
+ setfield(start,"attr",getfield(tmp,"attr"))
+ setfield(start,"replace",tmp)
+ local tmp = copy_node(tmp)
+ local hyphen = copy_node(tmp)
+ setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
+ setfield(tmp,"next",hyphen)
+ setfield(hyphen,"prev",tmp)
+ setfield(start,"post",tmp)
insert_break(head,start,10000,10000)
end
return head, start
end
methods[3] = function(head,start) -- ) => -)
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.replace = tmp
- local tmp, hyphen = copy_node(tmp), copy_node(tmp)
- hyphen.char = languages.prehyphenchar(tmp.lang)
- tmp.prev, hyphen.next = hyphen, tmp
- start.pre = hyphen
+ -- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr"))) -- just a copy will do
+ setfield(start,"attr",getfield(tmp,"attr"))
+ setfield(start,"replace",tmp)
+ local tmp = copy_node(tmp)
+ local hyphen = copy_node(tmp)
+ setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
+ setfield(tmp,"prev",hyphen)
+ setfield(hyphen,"next",tmp)
+ setfield(start,"pre",hyphen)
insert_break(head,start,10000,10000)
end
return head, start
end
methods[4] = function(head,start) -- - => - - -
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp
+ -- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr"))) -- just a copy will do
+ setfield(start,"attr",getfield(tmp,"attr"))
+ setfield(start,"pre",copy_node(tmp))
+ setfield(start,"post",copy_node(tmp))
+ setfield(start,"replace",tmp)
insert_break(head,start,10000,10000)
end
return head, start
end
methods[5] = function(head,start,settings) -- x => p q r
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- local attr = tmp.attr
- local font = tmp.font
- start.attr = copy_nodelist(attr) -- todo: critical only
- local left, right, middle = settings.left, settings.right, settings.middle
+ local attr = getfield(tmp,"attr")
+ local font = getfont(tmp)
+ local left = settings.left
+ local right = settings.right
+ local middle = settings.middle
if left then
- start.pre = tonodes(tostring(left),font,attr) -- was right
+ setfield(start,"pre",(tonodes(tostring(left),font,attr))) -- was right
end
if right then
- start.post = tonodes(tostring(right),font,attr) -- was left
+ setfield(start,"post",(tonodes(tostring(right),font,attr))) -- was left
end
if middle then
- start.replace = tonodes(tostring(middle),font,attr)
+ setfield(start,"replace",(tonodes(tostring(middle),font,attr)))
end
+ -- setfield(start,"attr",copy_nodelist(attr)) -- todo: critical only -- just a copy will do
+ setfield(start,"attr",attr) -- todo: critical only -- just a copy will do
free_node(tmp)
insert_break(head,start,10000,10000)
end
@@ -155,31 +185,32 @@ methods[5] = function(head,start,settings) -- x => p q r
end
function breakpoints.handler(head)
+ head = tonut(head)
local done, numbers = false, languages.numbers
local start, n = head, 0
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local attr = start[a_breakpoints]
+ local attr = getattr(start,a_breakpoints)
if attr and attr > 0 then
- start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
+ setattr(start,a_breakpoints,unsetvalue) -- maybe test for subtype > 256 (faster)
-- look ahead and back n chars
local data = mapping[attr]
if data then
local map = data.characters
- local cmap = map[start.char]
+ local cmap = map[getchar(start)]
if cmap then
- local lang = start.lang
+ local lang = getfield(start,"lang")
-- we do a sanity check for language
local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""])
if smap then
if n >= smap.nleft then
local m = smap.nright
- local next = start.next
+ local next = getnext(start)
while next do -- gamble on same attribute (not that important actually)
- local id = next.id
+ local id = getid(next)
if id == glyph_code then -- gamble on same attribute (not that important actually)
- if map[next.char] then
+ if map[getchar(next)] then
break
elseif m == 1 then
local method = methods[smap.type]
@@ -190,10 +221,10 @@ function breakpoints.handler(head)
break
else
m = m - 1
- next = next.next
+ next = getnext(next)
end
- elseif id == kern_code and next.subtype == kerning_code then
- next = next.next
+ elseif id == kern_code and getsubtype(next) == kerning_code then
+ next = getnext(next)
-- ignore intercharacter kerning, will go away
else
-- we can do clever and set n and jump ahead but ... not now
@@ -214,14 +245,14 @@ function breakpoints.handler(head)
else
-- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this
end
- elseif id == kern_code and start.subtype == kerning_code then
+ elseif id == kern_code and getsubtype(start) == kerning_code then
-- ignore intercharacter kerning, will go away
else
n = 0
end
- start = start.next
+ start = getnext(start)
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -290,6 +321,32 @@ end
-- interface
-commands.definebreakpoints = breakpoints.define
-commands.definebreakpoint = breakpoints.setreplacement
-commands.setbreakpoints = breakpoints.set
+implement {
+ name = "definebreakpoints",
+ actions = breakpoints.define,
+ arguments = "string"
+}
+
+implement {
+ name = "definebreakpoint",
+ actions = breakpoints.setreplacement,
+ arguments = {
+ "string",
+ "string",
+ "string",
+ {
+ { "type", "integer" },
+ { "nleft" },
+ { "nright" },
+ { "right" },
+ { "left" },
+ { "middle" },
+ }
+ }
+}
+
+implement {
+ name = "setbreakpoints",
+ actions = breakpoints.set,
+ arguments = "string"
+}
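
Not part of the patch: the accessor renames above illustrate the other recurring change in this commit, the move from the userdata node fields (start.char, start.next) to the direct "nuts" accessors (getchar, getnext) with a tonut/tonode conversion at the handler boundary. A minimal sketch of that handler shape, assuming a ConTeXt MkIV run where nodes.nuts is available; the function name is illustrative only.

local nuts       = nodes.nuts
local tonut      = nuts.tonut
local tonode     = nuts.tonode
local getid      = nuts.getid
local getnext    = nuts.getnext

local glyph_code = nodes.nodecodes.glyph

local function democounter(head)
    local h       = tonut(head)   -- work on the direct (fast) representation
    local current = h
    local n       = 0
    while current do
        if getid(current) == glyph_code then
            n = n + 1
        end
        current = getnext(current)
    end
    return tonode(h), n > 0       -- hand a proper node (plus a done flag) back
end
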
diff --git a/tex/context/base/typo-brk.mkiv b/tex/context/base/typo-brk.mkiv
index af498bfec..3b463c06f 100644
--- a/tex/context/base/typo-brk.mkiv
+++ b/tex/context/base/typo-brk.mkiv
@@ -25,13 +25,13 @@
\definesystemattribute[breakpoint][public,global]
-\exhyphenchar\minusone % we use a different order then base tex, so we really need this
+% see below: \exhyphenchar \minusone % we use a different order than base tex, so we really need this
\unexpanded\def\definebreakpoints
{\dosingleargument\typo_breakpoints_define}
\def\typo_breakpoints_define[#1]%
- {\ctxcommand{definebreakpoints("#1")}}
+ {\clf_definebreakpoints{#1}}
\def\definebreakpoint
{\dotripleempty\typo_breakpoints_define_character}
@@ -39,18 +39,24 @@
\def\typo_breakpoints_define_character[#1][#2][#3]% name char settings
{\begingroup
\getdummyparameters[\c!type=1,\c!nleft=3,\c!nright=3,\s!language=,\c!left=,\c!right=,\c!middle=,#3]%
- \ctxcommand{definebreakpoint("#1", "#2", "\reallanguagetag{\directdummyparameter\s!language}", { % maybe deal with #3 at the lua end
- type = \directdummyparameter\c!type,
- nleft = "\directdummyparameter\c!nleft",
- nright = "\directdummyparameter\c!nright",
- right = "\directdummyparameter\c!right",
- left = "\directdummyparameter\c!left",
- middle = "\directdummyparameter\c!middle"
- } )}%
+ \clf_definebreakpoint
+ {#1}%
+ {#2}%
+ {\reallanguagetag{\directdummyparameter\s!language}}%
+ {% maybe deal with #3 at the lua end
+ type \directdummyparameter\c!type
+ nleft {\directdummyparameter\c!nleft}%
+ nright {\directdummyparameter\c!nright}%
+ right {\directdummyparameter\c!right}%
+ left {\directdummyparameter\c!left}%
+ middle {\directdummyparameter\c!middle}%
+ }%
+ \relax
\endgroup}
\unexpanded\def\setbreakpoints[#1]%
- {\ctxcommand{setbreakpoints("#1")}}
+ {\exhyphenchar\minusone % we use a different order than base tex, so we really need this
+ \clf_setbreakpoints{#1}}
\unexpanded\def\resetbreakpoints
{\attribute\breakpointattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua
index 0fc1a3093..80a74bac9 100644
--- a/tex/context/base/typo-cap.lua
+++ b/tex/context/base/typo-cap.lua
@@ -16,9 +16,26 @@ local report_casing = logs.reporter("typesetting","casing")
local nodes, node = nodes, node
-local copy_node = nodes.copy
-local end_of_math = nodes.end_of_math
-
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local copy_node = nuts.copy
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local insert_after = nuts.insert_after
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
@@ -58,6 +75,8 @@ local a_cases = attributes.private("case")
local extract = bit32.extract
local run = 0 -- a trick to make neighbouring ranges work
+local blocked = { }
+
local function set(tag,font)
if run == 2^6 then
run = 1
@@ -90,88 +109,40 @@ end
--
-- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words}
-local uccodes = characters.uccodes
-local lccodes = characters.lccodes
+local uccodes = characters.uccodes
+local lccodes = characters.lccodes
+local categories = characters.categories
-- true false true == mixed
-local function helper(start,attr,lastfont,n,codes,special,once,keepother)
- local char = start.char
+local function replacer(start,codes)
+ local char = getchar(start)
local dc = codes[char]
if dc then
- local fnt = start.font
- if keepother and dc == char then
- local lfa = lastfont[n]
- if lfa then
- start.font = lfa
- return start, true
- else
- return start, false
- end
- else
- if special then
- local lfa = lastfont[n]
- if lfa then
- local previd = start.prev.id
- if previd ~= glyph_code and previd ~= disc_code then
- fnt = lfa
- start.font = lfa
- end
+ local fnt = getfont(start)
+ local ifc = fontchar[fnt]
+ if type(dc) == "table" then
+ for i=1,#dc do
+ if not ifc[dc[i]] then
+ return start, false
end
end
- local ifc = fontchar[fnt]
- if type(dc) == "table" then
- local ok = true
- for i=1,#dc do
- -- could be cached in font
- if not ifc[dc[i]] then
- ok = false
- break
- end
- end
- if ok then
- -- todo: use generic injector
- local prev = start
- local original = start
- for i=1,#dc do
- local chr = dc[i]
- prev = start
- if i == 1 then
- start.char = chr
- else
- local g = copy_node(original)
- g.char = chr
- local next = start.next
- g.prev = start
- if next then
- g.next = next
- start.next = g
- next.prev = g
- end
- start = g
- end
- end
- if once then
- lastfont[n] = false
- end
- return prev, true
- end
- if once then
- lastfont[n] = false
+ for i=#dc,1,-1 do
+ local chr = dc[i]
+ if i == 1 then
+ setfield(start,"char",chr)
+ else
+ local g = copy_node(start)
+ setfield(g,"char",chr)
+ insert_after(start,start,g)
end
- return start, false
- elseif ifc[dc] then
- start.char = dc
- if once then
- lastfont[n] = false
- end
- return start, true
end
+ return start, true
+ elseif ifc[dc] then
+ setfield(start,"char",dc)
+ return start, true
end
end
- if once then
- lastfont[n] = false
- end
return start, false
end
@@ -192,148 +163,156 @@ end
cases.register = register
-local function WORD(start,attr,lastfont,n)
+local function WORD(start,attr,lastfont,n,count,where,first)
lastfont[n] = false
- return helper(start,attr,lastfont,n,uccodes)
+ return replacer(first or start,uccodes)
end
-local function word(start,attr,lastfont,n)
+local function word(start,attr,lastfont,n,count,where,first)
lastfont[n] = false
- return helper(start,attr,lastfont,n,lccodes)
-end
-
-local function blockrest(start)
- local n = start.next
- while n do
- local id = n.id
- if id == glyph_code or id == disc_node and n[a_cases] == attr then
- n[a_cases] = unsetvalue
- else
- -- break -- we can have nested mess
- end
- n = n.next
- end
+ return replacer(first or start,lccodes)
end
-local function Word(start,attr,lastfont,n) -- looks quite complex
- lastfont[n] = false
- local prev = start.prev
- if prev and prev.id == kern_code and prev.subtype == kerning_code then
- prev = prev.prev
- end
- if not prev then
- blockrest(start)
- return helper(start,attr,lastfont,n,uccodes)
+local function Words(start,attr,lastfont,n,count,where,first) -- looks quite complex
+ if where == "post" then
+ return
end
- local previd = prev.id
- if previd ~= glyph_code and previd ~= disc_code then
- -- only the first character is treated
- blockrest(start)
- -- we could return the last in the range and save some scanning
- -- but why bother
- return helper(start,attr,lastfont,n,uccodes)
+ if count == 1 and where ~= "post" then
+ replacer(first or start,uccodes)
+ return start, true, true
else
- return start, false
+ return start, false, true
end
end
-local function Words(start,attr,lastfont,n)
- lastfont[n] = false
- local prev = start.prev
- if prev and prev.id == kern_code and prev.subtype == kerning_code then
- prev = prev.prev
- end
- if not prev then
- return helper(start,attr,lastfont,n,uccodes)
+local function Word(start,attr,lastfont,n,count,where,first)
+ blocked[attr] = true
+ return Words(start,attr,lastfont,n,count,where,first)
+end
+
+local function camel(start,attr,lastfont,n,count,where,first)
+ local _, done_1 = word(start,attr,lastfont,n,count,where,first)
+ local _, done_2 = Words(start,attr,lastfont,n,count,where,first)
+ return start, done_1 or done_2, true
+end
+
+local function mixed(start,attr,lastfont,n,count,where,first)
+ if where == "post" then
+ return
end
- local previd = prev.id
- if previd ~= glyph_code and previd ~= disc_code then
- return helper(start,attr,lastfont,n,uccodes)
+ local used = first or start
+ local char = getchar(used)
+ local dc = uccodes[char]
+ if not dc then
+ return start, false, true
+ elseif dc == char then
+ local lfa = lastfont[n]
+ if lfa then
+ setfield(first,"font",lfa)
+ return start, true, true
+ else
+ return start, false, true
+ end
else
- return start, false
+ replacer(first or start,uccodes)
+ return start, true, true
end
end
-local function capital(start,attr,lastfont,n) -- 3
- return helper(start,attr,lastfont,n,uccodes,true,true)
-end
-
-local function Capital(start,attr,lastfont,n) -- 4
- return helper(start,attr,lastfont,n,uccodes,true,false)
+local function Capital(start,attr,lastfont,n,count,where,first,once) -- 3
+ local used = first or start
+ if count == 1 and where ~= "post" then
+ local lfa = lastfont[n]
+ if lfa then
+ local dc = uccodes[getchar(used)]
+ if dc then
+ setfield(used,"font",lfa)
+ end
+ end
+ end
+ local s, d, c = replacer(first or start,uccodes)
+ if once then
+ lastfont[n] = false -- here
+ end
+ return start, d, c
end
-local function mixed(start,attr,lastfont,n)
- return helper(start,attr,lastfont,n,uccodes,false,false,true)
+local function capital(start,attr,lastfont,n,count,where,first) -- 4
+ return Capital(start,attr,lastfont,n,count,where,first,true)
end
-local function none(start,attr,lastfont,n)
- return start, false
+local function none(start,attr,lastfont,n,count,where,first)
+ return start, false, true
end
-local function random(start,attr,lastfont,n)
+local function random(start,attr,lastfont,n,count,where,first)
+ local used = first or start
+ local char = getchar(used)
+ local font = getfont(used)
+ local tfm = fontchar[font]
lastfont[n] = false
- local ch = start.char
- local tfm = fontchar[start.font]
- if lccodes[ch] then
+ local kind = categories[char]
+ if kind == "lu" then
while true do
- local d = chardata[randomnumber(1,0xFFFF)]
- if d then
- local uc = uccodes[d]
- if uc and tfm[uc] then -- this also intercepts tables
- start.char = uc
- return start, true
- end
+ local n = randomnumber(0x41,0x5A)
+ if tfm[n] then -- this also intercepts tables
+ setfield(used,"char",n)
+ return start, true
end
end
- elseif uccodes[ch] then
+ elseif kind == "ll" then
while true do
- local d = chardata[randomnumber(1,0xFFFF)]
- if d then
- local lc = lccodes[d]
- if lc and tfm[lc] then -- this also intercepts tables
- start.char = lc
- return start, true
- end
+ local n = randomnumber(0x61,0x7A)
+ if tfm[n] then -- this also intercepts tables
+ setfield(used,"char",n)
+ return start, true
end
end
end
return start, false
end
-register(variables.WORD, WORD) -- 1
-register(variables.word, word) -- 2
-register(variables.Word, Word) -- 3
-register(variables.Words, Words) -- 4
-register(variables.capital, capital) -- 5
-register(variables.Capital, Capital) -- 6
-register(variables.none, none) -- 7 (dummy)
-register(variables.random, random) -- 8
-register(variables.mixed, mixed) -- 9
+register(variables.WORD, WORD) -- 1
+register(variables.word, word) -- 2
+register(variables.Word, Word) -- 3
+register(variables.Words, Words) -- 4
+register(variables.capital,capital) -- 5
+register(variables.Capital,Capital) -- 6
+register(variables.none, none) -- 7 (dummy)
+register(variables.random, random) -- 8
+register(variables.mixed, mixed) -- 9
+register(variables.camel, camel) -- 10
-register(variables.cap, variables.capital) -- clone
-register(variables.Cap, variables.Capital) -- clone
+register(variables.cap, variables.capital) -- clone
+register(variables.Cap, variables.Capital) -- clone
function cases.handler(head) -- not real fast but also not used on much data
local lastfont = { }
local lastattr = nil
local done = false
- local start = head
+ local start = tonut(head)
+ local count = 0
+ local previd = nil
+ local prev = nil
while start do -- while because start can jump ahead
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local attr = start[a_cases]
- if attr and attr > 0 then
+ local attr = getattr(start,a_cases)
+ if attr and attr > 0 and not blocked[attr] then
if attr ~= lastattr then
lastattr = attr
+ count = 1
+ else
+ count = count + 1
end
- start[a_cases] = unsetvalue
+ setattr(start,a_cases,unsetvalue)
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
end
local action = actions[n] -- map back to low number
if action then
- start, ok = action(start,attr,lastfont,n)
+ start, ok = action(start,attr,lastfont,n,count)
if ok then
done = true
end
@@ -345,42 +324,88 @@ function cases.handler(head) -- not real fast but also not used on much data
end
end
elseif id == disc_code then
- local attr = start[a_cases]
- if attr and attr > 0 then
+ local attr = getattr(start,a_cases)
+ if attr and attr > 0 and not blocked[attr] then
if attr ~= lastattr then
lastattr = attr
+ count = 0
end
- start[a_cases] = unsetvalue
+ setattr(start,a_cases,unsetvalue)
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
end
local action = actions[n] -- map back to low number
if action then
- local replace = start.replace
+ local replace = getfield(start,"replace")
if replace then
- action(replace,attr,lastfont,n)
+ local cnt = count
+ for g in traverse_id(glyph_code,replace) do
+ cnt = cnt + 1
+ -- setattr(g,a_cases,unsetvalue)
+ local _, _, quit = action(start,attr,lastfont,n,cnt,"replace",g)
+ if quit then break end
+ end
end
- local pre = start.pre
+ local pre = getfield(start,"pre")
if pre then
- action(pre,attr,lastfont,n)
+ local cnt = count
+ for g in traverse_id(glyph_code,pre) do
+ cnt = cnt + 1
+ -- setattr(g,a_cases,unsetvalue)
+ local _, _, quit = action(start,attr,lastfont,n,cnt,"pre",g)
+ if quit then break end
+ end
end
- local post = start.post
+ local post = getfield(start,"post")
if post then
- action(post,attr,lastfont,n)
+ local cnt = count
+ for g in traverse_id(glyph_code,post) do
+ cnt = cnt + 1
+ -- setattr(g,a_cases,unsetvalue)
+ local _, _, quit = action(start,attr,lastfont,n,cnt,"post",g)
+ if quit then break end
+ end
end
end
+ count = count + 1
end
elseif id == math_code then
start = end_of_math(start)
+ count = 0
+ elseif previd == kern_code and getsubtype(prev) == kerning_code then
+ -- still inside a word ... normally kerns are added later
+ else
+ count = 0
end
- if start then -- why test
- start = start.next
+ if start then
+ prev = start
+ previd = id
+ start = getnext(start)
end
end
return head, done
end
+-- function cases.handler(head) -- let's assume head doesn't change ... no reason
+-- local done = false
+-- local lastfont = { }
+-- for first, last, size, attr in nuts.words(tonut(head),a_cases) do
+-- local n, id, m = get(attr)
+-- if lastfont[n] == nil then
+-- lastfont[n] = id
+-- end
+-- local action = actions[n]
+-- if action then
+-- local _, ok = action(first,attr,lastfont,n)
+-- if ok then
+-- done = true
+-- end
+-- end
+-- end
+-- return head, done
+-- end
+
local enabled = false
function cases.set(n,id)
@@ -407,4 +432,8 @@ end
-- interface
-commands.setcharactercasing = cases.set
+interfaces.implement {
+ name = "setcharactercasing",
+ actions = cases.set,
+ arguments = { "string", "integer" }
+}
diff --git a/tex/context/base/typo-cap.mkiv b/tex/context/base/typo-cap.mkiv
index c4458129f..2859ba104 100644
--- a/tex/context/base/typo-cap.mkiv
+++ b/tex/context/base/typo-cap.mkiv
@@ -43,6 +43,7 @@
\definecapitals[\v!WORD] % all lower
\definecapitals[\v!Word] % one upper + font
\definecapitals[\v!Words] % some upper
+\definecapitals[\v!camel] % lowers first
\definecapitals[\v!word][\c!style=] % nothing
%D \macros
@@ -76,7 +77,7 @@
% test \Word{test TEST \TeX} test
\unexpanded\def\setcharactercasing[#1]%
- {\ctxcommand{setcharactercasing("#1",\number\fontid\font)}}
+ {\clf_setcharactercasing{#1}\fontid\font}
% todo: names casings
@@ -84,6 +85,7 @@
\unexpanded\def\word {\groupedcommand{\setcharactercasing[\v!word ]}{}}
\unexpanded\def\Word {\groupedcommand{\setcharactercasing[\v!Word ]}{}}
\unexpanded\def\Words{\groupedcommand{\setcharactercasing[\v!Words]}{}}
+\unexpanded\def\camel{\groupedcommand{\setcharactercasing[\v!camel]}{}}
% This might become:
%
@@ -166,16 +168,13 @@
\def\typo_capitals_set_fake#1%
{\edef\currentcapitals{#1}%
- %\setcharactercasing[\currentcapitals]%
- \ctxcommand{setcharactercasing("\currentcapitals",\number\fontid\font)}%
- \signalcharacter % retain current style
+ \clf_setcharactercasing{\currentcapitals}\fontid\font
\usecapitalsstyleparameter\c!style}
\def\typo_capitals_set_real#1%
{\edef\currentcapitals{#1}%
\sc
- %\setcharactercasing[\currentcapitals]}
- \ctxcommand{setcharactercasing("\currentcapitals",\number\fontid\font)}}
+ \clf_setcharactercasing{\currentcapitals}\fontid\font}
\unexpanded\def\pseudosmallcapped{\groupedcommand{\typo_capitals_set_fake\v!WORD }\donothing} % all upper
\unexpanded\def\pseudoSmallcapped{\groupedcommand{\typo_capitals_set_fake\v!capital}\donothing} % one upper + font
diff --git a/tex/context/base/typo-chr.lua b/tex/context/base/typo-chr.lua
new file mode 100644
index 000000000..db8579c84
--- /dev/null
+++ b/tex/context/base/typo-chr.lua
@@ -0,0 +1,251 @@
+if not modules then modules = { } end modules ['typo-chr'] = {
+ version = 1.001,
+ comment = "companion to typo-bld.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local nodecodes = nodes.nodecodes
+-- local whatsitcodes = nodes.whatsitcodes
+-- local glyph_code = nodecodes.glyph
+-- local whatsit_code = nodecodes.whatsit
+-- local user_code = whatsitcodes.userdefined
+--
+-- local stringusernode = nodes.pool.userstring
+--
+-- local nuts = nodes.nuts
+-- local pool = nuts.pool
+--
+-- local tonut = nuts.tonut
+-- local tonode = nuts.tonode
+-- local getid = nuts.getid
+-- local getprev = nuts.getprev
+-- local getsubtype = nuts.getsubtype
+-- local getchar = nuts.getchar
+-- local getfield = nuts.getfield
+--
+-- local remove_node = nuts.remove
+-- local traverse_by_id = nuts.traverse_id
+--
+-- local signal = pool.userids.signal
+--
+-- local is_punctuation = characters.is_punctuation
+--
+-- local actions = {
+-- removepunctuation = function(head,n)
+-- local prev = getprev(n)
+-- if prev then
+-- if getid(prev) == glyph_code then
+-- if is_punctuation[getchar(prev)] then
+-- head = remove_node(head,prev,true)
+-- end
+-- end
+-- end
+-- return head
+-- end
+-- }
+--
+-- -- we can also use properties .. todo (saves pass)
+--
+-- typesetters.signals = { }
+--
+-- function typesetters.signals.handler(head)
+-- local h = tonut(head)
+-- local done = false
+-- for n in traverse_by_id(whatsit_code,h) do
+-- if getsubtype(n) == user_code and getfield(n,"user_id") == signal and getfield(n,"type") == 115 then
+-- local action = actions[getfield(n,"value")]
+-- if action then
+-- h = action(h,n)
+-- end
+-- h = remove_node(h,n,true)
+-- done = true
+-- end
+-- end
+-- if done then
+-- return tonode(h), true
+-- else
+-- return head
+-- end
+-- end
+--
+-- local enabled = false
+--
+-- local function signal(what)
+-- if not enabled then
+-- nodes.tasks.prependaction("processors","normalizers", "typesetters.signals.handler")
+-- enabled = true
+-- end
+-- context(stringusernode(signal,what))
+-- end
+--
+-- interfaces.implement {
+-- name = "signal",
+-- actions = signal,
+-- arguments = "string",
+-- }
+
+local insert, remove = table.insert, table.remove
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+local glyph_code = nodecodes.glyph
+local whatsit_code = nodecodes.whatsit
+local localpar_code = whatsitcodes.localpar
+
+local texnest = tex.nest
+local free_node = node.free
+local flush_list = node.flush_list
+
+local settexattribute = tex.setattribute
+local punctuation = characters.is_punctuation
+
+local variables = interfaces.variables
+local v_all = variables.all
+local v_reset = variables.reset
+
+local a_marked = attributes.numbers['marked']
+local lastmarked = 0
+local marked = {
+ [v_all] = 1,
+ [""] = 1,
+ [v_reset] = attributes.unsetvalue,
+}
+
+local stack = { }
+
+local function pickup()
+ local list = texnest[texnest.ptr]
+ if list then
+ local tail = list.tail
+ if tail and tail.id == glyph_code and punctuation[tail.char] then
+ local prev = tail.prev
+ list.tail = prev
+ if prev then
+ prev.next = nil
+ end
+ list.tail = prev
+ tail.prev = nil
+ return tail
+ end
+ end
+end
+
+local actions = {
+ remove = function(specification)
+ local n = pickup()
+ if n then
+ free_node(n)
+ end
+ end,
+ push = function(specification)
+ local n = pickup()
+ if n then
+ insert(stack,n or false)
+ end
+ end,
+ pop = function(specification)
+ local n = remove(stack)
+ if n then
+ context(n)
+ end
+ end,
+}
+
+local function pickuppunctuation(specification)
+ local action = actions[specification.action or "remove"]
+ if action then
+ action(specification)
+ end
+end
+
+-- I played with nested marked content but it makes no sense and gives
+-- complex code. Also, it's never needed so why bother.
+
+local function pickup(head,tail,str)
+ local attr = marked[str]
+ local last = tail
+ if last[a_marked] == attr then
+ local first = last
+ while true do
+ local prev = first.prev
+ if prev and prev[a_marked] == attr then
+ if prev.id == whatsit_code and prev.subtype == localpar_code then
+ break
+ else
+ first = prev
+ end
+ else
+ break
+ end
+ end
+ return first, last
+ end
+end
+
+local actions = {
+ remove = function(specification)
+ local list = texnest[texnest.ptr]
+ if list then
+ local head = list.head
+ local tail = list.tail
+ local first, last = pickup(head,tail,specification.mark)
+ if first then
+ if first == head then
+ list.head = nil
+ list.tail = nil
+ else
+ local prev = first.prev
+ list.tail = prev
+ prev.next = nil
+ end
+ flush_list(first)
+ end
+ end
+ end,
+}
+
+local function pickupmarkedcontent(specification)
+ local action = actions[specification.action or "remove"]
+ if action then
+ action(specification)
+ end
+end
+
+local function markcontent(str)
+ local currentmarked = marked[str or v_all]
+ if not currentmarked then
+ lastmarked = lastmarked + 1
+ currentmarked = lastmarked
+ marked[str] = currentmarked
+ end
+ settexattribute(a_marked,currentmarked)
+end
+
+interfaces.implement {
+ name = "pickuppunctuation",
+ actions = pickuppunctuation,
+ arguments = {
+ {
+ { "action" }
+ }
+ }
+}
+
+interfaces.implement {
+ name = "pickupmarkedcontent",
+ actions = pickupmarkedcontent,
+ arguments = {
+ {
+ { "action" },
+ { "mark" }
+ }
+ }
+}
+
+interfaces.implement {
+ name = "markcontent",
+ actions = markcontent,
+ arguments = "string",
+}
diff --git a/tex/context/base/typo-chr.mkiv b/tex/context/base/typo-chr.mkiv
new file mode 100644
index 000000000..dc0c68664
--- /dev/null
+++ b/tex/context/base/typo-chr.mkiv
@@ -0,0 +1,82 @@
+
+%D \module
+%D [ file=typo-chr,
+%D version=2015.01.01, % or about that time
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Cleaning Up Mess,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Characters}
+
+\unprotect
+
+%D This is a very experimental feature, mostly for Alan and me to play with in
+%D publication hell.
+%D
+%D \startbuffer
+%D before, after\par
+%D before,\removepunctuation after\par
+%D before\markcontent[gone]{\darkred gone}\removemarkedcontent[gone]after\par
+%D before\markcontent[kept]{\darkgreen kept}\removemarkedcontent[gone]after\par
+%D \markcontent[gone]{\darkred gone}\removemarkedcontent[gone]after\par
+%D \markcontent[kept]{\darkgreen kept}\removemarkedcontent[gone]after\par
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer\blank
+%D
+%D This feature is paragraph based and is only to be used for small snippets of text,
+%D for instance when combining bits and pieces where keeping state is complex compared
+%D to cleaning up unwanted stuff.
+
+\registerctxluafile{typo-chr}{1.001}
+
+\definesystemattribute[marked][public]
+
+\unexpanded\def\removepunctuation
+ {\clf_pickuppunctuation action{remove}\relax} % the first experiment
+
+\unexpanded\def\pushpunctuation
+ {\clf_pickuppunctuation action{push}\relax} % the first experiment
+
+\unexpanded\def\poppunctuation
+ {\clf_pickuppunctuation action{pop}\relax} % the first experiment
+
+\unexpanded\def\markcontent
+ {\dosingleempty\typo_marked_mark}
+
+\def\typo_marked_mark[#1]#2%
+ {\dontleavehmode
+ \bgroup
+ \clf_markcontent{#1}%
+ \bgroup
+ #2% double grouping makes aftergroups work ok
+ \egroup
+ \egroup}
+
+\unexpanded\def\startmarkedcontent
+ {\dontleavehmode
+ \bgroup
+ \dosingleempty\typo_marked_start}
+
+\def\typo_marked_start[#1]%
+ {\clf_markcontent{#1}%
+ % double grouping makes aftergroups work ok
+ \bgroup}
+
+\unexpanded\def\stopmarkedcontent
+ {\egroup
+ \egroup}
+
+\unexpanded\def\removemarkedcontent
+ {\dosingleempty\typo_marked_remove}
+
+\def\typo_marked_remove[#1]%
+ {\clf_pickupmarkedcontent action{remove}mark{#1}\relax}
+
+\protect \endinput
diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua
index 2aa05b6d1..7228e02c5 100644
--- a/tex/context/base/typo-cln.lua
+++ b/tex/context/base/typo-cln.lua
@@ -28,7 +28,15 @@ local tasks = nodes.tasks
local texsetattribute = tex.setattribute
-local traverse_id = node.traverse_id
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local setfield = nuts.setfield
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local traverse_id = nuts.traverse_id
local unsetvalue = attributes.unsetvalue
@@ -48,18 +56,18 @@ local resetter = { -- this will become an entry in char-def
function cleaners.handler(head)
local inline, done = false, false
- for n in traverse_id(glyph_code,head) do
- local char = n.char
+ for n in traverse_id(glyph_code,tonut(head)) do
+ local char = getchar(n)
if resetter[char] then
inline = false
elseif not inline then
- local a = n[a_cleaner]
+ local a = getattr(n,a_cleaner)
if a == 1 then -- currently only one cleaner so no need to be fancy
local upper = uccodes[char]
if type(upper) == "table" then
-- some day, not much chance that \SS ends up here
else
- n.char = upper
+ setfield(n,"char",upper)
done = true
if trace_autocase then
report_autocase("")
@@ -93,4 +101,8 @@ end
-- interface
-commands.setcharactercleaning = cleaners.set
+interfaces.implement {
+ name = "setcharactercleaning",
+ actions = cleaners.set,
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-cln.mkiv b/tex/context/base/typo-cln.mkiv
index 5306f614f..193198dc5 100644
--- a/tex/context/base/typo-cln.mkiv
+++ b/tex/context/base/typo-cln.mkiv
@@ -24,7 +24,7 @@
%D 1: Autocap first character of a line
\unexpanded\def\setcharactercleaning[#1]% This name might change!
- {\ctxcommand{setcharactercleaning("#1")}} % also accepts "reset"
+ {\clf_setcharactercleaning{#1}} % also accepts "reset"
% \appendtoks
% \attribute\cleanerattribute\attributeunsetvalue
diff --git a/tex/context/base/typo-del.mkiv b/tex/context/base/typo-del.mkiv
index 603471f75..4c3c5ab3d 100644
--- a/tex/context/base/typo-del.mkiv
+++ b/tex/context/base/typo-del.mkiv
@@ -70,23 +70,23 @@
{\ifcase\boundarycharactermode
\or
%\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
- \languageparameter#1%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
+ \usedlanguageparameter#1%
%\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\fi
\boundarycharactermode\plusone}
\unexpanded\def\leftboundarycharacter#1#2%
{\ifcase\boundarycharactermode
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\fi
\boundarycharactermode\plusone}
@@ -94,10 +94,10 @@
{\ifcase\boundarycharactermode
\or
\prewordbreak %\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
- \languageparameter#1%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
+ \usedlanguageparameter#1%
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\fi
\boundarycharactermode\plusone}
@@ -142,13 +142,13 @@
{\beforesubsentence
\ifdim\lastkern=\d_typo_subsentence_signal
\unskip
- \kern\hspaceamount\currentlanguage{intersentence}%
+ \kern\hspaceamount\currentusedlanguage{intersentence}%
\fi
\global\advance\c_typo_subsentence_nesting\plusone
\ifnum\c_typo_subsentence_nesting=\plusone
\dontleavehmode % was \leaveoutervmode
\fi
- \dostarttagged\t!subsentence\empty
+ \dostarttagged\t!subsentence\empty % no chain
\symbol[\ifodd\c_typo_subsentence_nesting\c!leftsentence\else\c!leftsubsentence\fi]%
}% \ignorespaces}
@@ -166,7 +166,7 @@
\unexpanded\def\endofsubsentencespacing
{\ifdim\lastkern=\d_typo_subsentence_signal
\unskip
- \hskip\hspaceamount\currentlanguage{intersentence}%
+ \hskip\hspaceamount\currentusedlanguage{intersentence}%
% no good, actually language dependent:
% \ignorespaces
\else
@@ -193,8 +193,8 @@
%definehspace [quote] [\zeropoint]
%definehspace [speech] [\zeropoint]
-\definehspace [quote] [\hspaceamount\currentlanguage{quotation}]
-\definehspace [speech] [\hspaceamount\currentlanguage{quotation}]
+\definehspace [quote] [\hspaceamount\currentusedlanguage{quotation}]
+\definehspace [speech] [\hspaceamount\currentusedlanguage{quotation}]
\definesymbol
[\c!leftquotation]
@@ -205,6 +205,14 @@
[\rightboundarycharacter\c!rightquotation{quotation}]
\definesymbol
+ [\c!nextleftquotation]
+ [\rightboundarycharacter\c!leftquotation{quotation}]
+
+\definesymbol
+ [\c!nextrightquotation]
+ [\leftboundarycharacter\c!rightquotation{quotation}]
+
+\definesymbol
[\c!leftquote]
[\leftboundarycharacter\c!leftquote{quote}]
@@ -248,6 +256,7 @@
\def\typo_delimited_push#1%
{\globalpushmacro\currentdelimitedtext
\def\currentdelimitedtext{#1}%
+ \setlanguageparameter\delimitedtextparameter
\let\currentparentdelimitedtext\currentdelimitedtext
\global\advance\c_typo_delimited_nesting\plusone
\edef\delimitedtextlevel{\number\c_typo_delimited_nesting}%
@@ -297,9 +306,9 @@
\unexpanded\def\startdelimitedtext[#1]%
{\begingroup
\typo_delimited_push{#1}%
- \dostarttagged\t!delimitedblock\currentdelimitedtext
- \edef\p_method{\delimitedtextparameter\c!method}%
- \ifx\p_method\s!font
+ \dostarttaggedchained\t!delimitedblock\currentdelimitedtext\??delimitedtext
+ \edef\p_delimited_method{\delimitedtextparameter\c!method}%
+ \ifx\p_delimited_method\s!font
\expandafter\typo_delimited_start_font
\else
\expandafter\typo_delimited_start_other
@@ -313,16 +322,16 @@
\ignorespaces}
\def\typo_delimited_start_other
- {\edef\p_repeat{\delimitedtextparameter\c!repeat}%
- \ifx\p_repeat\v!yes
+ {\edef\p_delimited_repeat{\delimitedtextparameter\c!repeat}%
+ \ifx\p_delimited_repeat\v!yes
\let\typo_delimited_repeat\typo_delimited_repeat_ideed
\else
\let\typo_delimited_repeat\relax
\fi
- \edef\p_location{\delimitedtextparameter\c!location}%
- \ifx\p_location\v!paragraph
+ \edef\p_delimited_location{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_location\v!paragraph
\singleexpandafter\typo_delimited_start_par
- \else\ifx\p_location\v!margin
+ \else\ifx\p_delimited_location\v!margin
\doubleexpandafter\typo_delimited_start_par
\else
\doubleexpandafter\typo_delimited_start_txt
@@ -338,8 +347,10 @@
\def\typo_delimited_start_par_indeed[#1]%
{\let\typo_delimited_stop\typo_delimited_stop_par
- \doifsomething{\delimitedtextparameter\c!spacebefore}
- {\blank[\delimitedtextparameter\c!spacebefore]}%
+ \edef\p_delimited_spacebefore{\delimitedtextparameter\c!spacebefore}%
+ \ifx\p_delimited_spacebefore\empty \else
+ \blank[\p_delimited_spacebefore]%
+ \fi
\delimitedtextparameter\c!before
\edef\m_typo_delimited_narrower{#1}%
\ifx\m_typo_delimited_narrower\empty
@@ -353,11 +364,21 @@
\fi
% so far
\pushmacro\checkindentation
- \doifsomething{\delimitedtextparameter\c!indenting} % WS
- {\setupindenting[\delimitedtextparameter\c!indenting]}%
+ \useindentingparameter\delimitedtextparameter
+ %
\begingroup
\usedelimitedtextstyleandcolor\c!style\c!color
+ %
+ \edef\p_delimited_left {\delimitedtextparameter{\c!left}}%
+ \edef\p_delimited_right {\delimitedtextparameter{\c!right}}%
+ \edef\p_delimited_nextleft {\delimitedtextparameter{\c!nextleft}}%
+ \edef\p_delimited_nextright{\delimitedtextparameter{\c!nextright}}%
+ %
\leftdelimitedtextmark
+ %
+ \setnextleftdelimitedtextmark
+ \setnextrightdelimitedtextmark
+ %
\ignorespaces}
\def\typo_delimited_stop_par
@@ -368,10 +389,12 @@
\popmacro\checkindentation
\typo_delimited_stop_par_indeed
\delimitedtextparameter\c!after
- \doifsomething{\delimitedtextparameter\c!spaceafter}
- {\blank[\delimitedtextparameter\c!spaceafter]}%
+ \edef\p_delimited_spaceafter{\delimitedtextparameter\c!spaceafter}%
+ \ifx\p_delimited_spaceafter\empty \else
+ \blank[\p_delimited_spaceafter]%
+ \fi
\useindentnextparameter\delimitedtextparameter
- \dorechecknextindentation}% AM: This was missing!
+ \aftergroup\dorechecknextindentation}% AM: This was missing!
\def\typo_delimited_start_txt
{\let\typo_delimited_stop\typo_delimited_stop_txt
@@ -394,18 +417,18 @@
\unexpanded\def\delimitedtext[#1]%
{\dontleavehmode % following ones can be omitted
\typo_delimited_push{#1}%
- \edef\p_method{\delimitedtextparameter\c!method}%
- \ifx\p_method\s!font
+ \edef\p_delimited_method{\delimitedtextparameter\c!method}%
+ \ifx\p_delimited_method\s!font
\expandafter\typo_delimited_fontdriven
\else
\expandafter\typo_delimited_other
\fi}
\def\typo_delimited_other
- {\edef\p_location{\delimitedtextparameter\c!location}%
- \ifx\p_location\v!paragraph
+ {\edef\p_delimited_location{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_location\v!paragraph
\singleexpandafter\typo_delimited_par
- \else\ifx\p_location\v!margin
+ \else\ifx\p_delimited_location\v!margin
\doubleexpandafter\typo_delimited_par
\else
\doubleexpandafter\typo_delimited_txt
@@ -417,16 +440,89 @@
\unexpanded\def\stopdelimited {\stopdelimitedtext} % no let, dynamically assigned
\def\delimited {\delimitedtext}
+% todo: \dostarttagged\t!nothing\empty % for left/right boxes
+
+%D We have 4 different location and symbol handlers (two pairs):
+%D
+%D \starttyping
+%D \input tufte \startquotation \input tufte \stopquotation
+%D
+%D \setupdelimitedtext
+%D [quotation]
+%D [nextleft=right,
+%D nextright=left]
+%D
+%D \input tufte \startquotation \input tufte \stopquotation
+%D
+%D \setupdelimitedtext
+%D [quotation]
+%D [nextleft={\symbol[nextleftquotation]},
+%D nextright={\symbol[nextrightquotation]}]
+%D
+%D \input tufte \startquotation \input tufte \stopquotation
+%D \stoptyping
+
+\unexpanded\def\setnextleftdelimitedtextmark
+ {\ifx\p_delimited_nextleft\empty
+ % nothing
+ \else\ifx\p_delimited_nextleft\v!left
+ \typo_delimited_nextleft_symbol\p_delimited_left
+ \else\ifx\p_delimited_nextleft\v!right
+ \typo_delimited_nextleft_symbol\p_delimited_right
+ \else
+ \typo_delimited_nextleft_symbol\p_delimited_nextleft
+ \fi\fi\fi}
+
+\unexpanded\def\setnextrightdelimitedtextmark
+ {\ifx\p_delimited_nextright\empty
+ % nothing
+ \else\ifx\p_delimited_nextright\v!right
+ \typo_delimited_nextright_symbol\p_delimited_right
+ \else\ifx\p_delimited_nextright\v!left
+ \typo_delimited_nextright_symbol\p_delimited_left
+ \else
+ \typo_delimited_nextright_symbol\p_delimited_nextright
+ \fi\fi\fi}
+
\unexpanded\def\leftdelimitedtextmark
- {\doifsomething{\delimitedtextparameter\c!left}
- {\setbox\scratchbox\hbox{\delimitedtextparameter\c!left}%
- \dontleavehmode
- \doif{\delimitedtextparameter\c!location}\v!margin{\hskip-\wd\scratchbox}%
- \box\scratchbox}}
+ {\ifx\p_delimited_left\empty
+ % nothing
+ \else
+ \typo_delimited_left_symbol\p_delimited_left
+ \fi}
\unexpanded\def\rightdelimitedtextmark
- {\doifsomething{\delimitedtextparameter\c!right}
- {\hsmash{\delimitedtextparameter\c!right}}}
+ {\ifx\p_delimited_right\empty
+ % nothing
+ \else
+ \typo_delimited_right_symbol\p_delimited_right
+ \fi}
+
+\def\typo_delimited_left_symbol#1%
+ {\setbox\scratchbox\hbox{\usedelimitedtextstyleandcolor\c!symstyle\c!symcolor#1}%
+ \dontleavehmode
+ \edef\p_delimited_margin{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_margin\v!margin
+ \hskip-\wd\scratchbox
+ \fi
+ \box\scratchbox}
+
+\def\typo_delimited_right_symbol#1%
+ {\hsmash{\usedelimitedtextstyleandcolor\c!symstyle\c!symcolor#1}}
+
+\def\typo_delimited_nextleft_symbol#1%
+ {\localleftbox\bgroup
+ \swapmacros\leftboundarycharacter\rightboundarycharacter
+ \boundarycharactermode\plusone
+ \typo_delimited_left_symbol#1%
+ \egroup}
+
+\def\typo_delimited_nextright_symbol#1%
+ {\localrightbox\bgroup
+ \swapmacros\leftboundarycharacter\rightboundarycharacter
+ \boundarycharactermode\plusone
+ \typo_delimited_right_symbol#1%
+ \egroup}
% \starttext
% \hyphenatedword{groepsvrijstellingsverordeningen}\par
@@ -443,11 +539,12 @@
\def\typo_delimited_handle_middle#1%
{\begingroup
+ \usedelimitedtextstyleandcolor\c!symstyle\c!symcolor
\setbox\scratchbox\hbox{\delimitedtextparameter#1}%
\ifdim\wd\scratchbox>\zeropoint
\ifdim\lastkern=\d_typo_delimited_signal
\unkern
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\fi
\ifhmode % else funny pagebreaks
\penalty\plustenthousand
@@ -462,14 +559,15 @@
\def\typo_delimited_handle_left#1%
{\begingroup
+ \usedelimitedtextstyleandcolor\c!symstyle\c!symcolor
\setbox\scratchbox\hbox{\delimitedtextparameter#1}%
\ifdim\wd\scratchbox>\zeropoint
\ifdim\lastkern=\d_typo_delimited_signal
\unkern
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\else\ifdim\lastskip=\d_typo_delimited_signal
\unskip
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\fi\fi
\strut % new, needed below
\ifhmode % else funny pagebreaks
@@ -485,16 +583,17 @@
\def\typo_delimited_handle_right#1%
{\begingroup
+ \usedelimitedtextstyleandcolor\c!symstyle\c!symcolor
\setbox\scratchbox\hbox{\delimitedtextparameter#1}%
\ifdim\wd\scratchbox>\zeropoint
\ifdim\lastkern=\d_typo_delimited_signal
\unkern
\penalty\plustenthousand
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\else\ifdim\lastskip=\d_typo_delimited_signal
\unskip
\penalty\plustenthousand
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\fi\fi
\ifhmode % else funny pagebreaks
\penalty\plustenthousand
@@ -508,21 +607,57 @@
\unexpanded\def\typo_delimited_par
{\groupedcommand
- {\dostarttagged\t!delimited\currentdelimitedtext % block?
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext % block?
\typo_delimited_handle_left\c!left}
{\typo_delimited_handle_right\c!right
\removelastskip
\dostoptagged
\typo_delimited_pop}}
+% \unexpanded\def\typo_delimited_txt
+% {\doifelse{\delimitedtextparameter\c!style}\v!normal
+% \typo_delimited_quoted
+% \typo_delimited_attributed}
+%
+% \def\typo_delimited_quoted_b
+% {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+% \typo_delimited_handle_left\c!left}
+%
+% \def\typo_delimited_quoted_e
+% {\typo_delimited_handle_right\c!right
+% \removelastskip
+% \dostoptagged
+% \typo_delimited_pop}
+%
+% \def\typo_delimited_attributed_b
+% {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+% \usedelimitedtextstyleandcolor\c!style\c!color}
+%
+% \def\typo_delimited_attributed_e
+% {\dostoptagged
+% \typo_delimited_pop}
+%
+% \def\typo_delimited_fontdriven_b
+% {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+% \languageparameter{\c!left\currentparentdelimitedtext}}% was: \currentdelimitedtext
+%
+% \def\typo_delimited_fontdriven_e
+% {\languageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
+% \dostoptagged
+% \typo_delimited_pop}
+
\unexpanded\def\typo_delimited_txt
- {\doifelse{\delimitedtextparameter\c!style}\v!normal
- \typo_delimited_quoted
- \typo_delimited_attributed}
+ {\edef\p_left_right{\delimitedtextparameter\c!left\delimitedtextparameter\c!right}%
+ \ifx\p_left_right\empty
+ \expandafter\typo_delimited_attributed
+ \else
+ \expandafter\typo_delimited_quoted
+ \fi}
\def\typo_delimited_quoted_b
- {\dostarttagged\t!delimited\currentdelimitedtext
- \typo_delimited_handle_left\c!left}
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+ \typo_delimited_handle_left\c!left
+ \usedelimitedtextstyleandcolor\c!style\c!color}
\def\typo_delimited_quoted_e
{\typo_delimited_handle_right\c!right
@@ -531,7 +666,7 @@
\typo_delimited_pop}
\def\typo_delimited_attributed_b
- {\dostarttagged\t!delimited\currentdelimitedtext
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
\usedelimitedtextstyleandcolor\c!style\c!color}
\def\typo_delimited_attributed_e
@@ -539,11 +674,12 @@
\typo_delimited_pop}
\def\typo_delimited_fontdriven_b
- {\dostarttagged\t!delimited\currentdelimitedtext
- \languageparameter{\c!left\currentparentdelimitedtext}}% was: \currentdelimitedtext
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+ \usedlanguageparameter{\c!left\currentparentdelimitedtext}% was: \currentdelimitedtext
+ \usedelimitedtextstyleandcolor\c!style\c!color}
\def\typo_delimited_fontdriven_e
- {\languageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
+ {\usedlanguageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
\dostoptagged
\typo_delimited_pop}
diff --git a/tex/context/base/typo-dha.lua b/tex/context/base/typo-dha.lua
index d5ad66e7e..904b774ec 100644
--- a/tex/context/base/typo-dha.lua
+++ b/tex/context/base/typo-dha.lua
@@ -49,13 +49,30 @@ local trace_directions = false trackers.register("typesetters.directions.defa
local report_directions = logs.reporter("typesetting","text directions")
-
-local insert_node_before = nodes.insert_before
-local insert_node_after = nodes.insert_after
-local remove_node = nodes.remove
-local end_of_math = nodes.end_of_math
-
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local nutstring = nuts.tostring
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local end_of_math = nuts.end_of_math
+
+local nodepool = nuts.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
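The recurring pattern in these typo-* patches is the switch from direct node field access to the nuts accessor layer: convert the incoming head with tonut, use getters and setters inside the loop, and convert back with tonode before returning. A minimal sketch of that conversion, using a hypothetical glyph-counting handler that is not part of the patch:

    local nuts       = nodes.nuts
    local tonut      = nuts.tonut
    local tonode     = nuts.tonode
    local getid      = nuts.getid
    local getnext    = nuts.getnext
    local glyph_code = nodes.nodecodes.glyph

    local function count_glyphs(head)      -- hypothetical helper
        head = tonut(head)                 -- work on the fast nut userdata
        local n, current = 0, head
        while current do
            if getid(current) == glyph_code then
                n = n + 1                  -- was: current.id == glyph_code
            end
            current = getnext(current)     -- was: current = current.next
        end
        return tonode(head), n             -- hand a proper node back to callers
    end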
@@ -108,7 +125,7 @@ end
local function process(start)
- local head = start
+ local head = tonut(start) -- we have a global head
local current = head
local inserted = nil
@@ -167,9 +184,9 @@ local function process(start)
finidir = finish
end
if embedded <= 0 then
- finish, autodir, done = "TRT", -1
+ finish, autodir = "TRT", -1
else
- finish, autodir, done = "TLT", 1
+ finish, autodir = "TLT", 1
end
done = true
if finidir == finish then
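The rewrite above also repairs a small Lua pitfall: a multiple assignment with fewer values than targets sets the trailing targets to nil, so the old form quietly cleared done instead of setting it. A minimal illustration, not taken from the patch:

    local finish, autodir, done = "TRT", -1   -- done ends up nil, not true
    finish, autodir = "TRT", -1               -- assign the pair ...
    done = true                               -- ... and set the flag explicitly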
@@ -180,31 +197,31 @@ local function process(start)
end
local function nextisright(current)
- current = current.next
- local id = current.id
+ current = getnext(current)
+ local id = getid(current)
if id == glyph_code then
- local character = current.char
+ local character = getchar(current)
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
local function previsright(current)
- current = current.prev
- local id = current.id
+ current = getprev(current)
+ local id = getid(current)
if id == glyph_code then
- local char = current.char
+ local character = getchar(current)
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
while current do
- local id = current.id
+ local id = getid(current)
if id == math_code then
- current = end_of_math(current.next).next
+ current = getnext(end_of_math(getnext(current)))
else
- local attr = current[a_directions]
+ local attr = getattr(current,a_directions)
if attr and attr > 0 and attr ~= prevattr then
if not getglobal(a) then
lro, rlo = false, false
@@ -213,7 +230,7 @@ local function process(start)
end
if id == glyph_code then
if attr and attr > 0 then
- local character = current.char
+ local character = getchar(current)
local direction = chardirections[character]
local reversed = false
if rlo or override > 0 then
@@ -223,24 +240,24 @@ local function process(start)
end
elseif lro or override < 0 then
if direction == "r" or direction == "al" then
- current[a_state] = s_isol
+ setprop(current,a_state,s_isol)
direction = "l"
reversed = true
end
end
if direction == "on" then
local mirror = charmirrors[character]
- if mirror and fontchar[current.font][mirror] then
+ if mirror and fontchar[getfont(current)][mirror] then
local class = charclasses[character]
if class == "open" then
if nextisright(current) then
if autodir >= 0 then
force_auto_right_before(direction)
end
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
elseif autodir < 0 then
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
else
mirror = false
@@ -251,14 +268,14 @@ local function process(start)
local fencedir = fences[#fences]
fences[#fences] = nil
if fencedir < 0 then
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
force_auto_right_before(direction)
else
mirror = false
end
elseif autodir < 0 then
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
else
mirror = false
@@ -329,16 +346,16 @@ local function process(start)
top = top - 1
end
obsolete[#obsolete+1] = current
- else
+ elseif trace_directions then
setcolor(current)
end
else
-- we do nothing
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- local dir = current.dir
+ local dir = getfield(current,"dir")
if dir == 'TRT' then
autodir = -1
elseif dir == 'TLT' then
@@ -351,7 +368,7 @@ local function process(start)
if finish then
finish_auto_before()
end
- local dir = current.dir
+ local dir = getfield(current,"dir")
if dir == "+TRT" then
finish, autodir = "TRT", -1
elseif dir == "-TRT" then
@@ -370,7 +387,7 @@ local function process(start)
elseif finish then
finish_auto_before()
end
- local cn = current.next
+ local cn = getnext(current)
if cn then
-- we're okay
elseif finish then
@@ -390,7 +407,7 @@ local function process(start)
end
end
- return head, done
+ return tonode(head), done
end
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index ef05e62da..c753a0352 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -19,10 +19,24 @@ local report_digits = logs.reporter("typesetting","digits")
local nodes, node = nodes, node
-local hpack_node = node.hpack
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local hpack_node = nuts.hpack
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
@@ -30,7 +44,7 @@ local unsetvalue = attributes.unsetvalue
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local tasks = nodes.tasks
local new_glue = nodepool.glue
@@ -66,16 +80,20 @@ function nodes.aligned(head,start,stop,width,how)
if how == "flushleft" or how == "middle" then
head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
end
- local prv, nxt = start.prev, stop.next
- start.prev, stop.next = nil, nil
+ local prv = getprev(start)
+ local nxt = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
if prv then
- prv.next, packed.prev = packed, prv
+ setfield(prv,"next",packed)
+ setfield(packed,"prev",prv)
end
if nxt then
- nxt.prev, packed.next = packed, nxt
+ setfield(nxt,"prev",packed)
+ setfield(packed,"next",nxt)
end
- if packed.prev then
+ if getprev(packed) then
return head, packed
else
return packed, packed
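The aligned helper detaches the run start..stop, packs it into a single hbox, and splices that box back where the run used to be. A compressed sketch of the splice with the same nuts setters, shown only to make the pattern easier to read than the interleaved diff lines:

    local prv, nxt = getprev(start), getnext(stop)
    setfield(start,"prev",nil)                    -- detach the run
    setfield(stop,"next",nil)
    local packed = hpack_node(start,width,"exactly")
    if prv then setfield(prv,"next",packed) setfield(packed,"prev",prv) end
    if nxt then setfield(nxt,"prev",packed) setfield(packed,"next",nxt) end
    -- when prv is nil, the packed box becomes the new head of the list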
@@ -83,16 +101,16 @@ function nodes.aligned(head,start,stop,width,how)
end
actions[1] = function(head,start,attr)
- local font = start.font
- local char = start.char
- local unic = chardata[font][char].tounicode
- local what = unic and tonumber(unic,16) or char
- if charbase[what].category == "nd" then
- local oldwidth, newwidth = start.width, getdigitwidth(font)
+ local font = getfont(start)
+ local char = getchar(start)
+ local unic = chardata[font][char].unicode or char
+ if charbase[unic].category == "nd" then -- ignore unic tables
+ local oldwidth = getfield(start,"width")
+ local newwidth = getdigitwidth(font)
if newwidth ~= oldwidth then
if trace_digits then
report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
- attr%100,div(attr,100),char,what,newwidth-oldwidth)
+ attr%100,div(attr,100),char,unic,newwidth-oldwidth)
end
head, start = nodes.aligned(head,start,start,newwidth,"middle")
return head, start, true
@@ -102,12 +120,13 @@ actions[1] = function(head,start,attr)
end
function digits.handler(head)
+ head = tonut(head)
local done, current, ok = false, head, false
while current do
- if current.id == glyph_code then
- local attr = current[a_digits]
+ if getid(current) == glyph_code then
+ local attr = getattr(current,a_digits)
if attr and attr > 0 then
- current[a_digits] = unsetvalue
+ setattr(current,a_digits,unsetvalue)
local action = actions[attr%100] -- map back to low number
if action then
head, current, ok = action(head,current,attr)
@@ -117,9 +136,11 @@ function digits.handler(head)
end
end
end
- current = current and current.next
+ if current then
+ current = getnext(current)
+ end
end
- return head, done
+ return tonode(head), done
end
local m, enabled = 0, false -- a trick to make neighbouring ranges work
@@ -152,4 +173,8 @@ end
-- interface
-commands.setdigitsmanipulation = digits.set
+interfaces.implement {
+ name = "setdigitsmanipulation",
+ actions = digits.set,
+ arguments = "string"
+}
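Registrations like this replace the old commands.* table entries: interfaces.implement scans the declared arguments at the Lua end and exposes the action to TeX, which is why the mkiv files below switch from \ctxcommand{...} to \clf_setdigitsmanipulation{#1}. A hedged sketch with a hypothetical name, not taken from the patch:

    local report = logs.reporter("typesetting","demo")

    interfaces.implement {
        name      = "mymanipulation",      -- hypothetical
        arguments = "string",
        actions   = function(s) report("got %a",s) end,
    }
    -- at the TeX end this is then called as \clf_mymanipulation{...}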
diff --git a/tex/context/base/typo-dig.mkiv b/tex/context/base/typo-dig.mkiv
index 71425c594..aa610b3c9 100644
--- a/tex/context/base/typo-dig.mkiv
+++ b/tex/context/base/typo-dig.mkiv
@@ -39,7 +39,7 @@
%D \stoplines
\unexpanded\def\setdigitsmanipulation[#1]%
- {\ctxcommand{setdigitsmanipulation("#1")}}
+ {\clf_setdigitsmanipulation{#1}}
\unexpanded\def\resetdigitsmanipulation
{\attribute\digitsattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua
index a04028452..482b7114d 100644
--- a/tex/context/base/typo-dir.lua
+++ b/tex/context/base/typo-dir.lua
@@ -33,85 +33,43 @@ local formatters = string.formatters
local nodes, node = nodes, node
-local trace_textdirections = false trackers.register("typesetters.directions.text", function(v) trace_textdirections = v end)
-local trace_mathdirections = false trackers.register("typesetters.directions.math", function(v) trace_mathdirections = v end)
-local trace_directions = false trackers.register("typesetters.directions", function(v) trace_textdirections = v trace_mathdirections = v end)
+local trace_textdirections = false trackers.register("typesetters.directions.text", function(v) trace_textdirections = v end)
+local trace_mathdirections = false trackers.register("typesetters.directions.math", function(v) trace_mathdirections = v end)
+local trace_directions = false trackers.register("typesetters.directions", function(v) trace_textdirections = v trace_mathdirections = v end)
local report_textdirections = logs.reporter("typesetting","text directions")
local report_mathdirections = logs.reporter("typesetting","math directions")
+local hasbit = number.hasbit
+local texsetattribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+local tasks = nodes.tasks
+local tracers = nodes.tracers
+local setcolor = tracers.colors.set
+local resetcolor = tracers.colors.reset
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local end_of_math = nodes.end_of_math
+local implement = interfaces.implement
-local texsetattribute = tex.setattribute
-local texsetcount = tex.setcount
-local unsetvalue = attributes.unsetvalue
+local directions = typesetters.directions or { }
+typesetters.directions = directions
-local hasbit = number.hasbit
+local a_directions = attributes.private('directions')
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local mathcodes = nodes.mathcodes
+local variables = interfaces.variables
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_on = variables.on
+local v_yes = variables.yes
-local tasks = nodes.tasks
-local tracers = nodes.tracers
-local setcolor = tracers.colors.set
-local resetcolor = tracers.colors.reset
+local m_enabled = 2^6 -- 64
+local m_global = 2^7
+local m_fences = 2^8
-local glyph_code = nodecodes.glyph
-local whatsit_code = nodecodes.whatsit
-local math_code = nodecodes.math
-local penalty_code = nodecodes.penalty
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local nodepool = nodes.pool
-
-local new_textdir = nodepool.textdir
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local fontchar = fonthashes.characters
-
-local chardirections = characters.directions
-local charmirrors = characters.mirrors
-local charclasses = characters.textclasses
-
-local directions = typesetters.directions or { }
-typesetters.directions = directions
-
-local a_state = attributes.private('state')
-local a_directions = attributes.private('directions')
-local a_mathbidi = attributes.private('mathbidi')
-
-local strip = false
-
-local s_isol = fonts.analyzers.states.isol
-
-local variables = interfaces.variables
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_on = variables.on
-local v_yes = variables.yes
-
-local m_enabled = 2^6 -- 64
-local m_global = 2^7
-local m_fences = 2^8
-
-local handlers = { }
-local methods = { }
-local lastmethod = 0
+local handlers = { }
+local methods = { }
+local lastmethod = 0
local function installhandler(name,handler)
local method = methods[name]
@@ -181,9 +139,17 @@ function directions.setcolor(current,direction,reversed,mirror)
end
end
-function commands.getbidimode(specification)
- context(tomode(specification)) -- hash at tex end
-end
+implement {
+ name = "getbidimode",
+ actions = { tomode, context },
+ arguments = {
+ {
+ { "scope" },
+ { "method" },
+ { "fences" },
+ }
+ }
+}
local enabled = false
@@ -234,4 +200,8 @@ function directions.set(n) -- todo: names and numbers
texsetattribute(a_directions,n)
end
-commands.setdirection = directions.set
+implement {
+ name = "setdirection",
+ arguments = "integer",
+ actions = directions.set
+}
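The bidi mode kept here is a small bit set: the imports above suggest that m_enabled, m_global and m_fences are combined into one number that the TeX end caches per setup and that the handlers later test with number.hasbit. A rough decoding sketch using the values from the patch; the helper name is illustrative only:

    local hasbit    = number.hasbit
    local m_enabled = 2^6   -- 64
    local m_global  = 2^7   -- 128
    local m_fences  = 2^8   -- 256

    local function describe(mode)   -- hypothetical helper
        return hasbit(mode,m_enabled), hasbit(mode,m_global), hasbit(mode,m_fences)
    end

    print(describe(m_enabled + m_fences))   -- true  false  true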
diff --git a/tex/context/base/typo-dir.mkiv b/tex/context/base/typo-dir.mkiv
index 0362af56c..f9b4ecb97 100644
--- a/tex/context/base/typo-dir.mkiv
+++ b/tex/context/base/typo-dir.mkiv
@@ -13,6 +13,9 @@
\writestatus{loading}{ConTeXt Typesetting Macros / Directions}
+%D At some point we might default to method 'two' but first I need to make it more
+%D efficient (and provide some options). I also want to have some basic tracing.
+
\unprotect
\registerctxluafile{typo-dir}{1.001}
@@ -33,10 +36,14 @@
\edef\righttoleftmark{\normalUchar"200F} \let\rlm\righttoleftmark
\unexpanded\def\setdirection[#1]% todo: symbolic names
- {\ctxcommand{setdirection(\number#1)}}
+ {\clf_setdirection#1\relax}
+
+% \unexpanded\def\resetdirection
+% {\clf_setdirection\zerocount}
+%
+% is in fact:
\unexpanded\def\resetdirection
- %{\ctxcommand{setdirection(0)}}
{\attribute\directionsattribute\attributeunsetvalue}
\newconstant\directionsbidimode % this one might become private
@@ -46,12 +53,14 @@
% \setupdirections[bidi=global,method=two]
% \setupdirections[bidi=global,method=two,fences=no]
+% maybe use chardefs
+
\def\typo_dir_get_mode
- {\def\currentbidimode{\ctxcommand{getbidimode {
- scope = "\directionsparameter\c!bidi ",
- method = "\directionsparameter\c!method",
- fences = "\directionsparameter\c!fences",
- }}}%
+ {\def\currentbidimode{\clf_getbidimode
+ scope {\directionsparameter\c!bidi}%
+ method {\directionsparameter\c!method}%
+ fences {\directionsparameter\c!fences}%
+ }%
\expandafter\glet\csname\??directionsbidimode\currentbidistamp\endcsname\currentbidimode}
\appendtoks
diff --git a/tex/context/base/typo-drp.lua b/tex/context/base/typo-drp.lua
index 903140dae..4bbf0b8e9 100644
--- a/tex/context/base/typo-drp.lua
+++ b/tex/context/base/typo-drp.lua
@@ -11,9 +11,7 @@ if not modules then modules = { } end modules ['typo-drp'] = {
local tonumber, type, next = tonumber, type, next
local ceil = math.ceil
-
-local utfbyte = utf.byte
-local utfchar = utf.char
+local settings_to_hash = utilities.parsers.settings_to_hash
local trace_initials = false trackers.register("typesetters.initials", function(v) trace_initials = v end)
local report_initials = logs.reporter("nodes","initials")
@@ -24,19 +22,41 @@ typesetters.initials = initials or { }
local nodes = nodes
local tasks = nodes.tasks
-local hpack_nodes = nodes.hpack
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local hpack_nodes = nuts.hpack
+
local nodecodes = nodes.nodecodes
local whatsitcodes = nodes.whatsitcodes
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_kern = nodepool.kern
-local insert_before = nodes.insert_before
-local insert_after = nodes.insert_after
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+local remove_node = nuts.remove
+local traverse_id = nuts.traverse_id
+local traverse = nuts.traverse
+local free_node = nuts.free
local variables = interfaces.variables
local v_default = variables.default
local v_margin = variables.margin
+local v_auto = variables.auto
+local v_first = variables.first
+local v_last = variables.last
local texget = tex.get
local texsetattribute = tex.setattribute
@@ -44,7 +64,8 @@ local unsetvalue = attributes.unsetvalue
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
-local kern_node = nodecodes.kern
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
local whatsit_code = nodecodes.whatsit
local localpar_code = whatsitcodes.localpar
@@ -56,6 +77,8 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
+local category = characters.category
+
local settings = nil
function initials.set(specification)
@@ -68,7 +91,27 @@ function initials.set(specification)
texsetattribute(a_initial,1)
end
-commands.setinitial = initials.set
+interfaces.implement {
+ name = "setinitial",
+ actions = initials.set,
+ arguments = {
+ {
+ { "location" },
+ { "enabled", "boolean" },
+ { "method" },
+ { "distance" ,"dimen" },
+ { "hoffset" ,"dimen" },
+ { "voffset" ,"dimen" },
+ { "font", "integer" },
+ { "dynamic", "integer" },
+ { "ca", "integer" },
+ { "ma", "integer" },
+ { "ta", "integer" },
+ { "n", "integer" },
+ { "m", "integer" },
+ }
+ }
+}
-- dropped caps experiment (will be done properly when luatex
-- stores the state in the local par node) .. btw, search still
@@ -84,74 +127,297 @@ commands.setinitial = initials.set
-- todo: prevent linebreak .. but normally an initial ends up at the top of
-- a page so this has a low priority
+-- actions[v_default] = function(head,setting)
+-- local done = false
+-- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+-- -- begin of par
+-- local first = getnext(head)
+-- -- parbox .. needs to be set at 0
+-- if first and getid(first) == hlist_code then
+-- first = getnext(first)
+-- end
+-- -- we need to skip over kerns and glues (signals)
+-- while first and getid(first) ~= glyph_code do
+-- first = getnext(first)
+-- end
+-- if first and getid(first) == glyph_code then
+-- local char = getchar(first)
+-- local prev = getprev(first)
+-- local next = getnext(first)
+-- -- if getid(prev) == hlist_code then
+-- -- -- set the width to 0
+-- -- end
+-- if next and getid(next) == kern_code then
+-- setfield(next,"kern",0)
+-- end
+-- if setting.font then
+-- setfield(first,"font",setting.font)
+-- end
+-- if setting.dynamic > 0 then
+-- setattr(first,0,setting.dynamic)
+-- end
+-- -- can be a helper
+-- local ma = setting.ma or 0
+-- local ca = setting.ca
+-- local ta = setting.ta
+-- if ca and ca > 0 then
+-- setattr(first,a_colorspace,ma == 0 and 1 or ma)
+-- setattr(first,a_color,ca)
+-- end
+-- if ta and ta > 0 then
+-- setattr(first,a_transparency,ta)
+-- end
+-- --
+-- local width = getfield(first,"width")
+-- local height = getfield(first,"height")
+-- local depth = getfield(first,"depth")
+-- local distance = setting.distance or 0
+-- local voffset = setting.voffset or 0
+-- local hoffset = setting.hoffset or 0
+-- local parindent = tex.parindent
+-- local baseline = texget("baselineskip").width
+-- local lines = tonumber(setting.n) or 0
+-- --
+-- setfield(first,"xoffset",- width - hoffset - distance - parindent)
+-- setfield(first,"yoffset",- voffset) -- no longer - height here
+-- -- We pack so that successive handling cannot touch the dropped cap. Packaging
+-- -- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
+-- -- yet stored in with localpar).
+-- setfield(first,"prev",nil)
+-- setfield(first,"next",nil)
+-- local h = hpack_nodes(first)
+-- setfield(h,"width",0)
+-- setfield(h,"height",0)
+-- setfield(h,"depth",0)
+-- setfield(prev,"next",h)
+-- setfield(next,"prev",h)
+-- setfield(h,"next",next)
+-- setfield(h,"prev",prev)
+-- first = h
+-- -- end of packaging
+-- if setting.location == v_margin then
+-- -- okay
+-- else
+-- if lines == 0 then -- safeguard, not too precise
+-- lines = ceil((height+voffset) / baseline)
+-- end
+-- -- We cannot set parshape yet ... when we can I'll add a slope
+-- -- option (positive and negative, in emwidth).
+-- local hangafter = - lines
+-- local hangindent = width + distance + parindent
+-- if trace_initials then
+-- report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
+-- end
+-- tex.hangafter = hangafter
+-- tex.hangindent = hangindent
+-- if parindent ~= 0 then
+-- insert_after(first,first,new_kern(-parindent))
+-- end
+-- end
+-- done = true
+-- end
+-- end
+-- return head, done
+-- end
+
actions[v_default] = function(head,setting)
local done = false
- if head.id == whatsit_code and head.subtype == localpar_code then
+ if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
-- begin of par
- local first = head.next
+ local first = getnext(head)
+ local indent = false
-- parbox .. needs to be set at 0
- if first and first.id == hlist_code then
- first = first.next
+ if first and getid(first) == hlist_code then
+ first = getnext(first)
+ indent = true
end
-- we need to skip over kerns and glues (signals)
- while first and first.id ~= glyph_code do
- first = first.next
+ while first and getid(first) ~= glyph_code do
+ first = getnext(first)
end
- if first and first.id == glyph_code then
- local char = first.char
- local prev = first.prev
- local next = first.next
- -- if prev.id == hlist_code then
- -- -- set the width to 0
- -- end
- if next and next.id == kern_node then
- next.kern = 0
- end
- if setting.font then
- first.font = setting.font
- end
- if setting.dynamic > 0 then
- first[0] = setting.dynamic
- end
- -- can be a helper
- local ma = setting.ma or 0
- local ca = setting.ca
- local ta = setting.ta
- if ca and ca > 0 then
- first[a_colorspace] = ma == 0 and 1 or ma
- first[a_color] = ca
- end
- if ta and ta > 0 then
- first[a_transparency] = ta
- end
- --
- local width = first.width
- local height = first.height
- local depth = first.depth
+ if first and getid(first) == glyph_code then
+ local ma = setting.ma or 0
+ local ca = setting.ca
+ local ta = setting.ta
+ local last = first
local distance = setting.distance or 0
local voffset = setting.voffset or 0
local hoffset = setting.hoffset or 0
local parindent = tex.parindent
local baseline = texget("baselineskip").width
local lines = tonumber(setting.n) or 0
+ local dynamic = setting.dynamic
+ local font = setting.font
+ local method = settings_to_hash(setting.method)
+ local length = tonumber(setting.m) or 1
--
- first.xoffset = - width - hoffset - distance - parindent
- first.yoffset = - voffset -- no longer - height here
+ -- 1 char | n chars | skip first quote | ignore punct | keep punct
+ --
+ if getattr(first,a_initial) then
+ for current in traverse(getnext(first)) do
+ if getattr(current,a_initial) then
+ last = current
+ else
+ break
+ end
+ end
+ elseif method[v_auto] then
+ local char = getchar(first)
+ local kind = category(char)
+ if kind == "po" or kind == "pi" then
+ if method[v_first] then
+ -- remove quote etc before initial
+ local next = getnext(first)
+ if not next then
+ -- don't start with a quote or so
+ return head, false
+ end
+ last = nil
+ for current in traverse_id(glyph_code,next) do
+ head, first = remove_node(head,first,true)
+ first = current
+ last = first
+ break
+ end
+ if not last then
+ -- no following glyph or so
+ return head, false
+ end
+ else
+ -- keep quote etc with initial
+ local next = getnext(first)
+ if not next then
+ -- don't start with a quote or so
+ return head, false
+ end
+ for current in traverse_id(glyph_code,next) do
+ last = current
+ break
+ end
+ if last == first then
+ return head, false
+ end
+ end
+ elseif kind == "pf" then
+ -- error: final quote
+ else
+ -- okay
+ end
+ -- maybe also: get all A. B. etc
+ local next = getnext(first)
+ if next then
+ for current in traverse_id(glyph_code,next) do
+ local char = getchar(current)
+ local kind = category(char)
+ if kind == "po" then
+ if method[v_last] then
+ -- remove period etc after initial
+ remove_node(head,current,true)
+ else
+ -- keep period etc with initial
+ last = current
+ end
+ end
+ break
+ end
+ end
+ else
+ for current in traverse_id(glyph_code,first) do
+ last = current
+ if length <= 1 then
+ break
+ else
+ length = length - 1
+ end
+ end
+ end
+ local current = first
+ while true do
+ local id = getid(current)
+ if id == kern_code then
+ setfield(current,"kern",0)
+ elseif id == glyph_code then
+ local next = getnext(current)
+ if font then
+ setfield(current,"font",font)
+ end
+ if dynamic > 0 then
+ setattr(current,0,dynamic)
+ end
+-- apply font
+
+-- local g = nodes.copy(tonode(current))
+-- g.subtype = 0
+-- nodes.handlers.characters(g)
+-- nodes.handlers.protectglyphs(g)
+-- setfield(current,"char",g.char)
+-- nodes.free(g)
+
+ -- can be a helper
+ if ca and ca > 0 then
+ setattr(current,a_colorspace,ma == 0 and 1 or ma)
+ setattr(current,a_color,ca)
+ end
+ if ta and ta > 0 then
+ setattr(current,a_transparency,ta)
+ end
+ --
+ end
+ if current == last then
+ break
+ else
+ current = getnext(current)
+ end
+ end
-- We pack so that successive handling cannot touch the dropped cap. Packaging
-- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
-- yet stored with localpar).
- first.prev = nil
- first.next = nil
- local h = hpack_nodes(first)
- h.width = 0
- h.height = 0
- h.depth = 0
- prev.next = h
- next.prev = h
- h.next = next
- h.prev = prev
-
- -- end of packaging
+ local prev = getprev(first)
+ local next = getnext(last)
+ --
+ setfield(first,"prev",nil)
+ setfield(last,"next",nil)
+ local dropper = hpack_nodes(first)
+ local width = getfield(dropper,"width")
+ local height = getfield(dropper,"height")
+ local depth = getfield(dropper,"depth")
+ setfield(dropper,"width",0)
+ setfield(dropper,"height",0)
+ setfield(dropper,"depth",0)
+ --
+ setfield(prev,"next",dropper)
+ if next then
+ setfield(next,"prev",dropper)
+ end
+ setfield(dropper,"next",next)
+ setfield(dropper,"prev",prev)
+ --
+ if next then
+ local current = next
+ while current do
+ local id = getid(current)
+ if id == glue_code or id == kern_code then
+ local next = getnext(current)
+ -- remove_node(current,current,true) -- created an invalid next link and dangling remains
+ remove_node(head,current,true)
+ current = next
+ else
+ break
+ end
+ end
+ end
+ --
+ local hoffset = width + hoffset + distance + (indent and parindent or 0)
+ for current in traverse_id(glyph_code,first) do
+ setfield(current,"xoffset",- hoffset )
+ setfield(current,"yoffset",- voffset) -- no longer - height here
+ if current == last then
+ break
+ end
+ end
+ --
+ first = dropper
+ --
if setting.location == v_margin then
-- okay
else
@@ -161,15 +427,15 @@ actions[v_default] = function(head,setting)
-- We cannot set parshape yet ... when we can I'll add a slope
-- option (positive and negative, in emwidth).
local hangafter = - lines
- local hangindent = width + distance + parindent
+ local hangindent = width + distance
if trace_initials then
report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
end
tex.hangafter = hangafter
tex.hangindent = hangindent
- if parindent ~= 0 then
- insert_after(first,first,new_kern(-parindent))
- end
+ end
+ if indent then
+ insert_after(first,first,new_kern(-parindent))
end
done = true
end
@@ -178,16 +444,17 @@ actions[v_default] = function(head,setting)
end
function initials.handler(head)
+ head = tonut(head)
local start = head
local attr = nil
while start do
- attr = start[a_initial]
+ attr = getattr(start,a_initial)
if attr then
break
- elseif start.id == glyph then
+ elseif getid(start) == glyph then
break
else
- start = start.next
+ start = getnext(start)
end
end
if attr then
@@ -201,8 +468,8 @@ function initials.handler(head)
report_initials("processing initials, alternative %a",alternative)
end
local head, done = action(head,settings)
- return head, done
+ return tonode(head), done
end
end
- return head, false
+ return tonode(head), false
end
diff --git a/tex/context/base/typo-drp.mkiv b/tex/context/base/typo-drp.mkiv
index 78f6df0a2..2520c3bfd 100644
--- a/tex/context/base/typo-drp.mkiv
+++ b/tex/context/base/typo-drp.mkiv
@@ -57,6 +57,8 @@
\setupinitial
[\c!location=\v!text,
\c!n=3,
+ \c!m=1,
+ \c!method=\v!none,
% \s!font=Bold sa 4,
% \s!font=Bold ht \measure{initial:n},
\s!font=Bold cp \measure{initial:n},
@@ -67,24 +69,25 @@
\c!color=,
\c!before=\blank]
-\unexpanded\def\placeinitial
+\unexpanded\def\placeinitial % we cannot group so no settings
{\dosingleempty\typo_initials_place}
\def\typo_initials_place[#1]% old command
{\par
\namedinitialparameter{#1}\c!before
- \setinitial[#1]}
+ \setinitial[#1]\relax}
\unexpanded\def\setinitial
- {\dosingleempty\typo_initials_set}
+ {\dodoubleempty\typo_initials_set}
-\unexpanded\def\typo_initials_set[#1]%
- {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}}}
+\unexpanded\def\typo_initials_set[#1][#2]%
+ {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}{#2}}}
-\unexpanded\def\typo_initial_handle_indeed#1%
+\unexpanded\def\typo_initial_handle_indeed#1#2%
{\dontleavehmode
\begingroup
\edef\currentinitial{#1}%
+ \setupcurrentinitial[#2]%
\scratchcounter \initialparameter\c!n\relax
\scratchdistance\initialparameter\c!distance\relax
\scratchhoffset \initialparameter\c!hoffset \relax
@@ -95,24 +98,43 @@
{\definedfont[\initialparameter\s!font]}
{\useinitialstyleparameter\c!style}%
\useinitialcolorparameter\c!color
- \ctxcommand{setinitial{
- location = "\initialparameter\c!location",
- enabled = true,
- n = \number\scratchcounter,
- distance = \number\scratchdistance,
- hoffset = \number\scratchhoffset,
- voffset = \number\scratchvoffset,
- ma = \the\attribute\colormodelattribute ,
- ca = \the\attribute\colorattribute ,
- ta = \the\attribute\transparencyattribute,
- font = \fontid\font,
- dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here
- }}%
+ \edef\p_text{\initialparameter\c!text}% optional
+ \clf_setinitial
+ location {\initialparameter\c!location}%
+ enabled true\space
+ n \scratchcounter
+ m \numexpr\initialparameter\c!m\relax
+ method {\initialparameter\c!method}%
+ distance \scratchdistance
+ hoffset \scratchhoffset
+ voffset \scratchvoffset
+ ma \attribute\colormodelattribute
+ ca \attribute\colorattribute
+ ta \attribute\transparencyattribute
+ font \fontid\font
+ dynamic \attribute\zerocount % it's a bit over the top to support this here
+ \relax
\stopluacode
\kern\zeropoint % we need a node
+ \p_text
\endgroup
\globallet\typo_initial_handle\relax}
\let\typo_initial_handle\relax
+% \setupbodyfont[dejavu,9pt]
+%
+% \startbuffer
+% \setinitial[two] D. E. Knuth \ignorespaces\input knuth \par
+% \setinitial[two] Knuth \ignorespaces\input knuth \par
+% \setinitial[two] \quotation{D. E. Knuth} \ignorespaces\input knuth \par
+% \setinitial[two] \quotation {Knuth} \ignorespaces\input knuth \par
+% \setinitial[two] [text={D.E. Knuth}] \ignorespaces\input knuth \par
+% \setinitial[two] [m=4] D. E. Knuth \ignorespaces\input knuth \par
+% \stopbuffer
+%
+% \type{m=2} \start \defineinitial[two][m=2,method=none] \getbuffer \page \stop
+% \type{m=1,method=auto} \start \defineinitial[two][m=1,method=auto] \getbuffer \page \stop
+% \type{m=1,method={auto,first,last}} \start \defineinitial[two][m=1,method={first,auto,last}] \getbuffer \page \stop
+
\protect \endinput
diff --git a/tex/context/base/typo-dua.lua b/tex/context/base/typo-dua.lua
index ec85a3d9f..1e9b325a6 100644
--- a/tex/context/base/typo-dua.lua
+++ b/tex/context/base/typo-dua.lua
@@ -66,11 +66,25 @@ local formatters = string.formatters
local directiondata = characters.directions
local mirrordata = characters.mirrors
-local remove_node = nodes.remove
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local nutstring = nuts.tostring
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -93,7 +107,7 @@ local maximum_stack = 60 -- probably spec but not needed
local directions = typesetters.directions
local setcolor = directions.setcolor
-local a_directions = attributes.private('directions')
+----- a_directions = attributes.private('directions')
local remove_controls = true directives.register("typesetters.directions.one.removecontrols",function(v) remove_controls = v end)
@@ -189,17 +203,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local chr = current.char
+ local chr = getchar(current)
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = current.next
- elseif id == whatsit_code and current.subtype == dir_code then
- local dir = current.dir
+ current = getnext(current)
+ elseif id == whatsit_code and getsubtype(current) == dir_code then
+ local dir = getfield(current,"dir")
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -209,27 +223,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = current.next
+ current = getnext(current)
elseif id == math_code then
local skip = 0
- current = current.next
- while current.id ~= math_code do
+ current = getnext(current)
+ while getid(current) ~= math_code do
skip = skip + 1
- current = current.next
+ current = getnext(current)
end
- skip = skip + 1
- current = current.next
+ skip = skip + 1
+ current = getnext(current)
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = current.next
+ current = getnext(current)
while n do
- local id = current.id
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
+ local id = getid(current)
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
skip = skip + 1
last = id
- current = current.next
+ current = getnext(current)
else
break
end
@@ -289,8 +303,8 @@ local function find_run_limit_b_s_ws_on(list,start,limit)
end
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if head.id == whatsit_code and head.subtype == localpar_code then
- if head.dir == "TRT" then
+ if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ if getfield(head,"dir") == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -677,58 +691,66 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = current.id
+ local id = getid(current)
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- current.char = mirror
+ setfield(current,"char",mirror)
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- current.dir = pardir -- is this really needed?
+ setfield(current,"dir",pardir) -- is this really needed?
elseif id == glue_code then
- if enddir and current.subtype == parfillskip_code then
+ if enddir and getsubtype(current) == parfillskip_code then
-- insert the last enddir before \parfillskip glue
- head = insert_node_before(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and current.subtype == localpar_code then
+ if begindir and getsubtype(current) == localpar_code then
-- local_par should always be the 1st node
- head, current = insert_node_after(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
begindir = nil
done = true
end
end
if begindir then
- head = insert_node_before(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
done = true
end
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = current.next
+ current = getnext(current)
end
end
if enddir then
- head, current = insert_node_after(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
done = true
end
if not entry.remove then
- current = current.next
+ current = getnext(current)
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = current.next
+ current = getnext(current)
end
index = index + 1
end
@@ -736,6 +758,7 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
+ head = tonut(head)
local list, size = build_list(head)
local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context
if not dirfound and trace_details then
@@ -752,7 +775,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return head, done
+ return tonode(head), done
end
directions.installhandler(interfaces.variables.one,process)
diff --git a/tex/context/base/typo-dub.lua b/tex/context/base/typo-dub.lua
index 3ecfce364..a1c9de752 100644
--- a/tex/context/base/typo-dub.lua
+++ b/tex/context/base/typo-dub.lua
@@ -54,11 +54,26 @@ local directiondata = characters.directions
local mirrordata = characters.mirrors
local textclassdata = characters.textclasses
-local remove_node = nodes.remove
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local nutstring = nuts.tostring
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -83,11 +98,11 @@ local getfences = directions.getfences
local a_directions = attributes.private('directions')
local a_textbidi = attributes.private('textbidi')
-local a_state = attributes.private('state')
+----- a_state = attributes.private('state')
-local s_isol = fonts.analyzers.states.isol
+----- s_isol = fonts.analyzers.states.isol
--- current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
+----- current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
local remove_controls = true directives.register("typesetters.directions.removecontrols",function(v) remove_controls = v end)
----- analyze_fences = true directives.register("typesetters.directions.analyzefences", function(v) analyze_fences = v end)
@@ -131,7 +146,7 @@ local report_directions = logs.reporter("typesetting","directions two")
--
-- l : left to right
-- r : right to left
--- al : right to legt arabic (esp punctuation issues)
+-- al : right to left arabic (esp punctuation issues)
-- explicit: (new)
--
@@ -242,17 +257,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local chr = current.char
+ local chr = getchar(current)
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = current.next
- elseif id == whatsit_code and current.subtype == dir_code then
- local dir = current.dir
+ current = getnext(current)
+ elseif id == whatsit_code and getsubtype(current) == dir_code then
+ local dir = getfield(current,"dir")
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -262,27 +277,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = current.next
+ current = getnext(current)
elseif id == math_code then
local skip = 0
- current = current.next
- while current.id ~= math_code do
+ current = getnext(current)
+ while getid(current) ~= math_code do
skip = skip + 1
- current = current.next
+ current = getnext(current)
end
skip = skip + 1
- current = current.next
+ current = getnext(current)
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = current.next
+ current = getnext(current)
while n do
- local id = current.id
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
+ local id = getid(current)
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
skip = skip + 1
last = id
- current = current.next
+ current = getnext(current)
else
break
end
@@ -311,7 +326,7 @@ end
-- ש ( ל ( א ) כ ) 2-8,4-6
-- ש ( ל [ א ] כ ) 2-8,4-6
-function resolve_fences(list,size,start,limit)
+local function resolve_fences(list,size,start,limit)
-- N0: funny effects, not always better, so it's an option
local stack = { }
local top = 0
@@ -365,8 +380,8 @@ end
-- the action
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if head.id == whatsit_code and head.subtype == localpar_code then
- if head.dir == "TRT" then
+ if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ if getfield(head,"dir") == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -785,58 +800,66 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = current.id
+ local id = getid(current)
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- current.char = mirror
+ setfield(current,"char",mirror)
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- current.dir = pardir -- is this really needed?
+ setfield(current,"dir",pardir) -- is this really needed?
elseif id == glue_code then
- if enddir and current.subtype == parfillskip_code then
+ if enddir and getsubtype(current) == parfillskip_code then
-- insert the last enddir before \parfillskip glue
- head = insert_node_before(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and current.subtype == localpar_code then
+ if begindir and getsubtype(current) == localpar_code then
-- local_par should always be the 1st node
- head, current = insert_node_after(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
begindir = nil
done = true
end
end
if begindir then
- head = insert_node_before(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
done = true
end
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = current.next
+ current = getnext(current)
end
end
if enddir then
- head, current = insert_node_after(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
done = true
end
if not entry.remove then
- current = current.next
+ current = getnext(current)
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = current.next
+ current = getnext(current)
end
index = index + 1
end
@@ -844,8 +867,9 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
+ head = tonut(head)
-- for the moment a whole paragraph property
- local attr = head[a_directions]
+ local attr = getattr(head,a_directions)
local analyze_fences = getfences(attr)
--
local list, size = build_list(head)
@@ -864,7 +888,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return head, done
+ return tonode(head), done
end
directions.installhandler(interfaces.variables.two,process)
diff --git a/tex/context/base/typo-fln.lua b/tex/context/base/typo-fln.lua
index 4c97af450..2076a7464 100644
--- a/tex/context/base/typo-fln.lua
+++ b/tex/context/base/typo-fln.lua
@@ -23,25 +23,40 @@ local firstlines = typesetters.firstlines
local nodes = nodes
local tasks = nodes.tasks
-local getbox = nodes.getbox
+local context = context
+local implement = interfaces.implement
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getbox = nuts.getbox
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
-local traverse_id = nodes.traverse_id
-local free_node_list = nodes.flush_list
-local free_node = nodes.flush_node
-local copy_node_list = nodes.copy_list
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-local hpack_node_list = nodes.hpack
-local remove_node = nodes.remove
+local traverse_id = nuts.traverse_id
+local free_node_list = nuts.flush_list
+local free_node = nuts.flush_node
+local copy_node_list = nuts.copy_list
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local hpack_node_list = nuts.hpack
+local remove_node = nuts.remove
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local newpenalty = nodepool.penalty
local newkern = nodepool.kern
-local tracerrule = nodes.tracers.pool.nodes.rule
+local tracerrule = nodes.tracers.pool.nuts.rule
local actions = { }
firstlines.actions = actions
@@ -73,7 +88,21 @@ function firstlines.set(specification)
texsetattribute(a_firstline,1)
end
-commands.setfirstline = firstlines.set
+implement {
+ name = "setfirstline",
+ actions = firstlines.set,
+ arguments = {
+ {
+ { "alternative" },
+ { "font", "integer" },
+ { "dynamic", "integer" },
+ { "ma", "integer" },
+ { "ca", "integer" },
+ { "ta", "integer" },
+ { "n", "integer" },
+ }
+ }
+}
actions[v_line] = function(head,setting)
-- local attribute = fonts.specifiers.contextnumber(setting.feature) -- was experimental
@@ -92,9 +121,9 @@ actions[v_line] = function(head,setting)
local linebreaks = { }
for g in traverse_id(glyph_code,temp) do
if dynamic > 0 then
- g[0] = dynamic
+ setattr(g,0,dynamic)
end
- g.font = font
+ setfield(g,"font",font)
end
local start = temp
local list = temp
@@ -108,7 +137,7 @@ actions[v_line] = function(head,setting)
hsize = hsize - hangindent
end
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
n = n + 1
elseif id == disc_code then
@@ -117,7 +146,7 @@ actions[v_line] = function(head,setting)
-- this could be an option
elseif n > 0 then
local pack = hpack_node_list(copy_node_list(list,start))
- if pack.width > hsize then
+ if getfield(pack,"width") > hsize then
free_node_list(pack)
list = prev
break
@@ -128,7 +157,7 @@ actions[v_line] = function(head,setting)
nofchars = n
end
end
- start = start.next
+ start = getnext(start)
end
if not linebreaks[i] then
linebreaks[i] = n
@@ -139,18 +168,18 @@ actions[v_line] = function(head,setting)
for i=1,noflines do
local linebreak = linebreaks[i]
while start and n < nofchars do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then -- or id == disc_code then
if dynamic > 0 then
- start[0] = dynamic
+ setattr(start,0,dynamic)
end
- start.font = font
+ setfield(start,"font",font)
if ca and ca > 0 then
- start[a_colorspace] = ma == 0 and 1 or ma
- start[a_color] = ca
+ setattr(start,a_colorspace,ma == 0 and 1 or ma)
+ setattr(start,a_color,ca)
end
if ta and ta > 0 then
- start[a_transparency] = ta
+ setattr(start,a_transparency,ta)
end
n = n + 1
end
@@ -163,7 +192,7 @@ actions[v_line] = function(head,setting)
head, start = insert_node_after(head,start,newpenalty(-10000)) -- break
break
end
- start = start.next
+ start = getnext(start)
end
end
free_node_list(temp)
@@ -182,7 +211,7 @@ actions[v_word] = function(head,setting)
local ca = setting.ca
local ta = setting.ta
while start do
- local id = start.id
+ local id = getid(start)
-- todo: delete disc nodes
if id == glyph_code then
if not ok then
@@ -190,16 +219,16 @@ actions[v_word] = function(head,setting)
ok = true
end
if ca and ca > 0 then
- start[a_colorspace] = ma == 0 and 1 or ma
- start[a_color] = ca
+ setattr(start,a_colorspace,ma == 0 and 1 or ma)
+ setattr(start,a_color,ca)
end
if ta and ta > 0 then
- start[a_transparency] = ta
+ setattr(start,a_transparency,ta)
end
if dynamic > 0 then
- start[0] = dynamic
+ setattr(start,0,dynamic)
end
- start.font = font
+ setfield(start,"font",font)
elseif id == disc_code then
-- continue
elseif id == kern_code then -- todo: fontkern
@@ -210,7 +239,7 @@ actions[v_word] = function(head,setting)
break
end
end
- start = start.next
+ start = getnext(start)
end
return head, true
end
@@ -218,16 +247,17 @@ end
actions[v_default] = actions[v_line]
function firstlines.handler(head)
+ head = tonut(head)
local start = head
local attr = nil
while start do
- attr = start[a_firstline]
+ attr = getattr(start,a_firstline)
if attr then
break
- elseif start.id == glyph then
+ elseif getid(start) == glyph_code then
break
else
- start = start.next
+ start = getnext(start)
end
end
if attr then
@@ -240,17 +270,18 @@ function firstlines.handler(head)
if trace_firstlines then
report_firstlines("processing firstlines, alternative %a",alternative)
end
- return action(head,settings)
+ local head, done = action(head,settings)
+ return tonode(head), done
end
end
- return head, false
+ return tonode(head), false
end
-- goodie
-function commands.applytofirstcharacter(box,what)
+local function applytofirstcharacter(box,what)
local tbox = getbox(box) -- assumes hlist
- local list = tbox.list
+ local list = getlist(tbox)
local done = nil
for n in traverse_id(glyph_code,list) do
list = remove_node(list,n)
@@ -258,10 +289,10 @@ function commands.applytofirstcharacter(box,what)
break
end
if done then
- tbox.list = list
+ setfield(tbox,"list",list)
local kind = type(what)
if kind == "string" then
- context[what](done)
+ context[what](tonode(done))
elseif kind == "function" then
what(done)
else
@@ -269,3 +300,9 @@ function commands.applytofirstcharacter(box,what)
end
end
end
+
+implement {
+ name = "applytofirstcharacter",
+ actions = applytofirstcharacter,
+ arguments = { "integer", "string" }
+}
diff --git a/tex/context/base/typo-fln.mkiv b/tex/context/base/typo-fln.mkiv
index d8651b459..38a53bfa9 100644
--- a/tex/context/base/typo-fln.mkiv
+++ b/tex/context/base/typo-fln.mkiv
@@ -79,15 +79,15 @@
\begingroup
\edef\currentfirstline{#1}%
\usefirstlinestyleandcolor\c!style\c!color
- \ctxlua{commands.setfirstline {
- alternative = "\firstlineparameter\c!alternative",
- ma = \the\attribute\colormodelattribute,
- ca = \the\attribute\colorattribute,
- ta = \the\attribute\transparencyattribute,
- n = \number\firstlineparameter\c!n,
- font = \fontid\font,
- dynamic = \number\attribute\zerocount,
- }}%
+ \clf_setfirstline
+ alternative {\firstlineparameter\c!alternative}%
+ ma \attribute\colormodelattribute
+ ca \attribute\colorattribute
+ ta \attribute\transparencyattribute
+ n \numexpr\firstlineparameter\c!n\relax
+ font \fontid\font
+ dynamic \attribute\zerocount
+ \relax
\kern\zeropoint % we need a node
% \hskip\zeropoint\s!plus\emwidth\relax % can be an option
\endgroup
@@ -104,7 +104,7 @@
\unexpanded\def\applytofirstcharacter#1%
{\begingroup
\dowithnextbox
- {\ctxcommand{applytofirstcharacter(\number\nextbox,"\strippedcsname#1")}%
+ {\clf_applytofirstcharacter\nextbox{\strippedcsname#1}%
\unhbox\nextbox
\endgroup}%
\hbox}
diff --git a/tex/context/base/typo-inj.lua b/tex/context/base/typo-inj.lua
new file mode 100644
index 000000000..b5d9e1c51
--- /dev/null
+++ b/tex/context/base/typo-inj.lua
@@ -0,0 +1,94 @@
+if not modules then modules = { } end modules ['typo-inj'] = { -- was node-par
+ version = 1.001,
+ comment = "companion to typo-inj.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber = tonumber
+
+local context = context
+local implement = interfaces.implement
+
+local injectors = { }
+typesetters.injectors = injectors
+local list = { }
+injectors.list = list
+local showall = false
+
+local settings_to_array = utilities.parsers.settings_to_array
+
+local ctx_domarkinjector = context.domarkinjector
+local ctx_doactivateinjector = context.doactivateinjector
+
+table.setmetatableindex(list,function(t,k)
+ local v = {
+ counter = 0,
+ actions = { },
+ show = false,
+ active = false,
+ }
+ t[k] = v
+ return v
+end)
+
+function injectors.reset(name)
+ list[name] = nil
+end
+
+function injectors.set(name,numbers,command)
+ local injector = list[name]
+ local actions = injector.actions
+ local places = settings_to_array(numbers)
+ for i=1,#places do
+ actions[tonumber(places[i])] = command
+ end
+ if not injector.active then
+ ctx_doactivateinjector(name)
+ injector.active = true
+ end
+end
+
+function injectors.show(name)
+ if not name or name == "" then
+ showall = true
+ else
+ local names = settings_to_array(name) -- separate name to avoid shadowing the module-level list
+ for i=1,#names do
+ list[names[i]].show = true
+ end
+ end
+end
+
+function injectors.mark(name,show)
+ local injector = list[name]
+ local n = injector.counter + 1
+ injector.counter = n
+ if showall or injector.show then
+ ctx_domarkinjector(injector.actions[n] and 1 or 0,n)
+ end
+end
+
+function injectors.check(name,n) -- we could also accept n = number : +/- 2
+ local injector = list[name]
+ if n == false then
+ n = injector.counter
+ elseif n == nil then
+ n = injector.counter + 1 -- next (upcoming)
+ else
+ n = tonumber(n) or 0
+ end
+ local action = injector.actions[n]
+ if action then
+ context(action)
+ end
+end
+
+implement { name = "resetinjector", actions = injectors.reset, arguments = "string" }
+implement { name = "showinjector", actions = injectors.show, arguments = "string" }
+implement { name = "setinjector", actions = injectors.set, arguments = { "string", "string", "string" } }
+implement { name = "markinjector", actions = injectors.mark, arguments = "string" }
+implement { name = "checkinjector", actions = injectors.check, arguments = "string" }
+implement { name = "checkpreviousinjector", actions = injectors.check, arguments = { "string", true } }
+implement { name = "checknextinjector", actions = injectors.check }
diff --git a/tex/context/base/typo-inj.mkiv b/tex/context/base/typo-inj.mkiv
new file mode 100644
index 000000000..46cd9fe45
--- /dev/null
+++ b/tex/context/base/typo-inj.mkiv
@@ -0,0 +1,77 @@
+%D \module
+%D [ file=typo-inj,
+%D version=2014.10.13,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Triggering Actions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Triggering Actions}
+
+%D This is a sort of escape from too automatic typesetting of lists. I have
+%D been thinking of a more generic injector, for instance one based on tags (as we
+%D already index each element), but this is sort of fuzzy because the number of a
+%D tag is not always incremented before we check for it. Also, registers and lists
+%D are among the few candidates that cannot be controlled directly by putting
+%D something in the input. So, for the moment I stick to this mechanism, but
+%D future versions of \CONTEXT\ might do it differently. Compatibility is not much
+%D of an issue here as this mechanism is only to be used in final production runs.
+
+\unprotect
+
+\registerctxluafile{typo-inj}{1.001}
+
+% todo: no need in trialmode
+
+%D \showinjector
+%D
+%D \setinjector[register][3][\column]
+%D \setinjector[list] [2][{\blank[3*big]}]
+%D
+%D \starttext
+%D \placelist[section][criterium=text]
+%D \blank[3*big]
+%D \placeregister[index][criterium=text]
+%D \page
+%D \startsection[title=Alpha] first \index{first} \stopsection
+%D \startsection[title=Beta] second \index{second} \stopsection
+%D \startsection[title=Gamma] third \index{third} \stopsection
+%D \startsection[title=Delta] fourth \index{fourth} \stopsection
+%D \stoptext
+
+\unexpanded\def\resetinjector [#1]{\clf_resetinjector{#1}}
+\unexpanded\def\markinjector [#1]{\dontleavehmode\clf_markinjector{#1}}
+\unexpanded\def\checkinjector [#1]{\clf_checkinjector{#1}}
+\unexpanded\def\checknextinjector {\clf_checknextinjector}
+\unexpanded\def\checkpreviousinjector {\clf_checkpreviousinjector}
+\unexpanded\def\dosetinjector [#1][#2][#3]{\clf_setinjector{#1}{#2}{#3}}
+\unexpanded\def\doshowinjector [#1]{\clf_showinjector{#1}}
+
+\unexpanded\def\setinjector {\dotripleargument\dosetinjector}
+\unexpanded\def\showinjector{\dosingleempty\doshowinjector}
+
+\unexpanded\def\domarkinjector#1#2% called at the lua end
+ {\dontleavehmode\llap{\infofont\ifcase#1\else\red\fi<#2>\quad}}
+
+% low level definers .. we could have \injectors_mark and \injectors_check and then
+% use \v!list instead of \s!list
+
+\unexpanded\def\doinstallinjector#1%
+ {\letvalue{typo_injectors_mark_#1}\donothing
+ \letvalue{typo_injectors_check_#1}\donothing}
+
+\unexpanded\def\doactivateinjector#1% used at lua end
+ {\setuxvalue{typo_injectors_mark_#1}{\dontleavehmode\noexpand\clf_markinjector{#1}}%
+ \setuxvalue{typo_injectors_check_#1}{\noexpand\clf_checkinjector{#1}}}
+
+\unexpanded\def\dotestinjector#1% only for testing outside unprotect
+ {\csname typo_injectors_check_#1\endcsname
+ \csname typo_injectors_mark_#1\endcsname}
+
+\protect \endinput
+
diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua
index 452b623c8..d3a31fad1 100644
--- a/tex/context/base/typo-itc.lua
+++ b/tex/context/base/typo-itc.lua
@@ -9,9 +9,10 @@ if not modules then modules = { } end modules ['typo-itc'] = {
local utfchar = utf.char
local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
-
local report_italics = logs.reporter("nodes","italics")
+local threshold = 0.5 trackers.register("typesetters.threshold", function(v) threshold = v == true and 0.5 or tonumber(v) end)
+
typesetters.italics = typesetters.italics or { }
local italics = typesetters.italics
@@ -24,21 +25,41 @@ local math_code = nodecodes.math
local tasks = nodes.tasks
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local end_of_math = node.end_of_math
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local setfield = nuts.setfield
+
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local end_of_math = nuts.end_of_math
+local find_tail = nuts.tail
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
local a_italics = attributes.private("italics")
local unsetvalue = attributes.unsetvalue
-local new_correction_kern = nodes.pool.fontkern
-local new_correction_glue = nodes.pool.glue
+local new_correction_kern = nodepool.fontkern
+local new_correction_glue = nodepool.glue
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
local italicsdata = fonthashes.italics
+local exheights = fonthashes.exheights
+
+local implement = interfaces.implement
local forcedvariant = false
@@ -82,105 +103,369 @@ end
-- todo: clear attribute
+local function okay(data,current,font,prevchar,previtalic,char,what)
+ if not data then
+ if trace_italics then
+ report_italics("ignoring %p between %s italic %C and italic %C",previtalic,what,prevchar,char)
+ end
+ return false
+ end
+ if threshold then
+ local ht = getfield(current,"height")
+ local ex = exheights[font]
+ local th = threshold * ex
+ if ht <= th then
+ if trace_italics then
+ report_italics("ignoring correction between %s italic %C and regular %C, height %p less than threshold %p",prevchar,what,char,ht,th)
+ end
+ return false
+ end
+ end
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and regular %C",previtalic,what,prevchar,char)
+ end
+ return true
+end
+
+-- maybe: with_attributes(current,n) :
+--
+-- local function correction_kern(kern,n)
+-- return with_attributes(new_correction_kern(kern),n)
+-- end
+
+local function correction_kern(kern,n)
+ local k = new_correction_kern(kern)
+ if n then
+ local a = getfield(n,"attr")
+ if a then -- maybe not
+ setfield(k,"attr",a) -- can be a marked content (border case)
+ end
+ end
+ return k
+end
+
+local function correction_glue(glue,n)
+ local g = new_correction_glue(glue)
+ if n then
+ local a = getfield(n,"attr")
+ if a then -- maybe not
+ setfield(g,"attr",a) -- can be a marked content (border case)
+ end
+ end
+ return g
+end
+
function italics.handler(head)
- local done = false
- local italic = 0
- local lastfont = nil
- local lastattr = nil
- local previous = nil
- local prevchar = nil
- local current = head
- local inserted = nil
+
+ local prev = nil
+ local prevchar = nil
+ local prevhead = tonut(head)
+ local previtalic = 0
+ local previnserted = nil
+
+ local replace = nil
+ local replacechar = nil
+ local replacehead = nil
+ local replaceitalic = 0
+ local replaceinserted = nil
+
+ local post = nil
+ local postchar = nil
+ local posthead = nil
+ local postitalic = 0
+ local postinserted = nil
+
+ local current = prevhead
+ local done = false
+ local lastfont = nil
+ local lastattr = nil
+
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local font = current.font
- local char = current.char
+ local font = getfont(current)
+ local char = getchar(current)
local data = italicsdata[font]
if font ~= lastfont then
- if italic ~= 0 then
- if data then
- if trace_italics then
- report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
- end
- else
- if trace_italics then
- report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
- end
- insert_node_after(head,previous,new_correction_kern(italic))
+ if previtalic ~= 0 then
+ if okay(data,current,font,prevchar,previtalic,char,"glyph") then
+ insert_node_after(prevhead,prev,correction_kern(previtalic,current))
done = true
end
- elseif inserted and data then
+ elseif previnserted and data then
if trace_italics then
- report_italics("deleting last correction before %C",char)
+ report_italics("deleting last correction before %s %C",char,"glyph")
end
- delete_node(head,inserted)
+ delete_node(prevhead,previnserted)
else
- -- nothing
+ --
+ if replaceitalic ~= 0 then
+ if okay(data,replace,font,replacechar,replaceitalic,char,"replace") then
+ insert_node_after(replacehead,replace,correction_kern(replaceitalic,current))
+ done = true
+ end
+ replaceitalic = 0
+ elseif replaceinserted and data then
+ if trace_italics then
+ report_italics("deleting last correction before %s %C","replace",char)
+ end
+ delete_node(replacehead,replaceinserted)
+ end
+ --
+ if postitalic ~= 0 then
+ if okay(data,post,font,postchar,postitalic,char,"post") then
+ insert_node_after(posthead,post,correction_kern(postitalic,current))
+ done = true
+ end
+ postitalic = 0
+ elseif postinserted and data then
+ if trace_italics then
+ report_italics("deleting last correction before %s %C","post",char)
+ end
+ delete_node(posthead,postinserted)
+ end
end
+ --
lastfont = font
end
if data then
- local attr = forcedvariant or current[a_italics]
+ local attr = forcedvariant or getattr(current,a_italics)
if attr and attr > 0 then
local cd = data[char]
if not cd then
-- this really can happen
- italic = 0
+ previtalic = 0
else
- italic = cd.italic or cd.italic_correction
- if not italic then
- italic = setitalicinfont(font,char) -- calculated once
- -- italic = 0
+ previtalic = cd.italic or cd.italic_correction
+ if not previtalic then
+ previtalic = setitalicinfont(font,char) -- calculated once
+ -- previtalic = 0
end
- if italic ~= 0 then
+ if previtalic ~= 0 then
lastfont = font
lastattr = attr
- previous = current
+ prev = current
+ -- prevhead = head
prevchar = char
end
end
else
- italic = 0
+ previtalic = 0
end
else
- italic = 0
+ previtalic = 0
end
- inserted = nil
+ previnserted = nil
+ replaceinserted = nil
+ postinserted = nil
elseif id == disc_code then
- -- skip
- elseif id == kern_code then
- inserted = nil
- italic = 0
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ replace = getfield(current,"replace")
+ if replace then
+ local current = find_tail(replace)
+ if getid(current) ~= glyph_code then
+ current = getprev(current)
+ end
+ if current and getid(current) == glyph_code then
+ local font = getfont(current)
+ local char = getchar(current)
+ local data = italicsdata[font]
+ if data then
+ local attr = forcedvariant or getattr(current,a_italics)
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+ replaceitalic = 0
+ else
+ replaceitalic = cd.italic or cd.italic_correction
+ if not replaceitalic then
+ replaceitalic = setitalicinfont(font,char) -- calculated once
+ -- replaceitalic = 0
+ end
+ if replaceitalic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ replacechar = char
+ replacehead = replace
+ replace = current
+ end
+ end
+-- else
+-- replaceitalic = 0
+ end
+-- else
+-- replaceitalic = 0
+ end
+-- else
+-- replaceitalic = 0
+ end
+-- replaceinserted = nil
+-- else
+-- replaceitalic = 0
+-- replaceinserted = nil
+ end
+ post = getfield(current,"post")
+ if post then
+ local current = find_tail(post)
+ if getid(current) ~= glyph_code then
+ current = getprev(current)
+ end
+ if current and getid(current) == glyph_code then
+ local font = getfont(current)
+ local char = getchar(current)
+ local data = italicsdata[font]
+ if data then
+ local attr = forcedvariant or getattr(current,a_italics)
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+-- postitalic = 0
+ else
+ postitalic = cd.italic or cd.italic_correction
+ if not postitalic then
+ postitalic = setitalicinfont(font,char) -- calculated once
+ -- postitalic = 0
+ end
+ if postitalic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ postchar = char
+ posthead = post
+ post = current
+ end
+ end
+-- else
+-- postitalic = 0
+ end
+-- else
+-- postitalic = 0
+ end
+-- else
+-- postitalic = 0
+ end
+-- postinserted = nil
+-- else
+-- postitalic = 0
+-- postinserted = nil
+ end
+ elseif id == kern_code then -- how about fontkern ?
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
elseif id == glue_code then
- if italic ~= 0 then
+ if previtalic ~= 0 then
if trace_italics then
- report_italics("inserting %p between italic %C and glue",italic,prevchar)
+ report_italics("inserting %p between %s italic %C and glue",previtalic,"glyph",prevchar)
+ end
+ previnserted = correction_glue(previtalic,current) -- maybe just add ? else problem with penalties
+ previtalic = 0
+ done = true
+ insert_node_after(prevhead,prev,previnserted)
+ else
+ if replaceitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and glue",replaceitalic,"replace",replacechar)
+ end
+ replaceinserted = correction_kern(replaceitalic,current) -- needs to be a kern
+ replaceitalic = 0
+ done = true
+ insert_node_after(replacehead,replace,replaceinserted)
+ end
+ if postitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and glue",postitalic,"post",postchar)
+ end
+ postinserted = correction_kern(postitalic,current) -- needs to be a kern
+ postitalic = 0
+ done = true
+ insert_node_after(posthead,post,postinserted)
end
- inserted = new_correction_glue(italic) -- maybe just add ? else problem with penalties
- insert_node_after(head,previous,inserted)
- italic = 0
- done = true
end
elseif id == math_code then
current = end_of_math(current)
- elseif italic ~= 0 then
- if trace_italics then
- report_italics("inserting %p between italic %C and whatever",italic,prevchar)
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ else
+ if previtalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and whatever",previtalic,"glyph",prevchar)
+ end
+ insert_node_after(prevhead,prev,correction_kern(previtalic,current))
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ done = true
+ else
+ if replaceitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and whatever",replaceritalic,"replace",replacechar)
+ end
+ insert_node_after(replacehead,replace,correction_kern(replaceitalic,current))
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ done = true
+ end
+ if postitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and whatever",postitalic,"post",postchar)
+ end
+ insert_node_after(posthead,post,correction_kern(postitalic,current))
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ done = true
+ end
end
- inserted = nil
- insert_node_after(head,previous,new_correction_kern(italic))
- italic = 0
- done = true
end
- current = current.next
+ current = getnext(current)
end
- if italic ~= 0 and lastattr > 1 then -- more control is needed here
- if trace_italics then
- report_italics("inserting %p between italic %C and end of list",italic,prevchar)
+ if lastattr and lastattr > 1 then -- more control is needed here
+ if previtalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and end of list",previtalic,"glyph",prevchar)
+ end
+ insert_node_after(prevhead,prev,correction_kern(previtalic,current))
+ done = true
+ else
+ if replaceitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and end of list",replaceitalic,"replace",replacechar)
+ end
+ insert_node_after(replacehead,replace,correction_kern(replaceitalic,current))
+ done = true
+ end
+ if postitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and end of list",postitalic,"post",postchar)
+ end
+ insert_node_after(posthead,post,correction_kern(postitalic,current))
+ done = true
+ end
end
- insert_node_after(head,previous,new_correction_kern(italic))
- done = true
end
return head, done
end
@@ -210,10 +495,21 @@ function italics.reset()
texsetattribute(a_italics,unsetvalue)
end
+implement {
+ name = "setitaliccorrection",
+ actions = italics.set,
+ arguments = "string"
+}
+
+implement {
+ name = "resetitaliccorrection",
+ actions = italics.reset,
+}
+
local variables = interfaces.variables
local settings_to_hash = utilities.parsers.settings_to_hash
-function commands.setupitaliccorrection(option) -- no grouping !
+local function setupitaliccorrection(option) -- no grouping !
if enable then
enable()
end
@@ -224,6 +520,7 @@ function commands.setupitaliccorrection(option) -- no grouping !
elseif options[variables.always] then
variant = 2
end
+ -- maybe also keywords for threshold
if options[variables.global] then
forcedvariant = variant
texsetattribute(a_italics,unsetvalue)
@@ -236,16 +533,28 @@ function commands.setupitaliccorrection(option) -- no grouping !
end
end
+implement {
+ name = "setupitaliccorrection",
+ actions = setupitaliccorrection,
+ arguments = "string"
+}
+
-- for manuals:
local stack = { }
-function commands.pushitaliccorrection()
- table.insert(stack,{forcedvariant, texgetattribute(a_italics) })
-end
+implement {
+ name = "pushitaliccorrection",
+ actions = function()
+ table.insert(stack,{forcedvariant, texgetattribute(a_italics) })
+ end
+}
-function commands.popitaliccorrection()
- local top = table.remove(stack)
- forcedvariant = top[1]
- texsetattribute(a_italics,top[2])
-end
+implement {
+ name = "popitaliccorrection",
+ actions = function()
+ local top = table.remove(stack)
+ forcedvariant = top[1]
+ texsetattribute(a_italics,top[2])
+ end
+}
diff --git a/tex/context/base/typo-itc.mkvi b/tex/context/base/typo-itc.mkvi
index 4a3bba518..c0aa8e2f6 100644
--- a/tex/context/base/typo-itc.mkvi
+++ b/tex/context/base/typo-itc.mkvi
@@ -42,10 +42,10 @@
% 2 = end of word and end of a list
\unexpanded\def\setitaliccorrection[#code]% rather low level (might go away)
- {\ctxlua{typesetters.italics.set(\number#code)}}
+ {\clf_setitaliccorrection#code\relax}
\unexpanded\def\resetitaliccorrection% rather low level (might go away)
- {\ctxlua{typesetters.italics.reset()}}
+ {\clf_resetitaliccorrection}
% global : no attributes, just always (faster and less memory)
% text : only text
@@ -53,7 +53,7 @@
% none : -
\unexpanded\def\setupitaliccorrection[#settings]%
- {\ctxcommand{setupitaliccorrection("#settings")}}
+ {\clf_setupitaliccorrection{#settings}}
\appendtoks
\attribute\italicsattribute\attributeunsetvalue
diff --git a/tex/context/base/typo-itm.mkiv b/tex/context/base/typo-itm.mkiv
index eb47e4076..0bb8170c7 100644
--- a/tex/context/base/typo-itm.mkiv
+++ b/tex/context/base/typo-itm.mkiv
@@ -109,9 +109,9 @@
\let\p_typo_items_symbol\empty
\let\m_typo_items_symbol\firstofoneargument
\else
- \doifconversiondefinedelse\p_typo_items_symbol
+ \doifelseconversiondefined\p_typo_items_symbol
{\def\m_typo_items_symbol{\convertnumber\p_typo_items_symbol}}
- {\doifsymboldefinedelse\p_typo_items_symbol
+ {\doifelsesymboldefined\p_typo_items_symbol
{\def\m_typo_items_symbol{\symbol[\p_typo_items_symbol]\gobbleoneargument}}
{\let\m_typo_items_symbol\firstofoneargument}}%
\fi\fi
diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua
index 56f58bb73..46a977cfd 100644
--- a/tex/context/base/typo-krn.lua
+++ b/tex/context/base/typo-krn.lua
@@ -11,23 +11,38 @@ if not modules then modules = { } end modules ['typo-krn'] = {
local next, type, tonumber = next, type, tonumber
local utfchar = utf.char
-local nodes, node, fonts = nodes, node, fonts
+local nodes = nodes
+local fonts = fonts
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local end_of_math = node.end_of_math
+local tasks = nodes.tasks
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+-- check what is used
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local end_of_math = nuts.end_of_math
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
local new_gluespec = nodepool.gluespec
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -36,6 +51,7 @@ local nodecodes = nodes.nodecodes
local kerncodes = nodes.kerncodes
local skipcodes = nodes.skipcodes
local disccodes = nodes.disccodes
+local listcodes = nodes.listcodes
local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
@@ -45,7 +61,12 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local math_code = nodecodes.math
+local box_list_code = listcodes.box
+local user_list_code = listcodes.unknown
+
local discretionary_code = disccodes.discretionary
+local automatic_code = disccodes.automatic
+
local kerning_code = kerncodes.kerning
local userkern_code = kerncodes.userkern
local userskip_code = skipcodes.userskip
@@ -107,10 +128,10 @@ kerns.keeptogether = false -- just for fun (todo: control setting with key/value
-- blue : keep by goodie
function kerns.keepligature(n) -- might become default
- local f = n.font
- local a = n[0] or 0
+ local f = getfont(n)
+ local a = getattr(n,0) or 0
if trace_ligatures then
- local c = n.char
+ local c = getchar(n)
local d = fontdescriptions[f][c].name
if a > 0 and contextsetups[a].keepligatures == v_auto then
report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"kept","dynamic","keepligatures")
@@ -169,9 +190,9 @@ end
local function kern_injector(fillup,kern)
if fillup then
local g = new_glue(kern)
- local s = g.spec
- s.stretch = kern
- s.stretch_order = 1
+ local s = getfield(g,"spec")
+ setfield(s,"stretch",kern)
+ setfield(s,"stretch_order",1)
return g
else
return new_kern(kern)
@@ -181,212 +202,398 @@ end
local function spec_injector(fillup,width,stretch,shrink)
if fillup then
local s = new_gluespec(width,2*stretch,2*shrink)
- s.stretch_order = 1
+ setfield(s,"stretch_order",1)
return s
else
return new_gluespec(width,stretch,shrink)
end
end
--- needs checking ... base mode / node mode -- also use insert_before/after etc
+-- a simple list injector, no components and such .. just disable ligatures in
+-- kern mode .. maybe not even hyphenate ... anyway, the next one is for simple
+-- sublists .. beware: we can have char -1
+
+local function inject_begin(boundary,prev,keeptogether,krn,ok) -- prev is a glyph
+ local id = getid(boundary)
+ if id == kern_code then
+ if getsubtype(boundary) == kerning_code or getattr(boundary,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ local next = getnext(boundary)
+ if not next or (getid(next) == glyph_code and keeptogether(prev,next)) then
+ inject = false
+ end
+ end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(boundary,"subtype",userkern_code)
+ setfield(boundary,"kern",getfield(boundary,"kern") + quaddata[getfont(prev)]*krn)
+ return boundary, true
+ end
+ end
+ elseif id == glyph_code then
+ if keeptogether and keeptogether(boundary,prev) then
+ -- keep 'm
+ else
+ local charone = getchar(prev)
+ if charone > 0 then
+ local font = getfont(boundary)
+ local chartwo = getchar(boundary)
+ local kerns = chardata[font][charone].kerns
+ local kern = new_kern((kerns and kerns[chartwo] or 0) + quaddata[font]*krn)
+ setfield(boundary,"prev",kern)
+ setfield(kern,"next",boundary)
+ return kern, true
+ end
+ end
+ end
+ return boundary, ok
+end
+
+local function inject_end(boundary,next,keeptogether,krn,ok)
+ local tail = find_node_tail(boundary)
+ local id = getid(tail)
+ if id == kern_code then
+ if getsubtype(tail) == kerning_code or getattr(tail,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ local prev = getprev(tail)
+ if getid(prev) == glyph_code and keeptogether(prev,next) then
+ inject = false
+ end
+ end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(tail,"subtype",userkern_code)
+ setfield(tail,"kern",getfield(tail,"kern") + quaddata[getfont(next)]*krn)
+ return boundary, true
+ end
+ end
+ elseif id == glyph_code then
+ if keeptogether and keeptogether(tail,next) then
+ -- keep 'm
+ else
+ local charone = getchar(tail)
+ if charone > 0 then
+ local font = getfont(tail)
+ local chartwo = getchar(next)
+ local kerns = chardata[font][charone].kerns
+ local kern = (kerns and kerns[chartwo] or 0) + quaddata[font]*krn
+ insert_node_after(boundary,tail,new_kern(kern))
+ return boundary, true
+ end
+ end
+ end
+ return boundary, ok
+end
-local function do_process(head,force) -- todo: glue so that we can fully stretch
- local start, done, lastfont = head, false, nil
+local function process_list(head,keeptogether,krn,font,okay)
+ local start = head
+ local prev = nil
+ local pid = nil
+ local kern = 0
+ local mark = font and markdata[font]
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if not font then
+ font = getfont(start)
+ mark = markdata[font]
+ kern = quaddata[font]*krn
+ end
+ if prev then
+ local char = getchar(start)
+ if mark[char] then
+ -- skip
+ elseif pid == kern_code then
+ if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ local prevprev = getprev(prev)
+ if getid(prevprev) == glyph_code and keeptogether(prevprev,start) then
+ inject = false
+ end
+ end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(prev,"subtype",userkern_code)
+ setfield(prev,"kern",getfield(prev,"kern") + kern)
+ okay = true
+ end
+ end
+ elseif pid == glyph_code then
+ if keeptogether and keeptogether(prev,start) then
+ -- keep 'm
+ else
+ local prevchar = getchar(prev)
+ local kerns = chardata[font][prevchar].kerns
+ -- if kerns then
+ -- print("it happens indeed, basemode kerns not yet injected")
+ -- end
+ insert_node_before(head,start,new_kern((kerns and kerns[char] or 0) + kern))
+ okay = true
+ end
+ end
+ end
+ end
+ if start then
+ prev = start
+ pid = id
+ start = getnext(start)
+ end
+ end
+ return head, okay, prev
+end
+
+local function closest_bound(b,get)
+ b = get(b)
+ if b and getid(b) ~= glue_code then
+ while b do
+ if not getattr(b,a_kerns) then
+ break
+ elseif getid(b) == glyph_code then
+ return b, getfont(b)
+ else
+ b = get(b)
+ end
+ end
+ end
+end
+
+function kerns.handler(head)
+ local head = tonut(head)
+ local start = head
+ local done = false
+ local lastfont = nil
local keepligature = kerns.keepligature
local keeptogether = kerns.keeptogether
- local fillup = false
+ local fillup = false
+ local bound = false
+ local prev = nil
+ local previd = nil
+ local prevchar = nil
+ local prevfont = nil
+ local prevmark = nil
while start do
- -- faster to test for attr first
- local attr = force or start[a_kerns]
+ -- fontkerns don't get the attribute but they always sit between glyphs so
+ -- are always a valid bound .. disc nodes also sometimes don't get them
+ local id = getid(start)
+ local attr = getattr(start,a_kerns)
if attr and attr > 0 then
- start[a_kerns] = unsetvalue
+ setattr(start,a_kerns,0) -- unsetvalue)
local krn = mapping[attr]
if krn == v_max then
- krn = .25
+ krn = .25
fillup = true
else
fillup = false
end
- if krn and krn ~= 0 then
- local id = start.id
- if id == glyph_code then
- lastfont = start.font
- local c = start.components
- if not c then
- -- fine
- elseif keepligature and keepligature(start) then
- -- keep 'm
- else
- c = do_process(c,attr)
+ if not krn or krn == 0 then
+ bound = false
+ elseif id == glyph_code then -- we could use the subtype ligature
+ local c = getfield(start,"components")
+ if not c then
+ -- fine
+ elseif keepligature and keepligature(start) then
+ -- keep 'm
+ c = nil
+ else
+ while c do
local s = start
- local p, n = s.prev, s.next
- local tail = find_node_tail(c)
+ local t = find_node_tail(c)
+ local p = getprev(s)
+ local n = getnext(s)
if p then
- p.next = c
- c.prev = p
+ setfield(p,"next",c)
+ setfield(c,"prev",p)
else
head = c
end
if n then
- n.prev = tail
+ setfield(n,"prev",t)
+ setfield(t,"next",n)
end
- tail.next = n
start = c
- s.components = nil
- -- we now leak nodes !
- -- free_node(s)
- done = true
+ setfield(s,"components",nil)
+ free_node(s)
+ c = getfield(start,"components")
end
- local prev = start.prev
- if not prev then
- -- skip
- elseif markdata[lastfont][start.char] then
- -- skip
- else
- local pid = prev.id
- if not pid then
- -- nothing
- elseif pid == kern_code then
- if prev.subtype == kerning_code or prev[a_fontkern] then
- if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
- -- keep 'm
- else
- -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
- prev.subtype = userkern_code
- prev.kern = prev.kern + quaddata[lastfont]*krn -- here
- done = true
- end
- end
- elseif pid == glyph_code then
- if prev.font == lastfont then
- local prevchar, lastchar = prev.char, start.char
- if keeptogether and keeptogether(prev,start) then
- -- keep 'm
- else
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- insert_node_before(head,start,kern_injector(fillup,krn))
- done = true
- end
- else
- krn = quaddata[lastfont]*krn -- here
- insert_node_before(head,start,kern_injector(fillup,krn))
- done = true
- end
- elseif pid == disc_code then
- -- a bit too complicated, we can best not copy and just calculate
- -- but we could have multiple glyphs involved so ...
- local disc = prev -- disc
- local prv, nxt = disc.prev, disc.next
- if disc.subtype == discretionary_code then
- -- maybe we should forget about this variant as there is no glue
- -- possible
- local pre, post, replace = disc.pre, disc.post, disc.replace
- if pre and prv then -- must pair with start.prev
- -- this one happens in most cases
- local before = copy_node(prv)
- pre.prev = before
- before.next = pre
- before.prev = nil
- pre = do_process(before,attr)
- pre = pre.next
- pre.prev = nil
- disc.pre = pre
- free_node(before)
- end
- if post and nxt then -- must pair with start
- local after = copy_node(nxt)
- local tail = find_node_tail(post)
- tail.next = after
- after.prev = tail
- after.next = nil
- post = do_process(post,attr)
- tail.next = nil
- disc.post = post
- free_node(after)
- end
- if replace and prv and nxt then -- must pair with start and start.prev
- local before = copy_node(prv)
- local after = copy_node(nxt)
- local tail = find_node_tail(replace)
- replace.prev = before
- before.next = replace
- before.prev = nil
- tail.next = after
- after.prev = tail
- after.next = nil
- replace = do_process(before,attr)
- replace = replace.next
- replace.prev = nil
- after.prev.next = nil
- disc.replace = replace
- free_node(after)
- free_node(before)
- elseif prv and prv.id == glyph_code and prv.font == lastfont then
- local prevchar, lastchar = prv.char, start.char
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
- else
- krn = quaddata[lastfont]*krn -- here
- disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
- end
- else
- -- this one happens in most cases: automatic (-), explicit (\-), regular (patterns)
- if prv and prv.id == glyph_code and prv.font == lastfont then
- local prevchar, lastchar = prv.char, start.char
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- else
- krn = quaddata[lastfont]*krn -- here
- end
- insert_node_before(head,start,kern_injector(fillup,krn))
+ end
+ local char = getchar(start)
+ local font = getfont(start)
+ local mark = markdata[font]
+ if not bound then
+ -- yet
+ elseif mark[char] then
+ -- skip
+ elseif previd == kern_code then
+ if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ if previd == glyph_code and keeptogether(prev,start) then
+ inject = false
end
end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(prev,"subtype",userkern_code)
+ setfield(prev,"kern",getfield(prev,"kern") + quaddata[font]*krn)
+ done = true
+ end
end
- elseif id == glue_code then
- local subtype = start.subtype
- if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
- local s = start.spec
- local w = s.width
- if w > 0 then
- local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
- start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
+ elseif previd == glyph_code then
+ if prevfont == font then
+ if keeptogether and keeptogether(prev,start) then
+ -- keep 'm
+ else
+ local kerns = chardata[font][prevchar].kerns
+ local kern = (kerns and kerns[char] or 0) + quaddata[font]*krn
+ insert_node_before(head,start,kern_injector(fillup,kern))
done = true
end
+ else
+ insert_node_before(head,start,kern_injector(fillup,quaddata[font]*krn))
+ done = true
+ end
+ end
+ prev = start
+ prevchar = char
+ prevfont = font
+ prevmark = mark
+ previd = id
+ bound = true
+ elseif id == disc_code then
+ local prev, next, pglyph, nglyph -- delayed till needed
+ local subtype = getsubtype(start)
+ if subtype == automatic_code then
+ -- this is kind of special, as we have already injected the
+ -- previous kern
+ local prev = getprev(start)
+ local pglyph = prev and getid(prev) == glyph_code
+ languages.expand(start,pglyph and prev)
+ -- we can have a different start now
+ elseif subtype ~= discretionary_code then
+ prev = getprev(start)
+ pglyph = prev and getid(prev) == glyph_code
+ languages.expand(start,pglyph and prev)
+ end
+ local pre = getfield(start,"pre")
+ local post = getfield(start,"post")
+ local replace = getfield(start,"replace")
+ -- we really need to reassign the fields as luatex keeps track of
+ -- the tail in a temp preceding head .. kind of messy so we might
+ -- want to come up with a better solution some day like a real
+ -- pretail etc fields in a disc node
+ --
+ -- maybe i'll merge the now split functions
+ if pre then
+ local okay = false
+ if not prev then
+ prev = prev or getprev(start)
+ pglyph = prev and getid(prev) == glyph_code
+ end
+ if pglyph then
+ pre, okay = inject_begin(pre,prev,keeptogether,krn,okay)
+ end
+ pre, okay = process_list(pre,keeptogether,krn,false,okay)
+ if okay then
+ setfield(start,"pre",pre)
+ done = true
+ end
+ end
+ if post then
+ local okay = false
+ if not next then
+ next = getnext(start)
+ nglyph = next and getid(next) == glyph_code
+ end
+ if nglyph then
+ post, okay = inject_end(post,next,keeptogether,krn,okay)
end
- elseif id == kern_code then
- -- if start.subtype == kerning_code then -- handle with glyphs
- -- local sk = start.kern
- -- if sk > 0 then
- -- start.kern = sk*krn
- -- done = true
- -- end
- -- end
- elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
- local p = start.prev
- if p and p.id ~= glue_code then
- insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
+ post, okay = process_list(post,keeptogether,krn,false,okay)
+ if okay then
+ setfield(start,"post",post)
done = true
end
- local n = start.next
- if n and n.id ~= glue_code then
- insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
+ end
+ if replace then
+ local okay = false
+ if not prev then
+ prev = prev or getprev(start)
+ pglyph = prev and getid(prev) == glyph_code
+ end
+ if pglyph then
+ replace, okay = inject_begin(replace,prev,keeptogether,krn,okay)
+ end
+ if not next then
+ next = getnext(start)
+ nglyph = next and getid(next) == glyph_code
+ end
+ if nglyph then
+ replace, okay = inject_end(replace,next,keeptogether,krn,okay)
+ end
+ replace, okay = process_list(replace,keeptogether,krn,false,okay)
+ if okay then
+ setfield(start,"replace",replace)
done = true
end
- elseif id == math_code then
- start = end_of_math(start)
+ elseif prevfont then
+ setfield(start,"replace",new_kern(quaddata[prevfont]*krn))
+ done = true
end
+ bound = false
+ elseif id == kern_code then
+ bound = getsubtype(start) == kerning_code or getattr(start,a_fontkern)
+ prev = start
+ previd = id
+ elseif id == glue_code then
+ local subtype = getsubtype(start)
+ if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
+ local s = getfield(start,"spec")
+ local w = getfield(s,"width")
+ if w > 0 then
+ local width = w+gluefactor*w*krn
+ local stretch = getfield(s,"stretch")
+ local shrink = getfield(s,"shrink")
+ setfield(start,"spec",spec_injector(fillup,width,stretch*width/w,shrink*width/w))
+ done = true
+ end
+ end
+ bound = false
+ elseif id == hlist_code or id == vlist_code then
+ local subtype = getsubtype(start)
+ if subtype == user_list_code or subtype == box_list_code then
+ -- special case
+ local b, f = closest_bound(start,getprev)
+ if b then
+ insert_node_before(head,start,kern_injector(fillup,quaddata[f]*krn))
+ done = true
+ end
+ local b, f = closest_bound(start,getnext)
+ if b then
+ insert_node_after(head,start,kern_injector(fillup,quaddata[f]*krn))
+ done = true
+ end
+ end
+ bound = false
+ elseif id == math_code then
+ start = end_of_math(start)
+ bound = false
end
- end
- if start then
- start = start.next
+ if start then
+ start = getnext(start)
+ end
+ elseif id == kern_code then
+ bound = getsubtype(start) == kerning_code or getattr(start,a_fontkern)
+ prev = start
+ previd = id
+ start = getnext(start)
+ else
+ bound = false
+ start = getnext(start)
end
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -413,10 +620,11 @@ function kerns.set(factor)
return factor
end
-function kerns.handler(head)
- return do_process(head) -- no direct map, because else fourth argument is tail == true
-end
-
-- interface
-commands.setcharacterkerning = kerns.set
+interfaces.implement {
+ name = "setcharacterkerning",
+ actions = kerns.set,
+ arguments = "string"
+}
+
diff --git a/tex/context/base/typo-krn.mkiv b/tex/context/base/typo-krn.mkiv
index 3522c02fc..6d6126542 100644
--- a/tex/context/base/typo-krn.mkiv
+++ b/tex/context/base/typo-krn.mkiv
@@ -35,13 +35,10 @@
\typo_kerning_set
\fi}
-% \def\typo_kerning_set
-% {\ctxcommand{setcharacterkerning(\characterkerningparameter\c!factor)}}
-
\def\typo_kerning_set
{\usecharacterkerningstyleandcolor\c!style\c!color % goodie, maybe also strut
\useaddfontfeatureparameter\characterkerningparameter
- \ctxcommand{setcharacterkerning("\characterkerningparameter\c!factor")}}
+ \clf_setcharacterkerning{\characterkerningparameter\c!factor}}
\unexpanded\def\resetcharacterkerning % fast one
{\attribute\kernattribute\attributeunsetvalue}
@@ -70,7 +67,7 @@
% \definecharacterkerning [\v!letterspacing ] [\v!kerncharacters] [\c!features=letterspacing]
%
% \unexpanded\def\kerncharacters
-% {\doifnextoptionalelse\typo_kerning_apply_yes\typo_kerning_apply_nop}
+% {\doifnextoptionalcselse\typo_kerning_apply_yes\typo_kerning_apply_nop}
%
% \def\typo_kerning_apply_yes[#1]%
% {\groupedcommand{\typo_kerning_apply_yes_indeed{#1}}\donothing}
@@ -89,9 +86,9 @@
\appendtoks
\setuevalue{\currentcharacterkerning}%
- {\doifnextoptionalelse
+ {\doifelsenextoptional
{\typo_kerning_apply_yes{\currentcharacterkerning}}%
- {\typo_kerning_apply_nop{\currentcharacterkerning}}}
+ {\typo_kerning_apply_nop{\currentcharacterkerning}}}%
\to \everydefinecharacterkerning
\unexpanded\def\typo_kerning_apply_yes#1[#2]%
diff --git a/tex/context/base/typo-lan.lua b/tex/context/base/typo-lan.lua
index 50927f744..c42d2119e 100644
--- a/tex/context/base/typo-lan.lua
+++ b/tex/context/base/typo-lan.lua
@@ -67,6 +67,8 @@ function frequencies.averagecharwidth(language,font)
return frequencycache[language or "en"][font or currentfont()]
end
-function commands.averagecharwidth(language,font)
- context(frequencycache[language or "en"][font or currentfont()])
-end
+interfaces.implement {
+ name = "averagecharwidth",
+ actions = { frequencies.averagecharwidth, context },
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-lan.mkiv b/tex/context/base/typo-lan.mkiv
index bb4ed2042..8b633957b 100644
--- a/tex/context/base/typo-lan.mkiv
+++ b/tex/context/base/typo-lan.mkiv
@@ -30,7 +30,7 @@
\def\charwidthlanguage{\currentmainlanguage}
-\def\averagecharwidth{\dimexpr\ctxcommand{averagecharwidth("\charwidthlanguage")}\scaledpoint\relax}
+\def\averagecharwidth{\dimexpr\clf_averagecharwidth{\charwidthlanguage}\scaledpoint\relax}
\protect
diff --git a/tex/context/base/typo-lig.mkiv b/tex/context/base/typo-lig.mkiv
new file mode 100644
index 000000000..6171441d6
--- /dev/null
+++ b/tex/context/base/typo-lig.mkiv
@@ -0,0 +1,31 @@
+%D \module
+%D [ file=typo-lig,
+%D version=2014.12.01,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Ligatures,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Ligatures}
+
+%D This macro is mostly used for testing an attribute that blocks ligatures
+%D in the nodemode handler. It is not really used in practice, and mostly
+%D there as a plug in the generic font handler.
+
+\unprotect
+
+\definesystemattribute[noligature][public]
+
+\unexpanded\def\noligature#1%
+ {\dontleavehmode
+ \begingroup
+ \attribute\noligatureattribute\plusone
+ #1%
+ \endgroup}
+
+\protect \endinput
diff --git a/tex/context/base/typo-man.lua b/tex/context/base/typo-man.lua
new file mode 100644
index 000000000..6c6d7926f
--- /dev/null
+++ b/tex/context/base/typo-man.lua
@@ -0,0 +1,113 @@
+if not modules then modules = { } end modules ['typo-man'] = {
+ version = 1.001,
+ comment = "companion to typo-prc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ -- for testing stand-alone
+ require("char-def")
+ require("char-ini")
+end
+
+local lpegmatch = lpeg.match
+local P, R, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
+local global = global or _G
+
+local methods = {
+ uppercase = characters.upper,
+ lowercase = characters.lower,
+ Word = converters.Word,
+ Words = converters.Words,
+}
+
+local function nothing(s) return s end -- we already have that one somewhere
+
+-- table.setmetatableindex(methods,function(t,k)
+-- t[k] = nothing
+-- return nothing
+-- end)
+
+local splitter = lpeg.tsplitat(".")
+
+table.setmetatableindex(methods,function(t,k)
+ local s = lpegmatch(splitter,k)
+ local v = global
+ for i=1,#s do
+ v = v[s[i]]
+ if not v then
+ break
+ end
+ end
+ if not v or v == global then
+ v = nothing
+ end
+ t[k] = v
+ return v
+end)
+
+local whitespace = lpeg.patterns.whitespace^0
+local separator = whitespace * P("->") * whitespace
+local pair = C((1-separator)^1) * separator * C(P(1)^0)
+local list = Ct((C((1-separator)^1) * separator)^1) * C(P(1)^0)
+
+local pattern = Carg(1) * pair / function(methods,operation,str)
+ return methods[operation](str) or str
+end
+
+local function apply(str,m)
+ return lpegmatch(pattern,str,1,m or methods) or str
+end
+
+local function splitspecification(field,m)
+ local m, f = lpegmatch(list,field,1,m or methods)
+ if m then
+ return m, f or field
+ else
+ return nil, field
+ end
+end
+
+local function applyspecification(actions,str)
+ if actions then
+ for i=1,#actions do
+ local action = methods[actions[i]]
+ if action then
+ str = action(str) or str
+ end
+ end
+ end
+ return str
+end
+
+if not typesetters then typesetters = { } end
+
+typesetters.manipulators = {
+ methods = methods,
+ apply = apply,
+ patterns = {
+ pair = pair,
+ list = list,
+ },
+ splitspecification = splitspecification,
+ applyspecification = applyspecification,
+}
+
+local pattern = Cs((1 - P(1) * P(-1))^0 * (P(".")/"" + P(1)))
+
+methods.stripperiod = function(str) return lpegmatch(pattern,str) end
+
+-- print(apply("hans"))
+-- print(apply("uppercase->hans"))
+-- print(apply("string.reverse -> hans"))
+-- print(apply("uppercase->hans",{ uppercase = string.reverse } ))
+
+-- print(applyspecification(splitspecification("hans")))
+-- print(applyspecification(splitspecification("lowercase->uppercase->hans")))
+-- print(applyspecification(splitspecification("uppercase->stripperiod->hans.")))
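+--
+-- For reference, a sketch of the results one would expect from the calls above,
+-- assuming the default methods table defined in this file:
+--
+-- apply("hans")                    -- "hans" (no operation, input returned)
+-- apply("uppercase->hans")         -- "HANS"
+-- apply("string.reverse -> hans")  -- "snah" (dotted names resolve via the metatable)
+--
+-- applyspecification(splitspecification("uppercase->stripperiod->hans.")) -- "HANS"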
+
+function commands.manipulated(str)
+ context(apply(str))
+end
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 85d5c85a8..fed9e0745 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -76,6 +76,8 @@ if not modules then modules = { } end modules ['typo-mar'] = {
local format, validstring = string.format, string.valid
local insert, remove = table.insert, table.remove
local setmetatable, next = setmetatable, next
+local formatters = string.formatters
+local toboolean = toboolean
local attributes, nodes, node, variables = attributes, nodes, node, variables
@@ -114,14 +116,32 @@ local v_continue = variables.continue
local v_first = variables.first
local v_text = variables.text
local v_column = variables.column
-
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
-local traverse_id = node.traverse_id
-local free_node_list = node.flush_list
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
+local v_line = variables.line
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local copy_node_list = nuts.copy_list
+local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
+local traverse_id = nuts.traverse_id
+local free_node_list = nuts.flush_list
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local linked_nodes = nuts.linked
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+local getlist = nuts.getlist
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -144,33 +164,31 @@ local userdefined_code = whatsitcodes.userdefined
local dir_code = whatsitcodes.dir
local localpar_code = whatsitcodes.localpar
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-local new_stretch = nodepool.stretch
local new_usernumber = nodepool.usernumber
local new_latelua = nodepool.latelua
+local lateluafunction = nodepool.lateluafunction
+
local texgetcount = tex.getcount
local texgetdimen = tex.getdimen
-local texgetbox = tex.getbox
local texget = tex.get
-local points = number.points
-
local isleftpage = layouts.status.isleftpage
-local registertogether = builders.paragraphs.registertogether
-
-local jobpositions = job.positions
-local getposition = jobpositions.position
+local registertogether = builders.paragraphs.registertogether -- tonode
local a_margindata = attributes.private("margindata")
local inline_mark = nodepool.userids["margins.inline"]
-local margins = { }
+local jobpositions = job.positions
+local getposition = jobpositions.get
+local setposition = jobpositions.set
+local getreserved = jobpositions.getreserved
+
+local margins = { }
typesetters.margins = margins
local locations = { v_left, v_right, v_inner, v_outer } -- order might change
@@ -233,7 +251,7 @@ local function showstore(store,banner,location)
if next(store) then
for i, si in table.sortedpairs(store) do
local si =store[i]
- report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
+ report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(getlist(si.box)))
end
else
report_margindata("%s: nothing stored in location %a",banner,location)
@@ -242,7 +260,7 @@ end
function margins.save(t)
setmetatable(t,defaults)
- local content = texgetbox(t.number)
+ local content = getbox(t.number)
local location = t.location
local category = t.category
local inline = t.inline
@@ -310,11 +328,11 @@ function margins.save(t)
-- nice is to make a special status table mechanism
local leftmargindistance = texgetdimen("naturalleftmargindistance")
local rightmargindistance = texgetdimen("naturalrightmargindistance")
- local strutbox = texgetbox("strutbox")
- t.strutdepth = strutbox.depth
- t.strutheight = strutbox.height
- t.leftskip = texget("leftskip").width -- we're not in forgetall
- t.rightskip = texget("rightskip").width -- we're not in forgetall
+ local strutbox = getbox("strutbox")
+ t.strutdepth = getfield(strutbox,"depth")
+ t.strutheight = getfield(strutbox,"height")
+ t.leftskip = getfield(texget("leftskip"),"width") -- we're not in forgetall
+ t.rightskip = getfield(texget("rightskip"),"width") -- we're not in forgetall
t.leftmargindistance = leftmargindistance -- todo:layoutstatus table
t.rightmargindistance = rightmargindistance
t.leftedgedistance = texgetdimen("naturalleftedgedistance")
@@ -327,7 +345,7 @@ function margins.save(t)
--
-- t.realpageno = texgetcount("realpageno")
if inline then
- context(new_usernumber(inline_mark,nofsaved))
+ context(tonode(new_usernumber(inline_mark,nofsaved))) -- or use a normal node
store[nofsaved] = t -- no insert
nofinlined = nofinlined + 1
else
@@ -352,6 +370,18 @@ end
local status, nofstatus = { }, 0
+local f_anchor = formatters["_plib_.set('md:h',%i,{x=true,c=true})"]
+
+local function setanchor(h_anchor)
+ return new_latelua(f_anchor(h_anchor))
+end
+
+-- local t_anchor = { x = true, c = true }
+--
+-- local function setanchor(h_anchor)
+-- return lateluafunction(function() setposition("md:h",h_anchor,t_anchor) end)
+-- end
+
local function realign(current,candidate)
local location = candidate.location
local margin = candidate.margin
@@ -404,7 +434,7 @@ local function realign(current,candidate)
-- we assume that list is a hbox, otherwise we had to take the whole current
-- in order to get it right
- current.width = 0
+ setfield(current,"width",0)
local anchornode, move_x
-- this mess is needed for alignments (combinations) so we use that
@@ -418,12 +448,12 @@ local function realign(current,candidate)
anchor = v_text
end
if inline or anchor ~= v_text or candidate.psubtype == alignment_code then
- -- the alignment_code check catches margintexts ste before a tabulate
+ -- the alignment_code check catches margintexts before a tabulate
h_anchors = h_anchors + 1
- anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors))
- local blob = jobpositions.get('md:h', h_anchors)
+ anchornode = setanchor(h_anchors)
+ local blob = getposition('md:h',h_anchors)
if blob then
- local reference = jobpositions.getreserved(anchor,blob.c)
+ local reference = getreserved(anchor,blob.c)
if reference then
if location == v_left then
move_x = (reference.x or 0) - (blob.x or 0)
@@ -446,9 +476,9 @@ local function realign(current,candidate)
report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
end
end
-
- current.list = hpack_nodes(anchornode .. new_kern(-delta) .. current.list .. new_kern(delta))
- current.width = 0
+ local list = hpack_nodes(linked_nodes(anchornode,new_kern(-delta),getlist(current),new_kern(delta)))
+ setfield(current,"list",list)
+ setfield(current,"width",0)
end
local function realigned(current,a)
@@ -478,24 +508,36 @@ end
-- resetstacked()
-function margins.ha(tag) -- maybe l/r keys instead of left/right keys
+local function ha(tag) -- maybe l/r keys instead of left/right keys
local p = cache[tag]
p.p = true
p.y = true
- jobpositions.set('md:v',tag,p)
+ setposition('md:v',tag,p)
cache[tag] = nil
end
-local function markovershoot(current)
+margins.ha = ha
+
+local f_anchor = formatters["typesetters.margins.ha(%s)"]
+local function setanchor(v_anchor)
+ return new_latelua(f_anchor(v_anchor))
+end
+
+-- local function setanchor(v_anchor) -- freezes the global here
+-- return lateluafunction(function() ha(v_anchor) end)
+-- end
+
+local function markovershoot(current) -- todo: only when offset > line
v_anchors = v_anchors + 1
cache[v_anchors] = stacked
- local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line
- current.list = hpack_nodes(anchor .. current.list)
+ local anchor = setanchor(v_anchors)
+ local list = hpack_nodes(linked_nodes(anchor,getlist(current)))
+ setfield(current,"list",list)
end
local function getovershoot(location)
- local p = jobpositions.get("md:v",v_anchors)
- local c = jobpositions.get("md:v",v_anchors+1)
+ local p = getposition("md:v",v_anchors)
+ local c = getposition("md:v",v_anchors+1)
if p and c and p.p and p.p == c.p then
local distance = p.y - c.y
local offset = p[location] or 0
@@ -512,10 +554,13 @@ end
local function inject(parent,head,candidate)
local box = candidate.box
- local width = box.width
- local height = box.height
- local depth = box.depth
- local shift = box.shift
+ if not box then
+ return head, nil, false -- we can have empty texts
+ end
+ local width = getfield(box,"width")
+ local height = getfield(box,"height")
+ local depth = getfield(box,"depth")
+ local shift = getfield(box,"shift")
local stack = candidate.stack
local location = candidate.location
local method = candidate.method
@@ -524,13 +569,18 @@ local function inject(parent,head,candidate)
local baseline = candidate.baseline
local strutheight = candidate.strutheight
local strutdepth = candidate.strutdepth
- local psubtype = parent.subtype
+ local psubtype = getsubtype(parent)
local offset = stacked[location]
local firstonstack = offset == false or offset == nil
nofstatus = nofstatus + 1
nofdelayed = nofdelayed + 1
status[nofstatus] = candidate
-- yet untested
+ baseline = tonumber(baseline)
+ if not baseline then
+ baseline = toboolean(baseline)
+ end
+ --
if baseline == true then
baseline = false
-- hbox vtop
@@ -546,7 +596,7 @@ local function inject(parent,head,candidate)
end
end
candidate.width = width
- candidate.hsize = parent.width -- we can also pass textwidth
+ candidate.hsize = getfield(parent,"width") -- we can also pass textwidth
candidate.psubtype = psubtype
if trace_margindata then
report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
@@ -557,10 +607,10 @@ local function inject(parent,head,candidate)
-- offset = offset + height
end
if stack == v_yes then
- offset = offset + candidate.dy
+ offset = offset + candidate.dy -- always
shift = shift + offset
elseif stack == v_continue then
- offset = offset + candidate.dy
+ offset = offset + candidate.dy -- always
if firstonstack then
offset = offset + getovershoot(location)
end
@@ -573,13 +623,23 @@ local function inject(parent,head,candidate)
-- experimental.
-- -- --
if method == v_top then
- local delta = height - parent.height
+ local delta = height - getfield(parent,"height")
if trace_margindata then
report_margindata("top aligned by %p",delta)
end
- if delta < candidate.threshold then
+ if delta < candidate.threshold then -- often we need a negative threshold here
shift = shift + voffset + delta
end
+ elseif method == v_line then
+ if getfield(parent,"depth") == 0 then
+ local delta = height - getfield(parent,"height")
+ if trace_margindata then
+ report_margindata("top aligned by %p (no depth)",delta)
+ end
+ if delta < candidate.threshold then -- often we need a negative threshold here
+ shift = shift + voffset + delta
+ end
+ end
elseif method == v_first then
if baseline then
shift = shift + voffset + height - baseline -- option
@@ -616,22 +676,23 @@ local function inject(parent,head,candidate)
shift = shift + delta
offset = offset + delta
end
- box.shift = shift
- box.width = 0
+ setfield(box,"shift",shift)
+ setfield(box,"width",0)
if not head then
head = box
- elseif head.id == whatsit_code and head.subtype == localpar_code then
+ elseif getid(head) == whatsit_code and getsubtype(head) == localpar_code then
-- experimental
- if head.dir == "TRT" then
- box.list = hpack_nodes(new_kern(candidate.hsize) .. box.list .. new_kern(-candidate.hsize))
+ if getfield(head,"dir") == "TRT" then
+ local list = hpack_nodes(linked_nodes(new_kern(candidate.hsize),getlist(box),new_kern(-candidate.hsize)))
+ setfield(box,"list",list)
end
insert_node_after(head,head,box)
else
- head.prev = box
- box.next = head
+ setfield(head,"prev",box)
+ setfield(box,"next",head)
head = box
end
- box[a_margindata] = nofstatus
+ setattr(box,a_margindata,nofstatus)
if trace_margindata then
report_margindata("injected, location %a, shift %p",location,shift)
end
@@ -656,12 +717,12 @@ local function flushinline(parent,head)
local current = head
local done = false
local continue = false
- local room, don, con
+ local room, don, con, list
while current and nofinlined > 0 do
- local id = current.id
+ local id = getid(current)
if id == whatsit_code then
- if current.subtype == userdefined_code and current.user_id == inline_mark then
- local n = current.value
+ if getsubtype(current) == userdefined_code and getfield(current,"user_id") == inline_mark then
+ local n = getfield(current,"value")
local candidate = inlinestore[n]
if candidate then -- no vpack, as we want to realign
inlinestore[n] = nil
@@ -674,11 +735,12 @@ local function flushinline(parent,head)
end
elseif id == hlist_code or id == vlist_code then
-- optional (but sometimes needed)
- current.list, don, con = flushinline(current,current.list)
+ list, don, con = flushinline(current,getlist(current))
+ setfield(current,"list",list)
continue = continue or con
done = done or don
end
- current = current.next
+ current = getnext(current)
end
return head, done, continue
end
@@ -686,7 +748,7 @@ end
local a_linenumber = attributes.private('linenumber')
local function flushed(scope,parent) -- current is hlist
- local head = parent.list
+ local head = getlist(parent)
local done = false
local continue = false
local room, con, don
@@ -695,33 +757,40 @@ local function flushed(scope,parent) -- current is hlist
for l=1,#locations do
local location = locations[l]
local store = displaystore[category][location][scope]
- while true do
- local candidate = remove(store,1) -- brr, local stores are sparse
- if candidate then -- no vpack, as we want to realign
- head, room, con = inject(parent,head,candidate)
- done = true
- continue = continue or con
- nofstored = nofstored - 1
- registertogether(parent,room)
- else
- break
+ if store then
+ while true do
+ local candidate = remove(store,1) -- brr, local stores are sparse
+ if candidate then -- no vpack, as we want to realign
+ head, room, con = inject(parent,head,candidate)
+ done = true
+ continue = continue or con
+ nofstored = nofstored - 1
+ if room then
+ registertogether(tonode(parent),room) -- !! tonode
+ end
+ else
+ break
+ end
end
+ else
+ -- report_margindata("fatal error: invalid category %a",category or "?")
end
end
end
if nofinlined > 0 then
if done then
- parent.list = head
+ setfield(parent,"list",head)
end
head, don, con = flushinline(parent,head)
continue = continue or con
done = done or don
end
if done then
- local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
- parent.list = hpack_nodes(head,parent.width,"exactly")
+ local a = getattr(head,a_linenumber) -- hack .. we need a more decent critical attribute inheritance mechanism
+ local l = hpack_nodes(head,getfield(parent,"width"),"exactly")
+ setfield(parent,"list",l)
if a then
- parent.list[a_linenumber] = a
+ setattr(l,a_linenumber,a)
end
-- resetstacked()
end
@@ -736,14 +805,15 @@ local function handler(scope,head,group)
if trace_margindata then
report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
end
+ head = tonut(head)
local current = head
local done = false
while current do
- local id = current.id
- if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
+ local id = getid(current)
+ if (id == vlist_code or id == hlist_code) and not getattr(current,a_margindata) then
local don, continue = flushed(scope,current)
if don then
- current[a_margindata] = 0 -- signal to prevent duplicate processing
+ setattr(current,a_margindata,0) -- signal to prevent duplicate processing
if continue then
markovershoot(current)
end
@@ -753,12 +823,12 @@ local function handler(scope,head,group)
done = true
end
end
- current = current.next
+ current = getnext(current)
end
-- if done then
resetstacked() -- why doesn't done work ok here?
-- end
- return head, done
+ return tonode(head), done
else
return head, false
end
@@ -789,15 +859,15 @@ function margins.globalhandler(head,group) -- check group
end
return head, false
elseif group == "hmode_par" then
- return handler("global",head,group)
+ return handler(v_global,head,group)
elseif group == "vmode_par" then -- experiment (for alignments)
- return handler("global",head,group)
+ return handler(v_global,head,group)
-- this needs checking as we then get quite some one liners to process and
-- we cannot look ahead then:
elseif group == "box" then -- experiment (for alignments)
- return handler("global",head,group)
+ return handler(v_global,head,group)
elseif group == "alignment" then -- experiment (for alignments)
- return handler("global",head,group)
+ return handler(v_global,head,group)
else
if trace_margingroup then
report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
@@ -811,11 +881,11 @@ local function finalhandler(head)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- local a = current[a_margindata]
+ local a = getattr(current,a_margindata)
if not a or a == 0 then
- finalhandler(current.list)
+ finalhandler(getlist(current))
elseif realigned(current,a) then
done = true
if nofdelayed == 0 then
@@ -823,9 +893,9 @@ local function finalhandler(head)
end
end
elseif id == vlist_code then
- finalhandler(current.list)
+ finalhandler(getlist(current))
end
- current = current.next
+ current = getnext(current)
end
return head, done
else
@@ -838,7 +908,10 @@ function margins.finalhandler(head)
-- if trace_margindata then
-- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
-- end
- return finalhandler(head)
+ head = tonut(head)
+ local head, done = finalhandler(head)
+ head = tonode(head)
+ return head, done
else
return head, false
end
@@ -877,3 +950,33 @@ statistics.register("margin data", function()
return nil
end
end)
+
+interfaces.implement {
+ name = "savemargindata",
+ actions = margins.save,
+ arguments = {
+ {
+ { "location" },
+ { "method" },
+ { "category" },
+ { "name" },
+ { "scope" },
+ { "number", "integer" },
+ { "margin" },
+ { "distance", "dimen" },
+ { "hoffset", "dimen" },
+ { "voffset", "dimen" },
+ { "dy", "dimen" },
+ { "bottomspace", "dimen" },
+ { "baseline"}, -- dimen or string or
+ { "threshold", "dimen" },
+ { "inline", "boolean" },
+ { "anchor" },
+ -- { "leftskip", "dimen" },
+ -- { "rightskip", "dimen" },
+ { "align" },
+ { "line", "integer" },
+ { "stack" },
+ }
+ }
+}
diff --git a/tex/context/base/typo-mar.mkiv b/tex/context/base/typo-mar.mkiv
index 595cf3756..d5869b459 100644
--- a/tex/context/base/typo-mar.mkiv
+++ b/tex/context/base/typo-mar.mkiv
@@ -14,6 +14,8 @@
%C details.
% todo: tags
+% todo: force inline with option (saves pos)
+% todo: margintitle (also less position then)
\writestatus{loading}{ConTeXt Typesetting Macros / Margindata}
@@ -113,7 +115,7 @@
\c!color=, % maybe \maintextcolor
% \c!name=,
% \c!category=,
- \c!threshold=.25ex,
+ \c!threshold=.25\exheight,
\c!margin=\v!normal,
\c!scope=\v!global,
\c!width=,
@@ -163,7 +165,7 @@
\unexpanded\def\typo_margins_data_synchronize
{\doforcedtrackpagestate\s!margintext\nofmargintexts % includes increment
\docheckpagestate\s!margintext\nofmargintexts
- %\doifrightpagestateelse\s!margintext\nofmargintexts\relax\relax
+ %\doifelserightpagestate\s!margintext\nofmargintexts\relax\relax
\realpageno\realpagestateno
\swapmargins}
@@ -199,13 +201,13 @@
\strc_references_set_page_only_destination_box_attribute\currentmarginreference\currentmarginreference
\fi
\edef\currentmargindatastrut{\margindataparameter\c!strut}%
- \dostarttagged\t!margintext\currentmargindata
+ \dostarttaggedchained\t!margintext\currentmargindata\??margindata
\ifcsname\currentmarginframedhash\s!parent\endcsname
\setbox\nextbox\hbox \currentmarginreference \bgroup
\the\everymargindatacontent
\usemargindatastyleandcolor\c!style\c!color
\setupcurrentmarginframed[\c!location=\v!normal,#textparameters]%
-\typo_margins_data_synchronize
+ \typo_margins_data_synchronize
\inheritedmarginframedframed\bgroup
\ifx\currentmargindatastrut\empty \else
\synchronizestrut\currentmargindatastrut
@@ -221,7 +223,7 @@
\edef\currentmargindatawidth{\margindataparameter\c!width}%
\ifx\currentmargindatawidth\empty
\setbox\nextbox\hbox \currentmarginreference \bgroup
-\typo_margins_data_synchronize
+ \typo_margins_data_synchronize
\the\everymargindatacontent
\usemargindatastyleandcolor\c!style\c!color
\ifx\currentmargindatastrut\empty \else
@@ -235,7 +237,7 @@
\let\currentmarginfirstheight\empty
\else
\setbox\nextbox\hbox \currentmarginreference \bgroup
-\typo_margins_data_synchronize
+ \typo_margins_data_synchronize
\dosetraggedcommand{\margindataparameter\c!align}%
\vtop \bgroup
\the\everymargindatacontent
@@ -258,49 +260,49 @@
\fi
\ifdone
\anch_positions_initialize % we use positions at the lua end
- \ctxlua{typesetters.margins.save{
- location = "\margindataparameter\c!location",
- method = "\margindataparameter\c!method",
- category = "\margindataparameter\c!category",
- name = "\margindataparameter\c!name",
- margin = "\margindataparameter\c!margin", % local normal margin edge
- distance = \number\dimexpr\margindataparameter\c!distance,
- hoffset = \number\dimexpr\margindataparameter\c!hoffset,
- voffset = \number\dimexpr\margindataparameter\c!voffset,
- dy = \number\dimexpr\margindataparameter\c!dy,
- bottomspace = \number\dimexpr\margindataparameter\c!bottomspace,
+ \clf_savemargindata
+ location {\margindataparameter\c!location}%
+ method {\margindataparameter\c!method}%
+ category {\margindataparameter\c!category}%
+ name {\margindataparameter\c!name}%
+ scope {\margindataparameter\c!scope}%
+ number \nextbox
+ margin {\margindataparameter\c!margin}% local normal margin edge
+ distance \dimexpr\margindataparameter\c!distance\relax
+ hoffset \dimexpr\margindataparameter\c!hoffset\relax
+ voffset \dimexpr\margindataparameter\c!voffset\relax
+ dy \dimexpr\margindataparameter\c!dy\relax
+ bottomspace \dimexpr\margindataparameter\c!bottomspace\relax
\ifx\currentmarginfirstheight\empty \else
- baseline = \currentmarginfirstheight,
+ baseline {\currentmarginfirstheight}%
\fi
- threshold = \number\dimexpr\margindataparameter\c!threshold, % overlap related, will change
+ threshold \dimexpr\margindataparameter\c!threshold\relax % overlap related, will change
\ifhmode
- inline = true,
+ inline true %
\fi
- anchor = "\margindataparameter\c!anchor",
+ anchor {\margindataparameter\c!anchor}%
%
% we're not in forgetall
%
% \ifzeropt\leftskip \else
- % leftskip = \number\leftskip,
+ % leftskip \dimexpr\leftskip\relax
% \fi
% \ifzeropt\leftskip \else
- % rightskip = \number\rightskip,
+ % rightskip \dimexpr\rightskip\relax
% \fi
- scope = "\margindataparameter\c!scope",
- align = "\margindataparameter\c!align",
- line = \number\margindataparameter\c!line,
- stack = "\margindataparameter\c!stack",
- number = \number\nextbox,
- }}%
+ align {\margindataparameter\c!align}%
+ line \numexpr\margindataparameter\c!line\relax
+ stack {\margindataparameter\c!stack}%
+ \relax
\else
- \ctxlua{typesetters.margins.save{
- location = "\margindataparameter\c!location",
- method = "\margindataparameter\c!method",
- category = "\margindataparameter\c!category",
- name = "\margindataparameter\c!name",
- scope = "\margindataparameter\c!scope",
- number = \number\nextbox,
- }}%
+ \clf_savemargindata
+ location {\margindataparameter\c!location}%
+ method {\margindataparameter\c!method}%
+ category {\margindataparameter\c!category}%
+ name {\margindataparameter\c!name}%
+ scope {\margindataparameter\c!scope}%
+ number \nextbox
+ \relax
\fi
\endgroup}
diff --git a/tex/context/base/typo-pag.lua b/tex/context/base/typo-pag.lua
index 0dd75ddf9..53f79fcfc 100644
--- a/tex/context/base/typo-pag.lua
+++ b/tex/context/base/typo-pag.lua
@@ -6,6 +6,14 @@ if not modules then modules = { } end modules ['typo-pag'] = {
license = "see context related readme files"
}
+
+builders = builders or { }
+local builders = builders
+
+builders.paragraphs = builders.paragraphs or { }
+local parbuilders = builders.paragraphs
+
+local nodes = nodes
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -14,13 +22,23 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local penalty_code = nodecodes.penalty
-local insert_node_after = node.insert_after
-local new_penalty = nodes.pool.penalty
-
local unsetvalue = attributes.unsetvalue
-
local a_keeptogether = attributes.private("keeptogether")
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local new_penalty = nuts.pool.penalty
+
local trace_keeptogether = false
local report_keeptogether = logs.reporter("parbuilders","keeptogether")
@@ -33,11 +51,14 @@ trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether =
-- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that
-- when no hlists are there ? ... maybe the local_par
-function builders.paragraphs.registertogether(line,specification) -- might change
+function parbuilders.registertogether(line,specification) -- might change
+ if not specification then
+ return
+ end
if not enabled then
nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
end
- local a = line[a_keeptogether]
+ local a = getattr(line,a_keeptogether)
local c = a and cache[a]
if c then
local height = specification.height
@@ -64,23 +85,21 @@ function builders.paragraphs.registertogether(line,specification) -- might chang
if not specification.slack then
specification.slack = 0
end
- line[a_keeptogether] = last
+ setattr(line,a_keeptogether,last)
end
if trace_keeptogether then
local a = a or last
local c = cache[a]
- if trace_keeptogether then
- local noflines = specification.lineheight
- local height = c.height
- local depth = c.depth
- local slack = c.slack
- if not noflines or noflines == 0 then
- noflines = "unknown"
- else
- noflines = math.round((height + depth - slack) / noflines)
- end
- report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
+ local noflines = specification.lineheight
+ local height = c.height
+ local depth = c.depth
+ local slack = c.slack
+ if not noflines or noflines == 0 then
+ noflines = "unknown"
+ else
+ noflines = math.round((height + depth - slack) / noflines)
end
+ report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
end
end
@@ -88,24 +107,24 @@ local function keeptogether(start,a)
if start then
local specification = cache[a]
if a then
- local current = start.next
+ local current = getnext(start)
local previous = start
- local total = previous.depth
+ local total = getfield(previous,"depth")
local slack = specification.slack
local threshold = specification.depth - slack
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
end
while current do
- local id = current.id
+ local id = getid(current)
if id == vlist_code or id == hlist_code then
- total = total + current.height + current.depth
+ total = total + getfield(current,"height") + getfield(current,"depth")
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
end
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -114,13 +133,13 @@ local function keeptogether(start,a)
end
elseif id == glue_code then
-- hm, breakpoint, maybe turn this into kern
- total = total + current.spec.width
+ total = total + getfield(getfield(current,"spec"),"width")
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
end
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -128,13 +147,13 @@ local function keeptogether(start,a)
break
end
elseif id == kern_code then
- total = total + current.kern
+ total = total + getfield(current,"kern")
if trace_keeptogether then
report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
end
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -143,16 +162,16 @@ local function keeptogether(start,a)
end
elseif id == penalty_code then
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
end
- current.penalty = 10000
+ setfield(current,"penalty",10000)
else
break
end
end
previous = current
- current = current.next
+ current = getnext(current)
end
end
end
@@ -160,20 +179,20 @@ end
-- also look at first non glue/kern node e.g for a dropped caps
-function builders.paragraphs.keeptogether(head)
+function parbuilders.keeptogether(head)
local done = false
- local current = head
+ local current = tonut(head)
while current do
- if current.id == hlist_code then
- local a = current[a_keeptogether]
+ if getid(current) == hlist_code then
+ local a = getattr(current,a_keeptogether)
if a and a > 0 then
keeptogether(current,a)
- current[a_keeptogether] = unsetvalue
+ setattr(current,a_keeptogether,unsetvalue)
cache[a] = nil
done = true
end
end
- current = current.next
+ current = getnext(current)
end
return head, done
end
diff --git a/tex/context/base/typo-par.mkiv b/tex/context/base/typo-par.mkiv
new file mode 100644
index 000000000..8572f31b8
--- /dev/null
+++ b/tex/context/base/typo-par.mkiv
@@ -0,0 +1,29 @@
+%D \module
+%D [ file=typo-par,
+%D version=2008.09.30,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Paragraph Building,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
+
+%D This is very experimental, undocumented, and subject to change, just like
+%D the underlying interfaces. But at least it has been cleaned up as part of
+%D the status-mkiv cleanup.
+
+% \startparbuilder[basic]
+% \input tufte \par
+% \stopparbuilder
+
+\unprotect
+
+\registerctxluafile{node-ltp}{1.001}
+\registerctxluafile{trac-par}{1.001}
+
+\protect \endinput
diff --git a/tex/context/base/typo-prc.lua b/tex/context/base/typo-prc.lua
index a6c27ede6..cde66df00 100644
--- a/tex/context/base/typo-prc.lua
+++ b/tex/context/base/typo-prc.lua
@@ -6,21 +6,25 @@ if not modules then modules = { } end modules ['typo-prc'] = {
license = "see context related readme files"
}
--- moved from strc-ini.lua
-
-local context, commands = context, commands
-local formatters = string.formatters
local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs
-- processors: syntax: processor->data ... not ok yet
-typesetters.processors = typesetters.processors or { }
-local processors = typesetters.processors
+local context = context
+local implement = interfaces.implement
+
+local formatters = string.formatters
+
+typesetters.processors = typesetters.processors or { }
+local processors = typesetters.processors
local trace_processors = false
local report_processors = logs.reporter("processors")
local registered = { }
+local ctx_applyprocessor = context.applyprocessor
+local ctx_firstofoneargument = context.firstofoneargument
+
trackers.register("typesetters.processors", function(v) trace_processors = v end)
function processors.register(p)
@@ -55,7 +59,7 @@ function processors.apply(p,s)
if trace_processors then
report_processors("applying %s processor %a, argument: %s","known",p,s)
end
- context.applyprocessor(p,s)
+ ctx_applyprocessor(p,s)
elseif s then
if trace_processors then
report_processors("applying %s processor %a, argument: %s","unknown",p,s)
@@ -78,21 +82,21 @@ function processors.startapply(p,s)
if trace_processors then
report_processors("start applying %s processor %a","known",p)
end
- context.applyprocessor(p)
+ ctx_applyprocessor(p)
context("{")
return s
elseif p then
if trace_processors then
report_processors("start applying %s processor %a","unknown",p)
end
- context.firstofoneargument()
+ ctx_firstofoneargument()
context("{")
return s
else
if trace_processors then
report_processors("start applying %s processor","ignored")
end
- context.firstofoneargument()
+ ctx_firstofoneargument()
context("{")
return str
end
@@ -121,5 +125,5 @@ end
-- interface
-commands.registerstructureprocessor = processors.register
-commands.resetstructureprocessor = processors.reset
+implement { name = "registerstructureprocessor", actions = processors.register, arguments = "string" }
+implement { name = "resetstructureprocessor", actions = processors.reset, arguments = "string" }
diff --git a/tex/context/base/typo-prc.mkvi b/tex/context/base/typo-prc.mkvi
index de221f241..49a165696 100644
--- a/tex/context/base/typo-prc.mkvi
+++ b/tex/context/base/typo-prc.mkvi
@@ -13,7 +13,11 @@
\writestatus{loading}{ConTeXt Typesetting Macros / Processors}
+%D For the moment manipulators are loaded here too, as they're in the same
+%D category as processors. This might change. (They are used in publications.)
+
\registerctxluafile{typo-prc}{1.001}
+\registerctxluafile{typo-man}{1.001}
\unprotect
@@ -51,7 +55,7 @@
\appendtoks
\letvalue{\??processorcheck\currentprocessor}\relax
- \ctxcommand{registerstructureprocessor("\currentprocessor")}% global, but it permits using processor that are yet undefined
+ \clf_registerstructureprocessor{\currentprocessor}% global, but it permits using processor that are yet undefined
\to \everydefineprocessor
%D The following command can be used by users but normally it will be
@@ -68,9 +72,12 @@
\fi}
\def\typo_processor_apply
- {\doifelse{\processorparameter\c!state}\v!stop
- \firstofoneargument
- \typo_processor_apply_indeed}
+ {\edef\p_state{\processorparameter\c!state}%
+ \ifx\p_state\v!stop
+ \expandafter\firstofoneargument
+ \else
+ \expandafter\typo_processor_apply_indeed
+ \fi}
\def\typo_processor_apply_indeed#content%
{\begingroup
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 01868f490..d95eff68e 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -10,31 +10,43 @@ if not modules then modules = { } end modules ['typo-rep'] = {
-- endure it by listening to a couple cd's by The Scene and The Lau
-- on the squeezebox on my desk.
+local next, type, tonumber = next, type, tonumber
+
local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
trackers.register("fonts.stripping", function(v) trace_stripping = v end)
local report_stripping = logs.reporter("fonts","stripping")
-local nodes, node = nodes, node
+local nodes = nodes
+local tasks = nodes.tasks
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
-local delete_node = nodes.delete
-local replace_node = nodes.replace
-local copy_node = node.copy
+local getattr = nuts.getattr
+
+local delete_node = nuts.delete
+local replace_node = nuts.replace
+local copy_node = nuts.copy
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
local chardata = characters.data
local collected = false
-local a_stripping = attributes.private("stripping")
local fontdata = fonts.hashes.identifiers
-local tasks = nodes.tasks
+local a_stripping = attributes.private("stripping")
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-- todo: other namespace -> typesetters
nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
@@ -59,13 +71,13 @@ local function process(what,head,current,char)
head, current = delete_node(head,current)
elseif type(what) == "function" then
head, current = what(head,current)
- current = current.next
+ current = getnext(current)
if trace_stripping then
report_stripping("processing %C in text",char)
end
elseif what then -- assume node
head, current = replace_node(head,current,copy_node(what))
- current = current.next
+ current = getnext(current)
if trace_stripping then
report_stripping("replacing %C in text",char)
end
@@ -74,28 +86,29 @@ local function process(what,head,current,char)
end
function nodes.handlers.stripping(head)
+ head = tonut(head)
local current, done = head, false
while current do
- if current.id == glyph_code then
+ if getid(current) == glyph_code then
-- it's more efficient to keep track of what needs to be kept
- local todo = current[a_stripping]
+ local todo = getattr(current,a_stripping)
if todo == 1 then
- local char = current.char
+ local char = getchar(current)
local what = glyphs[char]
if what then
head, current = process(what,head,current,char)
done = true
else -- handling of spacing etc has to be done elsewhere
- current = current.next
+ current = getnext(current)
end
else
- current = current.next
+ current = getnext(current)
end
else
- current = current.next
+ current = getnext(current)
end
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -125,4 +138,8 @@ tasks.disableaction("processors","nodes.handlers.stripping")
-- interface
-commands.setcharacterstripping = stripping.set
+interfaces.implement {
+ name = "setcharacterstripping",
+ actions = stripping.set,
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-rep.mkiv b/tex/context/base/typo-rep.mkiv
index 46b439491..c1146997e 100644
--- a/tex/context/base/typo-rep.mkiv
+++ b/tex/context/base/typo-rep.mkiv
@@ -38,7 +38,7 @@
\definesystemattribute[stripping][public]
\unexpanded\def\setcharacterstripping[#1]%
- {\ctxcommand{setcharacterstripping("#1")}}
+ {\clf_setcharacterstripping{#1}}
\unexpanded\def\resetcharacterstripping
{\attribute\strippingattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua
index c3f50fe98..519ba3f34 100644
--- a/tex/context/base/typo-spa.lua
+++ b/tex/context/base/typo-spa.lua
@@ -15,10 +15,7 @@ local report_spacing = logs.reporter("typesetting","spacing")
local nodes, fonts, node = nodes, fonts, node
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local end_of_math = node.end_of_math
+local tasks = nodes.tasks
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -29,6 +26,27 @@ local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local end_of_math = nuts.end_of_math
+
+local nodepool = nuts.pool
+local new_penalty = nodepool.penalty
+local new_glue = nodepool.glue
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local math_code = nodecodes.math
@@ -36,12 +54,6 @@ local math_code = nodecodes.math
local somespace = nodes.somespace
local somepenalty = nodes.somepenalty
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_penalty = nodepool.penalty
-local new_glue = nodepool.glue
-
typesetters = typesetters or { }
local typesetters = typesetters
@@ -52,7 +64,6 @@ spacings.mapping = spacings.mapping or { }
spacings.numbers = spacings.numbers or { }
local a_spacings = attributes.private("spacing")
-spacings.attribute = a_spacings
storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping")
@@ -67,29 +78,30 @@ end
-- todo cache lastattr
function spacings.handler(head)
+ head = tonut(head)
local done = false
local start = head
-- head is always begin of par (whatsit), so we have at least two prev nodes
-- penalty followed by glue
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local attr = start[a_spacings]
+ local attr = getattr(start,a_spacings)
if attr and attr > 0 then
local data = mapping[attr]
if data then
- local char = start.char
+ local char = getchar(start)
local map = data.characters[char]
- start[a_spacings] = unsetvalue -- needed?
+ setattr(start,a_spacings,unsetvalue) -- needed?
if map then
local left = map.left
local right = map.right
local alternative = map.alternative
- local quad = quaddata[start.font]
- local prev = start.prev
+ local quad = quaddata[getfont(start)]
+ local prev = getprev(start)
if left and left ~= 0 and prev then
local ok = false
- local prevprev = prev.prev
+ local prevprev = getprev(prev)
if alternative == 1 then
local somespace = somespace(prev,true)
if somespace then
@@ -120,10 +132,10 @@ function spacings.handler(head)
done = true
end
end
- local next = start.next
+ local next = getnext(start)
if right and right ~= 0 and next then
local ok = false
- local nextnext = next.next
+ local nextnext = getnext(next)
if alternative == 1 then
local somepenalty = somepenalty(next,10000)
if somepenalty then
@@ -164,10 +176,10 @@ function spacings.handler(head)
start = end_of_math(start) -- weird, can return nil .. no math end?
end
if start then
- start = start.next
+ start = getnext(start)
end
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -218,6 +230,30 @@ end
-- interface
-commands.definecharacterspacing = spacings.define
-commands.setupcharacterspacing = spacings.setup
-commands.setcharacterspacing = spacings.set
+local implement = interfaces.implement
+
+implement {
+ name = "definecharacterspacing",
+ actions = spacings.define,
+ arguments = "string"
+}
+
+implement {
+ name = "setupcharacterspacing",
+ actions = spacings.setup,
+ arguments = {
+ "string",
+ "integer",
+ {
+ { "left", "number" },
+ { "right", "number" },
+ { "alternative", "integer" },
+ }
+ }
+}
+
+implement {
+ name = "setcharacterspacing",
+ actions = spacings.set,
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-spa.mkiv b/tex/context/base/typo-spa.mkiv
index d783353d6..2e3e71bf3 100644
--- a/tex/context/base/typo-spa.mkiv
+++ b/tex/context/base/typo-spa.mkiv
@@ -35,7 +35,7 @@
\c!alternative=0]
\unexpanded\def\definecharacterspacing[#1]%
- {\ctxcommand{definecharacterspacing("#1")}}
+ {\clf_definecharacterspacing{#1}}
\unexpanded\def\setupcharacterspacing
{\dotripleargument\typo_characterspacing_setup}
@@ -43,15 +43,19 @@
\def\typo_characterspacing_setup[#1][#2][#3]% todo: #2 list
{\begingroup
\setupcurrent_p_characterspacing[#3]%
- \ctxcommand{setupcharacterspacing("#1",\number#2, { % todo: just pass #3 to the lua end
- left = \direct_p_characterspacingparameter\c!left,
- right = \direct_p_characterspacingparameter\c!right,
- alternative = \direct_p_characterspacingparameter\c!alternative
- })}%
+ \clf_setupcharacterspacing
+ {#1}%
+ \numexpr#2\relax
+ {%
+ left \direct_p_characterspacingparameter\c!left\space
+ right \direct_p_characterspacingparameter\c!right\space
+ alternative \direct_p_characterspacingparameter\c!alternative
+ }%
+ \relax
\endgroup}
\unexpanded\def\setcharacterspacing[#1]% we can store the attribute if we want speed
- {\ctxcommand{setcharacterspacing("#1")}}
+ {\clf_setcharacterspacing{#1}}
\unexpanded\def\resetcharacterspacing % fast one
{\attribute\spacingattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-sus.lua b/tex/context/base/typo-sus.lua
new file mode 100644
index 000000000..0fe8e143a
--- /dev/null
+++ b/tex/context/base/typo-sus.lua
@@ -0,0 +1,311 @@
+if not modules then modules = { } end modules ['typo-sus'] = {
+ version = 1.001,
+ comment = "companion to typo-sus.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local punctuation = {
+ po = true,
+}
+
+local openquote = {
+ ps = true,
+ pi = true,
+}
+
+local closequote = {
+ pe = true,
+ pf = true,
+}
+
+local weird = {
+ lm = true,
+ no = true,
+}
+
+local categories = characters.categories
+
+local nodecodes = nodes.nodecodes
+
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local penalty_code = nodecodes.penalty
+local glue_code = nodecodes.glue
+local math_code = nodecodes.math
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getfield = nuts.getfield
+local getattr = nuts.getattr
+local getfont = nuts.getfont
+local getlist = nuts.getlist
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local setcolor = nodes.tracers.colors.set
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+local end_of_math = nuts.end_of_math
+
+local nodepool = nuts.pool
+
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+local new_hlist = nodepool.hlist
+----- new_penalty = nodepool.penalty
+
+local a_characters = attributes.private("characters")
+local a_suspecting = attributes.private('suspecting')
+local a_suspect = attributes.private('suspect')
+local texsetattribute = tex.setattribute
+local enabled = false
+
+local threshold = 65536 / 4
+
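+-- Descriptive note (added): a node counts as "special" and is skipped when
+-- scanning for neighbours if it is a penalty, or a kern, glue or hlist whose
+-- width is below the threshold above (65536/4 scaled points, i.e. 0.25pt).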
+local function special(n)
+ if n then
+ local id = getid(n)
+ if id == kern_code then
+ local kern = getfield(n,"kern")
+ return kern < threshold
+ elseif id == penalty_code then
+ return true
+ elseif id == glue_code then
+ local width = getfield(getfield(n,"spec"),"width")
+ return width < threshold
+ elseif id == hlist_code then
+ local width = getfield(n,"width")
+ return width < threshold
+ end
+ else
+ return false
+ end
+end
+
+local function goback(current)
+ local prev = getprev(current)
+ while prev and special(prev) do
+ prev = getprev(prev)
+ end
+ if prev then
+ return prev, getid(prev)
+ end
+end
+
+local function goforward(current)
+ local next = getnext(current)
+ while next and special(next) do
+ next = getnext(next)
+ end
+ if next then
+ return next, getid(next)
+ end
+end
+
+local function mark(head,current,id,color)
+ if id == glue_code then
+ -- the glue can have stretch and/or shrink so the rule can overlap with the
+ -- following glyph .. no big deal as that one then sits on top of the rule
+ local width = getfield(getfield(current,"spec"),"width")
+ local rule = new_rule(width)
+ local kern = new_kern(-width)
+ head = insert_before(head,current,rule)
+ head = insert_before(head,current,kern)
+ setcolor(rule,color)
+ -- elseif id == kern_code then
+ -- local width = getfield(current,"kern")
+ -- local rule = new_rule(width)
+ -- local kern = new_kern(-width)
+ -- head = insert_before(head,current,rule)
+ -- head = insert_before(head,current,kern)
+ -- setcolor(rule,color)
+ else
+ local width = getfield(current,"width")
+ local extra = fonts.hashes.xheights[getfont(current)] / 2
+ local rule = new_rule(width,getfield(current,"height")+extra,getfield(current,"depth")+extra)
+ local hlist = new_hlist(rule)
+ head = insert_before(head,current,hlist)
+ setcolor(rule,color)
+ setcolor(current,"white")
+ end
+ return head, current
+end
+
+-- we can cache the font and skip ahead to next but it doesn't
+-- save enough time and it makes the code looks bad too ... after
+-- all, we seldom use this
+
+local colors = {
+ "darkred",
+ "darkgreen",
+ "darkblue",
+ "darkcyan",
+ "darkmagenta",
+ "darkyellow",
+ "darkgray",
+ "orange",
+}
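+
+-- Descriptive note (added): the numeric codes assigned in marksuspects below
+-- index into this table: 1 darkred (a space directly inside quotes), 2 darkgreen
+-- (a weird character or a font change), 3 darkblue (suspicious punctuation
+-- placement), 5 darkmagenta (marked glue), 7 darkgray (a glyph right next to
+-- math), 8 orange (marked glue after a no-break penalty).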
+
+local found = 0
+
+function typesetters.marksuspects(head)
+ local head = tonut(head)
+ local current = head
+ local lastdone = nil
+ while current do
+ if getattr(current,a_suspecting) then
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local code = categories[char]
+ local done = false
+ if punctuation[code] then
+ local prev, pid = goback(current)
+ if prev and pid == glue_code then
+ done = 3 -- darkblue
+ elseif prev and pid == math_code then
+ done = 3 -- darkblue
+ else
+ local next, nid = goforward(current)
+ if next and nid ~= glue_code then
+ done = 3 -- darkblue
+ end
+ end
+ elseif openquote[code] then
+ local next, nid = goforward(current)
+ if next and nid == glue_code then
+ done = 1 -- darkred
+ end
+ elseif closequote[code] then
+ local prev, pid = goback(current)
+ if prev and pid == glue_code then
+ done = 1 -- darkred
+ end
+ elseif weird[code] then
+ done = 2 -- darkgreen
+ else
+ local prev, pid = goback(current)
+ if prev then
+ if pid == math_code then
+ done = 7-- darkgray
+ elseif pid == glyph_code and getfont(current) ~= getfont(prev) then
+ if lastdone ~= prev then
+ done = 2 -- darkgreen
+ end
+ end
+ end
+ if not done then
+ local next, nid = goforward(current)
+ if next then
+ if nid == math_code then
+ done = 7 -- darkgray
+ elseif nid == glyph_code and getfont(current) ~= getfont(next) then
+ if lastdone ~= prev then
+ done = 2 -- darkgreen
+ end
+ end
+ end
+ end
+ end
+ if done then
+ setattr(current,a_suspect,done)
+ lastdone = current
+ found = found + 1
+ end
+ current = getnext(current)
+ elseif id == math_code then
+ current = getnext(end_of_math(current))
+ elseif id == glue_code then
+ local a = getattr(current,a_characters)
+ if a then
+ local prev = getprev(current)
+ local prid = prev and getid(prev)
+ local done = false
+ if prid == penalty_code and getfield(prev,"penalty") == 10000 then
+ done = 8 -- orange
+ else
+ done = 5 -- darkmagenta
+ end
+ if done then
+ setattr(current,a_suspect,done)
+ -- lastdone = current
+ found = found + 1
+ end
+ end
+ current = getnext(current)
+ else
+ current = getnext(current)
+ end
+ else
+ current = getnext(current)
+ end
+ end
+ return tonode(head), found > 0
+end
+
+local function showsuspects(head)
+ local current = head
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local a = getattr(current,a_suspect)
+ if a then
+ head, current = mark(head,current,id,colors[a])
+ end
+ elseif id == glue_code then
+ local a = getattr(current,a_suspect)
+ if a then
+ head, current = mark(head,current,id,colors[a])
+ end
+ elseif id == math_code then
+ current = end_of_math(current)
+ elseif id == hlist_code or id == vlist_code then
+ local list = getlist(current)
+ if list then
+ local l = showsuspects(list)
+ if l ~= list then
+ setfield(current,"list",l)
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ return head
+end
+
+function typesetters.showsuspects(head)
+ if found > 0 then
+ return tonode(showsuspects(tonut(head))), true
+ else
+ return head, false
+ end
+end
+
+nodes.tasks.appendaction ("processors","after", "typesetters.marksuspects")
+nodes.tasks.prependaction("shipouts", "normalizers","typesetters.showsuspects")
+
+nodes.tasks.disableaction("processors","typesetters.marksuspects")
+nodes.tasks.disableaction("shipouts", "typesetters.showsuspects")
+
+-- or maybe a directive
+
+trackers.register("typesetters.suspects",function(v)
+ texsetattribute(a_suspecting,v and 1 or unsetvalue)
+ if v and not enabled then
+ nodes.tasks.enableaction("processors","typesetters.marksuspects")
+ nodes.tasks.enableaction("shipouts", "typesetters.showsuspects")
+ enabled = true
+ end
+end)
+
diff --git a/tex/context/base/typo-sus.mkiv b/tex/context/base/typo-sus.mkiv
new file mode 100644
index 000000000..fe44e6327
--- /dev/null
+++ b/tex/context/base/typo-sus.mkiv
@@ -0,0 +1,51 @@
+%D \module
+%D [ file=typo-sus,
+%D version=2014.11.06,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Checking Suspects,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Checking Suspects}
+
+%D This is a rather special module, mostly needed by ourselves for
+%D projects where copy||editing is not that precise.
+
+\registerctxluafile{typo-sus}{1.001}
+
+\unexpanded\def\showsuspects{\enabletrackers[typesetters.suspects]}
+
+%D The suspicious spacing will be colored in the text. There can be false
+%D positives but this feature is mostly used when proofreading. So, we
+%D don't worry too much about interference (and efficiency).
+%D
+%D \unexpanded\def\showsample#1%
+%D {\NC \type{#1}%
+%D \NC \enabletrackers[typesetters.suspects]#1\disabletrackers[typesetters.spacing]%
+%D \NC \NR}
+%D
+%D \starttabulate[|||][before=,after=]
+%D \showsample{foo$x$}
+%D \showsample{$x$bar}
+%D \showsample{foo$x$bar}
+%D \showsample{$f+o+o$:}
+%D \showsample{;$f+o+o$}
+%D \showsample{; bar}
+%D \showsample{foo:bar}
+%D \showsample{\quote{ foo }}
+%D \showsample{\quote{bar }}
+%D \showsample{\quote{ bar}}
+%D \showsample{(foo )}
+%D \showsample{\{foo \}}
+%D \showsample{foo{\bf gnu}bar}
+%D \showsample{foo$x^2$bar}
+%D \showsample{foo\nobreakspace bar}
+%D \stoptabulate
+
+\endinput
+
diff --git a/tex/context/base/typo-tal.lua b/tex/context/base/typo-tal.lua
index 63a66d037..5663c3bd9 100644
--- a/tex/context/base/typo-tal.lua
+++ b/tex/context/base/typo-tal.lua
@@ -6,33 +6,60 @@ if not modules then modules = { } end modules ['typo-tal'] = {
license = "see context related readme files"
}
--- I'll make it a bit more efficient and provide named instances too.
+-- I'll make it a bit more efficient and provide named instances too, which is needed for
+-- nested tables.
+--
+-- Currently we have two methods: text and number with some downward compatible
+-- defaulting.
+
+-- We can speed up by saving the current fontcharacters[font] + lastfont.
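+--
+-- Sketch (added, inferred from the code below): the first glyph of a cell
+-- carries an alignment attribute that presumably encodes column and row as
+-- column * 0xFFFF + row, and each column gets a lazily created dataset
+-- (separator, method, maxbefore/maxafter bookkeeping) in the datasets table.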
local next, type = next, type
local div = math.div
local utfbyte = utf.byte
+local splitmethod = utilities.parsers.splitmethod
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
local fontcharacters = fonts.hashes.characters
-local unicodes = fonts.hashes.unicodes
+----- unicodes = fonts.hashes.unicodes
local categories = characters.categories -- nd
-local insert_node_before = nodes.insert_before
-local insert_node_after = nodes.insert_after
-local traverse_list_by_id = nodes.traverse_id
-local dimensions_of_list = nodes.dimensions
-local first_glyph = nodes.first_glyph
+local variables = interfaces.variables
+local v_text = variables.text
+local v_number = variables.number
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getfield = nuts.getfield
+local setfield = nuts.setfield
-local nodepool = nodes.pool
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local traverse_list_by_id = nuts.traverse_id
+local dimensions_of_list = nuts.dimensions
+local first_glyph = nuts.first_glyph
+
+local nodepool = nuts.pool
local new_kern = nodepool.kern
local new_gluespec = nodepool.gluespec
local tracers = nodes.tracers
local setcolor = tracers.colors.set
-local tracedrule = tracers.pool.nodes.rule
+local tracedrule = tracers.pool.nuts.rule
local characteralign = { }
typesetters.characteralign = characteralign
@@ -47,6 +74,8 @@ local enabled = false
local datasets = false
+local implement = interfaces.implement
+
local comma = 0x002C
local period = 0x002E
local punctuationspace = 0x2008
@@ -65,14 +94,75 @@ local validsigns = {
[0x2213] = 0x2213, -- minusplus
}
+-- If needed we can have more modes which then also means a faster simple handler
+-- for non numbers.
+
+local function setcharacteralign(column,separator)
+ if not enabled then
+ nodes.tasks.enableaction("processors","typesetters.characteralign.handler")
+ enabled = true
+ end
+ if not datasets then
+ datasets = { }
+ end
+ local dataset = datasets[column] -- we can use a metatable
+ if not dataset then
+ local method, token
+ if separator then
+ method, token = splitmethod(separator)
+ if method and token then
+ separator = utfbyte(token) or comma
+ else
+ separator = utfbyte(separator) or comma
+ method = validseparators[separator] and v_number or v_text
+ end
+ else
+ separator = comma
+ method = v_number
+ end
+ dataset = {
+ separator = separator,
+ list = { },
+ maxafter = 0,
+ maxbefore = 0,
+ collected = false,
+ method = method,
+ separators = validseparators,
+ signs = validsigns,
+ }
+ datasets[column] = dataset
+ used = true
+ end
+ return dataset
+end
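+
+-- A rough sketch of how the separator specification ends up (column 1 and the
+-- separators below are just examples):
+--
+-- setcharacteralign(1,",")         -- comma as separator, number mode inferred
+-- setcharacteralign(1,"-")         -- not a known number separator, so text mode
+-- setcharacteralign(1,"number->-") -- "-" as separator but number mode enforced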
+
+local function resetcharacteralign()
+ datasets = false
+end
+
+characteralign.setcharacteralign = setcharacteralign
+characteralign.resetcharacteralign = resetcharacteralign
+
+implement {
+ name = "setcharacteralign",
+ actions = setcharacteralign,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "resetcharacteralign",
+ actions = resetcharacteralign
+}
+
local function traced_kern(w)
return tracedrule(w,nil,nil,"darkgray")
end
-function characteralign.handler(head,where)
+function characteralign.handler(originalhead,where)
if not datasets then
- return head, false
+ return originalhead, false
end
+ local head = tonut(originalhead)
-- local first = first_glyph(head) -- we could do that once
local first
for n in traverse_list_by_id(glyph_code,head) do
@@ -80,14 +170,14 @@ function characteralign.handler(head,where)
break
end
if not first then
- return head, false
+ return originalhead, false
end
- local a = first[a_characteralign]
+ local a = getattr(first,a_characteralign)
if not a or a == 0 then
- return head, false
+ return originalhead, false
end
- local column = div(a,100)
- local row = a % 100
+ local column = div(a,0xFFFF)
+ local row = a % 0xFFFF
local dataset = datasets and datasets[column] or setcharacteralign(column)
local separator = dataset.separator
local list = dataset.list
@@ -98,82 +188,128 @@ function characteralign.handler(head,where)
local c = nil
local current = first
local sign = nil
+ --
+ local validseparators = dataset.separators
+ local validsigns = dataset.signs
+ local method = dataset.method
-- we can think of constraints
- while current do
- local id = current.id
- if id == glyph_code then
- local char = current.char
- local font = current.font
- local unicode = unicodes[font][char]
- if not unicode then
- -- no unicode so forget about it
- elseif unicode == separator then
- c = current
- if trace_split then
- setcolor(current,"darkred")
- end
- dataset.hasseparator = true
- elseif categories[unicode] == "nd" or validseparators[unicode] then
- if c then
- if not a_start then
- a_start = current
- end
- a_stop = current
+ if method == v_number then
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local font = getfont(current)
+ -- local unicode = unicodes[font][char]
+ local unicode = fontcharacters[font][char].unicode or char -- ignore tables
+ if not unicode then
+ -- no unicode so forget about it
+ elseif unicode == separator then
+ c = current
if trace_split then
- setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
+ setcolor(current,"darkred")
end
- else
- if not b_start then
- if sign then
- b_start = sign
- local new = validsigns[sign.char]
- if char == new or not fontcharacters[sign.font][new] then
- if trace_split then
- setcolor(sign,"darkyellow")
+ dataset.hasseparator = true
+ elseif categories[unicode] == "nd" or validseparators[unicode] then
+ if c then
+ if not a_start then
+ a_start = current
+ end
+ a_stop = current
+ if trace_split then
+ setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
+ end
+ else
+ if not b_start then
+ if sign then
+ b_start = sign
+ local new = validsigns[getchar(sign)]
+ if char == new or not fontcharacters[getfont(sign)][new] then
+ if trace_split then
+ setcolor(sign,"darkyellow")
+ end
+ else
+ setfield(sign,"char",new)
+ if trace_split then
+ setcolor(sign,"darkmagenta")
+ end
end
+ sign = nil
+ b_stop = current
else
- sign.char = new
- if trace_split then
- setcolor(sign,"darkmagenta")
- end
+ b_start = current
+ b_stop = current
end
- sign = nil
- b_stop = current
else
- b_start = current
b_stop = current
- if trace_split then
- setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
- end
end
- else
- b_stop = current
- if trace_split then
+ if trace_split and current ~= sign then
setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
end
end
+ elseif not b_start then
+ sign = validsigns[unicode] and current
+ -- if trace_split then
+ -- setcolor(current,"darkgreen")
+ -- end
+ end
+ elseif (b_start or a_start) and id == glue_code then
+ -- maybe only in number mode
+ -- somewhat inefficient
+ local next = getnext(current)
+ local prev = getprev(current)
+ if next and prev and getid(next) == glyph_code and getid(prev) == glyph_code then -- too much checking
+ local width = fontcharacters[getfont(b_start)][separator or period].width
+ -- local spec = getfield(current,"spec")
+ -- free_spec(spec)
+ setfield(current,"spec",new_gluespec(width))
+ setattr(current,a_character,punctuationspace)
+ if a_start then
+ a_stop = current
+ elseif b_start then
+ b_stop = current
+ end
end
- elseif not b_start then
- sign = validsigns[unicode] and current
end
- elseif (b_start or a_start) and id == glue_code then
- -- somewhat inefficient
- local next = current.next
- local prev = current.prev
- if next and prev and next.id == glyph_code and prev.id == glyph_code then -- too much checking
- local width = fontcharacters[b_start.font][separator or period].width
- -- local spec = current.spec
- -- nodes.free(spec) -- hm, we leak but not that many specs
- current.spec = new_gluespec(width)
- current[a_character] = punctuationspace
- if a_start then
- a_stop = current
- elseif b_start then
- b_stop = current
+ current = getnext(current)
+ end
+ else
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local font = getfont(current)
+ -- local unicode = unicodes[font][char]
+ local unicode = fontcharacters[font][char].unicode or char -- ignore tables
+ if not unicode then
+ -- no unicode so forget about it
+ elseif unicode == separator then
+ c = current
+ if trace_split then
+ setcolor(current,"darkred")
+ end
+ dataset.hasseparator = true
+ else
+ if c then
+ if not a_start then
+ a_start = current
+ end
+ a_stop = current
+ if trace_split then
+ setcolor(current,"darkgreen")
+ end
+ else
+ if not b_start then
+ b_start = current
+ end
+ b_stop = current
+ if trace_split then
+ setcolor(current,"darkblue")
+ end
+ end
end
end
+ current = getnext(current)
end
- current = current.next
end
local entry = list[row]
if entry then
@@ -199,7 +335,7 @@ function characteralign.handler(head,where)
local maxafter = dataset.maxafter
local before = entry.before or 0
local after = entry.after or 0
- local new_kern = trace_split and traced_kern or new_kern
+ local new_kern = trace_split and traced_kern or new_kern
if b_start then
if before < maxbefore then
head = insert_node_before(head,b_start,new_kern(maxbefore-before))
@@ -207,7 +343,7 @@ function characteralign.handler(head,where)
if not c then
-- print("[before]")
if dataset.hasseparator then
- local width = fontcharacters[b_stop.font][separator].width
+ local width = fontcharacters[getfont(b_stop)][separator].width
insert_node_after(head,b_stop,new_kern(maxafter+width))
end
elseif a_start then
@@ -229,7 +365,7 @@ function characteralign.handler(head,where)
end
else
-- print("[after]")
- local width = fontcharacters[b_stop.font][separator].width
+ local width = fontcharacters[getfont(b_stop)][separator].width
head = insert_node_before(head,a_start,new_kern(maxbefore+width))
end
if after < maxafter then
@@ -246,44 +382,10 @@ function characteralign.handler(head,where)
end
else
entry = {
- before = b_start and dimensions_of_list(b_start,b_stop.next) or 0,
- after = a_start and dimensions_of_list(a_start,a_stop.next) or 0,
+ before = b_start and dimensions_of_list(b_start,getnext(b_stop)) or 0,
+ after = a_start and dimensions_of_list(a_start,getnext(a_stop)) or 0,
}
list[row] = entry
end
- return head, true
+ return tonode(head), true
end
-
-function setcharacteralign(column,separator)
- if not enabled then
- nodes.tasks.enableaction("processors","typesetters.characteralign.handler")
- enabled = true
- end
- if not datasets then
- datasets = { }
- end
- local dataset = datasets[column] -- we can use a metatable
- if not dataset then
- dataset = {
- separator = separator and utfbyte(separator) or comma,
- list = { },
- maxafter = 0,
- maxbefore = 0,
- collected = false,
- }
- datasets[column] = dataset
- used = true
- end
- return dataset
-end
-
-local function resetcharacteralign()
- datasets = false
-end
-
-characteralign.setcharacteralign = setcharacteralign
-characteralign.resetcharacteralign = resetcharacteralign
-
-commands.setcharacteralign = setcharacteralign
-commands.resetcharacteralign = resetcharacteralign
-
diff --git a/tex/context/base/typo-tal.mkiv b/tex/context/base/typo-tal.mkiv
index 7de10a6ec..570f1a1f5 100644
--- a/tex/context/base/typo-tal.mkiv
+++ b/tex/context/base/typo-tal.mkiv
@@ -40,25 +40,26 @@
%D
%D \typebuffer \blank \getbuffer \blank
-%D \startbuffer
-%D \bTABLE
-%D \bTR \bTD[aligncharacter=yes] € 1,1 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 11,11 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 12\punctuationspace111,11 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 12 111,11 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 1.234.451,22222 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 234.451,2 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 234.451 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 451 \eTD \eTR
-%D \bTR \bTD \bf some text \eTD \eTR
-%D \eTABLE
-%D \stopbuffer
-%D
-%D \typebuffer \blank \getbuffer \blank
-
-\unexpanded\def\signalcharacteralign#1#2{\attribute\characteralignattribute=\numexpr#1*\plushundred+#2\relax}
-\unexpanded\def\setcharacteralign #1#2{\ctxcommand{setcharacteralign(\number#1,"#2")}}
-\unexpanded\def\resetcharacteralign {\ctxcommand{resetcharacteralign()}}
+% D \startbuffer
+% D \bTABLE
+% D \bTR \bTD[aligncharacter=yes] € 1,1 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 11,11 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 12\punctuationspace111,11 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 12 111,11 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 1.234.451,22222 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 234.451,2 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 234.451 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 451 \eTD \eTR
+% D \bTR \bTD \bf some text \eTD \eTR
+% D \eTABLE
+% D \stopbuffer
+% D
+% D \typebuffer \blank \getbuffer \blank
+
+\unexpanded\def\signalcharacteralign#1#2{\attribute\characteralignattribute\numexpr#1*\maxcardminusone+#2\relax} % 0xFFFF
+\unexpanded\def\setcharacteralign #1#2{\clf_setcharacteralign#1{#2}}
+\unexpanded\def\resetcharacteralign {\clf_resetcharacteralign}
+\unexpanded\def\nocharacteralign {\attribute\characteralignattribute\attributeunsetvalue}
%D Mostly downward compatible:
%D
@@ -73,6 +74,15 @@
%D
%D \typebuffer \blank \getbuffer \blank
+%D We have (currently) two modes: \type {text} and \type {number}. The handler tries
+%D to determine the mode automatically. When using periods and commas as separators
+%D the \type {number} mode is chosen. If you use for instance a \type {-} as
+%D separator, \type {text} is chosen, but you can enforce \type {number} with \type
+%D {number->-} (as with other mechanisms, the arrow indicates a method to apply).
+%D
+%D One can use \type {\nocharacteralign} to disable this mechanism, for instance in
+%D a table cell.
+
\def\alignmentcharacter{,}
\unexpanded\def\typo_charalign_pass_one
@@ -86,19 +96,26 @@
\def\typo_charalign_pass
{\hbox\bgroup\signalcharacteralign\plusone\scratchcounter\let\next}
-\unexpanded\def\startcharacteralign#1\stopcharacteralign
+\unexpanded\def\startcharacteralign
+ {\dosingleempty\typo_charalign_start}
+
+\def\typo_charalign_start[#1]#2\stopcharacteralign
{\bgroup
+ \edef\m_temp{#1}%
+ \ifx\m_temp\empty \else
+ \let\alignmentcharacter\m_temp
+ \fi
\setcharacteralign\plusone\alignmentcharacter
\begingroup
\scratchcounter\zerocount
\let\checkcharacteralign\typo_charalign_pass_one
\settrialtypesetting
- #1\relax
+ #2\relax
\endgroup
\begingroup
\scratchcounter\zerocount
\let\checkcharacteralign\typo_charalign_pass_two
- #1\relax
+ #2\relax
\endgroup
\resetcharacteralign
\egroup}
diff --git a/tex/context/base/typo-txt.mkvi b/tex/context/base/typo-txt.mkvi
index 57f4e5f42..7562fe70c 100644
--- a/tex/context/base/typo-txt.mkvi
+++ b/tex/context/base/typo-txt.mkvi
@@ -17,7 +17,7 @@
\unprotect
-\registerctxluafile{typo-txt}{1.001}
+% registerctxluafile{typo-txt}{1.001}
%D \macros
%D {normalizefontheight,normalizefontwidth,normalizedfontsize}
@@ -194,4 +194,51 @@
%D \HL
%D \stoptabulate
+%D This is used in the beginners manual. One needs to set the font size to an
+%D acceptable value for this to work.
+
+\unexpanded\def\startnicelyfilledbox
+ {\vbox\bgroup
+ \forgetall
+ \dosingleempty\dostartnicelyfilledbox}
+
+\def\dostartnicelyfilledbox[#1]%
+ {\letdummyparameter\c!width \hsize
+ \letdummyparameter\c!height\vsize
+ \letdummyparameter\c!offset\exheight % we obey the outer exheight
+ \letdummyparameter\c!strut \v!yes % we obey the inner strut!
+ \getdummyparameters[#1]%
+ \scratchoffset\dummyparameter\c!offset\relax
+ \setbox\scratchbox\vbox to \dummyparameter\c!height \bgroup
+ \hsize\dummyparameter\c!width\relax
+ \emergencystretch10\scratchoffset
+ \parfillskip\zeropoint
+ \baselineskip\zeropoint plus \onepoint minus \onepoint
+ \beginofshapebox
+ \leftskip \scratchoffset
+ \rightskip\scratchoffset}
+
+\unexpanded\def\stopnicelyfilledbox
+ {\doifelse{\dummyparameter\c!strut}\v!yes
+ {\xdef\doflushnicelyfilledbox
+ {\ht\shapebox\the\strutht
+ \dp\shapebox\the\strutdp
+ \box\shapebox}}%
+ {\gdef\doflushnicelyfilledbox
+ {\box\shapebox}}%
+ \endofshapebox
+ \doreshapebox
+ {\doflushnicelyfilledbox}
+ {\penalty\shapepenalty}
+ {\kern\shapekern}
+ {\vfil}%
+ \kern\scratchoffset
+ \vfilneg
+ \flushshapebox
+ \vfilneg
+ \kern\scratchoffset
+ \egroup
+ \box\scratchbox
+ \egroup}
+
\protect \endinput
diff --git a/tex/context/base/typo-wrp.lua b/tex/context/base/typo-wrp.lua
new file mode 100644
index 000000000..07639392f
--- /dev/null
+++ b/tex/context/base/typo-wrp.lua
@@ -0,0 +1,76 @@
+if not modules then modules = { } end modules ['typo-wrp'] = {
+ version = 1.001,
+ comment = "companion to typo-wrp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- begin/end par wrapping stuff ... more to come
+
+local nodecodes = nodes.nodecodes
+
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local parfill_skip_code = nodes.gluecodes.parfillskip
+local user_penalty_code = nodes.penaltycodes.userpenalty
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local findtail = nuts.tail
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local remove = nuts.remove
+
+local wrappers = { }
+typesetters.wrappers = wrappers
+
+local trace_wrappers = trackers.register("typesetters.wrappers",function(v) trace_wrappers = v end)
+
+local report = logs.reporter("paragraphs","wrappers")
+
+-- we really need to pass tail too ... but then we need to check all the plugins
+-- bah ... slowdown
+
+local function remove_dangling_crlf(head,tail)
+ if tail and getid(tail) == glue_code and getsubtype(tail) == parfill_skip_code then
+ tail = getprev(tail)
+ if tail and getid(tail) == penalty_code and getsubtype(tail) == user_penalty_code and getfield(tail,"penalty") == 10000 then
+ tail = getprev(tail)
+ if tail and getid(tail) == penalty_code and getsubtype(tail) == user_penalty_code and getfield(tail,"penalty") == -10000 then
+ if tail == head then
+ -- can't happen
+ else
+ if trace_wrappers then
+ report("removing a probably unwanted end-of-par break in line %s (guess)",tex.inputlineno)
+ end
+ remove(head,tail,true)
+ return head, tail, true
+ end
+ end
+ end
+ end
+ return head, tail, false
+end
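+
+-- What we try to catch at the end of the paragraph is, roughly:
+--
+-- [penalty -10000, the forced break] [penalty 10000] [parfillskip glue]
+--
+-- which is what a \crlf right before the end of a paragraph leaves behind; such
+-- a break only produces an empty last line so we prune it.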
+
+function wrappers.handler(head)
+ local head = tonut(head)
+ if head then
+ local tail = findtail(head)
+ local done = false
+ head, tail, done = remove_dangling_crlf(head,tail) -- will be action chain
+ end
+ return head, true
+end
+
+interfaces.implement {
+ name = "enablecrlf",
+ onlyonce = true,
+ actions = function()
+ nodes.tasks.enableaction("processors","typesetters.wrappers.handler")
+ end
+}
diff --git a/tex/context/base/typo-wrp.mkiv b/tex/context/base/typo-wrp.mkiv
new file mode 100644
index 000000000..0538a9662
--- /dev/null
+++ b/tex/context/base/typo-wrp.mkiv
@@ -0,0 +1,65 @@
+%D \module
+%D [ file=typo-wrp,
+%D version=2014.11.09,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Wrappers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Wrapping}
+
+\unprotect
+
+\registerctxluafile{typo-wrp}{1.001}
+
+%D This definition has moved from page-lin.tex to spac-hor.tex (due to
+%D visualization added in august 2003) and now to here (november 2014)
+%D due to catching border cases in dirty and messy \XML\ sources.
+
+% \unexpanded\def\crlf
+% {\ifhmode
+% \unskip
+% \prewordbreak\crlfplaceholder
+% \ifcase\raggedstatus\hfil\or\or\or\hfil\fi
+% \break
+% \else
+% \crlfplaceholder
+% \endgraf
+% \fi}
+
+\unexpanded\def\crlf
+ {\ifhmode
+ \expandafter\spac_crlf
+ \fi}
+
+\unexpanded\def\spac_crlf
+ {\clf_enablecrlf % once
+ \unskip
+ \prewordbreak % here or in \spac_crlf_placeholder
+ \spac_crlf_placeholder
+ \ifcase\raggedstatus\hfil\or\or\or\hfil\fi
+ \break
+ \ignorespaces}
+
+\unexpanded\def\spac_crlf_placeholder
+ {\strut}
+
+\unexpanded\def\spac_crlf_placeholder_show
+ {\hbox to \zeropoint{\strut{\infofont\kern.25\emwidth}\lohi{\infofont CR}{\infofont LF}\hss}}
+
+\unexpanded\def\settestcrlf
+ {\let\spac_crlf_placeholder\spac_crlf_placeholder_show}
+
+\unexpanded\def\crlfplaceholder % for old times sake
+ {\spac_crlf_placeholder}
+
+\appendtoks
+ \let\spac_crlf_placeholder\empty
+\to \everysetnostrut
+
+\protect \endinput
diff --git a/tex/context/base/unic-ini.lua b/tex/context/base/unic-ini.lua
index cca1f0617..132c92efa 100644
--- a/tex/context/base/unic-ini.lua
+++ b/tex/context/base/unic-ini.lua
@@ -11,9 +11,13 @@ local utfchar = utf.char
-- Beware, initializing unicodechar happens at first usage and takes
-- 0.05 -- 0.1 second (lots of function calls).
-function commands.unicodechar(asked)
- local n = characters.unicodechar(asked)
- if n then
- context(utfchar(n))
+interfaces.implement {
+ name = "unicodechar",
+ arguments = "string",
+ actions = function(asked)
+ local n = characters.unicodechar(asked)
+ if n then
+ context(utfchar(n))
+ end
end
-end
+}
diff --git a/tex/context/base/unic-ini.mkiv b/tex/context/base/unic-ini.mkiv
index ece0da283..13ad4bdb9 100644
--- a/tex/context/base/unic-ini.mkiv
+++ b/tex/context/base/unic-ini.mkiv
@@ -26,8 +26,7 @@
%D
%D \typebuffer \getbuffer
-%def\unicodechar#1{\char\numexpr#1\relax} % no lookahead
-\def\unicodechar#1{\ctxcommand{unicodechar("#1")}}
+\def\unicodechar#1{\clf_unicodechar{#1}}
\unexpanded\def\unknownchar
{\dontleavehmode\hbox{\vrule\s!width.5\emwidth\s!height\exheight\s!depth\zeropoint}}
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index 785373f86..ee732b3b5 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -92,37 +92,41 @@ end
function debugger.disable()
debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+ -- counters[debug.getinfo(2,"f").func] = nil
end
---~ debugger.enable()
-
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
-
---~ debugger.disable()
-
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-
+-- debugger.enable()
+--
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+--
+-- debugger.disable()
+--
+-- print("")
+-- debugger.showstats()
+-- print("")
+-- debugger.showstats(print,3)
+--
-- from the lua book:
-function traceback()
- local level = 1
+local function showtraceback(rep) -- from lua site / adapted
+ local level = 2 -- we don't want this function to be reported
+ local reporter = rep or report
while true do
- local info = debug.getinfo(level, "Sl")
+ local info = getinfo(level, "Sl")
if not info then
break
elseif info.what == "C" then
- print(format("%3i : C function",level))
+ reporter("%2i : %s",level-1,"C function")
else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
end
level = level + 1
end
end
+
+debugger.showtraceback = showtraceback
+-- debug.showtraceback = showtraceback
diff --git a/tex/context/base/util-dim.lua b/tex/context/base/util-dim.lua
index 69061495f..2bdb870e7 100644
--- a/tex/context/base/util-dim.lua
+++ b/tex/context/base/util-dim.lua
@@ -24,13 +24,15 @@ local formatters = string.formatters
local texget = tex and tex.get or function() return 65536*10*100 end
+local p_stripzeros = lpeg.patterns.stripzeros
+
--this might become another namespace
number = number or { }
local number = number
-number.tonumberf = function(n) return match(format("%.20f",n),"(.-0?)0*$") end -- one zero too much but alas
-number.tonumberg = function(n) return format("%.20g",n) end
+number.tonumberf = function(n) return lpegmatch(p_stripzeros,format("%.20f",n)) end
+number.tonumberg = function(n) return format("%.20g",n) end
local dimenfactors = allocate {
["pt"] = 1/65536,
@@ -46,66 +48,65 @@ local dimenfactors = allocate {
["nc"] = ( 5080/65043)/65536
}
---~ print(table.serialize(dimenfactors))
---~
---~ %.99g:
---~
---~ t={
---~ ["bp"]=1.5201782378580324e-005,
---~ ["cc"]=1.1883696112892098e-006,
---~ ["cm"]=5.3628510057769479e-007,
---~ ["dd"]=1.4260435335470516e-005,
---~ ["em"]=0.000152587890625,
---~ ["ex"]=6.103515625e-005,
---~ ["in"]=2.1113586636917117e-007,
---~ ["mm"]=5.3628510057769473e-008,
---~ ["nc"]=1.1917446679504327e-006,
---~ ["nd"]=1.4300936015405194e-005,
---~ ["pc"]=1.2715657552083333e-006,
---~ ["pt"]=1.52587890625e-005,
---~ ["sp"]=1,
---~ }
---~
---~ patched %s and tonumber
---~
---~ t={
---~ ["bp"]=0.00001520178238,
---~ ["cc"]=0.00000118836961,
---~ ["cm"]=0.0000005362851,
---~ ["dd"]=0.00001426043534,
---~ ["em"]=0.00015258789063,
---~ ["ex"]=0.00006103515625,
---~ ["in"]=0.00000021113587,
---~ ["mm"]=0.00000005362851,
---~ ["nc"]=0.00000119174467,
---~ ["nd"]=0.00001430093602,
---~ ["pc"]=0.00000127156576,
---~ ["pt"]=0.00001525878906,
---~ ["sp"]=1,
---~ }
+-- print(table.serialize(dimenfactors))
+--
+-- %.99g:
+--
+-- t={
+-- ["bp"]=1.5201782378580324e-005,
+-- ["cc"]=1.1883696112892098e-006,
+-- ["cm"]=5.3628510057769479e-007,
+-- ["dd"]=1.4260435335470516e-005,
+-- ["em"]=0.000152587890625,
+-- ["ex"]=6.103515625e-005,
+-- ["in"]=2.1113586636917117e-007,
+-- ["mm"]=5.3628510057769473e-008,
+-- ["nc"]=1.1917446679504327e-006,
+-- ["nd"]=1.4300936015405194e-005,
+-- ["pc"]=1.2715657552083333e-006,
+-- ["pt"]=1.52587890625e-005,
+-- ["sp"]=1,
+-- }
+--
+-- patched %s and tonumber
+--
+-- t={
+-- ["bp"]=0.00001520178238,
+-- ["cc"]=0.00000118836961,
+-- ["cm"]=0.0000005362851,
+-- ["dd"]=0.00001426043534,
+-- ["em"]=0.00015258789063,
+-- ["ex"]=0.00006103515625,
+-- ["in"]=0.00000021113587,
+-- ["mm"]=0.00000005362851,
+-- ["nc"]=0.00000119174467,
+-- ["nd"]=0.00001430093602,
+-- ["pc"]=0.00000127156576,
+-- ["pt"]=0.00001525878906,
+-- ["sp"]=1,
+-- }
--[[ldx--
<p>A conversion function that takes a number, unit (string) and optional
format (string) is implemented using this table.</p>
--ldx]]--
+local f_none = formatters["%s%s"]
+local f_true = formatters["%0.5F%s"]
-local function numbertodimen(n,unit,fmt)
+local function numbertodimen(n,unit,fmt) -- will be redefined later !
if type(n) == 'string' then
return n
else
unit = unit or 'pt'
+ n = n * dimenfactors[unit]
if not fmt then
- fmt = "%s%s"
+ return f_none(n,unit)
elseif fmt == true then
- fmt = "%0.5f%s"
+ return f_true(n,unit)
+ else
+ return formatters[fmt](n,unit)
end
- return format(fmt,n*dimenfactors[unit],unit)
- -- if fmt then
- -- return format(fmt,n*dimenfactors[unit],unit)
- -- else
- -- return match(format("%.20f",n*dimenfactors[unit]),"(.-0?)0*$") .. unit
- -- end
end
end
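+
+-- For instance:
+--
+-- numbertodimen(65536)      -- "1pt"
+-- numbertodimen(65536,"sp") -- "65536sp"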
diff --git a/tex/context/base/util-env.lua b/tex/context/base/util-env.lua
index 0a708ea43..b72226900 100644
--- a/tex/context/base/util-env.lua
+++ b/tex/context/base/util-env.lua
@@ -9,11 +9,11 @@ if not modules then modules = { } end modules ['util-env'] = {
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
+local unquoted, quoted, optionalquoted = string.unquoted, string.quoted, string.optionalquoted
local concat, insert, remove = table.concat, table.insert, table.remove
-environment = environment or { }
-local environment = environment
+environment = environment or { }
+local environment = environment
-- precautions
@@ -182,26 +182,14 @@ function environment.splitarguments(separator) -- rather special, cut-off before
end
function environment.reconstructcommandline(arg,noquote)
+ local resolveprefix = resolvers.resolve -- something rather special
arg = arg or environment.originalarguments
if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
+ return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1])
elseif #arg > 0 then
local result = { }
for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
+ result[i] = optionalquoted(resolveprefix and resolveprefix(arg[i]) or arg[i])
end
return concat(result," ")
else
@@ -238,26 +226,10 @@ end
-- print(environment.relativepath("//x")) -- //x
-- print(environment.relativepath()) -- e:/tmp
--- -- to be tested:
---
--- function environment.reconstructcommandline(arg,noquote)
--- arg = arg or environment.originalarguments
--- if noquote and #arg == 1 then
--- return unquoted(resolvers.resolve(arg[1]))
--- elseif #arg > 0 then
--- local result = { }
--- for i=1,#arg do
--- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
--- end
--- return concat(result," ")
--- else
--- return ""
--- end
--- end
-
if arg then
-- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
+
local newarg, instring = { }, false
for index=1,#arg do
diff --git a/tex/context/base/util-prs.lua b/tex/context/base/util-prs.lua
index e5b35a727..a3c1c6f8f 100644
--- a/tex/context/base/util-prs.lua
+++ b/tex/context/base/util-prs.lua
@@ -21,6 +21,8 @@ parsers.patterns = patterns
local setmetatableindex = table.setmetatableindex
local sortedhash = table.sortedhash
+local sortedkeys = table.sortedkeys
+local tohash = table.tohash
-- we share some patterns
@@ -94,9 +96,7 @@ patterns.settings_to_hash_b = pattern_b_s
patterns.settings_to_hash_c = pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if type(str) == "table" then
- return set
- elseif how == "strict" then
+ if how == "strict" then
return (pattern_c/set)^1
elseif how == "tolerant" then
return (pattern_b/set)^1
@@ -106,7 +106,9 @@ function parsers.make_settings_to_hash_pattern(set,how)
end
function parsers.settings_to_hash(str,existing)
- if type(str) == "table" then
+ if not str or str == "" then
+ return { }
+ elseif type(str) == "table" then
if existing then
for k, v in next, str do
existing[k] = v
@@ -115,17 +117,17 @@ function parsers.settings_to_hash(str,existing)
else
return str
end
- elseif str and str ~= "" then
+ else
hash = existing or { }
lpegmatch(pattern_a_s,str)
return hash
- else
- return { }
end
end
function parsers.settings_to_hash_tolerant(str,existing)
- if type(str) == "table" then
+ if not str or str == "" then
+ return { }
+ elseif type(str) == "table" then
if existing then
for k, v in next, str do
existing[k] = v
@@ -134,17 +136,17 @@ function parsers.settings_to_hash_tolerant(str,existing)
else
return str
end
- elseif str and str ~= "" then
+ else
hash = existing or { }
lpegmatch(pattern_b_s,str)
return hash
- else
- return { }
end
end
function parsers.settings_to_hash_strict(str,existing)
- if type(str) == "table" then
+ if not str or str == "" then
+ return nil
+ elseif type(str) == "table" then
if existing then
for k, v in next, str do
existing[k] = v
@@ -157,8 +159,6 @@ function parsers.settings_to_hash_strict(str,existing)
hash = existing or { }
lpegmatch(pattern_c_s,str)
return next(hash) and hash
- else
- return nil
end
end
@@ -167,24 +167,24 @@ local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ C((nestedbraces + (1-comma))^0)
local pattern = spaces * Ct(value*(separator*value)^0)
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
+-- "aap, {noot}, mies" : outer {} removed, leading spaces ignored
patterns.settings_to_array = pattern
-- we could use a weak table as cache
function parsers.settings_to_array(str,strict)
- if type(str) == "table" then
- return str
- elseif not str or str == "" then
+ if not str or str == "" then
return { }
+ elseif type(str) == "table" then
+ return str
elseif strict then
- if find(str,"{") then
+ if find(str,"{",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
- elseif find(str,",") then
+ elseif find(str,",",1,true) then
return lpegmatch(pattern,str)
else
return { str }
@@ -195,12 +195,40 @@ end
--
-- "{123} , 456 " -> "123" "456"
-local separator = space^0 * comma * space^0
-local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
- + C((nestedbraces + (1-(space^0*(comma+P(-1)))))^0)
-local withvalue = Carg(1) * value / function(f,s) return f(s) end
-local pattern_a = spaces * Ct(value*(separator*value)^0)
-local pattern_b = spaces * withvalue * (separator*withvalue)^0
+-- local separator = space^0 * comma * space^0
+-- local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+-- + C((nestedbraces + (1-(space^0*(comma+P(-1)))))^0)
+-- local withvalue = Carg(1) * value / function(f,s) return f(s) end
+-- local pattern_a = spaces * Ct(value*(separator*value)^0)
+-- local pattern_b = spaces * withvalue * (separator*withvalue)^0
+
+local cache_a = { }
+local cache_b = { }
+
+function parsers.groupedsplitat(symbol,withaction)
+ if not symbol then
+ symbol = ","
+ end
+ local pattern = (withaction and cache_b or cache_a)[symbol]
+ if not pattern then
+ local symbols = S(symbol)
+ local separator = space^0 * symbols * space^0
+ local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + C((nestedbraces + (1-(space^0*(symbols+P(-1)))))^0)
+ if withaction then
+ local withvalue = Carg(1) * value / function(f,s) return f(s) end
+ pattern = spaces * withvalue * (separator*withvalue)^0
+ cache_b[symbol] = pattern
+ else
+ pattern = spaces * Ct(value*(separator*value)^0)
+ cache_a[symbol] = pattern
+ end
+ end
+ return pattern
+end
+
+local pattern_a = parsers.groupedsplitat(",",false)
+local pattern_b = parsers.groupedsplitat(",",true)
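+
+-- A small illustration (outer braces are removed, spaces around the symbol are
+-- ignored):
+--
+-- lpegmatch(parsers.groupedsplitat(";"),"a; {b;c}; d") -- { "a", "b;c", "d" }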
function parsers.stripped_settings_to_array(str)
if not str or str == "" then
@@ -221,8 +249,6 @@ end
-- parsers.process_stripped_settings("{123} , 456 ",function(s) print("["..s.."]") end)
-- parsers.process_stripped_settings("123 , 456 ",function(s) print("["..s.."]") end)
---
-
local function set(t,v)
t[#t+1] = v
end
@@ -236,8 +262,8 @@ end
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t, tn, s = { }, 0, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
+ local t, tn, s = { }, 0, sortedkeys(h)
+ omit = omit and tohash(omit)
for i=1,#s do
local key = s[i]
if not omit or not omit[key] then
@@ -275,15 +301,25 @@ function parsers.array_to_string(a,separator)
end
end
-function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
- t = t or { }
--- for s in gmatch(str,"%s*([^, ]+)") do -- space added
- for s in gmatch(str,"[^, ]+") do -- space added
- t[s] = true
- end
- return t
+-- function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
+-- if str then
+-- t = t or { }
+-- for s in gmatch(str,"[^, ]+") do -- space added
+-- t[s] = true
+-- end
+-- return t
+-- else
+-- return { }
+-- end
+-- end
+
+local pattern = Cf(Ct("") * Cg(C((1-S(", "))^1) * S(", ")^0 * Cc(true))^1,rawset)
+
+function utilities.parsers.settings_to_set(str,t)
+ return str and lpegmatch(pattern,str) or { }
end
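+
+-- For instance:
+--
+-- parsers.settings_to_set("foo, bar baz") -- { foo = true, bar = true, baz = true }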
+
function parsers.simple_hash_to_string(h, separator)
local t, tn = { }, 0
for k, v in sortedhash(h) do
@@ -297,7 +333,7 @@ end
-- for mtx-context etc: aaaa bbbb cccc=dddd eeee=ffff
-local str = C((1-whitespace-equal)^1)
+local str = Cs(lpegpatterns.unquoted) + C((1-whitespace-equal)^1)
local setting = Cf( Carg(1) * (whitespace^0 * Cg(str * whitespace^0 * (equal * whitespace^0 * str + Cc(""))))^1,rawset)
local splitter = setting^1
@@ -305,6 +341,12 @@ function utilities.parsers.options_to_hash(str,target)
return str and lpegmatch(splitter,str,1,target or { }) or { }
end
+local splitter = lpeg.tsplitat(" ")
+
+function utilities.parsers.options_to_array(str)
+ return str and lpegmatch(splitter,str) or { }
+end
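+
+-- So, for the mtx-context kind of option strings mentioned above:
+--
+-- utilities.parsers.options_to_array("aaaa bbbb cccc=dddd") -- { "aaaa", "bbbb", "cccc=dddd" }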
+
-- for chem (currently one level)
local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
@@ -436,7 +478,7 @@ local defaultspecification = { separator = ",", quote = '"' }
-- database module
function parsers.csvsplitter(specification)
- specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification = specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator = specification.separator
local quotechar = specification.quote
local separator = S(separator ~= "" and separator or ",")
@@ -475,7 +517,7 @@ end
-- local list, names = mycsvsplitter(crap) inspect(list) inspect(names)
function parsers.rfc4180splitter(specification)
- specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification = specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator = specification.separator --> rfc: COMMA
local quotechar = P(specification.quote) --> DQUOTE
local dquotechar = quotechar * quotechar --> 2DQUOTE
@@ -488,7 +530,7 @@ function parsers.rfc4180splitter(specification)
local field = escaped + non_escaped + Cc("")
local record = Ct(field * (separator * field)^1)
local headerline = record * Cp()
- local wholeblob = Ct((newline^-1 * record)^0)
+ local wholeblob = Ct((newline^(specification.strict and -1 or 1) * record)^0)
return function(data,getheader)
if getheader then
local header, position = lpegmatch(headerline,data)
@@ -542,8 +584,8 @@ end
--
-local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
-local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
+local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + anything)^0)
+local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + anything)^0)
patterns.unittotex = pattern
@@ -551,7 +593,7 @@ function parsers.unittotex(str,textmode)
return lpegmatch(textmode and pattern_text or pattern_math,str)
end
-local pattern = Cs((P("^") / "<sup>" * lpegpatterns.integer * Cc("</sup>") + P(1))^0)
+local pattern = Cs((P("^") / "<sup>" * lpegpatterns.integer * Cc("</sup>") + anything)^0)
function parsers.unittoxml(str)
return lpegmatch(pattern,str)
@@ -560,10 +602,10 @@ end
-- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %"))
local cache = { }
-local spaces = lpeg.patterns.space^0
+local spaces = lpegpatterns.space^0
local dummy = function() end
-table.setmetatableindex(cache,function(t,k)
+setmetatableindex(cache,function(t,k)
local separator = P(k)
local value = (1-separator)^0
local pattern = spaces * C(value) * separator^0 * Cp()
@@ -648,3 +690,27 @@ function utilities.parsers.runtime(time)
local seconds = mod(time,60)
return days, hours, minutes, seconds
end
+
+--
+
+local spacing = whitespace^0
+local apply = P("->")
+local method = C((1-apply)^1)
+local token = lbrace * C((1-rbrace)^1) * rbrace + C(anything^1)
+
+local pattern = spacing * (method * spacing * apply + Carg(1)) * spacing * token
+
+function utilities.parsers.splitmethod(str,default)
+ if str then
+ return lpegmatch(pattern,str,1,default or false)
+ else
+ return default or false, ""
+ end
+end
+
+-- print(utilities.parsers.splitmethod(" foo -> {bar} "))
+-- print(utilities.parsers.splitmethod("foo->{bar}"))
+-- print(utilities.parsers.splitmethod("foo->bar"))
+-- print(utilities.parsers.splitmethod("foo"))
+-- print(utilities.parsers.splitmethod("{foo}"))
+-- print(utilities.parsers.splitmethod())
diff --git a/tex/context/base/util-sbx.lua b/tex/context/base/util-sbx.lua
new file mode 100644
index 000000000..260e8b3b5
--- /dev/null
+++ b/tex/context/base/util-sbx.lua
@@ -0,0 +1,415 @@
+if not modules then modules = { } end modules ['util-sbx'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Note: we use expandname and collapsepath and these use chdir
+-- which is overloaded so we need to use originals there. Just
+-- something to keep in mind.
+
+if not sandbox then require("l-sandbox") end -- for testing
+
+local next, type = next, type
+
+local replace = utilities.templates.replace
+local collapsepath = file.collapsepath
+local expandname = dir.expandname
+local sortedhash = table.sortedhash
+local lpegmatch = lpeg.match
+local platform = os.type
+local P, S, C = lpeg.P, lpeg.S, lpeg.C
+local gsub = string.gsub
+local lower = string.lower
+local unquoted = string.unquoted
+local optionalquoted = string.optionalquoted
+
+local sandbox = sandbox
+local validroots = { }
+local validrunners = { }
+local validbinaries = { }
+local validators = { }
+local p_validroot = nil
+local finalized = nil
+local norunners = false
+local trace = false
+local p_split = lpeg.tsplitat(" ") -- more spaces?
+
+local report = logs.reporter("sandbox")
+
+trackers.register("sandbox",function(v) trace = v end) -- often too late anyway
+
+sandbox.setreporter(report)
+
+sandbox.finalizer(function()
+ finalized = true
+end)
+
+local function registerroot(root,what) -- what == read|write
+ if finalized then
+ report("roots are already finalized")
+ else
+ root = collapsepath(expandname(root))
+ if platform == "windows" then
+ root = lower(root) -- we assume ascii names
+ end
+ -- true: read & write | false: read
+ validroots[root] = what == "write" or false
+ end
+end
+
+sandbox.finalizer(function() -- initializers can set the path
+ if p_validroot then
+ report("roots are already initialized")
+ else
+ sandbox.registerroot(".","write") -- always ok
+ -- also register texmf as read
+ for name in sortedhash(validroots) do
+ if p_validroot then
+ p_validroot = P(name) + p_validroot
+ else
+ p_validroot = P(name)
+ end
+ end
+ p_validroot = p_validroot / validroots
+ end
+end)
+
+local function registerrunner(specification)
+ if finalized then
+ report("runners are already finalized")
+ else
+ local name = specification.name
+ if not name then
+ report("no runner name specified")
+ return
+ end
+ local program = specification.program
+ if type(program) == "string" then
+ -- common for all platforms
+ elseif type(program) == "table" then
+ program = program[platform]
+ end
+ if type(program) ~= "string" or program == "" then
+ report("invalid runner %a specified for platform %a",name,platform)
+ return
+ end
+ specification.program = program
+ validrunners[name] = specification
+ end
+end
+
+local function registerbinary(name)
+ if finalized then
+ report("binaries are already finalized")
+ elseif type(name) == "string" and name ~= "" then
+ validbinaries[name] = true
+ end
+end
+
+-- begin of validators
+
+local p_write = S("wa") p_write = (1 - p_write)^0 * p_write
+local p_path = S("\\/~$%:") p_path = (1 - p_path )^0 * p_path -- be easy on other arguments
+
+local function normalized(name) -- only used in executers
+ if platform == "windows" then
+ name = gsub(name,"/","\\")
+ end
+ return name
+end
+
+function sandbox.possiblepath(name)
+ return lpegmatch(p_path,name) and true or false
+end
+
+local filenamelogger = false
+
+function sandbox.setfilenamelogger(l)
+ filenamelogger = type(l) == "function" and l or false
+end
+
+local function validfilename(name,what)
+ if p_validroot and type(name) == "string" and lpegmatch(p_path,name) then
+ local asked = collapsepath(expandname(name))
+ if platform == "windows" then
+ asked = lower(asked) -- we assume ascii names
+ end
+ local okay = lpegmatch(p_validroot,asked)
+ if okay == true then
+ -- read and write access
+ if filenamelogger then
+ filenamelogger(name,"w",asked,true)
+ end
+ return name
+ elseif okay == false then
+ -- read only access
+ if not what then
+ -- no further argument to io.open so a readonly case
+ if filenamelogger then
+ filenamelogger(name,"r",asked,true)
+ end
+ return name
+ elseif lpegmatch(p_write,what) then
+ if filenamelogger then
+ filenamelogger(name,"w",asked,false)
+ end
+ return -- we want write access
+ else
+ if filenamelogger then
+ filenamelogger(name,"r",asked,true)
+ end
+ return name
+ end
+ else
+ if filenamelogger then
+ filenamelogger(name,"*",name,false)
+ end
+ end
+ else
+ return name
+ end
+end
+
+local function readable(name)
+ if platform == "windows" then
+ name = lower(name) -- we assume ascii names
+ end
+ local valid = validfilename(name,"r")
+ if valid then
+ return normalized(valid)
+ end
+end
+
+local function writeable(name)
+ if platform == "windows" then
+ name = lower(name) -- we assume ascii names
+ end
+ local valid = validfilename(name,"w")
+ if valid then
+ return normalized(valid)
+ end
+end
+
+validators.readable = readable
+validators.writeable = writeable
+validators.filename = readable
+
+table.setmetatableindex(validators,function(t,k)
+ if k then
+ t[k] = readable
+ end
+ return readable
+end)
+
+function validators.string(s)
+ return s -- can be used to prevent filename checking
+end
+
+-- end of validators
+
+sandbox.registerroot = registerroot
+sandbox.registerrunner = registerrunner
+sandbox.registerbinary = registerbinary
+sandbox.validfilename = validfilename
+
+local function filehandlerone(action,one,...)
+ local checkedone = validfilename(one)
+ if checkedone then
+ return action(one,...)
+ else
+-- report("file %a is unreachable",one)
+ end
+end
+
+local function filehandlertwo(action,one,two,...)
+ local checkedone = validfilename(one)
+ if checkedone then
+ local checkedtwo = validfilename(two)
+ if checkedtwo then
+ return action(one,two,...)
+ else
+-- report("file %a is unreachable",two)
+ end
+ else
+-- report("file %a is unreachable",one)
+ end
+end
+
+local function iohandler(action,one,...)
+ if type(one) == "string" then
+ local checkedone = validfilename(one)
+ if checkedone then
+ return action(one,...)
+ end
+ elseif one then
+ return action(one,...)
+ else
+ return action()
+ end
+end
+
+-- runners can be strings or tables
+--
+-- os.execute : string
+-- os.exec : table with program in [0|1]
+-- os.spawn : table with program in [0|1]
+--
+-- our execute: registered program with specification
+
+local function runhandler(action,name,specification)
+ local kind = type(name)
+ if kind ~= "string" then
+ return
+ end
+ if norunners then
+ report("no runners permitted, ignoring command: %s",name)
+ return
+ end
+ local spec = validrunners[name]
+ if not spec then
+ report("unknown runner: %s",name)
+ return
+ end
+ -- specs are already checked
+ local program = spec.program
+ local variables = { }
+ local checkers = spec.checkers or { }
+ if specification then
+ -- we only handle runners that are defined before the sandbox is
+ -- closed so in principle we cannot have user runs with no files
+ -- while for context runners we assume a robust specification
+ for k, v in next, specification do
+ local checker = validators[checkers[k]]
+ local value = checker(unquoted(v)) -- todo: write checkers
+ if value then
+ variables[k] = optionalquoted(value)
+ else
+ report("suspicious argument found, run blocked: %s",v)
+ return
+ end
+ end
+ end
+ local command = replace(program,variables)
+ if trace then
+ report("executing runner: %s",command)
+ end
+ return action(command)
+end
+
+-- only registered (from list) -- no checking on writable so let's assume harmless
+-- runs
+
+local function binaryhandler(action,name)
+ local kind = type(name)
+ local list = name
+ if kind == "string" then
+ list = lpegmatch(p_split,name)
+ end
+ local program = list[0] or list[1]
+ if type(program) ~= "string" or program == "" then
+ return --silently ignore
+ end
+ if norunners then
+ report("no binaries permitted, ignoring command: %s",program)
+ return
+ end
+ if not validbinaries[program] then
+ report("binary is not permitted: %s",program)
+ return
+ end
+ for i=0,#list do
+ local n = list[i]
+ if n then
+ local v = readable(unquoted(n))
+ if v then
+ list[i] = optionalquoted(v)
+ else
+ report("suspicious argument found, run blocked: %s",n)
+ return
+ end
+ end
+ end
+ return action(name)
+end
+
+sandbox.filehandlerone = filehandlerone
+sandbox.filehandlertwo = filehandlertwo
+sandbox.iohandler = iohandler
+sandbox.runhandler = runhandler
+sandbox.binaryhandler = binaryhandler
+
+function sandbox.disablerunners()
+ norunners = true
+end
+
+local execute = sandbox.original(os.execute)
+
+function sandbox.run(name,specification)
+ return runhandler(execute,name,specification)
+end
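+
+-- A possible usage sketch; the runner name, template and file are made up and
+-- only illustrate the flow (register before finalizing, run afterwards):
+--
+-- sandbox.registerrunner {
+--     name     = "mytool",
+--     program  = "mytool --input=%filename%", -- %key% slots are filled from the specification
+--     checkers = { filename = "readable" },
+-- }
+--
+-- sandbox.run("mytool", { filename = "somefile.tex" })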
+
+-------------------
+
+local overload = sandbox.overload
+local register = sandbox.register
+
+ overload(loadfile, filehandlerone,"loadfile") -- todo
+
+if io then
+ overload(io.open, filehandlerone,"io.open")
+ overload(io.popen, filehandlerone,"io.popen")
+ overload(io.input, iohandler, "io.input")
+ overload(io.output, iohandler, "io.output")
+ overload(io.lines, filehandlerone,"io.lines")
+end
+
+if os then
+ overload(os.execute, binaryhandler, "os.execute")
+ overload(os.spawn, binaryhandler, "os.spawn")
+ overload(os.exec, binaryhandler, "os.exec")
+ overload(os.rename, filehandlertwo,"os.rename")
+ overload(os.remove, filehandlerone,"os.remove")
+end
+
+if lfs then
+ overload(lfs.chdir, filehandlerone,"lfs.chdir")
+ overload(lfs.mkdir, filehandlerone,"lfs.mkdir")
+ overload(lfs.rmdir, filehandlerone,"lfs.rmdir")
+ overload(lfs.isfile, filehandlerone,"lfs.isfile")
+ overload(lfs.isdir, filehandlerone,"lfs.isdir")
+ overload(lfs.attributes, filehandlerone,"lfs.attributes")
+ overload(lfs.dir, filehandlerone,"lfs.dir")
+ overload(lfs.lock_dir, filehandlerone,"lfs.lock_dir")
+ overload(lfs.touch, filehandlerone,"lfs.touch")
+ overload(lfs.link, filehandlertwo,"lfs.link")
+ overload(lfs.setmode, filehandlerone,"lfs.setmode")
+ overload(lfs.readlink, filehandlerone,"lfs.readlink")
+ overload(lfs.shortname, filehandlerone,"lfs.shortname")
+ overload(lfs.symlinkattributes,filehandlerone,"lfs.symlinkattributes")
+end
+
+-- these are used later on
+
+if zip then
+ zip.open = register(zip.open, filehandlerone,"zip.open")
+end
+
+if fontloader then
+ fontloader.open = register(fontloader.open,filehandlerone,"fontloader.open")
+ fontloader.info = register(fontloader.info,filehandlerone,"fontloader.info")
+end
+
+if epdf then
+ epdf.open = register(epdf.open, filehandlerone,"epdf.open")
+end
+
+-- not used in a normal mkiv run : os.spawn = os.execute
+-- not used in a normal mkiv run : os.exec = os.exec
+
+-- print(io.open("test.log"))
+-- sandbox.enable()
+-- print(io.open("test.log"))
+-- print(io.open("t:/test.log"))
diff --git a/tex/context/base/util-sci.lua b/tex/context/base/util-sci.lua
new file mode 100644
index 000000000..c3e24cd9d
--- /dev/null
+++ b/tex/context/base/util-sci.lua
@@ -0,0 +1,280 @@
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = scite or { }
+utilities.scite = scite
+
+local report = logs.reporter("scite")
+
+local lexerroot = file.dirname(resolvers.find_file("scite-context-lexer.lua"))
+
+local knownlexers = {
+ tex = "tex", mkiv = "tex", mkvi = "tex", mkxi = "tex", mkix = "tex", mkii = "tex", cld = "tex",
+ lua = "lua", lfg = "lua", lus = "lua",
+ w = "web", ww = "web",
+ c = "cpp", h = "cpp", cpp = "cpp", hpp = "cpp", cxx = "cpp", hxx = "cpp",
+ xml = "xml", lmx = "xml", ctx = "xml", xsl = "xml", xsd = "xml", rlx = "xml", css = "xml", dtd = "xml",
+ bib = "bibtex",
+ rme = "txt",
+ -- todo: pat/hyp ori
+}
+
+lexer = nil -- main lexer, global (for the moment needed for themes)
+
+local function loadscitelexer()
+ if not lexer then
+ dir.push(lexerroot)
+ lexer = dofile("scite-context-lexer.lua")
+ dofile("themes/scite-context-theme.lua")
+ dir.pop()
+ end
+ return lexer
+end
+
+local loadedlexers = setmetatableindex(function(t,k)
+ local l = knownlexers[k] or k
+ dir.push(lexerroot)
+ loadscitelexer()
+ local v = lexer.load(formatters["scite-context-lexer-%s"](l))
+ dir.pop()
+ t[l] = v
+ t[k] = v
+ return v
+end)
+
+scite.loadedlexers = loadedlexers
+scite.knownlexers = knownlexers
+scite.loadscitelexer = loadscitelexer
+
+local f_fore_bold = formatters['.%s { display: inline ; font-weight: bold ; color: #%s%s%s ; }']
+local f_fore_none = formatters['.%s { display: inline ; font-weight: normal ; color: #%s%s%s ; }']
+local f_none_bold = formatters['.%s { display: inline ; font-weight: bold ; }']
+local f_none_none = formatters['.%s { display: inline ; font-weight: normal ; }']
+local f_div_class = formatters['<div class="%s">%s</div>']
+local f_linenumber = formatters['<div class="linenumber">%s</div>\n']
+local f_div_number = formatters['.linenumber { display: inline-block ; font-weight: normal ; width: %sem ; margin-right: 2em ; padding-right: .25em ; text-align: right ; background-color: #C7C7C7 ; }']
+
+local replacer_regular = lpeg.replacer {
+ ["<"] = "&lt;",
+ [">"] = "&gt;",
+ ["&"] = "&amp;",
+}
+
+local linenumber = 0
+local linenumbers = { }
+
+local replacer_numbered = lpeg.replacer {
+ ["<"] = "&lt;",
+ [">"] = "&gt;",
+ ["&"] = "&amp;",
+ [lpeg.patterns.newline] = function()
+ linenumber = linenumber + 1
+ linenumbers[linenumber] = f_linenumber(linenumber)
+ return "\n"
+ end,
+}
+
+local css = nil
+
+local function exportcsslexing()
+ if not css then
+ loadscitelexer()
+ local function black(f)
+ return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+ end
+ local result, r = { }, 0
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local bold = v.bold
+ local fore = v.fore
+ r = r + 1
+ if fore and not black(fore) then
+ if bold then
+ result[r] = f_fore_bold(k,fore[1],fore[2],fore[3])
+ else
+ result[r] = f_fore_none(k,fore[1],fore[2],fore[3])
+ end
+ else
+ if bold then
+ result[r] = f_none_bold(k)
+ else
+ result[r] = f_none_none(k)
+ end
+ end
+ end
+ css = concat(result,"\n")
+ end
+ return css
+end
+
+local function exportwhites()
+ return setmetatableindex(function(t,k)
+ local v = find(k,"white") and true or false
+ t[k] = v
+ return v
+ end)
+end
+
+local function exportstyled(lexer,text,numbered)
+ local result = lexer.lex(lexer,text,0)
+ local start = 1
+ local whites = exportwhites()
+ local buffer = { }
+ local b = 0
+ linenumber = 0
+ linenumbers = { }
+ local replacer = numbered and replacer_numbered or replacer_regular
+ local n = #result
+ for i=1,n,2 do
+ local ii = i + 1
+ local style = result[i]
+ local position = result[ii]
+ local txt = sub(text,start,position-1)
+ txt = lpegmatch(replacer,txt)
+ b = b + 1
+ if whites[style] then
+ buffer[b] = txt
+ else
+ buffer[b] = f_div_class(style,txt)
+ end
+ start = position
+ end
+ buffer = concat(buffer)
+ return buffer, concat(linenumbers)
+end
+
+local function exportcsslinenumber()
+ return f_div_number(#tostring(linenumber)/2+1)
+end
+
+local htmlfile = utilities.templates.replacer([[
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+ <html xmlns="http://www.w3.org/1999/xhtml">
+ <title>%title%</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+ <style type="text/css"><!--
+%lexingstyles%
+%numberstyles%
+ --></style>
+ <body>
+ <table style="padding:0; margin:0;">
+ <tr>
+ <td><pre>%linenumbers%</pre></td>
+ <td><pre>%lexedcontent%</pre></td>
+ </tr>
+ </table>
+ </body>
+</html>
+]])
+
+function scite.tohtml(data,lexname,numbered,title)
+ local source, lines = exportstyled(loadedlexers[lexname],data or "",numbered)
+ return htmlfile {
+ lexedcontent = source, -- before numberstyles
+ lexingstyles = exportcsslexing(),
+ numberstyles = exportcsslinenumber(),
+ title = title or "context source file",
+ linenumbers = lines,
+ }
+end
+
+local function maketargetname(name)
+ if name then
+ return file.removesuffix(name) .. "-" .. file.suffix(name) .. ".html"
+ else
+ return "util-sci.html"
+ end
+end
+
+function scite.filetohtml(filename,lexname,targetname,numbered,title)
+ io.savedata(targetname or "util-sci.html",scite.tohtml(io.loaddata(filename),lexname or file.suffix(filename),numbered,title or filename))
+end
+
+function scite.css()
+ return exportcsslexing() .. "\n" .. exportcsslinenumber()
+end
+
+function scite.html(data,lexname,numbered)
+ return exportstyled(loadedlexers[lexname],data or "",numbered)
+end
+
+local f_tree_entry = formatters['<a href="%s" class="dir-entry">%s</a>']
+
+local htmlfile = utilities.templates.replacer([[
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+ <html xmlns="http://www.w3.org/1999/xhtml">
+ <title>%title%</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+ <style type="text/css"><!--
+%styles%
+ --></style>
+ <body>
+ <pre>
+%dirlist%
+ </pre>
+ </body>
+</html>
+]])
+
+function scite.converttree(sourceroot,targetroot,numbered)
+ if lfs.isdir(sourceroot) then
+ statistics.starttiming()
+ local skipped = { }
+ local noffiles = 0
+ dir.makedirs(targetroot)
+ local function scan(sourceroot,targetroot,subpath)
+ local tree = { }
+ for name in lfs.dir(sourceroot) do
+ if name ~= "." and name ~= ".." then
+ local sourcename = file.join(sourceroot,name)
+ local targetname = file.join(targetroot,name)
+ local mode = lfs.attributes(sourcename,'mode')
+ local path = subpath and file.join(subpath,name) or name
+ if mode == 'file' then
+ local filetype = file.suffix(sourcename)
+ local basename = file.basename(name)
+ local targetname = maketargetname(targetname)
+ local fullname = file.join(path,name)
+ if knownlexers[filetype] then
+ report("converting file %a to %a",sourcename,targetname)
+ scite.filetohtml(sourcename,nil,targetname,numbered,fullname)
+ noffiles = noffiles + 1
+ tree[#tree+1] = f_tree_entry(file.basename(targetname),basename)
+ else
+ skipped[filetype] = true
+ report("no lexer for %a",sourcename)
+ end
+ else
+ dir.makedirs(targetname)
+ scan(sourcename,targetname,path)
+ tree[#tree+1] = f_tree_entry(file.join(name,"files.html"),name)
+ end
+ end
+ end
+ report("saving tree in %a",targetroot)
+ local htmldata = htmlfile {
+ dirlist = concat(tree,"\n"),
+ styles = "",
+ title = path or "context dir listing",
+ }
+ io.savedata(file.join(targetroot,"files.html"),htmldata)
+ end
+ scan(sourceroot,targetroot)
+ if next(skipped) then
+ report("skipped filetypes: %a",table.concat(table.sortedkeys(skipped)," "))
+ end
+ statistics.stoptiming()
+ report("conversion time for %s files: %s",noffiles,statistics.elapsedtime())
+ end
+end
+
+-- scite.filetohtml("strc-sec.mkiv",nil,"e:/tmp/util-sci.html",true)
+-- scite.filetohtml("syst-aux.mkiv",nil,"e:/tmp/util-sci.html",true)
+
+-- scite.converttree("t:/texmf/tex/context","e:/tmp/html/context",true)
+
+return scite
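For orientation, a minimal usage sketch of the exporter added above (assuming the returned module table is available as scite and that a lexer is registered for the lua suffix; the file names are hypothetical):

    local data = io.loaddata("demo.lua")
    local page = scite.tohtml(data, "lua", true, "demo.lua") -- numbered listing as one html string
    io.savedata("demo.html", page)
    -- or, in one go:
    scite.filetohtml("demo.lua", nil, "demo.html", true)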
diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua
index 35e693285..08fc4e95c 100644
--- a/tex/context/base/util-seq.lua
+++ b/tex/context/base/util-seq.lua
@@ -17,13 +17,15 @@ use locals to refer to them when compiling the chain.</p>
-- todo: protect groups (as in tasks)
-local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch
+local gsub, concat, gmatch = string.gsub, table.concat, string.gmatch
local type, load = type, load
utilities = utilities or { }
local tables = utilities.tables
local allocate = utilities.storage.allocate
+local formatters = string.formatters
+
local sequencers = { }
utilities.sequencers = sequencers
@@ -31,6 +33,7 @@ local functions = allocate()
sequencers.functions = functions
local removevalue = tables.removevalue
+local replacevalue = tables.replacevalue
local insertaftervalue = tables.insertaftervalue
local insertbeforevalue = tables.insertbeforevalue
@@ -189,6 +192,18 @@ function sequencers.removeaction(t,group,action,force)
end
end
+function sequencers.replaceaction(t,group,oldaction,newaction,force)
+ t = known[t]
+ if t then
+ local g = t.list[group]
+ if g and (force or validaction(oldaction)) then
+ replacevalue(g,oldaction,newaction)
+ t.dirty = true
+ t.runner = nil
+ end
+ end
+end
+
local function localize(str)
return (gsub(str,"[%.: ]+","_"))
end
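The hunk above introduces sequencers.replaceaction next to the existing remove and insert helpers; a minimal hedged sketch of the call pattern (the sequence, group and action names are hypothetical):

    local sequencers = utilities.sequencers
    -- replace one action by another inside a named group; the compiled runner is
    -- invalidated (runner = nil, dirty = true) so it gets rebuilt on the next run;
    -- a fifth (force) argument skips validation of the old action
    sequencers.replaceaction("processors", "system", "old.action", "new.action")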
@@ -204,20 +219,23 @@ local function construct(t)
for i=1,#actions do
local action = actions[i]
if not askip[action] then
+ local localized
if type(action) == "function" then
local name = localize(tostring(action))
functions[name] = action
- action = format("utilities.sequencers.functions.%s",name)
+ action = formatters["utilities.sequencers.functions.%s"](name)
+ localized = localize(name) -- shorter than action
+ else
+ localized = localize(action)
end
- local localized = localize(action)
n = n + 1
- variables[n] = format("local %s = %s",localized,action)
+ variables[n] = formatters["local %s = %s"](localized,action)
if not returnvalues then
- calls[n] = format("%s(%s)",localized,arguments)
+ calls[n] = formatters["%s(%s)"](localized,arguments)
elseif n == 1 then
- calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments)
+ calls[n] = formatters["local %s = %s(%s)"](returnvalues,localized,arguments)
else
- calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments)
+ calls[n] = formatters["%s = %s(%s)"](returnvalues,localized,arguments)
end
end
end
@@ -230,9 +248,9 @@ local function construct(t)
variables = concat(variables,"\n")
calls = concat(calls,"\n")
if results then
- t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results)
+ t.compiled = formatters["%s\nreturn function(%s)\n%s\nreturn %s\nend"](variables,arguments,calls,results)
else
- t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
+ t.compiled = formatters["%s\nreturn function(%s)\n%s\nend"](variables,arguments,calls)
end
end
-- print(t.compiled)
@@ -258,6 +276,7 @@ compile = function(t,compiler,n) -- already referred to in sequencers.new
if compiled == "" then
runner = false
else
+-- inspect(compiled)
runner = compiled and load(compiled)() -- we can use loadstripped here
end
t.runner = runner
@@ -314,12 +333,12 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
if not askip[action] then
local localized = localize(action)
n = n + 1
- vars[n] = format("local %s = %s",localized,action)
+ vars[n] = formatters["local %s = %s"](localized,action)
-- only difference with tostring is kind and rets (why no return)
if kind[action] == "nohead" then
- calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args)
+ calls[n] = formatters[" ok = %s(head%s) done = done or ok"](localized,args)
else
- calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args)
+ calls[n] = formatters[" head, ok = %s(head%s) done = done or ok"](localized,args)
end
-- local s = " print('" .. tostring(group) .. " " .. tostring(action) .. " : ' .. tostring(head)) "
-- calls[n] = s .. calls[n] .. s
@@ -327,6 +346,6 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
end
end
end
- local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
+ local processor = #calls > 0 and formatters[template_yes](concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
return processor
end
diff --git a/tex/context/base/util-sql-loggers.lua b/tex/context/base/util-sql-loggers.lua
index 7fceb8032..ceb1ff75c 100644
--- a/tex/context/base/util-sql-loggers.lua
+++ b/tex/context/base/util-sql-loggers.lua
@@ -52,7 +52,7 @@ table.setmetatableindex(fromtype,function() return "info" end)
loggers.totype = totype
loggers.fromtype = fromtype
-local template =[[
+local template = [[
CREATE TABLE IF NOT EXISTS %basename% (
`id` int(11) NOT NULL AUTO_INCREMENT,
`time` int(11) NOT NULL,
diff --git a/tex/context/base/util-sta.lua b/tex/context/base/util-sta.lua
index 1a61ec4e6..27ab5a624 100644
--- a/tex/context/base/util-sta.lua
+++ b/tex/context/base/util-sta.lua
@@ -81,6 +81,8 @@ end
function stacker.new(name)
+ local report = logs.reporter("stacker",name or nil)
+
local s
local stack = { }
@@ -126,8 +128,18 @@ function stacker.new(name)
end
end
- local tops = { }
- local top, switch
+ local tops = { }
+ local top = nil
+ local switch = nil
+
+ local function resolve_reset(mode)
+ if #tops > 0 then
+ report("resetting %s left-over states of %a",#tops,name)
+ end
+ tops = { }
+ top = nil
+ switch = nil
+ end
local function resolve_begin(mode)
if mode then
@@ -206,8 +218,7 @@ function stacker.new(name)
local function resolve_end()
-- resolve_step(s.unset)
- local noftop = #top
- if noftop > 0 then
+ if #tops > 0 then -- was #top brrr
local result = s.stop(s,top,1,#top)
remove(tops)
top = tops[#tops]
@@ -224,8 +235,6 @@ function stacker.new(name)
resolve_end()
end
- local report = logs.reporter("stacker",name or nil)
-
s = {
name = name or "unknown",
unset = -1,
@@ -240,6 +249,7 @@ function stacker.new(name)
resolve_begin = resolve_begin,
resolve_step = resolve_step,
resolve_end = resolve_end,
+ resolve_reset = resolve_reset,
}
return s -- we can overload functions
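The new resolve_reset entry complements resolve_begin, resolve_step and resolve_end; a hedged sketch of how a consumer might flush left-over state (the stacker name is hypothetical and the module is assumed to be reachable as utilities.stacker):

    local s = utilities.stacker.new("backgrounds")
    -- ... resolve_begin / resolve_step / resolve_end calls driven elsewhere ...
    s.resolve_reset() -- reports and clears any states that were left open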
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index af8b1651e..de4a87e9f 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -20,21 +20,44 @@ local utfchar, utfbyte = utf.char, utf.byte
----- loadstripped = utilities.lua.loadstripped
----- setmetatableindex = table.setmetatableindex
-local loadstripped = _LUAVERSION < 5.2 and load or function(str)
- return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stipped load
+local loadstripped = nil
+
+if _LUAVERSION < 5.2 then
+
+ loadstripped = function(str,shortcuts)
+ return load(str)
+ end
+
+else
+
+ loadstripped = function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+
end
-- todo: make a special namespace for the formatter
if not number then number = { } end -- temp hack for luatex-fonts
-local stripper = patterns.stripzeros
+local stripper = patterns.stripzeros
+local newline = patterns.newline
+local endofstring = patterns.endofstring
+local whitespace = patterns.whitespace
+local spacer = patterns.spacer
+local spaceortab = patterns.spaceortab
local function points(n)
+ n = tonumber(n)
return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n = tonumber(n)
return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
end
@@ -44,12 +67,12 @@ number.basepoints = basepoints
-- str = " \n \ntest \n test\ntest "
-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]")
-local rubish = patterns.spaceortab^0 * patterns.newline
-local anyrubish = patterns.spaceortab + patterns.newline
+local rubish = spaceortab^0 * newline
+local anyrubish = spaceortab + newline
local anything = patterns.anything
-local stripped = (patterns.spaceortab^1 / "") * patterns.newline
+local stripped = (spaceortab^1 / "") * newline
local leading = rubish^0 / ""
-local trailing = (anyrubish^1 * patterns.endofstring) / ""
+local trailing = (anyrubish^1 * endofstring) / ""
local redundant = rubish^3 / "\n"
local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0)
@@ -111,7 +134,7 @@ local pattern =
return ""
end
end
- + patterns.newline * Cp() / function(position)
+ + newline * Cp() / function(position)
extra, start = 0, position
end
+ patterns.anything
@@ -136,17 +159,105 @@ end
-- print(strings.tabtospace(t[k]))
-- end
-function strings.striplong(str) -- strips all leading spaces
- str = gsub(str,"^%s*","")
- str = gsub(str,"[\n\r]+ *","\n")
- return str
+-- todo: lpeg
+
+-- function strings.striplong(str) -- strips all leading spaces
+-- str = gsub(str,"^%s*","")
+-- str = gsub(str,"[\n\r]+ *","\n")
+-- return str
+-- end
+
+local space = spacer^0
+local nospace = space/""
+local endofline = nospace * newline
+
+local stripend = (whitespace^1 * endofstring)/""
+
+local normalline = (nospace * ((1-space*(newline+endofstring))^1) * nospace)
+
+local stripempty = endofline^1/""
+local normalempty = endofline^1
+local singleempty = endofline * (endofline^0/"")
+local doubleempty = endofline * endofline^-1 * (endofline^0/"")
+
+local stripstart = stripempty^0
+
+local p_prune_normal = Cs ( stripstart * ( stripend + normalline + normalempty )^0 )
+local p_prune_collapse = Cs ( stripstart * ( stripend + normalline + doubleempty )^0 )
+local p_prune_noempty = Cs ( stripstart * ( stripend + normalline + singleempty )^0 )
+local p_retain_normal = Cs ( ( normalline + normalempty )^0 )
+local p_retain_collapse = Cs ( ( normalline + doubleempty )^0 )
+local p_retain_noempty = Cs ( ( normalline + singleempty )^0 )
+
+-- function striplines(str,prune,collapse,noempty)
+-- if prune then
+-- if noempty then
+-- return lpegmatch(p_prune_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_prune_collapse,str) or str
+-- else
+-- return lpegmatch(p_prune_normal,str) or str
+-- end
+-- else
+-- if noempty then
+-- return lpegmatch(p_retain_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_retain_collapse,str) or str
+-- else
+-- return lpegmatch(p_retain_normal,str) or str
+-- end
+-- end
+-- end
+
+local striplinepatterns = {
+ ["prune"] = p_prune_normal,
+ ["prune and collapse"] = p_prune_collapse, -- default
+ ["prune and no empty"] = p_prune_noempty,
+ ["retain"] = p_retain_normal,
+ ["retain and collapse"] = p_retain_collapse,
+ ["retain and no empty"] = p_retain_noempty,
+ ["collapse"] = patterns.collapser, -- how about: stripper fullstripper
+}
+
+setmetatable(striplinepatterns,{ __index = function(t,k) return p_prune_collapse end })
+
+strings.striplinepatterns = striplinepatterns
+
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
end
--- local template = string.striplong([[
+-- also see: string.collapsespaces
+
+strings.striplong = strings.striplines -- for old times' sake
+
+-- local str = table.concat( {
+-- " ",
+-- " aap",
+-- " noot mies",
+-- " ",
+-- " ",
+-- " zus wim jet",
+-- "zus wim jet",
+-- " zus wim jet",
+-- " ",
+-- }, "\n")
+
+-- local str = table.concat( {
+-- " aaaa",
+-- " bb",
+-- " cccccc",
+-- }, "\n")
+
+-- for k, v in table.sortedhash(utilities.strings.striplinepatterns) do
+-- logs.report("stripper","method: %s, result: [[%s]]",k,utilities.strings.striplines(str,k))
+-- end
+
+-- inspect(strings.striplong([[
-- aaaa
-- bb
-- cccccc
--- ]])
+-- ]]))
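A short hedged sketch of the new line stripper: the second argument selects one of the striplinepatterns keys above, and unknown or missing methods fall back to the prune-and-collapse pattern through the metatable:

    local strings = utilities.strings
    local str = "   \n  aap\n\n\n  noot mies  \n  "
    print(strings.striplines(str,"prune and collapse")) -- the default flavour
    print(strings.striplines(str,"retain and no empty"))
    print(strings.striplines(str,"collapse"))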
function strings.nice(str)
str = gsub(str,"[:%-+_]+"," ") -- maybe more
@@ -178,6 +289,7 @@ end
-- octal %...o number
-- string %...s string number
-- float %...f number
+-- checked float %...F number
-- exponential %...e number
-- exponential %...E number
-- autofloat %...g number
@@ -249,10 +361,10 @@ strings.tracers = tracedchars
function string.tracedchar(b)
-- todo: table
if type(b) == "number" then
- return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")")
+ return tracedchars[b] or (utfchar(b) .. " (U+" .. format("%05X",b) .. ")")
else
local c = utfbyte(b)
- return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")")
+ return tracedchars[c] or (b .. " (U+" .. (c and format("%05X",c) or "?????") .. ")")
end
end
@@ -291,33 +403,67 @@ function number.sparseexponent(f,n)
return tostring(n)
end
-local preamble = [[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-local formattednumber = number.formatted
-local sparseexponent = number.sparseexponent
-]]
-
local template = [[
%s
%s
return function(%s) return %s end
]]
+local preamble, environment = "", { }
+
+if _LUAVERSION < 5.2 then
+
+ preamble = [[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+
+else
+
+ environment = {
+ global = global or _G,
+ lpeg = lpeg,
+ type = type,
+ tostring = tostring,
+ tonumber = tonumber,
+ format = string.format,
+ concat = table.concat,
+ signed = number.signed,
+ points = number.points,
+ basepoints = number.basepoints,
+ utfchar = utf.char,
+ utfbyte = utf.byte,
+ lpegmatch = lpeg.match,
+ nspaces = string.nspaces,
+ tracedchar = string.tracedchar,
+ autosingle = string.autosingle,
+ autodouble = string.autodouble,
+ sequenced = table.sequenced,
+ formattednumber = number.formatted,
+ sparseexponent = number.sparseexponent,
+ }
+
+end
+
+-- -- --
+
local arguments = { "a1" } -- faster than previously used (select(n,...))
setmetatable(arguments, { __index =
@@ -368,7 +514,7 @@ local format_i = function(f)
if f and f ~= "" then
return format("format('%%%si',a%s)",f,n)
else
- return format("format('%%i',a%s)",n)
+ return format("format('%%i',a%s)",n) -- why not just tostring()
end
end
@@ -384,6 +530,24 @@ local format_f = function(f)
return format("format('%%%sf',a%s)",f,n)
end
+-- The next one formats an integer as an integer and very small values as zero. This is needed
+-- for the pdf backend code.
+--
+-- 1.23 % 1 : 0.23
+-- - 1.23 % 1 : 0.77
+--
+-- We could probably use just %s with integers but who knows what Lua 5.3 will do? So let's
+-- for the moment use %i.
+
+local format_F = function(f) -- beware, no cast to number
+ n = n + 1
+ if not f or f == "" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
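A hedged illustration of what %F is meant to produce once it is hooked into the builder below (using the default string.formatters instance; the fractional case follows the %.9f branch above):

    local f = string.formatters
    print(f["%F"](2))            -- "2"            (integral value, printed with %i)
    print(f["%F"](0.0000000001)) -- "0"            (below the threshold, forced to zero)
    print(f["%F"](1.25))         -- "1.250000000"  (falls through to %.9f)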
+
local format_g = function(f)
n = n + 1
return format("format('%%%sg',a%s)",f,n)
@@ -657,7 +821,7 @@ local builder = Cs { "start",
V("!") -- new
+ V("s") + V("q")
+ V("i") + V("d")
- + V("f") + V("g") + V("G") + V("e") + V("E")
+ + V("f") + V("F") + V("g") + V("G") + V("e") + V("E")
+ V("x") + V("X") + V("o")
--
+ V("c")
@@ -680,7 +844,7 @@ local builder = Cs { "start",
+ V("m") + V("M") -- new
+ V("z") -- new
--
- + V("*") -- ignores probably messed up %
+ -- + V("?") -- ignores probably messed up %
)
+ V("*")
)
@@ -692,6 +856,7 @@ local builder = Cs { "start",
["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
+ ["F"] = (prefix_any * P("F")) / format_F, -- %F => regular %f (float) but 0/1 check
["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
@@ -734,34 +899,45 @@ local builder = Cs { "start",
["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
--
["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%)
+ ["?"] = Cs(((1-P("%"))^1 )^1) / format_rest, -- rest (including %%)
--
["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
}
-- we can be clever and only alias what is needed
+-- local direct = Cs (
+-- P("%")/""
+-- * Cc([[local format = string.format return function(str) return format("%]])
+-- * (S("+- .") + R("09"))^0
+-- * S("sqidfgGeExXo")
+-- * Cc([[",str) end]])
+-- * P(-1)
+-- )
+
local direct = Cs (
- P("%")/""
- * Cc([[local format = string.format return function(str) return format("%]])
- * (S("+- .") + R("09"))^0
- * S("sqidfgGeExXo")
- * Cc([[",str) end]])
- * P(-1)
- )
+ P("%")
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
local p = lpegmatch(direct,str)
if p then
+ -- f = loadstripped(p)()
+ -- print("builder 1 >",p)
f = loadstripped(p)()
else
n = 0
- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ -- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ p = lpegmatch(builder,str,1,t._connector_,t._extensions_) -- after this we know n
if n > 0 then
p = format(template,preamble,t._preamble_,arguments[n],p)
--- print("builder>",p)
- f = loadstripped(p)()
+ -- print("builder 2 >",p)
+ f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment)
else
f = function() return str end
end
@@ -816,10 +992,28 @@ strings.formatters = { }
-- table (metatable) in which case we could better keep a count and
-- clear that table when a threshold is reached
-function strings.formatters.new()
- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
+-- _connector_ is an experiment
+
+if _LUAVERSION < 5.2 then
+
+ function strings.formatters.new(noconcat)
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = preamble, _environment_ = { } }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
+else
+
+ function strings.formatters.new(noconcat)
+ local e = { } -- better make a copy as we can overload
+ for k, v in next, environment do
+ e[k] = v
+ end
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = "", _environment_ = e }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
end
-- function strings.formatters.new()
@@ -838,8 +1032,12 @@ string.formatter = function(str,...) return formatters[str](...) end -- someti
local function add(t,name,template,preamble)
if type(t) == "table" and t._type_ == "formatter" then
t._extensions_[name] = template or "%s"
- if preamble then
+ if type(preamble) == "string" then
t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
+ elseif type(preamble) == "table" then
+ for k, v in next, preamble do
+ t._environment_[k] = v
+ end
end
end
end
@@ -856,9 +1054,23 @@ patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"
-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
-- faster again when other q-escapables are found (the ones we don't need to escape)
-add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+-- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+-- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+-- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+
+if _LUAVERSION < 5.2 then
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+
+else
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape })
+
+end
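For reference, a hedged sketch of how such an extension is used afterwards: a %!name! directive in a format string dispatches to the registered template, with the escaper coming from the string preamble (Lua 5.1) or from the formatter environment (Lua 5.2 and later):

    local formatters = string.formatters
    print(formatters["value: %!xml!"]("a <b> & c")) -- angle brackets and ampersand escaped by lpeg.patterns.xmlescape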
-- -- yes or no:
--
@@ -885,3 +1097,29 @@ add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patte
-- string.formatteds = formatteds
--
-- setmetatable(formatteds, { __index = make, __call = use })
+
+-- This is a somewhat silly one, used in commandline reconstruction, but the older
+-- method, using a combination of find, gsub, quoted and unquoted, was not that
+-- reliable.
+--
+-- '"foo"bar \"and " whatever"' => "foo\"bar \"and \" whatever"
+-- 'foo"bar \"and " whatever' => "foo\"bar \"and \" whatever"
+
+local dquote = patterns.dquote -- P('"')
+local equote = patterns.escaped + dquote / '\\"' + 1
+local space = patterns.space
+local cquote = Cc('"')
+
+local pattern =
+ Cs(dquote * (equote - P(-2))^0 * dquote) -- we keep the outer but escape unescaped ones
+ + Cs(cquote * (equote - space)^0 * space * equote^0 * cquote) -- we escape unescaped ones
+
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+
+local pattern = Cs((newline / os.newline + 1)^0)
+
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
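Two small hedged usage notes for the helpers added at the end of this file:

    print(string.optionalquoted([[foo"bar "and " whatever]])) -- returned as one quoted string with inner quotes escaped
    print(string.optionalquoted("simple"))                    -- no quoting needed, returned untouched
    print(string.replacenewlines("one\ntwo"))                 -- each newline replaced by os.newline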
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index ae44269bb..618f34cee 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -11,7 +11,7 @@ utilities.tables = utilities.tables or { }
local tables = utilities.tables
local format, gmatch, gsub, sub = string.format, string.gmatch, string.gsub, string.sub
-local concat, insert, remove = table.concat, table.insert, table.remove
+local concat, insert, remove, sort = table.concat, table.insert, table.remove, table.sort
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select
local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc
@@ -21,27 +21,29 @@ local utftoeight = utf.toeight
local splitter = lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast) -- defines undefined tables
- local composed, shortcut, t = nil, nil, { }
+function utilities.tables.definetable(target,nofirst,nolast) -- defines undefined tables
+ local composed, t = nil, { }
local snippets = lpegmatch(splitter,target)
for i=1,#snippets - (nolast and 1 or 0) do
local name = snippets[i]
if composed then
- composed = shortcut .. "." .. name
- shortcut = shortcut .. "_" .. name
- t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ composed = composed .. "." .. name
+ t[#t+1] = formatters["if not %s then %s = { } end"](composed,composed)
else
composed = name
- shortcut = name
if not nofirst then
t[#t+1] = formatters["%s = %s or { }"](composed,composed)
end
end
end
- if nolast then
- composed = shortcut .. "." .. snippets[#snippets]
+ if composed then
+ if nolast then
+ composed = composed .. "." .. snippets[#snippets]
+ end
+ return concat(t,"\n"), composed -- could be shortcut
+ else
+ return "", target
end
- return concat(t,"\n"), composed
end
-- local t = tables.definedtable("a","b","c","d")
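A hedged sketch of what the reworked definetable now produces (the name is illustrative; the function returns the defining code as a string plus the composed accessor):

    local code, accessor = utilities.tables.definetable("a.b.c")
    -- code:
    --   a = a or { }
    --   if not a.b then a.b = { } end
    --   if not a.b.c then a.b.c = { } end
    -- accessor: "a.b.c"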
@@ -73,7 +75,7 @@ end
function tables.migratetable(target,v,root)
local t = root or _G
- local names = string.split(target,".")
+ local names = lpegmatch(splitter,target)
for i=1,#names-1 do
local name = names[i]
t[name] = t[name] or { }
@@ -96,6 +98,17 @@ function tables.removevalue(t,value) -- todo: n
end
end
+function tables.replacevalue(t,oldvalue,newvalue)
+ if oldvalue and newvalue then
+ for i=1,#t do
+ if t[i] == oldvalue then
+ t[i] = newvalue
+ -- replace all, so no: return
+ end
+ end
+ end
+end
+
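A small example of the new helper; unlike removevalue it deliberately replaces every occurrence:

    local t = { "a", "b", "a" }
    utilities.tables.replacevalue(t, "a", "x")
    -- t is now { "x", "b", "x" }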
function tables.insertbeforevalue(t,value,extra)
for i=1,#t do
if t[i] == extra then
@@ -316,7 +329,7 @@ function table.fastserialize(t,prefix)
-- not sorted
-- only number and string indices (currently)
- local r = { prefix or "return" }
+ local r = { type(prefix) == "string" and prefix or "return" }
local m = 1
local function fastserialize(t,outer) -- no mixes
@@ -376,7 +389,6 @@ function table.fastserialize(t,prefix)
end
return r
end
-
return concat(fastserialize(t,true))
end
@@ -494,7 +506,8 @@ end
-- The next version is somewhat faster, although in practice one will seldom
-- serialize a lot using this one. Often the above variants are more efficient.
--- If we would really need this a lot, we could hash q keys.
+-- If we would really need this a lot, we could hash q keys, or just not use
+-- indented code.
-- char-def.lua : 0.53 -> 0.38
-- husayni.tma : 0.28 -> 0.19
@@ -558,8 +571,42 @@ function table.serialize(root,name,specification)
local t -- = { }
local n = 1
+-- local function simple_table(t)
+-- local ts = #t
+-- if ts > 0 then
+-- local n = 0
+-- for _, v in next, t do
+-- n = n + 1
+-- if type(v) == "table" then
+-- return nil
+-- end
+-- end
+-- if n == ts then
+-- local tt = { }
+-- local nt = 0
+-- for i=1,ts do
+-- local v = t[i]
+-- local tv = type(v)
+-- nt = nt + 1
+-- if tv == "number" then
+-- tt[nt] = v
+-- elseif tv == "string" then
+-- tt[nt] = format("%q",v) -- f_string(v)
+-- elseif tv == "boolean" then
+-- tt[nt] = v and "true" or "false"
+-- else
+-- return nil
+-- end
+-- end
+-- return tt
+-- end
+-- end
+-- return nil
+-- end
+
local function simple_table(t)
- if #t > 0 then
+ local nt = #t
+ if nt > 0 then
local n = 0
for _, v in next, t do
n = n + 1
@@ -567,19 +614,17 @@ function table.serialize(root,name,specification)
return nil
end
end
- if n == #t then
+ if n == nt then
local tt = { }
- local nt = 0
- for i=1,#t do
+ for i=1,nt do
local v = t[i]
local tv = type(v)
- nt = nt + 1
if tv == "number" then
- tt[nt] = v
+ tt[i] = v -- not needed tostring(v)
elseif tv == "string" then
- tt[nt] = format("%q",v) -- f_string(v)
+ tt[i] = format("%q",v) -- f_string(v)
elseif tv == "boolean" then
- tt[nt] = v and "true" or "false"
+ tt[i] = v and "true" or "false"
else
return nil
end
@@ -610,7 +655,7 @@ function table.serialize(root,name,specification)
depth = depth + 1
end
-- we could check for k (index) being number (cardinal)
- if root and next(root) then
+ if root and next(root) ~= nil then
local first = nil
local last = 0
last = #root
@@ -623,19 +668,19 @@ function table.serialize(root,name,specification)
if last > 0 then
first = 1
end
- local sk = sortedkeys(root) -- inline fast version?
+ local sk = sortedkeys(root) -- inline fast version?
for i=1,#sk do
local k = sk[i]
local v = root[k]
local tv = type(v)
local tk = type(k)
- if first and tk == "number" and k >= first and k <= last then
+ if first and tk == "number" and k <= last and k >= first then
if tv == "number" then
n = n + 1 t[n] = f_val_num(depth,v)
elseif tv == "string" then
n = n + 1 t[n] = f_val_str(depth,v)
elseif tv == "table" then
- if not next(v) then
+ if next(v) == nil then
n = n + 1 t[n] = f_val_not(depth)
else
local st = simple_table(v)
@@ -665,13 +710,13 @@ function table.serialize(root,name,specification)
n = n + 1 t[n] = f_key_boo_value_str(depth,k,v)
end
elseif tv == "table" then
- if not next(v) then
+ if next(v) == nil then
if tk == "number" then
- n = n + 1 t[n] = f_key_num_value_not(depth,k,v)
+ n = n + 1 t[n] = f_key_num_value_not(depth,k)
elseif tk == "string" then
- n = n + 1 t[n] = f_key_str_value_not(depth,k,v)
+ n = n + 1 t[n] = f_key_str_value_not(depth,k)
elseif tk == "boolean" then
- n = n + 1 t[n] = f_key_boo_value_not(depth,k,v)
+ n = n + 1 t[n] = f_key_boo_value_not(depth,k)
end
else
local st = simple_table(v)
@@ -729,7 +774,7 @@ function table.serialize(root,name,specification)
root._w_h_a_t_e_v_e_r_ = nil
end
-- Let's forget about empty tables.
- if next(root) then
+ if next(root) ~= nil then
do_serialize(root,name,1,0)
end
end
diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua
index 67d058221..468dd429c 100644
--- a/tex/context/base/util-tpl.lua
+++ b/tex/context/base/util-tpl.lua
@@ -52,7 +52,7 @@ local sqlescape = lpeg.replacer {
-- { "\t", "\\t" },
}
-local sqlquoted = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
+local sqlquoted = Cs(Cc("'") * sqlescape * Cc("'"))
lpegpatterns.sqlescape = sqlescape
lpegpatterns.sqlquoted = sqlquoted
@@ -111,13 +111,26 @@ local luaescaper = escapers.lua
local quotedluaescaper = quotedescapers.lua
local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
- local escaper = how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how == false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper = how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
end
local function replacekeyquoted(s,t,how,recurse) -- ".. \" "
- local escaper = how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how == false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper = how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+
+local function replaceoptional(l,m,r,t,how,recurse)
+ local v = t[l]
+ return v and v ~= "" and lpegmatch(replacer,r,1,t,how or "lua",recurse or false) or ""
end
local single = P("%") -- test %test% test : resolves test
@@ -135,12 +148,19 @@ local norquoted = rquoted / ''
local nolquotedq = lquotedq / ''
local norquotedq = rquotedq / ''
-local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle
-local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq
-local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted
+local noloptional = P("%?") / ''
+local noroptional = P("?%") / ''
+local nomoptional = P(":") / ''
+
+
+local args = Carg(1) * Carg(2) * Carg(3)
+local key = nosingle * ((C((1-nosingle )^1) * args) / replacekey ) * nosingle
+local quoted = nolquotedq * ((C((1-norquotedq )^1) * args) / replacekeyquoted ) * norquotedq
+local unquoted = nolquoted * ((C((1-norquoted )^1) * args) / replacekeyunquoted) * norquoted
+local optional = noloptional * ((C((1-nomoptional)^1) * nomoptional * C((1-noroptional)^1) * args) / replaceoptional) * noroptional
local any = P(1)
- replacer = Cs((unquoted + quoted + escape + key + any)^0)
+ replacer = Cs((unquoted + quoted + escape + optional + key + any)^0)
local function replace(str,mapping,how,recurse)
if mapping and str then
@@ -156,6 +176,7 @@ end
-- print(replace("test '%[x]%' test",{ x = [[a '%y%'  a]], y = "oeps" },'sql',true))
-- print(replace([[test %[x]% test]],{ x = [[a "x"  a]]}))
-- print(replace([[test %(x)% test]],{ x = [[a "x"  a]]}))
+-- print(replace([[convert %?x: -x "%x%" ?% %?y: -y "%y%" ?%]],{ x = "yes" }))
templates.replace = replace
@@ -188,3 +209,5 @@ end
-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" },false,true))
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" },false))
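The new %?key: ... ?% construct makes a fragment optional: it is expanded with the normal replacer only when the key is present and non-empty, and dropped otherwise. A hedged sketch along the lines of the commented example above:

    local replace = utilities.templates.replace
    print(replace([[convert %?x: -x "%x%" ?% %?y: -y "%y%" ?%]], { x = "yes" }))
    -- roughly: convert  -x "yes"     (the y block disappears because y is not set)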
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index 992c37eae..51f401e66 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -7,266 +7,2095 @@ if not modules then modules = { } end modules ['x-asciimath'] = {
}
--[[ldx--
-<p>Some backgrounds are discussed in <t>x-asciimath.mkiv</t>.</p>
+<p>Some background is discussed in <t>x-asciimath.mkiv</t>. This is a third version. I first
+tried to make a proper expression parser but it's not that easy. First we have to avoid left
+recursion, which is not that trivial (maybe a future version of lpeg will provide that), and
+second there is not really a syntax but a mix of expressions and sequences with some fuzzy logic
+applied. Most problematic are fractions, and we also need to handle incomplete expressions. So,
+instead we (sort of) tokenize the string and then do some passes over the result. Yes, it is a
+real ugly and unsatisfying mess of code down here. Don't take this as an example.</p>
--ldx]]--
-local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end
+-- todo: spaces around all elements in cleanup?
+-- todo: filter from files listed in tuc file
-local asciimath = { }
-local moduledata = moduledata or { }
-moduledata.asciimath = asciimath
+local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end
+local trace_detail = false if trackers then trackers.register("modules.asciimath.detail", function(v) trace_detail = v end) end
+local trace_digits = false if trackers then trackers.register("modules.asciimath.digits", function(v) trace_digits = v end) end
local report_asciimath = logs.reporter("mathematics","asciimath")
-local format = string.format
-local lpegmatch = lpeg.match
-local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs
-
-local letter = lpeg.patterns.utf8
-local space = S(" \n\r\t")
-local spaces = space^0/""
-local integer = P("-")^-1 * R("09")^1
-local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1
-local number = integer -- so we can support nice formatting if needed
-local real = realpart -- so we can support nice formatting if needed
-local float = realpart * P("E") * integer -- so we can support nice formatting if needed
-local texnic = P("\\") * (R("az","AZ")^1)
-
-local premapper = Cs ( (
-
- P("@") / "\\degrees " +
- P("O/") / "\\varnothing " +
- P("o+") / "\\oplus " +
- P("o.") / "\\ocirc " +
- P("!in") / "\\not\\in " +
- P("!=") / "\\neq " +
- P("**") / "\\star " +
- P("*") / "\\cdot " +
- P("//") / "\\slash " +
- P("/_") / "\\angle " +
- P("\\\\") / "\\backslash " +
- P("^^^") / "\\wedge " +
- P("^^") / "\\wedge " +
- P("<<") / "\\left\\langle " +
- P(">>") / "\\right\\rangle " +
- P("<=") / "\\leq " +
- P(">=") / "\\geq " +
- P("-<") / "\\precc " +
- P(">-") / "\\succ " +
- P("~=") / "\\cong " +
- P("~~") / "\\approx " +
- P("=>") / "\\Rightarrow " +
- P("(:") / "\\left\\langle " +
- P(":)") / "\\right\\rangle " +
- P(":.") / "\\therefore " +
- P("~|") / "\\right\\rceil " +
- P("_|_") / "\\bot " +
- P("_|") / "\\right\\rfloor " +
- P("+-") / "\\pm " +
- P("|--") / "\\vdash " +
- P("|==") / "\\models " +
- P("|_") / "\\left\\lfloor " +
- P("|~") / "\\left\\lceil " +
- P("-:") / "\\div " +
- P("_=") / "\\equiv " +
-
- P("|") / "\\middle\\| " +
-
- P("dx") / "(dx)" +
- P("dy") / "(dy)" +
- P("dz") / "(dz)" +
-
- letter + P(1)
+local asciimath = { }
+local moduledata = moduledata or { }
+moduledata.asciimath = asciimath
-)^0 )
+if not characters then
+ require("char-def")
+ require("char-ini")
+ require("char-ent")
+end
+
+local type, rawget = type, rawget
+local concat, insert, remove = table.concat, table.insert, table.remove
+local rep, gmatch, gsub, find = string.rep, string.gmatch, string.gsub, string.find
+local utfchar, utfbyte = utf.char, utf.byte
+
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local S, P, R, C, V, Cc, Ct, Cs, Carg = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs, lpeg.Carg
+
+local sortedhash = table.sortedhash
+local sortedkeys = table.sortedkeys
+local formatters = string.formatters
+
+local entities = characters.entities or { }
+
+local xmltext = xml.text
+local xmlinclusion = xml.inclusion
+local xmlcollected = xml.collected
+
+-- todo: use private unicodes as temporary slots ... easier to compare
+
+local s_lparent = "\\left\\lparent"
+local s_lbrace = "\\left\\lbrace"
+local s_lbracket = "\\left\\lbracket"
+local s_langle = "\\left\\langle"
+local s_lfloor = "\\left\\lfloor"
+local s_lceil = "\\left\\lceil"
+local s_left = "\\left."
+
+local s_rparent = "\\right\\rparent"
+local s_rbrace = "\\right\\rbrace"
+local s_rbracket = "\\right\\rbracket"
+local s_rangle = "\\right\\rangle"
+local s_rfloor = "\\right\\rfloor"
+local s_rceil = "\\right\\rceil"
+local s_right = "\\right."
+
+local s_mslash = "\\middle/"
+
+local s_lbar = "\\left\\|"
+local s_mbar = "\\middle\\|"
+local s_rbar = "\\right\\|"
+
+local s_lnothing = "\\left ." -- space fools checker
+local s_rnothing = "\\right ." -- space fools checker
local reserved = {
- ["aleph"] = "\\aleph ",
- ["vdots"] = "\\vdots ",
- ["ddots"] = "\\ddots ",
- ["oint"] = "\\oint ",
- ["grad"] = "\\nabla ",
- ["prod"] = "\\prod ",
- ["prop"] = "\\propto ",
- ["sube"] = "\\subseteq ",
- ["supe"] = "\\supseteq ",
- ["sinh"] = "\\sinh ",
- ["cosh"] = "\\cosh ",
- ["tanh"] = "\\tanh ",
- ["sum"] = "\\sum ",
- ["vvv"] = "\\vee ",
- ["nnn"] = "\\cap ",
- ["uuu"] = "\\cup ",
- ["sub"] = "\\subset ",
- ["sup"] = "\\supset ",
- ["not"] = "\\lnot ",
- ["iff"] = "\\Leftrightarrow ",
- ["int"] = "\\int ",
- ["del"] = "\\partial ",
- ["and"] = "\\and ",
- ["not"] = "\\not ",
- ["sin"] = "\\sin ",
- ["cos"] = "\\cos ",
- ["tan"] = "\\tan ",
- ["csc"] = "\\csc ",
- ["sec"] = "\\sec ",
- ["cot"] = "\\cot ",
- ["log"] = "\\log ",
- ["det"] = "\\det ",
- ["lim"] = "\\lim ",
- ["mod"] = "\\mod ",
- ["gcd"] = "\\gcd ",
- ["lcm"] = "\\lcm ",
- ["min"] = "\\min ",
- ["max"] = "\\max ",
- ["xx"] = "\\times ",
- ["in"] = "\\in ",
- ["ox"] = "\\otimes ",
- ["vv"] = "\\vee ",
- ["nn"] = "\\cap ",
- ["uu"] = "\\cup ",
- ["oo"] = "\\infty ",
- ["ln"] = "\\ln ",
- ["or"] = "\\or ",
-
- ["AA"] = "\\forall ",
- ["EE"] = "\\exists ",
- ["TT"] = "\\top ",
- ["CC"] = "\\Bbb{C}",
- ["NN"] = "\\Bbb{N}",
- ["QQ"] = "\\Bbb{Q}",
- ["RR"] = "\\Bbb{R}",
- ["ZZ"] = "\\Bbb{Z}",
+ ["prod"] = { false, "\\prod" },
+ ["sinh"] = { false, "\\sinh" },
+ ["cosh"] = { false, "\\cosh" },
+ ["tanh"] = { false, "\\tanh" },
+ ["sum"] = { false, "\\sum" },
+ ["int"] = { false, "\\int" },
+ ["sin"] = { false, "\\sin" },
+ ["cos"] = { false, "\\cos" },
+ ["tan"] = { false, "\\tan" },
+ ["csc"] = { false, "\\csc" },
+ ["sec"] = { false, "\\sec" },
+ ["cot"] = { false, "\\cot" },
+ ["log"] = { false, "\\log" },
+ ["det"] = { false, "\\det" },
+ ["lim"] = { false, "\\lim" },
+ ["mod"] = { false, "\\mod" },
+ ["gcd"] = { false, "\\gcd" },
+ ["min"] = { false, "\\min" },
+ ["max"] = { false, "\\max" },
+ ["ln"] = { false, "\\ln" },
+
+ ["atan"] = { false, "\\atan" }, -- extra
+ ["acos"] = { false, "\\acos" }, -- extra
+ ["asin"] = { false, "\\asin" }, -- extra
+ -- extra
+ ["arctan"] = { false, "\\arctan" }, -- extra
+ ["arccos"] = { false, "\\arccos" }, -- extra
+ ["arcsin"] = { false, "\\arcsin" }, -- extra
+
+ ["and"] = { false, "\\text{and}" },
+ ["or"] = { false, "\\text{or}" },
+ ["if"] = { false, "\\text{if}" },
+
+ ["sqrt"] = { false, "\\asciimathsqrt", "unary" },
+ ["root"] = { false, "\\asciimathroot", "binary" },
+ ["frac"] = { false, "\\frac", "binary" },
+ ["stackrel"] = { false, "\\asciimathstackrel", "binary" },
+ ["hat"] = { false, "\\widehat", "unary" },
+ ["bar"] = { false, "\\overbar", "unary" },
+ ["overbar"] = { false, "\\overbar", "unary" },
+ ["underline"] = { false, "\\underline", "unary" },
+ ["ul"] = { false, "\\underline", "unary" },
+ ["vec"] = { false, "\\overrightarrow", "unary" },
+ ["dot"] = { false, "\\dot", "unary" }, -- 0x2D9
+ ["ddot"] = { false, "\\ddot", "unary" }, -- 0xA8
+
+ -- binary operators
+
+ ["+"] = { true, "+" },
+ ["-"] = { true, "-" },
+ ["*"] = { true, "⋅" },
+ ["**"] = { true, "⋆" },
+ ["//"] = { true, "⁄" }, -- \slash
+ ["\\"] = { true, "\\" },
+ ["xx"] = { true, "×" },
+ ["times"] = { true, "×" },
+ ["-:"] = { true, "÷" },
+ ["@"] = { true, "∘" },
+ ["circ"] = { true, "∘" },
+ ["o+"] = { true, "⊕" },
+ ["ox"] = { true, "⊗" },
+ ["o."] = { true, "⊙" },
+ ["^^"] = { true, "∧" },
+ ["vv"] = { true, "∨" },
+ ["nn"] = { true, "∩" },
+ ["uu"] = { true, "∪" },
+
+ -- big operators
+
+ ["^^^"] = { true, "⋀" },
+ ["vvv"] = { true, "⋁" },
+ ["nnn"] = { true, "⋂" },
+ ["uuu"] = { true, "⋃" },
+ ["int"] = { true, "∫" },
+ ["oint"] = { true, "∮" },
+
+ -- brackets
+
+ ["("] = { true, "(" },
+ [")"] = { true, ")" },
+ ["["] = { true, "[" },
+ ["]"] = { true, "]" },
+ ["{"] = { true, "{" },
+ ["}"] = { true, "}" },
+
+ -- binary relations
+
+ ["="] = { true, "=" },
+ ["eq"] = { true, "=" },
+ ["!="] = { true, "≠" },
+ ["ne"] = { true, "≠" },
+ ["neq"] = { true, "≠" },
+ ["<"] = { true, "<" },
+ ["lt"] = { true, "<" },
+ [">"] = { true, ">" },
+ ["gt"] = { true, ">" },
+ ["<="] = { true, "≤" },
+ ["le"] = { true, "≤" },
+ ["leq"] = { true, "≤" },
+ [">="] = { true, "≥" },
+ ["ge"] = { true, "≥" },
+ ["geq"] = { true, "≥" },
+ ["-<"] = { true, "≺" },
+ [">-"] = { true, "≻" },
+ ["in"] = { true, "∈" },
+ ["!in"] = { true, "∉" },
+ ["sub"] = { true, "⊂" },
+ ["sup"] = { true, "⊃" },
+ ["sube"] = { true, "⊆" },
+ ["supe"] = { true, "⊇" },
+ ["-="] = { true, "≡" },
+ ["~="] = { true, "≅" },
+ ["~~"] = { true, "≈" },
+ ["prop"] = { true, "∝" },
+
+ -- arrows
+
+ ["rarr"] = { true, "→" },
+ ["->"] = { true, "→" },
+ ["larr"] = { true, "←" },
+ ["harr"] = { true, "↔" },
+ ["uarr"] = { true, "↑" },
+ ["darr"] = { true, "↓" },
+ ["rArr"] = { true, "⇒" },
+ ["lArr"] = { true, "⇐" },
+ ["hArr"] = { true, "⇔" },
+ ["|->"] = { true, "↦" },
+
+ -- logical
+
+ ["not"] = { true, "¬" },
+ ["=>"] = { true, "⇒" },
+ ["iff"] = { true, "⇔" },
+ ["AA"] = { true, "∀" },
+ ["EE"] = { true, "∃" },
+ ["_|_"] = { true, "⊥" },
+ ["TT"] = { true, "⊤" },
+ ["|--"] = { true, "⊢" },
+ ["|=="] = { true, "⊨" },
+
+ -- miscellaneous
+
+ ["del"] = { true, "∂" },
+ ["grad"] = { true, "∇" },
+ ["+-"] = { true, "±" },
+ ["O/"] = { true, "∅" },
+ ["oo"] = { true, "∞" },
+ ["aleph"] = { true, "ℵ" },
+ ["angle"] = { true, "∠" },
+ ["/_"] = { true, "∠" },
+ [":."] = { true, "∴" },
+ ["..."] = { true, "..." }, -- ldots
+ ["ldots"] = { true, "..." }, -- ldots
+ ["cdots"] = { true, "⋯" },
+ ["vdots"] = { true, "⋮" },
+ ["ddots"] = { true, "⋱" },
+ ["diamond"] = { true, "⋄" },
+ ["square"] = { true, "□" },
+ ["|__"] = { true, "⌊" },
+ ["__|"] = { true, "⌋" },
+ ["|~"] = { true, "⌈" },
+ ["~|"] = { true, "⌉" },
+
+ -- more
+
+ ["_="] = { true, "≡" },
+
+ -- bonus
+
+ ["prime"] = { true, "′" }, -- bonus
+ ["'"] = { true, "′" }, -- bonus
+ ["''"] = { true, "″" }, -- bonus
+ ["'''"] = { true, "‴" }, -- bonus
+
+ -- special
+
+ ["%"] = { false, "\\mathpercent" },
+ ["&"] = { false, "\\mathampersand" },
+ ["#"] = { false, "\\mathhash" },
+ ["$"] = { false, "\\mathdollar" },
+
+ -- blackboard
+
+ ["CC"] = { true, "ℂ" },
+ ["NN"] = { true, "ℕ" },
+ ["QQ"] = { true, "ℚ" },
+ ["RR"] = { true, "ℝ" },
+ ["ZZ"] = { true, "ℤ" },
+
+ -- greek lowercase
+
+ ["alpha"] = { true, "α" },
+ ["beta"] = { true, "β" },
+ ["gamma"] = { true, "γ" },
+ ["delta"] = { true, "δ" },
+ ["epsilon"] = { true, "ε" },
+ ["varepsilon"] = { true, "ɛ" },
+ ["zeta"] = { true, "ζ" },
+ ["eta"] = { true, "η" },
+ ["theta"] = { true, "θ" },
+ ["vartheta"] = { true, "ϑ" },
+ ["iota"] = { true, "ι" },
+ ["kappa"] = { true, "κ" },
+ ["lambda"] = { true, "λ" },
+ ["mu"] = { true, "μ" },
+ ["nu"] = { true, "ν" },
+ ["xi"] = { true, "ξ" },
+ ["pi"] = { true, "π" },
+ ["rho"] = { true, "ρ" },
+ ["sigma"] = { true, "σ" },
+ ["tau"] = { true, "τ" },
+ ["upsilon"] = { true, "υ" },
+ ["phi"] = { true, "φ" },
+ ["varphi"] = { true, "ϕ" },
+ ["chi"] = { true, "χ" },
+ ["psi"] = { true, "ψ" },
+ ["omega"] = { true, "ω" },
+
+ -- greek uppercase
+
+ ["Gamma"] = { true, "Γ" },
+ ["Delta"] = { true, "Δ" },
+ ["Theta"] = { true, "Θ" },
+ ["Lambda"] = { true, "Λ" },
+ ["Xi"] = { true, "Ξ" },
+ ["Pi"] = { true, "Π" },
+ ["Sigma"] = { true, "Σ" },
+ ["Phi"] = { true, "Φ" },
+ ["Psi"] = { true, "Ψ" },
+ ["Omega"] = { true, "Ω" },
+
+ -- blackboard
+
+ ["bbb a"] = { true, "𝕒" },
+ ["bbb b"] = { true, "𝕓" },
+ ["bbb c"] = { true, "𝕔" },
+ ["bbb d"] = { true, "𝕕" },
+ ["bbb e"] = { true, "𝕖" },
+ ["bbb f"] = { true, "𝕗" },
+ ["bbb g"] = { true, "𝕘" },
+ ["bbb h"] = { true, "𝕙" },
+ ["bbb i"] = { true, "𝕚" },
+ ["bbb j"] = { true, "𝕛" },
+ ["bbb k"] = { true, "𝕜" },
+ ["bbb l"] = { true, "𝕝" },
+ ["bbb m"] = { true, "𝕞" },
+ ["bbb n"] = { true, "𝕟" },
+ ["bbb o"] = { true, "𝕠" },
+ ["bbb p"] = { true, "𝕡" },
+ ["bbb q"] = { true, "𝕢" },
+ ["bbb r"] = { true, "𝕣" },
+ ["bbb s"] = { true, "𝕤" },
+ ["bbb t"] = { true, "𝕥" },
+ ["bbb u"] = { true, "𝕦" },
+ ["bbb v"] = { true, "𝕧" },
+ ["bbb w"] = { true, "𝕨" },
+ ["bbb x"] = { true, "𝕩" },
+ ["bbb y"] = { true, "𝕪" },
+ ["bbb z"] = { true, "𝕫" },
+
+ ["bbb A"] = { true, "𝔸" },
+ ["bbb B"] = { true, "𝔹" },
+ ["bbb C"] = { true, "ℂ" },
+ ["bbb D"] = { true, "𝔻" },
+ ["bbb E"] = { true, "𝔼" },
+ ["bbb F"] = { true, "𝔽" },
+ ["bbb G"] = { true, "𝔾" },
+ ["bbb H"] = { true, "ℍ" },
+ ["bbb I"] = { true, "𝕀" },
+ ["bbb J"] = { true, "𝕁" },
+ ["bbb K"] = { true, "𝕂" },
+ ["bbb L"] = { true, "𝕃" },
+ ["bbb M"] = { true, "𝕄" },
+ ["bbb N"] = { true, "ℕ" },
+ ["bbb O"] = { true, "𝕆" },
+ ["bbb P"] = { true, "ℙ" },
+ ["bbb Q"] = { true, "ℚ" },
+ ["bbb R"] = { true, "ℝ" },
+ ["bbb S"] = { true, "𝕊" },
+ ["bbb T"] = { true, "𝕋" },
+ ["bbb U"] = { true, "𝕌" },
+ ["bbb V"] = { true, "𝕍" },
+ ["bbb W"] = { true, "𝕎" },
+ ["bbb X"] = { true, "𝕏" },
+ ["bbb Y"] = { true, "𝕐" },
+ ["bbb Z"] = { true, "ℤ" },
+
+ -- fraktur
+
+ ["fr a"] = { true, "𝔞" },
+ ["fr b"] = { true, "𝔟" },
+ ["fr c"] = { true, "𝔠" },
+ ["fr d"] = { true, "𝔡" },
+ ["fr e"] = { true, "𝔢" },
+ ["fr f"] = { true, "𝔣" },
+ ["fr g"] = { true, "𝔤" },
+ ["fr h"] = { true, "𝔥" },
+ ["fr i"] = { true, "𝔦" },
+ ["fr j"] = { true, "𝔧" },
+ ["fr k"] = { true, "𝔨" },
+ ["fr l"] = { true, "𝔩" },
+ ["fr m"] = { true, "𝔪" },
+ ["fr n"] = { true, "𝔫" },
+ ["fr o"] = { true, "𝔬" },
+ ["fr p"] = { true, "𝔭" },
+ ["fr q"] = { true, "𝔮" },
+ ["fr r"] = { true, "𝔯" },
+ ["fr s"] = { true, "𝔰" },
+ ["fr t"] = { true, "𝔱" },
+ ["fr u"] = { true, "𝔲" },
+ ["fr v"] = { true, "𝔳" },
+ ["fr w"] = { true, "𝔴" },
+ ["fr x"] = { true, "𝔵" },
+ ["fr y"] = { true, "𝔶" },
+ ["fr z"] = { true, "𝔷" },
+
+ ["fr A"] = { true, "𝔄" },
+ ["fr B"] = { true, "𝔅" },
+ ["fr C"] = { true, "ℭ" },
+ ["fr D"] = { true, "𝔇" },
+ ["fr E"] = { true, "𝔈" },
+ ["fr F"] = { true, "𝔉" },
+ ["fr G"] = { true, "𝔊" },
+ ["fr H"] = { true, "ℌ" },
+ ["fr I"] = { true, "ℑ" },
+ ["fr J"] = { true, "𝔍" },
+ ["fr K"] = { true, "𝔎" },
+ ["fr L"] = { true, "𝔏" },
+ ["fr M"] = { true, "𝔐" },
+ ["fr N"] = { true, "𝔑" },
+ ["fr O"] = { true, "𝔒" },
+ ["fr P"] = { true, "𝔓" },
+ ["fr Q"] = { true, "𝔔" },
+ ["fr R"] = { true, "ℜ" },
+ ["fr S"] = { true, "𝔖" },
+ ["fr T"] = { true, "𝔗" },
+ ["fr U"] = { true, "𝔘" },
+ ["fr V"] = { true, "𝔙" },
+ ["fr W"] = { true, "𝔚" },
+ ["fr X"] = { true, "𝔛" },
+ ["fr Y"] = { true, "𝔜" },
+ ["fr Z"] = { true, "ℨ" },
+
+ -- script
+
+ ["cc a"] = { true, "𝒶" },
+ ["cc b"] = { true, "𝒷" },
+ ["cc c"] = { true, "𝒸" },
+ ["cc d"] = { true, "𝒹" },
+ ["cc e"] = { true, "ℯ" },
+ ["cc f"] = { true, "𝒻" },
+ ["cc g"] = { true, "ℊ" },
+ ["cc h"] = { true, "𝒽" },
+ ["cc i"] = { true, "𝒾" },
+ ["cc j"] = { true, "𝒿" },
+ ["cc k"] = { true, "𝓀" },
+ ["cc l"] = { true, "𝓁" },
+ ["cc m"] = { true, "𝓂" },
+ ["cc n"] = { true, "𝓃" },
+ ["cc o"] = { true, "ℴ" },
+ ["cc p"] = { true, "𝓅" },
+ ["cc q"] = { true, "𝓆" },
+ ["cc r"] = { true, "𝓇" },
+ ["cc s"] = { true, "𝓈" },
+ ["cc t"] = { true, "𝓉" },
+ ["cc u"] = { true, "𝓊" },
+ ["cc v"] = { true, "𝓋" },
+ ["cc w"] = { true, "𝓌" },
+ ["cc x"] = { true, "𝓍" },
+ ["cc y"] = { true, "𝓎" },
+ ["cc z"] = { true, "𝓏" },
+
+ ["cc A"] = { true, "𝒜" },
+ ["cc B"] = { true, "ℬ" },
+ ["cc C"] = { true, "𝒞" },
+ ["cc D"] = { true, "𝒟" },
+ ["cc E"] = { true, "ℰ" },
+ ["cc F"] = { true, "ℱ" },
+ ["cc G"] = { true, "𝒢" },
+ ["cc H"] = { true, "ℋ" },
+ ["cc I"] = { true, "ℐ" },
+ ["cc J"] = { true, "𝒥" },
+ ["cc K"] = { true, "𝒦" },
+ ["cc L"] = { true, "ℒ" },
+ ["cc M"] = { true, "ℳ" },
+ ["cc N"] = { true, "𝒩" },
+ ["cc O"] = { true, "𝒪" },
+ ["cc P"] = { true, "𝒫" },
+ ["cc Q"] = { true, "𝒬" },
+ ["cc R"] = { true, "ℛ" },
+ ["cc S"] = { true, "𝒮" },
+ ["cc T"] = { true, "𝒯" },
+ ["cc U"] = { true, "𝒰" },
+ ["cc V"] = { true, "𝒱" },
+ ["cc W"] = { true, "𝒲" },
+ ["cc X"] = { true, "𝒳" },
+ ["cc Y"] = { true, "𝒴" },
+ ["cc Z"] = { true, "𝒵" },
+
+ -- bold
+
+ ["bb a"] = { true, "𝒂" },
+ ["bb b"] = { true, "𝒃" },
+ ["bb c"] = { true, "𝒄" },
+ ["bb d"] = { true, "𝒅" },
+ ["bb e"] = { true, "𝒆" },
+ ["bb f"] = { true, "𝒇" },
+ ["bb g"] = { true, "𝒈" },
+ ["bb h"] = { true, "𝒉" },
+ ["bb i"] = { true, "𝒊" },
+ ["bb j"] = { true, "𝒋" },
+ ["bb k"] = { true, "𝒌" },
+ ["bb l"] = { true, "𝒍" },
+ ["bb m"] = { true, "𝒎" },
+ ["bb n"] = { true, "𝒏" },
+ ["bb o"] = { true, "𝒐" },
+ ["bb p"] = { true, "𝒑" },
+ ["bb q"] = { true, "𝒒" },
+ ["bb r"] = { true, "𝒓" },
+ ["bb s"] = { true, "𝒔" },
+ ["bb t"] = { true, "𝒕" },
+ ["bb u"] = { true, "𝒖" },
+ ["bb v"] = { true, "𝒗" },
+ ["bb w"] = { true, "𝒘" },
+ ["bb x"] = { true, "𝒙" },
+ ["bb y"] = { true, "𝒚" },
+ ["bb z"] = { true, "𝒛" },
+
+ ["bb A"] = { true, "𝑨" },
+ ["bb B"] = { true, "𝑩" },
+ ["bb C"] = { true, "𝑪" },
+ ["bb D"] = { true, "𝑫" },
+ ["bb E"] = { true, "𝑬" },
+ ["bb F"] = { true, "𝑭" },
+ ["bb G"] = { true, "𝑮" },
+ ["bb H"] = { true, "𝑯" },
+ ["bb I"] = { true, "𝑰" },
+ ["bb J"] = { true, "𝑱" },
+ ["bb K"] = { true, "𝑲" },
+ ["bb L"] = { true, "𝑳" },
+ ["bb M"] = { true, "𝑴" },
+ ["bb N"] = { true, "𝑵" },
+ ["bb O"] = { true, "𝑶" },
+ ["bb P"] = { true, "𝑷" },
+ ["bb Q"] = { true, "𝑸" },
+ ["bb R"] = { true, "𝑹" },
+ ["bb S"] = { true, "𝑺" },
+ ["bb T"] = { true, "𝑻" },
+ ["bb U"] = { true, "𝑼" },
+ ["bb V"] = { true, "𝑽" },
+ ["bb W"] = { true, "𝑾" },
+ ["bb X"] = { true, "𝑿" },
+ ["bb Y"] = { true, "𝒀" },
+ ["bb Z"] = { true, "𝒁" },
+
+ -- sans
+
+ ["sf a"] = { true, "𝖺" },
+ ["sf b"] = { true, "𝖻" },
+ ["sf c"] = { true, "𝖼" },
+ ["sf d"] = { true, "𝖽" },
+ ["sf e"] = { true, "𝖾" },
+ ["sf f"] = { true, "𝖿" },
+ ["sf g"] = { true, "𝗀" },
+ ["sf h"] = { true, "𝗁" },
+ ["sf i"] = { true, "𝗂" },
+ ["sf j"] = { true, "𝗃" },
+ ["sf k"] = { true, "𝗄" },
+ ["sf l"] = { true, "𝗅" },
+ ["sf m"] = { true, "𝗆" },
+ ["sf n"] = { true, "𝗇" },
+ ["sf o"] = { true, "𝗈" },
+ ["sf p"] = { true, "𝗉" },
+ ["sf q"] = { true, "𝗊" },
+ ["sf r"] = { true, "𝗋" },
+ ["sf s"] = { true, "𝗌" },
+ ["sf t"] = { true, "𝗍" },
+ ["sf u"] = { true, "𝗎" },
+ ["sf v"] = { true, "𝗏" },
+ ["sf w"] = { true, "𝗐" },
+ ["sf x"] = { true, "𝗑" },
+ ["sf y"] = { true, "𝗒" },
+ ["sf z"] = { true, "𝗓" },
+
+ ["sf A"] = { true, "𝖠" },
+ ["sf B"] = { true, "𝖡" },
+ ["sf C"] = { true, "𝖢" },
+ ["sf D"] = { true, "𝖣" },
+ ["sf E"] = { true, "𝖤" },
+ ["sf F"] = { true, "𝖥" },
+ ["sf G"] = { true, "𝖦" },
+ ["sf H"] = { true, "𝖧" },
+ ["sf I"] = { true, "𝖨" },
+ ["sf J"] = { true, "𝖩" },
+ ["sf K"] = { true, "𝖪" },
+ ["sf L"] = { true, "𝖫" },
+ ["sf M"] = { true, "𝖬" },
+ ["sf N"] = { true, "𝖭" },
+ ["sf O"] = { true, "𝖮" },
+ ["sf P"] = { true, "𝖯" },
+ ["sf Q"] = { true, "𝖰" },
+ ["sf R"] = { true, "𝖱" },
+ ["sf S"] = { true, "𝖲" },
+ ["sf T"] = { true, "𝖳" },
+ ["sf U"] = { true, "𝖴" },
+ ["sf V"] = { true, "𝖵" },
+ ["sf W"] = { true, "𝖶" },
+ ["sf X"] = { true, "𝖷" },
+ ["sf Y"] = { true, "𝖸" },
+ ["sf Z"] = { true, "𝖹" },
+
+ -- monospace
+
+ ["tt a"] = { true, "𝚊" },
+ ["tt b"] = { true, "𝚋" },
+ ["tt c"] = { true, "𝚌" },
+ ["tt d"] = { true, "𝚍" },
+ ["tt e"] = { true, "𝚎" },
+ ["tt f"] = { true, "𝚏" },
+ ["tt g"] = { true, "𝚐" },
+ ["tt h"] = { true, "𝚑" },
+ ["tt i"] = { true, "𝚒" },
+ ["tt j"] = { true, "𝚓" },
+ ["tt k"] = { true, "𝚔" },
+ ["tt l"] = { true, "𝚕" },
+ ["tt m"] = { true, "𝚖" },
+ ["tt n"] = { true, "𝚗" },
+ ["tt o"] = { true, "𝚘" },
+ ["tt p"] = { true, "𝚙" },
+ ["tt q"] = { true, "𝚚" },
+ ["tt r"] = { true, "𝚛" },
+ ["tt s"] = { true, "𝚜" },
+ ["tt t"] = { true, "𝚝" },
+ ["tt u"] = { true, "𝚞" },
+ ["tt v"] = { true, "𝚟" },
+ ["tt w"] = { true, "𝚠" },
+ ["tt x"] = { true, "𝚡" },
+ ["tt y"] = { true, "𝚢" },
+ ["tt z"] = { true, "𝚣" },
+
+ ["tt A"] = { true, "𝙰" },
+ ["tt B"] = { true, "𝙱" },
+ ["tt C"] = { true, "𝙲" },
+ ["tt D"] = { true, "𝙳" },
+ ["tt E"] = { true, "𝙴" },
+ ["tt F"] = { true, "𝙵" },
+ ["tt G"] = { true, "𝙶" },
+ ["tt H"] = { true, "𝙷" },
+ ["tt I"] = { true, "𝙸" },
+ ["tt J"] = { true, "𝙹" },
+ ["tt K"] = { true, "𝙺" },
+ ["tt L"] = { true, "𝙻" },
+ ["tt M"] = { true, "𝙼" },
+ ["tt N"] = { true, "𝙽" },
+ ["tt O"] = { true, "𝙾" },
+ ["tt P"] = { true, "𝙿" },
+ ["tt Q"] = { true, "𝚀" },
+ ["tt R"] = { true, "𝚁" },
+ ["tt S"] = { true, "𝚂" },
+ ["tt T"] = { true, "𝚃" },
+ ["tt U"] = { true, "𝚄" },
+ ["tt V"] = { true, "𝚅" },
+ ["tt W"] = { true, "𝚆" },
+ ["tt X"] = { true, "𝚇" },
+ ["tt Y"] = { true, "𝚈" },
+ ["tt Z"] = { true, "𝚉" },
+
+ -- some more undocumented
+
+ ["dx"] = { false, { "d", "x" } }, -- "{dx}" "\\left(dx\\right)"
+ ["dy"] = { false, { "d", "y" } }, -- "{dy}" "\\left(dy\\right)"
+ ["dz"] = { false, { "d", "z" } }, -- "{dz}" "\\left(dz\\right)"
+
+ -- fences
+
+ ["(:"] = { true, "(:" },
+ ["{:"] = { true, "{:" },
+ ["[:"] = { true, "[:" },
+ ["("] = { true, "(" },
+ ["["] = { true, "[" },
+ ["{"] = { true, "{" },
+ ["<<"] = { true, "⟨" }, -- why not <:
+ ["|_"] = { true, "⌊" },
+ ["|~"] = { true, "⌈" },
+ ["⟨"] = { true, "⟨" },
+ ["〈"] = { true, "⟨" },
+ ["〈"] = { true, "⟨" },
+
+ [":)"] = { true, ":)" },
+ [":}"] = { true, ":}" },
+ [":]"] = { true, ":]" },
+ [")"] = { true, ")" },
+ ["]"] = { true, "]" },
+ ["}"] = { true, "}" },
+ [">>"] = { true, "⟩" }, -- why not :>
+ ["~|"] = { true, "⌉" },
+ ["_|"] = { true, "⌋" },
+ ["⟩"] = { true, "⟩" },
+ ["〉"] = { true, "⟩" },
+ ["〉"] = { true, "⟩" },
+
+ ["lparent"] = { true, "(" },
+ ["lbracket"] = { true, "[" },
+ ["lbrace"] = { true, "{" },
+ ["langle"] = { true, "⟨" },
+ ["lfloor"] = { true, "⌊" },
+ ["lceil"] = { true, "⌈" },
+
+ ["rparent"] = { true, ")" },
+ ["rbracket"] = { true, "]" },
+ ["rbrace"] = { true, "}" },
+ ["rangle"] = { true, "⟩" },
+ ["rfloor"] = { true, "⌋" },
+ ["rceil"] = { true, "⌉" },
+
+ -- a bit special:
+
+ ["\\frac"] = { true, "frac" },
+
+ -- now it gets real crazy, only these two:
+
+ ["&gt;"] = { true, ">" },
+ ["&lt;"] = { true, "<" },
+
+}
+
+for k, v in next, characters.data do
+ local name = v.mathname
+ if name and not reserved[name] then
+ reserved[name] = { true, utfchar(k) }
+ end
+ local spec = v.mathspec
+ -- if spec then
+ -- for i=1,#spec do
+ -- local name = spec[i].name
+ -- if name and not reserved[name] then
+ -- reserved[name] = { true, utfchar(k) }
+ -- end
+ -- end
+ -- end
+end
+
+reserved.P = nil
+reserved.S = nil
+
+local isbinary = {
+ ["\\frac"] = true,
+ ["\\root"] = true,
+ ["\\asciimathroot"] = true,
+ ["\\asciimathstackrel"] = true,
}
-table.setmetatableindex(reserved,characters.entities)
+local isunary = {
+ ["\\sqrt"] = true,
+ ["\\asciimathsqrt"] = true,
+ ["\\text"] = true, -- mathoptext
+ ["\\mathoptext"] = true, -- mathoptext
+ ["\\asciimathoptext"] = true, -- mathoptext
+ ["\\hat"] = true, -- widehat
+ ["\\widehat"] = true, -- widehat
+ ["\\bar"] = true, --
+ ["\\overbar"] = true, --
+ ["\\underline"] = true, --
+ ["\\vec"] = true, -- overrightarrow
+ ["\\overrightarrow"] = true, -- overrightarrow
+ ["\\dot"] = true, --
+ ["\\ddot"] = true, --
-local postmapper = Cs ( (
+}
+
+local isfunny = {
+ ["\\sin"] = true,
+}
+
+local isinfix = {
+ ["^"] = true,
+ ["_"] = true,
+}
+
+local isleft = {
+ [s_lparent] = true,
+ [s_lbrace] = true,
+ [s_lbracket] = true,
+ [s_langle] = true,
+ [s_lfloor] = true,
+ [s_lceil] = true,
+ [s_left] = true,
+}
+
+local isright = {
+ [s_rparent] = true,
+ [s_rbrace] = true,
+ [s_rbracket] = true,
+ [s_rangle] = true,
+ [s_rfloor] = true,
+ [s_rceil] = true,
+ [s_right] = true,
+}
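+
+-- the isleft / isright sets are keyed by the internal fence strings (s_lparent,
+-- s_left and friends, defined earlier in this file) so that the collapse passes
+-- below can recognize fenced sublists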
+
+local issimplified = {
+}
+
+--
+
+-- special mess
- P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") +
+local d_one = R("09")
+local d_two = d_one * d_one
+local d_three = d_two * d_one
+local d_four = d_three * d_one
+local d_split = P(-1) + P(",")
- (P("\\bgroup ")) / "{" +
- (P("\\egroup ")) / "}" +
+local d_spaced = (Carg(1) * d_three)^1
- P("\\") * (R("az","AZ")^2) +
+local digitized_1 = Cs ( (
+ d_three * d_spaced * d_split +
+ d_two * d_spaced * d_split +
+ d_one * d_spaced * d_split +
+ P(1)
+ )^1 )
- (R("AZ","az")^2) / reserved +
+local p_fourbefore = d_four * d_split
+local p_fourafter = d_four * P(-1)
- P("{:") / "\\left." +
- P(":}") / "\\right." +
- P("(") / "\\left(" +
- P(")") / "\\right)" +
- P("[") / "\\left[" +
- P("]") / "\\right]" +
- P("{") / "\\left\\{" +
- P("}") / "\\right\\}" +
+local p_beforecomma = d_three * d_spaced * d_split
+ + d_two * d_spaced * d_split
+ + d_one * d_spaced * d_split
+ + d_one * d_split
+
+local p_aftercomma = p_fourafter
+ + d_three * d_spaced
+ + d_two * d_spaced
+ + d_one * d_spaced
+
+local digitized_2 = Cs (
+ p_fourbefore * (p_aftercomma^0) +
+ p_beforecomma * ((p_aftercomma + d_one^1)^0)
+ )
+
+local p_fourbefore = d_four * d_split
+local p_fourafter = d_four
+local d_spaced = (Carg(1) * (d_three + d_two + d_one))^1
+local p_aftercomma = p_fourafter * P(-1)
+ + d_three * d_spaced * P(1)^0
+ + d_one^1
+
+-- local digitized_3 = Cs (
+-- p_fourbefore * p_aftercomma^0 +
+-- p_beforecomma * p_aftercomma^0
+-- )
+
+local digitized_3 = Cs((p_fourbefore + p_beforecomma) * p_aftercomma^0)
+
+local splitmethods = {
+ digitized_1,
+ digitized_2,
+ digitized_3,
+}
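+
+-- the three split methods implement different flavours of digit grouping around
+-- the (Dutch) decimal comma; exactly one of them is selected in asciimath.setup
+-- below, and the commented test block after numbermess illustrates the effect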
+
+local splitmethod = nil
+
+function asciimath.setup(settings)
+ splitmethod = splitmethods[tonumber(settings.splitmethod) or 0]
+ if splitmethod then
+ local separator = settings.separator
+ if separator == true or not interfaces or interfaces.variables.yes then
+ digitseparator = utfchar(0x2008)
+ elseif type(separator) == "string" and separator ~= "" then
+ digitseparator = separator
+ else
+ splitmethod = nil
+ end
+ end
+end
- letter + P(1)
+local collected_digits = { }
+local collected_filename = "asciimath-digits.lua"
+
+function numbermess(s)
+ if splitmethod then
+ local d = lpegmatch(splitmethod,s,1,digitseparator)
+ if d then
+ if trace_digits and s ~= d then
+ collected_digits[s] = d
+ end
+ return d
+ end
+ end
+ return s
+end
+
+-- asciimath.setup { splitmethod = 3 }
+-- local t = {
+-- "1", "12", "123", "1234", "12345", "123456", "1234567", "12345678", "123456789",
+-- "1,1",
+-- "12,12",
+-- "123,123",
+-- "1234,123",
+-- "1234,1234",
+-- "12345,1234",
+-- "1234,12345",
+-- "12345,12345",
+-- "123456,123456",
+-- "1234567,1234567",
+-- "12345678,12345678",
+-- "123456789,123456789",
+-- "0,1234",
+-- "1234,0",
+-- "1234,00",
+-- "0,123456789",
+-- }
+-- for i=1,#t do print(formatters["%-20s : [%s]"](t[i],numbermess(t[i]))) end
+
+statistics.register("asciimath",function()
+ if trace_digits then
+ local n = table.count(collected_digits)
+ if n > 0 then
+ table.save(collected_filename,collected_digits)
+ return string.format("%s digit conversions saved in %s",n,collected_filename)
+ else
+ os.remove(collected_filename)
+ end
+ end
+end)
+
+local p_number_base = patterns.cpnumber or patterns.cnumber or patterns.number
+local p_number = C(p_number_base)
+----- p_number = p_number_base
+local p_spaces = patterns.whitespace
+
+local p_utf_base = patterns.utf8character
+local p_utf = C(p_utf_base)
+-- local p_entity = (P("&") * C((1-P(";"))^2) * P(";"))/ entities
+
+-- entities["gt"] = ">"
+-- entities["lt"] = "<"
+-- entities["amp"] = "&"
+-- entities["dquot"] = '"'
+-- entities["quot"] = "'"
+
+local p_onechar = p_utf_base * P(-1)
+
+----- p_number = Cs((patterns.cpnumber or patterns.cnumber or patterns.number)/function(s) return (gsub(s,",","{,}")) end)
+
+local sign = P("-")^-1
+local digits = R("09")^1
+local integer = sign * digits
+local real = digits * (S(".") * digits)^-1
+local float = real * (P("E") * integer)^-1
+
+-- local number = C(float + integer)
+-- local p_number = C(float)
+local p_number = float / numbermess
+
+local k_reserved = sortedkeys(reserved)
+local k_commands = { }
+local k_unicode = { }
+
+asciimath.keys = {
+ reserved = k_reserved
+}
+
+local k_reserved_different = { }
+local k_reserved_words = { }
+
+for k, v in sortedhash(reserved) do
+ local replacement = v[2]
+ if v[1] then
+ k_unicode[k] = replacement
+ else
+ k_unicode[k] = k -- keep them ... later we remap these
+ if k ~= replacement then
+ k_reserved_different[#k_reserved_different+1] = k
+ end
+ end
+ if not find(k,"[^[a-zA-Z]+$]") then
+ k_unicode["\\"..k] = k -- dirty trick, no real unicode
+ end
+ if not find(k,"[^a-zA-Z]") then
+ k_reserved_words[#k_reserved_words+1] = k
+ end
+ k_commands[k] = replacement
+end
+
+local p_reserved =
+ lpeg.utfchartabletopattern(k_reserved_different) / k_commands
+
+local p_unicode =
+ lpeg.utfchartabletopattern(table.keys(k_unicode)) / k_unicode
+
+-- inspect(k_reserved_different)
+
+local p_texescape = patterns.texescape
+
+local function texescaped(s)
+ return lpegmatch(p_texescape,s)
+end
+
+local p_text =
+ P("text")
+ * p_spaces^0
+ * Cc("\\asciimathoptext")
+ * ( -- maybe balanced
+ Cs( P("{") * ((1-P("}"))^0/texescaped) * P("}") )
+ + Cs((P("(")/"{") * ((1-P(")"))^0/texescaped) * (P(")")/"}"))
+ )
+ + Cc("\\asciimathoptext") * Cs(Cc("{") * (patterns.undouble/texescaped) * Cc("}"))
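+
+-- so both text(...) and "..." fragments end up as \asciimathoptext{...} with
+-- their content escaped for tex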
+
+local m_left = {
+ ["(:"] = s_langle,
+ ["{:"] = s_left,
+ ["[:"] = s_left,
+ ["("] = s_lparent,
+ ["["] = s_lbracket,
+ ["{"] = s_lbrace,
+ ["⟨"] = s_langle,
+ ["⌈"] = s_lceil,
+ ["⌊"] = s_lfloor,
+
+ -- ["<<"] = s_langle, -- why not <:
+ -- ["|_"] = s_lfloor,
+ -- ["|~"] = s_lceil,
+ -- ["〈"] = s_langle,
+ -- ["〈"] = s_langle,
+
+ -- ["lparent"] = s_lparent,
+ -- ["lbracket"] = s_lbracket,
+ -- ["lbrace"] = s_lbrace,
+ -- ["langle"] = s_langle,
+ -- ["lfloor"] = s_lfloor,
+ -- ["lceil"] = s_lceil,
+}
+
+local m_right = {
+ [":)"] = s_rangle,
+ [":}"] = s_right,
+ [":]"] = s_right,
+ [")"] = s_rparent,
+ ["]"] = s_rbracket,
+ ["}"] = s_rbrace,
+ ["⟩"] = s_rangle,
+ ["⌉"] = s_rceil,
+ ["⌋"] = s_rfloor,
+
+ -- [">>"] = s_rangle, -- why not :>
+ -- ["~|"] = s_rceil,
+ -- ["_|"] = s_rfloor,
+ -- ["〉"] = s_rangle,
+ -- ["〉"] = s_rangle,
+
+ -- ["rparent"] = s_rparent,
+ -- ["rbracket"] = s_rbracket,
+ -- ["rbrace"] = s_rbrace,
+ -- ["rangle"] = s_rangle,
+ -- ["rfloor"] = s_rfloor,
+ -- ["rceil"] = s_rceil,
+}
+
+local islimits = {
+ ["\\sum"] = true,
+ -- ["∑"] = true,
+ ["\\prod"] = true,
+ -- ["∏"] = true,
+ ["\\lim"] = true,
+}
+
+local p_left =
+ lpeg.utfchartabletopattern(m_left) / m_left
+local p_right =
+ lpeg.utfchartabletopattern(m_right) / m_right
+
+-- special cases
+
+-- local p_special =
+-- C("/")
+-- + P("\\ ") * Cc("{}") * p_spaces^0 * C(S("^_"))
+-- + P("\\ ") * Cc("\\space")
+-- + P("\\\\") * Cc("\\backslash")
+-- + P("\\") * (R("az","AZ")^1/entities)
+-- + P("|") * Cc("\\|")
+--
+-- faster but also uglier:
+
+local p_special =
+ P("|") * Cc("\\|") -- s_mbar -- maybe always add left / right as in mml ?
+ + P("\\") * (
+ (
+ P(" ") * (
+ Cc("{}") * p_spaces^0 * C(S("^_"))
+ + Cc("\\space")
+ )
+ )
+ + P("\\") * Cc("\\backslash")
+ -- + (R("az","AZ")^1/entities)
+ + C(R("az","AZ")^1)
+ )
+
+-- open | close :: {: | :}
+
+local u_parser = Cs ( (
+ patterns.doublequoted +
+ P("text") * p_spaces^0 * P("(") * (1-P(")"))^0 * P(")") + -- -- todo: balanced
+ p_unicode +
+ p_utf_base
)^0 )
-local parser
+local a_parser = Ct { "tokenizer",
+ tokenizer = (
+ p_spaces
+ + p_number
+ + p_text
+ -- + Ct(p_open * V("tokenizer") * p_close) -- {: (a+b,=,1),(a+b,=,7) :}
+ -- + Ct(p_open * V("tokenizer") * p_close_right) -- { (a+b,=,1),(a+b,=,7) :}
+ -- + Ct(p_open_left * V("tokenizer") * p_right) -- {: (a+b,=,1),(a+b,=,7) }
+ + Ct(p_left * V("tokenizer") * p_right) -- { (a+b,=,1),(a+b,=,7) }
+ + p_special
+ + p_reserved
+ -- + p_utf - p_close - p_right
+ + (p_utf - p_right)
+ )^1,
+}
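+
+-- the tokenizer delivers a (possibly nested) table of strings in which fenced
+-- groups become subtables; that is the structure the collapse_* passes below
+-- work on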
-local function converted(original,totex)
- local ok, result
- if trace_mapping then
- report_asciimath("original : %s",original)
+local collapse = nil
+local serialize = table.serialize
+local f_state = formatters["level %s : %s : intermediate"]
+
+local function show_state(t,level,state)
+ report_asciimath(serialize(t,f_state(level,state)))
+end
+
+local function show_result(original,unicoded,texcoded)
+ report_asciimath("original > %s",original)
+ report_asciimath("unicoded > %s",unicoded)
+ report_asciimath("texcoded > %s",texcoded)
+end
+
+local function collapse_matrices(t)
+ local n = #t
+ if n > 4 and t[3] == "," then
+ local l1 = t[1]
+ local r1 = t[n]
+ if isleft[l1] and isright[r1] then
+ local l2 = t[2]
+ local r2 = t[n-1]
+ if type(l2) == "table" and type(r2) == "table" then
+ -- we have a matrix
+ local valid = true
+ for i=3,n-2,2 do
+ if t[i] ~= "," then
+ valid = false
+ break
+ end
+ end
+ if valid then
+ for i=2,n-1,2 do
+ local ti = t[i]
+ local tl = ti[1]
+ local tr = ti[#ti]
+ if isleft[tl] and isright[tr] then
+ -- ok
+ else
+ valid = false
+ break
+ end
+ end
+ if valid then
+ local omit = l1 == s_left and r1 == s_right
+ if omit then
+ t[1] = "\\startmatrix"
+ else
+ t[1] = l1 .. "\\startmatrix"
+ end
+ for i=2,n-1 do
+ if t[i] == "," then
+ t[i] = "\\NR"
+ else
+ local ti = t[i]
+ ti[1] = "\\NC"
+ for i=2,#ti-1 do
+ if ti[i] == "," then
+ ti[i] = "\\NC"
+ end
+ end
+ ti[#ti] = nil
+ end
+ end
+ if omit then
+ t[n] = "\\NR\\stopmatrix"
+ else
+ t[n] = "\\NR\\stopmatrix" .. r1
+ end
+ end
+ end
+ end
+ end
+ end
+ return t
+end
+
+local function collapse_bars(t)
+ local n, i, l, m = #t, 1, false, 0
+ while i <= n do
+ local current = t[i]
+ if current == "\\|" then
+ if l then
+ m = m + 1
+ t[l] = s_lbar
+ t[i] = s_rbar
+ t[m] = { unpack(t,l,i) }
+ l = false
+ else
+ l = i
+ end
+ elseif not l then
+ m = m + 1
+ t[m] = current
+ end
+ i = i + 1
end
- local premapped = lpegmatch(premapper,original)
- if premapped then
- if trace_mapping then
- report_asciimath("prepared : %s",premapped)
+ if l then
+ local tt = { s_lnothing } -- space fools final checker
+ local tm = 1
+ for i=1,m do
+ tm = tm + 1
+ tt[tm] = t[i]
end
- local parsed = lpegmatch(parser,premapped)
- if parsed then
- if trace_mapping then
- report_asciimath("parsed : %s",parsed)
+ tm = tm + 1
+ tt[tm] = s_mbar
+ for i=l+1,n do
+ tm = tm + 1
+ tt[tm] = t[i]
+ end
+ tm = tm + 1
+ tt[tm] = s_rnothing -- space fools final checker
+ m = tm
+ t = tt
+ elseif m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_pairs(t)
+ local n, i = #t, 1
+ while i < n do
+ local current = t[i]
+ if current == "/" and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ local tn = t[i+2]
+ if type(tl) == "table" then
+ if isleft[tl[1]] and isright[tl[#tl]] then
+ tl[1] = "" -- todo: remove
+ tl[#tl] = nil
+ end
+ end
+ if type(tr) == "table" then
+ if tn == "^" then
+ -- brr 1/(1+x)^2
+ elseif isleft[tr[1]] and isright[tr[#tr]] then
+ tr[1] = "" -- todo: remove
+ tr[#tr] = nil
+ end
+ end
+ i = i + 2
+ elseif current == "," or current == ";" then
+ -- t[i] = current .. "\\thinspace" -- looks bad in (a,b)
+ i = i + 1
+ else
+ i = i + 1
+ end
+ end
+ return t
+end
+
+local function collapse_parentheses(t)
+ local n, i = #t, 1
+ if n > 2 then
+ while i < n do
+ local current = t[i]
+ if type(current) == "table" and isleft[t[i-1]] and isright[t[i+1]] then
+ local c = #current
+ if c > 2 and isleft[current[1]] and isright[current[c]] then
+ remove(current,c)
+ remove(current,1)
+ end
+ i = i + 3
+ else
+ i = i + 1
+ end
+ end
+ end
+ return t
+end
+
+local function collapse_signs(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if isunary[current] then
+ local one = t[i+1]
+ if not one then
+ -- m = m + 1
+ t[m] = current .. "{}" -- error
+ return t
+ -- break
+ end
+ if type(one) == "table" then
+ if isleft[one[1]] and isright[one[#one]] then
+ remove(one,#one)
+ remove(one,1)
+ end
+ one = collapse(one,level)
+ elseif one == "-" and i + 2 <= n then -- or another sign ? or unary ?
+ local t2 = t[i+2]
+ if type(t2) == "string" then
+ one = one .. t2
+ i = i + 1
+ end
+ end
+ t[m] = current .. "{" .. one .. "}"
+ i = i + 2
+ elseif i + 2 <= n and isfunny[current] then
+ local one = t[i+1]
+ if isinfix[one] then
+ local two = t[i+2]
+ if two == "-" then -- or another sign ? or unary ?
+ local three = t[i+3]
+ if three then
+ if type(three) == "table" then
+ three = collapse(three,level)
+ end
+ t[m] = current .. one .. "{" .. two .. three .. "}"
+ i = i + 4
+ else
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_binaries(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if isbinary[current] then
+ local one = t[i+1]
+ local two = t[i+2]
+ if not one then
+ t[m] = current .. "{}{}" -- error
+ return t
+ -- break
+ end
+ if type(one) == "table" then
+ if isleft[one[1]] and isright[one[#one]] then
+ remove(one,#one)
+ remove(one,1)
+ end
+ one = collapse(one,level)
+ end
+ if not two then
+ t[m] = current .. "{" .. one .. "}{}"
+ return t
+ -- break
+ end
+ if type(two) == "table" then
+ if isleft[two[1]] and isright[two[#two]] then
+ remove(two,#two)
+ remove(two,1)
+ end
+ two = collapse(two,level)
+ end
+ t[m] = current .. "{" .. one .. "}{" .. two .. "}"
+ i = i + 3
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_infixes_1(t)
+ local n, i = #t, 1
+ while i <= n do
+ local current = t[i]
+ if isinfix[current] then
+ local what = t[i+1]
+ if what then
+ if type(what) == "table" then
+ local f, l = what[1], what[#what]
+ if isleft[f] and isright[l] then
+ remove(what,#what)
+ remove(what,1)
+ end
+ t[i+1] = collapse(what,level) -- collapse ?
+ end
+ i = i + 2
+ else
+ break
end
- local postmapped = lpegmatch(postmapper,parsed)
- if postmapped then
- if trace_mapping then
- report_asciimath("finalized: %s",postmapped)
+ else
+ i = i + 1
+ end
+ end
+ return t
+end
+
+function collapse_limits(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if islimits[current] then
+ local one, two, first, second = nil, nil, t[i+1], t[i+3]
+ if first and isinfix[first] then
+ one = t[i+2]
+ if one then
+ -- if type(one) == "table" then
+ -- if isleft[one[1]] and isright[one[#one]] then
+ -- remove(one,#one)
+ -- remove(one,1)
+ -- end
+ -- one = collapse(one,level)
+ -- end
+ if second and isinfix[second] then
+ two = t[i+4]
+ -- if type(two) == "table" then
+ -- if isleft[two[1]] and isright[two[#two]] then
+ -- remove(two,#two)
+ -- remove(two,1)
+ -- end
+ -- two = collapse(two,level)
+ -- end
+ end
+ if two then
+ t[m] = current .. "\\limits" .. first .. "{" .. one .. "}" .. second .. "{" .. two .. "}"
+ i = i + 5
+ else
+ t[m] = current .. "\\limits" .. first .. "{" .. one .. "}"
+ i = i + 3
+ end
+ else
+ t[m] = current
+ i = i + 1
end
- result, ok = postmapped, true
else
- result = "error in postmapping"
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_tables(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if type(current) == "table" then
+ if current[1] == "\\NC" then
+ t[m] = collapse(current,level)
+ else
+ t[m] = "{" .. collapse(current,level) .. "}"
+ end
+ i = i + 1
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_infixes_2(t)
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if isinfix[current] and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ local ti = t[i+2]
+ local tn = t[i+3]
+ if ti and tn and isinfix[ti] then
+ t[m] = tl .. current .. "{" .. tr .. "}" .. ti .. "{" .. tn .. "}"
+ i = i + 4
+ else
+ t[m] = tl .. current .. "{" .. tr .. "}"
+ i = i + 2
+ end
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_fractions_1(t)
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if current == "/" and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ t[m] = "\\frac{" .. tl .. "}{" .. tr .. "}"
+ i = i + 2
+ if i < n then
+ m = m + 1
+ t[m] = t[i]
+ i = i + 1
end
else
- result = "error in mapping"
+ m = m + 1
+ t[m] = current
+ i = i + 1
end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_fractions_2(t)
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if current == "⁄" and i > 1 then -- \slash
+ t[m] = "{" .. s_left .. t[i-1] .. s_mslash .. t[i+1] .. s_right .. "}"
+ i = i + 2
+ if i < n then
+ m = m + 1
+ t[m] = t[i]
+ i = i + 1
+ end
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_result(t)
+ local n = #t
+ if t[1] == s_left and t[n] == s_right then -- see bar .. space needed there
+ return concat(t," ",2,n-1)
else
- result = "error in premapping"
+ return concat(t," ")
end
- if totex then
- if ok then
- context.mathematics(result)
+end
+
+collapse = function(t,level)
+ -- check
+ if not t then
+ return ""
+ end
+ -- tracing
+ if trace_detail then
+ if level then
+ level = level + 1
else
- context.type(result) -- some day monospaced
+ level = 1
+ end
+ show_state(t,level,"parsed")
+ end
+ -- steps
+ t = collapse_matrices (t) if trace_detail then show_state(t,level,"matrices") end
+ t = collapse_bars (t) if trace_detail then show_state(t,level,"bars") end
+ t = collapse_pairs (t) if trace_detail then show_state(t,level,"pairs") end
+ t = collapse_parentheses(t) if trace_detail then show_state(t,level,"parentheses") end
+ t = collapse_signs (t) if trace_detail then show_state(t,level,"signs") end
+ t = collapse_binaries (t) if trace_detail then show_state(t,level,"binaries") end
+ t = collapse_infixes_1 (t) if trace_detail then show_state(t,level,"infixes (1)") end
+ t = collapse_limits (t) if trace_detail then show_state(t,level,"limits") end
+ t = collapse_tables (t) if trace_detail then show_state(t,level,"tables") end
+ t = collapse_infixes_2 (t) if trace_detail then show_state(t,level,"infixes (2)") end
+ t = collapse_fractions_1(t) if trace_detail then show_state(t,level,"fractions (1)") end
+ t = collapse_fractions_2(t) if trace_detail then show_state(t,level,"fractions (2)") end
+ -- done
+ return collapse_result(t)
+end
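+
+-- a rough illustration (not a normative test): for input like "(a+b)/2" the
+-- tokenizer gives { { "(", "a", "+", "b", ")" }, "/", "2" } (with the fences in
+-- their internal s_* form) after which the passes above strip the redundant
+-- fences and rewrite the "/" into a \frac{..}{..}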
+
+-- todo: cache simple ones, say #str < 10, maybe weak
+
+local context = context
+local ctx_mathematics = context and context.mathematics or report_asciimath
+local ctx_type = context and context.type or function() end
+local ctx_inleft = context and context.inleft or function() end
+
+local function convert(str,totex)
+ local unicoded = lpegmatch(u_parser,str) or str
+ local texcoded = collapse(lpegmatch(a_parser,unicoded))
+ if trace_mapping then
+ show_result(str,unicoded,texcoded)
+ end
+ if totex then
+ ctx_mathematics(texcoded)
+ else
+ return texcoded
+ end
+end
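+
+-- when totex is not set convert() just returns the generated tex code; this is
+-- the function that gets exposed as asciimath.convert further down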
+
+local n = 0
+local p = (
+ (S("{[(") + P("\\left" )) / function() n = n + 1 end
+ + (S("}])") + P("\\right")) / function() n = n - 1 end
+ + p_utf_base
+)^0
+
+local function invalidtex(str)
+ n = 0
+ lpegmatch(p,str)
+ if n == 0 then
+ return false
+ elseif n < 0 then
+ return formatters["too many right fences: %s"](-n)
+ elseif n > 0 then
+ return formatters["not enough right fences: %s"](n)
+ end
+end
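+
+-- for instance (illustrative only):
+--
+-- invalidtex("\\frac{1}{2}")   -- false, the fences balance
+-- invalidtex("\\left( x + 1")  -- a message about missing right fences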
+
+local collected = { }
+local indexed = { }
+
+-- bonus
+
+local p_reserved_spaced =
+ C(lpeg.utfchartabletopattern(k_reserved_words)) / " %1 "
+
+local p_text =
+ C(P("text")) / " %1 "
+ * p_spaces^0
+ * ( -- maybe balanced
+ (P("{") * (1-P("}"))^0 * P("}"))
+ + (P("(") * (1-P(")"))^0 * P(")"))
+ )
+ + patterns.doublequoted
+
+local p_expand = Cs((p_text + p_reserved_spaced + p_utf_base)^0)
+local p_compress = patterns.collapser
+
+local function cleanedup(str)
+ return lpegmatch(p_compress,lpegmatch(p_expand,str)) or str
+end
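+
+-- cleanedup() normalizes an input so that visually different but equivalent
+-- spellings (say "sinsqrtx" versus "sin sqrt x") can be counted as the same
+-- formula: known words get spaces around them and whitespace is then collapsed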
+
+-- so far
+
+local function register(s,cleanedup,collected,shortname)
+ local c = cleanedup(s)
+ local f = collected[c]
+ if f then
+ f.count = f.count + 1
+ f.files[shortname] = (f.files[shortname] or 0) + 1
+ if s ~= c then
+ f.cleanedup = f.cleanedup + 1
end
+ f.dirty[s] = (f.dirty[s] or 0) + 1
else
- return result
- end
-end
-
-local function onlyconverted(str)
- local parsed = lpegmatch(parser,str)
- return parsed or str
-end
-
-local sqrt = P("sqrt") / "\\rootradical \\bgroup \\egroup "
-local root = P("root") / "\\rootradical "
-local frac = P("frac") / "\\frac "
-local stackrel = P("stackrel") / "\\stackrel "
-local text = P("text") / "\\mathoptext "
-local hat = P("hat") / "\\widehat "
-local overbar = P("bar") / "\\overbar "
-local underline = P("ul") / "\\underline "
-local vec = P("vec") / "\\overrightarrow "
-local dot = P("dot") / "\\dot "
-local ddot = P("ddot") / "\\ddot "
-
-local left = P("(:") + P("{:") + P("(") + P("[") + P("{")
-local right = P(":)") + P(":}") + P(")") + P("]") + P("}")
-local leftnorright = 1 - left - right
-local singles = sqrt + text + hat + underline + overbar + vec + ddot + dot
-local doubles = root + frac + stackrel
-local ignoreleft = (left/"") * spaces * spaces
-local ignoreright = spaces * (right/"") * spaces
-local ignoreslash = spaces * (P("/")/"") * spaces
-local comma = P(",")
-local nocomma = 1-comma
-local anychar = P(1)
-local openmatrix = left * spaces * Cc("\\matrix\\bgroup ")
-local closematrix = Cc("\\egroup ") * spaces * right
-local nextcolumn = spaces * (comma/"&") * spaces
-local nextrow = spaces * (comma/"\\cr ") * spaces
-local finishrow = Cc("\\cr ")
-local opengroup = left/"\\bgroup "
-local closegroup = right/"\\egroup "
-local somescript = S("^_") * spaces
-local beginargument = Cc("\\bgroup ")
-local endargument = Cc("\\egroup ")
-
-parser = Cs { "main",
-
- scripts = somescript * V("argument"),
- division = Cc("\\frac") * V("argument") * spaces * ignoreslash * spaces * V("argument"),
- double = doubles * spaces * V("argument") * spaces * V("argument"),
- single = singles * spaces * V("argument"),
-
- balanced = opengroup * (C((leftnorright + V("balanced"))^0)/onlyconverted) * closegroup,
- argument = V("balanced") + V("token"),
-
- element = (V("step") + (V("argument") + V("step")) - ignoreright - nextcolumn - comma)^1,
- commalist = ignoreleft * V("element") * (nextcolumn * spaces * V("element"))^0 * ignoreright,
- matrix = openmatrix * spaces * (V("commalist") * (nextrow * V("commalist"))^0) * finishrow * closematrix,
-
- token = beginargument * (texnic + float + real + number + letter) * endargument,
-
- step = V("scripts") + V("division") + V("single") + V("double"),
- main = (V("matrix") + V("step") + anychar)^0,
+ local texcoded = convert(s)
+ local message = invalidtex(texcoded)
+ if message then
+ report_asciimath("%s: %s : %s",message,s,texcoded)
+ end
+ collected[c] = {
+ count = 1,
+ files = { [shortname] = 1 },
+ texcoded = texcoded,
+ message = message,
+ cleanedup = s ~= c and 1 or 0,
+ dirty = { [s] = 1 }
+ }
+ end
+end
-}
+local function wrapup(collected,indexed)
+ local n = 0
+ for k, v in sortedhash(collected) do
+ n = n + 1
+ v.n = n
+ indexed[n] = k
+ end
+end
+function collect(fpattern,element,collected,indexed)
+ local element = element or "am"
+ local mpattern = formatters["<%s>(.-)</%s>"](element,element)
+ local filenames = resolvers.findtexfile(fpattern)
+ if filenames and filenames ~= "" then
+ filenames = { filenames }
+ else
+ filenames = dir.glob(fpattern)
+ end
+ local cfpattern = gsub(fpattern,"^%./",lfs.currentdir())
+ local cfpattern = gsub(cfpattern,"\\","/")
+ local wildcard = string.split(cfpattern,"*")[1]
+ if not collected then
+ collected = { }
+ indexed = { }
+ end
+ for i=1,#filenames do
+ local filename = gsub(filenames[i],"\\","/")
+ local splitname = (wildcard and wildcard ~= "" and string.split(filename,wildcard)[2]) or filename
+ local shortname = gsub(splitname or file.basename(filename),"^%./","")
+ if shortname == "" then
+ shortname = filename
+ end
+ local fullname = resolvers.findtexfile(filename) or filename
+ if fullname ~= "" then
+ for s in gmatch(io.loaddata(fullname),mpattern) do
+ register(s,cleanedup,collected,shortname)
+ end
+ end
+ end
+ wrapup(collected,indexed)
+ return collected, indexed
+end
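+
+-- typical (illustrative) usage, collecting all <am>...</am> snippets from a set
+-- of files; the glob pattern is only an example:
+--
+-- local collected, indexed = asciimath.collect("./chapters/*.xml","am")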
+
+function filter(root,pattern,collected,indexed)
+ if not pattern or pattern == "" then
+ pattern = "am"
+ end
+ if not collected then
+ collected = { }
+ indexed = { }
+ end
+ for c in xmlcollected(root,pattern) do
+ register(xmltext(c),cleanedup,collected,xmlinclusion(c) or "" )
+ end
+ wrapup(collected,indexed)
+ return collected, indexed
+end
+
+asciimath.convert = convert
asciimath.reserved = reserved
-asciimath.convert = converted
+asciimath.collect = collect
+asciimath.filter = filter
+asciimath.invalidtex = invalidtex
+asciimath.cleanedup = cleanedup
+
+-- sin(x) = 1 : 3.3 uncached 1.2 cached , so no real gain (better optimize the converter then)
+
+local uncrapped = {
+ ["%"] = "\\mathpercent",
+ ["&"] = "\\mathampersand",
+ ["#"] = "\\mathhash",
+ ["$"] = "\\mathdollar",
+ ["^"] = "\\Hat{\\enspace}", -- terrible hack ... tex really does it sbest to turn any ^ into a superscript
+ ["_"] = "\\underline{\\enspace}",
+}
+
+local function convert(str,nowrap)
+ if #str > 0 then
+ local unicoded = lpegmatch(u_parser,str) or str
+ if lpegmatch(p_onechar,unicoded) then
+ ctx_mathematics(uncrapped[unicoded] or unicoded)
+ else
+ local texcoded = collapse(lpegmatch(a_parser,unicoded))
+ if trace_mapping then
+ show_result(str,unicoded,texcoded)
+ end
+ if #texcoded == 0 then
+ report_asciimath("error in asciimath: %s",str)
+ else
+ local message = invalidtex(texcoded)
+ if message then
+ report_asciimath("%s: %s : %s",message,str,texcoded)
+ ctx_type(formatters["<%s>"](message))
+ elseif nowrap then
+ context(texcoded)
+ else
+ ctx_mathematics(texcoded)
+ end
+ end
+ end
+ end
+end
+
+
+local context = context
+
+if not context then
+
+-- trace_mapping = true
+-- trace_detail = true
+
+-- report_asciimath(cleanedup([[ac+sinx+xsqrtx+sinsqrtx+sinsqrt(x)]]))
+-- report_asciimath(cleanedup([[a "αsinsqrtx" b]]))
+-- report_asciimath(cleanedup([[a "α" b]]))
+-- report_asciimath(cleanedup([[//4]]))
+
+-- convert([[\^{1/5}log]])
+-- convert("sqrt")
+-- convert("^")
+
+-- convert("\\frac{a}{b}")
+-- convert("frac{a}{b}")
+-- convert("\\sin{a}{b}")
+-- convert("sin{a}{b}")
+-- convert("1: rightarrow")
+-- convert("2: \\rightarrow")
+
+-- convert("((1,2,3),(4,5,6),(7,8,9))")
+
+-- convert("1/(t+x)^2")
+
+-- convert("AA a > 0 ^^ b > 0 | {:log_g:} a + {:log_g:} b")
+-- convert("AA a &gt; 0 ^^ b > 0 | {:log_g:} a + {:log_g:} b")
+
+-- convert("10000,00001")
+-- convert("4/18*100text(%)~~22,2")
+-- convert("4/18*100text(%)≈22,2")
+-- convert("62541/(197,6)≈316,05")
+
+-- convert([[sum x]])
+-- convert([[sum^(1)_(2) x]])
+-- convert([[lim_(1)^(2) x]])
+-- convert([[lim_(1) x]])
+-- convert([[lim^(2) x]])
+
+-- convert([[{: rangle]])
+-- convert([[\langle\larr]])
+-- convert([[langlelarr]])
+-- convert([[D_f=[0 ,→〉]])
+-- convert([[ac+sinx+xsqrtx]])
+-- convert([[ac+\alpha x+xsqrtx-cc b*pi**psi-3alephx / bb X]])
+-- convert([[ac+\ ^ x+xsqrtx]])
+-- convert([[d/dx(x^2+1)]])
+-- convert([[a "αsinsqrtx" b]])
+-- convert([[a "α" b]])
+-- convert([[//4]])
+-- convert([[ {(a+b,=,1),(a+b,=,7)) ]])
+
+-- convert([[ 2/a // 5/b = (2 b) / ( a b) // ( 5 a ) / ( a b ) = (2 b ) / ( 5 a ) ]])
+-- convert([[ (2+x)/a // 5/b ]])
+
+-- convert([[ ( 2/a ) // ( 5/b ) = ( (2 b) / ( a b) ) // ( ( 5 a ) / ( a b ) ) = (2 b ) / ( 5 a ) ]])
+
+-- convert([[ (x/y)^3 = x^3/y^3 ]])
+
+-- convert([[ {: (1,2) :} ]])
+-- convert([[ {: (a+b,=,1),(a+b,=,7) :} ]])
+-- convert([[ { (a+b,=,1),(a+b,=,7) :} ]])
+-- convert([[ {: (a+b,=,1),(a+b,=,7) } ]])
+-- convert([[ { (a+b,=,1),(a+b,=,7) } ]])
+
+-- convert([[(1,5 ±sqrt(1,25 ),0 )]])
+-- convert([[1//2]])
+-- convert([[(p)/sqrt(p)]])
+-- convert([[u_tot]])
+-- convert([[u_tot=4,4 L+0,054 T]])
+
+-- convert([[ [←;0,2] ]])
+-- convert([[ [←;0,2⟩ ]])
+-- convert([[ ⟨←;0,2 ) ]])
+-- convert([[ ⟨←;0,2 ] ]])
+-- convert([[ ⟨←;0,2⟩ ]])
+
+-- convert([[ x^2(x-1/16)=0 ]])
+-- convert([[ y = ax + 3 - 3a ]])
+-- convert([[ y= ((1/4)) ^x ]])
+-- convert([[ x=\ ^ (1/4) log(0 ,002 )= log(0,002) / (log(1/4) ]])
+-- convert([[ x=\ ^glog(y) ]])
+-- convert([[ x^ (-1 1/2) =1/x^ (1 1/2)=1/ (x^1*x^ (1/2)) =1/ (xsqrt(x)) ]])
+-- convert([[ x^2(10 -x)&gt;2 x^2 ]])
+-- convert([[ x^4&gt;x ]])
+
+ return
+
+end
+
+interfaces.implement {
+ name = "asciimath",
+ actions = convert,
+ arguments = "string"
+}
+
+interfaces.implement {
+ name = "justasciimath",
+ actions = convert,
+ arguments = { "string", true },
+}
+
+local ctx_typebuffer = context.typebuffer
+local ctx_mathematics = context.mathematics
+local ctx_color = context.color
+
+local sequenced = table.sequenced
+local assign_buffer = buffers.assign
+
+local show = { }
+asciimath.show = show
+
+local collected, indexed, ignored = { }, { }, { }
+
+local color = { "darkred" }
+
+function show.ignore(n)
+ if type(n) == "string" then
+ local c = collected[n]
+ n = c and c.n
+ end
+ if n then
+ ignored[n] = true
+ end
+end
+
+function show.count(n,showcleanedup)
+ local v = collected[indexed[n]]
+ local count = v.count
+ local cleanedup = v.cleanedup
+ if not showcleanedup or cleanedup == 0 then
+ context(count)
+ elseif count == cleanedup then
+ ctx_color(color,count)
+ else
+ context("%s+",count-cleanedup)
+ ctx_color(color,cleanedup)
+ end
+end
+
+local h = { }
+local am = { "am" }
+
+function show.nofdirty(n)
+ local k = indexed[n]
+ local v = collected[k]
+ local n = v.cleanedup
+ h = { }
+ if n > 0 then
+ for d, n in sortedhash(v.dirty) do
+ if d ~= k then
+ h[#h+1] = { d, n }
+ end
+ end
+ end
+ context(#h)
+end
+
+function show.dirty(m,wrapped)
+ local d = h[m]
+ if d then
+ ctx_inleft(d[2])
+ if wrapped then
+ assign_buffer("am",'"' .. d[1] .. '"')
+ else
+ assign_buffer("am",d[1])
+ end
+ ctx_typebuffer(am)
+ end
+end
+
+function show.files(n)
+ context(sequenced(collected[indexed[n]].files," "))
+end
+
+function show.input(n,wrapped)
+ if wrapped then
+ assign_buffer("am",'"' .. indexed[n] .. '"')
+ else
+ assign_buffer("am",indexed[n])
+ end
+ ctx_typebuffer(am)
+end
+
+function show.result(n)
+ local v = collected[indexed[n]]
+ if ignored[n] then
+ context("ignored")
+ elseif v.message then
+ ctx_color(color, v.message)
+ else
+ ctx_mathematics(v.texcoded)
+ end
+end
+
+function show.load(str,element)
+ collected, indexed, ignored = { }, { }, { }
+ local t = utilities.parsers.settings_to_array(str)
+ for i=1,#t do
+ asciimath.collect(t[i],element or "am",collected,indexed)
+ end
+end
+
+function show.filter(id,element)
+ collected, indexed, ignored = { }, { }, { }
+ asciimath.filter(lxml.getid(id),element or "am",collected,indexed)
+end
+
+function show.max()
+ context(#indexed)
+end
+
+function show.statistics()
+ local usedfiles = { }
+ local noffiles = 0
+ local nofokay = 0
+ local nofbad = 0
+ local nofcleanedup = 0
+ for k, v in next, collected do
+ if ignored[v.n] then
+ nofbad = nofbad + v.count
+ elseif v.message then
+ nofbad = nofbad + v.count
+ else
+ nofokay = nofokay + v.count
+ end
+ nofcleanedup = nofcleanedup + v.cleanedup
+ for k, v in next, v.files do
+ local u = usedfiles[k]
+ if u then
+ usedfiles[k] = u + 1
+ else
+ noffiles = noffiles + 1
+ usedfiles[k] = 1
+ end
+ end
+ end
+ local NC = context.NC
+ local NR = context.NR
+ local EQ = context.EQ
+ context.starttabulate { "|B||" }
+ NC() context("files") EQ() context(noffiles) NC() NR()
+ NC() context("formulas") EQ() context(nofokay+nofbad) NC() NR()
+ NC() context("uniques") EQ() context(#indexed) NC() NR()
+ NC() context("cleanedup") EQ() context(nofcleanedup) NC() NR()
+ NC() context("errors") EQ() context(nofbad) NC() NR()
+ context.stoptabulate()
+end
+
+function show.save(name)
+ table.save(name ~= "" and name or "dummy.lua",collected)
+end
diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv
index b555115ff..1d62fb93d 100644
--- a/tex/context/base/x-asciimath.mkiv
+++ b/tex/context/base/x-asciimath.mkiv
@@ -1,6 +1,6 @@
%D \module
-%D [ file=m-asciimath,
-%D version=2006.04.24, % 1999.11.06,
+%D [ file=x-asciimath,
+%D version=2014.06.01, % 2006.04.24, % 1999.11.06,
%D title=\CONTEXT\ Modules,
%D subtitle=AsciiMath,
%D author=Hans Hagen,
@@ -11,88 +11,383 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Lua code.
-
\registerctxluafile{x-asciimath}{}
-\def\ctxmoduleasciimath#1{\ctxlua{moduledata.asciimath.#1}}
-
-%D The following code is not officially supported and is only meant
-%D for the Math4All project.
+%D When the Math4All project started, we immediately started using content \MATHML.
+%D Because in school math there is often a reference to calculator input, we also
+%D provided what we called \quote {calcmath}: a predictable expression based way
+%D of entering math. At some point \OPENMATH\ was also used but that was later
+%D abandoned because editing is more cumbersome.
%D
-%D The following code kind of maps ascii math
-%D http://www1.chapman.edu/~jipsen/mathml/asciimath.html onto \TEX. The
-%D code was written for the math4all project but in retrospect we
-%D could have used just tex code as the web version can handle that
-%D as well. Anyhow, as we use \MATHML\ as basis it makes sense to add
-%D this to the repertoire as annotation variant, so now we have
-%D content \MATHML\ (prefered), presentation \MATHML\ (often messy),
-%D \OPENMATH\ (what was which we started with in this project)
-%D calcmath (handy for students who are accustomed to calculators),
-%D asciimath (to make Frits's live easier) and of course \TEX. Of
-%D course all are used mixed.
+%D Due to limitations in the web variant (which is independent of rendering for
+%D paper but often determines the coding of a document, not seldom for the worse) the
+%D switch was made to presentational \MATHML. But even that proved to be too complex
+%D for rendering on the web, so it got converted to so called \ASCIIMATH\ which
+%D can be rendered using some \JAVASCRIPT\ magic. However, all the formulas (and
+%D we're talking of tens of thousands of them) were very precisely coded by the main
+%D author. Because in intermediate stages of the editing (by additional authors) a
+%D mixture of \MATHML\ and \ASCIIMATH\ was used, we wrote the first version of this
+%D module. As reference we took \url
+%D {http://www1.chapman.edu/~jipsen/mathml/asciimath.html} and. The idea was to
+%D stick to \MATHML\ as reference and if needed use \ASCIIMATH\ as annotation.
%D
-%D We don't support all quirks of asciimath as I am not in the mood to
-%D write a complex parser while a bit of sane coding can work as well.
+%D Eventually we ended up with supporting several math encodings in \CONTEXT\ that
+%D could be used mixed: content \MATHML\ (preferred), presentation \MATHML\ (often
+%D messy), \OPENMATH\ (somewhat minimalistic), calcmath (handy for students who are
+%D accustomed to calculators), \ASCIIMATH\ (to make web support easier) and of
+%D course \TEX.
%D
+%D The first version had some limitations as we didn't want to support all quirks of
+%D \ASCIIMATH\ and also because I was not really in the mood to write a complex parser
+%D when a bit of sane coding can work equally well. Some comments from that version:
+%D
+%D \startnarrower
%D \startitemize
-%D \item We support only the syntactically clear variants and as long
-%D as lpeg does not support left recursion this is as far as we
-%D want to go.
-%D \item The parser is rather insensitive for spaces but yet the advice is
-%D to avoid weird coding like \type {d/dxf(x)} but use \type {d/dx
-%D f(x)} instead. After all we're not in a compact coding cq.\
-%D parser challenge.
-%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2}
-%D nor \type {root3x} (although the second one kind of works). A bit
-%D of defensive coding does not hurt.
-%D \item We can process \type {a/b/c/d} but it's not compatible with the
-%D default behaviour of asciimath. Use grouping instead. Yes, we do
-%D support the somewhat nonstandard grouping token mix.
-%D \item You should use explicit \type {text(..)} directives as one can
-%D never be sure what is a reserved word and not.
+%D \item We support only the syntactically clear variants and as long as lpeg does
+%D not support left recursion this is as far as we want to go.
+%D \item The parser is rather insensitive for spaces but yet the advice is to avoid
+%D weird coding like \type {d/dxf(x)} but use \type {d/dx f(x)} instead. After
+%D all we're not in a compact coding cq.\ parser challenge.
+%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2} nor \type
+%D {root3x} (although the second one kind of works). A bit of defensive coding
+%D does not hurt.
+%D \item We can process \type {a/b/c/d} but it's not compatible with the default
+%D behaviour of \ASCIIMATH. Use grouping instead. Yes, we do support the somewhat
+%D nonstandard grouping token mix.
+%D \item You should use explicit \type {text(..)} directives as one can never be sure
+%D what is a reserved word and not.
%D \stopitemize
%D
-%D Actually, as the only parsing sensitive elements of \TEX\ are
-%D fractions (\type {\over} and friends, a restricted use of \TEX\
-%D coding is probably as comprehensive and parseble.
-%D
-%D The webpage with examples served as starting point so anything beyond
+%D Actually, as the only parsing sensitive elements of \TEX\ are fractions (\type {\over}
+%D and friends, a restricted use of \TEX\ coding is probably as comprehensive and
+%D parsable. The webpage with examples served as starting point so anything beyond
%D what can be found there isn't supported.
+%D \stopnarrower
+%D
+%D Then in 2014 something bad happened. Following the fashion of minimal encoding
+%D (which of course means messy encoding of complex cases and which can make authors
+%D sloppy too) the web based support workflow of the mentioned project ran into some
+%D limitations and magically one day all carefully coded \MATHML\ was converted into
+%D \ASCIIMATH. As there was no way to recover the original thousands of files and
+%D tens of thousands of formulas we were suddenly stuck with \ASCIIMATH. Because the
+%D conversion had be done automagically, we also saw numerous errors and were forced
+%D to come up with some methods to check formulas. Because \MATHML\ poses some
+%D restrictions it has predictable rendering; \ASCIIMATH\ on the other hand enforces
+%D no structure. Also, because \MATHML\ has to be valid \XML\ it always processes.
+%D Of course, during the decade that the project had run we also had to build in
+%D some catches for abuse but at least we had a relatively stable and configurable
+%D subsystem. So, in order to deal with less predictable cases as well as extensive
+%D checking, a new \ASCIIMATH\ parser was written, one that could also be used to
+%D trace bad coding.
+%D
+%D Because the formal description is incomplete, and because some links to resources
+%D are broken, and because some testing on the web showed that sequences of characters
+%D are interpreted that were not mentioned anywhere (visible), and because we noticed
+%D that the parser was dangerously tolerant, the new code is quite different from the
+%D old code.
+%D
+%D One needs to keep in mind that because spaces are optional, the only robust way to
+%D edit \ASCIIMATH\ is to use a \WYSIWYG\ editor and hope that the parser doesn't
+%D change ever. Keys are picked up from spaceless sequences and when not recognized
+%D a (sequence of) characters is considered to be variables. So, \type {xsqrtx} is
+%D valid and renders as \type {$x\sqrt{x}$}, \type {xx} becomes \type {×} (times)
+%D but \type {ac} becomes \type {$a c$} (a times c). We're lucky that \type {AC} is
+%D not turned into Alternating Current, but who knows what happens a few years from
+%D now. So, we do support this spaceless mess, but users are warned: best use a
+%D spacy sequence. The extra amount of spaces (at one byte each) an author has to
+%D include in his|/|her active writing time probably stays below the size of one
+%D holiday picture. Another complication is that numbers (in Dutch) use commas instead
+%D of periods, but vectors use commas as well. We also have some different names for
+%D functions which then can conflict with the expectations about collapsed variables.
+%D
+%D It must be noted that simplified encodings (that seem to be the fashion today)
+%D can demand that applications apply fuzzy logic to make something work out
+%D well. Because we have sequential data that gets rendered, sometimes wrong input
+%D gets obscured simply by the rendering: like the comma's in numbers as well as
+%D for separators (depending on space usage), or plain wrong symbols that somehow
+%D get a representation anyway. This in itself is more a side effect of trying to
+%D use the simplified encoding without applying rules (in the input) or to use it
+%D beyond its intended usage, which then of course can lead to adapted parsers and
+%D catches that themselves trigger further abuse. Imagine that instead of developing
+%D new cars, planes, space ships, mobile phones, computers we would have adapted
+%D horse cars, kites, firework, old fashioned phones and mechanical calculators in a
+%D similar way: patch upon patch of traditional means for sure would not have
+%D worked. So, when you use \ASCIIMATH\ best check immediately how it gets rendered
+%D in the browser as well as on paper. And be prepared to check the more complex
+%D code in the future again. We don't offer any guarantees but of course will try to
+%D keep up.
+%D
+%D In retrospect I sometimes wonder if the energy put into constantly adapting to
+%D the fashion of the day pays off. Probably not. It definitely doesn't pay off.
+%D
+%D More complex crap:
+%D
+%D 1: $x + \stackrel{comment}{\stackrel{\utfchar{"23DE}}{yyyyyyyy}} = y$ \blank
+%D 2: \asciimath{x + stackrel{\utfchar{"23DE}}{yyyyyyyy} = y} \blank
+%D 3: \asciimath{x + stackrel{yyyyyyyy}{\utfchar{"23DE}} = y} \blank
+%D 4: \asciimath{x + stackrel{"comment"}{stackrel{\utfchar{"23DE}}{yyyyyyyy}} = y} \blank
+
+\usemodule[mathml-basics]
+
+\startmodule[asciimath]
\unprotect
\writestatus{asciimath}{beware, this is an experimental (m4all only) module}
-\unexpanded\def\asciimath#1{\ctxmoduleasciimath{convert(\!!bs\detokenize{#1}\!!es,true)}}
+%D Hacks:
+
+\unexpanded\def\asciimathoptext #1{\ifmmode\mathoptext{#1}\else#1\fi}
+\unexpanded\def\asciimathoptexttraced #1{\ifmmode\mathoptext{\color[darkgreen]{#1}}\else\color[darkgreen]{#1}\fi}
+\unexpanded\def\asciimathstackrel #1#2{\mathematics{\mathop{\let\limits\relax\mover{#2}{#1}}}}
+\unexpanded\def\asciimathroot #1#2{\sqrt[#1]{#2}}
+\unexpanded\def\asciimathsqrt #1{\sqrt{#1}}
+
+%D The core commands:
+
+% if we need to set
+
+\installsetuponlycommandhandler {asciimath} {asciimath}
+
+\appendtoks
+ \ctxlua{moduledata.asciimath.setup {
+ splitmethod = "\asciimathparameter\c!splitmethod",
+ separator = "\asciimathparameter\c!separator",
+ }}%
+\to \everysetupasciimath
+
+\newtoks\everyasciimath
+
+% \appendtoks
+% \ignorediscretionaries
+% \to \everyasciimath
+
+\unexpanded\def\asciimath
+ {\doifnextoptionalelse\asciimath_yes\asciimath_nop}
+
+\def\asciimath_yes[#1]#2%
+ {\mathematics[#1]{\clf_justasciimath{\detokenize\expandafter{\normalexpanded{#2}}}}}
+
+\def\asciimath_nop#1%
+ {\mathematics{\clf_justasciimath{\detokenize\expandafter{\normalexpanded{#1}}}}}
+
+\unexpanded\def\ctxmoduleasciimath#1%
+ {\ctxlua{moduledata.asciimath.#1}}
+
+%D Some tracing commands. Using tex commands is 10\% slower than directly piping
+%D from \LUA, but this is non|-|critical code.
+
+\unexpanded\def\ShowAsciiMathLoad [#1]{\ctxlua{moduledata.asciimath.show.load("#1")}}
+\unexpanded\def\ShowAsciiMathIgnore[#1]{\ctxlua{moduledata.asciimath.show.ignore("#1")}}
+\unexpanded\def\ShowAsciiMathXML #1#2{\ctxlua{moduledata.asciimath.show.filter("#1","#2")}}
+\unexpanded\def\ShowAsciiMathStats {\ctxlua{moduledata.asciimath.show.statistics()}}
+\unexpanded\def\ShowAsciiMathMax {\ctxlua{moduledata.asciimath.show.max()}}
+
+\unexpanded\def\ShowAsciiMathResult#1%
+ {\begingroup
+ \blank
+ % if we are in vmode, we don't get positions i.e. a smaller tuc file
+ \inleft{\ttbf#1\hfill\ctxlua{moduledata.asciimath.show.count(#1,true)}}%
+ \dontleavehmode
+ \begingroup
+ \ttbf
+ \ctxlua{moduledata.asciimath.show.files(#1)}
+ \endgroup
+ \blank[medium,samepage]
+ \startcolor[darkblue]
+ \ctxlua{moduledata.asciimath.show.input(#1,true)}
+ \stopcolor
+ \blank[medium,samepage]
+ \doifmode{asciimath:show:dirty} {
+ \dorecurse{\ctxlua{moduledata.asciimath.show.nofdirty(#1)}} {
+ \ctxlua{moduledata.asciimath.show.dirty(\recurselevel,true)}
+ \blank[medium,samepage]
+ }
+ }
+ \ctxlua{moduledata.asciimath.show.result(#1)}
+ \blank
+ \endgroup}
+
+\unexpanded\def\ShowAsciiMathStart
+ {\begingroup
+ \let\asciimathoptext\asciimathoptexttraced
+ \setuptyping[\v!buffer][\c!before=,\c!after=]
+ \setupmargindata[\v!left][\c!style=]}
+
+\unexpanded\def\ShowAsciiMathStop
+ {\endgroup}
+
+\unexpanded\def\ShowAsciiMath
+ {\dodoubleempty\doShowAsciiMath}
+
+\unexpanded\def\doShowAsciiMath[#1][#2]%
+ {\iffirstargument
+ \ShowAsciiMathStart
+ \ShowAsciiMathLoad[#1]
+ \ifsecondargument
+ \ShowAsciiMathIgnore[#2]
+ \fi
+ \dorecurse{\ShowAsciiMathMax}{\ShowAsciiMathResult\recurselevel}
+ \page
+ \ShowAsciiMathStats
+ \ShowAsciiMathStop
+ \fi}
+
+\unexpanded\def\xmlShowAsciiMath#1#2%
+ {\iffirstargument
+ \ShowAsciiMathStart
+ \ShowAsciiMathXML{#1}{#2}%
+ \dorecurse{\ShowAsciiMathMax}{\ShowAsciiMathResult\recurselevel}
+ \page
+ \ShowAsciiMathStats
+ \ShowAsciiMathStop
+ \fi}
+
+\unexpanded\def\ShowAsciiMathSave
+ {\dosingleempty\doShowAsciiMathSave}
+
+\unexpanded\def\doShowAsciiMathSave[#1]%
+ {\ctxlua{moduledata.asciimath.show.save("#1")}}
\protect
+\startsetups asciimath:layout
+
+ \setupbodyfont
+ % [pagella,10pt]
+ [dejavu,10pt]
+
+ \setuplayout
+ [backspace=35mm,
+ leftmargin=20mm,
+ rightmargindistance=0pt,
+ leftmargindistance=5mm,
+ cutspace=1cm,
+ topspace=1cm,
+ bottomspace=1cm,
+ width=middle,
+ height=middle,
+ header=0cm,
+ footer=1cm]
+
+ \setupheadertexts
+ []
+
+ \setupfootertexts
+ [\currentdate][\pagenumber]
+
+ \setupalign
+ [flushleft,verytolerant,stretch]
+
+ \dontcomplain
+
+\stopsetups
+
+\stopmodule
+
\continueifinputfile{x-asciimath.mkiv}
-\enabletrackers[modules.asciimath.mapping]
+%D This will become an extra.
-\starttext
+\showframe
+
+\setups[asciimath:layout]
+
+% \enabletrackers[modules.asciimath.mapping]
+% \enabletrackers[modules.asciimath.detail]
+% \starttext
+% \enablemode[asciimath:show:dirty]
+% \ShowAsciiMath[e:/temporary/asciimath/*.xml]
+% % \ShowAsciiMathSave[e:/temporary/asciimath/asciimath.lua]
+% \stoptext
+
+\starttext
+\unexpanded\def\MyAsciiMath#1{\startformula\asciimath{#1}\stopformula}
\startlines
-\asciimath{x^2+y_1+z_12^34}
-\asciimath{sin^-1(x)}
-\asciimath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
-\asciimath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
-\asciimath{int_0^1 f(x)dx}
-\asciimath{int^1_0 f(x)dx}
-\asciimath{a//b}
-\asciimath{(a/b)/(d/c)}
-\asciimath{((a*b))/(d/c)}
-\asciimath{[[a,b],[c,d]]((n),(k))}
-\asciimath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
-\asciimath{{ (1,2), (x,(x + text(x))) }}
-\asciimath{{(1,2),(x,(x+text(x))),(x,text(x))}}
-\asciimath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
-\asciimath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
-\asciimath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
-\asciimath{<<a,b>> text{and} {:(x,y),(u,v):}}
-\asciimath{(a,b] = {x text(in) RR | a < x <= b}}
+\MyAsciiMath{x^2 / 10 // z_12^34 / 20}
+% \MyAsciiMath{{:{:x^2:} / 10:} // {:{:z_12^34 :} / 20:}}
+% \MyAsciiMath{x^2+y_1+z_12^34}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+% \MyAsciiMath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
+% \MyAsciiMath{int_0^1 f(x)dx}
+% \MyAsciiMath{int^1_0 f(x)dx}
+% \MyAsciiMath{a//b}
+% \MyAsciiMath{a//\alpha}
+% \MyAsciiMath{(a/b)/(d/c)}
+% \MyAsciiMath{((a*b))/(d/c)}
+% \MyAsciiMath{[[a,b],[c,d]]((n),(k))}
+% \MyAsciiMath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
+% \MyAsciiMath{{ (1,2), (x,(x + text(x))) }}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,text(x))}}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
+% \MyAsciiMath{<<a,b>> text{and} {:(x,y),(u,v):}}
+% \MyAsciiMath{(a,b] = {x text(in) RR | a < x <= b}}
+% \MyAsciiMath{a/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c)}
+% \MyAsciiMath{ (a/b) // (c/d) = ( (a * d) / (b * d) ) // ( (b * c) / (b * d) ) = (a * d) / (b * c)}
+% \MyAsciiMath{sin(x+1)_3^2/b / c/d}
+% \MyAsciiMath{{:{:sin(x+1)_3^2:}/b:} / {:c/d:}}
+% \MyAsciiMath{cos(a) + sin(x+1)_3^2/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c)}
+% \MyAsciiMath{S_(11)}
+% \MyAsciiMath{f(x)}
+% \MyAsciiMath{sin(x)}
+% \MyAsciiMath{sin(x+1)}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{sin(2x)}
+% \MyAsciiMath{a_2^2}
+% \MyAsciiMath{( (S_(11),S_(12),S_(1n)),(vdots,ddots,vdots),(S_(m1),S_(m2),S_(mn)) ]}
+% \MyAsciiMath{frac a b}
+% \MyAsciiMath{sin(x)/2 // cos(x)/pi}
+% \MyAsciiMath{a/13 // c/d}
+% \MyAsciiMath{a/b // c/d}
+% \MyAsciiMath{x}
+% \MyAsciiMath{x^2}
+% \MyAsciiMath{sqrt x}
+% \MyAsciiMath{sqrt (x)}
+% \MyAsciiMath{root 2 x}
+% \MyAsciiMath{x+x}
+% \MyAsciiMath{x/3}
+% \MyAsciiMath{x^2 / 10}
+% \MyAsciiMath{x^2 / 10 // z_12^34 / 20}
+% \MyAsciiMath{a^23}
+% \MyAsciiMath{a^{:b^23:}+3x}
+% \MyAsciiMath{a/b / c/d}
+% \MyAsciiMath{sin(x)/b / c/d}
+% \MyAsciiMath{sin(x)/b // c/d}
+% \MyAsciiMath{a/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c) }
+% \MyAsciiMath{{:{:x^2:} / 10:} // {:{:z_12^34 :} / 20:}}
+% \MyAsciiMath{x^2+y_1+z_12^34}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+% \MyAsciiMath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
+% \MyAsciiMath{int_0^1 f(x)dx}
+% \MyAsciiMath{int^1_0 f(x)dx}
+% \MyAsciiMath{2x}
+% \MyAsciiMath{a//b}
+% \MyAsciiMath{a//\alpha}
+% \MyAsciiMath{(a/b)/(d/c)}
+% \MyAsciiMath{((a*b))/(d/c)}
+% \MyAsciiMath{[[a,b],[c,d]]((n),(k))}
+% \MyAsciiMath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
+% \MyAsciiMath{{ (1,2), (x,(x + text(x))) }}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,text(x))}}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
+% \MyAsciiMath{<<a,b>> text{and} {:(x,y),(u,v):}}
+% \MyAsciiMath{(a,b] = {x text(in) RR | a < x <= b}}
+% \MyAsciiMath{x^-2}
+% \MyAsciiMath{x^2(x-1/16)=0}
+% \MyAsciiMath{y= ((1/4)) ^x}
+% \MyAsciiMath{log (0,002) / (log(1/4))}
+% \MyAsciiMath{x=ax+b \ oeps}
+% \MyAsciiMath{x=\ ^ (1/4) log(x)}
+% \MyAsciiMath{x=\ ^ (1/4) log(0 ,002 )= log(0,002) / (log(1/4))}
+% \MyAsciiMath{x^ (-1 1/2) =1/x^ (1 1/2)=1/ (x^1*x^ (1/2)) =1/ (xsqrt(x))}
+% \MyAsciiMath{x^2(10 -x)&gt;2 x^2}
+% \MyAsciiMath{x^4&gt;x}
\stoplines
-
\stoptext
diff --git a/tex/context/base/x-calcmath.lua b/tex/context/base/x-calcmath.lua
index 1394f3450..c96d8d0ac 100644
--- a/tex/context/base/x-calcmath.lua
+++ b/tex/context/base/x-calcmath.lua
@@ -16,6 +16,8 @@ local calcmath = { }
local moduledata = moduledata or { }
moduledata.calcmath = calcmath
+local context = context
+
local list_1 = {
"median", "min", "max", "round", "ln", "log",
"sin", "cos", "tan", "sinh", "cosh", "tanh"
@@ -46,8 +48,8 @@ local function freeze()
for k=1,#list_2 do
local v = list_2[k]
list_2_1[v .. "%((.-),(.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}_{%2}{%3}"
- list_2_2[v .. "%((.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}{%2}"
- list_2_3[v .. "%((.-)%)"] = "\\" .. upper(v) .. "{%1}"
+ list_2_2[v .. "%((.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}{%2}"
+ list_2_3[v .. "%((.-)%)"] = "\\" .. upper(v) .. "{%1}"
end
for k=1,#list_4 do
local v = list_4[k]
@@ -192,7 +194,6 @@ if false then
-- Df Dg {\rm f}^{\prime}
-- f() g() {\rm f}()
-
-- valid utf8
local S, P, R, C, V, Cc, Ct = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct
diff --git a/tex/context/base/x-cals.lua b/tex/context/base/x-cals.lua
index 36bc1aaba..3af6106d8 100644
--- a/tex/context/base/x-cals.lua
+++ b/tex/context/base/x-cals.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['x-cals'] = {
license = "see context related readme files"
}
+local next = next
local format, lower = string.format, string.lower
local xmlsprint, xmlcprint, xmlcollected, xmlelements = xml.sprint, xml.cprint, xml.collected, xml.elements
local n_todimen, s_todimen = number.todimen, string.todimen
@@ -65,8 +66,10 @@ end
local function getspecs(root, pattern, names, widths)
-- here, but actually we need this in core-ntb.tex
-- but ideally we need an mkiv enhanced core-ntb.tex
- local ignore_widths = cals.ignore_widths
- local shrink_widths = cals.shrink_widths
+ local ignore_widths = cals.ignore_widths
+-- local shrink_widths = at.option == "shrink" or cals.shrink_widths
+-- local stretch_widths = at.option == "stretch" or cals.stretch_widths
+ local shrink_widths = cals.shrink_widths
local stretch_widths = cals.stretch_widths
for e in xmlcollected(root,pattern) do
local at = e.at
diff --git a/tex/context/base/x-ct.lua b/tex/context/base/x-ct.lua
index 2dee985c3..9c647e8e7 100644
--- a/tex/context/base/x-ct.lua
+++ b/tex/context/base/x-ct.lua
@@ -122,6 +122,8 @@ function moduledata.ct.tabulate(root,namespace)
end
+-- todo: use content and caption
+
function moduledata.ct.combination(root,namespace)
if not root then
diff --git a/tex/context/base/x-html.mkiv b/tex/context/base/x-html.mkiv
new file mode 100644
index 000000000..e1806eb9e
--- /dev/null
+++ b/tex/context/base/x-html.mkiv
@@ -0,0 +1,379 @@
+%D \module
+%D [ file=x-html,
+%D version=2011.02.03, % adapted 2014.11.08
+%D title=\CONTEXT\ Modules,
+%D subtitle=HTML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startmodule[html]
+
+%D Usage:
+%D
+%D \starttyping
+%D \xmlregistersetup{xml:html:basics}
+%D \xmlregistersetup{xml:html:tables}
+%D \stoptyping
+
+% \xmlsetsetup{#1}{(p|span)[@lang]}{xml:html:lang}
+%
+% \startxmlsetups xml:html:lang
+% \begingroup
+% \language[\xmlatt{#1}{lang}]
+% \xmlsetup{#1}{xml:html:\xmltag{#1}}
+% \endgroup
+% \stopxmlsetups
+
+\unprotect
+
+\definehighlight[b] [\c!command=\v!no,\c!style=\v!bold]
+\definehighlight[i] [\c!command=\v!no,\c!style=\v!italic]
+\definehighlight[bi] [\c!command=\v!no,\c!style=\v!bolditalic]
+\definehighlight[em] [\c!command=\v!no,\c!style=\em]
+\definehighlight[tt] [\c!command=\v!no,\c!style=\v!mono]
+\definehighlight[strong][\c!command=\v!no,\c!style=\v!bold]
+\definehighlight[u] [\c!command=\v!no,\c!style=\directsetbar{\v!underbar}]
+\definehighlight[code] [\c!command=\v!no,\c!style=\v!mono]
+\definehighlight[pre] [\c!command=\v!no]
+
+\protect
+
+% todo: pre
+
+\startxmlsetups xml:html:basics
+ \xmlsetsetup{#1}{p|br|b|i|u|em|tt|strong|ul|ol|li|table|thead|tbody|tfoot|tr|td|th|span|img}{xml:html:*}
+ \xmlsetsetup{#1}{b/i}{xml:html:bi}
+ \xmlsetsetup{#1}{i/b}{xml:html:bi}
+ \xmlstripanywhere{#1}{!pre}
+\stopxmlsetups
+
+\startxmlsetups xml:html:tables
+ \xmlsetsetup{#1}{table|thead|tbody|tfoot|tr|td|th}{xml:html:*}
+\stopxmlsetups
+
+\startxmlsetups xml:html:p
+ \xmldoifnotselfempty {#1} {
+ \dontleavehmode
+ \ignorespaces
+ \xmlflush{#1}
+ \removeunwantedspaces
+ }
+ \par
+\stopxmlsetups
+
+\startxmlsetups xml:html:br
+ \crlf
+\stopxmlsetups
+
+\startxmlsetups xml:html:b
+ \directhighlight{b}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:i
+ \directhighlight{i}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:bi
+ \directhighlight{bi}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:em
+ \directhighlight{em}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:tt
+ \directhighlight{tt}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:strong
+ \directhighlight{strong}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:u
+ \directhighlight{u}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:ul
+ \startitemize[packed]
+ \xmlflush{#1}
+ \stopitemize
+\stopxmlsetups
+
+\startxmlsetups xml:html:ol
+ \startitemize[packed,n]
+ \xmlflush{#1}
+ \stopitemize
+\stopxmlsetups
+
+\startxmlsetups xml:html:li
+ \startitem
+ \xmlflush{#1}
+ \stopitem
+\stopxmlsetups
+
+\startxmlsetups xml:html:code
+ \directhighlight{code}{\xmlflushspacewise{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:pre
+ \directhighlight{pre}{\xmlflushspacewise{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:span
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:html:img
+ \ifhmode
+ \dontleavehmode
+ \externalfigure[\xmlatt{#1}{src}]
+ \else
+ \startlinecorrection
+ \externalfigure[\xmlatt{#1}{src}]
+ \stoplinecorrection
+ \fi
+\stopxmlsetups
+
+% tables, maybe we need a generic html table module
+%
+% todo: align
+
+% beware, the padding code is somewhat experimental, eventually the
+% table will be done in cld code
+%
+% we can also use \xmlmap for border etc
+
+\starttexdefinition cssgetsinglepadding #1
+ \ctxlua {
+ context((moduledata.css.padding(
+ "#1",
+ \number\dimexpr0.1ex,
+ \number\dimexpr0.01\hsize,
+ \number\dimexpr1ex,
+ \number\dimexpr1em
+ ))) % returns 4 values therefore ()
+ }sp
+\stoptexdefinition
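+
+% a hedged usage sketch (the exact syntax that moduledata.css.padding accepts
+% is an assumption here): a cellpadding value like "4" or "1em" fed through
+% \cssgetsinglepadding expands into a dimension ending in sp; the extra () keep
+% only the first of the four values (presumably top, right, bottom, left) that
+% the css helper returns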
+
+\startxmlsetups xml:html:table
+ \edef\CellPadding{\xmlatt{#1}{cellpadding}}
+ \ifx\CellPadding\empty
+ \edef\CellPadding{.25ex}
+ \else
+ \edef\CellPadding{\cssgetsinglepadding\CellPadding}
+ \fi
+ \startlinecorrection[blank]
+ \doifelse {\xmlatt{#1}{border}} {0} {
+ \bTABLE[frame=off,offset=\CellPadding]
+ \xmlflush{#1}
+ \eTABLE
+ } {
+ \bTABLE[offset=\CellPadding]
+ \xmlflush{#1}
+ \eTABLE
+ }
+ \stoplinecorrection
+\stopxmlsetups
+
+\startxmlsetups xml:html:thead
+ \bTABLEhead
+ \xmlflush{#1}
+ \eTABLEhead
+\stopxmlsetups
+
+\startxmlsetups xml:html:tbody
+ \bTABLEbody
+ \xmlflush{#1}
+ \eTABLEbody
+\stopxmlsetups
+
+\startxmlsetups xml:html:tfoot
+ \bTABLEfoot
+ \xmlflush{#1}
+ \eTABLEfoot
+\stopxmlsetups
+
+\startxmlsetups xml:html:tr
+ \bTR[ny=\xmlattdef{#1}{rowspan}{1}]
+ \xmlflush{#1}
+ \eTR
+\stopxmlsetups
+
+\startxmlsetups xml:html:td
+ \bTD[nx=\xmlattdef{#1}{colspan}{1}]
+ \xmlflush{#1}
+ \eTD
+\stopxmlsetups
+
+\startxmlsetups xml:html:th
+ \bTH[nx=\xmlattdef{#1}{colspan}{1}]
+ \xmlflush{#1}
+ \eTH
+\stopxmlsetups
+
+% \xmlregistersetup{xml:html:basics}
+
+%D For old times sake:
+
+\startxmlsetups xml:setups:common
+ \xmlsetup{#1}{xml:html:basics}
+ \xmlsetup{#1}{xml:html:tables}
+% \ifconditional\qmlcleanuptwo
+% \xmlsetsetup{#1}{html/br[index() == 1]}{xml:noppes:1}
+% \xmlsetsetup{#1}{html/p[index() == lastindex()]/br[index() == lastindex()]}{xml:noppes:2}
+% \xmlsetsetup{#1}{html/br[index() == lastindex()]}{xml:noppes:3}
+% \xmlsetsetup{#1}{br[name(1) == 'img']}{xml:noppes}
+% \xmlsetsetup{#1}{br[name(1) == 'br' and name(2) == 'img']}{xml:noppes}
+% % \xmlsetsetup{#1}{br/following-sibling::img[position()==1]}{xml:noppes}
+% \fi
+\stopxmlsetups
+
+\stopmodule
+
+\continueifinputfile{x-html.mkiv}
+
+\xmlregistersetup{xml:html:basics}
+\xmlregistersetup{xml:html:tables}
+
+\startxmlsetups xml:whatever
+ \xmlsetsetup {#1} {
+ html|body
+ } {xml:html:*}
+\stopxmlsetups
+
+\xmlregisterdocumentsetup{main}{xml:whatever}
+
+\startxmlsetups xml:html:html
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:html:body
+ \xmlflush{#1}
+\stopxmlsetups
+
+\setuphead[subject][page=yes,style=\bfa]
+
+\starttexdefinition ShowExample#1
+ \startsubject[title=#1]
+ \typebuffer[#1]
+ \starttextrule{result}
+ \xmlprocessbuffer{main}{#1}{}
+ \stoptextrule
+ \stopsubject
+\stoptexdefinition
+
+\starttext
+
+\startbuffer[test 1]
+<html><body>
+<p>test</p>
+<p/>
+<p>test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 2]
+<html><body>
+<p>test (after this an lf)
+test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 3]
+<html><body>
+<p>test (after this an lf with an empty line)
+
+test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 4]
+<html><body>
+<p>test (after this an lf with two empty lines)
+
+
+test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 5]
+<html><body>
+<p>test (after this a br, no lf)<br/> test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 6]
+<html><body>
+<p>test (after this a br with an lf)<br/>
+test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 7]
+<html><body>
+<p>test (after this a br with an lf and an empty line)<br/>
+
+test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 8]
+<html><body>
+<p>test (after this a br with an lf and two empty lines)<br/>
+
+
+test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 9]
+<html><body>
+<p>test (after this bold) <b>bold</b> test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 10]
+<html><body>
+<p>test (after this an lf with bold)
+<b>bold <u>underlined</u></b> test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 11]
+<html><body>
+<p>test (after this an lf with an empty line and bold)
+
+<b>bold</b> test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 12]
+<html><body>
+<p>test (after this an lf with an empty line and an lf inside bold)
+
+<b>
+bold
+</b> test</p>
+</body></html>
+\stopbuffer
+
+\startbuffer[test 13]
+<html><body>
+<p>test (after this an lf with an empty line and an lf and an empty line inside bold)
+
+<b>
+
+bold
+
+</b> test</p>
+</body></html>
+\stopbuffer
+
+\dorecurse{13}{\ShowExample{test #1}}
+
+\stoptext
diff --git a/tex/context/base/x-math-svg.lua b/tex/context/base/x-math-svg.lua
new file mode 100644
index 000000000..8a6288167
--- /dev/null
+++ b/tex/context/base/x-math-svg.lua
@@ -0,0 +1,176 @@
+if not modules then modules = { } end modules ['x-math-svg'] = {
+ version = 1.001,
+ comment = "companion to x-math-svg.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tostring, type, next = tostring, type, next
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
+
+local xmlfirst = xml.first
+local xmlconvert = xml.convert
+local xmlload = xml.load
+local xmlsave = xml.save
+local xmlcollected = xml.collected
+local xmldelete = xml.delete
+
+local loadtable = table.load
+local savetable = table.save
+
+local replacesuffix = file.replacesuffix
+local addsuffix = file.addsuffix
+local removefile = os.remove
+local isfile = lfs.isfile
+
+local formatters = string.formatters
+
+moduledata = moduledata or table.setmetatableindex("table")
+local svgmath = moduledata.svgmath -- autodefined
+
+local namedata = { }
+local pagedata = { }
+
+local statusname = "x-math-svg-status.lua"
+local pdfname = "x-math-svg.pdf"
+
+local pdftosvg = os.which("mudraw")
+
+local f_make_tex = formatters[ [[context --global kpse:x-math-svg.mkvi --inputfile="%s" --svgstyle="%s" --batch --noconsole --once --purgeall]] ]
+local f_make_svg = formatters[ [[mudraw -o "math-%%d.svg" "%s" 1-9999]] ]
+
+----- f_inline = formatters[ [[<div class='math-inline' style='vertical-align:%p'></div>]] ]
+local f_inline = formatters[ [[<div class='math-inline'></div>]] ]
+local f_display = formatters[ [[<div class='math-display'></div>]] ]
+local f_style = formatters[ [[vertical-align:%p]] ]
+
+local f_math_tmp = formatters[ [[math-%i]] ]
+
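+-- a rough sketch of the two pass flow below (just a summary, not an extra
+-- api): convert() runs a context pass on x-math-svg.mkvi over the given file;
+-- in that pass process() buffers each unique math blob and has it typeset on
+-- its own page (presumably ending up in x-math-svg.pdf), while register()
+-- records per page depth/mode data that process() saves to the status file;
+-- convert() then calls mudraw to render the pages as math-<n>.svg files and
+-- splices the stripped svg back into the tree in place of the math elements
+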
+function svgmath.process(filename)
+ if not filename then
+ -- no filename given
+ return
+ elseif not isfile(filename) then
+ -- invalid filename
+ return
+ end
+ local index = 0
+ local page = 0
+ local blobs = { }
+ local root = xmlload(filename)
+ for mth in xmlcollected(root,"math") do
+ index = index + 1
+ local blob = tostring(mth)
+ if blobs[blob] then
+ context.ReuseSVGMath(index,blobs[blob])
+ else
+ page = page + 1
+ buffers.assign(f_math_tmp(page),blob)
+ context.MakeSVGMath(index,page,mth.at.display)
+ blobs[blob] = page
+ end
+ end
+ context(function()
+ -- for tracing purposes:
+ for mathdata, pagenumber in next, blobs do
+ local p = pagedata[pagenumber]
+ p.mathml = mathdata
+ p.number = pagenumber
+ end
+ --
+ savetable(statusname, {
+ pagedata = pagedata,
+ namedata = namedata,
+ })
+ end)
+end
+
+function svgmath.register(index,page,specification)
+ if specification then
+ pagedata[page] = specification
+ end
+ namedata[index] = page
+end
+
+function svgmath.convert(filename,svgstyle)
+ if not filename then
+ -- no filename given
+ return false, "no filename"
+ elseif not isfile(filename) then
+ -- invalid filename
+ return false, "invalid filename"
+ elseif not pdftosvg then
+ return false, "mudraw is not installed"
+ end
+
+ os.execute(f_make_tex(filename,svgstyle))
+
+ local data = loadtable(statusname)
+ if not data then
+ -- invalid tex run
+ return false, "invalid tex run"
+ elseif not next(data) then
+ return false, "no converson needed"
+ end
+
+ local pagedata = data.pagedata
+ local namedata = data.namedata
+
+ os.execute(f_make_svg(pdfname))
+
+ local root = xmlload(filename)
+ local index = 0
+ local done = { }
+ local unique = 0
+
+ local between = (1-P("<"))^1/""
+ local strip = Cs((
+ (P("<text") * ((1-P("</text>"))^1) * P("</text>")) * between^0 / "" +
+ P(">") * between +
+ P(1)
+ )^1)
+
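+    -- strip removes <text>...</text> elements and any character data between
+    -- tags from the svg data, roughly what the commented gsub/xmldelete lines
+    -- below would otherwise do
+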
+ for mth in xmlcollected(root,"m:math") do
+ index = index + 1
+ local page = namedata[index]
+ if done[page] then
+ mth.__p__.dt[mth.ni] = done[page]
+ else
+ local info = pagedata[page]
+ local depth = info.depth
+ local mode = info.mode
+ local svgname = addsuffix(f_math_tmp(page),"svg")
+ local action = mode == "inline" and f_inline or f_display
+ -- local x_div = xmlfirst(xmlconvert(action(-depth)),"/div")
+ local x_div = xmlfirst(xmlconvert(action()),"/div")
+ local svgdata = io.loaddata(svgname)
+ if not svgdata or svgdata == "" then
+ print("error in:",svgname,tostring(mth))
+ else
+ -- svgdata = string.gsub(svgdata,">%s<","")
+ svgdata = lpegmatch(strip,svgdata)
+ local x_svg = xmlfirst(xmlconvert(svgdata),"/svg")
+ -- xmldelete(x_svg,"text")
+if mode == "inline" then
+ x_svg.at.style = f_style(-depth)
+end
+
+ x_div.dt = { x_svg }
+ mth.__p__.dt[mth.ni] = x_div -- use helper
+ end
+ done[page] = x_div
+ unique = unique + 1
+ end
+ end
+
+-- for k, v in next, data do
+-- removefile(addsuffix(k,"svg"))
+-- end
+-- removefile(statusname)
+-- removefile(pdfname)
+
+ xmlsave(root,filename)
+
+ return true, index, unique
+end
diff --git a/tex/context/base/x-mathml-basics.mkiv b/tex/context/base/x-mathml-basics.mkiv
new file mode 100644
index 000000000..e166995b0
--- /dev/null
+++ b/tex/context/base/x-mathml-basics.mkiv
@@ -0,0 +1,276 @@
+% macros=mkvi
+
+% makes sense (but rel vs op ...):
+
+% \unexpanded\def\stackrel#1#2{\mathematics{\mathop{\let\limits\relax\mover{#2}{#1}}}}
+
+% this can become a core helper
+
+% we could do all of them in lua
+
+\startluacode
+local find = string.find
+local lpegmatch = lpeg.match
+
+local splitter = lpeg.Ct(lpeg.C(lpeg.patterns.nestedbraces + lpeg.patterns.utf8character)^1)
+
+function commands.xmfenced(left,middle,right,content)
+ local l = left ~= "" and left or "("
+ local r = right ~= "" and right or ")"
+ local m = middle ~= "" and middle and lpegmatch(splitter,middle) or { "," }
+ local c = find(content,"{") and lpegmatch(splitter,content) or { content }
+ local n = #c
+ if n > 1 then
+ context("\\left%s",l)
+ for i=1,n do
+ if i > 1 then
+ context("%s %s",m[i] or m[#m],c[i])
+ else
+ context(c[i])
+ end
+ end
+ context("\\right%s",r)
+ else
+ context("\\left%s %s \\right%s",l,content,r)
+ end
+end
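+
+-- a hedged example of what this produces (assuming patterns.nestedbraces
+-- captures the brace groups themselves): xmfenced("","","","{a}{b}") comes out
+-- as \left( {a} , {b} \right) while plain content like "1+a" is wrapped as
+-- \left( 1+a \right)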
+
+\stopluacode
+
+\unprotect
+
+\unexpanded\def\mexecuteifdefined#1%
+ {\ifx#1\empty
+ \expandafter\secondoftwoarguments
+ \else\ifcsname#1\endcsname
+ \doubleexpandafter\firstoftwoarguments
+ \else
+ \doubleexpandafter\secondoftwoarguments
+ \fi\fi
+ {\csname#1\endcsname}}
+
+% mrow
+
+\let\mrow\mathematics
+
+% msub msup msubsup
+
+\starttexdefinition msub #1#2
+ \mathematics {
+ #1_{#2}
+ }
+\stoptexdefinition
+
+\starttexdefinition msup #1#2
+ \mathematics {
+ #1^{#2}
+ }
+\stoptexdefinition
+
+\starttexdefinition msubsup #1#2#3
+ \mathematics {
+ #1_{#2}^{#3}
+ }
+\stoptexdefinition
+
+% mn mo mi
+
+\let\mn\mathematics
+\let\mo\mathematics
+\let\mi\mathematics
+
+% ms mtext
+
+\starttexdefinition ms #1
+ \text {
+ "#1"
+ }
+\stoptexdefinition
+
+\starttexdefinition mtext #1
+ \text {
+ #1
+ }
+\stoptexdefinition
+
+% mover
+
+\starttexdefinition unexpanded moverabove #1
+ \edef\movercommand{\utfmathfiller\movertoken}
+ \mexecuteifdefined\movercommand {#1} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded moverbase #1
+ \edef\mbasecommand{\utfmathfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand {#1}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded moverbasefiller #1#2
+ \edef\mbasecommand{e\utfmathcommandfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand \relax {#2} {}
+\stoptexdefinition
+\starttexdefinition unexpanded moveraccent #1#2
+ \edef\movercommand{\utfmathcommandabove\movertoken}
+ \mexecuteifdefined\movercommand \relax {#1}
+\stoptexdefinition
+\starttexdefinition unexpanded movertext #1#2
+ % \mathtriplet {\mathstylehbox{#1}} {#2} {}
+ \mathtriplet {\mathematics{#1}} {#2} {}
+\stoptexdefinition
+\starttexdefinition unexpanded moveraccentchecker #1#2
+ \edef\movertoken{\tochar{#2}}
+ \doifelseutfmathabove\movertoken \moveraccent \movertext {#1}{#2}
+\stoptexdefinition
+
+\starttexdefinition unexpanded mover #1#2
+ \mathematics {
+ \edef\mbasetoken{\tochar{#1}}
+ \doifelseutfmathfiller\mbasetoken \moverbasefiller \moveraccentchecker {#1}{#2}
+ }
+\stoptexdefinition
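+
+% a hedged sketch of the dispatch above: when the first argument is an
+% extensible filler (an arrow, say) the moverbasefiller path is taken;
+% otherwise the second argument is checked for an above/accent character
+% (moveraccent) and plain text falls back to movertext and a \mathtriplet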
+
+% munder
+
+\starttexdefinition unexpanded munderbelow #1
+ \edef\mundercommand{\utfmathfiller\mundertoken}
+ \mexecuteifdefined\mundercommand {#1} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded munderbase #1
+ \edef\mbasecommand{\utfmathfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand {#1}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded munderbasefiller #1#2
+ \edef\mbasecommand{e\utfmathcommandfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand \relax {#2} {}
+\stoptexdefinition
+\starttexdefinition unexpanded munderaccent #1#2
+ \edef\mundercommand{\utfmathcommandbelow\mundertoken}
+ \mexecuteifdefined\mundercommand \relax {#1}
+\stoptexdefinition
+\starttexdefinition unexpanded mundertext #1#2
+ % \mathtriplet {\mathstylehbox{#1}} {} {#2}
+ \mathtriplet {\mathematics{#1}} {} {#2}
+\stoptexdefinition
+\starttexdefinition unexpanded munderaccentchecker #1#2
+ \edef\mundertoken{\tochar{#2}}
+ \doifelseutfmathbelow\mundertoken \munderaccent \mundertext {#1}{#2}
+\stoptexdefinition
+
+\starttexdefinition unexpanded munder #1#2
+ \mathematics {
+ \edef\mbasetoken{\tochar{#1}}
+ \doifelseutfmathfiller\mbasetoken \munderbasefiller \munderaccentchecker {#1}{#2}
+ }
+\stoptexdefinition
+
+% munderover
+
+% mfenced
+
+% \mfenced{x,y}
+% \mfenced{{x}{y}}
+% \mfenced[separators]{{x}{y}}
+% \mfenced[left][right]{{x}{y}}
+% \mfenced[left][separators][right]{{x}{y}}
+
+\starttexdefinition unexpanded mfenced
+ \dotripleempty\do_mfenced
+\stoptexdefinition
+
+\starttexdefinition unexpanded do_mfenced [#1][#2][#3]#4
+ \mathematics {
+ \ctxcommand{xmfenced(
+ \ifthirdargument "#1","#2","#3"\else
+ \ifsecondargument "#1",",","#2"\else
+ \iffirstargument "(","#1",")"\else
+ "(",",",")"\fi\fi\fi
+ ,"#4")}
+ }
+\stoptexdefinition
+
+% mfrac
+
+\starttexdefinition unexpanded mfrac #1#2
+ \mathematics {
+ \frac{#1}{#2}
+ }
+\stoptexdefinition
+
+% mroot msqrt
+
+\starttexdefinition unexpanded mroot #1#2
+ \mathematics {
+ \sqrt[#1]{#2}
+ }
+\stoptexdefinition
+
+\starttexdefinition unexpanded msqrt #1
+ \mathematics {
+ \sqrt{#1}
+ }
+\stoptexdefinition
+
+% menclose
+
+% merror
+
+% mglyph
+
+% mmultiscripts
+
+% mpadded
+
+% mphantom
+
+% mspace
+
+% mstyle
+
+% mtable mtr mlabeledtr mtd
+
+% maction
+
+% semantics
+
+\protect
+
+\continueifinputfile{x-mathml-basics.mkiv}
+
+\starttext
+
+$\mfenced{1+a}$\par
+$\mfenced[,]{1+a}$\par
+$\mfenced[,]{{1+a}{1+b}}$\par
+
+% $\mover{←}{test}$\par
+% $\mover{\utfchar{"2190}}{test}$\par
+% $\mover{e:leftarrow}{test}$\par
+% $\mover{x:2190}{test}$\par
+
+% $\mover{test}{⏞}$\par
+% $\mover{test}{\utfchar{"23DE}}$\par
+% $\mover{test}{e:overbrace}$\par
+% $\mover{test}{x:23DE}$\par
+% $\mover{test}{over}$\par
+
+% \mover{test}{⏞}\par
+% \mover{test}{\utfchar{"23DE}}\par
+% \mover{test}{e:overbrace}\par
+% \mover{test}{x:23DE}\par
+
+% $\munder{←}{test}$\par
+% $\munder{\utfchar{"2190}}{test}$\par
+% $\munder{e:leftarrow}{test}$\par
+% $\munder{x:2190}{test}$\par
+
+% $\munder{test}{⏟}$\par
+% $\munder{test}{\utfchar{"23DF}}$\par
+% $\munder{test}{e:underbrace}$\par
+% $\munder{test}{x:23DF}$\par
+% $\munder{test}{under}$\par
+
+% \math{{\msup{x}{2}\mo{+}\mn{2}\mi{x}\mo{+}\mi{b}}}
+
+% \mrow{\msup{x}{2}\mo{+}\mn{2}\mi{x}\mo{+}\mi{b}}
+
+\stoptext
diff --git a/tex/context/base/x-mathml-html.mkiv b/tex/context/base/x-mathml-html.mkiv
new file mode 100644
index 000000000..2ac7d3cba
--- /dev/null
+++ b/tex/context/base/x-mathml-html.mkiv
@@ -0,0 +1,40 @@
+%D \module
+%D [ file=x-mathml,
+%D version=2014.05.18,
+%D title=\CONTEXT\ XML Modules,
+%D subtitle=\MATHML\ embedded HTML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% maybe some more
+
+\startmodule [mathml-html]
+
+\startxmlsetups mml:html:b
+ \bold{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html:i
+ \italic{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html:tt
+ \mono{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html:em
+ \emphasized{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html
+ \xmlsetsetup{#1}{mml:b|mml:i|mml:tt|mml:em}{mml:html:*}
+\stopxmlsetups
+
+\xmlregistersetup{mml:html}
+
+\stopmodule
diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua
index cd60e756d..a0db339bc 100644
--- a/tex/context/base/x-mathml.lua
+++ b/tex/context/base/x-mathml.lua
@@ -6,20 +6,56 @@ if not modules then modules = { } end modules ['x-mathml'] = {
license = "see context related readme files"
}
--- This needs an upgrade to the latest greatest mechanisms.
+-- This needs an upgrade to the latest greatest mechanisms. But ... it
+-- probably doesn't pay off, as no mathml support ever did.
local type, next = type, next
-local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub
+local formatters, lower, find, gsub, match = string.formatters, string.lower, string.find, string.gsub, string.match
local strip = string.strip
-local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content
+local xmlsprint, xmlcprint, xmltext, xmlcontent, xmlempty = xml.sprint, xml.cprint, xml.text, xml.content, xml.empty
+local lxmlcollected, lxmlfilter = lxml.collected, lxml.filter
local getid = lxml.getid
-local utfchar, utfcharacters, utfvalues = utf.char, utf.characters, utf.values
-local lpegmatch = lpeg.match
+local utfchar, utfcharacters, utfvalues, utfsplit, utflen = utf.char, utf.characters, utf.values, utf.split, utf.len
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, Cs = lpeg.P, lpeg.Cs
local mathml = { }
moduledata.mathml = mathml
lxml.mathml = mathml -- for the moment
+local context = context
+
+local ctx_enabledelimiter = context.enabledelimiter
+local ctx_disabledelimiter = context.disabledelimiter
+local ctx_xmlflush = context.xmlflush -- better xmlsprint
+
+local ctx_halign = context.halign
+local ctx_noalign = context.noalign
+local ctx_bgroup = context.bgroup
+local ctx_egroup = context.egroup
+local ctx_crcr = context.crcr
+
+local ctx_bTABLE = context.bTABLE
+local ctx_eTABLE = context.eTABLE
+local ctx_bTR = context.bTR
+local ctx_eTR = context.eTR
+local ctx_bTD = context.bTD
+local ctx_eTD = context.eTD
+
+local ctx_mn = context.mn
+local ctx_mi = context.mi
+local ctx_mo = context.mo
+local ctx_startimath = context.startimath
+local ctx_ignorespaces = context.ignorespaces
+local ctx_removeunwantedspaces = context.removeunwantedspaces
+local ctx_stopimath = context.stopimath
+
+local ctx_mmlapplycsymbol = context.mmlapplycsymbol
+
+local ctx_mathopnolimits = context.mathopnolimits
+local ctx_left = context.left
+local ctx_right = context.right
+
-- an alternative is to remap to private codes, where we can have
-- different properties .. to be done; this will move and become
-- generic; we can then make the private ones active in math mode
@@ -62,6 +98,7 @@ local o_replacements = { -- in main table
["{"] = "\\mmlleftdelimiter \\lbrace",
["}"] = "\\mmlrightdelimiter\\rbrace",
["|"] = "\\mmlleftorrightdelimiter\\vert",
+ -- ["."] = "\\mmlleftorrightdelimiter.",
["/"] = "\\mmlleftorrightdelimiter\\solidus",
[doublebar] = "\\mmlleftorrightdelimiter\\Vert",
["("] = "\\mmlleftdelimiter(",
@@ -82,8 +119,9 @@ local o_replacements = { -- in main table
-- [utfchar(0xF103C)] = "\\mmlleftdelimiter<",
[utfchar(0xF1026)] = "\\mmlchar{38}",
+ [utfchar(0x02061)] = "", -- function applicator sometimes shows up in font
-- [utfchar(0xF103E)] = "\\mmlleftdelimiter>",
-
+ -- [utfchar(0x000AF)] = '\\mmlchar{"203E}', -- 0x203E
}
local simpleoperatorremapper = utf.remapper(o_replacements)
@@ -466,20 +504,39 @@ function mathml.stripped(str)
context(strip(str))
end
+local p_entity = (P("&") * ((1-P(";"))^0) * P(";"))
+local p_utfchar = lpegpatterns.utf8character
+local p_spacing = lpegpatterns.whitespace^1
+
+local p_mn = Cs((p_entity/"" + p_spacing/utfchar(0x205F) + p_utfchar/n_replacements)^0)
+local p_strip = Cs((p_entity/"" + p_utfchar )^0)
+local p_mi = Cs((p_entity/"" + p_utfchar/i_replacements)^0)
+
+-- function mathml.mn(id,pattern)
+-- -- maybe at some point we need to interpret the number, but
+-- -- currently we assume an upright font
+-- local str = xmlcontent(getid(id)) or ""
+-- local rep = gsub(str,"&.-;","")
+-- local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen)
+-- local rep = gsub(rep,".",n_replacements)
+-- ctx_mn(rep)
+-- end
+
function mathml.mn(id,pattern)
-- maybe at some point we need to interpret the number, but
-- currently we assume an upright font
- local str = xmlcontent(getid(id)) or ""
- local rep = gsub(str,"&.-;","")
- local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen)
- local rep = gsub(rep,".",n_replacements)
- context.mn(rep)
+ ctx_mn(lpegmatch(p_mn,xmlcontent(getid(id)) or ""))
end
+-- function mathml.mo(id)
+-- local str = xmlcontent(getid(id)) or ""
+-- local rep = gsub(str,"&.-;","") -- todo
+-- context(simpleoperatorremapper(rep) or rep)
+-- end
+
function mathml.mo(id)
- local str = xmlcontent(getid(id)) or ""
- local rep = gsub(str,"&.-;","") -- todo
- context(simpleoperatorremapper(rep))
+ local str = lpegmatch(p_strip,xmlcontent(getid(id)) or "")
+ context(simpleoperatorremapper(str) or str)
end
function mathml.mi(id)
@@ -491,34 +548,45 @@ function mathml.mi(id)
if n == 0 then
-- nothing to do
elseif n == 1 then
- local str = gsub(str[1],"&.-;","") -- bah
- local rep = i_replacements[str]
- if not rep then
- rep = gsub(str,".",i_replacements)
+ local first = str[1]
+ if type(first) == "string" then
+ -- local str = gsub(first,"&.-;","") -- bah
+ -- local rep = i_replacements[str]
+ -- if not rep then
+ -- rep = gsub(str,".",i_replacements)
+ -- end
+ local str = lpegmatch(p_strip,first)
+ local rep = i_replacements[str] or lpegmatch(p_mi,str)
+ context(rep)
+ -- ctx_mi(rep)
+ else
+ ctx_xmlflush(id) -- xmlsprint or so
end
- context(rep)
- -- context.mi(rep)
else
- context.xmlflush(id) -- xmlsprint or so
+ ctx_xmlflush(id) -- xmlsprint or so
end
else
- context.xmlflush(id) -- xmlsprint or so
+ ctx_xmlflush(id) -- xmlsprint or so
end
end
function mathml.mfenced(id) -- multiple separators
id = getid(id)
- local left, right, separators = id.at.open or "(", id.at.close or ")", id.at.separators or ","
- local l, r = l_replacements[left], r_replacements[right]
- context.enabledelimiter()
+ local at = id.at
+ local left = at.open or "("
+ local right = at.close or ")"
+ local separators = at.separators or ","
+ local l = l_replacements[left]
+ local r = r_replacements[right]
+ ctx_enabledelimiter()
if l then
context(l_replacements[left] or o_replacements[left] or "")
else
context(o_replacements["@l"])
context(left)
end
- context.disabledelimiter()
- local collected = lxml.filter(id,"/*") -- check the *
+ ctx_disabledelimiter()
+ local collected = lxmlfilter(id,"/*") -- check the *
if collected then
local n = #collected
if n == 0 then
@@ -526,7 +594,7 @@ function mathml.mfenced(id) -- multiple separators
elseif n == 1 then
xmlsprint(collected[1]) -- to be checked
else
- local t = utf.split(separators,true)
+ local t = utfsplit(separators,true)
for i=1,n do
xmlsprint(collected[i]) -- to be checked
if i < n then
@@ -545,35 +613,16 @@ function mathml.mfenced(id) -- multiple separators
end
end
end
- context.enabledelimiter()
+ ctx_enabledelimiter()
if r then
context(r_replacements[right] or o_replacements[right] or "")
else
context(right)
context(o_replacements["@r"])
end
- context.disabledelimiter()
+ ctx_disabledelimiter()
end
---~ local function flush(e,tag,toggle)
---~ if toggle then
---~ context("^{")
---~ else
---~ context("_{")
---~ end
---~ if tag == "none" then
---~ context("{}")
---~ else
---~ xmlsprint(e.dt)
---~ end
---~ if not toggle then
---~ context("}")
---~ else
---~ context("}{}")
---~ end
---~ return not toggle
---~ end
-
local function flush(e,tag,toggle)
if tag == "none" then
-- if not toggle then
@@ -593,7 +642,7 @@ end
function mathml.mmultiscripts(id)
local done, toggle = false, false
- for e in lxml.collected(id,"/*") do
+ for e in lxmlcollected(id,"/*") do
local tag = e.tg
if tag == "mprescripts" then
context("{}")
@@ -603,14 +652,14 @@ function mathml.mmultiscripts(id)
end
end
local done, toggle = false, false
- for e in lxml.collected(id,"/*") do
+ for e in lxmlcollected(id,"/*") do
local tag = e.tg
if tag == "mprescripts" then
break
elseif done then
toggle = flush(e,tag,toggle)
else
- xmlsprint(e.dt)
+ xmlsprint(e)
done = true
end
end
@@ -645,12 +694,12 @@ function mathml.mcolumn(root)
local tag = e.tg
if tag == "mi" or tag == "mn" or tag == "mo" or tag == "mtext" then
local str = xmltext(e)
- str = gsub(str,"&.-;","")
+ str = lpegmatch(p_strip,str)
for s in utfcharacters(str) do
m[#m+1] = { tag, s }
end
if tag == "mn" then
- local n = utf.len(str)
+ local n = utflen(str)
if n > numbers then
numbers = n
end
@@ -664,20 +713,20 @@ function mathml.mcolumn(root)
-- m[#m+1] = { tag, e }
end
end
- for e in lxml.collected(root,"/*") do
+ for e in lxmlcollected(root,"/*") do
local m = { }
matrix[#matrix+1] = m
if e.tg == "mrow" then
-- only one level
- for e in lxml.collected(e,"/*") do
+ for e in lxmlcollected(e,"/*") do
collect(m,e)
end
else
collect(m,e)
end
end
- context.halign()
- context.bgroup()
+ ctx_halign()
+ ctx_bgroup()
context([[\hss\startimath\alignmark\stopimath\aligntab\startimath\alignmark\stopimath\cr]])
for i=1,#matrix do
local m = matrix[i]
@@ -689,7 +738,7 @@ function mathml.mcolumn(root)
end
end
if mline then
- context.noalign([[\obeydepth\nointerlineskip]])
+ ctx_noalign([[\obeydepth\nointerlineskip]])
end
for j=1,#m do
local mm = m[j]
@@ -732,9 +781,9 @@ function mathml.mcolumn(root)
local nchr = n_replacements[chr]
context(nchr or chr)
end
- context.crcr()
+ ctx_crcr()
end
- context.egroup()
+ ctx_egroup()
end
local spacesplitter = lpeg.tsplitat(" ")
@@ -752,42 +801,47 @@ function mathml.mtable(root)
local framespacing = at.framespacing or "0pt"
local framespacing = at.framespacing or "-\\ruledlinewidth" -- make this an option
- context.bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing }
- for e in lxml.collected(root,"/(mml:mtr|mml:mlabeledtr)") do
- context.bTR()
+ ctx_bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing, background = "" } -- todo: use xtables and definextable
+ for e in lxmlcollected(root,"/(mml:mtr|mml:mlabeledtr)") do
+ ctx_bTR()
local at = e.at
local col = 0
local rfr = at.frame or (frames and frames [#frames])
local rra = at.rowalign or (rowaligns and rowaligns [#rowaligns])
local rca = at.columnalign or (columnaligns and columnaligns[#columnaligns])
local ignorelabel = e.tg == "mlabeledtr"
- for e in lxml.collected(e,"/mml:mtd") do -- nested we can use xml.collected
+ for e in lxmlcollected(e,"/mml:mtd") do -- nested we can use xml.collected
col = col + 1
if ignorelabel and col == 1 then
-- get rid of label, should happen at the document level
else
local at = e.at
- local rowspan, columnspan = at.rowspan or 1, at.columnspan or 1
+ local rowspan = at.rowspan or 1
+ local columnspan = at.columnspan or 1
local cra = rowalignments [at.rowalign or (rowaligns and rowaligns [col]) or rra or "center"] or "lohi"
local cca = columnalignments[at.columnalign or (columnaligns and columnaligns[col]) or rca or "center"] or "middle"
local cfr = frametypes [at.frame or (frames and frames [col]) or rfr or "none" ] or "off"
- context.bTD { align = format("{%s,%s}",cra,cca), frame = cfr, nx = columnspan, ny = rowspan }
- context.startimath()
- context.ignorespaces()
- xmlcprint(e)
- context.stopimath()
- context.removeunwantedspaces()
- context.eTD()
+ ctx_bTD { align = formatters["{%s,%s}"](cra,cca), frame = cfr, nx = columnspan, ny = rowspan }
+ if xmlempty(e,".") then
+ -- nothing, else hsize max
+ else
+ ctx_startimath()
+ -- ctx_ignorespaces()
+ xmlcprint(e)
+ -- ctx_removeunwantedspaces()
+ ctx_stopimath()
+ end
+ ctx_eTD()
end
end
-- if e.tg == "mlabeledtr" then
- -- context.bTD()
+ -- ctx_bTD()
-- xmlcprint(xml.first(e,"/!mml:mtd"))
- -- context.eTD()
+ -- ctx_eTD()
-- end
- context.eTR()
+ ctx_eTR()
end
- context.eTABLE()
+ ctx_eTABLE()
end
function mathml.csymbol(root)
@@ -798,14 +852,16 @@ function mathml.csymbol(root)
local full = hash.original or ""
local base = hash.path or ""
local text = strip(xmltext(root) or "")
- context.mmlapplycsymbol(full,base,encoding,text)
+ ctx_mmlapplycsymbol(full,base,encoding,text)
end
+local p = lpeg.Cs(((1-lpegpatterns.whitespace)^1 / "mml:enclose:%0" + (lpegpatterns.whitespace^1)/",")^1)
+
function mathml.menclosepattern(root)
root = getid(root)
local a = root.at.notation
if a and a ~= "" then
- context("mml:enclose:",(gsub(a," +",",mml:enclose:")))
+ context(lpegmatch(p,a))
end
end
@@ -816,8 +872,8 @@ end
function mathml.cpolar_a(root)
root = getid(root)
local dt = root.dt
- context.mathopnolimits("Polar")
- context.left(false,"(")
+ ctx_mathopnolimits("Polar")
+ ctx_left(false,"(")
for k=1,#dt do
local dk = dt[k]
if xml.is_element(dk,"sep") then
@@ -826,5 +882,15 @@ function mathml.cpolar_a(root)
xmlsprint(dk)
end
end
- context.right(false,")")
+ ctx_right(false,")")
+end
+
+-- crap .. maybe in char-def a mathml overload
+
+local mathmleq = {
+ [utfchar(0x00AF)] = utfchar(0x203E),
+}
+
+function mathml.extensible(chr)
+ context(mathmleq[chr] or chr)
end
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index ec8fd74e4..65a7223ce 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -1,4 +1,4 @@
-%D \modul
+%D \module
%D [ file=x-mathml,
%D version=2008.05.29,
%D title=\CONTEXT\ XML Modules,
@@ -16,12 +16,17 @@
% This module is under construction and will be cleaned up. We use a funny mix of
% xml, tex and lua. I could rewrite the lot but it also shows how context evolves.
%
-% I might en dup with a lua-only implementation some day.
+% I might end up with a lua-only implementation some day. I must find a good reason
+% to spend time on it. In fact, it might even be more messy.
%
% no m:text strip (needs checking, maybe nbsp is mandated)
%
% todo: more will be moved to lua (less hassle)
% todo: move left/right to the lua end
+%
+% this implementation looks like a hack ... this is because we deal with all weird cases we
+% ran into, including abuse that was supposed to render ok (even if it didn't in other
+% renderers) .. it was simply expected to work that way.
\writestatus{loading}{ConTeXt XML Macros / MathML Renderer}
@@ -51,10 +56,10 @@
\xmlregistersetup{xml:mml:define}
-\unexpanded\def\MMLhack
- {\let\MMLpar\par
- \let\par\relax
- \everyvbox{\let\par\MMLpar}}
+% \unexpanded\def\MMLhack % no longer needed
+% {\let\MMLpar\par
+% \let\par\relax
+% \everyvbox{\let\par\MMLpar}}
\xmlmapvalue {mml:math:mode} {display} {\displaymathematics} % we had this already
\xmlmapvalue {mml:math:mode} {inline} {\inlinemathematics }
@@ -75,7 +80,7 @@
}
{
\math_fences_checked_start
- \MMLhack
+ %\MMLhack
\xmlflush{#1}
\math_fences_checked_stop
}
@@ -85,7 +90,7 @@
\startxmlsetups mml:imath
\inlinemathematics {
\math_fences_checked_start
- \MMLhack
+ %\MMLhack
\xmlflush{#1}
\math_fences_checked_stop
}
@@ -94,7 +99,7 @@
\startxmlsetups mml:dmath
\displaymathematics {
\math_fences_checked_start
- \MMLhack
+ %\MMLhack
\xmlflush{#1}
\math_fences_checked_stop
}
@@ -106,7 +111,10 @@
\edef\mmlformulalabel {\xmlatt{#1}{label}\xmlatt{#1}{id}}
\edef\mmlformulasublabel{\xmlatt{#1}{sublabel}\xmlatt{#1}{id}}
\doifsomething\mmlformulalabel{\placeformula[\mmlformulalabel]{\mmlformulasublabel}}
- \startformula\MMLhack\xmlfirst{#1}{/mml:math}\stopformula
+ \startformula
+ %\MMLhack
+ \xmlfirst{#1}{/mml:math}
+ \stopformula
\stopxmlsetups
% old delimiter hacks
@@ -164,11 +172,6 @@
%D filter. There is an intermediate cleaner module but it has
%D some namespace limitations. Here we do it the \MKIV\ way.
-\def\widevec#1%
- {\vbox{\mathsurround\zeropoint\ialign{##\crcr
- \rightarrowfill\crcr\noalign{\nointerlineskip}%
- \startimath\hfil\displaystyle{#1}\hfil\stopimath\crcr}}}
-
%D The rendering macros:
\def\MMLrm{\mr}
@@ -209,7 +212,9 @@
%D We start with the parent elements and the option handler.
-\def\xmlmathmldirective#1{\dosetvalue{MML#1}}
+\unexpanded\def\xmlmathmldirective#1{\dosetvalue{MML#1}}
+
+\xmlinstalldirective{mathml}{xmlmathmldirective}
%def\xmlmathmldirective#1#2#3{[#1][#2][#3]\dosetvalue{MML#1}{#2}{#3}}
@@ -378,7 +383,7 @@
% \MMLdoR
% } {
\edef\mmlapplyaction{\xmlfilter{#1}{/*/name()}}
- \doifsetupselse {mml:apply:mml:\mmlapplyaction} {
+ \doifelsesetups {mml:apply:mml:\mmlapplyaction} {
\xmlsetup{#1}{mml:apply:mml:\mmlapplyaction}
} {
% \MMLdoL
@@ -399,7 +404,7 @@
\startxmlsetups mml:apply:mml:fn
\xmldoifelse {#1} {/mml:fn/mml:ci} {
\edef\mmlfnci{\xmlstripped{#1}{/mml:fn/mml:ci}}% was xmlcontent
- \doifsetupselse{mmc:fn:\mmlfnci} { % was mmc:fn:...
+ \doifelsesetups{mmc:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mmc:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
@@ -466,7 +471,7 @@
\fi
\xmldoifelse {#1} {/mml:ci} { % first
\edef\mmlfnci{\xmltext{#1}{/mml:ci}}% was xmlcontent
- \doifsetupselse{mmc:fn:\mmlfnci} { % was mmc:fn:...
+ \doifelsesetups{mmc:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mmc:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
@@ -497,7 +502,7 @@
\startxmlsetups mmc:fn:apply % where used?
\xmldoifelse {#1} {/mml:ci} { % first
\edef\mmlfnci{\xmltext{#1}{/mml:ci}}% was xmlcontent
- \doifsetupselse{mmc:fn:\mmlfnci} { % was mmc:fn:...
+ \doifelsesetups{mmc:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mmc:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
@@ -524,16 +529,15 @@
\starttexdefinition mmlapplycsymbol #1#2#3#4
% #1=full url, #2=name, #3=encoding, #4=text
\doifelse {#3} {text} {
-% {\mr #4}
\text{#4}
} {
- \doifsetupselse {mml:csymbol:#1} {
+ \doifelsesetups {mml:csymbol:#1} {
% full url
- \directsetup{mml:csymbol:#1}
+ \fastsetup{mml:csymbol:#1}
} {
% somename (fallback)
- \doifsetupselse {mml:csymbol:#2} {
- \directsetup{mml:csymbol:#2}
+ \doifelsesetups {mml:csymbol:#2} {
+ \fastsetup{mml:csymbol:#2}
} {
\xmlval{mmc:cs}{#3}{}% todo
}
@@ -580,7 +584,7 @@
\stopxmlsetups
\startxmlsetups mml:ci:vector
- \widevec{\xmlflush{#1}}
+ \overrightarrow{\xmlflush{#1}}
\stopxmlsetups
\startxmlsetups mml:ci:matrix
@@ -720,7 +724,7 @@
\startxmlsetups mml:cn:rational
\xmldoifelse {#1} {/mml:sep} {
- \frac
+ \mmlfrac
{\xmlsnippet{#1}{1}}
{\xmlsnippet{#1}{3}}
} {
@@ -905,7 +909,7 @@
\mmlsecond{#1}/\mmlthird{#1}
\else
\MMLcreset
- \frac{\MMLcreset\mmlsecond{#1}}{\MMLcreset\mmlthird{#1}}
+ \mmlfrac{\MMLcreset\mmlsecond{#1}}{\MMLcreset\mmlthird{#1}}
\fi
}
\advance\mmldividelevel\minusone
@@ -1051,7 +1055,7 @@
\let\MMLtimes@@symbol\MMLtimessymbol
} {
\xmldoifelse {#1} {/mml:cn[name(1) == 'mml:cn']} {% name(1) is next one
- \doifinsetelse\MMLtimessymbol{\v!yes,\v!no} {
+ \doifelseinset\MMLtimessymbol{\v!yes,\v!no} {
\let\MMLtimes@@symbol\v!yes
} {
\let\MMLtimes@@symbol\MMLtimessymbol
@@ -1303,7 +1307,7 @@
\doifelse \MMLdiffalternative \v!a {
\xmldoifelse {#1} {/mml:lambda} {
% a special case (mathadore/openmath)
- \frac {
+ \mmlfrac {
d
\normalsuperscript
{\xmlfirst{#1}{/mml:bvar}\xmlfirst{#1}{/mml:cn}}
@@ -1316,7 +1320,7 @@
}
} {
\xmldoifelse {#1} {/mml:bvar} {
- \frac {
+ \mmlfrac {
{\mr d}{
\xmldoifelse {#1} {/mml:degree} {
\normalsuperscript{\xmlconcat{#1}{/mml:degree}\empty}
@@ -1331,13 +1335,11 @@
\xmlfirst{#1}{/mml:ci}
} {
\MMLcreset
-\ifnum\xmlcount{#1}{/mml:apply/*}>\plustwo % hack
- \left(
- \xmlfirst{#1}{/mml:apply}
- \right)
-\else
- \xmlfirst{#1}{/mml:apply}
-\fi
+ \ifnum\xmlcount{#1}{/mml:apply/*}>\plustwo % hack
+ \left(\xmlfirst{#1}{/mml:apply}\right)
+ \else
+ \xmlfirst{#1}{/mml:apply}
+ \fi
}
}
} {
@@ -1387,7 +1389,7 @@
\xmlfirst{#1}{/(mml:apply\string|mml:reln\string|mml:ci\string|mml:cn)}
} {
\xmldoifelse {#1} {/mml:bvar} {
- \frac {
+ \mmlfrac {
{\mr d}\normalsuperscript{
\xmldoifelse {#1} {/mml:degree} {
\xmlconcat{#1}{/mml:degree}\empty
@@ -1709,7 +1711,7 @@
\stopxmlsetups
\startxmlsetups mml:annotation
- \xmldoifelse {#1} {.[oneof(@encoding,'TeX','tex','TEX','ConTeXt','context','CONTEXT','ctx')]} {
+ \xmldoifelse {#1} {.[oneof(@encoding,'TeX','tex','application/x-tex','TEX','ConTeXt','context','CONTEXT','ctx')]} {
\xmlflushcontext{#1}
} {
\xmldoifelse {#1} {.[oneof(@encoding,'calcmath','cm')]} {
@@ -1745,7 +1747,7 @@
\startxmlsetups mml:notanumber \mathopnolimits{NaN} \stopxmlsetups
\startxmlsetups mml:true \mathopnolimits{true} \stopxmlsetups
\startxmlsetups mml:false \mathopnolimits{false} \stopxmlsetups
-\startxmlsetups mml:emptyset \mathopnolimits{\O} \stopxmlsetups
+\startxmlsetups mml:emptyset \mathopnolimits{Ø} \stopxmlsetups
\startxmlsetups mml:pi \pi \stopxmlsetups
\startxmlsetups mml:eulergamma \gamma \stopxmlsetups
\startxmlsetups mml:infinity \infty \stopxmlsetups
@@ -1824,25 +1826,33 @@
% helpers: maybe we can need a setting for the uprights
-\xmlmapvalue {mml} {normal} {\mathupright} % {\mathtf}
-\xmlmapvalue {mml} {double-struck} {\mathblackboard}
-\xmlmapvalue {mml} {italic} {\mathit}
-\xmlmapvalue {mml} {fraktur} {\mathfraktur}
-\xmlmapvalue {mml} {script} {\mathscript}
-\xmlmapvalue {mml} {bold} {\mb} % {\mathbf}
-\xmlmapvalue {mml} {bold-italic} {\mathbi}
-\xmlmapvalue {mml} {bold-fraktur} {\mathfraktur\mathbf}
-\xmlmapvalue {mml} {bold-script} {\mathscript\mathbf}
-\xmlmapvalue {mml} {sans-serif} {\mathss}
-\xmlmapvalue {mml} {bold-sans-serif} {\mathss\mathbf}
-\xmlmapvalue {mml} {sans-serif-italic} {\mathss\mathit}
-\xmlmapvalue {mml} {sans-serif-bold-italic} {\mathss\mathbi}
-\xmlmapvalue {mml} {monospace} {\mathtt}
+\xmlmapvalue {mml:s} {normal} {\mathupright} % {\mathtf}
+\xmlmapvalue {mml:s} {double-struck} {\mathblackboard}
+\xmlmapvalue {mml:s} {italic} {\mathit}
+\xmlmapvalue {mml:s} {fraktur} {\mathfraktur}
+\xmlmapvalue {mml:s} {script} {\mathscript}
+\xmlmapvalue {mml:s} {bold} {\mb} % {\mathbf}
+\xmlmapvalue {mml:s} {bold-italic} {\mathbi}
+\xmlmapvalue {mml:s} {bold-fraktur} {\mathfraktur\mathbf}
+\xmlmapvalue {mml:s} {bold-script} {\mathscript\mathbf}
+\xmlmapvalue {mml:s} {sans-serif} {\mathss}
+\xmlmapvalue {mml:s} {bold-sans-serif} {\mathss\mathbf}
+\xmlmapvalue {mml:s} {sans-serif-italic} {\mathss\mathit}
+\xmlmapvalue {mml:s} {sans-serif-bold-italic} {\mathss\mathbi}
+\xmlmapvalue {mml:s} {monospace} {\mathtt}
+
+\xmlmapvalue {mml:l} {-} {\let\mmlfrac\tfrac}
+ \let\mmlfrac\frac
+\xmlmapvalue {mml:l} {+} {\let\mmlfrac\sfrac}
% todo: displaystyle=true/false (or whatever else shows up)
\starttexdefinition setmmlmathstyle #1
- \xmlval {mml} {\xmlatt{#1}{mathvariant}} \empty % was: \mmmr
+ \xmlval{mml:s}{\xmlatt{#1}{mathvariant}}\empty % was: \mmmr
+\stoptexdefinition
+
+\starttexdefinition setmmlscriptlevel #1
+ \xmlval{mml:l}{\xmlatt{#1}{scriptlevel}}{\let\mmlfrac\frac}
\stoptexdefinition
\starttexdefinition applymmlmathcolor #1#2
@@ -1899,8 +1909,11 @@
\startxmlsetups mml:mi % todo: mathsize (unlikely) mathcolor (easy) mathbackground (easy)
\begingroup
+ \pushmathstyle
\setmmlmathstyle{#1}
+ \setmmlscriptlevel{#1}
\ctxmodulemathml{mi("#1")}
+ \popmathstyle
\endgroup
\stopxmlsetups
@@ -2000,7 +2013,7 @@
\overline{\left)\strut\xmlflush{#1}\right.}
} {
\doifelse \mmlmenclosenotation {mml:enclose:actuarial} {
- \overline{\left.\strut\xmlflush{#1}\right|}
+ \overline{\left.\strut\xmlflush{#1}\right\vert}
} {
\doifelse \mmlmenclosenotation {mml:enclose:radical} {
\sqrt{\xmlflush{#1}}
@@ -2010,13 +2023,13 @@
\framed
[frame=off,strut=no,background={\mmlmenclosenotation}] % offset is kind of undefined
{\startimath
- \expanded{\doifinsetelse {mml:enclose:longdiv} {\mmlmenclosenotation}} {
+ \expanded{\doifelseinset {mml:enclose:longdiv} {\mmlmenclosenotation}} {
\overline{\left)\strut\xmlflush{#1}\right.}
} {
- \expanded{\doifinsetelse {mml:enclose:actuarial} {\mmlmenclosenotation}} {
- \overline{\left.\strut\xmlflush{#1}\right|}
+ \expanded{\doifelseinset {mml:enclose:actuarial} {\mmlmenclosenotation}} {
+ \overline{\left.\strut\xmlflush{#1}\right\vert}
} {
- \expanded{\doifinsetelse {mml:enclose:radical} {\mmlmenclosenotation}} {
+ \expanded{\doifelseinset {mml:enclose:radical} {\mmlmenclosenotation}} {
\sqrt{\xmlflush{#1}}
} {
\xmlflush{#1}
@@ -2043,7 +2056,7 @@
\doifelse{\xmlatt{#1}{bevelled}}{true} {
\left.\mmlfirst{#1}\middle/\mmlsecond{#1}\right.% \thinspace\middle/\thinspace
} {
- \frac{\mmlfirst{#1}}{\mmlsecond{#1}}
+ \mmlfrac{\mmlfirst{#1}}{\mmlsecond{#1}}
}
\else
\doifelse {\xmlval{mml:mfrac:linethickness}{\mmlfraclinethickness}{}} {} {
@@ -2074,8 +2087,11 @@
\startxmlsetups mml:mstyle
\begingroup
+ \pushmathstyle
\setmmlmathstyle{#1}
+ \setmmlscriptlevel{#1}
\xmlflush{#1}
+ \popmathstyle
\endgroup
\stopxmlsetups
@@ -2230,6 +2246,8 @@
}
\stopxmlsetups
+% helpers
+
\unexpanded\def\mmlexecuteifdefined#1%
{\ifx#1\empty
\expandafter\secondoftwoarguments
@@ -2240,135 +2258,166 @@
\fi\fi
{\csname#1\endcsname}}
-% todo: combine topaccent/over/bottomaccent/under check
-
-\definemathextensible [\v!mathematics] [mml:overleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:overrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:overleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:overtwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:overleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:overleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:overrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:overrightharpoonup] ["21C0]
-
-\definemathextensible [\v!mathematics] [mml:underleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:underrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:underleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:undertwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:underleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:underleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:underrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:underrightharpoonup] ["21C0]
-
-\definemathtriplet [\v!mathematics] [mmlovertriplet]
-\definemathtriplet [\v!mathematics] [mmlundertriplet]
-\definemathtriplet [\v!mathematics] [mmldoubletriplet]
-
-% alternative:
-%
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2190}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2192}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F5}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F6}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F7}] ["27F7]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F9}] ["27F9]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BD}] ["21BD]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BC}] ["21BC]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C1}] ["21C1]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C0}] ["21C0]
-
-\unexpanded\def\mmloverof#1{\mmlexecuteifdefined\mmlovercommand\relax{\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloveros#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmloverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
+\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
+
+\definemathtriplet [\v!mathematics] [mmlovertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmlundertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmldoubletriplet] % or will we use a special instance
+
+% common to munder/mover/munderover
+
+\starttexdefinition unexpanded mmlfencedfirst #1
+ \math_fences_checked_start
+ \mmlunexpandedfirst{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedsecond #1
+ \math_fences_checked_start
+ \mmlunexpandedsecond{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedthird #1
+ \math_fences_checked_start
+ \mmlunexpandedthird{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+
+% mover
+
+\starttexdefinition unexpanded mmloverabove #1
+ \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedsecond{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccent #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlovertext #1
+ \mmlovertriplet {\mmloverbase{#1}} {\mmloverabove{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccentchecker #1
+ \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathabove\mmlovertoken \mmloveraccent \mmlovertext {#1}
+\stoptexdefinition
\startxmlsetups mml:mover
- \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
- \doifelseutfmathabove\mmlovertoken {
- \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
- \mmloverof{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathabove\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandabove\mmlbasetoken}
- \mmloverbs{#1}
- } {
- \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
- \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
- }
- }
- % \limits % spoils spacing
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmloverbasefiller \mmloveraccentchecker {#1}
\stopxmlsetups
-% alternative:
-%
-% \startxmlsetups mml:mover
-% \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
-% \doifelseutfmathabove\mmlovertoken {
-% \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
-% \mmloverof{#1}
-% } {
-% \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]/text()}}
-% \ifcsname mml:\mmlbasetoken\endcsname
-% \csname mml:\mmlbasetoken\endcsname{\mmlunexpandedsecond{#1}}\relax
-% \else
-% \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
-% \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
-% \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
-% \fi
-% }
-% % \limits % spoils spacing
-% \stopxmlsetups
+% munder
-\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
+\starttexdefinition unexpanded mmlunderbelow #1
+ \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccent #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlundertext #1
+ \mmlundertriplet {\mmlunderbase{#1}} {} {\mmlunderbelow{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccentchecker #1
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken \mmlunderaccent \mmlundertext {#1}
+\stoptexdefinition
\startxmlsetups mml:munder
- \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
- \doifelseutfmathbelow\mmlundertoken {%
- \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
- \mmlunderuf{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderbs{#1}
- } {
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \mmlundertriplet{\mmlunderus{#1}}{\mmlunderbf{#1}}\relax
- }
- }
- % \limits % spoils spacing
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderbasefiller \mmlunderaccentchecker {#1}
\stopxmlsetups
-\unexpanded\def\mmlunderoverst#1{\mmlexecuteifdefined\mmlbasecommand \relax{\mmlunexpandedsecond{#1}}{\mmlunexpandedthird{#1}}\relax}
-\unexpanded\def\mmlunderoverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderoverus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderoverot#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedthird {#1}}\relax}
+% munderover
-\startxmlsetups mml:munderover
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderoverst{#1}
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUO #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlovercommand {\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand {\mmlovercommand\mmlundercommand}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \ifcsname\mmlovercommand\endcsname
+ \csname\mmlovercommand\endcsname {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}}
+ \else
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}\mmlfencedthird{#1}} {}
+ \fi
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUT #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlbasecommand {\mmlundercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}} {}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerOT #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand{\mmlovercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedsecond{#1}}
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerTT #1
+ \mmldoubletriplet {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentchecker #1
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlovertoken {\mmlextensible{\xmlraw{#1}{/mml:*[3]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken {
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerUO \mmlunderoveraccentcheckerUT {#1}
} {
- \edef\mmlundertoken {\xmlraw{#1}{/mml:*[2]}}% /text()
- \edef\mmlovertoken {\xmlraw{#1}{/mml:*[3]}}% /text()
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \edef\mmlovercommand {\utfmathfiller\mmlovertoken}
- \mmldoubletriplet{\mmlunderoverbf{#1}}{\mmlunderoverot{#1}}{\mmlunderoverus{#1}}\relax
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerOT \mmlunderoveraccentcheckerTT {#1}
}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+
+\startxmlsetups mml:munderover
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderoverbasefiller \mmlunderoveraccentchecker {#1}
\stopxmlsetups
% tables (mml:mtable, mml:mtr, mml:mlabledtr, mml:mtd)
\startxmlsetups mml:mtable % some more attributes need to be supported
- \vcenter{\ctxmodulemathml{mtable("#1")}}
+ \vcenter {
+        \hbox {% needed because otherwise positions make the vcenter wide
+ \ctxmodulemathml{mtable("#1")}
+ }
+ }
\stopxmlsetups
\startxmlsetups mml:mcolumn
@@ -2378,29 +2427,39 @@
\def\mmlsetfakewidth#1{\setbox\scratchbox\hbox{#1}\scratchdimen\wd\scratchbox}
\def\mmlmcolumndigitspace {\mmlsetfakewidth {0}\kern\scratchdimen}
-\def\mmlmcolumndigitrule {\mmlsetfakewidth {0}\vrule width \scratchdimen height .2pt depth .2pt\relax}
-\def\mmlmcolumnsymbolrule {\mmlsetfakewidth{\times}\vrule width \scratchdimen height .2pt depth .2pt\relax}
-\def\mmlmcolumnpunctuationrule{\mmlsetfakewidth {.}\vrule width \scratchdimen height .2pt depth .2pt\relax}
+\def\mmlmcolumndigitrule {\mmlsetfakewidth {0}\vrule \s!width \scratchdimen \s!height .2\points \s!depth .2\points\relax}
+\def\mmlmcolumnsymbolrule {\mmlsetfakewidth{\times}\vrule \s!width \scratchdimen \s!height .2\points \s!depth .2\points\relax}
+\def\mmlmcolumnpunctuationrule{\mmlsetfakewidth {.}\vrule \s!width \scratchdimen \s!height .2\points \s!depth .2\points\relax}
+
+\setupMMLappearance[mspace][\c!option=] % \v!test
\startxmlsetups mml:mspace
\begingroup
\edef\mmlspacetext{\xmlatt{#1}{spacing}}
\ifx\mmlspacetext\empty
- \!!widtha \xmlattdef{#1}{width} \!!zeropoint % must be string
- \!!heighta\xmlattdef{#1}{height}\!!zeropoint
- \!!deptha \xmlattdef{#1}{depth} \!!zeropoint
- \ifdim\!!heighta=\zeropoint
- \ifdim\!!deptha=\zeropoint\else
- \hbox{\vrule\s!depth\!!deptha\s!height\zeropoint\s!width\zeropoint}%
+ \scratchwidth \xmlattdef{#1}{width} \!!zeropoint % must be string
+ \scratchheight\xmlattdef{#1}{height}\!!zeropoint
+ \scratchdepth \xmlattdef{#1}{depth} \!!zeropoint
+ \ifdim\scratchheight=\zeropoint
+ \ifdim\scratchdepth=\zeropoint\else
+ \hbox{\vrule\s!depth\scratchdepth\s!height\zeropoint\s!width\zeropoint}%
\fi
\else
- \hbox{\vrule\s!depth\zeropoint\s!height\!!heighta\s!width\zeropoint}%
+ \hbox{\vrule\s!depth\zeropoint\s!height\scratchheight\s!width\zeropoint}%
\fi
- \ifdim\!!widtha=\zeropoint\else
- \hskip\!!widtha
+ \ifdim\scratchwidth=\zeropoint\else
+ \ifx\MMLmspaceoption\v!test
+ \hbox to \scratchwidth{\showstruts\strut\hss\lower2\exheight\hbox{\infofont\xmlattdef{#1}{width}}\hss\strut}
+ \else
+ \hskip\scratchwidth
+ \fi
\fi
\else
- \phantom{\triggermathstyle\normalmathstyle\mmlspacetext}
+ \ifx\MMLmspaceoption\v!test
+ \hbox{\showstruts\strut\phantom{\triggermathstyle\normalmathstyle\mmlspacetext}\strut}
+ \else
+ \phantom{\triggermathstyle\normalmathstyle\mmlspacetext}
+ \fi
\fi
\endgroup
\stopxmlsetups
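A minimal sketch of driving the new mspace test option from a user document, assuming the module is loaded with \usemodule[mathml], the English interface mapping of \c!option/\v!test (option/test), and a hypothetical input file name:

    \usemodule[mathml]
    \setupMMLappearance[mspace][option=test] % show struts and width labels instead of plain skips
    \starttext
        \xmlprocessfile{main}{demo-mathml.xml}{} % hypothetical MathML source
    \stoptext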
diff --git a/tex/context/base/x-mathml.xsd b/tex/context/base/x-mathml.xsd
index 17f0bea2a..1c29452b0 100644
--- a/tex/context/base/x-mathml.xsd
+++ b/tex/context/base/x-mathml.xsd
@@ -3,9 +3,9 @@
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xds="http://www.w3.org/?">
-<xsd:schema>
+<xsd:schema>
-<xsd:element name="imath" type="mathml:math" />
-<xsd:element name="dmath" type="mathml:math" />
+<xsd:element name="imath" type="mathml:math" />
+<xsd:element name="dmath" type="mathml:math" />
</xsd:schema>
diff --git a/tex/context/base/x-res-01.mkiv b/tex/context/base/x-res-01.mkiv
index e234e9867..36070c615 100644
--- a/tex/context/base/x-res-01.mkiv
+++ b/tex/context/base/x-res-01.mkiv
@@ -256,7 +256,7 @@
\vfill
}
\advance\hsize by -30pt
- \doifmodeelse {clipgrid-distance,clipgrid-steps} {
+ \doifelsemode {clipgrid-distance,clipgrid-steps} {
\xmlsetup{#1}{xml:resource:a}
} {
\xmlsetup{#1}{xml:resource:b}
diff --git a/tex/context/base/x-set-11.mkiv b/tex/context/base/x-set-11.mkiv
index d4b43a9ee..73e68e073 100644
--- a/tex/context/base/x-set-11.mkiv
+++ b/tex/context/base/x-set-11.mkiv
@@ -57,7 +57,7 @@
2: -- wordt verwerkt
3: -- is niet gedefinieerd
4: -- wordt nogmaals verwerkt
- optional: optioneel
+ optional: opt
displaymath: formule
index: ingang
math: formule
@@ -95,7 +95,7 @@
2: -- is processed
3: -- is undefined
4: -- is processed again
- optional: optional
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -133,7 +133,7 @@
2: -- wird verarbeitet
3: -- ist undefiniert
4: -- ist mehrmals verarbeitet
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -171,7 +171,7 @@
2: -- je zpracovano
3: -- je nedefinovano
4: -- je zpracovano znovu
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -209,7 +209,7 @@
2: -- is processed
3: -- is undefined
4: -- is processed again
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -247,7 +247,7 @@
2: este procesat --
3: -- este nedefinit
4: -- este procesat din nou
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -285,7 +285,7 @@
2: -- est traité
3: -- n'est pas défini
4: -- est traité de nouveau
- optional: optionel
+ optional: opt
displaymath: formule
index: entrée
math: formule
@@ -369,7 +369,8 @@
} {
\let\currentSETUPprefix\empty
}
- \edef\currentSETUPname{\xmlatt{#1}{name}}
+ % \edef\currentSETUPname{\xmlatt{#1}{name}}
+ \edef\currentSETUPname{\xmlattribute{#1}{/sequence/string[1]}{value}}%
\doifelse {\xmlatt{#1}{generated}} {yes} {
\def\currentSETUPgenerated{*}
} {
@@ -448,8 +449,18 @@
% \def\showsetupindeed#1%
% {\xmlfilterlist{\loadedsetups}{interface/command[@name='#1']/command(xml:setups:typeset)}}
+% \def\showsetupindeed#1%
+% {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
+
+% \setelementnature[setup][display]
+% \setelementnature[setup][mixed]
+
\def\showsetupindeed#1%
- {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
+ {\startelement[setup][name=#1]%
+ \startelement[noexport][comment={setup definition #1}]
+ \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and '\e!start' or '') .. @name]/command(xml:setups:typeset)}%
+ \stopelement
+ \stopelement}
\unexpanded\def\placesetup {\placelistofsorts[texcommand][\c!criterium=\v!used]}
\unexpanded\def\placeallsetups{\placelistofsorts[texcommand][\c!criterium=\v!all ]}
@@ -459,7 +470,7 @@
%D Typesetting:
\setupxml
- [\c!method=mkiv, % mixed mode
+ [%\c!method=mkiv, % mixed mode
\c!default=\v!hidden, % ignore elements that are not defined
\c!compress=\v!yes, % strip comment
\c!entities=\v!yes] % replace entities
@@ -489,37 +500,41 @@
\xmlfilter{#1}{/sequence/first()}
\ignorespaces
\egroup
- \xmldoif{#1}{/arguments} {
- \bgroup
- \enablemode[setups-pass-one]
- \doglobal\newcounter\currentSETUPargument
- \ignorespaces
- \xmlfilter{#1}{/arguments/text()}
- \egroup
- }
- \doif {\xmlatt{#1}{type}} {environment} {
- \bgroup
- \enablemode[setups-pass-one]%
- \hskip.5em\unknown\hskip.5em
- \doif {\xmlatt{#1}{generated}} {yes} {
- \ttsl
- }
- \tex{\e!stop}
- \xmlfilter{#1}{/sequence/first()}
- \ignorespaces
- \egroup
- }
- \endgraf
- \xmldoif{#1}{/arguments} {
- \bgroup
- \enablemode[setups-pass-two]
- \doglobal\newcounter\currentSETUPargument
- %\blank[\v!line] % packed mode (we could do \startunpacked ...)
- \godown[.75\lineheight]
- \switchtobodyfont[\v!small]
- \ignorespaces\xmlfilter{#1}{/arguments/text()}\endgraf
- \egroup
- }
+ \ifshortsetup
+ % nothing
+ \else
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-one]
+ \doglobal\newcounter\currentSETUPargument
+ \ignorespaces
+ \xmlfilter{#1}{/arguments/text()}
+ \egroup
+ }
+ \doif {\xmlatt{#1}{type}} {environment} {
+ \bgroup
+ \enablemode[setups-pass-one]%
+ \hskip.5em\unknown\hskip.5em
+ \doif {\xmlatt{#1}{generated}} {yes} {
+ \ttsl
+ }
+ \tex{\e!stop}
+ \xmlfilter{#1}{/sequence/first()}
+ \ignorespaces
+ \egroup
+ }
+ \endgraf
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-two]
+ \doglobal\newcounter\currentSETUPargument
+ %\blank[\v!line] % packed mode (we could do \startunpacked ...)
+ \godown[.75\lineheight]
+ \switchtobodyfont[\v!small]
+ \ignorespaces\xmlfilter{#1}{/arguments/text()}\endgraf
+ \egroup
+ }
+ \fi
\getvalue{\e!stop setuptext}
\stopxmlsetups
@@ -562,7 +577,7 @@
\startxmlsetups xml:setups:word \showSETUPcomponent{#1}{word} {word} \stopxmlsetups
\def\showSETUPcomponent#1#2#3%
- {\doifmodeelse{setups-pass-one}
+ {\doifelsemode{setups-pass-one}
{\getvalue{showSETUP#2}{#1}}
{\simpleSETUPargument{#3}}}
@@ -597,7 +612,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:assignments
- \doifmodeelse{setups-pass-one} {
+ \doifelsemode{setups-pass-one} {
\showSETUPassignment{#1}
} {
\xdef\currentSETUPwidth{0pt}%
@@ -619,7 +634,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:keywords
- \doifmodeelse{setups-pass-one} {
+ \doifelsemode{setups-pass-one} {
\showSETUPkeyword{#1}
} {
\startfirstSETUPcolumn{\showSETUPnumber}%
@@ -638,11 +653,16 @@
\xmlflush{#1}
\doifmode{interface:setup:defaults} {
\ifx\currentSETUPhash\empty \else
- \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
- \ifx\currentSETUPvalue\empty
- \space=\space
- \detokenize\expandafter{\currentSETUPvalue}
- \fi
+ \begingroup
+ % todo, make a one level expansion of parameter
+ \let\emwidth \relax
+ \let\exheight\relax
+ \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
+ \ifx\currentSETUPvalue\empty \else
+ =\space
+ \detokenize\expandafter{\currentSETUPvalue}
+ \fi
+ \endgroup
\fi
}
\stopsecondSETUPcolumn
@@ -650,7 +670,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:constant
- \doifmodeelse {setups-pass-one} {
+ \doifelsemode {setups-pass-one} {
} {
\doif {\xmlatt{#1}{default}} {yes} {
\underbar % next needs to be {braced}
@@ -662,7 +682,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:variable
- \doifmodeelse {setups-pass-one} {
+ \doifelsemode {setups-pass-one} {
\expanded{\setupintfont{\xmlatt{#1}{value}}}\ignorespaces
} {
\c!setup!reserved!{\xmlatt{#1}{value}}
@@ -818,7 +838,6 @@
\stoptabulate
\stopxmlsetups
-
\starttexdefinition showrootvalues [#1]
\edef\currentsetupparametercategory{#1}
\edef\currentsetupparametercommand{setup#1}
diff --git a/tex/context/base/x-set-12.mkiv b/tex/context/base/x-set-12.mkiv
index 6590bfe9e..c60445313 100644
--- a/tex/context/base/x-set-12.mkiv
+++ b/tex/context/base/x-set-12.mkiv
@@ -146,8 +146,7 @@
[\c!alternative=\v!doublesided]
\setupsetup
- [\c!criterium=\v!all,
- \c!reference=0]
+ [\c!criterium=\v!all]
\setupframedtexts
[setuptext]
diff --git a/tex/context/base/x-xtag.mkiv b/tex/context/base/x-xtag.mkiv
index 09490cc8f..ab95c567e 100644
--- a/tex/context/base/x-xtag.mkiv
+++ b/tex/context/base/x-xtag.mkiv
@@ -14,7 +14,7 @@
%D Here we load the \MKII\ (mostly) streaming \XML\ parser. We
%D define a couple of catcode regimes first.
-\ifdefined\XMLbanner \endinput \fi
+\endinput
\writestatus{xtag}{this module is obsolete, use the mkiv-xml features or use mkii instead}
diff --git a/tex/context/bib/bibl-apa-it.tex b/tex/context/bib/bibl-apa-it.tex
new file mode 100644
index 000000000..ebf03313a
--- /dev/null
+++ b/tex/context/bib/bibl-apa-it.tex
@@ -0,0 +1,385 @@
+%D \module
+%D [ file=bibl-apa-it,
+%D version=2014.10.10,
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author={Andrea Valle \& Alan Braslau},
+%D date=\currentdate,
+%D copyright={Public Domain}]
+%C
+%C Donated to the public domain. Use at your own risk.
+
+\unprotect
+
+\setupcite
+ [author,year]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left={(},
+ \c!right={)}]
+
+\setupcite
+ [authoryear]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!yes,
+ \c!inbetween={ },
+ \c!left={(},
+ \c!right={)}]
+
+\setupcite
+ [authoryears]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!yes,
+ \c!inbetween={, },
+ \c!left={(},
+ \c!right={)}]
+
+\setupcite
+ [key,serial,authornum,page,short,type,doi,url]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left={[},
+ \c!right={]}]
+
+\setupcite
+ [num]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!yes,
+ \c!inbetween={--},
+ \c!left={[},
+ \c!right={]}]
+
+\setuppublications
+ [\c!sorttype=,
+ \c!criterium=,
+ \c!refcommand=authoryears,
+ \c!numbering=\v!no,
+ \c!autohang=\v!no]
+
+\setuppublicationlist
+ [\c!width=24pt,
+ \c!artauthor=\invertedshortauthor,
+ \c!editor=\invertedshortauthor,
+ \c!author=\invertedshortauthor,
+ \c!namesep={, },
+ \c!lastnamesep={ e },
+ \c!finalnamesep={ e },
+ \c!firstnamesep={, },
+ \c!juniorsep={ },
+ \c!vonsep={ },
+ \c!surnamesep={, },
+ \c!authoretallimit=5,
+ \c!editoretallimit=5,
+ \c!artauthoretallimit=5,
+ \c!authoretaldisplay=5,
+ \c!editoretaldisplay=5,
+ \c!artauthoretaldisplay=5,
+ \c!authoretaltext={ et al.},
+ \c!editoretaltext={ et al.},
+ \c!artauthoretaltext={ et al.}]
+
+\def\maybeyear#1{#1}
+\def\etalchar #1{#1}
+
+%D \macros{insertchapter,insertpublisher}
+
+%D Some shortcuts.
+
+% ((#1(type\ |)chapter#2)|#3)
+
+\def\insertchap#1#2#3%
+ {\insertchapter
+ {#1\insertbibtype{}{\ }{capitolo\ }}{#2}%
+ {#3}}
+
+% #1city, country: pubname#2
+% #1country: pubname#2
+% #1pubname#2
+% #1city, country#2
+% #3
+
+\def\insertpublisher#1#2#3%
+ {\insertpubname
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{}: }%
+ {#1\insertcountry{}{: }{}}}%
+ {#2}%
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{#2}}%
+ {\insertcountry{#1}{#2}{#3}}}%
+ }
+
+\def\insertorg#1#2#3%
+ {\insertorganization
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{#2}: }%
+ {\insertcountry{}{: }{#2}}}%
+ {}%
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{#2}}%
+ {\insertcountry{}{#2}{#3}}}%
+ }
+
+
+
+\setuppublicationlayout[article]{%
+ \insertartauthors{}{ }{\insertthekey{}{ }{}}%
+ \insertpubyear{(}{). }{\unskip.}%
+ \insertarttitle{\bgroup }{\egroup. }{}%
+ \insertjournal{\bgroup \it}{\egroup}
+ {\insertcrossref{In }{}{}}%
+ \insertvolume
+ {\bgroup \it, }
+ {\egroup\insertissue{\/(}{)}{}\insertpages{, }{.}{.}}
+ {\insertpages{, pp. }{.}{.}}%
+ \insertnote{ }{.}{}%
+ \insertcomment{}{.}{}%
+}
+
+\newif\ifeditedbook
+
+\setuppublicationlayout[book]{%
+ \insertauthors{}{ }{\inserteditors{}{, a c. di%
+ \ \global\editedbooktrue
+ }{\insertthekey{}{. }{}}}%
+ \insertpubyear{(}{). }{\unskip.}%
+ \inserttitle
+ {\bgroup\it }%
+ {\/\egroup
+ \ifeditedbook
+ \global\editedbookfalse
+ \insertvolume
+ { N.~}%
+ {\insertseries
+ { in~\bgroup}%
+ {\egroup. }%
+ {\insertcrossref{ in~}{}{. }}}%
+ {\insertseries{ }{.}{.}}%
+ \else
+ \insertcrossref
+ {\insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{. }{. }%
+ \insertvolume{Vol.~}{ di~}{}%
+ }%
+ {}%
+ {\insertvolume
+ {, vol.~}%
+ {\insertseries
+ { di~\bgroup\it}%
+ {\egroup}
+ {}}
+ {}%
+ \insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{.}{.}%
+ }%
+ \fi}%
+ {}%
+ \insertedition{ }{ ed.}{}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{ }%
+ \insertnote{}{.}{}%
+}
+
+\setuppublicationlayout[inbook]{%
+ \insertauthors{}{ }{\inserteditors{}{, a c. di%
+ \ \global\editedbooktrue
+ }{\insertthekey{}{. }{}}}%
+ \insertpubyear{(}{). }{\unskip.}%
+ \inserttitle
+ {\bgroup\it }%
+ {\/\egroup
+ \ifeditedbook
+ \global\editedbookfalse
+ \insertvolume
+ { number~}%
+ {\insertseries
+ { in~\bgroup}%
+ {\egroup. }%
+ {\insertcrossref{ in~}{}{.}}}%
+ {\insertseries{ }{.}{}}%
+ \else
+ \insertcrossref
+ {\insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{. }{. }%
+ \insertvolume{Volume~}{ di~}{}%
+ }%
+ {}%
+ {\insertvolume
+ {, volume~}%
+ {\insertseries
+ { di~\bgroup\it}%
+ {\egroup}
+ {}}
+ {}%
+ \insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{.}{}%
+ }%
+ \fi}%
+ { }%
+ \insertedition{ }{ ed.}{}%
+ \insertpublisher{ }{.}{.}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[booklet]{%
+ \insertauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertedition{ }{ ed.}{}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{}{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[manual]{%
+ \insertauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup \it }{\/\egroup \insertseries{ (}{)}{}. }{}%
+ \insertedition{ }{ ed.}{}%
+ \insertorg{ }{.}{.}%
+ \insertpages{}{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[incollection]{%
+ \insertartauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \insertarttitle{\bgroup }{\egroup. }{}%
+ \inserttitle
+ {In \inserteditors{}%
+ {, a c. di, }%
+ {}%
+ \bgroup\it}%
+ {\egroup
+ \insertseries
+ {\insertvolume{, number }{~in }{ }}%
+ {}%
+ {}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{. }{\unskip. }%
+ \insertedition{ }{ edition}{}%
+ \insertpublisher{ }{.}{.}%
+ }%
+ {In \insertcrossref{}{}{}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{. }{\unskip. }%
+ }%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[inproceedings]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \insertarttitle{\bgroup }{\egroup. }{}%%
+ \inserttitle
+ {In \inserteditors{}%
+ {, a c. di, }%
+ {}%
+ \bgroup\it}%
+ {\egroup
+ \insertseries
+ {\insertvolume{, number }{~in }{ }}%
+ {}%
+ {}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{}{}%
+ \insertorg{. }{.}{.}%
+ }%
+ {In \insertcrossref{}{}{}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{. }{\unskip. }%
+ }%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[proceedings]{%
+ \inserteditors{}{, a c. di%
+ \ \global\editedbooktrue
+ }{\insertthekey{}{ }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle
+ {\bgroup\it}%
+ {\egroup
+ \insertseries
+ {\insertvolume{, number }{~in }{ }}%
+ {}%
+ {}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{}{}%
+ \insertorg{. }{.}{.}%
+ }%
+ {}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[mastersthesis]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertbibtype{}{, }{Tesi di laurea, }%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[phdthesis]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup\it }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertbibtype{}{, }{Tesi di dottorato,}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{ p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[misc]{%
+ \insertauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}. }{}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[techreport]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertbibtype{}{\insertvolume{ }{, }{, }}{Relazione tecnica}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[unpublished]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ % \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertbibtype{(}{)}{}%
+ \insertnote{ }{.}{}%
+}
+
+\protect
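A hedged sketch of selecting this new Italian APA style with the MkII-style bib module, assuming the alternative key picks up the bibl-* file suffix as it does for the other styles; the database name matches the sample.bib shipped alongside, while the citation key is hypothetical:

    \usemodule[bib]
    \setupbibtex[database=sample]          % tex/context/bib/sample.bib
    \setuppublications[alternative=apa-it] % should load bibl-apa-it.tex
    \starttext
        As shown in \cite[someauthor2014].  % hypothetical key
        \completepublications[criterium=all]
    \stoptext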
diff --git a/tex/context/bib/sample.bib b/tex/context/bib/sample.bib
index 3f8df623e..4bb71d3a7 100644
--- a/tex/context/bib/sample.bib
+++ b/tex/context/bib/sample.bib
@@ -35,4 +35,3 @@
address = {London},
keywords = {general},
}
-
diff --git a/tex/context/extra/showunic.tex b/tex/context/extra/showunic.tex
deleted file mode 100644
index efdbf4d3a..000000000
--- a/tex/context/extra/showunic.tex
+++ /dev/null
@@ -1,130 +0,0 @@
-% author : Hans Hagen / PRAGMA-ADE
-% version : 2005-06-22
-
-% todo: take antiqua (has everything)
-
-% \tracetypescriptstrue
-
-\usetypescriptfile[type-cbg]
-
-% \preloadtypescriptstrue (default at pragma)
-
-% this font does not work ... why
-%
-% \starttypescript [serif] [hebrew] [default]
-% \definefontsynonym [Serif] [hclassic]
-% \loadmapline[=][hclassic < hclassic.pfb]
-% \stoptypescript
-
-\setuppapersize
- [S6][S6]
-
-\setupbodyfont
- [10pt]
-
-\setuplayout
- [backspace=12pt,
- topspace=12pt,
- width=middle,
- height=middle,
- header=0pt,
- footer=0pt]
-
-\setupcolors
- [state=start]
-
-\setupbackgrounds
- [page]
- [background=color,
- backgroundcolor=darkgray]
-
-\definetypeface [main-latin] [rm] [serif] [latin-modern] [default][encoding=texnansi]
-\definetypeface [main-latin] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-\definetypeface [main-math] [rm] [serif] [latin-modern] [default][encoding=texnansi]
-\definetypeface [main-math] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-\definetypeface [main-math] [mm] [math] [latin-modern] [default][encoding=default]
-
-\definetypeface [main-eastern] [rm] [serif] [latin-modern] [default][encoding=qx]
-\definetypeface [main-eastern] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-\definetypeface [main-greek] [rm] [serif] [cbgreek] [default][encoding=default]
-\definetypeface [main-greek] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-\definetypeface [main-cyrillic] [rm] [serif] [computer-modern] [default][encoding=t2a]
-\definetypeface [main-cyrillic] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-% \definetypeface [main-hebrew] [rm] [serif] [hebrew] [default][encoding=default]
-% \definetypeface [main-hebrew] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-% The \showunicodetable macro is defined in unic-run.tex.
-
-\starttext
-
-% latin: western / eastern
-
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{000}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-eastern]
- \centerbox{\scale[factor=max]{\showunicodetable{001}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{002}}}
-\stopstandardmakeup
-
-% greek
-
-\startstandardmakeup
- \setupbodyfont[main-greek]
- \centerbox{\scale[factor=max]{\showunicodetable{003}}}
-\stopstandardmakeup
-
-% cyrillic
-
-\startstandardmakeup
- \setupbodyfont[main-cyrillic]
- \centerbox{\scale[factor=max]{\showunicodetable{004}}}
-\stopstandardmakeup
-
-% hebrew
-
-% \startstandardmakeup
-% \setupbodyfont[mainhebrew]
-% \centerbox{\scale[factor=max]{\showunicodetable{005}}}
-% \stopstandardmakeup
-
-% misc
-
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{030}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{031}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{032}}}
-\stopstandardmakeup
-
-% math
-
-\startstandardmakeup
- \setupbodyfont[main-math]
- \centerbox{\scale[factor=max]{\showunicodetable{033}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-math]
- \centerbox{\scale[factor=max]{\showunicodetable{034}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-math]
- \centerbox{\scale[factor=max]{\showunicodetable{039}}}
-\stopstandardmakeup
-
-\stoptext
diff --git a/tex/context/fonts/lm-math.lfg b/tex/context/fonts/lm-math.lfg
index 87c37cd78..b8c996979 100644
--- a/tex/context/fonts/lm-math.lfg
+++ b/tex/context/fonts/lm-math.lfg
@@ -231,7 +231,7 @@ local seventeen = {
return {
name = "lm-math",
version = "1.00",
- comment = "Goodies that complement latin modern math.",
+ comment = "Goodies that complement latin modern math (virtual).",
author = "Hans Hagen",
copyright = "ConTeXt development team",
mathematics = {
diff --git a/tex/context/fonts/lm.lfg b/tex/context/fonts/lm.lfg
index 8d7614718..546d18def 100644
--- a/tex/context/fonts/lm.lfg
+++ b/tex/context/fonts/lm.lfg
@@ -34,8 +34,19 @@ return {
[0x2213] = { -- ∓
yoffset = -100,
},
- }
- }
+ },
+ },
+-- parameters = {
+-- FractionNumeratorDisplayStyleShiftUp = function(value,target,original)
+-- local o = original.mathparameters.FractionNumeratorDisplayStyleShiftUp
+-- if o > 675 then
+-- o = 600
+-- else
+-- -- probably tuned
+-- end
+-- return o * target.parameters.factor
+-- end,
+-- }
},
filenames = {
["latinmodern-math-regular.otf"] = {
diff --git a/tex/context/fonts/texgyre.lfg b/tex/context/fonts/texgyre.lfg
index 7782aa509..785982037 100644
--- a/tex/context/fonts/texgyre.lfg
+++ b/tex/context/fonts/texgyre.lfg
@@ -26,5 +26,11 @@ return {
"tgbonummath-regular.otf",
"tgbonum-math.otf",
},
+ ["texgyre-schola-math-regular.otf"] = {
+ "texgyreschola-math.otf", -- beta
+ "texgyrescholamath-regular.otf",
+ "tgscholamath-regular.otf",
+ "tgschola-math.otf",
+ },
},
}
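The added filenames entry only lists alternative file names under which the TeX Gyre Schola math font may ship; selecting the family in a document stays the usual one-liner, assuming the standard schola typescript:

    \setupbodyfont[schola,11pt]
    \starttext
        Schola text with math: $e^{i\pi}+1=0$.
    \stoptext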
diff --git a/tex/context/fonts/treatments.lfg b/tex/context/fonts/treatments.lfg
index 44d24da22..40bac427c 100644
--- a/tex/context/fonts/treatments.lfg
+++ b/tex/context/fonts/treatments.lfg
@@ -25,24 +25,50 @@ local fix_unifraktur = {
end,
}
-local fix_lmmonoregular = {
- comment = "wrong widths of some glyphs",
- fixes = function(data)
- report("fixing some wrong widths")
- local unicodes = data.resources.unicodes
- local descriptions = data.descriptions
- local defaultwidth = descriptions[unicodes["zero"]].width
- descriptions[unicodes["six"] ].width = defaultwidth
- descriptions[unicodes["nine"] ].width = defaultwidth
- descriptions[unicodes["caron"] ].width = defaultwidth
- descriptions[unicodes["perthousand"] ].width = defaultwidth
- descriptions[unicodes["numero"] ].width = defaultwidth
- descriptions[unicodes["caron.cap"] ].width = defaultwidth
- descriptions[unicodes["six.taboldstyle"] ].width = defaultwidth
- descriptions[unicodes["nine.taboldstyle"]].width = defaultwidth
- descriptions[unicodes["dollar.oldstyle" ]].width = defaultwidth
- end
-}
+-- local fix_lmmonoregular = {
+-- --
+-- -- there are now some extra safeguards for idris
+-- --
+-- comment = "wrong widths of some glyphs",
+-- fixes = function(data)
+-- report("fixing some wrong widths")
+-- local unicodes = data.resources.unicodes
+-- local descriptions = data.descriptions
+-- local function getdescription(name)
+-- local unicode = unicodes[name]
+-- if not unicode then
+-- report("no valid unicode for %a",name)
+-- return
+-- end
+-- local description = descriptions[unicode]
+-- if not description then
+-- report("no glyph names %a in font",name)
+-- return
+-- end
+-- return description
+-- end
+-- local zero = getdescription("zero")
+-- if not zero then
+-- return
+-- end
+-- local defaultwidth = zero.width
+-- local function setwidth(name)
+-- local data = getdescription(name)
+-- if data then
+-- data.width = defaultwidth
+-- end
+-- end
+-- setwidth("six")
+-- setwidth("nine")
+-- setwidth("caron")
+-- setwidth("perthousand")
+-- setwidth("numero")
+-- setwidth("caron.cap")
+-- setwidth("six.taboldstyle")
+-- setwidth("nine.taboldstyle")
+-- setwidth("dollar.oldstyle")
+-- end
+-- }
return {
name = "treatments",
diff --git a/tex/context/foxet/fe-bryson.xml b/tex/context/foxet/fe-bryson.xml
deleted file mode 100644
index 28646b65f..000000000
--- a/tex/context/foxet/fe-bryson.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version='1.0'?>
-<fe:sample xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng' origin='Bill Bryson'>
-Imagine trying to live in a world dominated by dihydrogen oxide, a
-compound that has no taste or smell and is so viable in its properties
-that it is generally benign but at other times swiftly lethal.
-Depending on its state, it can scald you or freeze you. In the
-presence of certain organic molecules it can form carbonic acids so
-nasty that they can strip the leaves from trees and eat the faces off
-statuary. In bulk, when agitated, it can strike with a fury that no
-human edifice could withstand. Even for those who have learned to live
-with it, it is often murderous substance. We call it water.
-</fe:sample>
diff --git a/tex/context/foxet/fe-ward.xml b/tex/context/foxet/fe-ward.xml
deleted file mode 100644
index 05f774265..000000000
--- a/tex/context/foxet/fe-ward.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version='1.0'?>
-<fe:sample xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng' origin='Peter Ward'>
-The Earth, as a habitat for animal life, is in old age and
-has a fatal illness. Several, in fact. It would be happening
-whether humans had ever evolved or not. But our presence is
-like the effect of an old-age patient who smokes many packs
-of cigarettes per day - and we humans are the cigarettes.
-</fe:sample>
diff --git a/tex/context/foxet/fe-zapf.xml b/tex/context/foxet/fe-zapf.xml
deleted file mode 100644
index faf9a0831..000000000
--- a/tex/context/foxet/fe-zapf.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version='1.0'?>
-<fe:sample xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng' origin='Hermann Zapf'>
-Coming back to the use of typefaces in electronic
-publishing: many of the new typographers receive their
-knowledge and information about the rules of typography
-from books, from computer magazines or the instruction
-manuals which they get with the purchase of a PC or
-software. There is not so much basic instruction, as of
-now, as there was in the old days, showing the differences
-between good and bad typographic design. Many people are
-just fascinated by their PC's tricks, and think that a
-widely<fe:compound/>praised program, called up on the
-screen, will make everything automatic from now on.
-</fe:sample>
diff --git a/tex/context/foxet/fo-0101.fo b/tex/context/foxet/fo-0101.fo
deleted file mode 100644
index 197c5834c..000000000
--- a/tex/context/foxet/fo-0101.fo
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='default'>
- <fo:region-body/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='default'>
- <fo:flow flow-name='xsl-region-body'/>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0102.fo b/tex/context/foxet/fo-0102.fo
deleted file mode 100644
index 9adcf917e..000000000
--- a/tex/context/foxet/fo-0102.fo
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='default'>
- <fo:region-body/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:declarations>
- <fe:definecolor name='red' r='.6'/>
- <fe:definecolor name='green' g='.6'/>
- <fe:definecolor name='blue' b='.6'/>
- <fe:definefontsynonym name='mono' file='lmtt10' encoding='texnansi'/>
- </fo:declarations>
-
- <fo:page-sequence master-reference='default'>
- <fo:flow flow-name='xsl-region-body'/>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0103.fo b/tex/context/foxet/fo-0103.fo
deleted file mode 100644
index 95d0d4769..000000000
--- a/tex/context/foxet/fo-0103.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='default'>
- <fo:region-body/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:declarations>
- <fo:color-profile src="url('myprofile.xml')" color-profile-name="myprofile"/>
- </fo:declarations>
-
- <fo:page-sequence master-reference='default'>
- <fo:flow flow-name='xsl-region-body'/>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0201.fo b/tex/context/foxet/fo-0201.fo
deleted file mode 100644
index f524cfd09..000000000
--- a/tex/context/foxet/fo-0201.fo
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='1cm'>
- <fo:region-body/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' line-height='18pt'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0301.fo b/tex/context/foxet/fo-0301.fo
deleted file mode 100644
index e0b8a6fc4..000000000
--- a/tex/context/foxet/fo-0301.fo
+++ /dev/null
@@ -1,56 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='1cm' fe:option='fit'>
- <fo:region-body background-color='lightgray'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container space-after.optimum='12pt' fe:tracing='true'>
- <fo:block>
- before<fo:character character='@'/>after
- </fo:block>
- <fo:block>
- before<fo:character character='@' vertical-align='super'/>after
- before<fo:character character='@' vertical-align='sub' />after
- before<fo:character character='@' vertical-align='-3pt' />after
- before<fo:character character='@' vertical-align='+3pt' />after
- </fo:block>
- <fo:block>
- before<fo:character character='@' glyph-orientation-horizontal='0deg' />after
- before<fo:character character='@' glyph-orientation-horizontal='90deg' />after
- before<fo:character character='@' glyph-orientation-horizontal='180deg'/>after
- before<fo:character character='@' glyph-orientation-horizontal='270deg'/>after
- </fo:block>
- <fo:block>
- before<fo:character character='@' glyph-orientation-horizontal='0deg' vertical-align='super'/>after
- before<fo:character character='@' glyph-orientation-horizontal='90deg' vertical-align='super'/>after
- before<fo:character character='@' glyph-orientation-horizontal='180deg' vertical-align='super'/>after
- before<fo:character character='@' glyph-orientation-horizontal='270deg' vertical-align='super'/>after
- </fo:block>
- <fo:block>
- before<fo:character character='@' glyph-orientation-horizontal='0deg' vertical-align='3pt'/>after
- before<fo:character character='@' glyph-orientation-horizontal='90deg' vertical-align='3pt'/>after
- before<fo:character character='@' glyph-orientation-horizontal='180deg' vertical-align='3pt'/>after
- before<fo:character character='@' glyph-orientation-horizontal='270deg' vertical-align='3pt'/>after
- </fo:block>
- <fo:block>
- before<fo:character character='@' vertical-align='super' text-altitude='10pt'/>after
- before<fo:character character='@' vertical-align='super' text-altitude='20pt'/>after
- before<fo:character character='@' vertical-align='100%' text-altitude='20pt'/>after
- before<fo:character character='@' vertical-align='sub' text-depth='10pt' />after
- before<fo:character character='@' vertical-align='sub' text-depth='20pt' />after
- before<fo:character character='@' vertical-align='-100%' text-depth='20pt' />after
- </fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0601.fo b/tex/context/foxet/fo-0601.fo
deleted file mode 100644
index 1e291f278..000000000
--- a/tex/context/foxet/fo-0601.fo
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test'
- margin-top='1cm'
- margin-bottom='2cm'
- margin-left='3cm'
- margin-right='4cm'>
- <fo:region-before extent='1cm' background-color='red' />
- <fo:region-after extent='1cm' background-color='green' />
- <fo:region-start extent='1cm' background-color='blue' />
- <fo:region-end extent='1cm' background-color='yellow'/>
- <fo:region-body margin='2cm' background-color='gray' />
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0602.fo b/tex/context/foxet/fo-0602.fo
deleted file mode 100644
index 36e864767..000000000
--- a/tex/context/foxet/fo-0602.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test'
- margin-top='1cm' margin-bottom='2cm' margin-left='3cm' margin-right='4cm'>
- <fo:region-body margin='2cm' background-color='gray' />
- <fo:region-before extent='1cm' background-color='red' precedence='true'/>
- <fo:region-after extent='1cm' background-color='green' />
- <fo:region-start extent='1cm' background-color='blue' />
- <fo:region-end extent='1cm' background-color='yellow'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0603.fo b/tex/context/foxet/fo-0603.fo
deleted file mode 100644
index 268249d3e..000000000
--- a/tex/context/foxet/fo-0603.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test'
- margin-top='1cm' margin-bottom='2cm' margin-left='3cm' margin-right='4cm'>
- <fo:region-body margin='2cm' background-color='gray' />
- <fo:region-before extent='1cm' background-color='red' />
- <fo:region-after extent='1cm' background-color='green' precedence='true'/>
- <fo:region-start extent='1cm' background-color='blue' />
- <fo:region-end extent='1cm' background-color='yellow'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0604.fo b/tex/context/foxet/fo-0604.fo
deleted file mode 100644
index 891198ee0..000000000
--- a/tex/context/foxet/fo-0604.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test'
- margin-top='1cm' margin-bottom='2cm' margin-left='3cm' margin-right='4cm'>
- <fo:region-body margin='2cm' background-color='gray' />
- <fo:region-before extent='1cm' background-color='red' precedence='true'/>
- <fo:region-after extent='1cm' background-color='green' precedence='true'/>
- <fo:region-start extent='1cm' background-color='blue' />
- <fo:region-end extent='1cm' background-color='yellow'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0611.fo b/tex/context/foxet/fo-0611.fo
deleted file mode 100644
index 70f495fb6..000000000
--- a/tex/context/foxet/fo-0611.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml' n='15'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0612.fo b/tex/context/foxet/fo-0612.fo
deleted file mode 100644
index 4b3de6940..000000000
--- a/tex/context/foxet/fo-0612.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm' column-count='2' column-gap='24pt'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml' n='25'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0613.fo b/tex/context/foxet/fo-0613.fo
deleted file mode 100644
index f478a571d..000000000
--- a/tex/context/foxet/fo-0613.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='1.25cm'>
- <fo:region-body margin='0cm' column-count='3' column-gap='12pt'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml' n='15'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0621.fo b/tex/context/foxet/fo-0621.fo
deleted file mode 100644
index 554fcc4a0..000000000
--- a/tex/context/foxet/fo-0621.fo
+++ /dev/null
@@ -1,106 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root
- xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='any' margin='2cm'>
- <fe:message>setting up simple page master 'any'</fe:message>
- <fo:region-body background-color='darkgray' margin-top='2cm' margin-bottom='2cm' padding='.5cm'/>
- <fo:region-before background-color='darkgray' extent='1cm'/>
- <fo:region-after background-color='darkgray' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='first-page' margin='2cm'>
- <fe:message>setting up simple page master 'first-page'</fe:message>
- <fo:region-body background-color='red' margin='2cm' padding='.5cm'/>
- <fo:region-before background-color='red' extent='1cm'/>
- <fo:region-after background-color='red' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='left-page' margin='2cm'>
- <fe:message>setting up simple page master 'left-page'</fe:message>
- <fo:region-body background-color='green' margin='2cm' padding='.5cm'/>
- <fo:region-before background-color='green' extent='1cm'/>
- <fo:region-after background-color='green' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='right-page' margin='2cm'>
- <fe:message>setting up simple page master 'right-page'</fe:message>
- <fo:region-body background-color='blue' margin='2cm' padding='.5cm'/>
- <fo:region-before background-color='blue' extent='1cm'/>
- <fo:region-after background-color='blue' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='blank-page' margin='2cm'>
- <fe:message>setting up simple page master 'blank-page'</fe:message>
- <fo:region-body background-color='black' margin='2cm' padding='.5cm'/>
- <fo:region-before background-color='black' extent='1cm'/>
- <fo:region-after background-color='black' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='odd' margin='2cm'>
- <fe:message>setting up simple page master 'odd'</fe:message>
- <fo:region-body background-color='cyan' margin='2cm' padding='.5cm' margin-right='2cm'/>
- <fo:region-before background-color='cyan' extent='1cm'/>
- <fo:region-after background-color='cyan' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='even' margin='2cm'>
- <fe:message>setting up simple page master 'even'</fe:message>
- <fo:region-body background-color='magenta' margin='2cm' padding='.5cm' margin-left='2cm'/>
- <fo:region-before background-color='magenta' extent='1cm'/>
- <fo:region-after background-color='magenta' extent='1cm'/>
- </fo:simple-page-master>
- <fo:simple-page-master master-name='rest' margin='2cm'>
- <fe:message>setting up simple page master 'rest'</fe:message>
- <fo:region-body background-color='yellow' margin='2cm' padding='.5cm'/>
- <fo:region-before background-color='yellow' extent='1cm'/>
- <fo:region-after background-color='yellow' extent='1cm'/>
- </fo:simple-page-master>
-
- <fo:page-sequence-master master-name='demo'>
- <fe:message>setting up page sequence master 'demo'</fe:message>
- <fo:repeatable-page-master-alternatives>
- <fo:conditional-page-master-reference page-position='first' master-reference='first-page'/>
- <fo:conditional-page-master-reference page-position='last' master-reference='blank-page'/>
- <fo:conditional-page-master-reference page-position='any' master-reference='odd' odd-or-even='odd'/>
- <fo:conditional-page-master-reference page-position='any' master-reference='even' odd-or-even='even'/>
- </fo:repeatable-page-master-alternatives>
- </fo:page-sequence-master>
- <fo:page-sequence-master master-name='omed'>
- <fe:message>setting up page sequence master 'omed'</fe:message>
- <fo:repeatable-page-master-alternatives>
- <fo:conditional-page-master-reference page-position='first' master-reference='first-page'/>
- <fo:conditional-page-master-reference page-position='last' master-reference='blank-page'/>
- <fo:conditional-page-master-reference page-position='any' master-reference='odd' odd-or-even='even'/>
- <fo:conditional-page-master-reference page-position='any' master-reference='even' odd-or-even='odd'/>
- </fo:repeatable-page-master-alternatives>
- </fo:page-sequence-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='any'>
- <fe:message>starting page sequence 'any'</fe:message>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml' n='15'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='demo' force-page-count='end-on-odd'>
- <fe:message>starting page sequence 'demo'</fe:message>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-bryson.xml' n='30'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='omed' force-page-count='end-on-odd'>
- <fe:message>starting page sequence 'omed'</fe:message>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-ward.xml' n='30'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0641.fo b/tex/context/foxet/fo-0641.fo
deleted file mode 100644
index 9dbb90870..000000000
--- a/tex/context/foxet/fo-0641.fo
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm' background-color='gray' fe:background-height='1cm' background-image="url('hacker.jpg')" clip='rect(1cm,1cm,1cm,1cm)' />
- <fo:region-before extent='2cm' background-color='red' fe:background-height='1cm' background-image="url('hacker.jpg')" background-repeat='repeat-x' />
- <fo:region-after extent='2cm' background-color='green' fe:background-height='2cm' background-image="url('hacker.jpg')" background-repeat='repeat-y' />
- <fo:region-start extent='2cm' background-color='blue' fe:background-height='1cm' background-image="url('hacker.jpg')" background-repeat='no-repeat'/>
- <fo:region-end extent='2cm' background-color='yellow' fe:background-height='2cm' background-image="url('hacker.jpg')" background-repeat='repeat' />
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0642.fo b/tex/context/foxet/fo-0642.fo
deleted file mode 100644
index 8f646509c..000000000
--- a/tex/context/foxet/fo-0642.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'
- background-color='gray'
- background-image="url('hacker.jpg')"
- background-position-horizontal='10%'
- background-position-vertical='20%'
- background-repeat='no-repeat'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0643.fo b/tex/context/foxet/fo-0643.fo
deleted file mode 100644
index 9595d4438..000000000
--- a/tex/context/foxet/fo-0643.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'
- background-color='gray'
- background-image="url('hacker.jpg')"
- background-position-horizontal='center'
- background-position-vertical='center'
- background-repeat='no-repeat'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0644.fo b/tex/context/foxet/fo-0644.fo
deleted file mode 100644
index a408661e0..000000000
--- a/tex/context/foxet/fo-0644.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format'
- xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'
- background-color='gray'
- background-image="url('hacker.jpg')"
- background-position-horizontal='3cm'
- background-position-vertical='5cm'
- background-repeat='no-repeat'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0650.fo b/tex/context/foxet/fo-0650.fo
deleted file mode 100644
index 54fd7c128..000000000
--- a/tex/context/foxet/fo-0650.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test'
- margin-top='1cm' margin-bottom='2cm' margin-left='3cm' margin-right='4cm'>
- <fo:region-body margin='.5cm' background-color='gray' fe:z-order='above' />
- <fo:region-before extent='1cm' background-color='red' />
- <fo:region-after extent='1cm' background-color='green' />
- <fo:region-start extent='1cm' background-color='blue' />
- <fo:region-end extent='1cm' background-color='yellow'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0651.fo b/tex/context/foxet/fo-0651.fo
deleted file mode 100644
index 319592ba8..000000000
--- a/tex/context/foxet/fo-0651.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test'
- margin-top='1cm' margin-bottom='2cm' margin-left='3cm' margin-right='4cm'>
- <fo:region-body margin='.5cm' background-color='gray' fe:z-order='below' />
- <fo:region-before extent='1cm' background-color='red' />
- <fo:region-after extent='1cm' background-color='green' />
- <fo:region-start extent='1cm' background-color='blue' />
- <fo:region-end extent='1cm' background-color='yellow'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0701.fo b/tex/context/foxet/fo-0701.fo
deleted file mode 100644
index fabbe0722..000000000
--- a/tex/context/foxet/fo-0701.fo
+++ /dev/null
@@ -1,39 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master margin='2cm' master-name='default'>
- <fo:region-body background-color='darkgray'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='default' fe:tracing='true'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container absolute-position='absolute' height='2cm' width='3cm' left='1cm' top='1cm'>
- <fo:block>Test 1</fo:block>
- </fo:block-container>
- <fo:block-container absolute-position='absolute' height='2cm' left='1cm' top='6cm'>
- <fo:block>Test 2</fo:block>
- </fo:block-container>
- <fo:block-container absolute-position='absolute' height='2cm' left='1cm' top='11cm' right='4cm'>
- <fo:block>Test 3</fo:block>
- </fo:block-container>
- <fo:block-container absolute-position='absolute' height='2cm' width='3cm' right='1cm' bottom='1cm'>
- <fo:block>Test 4</fo:block>
- </fo:block-container>
- <fo:block-container absolute-position='absolute' width='3cm' right='1cm' bottom='6cm'>
- <fo:block>Test 5</fo:block>
- </fo:block-container>
- <fo:block-container absolute-position='absolute' width='3cm' right='1cm' bottom='11cm' top='7cm'>
- <fo:block>Test 6</fo:block>
- </fo:block-container>
- <fo:block-container absolute-position='absolute' left='20%' right='20%' top='20%' bottom='20%'>
- <fo:block>Test 7</fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0801.fo b/tex/context/foxet/fo-0801.fo
deleted file mode 100644
index b18c4282f..000000000
--- a/tex/context/foxet/fo-0801.fo
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123' format='n'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block id='format n'>digits: <fo:page-number/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='test' initial-page-number='123' format='a'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block id='format a'>characters: <fo:page-number/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='test' initial-page-number='123' format='i'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block id='format i'>romannumerals: <fo:page-number/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='test' initial-page-number='123' format='-[(n)]-'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block id='complex format'>complex format: <fo:page-number/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block id='no format'>no format: <fo:page-number/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container space-after='12pt' id='y'>
- <fo:block>digits: <fo:page-number-citation ref-id='format n'/> </fo:block>
- <fo:block>characters: <fo:page-number-citation ref-id='format a'/> </fo:block>
- <fo:block>romannumerals: <fo:page-number-citation ref-id='format i'/> </fo:block>
- <fo:block>complex format: <fo:page-number-citation ref-id='complex format'/> </fo:block>
- <fo:block>no format: <fo:page-number-citation ref-id='no format'/> </fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0901.fo b/tex/context/foxet/fo-0901.fo
deleted file mode 100644
index 05011269b..000000000
--- a/tex/context/foxet/fo-0901.fo
+++ /dev/null
@@ -1,58 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='.5cm'>
- <fo:region-body margin='.5cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container font-style='italic'>
- <fo:block font-family='Times'>times</fo:block>
- <fo:block font-family='Helvetica'>helvetica</fo:block>
- <fo:block font-family='Courier'>courier</fo:block>
- </fo:block-container>
- <fo:block-container font-style='italic'>
- <fo:block font-family='Times'>times</fo:block>
- <fo:block font-family='Helvetica'>helvetica</fo:block>
- <fo:block font-family='Courier'>courier</fo:block>
- </fo:block-container>
- <fo:block-container font-weight='bold'>
- <fo:block font-family='Times'>times</fo:block>
- <fo:block font-family='Helvetica'>helvetica</fo:block>
- <fo:block font-family='Courier'>courier</fo:block>
- </fo:block-container>
- <fo:block-container font-style='italic'>
- <fo:block font-family='Times'>times</fo:block>
- <fo:block font-family='Helvetica'>helvetica</fo:block>
- <fo:block font-family='Courier'>courier</fo:block>
- </fo:block-container>
- <fo:block-container font-style='italic' font-weight='bold' font-size='large'>
- <fo:block font-family='Times'>large</fo:block>
- <fo:block font-family='Helvetica'>large</fo:block>
- <fo:block font-family='Courier'>large</fo:block>
- </fo:block-container>
- <fo:block-container font-style='italic' font-weight='bold' font-size='x-large'>
- <fo:block font-family='Times'>x-large</fo:block>
- <fo:block font-family='Helvetica'>x-large</fo:block>
- <fo:block font-family='Courier'>x-large</fo:block>
- </fo:block-container>
- <fo:block-container font-style='italic' font-weight='bold' font-size='xx-large'>
- <fo:block font-family='Times'>xx-large</fo:block>
- <fo:block font-family='Helvetica'>xx-large</fo:block>
- <fo:block font-family='Courier'>xx-large</fo:block>
- </fo:block-container>
- <fo:block-container font-style='italic' font-weight='bold' font-size='350%'>
- <fo:block font-family='Times'>times</fo:block>
- <fo:block font-family='Helvetica'>helvetica</fo:block>
- <fo:block font-family='Courier'>courier</fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-0902.fo b/tex/context/foxet/fo-0902.fo
deleted file mode 100644
index ebaa06651..000000000
--- a/tex/context/foxet/fo-0902.fo
+++ /dev/null
@@ -1,33 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='.5cm'>
- <fo:region-body margin='.5cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:block-container>
- <fo:block-container line-height='40pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:block-container>
- <fo:block-container line-height='180%'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:block-container>
- <fo:block-container line-height='1.2'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:block-container>
- <fo:block-container line-height='normal'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1001.fo b/tex/context/foxet/fo-1001.fo
deleted file mode 100644
index 3733265fd..000000000
--- a/tex/context/foxet/fo-1001.fo
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container fe:tracing='true'>
- <fo:block>a test line d 10pt</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='discard'/>
- <fo:block>a test line d 5pt</fo:block>
- <fo:block space-before.optimum='5pt' space-before.conditionality='discard'/>
- <fo:block>a test line d 4pt</fo:block>
- <fo:block space-before.optimum='4pt' space-before.conditionality='discard'/>
- <fo:block>a test line d d d</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='4pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='5pt' space-before.conditionality='discard'/>
- <fo:block>a test line r d d</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='4pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='5pt' space-before.conditionality='discard'/>
- <fo:block>a test line d r d</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='4pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='5pt' space-before.conditionality='discard'/>
- <fo:block>a test line r r d</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='4pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='5pt' space-before.conditionality='discard'/>
- <fo:block>a test line d d r</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='4pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='5pt' space-before.conditionality='retain' />
- <fo:block>a test line r d r</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='4pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='5pt' space-before.conditionality='retain' />
- <fo:block>a test line d r r</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='discard'/>
- <fo:block space-before.optimum='4pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='5pt' space-before.conditionality='retain' />
- <fo:block>a test line r r r</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='4pt' space-before.conditionality='retain' />
- <fo:block space-before.optimum='5pt' space-before.conditionality='retain' />
- <fo:block>a test line d r r force</fo:block>
- <fo:block space-before.optimum='10pt' space-before.conditionality='discard' space-before.precedence='force'/>
- <fo:block space-before.optimum='4pt' space-before.conditionality='retain' space-before.precedence='force'/>
- <fo:block space-before.optimum='5pt' space-before.conditionality='retain' space-before.precedence='force'/>
- <fo:block>a test line</fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1002.fo b/tex/context/foxet/fo-1002.fo
deleted file mode 100644
index cc0ea8100..000000000
--- a/tex/context/foxet/fo-1002.fo
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block start-indent='1cm'>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- <fo:block start-indent='1cm' end-indent='1cm'>
- <fe:include url='fe-zapf.xml'/>
- <fo:block start-indent='1cm' end-indent='1cm'>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- <fo:block end-indent='1cm'>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1003.fo b/tex/context/foxet/fo-1003.fo
deleted file mode 100644
index 4c416e02d..000000000
--- a/tex/context/foxet/fo-1003.fo
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block start-indent='1cm' text-indent='3cm'>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- <fo:block start-indent='1cm' end-indent='1cm' text-indent='1cm'>
- <fe:include url='fe-zapf.xml'/>
- <fo:block start-indent='1cm' end-indent='1cm' text-indent='1cm'>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- <fo:block end-indent='1cm' text-indent='3cm'>
- <fe:include url='fe-zapf.xml'/>
- </fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1004.fo b/tex/context/foxet/fo-1004.fo
deleted file mode 100644
index 1c057edd7..000000000
--- a/tex/context/foxet/fo-1004.fo
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='2cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' initial-page-number='123'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block-container space-before.optimum='12pt'>
- <fo:block>
- before<fo:inline space-start='10pt' >inbetween</fo:inline>after
- </fo:block>
- <fo:block>
- before<fo:inline space-end='10pt'>inbetween</fo:inline>after
- </fo:block>
- <fo:block>
- before<fo:inline space-start='10pt' space-end='10pt'>inbetween</fo:inline>after
- </fo:block>
- <fo:block>
- before<fo:inline space-start='-10pt' space-end='-10pt'>inbetween</fo:inline>after
- </fo:block>
- <fo:block>
- before<fo:inline space-start='250%' space-end='250%'>inbetween</fo:inline>after
- </fo:block>
- </fo:block-container>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1101.fo b/tex/context/foxet/fo-1101.fo
deleted file mode 100644
index f6953d522..000000000
--- a/tex/context/foxet/fo-1101.fo
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='1cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' fe:tracing='true'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- <fo:list-block
- space-before.optimum='12pt'
- space-after.optimum='12pt'>
- <fo:list-item>
- <fo:list-item-label start-indent='10pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='10pt'>
- <fo:block>[b]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body end-indent='0pt'>
- <fo:list-block>
- <fo:list-item>
- <fo:list-item-label start-indent='40pt'>
- <fo:block>[bb]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- </fo:list-block>
- </fo:list-item-body>
- </fo:list-item>
- </fo:list-block>
- <fo:list-block
- start-indent='1cm'
- end-indent='1cm'
- space-before.optimum='12pt'
- space-after.optimum='12pt'>
- <fo:list-item>
- <fo:list-item-label start-indent='10pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- </fo:list-block>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1102.fo b/tex/context/foxet/fo-1102.fo
deleted file mode 100644
index f8d5a9df3..000000000
--- a/tex/context/foxet/fo-1102.fo
+++ /dev/null
@@ -1,128 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='1cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' fe:tracing='true' fe:testgrid='true'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:list-block
- space-before.optimum='12pt'
- space-after.optimum='12pt'
- provisional-distance-between-starts='2cm'
- provisional-label-separation='.5cm'>
- <!-- fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='50pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='50pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='50pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='50pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='50pt' end-indent='50pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='50pt' end-indent='50pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='50pt' end-indent='50pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='50pt' end-indent='50pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='10%' end-indent='75%'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='35%' end-indent='10%'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item -->
- <fo:list-item>
- <fo:list-item-label start-indent='1cm' end-indent='10cm'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='4cm' end-indent='1cm'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='1cm' end-indent='label-end()'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='5cm' end-indent='1cm'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='1cm' end-indent='10cm'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='body-start()' end-indent='2cm'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='1cm' end-indent='label-end()'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='body-start()' end-indent='3cm'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- </fo:list-block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1103.fo b/tex/context/foxet/fo-1103.fo
deleted file mode 100644
index 245f3ff0e..000000000
--- a/tex/context/foxet/fo-1103.fo
+++ /dev/null
@@ -1,85 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='1cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' fe:tracing='true'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:list-block space-before.optimum='12pt' space-after.optimum='12pt'
- start-indent='10pt' end-indent='10pt'>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='10pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='10pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='10pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='10pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='10pt' end-indent='10pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='0pt' end-indent='0pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='0pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='10pt' end-indent='10pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- <fo:list-item>
- <fo:list-item-label start-indent='10pt' end-indent='0pt'>
- <fo:block>[a]</fo:block>
- </fo:list-item-label>
- <fo:list-item-body start-indent='10pt' end-indent='10pt'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- </fo:list-block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1104.fo b/tex/context/foxet/fo-1104.fo
deleted file mode 100644
index 6867e772f..000000000
--- a/tex/context/foxet/fo-1104.fo
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='1cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:list-block>
- <fo:list-item>
- <fo:list-item-label>
- <fo:block text-align="center">X</fo:block>
- </fo:list-item-label>
- <fo:list-item-body>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:list-item-body>
- </fo:list-item>
- </fo:list-block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/foxet/fo-1201.fo b/tex/context/foxet/fo-1201.fo
deleted file mode 100644
index 364dcb214..000000000
--- a/tex/context/foxet/fo-1201.fo
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version='1.0'?>
-
-<!-- foxet test file - Hans Hagen - PRAGMA ADE - www.pragma-ade.com -->
-
-<fo:root xmlns:fo='http://www.w3.org/1999/XSL/Format' xmlns:fe='http://www.pragma-ade.com/schemas/foxet.rng'>
-
- <fo:layout-master-set>
- <fo:simple-page-master master-name='test' margin='2cm'>
- <fo:region-body margin='1cm'/>
- </fo:simple-page-master>
- </fo:layout-master-set>
-
- <fo:page-sequence master-reference='test' fe:tracing='true'>
- <fo:flow flow-name='xsl-region-body'>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- <fo:block>
- test
- <fo:inline position='static' font-size='600%'>S</fo:inline>
- <fo:inline position='static' font-size='400%'>S</fo:inline>
- <fo:inline position='static' font-size='500%'>S</fo:inline>
- <fo:inline position='fixed' right='2cm' bottom='2cm' font-size='600%'>F</fo:inline>
- <fo:inline position='fixed' right='2cm' top='2cm' font-size='400%'>F</fo:inline>
- <fo:inline position='fixed' left='2cm' bottom='2cm' font-size='500%'>F</fo:inline>
- <fo:inline position='absolute' right='1cm' bottom='1cm' font-size='600%'>A</fo:inline>
- <fo:inline position='absolute' right='1cm' top='1cm' font-size='400%'>A</fo:inline>
- <fo:inline position='absolute' left='1cm' bottom='1cm' font-size='500%'>A</fo:inline>
- <fo:inline position='relative' right='2mm' bottom='2mm' font-size='600%'>R</fo:inline>
- <fo:inline position='relative' right='2mm' top='2mm' font-size='400%'>R</fo:inline>
- <fo:inline position='relative' left='2mm' bottom='2mm' font-size='500%'>R</fo:inline>
- test
- <fo:inline position='static' font-size='600%'>Q</fo:inline>
- <fo:inline position='relative' right='2mm' bottom='2mm' font-size='600%'>Q</fo:inline>
- <fo:inline position='relative' right='2mm' top='2mm' font-size='400%'>Q</fo:inline>
- <fo:inline position='relative' left='2mm' bottom='2mm' font-size='500%'>Q</fo:inline>
- </fo:block>
- <fo:block><fe:include url='fe-zapf.xml'/></fo:block>
- </fo:flow>
- </fo:page-sequence>
-
-</fo:root>
diff --git a/tex/context/interface/cont-cs.xml b/tex/context/interface/cont-cs.xml
index afaacb709..c798ea4a9 100644
--- a/tex/context/interface/cont-cs.xml
+++ b/tex/context/interface/cont-cs.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerantni"/>
<cd:constant type="velmitolerantni"/>
<cd:constant type="natahnout"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml
index e9771d07a..f40927b45 100644
--- a/tex/context/interface/cont-de.xml
+++ b/tex/context/interface/cont-de.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="sehrtolerant"/>
<cd:constant type="strecken"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml
index f00215596..651292e7f 100644
--- a/tex/context/interface/cont-en.xml
+++ b/tex/context/interface/cont-en.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="verytolerant"/>
<cd:constant type="stretch"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml
index 1cd7fa33a..30a89c059 100644
--- a/tex/context/interface/cont-fr.xml
+++ b/tex/context/interface/cont-fr.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="trestolerant"/>
<cd:constant type="etire"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml
index d9fe0ac97..32d19e87f 100644
--- a/tex/context/interface/cont-it.xml
+++ b/tex/context/interface/cont-it.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tollerante"/>
<cd:constant type="moltotollerante"/>
<cd:constant type="dilata"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index 685033f81..6c49baccd 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -6525,7 +6525,7 @@
<cd:parameter name="maxbreedte">
<cd:constant type="cd:dimension"/>
</cd:parameter>
- <cd:parameter name="onbekendeverwijzing">
+ <cd:parameter name="onbekendereferentie">
<cd:constant type="leeg"/>
<cd:constant type="geen"/>
</cd:parameter>
@@ -7188,6 +7188,7 @@
<cd:constant type="soepel"/>
<cd:constant type="zeersoepel"/>
<cd:constant type="rek"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
@@ -8996,7 +8997,7 @@
<cd:constant type="nee"/>
<cd:constant type="geen"/>
</cd:parameter>
- <cd:parameter name="onbekendeverwijzing">
+ <cd:parameter name="onbekendereferentie">
<cd:constant type="ja"/>
<cd:constant type="leeg"/>
<cd:constant type="nee"/>
diff --git a/tex/context/interface/cont-pe.xml b/tex/context/interface/cont-pe.xml
index 434a328e8..bf7d65fd0 100644
--- a/tex/context/interface/cont-pe.xml
+++ b/tex/context/interface/cont-pe.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="بردبار"/>
<cd:constant type="خیلی‌بردبار"/>
<cd:constant type="بکش"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml
index 31ef5d920..b81c3bc7e 100644
--- a/tex/context/interface/cont-ro.xml
+++ b/tex/context/interface/cont-ro.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="foartetolerant"/>
<cd:constant type="dilatat"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index d36f969f3..c8e7e7517 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='flushleft'/>
<cd:variable name='flushouter' value='flushouter'/>
<cd:variable name='flushright' value='flushright'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='upati'/>
<cd:variable name='footnote' value='poznamkapodcarou'/>
<cd:variable name='force' value='sila'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='zrcadleno'/>
<cd:variable name='monday' value='pondeli'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monotucne'/>
<cd:variable name='month' value='mesic'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positiv'/>
<cd:variable name='postponing' value='odlozit'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='nastaveni'/>
<cd:variable name='preview' value='nahled'/>
<cd:variable name='previous' value='predchozi'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='zari'/>
<cd:variable name='serif' value='serif'/>
<cd:variable name='serried' value='semknuto'/>
- <cd:variable name='setups' value='einstellungen'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='sheet'/>
<cd:variable name='short' value='short'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='propojeni'/>
<cd:constant name='couplingway' value='zpusobpropojeni'/>
<cd:constant name='criterium' value='kriterium'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='aktualni'/>
<cd:constant name='cutspace' value='cutspace'/>
<cd:constant name='dash' value='pomlcka'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='sudamarginalie'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='expanzen'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='soubor'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='prvnistranka'/>
<cd:constant name='focus' value='zaostreni'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='odsazovani'/>
<cd:constant name='indentnext' value='odsadpristi'/>
<cd:constant name='indicator' value='indikator'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='vnitrni'/>
<cd:constant name='innermargin' value='innermargin'/>
<cd:constant name='inputfile' value='inputfile'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='nvpravo'/>
<cd:constant name='ntop' value='nvrsek'/>
<cd:constant name='number' value='cislo'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='barvacisla'/>
<cd:constant name='numbercommand' value='ciselnyprikaz'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definujsablonutabulky'/>
<cd:command name='definetabulate' value='definujtabelaci'/>
<cd:command name='definetext' value='definujtext'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='definetextposition'/>
<cd:command name='definetextvariable' value='definetextvariable'/>
<cd:command name='definetype' value='definetype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='vyskapapiru'/>
<cd:command name='paperwidth' value='sirkapapiru'/>
<cd:command name='periods' value='tecky'/>
- <cd:command name='plaatsruwelijst' value='placerawlist'/>
<cd:command name='placebookmarks' value='umistizalozky'/>
<cd:command name='placecombinedlist' value='umistikombinovanyseznam'/>
<cd:command name='placefloat' value='placefloat'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='placeheadtext'/>
<cd:command name='placelegend' value='umistilegendu'/>
<cd:command name='placelist' value='umistiseznam'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='umistilokalnipoznamkypodcarou'/>
<cd:command name='placelogos' value='umistiloga'/>
<cd:command name='placeongrid' value='umistinamrizku'/>
<cd:command name='placeontopofeachother' value='umistinadsebe'/>
<cd:command name='placepagenumber' value='placepagenumber'/>
+ <cd:command name='placerawlist' value='placerawlist'/>
<cd:command name='placereferencelist' value='placereferencelist'/>
<cd:command name='placeregister' value='umistirejstrik'/>
<cd:command name='placerule' value='placerule'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='nastavrejstrik'/>
<cd:command name='setuprotate' value='nastavotoceni'/>
<cd:command name='setuprule' value='setuprule'/>
- <cd:command name='setups' value='nastaveni'/>
<cd:command name='setupscreens' value='nastavrastr'/>
<cd:command name='setupsection' value='nastavsekci'/>
<cd:command name='setupsectionblock' value='nastavbloksekce'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='nastavtabulky'/>
<cd:command name='setuptabulate' value='nastavtabelaci'/>
<cd:command name='setuptext' value='nastavtext'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='setuptextposition'/>
<cd:command name='setuptextrules' value='nastavtextovelinky'/>
<cd:command name='setuptexttexts' value='nastavtexttexty'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='startdokument'/>
<cd:command name='startenvironment' value='startprostredi'/>
<cd:command name='startfigure' value='startobraz'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='startglobalni'/>
<cd:command name='startline' value='startradek'/>
<cd:command name='startlinecorrection' value='startkorekceradku'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='starttabulka'/>
<cd:command name='starttables' value='starttabulky'/>
<cd:command name='starttext' value='starttext'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='starttextovalinka'/>
<cd:command name='startunpacked' value='startnezhustene'/>
<cd:command name='startversion' value='startverze'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='stopkomponenta'/>
<cd:command name='stopdocument' value='stopdokument'/>
<cd:command name='stopenvironment' value='stopprostredi'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='stopglobalni'/>
<cd:command name='stopline' value='stopradek'/>
<cd:command name='stoplinecorrection' value='stopkorekceradku'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='stoptabulka'/>
<cd:command name='stoptables' value='stoptabulky'/>
<cd:command name='stoptext' value='stoptext'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='stoptextovalinka'/>
<cd:command name='stopunpacked' value='stopnezhustene'/>
<cd:command name='stopversion' value='stopverze'/>
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index c5ba364e3..a100a938f 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='flushleft'/>
<cd:variable name='flushouter' value='flushouter'/>
<cd:variable name='flushright' value='flushright'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='fusszeile'/>
<cd:variable name='footnote' value='fussnote'/>
<cd:variable name='force' value='zwinge'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='gespiegelt'/>
<cd:variable name='monday' value='montag'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monofett'/>
<cd:variable name='month' value='monat'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positiv'/>
<cd:variable name='postponing' value='verschieben'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='einstellung'/>
<cd:variable name='preview' value='vorschau'/>
<cd:variable name='previous' value='vorig'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='september'/>
<cd:variable name='serif' value='serif'/>
<cd:variable name='serried' value='kleinerabstand'/>
- <cd:variable name='setups' value='impostazioni'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='sheet'/>
<cd:variable name='short' value='kurz'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='verknuepfung'/>
<cd:constant name='couplingway' value='verkopplungsart'/>
<cd:constant name='criterium' value='kriterium'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='aktuell'/>
<cd:constant name='cutspace' value='cutspace'/>
<cd:constant name='dash' value='strich'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='geraderand'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='expansion'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='datei'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='ersteseite'/>
<cd:constant name='focus' value='focus'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='einziehen'/>
<cd:constant name='indentnext' value='ziehefolgendeein'/>
<cd:constant name='indicator' value='indikator'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='innen'/>
<cd:constant name='innermargin' value='innermargin'/>
<cd:constant name='inputfile' value='inputfile'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='nrechts'/>
<cd:constant name='ntop' value='noben'/>
<cd:constant name='number' value='nummer'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='nummernfarbe'/>
<cd:constant name='numbercommand' value='nummerbefehl'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definieretabellenvorlage'/>
<cd:command name='definetabulate' value='definieretabulator'/>
<cd:command name='definetext' value='definieretext'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='definetextposition'/>
<cd:command name='definetextvariable' value='definetextvariable'/>
<cd:command name='definetype' value='definetype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='papierhoehe'/>
<cd:command name='paperwidth' value='papierbreite'/>
<cd:command name='periods' value='punkt'/>
- <cd:command name='plaatsruwelijst' value='placerawlist'/>
<cd:command name='placebookmarks' value='platzierebookmarks'/>
<cd:command name='placecombinedlist' value='platzierezusammengestellteliste'/>
<cd:command name='placefloat' value='placefloat'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='placeheadtext'/>
<cd:command name='placelegend' value='platzierelegende'/>
<cd:command name='placelist' value='platziereliste'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='platzierelokalefussnoten'/>
<cd:command name='placelogos' value='platzierelogo'/>
<cd:command name='placeongrid' value='amgitterausrichten'/>
<cd:command name='placeontopofeachother' value='platziereuntereinander'/>
<cd:command name='placepagenumber' value='placepagenumber'/>
+ <cd:command name='placerawlist' value='placerawlist'/>
<cd:command name='placereferencelist' value='placereferencelist'/>
<cd:command name='placeregister' value='platziereregister'/>
<cd:command name='placerule' value='placerule'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='stelleregisterein'/>
<cd:command name='setuprotate' value='stelledrehenein'/>
<cd:command name='setuprule' value='setuprule'/>
- <cd:command name='setups' value='einstellungen'/>
<cd:command name='setupscreens' value='stellerasterein'/>
<cd:command name='setupsection' value='stelleabschnittein'/>
<cd:command name='setupsectionblock' value='stelleabschnittsblockein'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='stelletabellenein'/>
<cd:command name='setuptabulate' value='stelletabulatorein'/>
<cd:command name='setuptext' value='stelletextein'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='setuptextposition'/>
<cd:command name='setuptextrules' value='stelletextumrissein'/>
<cd:command name='setuptexttexts' value='stelletexttexteein'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='startdokument'/>
<cd:command name='startenvironment' value='startumgebung'/>
<cd:command name='startfigure' value='startabbildung'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='startglobal'/>
<cd:command name='startline' value='startzeile'/>
<cd:command name='startlinecorrection' value='startzeilenkorrektur'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='starttabelle'/>
<cd:command name='starttables' value='starttabellen'/>
<cd:command name='starttext' value='starttext'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='starttextlinie'/>
<cd:command name='startunpacked' value='startgrosserdurchschuss'/>
<cd:command name='startversion' value='startversion'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='stopkomponente'/>
<cd:command name='stopdocument' value='stopdokument'/>
<cd:command name='stopenvironment' value='stopumgebung'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='stopglobal'/>
<cd:command name='stopline' value='stopzeile'/>
<cd:command name='stoplinecorrection' value='stopzeilenkorrektur'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='stoptabelle'/>
<cd:command name='stoptables' value='stoptabellen'/>
<cd:command name='stoptext' value='stoptext'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='stoptextlinie'/>
<cd:command name='stopunpacked' value='stopgrosserdurchschuss'/>
<cd:command name='stopversion' value='stopversion'/>
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index be59542e7..0e51dfc7c 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='flushleft'/>
<cd:variable name='flushouter' value='flushouter'/>
<cd:variable name='flushright' value='flushright'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='footer'/>
<cd:variable name='footnote' value='footnote'/>
<cd:variable name='force' value='force'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='mirrored'/>
<cd:variable name='monday' value='monday'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monobold'/>
<cd:variable name='month' value='month'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positive'/>
<cd:variable name='postponing' value='postponing'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='preference'/>
<cd:variable name='preview' value='preview'/>
<cd:variable name='previous' value='previous'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='coupling'/>
<cd:constant name='couplingway' value='couplingway'/>
<cd:constant name='criterium' value='criterium'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='current'/>
<cd:constant name='cutspace' value='cutspace'/>
<cd:constant name='dash' value='dash'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='evenmargin'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='expansion'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='file'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='firstpage'/>
<cd:constant name='focus' value='focus'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='indenting'/>
<cd:constant name='indentnext' value='indentnext'/>
<cd:constant name='indicator' value='indicator'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='inner'/>
<cd:constant name='innermargin' value='innermargin'/>
<cd:constant name='inputfile' value='inputfile'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='nright'/>
<cd:constant name='ntop' value='ntop'/>
<cd:constant name='number' value='number'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='numbercolor'/>
<cd:constant name='numbercommand' value='numbercommand'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definetabletemplate'/>
<cd:command name='definetabulate' value='definetabulate'/>
<cd:command name='definetext' value='definetext'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='definetextposition'/>
<cd:command name='definetextvariable' value='definetextvariable'/>
<cd:command name='definetype' value='definetype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='paperheight'/>
<cd:command name='paperwidth' value='paperwidth'/>
<cd:command name='periods' value='periods'/>
- <cd:command name='plaatsruwelijst' value='placerawlist'/>
<cd:command name='placebookmarks' value='placebookmarks'/>
<cd:command name='placecombinedlist' value='placecombinedlist'/>
<cd:command name='placefloat' value='placefloat'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='placeheadtext'/>
<cd:command name='placelegend' value='placelegend'/>
<cd:command name='placelist' value='placelist'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='placelocalfootnotes'/>
<cd:command name='placelogos' value='placelogos'/>
<cd:command name='placeongrid' value='placeongrid'/>
<cd:command name='placeontopofeachother' value='placeontopofeachother'/>
<cd:command name='placepagenumber' value='placepagenumber'/>
+ <cd:command name='placerawlist' value='placerawlist'/>
<cd:command name='placereferencelist' value='placereferencelist'/>
<cd:command name='placeregister' value='placeregister'/>
<cd:command name='placerule' value='placerule'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='setupregister'/>
<cd:command name='setuprotate' value='setuprotate'/>
<cd:command name='setuprule' value='setuprule'/>
- <cd:command name='setups' value='setups'/>
<cd:command name='setupscreens' value='setupscreens'/>
<cd:command name='setupsection' value='setupsection'/>
<cd:command name='setupsectionblock' value='setupsectionblock'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='setuptables'/>
<cd:command name='setuptabulate' value='setuptabulate'/>
<cd:command name='setuptext' value='setuptext'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='setuptextposition'/>
<cd:command name='setuptextrules' value='setuptextrules'/>
<cd:command name='setuptexttexts' value='setuptexttexts'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='startdocument'/>
<cd:command name='startenvironment' value='startenvironment'/>
<cd:command name='startfigure' value='startfigure'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='startglobal'/>
<cd:command name='startline' value='startline'/>
<cd:command name='startlinecorrection' value='startlinecorrection'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='starttable'/>
<cd:command name='starttables' value='starttables'/>
<cd:command name='starttext' value='starttext'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='starttextrule'/>
<cd:command name='startunpacked' value='startunpacked'/>
<cd:command name='startversion' value='startversion'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='stopcomponent'/>
<cd:command name='stopdocument' value='stopdocument'/>
<cd:command name='stopenvironment' value='stopenvironment'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='stopglobal'/>
<cd:command name='stopline' value='stopline'/>
<cd:command name='stoplinecorrection' value='stoplinecorrection'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='stoptable'/>
<cd:command name='stoptables' value='stoptables'/>
<cd:command name='stoptext' value='stoptext'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='stoptextrule'/>
<cd:command name='stopunpacked' value='stopunpacked'/>
<cd:command name='stopversion' value='stopversion'/>
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index 43c47d578..cd35ad7e1 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='flushleft'/>
<cd:variable name='flushouter' value='flushouter'/>
<cd:variable name='flushright' value='flushright'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='pdp'/>
<cd:variable name='footnote' value='notepdp'/>
<cd:variable name='force' value='force'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='reflete'/>
<cd:variable name='monday' value='lundi'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monogras'/>
<cd:variable name='month' value='mois'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positif'/>
<cd:variable name='postponing' value='postponing'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='preference'/>
<cd:variable name='preview' value='previsualisation'/>
<cd:variable name='previous' value='precedent'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='septembre'/>
<cd:variable name='serif' value='serif'/>
<cd:variable name='serried' value='serried'/>
- <cd:variable name='setups' value='reglages'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='sheet'/>
<cd:variable name='short' value='short'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='couplage'/>
<cd:constant name='couplingway' value='modecouplage'/>
<cd:constant name='criterium' value='critere'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='courant'/>
<cd:constant name='cutspace' value='cutspace'/>
<cd:constant name='dash' value='pointille'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='margepaire'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='expansion'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='fichier'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='premierepage'/>
<cd:constant name='focus' value='focus'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='composeenalinea'/>
<cd:constant name='indentnext' value='indentesuivant'/>
<cd:constant name='indicator' value='indicateur'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='interieur'/>
<cd:constant name='innermargin' value='margeinterieure'/>
<cd:constant name='inputfile' value='fichierentree'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='ndroite'/>
<cd:constant name='ntop' value='nsup'/>
<cd:constant name='number' value='numero'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='couleurnumero'/>
<cd:constant name='numbercommand' value='commandenumero'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1007,7 +1013,7 @@
<cd:constant name='separatorcolor' value='separatorcolor'/>
<cd:constant name='separatorstyle' value='separatorstyle'/>
<cd:constant name='set' value='set'/>
- <cd:constant name='setups' value='reglages'/>
+ <cd:constant name='setups' value='setups'/>
<cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='cote'/>
<cd:constant name='sidealign' value='sidealign'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definittrametableau'/>
<cd:command name='definetabulate' value='definittabulation'/>
<cd:command name='definetext' value='definittexte'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='definitpositiontexte'/>
<cd:command name='definetextvariable' value='definitvariabletexte'/>
<cd:command name='definetype' value='definittype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='hauteurpapier'/>
<cd:command name='paperwidth' value='largeurpapier'/>
<cd:command name='periods' value='periodes'/>
- <cd:command name='plaatsruwelijst' value='placerawlist'/>
<cd:command name='placebookmarks' value='placemarquespages'/>
<cd:command name='placecombinedlist' value='placelisteinmbriquee'/>
<cd:command name='placefloat' value='placeflottant'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='placetextetete'/>
<cd:command name='placelegend' value='placelegende'/>
<cd:command name='placelist' value='placeliste'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='placenotespdplocales'/>
<cd:command name='placelogos' value='placelogos'/>
<cd:command name='placeongrid' value='placesurgrille'/>
<cd:command name='placeontopofeachother' value='placelesunsaudessusdesautres'/>
<cd:command name='placepagenumber' value='placenumeropage'/>
+ <cd:command name='placerawlist' value='placerawlist'/>
<cd:command name='placereferencelist' value='placelistereference'/>
<cd:command name='placeregister' value='placeregistre'/>
<cd:command name='placerule' value='placeregle'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='regleregistre'/>
<cd:command name='setuprotate' value='regleoriente'/>
<cd:command name='setuprule' value='regleregle'/>
- <cd:command name='setups' value='reglages'/>
<cd:command name='setupscreens' value='regleecrans'/>
<cd:command name='setupsection' value='reglesection'/>
<cd:command name='setupsectionblock' value='regleblocsection'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='regletableaux'/>
<cd:command name='setuptabulate' value='regletabulation'/>
<cd:command name='setuptext' value='regletexte'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='reglepositiontexte'/>
<cd:command name='setuptextrules' value='reglelignesreglestexte'/>
<cd:command name='setuptexttexts' value='regletextestexte'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='demarredocument'/>
<cd:command name='startenvironment' value='demarreenvironement'/>
<cd:command name='startfigure' value='demarrefigure'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='demarreglobal'/>
<cd:command name='startline' value='demarreligne'/>
<cd:command name='startlinecorrection' value='demarrecorrectionligne'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='demarretableau'/>
<cd:command name='starttables' value='demarretableaux'/>
<cd:command name='starttext' value='demarretexte'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='demarreligneregleetexte'/>
<cd:command name='startunpacked' value='demarredegroupe'/>
<cd:command name='startversion' value='demarreversion'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='stoppecomposant'/>
<cd:command name='stopdocument' value='stoppedocument'/>
<cd:command name='stopenvironment' value='stoppeenvironement'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='stoppeglobal'/>
<cd:command name='stopline' value='stoppeligne'/>
<cd:command name='stoplinecorrection' value='stoppecorrectionligne'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='stoppetableau'/>
<cd:command name='stoptables' value='stoppetableaux'/>
<cd:command name='stoptext' value='stoppetexte'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='stoppeligneregleetexte'/>
<cd:command name='stopunpacked' value='stoppedegroupe'/>
<cd:command name='stopversion' value='stoppeversion'/>
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index 95c2d8aa5..f07dbb5e6 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='flushleft'/>
<cd:variable name='flushouter' value='flushouter'/>
<cd:variable name='flushright' value='flushright'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='piedipagina'/>
<cd:variable name='footnote' value='notapdp'/>
<cd:variable name='force' value='forza'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='riflesso'/>
<cd:variable name='monday' value='lunedi'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monograssetto'/>
<cd:variable name='month' value='mese'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positivo'/>
<cd:variable name='postponing' value='posporre'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='preferenza'/>
<cd:variable name='preview' value='anteprima'/>
<cd:variable name='previous' value='precedente'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='settembre'/>
<cd:variable name='serif' value='serif'/>
<cd:variable name='serried' value='vicino'/>
- <cd:variable name='setups' value='nastaveni'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='sheet'/>
<cd:variable name='short' value='short'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='accoppiamento'/>
<cd:constant name='couplingway' value='modoaccoppiamento'/>
<cd:constant name='criterium' value='criterio'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='corrente'/>
<cd:constant name='cutspace' value='cutspace'/>
<cd:constant name='dash' value='dash'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='marginepari'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='espansione'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='file'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='primapagina'/>
<cd:constant name='focus' value='focus'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='rientro'/>
<cd:constant name='indentnext' value='rientrasuccessivo'/>
<cd:constant name='indicator' value='indicatore'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='interno'/>
<cd:constant name='innermargin' value='margineinterno'/>
<cd:constant name='inputfile' value='inputfile'/>
@@ -839,7 +844,7 @@
<cd:constant name='middletext' value='testocentro'/>
<cd:constant name='midsentence' value='midsentence'/>
<cd:constant name='min' value='min'/>
- <cd:constant name='mindepth' value='mindeoth'/>
+ <cd:constant name='mindepth' value='mindepth'/>
<cd:constant name='minheight' value='altezzamin'/>
<cd:constant name='minwidth' value='ampiezzamin'/>
<cd:constant name='moffset' value='moffset'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='ndestra'/>
<cd:constant name='ntop' value='ncima'/>
<cd:constant name='number' value='numero'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='colorenumero'/>
<cd:constant name='numbercommand' value='comandonumero'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definiscimodellotabella'/>
<cd:command name='definetabulate' value='definiscitabulato'/>
<cd:command name='definetext' value='definiscitesto'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='definisciposizionetesto'/>
<cd:command name='definetextvariable' value='definiscivariabiletesto'/>
<cd:command name='definetype' value='definiscitype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='altezzacarta'/>
<cd:command name='paperwidth' value='ampiezzacarta'/>
<cd:command name='periods' value='punti'/>
- <cd:command name='plaatsruwelijst' value='placerawlist'/>
<cd:command name='placebookmarks' value='mettisegnalibro'/>
<cd:command name='placecombinedlist' value='mettielencocombinato'/>
<cd:command name='placefloat' value='placefloat'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='posizionatestotesta'/>
<cd:command name='placelegend' value='mettilegenda'/>
<cd:command name='placelist' value='mettielenco'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='mettinotepdplocali'/>
<cd:command name='placelogos' value='mettiloghi'/>
<cd:command name='placeongrid' value='mettiingriglia'/>
<cd:command name='placeontopofeachother' value='mettiunosullaltro'/>
<cd:command name='placepagenumber' value='mettinumeropagina'/>
+ <cd:command name='placerawlist' value='placerawlist'/>
<cd:command name='placereferencelist' value='placereferencelist'/>
<cd:command name='placeregister' value='mettiregistro'/>
<cd:command name='placerule' value='mettilinea'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='impostaregistro'/>
<cd:command name='setuprotate' value='impostarotazione'/>
<cd:command name='setuprule' value='impostalinea'/>
- <cd:command name='setups' value='impostazioni'/>
<cd:command name='setupscreens' value='impostaschermi'/>
<cd:command name='setupsection' value='impostasezione'/>
<cd:command name='setupsectionblock' value='impostabloccosezione'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='impostatabelle'/>
<cd:command name='setuptabulate' value='impostatabulato'/>
<cd:command name='setuptext' value='impostatesto'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='impostaposizionetesto'/>
<cd:command name='setuptextrules' value='impostalineetesto'/>
<cd:command name='setuptexttexts' value='impostatestotesti'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='iniziadocumento'/>
<cd:command name='startenvironment' value='iniziaambiente'/>
<cd:command name='startfigure' value='iniziafigura'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='iniziaglobale'/>
<cd:command name='startline' value='iniziariga'/>
<cd:command name='startlinecorrection' value='iniziacorrezioneriga'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='iniziatabella'/>
<cd:command name='starttables' value='iniziatabelle'/>
<cd:command name='starttext' value='iniziatesto'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='inizialineatesto'/>
<cd:command name='startunpacked' value='iniziaunpacked'/>
<cd:command name='startversion' value='iniziaversione'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='terminacomponente'/>
<cd:command name='stopdocument' value='terminadocumento'/>
<cd:command name='stopenvironment' value='terminaambiente'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='terminaglobale'/>
<cd:command name='stopline' value='terminariga'/>
<cd:command name='stoplinecorrection' value='terminacorrezioneriga'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='terminatabella'/>
<cd:command name='stoptables' value='terminatabelle'/>
<cd:command name='stoptext' value='terminatesto'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='terminalineatesto'/>
<cd:command name='stopunpacked' value='terminaunpacked'/>
<cd:command name='stopversion' value='terminaversioni'/>
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index bc940ebc4..f32d79275 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='lijnlinks'/>
<cd:variable name='flushouter' value='lijnbuiten'/>
<cd:variable name='flushright' value='lijnrechts'/>
+ <cd:variable name='followingpage' value='opvolgendepagina'/>
<cd:variable name='footer' value='voet'/>
<cd:variable name='footnote' value='voetnoot'/>
<cd:variable name='force' value='forceer'/>
@@ -290,20 +291,21 @@
<cd:variable name='mathematics' value='wiskunde'/>
<cd:variable name='mathmatrix' value='wiskundematrix'/>
<cd:variable name='max' value='max'/>
- <cd:variable name='maxdepth' value='maxdepth'/>
- <cd:variable name='maxheight' value='maxheight'/>
- <cd:variable name='maxwidth' value='maxwidth'/>
+ <cd:variable name='maxdepth' value='maxdiepte'/>
+ <cd:variable name='maxheight' value='maxhoogte'/>
+ <cd:variable name='maxwidth' value='maxbreedte'/>
<cd:variable name='may' value='mei'/>
<cd:variable name='mediaeval' value='mediaeval'/>
<cd:variable name='medium' value='middel'/>
<cd:variable name='middle' value='midden'/>
<cd:variable name='min' value='min'/>
- <cd:variable name='mindepth' value='mindepth'/>
+ <cd:variable name='mindepth' value='mindiepte'/>
<cd:variable name='minheight' value='minhoogte'/>
<cd:variable name='minwidth' value='minbreedte'/>
<cd:variable name='mirrored' value='gespiegeld'/>
<cd:variable name='monday' value='maandag'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monovet'/>
<cd:variable name='month' value='maand'/>
<cd:variable name='more' value='meer'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positief'/>
<cd:variable name='postponing' value='uitstellen'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='voorafgaandepagina'/>
<cd:variable name='preference' value='voorkeur'/>
<cd:variable name='preview' value='preview'/>
<cd:variable name='previous' value='vorige'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='september'/>
<cd:variable name='serif' value='serif'/>
<cd:variable name='serried' value='aanelkaar'/>
- <cd:variable name='setups' value='instellingen'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='sheet'/>
<cd:variable name='short' value='kort'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='koppeling'/>
<cd:constant name='couplingway' value='koppelwijze'/>
<cd:constant name='criterium' value='criterium'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='huidige'/>
<cd:constant name='cutspace' value='snijwit'/>
<cd:constant name='dash' value='streep'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='evenmarge'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='expansie'/>
<cd:constant name='export' value='exporteer'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='file'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='eerstepagina'/>
<cd:constant name='focus' value='focus'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='inspringen'/>
<cd:constant name='indentnext' value='springvolgendein'/>
<cd:constant name='indicator' value='aanduiding'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='binnen'/>
<cd:constant name='innermargin' value='binnenmarge'/>
<cd:constant name='inputfile' value='inputfile'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='nrechts'/>
<cd:constant name='ntop' value='nboven'/>
<cd:constant name='number' value='nummer'/>
+ <cd:constant name='numberalign' value='nummeruitlijnen'/>
<cd:constant name='numbercolor' value='nummerkleur'/>
<cd:constant name='numbercommand' value='nummercommando'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -950,7 +956,7 @@
<cd:constant name='reduction' value='reductie'/>
<cd:constant name='ref' value='ref'/>
<cd:constant name='refcommand' value='refcommand'/>
- <cd:constant name='reference' value='verwijzing'/>
+ <cd:constant name='reference' value='referentie'/>
<cd:constant name='referenceprefix' value='referenceprefix'/>
<cd:constant name='referencing' value='refereren'/>
<cd:constant name='region' value='gebied'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixscheider'/>
<cd:constant name='suffixstopper' value='suffixafsluiter'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1098,7 +1106,7 @@
<cd:constant name='totalnumber' value='totalnumber'/>
<cd:constant name='type' value='type'/>
<cd:constant name='unit' value='eenheid'/>
- <cd:constant name='unknownreference' value='onbekendeverwijzing'/>
+ <cd:constant name='unknownreference' value='onbekendereferentie'/>
<cd:constant name='urlalternative' value='urlvariant'/>
<cd:constant name='urlspace' value='urlspatie'/>
<cd:constant name='validate' value='valideer'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definieertabelvorm'/>
<cd:command name='definetabulate' value='definieertabulatie'/>
<cd:command name='definetext' value='definieertekst'/>
+ <cd:command name='definetextbackground' value='definieertekstachtergrond'/>
<cd:command name='definetextposition' value='definieertekstpositie'/>
<cd:command name='definetextvariable' value='definieertekstvariabele'/>
<cd:command name='definetype' value='definieertype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='papierhoogte'/>
<cd:command name='paperwidth' value='papierbreedte'/>
<cd:command name='periods' value='punten'/>
- <cd:command name='plaatsruwelijst' value='plaatsruwelijst'/>
<cd:command name='placebookmarks' value='plaatsbookmarks'/>
<cd:command name='placecombinedlist' value='plaatssamengesteldelijst'/>
<cd:command name='placefloat' value='plaatsplaatsblok'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='plaatskoptekst'/>
<cd:command name='placelegend' value='plaatslegenda'/>
<cd:command name='placelist' value='plaatslijst'/>
+ <cd:command name='placelistofsynonyms' value='plaatslijstmetsynoniemen'/>
<cd:command name='placelocalfootnotes' value='plaatslokalevoetnoten'/>
<cd:command name='placelogos' value='plaatsbeeldmerken'/>
<cd:command name='placeongrid' value='plaatsopgrid'/>
<cd:command name='placeontopofeachother' value='plaatsonderelkaar'/>
<cd:command name='placepagenumber' value='plaatspaginanummer'/>
+ <cd:command name='placerawlist' value='plaatsruwelijst'/>
<cd:command name='placereferencelist' value='plaatsreferentielijst'/>
<cd:command name='placeregister' value='plaatsregister'/>
<cd:command name='placerule' value='plaatslijn'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='stelregisterin'/>
<cd:command name='setuprotate' value='stelroterenin'/>
<cd:command name='setuprule' value='stellijnin'/>
- <cd:command name='setups' value='instellingen'/>
<cd:command name='setupscreens' value='stelrastersin'/>
<cd:command name='setupsection' value='stelsectiein'/>
<cd:command name='setupsectionblock' value='stelsectieblokin'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='steltabellenin'/>
<cd:command name='setuptabulate' value='steltabulatiein'/>
<cd:command name='setuptext' value='steltekstin'/>
+ <cd:command name='setuptextbackground' value='steltekstachtergrondin'/>
<cd:command name='setuptextposition' value='steltekstpositiein'/>
<cd:command name='setuptextrules' value='steltekstlijnenin'/>
<cd:command name='setuptexttexts' value='stelteksttekstenin'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='startdocument'/>
<cd:command name='startenvironment' value='startomgeving'/>
<cd:command name='startfigure' value='startfiguur'/>
+ <cd:command name='startframed' value='startomlijnd'/>
<cd:command name='startglobal' value='startglobaal'/>
<cd:command name='startline' value='startregel'/>
<cd:command name='startlinecorrection' value='startregelcorrectie'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='starttabel'/>
<cd:command name='starttables' value='starttabellen'/>
<cd:command name='starttext' value='starttekst'/>
+ <cd:command name='starttextbackground' value='starttekstachtergrond'/>
<cd:command name='starttextrule' value='starttekstlijn'/>
<cd:command name='startunpacked' value='startvanelkaar'/>
<cd:command name='startversion' value='startversie'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='stoponderdeel'/>
<cd:command name='stopdocument' value='stopdocument'/>
<cd:command name='stopenvironment' value='stopomgeving'/>
+ <cd:command name='stopframed' value='stopomlijnd'/>
<cd:command name='stopglobal' value='stopglobaal'/>
<cd:command name='stopline' value='stopregel'/>
<cd:command name='stoplinecorrection' value='stopregelcorrectie'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='stoptabel'/>
<cd:command name='stoptables' value='stoptabellen'/>
<cd:command name='stoptext' value='stoptekst'/>
+ <cd:command name='stoptextbackground' value='stoptekstachtergrond'/>
<cd:command name='stoptextrule' value='stoptekstlijn'/>
<cd:command name='stopunpacked' value='stopvanelkaar'/>
<cd:command name='stopversion' value='stopversie'/>
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index 75e3a17c2..a55ad78ce 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='پمپ‌چپ'/>
<cd:variable name='flushouter' value='پمپ‌خارجی'/>
<cd:variable name='flushright' value='پمپ‌راست'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='ته‌برگ'/>
<cd:variable name='footnote' value='پانوشت'/>
<cd:variable name='force' value='اجبار'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='منعکس'/>
<cd:variable name='monday' value='دوشنبه'/>
<cd:variable name='mono' value='مونو'/>
+ <cd:variable name='monobold' value='monobold'/>
<cd:variable name='month' value='ماه'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='شکست‌کلمات‌بیشتر'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='مثبت'/>
<cd:variable name='postponing' value='تاخیر'/>
<cd:variable name='postscript' value='پست‌اسکریپت'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='ترجیح'/>
<cd:variable name='preview' value='پیش‌دید'/>
<cd:variable name='previous' value='قبلی'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='سپتامبر'/>
<cd:variable name='serif' value='سریف'/>
<cd:variable name='serried' value='تنگ‌هم'/>
- <cd:variable name='setups' value='بارگذاریها'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='ورقه'/>
<cd:variable name='short' value='short'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='تزویج'/>
<cd:constant name='couplingway' value='روش‌تزویج'/>
<cd:constant name='criterium' value='criterium'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='جاری'/>
<cd:constant name='cutspace' value='فضای‌برش'/>
<cd:constant name='dash' value='دش'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='عرض‌یکسان'/>
<cd:constant name='escape' value='فرار'/>
<cd:constant name='evenmargin' value='حاشیه‌زوج'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='گسترش'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='پرونده'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='صفحه‌اول'/>
<cd:constant name='focus' value='تمرکز'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='تورفتگی'/>
<cd:constant name='indentnext' value='متن‌تورفته'/>
<cd:constant name='indicator' value='اندیکاتور'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='داخلی'/>
<cd:constant name='innermargin' value='حاشیه‌داخلی'/>
<cd:constant name='inputfile' value='پرونده‌ورودی'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='nright'/>
<cd:constant name='ntop' value='ntop'/>
<cd:constant name='number' value='شماره'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='رنگ‌شماره'/>
<cd:constant name='numbercommand' value='فرمان‌شماره'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1007,7 +1013,7 @@
<cd:constant name='separatorcolor' value='separatorcolor'/>
<cd:constant name='separatorstyle' value='separatorstyle'/>
<cd:constant name='set' value='قراربده'/>
- <cd:constant name='setups' value='بارگذاریها'/>
+ <cd:constant name='setups' value='setups'/>
<cd:constant name='shrink' value='shrink'/>
<cd:constant name='side' value='کنار'/>
<cd:constant name='sidealign' value='تنظیم‌کنار'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='پسوند'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='تعریف‌الگوی‌جدول'/>
<cd:command name='definetabulate' value='تعریف‌جدول‌بندی'/>
<cd:command name='definetext' value='تعریف‌متن'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='تعریف‌مکان‌متن'/>
<cd:command name='definetextvariable' value='تعریف‌متغیرمتن'/>
<cd:command name='definetype' value='تعریف‌تایپ'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='ارتفاع‌برگ'/>
<cd:command name='paperwidth' value='عرض‌برگ'/>
<cd:command name='periods' value='نقطه‌ها'/>
- <cd:command name='plaatsruwelijst' value='درج‌لیست‌خام'/>
<cd:command name='placebookmarks' value='درج‌چوب‌خط'/>
<cd:command name='placecombinedlist' value='درج‌لیست‌مختلط'/>
<cd:command name='placefloat' value='درج‌شناور'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='درج‌متن‌سر'/>
<cd:command name='placelegend' value='درج‌راهنما'/>
<cd:command name='placelist' value='درج‌لیست'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='درج‌پانوشتهای‌موضعی'/>
<cd:command name='placelogos' value='درج‌آرمها'/>
<cd:command name='placeongrid' value='درج‌در‌توری'/>
<cd:command name='placeontopofeachother' value='درج‌در‌بالای‌یکدیگر'/>
<cd:command name='placepagenumber' value='درج‌شماره‌صفحه'/>
+ <cd:command name='placerawlist' value='درج‌لیست‌خام'/>
<cd:command name='placereferencelist' value='درج‌لیست‌مرجع'/>
<cd:command name='placeregister' value='درج‌ثبت'/>
<cd:command name='placerule' value='درج‌خط'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='بارگذاری‌ثبت'/>
<cd:command name='setuprotate' value='بارگذاری‌دوران'/>
<cd:command name='setuprule' value='بارگذاری‌خط'/>
- <cd:command name='setups' value='بارگذاریها'/>
<cd:command name='setupscreens' value='بارگذاری‌پرده‌ها'/>
<cd:command name='setupsection' value='بارگذاری‌بخش'/>
<cd:command name='setupsectionblock' value='بارگذاری‌بلوک‌بخش'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='بارگذاری‌جدولها'/>
<cd:command name='setuptabulate' value='بارگذاری‌جدول‌بندی'/>
<cd:command name='setuptext' value='بارگذاری‌متن'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='بارگذاری‌مکان‌متن'/>
<cd:command name='setuptextrules' value='بارگذاری‌خطهای‌متن'/>
<cd:command name='setuptexttexts' value='بارگذاری‌متن‌متنها'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='شروع‌نوشتار'/>
<cd:command name='startenvironment' value='شروع‌محیط'/>
<cd:command name='startfigure' value='شروع‌شکل'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='شروع‌سراسری'/>
<cd:command name='startline' value='شروع‌خط'/>
<cd:command name='startlinecorrection' value='شروع‌تصحیح‌خط'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='شروع‌جدول'/>
<cd:command name='starttables' value='شروع‌جدولها'/>
<cd:command name='starttext' value='شروع‌متن'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='شروع‌خط‌متن'/>
<cd:command name='startunpacked' value='شروع‌غیر‌فشرده'/>
<cd:command name='startversion' value='شروع‌نسخه'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='پایان‌مولفه'/>
<cd:command name='stopdocument' value='پایان‌نوشتار'/>
<cd:command name='stopenvironment' value='پایان‌محیط'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='پایان‌سراسری'/>
<cd:command name='stopline' value='پایان‌خط'/>
<cd:command name='stoplinecorrection' value='پایان‌تصحیح‌خط'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='پایان‌جدول'/>
<cd:command name='stoptables' value='پایان‌جدولها'/>
<cd:command name='stoptext' value='پایان‌متن'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='پایان‌خط‌متن'/>
<cd:command name='stopunpacked' value='پایان‌غیرفشرده'/>
<cd:command name='stopversion' value='پایان‌نسخه'/>
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index e83d145d0..951a5e8c9 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -183,6 +183,7 @@
<cd:variable name='flushleft' value='flushleft'/>
<cd:variable name='flushouter' value='flushouter'/>
<cd:variable name='flushright' value='flushright'/>
+ <cd:variable name='followingpage' value='followingpage'/>
<cd:variable name='footer' value='subsol'/>
<cd:variable name='footnote' value='notasubsol'/>
<cd:variable name='force' value='fortat'/>
@@ -304,6 +305,7 @@
<cd:variable name='mirrored' value='oglindit'/>
<cd:variable name='monday' value='luni'/>
<cd:variable name='mono' value='mono'/>
+ <cd:variable name='monobold' value='monoaldin'/>
<cd:variable name='month' value='luna'/>
<cd:variable name='more' value='more'/>
<cd:variable name='morehyphenation' value='morehyphenation'/>
@@ -363,6 +365,7 @@
<cd:variable name='positive' value='positiv'/>
<cd:variable name='postponing' value='postponing'/>
<cd:variable name='postscript' value='postscript'/>
+ <cd:variable name='precedingpage' value='followingpage'/>
<cd:variable name='preference' value='preferinta'/>
<cd:variable name='preview' value='previzualizare'/>
<cd:variable name='previous' value='precedent'/>
@@ -422,7 +425,7 @@
<cd:variable name='september' value='septembrie'/>
<cd:variable name='serif' value='serif'/>
<cd:variable name='serried' value='serried'/>
- <cd:variable name='setups' value='setari'/>
+ <cd:variable name='setups' value='setups'/>
<cd:variable name='sheet' value='sheet'/>
<cd:variable name='short' value='short'/>
<cd:variable name='simplefonts' value='simplefonts'/>
@@ -657,7 +660,7 @@
<cd:constant name='coupling' value='cuplare'/>
<cd:constant name='couplingway' value='modcuplare'/>
<cd:constant name='criterium' value='criteriu'/>
- <cd:constant name='css' value='css'/>
+ <cd:constant name='cssfile' value='cssfile'/>
<cd:constant name='current' value='curent'/>
<cd:constant name='cutspace' value='cutspace'/>
<cd:constant name='dash' value='dash'/>
@@ -692,7 +695,7 @@
<cd:constant name='equalwidth' value='equalwidth'/>
<cd:constant name='escape' value='escape'/>
<cd:constant name='evenmargin' value='marginepara'/>
- <cd:constant name='exitoffset' value='labeloffset'/>
+ <cd:constant name='exitoffset' value='exitoffset'/>
<cd:constant name='expansion' value='expansiune'/>
<cd:constant name='export' value='export'/>
<cd:constant name='extras' value='extras'/>
@@ -707,6 +710,7 @@
<cd:constant name='file' value='fisier'/>
<cd:constant name='filtercommand' value='filtercommand'/>
<cd:constant name='finalnamesep' value='finalnamesep'/>
+ <cd:constant name='finalpubsep' value='finalpubsep'/>
<cd:constant name='firstnamesep' value='firstnamesep'/>
<cd:constant name='firstpage' value='primapagina'/>
<cd:constant name='focus' value='focus'/>
@@ -758,6 +762,7 @@
<cd:constant name='indenting' value='aliniat'/>
<cd:constant name='indentnext' value='aliniaturmator'/>
<cd:constant name='indicator' value='indicator'/>
+ <cd:constant name='initialsep' value='initialsep'/>
<cd:constant name='inner' value='intern'/>
<cd:constant name='innermargin' value='innermargin'/>
<cd:constant name='inputfile' value='inputfile'/>
@@ -863,6 +868,7 @@
<cd:constant name='nright' value='ndreapta'/>
<cd:constant name='ntop' value='nsus'/>
<cd:constant name='number' value='numar'/>
+ <cd:constant name='numberalign' value='numberalign'/>
<cd:constant name='numbercolor' value='culoarenumar'/>
<cd:constant name='numbercommand' value='comandanumar'/>
<cd:constant name='numberconversion' value='numberconversion'/>
@@ -1051,6 +1057,8 @@
<cd:constant name='suffix' value='suffix'/>
<cd:constant name='suffixseparator' value='suffixseparator'/>
<cd:constant name='suffixstopper' value='suffixstopper'/>
+ <cd:constant name='surnamefirstnamesep' value='surnamefirstnamesep'/>
+ <cd:constant name='surnameinitialsep' value='surnameinitialsep'/>
<cd:constant name='surnamesep' value='surnamesep'/>
<cd:constant name='sx' value='sx'/>
<cd:constant name='sy' value='sy'/>
@@ -1290,6 +1298,7 @@
<cd:command name='definetabletemplate' value='definestesablontabel'/>
<cd:command name='definetabulate' value='definestetabulatori'/>
<cd:command name='definetext' value='definestetext'/>
+ <cd:command name='definetextbackground' value='definetextbackground'/>
<cd:command name='definetextposition' value='definestepozitietext'/>
<cd:command name='definetextvariable' value='definestevariabilatext'/>
<cd:command name='definetype' value='definetype'/>
@@ -1450,7 +1459,6 @@
<cd:command name='paperheight' value='inaltimehartie'/>
<cd:command name='paperwidth' value='latimehartie'/>
<cd:command name='periods' value='puncte'/>
- <cd:command name='plaatsruwelijst' value='placerawlist'/>
<cd:command name='placebookmarks' value='plaseazasemnecarte'/>
<cd:command name='placecombinedlist' value='punelistacombinata'/>
<cd:command name='placefloat' value='placefloat'/>
@@ -1460,11 +1468,13 @@
<cd:command name='placeheadtext' value='placeheadtext'/>
<cd:command name='placelegend' value='punelegenda'/>
<cd:command name='placelist' value='punelista'/>
+ <cd:command name='placelistofsynonyms' value='placelistofsynonyms'/>
<cd:command name='placelocalfootnotes' value='punenotesubsollocale'/>
<cd:command name='placelogos' value='punelogouri'/>
<cd:command name='placeongrid' value='plaseazapegrid'/>
<cd:command name='placeontopofeachother' value='punedeasuprafiecareia'/>
<cd:command name='placepagenumber' value='punenumarpagina'/>
+ <cd:command name='placerawlist' value='placerawlist'/>
<cd:command name='placereferencelist' value='placereferencelist'/>
<cd:command name='placeregister' value='puneregistru'/>
<cd:command name='placerule' value='punerigla'/>
@@ -1611,7 +1621,6 @@
<cd:command name='setupregister' value='seteazaregistru'/>
<cd:command name='setuprotate' value='seteazarotare'/>
<cd:command name='setuprule' value='seteazarigla'/>
- <cd:command name='setups' value='setari'/>
<cd:command name='setupscreens' value='seteazaecrane'/>
<cd:command name='setupsection' value='seteazasectiune'/>
<cd:command name='setupsectionblock' value='seteazablocsectiune'/>
@@ -1629,6 +1638,7 @@
<cd:command name='setuptables' value='seteazatabele'/>
<cd:command name='setuptabulate' value='seteazatabulatori'/>
<cd:command name='setuptext' value='seteazatext'/>
+ <cd:command name='setuptextbackground' value='setuptextbackground'/>
<cd:command name='setuptextposition' value='seteazapozitietext'/>
<cd:command name='setuptextrules' value='seteazarigletext'/>
<cd:command name='setuptexttexts' value='seteazatextetext'/>
@@ -1675,6 +1685,7 @@
<cd:command name='startdocument' value='startdocument'/>
<cd:command name='startenvironment' value='startmediu'/>
<cd:command name='startfigure' value='startfigura'/>
+ <cd:command name='startframed' value='startframed'/>
<cd:command name='startglobal' value='startglobal'/>
<cd:command name='startline' value='startlinie'/>
<cd:command name='startlinecorrection' value='startcorectielinie'/>
@@ -1701,6 +1712,7 @@
<cd:command name='starttable' value='starttabel'/>
<cd:command name='starttables' value='starttabele'/>
<cd:command name='starttext' value='starttext'/>
+ <cd:command name='starttextbackground' value='starttextbackground'/>
<cd:command name='starttextrule' value='startriglatext'/>
<cd:command name='startunpacked' value='startneimpachetat'/>
<cd:command name='startversion' value='startversiune'/>
@@ -1715,6 +1727,7 @@
<cd:command name='stopcomponent' value='stopcomponenta'/>
<cd:command name='stopdocument' value='stopdocument'/>
<cd:command name='stopenvironment' value='stopmediu'/>
+ <cd:command name='stopframed' value='stopframed'/>
<cd:command name='stopglobal' value='stopblobal'/>
<cd:command name='stopline' value='stoplinie'/>
<cd:command name='stoplinecorrection' value='stopcorectielinie'/>
@@ -1740,6 +1753,7 @@
<cd:command name='stoptable' value='stoptabel'/>
<cd:command name='stoptables' value='stoptabele'/>
<cd:command name='stoptext' value='stoptext'/>
+ <cd:command name='stoptextbackground' value='stoptextbackground'/>
<cd:command name='stoptextrule' value='stopriglatext'/>
<cd:command name='stopunpacked' value='stopneimpachetat'/>
<cd:command name='stopversion' value='stopversiune'/>
diff --git a/tex/context/patterns/lang-it.lua b/tex/context/patterns/lang-it.lua
index 20ab48fbf..fb6a9d893 100644
--- a/tex/context/patterns/lang-it.lua
+++ b/tex/context/patterns/lang-it.lua
@@ -38,7 +38,7 @@ return {
%\
% This work consists of the single file hyph-it.tex.\
%\
-% \\versionnumber{4.8i} \\versiondate{2011/08/16}\
+% \\versionnumber{4.9} \\versiondate{2014/04/22}\
%\
% These hyphenation patterns for the Italian language are supposed to comply\
% with the Recommendation UNI 6461 on hyphenation issued by the Italian\
@@ -47,6 +47,7 @@ return {
% liability is disclaimed.\
%\
% ChangeLog:\
+% - 2014-04-22 - Add a few patterns involving `h'\
% - 2011-08-16 - Change the licence from GNU LGPL into LPPL v1.3.\
% - 2010-05-24 - Fix for Italian patterns for proper hyphenation of -ich and Ljubljana.\
% - 2008-06-09 - Import of original ithyph.tex into hyph-utf8 package.\
@@ -56,11 +57,11 @@ return {
},
["patterns"]={
["characters"]="'abcdefghijklmnopqrstuvwxyz’",
- ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ 1j 2j. 2j' 2j’ 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ 1t 2tb 2tc 2td 2tf 2tg t2h t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2",
- ["length"]=1806,
+ ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .ph2l .ph2r .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ .h2 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ .j2 1j 2j. 2j' 2j’ .k2 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ .t2 1t 2tb 2tc 2td 2tf 2tg t2h 2th. t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2",
+ ["length"]=1839,
["minhyphenmax"]=1,
["minhyphenmin"]=1,
- ["n"]=377,
+ ["n"]=384,
},
["version"]="1.001",
}
\ No newline at end of file
diff --git a/tex/context/patterns/lang-it.pat b/tex/context/patterns/lang-it.pat
index 78a127aa7..12a9edf33 100644
--- a/tex/context/patterns/lang-it.pat
+++ b/tex/context/patterns/lang-it.pat
@@ -21,6 +21,8 @@
.opto1
.orto3p2
.para1
+.ph2l
+.ph2r
.poli3p2
.pre1
.p2s
@@ -137,6 +139,7 @@ g2r
2gh2t
2g.
2g'
+.h2
1h
2hb
2hd
@@ -149,9 +152,11 @@ h2l
2hv
2h.
2h'
+.j2
1j
2j.
2j'
+.k2
1k
2kg
2kf
@@ -288,6 +293,7 @@ s4s3m
4s.
4s'.
4s''
+.t2
1t
2tb
2tc
@@ -295,6 +301,7 @@ s4s3m
2tf
2tg
t2h
+2th.
t2l
2tm
2tn
diff --git a/tex/context/patterns/lang-it.rme b/tex/context/patterns/lang-it.rme
index 6cfe6896a..2a2fb60d5 100644
--- a/tex/context/patterns/lang-it.rme
+++ b/tex/context/patterns/lang-it.rme
@@ -32,7 +32,7 @@ Italian hyphenation patterns
%
% This work consists of the single file hyph-it.tex.
%
-% \versionnumber{4.8i} \versiondate{2011/08/16}
+% \versionnumber{4.9} \versiondate{2014/04/22}
%
% These hyphenation patterns for the Italian language are supposed to comply
% with the Recommendation UNI 6461 on hyphenation issued by the Italian
@@ -41,6 +41,7 @@ Italian hyphenation patterns
% liability is disclaimed.
%
% ChangeLog:
+% - 2014-04-22 - Add a few patterns involving `h'
% - 2011-08-16 - Change the licence from GNU LGPL into LPPL v1.3.
% - 2010-05-24 - Fix for Italian patterns for proper hyphenation of -ich and Ljubljana.
% - 2008-06-09 - Import of original ithyph.tex into hyph-utf8 package.
diff --git a/tex/context/sample/cervantes-es.tex b/tex/context/sample/cervantes-es.tex
new file mode 100644
index 000000000..153797023
--- /dev/null
+++ b/tex/context/sample/cervantes-es.tex
@@ -0,0 +1,6 @@
+En un lugar de la Mancha, de cuyo nombre no quiero acordar-me, no ha
+mucho tiempo que vivía un hidalgo de los de lanza en astillero, adarga
+antigua, rocín flaco y galgo corredor. Una olla de algo más vaca que
+carnero, salpicón las más noches, duelos y quebrantos los sábados,
+lantejas los viernes, algún palomino de añadidura los domingos,
+consumían las tres partes de su hacienda.
diff --git a/tex/context/sample/darwin.tex b/tex/context/sample/darwin.tex
new file mode 100644
index 000000000..6425bf156
--- /dev/null
+++ b/tex/context/sample/darwin.tex
@@ -0,0 +1,19 @@
+It is interesting to contemplate an entangled bank, clothed with many
+plants of many kinds, with birds singing on the bushes, with various
+insects flitting about, and with worms crawling through the damp earth,
+and to reflect that these elaborately constructed forms, so different
+from each other, and dependent on each other in so complex a manner,
+have all been produced by laws acting around us. These laws, taken in
+the largest sense, being Growth with Reproduction; Inheritance which is
+almost implied by reproduction; Variability from the indirect and
+direct action of the external conditions of life, and from use and
+disuse; a Ratio of Increase so high as to lead to a Struggle for Life,
+and as a consequence to Natural Selection, entailing Divergence of
+Character and the Extinction of less-improved forms. Thus, from the war
+of nature, from famine and death, the most exalted object which we are
+capable of conceiving, namely, the production of the higher animals,
+directly follows. There is grandeur in this view of life, with its
+several powers, having been originally breathed into a few forms or
+into one; and that, whilst this planet has gone cycling on according to
+the fixed law of gravity, from so simple a beginning endless forms most
+beautiful and most wonderful have been, and are being, evolved.
diff --git a/tex/context/sample/dawkins.tex b/tex/context/sample/dawkins.tex
index 3490b79b0..c0acd3157 100644
--- a/tex/context/sample/dawkins.tex
+++ b/tex/context/sample/dawkins.tex
@@ -8,13 +8,13 @@ individual out. \quotation {Let's first establish,} I said,
right half of the lecture hall.} I invited everybody to
stand up while my assistant tossed a coin. Everybody on the
left of the hall was asked to \quote {will} the coin to
-come down head. Everybody on the right had to will it to be
+come down heads. Everybody on the right had to will it to be
tails. Obviously one side had to lose, and they were asked
-to sit down. Then those who remained were divided into two,
+to sit down. Then those that remained were divided into two,
with half \quote {willing} heads and the other half tails.
Again the losers sat down. And so on by successive halvings
until, inevitably, after seven or eight tosses, one
individual was left standing. \quotation {A big round of
applause for our psychic.} He must be psychic, mustn't he,
because he successfully influenced the coin eight times in
-a row?
\ No newline at end of file
+a row?
diff --git a/tex/context/sample/douglas.tex b/tex/context/sample/douglas.tex
index 7d986d484..838c6d24d 100644
--- a/tex/context/sample/douglas.tex
+++ b/tex/context/sample/douglas.tex
@@ -1,18 +1,18 @@
Donald Knuth has spent the past several years working on a
system allowing him to control many aspects of the design
-of his forthcoming books, from the typesetting and layout
+of his forthcoming books|=|from the typesetting and layout
down to the very shapes of the letters! Seldom has an
author had anything remotely like this power to control the
final appearance of his or her work. Knuth's \TEX\
-typesetting system has become well|-|known and available in
+typesetting system has become well|-|known and as available in
many countries around the world. By contrast, his
\METAFONT\ system for designing families of typefaces has
-not become as well known or available.
+not become as well known or as available.
In his article \quotation {The Concept of a Meta|-|Font},
Knuth sets forth for the first time the underlying
philosophy of \METAFONT, as well as some of its products.
-Not only is the concept exiting and clearly well executed,
+Not only is the concept exciting and clearly well executed,
but in my opinion the article is charmingly written as well.
However, despite my overall enthusiasm for Knuth's idea and
article, there are some points in it that I feel might be
diff --git a/tex/context/sample/quevedo-es.tex b/tex/context/sample/quevedo-es.tex
new file mode 100644
index 000000000..166b0328f
--- /dev/null
+++ b/tex/context/sample/quevedo-es.tex
@@ -0,0 +1,19 @@
+\startlines
+Un soneto me manda hacer Violante
+que en mi vida me he visto en tanto aprieto;
+catorce versos dicen que es soneto;
+burla burlando van los tres delante.
+
+Yo pensé que no hallara consonante,
+y estoy a la mitad de otro cuarteto;
+mas si me veo en el primer terceto,
+no hay cosa en los cuartetos que me espante.
+
+Por el primer terceto voy entrando,
+y parece que entré con pie derecho,
+pues fin con este verso le voy dando.
+
+Ya estoy en el segundo, y aun sospecho
+que voy los trece versos acabando;
+contad si son catorce, y está hecho.
+\stoplines
diff --git a/tex/context/sample/sample.tex b/tex/context/sample/sample.tex
index 5f97ea009..ef31153a1 100644
--- a/tex/context/sample/sample.tex
+++ b/tex/context/sample/sample.tex
@@ -1,75 +1,21 @@
-\starttext
-
-The sample directory contains a few files with quotes that can be used
-while testing styles.
-
-I'll complete this file when I've reorganized my books and audio cd's.
-
-If someone makes a nice bibtex file of these, the quotes can also be
-used in testing bibliographic references and citations.
-
-\starttabulate[|l|l|p|]
-\NC \bf file \NC \bf author \NC \bf source \NC \NR
-\HL
-%NC stork.tex \NC David F. Stork \NC \NC \NR
-\NC knuth.tex \NC Donald E. Knuth \NC \NC \NR
-\NC tufte.tex \NC Edward R. Tufte \NC \NC \NR
-\NC reich.tex \NC Steve Reich \NC \NC \NR
-\NC materie.tex \NC Louis Andriessen \NC De Materie \NC \NR
-\NC douglas.tex \NC Douglas R. Hofstadter \NC \NC \NR
-\NC dawkins.tex \NC Dawkins \NC \NC \NR
-\NC ward.tex \NC Peter D. Ward \NC The Life and Death of Planet Earth \NC \NR
-\NC zapf.tex \NC Hermann Zapf \NC About micro-typography and the hz-program, \endgraf
- Electronic Publishing, vol. 6(3), \endgraf
- 283-288 (September 1993) \NC \NR
-\NC bryson.tex \NC Bill Bryson \NC A Short History of Nearly Everything, \endgraf
- Random House, 2003 \NC \NR
-\NC davis.tex \NC Kenneth C. Davis \NC Don't Know Much About History, \endgraf
- Everything You Need to Know About American
- History but Never Learned, \endgraf
- HarperCollins, 2003 \NC \NR
-\NC thuan.tex \NC Trinh Xuan Thuan \NC Chaos and Harmony, Perspectives on Scientific
- Revolutions of the Twentieth Century, \endgraf
- Oxford University Press, 2001 \NC \NR
-\NC hawking.tex \NC Steve W. Hawking \NC The Universe in a Nutshell, Bantam Books
- Random House, 2001 \NC \NR
-\NC linden.tex \NC Eugene Linden \NC The Winds of Change, Climate, Weather, and the
- Destruction of Civilizations, \endgraf
- Simon \& Schuster, 2006, p.106 \NC \NR
-\NC weisman.tex \NC Alan Weisman \NC The World Without Us, \endgraf
- Thomas Dunne Books, 2007, p.160 \NC \NR
-\NC montgomery.tex \NC David R Montgomery \NC Dirt, The Erosion of Civilizations, \endgraf
- University of California Press, 2007, p.199 \NC \NR
-\NC carrol.tex \NC Sean B. Carrol \NC The Making of the Fittest, \endgraf
- Quercus, London, 2006 \NC \NR
-%NC schwarzenegger.tex \NC Arnold Schwarzenegger \NC Several place on the World Wide Web. \NC \NR
-\stoptabulate
+% this original file has been renamed to samples.tex and describes the
+% short quotes that can be used in test files
-% Tufte: This quote will always produce hyphenated text, apart from the content,
-% it's a pretty good test case for protruding.
-
-% Ward: I should find a quote in the extremely well written Rare Earth as well. All Wards
-% books excell.
-
-% A Short History of Nearly Everything: I wish that I had the memory to remember this book
-% verbatim.
-
-% Chaos and Harmony: very nice and well written book, but the typography is rather bad:
-% quite visible inter-character spacing in a text that can be typeset quite well by \TeX.
+\starttext
-% The Universe in a Nutshell: a beautiful designed book, (companion of A Short History
-% of Time).
+see \type {samples.tex}
-% The World Without Us: A properly typeset, very readable book. Read it and you'll look at
-% the world around you differently (and a bit more freightened).
+\blank
-% Dirt, The Erosion of Civilizations: one of those books that you buy immediately after
-% reading a few sentences. Also one of those books that every politician should read.
+\startluacode
+ context("see \\type {samples.tex}")
+\stopluacode
-% The Making of the Fittest: nice sample for color ans subsentence testing. A very
-% readable book but unfortunately it has inter-character spacing.
+\blank
-% The Schwarzenegger letter was originally typeset at a width equivalent to 16.1cm in
-% a default ConTeXt setup.
+\startMPcode
+ draw textext("see \type {samples.tex}") ;
+ draw boundingbox currentpicture enlarged 2pt ;
+\stopMPcode
\stoptext
diff --git a/tex/context/sample/samples.bib b/tex/context/sample/samples.bib
new file mode 100644
index 000000000..e54092aae
--- /dev/null
+++ b/tex/context/sample/samples.bib
@@ -0,0 +1,29 @@
+% The following entries were provided by Rik Kabel:
+
+@book{dawkins2000unweaving,
+ title = {Unweaving the Rainbow: Science, Delusion and the Appetite for Wonder},
+ author = {Dawkins, R.},
+ isbn = {9780547347356},
+ url = {http://books.google.com/books?id=ZudTchiioUoC},
+ year = {2000},
+ publisher = {Houghton Mifflin Harcourt},
+ pages = {145--146},
+}
+
+@book{Hofstadter:1985:MTQ:537101,
+ author = {Hofstadter, Douglas R.},
+ title = {Metamagical Themas: Questing for the Essence of Mind and Pattern},
+ year = {1985},
+ isbn = {0465045405},
+ publisher = {Basic Books, Inc.},
+ address = {New York, NY, USA},
+}
+
+@book{Tufte:1990:EI:78223,
+ author = {Tufte, Edward},
+ title = {Envisioning Information},
+ year = {1990},
+ isbn = {0-9613921-1-8},
+ publisher = {Graphics Press},
+ address = {Cheshire, CT, USA},
+}
diff --git a/tex/context/sample/samples.tex b/tex/context/sample/samples.tex
new file mode 100644
index 000000000..6e217a592
--- /dev/null
+++ b/tex/context/sample/samples.tex
@@ -0,0 +1,78 @@
+\starttext
+
+The sample directory contains a few files with quotes that can be used
+while testing styles.
+
+I'll complete this file when I've reorganized my books and audio CDs.
+
+If someone makes a nice bibtex file of these, the quotes can also be
+used in testing bibliographic references and citations.
+
+\starttabulate[|l|l|p|]
+\NC \bf file \NC \bf author \NC \bf source \NC \NR
+\HL
+%NC stork.tex \NC David F. Stork \NC \NC \NR
+\NC knuth.tex \NC Donald E. Knuth \NC \NC \NR
+\NC tufte.tex \NC Edward R. Tufte \NC \NC \NR
+\NC reich.tex \NC Steve Reich \NC City Life (1995) \NC \NR
+\NC materie.tex \NC Louis Andriessen \NC De Materie \NC \NR
+\NC douglas.tex \NC Douglas R. Hofstadter \NC \NC \NR
+\NC dawkins.tex     \NC Richard Dawkins        \NC \NC \NR
+\NC ward.tex \NC Peter D. Ward \NC The Life and Death of Planet Earth \NC \NR
+\NC zapf.tex \NC Hermann Zapf \NC About micro-typography and the hz-program, \endgraf
+ Electronic Publishing, vol. 6(3), \endgraf
+ 283-288 (September 1993) \NC \NR
+\NC bryson.tex \NC Bill Bryson \NC A Short History of Nearly Everything, \endgraf
+ Random House, 2003 \NC \NR
+\NC davis.tex \NC Kenneth C. Davis \NC Don't Know Much About History, \endgraf
+ Everything You Need to Know About American
+ History but Never Learned, \endgraf
+ HarperCollins, 2003 \NC \NR
+\NC thuan.tex \NC Trinh Xuan Thuan \NC Chaos and Harmony, Perspectives on Scientific
+ Revolutions of the Twentieth Century, \endgraf
+ Oxford University Press, 2001 \NC \NR
+\NC hawking.tex     \NC Stephen W. Hawking     \NC The Universe in a Nutshell, Bantam Books
+ Random House, 2001 \NC \NR
+\NC linden.tex \NC Eugene Linden \NC The Winds of Change, Climate, Weather, and the
+ Destruction of Civilizations, \endgraf
+ Simon \& Schuster, 2006, p.106 \NC \NR
+\NC weisman.tex \NC Alan Weisman \NC The World Without Us, \endgraf
+ Thomas Dunne Books, 2007, p.160 \NC \NR
+\NC montgomery.tex  \NC David R. Montgomery    \NC Dirt, The Erosion of Civilizations, \endgraf
+ University of California Press, 2007, p.199 \NC \NR
+\NC carrol.tex \NC Sean B. Carrol \NC The Making of the Fittest, \endgraf
+ Quercus, London, 2006 \NC \NR
+%NC jojomayer.tex \NC Jojo Mayer \NC Between Zero & One, www.youtube.com/watch?v=mSj298iBjBY \NC \NR
+%NC schwarzenegger.tex \NC Arnold Schwarzenegger \NC Several places on the World Wide Web. \NC \NR
+\stoptabulate
+
+% Tufte: This quote will always produce hyphenated text, apart from the content,
+% it's a pretty good test case for protruding.
+
+% Reich: This is a list of the voice samples from Steve Reich's 1995 composition City Life.
+
+% Ward: I should find a quote in the extremely well written Rare Earth as well. All Ward's
+% books excel.
+
+% A Short History of Nearly Everything: I wish that I had the memory to remember this book
+% verbatim.
+
+% Chaos and Harmony: very nice and well written book, but the typography is rather bad:
+% quite visible inter-character spacing in a text that can be typeset quite well by \TeX.
+
+% The Universe in a Nutshell: a beautifully designed book (companion of A Brief History
+% of Time).
+
+% The World Without Us: A properly typeset, very readable book. Read it and you'll look at
+% the world around you differently (and a bit more frightened).
+
+% Dirt, The Erosion of Civilizations: one of those books that you buy immediately after
+% reading a few sentences. Also one of those books that every politician should read.
+
+% The Making of the Fittest: nice sample for color and subsentence testing. A very
+% readable book but unfortunately it has inter-character spacing.
+
+% The Schwarzenegger letter was originally typeset at a width equivalent to 16.1cm in
+% a default ConTeXt setup.
+
+\stoptext
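+
+% usage sketch (illustrative, not part of the patch): a test file could combine one of
+% the sample quotes with an entry from the samples.bib file added above, for example
+%
+%   \usebtxdataset[samples.bib]
+%   \starttext
+%       \input tufte \par \cite[Tufte:1990:EI:78223]
+%   \stoptext
+%
+% where \usebtxdataset and \cite are assumed to come from the MkIV publication support.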
diff --git a/tex/context/test/pdf-a1b-2005.mkiv b/tex/context/test/pdf-a1b-2005.mkiv
index f980e3148..bc970c3f9 100644
--- a/tex/context/test/pdf-a1b-2005.mkiv
+++ b/tex/context/test/pdf-a1b-2005.mkiv
@@ -1,9 +1,9 @@
% PDF/A-1b:2005
-\enabletrackers[structure.tags,backend.tags]
+\enabletrackers[structure.tags,backend.tags,backend.xmp]
\setupbackend
- [format=PDF/A-1a:2005,
+ [format=PDF/A-1b:2005,
intent=sRGB IEC61966-2.1, % use <info> entry here; otherwise problems with predefined default profile
profile=sRGB.icc, % use <filename> here
level=0]
@@ -20,8 +20,6 @@
Text is needed, otherwise tagging base entries are not applied.
-\stopchapter
-
\stoptextcolor
%\startTEXpage
diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua
index 9cf5b9317..c4d653604 100644
--- a/tex/generic/context/luatex/luatex-basics-gen.lua
+++ b/tex/generic/context/luatex/luatex-basics-gen.lua
@@ -15,8 +15,13 @@ local dummyfunction = function()
end
local dummyreporter = function(c)
- return function(...)
- (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...))
+ return function(f,...)
+ local r = texio.reporter or texio.write_nl
+ if f then
+ r(c .. " : " .. string.formatters(f,...))
+ else
+ r("")
+ end
end
end
@@ -254,6 +259,18 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data = false
local luaname, lucname = makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then
+ -- in case we used luatex and luajittex mixed ... lub or luc file
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then -- maybe also check for size
texio.write(string.format("(load luc: %s)",lucname))
data = loadfile(lucname)
@@ -339,5 +356,23 @@ end
--
function table.setmetatableindex(t,f)
+ if type(t) ~= "table" then
+ f = f or t
+ t = { }
+ end
setmetatable(t,{ __index = f })
+ return t
+end
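+
+-- usage sketch (illustrative, not part of the patch): the extended helper can now be
+-- called with only an index function and will create and return the table itself:
+--
+--   local squares = table.setmetatableindex(function(t,k) local v = k*k ; t[k] = v ; return v end)
+--   print(squares[3]) -- 9, computed on first access and then cached in the table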
+
+-- helper for plain:
+
+arguments = { }
+
+if arg then
+ for i=1,#arg do
+ local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k] = v
+ end
+ end
end
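-- usage sketch (illustrative, not part of the patch): an invocation such as
--
--   luatex --fmt=plain --draft myfile.tex
--
-- would make the loop above set arguments.fmt = "plain" and arguments.draft = "",
-- so plain-specific code can simply test "if arguments.draft then ... end".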
diff --git a/tex/generic/context/luatex/luatex-basics-nod.lua b/tex/generic/context/luatex/luatex-basics-nod.lua
index 50af40193..1ec2895ba 100644
--- a/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -45,7 +45,7 @@ attributes.private = attributes.private or function(name)
return number
end
--- Nodes:
+-- Nodes (a subset of context so that we don't get too much unused code):
nodes = { }
nodes.pool = { }
@@ -54,7 +54,7 @@ nodes.handlers = { }
local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
-local disccodes = { [0] = "discretionary","explicit", "automatic", "regular", "first", "second" }
+local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" }
nodes.nodecodes = nodecodes
nodes.whatcodes = whatcodes
@@ -67,11 +67,20 @@ local remove_node = node.remove
local new_node = node.new
local traverse_id = node.traverse_id
-local math_code = nodecodes.math
-
nodes.handlers.protectglyphs = node.protect_glyphs
nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+local math_code = nodecodes.math
+local end_of_math = node.end_of_math
+
+function node.end_of_math(n)
+ if n.id == math_code and n.subtype == 1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+
function nodes.remove(head, current, free_too)
local t = current
head, current = remove_node(head,current)
@@ -96,10 +105,8 @@ function nodes.pool.kern(k)
return n
end
--- experimental
-
-local getfield = node.getfield or function(n,tag) return n[tag] end
-local setfield = node.setfield or function(n,tag,value) n[tag] = value end
+local getfield = node.getfield
+local setfield = node.setfield
nodes.getfield = getfield
nodes.setfield = setfield
@@ -107,17 +114,6 @@ nodes.setfield = setfield
nodes.getattr = getfield
nodes.setattr = setfield
-if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end
-if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
-if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
-if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
-if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
-if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
-if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
-
-function nodes.tonut (n) return n end
-function nodes.tonode(n) return n end
-
-- being lazy ... just copy a bunch ... not all needed in generic but we assume
-- nodes to be kind of private anyway
@@ -158,12 +154,95 @@ nodes.unset_attribute = node.unset_attribute
nodes.protect_glyphs = node.protect_glyphs
nodes.unprotect_glyphs = node.unprotect_glyphs
-nodes.kerning = node.kerning
-nodes.ligaturing = node.ligaturing
+-----.kerning = node.kerning
+-----.ligaturing = node.ligaturing
nodes.mlist_to_hlist = node.mlist_to_hlist
-- in generic code, at least for some time, we stay nodes, while in context
-- we can go nuts (e.g. experimental); this split permits us to keep code
-- used elsewhere stable but at the same time play around in context
-nodes.nuts = nodes
+local direct = node.direct
+local nuts = { }
+nodes.nuts = nuts
+
+local tonode = direct.tonode
+local tonut = direct.todirect
+
+nodes.tonode = tonode
+nodes.tonut = tonut
+
+nuts.tonode = tonode
+nuts.tonut = tonut
+
+
+local getfield = direct.getfield
+local setfield = direct.setfield
+
+nuts.getfield = getfield
+nuts.setfield = setfield
+nuts.getnext = direct.getnext
+nuts.getprev = direct.getprev
+nuts.getid = direct.getid
+nuts.getattr = getfield
+nuts.setattr = setfield
+nuts.getfont = direct.getfont
+nuts.getsubtype = direct.getsubtype
+nuts.getchar = direct.getchar
+
+nuts.insert_before = direct.insert_before
+nuts.insert_after = direct.insert_after
+nuts.delete = direct.delete
+nuts.copy = direct.copy
+nuts.copy_list = direct.copy_list
+nuts.tail = direct.tail
+nuts.flush_list = direct.flush_list
+nuts.free = direct.free
+nuts.remove = direct.remove
+nuts.is_node = direct.is_node
+nuts.end_of_math = direct.end_of_math
+nuts.traverse = direct.traverse
+nuts.traverse_id = direct.traverse_id
+
+nuts.getprop = nuts.getattr
+nuts.setprop = nuts.setattr
+
+local new_nut = direct.new
+nuts.new = new_nut
+nuts.pool = { }
+
+function nuts.pool.kern(k)
+ local n = new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+
+-- properties as used in the (new) injector:
+
+local propertydata = direct.get_properties_table()
+nodes.properties = { data = propertydata }
+
+direct.set_properties_mode(true,true) -- needed for injection
+
+function direct.set_properties_mode() end -- we really need the set modes
+
+nuts.getprop = function(n,k)
+ local p = propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+
+nuts.setprop = function(n,k,v)
+ if v then
+ local p = propertydata[n]
+ if p then
+ p[k] = v
+ else
+ propertydata[n] = { [k] = v }
+ end
+ end
+end
+
+nodes.setprop = nodes.setproperty
+nodes.getprop = nodes.getproperty
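+
+-- usage sketch (illustrative, not part of the patch): given a direct node d (a "nut"),
+--
+--   nuts.setprop(d,"injections",{ leftkern = 65536 })
+--   local i = nuts.getprop(d,"injections")   -- the table stored above
+--
+-- the data ends up in direct.get_properties_table()[d], which is what the new
+-- injector (luatex-fonts-inj.lua) reads back via rawget.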
diff --git a/tex/generic/context/luatex/luatex-fonts-cbk.lua b/tex/generic/context/luatex/luatex-fonts-cbk.lua
index 9db94f65e..ce19c8811 100644
--- a/tex/generic/context/luatex/luatex-fonts-cbk.lua
+++ b/tex/generic/context/luatex/luatex-fonts-cbk.lua
@@ -18,14 +18,51 @@ local nodes = nodes
local traverse_id = node.traverse_id
local glyph_code = nodes.nodecodes.glyph
+local disc_code = nodes.nodecodes.disc
-function nodes.handlers.characters(head)
+-- from now on we apply ligaturing and kerning here because it might interfere with complex
+-- opentype discretionary handling where the base ligature pass expects some weird extra
+-- pointers (which then confuse the tail slider that has some checking built in)
+
+local ligaturing = node.ligaturing
+local kerning = node.kerning
+
+local basepass = true
+
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning = nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning = nil end
+
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
+
+function nodes.handlers.setbasepass(v)
+ basepass = v
+end
+
+function nodes.handlers.nodepass(head)
local fontdata = fonts.hashes.identifiers
if fontdata then
- local usedfonts, done, prevfont = { }, false, nil
+ local usedfonts = { }
+ local basefonts = { }
+ local prevfont = nil
+ local basefont = nil
for n in traverse_id(glyph_code,head) do
local font = n.font
if font ~= prevfont then
+ if basefont then
+ basefont[2] = n.prev
+ end
prevfont = font
local used = usedfonts[font]
if not used then
@@ -36,18 +73,57 @@ function nodes.handlers.characters(head)
local processors = shared.processes
if processors and #processors > 0 then
usedfonts[font] = processors
- done = true
+ elseif basepass then
+ basefont = { n, nil }
+ basefonts[#basefonts+1] = basefont
+ end
+ end
+ end
+ end
+ end
+ end
+ for d in traverse_id(disc_code,head) do
+ local r = d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ end
+ end
end
end
end
end
end
end
- if done then
+ if next(usedfonts) then
for font, processors in next, usedfonts do
for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
+ head = processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts > 0 then
+ for i=1,#basefonts do
+ local range = basefonts[i]
+ local start = range[1]
+ local stop = range[2]
+ if stop then
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then
+ start = ligaturing(start)
+ start = kerning(start)
end
end
end
@@ -57,12 +133,27 @@ function nodes.handlers.characters(head)
end
end
+function nodes.handlers.basepass(head)
+ if not basepass then
+ head = ligaturing(head)
+ head = kerning(head)
+ end
+ return head, true
+end
+
+local nodepass = nodes.handlers.nodepass
+local basepass = nodes.handlers.basepass
+local injectpass = nodes.injections.handler
+local protectpass = nodes.handlers.protectglyphs
+
function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
+ if head then
+ head = nodepass(head)
+ head = injectpass(head)
+ head = basepass(head)
+ protectpass(head)
+ return head, true
+ else
+ return head, false
+ end
end
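+
+-- hook-up sketch (illustrative, not part of the patch): a plain format typically
+-- registers this handler in the node processing callbacks, for instance
+--
+--   callback.register("pre_linebreak_filter", nodes.simple_font_handler)
+--   callback.register("hpack_filter",         nodes.simple_font_handler)
+--
+-- so that the nodepass/injectpass/basepass sequence above runs on every list.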
diff --git a/tex/generic/context/luatex/luatex-fonts-enc.lua b/tex/generic/context/luatex/luatex-fonts-enc.lua
index e20c3a03b..2e1c6a466 100644
--- a/tex/generic/context/luatex/luatex-fonts-enc.lua
+++ b/tex/generic/context/luatex/luatex-fonts-enc.lua
@@ -11,9 +11,10 @@ if context then
os.exit()
end
-local fonts = fonts
-fonts.encodings = { }
-fonts.encodings.agl = { }
+local fonts = fonts
+fonts.encodings = { }
+fonts.encodings.agl = { }
+fonts.encodings.known = { }
setmetatable(fonts.encodings.agl, { __index = function(t,k)
if k == "unicodes" then
diff --git a/tex/generic/context/luatex/luatex-fonts-inj.lua b/tex/generic/context/luatex/luatex-fonts-inj.lua
new file mode 100644
index 000000000..332e92033
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-fonts-inj.lua
@@ -0,0 +1,1055 @@
+if not modules then modules = { } end modules ['font-inj'] = {
+ version = 1.001,
+ comment = "companion to font-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This property based variant is not faster but looks nicer than the attribute one. We
+-- need to use rawget (which is about 4 times slower than a direct access, but we cannot
+-- get/set that one for our purpose!)
+
+if not nodes.properties then return end
+
+local next, rawget = next, rawget
+local utfchar = utf.char
+local fastcopy = table.fastcopy
+
+local trace_injections = false trackers.register("fonts.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("fonts","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local newkern = nodepool.kern
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local find_tail = nuts.tail
+
+local properties = nodes.properties.data
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local nofregisteredkerns = 0
+local nofregisteredpairs = 0
+local nofregisteredmarks = 0
+local nofregisteredcursives = 0
+----- markanchors = { } -- one base can have more marks
+local keepregisteredcounts = false
+
+function injections.keepcounts()
+ keepregisteredcounts = true
+end
+
+function injections.resetcounts()
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ keepregisteredcounts = false
+end
+
+-- We need to make sure that a possible metatable will not kick in
+-- unexpectedly.
+
+function injections.reset(n)
+ local p = rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections = nil
+ end
+end
+
+function injections.copy(target,source)
+ local sp = rawget(properties,source)
+ if sp then
+ local tp = rawget(properties,target)
+ local si = rawget(sp,"injections")
+ if si then
+ si = fastcopy(si)
+ if tp then
+ tp.injections = si
+ else
+ propertydata[target] = {
+ injections = si,
+ }
+ end
+ else
+ if tp then
+ tp.injections = nil
+ end
+ end
+ end
+end
+
+function injections.setligaindex(n,index)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.ligaindex = index
+ else
+ p.injections = {
+ ligaindex = index
+ }
+ end
+ else
+ properties[n] = {
+ injections = {
+ ligaindex = index
+ }
+ }
+ end
+end
+
+function injections.getligaindex(n,default)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) -- hm: nuts or nodes
+ local dx = factor*(exit[1]-entry[1])
+ local dy = -factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ nofregisteredcursives = nofregisteredcursives + 1
+ if rlmode < 0 then
+ dx = -(dx + wn)
+ else
+ dx = dx - ws
+ end
+ --
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursiveanchor = true
+ else
+ p.injections = {
+ cursiveanchor = true,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ cursiveanchor = true,
+ },
+ }
+ end
+ local p = rawget(properties,nxt)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursivex = dx
+ i.cursivey = dy
+ else
+ p.injections = {
+ cursivex = dx,
+ cursivey = dy,
+ }
+ end
+ else
+ properties[nxt] = {
+ injections = {
+ cursivex = dx,
+ cursivey = dy,
+ },
+ }
+ end
+ return dx, dy, nofregisteredcursives
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2lflag & tfmchr not used
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then -- okay?
+ local yoffset = y - h
+ local leftkern = x -- both kerns are set in a pair kern compared
+ local rightkern = w - x -- to normal kerns where we set only leftkern
+ if leftkern ~= 0 or rightkern ~= 0 or yoffset ~= 0 then
+ nofregisteredpairs = nofregisteredpairs + 1
+ if rlmode and rlmode < 0 then
+ leftkern, rightkern = rightkern, leftkern
+ end
+ local p = rawget(properties,current)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ if leftkern ~= 0 then
+ i.leftkern = (i.leftkern or 0) + leftkern
+ end
+ if rightkern ~= 0 then
+ i.rightkern = (i.rightkern or 0) + rightkern
+ end
+ if yoffset ~= 0 then
+ i.yoffset = (i.yoffset or 0) + yoffset
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ p.injections = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ }
+ else
+ p.injections = {
+ yoffset = yoffset,
+ }
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ properties[current] = {
+ injections = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ },
+ }
+ else
+ properties[current] = {
+ injections = {
+ yoffset = yoffset,
+ },
+ }
+ end
+ return x, y, w, h, nofregisteredpairs
+ end
+ end
+ return x, y, w, h -- no bound
+end
+
+-- this needs checking for rl < 0 but it is unlikely that a r2l script
+-- uses kernclasses between glyphs so we're probably safe (KE has a
+-- problematic font where marks interfere with rl < 0 in the previous
+-- case)
+
+function injections.setkern(current,factor,rlmode,x,injection)
+ local dx = factor * x
+ if dx ~= 0 then
+ nofregisteredkerns = nofregisteredkerns + 1
+ local p = rawget(properties,current)
+ if not injection then
+ injection = "injections"
+ end
+ if p then
+ local i = rawget(p,injection)
+ if i then
+ i.leftkern = dx + (i.leftkern or 0)
+ else
+ p[injection] = {
+ leftkern = dx,
+ }
+ end
+ else
+ properties[current] = {
+ [injection] = {
+ leftkern = dx,
+ },
+ }
+ end
+ return dx, nofregisteredkerns
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ nofregisteredmarks = nofregisteredmarks + 1
+ -- markanchors[nofregisteredmarks] = base
+ if rlmode >= 0 then
+ dx = tfmbase.width - dx -- see later commented ox
+ end
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.markx = dx
+ i.marky = dy
+ i.markdir = rlmode or 0
+ i.markbase = nofregisteredmarks
+ i.markbasenode = base
+ else
+ p.injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ },
+ }
+ end
+ return dx, dy, nofregisteredmarks
+end
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function showchar(n,nested)
+ local char = getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+
+local function show(n,what,nested,symbol)
+ if n then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,what)
+ if i then
+ local leftkern = i.leftkern or 0
+ local rightkern = i.rightkern or 0
+ local yoffset = i.yoffset or 0
+ local markx = i.markx or 0
+ local marky = i.marky or 0
+ local markdir = i.markdir or 0
+ local markbase = i.markbase or 0 -- will be markbasenode
+ local cursivex = i.cursivex or 0
+ local cursivey = i.cursivey or 0
+ local ligaindex = i.ligaindex or 0
+ local margin = nested and 4 or 2
+ --
+ if rightkern ~= 0 or yoffset ~= 0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern ~= 0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx ~= 0 or marky ~= 0 or markbase ~= 0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase ~= 0 and "yes" or "no")
+ end
+ if cursivex ~= 0 or cursivey ~= 0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex ~= 0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n = head
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id == disc_code then
+ local pre = getfield(n,"pre")
+ local post = getfield(n,"post")
+ local replace = getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
+ end
+ if post then
+ showsub(post,"postinjections","post")
+ end
+ if replace then
+ showsub(replace,"replaceinjections","replace")
+ end
+ end
+ n = getnext(n)
+ end
+ report_injections("end run")
+end
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",getfield(current,"kern"))
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = getnext(current)
+ end
+end
+
+-- we could also check for marks here but maybe not all are registered (needs checking)
+
+local function collect_glyphs_1(head)
+ local glyphs, nofglyphs = { }, 0
+ local marks, nofmarks = { }, 0
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if getsubtype(n) < 256 then
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ end
+ -- yoffsets can influence curs steps
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ return glyphs, nofglyphs, marks, nofmarks
+end
+
+local function collect_glyphs_2(head)
+ local glyphs, nofglyphs = { }, 0
+ local marks, nofmarks = { }, 0
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n) < 256 then
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ end
+ end
+ end
+ return glyphs, nofglyphs, marks, nofmarks
+end
+
+local function inject_marks(marks,nofmarks)
+ for i=1,nofmarks do
+ local n = marks[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,"injections")
+ if pn then
+ local p = pn.markbasenode
+ if p then
+ local px = getfield(p,"xoffset")
+ local ox = 0
+ local rightkern = nil
+ local pp = rawget(properties,p)
+ if pp then
+ pp = rawget(pp,"injections")
+ if pp then
+ rightkern = pp.rightkern
+ end
+ end
+ if rightkern then -- x and w ~= 0
+ if pn.markdir < 0 then
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ ox = px - pn.markx - rightkern
+ -- report_injections("r2l case 1: %p",ox)
+ else
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ -- ox = px - getfield(p,"width") + pn.markx - pp.leftkern
+ local leftkern = pp.leftkern
+ if leftkern then
+ ox = px - pn.markx
+ else
+ ox = px - pn.markx - leftkern
+ end
+-- report_injections("l2r case 1: %p",ox)
+ end
+ else
+ -- we need to deal with fonts that have marks with width
+ -- if pn.markdir < 0 then
+ -- ox = px - pn.markx
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- else
+ -- -- ox = px - getfield(p,"width") + pn.markx
+ ox = px - pn.markx
+ -- report_injections("l2r case 3: %p",ox)
+ -- end
+ local wn = getfield(n,"width") -- in arial marks have widths
+ if wn ~= 0 then
+ -- bad: we should center
+ -- insert_node_before(head,n,newkern(-wn/2))
+ -- insert_node_after(head,n,newkern(-wn/2))
+ pn.leftkern = -wn/2
+ pn.rightkern = -wn/2
+ -- wx[n] = { 0, -wn/2, 0, -wn }
+ end
+ -- so far
+ end
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+ local oy = 0
+ if marks[p] then
+ oy = py + pn.marky
+ else
+ oy = getfield(n,"yoffset") + py + pn.marky
+ end
+ setfield(n,"yoffset",oy)
+ else
+ -- normally this can't happen (only when in trace mode which is a special case anyway)
+ -- report_injections("missing mark anchor %i",pn.markbase or 0)
+ end
+ end
+ end
+ end
+end
+
+local function inject_cursives(glyphs,nofglyphs)
+ local cursiveanchor, lastanchor = nil, nil
+ local minc, maxc, last = 0, 0, nil
+ for i=1,nofglyphs do
+ local n = glyphs[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,"injections")
+ end
+ if pn then
+ local cursivex = pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex ~= 0 then
+ pn.leftkern = (pn.leftkern or 0) + cursivex
+ end
+ if lastanchor then
+ if maxc == 0 then
+ minc = lastanchor
+ end
+ maxc = lastanchor
+ properties[cursiveanchor].cursivedy = pn.cursivey
+ end
+ last = n
+ else
+ maxc = 0
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor = n
+ lastanchor = i
+ else
+ cursiveanchor = nil
+ lastanchor = nil
+ if maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny) -- ?
+ end
+ maxc = 0
+ cursiveanchor = nil
+ lastanchor = nil
+ end
+ -- if maxc > 0 and not cursiveanchor then
+ -- local ny = getfield(n,"yoffset")
+ -- for i=maxc,minc,-1 do
+ -- local ti = glyphs[i]
+ -- ny = ny + properties[ti].cursivedy
+ -- setfield(ti,"yoffset",ny) -- why not add ?
+ -- end
+ -- maxc = 0
+ -- end
+ end
+ if last and maxc > 0 then
+ local ny = getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ end
+end
+
+local function inject_kerns(head,list,length)
+ -- todo: pre/post/replace
+ for i=1,length do
+ local n = list[i]
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern)) -- type 0/2
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern)) -- type 0/2
+ end
+ end
+ end
+ end
+end
+
+local function inject_everything(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs, nofglyphs, marks, nofmarks
+ if nofregisteredpairs > 0 then
+ glyphs, nofglyphs, marks, nofmarks = collect_glyphs_1(head)
+ else
+ glyphs, nofglyphs, marks, nofmarks = collect_glyphs_2(head)
+ end
+ if nofglyphs > 0 then
+ if nofregisteredcursives > 0 then
+ inject_cursives(glyphs,nofglyphs)
+ end
+ if nofregisteredmarks > 0 then -- and nofmarks > 0
+ inject_marks(marks,nofmarks)
+ end
+ inject_kerns(head,glyphs,nofglyphs)
+ end
+ if nofmarks > 0 then
+ inject_kerns(head,marks,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_kerns_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n = head
+ local p = nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ head = insert_node_before(head,n,newkern(leftkern))
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"preinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n) -- why can it be empty { }
+ if pn then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_pairs_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ --
+ local n = head
+ local p = nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ else
+ -- this is the most common case
+ local i = rawget(pn,"injections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"preinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"postinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"replaceinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredpairs = 0
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+function injections.handler(head,where) -- optimize for n=1 ?
+ if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs > 0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns > 0 then
+ return inject_kerns_only(head,where)
+ else
+ return head, false
+ end
+end
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 24e49308c..81883b8b8 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 01/03/14 00:40:35
+-- merge date : 05/15/15 23:03:46
do -- begin closure to overcome local limits and interference
@@ -82,6 +82,16 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
end -- closure
@@ -101,7 +111,9 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -120,7 +132,7 @@ local uppercase=R("AZ")
local underscore=P("_")
local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
-local newline=crlf+S("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
@@ -142,8 +154,10 @@ patterns.utfbom_32_le=utfbom_32_le
patterns.utfbom_16_be=utfbom_16_be
patterns.utfbom_16_le=utfbom_16_le
patterns.utfbom_8=utfbom_8
-patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
-patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
@@ -170,10 +184,24 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
patterns.lowercase=lowercase
patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
@@ -210,9 +238,12 @@ patterns.integer=sign^-1*digit^1
patterns.unsigned=digit^0*period*digit^1
patterns.float=sign^-1*patterns.unsigned
patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
patterns.number=patterns.float+patterns.integer
patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
patterns.oct=zero*octdigit^1
patterns.octal=patterns.oct
patterns.HEX=zero*P("X")*(digit+uppercase)^1
@@ -395,7 +426,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -411,7 +442,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -625,37 +660,139 @@ function lpeg.append(list,pp,delayed,checked)
end
return p
end
+local p_false=P(false)
+local p_true=P(true)
local function make(t)
- local p
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
local keys=sortedkeys(t)
for i=1,#keys do
local k=keys[i]
- local v=t[k]
- if not p then
- if next(v) then
- p=P(k)*make(v)
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
else
- p=P(k)
+ p=p+P(k)*making(v)
end
- else
- if next(v) then
- p=p+P(k)*make(v)
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
else
- p=p+P(k)
+ return v,x..k
end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
+ end
+ return tt,x
end
end
- return p
end
function lpeg.utfchartabletopattern(list)
local tree={}
- for i=1,#list do
- local t=tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
end
- t=t[c]
end
end
return make(tree)
@@ -695,6 +832,65 @@ local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
local number=digit^1*(case_1+case_2)
local stripper=Cs((number+1)^0)
lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
end -- closure
@@ -748,11 +944,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -841,7 +1041,7 @@ end
function table.keys(t)
if t then
local keys,k={},0
- for key,_ in next,t do
+ for key in next,t do
k=k+1
keys[k]=key
end
@@ -851,32 +1051,52 @@ function table.keys(t)
end
end
local function compare(a,b)
- local ta,tb=type(a),type(b)
- if ta==tb then
- return a<b
- else
- return tostring(a)<tostring(b)
+ local ta=type(a)
+ if ta=="number" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ elseif tb=="string" then
+ return tostring(a)<b
+ end
+ elseif ta=="string" then
+ local tb=type(b)
+ if ta==tb then
+ return a<b
+ else
+ return a<tostring(b)
+ end
end
+ return tostring(a)<tostring(b)
end
local function sortedkeys(tab)
if tab then
local srt,category,s={},0,0
- for key,_ in next,tab do
+ for key in next,tab do
s=s+1
srt[s]=key
if category==3 then
+ elseif category==1 then
+ if type(key)~="string" then
+ category=3
+ end
+ elseif category==2 then
+ if type(key)~="number" then
+ category=3
+ end
else
local tkey=type(key)
if tkey=="string" then
- category=(category==2 and 3) or 1
+ category=1
elseif tkey=="number" then
- category=(category==1 and 3) or 2
+ category=2
else
category=3
end
end
end
- if category==0 or category==3 then
+ if s<2 then
+ elseif category==3 then
sort(srt,compare)
else
sort(srt)
@@ -886,16 +1106,52 @@ local function sortedkeys(tab)
return {}
end
end
+local function sortedhashonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="string" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
- for key,_ in next,tab do
+ for key in next,tab do
if key then
s=s+1
srt[s]=key
end
end
- sort(srt,cmp)
+ if s>1 then
+ sort(srt,cmp)
+ end
return srt
else
return {}
@@ -904,13 +1160,15 @@ end
function table.allkeys(t)
local keys={}
for k,v in next,t do
- for k,v in next,v do
+ for k in next,v do
keys[k]=true
end
end
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
@@ -921,19 +1179,21 @@ local function sortedhash(t,cmp)
else
s=sortedkeys(t)
end
- local n=0
local m=#s
- local function kv(s)
- if n<m then
- n=n+1
- local k=s[n]
- return k,t[k]
+ if m==1 then
+ return next,t
+ elseif m>0 then
+ local n=0
+ return function()
+ if n<m then
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
end
end
- return kv,s
- else
- return nothing
end
+ return nothing
end
table.sortedhash=sortedhash
table.sortedpairs=sortedhash
@@ -1075,39 +1335,36 @@ function table.fromhash(t)
end
return hsh
end
-local noquotes,hexify,handle,reduce,compact,inline,functions
+local noquotes,hexify,handle,compact,inline,functions
local reserved=table.tohash {
'and','break','do','else','elseif','end','false','for','function','if',
'in','local','nil','not','or','repeat','return','then','true','until','while',
'NaN','goto',
}
local function simple_table(t)
- if #t>0 then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
end
- if n==#t then
- local tt,nt={},0
- for i=1,#t do
+ if n==nt then
+ local tt={}
+ for i=1,nt do
local v=t[i]
local tv=type(v)
if tv=="number" then
- nt=nt+1
if hexify then
- tt[nt]=format("0x%04X",v)
+ tt[i]=format("0x%X",v)
else
- tt[nt]=tostring(v)
+ tt[i]=tostring(v)
end
elseif tv=="string" then
- nt=nt+1
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
- tt=nil
- break
+ return nil
end
end
return tt
@@ -1126,7 +1383,7 @@ local function do_serialize(root,name,depth,level,indexed)
local tn=type(name)
if tn=="number" then
if hexify then
- handle(format("%s[0x%04X]={",depth,name))
+ handle(format("%s[0x%X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
@@ -1143,7 +1400,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first,last=nil,0
if compact then
last=#root
@@ -1161,22 +1418,19 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local tv,tk=type(v),type(k)
+ local tv=type(v)
+ local tk=type(k)
if compact and first and tk=="number" and k>=first and k<=last then
if tv=="number" then
if hexify then
- handle(format("%s 0x%04X,",depth,v))
+ handle(format("%s 0x%X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
+ handle(format("%s %q,",depth,v))
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
handle(format("%s {},",depth))
elseif inline then
local st=simple_table(v)
@@ -1206,64 +1460,48 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="number" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
else
handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
+ handle(format("%s %s=0x%X,",depth,k,v))
else
handle(format("%s %s=%s,",depth,k,v))
end
else
if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
+ handle(format("%s [%q]=0x%X,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
+ handle(format("%s [%q]=%q,",depth,k,v))
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
+ handle(format("%s [0x%X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
@@ -1279,7 +1517,7 @@ local function do_serialize(root,name,depth,level,indexed)
if st then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
@@ -1299,7 +1537,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
else
handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
@@ -1315,7 +1553,7 @@ local function do_serialize(root,name,depth,level,indexed)
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
else
handle(format("%s [%s]=load(%q),",depth,k,f))
end
@@ -1330,7 +1568,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
@@ -1354,7 +1592,6 @@ local function serialize(_handle,root,name,specification)
noquotes=specification.noquotes
hexify=specification.hexify
handle=_handle or specification.handle or print
- reduce=specification.reduce or false
functions=specification.functions
compact=specification.compact
inline=specification.inline and compact
@@ -1371,7 +1608,6 @@ local function serialize(_handle,root,name,specification)
noquotes=false
hexify=false
handle=_handle or print
- reduce=false
compact=true
inline=true
functions=true
@@ -1384,7 +1620,7 @@ local function serialize(_handle,root,name,specification)
end
elseif tname=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("[0x%X]={",name))
else
handle("["..name.."]={")
end
@@ -1402,7 +1638,7 @@ local function serialize(_handle,root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,"",0)
end
end
@@ -1531,14 +1767,25 @@ local function identical(a,b)
end
table.identical=identical
table.are_equal=are_equal
-function table.compact(t)
- if t then
- for k,v in next,t do
- if not next(v) then
- t[k]=nil
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
end
end
end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
end
function table.contains(t,v)
if t then
@@ -1636,15 +1883,17 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
function table.is_empty(t)
- return not t or not next(t)
+ return not t or next(t)==nil
end
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and next(t,next(t))==nil
end
function table.loweredkeys(t)
local l={}
@@ -1689,6 +1938,44 @@ function table.values(t,s)
return {}
end
end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ n=n+1
+ local k=s[n]
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return kv,s
+ else
+ local n=next(t)
+ local function iterator()
+ while n~=nil do
+ local k=n
+ n=next(t,k)
+ if find(k,pattern) then
+ return k,t[k]
+ end
+ end
+ end
+ return iterator,t
+ end
+ else
+ return nothing
+ end
+end
end -- closure
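A minimal usage sketch of the table.sparse/table.compact and table.filtered helpers introduced above; this assumes the utility module in this closure has been loaded, and the output comments are illustrative only.

local t = { a = "", b = false, c = 1, d = { e = "", f = 2 } }
local s = table.sparse(t, true)   -- drops "" and false entries, recursing into subtables: { c = 1, d = { f = 2 } }
local c = table.compact(t)        -- same pruning, but empty subtables are kept (sparse with keeptables=true)

local data = { alpha = 1, beta = 2, alert = 3 }
for k, v in table.filtered(data, "^al", true) do
  print(k, v)   -- iterates keys matching the pattern in sorted order: alert 3, alpha 1
end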
@@ -1706,7 +1993,7 @@ local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
local concat=table.concat
local floor=math.floor
local type=type
-if string.find(os.getenv("PATH"),";") then
+if string.find(os.getenv("PATH"),";",1,true) then
io.fileseparator,io.pathseparator="\\",";"
else
io.fileseparator,io.pathseparator="/",":"
@@ -1999,8 +2286,6 @@ function io.readstring(f,n,m)
local str=gsub(f:read(n),"\000","")
return str
end
-if not io.i_limiter then function io.i_limiter() end end
-if not io.o_limiter then function io.o_limiter() end end
end -- closure
@@ -2018,41 +2303,28 @@ local file=file
if not lfs then
lfs=optionalrequire("lfs")
end
-if not lfs then
- lfs={
- getcurrentdir=function()
- return "."
- end,
- attributes=function()
- return nil
- end,
- isfile=function(name)
- local f=io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir=function(name)
- print("you need to load lfs")
- return false
- end
- }
-elseif not lfs.isfile then
- local attributes=lfs.attributes
- function lfs.isdir(name)
- return attributes(name,"mode")=="directory"
- end
- function lfs.isfile(name)
- return attributes(name,"mode")=="file"
- end
-end
local insert,concat=table.insert,table.concat
local match,find,gmatch=string.match,string.find,string.gmatch
local lpegmatch=lpeg.match
local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
local checkedsplit=string.checkedsplit
local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local tricky=S("/\\")*P(-1)
+local attributes=lfs.attributes
+if sandbox then
+ sandbox.redefine(lfs.isfile,"lfs.isfile")
+ sandbox.redefine(lfs.isdir,"lfs.isdir")
+end
+function lfs.isdir(name)
+ if lpegmatch(tricky,name) then
+ return attributes(name,"mode")=="directory"
+ else
+ return attributes(name.."/.","mode")=="directory"
+ end
+end
+function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+end
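A short sketch of the redefined lfs.isdir and lfs.isfile, assuming an lfs binding is available and a Unix-like /tmp exists; the trailing-slash case is what the 'tricky' pattern above handles.

print(lfs.isdir("/tmp"))    -- true: probed as "/tmp/." via attributes()
print(lfs.isdir("/tmp/"))   -- true: a trailing (back)slash matches 'tricky' and is probed directly
print(lfs.isfile("/tmp"))   -- false: its mode is "directory", not "file"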
local colon=P(":")
local period=P(".")
local periods=P("..")
@@ -2254,28 +2526,30 @@ local isroot=fwslash^1*-1
local hasroot=fwslash^1
local reslasher=lpeg.replacer(S("\\/"),"/")
local deslasher=lpeg.replacer(S("\\/")^1,"/")
-function file.join(...)
- local lst={... }
- local one=lst[1]
+function file.join(one,two,three,...)
+ if not two then
+ return one=="" and one or lpegmatch(stripper,one)
+ end
+ if one=="" then
+ return lpegmatch(stripper,three and concat({ two,three,... },"/") or two)
+ end
if lpegmatch(isnetwork,one) then
local one=lpegmatch(reslasher,one)
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return one..two
else
return one.."/"..two
end
elseif lpegmatch(isroot,one) then
- local two=lpegmatch(deslasher,concat(lst,"/",2))
+ local two=lpegmatch(deslasher,three and concat({ two,three,... },"/") or two)
if lpegmatch(hasroot,two) then
return two
else
return "/"..two
end
- elseif one=="" then
- return lpegmatch(stripper,concat(lst,"/",2))
else
- return lpegmatch(deslasher,concat(lst,"/"))
+ return lpegmatch(deslasher,concat({ one,two,three,... },"/"))
end
end
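A few illustrative calls to the reworked file.join; this is a sketch whose results assume the slash-normalizing patterns defined above.

print(file.join("a", "b", "c"))        -- a/b/c
print(file.join("/", "tmp", "x.lua"))  -- /tmp/x.lua  (a root first segment is preserved)
print(file.join("a/", "/b//", "c"))    -- a/b/c       (duplicate slashes collapse)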
local drivespec=R("az","AZ")^1*colon
@@ -2444,11 +2718,11 @@ function string.booleanstring(str)
return str=="yes" or str=="on" or str=="t"
end
end
-function string.is_boolean(str,default)
+function string.is_boolean(str,default,strict)
if type(str)=="string" then
- if str=="true" or str=="yes" or str=="on" or str=="t" or str=="1" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" or (not strict and str=="1") then
return true
- elseif str=="false" or str=="no" or str=="off" or str=="f" or str=="0" then
+ elseif str=="false" or str=="no" or str=="off" or str=="f" or (not strict and str=="0") then
return false
end
end
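A quick sketch of the new strict flag in string.is_boolean; the default argument is returned for unrecognized input.

print(string.is_boolean("1"))             -- true   ("1"/"0" still count by default)
print(string.is_boolean("1", nil, true))  -- nil    (strict mode only accepts true/yes/on/t)
print(string.is_boolean("maybe", false))  -- false  (falls back to the given default)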
@@ -2467,6 +2741,9 @@ if not modules then modules={} end modules ['l-math']={
license="see context related readme files"
}
local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.ceiling then
+ math.ceiling=math.ceil
+end
if not math.round then
function math.round(x) return floor(x+0.5) end
end
@@ -2508,25 +2785,43 @@ local unpack,concat=table.unpack,table.concat
local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
local patterns,lpegmatch=lpeg.patterns,lpeg.match
local utfchar,utfbyte=utf.char,utf.byte
-local loadstripped=_LUAVERSION<5.2 and load or function(str)
- return load(dump(load(str),true))
+local loadstripped=nil
+if _LUAVERSION<5.2 then
+ loadstripped=function(str,shortcuts)
+ return load(str)
+ end
+else
+ loadstripped=function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
end
if not number then number={} end
local stripper=patterns.stripzeros
+local newline=patterns.newline
+local endofstring=patterns.endofstring
+local whitespace=patterns.whitespace
+local spacer=patterns.spacer
+local spaceortab=patterns.spaceortab
local function points(n)
+ n=tonumber(n)
return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n=tonumber(n)
return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
end
number.points=points
number.basepoints=basepoints
-local rubish=patterns.spaceortab^0*patterns.newline
-local anyrubish=patterns.spaceortab+patterns.newline
+local rubish=spaceortab^0*newline
+local anyrubish=spaceortab+newline
local anything=patterns.anything
-local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local stripped=(spaceortab^1/"")*newline
local leading=rubish^0/""
-local trailing=(anyrubish^1*patterns.endofstring)/""
+local trailing=(anyrubish^1*endofstring)/""
local redundant=rubish^3/"\n"
local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
function strings.collapsecrlf(str)
@@ -2572,18 +2867,44 @@ local pattern=Carg(1)/function(t)
else
return ""
end
- end+patterns.newline*Cp()/function(position)
+ end+newline*Cp()/function(position)
extra,start=0,position
end+patterns.anything
)^1)
function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
-function strings.striplong(str)
- str=gsub(str,"^%s*","")
- str=gsub(str,"[\n\r]+ *","\n")
- return str
+local space=spacer^0
+local nospace=space/""
+local endofline=nospace*newline
+local stripend=(whitespace^1*endofstring)/""
+local normalline=(nospace*((1-space*(newline+endofstring))^1)*nospace)
+local stripempty=endofline^1/""
+local normalempty=endofline^1
+local singleempty=endofline*(endofline^0/"")
+local doubleempty=endofline*endofline^-1*(endofline^0/"")
+local stripstart=stripempty^0
+local p_prune_normal=Cs (stripstart*(stripend+normalline+normalempty )^0 )
+local p_prune_collapse=Cs (stripstart*(stripend+normalline+doubleempty )^0 )
+local p_prune_noempty=Cs (stripstart*(stripend+normalline+singleempty )^0 )
+local p_retain_normal=Cs ((normalline+normalempty )^0 )
+local p_retain_collapse=Cs ((normalline+doubleempty )^0 )
+local p_retain_noempty=Cs ((normalline+singleempty )^0 )
+local striplinepatterns={
+ ["prune"]=p_prune_normal,
+ ["prune and collapse"]=p_prune_collapse,
+ ["prune and no empty"]=p_prune_noempty,
+ ["retain"]=p_retain_normal,
+ ["retain and collapse"]=p_retain_collapse,
+ ["retain and no empty"]=p_retain_noempty,
+ ["collapse"]=patterns.collapser,
+}
+setmetatable(striplinepatterns,{ __index=function(t,k) return p_prune_collapse end })
+strings.striplinepatterns=striplinepatterns
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
end
+strings.striplong=strings.striplines
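A minimal sketch of strings.striplines, assuming 'strings' refers to the utilities.strings namespace set up earlier in this file; the exact output depends on the chosen mode.

local sample = "   one   \n\n\n   two   \n\n"
print(strings.striplines(sample, "prune and collapse"))
-- per-line leading/trailing whitespace is pruned and runs of empty lines collapse;
-- unknown mode names fall back to "prune and collapse" via the __index metatable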
function strings.nice(str)
str=gsub(str,"[:%-+_]+"," ")
return str
@@ -2621,10 +2942,10 @@ string.tracedchars=tracedchars
strings.tracers=tracedchars
function string.tracedchar(b)
if type(b)=="number" then
- return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ return tracedchars[b] or (utfchar(b).." (U+"..format("%05X",b)..")")
else
local c=utfbyte(b)
- return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ return tracedchars[c] or (b.." (U+"..(c and format("%05X",c) or "?????")..")")
end
end
function number.signed(i)
@@ -2659,31 +2980,58 @@ function number.sparseexponent(f,n)
end
return tostring(n)
end
-local preamble=[[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-local formattednumber = number.formatted
-local sparseexponent = number.sparseexponent
-]]
local template=[[
%s
%s
return function(%s) return %s end
]]
+local preamble,environment="",{}
+if _LUAVERSION<5.2 then
+ preamble=[[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints=number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+else
+ environment={
+ global=global or _G,
+ lpeg=lpeg,
+ type=type,
+ tostring=tostring,
+ tonumber=tonumber,
+ format=string.format,
+ concat=table.concat,
+ signed=number.signed,
+ points=number.points,
+ basepoints=number.basepoints,
+ utfchar=utf.char,
+ utfbyte=utf.byte,
+ lpegmatch=lpeg.match,
+ nspaces=string.nspaces,
+ tracedchar=string.tracedchar,
+ autosingle=string.autosingle,
+ autodouble=string.autodouble,
+ sequenced=table.sequenced,
+ formattednumber=number.formatted,
+ sparseexponent=number.sparseexponent,
+ }
+end
local arguments={ "a1" }
setmetatable(arguments,{ __index=function(t,k)
local v=t[k-1]..",a"..k
@@ -2722,7 +3070,7 @@ local format_i=function(f)
if f and f~="" then
return format("format('%%%si',a%s)",f,n)
else
- return format("format('%%i',a%s)",n)
+ return format("format('%%i',a%s)",n)
end
end
local format_d=format_i
@@ -2734,6 +3082,14 @@ local format_f=function(f)
n=n+1
return format("format('%%%sf',a%s)",f,n)
end
+local format_F=function(f)
+ n=n+1
+ if not f or f=="" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -2948,7 +3304,7 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
@@ -2959,7 +3315,6 @@ local builder=Cs { "start",
+V("j")+V("J")
+V("m")+V("M")
+V("z")
-+V("*")
)+V("*")
)*(P(-1)+Carg(1))
)^0,
@@ -2968,6 +3323,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -3002,11 +3358,12 @@ local builder=Cs { "start",
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -3015,10 +3372,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -3030,10 +3387,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -3041,8 +3410,12 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
@@ -3051,9 +3424,28 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+local pattern=Cs((newline/os.newline+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
end -- closure
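A small sketch of the new %F directive and string.optionalquoted, assuming the formatter machinery above is loaded; the printed results are what the generated format calls should yield.

local fmt = string.formatters["%.3F"]
print(fmt(2))                                -- 2      (integral values print as integers)
print(fmt(2.5))                              -- 2.500  (otherwise the requested precision is used)
print(string.formatter("%F", 1e-12))         -- 0      (near-zero values collapse to a plain 0)
print(string.optionalquoted("hello"))        -- hello  (no space, left untouched)
print(string.optionalquoted("hello world"))  -- "hello world"  (a space triggers quoting)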
@@ -3073,8 +3465,13 @@ end
local dummyfunction=function()
end
local dummyreporter=function(c)
- return function(...)
- (texio.reporter or texio.write_nl)(c.." : "..string.formatters(...))
+ return function(f,...)
+ local r=texio.reporter or texio.write_nl
+ if f then
+ r(c.." : "..string.formatters(f,...))
+ else
+ r("")
+ end
end
end
statistics={
@@ -3250,6 +3647,17 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data=false
local luaname,lucname=makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then
texio.write(string.format("(load luc: %s)",lucname))
data=loadfile(lucname)
@@ -3303,7 +3711,21 @@ function caches.compile(data,luaname,lucname)
end
end
function table.setmetatableindex(t,f)
+ if type(t)~="table" then
+ f=f or t
+ t={}
+ end
setmetatable(t,{ __index=f })
+ return t
+end
+arguments={}
+if arg then
+ for i=1,#arg do
+ local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k]=v
+ end
+ end
end
end -- closure
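A tiny sketch of the extended table.setmetatableindex: when the first argument is not a table, a fresh one is allocated and returned (illustrative only).

local cache = table.setmetatableindex(function(t, k)
  local v = k:upper()  -- compute once
  t[k] = v             -- and memoize
  return v
end)
print(cache.abc)  -- ABC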
@@ -3473,9 +3895,17 @@ local free_node=node.free
local remove_node=node.remove
local new_node=node.new
local traverse_id=node.traverse_id
-local math_code=nodecodes.math
nodes.handlers.protectglyphs=node.protect_glyphs
nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
function nodes.remove(head,current,free_too)
local t=current
head,current=remove_node(head,current)
@@ -3497,21 +3927,12 @@ function nodes.pool.kern(k)
n.kern=k
return n
end
-local getfield=node.getfield or function(n,tag) return n[tag] end
-local setfield=node.setfield or function(n,tag,value) n[tag]=value end
+local getfield=node.getfield
+local setfield=node.setfield
nodes.getfield=getfield
nodes.setfield=setfield
nodes.getattr=getfield
nodes.setattr=setfield
-if node.getid then nodes.getid=node.getid else function nodes.getid (n) return getfield(n,"id") end end
-if node.getsubtype then nodes.getsubtype=node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
-if node.getnext then nodes.getnext=node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
-if node.getprev then nodes.getprev=node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
-if node.getchar then nodes.getchar=node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
-if node.getfont then nodes.getfont=node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
-if node.getlist then nodes.getlist=node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
-function nodes.tonut (n) return n end
-function nodes.tonode(n) return n end
nodes.tostring=node.tostring or tostring
nodes.copy=node.copy
nodes.copy_list=node.copy_list
@@ -3545,10 +3966,73 @@ nodes.set_attribute=node.set_attribute
nodes.unset_attribute=node.unset_attribute
nodes.protect_glyphs=node.protect_glyphs
nodes.unprotect_glyphs=node.unprotect_glyphs
-nodes.kerning=node.kerning
-nodes.ligaturing=node.ligaturing
nodes.mlist_to_hlist=node.mlist_to_hlist
-nodes.nuts=nodes
+local direct=node.direct
+local nuts={}
+nodes.nuts=nuts
+local tonode=direct.tonode
+local tonut=direct.todirect
+nodes.tonode=tonode
+nodes.tonut=tonut
+nuts.tonode=tonode
+nuts.tonut=tonut
+local getfield=direct.getfield
+local setfield=direct.setfield
+nuts.getfield=getfield
+nuts.setfield=setfield
+nuts.getnext=direct.getnext
+nuts.getprev=direct.getprev
+nuts.getid=direct.getid
+nuts.getattr=getfield
+nuts.setattr=setfield
+nuts.getfont=direct.getfont
+nuts.getsubtype=direct.getsubtype
+nuts.getchar=direct.getchar
+nuts.insert_before=direct.insert_before
+nuts.insert_after=direct.insert_after
+nuts.delete=direct.delete
+nuts.copy=direct.copy
+nuts.copy_list=direct.copy_list
+nuts.tail=direct.tail
+nuts.flush_list=direct.flush_list
+nuts.free=direct.free
+nuts.remove=direct.remove
+nuts.is_node=direct.is_node
+nuts.end_of_math=direct.end_of_math
+nuts.traverse=direct.traverse
+nuts.traverse_id=direct.traverse_id
+nuts.getprop=nuts.getattr
+nuts.setprop=nuts.setattr
+local new_nut=direct.new
+nuts.new=new_nut
+nuts.pool={}
+function nuts.pool.kern(k)
+ local n=new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+local propertydata=direct.get_properties_table()
+nodes.properties={ data=propertydata }
+direct.set_properties_mode(true,true)
+function direct.set_properties_mode() end
+nuts.getprop=function(n,k)
+ local p=propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+nuts.setprop=function(n,k,v)
+ if v then
+ local p=propertydata[n]
+ if p then
+ p[k]=v
+ else
+ propertydata[n]={ [k]=v }
+ end
+ end
+end
+nodes.setprop=nodes.setproperty
+nodes.getprop=nodes.getproperty
end -- closure
@@ -3574,7 +4058,7 @@ fonts.analyzers={}
fonts.readers={}
fonts.definers={ methods={} }
fonts.loggers={ register=function() end }
-fontloader.totable=fontloader.to_table
+fontloader.totable=fontloader.to_table
end -- closure
@@ -3607,7 +4091,8 @@ constructors.autocleanup=true
constructors.namemode="fullpath"
constructors.version=1.01
constructors.cache=containers.define("fonts","constructors",constructors.version,false)
-constructors.privateoffset=0xF0000
+constructors.privateoffset=0xF0000
+constructors.cacheintex=true
constructors.keys={
properties={
encodingbytes="number",
@@ -3769,14 +4254,15 @@ constructors.sharefonts=false
constructors.nofsharedfonts=0
local sharednames={}
function constructors.trytosharefont(target,tfmdata)
- if constructors.sharefonts then
+ if constructors.sharefonts then
local characters=target.characters
local n=1
local t={ target.psname }
local u=sortedkeys(characters)
for i=1,#u do
+ local k=u[i]
n=n+1;t[n]=k
- n=n+1;t[n]=characters[u[i]].index or k
+ n=n+1;t[n]=characters[k].index or k
end
local h=md5.HEX(concat(t," "))
local s=sharednames[h]
@@ -3859,7 +4345,7 @@ function constructors.scale(tfmdata,specification)
targetparameters.textsize=textsize
targetparameters.forcedsize=forcedsize
targetparameters.extrafactor=extrafactor
- local tounicode=resources.tounicode
+ local tounicode=fonts.mappings.tounicode
local defaultwidth=resources.defaultwidth or 0
local defaultheight=resources.defaultheight or 0
local defaultdepth=resources.defaultdepth or 0
@@ -3885,6 +4371,7 @@ function constructors.scale(tfmdata,specification)
target.tounicode=1
target.cidinfo=properties.cidinfo
target.format=properties.format
+ target.cache=constructors.cacheintex and "yes" or "renew"
local fontname=properties.fontname or tfmdata.fontname
local fullname=properties.fullname or tfmdata.fullname
local filename=properties.filename or tfmdata.filename
@@ -3939,7 +4426,9 @@ function constructors.scale(tfmdata,specification)
local autoitalicamount=properties.autoitalicamount
local stackmath=not properties.nostackmath
local nonames=properties.noglyphnames
- local nodemode=properties.mode=="node"
+ local haskerns=properties.haskerns or properties.mode=="base"
+ local hasligatures=properties.hasligatures or properties.mode=="base"
+ local realdimensions=properties.realdimensions
if changed and not next(changed) then
changed=false
end
@@ -4002,38 +4491,44 @@ function constructors.scale(tfmdata,specification)
constructors.beforecopyingcharacters(target,tfmdata)
local sharedkerns={}
for unicode,character in next,characters do
- local chr,description,index,touni
+ local chr,description,index
if changed then
local c=changed[unicode]
if c then
description=descriptions[c] or descriptions[unicode] or character
character=characters[c] or character
index=description.index or c
- if tounicode then
- touni=tounicode[index]
- if not touni then
- local d=descriptions[unicode] or characters[unicode]
- local i=d.index or unicode
- touni=tounicode[i]
- end
- end
else
description=descriptions[unicode] or character
index=description.index or unicode
- if tounicode then
- touni=tounicode[index]
- end
end
else
description=descriptions[unicode] or character
index=description.index or unicode
- if tounicode then
- touni=tounicode[index]
- end
end
local width=description.width
local height=description.height
local depth=description.depth
+ if realdimensions then
+ if not height or height==0 then
+ local bb=description.boundingbox
+ local ht=bb[4]
+ if ht~=0 then
+ height=ht
+ end
+ if not depth or depth==0 then
+ local dp=-bb[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ elseif not depth or depth==0 then
+ local dp=-description.boundingbox[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ end
if width then width=hdelta*width else width=scaledwidth end
if height then height=vdelta*height else height=scaledheight end
if depth and depth~=0 then
@@ -4070,8 +4565,10 @@ function constructors.scale(tfmdata,specification)
}
end
end
- if touni then
- chr.tounicode=touni
+ local isunicode=description.unicode
+ if isunicode then
+ chr.unicode=isunicode
+ chr.tounicode=tounicode(isunicode)
end
if hasquality then
local ve=character.expansion_factor
@@ -4164,7 +4661,7 @@ function constructors.scale(tfmdata,specification)
end
end
end
- if not nodemode then
+ if haskerns then
local vk=character.kerns
if vk then
local s=sharedkerns[vk]
@@ -4175,6 +4672,8 @@ function constructors.scale(tfmdata,specification)
end
chr.kerns=s
end
+ end
+ if hasligatures then
local vl=character.ligatures
if vl then
if true then
@@ -4331,6 +4830,7 @@ function constructors.finalize(tfmdata)
tfmdata.extend=nil
tfmdata.slant=nil
tfmdata.units_per_em=nil
+ tfmdata.cache=nil
properties.finalized=true
return tfmdata
end
@@ -4675,6 +5175,16 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
end
end
end
+function constructors.addcoreunicodes(unicodes)
+ if not unicodes then
+ unicodes={}
+ end
+ unicodes.space=0x0020
+ unicodes.hyphen=0x002D
+ unicodes.zwj=0x200D
+ unicodes.zwnj=0x200C
+ return unicodes
+end
end -- closure
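A minimal sketch of constructors.addcoreunicodes as defined above; passing nil allocates the table.

local u = fonts.constructors.addcoreunicodes()
print(u.space, u.hyphen, u.zwnj, u.zwj)  -- 32  45  8204  8205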
@@ -4694,6 +5204,7 @@ end
local fonts=fonts
fonts.encodings={}
fonts.encodings.agl={}
+fonts.encodings.known={}
setmetatable(fonts.encodings.agl,{ __index=function(t,k)
if k=="unicodes" then
texio.write(" <loading (extended) adobe glyph list>")
@@ -4765,7 +5276,7 @@ local function loadcidfile(filename)
ordering=ordering,
filename=filename,
unicodes=unicodes,
- names=names
+ names=names,
}
end
end
@@ -4802,10 +5313,23 @@ function cid.getmap(specification)
local ordering=specification.ordering
local supplement=specification.supplement
local filename=format(registry,ordering,supplement)
- local found=cidmap[lower(filename)]
+ local lowername=lower(filename)
+ local found=cidmap[lowername]
if found then
return found
end
+ if ordering=="Identity" then
+ local found={
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes={},
+ names={},
+ }
+ cidmap[lowername]=found
+ return found
+ end
if trace_loading then
report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
end
@@ -4856,17 +5380,19 @@ if not modules then modules={} end modules ['font-map']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local tonumber=tonumber
+local tonumber,next,type=tonumber,next,type
local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
local utfbyte=utf.byte
local floor=math.floor
+local formatters=string.formatters
local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end)
local report_fonts=logs.reporter("fonts","loading")
local fonts=fonts or {}
local mappings=fonts.mappings or {}
fonts.mappings=mappings
+local allocate=utilities.storage.allocate
local function loadlumtable(filename)
local lumname=file.replacesuffix(file.basename(filename),"lum")
local lumfile=resolvers.findfile(lumname,"map") or ""
@@ -4900,11 +5426,13 @@ local function makenameparser(str)
return p
end
end
+local f_single=formatters["%04X"]
+local f_double=formatters["%04X%04X"]
local function tounicode16(unicode,name)
if unicode<0x10000 then
- return format("%04X",unicode)
+ return f_single(unicode)
elseif unicode<0x1FFFFFFFFF then
- return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
else
report_fonts("can't convert %a in %a into tounicode",unicode,name)
end
@@ -4912,17 +5440,43 @@ end
local function tounicode16sequence(unicodes,name)
local t={}
for l=1,#unicodes do
- local unicode=unicodes[l]
- if unicode<0x10000 then
- t[l]=format("%04X",unicode)
+ local u=unicodes[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
- elseif unicode<0x1FFFFFFFFF then
+ elseif u<0x1FFFFFFFFF then
- t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
else
- report_fonts ("can't convert %a in %a into tounicode",unicode,name)
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
end
end
return concat(t)
end
+local function tounicode(unicode,name)
+ if type(unicode)=="table" then
+ local t={}
+ for l=1,#unicode do
+ local u=unicode[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
+ elseif u<0x1FFFFFFFFF then
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+ else
+ if unicode<0x10000 then
+ return f_single(unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+end
local function fromunicode16(str)
if #str==4 then
return tonumber(str,16)
@@ -4933,17 +5487,41 @@ local function fromunicode16(str)
end
mappings.loadlumtable=loadlumtable
mappings.makenameparser=makenameparser
+mappings.tounicode=tounicode
mappings.tounicode16=tounicode16
mappings.tounicode16sequence=tounicode16sequence
mappings.fromunicode16=fromunicode16
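A short sketch of the consolidated mappings.tounicode helper, which now accepts both a single codepoint and a table of codepoints (BMP values shown).

local tounicode = fonts.mappings.tounicode
print(tounicode(0x0041))           -- 0041
print(tounicode({ 0x66, 0x69 }))   -- 00660069  (e.g. an f_i ligature mapping back to f and i)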
local ligseparator=P("_")
local varseparator=P(".")
local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0)
+local overloads=allocate {
+ IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 },
+ ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 },
+ ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 },
+ fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 },
+ fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 },
+ ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 },
+ ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 },
+ fj={ name="f_j",unicode={ 0x66,0x6A } },
+ fk={ name="f_k",unicode={ 0x66,0x6B } },
+}
+for k,v in next,overloads do
+ local name=v.name
+ local mess=v.mess
+ if name then
+ overloads[name]=v
+ end
+ if mess then
+ overloads[mess]=v
+ end
+end
+mappings.overloads=overloads
function mappings.addtounicode(data,filename)
local resources=data.resources
local properties=data.properties
local descriptions=data.descriptions
local unicodes=resources.unicodes
+ local lookuptypes=resources.lookuptypes
if not unicodes then
return
end
@@ -4952,18 +5530,10 @@ function mappings.addtounicode(data,filename)
unicodes['zwj']=unicodes['zwj'] or 0x200D
unicodes['zwnj']=unicodes['zwnj'] or 0x200C
local private=fonts.constructors.privateoffset
- local unknown=format("%04X",utfbyte("?"))
- local unicodevector=fonts.encodings.agl.unicodes
- local tounicode={}
- local originals={}
- resources.tounicode=tounicode
- resources.originals=originals
+ local unicodevector=fonts.encodings.agl.unicodes
+ local missing={}
local lumunic,uparser,oparser
local cidinfo,cidnames,cidcodes,usedmap
- if false then
- lumunic=loadlumtable(filename)
- lumunic=lumunic and lumunic.tounicode
- end
cidinfo=properties.cidinfo
usedmap=cidinfo and fonts.cid.getmap(cidinfo)
if usedmap then
@@ -4976,11 +5546,13 @@ function mappings.addtounicode(data,filename)
for unic,glyph in next,descriptions do
local index=glyph.index
local name=glyph.name
- if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local r=overloads[name]
+ if r then
+ glyph.unicode=r.unicode
+ elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
local unicode=lumunic and lumunic[name] or unicodevector[name]
if unicode then
- originals[index]=unicode
- tounicode[index]=tounicode16(unicode,name)
+ glyph.unicode=unicode
ns=ns+1
end
if (not unicode) and usedmap then
@@ -4988,8 +5560,7 @@ function mappings.addtounicode(data,filename)
if foundindex then
unicode=cidcodes[foundindex]
if unicode then
- originals[index]=unicode
- tounicode[index]=tounicode16(unicode,name)
+ glyph.unicode=unicode
ns=ns+1
else
local reference=cidnames[foundindex]
@@ -4998,21 +5569,18 @@ function mappings.addtounicode(data,filename)
if foundindex then
unicode=cidcodes[foundindex]
if unicode then
- originals[index]=unicode
- tounicode[index]=tounicode16(unicode,name)
+ glyph.unicode=unicode
ns=ns+1
end
end
if not unicode or unicode=="" then
local foundcodes,multiple=lpegmatch(uparser,reference)
if foundcodes then
- originals[index]=foundcodes
+ glyph.unicode=foundcodes
if multiple then
- tounicode[index]=tounicode16sequence(foundcodes)
nl=nl+1
unicode=true
else
- tounicode[index]=tounicode16(foundcodes,name)
ns=ns+1
unicode=foundcodes
end
@@ -5050,39 +5618,157 @@ function mappings.addtounicode(data,filename)
end
if n==0 then
elseif n==1 then
- originals[index]=t[1]
- tounicode[index]=tounicode16(t[1],name)
+ glyph.unicode=t[1]
else
- originals[index]=t
- tounicode[index]=tounicode16sequence(t)
+ glyph.unicode=t
end
nl=nl+1
end
if not unicode or unicode=="" then
local foundcodes,multiple=lpegmatch(uparser,name)
if foundcodes then
+ glyph.unicode=foundcodes
if multiple then
- originals[index]=foundcodes
- tounicode[index]=tounicode16sequence(foundcodes,name)
nl=nl+1
unicode=true
else
- originals[index]=foundcodes
- tounicode[index]=tounicode16(foundcodes,name)
ns=ns+1
unicode=foundcodes
end
end
end
+ local r=overloads[unicode]
+ if r then
+ unicode=r.unicode
+ glyph.unicode=unicode
+ end
+ if not unicode then
+ missing[name]=true
+ end
+ end
+ end
+ if next(missing) then
+ local guess={}
+ local function check(gname,code,unicode)
+ local description=descriptions[code]
+ local variant=description.name
+ if variant==gname then
+ return
+ end
+ local unic=unicodes[variant]
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ else
+ return
+ end
+ if descriptions[code].unicode then
+ return
+ end
+ local g=guess[variant]
+ if g then
+ g[gname]=unicode
+ else
+ guess[variant]={ [gname]=unicode }
+ end
+ end
+ for unicode,description in next,descriptions do
+ local slookups=description.slookups
+ if slookups then
+ local gname=description.name
+ for tag,data in next,slookups do
+ local lookuptype=lookuptypes[tag]
+ if lookuptype=="alternate" then
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ elseif lookuptype=="substitution" then
+ check(gname,data,unicode)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local gname=description.name
+ for tag,list in next,mlookups do
+ local lookuptype=lookuptypes[tag]
+ if lookuptype=="alternate" then
+ for i=1,#list do
+ local data=list[i]
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ end
+ elseif lookuptype=="substitution" then
+ for i=1,#list do
+ check(gname,list[i],unicode)
+ end
+ end
+ end
+ end
+ end
+ local done=true
+ while done do
+ done=false
+ for k,v in next,guess do
+ if type(v)~="number" then
+ for kk,vv in next,v do
+ if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then
+ local uu=guess[kk]
+ if type(uu)=="number" then
+ guess[k]=uu
+ done=true
+ end
+ else
+ guess[k]=vv
+ done=true
+ end
+ end
+ end
+ end
+ end
+ local orphans=0
+ local guessed=0
+ for k,v in next,guess do
+ if type(v)=="number" then
+ descriptions[unicodes[k]].unicode=descriptions[v].unicode or v
+ guessed=guessed+1
+ else
+ local t=nil
+ local l=lower(k)
+ local u=unicodes[l]
+ if not u then
+ orphans=orphans+1
+ elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then
+ local unicode=descriptions[u].unicode
+ if unicode then
+ descriptions[unicodes[k]].unicode=unicode
+ guessed=guessed+1
+ else
+ orphans=orphans+1
+ end
+ else
+ orphans=orphans+1
+ end
+ end
+ end
+ if trace_loading and orphans>0 or guessed>0 then
+ report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
end
end
if trace_mapping then
for unic,glyph in table.sortedhash(descriptions) do
local name=glyph.name
local index=glyph.index
- local toun=tounicode[index]
- if toun then
- report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ local unicode=glyph.unicode
+ if unicode then
+ if type(unicode)=="table" then
+ local unicodes={}
+ for i=1,#unicode do
+ unicodes[i]=formatters("%U",unicode[i])
+ end
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode)
+ end
else
report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
end
@@ -5224,14 +5910,15 @@ local function read_from_tfm(specification)
properties.fontname=tfmdata.fontname
properties.psname=tfmdata.psname
properties.filename=specification.filename
+ properties.format=fonts.formats.tfm
parameters.size=size
- shared.rawdata={}
- shared.features=features
- shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
tfmdata.properties=properties
tfmdata.resources=resources
tfmdata.parameters=parameters
tfmdata.shared=shared
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
parameters.slant=parameters.slant or parameters[1] or 0
parameters.space=parameters.space or parameters[2] or 0
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
@@ -5263,6 +5950,10 @@ local function read_from_tfm(specification)
features.encoding=encoding
end
end
+ properties.haskerns=true
+ properties.hasligatures=true
+ resources.unicodes={}
+ resources.lookuptags={}
return tfmdata
end
end
@@ -5318,15 +6009,20 @@ local trace_indexing=false trackers.register("afm.indexing",function(v) trace_in
local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
local report_afm=logs.reporter("fonts","afm loading")
+local setmetatableindex=table.setmetatableindex
local findbinfile=resolvers.findbinfile
local definers=fonts.definers
local readers=fonts.readers
local constructors=fonts.constructors
+local fontloader=fontloader
+local font_to_table=fontloader.to_table
+local open_font=fontloader.open
+local close_font=fontloader.close
local afm=constructors.newhandler("afm")
local pfb=constructors.newhandler("pfb")
local afmfeatures=constructors.newfeatures("afm")
local registerafmfeature=afmfeatures.register
-afm.version=1.410
+afm.version=1.500
afm.cache=containers.define("fonts","afm",afm.version,true)
afm.autoprefixed=true
afm.helpdata={}
@@ -5334,6 +6030,7 @@ afm.syncspace=true
afm.addligatures=true
afm.addtexligatures=true
afm.addkerns=true
+local overloads=fonts.mappings.overloads
local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local function setmode(tfmdata,value)
if value then
@@ -5436,10 +6133,10 @@ local function get_variables(data,fontmetrics)
end
local function get_indexes(data,pfbname)
data.resources.filename=resolvers.unresolve(pfbname)
- local pfbblob=fontloader.open(pfbname)
+ local pfbblob=open_font(pfbname)
if pfbblob then
local characters=data.characters
- local pfbdata=fontloader.to_table(pfbblob)
+ local pfbdata=font_to_table(pfbblob)
if pfbdata then
local glyphs=pfbdata.glyphs
if glyphs then
@@ -5464,7 +6161,7 @@ local function get_indexes(data,pfbname)
elseif trace_loading then
report_afm("no data in pfb file %a",pfbname)
end
- fontloader.close(pfbblob)
+ close_font(pfbblob)
elseif trace_loading then
report_afm("invalid pfb file %a",pfbname)
end
@@ -5521,7 +6218,7 @@ local function readafm(filename)
return nil
end
end
-local addkerns,addligatures,addtexligatures,unify,normalize
+local addkerns,addligatures,addtexligatures,unify,normalize,fixnames
function afm.load(filename)
filename=resolvers.findfile(filename,'afm') or ""
if filename~="" and not fonts.names.ignoredfile(filename) then
@@ -5564,6 +6261,7 @@ function afm.load(filename)
addkerns(data)
end
normalize(data)
+ fixnames(data)
report_afm("add tounicode data")
fonts.mappings.addtounicode(data,filename)
data.size=size
@@ -5571,6 +6269,7 @@ function afm.load(filename)
data.pfbsize=pfbsize
data.pfbtime=pfbtime
report_afm("saving %a in cache",name)
+ data.resources.unicodes=nil
data=containers.write(afm.cache,name,data)
data=containers.read(afm.cache,name)
end
@@ -5620,7 +6319,6 @@ unify=function(data,filename)
if unicode then
krn[unicode]=kern
else
- print(unicode,name)
end
end
description.kerns=krn
@@ -5631,18 +6329,30 @@ unify=function(data,filename)
local filename=resources.filename or file.removesuffix(file.basename(filename))
resources.filename=resolvers.unresolve(filename)
resources.unicodes=unicodes
- resources.marks={}
- resources.names=names
+ resources.marks={}
resources.private=private
end
normalize=function(data)
end
+fixnames=function(data)
+ for k,v in next,data.descriptions do
+ local n=v.name
+ local r=overloads[n]
+ if r then
+ local name=r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name=name
+ v.unicode=r.unicode
+ end
+ end
+end
local addthem=function(rawdata,ligatures)
if ligatures then
local descriptions=rawdata.descriptions
local resources=rawdata.resources
local unicodes=resources.unicodes
- local names=resources.names
for ligname,ligdata in next,ligatures do
local one=descriptions[unicodes[ligname]]
if one then
@@ -5775,8 +6485,8 @@ local function copytotfm(data)
local filename=constructors.checkedfilename(resources)
local fontname=metadata.fontname or metadata.fullname
local fullname=metadata.fullname or metadata.fontname
- local endash=unicodes['space']
- local emdash=unicodes['emdash']
+ local endash=0x0020
+ local emdash=0x2014
local spacer="space"
local spaceunits=500
local monospaced=metadata.isfixedpitch
@@ -5830,7 +6540,7 @@ local function copytotfm(data)
if charxheight then
parameters.x_height=charxheight
else
- local x=unicodes['x']
+ local x=0x0078
if x then
local x=descriptions[x]
if x then
@@ -5877,7 +6587,34 @@ function afm.setfeatures(tfmdata,features)
return {}
end
end
-local function checkfeatures(specification)
+local function addtables(data)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
end
local function afmtotfm(specification)
local afmname=specification.filename or specification.name
@@ -5904,6 +6641,7 @@ local function afmtotfm(specification)
if not tfmdata then
local rawdata=afm.load(afmname)
if rawdata and next(rawdata) then
+ addtables(rawdata)
adddimensions(rawdata)
tfmdata=copytotfm(rawdata)
if tfmdata and next(tfmdata) then
@@ -5938,6 +6676,7 @@ end
local function prepareligatures(tfmdata,ligatures,value)
if value then
local descriptions=tfmdata.descriptions
+ local hasligatures=false
for unicode,character in next,tfmdata.characters do
local description=descriptions[unicode]
local dligatures=description.ligatures
@@ -5953,8 +6692,10 @@ local function prepareligatures(tfmdata,ligatures,value)
type=0
}
end
+ hasligatures=true
end
end
+ tfmdata.properties.hasligatures=hasligatures
end
end
local function preparekerns(tfmdata,kerns,value)
@@ -5963,6 +6704,7 @@ local function preparekerns(tfmdata,kerns,value)
local resources=rawdata.resources
local unicodes=resources.unicodes
local descriptions=tfmdata.descriptions
+ local haskerns=false
for u,chr in next,tfmdata.characters do
local d=descriptions[u]
local newkerns=d[kerns]
@@ -5978,8 +6720,10 @@ local function preparekerns(tfmdata,kerns,value)
kerns[uk]=v
end
end
+ haskerns=true
end
end
+ tfmdata.properties.haskerns=haskerns
end
end
local list={
@@ -6405,10 +7149,12 @@ local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
local insert=table.insert
local lpegmatch=lpeg.match
-local reversed,concat,remove=table.reversed,table.concat,table.remove
+local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
local ioflush=io.flush
local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
local formatters=string.formatters
+local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match
+local setmetatableindex=table.setmetatableindex
local allocate=utilities.storage.allocate
local registertracker=trackers.register
local registerdirective=directives.register
@@ -6423,26 +7169,28 @@ local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dyna
local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end)
+local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end)
local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.749
+otf.version=2.803
otf.cache=containers.define("fonts","otf",otf.version,true)
-local fontdata=fonts.hashes.identifiers
+local hashes=fonts.hashes
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local fontdata=hashes and hashes.identifiers
local chardata=characters and characters.data
-local otffeatures=fonts.constructors.newfeatures("otf")
+local otffeatures=constructors.newfeatures("otf")
local registerotffeature=otffeatures.register
local enhancers=allocate()
otf.enhancers=enhancers
local patches={}
enhancers.patches=patches
-local definers=fonts.definers
-local readers=fonts.readers
-local constructors=fonts.constructors
local forceload=false
local cleanup=0
-local usemetatables=false
local packdata=true
local syncspace=true
local forcenotdef=false
@@ -6451,7 +7199,11 @@ local overloadkerns=false
local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local wildcard="*"
local default="dflt"
-local fontloaderfields=fontloader.fields
+local fontloader=fontloader
+local open_font=fontloader.open
+local close_font=fontloader.close
+local font_fields=fontloader.fields
+local apply_featurefile=fontloader.apply_featurefile
local mainfields=nil
local glyphfields=nil
local formats=fonts.formats
@@ -6461,7 +7213,6 @@ formats.ttc="truetype"
formats.dfont="truetype"
registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
-registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
@@ -6493,7 +7244,7 @@ local function load_featurefile(raw,featurefile)
if trace_loading then
report_otf("using featurefile %a",featurefile)
end
- fontloader.apply_featurefile(raw,featurefile)
+ apply_featurefile(raw,featurefile)
end
end
local function showfeatureorder(rawdata,filename)
@@ -6553,7 +7304,6 @@ local valid_fields=table.tohash {
"extrema_bound",
"familyname",
"fontname",
- "fontname",
"fontstyle_id",
"fontstyle_name",
"fullname",
@@ -6579,6 +7329,7 @@ local valid_fields=table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
+ "validation_state",
"version",
"vert_base",
"weight",
@@ -6590,7 +7341,6 @@ local ordered_enhancers={
"prepare lookups",
"analyze glyphs",
"analyze math",
- "prepare tounicode",
"reorganize lookups",
"reorganize mark classes",
"reorganize anchor classes",
@@ -6603,9 +7353,12 @@ local ordered_enhancers={
"check glyphs",
"check metadata",
"check extra features",
+ "prepare tounicode",
"check encoding",
"add duplicates",
"cleanup tables",
+ "compact lookups",
+ "purge names",
}
local actions=allocate()
local before=allocate()
@@ -6742,12 +7495,12 @@ function otf.load(filename,sub,featurefile)
report_otf("loading %a, hash %a",filename,hash)
local fontdata,messages
if sub then
- fontdata,messages=fontloader.open(filename,sub)
+ fontdata,messages=open_font(filename,sub)
else
- fontdata,messages=fontloader.open(filename)
+ fontdata,messages=open_font(filename)
end
if fontdata then
- mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ mainfields=mainfields or (font_fields and font_fields(fontdata))
end
if trace_loading and messages and #messages>0 then
if type(messages)=="string" then
@@ -6787,6 +7540,7 @@ function otf.load(filename,sub,featurefile)
},
lookuptypes={},
},
+ warnings={},
metadata={
},
properties={
@@ -6795,7 +7549,7 @@ function otf.load(filename,sub,featurefile)
goodies={},
helpers={
tounicodelist=splitter,
- tounicodetable=lpeg.Ct(splitter),
+ tounicodetable=Ct(splitter),
},
}
starttiming(data)
@@ -6820,7 +7574,7 @@ function otf.load(filename,sub,featurefile)
report_otf("preprocessing and caching time %s, packtime %s",
elapsedtime(data),packdata and elapsedtime(packtime) or 0)
end
- fontloader.close(fontdata)
+ close_font(fontdata)
if cleanup>3 then
collectgarbage("collect")
end
@@ -6838,6 +7592,34 @@ function otf.load(filename,sub,featurefile)
report_otf("loading from cache using hash %a",hash)
end
enhance("unpack",data,filename,nil,false)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ else
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
if applyruntimefixes then
applyruntimefixes(filename,data)
end
@@ -6874,26 +7656,16 @@ actions["add dimensions"]=function(data,filename)
local defaultheight=resources.defaultheight or 0
local defaultdepth=resources.defaultdepth or 0
local basename=trace_markwidth and file.basename(filename)
- if usemetatables then
- for _,d in next,descriptions do
- local wd=d.width
- if not wd then
- d.width=defaultwidth
- elseif trace_markwidth and wd~=0 and d.class=="mark" then
- report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
- end
- setmetatable(d,mt)
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
end
- else
- for _,d in next,descriptions do
- local bb,wd=d.boundingbox,d.width
- if not wd then
- d.width=defaultwidth
- elseif trace_markwidth and wd~=0 and d.class=="mark" then
- report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
- end
- if bb then
- local ht,dp=bb[4],-bb[2]
+ if bb then
+ local ht=bb[4]
+ local dp=-bb[2]
if ht==0 or ht<0 then
else
d.height=ht
@@ -6902,7 +7674,6 @@ actions["add dimensions"]=function(data,filename)
else
d.depth=dp
end
- end
end
end
end
@@ -6969,17 +7740,26 @@ actions["prepare glyphs"]=function(data,filename,raw)
local glyph=cidglyphs[index]
if glyph then
local unicode=glyph.unicode
+ if unicode>=0x00E000 and unicode<=0x00F8FF then
+ unicode=-1
+ elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+ unicode=-1
+ elseif unicode>=0x100000 and unicode<=0x10FFFD then
+ unicode=-1
+ end
local name=glyph.name or cidnames[index]
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=cidunicodes[index]
end
if unicode and descriptions[unicode] then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ if trace_private then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ end
unicode=-1
end
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
if not name then
- name=format("u%06X",private)
+ name=format("u%06X.ctx",private)
end
unicode=private
unicodes[name]=private
@@ -6990,7 +7770,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
nofnames=nofnames+1
else
if not name then
- name=format("u%06X",unicode)
+ name=format("u%06X.ctx",unicode)
end
unicodes[name]=unicode
nofunicodes=nofunicodes+1
@@ -7023,7 +7803,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
if glyph then
local unicode=glyph.unicode
local name=glyph.name
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=private
unicodes[name]=private
if trace_private then
@@ -7031,12 +7811,29 @@ actions["prepare glyphs"]=function(data,filename,raw)
end
private=private+1
else
+ if unicode>criterium then
+ local taken=descriptions[unicode]
+ if taken then
+ if unicode>=private then
+ private=unicode+1
+ else
+ private=private+1
+ end
+ descriptions[private]=taken
+ unicodes[taken.name]=private
+ indices[taken.index]=private
+ if trace_private then
+ report_otf("slot %U is moved to %U due to private in font",unicode)
+ end
+ else
+ if unicode>=private then
+ private=unicode+1
+ end
+ end
+ end
unicodes[name]=unicode
end
indices[index]=unicode
- if not name then
- name=format("u%06X",unicode)
- end
descriptions[unicode]={
boundingbox=glyph.boundingbox,
name=name,
@@ -7045,7 +7842,6 @@ actions["prepare glyphs"]=function(data,filename,raw)
}
local altuni=glyph.altuni
if altuni then
- local d
for i=1,#altuni do
local a=altuni[i]
local u=a.unicode
@@ -7058,15 +7854,8 @@ actions["prepare glyphs"]=function(data,filename,raw)
vv={ [u]=unicode }
variants[v]=vv
end
- elseif d then
- d[#d+1]=u
- else
- d={ u }
end
end
- if d then
- duplicates[unicode]=d
- end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -7084,47 +7873,45 @@ actions["check encoding"]=function(data,filename,raw)
local duplicates=resources.duplicates
local mapdata=raw.map or {}
local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
local encname=lower(data.enc_name or mapdata.enc_name or "")
- local criterium=0xFFFF
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
if find(encname,"unicode") then
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- for unicode,index in next,unicodetoindex do
- if unicode<=criterium and not descriptions[unicode] then
- local parent=indices[index]
- if not parent then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
else
- local parentdescription=descriptions[parent]
- if parentdescription then
- local altuni=parentdescription.altuni
- if not altuni then
- altuni={ { unicode=unicode } }
- parentdescription.altuni=altuni
- duplicates[parent]={ unicode }
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
else
- local done=false
- for i=1,#altuni do
- if altuni[i].unicode==unicode then
- done=true
- break
- end
- end
- if not done then
- insert(altuni,{ unicode=unicode })
- insert(duplicates[parent],unicode)
- end
+ d.copies={ [maybeunicode]=true }
end
- if trace_loading then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- end
- else
- report_otf("weird, unicode %U points to %U with index %H",unicode,index)
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
end
end
end
end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -7132,6 +7919,7 @@ actions["check encoding"]=function(data,filename,raw)
end
if mapdata then
mapdata.map={}
+ mapdata.backmap={}
end
end
actions["add duplicates"]=function(data,filename,raw)
@@ -7142,28 +7930,37 @@ actions["add duplicates"]=function(data,filename,raw)
local indices=resources.indices
local duplicates=resources.duplicates
for unicode,d in next,duplicates do
- for i=1,#d do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
- descriptions[u]=duplicate
- local n=0
- for _,description in next,descriptions do
- if kerns then
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
local kerns=description.kerns
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
+ if kerns then
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
end
end
end
- end
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
end
end
end
@@ -7358,10 +8155,16 @@ actions["reorganize subtables"]=function(data,filename,raw)
report_otf("skipping weird lookup number %s",k)
elseif features then
local f={}
+ local o={}
for i=1,#features do
local df=features[i]
local tag=strip(lower(df.tag))
- local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
local dscripts=df.scripts
for i=1,#dscripts do
local d=dscripts[i]
@@ -7381,6 +8184,7 @@ actions["reorganize subtables"]=function(data,filename,raw)
subtables=subtables,
markclass=markclass,
features=f,
+ order=o,
}
else
lookups[name]={
@@ -7433,9 +8237,14 @@ local function t_hashed(t,cache)
local ti=t[i]
local tih=cache[ti]
if not tih then
- tih={}
- for i=1,#ti do
- tih[ti[i]]=true
+ local tn=#ti
+ if tn==1 then
+ tih={ [ti[1]]=true }
+ else
+ tih={}
+ for i=1,tn do
+ tih[ti[i]]=true
+ end
end
cache[ti]=tih
end
@@ -7448,12 +8257,17 @@ local function t_hashed(t,cache)
end
local function s_hashed(t,cache)
if t then
- local ht={}
local tf=t[1]
- for i=1,#tf do
- ht[i]={ [tf[i]]=true }
+ local nf=#tf
+ if nf==1 then
+ return { [tf[1]]=true }
+ else
+ local ht={}
+ for i=1,nf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
end
- return ht
else
return nil
end
@@ -7603,12 +8417,12 @@ actions["reorganize lookups"]=function(data,filename,raw)
local fore=glyphs.fore
if fore and fore~="" then
fore=s_uncover(splitter,s_u_cache,fore)
- rule.before=s_hashed(fore,s_h_cache)
+ rule.after=s_hashed(fore,s_h_cache)
end
local back=glyphs.back
if back then
back=s_uncover(splitter,s_u_cache,back)
- rule.after=s_hashed(back,s_h_cache)
+ rule.before=s_hashed(back,s_h_cache)
end
local names=glyphs.names
if names then
@@ -7616,6 +8430,14 @@ actions["reorganize lookups"]=function(data,filename,raw)
rule.current=s_hashed(names,s_h_cache)
end
rule.glyphs=nil
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
end
end
end
@@ -7632,7 +8454,9 @@ local function check_variants(unicode,the_variants,splitter,unicodes)
for i=1,#glyphs do
local g=glyphs[i]
if done[g] then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ if i>1 then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ end
else
if n==0 then
n=1
@@ -7871,6 +8695,10 @@ actions["check glyphs"]=function(data,filename,raw)
description.glyph=nil
end
end
+local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1)
+local function valid_ps_name(str)
+ return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false
+end
actions["check metadata"]=function(data,filename,raw)
local metadata=data.metadata
for _,k in next,mainfields do
@@ -7887,10 +8715,51 @@ actions["check metadata"]=function(data,filename,raw)
ttftables[i].data="deleted"
end
end
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local function valid(what)
+ local names=raw.names
+ for i=1,#names do
+ local list=names[i]
+ local names=list.names
+ if names then
+ local name=names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname=metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname=valid(what)
+ if not newname then
+ newname=formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1]=warning
+ report_otf(warning)
+ metadata[what]=newname
+ end
+ end
+ check("fontname")
+ check("fullname")
+ end
end
actions["cleanup tables"]=function(data,filename,raw)
+ local duplicates=data.resources.duplicates
+ if duplicates then
+ for k,v in next,duplicates do
+ if #v==1 then
+ duplicates[k]=v[1]
+ end
+ end
+ end
data.resources.indices=nil
- data.helpers=nil
+ data.resources.unicodes=nil
+ data.helpers=nil
end
actions["reorganize glyph lookups"]=function(data,filename,raw)
local resources=data.resources
@@ -7972,6 +8841,7 @@ actions["reorganize glyph lookups"]=function(data,filename,raw)
end
end
end
+local zero={ 0,0 }
actions["reorganize glyph anchors"]=function(data,filename,raw)
local descriptions=data.descriptions
for unicode,description in next,descriptions do
@@ -7980,14 +8850,32 @@ actions["reorganize glyph anchors"]=function(data,filename,raw)
for class,data in next,anchors do
if class=="baselig" then
for tag,specification in next,data do
- for i=1,#specification do
- local si=specification[i]
- specification[i]={ si.x or 0,si.y or 0 }
+ local n=0
+ for k,v in next,specification do
+ if k>n then
+ n=k
+ end
+ local x,y=v.x,v.y
+ if x or y then
+ specification[k]={ x or 0,y or 0 }
+ else
+ specification[k]=zero
+ end
end
+ local t={}
+ for i=1,n do
+ t[i]=specification[i] or zero
+ end
+ data[tag]=t
end
else
for tag,specification in next,data do
- data[tag]={ specification.x or 0,specification.y or 0 }
+ local x,y=specification.x,specification.y
+ if x or y then
+ data[tag]={ x or 0,y or 0 }
+ else
+ data[tag]=zero
+ end
end
end
end
@@ -7995,6 +8883,142 @@ actions["reorganize glyph anchors"]=function(data,filename,raw)
end
end
end
+local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1
+local uselessname=(1-bogusname)^0*bogusname
+actions["purge names"]=function(data,filename,raw)
+ if purge_names then
+ local n=0
+ for u,d in next,data.descriptions do
+ if lpegmatch(uselessname,d.name) then
+ n=n+1
+ d.name=nil
+ end
+ end
+ if n>0 then
+ report_otf("%s bogus names removed",n)
+ end
+ end
+end
+actions["compact lookups"]=function(data,filename,raw)
+ if not compact_lookups then
+ report_otf("not compacting")
+ return
+ end
+ local last=0
+ local tags=table.setmetatableindex({},
+ function(t,k)
+ last=last+1
+ t[k]=last
+ return last
+ end
+ )
+ local descriptions=data.descriptions
+ local resources=data.resources
+ for u,d in next,descriptions do
+ local slookups=d.slookups
+ if type(slookups)=="table" then
+ local s={}
+ for k,v in next,slookups do
+ s[tags[k]]=v
+ end
+ d.slookups=s
+ end
+ local mlookups=d.mlookups
+ if type(mlookups)=="table" then
+ local m={}
+ for k,v in next,mlookups do
+ m[tags[k]]=v
+ end
+ d.mlookups=m
+ end
+ local kerns=d.kerns
+ if type(kerns)=="table" then
+ local t={}
+ for k,v in next,kerns do
+ t[tags[k]]=v
+ end
+ d.kerns=t
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local rules=v.rules
+ if rules then
+ for i=1,#rules do
+ local l=rules[i].lookups
+ if type(l)=="table" then
+ for i=1,#l do
+ l[i]=tags[l[i]]
+ end
+ end
+ end
+ end
+ l[tags[k]]=v
+ end
+ data.lookups=l
+ end
+ local lookups=resources.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local s=v.subtables
+ if type(s)=="table" then
+ for i=1,#s do
+ s[i]=tags[s[i]]
+ end
+ end
+ l[tags[k]]=v
+ end
+ resources.lookups=l
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for i=1,#sequences do
+ local s=sequences[i]
+ local n=s.name
+ if n then
+ s.name=tags[n]
+ end
+ local t=s.subtables
+ if type(t)=="table" then
+ for i=1,#t do
+ t[i]=tags[t[i]]
+ end
+ end
+ end
+ end
+ local lookuptypes=resources.lookuptypes
+ if lookuptypes then
+ local l={}
+ for k,v in next,lookuptypes do
+ l[tags[k]]=v
+ end
+ resources.lookuptypes=l
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookups in next,anchor_to_lookup do
+ local l={}
+ for lookup,value in next,lookups do
+ l[tags[lookup]]=value
+ end
+ anchor_to_lookup[anchor]=l
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ local l={}
+ for lookup,value in next,lookup_to_anchor do
+ l[tags[lookup]]=value
+ end
+ resources.lookup_to_anchor=l
+ end
+ tags=table.swapped(tags)
+ report_otf("%s lookup tags compacted",#tags)
+ resources.lookuptags=tags
+end
function otf.setfeatures(tfmdata,features)
local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
if okay then
@@ -8006,6 +9030,7 @@ end
local function copytotfm(data,cache_id)
if data then
local metadata=data.metadata
+ local warnings=data.warnings
local resources=data.resources
local properties=derivetable(data.properties)
local descriptions=derivetable(data.descriptions)
@@ -8080,6 +9105,7 @@ local function copytotfm(data,cache_id)
local filename=constructors.checkedfilename(resources)
local fontname=metadata.fontname
local fullname=metadata.fullname or fontname
+ local psname=fontname or fullname
local units=metadata.units_per_em or 1000
if units==0 then
units=1000
@@ -8094,8 +9120,8 @@ local function copytotfm(data,cache_id)
parameters.italicangle=italicangle
parameters.charwidth=charwidth
parameters.charxheight=charxheight
- local space=0x0020
- local emdash=0x2014
+ local space=0x0020
+ local emdash=0x2014
if monospaced then
if descriptions[space] then
spaceunits,spacer=descriptions[space].width,"space"
@@ -8142,7 +9168,7 @@ local function copytotfm(data,cache_id)
if charxheight then
parameters.x_height=charxheight
else
- local x=0x78
+ local x=0x0078
if x then
local x=descriptions[x]
if x then
@@ -8161,8 +9187,16 @@ local function copytotfm(data,cache_id)
properties.filename=filename
properties.fontname=fontname
properties.fullname=fullname
- properties.psname=fontname or fullname
+ properties.psname=psname
properties.name=filename or fullname
+ if warnings and #warnings>0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
return {
characters=characters,
descriptions=descriptions,
@@ -8171,6 +9205,7 @@ local function copytotfm(data,cache_id)
resources=resources,
properties=properties,
goodies=goodies,
+ warnings=warnings,
}
end
end
@@ -8184,6 +9219,33 @@ local function otftotfm(specification)
local features=specification.features.normal
local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ if type(list)=="table" then
+ local n=#list
+ for i=1,n do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+n
+ else
+ if not descriptions[list] then
+ descriptions[list]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ nofduplicates=nofduplicates+1
+ end
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
@@ -8309,7 +9371,7 @@ if not modules then modules={} end modules ['font-otb']={
}
local concat=table.concat
local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
-local type,next,tonumber,tostring=type,next,tonumber,tostring
+local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget
local lpegmatch=lpeg.match
local utfchar=utf.char
local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
@@ -8341,13 +9403,14 @@ local function gref(descriptions,n)
return f_unicode(n)
end
elseif n then
- local num,nam={},{}
- for i=2,#n do
+ local num,nam,j={},{},0
+ for i=1,#n do
local ni=n[i]
if tonumber(ni) then
+ j=j+1
local di=descriptions[ni]
- num[i]=f_unicode(ni)
- nam[i]=di and di.name or "-"
+ num[j]=f_unicode(ni)
+ nam[j]=di and di.name or "-"
end
end
return f_unilist(num,nam)
@@ -8355,36 +9418,36 @@ local function gref(descriptions,n)
return "<error in base mode tracing>"
end
end
-local function cref(feature,lookupname)
+local function cref(feature,lookuptags,lookupname)
if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookupname)
+ return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
else
return formatters["feature %a"](feature)
end
end
-local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
replacement and gref(descriptions,replacement),
value,
comment)
end
-local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
report_prepare("%s: base substitution %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
gref(descriptions,substitution))
end
-local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
report_prepare("%s: base ligature %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,ligature),
gref(descriptions,unicode))
end
-local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
gref(descriptions,otherunicode),
value)
@@ -8421,7 +9484,7 @@ local function finalize_ligatures(tfmdata,ligatures)
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
- local unicodes=resources.unicodes
+ local unicodes=resources.unicodes
local private=resources.private
local alldone=false
while not alldone do
@@ -8430,8 +9493,8 @@ local function finalize_ligatures(tfmdata,ligatures)
local ligature=ligatures[i]
if ligature then
local unicode,lookupdata=ligature[1],ligature[2]
- if trace then
- trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
end
local size=#lookupdata
local firstcode=lookupdata[1]
@@ -8443,8 +9506,8 @@ local function finalize_ligatures(tfmdata,ligatures)
local firstdata=characters[firstcode]
if not firstdata then
firstcode=private
- if trace then
- trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
end
unicodes[firstname]=firstcode
firstdata={ intermediate=true,ligatures={} }
@@ -8457,18 +9520,18 @@ local function finalize_ligatures(tfmdata,ligatures)
local secondname=firstname.."_"..secondcode
if i==size-1 then
target=unicode
- if not unicodes[secondname] then
+ if not rawget(unicodes,secondname) then
unicodes[secondname]=unicode
end
okay=true
else
- target=unicodes[secondname]
+ target=rawget(unicodes,secondname)
if not target then
break
end
end
- if trace then
- trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
end
local firstligs=firstdata.ligatures
if firstligs then
@@ -8479,6 +9542,8 @@ local function finalize_ligatures(tfmdata,ligatures)
firstcode=target
firstname=secondname
end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
end
if okay then
ligatures[i]=false
@@ -8488,62 +9553,66 @@ local function finalize_ligatures(tfmdata,ligatures)
end
alldone=done==0
end
- if trace then
- for k,v in next,characters do
- if v.ligatures then table.print(v,k) end
+ if trace_ligatures_detail then
+ for k,v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
end
end
- tfmdata.resources.private=private
+ resources.private=private
+ return true
end
end
local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
+ local properties=tfmdata.properties
local changed=tfmdata.changed
- local unicodes=resources.unicodes
local lookuphash=resources.lookuphash
local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
local ligatures={}
- local alternate=tonumber(value)
+ local alternate=tonumber(value) or true and 1
local defaultalt=otf.defaultbasealternate
local trace_singles=trace_baseinit and trace_singles
local trace_alternatives=trace_baseinit and trace_alternatives
local trace_ligatures=trace_baseinit and trace_ligatures
local actions={
- substitution=function(lookupdata,lookupname,description,unicode)
+ substitution=function(lookupdata,lookuptags,lookupname,description,unicode)
if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
end
changed[unicode]=lookupdata
end,
- alternate=function(lookupdata,lookupname,description,unicode)
+ alternate=function(lookupdata,lookuptags,lookupname,description,unicode)
local replacement=lookupdata[alternate]
if replacement then
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
end
elseif defaultalt=="first" then
replacement=lookupdata[1]
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
elseif defaultalt=="last" then
replacement=lookupdata[#lookupdata]
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
else
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
end
end
end,
- ligature=function(lookupdata,lookupname,description,unicode)
+ ligature=function(lookupdata,lookuptags,lookupname,description,unicode)
if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
end
ligatures[#ligatures+1]={ unicode,lookupdata }
end,
@@ -8559,7 +9628,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local lookuptype=lookuptypes[lookupname]
local action=actions[lookuptype]
if action then
- action(lookupdata,lookupname,description,unicode)
+ action(lookupdata,lookuptags,lookupname,description,unicode)
end
end
end
@@ -8574,22 +9643,24 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local action=actions[lookuptype]
if action then
for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
+ action(lookuplist[i],lookuptags,lookupname,description,unicode)
end
end
end
end
end
end
- finalize_ligatures(tfmdata,ligatures)
+ properties.hasligatures=finalize_ligatures(tfmdata,ligatures)
end
local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
- local unicodes=resources.unicodes
+ local properties=tfmdata.properties
+ local lookuptags=resources.lookuptags
local sharedkerns={}
local traceindeed=trace_baseinit and trace_kerns
+ local haskerns=false
for unicode,character in next,characters do
local description=descriptions[unicode]
local rawkerns=description.kerns
@@ -8611,13 +9682,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
newkerns={ [otherunicode]=value }
done=true
if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
end
elseif not newkerns[otherunicode] then
newkerns[otherunicode]=value
done=true
if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
end
end
end
@@ -8626,12 +9697,14 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
if done then
sharedkerns[rawkerns]=newkerns
character.kerns=newkerns
+ haskerns=true
else
sharedkerns[rawkerns]=false
end
end
end
end
+ properties.haskerns=haskerns
end
basemethods.independent={
preparesubstitutions=preparesubstitutions,
@@ -8657,13 +9730,13 @@ local function make_1(present,tree,name)
end
end
end
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
for k,v in next,tree do
if k=="ligature" then
local character=characters[preceding]
if not character then
if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
end
character=makefake(tfmdata,name,present)
end
@@ -8684,7 +9757,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don
else
local code=present[name] or unicode
local name=name.."_"..k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
end
end
end
@@ -8695,8 +9768,9 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local changed=tfmdata.changed
local lookuphash=resources.lookuphash
local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
local ligatures={}
- local alternate=tonumber(value)
+ local alternate=tonumber(value) or true and 1
local defaultalt=otf.defaultbasealternate
local trace_singles=trace_baseinit and trace_singles
local trace_alternatives=trace_baseinit and trace_alternatives
@@ -8708,7 +9782,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for unicode,data in next,lookupdata do
if lookuptype=="substitution" then
if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
end
changed[unicode]=data
elseif lookuptype=="alternate" then
@@ -8716,28 +9790,28 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
if replacement then
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
end
elseif defaultalt=="first" then
replacement=data[1]
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
elseif defaultalt=="last" then
replacement=data[#data]
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
else
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
end
end
elseif lookuptype=="ligature" then
ligatures[#ligatures+1]={ unicode,data,lookupname }
if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
end
end
end
@@ -8755,7 +9829,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for i=1,nofligatures do
local ligature=ligatures[i]
local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
end
end
end
@@ -8763,7 +9837,9 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
+ local properties=tfmdata.properties
local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
local traceindeed=trace_baseinit and trace_kerns
for l=1,#lookuplist do
local lookupname=lookuplist[l]
@@ -8779,7 +9855,7 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
for otherunicode,kern in next,data do
if not kerns[otherunicode] and kern~=0 then
kerns[otherunicode]=kern
- report_kern(feature,lookup,descriptions,unicode,otherunicode,kern)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern)
end
end
else
@@ -8803,8 +9879,9 @@ basemethods.shared={
basemethod="independent"
local function featuresinitializer(tfmdata,value)
if true then
- local t=trace_preparing and os.clock()
+ local starttime=trace_preparing and os.clock()
local features=tfmdata.shared.features
+ local fullname=tfmdata.properties.fullname or "?"
if features then
applybasemethod("initializehashes",tfmdata)
local collectlookups=otf.collectlookups
@@ -8814,26 +9891,35 @@ local function featuresinitializer(tfmdata,value)
local language=properties.language
local basesubstitutions=rawdata.resources.features.gsub
local basepositionings=rawdata.resources.features.gpos
- if basesubstitutions then
- for feature,data in next,basesubstitutions do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositionings then
- for feature,data in next,basepositionings do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
end
end
end
@@ -8841,7 +9927,7 @@ local function featuresinitializer(tfmdata,value)
registerbasehash(tfmdata)
end
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
end
end
end
@@ -8863,17 +9949,19 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['node-inj']={
+if not modules then modules={} end modules ['font-inj']={
version=1.001,
- comment="companion to node-ini.mkiv",
+ comment="companion to font-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files",
}
-local next=next
+if not nodes.properties then return end
+local next,rawget=next,rawget
local utfchar=utf.char
-local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
-local report_injections=logs.reporter("nodes","injections")
+local fastcopy=table.fastcopy
+local trace_injections=false trackers.register("fonts.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("fonts","injections")
local attributes,nodes,node=attributes,nodes,node
fonts=fonts
local fontdata=fonts.hashes.identifiers
@@ -8889,140 +9977,339 @@ local newkern=nodepool.kern
local tonode=nuts.tonode
local tonut=nuts.tonut
local getfield=nuts.getfield
+local setfield=nuts.setfield
local getnext=nuts.getnext
local getprev=nuts.getprev
local getid=nuts.getid
-local getattr=nuts.getattr
local getfont=nuts.getfont
local getsubtype=nuts.getsubtype
local getchar=nuts.getchar
-local setfield=nuts.setfield
-local setattr=nuts.setattr
local traverse_id=nuts.traverse_id
local insert_node_before=nuts.insert_before
local insert_node_after=nuts.insert_after
-local a_kernpair=attributes.private('kernpair')
-local a_ligacomp=attributes.private('ligacomp')
-local a_markbase=attributes.private('markbase')
-local a_markmark=attributes.private('markmark')
-local a_markdone=attributes.private('markdone')
-local a_cursbase=attributes.private('cursbase')
-local a_curscurs=attributes.private('curscurs')
-local a_cursdone=attributes.private('cursdone')
+local find_tail=nuts.tail
+local properties=nodes.properties.data
function injections.installnewkern(nk)
newkern=nk or newkern
end
-local cursives={}
-local marks={}
-local kerns={}
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
- local ws,wn=tfmstart.width,tfmnext.width
- local bound=#cursives+1
- setattr(start,a_cursbase,bound)
- setattr(nxt,a_curscurs,bound)
- cursives[bound]={ rlmode,dx,dy,ws,wn }
- return dx,dy,bound
+local nofregisteredkerns=0
+local nofregisteredpairs=0
+local nofregisteredmarks=0
+local nofregisteredcursives=0
+local keepregisteredcounts=false
+function injections.keepcounts()
+ keepregisteredcounts=true
+end
+function injections.resetcounts()
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ keepregisteredcounts=false
+end
+function injections.reset(n)
+ local p=rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections=nil
+ end
+end
+function injections.copy(target,source)
+ local sp=rawget(properties,source)
+ if sp then
+ local tp=rawget(properties,target)
+ local si=rawget(sp,"injections")
+ if si then
+ si=fastcopy(si)
+ if tp then
+ tp.injections=si
+ else
+ propertydata[target]={
+ injections=si,
+ }
+ end
+ else
+ if tp then
+ tp.injections=nil
+ end
+ end
+ end
end
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
- if x~=0 or w~=0 or y~=0 or h~=0 then
- local bound=getattr(current,a_kernpair)
- if bound then
- local kb=kerns[bound]
- kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+function injections.setligaindex(n,index)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.ligaindex=index
else
- bound=#kerns+1
- setattr(current,a_kernpair,bound)
- kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
+ p.injections={
+ ligaindex=index
+ }
end
- return x,y,w,h,bound
+ else
+ properties[n]={
+ injections={
+ ligaindex=index
+ }
+ }
+ end
+end
+function injections.getligaindex(n,default)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx=factor*(exit[1]-entry[1])
+ local dy=-factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ nofregisteredcursives=nofregisteredcursives+1
+ if rlmode<0 then
+ dx=-(dx+wn)
+ else
+ dx=dx-ws
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursiveanchor=true
+ else
+ p.injections={
+ cursiveanchor=true,
+ }
+ end
+ else
+ properties[start]={
+ injections={
+ cursiveanchor=true,
+ },
+ }
+ end
+ local p=rawget(properties,nxt)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursivex=dx
+ i.cursivey=dy
+ else
+ p.injections={
+ cursivex=dx,
+ cursivey=dy,
+ }
+ end
+ else
+ properties[nxt]={
+ injections={
+ cursivex=dx,
+ cursivey=dy,
+ },
+ }
+ end
+ return dx,dy,nofregisteredcursives
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection)
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local yoffset=y-h
+ local leftkern=x
+ local rightkern=w-x
+ if leftkern~=0 or rightkern~=0 or yoffset~=0 then
+ nofregisteredpairs=nofregisteredpairs+1
+ if rlmode and rlmode<0 then
+ leftkern,rightkern=rightkern,leftkern
+ end
+ local p=rawget(properties,current)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ if leftkern~=0 then
+ i.leftkern=(i.leftkern or 0)+leftkern
+ end
+ if rightkern~=0 then
+ i.rightkern=(i.rightkern or 0)+rightkern
+ end
+ if yoffset~=0 then
+ i.yoffset=(i.yoffset or 0)+yoffset
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ p.injections={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ }
+ else
+ p.injections={
+ yoffset=yoffset,
+ }
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ properties[current]={
+ injections={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ },
+ }
+ else
+ properties[current]={
+ injections={
+ yoffset=yoffset,
+ },
+ }
+ end
+ return x,y,w,h,nofregisteredpairs
+ end
end
return x,y,w,h
end
-function injections.setkern(current,factor,rlmode,x,tfmchr)
+function injections.setkern(current,factor,rlmode,x,injection)
local dx=factor*x
if dx~=0 then
- local bound=#kerns+1
- setattr(current,a_kernpair,bound)
- kerns[bound]={ rlmode,dx }
- return dx,bound
+ nofregisteredkerns=nofregisteredkerns+1
+ local p=rawget(properties,current)
+ if not injection then
+ injection="injections"
+ end
+ if p then
+ local i=rawget(p,injection)
+ if i then
+ i.leftkern=dx+(i.leftkern or 0)
+ else
+ p[injection]={
+ leftkern=dx,
+ }
+ end
+ else
+ properties[current]={
+ [injection]={
+ leftkern=dx,
+ },
+ }
+ end
+ return dx,nofregisteredkerns
else
return 0,0
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark)
- local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
- local bound=getattr(base,a_markbase)
- local index=1
- if bound then
- local mb=marks[bound]
- if mb then
- index=#mb+1
- mb[index]={ dx,dy,rlmode }
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
- return dx,dy,bound
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ nofregisteredmarks=nofregisteredmarks+1
+ if rlmode>=0 then
+ dx=tfmbase.width-dx
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.markx=dx
+ i.marky=dy
+ i.markdir=rlmode or 0
+ i.markbase=nofregisteredmarks
+ i.markbasenode=base
else
- report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
+ p.injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ }
end
+ else
+ properties[start]={
+ injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ },
+ }
end
- index=index or 1
- bound=#marks+1
- setattr(base,a_markbase,bound)
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
- marks[bound]={ [index]={ dx,dy,rlmode,baseismark } }
- return dx,dy,bound
+ return dx,dy,nofregisteredmarks
end
local function dir(n)
return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
end
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- local kp=getattr(n,a_kernpair)
- local mb=getattr(n,a_markbase)
- local mm=getattr(n,a_markmark)
- local md=getattr(n,a_markdone)
- local cb=getattr(n,a_cursbase)
- local cc=getattr(n,a_curscurs)
- local char=getchar(n)
- report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
- if kp then
- local k=kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
- else
- report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
- end
- end
- if mb then
- report_injections(" markbase: bound %a",mb)
- end
- if mm then
- local m=marks[mm]
- if mb then
- local m=m[mb]
- if m then
- report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
- else
- report_injections(" markmark: bound %a, missing index",mm)
- end
- else
- m=m[1]
- report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
- end
+local function showchar(n,nested)
+ local char=getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+local function show(n,what,nested,symbol)
+ if n then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,what)
+ if i then
+ local leftkern=i.leftkern or 0
+ local rightkern=i.rightkern or 0
+ local yoffset=i.yoffset or 0
+ local markx=i.markx or 0
+ local marky=i.marky or 0
+ local markdir=i.markdir or 0
+ local markbase=i.markbase or 0
+ local cursivex=i.cursivex or 0
+ local cursivey=i.cursivey or 0
+ local ligaindex=i.ligaindex or 0
+ local margin=nested and 4 or 2
+ if rightkern~=0 or yoffset~=0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern~=0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx~=0 or marky~=0 or markbase~=0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase~=0 and "yes" or "no")
+ end
+ if cursivex~=0 or cursivey~=0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex~=0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n=head
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id==disc_code then
+ local pre=getfield(n,"pre")
+ local post=getfield(n,"post")
+ local replace=getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
end
- if cb then
- report_injections(" cursbase: bound %a",cb)
+ if post then
+ showsub(post,"postinjections","post")
end
- if cc then
- local c=cursives[cc]
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ if replace then
+ showsub(replace,"replaceinjections","replace")
end
end
+ n=getnext(n)
end
report_injections("end run")
end
@@ -9045,306 +10332,574 @@ local function show_result(head)
current=getnext(current)
end
end
-function injections.handler(head,where,keep)
- head=tonut(head)
- local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
- if has_kerns then
- local nf,tm=nil,nil
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- nofvalid=nofvalid+1
- valid[nofvalid]=n
- local f=getfont(n)
- if f~=nf then
- nf=f
- tm=fontdata[nf].resources.marks
- end
- if tm then
- mk[n]=tm[getchar(n)]
+local function collect_glyphs_1(head)
+ local glyphs,nofglyphs={},0
+ local marks,nofmarks={},0
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n)<256 then
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ end
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
end
- local k=getattr(n,a_kernpair)
- if k then
- local kk=kerns[k]
- if kk then
- local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
- local dy=y-h
- if dy~=0 then
- ky[n]=dy
- end
- if w~=0 or x~=0 then
- wx[n]=kk
+ end
+ end
+ end
+ end
+ return glyphs,nofglyphs,marks,nofmarks
+end
+local function collect_glyphs_2(head)
+ local glyphs,nofglyphs={},0
+ local marks,nofmarks={},0
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n)<256 then
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ end
+ end
+ end
+ return glyphs,nofglyphs,marks,nofmarks
+end
+local function inject_marks(marks,nofmarks)
+ for i=1,nofmarks do
+ local n=marks[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,"injections")
+ if pn then
+ local p=pn.markbasenode
+ if p then
+ local px=getfield(p,"xoffset")
+ local ox=0
+ local rightkern=nil
+ local pp=rawget(properties,p)
+ if pp then
+ pp=rawget(pp,"injections")
+ if pp then
+ rightkern=pp.rightkern
+ end
+ end
+ if rightkern then
+ if pn.markdir<0 then
+ ox=px-pn.markx-rightkern
+ else
+ local leftkern=pp.leftkern
+ if leftkern then
+ ox=px-pn.markx-leftkern
+ else
+ ox=px-pn.markx
end
- rl[n]=kk[1]
end
+ else
+ ox=px-pn.markx
+ local wn=getfield(n,"width")
+ if wn~=0 then
+ pn.leftkern=-wn/2
+ pn.rightkern=-wn/2
+ end
+ end
+ setfield(n,"xoffset",ox)
+ local py=getfield(p,"yoffset")
+ local oy=0
+ if marks[p] then
+ oy=py+pn.marky
+ else
+ oy=getfield(n,"yoffset")+py+pn.marky
end
+ setfield(n,"yoffset",oy)
+ else
end
end
- else
- local nf,tm=nil,nil
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- nofvalid=nofvalid+1
- valid[nofvalid]=n
- local f=getfont(n)
- if f~=nf then
- nf=f
- tm=fontdata[nf].resources.marks
+ end
+ end
+end
+local function inject_cursives(glyphs,nofglyphs)
+ local cursiveanchor,lastanchor=nil,nil
+ local minc,maxc,last=0,0,nil
+ for i=1,nofglyphs do
+ local n=glyphs[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,"injections")
+ end
+ if pn then
+ local cursivex=pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex~=0 then
+ pn.leftkern=(pn.leftkern or 0)+cursivex
end
- if tm then
- mk[n]=tm[getchar(n)]
+ if lastanchor then
+ if maxc==0 then
+ minc=lastanchor
+ end
+ maxc=lastanchor
+ properties[cursiveanchor].cursivedy=pn.cursivey
+ end
+ last=n
+ else
+ maxc=0
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor=n
+ lastanchor=i
+ else
+ cursiveanchor=nil
+ lastanchor=nil
+ if maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
end
+ maxc=0
end
end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ end
+ maxc=0
+ cursiveanchor=nil
+ lastanchor=nil
+ end
+ end
+ if last and maxc>0 then
+ local ny=getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
end
- if nofvalid>0 then
- local cx={}
- if has_kerns and next(ky) then
- for n,k in next,ky do
- setfield(n,"yoffset",k)
+ end
+end
+local function inject_kerns(head,list,length)
+ for i=1,length do
+ local n=list[i]
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
end
end
- if has_cursives then
- local p_cursbase,p=nil,nil
- local t,d,maxt={},{},0
- for i=1,nofvalid do
- local n=valid[i]
- if not mk[n] then
- local n_cursbase=getattr(n,a_cursbase)
- if p_cursbase then
- local n_curscurs=getattr(n,a_curscurs)
- if p_cursbase==n_curscurs then
- local c=cursives[n_curscurs]
- if c then
- local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
- if rlmode>=0 then
- dx=dx-ws
- else
- dx=dx+wn
- end
- if dx~=0 then
- cx[n]=dx
- rl[n]=rlmode
- end
- dy=-dy
- maxt=maxt+1
- t[maxt]=p
- d[maxt]=dy
- else
- maxt=0
+ end
+ end
+end
+local function inject_everything(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs,nofglyphs,marks,nofmarks
+ if nofregisteredpairs>0 then
+ glyphs,nofglyphs,marks,nofmarks=collect_glyphs_1(head)
+ else
+ glyphs,nofglyphs,marks,nofmarks=collect_glyphs_2(head)
+ end
+ if nofglyphs>0 then
+ if nofregisteredcursives>0 then
+ inject_cursives(glyphs,nofglyphs)
+ end
+ if nofregisteredmarks>0 then
+ inject_marks(marks,nofmarks)
+ end
+ inject_kerns(head,glyphs,nofglyphs)
+ end
+ if nofmarks>0 then
+ inject_kerns(head,marks,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ end
+ return tonode(head),true
+end
+local function inject_kerns_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
end
end
- elseif maxt>0 then
- local ny=getfield(n,"yoffset")
- for i=maxt,1,-1 do
- ny=ny+d[i]
- local ti=t[i]
- setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ head=insert_node_before(head,n,newkern(leftkern))
end
- maxt=0
end
- if not n_cursbase and maxt>0 then
- local ny=getfield(n,"yoffset")
- for i=maxt,1,-1 do
- ny=ny+d[i]
- local ti=t[i]
- setfield(ti,"yoffset",ny)
+ end
+ end
+ else
+ break
+ end
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"preinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
end
- maxt=0
end
- p_cursbase,p=n_cursbase,n
+ else
+ break
end
end
- if maxt>0 then
- local ny=getfield(n,"yoffset")
- for i=maxt,1,-1 do
- ny=ny+d[i]
- local ti=t[i]
- setfield(ti,"yoffset",ny)
- end
- maxt=0
- end
- if not keep then
- cursives={}
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p=valid[i]
- local p_markbase=getattr(p,a_markbase)
- if p_markbase then
- local mrks=marks[p_markbase]
- local nofmarks=#mrks
- for n in traverse_id(glyph_code,getnext(p)) do
- local n_markmark=getattr(n,a_markmark)
- if p_markbase==n_markmark then
- local index=getattr(n,a_markdone) or 1
- local d=mrks[index]
- if d then
- local rlmode=d[3]
- local k=wx[p]
- local px=getfield(p,"xoffset")
- local ox=0
- if k then
- local x=k[2]
- local w=k[4]
- if w then
- if rlmode and rlmode>=0 then
- ox=px-getfield(p,"width")+d[1]-(w-x)
- else
- ox=px-d[1]-x
- end
- else
- if rlmode and rlmode>=0 then
- ox=px-getfield(p,"width")+d[1]
- else
- ox=px-d[1]-x
- end
- end
- else
- local wp=getfield(p,"width")
- local wn=getfield(n,"width")
- if rlmode and rlmode>=0 then
- ox=px-wp+d[1]
- else
- ox=px-d[1]
- end
- if wn~=0 then
- insert_node_before(head,n,newkern(-wn/2))
- insert_node_after(head,n,newkern(-wn/2))
- end
- end
- setfield(n,"xoffset",ox)
- local py=getfield(p,"yoffset")
- local oy=0
- if mk[p] then
- oy=py+d[2]
- else
- oy=getfield(n,"yoffset")+py+d[2]
- end
- setfield(n,"yoffset",oy)
- if nofmarks==1 then
- break
- else
- nofmarks=nofmarks-1
- end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
end
- elseif not n_markmark then
- break
- else
end
end
+ else
+ break
end
end
- if not keep then
- marks={}
+ if h~=d then
+ setfield(n,"post",h)
end
end
- if next(wx) then
- for n,k in next,wx do
- local x=k[2]
- local w=k[4]
- if w then
- local rl=k[1]
- local wx=w-x
- if rl<0 then
- if wx~=0 then
- insert_node_before(head,n,newkern(wx))
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
end
- if x~=0 then
- insert_node_after (head,n,newkern(x))
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
+ end
+ p=n
+ else
+ p=nil
+ end
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
+local function inject_pairs_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
end
else
- if x~=0 then
- insert_node_before(head,n,newkern(x))
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
end
- if wx~=0 then
- insert_node_after (head,n,newkern(wx))
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
end
end
- elseif x~=0 then
- insert_node_before(head,n,newkern(x))
end
end
+ else
+ break
end
- if next(cx) then
- for n,k in next,cx do
- if k~=0 then
- local rln=rl[n]
- if rln and rln<0 then
- insert_node_before(head,n,newkern(-k))
- else
- insert_node_before(head,n,newkern(k))
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"preinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ end
end
+ else
+ break
end
end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
end
- if not keep then
- kerns={}
- end
-head=tonode(head)
- return head,true
- elseif not keep then
- kerns,cursives,marks={},{},{}
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- local k=getattr(n,a_kernpair)
- if k then
- local kk=kerns[k]
- if kk then
- local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
- if y and y~=0 then
- setfield(n,"yoffset",y)
- end
- if w then
- local wx=w-x
- if rl<0 then
- if wx~=0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x~=0 then
- insert_node_after (head,n,newkern(x))
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"postinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
end
- else
- if x~=0 then
- insert_node_before(head,n,newkern(x))
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
end
- if wx~=0 then
- insert_node_after(head,n,newkern(wx))
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
end
end
- else
- if x~=0 then
- insert_node_before(head,n,newkern(x))
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"replaceinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
end
end
+ else
+ break
end
end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
end
+ p=n
+ else
+ p=nil
end
- if not keep then
- kerns={}
- end
- return tonode(head),true
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
else
+ nofregisteredpairs=0
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
+function injections.handler(head,where)
+ if nofregisteredmarks>0 or nofregisteredcursives>0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs>0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns>0 then
+ return inject_kerns_only(head,where)
+ else
+ return head,false
end
- return tonode(head),false
end
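+-- Editor's illustrative sketch (not part of the original patch): the handler
+-- above picks the cheapest injector that covers what setkern, setpair,
+-- setmark and setcursive actually registered; each injector resets the
+-- registered counters afterwards unless keepregisteredcounts is set. The
+-- wrapper name below is hypothetical.
+local function injectiondemo(head)
+  -- marks or cursives registered -> inject_everything
+  -- only (complex) pairs         -> inject_pairs_only
+  -- only simple kerns            -> inject_kerns_only
+  -- nothing registered           -> head comes back unchanged, done == false
+  local newhead,done=injections.handler(head)
+  return newhead,done
+end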
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-ota']={
+if not modules then modules={} end modules ['font-otx']={
version=1.001,
comment="companion to font-otf.lua (analysing)",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -9363,13 +10918,24 @@ analyzers.initializers=initializers
analyzers.methods=methods
analyzers.useunicodemarks=false
local a_state=attributes.private('state')
+local nuts=nodes.nuts
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local traverse_id=nuts.traverse_id
+local traverse_node_list=nuts.traverse
+local end_of_math=nuts.end_of_math
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
local disc_code=nodecodes.disc
local math_code=nodecodes.math
-local traverse_id=node.traverse_id
-local traverse_node_list=node.traverse
-local end_of_math=node.end_of_math
local fontdata=fonts.hashes.identifiers
local categories=characters and characters.categories or {}
local otffeatures=fonts.constructors.newfeatures("otf")
@@ -9411,51 +10977,52 @@ function analyzers.setstate(head,font)
local tfmdata=fontdata[font]
local descriptions=tfmdata.descriptions
local first,last,current,n,done=nil,nil,head,0,false
+ current=tonut(current)
while current do
- local id=current.id
- if id==glyph_code and current.font==font then
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font then
done=true
- local char=current.char
+ local char=getchar(current)
local d=descriptions[char]
if d then
if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
done=true
- current[a_state]=s_mark
+ setprop(current,a_state,s_mark)
elseif n==0 then
first,last,n=current,current,1
- current[a_state]=s_init
+ setprop(current,a_state,s_init)
else
last,n=current,n+1
- current[a_state]=s_medi
+ setprop(current,a_state,s_medi)
end
else
if first and first==last then
- last[a_state]=s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
end
first,last,n=nil,nil,0
end
elseif id==disc_code then
- current[a_state]=s_medi
+ setprop(current,a_state,s_medi)
last=current
else
if first and first==last then
- last[a_state]=s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
end
first,last,n=nil,nil,0
if id==math_code then
current=end_of_math(current)
end
end
- current=current.next
+ current=getnext(current)
end
if first and first==last then
- last[a_state]=s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
end
return head,done
end
@@ -9511,6 +11078,7 @@ local isolated={
[0x0856]=true,[0x0858]=true,[0x0857]=true,
[0x07FA]=true,
[zwnj]=true,
+ [0x08AD]=true,
}
local final={
[0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
@@ -9528,15 +11096,16 @@ local final={
[0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
[0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
[0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
- [0x0778]=true,[0x0779]=true,
+ [0x0778]=true,[0x0779]=true,
[0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
[0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
- [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
- [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
- [0x072C]=true,[0x071E]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
[0x072F]=true,[0x074D]=true,
[0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
- [0x084F]=true
+ [0x084F]=true,
+ [0x08AE]=true,[0x08B1]=true,[0x08B2]=true,
}
local medial={
[0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
@@ -9596,12 +11165,12 @@ local medial={
[0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
[0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
[0x07E6]=true,
- [tatweel]=true,
- [zwj]=true,
+ [tatweel]=true,[zwj]=true,
+ [0x08A1]=true,[0x08AF]=true,[0x08B0]=true,
}
local arab_warned={}
local function warning(current,what)
- local char=current.char
+ local char=getchar(current)
if not arab_warned[char] then
log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char]=true
@@ -9610,30 +11179,30 @@ end
local function finish(first,last)
if last then
if first==last then
- local fc=first.char
+ local fc=getchar(first)
if medial[fc] or final[fc] then
- first[a_state]=s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state]=s_error
+ setprop(first,a_state,s_error)
end
else
- local lc=last.char
+ local lc=getchar(last)
if medial[lc] or final[lc] then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state]=s_error
+ setprop(last,a_state,s_error)
end
end
first,last=nil,nil
elseif first then
- local fc=first.char
+ local fc=getchar(first)
if medial[fc] or final[fc] then
- first[a_state]=s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state]=s_error
+ setprop(first,a_state,s_error)
end
first=nil
end
@@ -9644,38 +11213,39 @@ function methods.arab(head,font,attr)
local tfmdata=fontdata[font]
local marks=tfmdata.resources.marks
local first,last,current,done=nil,nil,head,false
+ current=tonut(current)
while current do
- local id=current.id
- if id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then
done=true
- local char=current.char
+ local char=getchar(current)
if marks[char] or (useunicodemarks and categories[char]=="mn") then
- current[a_state]=s_mark
+ setprop(current,a_state,s_mark)
elseif isolated[char] then
first,last=finish(first,last)
- current[a_state]=s_isol
+ setprop(current,a_state,s_isol)
first,last=nil,nil
elseif not first then
if medial[char] then
- current[a_state]=s_init
+ setprop(current,a_state,s_init)
first,last=first or current,current
elseif final[char] then
- current[a_state]=s_isol
+ setprop(current,a_state,s_isol)
first,last=nil,nil
else
first,last=finish(first,last)
end
elseif medial[char] then
first,last=first or current,current
- current[a_state]=s_medi
+ setprop(current,a_state,s_medi)
elseif final[char] then
- if not last[a_state]==s_init then
- last[a_state]=s_medi
+ if getprop(last,a_state)~=s_init then
+ setprop(last,a_state,s_medi)
end
- current[a_state]=s_fina
+ setprop(current,a_state,s_fina)
first,last=nil,nil
elseif char>=0x0600 and char<=0x06FF then
- current[a_state]=s_rest
+ setprop(current,a_state,s_rest)
first,last=finish(first,last)
else
first,last=finish(first,last)
@@ -9688,7 +11258,7 @@ function methods.arab(head,font,attr)
current=end_of_math(current)
end
end
- current=current.next
+ current=getnext(current)
end
if first or last then
finish(first,last)
@@ -9745,6 +11315,7 @@ local report_chain=logs.reporter("fonts","otf chain")
local report_process=logs.reporter("fonts","otf process")
local report_prepare=logs.reporter("fonts","otf prepare")
local report_warning=logs.reporter("fonts","otf warning")
+local report_run=logs.reporter("fonts","otf run")
registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
@@ -9756,21 +11327,29 @@ local nuts=nodes.nuts
local tonode=nuts.tonode
local tonut=nuts.tonut
local getfield=nuts.getfield
+local setfield=nuts.setfield
local getnext=nuts.getnext
local getprev=nuts.getprev
local getid=nuts.getid
local getattr=nuts.getattr
+local setattr=nuts.setattr
+local getprop=nuts.getprop
+local setprop=nuts.setprop
local getfont=nuts.getfont
local getsubtype=nuts.getsubtype
local getchar=nuts.getchar
-local setfield=nuts.setfield
-local setattr=nuts.setattr
+local insert_node_before=nuts.insert_before
local insert_node_after=nuts.insert_after
local delete_node=nuts.delete
+local remove_node=nuts.remove
local copy_node=nuts.copy
+local copy_node_list=nuts.copy_list
local find_node_tail=nuts.tail
local flush_node_list=nuts.flush_list
+local free_node=nuts.free
local end_of_math=nuts.end_of_math
+local traverse_nodes=nuts.traverse
+local traverse_id=nuts.traverse_id
local setmetatableindex=table.setmetatableindex
local zwnj=0x200C
local zwj=0x200D
@@ -9788,25 +11367,22 @@ local math_code=nodecodes.math
local dir_code=whatcodes.dir
local localpar_code=whatcodes.localpar
local discretionary_code=disccodes.discretionary
+local regular_code=disccodes.regular
+local automatic_code=disccodes.automatic
local ligature_code=glyphcodes.ligature
local privateattribute=attributes.private
local a_state=privateattribute('state')
-local a_markbase=privateattribute('markbase')
-local a_markmark=privateattribute('markmark')
-local a_markdone=privateattribute('markdone')
-local a_cursbase=privateattribute('cursbase')
-local a_curscurs=privateattribute('curscurs')
-local a_cursdone=privateattribute('cursdone')
-local a_kernpair=privateattribute('kernpair')
-local a_ligacomp=privateattribute('ligacomp')
+local a_cursbase=privateattribute('cursbase')
local injections=nodes.injections
local setmark=injections.setmark
local setcursive=injections.setcursive
local setkern=injections.setkern
local setpair=injections.setpair
-local markonce=true
+local resetinjection=injections.reset
+local copyinjection=injections.copy
+local setligaindex=injections.setligaindex
+local getligaindex=injections.getligaindex
local cursonce=true
-local kernonce=true
local fonthashes=fonts.hashes
local fontdata=fonthashes.identifiers
local otffeatures=fonts.constructors.newfeatures("otf")
@@ -9822,6 +11398,7 @@ local currentfont=false
local lookuptable=false
local anchorlookups=false
local lookuptypes=false
+local lookuptags=false
local handlers={}
local rlmode=0
local featurevalue=false
@@ -9866,29 +11443,32 @@ local function gref(n)
end
local function cref(kind,chainname,chainlookupname,lookupname,index)
if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
elseif chainname then
- return formatters["feature %a, chain %a"](kind,chainname)
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
else
return formatters["feature %a"](kind)
end
end
local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
end
local function copy_glyph(g)
local components=getfield(g,"components")
if components then
setfield(g,"components",nil)
local n=copy_node(g)
+ copyinjection(n,g)
setfield(g,"components",components)
return n
else
- return copy_node(g)
+ local n=copy_node(g)
+ copyinjection(n,g)
+ return n
end
end
local function markstoligature(kind,lookupname,head,start,stop,char)
@@ -9903,6 +11483,7 @@ local function markstoligature(kind,lookupname,head,start,stop,char)
if head==start then
head=base
end
+ resetinjection(base)
setfield(base,"char",char)
setfield(base,"subtype",ligature_code)
setfield(base,"components",start)
@@ -9936,6 +11517,7 @@ local function getcomponentindex(start)
end
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
if start==stop and getchar(start)==char then
+ resetinjection(start)
setfield(start,"char",char)
return head,start
end
@@ -9947,6 +11529,7 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
if start==head then
head=base
end
+ resetinjection(base)
setfield(base,"char",char)
setfield(base,"subtype",ligature_code)
setfield(base,"components",start)
@@ -9971,9 +11554,9 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
baseindex=baseindex+componentindex
componentindex=getcomponentindex(start)
elseif not deletemarks then
- setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
head,current=insert_node_after(head,current,copy_node(start))
elseif trace_marks then
@@ -9985,9 +11568,9 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
while start and getid(start)==glyph_code do
local char=getchar(start)
if marks[char] then
- setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
else
break
@@ -10001,6 +11584,7 @@ function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
+ resetinjection(start)
setfield(start,"char",replacement)
return head,start,true
end
@@ -10038,11 +11622,13 @@ end
local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples=#multiple
if nofmultiples>0 then
+ resetinjection(start)
setfield(start,"char",multiple[1])
if nofmultiples>1 then
local sn=getnext(start)
for k=2,nofmultiples do
local n=copy_node(start)
+ resetinjection(n)
setfield(n,"char",multiple[k])
setfield(n,"next",sn)
setfield(n,"prev",start)
@@ -10068,6 +11654,7 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
+ resetinjection(start)
setfield(start,"char",choice)
else
if trace_alternatives then
@@ -10155,6 +11742,7 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
else
+ resetinjection(start)
setfield(start,"char",lig)
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
@@ -10200,7 +11788,7 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -10246,7 +11834,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index=getattr(start,a_ligacomp)
+ local index=getligaindex(start)
local baseanchors=descriptions[basechar]
if baseanchors then
baseanchors=baseanchors.anchors
@@ -10260,7 +11848,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
if ma then
ba=ba[index]
if ba then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -10294,10 +11882,10 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
local markchar=getchar(start)
if marks[markchar] then
local base=getprev(start)
- local slc=getattr(start,a_ligacomp)
+ local slc=getligaindex(start)
if slc then
while base do
- local blc=getattr(base,a_ligacomp)
+ local blc=getligaindex(base)
if blc and blc~=slc then
base=getprev(base)
else
@@ -10318,7 +11906,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -10344,7 +11932,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
return head,start,false
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
- local alreadydone=cursonce and getattr(start,a_cursbase)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
if not alreadydone then
local done=false
local startchar=getchar(start)
@@ -10485,6 +12073,7 @@ function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,loo
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
+ resetinjection(start)
setfield(start,"char",replacement)
return head,start,true
else
@@ -10516,6 +12105,7 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
+ resetinjection(current)
setfield(current,"char",replacement)
end
end
@@ -10575,6 +12165,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
+ resetinjection(start)
setfield(start,"char",choice)
else
if trace_alternatives then
@@ -10704,7 +12295,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -10758,7 +12349,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
end
- local index=getattr(start,a_ligacomp)
+ local index=getligaindex(start)
local baseanchors=descriptions[basechar].anchors
if baseanchors then
local baseanchors=baseanchors['baselig']
@@ -10770,7 +12361,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
if ma then
ba=ba[index]
if ba then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -10799,63 +12390,63 @@ end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
local markchar=getchar(start)
if marks[markchar] then
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local markanchors=lookuphash[lookupname]
- if markanchors then
- markanchors=markanchors[markchar]
- end
- if markanchors then
- local base=getprev(start)
- local slc=getattr(start,a_ligacomp)
- if slc then
- while base do
- local blc=getattr(base,a_ligacomp)
- if blc and blc~=slc then
- base=getprev(base)
- else
- break
- end
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ local slc=getligaindex(start)
+ if slc then
+ while base do
+ local blc=getligaindex(base)
+ if blc and blc~=slc then
+ base=getprev(base)
+ else
+ break
end
end
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- local baseanchors=descriptions[basechar].anchors
+ end
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
if baseanchors then
- baseanchors=baseanchors['basemark']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head,start,true
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head,start,true
end
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
return head,start,false
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone=cursonce and getattr(start,a_cursbase)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
if not alreadydone then
local startchar=getchar(start)
local subtables=currentlookup.subtables
@@ -11131,15 +12722,8 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
end
- elseif f==2 then
- match=seq[1][32]
else
- for n=f-1,1 do
- if not seq[n][32] then
- match=false
- break
- end
- end
+ match=false
end
end
if match and s>l then
@@ -11189,15 +12773,8 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
end
- elseif s-l==1 then
- match=seq[s][32]
else
- for n=l+1,s do
- if not seq[n][32] then
- match=false
- break
- end
- end
+ match=false
end
end
end
@@ -11235,7 +12812,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
else
local i=1
- repeat
+ while true do
if skipped then
while true do
local char=getchar(start)
@@ -11272,11 +12849,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
- if start then
+ if i>nofchainlookups then
+ break
+ elseif start then
start=getnext(start)
else
end
- until i>nofchainlookups
+ end
end
else
local replacements=ck[7]
@@ -11352,14 +12931,20 @@ local autofeatures=fonts.analyzers.features
local function initialize(sequence,script,language,enabled)
local features=sequence.features
if features then
- for kind,scripts in next,features do
- local valid=enabled[kind]
- if valid then
- local languages=scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
end
end
+ else
end
end
return false
@@ -11386,12 +12971,12 @@ function otf.dataset(tfmdata,font)
}
rs[language]=rl
local sequences=tfmdata.resources.sequences
-for s=1,#sequences do
- local v=enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1]=v
- end
-end
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
end
return rl
end
@@ -11412,6 +12997,7 @@ local function featuresprocessor(head,font,attr)
anchorlookups=resources.lookup_to_anchor
lookuptable=resources.lookups
lookuptypes=resources.lookuptypes
+ lookuptags=resources.lookuptags
currentfont=font
rlmode=0
local sequences=resources.sequences
@@ -11488,9 +13074,9 @@ local function featuresprocessor(head,font,attr)
if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
local lookupmatch=lookupcache[getchar(start)]
@@ -11521,9 +13107,9 @@ local function featuresprocessor(head,font,attr)
setfield(prev,"next",next)
local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
else
- a=not attribute or getattr(prev,a_state)==attribute
+ a=not attribute or getprop(prev,a_state)==attribute
end
if a then
local lookupmatch=lookupcache[getchar(prev)]
@@ -11545,9 +13131,9 @@ local function featuresprocessor(head,font,attr)
if getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
local lookupmatch=lookupcache[getchar(start)]
@@ -11638,9 +13224,9 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
for i=1,ns do
@@ -11682,9 +13268,9 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
setfield(prev,"next",next)
local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
else
- a=not attribute or getattr(prev,a_state)==attribute
+ a=not attribute or getprop(prev,a_state)==attribute
end
if a then
for i=1,ns do
@@ -11714,9 +13300,9 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
if getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
for i=1,ns do
@@ -11944,6 +13530,7 @@ local function prepare_contextchains(tfmdata)
local rawdata=tfmdata.shared.rawdata
local resources=rawdata.resources
local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
local lookups=rawdata.lookups
if lookups then
for lookupname,lookupdata in next,rawdata.lookups do
@@ -11956,7 +13543,7 @@ local function prepare_contextchains(tfmdata)
if not validformat then
report_prepare("unsupported format %a",format)
elseif not validformat[lookuptype] then
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
else
local contexts=lookuphash[lookupname]
if not contexts then
@@ -12005,7 +13592,7 @@ local function prepare_contextchains(tfmdata)
else
end
else
- report_prepare("missing lookuptype for lookupname %a",lookupname)
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
end
end
end
@@ -12420,6 +14007,14 @@ local function packdata(data)
features[script]=pack_normal(feature)
end
end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
end
end
local lookups=resources.lookups
@@ -12749,27 +14344,6 @@ local function unpackdata(data)
rule.replacements=tv
end
end
- local fore=rule.fore
- if fore then
- local tv=tables[fore]
- if tv then
- rule.fore=tv
- end
- end
- local back=rule.back
- if back then
- local tv=tables[back]
- if tv then
- rule.back=tv
- end
- end
- local names=rule.names
- if names then
- local tv=tables[names]
- if tv then
- rule.names=tv
- end
- end
local lookups=rule.lookups
if lookups then
local tv=tables[lookups]
@@ -12832,6 +14406,20 @@ local function unpackdata(data)
end
end
end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
end
end
local lookups=resources.lookups
@@ -12876,6 +14464,7 @@ if otf.enhancers.register then
otf.enhancers.register("unpack",unpackdata)
end
otf.enhancers.unpack=unpackdata
+otf.enhancers.pack=packdata
end -- closure
@@ -13231,8 +14820,8 @@ function definers.read(specification,size,id)
elseif trace_defining and type(tfmdata)=="table" then
local properties=tfmdata.properties or {}
local parameters=tfmdata.parameters or {}
- report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
- properties.format,id,properties.name,parameters.size,properties.encodingbytes,
+ report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes,
properties.encodingname,properties.fullname,file.basename(properties.filename))
end
statistics.stoptiming(fonts)
@@ -13563,13 +15152,40 @@ local fonts=fonts
local nodes=nodes
local traverse_id=node.traverse_id
local glyph_code=nodes.nodecodes.glyph
-function nodes.handlers.characters(head)
+local disc_code=nodes.nodecodes.disc
+local ligaturing=node.ligaturing
+local kerning=node.kerning
+local basepass=true
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning=nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning=nil end
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
+function nodes.handlers.setbasepass(v)
+ basepass=v
+end
+function nodes.handlers.nodepass(head)
local fontdata=fonts.hashes.identifiers
if fontdata then
- local usedfonts,done,prevfont={},false,nil
+ local usedfonts={}
+ local basefonts={}
+ local prevfont=nil
+ local basefont=nil
for n in traverse_id(glyph_code,head) do
local font=n.font
if font~=prevfont then
+ if basefont then
+ basefont[2]=n.prev
+ end
prevfont=font
local used=usedfonts[font]
if not used then
@@ -13580,18 +15196,57 @@ function nodes.handlers.characters(head)
local processors=shared.processes
if processors and #processors>0 then
usedfonts[font]=processors
- done=true
+ elseif basepass then
+ basefont={ n,nil }
+ basefonts[#basefonts+1]=basefont
end
end
end
end
end
end
- if done then
+ for d in traverse_id(disc_code,head) do
+ local r=d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if next(usedfonts) then
for font,processors in next,usedfonts do
for i=1,#processors do
- local h,d=processors[i](head,font,0)
- head,done=h or head,done or d
+ head=processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts>0 then
+ for i=1,#basefonts do
+ local range=basefonts[i]
+ local start=range[1]
+ local stop=range[2]
+ if stop then
+ start,stop=ligaturing(start,stop)
+ start,stop=kerning(start,stop)
+ elseif start then
+ start=ligaturing(start)
+ start=kerning(start)
end
end
end
@@ -13600,13 +15255,27 @@ function nodes.handlers.characters(head)
return head,false
end
end
+function nodes.handlers.basepass(head)
+ if not basepass then
+ head=ligaturing(head)
+ head=kerning(head)
+ end
+ return head,true
+end
+local nodepass=nodes.handlers.nodepass
+local basepass=nodes.handlers.basepass
+local injectpass=nodes.injections.handler
+local protectpass=nodes.handlers.protectglyphs
function nodes.simple_font_handler(head)
- head=nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head=node.ligaturing(head)
- head=node.kerning(head)
- return head
+ if head then
+ head=nodepass(head)
+ head=injectpass(head)
+ head=basepass(head)
+ protectpass(head)
+ return head,true
+ else
+ return head,false
+ end
end
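+-- Editor's illustrative sketch (not part of the original patch): the handler
+-- above chains the node pass, the injection pass, the optional base
+-- ligaturing/kerning pass and finally glyph protection. In a bare LuaTeX
+-- format it is typically hooked into the node list callbacks; the helper
+-- name below is hypothetical and the exact callback names are an assumption.
+local function enablefonthandler()
+  callback.register("pre_linebreak_filter",nodes.simple_font_handler)
+  callback.register("hpack_filter",nodes.simple_font_handler)
+end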
end -- closure
diff --git a/tex/generic/context/luatex/luatex-fonts-ota.lua b/tex/generic/context/luatex/luatex-fonts-ota.lua
new file mode 100644
index 000000000..f083fe09e
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-fonts-ota.lua
@@ -0,0 +1,459 @@
+if not modules then modules = { } end modules ['font-otx'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (analysing)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type = type
+
+if not trackers then trackers = { register = function() end } end
+
+----- trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+
+local fonts, nodes, node = fonts, nodes, node
+
+local allocate = utilities.storage.allocate
+
+local otf = fonts.handlers.otf
+
+local analyzers = fonts.analyzers
+local initializers = allocate()
+local methods = allocate()
+
+analyzers.initializers = initializers
+analyzers.methods = methods
+analyzers.useunicodemarks = false
+
+local a_state = attributes.private('state')
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local traverse_node_list = nuts.traverse
+local end_of_math = nuts.end_of_math
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local math_code = nodecodes.math
+
+local fontdata = fonts.hashes.identifiers
+local categories = characters and characters.categories or { } -- sorry, only in context
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+--[[ldx--
+<p>Analyzers run per script and/or language and are needed in order to
+process features right.</p>
+--ldx]]--
+
+-- never use these numbers directly
+
+local s_init = 1 local s_rphf = 7
+local s_medi = 2 local s_half = 8
+local s_fina = 3 local s_pref = 9
+local s_isol = 4 local s_blwf = 10
+local s_mark = 5 local s_pstf = 11
+local s_rest = 6
+
+local states = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ mark = s_mark,
+ rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+local features = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ -- mark = s_mark,
+ -- rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+analyzers.states = states
+analyzers.features = features
+
+-- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
+-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
+
+function analyzers.setstate(head,font)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local descriptions = tfmdata.descriptions
+ local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
+ current = tonut(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font then
+ done = true
+ local char = getchar(current)
+ local d = descriptions[char]
+ if d then
+ if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
+ done = true
+ setprop(current,a_state,s_mark)
+ elseif n == 0 then
+ first, last, n = current, current, 1
+ setprop(current,a_state,s_init)
+ else
+ last, n = current, n+1
+ setprop(current,a_state,s_medi)
+ end
+ else -- finish
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ end
+ elseif id == disc_code then
+ -- always in the middle
+ setprop(current,a_state,s_medi)
+ last = current
+ else -- finish
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = getnext(current)
+ end
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ return head, done
+end
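+-- Editor's note (illustrative, not part of the original patch): for a run of
+-- three connected glyphs of the same font, setstate assigns
+--
+--   glyph 1 : s_init  (n == 0 branch)
+--   glyph 2 : s_medi  (n > 0 branch)
+--   glyph 3 : s_medi, rewritten to s_fina when the run is finished
+--
+-- a lone glyph ends up as s_isol, marks get s_mark, and discretionaries are
+-- treated as medial so a run is not broken at hyphenation points.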
+
+-- in the future we will use language/script attributes instead of the
+-- font-related value, but then we also need dynamic features, which is
+-- somewhat slower; and we also need a chain of them
+
+local function analyzeinitializer(tfmdata,value) -- attr
+ local script, language = otf.scriptandlanguage(tfmdata) -- attr
+ local action = initializers[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+
+local function analyzeprocessor(head,font,attr)
+ local tfmdata = fontdata[font]
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
+ local action = methods[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head, false
+end
+
+registerotffeature {
+ name = "analyze",
+ description = "analysis of character classes",
+ default = true,
+ initializers = {
+ node = analyzeinitializer,
+ },
+ processors = {
+ position = 1,
+ node = analyzeprocessor,
+ }
+}
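+-- Editor's illustrative sketch (not part of the original patch): the feature
+-- registered above makes the analyzer run as the very first node processor
+-- (position = 1), so the a_state properties it sets are available to the
+-- init/medi/fina/isol handlers that follow. A new per-script analyzer only
+-- has to set that property; the 'dflt' name below is hypothetical.
+--
+-- methods.dflt = function(head,font,attr)
+--   -- walk the (nut) list, call setprop(n,a_state,states.<something>) where
+--   -- appropriate, and report whether anything was done
+--   return head, false
+-- end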
+
+-- latin
+
+methods.latn = analyzers.setstate
+
+-- This info can eventually go into char-def, and we will then have a state
+-- table for generic use (unicode recognizes all states, but in practice
+-- it only uses
+--
+-- isolated : isol
+-- final : isol_fina
+-- medial : isol_fina_medi_init
+--
+-- so in practice, without an analyzer it's rather useless info, which is
+-- why having it in char-def only makes sense for special purposes, like
+-- tracing cq. visualizing.
+
+local tatweel = 0x0640
+local zwnj = 0x200C
+local zwj = 0x200D
+
+local isolated = { -- isol
+ [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
+ [0x0604] = true,
+ [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
+ [0x06DD] = true,
+ -- mandaic
+ [0x0856] = true, [0x0858] = true, [0x0857] = true,
+ -- n'ko
+ [0x07FA] = true,
+ -- also here:
+ [zwnj] = true,
+ -- 7
+ [0x08AD] = true,
+}
+
+local final = { -- isol_fina
+ [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
+ [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
+ [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
+ [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
+ [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
+ [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
+ [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
+ [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
+ [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
+ [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
+ [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
+ [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
+ [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
+ [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
+ [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
+ [0x0778] = true, [0x0779] = true,
+ [0x08AA] = true, [0x08AB] = true, [0x08AC] = true,
+ [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true,
+ -- syriac
+ [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
+ [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
+ [0x072C] = true, [0x071E] = true,
+ [0x072F] = true, [0x074D] = true,
+ -- mandaic
+ [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true,
+ [0x084F] = true,
+ -- 7
+ [0x08AE] = true, [0x08B1] = true, [0x08B2] = true,
+}
+
+local medial = { -- isol_fina_medi_init
+ [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
+ [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
+ [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
+ [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
+ [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
+ [0x0641] = true, [0x0642] = true, [0x0643] = true,
+ [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
+ [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
+ [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
+ [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
+ [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
+ [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
+ [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
+ [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
+ [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
+ [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
+ [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
+ [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
+ [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
+ [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
+ [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
+ [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
+ [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
+ [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
+ [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
+ [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
+ [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
+ [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
+ [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
+ [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
+ [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
+ [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
+ [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
+ [0x077E] = true, [0x077F] = true,
+ [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true,
+ [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true,
+ [0x08A7] = true, [0x08A3] = true,
+ -- syriac
+ [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
+ [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
+ [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
+ [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
+ [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true,
+ [0x074E] = true, [0x074F] = true,
+ -- mandaic
+ [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true,
+ [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true,
+ [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true,
+ [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true,
+ [0x0853] = true,
+ -- n'ko
+ [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true,
+ [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true,
+ [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true,
+ [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, [0x07E2] = true,
+ [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true,
+ [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true,
+ [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true,
+ [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true,
+ [0x07E6] = true,
+ -- also here:
+ [tatweel]= true, [zwj] = true,
+ -- 7
+ [0x08A1] = true, [0x08AF] = true, [0x08B0] = true,
+}
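+-- Editor's illustrative sketch (not part of the original patch): the three
+-- tables above are plain boolean sets, so deciding how a character may join
+-- is just a couple of lookups; the helper name is hypothetical.
+local function joiningclass(char)
+  if medial[char] then
+    return "isol_fina_medi_init"
+  elseif final[char] then
+    return "isol_fina"
+  elseif isolated[char] then
+    return "isol"
+  else
+    return "none" -- not an arabic-like joining character
+  end
+end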
+
+local arab_warned = { }
+
+-- todo: gref
+
+local function warning(current,what)
+ local char = getchar(current)
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char] = true
+ end
+end
+
+-- potential optimization: local medial_final = table.merged(medial,final)
+
+local function finish(first,last)
+ if last then
+ if first == last then
+ local fc = getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ else
+ local lc = getchar(last)
+ if medial[lc] or final[lc] then
+ -- if laststate == 1 or laststate == 2 or laststate == 4 then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ end
+ first, last = nil, nil
+ elseif first then
+ -- first and last are normally either both set or both nil, so we never come here
+ local fc = getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
+ end
+ return first, last
+end
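+-- Editor's note (illustrative, not part of the original patch): finish()
+-- closes a pending joining run. When first ~= last the last glyph of the run
+-- gets s_fina (or s_error if its character cannot take a final form); when
+-- first == last the single glyph gets s_isol. It hands back nil,nil so the
+-- caller can reset both in one assignment: first,last = finish(first,last).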
+
+function methods.arab(head,font,attr)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local marks = tfmdata.resources.marks
+ local first, last, current, done = nil, nil, head, false
+ current = tonut(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
+ done = true
+ local char = getchar(current)
+ if marks[char] or (useunicodemarks and categories[char] == "mn") then
+ setprop(current,a_state,s_mark)
+ elseif isolated[char] then -- can be zwj or zwnj too
+ first, last = finish(first,last)
+ setprop(current,a_state,s_isol)
+ first, last = nil, nil
+ elseif not first then
+ if medial[char] then
+ setprop(current,a_state,s_init)
+ first, last = first or current, current
+ elseif final[char] then
+ setprop(current,a_state,s_isol)
+ first, last = nil, nil
+ else -- no arab
+ first, last = finish(first,last)
+ end
+ elseif medial[char] then
+ first, last = first or current, current
+ setprop(current,a_state,s_medi)
+ elseif final[char] then
+ if getprop(last,a_state) ~= s_init then
+ -- tricky, we need to check what last may be !
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first, last = nil, nil
+ elseif char >= 0x0600 and char <= 0x06FF then -- needs checking
+ setprop(current,a_state,s_rest)
+ first, last = finish(first,last)
+ else -- no
+ first, last = finish(first,last)
+ end
+ else
+ if first or last then
+ first, last = finish(first,last)
+ end
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = getnext(current)
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head, done
+end
+
+methods.syrc = methods.arab
+methods.mand = methods.arab
+methods.nko = methods.arab
+
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks = v
+end)
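+
+-- For readers new to this analyzer, a much simplified sketch of what the state
+-- assignment boils down to for one uninterrupted run of joining characters
+-- (commented out and purely illustrative; the real code above also handles
+-- marks, isolated characters and error cases). The classify helper below is
+-- hypothetical and works on a plain array instead of a node list.
+--
+-- local function classify(run) -- run = { char, char, ... }
+--     local states = { }
+--     local n = #run
+--     for i=1,n do
+--         if n == 1 then
+--             states[i] = "isol"
+--         elseif i == 1 then
+--             states[i] = "init"
+--         elseif i == n then
+--             states[i] = "fina"
+--         else
+--             states[i] = "medi"
+--         end
+--     end
+--     return states
+-- end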
diff --git a/tex/generic/context/luatex/luatex-fonts-otn.lua b/tex/generic/context/luatex/luatex-fonts-otn.lua
new file mode 100644
index 000000000..dd3aa6153
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-fonts-otn.lua
@@ -0,0 +1,2893 @@
+if not modules then modules = { } end modules ['font-otn'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- todo: looks like we have a leak somewhere (probably in ligatures)
+-- todo: copy attributes to disc
+
+-- this is a context version which can contain experimental code, but when we
+-- have serious patches we also need to change the other two font-otn files
+
+-- preprocessors = { "nodes" }
+
+-- anchor class : mark, mkmk, curs, mklg (todo)
+-- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
+
+-- this is still somewhat preliminary and it will get better in due time;
+-- much functionality could only be implemented thanks to the husayni font
+-- of Idris Samawi Hamid to whom we dedicate this module.
+
+-- in retrospect it always looks easy but believe it or not, it took a lot
+-- of work to get proper open type support done: buggy fonts, fuzzy specs,
+-- specially made test fonts, many skype sessions between taco, idris and me,
+-- torture tests etc etc ... unfortunately the code does not show how much
+-- time it took ...
+
+-- todo:
+--
+-- extension infrastructure (for usage out of context)
+-- sorting features according to vendors/renderers
+-- alternative loop quitters
+-- check cursive and r2l
+-- find out where ignore-mark-classes went
+-- default features (per language, script)
+-- handle positions (we need example fonts)
+-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
+-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
+-- remove some optimizations (when I have a faster machine)
+--
+-- maybe redo the lot some way (more context specific)
+
+--[[ldx--
+<p>This module is a bit more split up than I'd like but since we also want to test
+with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
+and discussion about improvements and functionality mostly happens on the
+<l n='context'/> mailing list.</p>
+
+<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
+free specification there's also the problem that Microsoft and Adobe
+may have their own interpretation of how and in what order to apply features.
+In general the Microsoft website has more detailed specifications and is a
+better reference. There is also some information in the FontForge help files.</p>
+
+<p>Because so much is possible, fonts might contain bugs and/or be made to work
+with certain renderers. These renderers may evolve over time, which may have the
+side effect that fonts suddenly behave differently.</p>
+
+<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
+implementation. Of course all errors are mine and of course the code can be
+improved. There are quite some optimizations going on here and processing speed
+is currently acceptable. Not all functions are implemented yet, often because I
+lack the fonts for testing. Many scripts are not yet supported either, but I will
+look into them as soon as <l n='context'/> users ask for it.</p>
+
+<p>Because there are different interpretations possible, I will extend the code
+with more (configurable) variants. I can also add hooks for users so that they can
+write their own extensions.</p>
+
+<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
+relationship with unicode at all, apart from the fact that a font might cover certain
+ranges of characters. One character can have multiple shapes. However, at the
+<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
+space. This is needed because we need to access them and <l n='tex'/> has to include
+them in the output eventually.</p>
+
+<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
+In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
+so that successive runs can use the optimized table (after loading the table is
+unpacked). The flattening code used later is a prelude to an even more compact table
+format (and as such it keeps evolving).</p>
+
+<p>This module is sparsely documented because it is a moving target. The table format
+of the reader changes and we experiment a lot with different methods for supporting
+features.</p>
+
+<p>As with the <l n='afm'/> code, we may decide to store more information in the
+<l n='otf'/> table.</p>
+
+<p>Incrementing the version number will force a re-cache. We jump the number by one
+when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
+results in different tables.</p>
+--ldx]]--
+
+-- action handler chainproc chainmore comment
+--
+-- gsub_single ok ok ok
+-- gsub_multiple ok ok not implemented yet
+-- gsub_alternate ok ok not implemented yet
+-- gsub_ligature ok ok ok
+-- gsub_context ok --
+-- gsub_contextchain ok --
+-- gsub_reversecontextchain ok --
+-- chainsub -- ok
+-- reversesub -- ok
+-- gpos_mark2base ok ok
+-- gpos_mark2ligature ok ok
+-- gpos_mark2mark ok ok
+-- gpos_cursive ok untested
+-- gpos_single ok ok
+-- gpos_pair ok ok
+-- gpos_context ok --
+-- gpos_contextchain ok --
+--
+-- todo: contextpos and contextsub and class stuff
+--
+-- actions:
+--
+-- handler : actions triggered by lookup
+-- chainproc : actions triggered by contextual lookup
+-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
+--
+-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
+-- remark: we need to check what to do with discretionaries
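+--
+-- A rough sketch of the dispatch that this split into handlers implies
+-- (simplified and purely illustrative; the real call sites further down pass
+-- additional bookkeeping, and lookupdata stands for whatever the lookup hash
+-- stores for the current character):
+--
+-- local handler = handlers[lookuptype] -- e.g. handlers.gsub_single
+-- if handler then
+--     head, start, done = handler(head,start,kind,lookupname,lookupdata,sequence)
+-- end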
+
+-- We used to have independent hashes for lookups but as the tags are unique
+-- we now use only one hash. If needed we can have multiple again but in that
+-- case I will probably prefix (i.e. rename) the lookups in the cached font file.
+
+-- Todo: make plugin feature that operates on char/glyphnode arrays
+
+local concat, insert, remove = table.concat, table.insert, table.remove
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
+local random = math.random
+local formatters = string.formatters
+
+local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
+
+local registertracker = trackers.register
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+
+local report_direct = logs.reporter("fonts","otf direct")
+local report_subchain = logs.reporter("fonts","otf subchain")
+local report_chain = logs.reporter("fonts","otf chain")
+local report_process = logs.reporter("fonts","otf process")
+local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
+local report_run = logs.reporter("fonts","otf run")
+
+registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
+
+registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_node_tail = nuts.tail
+local flush_node_list = nuts.flush_list
+local free_node = nuts.free
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+
+local setmetatableindex = table.setmetatableindex
+
+local zwnj = 0x200C
+local zwj = 0x200D
+local wildcard = "*"
+local default = "dflt"
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local discretionary_code = disccodes.discretionary
+local regular_code = disccodes.regular
+local automatic_code = disccodes.automatic
+
+local ligature_code = glyphcodes.ligature
+
+local privateattribute = attributes.private
+
+-- Something is messed up: we have two mark / ligature indices, one at the injection
+-- end and one here ... this is based on KE's patches but there is something fishy
+-- there as I'm pretty sure that for husayni we need some connection (as it's much
+-- more complex than an average font) but I need proper examples of all cases, not
+-- of only some.
+
+local a_state = privateattribute('state')
+local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
+
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
+local resetinjection = injections.reset
+local copyinjection = injections.copy
+local setligaindex = injections.setligaindex
+local getligaindex = injections.getligaindex
+
+local cursonce = true
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local onetimemessage = fonts.loggers.onetimemessage or function() end
+
+otf.defaultnodealternate = "none" -- first last
+
+-- we share some vars here, after all, we have no nested lookups and less code
+
+local tfmdata = false
+local characters = false
+local descriptions = false
+local resources = false
+local marks = false
+local currentfont = false
+local lookuptable = false
+local anchorlookups = false
+local lookuptypes = false
+local lookuptags = false
+local handlers = { }
+local rlmode = 0
+local featurevalue = false
+
+-- head is always a whatsit so we can safely assume that head is not changed
+
+-- we use this for special testing and documentation
+
+local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+
+local function logwarning(...)
+ report_direct(...)
+end
+
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+
+local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
+end
+
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct then
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = getfield(g,"components")
+ if components then
+ setfield(g,"components",nil)
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ setfield(g,"components",components)
+ return n
+ else
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ return n
+ end
+end
+
+--
+
+
+-- start is a mark and we need to keep that one
+
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start == stop and getchar(start) == char then
+ return head, start
+ else
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures made
+-- from ligatures that themselves have marks. This was identified by Kai in for instance
+-- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
+-- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
+-- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
+-- third component.
+
+local function getcomponentindex(start)
+ if getid(start) ~= glyph_code then
+ return 0
+ elseif getsubtype(start) == ligature_code then
+ local i = 0
+ local components = getfield(start,"components")
+ while components do
+ i = i + getcomponentindex(components)
+ components = getnext(components)
+ end
+ return i
+ elseif not marks[getchar(start)] then
+ return 1
+ else
+ return 0
+ end
+end
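+
+-- The same counting in a purely illustrative form, written against plain tables
+-- instead of nodes (the helper and its argument layout are hypothetical): a
+-- ligature contributes the sum of its components, a base glyph counts as one
+-- and a mark counts as zero.
+--
+-- local function countcomponents(g) -- g = { components = {...} } or { mark = true } or { }
+--     if g.components then
+--         local i = 0
+--         for _, c in ipairs(g.components) do
+--             i = i + countcomponents(c)
+--         end
+--         return i
+--     elseif g.mark then
+--         return 0
+--     else
+--         return 1
+--     end
+-- end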
+
+-- eventually we will do positioning in another way (needs additional w/h/d fields)
+
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
+ if start == stop and getchar(start) == char then
+ resetinjection(start)
+ setfield(start,"char",char)
+ return head, start
+ end
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start) -- start can have components
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ if not discfound then
+ local deletemarks = markflag ~= "mark"
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
+ -- first we loop over the glyphs in start .. stop
+ while start do
+ local char = getchar(start)
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
+ elseif not deletemarks then -- quite fishy
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start = getnext(start)
+ end
+ -- we can have one accent as part of a lookup and another following
+ -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
+ local start = getnext(current)
+ while start and getid(start) == glyph_code do
+ local char = getchar(start)
+ if marks[char] then
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ else
+ break
+ end
+ start = getnext(start)
+ end
+ end
+ return head, base
+end
+
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
+end
+
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n = #alternatives
+ if value == "random" then
+ local r = random(1,n)
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value == "first" then
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value == "last" then
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value = tonumber(value)
+ if type(value) ~= "number" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value > n then
+ local defaultalt = otf.defaultnodealternate
+ if defaultalt == "first" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt == "last" then
+ return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value == 0 then
+ return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value < 1 then
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
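+
+-- A few illustrative calls (commented out; the alternative codepoints are made
+-- up): with alternatives = { 0x0041, 0x0042, 0x0043 } the value "first" yields
+-- 0x0041, "last" yields 0x0043, "random" yields one of the three, and a numeric
+-- value selects that slot when it is in range, while out of range values fall
+-- back according to otf.defaultnodealternate.
+--
+-- local alternatives = { 0x0041, 0x0042, 0x0043 }
+-- local choice = get_alternative_glyph(start,alternatives,"last",false) -- 0x0043
+-- local choice = get_alternative_glyph(start,alternatives,2,false)      -- 0x0042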
+
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
+ if nofmultiples > 1 then
+ local sn = getnext(start)
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[getchar(sn)] do
+-- sn = getnext(sn)
+-- end
+ local n = copy_node(start) -- ignore components
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
+ if sn then
+ setfield(sn,"prev",n)
+ end
+ setfield(start,"next",n)
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(getchar(start)))
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s, stop, discfound = getnext(start), nil, false
+ local startchar = getchar(start)
+ if marks[startchar] then
+ while s do
+ local id = getid(s)
+ if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
+ local lg = ligature[getchar(s)]
+ if lg then
+ stop = s
+ ligature = lg
+ s = getnext(s)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig = ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar = getchar(stop)
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head, start, true
+ else
+ -- ok, goto next lookup
+ end
+ end
+ else
+ local skipmark = sequence.flags[1]
+ while s do
+ local id = getid(s)
+ if id == glyph_code and getsubtype(s)<256 then
+ if getfont(s) == currentfont then
+ local char = getchar(s)
+ if skipmark and marks[char] then
+ s = getnext(s)
+ else
+ local lg = ligature[char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id == disc_code then
+ discfound = true
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ local lig = ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar = getchar(stop)
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ else
+ -- weird but happens (in some arabic font)
+ resetinjection(start)
+ setfield(start,"char",lig)
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ end
+ return head, start, true
+ else
+ -- weird but happens
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+<p>We get hits on a mark, but we're not sure if it has to be applied, so
+we need to explicitly test for basechar, baselig and basemark entries.</p>
+--ldx]]--
+
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ -- check chainpos variant
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local index = getligaindex(start)
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
+ local alreadydone = cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local done = false
+ local startchar = getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar = getchar(start)
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head, start, false
+end
+
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
+ -- todo: kerns in components of ligatures
+ local snext = getnext(start)
+ if not snext then
+ return head, start, false
+ else
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ local lookuptype = lookuptypes[lookupname]
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then -- probably not needed
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else -- wrong ... position has different entries
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+end
+
+--[[ldx--
+<p>I will implement multiple chain replacements once I run into a font that uses
+it. It's not that complex to handle.</p>
+--ldx]]--
+
+local chainmores = { }
+local chainprocs = { }
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+
+local logwarning = report_subchain
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+
+local logwarning = report_chain
+
+-- We could share functions but that would lead to extra function calls with many
+-- arguments, redundant tests and confusing messages.
+
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+-- The reversesub is a special case, which is why we need to store the replacements
+-- in a bit weird way. There is no lookup and the replacement comes from the lookup
+-- itself. It is meant mostly for dealing with Urdu.
+
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char = getchar(start)
+ local replacement = replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
+ else
+ return head, start, false
+ end
+end
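+
+-- Illustrative shape of the replacements hash consumed above (the codepoints are
+-- hypothetical): the new character is looked up directly by the current one, so
+-- no separate lookup table is involved.
+--
+-- local replacements = { [0x0041] = 0x0061, [0x0042] = 0x0062 }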
+
+--[[ldx--
+<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature where ligature can be an invalid
+one in the sense that it will replace multiple by one but not necessarily one that
+looks like the combination (i.e. it is the counterpart of multiple then). For
+example, the following is valid:</p>
+
+<typing>
+<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
+</typing>
+
+<p>Therefore we don't really do the replacement here yet unless we have the
+single lookup case. The efficiency of the replacements can be improved by deleting
+as little as needed but that would also make the code even more messy.</p>
+--ldx]]--
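+
+-- The example from the documentation above in data form (purely illustrative,
+-- not the actual lookup layout): the chain matches the context a b c d e and
+-- then dispatches three sub lookups at their respective positions.
+--
+-- local steps = {
+--     { at = 1, type = "gsub_single",   map = { a = "A" } },
+--     { at = 2, type = "gsub_multiple", map = { b = { "B", "C", "D" } } },
+--     { at = 3, type = "gsub_ligature", map = { c = { d = { e = { ligature = "E" } } } } },
+-- }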
+
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = getnext(start)
+-- if not marks[getchar(next)] then
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = getnext(start)
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
+
+--[[ldx--
+<p>Here we replace start by a single variant. First we delete the rest of the
+match.</p>
+--ldx]]--
+
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ -- todo: marks ?
+ local current = start
+ local subtables = currentlookup.subtables
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if getid(current) == glyph_code then
+ local currentchar = getchar(current)
+ local lookupname = subtables[1] -- only 1
+ local replacement = lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement = replacement[currentchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ resetinjection(current)
+ setfield(current,"char",replacement)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_single = chainprocs.gsub_single
+
+--[[ldx--
+<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
+the match.</p>
+--ldx]]--
+
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ -- local head, n = delete_till_stop(head,start,stop)
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local replacements = lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements = replacements[startchar]
+ if not replacements or replacements == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_multiple = chainprocs.gsub_multiple
+
+--[[ldx--
+<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
+--ldx]]--
+
+-- char_1 mark_1 -> char_x mark_1 (ignore marks)
+-- char_1 mark_1 -> char_x
+
+-- to be checked: do we always have just one glyph?
+-- we can also have alternates for marks
+-- marks come last anyway
+-- are there cases where we need to delete the mark
+
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current = start
+ local subtables = currentlookup.subtables
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if getid(current) == glyph_code then -- is this check needed?
+ local currentchar = getchar(current)
+ local lookupname = subtables[1]
+ local alternatives = lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives = alternatives[currentchar]
+ if alternatives then
+ local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_alternate = chainprocs.gsub_alternate
+
+--[[ldx--
+<p>When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks by a separate function). We
+assume rather stupid ligatures (no complex disc nodes).</p>
+--ldx]]--
+
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local ligatures = lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures = ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s = getnext(start)
+ local discfound = false
+ local last = stop
+ local nofreplacements = 0
+ local skipmark = currentlookup.flags[1]
+ while s do
+ local id = getid(s)
+ if id == disc_code then
+ s = getnext(s)
+ discfound = true
+ else
+ local schar = getchar(s)
+ if skipmark and marks[schar] then -- marks
+ s = getnext(s)
+ else
+ local lg = ligatures[schar]
+ if lg then
+ ligatures, last, nofreplacements = lg, s, nofreplacements + 1
+ if s == stop then
+ break
+ else
+ s = getnext(s)
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2 = ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop = last
+ end
+ if trace_ligatures then
+ if start == stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ end
+ end
+ head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head, start, true, nofreplacements
+ elseif trace_bugs then
+ if start == stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ end
+ end
+ end
+ end
+ return head, start, false, 0
+end
+
+chainmores.gsub_ligature = chainprocs.gsub_ligature
+
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head, start, false
+ end
+ end
+ end
+ -- todo: like marks a ligatures hash
+ local index = getligaindex(start)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone = cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local exitanchors = lookuphash[lookupname]
+ if exitanchors then
+ exitanchors = exitanchors[startchar]
+ end
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ -- untested .. needs checking for the new model
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar] -- needed ?
+ if kerns then
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_single = chainprocs.gpos_single -- okay?
+
+-- when machines become faster i will make a shared function
+
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext = getnext(start)
+ if snext then
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar]
+ if kerns then
+ local lookuptype = lookuptypes[lookupname]
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a, b = krn[2], krn[6]
+ if a and a ~= 0 then
+ local k = setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ end
+ if b and b ~= 0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
+
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+-- somehow l or f is global
+-- we don't need to pass the currentcontext, saves a bit
+-- make a slow variant that can be activated but with more tracing
+
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+
+local quit_on_no_replacement = true
+
+directives.register("otf.chain.quitonnoreplacement",function(value) -- maybe per font
+ quit_on_no_replacement = value
+end)
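+
+-- A minimal usage sketch (assuming the generic enable/disable helpers of the
+-- directives framework are available); when the directive is disabled, a rule
+-- that matches but has no replacement is no longer flagged as done:
+--
+-- directives.enable ("otf.chain.quitonnoreplacement") -- the default
+-- directives.disable("otf.chain.quitonnoreplacement")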
+
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
+ local flags = sequence.flags
+ local done = false
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
+ local markclass = sequence.markclass -- todo, first we need a proper test
+ local skipped = false
+ for k=1,#contexts do
+ local match = true
+ local current = start
+ local last = start
+ local ck = contexts[k]
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
+ if s == 1 then
+ -- never happens
+ match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ else
+ -- maybe we need a better space check (maybe check for glue or category or combination)
+ -- we cannot optimize for n=2 because there can be disc nodes
+ local f, l = ck[4], ck[5]
+ -- current match
+ if f == 1 and f == l then -- current only
+ -- already a hit
+ -- match = true
+ else -- before/current/after | before/current | current/after
+ -- no need to test first hit (to be optimized)
+ if f == l then -- new, else last out of sync (f is > 1)
+ -- match = true
+ else
+ local n = f + 1
+ last = getnext(last)
+ while n <= l do
+ if last then
+ local id = getid(last)
+ if id == glyph_code then
+ if getfont(last) == currentfont and getsubtype(last)<256 then
+ local char = getchar(last)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last = getnext(last)
+ elseif seq[n][char] then
+ if n < l then
+ last = getnext(last)
+ end
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ last = getnext(last)
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- before
+ if match and f > 1 then
+ local prev = getprev(start)
+ if prev then
+ local n = f-1
+ while n >= 1 do
+ if prev then
+ local id = getid(prev)
+ if id == glyph_code then
+ if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
+ local char = getchar(prev)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip 'm
+ elseif seq[n][32] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ prev = getprev(prev)
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n -1
+ else
+ match = false
+ break
+ end
+ end
+ else
+ match = false
+ end
+ end
+ -- after
+ if match and s > l then
+ local current = last and getnext(last)
+ if current then
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
+ local n = l + 1
+ while n <= s do
+ if current then
+ local id = getid(current)
+ if id == glyph_code then
+ if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
+ local char = getchar(current)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ -- skip 'm
+ elseif seq[n][32] then -- brrr
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ current = getnext(current)
+ elseif seq[n][32] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ end
+ else
+ match = false
+ end
+ end
+ end
+ if match then
+ -- ck == currentcontext
+ if trace_contexts then
+ local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
+ local char = getchar(start)
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups = ck[6]
+ if chainlookups then
+ local nofchainlookups = #chainlookups
+ -- we can speed this up if needed
+ if nofchainlookups == 1 then
+ local chainlookupname = chainlookups[1]
+ local chainlookup = lookuptable[chainlookupname]
+ if chainlookup then
+ local cp = chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done = true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else -- shouldn't happen
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i = 1
+ while true do
+ if skipped then
+ while true do
+ local char = getchar(start)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = getnext(start)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname = chainlookups[i]
+ local chainlookup = lookuptable[chainlookupname]
+ if not chainlookup then
+ -- okay, n matches, < n replacements
+ i = i + 1
+ else
+ local cp = chainmores[chainlookup.type]
+ if not cp then
+ -- actually an error
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i = i + 1
+ else
+ local ok, n
+ head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ -- messy since last can be changed !
+ if ok then
+ done = true
+ -- skip next one(s) if ligature
+ i = i + (n or 1)
+ else
+ i = i + 1
+ end
+ end
+ end
+ if i > nofchainlookups then
+ break
+ elseif start then
+ start = getnext(start)
+ else
+ -- weird
+ end
+ end
+ end
+ else
+ local replacements = ck[7]
+ if replacements then
+ head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ else
+ done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head, start, done
+end
+
+-- Because we want to keep this elsewhere (and because speed is less of an issue) we
+-- pass the font id so that the verbose variant can access the relevant helper tables.
+
+local verbose_handle_contextchain = function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+
+otf.chainhandlers = {
+ normal = normal_handle_contextchain,
+ verbose = verbose_handle_contextchain,
+}
+
+function otf.setcontextchain(method)
+ if not method or method == "normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then -- no need for a message while making the format
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain = normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler = otf.chainhandlers[method]
+ handlers.contextchain = function(...)
+ return handler(currentfont,...) -- hm, get rid of ...
+ end
+ end
+ handlers.gsub_context = handlers.contextchain
+ handlers.gsub_contextchain = handlers.contextchain
+ handlers.gsub_reversecontextchain = handlers.contextchain
+ handlers.gpos_contextchain = handlers.contextchain
+ handlers.gpos_context = handlers.contextchain
+end
+
+otf.setcontextchain()
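+
+-- A small sketch of switching handlers: "verbose" is the only alternative
+-- registered above and, as long as no real verbose handler has been installed,
+-- it only warns and reverts to the normal one:
+--
+-- otf.setcontextchain("verbose") -- wrapped so the handler also receives the font id
+-- otf.setcontextchain()          -- back to the plain contextchain handler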
+
+local missing = { } -- we only report once
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+
+local logwarning = report_process
+
+local function report_missing_cache(typ,lookup)
+ local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
+ local t = f[typ] if not t then t = { } f[typ] = t end
+ if not t[lookup] then
+ t[lookup] = true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+
+local resolved = { } -- we only resolve a font,script,language pair once
+
+-- todo: pass all these 'locals' in a table
+
+local lookuphashes = { }
+
+setmetatableindex(lookuphashes, function(t,font)
+ local lookuphash = fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash = false
+ end
+ t[font] = lookuphash
+ return lookuphash
+end)
+
+-- fonts.hashes.lookups = lookuphashes
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local function initialize(sequence,script,language,enabled)
+ local features = sequence.features
+ if features then
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ end
+ end
+ end
+ else
+ -- can't happen
+ end
+ end
+ return false
+end
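+
+-- For reference: each entry that initialize returns (and that otf.dataset below
+-- collects per font, script and language) is an indexed tuple which
+-- featuresprocessor later unpacks as
+--
+-- { feature value, analyzer attribute or false, chain or 0, kind, sequence }
+--    dataset[1]        dataset[2]                dataset[3]  [4]     [5]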
+
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
+ local shared = tfmdata.shared
+ local properties = tfmdata.properties
+ local language = properties.language or "dflt"
+ local script = properties.script or "dflt"
+ local enabled = shared.features
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
+ rs[language] = rl
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
+ end
+ return rl
+end
+
+-- elseif id == glue_code then
+-- if p[5] then -- chain
+-- local pc = pp[32]
+-- if pc then
+-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
+-- if ok then
+-- done = true
+-- end
+-- if start then start = getnext(start) end
+-- else
+-- start = getnext(start)
+-- end
+-- else
+-- start = getnext(start)
+-- end
+
+-- there will be a new direction parser (pre-parsed etc)
+
+-- less bytecode: 290 -> 254
+--
+-- attr = attr or false
+--
+-- local a = getattr(start,0)
+-- if (a == attr and (not attribute or getprop(start,a_state) == attribute)) or (not attribute or getprop(start,a_state) == attribute) then
+-- -- the action
+-- end
+
+local function featuresprocessor(head,font,attr)
+
+ local lookuphash = lookuphashes[font] -- we can also check sequences here
+
+ if not lookuphash then
+ return head, false
+ end
+
+ head = tonut(head)
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ resources = tfmdata.resources
+
+ marks = resources.marks
+ anchorlookups = resources.lookup_to_anchor
+ lookuptable = resources.lookups
+ lookuptypes = resources.lookuptypes
+ lookuptags = resources.lookuptags
+
+ currentfont = font
+ rlmode = 0
+
+ local sequences = resources.sequences
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+
+ local dirstack = { } -- could move outside function
+
+ -- We could work on sub start-stop ranges instead but I wonder if there is that
+ -- much speed gain (experiments showed that it did not make much sense) and we need
+ -- to keep track of directions anyway. Also at some point I want to play with
+ -- font interactions and then we do need the full sweeps.
+
+ -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+ -- so that multiple cases are also covered.)
+
+ -- todo: retain prev
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local typ = sequence.type
+ local subtables = sequence.subtables
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ local handler = handlers[typ]
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getprev(start) end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ end
+ else
+ local handler = handlers[typ]
+ local ns = #subtables
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if ns == 1 then -- happens often
+ local lookupname = subtables[1]
+ local lookupcache = lookuphash[lookupname]
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(typ,lookupname)
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- sequence can be removed
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = getprev(disc)
+ local next = getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+ -- sequence can be removed
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ return next
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- sequence can be removed
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success = true
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if getsubtype(start) == discretionary_code then
+ local pre = getfield(start,"pre")
+ if pre then
+ local new = subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post = getfield(start,"post")
+ if post then
+ local new = subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace = getfield(start,"replace")
+ if replace then
+ local new = subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = getnext(start)
+ elseif id == whatsit_code then -- will be function
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
+ end
+ end
+ end
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = getprev(disc)
+ local next = getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ return next
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if getsubtype(start) == discretionary_code then
+ local pre = getfield(start,"pre")
+ if pre then
+ local new = subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post = getfield(start,"post")
+ if post then
+ local new = subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace = getfield(start,"replace")
+ if replace then
+ local new = subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+elseif typ == "gpos_single" or typ == "gpos_pair" then
+ kerndisc(start)
+ end
+ start = getnext(start)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
+ end
+ end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+
+ end
+
+ head = tonode(head)
+
+ return head, done
+end
+
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if target then
+ target[unicode] = lookupdata
+ else
+ lookuphash[lookupname] = { [unicode] = lookupdata }
+ end
+end
+
+local action = {
+
+ substitution = generic,
+ multiple = generic,
+ alternate = generic,
+ position = generic,
+
+ ligature = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ for i=1,#lookupdata do
+ local li = lookupdata[i]
+ local tu = target[li]
+ if not tu then
+ tu = { }
+ target[li] = tu
+ end
+ target = tu
+ end
+ target.ligature = unicode
+ end,
+
+ pair = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ local others = target[unicode]
+ local paired = lookupdata[1]
+ if others then
+ others[paired] = lookupdata
+ else
+ others = { [paired] = lookupdata }
+ target[unicode] = others
+ end
+ end,
+
+}
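+
+-- As an illustration (hypothetical lookup name and unicodes): a ligature lookup
+-- that maps f + f + i onto the ffi glyph ends up in the hash as nested tables,
+--
+-- lookuphash["ls_l_1"] = { [0x66] = { [0x66] = { [0x69] = { ligature = 0xFB03 } } } }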
+
+local function prepare_lookups(tfmdata)
+
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local anchor_to_lookup = resources.anchor_to_lookup
+ local lookup_to_anchor = resources.lookup_to_anchor
+ local lookuptypes = resources.lookuptypes
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+
+ -- we cannot free the entries in the descriptions as sometimes we access
+ -- them directly (for instance anchors) ... selectively freeing doesn't save
+ -- much memory as it's only a reference to a table and the slot in the
+ -- description hash is not freed anyway
+
+ for unicode, character in next, characters do -- we cannot loop over descriptions !
+
+ local description = descriptions[unicode]
+
+ if description then
+
+ local lookups = description.slookups
+ if lookups then
+ for lookupname, lookupdata in next, lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+
+ local lookups = description.mlookups
+ if lookups then
+ for lookupname, lookuplist in next, lookups do
+ local lookuptype = lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata = lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+
+ local list = description.kerns
+ if list then
+ for lookup, krn in next, list do -- ref to glyph, saves lookup
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = krn
+ else
+ lookuphash[lookup] = { [unicode] = krn }
+ end
+ end
+ end
+
+ local list = description.anchors
+ if list then
+ for typ, anchors in next, list do -- types
+ if typ == "mark" or typ == "cexit" then -- or entry?
+ for name, anchor in next, anchors do
+ local lookups = anchor_to_lookup[name]
+ if lookups then
+ for lookup, _ in next, lookups do
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = anchors
+ else
+ lookuphash[lookup] = { [unicode] = anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ end
+
+ end
+
+end
+
+local function split(replacement,original)
+ local result = { }
+ for i=1,#replacement do
+ result[original[i]] = replacement[i]
+ end
+ return result
+end
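+
+-- For example (hypothetical unicodes), pairing replacement glyphs with the
+-- originals they replace:
+--
+-- split({ 0x0041, 0x0042 }, { 0x0061, 0x0062 }) -- { [0x0061] = 0x0041, [0x0062] = 0x0042 }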
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true, contextsub = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
+
+local function prepare_contextchains(tfmdata)
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local lookuptags = resources.lookuptags
+ local lookups = rawdata.lookups
+ if lookups then
+ for lookupname, lookupdata in next, rawdata.lookups do
+ local lookuptype = lookupdata.type
+ if lookuptype then
+ local rules = lookupdata.rules
+ if rules then
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
+ end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+ -- Eventually we can store start, stop and sequence in the cached file
+ -- but then less sharing takes place, so it's best not to do that without a lot
+ -- of profiling; let's forget about it.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
+ end
+ end
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
+ end
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
+ end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
+ end
+ end
+ end
+ end
+ end
+ else
+ -- no rules
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
+ end
+ end
+ end
+end
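+
+-- For reference, each rule stored above is the indexed ck tuple that
+-- normal_handle_contextchain picks apart:
+--
+-- ck = { rule number, lookuptype, sequence, first (f), last (l), lookups, replacements }
+--          ck[1]        ck[2]       ck[3]     ck[4]      ck[5]    ck[6]      ck[7]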
+
+-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ -- beware we need to use the topmost properties table
+ local rawdata = tfmdata.shared.rawdata
+ local properties = rawdata.properties
+ if not properties.initialized then
+ local starttime = trace_preparing and os.clock()
+ local resources = rawdata.resources
+ resources.lookuphash = resources.lookuphash or { }
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized = true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ position = 1,
+ node = featuresinitializer,
+ },
+ processors = {
+ node = featuresprocessor,
+ }
+}
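+
+-- With the feature registered above, a node mode font request in the style used
+-- elsewhere in this commit (see luatex-test.tex), for example (illustrative spec)
+--
+--   \font\test=file:dejavuserif.ttf:mode=node;+liga;+kern at 10pt
+--
+-- ends up being processed by featuresprocessor for every list using that font.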
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 7995be33e..c81e8cd1a 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -27,6 +27,17 @@ if not modules then modules = { } end modules ['luatex-fonts'] = {
-- also add more helper code here, but that depends on to what extent metatex (sidetrack of context)
-- evolves into a low level layer (depends on time, as usual).
+texio.write_nl("")
+texio.write_nl("--------------------------------------------------------------------------------")
+texio.write_nl("The font code has been brought in sync with the context version of 2014.12.21 so")
+texio.write_nl("if things don't work out as expected the interfacing needs to be checked. When")
+texio.write_nl("this works as expected a second upgrade will happen that gives a more complete")
+texio.write_nl("support and another sync with the context code (that new code is currently being")
+texio.write_nl("tested. The base pass is now integrated in the main pass. The results can differ")
+texio.write_nl("from those in context because there we integrate some mechanisms differently.")
+texio.write_nl("--------------------------------------------------------------------------------")
+texio.write_nl("")
+
utf = utf or unicode.utf8
-- We have some (global) hooks (for latex):
@@ -210,12 +221,12 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-oti.lua')
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
- loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
- loadmodule('font-ota.lua')
- loadmodule('font-otn.lua')
- loadmodule('font-otp.lua') -- optional
+ loadmodule('luatex-fonts-inj.lua')
+ loadmodule('luatex-fonts-ota.lua')
+ loadmodule('luatex-fonts-otn.lua')
+ loadmodule('font-otp.lua')
loadmodule('luatex-fonts-lua.lua')
- loadmodule('font-def.lua')
+ loadmodule('font-def.lua') -- this code (stripped) might end up in luatex-fonts-def.lua
loadmodule('luatex-fonts-def.lua')
loadmodule('luatex-fonts-ext.lua') -- some extensions
diff --git a/tex/generic/context/luatex/luatex-math.tex b/tex/generic/context/luatex/luatex-math.tex
index ab304b974..604b4a1f8 100644
--- a/tex/generic/context/luatex/luatex-math.tex
+++ b/tex/generic/context/luatex/luatex-math.tex
@@ -19,15 +19,6 @@
% a bunch of fonts:
-\font\tenrm = file:lmroman10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
-\font\sevenrm = file:lmroman7-regular.otf:+liga;+kern;+tlig;+trep at 7pt
-\font\fiverm = file:lmroman5-regular.otf:+liga;+kern;+tlig;+trep at 5pt
-
-\font\tentt = file:lmmono10-regular.otf at 10pt
-\font\tensl = file:lmromanslant10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
-\font\tenit = file:lmroman10-italic.otf:+liga;+kern;+tlig;+trep at 10pt
-\font\tenbi = file:lmroman10-bolditalic.otf:+liga;+kern;+tlig;+trep at 10pt
-
\let \teni = \relax
\let \seveni = \relax
\let \fivei = \relax
@@ -35,19 +26,58 @@
\let \sevensy = \relax
\let \fivesy = \relax
\let \tenex = \relax
-\let \tenbf = \relax
\let \sevenbf = \relax
\let \fivebf = \relax
-\tenrm
+\def\latinmodern
+ {\font\tenrm = file:lmroman10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\sevenrm = file:lmroman7-regular.otf:+liga;+kern;+tlig;+trep at 7pt
+ \font\fiverm = file:lmroman5-regular.otf:+liga;+kern;+tlig;+trep at 5pt
+ %
+ \font\tentt = file:lmmono10-regular.otf at 10pt
+ \font\tensl = file:lmromanslant10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenit = file:lmroman10-italic.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbf = file:lmroman10-bold.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbi = file:lmroman10-bolditalic.otf:+liga;+kern;+tlig;+trep at 10pt
+ %
+ \font\mathfonttextupright = file:latinmodern-math.otf:ssty=0;fixmath=yes at 10pt
+ \font\mathfontscriptupright = file:latinmodern-math.otf:ssty=1;fixmath=yes at 7pt
+ \font\mathfontscriptscriptupright = file:latinmodern-math.otf:ssty=2;fixmath=yes at 5pt
+ %
+ \textfont 0 = \mathfonttextupright
+ \scriptfont 0 = \mathfontscriptupright
+ \scriptscriptfont 0 = \mathfontscriptscriptupright
+ %
+ \tenrm}
-\font\mathfonttextupright = file:latinmodern-math.otf:ssty=0;fixmath=yes at 10pt
-\font\mathfontscriptupright = file:latinmodern-math.otf:ssty=1;fixmath=yes at 7pt
-\font\mathfontscriptscriptupright = file:latinmodern-math.otf:ssty=2;fixmath=yes at 5pt
+\def\lucidabright
+ {\font\tenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\sevenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 7pt
+ \font\fiverm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 5pt
+ %
+ \font\tentt = file:lucidabrightot.otf at 10pt
+ \font\tenit = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenit = file:lucidabrightot-italic.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbf = file:lucidabrightot-demi.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbi = file:lucidabrightot-demiitalic.otf:+liga;+kern;+tlig;+trep at 10pt
+ %
+ \font\mathfonttextupright = file:lucidabrightmathot.otf:ssty=0;fixmath=yes at 10pt
+ \font\mathfontscriptupright = file:lucidabrightmathot.otf:ssty=1;fixmath=yes at 7pt
+ \font\mathfontscriptscriptupright = file:lucidabrightmathot.otf:ssty=2;fixmath=yes at 5pt
+ %
+ \textfont 0 = \mathfonttextupright
+ \scriptfont 0 = \mathfontscriptupright
+ \scriptscriptfont 0 = \mathfontscriptscriptupright
+ %
+ \tenrm}
-\textfont 0 = \mathfonttextupright
-\scriptfont 0 = \mathfontscriptupright
-\scriptscriptfont 0 = \mathfontscriptscriptupright
+\directlua {
+ if arguments["mtx:lucidabright"] then
+ tex.print("\string\\lucidabright")
+ else
+ tex.print("\string\\latinmodern")
+ end
+}
\newtoks\everymathrm
\newtoks\everymathmit
@@ -58,12 +88,12 @@
\newtoks\everymathbi
\newtoks\everymathtt
-\def\rm{\fam0\relax\the\everymathmrm\relax\tenrm\relax}
-\def\it{\fam0\relax\the\everymathit \relax\tenit\relax}
-\def\sl{\fam0\relax\the\everymathsl \relax\tensl\relax}
-\def\bf{\fam0\relax\the\everymathbf \relax\tenbf\relax}
-\def\bi{\fam0\relax\the\everymathbi \relax\tenbi\relax}
-\def\tt{\fam0\relax\the\everymathtt \relax\tentt\relax}
+\def\rm{\fam0\relax\the\everymathrm\relax\tenrm\relax}
+\def\it{\fam0\relax\the\everymathit\relax\tenit\relax}
+\def\sl{\fam0\relax\the\everymathsl\relax\tensl\relax}
+\def\bf{\fam0\relax\the\everymathbf\relax\tenbf\relax}
+\def\bi{\fam0\relax\the\everymathbi\relax\tenbi\relax}
+\def\tt{\fam0\relax\the\everymathtt\relax\tentt\relax}
\let\mit \relax % use names or \Uchar or define a vector
\let\cal \relax % idem, i'm not in the mood for this now
@@ -1799,7 +1829,8 @@
% a few definitions:
-\def\sqrt{\Uroot "0 "221A }
+\def\sqrt {\Uroot "0 "221A{}}
+\def\root#1\of{\Uroot "0 "221A{#1}}
% \skewchar\teni='177 \skewchar\seveni='177 \skewchar\fivei='177
% \skewchar\tensy='60 \skewchar\sevensy='60 \skewchar\fivesy='60
diff --git a/tex/generic/context/luatex/luatex-mplib.tex b/tex/generic/context/luatex/luatex-mplib.tex
index 8af9f2d8a..09dd179f3 100644
--- a/tex/generic/context/luatex/luatex-mplib.tex
+++ b/tex/generic/context/luatex/luatex-mplib.tex
@@ -61,6 +61,7 @@
%D Now load the needed \LUA\ code.
\directlua{dofile(kpse.find_file('luatex-mplib.lua'))}
+% \directlua{dofile(resolvers.findfile('luatex-mplib.lua'))}
%D The following code takes care of encapsulating the literals:
diff --git a/tex/generic/context/luatex/luatex-plain.tex b/tex/generic/context/luatex/luatex-plain.tex
index 1ea8558e9..c9a9e36cf 100644
--- a/tex/generic/context/luatex/luatex-plain.tex
+++ b/tex/generic/context/luatex/luatex-plain.tex
@@ -20,6 +20,7 @@
\input {luatex-math}%
\input {luatex-languages}%
\input {luatex-mplib}%
+ % \input {luatex-gadgets}%
}
\edef\fmtversion{\fmtversion+luatex}
diff --git a/tex/generic/context/luatex/luatex-test.tex b/tex/generic/context/luatex/luatex-test.tex
index fbf8ce3cf..6f48e0ced 100644
--- a/tex/generic/context/luatex/luatex-test.tex
+++ b/tex/generic/context/luatex/luatex-test.tex
@@ -35,14 +35,16 @@
\font\gothic=msgothic(ms-gothic) {\gothic whatever}
-\font\testy=file:IranNastaliq.ttf:mode=node;script=arab;language=dflt;+calt;+ccmp;+init;+isol;+medi;+fina;+liga;+rlig;+kern;+mark;+mkmk at 14pt
-\testy این یک متن نمونه است با قلم ذر که درست آمده است.
+\bgroup
-\pdfprotrudechars2 \pdfadjustspacing2
+ \pdfprotrudechars2
+ \pdfadjustspacing2
-\font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par
-\font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par
-\font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par
+
+\egroup
\setmplibformat{plain}
@@ -64,13 +66,12 @@
\font\test=dejavuserif:+kern at 10pt \test
-\hsize 1mm
-\noindent Циолковский
+\bgroup \hsize 1mm \noindent Циолковский \par \egroup
\loadpatterns{ru}
-\noindent Циолковский
+\bgroup \hsize 1mm \noindent Циолковский \par \egroup
a bit of math
@@ -84,4 +85,30 @@ $$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$
\cows Hello World!
+% math test
+
+\latinmodern
+
+\def\sqrt{\Uroot "0 "221A{}}
+
+\def\root#1\of{\Uroot "0 "221A{#1}}
+
+Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$
+
+$\root3\of x$
+
+$\sin{x}$
+
+\lucidabright
+
+\def\sqrt{\Uroot "0 "221A{}}
+
+\def\root#1\of{\Uroot "0 "221A{#1}}
+
+Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$
+
+$\root3\of x$
+
+$\sin{x}$
+
\end
diff --git a/web2c/contextcnf.lua b/web2c/contextcnf.lua
index 366df01ac..af65f41d0 100644
--- a/web2c/contextcnf.lua
+++ b/web2c/contextcnf.lua
@@ -101,13 +101,18 @@ return {
PERLINPUTS = ".;$TEXMF/scripts/context/perl",
PYTHONINPUTS = ".;$TEXMF/scripts/context/python",
RUBYINPUTS = ".;$TEXMF/scripts/context/ruby",
- LUAINPUTS = ".;$TEXINPUTS;$TEXMF/scripts/context/lua//",
+ LUAINPUTS = ".;$TEXINPUTS;$TEXMF/scripts/context/lua//;$TEXMF",
CLUAINPUTS = ".;$SELFAUTOLOC/lib/{context,$engine,luatex}/lua//",
+ -- texmf-local/tex/generic/example/foo :
+ --
+ -- package.helpers.trace = true
+ -- require("example.foo.bar")
+
-- Not really used by MkIV so they might go away.
- BIBINPUTS = ".;$TEXMF/bibtex/bib//",
- BSTINPUTS = ".;$TEXMF/bibtex/bst//",
+ BIBINPUTS = ".;$TEXMF/bibtex/bib//;$TEXMF/tex/context//",
+ BSTINPUTS = ".;$TEXMF/bibtex/bst//;$TEXMF/tex/context//",
-- Experimental
@@ -118,38 +123,7 @@ return {
FONTCONFIG_FILE = "fonts.conf",
FONTCONFIG_PATH = "$TEXMFSYSTEM/fonts/conf",
- limiters = {
- input = {
- -- any = {
- -- { "permit", "*" },
- -- },
- -- restricted = {
- -- { "permit", "*" },
- -- },
- paranoid = {
- { "permit", "^[^/]+$" },
- { "permit", "^./" },
- { "forbid", ".." },
- { "tree" , "TEXMF" },
- -- { "tree" , "MPINPUTS" },
- -- { "tree" , "TEXINPUTS" },
- { "forbid", "^/.." },
- { "forbid", "^[a-c]:/.." },
- },
- },
- output = {
- -- any = {
- -- { "permit", "*" },
- -- },
- -- restricted = {
- -- { "permit", "*" },
- -- },
- paranoid = {
- { "permit", "^[^/]+$" },
- { "permit", "^./" },
- },
- }
- },
+ -- we now have a different subsystem for this
},
@@ -211,6 +185,14 @@ return {
-- ["fonts.usesystemfonts"] = false,
+ -- You can permit loading modules with no prefix:
+
+ -- ["modules.permitunprefixed"] = "no",
+
+ -- You can permit loading files from anywhere in the TDS tree:
+
+ -- ["resolvers.otherwise"] = "no",
+
},
experiments = {